1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28
29
30 #include "v8.h"
31
32 #if V8_TARGET_ARCH_MIPS
33
34 #include "codegen.h"
35 #include "debug.h"
36 #include "deoptimizer.h"
37 #include "full-codegen.h"
38 #include "runtime.h"
39
40 namespace v8 {
41 namespace internal {
42
43
44 #define __ ACCESS_MASM(masm)
45
46
47 void Builtins::Generate_Adaptor(MacroAssembler* masm,
48 CFunctionId id,
49 BuiltinExtraArguments extra_args) {
50 // ----------- S t a t e -------------
51 // -- a0 : number of arguments excluding receiver
52 // -- a1 : called function (only guaranteed when
53 // -- extra_args requires it)
54 // -- cp : context
55 // -- sp[0] : last argument
56 // -- ...
57 // -- sp[4 * (argc - 1)] : first argument
58 // -- sp[4 * argc] : receiver
59 // -----------------------------------
60
61 // Insert extra arguments.
62 int num_extra_args = 0;
63 if (extra_args == NEEDS_CALLED_FUNCTION) {
64 num_extra_args = 1;
65 __ push(a1);
66 } else {
67 ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
68 }
69
70 // JumpToExternalReference expects s0 to contain the number of arguments
71 // including the receiver and the extra arguments.
72 __ Addu(s0, a0, num_extra_args + 1);
73 __ sll(s1, s0, kPointerSizeLog2);
74 __ Subu(s1, s1, kPointerSize);
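  // Sketch of the values computed above: s0 = argc + num_extra_args + 1 (the
  // receiver included), and s1 = (s0 - 1) * kPointerSize, i.e. the byte
  // distance from sp to the receiver slot, which the C entry code presumably
  // uses to locate the arguments on the stack.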
75 __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
76 }
77
78
79 // Load the built-in InternalArray function from the current context.
80 static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
81 Register result) {
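  // In effect (sketch):
  //   result = native_context->get(Context::INTERNAL_ARRAY_FUNCTION_INDEX),
  // where the native context is reached through the current context's global
  // object, as done by the loads below.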
82 // Load the native context.
83
84 __ lw(result,
85 MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
86 __ lw(result,
87 FieldMemOperand(result, GlobalObject::kNativeContextOffset));
88 // Load the InternalArray function from the native context.
89 __ lw(result,
90 MemOperand(result,
91 Context::SlotOffset(
92 Context::INTERNAL_ARRAY_FUNCTION_INDEX)));
93 }
94
95
96 // Load the built-in Array function from the current context.
97 static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
98 // Load the native context.
99
100 __ lw(result,
101 MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
102 __ lw(result,
103 FieldMemOperand(result, GlobalObject::kNativeContextOffset));
104 // Load the Array function from the native context.
105 __ lw(result,
106 MemOperand(result,
107 Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
108 }
109
110
111 void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
112 // ----------- S t a t e -------------
113 // -- a0 : number of arguments
114 // -- ra : return address
115 // -- sp[...]: constructor arguments
116 // -----------------------------------
117 Label generic_array_code, one_or_more_arguments, two_or_more_arguments;
118
119 // Get the InternalArray function.
120 GenerateLoadInternalArrayFunction(masm, a1);
121
122 if (FLAG_debug_code) {
123 // The initial map for the builtin InternalArray function should be a map.
124 __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
125 __ SmiTst(a2, t0);
126 __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction,
127 t0, Operand(zero_reg));
128 __ GetObjectType(a2, a3, t0);
129 __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction,
130 t0, Operand(MAP_TYPE));
131 }
132
133 // Run the native code for the InternalArray function called as a normal
134 // function.
135 // Tail call a stub.
136 InternalArrayConstructorStub stub(masm->isolate());
137 __ TailCallStub(&stub);
138 }
139
140
141 void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
142 // ----------- S t a t e -------------
143 // -- a0 : number of arguments
144 // -- ra : return address
145 // -- sp[...]: constructor arguments
146 // -----------------------------------
147 Label generic_array_code;
148
149 // Get the Array function.
150 GenerateLoadArrayFunction(masm, a1);
151
152 if (FLAG_debug_code) {
153 // The initial map for the builtin Array function should be a map.
154 __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
155 __ SmiTst(a2, t0);
156 __ Assert(ne, kUnexpectedInitialMapForArrayFunction1,
157 t0, Operand(zero_reg));
158 __ GetObjectType(a2, a3, t0);
159 __ Assert(eq, kUnexpectedInitialMapForArrayFunction2,
160 t0, Operand(MAP_TYPE));
161 }
162
163 // Run the native code for the Array function called as a normal function.
164 // Tail call a stub.
165 Handle<Object> undefined_sentinel(
166 masm->isolate()->heap()->undefined_value(),
167 masm->isolate());
168 __ li(a2, Operand(undefined_sentinel));
169 ArrayConstructorStub stub(masm->isolate());
170 __ TailCallStub(&stub);
171 }
172
173
174 void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
175 // ----------- S t a t e -------------
176 // -- a0 : number of arguments
177 // -- a1 : constructor function
178 // -- ra : return address
179 // -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
180 // -- sp[argc * 4] : receiver
181 // -----------------------------------
182 Counters* counters = masm->isolate()->counters();
183 __ IncrementCounter(counters->string_ctor_calls(), 1, a2, a3);
184
185 Register function = a1;
186 if (FLAG_debug_code) {
187 __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, a2);
188 __ Assert(eq, kUnexpectedStringFunction, function, Operand(a2));
189 }
190
191 // Load the first argument into a0 and get rid of the rest.
192 Label no_arguments;
193 __ Branch(&no_arguments, eq, a0, Operand(zero_reg));
194 // First arg = sp[(argc - 1) * 4].
195 __ Subu(a0, a0, Operand(1));
196 __ sll(a0, a0, kPointerSizeLog2);
197 __ Addu(sp, a0, sp);
198 __ lw(a0, MemOperand(sp));
199 // sp now points to args[0]; drop args[0] and the receiver.
200 __ Drop(2);
201
202 Register argument = a2;
203 Label not_cached, argument_is_string;
204 __ LookupNumberStringCache(a0, // Input.
205 argument, // Result.
206 a3, // Scratch.
207 t0, // Scratch.
208 t1, // Scratch.
209 &not_cached);
210 __ IncrementCounter(counters->string_ctor_cached_number(), 1, a3, t0);
211 __ bind(&argument_is_string);
212
213 // ----------- S t a t e -------------
214 // -- a2 : argument converted to string
215 // -- a1 : constructor function
216 // -- ra : return address
217 // -----------------------------------
218
219 Label gc_required;
220 __ Allocate(JSValue::kSize,
221 v0, // Result.
222 a3, // Scratch.
223 t0, // Scratch.
224 &gc_required,
225 TAG_OBJECT);
226
227 // Initialize the String object.
228 Register map = a3;
229 __ LoadGlobalFunctionInitialMap(function, map, t0);
230 if (FLAG_debug_code) {
231 __ lbu(t0, FieldMemOperand(map, Map::kInstanceSizeOffset));
232 __ Assert(eq, kUnexpectedStringWrapperInstanceSize,
233 t0, Operand(JSValue::kSize >> kPointerSizeLog2));
234 __ lbu(t0, FieldMemOperand(map, Map::kUnusedPropertyFieldsOffset));
235 __ Assert(eq, kUnexpectedUnusedPropertiesOfStringWrapper,
236 t0, Operand(zero_reg));
237 }
238 __ sw(map, FieldMemOperand(v0, HeapObject::kMapOffset));
239
240 __ LoadRoot(a3, Heap::kEmptyFixedArrayRootIndex);
241 __ sw(a3, FieldMemOperand(v0, JSObject::kPropertiesOffset));
242 __ sw(a3, FieldMemOperand(v0, JSObject::kElementsOffset));
243
244 __ sw(argument, FieldMemOperand(v0, JSValue::kValueOffset));
245
246 // Ensure the object is fully initialized.
247 STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
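  // The four words written above (map, properties, elements, value) account
  // for the whole JSValue, so no further initialization is needed before
  // returning.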
248
249 __ Ret();
250
251 // The argument was not found in the number to string cache. Check
252 // if it's a string already before calling the conversion builtin.
253 Label convert_argument;
254 __ bind(&not_cached);
255 __ JumpIfSmi(a0, &convert_argument);
256
257 // Is it a String?
258 __ lw(a2, FieldMemOperand(a0, HeapObject::kMapOffset));
259 __ lbu(a3, FieldMemOperand(a2, Map::kInstanceTypeOffset));
260 STATIC_ASSERT(kNotStringTag != 0);
261 __ And(t0, a3, Operand(kIsNotStringMask));
262 __ Branch(&convert_argument, ne, t0, Operand(zero_reg));
263 __ mov(argument, a0);
264 __ IncrementCounter(counters->string_ctor_conversions(), 1, a3, t0);
265 __ Branch(&argument_is_string);
266
267 // Invoke the conversion builtin and put the result into a2.
268 __ bind(&convert_argument);
269 __ push(function); // Preserve the function.
270 __ IncrementCounter(counters->string_ctor_conversions(), 1, a3, t0);
271 {
272 FrameScope scope(masm, StackFrame::INTERNAL);
273 __ push(a0);
274 __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
275 }
276 __ pop(function);
277 __ mov(argument, v0);
278 __ Branch(&argument_is_string);
279
280 // Load the empty string into a2, remove the receiver from the
281 // stack, and jump back to the case where the argument is a string.
282 __ bind(&no_arguments);
283 __ LoadRoot(argument, Heap::kempty_stringRootIndex);
284 __ Drop(1);
285 __ Branch(&argument_is_string);
286
287 // At this point the argument is already a string. Call runtime to
288 // create a string wrapper.
289 __ bind(&gc_required);
290 __ IncrementCounter(counters->string_ctor_gc_required(), 1, a3, t0);
291 {
292 FrameScope scope(masm, StackFrame::INTERNAL);
293 __ push(argument);
294 __ CallRuntime(Runtime::kNewStringWrapper, 1);
295 }
296 __ Ret();
297 }
298
299
300 static void CallRuntimePassFunction(MacroAssembler* masm,
301 Runtime::FunctionId function_id) {
302 FrameScope scope(masm, StackFrame::INTERNAL);
303 // Push a copy of the function onto the stack.
304 // Push call kind information and function as parameter to the runtime call.
305 __ Push(a1, t1, a1);
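  // Of the three pushes above, the first copy of a1 and t1 are the values
  // restored by the Pop below, while the second copy of a1 is consumed as the
  // single argument of the runtime call.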
306
307 __ CallRuntime(function_id, 1);
308 // Restore the function and call kind information.
309 __ Pop(a1, t1);
310 }
311
312
313 static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
314 __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
315 __ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kCodeOffset));
316 __ Addu(at, a2, Operand(Code::kHeaderSize - kHeapObjectTag));
317 __ Jump(at);
318 }
319
320
321 void Builtins::Generate_InRecompileQueue(MacroAssembler* masm) {
322 // Checking whether the queued function is ready for install is optional,
323 // since we come across interrupts and stack checks elsewhere. However,
324 // not checking may delay installing ready functions, and always checking
325 // would be quite expensive. A good compromise is to first check against
326 // stack limit as a cue for an interrupt signal.
327 Label ok;
328 __ LoadRoot(t0, Heap::kStackLimitRootIndex);
329 __ Branch(&ok, hs, sp, Operand(t0));
330
331 CallRuntimePassFunction(masm, Runtime::kTryInstallRecompiledCode);
332 // Tail call to returned code.
333 __ Addu(at, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
334 __ Jump(at);
335
336 __ bind(&ok);
337 GenerateTailCallToSharedCode(masm);
338 }
339
340
341 void Builtins::Generate_ConcurrentRecompile(MacroAssembler* masm) {
342 CallRuntimePassFunction(masm, Runtime::kConcurrentRecompile);
343 GenerateTailCallToSharedCode(masm);
344 }
345
346
347 static void Generate_JSConstructStubHelper(MacroAssembler* masm,
348 bool is_api_function,
349 bool count_constructions) {
350 // ----------- S t a t e -------------
351 // -- a0 : number of arguments
352 // -- a1 : constructor function
353 // -- ra : return address
354 // -- sp[...]: constructor arguments
355 // -----------------------------------
356
357 // Should never count constructions for api objects.
358 ASSERT(!is_api_function || !count_constructions);
359
360 Isolate* isolate = masm->isolate();
361
362 // ----------- S t a t e -------------
363 // -- a0 : number of arguments
364 // -- a1 : constructor function
365 // -- ra : return address
366 // -- sp[...]: constructor arguments
367 // -----------------------------------
368
369 // Enter a construct frame.
370 {
371 FrameScope scope(masm, StackFrame::CONSTRUCT);
372
373 // Preserve the two incoming parameters on the stack.
374 __ sll(a0, a0, kSmiTagSize); // Tag arguments count.
375 __ MultiPushReversed(a0.bit() | a1.bit());
376
377 // Use t7 to hold undefined, which is used in several places below.
378 __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);
379
380 Label rt_call, allocated;
381 // Try to allocate the object without transitioning into C code. If any of
382 // the preconditions is not met, the code bails out to the runtime call.
383 if (FLAG_inline_new) {
384 Label undo_allocation;
385 #ifdef ENABLE_DEBUGGER_SUPPORT
386 ExternalReference debug_step_in_fp =
387 ExternalReference::debug_step_in_fp_address(isolate);
388 __ li(a2, Operand(debug_step_in_fp));
389 __ lw(a2, MemOperand(a2));
390 __ Branch(&rt_call, ne, a2, Operand(zero_reg));
391 #endif
392
393 // Load the initial map and verify that it is in fact a map.
394 // a1: constructor function
395 __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
396 __ JumpIfSmi(a2, &rt_call);
397 __ GetObjectType(a2, a3, t4);
398 __ Branch(&rt_call, ne, t4, Operand(MAP_TYPE));
399
400 // Check that the constructor is not constructing a JSFunction (see
401 // comments in Runtime_NewObject in runtime.cc), in which case the
402 // initial map's instance type would be JS_FUNCTION_TYPE.
403 // a1: constructor function
404 // a2: initial map
405 __ lbu(a3, FieldMemOperand(a2, Map::kInstanceTypeOffset));
406 __ Branch(&rt_call, eq, a3, Operand(JS_FUNCTION_TYPE));
407
408 if (count_constructions) {
409 Label allocate;
410 // Decrease generous allocation count.
411 __ lw(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
412 MemOperand constructor_count =
413 FieldMemOperand(a3, SharedFunctionInfo::kConstructionCountOffset);
414 __ lbu(t0, constructor_count);
415 __ Subu(t0, t0, Operand(1));
416 __ sb(t0, constructor_count);
417 __ Branch(&allocate, ne, t0, Operand(zero_reg));
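        // Each construction decrements the counter in the SharedFunctionInfo;
        // once it reaches zero, the runtime call below finalizes the instance
        // size (presumably reclaiming the slack reserved for in-object
        // properties).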
418
419 __ Push(a1, a2, a1); // a1 = Constructor.
420 // The call will replace the stub, so the countdown is only done once.
421 __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);
422
423 __ Pop(a1, a2);
424
425 __ bind(&allocate);
426 }
427
428 // Now allocate the JSObject on the heap.
429 // a1: constructor function
430 // a2: initial map
431 __ lbu(a3, FieldMemOperand(a2, Map::kInstanceSizeOffset));
432 __ Allocate(a3, t4, t5, t6, &rt_call, SIZE_IN_WORDS);
433
434 // The JSObject has been allocated; now initialize the fields. The map is
435 // set to the initial map, and properties and elements to the empty fixed array.
436 // a1: constructor function
437 // a2: initial map
438 // a3: object size
439 // t4: JSObject (not tagged)
440 __ LoadRoot(t6, Heap::kEmptyFixedArrayRootIndex);
441 __ mov(t5, t4);
442 __ sw(a2, MemOperand(t5, JSObject::kMapOffset));
443 __ sw(t6, MemOperand(t5, JSObject::kPropertiesOffset));
444 __ sw(t6, MemOperand(t5, JSObject::kElementsOffset));
445 __ Addu(t5, t5, Operand(3*kPointerSize));
446 ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
447 ASSERT_EQ(1 * kPointerSize, JSObject::kPropertiesOffset);
448 ASSERT_EQ(2 * kPointerSize, JSObject::kElementsOffset);
449
450 // Fill all the in-object properties with appropriate filler.
451 // a1: constructor function
452 // a2: initial map
453 // a3: object size (in words)
454 // t4: JSObject (not tagged)
455 // t5: First in-object property of JSObject (not tagged)
456 __ sll(t0, a3, kPointerSizeLog2);
457 __ addu(t6, t4, t0); // End of object.
458 ASSERT_EQ(3 * kPointerSize, JSObject::kHeaderSize);
459 __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);
460 if (count_constructions) {
461 __ lw(a0, FieldMemOperand(a2, Map::kInstanceSizesOffset));
462 __ Ext(a0, a0, Map::kPreAllocatedPropertyFieldsByte * kBitsPerByte,
463 kBitsPerByte);
464 __ sll(t0, a0, kPointerSizeLog2);
465 __ addu(a0, t5, t0);
466 // a0: offset of first field after pre-allocated fields
467 if (FLAG_debug_code) {
468 __ Assert(le, kUnexpectedNumberOfPreAllocatedPropertyFields,
469 a0, Operand(t6));
470 }
471 __ InitializeFieldsWithFiller(t5, a0, t7);
472 // Fill the remaining slack with one-pointer filler maps to allow later truncation.
473 __ LoadRoot(t7, Heap::kOnePointerFillerMapRootIndex);
474 }
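      // At this point t5 is expected to have been advanced past the
      // pre-allocated fields by InitializeFieldsWithFiller, so the call below
      // fills the remaining words up to t6 (the end of the object) with the
      // filler left in t7.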
475 __ InitializeFieldsWithFiller(t5, t6, t7);
476
477 // Add the object tag to make the JSObject real, so that we can continue
478 // and jump into the continuation code at any time from now on. Any
479 // failures need to undo the allocation, so that the heap is in a
480 // consistent state and verifiable.
481 __ Addu(t4, t4, Operand(kHeapObjectTag));
482
483 // Check if a non-empty properties array is needed. Continue with the
484 // allocated object if not; fall through to the runtime call if it is.
485 // a1: constructor function
486 // t4: JSObject
487 // t5: start of next object (not tagged)
488 __ lbu(a3, FieldMemOperand(a2, Map::kUnusedPropertyFieldsOffset));
489 // The instance sizes field contains both the pre-allocated property fields
490 // and the in-object properties.
491 __ lw(a0, FieldMemOperand(a2, Map::kInstanceSizesOffset));
492 __ Ext(t6, a0, Map::kPreAllocatedPropertyFieldsByte * kBitsPerByte,
493 kBitsPerByte);
494 __ Addu(a3, a3, Operand(t6));
495 __ Ext(t6, a0, Map::kInObjectPropertiesByte * kBitsPerByte,
496 kBitsPerByte);
497 __ subu(a3, a3, t6);
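      // Sketch of the arithmetic above: a3 = unused_property_fields +
      // pre_allocated_property_fields - in_object_properties, i.e. the number
      // of properties that need a backing store outside the object.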
498
499 // Done if no extra properties are to be allocated.
500 __ Branch(&allocated, eq, a3, Operand(zero_reg));
501 __ Assert(greater_equal, kPropertyAllocationCountFailed,
502 a3, Operand(zero_reg));
503
504 // Scale the number of elements by pointer size and add the header for
505 // FixedArrays to the start of the next object calculation from above.
506 // a1: constructor
507 // a3: number of elements in properties array
508 // t4: JSObject
509 // t5: start of next object
510 __ Addu(a0, a3, Operand(FixedArray::kHeaderSize / kPointerSize));
511 __ Allocate(
512 a0,
513 t5,
514 t6,
515 a2,
516 &undo_allocation,
517 static_cast<AllocationFlags>(RESULT_CONTAINS_TOP | SIZE_IN_WORDS));
518
519 // Initialize the FixedArray.
520 // a1: constructor
521 // a3: number of elements in properties array (untagged)
522 // t4: JSObject
523 // t5: start of next object
524 __ LoadRoot(t6, Heap::kFixedArrayMapRootIndex);
525 __ mov(a2, t5);
526 __ sw(t6, MemOperand(a2, JSObject::kMapOffset));
527 __ sll(a0, a3, kSmiTagSize);
528 __ sw(a0, MemOperand(a2, FixedArray::kLengthOffset));
529 __ Addu(a2, a2, Operand(2 * kPointerSize));
530
531 ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
532 ASSERT_EQ(1 * kPointerSize, FixedArray::kLengthOffset);
533
534 // Initialize the fields to undefined.
535 // a1: constructor
536 // a2: First element of FixedArray (not tagged)
537 // a3: number of elements in properties array
538 // t4: JSObject
539 // t5: FixedArray (not tagged)
540 __ sll(t3, a3, kPointerSizeLog2);
541 __ addu(t6, a2, t3); // End of object.
542 ASSERT_EQ(2 * kPointerSize, FixedArray::kHeaderSize);
543 { Label loop, entry;
544 if (count_constructions) {
545 __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);
546 } else if (FLAG_debug_code) {
547 __ LoadRoot(t8, Heap::kUndefinedValueRootIndex);
548 __ Assert(eq, kUndefinedValueNotLoaded, t7, Operand(t8));
549 }
550 __ jmp(&entry);
551 __ bind(&loop);
552 __ sw(t7, MemOperand(a2));
553 __ addiu(a2, a2, kPointerSize);
554 __ bind(&entry);
555 __ Branch(&loop, less, a2, Operand(t6));
556 }
557
558 // Store the initialized FixedArray into the properties field of
559 // the JSObject.
560 // a1: constructor function
561 // t4: JSObject
562 // t5: FixedArray (not tagged)
563 __ Addu(t5, t5, Operand(kHeapObjectTag)); // Add the heap tag.
564 __ sw(t5, FieldMemOperand(t4, JSObject::kPropertiesOffset));
565
566 // Continue with JSObject being successfully allocated.
567 // a1: constructor function
568 // t4: JSObject
569 __ jmp(&allocated);
570
571 // Undo the setting of the new top so that the heap is verifiable. For
572 // example, the map's unused properties potentially do not match the
573 // allocated object's unused properties.
574 // t4: JSObject (previous new top)
575 __ bind(&undo_allocation);
576 __ UndoAllocationInNewSpace(t4, t5);
577 }
578
579 __ bind(&rt_call);
580 // Allocate the new receiver object using the runtime call.
581 // a1: constructor function
582 __ push(a1); // Argument for Runtime_NewObject.
583 __ CallRuntime(Runtime::kNewObject, 1);
584 __ mov(t4, v0);
585
586 // Receiver for constructor call allocated.
587 // t4: JSObject
588 __ bind(&allocated);
589 __ push(t4);
590 __ push(t4);
591
592 // Reload the constructor function and the number of arguments from the stack.
593 // sp[0]: receiver
594 // sp[1]: receiver
595 // sp[2]: constructor function
596 // sp[3]: number of arguments (smi-tagged)
597 __ lw(a1, MemOperand(sp, 2 * kPointerSize));
598 __ lw(a3, MemOperand(sp, 3 * kPointerSize));
599
600 // Set up pointer to last argument.
601 __ Addu(a2, fp, Operand(StandardFrameConstants::kCallerSPOffset));
602
603 // Set up number of arguments for function call below.
604 __ srl(a0, a3, kSmiTagSize);
605
606 // Copy arguments and receiver to the expression stack.
607 // a0: number of arguments
608 // a1: constructor function
609 // a2: address of last argument (caller sp)
610 // a3: number of arguments (smi-tagged)
611 // sp[0]: receiver
612 // sp[1]: receiver
613 // sp[2]: constructor function
614 // sp[3]: number of arguments (smi-tagged)
615 Label loop, entry;
616 __ jmp(&entry);
617 __ bind(&loop);
618 __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize);
619 __ Addu(t0, a2, Operand(t0));
620 __ lw(t1, MemOperand(t0));
621 __ push(t1);
622 __ bind(&entry);
623 __ Addu(a3, a3, Operand(-2));
624 __ Branch(&loop, greater_equal, a3, Operand(zero_reg));
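    // Note: a3 is a smi, so subtracting 2 above decrements the argument count
    // by one per iteration.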
625
626 // Call the function.
627 // a0: number of arguments
628 // a1: constructor function
629 if (is_api_function) {
630 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
631 Handle<Code> code =
632 masm->isolate()->builtins()->HandleApiCallConstruct();
633 ParameterCount expected(0);
634 __ InvokeCode(code, expected, expected,
635 RelocInfo::CODE_TARGET, CALL_FUNCTION, CALL_AS_METHOD);
636 } else {
637 ParameterCount actual(a0);
638 __ InvokeFunction(a1, actual, CALL_FUNCTION,
639 NullCallWrapper(), CALL_AS_METHOD);
640 }
641
642 // Store offset of return address for deoptimizer.
643 if (!is_api_function && !count_constructions) {
644 masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
645 }
646
647 // Restore context from the frame.
648 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
649
650 // If the result is an object (in the ECMA sense), we should get rid
651 // of the receiver and use the result; see ECMA-262 section 13.2.2-7
652 // on page 74.
653 Label use_receiver, exit;
654
655 // If the result is a smi, it is *not* an object in the ECMA sense.
656 // v0: result
657 // sp[0]: receiver (newly allocated object)
658 // sp[1]: constructor function
659 // sp[2]: number of arguments (smi-tagged)
660 __ JumpIfSmi(v0, &use_receiver);
661
662 // If the type of the result (stored in its map) is less than
663 // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
664 __ GetObjectType(v0, a1, a3);
665 __ Branch(&exit, greater_equal, a3, Operand(FIRST_SPEC_OBJECT_TYPE));
666
667 // Throw away the result of the constructor invocation and use the
668 // on-stack receiver as the result.
669 __ bind(&use_receiver);
670 __ lw(v0, MemOperand(sp));
671
672 // Remove receiver from the stack, remove caller arguments, and
673 // return.
674 __ bind(&exit);
675 // v0: result
676 // sp[0]: receiver (newly allocated object)
677 // sp[1]: constructor function
678 // sp[2]: number of arguments (smi-tagged)
679 __ lw(a1, MemOperand(sp, 2 * kPointerSize));
680
681 // Leave construct frame.
682 }
683
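  // a1 still holds the smi-tagged argument count; shifting by
  // kPointerSizeLog2 - 1 (the smi tag size) converts it to bytes, and the
  // extra kPointerSize dropped below accounts for the receiver.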
684 __ sll(t0, a1, kPointerSizeLog2 - 1);
685 __ Addu(sp, sp, t0);
686 __ Addu(sp, sp, kPointerSize);
687 __ IncrementCounter(isolate->counters()->constructed_objects(), 1, a1, a2);
688 __ Ret();
689 }
690
691
692 void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
693 Generate_JSConstructStubHelper(masm, false, true);
694 }
695
696
697 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
698 Generate_JSConstructStubHelper(masm, false, false);
699 }
700
701
702 void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
703 Generate_JSConstructStubHelper(masm, true, false);
704 }
705
706
707 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
708 bool is_construct) {
709 // Called from JSEntryStub::GenerateBody
710
711 // ----------- S t a t e -------------
712 // -- a0: code entry
713 // -- a1: function
714 // -- a2: receiver_pointer
715 // -- a3: argc
716 // -- s0: argv
717 // -----------------------------------
718 ProfileEntryHookStub::MaybeCallEntryHook(masm);
719
720 // Clear the context before we push it when entering the JS frame.
721 __ mov(cp, zero_reg);
722
723 // Enter an internal frame.
724 {
725 FrameScope scope(masm, StackFrame::INTERNAL);
726
727 // Set up the context from the function argument.
728 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
729
730 // Push the function and the receiver onto the stack.
731 __ Push(a1, a2);
732
733 // Copy arguments to the stack in a loop.
734 // a3: argc
735 // s0: argv, i.e. points to first arg
736 Label loop, entry;
737 __ sll(t0, a3, kPointerSizeLog2);
738 __ addu(t2, s0, t0);
739 __ b(&entry);
740 __ nop(); // Branch delay slot nop.
741 // t2 points past last arg.
742 __ bind(&loop);
743 __ lw(t0, MemOperand(s0)); // Read next parameter.
744 __ addiu(s0, s0, kPointerSize);
745 __ lw(t0, MemOperand(t0)); // Dereference handle.
746 __ push(t0); // Push parameter.
747 __ bind(&entry);
748 __ Branch(&loop, ne, s0, Operand(t2));
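    // Each argv slot holds a handle (a pointer to the actual object), which is
    // why the value loaded from s0 is dereferenced once more in the loop above.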
749
750 // Initialize all JavaScript callee-saved registers, since they will be seen
751 // by the garbage collector as part of handlers.
752 __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
753 __ mov(s1, t0);
754 __ mov(s2, t0);
755 __ mov(s3, t0);
756 __ mov(s4, t0);
757 __ mov(s5, t0);
758 // s6 holds the root address. Do not clobber.
759 // s7 is cp. Do not init.
760
761 // Invoke the code and pass argc as a0.
762 __ mov(a0, a3);
763 if (is_construct) {
764 // No type feedback cell is available
765 Handle<Object> undefined_sentinel(
766 masm->isolate()->heap()->undefined_value(), masm->isolate());
767 __ li(a2, Operand(undefined_sentinel));
768 CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
769 __ CallStub(&stub);
770 } else {
771 ParameterCount actual(a0);
772 __ InvokeFunction(a1, actual, CALL_FUNCTION,
773 NullCallWrapper(), CALL_AS_METHOD);
774 }
775
776 // Leave internal frame.
777 }
778
779 __ Jump(ra);
780 }
781
782
783 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
784 Generate_JSEntryTrampolineHelper(masm, false);
785 }
786
787
788 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
789 Generate_JSEntryTrampolineHelper(masm, true);
790 }
791
792
793 void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
794 CallRuntimePassFunction(masm, Runtime::kLazyCompile);
795 // Do a tail-call of the compiled function.
796 __ Addu(t9, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
797 __ Jump(t9);
798 }
799
800
801 void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
802 CallRuntimePassFunction(masm, Runtime::kLazyRecompile);
803 // Do a tail-call of the compiled function.
804 __ Addu(t9, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
805 __ Jump(t9);
806 }
807
808
809 static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
810 // For now, we are relying on the fact that make_code_young doesn't do any
811 // garbage collection which allows us to save/restore the registers without
812 // worrying about which of them contain pointers. We also don't build an
813 // internal frame to make the code faster, since we shouldn't have to do stack
814 // crawls in MakeCodeYoung. This seems a bit fragile.
815
816 // Set a0 to point to the head of the PlatformCodeAge sequence.
817 __ Subu(a0, a0,
818 Operand((kNoCodeAgeSequenceLength - 1) * Assembler::kInstrSize));
819
820 // The following registers must be saved and restored when calling through to
821 // the runtime:
822 // a0 - contains return address (beginning of patch sequence)
823 // a1 - isolate
824 RegList saved_regs =
825 (a0.bit() | a1.bit() | ra.bit() | fp.bit()) & ~sp.bit();
826 FrameScope scope(masm, StackFrame::MANUAL);
827 __ MultiPush(saved_regs);
828 __ PrepareCallCFunction(1, 0, a2);
829 __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate())));
830 __ CallCFunction(
831 ExternalReference::get_make_code_young_function(masm->isolate()), 2);
832 __ MultiPop(saved_regs);
833 __ Jump(a0);
834 }
835
836 #define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C) \
837 void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
838 MacroAssembler* masm) { \
839 GenerateMakeCodeYoungAgainCommon(masm); \
840 } \
841 void Builtins::Generate_Make##C##CodeYoungAgainOddMarking( \
842 MacroAssembler* masm) { \
843 GenerateMakeCodeYoungAgainCommon(masm); \
844 }
845 CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
846 #undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
847
848
849 void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
850 // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
851 // that make_code_young doesn't do any garbage collection which allows us to
852 // save/restore the registers without worrying about which of them contain
853 // pointers.
854
855 // Set a0 to point to the head of the PlatformCodeAge sequence.
856 __ Subu(a0, a0,
857 Operand((kNoCodeAgeSequenceLength - 1) * Assembler::kInstrSize));
858
859 // The following registers must be saved and restored when calling through to
860 // the runtime:
861 // a0 - contains return address (beginning of patch sequence)
862 // a1 - isolate
863 RegList saved_regs =
864 (a0.bit() | a1.bit() | ra.bit() | fp.bit()) & ~sp.bit();
865 FrameScope scope(masm, StackFrame::MANUAL);
866 __ MultiPush(saved_regs);
867 __ PrepareCallCFunction(1, 0, a2);
868 __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate())));
869 __ CallCFunction(
870 ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
871 2);
872 __ MultiPop(saved_regs);
873
874 // Perform prologue operations usually performed by the young code stub.
875 __ Push(ra, fp, cp, a1);
876 __ Addu(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
877
878 // Jump to point after the code-age stub.
879 __ Addu(a0, a0, Operand((kNoCodeAgeSequenceLength) * Assembler::kInstrSize));
880 __ Jump(a0);
881 }
882
883
884 void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
885 GenerateMakeCodeYoungAgainCommon(masm);
886 }
887
888
889 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
890 SaveFPRegsMode save_doubles) {
891 {
892 FrameScope scope(masm, StackFrame::INTERNAL);
893
894 // Preserve registers across the notification; this is important for
895 // compiled stubs that tail-call the runtime on deopts, passing their
896 // parameters in registers.
897 __ MultiPush(kJSCallerSaved | kCalleeSaved);
898 // Notify the runtime about the stub failure.
899 __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles);
900 __ MultiPop(kJSCallerSaved | kCalleeSaved);
901 }
902
903 __ Addu(sp, sp, Operand(kPointerSize)); // Ignore state
904 __ Jump(ra); // Jump to miss handler
905 }
906
907
908 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
909 Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
910 }
911
912
913 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
914 Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
915 }
916
917
918 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
919 Deoptimizer::BailoutType type) {
920 {
921 FrameScope scope(masm, StackFrame::INTERNAL);
922 // Pass the deoptimization type to the runtime system.
923 __ li(a0, Operand(Smi::FromInt(static_cast<int>(type))));
924 __ push(a0);
925 __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
926 }
927
928 // Get the full codegen state from the stack and untag it -> t2.
929 __ lw(t2, MemOperand(sp, 0 * kPointerSize));
930 __ SmiUntag(t2);
931 // Switch on the state.
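  // Two states are handled below: NO_REGISTERS, where only the state word was
  // pushed (drop one word), and TOS_REG, where the top-of-stack value was
  // saved as well (reload it into v0 and drop two words).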
932 Label with_tos_register, unknown_state;
933 __ Branch(&with_tos_register,
934 ne, t2, Operand(FullCodeGenerator::NO_REGISTERS));
935 __ Ret(USE_DELAY_SLOT);
936 // Safe to fill the delay slot: Addu will emit a single instruction.
937 __ Addu(sp, sp, Operand(1 * kPointerSize)); // Remove state.
938
939 __ bind(&with_tos_register);
940 __ lw(v0, MemOperand(sp, 1 * kPointerSize));
941 __ Branch(&unknown_state, ne, t2, Operand(FullCodeGenerator::TOS_REG));
942
943 __ Ret(USE_DELAY_SLOT);
944 // Safe to fill the delay slot: Addu will emit a single instruction.
945 __ Addu(sp, sp, Operand(2 * kPointerSize)); // Remove state.
946
947 __ bind(&unknown_state);
948 __ stop("no cases left");
949 }
950
951
952 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
953 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
954 }
955
956
957 void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
958 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
959 }
960
961
962 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
963 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
964 }
965
966
967 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
968 // Lookup the function in the JavaScript frame.
969 __ lw(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
970 {
971 FrameScope scope(masm, StackFrame::INTERNAL);
972 // Look up the unoptimized code object and calculate the pc offset.
973 __ lw(a1, MemOperand(fp, StandardFrameConstants::kCallerPCOffset));
974 __ lw(a2, FieldMemOperand(a0, JSFunction::kSharedFunctionInfoOffset));
975 __ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kCodeOffset));
976 __ Subu(a1, a1, Operand(Code::kHeaderSize - kHeapObjectTag));
977 __ Subu(a1, a1, a2);
978 __ SmiTag(a1);
979
980 // Pass both function and pc offset as arguments.
981 __ push(a0);
982 __ push(a1);
983 __ CallRuntime(Runtime::kCompileForOnStackReplacement, 2);
984 }
985
986 // If the code object is null, just return to the unoptimized code.
987 __ Ret(eq, v0, Operand(Smi::FromInt(0)));
988
989 // Load deoptimization data from the code object.
990 // <deopt_data> = <code>[#deoptimization_data_offset]
991 __ lw(a1, MemOperand(v0, Code::kDeoptimizationDataOffset - kHeapObjectTag));
992
993 // Load the OSR entrypoint offset from the deoptimization data.
994 // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
995 __ lw(a1, MemOperand(a1, FixedArray::OffsetOfElementAt(
996 DeoptimizationInputData::kOsrPcOffsetIndex) - kHeapObjectTag));
997 __ SmiUntag(a1);
998
999 // Compute the target address = code_obj + header_size + osr_offset
1000 // <entry_addr> = <code_obj> + #header_size + <osr_offset>
1001 __ addu(v0, v0, a1);
1002 __ addiu(ra, v0, Code::kHeaderSize - kHeapObjectTag);
1003
1004 // And "return" to the OSR entry point of the function.
1005 __ Ret();
1006 }
1007
1008
1009 void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
1010 // We check the stack limit as an indicator that recompilation might be done.
1011 Label ok;
1012 __ LoadRoot(at, Heap::kStackLimitRootIndex);
1013 __ Branch(&ok, hs, sp, Operand(at));
1014 {
1015 FrameScope scope(masm, StackFrame::INTERNAL);
1016 __ CallRuntime(Runtime::kStackGuard, 0);
1017 }
1018 __ Jump(masm->isolate()->builtins()->OnStackReplacement(),
1019 RelocInfo::CODE_TARGET);
1020
1021 __ bind(&ok);
1022 __ Ret();
1023 }
1024
1025
1026 void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
1027 // 1. Make sure we have at least one argument.
1028 // a0: actual number of arguments
1029 { Label done;
1030 __ Branch(&done, ne, a0, Operand(zero_reg));
1031 __ LoadRoot(t2, Heap::kUndefinedValueRootIndex);
1032 __ push(t2);
1033 __ Addu(a0, a0, Operand(1));
1034 __ bind(&done);
1035 }
1036
1037 // 2. Get the function to call (passed as receiver) from the stack, check
1038 // if it is a function.
1039 // a0: actual number of arguments
1040 Label slow, non_function;
1041 __ sll(at, a0, kPointerSizeLog2);
1042 __ addu(at, sp, at);
1043 __ lw(a1, MemOperand(at));
1044 __ JumpIfSmi(a1, &non_function);
1045 __ GetObjectType(a1, a2, a2);
1046 __ Branch(&slow, ne, a2, Operand(JS_FUNCTION_TYPE));
1047
1048 // 3a. Patch the first argument if necessary when calling a function.
1049 // a0: actual number of arguments
1050 // a1: function
1051 Label shift_arguments;
1052 __ li(t0, Operand(0, RelocInfo::NONE32)); // Indicate regular JS_FUNCTION.
1053 { Label convert_to_object, use_global_receiver, patch_receiver;
1054 // Change context eagerly in case we need the global receiver.
1055 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
1056
1057 // Do not transform the receiver for strict mode functions.
1058 __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
1059 __ lw(a3, FieldMemOperand(a2, SharedFunctionInfo::kCompilerHintsOffset));
1060 __ And(t3, a3, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
1061 kSmiTagSize)));
1062 __ Branch(&shift_arguments, ne, t3, Operand(zero_reg));
1063
1064 // Do not transform the receiver for natives (compiler hints already in a3).
1065 __ And(t3, a3, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
1066 __ Branch(&shift_arguments, ne, t3, Operand(zero_reg));
1067
1068 // Compute the receiver in non-strict mode.
1069 // Load the first argument into a2: a2 = *(sp + (n_args << kPointerSizeLog2) - kPointerSize).
1070 __ sll(at, a0, kPointerSizeLog2);
1071 __ addu(a2, sp, at);
1072 __ lw(a2, MemOperand(a2, -kPointerSize));
1073 // a0: actual number of arguments
1074 // a1: function
1075 // a2: first argument
1076 __ JumpIfSmi(a2, &convert_to_object, t2);
1077
1078 __ LoadRoot(a3, Heap::kUndefinedValueRootIndex);
1079 __ Branch(&use_global_receiver, eq, a2, Operand(a3));
1080 __ LoadRoot(a3, Heap::kNullValueRootIndex);
1081 __ Branch(&use_global_receiver, eq, a2, Operand(a3));
1082
1083 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
1084 __ GetObjectType(a2, a3, a3);
1085 __ Branch(&shift_arguments, ge, a3, Operand(FIRST_SPEC_OBJECT_TYPE));
1086
1087 __ bind(&convert_to_object);
1088 // Enter an internal frame in order to preserve argument count.
1089 {
1090 FrameScope scope(masm, StackFrame::INTERNAL);
1091 __ sll(a0, a0, kSmiTagSize); // Smi tagged.
1092 __ push(a0);
1093
1094 __ push(a2);
1095 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1096 __ mov(a2, v0);
1097
1098 __ pop(a0);
1099 __ sra(a0, a0, kSmiTagSize); // Un-tag.
1100 // Leave internal frame.
1101 }
1102 // Restore the function to a1, and the flag to t0.
1103 __ sll(at, a0, kPointerSizeLog2);
1104 __ addu(at, sp, at);
1105 __ lw(a1, MemOperand(at));
1106 __ li(t0, Operand(0, RelocInfo::NONE32));
1107 __ Branch(&patch_receiver);
1108
1109 // Use the global receiver object from the called function as the
1110 // receiver.
1111 __ bind(&use_global_receiver);
1112 const int kGlobalIndex =
1113 Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
1114 __ lw(a2, FieldMemOperand(cp, kGlobalIndex));
1115 __ lw(a2, FieldMemOperand(a2, GlobalObject::kNativeContextOffset));
1116 __ lw(a2, FieldMemOperand(a2, kGlobalIndex));
1117 __ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalReceiverOffset));
1118
1119 __ bind(&patch_receiver);
1120 __ sll(at, a0, kPointerSizeLog2);
1121 __ addu(a3, sp, at);
1122 __ sw(a2, MemOperand(a3, -kPointerSize));
1123
1124 __ Branch(&shift_arguments);
1125 }
1126
1127 // 3b. Check for function proxy.
1128 __ bind(&slow);
1129 __ li(t0, Operand(1, RelocInfo::NONE32)); // Indicate function proxy.
1130 __ Branch(&shift_arguments, eq, a2, Operand(JS_FUNCTION_PROXY_TYPE));
1131
1132 __ bind(&non_function);
1133 __ li(t0, Operand(2, RelocInfo::NONE32)); // Indicate non-function.
1134
1135 // 3c. Patch the first argument when calling a non-function. The
1136 // CALL_NON_FUNCTION builtin expects the non-function callee as
1137 // receiver, so overwrite the first argument which will ultimately
1138 // become the receiver.
1139 // a0: actual number of arguments
1140 // a1: function
1141 // t0: call type (0: JS function, 1: function proxy, 2: non-function)
1142 __ sll(at, a0, kPointerSizeLog2);
1143 __ addu(a2, sp, at);
1144 __ sw(a1, MemOperand(a2, -kPointerSize));
1145
1146 // 4. Shift arguments and return address one slot down on the stack
1147 // (overwriting the original receiver). Adjust argument count to make
1148 // the original first argument the new receiver.
1149 // a0: actual number of arguments
1150 // a1: function
1151 // t0: call type (0: JS function, 1: function proxy, 2: non-function)
1152 __ bind(&shift_arguments);
1153 { Label loop;
1154 // Calculate the copy start address (destination). Copy end address is sp.
1155 __ sll(at, a0, kPointerSizeLog2);
1156 __ addu(a2, sp, at);
1157
1158 __ bind(&loop);
1159 __ lw(at, MemOperand(a2, -kPointerSize));
1160 __ sw(at, MemOperand(a2));
1161 __ Subu(a2, a2, Operand(kPointerSize));
1162 __ Branch(&loop, ne, a2, Operand(sp));
1163 // Adjust the actual number of arguments and remove the top element
1164 // (which is a copy of the last argument).
1165 __ Subu(a0, a0, Operand(1));
1166 __ Pop();
1167 }
1168
1169 // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
1170 // or a function proxy via CALL_FUNCTION_PROXY.
1171 // a0: actual number of arguments
1172 // a1: function
1173 // t0: call type (0: JS function, 1: function proxy, 2: non-function)
1174 { Label function, non_proxy;
1175 __ Branch(&function, eq, t0, Operand(zero_reg));
1176 // Expected number of arguments is 0 for CALL_NON_FUNCTION.
1177 __ mov(a2, zero_reg);
1178 __ SetCallKind(t1, CALL_AS_METHOD);
1179 __ Branch(&non_proxy, ne, t0, Operand(1));
1180
1181 __ push(a1); // Re-add proxy object as additional argument.
1182 __ Addu(a0, a0, Operand(1));
1183 __ GetBuiltinEntry(a3, Builtins::CALL_FUNCTION_PROXY);
1184 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1185 RelocInfo::CODE_TARGET);
1186
1187 __ bind(&non_proxy);
1188 __ GetBuiltinEntry(a3, Builtins::CALL_NON_FUNCTION);
1189 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1190 RelocInfo::CODE_TARGET);
1191 __ bind(&function);
1192 }
1193
1194 // 5b. Get the code to call from the function and check that the number of
1195 // expected arguments matches what we're providing. If so, jump
1196 // (tail-call) to the code in register a3 without checking arguments.
1197 // a0: actual number of arguments
1198 // a1: function
1199 __ lw(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
1200 __ lw(a2,
1201 FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
1202 __ sra(a2, a2, kSmiTagSize);
1203 __ lw(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
1204 __ SetCallKind(t1, CALL_AS_METHOD);
1205 // Check formal and actual parameter counts.
1206 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1207 RelocInfo::CODE_TARGET, ne, a2, Operand(a0));
1208
1209 ParameterCount expected(0);
1210 __ InvokeCode(a3, expected, expected, JUMP_FUNCTION,
1211 NullCallWrapper(), CALL_AS_METHOD);
1212 }
1213
1214
1215 void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
1216 const int kIndexOffset =
1217 StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize);
1218 const int kLimitOffset =
1219 StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize);
1220 const int kArgsOffset = 2 * kPointerSize;
1221 const int kRecvOffset = 3 * kPointerSize;
1222 const int kFunctionOffset = 4 * kPointerSize;
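  // These offsets are relative to fp inside the internal frame entered below;
  // as a sketch (assuming the standard frame layout with the saved fp at fp[0]
  // and the return address at fp[4]): fp[8] = arguments array,
  // fp[12] = receiver, fp[16] = function.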
1223
1224 {
1225 FrameScope frame_scope(masm, StackFrame::INTERNAL);
1226 __ lw(a0, MemOperand(fp, kFunctionOffset)); // Get the function.
1227 __ push(a0);
1228 __ lw(a0, MemOperand(fp, kArgsOffset)); // Get the args array.
1229 __ push(a0);
1230 // Returns (in v0) the number of arguments to copy to the stack, as a smi.
1231 __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);
1232
1233 // Check the stack for overflow. We are not trying to catch
1234 // interruptions (e.g. debug break and preemption) here, so the "real stack
1235 // limit" is checked.
1236 Label okay;
1237 __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
1238 // Make a2 the space we have left. The stack might already be overflowed
1239 // here which will cause a2 to become negative.
1240 __ subu(a2, sp, a2);
1241 // Check if the arguments will overflow the stack.
1242 __ sll(t3, v0, kPointerSizeLog2 - kSmiTagSize);
1243 __ Branch(&okay, gt, a2, Operand(t3)); // Signed comparison.
1244
1245 // Out of stack space.
1246 __ lw(a1, MemOperand(fp, kFunctionOffset));
1247 __ Push(a1, v0);
1248 __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
1249 // End of stack check.
1250
1251 // Push current limit and index.
1252 __ bind(&okay);
1253 __ push(v0); // Limit.
1254 __ mov(a1, zero_reg); // Initial index.
1255 __ push(a1);
1256
1257 // Get the receiver.
1258 __ lw(a0, MemOperand(fp, kRecvOffset));
1259
1260 // Check that the function is a JS function (otherwise it must be a proxy).
1261 Label push_receiver;
1262 __ lw(a1, MemOperand(fp, kFunctionOffset));
1263 __ GetObjectType(a1, a2, a2);
1264 __ Branch(&push_receiver, ne, a2, Operand(JS_FUNCTION_TYPE));
1265
1266 // Change context eagerly to get the right global object if necessary.
1267 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
1268 // Load the shared function info while the function is still in a1.
1269 __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
1270
1271 // Compute the receiver.
1272 // Do not transform the receiver for strict mode functions.
1273 Label call_to_object, use_global_receiver;
1274 __ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kCompilerHintsOffset));
1275 __ And(t3, a2, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
1276 kSmiTagSize)));
1277 __ Branch(&push_receiver, ne, t3, Operand(zero_reg));
1278
1279 // Do not transform the receiver for natives (compiler hints already in a2).
1280 __ And(t3, a2, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
1281 __ Branch(&push_receiver, ne, t3, Operand(zero_reg));
1282
1283 // Compute the receiver in non-strict mode.
1284 __ JumpIfSmi(a0, &call_to_object);
1285 __ LoadRoot(a1, Heap::kNullValueRootIndex);
1286 __ Branch(&use_global_receiver, eq, a0, Operand(a1));
1287 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
1288 __ Branch(&use_global_receiver, eq, a0, Operand(a2));
1289
1290 // Check if the receiver is already a JavaScript object.
1291 // a0: receiver
1292 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
1293 __ GetObjectType(a0, a1, a1);
1294 __ Branch(&push_receiver, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
1295
1296 // Convert the receiver to a regular object.
1297 // a0: receiver
1298 __ bind(&call_to_object);
1299 __ push(a0);
1300 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1301 __ mov(a0, v0); // Put object in a0 to match other paths to push_receiver.
1302 __ Branch(&push_receiver);
1303
1304 // Use the current global receiver object as the receiver.
1305 __ bind(&use_global_receiver);
1306 const int kGlobalOffset =
1307 Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
1308 __ lw(a0, FieldMemOperand(cp, kGlobalOffset));
1309 __ lw(a0, FieldMemOperand(a0, GlobalObject::kNativeContextOffset));
1310 __ lw(a0, FieldMemOperand(a0, kGlobalOffset));
1311 __ lw(a0, FieldMemOperand(a0, GlobalObject::kGlobalReceiverOffset));
1312
1313 // Push the receiver.
1314 // a0: receiver
1315 __ bind(&push_receiver);
1316 __ push(a0);
1317
1318 // Copy all arguments from the array to the stack.
1319 Label entry, loop;
1320 __ lw(a0, MemOperand(fp, kIndexOffset));
1321 __ Branch(&entry);
1322
1323 // Load the current argument from the arguments array and push it to the
1324 // stack.
1325 // a0: current argument index
1326 __ bind(&loop);
1327 __ lw(a1, MemOperand(fp, kArgsOffset));
1328 __ Push(a1, a0);
1329
1330 // Call the runtime to access the property in the arguments array.
1331 __ CallRuntime(Runtime::kGetProperty, 2);
1332 __ push(v0);
1333
1334 // Advance the index (a smi) and store it back for the next iteration.
1335 __ lw(a0, MemOperand(fp, kIndexOffset));
1336 __ Addu(a0, a0, Operand(1 << kSmiTagSize));
1337 __ sw(a0, MemOperand(fp, kIndexOffset));
1338
1339 // Test if the copy loop has finished copying all the elements from the
1340 // arguments object.
1341 __ bind(&entry);
1342 __ lw(a1, MemOperand(fp, kLimitOffset));
1343 __ Branch(&loop, ne, a0, Operand(a1));
1344
1345 // Invoke the function.
1346 Label call_proxy;
1347 ParameterCount actual(a0);
1348 __ sra(a0, a0, kSmiTagSize);
1349 __ lw(a1, MemOperand(fp, kFunctionOffset));
1350 __ GetObjectType(a1, a2, a2);
1351 __ Branch(&call_proxy, ne, a2, Operand(JS_FUNCTION_TYPE));
1352
1353 __ InvokeFunction(a1, actual, CALL_FUNCTION,
1354 NullCallWrapper(), CALL_AS_METHOD);
1355
1356 frame_scope.GenerateLeaveFrame();
1357 __ Ret(USE_DELAY_SLOT);
1358 __ Addu(sp, sp, Operand(3 * kPointerSize)); // In delay slot.
1359
1360 // Invoke the function proxy.
1361 __ bind(&call_proxy);
1362 __ push(a1); // Add function proxy as last argument.
1363 __ Addu(a0, a0, Operand(1));
1364 __ li(a2, Operand(0, RelocInfo::NONE32));
1365 __ SetCallKind(t1, CALL_AS_METHOD);
1366 __ GetBuiltinEntry(a3, Builtins::CALL_FUNCTION_PROXY);
1367 __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1368 RelocInfo::CODE_TARGET);
1369 // Tear down the internal frame and remove function, receiver and args.
1370 }
1371
1372 __ Ret(USE_DELAY_SLOT);
1373 __ Addu(sp, sp, Operand(3 * kPointerSize)); // In delay slot.
1374 }
1375
1376
1377 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
1378 __ sll(a0, a0, kSmiTagSize);
1379 __ li(t0, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
1380 __ MultiPush(a0.bit() | a1.bit() | t0.bit() | fp.bit() | ra.bit());
1381 __ Addu(fp, sp,
1382 Operand(StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize));
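  // Resulting frame (sketch, assuming MultiPush stores lower-numbered
  // registers at lower addresses): [sp+0] smi-tagged argc, [sp+4] function,
  // [sp+8] ARGUMENTS_ADAPTOR marker, [sp+12] saved fp (where fp now points),
  // [sp+16] return address.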
1383 }
1384
1385
1386 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
1387 // ----------- S t a t e -------------
1388 // -- v0 : result being passed through
1389 // -----------------------------------
1390 // Get the number of arguments passed (as a smi), tear down the frame and
1391 // then drop the parameters from the stack.
1392 __ lw(a1, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
1393 kPointerSize)));
1394 __ mov(sp, fp);
1395 __ MultiPop(fp.bit() | ra.bit());
1396 __ sll(t0, a1, kPointerSizeLog2 - kSmiTagSize);
1397 __ Addu(sp, sp, t0);
1398 // Adjust for the receiver.
1399 __ Addu(sp, sp, Operand(kPointerSize));
1400 }
1401
1402
1403 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
1404 // State setup as expected by MacroAssembler::InvokePrologue.
1405 // ----------- S t a t e -------------
1406 // -- a0: actual arguments count
1407 // -- a1: function (passed through to callee)
1408 // -- a2: expected arguments count
1409 // -- a3: callee code entry
1410 // -- t1: call kind information
1411 // -----------------------------------
1412
1413 Label invoke, dont_adapt_arguments;
1414
1415 Label enough, too_few;
1416 __ Branch(&dont_adapt_arguments, eq,
1417 a2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
1418 // We use Uless since the number of arguments should always be greater than 0.
1419 __ Branch(&too_few, Uless, a0, Operand(a2));
1420
1421 { // Enough parameters: actual >= expected.
1422 // a0: actual number of arguments as a smi
1423 // a1: function
1424 // a2: expected number of arguments
1425 // a3: code entry to call
1426 __ bind(&enough);
1427 EnterArgumentsAdaptorFrame(masm);
1428
1429 // Calculate copy start address into a0 and copy end address into a2.
1430 __ sll(a0, a0, kPointerSizeLog2 - kSmiTagSize);
1431 __ Addu(a0, fp, a0);
1432 // Adjust for return address and receiver.
1433 __ Addu(a0, a0, Operand(2 * kPointerSize));
1434 // Compute copy end address.
1435 __ sll(a2, a2, kPointerSizeLog2);
1436 __ subu(a2, a0, a2);
1437
1438 // Copy the arguments (including the receiver) to the new stack frame.
1439 // a0: copy start address
1440 // a1: function
1441 // a2: copy end address
1442 // a3: code entry to call
1443
1444 Label copy;
1445 __ bind(&copy);
1446 __ lw(t0, MemOperand(a0));
1447 __ push(t0);
1448 __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(a2));
1449 __ addiu(a0, a0, -kPointerSize); // In delay slot.
1450
1451 __ jmp(&invoke);
1452 }
1453
1454 { // Too few parameters: Actual < expected.
1455 __ bind(&too_few);
1456 EnterArgumentsAdaptorFrame(masm);
1457
1458 // Calculate the copy start address into a0; the copy end address (t3) is just above fp.
1459 // a0: actual number of arguments as a smi
1460 // a1: function
1461 // a2: expected number of arguments
1462 // a3: code entry to call
1463 __ sll(a0, a0, kPointerSizeLog2 - kSmiTagSize);
1464 __ Addu(a0, fp, a0);
1465 // Adjust for return address and receiver.
1466 __ Addu(a0, a0, Operand(2 * kPointerSize));
1467 // Compute copy end address. Also adjust for return address.
1468 __ Addu(t3, fp, kPointerSize);
1469
1470 // Copy the arguments (including the receiver) to the new stack frame.
1471 // a0: copy start address
1472 // a1: function
1473 // a2: expected number of arguments
1474 // a3: code entry to call
1475 // t3: copy end address
1476 Label copy;
1477 __ bind(&copy);
1478 __ lw(t0, MemOperand(a0)); // Adjusted above for return addr and receiver.
1479 __ Subu(sp, sp, kPointerSize);
1480 __ Subu(a0, a0, kPointerSize);
1481 __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(t3));
1482 __ sw(t0, MemOperand(sp)); // In the delay slot.
1483
1484 // Fill the remaining expected arguments with undefined.
1485 // a1: function
1486 // a2: expected number of arguments
1487 // a3: code entry to call
1488 __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
1489 __ sll(t2, a2, kPointerSizeLog2);
1490 __ Subu(a2, fp, Operand(t2));
1491 // Adjust for frame.
1492 __ Subu(a2, a2, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
1493 2 * kPointerSize));
1494
1495 Label fill;
1496 __ bind(&fill);
1497 __ Subu(sp, sp, kPointerSize);
1498 __ Branch(USE_DELAY_SLOT, &fill, ne, sp, Operand(a2));
1499 __ sw(t0, MemOperand(sp));
1500 }
1501
1502 // Call the entry point.
1503 __ bind(&invoke);
1504
1505 __ Call(a3);
1506
1507 // Store offset of return address for deoptimizer.
1508 masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
1509
1510 // Exit frame and return.
1511 LeaveArgumentsAdaptorFrame(masm);
1512 __ Ret();
1513
1514
1515 // -------------------------------------------
1516 // Don't adapt arguments.
1517 // -------------------------------------------
1518 __ bind(&dont_adapt_arguments);
1519 __ Jump(a3);
1520 }
1521
1522
1523 #undef __
1524
1525 } } // namespace v8::internal
1526
1527 #endif // V8_TARGET_ARCH_MIPS
1528