Lines Matching full:__

16 #define __ ACCESS_MASM(masm)  macro
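
A note for reading every match below: ACCESS_MASM(masm) expands to masm->, so each `__ Foo(...)` line is a call on this file's MacroAssembler that emits one or more s390 instructions. A minimal self-contained sketch of the same convention (plain C++, not V8 itself):

    #include <cstdio>

    // Stand-in for V8's MacroAssembler: each method "emits" one instruction.
    struct MacroAssembler {
      void LoadP(const char* dst, const char* src) {
        std::printf("LoadP %s, %s\n", dst, src);
      }
    };

    #define ACCESS_MASM(masm) masm->
    #define __ ACCESS_MASM(masm)  // same shape as line 16 above

    void EmitExample(MacroAssembler* masm) {
      __ LoadP("r2", "MemOperand(sp)");  // expands to masm->LoadP(...)
    }
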
29 __ AssertFunction(r3); in Generate_Adaptor()
35 __ LoadP(cp, FieldMemOperand(r3, JSFunction::kContextOffset)); in Generate_Adaptor()
40 __ AddP(r2, r2, Operand(num_extra_args + 1)); in Generate_Adaptor()
43 __ SmiTag(r2); in Generate_Adaptor()
44 __ Push(r2, r3, r5); in Generate_Adaptor()
45 __ SmiUntag(r2); in Generate_Adaptor()
47 __ JumpToExternalReference(ExternalReference(address, masm->isolate()), in Generate_Adaptor()
55 __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result); in GenerateLoadInternalArrayFunction()
61 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result); in GenerateLoadArrayFunction()
77 __ LoadP(r4, FieldMemOperand(r3, JSFunction::kPrototypeOrInitialMapOffset)); in Generate_InternalArrayCode()
78 __ TestIfSmi(r4); in Generate_InternalArrayCode()
79 __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction, cr0); in Generate_InternalArrayCode()
80 __ CompareObjectType(r4, r5, r6, MAP_TYPE); in Generate_InternalArrayCode()
81 __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction); in Generate_InternalArrayCode()
88 __ TailCallStub(&stub); in Generate_InternalArrayCode()
104 __ LoadP(r4, FieldMemOperand(r3, JSFunction::kPrototypeOrInitialMapOffset)); in Generate_ArrayCode()
105 __ TestIfSmi(r4); in Generate_ArrayCode()
106 __ Assert(ne, kUnexpectedInitialMapForArrayFunction, cr0); in Generate_ArrayCode()
107 __ CompareObjectType(r4, r5, r6, MAP_TYPE); in Generate_ArrayCode()
108 __ Assert(eq, kUnexpectedInitialMapForArrayFunction); in Generate_ArrayCode()
111 __ LoadRR(r5, r3); in Generate_ArrayCode()
114 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex); in Generate_ArrayCode()
116 __ TailCallStub(&stub); in Generate_ArrayCode()
137 __ LoadRoot(r7, root_index); in Generate_MathMaxMin()
138 __ LoadDouble(d1, FieldMemOperand(r7, HeapNumber::kValueOffset)); in Generate_MathMaxMin()
143 __ AddP(r6, r2, Operand(1)); in Generate_MathMaxMin()
146 __ LoadRR(r6, r2); in Generate_MathMaxMin()
147 __ bind(&loop); in Generate_MathMaxMin()
150 __ SubP(r6, Operand(1)); in Generate_MathMaxMin()
151 __ blt(&done_loop); in Generate_MathMaxMin()
154 __ ShiftLeftP(r1, r6, Operand(kPointerSizeLog2)); in Generate_MathMaxMin()
155 __ LoadP(r4, MemOperand(sp, r1)); in Generate_MathMaxMin()
160 __ bind(&convert); in Generate_MathMaxMin()
161 __ JumpIfSmi(r4, &convert_smi); in Generate_MathMaxMin()
162 __ LoadP(r5, FieldMemOperand(r4, HeapObject::kMapOffset)); in Generate_MathMaxMin()
163 __ JumpIfRoot(r5, Heap::kHeapNumberMapRootIndex, &convert_number); in Generate_MathMaxMin()
168 __ SmiTag(r2); in Generate_MathMaxMin()
169 __ SmiTag(r6); in Generate_MathMaxMin()
170 __ EnterBuiltinFrame(cp, r3, r2); in Generate_MathMaxMin()
171 __ Push(r6, r7); in Generate_MathMaxMin()
172 __ LoadRR(r2, r4); in Generate_MathMaxMin()
173 __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET); in Generate_MathMaxMin()
174 __ LoadRR(r4, r2); in Generate_MathMaxMin()
175 __ Pop(r6, r7); in Generate_MathMaxMin()
176 __ LeaveBuiltinFrame(cp, r3, r2); in Generate_MathMaxMin()
177 __ SmiUntag(r6); in Generate_MathMaxMin()
178 __ SmiUntag(r2); in Generate_MathMaxMin()
182 __ SmiToDouble(d1, r7); in Generate_MathMaxMin()
183 __ JumpIfSmi(r7, &done_restore); in Generate_MathMaxMin()
184 __ LoadDouble(d1, FieldMemOperand(r7, HeapNumber::kValueOffset)); in Generate_MathMaxMin()
185 __ bind(&done_restore); in Generate_MathMaxMin()
188 __ b(&convert); in Generate_MathMaxMin()
189 __ bind(&convert_number); in Generate_MathMaxMin()
190 __ LoadDouble(d2, FieldMemOperand(r4, HeapNumber::kValueOffset)); in Generate_MathMaxMin()
191 __ b(&done_convert); in Generate_MathMaxMin()
192 __ bind(&convert_smi); in Generate_MathMaxMin()
193 __ SmiToDouble(d2, r4); in Generate_MathMaxMin()
194 __ bind(&done_convert); in Generate_MathMaxMin()
199 __ cdbr(d1, d2); in Generate_MathMaxMin()
200 __ bunordered(&compare_nan); in Generate_MathMaxMin()
201 __ b(cond_done, &loop); in Generate_MathMaxMin()
202 __ b(CommuteCondition(cond_done), &compare_swap); in Generate_MathMaxMin()
205 __ TestDoubleIsMinusZero(reg, r1, r0); in Generate_MathMaxMin()
206 __ bne(&loop); in Generate_MathMaxMin()
209 __ bind(&compare_swap); in Generate_MathMaxMin()
210 __ ldr(d1, d2); in Generate_MathMaxMin()
211 __ LoadRR(r7, r4); in Generate_MathMaxMin()
212 __ b(&loop); in Generate_MathMaxMin()
216 __ bind(&compare_nan); in Generate_MathMaxMin()
217 __ LoadRoot(r7, Heap::kNanValueRootIndex); in Generate_MathMaxMin()
218 __ LoadDouble(d1, FieldMemOperand(r7, HeapNumber::kValueOffset)); in Generate_MathMaxMin()
219 __ b(&loop); in Generate_MathMaxMin()
222 __ bind(&done_loop); in Generate_MathMaxMin()
224 __ AddP(r2, Operand(1)); in Generate_MathMaxMin()
225 __ Drop(r2); in Generate_MathMaxMin()
226 __ LoadRR(r2, r7); in Generate_MathMaxMin()
227 __ Ret(); in Generate_MathMaxMin()
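
The loop above (cdbr / bunordered / TestDoubleIsMinusZero) implements Math.max/Math.min over the arguments, with d1 as the accumulator and d2 as the current value: NaN propagates stickily, and ties are broken explicitly so that max(+0, -0) is +0. A rough C++ equivalent of that control flow for the "max" flavor, assuming the identity element loaded via root_index is -Infinity (a sketch, not V8 code):

    #include <cmath>
    #include <limits>
    #include <vector>

    double MathMaxSketch(const std::vector<double>& args) {
      double acc = -std::numeric_limits<double>::infinity();  // LoadRoot identity
      for (double v : args) {                                 // the r6-counted loop
        if (std::isnan(v) || std::isnan(acc)) {               // bunordered(&compare_nan)
          acc = std::numeric_limits<double>::quiet_NaN();     // LoadRoot(kNanValueRootIndex)
          continue;                                           // b(&loop): NaN sticks
        }
        bool swap = v > acc;                                  // b(cond_done / commuted)
        if (v == acc && std::signbit(acc) && !std::signbit(v)) {
          swap = true;  // TestDoubleIsMinusZero: prefer +0 over -0 for max
        }
        if (swap) acc = v;                                    // ldr(d1, d2); LoadRR(r7, r4)
      }
      return acc;
    }
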
244 __ LoadRR(r4, r2); // Store argc in r4. in Generate_NumberConstructor()
245 __ CmpP(r2, Operand::Zero()); in Generate_NumberConstructor()
246 __ beq(&no_arguments); in Generate_NumberConstructor()
247 __ SubP(r2, r2, Operand(1)); in Generate_NumberConstructor()
248 __ ShiftLeftP(r2, r2, Operand(kPointerSizeLog2)); in Generate_NumberConstructor()
249 __ LoadP(r2, MemOperand(sp, r2)); in Generate_NumberConstructor()
255 __ SmiTag(r4); in Generate_NumberConstructor()
256 __ EnterBuiltinFrame(cp, r3, r4); in Generate_NumberConstructor()
257 __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET); in Generate_NumberConstructor()
258 __ LeaveBuiltinFrame(cp, r3, r4); in Generate_NumberConstructor()
259 __ SmiUntag(r4); in Generate_NumberConstructor()
264 __ Drop(r4); in Generate_NumberConstructor()
265 __ Ret(1); in Generate_NumberConstructor()
269 __ bind(&no_arguments); in Generate_NumberConstructor()
270 __ LoadSmiLiteral(r2, Smi::kZero); in Generate_NumberConstructor()
271 __ Ret(1); in Generate_NumberConstructor()
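
Taken together, these matches spell out the [[Call]] behavior of the Number constructor: with no arguments return Smi zero, otherwise load the first argument from sp[(argc-1)*kPointerSize] and call the ToNumber builtin inside a builtin frame. As plain logic (a sketch; ToNumberSketch stands in for the real builtin):

    #include <vector>

    struct NumVal { double d; };
    NumVal ToNumberSketch(NumVal v) { return v; }  // stand-in for the builtin

    NumVal NumberCallSketch(const std::vector<NumVal>& args) {
      if (args.empty()) return NumVal{0.0};  // LoadSmiLiteral(r2, Smi::kZero)
      return ToNumberSketch(args.front());   // Call(builtins()->ToNumber())
    }
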
286 __ LoadP(cp, FieldMemOperand(r3, JSFunction::kContextOffset)); in Generate_NumberConstructor_ConstructStub()
291 __ LoadRR(r8, r2); // Store argc in r8. in Generate_NumberConstructor_ConstructStub()
292 __ CmpP(r2, Operand::Zero()); in Generate_NumberConstructor_ConstructStub()
293 __ beq(&no_arguments); in Generate_NumberConstructor_ConstructStub()
294 __ SubP(r2, r2, Operand(1)); in Generate_NumberConstructor_ConstructStub()
295 __ ShiftLeftP(r4, r2, Operand(kPointerSizeLog2)); in Generate_NumberConstructor_ConstructStub()
296 __ LoadP(r4, MemOperand(sp, r4)); in Generate_NumberConstructor_ConstructStub()
297 __ b(&done); in Generate_NumberConstructor_ConstructStub()
298 __ bind(&no_arguments); in Generate_NumberConstructor_ConstructStub()
299 __ LoadSmiLiteral(r4, Smi::kZero); in Generate_NumberConstructor_ConstructStub()
300 __ bind(&done); in Generate_NumberConstructor_ConstructStub()
306 __ JumpIfSmi(r4, &done_convert); in Generate_NumberConstructor_ConstructStub()
307 __ CompareObjectType(r4, r6, r6, HEAP_NUMBER_TYPE); in Generate_NumberConstructor_ConstructStub()
308 __ beq(&done_convert); in Generate_NumberConstructor_ConstructStub()
311 __ SmiTag(r8); in Generate_NumberConstructor_ConstructStub()
312 __ EnterBuiltinFrame(cp, r3, r8); in Generate_NumberConstructor_ConstructStub()
313 __ Push(r5); in Generate_NumberConstructor_ConstructStub()
314 __ LoadRR(r2, r4); in Generate_NumberConstructor_ConstructStub()
315 __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET); in Generate_NumberConstructor_ConstructStub()
316 __ LoadRR(r4, r2); in Generate_NumberConstructor_ConstructStub()
317 __ Pop(r5); in Generate_NumberConstructor_ConstructStub()
318 __ LeaveBuiltinFrame(cp, r3, r8); in Generate_NumberConstructor_ConstructStub()
319 __ SmiUntag(r8); in Generate_NumberConstructor_ConstructStub()
321 __ bind(&done_convert); in Generate_NumberConstructor_ConstructStub()
326 __ CmpP(r3, r5); in Generate_NumberConstructor_ConstructStub()
327 __ bne(&new_object); in Generate_NumberConstructor_ConstructStub()
330 __ AllocateJSValue(r2, r3, r4, r6, r7, &new_object); in Generate_NumberConstructor_ConstructStub()
331 __ b(&drop_frame_and_ret); in Generate_NumberConstructor_ConstructStub()
334 __ bind(&new_object); in Generate_NumberConstructor_ConstructStub()
337 __ SmiTag(r8); in Generate_NumberConstructor_ConstructStub()
338 __ EnterBuiltinFrame(cp, r3, r8); in Generate_NumberConstructor_ConstructStub()
339 __ Push(r4); // first argument in Generate_NumberConstructor_ConstructStub()
340 __ Call(CodeFactory::FastNewObject(masm->isolate()).code(), in Generate_NumberConstructor_ConstructStub()
342 __ Pop(r4); in Generate_NumberConstructor_ConstructStub()
343 __ LeaveBuiltinFrame(cp, r3, r8); in Generate_NumberConstructor_ConstructStub()
344 __ SmiUntag(r8); in Generate_NumberConstructor_ConstructStub()
346 __ StoreP(r4, FieldMemOperand(r2, JSValue::kValueOffset), r0); in Generate_NumberConstructor_ConstructStub()
348 __ bind(&drop_frame_and_ret); in Generate_NumberConstructor_ConstructStub()
350 __ Drop(r8); in Generate_NumberConstructor_ConstructStub()
351 __ Ret(1); in Generate_NumberConstructor_ConstructStub()
368 __ LoadRR(r4, r2); // Store argc in r4 in Generate_StringConstructor()
369 __ CmpP(r2, Operand::Zero()); in Generate_StringConstructor()
370 __ beq(&no_arguments); in Generate_StringConstructor()
371 __ SubP(r2, r2, Operand(1)); in Generate_StringConstructor()
372 __ ShiftLeftP(r2, r2, Operand(kPointerSizeLog2)); in Generate_StringConstructor()
373 __ LoadP(r2, MemOperand(sp, r2)); in Generate_StringConstructor()
380 __ JumpIfSmi(r2, &to_string); in Generate_StringConstructor()
382 __ CompareObjectType(r2, r5, r5, FIRST_NONSTRING_TYPE); in Generate_StringConstructor()
383 __ bgt(&to_string); in Generate_StringConstructor()
384 __ beq(&symbol_descriptive_string); in Generate_StringConstructor()
385 __ b(&drop_frame_and_ret); in Generate_StringConstructor()
389 __ bind(&no_arguments); in Generate_StringConstructor()
391 __ LoadRoot(r2, Heap::kempty_stringRootIndex); in Generate_StringConstructor()
392 __ Ret(1); in Generate_StringConstructor()
396 __ bind(&to_string); in Generate_StringConstructor()
399 __ SmiTag(r4); in Generate_StringConstructor()
400 __ EnterBuiltinFrame(cp, r3, r4); in Generate_StringConstructor()
401 __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET); in Generate_StringConstructor()
402 __ LeaveBuiltinFrame(cp, r3, r4); in Generate_StringConstructor()
403 __ SmiUntag(r4); in Generate_StringConstructor()
405 __ b(&drop_frame_and_ret); in Generate_StringConstructor()
407 __ bind(&symbol_descriptive_string); in Generate_StringConstructor()
409 __ Drop(r4); in Generate_StringConstructor()
410 __ Drop(1); in Generate_StringConstructor()
411 __ Push(r2); in Generate_StringConstructor()
412 __ TailCallRuntime(Runtime::kSymbolDescriptiveString); in Generate_StringConstructor()
415 __ bind(&drop_frame_and_ret); in Generate_StringConstructor()
417 __ Drop(r4); in Generate_StringConstructor()
418 __ Ret(1); in Generate_StringConstructor()
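
The String constructor's call path adds one wrinkle over Number: the CompareObjectType against FIRST_NONSTRING_TYPE splits three ways -- already a string (return it unchanged), a Symbol (tail-call Runtime::kSymbolDescriptiveString), anything else (ToString). A sketch of that split (the enum and helpers are illustrative stand-ins, not V8 types):

    #include <string>

    enum class Kind { kString, kSymbol, kOther };
    struct Val { Kind kind; std::string s; };

    std::string ToStringSketch(const Val& v) { return v.s; }
    std::string SymbolDescriptiveStringSketch(const Val& v) {
      return "Symbol(" + v.s + ")";  // shape of kSymbolDescriptiveString
    }

    std::string StringCallSketch(const Val* first_arg) {
      if (first_arg == nullptr) return "";  // kempty_stringRootIndex
      switch (first_arg->kind) {
        case Kind::kString: return first_arg->s;  // drop_frame_and_ret
        case Kind::kSymbol: return SymbolDescriptiveStringSketch(*first_arg);
        default:            return ToStringSketch(*first_arg);  // to_string
      }
    }
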
435 __ LoadP(cp, FieldMemOperand(r3, JSFunction::kContextOffset)); in Generate_StringConstructor_ConstructStub()
440 __ LoadRR(r8, r2); // Store argc in r8. in Generate_StringConstructor_ConstructStub()
441 __ CmpP(r2, Operand::Zero()); in Generate_StringConstructor_ConstructStub()
442 __ beq(&no_arguments); in Generate_StringConstructor_ConstructStub()
443 __ SubP(r2, r2, Operand(1)); in Generate_StringConstructor_ConstructStub()
444 __ ShiftLeftP(r4, r2, Operand(kPointerSizeLog2)); in Generate_StringConstructor_ConstructStub()
445 __ LoadP(r4, MemOperand(sp, r4)); in Generate_StringConstructor_ConstructStub()
446 __ b(&done); in Generate_StringConstructor_ConstructStub()
447 __ bind(&no_arguments); in Generate_StringConstructor_ConstructStub()
448 __ LoadRoot(r4, Heap::kempty_stringRootIndex); in Generate_StringConstructor_ConstructStub()
449 __ bind(&done); in Generate_StringConstructor_ConstructStub()
455 __ JumpIfSmi(r4, &convert); in Generate_StringConstructor_ConstructStub()
456 __ CompareObjectType(r4, r6, r6, FIRST_NONSTRING_TYPE); in Generate_StringConstructor_ConstructStub()
457 __ blt(&done_convert); in Generate_StringConstructor_ConstructStub()
458 __ bind(&convert); in Generate_StringConstructor_ConstructStub()
461 __ SmiTag(r8); in Generate_StringConstructor_ConstructStub()
462 __ EnterBuiltinFrame(cp, r3, r8); in Generate_StringConstructor_ConstructStub()
463 __ Push(r5); in Generate_StringConstructor_ConstructStub()
464 __ LoadRR(r2, r4); in Generate_StringConstructor_ConstructStub()
465 __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET); in Generate_StringConstructor_ConstructStub()
466 __ LoadRR(r4, r2); in Generate_StringConstructor_ConstructStub()
467 __ Pop(r5); in Generate_StringConstructor_ConstructStub()
468 __ LeaveBuiltinFrame(cp, r3, r8); in Generate_StringConstructor_ConstructStub()
469 __ SmiUntag(r8); in Generate_StringConstructor_ConstructStub()
471 __ bind(&done_convert); in Generate_StringConstructor_ConstructStub()
476 __ CmpP(r3, r5); in Generate_StringConstructor_ConstructStub()
477 __ bne(&new_object); in Generate_StringConstructor_ConstructStub()
480 __ AllocateJSValue(r2, r3, r4, r6, r7, &new_object); in Generate_StringConstructor_ConstructStub()
481 __ b(&drop_frame_and_ret); in Generate_StringConstructor_ConstructStub()
484 __ bind(&new_object); in Generate_StringConstructor_ConstructStub()
487 __ SmiTag(r8); in Generate_StringConstructor_ConstructStub()
488 __ EnterBuiltinFrame(cp, r3, r8); in Generate_StringConstructor_ConstructStub()
489 __ Push(r4); // first argument in Generate_StringConstructor_ConstructStub()
490 __ Call(CodeFactory::FastNewObject(masm->isolate()).code(), in Generate_StringConstructor_ConstructStub()
492 __ Pop(r4); in Generate_StringConstructor_ConstructStub()
493 __ LeaveBuiltinFrame(cp, r3, r8); in Generate_StringConstructor_ConstructStub()
494 __ SmiUntag(r8); in Generate_StringConstructor_ConstructStub()
496 __ StoreP(r4, FieldMemOperand(r2, JSValue::kValueOffset), r0); in Generate_StringConstructor_ConstructStub()
498 __ bind(&drop_frame_and_ret); in Generate_StringConstructor_ConstructStub()
500 __ Drop(r8); in Generate_StringConstructor_ConstructStub()
501 __ Ret(1); in Generate_StringConstructor_ConstructStub()
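
Both construct stubs above (Number and String) share one skeleton: coerce the first argument if needed, then, when new.target equals the target (CmpP(r3, r5)), AllocateJSValue a wrapper and store the primitive at JSValue::kValueOffset; otherwise take the CodeFactory::FastNewObject subclass path and store the value afterwards. A condensed sketch of that skeleton with stand-in types:

    #include <memory>

    struct PrimitiveSketch { int payload; };          // coerced number/string
    struct WrapperSketch { PrimitiveSketch value; };  // JSValue's kValueOffset slot

    std::unique_ptr<WrapperSketch> ConstructWrapperSketch(
        PrimitiveSketch v, bool new_target_is_target) {
      std::unique_ptr<WrapperSketch> obj;
      if (new_target_is_target) {
        obj = std::make_unique<WrapperSketch>();  // AllocateJSValue fast path
      } else {
        obj = std::make_unique<WrapperSketch>();  // FastNewObject subclass path
      }
      obj->value = v;  // StoreP(r4, FieldMemOperand(r2, JSValue::kValueOffset))
      return obj;
    }
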
506 __ LoadP(ip, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset)); in GenerateTailCallToSharedCode()
507 __ LoadP(ip, FieldMemOperand(ip, SharedFunctionInfo::kCodeOffset)); in GenerateTailCallToSharedCode()
508 __ AddP(ip, Operand(Code::kHeaderSize - kHeapObjectTag)); in GenerateTailCallToSharedCode()
509 __ JumpToJSEntry(ip); in GenerateTailCallToSharedCode()
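
The AddP(ip, Operand(Code::kHeaderSize - kHeapObjectTag)) pattern here recurs throughout the file: a Code object pointer is tagged (off by kHeapObjectTag), and the first instruction lives kHeaderSize bytes past the object start, so the entry address is code + kHeaderSize - kHeapObjectTag. A tiny sketch of the arithmetic, with illustrative constants rather than V8's real ones:

    #include <cstdint>

    constexpr intptr_t kHeapObjectTagSketch = 1;    // tagged-pointer bias
    constexpr intptr_t kCodeHeaderSizeSketch = 96;  // illustrative header size

    // Tagged Code pointer -> address of the first instruction.
    intptr_t CodeEntrySketch(intptr_t tagged_code_ptr) {
      return tagged_code_ptr + (kCodeHeaderSizeSketch - kHeapObjectTagSketch);
    }
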
524 __ SmiTag(r2); in GenerateTailCallToReturnedCode()
525 __ Push(r2, r3, r5, r3); in GenerateTailCallToReturnedCode()
527 __ CallRuntime(function_id, 1); in GenerateTailCallToReturnedCode()
528 __ LoadRR(r4, r2); in GenerateTailCallToReturnedCode()
531 __ Pop(r2, r3, r5); in GenerateTailCallToReturnedCode()
532 __ SmiUntag(r2); in GenerateTailCallToReturnedCode()
534 __ AddP(ip, r4, Operand(Code::kHeaderSize - kHeapObjectTag)); in GenerateTailCallToReturnedCode()
535 __ JumpToJSEntry(ip); in GenerateTailCallToReturnedCode()
545 __ CmpLogicalP(sp, RootMemOperand(Heap::kStackLimitRootIndex)); in Generate_InOptimizationQueue()
546 __ bge(&ok, Label::kNear); in Generate_InOptimizationQueue()
550 __ bind(&ok); in Generate_InOptimizationQueue()
578 __ SmiTag(r6, r2); in Generate_JSConstructStubHelper()
579 __ LoadAndTestP(r6, r6); in Generate_JSConstructStubHelper()
580 __ Push(cp, r6); in Generate_JSConstructStubHelper()
581 __ PushRoot(Heap::kTheHoleValueRootIndex); in Generate_JSConstructStubHelper()
583 __ SmiTag(r2); in Generate_JSConstructStubHelper()
584 __ Push(cp, r2); in Generate_JSConstructStubHelper()
587 __ Push(r3, r5); in Generate_JSConstructStubHelper()
588 __ Call(CodeFactory::FastNewObject(masm->isolate()).code(), in Generate_JSConstructStubHelper()
590 __ LoadRR(r6, r2); in Generate_JSConstructStubHelper()
591 __ Pop(r3, r5); in Generate_JSConstructStubHelper()
600 __ LoadP(r2, MemOperand(sp)); in Generate_JSConstructStubHelper()
601 __ SmiUntag(r2); in Generate_JSConstructStubHelper()
602 __ LoadAndTestP(r2, r2); in Generate_JSConstructStubHelper()
607 __ Push(r6, r6); in Generate_JSConstructStubHelper()
611 __ bind(&post_instantiation_deopt_entry); in Generate_JSConstructStubHelper()
614 __ la(r4, MemOperand(fp, StandardFrameConstants::kCallerSPOffset)); in Generate_JSConstructStubHelper()
626 __ beq(&no_args); in Generate_JSConstructStubHelper()
627 __ ShiftLeftP(ip, r2, Operand(kPointerSizeLog2)); in Generate_JSConstructStubHelper()
628 __ SubP(sp, sp, ip); in Generate_JSConstructStubHelper()
629 __ LoadRR(r1, r2); in Generate_JSConstructStubHelper()
630 __ bind(&loop); in Generate_JSConstructStubHelper()
631 __ lay(ip, MemOperand(ip, -kPointerSize)); in Generate_JSConstructStubHelper()
632 __ LoadP(r0, MemOperand(ip, r4)); in Generate_JSConstructStubHelper()
633 __ StoreP(r0, MemOperand(ip, sp)); in Generate_JSConstructStubHelper()
634 __ BranchOnCount(r1, &loop); in Generate_JSConstructStubHelper()
635 __ bind(&no_args); in Generate_JSConstructStubHelper()
643 __ InvokeFunction(r3, r5, actual, CALL_FUNCTION, in Generate_JSConstructStubHelper()
656 __ LoadP(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset)); in Generate_JSConstructStubHelper()
669 __ JumpIfSmi(r2, &use_receiver); in Generate_JSConstructStubHelper()
673 __ CompareObjectType(r2, r3, r5, FIRST_JS_RECEIVER_TYPE); in Generate_JSConstructStubHelper()
674 __ bge(&exit); in Generate_JSConstructStubHelper()
678 __ bind(&use_receiver); in Generate_JSConstructStubHelper()
679 __ LoadP(r2, MemOperand(sp)); in Generate_JSConstructStubHelper()
683 __ bind(&exit); in Generate_JSConstructStubHelper()
687 __ LoadP(r3, MemOperand(sp, 1 * kPointerSize)); in Generate_JSConstructStubHelper()
689 __ LoadP(r3, MemOperand(sp)); in Generate_JSConstructStubHelper()
700 __ JumpIfNotSmi(r2, &dont_throw); in Generate_JSConstructStubHelper()
703 __ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject); in Generate_JSConstructStubHelper()
705 __ bind(&dont_throw); in Generate_JSConstructStubHelper()
708 __ SmiToPtrArrayOffset(r3, r3); in Generate_JSConstructStubHelper()
709 __ AddP(sp, sp, r3); in Generate_JSConstructStubHelper()
710 __ AddP(sp, sp, Operand(kPointerSize)); in Generate_JSConstructStubHelper()
712 __ IncrementCounter(isolate->counters()->constructed_objects(), 1, r3, r4); in Generate_JSConstructStubHelper()
714 __ Ret(); in Generate_JSConstructStubHelper()
728 __ pop(r3); in Generate_JSConstructStubHelper()
729 __ Push(r2, r2); in Generate_JSConstructStubHelper()
732 __ LoadP(r2, MemOperand(fp, ConstructFrameConstants::kLengthOffset)); in Generate_JSConstructStubHelper()
733 __ SmiUntag(r2); in Generate_JSConstructStubHelper()
737 __ la(r5, MemOperand(fp, StandardFrameConstants::kCallerSPOffset)); in Generate_JSConstructStubHelper()
738 __ ShiftLeftP(ip, r2, Operand(kPointerSizeLog2)); in Generate_JSConstructStubHelper()
739 __ LoadP(r5, MemOperand(r5, ip)); in Generate_JSConstructStubHelper()
742 __ b(&post_instantiation_deopt_entry); in Generate_JSConstructStubHelper()
773 __ AssertGeneratorObject(r3); in Generate_ResumeGeneratorTrampoline()
776 __ StoreP(r2, FieldMemOperand(r3, JSGeneratorObject::kInputOrDebugPosOffset), in Generate_ResumeGeneratorTrampoline()
778 __ RecordWriteField(r3, JSGeneratorObject::kInputOrDebugPosOffset, r2, r5, in Generate_ResumeGeneratorTrampoline()
782 __ StoreP(r4, FieldMemOperand(r3, JSGeneratorObject::kResumeModeOffset)); in Generate_ResumeGeneratorTrampoline()
785 __ LoadP(r6, FieldMemOperand(r3, JSGeneratorObject::kFunctionOffset)); in Generate_ResumeGeneratorTrampoline()
786 __ LoadP(cp, FieldMemOperand(r6, JSFunction::kContextOffset)); in Generate_ResumeGeneratorTrampoline()
793 __ mov(ip, Operand(debug_hook)); in Generate_ResumeGeneratorTrampoline()
794 __ LoadB(ip, MemOperand(ip)); in Generate_ResumeGeneratorTrampoline()
795 __ CmpSmiLiteral(ip, Smi::kZero, r0); in Generate_ResumeGeneratorTrampoline()
796 __ bne(&prepare_step_in_if_stepping); in Generate_ResumeGeneratorTrampoline()
803 __ mov(ip, Operand(debug_suspended_generator)); in Generate_ResumeGeneratorTrampoline()
804 __ LoadP(ip, MemOperand(ip)); in Generate_ResumeGeneratorTrampoline()
805 __ CmpP(ip, r3); in Generate_ResumeGeneratorTrampoline()
806 __ beq(&prepare_step_in_suspended_generator); in Generate_ResumeGeneratorTrampoline()
807 __ bind(&stepping_prepared); in Generate_ResumeGeneratorTrampoline()
810 __ LoadP(ip, FieldMemOperand(r3, JSGeneratorObject::kReceiverOffset)); in Generate_ResumeGeneratorTrampoline()
811 __ Push(ip); in Generate_ResumeGeneratorTrampoline()
826 __ LoadP(r5, FieldMemOperand(r6, JSFunction::kSharedFunctionInfoOffset)); in Generate_ResumeGeneratorTrampoline()
827 __ LoadW( in Generate_ResumeGeneratorTrampoline()
831 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); in Generate_ResumeGeneratorTrampoline()
833 __ CmpP(r2, Operand::Zero()); in Generate_ResumeGeneratorTrampoline()
834 __ beq(&done_loop); in Generate_ResumeGeneratorTrampoline()
836 __ SmiUntag(r2); in Generate_ResumeGeneratorTrampoline()
837 __ LoadAndTestP(r2, r2); in Generate_ResumeGeneratorTrampoline()
838 __ beq(&done_loop); in Generate_ResumeGeneratorTrampoline()
840 __ LoadRR(r1, r2); in Generate_ResumeGeneratorTrampoline()
841 __ bind(&loop); in Generate_ResumeGeneratorTrampoline()
842 __ push(ip); in Generate_ResumeGeneratorTrampoline()
843 __ BranchOnCount(r1, &loop); in Generate_ResumeGeneratorTrampoline()
844 __ bind(&done_loop); in Generate_ResumeGeneratorTrampoline()
849 __ LoadP(r5, FieldMemOperand(r5, SharedFunctionInfo::kFunctionDataOffset)); in Generate_ResumeGeneratorTrampoline()
850 __ CompareObjectType(r5, r5, r5, BYTECODE_ARRAY_TYPE); in Generate_ResumeGeneratorTrampoline()
851 __ Assert(eq, kMissingBytecodeArray); in Generate_ResumeGeneratorTrampoline()
859 __ LoadRR(r5, r3); in Generate_ResumeGeneratorTrampoline()
860 __ LoadRR(r3, r6); in Generate_ResumeGeneratorTrampoline()
861 __ LoadP(ip, FieldMemOperand(r3, JSFunction::kCodeEntryOffset)); in Generate_ResumeGeneratorTrampoline()
862 __ JumpToJSEntry(ip); in Generate_ResumeGeneratorTrampoline()
865 __ bind(&prepare_step_in_if_stepping); in Generate_ResumeGeneratorTrampoline()
868 __ Push(r3, r4, r6); in Generate_ResumeGeneratorTrampoline()
869 __ CallRuntime(Runtime::kDebugOnFunctionCall); in Generate_ResumeGeneratorTrampoline()
870 __ Pop(r3, r4); in Generate_ResumeGeneratorTrampoline()
871 __ LoadP(r6, FieldMemOperand(r3, JSGeneratorObject::kFunctionOffset)); in Generate_ResumeGeneratorTrampoline()
873 __ b(&stepping_prepared); in Generate_ResumeGeneratorTrampoline()
875 __ bind(&prepare_step_in_suspended_generator); in Generate_ResumeGeneratorTrampoline()
878 __ Push(r3, r4); in Generate_ResumeGeneratorTrampoline()
879 __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator); in Generate_ResumeGeneratorTrampoline()
880 __ Pop(r3, r4); in Generate_ResumeGeneratorTrampoline()
881 __ LoadP(r6, FieldMemOperand(r3, JSGeneratorObject::kFunctionOffset)); in Generate_ResumeGeneratorTrampoline()
883 __ b(&stepping_prepared); in Generate_ResumeGeneratorTrampoline()
888 __ push(r3); in Generate_ConstructedNonConstructable()
889 __ CallRuntime(Runtime::kThrowConstructedNonConstructable); in Generate_ConstructedNonConstructable()
901 __ LoadRoot(r4, Heap::kRealStackLimitRootIndex); in Generate_CheckStackOverflow()
904 __ SubP(r4, sp, r4); in Generate_CheckStackOverflow()
907 __ SmiToPtrArrayOffset(r0, argc); in Generate_CheckStackOverflow()
910 __ ShiftLeftP(r0, argc, Operand(kPointerSizeLog2)); in Generate_CheckStackOverflow()
912 __ CmpP(r4, r0); in Generate_CheckStackOverflow()
913 __ bgt(&okay); // Signed comparison. in Generate_CheckStackOverflow()
916 __ CallRuntime(Runtime::kThrowStackOverflow); in Generate_CheckStackOverflow()
918 __ bind(&okay); in Generate_CheckStackOverflow()
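
The check above computes headroom rather than a predicted stack pointer: r4 = sp - real_stack_limit, r0 = argc scaled to bytes, and execution proceeds only if headroom > needed; the comparison is deliberately signed, so an sp already below the limit fails too. A sketch (kPointerSizeSketch assumes 64-bit s390x):

    #include <cstdint>

    constexpr intptr_t kPointerSizeSketch = 8;

    bool StackHasRoomSketch(intptr_t sp, intptr_t real_stack_limit,
                            intptr_t argc) {
      intptr_t headroom = sp - real_stack_limit;    // SubP(r4, sp, r4)
      intptr_t needed = argc * kPointerSizeSketch;  // ShiftLeftP by kPointerSizeLog2
      return headroom > needed;                     // bgt(&okay), signed
    }
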
940 __ mov(cp, Operand(context_address)); in Generate_JSEntryTrampolineHelper()
941 __ LoadP(cp, MemOperand(cp)); in Generate_JSEntryTrampolineHelper()
943 __ InitializeRootRegister(); in Generate_JSEntryTrampolineHelper()
946 __ Push(r3, r4); in Generate_JSEntryTrampolineHelper()
963 __ ShiftLeftP(r7, r5, Operand(kPointerSizeLog2)); in Generate_JSEntryTrampolineHelper()
964 __ SubRR(sp, r7); // Buy the stack frame to fit args in Generate_JSEntryTrampolineHelper()
965 __ LoadImmP(r9, Operand(zero)); // Initialize argv index in Generate_JSEntryTrampolineHelper()
966 __ bind(&argLoop); in Generate_JSEntryTrampolineHelper()
967 __ CmpPH(r7, Operand(zero)); in Generate_JSEntryTrampolineHelper()
968 __ beq(&argExit, Label::kNear); in Generate_JSEntryTrampolineHelper()
969 __ lay(r7, MemOperand(r7, -kPointerSize)); in Generate_JSEntryTrampolineHelper()
970 __ LoadP(r8, MemOperand(r9, r6)); // read next parameter in Generate_JSEntryTrampolineHelper()
971 __ la(r9, MemOperand(r9, kPointerSize)); // r9++; in Generate_JSEntryTrampolineHelper()
972 __ LoadP(r0, MemOperand(r8)); // dereference handle in Generate_JSEntryTrampolineHelper()
973 __ StoreP(r0, MemOperand(r7, sp)); // push parameter in Generate_JSEntryTrampolineHelper()
974 __ b(&argLoop); in Generate_JSEntryTrampolineHelper()
975 __ bind(&argExit); in Generate_JSEntryTrampolineHelper()
978 __ LoadRR(r6, r2); in Generate_JSEntryTrampolineHelper()
979 __ LoadRR(r2, r5); in Generate_JSEntryTrampolineHelper()
980 __ LoadRR(r5, r6); in Generate_JSEntryTrampolineHelper()
984 __ LoadRoot(r6, Heap::kUndefinedValueRootIndex); in Generate_JSEntryTrampolineHelper()
985 __ LoadRR(r7, r6); in Generate_JSEntryTrampolineHelper()
986 __ LoadRR(r8, r6); in Generate_JSEntryTrampolineHelper()
987 __ LoadRR(r9, r6); in Generate_JSEntryTrampolineHelper()
993 __ Call(builtin, RelocInfo::CODE_TARGET); in Generate_JSEntryTrampolineHelper()
998 __ b(r14); in Generate_JSEntryTrampolineHelper()
1015 __ LoadP(args_count, in LeaveInterpreterFrame()
1017 __ LoadlW(args_count, in LeaveInterpreterFrame()
1021 __ LeaveFrame(StackFrame::JAVA_SCRIPT); in LeaveInterpreterFrame()
1023 __ AddP(sp, sp, args_count); in LeaveInterpreterFrame()
1049 __ PushStandardFrame(r3); in Generate_InterpreterEntryTrampoline()
1053 __ LoadP(r2, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset)); in Generate_InterpreterEntryTrampoline()
1057 __ LoadP(debug_info, in Generate_InterpreterEntryTrampoline()
1060 __ LoadP(kInterpreterBytecodeArrayRegister, in Generate_InterpreterEntryTrampoline()
1062 __ TestIfSmi(debug_info); in Generate_InterpreterEntryTrampoline()
1063 __ beq(&array_done); in Generate_InterpreterEntryTrampoline()
1064 __ LoadP(kInterpreterBytecodeArrayRegister, in Generate_InterpreterEntryTrampoline()
1066 __ bind(&array_done); in Generate_InterpreterEntryTrampoline()
1070 __ LoadP(r2, FieldMemOperand(r2, SharedFunctionInfo::kCodeOffset)); in Generate_InterpreterEntryTrampoline()
1071 __ CmpP(r2, Operand(masm->CodeObject())); // Self-reference to this code. in Generate_InterpreterEntryTrampoline()
1072 __ bne(&switch_to_different_code_kind); in Generate_InterpreterEntryTrampoline()
1075 __ LoadP(r6, FieldMemOperand(r3, JSFunction::kFeedbackVectorOffset)); in Generate_InterpreterEntryTrampoline()
1076 __ LoadP(r6, FieldMemOperand(r6, Cell::kValueOffset)); in Generate_InterpreterEntryTrampoline()
1077 __ LoadP(r1, FieldMemOperand( in Generate_InterpreterEntryTrampoline()
1080 __ AddSmiLiteral(r1, r1, Smi::FromInt(1), r0); in Generate_InterpreterEntryTrampoline()
1081 __ StoreP(r1, FieldMemOperand( in Generate_InterpreterEntryTrampoline()
1087 __ TestIfSmi(kInterpreterBytecodeArrayRegister); in Generate_InterpreterEntryTrampoline()
1088 __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry); in Generate_InterpreterEntryTrampoline()
1089 __ CompareObjectType(kInterpreterBytecodeArrayRegister, r2, no_reg, in Generate_InterpreterEntryTrampoline()
1091 __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry); in Generate_InterpreterEntryTrampoline()
1095 __ mov(r1, Operand(BytecodeArray::kNoAgeBytecodeAge)); in Generate_InterpreterEntryTrampoline()
1096 __ StoreByte(r1, FieldMemOperand(kInterpreterBytecodeArrayRegister, in Generate_InterpreterEntryTrampoline()
1101 __ mov(kInterpreterBytecodeOffsetRegister, in Generate_InterpreterEntryTrampoline()
1105 __ SmiTag(r4, kInterpreterBytecodeOffsetRegister); in Generate_InterpreterEntryTrampoline()
1106 __ Push(r5, kInterpreterBytecodeArrayRegister, r4); in Generate_InterpreterEntryTrampoline()
1111 __ LoadlW(r4, FieldMemOperand(kInterpreterBytecodeArrayRegister, in Generate_InterpreterEntryTrampoline()
1116 __ SubP(r5, sp, r4); in Generate_InterpreterEntryTrampoline()
1117 __ LoadRoot(r0, Heap::kRealStackLimitRootIndex); in Generate_InterpreterEntryTrampoline()
1118 __ CmpLogicalP(r5, r0); in Generate_InterpreterEntryTrampoline()
1119 __ bge(&ok); in Generate_InterpreterEntryTrampoline()
1120 __ CallRuntime(Runtime::kThrowStackOverflow); in Generate_InterpreterEntryTrampoline()
1121 __ bind(&ok); in Generate_InterpreterEntryTrampoline()
1126 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex); in Generate_InterpreterEntryTrampoline()
1127 __ ShiftRightP(r4, r4, Operand(kPointerSizeLog2)); in Generate_InterpreterEntryTrampoline()
1128 __ LoadAndTestP(r4, r4); in Generate_InterpreterEntryTrampoline()
1129 __ beq(&no_args); in Generate_InterpreterEntryTrampoline()
1130 __ LoadRR(r1, r4); in Generate_InterpreterEntryTrampoline()
1131 __ bind(&loop); in Generate_InterpreterEntryTrampoline()
1132 __ push(r5); in Generate_InterpreterEntryTrampoline()
1133 __ SubP(r1, Operand(1)); in Generate_InterpreterEntryTrampoline()
1134 __ bne(&loop); in Generate_InterpreterEntryTrampoline()
1135 __ bind(&no_args); in Generate_InterpreterEntryTrampoline()
1139 __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex); in Generate_InterpreterEntryTrampoline()
1140 __ mov(kInterpreterDispatchTableRegister, in Generate_InterpreterEntryTrampoline()
1145 __ LoadlB(r3, MemOperand(kInterpreterBytecodeArrayRegister, in Generate_InterpreterEntryTrampoline()
1147 __ ShiftLeftP(ip, r3, Operand(kPointerSizeLog2)); in Generate_InterpreterEntryTrampoline()
1148 __ LoadP(ip, MemOperand(kInterpreterDispatchTableRegister, ip)); in Generate_InterpreterEntryTrampoline()
1149 __ Call(ip); in Generate_InterpreterEntryTrampoline()
1155 __ Ret(); in Generate_InterpreterEntryTrampoline()
1160 __ bind(&switch_to_different_code_kind); in Generate_InterpreterEntryTrampoline()
1161 __ LeaveFrame(StackFrame::JAVA_SCRIPT); in Generate_InterpreterEntryTrampoline()
1162 __ LoadP(r6, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset)); in Generate_InterpreterEntryTrampoline()
1163 __ LoadP(r6, FieldMemOperand(r6, SharedFunctionInfo::kCodeOffset)); in Generate_InterpreterEntryTrampoline()
1164 __ AddP(r6, r6, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate_InterpreterEntryTrampoline()
1165 __ StoreP(r6, FieldMemOperand(r3, JSFunction::kCodeEntryOffset), r0); in Generate_InterpreterEntryTrampoline()
1166 __ RecordWriteCodeEntryField(r3, r6, r7); in Generate_InterpreterEntryTrampoline()
1167 __ JumpToJSEntry(r6); in Generate_InterpreterEntryTrampoline()
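
The last few matches before the Ret() show the core interpreter dispatch: load the byte at bytecode_array + offset, scale it by kPointerSizeLog2, index the dispatch table, and Call the handler. In C++ terms (a sketch with made-up types; real handlers are code objects, not function pointers):

    #include <cstdint>

    using BytecodeHandlerSketch = void (*)();

    // Mirrors LoadlB -> ShiftLeftP -> LoadP(dispatch_table, ip) -> Call(ip).
    void DispatchSketch(const uint8_t* bytecode_array, intptr_t offset,
                        BytecodeHandlerSketch const* dispatch_table) {
      uint8_t bytecode = bytecode_array[offset];  // LoadlB(r3, ...)
      dispatch_table[bytecode]();                 // scaled index, then Call(ip)
    }
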
1176 __ LoadRoot(scratch, Heap::kRealStackLimitRootIndex); in Generate_StackOverflowCheck()
1179 __ SubP(scratch, sp, scratch); in Generate_StackOverflowCheck()
1181 __ ShiftLeftP(r0, num_args, Operand(kPointerSizeLog2)); in Generate_StackOverflowCheck()
1182 __ CmpP(scratch, r0); in Generate_StackOverflowCheck()
1183 __ ble(stack_overflow); // Signed comparison. in Generate_StackOverflowCheck()
1194 __ AddP(index, index, Operand(kPointerSize)); // Bias up for LoadPU in Generate_InterpreterPushArgs()
1195 __ LoadRR(r0, count); in Generate_InterpreterPushArgs()
1196 __ bind(&loop); in Generate_InterpreterPushArgs()
1197 __ LoadP(scratch, MemOperand(index, -kPointerSize)); in Generate_InterpreterPushArgs()
1198 __ lay(index, MemOperand(index, -kPointerSize)); in Generate_InterpreterPushArgs()
1199 __ push(scratch); in Generate_InterpreterPushArgs()
1200 __ SubP(r0, Operand(1)); in Generate_InterpreterPushArgs()
1201 __ bne(&loop); in Generate_InterpreterPushArgs()
1218 __ AddP(r5, r2, Operand(1)); in Generate_InterpreterPushArgsAndCallImpl()
1225 __ Jump(masm->isolate()->builtins()->CallFunction(ConvertReceiverMode::kAny, in Generate_InterpreterPushArgsAndCallImpl()
1229 __ Jump(masm->isolate()->builtins()->CallWithSpread(), in Generate_InterpreterPushArgsAndCallImpl()
1232 __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny, in Generate_InterpreterPushArgsAndCallImpl()
1237 __ bind(&stack_overflow); in Generate_InterpreterPushArgsAndCallImpl()
1239 __ TailCallRuntime(Runtime::kThrowStackOverflow); in Generate_InterpreterPushArgsAndCallImpl()
1241 __ bkpt(0); in Generate_InterpreterPushArgsAndCallImpl()
1258 __ LoadImmP(r0, Operand::Zero()); in Generate_InterpreterPushArgsAndConstructImpl()
1259 __ push(r0); in Generate_InterpreterPushArgsAndConstructImpl()
1263 __ CmpP(r2, Operand::Zero()); in Generate_InterpreterPushArgsAndConstructImpl()
1264 __ beq(&skip); in Generate_InterpreterPushArgsAndConstructImpl()
1266 __ bind(&skip); in Generate_InterpreterPushArgsAndConstructImpl()
1268 __ AssertUndefinedOrAllocationSite(r4, r7); in Generate_InterpreterPushArgsAndConstructImpl()
1270 __ AssertFunction(r3); in Generate_InterpreterPushArgsAndConstructImpl()
1274 __ LoadP(r6, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset)); in Generate_InterpreterPushArgsAndConstructImpl()
1275 __ LoadP(r6, FieldMemOperand(r6, SharedFunctionInfo::kConstructStubOffset)); in Generate_InterpreterPushArgsAndConstructImpl()
1277 __ AddP(ip, r6, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate_InterpreterPushArgsAndConstructImpl()
1278 __ Jump(ip); in Generate_InterpreterPushArgsAndConstructImpl()
1281 __ Jump(masm->isolate()->builtins()->ConstructWithSpread(), in Generate_InterpreterPushArgsAndConstructImpl()
1286 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); in Generate_InterpreterPushArgsAndConstructImpl()
1289 __ bind(&stack_overflow); in Generate_InterpreterPushArgsAndConstructImpl()
1291 __ TailCallRuntime(Runtime::kThrowStackOverflow); in Generate_InterpreterPushArgsAndConstructImpl()
1293 __ bkpt(0); in Generate_InterpreterPushArgsAndConstructImpl()
1308 __ AddP(r6, r2, Operand(1)); // Add one for receiver. in Generate_InterpreterPushArgsAndConstructArray()
1314 __ LoadRR(r5, r3); in Generate_InterpreterPushArgsAndConstructArray()
1317 __ TailCallStub(&stub); in Generate_InterpreterPushArgsAndConstructArray()
1319 __ bind(&stack_overflow); in Generate_InterpreterPushArgsAndConstructArray()
1321 __ TailCallRuntime(Runtime::kThrowStackOverflow); in Generate_InterpreterPushArgsAndConstructArray()
1323 __ bkpt(0); in Generate_InterpreterPushArgsAndConstructArray()
1333 __ Move(r4, masm->isolate()->builtins()->InterpreterEntryTrampoline()); in Generate_InterpreterEnterBytecode()
1334 __ AddP(r14, r4, Operand(interpreter_entry_return_pc_offset->value() + in Generate_InterpreterEnterBytecode()
1338 __ mov(kInterpreterDispatchTableRegister, in Generate_InterpreterEnterBytecode()
1343 __ LoadP(kInterpreterBytecodeArrayRegister, in Generate_InterpreterEnterBytecode()
1348 __ TestIfSmi(kInterpreterBytecodeArrayRegister); in Generate_InterpreterEnterBytecode()
1349 __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry); in Generate_InterpreterEnterBytecode()
1350 __ CompareObjectType(kInterpreterBytecodeArrayRegister, r3, no_reg, in Generate_InterpreterEnterBytecode()
1352 __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry); in Generate_InterpreterEnterBytecode()
1356 __ LoadP(kInterpreterBytecodeOffsetRegister, in Generate_InterpreterEnterBytecode()
1358 __ SmiUntag(kInterpreterBytecodeOffsetRegister); in Generate_InterpreterEnterBytecode()
1361 __ LoadlB(r3, MemOperand(kInterpreterBytecodeArrayRegister, in Generate_InterpreterEnterBytecode()
1363 __ ShiftLeftP(ip, r3, Operand(kPointerSizeLog2)); in Generate_InterpreterEnterBytecode()
1364 __ LoadP(ip, MemOperand(kInterpreterDispatchTableRegister, ip)); in Generate_InterpreterEnterBytecode()
1365 __ Jump(ip); in Generate_InterpreterEnterBytecode()
1372 __ LoadP(r3, MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp)); in Generate_InterpreterEnterBytecodeAdvance()
1373 __ LoadP(r4, in Generate_InterpreterEnterBytecodeAdvance()
1375 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); in Generate_InterpreterEnterBytecodeAdvance()
1378 __ Push(kInterpreterAccumulatorRegister, r3, r4); in Generate_InterpreterEnterBytecodeAdvance()
1379 __ CallRuntime(Runtime::kInterpreterAdvanceBytecodeOffset); in Generate_InterpreterEnterBytecodeAdvance()
1380 __ Move(r4, r2); // Result is the new bytecode offset. in Generate_InterpreterEnterBytecodeAdvance()
1381 __ Pop(kInterpreterAccumulatorRegister); in Generate_InterpreterEnterBytecodeAdvance()
1383 __ StoreP(r4, in Generate_InterpreterEnterBytecodeAdvance()
1409 __ LoadP(index, FieldMemOperand(closure, JSFunction::kFeedbackVectorOffset)); in Generate_CompileLazy()
1410 __ LoadP(index, FieldMemOperand(index, Cell::kValueOffset)); in Generate_CompileLazy()
1411 __ JumpIfRoot(index, Heap::kUndefinedValueRootIndex, &gotta_call_runtime); in Generate_CompileLazy()
1413 __ LoadP(map, in Generate_CompileLazy()
1415 __ LoadP(map, in Generate_CompileLazy()
1417 __ LoadP(index, FieldMemOperand(map, FixedArray::kLengthOffset)); in Generate_CompileLazy()
1418 __ CmpSmiLiteral(index, Smi::FromInt(2), r0); in Generate_CompileLazy()
1419 __ blt(&try_shared); in Generate_CompileLazy()
1428 __ LoadP(native_context, NativeContextMemOperand()); in Generate_CompileLazy()
1430 __ bind(&loop_top); in Generate_CompileLazy()
1435 __ SmiToPtrArrayOffset(array_pointer, index); in Generate_CompileLazy()
1436 __ AddP(array_pointer, map, array_pointer); in Generate_CompileLazy()
1437 __ LoadP(temp, FieldMemOperand(array_pointer, in Generate_CompileLazy()
1439 __ LoadP(temp, FieldMemOperand(temp, WeakCell::kValueOffset)); in Generate_CompileLazy()
1440 __ CmpP(temp, native_context); in Generate_CompileLazy()
1441 __ bne(&loop_bottom, Label::kNear); in Generate_CompileLazy()
1445 __ LoadP(entry, in Generate_CompileLazy()
1448 __ LoadP(entry, FieldMemOperand(entry, WeakCell::kValueOffset)); in Generate_CompileLazy()
1449 __ JumpIfSmi(entry, &try_shared); in Generate_CompileLazy()
1453 __ AddP(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate_CompileLazy()
1454 __ StoreP(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset), r0); in Generate_CompileLazy()
1455 __ RecordWriteCodeEntryField(closure, entry, r7); in Generate_CompileLazy()
1461 __ LoadP( in Generate_CompileLazy()
1463 __ StoreP(r7, FieldMemOperand(closure, JSFunction::kNextFunctionLinkOffset), in Generate_CompileLazy()
1465 __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, r7, temp, in Generate_CompileLazy()
1470 __ StoreP( in Generate_CompileLazy()
1474 __ LoadRR(r7, closure); in Generate_CompileLazy()
1475 __ RecordWriteContextSlot(native_context, function_list_offset, r7, temp, in Generate_CompileLazy()
1477 __ JumpToJSEntry(entry); in Generate_CompileLazy()
1479 __ bind(&loop_bottom); in Generate_CompileLazy()
1480 __ SubSmiLiteral(index, index, Smi::FromInt(SharedFunctionInfo::kEntryLength), in Generate_CompileLazy()
1482 __ CmpSmiLiteral(index, Smi::FromInt(1), r0); in Generate_CompileLazy()
1483 __ bgt(&loop_top); in Generate_CompileLazy()
1486 __ b(&gotta_call_runtime); in Generate_CompileLazy()
1488 __ bind(&try_shared); in Generate_CompileLazy()
1489 __ LoadP(entry, in Generate_CompileLazy()
1492 __ LoadlB(temp, FieldMemOperand( in Generate_CompileLazy()
1494 __ TestBit(temp, SharedFunctionInfo::kMarkedForTierUpBitWithinByte, r0); in Generate_CompileLazy()
1495 __ bne(&gotta_call_runtime); in Generate_CompileLazy()
1498 __ LoadP(entry, FieldMemOperand(entry, SharedFunctionInfo::kCodeOffset)); in Generate_CompileLazy()
1499 __ mov(r7, Operand(masm->CodeObject())); in Generate_CompileLazy()
1500 __ CmpP(entry, r7); in Generate_CompileLazy()
1501 __ beq(&gotta_call_runtime); in Generate_CompileLazy()
1504 __ AddP(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate_CompileLazy()
1505 __ StoreP(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset), r0); in Generate_CompileLazy()
1506 __ RecordWriteCodeEntryField(closure, entry, r7); in Generate_CompileLazy()
1507 __ JumpToJSEntry(entry); in Generate_CompileLazy()
1509 __ bind(&gotta_call_runtime); in Generate_CompileLazy()
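
Generate_CompileLazy's fast path is a linear scan of the optimized-code map reached via the closure's feedback vector: walk entries from the last one backwards (SubSmiLiteral by kEntryLength), compare each entry's cached native-context WeakCell against the current native context, and on a hit install the cached code at the closure's kCodeEntryOffset and jump to it; otherwise fall back to the SharedFunctionInfo's code or the runtime. A sketch of the scan with stand-in types:

    #include <cstddef>

    struct CodeMapEntrySketch {
      const void* native_context;  // WeakCell value in the real map
      const void* code;            // cached Code; nullptr models a cleared cell
    };

    const void* FindCachedCodeSketch(const CodeMapEntrySketch* entries,
                                     size_t count, const void* native_context) {
      for (size_t i = count; i-- > 0;) {                            // loop_top
        if (entries[i].native_context != native_context) continue;  // bne(&loop_bottom)
        if (entries[i].code == nullptr) return nullptr;             // JumpIfSmi -> try_shared
        return entries[i].code;  // install at kCodeEntryOffset + JumpToJSEntry
      }
      return nullptr;  // gotta_call_runtime
    }
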
1536 __ Move(r6, r2); in Generate_InstantiateAsmJs()
1538 __ SmiTag(r2); in Generate_InstantiateAsmJs()
1540 __ Push(r2, r3, r5, r3); in Generate_InstantiateAsmJs()
1547 __ CmpP(r6, Operand(j)); in Generate_InstantiateAsmJs()
1548 __ b(ne, &over); in Generate_InstantiateAsmJs()
1551 __ LoadP(r6, MemOperand(fp, StandardFrameConstants::kCallerSPOffset + in Generate_InstantiateAsmJs()
1553 __ push(r6); in Generate_InstantiateAsmJs()
1556 __ PushRoot(Heap::kUndefinedValueRootIndex); in Generate_InstantiateAsmJs()
1559 __ jmp(&args_done); in Generate_InstantiateAsmJs()
1560 __ bind(&over); in Generate_InstantiateAsmJs()
1563 __ bind(&args_done); in Generate_InstantiateAsmJs()
1566 __ CallRuntime(Runtime::kInstantiateAsmJs, 4); in Generate_InstantiateAsmJs()
1568 __ JumpIfSmi(r2, &failed); in Generate_InstantiateAsmJs()
1570 __ Drop(2); in Generate_InstantiateAsmJs()
1571 __ pop(r6); in Generate_InstantiateAsmJs()
1572 __ SmiUntag(r6); in Generate_InstantiateAsmJs()
1575 __ AddP(r6, r6, Operand(1)); in Generate_InstantiateAsmJs()
1576 __ Drop(r6); in Generate_InstantiateAsmJs()
1577 __ Ret(); in Generate_InstantiateAsmJs()
1579 __ bind(&failed); in Generate_InstantiateAsmJs()
1581 __ Pop(r2, r3, r5); in Generate_InstantiateAsmJs()
1582 __ SmiUntag(r2); in Generate_InstantiateAsmJs()
1596 __ CleanseP(r14); in GenerateMakeCodeYoungAgainCommon()
1597 __ SubP(r14, Operand(kCodeAgingSequenceLength)); in GenerateMakeCodeYoungAgainCommon()
1598 __ LoadRR(r2, r14); in GenerateMakeCodeYoungAgainCommon()
1600 __ pop(r14); in GenerateMakeCodeYoungAgainCommon()
1609 __ MultiPush(r14.bit() | r2.bit() | r3.bit() | r5.bit() | fp.bit()); in GenerateMakeCodeYoungAgainCommon()
1610 __ PrepareCallCFunction(2, 0, r4); in GenerateMakeCodeYoungAgainCommon()
1611 __ mov(r3, Operand(ExternalReference::isolate_address(masm->isolate()))); in GenerateMakeCodeYoungAgainCommon()
1612 __ CallCFunction( in GenerateMakeCodeYoungAgainCommon()
1614 __ MultiPop(r14.bit() | r2.bit() | r3.bit() | r5.bit() | fp.bit()); in GenerateMakeCodeYoungAgainCommon()
1615 __ LoadRR(ip, r2); in GenerateMakeCodeYoungAgainCommon()
1616 __ Jump(ip); in GenerateMakeCodeYoungAgainCommon()
1634 __ CleanseP(r14); in CODE_AGE_LIST()
1635 __ SubP(r14, Operand(kCodeAgingSequenceLength)); in CODE_AGE_LIST()
1636 __ LoadRR(r2, r14); in CODE_AGE_LIST()
1638 __ pop(r14); in CODE_AGE_LIST()
1647 __ MultiPush(r14.bit() | r2.bit() | r3.bit() | r5.bit() | fp.bit()); in CODE_AGE_LIST()
1648 __ PrepareCallCFunction(2, 0, r4); in CODE_AGE_LIST()
1649 __ mov(r3, Operand(ExternalReference::isolate_address(masm->isolate()))); in CODE_AGE_LIST()
1650 __ CallCFunction( in CODE_AGE_LIST()
1653 __ MultiPop(r14.bit() | r2.bit() | r3.bit() | r5.bit() | fp.bit()); in CODE_AGE_LIST()
1654 __ LoadRR(ip, r2); in CODE_AGE_LIST()
1657 __ PushStandardFrame(r3); in CODE_AGE_LIST()
1660 __ AddP(r2, ip, Operand(kNoCodeAgeSequenceLength)); in CODE_AGE_LIST()
1661 __ Jump(r2); in CODE_AGE_LIST()
1680 __ MultiPush(kJSCallerSaved | kCalleeSaved); in Generate_NotifyStubFailureHelper()
1682 __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles); in Generate_NotifyStubFailureHelper()
1683 __ MultiPop(kJSCallerSaved | kCalleeSaved); in Generate_NotifyStubFailureHelper()
1686 __ la(sp, MemOperand(sp, kPointerSize)); // Ignore state in Generate_NotifyStubFailureHelper()
1687 __ Ret(); // Jump to miss handler in Generate_NotifyStubFailureHelper()
1703 __ LoadSmiLiteral(r2, Smi::FromInt(static_cast<int>(type))); in Generate_NotifyDeoptimizedHelper()
1704 __ push(r2); in Generate_NotifyDeoptimizedHelper()
1705 __ CallRuntime(Runtime::kNotifyDeoptimized); in Generate_NotifyDeoptimizedHelper()
1709 __ LoadP(r8, MemOperand(sp, 0 * kPointerSize)); in Generate_NotifyDeoptimizedHelper()
1710 __ SmiUntag(r8); in Generate_NotifyDeoptimizedHelper()
1713 __ CmpP( in Generate_NotifyDeoptimizedHelper()
1716 __ bne(&with_tos_register); in Generate_NotifyDeoptimizedHelper()
1717 __ la(sp, MemOperand(sp, 1 * kPointerSize)); // Remove state. in Generate_NotifyDeoptimizedHelper()
1718 __ Ret(); in Generate_NotifyDeoptimizedHelper()
1720 __ bind(&with_tos_register); in Generate_NotifyDeoptimizedHelper()
1722 __ LoadP(r2, MemOperand(sp, 1 * kPointerSize)); in Generate_NotifyDeoptimizedHelper()
1723 __ CmpP( in Generate_NotifyDeoptimizedHelper()
1726 __ bne(&unknown_state); in Generate_NotifyDeoptimizedHelper()
1727 __ la(sp, MemOperand(sp, 2 * kPointerSize)); // Remove state. in Generate_NotifyDeoptimizedHelper()
1728 __ Ret(); in Generate_NotifyDeoptimizedHelper()
1730 __ bind(&unknown_state); in Generate_NotifyDeoptimizedHelper()
1731 __ stop("no cases left"); in Generate_NotifyDeoptimizedHelper()
1756 __ LoadP(signature, FieldMemOperand(function_template_info, in CompatibleReceiverCheck()
1759 __ JumpIfRoot(signature, Heap::kUndefinedValueRootIndex, in CompatibleReceiverCheck()
1763 __ LoadP(map, FieldMemOperand(receiver, HeapObject::kMapOffset)); in CompatibleReceiverCheck()
1765 __ bind(&prototype_loop_start); in CompatibleReceiverCheck()
1768 __ GetMapConstructor(constructor, map, scratch, scratch); in CompatibleReceiverCheck()
1769 __ CmpP(scratch, Operand(JS_FUNCTION_TYPE)); in CompatibleReceiverCheck()
1771 __ bne(&next_prototype); in CompatibleReceiverCheck()
1773 __ LoadP(type, in CompatibleReceiverCheck()
1775 __ LoadP(type, in CompatibleReceiverCheck()
1780 __ bind(&function_template_loop); in CompatibleReceiverCheck()
1783 __ CmpP(signature, type); in CompatibleReceiverCheck()
1784 __ beq(&receiver_check_passed); in CompatibleReceiverCheck()
1788 __ JumpIfSmi(type, &next_prototype); in CompatibleReceiverCheck()
1789 __ CompareObjectType(type, scratch, scratch, FUNCTION_TEMPLATE_INFO_TYPE); in CompatibleReceiverCheck()
1790 __ bne(&next_prototype); in CompatibleReceiverCheck()
1793 __ LoadP(type, in CompatibleReceiverCheck()
1795 __ b(&function_template_loop); in CompatibleReceiverCheck()
1798 __ bind(&next_prototype); in CompatibleReceiverCheck()
1799 __ LoadlW(scratch, FieldMemOperand(map, Map::kBitField3Offset)); in CompatibleReceiverCheck()
1800 __ DecodeField<Map::HasHiddenPrototype>(scratch); in CompatibleReceiverCheck()
1801 __ beq(receiver_check_failed); in CompatibleReceiverCheck()
1803 __ LoadP(receiver, FieldMemOperand(map, Map::kPrototypeOffset)); in CompatibleReceiverCheck()
1804 __ LoadP(map, FieldMemOperand(receiver, HeapObject::kMapOffset)); in CompatibleReceiverCheck()
1806 __ b(&prototype_loop_start); in CompatibleReceiverCheck()
1808 __ bind(&receiver_check_passed); in CompatibleReceiverCheck()
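
CompatibleReceiverCheck is two nested loops: the outer walks the receiver's hidden-prototype chain via each map's kPrototypeOffset (following only maps with Map::HasHiddenPrototype set), while the inner walks the constructor's FunctionTemplateInfo parent chain comparing each template against the expected signature. Roughly, with stand-in structs:

    struct TemplateInfoSketch {
      const TemplateInfoSketch* parent;  // kParentTemplateOffset chain
    };
    struct ReceiverSketch {
      const TemplateInfoSketch* constructor_template;  // via GetMapConstructor
      const ReceiverSketch* hidden_prototype;          // nullptr if none/visible
    };

    bool CompatibleReceiverSketch(const ReceiverSketch* receiver,
                                  const TemplateInfoSketch* signature) {
      if (signature == nullptr) return true;  // JumpIfRoot(undefined) fast path
      for (; receiver != nullptr; receiver = receiver->hidden_prototype) {
        // function_template_loop: compare each parent template to signature.
        for (const TemplateInfoSketch* t = receiver->constructor_template;
             t != nullptr; t = t->parent) {
          if (t == signature) return true;  // receiver_check_passed
        }
      }
      return false;  // receiver_check_failed
    }
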
1823 __ LoadP(r5, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset)); in Generate_HandleFastApiCall()
1824 __ LoadP(r5, FieldMemOperand(r5, SharedFunctionInfo::kFunctionDataOffset)); in Generate_HandleFastApiCall()
1828 __ ShiftLeftP(r1, r2, Operand(kPointerSizeLog2)); in Generate_HandleFastApiCall()
1829 __ LoadP(r4, MemOperand(sp, r1)); in Generate_HandleFastApiCall()
1834 __ LoadP(r6, FieldMemOperand(r5, FunctionTemplateInfo::kCallCodeOffset)); in Generate_HandleFastApiCall()
1835 __ LoadP(r6, FieldMemOperand(r6, CallHandlerInfo::kFastHandlerOffset)); in Generate_HandleFastApiCall()
1836 __ AddP(ip, r6, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate_HandleFastApiCall()
1837 __ JumpToJSEntry(ip); in Generate_HandleFastApiCall()
1840 __ bind(&receiver_check_failed); in Generate_HandleFastApiCall()
1842 __ AddP(r1, r1, Operand(kPointerSize)); in Generate_HandleFastApiCall()
1843 __ AddP(sp, sp, r1); in Generate_HandleFastApiCall()
1844 __ TailCallRuntime(Runtime::kThrowIllegalInvocation); in Generate_HandleFastApiCall()
1851 __ LoadP(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); in Generate_OnStackReplacementHelper()
1852 __ LoadP(r2, MemOperand(r2, JavaScriptFrameConstants::kFunctionOffset)); in Generate_OnStackReplacementHelper()
1854 __ LoadP(r2, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); in Generate_OnStackReplacementHelper()
1860 __ push(r2); in Generate_OnStackReplacementHelper()
1861 __ CallRuntime(Runtime::kCompileForOnStackReplacement); in Generate_OnStackReplacementHelper()
1866 __ CmpSmiLiteral(r2, Smi::kZero, r0); in Generate_OnStackReplacementHelper()
1867 __ bne(&skip); in Generate_OnStackReplacementHelper()
1868 __ Ret(); in Generate_OnStackReplacementHelper()
1870 __ bind(&skip); in Generate_OnStackReplacementHelper()
1875 __ LeaveFrame(StackFrame::STUB); in Generate_OnStackReplacementHelper()
1880 __ LoadP(r3, FieldMemOperand(r2, Code::kDeoptimizationDataOffset)); in Generate_OnStackReplacementHelper()
1884 __ LoadP( in Generate_OnStackReplacementHelper()
1887 __ SmiUntag(r3); in Generate_OnStackReplacementHelper()
1891 __ AddP(r2, r3); in Generate_OnStackReplacementHelper()
1892 __ AddP(r0, r2, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate_OnStackReplacementHelper()
1893 __ LoadRR(r14, r0); in Generate_OnStackReplacementHelper()
1896 __ Ret(); in Generate_OnStackReplacementHelper()
1924 __ ShiftLeftP(arg_size, r2, Operand(kPointerSizeLog2)); in Generate_FunctionPrototypeApply()
1925 __ AddP(new_sp, sp, arg_size); in Generate_FunctionPrototypeApply()
1926 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); in Generate_FunctionPrototypeApply()
1927 __ LoadRR(scratch, r2); in Generate_FunctionPrototypeApply()
1928 __ LoadP(r3, MemOperand(new_sp, 0)); // receiver in Generate_FunctionPrototypeApply()
1929 __ CmpP(arg_size, Operand(kPointerSize)); in Generate_FunctionPrototypeApply()
1930 __ blt(&skip); in Generate_FunctionPrototypeApply()
1931 __ LoadP(scratch, MemOperand(new_sp, 1 * -kPointerSize)); // thisArg in Generate_FunctionPrototypeApply()
1932 __ beq(&skip); in Generate_FunctionPrototypeApply()
1933 __ LoadP(r2, MemOperand(new_sp, 2 * -kPointerSize)); // argArray in Generate_FunctionPrototypeApply()
1934 __ bind(&skip); in Generate_FunctionPrototypeApply()
1935 __ LoadRR(sp, new_sp); in Generate_FunctionPrototypeApply()
1936 __ StoreP(scratch, MemOperand(sp, 0)); in Generate_FunctionPrototypeApply()
1947 __ JumpIfSmi(r3, &receiver_not_callable); in Generate_FunctionPrototypeApply()
1948 __ LoadP(r6, FieldMemOperand(r3, HeapObject::kMapOffset)); in Generate_FunctionPrototypeApply()
1949 __ LoadlB(r6, FieldMemOperand(r6, Map::kBitFieldOffset)); in Generate_FunctionPrototypeApply()
1950 __ TestBit(r6, Map::kIsCallable); in Generate_FunctionPrototypeApply()
1951 __ beq(&receiver_not_callable); in Generate_FunctionPrototypeApply()
1955 __ JumpIfRoot(r2, Heap::kNullValueRootIndex, &no_arguments); in Generate_FunctionPrototypeApply()
1956 __ JumpIfRoot(r2, Heap::kUndefinedValueRootIndex, &no_arguments); in Generate_FunctionPrototypeApply()
1960 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex); in Generate_FunctionPrototypeApply()
1961 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET); in Generate_FunctionPrototypeApply()
1965 __ bind(&no_arguments); in Generate_FunctionPrototypeApply()
1967 __ LoadImmP(r2, Operand::Zero()); in Generate_FunctionPrototypeApply()
1968 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); in Generate_FunctionPrototypeApply()
1972 __ bind(&receiver_not_callable); in Generate_FunctionPrototypeApply()
1974 __ StoreP(r3, MemOperand(sp, 0)); in Generate_FunctionPrototypeApply()
1975 __ TailCallRuntime(Runtime::kThrowApplyNonFunction); in Generate_FunctionPrototypeApply()
1985 __ CmpP(r2, Operand::Zero()); in Generate_FunctionPrototypeCall()
1986 __ bne(&done, Label::kNear); in Generate_FunctionPrototypeCall()
1987 __ PushRoot(Heap::kUndefinedValueRootIndex); in Generate_FunctionPrototypeCall()
1988 __ AddP(r2, Operand(1)); in Generate_FunctionPrototypeCall()
1989 __ bind(&done); in Generate_FunctionPrototypeCall()
1994 __ ShiftLeftP(r4, r2, Operand(kPointerSizeLog2)); in Generate_FunctionPrototypeCall()
1995 __ LoadP(r3, MemOperand(sp, r4)); in Generate_FunctionPrototypeCall()
2005 __ AddP(r4, sp, r4); in Generate_FunctionPrototypeCall()
2007 __ bind(&loop); in Generate_FunctionPrototypeCall()
2008 __ LoadP(ip, MemOperand(r4, -kPointerSize)); in Generate_FunctionPrototypeCall()
2009 __ StoreP(ip, MemOperand(r4)); in Generate_FunctionPrototypeCall()
2010 __ SubP(r4, Operand(kPointerSize)); in Generate_FunctionPrototypeCall()
2011 __ CmpP(r4, sp); in Generate_FunctionPrototypeCall()
2012 __ bne(&loop); in Generate_FunctionPrototypeCall()
2015 __ SubP(r2, Operand(1)); in Generate_FunctionPrototypeCall()
2016 __ pop(); in Generate_FunctionPrototypeCall()
2020 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); in Generate_FunctionPrototypeCall()
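
The loop at the end of Generate_FunctionPrototypeCall does the characteristic fn.call shuffle: walking r4 from the top slot down, each stack slot is overwritten with the one below it, then one slot is popped and argc decremented, which deletes the function from the frame and makes thisArg the new receiver before the tail-call to Call. An index-level sketch:

    #include <vector>

    // slots[0] models MemOperand(sp, 0); slots[argc] holds the function
    // (the builtin's receiver). Shift every element up one slot, then drop
    // the duplicated bottom slot.
    void ShiftOutFunctionSketch(std::vector<int>* slots) {
      if (slots->empty()) return;
      for (size_t i = slots->size() - 1; i >= 1; --i) {
        (*slots)[i] = (*slots)[i - 1];  // StoreP(ip, MemOperand(r4))
      }
      slots->erase(slots->begin());     // pop() + SubP(r2, Operand(1))
    }
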
2040 __ ShiftLeftP(arg_size, r2, Operand(kPointerSizeLog2)); in Generate_ReflectApply()
2041 __ AddP(new_sp, sp, arg_size); in Generate_ReflectApply()
2042 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex); in Generate_ReflectApply()
2043 __ LoadRR(scratch, r3); in Generate_ReflectApply()
2044 __ LoadRR(r2, r3); in Generate_ReflectApply()
2045 __ CmpP(arg_size, Operand(kPointerSize)); in Generate_ReflectApply()
2046 __ blt(&skip); in Generate_ReflectApply()
2047 __ LoadP(r3, MemOperand(new_sp, 1 * -kPointerSize)); // target in Generate_ReflectApply()
2048 __ beq(&skip); in Generate_ReflectApply()
2049 __ LoadP(scratch, MemOperand(new_sp, 2 * -kPointerSize)); // thisArgument in Generate_ReflectApply()
2050 __ CmpP(arg_size, Operand(2 * kPointerSize)); in Generate_ReflectApply()
2051 __ beq(&skip); in Generate_ReflectApply()
2052 __ LoadP(r2, MemOperand(new_sp, 3 * -kPointerSize)); // argumentsList in Generate_ReflectApply()
2053 __ bind(&skip); in Generate_ReflectApply()
2054 __ LoadRR(sp, new_sp); in Generate_ReflectApply()
2055 __ StoreP(scratch, MemOperand(sp, 0)); in Generate_ReflectApply()
2066 __ JumpIfSmi(r3, &target_not_callable); in Generate_ReflectApply()
2067 __ LoadP(r6, FieldMemOperand(r3, HeapObject::kMapOffset)); in Generate_ReflectApply()
2068 __ LoadlB(r6, FieldMemOperand(r6, Map::kBitFieldOffset)); in Generate_ReflectApply()
2069 __ TestBit(r6, Map::kIsCallable); in Generate_ReflectApply()
2070 __ beq(&target_not_callable); in Generate_ReflectApply()
2074 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex); in Generate_ReflectApply()
2075 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET); in Generate_ReflectApply()
2078 __ bind(&target_not_callable); in Generate_ReflectApply()
2080 __ StoreP(r3, MemOperand(sp, 0)); in Generate_ReflectApply()
2081 __ TailCallRuntime(Runtime::kThrowApplyNonFunction); in Generate_ReflectApply()
2102 __ ShiftLeftP(arg_size, r2, Operand(kPointerSizeLog2)); in Generate_ReflectConstruct()
2103 __ AddP(new_sp, sp, arg_size); in Generate_ReflectConstruct()
2104 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex); in Generate_ReflectConstruct()
2105 __ LoadRR(r2, r3); in Generate_ReflectConstruct()
2106 __ LoadRR(r5, r3); in Generate_ReflectConstruct()
2107 __ StoreP(r3, MemOperand(new_sp, 0)); // receiver (undefined) in Generate_ReflectConstruct()
2108 __ CmpP(arg_size, Operand(kPointerSize)); in Generate_ReflectConstruct()
2109 __ blt(&skip); in Generate_ReflectConstruct()
2110 __ LoadP(r3, MemOperand(new_sp, 1 * -kPointerSize)); // target in Generate_ReflectConstruct()
2111 __ LoadRR(r5, r3); // new.target defaults to target in Generate_ReflectConstruct()
2112 __ beq(&skip); in Generate_ReflectConstruct()
2113 __ LoadP(r2, MemOperand(new_sp, 2 * -kPointerSize)); // argumentsList in Generate_ReflectConstruct()
2114 __ CmpP(arg_size, Operand(2 * kPointerSize)); in Generate_ReflectConstruct()
2115 __ beq(&skip); in Generate_ReflectConstruct()
2116 __ LoadP(r5, MemOperand(new_sp, 3 * -kPointerSize)); // new.target in Generate_ReflectConstruct()
2117 __ bind(&skip); in Generate_ReflectConstruct()
2118 __ LoadRR(sp, new_sp); in Generate_ReflectConstruct()
2130 __ JumpIfSmi(r3, &target_not_constructor); in Generate_ReflectConstruct()
2131 __ LoadP(r6, FieldMemOperand(r3, HeapObject::kMapOffset)); in Generate_ReflectConstruct()
2132 __ LoadlB(r6, FieldMemOperand(r6, Map::kBitFieldOffset)); in Generate_ReflectConstruct()
2133 __ TestBit(r6, Map::kIsConstructor); in Generate_ReflectConstruct()
2134 __ beq(&target_not_constructor); in Generate_ReflectConstruct()
2138 __ JumpIfSmi(r5, &new_target_not_constructor); in Generate_ReflectConstruct()
2139 __ LoadP(r6, FieldMemOperand(r5, HeapObject::kMapOffset)); in Generate_ReflectConstruct()
2140 __ LoadlB(r6, FieldMemOperand(r6, Map::kBitFieldOffset)); in Generate_ReflectConstruct()
2141 __ TestBit(r6, Map::kIsConstructor); in Generate_ReflectConstruct()
2142 __ beq(&new_target_not_constructor); in Generate_ReflectConstruct()
2145 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET); in Generate_ReflectConstruct()
2148 __ bind(&target_not_constructor); in Generate_ReflectConstruct()
2150 __ StoreP(r3, MemOperand(sp, 0)); in Generate_ReflectConstruct()
2151 __ TailCallRuntime(Runtime::kThrowNotConstructor); in Generate_ReflectConstruct()
2155 __ bind(&new_target_not_constructor); in Generate_ReflectConstruct()
2157 __ StoreP(r5, MemOperand(sp, 0)); in Generate_ReflectConstruct()
2158 __ TailCallRuntime(Runtime::kThrowNotConstructor); in Generate_ReflectConstruct()
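Reflect.construct differs only in its defaults: new.target starts out as a copy of target (the LoadRR right after the target load) and is overwritten only when a third operand is present, and the receiver slot is pre-seeded with undefined. The same kind of sketch, repeated with the illustrative Value type so the snippet stands alone:

  #include <vector>

  struct Value { int tag = 0; };
  static const Value kUndefined{};

  struct ReflectConstructArgs { Value target, arguments_list, new_target; };

  ReflectConstructArgs ExtractReflectConstructArgs(
      const std::vector<Value>& args) {
    ReflectConstructArgs out{kUndefined, kUndefined, kUndefined};
    if (args.size() > 0) {
      out.target = args[0];
      out.new_target = args[0];  // new.target defaults to target
    }
    if (args.size() > 1) out.arguments_list = args[1];
    if (args.size() > 2) out.new_target = args[2];
    return out;
  }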
2163 __ SmiTag(r2); in EnterArgumentsAdaptorFrame()
2164 __ Load(r6, Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR))); in EnterArgumentsAdaptorFrame()
2172 __ lay(sp, MemOperand(sp, -5 * kPointerSize)); in EnterArgumentsAdaptorFrame()
2175 __ CleanseP(r14); in EnterArgumentsAdaptorFrame()
2176 __ StoreP(r14, MemOperand(sp, 4 * kPointerSize)); in EnterArgumentsAdaptorFrame()
2177 __ StoreP(fp, MemOperand(sp, 3 * kPointerSize)); in EnterArgumentsAdaptorFrame()
2178 __ StoreP(r6, MemOperand(sp, 2 * kPointerSize)); in EnterArgumentsAdaptorFrame()
2179 __ StoreP(r3, MemOperand(sp, 1 * kPointerSize)); in EnterArgumentsAdaptorFrame()
2180 __ StoreP(r2, MemOperand(sp, 0 * kPointerSize)); in EnterArgumentsAdaptorFrame()
2181 __ la(fp, MemOperand(sp, StandardFrameConstants::kFixedFrameSizeFromFp + in EnterArgumentsAdaptorFrame()
2191 __ LoadP(r3, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp + in LeaveArgumentsAdaptorFrame()
2194 __ LeaveFrame(StackFrame::ARGUMENTS_ADAPTOR, stack_adjustment); in LeaveArgumentsAdaptorFrame()
2195 __ SmiToPtrArrayOffset(r3, r3); in LeaveArgumentsAdaptorFrame()
2196 __ lay(sp, MemOperand(sp, r3)); in LeaveArgumentsAdaptorFrame()
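EnterArgumentsAdaptorFrame reserves five slots and fills them bottom-up; LeaveArgumentsAdaptorFrame later reads the Smi-tagged count back out at the mirrored fp-relative offset before dropping the frame and the arguments it covered. A hypothetical picture of those slots (field names invented for illustration; lowest address first, matching the MemOperand(sp, n * kPointerSize) stores):

  #include <cstdint>

  struct ArgumentsAdaptorFrame {
    intptr_t smi_argc;        // sp + 0: actual argument count, Smi-tagged
    intptr_t function;        // sp + 1: r3
    intptr_t frame_marker;    // sp + 2: StackFrame::ARGUMENTS_ADAPTOR
    intptr_t caller_fp;       // sp + 3: saved fp
    intptr_t return_address;  // sp + 4: cleansed r14
  };
  // fp is then pointed kFixedFrameSizeFromFp + kPointerSize above sp, so
  // smi_argc sits at fp - (kFixedFrameSizeFromFp + kPointerSize), the
  // offset LeaveArgumentsAdaptorFrame reloads into r3.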
2212 __ JumpIfSmi(r2, &create_runtime); in Generate_Apply()
2215 __ LoadP(r4, FieldMemOperand(r2, HeapObject::kMapOffset)); in Generate_Apply()
2218 __ LoadP(r6, NativeContextMemOperand()); in Generate_Apply()
2221 __ LoadP(ip, ContextMemOperand(r6, Context::SLOPPY_ARGUMENTS_MAP_INDEX)); in Generate_Apply()
2222 __ CmpP(ip, r4); in Generate_Apply()
2223 __ beq(&create_arguments); in Generate_Apply()
2224 __ LoadP(ip, ContextMemOperand(r6, Context::STRICT_ARGUMENTS_MAP_INDEX)); in Generate_Apply()
2225 __ CmpP(ip, r4); in Generate_Apply()
2226 __ beq(&create_arguments); in Generate_Apply()
2229 __ CompareInstanceType(r4, ip, JS_ARRAY_TYPE); in Generate_Apply()
2230 __ beq(&create_array); in Generate_Apply()
2233 __ bind(&create_runtime); in Generate_Apply()
2236 __ Push(r3, r5, r2); in Generate_Apply()
2237 __ CallRuntime(Runtime::kCreateListFromArrayLike); in Generate_Apply()
2238 __ Pop(r3, r5); in Generate_Apply()
2239 __ LoadP(r4, FieldMemOperand(r2, FixedArray::kLengthOffset)); in Generate_Apply()
2240 __ SmiUntag(r4); in Generate_Apply()
2242 __ b(&done_create); in Generate_Apply()
2245 __ bind(&create_arguments); in Generate_Apply()
2246 __ LoadP(r4, FieldMemOperand(r2, JSArgumentsObject::kLengthOffset)); in Generate_Apply()
2247 __ LoadP(r6, FieldMemOperand(r2, JSObject::kElementsOffset)); in Generate_Apply()
2248 __ LoadP(ip, FieldMemOperand(r6, FixedArray::kLengthOffset)); in Generate_Apply()
2249 __ CmpP(r4, ip); in Generate_Apply()
2250 __ bne(&create_runtime); in Generate_Apply()
2251 __ SmiUntag(r4); in Generate_Apply()
2252 __ LoadRR(r2, r6); in Generate_Apply()
2253 __ b(&done_create); in Generate_Apply()
2257 __ bind(&create_holey_array); in Generate_Apply()
2258 __ LoadP(r4, FieldMemOperand(r4, Map::kPrototypeOffset)); in Generate_Apply()
2259 __ LoadP(r6, ContextMemOperand(r6, Context::INITIAL_ARRAY_PROTOTYPE_INDEX)); in Generate_Apply()
2260 __ CmpP(r4, r6); in Generate_Apply()
2261 __ bne(&create_runtime); in Generate_Apply()
2262 __ LoadRoot(r6, Heap::kArrayProtectorRootIndex); in Generate_Apply()
2263 __ LoadP(r4, FieldMemOperand(r6, PropertyCell::kValueOffset)); in Generate_Apply()
2264 __ CmpSmiLiteral(r4, Smi::FromInt(Isolate::kProtectorValid), r0); in Generate_Apply()
2265 __ bne(&create_runtime); in Generate_Apply()
2266 __ LoadP(r4, FieldMemOperand(r2, JSArray::kLengthOffset)); in Generate_Apply()
2267 __ LoadP(r2, FieldMemOperand(r2, JSArray::kElementsOffset)); in Generate_Apply()
2268 __ SmiUntag(r4); in Generate_Apply()
2269 __ b(&done_create); in Generate_Apply()
2273 __ bind(&create_array); in Generate_Apply()
2274 __ LoadlB(r7, FieldMemOperand(r4, Map::kBitField2Offset)); in Generate_Apply()
2275 __ DecodeField<Map::ElementsKindBits>(r7); in Generate_Apply()
2280 __ CmpP(r7, Operand(FAST_HOLEY_ELEMENTS)); in Generate_Apply()
2281 __ bgt(&create_runtime); in Generate_Apply()
2283 __ TestBit(r7, Map::kHasNonInstancePrototype, r0); in Generate_Apply()
2284 __ bne(&create_holey_array); in Generate_Apply()
2286 __ LoadP(r4, FieldMemOperand(r2, JSArray::kLengthOffset)); in Generate_Apply()
2287 __ LoadP(r2, FieldMemOperand(r2, JSArray::kElementsOffset)); in Generate_Apply()
2288 __ SmiUntag(r4); in Generate_Apply()
2290 __ bind(&done_create); in Generate_Apply()
2298 __ LoadRoot(ip, Heap::kRealStackLimitRootIndex); in Generate_Apply()
2301 __ SubP(ip, sp, ip); in Generate_Apply()
2303 __ ShiftLeftP(r0, r4, Operand(kPointerSizeLog2)); in Generate_Apply()
2304 __ CmpP(ip, r0); // Signed comparison. in Generate_Apply()
2305 __ bgt(&done); in Generate_Apply()
2306 __ TailCallRuntime(Runtime::kThrowStackOverflow); in Generate_Apply()
2307 __ bind(&done); in Generate_Apply()
2320 __ LoadRoot(r8, Heap::kUndefinedValueRootIndex); in Generate_Apply()
2322 __ CmpP(r4, Operand::Zero()); in Generate_Apply()
2323 __ beq(&no_args); in Generate_Apply()
2324 __ AddP(r2, r2, in Generate_Apply()
2326 __ LoadRR(r1, r4); in Generate_Apply()
2327 __ bind(&loop); in Generate_Apply()
2328 __ LoadP(ip, MemOperand(r2, kPointerSize)); in Generate_Apply()
2329 __ la(r2, MemOperand(r2, kPointerSize)); in Generate_Apply()
2330 __ CompareRoot(ip, Heap::kTheHoleValueRootIndex); in Generate_Apply()
2331 __ bne(&skip, Label::kNear); in Generate_Apply()
2332 __ LoadRR(ip, r8); in Generate_Apply()
2333 __ bind(&skip); in Generate_Apply()
2334 __ push(ip); in Generate_Apply()
2335 __ BranchOnCount(r1, &loop); in Generate_Apply()
2336 __ bind(&no_args); in Generate_Apply()
2337 __ LoadRR(r2, r4); in Generate_Apply()
2342 __ CompareRoot(r5, Heap::kUndefinedValueRootIndex); in Generate_Apply()
2343 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET, eq); in Generate_Apply()
2344 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); in Generate_Apply()
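Once the argument list is flattened into a FixedArray (or proven to be fast elements), the loop between &loop and &no_args copies each element onto the stack, rewriting the-hole to undefined (r8) so a holey array spreads the way [...arr] behaves at the language level; the preceding stack-limit check keeps the unbounded pushes safe. The copy, as a hedged sketch over stand-in types:

  #include <vector>

  struct Value { bool is_the_hole = false; };  // stand-in for a tagged value
  static const Value kUndefined{};

  std::vector<Value> PushArguments(const std::vector<Value>& elements) {
    std::vector<Value> stack;
    stack.reserve(elements.size());
    for (const Value& v : elements)
      stack.push_back(v.is_the_hole ? kUndefined : v);  // hole -> undefined
    return stack;
  }

The final CompareRoot on r5 then picks between Call (new.target is undefined) and Construct.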
2360 __ LoadP(r5, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); in Generate_CallForwardVarargs()
2361 __ LoadP(ip, MemOperand(r5, CommonFrameConstants::kContextOrFrameTypeOffset)); in Generate_CallForwardVarargs()
2362 __ CmpP(ip, Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR))); in Generate_CallForwardVarargs()
2363 __ beq(&arguments_adaptor); in Generate_CallForwardVarargs()
2365 __ LoadP(r2, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); in Generate_CallForwardVarargs()
2366 __ LoadP(r2, FieldMemOperand(r2, JSFunction::kSharedFunctionInfoOffset)); in Generate_CallForwardVarargs()
2367 __ LoadW(r2, FieldMemOperand( in Generate_CallForwardVarargs()
2369 __ LoadRR(r5, fp); in Generate_CallForwardVarargs()
2371 __ b(&arguments_done); in Generate_CallForwardVarargs()
2372 __ bind(&arguments_adaptor); in Generate_CallForwardVarargs()
2375 __ LoadP(r2, MemOperand(r5, ArgumentsAdaptorFrameConstants::kLengthOffset)); in Generate_CallForwardVarargs()
2377 __ bind(&arguments_done); in Generate_CallForwardVarargs()
2380 __ SmiUntag(r2); in Generate_CallForwardVarargs()
2381 __ SubP(r2, r2, r4); in Generate_CallForwardVarargs()
2382 __ CmpP(r2, Operand::Zero()); in Generate_CallForwardVarargs()
2383 __ ble(&stack_empty); in Generate_CallForwardVarargs()
2391 __ AddP(r5, r5, Operand(kPointerSize)); in Generate_CallForwardVarargs()
2392 __ LoadRR(r4, r2); in Generate_CallForwardVarargs()
2393 __ bind(&loop); in Generate_CallForwardVarargs()
2395 __ ShiftLeftP(ip, r4, Operand(kPointerSizeLog2)); in Generate_CallForwardVarargs()
2396 __ LoadP(ip, MemOperand(r5, ip)); in Generate_CallForwardVarargs()
2397 __ push(ip); in Generate_CallForwardVarargs()
2398 __ SubP(r4, r4, Operand(1)); in Generate_CallForwardVarargs()
2399 __ CmpP(r4, Operand::Zero()); in Generate_CallForwardVarargs()
2400 __ bne(&loop); in Generate_CallForwardVarargs()
2404 __ b(&stack_done); in Generate_CallForwardVarargs()
2405 __ bind(&stack_overflow); in Generate_CallForwardVarargs()
2406 __ TailCallRuntime(Runtime::kThrowStackOverflow); in Generate_CallForwardVarargs()
2407 __ bind(&stack_empty); in Generate_CallForwardVarargs()
2410 __ mov(r2, Operand::Zero()); in Generate_CallForwardVarargs()
2412 __ bind(&stack_done); in Generate_CallForwardVarargs()
2414 __ Jump(code, RelocInfo::CODE_TARGET); in Generate_CallForwardVarargs()
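Forwarding first resolves where the caller's arguments live (directly in the caller's frame, or one level up when an arguments-adaptor frame sits in between), then re-pushes only the ones past the start index held in r4; a non-positive remainder takes the &stack_empty path and calls with zero arguments. The count logic in outline, with plain C++ standing in for the frame walk (inputs are illustrative):

  #include <cstddef>
  #include <vector>

  std::vector<int> ForwardVarargs(const std::vector<int>& caller_args,
                                  size_t start_index) {
    if (start_index >= caller_args.size()) return {};  // &stack_empty
    return std::vector<int>(caller_args.begin() + start_index,
                            caller_args.end());
  }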
2457 __ mov(scratch1, Operand(is_tail_call_elimination_enabled)); in PrepareForTailCall()
2458 __ LoadlB(scratch1, MemOperand(scratch1)); in PrepareForTailCall()
2459 __ CmpP(scratch1, Operand::Zero()); in PrepareForTailCall()
2460 __ beq(&done); in PrepareForTailCall()
2465 __ LoadP(scratch3, in PrepareForTailCall()
2467 __ CmpP(scratch3, Operand(StackFrame::TypeToMarker(StackFrame::STUB))); in PrepareForTailCall()
2468 __ bne(&no_interpreter_frame); in PrepareForTailCall()
2469 __ LoadP(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); in PrepareForTailCall()
2470 __ bind(&no_interpreter_frame); in PrepareForTailCall()
2476 __ LoadP(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); in PrepareForTailCall()
2477 __ LoadP( in PrepareForTailCall()
2480 __ CmpP(scratch3, in PrepareForTailCall()
2482 __ bne(&no_arguments_adaptor); in PrepareForTailCall()
2485 __ LoadRR(fp, scratch2); in PrepareForTailCall()
2486 __ LoadP(caller_args_count_reg, in PrepareForTailCall()
2488 __ SmiUntag(caller_args_count_reg); in PrepareForTailCall()
2489 __ b(&formal_parameter_count_loaded); in PrepareForTailCall()
2491 __ bind(&no_arguments_adaptor); in PrepareForTailCall()
2493 __ LoadP(scratch1, in PrepareForTailCall()
2495 __ LoadP(scratch1, in PrepareForTailCall()
2497 __ LoadW(caller_args_count_reg, in PrepareForTailCall()
2501 __ SmiUntag(caller_args_count_reg); in PrepareForTailCall()
2504 __ bind(&formal_parameter_count_loaded); in PrepareForTailCall()
2507 __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2, in PrepareForTailCall()
2509 __ bind(&done); in PrepareForTailCall()
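Before a tail call can reuse the caller's frame, PrepareForTailCall needs the caller's actual argument count: under an arguments-adaptor frame that count was saved in the frame itself, otherwise the formal parameter count from the SharedFunctionInfo is authoritative (the two branches meet at &formal_parameter_count_loaded). Reduced to a sketch, with the frame reads abstracted into parameters:

  #include <cstddef>

  size_t CallerArgumentCount(bool caller_is_arguments_adaptor,
                             size_t adaptor_saved_argc,
                             size_t formal_parameter_count) {
    return caller_is_arguments_adaptor ? adaptor_saved_argc
                                       : formal_parameter_count;
  }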
2521 __ AssertFunction(r3); in Generate_CallFunction()
2526 __ LoadP(r4, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset)); in Generate_CallFunction()
2527 __ LoadlW(r5, FieldMemOperand(r4, SharedFunctionInfo::kCompilerHintsOffset)); in Generate_CallFunction()
2528 __ TestBitMask(r5, FunctionKind::kClassConstructor in Generate_CallFunction()
2531 __ bne(&class_constructor); in Generate_CallFunction()
2536 __ LoadP(cp, FieldMemOperand(r3, JSFunction::kContextOffset)); in Generate_CallFunction()
2539 __ AndP(r0, r5, Operand((1 << SharedFunctionInfo::kStrictModeBit) | in Generate_CallFunction()
2541 __ bne(&done_convert); in Generate_CallFunction()
2552 __ LoadGlobalProxy(r5); in Generate_CallFunction()
2555 __ ShiftLeftP(r5, r2, Operand(kPointerSizeLog2)); in Generate_CallFunction()
2556 __ LoadP(r5, MemOperand(sp, r5)); in Generate_CallFunction()
2557 __ JumpIfSmi(r5, &convert_to_object); in Generate_CallFunction()
2559 __ CompareObjectType(r5, r6, r6, FIRST_JS_RECEIVER_TYPE); in Generate_CallFunction()
2560 __ bge(&done_convert); in Generate_CallFunction()
2563 __ JumpIfRoot(r5, Heap::kUndefinedValueRootIndex, in Generate_CallFunction()
2565 __ JumpIfNotRoot(r5, Heap::kNullValueRootIndex, &convert_to_object); in Generate_CallFunction()
2566 __ bind(&convert_global_proxy); in Generate_CallFunction()
2569 __ LoadGlobalProxy(r5); in Generate_CallFunction()
2571 __ b(&convert_receiver); in Generate_CallFunction()
2573 __ bind(&convert_to_object); in Generate_CallFunction()
2579 __ SmiTag(r2); in Generate_CallFunction()
2580 __ Push(r2, r3); in Generate_CallFunction()
2581 __ LoadRR(r2, r5); in Generate_CallFunction()
2582 __ Push(cp); in Generate_CallFunction()
2583 __ Call(masm->isolate()->builtins()->ToObject(), in Generate_CallFunction()
2585 __ Pop(cp); in Generate_CallFunction()
2586 __ LoadRR(r5, r2); in Generate_CallFunction()
2587 __ Pop(r2, r3); in Generate_CallFunction()
2588 __ SmiUntag(r2); in Generate_CallFunction()
2590 __ LoadP(r4, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset)); in Generate_CallFunction()
2591 __ bind(&convert_receiver); in Generate_CallFunction()
2593 __ ShiftLeftP(r6, r2, Operand(kPointerSizeLog2)); in Generate_CallFunction()
2594 __ StoreP(r5, MemOperand(sp, r6)); in Generate_CallFunction()
2596 __ bind(&done_convert); in Generate_CallFunction()
2609 __ LoadW( in Generate_CallFunction()
2612 __ SmiUntag(r4); in Generate_CallFunction()
2616 __ InvokeFunctionCode(r3, no_reg, expected, actual, JUMP_FUNCTION, in Generate_CallFunction()
2620 __ bind(&class_constructor); in Generate_CallFunction()
2623 __ push(r3); in Generate_CallFunction()
2624 __ CallRuntime(Runtime::kThrowConstructorNonCallableError); in Generate_CallFunction()
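The &convert_to_object region implements sloppy-mode receiver coercion: strict-mode and native functions skip it entirely (the AndP test on the compiler hints), undefined and null become the global proxy, anything that is already a JSReceiver passes through, and other primitives go to the ToObject builtin with argc and the function Smi-saved around the call. The decision, as a hedged sketch with invented tags:

  enum class Kind { kUndefinedOrNull, kJSReceiver, kOtherPrimitive };
  struct Receiver { Kind kind; };

  Receiver ConvertReceiver(Receiver r, Receiver global_proxy,
                           Receiver (*to_object)(Receiver)) {
    if (r.kind == Kind::kJSReceiver) return r;        // already an object
    if (r.kind == Kind::kUndefinedOrNull) return global_proxy;
    return to_object(r);                              // the ToObject call
  }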
2639 __ LoadP(r4, FieldMemOperand(r3, JSBoundFunction::kBoundArgumentsOffset)); in Generate_PushBoundArguments()
2640 __ LoadP(r6, FieldMemOperand(r4, FixedArray::kLengthOffset)); in Generate_PushBoundArguments()
2641 __ SmiUntag(r6); in Generate_PushBoundArguments()
2642 __ LoadAndTestP(r6, r6); in Generate_PushBoundArguments()
2643 __ beq(&no_bound_arguments); in Generate_PushBoundArguments()
2656 __ LoadRR(r8, sp); // preserve previous stack pointer in Generate_PushBoundArguments()
2657 __ ShiftLeftP(r9, r6, Operand(kPointerSizeLog2)); in Generate_PushBoundArguments()
2658 __ SubP(sp, sp, r9); in Generate_PushBoundArguments()
2662 __ CompareRoot(sp, Heap::kRealStackLimitRootIndex); in Generate_PushBoundArguments()
2663 __ bgt(&done); // Signed comparison. in Generate_PushBoundArguments()
2665 __ LoadRR(sp, r8); in Generate_PushBoundArguments()
2668 __ EnterFrame(StackFrame::INTERNAL); in Generate_PushBoundArguments()
2669 __ CallRuntime(Runtime::kThrowStackOverflow); in Generate_PushBoundArguments()
2671 __ bind(&done); in Generate_PushBoundArguments()
2680 __ LoadImmP(r7, Operand::Zero()); in Generate_PushBoundArguments()
2681 __ CmpP(r2, Operand::Zero()); in Generate_PushBoundArguments()
2682 __ beq(&skip); in Generate_PushBoundArguments()
2683 __ LoadRR(r1, r2); in Generate_PushBoundArguments()
2684 __ bind(&loop); in Generate_PushBoundArguments()
2685 __ LoadP(r0, MemOperand(r8, r7)); in Generate_PushBoundArguments()
2686 __ StoreP(r0, MemOperand(sp, r7)); in Generate_PushBoundArguments()
2687 __ AddP(r7, r7, Operand(kPointerSize)); in Generate_PushBoundArguments()
2688 __ BranchOnCount(r1, &loop); in Generate_PushBoundArguments()
2689 __ bind(&skip); in Generate_PushBoundArguments()
2695 __ AddP(r4, r4, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); in Generate_PushBoundArguments()
2696 __ AddP(r4, r4, r9); in Generate_PushBoundArguments()
2697 __ LoadRR(r1, r6); in Generate_PushBoundArguments()
2698 __ bind(&loop); in Generate_PushBoundArguments()
2699 __ LoadP(r0, MemOperand(r4, -kPointerSize)); in Generate_PushBoundArguments()
2700 __ lay(r4, MemOperand(r4, -kPointerSize)); in Generate_PushBoundArguments()
2701 __ StoreP(r0, MemOperand(sp, r7)); in Generate_PushBoundArguments()
2702 __ AddP(r7, r7, Operand(kPointerSize)); in Generate_PushBoundArguments()
2703 __ BranchOnCount(r1, &loop); in Generate_PushBoundArguments()
2704 __ AddP(r2, r2, r6); in Generate_PushBoundArguments()
2707 __ bind(&no_bound_arguments); in Generate_PushBoundArguments()
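After the stack-limit probe confirms the extra slots fit, the first loop slides the already-pushed arguments down by the number of bound arguments, and the second copies the bound ones out of the FixedArray into the gap, walking the array backwards while addresses ascend so order is preserved. Ignoring the in-place mechanics, the net argument list is:

  #include <vector>

  // f.bind(t, b1, b2)(a1) ends up invoking f with (b1, b2, a1).
  std::vector<int> SpliceBoundArguments(const std::vector<int>& call_args,
                                        const std::vector<int>& bound_args) {
    std::vector<int> out(bound_args);  // bound arguments first
    out.insert(out.end(), call_args.begin(), call_args.end());
    return out;
  }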
2719 __ AssertBoundFunction(r3); in Generate_CallBoundFunctionImpl()
2726 __ LoadP(ip, FieldMemOperand(r3, JSBoundFunction::kBoundThisOffset)); in Generate_CallBoundFunctionImpl()
2727 __ ShiftLeftP(r1, r2, Operand(kPointerSizeLog2)); in Generate_CallBoundFunctionImpl()
2728 __ StoreP(ip, MemOperand(sp, r1)); in Generate_CallBoundFunctionImpl()
2734 __ LoadP(r3, in Generate_CallBoundFunctionImpl()
2736 __ mov(ip, Operand(ExternalReference(Builtins::kCall_ReceiverIsAny, in Generate_CallBoundFunctionImpl()
2738 __ LoadP(ip, MemOperand(ip)); in Generate_CallBoundFunctionImpl()
2739 __ AddP(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate_CallBoundFunctionImpl()
2740 __ JumpToJSEntry(ip); in Generate_CallBoundFunctionImpl()
2752 __ JumpIfSmi(r3, &non_callable); in Generate_Call()
2753 __ bind(&non_smi); in Generate_Call()
2754 __ CompareObjectType(r3, r6, r7, JS_FUNCTION_TYPE); in Generate_Call()
2755 __ Jump(masm->isolate()->builtins()->CallFunction(mode, tail_call_mode), in Generate_Call()
2757 __ CmpP(r7, Operand(JS_BOUND_FUNCTION_TYPE)); in Generate_Call()
2758 __ Jump(masm->isolate()->builtins()->CallBoundFunction(tail_call_mode), in Generate_Call()
2762 __ LoadlB(r6, FieldMemOperand(r6, Map::kBitFieldOffset)); in Generate_Call()
2763 __ TestBit(r6, Map::kIsCallable); in Generate_Call()
2764 __ beq(&non_callable); in Generate_Call()
2766 __ CmpP(r7, Operand(JS_PROXY_TYPE)); in Generate_Call()
2767 __ bne(&non_function); in Generate_Call()
2775 __ Push(r3); in Generate_Call()
2778 __ AddP(r2, r2, Operand(2)); in Generate_Call()
2780 __ JumpToExternalReference( in Generate_Call()
2785 __ bind(&non_function); in Generate_Call()
2787 __ ShiftLeftP(r7, r2, Operand(kPointerSizeLog2)); in Generate_Call()
2788 __ StoreP(r3, MemOperand(sp, r7)); in Generate_Call()
2790 __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, r3); in Generate_Call()
2791 __ Jump(masm->isolate()->builtins()->CallFunction( in Generate_Call()
2796 __ bind(&non_callable); in Generate_Call()
2799 __ Push(r3); in Generate_Call()
2800 __ CallRuntime(Runtime::kThrowCalledNonCallable); in Generate_Call()
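Generate_Call is a type dispatch on the callable: JSFunctions and bound functions get their specialized builtins, proxies go to the runtime (target and handler are pushed, hence the argc adjustment by 2), any other map with the callable bit set is routed through the CALL_AS_FUNCTION_DELEGATE with the original target patched in as the receiver, and everything else throws. As a decision ladder with invented enumerators:

  enum class TargetKind { kJSFunction, kJSBoundFunction, kJSProxy,
                          kOtherCallable, kNotCallable };
  enum class CallPath { kCallFunction, kCallBoundFunction, kProxyRuntime,
                        kCallAsFunctionDelegate, kThrowCalledNonCallable };

  CallPath DispatchCall(TargetKind kind) {
    switch (kind) {
      case TargetKind::kJSFunction:      return CallPath::kCallFunction;
      case TargetKind::kJSBoundFunction: return CallPath::kCallBoundFunction;
      case TargetKind::kJSProxy:         return CallPath::kProxyRuntime;
      case TargetKind::kOtherCallable:   return CallPath::kCallAsFunctionDelegate;
      case TargetKind::kNotCallable:     return CallPath::kThrowCalledNonCallable;
    }
    return CallPath::kThrowCalledNonCallable;  // not reached
  }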
2816 __ LoadP(spread, MemOperand(sp, 0)); in CheckSpreadAndPushToStack()
2817 __ JumpIfSmi(spread, &runtime_call); in CheckSpreadAndPushToStack()
2818 __ LoadP(spread_map, FieldMemOperand(spread, HeapObject::kMapOffset)); in CheckSpreadAndPushToStack()
2821 __ CompareInstanceType(spread_map, scratch, JS_ARRAY_TYPE); in CheckSpreadAndPushToStack()
2822 __ bne(&runtime_call); in CheckSpreadAndPushToStack()
2825 __ LoadP(scratch, FieldMemOperand(spread_map, Map::kPrototypeOffset)); in CheckSpreadAndPushToStack()
2826 __ LoadP(scratch2, NativeContextMemOperand()); in CheckSpreadAndPushToStack()
2827 __ LoadP(scratch2, in CheckSpreadAndPushToStack()
2829 __ CmpP(scratch, scratch2); in CheckSpreadAndPushToStack()
2830 __ bne(&runtime_call); in CheckSpreadAndPushToStack()
2834 __ LoadRoot(scratch, Heap::kArrayIteratorProtectorRootIndex); in CheckSpreadAndPushToStack()
2835 __ LoadP(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset)); in CheckSpreadAndPushToStack()
2836 __ CmpSmiLiteral(scratch, Smi::FromInt(Isolate::kProtectorValid), r0); in CheckSpreadAndPushToStack()
2837 __ bne(&runtime_call); in CheckSpreadAndPushToStack()
2840 __ LoadP(scratch2, NativeContextMemOperand()); in CheckSpreadAndPushToStack()
2841 __ LoadP(scratch, in CheckSpreadAndPushToStack()
2844 __ LoadP(scratch, FieldMemOperand(scratch, HeapObject::kMapOffset)); in CheckSpreadAndPushToStack()
2845 __ LoadP(scratch2, in CheckSpreadAndPushToStack()
2848 __ CmpP(scratch, scratch2); in CheckSpreadAndPushToStack()
2849 __ bne(&runtime_call); in CheckSpreadAndPushToStack()
2854 __ LoadlB(scratch, FieldMemOperand(spread_map, Map::kBitField2Offset)); in CheckSpreadAndPushToStack()
2855 __ DecodeField<Map::ElementsKindBits>(scratch); in CheckSpreadAndPushToStack()
2856 __ CmpP(scratch, Operand(FAST_HOLEY_ELEMENTS)); in CheckSpreadAndPushToStack()
2857 __ bgt(&runtime_call); in CheckSpreadAndPushToStack()
2859 __ CmpP(scratch, Operand(FAST_SMI_ELEMENTS)); in CheckSpreadAndPushToStack()
2860 __ beq(&no_protector_check); in CheckSpreadAndPushToStack()
2861 __ CmpP(scratch, Operand(FAST_ELEMENTS)); in CheckSpreadAndPushToStack()
2862 __ beq(&no_protector_check); in CheckSpreadAndPushToStack()
2864 __ LoadRoot(scratch, Heap::kArrayProtectorRootIndex); in CheckSpreadAndPushToStack()
2865 __ LoadP(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset)); in CheckSpreadAndPushToStack()
2866 __ CmpSmiLiteral(scratch, Smi::FromInt(Isolate::kProtectorValid), r0); in CheckSpreadAndPushToStack()
2867 __ bne(&runtime_call); in CheckSpreadAndPushToStack()
2869 __ bind(&no_protector_check); in CheckSpreadAndPushToStack()
2871 __ LoadP(spread_len, FieldMemOperand(spread, JSArray::kLengthOffset)); in CheckSpreadAndPushToStack()
2872 __ SmiUntag(spread_len); in CheckSpreadAndPushToStack()
2873 __ LoadP(spread, FieldMemOperand(spread, JSArray::kElementsOffset)); in CheckSpreadAndPushToStack()
2874 __ b(&push_args); in CheckSpreadAndPushToStack()
2876 __ bind(&runtime_call); in CheckSpreadAndPushToStack()
2880 __ SmiTag(argc); in CheckSpreadAndPushToStack()
2881 __ Push(constructor, new_target, argc, spread); in CheckSpreadAndPushToStack()
2882 __ CallRuntime(Runtime::kSpreadIterableFixed); in CheckSpreadAndPushToStack()
2883 __ LoadRR(spread, r2); in CheckSpreadAndPushToStack()
2884 __ Pop(constructor, new_target, argc); in CheckSpreadAndPushToStack()
2885 __ SmiUntag(argc); in CheckSpreadAndPushToStack()
2890 __ LoadP(spread_len, FieldMemOperand(spread, FixedArray::kLengthOffset)); in CheckSpreadAndPushToStack()
2891 __ SmiUntag(spread_len); in CheckSpreadAndPushToStack()
2893 __ bind(&push_args); in CheckSpreadAndPushToStack()
2895 __ AddP(argc, argc, spread_len); in CheckSpreadAndPushToStack()
2896 __ SubP(argc, argc, Operand(1)); in CheckSpreadAndPushToStack()
2899 __ Pop(scratch); in CheckSpreadAndPushToStack()
2907 __ LoadRoot(scratch, Heap::kRealStackLimitRootIndex); in CheckSpreadAndPushToStack()
2910 __ SubP(scratch, sp, scratch); in CheckSpreadAndPushToStack()
2912 __ ShiftLeftP(r0, spread_len, Operand(kPointerSizeLog2)); in CheckSpreadAndPushToStack()
2913 __ CmpP(scratch, r0); in CheckSpreadAndPushToStack()
2914 __ bgt(&done); // Signed comparison. in CheckSpreadAndPushToStack()
2915 __ TailCallRuntime(Runtime::kThrowStackOverflow); in CheckSpreadAndPushToStack()
2916 __ bind(&done); in CheckSpreadAndPushToStack()
2921 __ LoadImmP(scratch, Operand::Zero()); in CheckSpreadAndPushToStack()
2923 __ bind(&loop); in CheckSpreadAndPushToStack()
2924 __ CmpP(scratch, spread_len); in CheckSpreadAndPushToStack()
2925 __ beq(&done); in CheckSpreadAndPushToStack()
2926 __ ShiftLeftP(r0, scratch, Operand(kPointerSizeLog2)); in CheckSpreadAndPushToStack()
2927 __ AddP(scratch2, spread, r0); in CheckSpreadAndPushToStack()
2928 __ LoadP(scratch2, FieldMemOperand(scratch2, FixedArray::kHeaderSize)); in CheckSpreadAndPushToStack()
2929 __ JumpIfNotRoot(scratch2, Heap::kTheHoleValueRootIndex, &push); in CheckSpreadAndPushToStack()
2930 __ LoadRoot(scratch2, Heap::kUndefinedValueRootIndex); in CheckSpreadAndPushToStack()
2931 __ bind(&push); in CheckSpreadAndPushToStack()
2932 __ Push(scratch2); in CheckSpreadAndPushToStack()
2933 __ AddP(scratch, scratch, Operand(1)); in CheckSpreadAndPushToStack()
2934 __ b(&loop); in CheckSpreadAndPushToStack()
2935 __ bind(&done); in CheckSpreadAndPushToStack()
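The chain of bne(&runtime_call) branches above is one fast-path predicate: the spread must be a JSArray whose map still points at the initial Array prototype, the array-iterator protector must be intact, the elements kind must be fast, and the holey kinds additionally require the array protector. Only then are the elements pushed directly, holes becoming undefined as in Generate_Apply; otherwise Runtime::kSpreadIterableFixed materializes the list in C++. The predicate, flattened (every field abstracts one generated check and is not a V8 API):

  struct SpreadChecks {
    bool is_js_array;
    bool has_initial_array_prototype;
    bool iterator_protector_valid;
    bool fast_elements_kind;      // ElementsKind <= FAST_HOLEY_ELEMENTS
    bool needs_array_protector;   // the holey (odd-valued) kinds
    bool array_protector_valid;
  };

  bool CanUseFastSpread(const SpreadChecks& s) {
    return s.is_js_array && s.has_initial_array_prototype &&
           s.iterator_protector_valid && s.fast_elements_kind &&
           (!s.needs_array_protector || s.array_protector_valid);
  }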
2947 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex); in Generate_CallWithSpread()
2949 __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny, in Generate_CallWithSpread()
2961 __ AssertFunction(r3); in Generate_ConstructFunction()
2965 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex); in Generate_ConstructFunction()
2969 __ LoadP(r6, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset)); in Generate_ConstructFunction()
2970 __ LoadP(r6, FieldMemOperand(r6, SharedFunctionInfo::kConstructStubOffset)); in Generate_ConstructFunction()
2971 __ AddP(ip, r6, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate_ConstructFunction()
2972 __ JumpToJSEntry(ip); in Generate_ConstructFunction()
2982 __ AssertBoundFunction(r3); in Generate_ConstructBoundFunction()
2989 __ CmpP(r3, r5); in Generate_ConstructBoundFunction()
2990 __ bne(&skip); in Generate_ConstructBoundFunction()
2991 __ LoadP(r5, in Generate_ConstructBoundFunction()
2993 __ bind(&skip); in Generate_ConstructBoundFunction()
2996 __ LoadP(r3, in Generate_ConstructBoundFunction()
2998 __ mov(ip, Operand(ExternalReference(Builtins::kConstruct, masm->isolate()))); in Generate_ConstructBoundFunction()
2999 __ LoadP(ip, MemOperand(ip)); in Generate_ConstructBoundFunction()
3000 __ AddP(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate_ConstructBoundFunction()
3001 __ JumpToJSEntry(ip); in Generate_ConstructBoundFunction()
3014 __ Push(r3, r5); in Generate_ConstructProxy()
3016 __ AddP(r2, r2, Operand(3)); in Generate_ConstructProxy()
3018 __ JumpToExternalReference( in Generate_ConstructProxy()
3033 __ JumpIfSmi(r3, &non_constructor); in Generate_Construct()
3036 __ CompareObjectType(r3, r6, r7, JS_FUNCTION_TYPE); in Generate_Construct()
3037 __ Jump(masm->isolate()->builtins()->ConstructFunction(), in Generate_Construct()
3041 __ LoadlB(r4, FieldMemOperand(r6, Map::kBitFieldOffset)); in Generate_Construct()
3042 __ TestBit(r4, Map::kIsConstructor); in Generate_Construct()
3043 __ beq(&non_constructor); in Generate_Construct()
3047 __ CmpP(r7, Operand(JS_BOUND_FUNCTION_TYPE)); in Generate_Construct()
3048 __ Jump(masm->isolate()->builtins()->ConstructBoundFunction(), in Generate_Construct()
3052 __ CmpP(r7, Operand(JS_PROXY_TYPE)); in Generate_Construct()
3053 __ Jump(masm->isolate()->builtins()->ConstructProxy(), RelocInfo::CODE_TARGET, in Generate_Construct()
3059 __ ShiftLeftP(r7, r2, Operand(kPointerSizeLog2)); in Generate_Construct()
3060 __ StoreP(r3, MemOperand(sp, r7)); in Generate_Construct()
3062 __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, r3); in Generate_Construct()
3063 __ Jump(masm->isolate()->builtins()->CallFunction(), in Generate_Construct()
3069 __ bind(&non_constructor); in Generate_Construct()
3070 __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(), in Generate_Construct()
3083 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); in Generate_ConstructWithSpread()
3092 __ SmiTag(r3); in Generate_AllocateInNewSpace()
3093 __ Push(r3); in Generate_AllocateInNewSpace()
3094 __ LoadSmiLiteral(cp, Smi::kZero); in Generate_AllocateInNewSpace()
3095 __ TailCallRuntime(Runtime::kAllocateInNewSpace); in Generate_AllocateInNewSpace()
3104 __ SmiTag(r3); in Generate_AllocateInOldSpace()
3105 __ LoadSmiLiteral(r4, Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE))); in Generate_AllocateInOldSpace()
3106 __ Push(r3, r4); in Generate_AllocateInOldSpace()
3107 __ LoadSmiLiteral(cp, Smi::kZero); in Generate_AllocateInOldSpace()
3108 __ TailCallRuntime(Runtime::kAllocateInTargetSpace); in Generate_AllocateInOldSpace()
3117 __ push(r3); in Generate_Abort()
3118 __ LoadSmiLiteral(cp, Smi::kZero); in Generate_Abort()
3119 __ TailCallRuntime(Runtime::kAbort); in Generate_Abort()
3133 __ LoadP(ip, FieldMemOperand(r3, JSFunction::kCodeEntryOffset)); in Generate_ArgumentsAdaptorTrampoline()
3134 __ CmpP(r2, r4); in Generate_ArgumentsAdaptorTrampoline()
3135 __ blt(&too_few); in Generate_ArgumentsAdaptorTrampoline()
3136 __ CmpP(r4, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel)); in Generate_ArgumentsAdaptorTrampoline()
3137 __ beq(&dont_adapt_arguments); in Generate_ArgumentsAdaptorTrampoline()
3140 __ bind(&enough); in Generate_ArgumentsAdaptorTrampoline()
3150 __ SmiToPtrArrayOffset(r2, r2); in Generate_ArgumentsAdaptorTrampoline()
3151 __ AddP(r2, fp); in Generate_ArgumentsAdaptorTrampoline()
3153 __ AddP(r2, r2, Operand(2 * kPointerSize)); in Generate_ArgumentsAdaptorTrampoline()
3154 __ ShiftLeftP(r6, r4, Operand(kPointerSizeLog2)); in Generate_ArgumentsAdaptorTrampoline()
3155 __ SubP(r6, r2, r6); in Generate_ArgumentsAdaptorTrampoline()
3166 __ bind(&copy); in Generate_ArgumentsAdaptorTrampoline()
3167 __ LoadP(r0, MemOperand(r2, 0)); in Generate_ArgumentsAdaptorTrampoline()
3168 __ push(r0); in Generate_ArgumentsAdaptorTrampoline()
3169 __ CmpP(r2, r6); // Compare before moving to next argument. in Generate_ArgumentsAdaptorTrampoline()
3170 __ lay(r2, MemOperand(r2, -kPointerSize)); in Generate_ArgumentsAdaptorTrampoline()
3171 __ bne(&copy); in Generate_ArgumentsAdaptorTrampoline()
3173 __ b(&invoke); in Generate_ArgumentsAdaptorTrampoline()
3177 __ bind(&too_few); in Generate_ArgumentsAdaptorTrampoline()
3188 __ SmiToPtrArrayOffset(r2, r2); in Generate_ArgumentsAdaptorTrampoline()
3189 __ lay(r2, MemOperand(r2, fp)); in Generate_ArgumentsAdaptorTrampoline()
3198 __ bind(&copy); in Generate_ArgumentsAdaptorTrampoline()
3200 __ LoadP(r0, MemOperand(r2, 2 * kPointerSize)); in Generate_ArgumentsAdaptorTrampoline()
3201 __ push(r0); in Generate_ArgumentsAdaptorTrampoline()
3202 __ CmpP(r2, fp); // Compare before moving to next argument. in Generate_ArgumentsAdaptorTrampoline()
3203 __ lay(r2, MemOperand(r2, -kPointerSize)); in Generate_ArgumentsAdaptorTrampoline()
3204 __ bne(&copy); in Generate_ArgumentsAdaptorTrampoline()
3210 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); in Generate_ArgumentsAdaptorTrampoline()
3211 __ ShiftLeftP(r6, r4, Operand(kPointerSizeLog2)); in Generate_ArgumentsAdaptorTrampoline()
3212 __ SubP(r6, fp, r6); in Generate_ArgumentsAdaptorTrampoline()
3214 __ SubP(r6, r6, Operand(StandardFrameConstants::kFixedFrameSizeFromFp + in Generate_ArgumentsAdaptorTrampoline()
3218 __ bind(&fill); in Generate_ArgumentsAdaptorTrampoline()
3219 __ push(r0); in Generate_ArgumentsAdaptorTrampoline()
3220 __ CmpP(sp, r6); in Generate_ArgumentsAdaptorTrampoline()
3221 __ bne(&fill); in Generate_ArgumentsAdaptorTrampoline()
3225 __ bind(&invoke); in Generate_ArgumentsAdaptorTrampoline()
3226 __ LoadRR(r2, r4); in Generate_ArgumentsAdaptorTrampoline()
3230 __ CallJSEntry(ip); in Generate_ArgumentsAdaptorTrampoline()
3237 __ Ret(); in Generate_ArgumentsAdaptorTrampoline()
3242 __ bind(&dont_adapt_arguments); in Generate_ArgumentsAdaptorTrampoline()
3243 __ JumpToJSEntry(ip); in Generate_ArgumentsAdaptorTrampoline()
3245 __ bind(&stack_overflow); in Generate_ArgumentsAdaptorTrampoline()
3248 __ CallRuntime(Runtime::kThrowStackOverflow); in Generate_ArgumentsAdaptorTrampoline()
3249 __ bkpt(0); in Generate_ArgumentsAdaptorTrampoline()
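Both copy paths converge on one invariant: the callee observes exactly its expected argument count. With too many actuals, only the expected number are copied into the new frame (the extras stay reachable through the adaptor frame); with too few, the &fill loop pads with undefined; and functions marked with kDontAdaptArgumentsSentinel skip adaptation entirely via &dont_adapt_arguments. The callee-visible effect, as a sketch:

  #include <cstddef>
  #include <vector>

  struct Value { bool undefined = false; };  // illustrative stand-in

  std::vector<Value> AdaptArguments(std::vector<Value> actual,
                                    size_t expected) {
    // resize() both truncates (&enough) and pads with undefined (&too_few).
    actual.resize(expected, Value{true});
    return actual;
  }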
3253 #undef __