1 /*
2 * Copyright (c) 2022 Huawei Device Co., Ltd.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16 #include "ecmascript/compiler/trampoline/aarch64/common_call.h"
17
18 #include "ecmascript/compiler/assembler/assembler.h"
19 #include "ecmascript/compiler/argument_accessor.h"
20 #include "ecmascript/compiler/common_stubs.h"
21 #include "ecmascript/compiler/rt_call_signature.h"
22 #include "ecmascript/ecma_runtime_call_info.h"
23 #include "ecmascript/frames.h"
24 #include "ecmascript/js_function.h"
25 #include "ecmascript/method.h"
26 #include "ecmascript/js_thread.h"
27 #include "ecmascript/js_generator_object.h"
28 #include "ecmascript/message_string.h"
29 #include "ecmascript/runtime_call_id.h"
30
31 namespace panda::ecmascript::aarch64 {
32 using Label = panda::ecmascript::Label;
33 #define __ assembler->
34
35 // Generate code for entering asm interpreter
36 // c++ calling convention
37 // Input: glue - %X0
38 // callTarget - %X1
39 // method - %X2
40 // callField - %X3
41 // argc - %X4
42 // argv - %X5(<callTarget, newTarget, this> are at the beginning of argv)
AsmInterpreterEntry(ExtendedAssembler * assembler)43 void AsmInterpreterCall::AsmInterpreterEntry(ExtendedAssembler *assembler)
44 {
45 __ BindAssemblerStub(RTSTUB_ID(AsmInterpreterEntry));
46 Label target;
47 size_t begin = __ GetCurrentPosition();
48 PushAsmInterpEntryFrame(assembler);
49 __ Bl(&target);
50 PopAsmInterpEntryFrame(assembler);
51 size_t end = __ GetCurrentPosition();
52 if ((end - begin) != FrameCompletionPos::ARM64EntryFrameDuration) {
53 LOG_COMPILER(FATAL) << (end - begin) << " != " << FrameCompletionPos::ARM64EntryFrameDuration
54 << "This frame has been modified, and the offset EntryFrameDuration should be updated too.";
55 }
56 __ Ret();
57
58 __ Bind(&target);
59 {
60 AsmInterpEntryDispatch(assembler);
61 }
62 }
63
64 // Input: glue - %X0
65 // callTarget - %X1
66 // method - %X2
67 // callField - %X3
68 // argc - %X4
69 // argv - %X5(<callTarget, newTarget, this> are at the beginning of argv)
// Dispatches an entry call according to the callee's type:
//   - JSFunction backed by bytecode -> JSCallCommonEntry (CALL_ENTRY mode)
//   - native code (builtin, or native-backed JSFunction) -> CallNativeEntry
//   - non-callable -> ThrowNotCallableException runtime stub
// Input: glue - X0, callTarget - X1, method - X2, callField - X3,
//        argc - X4, argv - X5 (<callTarget, newTarget, this> at the beginning of argv)
void AsmInterpreterCall::AsmInterpEntryDispatch(ExtendedAssembler *assembler)
{
    Label notJSFunction;
    Label callNativeEntry;
    Label callJSFunctionEntry;
    Label notCallable;
    Register glueRegister(X0);
    Register argcRegister(X4, W);
    Register argvRegister(X5);
    Register callTargetRegister(X1);
    Register callFieldRegister(X3);
    Register bitFieldRegister(X16);
    Register tempRegister(X17); // can not be used to store any variable
    Register functionTypeRegister(X18, W);
    // Extract the callee's JSType: low byte of the hclass bitfield.
    __ Ldr(tempRegister, MemoryOperand(callTargetRegister, TaggedObject::HCLASS_OFFSET));
    __ Ldr(bitFieldRegister, MemoryOperand(tempRegister, JSHClass::BIT_FIELD_OFFSET));
    __ And(functionTypeRegister, bitFieldRegister.W(), LogicalImmediate::Create(0xFF, RegWSize));
    // JS_FUNCTION_FIRST <= type <= JS_FUNCTION_LAST  ==>  a JS function.
    __ Mov(tempRegister.W(), Immediate(static_cast<int64_t>(JSType::JS_FUNCTION_FIRST)));
    __ Cmp(functionTypeRegister, tempRegister.W());
    __ B(Condition::LO, &notJSFunction);
    __ Mov(tempRegister.W(), Immediate(static_cast<int64_t>(JSType::JS_FUNCTION_LAST)));
    __ Cmp(functionTypeRegister, tempRegister.W());
    __ B(Condition::LS, &callJSFunctionEntry);
    __ Bind(&notJSFunction);
    {
        // Not a JS function: a callable object goes to the native entry,
        // anything else throws.
        __ Tst(bitFieldRegister,
            LogicalImmediate::Create(static_cast<int64_t>(1ULL << JSHClass::CallableBit::START_BIT), RegXSize));
        __ B(Condition::EQ, &notCallable);
        // fall through
    }
    __ Bind(&callNativeEntry);
    CallNativeEntry(assembler);
    __ Bind(&callJSFunctionEntry);
    {
        // A JS function may still be implemented in native code.
        __ Tbnz(callFieldRegister, MethodLiteral::IsNativeBit::START_BIT, &callNativeEntry);
        // fast path: skip the three mandatory args so argv points at the real arguments
        __ Add(argvRegister, argvRegister, Immediate(NUM_MANDATORY_JSFUNC_ARGS * JSTaggedValue::TaggedTypeSize()));
        JSCallCommonEntry(assembler, JSCallMode::CALL_ENTRY);
    }
    __ Bind(&notCallable);
    {
        Register runtimeId(X11);
        Register trampoline(X12);
        __ Mov(runtimeId, Immediate(kungfu::RuntimeStubCSigns::ID_ThrowNotCallableException));
        // 3 : 3 means *8
        __ Add(trampoline, glueRegister, Operand(runtimeId, LSL, 3));
        __ Ldr(trampoline, MemoryOperand(trampoline, JSThread::GlueData::GetRTStubEntriesOffset(false)));
        __ Blr(trampoline);
        __ Ret();
    }
}
121
// Common prologue shared by all JS-call trampolines.
// Saves fp/lr when entered directly (not via a jump from another trampoline),
// temporarily drops SP to the glue stack limit so pushed parameters cannot be
// clobbered by the cpu profiler, then routes to the fast path (declared argc
// equals actual argc) or the slow path (undefined padding required).
// The stackOverflow label either tail-jumps to the ThrowStackOverflowException
// bytecode handler (jump-entry modes) or throws and returns.
void AsmInterpreterCall::JSCallCommonEntry(ExtendedAssembler *assembler, JSCallMode mode)
{
    Label stackOverflow;
    Register glueRegister = __ GlueRegister();
    Register fpRegister = __ AvailableRegister1();
    Register currentSlotRegister = __ AvailableRegister3();
    Register callFieldRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_FIELD);
    Register argcRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARGC);
    if (!kungfu::AssemblerModule::IsJumpToCallCommonEntry(mode)) {
        __ PushFpAndLr();
    }
    // save fp
    __ Mov(fpRegister, Register(SP));
    __ Mov(currentSlotRegister, Register(SP));

    {
        // Reserve enough sp space to prevent stack parameters from being covered by cpu profiler.
        [[maybe_unused]] TempRegister1Scope scope(assembler);
        Register tempRegister = __ TempRegister1();
        __ Ldr(tempRegister, MemoryOperand(glueRegister, JSThread::GlueData::GetStackLimitOffset(false)));
        __ Mov(Register(SP), tempRegister);
    }

    Register declaredNumArgsRegister = __ AvailableRegister2();
    GetDeclaredNumArgsFromCallField(assembler, callFieldRegister, declaredNumArgsRegister);

    Label slowPathEntry;
    Label fastPathEntry;
    Label pushCallThis;
    // argc is a compile-time constant for fixed-arg modes and negative for
    // range modes, where the actual count lives in argcRegister.
    auto argc = kungfu::AssemblerModule::GetArgcFromJSCallMode(mode);
    if (argc >= 0) {
        __ Cmp(declaredNumArgsRegister, Immediate(argc));
    } else {
        __ Cmp(declaredNumArgsRegister, argcRegister);
    }
    __ B(Condition::NE, &slowPathEntry);
    __ Bind(&fastPathEntry);
    JSCallCommonFastPath(assembler, mode, &pushCallThis, &stackOverflow);
    __ Bind(&pushCallThis);
    PushCallThis(assembler, mode, &stackOverflow);
    __ Bind(&slowPathEntry);
    JSCallCommonSlowPath(assembler, mode, &fastPathEntry, &pushCallThis, &stackOverflow);

    __ Bind(&stackOverflow);
    if (kungfu::AssemblerModule::IsJumpToCallCommonEntry(mode)) {
        // Restore SP, then rebuild the interpreter's fixed register state
        // (X19 glue, X20 pc, X21 constpool, X22 profileTypeInfo, X23 acc)
        // before jumping to the throw handler.
        __ Mov(Register(SP), fpRegister);
        [[maybe_unused]] TempRegister1Scope scope(assembler);
        Register temp = __ TempRegister1();
        // only glue and acc are useful in exception handler
        if (glueRegister.GetId() != X19) {
            __ Mov(Register(X19), glueRegister);
        }
        Register acc(X23);
        __ Mov(acc, Immediate(JSTaggedValue::VALUE_EXCEPTION));
        Register methodRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::METHOD);
        Register callTargetRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_TARGET);
        // Reload pc to make sure stack trace is right
        __ Mov(temp, callTargetRegister);
        __ Ldr(Register(X20), MemoryOperand(methodRegister, Method::NATIVE_POINTER_OR_BYTECODE_ARRAY_OFFSET));
        // Reload constpool and profileInfo to make sure gc map work normally
        __ Ldr(Register(X22), MemoryOperand(methodRegister, Method::PROFILE_TYPE_INFO_OFFSET));
        __ Ldr(Register(X21), MemoryOperand(methodRegister, Method::CONSTANT_POOL_OFFSET));

        // Tail-jump to the ThrowStackOverflowException bytecode stub.
        __ Mov(temp, kungfu::BytecodeStubCSigns::ID_ThrowStackOverflowException);
        __ Add(temp, glueRegister, Operand(temp, UXTW, 3)); // 3: bc * 8
        __ Ldr(temp, MemoryOperand(temp, JSThread::GlueData::GetBCStubEntriesOffset(false)));
        __ Br(temp);
    } else {
        [[maybe_unused]] TempRegister1Scope scope(assembler);
        Register temp = __ TempRegister1();
        ThrowStackOverflowExceptionAndReturn(assembler, glueRegister, fpRegister, temp);
    }
}
195
JSCallCommonFastPath(ExtendedAssembler * assembler,JSCallMode mode,Label * pushCallThis,Label * stackOverflow)196 void AsmInterpreterCall::JSCallCommonFastPath(ExtendedAssembler *assembler, JSCallMode mode, Label *pushCallThis,
197 Label *stackOverflow)
198 {
199 Register glueRegister = __ GlueRegister();
200 auto argc = kungfu::AssemblerModule::GetArgcFromJSCallMode(mode);
201 Register currentSlotRegister = __ AvailableRegister3();
202 // call range
203 if (argc < 0) {
204 Register numRegister = __ AvailableRegister2();
205 Register argcRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARGC);
206 Register argvRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARGV);
207 __ Mov(numRegister, argcRegister);
208 [[maybe_unused]] TempRegister1Scope scope(assembler);
209 Register opRegister = __ TempRegister1();
210 PushArgsWithArgv(assembler, glueRegister, numRegister, argvRegister, opRegister,
211 currentSlotRegister, pushCallThis, stackOverflow);
212 } else if (argc > 0) {
213 if (argc > 2) { // 2: call arg2
214 Register arg2 = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG2);
215 __ Str(arg2, MemoryOperand(currentSlotRegister, -FRAME_SLOT_SIZE, AddrMode::PREINDEX));
216 }
217 if (argc > 1) {
218 Register arg1 = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG1);
219 __ Str(arg1, MemoryOperand(currentSlotRegister, -FRAME_SLOT_SIZE, AddrMode::PREINDEX));
220 }
221 if (argc > 0) {
222 Register arg0 = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG0);
223 __ Str(arg0, MemoryOperand(currentSlotRegister, -FRAME_SLOT_SIZE, AddrMode::PREINDEX));
224 }
225 }
226 }
227
// Slow path: declared argc differs from actual argc.
//   - If the method has the "have extra" bit, the actual argc is pushed first
//     so the callee can recover the extra arguments.
//   - If declared > actual, the gap is filled with undefined and control goes
//     back to fastPathEntry to push the actual arguments.
//   - If declared < actual (pushArgsEntry), only the first `declared`
//     arguments are pushed before jumping to pushCallThis.
void AsmInterpreterCall::JSCallCommonSlowPath(ExtendedAssembler *assembler, JSCallMode mode,
                                              Label *fastPathEntry, Label *pushCallThis, Label *stackOverflow)
{
    Register glueRegister = __ GlueRegister();
    Register callFieldRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_FIELD);
    Register argcRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARGC);
    Register argvRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARGV);
    Register currentSlotRegister = __ AvailableRegister3();
    Register arg0 = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG0);
    Register arg1 = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG1);
    Label noExtraEntry;
    Label pushArgsEntry;

    auto argc = kungfu::AssemblerModule::GetArgcFromJSCallMode(mode);
    Register declaredNumArgsRegister = __ AvailableRegister2();
    __ Tbz(callFieldRegister, MethodLiteral::HaveExtraBit::START_BIT, &noExtraEntry);
    // extra entry
    {
        // Push the actual argc so the callee can locate the extra arguments.
        [[maybe_unused]] TempRegister1Scope scope1(assembler);
        Register tempArgcRegister = __ TempRegister1();
        if (argc >= 0) {
            __ PushArgc(argc, tempArgcRegister, currentSlotRegister);
        } else {
            __ PushArgc(argcRegister, tempArgcRegister, currentSlotRegister);
        }
        // fall through
    }
    __ Bind(&noExtraEntry);
    {
        if (argc == 0) {
            // No actual args: pad all declared slots with undefined, then let
            // the fast path push callTarget/newTarget/this.
            {
                [[maybe_unused]] TempRegister1Scope scope(assembler);
                Register tempRegister = __ TempRegister1();
                PushUndefinedWithArgc(assembler, glueRegister, declaredNumArgsRegister, tempRegister,
                                      currentSlotRegister, nullptr, stackOverflow);
            }
            __ B(fastPathEntry);
            return;
        }
        // diff = declared - actual; if negative, PushUndefinedWithArgc
        // branches to pushArgsEntry instead of padding.
        [[maybe_unused]] TempRegister1Scope scope1(assembler);
        Register diffRegister = __ TempRegister1();
        if (argc >= 0) {
            __ Sub(diffRegister.W(), declaredNumArgsRegister.W(), Immediate(argc));
        } else {
            __ Sub(diffRegister.W(), declaredNumArgsRegister.W(), argcRegister.W());
        }
        [[maybe_unused]] TempRegister2Scope scope2(assembler);
        Register tempRegister = __ TempRegister2();
        PushUndefinedWithArgc(assembler, glueRegister, diffRegister, tempRegister,
                              currentSlotRegister, &pushArgsEntry, stackOverflow);
        __ B(fastPathEntry);
    }
    // declare < actual
    __ Bind(&pushArgsEntry);
    {
        // With the extra bit set all actual args were preserved above, so the
        // fast path can push them all.
        __ Tbnz(callFieldRegister, MethodLiteral::HaveExtraBit::START_BIT, fastPathEntry);
        // no extra branch
        // arg1, declare must be 0
        if (argc == 1) {
            __ B(pushCallThis);
            return;
        }
        __ Cmp(declaredNumArgsRegister, Immediate(0));
        __ B(Condition::EQ, pushCallThis);
        // call range
        if (argc < 0) {
            // Push only the declared number of args from argv.
            [[maybe_unused]] TempRegister1Scope scope(assembler);
            Register opRegister = __ TempRegister1();
            PushArgsWithArgv(assembler, glueRegister, declaredNumArgsRegister,
                             argvRegister, opRegister,
                             currentSlotRegister, nullptr, stackOverflow);
        } else if (argc > 0) {
            Label pushArgs0;
            if (argc > 2) { // 2: call arg2
                // declared is 2 or 1 now
                __ Cmp(declaredNumArgsRegister, Immediate(1));
                __ B(Condition::EQ, &pushArgs0);
                __ Str(arg1, MemoryOperand(currentSlotRegister, -FRAME_SLOT_SIZE, AddrMode::PREINDEX));
            }
            if (argc > 1) {
                __ Bind(&pushArgs0);
                // declared is 1 now
                __ Str(arg0, MemoryOperand(currentSlotRegister, -FRAME_SLOT_SIZE, AddrMode::PREINDEX));
            }
        }
        __ B(pushCallThis);
    }
}
316
GetThisRegsiter(ExtendedAssembler * assembler,JSCallMode mode,Register defaultRegister)317 Register AsmInterpreterCall::GetThisRegsiter(ExtendedAssembler *assembler, JSCallMode mode, Register defaultRegister)
318 {
319 switch (mode) {
320 case JSCallMode::CALL_GETTER:
321 case JSCallMode::CALL_THIS_ARG0:
322 return __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG0);
323 case JSCallMode::CALL_SETTER:
324 case JSCallMode::CALL_THIS_ARG1:
325 return __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG1);
326 case JSCallMode::CALL_THIS_ARG2:
327 case JSCallMode::CALL_CONSTRUCTOR_WITH_ARGV:
328 case JSCallMode::CALL_THIS_WITH_ARGV:
329 return __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG2);
330 case JSCallMode::CALL_THIS_ARG3:
331 case JSCallMode::CALL_THIS_ARG3_WITH_RETURN:
332 return __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG3);
333 case JSCallMode::CALL_FROM_AOT:
334 case JSCallMode::CALL_ENTRY: {
335 Register argvRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG1);
336 __ Ldur(defaultRegister, MemoryOperand(argvRegister, -FRAME_SLOT_SIZE));
337 return defaultRegister;
338 }
339 default:
340 UNREACHABLE();
341 }
342 return INVALID_REG;
343 }
344
GetNewTargetRegsiter(ExtendedAssembler * assembler,JSCallMode mode,Register defaultRegister)345 Register AsmInterpreterCall::GetNewTargetRegsiter(ExtendedAssembler *assembler, JSCallMode mode,
346 Register defaultRegister)
347 {
348 switch (mode) {
349 case JSCallMode::CALL_CONSTRUCTOR_WITH_ARGV:
350 case JSCallMode::CALL_THIS_WITH_ARGV:
351 return __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_TARGET);
352 case JSCallMode::CALL_FROM_AOT:
353 case JSCallMode::CALL_ENTRY: {
354 Register argvRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG1);
355 // 2: new Target index
356 __ Ldur(defaultRegister, MemoryOperand(argvRegister, -2 * FRAME_SLOT_SIZE));
357 return defaultRegister;
358 }
359 default:
360 UNREACHABLE();
361 }
362 return INVALID_REG;
363 }
364
365 // void PushCallArgsxAndDispatch(uintptr_t glue, uintptr_t sp, uint64_t callTarget, uintptr_t method,
366 // uint64_t callField, ...)
367 // GHC calling convention
368 // Input1: for callarg0/1/2/3 Input2: for callrange
369 // X19 - glue // X19 - glue
370 // FP - sp // FP - sp
371 // X20 - callTarget // X20 - callTarget
372 // X21 - method // X21 - method
373 // X22 - callField // X22 - callField
374 // X23 - arg0 // X23 - actualArgc
375 // X24 - arg1 // X24 - argv
376 // X25 - arg2
// Trampoline: push <this, argv range> and dispatch (GHC calling convention, see table above).
void AsmInterpreterCall::PushCallThisRangeAndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallThisRangeAndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_THIS_WITH_ARGV);
}
382
// Trampoline: push an argv range (no explicit this) and dispatch.
void AsmInterpreterCall::PushCallRangeAndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallRangeAndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_WITH_ARGV);
}
388
// Trampoline: constructor call with argv range, then dispatch.
void AsmInterpreterCall::PushCallNewAndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallNewAndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_CONSTRUCTOR_WITH_ARGV);
}
394
// Trampoline: three fixed args (X23-X25), no explicit this, then dispatch.
void AsmInterpreterCall::PushCallArgs3AndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallArgs3AndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_ARG3);
}
400
// Trampoline: two fixed args, no explicit this, then dispatch.
void AsmInterpreterCall::PushCallArgs2AndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallArgs2AndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_ARG2);
}
406
// Trampoline: one fixed arg, no explicit this, then dispatch.
void AsmInterpreterCall::PushCallArg1AndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallArg1AndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_ARG1);
}
412
// Trampoline: zero args, no explicit this, then dispatch.
void AsmInterpreterCall::PushCallArg0AndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallArg0AndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_ARG0);
}
418
// Trampoline: explicit this, zero args, then dispatch.
void AsmInterpreterCall::PushCallThisArg0AndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallThisArg0AndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_THIS_ARG0);
}
424
// Trampoline: explicit this, one arg, then dispatch.
void AsmInterpreterCall::PushCallThisArg1AndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallThisArg1AndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_THIS_ARG1);
}
430
// Trampoline: explicit this, two args, then dispatch.
void AsmInterpreterCall::PushCallThisArgs2AndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallThisArgs2AndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_THIS_ARG2);
}
436
// Trampoline: explicit this, three args, then dispatch.
void AsmInterpreterCall::PushCallThisArgs3AndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallThisArgs3AndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_THIS_ARG3);
}
442
443 // uint64_t PushCallRangeAndDispatchNative(uintptr_t glue, uint32_t argc, JSTaggedType calltarget, uintptr_t argv[])
444 // c++ calling convention call js function
445 // Input: X0 - glue
446 // X1 - nativeCode
447 // X2 - callTarget
448 // X3 - thisValue
449 // X4 - argc
450 // X5 - argV (...)
// Trampoline: native call with argv range (non-constructor); see the
// register-layout comment above.
void AsmInterpreterCall::PushCallRangeAndDispatchNative(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallRangeAndDispatchNative));
    CallNativeWithArgv(assembler, false);
}
456
// Trampoline: native constructor call with argv range (callNew = true).
void AsmInterpreterCall::PushCallNewAndDispatchNative(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallNewAndDispatchNative));
    CallNativeWithArgv(assembler, true);
}
462
// Builds a BUILTIN_FRAME_WITH_ARGV frame and calls a native function.
// Pushes args (from argv), this, newTarget (callTarget when callNew, else
// undefined), callTarget, then argc and glue, and passes the address of that
// on-stack area in X0 to the native code. On stack overflow it rebuilds a
// minimal frame for the GC map and calls the ThrowStackOverflowException
// runtime stub instead.
// Input: X0 - glue, X1 - nativeCode, X2 - callTarget, X3 - thisValue,
//        X4 - argc, X5 - argv
void AsmInterpreterCall::CallNativeWithArgv(ExtendedAssembler *assembler, bool callNew)
{
    Register glue(X0);
    Register nativeCode(X1);
    Register callTarget(X2);
    Register thisObj(X3);
    Register argc(X4);
    Register argv(X5);
    Register opArgc(X8);
    Register opArgv(X9);
    Register temp(X10);
    Register currentSlotRegister(X11);
    Register spRegister(SP);

    Label pushThis;
    Label stackOverflow;
    PushBuiltinFrame(assembler, glue, FrameType::BUILTIN_FRAME_WITH_ARGV, temp, argc);

    __ Mov(currentSlotRegister, spRegister);
    // Reserve enough sp space to prevent stack parameters from being covered by cpu profiler.
    __ Ldr(temp, MemoryOperand(glue, JSThread::GlueData::GetStackLimitOffset(false)));
    __ Mov(Register(SP), temp);

    // Work on copies so argc/argv stay valid for the FP computation below.
    __ Mov(opArgc, argc);
    __ Mov(opArgv, argv);
    PushArgsWithArgv(assembler, glue, opArgc, opArgv, temp, currentSlotRegister, &pushThis, &stackOverflow);

    __ Bind(&pushThis);
    // newTarget
    if (callNew) {
        // 16: this & newTarget
        __ Stp(callTarget, thisObj, MemoryOperand(currentSlotRegister, -16, AddrMode::PREINDEX));
    } else {
        __ Mov(temp, Immediate(JSTaggedValue::VALUE_UNDEFINED));
        // 16: this & newTarget
        __ Stp(temp, thisObj, MemoryOperand(currentSlotRegister, -16, AddrMode::PREINDEX));
    }
    // callTarget
    __ Str(callTarget, MemoryOperand(currentSlotRegister, -FRAME_SLOT_SIZE, AddrMode::PREINDEX));
    __ Add(temp, currentSlotRegister, Immediate(40)); // 40: skip frame type, numArgs, func, newTarget and this
    __ Add(Register(FP), temp, Operand(argc, LSL, 3)); // 3: argc * 8

    __ Add(temp, argc, Immediate(NUM_MANDATORY_JSFUNC_ARGS));
    // 2: thread & argc
    __ Stp(glue, temp, MemoryOperand(currentSlotRegister, -2 * FRAME_SLOT_SIZE, AddrMode::PREINDEX));
    // X0 = pointer to the on-stack call info (thread, argc, argv...) for the native callee.
    __ Add(Register(X0), currentSlotRegister, Immediate(0));

    // SP must stay 16-byte aligned per AAPCS64.
    __ Align16(currentSlotRegister);
    __ Mov(spRegister, currentSlotRegister);

    CallNativeInternal(assembler, nativeCode);
    __ Ret();

    __ Bind(&stackOverflow);
    {
        // use builtin_with_argv_frame to mark gc map
        Register frameType(X11);
        __ Ldr(temp, MemoryOperand(glue, JSThread::GlueData::GetLeaveFrameOffset(false)));
        __ Mov(spRegister, temp);
        __ Mov(frameType, Immediate(static_cast<int32_t>(FrameType::BUILTIN_FRAME_WITH_ARGV)));
        // 2: frame type and argc
        __ Stp(Register(Zero), frameType, MemoryOperand(Register(SP), -FRAME_SLOT_SIZE * 2, AddrMode::PREINDEX));
        __ Mov(temp, Immediate(JSTaggedValue::VALUE_UNDEFINED));
        // 2: fill this&newtgt slots
        __ Stp(temp, temp, MemoryOperand(spRegister, -FRAME_SLOT_SIZE * 2, AddrMode::PREINDEX));
        // 2: fill func&align slots
        __ Stp(Register(Zero), temp, MemoryOperand(spRegister, -FRAME_SLOT_SIZE * 2, AddrMode::PREINDEX));
        __ Mov(temp, spRegister);
        __ Add(Register(FP), temp, Immediate(48)); // 48: skip frame type, numArgs, func, newTarget, this and align

        Register runtimeId(X11);
        Register trampoline(X12);
        __ Mov(runtimeId, Immediate(kungfu::RuntimeStubCSigns::ID_ThrowStackOverflowException));
        // 3 : 3 means *8
        __ Add(trampoline, glue, Operand(runtimeId, LSL, 3));
        __ Ldr(trampoline, MemoryOperand(trampoline, JSThread::GlueData::GetRTStubEntriesOffset(false)));
        __ Blr(trampoline);

        // resume rsp
        __ Mov(Register(SP), Register(FP));
        __ RestoreFpAndLr();
        __ Ret();
    }
}
547
548 // uint64_t PushCallArgsAndDispatchNative(uintptr_t codeAddress, uintptr_t glue, uint32_t argc, ...)
549 // webkit_jscc calling convention call runtime_id's runtion function(c-abi)
550 // Input: X0 - codeAddress
551 // stack layout: sp + N*8 argvN
552 // ........
553 // sp + 24: argv1
554 // sp + 16: argv0
555 // sp + 8: actualArgc
556 // sp: thread
557 // construct Native Leave Frame
558 // +--------------------------+
559 // | argV[N - 1] |
560 // |--------------------------|
561 // | . . . . |
562 // |--------------------------+
563 // | argV[2]=this |
564 // +--------------------------+
565 // | argV[1]=new-target |
566 // +--------------------------+
567 // | argV[0]=call-target |
568 // +--------------------------+ ---------
569 // | argc | ^
570 // |--------------------------| |
571 // | thread | |
572 // |--------------------------| |
573 // | returnAddr | BuiltinFrame
574 // |--------------------------| |
575 // | callsiteFp | |
576 // |--------------------------| |
577 // | frameType | v
578 // +--------------------------+ ---------
579
// Calls a native function whose arguments are already laid out on the stack
// (webkit_jscc convention: sp[0] = thread, sp[8] = argc, sp[16...] = argv).
// Builds a BUILTIN_FRAME and passes the on-stack area's address in X0.
// Input: X0 - codeAddress; see the frame diagram above.
void AsmInterpreterCall::PushCallArgsAndDispatchNative(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallArgsAndDispatchNative));

    Register nativeCode(X0);
    Register glue(X1);
    Register argv(X5);
    Register temp(X6);
    Register sp(SP);
    Register nativeCodeTemp(X2);

    // X0 is repurposed below as the call-info pointer, so stash the code address first.
    __ Mov(nativeCodeTemp, nativeCode);

    // sp[0] holds the thread (glue); X0 = address of the on-stack call info.
    __ Ldr(glue, MemoryOperand(sp, 0));
    __ Add(Register(X0), sp, Immediate(0));
    PushBuiltinFrame(assembler, glue, FrameType::BUILTIN_FRAME, temp, argv);

    CallNativeInternal(assembler, nativeCodeTemp);
    __ Ret();
}
600
// Pushes fp/lr, records the new frame as the thread's leave frame, then pushes
// the frame-type marker (and `next`) for the requested builtin frame layout.
// `op` is a scratch register; `next` is the extra slot stored next to the type
// (stack-args pointer for BUILTIN_FRAME, caller-provided for the others).
// Note: BUILTIN_FRAME_WITH_ARGV deliberately leaves FP to be set by the caller.
void AsmInterpreterCall::PushBuiltinFrame(ExtendedAssembler *assembler, Register glue,
                                          FrameType type, Register op, Register next)
{
    Register sp(SP);
    __ PushFpAndLr();
    // Record this frame as the thread's current leave frame.
    __ Mov(op, sp);
    __ Str(op, MemoryOperand(glue, JSThread::GlueData::GetLeaveFrameOffset(false)));
    __ Mov(op, Immediate(static_cast<int32_t>(type)));
    if (type == FrameType::BUILTIN_FRAME) {
        // push stack args
        __ Add(next, sp, Immediate(BuiltinFrame::GetStackArgsToFpDelta(false)));
        // 16: type & next
        __ Stp(next, op, MemoryOperand(sp, -2 * FRAME_SLOT_SIZE, AddrMode::PREINDEX));
        __ Add(Register(FP), sp, Immediate(2 * FRAME_SLOT_SIZE)); // 16: skip next and frame type
    } else if (type == FrameType::BUILTIN_ENTRY_FRAME || type == FrameType::BUILTIN_CALL_LEAVE_FRAME) {
        // 16: type & next
        __ Stp(next, op, MemoryOperand(sp, -2 * FRAME_SLOT_SIZE, AddrMode::PREINDEX));
        __ Add(Register(FP), sp, Immediate(2 * FRAME_SLOT_SIZE)); // 16: skip next and frame type
    } else {
        ASSERT(type == FrameType::BUILTIN_FRAME_WITH_ARGV);
        // 16: type & next
        __ Stp(next, op, MemoryOperand(sp, -2 * FRAME_SLOT_SIZE, AddrMode::PREINDEX));
    }
}
625
// Calls the native code, then tears the builtin frame down by restoring SP
// from FP and popping fp/lr. The caller emits the final Ret.
void AsmInterpreterCall::CallNativeInternal(ExtendedAssembler *assembler, Register nativeCode)
{
    __ Blr(nativeCode);
    // resume rsp
    __ Mov(Register(SP), Register(FP));
    __ RestoreFpAndLr();
}
633
634 // ResumeRspAndDispatch(uintptr_t glue, uintptr_t sp, uintptr_t pc, uintptr_t constantPool,
635 // uint64_t profileTypeInfo, uint64_t acc, uint32_t hotnessCounter, size_t jumpSize)
636 // GHC calling convention
637 // X19 - glue
638 // FP - sp
639 // X20 - pc
640 // X21 - constantPool
641 // X22 - profileTypeInfo
642 // X23 - acc
643 // X24 - hotnessCounter
644 // X25 - jumpSizeAfterCall
// Pops the finished interpreted frame, restores the machine SP from the saved
// fp, advances pc by jumpSize and dispatches to the next bytecode handler.
// A non-positive jumpSize marks a return from a newobjrange-style constructor:
// a non-ECMA-object return value is replaced by `this` (when the constructor
// is a base-class constructor) or turned into an exception dispatch.
// GHC calling convention: X19 glue, FP sp, X20 pc, X21 constantPool,
// X22 profileTypeInfo, X23 acc, X24 hotnessCounter, X25 jumpSizeAfterCall.
void AsmInterpreterCall::ResumeRspAndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(ResumeRspAndDispatch));

    Register glueRegister = __ GlueRegister();
    Register sp(FP);
    Register rsp(SP);
    Register pc(X20);
    Register jumpSizeRegister(X25);

    Register ret(X23);
    Register opcode(X6, W);
    Register temp(X7);
    Register bcStub(X7);
    Register fp(X8);

    // Offsets are relative to the frame end (sp points past AsmInterpretedFrame),
    // hence negative.
    int64_t fpOffset = static_cast<int64_t>(AsmInterpretedFrame::GetFpOffset(false))
        - static_cast<int64_t>(AsmInterpretedFrame::GetSize(false));
    int64_t spOffset = static_cast<int64_t>(AsmInterpretedFrame::GetBaseOffset(false))
        - static_cast<int64_t>(AsmInterpretedFrame::GetSize(false));
    int64_t thisOffset = static_cast<int64_t>(AsmInterpretedFrame::GetThisOffset(false))
        - static_cast<int64_t>(AsmInterpretedFrame::GetSize(false));
    ASSERT(fpOffset < 0);
    ASSERT(spOffset < 0);

    Label newObjectRangeReturn;
    Label dispatch;
    __ Ldur(fp, MemoryOperand(sp, fpOffset)); // store fp for temporary
    __ Cmp(jumpSizeRegister, Immediate(0));
    __ B(Condition::LE, &newObjectRangeReturn);
    __ Ldur(sp, MemoryOperand(sp, spOffset)); // update sp

    __ Add(pc, pc, Operand(jumpSizeRegister, LSL, 0));
    __ Ldrb(opcode, MemoryOperand(pc, 0));
    __ Bind(&dispatch);
    {
        __ Mov(rsp, fp); // resume rsp
        __ Add(bcStub, glueRegister, Operand(opcode, UXTW, FRAME_SLOT_SIZE_LOG2));
        __ Ldr(bcStub, MemoryOperand(bcStub, JSThread::GlueData::GetBCStubEntriesOffset(false)));
        __ Br(bcStub);
    }

    Label getThis;
    Label notUndefined;
    __ Bind(&newObjectRangeReturn);
    {
        // Constructor returned undefined: use the frame's `this` as the result.
        __ Cmp(ret, Immediate(JSTaggedValue::VALUE_UNDEFINED));
        __ B(Condition::NE, &notUndefined);
        ASSERT(thisOffset < 0);
        __ Bind(&getThis);
        __ Ldur(ret, MemoryOperand(sp, thisOffset)); // update acc
        __ Ldur(sp, MemoryOperand(sp, spOffset)); // update sp
        __ Mov(rsp, fp); // resume rsp
        __ Sub(pc, pc, jumpSizeRegister); // sub negative jmupSize
        __ Ldrb(opcode, MemoryOperand(pc, 0));
        __ Add(bcStub, glueRegister, Operand(opcode, UXTW, FRAME_SLOT_SIZE_LOG2));
        __ Ldr(bcStub, MemoryOperand(bcStub, JSThread::GlueData::GetBCStubEntriesOffset(false)));
        __ Br(bcStub);
    }
    __ Bind(&notUndefined);
    {
        Label notEcmaObject;
        // Heap-object check: tagged bits must be clear.
        __ Mov(temp, Immediate(JSTaggedValue::TAG_HEAPOBJECT_MASK));
        __ And(temp, temp, ret);
        __ Cmp(temp, Immediate(0));
        __ B(Condition::NE, &notEcmaObject);
        // acc is heap object
        __ Ldr(temp, MemoryOperand(ret, TaggedObject::HCLASS_OFFSET));
        __ Ldr(temp, MemoryOperand(temp, JSHClass::BIT_FIELD_OFFSET));
        __ And(temp.W(), temp.W(), LogicalImmediate::Create(0xFF, RegWSize));
        __ Cmp(temp.W(), Immediate(static_cast<int64_t>(JSType::ECMA_OBJECT_LAST)));
        __ B(Condition::HI, &notEcmaObject);
        __ Cmp(temp.W(), Immediate(static_cast<int64_t>(JSType::ECMA_OBJECT_FIRST)));
        __ B(Condition::LO, &notEcmaObject);
        // acc is ecma object
        __ Ldur(sp, MemoryOperand(sp, spOffset)); // update sp
        __ Sub(pc, pc, jumpSizeRegister); // sub negative jmupSize
        __ Ldrb(opcode, MemoryOperand(pc, 0));
        __ B(&dispatch);

        __ Bind(&notEcmaObject);
        {
            int64_t constructorOffset = static_cast<int64_t>(AsmInterpretedFrame::GetFunctionOffset(false))
                - static_cast<int64_t>(AsmInterpretedFrame::GetSize(false));
            ASSERT(constructorOffset < 0);
            // Extract the constructor's FunctionKind from its method literal.
            __ Ldur(temp, MemoryOperand(sp, constructorOffset)); // load constructor
            __ Ldr(temp, MemoryOperand(temp, JSFunctionBase::METHOD_OFFSET));
            __ Ldr(temp, MemoryOperand(temp, Method::EXTRA_LITERAL_INFO_OFFSET));
            __ Lsr(temp.W(), temp.W(), MethodLiteral::FunctionKindBits::START_BIT);
            __ And(temp.W(), temp.W(),
                LogicalImmediate::Create((1LU << MethodLiteral::FunctionKindBits::SIZE) - 1, RegWSize));
            __ Cmp(temp.W(), Immediate(static_cast<int64_t>(FunctionKind::CLASS_CONSTRUCTOR)));
            __ B(Condition::LS, &getThis); // constructor is base
            // exception branch
            {
                __ Mov(opcode, kungfu::BytecodeStubCSigns::ID_NewObjectRangeThrowException);
                __ Ldur(sp, MemoryOperand(sp, spOffset)); // update sp
                __ B(&dispatch);
            }
        }
    }
}
747
748 // ResumeRspAndReturn(uintptr_t acc)
749 // GHC calling convention
750 // X19 - acc
751 // FP - prevSp
752 // X20 - sp
// Restores the machine SP from the current frame's saved fp and returns the
// accumulator to the C++ caller in X0.
// GHC calling convention: X19 - acc, FP - prevSp, X20 - sp.
void AsmInterpreterCall::ResumeRspAndReturn(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(ResumeRspAndReturn));
    Register rsp(SP);
    Register currentSp(X20);

    [[maybe_unused]] TempRegister1Scope scope1(assembler);
    Register fpRegister = __ TempRegister1();
    // fp slot offset relative to the frame end; negative by construction.
    int64_t offset = static_cast<int64_t>(AsmInterpretedFrame::GetFpOffset(false))
        - static_cast<int64_t>(AsmInterpretedFrame::GetSize(false));
    ASSERT(offset < 0);
    __ Ldur(fpRegister, MemoryOperand(currentSp, offset));
    __ Mov(rsp, fpRegister);

    // return
    {
        __ RestoreFpAndLr();
        __ Mov(Register(X0), Register(X19));
        __ Ret();
    }
}
774
775 // ResumeCaughtFrameAndDispatch(uintptr_t glue, uintptr_t sp, uintptr_t pc, uintptr_t constantPool,
776 // uint64_t profileTypeInfo, uint64_t acc, uint32_t hotnessCounter)
777 // GHC calling convention
778 // X19 - glue
779 // FP - sp
780 // X20 - pc
781 // X21 - constantPool
782 // X22 - profileTypeInfo
783 // X23 - acc
784 // X24 - hotnessCounter
// Resumes execution in the frame that catches a pending exception: restores SP
// from glue->lastFp (if recorded) and re-dispatches on the bytecode at pc.
void AsmInterpreterCall::ResumeCaughtFrameAndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(ResumeCaughtFrameAndDispatch));

    Register glue(X19);
    Register pc(X20);
    Register fp(X5);
    Register opcode(X6, W);
    Register bcStub(X7);

    Label dispatch;
    __ Ldr(fp, MemoryOperand(glue, JSThread::GlueData::GetLastFpOffset(false)));
    // lastFp == 0: no saved machine fp to restore, keep the current SP.
    __ Cmp(fp, Immediate(0));
    __ B(Condition::EQ, &dispatch);
    // up frame
    __ Mov(Register(SP), fp);
    // fall through
    __ Bind(&dispatch);
    {
        // Load the opcode byte at pc and jump to its handler from the
        // bytecode-stub entry table stored in glue (index scaled by slot size).
        __ Ldrb(opcode, MemoryOperand(pc, 0));
        __ Add(bcStub, glue, Operand(opcode, UXTW, FRAME_SLOT_SIZE_LOG2));
        __ Ldr(bcStub, MemoryOperand(bcStub, JSThread::GlueData::GetBCStubEntriesOffset(false)));
        __ Br(bcStub);
    }
}
810
811 // ResumeUncaughtFrameAndReturn(uintptr_t glue)
812 // GHC calling convention
813 // X19 - glue
814 // FP - sp
815 // X20 - acc
// Unwinds to the last recorded frame when an exception escapes the interpreter
// entirely, and returns to Execute (C++ caller) with acc as the return value.
void AsmInterpreterCall::ResumeUncaughtFrameAndReturn(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(ResumeUncaughtFrameAndReturn));

    Register glue(X19);
    Register fp(X5);
    Register acc(X20);
    Register cppRet(X0);

    // Restore SP from glue->lastFp recorded by the unwinder.
    __ Ldr(fp, MemoryOperand(glue, JSThread::GlueData::GetLastFpOffset(false)));
    __ Mov(Register(SP), fp);
    // this method will return to Execute(cpp calling convention), and the return value should be put into X0.
    __ Mov(cppRet, acc);
    __ RestoreFpAndLr();
    __ Ret();
}
832
833 // c++ calling convention
834 // X0 - glue
835 // X1 - callTarget
836 // X2 - method
837 // X3 - callField
838 // X4 - receiver
839 // X5 - value
// Invokes a JS getter from C++: wraps the call in an asm-interp bridge frame,
// then dispatches through the common JS call path in CALL_GETTER mode.
void AsmInterpreterCall::CallGetter(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(CallGetter));
    Label target;

    // The bridge frame bounds the interpreter activation; Bl returns here when
    // the getter completes, after which the frame is popped and we return.
    PushAsmInterpBridgeFrame(assembler);
    __ Bl(&target);
    PopAsmInterpBridgeFrame(assembler);
    __ Ret();
    __ Bind(&target);
    {
        JSCallCommonEntry(assembler, JSCallMode::CALL_GETTER);
    }
}
854
// Invokes a JS setter from C++; same bridge-frame structure as CallGetter but
// dispatches in CALL_SETTER mode (one value argument, no return value used).
void AsmInterpreterCall::CallSetter(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(CallSetter));
    Label target;
    PushAsmInterpBridgeFrame(assembler);
    __ Bl(&target);
    PopAsmInterpBridgeFrame(assembler);
    __ Ret();
    __ Bind(&target);
    {
        JSCallCommonEntry(assembler, JSCallMode::CALL_SETTER);
    }
}
868
// Invokes a JS callback with `this` plus three arguments and a return value
// (used by container built-ins); same bridge-frame structure as CallGetter.
void AsmInterpreterCall::CallContainersArgs3(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(CallContainersArgs3));
    Label target;
    PushAsmInterpBridgeFrame(assembler);
    __ Bl(&target);
    PopAsmInterpBridgeFrame(assembler);
    __ Ret();
    __ Bind(&target);
    {
        JSCallCommonEntry(assembler, JSCallMode::CALL_THIS_ARG3_WITH_RETURN);
    }
}
882
883 // Generate code for generator re-entering asm interpreter
884 // c++ calling convention
885 // Input: %X0 - glue
886 // %X1 - context(GeneratorContext)
void AsmInterpreterCall::GeneratorReEnterAsmInterp(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(GeneratorReEnterAsmInterp));
    Label target;
    // The push/pop pair below must emit a fixed number of bytes: the stack
    // walker relies on ARM64EntryFrameDuration to locate the entry frame.
    size_t begin = __ GetCurrentPosition();
    PushAsmInterpEntryFrame(assembler);
    __ Bl(&target);
    PopAsmInterpEntryFrame(assembler);
    size_t end = __ GetCurrentPosition();
    if ((end - begin) != FrameCompletionPos::ARM64EntryFrameDuration) {
        LOG_COMPILER(FATAL) << (end - begin) << " != " << FrameCompletionPos::ARM64EntryFrameDuration
                            << "This frame has been modified, and the offset EntryFrameDuration should be updated too.";
    }
    __ Ret();
    __ Bind(&target);
    {
        // Rebuild the interpreter frame from the GeneratorContext and dispatch.
        GeneratorReEnterAsmInterpDispatch(assembler);
    }
}
906
// Reconstructs an ASM_INTERPRETER_FRAME from a suspended GeneratorContext
// (registers snapshot, this, method, resume pc) and jumps to the bytecode stub
// for the instruction at the resume point.
void AsmInterpreterCall::GeneratorReEnterAsmInterpDispatch(ExtendedAssembler *assembler)
{
    Label pushFrameState;
    Label stackOverflow;
    Register glue = __ GlueRegister();
    Register contextRegister(X1);
    Register spRegister(SP);
    Register pc(X8);
    Register prevSpRegister(FP);
    Register callTarget(X4);
    Register method(X5);
    Register temp(X6); // can not be used to store any variable
    Register currentSlotRegister(X7);
    Register fpRegister(X9);
    Register thisRegister(X25);
    Register nRegsRegister(X26, W);
    Register regsArrayRegister(X27);
    Register newSp(X28);
    __ Ldr(callTarget, MemoryOperand(contextRegister, GeneratorContext::GENERATOR_METHOD_OFFSET));
    __ Ldr(method, MemoryOperand(callTarget, JSFunctionBase::METHOD_OFFSET));
    __ PushFpAndLr();
    // save fp
    __ Mov(fpRegister, spRegister);
    __ Mov(currentSlotRegister, spRegister);
    // Reserve enough sp space to prevent stack parameters from being covered by cpu profiler.
    __ Ldr(temp, MemoryOperand(glue, JSThread::GlueData::GetStackLimitOffset(false)));
    __ Mov(Register(SP), temp);
    // push context regs
    __ Ldr(nRegsRegister, MemoryOperand(contextRegister, GeneratorContext::GENERATOR_NREGS_OFFSET));
    __ Ldr(thisRegister, MemoryOperand(contextRegister, GeneratorContext::GENERATOR_THIS_OFFSET));
    __ Ldr(regsArrayRegister, MemoryOperand(contextRegister, GeneratorContext::GENERATOR_REGS_ARRAY_OFFSET));
    // Skip the TaggedArray header so regsArrayRegister points at element 0.
    __ Add(regsArrayRegister, regsArrayRegister, Immediate(TaggedArray::DATA_OFFSET));
    // Copy the saved vregs onto the stack; branches to stackOverflow on limit breach.
    PushArgsWithArgv(assembler, glue, nRegsRegister, regsArrayRegister, temp,
                     currentSlotRegister, &pushFrameState, &stackOverflow);

    __ Bind(&pushFrameState);
    __ Mov(newSp, currentSlotRegister);
    // push frame state
    PushGeneratorFrameState(assembler, prevSpRegister, fpRegister, currentSlotRegister, callTarget, thisRegister,
                            method, contextRegister, pc, temp);
    // AArch64 requires SP to stay 16-byte aligned.
    __ Align16(currentSlotRegister);
    __ Mov(Register(SP), currentSlotRegister);
    // call bc stub
    CallBCStub(assembler, newSp, glue, method, pc, temp);

    __ Bind(&stackOverflow);
    {
        ThrowStackOverflowExceptionAndReturn(assembler, glue, fpRegister, temp);
    }
}
957
// Pushes the implicit call arguments (this, new.target, callTarget) demanded by
// the callee's callField bits, then falls into PushVregs. Arguments the call
// mode does not supply are pushed as undefined.
void AsmInterpreterCall::PushCallThis(ExtendedAssembler *assembler, JSCallMode mode, Label *stackOverflow)
{
    Register callFieldRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_FIELD);
    Register callTargetRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_TARGET);
    Register thisRegister = __ AvailableRegister2();
    Register currentSlotRegister = __ AvailableRegister3();

    Label pushVregs;
    Label pushNewTarget;
    Label pushCallTarget;
    bool haveThis = kungfu::AssemblerModule::JSModeHaveThisArg(mode);
    bool haveNewTarget = kungfu::AssemblerModule::JSModeHaveNewTargetArg(mode);
    if (!haveThis) {
        __ Mov(thisRegister, Immediate(JSTaggedValue::VALUE_UNDEFINED)); // default this: undefined
    } else {
        // Normalize: make sure `this` ends up in thisRegister regardless of
        // which register the call mode passed it in.
        Register thisArgRegister = GetThisRegsiter(assembler, mode, thisRegister);
        if (thisRegister.GetId() != thisArgRegister.GetId()) {
            __ Mov(thisRegister, thisArgRegister);
        }
    }
    // No call-type bits set: the method takes none of this/new.target/func.
    __ Tst(callFieldRegister, LogicalImmediate::Create(CALL_TYPE_MASK, RegXSize));
    __ B(Condition::EQ, &pushVregs);
    // Push `this` only if the method declares it (HaveThisBit).
    __ Tbz(callFieldRegister, MethodLiteral::HaveThisBit::START_BIT, &pushNewTarget);
    if (!haveThis) {
        [[maybe_unused]] TempRegister1Scope scope1(assembler);
        Register tempRegister = __ TempRegister1();
        __ Mov(tempRegister, Immediate(JSTaggedValue::VALUE_UNDEFINED));
        __ Str(tempRegister, MemoryOperand(currentSlotRegister, -FRAME_SLOT_SIZE, AddrMode::PREINDEX));
    } else {
        __ Str(thisRegister, MemoryOperand(currentSlotRegister, -FRAME_SLOT_SIZE, AddrMode::PREINDEX));
    }
    __ Bind(&pushNewTarget);
    {
        // Push new.target only if the method declares it (HaveNewTargetBit).
        __ Tbz(callFieldRegister, MethodLiteral::HaveNewTargetBit::START_BIT, &pushCallTarget);
        if (!haveNewTarget) {
            [[maybe_unused]] TempRegister1Scope scope1(assembler);
            Register newTarget = __ TempRegister1();
            __ Mov(newTarget, Immediate(JSTaggedValue::VALUE_UNDEFINED));
            __ Str(newTarget, MemoryOperand(currentSlotRegister, -FRAME_SLOT_SIZE, AddrMode::PREINDEX));
        } else {
            [[maybe_unused]] TempRegister1Scope scope1(assembler);
            Register defaultRegister = __ TempRegister1();
            Register newTargetRegister = GetNewTargetRegsiter(assembler, mode, defaultRegister);
            __ Str(newTargetRegister, MemoryOperand(currentSlotRegister, -FRAME_SLOT_SIZE, AddrMode::PREINDEX));
        }
    }
    __ Bind(&pushCallTarget);
    {
        // Push the function itself only if the method declares it (HaveFuncBit).
        __ Tbz(callFieldRegister, MethodLiteral::HaveFuncBit::START_BIT, &pushVregs);
        __ Str(callTargetRegister, MemoryOperand(currentSlotRegister, -FRAME_SLOT_SIZE, AddrMode::PREINDEX));
    }
    __ Bind(&pushVregs);
    {
        PushVregs(assembler, stackOverflow);
    }
}
1014
// Pushes the callee's declared vregs (initialized to undefined), writes the
// AsmInterpretedFrame state below them, aligns SP, and dispatches to the first
// bytecode of the callee.
void AsmInterpreterCall::PushVregs(ExtendedAssembler *assembler, Label *stackOverflow)
{
    Register glue = __ GlueRegister();
    Register prevSpRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::SP);
    Register callTargetRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_TARGET);
    Register methodRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::METHOD);
    Register callFieldRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_FIELD);
    Register fpRegister = __ AvailableRegister1();
    Register thisRegister = __ AvailableRegister2();
    Register currentSlotRegister = __ AvailableRegister3();

    Label pushFrameStateAndCall;
    [[maybe_unused]] TempRegister1Scope scope1(assembler);
    Register tempRegister = __ TempRegister1();
    // args register can be reused now.
    Register newSpRegister = __ AvailableRegister4();
    Register numVregsRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG1);
    // Decode the vreg count from callField, then fill that many slots with
    // undefined; branches to stackOverflow on limit breach.
    GetNumVregsFromCallField(assembler, callFieldRegister, numVregsRegister);
    PushUndefinedWithArgc(assembler, glue, numVregsRegister, tempRegister, currentSlotRegister, &pushFrameStateAndCall,
                          stackOverflow);
    // fall through
    __ Bind(&pushFrameStateAndCall);
    {
        // newSp marks the frame's logical sp (top of the vreg area).
        __ Mov(newSpRegister, currentSlotRegister);

        [[maybe_unused]] TempRegister2Scope scope2(assembler);
        Register pcRegister = __ TempRegister2();
        PushFrameState(assembler, prevSpRegister, fpRegister, currentSlotRegister, callTargetRegister, thisRegister,
                       methodRegister, pcRegister, tempRegister);

        // AArch64 requires SP to stay 16-byte aligned.
        __ Align16(currentSlotRegister);
        __ Mov(Register(SP), currentSlotRegister);
        DispatchCall(assembler, pcRegister, newSpRegister);
    }
}
1050
1051 // Input: X19 - glue
1052 // FP - sp
1053 // X20 - callTarget
1054 // X21 - method
// Loads the interpreter's GHC-convention register file (glue/sp/pc/constpool/
// profileTypeInfo/acc/hotnessCounter in X19..X24 and FP) from the method, then
// jumps to the bytecode stub for the opcode at pcRegister.
void AsmInterpreterCall::DispatchCall(ExtendedAssembler *assembler, Register pcRegister,
    Register newSpRegister, Register accRegister)
{
    Register glueRegister = __ GlueRegister();
    Register methodRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::METHOD);

    if (glueRegister.GetId() != X19) {
        __ Mov(Register(X19), glueRegister);
    }
    // X24 - hotnessCounter (16-bit load from the method's literal info).
    __ Ldrh(Register(X24, W), MemoryOperand(methodRegister, Method::LITERAL_INFO_OFFSET));
    if (accRegister == INVALID_REG) {
        // No incoming acc: seed it with Hole.
        __ Mov(Register(X23), Immediate(JSTaggedValue::VALUE_HOLE));
    } else {
        // Caller already placed acc in X23.
        ASSERT(accRegister == Register(X23));
    }
    __ Ldr(Register(X22), MemoryOperand(methodRegister, Method::PROFILE_TYPE_INFO_OFFSET));
    __ Ldr(Register(X21), MemoryOperand(methodRegister, Method::CONSTANT_POOL_OFFSET));
    __ Mov(Register(X20), pcRegister);
    __ Mov(Register(FP), newSpRegister);

    Register bcIndexRegister = __ AvailableRegister1();
    Register tempRegister = __ AvailableRegister2();
    // Index the bytecode-stub table in glue by the opcode byte and jump.
    __ Ldrb(bcIndexRegister.W(), MemoryOperand(pcRegister, 0));
    __ Add(tempRegister, glueRegister, Operand(bcIndexRegister.W(), UXTW, FRAME_SLOT_SIZE_LOG2));
    __ Ldr(tempRegister, MemoryOperand(tempRegister, JSThread::GlueData::GetBCStubEntriesOffset(false)));
    __ Br(tempRegister);
}
1082
// Writes an AsmInterpretedFrame below currentSlot (which is decremented as it
// goes): frameType, prevSp, pc, fp, jumpSize, env, acc(=Hole), this, callTarget.
// `op` is a scratch register; `pc` receives the method's bytecode start address.
void AsmInterpreterCall::PushFrameState(ExtendedAssembler *assembler, Register prevSp, Register fp,
    Register currentSlot, Register callTarget, Register thisObj, Register method, Register pc, Register op)
{
    __ Mov(op, Immediate(static_cast<int32_t>(FrameType::ASM_INTERPRETER_FRAME)));
    __ Stp(prevSp, op, MemoryOperand(currentSlot, -2 * FRAME_SLOT_SIZE, AddrMode::PREINDEX)); // -2: frame type & prevSp
    __ Ldr(pc, MemoryOperand(method, Method::NATIVE_POINTER_OR_BYTECODE_ARRAY_OFFSET));
    __ Stp(fp, pc, MemoryOperand(currentSlot, -2 * FRAME_SLOT_SIZE, AddrMode::PREINDEX)); // -2: pc & fp
    __ Ldr(op, MemoryOperand(callTarget, JSFunction::LEXICAL_ENV_OFFSET));
    // jumpSizeAfterCall is zero-initialized here (Register(Zero)).
    __ Stp(op, Register(Zero), MemoryOperand(currentSlot,
                                             -2 * FRAME_SLOT_SIZE, // -2: jumpSizeAfterCall & env
                                             AddrMode::PREINDEX));
    __ Mov(op, Immediate(JSTaggedValue::VALUE_HOLE));
    __ Stp(thisObj, op, MemoryOperand(currentSlot, -2 * FRAME_SLOT_SIZE, AddrMode::PREINDEX)); // -2: acc & this
    __ Str(callTarget, MemoryOperand(currentSlot, -FRAME_SLOT_SIZE, AddrMode::PREINDEX)); // -1: callTarget
}
1098
// Extracts the declared vreg count from the packed callField word:
// shift down to NumVregsBits then mask off the field width.
void AsmInterpreterCall::GetNumVregsFromCallField(ExtendedAssembler *assembler, Register callField, Register numVregs)
{
    __ Mov(numVregs, callField);
    __ Lsr(numVregs, numVregs, MethodLiteral::NumVregsBits::START_BIT);
    __ And(numVregs.W(), numVregs.W(), LogicalImmediate::Create(
        MethodLiteral::NumVregsBits::Mask() >> MethodLiteral::NumVregsBits::START_BIT, RegWSize));
}
1106
// Extracts the declared argument count from the packed callField word:
// shift down to NumArgsBits then mask off the field width.
void AsmInterpreterCall::GetDeclaredNumArgsFromCallField(ExtendedAssembler *assembler, Register callField,
    Register declaredNumArgs)
{
    __ Mov(declaredNumArgs, callField);
    __ Lsr(declaredNumArgs, declaredNumArgs, MethodLiteral::NumArgsBits::START_BIT);
    __ And(declaredNumArgs.W(), declaredNumArgs.W(), LogicalImmediate::Create(
        MethodLiteral::NumArgsBits::Mask() >> MethodLiteral::NumArgsBits::START_BIT, RegWSize));
}
1115
// Builds an ASM_INTERPRETER_ENTRY_FRAME on the stack: callee-saved registers
// (only when entering from C++), fp/lr, prevSp, frame type, pc(=0) and glue.
// The emitted byte count up to the glue store must equal ARM64CppToAsmInterp.
void AsmInterpreterCall::PushAsmInterpEntryFrame(ExtendedAssembler *assembler)
{
    Register glue = __ GlueRegister();
    Register fp(X29);
    Register sp(SP);

    size_t begin = __ GetCurrentPosition();
    if (!assembler->FromInterpreterHandler()) {
        // Entering from C++ code: preserve callee-saved registers.
        __ CalleeSave();
    }

    [[maybe_unused]] TempRegister1Scope scope1(assembler);
    Register prevFrameRegister = __ TempRegister1();
    [[maybe_unused]] TempRegister2Scope scope2(assembler);
    Register frameTypeRegister = __ TempRegister2();

    __ PushFpAndLr();

    // prev managed fp is leave frame or nullptr(the first frame)
    __ Ldr(prevFrameRegister, MemoryOperand(glue, JSThread::GlueData::GetLeaveFrameOffset(false)));
    __ Mov(frameTypeRegister, Immediate(static_cast<int64_t>(FrameType::ASM_INTERPRETER_ENTRY_FRAME)));
    // 2 : prevSp & frame type
    __ Stp(prevFrameRegister, frameTypeRegister, MemoryOperand(sp, -2 * FRAME_SLOT_SIZE, AddrMode::PREINDEX));
    // 2 : pc & glue (pc slot is zero-filled via Register(Zero))
    __ Stp(glue, Register(Zero), MemoryOperand(sp, -2 * FRAME_SLOT_SIZE, AddrMode::PREINDEX)); // pc
    if (!assembler->FromInterpreterHandler()) {
        // Guard the fixed frame-prologue size the stack walker depends on.
        size_t end = __ GetCurrentPosition();
        if ((end - begin) != FrameCompletionPos::ARM64CppToAsmInterp) {
            LOG_COMPILER(FATAL) << (end - begin) << " != " << FrameCompletionPos::ARM64CppToAsmInterp
                                << "This frame has been modified, and the offset CppToAsmInterp should be updated too.";
        }
    }
    __ Add(fp, sp, Immediate(4 * FRAME_SLOT_SIZE)); // 32: skip frame type, prevSp, pc and glue
}
1150
// Tears down the ASM_INTERPRETER_ENTRY_FRAME pushed by PushAsmInterpEntryFrame:
// restores glue->leaveFrame from the saved prevSp, then fp/lr and (when
// returning to C++) the callee-saved registers. Emitted-size checked against
// ARM64AsmInterpToCpp.
void AsmInterpreterCall::PopAsmInterpEntryFrame(ExtendedAssembler *assembler)
{
    Register sp(SP);

    [[maybe_unused]] TempRegister1Scope scope1(assembler);
    Register prevFrameRegister = __ TempRegister1();
    [[maybe_unused]] TempRegister2Scope scope2(assembler);
    Register glue = __ TempRegister2();
    // 2: glue & pc (pc slot discarded into Register(Zero))
    __ Ldp(glue, Register(Zero), MemoryOperand(sp, 2 * FRAME_SLOT_SIZE, AddrMode::POSTINDEX));
    // 2: skip frame type & prev
    __ Ldp(prevFrameRegister, Register(Zero), MemoryOperand(sp, 2 * FRAME_SLOT_SIZE, AddrMode::POSTINDEX));
    __ Str(prevFrameRegister, MemoryOperand(glue, JSThread::GlueData::GetLeaveFrameOffset(false)));
    size_t begin = __ GetCurrentPosition();
    __ RestoreFpAndLr();
    if (!assembler->FromInterpreterHandler()) {
        __ CalleeRestore();
        // Guard the fixed frame-epilogue size the stack walker depends on.
        size_t end = __ GetCurrentPosition();
        if ((end - begin) != FrameCompletionPos::ARM64AsmInterpToCpp) {
            LOG_COMPILER(FATAL) << (end - begin) << " != " << FrameCompletionPos::ARM64AsmInterpToCpp
                                << "This frame has been modified, and the offset AsmInterpToCpp should be updated too.";
        }
    }
}
1175
// Writes the AsmInterpretedFrame for a resumed generator: like PushFrameState
// but pc is advanced to the suspend point, jumpSize is zeroed, and env/acc are
// restored from the GeneratorContext instead of freshly initialized.
void AsmInterpreterCall::PushGeneratorFrameState(ExtendedAssembler *assembler, Register &prevSpRegister,
    Register &fpRegister, Register &currentSlotRegister, Register &callTargetRegister, Register &thisRegister,
    Register &methodRegister, Register &contextRegister, Register &pcRegister, Register &operatorRegister)
{
    __ Mov(operatorRegister, Immediate(static_cast<int64_t>(FrameType::ASM_INTERPRETER_FRAME)));
    __ Stp(prevSpRegister, operatorRegister,
           MemoryOperand(currentSlotRegister, -2 * FRAME_SLOT_SIZE, AddrMode::PREINDEX)); // 2 : frameType and prevSp
    __ Ldr(pcRegister, MemoryOperand(methodRegister, Method::NATIVE_POINTER_OR_BYTECODE_ARRAY_OFFSET));
    // offset need 8 align, GENERATOR_NREGS_OFFSET instead of GENERATOR_BC_OFFSET_OFFSET
    __ Ldr(operatorRegister, MemoryOperand(contextRegister, GeneratorContext::GENERATOR_NREGS_OFFSET));
    // 32: get high 32bit (the bc offset stored in the word above nRegs)
    __ Lsr(operatorRegister, operatorRegister, 32);
    // pc = bytecode start + saved bc offset -> resume point.
    __ Add(pcRegister, operatorRegister, pcRegister);
    // 2 : pc and fp
    __ Stp(fpRegister, pcRegister, MemoryOperand(currentSlotRegister, -2 * FRAME_SLOT_SIZE, AddrMode::PREINDEX));
    // jumpSizeAfterCall
    __ Str(Register(Zero), MemoryOperand(currentSlotRegister, -FRAME_SLOT_SIZE, AddrMode::PREINDEX));
    __ Ldr(operatorRegister, MemoryOperand(contextRegister, GeneratorContext::GENERATOR_LEXICALENV_OFFSET));
    // env
    __ Str(operatorRegister, MemoryOperand(currentSlotRegister, -FRAME_SLOT_SIZE, AddrMode::PREINDEX));
    __ Ldr(operatorRegister, MemoryOperand(contextRegister, GeneratorContext::GENERATOR_ACC_OFFSET));
    // acc
    __ Str(operatorRegister, MemoryOperand(currentSlotRegister, -FRAME_SLOT_SIZE, AddrMode::PREINDEX));
    __ Stp(callTargetRegister, thisRegister,
           MemoryOperand(currentSlotRegister, -2 * FRAME_SLOT_SIZE, AddrMode::PREINDEX)); // 2 : this and callTarget
}
1202
// Loads the interpreter's GHC-convention register file (X19..X24, FP) from the
// method and jumps to the bytecode stub for the opcode at pc. Unlike
// DispatchCall, this always seeds acc with Hole.
void AsmInterpreterCall::CallBCStub(ExtendedAssembler *assembler, Register &newSp, Register &glue,
    Register &method, Register &pc, Register &temp)
{
    // prepare call entry
    __ Mov(Register(X19), glue);              // X19 - glue
    __ Mov(Register(FP), newSp);              // FP - sp
    __ Mov(Register(X20), pc);                // X20 - pc
    __ Ldr(Register(X21), MemoryOperand(method, Method::CONSTANT_POOL_OFFSET));     // X21 - constantpool
    __ Ldr(Register(X22), MemoryOperand(method, Method::PROFILE_TYPE_INFO_OFFSET)); // X22 - profileTypeInfo
    __ Mov(Register(X23), Immediate(JSTaggedValue::Hole().GetRawData()));           // X23 - acc
    __ Ldr(Register(X24), MemoryOperand(method, Method::LITERAL_INFO_OFFSET));      // X24 - hotnessCounter
    
    // call the first bytecode handler
    __ Ldrb(temp.W(), MemoryOperand(pc, 0));
    // scale the opcode by the slot size (shift by FRAME_SLOT_SIZE_LOG2 == *8) to index the stub table
    __ Add(temp, glue, Operand(temp.W(), UXTW, FRAME_SLOT_SIZE_LOG2));
    __ Ldr(temp, MemoryOperand(temp, JSThread::GlueData::GetBCStubEntriesOffset(false)));
    __ Br(temp);
}
1222
// Calls a native (C++) builtin: reserves the function/align and argc/thread
// slots expected by EcmaRuntimeCallInfo, pushes a BUILTIN_ENTRY_FRAME, invokes
// the native pointer, then pops all four reserved slots and returns.
void AsmInterpreterCall::CallNativeEntry(ExtendedAssembler *assembler)
{
    Register glue(X0);
    Register argv(X5);
    Register method(X2);
    Register function(X1);
    Register nativeCode(X7);
    Register temp(X9);

    Register sp(SP);
    // 2: function & align
    __ Stp(function, Register(Zero), MemoryOperand(sp, -2 * FRAME_SLOT_SIZE, AddrMode::PREINDEX));
    // 2: skip argc & thread
    __ Sub(sp, sp, Immediate(2 * FRAME_SLOT_SIZE));
    PushBuiltinFrame(assembler, glue, FrameType::BUILTIN_ENTRY_FRAME, temp, argv);
    // get native pointer
    __ Ldr(nativeCode, MemoryOperand(method, Method::NATIVE_POINTER_OR_BYTECODE_ARRAY_OFFSET));
    // X0 = argv - 2 slots: point at the argc/thread header preceding the args.
    __ Mov(temp, argv);
    __ Sub(Register(X0), temp, Immediate(2 * FRAME_SLOT_SIZE)); // 2: skip argc & thread
    CallNativeInternal(assembler, nativeCode);

    // 4: pop argc & thread & function & align reserved above
    __ Add(sp, sp, Immediate(4 * FRAME_SLOT_SIZE));
    __ Ret();
}
1248
// Restores SP to `fp`, calls the ThrowStackOverflowException runtime stub
// (looked up in glue's RT-stub table), and returns to the caller. `op` is a
// scratch register used for the stub-id/address computation.
void AsmInterpreterCall::ThrowStackOverflowExceptionAndReturn(ExtendedAssembler *assembler, Register glue,
    Register fp, Register op)
{
    if (fp != Register(SP)) {
        __ Mov(Register(SP), fp);
    }
    __ Mov(op, Immediate(kungfu::RuntimeStubCSigns::ID_ThrowStackOverflowException));
    // scale the stub id by the slot size (LSL 3 == *8) to index the RT-stub table
    __ Add(op, glue, Operand(op, LSL, 3));
    __ Ldr(op, MemoryOperand(op, JSThread::GlueData::GetRTStubEntriesOffset(false)));
    // Runtime stubs take glue as the first C argument (X0).
    if (glue.GetId() != X0) {
        __ Mov(Register(X0), glue);
    }
    __ Blr(op);
    __ RestoreFpAndLr();
    __ Ret();
}
1266 #undef __
1267 } // panda::ecmascript::aarch64