1 /*
2 * Copyright (c) 2022-2024 Huawei Device Co., Ltd.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16 #include "ecmascript/compiler/trampoline/aarch64/common_call.h"
17
18 #include "ecmascript/js_generator_object.h"
19 #include "ecmascript/message_string.h"
20
21 namespace panda::ecmascript::aarch64 {
22 using Label = panda::ecmascript::Label;
23 #define __ assembler->
24
25 // Generate code for entering asm interpreter
26 // c++ calling convention
27 // Input: glue - %X0
28 // callTarget - %X1
29 // method - %X2
30 // callField - %X3
31 // argc - %X4
32 // argv - %X5(<callTarget, newTarget, this> are at the beginning of argv)
void AsmInterpreterCall::AsmInterpreterEntry(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(AsmInterpreterEntry));
    Label target;
    // Measure the byte size of the push/Bl/pop sequence below: stack walkers rely on
    // the fixed offset FrameCompletionPos::ARM64EntryFrameDuration, so any change to
    // the emitted frame prologue/epilogue must be mirrored in that constant.
    size_t begin = __ GetCurrentPosition();
    PushAsmInterpEntryFrame(assembler);
    __ Bl(&target);
    PopAsmInterpEntryFrame(assembler);
    size_t end = __ GetCurrentPosition();
    if ((end - begin) != FrameCompletionPos::ARM64EntryFrameDuration) {
        LOG_COMPILER(FATAL) << (end - begin) << " != " << FrameCompletionPos::ARM64EntryFrameDuration
            << "This frame has been modified, and the offset EntryFrameDuration should be updated too.";
    }
    __ Ret();

    // Out-of-line body called via Bl above; dispatches to the JS / native entry paths.
    __ Bind(&target);
    {
        AsmInterpEntryDispatch(assembler);
    }
}
53
54 // Input: glue - %X0
55 // callTarget - %X1
56 // method - %X2
57 // callField - %X3
58 // argc - %X4
59 // argv - %X5(<callTarget, newTarget, this> are at the beginning of argv)
AsmInterpEntryDispatch(ExtendedAssembler * assembler)60 void AsmInterpreterCall::AsmInterpEntryDispatch(ExtendedAssembler *assembler)
61 {
62 Label notJSFunction;
63 Label callNativeEntry;
64 Label callJSFunctionEntry;
65 Label notCallable;
66 Register glueRegister(X0);
67 Register argcRegister(X4, W);
68 Register argvRegister(X5);
69 Register callTargetRegister(X1);
70 Register callFieldRegister(X3);
71 Register bitFieldRegister(X16);
72 Register tempRegister(X17); // can not be used to store any variable
73 Register functionTypeRegister(X18, W);
74 __ Ldr(tempRegister, MemoryOperand(callTargetRegister, TaggedObject::HCLASS_OFFSET));
75 __ And(tempRegister, tempRegister, LogicalImmediate::Create(TaggedObject::GC_STATE_MASK, RegXSize));
76 __ Ldr(bitFieldRegister, MemoryOperand(tempRegister, JSHClass::BIT_FIELD_OFFSET));
77 __ And(functionTypeRegister, bitFieldRegister.W(), LogicalImmediate::Create(0xFF, RegWSize));
78 __ Mov(tempRegister.W(), Immediate(static_cast<int64_t>(JSType::JS_FUNCTION_FIRST)));
79 __ Cmp(functionTypeRegister, tempRegister.W());
80 __ B(Condition::LO, ¬JSFunction);
81 __ Mov(tempRegister.W(), Immediate(static_cast<int64_t>(JSType::JS_FUNCTION_LAST)));
82 __ Cmp(functionTypeRegister, tempRegister.W());
83 __ B(Condition::LS, &callJSFunctionEntry);
84 __ Bind(¬JSFunction);
85 {
86 __ Tst(bitFieldRegister,
87 LogicalImmediate::Create(static_cast<int64_t>(1ULL << JSHClass::CallableBit::START_BIT), RegXSize));
88 __ B(Condition::EQ, ¬Callable);
89 CallNativeEntry(assembler, false);
90 }
91 __ Bind(&callNativeEntry);
92 CallNativeEntry(assembler, true);
93 __ Bind(&callJSFunctionEntry);
94 {
95 __ Tbnz(callFieldRegister, MethodLiteral::IsNativeBit::START_BIT, &callNativeEntry);
96 // fast path
97 __ Add(argvRegister, argvRegister, Immediate(NUM_MANDATORY_JSFUNC_ARGS * JSTaggedValue::TaggedTypeSize()));
98 JSCallCommonEntry(assembler, JSCallMode::CALL_ENTRY, FrameTransitionType::OTHER_TO_BASELINE_CHECK);
99 }
100 __ Bind(¬Callable);
101 {
102 Register runtimeId(X11);
103 Register trampoline(X12);
104 __ Mov(runtimeId, Immediate(kungfu::RuntimeStubCSigns::ID_ThrowNotCallableException));
105 // 3 : 3 means *8
106 __ Add(trampoline, glueRegister, Operand(runtimeId, LSL, 3));
107 __ Ldr(trampoline, MemoryOperand(trampoline, JSThread::GlueData::GetRTStubEntriesOffset(false)));
108 __ Blr(trampoline);
109 __ Ret();
110 }
111 }
112
// Common entry for all JS calls: reserves stack, compares declared vs actual argc to
// pick the fast (exact-arity) or slow (adjust-arity) push path, and funnels stack
// overflow into either a bytecode-stub throw (jump modes) or a runtime throw.
void AsmInterpreterCall::JSCallCommonEntry(ExtendedAssembler *assembler,
                                           JSCallMode mode, FrameTransitionType type)
{
    Label stackOverflow;
    Register glueRegister = __ GlueRegister();
    Register fpRegister = __ AvailableRegister1();
    Register currentSlotRegister = __ AvailableRegister3();
    Register callFieldRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_FIELD);
    Register argcRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARGC);
    // Jump-to-common-entry modes already have fp/lr pushed, except when transitioning
    // from baseline code, which needs its own frame link.
    if (!kungfu::AssemblerModule::IsJumpToCallCommonEntry(mode) || type == FrameTransitionType::BASELINE_TO_OTHER ||
        type == FrameTransitionType::BASELINE_TO_BASELINE_CHECK) {
        __ PushFpAndLr();
    }
    // save fp
    __ Mov(fpRegister, Register(SP));
    __ Mov(currentSlotRegister, Register(SP));

    {
        // Reserve enough sp space to prevent stack parameters from being covered by cpu profiler.
        [[maybe_unused]] TempRegister1Scope scope(assembler);
        Register tempRegister = __ TempRegister1();
        __ Ldr(tempRegister, MemoryOperand(glueRegister, JSThread::GlueData::GetStackLimitOffset(false)));
        __ Mov(Register(SP), tempRegister);
    }

    Register declaredNumArgsRegister = __ AvailableRegister2();
    GetDeclaredNumArgsFromCallField(assembler, callFieldRegister, declaredNumArgsRegister);

    Label slowPathEntry;
    Label fastPathEntry;
    Label pushCallThis;
    // Compile-time-known argc (callarg0/1/2/3 modes) vs runtime argc (range modes).
    auto argc = kungfu::AssemblerModule::GetArgcFromJSCallMode(mode);
    if (argc >= 0) {
        __ Cmp(declaredNumArgsRegister, Immediate(argc));
    } else {
        __ Cmp(declaredNumArgsRegister, argcRegister);
    }
    __ B(Condition::NE, &slowPathEntry);
    __ Bind(&fastPathEntry);
    JSCallCommonFastPath(assembler, mode, &pushCallThis, &stackOverflow);
    __ Bind(&pushCallThis);
    PushCallThis(assembler, mode, &stackOverflow, type);
    // Slow path re-enters fastPathEntry/pushCallThis above after arity adjustment.
    __ Bind(&slowPathEntry);
    JSCallCommonSlowPath(assembler, mode, &fastPathEntry, &pushCallThis, &stackOverflow);

    __ Bind(&stackOverflow);
    if (kungfu::AssemblerModule::IsJumpToCallCommonEntry(mode)) {
        __ Mov(Register(SP), fpRegister);
        [[maybe_unused]] TempRegister1Scope scope(assembler);
        Register temp = __ TempRegister1();
        // only glue and acc are useful in exception handler
        if (glueRegister.GetId() != X19) {
            __ Mov(Register(X19), glueRegister);
        }
        Register acc(X23);
        __ Mov(acc, Immediate(JSTaggedValue::VALUE_EXCEPTION));
        Register methodRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::METHOD);
        Register callTargetRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_TARGET);
        // Reload pc to make sure stack trace is right
        __ Mov(temp, callTargetRegister);
        __ Ldr(Register(X20), MemoryOperand(methodRegister, Method::NATIVE_POINTER_OR_BYTECODE_ARRAY_OFFSET));
        // Reload constpool and profileInfo to make sure gc map work normally
        __ Ldr(Register(X22), MemoryOperand(temp, JSFunction::RAW_PROFILE_TYPE_INFO_OFFSET));
        __ Ldr(Register(X22), MemoryOperand(Register(X22), ProfileTypeInfoCell::VALUE_OFFSET));
        __ Ldr(Register(X21), MemoryOperand(methodRegister, Method::CONSTANT_POOL_OFFSET));

        // Tail-jump into the ThrowStackOverflowException bytecode stub.
        __ Mov(temp, kungfu::BytecodeStubCSigns::ID_ThrowStackOverflowException);
        __ Add(temp, glueRegister, Operand(temp, UXTW, 3)); // 3: bc * 8
        __ Ldr(temp, MemoryOperand(temp, JSThread::GlueData::GetBCStubEntriesOffset(false)));
        __ Br(temp);
    } else {
        [[maybe_unused]] TempRegister1Scope scope(assembler);
        Register temp = __ TempRegister1();
        ThrowStackOverflowExceptionAndReturn(assembler, glueRegister, fpRegister, temp);
    }
}
189
// Fast path: declared argc == actual argc, so push the arguments as-is.
// Range modes copy argc values from argv; fixed-arity modes push arg2..arg0
// (stack grows downward, so the last argument is stored first).
void AsmInterpreterCall::JSCallCommonFastPath(ExtendedAssembler *assembler, JSCallMode mode, Label *pushCallThis,
                                              Label *stackOverflow)
{
    Register glueRegister = __ GlueRegister();
    auto argc = kungfu::AssemblerModule::GetArgcFromJSCallMode(mode);
    Register currentSlotRegister = __ AvailableRegister3();
    // call range
    if (argc < 0) {
        Register numRegister = __ AvailableRegister2();
        Register argcRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARGC);
        Register argvRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARGV);
        __ Mov(numRegister, argcRegister);
        [[maybe_unused]] TempRegister1Scope scope(assembler);
        Register opRegister = __ TempRegister1();
        // PushArgsWithArgv also performs the stack-overflow check internally.
        PushArgsWithArgv(assembler, glueRegister, numRegister, argvRegister, opRegister,
                         currentSlotRegister, pushCallThis, stackOverflow);
    } else {
        if (argc > 2) { // 2: call arg2
            Register arg2 = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG2);
            __ Str(arg2, MemoryOperand(currentSlotRegister, -FRAME_SLOT_SIZE, AddrMode::PREINDEX));
        }
        if (argc > 1) {
            Register arg1 = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG1);
            __ Str(arg1, MemoryOperand(currentSlotRegister, -FRAME_SLOT_SIZE, AddrMode::PREINDEX));
        }
        if (argc > 0) {
            Register arg0 = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG0);
            __ Str(arg0, MemoryOperand(currentSlotRegister, -FRAME_SLOT_SIZE, AddrMode::PREINDEX));
        }
        // Fixed-arity pushes above were unchecked; verify the stack afterwards.
        if (stackOverflow != nullptr) {
            [[maybe_unused]] TempRegister1Scope scope(assembler);
            Register op = __ TempRegister1();
            Register numRegister = __ AvailableRegister2();
            __ Mov(numRegister, Immediate(argc));
            StackOverflowCheck(assembler, glueRegister, currentSlotRegister, numRegister, op, stackOverflow);
        }
    }
}
228
// Slow path: declared argc != actual argc. If the method has the "extra" slot, push
// the actual argc first; then pad missing parameters with undefined (declared > actual)
// or, when declared < actual and there is no extra slot, push only the declared prefix.
void AsmInterpreterCall::JSCallCommonSlowPath(ExtendedAssembler *assembler, JSCallMode mode,
                                              Label *fastPathEntry, Label *pushCallThis, Label *stackOverflow)
{
    Register glueRegister = __ GlueRegister();
    Register callFieldRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_FIELD);
    Register argcRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARGC);
    Register argvRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARGV);
    Register currentSlotRegister = __ AvailableRegister3();
    Register arg0 = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG0);
    Register arg1 = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG1);
    Label noExtraEntry;
    Label pushArgsEntry;

    auto argc = kungfu::AssemblerModule::GetArgcFromJSCallMode(mode);
    Register declaredNumArgsRegister = __ AvailableRegister2();
    __ Tbz(callFieldRegister, MethodLiteral::HaveExtraBit::START_BIT, &noExtraEntry);
    // extra entry: record the actual argc in the extra slot
    {
        [[maybe_unused]] TempRegister1Scope scope1(assembler);
        Register tempArgcRegister = __ TempRegister1();
        if (argc >= 0) {
            __ PushArgc(argc, tempArgcRegister, currentSlotRegister);
        } else {
            __ PushArgc(argcRegister, tempArgcRegister, currentSlotRegister);
        }
        // fall through
    }
    __ Bind(&noExtraEntry);
    {
        if (argc == 0) {
            // Actual argc is 0: every declared parameter is padded with undefined,
            // then the fast path pushes nothing further.
            {
                [[maybe_unused]] TempRegister1Scope scope(assembler);
                Register tempRegister = __ TempRegister1();
                PushUndefinedWithArgc(assembler, glueRegister, declaredNumArgsRegister, tempRegister,
                                      currentSlotRegister, nullptr, stackOverflow);
            }
            __ B(fastPathEntry);
            return;
        }
        // diff = declared - actual; positive means pad with undefined, negative
        // branches to pushArgsEntry below.
        [[maybe_unused]] TempRegister1Scope scope1(assembler);
        Register diffRegister = __ TempRegister1();
        if (argc >= 0) {
            __ Sub(diffRegister.W(), declaredNumArgsRegister.W(), Immediate(argc));
        } else {
            __ Sub(diffRegister.W(), declaredNumArgsRegister.W(), argcRegister.W());
        }
        [[maybe_unused]] TempRegister2Scope scope2(assembler);
        Register tempRegister = __ TempRegister2();
        PushUndefinedWithArgc(assembler, glueRegister, diffRegister, tempRegister,
                              currentSlotRegister, &pushArgsEntry, stackOverflow);
        __ B(fastPathEntry);
    }
    // declare < actual
    __ Bind(&pushArgsEntry);
    {
        // With the extra slot present the full actual argument list is kept, so the
        // fast path can push everything.
        __ Tbnz(callFieldRegister, MethodLiteral::HaveExtraBit::START_BIT, fastPathEntry);
        // no extra branch
        // arg1, declared must be 0
        if (argc == 1) {
            __ B(pushCallThis);
            return;
        }
        __ Cmp(declaredNumArgsRegister, Immediate(0));
        __ B(Condition::EQ, pushCallThis);
        // call range
        if (argc < 0) {
            [[maybe_unused]] TempRegister1Scope scope(assembler);
            Register opRegister = __ TempRegister1();
            PushArgsWithArgv(assembler, glueRegister, declaredNumArgsRegister,
                             argvRegister, opRegister,
                             currentSlotRegister, nullptr, stackOverflow);
        } else if (argc > 0) {
            Label pushArgs0;
            if (argc > 2) { // 2: call arg2
                // declared is 2 or 1 now
                __ Cmp(declaredNumArgsRegister, Immediate(1));
                __ B(Condition::EQ, &pushArgs0);
                __ Str(arg1, MemoryOperand(currentSlotRegister, -FRAME_SLOT_SIZE, AddrMode::PREINDEX));
            }
            if (argc > 1) {
                __ Bind(&pushArgs0);
                // declared is 1 now
                __ Str(arg0, MemoryOperand(currentSlotRegister, -FRAME_SLOT_SIZE, AddrMode::PREINDEX));
            }
        }
        __ B(pushCallThis);
    }
}
317
// Return the register holding `this` for the given call mode. For AOT/entry calls
// `this` lives on the stack at argv[-1], so it is loaded into defaultRegister.
// (Note: "Regsiter" spelling is part of the established public name.)
Register AsmInterpreterCall::GetThisRegsiter(ExtendedAssembler *assembler, JSCallMode mode, Register defaultRegister)
{
    switch (mode) {
        case JSCallMode::CALL_GETTER:
        case JSCallMode::CALL_THIS_ARG0:
            return __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG0);
        case JSCallMode::CALL_SETTER:
        case JSCallMode::CALL_THIS_ARG1:
            return __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG1);
        case JSCallMode::CALL_THIS_ARG2:
        case JSCallMode::CALL_THIS_ARG2_WITH_RETURN:
        case JSCallMode::CALL_CONSTRUCTOR_WITH_ARGV:
        case JSCallMode::SUPER_CALL_WITH_ARGV:
        case JSCallMode::SUPER_CALL_SPREAD_WITH_ARGV:
        case JSCallMode::CALL_THIS_WITH_ARGV:
        case JSCallMode::CALL_THIS_ARGV_WITH_RETURN:
            return __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG2);
        case JSCallMode::CALL_THIS_ARG3:
        case JSCallMode::CALL_THIS_ARG3_WITH_RETURN:
            return __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG3);
        case JSCallMode::CALL_FROM_AOT:
        case JSCallMode::CALL_ENTRY: {
            // ARG1 holds argv here; `this` is the slot just below it.
            Register argvRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG1);
            __ Ldur(defaultRegister, MemoryOperand(argvRegister, -FRAME_SLOT_SIZE));
            return defaultRegister;
        }
        default:
            LOG_ECMA(FATAL) << "this branch is unreachable";
            UNREACHABLE();
    }
    return INVALID_REG;
}
350
// Return the register holding `new.target` for the given call mode. For AOT/entry
// calls it lives on the stack at argv[-2] and is loaded into defaultRegister.
// (Note: "Regsiter" spelling is part of the established public name.)
Register AsmInterpreterCall::GetNewTargetRegsiter(ExtendedAssembler *assembler, JSCallMode mode,
                                                  Register defaultRegister)
{
    switch (mode) {
        case JSCallMode::CALL_CONSTRUCTOR_WITH_ARGV:
        case JSCallMode::CALL_THIS_WITH_ARGV:
            // For plain construct calls new.target is the callee itself.
            return __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_TARGET);
        case JSCallMode::SUPER_CALL_WITH_ARGV:
        case JSCallMode::SUPER_CALL_SPREAD_WITH_ARGV:
            return __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG3);
        case JSCallMode::CALL_FROM_AOT:
        case JSCallMode::CALL_ENTRY: {
            Register argvRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG1);
            // 2: new Target index
            __ Ldur(defaultRegister, MemoryOperand(argvRegister, -2 * FRAME_SLOT_SIZE));
            return defaultRegister;
        }
        default:
            LOG_ECMA(FATAL) << "this branch is unreachable";
            UNREACHABLE();
    }
    return INVALID_REG;
}
374
375 // void PushCallArgsxAndDispatch(uintptr_t glue, uintptr_t sp, uint64_t callTarget, uintptr_t method,
376 // uint64_t callField, ...)
377 // GHC calling convention
378 // Input1: for callarg0/1/2/3 Input2: for callrange
379 // X19 - glue // X19 - glue
380 // FP - sp // FP - sp
381 // X20 - callTarget // X20 - callTarget
382 // X21 - method // X21 - method
383 // X22 - callField // X22 - callField
384 // X23 - arg0 // X23 - actualArgc
385 // X24 - arg1 // X24 - argv
386 // X25 - arg2
// Stub: push a variable-length argument range (with explicit `this`) and dispatch.
void AsmInterpreterCall::PushCallThisRangeAndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallThisRangeAndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_THIS_WITH_ARGV, FrameTransitionType::OTHER_TO_OTHER);
}
392
// Stub: push a variable-length argument range (no explicit `this`) and dispatch.
void AsmInterpreterCall::PushCallRangeAndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallRangeAndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_WITH_ARGV, FrameTransitionType::OTHER_TO_OTHER);
}
398
// Stub: push constructor-call arguments (argv form) and dispatch.
void AsmInterpreterCall::PushCallNewAndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallNewAndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_CONSTRUCTOR_WITH_ARGV, FrameTransitionType::OTHER_TO_OTHER);
}
404
// Stub: push super-call arguments (argv form) and dispatch.
void AsmInterpreterCall::PushSuperCallAndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushSuperCallAndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::SUPER_CALL_WITH_ARGV, FrameTransitionType::OTHER_TO_OTHER);
}
410
// Stub: push a 3-argument call (no explicit `this`) and dispatch.
void AsmInterpreterCall::PushCallArgs3AndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallArgs3AndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_ARG3, FrameTransitionType::OTHER_TO_OTHER);
}
416
// Stub: push a 2-argument call (no explicit `this`) and dispatch.
void AsmInterpreterCall::PushCallArgs2AndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallArgs2AndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_ARG2, FrameTransitionType::OTHER_TO_OTHER);
}
422
// Stub: push a 1-argument call (no explicit `this`) and dispatch.
void AsmInterpreterCall::PushCallArg1AndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallArg1AndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_ARG1, FrameTransitionType::OTHER_TO_OTHER);
}
428
// Stub: push a 0-argument call (no explicit `this`) and dispatch.
void AsmInterpreterCall::PushCallArg0AndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallArg0AndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_ARG0, FrameTransitionType::OTHER_TO_OTHER);
}
434
// Stub: push a 0-argument call with explicit `this` and dispatch.
void AsmInterpreterCall::PushCallThisArg0AndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallThisArg0AndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_THIS_ARG0, FrameTransitionType::OTHER_TO_OTHER);
}
440
// Stub: push a 1-argument call with explicit `this` and dispatch.
void AsmInterpreterCall::PushCallThisArg1AndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallThisArg1AndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_THIS_ARG1, FrameTransitionType::OTHER_TO_OTHER);
}
446
// Stub: push a 2-argument call with explicit `this` and dispatch.
void AsmInterpreterCall::PushCallThisArgs2AndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallThisArgs2AndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_THIS_ARG2, FrameTransitionType::OTHER_TO_OTHER);
}
452
// Stub: push a 3-argument call with explicit `this` and dispatch.
void AsmInterpreterCall::PushCallThisArgs3AndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallThisArgs3AndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_THIS_ARG3, FrameTransitionType::OTHER_TO_OTHER);
}
458
459 // uint64_t PushCallRangeAndDispatchNative(uintptr_t glue, uint32_t argc, JSTaggedType calltarget, uintptr_t argv[])
460 // c++ calling convention call js function
461 // Input: X0 - glue
462 // X1 - nativeCode
463 // X2 - callTarget
464 // X3 - thisValue
465 // X4 - argc
466 // X5 - argV (...)
// Stub: ordinary (non-construct) native call with an argv range.
void AsmInterpreterCall::PushCallRangeAndDispatchNative(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallRangeAndDispatchNative));
    CallNativeWithArgv(assembler, false);
}
472
// Stub: construct-style native call; new.target defaults to the call target.
void AsmInterpreterCall::PushCallNewAndDispatchNative(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallNewAndDispatchNative));
    CallNativeWithArgv(assembler, true);
}
478
// Stub: construct-style native call with an explicitly provided new.target (X6).
void AsmInterpreterCall::PushNewTargetAndDispatchNative(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushNewTargetAndDispatchNative));
    CallNativeWithArgv(assembler, true, true);
}
484
// Build a BUILTIN_FRAME_WITH_ARGV frame and invoke a native function:
// pushes argv, then <newTarget, this, callTarget>, then <argc, glue>, passes the
// slot base as X0 and calls nativeCode. On stack overflow a minimal placeholder
// frame is built and ThrowStackOverflowException runtime stub is invoked.
// callNew: construct semantics; hasNewTarget: an explicit new.target is in X6.
void AsmInterpreterCall::CallNativeWithArgv(ExtendedAssembler *assembler, bool callNew, bool hasNewTarget)
{
    Register glue(X0);
    Register nativeCode(X1);
    Register callTarget(X2);
    Register thisObj(X3);
    Register argc(X4);
    Register argv(X5);
    Register newTarget(X6);
    Register opArgc(X8);
    Register opArgv(X9);
    Register temp(X10);
    Register currentSlotRegister(X11);
    Register spRegister(SP);

    Label pushThis;
    Label stackOverflow;
    // For BUILTIN_FRAME_WITH_ARGV the FP is not yet final (isFrameComplete == false);
    // it is fixed up below once the arg count is known.
    bool isFrameComplete = PushBuiltinFrame(assembler, glue, FrameType::BUILTIN_FRAME_WITH_ARGV, temp, argc);

    __ Mov(currentSlotRegister, spRegister);
    // Reserve enough sp space to prevent stack parameters from being covered by cpu profiler.
    __ Ldr(temp, MemoryOperand(glue, JSThread::GlueData::GetStackLimitOffset(false)));
    __ Mov(Register(SP), temp);

    __ Mov(opArgc, argc);
    __ Mov(opArgv, argv);
    PushArgsWithArgv(assembler, glue, opArgc, opArgv, temp, currentSlotRegister, &pushThis, &stackOverflow);

    __ Bind(&pushThis);
    // newTarget
    if (callNew) {
        if (hasNewTarget) {
            // 16: this & newTarget
            __ Stp(newTarget, thisObj, MemoryOperand(currentSlotRegister, -DOUBLE_SLOT_SIZE, AddrMode::PREINDEX));
        } else {
            // 16: this & newTarget (new.target defaults to the callee)
            __ Stp(callTarget, thisObj, MemoryOperand(currentSlotRegister, -DOUBLE_SLOT_SIZE, AddrMode::PREINDEX));
        }
    } else {
        __ Mov(temp, Immediate(JSTaggedValue::VALUE_UNDEFINED));
        // 16: this & newTarget (non-construct call: new.target = undefined)
        __ Stp(temp, thisObj, MemoryOperand(currentSlotRegister, -DOUBLE_SLOT_SIZE, AddrMode::PREINDEX));
    }
    // callTarget
    __ Str(callTarget, MemoryOperand(currentSlotRegister, -FRAME_SLOT_SIZE, AddrMode::PREINDEX));
    __ Add(temp, currentSlotRegister, Immediate(QUINTUPLE_SLOT_SIZE));
    if (!isFrameComplete) {
        // Finish the frame: FP points above the pushed argv block.
        __ Add(Register(FP), temp, Operand(argc, LSL, 3)); // 3: argc * 8
    }

    // Total argc includes the mandatory <callTarget, newTarget, this> triple.
    __ Add(temp, argc, Immediate(NUM_MANDATORY_JSFUNC_ARGS));
    // 2: thread & argc
    __ Stp(glue, temp, MemoryOperand(currentSlotRegister, -2 * FRAME_SLOT_SIZE, AddrMode::PREINDEX));
    __ Add(Register(X0), currentSlotRegister, Immediate(0));

    __ Align16(currentSlotRegister);
    __ Mov(spRegister, currentSlotRegister);

    CallNativeInternal(assembler, nativeCode);
    __ Ret();

    __ Bind(&stackOverflow);
    {
        // use builtin_with_argv_frame to mark gc map
        Register frameType(X11);
        __ Ldr(temp, MemoryOperand(glue, JSThread::GlueData::GetLeaveFrameOffset(false)));
        __ Mov(spRegister, temp);
        __ Mov(frameType, Immediate(static_cast<int32_t>(FrameType::BUILTIN_FRAME_WITH_ARGV_STACK_OVER_FLOW_FRAME)));
        // 2: frame type and argc
        __ Stp(Register(Zero), frameType, MemoryOperand(Register(SP), -FRAME_SLOT_SIZE * 2, AddrMode::PREINDEX));
        __ Mov(temp, Immediate(JSTaggedValue::VALUE_UNDEFINED));
        // 2: fill this&newtgt slots
        __ Stp(temp, temp, MemoryOperand(spRegister, -FRAME_SLOT_SIZE * 2, AddrMode::PREINDEX));
        // 2: fill func&align slots
        __ Stp(Register(Zero), temp, MemoryOperand(spRegister, -FRAME_SLOT_SIZE * 2, AddrMode::PREINDEX));
        __ Mov(temp, spRegister);
        // 6:frame type, argc, this, newTarget, func and align
        // +----------------------------------------------------------------+ <---- fp = sp + 6 * frame_slot_size
        // |    FrameType = BUILTIN_FRAME_WITH_ARGV_STACK_OVER_FLOW_FRAME   |
        // +----------------------------------------------------------------+
        // |                           argc = 0                             |
        // |----------------------------------------------------------------|
        // |                       this = undefined                         |
        // |----------------------------------------------------------------|
        // |                      newTarget = undefine                      |
        // |----------------------------------------------------------------|
        // |                      function = undefined                      |
        // |----------------------------------------------------------------|
        // |                             align                              |
        // +----------------------------------------------------------------+ <---- sp
        __ Add(Register(FP), temp, Immediate(FRAME_SLOT_SIZE * 6));

        Register runtimeId(X11);
        Register trampoline(X12);
        __ Mov(runtimeId, Immediate(kungfu::RuntimeStubCSigns::ID_ThrowStackOverflowException));
        // 3 : 3 means *8 (index runtime-stub table by id)
        __ Add(trampoline, glue, Operand(runtimeId, LSL, 3));
        __ Ldr(trampoline, MemoryOperand(trampoline, JSThread::GlueData::GetRTStubEntriesOffset(false)));
        __ Blr(trampoline);

        // resume rsp
        __ Mov(Register(SP), Register(FP));
        __ RestoreFpAndLr();
        __ Ret();
    }
}
591
592 // uint64_t PushCallArgsAndDispatchNative(uintptr_t codeAddress, uintptr_t glue, uint32_t argc, ...)
593 // webkit_jscc calling convention call runtime_id's runtion function(c-abi)
594 // Input: X0 - codeAddress
595 // stack layout: sp + N*8 argvN
596 // ........
597 // sp + 24: argv1
598 // sp + 16: argv0
599 // sp + 8: actualArgc
600 // sp: thread
601 // construct Native Leave Frame
602 // +--------------------------+
603 // | argV[N - 1] |
604 // |--------------------------|
605 // | . . . . |
606 // |--------------------------+
607 // | argV[2]=this |
608 // +--------------------------+
609 // | argV[1]=new-target |
610 // +--------------------------+
611 // | argV[0]=call-target |
612 // +--------------------------+ ---------
613 // | argc | ^
614 // |--------------------------| |
615 // | thread | |
616 // |--------------------------| |
617 // | returnAddr | BuiltinFrame
618 // |--------------------------| |
619 // | callsiteFp | |
620 // |--------------------------| |
621 // | frameType | v
622 // +--------------------------+ ---------
623
// Invoke a native function whose arguments are already on the stack (webkit_jscc
// layout: [thread, argc, argv...] starting at sp). Builds a BUILTIN_FRAME, passes
// the stack argument base in X0 and calls the code address originally in X0.
void AsmInterpreterCall::PushCallArgsAndDispatchNative(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallArgsAndDispatchNative));

    Register nativeCode(X0);
    Register glue(X1);
    Register argv(X5);
    Register temp(X6);
    Register sp(SP);
    Register nativeCodeTemp(X2);

    // X0 is reused below for the args pointer, so stash the code address first.
    __ Mov(nativeCodeTemp, nativeCode);

    __ Ldr(glue, MemoryOperand(sp, 0));
    __ Add(Register(X0), sp, Immediate(0));
    PushBuiltinFrame(assembler, glue, FrameType::BUILTIN_FRAME, temp, argv);

    CallNativeInternal(assembler, nativeCodeTemp);
    __ Ret();
}
644
// Push a builtin frame header: saves fp/lr, records the leave frame in glue, then
// pushes <next, frameType>. Returns true when FP is fully set here; false for
// BUILTIN_FRAME_WITH_ARGV, whose FP must be fixed by the caller after the argv
// block is pushed (so the cpu profiler never sees an incomplete stack).
// op is a scratch register; next holds the frame-specific second slot.
bool AsmInterpreterCall::PushBuiltinFrame(ExtendedAssembler *assembler, Register glue,
                                          FrameType type, Register op, Register next)
{
    Register sp(SP);
    __ PushFpAndLr();
    // Publish the current sp as the leave frame for stack walking.
    __ Mov(op, sp);
    __ Str(op, MemoryOperand(glue, JSThread::GlueData::GetLeaveFrameOffset(false)));
    __ Mov(op, Immediate(static_cast<int32_t>(type)));
    if (type == FrameType::BUILTIN_FRAME) {
        // push stack args
        __ Add(next, sp, Immediate(BuiltinFrame::GetStackArgsToFpDelta(false)));
        // 2: -2 * FRAME_SLOT_SIZE means type & next
        __ Stp(next, op, MemoryOperand(sp, -2 * FRAME_SLOT_SIZE, AddrMode::PREINDEX));
        // 2: 2 * FRAME_SLOT_SIZE means skip next and frame type
        __ Add(Register(FP), sp, Immediate(2 * FRAME_SLOT_SIZE));
        return true;
    } else if (type == FrameType::BUILTIN_ENTRY_FRAME) {
        // 2: -2 * FRAME_SLOT_SIZE means type & next
        __ Stp(next, op, MemoryOperand(sp, -2 * FRAME_SLOT_SIZE, AddrMode::PREINDEX));
        // 2: 2 * FRAME_SLOT_SIZE means skip next and frame type
        __ Add(Register(FP), sp, Immediate(2 * FRAME_SLOT_SIZE));
        return true;
    } else if (type == FrameType::BUILTIN_FRAME_WITH_ARGV) {
        // this frame push stack args must before update FP, otherwise cpu profiler maybe visit incomplete stack
        // BuiltinWithArgvFrame layout please see frames.h
        // 2: -2 * FRAME_SLOT_SIZE means type & next
        __ Stp(next, op, MemoryOperand(sp, -2 * FRAME_SLOT_SIZE, AddrMode::PREINDEX));
        return false;
    } else {
        LOG_ECMA(FATAL) << "this branch is unreachable";
        UNREACHABLE();
    }
}
678
// Call the native code address and tear down the builtin frame afterwards
// (sp is restored from FP, then fp/lr are popped). Caller emits the final Ret.
void AsmInterpreterCall::CallNativeInternal(ExtendedAssembler *assembler, Register nativeCode)
{
    __ Blr(nativeCode);
    // resume rsp
    __ Mov(Register(SP), Register(FP));
    __ RestoreFpAndLr();
}
686
687 // ResumeRspAndDispatch(uintptr_t glue, uintptr_t sp, uintptr_t pc, uintptr_t constantPool,
688 // uint64_t profileTypeInfo, uint64_t acc, uint32_t hotnessCounter, size_t jumpSize)
689 // GHC calling convention
690 // X19 - glue
691 // FP - sp
692 // X20 - pc
693 // X21 - constantPool
694 // X22 - profileTypeInfo
695 // X23 - acc
696 // X24 - hotnessCounter
697 // X25 - jumpSizeAfterCall
// Resume the native stack pointer from the current asm-interpreted frame and dispatch
// to the next bytecode handler. jumpSize <= 0 marks a return from a "new object range"
// call site, which needs extra checking: if a base-class constructor returned a
// non-ecma-object, "this" replaces the returned value in acc.
void AsmInterpreterCall::ResumeRspAndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(ResumeRspAndDispatch));

    Register glueRegister = __ GlueRegister();
    Register sp(FP);                 // GHC convention: interpreter sp is passed in FP
    Register rsp(SP);                // native stack pointer
    Register pc(X20);
    Register jumpSizeRegister(X25);  // <= 0 flags the "new object range" return path

    Register ret(X23);               // acc: callee's return value
    Register opcode(X6, W);
    Register temp(X7);
    Register bcStub(X7);             // aliases temp (X7); the two are never live together
    Register fp(X8);

    // Slot offsets are measured from the end of the AsmInterpretedFrame, hence negative.
    int64_t fpOffset = static_cast<int64_t>(AsmInterpretedFrame::GetFpOffset(false))
        - static_cast<int64_t>(AsmInterpretedFrame::GetSize(false));
    int64_t spOffset = static_cast<int64_t>(AsmInterpretedFrame::GetBaseOffset(false))
        - static_cast<int64_t>(AsmInterpretedFrame::GetSize(false));
    int64_t thisOffset = static_cast<int64_t>(AsmInterpretedFrame::GetThisOffset(false))
        - static_cast<int64_t>(AsmInterpretedFrame::GetSize(false));
    ASSERT(fpOffset < 0);
    ASSERT(spOffset < 0);

    Label newObjectRangeReturn;
    Label dispatch;
    __ Ldur(fp, MemoryOperand(sp, fpOffset)); // store fp for temporary: native sp to resume later
    __ Cmp(jumpSizeRegister, Immediate(0));
    __ B(Condition::LE, &newObjectRangeReturn);
    __ Ldur(sp, MemoryOperand(sp, spOffset)); // update sp to the previous interpreted frame

    // Ordinary return: advance pc past the call bytecode and fetch the next opcode.
    __ Add(pc, pc, Operand(jumpSizeRegister, LSL, 0));
    __ Ldrb(opcode, MemoryOperand(pc, 0));
    __ Bind(&dispatch);
    {
        __ Mov(rsp, fp);  // resume rsp
        // Tail-jump to the bytecode handler: glue->bcStubEntries[opcode].
        __ Add(bcStub, glueRegister, Operand(opcode, UXTW, FRAME_SLOT_SIZE_LOG2));
        __ Ldr(bcStub, MemoryOperand(bcStub, JSThread::GlueData::GetBCStubEntriesOffset(false)));
        __ Br(bcStub);
    }

    Label getThis;
    Label notUndefined;
    __ Bind(&newObjectRangeReturn);
    {
        // Constructor returned undefined: use "this" as the construction result.
        __ Cmp(ret, Immediate(JSTaggedValue::VALUE_UNDEFINED));
        __ B(Condition::NE, &notUndefined);
        ASSERT(thisOffset < 0);
        __ Bind(&getThis);
        __ Ldur(ret, MemoryOperand(sp, thisOffset)); // update acc with "this"
        __ Ldur(sp, MemoryOperand(sp, spOffset)); // update sp
        __ Mov(rsp, fp); // resume rsp
        __ Sub(pc, pc, jumpSizeRegister); // sub negative jumpSize to advance pc
        __ Ldrb(opcode, MemoryOperand(pc, 0));
        __ Add(bcStub, glueRegister, Operand(opcode, UXTW, FRAME_SLOT_SIZE_LOG2));
        __ Ldr(bcStub, MemoryOperand(bcStub, JSThread::GlueData::GetBCStubEntriesOffset(false)));
        __ Br(bcStub);
    }
    __ Bind(&notUndefined);
    {
        Label notEcmaObject;
        // A heap object has none of the tag-mask bits set.
        __ Mov(temp, Immediate(JSTaggedValue::TAG_HEAPOBJECT_MASK));
        __ And(temp, temp, ret);
        __ Cmp(temp, Immediate(0));
        __ B(Condition::NE, &notEcmaObject);
        // acc is heap object; load its hclass (masking off GC state bits) and
        // range-check the object type against [ECMA_OBJECT_FIRST, ECMA_OBJECT_LAST].
        __ Ldr(temp, MemoryOperand(ret, TaggedObject::HCLASS_OFFSET));
        __ And(temp, temp, LogicalImmediate::Create(TaggedObject::GC_STATE_MASK, RegXSize));
        __ Ldr(temp, MemoryOperand(temp, JSHClass::BIT_FIELD_OFFSET));
        __ And(temp.W(), temp.W(), LogicalImmediate::Create(0xFF, RegWSize));
        __ Cmp(temp.W(), Immediate(static_cast<int64_t>(JSType::ECMA_OBJECT_LAST)));
        __ B(Condition::HI, &notEcmaObject);
        __ Cmp(temp.W(), Immediate(static_cast<int64_t>(JSType::ECMA_OBJECT_FIRST)));
        __ B(Condition::LO, &notEcmaObject);
        // acc is ecma object: keep it as the result and dispatch normally.
        __ Ldur(sp, MemoryOperand(sp, spOffset)); // update sp
        __ Sub(pc, pc, jumpSizeRegister); // sub negative jumpSize to advance pc
        __ Ldrb(opcode, MemoryOperand(pc, 0));
        __ B(&dispatch);

        __ Bind(&notEcmaObject);
        {
            int64_t constructorOffset = static_cast<int64_t>(AsmInterpretedFrame::GetFunctionOffset(false))
                - static_cast<int64_t>(AsmInterpretedFrame::GetSize(false));
            ASSERT(constructorOffset < 0);
            // Decode FunctionKind from the constructor's method literal to decide
            // whether returning a non-object is allowed (base ctor) or an error.
            __ Ldur(temp, MemoryOperand(sp, constructorOffset)); // load constructor
            __ Ldr(temp, MemoryOperand(temp, JSFunctionBase::METHOD_OFFSET));
            __ Ldr(temp, MemoryOperand(temp, Method::EXTRA_LITERAL_INFO_OFFSET));
            __ Lsr(temp.W(), temp.W(), MethodLiteral::FunctionKindBits::START_BIT);
            __ And(temp.W(), temp.W(),
                LogicalImmediate::Create((1LU << MethodLiteral::FunctionKindBits::SIZE) - 1, RegWSize));
            __ Cmp(temp.W(), Immediate(static_cast<int64_t>(FunctionKind::CLASS_CONSTRUCTOR)));
            __ B(Condition::LS, &getThis); // constructor is base: fall back to "this"
            // exception branch: dispatch to the throw-exception handler stub
            {
                __ Mov(opcode, kungfu::BytecodeStubCSigns::ID_NewObjectRangeThrowException);
                __ Ldur(sp, MemoryOperand(sp, spOffset)); // update sp
                __ B(&dispatch);
            }
        }
    }
}
801
802 // ResumeRspAndReturn(uintptr_t acc)
803 // GHC calling convention
804 // X19 - acc
805 // FP - prevSp
806 // X20 - sp
ResumeRspAndReturn(ExtendedAssembler * assembler)807 void AsmInterpreterCall::ResumeRspAndReturn(ExtendedAssembler *assembler)
808 {
809 __ BindAssemblerStub(RTSTUB_ID(ResumeRspAndReturn));
810 Register rsp(SP);
811 Register currentSp(X20);
812
813 [[maybe_unused]] TempRegister1Scope scope1(assembler);
814 Register fpRegister = __ TempRegister1();
815 int64_t offset = static_cast<int64_t>(AsmInterpretedFrame::GetFpOffset(false))
816 - static_cast<int64_t>(AsmInterpretedFrame::GetSize(false));
817 ASSERT(offset < 0);
818 __ Ldur(fpRegister, MemoryOperand(currentSp, offset));
819 __ Mov(rsp, fpRegister);
820
821 // return
822 {
823 __ RestoreFpAndLr();
824 __ Mov(Register(X0), Register(X19));
825 __ Ret();
826 }
827 }
828
829 // ResumeRspAndReturnBaseline(uintptr_t acc)
830 // GHC calling convention
831 // X19 - glue
832 // FP - acc
833 // X20 - prevSp
834 // X21 - sp
835 // X22 - jumpSizeAfterCall
// Baseline-compiler variant of ResumeRspAndReturn: unwind the native stack and
// return acc in X0, applying the same "new object range" result fix-up as
// ResumeRspAndDispatch (jumpSize <= 0 path) before returning.
void AsmInterpreterCall::ResumeRspAndReturnBaseline(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(ResumeRspAndReturnBaseline));
    Register glue(X19);
    Register rsp(SP);
    Register currentSp(X21);

    [[maybe_unused]] TempRegister1Scope scope1(assembler);
    Register fpRegister = __ TempRegister1();
    // Negative offset: fp slot measured from the end of the AsmInterpretedFrame.
    int64_t fpOffset = static_cast<int64_t>(AsmInterpretedFrame::GetFpOffset(false)) -
        static_cast<int64_t>(AsmInterpretedFrame::GetSize(false));
    ASSERT(fpOffset < 0);
    __ Ldur(fpRegister, MemoryOperand(currentSp, fpOffset));
    __ Mov(rsp, fpRegister);
    __ RestoreFpAndLr();
    __ Mov(Register(X0), Register(FP)); // acc arrives in FP (GHC convention); move to X0

    // Check and set result
    Register ret = X0;
    Register jumpSizeRegister = X22;
    Label getThis;
    Label notUndefined;
    Label normalReturn;
    Label newObjectRangeReturn;
    // jumpSize > 0: ordinary call site, return acc unchanged.
    __ Cmp(jumpSizeRegister, Immediate(0));
    __ B(Condition::GT, &normalReturn);

    __ Bind(&newObjectRangeReturn);
    {
        // Constructor returned undefined: substitute "this" as the result.
        __ Cmp(ret, Immediate(JSTaggedValue::VALUE_UNDEFINED));
        __ B(Condition::NE, &notUndefined);

        __ Bind(&getThis);
        int64_t thisOffset = static_cast<int64_t>(AsmInterpretedFrame::GetThisOffset(false)) -
            static_cast<int64_t>(AsmInterpretedFrame::GetSize(false));
        ASSERT(thisOffset < 0);
        __ Ldur(ret, MemoryOperand(currentSp, thisOffset)); // update result
        __ B(&normalReturn);

        __ Bind(&notUndefined);
        {
            // X19 (glue) is no longer needed past this point, so reuse it as scratch.
            Register temp = X19;
            Label notEcmaObject;
            __ Mov(temp, Immediate(JSTaggedValue::TAG_HEAPOBJECT_MASK));
            __ And(temp, temp, ret);
            __ Cmp(temp, Immediate(0));
            __ B(Condition::NE, &notEcmaObject);
            // acc is heap object: load hclass (mask off GC state bits) and range-check
            // the object type against [ECMA_OBJECT_FIRST, ECMA_OBJECT_LAST].
            __ Ldr(temp, MemoryOperand(ret, TaggedObject::HCLASS_OFFSET));
            __ And(temp, temp, LogicalImmediate::Create(TaggedObject::GC_STATE_MASK, RegXSize));
            __ Ldr(temp, MemoryOperand(temp, JSHClass::BIT_FIELD_OFFSET));
            __ And(temp.W(), temp.W(), LogicalImmediate::Create(0xFF, RegWSize));
            __ Cmp(temp.W(), Immediate(static_cast<int64_t>(JSType::ECMA_OBJECT_LAST)));
            __ B(Condition::HI, &notEcmaObject);
            __ Cmp(temp.W(), Immediate(static_cast<int64_t>(JSType::ECMA_OBJECT_FIRST)));
            __ B(Condition::LO, &notEcmaObject);
            // acc is ecma object: keep it as the result.
            __ B(&normalReturn);

            __ Bind(&notEcmaObject);
            {
                int64_t funcOffset = static_cast<int64_t>(AsmInterpretedFrame::GetFunctionOffset(false)) -
                    static_cast<int64_t>(AsmInterpretedFrame::GetSize(false));
                ASSERT(funcOffset < 0);
                // Decode FunctionKind from the constructor's method literal; a base
                // constructor may return a non-object (use "this" instead).
                __ Ldur(temp, MemoryOperand(currentSp, funcOffset)); // load constructor
                __ Ldr(temp, MemoryOperand(temp, JSFunctionBase::METHOD_OFFSET));
                __ Ldr(temp, MemoryOperand(temp, Method::EXTRA_LITERAL_INFO_OFFSET));
                __ Lsr(temp.W(), temp.W(), MethodLiteral::FunctionKindBits::START_BIT);
                __ And(temp.W(), temp.W(),
                    LogicalImmediate::Create((1LU << MethodLiteral::FunctionKindBits::SIZE) - 1, RegWSize));
                __ Cmp(temp.W(), Immediate(static_cast<int64_t>(FunctionKind::CLASS_CONSTRUCTOR)));
                __ B(Condition::LS, &getThis); // constructor is base
                // fall through: derived constructor returned a non-object; return it as-is
            }
        }
    }
    __ Bind(&normalReturn);
    __ Ret();
}
915
916 // ResumeCaughtFrameAndDispatch(uintptr_t glue, uintptr_t sp, uintptr_t pc, uintptr_t constantPool,
917 // uint64_t profileTypeInfo, uint64_t acc, uint32_t hotnessCounter)
918 // GHC calling convention
919 // X19 - glue
920 // FP - sp
921 // X20 - pc
922 // X21 - constantPool
923 // X22 - profileTypeInfo
924 // X23 - acc
925 // X24 - hotnessCounter
ResumeCaughtFrameAndDispatch(ExtendedAssembler * assembler)926 void AsmInterpreterCall::ResumeCaughtFrameAndDispatch(ExtendedAssembler *assembler)
927 {
928 __ BindAssemblerStub(RTSTUB_ID(ResumeCaughtFrameAndDispatch));
929
930 Register glue(X19);
931 Register pc(X20);
932 Register fp(X5);
933 Register opcode(X6, W);
934 Register bcStub(X7);
935
936 Label dispatch;
937 __ Ldr(fp, MemoryOperand(glue, JSThread::GlueData::GetLastFpOffset(false)));
938 __ Cmp(fp, Immediate(0));
939 __ B(Condition::EQ, &dispatch);
940 // up frame
941 __ Mov(Register(SP), fp);
942 // fall through
943 __ Bind(&dispatch);
944 {
945 __ Ldrb(opcode, MemoryOperand(pc, 0));
946 __ Add(bcStub, glue, Operand(opcode, UXTW, FRAME_SLOT_SIZE_LOG2));
947 __ Ldr(bcStub, MemoryOperand(bcStub, JSThread::GlueData::GetBCStubEntriesOffset(false)));
948 __ Br(bcStub);
949 }
950 }
951
952 // ResumeUncaughtFrameAndReturn(uintptr_t glue)
953 // GHC calling convention
954 // X19 - glue
955 // FP - sp
956 // X20 - acc
ResumeUncaughtFrameAndReturn(ExtendedAssembler * assembler)957 void AsmInterpreterCall::ResumeUncaughtFrameAndReturn(ExtendedAssembler *assembler)
958 {
959 __ BindAssemblerStub(RTSTUB_ID(ResumeUncaughtFrameAndReturn));
960
961 Register glue(X19);
962 Register fp(X5);
963 Register acc(X20);
964 Register cppRet(X0);
965
966 __ Ldr(fp, MemoryOperand(glue, JSThread::GlueData::GetLastFpOffset(false)));
967 __ Mov(Register(SP), fp);
968 // this method will return to Execute(cpp calling convention), and the return value should be put into X0.
969 __ Mov(cppRet, acc);
970 __ RestoreFpAndLr();
971 __ Ret();
972 }
973
974 // ResumeRspAndRollback(uintptr_t glue, uintptr_t sp, uintptr_t pc, uintptr_t constantPool,
975 // uint64_t profileTypeInfo, uint64_t acc, uint32_t hotnessCounter, size_t jumpSize)
976 // GHC calling convention
977 // X19 - glue
978 // FP - sp
979 // X20 - pc
980 // X21 - constantPool
981 // X22 - profileTypeInfo
982 // X23 - acc
983 // X24 - hotnessCounter
984 // X25 - jumpSizeAfterCall
ResumeRspAndRollback(ExtendedAssembler * assembler)985 void AsmInterpreterCall::ResumeRspAndRollback(ExtendedAssembler *assembler)
986 {
987 __ BindAssemblerStub(RTSTUB_ID(ResumeRspAndRollback));
988
989 Register glueRegister = __ GlueRegister();
990 Register sp(FP);
991 Register rsp(SP);
992 Register pc(X20);
993 Register jumpSizeRegister(X25);
994
995 Register ret(X23);
996 Register opcode(X6, W);
997 Register bcStub(X7);
998 Register fp(X8);
999
1000 int64_t fpOffset = static_cast<int64_t>(AsmInterpretedFrame::GetFpOffset(false))
1001 - static_cast<int64_t>(AsmInterpretedFrame::GetSize(false));
1002 int64_t spOffset = static_cast<int64_t>(AsmInterpretedFrame::GetBaseOffset(false))
1003 - static_cast<int64_t>(AsmInterpretedFrame::GetSize(false));
1004 int64_t funcOffset = static_cast<int64_t>(AsmInterpretedFrame::GetFunctionOffset(false))
1005 - static_cast<int64_t>(AsmInterpretedFrame::GetSize(false));
1006 ASSERT(fpOffset < 0);
1007 ASSERT(spOffset < 0);
1008 ASSERT(funcOffset < 0);
1009
1010 __ Ldur(fp, MemoryOperand(sp, fpOffset)); // store fp for temporary
1011 __ Ldur(ret, MemoryOperand(sp, funcOffset)); // restore acc
1012 __ Ldur(sp, MemoryOperand(sp, spOffset)); // update sp
1013
1014 __ Add(pc, pc, Operand(jumpSizeRegister, LSL, 0));
1015 __ Ldrb(opcode, MemoryOperand(pc, 0));
1016
1017 __ Mov(rsp, fp); // resume rsp
1018 __ Add(bcStub, glueRegister, Operand(opcode, UXTW, FRAME_SLOT_SIZE_LOG2));
1019 __ Ldr(bcStub, MemoryOperand(bcStub, JSThread::GlueData::GetBCStubEntriesOffset(false)));
1020 __ Br(bcStub);
1021 }
1022
1023 // c++ calling convention
1024 // X0 - glue
1025 // X1 - callTarget
1026 // X2 - method
1027 // X3 - callField
1028 // X4 - receiver
1029 // X5 - value
CallGetter(ExtendedAssembler * assembler)1030 void AsmInterpreterCall::CallGetter(ExtendedAssembler *assembler)
1031 {
1032 __ BindAssemblerStub(RTSTUB_ID(CallGetter));
1033 Label target;
1034
1035 PushAsmInterpBridgeFrame(assembler);
1036 __ Bl(&target);
1037 PopAsmInterpBridgeFrame(assembler);
1038 __ Ret();
1039 __ Bind(&target);
1040 {
1041 JSCallCommonEntry(assembler, JSCallMode::CALL_GETTER, FrameTransitionType::OTHER_TO_OTHER);
1042 }
1043 }
1044
CallSetter(ExtendedAssembler * assembler)1045 void AsmInterpreterCall::CallSetter(ExtendedAssembler *assembler)
1046 {
1047 __ BindAssemblerStub(RTSTUB_ID(CallSetter));
1048 Label target;
1049 PushAsmInterpBridgeFrame(assembler);
1050 __ Bl(&target);
1051 PopAsmInterpBridgeFrame(assembler);
1052 __ Ret();
1053 __ Bind(&target);
1054 {
1055 JSCallCommonEntry(assembler, JSCallMode::CALL_SETTER, FrameTransitionType::OTHER_TO_OTHER);
1056 }
1057 }
1058
CallContainersArgs2(ExtendedAssembler * assembler)1059 void AsmInterpreterCall::CallContainersArgs2(ExtendedAssembler *assembler)
1060 {
1061 __ BindAssemblerStub(RTSTUB_ID(CallContainersArgs2));
1062 Label target;
1063 PushAsmInterpBridgeFrame(assembler);
1064 __ Bl(&target);
1065 PopAsmInterpBridgeFrame(assembler);
1066 __ Ret();
1067 __ Bind(&target);
1068 {
1069 JSCallCommonEntry(assembler, JSCallMode::CALL_THIS_ARG2_WITH_RETURN,
1070 FrameTransitionType::OTHER_TO_OTHER);
1071 }
1072 }
1073
CallContainersArgs3(ExtendedAssembler * assembler)1074 void AsmInterpreterCall::CallContainersArgs3(ExtendedAssembler *assembler)
1075 {
1076 __ BindAssemblerStub(RTSTUB_ID(CallContainersArgs3));
1077 Label target;
1078 PushAsmInterpBridgeFrame(assembler);
1079 __ Bl(&target);
1080 PopAsmInterpBridgeFrame(assembler);
1081 __ Ret();
1082 __ Bind(&target);
1083 {
1084 JSCallCommonEntry(assembler, JSCallMode::CALL_THIS_ARG3_WITH_RETURN,
1085 FrameTransitionType::OTHER_TO_OTHER);
1086 }
1087 }
1088
1089 // c++ calling convention
1090 // X0 - glue
1091 // X1 - callTarget
1092 // X2 - method
1093 // X3 - callField
1094 // X4 - arg0(argc)
1095 // X5 - arg1(arglist)
1096 // X6 - arg3(argthis)
CallReturnWithArgv(ExtendedAssembler * assembler)1097 void AsmInterpreterCall::CallReturnWithArgv(ExtendedAssembler *assembler)
1098 {
1099 __ BindAssemblerStub(RTSTUB_ID(CallReturnWithArgv));
1100 Label target;
1101 PushAsmInterpBridgeFrame(assembler);
1102 __ Bl(&target);
1103 PopAsmInterpBridgeFrame(assembler);
1104 __ Ret();
1105 __ Bind(&target);
1106 {
1107 JSCallCommonEntry(assembler, JSCallMode::CALL_THIS_ARGV_WITH_RETURN,
1108 FrameTransitionType::OTHER_TO_OTHER);
1109 }
1110 }
1111
1112 // preserve all the general registers, except x15 and callee saved registers/
1113 // and call x15
// Call the target held in X15 while preserving all caller-saved general registers
// except X15 itself (callee-saved registers are left to the callee). Used by the
// fast write-barrier stubs so the interrupted code sees no clobbered registers.
void AsmInterpreterCall::PreserveMostCall(ExtendedAssembler* assembler)
{
    // * layout as the following:
    //               +--------------------------+ ---------
    //               |     .  .  .  .  .        |         ^
    // callerSP ---> |--------------------------|         |
    //               |       returnAddr         |         |
    //               |--------------------------|   OptimizedFrame
    //               |       callsiteFp         |         |
    //       fp ---> |--------------------------|         |
    //               |     OPTIMIZED_FRAME      |         v
    //               +--------------------------+ ---------
    //               |           x0             |
    //               +--------------------------+
    //               |           x1             |
    //               +--------------------------+
    //               |           x2             |
    //               +--------------------------+
    //               |           x3             |
    //               +--------------------------+
    //               |           x4             |
    //               +--------------------------+
    //               |           x5             |
    //               +--------------------------+
    //               |           x6             |
    //               +--------------------------+
    //               |           x7             |
    //               +--------------------------+
    //               |           x8             |
    //               +--------------------------+
    //               |           x9             |
    //               +--------------------------+
    //               |           x10            |
    //               +--------------------------+
    //               |           x11            |
    //               +--------------------------+
    //               |           x12            |
    //               +--------------------------+
    //               |           x13            |
    //               +--------------------------+
    //               |           x14            |
    //               +--------------------------+
    //               |           x16            |
    //               +--------------------------+
    //               |           x17            |
    //               +--------------------------+
    //               |           x18            |
    //               +--------------------------+
    //               |           align          |
    // calleeSP ---> +--------------------------+
    {
        // prologue to save fp, frametype, and update fp.
        __ Stp(X29, X30, MemoryOperand(SP, -DOUBLE_SLOT_SIZE, PREINDEX));
        // Zero register means OPTIMIZED_FRAME; X0 shares the pair so it is saved here too.
        __ Stp(X0, Zero, MemoryOperand(SP, -DOUBLE_SLOT_SIZE, PREINDEX));
        __ Add(FP, SP, Immediate(DOUBLE_SLOT_SIZE));
    }
    // 9 register pairs: 8 Stp pairs below plus the X18/align pair at the bottom.
    int32_t PreserveRegPairIndex = 9;
    // x0~x14,x16,x17,x18 should be preserved,
    // other general registers are callee saved register, callee will save them.
    // The pair index decrements before each store, so X1/X2 land at the highest
    // pair slot (offset 8 * DOUBLE_SLOT_SIZE) and X16/X17 at the lowest (offset 16).
    __ Sub(SP, SP, Immediate(DOUBLE_SLOT_SIZE * PreserveRegPairIndex));
    __ Stp(X1, X2, MemoryOperand(SP, DOUBLE_SLOT_SIZE * (--PreserveRegPairIndex)));
    __ Stp(X3, X4, MemoryOperand(SP, DOUBLE_SLOT_SIZE * (--PreserveRegPairIndex)));
    __ Stp(X5, X6, MemoryOperand(SP, DOUBLE_SLOT_SIZE * (--PreserveRegPairIndex)));
    __ Stp(X7, X8, MemoryOperand(SP, DOUBLE_SLOT_SIZE * (--PreserveRegPairIndex)));
    __ Stp(X9, X10, MemoryOperand(SP, DOUBLE_SLOT_SIZE * (--PreserveRegPairIndex)));
    __ Stp(X11, X12, MemoryOperand(SP, DOUBLE_SLOT_SIZE * (--PreserveRegPairIndex)));
    __ Stp(X13, X14, MemoryOperand(SP, DOUBLE_SLOT_SIZE * (--PreserveRegPairIndex)));
    __ Stp(X16, X17, MemoryOperand(SP, DOUBLE_SLOT_SIZE * (--PreserveRegPairIndex)));
    // X18 goes just above the alignment slot at the bottom of the area.
    __ Str(X18, MemoryOperand(SP, FRAME_SLOT_SIZE));
    __ Blr(X15);
    // Restore in reverse order; the index climbs back from 1 to 9.
    __ Ldr(X18, MemoryOperand(SP, FRAME_SLOT_SIZE));
    __ Ldp(X16, X17, MemoryOperand(SP, DOUBLE_SLOT_SIZE * (PreserveRegPairIndex++)));
    __ Ldp(X13, X14, MemoryOperand(SP, DOUBLE_SLOT_SIZE * (PreserveRegPairIndex++)));
    __ Ldp(X11, X12, MemoryOperand(SP, DOUBLE_SLOT_SIZE * (PreserveRegPairIndex++)));
    __ Ldp(X9, X10, MemoryOperand(SP, DOUBLE_SLOT_SIZE * (PreserveRegPairIndex++)));
    __ Ldp(X7, X8, MemoryOperand(SP, DOUBLE_SLOT_SIZE * (PreserveRegPairIndex++)));
    __ Ldp(X5, X6, MemoryOperand(SP, DOUBLE_SLOT_SIZE * (PreserveRegPairIndex++)));
    __ Ldp(X3, X4, MemoryOperand(SP, DOUBLE_SLOT_SIZE * (PreserveRegPairIndex++)));
    __ Ldp(X1, X2, MemoryOperand(SP, DOUBLE_SLOT_SIZE * (PreserveRegPairIndex++)));
    // X0 lives in the prologue pair just above this 9-pair area (index is 9 again).
    __ Ldr(X0, MemoryOperand(SP, DOUBLE_SLOT_SIZE * PreserveRegPairIndex));
    {
        // epilogue to restore sp, fp, lr.
        // Skip x0 slot and frametype slot
        __ Add(SP, SP, Immediate(DOUBLE_SLOT_SIZE * PreserveRegPairIndex +
            FRAME_SLOT_SIZE + FRAME_SLOT_SIZE));
        __ Ldp(FP, X30, MemoryOperand(SP, DOUBLE_SLOT_SIZE, AddrMode::POSTINDEX));
        __ Ret();
    }
}
1204
1205 // ASMFastWriteBarrier(GateRef glue, GateRef obj, GateRef offset, GateRef value)
1206 // c calling convention, but preserve all general registers except %x15
1207 // %x0 - glue
1208 // %x1 - obj
1209 // %x2 - offset
1210 // %x3 - value
// Fast-path post-write barrier: classifies the stored value by its region flag
// and either returns immediately, records the reference inline, or tail-calls
// the slow-path common stub. Only X15 may be clobbered (see PreserveMostCall).
void AsmInterpreterCall::ASMFastWriteBarrier(ExtendedAssembler* assembler)
{
    // valid region flag are as follows, assume it will be ALWAYS VALID.
    // Judge the region of value with:
    //                                      "young"        "sweepable share"    "readonly share"
    // region flag:                 0x08, 0x09, [0x0A, 0x11],   [0x12, 0x15],      0x16
    // value is share:                          [0x12, 0x16]  =>  valueMaybeSweepableShare
    //     readonly share:                      0x16          =>  return
    //     sweepable share:                     [0x12, 0x15]  =>  needShareBarrier
    // value is not share:          0x08, 0x09, [0x0A, 0x11], =>  valueNotShare
    //     value is young :               0x09                =>  needCallNotShare
    //     value is not young :     0x08, [0x0A, 0x11],       =>  checkMark
    ASSERT(IN_YOUNG_SPACE < SHARED_SPACE_BEGIN && SHARED_SPACE_BEGIN <= SHARED_SWEEPABLE_SPACE_BEGIN &&
        SHARED_SWEEPABLE_SPACE_END < IN_SHARED_READ_ONLY_SPACE && IN_SHARED_READ_ONLY_SPACE == HEAP_SPACE_END);
    __ BindAssemblerStub(RTSTUB_ID(ASMFastWriteBarrier));

    Label needCall;
    Label checkMark;
    Label needCallNotShare;
    Label needShareBarrier;
    Label valueNotShare;
    Label valueMaybeSweepableShare;
    {
        // int8_t *valueRegion = value & (~(JSTaggedValue::TAG_MARK | DEFAULT_REGION_MASK))
        // int8_t valueFlag = *valueRegion
        // if (valueFlag >= SHARED_SWEEPABLE_SPACE_BEGIN){
        //    goto valueMaybeSweepableShare
        // }

        __ And(X15, X3, LogicalImmediate::Create(~(JSTaggedValue::TAG_MARK | DEFAULT_REGION_MASK), RegXSize));
        // X15 is the region address of value.
        __ Ldrb(Register(X15, W), MemoryOperand(X15, 0));
        // X15 is the flag load from region of value.
        __ Cmp(Register(X15, W), Immediate(SHARED_SWEEPABLE_SPACE_BEGIN));
        __ B(GE, &valueMaybeSweepableShare);
        // if value may be SweepableShare, goto valueMaybeSweepableShare
    }
    __ Bind(&valueNotShare);
    {
        // valueNotShare:
        // if (valueFlag != IN_YOUNG_SPACE){
        //    goto checkMark
        // }
        // int8_t *objRegion = obj & (~(JSTaggedValue::TAG_MARK | DEFAULT_REGION_MASK))
        // int8_t objFlag = *objRegion
        // if (objFlag != IN_YOUNG_SPACE){
        //    goto needCallNotShare
        // }

        __ Cmp(Register(X15, W), Immediate(RegionSpaceFlag::IN_YOUNG_SPACE));
        __ B(NE, &checkMark);
        // if value is not in young, goto checkMark

        __ And(X15, X1, LogicalImmediate::Create(~(JSTaggedValue::TAG_MARK | DEFAULT_REGION_MASK), RegXSize));
        // X15 is the region address of obj.
        __ Ldrb(Register(X15, W), MemoryOperand(X15, 0));
        // X15 is the flag load from region of obj.
        __ Cmp(Register(X15, W), Immediate(RegionSpaceFlag::IN_YOUNG_SPACE));
        __ B(NE, &needCallNotShare);
        // if obj is not in young, goto needCallNotShare (old -> young needs a remembered set entry)
    }

    __ Bind(&checkMark);
    {
        // checkMark:
        // int8_t GCStateBitField = *(glue+GCStateBitFieldOffset)
        // if (GCStateBitField & JSThread::CONCURRENT_MARKING_BITFIELD_MASK != 0) {
        //    goto needCallNotShare
        // }
        // return

        __ Mov(X15, JSThread::GlueData::GetGCStateBitFieldOffset(false));
        __ Ldrb(Register(X15, W), MemoryOperand(X0, Register(X15), UXTX));
        __ Tst(Register(X15, W), LogicalImmediate::Create(JSThread::CONCURRENT_MARKING_BITFIELD_MASK, RegWSize));
        __ B(NE, &needCallNotShare);
        // if GCState is not READY_TO_MARK, go to needCallNotShare.
        __ Ret();
    }

    __ Bind(&valueMaybeSweepableShare);
    {
        // valueMaybeSweepableShare:
        // if (valueFlag != IN_SHARED_READ_ONLY_SPACE){
        //    goto needShareBarrier
        // }
        // return
        __ Cmp(Register(X15, W), Immediate(RegionSpaceFlag::IN_SHARED_READ_ONLY_SPACE));
        __ B(NE, &needShareBarrier);
        __ Ret();  // read-only shared values never need a barrier
    }

    __ Bind(&needCallNotShare);
    {
        // Stash the glue-relative offset of the non-shared slow-path stub entry in X15.
        int32_t NonSValueBarrier = static_cast<int32_t>(JSThread::GlueData::GetCOStubEntriesOffset(false)) +
            kungfu::CommonStubCSigns::SetNonSValueWithBarrier * FRAME_SLOT_SIZE;
        __ Mov(X15, NonSValueBarrier);
    }
    __ Bind(&needCall);
    {
        // needCall expects X15 to hold the stub-entry offset within glue; load the
        // entry and call it with all registers (except X15) preserved.
        __ Ldr(X15, MemoryOperand(X0, Register(X15), UXTX));
        PreserveMostCall(assembler);
    }
    __ Bind(&needShareBarrier);
    {
        ASMFastSharedWriteBarrier(assembler, needCall);
    }
}
1318
1319 // %x0 - glue
1320 // %x1 - obj
1321 // %x2 - offset
1322 // %x3 - value
// Fast path for storing a sweepable-shared value: record the reference in the
// local-to-share bit set inline when possible, otherwise branch to needCall with
// X15 pre-loaded with the shared slow-path stub-entry offset. Spills X16/X17
// only on the inline path; X15 is the sole freely clobbered register.
void AsmInterpreterCall::ASMFastSharedWriteBarrier(ExtendedAssembler* assembler, Label& needCall)
{
    Label checkBarrierForSharedValue;
    Label restoreScratchRegister;
    Label callSharedBarrier;
    {
        // int8_t *objRegion = obj & (~(JSTaggedValue::TAG_MARK | DEFAULT_REGION_MASK))
        // int8_t objFlag = *objRegion
        // if (objFlag >= SHARED_SPACE_BEGIN){
        //    // share to share, just check the barrier
        //    goto checkBarrierForSharedValue
        // }
        __ And(X15, X1, LogicalImmediate::Create(~(JSTaggedValue::TAG_MARK | DEFAULT_REGION_MASK), RegXSize));
        __ Ldrb(Register(X15, W), MemoryOperand(X15, 0));
        // X15 is the flag load from region of obj.
        __ Cmp(Register(X15, W), Immediate(RegionSpaceFlag::SHARED_SPACE_BEGIN));
        __ B(GE, &checkBarrierForSharedValue); // if objflag >= SHARED_SPACE_BEGIN => checkBarrierForSharedValue
    }
    {
        // int8_t *objRegion = obj & (~(JSTaggedValue::TAG_MARK | DEFAULT_REGION_MASK))
        // int8_t *localToShareSet = *(objRegion + LocalToShareSetOffset)
        // if (localToShareSet == 0){
        //    goto callSharedBarrier
        // }
        __ And(X15, X1, LogicalImmediate::Create(~(JSTaggedValue::TAG_MARK | DEFAULT_REGION_MASK), RegXSize));
        __ Ldr(X15, MemoryOperand(X15, Region::PackedData::GetLocalToShareSetOffset(false)));
        // X15 is localToShareSet for obj region.
        __ Cbz({X15, X}, &callSharedBarrier); // if localToShareSet == 0 => callSharedBarrier
    }
    {
        // X16, X17 will be used as scratch register, spill them.
        // the caller will call this function with inline asm, it will not save any registers except x15.
        // So we need spill and restore x16, x17 when we need them as scratch register.
        {
            __ Stp(X16, X17, MemoryOperand(SP, -DOUBLE_SLOT_SIZE, PREINDEX));
        }
        // int64_t objOffset = obj & DEFAULT_REGION_MASK
        // int64_t slotOffset = objOffset + offset
        __ And(X16, X1, LogicalImmediate::Create(DEFAULT_REGION_MASK, RegXSize));
        __ Add(X16, X16, Operand(Register(X2)));

        // the logic to get mask in stub_builder.cpp
        //     [63-------------------------35][34------------------------8][7---3][2-0]
        //     bitOffset:                        bbbbbbbbbbbbbbbbbbbbbbbb bbbcc ccc
        //     bitPerWordMask:                                               11 111
        //     indexInWord = And bitoffset bitPerWordMask
        //     indexInWord:                                                  cc ccc
        //     mask = 1 << indexInWord

        // the logic to test bit set value here:
        //     [63-------------------------35][34------------------------8][7---3][2-0]
        //     slotOffset: aaaaaaaaaaaaaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbbbbbbbbbbbbbbb ccccc ddd
        //     Ubfm X16 slotOffset 3 7
        //     indexInWord:                                                  cc ccc
        __ Ubfm(X17, X16, TAGGED_TYPE_SIZE_LOG, TAGGED_TYPE_SIZE_LOG + GCBitset::BIT_PER_WORD_LOG2 - 1);

        // the logic to get byteIndex in stub_builder.cpp
        //     [63-------------------------35][34------------------------8][7---3][2-0]
        //     slotOffset: aaaaaaaaaaaaaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbbbbbbbbbbbbbbb ccccc ddd
        //  1. bitOffsetPtr = LSR TAGGED_TYPE_SIZE_LOG(3) slotOffset
        //     bitOffsetPtr:    aaaaaaaaaaaaaaaaaaaaaaaaaa aaabbbbbbbbbbbbbbbbbbbbbbbb bbbcc ccc
        //  2. bitOffset = TruncPtrToInt32 bitOffsetPtr
        //     bitOffset:                        bbbbbbbbbbbbbbbbbbbbbbbb bbbcc ccc
        //  3. index = LSR BIT_PER_WORD_LOG2(5) bitOffset
        //     index:                                 bbbbbbbbbbbbbbbbbbb bbbbb bbb
        //  4. byteIndex = Mul index BYTE_PER_WORD(4)
        //     byteIndex:                           bbbbbbbbbbbbbbbbbbbbb bbbbb b00

        // the logic to get byteIndex here:
        //     [63-------------------------35][34------------------------8][7---3][2-0]
        //     slotOffset: aaaaaaaaaaaaaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbbbbbbbbbbbbbbb ccccc ddd
        //     Ubfm X16 slotOffset 8 34
        //     index:                                 bbbbbbbbbbbbbbbbbbb bbbbb bbb
        __ Ubfm(X16, X16, TAGGED_TYPE_SIZE_LOG + GCBitset::BIT_PER_WORD_LOG2,
            sizeof(uint32_t) * GCBitset::BIT_PER_BYTE + TAGGED_TYPE_SIZE_LOG - 1);
        __ Add(X15, X15, Operand(Register(X16), LSL, GCBitset::BYTE_PER_WORD_LOG2));
        __ Add(X15, X15, Immediate(RememberedSet::GCBITSET_DATA_OFFSET));
        // X15 is the address of bitset value. X15 = X15 + X16 << BYTE_PER_WORD_LOG2 + GCBITSET_DATA_OFFSET

        // mask = 1 << indexInWord
        __ Mov(Register(X16, W), 1);
        __ Lsl(Register(X17, W), Register(X16, W), Register(X17, W));  // X17 is the mask

        __ Ldr(Register(X16, W), MemoryOperand(X15, 0));  // x16: oldsetValue
        __ Tst(Register(X16, W), Register(X17, W));
        // Bit already set: another store recorded this slot before; nothing to do.
        __ B(NE, &restoreScratchRegister);
        __ Orr(Register(X16, W), Register(X16, W), Register(X17, W));
        __ Str(Register(X16, W), MemoryOperand(X15, 0));
    }
    __ Bind(&restoreScratchRegister);
    {
        // Only reached via the spill path above, so the Ldp matches the earlier Stp.
        __ Ldp(X16, X17, MemoryOperand(SP, DOUBLE_SLOT_SIZE, POSTINDEX));
    }
    __ Bind(&checkBarrierForSharedValue);
    {
        // checkBarrierForSharedValue:
        // int8_t GCStateBitField = *(glue+SharedGCStateBitFieldOffset)
        // if (GCStateBitField & JSThread::SHARED_CONCURRENT_MARKING_BITFIELD_MASK != 0) {
        //    goto callSharedBarrier
        // }
        // return
        __ Mov(X15, JSThread::GlueData::GetSharedGCStateBitFieldOffset(false));
        __ Ldrb(Register(X15, W), MemoryOperand(X0, Register(X15), UXTX));
        static_assert(JSThread::SHARED_CONCURRENT_MARKING_BITFIELD_MASK == 1 && "Tbnz can't handle other bit mask");
        __ Tbnz(Register(X15, W), 0, &callSharedBarrier);
        // if shared GCState is not READY_TO_MARK, go to callSharedBarrier.
        __ Ret();
    }

    __ Bind(&callSharedBarrier);
    {
        // Load X15 with the glue-relative offset of the shared slow-path stub entry,
        // then jump to needCall (in ASMFastWriteBarrier), which performs the call.
        int32_t SValueBarrierOffset = static_cast<int32_t>(JSThread::GlueData::GetCOStubEntriesOffset(false)) +
            kungfu::CommonStubCSigns::SetSValueWithBarrier * FRAME_SLOT_SIZE;
        __ Mov(X15, SValueBarrierOffset);
        __ B(&needCall);
    }
}
1440
1441 // Generate code for generator re-entering asm interpreter
1442 // c++ calling convention
1443 // Input: %X0 - glue
1444 // %X1 - context(GeneratorContext)
GeneratorReEnterAsmInterp(ExtendedAssembler * assembler)1445 void AsmInterpreterCall::GeneratorReEnterAsmInterp(ExtendedAssembler *assembler)
1446 {
1447 __ BindAssemblerStub(RTSTUB_ID(GeneratorReEnterAsmInterp));
1448 Label target;
1449 size_t begin = __ GetCurrentPosition();
1450 PushAsmInterpEntryFrame(assembler);
1451 __ Bl(&target);
1452 PopAsmInterpEntryFrame(assembler);
1453 size_t end = __ GetCurrentPosition();
1454 if ((end - begin) != FrameCompletionPos::ARM64EntryFrameDuration) {
1455 LOG_COMPILER(FATAL) << (end - begin) << " != " << FrameCompletionPos::ARM64EntryFrameDuration
1456 << "This frame has been modified, and the offset EntryFrameDuration should be updated too.";
1457 }
1458 __ Ret();
1459 __ Bind(&target);
1460 {
1461 GeneratorReEnterAsmInterpDispatch(assembler);
1462 }
1463 }
1464
// Rebuild an asm-interpreted frame from a suspended GeneratorContext (X1) and
// dispatch into the bytecode handler: restores the register snapshot from the
// context's regs array, pushes the generator frame state, then calls the BC stub.
void AsmInterpreterCall::GeneratorReEnterAsmInterpDispatch(ExtendedAssembler *assembler)
{
    Label pushFrameState;
    Label stackOverflow;
    Register glue = __ GlueRegister();
    Register contextRegister(X1);      // GeneratorContext
    Register spRegister(SP);
    Register pc(X8);
    Register prevSpRegister(FP);
    Register callTarget(X4);           // the generator's JSFunction
    Register method(X5);
    Register temp(X6); // can not be used to store any variable
    Register currentSlotRegister(X7);  // tracks the next slot to push
    Register fpRegister(X9);           // native sp before frame construction
    Register thisRegister(X25);
    Register nRegsRegister(X26, W);    // number of snapshot registers to restore
    Register regsArrayRegister(X27);   // TaggedArray holding the snapshot
    Register newSp(X28);
    __ Ldr(callTarget, MemoryOperand(contextRegister, GeneratorContext::GENERATOR_METHOD_OFFSET));
    __ Ldr(method, MemoryOperand(callTarget, JSFunctionBase::METHOD_OFFSET));
    __ PushFpAndLr();
    // save fp
    __ Mov(fpRegister, spRegister);
    __ Mov(currentSlotRegister, spRegister);
    // Reserve enough sp space to prevent stack parameters from being covered by cpu profiler.
    __ Ldr(temp, MemoryOperand(glue, JSThread::GlueData::GetStackLimitOffset(false)));
    __ Mov(Register(SP), temp);
    // push context regs: restore the suspended register snapshot onto the stack
    __ Ldr(nRegsRegister, MemoryOperand(contextRegister, GeneratorContext::GENERATOR_NREGS_OFFSET));
    __ Ldr(thisRegister, MemoryOperand(contextRegister, GeneratorContext::GENERATOR_THIS_OFFSET));
    __ Ldr(regsArrayRegister, MemoryOperand(contextRegister, GeneratorContext::GENERATOR_REGS_ARRAY_OFFSET));
    __ Add(regsArrayRegister, regsArrayRegister, Immediate(TaggedArray::DATA_OFFSET));
    PushArgsWithArgv(assembler, glue, nRegsRegister, regsArrayRegister, temp,
                     currentSlotRegister, &pushFrameState, &stackOverflow);

    __ Bind(&pushFrameState);
    __ Mov(newSp, currentSlotRegister);
    // push frame state
    PushGeneratorFrameState(assembler, prevSpRegister, fpRegister, currentSlotRegister, callTarget, thisRegister,
                            method, contextRegister, pc, temp);
    // SP must stay 16-byte aligned per the AArch64 ABI before the call.
    __ Align16(currentSlotRegister);
    __ Mov(Register(SP), currentSlotRegister);
    // call bc stub
    CallBCStub(assembler, newSp, glue, callTarget, method, pc, temp);

    __ Bind(&stackOverflow);
    {
        // Undo the reserved stack (restore fpRegister) and raise the overflow exception.
        ThrowStackOverflowExceptionAndReturn(assembler, glue, fpRegister, temp);
    }
}
1515
// Push the implicit call arguments demanded by the callee's callField bits:
// optionally `this`, then `new.target`, then the call target itself. Each
// value is pushed with a pre-indexed store through currentSlotRegister (the
// downward-growing interpreter stack), then control falls through into
// PushVregs, which finishes the frame and dispatches.
// mode: static call mode; decides whether this/new.target are real caller
//       arguments or must default to undefined.
// stackOverflow: label taken (inside PushVregs) on interpreter stack overflow.
PushCallThis(ExtendedAssembler * assembler,JSCallMode mode,Label * stackOverflow,FrameTransitionType type)1516 void AsmInterpreterCall::PushCallThis(ExtendedAssembler *assembler,
1517 JSCallMode mode, Label *stackOverflow, FrameTransitionType type)
1518 {
1519 Register callFieldRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_FIELD);
1520 Register callTargetRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_TARGET);
1521 Register thisRegister = __ AvailableRegister2();
1522 Register currentSlotRegister = __ AvailableRegister3();
1523
1524 Label pushVregs;
1525 Label pushNewTarget;
1526 Label pushCallTarget;
// Whether the call mode statically carries this/new.target arguments.
1527 bool haveThis = kungfu::AssemblerModule::JSModeHaveThisArg(mode);
1528 bool haveNewTarget = kungfu::AssemblerModule::JSModeHaveNewTargetArg(mode);
1529 if (!haveThis) {
1530 __ Mov(thisRegister, Immediate(JSTaggedValue::VALUE_UNDEFINED)); // default this: undefined
1531 } else {
// The mode-specific register already holding `this`; copy only if different.
1532 Register thisArgRegister = GetThisRegsiter(assembler, mode, thisRegister);
1533 if (thisRegister.GetId() != thisArgRegister.GetId()) {
1534 __ Mov(thisRegister, thisArgRegister);
1535 }
1536 }
// No call-type bits set: the method declares none of the implicit args.
1537 __ Tst(callFieldRegister, LogicalImmediate::Create(CALL_TYPE_MASK, RegXSize));
1538 __ B(Condition::EQ, &pushVregs);
// Method does not declare `this`: skip straight to new.target handling.
1539 __ Tbz(callFieldRegister, MethodLiteral::HaveThisBit::START_BIT, &pushNewTarget);
1540 if (!haveThis) {
// Caller supplied no `this` but the method declares one: push undefined.
1541 [[maybe_unused]] TempRegister1Scope scope1(assembler);
1542 Register tempRegister = __ TempRegister1();
1543 __ Mov(tempRegister, Immediate(JSTaggedValue::VALUE_UNDEFINED));
1544 __ Str(tempRegister, MemoryOperand(currentSlotRegister, -FRAME_SLOT_SIZE, AddrMode::PREINDEX));
1545 } else {
1546 __ Str(thisRegister, MemoryOperand(currentSlotRegister, -FRAME_SLOT_SIZE, AddrMode::PREINDEX));
1547 }
1548 __ Bind(&pushNewTarget);
1549 {
// Method does not declare new.target: skip to pushing the call target.
1550 __ Tbz(callFieldRegister, MethodLiteral::HaveNewTargetBit::START_BIT, &pushCallTarget);
1551 if (!haveNewTarget) {
// Mode has no new.target argument: push undefined as the default.
1552 [[maybe_unused]] TempRegister1Scope scope1(assembler);
1553 Register newTarget = __ TempRegister1();
1554 __ Mov(newTarget, Immediate(JSTaggedValue::VALUE_UNDEFINED));
1555 __ Str(newTarget, MemoryOperand(currentSlotRegister, -FRAME_SLOT_SIZE, AddrMode::PREINDEX));
1556 } else {
1557 [[maybe_unused]] TempRegister1Scope scope1(assembler);
1558 Register defaultRegister = __ TempRegister1();
1559 Register newTargetRegister = GetNewTargetRegsiter(assembler, mode, defaultRegister);
1560 __ Str(newTargetRegister, MemoryOperand(currentSlotRegister, -FRAME_SLOT_SIZE, AddrMode::PREINDEX));
1561 }
1562 }
1563 __ Bind(&pushCallTarget);
1564 {
// Push the function object itself only if the method declares HaveFunc.
1565 __ Tbz(callFieldRegister, MethodLiteral::HaveFuncBit::START_BIT, &pushVregs);
1566 __ Str(callTargetRegister, MemoryOperand(currentSlotRegister, -FRAME_SLOT_SIZE, AddrMode::PREINDEX));
1567 }
1568 __ Bind(&pushVregs);
1569 {
1570 PushVregs(assembler, stackOverflow, type);
1571 }
1572 }
1573
// Push the callee's declared vregs (initialized to undefined), then the
// interpreter frame state, and dispatch into the first bytecode handler.
// For *_TO_BASELINE_CHECK transition types, if the function already has
// baseline jit code, jump directly into that code instead of dispatching
// through the interpreter.
// stackOverflow: label taken if pushing the vregs would overflow the stack.
PushVregs(ExtendedAssembler * assembler,Label * stackOverflow,FrameTransitionType type)1574 void AsmInterpreterCall::PushVregs(ExtendedAssembler *assembler,
1575 Label *stackOverflow, FrameTransitionType type)
1576 {
1577 Register glue = __ GlueRegister();
1578 Register prevSpRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::SP);
1579 Register callTargetRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_TARGET);
1580 Register methodRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::METHOD);
1581 Register callFieldRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_FIELD);
1582 Register fpRegister = __ AvailableRegister1();
1583 Register thisRegister = __ AvailableRegister2();
1584 Register currentSlotRegister = __ AvailableRegister3();
1585
1586 Label pushFrameStateAndCall;
1587 [[maybe_unused]] TempRegister1Scope scope1(assembler);
1588 Register tempRegister = __ TempRegister1();
1589 // args register can be reused now.
1590 Register newSpRegister = __ AvailableRegister4();
1591 Register numVregsRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG1);
// Decode the declared vreg count from callField, then fill that many slots
// with undefined (branches to stackOverflow on overflow).
1592 GetNumVregsFromCallField(assembler, callFieldRegister, numVregsRegister);
1593 PushUndefinedWithArgc(assembler, glue, numVregsRegister, tempRegister, currentSlotRegister, &pushFrameStateAndCall,
1594 stackOverflow);
1595 // fall through
1596 __ Bind(&pushFrameStateAndCall);
1597 {
// newSp marks the top of the frame about to be pushed.
1598 __ Mov(newSpRegister, currentSlotRegister);
1599
1600 [[maybe_unused]] TempRegister2Scope scope2(assembler);
1601 Register pcRegister = __ TempRegister2();
1602 PushFrameState(assembler, prevSpRegister, fpRegister, currentSlotRegister, callTargetRegister, thisRegister,
1603 methodRegister, pcRegister, tempRegister);
1604
1605 __ Align16(currentSlotRegister);
1606 __ Mov(Register(SP), currentSlotRegister);
1607 if (type == FrameTransitionType::OTHER_TO_BASELINE_CHECK ||
1608 type == FrameTransitionType::BASELINE_TO_BASELINE_CHECK) {
// Check whether baseline code exists; TODO: temporary modification, needs review.
1609 Label baselineCodeUndefined;
1610 __ Ldr(tempRegister, MemoryOperand(callTargetRegister, JSFunction::BASELINECODE_OFFSET));
1611 __ Cmp(tempRegister, Immediate(JSTaggedValue::VALUE_UNDEFINED));
1612 __ B(Condition::EQ, &baselineCodeUndefined);
1613
1614 // check is compiling
1615 __ Cmp(tempRegister, Immediate(JSTaggedValue::VALUE_HOLE));
1616 __ B(Condition::EQ, &baselineCodeUndefined);
1617
// Load the native entry of the baseline MachineCode object; use ldur when
// the offset is not 8-aligned (ldr's scaled immediate requires alignment).
1618 if (MachineCode::FUNCADDR_OFFSET % 8 == 0) { // 8: imm in 64-bit ldr insn must be a multiple of 8
1619 __ Ldr(tempRegister, MemoryOperand(tempRegister, MachineCode::FUNCADDR_OFFSET));
1620 } else {
1621 ASSERT(MachineCode::FUNCADDR_OFFSET < 256); // 256: imm in ldur insn must be in the range -256 to 255
1622 __ Ldur(tempRegister, MemoryOperand(tempRegister, MachineCode::FUNCADDR_OFFSET));
1623 }
// Pin glue/method into X19/X21 — presumably the register convention the
// baseline code entry expects; TODO(review): confirm against baseline ABI.
1624 if (glue != X19) {
1625 __ Mov(X19, glue);
1626 }
1627 if (methodRegister != X21) {
1628 __ Mov(X21, methodRegister);
1629 }
// Overwrite the pc slot of the frame just pushed with the baseline-jit
// marker so the unwinder knows this frame is running baseline code.
1630 __ Mov(currentSlotRegister, Immediate(BASELINEJIT_PC_FLAG));
1631 // -3: frame type, prevSp, pc
1632 __ Stur(currentSlotRegister, MemoryOperand(newSpRegister, -3 * FRAME_SLOT_SIZE));
1633 __ Mov(Register(X29), newSpRegister);
1634 __ Br(tempRegister);
1635 __ Bind(&baselineCodeUndefined);
1636 }
// No baseline code: dispatch to the interpreter bytecode handler.
1637 DispatchCall(assembler, pcRegister, newSpRegister);
1638 }
1639 }
1641
1642 // Input: X19 - glue
1643 // FP - sp
1644 // X20 - callTarget
1645 // X21 - method
// Load the asm-interpreter dispatch state and jump to the bytecode handler
// for the opcode at pcRegister (or to the ExceptionHandler stub when
// hasException is set).
// Register state established here for the handler:
//   X19 - glue, FP - newSp, X20 - pc, X21 - constantpool,
//   X22 - profileTypeInfo, X23 - acc (VALUE_HOLE unless accRegister given),
//   X24 - literal info (hotness counter)
// accRegister: when not INVALID_REG it must already be X23 and is kept as-is.
DispatchCall(ExtendedAssembler * assembler,Register pcRegister,Register newSpRegister,Register accRegister,bool hasException)1646 void AsmInterpreterCall::DispatchCall(ExtendedAssembler *assembler, Register pcRegister,
1647 Register newSpRegister, Register accRegister, bool hasException)
1648 {
1649 Register glueRegister = __ GlueRegister();
1650 Register callTargetRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_TARGET);
1651 Register methodRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::METHOD);
1652
1653 if (glueRegister.GetId() != X19) {
1654 __ Mov(Register(X19), glueRegister);
1655 }
1656 __ Ldrh(Register(X24, W), MemoryOperand(methodRegister, Method::LITERAL_INFO_OFFSET));
1657 if (accRegister == INVALID_REG) {
1658 __ Mov(Register(X23), Immediate(JSTaggedValue::VALUE_HOLE));
1659 } else {
1660 ASSERT(accRegister == Register(X23));
1661 }
// profileTypeInfo is held behind a cell: function -> cell -> value.
1662 __ Ldr(Register(X22), MemoryOperand(callTargetRegister, JSFunction::RAW_PROFILE_TYPE_INFO_OFFSET));
1663 __ Ldr(Register(X22), MemoryOperand(Register(X22), ProfileTypeInfoCell::VALUE_OFFSET));
1664 __ Ldr(Register(X21), MemoryOperand(methodRegister, Method::CONSTANT_POOL_OFFSET));
1665 __ Mov(Register(X20), pcRegister);
1666 __ Mov(Register(FP), newSpRegister);
1667
1668 Register bcIndexRegister = __ AvailableRegister1();
1669 Register tempRegister = __ AvailableRegister2();
1670 if (hasException) {
1671 __ Mov(bcIndexRegister.W(), Immediate(kungfu::BytecodeStubCSigns::ID_ExceptionHandler));
1672 } else {
// First opcode byte selects the handler.
1673 __ Ldrb(bcIndexRegister.W(), MemoryOperand(pcRegister, 0));
1674 }
// Index into the glue-relative bytecode-stub entry table (8-byte entries).
1675 __ Add(tempRegister, glueRegister, Operand(bcIndexRegister.W(), UXTW, FRAME_SLOT_SIZE_LOG2));
1676 __ Ldr(tempRegister, MemoryOperand(tempRegister, JSThread::GlueData::GetBCStubEntriesOffset(false)));
1677 __ Br(tempRegister);
1678 }
1679
// Push the ASM_INTERPRETER_FRAME fields onto the interpreter stack through
// currentSlot (pre-indexed, growing down): [prevSp | frame type],
// [fp | pc], [env | jumpSizeAfterCall(0)], [this | acc(Hole)], callTarget.
// pc is loaded from the method's bytecode array; env from the callTarget's
// lexical environment. `op` is a scratch register and is clobbered.
PushFrameState(ExtendedAssembler * assembler,Register prevSp,Register fp,Register currentSlot,Register callTarget,Register thisObj,Register method,Register pc,Register op)1680 void AsmInterpreterCall::PushFrameState(ExtendedAssembler *assembler, Register prevSp, Register fp,
1681 Register currentSlot, Register callTarget, Register thisObj, Register method, Register pc, Register op)
1682 {
1683 __ Mov(op, Immediate(static_cast<int32_t>(FrameType::ASM_INTERPRETER_FRAME)));
1684 __ Stp(prevSp, op, MemoryOperand(currentSlot, -2 * FRAME_SLOT_SIZE, AddrMode::PREINDEX)); // -2: frame type & prevSp
1685 __ Ldr(pc, MemoryOperand(method, Method::NATIVE_POINTER_OR_BYTECODE_ARRAY_OFFSET));
1686 __ Stp(fp, pc, MemoryOperand(currentSlot, -2 * FRAME_SLOT_SIZE, AddrMode::PREINDEX)); // -2: pc & fp
1687 __ Ldr(op, MemoryOperand(callTarget, JSFunction::LEXICAL_ENV_OFFSET));
1688 __ Stp(op, Register(Zero), MemoryOperand(currentSlot,
1689 -2 * FRAME_SLOT_SIZE, // -2: jumpSizeAfterCall & env
1690 AddrMode::PREINDEX));
// acc starts as VALUE_HOLE for a fresh frame.
1691 __ Mov(op, Immediate(JSTaggedValue::VALUE_HOLE));
1692 __ Stp(thisObj, op, MemoryOperand(currentSlot, -2 * FRAME_SLOT_SIZE, AddrMode::PREINDEX)); // -2: acc & this
1693 __ Str(callTarget, MemoryOperand(currentSlot, -FRAME_SLOT_SIZE, AddrMode::PREINDEX)); // -1: callTarget
1694 }
1695
// Extract the declared-vreg count (MethodLiteral::NumVregsBits bit-field)
// from callField into numVregs: shift down to bit 0, then mask to the
// field width in the 32-bit view.
GetNumVregsFromCallField(ExtendedAssembler * assembler,Register callField,Register numVregs)1696 void AsmInterpreterCall::GetNumVregsFromCallField(ExtendedAssembler *assembler, Register callField, Register numVregs)
1697 {
1698 __ Mov(numVregs, callField);
1699 __ Lsr(numVregs, numVregs, MethodLiteral::NumVregsBits::START_BIT);
1700 __ And(numVregs.W(), numVregs.W(), LogicalImmediate::Create(
1701 MethodLiteral::NumVregsBits::Mask() >> MethodLiteral::NumVregsBits::START_BIT, RegWSize));
1702 }
1703
// Extract the declared-argument count (MethodLiteral::NumArgsBits bit-field)
// from callField into declaredNumArgs: shift down to bit 0, then mask to the
// field width in the 32-bit view.
GetDeclaredNumArgsFromCallField(ExtendedAssembler * assembler,Register callField,Register declaredNumArgs)1704 void AsmInterpreterCall::GetDeclaredNumArgsFromCallField(ExtendedAssembler *assembler, Register callField,
1705 Register declaredNumArgs)
1706 {
1707 __ Mov(declaredNumArgs, callField);
1708 __ Lsr(declaredNumArgs, declaredNumArgs, MethodLiteral::NumArgsBits::START_BIT);
1709 __ And(declaredNumArgs.W(), declaredNumArgs.W(), LogicalImmediate::Create(
1710 MethodLiteral::NumArgsBits::Mask() >> MethodLiteral::NumArgsBits::START_BIT, RegWSize));
1711 }
1712
// Push an ASM_INTERPRETER_ENTRY_FRAME on the machine stack: callee-saved
// registers (only when entering from C++ rather than from an interpreter
// handler), fp/lr, then [prevSp(leave frame) | frame type] and
// [glue | pc(0)]. FP is left pointing above the four frame slots.
// The position check pins the emitted prologue size to
// FrameCompletionPos::ARM64CppToAsmInterp so stack walking stays correct.
PushAsmInterpEntryFrame(ExtendedAssembler * assembler)1713 void AsmInterpreterCall::PushAsmInterpEntryFrame(ExtendedAssembler *assembler)
1714 {
1715 Register glue = __ GlueRegister();
1716 Register fp(X29);
1717 Register sp(SP);
1718
1719 size_t begin = __ GetCurrentPosition();
1720 if (!assembler->FromInterpreterHandler()) {
1721 __ CalleeSave();
1722 }
1723
1724 [[maybe_unused]] TempRegister1Scope scope1(assembler);
1725 Register prevFrameRegister = __ TempRegister1();
1726 [[maybe_unused]] TempRegister2Scope scope2(assembler);
1727 Register frameTypeRegister = __ TempRegister2();
1728
1729 __ PushFpAndLr();
1730
1731 // prev managed fp is leave frame or nullptr(the first frame)
1732 __ Ldr(prevFrameRegister, MemoryOperand(glue, JSThread::GlueData::GetLeaveFrameOffset(false)));
1733 __ Mov(frameTypeRegister, Immediate(static_cast<int64_t>(FrameType::ASM_INTERPRETER_ENTRY_FRAME)));
1734 // 2 : prevSp & frame type
1735 __ Stp(prevFrameRegister, frameTypeRegister, MemoryOperand(sp, -2 * FRAME_SLOT_SIZE, AddrMode::PREINDEX));
1736 // 2 : pc & glue
1737 __ Stp(glue, Register(Zero), MemoryOperand(sp, -2 * FRAME_SLOT_SIZE, AddrMode::PREINDEX)); // pc
1738 if (!assembler->FromInterpreterHandler()) {
1739 size_t end = __ GetCurrentPosition();
1740 if ((end - begin) != FrameCompletionPos::ARM64CppToAsmInterp) {
1741 LOG_COMPILER(FATAL) << (end - begin) << " != " << FrameCompletionPos::ARM64CppToAsmInterp
1742 << "This frame has been modified, and the offset CppToAsmInterp should be updated too.";
1743 }
1744 }
1745 __ Add(fp, sp, Immediate(4 * FRAME_SLOT_SIZE)); // 4: 32 means skip frame type, prevSp, pc and glue
1746 }
1747
// Tear down the frame built by PushAsmInterpEntryFrame: pop [glue | pc],
// restore the saved prevSp into glue's leave-frame slot, pop
// [prevSp | frame type], restore fp/lr, and (when returning to C++) the
// callee-saved registers. The position check pins the epilogue size to
// FrameCompletionPos::ARM64AsmInterpToCpp.
PopAsmInterpEntryFrame(ExtendedAssembler * assembler)1748 void AsmInterpreterCall::PopAsmInterpEntryFrame(ExtendedAssembler *assembler)
1749 {
1750 Register sp(SP);
1751
1752 [[maybe_unused]] TempRegister1Scope scope1(assembler);
1753 Register prevFrameRegister = __ TempRegister1();
1754 [[maybe_unused]] TempRegister2Scope scope2(assembler);
1755 Register glue = __ TempRegister2();
1756 // 2: glue & pc
1757 __ Ldp(glue, Register(Zero), MemoryOperand(sp, 2 * FRAME_SLOT_SIZE, AddrMode::POSTINDEX));
// Write the saved previous leave frame back into the thread's glue data.
1758 __ Ldr(prevFrameRegister, MemoryOperand(sp, 0));
1759 __ Str(prevFrameRegister, MemoryOperand(glue, JSThread::GlueData::GetLeaveFrameOffset(false)));
1760 // 2: skip frame type & prev
1761 __ Ldp(prevFrameRegister, Register(Zero), MemoryOperand(sp, 2 * FRAME_SLOT_SIZE, AddrMode::POSTINDEX));
1762 size_t begin = __ GetCurrentPosition();
1763 __ RestoreFpAndLr();
1764 if (!assembler->FromInterpreterHandler()) {
1765 __ CalleeRestore();
1766 size_t end = __ GetCurrentPosition();
1767 if ((end - begin) != FrameCompletionPos::ARM64AsmInterpToCpp) {
1768 LOG_COMPILER(FATAL) << (end - begin) << " != " << FrameCompletionPos::ARM64AsmInterpToCpp
1769 << "This frame has been modified, and the offset AsmInterpToCpp should be updated too.";
1770 }
1771 }
1772 }
1773
// Push an ASM_INTERPRETER_FRAME for resuming a generator, taking pc, env
// and acc from the suspended GeneratorContext instead of fresh defaults:
// [prevSp | frame type], [fp | pc(bytecode array + saved bc offset)],
// jumpSizeAfterCall(0), env, acc, then [callTarget | this].
// The saved bytecode offset lives in the high 32 bits of the 8-byte-aligned
// word at GENERATOR_NREGS_OFFSET. operatorRegister is scratch (clobbered).
PushGeneratorFrameState(ExtendedAssembler * assembler,Register & prevSpRegister,Register & fpRegister,Register & currentSlotRegister,Register & callTargetRegister,Register & thisRegister,Register & methodRegister,Register & contextRegister,Register & pcRegister,Register & operatorRegister)1774 void AsmInterpreterCall::PushGeneratorFrameState(ExtendedAssembler *assembler, Register &prevSpRegister,
1775 Register &fpRegister, Register &currentSlotRegister, Register &callTargetRegister, Register &thisRegister,
1776 Register &methodRegister, Register &contextRegister, Register &pcRegister, Register &operatorRegister)
1777 {
1778 __ Mov(operatorRegister, Immediate(static_cast<int64_t>(FrameType::ASM_INTERPRETER_FRAME)));
1779 __ Stp(prevSpRegister, operatorRegister,
1780 MemoryOperand(currentSlotRegister, -2 * FRAME_SLOT_SIZE, AddrMode::PREINDEX)); // 2 : frameType and prevSp
1781 __ Ldr(pcRegister, MemoryOperand(methodRegister, Method::NATIVE_POINTER_OR_BYTECODE_ARRAY_OFFSET));
1782 // offset need 8 align, GENERATOR_NREGS_OFFSET instead of GENERATOR_BC_OFFSET_OFFSET
1783 __ Ldr(operatorRegister, MemoryOperand(contextRegister, GeneratorContext::GENERATOR_NREGS_OFFSET));
1784 // 32: get high 32bit
1785 __ Lsr(operatorRegister, operatorRegister, 32);
1786 __ Add(pcRegister, operatorRegister, pcRegister);
1787 // 2 : pc and fp
1788 __ Stp(fpRegister, pcRegister, MemoryOperand(currentSlotRegister, -2 * FRAME_SLOT_SIZE, AddrMode::PREINDEX));
1789 // jumpSizeAfterCall
1790 __ Str(Register(Zero), MemoryOperand(currentSlotRegister, -FRAME_SLOT_SIZE, AddrMode::PREINDEX));
1791 __ Ldr(operatorRegister, MemoryOperand(contextRegister, GeneratorContext::GENERATOR_LEXICALENV_OFFSET));
1792 // env
1793 __ Str(operatorRegister, MemoryOperand(currentSlotRegister, -FRAME_SLOT_SIZE, AddrMode::PREINDEX));
1794 __ Ldr(operatorRegister, MemoryOperand(contextRegister, GeneratorContext::GENERATOR_ACC_OFFSET));
1795 // acc
1796 __ Str(operatorRegister, MemoryOperand(currentSlotRegister, -FRAME_SLOT_SIZE, AddrMode::PREINDEX));
1797 __ Stp(callTargetRegister, thisRegister,
1798 MemoryOperand(currentSlotRegister, -2 * FRAME_SLOT_SIZE, AddrMode::PREINDEX)); // 2 : callTarget and this
1799 }
1800
// Set up the asm-interpreter dispatch registers (X19 glue, FP sp, X20 pc,
// X21 constantpool, X22 profileTypeInfo, X23 acc, X24 hotnessCounter) and
// tail-jump into the bytecode handler for the opcode at pc. `temp` is
// scratch and is clobbered.
CallBCStub(ExtendedAssembler * assembler,Register & newSp,Register & glue,Register & callTarget,Register & method,Register & pc,Register & temp)1801 void AsmInterpreterCall::CallBCStub(ExtendedAssembler *assembler, Register &newSp, Register &glue,
1802 Register &callTarget, Register &method, Register &pc, Register &temp)
1803 {
1804 // prepare call entry
1805 __ Mov(Register(X19), glue); // X19 - glue
1806 __ Mov(Register(FP), newSp); // FP - sp
1807 __ Mov(Register(X20), pc); // X20 - pc
1808 __ Ldr(Register(X21), MemoryOperand(method, Method::CONSTANT_POOL_OFFSET)); // X21 - constantpool
1809 __ Ldr(Register(X22), MemoryOperand(callTarget, JSFunction::RAW_PROFILE_TYPE_INFO_OFFSET));
1810 __ Ldr(Register(X22), MemoryOperand(Register(X22), ProfileTypeInfoCell::VALUE_OFFSET)); // X22 - profileTypeInfo
1811 __ Mov(Register(X23), Immediate(JSTaggedValue::Hole().GetRawData())); // X23 - acc
1812 __ Ldr(Register(X24), MemoryOperand(method, Method::LITERAL_INFO_OFFSET)); // X24 - hotnessCounter
1813
1814 // call the first bytecode handler
1815 __ Ldrb(temp.W(), MemoryOperand(pc, 0));
1816 // 3 : 3 means *8
1817 __ Add(temp, glue, Operand(temp.W(), UXTW, FRAME_SLOT_SIZE_LOG2));
1818 __ Ldr(temp, MemoryOperand(temp, JSThread::GlueData::GetBCStubEntriesOffset(false)));
1819 __ Br(temp);
1820 }
1821
// Call a native (builtin) entry. Loads the native code pointer either from
// the JSFunction's code entry (isJSFunction) or from the method's native
// pointer (JSProxy/JSBoundFunction), builds a BUILTIN_ENTRY_FRAME, invokes
// the native code with X0 pointing at the EcmaRuntimeCallInfo-style area
// on the stack, then unwinds and returns.
// Incoming registers: X0 glue, X1 function, X2 method, X5 argv, X7 is used
// for the native code pointer, X9 as scratch.
CallNativeEntry(ExtendedAssembler * assembler,bool isJSFunction)1822 void AsmInterpreterCall::CallNativeEntry(ExtendedAssembler *assembler, bool isJSFunction)
1823 {
1824 Register glue(X0);
1825 Register argv(X5);
1826 Register function(X1);
1827 Register nativeCode(X7);
1828 Register temp(X9);
1829 // get native pointer
1830 if (isJSFunction) {
1831 __ Ldr(nativeCode, MemoryOperand(function, JSFunctionBase::CODE_ENTRY_OFFSET));
1832
// If the function carries a defined lexical env, publish it as the
// thread's current env before calling the native code.
1833 Label next;
1834 Register lexicalEnv = temp;
1835 __ Ldr(lexicalEnv, MemoryOperand(function, JSFunction::LEXICAL_ENV_OFFSET));
1836 __ Cmp(lexicalEnv, Immediate(JSTaggedValue::VALUE_UNDEFINED));
1837 __ B(Condition::EQ, &next);
1838 __ Str(lexicalEnv, MemoryOperand(glue, JSThread::GlueData::GetCurrentEnvOffset(false)));
1839 __ Bind(&next);
1840 } else {
1841 // JSProxy or JSBoundFunction
1842 Register method(X2);
1843 __ Ldr(nativeCode, MemoryOperand(method, Method::NATIVE_POINTER_OR_BYTECODE_ARRAY_OFFSET));
1844 }
1845
1846 Register sp(SP);
1847 // 2: function & align
1848 __ Stp(function, Register(Zero), MemoryOperand(sp, -2 * FRAME_SLOT_SIZE, AddrMode::PREINDEX));
1849 // 2: skip argc & thread
1850 __ Sub(sp, sp, Immediate(2 * FRAME_SLOT_SIZE));
1851 PushBuiltinFrame(assembler, glue, FrameType::BUILTIN_ENTRY_FRAME, temp, argv);
// X0 = argv - 2 slots, i.e. the start of the <argc, thread> header area.
1852 __ Mov(temp, argv);
1853 __ Sub(Register(X0), temp, Immediate(2 * FRAME_SLOT_SIZE)); // 2: skip argc & thread
1854 CallNativeInternal(assembler, nativeCode);
1855
1856 // 4: skip the 4 slots pushed above (argc, thread, function & align)
1857 __ Add(sp, sp, Immediate(4 * FRAME_SLOT_SIZE));
1858 __ Ret();
1859 }
1860
// Reset SP to the saved frame pointer, call the ThrowStackOverflowException
// runtime stub (entry looked up in glue's runtime-stub table, glue passed
// in X0), then restore fp/lr and return to the caller of the trampoline.
// `op` is scratch and is clobbered.
ThrowStackOverflowExceptionAndReturn(ExtendedAssembler * assembler,Register glue,Register fp,Register op)1861 void AsmInterpreterCall::ThrowStackOverflowExceptionAndReturn(ExtendedAssembler *assembler, Register glue,
1862 Register fp, Register op)
1863 {
1864 Register sp(SP);
1865
1866 if (fp != sp) {
1867 __ Mov(sp, fp);
1868 }
1869 __ Mov(op, Immediate(kungfu::RuntimeStubCSigns::ID_ThrowStackOverflowException));
1870 // 3 : 3 means *8
1871 __ Add(op, glue, Operand(op, LSL, 3));
1872 __ Ldr(op, MemoryOperand(op, JSThread::GlueData::GetRTStubEntriesOffset(false)));
1873 if (glue.GetId() != X0) {
1874 __ Mov(Register(X0), glue);
1875 }
1876
1877 __ Blr(op);
1878 __ RestoreFpAndLr();
1879 __ Ret();
1880 }
1881
// Throw a stack-overflow exception from code running under an
// AsmInterpBridgeFrame: build a temporary ASM_BRIDGE_FRAME (saving X10 as
// a caller-save), invoke CallRuntime with ID_ThrowStackOverflowException,
// tear the bridge frame down, then unwind SP past the bridge frame plus
// the 18 callee-saved registers (but not lr) and return.
// `op` is scratch and is clobbered; X10 is saved/restored around the call.
ThrowStackOverflowExceptionAndReturnToAsmInterpBridgeFrame(ExtendedAssembler * assembler,Register glue,Register fp,Register op)1882 void AsmInterpreterCall::ThrowStackOverflowExceptionAndReturnToAsmInterpBridgeFrame(ExtendedAssembler *assembler,
1883 Register glue, Register fp, Register op)
1884 {
1885 Register sp(SP);
1886
1887 if (fp != sp) {
1888 __ Mov(sp, fp);
1889 }
1890
1891 if (glue.GetId() != X0) {
1892 __ Mov(Register(X0), glue);
1893 }
1894
1895 __ PushFpAndLr();
1896 __ Mov(op, Immediate(static_cast<int64_t>(FrameType::ASM_BRIDGE_FRAME)));
1897 __ Stp(Register(X10), op, MemoryOperand(sp, -DOUBLE_SLOT_SIZE, PREINDEX)); // frame type and caller save
1898 __ Add(Register(FP), sp, Immediate(DOUBLE_SLOT_SIZE));
1899
// CallRuntime convention: push <runtime id, argc(0)>, then jump through
// the CallRuntime entry from glue's runtime-stub table.
1900 __ Mov(op, Immediate(kungfu::RuntimeStubCSigns::ID_ThrowStackOverflowException));
1901 __ Stp(op, Register(Zero), MemoryOperand(sp, -DOUBLE_SLOT_SIZE, PREINDEX)); // argc and runtime id
1902 __ Mov(Register(X10), Immediate(kungfu::RuntimeStubCSigns::ID_CallRuntime));
1903 // 3 : 3 means *8
1904 __ Add(Register(X10), glue, Operand(Register(X10), LSL, 3));
1905 __ Ldr(Register(X10), MemoryOperand(Register(X10), JSThread::GlueData::GetRTStubEntriesOffset(false)));
1906 __ Blr(Register(X10));
1907 // 2: skip argc and runtime_id
1908 __ Add(sp, sp, Immediate(2 * FRAME_SLOT_SIZE));
// Restore the caller-saved X10 pushed with the frame type above.
1909 __ Ldr(Register(X10), MemoryOperand(sp, FRAME_SLOT_SIZE, POSTINDEX));
1910
1911 __ Mov(sp, Register(FP));
1912 __ RestoreFpAndLr();
1913
1914 // +----------------------------------------------------+
1915 // | return addr |
1916 // |----------------------------------------------------| <---- FP
1917 // | frame type | ^ ^
1918 // |----------------------------------------------------| | |
1919 // | prevSp | | |
1920 // |----------------------------------------------------| | |
1921 // | pc | | |
1922 // |----------------------------------------------------| PushAsmInterpBridgeFrame total skip
1923 // | 18 callee save regs(x19 - x28, v8 - v15) | | |
1924 // |----------------------------------------------------| v v
1925 // | lr |
1926 // +----------------------------------------------------+
1927 // Base on PushAsmInterpBridgeFrame, need to skip AsmInterpBridgeFrame size and callee Save Registers(18)
1928 // but no lr(-1), x64 should skip lr because Ret in x64 will set stack pointer += 8
1929 int32_t skipNum = static_cast<int32_t>(AsmInterpretedBridgeFrame::GetSize(false)) / FRAME_SLOT_SIZE + 18 - 1;
1930 __ Add(Register(SP), Register(FP), Immediate(-skipNum * FRAME_SLOT_SIZE));
1931 __ Ret();
1932 }
1933 #undef __
1934 } // panda::ecmascript::aarch64
1935