1 /*
2 * Copyright (c) 2022-2024 Huawei Device Co., Ltd.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16
17 #include "ecmascript/compiler/trampoline/x64/common_call.h"
18
19 #include "ecmascript/js_generator_object.h"
20 #include "ecmascript/message_string.h"
21
22 namespace panda::ecmascript::x64 {
23 #define __ assembler->
24
25 // Generate code for Entering asm interpreter
26 // Input: glue - %rdi
27 // callTarget - %rsi
28 // method - %rdx
29 // callField - %rcx
30 // argc - %r8
31 // argv - %r9(<callTarget, newTarget, this> are at the beginning of argv)
// Trampoline from C++ into the asm interpreter: brackets the actual dispatch
// with an AsmInterpEntryFrame so the stack walker can cross the C++/ASM boundary.
void AsmInterpreterCall::AsmInterpreterEntry(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(AsmInterpreterEntry));
    Label target;
    // push asm interpreter entry frame
    size_t begin = __ GetCurrentPosition();
    PushAsmInterpEntryFrame(assembler);
    __ Callq(&target);
    PopAsmInterpEntryFrame(assembler);
    size_t end = __ GetCurrentPosition();
    // The byte length of the push/pop sequence is a frame-walking constant;
    // any change to the frame layout must also update X64EntryFrameDuration.
    if ((end - begin) != FrameCompletionPos::X64EntryFrameDuration) {
        LOG_COMPILER(FATAL) << (end - begin) << " != " << FrameCompletionPos::X64EntryFrameDuration
            << "This frame has been modified, and the offset EntryFrameDuration should be updated too.";
    }
    __ Ret();

    __ Bind(&target);
    AsmInterpEntryDispatch(assembler);
}
51
52 // Generate code for generator re-enter asm interpreter
53 // c++ calling convention
54 // Input: %rdi - glue
55 // %rsi - context(GeneratorContext)
// Trampoline for resuming a suspended generator in the asm interpreter;
// same entry-frame bracketing and size check as AsmInterpreterEntry.
void AsmInterpreterCall::GeneratorReEnterAsmInterp(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(GeneratorReEnterAsmInterp));
    Label target;
    size_t begin = __ GetCurrentPosition();
    PushAsmInterpEntryFrame(assembler);
    __ Callq(&target);
    PopAsmInterpEntryFrame(assembler);
    size_t end = __ GetCurrentPosition();
    // Keep X64EntryFrameDuration in sync with the push/pop sequence above.
    if ((end - begin) != FrameCompletionPos::X64EntryFrameDuration) {
        LOG_COMPILER(FATAL) << (end - begin) << " != " << FrameCompletionPos::X64EntryFrameDuration
            << "This frame has been modified, and the offset EntryFrameDuration should be updated too.";
    }
    __ Ret();

    __ Bind(&target);
    GeneratorReEnterAsmInterpDispatch(assembler);
}
74
// Rebuilds the interpreter frame of a suspended generator from its
// GeneratorContext (regs array, this, pc offset) and dispatches to the
// first bytecode. On stack overflow, throws and returns to the caller.
void AsmInterpreterCall::GeneratorReEnterAsmInterpDispatch(ExtendedAssembler *assembler)
{
    Register glueRegister = __ GlueRegister();
    Register contextRegister = rsi;
    Register prevSpRegister = rbp;

    Register callTargetRegister = r9;
    Register methodRegister = rcx;
    Register tempRegister = r11; // can not be used to store any variable
    Register opRegister = r8; // can not be used to store any variable
    // Recover the generator's function and its method from the context.
    __ Movq(Operand(rsi, GeneratorContext::GENERATOR_METHOD_OFFSET), callTargetRegister);
    __ Movq(Operand(callTargetRegister, JSFunctionBase::METHOD_OFFSET), methodRegister);

    Label stackOverflow;

    Register fpRegister = r10;
    __ Movq(rsp, fpRegister);
    Register nRegsRegister = rdx;
    Register regsArrayRegister = r12;
    Register thisRegister = r15;
    // push context regs: restore the saved virtual registers onto the stack
    __ Movl(Operand(rsi, GeneratorContext::GENERATOR_NREGS_OFFSET), nRegsRegister);
    __ Movq(Operand(rsi, GeneratorContext::GENERATOR_THIS_OFFSET), thisRegister);
    __ Movq(Operand(rsi, GeneratorContext::GENERATOR_REGS_ARRAY_OFFSET), regsArrayRegister);
    // Skip the TaggedArray header to point at the first element.
    __ Addq(TaggedArray::DATA_OFFSET, regsArrayRegister);
    PushArgsWithArgvAndCheckStack(assembler, glueRegister, nRegsRegister, regsArrayRegister, tempRegister, opRegister,
                                  &stackOverflow);

    // newSp
    Register newSpRegister = r8;
    __ Movq(rsp, newSpRegister);

    // resume asm interp frame (pc is computed inside from method + bc offset)
    Register pcRegister = r12;
    PushGeneratorFrameState(assembler, prevSpRegister, fpRegister, callTargetRegister, thisRegister, methodRegister,
                            contextRegister, pcRegister, tempRegister);

    // call bc stub
    DispatchCall(assembler, pcRegister, newSpRegister, callTargetRegister, methodRegister);
    __ Bind(&stackOverflow);
    {
        ThrowStackOverflowExceptionAndReturn(assembler, glueRegister, fpRegister, tempRegister);
    }
}
119
120 // Input: glue - %rdi
121 // callTarget - %rsi
122 // method - %rdx
123 // callField - %rcx
124 // argc - %r8
125 // argv - %r9(<callTarget, newTarget, this> are at the beginning of argv)
126 // prevSp - %rbp
// Dispatches an entry call on the callTarget's type: JS functions with
// bytecode go through JSCallCommonEntry, native methods and other callables
// take the native entry, and non-callables throw NotCallableException.
void AsmInterpreterCall::AsmInterpEntryDispatch(ExtendedAssembler *assembler)
{
    Label notJSFunction;
    Label callNativeEntry;
    Label callJSFunctionEntry;
    Label notCallable;
    Register glueRegister = rdi;
    Register callTargetRegister = rsi;
    Register argvRegister = r9;
    Register bitFieldRegister = r12;
    Register tempRegister = r11; // can not be used to store any variable
    __ Movq(Operand(callTargetRegister, TaggedObject::HCLASS_OFFSET), tempRegister); // hclass
    // Mask off the GC state bits packed into the hclass word to get the hclass pointer.
    Register maskRegister = r12;
    __ Movabs(TaggedObject::GC_STATE_MASK, maskRegister);
    __ And(maskRegister, tempRegister);
    __ Movq(Operand(tempRegister, JSHClass::BIT_FIELD_OFFSET), bitFieldRegister);
    // The low byte of the bitfield is compared against the JS function JSType range.
    __ Cmpb(static_cast<int32_t>(JSType::JS_FUNCTION_FIRST), bitFieldRegister);
    __ Jb(&notJSFunction);
    __ Cmpb(static_cast<int32_t>(JSType::JS_FUNCTION_LAST), bitFieldRegister);
    __ Jbe(&callJSFunctionEntry);
    __ Bind(&notJSFunction);
    {
        __ Testq(static_cast<int64_t>(1ULL << JSHClass::CallableBit::START_BIT), bitFieldRegister);
        __ Jz(&notCallable);
        CallNativeEntry(assembler, false); // callable but not a JS function
    }
    __ Bind(&callNativeEntry);
    CallNativeEntry(assembler, true);
    __ Bind(&callJSFunctionEntry);
    {
        Register callFieldRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_FIELD);
        // A JS function whose method is native still takes the native entry.
        __ Btq(MethodLiteral::IsNativeBit::START_BIT, callFieldRegister);
        __ Jb(&callNativeEntry);
        // Skip the mandatory <callTarget, newTarget, this> prefix of argv.
        __ Leaq(Operand(argvRegister, NUM_MANDATORY_JSFUNC_ARGS * JSTaggedValue::TaggedTypeSize()),
                argvRegister);
        JSCallCommonEntry(assembler, JSCallMode::CALL_ENTRY, FrameTransitionType::OTHER_TO_BASELINE_CHECK);
    }
    __ Bind(&notCallable);
    {
        // Invoke RuntimeStubs::ThrowNotCallableException through the CallRuntime trampoline.
        __ Movq(glueRegister, rax); // glue
        __ Pushq(0); // argc
        Register runtimeIdRegister = r12;
        __ Movq(kungfu::RuntimeStubCSigns::ID_ThrowNotCallableException, runtimeIdRegister);
        __ Pushq(runtimeIdRegister); // runtimeId
        Register trampolineIdRegister = r12;
        Register trampolineRegister = r10;
        __ Movq(kungfu::RuntimeStubCSigns::ID_CallRuntime, trampolineIdRegister);
        __ Movq(Operand(rax, trampolineIdRegister, Times8, JSThread::GlueData::GetRTStubEntriesOffset(false)),
                trampolineRegister);
        __ Callq(trampolineRegister);
        __ Addq(16, rsp); // 16: skip argc and runtime_id
        __ Ret();
    }
}
182
// Pushes an ASM_INTERPRETER_FRAME record in layout order:
// frameType, prevSp, pc, fp, jumpSizeAfterCall, env, acc, thisObj, callTarget.
// pcRegister and operatorRegister are outputs/scratch (pc is loaded from the method).
void AsmInterpreterCall::PushFrameState(ExtendedAssembler *assembler, Register prevSpRegister, Register fpRegister,
    Register callTargetRegister, Register thisRegister, Register methodRegister, Register pcRegister,
    Register operatorRegister)
{
    __ Pushq(static_cast<int32_t>(FrameType::ASM_INTERPRETER_FRAME)); // frame type
    __ Pushq(prevSpRegister); // prevSp
    // pc starts at the beginning of the method's bytecode array.
    __ Movq(Operand(methodRegister, Method::NATIVE_POINTER_OR_BYTECODE_ARRAY_OFFSET), pcRegister);
    __ Pushq(pcRegister); // pc
    __ Pushq(fpRegister); // fp
    __ Pushq(0); // jumpSizeAfterCall
    __ Movq(Operand(callTargetRegister, JSFunction::LEXICAL_ENV_OFFSET), operatorRegister);
    __ Pushq(operatorRegister); // env
    __ Pushq(JSTaggedValue::Hole().GetRawData()); // acc
    __ Pushq(thisRegister); // thisObj
    __ Pushq(callTargetRegister); // callTarget
}
199
// Same frame layout as PushFrameState, but pc, env and acc are restored from
// the GeneratorContext so execution resumes where the generator suspended.
void AsmInterpreterCall::PushGeneratorFrameState(ExtendedAssembler *assembler, Register prevSpRegister,
    Register fpRegister, Register callTargetRegister, Register thisRegister, Register methodRegister,
    Register contextRegister, Register pcRegister, Register operatorRegister)
{
    __ Pushq(static_cast<int32_t>(FrameType::ASM_INTERPRETER_FRAME)); // frame type
    __ Pushq(prevSpRegister); // prevSp
    // pc = bytecode array base + saved bytecode offset from the context.
    __ Movq(Operand(methodRegister, Method::NATIVE_POINTER_OR_BYTECODE_ARRAY_OFFSET), pcRegister);
    __ Movl(Operand(contextRegister, GeneratorContext::GENERATOR_BC_OFFSET_OFFSET), operatorRegister);
    __ Addq(operatorRegister, pcRegister);
    __ Pushq(pcRegister); // pc
    __ Pushq(fpRegister); // fp
    __ Pushq(0); // jumpSizeAfterCall
    __ Movq(Operand(contextRegister, GeneratorContext::GENERATOR_LEXICALENV_OFFSET), operatorRegister);
    __ Pushq(operatorRegister); // env
    __ Movq(Operand(contextRegister, GeneratorContext::GENERATOR_ACC_OFFSET), operatorRegister);
    __ Pushq(operatorRegister); // acc
    __ Pushq(thisRegister); // thisObj
    __ Pushq(callTargetRegister); // callTarget
}
219
// Builds an ASM_INTERPRETER_ENTRY_FRAME: saves C++ callee-saved registers
// (only when entered from C++, not from an interpreter handler), saves glue
// (rdi) and the current leave-frame pointer, then sets rbp for the new frame.
// Mirrored exactly by PopAsmInterpEntryFrame.
void AsmInterpreterCall::PushAsmInterpEntryFrame(ExtendedAssembler *assembler)
{
    size_t begin = __ GetCurrentPosition();
    if (!assembler->FromInterpreterHandler()) {
        __ PushCppCalleeSaveRegisters();
    }
    Register fpRegister = r10;
    __ Pushq(rdi);
    __ PushAlignBytes();
    // Save the current leave frame so it can be restored on pop.
    __ Movq(Operand(rdi, JSThread::GlueData::GetLeaveFrameOffset(false)), fpRegister);
    // construct asm interpreter entry frame
    __ Pushq(rbp);
    __ Pushq(static_cast<int64_t>(FrameType::ASM_INTERPRETER_ENTRY_FRAME));
    __ Pushq(fpRegister);
    __ Pushq(0); // pc
    if (!assembler->FromInterpreterHandler()) {
        size_t end = __ GetCurrentPosition();
        // This prologue's byte length is a stack-walking constant; keep
        // FrameCompletionPos::X64CppToAsmInterp in sync with layout changes.
        if ((end - begin) != FrameCompletionPos::X64CppToAsmInterp) {
            LOG_COMPILER(FATAL) << (end - begin) << " != " << FrameCompletionPos::X64CppToAsmInterp
                << "This frame has been modified, and the offset CppToAsmInterp should be updated too.";
        }
    }
    __ Leaq(Operand(rsp, 3 * FRAME_SLOT_SIZE), rbp); // 3: 24 means skip frame type, prevSp and pc
}
244
PopAsmInterpEntryFrame(ExtendedAssembler * assembler)245 void AsmInterpreterCall::PopAsmInterpEntryFrame(ExtendedAssembler *assembler)
246 {
247 __ Addq(8, rsp); // 8: skip pc
248 Register fpRegister = r10;
249 __ Popq(fpRegister);
250 __ Addq(FRAME_SLOT_SIZE, rsp); // 8: skip frame type
251 __ Popq(rbp);
252 __ PopAlignBytes();
253 __ Popq(rdi);
254 __ Movq(fpRegister, Operand(rdi, JSThread::GlueData::GetLeaveFrameOffset(false)));
255 size_t begin = __ GetCurrentPosition();
256 if (!assembler->FromInterpreterHandler()) {
257 __ PopCppCalleeSaveRegisters();
258 size_t end = __ GetCurrentPosition();
259 if ((end - begin) != FrameCompletionPos::X64AsmInterpToCpp) {
260 LOG_COMPILER(FATAL) << (end - begin) << " != " << FrameCompletionPos::X64AsmInterpToCpp
261 << "This frame has been modified, and the offset AsmInterpToCp should be updated too.";
262 }
263 }
264 }
265
// Extracts the declared argument count from the packed callField:
// (callField >> NumArgsBits::START_BIT) & (Mask() >> START_BIT).
void AsmInterpreterCall::GetDeclaredNumArgsFromCallField(ExtendedAssembler *assembler, Register callFieldRegister,
    Register declaredNumArgsRegister)
{
    __ Movq(callFieldRegister, declaredNumArgsRegister);
    __ Shrq(MethodLiteral::NumArgsBits::START_BIT, declaredNumArgsRegister);
    __ Andq(MethodLiteral::NumArgsBits::Mask() >> MethodLiteral::NumArgsBits::START_BIT, declaredNumArgsRegister);
}
273
// Extracts the number of virtual registers from the packed callField:
// (callField >> NumVregsBits::START_BIT) & (Mask() >> START_BIT).
void AsmInterpreterCall::GetNumVregsFromCallField(ExtendedAssembler *assembler, Register callFieldRegister,
    Register numVregsRegister)
{
    __ Movq(callFieldRegister, numVregsRegister);
    __ Shrq(MethodLiteral::NumVregsBits::START_BIT, numVregsRegister);
    __ Andq(MethodLiteral::NumVregsBits::Mask() >> MethodLiteral::NumVregsBits::START_BIT, numVregsRegister);
}
281
// Common entry for all JS call stubs: saves the caller sp, then takes the fast
// path when the actual argc equals the declared argc, else the slow path (which
// pads/truncates and jumps back into fastPathEntry/pushCallThis). On stack
// overflow it either tail-jumps to the ThrowStackOverflowException bytecode stub
// (when dispatched from the interpreter) or throws and returns to the C++ caller.
void AsmInterpreterCall::JSCallCommonEntry(ExtendedAssembler *assembler,
    JSCallMode mode, FrameTransitionType type)
{
    Label stackOverflow;
    Register glueRegister = __ GlueRegister();
    Register fpRegister = __ AvailableRegister1();
    Register callFieldRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_FIELD);
    Register argcRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG0);
    // save fp
    __ Movq(rsp, fpRegister);
    Register declaredNumArgsRegister = __ AvailableRegister2();
    GetDeclaredNumArgsFromCallField(assembler, callFieldRegister, declaredNumArgsRegister);

    Label slowPathEntry;
    Label fastPathEntry;
    Label pushCallThis;
    // argc >= 0 means the mode carries a fixed argument count; < 0 means it is
    // a range call whose count arrives in argcRegister at runtime.
    auto argc = kungfu::AssemblerModule::GetArgcFromJSCallMode(mode);
    if (argc >= 0) {
        __ Cmpq(argc, declaredNumArgsRegister);
    } else {
        __ Cmpq(argcRegister, declaredNumArgsRegister);
    }
    __ Jne(&slowPathEntry);
    __ Bind(&fastPathEntry);
    JSCallCommonFastPath(assembler, mode, &stackOverflow);
    __ Bind(&pushCallThis);
    PushCallThis(assembler, mode, &stackOverflow, type);
    __ Bind(&slowPathEntry);
    JSCallCommonSlowPath(assembler, mode, &fastPathEntry, &pushCallThis, &stackOverflow);

    __ Bind(&stackOverflow);
    if (kungfu::AssemblerModule::IsJumpToCallCommonEntry(mode)) {
        // Restore sp to the saved fp, then set up the GHC-convention registers
        // expected by the bytecode stubs before jumping to the throw stub.
        __ Movq(fpRegister, rsp);
        Register tempRegister = __ AvailableRegister1();
        // only glue and acc are useful in exception handler
        if (glueRegister != r13) {
            __ Movq(glueRegister, r13);
        }
        Register acc = rsi;
        __ Movq(JSTaggedValue::VALUE_EXCEPTION, acc);
        Register methodRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::METHOD);
        Register callTargetRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_TARGET);
        // Reload pc to make sure stack trace is right
        __ Movq(callTargetRegister, tempRegister);
        __ Movq(Operand(methodRegister, Method::NATIVE_POINTER_OR_BYTECODE_ARRAY_OFFSET), r12); // pc: r12
        // Reload constpool and profileInfo to make sure gc map work normally
        __ Movq(Operand(tempRegister, JSFunction::RAW_PROFILE_TYPE_INFO_OFFSET), r14);
        __ Movq(Operand(r14, ProfileTypeInfoCell::VALUE_OFFSET), r14); // profileTypeInfo: r14
        __ Movq(Operand(methodRegister, Method::CONSTANT_POOL_OFFSET), rbx); // constantPool: rbx

        __ Movq(kungfu::BytecodeStubCSigns::ID_ThrowStackOverflowException, tempRegister);
        __ Movq(Operand(glueRegister, tempRegister, Times8, JSThread::GlueData::GetBCStubEntriesOffset(false)),
                tempRegister);
        __ Jmp(tempRegister);
    } else {
        [[maybe_unused]] TempRegisterScope scope(assembler);
        Register temp = __ TempRegister();
        ThrowStackOverflowExceptionAndReturn(assembler, glueRegister, fpRegister, temp);
    }
}
342
343 // void PushCallArgsxAndDispatch(uintptr_t glue, uintptr_t sp, uint64_t callTarget, uintptr_t method,
344 // uint64_t callField, ...)
345 // GHC calling convention
346 // Input1: for callarg0/1/2/3 Input2: for callrange
347 // %r13 - glue // %r13 - glue
348 // %rbp - sp // %rbp - sp
349 // %r12 - callTarget // %r12 - callTarget
350 // %rbx - method // %rbx - method
351 // %r14 - callField // %r14 - callField
352 // %rsi - arg0 // %rsi - actualArgc
353 // %rdi - arg1 // %rdi - argv
354 // %r8 - arg2
// Stub: range call with explicit this (actualArgc/argv in rsi/rdi).
void AsmInterpreterCall::PushCallThisRangeAndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallThisRangeAndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_THIS_WITH_ARGV, FrameTransitionType::OTHER_TO_OTHER);
}
360
// Stub: range call without this (this defaults to undefined).
void AsmInterpreterCall::PushCallRangeAndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallRangeAndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_WITH_ARGV, FrameTransitionType::OTHER_TO_OTHER);
}
366
// Stub: constructor call (new) with argv range.
void AsmInterpreterCall::PushCallNewAndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallNewAndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_CONSTRUCTOR_WITH_ARGV, FrameTransitionType::OTHER_TO_OTHER);
}
372
// Stub: super(...) call with argv range.
void AsmInterpreterCall::PushSuperCallAndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushSuperCallAndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::SUPER_CALL_WITH_ARGV, FrameTransitionType::OTHER_TO_OTHER);
}
378
// Stub: three-argument call, no explicit this.
void AsmInterpreterCall::PushCallArgs3AndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallArgs3AndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_ARG3, FrameTransitionType::OTHER_TO_OTHER);
}
384
// Stub: two-argument call, no explicit this.
void AsmInterpreterCall::PushCallArgs2AndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallArgs2AndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_ARG2, FrameTransitionType::OTHER_TO_OTHER);
}
390
// Stub: one-argument call, no explicit this.
void AsmInterpreterCall::PushCallArg1AndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallArg1AndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_ARG1, FrameTransitionType::OTHER_TO_OTHER);
}
396
// Stub: zero-argument call, no explicit this.
void AsmInterpreterCall::PushCallArg0AndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallArg0AndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_ARG0, FrameTransitionType::OTHER_TO_OTHER);
}
// Stub: zero-argument call with explicit this.
void AsmInterpreterCall::PushCallThisArg0AndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallThisArg0AndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_THIS_ARG0, FrameTransitionType::OTHER_TO_OTHER);
}
407
// Stub: one-argument call with explicit this.
void AsmInterpreterCall::PushCallThisArg1AndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallThisArg1AndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_THIS_ARG1, FrameTransitionType::OTHER_TO_OTHER);
}
413
// Stub: two-argument call with explicit this.
void AsmInterpreterCall::PushCallThisArgs2AndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallThisArgs2AndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_THIS_ARG2, FrameTransitionType::OTHER_TO_OTHER);
}
419
// Stub: three-argument call with explicit this.
void AsmInterpreterCall::PushCallThisArgs3AndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallThisArgs3AndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_THIS_ARG3, FrameTransitionType::OTHER_TO_OTHER);
}
425
// Fast path (actual argc == declared argc): pushes the call arguments onto the
// stack in reverse order. Range modes (argc < 0) copy from argv with a stack
// check; fixed-arg modes push arg2/arg1/arg0 individually.
void AsmInterpreterCall::JSCallCommonFastPath(ExtendedAssembler *assembler, JSCallMode mode, Label *stackOverflow)
{
    Register glueRegister = __ GlueRegister();
    Register arg0 = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG0);
    Register arg1 = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG1);

    Label pushCallThis;
    auto argc = kungfu::AssemblerModule::GetArgcFromJSCallMode(mode);
    // call range
    if (argc < 0) {
        // For range modes ARG0 holds the runtime argc and ARG1 the argv pointer.
        Register argcRegister = arg0;
        Register argvRegister = arg1;
        __ Cmpq(0, argcRegister);
        __ Jbe(&pushCallThis); // nothing to copy when argc == 0
        // fall through
        {
            [[maybe_unused]] TempRegisterScope scope(assembler);
            Register opRegister = __ TempRegister();
            Register op2Register = __ AvailableRegister2();
            PushArgsWithArgvAndCheckStack(assembler, glueRegister, argcRegister, argvRegister, opRegister, op2Register,
                                          stackOverflow);
        }
        __ Bind(&pushCallThis);
    } else if (argc > 0) {
        if (argc > 2) { // 2: call arg2
            if (mode == JSCallMode::CALL_THIS_ARG3_WITH_RETURN) {
                // This mode passes arg2 in a dedicated C++-call register.
                Register arg2 = __ CppJSCallAvailableRegister1();
                __ Pushq(arg2);
            } else {
                Register arg2 = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG2);
                __ Pushq(arg2);
            }
        }
        if (argc > 1) {
            __ Pushq(arg1);
        }
        if (argc > 0) {
            __ Pushq(arg0);
        }
    }
}
467
// Slow path (actual argc != declared argc): optionally pushes the extra-args
// count (HaveExtraBit), pads missing declared args with undefined, then
// jumps back to fastPathEntry; when declared < actual and there are no extra
// args, pushes only the first `declared` args and jumps to pushCallThis.
void AsmInterpreterCall::JSCallCommonSlowPath(ExtendedAssembler *assembler, JSCallMode mode,
    Label *fastPathEntry, Label *pushCallThis, Label *stackOverflow)
{
    Register glueRegister = __ GlueRegister();
    Register callFieldRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_FIELD);
    Register argcRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG0);
    Register arg0 = argcRegister;
    Register arg1 = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG1);
    Label noExtraEntry;
    Label pushArgsEntry;

    auto argc = kungfu::AssemblerModule::GetArgcFromJSCallMode(mode);
    Register declaredNumArgsRegister = __ AvailableRegister2();
    __ Testq(MethodLiteral::HaveExtraBit::Mask(), callFieldRegister);
    __ Jz(&noExtraEntry);
    // extra entry: methods with "extra" args record the actual argc on the stack
    {
        [[maybe_unused]] TempRegisterScope scope(assembler);
        Register tempArgcRegister = __ TempRegister();
        if (argc >= 0) {
            __ PushArgc(argc, tempArgcRegister);
        } else {
            __ PushArgc(argcRegister, tempArgcRegister);
        }
    }
    __ Bind(&noExtraEntry);
    {
        if (argc == 0) {
            // No actual args: pad all declared args with undefined and take the fast path.
            Register op1 = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG1);
            [[maybe_unused]] TempRegisterScope scope(assembler);
            Register op2 = __ TempRegister();
            PushUndefinedWithArgcAndCheckStack(assembler, glueRegister, declaredNumArgsRegister, op1, op2,
                                               stackOverflow);
            __ Jmp(fastPathEntry);
            return;
        }
        // diff = declared - actual; pad with undefined when positive.
        [[maybe_unused]] TempRegisterScope scope(assembler);
        Register diffRegister = __ TempRegister();
        __ Movq(declaredNumArgsRegister, diffRegister);
        if (argc >= 0) {
            __ Subq(argc, diffRegister);
        } else {
            __ Subq(argcRegister, diffRegister);
        }
        __ Cmpq(0, diffRegister);
        __ Jle(&pushArgsEntry);
        PushUndefinedWithArgc(assembler, diffRegister);
        __ Jmp(fastPathEntry);
    }
    // declared <= actual from here on
    __ Bind(&pushArgsEntry);
    // With extra args, all actual args are kept: take the fast path.
    __ Testq(MethodLiteral::HaveExtraBit::Mask(), callFieldRegister);
    __ Jnz(fastPathEntry);
    // arg1, declared must be 0
    if (argc == 1) {
        __ Jmp(pushCallThis);
        return;
    }
    // declared < actual: push only the declared number of args
    __ Cmpq(0, declaredNumArgsRegister);
    __ Je(pushCallThis);
    if (argc < 0) {
        Register argvRegister = arg1;
        [[maybe_unused]] TempRegisterScope scope(assembler);
        Register opRegister = __ TempRegister();
        PushArgsWithArgvAndCheckStack(assembler, glueRegister, declaredNumArgsRegister, argvRegister, opRegister,
                                      opRegister, stackOverflow);
    } else if (argc > 0) {
        Label pushArgs0;
        if (argc > 2) { // 2: call arg2
            // declared is 2 or 1 now
            __ Cmpq(1, declaredNumArgsRegister);
            __ Je(&pushArgs0);
            __ Pushq(arg1);
        }
        if (argc > 1) {
            __ Bind(&pushArgs0);
            // declared is 1 now
            __ Pushq(arg0);
        }
    }
    __ Jmp(pushCallThis);
}
550
// Returns the register holding `this` for the given call mode; for entry/AOT
// modes it loads `this` from just before the argv list into defaultRegister.
// (Name keeps the historical "Regsiter" spelling; callers depend on it.)
Register AsmInterpreterCall::GetThisRegsiter(ExtendedAssembler *assembler, JSCallMode mode, Register defaultRegister)
{
    switch (mode) {
        case JSCallMode::CALL_GETTER:
        case JSCallMode::CALL_THIS_ARG0:
            return __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG0);
        case JSCallMode::CALL_SETTER:
        case JSCallMode::CALL_THIS_ARG1:
            return __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG1);
        case JSCallMode::CALL_THIS_ARG2:
        case JSCallMode::CALL_CONSTRUCTOR_WITH_ARGV:
        case JSCallMode::CALL_THIS_WITH_ARGV:
        case JSCallMode::SUPER_CALL_WITH_ARGV:
        case JSCallMode::SUPER_CALL_SPREAD_WITH_ARGV:
            return __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG2);
        case JSCallMode::CALL_THIS_ARG3:
            return __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG3);
        case JSCallMode::CALL_ENTRY:
        case JSCallMode::CALL_FROM_AOT: {
            Register argvRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG1);
            __ Movq(Operand(argvRegister, -FRAME_SLOT_SIZE), defaultRegister); // 8: this is just before the argv list
            return defaultRegister;
        }
        case JSCallMode::CALL_THIS_ARG3_WITH_RETURN:
            return __ CppJSCallAvailableRegister2();
        case JSCallMode::CALL_THIS_ARG2_WITH_RETURN:
        case JSCallMode::CALL_THIS_ARGV_WITH_RETURN: {
            return __ CppJSCallAvailableRegister1();
        }
        default:
            LOG_ECMA(FATAL) << "this branch is unreachable";
            UNREACHABLE();
    }
    return rInvalid;
}
586
// Returns the register holding newTarget for the given call mode; for
// entry/AOT modes it loads newTarget from two slots before the argv list.
// (Name keeps the historical "Regsiter" spelling; callers depend on it.)
Register AsmInterpreterCall::GetNewTargetRegsiter(ExtendedAssembler *assembler, JSCallMode mode,
    Register defaultRegister)
{
    switch (mode) {
        case JSCallMode::CALL_CONSTRUCTOR_WITH_ARGV:
        case JSCallMode::CALL_THIS_WITH_ARGV:
            // For ordinary construct calls the callTarget is the newTarget.
            return __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_TARGET);
        case JSCallMode::SUPER_CALL_WITH_ARGV:
        case JSCallMode::SUPER_CALL_SPREAD_WITH_ARGV:
            return __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG3);
        case JSCallMode::CALL_FROM_AOT:
        case JSCallMode::CALL_ENTRY: {
            Register argvRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG1);
            // -2: new Target offset
            __ Movq(Operand(argvRegister, -2 * FRAME_SLOT_SIZE), defaultRegister);
            return defaultRegister;
        }
        default:
            LOG_ECMA(FATAL) << "this branch is unreachable";
            UNREACHABLE();
    }
    return rInvalid;
}
610
611 // Input: %r14 - callField
612 // %rdi - argv
// Pushes the optional this/newTarget/callTarget slots, gated by the callField
// bits (HaveThisBit/HaveNewTargetBit/HaveFuncBit), then falls into PushVregs.
// Slots the method declares but the mode does not supply are filled with undefined.
void AsmInterpreterCall::PushCallThis(ExtendedAssembler *assembler,
    JSCallMode mode, Label *stackOverflow, FrameTransitionType type)
{
    Register callFieldRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_FIELD);
    Register callTargetRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_TARGET);
    Register thisRegister = __ AvailableRegister2();

    Label pushVregs;
    Label pushNewTarget;
    Label pushCallTarget;
    bool haveThis = kungfu::AssemblerModule::JSModeHaveThisArg(mode);
    bool haveNewTarget = kungfu::AssemblerModule::JSModeHaveNewTargetArg(mode);
    if (!haveThis) {
        __ Movq(JSTaggedValue::VALUE_UNDEFINED, thisRegister); // default this: undefined
    } else {
        // Normalize `this` into thisRegister; PushVregs reads it from there.
        Register thisArgRegister = GetThisRegsiter(assembler, mode, thisRegister);
        if (thisRegister != thisArgRegister) {
            __ Movq(thisArgRegister, thisRegister);
        }
    }
    // No call-type bits set: the method declares none of this/newTarget/func.
    __ Testb(CALL_TYPE_MASK, callFieldRegister);
    __ Jz(&pushVregs);
    // fall through
    __ Testq(MethodLiteral::HaveThisBit::Mask(), callFieldRegister);
    __ Jz(&pushNewTarget);
    // push this
    if (!haveThis) {
        __ Pushq(JSTaggedValue::Undefined().GetRawData());
    } else {
        __ Pushq(thisRegister);
    }
    // fall through
    __ Bind(&pushNewTarget);
    {
        __ Testq(MethodLiteral::HaveNewTargetBit::Mask(), callFieldRegister);
        __ Jz(&pushCallTarget);
        if (!haveNewTarget) {
            __ Pushq(JSTaggedValue::Undefined().GetRawData());
        } else {
            [[maybe_unused]] TempRegisterScope scope(assembler);
            Register defaultRegister = __ TempRegister();
            Register newTargetRegister = GetNewTargetRegsiter(assembler, mode, defaultRegister);
            __ Pushq(newTargetRegister);
        }
    }
    // fall through
    __ Bind(&pushCallTarget);
    {
        __ Testq(MethodLiteral::HaveFuncBit::Mask(), callFieldRegister);
        __ Jz(&pushVregs);
        __ Pushq(callTargetRegister);
    }
    // fall through
    __ Bind(&pushVregs);
    {
        PushVregs(assembler, stackOverflow, type);
    }
}
671
672 // Input: %rbp - sp
673 // %r12 - callTarget
674 // %rbx - method
675 // %r14 - callField
676 // %rdx - jumpSizeAfterCall
677 // %r10 - fp
// Pushes `numVregs` undefined virtual registers, builds the interpreter frame
// state, and dispatches. For *_BASELINE_CHECK transitions, if the function has
// compiled (non-undefined, non-hole) baseline code, jumps into it directly
// instead of dispatching bytecode. Note the heavy register aliasing: dispatcher
// argument registers are reused as scratch once the args are consumed.
void AsmInterpreterCall::PushVregs(ExtendedAssembler *assembler,
    Label *stackOverflow, FrameTransitionType type)
{
    Register glueRegister = __ GlueRegister();
    Register prevSpRegister = rbp;
    Register callTargetRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_TARGET);
    Register methodRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::METHOD);
    Register callFieldRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_FIELD);
    Register fpRegister = __ AvailableRegister1();
    Register thisRegister = __ AvailableRegister2();

    Label pushFrameState;

    [[maybe_unused]] TempRegisterScope scope(assembler);
    Register tempRegister = __ TempRegister();
    // args register can reused now.
    Register pcRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG0);
    Register numVregsRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG1);
    GetNumVregsFromCallField(assembler, callFieldRegister, numVregsRegister);
    __ Cmpq(0, numVregsRegister);
    __ Jz(&pushFrameState);
    Register temp2Register = __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_FIELD); // reuse
    PushUndefinedWithArgcAndCheckStack(assembler, glueRegister, numVregsRegister, tempRegister, temp2Register,
                                       stackOverflow);
    // fall through
    // newSpRegister aliases ARG1/numVregsRegister, which is no longer needed.
    Register newSpRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG1);
    __ Bind(&pushFrameState);
    {
        StackOverflowCheck(assembler, glueRegister, numVregsRegister, tempRegister, temp2Register, stackOverflow);
        __ Movq(rsp, newSpRegister);

        PushFrameState(assembler, prevSpRegister, fpRegister,
                       callTargetRegister, thisRegister, methodRegister, pcRegister, tempRegister);
    }
    if (type == FrameTransitionType::OTHER_TO_BASELINE_CHECK ||
        type == FrameTransitionType::BASELINE_TO_BASELINE_CHECK) {
        __ Movq(Operand(callTargetRegister, JSFunction::BASELINECODE_OFFSET), tempRegister);
        Label baselineCodeUndefined;
        // Undefined: no baseline code; fall back to bytecode dispatch.
        __ Cmpq(JSTaggedValue::Undefined().GetRawData(), tempRegister);
        __ Je(&baselineCodeUndefined);

        // check is compiling (hole marks in-progress compilation)
        __ Cmpq(JSTaggedValue::Hole().GetRawData(), tempRegister);
        __ Je(&baselineCodeUndefined);

        Label stackAligned;
        // align 16 bytes
        __ Testq(15, rsp); // 15: low 4 bits must be 0b0000
        __ Jz(&stackAligned);
        __ PushAlignBytes();
        __ Bind(&stackAligned);

        // Jump into the compiled baseline code with glue/method in their
        // conventional registers (r13/rbx).
        __ Movq(Operand(tempRegister, MachineCode::FUNCADDR_OFFSET), tempRegister);
        if (glueRegister != r13) {
            __ Movq(glueRegister, r13);
        }
        if (methodRegister != rbx) {
            __ Movq(methodRegister, rbx);
        }
        const int32_t pcOffsetFromSP = -24; // -24: 3 slots, frameType, prevFrame, pc
        Register temp3Register = r10;
        // Mark the frame's pc slot with an all-ones sentinel for baseline frames.
        __ Movabs(std::numeric_limits<uint64_t>::max(), temp3Register);
        __ Movq(temp3Register, Operand(newSpRegister, pcOffsetFromSP));
        __ Movq(newSpRegister, rbp);
        __ Jmp(tempRegister);

        __ Bind(&baselineCodeUndefined);
    }
    DispatchCall(assembler, pcRegister, newSpRegister, callTargetRegister, methodRegister);
}
748
749 // Input: %r13 - glue
750 // %rbp - sp
751 // %r12 - callTarget
752 // %rbx - method
DispatchCall(ExtendedAssembler * assembler,Register pcRegister,Register newSpRegister,Register callTargetRegister,Register methodRegister,Register accRegister,bool hasException)753 void AsmInterpreterCall::DispatchCall(ExtendedAssembler *assembler, Register pcRegister,
754 Register newSpRegister, Register callTargetRegister, Register methodRegister, Register accRegister,
755 bool hasException)
756 {
757 Register glueRegister = __ GlueRegister();
758 Label dispatchCall;
759 // align 16 bytes
760 __ Testq(15, rsp); // 15: low 4 bits must be 0b0000
761 __ Jnz(&dispatchCall);
762 __ PushAlignBytes();
763 __ Bind(&dispatchCall);
764 // profileTypeInfo: r14
765 __ Movq(Operand(callTargetRegister, JSFunction::RAW_PROFILE_TYPE_INFO_OFFSET), r14);
766 __ Movq(Operand(r14, ProfileTypeInfoCell::VALUE_OFFSET), r14);
767 // glue may rdi
768 if (glueRegister != r13) {
769 __ Movq(glueRegister, r13);
770 }
771 // sp: rbp
772 __ Movq(newSpRegister, rbp);
773 // hotnessCounter: rdi
774 __ Movzwq(Operand(methodRegister, Method::LITERAL_INFO_OFFSET), rdi);
775 // constantPool: rbx
776 __ Movq(Operand(methodRegister, Method::CONSTANT_POOL_OFFSET), rbx);
777 // pc: r12
778 if (pcRegister != r12) {
779 __ Movq(pcRegister, r12);
780 }
781
782 Register bcIndexRegister = rax;
783 Register tempRegister = __ AvailableRegister1();
784 if (hasException) {
785 __ Movq(kungfu::BytecodeStubCSigns::ID_ExceptionHandler, bcIndexRegister);
786 } else {
787 __ Movzbq(Operand(pcRegister, 0), bcIndexRegister);
788 }
789 // acc: rsi
790 if (accRegister != rInvalid) {
791 ASSERT(accRegister == rsi);
792 } else {
793 __ Movq(JSTaggedValue::Hole().GetRawData(), rsi);
794 }
795 __ Movq(Operand(r13, bcIndexRegister, Times8, JSThread::GlueData::GetBCStubEntriesOffset(false)), tempRegister);
796 __ Jmp(tempRegister);
797 }
798
799 // uint64_t PushCallRangeAndDispatchNative(uintptr_t glue, uint32_t argc, JSTaggedType calltarget, uintptr_t argv[])
800 // c++ calling convention call js function
801 // Input: %rdi - glue
802 // %rsi - nativeCode
803 // %rdx - func
804 // %rcx - thisValue
805 // %r8 - argc
806 // %r9 - argV (...)
PushCallRangeAndDispatchNative(ExtendedAssembler * assembler)807 void AsmInterpreterCall::PushCallRangeAndDispatchNative(ExtendedAssembler *assembler)
808 {
809 __ BindAssemblerStub(RTSTUB_ID(PushCallRangeAndDispatchNative));
810 CallNativeWithArgv(assembler, false);
811 }
812
PushCallNewAndDispatchNative(ExtendedAssembler * assembler)813 void AsmInterpreterCall::PushCallNewAndDispatchNative(ExtendedAssembler *assembler)
814 {
815 __ BindAssemblerStub(RTSTUB_ID(PushCallNewAndDispatchNative));
816 CallNativeWithArgv(assembler, true);
817 }
818
PushNewTargetAndDispatchNative(ExtendedAssembler * assembler)819 void AsmInterpreterCall::PushNewTargetAndDispatchNative(ExtendedAssembler *assembler)
820 {
821 __ BindAssemblerStub(RTSTUB_ID(PushNewTargetAndDispatchNative));
822 CallNativeWithArgv(assembler, true, true);
823 }
824
// Builds a BUILTIN_FRAME_WITH_ARGV frame, copies the JS arguments onto the
// native stack, materializes an EcmaRuntimeCallInfo (rdi) and invokes the
// native code. On stack overflow it rebuilds a minimal overflow frame and
// calls the ThrowStackOverflowException runtime stub instead.
// Input: %rdi - glue
//        %rsi - nativeCode
//        %rdx - func
//        %rcx - thisValue
//        %r8  - numArgs
//        %r9  - stackArgs
void AsmInterpreterCall::CallNativeWithArgv(ExtendedAssembler *assembler, bool callNew, bool hasNewTarget)
{
    Register glue = rdi;
    Register nativeCode = rsi;
    Register func = rdx;
    Register thisValue = rcx;
    Register numArgs = r8;
    Register stackArgs = r9;
    Register temporary = rax;
    Register temporary2 = r11;
    Register opNumArgs = r10;
    Label aligned;
    Label pushThis;
    Label stackOverflow;

    // rbp is NOT yet updated for this frame type (isFrameComplete == false);
    // it is set only after all stack args are pushed, so the cpu profiler
    // never sees a half-built frame.
    bool isFrameComplete = PushBuiltinFrame(assembler, glue, FrameType::BUILTIN_FRAME_WITH_ARGV);

    __ Push(numArgs);
    __ Cmpq(0, numArgs);
    __ Jz(&pushThis);
    __ Movq(numArgs, opNumArgs);
    // Copy numArgs slots from stackArgs onto the native stack, checking for overflow.
    PushArgsWithArgvAndCheckStack(assembler, glue, opNumArgs, stackArgs, temporary, temporary2, &stackOverflow);

    __ Bind(&pushThis);
    __ Push(thisValue);
    // new.target
    if (callNew) {
        if (hasNewTarget) {
            Register newTarget = r12;
            // 5: skip frame type, numArgs, func, newTarget and this
            __ Movq(Operand(rsp, numArgs, Times8, 5 * FRAME_SLOT_SIZE), newTarget);
            __ Pushq(newTarget);
        } else {
            // For a plain `new`, the callee itself is new.target.
            __ Pushq(func);
        }
    } else {
            // Non-constructor call: new.target is undefined.
        __ Pushq(JSTaggedValue::Undefined().GetRawData());
    }
    __ Pushq(func);
    if (!isFrameComplete) {
        // Frame is now fully populated; point rbp at its top.
        // 5: skip frame type, numArgs, func, newTarget and this
        __ Leaq(Operand(rsp, numArgs, Times8, 5 * FRAME_SLOT_SIZE), rbp);
    }
    __ Movq(rsp, stackArgs);

    // push argc (actual args + callTarget/newTarget/this)
    __ Addl(NUM_MANDATORY_JSFUNC_ARGS, numArgs);
    __ Pushq(numArgs);
    // push thread
    __ Pushq(glue);
    // EcmaRuntimeCallInfo
    __ Movq(rsp, rdi);

    __ Testq(0xf, rsp); // 0xf: low 4 bits must be 0b0000 (16-byte alignment)
    __ Jz(&aligned, Distance::Near);
    __ PushAlignBytes();

    __ Bind(&aligned);
    CallNativeInternal(assembler, nativeCode);
    __ Ret();

    __ Bind(&stackOverflow);
    {
        Label aligneThrow;
        // Reset rsp to the recorded leave frame, then build a dedicated
        // overflow frame so the unwinder sees a consistent stack.
        __ Movq(Operand(glue, JSThread::GlueData::GetLeaveFrameOffset(false)), rsp);
        __ Pushq(static_cast<int32_t>(FrameType::BUILTIN_FRAME_WITH_ARGV_STACK_OVER_FLOW_FRAME)); // frame type
        __ Pushq(0); // argc
        __ Pushq(JSTaggedValue::VALUE_UNDEFINED); // this
        __ Pushq(JSTaggedValue::VALUE_UNDEFINED); // newTarget
        __ Pushq(JSTaggedValue::VALUE_UNDEFINED); // callTarget
        // 5: skip frame type, argc, this, newTarget and callTarget
        // +----------------------------------------------------------------+ <---- rbp = rsp + 5 * frame_slot_size
        // |   FrameType =  BUILTIN_FRAME_WITH_ARGV_STACK_OVER_FLOW_FRAME   |
        // |----------------------------------------------------------------|
        // |                           argc = 0                             |
        // |----------------------------------------------------------------|
        // |                        this = undefine                         |
        // |----------------------------------------------------------------|
        // |                      newTarget = undefined                     |
        // |----------------------------------------------------------------|
        // |                      callTarget = undefined                    |
        // +----------------------------------------------------------------+ <---- rsp
        __ Leaq(Operand(rsp, 5 * FRAME_SLOT_SIZE), rbp);

        __ Testq(0xf, rsp); // 0xf: low 4 bits must be 0b0000 (16-byte alignment)
        __ Jz(&aligneThrow, Distance::Near);
        __ PushAlignBytes();

        __ Bind(&aligneThrow);
        // Call the ThrowStackOverflowException runtime stub via the glue table.
        Register trampolineIdRegister = r9;
        Register trampolineRegister = r10;
        __ Movq(kungfu::RuntimeStubCSigns::ID_ThrowStackOverflowException, trampolineIdRegister);
        __ Movq(Operand(glue, trampolineIdRegister, Times8, JSThread::GlueData::GetRTStubEntriesOffset(false)),
            trampolineRegister);
        __ Callq(trampolineRegister);

        // resume rsp
        __ Movq(rbp, rsp);
        __ Pop(rbp);
        __ Ret();
    }
}
927
// Builds a BUILTIN_ENTRY_FRAME and invokes the callee's native entry point.
// Input: %rdi - glue
//        %rsi - function (callTarget)
//        %rdx - method (used only when !isJSFunction)
//        %r9  - argv
void AsmInterpreterCall::CallNativeEntry(ExtendedAssembler *assembler, bool isJSFunction)
{
    Register glue = rdi;
    Register argv = r9;
    Register function = rsi;
    Register nativeCode = r10;
    // get native pointer
    if (isJSFunction) {
        [[maybe_unused]] TempRegisterScope scope(assembler);
        Register lexicalEnv = __ TempRegister();

        __ Movq(Operand(function, JSFunctionBase::CODE_ENTRY_OFFSET), nativeCode);

        // If the function carries a lexical env, publish it as the thread's
        // current env before entering native code.
        Label next;
        __ Movq(Operand(function, JSFunction::LEXICAL_ENV_OFFSET), lexicalEnv);
        __ Cmpq(JSTaggedValue::Undefined().GetRawData(), lexicalEnv);
        __ Je(&next);
        __ Movq(lexicalEnv, Operand(glue, JSThread::GlueData::GetCurrentEnvOffset(false)));
        __ Bind(&next);
    } else {
        // JSProxy or JSBoundFunction: the native pointer lives on the method.
        Register method = rdx;
        __ Movq(Operand(method, Method::NATIVE_POINTER_OR_BYTECODE_ARRAY_OFFSET), nativeCode);
    }

    __ PushAlignBytes();
    __ Push(function);
    // 3: 24 means skip thread & argc & returnAddr
    __ Subq(3 * FRAME_SLOT_SIZE, rsp);
    PushBuiltinFrame(assembler, glue, FrameType::BUILTIN_ENTRY_FRAME);
    __ Movq(argv, r11);
    // 2: 16 means skip numArgs & thread
    __ Subq(2 * FRAME_SLOT_SIZE, r11);
    // EcmaRuntimeCallInfo
    __ Movq(r11, rdi);

    CallNativeInternal(assembler, nativeCode);

    // 5: pop the five slots pushed above (align slot, function, and the
    // 3 reserved thread/argc/returnAddr slots)
    __ Addq(5 * FRAME_SLOT_SIZE, rsp);
    __ Ret();
}
970
971 // uint64_t PushCallArgsAndDispatchNative(uintptr_t codeAddress, uintptr_t glue, uint32_t argc, ...)
// webkit_jscc calling convention call runtime_id's runtime function(c-abi)
973 // Input: %rax - codeAddress
974 // stack layout: sp + N*8 argvN
975 // ........
976 // sp + 24: argv1
977 // sp + 16: argv0
978 // sp + 8: actualArgc
979 // sp: thread
980 // construct Native Leave Frame
981 // +--------------------------+
982 // | argV[N - 1] |
983 // |--------------------------|
984 // | . . . . |
985 // |--------------------------+
986 // | argV[2]=this |
987 // +--------------------------+
988 // | argV[1]=new-target |
989 // +--------------------------+
990 // | argV[0]=call-target |
991 // +--------------------------+ ---------
992 // | argc | ^
993 // |--------------------------| |
994 // | thread | |
995 // |--------------------------| |
996 // | returnAddr | BuiltinFrame
997 // |--------------------------| |
998 // | callsiteFp | |
999 // |--------------------------| |
1000 // | frameType | v
1001 // +--------------------------+ ---------
1002
PushCallArgsAndDispatchNative(ExtendedAssembler * assembler)1003 void AsmInterpreterCall::PushCallArgsAndDispatchNative(ExtendedAssembler *assembler)
1004 {
1005 __ BindAssemblerStub(RTSTUB_ID(PushCallArgsAndDispatchNative));
1006 Register nativeCode = rax;
1007 Register glue = rdi;
1008
1009 __ Movq(Operand(rsp, FRAME_SLOT_SIZE), glue); // 8: glue
1010 PushBuiltinFrame(assembler, glue, FrameType::BUILTIN_FRAME);
1011 __ Leaq(Operand(rbp, 2 * FRAME_SLOT_SIZE), rdi); // 2: skip argc & thread
1012 __ PushAlignBytes();
1013 CallNativeInternal(assembler, nativeCode);
1014 __ Ret();
1015 }
1016
PushBuiltinFrame(ExtendedAssembler * assembler,Register glue,FrameType type)1017 bool AsmInterpreterCall::PushBuiltinFrame(ExtendedAssembler *assembler,
1018 Register glue, FrameType type)
1019 {
1020 __ Pushq(rbp);
1021 __ Movq(rsp, Operand(glue, JSThread::GlueData::GetLeaveFrameOffset(false)));
1022 __ Pushq(static_cast<int32_t>(type));
1023 if (type != FrameType::BUILTIN_FRAME_WITH_ARGV) {
1024 __ Leaq(Operand(rsp, FRAME_SLOT_SIZE), rbp); // 8: skip frame type
1025 return true;
1026 } else if (type == FrameType::BUILTIN_FRAME_WITH_ARGV) {
1027 // this frame push stack args must before update rbp, otherwise cpu profiler maybe visit incomplete stack
1028 // BuiltinWithArgvFrame layout please see frames.h
1029 return false;
1030 } else {
1031 LOG_ECMA(FATAL) << "this branch is unreachable";
1032 UNREACHABLE();
1033 }
1034 }
1035
// Invokes the native code, then restores rsp/rbp from the builtin frame:
// restoring rsp from rbp discards everything pushed for the call in one step.
void AsmInterpreterCall::CallNativeInternal(ExtendedAssembler *assembler, Register nativeCode)
{
    __ Callq(nativeCode);
    // resume rsp
    __ Movq(rbp, rsp);
    __ Pop(rbp);
}
1043
1044 // ResumeRspAndDispatch(uintptr_t glue, uintptr_t sp, uintptr_t pc, uintptr_t constantPool,
1045 // uint64_t profileTypeInfo, uint64_t acc, uint32_t hotnessCounter, size_t jumpSize)
1046 // GHC calling convention
1047 // %r13 - glue
1048 // %rbp - sp
1049 // %r12 - pc
1050 // %rbx - constantPool
1051 // %r14 - profileTypeInfo
1052 // %rsi - acc
1053 // %rdi - hotnessCounter
1054 // %r8 - jumpSizeAfterCall
// Pops the callee's interpreted frame, restores the caller's sp/rsp, advances
// pc past the call bytecode and dispatches to the next bytecode stub.
// jumpSize <= 0 flags a return from a new-object-range call, which needs
// special accumulator handling: a non-object result from a base constructor
// is replaced with `this`, and a non-object result from a derived constructor
// raises an exception.
void AsmInterpreterCall::ResumeRspAndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(ResumeRspAndDispatch));
    Register glueRegister = __ GlueRegister();
    Register spRegister = rbp;
    Register pcRegister = r12;
    Register ret = rsi; // acc carries the callee's return value
    Register jumpSizeRegister = r8;

    // frameStateBase = sp - sizeof(AsmInterpretedFrame): the callee's frame state.
    Register frameStateBaseRegister = r11;
    __ Movq(spRegister, frameStateBaseRegister);
    __ Subq(AsmInterpretedFrame::GetSize(false), frameStateBaseRegister);

    Label dispatch;
    Label newObjectRangeReturn;
    __ Cmpq(0, jumpSizeRegister);
    __ Jle(&newObjectRangeReturn);

    // Normal return: pop to the caller frame, step pc forward by jumpSize.
    __ Movq(Operand(frameStateBaseRegister, AsmInterpretedFrame::GetBaseOffset(false)), spRegister); // update sp
    __ Addq(jumpSizeRegister, pcRegister); // newPC
    Register temp = rax;
    Register opcodeRegister = rax;
    __ Movzbq(Operand(pcRegister, 0), opcodeRegister);

    __ Bind(&dispatch);
    {
        __ Movq(Operand(frameStateBaseRegister, AsmInterpretedFrame::GetFpOffset(false)), rsp); // resume rsp
        Register bcStubRegister = r11;
        __ Movq(Operand(glueRegister, opcodeRegister, Times8, JSThread::GlueData::GetBCStubEntriesOffset(false)),
            bcStubRegister);
        __ Jmp(bcStubRegister);
    }

    Label getThis;
    Label notUndefined;
    __ Bind(&newObjectRangeReturn);
    __ Cmpq(JSTaggedValue::Undefined().GetRawData(), ret);
    __ Jne(&notUndefined);

    // acc is undefined: substitute `this` from the frame, then dispatch.
    __ Bind(&getThis);
    __ Movq(Operand(frameStateBaseRegister, AsmInterpretedFrame::GetBaseOffset(false)), spRegister); // update sp
    __ Subq(jumpSizeRegister, pcRegister); // subtract the negative jumpSize to advance pc
    __ Movzbq(Operand(pcRegister, 0), opcodeRegister);
    {
        __ Movq(Operand(frameStateBaseRegister, AsmInterpretedFrame::GetThisOffset(false)), ret);
        __ Movq(Operand(frameStateBaseRegister, AsmInterpretedFrame::GetFpOffset(false)), rsp); // resume rsp
        Register bcStubRegister = r11;
        __ Movq(Operand(glueRegister, opcodeRegister, Times8, JSThread::GlueData::GetBCStubEntriesOffset(false)),
            bcStubRegister);
        __ Jmp(bcStubRegister);
    }

    __ Bind(&notUndefined);
    {
        Label notEcmaObject;
        // Heap-object check: any tag bit set means the value is not a pointer.
        __ Movabs(JSTaggedValue::TAG_HEAPOBJECT_MASK, temp);
        __ And(ret, temp);
        __ Cmpq(0, temp);
        __ Jne(&notEcmaObject);
        // acc is heap object
        __ Movq(Operand(ret, JSFunction::HCLASS_OFFSET), temp); // hclass
        // NOTE(review): the hclass word is masked with GC_STATE_MASK before the
        // bitfield read -- presumably stripping GC mark bits from the pointer; confirm.
        Register maskRegister = r10;
        __ Movabs(TaggedObject::GC_STATE_MASK, maskRegister);
        __ And(maskRegister, temp);
        __ Movl(Operand(temp, JSHClass::BIT_FIELD_OFFSET), temp);
        __ Cmpb(static_cast<int32_t>(JSType::ECMA_OBJECT_LAST), temp);
        __ Ja(&notEcmaObject);
        __ Cmpb(static_cast<int32_t>(JSType::ECMA_OBJECT_FIRST), temp);
        __ Jb(&notEcmaObject);
        // acc is ecma object: keep it as the construct result and dispatch.
        __ Movq(Operand(frameStateBaseRegister, AsmInterpretedFrame::GetBaseOffset(false)), spRegister); // update sp
        __ Subq(jumpSizeRegister, pcRegister); // subtract the negative jumpSize to advance pc
        __ Movzbq(Operand(pcRegister, 0), opcodeRegister);
        __ Jmp(&dispatch);

        __ Bind(&notEcmaObject);
        {
            // load constructor kind from the method's extra literal info
            __ Movq(Operand(frameStateBaseRegister, AsmInterpretedFrame::GetFunctionOffset(false)), temp);
            __ Movq(Operand(temp, JSFunctionBase::METHOD_OFFSET), temp);
            __ Movq(Operand(temp, Method::EXTRA_LITERAL_INFO_OFFSET), temp);
            __ Shr(MethodLiteral::FunctionKindBits::START_BIT, temp);
            __ Andl((1LU << MethodLiteral::FunctionKindBits::SIZE) - 1, temp);
            __ Cmpl(static_cast<int32_t>(FunctionKind::CLASS_CONSTRUCTOR), temp);
            __ Jbe(&getThis); // constructor is base: fall back to `this`
            // fall through
        }
        // exception branch: derived constructor returned a non-object value
        {
            __ Movq(Operand(frameStateBaseRegister, AsmInterpretedFrame::GetBaseOffset(false)), spRegister);
            __ Movq(kungfu::BytecodeStubCSigns::ID_NewObjectRangeThrowException, opcodeRegister);
            __ Jmp(&dispatch);
        }
    }
}
1150
1151 // c++ calling convention
1152 // %rdi - glue
1153 // %rsi - callTarget
1154 // %rdx - method
1155 // %rcx - callField
1156 // %r8 - receiver
1157 // %r9 - value
CallGetter(ExtendedAssembler * assembler)1158 void AsmInterpreterCall::CallGetter(ExtendedAssembler *assembler)
1159 {
1160 __ BindAssemblerStub(RTSTUB_ID(CallGetter));
1161 Label target;
1162
1163 PushAsmInterpBridgeFrame(assembler);
1164 __ Callq(&target);
1165 PopAsmInterpBridgeFrame(assembler);
1166 __ Ret();
1167 __ Bind(&target);
1168 JSCallCommonEntry(assembler, JSCallMode::CALL_GETTER, FrameTransitionType::OTHER_TO_OTHER);
1169 }
1170
CallSetter(ExtendedAssembler * assembler)1171 void AsmInterpreterCall::CallSetter(ExtendedAssembler *assembler)
1172 {
1173 __ BindAssemblerStub(RTSTUB_ID(CallSetter));
1174 Label target;
1175 PushAsmInterpBridgeFrame(assembler);
1176 __ Callq(&target);
1177 PopAsmInterpBridgeFrame(assembler);
1178 __ Ret();
1179 __ Bind(&target);
1180 JSCallCommonEntry(assembler, JSCallMode::CALL_SETTER, FrameTransitionType::OTHER_TO_OTHER);
1181 }
1182
1183 // Input: glue - %rdi
1184 // callTarget - %rsi
1185 // method - %rdx
1186 // callField - %rcx
1187 // arg0(argc) - %r8
1188 // arg1(arglist) - %r9
1189 // argthis - stack
CallReturnWithArgv(ExtendedAssembler * assembler)1190 void AsmInterpreterCall::CallReturnWithArgv(ExtendedAssembler *assembler)
1191 {
1192 __ BindAssemblerStub(RTSTUB_ID(CallReturnWithArgv));
1193 Label target;
1194 PushAsmInterpBridgeFrame(assembler);
1195 Register r13 = __ CppJSCallAvailableRegister1();
1196 __ Movq(Operand(rbp, FRAME_SLOT_SIZE), r13);
1197 __ Callq(&target);
1198 PopAsmInterpBridgeFrame(assembler);
1199 __ Ret();
1200 __ Bind(&target);
1201 {
1202 JSCallCommonEntry(assembler, JSCallMode::CALL_THIS_ARGV_WITH_RETURN,
1203 FrameTransitionType::OTHER_TO_OTHER);
1204 }
1205 }
1206
CallContainersArgs2(ExtendedAssembler * assembler)1207 void AsmInterpreterCall::CallContainersArgs2(ExtendedAssembler *assembler)
1208 {
1209 __ BindAssemblerStub(RTSTUB_ID(CallContainersArgs2));
1210 Label target;
1211 PushAsmInterpBridgeFrame(assembler);
1212 GetArgvAtStack(assembler);
1213 __ Callq(&target);
1214 PopAsmInterpBridgeFrame(assembler);
1215 __ Ret();
1216 __ Bind(&target);
1217 {
1218 JSCallCommonEntry(assembler, JSCallMode::CALL_THIS_ARG2_WITH_RETURN,
1219 FrameTransitionType::OTHER_TO_OTHER);
1220 }
1221 }
1222
CallContainersArgs3(ExtendedAssembler * assembler)1223 void AsmInterpreterCall::CallContainersArgs3(ExtendedAssembler *assembler)
1224 {
1225 __ BindAssemblerStub(RTSTUB_ID(CallContainersArgs3));
1226 Label target;
1227 PushAsmInterpBridgeFrame(assembler);
1228 GetArgvAtStack(assembler);
1229 __ Callq(&target);
1230 PopAsmInterpBridgeFrame(assembler);
1231 __ Ret();
1232 __ Bind(&target);
1233 {
1234 JSCallCommonEntry(assembler, JSCallMode::CALL_THIS_ARG3_WITH_RETURN,
1235 FrameTransitionType::OTHER_TO_OTHER);
1236 }
1237 }
1238
1239 // ResumeRspAndReturn(uintptr_t acc)
1240 // GHC calling convention
1241 // %r13 - acc
1242 // %rbp - prevSp
1243 // %r12 - sp
ResumeRspAndReturn(ExtendedAssembler * assembler)1244 void AsmInterpreterCall::ResumeRspAndReturn(ExtendedAssembler *assembler)
1245 {
1246 __ BindAssemblerStub(RTSTUB_ID(ResumeRspAndReturn));
1247 Register currentSp = r12;
1248 Register fpRegister = r10;
1249 intptr_t offset = AsmInterpretedFrame::GetFpOffsetAsIntptr(false) -
1250 AsmInterpretedFrame::GetSizeAsIntptr(false);
1251 __ Movq(Operand(currentSp, static_cast<int32_t>(offset)), fpRegister);
1252 __ Movq(fpRegister, rsp);
1253 // return
1254 {
1255 __ Movq(r13, rax);
1256 __ Ret();
1257 }
1258 }
1259
1260 // ResumeRspAndReturnBaseline(uintptr_t acc)
1261 // GHC calling convention
1262 // %r13 - glue
1263 // %rbp - acc
1264 // %r12 - prevSp
1265 // %rbx - sp
1266 // %r14 - jumpSizeAfterCall
// Baseline-code variant of ResumeRspAndReturn: restores rsp from the frame's
// fp slot and returns acc in rax, but first applies the new-object-range
// accumulator fix-up (jumpSize <= 0): an undefined or non-object result from
// a base constructor is replaced with `this` from the frame.
void AsmInterpreterCall::ResumeRspAndReturnBaseline(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(ResumeRspAndReturnBaseline));
    Register currentSp = rbx;
    Register fpRegister = r10;
    // fp slot offset relative to currentSp (frame end), hence negative.
    intptr_t fpOffset = static_cast<intptr_t>(AsmInterpretedFrame::GetFpOffset(false)) -
        static_cast<intptr_t>(AsmInterpretedFrame::GetSize(false));
    __ Movq(Operand(currentSp, static_cast<int32_t>(fpOffset)), fpRegister);
    __ Movq(fpRegister, rsp);

    // Check result: jumpSize > 0 means a normal call return, no fix-up needed.
    Register ret = rbp;
    Register jumpSizeRegister = r14;
    Label getThis;
    Label notUndefined;
    Label normalReturn;
    Label newObjectRangeReturn;
    __ Cmpq(0, jumpSizeRegister);
    __ Jg(&normalReturn);

    __ Bind(&newObjectRangeReturn);
    {
        __ Cmpq(JSTaggedValue::Undefined().GetRawData(), ret);
        __ Jne(&notUndefined);

        // acc is undefined: return `this` from the frame instead.
        __ Bind(&getThis);
        intptr_t thisOffset = static_cast<intptr_t>(AsmInterpretedFrame::GetThisOffset(false)) -
            static_cast<intptr_t>(AsmInterpretedFrame::GetSize(false));
        __ Movq(Operand(currentSp, static_cast<int32_t>(thisOffset)), ret);
        __ Jmp(&normalReturn);

        // acc is not undefined
        __ Bind(&notUndefined);
        {
            Register temp = rax;
            Label notEcmaObject;
            // Heap-object check: any tag bit set means not a pointer.
            __ Movabs(JSTaggedValue::TAG_HEAPOBJECT_MASK, temp);
            __ And(ret, temp);
            __ Cmpq(0, temp);
            __ Jne(&notEcmaObject);
            // acc is heap object
            __ Movq(Operand(ret, JSFunction::HCLASS_OFFSET), temp); // hclass
            // NOTE(review): masks the hclass word with GC_STATE_MASK before the
            // bitfield read -- presumably strips GC mark bits; confirm.
            Register maskRegister = r11;
            __ Movabs(TaggedObject::GC_STATE_MASK, maskRegister);
            __ And(maskRegister, temp);
            __ Movl(Operand(temp, JSHClass::BIT_FIELD_OFFSET), temp);
            __ Cmpb(static_cast<int32_t>(JSType::ECMA_OBJECT_LAST), temp);
            __ Ja(&notEcmaObject);
            __ Cmpb(static_cast<int32_t>(JSType::ECMA_OBJECT_FIRST), temp);
            __ Jb(&notEcmaObject);
            // acc is ecma object: keep it as the construct result.
            __ Jmp(&normalReturn);

            __ Bind(&notEcmaObject);
            {
                // load constructor kind from the method's extra literal info
                intptr_t funcOffset = AsmInterpretedFrame::GetFunctionOffsetAsIntptr(false) -
                    AsmInterpretedFrame::GetSizeAsIntptr(false);
                __ Movq(Operand(currentSp, static_cast<int32_t>(funcOffset)), temp);
                __ Movq(Operand(temp, JSFunctionBase::METHOD_OFFSET), temp);
                __ Movq(Operand(temp, Method::EXTRA_LITERAL_INFO_OFFSET), temp);
                __ Shr(MethodLiteral::FunctionKindBits::START_BIT, temp);
                __ Andl((1LU << MethodLiteral::FunctionKindBits::SIZE) - 1, temp);
                __ Cmpl(static_cast<int32_t>(FunctionKind::CLASS_CONSTRUCTOR), temp);
                __ Jbe(&getThis); // constructor is base
                // fall through: derived constructor with non-object result
            }
        }
    }
    __ Bind(&normalReturn);
    __ Movq(ret, rax);
    __ Ret();
}
1341
1342 // ResumeCaughtFrameAndDispatch(uintptr_t glue, uintptr_t sp, uintptr_t pc, uintptr_t constantPool,
1343 // uint64_t profileTypeInfo, uint64_t acc, uint32_t hotnessCounter)
1344 // GHC calling convention
1345 // %r13 - glue
1346 // %rbp - sp
1347 // %r12 - pc
1348 // %rbx - constantPool
1349 // %r14 - profileTypeInfo
1350 // %rsi - acc
1351 // %rdi - hotnessCounter
ResumeCaughtFrameAndDispatch(ExtendedAssembler * assembler)1352 void AsmInterpreterCall::ResumeCaughtFrameAndDispatch(ExtendedAssembler *assembler)
1353 {
1354 __ BindAssemblerStub(RTSTUB_ID(ResumeCaughtFrameAndDispatch));
1355 Register glueRegister = __ GlueRegister();
1356 Register pcRegister = r12;
1357
1358 Label dispatch;
1359 Register fpRegister = r11;
1360 __ Movq(Operand(glueRegister, JSThread::GlueData::GetLastFpOffset(false)), fpRegister);
1361 __ Cmpq(0, fpRegister);
1362 __ Jz(&dispatch);
1363 __ Movq(fpRegister, rsp); // resume rsp
1364 __ Bind(&dispatch);
1365 {
1366 Register opcodeRegister = rax;
1367 __ Movzbq(Operand(pcRegister, 0), opcodeRegister);
1368 Register bcStubRegister = r11;
1369 __ Movq(Operand(glueRegister, opcodeRegister, Times8, JSThread::GlueData::GetBCStubEntriesOffset(false)),
1370 bcStubRegister);
1371 __ Jmp(bcStubRegister);
1372 }
1373 }
1374
1375 // ResumeUncaughtFrameAndReturn(uintptr_t glue)
1376 // GHC calling convention
1377 // %r13 - glue
1378 // %rbp - sp
1379 // %r12 - acc
ResumeUncaughtFrameAndReturn(ExtendedAssembler * assembler)1380 void AsmInterpreterCall::ResumeUncaughtFrameAndReturn(ExtendedAssembler *assembler)
1381 {
1382 __ BindAssemblerStub(RTSTUB_ID(ResumeUncaughtFrameAndReturn));
1383 Register glueRegister = __ GlueRegister();
1384 Register acc(r12);
1385 Register cppRet(rax);
1386
1387 Label ret;
1388 Register fpRegister = r11;
1389 __ Movq(Operand(glueRegister, JSThread::GlueData::GetLastFpOffset(false)), fpRegister);
1390 __ Cmpq(0, fpRegister);
1391 __ Jz(&ret);
1392 __ Movq(fpRegister, rsp); // resume rsp
1393 __ Bind(&ret);
1394 // this method will return to Execute(cpp calling convention), and the return value should be put into rax.
1395 __ Movq(acc, cppRet);
1396 __ Ret();
1397 }
1398
1399 // ResumeRspAndRollback(uintptr_t glue, uintptr_t sp, uintptr_t pc, uintptr_t constantPool,
1400 // uint64_t profileTypeInfo, uint64_t acc, uint32_t hotnessCounter, size_t jumpSize)
1401 // GHC calling convention
1402 // %r13 - glue
1403 // %rbp - sp
1404 // %r12 - pc
1405 // %rbx - constantPool
1406 // %r14 - profileTypeInfo
1407 // %rsi - acc
1408 // %rdi - hotnessCounter
1409 // %r8 - jumpSizeAfterCall
// Like ResumeRspAndDispatch, but rolls the call back: pops to the caller
// frame, advances pc, reloads the frame's function slot into acc and
// re-dispatches.
void AsmInterpreterCall::ResumeRspAndRollback(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(ResumeRspAndRollback));
    Register glueRegister = __ GlueRegister();
    Register spRegister = rbp;
    Register pcRegister = r12;
    Register ret = rsi;
    Register jumpSizeRegister = r8;

    // frameStateBase = sp - sizeof(AsmInterpretedFrame)
    Register frameStateBaseRegister = r11;
    __ Movq(spRegister, frameStateBaseRegister);
    __ Subq(AsmInterpretedFrame::GetSize(false), frameStateBaseRegister);

    __ Movq(Operand(frameStateBaseRegister, AsmInterpretedFrame::GetBaseOffset(false)), spRegister); // update sp
    __ Addq(jumpSizeRegister, pcRegister); // newPC
    Register opcodeRegister = rax;
    __ Movzbq(Operand(pcRegister, 0), opcodeRegister);

    // NOTE(review): "restore acc" reads the frame's *function* slot into acc;
    // confirm the rollback protocol indeed re-dispatches with the callee in acc.
    __ Movq(Operand(frameStateBaseRegister, AsmInterpretedFrame::GetFunctionOffset(false)), ret); // restore acc

    __ Movq(Operand(frameStateBaseRegister, AsmInterpretedFrame::GetFpOffset(false)), rsp); // resume rsp
    Register bcStubRegister = r11;
    __ Movq(Operand(glueRegister, opcodeRegister, Times8, JSThread::GlueData::GetBCStubEntriesOffset(false)),
        bcStubRegister);
    __ Jmp(bcStubRegister);
}
1436
1437 // preserve all the general registers, except r11 and callee saved registers/
1438 // and call r11
// Calls the target in r11 while preserving all caller-saved general registers
// (rdi, rsi, rdx, rcx, r8, r9, r10, rax); callee-saved registers are left to
// the callee. Builds a minimal OPTIMIZED_FRAME around the call.
void AsmInterpreterCall::PreserveMostCall(ExtendedAssembler* assembler)
{
    // * layout as the following:
    //               +--------------------------+ ---------
    //               |       . . . . .          |         ^
    // callerSP ---> |--------------------------|         |
    //               |       returnAddr         |         |
    //               |--------------------------|   OptimizedFrame
    //               |       callsiteFp         |         |
    //       fp ---> |--------------------------|         |
    //               |     OPTIMIZED_FRAME      |         v
    //               +--------------------------+ ---------
    //               |           rdi            |
    //               +--------------------------+
    //               |           rsi            |
    //               +--------------------------+
    //               |           rdx            |
    //               +--------------------------+
    //               |           rcx            |
    //               +--------------------------+
    //               |           r8             |
    //               +--------------------------+
    //               |           r9             |
    //               +--------------------------+
    //               |           r10            |
    //               +--------------------------+
    //               |           rax            |
    //               +--------------------------+
    //               |          align           |
    // calleeSP ---> +--------------------------+
    {
        // prologue to save rbp, frametype, and update rbp.
        __ Pushq(rbp);
        __ Pushq(static_cast<int64_t>(FrameType::OPTIMIZED_FRAME)); // set frame type
        __ Leaq(Operand(rsp, FRAME_SLOT_SIZE), rbp); // skip frame type
    }
    // 9 slots: 8 preserved registers plus one alignment slot at index 0.
    int32_t PreserveRegisterIndex = 9;
    // rdi,rsi,rdx,rcx,r8,r9,r10,rax should be preserved,
    // other general registers are callee saved register, callee will save them.
    __ Subq(PreserveRegisterIndex * FRAME_SLOT_SIZE, rsp);
    // Spill into slots 8 down to 1 (slot 0 stays free for alignment).
    __ Movq(rdi, Operand(rsp, FRAME_SLOT_SIZE * (--PreserveRegisterIndex)));
    __ Movq(rsi, Operand(rsp, FRAME_SLOT_SIZE * (--PreserveRegisterIndex)));
    __ Movq(rdx, Operand(rsp, FRAME_SLOT_SIZE * (--PreserveRegisterIndex)));
    __ Movq(rcx, Operand(rsp, FRAME_SLOT_SIZE * (--PreserveRegisterIndex)));
    __ Movq(r8, Operand(rsp, FRAME_SLOT_SIZE * (--PreserveRegisterIndex)));
    __ Movq(r9, Operand(rsp, FRAME_SLOT_SIZE * (--PreserveRegisterIndex)));
    __ Movq(r10, Operand(rsp, FRAME_SLOT_SIZE * (--PreserveRegisterIndex)));
    __ Movq(rax, Operand(rsp, FRAME_SLOT_SIZE * (--PreserveRegisterIndex)));
    __ Callq(r11);
    // Reload in reverse order from slots 1 up to 8.
    __ Movq(Operand(rsp, FRAME_SLOT_SIZE * (PreserveRegisterIndex++)), rax);
    __ Movq(Operand(rsp, FRAME_SLOT_SIZE * (PreserveRegisterIndex++)), r10);
    __ Movq(Operand(rsp, FRAME_SLOT_SIZE * (PreserveRegisterIndex++)), r9);
    __ Movq(Operand(rsp, FRAME_SLOT_SIZE * (PreserveRegisterIndex++)), r8);
    __ Movq(Operand(rsp, FRAME_SLOT_SIZE * (PreserveRegisterIndex++)), rcx);
    __ Movq(Operand(rsp, FRAME_SLOT_SIZE * (PreserveRegisterIndex++)), rdx);
    __ Movq(Operand(rsp, FRAME_SLOT_SIZE * (PreserveRegisterIndex++)), rsi);
    __ Movq(Operand(rsp, FRAME_SLOT_SIZE * (PreserveRegisterIndex++)), rdi);
    {
        // epilogue to restore rsp, rbp.
        // need add the frametype slot
        __ Addq(PreserveRegisterIndex * FRAME_SLOT_SIZE + FRAME_SLOT_SIZE, rsp);
        __ Popq(rbp);
        __ Ret();
    }
}
1504
1505 // ASMFastWriteBarrier(GateRef glue, GateRef obj, GateRef offset, GateRef value)
1506 // c calling convention, but preserve all general registers except %r11
// %rdi - glue
1508 // %rsi - obj
1509 // %rdx - offset
1510 // %rcx - value
// Fast-path write barrier: classifies the stored value's region by its flag
// byte and either returns immediately (no barrier needed) or tail-calls the
// appropriate slow-path stub via PreserveMostCall (which preserves all
// caller-saved registers except r11).
void AsmInterpreterCall::ASMFastWriteBarrier(ExtendedAssembler* assembler)
{
    // valid region flag are as follows, assume it will be ALWAYS VALID.
    // Judge the region of value with:
    //                 "young"           "sweepable share"       "readonly share"
    // region flag:  0x08, 0x09,  [0x0A, 0x11],  [0x12, 0x14],  0x15
    // value is share:                            [0x12, 0x15] => valueMaybeSweepableShare
    //     readonly share:                                0x15 => return
    //     sweepable share:                        [0x12, 0x14] => needShareBarrier
    // value is not share: 0x08, 0x09, [0x0A, 0x11],           => valueNotShare
    //     value is young :     0x09                           => needCallNotShare
    //     value is not young : 0x08, [0x0A, 0x11],            => checkMark
    ASSERT(IN_YOUNG_SPACE < SHARED_SPACE_BEGIN && SHARED_SPACE_BEGIN <= SHARED_SWEEPABLE_SPACE_BEGIN &&
        SHARED_SWEEPABLE_SPACE_END < IN_SHARED_READ_ONLY_SPACE && IN_SHARED_READ_ONLY_SPACE == HEAP_SPACE_END);
    __ BindAssemblerStub(RTSTUB_ID(ASMFastWriteBarrier));

    Label needCall;
    Label checkMark;
    Label needCallNotShare;
    Label needShareBarrier;
    Label valueNotShare;
    Label valueMaybeSweepableShare;
    {
        // int8_t *valueRegion = value & (~(JSTaggedValue::TAG_MARK | DEFAULT_REGION_MASK))
        // int8_t valueFlag = *valueRegion
        // if (valueFlag >= SHARED_SWEEPABLE_SPACE_BEGIN){
        //    goto valueMaybeSweepableShare
        // }

        __ Movabs(~(JSTaggedValue::TAG_MARK | DEFAULT_REGION_MASK), r11); // r11 is the mask to get the region.
        __ And(rcx, r11); // r11 is the region address of value.
        __ Movzbl(Operand(r11, 0), r11); // r11 is the flag load from region of value.
        __ Cmpl(Immediate(RegionSpaceFlag::SHARED_SWEEPABLE_SPACE_BEGIN), r11);
        __ Jae(&valueMaybeSweepableShare);
        // if value may be SweepableShare, goto valueMaybeSweepableShare
    }
    __ Bind(&valueNotShare);
    {
        // valueNotShare:
        // if (valueFlag != IN_YOUNG_SPACE){
        //    goto checkMark
        // }
        // int8_t *objRegion = obj & (~(JSTaggedValue::TAG_MARK | DEFAULT_REGION_MASK))
        // int8_t objFlag = *objRegion
        // if (objFlag != IN_YOUNG_SPACE){
        //    goto needCallNotShare
        // }

        __ Cmpl(Immediate(RegionSpaceFlag::IN_YOUNG_SPACE), r11);
        __ Jne(&checkMark);
        // if value is not in young, goto checkMark

        __ Movabs(~(JSTaggedValue::TAG_MARK | DEFAULT_REGION_MASK), r11);
        __ And(rsi, r11); // r11 is the region address of obj.
        __ Movzbl(Operand(r11, 0), r11); // r11 is the flag load from region of obj.
        __ Cmpl(Immediate(RegionSpaceFlag::IN_YOUNG_SPACE), r11);
        __ Jne(&needCallNotShare);
        // if obj is not in young (old -> young store), goto needCallNotShare
    }

    __ Bind(&checkMark);
    {
        // checkMark:
        // int8_t GCStateBitField = *(glue+GCStateBitFieldOffset)
        // if (GCStateBitField & JSThread::CONCURRENT_MARKING_BITFIELD_MASK != 0) {
        //    goto needCallNotShare
        // }
        // return

        __ Movl(Operand(rdi, JSThread::GlueData::GetGCStateBitFieldOffset(false)), r11);
        __ Testb(Immediate(JSThread::CONCURRENT_MARKING_BITFIELD_MASK), r11);
        __ Jne(&needCallNotShare);
        // if GCState is not READY_TO_MARK, go to needCallNotShare.
        __ Ret();
    }

    __ Bind(&valueMaybeSweepableShare);
    {
        // valueMaybeSweepableShare:
        // if (valueFlag != IN_SHARED_READ_ONLY_SPACE){
        //    goto needShareBarrier
        // }
        // return  (read-only shared objects never need a barrier)
        __ Cmpl(Immediate(RegionSpaceFlag::IN_SHARED_READ_ONLY_SPACE), r11);
        __ Jne(&needShareBarrier);
        __ Ret();
    }

    __ Bind(&needCallNotShare);
    {
        // Load the SetNonSValueWithBarrier common stub entry into r11 for PreserveMostCall.
        int32_t NonSValueBarrier = static_cast<int32_t>(JSThread::GlueData::GetCOStubEntriesOffset(false)) +
            kungfu::CommonStubCSigns::SetNonSValueWithBarrier * FRAME_SLOT_SIZE;
        __ Movq(Operand(rdi, NonSValueBarrier), r11);
    }
    __ Bind(&needCall);
    {
        // Tail through PreserveMostCall: calls r11, preserving caller-saved registers.
        PreserveMostCall(assembler);
    }
    __ Bind(&needShareBarrier);
    {
        ASMFastSharedWriteBarrier(assembler, needCall);
    }
}
1614
// %rdi - glue
1616 // %rsi - obj
1617 // %rdx - offset
1618 // %rcx - value
ASMFastSharedWriteBarrier(ExtendedAssembler * assembler,Label & needcall)1619 void AsmInterpreterCall::ASMFastSharedWriteBarrier(ExtendedAssembler* assembler, Label& needcall)
1620 {
1621 Label checkBarrierForSharedValue;
1622 Label restoreScratchRegister;
1623 Label callSharedBarrier;
1624 {
1625 // int8_t *objRegion = obj & (~(JSTaggedValue::TAG_MARK | DEFAULT_REGION_MASK))
1626 // int8_t objFlag = *objRegion
1627 // if (objFlag >= SHARED_SPACE_BEGIN){
1628 // // share to share, just check the barrier
1629 // goto checkBarrierForSharedValue
1630 // }
1631 __ Movabs(~(JSTaggedValue::TAG_MARK | DEFAULT_REGION_MASK), r11); // r11 is the mask to get the region.
1632 __ And(rsi, r11); // r11: region address of obj.
1633 __ Movzbl(Operand(r11, 0), r11); // r11: the flag load from region of obj.
1634 __ Cmpl(Immediate(RegionSpaceFlag::SHARED_SPACE_BEGIN), r11);
1635 __ Jae(&checkBarrierForSharedValue); // if objflag >= SHARED_SPACE_BEGIN => checkBarrierForSharedValue
1636 }
1637 {
1638 // int8_t *objRegion = obj & (~(JSTaggedValue::TAG_MARK | DEFAULT_REGION_MASK))
1639 // int8_t *localToShareSet = *(objRegion + LocalToShareSetOffset)
1640 // if (localToShareSet == 0){
1641 // goto callSharedBarrier
1642 // }
1643 __ Movabs(~(JSTaggedValue::TAG_MARK | DEFAULT_REGION_MASK), r11); // r11 is the mask to get the region.
1644 __ And(rsi, r11); // r11: region address of obj.
1645 __ Movq(Operand(r11, Region::PackedData::GetLocalToShareSetOffset(false)), r11);
1646 // r11 is localToShareSet for obj region.
1647 __ Cmpq(Immediate(0), r11);
1648 __ Je(&callSharedBarrier); // if localToShareSet == 0 => callSharedBarrier
1649 }
1650 {
1651 // r12, r13 will be used as scratch register, spill them.
1652 {
1653 __ Pushq(r12);
1654 __ Pushq(r13);
1655 }
1656 // int64_t objOffset = obj & DEFAULT_REGION_MASK
1657 // int64_t slotOffset = objOffset + offset
1658 // int8_t lowSlotOffset = slotOffset & 0xff
1659
1660 __ Movabs(DEFAULT_REGION_MASK, r12);
1661 __ And(rsi, r12); // obj & DEFAULT_REGION_MASK => r12 is obj's offset to region
1662 __ Addq(rdx, r12); // r12 is slotAddr's offset to region
1663 __ Movzbl(r12, r13); // r13 is low 8 bit of slotAddr's offset to region
1664
1665 // the logic to get byteIndex in stub_builder.cpp
1666 // [63-------------------------35][34------------------------8][7---3][2-0]
1667 // slotOffset: aaaaaaaaaaaaaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbbbbbbbbbbbbbbb ccccc ddd
1668 // 1. bitOffsetPtr = LSR TAGGED_TYPE_SIZE_LOG(3) slotOffset
1669 // bitOffsetPtr: aaaaaaaaaaaaaaaaaaaaaaaaaa aaabbbbbbbbbbbbbbbbbbbbbbbb bbbcc ccc
1670 // 2. bitOffset = TruncPtrToInt32 bitOffsetPtr
1671 // bitOffset: bbbbbbbbbbbbbbbbbbbbbbbb bbbcc ccc
1672 // 3. index = LSR BIT_PER_WORD_LOG2(5) bitOffset
1673 // index: bbbbbbbbbbbbbbbbbbb bbbbb bbb
1674 // 4. byteIndex = Mul index BYTE_PER_WORD(4)
1675 // byteIndex: bbbbbbbbbbbbbbbbbbbbb bbbbb b00
1676
1677 // the logic to get byteIndex here:
1678 // [63-------------------------35][34------------------------8][7---3][2-0]
1679 // slotOffset: aaaaaaaaaaaaaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbbbbbbbbbbbbbbb ccccc ddd
1680 // 1. LSR (TAGGED_TYPE_SIZE_LOG + GCBitset::BIT_PER_WORD_LOG2 - GCBitset::BYTE_PER_WORD_LOG2)(6) slotOffset
1681 // r12: aaaaaaaaaaaaaaaaaaaaaaa aaaaaabbbbbbbbbbbbbbbbbbbbb bbbbb bcc
1682 // indexMask: 00000000000000000000000000000 000000111111111111111111111 11111 100
1683 // 2. And r12 indexMask
1684 // byteIndex: bbbbbbbbbbbbbbbbbbbbb bbbbb b00
1685 constexpr uint32_t byteIndexMask = static_cast<uint32_t>(0xffffffffffffffff >> TAGGED_TYPE_SIZE_LOG) >>
1686 GCBitset::BIT_PER_WORD_LOG2 << GCBitset::BYTE_PER_WORD_LOG2;
1687 static_assert(byteIndexMask == 0x1ffffffc && "LocalToShareSet is changed?");
1688 __ Shrq(TAGGED_TYPE_SIZE_LOG + GCBitset::BIT_PER_WORD_LOG2 - GCBitset::BYTE_PER_WORD_LOG2, r12);
1689 __ Andq(byteIndexMask, r12); // r12 is byteIndex
1690
1691 __ Addq(RememberedSet::GCBITSET_DATA_OFFSET, r11); // r11 is bitsetData addr
1692 __ Addq(r12, r11); // r11 is the addr of bitset value
1693 __ Movl(Operand(r11, 0), r12); // r12: oldsetValue
1694
1695 // the logic to get mask in stub_builder.cpp
1696 // [63-------------------------35][34------------------------8][7---3][2-0]
1697 // bitOffset: bbbbbbbbbbbbbbbbbbbbbbbb bbbcc ccc
1698 // bitPerWordMask: 11 111
1699 // indexInWord = And bitoffset bitPerWordMask
1700 // indexInWord: cc ccc
1701 // mask = 1 << indexInWord
1702
1703 // the logic to test bit set value here:
1704 // [63-------------------------35][34------------------------8][7---3][2-0]
1705 // slotOffset: aaaaaaaaaaaaaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbbbbbbbbbbbbbbb ccccc ddd
1706 // lowSlotOffset: ccccc ddd
1707 // indexInWord = Shrl TAGGED_TYPE_SIZE_LOG lowSlotOffset
1708 // indexInWord: cc ccc
1709 __ Shrl(TAGGED_TYPE_SIZE_LOG, r13);
1710
1711 // if "r13" position in r12 is 1, goto restoreScratchRegister;
1712 // if "r13" position in r12 is 0, set it to 1 and store r12 to r11(addr of bitset value)
1713 __ Btsl(r13, r12);
1714 __ Jb(&restoreScratchRegister);
1715 __ Movl(r12, Operand(r11, 0));
1716 }
1717 __ Bind(&restoreScratchRegister);
1718 {
1719 __ Popq(r13);
1720 __ Popq(r12);
1721 }
1722 __ Bind(&checkBarrierForSharedValue);
1723 {
1724 // checkBarrierForSharedValue:
1725 // int8_t GCStateBitField = *(glue+SharedGCStateBitFieldOffset)
1726 // if (GCStateBitField & JSThread::SHARED_CONCURRENT_MARKING_BITFIELD_MASK != 0) {
1727 // goto callSharedBarrier
1728 // }
1729 // return
1730 __ Movl(Operand(rdi, JSThread::GlueData::GetSharedGCStateBitFieldOffset(false)), r11);
1731 __ Testb(Immediate(JSThread::SHARED_CONCURRENT_MARKING_BITFIELD_MASK), r11);
1732 __ Jne(&callSharedBarrier);
1733 // if GCState is not READY_TO_MARK, go to needCallNotShare.
1734 __ Ret();
1735 }
1736 __ Bind(&callSharedBarrier);
1737 {
1738 int32_t NonSValueBarrier = static_cast<int32_t>(JSThread::GlueData::GetCOStubEntriesOffset(false)) +
1739 kungfu::CommonStubCSigns::SetSValueWithBarrier * FRAME_SLOT_SIZE;
1740 __ Movq(Operand(rdi, NonSValueBarrier), r11);
1741 __ Jmp(&needcall);
1742 }
1743 }
1744
// Push `argc` undefined values onto the interpreter stack after first running
// the stack-overflow check.
// Input: glue - glue register used by the overflow check
//        argc - number of undefined slots to push
//        op1/op2 - scratch registers consumed by StackOverflowCheck
//        stackOverflow - label presumably branched to when the check fails
//                        (named accordingly; behavior lives in StackOverflowCheck)
void AsmInterpreterCall::PushUndefinedWithArgcAndCheckStack(ExtendedAssembler *assembler, Register glue, Register argc,
                                                            Register op1, Register op2, Label *stackOverflow)
{
    ASSERT(stackOverflow != nullptr);
    // Check must precede the pushes so nothing is written past the stack limit.
    StackOverflowCheck(assembler, glue, argc, op1, op2, stackOverflow);
    PushUndefinedWithArgc(assembler, argc);
}
1752
// Reset rsp to `fp`, build a temporary ASM_BRIDGE_FRAME, invoke the
// ThrowStackOverflowException runtime stub via the CallRuntime entry, then
// tear the bridge frame down and return.
// Input: glue - glue pointer
//        fp   - frame pointer the stack is reset to (may already equal rsp)
//        op   - scratch register
void AsmInterpreterCall::ThrowStackOverflowExceptionAndReturn(ExtendedAssembler *assembler, Register glue, Register fp,
                                                              Register op)
{
    if (fp != rsp) {
        __ Movq(fp, rsp);
    }
    // NOTE(review): op is loaded with the ThrowStackOverflowException RT stub
    // entry here but is never called below (the call goes through CallRuntime
    // instead) — verify whether this preload is still required.
    __ Movq(kungfu::RuntimeStubCSigns::ID_ThrowStackOverflowException, op);
    __ Movq(Operand(glue, op, Times8, JSThread::GlueData::GetRTStubEntriesOffset(false)), op);
    // NOTE(review): presumably downstream code expects glue in r13 — confirm.
    if (glue != r13) {
        __ Movq(glue, r13);
    }

    __ Pushq(rbp);
    __ Pushq(static_cast<int64_t>(FrameType::ASM_BRIDGE_FRAME)); // set frame type
    __ Leaq(Operand(rsp, FRAME_SLOT_SIZE), rbp); // skip frame type

    Label callRuntime;
    // 16 bytes align check: pad the stack only when rsp bit 3 is clear.
    __ Testq(0x8, rsp);
    __ Jnz(&callRuntime);
    __ PushAlignBytes();
    __ Bind(&callRuntime);
    __ Pushq(r10); // caller save
    __ Pushq(0); // argc
    __ Pushq(kungfu::RuntimeStubCSigns::ID_ThrowStackOverflowException); // runtime id
    __ Movq(glue, rax); // glue
    // Resolve the CallRuntime entry from the RT stub table and call it.
    __ Movq(kungfu::RuntimeStubCSigns::ID_CallRuntime, r10);
    __ Movq(Operand(rax, r10, Times8, JSThread::GlueData::GetRTStubEntriesOffset(false)), r10);
    __ Callq(r10); // call CallRuntime
    __ Addq(2 * FRAME_SLOT_SIZE, rsp); // 2: skip argc and runtime_id
    __ Popq(r10);
    // Unwind the bridge frame: rbp points just above the frame-type slot.
    __ Movq(rbp, rsp);
    __ Popq(rbp);
    __ Ret();
}
1788
// Same as ThrowStackOverflowExceptionAndReturn, but instead of returning to
// the immediate caller it unwinds rsp past the AsmInterpBridgeFrame contents
// (frame body, 5 callee-saved registers, alignment slot) before Ret.
// Input: glue - glue pointer
//        fp   - frame pointer the stack is reset to (may already equal rsp)
//        op   - scratch register
void AsmInterpreterCall::ThrowStackOverflowExceptionAndReturnToAsmInterpBridgeFrame(ExtendedAssembler *assembler,
    Register glue, Register fp, Register op)
{
    if (fp != rsp) {
        __ Movq(fp, rsp);
    }
    // NOTE(review): op is loaded with the ThrowStackOverflowException RT stub
    // entry here but is never called below (the call goes through CallRuntime
    // instead) — verify whether this preload is still required.
    __ Movq(kungfu::RuntimeStubCSigns::ID_ThrowStackOverflowException, op);
    __ Movq(Operand(glue, op, Times8, JSThread::GlueData::GetRTStubEntriesOffset(false)), op);
    // NOTE(review): presumably downstream code expects glue in r13 — confirm.
    if (glue != r13) {
        __ Movq(glue, r13);
    }

    __ Pushq(rbp);
    __ Pushq(static_cast<int64_t>(FrameType::ASM_BRIDGE_FRAME)); // set frame type
    __ Leaq(Operand(rsp, FRAME_SLOT_SIZE), rbp); // skip frame type

    Label callRuntime;
    // 16 bytes align check: pad the stack only when rsp bit 3 is clear.
    __ Testq(0x8, rsp);
    __ Jnz(&callRuntime);
    __ PushAlignBytes();
    __ Bind(&callRuntime);
    __ Pushq(r10); // caller save
    __ Pushq(0); // argc
    __ Pushq(kungfu::RuntimeStubCSigns::ID_ThrowStackOverflowException); // runtime id
    __ Movq(glue, rax); // glue
    // Resolve the CallRuntime entry from the RT stub table and call it.
    __ Movq(kungfu::RuntimeStubCSigns::ID_CallRuntime, r10);
    __ Movq(Operand(rax, r10, Times8, JSThread::GlueData::GetRTStubEntriesOffset(false)), r10);
    __ Callq(r10); // call CallRuntime
    __ Addq(2 * FRAME_SLOT_SIZE, rsp); // 2: skip argc and runtime_id
    __ Popq(r10);
    __ Movq(rbp, rsp);
    __ Popq(rbp);

    // +----------------------------------------------------+
    // |     return addr                                    |
    // |----------------------------------------------------| <---- rbp
    // |     frame type                                     |   ^           ^
    // |----------------------------------------------------|   |           |
    // |     prev rbp                                       |   |           |
    // |----------------------------------------------------|   |           |
    // |     pc                                             |   |           |
    // |----------------------------------------------------| PushAsmInterpBridgeFrame  total skip
    // |     pushAlignBytes                                 |   |           |
    // |----------------------------------------------------|   |           |
    // |     5 callee save regs(r12,r13,r14,r15,rbx)        |   |           |
    // |----------------------------------------------------|   v           |
    // |     lr                                             |               |
    // +----------------------------------------------------+               v
    // Base on PushAsmInterpBridgeFrame, need to skip AsmInterpBridgeFrame size, callee Save Registers(5)
    // and PushAlignBytes(1)
    int32_t skipNum = static_cast<int32_t>(AsmInterpretedBridgeFrame::GetSize(false)) / FRAME_SLOT_SIZE + 5 + 1;
    // Point rsp at the lr slot of the enclosing AsmInterpBridgeFrame so Ret
    // returns through it (layout pictured above).
    __ Leaq(Operand(rbp, -skipNum * FRAME_SLOT_SIZE), rsp);
    __ Ret();
}
1844
HasPendingException(ExtendedAssembler * assembler,Register threadRegister)1845 void AsmInterpreterCall::HasPendingException([[maybe_unused]] ExtendedAssembler *assembler,
1846 [[maybe_unused]] Register threadRegister)
1847 {
1848 }
1849 #undef __
1850 } // namespace panda::ecmascript::x64
1851