1 /*
2 * Copyright (c) 2022-2024 Huawei Device Co., Ltd.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16 #include "ecmascript/stubs/runtime_stubs.h"
17
18 #include "ecmascript/compiler/trampoline/x64/common_call.h"
19
20 #include "ecmascript/compiler/assembler/assembler.h"
21 #include "ecmascript/compiler/rt_call_signature.h"
22 #include "ecmascript/ecma_runtime_call_info.h"
23 #include "ecmascript/frames.h"
24 #include "ecmascript/js_function.h"
25 #include "ecmascript/js_thread.h"
26 #include "ecmascript/js_generator_object.h"
27 #include "ecmascript/mem/machine_code.h"
28 #include "ecmascript/message_string.h"
29 #include "ecmascript/method.h"
30 #include "ecmascript/runtime_call_id.h"
31
32 namespace panda::ecmascript::x64 {
33 #define __ assembler->
34
35 // Generate code for Entering asm interpreter
36 // Input: glue - %rdi
37 // callTarget - %rsi
38 // method - %rdx
39 // callField - %rcx
40 // argc - %r8
41 // argv - %r9(<callTarget, newTarget, this> are at the beginning of argv)
42 void AsmInterpreterCall::AsmInterpreterEntry(ExtendedAssembler *assembler)
43 {
44 __ BindAssemblerStub(RTSTUB_ID(AsmInterpreterEntry));
45 Label target;
46 // push asm interpreter entry frame
47 size_t begin = __ GetCurrentPosition();
48 PushAsmInterpEntryFrame(assembler);
49 __ Callq(&target);
50 PopAsmInterpEntryFrame(assembler);
51 size_t end = __ GetCurrentPosition();
52 if ((end - begin) != FrameCompletionPos::X64EntryFrameDuration) {
53 LOG_COMPILER(FATAL) << (end - begin) << " != " << FrameCompletionPos::X64EntryFrameDuration
54 << "This frame has been modified, and the offset EntryFrameDuration should be updated too.";
55 }
56 __ Ret();
57
58 __ Bind(&target);
59 AsmInterpEntryDispatch(assembler);
60 }
61
62 // Generate code for generator re-enter asm interpreter
63 // c++ calling convention
64 // Input: %rdi - glue
65 // %rsi - context(GeneratorContext)
66 void AsmInterpreterCall::GeneratorReEnterAsmInterp(ExtendedAssembler *assembler)
67 {
68 __ BindAssemblerStub(RTSTUB_ID(GeneratorReEnterAsmInterp));
69 Label target;
70 size_t begin = __ GetCurrentPosition();
71 PushAsmInterpEntryFrame(assembler);
72 __ Callq(&target);
73 PopAsmInterpEntryFrame(assembler);
74 size_t end = __ GetCurrentPosition();
75 if ((end - begin) != FrameCompletionPos::X64EntryFrameDuration) {
76 LOG_COMPILER(FATAL) << (end - begin) << " != " << FrameCompletionPos::X64EntryFrameDuration
77 << "This frame has been modified, and the offset EntryFrameDuration should be updated too.";
78 }
79 __ Ret();
80
81 __ Bind(&target);
82 GeneratorReEnterAsmInterpDispatch(assembler);
83 }
84
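// Restores a suspended generator: loads the generator's method and 'this' from the GeneratorContext,
// copies the saved register snapshot back onto the stack, rebuilds the AsmInterpretedFrame and
// dispatches to the bytecode offset the generator was suspended at.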
85 void AsmInterpreterCall::GeneratorReEnterAsmInterpDispatch(ExtendedAssembler *assembler)
86 {
87 Register glueRegister = __ GlueRegister();
88 Register contextRegister = rsi;
89 Register prevSpRegister = rbp;
90
91 Register callTargetRegister = r9;
92 Register methodRegister = rcx;
93 Register tempRegister = r11; // can not be used to store any variable
94 Register opRegister = r8; // can not be used to store any variable
95 __ Movq(Operand(rsi, GeneratorContext::GENERATOR_METHOD_OFFSET), callTargetRegister);
96 __ Movq(Operand(callTargetRegister, JSFunctionBase::METHOD_OFFSET), methodRegister);
97
98 Label stackOverflow;
99
100 Register fpRegister = r10;
101 __ Movq(rsp, fpRegister);
102 Register nRegsRegister = rdx;
103 Register regsArrayRegister = r12;
104 Register thisRegister = r15;
105 // push context regs
106 __ Movl(Operand(rsi, GeneratorContext::GENERATOR_NREGS_OFFSET), nRegsRegister);
107 __ Movq(Operand(rsi, GeneratorContext::GENERATOR_THIS_OFFSET), thisRegister);
108 __ Movq(Operand(rsi, GeneratorContext::GENERATOR_REGS_ARRAY_OFFSET), regsArrayRegister);
109 __ Addq(TaggedArray::DATA_OFFSET, regsArrayRegister);
110 PushArgsWithArgvAndCheckStack(assembler, glueRegister, nRegsRegister, regsArrayRegister, tempRegister, opRegister,
111 &stackOverflow);
112
113 // newSp
114 Register newSpRegister = r8;
115 __ Movq(rsp, newSpRegister);
116
117 // resume asm interp frame
118 Register pcRegister = r12;
119 PushGeneratorFrameState(assembler, prevSpRegister, fpRegister, callTargetRegister, thisRegister, methodRegister,
120 contextRegister, pcRegister, tempRegister);
121
122 // call bc stub
123 DispatchCall(assembler, pcRegister, newSpRegister, callTargetRegister, methodRegister);
124 __ Bind(&stackOverflow);
125 {
126 ThrowStackOverflowExceptionAndReturn(assembler, glueRegister, fpRegister, tempRegister);
127 }
128 }
129
130 // Input: glue - %rdi
131 // callTarget - %rsi
132 // method - %rdx
133 // callField - %rcx
134 // argc - %r8
135 // argv - %r9(<callTarget, newTarget, this> are at the beginning of argv)
136 // prevSp - %rbp
137 void AsmInterpreterCall::AsmInterpEntryDispatch(ExtendedAssembler *assembler)
138 {
139 Label notJSFunction;
140 Label callNativeEntry;
141 Label callJSFunctionEntry;
142 Label notCallable;
143 Register glueRegister = rdi;
144 Register callTargetRegister = rsi;
145 Register argvRegister = r9;
146 Register bitFieldRegister = r12;
147 Register tempRegister = r11; // can not be used to store any variable
148 __ Movq(Operand(callTargetRegister, TaggedObject::HCLASS_OFFSET), tempRegister); // hclass
149 __ Movq(Operand(tempRegister, JSHClass::BIT_FIELD_OFFSET), bitFieldRegister);
150 __ Cmpb(static_cast<int32_t>(JSType::JS_FUNCTION_FIRST), bitFieldRegister);
151 __ Jb(&notJSFunction);
152 __ Cmpb(static_cast<int32_t>(JSType::JS_FUNCTION_LAST), bitFieldRegister);
153 __ Jbe(&callJSFunctionEntry);
154 __ Bind(&notJSFunction);
155 {
156 __ Testq(static_cast<int64_t>(1ULL << JSHClass::CallableBit::START_BIT), bitFieldRegister);
157 __ Jz(&notCallable);
158 // fall through
159 }
160 __ Bind(&callNativeEntry);
161 CallNativeEntry(assembler);
162 __ Bind(&callJSFunctionEntry);
163 {
164 Register callFieldRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_FIELD);
165 __ Btq(MethodLiteral::IsNativeBit::START_BIT, callFieldRegister);
166 __ Jb(&callNativeEntry);
167
168 __ Leaq(Operand(argvRegister, NUM_MANDATORY_JSFUNC_ARGS * JSTaggedValue::TaggedTypeSize()),
169 argvRegister);
170 JSCallCommonEntry(assembler, JSCallMode::CALL_ENTRY, FrameTransitionType::OTHER_TO_BASELINE_CHECK);
171 }
172 __ Bind(&notCallable);
173 {
174 __ Movq(glueRegister, rax); // glue
175 __ Pushq(0); // argc
176 Register runtimeIdRegister = r12;
177 __ Movq(kungfu::RuntimeStubCSigns::ID_ThrowNotCallableException, runtimeIdRegister);
178 __ Pushq(runtimeIdRegister); // runtimeId
179 Register trampolineIdRegister = r12;
180 Register trampolineRegister = r10;
181 __ Movq(kungfu::RuntimeStubCSigns::ID_CallRuntime, trampolineIdRegister);
182 __ Movq(Operand(rax, trampolineIdRegister, Times8, JSThread::GlueData::GetRTStubEntriesOffset(false)),
183 trampolineRegister);
184 __ Callq(trampolineRegister);
185 __ Addq(16, rsp); // 16: skip argc and runtime_id
186 __ Ret();
187 }
188 }
189
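// Builds an AsmInterpretedFrame on the stack. Slots are pushed in the order: frame type, prevSp, pc,
// fp, jumpSizeAfterCall, env (the function's lexical env), acc (Hole), thisObj and callTarget.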
190 void AsmInterpreterCall::PushFrameState(ExtendedAssembler *assembler, Register prevSpRegister, Register fpRegister,
191 Register callTargetRegister, Register thisRegister, Register methodRegister, Register pcRegister,
192 Register operatorRegister)
193 {
194 __ Pushq(static_cast<int32_t>(FrameType::ASM_INTERPRETER_FRAME)); // frame type
195 __ Pushq(prevSpRegister); // prevSp
196 __ Movq(Operand(methodRegister, Method::NATIVE_POINTER_OR_BYTECODE_ARRAY_OFFSET), pcRegister);
197 __ Pushq(pcRegister); // pc
198 __ Pushq(fpRegister); // fp
199 __ Pushq(0); // jumpSizeAfterCall
200 __ Movq(Operand(callTargetRegister, JSFunction::LEXICAL_ENV_OFFSET), operatorRegister);
201 __ Pushq(operatorRegister); // env
202 __ Pushq(JSTaggedValue::Hole().GetRawData()); // acc
203 __ Pushq(thisRegister); // thisObj
204 __ Pushq(callTargetRegister); // callTarget
205 }
206
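// Same layout as PushFrameState, but pc is advanced by the saved bytecode offset, and env and acc are
// restored from the GeneratorContext instead of being taken from the function / set to Hole.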
207 void AsmInterpreterCall::PushGeneratorFrameState(ExtendedAssembler *assembler, Register prevSpRegister,
208 Register fpRegister, Register callTargetRegister, Register thisRegister, Register methodRegister,
209 Register contextRegister, Register pcRegister, Register operatorRegister)
210 {
211 __ Pushq(static_cast<int32_t>(FrameType::ASM_INTERPRETER_FRAME)); // frame type
212 __ Pushq(prevSpRegister); // prevSp
213 __ Movq(Operand(methodRegister, Method::NATIVE_POINTER_OR_BYTECODE_ARRAY_OFFSET), pcRegister);
214 __ Movl(Operand(contextRegister, GeneratorContext::GENERATOR_BC_OFFSET_OFFSET), operatorRegister);
215 __ Addq(operatorRegister, pcRegister);
216 __ Pushq(pcRegister); // pc
217 __ Pushq(fpRegister); // fp
218 __ Pushq(0); // jumpSizeAfterCall
219 __ Movq(Operand(contextRegister, GeneratorContext::GENERATOR_LEXICALENV_OFFSET), operatorRegister);
220 __ Pushq(operatorRegister); // env
221 __ Movq(Operand(contextRegister, GeneratorContext::GENERATOR_ACC_OFFSET), operatorRegister);
222 __ Pushq(operatorRegister); // acc
223 __ Pushq(thisRegister); // thisObj
224 __ Pushq(callTargetRegister); // callTarget
225 }
226
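// Saves the C++ callee-saved registers (only when entering from C++), glue (rdi) and the current leave
// frame, then pushes an ASM_INTERPRETER_ENTRY_FRAME (prev rbp, frame type, leave-frame fp, pc = 0).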
227 void AsmInterpreterCall::PushAsmInterpEntryFrame(ExtendedAssembler *assembler)
228 {
229 size_t begin = __ GetCurrentPosition();
230 if (!assembler->FromInterpreterHandler()) {
231 __ PushCppCalleeSaveRegisters();
232 }
233 Register fpRegister = r10;
234 __ Pushq(rdi);
235 __ PushAlignBytes();
236 __ Movq(Operand(rdi, JSThread::GlueData::GetLeaveFrameOffset(false)), fpRegister);
237 // construct asm interpreter entry frame
238 __ Pushq(rbp);
239 __ Pushq(static_cast<int64_t>(FrameType::ASM_INTERPRETER_ENTRY_FRAME));
240 __ Pushq(fpRegister);
241 __ Pushq(0); // pc
242 if (!assembler->FromInterpreterHandler()) {
243 size_t end = __ GetCurrentPosition();
244 if ((end - begin) != FrameCompletionPos::X64CppToAsmInterp) {
245 LOG_COMPILER(FATAL) << (end - begin) << " != " << FrameCompletionPos::X64CppToAsmInterp
246 << "This frame has been modified, and the offset CppToAsmInterp should be updated too.";
247 }
248 }
249 __ Leaq(Operand(rsp, 3 * FRAME_SLOT_SIZE), rbp); // 3: skip frame type, prevSp and pc (24 bytes)
250 }
251
252 void AsmInterpreterCall::PopAsmInterpEntryFrame(ExtendedAssembler *assembler)
253 {
254 __ Addq(8, rsp); // 8: skip pc
255 Register fpRegister = r10;
256 __ Popq(fpRegister);
257 __ Addq(FRAME_SLOT_SIZE, rsp); // 8: skip frame type
258 __ Popq(rbp);
259 __ PopAlignBytes();
260 __ Popq(rdi);
261 __ Movq(fpRegister, Operand(rdi, JSThread::GlueData::GetLeaveFrameOffset(false)));
262 size_t begin = __ GetCurrentPosition();
263 if (!assembler->FromInterpreterHandler()) {
264 __ PopCppCalleeSaveRegisters();
265 size_t end = __ GetCurrentPosition();
266 if ((end - begin) != FrameCompletionPos::X64AsmInterpToCpp) {
267 LOG_COMPILER(FATAL) << (end - begin) << " != " << FrameCompletionPos::X64AsmInterpToCpp
268 << "This frame has been modified, and the offset AsmInterpToCp should be updated too.";
269 }
270 }
271 }
272
273 void AsmInterpreterCall::GetDeclaredNumArgsFromCallField(ExtendedAssembler *assembler, Register callFieldRegister,
274 Register declaredNumArgsRegister)
275 {
276 __ Movq(callFieldRegister, declaredNumArgsRegister);
277 __ Shrq(MethodLiteral::NumArgsBits::START_BIT, declaredNumArgsRegister);
278 __ Andq(MethodLiteral::NumArgsBits::Mask() >> MethodLiteral::NumArgsBits::START_BIT, declaredNumArgsRegister);
279 }
280
281 void AsmInterpreterCall::GetNumVregsFromCallField(ExtendedAssembler *assembler, Register callFieldRegister,
282 Register numVregsRegister)
283 {
284 __ Movq(callFieldRegister, numVregsRegister);
285 __ Shrq(MethodLiteral::NumVregsBits::START_BIT, numVregsRegister);
286 __ Andq(MethodLiteral::NumVregsBits::Mask() >> MethodLiteral::NumVregsBits::START_BIT, numVregsRegister);
287 }
288
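// Common entry for all JS call stubs: records the caller rsp, compares the declared argument count
// from callField against the actual argc, then takes the fast path (counts match) or the slow path
// (pad with undefined / push only the declared arguments) before pushing this/new.target/func and vregs.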
289 void AsmInterpreterCall::JSCallCommonEntry(ExtendedAssembler *assembler,
290 JSCallMode mode, FrameTransitionType type)
291 {
292 Label stackOverflow;
293 Register glueRegister = __ GlueRegister();
294 Register fpRegister = __ AvailableRegister1();
295 Register callFieldRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_FIELD);
296 Register argcRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG0);
297 // save fp
298 __ Movq(rsp, fpRegister);
299 Register declaredNumArgsRegister = __ AvailableRegister2();
300 GetDeclaredNumArgsFromCallField(assembler, callFieldRegister, declaredNumArgsRegister);
301
302 Label slowPathEntry;
303 Label fastPathEntry;
304 Label pushCallThis;
305 auto argc = kungfu::AssemblerModule::GetArgcFromJSCallMode(mode);
306 if (argc >= 0) {
307 __ Cmpq(argc, declaredNumArgsRegister);
308 } else {
309 __ Cmpq(argcRegister, declaredNumArgsRegister);
310 }
311 __ Jne(&slowPathEntry);
312 __ Bind(&fastPathEntry);
313 JSCallCommonFastPath(assembler, mode, &stackOverflow);
314 __ Bind(&pushCallThis);
315 PushCallThis(assembler, mode, &stackOverflow, type);
316 __ Bind(&slowPathEntry);
317 JSCallCommonSlowPath(assembler, mode, &fastPathEntry, &pushCallThis, &stackOverflow);
318
319 __ Bind(&stackOverflow);
320 if (kungfu::AssemblerModule::IsJumpToCallCommonEntry(mode)) {
321 __ Movq(fpRegister, rsp);
322 Register tempRegister = __ AvailableRegister1();
323 // only glue and acc are useful in exception handler
324 if (glueRegister != r13) {
325 __ Movq(glueRegister, r13);
326 }
327 Register acc = rsi;
328 __ Movq(JSTaggedValue::VALUE_EXCEPTION, acc);
329 Register methodRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::METHOD);
330 Register callTargetRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_TARGET);
331 // Reload pc to make sure stack trace is right
332 __ Movq(callTargetRegister, tempRegister);
333 __ Movq(Operand(methodRegister, Method::NATIVE_POINTER_OR_BYTECODE_ARRAY_OFFSET), r12); // pc: r12
334 // Reload constpool and profileInfo to make sure gc map work normally
335 __ Movq(Operand(tempRegister, JSFunction::RAW_PROFILE_TYPE_INFO_OFFSET), r14);
336 __ Movq(Operand(r14, ProfileTypeInfoCell::VALUE_OFFSET), r14); // profileTypeInfo: r14
337 __ Movq(Operand(methodRegister, Method::CONSTANT_POOL_OFFSET), rbx); // constantPool: rbx
338
339 __ Movq(kungfu::BytecodeStubCSigns::ID_ThrowStackOverflowException, tempRegister);
340 __ Movq(Operand(glueRegister, tempRegister, Times8, JSThread::GlueData::GetBCStubEntriesOffset(false)),
341 tempRegister);
342 __ Jmp(tempRegister);
343 } else {
344 [[maybe_unused]] TempRegisterScope scope(assembler);
345 Register temp = __ TempRegister();
346 ThrowStackOverflowExceptionAndReturn(assembler, glueRegister, fpRegister, temp);
347 }
348 }
349
350 // void PushCallArgsxAndDispatch(uintptr_t glue, uintptr_t sp, uint64_t callTarget, uintptr_t method,
351 // uint64_t callField, ...)
352 // GHC calling convention
353 // Input1: for callarg0/1/2/3 Input2: for callrange
354 // %r13 - glue // %r13 - glue
355 // %rbp - sp // %rbp - sp
356 // %r12 - callTarget // %r12 - callTarget
357 // %rbx - method // %rbx - method
358 // %r14 - callField // %r14 - callField
359 // %rsi - arg0 // %rsi - actualArgc
360 // %rdi - arg1 // %rdi - argv
361 // %r8 - arg2
362 void AsmInterpreterCall::PushCallThisRangeAndDispatch(ExtendedAssembler *assembler)
363 {
364 __ BindAssemblerStub(RTSTUB_ID(PushCallThisRangeAndDispatch));
365 JSCallCommonEntry(assembler, JSCallMode::CALL_THIS_WITH_ARGV, FrameTransitionType::OTHER_TO_OTHER);
366 }
367
368 void AsmInterpreterCall::PushCallRangeAndDispatch(ExtendedAssembler *assembler)
369 {
370 __ BindAssemblerStub(RTSTUB_ID(PushCallRangeAndDispatch));
371 JSCallCommonEntry(assembler, JSCallMode::CALL_WITH_ARGV, FrameTransitionType::OTHER_TO_OTHER);
372 }
373
374 void AsmInterpreterCall::PushCallNewAndDispatch(ExtendedAssembler *assembler)
375 {
376 __ BindAssemblerStub(RTSTUB_ID(PushCallNewAndDispatch));
377 JSCallCommonEntry(assembler, JSCallMode::CALL_CONSTRUCTOR_WITH_ARGV, FrameTransitionType::OTHER_TO_OTHER);
378 }
379
380 void AsmInterpreterCall::PushSuperCallAndDispatch(ExtendedAssembler *assembler)
381 {
382 __ BindAssemblerStub(RTSTUB_ID(PushSuperCallAndDispatch));
383 JSCallCommonEntry(assembler, JSCallMode::SUPER_CALL_WITH_ARGV, FrameTransitionType::OTHER_TO_OTHER);
384 }
385
386 void AsmInterpreterCall::PushCallArgs3AndDispatch(ExtendedAssembler *assembler)
387 {
388 __ BindAssemblerStub(RTSTUB_ID(PushCallArgs3AndDispatch));
389 JSCallCommonEntry(assembler, JSCallMode::CALL_ARG3, FrameTransitionType::OTHER_TO_OTHER);
390 }
391
392 void AsmInterpreterCall::PushCallArgs2AndDispatch(ExtendedAssembler *assembler)
393 {
394 __ BindAssemblerStub(RTSTUB_ID(PushCallArgs2AndDispatch));
395 JSCallCommonEntry(assembler, JSCallMode::CALL_ARG2, FrameTransitionType::OTHER_TO_OTHER);
396 }
397
398 void AsmInterpreterCall::PushCallArg1AndDispatch(ExtendedAssembler *assembler)
399 {
400 __ BindAssemblerStub(RTSTUB_ID(PushCallArg1AndDispatch));
401 JSCallCommonEntry(assembler, JSCallMode::CALL_ARG1, FrameTransitionType::OTHER_TO_OTHER);
402 }
403
404 void AsmInterpreterCall::PushCallArg0AndDispatch(ExtendedAssembler *assembler)
405 {
406 __ BindAssemblerStub(RTSTUB_ID(PushCallArg0AndDispatch));
407 JSCallCommonEntry(assembler, JSCallMode::CALL_ARG0, FrameTransitionType::OTHER_TO_OTHER);
408 }
409 void AsmInterpreterCall::PushCallThisArg0AndDispatch(ExtendedAssembler *assembler)
410 {
411 __ BindAssemblerStub(RTSTUB_ID(PushCallThisArg0AndDispatch));
412 JSCallCommonEntry(assembler, JSCallMode::CALL_THIS_ARG0, FrameTransitionType::OTHER_TO_OTHER);
413 }
414
415 void AsmInterpreterCall::PushCallThisArg1AndDispatch(ExtendedAssembler *assembler)
416 {
417 __ BindAssemblerStub(RTSTUB_ID(PushCallThisArg1AndDispatch));
418 JSCallCommonEntry(assembler, JSCallMode::CALL_THIS_ARG1, FrameTransitionType::OTHER_TO_OTHER);
419 }
420
421 void AsmInterpreterCall::PushCallThisArgs2AndDispatch(ExtendedAssembler *assembler)
422 {
423 __ BindAssemblerStub(RTSTUB_ID(PushCallThisArgs2AndDispatch));
424 JSCallCommonEntry(assembler, JSCallMode::CALL_THIS_ARG2, FrameTransitionType::OTHER_TO_OTHER);
425 }
426
427 void AsmInterpreterCall::PushCallThisArgs3AndDispatch(ExtendedAssembler *assembler)
428 {
429 __ BindAssemblerStub(RTSTUB_ID(PushCallThisArgs3AndDispatch));
430 JSCallCommonEntry(assembler, JSCallMode::CALL_THIS_ARG3, FrameTransitionType::OTHER_TO_OTHER);
431 }
432
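// Fast path: declared and actual argument counts match. Arguments are pushed right to left; the
// *_WITH_ARGV modes copy argc arguments from argv with a stack-overflow check.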
433 void AsmInterpreterCall::JSCallCommonFastPath(ExtendedAssembler *assembler, JSCallMode mode, Label *stackOverflow)
434 {
435 Register glueRegister = __ GlueRegister();
436 Register arg0 = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG0);
437 Register arg1 = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG1);
438
439 Label pushCallThis;
440 auto argc = kungfu::AssemblerModule::GetArgcFromJSCallMode(mode);
441 // call range
442 if (argc < 0) {
443 Register argcRegister = arg0;
444 Register argvRegister = arg1;
445 __ Cmpq(0, argcRegister);
446 __ Jbe(&pushCallThis);
447 // fall through
448 {
449 [[maybe_unused]] TempRegisterScope scope(assembler);
450 Register opRegister = __ TempRegister();
451 Register op2Register = __ AvailableRegister2();
452 PushArgsWithArgvAndCheckStack(assembler, glueRegister, argcRegister, argvRegister, opRegister, op2Register,
453 stackOverflow);
454 }
455 __ Bind(&pushCallThis);
456 } else if (argc > 0) {
457 if (argc > 2) { // 2: call arg2
458 if (mode == JSCallMode::CALL_THIS_ARG3_WITH_RETURN) {
459 Register arg2 = __ CppJSCallAvailableRegister1();
460 __ Pushq(arg2);
461 } else {
462 Register arg2 = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG2);
463 __ Pushq(arg2);
464 }
465 }
466 if (argc > 1) {
467 __ Pushq(arg1);
468 }
469 if (argc > 0) {
470 __ Pushq(arg0);
471 }
472 }
473 }
474
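// Slow path: declared and actual argument counts differ. If the method has the HaveExtra bit, the
// actual argc is pushed first; missing arguments are padded with undefined, and when fewer arguments
// are declared than passed only the declared ones are pushed (unless the method accepts extra args).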
475 void AsmInterpreterCall::JSCallCommonSlowPath(ExtendedAssembler *assembler, JSCallMode mode,
476 Label *fastPathEntry, Label *pushCallThis, Label *stackOverflow)
477 {
478 Register glueRegister = __ GlueRegister();
479 Register callFieldRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_FIELD);
480 Register argcRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG0);
481 Register arg0 = argcRegister;
482 Register arg1 = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG1);
483 Label noExtraEntry;
484 Label pushArgsEntry;
485
486 auto argc = kungfu::AssemblerModule::GetArgcFromJSCallMode(mode);
487 Register declaredNumArgsRegister = __ AvailableRegister2();
488 __ Testq(MethodLiteral::HaveExtraBit::Mask(), callFieldRegister);
489 __ Jz(&noExtraEntry);
490 // extra entry
491 {
492 [[maybe_unused]] TempRegisterScope scope(assembler);
493 Register tempArgcRegister = __ TempRegister();
494 if (argc >= 0) {
495 __ PushArgc(argc, tempArgcRegister);
496 } else {
497 __ PushArgc(argcRegister, tempArgcRegister);
498 }
499 }
500 __ Bind(&noExtraEntry);
501 {
502 if (argc == 0) {
503 Register op1 = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG1);
504 [[maybe_unused]] TempRegisterScope scope(assembler);
505 Register op2 = __ TempRegister();
506 PushUndefinedWithArgcAndCheckStack(assembler, glueRegister, declaredNumArgsRegister, op1, op2,
507 stackOverflow);
508 __ Jmp(fastPathEntry);
509 return;
510 }
511 [[maybe_unused]] TempRegisterScope scope(assembler);
512 Register diffRegister = __ TempRegister();
513 __ Movq(declaredNumArgsRegister, diffRegister);
514 if (argc >= 0) {
515 __ Subq(argc, diffRegister);
516 } else {
517 __ Subq(argcRegister, diffRegister);
518 }
519 __ Cmpq(0, diffRegister);
520 __ Jle(&pushArgsEntry);
521 PushUndefinedWithArgc(assembler, diffRegister);
522 __ Jmp(fastPathEntry);
523 }
524 __ Bind(&pushArgsEntry);
525 __ Testq(MethodLiteral::HaveExtraBit::Mask(), callFieldRegister);
526 __ Jnz(fastPathEntry);
527 // arg1: declared num args must be 0
528 if (argc == 1) {
529 __ Jmp(pushCallThis);
530 return;
531 }
532 // declared < actual
533 __ Cmpq(0, declaredNumArgsRegister);
534 __ Je(pushCallThis);
535 if (argc < 0) {
536 Register argvRegister = arg1;
537 [[maybe_unused]] TempRegisterScope scope(assembler);
538 Register opRegister = __ TempRegister();
539 PushArgsWithArgvAndCheckStack(assembler, glueRegister, declaredNumArgsRegister, argvRegister, opRegister,
540 opRegister, stackOverflow);
541 } else if (argc > 0) {
542 Label pushArgs0;
543 if (argc > 2) { // 2: call arg2
544 // declared is 2 or 1 now
545 __ Cmpq(1, declaredNumArgsRegister);
546 __ Je(&pushArgs0);
547 __ Pushq(arg1);
548 }
549 if (argc > 1) {
550 __ Bind(&pushArgs0);
551 // declared is 1 now
552 __ Pushq(arg0);
553 }
554 }
555 __ Jmp(pushCallThis);
556 }
557
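// Returns the register that holds 'this' for the given call mode; for CALL_ENTRY/CALL_FROM_AOT it is
// loaded into defaultRegister from the stack slot just before argv.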
558 Register AsmInterpreterCall::GetThisRegsiter(ExtendedAssembler *assembler, JSCallMode mode, Register defaultRegister)
559 {
560 switch (mode) {
561 case JSCallMode::CALL_GETTER:
562 case JSCallMode::CALL_THIS_ARG0:
563 return __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG0);
564 case JSCallMode::CALL_SETTER:
565 case JSCallMode::CALL_THIS_ARG1:
566 return __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG1);
567 case JSCallMode::CALL_THIS_ARG2:
568 case JSCallMode::CALL_CONSTRUCTOR_WITH_ARGV:
569 case JSCallMode::CALL_THIS_WITH_ARGV:
570 case JSCallMode::SUPER_CALL_WITH_ARGV:
571 case JSCallMode::SUPER_CALL_SPREAD_WITH_ARGV:
572 return __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG2);
573 case JSCallMode::CALL_THIS_ARG3:
574 return __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG3);
575 case JSCallMode::CALL_ENTRY:
576 case JSCallMode::CALL_FROM_AOT: {
577 Register argvRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG1);
578 __ Movq(Operand(argvRegister, -FRAME_SLOT_SIZE), defaultRegister); // 8: this is just before the argv list
579 return defaultRegister;
580 }
581 case JSCallMode::CALL_THIS_ARG3_WITH_RETURN:
582 return __ CppJSCallAvailableRegister2();
583 case JSCallMode::CALL_THIS_ARG2_WITH_RETURN:
584 case JSCallMode::CALL_THIS_ARGV_WITH_RETURN: {
585 return __ CppJSCallAvailableRegister1();
586 }
587 default:
588 LOG_ECMA(FATAL) << "this branch is unreachable";
589 UNREACHABLE();
590 }
591 return rInvalid;
592 }
593
594 Register AsmInterpreterCall::GetNewTargetRegsiter(ExtendedAssembler *assembler, JSCallMode mode,
595 Register defaultRegister)
596 {
597 switch (mode) {
598 case JSCallMode::CALL_CONSTRUCTOR_WITH_ARGV:
599 case JSCallMode::CALL_THIS_WITH_ARGV:
600 return __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_TARGET);
601 case JSCallMode::SUPER_CALL_WITH_ARGV:
602 case JSCallMode::SUPER_CALL_SPREAD_WITH_ARGV:
603 return __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG3);
604 case JSCallMode::CALL_FROM_AOT:
605 case JSCallMode::CALL_ENTRY: {
606 Register argvRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG1);
607 // -2: new Target offset
608 __ Movq(Operand(argvRegister, -2 * FRAME_SLOT_SIZE), defaultRegister);
609 return defaultRegister;
610 }
611 default:
612 LOG_ECMA(FATAL) << "this branch is unreachable";
613 UNREACHABLE();
614 }
615 return rInvalid;
616 }
617
618 // Input: %r14 - callField
619 // %rdi - argv
620 void AsmInterpreterCall::PushCallThis(ExtendedAssembler *assembler,
621 JSCallMode mode, Label *stackOverflow, FrameTransitionType type)
622 {
623 Register callFieldRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_FIELD);
624 Register callTargetRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_TARGET);
625 Register thisRegister = __ AvailableRegister2();
626
627 Label pushVregs;
628 Label pushNewTarget;
629 Label pushCallTarget;
630 bool haveThis = kungfu::AssemblerModule::JSModeHaveThisArg(mode);
631 bool haveNewTarget = kungfu::AssemblerModule::JSModeHaveNewTargetArg(mode);
632 if (!haveThis) {
633 __ Movq(JSTaggedValue::VALUE_UNDEFINED, thisRegister); // default this: undefined
634 } else {
635 Register thisArgRegister = GetThisRegsiter(assembler, mode, thisRegister);
636 if (thisRegister != thisArgRegister) {
637 __ Movq(thisArgRegister, thisRegister);
638 }
639 }
640 __ Testb(CALL_TYPE_MASK, callFieldRegister);
641 __ Jz(&pushVregs);
642 // fall through
643 __ Testq(MethodLiteral::HaveThisBit::Mask(), callFieldRegister);
644 __ Jz(&pushNewTarget);
645 // push this
646 if (!haveThis) {
647 __ Pushq(JSTaggedValue::Undefined().GetRawData());
648 } else {
649 __ Pushq(thisRegister);
650 }
651 // fall through
652 __ Bind(&pushNewTarget);
653 {
654 __ Testq(MethodLiteral::HaveNewTargetBit::Mask(), callFieldRegister);
655 __ Jz(&pushCallTarget);
656 if (!haveNewTarget) {
657 __ Pushq(JSTaggedValue::Undefined().GetRawData());
658 } else {
659 [[maybe_unused]] TempRegisterScope scope(assembler);
660 Register defaultRegister = __ TempRegister();
661 Register newTargetRegister = GetNewTargetRegsiter(assembler, mode, defaultRegister);
662 __ Pushq(newTargetRegister);
663 }
664 }
665 // fall through
666 __ Bind(&pushCallTarget);
667 {
668 __ Testq(MethodLiteral::HaveFuncBit::Mask(), callFieldRegister);
669 __ Jz(&pushVregs);
670 __ Pushq(callTargetRegister);
671 }
672 // fall through
673 __ Bind(&pushVregs);
674 {
675 PushVregs(assembler, stackOverflow, type);
676 }
677 }
678
679 // Input: %rbp - sp
680 // %r12 - callTarget
681 // %rbx - method
682 // %r14 - callField
683 // %rdx - jumpSizeAfterCall
684 // %r10 - fp
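// Pushes numVregs undefined slots, records newSp and builds the AsmInterpretedFrame. For the
// *_BASELINE_CHECK transitions, if the function already holds compiled baseline code (neither
// Undefined nor Hole), control jumps straight into that code instead of dispatching bytecode.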
685 void AsmInterpreterCall::PushVregs(ExtendedAssembler *assembler,
686 Label *stackOverflow, FrameTransitionType type)
687 {
688 Register glueRegister = __ GlueRegister();
689 Register prevSpRegister = rbp;
690 Register callTargetRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_TARGET);
691 Register methodRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::METHOD);
692 Register callFieldRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_FIELD);
693 Register fpRegister = __ AvailableRegister1();
694 Register thisRegister = __ AvailableRegister2();
695
696 Label pushFrameState;
697
698 [[maybe_unused]] TempRegisterScope scope(assembler);
699 Register tempRegister = __ TempRegister();
700 // args registers can be reused now.
701 Register pcRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG0);
702 Register numVregsRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG1);
703 GetNumVregsFromCallField(assembler, callFieldRegister, numVregsRegister);
704 __ Cmpq(0, numVregsRegister);
705 __ Jz(&pushFrameState);
706 Register temp2Register = __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_FIELD); // reuse
707 PushUndefinedWithArgcAndCheckStack(assembler, glueRegister, numVregsRegister, tempRegister, temp2Register,
708 stackOverflow);
709 // fall through
710 Register newSpRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG1);
711 __ Bind(&pushFrameState);
712 {
713 StackOverflowCheck(assembler, glueRegister, numVregsRegister, tempRegister, temp2Register, stackOverflow);
714 __ Movq(rsp, newSpRegister);
715
716 PushFrameState(assembler, prevSpRegister, fpRegister,
717 callTargetRegister, thisRegister, methodRegister, pcRegister, tempRegister);
718 }
719 if (type == FrameTransitionType::OTHER_TO_BASELINE_CHECK ||
720 type == FrameTransitionType::BASELINE_TO_BASELINE_CHECK) {
721 __ Movq(Operand(callTargetRegister, JSFunction::BASELINECODE_OFFSET), tempRegister);
722 Label baselineCodeUndefined;
723 __ Cmpq(JSTaggedValue::Undefined().GetRawData(), tempRegister);
724 __ Je(&baselineCodeUndefined);
725
726 // check is compiling
727 __ Cmpq(JSTaggedValue::Hole().GetRawData(), tempRegister);
728 __ Je(&baselineCodeUndefined);
729
730 Label stackAligned;
731 // align 16 bytes
732 __ Testq(15, rsp); // 15: low 4 bits must be 0b0000
733 __ Jz(&stackAligned);
734 __ PushAlignBytes();
735 __ Bind(&stackAligned);
736
737 __ Movq(Operand(tempRegister, MachineCode::FUNCADDR_OFFSET), tempRegister);
738 if (glueRegister != r13) {
739 __ Movq(glueRegister, r13);
740 }
741 if (methodRegister != rbx) {
742 __ Movq(methodRegister, rbx);
743 }
744 const int32_t pcOffsetFromSP = -24; // -24: 3 slots, frameType, prevFrame, pc
745 Register temp3Register = r10;
746 __ Movabs(std::numeric_limits<uint64_t>::max(), temp3Register);
747 __ Movq(temp3Register, Operand(newSpRegister, pcOffsetFromSP));
748 __ Movq(newSpRegister, rbp);
749 __ Jmp(tempRegister);
750
751 __ Bind(&baselineCodeUndefined);
752 }
753 DispatchCall(assembler, pcRegister, newSpRegister, callTargetRegister, methodRegister);
754 }
755
756 // Input: %r13 - glue
757 // %rbp - sp
758 // %r12 - callTarget
759 // %rbx - method
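// Loads the GHC dispatch registers expected by the bytecode handlers (r13 glue, rbp sp, r12 pc,
// rbx constantPool, r14 profileTypeInfo, rsi acc, rdi hotnessCounter) and jumps to the stub for the
// first opcode at pc.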
760 void AsmInterpreterCall::DispatchCall(ExtendedAssembler *assembler, Register pcRegister,
761 Register newSpRegister, Register callTargetRegister, Register methodRegister, Register accRegister)
762 {
763 Register glueRegister = __ GlueRegister();
764 Label dispatchCall;
765 // align 16 bytes
766 __ Testq(15, rsp); // 15: low 4 bits must be 0b0000
767 __ Jnz(&dispatchCall);
768 __ PushAlignBytes();
769 __ Bind(&dispatchCall);
770 // profileTypeInfo: r14
771 __ Movq(Operand(callTargetRegister, JSFunction::RAW_PROFILE_TYPE_INFO_OFFSET), r14);
772 __ Movq(Operand(r14, ProfileTypeInfoCell::VALUE_OFFSET), r14);
773 // glue may be in rdi
774 if (glueRegister != r13) {
775 __ Movq(glueRegister, r13);
776 }
777 // sp: rbp
778 __ Movq(newSpRegister, rbp);
779 // hotnessCounter: rdi
780 __ Movzwq(Operand(methodRegister, Method::LITERAL_INFO_OFFSET), rdi);
781 // constantPool: rbx
782 __ Movq(Operand(methodRegister, Method::CONSTANT_POOL_OFFSET), rbx);
783 // pc: r12
784 if (pcRegister != r12) {
785 __ Movq(pcRegister, r12);
786 }
787
788 Register bcIndexRegister = rax;
789 Register tempRegister = __ AvailableRegister1();
790 __ Movzbq(Operand(pcRegister, 0), bcIndexRegister);
791 // acc: rsi
792 if (accRegister != rInvalid) {
793 ASSERT(accRegister == rsi);
794 } else {
795 __ Movq(JSTaggedValue::Hole().GetRawData(), rsi);
796 }
797 __ Movq(Operand(r13, bcIndexRegister, Times8, JSThread::GlueData::GetBCStubEntriesOffset(false)), tempRegister);
798 __ Jmp(tempRegister);
799 }
800
801 // uint64_t PushCallRangeAndDispatchNative(uintptr_t glue, uint32_t argc, JSTaggedType calltarget, uintptr_t argv[])
802 // c++ calling convention call js function
803 // Input: %rdi - glue
804 // %rsi - nativeCode
805 // %rdx - func
806 // %rcx - thisValue
807 // %r8 - argc
808 // %r9 - argV (...)
809 void AsmInterpreterCall::PushCallRangeAndDispatchNative(ExtendedAssembler *assembler)
810 {
811 __ BindAssemblerStub(RTSTUB_ID(PushCallRangeAndDispatchNative));
812 CallNativeWithArgv(assembler, false);
813 }
814
815 void AsmInterpreterCall::PushCallNewAndDispatchNative(ExtendedAssembler *assembler)
816 {
817 __ BindAssemblerStub(RTSTUB_ID(PushCallNewAndDispatchNative));
818 CallNativeWithArgv(assembler, true);
819 }
820
821 void AsmInterpreterCall::PushNewTargetAndDispatchNative(ExtendedAssembler *assembler)
822 {
823 __ BindAssemblerStub(RTSTUB_ID(PushNewTargetAndDispatchNative));
824 CallNativeWithArgv(assembler, true, true);
825 }
826
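// Builds a BUILTIN_FRAME_WITH_ARGV, pushes the stack arguments, this, new.target (func when callNew and
// no explicit newTarget), func, argc and the thread pointer so that rsp points at an EcmaRuntimeCallInfo,
// then calls the native code. On stack overflow a dedicated overflow frame is built and
// ThrowStackOverflowException is called through the runtime stub table.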
827 void AsmInterpreterCall::CallNativeWithArgv(ExtendedAssembler *assembler, bool callNew, bool hasNewTarget)
828 {
829 Register glue = rdi;
830 Register nativeCode = rsi;
831 Register func = rdx;
832 Register thisValue = rcx;
833 Register numArgs = r8;
834 Register stackArgs = r9;
835 Register temporary = rax;
836 Register temporary2 = r11;
837 Register opNumArgs = r10;
838 Label aligned;
839 Label pushThis;
840 Label stackOverflow;
841
842 bool isFrameComplete = PushBuiltinFrame(assembler, glue, FrameType::BUILTIN_FRAME_WITH_ARGV);
843
844 __ Push(numArgs);
845 __ Cmpq(0, numArgs);
846 __ Jz(&pushThis);
847 __ Movq(numArgs, opNumArgs);
848 PushArgsWithArgvAndCheckStack(assembler, glue, opNumArgs, stackArgs, temporary, temporary2, &stackOverflow);
849
850 __ Bind(&pushThis);
851 __ Push(thisValue);
852 // new.target
853 if (callNew) {
854 if (hasNewTarget) {
855 Register newTarget = r12;
856 // 5: skip frame type, numArgs, func, newTarget and this
857 __ Movq(Operand(rsp, numArgs, Times8, 5 * FRAME_SLOT_SIZE), newTarget);
858 __ Pushq(newTarget);
859 } else {
860 __ Pushq(func);
861 }
862 } else {
863 __ Pushq(JSTaggedValue::Undefined().GetRawData());
864 }
865 __ Pushq(func);
866 if (!isFrameComplete) {
867 // 5: skip frame type, numArgs, func, newTarget and this
868 __ Leaq(Operand(rsp, numArgs, Times8, 5 * FRAME_SLOT_SIZE), rbp);
869 }
870 __ Movq(rsp, stackArgs);
871
872 // push argc
873 __ Addl(NUM_MANDATORY_JSFUNC_ARGS, numArgs);
874 __ Pushq(numArgs);
875 // push thread
876 __ Pushq(glue);
877 // EcmaRuntimeCallInfo
878 __ Movq(rsp, rdi);
879
880 __ Testq(0xf, rsp); // 0xf: 0b1111
881 __ Jz(&aligned, Distance::Near);
882 __ PushAlignBytes();
883
884 __ Bind(&aligned);
885 CallNativeInternal(assembler, nativeCode);
886 __ Ret();
887
888 __ Bind(&stackOverflow);
889 {
890 Label aligneThrow;
891 __ Movq(Operand(glue, JSThread::GlueData::GetLeaveFrameOffset(false)), rsp);
892 __ Pushq(static_cast<int32_t>(FrameType::BUILTIN_FRAME_WITH_ARGV_STACK_OVER_FLOW_FRAME)); // frame type
893 __ Pushq(0); // argc
894 __ Pushq(JSTaggedValue::VALUE_UNDEFINED); // this
895 __ Pushq(JSTaggedValue::VALUE_UNDEFINED); // newTarget
896 __ Pushq(JSTaggedValue::VALUE_UNDEFINED); // callTarget
897 // 5: skip frame type, argc, this, newTarget and callTarget
898 // +----------------------------------------------------------------+ <---- rbp = rsp + 5 * frame_slot_size
899 // | FrameType = BUILTIN_FRAME_WITH_ARGV_STACK_OVER_FLOW_FRAME |
900 // |----------------------------------------------------------------|
901 // | argc = 0 |
902 // |----------------------------------------------------------------|
903 // |                          this = undefined                          |
904 // |----------------------------------------------------------------|
905 // | newTarget = undefined |
906 // |----------------------------------------------------------------|
907 // | callTarget = undefined |
908 // +----------------------------------------------------------------+ <---- rsp
909 __ Leaq(Operand(rsp, 5 * FRAME_SLOT_SIZE), rbp);
910
911 __ Testq(0xf, rsp); // 0xf: 0b1111
912 __ Jz(&aligneThrow, Distance::Near);
913 __ PushAlignBytes();
914
915 __ Bind(&aligneThrow);
916 Register trampolineIdRegister = r9;
917 Register trampolineRegister = r10;
918 __ Movq(kungfu::RuntimeStubCSigns::ID_ThrowStackOverflowException, trampolineIdRegister);
919 __ Movq(Operand(glue, trampolineIdRegister, Times8, JSThread::GlueData::GetRTStubEntriesOffset(false)),
920 trampolineRegister);
921 __ Callq(trampolineRegister);
922
923 // resume rsp
924 __ Movq(rbp, rsp);
925 __ Pop(rbp);
926 __ Ret();
927 }
928 }
929
930 void AsmInterpreterCall::CallNativeEntry(ExtendedAssembler *assembler)
931 {
932 Register glue = rdi;
933 Register argv = r9;
934 Register method = rdx;
935 Register function = rsi;
936 Register nativeCode = r10;
937
938 __ PushAlignBytes();
939 __ Push(function);
940 // 3: skip thread, argc and returnAddr (3 slots, 24 bytes)
941 __ Subq(3 * FRAME_SLOT_SIZE, rsp);
942 PushBuiltinFrame(assembler, glue, FrameType::BUILTIN_ENTRY_FRAME);
943 __ Movq(Operand(method, Method::NATIVE_POINTER_OR_BYTECODE_ARRAY_OFFSET), nativeCode); // get native pointer
944 __ Movq(argv, r11);
945 // 2: skip numArgs and thread (2 slots, 16 bytes)
946 __ Subq(2 * FRAME_SLOT_SIZE, r11);
947 // EcmaRuntimeCallInfo
948 __ Movq(r11, rdi);
949
950 CallNativeInternal(assembler, nativeCode);
951
952 // 5: skip the reserved slots, function and align bytes (5 slots, 40 bytes)
953 __ Addq(5 * FRAME_SLOT_SIZE, rsp);
954 __ Ret();
955 }
956
957 // uint64_t PushCallArgsAndDispatchNative(uintptr_t codeAddress, uintptr_t glue, uint32_t argc, ...)
958 // webkit_jscc calling convention, calls the native function (C ABI)
959 // Input: %rax - codeAddress
960 // stack layout: sp + N*8 argvN
961 // ........
962 // sp + 24: argv1
963 // sp + 16: argv0
964 // sp + 8: actualArgc
965 // sp: thread
966 // construct Native Leave Frame
967 // +--------------------------+
968 // | argV[N - 1] |
969 // |--------------------------|
970 // | . . . . |
971 // |--------------------------+
972 // | argV[2]=this |
973 // +--------------------------+
974 // | argV[1]=new-target |
975 // +--------------------------+
976 // | argV[0]=call-target |
977 // +--------------------------+ ---------
978 // | argc | ^
979 // |--------------------------| |
980 // | thread | |
981 // |--------------------------| |
982 // | returnAddr | BuiltinFrame
983 // |--------------------------| |
984 // | callsiteFp | |
985 // |--------------------------| |
986 // | frameType | v
987 // +--------------------------+ ---------
988
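// The caller has already pushed thread, argc and the arguments webkit_jscc style; this stub reloads
// glue from the thread slot, builds a BUILTIN_FRAME, passes the EcmaRuntimeCallInfo pointer in rdi and
// calls the native code whose address arrives in rax.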
989 void AsmInterpreterCall::PushCallArgsAndDispatchNative(ExtendedAssembler *assembler)
990 {
991 __ BindAssemblerStub(RTSTUB_ID(PushCallArgsAndDispatchNative));
992 Register nativeCode = rax;
993 Register glue = rdi;
994
995 __ Movq(Operand(rsp, FRAME_SLOT_SIZE), glue); // 8: glue
996 PushBuiltinFrame(assembler, glue, FrameType::BUILTIN_FRAME);
997 __ Leaq(Operand(rbp, 2 * FRAME_SLOT_SIZE), rdi); // 2: skip argc & thread
998 __ PushAlignBytes();
999 CallNativeInternal(assembler, nativeCode);
1000 __ Ret();
1001 }
1002
1003 bool AsmInterpreterCall::PushBuiltinFrame(ExtendedAssembler *assembler,
1004 Register glue, FrameType type)
1005 {
1006 __ Pushq(rbp);
1007 __ Movq(rsp, Operand(glue, JSThread::GlueData::GetLeaveFrameOffset(false)));
1008 __ Pushq(static_cast<int32_t>(type));
1009 if (type != FrameType::BUILTIN_FRAME_WITH_ARGV) {
1010 __ Leaq(Operand(rsp, FRAME_SLOT_SIZE), rbp); // 8: skip frame type
1011 return true;
1012 } else if (type == FrameType::BUILTIN_FRAME_WITH_ARGV) {
1013 // for this frame, stack args must be pushed before rbp is updated; otherwise the cpu profiler may visit an incomplete stack
1014 // BuiltinWithArgvFrame layout please see frames.h
1015 return false;
1016 } else {
1017 LOG_ECMA(FATAL) << "this branch is unreachable";
1018 UNREACHABLE();
1019 }
1020 }
1021
1022 void AsmInterpreterCall::CallNativeInternal(ExtendedAssembler *assembler, Register nativeCode)
1023 {
1024 __ Callq(nativeCode);
1025 // resume rsp
1026 __ Movq(rbp, rsp);
1027 __ Pop(rbp);
1028 }
1029
1030 // ResumeRspAndDispatch(uintptr_t glue, uintptr_t sp, uintptr_t pc, uintptr_t constantPool,
1031 // uint64_t profileTypeInfo, uint64_t acc, uint32_t hotnessCounter, size_t jumpSize)
1032 // GHC calling convention
1033 // %r13 - glue
1034 // %rbp - sp
1035 // %r12 - pc
1036 // %rbx - constantPool
1037 // %r14 - profileTypeInfo
1038 // %rsi - acc
1039 // %rdi - hotnessCounter
1040 // %r8 - jumpSizeAfterCall
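// A non-positive jumpSize marks a return from a constructor-style call: the accumulator is replaced
// with 'this' unless it already holds an ECMA object, and a non-base constructor returning a
// non-object dispatches to the NewObjectRangeThrowException handler.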
1041 void AsmInterpreterCall::ResumeRspAndDispatch(ExtendedAssembler *assembler)
1042 {
1043 __ BindAssemblerStub(RTSTUB_ID(ResumeRspAndDispatch));
1044 Register glueRegister = __ GlueRegister();
1045 Register spRegister = rbp;
1046 Register pcRegister = r12;
1047 Register ret = rsi;
1048 Register jumpSizeRegister = r8;
1049
1050 Register frameStateBaseRegister = r11;
1051 __ Movq(spRegister, frameStateBaseRegister);
1052 __ Subq(AsmInterpretedFrame::GetSize(false), frameStateBaseRegister);
1053
1054 Label dispatch;
1055 Label newObjectRangeReturn;
1056 __ Cmpq(0, jumpSizeRegister);
1057 __ Jle(&newObjectRangeReturn);
1058
1059 __ Movq(Operand(frameStateBaseRegister, AsmInterpretedFrame::GetBaseOffset(false)), spRegister); // update sp
1060 __ Addq(jumpSizeRegister, pcRegister); // newPC
1061 Register temp = rax;
1062 Register opcodeRegister = rax;
1063 __ Movzbq(Operand(pcRegister, 0), opcodeRegister);
1064
1065 __ Bind(&dispatch);
1066 {
1067 __ Movq(Operand(frameStateBaseRegister, AsmInterpretedFrame::GetFpOffset(false)), rsp); // resume rsp
1068 Register bcStubRegister = r11;
1069 __ Movq(Operand(glueRegister, opcodeRegister, Times8, JSThread::GlueData::GetBCStubEntriesOffset(false)),
1070 bcStubRegister);
1071 __ Jmp(bcStubRegister);
1072 }
1073
1074 Label getThis;
1075 Label notUndefined;
1076 __ Bind(&newObjectRangeReturn);
1077 __ Cmpq(JSTaggedValue::Undefined().GetRawData(), ret);
1078 __ Jne(&notUndefined);
1079
1080 __ Bind(&getThis);
1081 __ Movq(Operand(frameStateBaseRegister, AsmInterpretedFrame::GetBaseOffset(false)), spRegister); // update sp
1082 __ Subq(jumpSizeRegister, pcRegister); // sub negative jumpSize
1083 __ Movzbq(Operand(pcRegister, 0), opcodeRegister);
1084 {
1085 __ Movq(Operand(frameStateBaseRegister, AsmInterpretedFrame::GetThisOffset(false)), ret);
1086 __ Movq(Operand(frameStateBaseRegister, AsmInterpretedFrame::GetFpOffset(false)), rsp); // resume rsp
1087 Register bcStubRegister = r11;
1088 __ Movq(Operand(glueRegister, opcodeRegister, Times8, JSThread::GlueData::GetBCStubEntriesOffset(false)),
1089 bcStubRegister);
1090 __ Jmp(bcStubRegister);
1091 }
1092
1093 __ Bind(&notUndefined);
1094 {
1095 Label notEcmaObject;
1096 __ Movabs(JSTaggedValue::TAG_HEAPOBJECT_MASK, temp);
1097 __ And(ret, temp);
1098 __ Cmpq(0, temp);
1099 __ Jne(&notEcmaObject);
1100 // acc is heap object
1101 __ Movq(Operand(ret, 0), temp); // hclass
1102 __ Movl(Operand(temp, JSHClass::BIT_FIELD_OFFSET), temp);
1103 __ Cmpb(static_cast<int32_t>(JSType::ECMA_OBJECT_LAST), temp);
1104 __ Ja(&notEcmaObject);
1105 __ Cmpb(static_cast<int32_t>(JSType::ECMA_OBJECT_FIRST), temp);
1106 __ Jb(&notEcmaObject);
1107 // acc is ecma object
1108 __ Movq(Operand(frameStateBaseRegister, AsmInterpretedFrame::GetBaseOffset(false)), spRegister); // update sp
1109 __ Subq(jumpSizeRegister, pcRegister); // sub negative jumpSize
1110 __ Movzbq(Operand(pcRegister, 0), opcodeRegister);
1111 __ Jmp(&dispatch);
1112
1113 __ Bind(&notEcmaObject);
1114 {
1115 // load constructor
1116 __ Movq(Operand(frameStateBaseRegister, AsmInterpretedFrame::GetFunctionOffset(false)), temp);
1117 __ Movq(Operand(temp, JSFunctionBase::METHOD_OFFSET), temp);
1118 __ Movq(Operand(temp, Method::EXTRA_LITERAL_INFO_OFFSET), temp);
1119 __ Shr(MethodLiteral::FunctionKindBits::START_BIT, temp);
1120 __ Andl((1LU << MethodLiteral::FunctionKindBits::SIZE) - 1, temp);
1121 __ Cmpl(static_cast<int32_t>(FunctionKind::CLASS_CONSTRUCTOR), temp);
1122 __ Jbe(&getThis); // constructor is base
1123 // fall through
1124 }
1125 // exception branch
1126 {
1127 __ Movq(Operand(frameStateBaseRegister, AsmInterpretedFrame::GetBaseOffset(false)), spRegister);
1128 __ Movq(kungfu::BytecodeStubCSigns::ID_NewObjectRangeThrowException, opcodeRegister);
1129 __ Jmp(&dispatch);
1130 }
1131 }
1132 }
1133
1134 // c++ calling convention
1135 // %rdi - glue
1136 // %rsi - callTarget
1137 // %rdx - method
1138 // %rcx - callField
1139 // %r8 - receiver
1140 // %r9 - value
1141 void AsmInterpreterCall::CallGetter(ExtendedAssembler *assembler)
1142 {
1143 __ BindAssemblerStub(RTSTUB_ID(CallGetter));
1144 Label target;
1145
1146 PushAsmInterpBridgeFrame(assembler);
1147 __ Callq(&target);
1148 PopAsmInterpBridgeFrame(assembler);
1149 __ Ret();
1150 __ Bind(&target);
1151 JSCallCommonEntry(assembler, JSCallMode::CALL_GETTER, FrameTransitionType::OTHER_TO_OTHER);
1152 }
1153
1154 void AsmInterpreterCall::CallSetter(ExtendedAssembler *assembler)
1155 {
1156 __ BindAssemblerStub(RTSTUB_ID(CallSetter));
1157 Label target;
1158 PushAsmInterpBridgeFrame(assembler);
1159 __ Callq(&target);
1160 PopAsmInterpBridgeFrame(assembler);
1161 __ Ret();
1162 __ Bind(&target);
1163 JSCallCommonEntry(assembler, JSCallMode::CALL_SETTER, FrameTransitionType::OTHER_TO_OTHER);
1164 }
1165
1166 // Input: glue - %rdi
1167 // callTarget - %rsi
1168 // method - %rdx
1169 // callField - %rcx
1170 // arg0(argc) - %r8
1171 // arg1(arglist) - %r9
1172 // argthis - stack
1173 void AsmInterpreterCall::CallReturnWithArgv(ExtendedAssembler *assembler)
1174 {
1175 __ BindAssemblerStub(RTSTUB_ID(CallReturnWithArgv));
1176 Label target;
1177 PushAsmInterpBridgeFrame(assembler);
1178 Register r13 = __ CppJSCallAvailableRegister1();
1179 __ Movq(Operand(rbp, FRAME_SLOT_SIZE), r13);
1180 __ Callq(&target);
1181 PopAsmInterpBridgeFrame(assembler);
1182 __ Ret();
1183 __ Bind(&target);
1184 {
1185 JSCallCommonEntry(assembler, JSCallMode::CALL_THIS_ARGV_WITH_RETURN,
1186 FrameTransitionType::OTHER_TO_OTHER);
1187 }
1188 }
1189
1190 void AsmInterpreterCall::CallContainersArgs2(ExtendedAssembler *assembler)
1191 {
1192 __ BindAssemblerStub(RTSTUB_ID(CallContainersArgs2));
1193 Label target;
1194 PushAsmInterpBridgeFrame(assembler);
1195 GetArgvAtStack(assembler);
1196 __ Callq(&target);
1197 PopAsmInterpBridgeFrame(assembler);
1198 __ Ret();
1199 __ Bind(&target);
1200 {
1201 JSCallCommonEntry(assembler, JSCallMode::CALL_THIS_ARG2_WITH_RETURN,
1202 FrameTransitionType::OTHER_TO_OTHER);
1203 }
1204 }
1205
1206 void AsmInterpreterCall::CallContainersArgs3(ExtendedAssembler *assembler)
1207 {
1208 __ BindAssemblerStub(RTSTUB_ID(CallContainersArgs3));
1209 Label target;
1210 PushAsmInterpBridgeFrame(assembler);
1211 GetArgvAtStack(assembler);
1212 __ Callq(&target);
1213 PopAsmInterpBridgeFrame(assembler);
1214 __ Ret();
1215 __ Bind(&target);
1216 {
1217 JSCallCommonEntry(assembler, JSCallMode::CALL_THIS_ARG3_WITH_RETURN,
1218 FrameTransitionType::OTHER_TO_OTHER);
1219 }
1220 }
1221
1222 // ResumeRspAndReturn(uintptr_t acc)
1223 // GHC calling convention
1224 // %r13 - acc
1225 // %rbp - prevSp
1226 // %r12 - sp
1227 void AsmInterpreterCall::ResumeRspAndReturn(ExtendedAssembler *assembler)
1228 {
1229 __ BindAssemblerStub(RTSTUB_ID(ResumeRspAndReturn));
1230 Register currentSp = r12;
1231 Register fpRegister = r10;
1232 intptr_t offset = AsmInterpretedFrame::GetFpOffsetAsIntptr(false) -
1233 AsmInterpretedFrame::GetSizeAsIntptr(false);
1234 __ Movq(Operand(currentSp, static_cast<int32_t>(offset)), fpRegister);
1235 __ Movq(fpRegister, rsp);
1236 // return
1237 {
1238 __ Movq(r13, rax);
1239 __ Ret();
1240 }
1241 }
1242
1243 // ResumeRspAndReturnBaseline(uintptr_t acc)
1244 // GHC calling convention
1245 // %r13 - acc
1246 // %rbp - prevSp
1247 // %r12 - sp
1248 // %rbx - jumpSizeAfterCall
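// Applies the same constructor-return fix-up as ResumeRspAndDispatch, but returns the result to the
// baseline caller in rax instead of dispatching the next bytecode.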
1249 void AsmInterpreterCall::ResumeRspAndReturnBaseline(ExtendedAssembler *assembler)
1250 {
1251 __ BindAssemblerStub(RTSTUB_ID(ResumeRspAndReturnBaseline));
1252 Register currentSp = r12;
1253 Register fpRegister = r10;
1254 intptr_t fpOffset = static_cast<intptr_t>(AsmInterpretedFrame::GetFpOffset(false)) -
1255 static_cast<intptr_t>(AsmInterpretedFrame::GetSize(false));
1256 __ Movq(Operand(currentSp, static_cast<int32_t>(fpOffset)), fpRegister);
1257 __ Movq(fpRegister, rsp);
1258
1259 // Check result
1260 Register ret = r13;
1261 Register jumpSizeRegister = rbx;
1262 Label getThis;
1263 Label notUndefined;
1264 Label normalReturn;
1265 Label newObjectRangeReturn;
1266 __ Cmpq(0, jumpSizeRegister);
1267 __ Jg(&normalReturn);
1268
1269 __ Bind(&newObjectRangeReturn);
1270 {
1271 __ Cmpq(JSTaggedValue::Undefined().GetRawData(), ret);
1272 __ Jne(&notUndefined);
1273
1274 // acc is undefined
1275 __ Bind(&getThis);
1276 intptr_t thisOffset = static_cast<intptr_t>(AsmInterpretedFrame::GetThisOffset(false)) -
1277 static_cast<intptr_t>(AsmInterpretedFrame::GetSize(false));
1278 __ Movq(Operand(currentSp, static_cast<int32_t>(thisOffset)), ret);
1279 __ Jmp(&normalReturn);
1280
1281 // acc is not undefined
1282 __ Bind(&notUndefined);
1283 {
1284 Register temp = rax;
1285 Label notEcmaObject;
1286 __ Movabs(JSTaggedValue::TAG_HEAPOBJECT_MASK, temp);
1287 __ And(ret, temp);
1288 __ Cmpq(0, temp);
1289 __ Jne(&notEcmaObject);
1290 // acc is heap object
1291 __ Movq(Operand(ret, 0), temp); // hclass
1292 __ Movl(Operand(temp, JSHClass::BIT_FIELD_OFFSET), temp);
1293 __ Cmpb(static_cast<int32_t>(JSType::ECMA_OBJECT_LAST), temp);
1294 __ Ja(&notEcmaObject);
1295 __ Cmpb(static_cast<int32_t>(JSType::ECMA_OBJECT_FIRST), temp);
1296 __ Jb(&notEcmaObject);
1297 // acc is ecma object
1298 __ Jmp(&normalReturn);
1299
1300 __ Bind(&notEcmaObject);
1301 {
1302 // load constructor
1303 intptr_t funcOffset = AsmInterpretedFrame::GetFunctionOffsetAsIntptr(false) -
1304 AsmInterpretedFrame::GetSizeAsIntptr(false);
1305 __ Movq(Operand(currentSp, static_cast<int32_t>(funcOffset)), temp);
1306 __ Movq(Operand(temp, JSFunctionBase::METHOD_OFFSET), temp);
1307 __ Movq(Operand(temp, Method::EXTRA_LITERAL_INFO_OFFSET), temp);
1308 __ Shr(MethodLiteral::FunctionKindBits::START_BIT, temp);
1309 __ Andl((1LU << MethodLiteral::FunctionKindBits::SIZE) - 1, temp);
1310 __ Cmpl(static_cast<int32_t>(FunctionKind::CLASS_CONSTRUCTOR), temp);
1311 __ Jbe(&getThis); // constructor is base
1312 // fall through
1313 }
1314 }
1315 }
1316 __ Bind(&normalReturn);
1317 __ Movq(ret, rax);
1318 __ Ret();
1319 }
1320
1321 // ResumeCaughtFrameAndDispatch(uintptr_t glue, uintptr_t sp, uintptr_t pc, uintptr_t constantPool,
1322 // uint64_t profileTypeInfo, uint64_t acc, uint32_t hotnessCounter)
1323 // GHC calling convention
1324 // %r13 - glue
1325 // %rbp - sp
1326 // %r12 - pc
1327 // %rbx - constantPool
1328 // %r14 - profileTypeInfo
1329 // %rsi - acc
1330 // %rdi - hotnessCounter
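// Restores rsp from the lastFp recorded by the exception handling path (when it is non-zero) and
// dispatches to the bytecode at the catch handler's pc.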
1331 void AsmInterpreterCall::ResumeCaughtFrameAndDispatch(ExtendedAssembler *assembler)
1332 {
1333 __ BindAssemblerStub(RTSTUB_ID(ResumeCaughtFrameAndDispatch));
1334 Register glueRegister = __ GlueRegister();
1335 Register pcRegister = r12;
1336
1337 Label dispatch;
1338 Register fpRegister = r11;
1339 __ Movq(Operand(glueRegister, JSThread::GlueData::GetLastFpOffset(false)), fpRegister);
1340 __ Cmpq(0, fpRegister);
1341 __ Jz(&dispatch);
1342 __ Movq(fpRegister, rsp); // resume rsp
1343 __ Bind(&dispatch);
1344 {
1345 Register opcodeRegister = rax;
1346 __ Movzbq(Operand(pcRegister, 0), opcodeRegister);
1347 Register bcStubRegister = r11;
1348 __ Movq(Operand(glueRegister, opcodeRegister, Times8, JSThread::GlueData::GetBCStubEntriesOffset(false)),
1349 bcStubRegister);
1350 __ Jmp(bcStubRegister);
1351 }
1352 }
1353
1354 // ResumeUncaughtFrameAndReturn(uintptr_t glue)
1355 // GHC calling convention
1356 // %r13 - glue
1357 // %rbp - sp
1358 // %r12 - acc
1359 void AsmInterpreterCall::ResumeUncaughtFrameAndReturn(ExtendedAssembler *assembler)
1360 {
1361 __ BindAssemblerStub(RTSTUB_ID(ResumeUncaughtFrameAndReturn));
1362 Register glueRegister = __ GlueRegister();
1363 Register acc(r12);
1364 Register cppRet(rax);
1365
1366 Label ret;
1367 Register fpRegister = r11;
1368 __ Movq(Operand(glueRegister, JSThread::GlueData::GetLastFpOffset(false)), fpRegister);
1369 __ Cmpq(0, fpRegister);
1370 __ Jz(&ret);
1371 __ Movq(fpRegister, rsp); // resume rsp
1372 __ Bind(&ret);
1373 // this method will return to Execute(cpp calling convention), and the return value should be put into rax.
1374 __ Movq(acc, cppRet);
1375 __ Ret();
1376 }
1377
1378 // ResumeRspAndRollback(uintptr_t glue, uintptr_t sp, uintptr_t pc, uintptr_t constantPool,
1379 // uint64_t profileTypeInfo, uint64_t acc, uint32_t hotnessCounter, size_t jumpSize)
1380 // GHC calling convention
1381 // %r13 - glue
1382 // %rbp - sp
1383 // %r12 - pc
1384 // %rbx - constantPool
1385 // %r14 - profileTypeInfo
1386 // %rsi - acc
1387 // %rdi - hotnessCounter
1388 // %r8 - jumpSizeAfterCall
1389 void AsmInterpreterCall::ResumeRspAndRollback(ExtendedAssembler *assembler)
1390 {
1391 __ BindAssemblerStub(RTSTUB_ID(ResumeRspAndRollback));
1392 Register glueRegister = __ GlueRegister();
1393 Register spRegister = rbp;
1394 Register pcRegister = r12;
1395 Register ret = rsi;
1396 Register jumpSizeRegister = r8;
1397
1398 Register frameStateBaseRegister = r11;
1399 __ Movq(spRegister, frameStateBaseRegister);
1400 __ Subq(AsmInterpretedFrame::GetSize(false), frameStateBaseRegister);
1401
1402 __ Movq(Operand(frameStateBaseRegister, AsmInterpretedFrame::GetBaseOffset(false)), spRegister); // update sp
1403 __ Addq(jumpSizeRegister, pcRegister); // newPC
1404 Register opcodeRegister = rax;
1405 __ Movzbq(Operand(pcRegister, 0), opcodeRegister);
1406
1407 __ Movq(Operand(frameStateBaseRegister, AsmInterpretedFrame::GetFunctionOffset(false)), ret); // restore acc
1408
1409 __ Movq(Operand(frameStateBaseRegister, AsmInterpretedFrame::GetFpOffset(false)), rsp); // resume rsp
1410 Register bcStubRegister = r11;
1411 __ Movq(Operand(glueRegister, opcodeRegister, Times8, JSThread::GlueData::GetBCStubEntriesOffset(false)),
1412 bcStubRegister);
1413 __ Jmp(bcStubRegister);
1414 }
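
// A hedged sketch of the rollback above, assuming an AsmInterpretedFrame record sits immediately below
// sp with base/function/fp slots at the offsets the code reads (the struct-style access is illustrative):
//   auto *frame = reinterpret_cast<AsmInterpretedFrame *>(sp - AsmInterpretedFrame::GetSize(false));
//   sp  = frame->base;               // pop back to the caller's interpreted frame
//   pc += jumpSizeAfterCall;         // step past the call bytecode being rolled back
//   acc = frame->function;           // acc is reloaded from the frame's function slot
//   rsp = frame->fp;                 // resume the native stack pointer
//   goto bcStubEntries[*pc];         // dispatch the bytecode at the new pc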

// Preserve all general registers except r11 and the callee-saved registers
// (the callee saves those itself), then call the target held in r11.
void AsmInterpreterCall::PreserveMostCall(ExtendedAssembler* assembler)
{
    // * Stack layout is as follows:
    //                 +--------------------------+ ---------
    //                 |        . . . . .         |    ^
    //   callerSP ---> |--------------------------|    |
    //                 |        returnAddr        |    |
    //                 |--------------------------| OptimizedFrame
    //                 |        callsiteFp        |    |
    //         fp ---> |--------------------------|    |
    //                 |      OPTIMIZED_FRAME     |    v
    //                 +--------------------------+ ---------
    //                 |           rdi            |
    //                 +--------------------------+
    //                 |           rsi            |
    //                 +--------------------------+
    //                 |           rdx            |
    //                 +--------------------------+
    //                 |           rcx            |
    //                 +--------------------------+
    //                 |            r8            |
    //                 +--------------------------+
    //                 |            r9            |
    //                 +--------------------------+
    //                 |           r10            |
    //                 +--------------------------+
    //                 |           rax            |
    //                 +--------------------------+
    //                 |          align           |
    //   calleeSP ---> +--------------------------+
    {
        // Prologue: save rbp, push the frame type, and update rbp.
        __ Pushq(rbp);
        __ Pushq(static_cast<int64_t>(FrameType::OPTIMIZED_FRAME)); // set frame type
        __ Leaq(Operand(rsp, FRAME_SLOT_SIZE), rbp); // skip frame type
    }
    int32_t PreserveRegisterIndex = 9;
    // rdi, rsi, rdx, rcx, r8, r9, r10 and rax must be preserved;
    // the remaining general registers are callee-saved, so the callee will save them.
    __ Subq(PreserveRegisterIndex * FRAME_SLOT_SIZE, rsp);
    __ Movq(rdi, Operand(rsp, FRAME_SLOT_SIZE * (--PreserveRegisterIndex)));
    __ Movq(rsi, Operand(rsp, FRAME_SLOT_SIZE * (--PreserveRegisterIndex)));
    __ Movq(rdx, Operand(rsp, FRAME_SLOT_SIZE * (--PreserveRegisterIndex)));
    __ Movq(rcx, Operand(rsp, FRAME_SLOT_SIZE * (--PreserveRegisterIndex)));
    __ Movq(r8, Operand(rsp, FRAME_SLOT_SIZE * (--PreserveRegisterIndex)));
    __ Movq(r9, Operand(rsp, FRAME_SLOT_SIZE * (--PreserveRegisterIndex)));
    __ Movq(r10, Operand(rsp, FRAME_SLOT_SIZE * (--PreserveRegisterIndex)));
    __ Movq(rax, Operand(rsp, FRAME_SLOT_SIZE * (--PreserveRegisterIndex)));
    __ Callq(r11);
    __ Movq(Operand(rsp, FRAME_SLOT_SIZE * (PreserveRegisterIndex++)), rax);
    __ Movq(Operand(rsp, FRAME_SLOT_SIZE * (PreserveRegisterIndex++)), r10);
    __ Movq(Operand(rsp, FRAME_SLOT_SIZE * (PreserveRegisterIndex++)), r9);
    __ Movq(Operand(rsp, FRAME_SLOT_SIZE * (PreserveRegisterIndex++)), r8);
    __ Movq(Operand(rsp, FRAME_SLOT_SIZE * (PreserveRegisterIndex++)), rcx);
    __ Movq(Operand(rsp, FRAME_SLOT_SIZE * (PreserveRegisterIndex++)), rdx);
    __ Movq(Operand(rsp, FRAME_SLOT_SIZE * (PreserveRegisterIndex++)), rsi);
    __ Movq(Operand(rsp, FRAME_SLOT_SIZE * (PreserveRegisterIndex++)), rdi);
    {
        // Epilogue: restore rsp and rbp.
        // The extra FRAME_SLOT_SIZE accounts for the frame type slot.
        __ Addq(PreserveRegisterIndex * FRAME_SLOT_SIZE + FRAME_SLOT_SIZE, rsp);
        __ Popq(rbp);
        __ Ret();
    }
}
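
// Illustrative usage, mirroring how the write-barrier stubs below drive this helper: the caller loads
// the real callee into r11 and then falls into PreserveMostCall, which spills the caller-saved GPRs,
// calls r11, restores them and returns (someStubEntryOffset is a placeholder, not a real constant):
//   __ Movq(Operand(rdi, someStubEntryOffset), r11);  // pick the target entry from the glue area
//   PreserveMostCall(assembler);                      // spill, call r11, restore, ret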

// ASMFastWriteBarrier(GateRef glue, GateRef obj, GateRef offset, GateRef value)
// C calling convention, but preserves all general registers except %r11
// %rdi - glue
// %rsi - obj
// %rdx - offset
// %rcx - value
void AsmInterpreterCall::ASMFastWriteBarrier(ExtendedAssembler* assembler)
{
    // The valid region flags are as follows; the flag is assumed to be ALWAYS VALID.
    // Judge the region of value with:
    //                      "young"                "sweepable share"   "readonly share"
    // region flag:   0x08, 0x09, [0x0A, 0x11],      [0x12, 0x14],          0x15
    // value is share:      [0x12, 0x15]             => valueMaybeSweepableShare
    //     readonly share:   0x15                    => return
    //     sweepable share:  [0x12, 0x14]            => needCallShare
    // value is not share:   0x08, 0x09, [0x0A, 0x11] => valueNotShare
    //     value is young:       0x09                => needCallNotShare
    //     value is not young:   0x08, [0x0A, 0x11]  => checkMark
    ASSERT(GENERAL_YOUNG_BEGIN <= IN_YOUNG_SPACE && IN_YOUNG_SPACE < SHARED_SPACE_BEGIN &&
        SHARED_SPACE_BEGIN <= SHARED_SWEEPABLE_SPACE_BEGIN && SHARED_SWEEPABLE_SPACE_END < IN_SHARED_READ_ONLY_SPACE &&
        IN_SHARED_READ_ONLY_SPACE == HEAP_SPACE_END);
    __ BindAssemblerStub(RTSTUB_ID(ASMFastWriteBarrier));
    Label needCall;
    Label checkMark;
    Label needCallNotShare;
    Label needCallShare;
    Label valueNotShare;
    Label valueMaybeSweepableShare;
    {
        // int8_t *valueRegion = value & (~(JSTaggedValue::TAG_MARK | DEFAULT_REGION_MASK))
        // int8_t valueFlag = *valueRegion
        // if (valueFlag >= SHARED_SWEEPABLE_SPACE_BEGIN) {
        //     goto valueMaybeSweepableShare
        // }

        __ Movabs(~(JSTaggedValue::TAG_MARK | DEFAULT_REGION_MASK), r11); // r11 is the mask to get the region.
        __ And(rcx, r11); // r11 is the region address of value.
        __ Movzbl(Operand(r11, 0), r11); // r11 is the flag loaded from the region of value.
        __ Cmpl(Immediate(RegionSpaceFlag::SHARED_SWEEPABLE_SPACE_BEGIN), r11);
        __ Jae(&valueMaybeSweepableShare);
        // if value may be SweepableShare, goto valueMaybeSweepableShare
    }
    __ Bind(&valueNotShare);
    {
        // valueNotShare:
        // if (valueFlag != IN_YOUNG_SPACE) {
        //     goto checkMark
        // }
        // int8_t *objRegion = obj & (~(JSTaggedValue::TAG_MARK | DEFAULT_REGION_MASK))
        // int8_t objFlag = *objRegion
        // if (objFlag != IN_YOUNG_SPACE) {
        //     goto needCallNotShare
        // }

        __ Cmpl(Immediate(RegionSpaceFlag::IN_YOUNG_SPACE), r11);
        __ Jne(&checkMark);
        // if value is not in young, goto checkMark

        __ Movabs(~(JSTaggedValue::TAG_MARK | DEFAULT_REGION_MASK), r11);
        __ And(rsi, r11); // r11 is the region address of obj.
        __ Movzbl(Operand(r11, 0), r11); // r11 is the flag loaded from the region of obj.
        __ Cmpl(Immediate(RegionSpaceFlag::IN_YOUNG_SPACE), r11);
        __ Jne(&needCallNotShare);
        // if obj is not in young, goto needCallNotShare
    }

    __ Bind(&checkMark);
    {
        // checkMark:
        // int8_t GCStateBitField = *(glue + GCStateBitFieldOffset)
        // if ((GCStateBitField & JSThread::CONCURRENT_MARKING_BITFIELD_MASK) != 0) {
        //     goto needCallNotShare
        // }
        // return

        __ Movl(Operand(rdi, JSThread::GlueData::GetGCStateBitFieldOffset(false)), r11);
        __ Testb(Immediate(JSThread::CONCURRENT_MARKING_BITFIELD_MASK), r11);
        __ Jne(&needCallNotShare);
        // if GCState is not READY_TO_MARK, go to needCallNotShare.
        __ Ret();
    }

    __ Bind(&valueMaybeSweepableShare);
    {
        // valueMaybeSweepableShare:
        // if (valueFlag != IN_SHARED_READ_ONLY_SPACE) {
        //     goto needCallShare
        // }
        // return
        __ Cmpl(Immediate(RegionSpaceFlag::IN_SHARED_READ_ONLY_SPACE), r11);
        __ Jne(&needCallShare);
        __ Ret();
    }

    __ Bind(&needCallShare);
    {
        int32_t SValueBarrierOffset = static_cast<int32_t>(JSThread::GlueData::GetCOStubEntriesOffset(false)) +
            kungfu::CommonStubCSigns::SetSValueWithBarrier * FRAME_SLOT_SIZE;
        __ Movq(Operand(rdi, SValueBarrierOffset), r11);
        __ Jmp(&needCall);
    }
    __ Bind(&needCallNotShare);
    {
        int32_t NonSValueBarrier = static_cast<int32_t>(JSThread::GlueData::GetCOStubEntriesOffset(false)) +
            kungfu::CommonStubCSigns::SetNonSValueWithBarrier * FRAME_SLOT_SIZE;
        __ Movq(Operand(rdi, NonSValueBarrier), r11);
    }
    __ Bind(&needCall);
    {
        PreserveMostCall(assembler);
    }
}
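
// A condensed, hedged C-like view of the fast-path decision above; Region(p) stands for masking p down
// to its region header and call(stub) for the PreserveMostCall tail above (both are illustrative only):
//   uint8_t valueFlag = *Region(value);
//   if (valueFlag >= RegionSpaceFlag::SHARED_SWEEPABLE_SPACE_BEGIN) {
//       if (valueFlag == RegionSpaceFlag::IN_SHARED_READ_ONLY_SPACE) return;  // readonly share: no barrier
//       call(SetSValueWithBarrier);                                           // sweepable share
//   } else if (valueFlag == RegionSpaceFlag::IN_YOUNG_SPACE && *Region(obj) != RegionSpaceFlag::IN_YOUNG_SPACE) {
//       call(SetNonSValueWithBarrier);                                        // old object -> young value
//   } else if (gcStateBitField & JSThread::CONCURRENT_MARKING_BITFIELD_MASK) {
//       call(SetNonSValueWithBarrier);                                        // concurrent marking in progress
//   }                                                                         // otherwise no barrier is needed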

// ASMWriteBarrierWithEden(GateRef glue, GateRef obj, GateRef offset, GateRef value)
// C calling convention, but preserves all general registers except %r11
// %rdi - glue
// %rsi - obj
// %rdx - offset
// %rcx - value
void AsmInterpreterCall::ASMWriteBarrierWithEden(ExtendedAssembler* assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(ASMWriteBarrierWithEden));
    // Kept for compatibility only; not a fast implementation. Should be refactored when the Eden barrier is enabled.
    int32_t EdenBarrierOffset = static_cast<int32_t>(JSThread::GlueData::GetCOStubEntriesOffset(false)) +
        kungfu::CommonStubCSigns::SetValueWithEdenBarrier * FRAME_SLOT_SIZE;
    __ Movq(Operand(rdi, EdenBarrierOffset), r11);
    PreserveMostCall(assembler);
}

void AsmInterpreterCall::PushUndefinedWithArgcAndCheckStack(ExtendedAssembler *assembler, Register glue, Register argc,
    Register op1, Register op2, Label *stackOverflow)
{
    ASSERT(stackOverflow != nullptr);
    StackOverflowCheck(assembler, glue, argc, op1, op2, stackOverflow);
    PushUndefinedWithArgc(assembler, argc);
}

void AsmInterpreterCall::ThrowStackOverflowExceptionAndReturn(ExtendedAssembler *assembler, Register glue, Register fp,
    Register op)
{
    if (fp != rsp) {
        __ Movq(fp, rsp);
    }
    __ Movq(kungfu::RuntimeStubCSigns::ID_ThrowStackOverflowException, op);
    __ Movq(Operand(glue, op, Times8, JSThread::GlueData::GetRTStubEntriesOffset(false)), op);
    if (glue != r13) {
        __ Movq(glue, r13);
    }

    __ Pushq(rbp);
    __ Pushq(static_cast<int64_t>(FrameType::ASM_BRIDGE_FRAME)); // set frame type
    __ Leaq(Operand(rsp, FRAME_SLOT_SIZE), rbp); // skip frame type

    __ Pushq(r10); // caller save
    __ Pushq(0); // argc
    __ Pushq(kungfu::RuntimeStubCSigns::ID_ThrowStackOverflowException); // runtime id
    __ Movq(glue, rax); // glue
    __ Movq(kungfu::RuntimeStubCSigns::ID_CallRuntime, r10);
    __ Movq(Operand(rax, r10, Times8, JSThread::GlueData::GetRTStubEntriesOffset(false)), r10);
    __ Callq(r10); // call CallRuntime
    __ Addq(2 * FRAME_SLOT_SIZE, rsp); // 2: skip argc and runtime_id
    __ Popq(r10);
    __ Addq(FRAME_SLOT_SIZE, rsp); // skip frame type
    __ Popq(rbp);
    __ Ret();
}
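
// A hedged outline of the call sequence above: build an ASM_BRIDGE_FRAME, invoke the CallRuntime
// trampoline with argc = 0 and the ThrowStackOverflowException runtime id, then tear the frame down
// (pseudocode only; the pushes follow the instructions above):
//   push rbp; push ASM_BRIDGE_FRAME; rbp = rsp + FRAME_SLOT_SIZE;   // bridge frame prologue
//   push r10; push 0 /* argc */; push ID_ThrowStackOverflowException /* runtime id */;
//   rax = glue;
//   call rtStubEntries[ID_CallRuntime];                             // sets the pending exception
//   pop argc / runtime id; pop r10; pop frame type; pop rbp; ret;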

void AsmInterpreterCall::HasPendingException([[maybe_unused]] ExtendedAssembler *assembler,
    [[maybe_unused]] Register threadRegister)
{
}
#undef __
}  // namespace panda::ecmascript::x64