1 /*
2 * Copyright (c) 2022-2024 Huawei Device Co., Ltd.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16
17 #include "ecmascript/compiler/trampoline/x64/common_call.h"
18
19 #include "ecmascript/js_generator_object.h"
20 #include "ecmascript/message_string.h"
21
22 namespace panda::ecmascript::x64 {
23 #define __ assembler->
24
25 // Generate code for Entering asm interpreter
26 // Input: glue - %rdi
27 // callTarget - %rsi
28 // method - %rdx
29 // callField - %rcx
30 // argc - %r8
31 // argv - %r9(<callTarget, newTarget, this> are at the beginning of argv)
// Emits the stub that enters the asm interpreter from C++.
// Pushes an asm-interpreter entry frame, uses Callq on a forward label so the
// return address lands on the stack (dispatch code "returns" here), then pops
// the entry frame. The emitted byte count of the push/pop bracket is checked
// against FrameCompletionPos::X64EntryFrameDuration so stack walking stays in
// sync with any future changes to the frame layout.
void AsmInterpreterCall::AsmInterpreterEntry(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(AsmInterpreterEntry));
    Label target;
    // push asm interpreter entry frame
    size_t begin = __ GetCurrentPosition();
    PushAsmInterpEntryFrame(assembler);
    __ Callq(&target); // pushes the return address; execution resumes below when dispatched code returns
    PopAsmInterpEntryFrame(assembler);
    size_t end = __ GetCurrentPosition();
    if ((end - begin) != FrameCompletionPos::X64EntryFrameDuration) {
        LOG_COMPILER(FATAL) << (end - begin) << " != " << FrameCompletionPos::X64EntryFrameDuration
                            << "This frame has been modified, and the offset EntryFrameDuration should be updated too.";
    }
    __ Ret();

    __ Bind(&target);
    AsmInterpEntryDispatch(assembler);
}
51
52 // Generate code for generator re-enter asm interpreter
53 // c++ calling convention
54 // Input: %rdi - glue
55 // %rsi - context(GeneratorContext)
// Emits the stub that re-enters the asm interpreter to resume a generator.
// Same bracket structure as AsmInterpreterEntry: entry frame push, Callq into
// the generator dispatch, frame pop on return, with the emitted byte count
// validated against FrameCompletionPos::X64EntryFrameDuration.
void AsmInterpreterCall::GeneratorReEnterAsmInterp(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(GeneratorReEnterAsmInterp));
    Label target;
    size_t begin = __ GetCurrentPosition();
    PushAsmInterpEntryFrame(assembler);
    __ Callq(&target); // pushes the return address; execution resumes below when dispatched code returns
    PopAsmInterpEntryFrame(assembler);
    size_t end = __ GetCurrentPosition();
    if ((end - begin) != FrameCompletionPos::X64EntryFrameDuration) {
        LOG_COMPILER(FATAL) << (end - begin) << " != " << FrameCompletionPos::X64EntryFrameDuration
                            << "This frame has been modified, and the offset EntryFrameDuration should be updated too.";
    }
    __ Ret();

    __ Bind(&target);
    GeneratorReEnterAsmInterpDispatch(assembler);
}
74
// Rebuilds the interpreter frame for a suspended generator and dispatches to
// its bytecode. The saved register snapshot (regsArray) from the
// GeneratorContext is pushed back onto the stack, then an ASM_INTERPRETER_FRAME
// is pushed via PushGeneratorFrameState before DispatchCall jumps into the
// bytecode handler. On stack overflow, throws and returns.
void AsmInterpreterCall::GeneratorReEnterAsmInterpDispatch(ExtendedAssembler *assembler)
{
    Register glueRegister = __ GlueRegister();
    Register contextRegister = rsi;
    Register prevSpRegister = rbp;

    Register callTargetRegister = r9;
    Register methodRegister = rcx;
    Register tempRegister = r11; // can not be used to store any variable
    Register opRegister = r8; // can not be used to store any variable
    __ Movq(Operand(rsi, GeneratorContext::GENERATOR_METHOD_OFFSET), callTargetRegister);
    __ Movq(Operand(callTargetRegister, JSFunctionBase::METHOD_OFFSET), methodRegister);

    Label stackOverflow;

    Register fpRegister = r10;
    __ Movq(rsp, fpRegister); // remember sp before the regs are pushed; becomes the frame's fp
    Register nRegsRegister = rdx;
    Register regsArrayRegister = r12;
    Register thisRegister = r15;
    // push context regs
    __ Movl(Operand(rsi, GeneratorContext::GENERATOR_NREGS_OFFSET), nRegsRegister);
    __ Movq(Operand(rsi, GeneratorContext::GENERATOR_THIS_OFFSET), thisRegister);
    __ Movq(Operand(rsi, GeneratorContext::GENERATOR_REGS_ARRAY_OFFSET), regsArrayRegister);
    __ Addq(TaggedArray::DATA_OFFSET, regsArrayRegister); // skip the array header to the first element
    PushArgsWithArgvAndCheckStack(assembler, glueRegister, nRegsRegister, regsArrayRegister, tempRegister, opRegister,
                                  &stackOverflow);

    // newSp
    Register newSpRegister = r8;
    __ Movq(rsp, newSpRegister);

    // resume asm interp frame
    Register pcRegister = r12;
    PushGeneratorFrameState(assembler, prevSpRegister, fpRegister, callTargetRegister, thisRegister, methodRegister,
                            contextRegister, pcRegister, tempRegister);

    // call bc stub
    DispatchCall(assembler, pcRegister, newSpRegister, callTargetRegister, methodRegister);
    __ Bind(&stackOverflow);
    {
        ThrowStackOverflowExceptionAndReturn(assembler, glueRegister, fpRegister, tempRegister);
    }
}
119
120 // Input: glue - %rdi
121 // callTarget - %rsi
122 // method - %rdx
123 // callField - %rcx
124 // argc - %r8
125 // argv - %r9(<callTarget, newTarget, this> are at the beginning of argv)
126 // prevSp - %rbp
AsmInterpEntryDispatch(ExtendedAssembler * assembler)127 void AsmInterpreterCall::AsmInterpEntryDispatch(ExtendedAssembler *assembler)
128 {
129 Label notJSFunction;
130 Label callNativeEntry;
131 Label callJSFunctionEntry;
132 Label notCallable;
133 Register glueRegister = rdi;
134 Register callTargetRegister = rsi;
135 Register argvRegister = r9;
136 Register bitFieldRegister = r12;
137 Register tempRegister = r11; // can not be used to store any variable
138 __ Movq(Operand(callTargetRegister, TaggedObject::HCLASS_OFFSET), tempRegister); // hclass
139 __ Movq(Operand(tempRegister, JSHClass::BIT_FIELD_OFFSET), bitFieldRegister);
140 __ Cmpb(static_cast<int32_t>(JSType::JS_FUNCTION_FIRST), bitFieldRegister);
141 __ Jb(¬JSFunction);
142 __ Cmpb(static_cast<int32_t>(JSType::JS_FUNCTION_LAST), bitFieldRegister);
143 __ Jbe(&callJSFunctionEntry);
144 __ Bind(¬JSFunction);
145 {
146 __ Testq(static_cast<int64_t>(1ULL << JSHClass::CallableBit::START_BIT), bitFieldRegister);
147 __ Jz(¬Callable);
148 CallNativeEntry(assembler, true);
149 }
150 __ Bind(&callNativeEntry);
151 CallNativeEntry(assembler, false);
152 __ Bind(&callJSFunctionEntry);
153 {
154 Register callFieldRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_FIELD);
155 __ Btq(MethodLiteral::IsNativeBit::START_BIT, callFieldRegister);
156 __ Jb(&callNativeEntry);
157
158 __ Leaq(Operand(argvRegister, NUM_MANDATORY_JSFUNC_ARGS * JSTaggedValue::TaggedTypeSize()),
159 argvRegister);
160 JSCallCommonEntry(assembler, JSCallMode::CALL_ENTRY, FrameTransitionType::OTHER_TO_BASELINE_CHECK);
161 }
162 __ Bind(¬Callable);
163 {
164 __ Movq(glueRegister, rax); // glue
165 __ Pushq(0); // argc
166 Register runtimeIdRegister = r12;
167 __ Movq(kungfu::RuntimeStubCSigns::ID_ThrowNotCallableException, runtimeIdRegister);
168 __ Pushq(runtimeIdRegister); // runtimeId
169 Register trampolineIdRegister = r12;
170 Register trampolineRegister = r10;
171 __ Movq(kungfu::RuntimeStubCSigns::ID_CallRuntime, trampolineIdRegister);
172 __ Movq(Operand(rax, trampolineIdRegister, Times8, JSThread::GlueData::GetRTStubEntriesOffset(false)),
173 trampolineRegister);
174 __ Callq(trampolineRegister);
175 __ Addq(16, rsp); // 16: skip argc and runtime_id
176 __ Ret();
177 }
178 }
179
// Pushes an ASM_INTERPRETER_FRAME record for a fresh call.
// Slot order after this returns (top of stack first): callTarget, thisObj,
// acc (Hole), env (function's lexical env), jumpSizeAfterCall (0), fp, pc
// (start of the method's bytecode), prevSp, frame type.
// pcRegister and operatorRegister are clobbered as scratch.
void AsmInterpreterCall::PushFrameState(ExtendedAssembler *assembler, Register prevSpRegister, Register fpRegister,
    Register callTargetRegister, Register thisRegister, Register methodRegister, Register pcRegister,
    Register operatorRegister)
{
    __ Pushq(static_cast<int32_t>(FrameType::ASM_INTERPRETER_FRAME)); // frame type
    __ Pushq(prevSpRegister); // prevSp
    __ Movq(Operand(methodRegister, Method::NATIVE_POINTER_OR_BYTECODE_ARRAY_OFFSET), pcRegister);
    __ Pushq(pcRegister); // pc
    __ Pushq(fpRegister); // fp
    __ Pushq(0); // jumpSizeAfterCall
    __ Movq(Operand(callTargetRegister, JSFunction::LEXICAL_ENV_OFFSET), operatorRegister);
    __ Pushq(operatorRegister); // env
    __ Pushq(JSTaggedValue::Hole().GetRawData()); // acc
    __ Pushq(thisRegister); // thisObj
    __ Pushq(callTargetRegister); // callTarget
}
196
// Pushes an ASM_INTERPRETER_FRAME for a resumed generator. Differs from
// PushFrameState in that pc is advanced to the suspend point (bytecode base +
// bcOffset from the GeneratorContext) and env/acc are restored from the
// context rather than freshly initialized.
// pcRegister and operatorRegister are clobbered as scratch.
void AsmInterpreterCall::PushGeneratorFrameState(ExtendedAssembler *assembler, Register prevSpRegister,
    Register fpRegister, Register callTargetRegister, Register thisRegister, Register methodRegister,
    Register contextRegister, Register pcRegister, Register operatorRegister)
{
    __ Pushq(static_cast<int32_t>(FrameType::ASM_INTERPRETER_FRAME)); // frame type
    __ Pushq(prevSpRegister); // prevSp
    __ Movq(Operand(methodRegister, Method::NATIVE_POINTER_OR_BYTECODE_ARRAY_OFFSET), pcRegister);
    __ Movl(Operand(contextRegister, GeneratorContext::GENERATOR_BC_OFFSET_OFFSET), operatorRegister);
    __ Addq(operatorRegister, pcRegister); // pc = bytecode base + saved bc offset (resume point)
    __ Pushq(pcRegister); // pc
    __ Pushq(fpRegister); // fp
    __ Pushq(0); // jumpSizeAfterCall
    __ Movq(Operand(contextRegister, GeneratorContext::GENERATOR_LEXICALENV_OFFSET), operatorRegister);
    __ Pushq(operatorRegister); // env
    __ Movq(Operand(contextRegister, GeneratorContext::GENERATOR_ACC_OFFSET), operatorRegister);
    __ Pushq(operatorRegister); // acc
    __ Pushq(thisRegister); // thisObj
    __ Pushq(callTargetRegister); // callTarget
}
216
// Builds an ASM_INTERPRETER_ENTRY_FRAME on the stack.
// Callee-saved registers are preserved only when entering from C++ (not from
// an interpreter handler). The current leave frame from glue is saved into
// the entry frame so PopAsmInterpEntryFrame can restore it. The byte count of
// the C++-entry prologue is validated against
// FrameCompletionPos::X64CppToAsmInterp.
void AsmInterpreterCall::PushAsmInterpEntryFrame(ExtendedAssembler *assembler)
{
    size_t begin = __ GetCurrentPosition();
    if (!assembler->FromInterpreterHandler()) {
        __ PushCppCalleeSaveRegisters();
    }
    Register fpRegister = r10;
    __ Pushq(rdi); // save glue (rdi)
    __ PushAlignBytes();
    // fetch the previous leave frame pointer from glue
    __ Movq(Operand(rdi, JSThread::GlueData::GetLeaveFrameOffset(false)), fpRegister);
    // construct asm interpreter entry frame
    __ Pushq(rbp);
    __ Pushq(static_cast<int64_t>(FrameType::ASM_INTERPRETER_ENTRY_FRAME));
    __ Pushq(fpRegister); // saved leave frame
    __ Pushq(0); // pc
    if (!assembler->FromInterpreterHandler()) {
        size_t end = __ GetCurrentPosition();
        if ((end - begin) != FrameCompletionPos::X64CppToAsmInterp) {
            LOG_COMPILER(FATAL) << (end - begin) << " != " << FrameCompletionPos::X64CppToAsmInterp
                                << "This frame has been modified, and the offset CppToAsmInterp should be updated too.";
        }
    }
    __ Leaq(Operand(rsp, 3 * FRAME_SLOT_SIZE), rbp); // 3: 24 means skip frame type, prevSp and pc
}
241
// Tears down the frame built by PushAsmInterpEntryFrame in reverse order and
// writes the saved leave frame pointer back into glue. The byte count of the
// C++-exit epilogue is validated against FrameCompletionPos::X64AsmInterpToCpp.
void AsmInterpreterCall::PopAsmInterpEntryFrame(ExtendedAssembler *assembler)
{
    __ Addq(8, rsp); // 8: skip pc
    Register fpRegister = r10;
    __ Popq(fpRegister); // saved leave frame
    __ Addq(FRAME_SLOT_SIZE, rsp); // 8: skip frame type
    __ Popq(rbp);
    __ PopAlignBytes();
    __ Popq(rdi); // restore glue
    // write the leave frame pointer back so the thread's frame chain is intact
    __ Movq(fpRegister, Operand(rdi, JSThread::GlueData::GetLeaveFrameOffset(false)));
    size_t begin = __ GetCurrentPosition();
    if (!assembler->FromInterpreterHandler()) {
        __ PopCppCalleeSaveRegisters();
        size_t end = __ GetCurrentPosition();
        if ((end - begin) != FrameCompletionPos::X64AsmInterpToCpp) {
            LOG_COMPILER(FATAL) << (end - begin) << " != " << FrameCompletionPos::X64AsmInterpToCpp
                                << "This frame has been modified, and the offset AsmInterpToCp should be updated too.";
        }
    }
}
262
// Extracts the method's declared argument count (MethodLiteral::NumArgsBits)
// from callField into declaredNumArgsRegister: copy, shift the field down to
// bit 0, then mask. callFieldRegister itself is preserved.
void AsmInterpreterCall::GetDeclaredNumArgsFromCallField(ExtendedAssembler *assembler, Register callFieldRegister,
    Register declaredNumArgsRegister)
{
    __ Movq(callFieldRegister, declaredNumArgsRegister);
    __ Shrq(MethodLiteral::NumArgsBits::START_BIT, declaredNumArgsRegister);
    __ Andq(MethodLiteral::NumArgsBits::Mask() >> MethodLiteral::NumArgsBits::START_BIT, declaredNumArgsRegister);
}
270
// Extracts the method's vreg count (MethodLiteral::NumVregsBits) from
// callField into numVregsRegister: copy, shift the field down to bit 0, then
// mask. callFieldRegister itself is preserved.
void AsmInterpreterCall::GetNumVregsFromCallField(ExtendedAssembler *assembler, Register callFieldRegister,
    Register numVregsRegister)
{
    __ Movq(callFieldRegister, numVregsRegister);
    __ Shrq(MethodLiteral::NumVregsBits::START_BIT, numVregsRegister);
    __ Andq(MethodLiteral::NumVregsBits::Mask() >> MethodLiteral::NumVregsBits::START_BIT, numVregsRegister);
}
278
// Common entry used by all PushCall*AndDispatch stubs. Compares the actual
// argument count against the method's declared count: if equal, the fast path
// pushes the args directly; otherwise the slow path pads/truncates first. On
// stack overflow: dispatcher-originated modes tail-jump to the
// ThrowStackOverflowException bytecode stub with the interpreter registers
// re-established; other modes throw and return.
void AsmInterpreterCall::JSCallCommonEntry(ExtendedAssembler *assembler,
    JSCallMode mode, FrameTransitionType type)
{
    Label stackOverflow;
    Register glueRegister = __ GlueRegister();
    Register fpRegister = __ AvailableRegister1();
    Register callFieldRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_FIELD);
    Register argcRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG0);
    // save fp
    __ Movq(rsp, fpRegister);
    Register declaredNumArgsRegister = __ AvailableRegister2();
    GetDeclaredNumArgsFromCallField(assembler, callFieldRegister, declaredNumArgsRegister);

    Label slowPathEntry;
    Label fastPathEntry;
    Label pushCallThis;
    auto argc = kungfu::AssemblerModule::GetArgcFromJSCallMode(mode);
    // argc >= 0: count is statically known for this call mode;
    // argc < 0: count arrives at runtime in argcRegister
    if (argc >= 0) {
        __ Cmpq(argc, declaredNumArgsRegister);
    } else {
        __ Cmpq(argcRegister, declaredNumArgsRegister);
    }
    __ Jne(&slowPathEntry);
    __ Bind(&fastPathEntry);
    JSCallCommonFastPath(assembler, mode, &stackOverflow);
    __ Bind(&pushCallThis);
    PushCallThis(assembler, mode, &stackOverflow, type);
    __ Bind(&slowPathEntry);
    // slow path jumps back to fastPathEntry / pushCallThis once padding is done
    JSCallCommonSlowPath(assembler, mode, &fastPathEntry, &pushCallThis, &stackOverflow);

    __ Bind(&stackOverflow);
    if (kungfu::AssemblerModule::IsJumpToCallCommonEntry(mode)) {
        __ Movq(fpRegister, rsp); // discard any partially pushed arguments
        Register tempRegister = __ AvailableRegister1();
        // only glue and acc are useful in exception handler
        if (glueRegister != r13) {
            __ Movq(glueRegister, r13);
        }
        Register acc = rsi;
        __ Movq(JSTaggedValue::VALUE_EXCEPTION, acc);
        Register methodRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::METHOD);
        Register callTargetRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_TARGET);
        // Reload pc to make sure stack trace is right
        __ Movq(callTargetRegister, tempRegister);
        __ Movq(Operand(methodRegister, Method::NATIVE_POINTER_OR_BYTECODE_ARRAY_OFFSET), r12); // pc: r12
        // Reload constpool and profileInfo to make sure gc map work normally
        __ Movq(Operand(tempRegister, JSFunction::RAW_PROFILE_TYPE_INFO_OFFSET), r14);
        __ Movq(Operand(r14, ProfileTypeInfoCell::VALUE_OFFSET), r14); // profileTypeInfo: r14
        __ Movq(Operand(methodRegister, Method::CONSTANT_POOL_OFFSET), rbx); // constantPool: rbx

        // tail-jump to the ThrowStackOverflowException bytecode stub
        __ Movq(kungfu::BytecodeStubCSigns::ID_ThrowStackOverflowException, tempRegister);
        __ Movq(Operand(glueRegister, tempRegister, Times8, JSThread::GlueData::GetBCStubEntriesOffset(false)),
                tempRegister);
        __ Jmp(tempRegister);
    } else {
        [[maybe_unused]] TempRegisterScope scope(assembler);
        Register temp = __ TempRegister();
        ThrowStackOverflowExceptionAndReturn(assembler, glueRegister, fpRegister, temp);
    }
}
339
340 // void PushCallArgsxAndDispatch(uintptr_t glue, uintptr_t sp, uint64_t callTarget, uintptr_t method,
341 // uint64_t callField, ...)
342 // GHC calling convention
343 // Input1: for callarg0/1/2/3 Input2: for callrange
344 // %r13 - glue // %r13 - glue
345 // %rbp - sp // %rbp - sp
346 // %r12 - callTarget // %r12 - callTarget
347 // %rbx - method // %rbx - method
348 // %r14 - callField // %r14 - callField
349 // %rsi - arg0 // %rsi - actualArgc
350 // %rdi - arg1 // %rdi - argv
351 // %r8 - arg2
// The stubs below are thin adapters: each binds its RTSTUB entry point and
// funnels into JSCallCommonEntry with the JSCallMode matching the bytecode
// call form. All use FrameTransitionType::OTHER_TO_OTHER.
void AsmInterpreterCall::PushCallThisRangeAndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallThisRangeAndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_THIS_WITH_ARGV, FrameTransitionType::OTHER_TO_OTHER);
}

void AsmInterpreterCall::PushCallRangeAndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallRangeAndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_WITH_ARGV, FrameTransitionType::OTHER_TO_OTHER);
}

void AsmInterpreterCall::PushCallNewAndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallNewAndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_CONSTRUCTOR_WITH_ARGV, FrameTransitionType::OTHER_TO_OTHER);
}

void AsmInterpreterCall::PushSuperCallAndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushSuperCallAndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::SUPER_CALL_WITH_ARGV, FrameTransitionType::OTHER_TO_OTHER);
}

void AsmInterpreterCall::PushCallArgs3AndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallArgs3AndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_ARG3, FrameTransitionType::OTHER_TO_OTHER);
}

void AsmInterpreterCall::PushCallArgs2AndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallArgs2AndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_ARG2, FrameTransitionType::OTHER_TO_OTHER);
}

void AsmInterpreterCall::PushCallArg1AndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallArg1AndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_ARG1, FrameTransitionType::OTHER_TO_OTHER);
}

void AsmInterpreterCall::PushCallArg0AndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallArg0AndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_ARG0, FrameTransitionType::OTHER_TO_OTHER);
}
void AsmInterpreterCall::PushCallThisArg0AndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallThisArg0AndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_THIS_ARG0, FrameTransitionType::OTHER_TO_OTHER);
}

void AsmInterpreterCall::PushCallThisArg1AndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallThisArg1AndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_THIS_ARG1, FrameTransitionType::OTHER_TO_OTHER);
}

void AsmInterpreterCall::PushCallThisArgs2AndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallThisArgs2AndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_THIS_ARG2, FrameTransitionType::OTHER_TO_OTHER);
}

void AsmInterpreterCall::PushCallThisArgs3AndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallThisArgs3AndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_THIS_ARG3, FrameTransitionType::OTHER_TO_OTHER);
}
422
// Fast path: actual argc equals the declared count, so arguments are pushed
// with no undefined padding. Negative (runtime) argc modes copy args from
// argv with a stack check; fixed-argc modes push up to three register-passed
// args in reverse order (arg2, arg1, arg0).
void AsmInterpreterCall::JSCallCommonFastPath(ExtendedAssembler *assembler, JSCallMode mode, Label *stackOverflow)
{
    Register glueRegister = __ GlueRegister();
    Register arg0 = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG0);
    Register arg1 = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG1);

    Label pushCallThis;
    auto argc = kungfu::AssemblerModule::GetArgcFromJSCallMode(mode);
    // call range
    if (argc < 0) {
        Register argcRegister = arg0;
        Register argvRegister = arg1;
        __ Cmpq(0, argcRegister);
        __ Jbe(&pushCallThis); // nothing to push when the runtime argc is 0
        // fall through
        {
            [[maybe_unused]] TempRegisterScope scope(assembler);
            Register opRegister = __ TempRegister();
            Register op2Register = __ AvailableRegister2();
            PushArgsWithArgvAndCheckStack(assembler, glueRegister, argcRegister, argvRegister, opRegister, op2Register,
                                          stackOverflow);
        }
        __ Bind(&pushCallThis);
    } else if (argc > 0) {
        if (argc > 2) { // 2: call arg2
            // CALL_THIS_ARG3_WITH_RETURN carries arg2 in the dedicated C++
            // JS-call register rather than the dispatcher's ARG2 slot
            if (mode == JSCallMode::CALL_THIS_ARG3_WITH_RETURN) {
                Register arg2 = __ CppJSCallAvailableRegister1();
                __ Pushq(arg2);
            } else {
                Register arg2 = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG2);
                __ Pushq(arg2);
            }
        }
        if (argc > 1) {
            __ Pushq(arg1);
        }
        if (argc > 0) {
            __ Pushq(arg0);
        }
    }
}
464
// Slow path: actual argc differs from the declared count. If the method has
// the HaveExtra flag, the real argc is pushed first. When declared > actual,
// the gap is padded with undefined and control re-enters the fast path. When
// declared < actual (and no extra args are kept), only the first `declared`
// arguments are pushed and control jumps to pushCallThis.
void AsmInterpreterCall::JSCallCommonSlowPath(ExtendedAssembler *assembler, JSCallMode mode,
    Label *fastPathEntry, Label *pushCallThis, Label *stackOverflow)
{
    Register glueRegister = __ GlueRegister();
    Register callFieldRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_FIELD);
    Register argcRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG0);
    Register arg0 = argcRegister;
    Register arg1 = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG1);
    Label noExtraEntry;
    Label pushArgsEntry;

    auto argc = kungfu::AssemblerModule::GetArgcFromJSCallMode(mode);
    Register declaredNumArgsRegister = __ AvailableRegister2();
    __ Testq(MethodLiteral::HaveExtraBit::Mask(), callFieldRegister);
    __ Jz(&noExtraEntry);
    // extra entry: push the actual argc so the callee can read it
    {
        [[maybe_unused]] TempRegisterScope scope(assembler);
        Register tempArgcRegister = __ TempRegister();
        if (argc >= 0) {
            __ PushArgc(argc, tempArgcRegister);
        } else {
            __ PushArgc(argcRegister, tempArgcRegister);
        }
    }
    __ Bind(&noExtraEntry);
    {
        if (argc == 0) {
            // no actual args: pad all declared slots with undefined
            Register op1 = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG1);
            [[maybe_unused]] TempRegisterScope scope(assembler);
            Register op2 = __ TempRegister();
            PushUndefinedWithArgcAndCheckStack(assembler, glueRegister, declaredNumArgsRegister, op1, op2,
                                               stackOverflow);
            __ Jmp(fastPathEntry);
            return;
        }
        // diff = declared - actual; pad with undefined when positive
        [[maybe_unused]] TempRegisterScope scope(assembler);
        Register diffRegister = __ TempRegister();
        __ Movq(declaredNumArgsRegister, diffRegister);
        if (argc >= 0) {
            __ Subq(argc, diffRegister);
        } else {
            __ Subq(argcRegister, diffRegister);
        }
        __ Cmpq(0, diffRegister);
        __ Jle(&pushArgsEntry);
        PushUndefinedWithArgc(assembler, diffRegister);
        __ Jmp(fastPathEntry);
    }
    // declared < actual
    __ Bind(&pushArgsEntry);
    // with extra args every actual arg is kept, so the fast path applies as-is
    __ Testq(MethodLiteral::HaveExtraBit::Mask(), callFieldRegister);
    __ Jnz(fastPathEntry);
    // argc == 1 and declared < 1: declared must be 0, nothing to push
    if (argc == 1) {
        __ Jmp(pushCallThis);
        return;
    }
    // declared < actual
    __ Cmpq(0, declaredNumArgsRegister);
    __ Je(pushCallThis);
    if (argc < 0) {
        // runtime argc: push only the first `declared` args from argv
        Register argvRegister = arg1;
        [[maybe_unused]] TempRegisterScope scope(assembler);
        Register opRegister = __ TempRegister();
        PushArgsWithArgvAndCheckStack(assembler, glueRegister, declaredNumArgsRegister, argvRegister, opRegister,
                                      opRegister, stackOverflow);
    } else if (argc > 0) {
        Label pushArgs0;
        if (argc > 2) { // 2: call arg2
            // declared is 2 or 1 here
            __ Cmpq(1, declaredNumArgsRegister);
            __ Je(&pushArgs0);
            __ Pushq(arg1);
        }
        if (argc > 1) {
            __ Bind(&pushArgs0);
            // declared is 1 here
            __ Pushq(arg0);
        }
    }
    __ Jmp(pushCallThis);
}
547
GetThisRegsiter(ExtendedAssembler * assembler,JSCallMode mode,Register defaultRegister)548 Register AsmInterpreterCall::GetThisRegsiter(ExtendedAssembler *assembler, JSCallMode mode, Register defaultRegister)
549 {
550 switch (mode) {
551 case JSCallMode::CALL_GETTER:
552 case JSCallMode::CALL_THIS_ARG0:
553 return __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG0);
554 case JSCallMode::CALL_SETTER:
555 case JSCallMode::CALL_THIS_ARG1:
556 return __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG1);
557 case JSCallMode::CALL_THIS_ARG2:
558 case JSCallMode::CALL_CONSTRUCTOR_WITH_ARGV:
559 case JSCallMode::CALL_THIS_WITH_ARGV:
560 case JSCallMode::SUPER_CALL_WITH_ARGV:
561 case JSCallMode::SUPER_CALL_SPREAD_WITH_ARGV:
562 return __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG2);
563 case JSCallMode::CALL_THIS_ARG3:
564 return __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG3);
565 case JSCallMode::CALL_ENTRY:
566 case JSCallMode::CALL_FROM_AOT: {
567 Register argvRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG1);
568 __ Movq(Operand(argvRegister, -FRAME_SLOT_SIZE), defaultRegister); // 8: this is just before the argv list
569 return defaultRegister;
570 }
571 case JSCallMode::CALL_THIS_ARG3_WITH_RETURN:
572 return __ CppJSCallAvailableRegister2();
573 case JSCallMode::CALL_THIS_ARG2_WITH_RETURN:
574 case JSCallMode::CALL_THIS_ARGV_WITH_RETURN: {
575 return __ CppJSCallAvailableRegister1();
576 }
577 default:
578 LOG_ECMA(FATAL) << "this branch is unreachable";
579 UNREACHABLE();
580 }
581 return rInvalid;
582 }
583
GetNewTargetRegsiter(ExtendedAssembler * assembler,JSCallMode mode,Register defaultRegister)584 Register AsmInterpreterCall::GetNewTargetRegsiter(ExtendedAssembler *assembler, JSCallMode mode,
585 Register defaultRegister)
586 {
587 switch (mode) {
588 case JSCallMode::CALL_CONSTRUCTOR_WITH_ARGV:
589 case JSCallMode::CALL_THIS_WITH_ARGV:
590 return __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_TARGET);
591 case JSCallMode::SUPER_CALL_WITH_ARGV:
592 case JSCallMode::SUPER_CALL_SPREAD_WITH_ARGV:
593 return __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG3);
594 case JSCallMode::CALL_FROM_AOT:
595 case JSCallMode::CALL_ENTRY: {
596 Register argvRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG1);
597 // -2: new Target offset
598 __ Movq(Operand(argvRegister, -2 * FRAME_SLOT_SIZE), defaultRegister);
599 return defaultRegister;
600 }
601 default:
602 LOG_ECMA(FATAL) << "this branch is unreachable";
603 UNREACHABLE();
604 }
605 return rInvalid;
606 }
607
608 // Input: %r14 - callField
609 // %rdi - argv
// Pushes the optional this / newTarget / callTarget slots, gated by the
// method's callField flags (HaveThis/HaveNewTarget/HaveFunc), then falls into
// PushVregs. Modes without a this-argument push undefined when the method
// declares a this slot.
void AsmInterpreterCall::PushCallThis(ExtendedAssembler *assembler,
    JSCallMode mode, Label *stackOverflow, FrameTransitionType type)
{
    Register callFieldRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_FIELD);
    Register callTargetRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_TARGET);
    Register thisRegister = __ AvailableRegister2();

    Label pushVregs;
    Label pushNewTarget;
    Label pushCallTarget;
    bool haveThis = kungfu::AssemblerModule::JSModeHaveThisArg(mode);
    bool haveNewTarget = kungfu::AssemblerModule::JSModeHaveNewTargetArg(mode);
    if (!haveThis) {
        __ Movq(JSTaggedValue::VALUE_UNDEFINED, thisRegister); // default this: undefined
    } else {
        // materialize this into thisRegister (PushVregs reads it from AvailableRegister2)
        Register thisArgRegister = GetThisRegsiter(assembler, mode, thisRegister);
        if (thisRegister != thisArgRegister) {
            __ Movq(thisArgRegister, thisRegister);
        }
    }
    // no call-type bits set: none of this/newTarget/callTarget are stored
    __ Testb(CALL_TYPE_MASK, callFieldRegister);
    __ Jz(&pushVregs);
    // fall through
    __ Testq(MethodLiteral::HaveThisBit::Mask(), callFieldRegister);
    __ Jz(&pushNewTarget);
    // push this
    if (!haveThis) {
        __ Pushq(JSTaggedValue::Undefined().GetRawData());
    } else {
        __ Pushq(thisRegister);
    }
    // fall through
    __ Bind(&pushNewTarget);
    {
        __ Testq(MethodLiteral::HaveNewTargetBit::Mask(), callFieldRegister);
        __ Jz(&pushCallTarget);
        if (!haveNewTarget) {
            __ Pushq(JSTaggedValue::Undefined().GetRawData());
        } else {
            [[maybe_unused]] TempRegisterScope scope(assembler);
            Register defaultRegister = __ TempRegister();
            Register newTargetRegister = GetNewTargetRegsiter(assembler, mode, defaultRegister);
            __ Pushq(newTargetRegister);
        }
    }
    // fall through
    __ Bind(&pushCallTarget);
    {
        __ Testq(MethodLiteral::HaveFuncBit::Mask(), callFieldRegister);
        __ Jz(&pushVregs);
        __ Pushq(callTargetRegister);
    }
    // fall through
    __ Bind(&pushVregs);
    {
        PushVregs(assembler, stackOverflow, type);
    }
}
668
669 // Input: %rbp - sp
670 // %r12 - callTarget
671 // %rbx - method
672 // %r14 - callField
673 // %rdx - jumpSizeAfterCall
674 // %r10 - fp
// Allocates the vreg area (filled with undefined) above the pushed arguments,
// pushes the ASM_INTERPRETER_FRAME state and dispatches to the bytecode. For
// *_TO_BASELINE_CHECK transitions: if the function already has baseline code
// (its BASELINECODE slot is neither undefined nor hole), control jumps
// directly into that machine code instead of interpreting.
void AsmInterpreterCall::PushVregs(ExtendedAssembler *assembler,
    Label *stackOverflow, FrameTransitionType type)
{
    Register glueRegister = __ GlueRegister();
    Register prevSpRegister = rbp;
    Register callTargetRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_TARGET);
    Register methodRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::METHOD);
    Register callFieldRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_FIELD);
    Register fpRegister = __ AvailableRegister1();
    Register thisRegister = __ AvailableRegister2();

    Label pushFrameState;

    [[maybe_unused]] TempRegisterScope scope(assembler);
    Register tempRegister = __ TempRegister();
    // args register can reused now.
    Register pcRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG0);
    Register numVregsRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG1);
    GetNumVregsFromCallField(assembler, callFieldRegister, numVregsRegister);
    __ Cmpq(0, numVregsRegister);
    __ Jz(&pushFrameState); // no vregs to initialize
    // callField has been consumed; its register is reused as scratch below
    Register temp2Register = __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_FIELD); // reuse
    PushUndefinedWithArgcAndCheckStack(assembler, glueRegister, numVregsRegister, tempRegister, temp2Register,
                                       stackOverflow);
    // fall through
    Register newSpRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG1);
    __ Bind(&pushFrameState);
    {
        StackOverflowCheck(assembler, glueRegister, numVregsRegister, tempRegister, temp2Register, stackOverflow);
        __ Movq(rsp, newSpRegister);

        PushFrameState(assembler, prevSpRegister, fpRegister,
                       callTargetRegister, thisRegister, methodRegister, pcRegister, tempRegister);
    }
    if (type == FrameTransitionType::OTHER_TO_BASELINE_CHECK ||
        type == FrameTransitionType::BASELINE_TO_BASELINE_CHECK) {
        __ Movq(Operand(callTargetRegister, JSFunction::BASELINECODE_OFFSET), tempRegister);
        Label baselineCodeUndefined;
        __ Cmpq(JSTaggedValue::Undefined().GetRawData(), tempRegister);
        __ Je(&baselineCodeUndefined);

        // check is compiling
        __ Cmpq(JSTaggedValue::Hole().GetRawData(), tempRegister);
        __ Je(&baselineCodeUndefined);

        Label stackAligned;
        // align 16 bytes
        __ Testq(15, rsp); // 15: low 4 bits must be 0b0000
        __ Jz(&stackAligned);
        __ PushAlignBytes();
        __ Bind(&stackAligned);

        __ Movq(Operand(tempRegister, MachineCode::FUNCADDR_OFFSET), tempRegister);
        // baseline code expects glue in r13 and method in rbx (see GHC convention notes above)
        if (glueRegister != r13) {
            __ Movq(glueRegister, r13);
        }
        if (methodRegister != rbx) {
            __ Movq(methodRegister, rbx);
        }
        const int32_t pcOffsetFromSP = -24; // -24: 3 slots, frameType, prevFrame, pc
        Register temp3Register = r10;
        // NOTE(review): the frame's pc slot is set to UINT64_MAX before entering
        // baseline code — presumably a baseline-frame marker; confirm against the
        // stack walker before changing.
        __ Movabs(std::numeric_limits<uint64_t>::max(), temp3Register);
        __ Movq(temp3Register, Operand(newSpRegister, pcOffsetFromSP));
        __ Movq(newSpRegister, rbp);
        __ Jmp(tempRegister);

        __ Bind(&baselineCodeUndefined);
    }
    DispatchCall(assembler, pcRegister, newSpRegister, callTargetRegister, methodRegister);
}
745
746 // Input: %r13 - glue
747 // %rbp - sp
748 // %r12 - callTarget
749 // %rbx - method
// Loads the asm-interpreter GHC register state (glue/r13, sp/rbp, pc/r12,
// constantPool/rbx, profileTypeInfo/r14, acc/rsi, hotnessCounter/rdi) and
// tail-jumps to the bytecode handler selected by the first opcode byte at
// pcRegister. accRegister is optional (rInvalid means "no acc supplied";
// default presumably set in the header — confirm), in which case acc is
// seeded with Hole.
void AsmInterpreterCall::DispatchCall(ExtendedAssembler *assembler, Register pcRegister,
    Register newSpRegister, Register callTargetRegister, Register methodRegister, Register accRegister)
{
    Register glueRegister = __ GlueRegister();
    Label dispatchCall;
    // align 16 bytes; NOTE(review): condition is Jnz here (push when aligned),
    // the inverse of other call sites — presumably the handlers expect the
    // opposite rsp parity, confirm against the bytecode stub prologue.
    __ Testq(15, rsp); // 15: low 4 bits must be 0b0000
    __ Jnz(&dispatchCall);
    __ PushAlignBytes();
    __ Bind(&dispatchCall);
    // profileTypeInfo: r14
    __ Movq(Operand(callTargetRegister, JSFunction::RAW_PROFILE_TYPE_INFO_OFFSET), r14);
    __ Movq(Operand(r14, ProfileTypeInfoCell::VALUE_OFFSET), r14);
    // glue may arrive in rdi (C++ convention); GHC convention wants it in r13
    if (glueRegister != r13) {
        __ Movq(glueRegister, r13);
    }
    // sp: rbp
    __ Movq(newSpRegister, rbp);
    // hotnessCounter: rdi
    __ Movzwq(Operand(methodRegister, Method::LITERAL_INFO_OFFSET), rdi);
    // constantPool: rbx
    __ Movq(Operand(methodRegister, Method::CONSTANT_POOL_OFFSET), rbx);
    // pc: r12
    if (pcRegister != r12) {
        __ Movq(pcRegister, r12);
    }

    Register bcIndexRegister = rax;
    Register tempRegister = __ AvailableRegister1();
    __ Movzbq(Operand(pcRegister, 0), bcIndexRegister);
    // acc: rsi
    if (accRegister != rInvalid) {
        ASSERT(accRegister == rsi);
    } else {
        // no accumulator supplied by the caller — seed with Hole
        __ Movq(JSTaggedValue::Hole().GetRawData(), rsi);
    }
    // index the BC stub table by opcode and tail-jump to the handler
    __ Movq(Operand(r13, bcIndexRegister, Times8, JSThread::GlueData::GetBCStubEntriesOffset(false)), tempRegister);
    __ Jmp(tempRegister);
}
790
791 // uint64_t PushCallRangeAndDispatchNative(uintptr_t glue, uint32_t argc, JSTaggedType calltarget, uintptr_t argv[])
792 // c++ calling convention call js function
793 // Input: %rdi - glue
794 // %rsi - nativeCode
795 // %rdx - func
796 // %rcx - thisValue
797 // %r8 - argc
798 // %r9 - argV (...)
// Stub entry: ordinary (non-construct) native call with an argv range.
void AsmInterpreterCall::PushCallRangeAndDispatchNative(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallRangeAndDispatchNative));
    CallNativeWithArgv(assembler, false); // callNew = false: push undefined as new.target
}
804
// Stub entry: construct call — the callee itself is pushed as new.target.
void AsmInterpreterCall::PushCallNewAndDispatchNative(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallNewAndDispatchNative));
    CallNativeWithArgv(assembler, true); // callNew = true
}
810
// Stub entry: construct call with an explicit new.target taken from argv.
void AsmInterpreterCall::PushNewTargetAndDispatchNative(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushNewTargetAndDispatchNative));
    CallNativeWithArgv(assembler, true, true); // callNew = true, hasNewTarget = true
}
816
// Builds a BUILTIN_FRAME_WITH_ARGV frame, copies the JS arguments plus
// <this, new.target, callTarget> onto the stack, constructs an
// EcmaRuntimeCallInfo (rdi = rsp pointing at <thread, argc, args...>) and
// calls the native code. On stack overflow, a dedicated overflow frame is
// built and ThrowStackOverflowException is invoked instead.
// callNew:      construct call — new.target is pushed instead of undefined.
// hasNewTarget: new.target is read from the caller's argv rather than func.
void AsmInterpreterCall::CallNativeWithArgv(ExtendedAssembler *assembler, bool callNew, bool hasNewTarget)
{
    Register glue = rdi;
    Register nativeCode = rsi;
    Register func = rdx;
    Register thisValue = rcx;
    Register numArgs = r8;
    Register stackArgs = r9;
    Register temporary = rax;
    Register temporary2 = r11;
    Register opNumArgs = r10;
    Label aligned;
    Label pushThis;
    Label stackOverflow;

    // rbp update is deferred (returns false) until the stack args are pushed
    bool isFrameComplete = PushBuiltinFrame(assembler, glue, FrameType::BUILTIN_FRAME_WITH_ARGV);

    __ Push(numArgs);
    __ Cmpq(0, numArgs);
    __ Jz(&pushThis);
    __ Movq(numArgs, opNumArgs);
    PushArgsWithArgvAndCheckStack(assembler, glue, opNumArgs, stackArgs, temporary, temporary2, &stackOverflow);

    __ Bind(&pushThis);
    __ Push(thisValue);
    // new.target
    if (callNew) {
        if (hasNewTarget) {
            Register newTarget = r12;
            // 5: skip frame type, numArgs, func, newTarget and this
            __ Movq(Operand(rsp, numArgs, Times8, 5 * FRAME_SLOT_SIZE), newTarget);
            __ Pushq(newTarget);
        } else {
            // plain `new`: the callee is its own new.target
            __ Pushq(func);
        }
    } else {
        __ Pushq(JSTaggedValue::Undefined().GetRawData());
    }
    __ Pushq(func);
    if (!isFrameComplete) {
        // all stack args pushed — now it is safe to publish rbp (cpu profiler)
        // 5: skip frame type, numArgs, func, newTarget and this
        __ Leaq(Operand(rsp, numArgs, Times8, 5 * FRAME_SLOT_SIZE), rbp);
    }
    __ Movq(rsp, stackArgs);

    // push argc (actual args + callTarget/newTarget/this)
    __ Addl(NUM_MANDATORY_JSFUNC_ARGS, numArgs);
    __ Pushq(numArgs);
    // push thread
    __ Pushq(glue);
    // EcmaRuntimeCallInfo
    __ Movq(rsp, rdi);

    __ Testq(0xf, rsp); // 0xf: 0b1111, low 4 bits must be 0 for 16-byte alignment
    __ Jz(&aligned, Distance::Near);
    __ PushAlignBytes();

    __ Bind(&aligned);
    CallNativeInternal(assembler, nativeCode);
    __ Ret();

    __ Bind(&stackOverflow);
    {
        Label aligneThrow;
        __ Movq(Operand(glue, JSThread::GlueData::GetLeaveFrameOffset(false)), rsp);
        __ Pushq(static_cast<int32_t>(FrameType::BUILTIN_FRAME_WITH_ARGV_STACK_OVER_FLOW_FRAME)); // frame type
        __ Pushq(0); // argc
        __ Pushq(JSTaggedValue::VALUE_UNDEFINED); // this
        __ Pushq(JSTaggedValue::VALUE_UNDEFINED); // newTarget
        __ Pushq(JSTaggedValue::VALUE_UNDEFINED); // callTarget
        // 5: skip frame type, argc, this, newTarget and callTarget
        // +----------------------------------------------------------------+ <---- rbp = rsp + 5 * frame_slot_size
        // | FrameType = BUILTIN_FRAME_WITH_ARGV_STACK_OVER_FLOW_FRAME      |
        // |----------------------------------------------------------------|
        // |                            argc = 0                            |
        // |----------------------------------------------------------------|
        // |                        this = undefined                        |
        // |----------------------------------------------------------------|
        // |                      newTarget = undefined                     |
        // |----------------------------------------------------------------|
        // |                     callTarget = undefined                     |
        // +----------------------------------------------------------------+ <---- rsp
        __ Leaq(Operand(rsp, 5 * FRAME_SLOT_SIZE), rbp);

        __ Testq(0xf, rsp); // 0xf: 0b1111, low 4 bits must be 0 for 16-byte alignment
        __ Jz(&aligneThrow, Distance::Near);
        __ PushAlignBytes();

        __ Bind(&aligneThrow);
        Register trampolineIdRegister = r9;
        Register trampolineRegister = r10;
        __ Movq(kungfu::RuntimeStubCSigns::ID_ThrowStackOverflowException, trampolineIdRegister);
        __ Movq(Operand(glue, trampolineIdRegister, Times8, JSThread::GlueData::GetRTStubEntriesOffset(false)),
            trampolineRegister);
        __ Callq(trampolineRegister);

        // resume rsp
        __ Movq(rbp, rsp);
        __ Pop(rbp);
        __ Ret();
    }
}
919
// Entry for calling a native (builtin) method from the asm interpreter.
// For non-proxy calls, fast builtins (IsFastBuiltinBit set in callField) are
// routed to CallFastBuiltin; otherwise a BUILTIN_ENTRY_FRAME is built and the
// native code is invoked with rdi pointing at the EcmaRuntimeCallInfo
// (thread/argc slots inside the caller-provided argv area).
void AsmInterpreterCall::CallNativeEntry(ExtendedAssembler *assembler, bool isJsProxy)
{
    Label callFastBuiltin;
    Label callNativeBuiltin;
    Register glue = rdi;
    Register argv = r9;
    Register function = rsi;
    Register nativeCode = r10;
    if (isJsProxy) {
        Register method = rdx;
        __ Movq(Operand(method, Method::NATIVE_POINTER_OR_BYTECODE_ARRAY_OFFSET), nativeCode); // get native pointer
    } else {
        Register callFieldRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_FIELD);
        __ Movq(Operand(function, JSFunctionBase::CODE_ENTRY_OFFSET), nativeCode); // get native pointer
        __ Btq(MethodLiteral::IsFastBuiltinBit::START_BIT, callFieldRegister); // CF = fast-builtin bit
        __ Jb(&callFastBuiltin);
    }

    __ Bind(&callNativeBuiltin);
    __ PushAlignBytes();
    __ Push(function);
    // 3: 24 means skip thread & argc & returnAddr
    __ Subq(3 * FRAME_SLOT_SIZE, rsp);
    PushBuiltinFrame(assembler, glue, FrameType::BUILTIN_ENTRY_FRAME);
    __ Movq(argv, r11);
    // 2: 16 means skip numArgs & thread
    __ Subq(2 * FRAME_SLOT_SIZE, r11);
    // EcmaRuntimeCallInfo
    __ Movq(r11, rdi);

    CallNativeInternal(assembler, nativeCode);

    // 5: pop the 3 reserved slots plus the pushed function and align slot
    __ Addq(5 * FRAME_SLOT_SIZE, rsp);
    __ Ret();

    __ Bind(&callFastBuiltin);
    CallFastBuiltin(assembler, &callNativeBuiltin);
}
959
// Fast path for builtins with a dedicated stub: extracts the builtin id from
// the method's extra literal info and, when the id is below the constructor
// stubs and argc <= 3, pushes exactly three argument slots (padding with
// undefined) and calls the builtin stub directly. Falls back to
// callNativeBuiltin otherwise.
void AsmInterpreterCall::CallFastBuiltin(ExtendedAssembler *assembler, Label *callNativeBuiltin)
{
    Label arg1;
    Label arg2;
    Label arg3;
    Label callEntry;
    Register glue = rdi;
    Register argc = r8;
    Register argv = r9;
    Register method = rdx;
    Register function = rsi;
    Register nativeCode = r10;
    Register temp = rax;
    Register temp1 = r11;
    // get builtins id
    __ Movq(Operand(method, Method::EXTRA_LITERAL_INFO_OFFSET), temp1);
    __ Shr(MethodLiteral::BuiltinIdBits::START_BIT, temp1);
    __ Andl((1LU << MethodLiteral::BuiltinIdBits::SIZE) - 1, temp1);

    // constructor stubs are not handled here
    __ Cmpl(static_cast<int32_t>(kungfu::BuiltinsStubCSigns::BUILTINS_CONSTRUCTOR_STUB_FIRST), temp1);
    __ Jge(callNativeBuiltin);

    __ Cmpq(Immediate(3), argc); // 3: number of args
    __ Jg(callNativeBuiltin);

    // create frame
    PushAsmBridgeFrame(assembler);

    // register args
    __ Movq(function, temp);
    __ Movq(nativeCode, rsi); // nativeCode is rsi
    __ Movq(temp, rdx); // fun is rdx
    __ Movq(argv, temp); // temp is argv
    __ Movq(argc, r9); // argc is r9
    __ Movq(Operand(temp, FRAME_SLOT_SIZE), rcx); // get new target
    __ Movq(Operand(temp, FRAME_SLOT_SIZE * 2), r8); // 2: skip func & new target to get this target

    // push arg2, arg1, arg0 (highest first), padding missing slots with undefined
    __ Cmp(Immediate(0), r9);
    __ Jne(&arg1);
    __ Pushq(JSTaggedValue::VALUE_UNDEFINED);
    __ Pushq(JSTaggedValue::VALUE_UNDEFINED);
    __ Pushq(JSTaggedValue::VALUE_UNDEFINED);
    __ Jmp(&callEntry);
    __ Bind(&arg1);
    {
        __ Cmp(Immediate(1), r9);
        __ Jne(&arg2);
        __ Pushq(JSTaggedValue::VALUE_UNDEFINED);
        __ Pushq(JSTaggedValue::VALUE_UNDEFINED);
        __ Movq(Operand(temp, FRAME_SLOT_SIZE * 3), r10); // 3: get arg0
        __ Pushq(r10);
        __ Jmp(&callEntry);
    }
    __ Bind(&arg2);
    {
        __ Cmp(Immediate(2), r9); // 2: number of args
        __ Jne(&arg3);
        __ Pushq(JSTaggedValue::VALUE_UNDEFINED);
        __ Movq(Operand(temp, FRAME_SLOT_SIZE * 4), r10); // 4: get arg1
        __ Pushq(r10);
        __ Movq(Operand(temp, FRAME_SLOT_SIZE * 3), r10); // 3: get arg0
        __ Pushq(r10);
        __ Jmp(&callEntry);
    }
    __ Bind(&arg3);
    {
        __ Movq(Operand(temp, FRAME_SLOT_SIZE * 5), r10); // 5: get arg2
        __ Pushq(r10);
        __ Movq(Operand(temp, FRAME_SLOT_SIZE * 4), r10); // 4: get arg1
        __ Pushq(r10);
        __ Movq(Operand(temp, FRAME_SLOT_SIZE * 3), r10); // 3: get arg0
        __ Pushq(r10);
        __ Jmp(&callEntry);
    }
    __ Bind(&callEntry);
    {
        // call the builtin stub indexed by builtin id, then tear down the frame
        __ Movq(Operand(glue, temp1, Times8, JSThread::GlueData::GetBuiltinsStubEntriesOffset(false)), temp1);
        __ Callq(temp1);
        __ Addq(QUADRUPLE_SLOT_SIZE, rsp);
        __ Pop(rbp);
        __ Ret();
    }
}
1043
// uint64_t PushCallArgsAndDispatchNative(uintptr_t codeAddress, uintptr_t glue, uint32_t argc, ...)
// webkit_jscc calling convention: calls the runtime function identified by runtime_id (C ABI)
1046 // Input: %rax - codeAddress
1047 // stack layout: sp + N*8 argvN
1048 // ........
1049 // sp + 24: argv1
1050 // sp + 16: argv0
1051 // sp + 8: actualArgc
1052 // sp: thread
1053 // construct Native Leave Frame
1054 // +--------------------------+
1055 // | argV[N - 1] |
1056 // |--------------------------|
1057 // | . . . . |
1058 // |--------------------------+
1059 // | argV[2]=this |
1060 // +--------------------------+
1061 // | argV[1]=new-target |
1062 // +--------------------------+
1063 // | argV[0]=call-target |
1064 // +--------------------------+ ---------
1065 // | argc | ^
1066 // |--------------------------| |
1067 // | thread | |
1068 // |--------------------------| |
1069 // | returnAddr | BuiltinFrame
1070 // |--------------------------| |
1071 // | callsiteFp | |
1072 // |--------------------------| |
1073 // | frameType | v
1074 // +--------------------------+ ---------
1075
// Args already live on the caller's stack (webkit_jscc layout documented
// above): loads glue from the thread slot, builds a BUILTIN_FRAME, points rdi
// at the <argc, argv...> area as the EcmaRuntimeCallInfo and calls the native
// code held in rax.
void AsmInterpreterCall::PushCallArgsAndDispatchNative(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallArgsAndDispatchNative));
    Register nativeCode = rax;
    Register glue = rdi;

    __ Movq(Operand(rsp, FRAME_SLOT_SIZE), glue); // 8: glue
    PushBuiltinFrame(assembler, glue, FrameType::BUILTIN_FRAME);
    __ Leaq(Operand(rbp, 2 * FRAME_SLOT_SIZE), rdi); // 2: skip argc & thread
    __ PushAlignBytes();
    CallNativeInternal(assembler, nativeCode);
    __ Ret();
}
1089
PushBuiltinFrame(ExtendedAssembler * assembler,Register glue,FrameType type)1090 bool AsmInterpreterCall::PushBuiltinFrame(ExtendedAssembler *assembler,
1091 Register glue, FrameType type)
1092 {
1093 __ Pushq(rbp);
1094 __ Movq(rsp, Operand(glue, JSThread::GlueData::GetLeaveFrameOffset(false)));
1095 __ Pushq(static_cast<int32_t>(type));
1096 if (type != FrameType::BUILTIN_FRAME_WITH_ARGV) {
1097 __ Leaq(Operand(rsp, FRAME_SLOT_SIZE), rbp); // 8: skip frame type
1098 return true;
1099 } else if (type == FrameType::BUILTIN_FRAME_WITH_ARGV) {
1100 // this frame push stack args must before update rbp, otherwise cpu profiler maybe visit incomplete stack
1101 // BuiltinWithArgvFrame layout please see frames.h
1102 return false;
1103 } else {
1104 LOG_ECMA(FATAL) << "this branch is unreachable";
1105 UNREACHABLE();
1106 }
1107 }
1108
// Calls the native code, then unwinds the builtin frame: rbp (set by
// PushBuiltinFrame) becomes rsp, and the saved caller rbp is popped.
void AsmInterpreterCall::CallNativeInternal(ExtendedAssembler *assembler, Register nativeCode)
{
    __ Callq(nativeCode);
    // resume rsp
    __ Movq(rbp, rsp);
    __ Pop(rbp);
}
1116
1117 // ResumeRspAndDispatch(uintptr_t glue, uintptr_t sp, uintptr_t pc, uintptr_t constantPool,
1118 // uint64_t profileTypeInfo, uint64_t acc, uint32_t hotnessCounter, size_t jumpSize)
1119 // GHC calling convention
1120 // %r13 - glue
1121 // %rbp - sp
1122 // %r12 - pc
1123 // %rbx - constantPool
1124 // %r14 - profileTypeInfo
1125 // %rsi - acc
1126 // %rdi - hotnessCounter
1127 // %r8 - jumpSizeAfterCall
// Returns from a callee interpreter frame: restores sp/rsp from the frame
// state, advances pc by jumpSize and dispatches the next bytecode. A
// non-positive jumpSize marks a return from a constructor call site
// (new-object range): an undefined or non-ecma-object result is replaced by
// the frame's `this` for base constructors, or an exception stub is
// dispatched for non-base constructors.
void AsmInterpreterCall::ResumeRspAndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(ResumeRspAndDispatch));
    Register glueRegister = __ GlueRegister();
    Register spRegister = rbp;
    Register pcRegister = r12;
    Register ret = rsi;
    Register jumpSizeRegister = r8;

    // frame state sits below sp: base = sp - sizeof(AsmInterpretedFrame)
    Register frameStateBaseRegister = r11;
    __ Movq(spRegister, frameStateBaseRegister);
    __ Subq(AsmInterpretedFrame::GetSize(false), frameStateBaseRegister);

    Label dispatch;
    Label newObjectRangeReturn;
    __ Cmpq(0, jumpSizeRegister);
    __ Jle(&newObjectRangeReturn);

    __ Movq(Operand(frameStateBaseRegister, AsmInterpretedFrame::GetBaseOffset(false)), spRegister); // update sp
    __ Addq(jumpSizeRegister, pcRegister); // newPC
    Register temp = rax;
    Register opcodeRegister = rax;
    __ Movzbq(Operand(pcRegister, 0), opcodeRegister);

    __ Bind(&dispatch);
    {
        __ Movq(Operand(frameStateBaseRegister, AsmInterpretedFrame::GetFpOffset(false)), rsp); // resume rsp
        Register bcStubRegister = r11;
        __ Movq(Operand(glueRegister, opcodeRegister, Times8, JSThread::GlueData::GetBCStubEntriesOffset(false)),
            bcStubRegister);
        __ Jmp(bcStubRegister);
    }

    Label getThis;
    Label notUndefined;
    __ Bind(&newObjectRangeReturn);
    __ Cmpq(JSTaggedValue::Undefined().GetRawData(), ret);
    __ Jne(&notUndefined);

    // constructor returned undefined: substitute the frame's `this`
    __ Bind(&getThis);
    __ Movq(Operand(frameStateBaseRegister, AsmInterpretedFrame::GetBaseOffset(false)), spRegister); // update sp
    __ Subq(jumpSizeRegister, pcRegister); // sub negative jumpSize
    __ Movzbq(Operand(pcRegister, 0), opcodeRegister);
    {
        __ Movq(Operand(frameStateBaseRegister, AsmInterpretedFrame::GetThisOffset(false)), ret);
        __ Movq(Operand(frameStateBaseRegister, AsmInterpretedFrame::GetFpOffset(false)), rsp); // resume rsp
        Register bcStubRegister = r11;
        __ Movq(Operand(glueRegister, opcodeRegister, Times8, JSThread::GlueData::GetBCStubEntriesOffset(false)),
            bcStubRegister);
        __ Jmp(bcStubRegister);
    }

    __ Bind(&notUndefined);
    {
        Label notEcmaObject;
        __ Movabs(JSTaggedValue::TAG_HEAPOBJECT_MASK, temp);
        __ And(ret, temp);
        __ Cmpq(0, temp);
        __ Jne(&notEcmaObject);
        // acc is heap object
        __ Movq(Operand(ret, 0), temp); // hclass
        __ Movl(Operand(temp, JSHClass::BIT_FIELD_OFFSET), temp);
        __ Cmpb(static_cast<int32_t>(JSType::ECMA_OBJECT_LAST), temp);
        __ Ja(&notEcmaObject);
        __ Cmpb(static_cast<int32_t>(JSType::ECMA_OBJECT_FIRST), temp);
        __ Jb(&notEcmaObject);
        // acc is ecma object — keep it as the construct result and dispatch
        __ Movq(Operand(frameStateBaseRegister, AsmInterpretedFrame::GetBaseOffset(false)), spRegister); // update sp
        __ Subq(jumpSizeRegister, pcRegister); // sub negative jumpSize
        __ Movzbq(Operand(pcRegister, 0), opcodeRegister);
        __ Jmp(&dispatch);

        __ Bind(&notEcmaObject);
        {
            // load constructor and inspect its function kind
            __ Movq(Operand(frameStateBaseRegister, AsmInterpretedFrame::GetFunctionOffset(false)), temp);
            __ Movq(Operand(temp, JSFunctionBase::METHOD_OFFSET), temp);
            __ Movq(Operand(temp, Method::EXTRA_LITERAL_INFO_OFFSET), temp);
            __ Shr(MethodLiteral::FunctionKindBits::START_BIT, temp);
            __ Andl((1LU << MethodLiteral::FunctionKindBits::SIZE) - 1, temp);
            __ Cmpl(static_cast<int32_t>(FunctionKind::CLASS_CONSTRUCTOR), temp);
            __ Jbe(&getThis); // constructor is base
            // fall through
        }
        // exception branch: non-base constructor returned a non-ecma-object
        {
            __ Movq(Operand(frameStateBaseRegister, AsmInterpretedFrame::GetBaseOffset(false)), spRegister);
            __ Movq(kungfu::BytecodeStubCSigns::ID_NewObjectRangeThrowException, opcodeRegister);
            __ Jmp(&dispatch);
        }
    }
}
1220
1221 // c++ calling convention
1222 // %rdi - glue
1223 // %rsi - callTarget
1224 // %rdx - method
1225 // %rcx - callField
1226 // %r8 - receiver
1227 // %r9 - value
// Bridges a C++-convention getter invocation into the asm interpreter via an
// asm-interp bridge frame.
void AsmInterpreterCall::CallGetter(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(CallGetter));
    Label target;

    PushAsmInterpBridgeFrame(assembler);
    __ Callq(&target);
    PopAsmInterpBridgeFrame(assembler);
    __ Ret();
    __ Bind(&target);
    JSCallCommonEntry(assembler, JSCallMode::CALL_GETTER, FrameTransitionType::OTHER_TO_OTHER);
}
1240
// Bridges a C++-convention setter invocation into the asm interpreter via an
// asm-interp bridge frame.
void AsmInterpreterCall::CallSetter(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(CallSetter));
    Label target;
    PushAsmInterpBridgeFrame(assembler);
    __ Callq(&target);
    PopAsmInterpBridgeFrame(assembler);
    __ Ret();
    __ Bind(&target);
    JSCallCommonEntry(assembler, JSCallMode::CALL_SETTER, FrameTransitionType::OTHER_TO_OTHER);
}
1252
1253 // Input: glue - %rdi
1254 // callTarget - %rsi
1255 // method - %rdx
1256 // callField - %rcx
1257 // arg0(argc) - %r8
1258 // arg1(arglist) - %r9
1259 // argthis - stack
// Bridges a C++-convention argv-style call (argthis passed on the stack, see
// comment above) into the asm interpreter; the stack `this` is loaded into
// the designated available register before the inner call.
void AsmInterpreterCall::CallReturnWithArgv(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(CallReturnWithArgv));
    Label target;
    PushAsmInterpBridgeFrame(assembler);
    Register r13 = __ CppJSCallAvailableRegister1();
    __ Movq(Operand(rbp, FRAME_SLOT_SIZE), r13); // load argthis from the caller's stack
    __ Callq(&target);
    PopAsmInterpBridgeFrame(assembler);
    __ Ret();
    __ Bind(&target);
    {
        JSCallCommonEntry(assembler, JSCallMode::CALL_THIS_ARGV_WITH_RETURN,
            FrameTransitionType::OTHER_TO_OTHER);
    }
}
1276
// Bridges a two-argument container-method call into the asm interpreter;
// stack-passed args are loaded by GetArgvAtStack before the inner call.
void AsmInterpreterCall::CallContainersArgs2(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(CallContainersArgs2));
    Label target;
    PushAsmInterpBridgeFrame(assembler);
    GetArgvAtStack(assembler);
    __ Callq(&target);
    PopAsmInterpBridgeFrame(assembler);
    __ Ret();
    __ Bind(&target);
    {
        JSCallCommonEntry(assembler, JSCallMode::CALL_THIS_ARG2_WITH_RETURN,
            FrameTransitionType::OTHER_TO_OTHER);
    }
}
1292
// Bridges a three-argument container-method call into the asm interpreter;
// stack-passed args are loaded by GetArgvAtStack before the inner call.
void AsmInterpreterCall::CallContainersArgs3(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(CallContainersArgs3));
    Label target;
    PushAsmInterpBridgeFrame(assembler);
    GetArgvAtStack(assembler);
    __ Callq(&target);
    PopAsmInterpBridgeFrame(assembler);
    __ Ret();
    __ Bind(&target);
    {
        JSCallCommonEntry(assembler, JSCallMode::CALL_THIS_ARG3_WITH_RETURN,
            FrameTransitionType::OTHER_TO_OTHER);
    }
}
1308
1309 // c++ calling convention
1310 // %rdi - glue
1311 // %rsi - callTarget
1312 // %rdx - method
1313 // %rcx - callField
1314 // %r8 - receiver
1315 // %r9 - value
// Same as CallGetter, but the callee may transition into baseline-compiled
// code (OTHER_TO_BASELINE_CHECK).
void AsmInterpreterCall::CallGetterToBaseline(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(CallGetterToBaseline));
    Label target;

    PushAsmInterpBridgeFrame(assembler);
    __ Callq(&target);
    PopAsmInterpBridgeFrame(assembler);
    __ Ret();
    __ Bind(&target);
    JSCallCommonEntry(assembler, JSCallMode::CALL_GETTER, FrameTransitionType::OTHER_TO_BASELINE_CHECK);
}
1328
// Same as CallSetter, but the callee may transition into baseline-compiled
// code (OTHER_TO_BASELINE_CHECK).
void AsmInterpreterCall::CallSetterToBaseline(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(CallSetterToBaseline));
    Label target;
    PushAsmInterpBridgeFrame(assembler);
    __ Callq(&target);
    PopAsmInterpBridgeFrame(assembler);
    __ Ret();
    __ Bind(&target);
    JSCallCommonEntry(assembler, JSCallMode::CALL_SETTER, FrameTransitionType::OTHER_TO_BASELINE_CHECK);
}
1340
1341 // Input: glue - %rdi
1342 // callTarget - %rsi
1343 // method - %rdx
1344 // callField - %rcx
1345 // arg0(argc) - %r8
1346 // arg1(arglist) - %r9
1347 // argthis - stack
// Same as CallReturnWithArgv, but the callee may transition into
// baseline-compiled code (OTHER_TO_BASELINE_CHECK).
void AsmInterpreterCall::CallReturnWithArgvToBaseline(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(CallReturnWithArgvToBaseline));
    Label target;
    PushAsmInterpBridgeFrame(assembler);
    Register r13 = __ CppJSCallAvailableRegister1();
    __ Movq(Operand(rbp, FRAME_SLOT_SIZE), r13); // load argthis from the caller's stack
    __ Callq(&target);
    PopAsmInterpBridgeFrame(assembler);
    __ Ret();
    __ Bind(&target);
    {
        JSCallCommonEntry(assembler, JSCallMode::CALL_THIS_ARGV_WITH_RETURN,
            FrameTransitionType::OTHER_TO_BASELINE_CHECK);
    }
}
1364
// Same as CallContainersArgs2, but the callee may transition into
// baseline-compiled code (OTHER_TO_BASELINE_CHECK).
void AsmInterpreterCall::CallContainersArgs2ToBaseline(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(CallContainersArgs2ToBaseline));
    Label target;
    PushAsmInterpBridgeFrame(assembler);
    GetArgvAtStack(assembler);
    __ Callq(&target);
    PopAsmInterpBridgeFrame(assembler);
    __ Ret();
    __ Bind(&target);
    {
        JSCallCommonEntry(assembler, JSCallMode::CALL_THIS_ARG2_WITH_RETURN,
            FrameTransitionType::OTHER_TO_BASELINE_CHECK);
    }
}
1380
// Same as CallContainersArgs3, but the callee may transition into
// baseline-compiled code (OTHER_TO_BASELINE_CHECK).
void AsmInterpreterCall::CallContainersArgs3ToBaseline(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(CallContainersArgs3ToBaseline));
    Label target;
    PushAsmInterpBridgeFrame(assembler);
    GetArgvAtStack(assembler);
    __ Callq(&target);
    PopAsmInterpBridgeFrame(assembler);
    __ Ret();
    __ Bind(&target);
    {
        JSCallCommonEntry(assembler, JSCallMode::CALL_THIS_ARG3_WITH_RETURN,
            FrameTransitionType::OTHER_TO_BASELINE_CHECK);
    }
}
1396
1397 // ResumeRspAndReturn(uintptr_t acc)
1398 // GHC calling convention
1399 // %r13 - acc
1400 // %rbp - prevSp
1401 // %r12 - sp
// Restores rsp from the frame's saved fp and returns acc (r13) to the C++
// caller in rax.
void AsmInterpreterCall::ResumeRspAndReturn(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(ResumeRspAndReturn));
    Register currentSp = r12;
    Register fpRegister = r10;
    // fp slot lies below currentSp inside the AsmInterpretedFrame, hence the
    // negative offset (fpOffset - frameSize)
    intptr_t offset = AsmInterpretedFrame::GetFpOffsetAsIntptr(false) -
        AsmInterpretedFrame::GetSizeAsIntptr(false);
    __ Movq(Operand(currentSp, static_cast<int32_t>(offset)), fpRegister);
    __ Movq(fpRegister, rsp);
    // return
    {
        __ Movq(r13, rax); // acc -> C ABI return register
        __ Ret();
    }
}
1417
1418 // ResumeRspAndReturnBaseline(uintptr_t acc)
1419 // GHC calling convention
1420 // %r13 - acc
1421 // %rbp - prevSp
1422 // %r12 - sp
1423 // %rbx - jumpSizeAfterCall
// Baseline variant of ResumeRspAndReturn: restores rsp from the frame's
// saved fp and returns acc in rax. A non-positive jumpSize (rbx) marks a
// return from a constructor call site, so the result is fixed up first:
// undefined (or a non-ecma-object from a base constructor) is replaced by
// the frame's `this`.
void AsmInterpreterCall::ResumeRspAndReturnBaseline(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(ResumeRspAndReturnBaseline));
    Register currentSp = r12;
    Register fpRegister = r10;
    // negative offset: fp slot lies below currentSp inside the frame
    intptr_t fpOffset = static_cast<intptr_t>(AsmInterpretedFrame::GetFpOffset(false)) -
        static_cast<intptr_t>(AsmInterpretedFrame::GetSize(false));
    __ Movq(Operand(currentSp, static_cast<int32_t>(fpOffset)), fpRegister);
    __ Movq(fpRegister, rsp);

    // Check result
    Register ret = r13;
    Register jumpSizeRegister = rbx;
    Label getThis;
    Label notUndefined;
    Label normalReturn;
    Label newObjectRangeReturn;
    __ Cmpq(0, jumpSizeRegister);
    __ Jg(&normalReturn);

    __ Bind(&newObjectRangeReturn);
    {
        __ Cmpq(JSTaggedValue::Undefined().GetRawData(), ret);
        __ Jne(&notUndefined);

        // acc is undefined
        __ Bind(&getThis);
        intptr_t thisOffset = static_cast<intptr_t>(AsmInterpretedFrame::GetThisOffset(false)) -
            static_cast<intptr_t>(AsmInterpretedFrame::GetSize(false));
        __ Movq(Operand(currentSp, static_cast<int32_t>(thisOffset)), ret);
        __ Jmp(&normalReturn);

        // acc is not undefined
        __ Bind(&notUndefined);
        {
            Register temp = rax;
            Label notEcmaObject;
            __ Movabs(JSTaggedValue::TAG_HEAPOBJECT_MASK, temp);
            __ And(ret, temp);
            __ Cmpq(0, temp);
            __ Jne(&notEcmaObject);
            // acc is heap object
            __ Movq(Operand(ret, 0), temp); // hclass
            __ Movl(Operand(temp, JSHClass::BIT_FIELD_OFFSET), temp);
            __ Cmpb(static_cast<int32_t>(JSType::ECMA_OBJECT_LAST), temp);
            __ Ja(&notEcmaObject);
            __ Cmpb(static_cast<int32_t>(JSType::ECMA_OBJECT_FIRST), temp);
            __ Jb(&notEcmaObject);
            // acc is ecma object — return it unchanged
            __ Jmp(&normalReturn);

            __ Bind(&notEcmaObject);
            {
                // load constructor and inspect its function kind
                intptr_t funcOffset = AsmInterpretedFrame::GetFunctionOffsetAsIntptr(false) -
                    AsmInterpretedFrame::GetSizeAsIntptr(false);
                __ Movq(Operand(currentSp, static_cast<int32_t>(funcOffset)), temp);
                __ Movq(Operand(temp, JSFunctionBase::METHOD_OFFSET), temp);
                __ Movq(Operand(temp, Method::EXTRA_LITERAL_INFO_OFFSET), temp);
                __ Shr(MethodLiteral::FunctionKindBits::START_BIT, temp);
                __ Andl((1LU << MethodLiteral::FunctionKindBits::SIZE) - 1, temp);
                __ Cmpl(static_cast<int32_t>(FunctionKind::CLASS_CONSTRUCTOR), temp);
                __ Jbe(&getThis); // constructor is base
                // fall through
            }
        }
    }
    __ Bind(&normalReturn);
    __ Movq(ret, rax); // acc -> C ABI return register
    __ Ret();
}
1495
1496 // ResumeCaughtFrameAndDispatch(uintptr_t glue, uintptr_t sp, uintptr_t pc, uintptr_t constantPool,
1497 // uint64_t profileTypeInfo, uint64_t acc, uint32_t hotnessCounter)
1498 // GHC calling convention
1499 // %r13 - glue
1500 // %rbp - sp
1501 // %r12 - pc
1502 // %rbx - constantPool
1503 // %r14 - profileTypeInfo
1504 // %rsi - acc
1505 // %rdi - hotnessCounter
// Resumes execution at a catch handler: restores rsp from the thread's
// lastFp (if non-zero) and dispatches the bytecode at pc.
void AsmInterpreterCall::ResumeCaughtFrameAndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(ResumeCaughtFrameAndDispatch));
    Register glueRegister = __ GlueRegister();
    Register pcRegister = r12;

    Label dispatch;
    Register fpRegister = r11;
    __ Movq(Operand(glueRegister, JSThread::GlueData::GetLastFpOffset(false)), fpRegister);
    __ Cmpq(0, fpRegister);
    __ Jz(&dispatch); // lastFp == 0: keep the current rsp
    __ Movq(fpRegister, rsp); // resume rsp
    __ Bind(&dispatch);
    {
        Register opcodeRegister = rax;
        __ Movzbq(Operand(pcRegister, 0), opcodeRegister);
        Register bcStubRegister = r11;
        __ Movq(Operand(glueRegister, opcodeRegister, Times8, JSThread::GlueData::GetBCStubEntriesOffset(false)),
            bcStubRegister);
        __ Jmp(bcStubRegister);
    }
}
1528
1529 // ResumeUncaughtFrameAndReturn(uintptr_t glue)
1530 // GHC calling convention
1531 // %r13 - glue
1532 // %rbp - sp
1533 // %r12 - acc
// Unwinds out of the interpreter when an exception is not caught: restores
// rsp from the thread's lastFp (if non-zero) and returns acc to the C++
// caller (Execute) in rax.
void AsmInterpreterCall::ResumeUncaughtFrameAndReturn(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(ResumeUncaughtFrameAndReturn));
    Register glueRegister = __ GlueRegister();
    Register acc(r12);
    Register cppRet(rax);

    Label ret;
    Register fpRegister = r11;
    __ Movq(Operand(glueRegister, JSThread::GlueData::GetLastFpOffset(false)), fpRegister);
    __ Cmpq(0, fpRegister);
    __ Jz(&ret); // lastFp == 0: keep the current rsp
    __ Movq(fpRegister, rsp); // resume rsp
    __ Bind(&ret);
    // this method will return to Execute(cpp calling convention), and the return value should be put into rax.
    __ Movq(acc, cppRet);
    __ Ret();
}
1552
1553 // ResumeRspAndRollback(uintptr_t glue, uintptr_t sp, uintptr_t pc, uintptr_t constantPool,
1554 // uint64_t profileTypeInfo, uint64_t acc, uint32_t hotnessCounter, size_t jumpSize)
1555 // GHC calling convention
1556 // %r13 - glue
1557 // %rbp - sp
1558 // %r12 - pc
1559 // %rbx - constantPool
1560 // %r14 - profileTypeInfo
1561 // %rsi - acc
1562 // %rdi - hotnessCounter
1563 // %r8 - jumpSizeAfterCall
// Like ResumeRspAndDispatch, but for the rollback path: restores sp/rsp,
// advances pc by jumpSize, reloads acc from the frame and dispatches.
void AsmInterpreterCall::ResumeRspAndRollback(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(ResumeRspAndRollback));
    Register glueRegister = __ GlueRegister();
    Register spRegister = rbp;
    Register pcRegister = r12;
    Register ret = rsi;
    Register jumpSizeRegister = r8;

    // frame state sits below sp: base = sp - sizeof(AsmInterpretedFrame)
    Register frameStateBaseRegister = r11;
    __ Movq(spRegister, frameStateBaseRegister);
    __ Subq(AsmInterpretedFrame::GetSize(false), frameStateBaseRegister);

    __ Movq(Operand(frameStateBaseRegister, AsmInterpretedFrame::GetBaseOffset(false)), spRegister); // update sp
    __ Addq(jumpSizeRegister, pcRegister); // newPC
    Register opcodeRegister = rax;
    __ Movzbq(Operand(pcRegister, 0), opcodeRegister);

    // NOTE(review): loads the frame's FunctionOffset (the callee function
    // object) into acc, not a saved accumulator — confirm this is the
    // intended rollback value for the re-dispatched bytecode.
    __ Movq(Operand(frameStateBaseRegister, AsmInterpretedFrame::GetFunctionOffset(false)), ret); // restore acc

    __ Movq(Operand(frameStateBaseRegister, AsmInterpretedFrame::GetFpOffset(false)), rsp); // resume rsp
    Register bcStubRegister = r11;
    __ Movq(Operand(glueRegister, opcodeRegister, Times8, JSThread::GlueData::GetBCStubEntriesOffset(false)),
        bcStubRegister);
    __ Jmp(bcStubRegister);
}
1590
// preserve all the general registers, except r11 and callee saved registers,
// and call r11
PreserveMostCall(ExtendedAssembler * assembler)1593 void AsmInterpreterCall::PreserveMostCall(ExtendedAssembler* assembler)
1594 {
1595 // * layout as the following:
1596 // +--------------------------+ ---------
1597 // | . . . . . | ^
1598 // callerSP ---> |--------------------------| |
1599 // | returnAddr | |
1600 // |--------------------------| OptimizedFrame
1601 // | callsiteFp | |
1602 // fp ---> |--------------------------| |
1603 // | OPTIMIZED_FRAME | v
1604 // +--------------------------+ ---------
1605 // | rdi |
1606 // +--------------------------+
1607 // | rsi |
1608 // +--------------------------+
1609 // | rdx |
1610 // +--------------------------+
1611 // | rcx |
1612 // +--------------------------+
1613 // | r8 |
1614 // +--------------------------+
1615 // | r9 |
1616 // +--------------------------+
1617 // | r10 |
1618 // +--------------------------+
1619 // | rax |
1620 // +--------------------------+
1621 // | align |
1622 // calleeSP ---> +--------------------------+
1623 {
1624 // prologue to save rbp, frametype, and update rbp.
1625 __ Pushq(rbp);
1626 __ Pushq(static_cast<int64_t>(FrameType::OPTIMIZED_FRAME)); // set frame type
1627 __ Leaq(Operand(rsp, FRAME_SLOT_SIZE), rbp); // skip frame type
1628 }
1629 int32_t PreserveRegisterIndex = 9;
1630 // rdi,rsi,rdx,rcx,r8,r9,r10,rax should be preserved,
1631 // other general registers are callee saved register, callee will save them.
1632 __ Subq(PreserveRegisterIndex * FRAME_SLOT_SIZE, rsp);
1633 __ Movq(rdi, Operand(rsp, FRAME_SLOT_SIZE * (--PreserveRegisterIndex)));
1634 __ Movq(rsi, Operand(rsp, FRAME_SLOT_SIZE * (--PreserveRegisterIndex)));
1635 __ Movq(rdx, Operand(rsp, FRAME_SLOT_SIZE * (--PreserveRegisterIndex)));
1636 __ Movq(rcx, Operand(rsp, FRAME_SLOT_SIZE * (--PreserveRegisterIndex)));
1637 __ Movq(r8, Operand(rsp, FRAME_SLOT_SIZE * (--PreserveRegisterIndex)));
1638 __ Movq(r9, Operand(rsp, FRAME_SLOT_SIZE * (--PreserveRegisterIndex)));
1639 __ Movq(r10, Operand(rsp, FRAME_SLOT_SIZE * (--PreserveRegisterIndex)));
1640 __ Movq(rax, Operand(rsp, FRAME_SLOT_SIZE * (--PreserveRegisterIndex)));
1641 __ Callq(r11);
1642 __ Movq(Operand(rsp, FRAME_SLOT_SIZE * (PreserveRegisterIndex++)), rax);
1643 __ Movq(Operand(rsp, FRAME_SLOT_SIZE * (PreserveRegisterIndex++)), r10);
1644 __ Movq(Operand(rsp, FRAME_SLOT_SIZE * (PreserveRegisterIndex++)), r9);
1645 __ Movq(Operand(rsp, FRAME_SLOT_SIZE * (PreserveRegisterIndex++)), r8);
1646 __ Movq(Operand(rsp, FRAME_SLOT_SIZE * (PreserveRegisterIndex++)), rcx);
1647 __ Movq(Operand(rsp, FRAME_SLOT_SIZE * (PreserveRegisterIndex++)), rdx);
1648 __ Movq(Operand(rsp, FRAME_SLOT_SIZE * (PreserveRegisterIndex++)), rsi);
1649 __ Movq(Operand(rsp, FRAME_SLOT_SIZE * (PreserveRegisterIndex++)), rdi);
1650 {
1651 // epilogue to restore rsp, rbp.
1652 // need add the frametype slot
1653 __ Addq(PreserveRegisterIndex * FRAME_SLOT_SIZE + FRAME_SLOT_SIZE, rsp);
1654 __ Popq(rbp);
1655 __ Ret();
1656 }
1657 }
1658
1659 // ASMFastWriteBarrier(GateRef glue, GateRef obj, GateRef offset, GateRef value)
1660 // c calling convention, but preserve all general registers except %r11
// %rdi - glue
1662 // %rsi - obj
1663 // %rdx - offset
1664 // %rcx - value
// Clobbers only %r11 on the fast path; the slow paths load a common-stub entry
// into r11 and fall into PreserveMostCall, which spills/restores the remaining
// caller registers around the stub call.
void AsmInterpreterCall::ASMFastWriteBarrier(ExtendedAssembler* assembler)
{
    // valid region flag are as follows, assume it will be ALWAYS VALID.
    // Judge the region of value with:
    //                      "young"               "sweepable share"  "readonly share"
    // region flag: 0x08, 0x09, [0x0A, 0x11],       [0x12, 0x14],        0x15
    // value is share:      [0x12, 0x15]           => valueMaybeSweepableShare
    //     readonly share:  0x15                   => return
    //     sweepable share: [0x12, 0x14]           => needShareBarrier
    // value is not share:  0x08, 0x09, [0x0A, 0x11] => valueNotShare
    //     value is young:     0x09                => needCallNotShare
    //     value is not young: 0x08, [0x0A, 0x11]  => checkMark
    ASSERT(IN_YOUNG_SPACE < SHARED_SPACE_BEGIN && SHARED_SPACE_BEGIN <= SHARED_SWEEPABLE_SPACE_BEGIN &&
        SHARED_SWEEPABLE_SPACE_END < IN_SHARED_READ_ONLY_SPACE && IN_SHARED_READ_ONLY_SPACE == HEAP_SPACE_END);
    __ BindAssemblerStub(RTSTUB_ID(ASMFastWriteBarrier));
    Label needCall;
    Label checkMark;
    Label needCallNotShare;
    Label needShareBarrier;
    Label valueNotShare;
    Label valueMaybeSweepableShare;
    {
        // int8_t *valueRegion = value & (~(JSTaggedValue::TAG_MARK | DEFAULT_REGION_MASK))
        // int8_t valueFlag = *valueRegion
        // if (valueFlag >= SHARED_SWEEPABLE_SPACE_BEGIN){
        //     goto valueMaybeSweepableShare
        // }
        __ Movabs(~(JSTaggedValue::TAG_MARK | DEFAULT_REGION_MASK), r11); // r11 is the mask to get the region.
        __ And(rcx, r11); // r11 is the region address of value.
        __ Movzbl(Operand(r11, 0), r11); // r11 is the flag load from region of value.
        __ Cmpl(Immediate(RegionSpaceFlag::SHARED_SWEEPABLE_SPACE_BEGIN), r11);
        __ Jae(&valueMaybeSweepableShare);
        // if value may be SweepableShare, goto valueMaybeSweepableShare
    }
    __ Bind(&valueNotShare);
    {
        // valueNotShare:
        // if (valueFlag != IN_YOUNG_SPACE){
        //     goto checkMark
        // }
        // int8_t *objRegion = obj & (~(JSTaggedValue::TAG_MARK | DEFAULT_REGION_MASK))
        // int8_t objFlag = *objRegion
        // if (objFlag != IN_YOUNG_SPACE){
        //     goto needCallNotShare
        // }

        __ Cmpl(Immediate(RegionSpaceFlag::IN_YOUNG_SPACE), r11);
        __ Jne(&checkMark);
        // if value is not in young, goto checkMark

        __ Movabs(~(JSTaggedValue::TAG_MARK | DEFAULT_REGION_MASK), r11);
        __ And(rsi, r11); // r11 is the region address of obj.
        __ Movzbl(Operand(r11, 0), r11); // r11 is the flag load from region of obj.
        __ Cmpl(Immediate(RegionSpaceFlag::IN_YOUNG_SPACE), r11);
        __ Jne(&needCallNotShare);
        // if obj is not in young, goto needCallNotShare
        // obj and value are both young: fall through to the concurrent-marking check.
    }

    __ Bind(&checkMark);
    {
        // checkMark:
        // int8_t GCStateBitField = *(glue+GCStateBitFieldOffset)
        // if (GCStateBitField & JSThread::CONCURRENT_MARKING_BITFIELD_MASK != 0) {
        //     goto needCallNotShare
        // }
        // return

        __ Movl(Operand(rdi, JSThread::GlueData::GetGCStateBitFieldOffset(false)), r11);
        __ Testb(Immediate(JSThread::CONCURRENT_MARKING_BITFIELD_MASK), r11);
        __ Jne(&needCallNotShare);
        // if GCState is not READY_TO_MARK, go to needCallNotShare.
        __ Ret();
    }

    __ Bind(&valueMaybeSweepableShare);
    {
        // valueMaybeSweepableShare:
        // if (valueFlag != IN_SHARED_READ_ONLY_SPACE){
        //     goto needShareBarrier
        // }
        // return
        __ Cmpl(Immediate(RegionSpaceFlag::IN_SHARED_READ_ONLY_SPACE), r11);
        __ Jne(&needShareBarrier);
        __ Ret(); // read-only shared values never need a barrier.
    }

    __ Bind(&needCallNotShare);
    {
        // Load the SetNonSValueWithBarrier common-stub entry from glue into r11,
        // then fall through to needCall.
        int32_t NonSValueBarrier = static_cast<int32_t>(JSThread::GlueData::GetCOStubEntriesOffset(false)) +
            kungfu::CommonStubCSigns::SetNonSValueWithBarrier * FRAME_SLOT_SIZE;
        __ Movq(Operand(rdi, NonSValueBarrier), r11);
    }
    __ Bind(&needCall);
    {
        // Call the stub whose entry is in r11, preserving all caller registers.
        PreserveMostCall(assembler);
    }
    __ Bind(&needShareBarrier);
    {
        // Shared-barrier fast path; jumps back to needCall for its slow path.
        ASMFastSharedWriteBarrier(assembler, needCall);
    }
}
1767
// %rdi - glue
1769 // %rsi - obj
1770 // %rdx - offset
1771 // %rcx - value
// Fast path of the local->shared write barrier: when the slot's bit can be set
// directly in the obj region's localToShare bitset, do so inline; otherwise
// load the shared-barrier stub entry into r11 and jump to `needcall`, which
// performs PreserveMostCall on r11.
void AsmInterpreterCall::ASMFastSharedWriteBarrier(ExtendedAssembler* assembler, Label& needcall)
{
    Label checkBarrierForSharedValue;
    Label restoreScratchRegister;
    Label callSharedBarrier;
    {
        // int8_t *objRegion = obj & (~(JSTaggedValue::TAG_MARK | DEFAULT_REGION_MASK))
        // int8_t objFlag = *objRegion
        // if (objFlag >= SHARED_SPACE_BEGIN){
        //     // share to share, just check the barrier
        //     goto checkBarrierForSharedValue
        // }
        __ Movabs(~(JSTaggedValue::TAG_MARK | DEFAULT_REGION_MASK), r11); // r11 is the mask to get the region.
        __ And(rsi, r11); // r11: region address of obj.
        __ Movzbl(Operand(r11, 0), r11); // r11: the flag load from region of obj.
        __ Cmpl(Immediate(RegionSpaceFlag::SHARED_SPACE_BEGIN), r11);
        __ Jae(&checkBarrierForSharedValue); // if objflag >= SHARED_SPACE_BEGIN => checkBarrierForSharedValue
    }
    {
        // int8_t *objRegion = obj & (~(JSTaggedValue::TAG_MARK | DEFAULT_REGION_MASK))
        // int8_t *localToShareSet = *(objRegion + LocalToShareSetOffset)
        // if (localToShareSet == 0){
        //     goto callSharedBarrier
        // }
        __ Movabs(~(JSTaggedValue::TAG_MARK | DEFAULT_REGION_MASK), r11); // r11 is the mask to get the region.
        __ And(rsi, r11); // r11: region address of obj.
        __ Movq(Operand(r11, Region::PackedData::GetLocalToShareSetOffset(false)), r11);
        // r11 is localToShareSet for obj region.
        __ Cmpq(Immediate(0), r11);
        __ Je(&callSharedBarrier); // if localToShareSet == 0 => callSharedBarrier
    }
    {
        // r12, r13 will be used as scratch register, spill them.
        {
            __ Pushq(r12);
            __ Pushq(r13);
        }
        // int64_t objOffset = obj & DEFAULT_REGION_MASK
        // int64_t slotOffset = objOffset + offset
        // int8_t lowSlotOffset = slotOffset & 0xff

        __ Movabs(DEFAULT_REGION_MASK, r12);
        __ And(rsi, r12); // obj & DEFAULT_REGION_MASK => r12 is obj's offset to region
        __ Addq(rdx, r12); // r12 is slotAddr's offset to region
        __ Movzbl(r12, r13); // r13 is low 8 bit of slotAddr's offset to region

        // the logic to get byteIndex in stub_builder.cpp
        //             [63-------------------------35][34------------------------8][7---3][2-0]
        // slotOffset: aaaaaaaaaaaaaaaaaaaaaaaaaaaaa  bbbbbbbbbbbbbbbbbbbbbbbbbbb  ccccc  ddd
        // 1. bitOffsetPtr = LSR TAGGED_TYPE_SIZE_LOG(3) slotOffset
        // bitOffsetPtr:    aaaaaaaaaaaaaaaaaaaaaaaaaa aaabbbbbbbbbbbbbbbbbbbbbbbb bbbcc  ccc
        // 2. bitOffset = TruncPtrToInt32 bitOffsetPtr
        // bitOffset:                                      bbbbbbbbbbbbbbbbbbbbbbbb bbbcc ccc
        // 3. index = LSR BIT_PER_WORD_LOG2(5) bitOffset
        // index:                                               bbbbbbbbbbbbbbbbbbb bbbbb bbb
        // 4. byteIndex = Mul index BYTE_PER_WORD(4)
        // byteIndex:                                         bbbbbbbbbbbbbbbbbbbbb bbbbb b00

        // the logic to get byteIndex here (single shift + mask, same result):
        //             [63-------------------------35][34------------------------8][7---3][2-0]
        // slotOffset: aaaaaaaaaaaaaaaaaaaaaaaaaaaaa  bbbbbbbbbbbbbbbbbbbbbbbbbbb  ccccc  ddd
        // 1. LSR (TAGGED_TYPE_SIZE_LOG + GCBitset::BIT_PER_WORD_LOG2 - GCBitset::BYTE_PER_WORD_LOG2)(6) slotOffset
        // r12:             aaaaaaaaaaaaaaaaaaaaaaa aaaaaabbbbbbbbbbbbbbbbbbbbb    bbbbb  bcc
        // indexMask:  00000000000000000000000000000 000000111111111111111111111   11111  100
        // 2. And r12 indexMask
        // byteIndex:                                      bbbbbbbbbbbbbbbbbbbbb   bbbbb  b00
        constexpr uint32_t byteIndexMask = static_cast<uint32_t>(0xffffffffffffffff >> TAGGED_TYPE_SIZE_LOG) >>
            GCBitset::BIT_PER_WORD_LOG2 << GCBitset::BYTE_PER_WORD_LOG2;
        static_assert(byteIndexMask == 0x1ffffffc && "LocalToShareSet is changed?");
        __ Shrq(TAGGED_TYPE_SIZE_LOG + GCBitset::BIT_PER_WORD_LOG2 - GCBitset::BYTE_PER_WORD_LOG2, r12);
        __ Andq(byteIndexMask, r12); // r12 is byteIndex

        __ Addq(RememberedSet::GCBITSET_DATA_OFFSET, r11); // r11 is bitsetData addr
        __ Addq(r12, r11); // r11 is the addr of bitset value
        __ Movl(Operand(r11, 0), r12); // r12: oldsetValue

        // the logic to get mask in stub_builder.cpp
        //             [63-------------------------35][34------------------------8][7---3][2-0]
        // bitOffset:                                  bbbbbbbbbbbbbbbbbbbbbbbb bbbcc     ccc
        // bitPerWordMask:                                                         11     111
        // indexInWord = And bitoffset bitPerWordMask
        // indexInWord:                                                            cc     ccc
        // mask = 1 << indexInWord

        // the logic to test bit set value here:
        //             [63-------------------------35][34------------------------8][7---3][2-0]
        // slotOffset: aaaaaaaaaaaaaaaaaaaaaaaaaaaaa  bbbbbbbbbbbbbbbbbbbbbbbbbbb  ccccc  ddd
        // lowSlotOffset:                                                           ccccc ddd
        // indexInWord = Shrl TAGGED_TYPE_SIZE_LOG lowSlotOffset
        // indexInWord:                                                               cc ccc
        __ Shrl(TAGGED_TYPE_SIZE_LOG, r13);

        // if "r13" position in r12 is 1, goto restoreScratchRegister;
        // if "r13" position in r12 is 0, set it to 1 and store r12 to r11(addr of bitset value)
        // Btsl sets CF to the old bit and sets the bit in r12.
        __ Btsl(r13, r12);
        __ Jb(&restoreScratchRegister);
        __ Movl(r12, Operand(r11, 0));
    }
    __ Bind(&restoreScratchRegister);
    {
        // Reached by fallthrough (bit newly set) or jump (bit already set).
        __ Popq(r13);
        __ Popq(r12);
    }
    __ Bind(&checkBarrierForSharedValue);
    {
        // checkBarrierForSharedValue:
        // int8_t GCStateBitField = *(glue+SharedGCStateBitFieldOffset)
        // if (GCStateBitField & JSThread::SHARED_CONCURRENT_MARKING_BITFIELD_MASK != 0) {
        //     goto callSharedBarrier
        // }
        // return
        __ Movl(Operand(rdi, JSThread::GlueData::GetSharedGCStateBitFieldOffset(false)), r11);
        __ Testb(Immediate(JSThread::SHARED_CONCURRENT_MARKING_BITFIELD_MASK), r11);
        __ Jne(&callSharedBarrier);
        // if shared GCState is not READY_TO_MARK, go to callSharedBarrier.
        __ Ret();
    }
    __ Bind(&callSharedBarrier);
    {
        // NOTE(review): the local is named NonSValueBarrier but this loads the
        // SetSValueWithBarrier (shared-value) stub entry — consider renaming.
        int32_t NonSValueBarrier = static_cast<int32_t>(JSThread::GlueData::GetCOStubEntriesOffset(false)) +
            kungfu::CommonStubCSigns::SetSValueWithBarrier * FRAME_SLOT_SIZE;
        __ Movq(Operand(rdi, NonSValueBarrier), r11);
        __ Jmp(&needcall); // tail into PreserveMostCall with the stub entry in r11.
    }
}
1897
// Push `argc` undefined values onto the stack, after first verifying there is
// room; jumps to *stackOverflow when the check fails. op1/op2 are scratch
// registers used by the overflow check.
void AsmInterpreterCall::PushUndefinedWithArgcAndCheckStack(ExtendedAssembler *assembler, Register glue, Register argc,
                                                            Register op1, Register op2, Label *stackOverflow)
{
    ASSERT(stackOverflow != nullptr);
    StackOverflowCheck(assembler, glue, argc, op1, op2, stackOverflow);
    PushUndefinedWithArgc(assembler, argc);
}
1905
ThrowStackOverflowExceptionAndReturn(ExtendedAssembler * assembler,Register glue,Register fp,Register op)1906 void AsmInterpreterCall::ThrowStackOverflowExceptionAndReturn(ExtendedAssembler *assembler, Register glue, Register fp,
1907 Register op)
1908 {
1909 if (fp != rsp) {
1910 __ Movq(fp, rsp);
1911 }
1912 __ Movq(kungfu::RuntimeStubCSigns::ID_ThrowStackOverflowException, op);
1913 __ Movq(Operand(glue, op, Times8, JSThread::GlueData::GetRTStubEntriesOffset(false)), op);
1914 if (glue != r13) {
1915 __ Movq(glue, r13);
1916 }
1917
1918 __ Pushq(rbp);
1919 __ Pushq(static_cast<int64_t>(FrameType::ASM_BRIDGE_FRAME)); // set frame type
1920 __ Leaq(Operand(rsp, FRAME_SLOT_SIZE), rbp); // skip frame type
1921
1922 Label callRuntime;
1923 // 16 bytes align check
1924 __ Testq(0x8, rsp);
1925 __ Jnz(&callRuntime);
1926 __ PushAlignBytes();
1927 __ Bind(&callRuntime);
1928 __ Pushq(r10); // caller save
1929 __ Pushq(0); // argc
1930 __ Pushq(kungfu::RuntimeStubCSigns::ID_ThrowStackOverflowException); // runtime id
1931 __ Movq(glue, rax); // glue
1932 __ Movq(kungfu::RuntimeStubCSigns::ID_CallRuntime, r10);
1933 __ Movq(Operand(rax, r10, Times8, JSThread::GlueData::GetRTStubEntriesOffset(false)), r10);
1934 __ Callq(r10); // call CallRuntime
1935 __ Addq(2 * FRAME_SLOT_SIZE, rsp); // 2: skip argc and runtime_id
1936 __ Popq(r10);
1937 __ Movq(rbp, rsp);
1938 __ Popq(rbp);
1939 __ Ret();
1940 }
1941
ThrowStackOverflowExceptionAndReturnToAsmInterpBridgeFrame(ExtendedAssembler * assembler,Register glue,Register fp,Register op)1942 void AsmInterpreterCall::ThrowStackOverflowExceptionAndReturnToAsmInterpBridgeFrame(ExtendedAssembler *assembler,
1943 Register glue, Register fp, Register op)
1944 {
1945 if (fp != rsp) {
1946 __ Movq(fp, rsp);
1947 }
1948 __ Movq(kungfu::RuntimeStubCSigns::ID_ThrowStackOverflowException, op);
1949 __ Movq(Operand(glue, op, Times8, JSThread::GlueData::GetRTStubEntriesOffset(false)), op);
1950 if (glue != r13) {
1951 __ Movq(glue, r13);
1952 }
1953
1954 __ Pushq(rbp);
1955 __ Pushq(static_cast<int64_t>(FrameType::ASM_BRIDGE_FRAME)); // set frame type
1956 __ Leaq(Operand(rsp, FRAME_SLOT_SIZE), rbp); // skip frame type
1957
1958 Label callRuntime;
1959 // 16 bytes align check
1960 __ Testq(0x8, rsp);
1961 __ Jnz(&callRuntime);
1962 __ PushAlignBytes();
1963 __ Bind(&callRuntime);
1964 __ Pushq(r10); // caller save
1965 __ Pushq(0); // argc
1966 __ Pushq(kungfu::RuntimeStubCSigns::ID_ThrowStackOverflowException); // runtime id
1967 __ Movq(glue, rax); // glue
1968 __ Movq(kungfu::RuntimeStubCSigns::ID_CallRuntime, r10);
1969 __ Movq(Operand(rax, r10, Times8, JSThread::GlueData::GetRTStubEntriesOffset(false)), r10);
1970 __ Callq(r10); // call CallRuntime
1971 __ Addq(2 * FRAME_SLOT_SIZE, rsp); // 2: skip argc and runtime_id
1972 __ Popq(r10);
1973 __ Movq(rbp, rsp);
1974 __ Popq(rbp);
1975
1976 // +----------------------------------------------------+
1977 // | return addr |
1978 // |----------------------------------------------------| <---- rbp
1979 // | frame type | ^ ^
1980 // |----------------------------------------------------| | |
1981 // | prev rbp | | |
1982 // |----------------------------------------------------| | |
1983 // | pc | | |
1984 // |----------------------------------------------------| PushAsmInterpBridgeFrame total skip
1985 // | pushAlignBytes | | |
1986 // |----------------------------------------------------| | |
1987 // | 5 callee save regs(r12,r13,r14,r15,rbx) | | |
1988 // |----------------------------------------------------| v |
1989 // | lr | |
1990 // +----------------------------------------------------+ v
1991 // Base on PushAsmInterpBridgeFrame, need to skip AsmInterpBridgeFrame size, callee Save Registers(5)
1992 // and PushAlignBytes(1)
1993 int32_t skipNum = static_cast<int32_t>(AsmInterpretedBridgeFrame::GetSize(false)) / FRAME_SLOT_SIZE + 5 + 1;
1994 __ Leaq(Operand(rbp, -skipNum * FRAME_SLOT_SIZE), rsp);
1995 __ Ret();
1996 }
1997
// Pending-exception check hook.
// NOTE(review): intentionally emits no code on x64 — the check is presumably
// handled elsewhere on this architecture; confirm before relying on this stub.
void AsmInterpreterCall::HasPendingException([[maybe_unused]] ExtendedAssembler *assembler,
                                             [[maybe_unused]] Register threadRegister)
{
}
2002 #undef __
2003 } // namespace panda::ecmascript::x64