/*
 * Copyright (c) 2023-2024 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "ecmascript/compiler/trampoline/x64/common_call.h"

#include "ecmascript/message_string.h"

namespace panda::ecmascript::x64 {
#define __ assembler->

// * uint64_t OptimizedFastCallEntry(uintptr_t glue, uint32_t actualNumArgs, const JSTaggedType argV[],
//                                   uintptr_t prevFp)
// * Arguments:
//        %rdi - glue
//        %rsi - actualNumArgs
//        %rdx - argV
//        %rcx - prevFp
void OptimizedFastCall::OptimizedFastCallEntry(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(OptimizedFastCallEntry));
    Register glueReg = rdi;
    Register argv = rdx;
    Register prevFpReg = rcx;

    OptimizedCall::PushJSFunctionEntryFrame(assembler, prevFpReg);
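    // Shuffle the entry arguments into JSFastCallWithArgV's convention:
    // %rdi glue, %rsi func, %rdx this, %rcx actualNumArgs, %r8 argv (advanced past func and this).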
    __ Movq(argv, r8);
    __ Movq(rsi, rcx);
    __ Movq(Operand(r8, 0), rsi); // func
    __ Movq(Operand(r8, FRAME_SLOT_SIZE), rdx); // thisobj
    __ Addq(DOUBLE_SLOT_SIZE, r8); // skip func and thisobj to get the call args
    __ CallAssemblerStub(RTSTUB_ID(JSFastCallWithArgV), false);

    __ Popq(prevFpReg);
    __ Addq(FRAME_SLOT_SIZE, rsp); // 8: frame type
    __ Popq(rbp);
    __ Popq(glueReg); // caller restore
    __ PopCppCalleeSaveRegisters(); // callee restore
    __ Movq(prevFpReg, Operand(glueReg, JSThread::GlueData::GetLeaveFrameOffset(false)));
    __ Ret();
}


// * uint64_t OptimizedFastCallAndPushArgv(uintptr_t glue, uint32_t expectedNumArgs, uint32_t actualNumArgs,
//                                         uintptr_t codeAddr, uintptr_t argv)
// * Arguments with CC calling convention:
//        %rdi - glue
//        %rsi - actualNumArgs
//        %rdx - actualArgv
//        %rcx - func
//        %r8  - new target
//        %r9  - this
// * The OptimizedJSFunctionArgsConfig Frame's structure is illustrated as follows:
//            +--------------------------+
//            |         arg[N-1]         |
//            +--------------------------+
//            |         . . . .          |
//            +--------------------------+
//            |          arg[0]          |
//            +--------------------------+
//            |           argC           |
//  sp  --->  +--------------------------+ -----------------
//            |                          |                 ^
//            |          prevFP          |                 |
//            |--------------------------| OptimizedJSFunctionArgsConfigFrame
//            |         frameType        |                 |
//            |                          |                 V
//            +--------------------------+ -----------------
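// * Note: the stub decodes the expected argument count from the method's call field, fills the first three
//   call args into %rcx/%r8/%r9 (undefined when missing), pushes any remaining args or undefined padding
//   onto the stack with 16-byte alignment, and then calls the function's code entry.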
void OptimizedFastCall::OptimizedFastCallAndPushArgv(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(OptimizedFastCallAndPushArgv));
    Register actualNumArgsReg = rsi;
    Register jsFuncReg = rcx;
    Register thisObj = r9;
    Label lCopyExtraAument1;
    Label lCopyExtraUndefineToSp;
    Label lCopyLoop1;
    Label lCopyLoop2;
    Label pushUndefined;
    Label call;
    Label arg4;
    Label argc;
    Label checkExpectedArgs;
    JsFunctionArgsConfigFrameScope scope(assembler); // push frametype and callee save
    __ Movq(actualNumArgsReg, r13);
    actualNumArgsReg = r13;
    __ Movq(rcx, rsi); // move func into rsi (which held actualNumArgs)
    jsFuncReg = rsi;
    __ Movq(thisObj, rdx); // move this into rdx (which held actualArgv)
    Register method = r14;
    Register methodCallField = rbx;
    Register codeAddrReg = rax;
    Register argvReg = r12;
    __ Leaq(Operand(rsp, 8 * FRAME_SLOT_SIZE), argvReg); // 8: skip 8 slots to get argv
    __ Mov(Operand(jsFuncReg, JSFunctionBase::METHOD_OFFSET), method); // get method
    __ Mov(Operand(jsFuncReg, JSFunctionBase::CODE_ENTRY_OFFSET), codeAddrReg); // get codeAddress
    __ Mov(Operand(method, Method::CALL_FIELD_OFFSET), methodCallField); // get call field
    __ Shr(MethodLiteral::NumArgsBits::START_BIT, methodCallField);
    __ Andl(((1LU << MethodLiteral::NumArgsBits::SIZE) - 1), methodCallField);
    __ Addl(NUM_MANDATORY_JSFUNC_ARGS, methodCallField); // add mandatory arguments
    Register expectedNumArgsReg = rbx;
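    // Dispatch on the actual argument count: 3 means only func/new/this, so rcx/r8/r9 all get undefined;
    // 4, 5 and 6 fill as many of them as exist from argv (undefined for the rest); 7 or more falls through
    // to &argc, where the remaining args are copied onto the stack.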

    Label arg5;
    Label arg6;
    __ Cmp(Immediate(3), actualNumArgsReg); // 3: func new this
    __ Jne(&arg4);
    __ Movq(JSTaggedValue::VALUE_UNDEFINED, rcx);
    __ Movq(JSTaggedValue::VALUE_UNDEFINED, r8);
    __ Movq(JSTaggedValue::VALUE_UNDEFINED, r9);
    __ Subq(3, expectedNumArgsReg); // 3: skip 3 register
    __ Jmp(&checkExpectedArgs);

    __ Bind(&arg4);
    {
        __ Movq(Operand(argvReg, 0), rcx);
        __ Addq(FRAME_SLOT_SIZE, argvReg);
        __ Cmp(Immediate(4), actualNumArgsReg); // 4: func new this arg0
        __ Jne(&arg5);
        __ Movq(JSTaggedValue::VALUE_UNDEFINED, r8);
        __ Movq(JSTaggedValue::VALUE_UNDEFINED, r9);
        __ Subq(3, expectedNumArgsReg); // 3: skip 3 register
        __ Jmp(&checkExpectedArgs);
    }

    __ Bind(&arg5);
    {
        __ Movq(Operand(argvReg, 0), r8);
        __ Addq(FRAME_SLOT_SIZE, argvReg);
        __ Cmp(Immediate(5), actualNumArgsReg); // 5: 5 args
        __ Jne(&arg6);
        __ Movq(JSTaggedValue::VALUE_UNDEFINED, r9);
        __ Subq(3, expectedNumArgsReg); // 3: skip 3 register
        __ Jmp(&checkExpectedArgs);
    }

    __ Bind(&arg6);
    {
        __ Movq(Operand(argvReg, 0), r9);
        __ Addq(FRAME_SLOT_SIZE, argvReg);
        __ Cmp(Immediate(6), actualNumArgsReg); // 6: 6 args
        __ Jne(&argc);
        __ Subq(3, expectedNumArgsReg); // 3: skip above 3 args
        __ Jmp(&checkExpectedArgs);
    }

    __ Bind(&argc); // actualNumArgsReg >= 7
    {
        __ Cmpq(expectedNumArgsReg, actualNumArgsReg);
        __ Jb(&pushUndefined);
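        // Two copy paths: if actualNumArgs >= expectedNumArgs, push the remaining actual args (lCopyLoop2);
        // otherwise pad with undefined up to the expected count first (pushUndefined), then push the actual args.
        // A zero slot is pushed first when the slot count is odd, keeping rsp 16-byte aligned.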
        // 16 bytes align check
        __ Subq(6, actualNumArgsReg); // 6: skip above 6 args
        __ Subq(6, expectedNumArgsReg); // 6: skip above 6 args
        __ Testb(1, actualNumArgsReg);
        __ Je(&lCopyLoop2);
        __ Pushq(0);
        __ Bind(&lCopyLoop2);
        __ Movq(Operand(argvReg, actualNumArgsReg, Scale::Times8, -FRAME_SLOT_SIZE), r14); // -8: stack index
        __ Pushq(r14);
        __ Subq(1, actualNumArgsReg);
        __ Jne(&lCopyLoop2);
        __ Jmp(&call);

        __ Bind(&pushUndefined);
        // 16 bytes align check
        __ Subq(6, actualNumArgsReg); // 6: skip above 6 args
        __ Subq(6, expectedNumArgsReg); // 6: skip above 6 args
        __ Testb(1, expectedNumArgsReg);
        __ Je(&lCopyExtraAument1);
        __ Pushq(0);
        __ Bind(&lCopyExtraAument1); // copy undefined value to stack
        __ Pushq(JSTaggedValue::VALUE_UNDEFINED);
        __ Subq(1, expectedNumArgsReg);
        __ Cmpq(actualNumArgsReg, expectedNumArgsReg);
        __ Ja(&lCopyExtraAument1);
        __ Bind(&lCopyLoop1);
        __ Movq(Operand(argvReg, expectedNumArgsReg, Scale::Times8, -FRAME_SLOT_SIZE), r14); // -8: stack index
        __ Pushq(r14);
        __ Subq(1, expectedNumArgsReg);
        __ Jne(&lCopyLoop1);
        __ Jmp(&call);
    }

    __ Bind(&checkExpectedArgs); // actualNumArgsReg < 7
    {
        __ Cmp(Immediate(3), expectedNumArgsReg); // 3: expectedNumArgsReg <= 3 jump
        __ Jbe(&call);
        // expectedNumArgsReg > 6, expectedNumArgsReg > actualNumArgsReg
        __ Subq(3, expectedNumArgsReg); // 3: skip func new this
        __ Testb(1, expectedNumArgsReg);
        __ Je(&lCopyExtraUndefineToSp);
        __ Pushq(0); // expectedNumArgsReg is odd, need alignment
        __ Bind(&lCopyExtraUndefineToSp); // copy undefined value to stack
        __ Pushq(JSTaggedValue::VALUE_UNDEFINED);
        __ Subq(1, expectedNumArgsReg);
        __ Cmp(0, expectedNumArgsReg);
        __ Ja(&lCopyExtraUndefineToSp);
        __ Jmp(&call);
    }
    __ Bind(&call);
    __ Callq(codeAddrReg); // then call jsFunction
}

// * uint64_t JSFastCallWithArgV(uintptr_t glue, uint32_t actualNumArgs, const JSTaggedType argV[], uintptr_t prevFp,
//                               size_t callType)
// * Arguments with CC calling convention:
//        %rdi - glue
//        %rsi - func
//        %rdx - this
//        %rcx - actualNumArgs
//        %r8  - argv

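// * Note: loads the first three call args from argv into %rcx/%r8/%r9, copies any remaining args onto the
//   stack (with 16-byte alignment padding) and then calls the function's code entry.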
void OptimizedFastCall::JSFastCallWithArgV(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(JSFastCallWithArgV));
    Register sp(rsp);
    Register callsiteSp = __ AvailableRegister2();
    Label align16Bytes;
    Label call;

    __ Movq(sp, callsiteSp);
    __ Addq(Immediate(FRAME_SLOT_SIZE), callsiteSp); // 8 : 8 means skip pc to get last callsitesp
    OptimizedUnfoldArgVFrameFrameScope scope(assembler); // push frametype and callee save
    __ Movq(rcx, r12);
    __ Movq(r8, rbx);
    Register actualNumArgs(r12);
    Register argV(rbx);

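    // Move up to three leading args from argV into rcx/r8/r9, stopping as soon as actualNumArgs reaches zero.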
    __ Cmp(0, actualNumArgs);
    __ Jz(&call);
    __ Movq(Operand(argV, 0), rcx); // first arg
    __ Addq(FRAME_SLOT_SIZE, argV);
    __ Addq(-1, actualNumArgs);

    __ Cmp(0, actualNumArgs);
    __ Jz(&call);
    __ Movq(Operand(argV, 0), r8); // second arg
    __ Addq(FRAME_SLOT_SIZE, argV);
    __ Addq(-1, actualNumArgs);

    __ Cmp(0, actualNumArgs);
    __ Jz(&call);
    __ Movq(Operand(argV, 0), r9); // third arg
    __ Addq(FRAME_SLOT_SIZE, argV);
    __ Addq(-1, actualNumArgs);

    __ Cmp(0, actualNumArgs);
    __ Jz(&call);

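    // More than three args remain: pad the stack when the remaining count is odd (16-byte alignment),
    // then copy the rest of argV onto the stack.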
    __ Testb(1, actualNumArgs);
    __ Je(&align16Bytes);
    __ PushAlignBytes();
    __ Bind(&align16Bytes);
    __ Mov(actualNumArgs, rax);
    CopyArgumentWithArgV(assembler, rax, argV);

    __ Bind(&call);
    Register method = r12;
    Register jsFuncReg = rsi;
    __ Mov(Operand(jsFuncReg, JSFunctionBase::METHOD_OFFSET), method); // get method
    __ Mov(Operand(jsFuncReg, JSFunctionBase::CODE_ENTRY_OFFSET), rbx); // get codeAddress
    __ Callq(rbx);
}

// * Arguments with CC calling convention:
//        %rdi - glue
//        %rsi - func
//        %rdx - this
//        %rcx - actualNumArgs
//        %r8  - argv
//        %r9  - expectedNumArgs

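// * Note: same as JSFastCallWithArgV, but missing args are padded with undefined up to expectedNumArgs;
//   the first three call args go in %rcx/%r8/%r9, the rest onto the stack with 16-byte alignment.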
void OptimizedFastCall::JSFastCallWithArgVAndPushArgv(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(JSFastCallWithArgVAndPushArgv));
    Register sp(rsp);
    Register callsiteSp = __ AvailableRegister2();
    Label call;
    Label lCopyExtraAument1;
    Label lCopyExtraUndefineToSp;
    Label lCopyLoop1;
    Label lCopyLoop2;
    Label pushUndefined;
    Label arg1;
    Label arg2;
    Label arg3;
    Label argc;
    Label checkExpectedArgs;

    __ Movq(sp, callsiteSp);
    __ Addq(Immediate(FRAME_SLOT_SIZE), callsiteSp); // 8 : 8 means skip pc to get last callsitesp
    OptimizedUnfoldArgVFrameFrame1Scope scope(assembler);

    __ Movq(rcx, r12);
    __ Movq(r8, rbx);
    __ Movq(r9, r14);
    Register actualNumArgsReg(r12);
    Register expectedNumArgsReg(r14);
    Register argV(rbx);

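    // Dispatch on the actual argument count: 0 fills rcx/r8/r9 with undefined, 1, 2 and 3 fill as many of
    // them as exist from argV (undefined for the rest), and 4 or more falls through to &argc, where the
    // remaining args are copied onto the stack.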
    __ Cmp(0, actualNumArgsReg);
    __ Jne(&arg1);
    __ Movq(JSTaggedValue::VALUE_UNDEFINED, rcx);
    __ Movq(JSTaggedValue::VALUE_UNDEFINED, r8);
    __ Movq(JSTaggedValue::VALUE_UNDEFINED, r9);
    __ Jmp(&checkExpectedArgs);

    __ Bind(&arg1);
    {
        __ Movq(Operand(argV, 0), rcx); // first arg
        __ Addq(FRAME_SLOT_SIZE, argV);
        __ Cmp(1, actualNumArgsReg);
        __ Jne(&arg2);
        __ Movq(JSTaggedValue::VALUE_UNDEFINED, r8);
        __ Movq(JSTaggedValue::VALUE_UNDEFINED, r9);
        __ Jmp(&checkExpectedArgs);
    }

    __ Bind(&arg2);
    {
        __ Movq(Operand(argV, 0), r8); // second arg
        __ Addq(FRAME_SLOT_SIZE, argV);
        __ Cmp(2, actualNumArgsReg); // 2: 2 args
        __ Jne(&arg3);
        __ Movq(JSTaggedValue::VALUE_UNDEFINED, r9);
        __ Jmp(&checkExpectedArgs);
    }

    __ Bind(&arg3);
    {
        __ Movq(Operand(argV, 0), r9); // third arg
        __ Addq(FRAME_SLOT_SIZE, argV);
        __ Cmp(3, actualNumArgsReg); // 3: 3 args
        __ Jne(&argc);
        __ Jmp(&checkExpectedArgs);
    }

    __ Bind(&argc); // actualNumArgsReg >= 4
    {
        __ Cmpq(expectedNumArgsReg, actualNumArgsReg);
        __ Jb(&pushUndefined);
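        // Same two copy paths as in OptimizedFastCallAndPushArgv: copy the remaining actual args directly,
        // or pad with undefined up to the expected count first when actualNumArgs < expectedNumArgs.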
        __ Subq(3, actualNumArgsReg); // 3: skip above 3 args
        __ Subq(3, expectedNumArgsReg); // 3: skip above 3 args
        __ Testb(1, actualNumArgsReg);
        __ Je(&lCopyLoop2);
        __ Pushq(0);
        __ Bind(&lCopyLoop2);
        __ Movq(Operand(argV, actualNumArgsReg, Scale::Times8, -FRAME_SLOT_SIZE), r13); // -8: stack index
        __ Pushq(r13);
        __ Subq(1, actualNumArgsReg);
        __ Jne(&lCopyLoop2);
        __ Jmp(&call);

        __ Bind(&pushUndefined);
        __ Subq(3, actualNumArgsReg); // 3: skip above 3 args
        __ Subq(3, expectedNumArgsReg); // 3: skip above 3 args
        __ Testb(1, expectedNumArgsReg);
        __ Je(&lCopyExtraAument1);
        __ Pushq(0);
        __ Bind(&lCopyExtraAument1); // copy undefined value to stack
        __ Pushq(JSTaggedValue::VALUE_UNDEFINED);
        __ Subq(1, expectedNumArgsReg);
        __ Cmpq(actualNumArgsReg, expectedNumArgsReg);
        __ Ja(&lCopyExtraAument1);
        __ Bind(&lCopyLoop1);
        __ Movq(Operand(argV, expectedNumArgsReg, Scale::Times8, -FRAME_SLOT_SIZE), r13); // -8: stack index
        __ Pushq(r13);
        __ Subq(1, expectedNumArgsReg);
        __ Jne(&lCopyLoop1);
        __ Jmp(&call);
    }

    __ Bind(&checkExpectedArgs);
    {
        __ Cmp(Immediate(3), expectedNumArgsReg); // 3: expectedNumArgsReg <= 3 jump
        __ Jbe(&call);
        __ Subq(3, expectedNumArgsReg); // 3: skip func new this
        __ Testb(1, expectedNumArgsReg);
        __ Je(&lCopyExtraUndefineToSp);
        __ Pushq(0); // expectedNumArgsReg is odd, need alignment
        __ Bind(&lCopyExtraUndefineToSp); // copy undefined value to stack
        __ Pushq(JSTaggedValue::VALUE_UNDEFINED);
        __ Subq(1, expectedNumArgsReg);
        __ Cmp(0, expectedNumArgsReg);
        __ Ja(&lCopyExtraUndefineToSp);
        __ Jmp(&call);
    }
    __ Bind(&call);
    Register method = r12;
    Register jsFuncReg = rsi;
    __ Mov(Operand(jsFuncReg, JSFunctionBase::METHOD_OFFSET), method); // get method
    __ Mov(Operand(jsFuncReg, JSFunctionBase::CODE_ENTRY_OFFSET), rbx); // get codeAddress
    __ Callq(rbx);
}
#undef __
} // namespace panda::ecmascript::x64