/**
 * Copyright (c) 2023-2024 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "arch/asm_support.h"
#include "arch/amd64/helpers_amd64.S"
#include "arch/amd64/shorty.S"
#include "shorty_values.h"

// Promise EtsAsyncCall(Method *method, ManagedThread *thread, const uint8_t *reg_args, const uint8_t *stack_args)
.extern EtsAsyncCall

// The entrypoint for an async method
.global EtsAsyncEntryPoint
.type EtsAsyncEntryPoint, %function
EtsAsyncEntryPoint:
    CFI_STARTPROC
    CFI_DEF_CFA(rsp, 8)

    // %rsp % 16 == 8
    pushq %rbp
    CFI_ADJUST_CFA_OFFSET(8)
    CFI_REL_OFFSET(rbp, 0)

    movq %rsp, %rbp // frame pointer
    CFI_DEF_CFA_REGISTER(rbp)

    pushq %rdi
    pushq $CFRAME_KIND_NATIVE

    // Skip locals
    subq $(CFRAME_LOCALS_COUNT * 8), %rsp

    // Save all the callee-saved registers to the stack;
    // the stack walker will read them during stack unwinding
    pushq %r15
    CFI_REL_OFFSET(r15, -((CFRAME_CALLEE_REGS_START_SLOT + 0) * 8))
    pushq %r14
    CFI_REL_OFFSET(r14, -((CFRAME_CALLEE_REGS_START_SLOT + 1) * 8))
    pushq %r13
    CFI_REL_OFFSET(r13, -((CFRAME_CALLEE_REGS_START_SLOT + 2) * 8))
    pushq %r12
    CFI_REL_OFFSET(r12, -((CFRAME_CALLEE_REGS_START_SLOT + 3) * 8))
    pushq %rbx
    CFI_REL_OFFSET(rbx, -((CFRAME_CALLEE_REGS_START_SLOT + 4) * 8))

    subq $8, %rsp
    // %rsp % 16 == 0

    // Save the arguments to the stack
    PUSH_FP_REGS
    PUSH_GENERAL_REGS
    // %rsp % 16 == 0
    movq %rsp, %rdx // reg_args

    // Save the original frame kind
    movb MANAGED_THREAD_FRAME_KIND_OFFSET(%THREAD_REG), %r12b

    // Update the current frame in the thread
    movq %rbp, MANAGED_THREAD_FRAME_OFFSET(%THREAD_REG)
    movb $1, MANAGED_THREAD_FRAME_KIND_OFFSET(%THREAD_REG)

    // %rdi already contains the Method
    movq %THREAD_REG, %rsi // thread
    leaq 16(%rbp), %rcx    // stack_args
    callq EtsAsyncCall@plt

    // Restore the original frame kind and keep a copy in %r10b
    movb %r12b, MANAGED_THREAD_FRAME_KIND_OFFSET(%THREAD_REG)
    movb %r12b, %r10b

    // Restore the callee-saved registers, since the GC may change their values while moving objects.
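    // Note: the registers are reloaded from their CFrame slots below instead of being
    // popped, because EtsAsyncCall may trigger a GC: the stack walker visits these
    // slots, and any object references held in callee-saved registers may have been
    // updated there when objects were moved.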
    movq -((CFRAME_CALLEE_REGS_START_SLOT + 0) * 8)(%rbp), %r15
    CFI_RESTORE(r15)
    movq -((CFRAME_CALLEE_REGS_START_SLOT + 1) * 8)(%rbp), %r14
    CFI_RESTORE(r14)
    movq -((CFRAME_CALLEE_REGS_START_SLOT + 2) * 8)(%rbp), %r13
    CFI_RESTORE(r13)
    movq -((CFRAME_CALLEE_REGS_START_SLOT + 3) * 8)(%rbp), %r12
    CFI_RESTORE(r12)
    movq -((CFRAME_CALLEE_REGS_START_SLOT + 4) * 8)(%rbp), %rbx
    CFI_RESTORE(rbx)
    movq %rbp, %rsp
    popq %rbp
    CFI_RESTORE(rbp)
    CFI_DEF_CFA(rsp, (1 * 8))

    // Check for a pending exception
    movq MANAGED_THREAD_EXCEPTION_OFFSET(%THREAD_REG), %r11
    testq %r11, %r11
    jz .Lexit

    // Check that the previous frame is a compiled frame
    testb %r10b, %r10b
    jz .Lexit

    // Check that the previous frame is a true CFRAME and not the BYPASS bridge
    cmpq $BYPASS_BRIDGE, (SLOT_SIZE * COMP_METHOD_OFFSET)(%rbp)
    je .Lexit

    // Reload the caller-saved general-purpose registers from the CFrame
    movq (-CALLER_REG0_OFFSET + 0)(%rbp), %rax
    movq (-CALLER_REG0_OFFSET + 8)(%rbp), %rcx
    movq (-CALLER_REG0_OFFSET + 16)(%rbp), %rdx
    movq (-CALLER_REG0_OFFSET + 24)(%rbp), %r11
    movq (-CALLER_REG0_OFFSET + 32)(%rbp), %r10
    movq (-CALLER_REG0_OFFSET + 40)(%rbp), %r9
    movq (-CALLER_REG0_OFFSET + 48)(%rbp), %rsi
    movq (-CALLER_REG0_OFFSET + 56)(%rbp), %rdi
    movq (-CALLER_REG0_OFFSET + 64)(%rbp), %r8

    // Reload the float registers only if the frame holds them
    testq $CFRAME_HAS_FLOAT_REGS_FLAG_MASK, (-CFRAME_FLAGS_SLOT * SLOT_SIZE)(%rbp)
    jz 1f

    movsd (-CALLER_VREG0_OFFSET + 0)(%rbp), %xmm0
    movsd (-CALLER_VREG0_OFFSET + 8)(%rbp), %xmm1
    movsd (-CALLER_VREG0_OFFSET + 16)(%rbp), %xmm2
    movsd (-CALLER_VREG0_OFFSET + 24)(%rbp), %xmm3
    movsd (-CALLER_VREG0_OFFSET + 32)(%rbp), %xmm4
    movsd (-CALLER_VREG0_OFFSET + 40)(%rbp), %xmm5
    movsd (-CALLER_VREG0_OFFSET + 48)(%rbp), %xmm6
    movsd (-CALLER_VREG0_OFFSET + 56)(%rbp), %xmm7
    movsd (-CALLER_VREG0_OFFSET + 64)(%rbp), %xmm8
    movsd (-CALLER_VREG0_OFFSET + 72)(%rbp), %xmm9
    movsd (-CALLER_VREG0_OFFSET + 80)(%rbp), %xmm10
    movsd (-CALLER_VREG0_OFFSET + 88)(%rbp), %xmm11
    movsd (-CALLER_VREG0_OFFSET + 96)(%rbp), %xmm12
    movsd (-CALLER_VREG0_OFFSET + 104)(%rbp), %xmm13
    movsd (-CALLER_VREG0_OFFSET + 112)(%rbp), %xmm14
    movsd (-CALLER_VREG0_OFFSET + 120)(%rbp), %xmm15

1:
    jmp ThrowNativeExceptionBridge@plt

.Lexit:
    retq
    CFI_ENDPROC
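
// Exit paths (summary of the code above):
//  - .Lexit: there is no pending exception, or the previous frame is not a compiled
//    CFRAME (or it is the BYPASS bridge); return normally with EtsAsyncCall's return
//    value in %rax, leaving any pending exception set on the thread.
//  - otherwise: the caller's registers are reloaded from the CFrame and control jumps
//    to ThrowNativeExceptionBridge to rethrow the pending exception in the compiled caller.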