/**
 * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "arch/asm_support.h"
#include "arch/amd64/helpers_amd64.S"

.macro SAVE_CALLEE_GP_REGS base_reg, offset
    movq %r15, (\offset - CALLEE_REG0_OFFSET + 8*4)(%\base_reg)
    CFI_REL_OFFSET(r15, (\offset - CALLEE_REG0_OFFSET + 8*4))
    movq %r14, (\offset - CALLEE_REG0_OFFSET + 8*3)(%\base_reg)
    CFI_REL_OFFSET(r14, (\offset - CALLEE_REG0_OFFSET + 8*3))
    movq %r13, (\offset - CALLEE_REG0_OFFSET + 8*2)(%\base_reg)
    CFI_REL_OFFSET(r13, (\offset - CALLEE_REG0_OFFSET + 8*2))
    movq %r12, (\offset - CALLEE_REG0_OFFSET + 8*1)(%\base_reg)
    CFI_REL_OFFSET(r12, (\offset - CALLEE_REG0_OFFSET + 8*1))
    movq %rbx, (\offset - CALLEE_REG0_OFFSET + 8*0)(%\base_reg)
    CFI_REL_OFFSET(rbx, (\offset - CALLEE_REG0_OFFSET + 8*0))
.endm

.macro RESTORE_CALLEE_GP_REGS base_reg, offset
    movq (\offset - CALLEE_REG0_OFFSET + 8*4)(%\base_reg), %r15
    CFI_RESTORE(r15)
    movq (\offset - CALLEE_REG0_OFFSET + 8*3)(%\base_reg), %r14
    CFI_RESTORE(r14)
    movq (\offset - CALLEE_REG0_OFFSET + 8*2)(%\base_reg), %r13
    CFI_RESTORE(r13)
    movq (\offset - CALLEE_REG0_OFFSET + 8*1)(%\base_reg), %r12
    CFI_RESTORE(r12)
    movq (\offset - CALLEE_REG0_OFFSET + 8*0)(%\base_reg), %rbx
    CFI_RESTORE(rbx)
.endm

.macro SAVE_CALLER_GP_REGS fp_reg, paramsnum
    movq %rax, (-CALLER_REG0_OFFSET + 0)(%\fp_reg)
.ifle \paramsnum-3
    movq %rcx, (-CALLER_REG0_OFFSET + 8)(%\fp_reg)
.endif
.ifle \paramsnum-2
    movq %rdx, (-CALLER_REG0_OFFSET + 16)(%\fp_reg)
.endif
    movq %r11, (-CALLER_REG0_OFFSET + 24)(%\fp_reg)
    movq %r10, (-CALLER_REG0_OFFSET + 32)(%\fp_reg)
.ifle \paramsnum-5
    movq %r9, (-CALLER_REG0_OFFSET + 40)(%\fp_reg)
.endif
.ifle \paramsnum-1
    movq %rsi, (-CALLER_REG0_OFFSET + 48)(%\fp_reg)
.endif
.ifeq \paramsnum
    movq %rdi, (-CALLER_REG0_OFFSET + 56)(%\fp_reg)
.endif
.ifle \paramsnum-4
    movq %r8, (-CALLER_REG0_OFFSET + 64)(%\fp_reg)
.endif
.endm

.macro RESTORE_CALLER_GP_REGS fp_reg, is_void
.if \is_void == 1
    movq (-CALLER_REG0_OFFSET + 0)(%\fp_reg), %rax
.endif
    movq (-CALLER_REG0_OFFSET + 8)(%\fp_reg), %rcx
    movq (-CALLER_REG0_OFFSET + 16)(%\fp_reg), %rdx
    movq (-CALLER_REG0_OFFSET + 24)(%\fp_reg), %r11
    movq (-CALLER_REG0_OFFSET + 32)(%\fp_reg), %r10
    movq (-CALLER_REG0_OFFSET + 40)(%\fp_reg), %r9
    movq (-CALLER_REG0_OFFSET + 48)(%\fp_reg), %rsi
    movq (-CALLER_REG0_OFFSET + 56)(%\fp_reg), %rdi
    movq (-CALLER_REG0_OFFSET + 64)(%\fp_reg), %r8
.endm

.macro SAVE_CALLER_FP_REGS fp_reg
    movsd %xmm0, (-CALLER_VREG0_OFFSET + 0)(%\fp_reg)
    movsd %xmm1, (-CALLER_VREG0_OFFSET + 8)(%\fp_reg)
    movsd %xmm2, (-CALLER_VREG0_OFFSET + 16)(%\fp_reg)
    movsd %xmm3, (-CALLER_VREG0_OFFSET + 24)(%\fp_reg)
    movsd %xmm4, (-CALLER_VREG0_OFFSET + 32)(%\fp_reg)
    movsd %xmm5, (-CALLER_VREG0_OFFSET + 40)(%\fp_reg)
    movsd %xmm6, (-CALLER_VREG0_OFFSET + 48)(%\fp_reg)
    movsd %xmm7, (-CALLER_VREG0_OFFSET + 56)(%\fp_reg)
    movsd %xmm8, (-CALLER_VREG0_OFFSET + 64)(%\fp_reg)
    movsd %xmm9, (-CALLER_VREG0_OFFSET + 72)(%\fp_reg)
    movsd %xmm10, (-CALLER_VREG0_OFFSET + 80)(%\fp_reg)
    movsd %xmm11, (-CALLER_VREG0_OFFSET + 88)(%\fp_reg)
    movsd %xmm12, (-CALLER_VREG0_OFFSET + 96)(%\fp_reg)
    movsd %xmm13, (-CALLER_VREG0_OFFSET + 104)(%\fp_reg)
    movsd %xmm14, (-CALLER_VREG0_OFFSET + 112)(%\fp_reg)
    movsd %xmm15, (-CALLER_VREG0_OFFSET + 120)(%\fp_reg)
.endm

.macro RESTORE_CALLER_FP_REGS fp_reg
    movsd (-CALLER_VREG0_OFFSET + 0)(%\fp_reg), %xmm0
    movsd (-CALLER_VREG0_OFFSET + 8)(%\fp_reg), %xmm1
    movsd (-CALLER_VREG0_OFFSET + 16)(%\fp_reg), %xmm2
    movsd (-CALLER_VREG0_OFFSET + 24)(%\fp_reg), %xmm3
    movsd (-CALLER_VREG0_OFFSET + 32)(%\fp_reg), %xmm4
    movsd (-CALLER_VREG0_OFFSET + 40)(%\fp_reg), %xmm5
    movsd (-CALLER_VREG0_OFFSET + 48)(%\fp_reg), %xmm6
    movsd (-CALLER_VREG0_OFFSET + 56)(%\fp_reg), %xmm7
    movsd (-CALLER_VREG0_OFFSET + 64)(%\fp_reg), %xmm8
    movsd (-CALLER_VREG0_OFFSET + 72)(%\fp_reg), %xmm9
    movsd (-CALLER_VREG0_OFFSET + 80)(%\fp_reg), %xmm10
    movsd (-CALLER_VREG0_OFFSET + 88)(%\fp_reg), %xmm11
    movsd (-CALLER_VREG0_OFFSET + 96)(%\fp_reg), %xmm12
    movsd (-CALLER_VREG0_OFFSET + 104)(%\fp_reg), %xmm13
    movsd (-CALLER_VREG0_OFFSET + 112)(%\fp_reg), %xmm14
    movsd (-CALLER_VREG0_OFFSET + 120)(%\fp_reg), %xmm15
.endm

.macro BRIDGE_SELECTOR name, notcompiled_entry, compiled_entry
.global \name
TYPE_FUNCTION(\name)
\name:
    movb MANAGED_THREAD_FRAME_KIND_OFFSET(%THREAD_REG), %r10b
    testb %r10b, %r10b
    jnz \compiled_entry@plt
    jmp \notcompiled_entry@plt
.endm

.macro RUNTIME_CALL_CHECKER name, entry
.global \name
TYPE_FUNCTION(\name)
\name:
    CFI_STARTPROC
    CFI_DEF_CFA(rsp, 8)

    movb MANAGED_THREAD_RUNTIME_CALL_ENABLED_OFFSET(%THREAD_REG), %r14b
    pushq %r14

    movb $0, MANAGED_THREAD_RUNTIME_CALL_ENABLED_OFFSET(%THREAD_REG)

    call \entry@plt

    popq %r14
    movb %r14b, MANAGED_THREAD_RUNTIME_CALL_ENABLED_OFFSET(%THREAD_REG)

    // return to the caller
    retq
    CFI_ENDPROC
.endm

.macro CALL_RUNTIME mode, entry, paramsnum, is_void
    subq $(BRIDGE_FRAME_SIZE - 8), %rsp
    CFI_ADJUST_CFA_OFFSET((BRIDGE_FRAME_SIZE - 8))

    SAVE_CALLEE_GP_REGS rsp, BRIDGE_FRAME_SIZE

    // Bridge frame:
    // [1] native_pc = retaddr
    // [2] COMPILED_CODE_TO_INTERPRETER_BRIDGE flag
    // [3] parent frame pointer

    // Bridge frame, slot 1 = npc = retaddr (StackMap stays just after the bridge call)
    mov (BRIDGE_FRAME_SIZE - 1 * 8)(%rsp), %r14
    // ManagedThread.npc update
    mov %r14, MANAGED_THREAD_NATIVE_PC_OFFSET(%THREAD_REG)

    // Bridge frame, slot 2 = COMPILED_CODE_TO_INTERPRETER_BRIDGE flag
    movq $COMPILED_CODE_TO_INTERPRETER_BRIDGE, (BRIDGE_FRAME_SIZE - 2 * 8)(%rsp)
    // Bridge frame, slot 3 = parent frame pointer
    mov %rbp, (BRIDGE_FRAME_SIZE - 3 * 8)(%rsp)
    CFI_REL_OFFSET(rbp, (BRIDGE_FRAME_SIZE - 3 * 8))

    leaq (BRIDGE_FRAME_SIZE - 3 * 8)(%rsp), %r13
    // ManagedThread._frame = this boundary frame
    mov %r13, MANAGED_THREAD_FRAME_OFFSET(%THREAD_REG)

.if \mode != RUNTIME_MODE_SLOW_PATH
    SAVE_CALLER_GP_REGS rbp, \paramsnum
.endif

    movq (-CFRAME_FLAGS_SLOT * 8)(%rbp), %r12
    testq $CFRAME_HAS_FLOAT_REGS_FLAG_MASK, %r12
    jz 1f

    SAVE_CALLER_FP_REGS rbp

1:
    // call the runtime entrypoint
    call \entry@plt

    // ManagedThread._frame = parent frame pointer
    movq %rbp, MANAGED_THREAD_FRAME_OFFSET(%THREAD_REG)

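    // Unwind the bridge: restore the caller-saved GP registers (keeping the
    // entrypoint's return value in %rax when is_void == 0), restore the
    // caller-saved FP registers only if the CFrame flags captured in %r12
    // report live float registers, drop the bridge frame, and rethrow a
    // pending exception, if any, via ThrowNativeExceptionBridge.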
    RESTORE_CALLER_GP_REGS rbp, \is_void

    testq $CFRAME_HAS_FLOAT_REGS_FLAG_MASK, %r12
    jz 2f

    RESTORE_CALLER_FP_REGS rbp

2:
    RESTORE_CALLEE_GP_REGS rsp, BRIDGE_FRAME_SIZE

    addq $(BRIDGE_FRAME_SIZE - 8), %rsp
    CFI_ADJUST_CFA_OFFSET(-(BRIDGE_FRAME_SIZE - 8))

    cmpq $0, MANAGED_THREAD_EXCEPTION_OFFSET(%THREAD_REG)
    jz 3f

.if \is_void == 0
    movq (-CALLER_REG0_OFFSET + 0)(%rbp), %rax
.endif
    jmp ThrowNativeExceptionBridge@plt

3:
    // return to the caller
.endm

.macro ENTRYPOINT name, entry, paramsnum, is_void
.global \name
TYPE_FUNCTION(\name)
\name:
    CFI_STARTPROC
    CFI_DEF_CFA(rsp, 8)

    CALL_RUNTIME RUNTIME_MODE_DEFAULT, \entry, \paramsnum, \is_void
    retq
    CFI_ENDPROC
.endm

.macro ENTRYPOINT_SLOW_PATH name, entry, paramsnum, is_void
.global \name
TYPE_FUNCTION(\name)
\name:
    CFI_STARTPROC
    CFI_DEF_CFA(rsp, 8)

    CALL_RUNTIME RUNTIME_MODE_SLOW_PATH, \entry, \paramsnum, \is_void
    retq
    CFI_ENDPROC
.endm

.macro MethodEntrypointStub name, entry, notcompiled
.global \name
TYPE_FUNCTION(\name)
\name:
    CFI_STARTPROC
    CFI_DEF_CFA(rsp, 8)

    // If the caller is not a compiled method, we need to call \entry
    // and return to the caller after it completes
    movb MANAGED_THREAD_FRAME_KIND_OFFSET(%THREAD_REG), %r9b
    testb %r9b, %r9b
    jz .L\notcompiled
    CFI_REMEMBER_STATE

    movq (%rsp), %rax
    movq %rax, MANAGED_THREAD_NATIVE_PC_OFFSET(%THREAD_REG)
    movq %rbp, -0x10(%rsp)
    CFI_REL_OFFSET(rbp, -(2 * 8))
    movq $COMPILED_CODE_TO_INTERPRETER_BRIDGE, -0x8(%rsp)
    lea -0x10(%rsp), %rax
    movq %rax, MANAGED_THREAD_FRAME_OFFSET(%THREAD_REG)

    movq %r15, -0x20(%rsp)
    CFI_REL_OFFSET(r15, -(4 * 8))
    movq %r14, -0x28(%rsp)
    CFI_REL_OFFSET(r14, -(5 * 8))
    movq %r13, -0x30(%rsp)
    CFI_REL_OFFSET(r13, -(6 * 8))
    movq %r12, -0x38(%rsp)
    CFI_REL_OFFSET(r12, -(7 * 8))
    movq %rbx, -0x40(%rsp)
    CFI_REL_OFFSET(rbx, -(8 * 8))

    // ------------- header
    // %rsp        : ret addr
    // %rsp - 0x08 : $COMPILED_CODE_TO_INTERPRETER_BRIDGE
    // %rsp - 0x10 : frame pointer
    // %rsp - 0x18 : UNUSED
    // ------------- callee-saved regs
    // %rsp - 0x20 : %r15
    // %rsp - 0x28 : %r14
    // %rsp - 0x30 : %r13
    // %rsp - 0x38 : %r12
    // %rsp - 0x40 : %rbx
    // %rsp - 0x48 : empty slot for alignment
    subq $0x48, %rsp
    CFI_ADJUST_CFA_OFFSET(9 * 8)

    call \entry@plt
    // we are not going to return here

.L\notcompiled:
    CFI_RESTORE_STATE
    CFI_DEF_CFA(rsp, 8)
    subq $0x8, %rsp
    CFI_ADJUST_CFA_OFFSET(8)
    call \entry@plt
    addq $0x8, %rsp
    CFI_ADJUST_CFA_OFFSET(-8)
    ret
    CFI_ENDPROC
.endm

#include "entrypoints_gen.S"
#include "entrypoints_bridge_asm_macro.inl"

MethodEntrypointStub AbstractMethodStub AbstractMethodErrorEntrypoint ame_not_compiled

MethodEntrypointStub DefaultConflictMethodStub IncompatibleClassChangeErrorForMethodConflictEntrypoint icce_not_compiled