/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "asm_support_arm.S"

// Total size in bytes of the frame built by SAVE_MANAGED_ARGS_R4_LR_INCREASE_FRAME:
// FPR args s0-s15 (64) + GPR args r0-r3 (16) + r4 (4) + lr (4) = 88 bytes.
#define MANAGED_ARGS_R4_LR_SAVE_SIZE /*s0-s15*/ 16 * 4 + /*r0-r3*/ 4 * 4 + /*r4*/ 4 + /*lr*/ 4

// Save all managed argument registers (r0-r3, s0-s15) plus LR, growing the frame
// by MANAGED_ARGS_R4_LR_SAVE_SIZE (24 + 64 = 88 bytes).
// Note: R4 is saved for stack alignment.
.macro SAVE_MANAGED_ARGS_R4_LR_INCREASE_FRAME
    // Save GPR args r0-r3 and return address. Also save r4 for stack alignment.
    push {r0-r4, lr}
    .cfi_adjust_cfa_offset 24
    .cfi_rel_offset lr, 20
    // Save FPR args.
    vpush {s0-s15}
    .cfi_adjust_cfa_offset 64
.endm

// Undo SAVE_MANAGED_ARGS_R4_LR_INCREASE_FRAME and return via the saved LR (popped to PC).
// If \restore_cfa is non-zero, re-establish the CFA offset for code that follows the
// macro in the same function (reached by branching over the `pop`), since the `vpop`
// annotation above lowered it by 64.
.macro RESTORE_MANAGED_ARGS_R4_AND_RETURN restore_cfa
    // Restore FPR args.
    vpop {s0-s15}
    .cfi_adjust_cfa_offset -64
    // Restore GPR args and r4 and return.
    pop {r0-r4, pc}
    .if \restore_cfa
        .cfi_adjust_cfa_offset 64
    .endif
.endm

// Define the entrypoint \name which preserves all managed argument registers around
// a call to \cxx_name. If \arg1 is given, it is moved into r0 before the call.
.macro JNI_SAVE_MANAGED_ARGS_TRAMPOLINE name, cxx_name, arg1 = "none"
    .extern \cxx_name
ENTRY \name
    // Note: Managed callee-save registers have been saved by the JNI stub.
    // Save managed args, r4 (for stack alignment) and LR.
    SAVE_MANAGED_ARGS_R4_LR_INCREASE_FRAME
    // Call `cxx_name()`.
    .ifnc \arg1, none
        mov r0, \arg1                    @ Pass arg1.
    .endif
    bl \cxx_name                         @ Call cxx_name(...).
    // Restore args and R4 and return.
    RESTORE_MANAGED_ARGS_R4_AND_RETURN /*restore_cfa*/ 0
END \name
.endm

// Define the entrypoint \name which preserves the managed return registers (r0-r1,
// s0-s1) around a call to \cxx_name. \arg1 goes to r0 and optional \arg2 to r1.
// If \label is given, it is emitted at the function entry (used for local-label
// branch targets, see `art_jni_unlock_object_no_inline` below).
.macro JNI_SAVE_RETURN_VALUE_TRAMPOLINE name, cxx_name, arg1, arg2 = "none", label = "none"
    .extern \cxx_name
ENTRY \name
    .ifnc \label, none
        \label:
    .endif
    // Save GPR return registers and return address. Also save r4 for stack alignment.
    push {r0-r1, r4, lr}
    .cfi_adjust_cfa_offset 16
    .cfi_rel_offset lr, 12
    // Save FPR return registers.
    vpush {s0-s1}
    .cfi_adjust_cfa_offset 8
    // Call `cxx_name()`.
    mov r0, \arg1                        @ Pass arg1.
    .ifnc \arg2, none
        mov r1, \arg2                    @ Pass arg2.
    .endif
    bl \cxx_name                         @ Call cxx_name(...).
    // Restore FPR return registers.
    vpop {s0-s1}
    .cfi_adjust_cfa_offset -8
    // Restore GPR return registers and r4 and return.
    pop {r0-r1, r4, pc}
END \name
.endm

    /*
     * Jni dlsym lookup stub.
     */
    .extern artFindNativeMethod
    .extern artFindNativeMethodRunnable
ENTRY art_jni_dlsym_lookup_stub
    push {r0, r1, r2, r3, lr}            @ spill regs
    .cfi_adjust_cfa_offset 20
    .cfi_rel_offset lr, 16
    sub sp, #12                          @ pad stack pointer to align frame (20 + 12 = 32 bytes)
    .cfi_adjust_cfa_offset 12

    mov r0, rSELF                        @ pass Thread::Current()
    // Call artFindNativeMethod() for normal native and artFindNativeMethodRunnable()
    // for @FastNative or @CriticalNative.
    // Dispatch on the target method's access flags (read via the tagged top quick frame).
    ldr ip, [r0, #THREAD_TOP_QUICK_FRAME_OFFSET]   // uintptr_t tagged_quick_frame
    bic ip, #TAGGED_JNI_SP_MASK                    // ArtMethod** sp
    ldr ip, [ip]                                   // ArtMethod* method
    ldr ip, [ip, #ART_METHOD_ACCESS_FLAGS_OFFSET]  // uint32_t access_flags
    tst ip, #(ACCESS_FLAGS_METHOD_IS_FAST_NATIVE | ACCESS_FLAGS_METHOD_IS_CRITICAL_NATIVE)
    bne .Llookup_stub_fast_or_critical_native
    blx artFindNativeMethod
    b .Llookup_stub_continue
.Llookup_stub_fast_or_critical_native:
    blx artFindNativeMethodRunnable
.Llookup_stub_continue:
    mov r12, r0                          @ save result in r12

    add sp, #12                          @ restore stack pointer
    .cfi_adjust_cfa_offset -12
    CFI_REMEMBER_STATE
    cbz r0, 1f                           @ is method code null?
    pop {r0, r1, r2, r3, lr}             @ restore regs
    .cfi_adjust_cfa_offset -20
    .cfi_restore lr
    bx r12                               @ if non-null, tail call to method's code
1:
    CFI_RESTORE_STATE_AND_DEF_CFA sp, 20
    pop {r0, r1, r2, r3, pc}             @ restore regs and return to caller to handle exception
END art_jni_dlsym_lookup_stub

    /*
     * Jni dlsym lookup stub for @CriticalNative.
     */
ENTRY art_jni_dlsym_lookup_critical_stub
    // The hidden arg holding the tagged method (bit 0 set means GenericJNI) is r4.
    // For Generic JNI we already have a managed frame, so we reuse the art_jni_dlsym_lookup_stub.
    tst r4, #1
    bne art_jni_dlsym_lookup_stub

    // Reserve space for a SaveRefsAndArgs managed frame, either for the actual runtime
    // method or for a GenericJNI frame which is similar but has a native method and a tag.
    // Do this eagerly, so that we can use these registers as temps without the need to
    // save and restore them multiple times.
    INCREASE_FRAME FRAME_SIZE_SAVE_REFS_AND_ARGS

    // Save args, the hidden arg and caller PC. No CFI needed for args and the hidden arg.
    push {r0, r1, r2, r3, r4, lr}
    .cfi_adjust_cfa_offset 24
    .cfi_rel_offset lr, 20

    // Call artCriticalNativeFrameSize(method, caller_pc)
    mov r0, r4                           // r0 := method (from hidden arg)
    mov r1, lr                           // r1 := caller_pc
    bl artCriticalNativeFrameSize

    // Prepare the return address for managed stack walk of the SaveRefsAndArgs frame.
    // If we're coming from JNI stub with tail call, it is LR. If we're coming from
    // JNI stub that saved the return address, it will be the last value we copy below.
    // If we're coming directly from compiled code, it is LR, set further down.
    ldr lr, [sp, #20]

    // Move the stack args if any. R0 holds the result of artCriticalNativeFrameSize;
    // the loop below consumes it as a byte count, copying 8 bytes per iteration
    // (NOTE(review): presumably the size of the out-args area — confirm against the
    // artCriticalNativeFrameSize() definition).
    add r4, sp, #24
    cbz r0, .Lcritical_skip_copy_args
.Lcritical_copy_args_loop:
    ldrd ip, lr, [r4, #FRAME_SIZE_SAVE_REFS_AND_ARGS]
    subs r0, r0, #8
    strd ip, lr, [r4], #8
    bne .Lcritical_copy_args_loop
.Lcritical_skip_copy_args:
    // The managed frame address is now in R4. This is conveniently a callee-save in native ABI.

    // Restore args.
    pop {r0, r1, r2, r3}
    .cfi_adjust_cfa_offset -16

    // Spill registers for the SaveRefsAndArgs frame above the stack args.
    // Note that the runtime shall not examine the args here, otherwise we would have to
    // move them in registers and stack to account for the difference between managed and
    // native ABIs.
    add ip, r4, #FRAME_SIZE_SAVE_REFS_AND_ARGS - 40
    stmia ip, {r1-r3, r5-r8, r10-r11, lr}  // LR: Save return address for tail call from JNI stub.
                                           // (If there were any stack args, we're storing the value
                                           // that's already there. For direct calls from compiled
                                           // managed code, we shall overwrite this below.)
    // Skip args r1-r3.
    // Record the callee-save spill slots for the unwinder as CFA expressions based on R4.
    CFI_EXPRESSION_BREG 5, 4, FRAME_SIZE_SAVE_REFS_AND_ARGS - 28
    CFI_EXPRESSION_BREG 6, 4, FRAME_SIZE_SAVE_REFS_AND_ARGS - 24
    CFI_EXPRESSION_BREG 7, 4, FRAME_SIZE_SAVE_REFS_AND_ARGS - 20
    CFI_EXPRESSION_BREG 8, 4, FRAME_SIZE_SAVE_REFS_AND_ARGS - 16
    CFI_EXPRESSION_BREG 10, 4, FRAME_SIZE_SAVE_REFS_AND_ARGS - 12
    CFI_EXPRESSION_BREG 11, 4, FRAME_SIZE_SAVE_REFS_AND_ARGS - 8
    // The saved return PC for managed stack walk is not necessarily our LR.
    // Skip managed FP args as these are native ABI caller-saves and not args.

    // Restore the hidden arg to r1 and caller PC.
    pop {r1, lr}
    .cfi_adjust_cfa_offset -8
    .cfi_restore lr

    // Save our return PC in the padding.
    str lr, [r4, #__SIZEOF_POINTER__]
    CFI_EXPRESSION_BREG 14, 4, __SIZEOF_POINTER__

    ldr ip, [r1, #ART_METHOD_ACCESS_FLAGS_OFFSET]  // Load access flags.
    add r2, r4, #1                       // Prepare managed SP tagged for a GenericJNI frame.
    tst ip, #ACCESS_FLAGS_METHOD_IS_NATIVE
    bne .Lcritical_skip_prepare_runtime_method

    // When coming from a compiled method, the return PC for managed stack walk is LR.
    // (When coming from a compiled stub, the correct return PC is already stored above.)
    str lr, [r4, #(FRAME_SIZE_SAVE_REFS_AND_ARGS - __SIZEOF_POINTER__)]

    // Replace the target method with the SaveRefsAndArgs runtime method.
    LOAD_RUNTIME_INSTANCE r1
    ldr r1, [r1, #RUNTIME_SAVE_REFS_AND_ARGS_METHOD_OFFSET]

    mov r2, r4                           // Prepare untagged managed SP for the runtime method.

.Lcritical_skip_prepare_runtime_method:
    // Store the method on the bottom of the managed frame.
    str r1, [r4]

    // Place (maybe tagged) managed SP in Thread::Current()->top_quick_frame.
    str r2, [rSELF, #THREAD_TOP_QUICK_FRAME_OFFSET]

    // Preserve the native arg register r0 in callee-save register r10 which was saved above.
    mov r10, r0

    // Call artFindNativeMethodRunnable()
    mov r0, rSELF                        // pass Thread::Current()
    bl artFindNativeMethodRunnable

    // Store result in scratch reg.
    mov ip, r0

    // Restore the native arg register r0.
    mov r0, r10

    // Restore the frame. We shall not need the method anymore.
    add r1, r4, #FRAME_SIZE_SAVE_REFS_AND_ARGS - 40
    ldmia r1, {r1-r3, r5-r8, r10-r11}
    .cfi_restore r5
    .cfi_restore r6
    .cfi_restore r7
    .cfi_restore r8
    .cfi_restore r10
    .cfi_restore r11

    REFRESH_MARKING_REGISTER

    // Check for exception before moving args back to keep the return PC for managed stack walk.
    // A null result (ip == 0) means the lookup failed and an exception shall be delivered.
    cmp ip, #0
    CFI_REMEMBER_STATE
    beq .Lcritical_deliver_exception

    // Restore our return PC.
    ldr lr, [r4, #__SIZEOF_POINTER__]
    .cfi_restore lr

    // Move stack args to their original place, 8 bytes at a time (reverse of the copy above).
    cmp sp, r4
    beq .Lcritical_skip_copy_args_back
    push {r0, r1, r2, r3}
    .cfi_adjust_cfa_offset 16
    add r0, sp, #16
    sub r0, r4, r0
.Lcritical_copy_args_loop_back:
    ldrd r2, r3, [r4, #-8]!
    subs r0, r0, #8
    strd r2, r3, [r4, #FRAME_SIZE_SAVE_REFS_AND_ARGS]
    bne .Lcritical_copy_args_loop_back
    pop {r0, r1, r2, r3}
    .cfi_adjust_cfa_offset -16
.Lcritical_skip_copy_args_back:

    // Remove the frame reservation.
    DECREASE_FRAME FRAME_SIZE_SAVE_REFS_AND_ARGS

    // Do the tail call.
    bx ip

.Lcritical_deliver_exception:
    CFI_RESTORE_STATE_AND_DEF_CFA sp, FRAME_SIZE_SAVE_REFS_AND_ARGS
    // The exception delivery checks that rSELF was saved but the SaveRefsAndArgs
    // frame does not save it, so we cannot use the existing SaveRefsAndArgs frame.
    // That's why we checked for exception after restoring registers from it.
    // We need to build a SaveAllCalleeSaves frame instead. Args are irrelevant at this
    // point but keep the area allocated for stack args to keep CFA definition simple.
#if FRAME_SIZE_SAVE_REFS_AND_ARGS != FRAME_SIZE_SAVE_ALL_CALLEE_SAVES
# error "Expected FRAME_SIZE_SAVE_REFS_AND_ARGS == FRAME_SIZE_SAVE_ALL_CALLEE_SAVES"
    // Otherwise we would need to adjust SP and R4 and move our return PC which is at [R4, #4].
    // (Luckily, both SaveRefsAndArgs and SaveAllCalleeSaves frames have padding there.)
#endif

    // Spill registers for the SaveAllCalleeSaves frame above the stack args area.
    add ip, r4, #FRAME_SIZE_SAVE_ALL_CALLEE_SAVES - 32
    stmia ip, {r5-r11}                   // Keep the caller PC for managed stack walk.
    CFI_EXPRESSION_BREG 5, 4, FRAME_SIZE_SAVE_ALL_CALLEE_SAVES - 32
    CFI_EXPRESSION_BREG 6, 4, FRAME_SIZE_SAVE_ALL_CALLEE_SAVES - 28
    CFI_EXPRESSION_BREG 7, 4, FRAME_SIZE_SAVE_ALL_CALLEE_SAVES - 24
    CFI_EXPRESSION_BREG 8, 4, FRAME_SIZE_SAVE_ALL_CALLEE_SAVES - 20
    CFI_EXPRESSION_BREG 9, 4, FRAME_SIZE_SAVE_ALL_CALLEE_SAVES - 16
    CFI_EXPRESSION_BREG 10, 4, FRAME_SIZE_SAVE_ALL_CALLEE_SAVES - 12
    CFI_EXPRESSION_BREG 11, 4, FRAME_SIZE_SAVE_ALL_CALLEE_SAVES - 8
    // Skip R4, it is callee-save in managed ABI.
    add ip, r4, #12
    vstmia ip, {s16-s31}

    // Store ArtMethod* Runtime::callee_save_methods_[kSaveAllCalleeSaves] to the managed frame.
    LOAD_RUNTIME_INSTANCE ip
    ldr ip, [ip, #RUNTIME_SAVE_ALL_CALLEE_SAVES_METHOD_OFFSET]
    str ip, [r4]

    // Place the managed frame SP in Thread::Current()->top_quick_frame.
    str r4, [rSELF, #THREAD_TOP_QUICK_FRAME_OFFSET]

    DELIVER_PENDING_EXCEPTION_FRAME_READY
END art_jni_dlsym_lookup_critical_stub

    /*
     * Read barrier for the method's declaring class needed by JNI stub for static methods.
     * (We're using a pointer to the declaring class in `ArtMethod` as `jclass`.)
     */
// The method argument is already in r0 for call to `artJniReadBarrier(ArtMethod*)`.
// No explicit arg1: the ArtMethod* is already in r0 (see the comment above).
JNI_SAVE_MANAGED_ARGS_TRAMPOLINE art_jni_read_barrier, artJniReadBarrier

    /*
     * Trampoline to `artJniMethodStart()` that preserves all managed arguments.
     */
JNI_SAVE_MANAGED_ARGS_TRAMPOLINE art_jni_method_start, artJniMethodStart, rSELF

    /*
     * Trampoline to `artJniMethodEntryHook()` that preserves all managed arguments.
     */
JNI_SAVE_MANAGED_ARGS_TRAMPOLINE art_jni_method_entry_hook, artJniMethodEntryHook, rSELF

    /*
     * Trampoline to `artJniMonitoredMethodStart()` that preserves all managed arguments.
     */
JNI_SAVE_MANAGED_ARGS_TRAMPOLINE art_jni_monitored_method_start, artJniMonitoredMethodStart, rSELF

    /*
     * Trampoline to `artJniMethodEnd()` that preserves all return registers.
     */
JNI_SAVE_RETURN_VALUE_TRAMPOLINE art_jni_method_end, artJniMethodEnd, rSELF

    /*
     * Trampoline to `artJniMonitoredMethodEnd()` that preserves all return registers.
     */
JNI_SAVE_RETURN_VALUE_TRAMPOLINE art_jni_monitored_method_end, artJniMonitoredMethodEnd, rSELF

    /*
     * Entry from JNI stub that tries to lock the object in a fast path and
     * calls `artLockObjectFromCode()` (the same as for managed code) for the
     * difficult cases, may block for GC.
     * Custom calling convention:
     *     r4 holds the non-null object to lock.
     *     Callee-save registers have been saved and can be used as temporaries.
     *     All argument registers need to be preserved.
     */
ENTRY art_jni_lock_object
    // Note: the slow path is actually the art_jni_lock_object_no_inline (tail call).
    LOCK_OBJECT_FAST_PATH r4, r5, r6, r7, .Llock_object_jni_slow, /*can_be_null*/ 0
END art_jni_lock_object

    /*
     * Entry from JNI stub that calls `artLockObjectFromCode()`
     * (the same as for managed code), may block for GC.
     * Custom calling convention:
     *     r4 holds the non-null object to lock.
     *     Callee-save registers have been saved and can be used as temporaries.
     *     All argument registers need to be preserved.
     */
    .extern artLockObjectFromCode
ENTRY art_jni_lock_object_no_inline
    // This is also the slow path for art_jni_lock_object.
    // Note that we need a local label as the assembler emits bad instructions
    // for CBZ/CBNZ if we try to jump to `art_jni_lock_object_no_inline`.
.Llock_object_jni_slow:
    // Save managed args, r4 (for stack alignment) and LR.
    SAVE_MANAGED_ARGS_R4_LR_INCREASE_FRAME
    // Call `artLockObjectFromCode()`
    mov r0, r4                           @ Pass the object to lock.
    mov r1, rSELF                        @ Pass Thread::Current().
    bl artLockObjectFromCode             @ (Object* obj, Thread*)
    // Check result; non-zero means the lock failed and an exception is pending.
    cbnz r0, 1f
    // Restore args and r4 and return. Pass /*restore_cfa*/ 1 so the CFA annotation
    // is re-established for the `1:` exception path below.
    RESTORE_MANAGED_ARGS_R4_AND_RETURN /*restore_cfa*/ 1
1:
    // All args are irrelevant when throwing an exception and R4 is preserved
    // by the `artLockObjectFromCode()` call. Load LR and drop saved args and R4.
    ldr lr, [sp, #(MANAGED_ARGS_R4_LR_SAVE_SIZE - 4)]
    .cfi_restore lr
    DECREASE_FRAME MANAGED_ARGS_R4_LR_SAVE_SIZE
    // Make a tail call to `artDeliverPendingExceptionFromCode()`.
    // Rely on the JNI transition frame constructed in the JNI stub.
    mov r0, rSELF                        @ Pass Thread::Current().
    b artDeliverPendingExceptionFromCode @ (Thread*)
END art_jni_lock_object_no_inline

    /*
     * Entry from JNI stub that tries to unlock the object in a fast path and calls
     * `artJniUnlockObject()` for the difficult cases. Note that failure to unlock
     * is fatal, so we do not need to check for exceptions in the slow path.
     * Custom calling convention:
     *     r4 holds the non-null object to unlock.
     *     Callee-save registers have been saved and can be used as temporaries.
     *     Return registers r0-r1 and s0-s1 need to be preserved.
     */
ENTRY art_jni_unlock_object
    // Note: the slow path is actually the art_jni_unlock_object_no_inline (tail call).
    UNLOCK_OBJECT_FAST_PATH r4, r5, r6, r7, .Lunlock_object_jni_slow, /*can_be_null*/ 0
END art_jni_unlock_object

    /*
     * Entry from JNI stub that calls `artJniUnlockObject()`. Note that failure to
     * unlock is fatal, so we do not need to check for exceptions.
     * Custom calling convention:
     *     r4 holds the non-null object to unlock.
     *     Callee-save registers have been saved and can be used as temporaries.
     *     Return registers r0-r1 and s0-s1 need to be preserved.
     */
    // This is also the slow path for art_jni_unlock_object.
JNI_SAVE_RETURN_VALUE_TRAMPOLINE art_jni_unlock_object_no_inline, artJniUnlockObject, r4, rSELF, \
    /* Note that we need a local label as the assembler emits bad instructions */ \
    /* for CBZ/CBNZ if we try to jump to `art_jni_unlock_object_no_inline`. */ \
    .Lunlock_object_jni_slow