/* (Removed non-source navigation chrome left over from code-browser extraction.) */
/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "asm_support_arm.S"

#define MANAGED_ARGS_R4_LR_SAVE_SIZE /*s0-s15*/ 16 * 4 + /*r0-r3*/ 4 * 4 + /*r4*/ 4 + /*lr*/ 4
20
// Note: R4 is saved for stack alignment.
// Grows the frame by MANAGED_ARGS_R4_LR_SAVE_SIZE (88) bytes in total:
// 24 bytes for {r0-r4, lr} plus 64 bytes for {s0-s15}.
.macro SAVE_MANAGED_ARGS_R4_LR_INCREASE_FRAME
    // Save GPR args r0-r3 and return address. Also save r4 for stack alignment.
    push   {r0-r4, lr}
    .cfi_adjust_cfa_offset 24
    .cfi_rel_offset lr, 20
    // Save FPR args.
    vpush  {s0-s15}
    .cfi_adjust_cfa_offset 64
.endm
31
// Undoes SAVE_MANAGED_ARGS_R4_LR_INCREASE_FRAME and returns by popping the saved
// LR directly into PC.
// \restore_cfa: assembler CFI directives are positional, so after the `pop` the
// recorded CFA offset no longer matches code placed after this macro (reachable
// only via branches taken while the full save area was still on the stack).
// Pass a non-zero \restore_cfa to re-adjust the CFA for such fall-through code;
// see the `1:` slow path in `art_jni_lock_object_no_inline`.
.macro RESTORE_MANAGED_ARGS_R4_AND_RETURN restore_cfa
    // Restore FPR args.
    vpop   {s0-s15}
    .cfi_adjust_cfa_offset -64
    // Restore GPR args and r4 and return.
    pop    {r0-r4, pc}
    .if \restore_cfa
        .cfi_adjust_cfa_offset 64
    .endif
.endm
42
// Generates a JNI trampoline `\name` that preserves all managed argument
// registers (r0-r3, s0-s15) across a call to `\cxx_name`.
// If \arg1 is given, it is moved into r0 as the first C++ argument; otherwise
// r0 (the managed first arg) is passed through unchanged.
.macro JNI_SAVE_MANAGED_ARGS_TRAMPOLINE name, cxx_name, arg1 = "none"
    .extern \cxx_name
ENTRY \name
    // Note: Managed callee-save registers have been saved by the JNI stub.
    // Save managed args, r4 (for stack alignment) and LR.
    SAVE_MANAGED_ARGS_R4_LR_INCREASE_FRAME
    // Call `cxx_name()`.
    .ifnc \arg1, none
        mov r0, \arg1                     @ Pass arg1.
    .endif
    bl     \cxx_name                      @ Call cxx_name(...).
    // Restore args and R4 and return.
    RESTORE_MANAGED_ARGS_R4_AND_RETURN /*restore_cfa*/ 0
END \name
.endm
58
// Generates a JNI trampoline `\name` that preserves the managed return-value
// registers (r0-r1 and s0-s1) across a call to `\cxx_name(\arg1[, \arg2])`.
// If \label is given, it is emitted at the entry point so other code can
// branch to this trampoline via a local label (needed for CBZ/CBNZ range).
.macro JNI_SAVE_RETURN_VALUE_TRAMPOLINE name, cxx_name, arg1, arg2 = "none", label = "none"
    .extern \cxx_name
ENTRY \name
    .ifnc \label, none
        \label:
    .endif
    // Save GPR return registers and return address. Also save r4 for stack alignment.
    push   {r0-r1, r4, lr}
    .cfi_adjust_cfa_offset 16
    .cfi_rel_offset lr, 12
    // Save FPR return registers.
    vpush  {s0-s1}
    .cfi_adjust_cfa_offset 8
    // Call `cxx_name()`.
    mov r0, \arg1                         @ Pass arg1.
    .ifnc \arg2, none
        mov r1, \arg2                     @ Pass arg2.
    .endif
    bl     \cxx_name                      @ Call cxx_name(...).
    // Restore FPR return registers.
    vpop   {s0-s1}
    .cfi_adjust_cfa_offset -8
    // Restore GPR return registers and r4 and return.
    pop    {r0-r1, r4, pc}
END \name
.endm
85
    /*
     * Jni dlsym lookup stub.
     * Resolves the native implementation for the current method and tail-calls
     * it; if resolution fails (returns null), returns to the caller so it can
     * handle the pending exception.
     */
    .extern artFindNativeMethod
    .extern artFindNativeMethodRunnable
ENTRY art_jni_dlsym_lookup_stub
    push   {r0, r1, r2, r3, lr}           @ spill regs
    .cfi_adjust_cfa_offset 20
    .cfi_rel_offset lr, 16
    sub    sp, #12                        @ pad stack pointer to align frame
    .cfi_adjust_cfa_offset 12

    mov    r0, rSELF                      @ pass Thread::Current()
    // Call artFindNativeMethod() for normal native and artFindNativeMethodRunnable()
    // for @FastNative or @CriticalNative.
    ldr    ip, [r0, #THREAD_TOP_QUICK_FRAME_OFFSET]   // uintptr_t tagged_quick_frame
    bic    ip, #TAGGED_JNI_SP_MASK                    // ArtMethod** sp
    ldr    ip, [ip]                                   // ArtMethod* method
    ldr    ip, [ip, #ART_METHOD_ACCESS_FLAGS_OFFSET]  // uint32_t access_flags
    tst    ip, #(ACCESS_FLAGS_METHOD_IS_FAST_NATIVE | ACCESS_FLAGS_METHOD_IS_CRITICAL_NATIVE)
    bne    .Llookup_stub_fast_or_critical_native
    blx    artFindNativeMethod
    b      .Llookup_stub_continue
.Llookup_stub_fast_or_critical_native:
    blx    artFindNativeMethodRunnable
.Llookup_stub_continue:
    mov    r12, r0                        @ save result in r12

    add    sp, #12                        @ restore stack pointer
    .cfi_adjust_cfa_offset -12
    cbz    r0, 1f                         @ is method code null?
    .cfi_remember_state
    pop    {r0, r1, r2, r3, lr}           @ restore regs
    .cfi_adjust_cfa_offset -20
    .cfi_restore lr
    bx     r12                            @ if non-null, tail call to method's code
1:
    CFI_RESTORE_STATE_AND_DEF_CFA sp, 20
    pop    {r0, r1, r2, r3, pc}           @ restore regs and return to caller to handle exception
END art_jni_dlsym_lookup_stub
126
    /*
     * Jni dlsym lookup stub for @CriticalNative.
     * Custom calling convention: r4 holds the tagged target method (bit 0 set
     * means GenericJNI); native args are in r0-r3 and possibly on the stack.
     */
ENTRY art_jni_dlsym_lookup_critical_stub
    // The hidden arg holding the tagged method (bit 0 set means GenericJNI) is r4.
    // For Generic JNI we already have a managed frame, so we reuse the art_jni_dlsym_lookup_stub.
    tst    r4, #1
    bne art_jni_dlsym_lookup_stub

    // Reserve space for a SaveRefsAndArgs managed frame, either for the actual runtime
    // method or for a GenericJNI frame which is similar but has a native method and a tag.
    // Do this eagerly, so that we can use these registers as temps without the need to
    // save and restore them multiple times.
    INCREASE_FRAME FRAME_SIZE_SAVE_REFS_AND_ARGS

    // Save args, the hidden arg and caller PC. No CFI needed for args and the hidden arg.
    push   {r0, r1, r2, r3, r4, lr}
    .cfi_adjust_cfa_offset 24
    .cfi_rel_offset lr, 20

    // Call artCriticalNativeFrameSize(method, caller_pc)
    mov    r0, r4  // r0 := method (from hidden arg)
    mov    r1, lr  // r1 := caller_pc
    bl     artCriticalNativeFrameSize

    // Prepare the return address for managed stack walk of the SaveRefsAndArgs frame.
    // If we're coming from JNI stub with tail call, it is LR. If we're coming from
    // JNI stub that saved the return address, it will be the last value we copy below.
    // If we're coming directly from compiled code, it is LR, set further down.
    ldr    lr, [sp, #20]

    // Move the stack args if any. On loop entry r0 holds the stack-args size in
    // bytes (the value returned by `artCriticalNativeFrameSize`), decremented by
    // 8 per copied pair; ip/lr are used as copy temporaries.
    add    r4, sp, #24
    cbz    r0, .Lcritical_skip_copy_args
.Lcritical_copy_args_loop:
    ldrd   ip, lr, [r4, #FRAME_SIZE_SAVE_REFS_AND_ARGS]
    subs   r0, r0, #8
    strd   ip, lr, [r4], #8
    bne    .Lcritical_copy_args_loop
.Lcritical_skip_copy_args:
    // The managed frame address is now in R4. This is conveniently a callee-save in native ABI.

    // Restore args.
    pop    {r0, r1, r2, r3}
    .cfi_adjust_cfa_offset -16

    // Spill registers for the SaveRefsAndArgs frame above the stack args.
    // Note that the runtime shall not examine the args here, otherwise we would have to
    // move them in registers and stack to account for the difference between managed and
    // native ABIs.
    add    ip, r4, #FRAME_SIZE_SAVE_REFS_AND_ARGS - 40   // 10 registers * 4 bytes = 40.
    stmia  ip, {r1-r3, r5-r8, r10-r11, lr}  // LR: Save return address for tail call from JNI stub.
    // (If there were any stack args, we're storing the value that's already there.
    // For direct calls from compiled managed code, we shall overwrite this below.)
    // Skip args r1-r3.
    CFI_EXPRESSION_BREG 5, 4, FRAME_SIZE_SAVE_REFS_AND_ARGS - 28
    CFI_EXPRESSION_BREG 6, 4, FRAME_SIZE_SAVE_REFS_AND_ARGS - 24
    CFI_EXPRESSION_BREG 7, 4, FRAME_SIZE_SAVE_REFS_AND_ARGS - 20
    CFI_EXPRESSION_BREG 8, 4, FRAME_SIZE_SAVE_REFS_AND_ARGS - 16
    CFI_EXPRESSION_BREG 10, 4, FRAME_SIZE_SAVE_REFS_AND_ARGS - 12
    CFI_EXPRESSION_BREG 11, 4, FRAME_SIZE_SAVE_REFS_AND_ARGS - 8
    // The saved return PC for managed stack walk is not necessarily our LR.
    // Skip managed FP args as these are native ABI caller-saves and not args.

    // Restore the hidden arg to r1 and caller PC.
    pop    {r1, lr}
    .cfi_adjust_cfa_offset -8
    .cfi_restore lr

    // Save our return PC in the padding.
    str   lr, [r4, #__SIZEOF_POINTER__]
    CFI_EXPRESSION_BREG 14, 4, __SIZEOF_POINTER__

    ldr    ip, [r1, #ART_METHOD_ACCESS_FLAGS_OFFSET]  // Load access flags.
    add    r2, r4, #1             // Prepare managed SP tagged for a GenericJNI frame.
    tst    ip, #ACCESS_FLAGS_METHOD_IS_NATIVE
    bne    .Lcritical_skip_prepare_runtime_method

    // When coming from a compiled method, the return PC for managed stack walk is LR.
    // (When coming from a compiled stub, the correct return PC is already stored above.)
    str    lr, [r4, #(FRAME_SIZE_SAVE_REFS_AND_ARGS - __SIZEOF_POINTER__)]

    // Replace the target method with the SaveRefsAndArgs runtime method.
    LOAD_RUNTIME_INSTANCE r1
    ldr    r1, [r1, #RUNTIME_SAVE_REFS_AND_ARGS_METHOD_OFFSET]

    mov    r2, r4                 // Prepare untagged managed SP for the runtime method.

.Lcritical_skip_prepare_runtime_method:
    // Store the method on the bottom of the managed frame.
    str    r1, [r4]

    // Place (maybe tagged) managed SP in Thread::Current()->top_quick_frame.
    str    r2, [rSELF, #THREAD_TOP_QUICK_FRAME_OFFSET]

    // Preserve the native arg register r0 in callee-save register r10 which was saved above.
    mov    r10, r0

    // Call artFindNativeMethodRunnable()
    mov    r0, rSELF   // pass Thread::Current()
    bl     artFindNativeMethodRunnable

    // Store result in scratch reg.
    mov    ip, r0

    // Restore the native arg register r0.
    mov    r0, r10

    // Restore the frame. We shall not need the method anymore.
    add    r1, r4, #FRAME_SIZE_SAVE_REFS_AND_ARGS - 40
    ldmia  r1, {r1-r3, r5-r8, r10-r11}
    .cfi_restore r5
    .cfi_restore r6
    .cfi_restore r7
    .cfi_restore r8
    .cfi_restore r10
    .cfi_restore r11

    REFRESH_MARKING_REGISTER

    // Check for exception before moving args back to keep the return PC for managed stack walk.
    cmp    ip, #0
    beq    .Lcritical_deliver_exception

    .cfi_remember_state

    // Restore our return PC.
    ldr    lr, [r4, #__SIZEOF_POINTER__]
    .cfi_restore lr

    // Move stack args to their original place.
    cmp    sp, r4
    beq    .Lcritical_skip_copy_args_back
    push   {r0, r1, r2, r3}
    .cfi_adjust_cfa_offset 16
    add    r0, sp, #16
    sub    r0, r4, r0              // r0 := size of the stack args in bytes.
.Lcritical_copy_args_loop_back:
    ldrd   r2, r3, [r4, #-8]!
    subs   r0, r0, #8
    strd   r2, r3, [r4, #FRAME_SIZE_SAVE_REFS_AND_ARGS]
    bne    .Lcritical_copy_args_loop_back
    pop    {r0, r1, r2, r3}
    .cfi_adjust_cfa_offset -16
.Lcritical_skip_copy_args_back:

    // Remove the frame reservation.
    DECREASE_FRAME FRAME_SIZE_SAVE_REFS_AND_ARGS

    // Do the tail call.
    bx     ip
    CFI_RESTORE_STATE_AND_DEF_CFA sp, FRAME_SIZE_SAVE_REFS_AND_ARGS

.Lcritical_deliver_exception:
    // The exception delivery checks that rSELF was saved but the SaveRefsAndArgs
    // frame does not save it, so we cannot use the existing SaveRefsAndArgs frame.
    // That's why we checked for exception after restoring registers from it.
    // We need to build a SaveAllCalleeSaves frame instead. Args are irrelevant at this
    // point but keep the area allocated for stack args to keep CFA definition simple.
#if FRAME_SIZE_SAVE_REFS_AND_ARGS != FRAME_SIZE_SAVE_ALL_CALLEE_SAVES
#  error "Expected FRAME_SIZE_SAVE_REFS_AND_ARGS == FRAME_SIZE_SAVE_ALL_CALLEE_SAVES"
    // Otherwise we would need to adjust SP and R4 and move our return PC which is at [R4, #4].
    // (Luckily, both SaveRefsAndArgs and SaveAllCalleeSaves frames have padding there.)
#endif

    // Spill registers for the SaveAllCalleeSaves frame above the stack args area.
    add    ip, r4, #FRAME_SIZE_SAVE_ALL_CALLEE_SAVES - 32
    stmia  ip, {r5-r11}  // Keep the caller PC for managed stack walk.
    CFI_EXPRESSION_BREG 5, 4, FRAME_SIZE_SAVE_ALL_CALLEE_SAVES - 32
    CFI_EXPRESSION_BREG 6, 4, FRAME_SIZE_SAVE_ALL_CALLEE_SAVES - 28
    CFI_EXPRESSION_BREG 7, 4, FRAME_SIZE_SAVE_ALL_CALLEE_SAVES - 24
    CFI_EXPRESSION_BREG 8, 4, FRAME_SIZE_SAVE_ALL_CALLEE_SAVES - 20
    CFI_EXPRESSION_BREG 9, 4, FRAME_SIZE_SAVE_ALL_CALLEE_SAVES - 16
    CFI_EXPRESSION_BREG 10, 4, FRAME_SIZE_SAVE_ALL_CALLEE_SAVES - 12
    CFI_EXPRESSION_BREG 11, 4, FRAME_SIZE_SAVE_ALL_CALLEE_SAVES - 8
    // Skip R4, it is callee-save in managed ABI.
    add    ip, r4, #12
    vstmia ip, {s16-s31}

    // Store ArtMethod* Runtime::callee_save_methods_[kSaveAllCalleeSaves] to the managed frame.
    LOAD_RUNTIME_INSTANCE ip
    ldr   ip, [ip, #RUNTIME_SAVE_ALL_CALLEE_SAVES_METHOD_OFFSET]
    str   ip, [r4]

    // Place the managed frame SP in Thread::Current()->top_quick_frame.
    str   r4, [rSELF, #THREAD_TOP_QUICK_FRAME_OFFSET]

    DELIVER_PENDING_EXCEPTION_FRAME_READY
END art_jni_dlsym_lookup_critical_stub
316
    /*
     * Read barrier for the method's declaring class needed by JNI stub for static methods.
     * (We're using a pointer to the declaring class in `ArtMethod` as `jclass`.)
     */
// The method argument is already in r0 for call to `artJniReadBarrier(ArtMethod*)`.
JNI_SAVE_MANAGED_ARGS_TRAMPOLINE art_jni_read_barrier, artJniReadBarrier

    /*
     * Trampoline to `artJniMethodStart()` that preserves all managed arguments.
     * Passes rSELF (Thread::Current()) as the first C++ argument.
     */
JNI_SAVE_MANAGED_ARGS_TRAMPOLINE art_jni_method_start, artJniMethodStart, rSELF

    /*
     * Trampoline to `artJniMethodEntryHook()` that preserves all managed arguments.
     * Passes rSELF (Thread::Current()) as the first C++ argument.
     */
JNI_SAVE_MANAGED_ARGS_TRAMPOLINE art_jni_method_entry_hook, artJniMethodEntryHook, rSELF

    /*
     * Trampoline to `artJniMonitoredMethodStart()` that preserves all managed arguments.
     * Passes rSELF (Thread::Current()) as the first C++ argument.
     */
JNI_SAVE_MANAGED_ARGS_TRAMPOLINE art_jni_monitored_method_start, artJniMonitoredMethodStart, rSELF

    /*
     * Trampoline to `artJniMethodEnd()` that preserves all return registers.
     * Passes rSELF (Thread::Current()) as the first C++ argument.
     */
JNI_SAVE_RETURN_VALUE_TRAMPOLINE art_jni_method_end, artJniMethodEnd, rSELF

    /*
     * Trampoline to `artJniMonitoredMethodEnd()` that preserves all return registers.
     * Passes rSELF (Thread::Current()) as the first C++ argument.
     */
JNI_SAVE_RETURN_VALUE_TRAMPOLINE art_jni_monitored_method_end, artJniMonitoredMethodEnd, rSELF
348
    /*
     * Entry from JNI stub that tries to lock the object in a fast path and
     * calls `artLockObjectFromCode()` (the same as for managed code) for the
     * difficult cases, may block for GC.
     * Custom calling convention:
     *     r4 holds the non-null object to lock.
     *     Callee-save registers have been saved and can be used as temporaries.
     *     All argument registers need to be preserved.
     */
ENTRY art_jni_lock_object
    // Note: the slow path is actually the art_jni_lock_object_no_inline (tail call).
    // r5-r7 are passed as temporaries; they are callee-saves already saved by the JNI stub.
    LOCK_OBJECT_FAST_PATH r4, r5, r6, r7, .Llock_object_jni_slow, /*can_be_null*/ 0
END art_jni_lock_object
362
    /*
     * Entry from JNI stub that calls `artLockObjectFromCode()`
     * (the same as for managed code), may block for GC.
     * Custom calling convention:
     *     r4 holds the non-null object to lock.
     *     Callee-save registers have been saved and can be used as temporaries.
     *     All argument registers need to be preserved.
     */
    .extern artLockObjectFromCode
ENTRY art_jni_lock_object_no_inline
    // This is also the slow path for art_jni_lock_object.
    // Note that we need a local label as the assembler emits bad instructions
    // for CBZ/CBNZ if we try to jump to `art_jni_lock_object_no_inline`.
.Llock_object_jni_slow:
    // Save managed args, r4 (for stack alignment) and LR.
    SAVE_MANAGED_ARGS_R4_LR_INCREASE_FRAME
    // Call `artLockObjectFromCode()`
    mov    r0, r4                       @ Pass the object to lock.
    mov    r1, rSELF                    @ Pass Thread::Current().
    bl     artLockObjectFromCode        @ (Object* obj, Thread*)
    // Check result; non-zero means an exception is pending.
    cbnz   r0, 1f
    // Restore args and r4 and return.
    RESTORE_MANAGED_ARGS_R4_AND_RETURN /*restore_cfa*/ 1
1:
    // All args are irrelevant when throwing an exception and R4 is preserved
    // by the `artLockObjectFromCode()` call. Load LR and drop saved args and R4.
    // LR was pushed before the FPR args, so it sits at the top of the save area
    // at offset MANAGED_ARGS_R4_LR_SAVE_SIZE - 4.
    ldr    lr, [sp, #(MANAGED_ARGS_R4_LR_SAVE_SIZE - 4)]
    .cfi_restore lr
    DECREASE_FRAME MANAGED_ARGS_R4_LR_SAVE_SIZE
    // Make a tail call to `artDeliverPendingExceptionFromCode()`.
    // Rely on the JNI transition frame constructed in the JNI stub.
    mov    r0, rSELF                           @ Pass Thread::Current().
    b      artDeliverPendingExceptionFromCode  @ (Thread*)
END art_jni_lock_object_no_inline
398
    /*
     * Entry from JNI stub that tries to unlock the object in a fast path and calls
     * `artJniUnlockObject()` for the difficult cases. Note that failure to unlock
     * is fatal, so we do not need to check for exceptions in the slow path.
     * Custom calling convention:
     *     r4 holds the non-null object to unlock.
     *     Callee-save registers have been saved and can be used as temporaries.
     *     Return registers r0-r1 and s0-s1 need to be preserved.
     */
ENTRY art_jni_unlock_object
    // Note: the slow path is actually the art_jni_unlock_object_no_inline (tail call).
    // r5-r7 are passed as temporaries; they are callee-saves already saved by the JNI stub.
    UNLOCK_OBJECT_FAST_PATH r4, r5, r6, r7, .Lunlock_object_jni_slow, /*can_be_null*/ 0
END art_jni_unlock_object
412
    /*
     * Entry from JNI stub that calls `artJniUnlockObject()`. Note that failure to
     * unlock is fatal, so we do not need to check for exceptions.
     * Custom calling convention:
     *     r4 holds the non-null object to unlock.
     *     Callee-save registers have been saved and can be used as temporaries.
     *     Return registers r0-r1 and s0-s1 need to be preserved.
     * Generated via JNI_SAVE_RETURN_VALUE_TRAMPOLINE, passing the object (r4)
     * and rSELF as the two C++ arguments.
     */
    // This is also the slow path for art_jni_unlock_object.
JNI_SAVE_RETURN_VALUE_TRAMPOLINE art_jni_unlock_object_no_inline, artJniUnlockObject, r4, rSELF, \
    /* Note that we need a local label as the assembler emits bad instructions                */ \
    /* for CBZ/CBNZ if we try to jump to `art_jni_unlock_object_no_inline`.                   */ \
    .Lunlock_object_jni_slow
426