• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
#include "asm_support_arm64.S"

// Size of the save area for all managed argument registers:
// x0-x7 (8 GPRs * 8 bytes) + d0-d7 (8 FPRs * 8 bytes) = 128 bytes.
#define ALL_ARGS_SIZE (/*x0-x7*/ 8 * 8 + /*d0-d7*/ 8 * 8)
20
// Allocate ALL_ARGS_SIZE + \extra_space bytes and save the managed argument
// registers x0-x7 and d0-d7 at the bottom of the new frame. The \extra_space
// area above the saved args is left for the caller (e.g. to store LR) and
// must keep SP 16-byte aligned. Adjusts the CFA to match.
.macro SAVE_ALL_ARGS_INCREASE_FRAME extra_space
    // Save register args x0-x7, d0-d7 and return address.
    stp    x0, x1, [sp, #-(ALL_ARGS_SIZE + \extra_space)]!
    .cfi_adjust_cfa_offset (ALL_ARGS_SIZE + \extra_space)
    stp    x2, x3, [sp, #16]
    stp    x4, x5, [sp, #32]
    stp    x6, x7, [sp, #48]
    stp    d0, d1, [sp, #64]
    stp    d2, d3, [sp, #80]
    stp    d4, d5, [sp, #96]
    stp    d6, d7, [sp, #112]
.endm
33
// Reverse of SAVE_ALL_ARGS_INCREASE_FRAME: reload x0-x7 and d0-d7 from the
// bottom of the frame and deallocate ALL_ARGS_SIZE + \extra_space bytes
// (x0/x1 are loaded last, with the post-indexed SP writeback). Adjusts the
// CFA back to match.
.macro RESTORE_ALL_ARGS_DECREASE_FRAME extra_space
    ldp    x2, x3, [sp, #16]
    ldp    x4, x5, [sp, #32]
    ldp    x6, x7, [sp, #48]
    ldp    d0, d1, [sp, #64]
    ldp    d2, d3, [sp, #80]
    ldp    d4, d5, [sp, #96]
    ldp    d6, d7, [sp, #112]
    ldp    x0, x1, [sp], #(ALL_ARGS_SIZE + \extra_space)
    .cfi_adjust_cfa_offset -(ALL_ARGS_SIZE + \extra_space)
.endm
45
// Define a trampoline `\name` that preserves all managed argument registers
// (x0-x7, d0-d7) and LR across a call to `\cxx_name`. If \arg1 is given it is
// moved into x0 before the call; the C++ function's return value is ignored.
.macro JNI_SAVE_MANAGED_ARGS_TRAMPOLINE name, cxx_name, arg1 = "none"
    .extern \cxx_name
ENTRY \name
    // Save args and LR.
    SAVE_ALL_ARGS_INCREASE_FRAME /*padding*/ 8 + /*LR*/ 8
    str    lr, [sp, #(ALL_ARGS_SIZE + /*padding*/ 8)]
    .cfi_rel_offset lr, ALL_ARGS_SIZE + /*padding*/ 8
    // Call `cxx_name()`.
    .ifnc \arg1, none
        mov x0, \arg1                          // Pass arg1.
    .endif
    bl     \cxx_name                           // Call cxx_name(...).
    // Restore LR and args and return.
    ldr    lr, [sp, #(ALL_ARGS_SIZE + /*padding*/ 8)]
    .cfi_restore lr
    RESTORE_ALL_ARGS_DECREASE_FRAME /*padding*/ 8 + /*LR*/ 8
    ret
END \name
.endm
65
// Define a trampoline `\name` that preserves the managed return registers
// (x0 and d0) and LR across a call to `\cxx_name`. \arg1 (and optionally
// \arg2) are moved into x1/x0 argument registers before the call.
.macro JNI_SAVE_RETURN_VALUE_TRAMPOLINE name, cxx_name, arg1, arg2 = "none"
    .extern \cxx_name
ENTRY \name
    // Save return registers and return address.
    stp    x0, lr, [sp, #-32]!
    .cfi_adjust_cfa_offset 32
    .cfi_rel_offset lr, 8
    str    d0, [sp, #16]
    // Call `cxx_name()`.
    mov    x0, \arg1                           // Pass arg1.
    .ifnc \arg2, none
        mov x1, \arg2                          // Pass arg2.
    .endif
    bl     \cxx_name                           // Call cxx_name(...).
    // Restore return registers and return.
    ldr    d0, [sp, #16]
    ldp    x0, lr, [sp], #32
    .cfi_adjust_cfa_offset -32
    .cfi_restore lr
    ret
END \name
.endm
88
89    /*
90     * Jni dlsym lookup stub.
91     */
92    .extern artFindNativeMethod
93    .extern artFindNativeMethodRunnable
94ENTRY art_jni_dlsym_lookup_stub
95    // spill regs.
96    SAVE_ALL_ARGS_INCREASE_FRAME 2 * 8
97    stp   x29, x30, [sp, ALL_ARGS_SIZE]
98    .cfi_rel_offset x29, ALL_ARGS_SIZE
99    .cfi_rel_offset x30, ALL_ARGS_SIZE + 8
100    add   x29, sp, ALL_ARGS_SIZE
101
102    mov x0, xSELF   // pass Thread::Current()
103    // Call artFindNativeMethod() for normal native and artFindNativeMethodRunnable()
104    // for @FastNative or @CriticalNative.
105    ldr   xIP0, [x0, #THREAD_TOP_QUICK_FRAME_OFFSET]      // uintptr_t tagged_quick_frame
106    bic   xIP0, xIP0, #TAGGED_JNI_SP_MASK                 // ArtMethod** sp
107    ldr   xIP0, [xIP0]                                    // ArtMethod* method
108    ldr   xIP0, [xIP0, #ART_METHOD_ACCESS_FLAGS_OFFSET]   // uint32_t access_flags
109    mov   xIP1, #(ACCESS_FLAGS_METHOD_IS_FAST_NATIVE | ACCESS_FLAGS_METHOD_IS_CRITICAL_NATIVE)
110    tst   xIP0, xIP1
111    b.ne  .Llookup_stub_fast_or_critical_native
112    bl    artFindNativeMethod
113    b     .Llookup_stub_continue
114    .Llookup_stub_fast_or_critical_native:
115    bl    artFindNativeMethodRunnable
116.Llookup_stub_continue:
117    mov   x17, x0    // store result in scratch reg.
118
119    // load spill regs.
120    ldp   x29, x30, [sp, #ALL_ARGS_SIZE]
121    .cfi_restore x29
122    .cfi_restore x30
123    RESTORE_ALL_ARGS_DECREASE_FRAME 2 * 8
124
125    cbz   x17, 1f   // is method code null ?
126    br    x17       // if non-null, tail call to method's code.
127
1281:
129    ret             // restore regs and return to caller to handle exception.
130END art_jni_dlsym_lookup_stub
131
132    /*
133     * Jni dlsym lookup stub for @CriticalNative.
134     */
135ENTRY art_jni_dlsym_lookup_critical_stub
136    // The hidden arg holding the tagged method (bit 0 set means GenericJNI) is x15.
137    // For Generic JNI we already have a managed frame, so we reuse the art_jni_dlsym_lookup_stub.
138    tbnz  x15, #0, art_jni_dlsym_lookup_stub
139
140    // Save args, the hidden arg and caller PC. No CFI needed for args and the hidden arg.
141    SAVE_ALL_ARGS_INCREASE_FRAME 2 * 8
142    stp   x15, lr, [sp, #ALL_ARGS_SIZE]
143    .cfi_rel_offset lr, ALL_ARGS_SIZE + 8
144
145    // Call artCriticalNativeFrameSize(method, caller_pc)
146    mov   x0, x15  // x0 := method (from hidden arg)
147    mov   x1, lr   // x1 := caller_pc
148    bl    artCriticalNativeFrameSize
149
150    // Move frame size to x14.
151    mov   x14, x0
152
153    // Restore args, the hidden arg and caller PC.
154    ldp   x15, lr, [sp, #128]
155    .cfi_restore lr
156    RESTORE_ALL_ARGS_DECREASE_FRAME 2 * 8
157
158    // Reserve space for a SaveRefsAndArgs managed frame, either for the actual runtime
159    // method or for a GenericJNI frame which is similar but has a native method and a tag.
160    INCREASE_FRAME FRAME_SIZE_SAVE_REFS_AND_ARGS
161
162    // Calculate the base address of the managed frame.
163    add   x13, sp, x14
164
165    // Prepare the return address for managed stack walk of the SaveRefsAndArgs frame.
166    // If we're coming from JNI stub with tail call, it is LR. If we're coming from
167    // JNI stub that saved the return address, it will be the last value we copy below.
168    // If we're coming directly from compiled code, it is LR, set further down.
169    mov   xIP1, lr
170
171    // Move the stack args if any.
172    cbz   x14, .Lcritical_skip_copy_args
173    mov   x12, sp
174.Lcritical_copy_args_loop:
175    ldp   xIP0, xIP1, [x12, #FRAME_SIZE_SAVE_REFS_AND_ARGS]
176    subs  x14, x14, #16
177    stp   xIP0, xIP1, [x12], #16
178    bne   .Lcritical_copy_args_loop
179.Lcritical_skip_copy_args:
180
181    // Spill registers for the SaveRefsAndArgs frame above the stack args.
182    // Note that the runtime shall not examine the args here, otherwise we would have to
183    // move them in registers and stack to account for the difference between managed and
184    // native ABIs. Do not update CFI while we hold the frame address in x13 and the values
185    // in registers are unchanged.
186    stp   d0, d1, [x13, #16]
187    stp   d2, d3, [x13, #32]
188    stp   d4, d5, [x13, #48]
189    stp   d6, d7, [x13, #64]
190    stp   x1, x2, [x13, #80]
191    stp   x3, x4, [x13, #96]
192    stp   x5, x6, [x13, #112]
193    stp   x7, x20, [x13, #128]
194    stp   x21, x22, [x13, #144]
195    stp   x23, x24, [x13, #160]
196    stp   x25, x26, [x13, #176]
197    stp   x27, x28, [x13, #192]
198    stp   x29, xIP1, [x13, #208]  // xIP1: Save return address for tail call from JNI stub.
199    // (If there were any stack args, we're storing the value that's already there.
200    // For direct calls from compiled managed code, we shall overwrite this below.)
201
202    // Move the managed frame address to native callee-save register x29 and update CFI.
203    mov   x29, x13
204    // Skip args d0-d7, x1-x7
205    CFI_EXPRESSION_BREG 20, 29, 136
206    CFI_EXPRESSION_BREG 21, 29, 144
207    CFI_EXPRESSION_BREG 22, 29, 152
208    CFI_EXPRESSION_BREG 23, 29, 160
209    CFI_EXPRESSION_BREG 24, 29, 168
210    CFI_EXPRESSION_BREG 25, 29, 176
211    CFI_EXPRESSION_BREG 26, 29, 184
212    CFI_EXPRESSION_BREG 27, 29, 192
213    CFI_EXPRESSION_BREG 28, 29, 200
214    CFI_EXPRESSION_BREG 29, 29, 208
215    // The saved return PC for managed stack walk is not necessarily our LR.
216
217    // Save our return PC in the padding.
218    str   lr, [x29, #__SIZEOF_POINTER__]
219    CFI_EXPRESSION_BREG 30, 29, __SIZEOF_POINTER__
220
221    ldr   wIP0, [x15, #ART_METHOD_ACCESS_FLAGS_OFFSET]  // Load access flags.
222    add   x14, x29, #1            // Prepare managed SP tagged for a GenericJNI frame.
223    tbnz  wIP0, #ACCESS_FLAGS_METHOD_IS_NATIVE_BIT, .Lcritical_skip_prepare_runtime_method
224
225    // When coming from a compiled method, the return PC for managed stack walk is LR.
226    // (When coming from a compiled stub, the correct return PC is already stored above.)
227    str   lr, [x29, #(FRAME_SIZE_SAVE_REFS_AND_ARGS - __SIZEOF_POINTER__)]
228
229    // Replace the target method with the SaveRefsAndArgs runtime method.
230    LOAD_RUNTIME_INSTANCE x15
231    ldr   x15, [x15, #RUNTIME_SAVE_REFS_AND_ARGS_METHOD_OFFSET]
232
233    mov   x14, x29                // Prepare untagged managed SP for the runtime method.
234
235.Lcritical_skip_prepare_runtime_method:
236    // Store the method on the bottom of the managed frame.
237    str   x15, [x29]
238
239    // Place (maybe tagged) managed SP in Thread::Current()->top_quick_frame.
240    str   x14, [xSELF, #THREAD_TOP_QUICK_FRAME_OFFSET]
241
242    // Preserve the native arg register x0 in callee-save register x28 which was saved above.
243    mov   x28, x0
244
245    // Call artFindNativeMethodRunnable()
246    mov   x0, xSELF   // pass Thread::Current()
247    bl    artFindNativeMethodRunnable
248
249    // Store result in scratch reg.
250    mov   x13, x0
251
252    // Restore the native arg register x0.
253    mov   x0, x28
254
255    // Restore our return PC.
256    RESTORE_REG_BASE x29, lr, __SIZEOF_POINTER__
257
258    // Remember the stack args size, negated because SP cannot be on the right-hand side in SUB.
259    sub   x14, sp, x29
260
261    // Restore the frame. We shall not need the method anymore.
262    ldp   d0, d1, [x29, #16]
263    ldp   d2, d3, [x29, #32]
264    ldp   d4, d5, [x29, #48]
265    ldp   d6, d7, [x29, #64]
266    ldp   x1, x2, [x29, #80]
267    ldp   x3, x4, [x29, #96]
268    ldp   x5, x6, [x29, #112]
269    ldp   x7, x20, [x29, #128]
270    .cfi_restore x20
271    RESTORE_TWO_REGS_BASE x29, x21, x22, 144
272    RESTORE_TWO_REGS_BASE x29, x23, x24, 160
273    RESTORE_TWO_REGS_BASE x29, x25, x26, 176
274    RESTORE_TWO_REGS_BASE x29, x27, x28, 192
275    RESTORE_REG_BASE x29, x29, 208
276
277    REFRESH_MARKING_REGISTER
278
279    // Check for exception before moving args back to keep the return PC for managed stack walk.
280    cbz   x13, .Lcritical_deliver_exception
281
282    .cfi_remember_state
283
284    // Move stack args to their original place.
285    cbz   x14, .Lcritical_skip_copy_args_back
286    sub   x12, sp, x14
287.Lcritical_copy_args_back_loop:
288    ldp   xIP0, xIP1, [x12, #-16]!
289    adds  x14, x14, #16
290    stp   xIP0, xIP1, [x12, #FRAME_SIZE_SAVE_REFS_AND_ARGS]
291    bne   .Lcritical_copy_args_back_loop
292.Lcritical_skip_copy_args_back:
293
294    // Remove the frame reservation.
295    DECREASE_FRAME FRAME_SIZE_SAVE_REFS_AND_ARGS
296
297    // Do the tail call.
298    br    x13
299    CFI_RESTORE_STATE_AND_DEF_CFA sp, FRAME_SIZE_SAVE_REFS_AND_ARGS
300
301.Lcritical_deliver_exception:
302    // The exception delivery checks that xSELF was saved but the SaveRefsAndArgs
303    // frame does not save it, so we cannot use the existing SaveRefsAndArgs frame.
304    // That's why we checked for exception after restoring registers from it.
305    // We need to build a SaveAllCalleeSaves frame instead. Args are irrelevant at this
306    // point but keep the area allocated for stack args to keep CFA definition simple.
307    DECREASE_FRAME FRAME_SIZE_SAVE_REFS_AND_ARGS - FRAME_SIZE_SAVE_ALL_CALLEE_SAVES
308
309    // Calculate the base address of the managed frame.
310    sub   x13, sp, x14
311
312    // Spill registers for the SaveAllCalleeSaves frame above the stack args area. Do not update
313    // CFI while we hold the frame address in x13 and the values in registers are unchanged.
314    stp   d8, d9, [x13, #16]
315    stp   d10, d11, [x13, #32]
316    stp   d12, d13, [x13, #48]
317    stp   d14, d15, [x13, #64]
318    stp   x19, x20, [x13, #80]
319    stp   x21, x22, [x13, #96]
320    stp   x23, x24, [x13, #112]
321    stp   x25, x26, [x13, #128]
322    stp   x27, x28, [x13, #144]
323    str   x29, [x13, #160]
324    // Keep the caller PC for managed stack walk.
325
326    // Move the managed frame address to native callee-save register x29 and update CFI.
327    mov   x29, x13
328    CFI_EXPRESSION_BREG 19, 29, 80
329    CFI_EXPRESSION_BREG 20, 29, 88
330    CFI_EXPRESSION_BREG 21, 29, 96
331    CFI_EXPRESSION_BREG 22, 29, 104
332    CFI_EXPRESSION_BREG 23, 29, 112
333    CFI_EXPRESSION_BREG 24, 29, 120
334    CFI_EXPRESSION_BREG 25, 29, 128
335    CFI_EXPRESSION_BREG 26, 29, 136
336    CFI_EXPRESSION_BREG 27, 29, 144
337    CFI_EXPRESSION_BREG 28, 29, 152
338    CFI_EXPRESSION_BREG 29, 29, 160
339    // The saved return PC for managed stack walk is not necessarily our LR.
340
341    // Save our return PC in the padding.
342    str   lr, [x29, #__SIZEOF_POINTER__]
343    CFI_EXPRESSION_BREG 30, 29, __SIZEOF_POINTER__
344
345    // Store ArtMethod* Runtime::callee_save_methods_[kSaveAllCalleeSaves] to the managed frame.
346    LOAD_RUNTIME_INSTANCE xIP0
347    ldr   xIP0, [xIP0, #RUNTIME_SAVE_ALL_CALLEE_SAVES_METHOD_OFFSET]
348    str   xIP0, [x29]
349
350    // Place the managed frame SP in Thread::Current()->top_quick_frame.
351    str   x29, [xSELF, #THREAD_TOP_QUICK_FRAME_OFFSET]
352
353    DELIVER_PENDING_EXCEPTION_FRAME_READY
354END art_jni_dlsym_lookup_critical_stub
355
356    /*
357     * Read barrier for the method's declaring class needed by JNI stub for static methods.
358     * (We're using a pointer to the declaring class in `ArtMethod` as `jclass`.)
359     */
360// The method argument is already in x0 for call to `artJniReadBarrier(ArtMethod*)`.
361JNI_SAVE_MANAGED_ARGS_TRAMPOLINE art_jni_read_barrier, artJniReadBarrier
362
363    /*
364     * Trampoline to `artJniMethodStart()` that preserves all managed arguments.
365     */
366JNI_SAVE_MANAGED_ARGS_TRAMPOLINE art_jni_method_start, artJniMethodStart, xSELF
367
368    /*
369     * Trampoline to `artJniMethodEntryHook` that preserves all managed arguments.
370     */
371JNI_SAVE_MANAGED_ARGS_TRAMPOLINE art_jni_method_entry_hook, artJniMethodEntryHook, xSELF
372
373    /*
374     * Trampoline to `artJniMonitoredMethodStart()` that preserves all managed arguments.
375     */
376JNI_SAVE_MANAGED_ARGS_TRAMPOLINE art_jni_monitored_method_start, artJniMonitoredMethodStart, xSELF
377
378    /*
379     * Trampoline to `artJniMethodEnd()` that preserves all return registers.
380     */
381JNI_SAVE_RETURN_VALUE_TRAMPOLINE art_jni_method_end, artJniMethodEnd, xSELF
382
383    /*
384     * Trampoline to `artJniMonitoredMethodEnd()` that preserves all return registers.
385     */
386JNI_SAVE_RETURN_VALUE_TRAMPOLINE art_jni_monitored_method_end, artJniMonitoredMethodEnd, xSELF
387
388    /*
389     * Entry from JNI stub that tries to lock the object in a fast path and
390     * calls `artLockObjectFromCode()` (the same as for managed code) for the
391     * difficult cases, may block for GC.
392     * Custom calling convention:
393     *     x15 holds the non-null object to lock.
394     *     Callee-save registers have been saved and can be used as temporaries.
395     *     All argument registers need to be preserved.
396     */
397ENTRY art_jni_lock_object
398    LOCK_OBJECT_FAST_PATH x15, art_jni_lock_object_no_inline, /*can_be_null*/ 0
399END art_jni_lock_object
400
401    /*
402     * Entry from JNI stub that calls `artLockObjectFromCode()`
403     * (the same as for managed code), may block for GC.
404     * Custom calling convention:
405     *     x15 holds the non-null object to lock.
406     *     Callee-save registers have been saved and can be used as temporaries.
407     *     All argument registers need to be preserved.
408     */
409    .extern artLockObjectFromCode
410ENTRY art_jni_lock_object_no_inline
411    // This is also the slow path for art_jni_lock_object.
412    // Save args and LR.
413    SAVE_ALL_ARGS_INCREASE_FRAME /*padding*/ 8 + /*LR*/ 8
414    str    lr, [sp, #(ALL_ARGS_SIZE + /*padding*/ 8)]
415    .cfi_rel_offset lr, ALL_ARGS_SIZE + /*padding*/ 8
416    // Call `artLockObjectFromCode()`.
417    mov    x0, x15                    // Pass the object to lock.
418    mov    x1, xSELF                  // Pass Thread::Current().
419    bl     artLockObjectFromCode      // (Object* obj, Thread*)
420    // Restore return address.
421    ldr    lr, [sp, #(ALL_ARGS_SIZE + /*padding*/ 8)]
422    .cfi_restore lr
423    // Check result.
424    cbnz   x0, 1f
425    // Restore register args x0-x7, d0-d7 and return.
426    RESTORE_ALL_ARGS_DECREASE_FRAME /*padding*/ 8 + /*LR*/ 8
427    ret
428    .cfi_adjust_cfa_offset (ALL_ARGS_SIZE + /*padding*/ 8 + /*LR*/ 8)
4291:
430    // All args are irrelevant when throwing an exception. Remove the spill area.
431    DECREASE_FRAME (ALL_ARGS_SIZE + /*padding*/ 8 + /*LR*/ 8)
432    // Make a tail call to `artDeliverPendingExceptionFromCode()`.
433    // Rely on the JNI transition frame constructed in the JNI stub.
434    mov    x0, xSELF                           // Pass Thread::Current().
435    b      artDeliverPendingExceptionFromCode  // (Thread*)
436END art_jni_lock_object_no_inline
437
438    /*
439     * Entry from JNI stub that tries to unlock the object in a fast path and calls
440     * `artJniUnlockObject()` for the difficult cases. Note that failure to unlock
441     * is fatal, so we do not need to check for exceptions in the slow path.
442     * Custom calling convention:
443     *     x15 holds the non-null object to unlock.
444     *     Callee-save registers have been saved and can be used as temporaries.
445     *     Return registers r0 and d0 need to be preserved.
446     */
447ENTRY art_jni_unlock_object
448    UNLOCK_OBJECT_FAST_PATH x15, art_jni_unlock_object_no_inline, /*can_be_null*/ 0
449END art_jni_unlock_object
450
451    /*
452     * Entry from JNI stub that calls `artJniUnlockObject()`. Note that failure to
453     * unlock is fatal, so we do not need to check for exceptions.
454     * Custom calling convention:
455     *     x15 holds the non-null object to unlock.
456     *     Callee-save registers have been saved and can be used as temporaries.
457     *     Return registers r0 and d0 need to be preserved.
458     */
459    // This is also the slow path for art_jni_unlock_object.
460JNI_SAVE_RETURN_VALUE_TRAMPOLINE art_jni_unlock_object_no_inline, artJniUnlockObject, x15, xSELF
461