/*
 * Copyright (C) 2023 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_ARCH_RISCV64_ASM_SUPPORT_RISCV64_S_
#define ART_RUNTIME_ARCH_RISCV64_ASM_SUPPORT_RISCV64_S_

#include "asm_support_riscv64.h"
#include "interpreter/cfi_asm_support.h"

// Define special registers.

// Register holding Thread::Current().
#define xSELF s1


.macro ENTRY name
    .hidden \name  // Hide this as a global symbol, so we do not incur plt calls.
    .global \name
    .balign 16
\name:
    .cfi_startproc
.endm


.macro END name
    .cfi_endproc
    .size \name, .-\name
.endm


.macro UNDEFINED name
    ENTRY \name
        unimp
    END \name
.endm
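
// Usage sketch (placeholder names, not real ART entry points): ENTRY/END bracket a routine and
// emit the matching .cfi_startproc/.cfi_endproc, while UNDEFINED defines a stub that traps if it
// is ever reached:
//
//     UNDEFINED art_quick_example_unimplemented
//
//     ENTRY art_quick_example_nop
//         ret
//     END art_quick_example_nop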


.macro CFI_REMEMBER_STATE
    .cfi_remember_state
.endm


// The spec is not clear whether the CFA is part of the saved state and tools differ in the
// behaviour, so explicitly set the CFA to avoid any ambiguity.
// The restored CFA state should match the CFA state during CFI_REMEMBER_STATE.
.macro CFI_RESTORE_STATE_AND_DEF_CFA reg, offset
    .cfi_restore_state
    .cfi_def_cfa \reg, \offset
.endm
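
// Usage sketch (hypothetical condition register and frame size): the usual pattern is to snapshot
// the CFI state before the fast-path epilogue and restore it, together with the CFA rule that was
// in effect at the snapshot, at the start of the slow path:
//
//     bnez  t0, 1f                             // Take the slow path on some condition.
//     CFI_REMEMBER_STATE                       // Snapshot CFI here, frame still on the stack.
//     DECREASE_FRAME 32                        // Fast path: pop the frame, CFA changes.
//     ret
// 1:
//     CFI_RESTORE_STATE_AND_DEF_CFA sp, 32     // Slow path: CFI is as it was at the snapshot.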


.macro CFI_EXPRESSION_BREG n, b, offset
    .if (-0x40 <= (\offset)) && ((\offset) < 0x40)
        CFI_EXPRESSION_BREG_1(\n, \b, \offset)
    .elseif (-0x2000 <= (\offset)) && ((\offset) < 0x2000)
        CFI_EXPRESSION_BREG_2(\n, \b, \offset)
    .else
        .error "Unsupported offset"
    .endif
.endm


.macro CFI_DEF_CFA_BREG_PLUS_UCONST reg, offset, size
    .if (((\offset) < -0x40) || ((\offset) >= 0x40))
        .error "Unsupported offset"
    .endif

    .if ((\size) < 0)
        .error "Unsupported size, negative"
    .elseif ((\size) < 0x80)
        CFI_DEF_CFA_BREG_PLUS_UCONST_1_1(\reg, \offset, \size)
    .elseif ((\size) < 0x4000)
        CFI_DEF_CFA_BREG_PLUS_UCONST_1_2(\reg, \offset, \size)
    .else
        .error "Unsupported size, too large"
    .endif
.endm


// Macro to poison (negate) the reference for heap poisoning.
.macro POISON_HEAP_REF ref
#ifdef USE_HEAP_POISONING
    neg \ref, \ref
    zext.w \ref, \ref
#endif  // USE_HEAP_POISONING
.endm


// Macro to unpoison (negate) the reference for heap poisoning.
.macro UNPOISON_HEAP_REF ref
#ifdef USE_HEAP_POISONING
    neg \ref, \ref
    zext.w \ref, \ref
#endif  // USE_HEAP_POISONING
.endm
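
// Usage sketch (hypothetical registers and field offset): with heap poisoning enabled, a reference
// is negated before being stored to the heap and negated again after being loaded, so poisoning
// and unpoisoning are the same operation (negation is its own inverse). Storing and reloading a
// 32-bit reference held in t1 into a field of the object in t0 might look like:
//
//     POISON_HEAP_REF t1
//     sw  t1, EXAMPLE_FIELD_OFFSET(t0)   // EXAMPLE_FIELD_OFFSET is a placeholder constant.
//     ...
//     lwu t1, EXAMPLE_FIELD_OFFSET(t0)
//     UNPOISON_HEAP_REF t1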


.macro INCREASE_FRAME frame_adjustment
    addi sp, sp, -(\frame_adjustment)
    .cfi_adjust_cfa_offset (\frame_adjustment)
.endm


.macro DECREASE_FRAME frame_adjustment
    addi sp, sp, (\frame_adjustment)
    .cfi_adjust_cfa_offset -(\frame_adjustment)
.endm


.macro SAVE_GPR_BASE base, reg, offset
    sd \reg, (\offset)(\base)
    .cfi_rel_offset \reg, (\offset)
.endm


.macro SAVE_GPR reg, offset
    SAVE_GPR_BASE sp, \reg, \offset
.endm


.macro RESTORE_GPR_BASE base, reg, offset
    ld \reg, (\offset)(\base)
    .cfi_restore \reg
.endm


.macro RESTORE_GPR reg, offset
    RESTORE_GPR_BASE sp, \reg, \offset
.endm


.macro RESTORE_GPR_NE skip, reg, offset
    .ifnc \skip, \reg
    RESTORE_GPR_BASE sp, \reg, \offset
    .endif
.endm


.macro SAVE_FPR reg, offset
    fsd \reg, (\offset)(sp)
    .cfi_rel_offset \reg, (\offset)
.endm


.macro RESTORE_FPR reg, offset
    fld \reg, (\offset)(sp)
    .cfi_restore \reg
.endm
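
// Usage sketch (hypothetical frame layout): a small routine that needs two spill slots can pair
// INCREASE_FRAME/DECREASE_FRAME with SAVE_GPR/RESTORE_GPR so that the stack adjustment and the
// spill slots all get matching CFI records:
//
//     INCREASE_FRAME 16
//     SAVE_GPR s2, (8*0)
//     SAVE_GPR ra, (8*1)
//     ...
//     RESTORE_GPR s2, (8*0)
//     RESTORE_GPR ra, (8*1)
//     DECREASE_FRAME 16
//     ret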


.macro LOAD_RUNTIME_INSTANCE reg
#if __has_feature(hwaddress_sanitizer)
#error "ART does not support HWASAN on RISC-V yet"
#else
    la \reg, _ZN3art7Runtime9instance_E
#endif
    ld \reg, 0(\reg)
.endm


// We need to save callee-save GPRs on the stack as they may contain references that must be
// visible to the GC (unless the called method holds the mutator lock and prevents GC from
// happening). FP callee-saves shall be preserved by whatever runtime function we call, so they
// do not need to be saved.
.macro SETUP_SAVE_REFS_AND_ARGS_FRAME_INTERNAL
#if (FRAME_SIZE_SAVE_REFS_AND_ARGS != 8*(1 + 8 + 7 + 11 + 1))
#error "FRAME_SIZE_SAVE_REFS_AND_ARGS(RISCV64) size not as expected."
#endif
    // stack slot (0*8)(sp) is for ArtMethod*

    SAVE_FPR fa0, (1*8)
    SAVE_FPR fa1, (2*8)
    SAVE_FPR fa2, (3*8)
    SAVE_FPR fa3, (4*8)
    SAVE_FPR fa4, (5*8)
    SAVE_FPR fa5, (6*8)
    SAVE_FPR fa6, (7*8)
    SAVE_FPR fa7, (8*8)

    SAVE_GPR fp,  (9*8)  // x8, frame pointer
    // s1 (x9) is the ART thread register

    // a0 (x10) is the method pointer
    SAVE_GPR a1,  (10*8)  // x11
    SAVE_GPR a2,  (11*8)  // x12
    SAVE_GPR a3,  (12*8)  // x13
    SAVE_GPR a4,  (13*8)  // x14
    SAVE_GPR a5,  (14*8)  // x15
    SAVE_GPR a6,  (15*8)  // x16
    SAVE_GPR a7,  (16*8)  // x17

    SAVE_GPR s2,  (17*8)  // x18
    SAVE_GPR s3,  (18*8)  // x19
    SAVE_GPR s4,  (19*8)  // x20
    SAVE_GPR s5,  (20*8)  // x21
    SAVE_GPR s6,  (21*8)  // x22
    SAVE_GPR s7,  (22*8)  // x23
    SAVE_GPR s8,  (23*8)  // x24
    SAVE_GPR s9,  (24*8)  // x25
    SAVE_GPR s10, (25*8)  // x26
    SAVE_GPR s11, (26*8)  // x27

    SAVE_GPR ra,  (27*8)  // x1, return address
.endm


.macro RESTORE_SAVE_REFS_AND_ARGS_FRAME_INTERNAL
    // stack slot (0*8)(sp) is for ArtMethod*

    RESTORE_FPR fa0, (1*8)
    RESTORE_FPR fa1, (2*8)
    RESTORE_FPR fa2, (3*8)
    RESTORE_FPR fa3, (4*8)
    RESTORE_FPR fa4, (5*8)
    RESTORE_FPR fa5, (6*8)
    RESTORE_FPR fa6, (7*8)
    RESTORE_FPR fa7, (8*8)

    RESTORE_GPR fp,  (9*8)  // x8, frame pointer

    // a0 is the method pointer
    RESTORE_GPR a1,  (10*8)  // x11
    RESTORE_GPR a2,  (11*8)  // x12
    RESTORE_GPR a3,  (12*8)  // x13
    RESTORE_GPR a4,  (13*8)  // x14
    RESTORE_GPR a5,  (14*8)  // x15
    RESTORE_GPR a6,  (15*8)  // x16
    RESTORE_GPR a7,  (16*8)  // x17

    // s1 is the ART thread register
    RESTORE_GPR s2,  (17*8)  // x18
    RESTORE_GPR s3,  (18*8)  // x19
    RESTORE_GPR s4,  (19*8)  // x20
    RESTORE_GPR s5,  (20*8)  // x21
    RESTORE_GPR s6,  (21*8)  // x22
    RESTORE_GPR s7,  (22*8)  // x23
    RESTORE_GPR s8,  (23*8)  // x24
    RESTORE_GPR s9,  (24*8)  // x25
    RESTORE_GPR s10, (25*8)  // x26
    RESTORE_GPR s11, (26*8)  // x27

    RESTORE_GPR ra,  (27*8)  // x1, return address
.endm


.macro SETUP_CALLEE_SAVE_FRAME_COMMON_INTERNAL reg
    // ArtMethod* is in reg, store it at the bottom of the stack.
    sd \reg, (sp)

    // Place sp in Thread::Current()->top_quick_frame.
    sd sp, THREAD_TOP_QUICK_FRAME_OFFSET(xSELF)
.endm


.macro SETUP_CALLEE_SAVE_FRAME_COMMON tmpreg, runtime_method_offset
    // art::Runtime* tmpreg = art::Runtime::instance_;
    LOAD_RUNTIME_INSTANCE \tmpreg

    // ArtMethod* tmpreg = Runtime::instance_->callee_save_methods_[<callee-save-frame-type>];
    ld  \tmpreg, \runtime_method_offset(\tmpreg)

    SETUP_CALLEE_SAVE_FRAME_COMMON_INTERNAL \tmpreg
.endm


.macro SETUP_SAVE_REFS_AND_ARGS_FRAME
    INCREASE_FRAME FRAME_SIZE_SAVE_REFS_AND_ARGS
    SETUP_SAVE_REFS_AND_ARGS_FRAME_INTERNAL
    SETUP_CALLEE_SAVE_FRAME_COMMON t0, RUNTIME_SAVE_REFS_AND_ARGS_METHOD_OFFSET
.endm


.macro SETUP_SAVE_REFS_AND_ARGS_FRAME_WITH_METHOD_IN_A0
    INCREASE_FRAME FRAME_SIZE_SAVE_REFS_AND_ARGS
    SETUP_SAVE_REFS_AND_ARGS_FRAME_INTERNAL
    SETUP_CALLEE_SAVE_FRAME_COMMON_INTERNAL a0
.endm


.macro RESTORE_SAVE_REFS_AND_ARGS_FRAME
    RESTORE_SAVE_REFS_AND_ARGS_FRAME_INTERNAL
    DECREASE_FRAME FRAME_SIZE_SAVE_REFS_AND_ARGS
.endm
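
// Usage sketch (placeholder stub and runtime function names): a typical quick entrypoint builds
// the refs-and-args frame, calls into the runtime with xSELF as an argument, then tears the frame
// down and checks for a pending exception:
//
//     ENTRY art_quick_example_with_refs_and_args
//         SETUP_SAVE_REFS_AND_ARGS_FRAME     // Spills args, stores ArtMethod*, sets top_quick_frame.
//         mv a1, xSELF                       // Pass Thread::Current().
//         call artExampleRuntimeEntrypoint   // Placeholder runtime function.
//         RESTORE_SAVE_REFS_AND_ARGS_FRAME
//         RETURN_OR_DELIVER_PENDING_EXCEPTION_REG t0   // Defined further below.
//     END art_quick_example_with_refs_and_args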


.macro SAVE_ALL_CALLEE_SAVES
#if (FRAME_SIZE_SAVE_ALL_CALLEE_SAVES != 8*(12 + 11 + 1 + 1 + 1))
#error "FRAME_SIZE_SAVE_ALL_CALLEE_SAVES(RISCV64) size not as expected."
#endif
    // stack slot (0*8)(sp) is for ArtMethod*
    // stack slot (1*8)(sp) is for padding

    // FP callee-saves.
    SAVE_FPR fs0,  (8*2)   // f8
    SAVE_FPR fs1,  (8*3)   // f9
    SAVE_FPR fs2,  (8*4)   // f18
    SAVE_FPR fs3,  (8*5)   // f19
    SAVE_FPR fs4,  (8*6)   // f20
    SAVE_FPR fs5,  (8*7)   // f21
    SAVE_FPR fs6,  (8*8)   // f22
    SAVE_FPR fs7,  (8*9)   // f23
    SAVE_FPR fs8,  (8*10)  // f24
    SAVE_FPR fs9,  (8*11)  // f25
    SAVE_FPR fs10, (8*12)  // f26
    SAVE_FPR fs11, (8*13)  // f27

    // GP callee-saves
    SAVE_GPR s0,  (8*14)  // x8/fp, frame pointer
    // s1 (x9) is the ART thread register
    SAVE_GPR s2,  (8*15)  // x18
    SAVE_GPR s3,  (8*16)  // x19
    SAVE_GPR s4,  (8*17)  // x20
    SAVE_GPR s5,  (8*18)  // x21
    SAVE_GPR s6,  (8*19)  // x22
    SAVE_GPR s7,  (8*20)  // x23
    SAVE_GPR s8,  (8*21)  // x24
    SAVE_GPR s9,  (8*22)  // x25
    SAVE_GPR s10, (8*23)  // x26
    SAVE_GPR s11, (8*24)  // x27

    SAVE_GPR ra,  (8*25)  // x1, return address
.endm


.macro RESTORE_ALL_CALLEE_SAVES
#if (FRAME_SIZE_SAVE_ALL_CALLEE_SAVES != 8*(12 + 11 + 1 + 1 + 1))
#error "FRAME_SIZE_SAVE_ALL_CALLEE_SAVES(RISCV64) size not as expected."
#endif
    // stack slot (8*0)(sp) is for ArtMethod*
    // stack slot (8*1)(sp) is for padding

    // FP callee-saves.
    RESTORE_FPR fs0,  (8*2)   // f8
    RESTORE_FPR fs1,  (8*3)   // f9
    RESTORE_FPR fs2,  (8*4)   // f18
    RESTORE_FPR fs3,  (8*5)   // f19
    RESTORE_FPR fs4,  (8*6)   // f20
    RESTORE_FPR fs5,  (8*7)   // f21
    RESTORE_FPR fs6,  (8*8)   // f22
    RESTORE_FPR fs7,  (8*9)   // f23
    RESTORE_FPR fs8,  (8*10)  // f24
    RESTORE_FPR fs9,  (8*11)  // f25
    RESTORE_FPR fs10, (8*12)  // f26
    RESTORE_FPR fs11, (8*13)  // f27

    // GP callee-saves
    RESTORE_GPR s0,  (8*14)  // x8/fp, frame pointer
    // s1 is the ART thread register
    RESTORE_GPR s2,  (8*15)  // x18
    RESTORE_GPR s3,  (8*16)  // x19
    RESTORE_GPR s4,  (8*17)  // x20
    RESTORE_GPR s5,  (8*18)  // x21
    RESTORE_GPR s6,  (8*19)  // x22
    RESTORE_GPR s7,  (8*20)  // x23
    RESTORE_GPR s8,  (8*21)  // x24
    RESTORE_GPR s9,  (8*22)  // x25
    RESTORE_GPR s10, (8*23)  // x26
    RESTORE_GPR s11, (8*24)  // x27

    RESTORE_GPR ra,  (8*25)  // x1, return address
.endm


.macro SETUP_SAVE_ALL_CALLEE_SAVES_FRAME
    INCREASE_FRAME FRAME_SIZE_SAVE_ALL_CALLEE_SAVES
    SAVE_ALL_CALLEE_SAVES
    SETUP_CALLEE_SAVE_FRAME_COMMON t0, RUNTIME_SAVE_ALL_CALLEE_SAVES_METHOD_OFFSET
.endm


.macro SETUP_SAVE_EVERYTHING_FRAME_DECREMENTED_SP_SKIP_RA \
        runtime_method_offset = RUNTIME_SAVE_EVERYTHING_METHOD_OFFSET
#if (FRAME_SIZE_SAVE_EVERYTHING != 8*(1 + 32 + 27))
#error "FRAME_SIZE_SAVE_EVERYTHING(RISCV64) size not as expected."
#endif
    // stack slot (8*0)(sp) is for ArtMethod*

    // 32 slots for FPRs
    SAVE_FPR ft0,  8*1   // f0
    SAVE_FPR ft1,  8*2   // f1
    SAVE_FPR ft2,  8*3   // f2
    SAVE_FPR ft3,  8*4   // f3
    SAVE_FPR ft4,  8*5   // f4
    SAVE_FPR ft5,  8*6   // f5
    SAVE_FPR ft6,  8*7   // f6
    SAVE_FPR ft7,  8*8   // f7
    SAVE_FPR fs0,  8*9   // f8
    SAVE_FPR fs1,  8*10  // f9
#define SAVE_EVERYTHING_FRAME_OFFSET_FA0 (8*11)
    SAVE_FPR fa0,  8*11  // f10, its offset must equal SAVE_EVERYTHING_FRAME_OFFSET_FA0
    SAVE_FPR fa1,  8*12  // f11
    SAVE_FPR fa2,  8*13  // f12
    SAVE_FPR fa3,  8*14  // f13
    SAVE_FPR fa4,  8*15  // f14
    SAVE_FPR fa5,  8*16  // f15
    SAVE_FPR fa6,  8*17  // f16
    SAVE_FPR fa7,  8*18  // f17
    SAVE_FPR fs2,  8*19  // f18
    SAVE_FPR fs3,  8*20  // f19
    SAVE_FPR fs4,  8*21  // f20
    SAVE_FPR fs5,  8*22  // f21
    SAVE_FPR fs6,  8*23  // f22
    SAVE_FPR fs7,  8*24  // f23
    SAVE_FPR fs8,  8*25  // f24
    SAVE_FPR fs9,  8*26  // f25
    SAVE_FPR fs10, 8*27  // f26
    SAVE_FPR fs11, 8*28  // f27
    SAVE_FPR ft8,  8*29  // f28
    SAVE_FPR ft9,  8*30  // f29
    SAVE_FPR ft10, 8*31  // f30
    SAVE_FPR ft11, 8*32  // f31

    // 27 slots for GPRs (excluded: zero/x0, sp/x2, gp/x3, tp/x4, s1/x9 -- the ART thread register)
    SAVE_GPR t0,  8*33  // x5
    SAVE_GPR t1,  8*34  // x6
    SAVE_GPR t2,  8*35  // x7
    SAVE_GPR s0,  8*36  // x8
#define SAVE_EVERYTHING_FRAME_OFFSET_A0 (8*37)
    SAVE_GPR a0,  8*37  // x10, its offset must equal SAVE_EVERYTHING_FRAME_OFFSET_A0
    SAVE_GPR a1,  8*38  // x11
    SAVE_GPR a2,  8*39  // x12
    SAVE_GPR a3,  8*40  // x13
    SAVE_GPR a4,  8*41  // x14
    SAVE_GPR a5,  8*42  // x15
    SAVE_GPR a6,  8*43  // x16
    SAVE_GPR a7,  8*44  // x17
    SAVE_GPR s2,  8*45  // x18
    SAVE_GPR s3,  8*46  // x19
    SAVE_GPR s4,  8*47  // x20
    SAVE_GPR s5,  8*48  // x21
    SAVE_GPR s6,  8*49  // x22
    SAVE_GPR s7,  8*50  // x23
    SAVE_GPR s8,  8*51  // x24
    SAVE_GPR s9,  8*52  // x25
    SAVE_GPR s10, 8*53  // x26
    SAVE_GPR s11, 8*54  // x27
    SAVE_GPR t3,  8*55  // x28
    SAVE_GPR t4,  8*56  // x29
    SAVE_GPR t5,  8*57  // x30
    SAVE_GPR t6,  8*58  // x31

    // RA already saved by the user of this macro.

    SETUP_CALLEE_SAVE_FRAME_COMMON t0, \runtime_method_offset
.endm


.macro SETUP_SAVE_EVERYTHING_FRAME runtime_method_offset = RUNTIME_SAVE_EVERYTHING_METHOD_OFFSET
#if (FRAME_SIZE_SAVE_EVERYTHING != 8*(1 + 32 + 27))
#error "FRAME_SIZE_SAVE_EVERYTHING(RISCV64) size not as expected."
#endif
    INCREASE_FRAME FRAME_SIZE_SAVE_EVERYTHING
    SAVE_GPR ra,  8*59  // x1, return address
    SETUP_SAVE_EVERYTHING_FRAME_DECREMENTED_SP_SKIP_RA \runtime_method_offset
.endm


.macro RESTORE_SAVE_EVERYTHING_FRAME load_a0 = 1
    // stack slot (8*0)(sp) is for ArtMethod*

    // 32 slots for FPRs
    RESTORE_FPR ft0,  (8*1)   // f0
    RESTORE_FPR ft1,  (8*2)   // f1
    RESTORE_FPR ft2,  (8*3)   // f2
    RESTORE_FPR ft3,  (8*4)   // f3
    RESTORE_FPR ft4,  (8*5)   // f4
    RESTORE_FPR ft5,  (8*6)   // f5
    RESTORE_FPR ft6,  (8*7)   // f6
    RESTORE_FPR ft7,  (8*8)   // f7
    RESTORE_FPR fs0,  (8*9)   // f8
    RESTORE_FPR fs1,  (8*10)  // f9
#if SAVE_EVERYTHING_FRAME_OFFSET_FA0 != (8*11)
#error "unexpected SAVE_EVERYTHING_FRAME_OFFSET_FA0"
#endif
    RESTORE_FPR fa0,  (8*11)  // f10, offset must equal SAVE_EVERYTHING_FRAME_OFFSET_FA0
    RESTORE_FPR fa1,  (8*12)  // f11
    RESTORE_FPR fa2,  (8*13)  // f12
    RESTORE_FPR fa3,  (8*14)  // f13
    RESTORE_FPR fa4,  (8*15)  // f14
    RESTORE_FPR fa5,  (8*16)  // f15
    RESTORE_FPR fa6,  (8*17)  // f16
    RESTORE_FPR fa7,  (8*18)  // f17
    RESTORE_FPR fs2,  (8*19)  // f18
    RESTORE_FPR fs3,  (8*20)  // f19
    RESTORE_FPR fs4,  (8*21)  // f20
    RESTORE_FPR fs5,  (8*22)  // f21
    RESTORE_FPR fs6,  (8*23)  // f22
    RESTORE_FPR fs7,  (8*24)  // f23
    RESTORE_FPR fs8,  (8*25)  // f24
    RESTORE_FPR fs9,  (8*26)  // f25
    RESTORE_FPR fs10, (8*27)  // f26
    RESTORE_FPR fs11, (8*28)  // f27
    RESTORE_FPR ft8,  (8*29)  // f28
    RESTORE_FPR ft9,  (8*30)  // f29
    RESTORE_FPR ft10, (8*31)  // f30
    RESTORE_FPR ft11, (8*32)  // f31

    // 26 slots for GPRs restored here (excluded: zero/x0, sp/x2, gp/x3, tp/x4, s1/x9 -- the ART
    // thread register; ra/x1 is restored separately below)
    RESTORE_GPR t0,  (8*33)  // x5
    RESTORE_GPR t1,  (8*34)  // x6
    RESTORE_GPR t2,  (8*35)  // x7
    RESTORE_GPR s0,  (8*36)  // x8
#if SAVE_EVERYTHING_FRAME_OFFSET_A0 != (8*37)
#error "unexpected SAVE_EVERYTHING_FRAME_OFFSET_A0"
#endif
    .if \load_a0
    RESTORE_GPR a0,  (8*37)  // x10, offset must equal SAVE_EVERYTHING_FRAME_OFFSET_A0
    .endif
    RESTORE_GPR a1,  (8*38)  // x11
    RESTORE_GPR a2,  (8*39)  // x12
    RESTORE_GPR a3,  (8*40)  // x13
    RESTORE_GPR a4,  (8*41)  // x14
    RESTORE_GPR a5,  (8*42)  // x15
    RESTORE_GPR a6,  (8*43)  // x16
    RESTORE_GPR a7,  (8*44)  // x17
    RESTORE_GPR s2,  (8*45)  // x18
    RESTORE_GPR s3,  (8*46)  // x19
    RESTORE_GPR s4,  (8*47)  // x20
    RESTORE_GPR s5,  (8*48)  // x21
    RESTORE_GPR s6,  (8*49)  // x22
    RESTORE_GPR s7,  (8*50)  // x23
    RESTORE_GPR s8,  (8*51)  // x24
    RESTORE_GPR s9,  (8*52)  // x25
    RESTORE_GPR s10, (8*53)  // x26
    RESTORE_GPR s11, (8*54)  // x27
    RESTORE_GPR t3,  (8*55)  // x28
    RESTORE_GPR t4,  (8*56)  // x29
    RESTORE_GPR t5,  (8*57)  // x30
    RESTORE_GPR t6,  (8*58)  // x31

    RESTORE_GPR ra,  (8*59)  // x1, return address

    DECREASE_FRAME FRAME_SIZE_SAVE_EVERYTHING
.endm
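
// Usage sketch (placeholder names): save-everything frames are for entrypoints that must not
// clobber any register visible to compiled code, e.g. a suspend-check style stub might be
// structured roughly as:
//
//     ENTRY art_quick_example_save_everything
//         SETUP_SAVE_EVERYTHING_FRAME
//         mv a0, xSELF
//         call artExampleRuntimeEntrypoint   // Placeholder runtime function.
//         RESTORE_SAVE_EVERYTHING_FRAME      // Also restores ra, so a plain ret follows.
//         ret
//     END art_quick_example_save_everything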


// For compatibility with Runtime::CreateCalleeSaveMethod(kSaveRefsOnly).
.macro SETUP_SAVE_REFS_ONLY_FRAME
    INCREASE_FRAME FRAME_SIZE_SAVE_REFS_ONLY

    // stack slot (8*0)(sp) is for ArtMethod*
    // stack slot (8*1)(sp) is for padding
    SAVE_GPR s0,  (8*2)   // x8
    SAVE_GPR s2,  (8*3)   // x18
    SAVE_GPR s3,  (8*4)   // x19
    SAVE_GPR s4,  (8*5)   // x20
    SAVE_GPR s5,  (8*6)   // x21
    SAVE_GPR s6,  (8*7)   // x22
    SAVE_GPR s7,  (8*8)   // x23
    SAVE_GPR s8,  (8*9)   // x24
    SAVE_GPR s9,  (8*10)  // x25
    SAVE_GPR s10, (8*11)  // x26
    SAVE_GPR s11, (8*12)  // x27
    SAVE_GPR ra,  (8*13)  // x1

    SETUP_CALLEE_SAVE_FRAME_COMMON t0, RUNTIME_SAVE_REFS_ONLY_METHOD_OFFSET
.endm


.macro RESTORE_SAVE_REFS_ONLY_FRAME
    // stack slot (8*0)(sp) is for ArtMethod*
    // stack slot (8*1)(sp) is for padding
    RESTORE_GPR s0,  (8*2)   // x8
    RESTORE_GPR s2,  (8*3)   // x18
    RESTORE_GPR s3,  (8*4)   // x19
    RESTORE_GPR s4,  (8*5)   // x20
    RESTORE_GPR s5,  (8*6)   // x21
    RESTORE_GPR s6,  (8*7)   // x22
    RESTORE_GPR s7,  (8*8)   // x23
    RESTORE_GPR s8,  (8*9)   // x24
    RESTORE_GPR s9,  (8*10)  // x25
    RESTORE_GPR s10, (8*11)  // x26
    RESTORE_GPR s11, (8*12)  // x27
    RESTORE_GPR ra,  (8*13)  // x1

    DECREASE_FRAME FRAME_SIZE_SAVE_REFS_ONLY
.endm
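
// Usage sketch (placeholder names): the refs-only frame is the cheaper variant for entrypoints
// whose managed arguments are no longer needed after the runtime call:
//
//     ENTRY art_quick_example_refs_only
//         SETUP_SAVE_REFS_ONLY_FRAME         // Only callee-saves + ArtMethod*, no argument regs.
//         mv a1, xSELF
//         call artExampleRuntimeEntrypoint   // Placeholder runtime function; result stays in a0.
//         RESTORE_SAVE_REFS_ONLY_FRAME
//         ret
//     END art_quick_example_refs_only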


// CFI note. This macro is used where the CFA rule is a dwarf expression, so adjustment of SP does
// not affect CFA computation. We also elide CFI descriptors for the argument registers, because
// they can be recovered from the stack in a debugging scenario.
.macro SPILL_ALL_ARGUMENTS
#if (FRAME_SIZE_SAVE_ARGS_ONLY != 128)
#error "FRAME_SIZE_SAVE_ARGS_ONLY(riscv64) not as expected."
#endif
    addi sp, sp, -FRAME_SIZE_SAVE_ARGS_ONLY
    sd a0,   (8*0)(sp)
    sd a1,   (8*1)(sp)
    sd a2,   (8*2)(sp)
    sd a3,   (8*3)(sp)
    sd a4,   (8*4)(sp)
    sd a5,   (8*5)(sp)
    sd a6,   (8*6)(sp)
    sd a7,   (8*7)(sp)
    fsd fa0, (8*8)(sp)
    fsd fa1, (8*9)(sp)
    fsd fa2, (8*10)(sp)
    fsd fa3, (8*11)(sp)
    fsd fa4, (8*12)(sp)
    fsd fa5, (8*13)(sp)
    fsd fa6, (8*14)(sp)
    fsd fa7, (8*15)(sp)
.endm


.macro RESTORE_ALL_ARGUMENTS
    ld a0,   (8*0)(sp)
    ld a1,   (8*1)(sp)
    ld a2,   (8*2)(sp)
    ld a3,   (8*3)(sp)
    ld a4,   (8*4)(sp)
    ld a5,   (8*5)(sp)
    ld a6,   (8*6)(sp)
    ld a7,   (8*7)(sp)
    fld fa0, (8*8)(sp)
    fld fa1, (8*9)(sp)
    fld fa2, (8*10)(sp)
    fld fa3, (8*11)(sp)
    fld fa4, (8*12)(sp)
    fld fa5, (8*13)(sp)
    fld fa6, (8*14)(sp)
    fld fa7, (8*15)(sp)
    addi sp, sp, FRAME_SIZE_SAVE_ARGS_ONLY
.endm
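
// Usage sketch (placeholder helper): these macros bracket a call that must not disturb the managed
// ABI argument registers; note that RESTORE_ALL_ARGUMENTS reloads a0 as well, so any result has to
// be moved out of a0 before the restore:
//
//     SPILL_ALL_ARGUMENTS
//     call artExampleHelper        // Placeholder; may clobber a0-a7 and fa0-fa7.
//     mv t0, a0                    // Keep the helper's result out of the argument registers.
//     RESTORE_ALL_ARGUMENTS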


.macro SETUP_NTERP_SAVE_CALLEE_SAVES
#if (NTERP_SIZE_SAVE_CALLEE_SAVES != 8*(12 + 1 + 10 + 1))
#error "NTERP_SIZE_SAVE_CALLEE_SAVES(RISCV64) size not as expected."
#endif
    // FP callee-saves.
    SAVE_FPR fs0,  (8*0)   // f8
    SAVE_FPR fs1,  (8*1)   // f9
    SAVE_FPR fs2,  (8*2)   // f18
    SAVE_FPR fs3,  (8*3)   // f19
    SAVE_FPR fs4,  (8*4)   // f20
    SAVE_FPR fs5,  (8*5)   // f21
    SAVE_FPR fs6,  (8*6)   // f22
    SAVE_FPR fs7,  (8*7)   // f23
    SAVE_FPR fs8,  (8*8)   // f24
    SAVE_FPR fs9,  (8*9)   // f25
    SAVE_FPR fs10, (8*10)  // f26
    SAVE_FPR fs11, (8*11)  // f27

    // GP callee-saves
    SAVE_GPR s0,  (8*12)  // x8/fp, frame pointer
    // s1 (x9) is the ART thread register
    SAVE_GPR s2,  (8*13)  // x18
    SAVE_GPR s3,  (8*14)  // x19
    SAVE_GPR s4,  (8*15)  // x20
    SAVE_GPR s5,  (8*16)  // x21
    SAVE_GPR s6,  (8*17)  // x22
    SAVE_GPR s7,  (8*18)  // x23
    SAVE_GPR s8,  (8*19)  // x24
    SAVE_GPR s9,  (8*20)  // x25
    SAVE_GPR s10, (8*21)  // x26
    SAVE_GPR s11, (8*22)  // x27

    SAVE_GPR ra,  (8*23)  // x1, return address
.endm


.macro RESTORE_NTERP_SAVE_CALLEE_SAVES
#if (NTERP_SIZE_SAVE_CALLEE_SAVES != 8*(12 + 1 + 10 + 1))
#error "NTERP_SIZE_SAVE_CALLEE_SAVES(RISCV64) size not as expected."
#endif
    // FP callee-saves.
    RESTORE_FPR fs0,  (8*0)   // f8
    RESTORE_FPR fs1,  (8*1)   // f9
    RESTORE_FPR fs2,  (8*2)   // f18
    RESTORE_FPR fs3,  (8*3)   // f19
    RESTORE_FPR fs4,  (8*4)   // f20
    RESTORE_FPR fs5,  (8*5)   // f21
    RESTORE_FPR fs6,  (8*6)   // f22
    RESTORE_FPR fs7,  (8*7)   // f23
    RESTORE_FPR fs8,  (8*8)   // f24
    RESTORE_FPR fs9,  (8*9)   // f25
    RESTORE_FPR fs10, (8*10)  // f26
    RESTORE_FPR fs11, (8*11)  // f27

    // GP callee-saves
    RESTORE_GPR s0,  (8*12)  // x8/fp, frame pointer
    // s1 is the ART thread register
    RESTORE_GPR s2,  (8*13)  // x18
    RESTORE_GPR s3,  (8*14)  // x19
    RESTORE_GPR s4,  (8*15)  // x20
    RESTORE_GPR s5,  (8*16)  // x21
    RESTORE_GPR s6,  (8*17)  // x22
    RESTORE_GPR s7,  (8*18)  // x23
    RESTORE_GPR s8,  (8*19)  // x24
    RESTORE_GPR s9,  (8*20)  // x25
    RESTORE_GPR s10, (8*21)  // x26
    RESTORE_GPR s11, (8*22)  // x27

    RESTORE_GPR ra,  (8*23)  // x1, return address
.endm


// Macro that calls through to artDeliverPendingExceptionFromCode, where the pending exception is
// Thread::Current()->exception_ when the runtime method frame is ready.
.macro DELIVER_PENDING_EXCEPTION_FRAME_READY
    mv a0, xSELF
    call artDeliverPendingExceptionFromCode  // Point of no return.
    unimp                                    // Unreachable.
.endm


// Macro that calls through to artDeliverPendingExceptionFromCode, where the pending exception is
// Thread::Current()->exception_.
.macro DELIVER_PENDING_EXCEPTION
    SETUP_SAVE_ALL_CALLEE_SAVES_FRAME
    DELIVER_PENDING_EXCEPTION_FRAME_READY
.endm


.macro RETURN_OR_DELIVER_PENDING_EXCEPTION_REG reg
    ld \reg, THREAD_EXCEPTION_OFFSET(xSELF)
    bnez \reg, 1f
    ret
1:
    DELIVER_PENDING_EXCEPTION
.endm
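
// Usage sketch (hypothetical call site): after a runtime call that reports errors through
// Thread::Current()->exception_, a stub can finish with
//
//     RESTORE_SAVE_REFS_ONLY_FRAME
//     RETURN_OR_DELIVER_PENDING_EXCEPTION_REG t0   // t0 is just a scratch register here.
//
// which returns normally when no exception is pending and otherwise rebuilds a save-all frame and
// delivers the exception (never returning).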

// Macro to emit a single LUI to load the given value while checking that the low 12 bits are zero.
.macro LUI_VALUE reg, value
    .if (\value & 0xfff) != 0
    .error "Cannot use LUI to materialize a value with some of the low 12 bits set."
    .endif
    lui \reg, (\value) >> 12
.endm
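
// Worked example (illustrative): `LUI_VALUE t5, 0x10000000` assembles to `lui t5, 0x10000` because
// the low 12 bits of the value are zero, while `LUI_VALUE t5, 0x10000001` would trigger the .error
// at assembly time.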


// Locking is needed for both managed code and JNI stubs.
.macro LOCK_OBJECT_FAST_PATH obj, slow_lock, can_be_null
    // Use scratch registers T1-T6 as temporaries.
    // Note: T0 is used as the argument register for `art_jni_lock_object` and passed as `obj`.
    lw      t2, THREAD_ID_OFFSET(xSELF)
    .if \can_be_null
        beqz    \obj, \slow_lock
    .endif
    addi    t1, \obj, MIRROR_OBJECT_LOCK_WORD_OFFSET  // Exclusive load/store has no offset.
1:
    // Note: The LR/SC sequence must be at most 16 instructions, so we cannot have the
    // recursive locking in a slow-path as on other architectures.
    lr.w.aq t3, (t1)                  // Acquire needed only in most common case.
    LUI_VALUE t5, LOCK_WORD_GC_STATE_MASK_SHIFTED  // Prepare mask for testing non-gc bits.
    xor     t4, t3, t2                // Prepare the value to store if unlocked
                                      //   (thread id, count of 0 and preserved read barrier bits),
                                      // or prepare to compare thread id for recursive lock check
                                      //   (lock_word.ThreadId() ^ self->ThreadId()).
    or      t6, t5, t3                // Test the non-gc bits.
    beq     t6, t5, 2f                // Check if unlocked.
                                      // Check lock word state and thread id together,
    LUI_VALUE \
        t5, 0xffffffff ^ (LOCK_WORD_STATE_MASK_SHIFTED | LOCK_WORD_THIN_LOCK_OWNER_MASK_SHIFTED)
    or      t6, t5, t4
    bne     t6, t5, \slow_lock
    LUI_VALUE t4, LOCK_WORD_THIN_LOCK_COUNT_ONE  // Increment the recursive lock count.
    addw    t4, t3, t4
    LUI_VALUE t5, LOCK_WORD_THIN_LOCK_COUNT_MASK_SHIFTED  // Test the new thin lock count.
    and     t5, t4, t5
    beqz    t5, \slow_lock            // Zero as the new count indicates overflow, go slow path.
2:
    // Store the prepared value:
    //   - if unlocked, original lock word plus thread id,
    //   - if already locked, original lock word plus incremented lock count.
    sc.w    t3, t4, (t1)
    bnez    t3, 1b                    // If the store failed, retry.
    ret
.endm
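
// Usage sketch (placeholder stub, label, and helper names): a lock entrypoint can try this fast
// path inline and fall back to a runtime call for the contended or inflated cases, roughly:
//
//     ENTRY art_quick_example_lock_object
//         LOCK_OBJECT_FAST_PATH a0, .Lexample_lock_slow_path, /*can_be_null*/ 1  // Rets on success.
//     .Lexample_lock_slow_path:
//         SETUP_SAVE_REFS_ONLY_FRAME           // The runtime call may block and must be GC-visible.
//         mv a1, xSELF
//         call artExampleLockSlowPath          // Placeholder for the runtime locking helper.
//         RESTORE_SAVE_REFS_ONLY_FRAME
//         RETURN_OR_DELIVER_PENDING_EXCEPTION_REG t0
//     END art_quick_example_lock_object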

// Unlocking is needed for both managed code and JNI stubs.
.macro UNLOCK_OBJECT_FAST_PATH obj, slow_unlock, can_be_null
    // Use scratch registers T1-T6 as temporaries.
    // Note: T0 is used as the argument register for `art_jni_unlock_object` and passed as `obj`.
    lw      t2, THREAD_ID_OFFSET(xSELF)
    .if \can_be_null
        beqz    \obj, \slow_unlock
    .endif
    addi    t1, \obj, MIRROR_OBJECT_LOCK_WORD_OFFSET  // Exclusive load/store has no offset.
1:
    // Note: Without read barriers, we could do plain LW here but there is no store-release
    // other than SC on riscv64, so we do this with LR/SC for all configurations.
    // Note: The LR/SC sequence must be at most 16 instructions, so we cannot have the
    // recursive unlocking in a slow-path as on other architectures.
    lr.w    t3, (t1)
    LUI_VALUE t5, LOCK_WORD_GC_STATE_MASK_SHIFTED  // Prepare mask for testing non-gc bits.
    xor     t4, t3, t2                // Prepare the value to store if simply locked
                                      //   (mostly 0s, and preserved read barrier bits),
                                      // or prepare to compare thread id for recursive lock check
                                      //   (lock_word.ThreadId() ^ self->ThreadId()).
    or      t6, t5, t4                // Test the non-gc bits.
    beq     t6, t5, 2f                // Simply locked by this thread?
                                      // Check lock word state and thread id together.
    LUI_VALUE \
        t5, 0xffffffff ^ (LOCK_WORD_STATE_MASK_SHIFTED | LOCK_WORD_THIN_LOCK_OWNER_MASK_SHIFTED)
    or      t6, t5, t4
    bne     t6, t5, \slow_unlock
    LUI_VALUE t4, LOCK_WORD_THIN_LOCK_COUNT_ONE  // Decrement the recursive lock count.
    subw    t4, t3, t4
2:
    // Store the prepared value:
    //   - if simply locked, original lock word with removed thread id,
    //   - if recursively locked, original lock word plus decremented lock count.
    sc.w.rl t3, t4, (t1)              // Need to use atomic instructions for read barrier.
    bnez    t3, 1b                    // If the store failed, retry.
    ret
.endm


// Macros to branch based on the value of a specific bit.
.macro BRANCH_IF_BIT_CLEAR tmp, reg, bit, dest
    slli    \tmp, \reg, (63 - \bit) // tested bit => sign bit
    bgez    \tmp, \dest
.endm


.macro BRANCH_IF_BIT_SET tmp, reg, bit, dest
    slli    \tmp, \reg, (63 - \bit) // tested bit => sign bit
    bltz    \tmp, \dest
.endm
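
// Worked example (illustrative): to branch when bit 3 of t0 is set, `BRANCH_IF_BIT_SET t1, t0, 3, 1f`
// shifts the tested bit into the sign position (slli t1, t0, 60) and takes the branch if the result
// is negative; BRANCH_IF_BIT_CLEAR is the same test with the branch condition inverted.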


#endif  // ART_RUNTIME_ARCH_RISCV64_ASM_SUPPORT_RISCV64_S_