// (Removed: HTML code-viewer navigation residue — "Home / Line# / Scopes# /
// Navigate / Raw / Download" — left over from extraction; not part of the file.)
// This file is generated from a similarly-named Perl script in the BoringSSL
// source tree. Do not edit by hand.

// Under MemorySanitizer, hand-written assembly is uninstrumented and would
// produce false positives, so force the no-asm fallback.
#if defined(__has_feature)
#if __has_feature(memory_sanitizer) && !defined(OPENSSL_NO_ASM)
#define OPENSSL_NO_ASM
#endif
#endif

#if !defined(OPENSSL_NO_ASM)
#if defined(BORINGSSL_PREFIX)
#include <boringssl_prefix_symbols_asm.h>
#endif
.syntax	unified




.text
20
@ abi_test_trampoline loads callee-saved registers from |state|, calls |func|
@ with |argv|, then saves the callee-saved registers into |state|. It returns
@ the result of |func|. The |unwind| argument is unused.
@ uint32_t abi_test_trampoline(void (*func)(...), CallerState *state,
@                              const uint32_t *argv, size_t argc,
@                              int unwind);

.globl	_abi_test_trampoline
.private_extern	_abi_test_trampoline
.align	4
_abi_test_trampoline:
Labi_test_trampoline_begin:
	@ Save parameters and all callee-saved registers. For convenience, we
	@ save r9 on iOS even though it's volatile.
	vstmdb	sp!, {d8,d9,d10,d11,d12,d13,d14,d15}
	stmdb	sp!, {r0,r1,r2,r3,r4,r5,r6,r7,r8,r9,r10,r11,lr}

	@ Reserve stack space for six (10-4) stack parameters, plus an extra 4
	@ bytes to keep it 8-byte-aligned (see AAPCS, section 5.3). After this,
	@ the saved parameters live at sp+28 (func), sp+32 (state),
	@ sp+36 (argv), and sp+40 (argc).
	sub	sp, sp, #28

	@ Every register in AAPCS is either non-volatile or a parameter (except
	@ r9 on iOS), so this code, by the actual call, loses all its scratch
	@ registers. First fill in stack parameters while there are registers
	@ to spare.
	cmp	r3, #4
	bls	Lstack_args_done	@ The first four arguments go in r0-r3.
	mov	r4, sp				@ r4 is the output pointer.
	add	r5, r2, r3, lsl #2	@ Set r5 to the end of argv.
	add	r2, r2, #16		@ Skip four arguments.
Lstack_args_loop:
	ldr	r6, [r2], #4
	cmp	r2, r5
	str	r6, [r4], #4
	bne	Lstack_args_loop

Lstack_args_done:
	@ Load registers from |r1|.
	vldmia	r1!, {d8,d9,d10,d11,d12,d13,d14,d15}
#if defined(__APPLE__)
	@ r9 is not volatile on iOS.
	ldmia	r1!, {r4,r5,r6,r7,r8,r10-r11}
#else
	ldmia	r1!, {r4,r5,r6,r7,r8,r9,r10,r11}
#endif

	@ Load register parameters. This uses up our remaining registers, so we
	@ repurpose lr as scratch space.
	ldr	r3, [sp, #40]	@ Reload argc.
	ldr	lr, [sp, #36]		@ Load argv into lr.
	cmp	r3, #3
	bhi	Larg_r3
	beq	Larg_r2
	cmp	r3, #1
	bhi	Larg_r1
	beq	Larg_r0
	b	Largs_done

Larg_r3:
	ldr	r3, [lr, #12]	@ argv[3]
Larg_r2:
	ldr	r2, [lr, #8]	@ argv[2]
Larg_r1:
	ldr	r1, [lr, #4]	@ argv[1]
Larg_r0:
	ldr	r0, [lr]	@ argv[0]
Largs_done:

	@ With every other register in use, load the function pointer into lr
	@ and call the function.
	ldr	lr, [sp, #28]
	blx	lr

	@ r1-r3 are free for use again. The trampoline only supports
	@ single-return functions. Pass r4-r11 to the caller.
	ldr	r1, [sp, #32]	@ Reload |state|.
	vstmia	r1!, {d8,d9,d10,d11,d12,d13,d14,d15}
#if defined(__APPLE__)
	@ r9 is not volatile on iOS.
	stmia	r1!, {r4,r5,r6,r7,r8,r10-r11}
#else
	stmia	r1!, {r4,r5,r6,r7,r8,r9,r10,r11}
#endif

	@ Unwind the stack and restore registers.
	add	sp, sp, #44		@ 44 = 28 (stack args) + 16 (skip saved r0-r3).
	ldmia	sp!, {r4,r5,r6,r7,r8,r9,r10,r11,lr}	@ Skip r0-r3 (see +16 above).
	vldmia	sp!, {d8,d9,d10,d11,d12,d13,d14,d15}

	bx	lr
111
112
@ void abi_test_clobber_r0(void) — zeroes r0 (exercises caller ABI assumptions).
.globl	_abi_test_clobber_r0
.private_extern	_abi_test_clobber_r0
.align	4
_abi_test_clobber_r0:
	mov	r0, #0
	bx	lr
119
120
@ void abi_test_clobber_r1(void) — zeroes r1.
.globl	_abi_test_clobber_r1
.private_extern	_abi_test_clobber_r1
.align	4
_abi_test_clobber_r1:
	mov	r1, #0
	bx	lr
127
128
@ void abi_test_clobber_r2(void) — zeroes r2.
.globl	_abi_test_clobber_r2
.private_extern	_abi_test_clobber_r2
.align	4
_abi_test_clobber_r2:
	mov	r2, #0
	bx	lr
135
136
@ void abi_test_clobber_r3(void) — zeroes r3.
.globl	_abi_test_clobber_r3
.private_extern	_abi_test_clobber_r3
.align	4
_abi_test_clobber_r3:
	mov	r3, #0
	bx	lr
143
144
@ void abi_test_clobber_r4(void) — zeroes r4 (callee-saved; deliberate ABI violation for testing).
.globl	_abi_test_clobber_r4
.private_extern	_abi_test_clobber_r4
.align	4
_abi_test_clobber_r4:
	mov	r4, #0
	bx	lr
151
152
@ void abi_test_clobber_r5(void) — zeroes r5 (callee-saved; deliberate ABI violation for testing).
.globl	_abi_test_clobber_r5
.private_extern	_abi_test_clobber_r5
.align	4
_abi_test_clobber_r5:
	mov	r5, #0
	bx	lr
159
160
@ void abi_test_clobber_r6(void) — zeroes r6 (callee-saved; deliberate ABI violation for testing).
.globl	_abi_test_clobber_r6
.private_extern	_abi_test_clobber_r6
.align	4
_abi_test_clobber_r6:
	mov	r6, #0
	bx	lr
167
168
@ void abi_test_clobber_r7(void) — zeroes r7 (callee-saved; deliberate ABI violation for testing).
.globl	_abi_test_clobber_r7
.private_extern	_abi_test_clobber_r7
.align	4
_abi_test_clobber_r7:
	mov	r7, #0
	bx	lr
175
176
@ void abi_test_clobber_r8(void) — zeroes r8 (callee-saved; deliberate ABI violation for testing).
.globl	_abi_test_clobber_r8
.private_extern	_abi_test_clobber_r8
.align	4
_abi_test_clobber_r8:
	mov	r8, #0
	bx	lr
183
184
@ void abi_test_clobber_r9(void) — zeroes r9 (volatile on iOS, callee-saved elsewhere).
.globl	_abi_test_clobber_r9
.private_extern	_abi_test_clobber_r9
.align	4
_abi_test_clobber_r9:
	mov	r9, #0
	bx	lr
191
192
@ void abi_test_clobber_r10(void) — zeroes r10 (callee-saved; deliberate ABI violation for testing).
.globl	_abi_test_clobber_r10
.private_extern	_abi_test_clobber_r10
.align	4
_abi_test_clobber_r10:
	mov	r10, #0
	bx	lr
199
200
@ void abi_test_clobber_r11(void) — zeroes r11 (callee-saved; deliberate ABI violation for testing).
.globl	_abi_test_clobber_r11
.private_extern	_abi_test_clobber_r11
.align	4
_abi_test_clobber_r11:
	mov	r11, #0
	bx	lr
207
208
@ void abi_test_clobber_r12(void) — zeroes r12 (ip; volatile scratch register).
.globl	_abi_test_clobber_r12
.private_extern	_abi_test_clobber_r12
.align	4
_abi_test_clobber_r12:
	mov	r12, #0
	bx	lr
215
216
@ void abi_test_clobber_d0(void) — zeroes d0 via its single-precision halves s0/s1.
.globl	_abi_test_clobber_d0
.private_extern	_abi_test_clobber_d0
.align	4
_abi_test_clobber_d0:
	mov	r0, #0
	vmov	s0, r0
	vmov	s1, r0
	bx	lr
225
226
@ void abi_test_clobber_d1(void) — zeroes d1 via s2/s3.
.globl	_abi_test_clobber_d1
.private_extern	_abi_test_clobber_d1
.align	4
_abi_test_clobber_d1:
	mov	r0, #0
	vmov	s2, r0
	vmov	s3, r0
	bx	lr
235
236
@ void abi_test_clobber_d2(void) — zeroes d2 via s4/s5.
.globl	_abi_test_clobber_d2
.private_extern	_abi_test_clobber_d2
.align	4
_abi_test_clobber_d2:
	mov	r0, #0
	vmov	s4, r0
	vmov	s5, r0
	bx	lr
245
246
@ void abi_test_clobber_d3(void) — zeroes d3 via s6/s7.
.globl	_abi_test_clobber_d3
.private_extern	_abi_test_clobber_d3
.align	4
_abi_test_clobber_d3:
	mov	r0, #0
	vmov	s6, r0
	vmov	s7, r0
	bx	lr
255
256
@ void abi_test_clobber_d4(void) — zeroes d4 via s8/s9.
.globl	_abi_test_clobber_d4
.private_extern	_abi_test_clobber_d4
.align	4
_abi_test_clobber_d4:
	mov	r0, #0
	vmov	s8, r0
	vmov	s9, r0
	bx	lr
265
266
@ void abi_test_clobber_d5(void) — zeroes d5 via s10/s11.
.globl	_abi_test_clobber_d5
.private_extern	_abi_test_clobber_d5
.align	4
_abi_test_clobber_d5:
	mov	r0, #0
	vmov	s10, r0
	vmov	s11, r0
	bx	lr
275
276
@ void abi_test_clobber_d6(void) — zeroes d6 via s12/s13.
.globl	_abi_test_clobber_d6
.private_extern	_abi_test_clobber_d6
.align	4
_abi_test_clobber_d6:
	mov	r0, #0
	vmov	s12, r0
	vmov	s13, r0
	bx	lr
285
286
@ void abi_test_clobber_d7(void) — zeroes d7 via s14/s15.
.globl	_abi_test_clobber_d7
.private_extern	_abi_test_clobber_d7
.align	4
_abi_test_clobber_d7:
	mov	r0, #0
	vmov	s14, r0
	vmov	s15, r0
	bx	lr
295
296
@ void abi_test_clobber_d8(void) — zeroes d8 via s16/s17 (callee-saved; deliberate ABI violation for testing).
.globl	_abi_test_clobber_d8
.private_extern	_abi_test_clobber_d8
.align	4
_abi_test_clobber_d8:
	mov	r0, #0
	vmov	s16, r0
	vmov	s17, r0
	bx	lr
305
306
@ void abi_test_clobber_d9(void) — zeroes d9 via s18/s19 (callee-saved; deliberate ABI violation for testing).
.globl	_abi_test_clobber_d9
.private_extern	_abi_test_clobber_d9
.align	4
_abi_test_clobber_d9:
	mov	r0, #0
	vmov	s18, r0
	vmov	s19, r0
	bx	lr
315
316
@ void abi_test_clobber_d10(void) — zeroes d10 via s20/s21 (callee-saved; deliberate ABI violation for testing).
.globl	_abi_test_clobber_d10
.private_extern	_abi_test_clobber_d10
.align	4
_abi_test_clobber_d10:
	mov	r0, #0
	vmov	s20, r0
	vmov	s21, r0
	bx	lr
325
326
@ void abi_test_clobber_d11(void) — zeroes d11 via s22/s23 (callee-saved; deliberate ABI violation for testing).
.globl	_abi_test_clobber_d11
.private_extern	_abi_test_clobber_d11
.align	4
_abi_test_clobber_d11:
	mov	r0, #0
	vmov	s22, r0
	vmov	s23, r0
	bx	lr
335
336
@ void abi_test_clobber_d12(void) — zeroes d12 via s24/s25 (callee-saved; deliberate ABI violation for testing).
.globl	_abi_test_clobber_d12
.private_extern	_abi_test_clobber_d12
.align	4
_abi_test_clobber_d12:
	mov	r0, #0
	vmov	s24, r0
	vmov	s25, r0
	bx	lr
345
346
@ void abi_test_clobber_d13(void) — zeroes d13 via s26/s27 (callee-saved; deliberate ABI violation for testing).
.globl	_abi_test_clobber_d13
.private_extern	_abi_test_clobber_d13
.align	4
_abi_test_clobber_d13:
	mov	r0, #0
	vmov	s26, r0
	vmov	s27, r0
	bx	lr
355
356
@ void abi_test_clobber_d14(void) — zeroes d14 via s28/s29 (callee-saved; deliberate ABI violation for testing).
.globl	_abi_test_clobber_d14
.private_extern	_abi_test_clobber_d14
.align	4
_abi_test_clobber_d14:
	mov	r0, #0
	vmov	s28, r0
	vmov	s29, r0
	bx	lr
365
366
@ void abi_test_clobber_d15(void) — zeroes d15 via s30/s31 (callee-saved; deliberate ABI violation for testing).
.globl	_abi_test_clobber_d15
.private_extern	_abi_test_clobber_d15
.align	4
_abi_test_clobber_d15:
	mov	r0, #0
	vmov	s30, r0
	vmov	s31, r0
	bx	lr
375
#endif  // !OPENSSL_NO_ASM