/*
 * Copyright (c) 2013-2021, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <context.h>
#include <el3_common_macros.S>

#if CTX_INCLUDE_EL2_REGS
	.global	el2_sysregs_context_save
	.global	el2_sysregs_context_restore
#endif

	.global	el1_sysregs_context_save
	.global	el1_sysregs_context_restore
#if CTX_INCLUDE_FPREGS
	.global	fpregs_context_save
	.global	fpregs_context_restore
#endif
	.global	save_gp_pmcr_pauth_regs
	.global	restore_gp_pmcr_pauth_regs
	.global save_and_update_ptw_el1_sys_regs
	.global	el3_exit

#if CTX_INCLUDE_EL2_REGS

/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS to use x9-x17 (temporary caller-saved registers)
 * to save EL2 system register context. It assumes that
 * 'x0' is pointing to an 'el2_sys_regs' structure where
 * the register context will be saved.
 *
 * The following registers are not saved:
 * AMEVCNTVOFF0<n>_EL2
 * AMEVCNTVOFF1<n>_EL2
 * ICH_AP0R<n>_EL2
 * ICH_AP1R<n>_EL2
 * ICH_LR<n>_EL2
 * -----------------------------------------------------
 */

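/*
 * Note: this routine is normally reached via the EL3 context management
 * code (e.g. cm_el2_sysregs_context_save() in context_mgmt.c), which is
 * assumed to load x0 with the address of the per-CPU EL2 system register
 * area before the call. x9-x17 are clobbered as described above.
 */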
func el2_sysregs_context_save
	mrs	x9, actlr_el2
	mrs	x10, afsr0_el2
	stp	x9, x10, [x0, #CTX_ACTLR_EL2]

	mrs	x11, afsr1_el2
	mrs	x12, amair_el2
	stp	x11, x12, [x0, #CTX_AFSR1_EL2]

	mrs	x13, cnthctl_el2
	mrs	x14, cnthp_ctl_el2
	stp	x13, x14, [x0, #CTX_CNTHCTL_EL2]

	mrs	x15, cnthp_cval_el2
	mrs	x16, cnthp_tval_el2
	stp	x15, x16, [x0, #CTX_CNTHP_CVAL_EL2]

	mrs	x17, cntvoff_el2
	mrs	x9, cptr_el2
	stp	x17, x9, [x0, #CTX_CNTVOFF_EL2]

	mrs	x11, elr_el2
#if CTX_INCLUDE_AARCH32_REGS
	mrs	x10, dbgvcr32_el2
	stp	x10, x11, [x0, #CTX_DBGVCR32_EL2]
#else
	str	x11, [x0, #CTX_ELR_EL2]
#endif

	mrs	x14, esr_el2
	mrs	x15, far_el2
	stp	x14, x15, [x0, #CTX_ESR_EL2]

	mrs	x16, hacr_el2
	mrs	x17, hcr_el2
	stp	x16, x17, [x0, #CTX_HACR_EL2]

	mrs	x9, hpfar_el2
	mrs	x10, hstr_el2
	stp	x9, x10, [x0, #CTX_HPFAR_EL2]

	mrs	x11, ICC_SRE_EL2
	mrs	x12, ICH_HCR_EL2
	stp	x11, x12, [x0, #CTX_ICC_SRE_EL2]

	mrs	x13, ICH_VMCR_EL2
	mrs	x14, mair_el2
	stp	x13, x14, [x0, #CTX_ICH_VMCR_EL2]

	mrs	x15, mdcr_el2
#if ENABLE_SPE_FOR_LOWER_ELS
	mrs	x16, PMSCR_EL2
	stp	x15, x16, [x0, #CTX_MDCR_EL2]
#else
	str	x15, [x0, #CTX_MDCR_EL2]
#endif

	mrs	x17, sctlr_el2
	mrs	x9, spsr_el2
	stp	x17, x9, [x0, #CTX_SCTLR_EL2]

	mrs	x10, sp_el2
	mrs	x11, tcr_el2
	stp	x10, x11, [x0, #CTX_SP_EL2]

	mrs	x12, tpidr_el2
	mrs	x13, ttbr0_el2
	stp	x12, x13, [x0, #CTX_TPIDR_EL2]

	mrs	x14, vbar_el2
	mrs	x15, vmpidr_el2
	stp	x14, x15, [x0, #CTX_VBAR_EL2]

	mrs	x16, vpidr_el2
	mrs	x17, vtcr_el2
	stp	x16, x17, [x0, #CTX_VPIDR_EL2]

	mrs	x9, vttbr_el2
	str	x9, [x0, #CTX_VTTBR_EL2]

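	/* Save the MTE system register TFSR_EL2 if the build has instructed so */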
#if CTX_INCLUDE_MTE_REGS
	mrs	x10, TFSR_EL2
	str	x10, [x0, #CTX_TFSR_EL2]
#endif

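	/* Save MPAM EL2 registers if MPAM is enabled for lower ELs */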
#if ENABLE_MPAM_FOR_LOWER_ELS
	mrs	x9, MPAM2_EL2
	mrs	x10, MPAMHCR_EL2
	stp	x9, x10, [x0, #CTX_MPAM2_EL2]

	mrs	x11, MPAMVPM0_EL2
	mrs	x12, MPAMVPM1_EL2
	stp	x11, x12, [x0, #CTX_MPAMVPM0_EL2]

	mrs	x13, MPAMVPM2_EL2
	mrs	x14, MPAMVPM3_EL2
	stp	x13, x14, [x0, #CTX_MPAMVPM2_EL2]

	mrs	x15, MPAMVPM4_EL2
	mrs	x16, MPAMVPM5_EL2
	stp	x15, x16, [x0, #CTX_MPAMVPM4_EL2]

	mrs	x17, MPAMVPM6_EL2
	mrs	x9, MPAMVPM7_EL2
	stp	x17, x9, [x0, #CTX_MPAMVPM6_EL2]

	mrs	x10, MPAMVPMV_EL2
	str	x10, [x0, #CTX_MPAMVPMV_EL2]
#endif

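	/*
	 * Save the Armv8.6 fine-grained trap control registers and the
	 * physical counter offset (CNTPOFF_EL2)
	 */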
#if ARM_ARCH_AT_LEAST(8, 6)
	mrs	x11, HAFGRTR_EL2
	mrs	x12, HDFGRTR_EL2
	stp	x11, x12, [x0, #CTX_HAFGRTR_EL2]

	mrs	x13, HDFGWTR_EL2
	mrs	x14, HFGITR_EL2
	stp	x13, x14, [x0, #CTX_HDFGWTR_EL2]

	mrs	x15, HFGRTR_EL2
	mrs	x16, HFGWTR_EL2
	stp	x15, x16, [x0, #CTX_HFGRTR_EL2]

	mrs	x17, CNTPOFF_EL2
	str	x17, [x0, #CTX_CNTPOFF_EL2]
#endif

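	/*
	 * Save further EL2 registers when building for Armv8.4 or later
	 * (Secure EL2 timers, RAS virtualization, Secure stage 2
	 * translation, trace filter, etc.)
	 */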
#if ARM_ARCH_AT_LEAST(8, 4)
	mrs	x9, cnthps_ctl_el2
	mrs	x10, cnthps_cval_el2
	stp	x9, x10, [x0, #CTX_CNTHPS_CTL_EL2]

	mrs	x11, cnthps_tval_el2
	mrs	x12, cnthvs_ctl_el2
	stp	x11, x12, [x0, #CTX_CNTHPS_TVAL_EL2]

	mrs	x13, cnthvs_cval_el2
	mrs	x14, cnthvs_tval_el2
	stp	x13, x14, [x0, #CTX_CNTHVS_CVAL_EL2]

	mrs	x15, cnthv_ctl_el2
	mrs	x16, cnthv_cval_el2
	stp	x15, x16, [x0, #CTX_CNTHV_CTL_EL2]

	mrs	x17, cnthv_tval_el2
	mrs	x9, contextidr_el2
	stp	x17, x9, [x0, #CTX_CNTHV_TVAL_EL2]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x10, sder32_el2
	str	x10, [x0, #CTX_SDER32_EL2]
#endif

	mrs	x11, ttbr1_el2
	str	x11, [x0, #CTX_TTBR1_EL2]

	mrs	x12, vdisr_el2
	str	x12, [x0, #CTX_VDISR_EL2]

#if CTX_INCLUDE_NEVE_REGS
	mrs	x13, vncr_el2
	str	x13, [x0, #CTX_VNCR_EL2]
#endif

	mrs	x14, vsesr_el2
	str	x14, [x0, #CTX_VSESR_EL2]

	mrs	x15, vstcr_el2
	str	x15, [x0, #CTX_VSTCR_EL2]

	mrs	x16, vsttbr_el2
	str	x16, [x0, #CTX_VSTTBR_EL2]

	mrs	x17, TRFCR_EL2
	str	x17, [x0, #CTX_TRFCR_EL2]
#endif

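	/* Save SCXTNUM_EL2 when building for Armv8.5 or later */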
#if ARM_ARCH_AT_LEAST(8, 5)
	mrs	x9, scxtnum_el2
	str	x9, [x0, #CTX_SCXTNUM_EL2]
#endif

	ret
endfunc el2_sysregs_context_save

/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS to use x9-x17 (temporary caller-saved registers)
 * to restore EL2 system register context. It assumes
 * that 'x0' is pointing to an 'el2_sys_regs' structure
 * from where the register context will be restored.
 *
 * The following registers are not restored:
 * AMEVCNTVOFF0<n>_EL2
 * AMEVCNTVOFF1<n>_EL2
 * ICH_AP0R<n>_EL2
 * ICH_AP1R<n>_EL2
 * ICH_LR<n>_EL2
 * -----------------------------------------------------
 */
func el2_sysregs_context_restore

	ldp	x9, x10, [x0, #CTX_ACTLR_EL2]
	msr	actlr_el2, x9
	msr	afsr0_el2, x10

	ldp	x11, x12, [x0, #CTX_AFSR1_EL2]
	msr	afsr1_el2, x11
	msr	amair_el2, x12

	ldp	x13, x14, [x0, #CTX_CNTHCTL_EL2]
	msr	cnthctl_el2, x13
	msr	cnthp_ctl_el2, x14

	ldp	x15, x16, [x0, #CTX_CNTHP_CVAL_EL2]
	msr	cnthp_cval_el2, x15
	msr	cnthp_tval_el2, x16

	ldp	x17, x9, [x0, #CTX_CNTVOFF_EL2]
	msr	cntvoff_el2, x17
	msr	cptr_el2, x9

#if CTX_INCLUDE_AARCH32_REGS
	ldp	x10, x11, [x0, #CTX_DBGVCR32_EL2]
	msr	dbgvcr32_el2, x10
#else
	ldr	x11, [x0, #CTX_ELR_EL2]
#endif
	msr	elr_el2, x11

	ldp	x14, x15, [x0, #CTX_ESR_EL2]
	msr	esr_el2, x14
	msr	far_el2, x15

	ldp	x16, x17, [x0, #CTX_HACR_EL2]
	msr	hacr_el2, x16
	msr	hcr_el2, x17

	ldp	x9, x10, [x0, #CTX_HPFAR_EL2]
	msr	hpfar_el2, x9
	msr	hstr_el2, x10

	ldp	x11, x12, [x0, #CTX_ICC_SRE_EL2]
	msr	ICC_SRE_EL2, x11
	msr	ICH_HCR_EL2, x12

	ldp	x13, x14, [x0, #CTX_ICH_VMCR_EL2]
	msr	ICH_VMCR_EL2, x13
	msr	mair_el2, x14

#if ENABLE_SPE_FOR_LOWER_ELS
	ldp	x15, x16, [x0, #CTX_MDCR_EL2]
	msr	PMSCR_EL2, x16
#else
	ldr	x15, [x0, #CTX_MDCR_EL2]
#endif
	msr	mdcr_el2, x15

	ldp	x17, x9, [x0, #CTX_SCTLR_EL2]
	msr	sctlr_el2, x17
	msr	spsr_el2, x9

	ldp	x10, x11, [x0, #CTX_SP_EL2]
	msr	sp_el2, x10
	msr	tcr_el2, x11

	ldp	x12, x13, [x0, #CTX_TPIDR_EL2]
	msr	tpidr_el2, x12
	msr	ttbr0_el2, x13

	ldp	x13, x14, [x0, #CTX_VBAR_EL2]
	msr	vbar_el2, x13
	msr	vmpidr_el2, x14

	ldp	x15, x16, [x0, #CTX_VPIDR_EL2]
	msr	vpidr_el2, x15
	msr	vtcr_el2, x16

	ldr	x17, [x0, #CTX_VTTBR_EL2]
	msr	vttbr_el2, x17

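	/* Restore the MTE system register TFSR_EL2 if the build has instructed so */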
#if CTX_INCLUDE_MTE_REGS
	ldr	x9, [x0, #CTX_TFSR_EL2]
	msr	TFSR_EL2, x9
#endif

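	/* Restore MPAM EL2 registers if MPAM is enabled for lower ELs */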
#if ENABLE_MPAM_FOR_LOWER_ELS
	ldp	x10, x11, [x0, #CTX_MPAM2_EL2]
	msr	MPAM2_EL2, x10
	msr	MPAMHCR_EL2, x11

	ldp	x12, x13, [x0, #CTX_MPAMVPM0_EL2]
	msr	MPAMVPM0_EL2, x12
	msr	MPAMVPM1_EL2, x13

	ldp	x14, x15, [x0, #CTX_MPAMVPM2_EL2]
	msr	MPAMVPM2_EL2, x14
	msr	MPAMVPM3_EL2, x15

	ldp	x16, x17, [x0, #CTX_MPAMVPM4_EL2]
	msr	MPAMVPM4_EL2, x16
	msr	MPAMVPM5_EL2, x17

	ldp	x9, x10, [x0, #CTX_MPAMVPM6_EL2]
	msr	MPAMVPM6_EL2, x9
	msr	MPAMVPM7_EL2, x10

	ldr	x11, [x0, #CTX_MPAMVPMV_EL2]
	msr	MPAMVPMV_EL2, x11
#endif

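	/*
	 * Restore the Armv8.6 fine-grained trap control registers and
	 * the physical counter offset (CNTPOFF_EL2)
	 */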
#if ARM_ARCH_AT_LEAST(8, 6)
	ldp	x12, x13, [x0, #CTX_HAFGRTR_EL2]
	msr	HAFGRTR_EL2, x12
	msr	HDFGRTR_EL2, x13

	ldp	x14, x15, [x0, #CTX_HDFGWTR_EL2]
	msr	HDFGWTR_EL2, x14
	msr	HFGITR_EL2, x15

	ldp	x16, x17, [x0, #CTX_HFGRTR_EL2]
	msr	HFGRTR_EL2, x16
	msr	HFGWTR_EL2, x17

	ldr	x9, [x0, #CTX_CNTPOFF_EL2]
	msr	CNTPOFF_EL2, x9
#endif

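	/*
	 * Restore further EL2 registers when building for Armv8.4 or
	 * later (Secure EL2 timers, RAS virtualization, Secure stage 2
	 * translation, trace filter, etc.)
	 */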
#if ARM_ARCH_AT_LEAST(8, 4)
	ldp	x10, x11, [x0, #CTX_CNTHPS_CTL_EL2]
	msr	cnthps_ctl_el2, x10
	msr	cnthps_cval_el2, x11

	ldp	x12, x13, [x0, #CTX_CNTHPS_TVAL_EL2]
	msr	cnthps_tval_el2, x12
	msr	cnthvs_ctl_el2, x13

	ldp	x14, x15, [x0, #CTX_CNTHVS_CVAL_EL2]
	msr	cnthvs_cval_el2, x14
	msr	cnthvs_tval_el2, x15

	ldp	x16, x17, [x0, #CTX_CNTHV_CTL_EL2]
	msr	cnthv_ctl_el2, x16
	msr	cnthv_cval_el2, x17

	ldp	x9, x10, [x0, #CTX_CNTHV_TVAL_EL2]
	msr	cnthv_tval_el2, x9
	msr	contextidr_el2, x10

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x11, [x0, #CTX_SDER32_EL2]
	msr	sder32_el2, x11
#endif

	ldr	x12, [x0, #CTX_TTBR1_EL2]
	msr	ttbr1_el2, x12

	ldr	x13, [x0, #CTX_VDISR_EL2]
	msr	vdisr_el2, x13

#if CTX_INCLUDE_NEVE_REGS
	ldr	x14, [x0, #CTX_VNCR_EL2]
	msr	vncr_el2, x14
#endif

	ldr	x15, [x0, #CTX_VSESR_EL2]
	msr	vsesr_el2, x15

	ldr	x16, [x0, #CTX_VSTCR_EL2]
	msr	vstcr_el2, x16

	ldr	x17, [x0, #CTX_VSTTBR_EL2]
	msr	vsttbr_el2, x17

	ldr	x9, [x0, #CTX_TRFCR_EL2]
	msr	TRFCR_EL2, x9
#endif

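	/* Restore SCXTNUM_EL2 when building for Armv8.5 or later */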
#if ARM_ARCH_AT_LEAST(8, 5)
	ldr	x10, [x0, #CTX_SCXTNUM_EL2]
	msr	scxtnum_el2, x10
#endif

	ret
endfunc el2_sysregs_context_restore

#endif /* CTX_INCLUDE_EL2_REGS */

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS to use
 * x9-x17 (temporary caller-saved registers) to save EL1 system
 * register context. It assumes that 'x0' is pointing to an
 * 'el1_sys_regs' structure where the register context will be saved.
 * ------------------------------------------------------------------
 */
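/*
 * Note: callers are expected to pass the address of the 'el1_sys_regs'
 * area of the per-CPU cpu_context in x0; in this codebase that is
 * normally done by the EL3 context management code (e.g.
 * cm_el1_sysregs_context_save() in context_mgmt.c).
 */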
func el1_sysregs_context_save

	mrs	x9, spsr_el1
	mrs	x10, elr_el1
	stp	x9, x10, [x0, #CTX_SPSR_EL1]

#if !ERRATA_SPECULATIVE_AT
	mrs	x15, sctlr_el1
	mrs	x16, tcr_el1
	stp	x15, x16, [x0, #CTX_SCTLR_EL1]
#endif

	mrs	x17, cpacr_el1
	mrs	x9, csselr_el1
	stp	x17, x9, [x0, #CTX_CPACR_EL1]

	mrs	x10, sp_el1
	mrs	x11, esr_el1
	stp	x10, x11, [x0, #CTX_SP_EL1]

	mrs	x12, ttbr0_el1
	mrs	x13, ttbr1_el1
	stp	x12, x13, [x0, #CTX_TTBR0_EL1]

	mrs	x14, mair_el1
	mrs	x15, amair_el1
	stp	x14, x15, [x0, #CTX_MAIR_EL1]

	mrs	x16, actlr_el1
	mrs	x17, tpidr_el1
	stp	x16, x17, [x0, #CTX_ACTLR_EL1]

	mrs	x9, tpidr_el0
	mrs	x10, tpidrro_el0
	stp	x9, x10, [x0, #CTX_TPIDR_EL0]

	mrs	x13, par_el1
	mrs	x14, far_el1
	stp	x13, x14, [x0, #CTX_PAR_EL1]

	mrs	x15, afsr0_el1
	mrs	x16, afsr1_el1
	stp	x15, x16, [x0, #CTX_AFSR0_EL1]

	mrs	x17, contextidr_el1
	mrs	x9, vbar_el1
	stp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]

	/* Save AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, spsr_abt
	mrs	x12, spsr_und
	stp	x11, x12, [x0, #CTX_SPSR_ABT]

	mrs	x13, spsr_irq
	mrs	x14, spsr_fiq
	stp	x13, x14, [x0, #CTX_SPSR_IRQ]

	mrs	x15, dacr32_el2
	mrs	x16, ifsr32_el2
	stp	x15, x16, [x0, #CTX_DACR32_EL2]
#endif

	/* Save NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	mrs	x10, cntp_ctl_el0
	mrs	x11, cntp_cval_el0
	stp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]

	mrs	x12, cntv_ctl_el0
	mrs	x13, cntv_cval_el0
	stp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]

	mrs	x14, cntkctl_el1
	str	x14, [x0, #CTX_CNTKCTL_EL1]
#endif

	/* Save MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	mrs	x15, TFSRE0_EL1
	mrs	x16, TFSR_EL1
	stp	x15, x16, [x0, #CTX_TFSRE0_EL1]

	mrs	x9, RGSR_EL1
	mrs	x10, GCR_EL1
	stp	x9, x10, [x0, #CTX_RGSR_EL1]
#endif

	ret
endfunc el1_sysregs_context_save

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS to use
 * x9-x17 (temporary caller-saved registers) to restore EL1 system
 * register context. It assumes that 'x0' is pointing to an
 * 'el1_sys_regs' structure from where the register context will be
 * restored.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_restore

	ldp	x9, x10, [x0, #CTX_SPSR_EL1]
	msr	spsr_el1, x9
	msr	elr_el1, x10

#if !ERRATA_SPECULATIVE_AT
	ldp	x15, x16, [x0, #CTX_SCTLR_EL1]
	msr	sctlr_el1, x15
	msr	tcr_el1, x16
#endif

	ldp	x17, x9, [x0, #CTX_CPACR_EL1]
	msr	cpacr_el1, x17
	msr	csselr_el1, x9

	ldp	x10, x11, [x0, #CTX_SP_EL1]
	msr	sp_el1, x10
	msr	esr_el1, x11

	ldp	x12, x13, [x0, #CTX_TTBR0_EL1]
	msr	ttbr0_el1, x12
	msr	ttbr1_el1, x13

	ldp	x14, x15, [x0, #CTX_MAIR_EL1]
	msr	mair_el1, x14
	msr	amair_el1, x15

	ldp	x16, x17, [x0, #CTX_ACTLR_EL1]
	msr	actlr_el1, x16
	msr	tpidr_el1, x17

	ldp	x9, x10, [x0, #CTX_TPIDR_EL0]
	msr	tpidr_el0, x9
	msr	tpidrro_el0, x10

	ldp	x13, x14, [x0, #CTX_PAR_EL1]
	msr	par_el1, x13
	msr	far_el1, x14

	ldp	x15, x16, [x0, #CTX_AFSR0_EL1]
	msr	afsr0_el1, x15
	msr	afsr1_el1, x16

	ldp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]
	msr	contextidr_el1, x17
	msr	vbar_el1, x9

	/* Restore AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	ldp	x11, x12, [x0, #CTX_SPSR_ABT]
	msr	spsr_abt, x11
	msr	spsr_und, x12

	ldp	x13, x14, [x0, #CTX_SPSR_IRQ]
	msr	spsr_irq, x13
	msr	spsr_fiq, x14

	ldp	x15, x16, [x0, #CTX_DACR32_EL2]
	msr	dacr32_el2, x15
	msr	ifsr32_el2, x16
#endif
	/* Restore NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	ldp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]
	msr	cntp_ctl_el0, x10
	msr	cntp_cval_el0, x11

	ldp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]
	msr	cntv_ctl_el0, x12
	msr	cntv_cval_el0, x13

	ldr	x14, [x0, #CTX_CNTKCTL_EL1]
	msr	cntkctl_el1, x14
#endif
	/* Restore MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	ldp	x11, x12, [x0, #CTX_TFSRE0_EL1]
	msr	TFSRE0_EL1, x11
	msr	TFSR_EL1, x12

	ldp	x13, x14, [x0, #CTX_RGSR_EL1]
	msr	RGSR_EL1, x13
	msr	GCR_EL1, x14
#endif

	/* No explicit ISB required here as ERET covers it */
	ret
endfunc el1_sysregs_context_restore

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS (aapcs64),
 * using x9-x17 (temporary caller-saved registers) to save the
 * floating point register context. It assumes that 'x0' is pointing
 * to a 'fp_regs' structure where the register context will be saved.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, Trusted Firmware currently neither uses VFP registers nor
 * sets this trap, so CPTR_EL3.TFP is assumed to be clear.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
#if CTX_INCLUDE_FPREGS
func fpregs_context_save
	stp	q0, q1, [x0, #CTX_FP_Q0]
	stp	q2, q3, [x0, #CTX_FP_Q2]
	stp	q4, q5, [x0, #CTX_FP_Q4]
	stp	q6, q7, [x0, #CTX_FP_Q6]
	stp	q8, q9, [x0, #CTX_FP_Q8]
	stp	q10, q11, [x0, #CTX_FP_Q10]
	stp	q12, q13, [x0, #CTX_FP_Q12]
	stp	q14, q15, [x0, #CTX_FP_Q14]
	stp	q16, q17, [x0, #CTX_FP_Q16]
	stp	q18, q19, [x0, #CTX_FP_Q18]
	stp	q20, q21, [x0, #CTX_FP_Q20]
	stp	q22, q23, [x0, #CTX_FP_Q22]
	stp	q24, q25, [x0, #CTX_FP_Q24]
	stp	q26, q27, [x0, #CTX_FP_Q26]
	stp	q28, q29, [x0, #CTX_FP_Q28]
	stp	q30, q31, [x0, #CTX_FP_Q30]

	mrs	x9, fpsr
	str	x9, [x0, #CTX_FP_FPSR]

	mrs	x10, fpcr
	str	x10, [x0, #CTX_FP_FPCR]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, fpexc32_el2
	str	x11, [x0, #CTX_FP_FPEXC32_EL2]
#endif
	ret
endfunc fpregs_context_save

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS (aapcs64),
 * using x9-x17 (temporary caller-saved registers) to restore the
 * floating point register context. It assumes that 'x0' is pointing
 * to a 'fp_regs' structure from where the register context will be
 * restored.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, Trusted Firmware currently neither uses VFP registers nor
 * sets this trap, so CPTR_EL3.TFP is assumed to be clear.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
func fpregs_context_restore
	ldp	q0, q1, [x0, #CTX_FP_Q0]
	ldp	q2, q3, [x0, #CTX_FP_Q2]
	ldp	q4, q5, [x0, #CTX_FP_Q4]
	ldp	q6, q7, [x0, #CTX_FP_Q6]
	ldp	q8, q9, [x0, #CTX_FP_Q8]
	ldp	q10, q11, [x0, #CTX_FP_Q10]
	ldp	q12, q13, [x0, #CTX_FP_Q12]
	ldp	q14, q15, [x0, #CTX_FP_Q14]
	ldp	q16, q17, [x0, #CTX_FP_Q16]
	ldp	q18, q19, [x0, #CTX_FP_Q18]
	ldp	q20, q21, [x0, #CTX_FP_Q20]
	ldp	q22, q23, [x0, #CTX_FP_Q22]
	ldp	q24, q25, [x0, #CTX_FP_Q24]
	ldp	q26, q27, [x0, #CTX_FP_Q26]
	ldp	q28, q29, [x0, #CTX_FP_Q28]
	ldp	q30, q31, [x0, #CTX_FP_Q30]

	ldr	x9, [x0, #CTX_FP_FPSR]
	msr	fpsr, x9

	ldr	x10, [x0, #CTX_FP_FPCR]
	msr	fpcr, x10

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x11, [x0, #CTX_FP_FPEXC32_EL2]
	msr	fpexc32_el2, x11
#endif
	/*
	 * No explicit ISB required here as the ERET used to switch to
	 * Secure EL1 or the Non-secure world covers it
	 */

	ret
endfunc fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */

/* ------------------------------------------------------------------
 * The following function is used to save and restore all the general
 * purpose and ARMv8.3-PAuth (if enabled) registers.
 * It also checks if the Secure Cycle Counter is not disabled in
 * MDCR_EL3 when ARMv8.5-PMU is implemented; in that case it saves
 * PMCR_EL0 if called from the Non-secure state and then disables the
 * Cycle Counter.
 *
 * Ideally we would only save and restore the callee saved registers
 * when a world switch occurs, but that type of implementation is more
 * complex. So currently we will always save and restore these
 * registers on entry and exit of EL3.
 * These are not macros to ensure their invocation fits within the 32
 * instructions per exception vector.
 * clobbers: x18
 * ------------------------------------------------------------------
 */
func save_gp_pmcr_pauth_regs
	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	stp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	stp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	stp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	stp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	stp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	stp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	stp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	stp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	stp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	stp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	stp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	stp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	stp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	mrs	x18, sp_el0
	str	x18, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]

	/* ----------------------------------------------------------
	 * Check if the earlier initialization of MDCR_EL3.SCCD to 1
	 * failed, meaning that ARMv8.5-PMU is not implemented and
	 * PMCR_EL0 should be saved in the Non-secure context.
	 * ----------------------------------------------------------
	 */
	mrs	x9, mdcr_el3
	tst	x9, #MDCR_SCCD_BIT
	bne	1f

	/* Secure Cycle Counter is not disabled */
	mrs	x9, pmcr_el0

	/* Check caller's security state */
	mrs	x10, scr_el3
	tst	x10, #SCR_NS_BIT
	beq	2f

	/* Save PMCR_EL0 if called from Non-secure state */
	str	x9, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]

	/* Disable cycle counter when event counting is prohibited */
2:	orr	x9, x9, #PMCR_EL0_DP_BIT
	msr	pmcr_el0, x9
	isb
1:
#if CTX_INCLUDE_PAUTH_REGS
	/* ----------------------------------------------------------
	 * Save the ARMv8.3-PAuth keys as they are not banked
	 * by exception level
	 * ----------------------------------------------------------
	 */
	add	x19, sp, #CTX_PAUTH_REGS_OFFSET

	mrs	x20, APIAKeyLo_EL1	/* x21:x20 = APIAKey */
	mrs	x21, APIAKeyHi_EL1
	mrs	x22, APIBKeyLo_EL1	/* x23:x22 = APIBKey */
	mrs	x23, APIBKeyHi_EL1
	mrs	x24, APDAKeyLo_EL1	/* x25:x24 = APDAKey */
	mrs	x25, APDAKeyHi_EL1
	mrs	x26, APDBKeyLo_EL1	/* x27:x26 = APDBKey */
	mrs	x27, APDBKeyHi_EL1
	mrs	x28, APGAKeyLo_EL1	/* x29:x28 = APGAKey */
	mrs	x29, APGAKeyHi_EL1

	stp	x20, x21, [x19, #CTX_PACIAKEY_LO]
	stp	x22, x23, [x19, #CTX_PACIBKEY_LO]
	stp	x24, x25, [x19, #CTX_PACDAKEY_LO]
	stp	x26, x27, [x19, #CTX_PACDBKEY_LO]
	stp	x28, x29, [x19, #CTX_PACGAKEY_LO]
#endif /* CTX_INCLUDE_PAUTH_REGS */

	ret
endfunc save_gp_pmcr_pauth_regs

/* ------------------------------------------------------------------
 * This function restores ARMv8.3-PAuth (if enabled), PMCR_EL0 (when
 * returning to the Non-secure state) and all general purpose
 * registers except x30 from the CPU context.
 * The x30 register must be explicitly restored by the caller.
 * ------------------------------------------------------------------
 */
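/*
 * A typical caller therefore reloads x30 itself after the call, as
 * el3_exit does further down in this file:
 *
 *	bl	restore_gp_pmcr_pauth_regs
 *	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
 */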
func restore_gp_pmcr_pauth_regs
#if CTX_INCLUDE_PAUTH_REGS
	/* Restore the ARMv8.3 PAuth keys */
	add	x10, sp, #CTX_PAUTH_REGS_OFFSET

	ldp	x0, x1, [x10, #CTX_PACIAKEY_LO]	/* x1:x0 = APIAKey */
	ldp	x2, x3, [x10, #CTX_PACIBKEY_LO]	/* x3:x2 = APIBKey */
	ldp	x4, x5, [x10, #CTX_PACDAKEY_LO]	/* x5:x4 = APDAKey */
	ldp	x6, x7, [x10, #CTX_PACDBKEY_LO]	/* x7:x6 = APDBKey */
	ldp	x8, x9, [x10, #CTX_PACGAKEY_LO]	/* x9:x8 = APGAKey */

	msr	APIAKeyLo_EL1, x0
	msr	APIAKeyHi_EL1, x1
	msr	APIBKeyLo_EL1, x2
	msr	APIBKeyHi_EL1, x3
	msr	APDAKeyLo_EL1, x4
	msr	APDAKeyHi_EL1, x5
	msr	APDBKeyLo_EL1, x6
	msr	APDBKeyHi_EL1, x7
	msr	APGAKeyLo_EL1, x8
	msr	APGAKeyHi_EL1, x9
#endif /* CTX_INCLUDE_PAUTH_REGS */

	/* ----------------------------------------------------------
	 * Restore PMCR_EL0 when returning to Non-secure state if
	 * Secure Cycle Counter is not disabled in MDCR_EL3 when
	 * ARMv8.5-PMU is implemented.
	 * ----------------------------------------------------------
	 */
	mrs	x0, scr_el3
	tst	x0, #SCR_NS_BIT
	beq	2f

	/* ----------------------------------------------------------
	 * Back to Non-secure state.
	 * Check if the earlier initialization of MDCR_EL3.SCCD to 1
	 * failed, meaning that ARMv8.5-PMU is not implemented and
	 * PMCR_EL0 should be restored from the Non-secure context.
	 * ----------------------------------------------------------
	 */
	mrs	x0, mdcr_el3
	tst	x0, #MDCR_SCCD_BIT
	bne	2f
	ldr	x0, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
	msr	pmcr_el0, x0
2:
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	ldp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	ldp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	ldp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	ldp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	ldp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	ldp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	ldp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	ldp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	ldp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	ldp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	ldp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	ldp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	ldr	x28, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
	msr	sp_el0, x28
	ldp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	ret
endfunc restore_gp_pmcr_pauth_regs

/*
 * In case of ERRATA_SPECULATIVE_AT, save the SCTLR_EL1 and TCR_EL1
 * registers and update them to disable the stage 1 and stage 2 page
 * table walks.
 */
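/*
 * The values saved here are re-applied on the way out of EL3 by the
 * restore_ptw_el1_sys_regs sequence invoked from el3_exit below.
 */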
func save_and_update_ptw_el1_sys_regs
	/* ----------------------------------------------------------
	 * Save only sctlr_el1 and tcr_el1 registers
	 * ----------------------------------------------------------
	 */
	mrs	x29, sctlr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_SCTLR_EL1)]
	mrs	x29, tcr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_TCR_EL1)]

	/* ------------------------------------------------------------
	 * The steps below must be carried out in this order to disable
	 * the page table walk for lower ELs (EL1 and EL0). The first
	 * step ensures that the stage 1 page table walk is disabled;
	 * the second step ensures that the page table walker uses the
	 * TCR_EL1.EPDx bits to perform address translation. The ISB
	 * ensures that the CPU performs these two steps in order.
	 *
	 * 1. Update the TCR_EL1.EPDx bits to disable the stage 1 page
	 *    table walk.
	 * 2. Enable the MMU bit to avoid identity mapping via stage 2
	 *    and force the TCR_EL1.EPDx bits to be used by the page
	 *    table walker.
	 * ------------------------------------------------------------
	 */
	orr	x29, x29, #(TCR_EPD0_BIT)
	orr	x29, x29, #(TCR_EPD1_BIT)
	msr	tcr_el1, x29
	isb
	mrs	x29, sctlr_el1
	orr	x29, x29, #SCTLR_M_BIT
	msr	sctlr_el1, x29
	isb

	ret
endfunc save_and_update_ptw_el1_sys_regs

/* ------------------------------------------------------------------
 * This routine assumes that the SP_EL3 is pointing to a valid
 * context structure from where the gp regs and other special
 * registers can be retrieved.
 * ------------------------------------------------------------------
 */
func el3_exit
#if ENABLE_ASSERTIONS
	/* el3_exit assumes SP_EL0 on entry */
	mrs	x17, spsel
	cmp	x17, #MODE_SP_EL0
	ASM_ASSERT(eq)
#endif

	/* ----------------------------------------------------------
	 * Save the current SP_EL0 i.e. the EL3 runtime stack which
	 * will be used for handling the next SMC.
	 * Then switch to SP_EL3.
	 * ----------------------------------------------------------
	 */
	mov	x17, sp
	msr	spsel, #MODE_SP_ELX
	str	x17, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]

	/* ----------------------------------------------------------
	 * Restore SPSR_EL3, ELR_EL3 and SCR_EL3 prior to ERET
	 * ----------------------------------------------------------
	 */
	ldr	x18, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	ldp	x16, x17, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
	msr	scr_el3, x18
	msr	spsr_el3, x16
	msr	elr_el3, x17

#if IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639
	/* ----------------------------------------------------------
	 * Restore mitigation state as it was on entry to EL3
	 * ----------------------------------------------------------
	 */
	ldr	x17, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]
	cbz	x17, 1f
	blr	x17
1:
#endif
	restore_ptw_el1_sys_regs

	/* ----------------------------------------------------------
	 * Restore general purpose (including x30), PMCR_EL0 and
	 * ARMv8.3-PAuth registers.
	 * Exit EL3 via ERET to a lower exception level.
	 * ----------------------------------------------------------
	 */
	bl	restore_gp_pmcr_pauth_regs
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

#if IMAGE_BL31 && RAS_EXTENSION
	/* ----------------------------------------------------------
	 * Issue Error Synchronization Barrier to synchronize SErrors
	 * before exiting EL3. We're running with EAs unmasked, so
	 * any synchronized errors would be taken immediately;
	 * therefore no need to inspect DISR_EL1 register.
	 * ----------------------------------------------------------
	 */
	esb
#else
	dsb	sy
#endif
#ifdef IMAGE_BL31
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_IS_IN_EL3]
#endif
	exception_return

endfunc el3_exit