/*
 * Copyright (c) 2013-2024, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#ifndef CONTEXT_H
#define CONTEXT_H

#if (CTX_INCLUDE_EL2_REGS && IMAGE_BL31)
#include <lib/el3_runtime/context_el2.h>
#else
/*
 * EL1 context is required when:
 * IMAGE_BL1 || ((!CTX_INCLUDE_EL2_REGS) && IMAGE_BL31)
 */
#include <lib/el3_runtime/context_el1.h>
#endif /* (CTX_INCLUDE_EL2_REGS && IMAGE_BL31) */

#include <lib/el3_runtime/cpu_data.h>
#include <lib/el3_runtime/simd_ctx.h>
#include <lib/utils_def.h>
/*******************************************************************************
 * Constants that allow assembler code to access members of the 'gp_regs'
 * structure at their correct offsets.
 ******************************************************************************/
#define CTX_GPREGS_OFFSET	U(0x0)
#define CTX_GPREG_X0		U(0x0)
#define CTX_GPREG_X1		U(0x8)
#define CTX_GPREG_X2		U(0x10)
#define CTX_GPREG_X3		U(0x18)
#define CTX_GPREG_X4		U(0x20)
#define CTX_GPREG_X5		U(0x28)
#define CTX_GPREG_X6		U(0x30)
#define CTX_GPREG_X7		U(0x38)
#define CTX_GPREG_X8		U(0x40)
#define CTX_GPREG_X9		U(0x48)
#define CTX_GPREG_X10		U(0x50)
#define CTX_GPREG_X11		U(0x58)
#define CTX_GPREG_X12		U(0x60)
#define CTX_GPREG_X13		U(0x68)
#define CTX_GPREG_X14		U(0x70)
#define CTX_GPREG_X15		U(0x78)
#define CTX_GPREG_X16		U(0x80)
#define CTX_GPREG_X17		U(0x88)
#define CTX_GPREG_X18		U(0x90)
#define CTX_GPREG_X19		U(0x98)
#define CTX_GPREG_X20		U(0xa0)
#define CTX_GPREG_X21		U(0xa8)
#define CTX_GPREG_X22		U(0xb0)
#define CTX_GPREG_X23		U(0xb8)
#define CTX_GPREG_X24		U(0xc0)
#define CTX_GPREG_X25		U(0xc8)
#define CTX_GPREG_X26		U(0xd0)
#define CTX_GPREG_X27		U(0xd8)
#define CTX_GPREG_X28		U(0xe0)
#define CTX_GPREG_X29		U(0xe8)
#define CTX_GPREG_LR		U(0xf0)
#define CTX_GPREG_SP_EL0	U(0xf8)
#define CTX_GPREGS_END		U(0x100)

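/*
 * Illustrative sketch (not part of this header's API): these offsets pair
 * with the read_ctx_reg()/write_ctx_reg() accessors defined later in this
 * file, so C code can retrieve a saved general purpose register, e.g.
 *
 *	uint64_t x0 = read_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X0);
 *
 * where 'ctx' is assumed to be a valid cpu_context_t pointer obtained from
 * the context management library.
 */
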
/*******************************************************************************
 * Constants that allow assembler code to access members of the 'el3_state'
 * structure at their correct offsets. Note that some of the registers are only
 * 32 bits wide but are stored as 64-bit values for convenience.
 ******************************************************************************/
#define CTX_EL3STATE_OFFSET	(CTX_GPREGS_OFFSET + CTX_GPREGS_END)
#define CTX_SCR_EL3		U(0x0)
#define CTX_ESR_EL3		U(0x8)
#define CTX_RUNTIME_SP		U(0x10)
#define CTX_SPSR_EL3		U(0x18)
#define CTX_ELR_EL3		U(0x20)
#define CTX_PMCR_EL0		U(0x28)
#define CTX_IS_IN_EL3		U(0x30)
#define CTX_MDCR_EL3		U(0x38)
/* Constants required to support nested exceptions in EL3 */
#define CTX_SAVED_ELR_EL3	U(0x40)
/*
 * General purpose flag used to save various EL3 states.
 * FFH mode: used to identify whether a nested exception is being handled.
 * KFH mode: used as a counter value.
 */
#define CTX_NESTED_EA_FLAG	U(0x48)
#if FFH_SUPPORT
#define CTX_SAVED_ESR_EL3	U(0x50)
#define CTX_SAVED_SPSR_EL3	U(0x58)
#define CTX_SAVED_GPREG_LR	U(0x60)
#define CTX_EL3STATE_END	U(0x70) /* Align to the next 16 byte boundary */
#else
#define CTX_EL3STATE_END	U(0x50) /* Align to the next 16 byte boundary */
#endif /* FFH_SUPPORT */

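/*
 * Illustrative sketch: EL3 state entries are read and written through the
 * same offset-based accessors, e.g. to inspect the saved SCR_EL3 and
 * redirect the exception return address of a context ('ctx' and 'ep' are
 * assumed to be supplied by the caller):
 *
 *	el3_state_t *state = get_el3state_ctx(ctx);
 *	uint64_t scr_el3 = read_ctx_reg(state, CTX_SCR_EL3);
 *	write_ctx_reg(state, CTX_ELR_EL3, ep);
 */
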
/*******************************************************************************
 * Registers related to CVE-2018-3639
 ******************************************************************************/
#define CTX_CVE_2018_3639_OFFSET	(CTX_EL3STATE_OFFSET + CTX_EL3STATE_END)
#define CTX_CVE_2018_3639_DISABLE	U(0)
#define CTX_CVE_2018_3639_END		U(0x10) /* Align to the next 16 byte boundary */

/*******************************************************************************
 * Registers related to ERRATA_SPECULATIVE_AT
 *
 * This is essential because, with the EL1 and EL2 context registers being
 * decoupled, both will not be present for a given build configuration.
 * Since the ERRATA_SPECULATIVE_AT workaround requires SCTLR_EL1 and TCR_EL1
 * independent of the above logic, explicit context entries must be reserved
 * for these registers.
 *
 * NOTE: This results in the following configurations, depending on the
 * presence of the errata workaround and the inclusion of EL1 or EL2 context.
 *
 * ============================================================================
 * | ERRATA_SPECULATIVE_AT | EL1 context| Memory allocation(SCTLR_EL1,TCR_EL1)|
 * ============================================================================
 * |           0           |      0     |               None                  |
 * |           0           |      1     |     EL1 C-Context structure         |
 * |           1           |      0     |   Errata Context Offset Entries     |
 * |           1           |      1     |   Errata Context Offset Entries     |
 * ============================================================================
 *
 * In the above table, ERRATA_SPECULATIVE_AT=1 with EL1_Context=0 implies that
 * only the EL2 context is present, so memory for the SCTLR_EL1 and TCR_EL1
 * registers is reserved explicitly here under the ERRATA_SPECULATIVE_AT build
 * flag.
 *
 * When EL1_Context=1 and ERRATA_SPECULATIVE_AT=1, SCTLR_EL1 and TCR_EL1 are
 * modified by the errata workaround early in the code flow, prior to the EL1
 * context save and restore operations, so context memory is still reserved
 * here explicitly under the errata logic. These registers are not part of the
 * EL1 context save and restore routines.
 *
 * Only for the combination ERRATA_SPECULATIVE_AT=0 and EL1_Context=1 are
 * SCTLR_EL1 and TCR_EL1 part of the EL1 context structure (context_el1.h).
 * -----------------------------------------------------------------------------
 ******************************************************************************/
#define CTX_ERRATA_SPEC_AT_OFFSET	(CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_END)
#if ERRATA_SPECULATIVE_AT
#define CTX_ERRATA_SPEC_AT_SCTLR_EL1	U(0x0)
#define CTX_ERRATA_SPEC_AT_TCR_EL1	U(0x8)
#define CTX_ERRATA_SPEC_AT_END		U(0x10) /* Align to the next 16 byte boundary */
#else
#define CTX_ERRATA_SPEC_AT_END		U(0x0)
#endif /* ERRATA_SPECULATIVE_AT */

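/*
 * Illustrative sketch: callers should not need to care which of the above
 * combinations is in effect; the errata-aware helpers declared at the end
 * of this header pick the correct storage, e.g.
 *
 *	write_ctx_sctlr_el1_reg_errata(ctx, sctlr_val);
 *	u_register_t tcr = read_ctx_tcr_el1_reg_errata(ctx);
 *
 * 'ctx' and 'sctlr_val' are assumed to be supplied by the caller.
 */
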
/*******************************************************************************
 * Registers related to ARMv8.3-PAuth.
 ******************************************************************************/
#define CTX_PAUTH_REGS_OFFSET	(CTX_ERRATA_SPEC_AT_OFFSET + CTX_ERRATA_SPEC_AT_END)
#if CTX_INCLUDE_PAUTH_REGS
#define CTX_PACIAKEY_LO		U(0x0)
#define CTX_PACIAKEY_HI		U(0x8)
#define CTX_PACIBKEY_LO		U(0x10)
#define CTX_PACIBKEY_HI		U(0x18)
#define CTX_PACDAKEY_LO		U(0x20)
#define CTX_PACDAKEY_HI		U(0x28)
#define CTX_PACDBKEY_LO		U(0x30)
#define CTX_PACDBKEY_HI		U(0x38)
#define CTX_PACGAKEY_LO		U(0x40)
#define CTX_PACGAKEY_HI		U(0x48)
#define CTX_PAUTH_REGS_END	U(0x50) /* Align to the next 16 byte boundary */
#else
#define CTX_PAUTH_REGS_END	U(0)
#endif /* CTX_INCLUDE_PAUTH_REGS */

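/*
 * Illustrative sketch, assuming CTX_INCLUDE_PAUTH_REGS=1: each 128-bit
 * PAuth key occupies a LO/HI pair of 64-bit context entries, e.g. for the
 * instruction key A:
 *
 *	write_ctx_reg(get_pauth_ctx(ctx), CTX_PACIAKEY_LO, key_lo);
 *	write_ctx_reg(get_pauth_ctx(ctx), CTX_PACIAKEY_HI, key_hi);
 *
 * 'key_lo'/'key_hi' are placeholders for a platform-provided key.
 */
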
/*******************************************************************************
 * Registers initialised in a per-world context.
 ******************************************************************************/
#define CTX_CPTR_EL3			U(0x0)
#define CTX_ZCR_EL3			U(0x8)
#define CTX_MPAM3_EL3			U(0x10)
#define CTX_PERWORLD_EL3STATE_END	U(0x18)

#ifndef __ASSEMBLER__

#include <stdint.h>

#include <lib/cassert.h>

/*
 * Common constants to help define the 'cpu_context' structure and its
 * members below.
 */
#define DWORD_SHIFT		U(3)
#define DEFINE_REG_STRUCT(name, num_regs)	\
	typedef struct name {			\
		uint64_t ctx_regs[num_regs];	\
	} __aligned(16) name##_t

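/*
 * For reference, DEFINE_REG_STRUCT(gp_regs, CTX_GPREG_ALL) below expands to:
 *
 *	typedef struct gp_regs {
 *		uint64_t ctx_regs[CTX_GPREG_ALL];
 *	} __aligned(16) gp_regs_t;
 *
 * i.e. each context block is a 16-byte aligned array of 64-bit slots whose
 * length is derived from the corresponding *_END byte offset.
 */
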
/* Constants to determine the size of individual context structures */
#define CTX_GPREG_ALL		(CTX_GPREGS_END >> DWORD_SHIFT)

#define CTX_EL3STATE_ALL	(CTX_EL3STATE_END >> DWORD_SHIFT)
#define CTX_CVE_2018_3639_ALL	(CTX_CVE_2018_3639_END >> DWORD_SHIFT)

#if ERRATA_SPECULATIVE_AT
#define CTX_ERRATA_SPEC_AT_ALL	(CTX_ERRATA_SPEC_AT_END >> DWORD_SHIFT)
#endif
#if CTX_INCLUDE_PAUTH_REGS
#define CTX_PAUTH_REGS_ALL	(CTX_PAUTH_REGS_END >> DWORD_SHIFT)
#endif

/*
 * AArch64 general purpose register context structure. Usually x0-x18 and
 * lr are saved, as the compiler is expected to preserve the callee-saved
 * registers if they are used by the C runtime, and the assembler does not
 * touch the rest. However, in the case of a world switch during exception
 * handling, the callee-saved registers need to be saved as well.
 */
DEFINE_REG_STRUCT(gp_regs, CTX_GPREG_ALL);

/*
 * Miscellaneous registers used by EL3 firmware to maintain its state
 * across exception entries and exits
 */
DEFINE_REG_STRUCT(el3_state, CTX_EL3STATE_ALL);

/* Function pointer used by CVE-2018-3639 dynamic mitigation */
DEFINE_REG_STRUCT(cve_2018_3639, CTX_CVE_2018_3639_ALL);

/* Registers associated with ERRATA_SPECULATIVE_AT */
#if ERRATA_SPECULATIVE_AT
DEFINE_REG_STRUCT(errata_speculative_at, CTX_ERRATA_SPEC_AT_ALL);
#endif

/* Registers associated with ARMv8.3-PAuth */
#if CTX_INCLUDE_PAUTH_REGS
DEFINE_REG_STRUCT(pauth, CTX_PAUTH_REGS_ALL);
#endif

/*
 * Macros to access members of any of the above structures using their
 * offsets
 */
#define read_ctx_reg(ctx, offset)	((ctx)->ctx_regs[(offset) >> DWORD_SHIFT])
#define write_ctx_reg(ctx, offset, val)	(((ctx)->ctx_regs[(offset) >> DWORD_SHIFT]) \
					 = (uint64_t) (val))

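/*
 * The 'offset' argument is the byte offset of a member within the
 * structure (one of the CTX_* constants above); the macros convert it to
 * an array index by shifting right by DWORD_SHIFT. A minimal usage sketch:
 *
 *	write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X1, 0x1234ULL);
 *	uint64_t x1 = read_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X1);
 */
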
/*
 * Top-level context structure which is used by EL3 firmware to preserve
 * the state of a core at the next lower EL in a given security state and
 * save enough EL3 metadata to be able to return to that EL and security
 * state. The context management library will be used to ensure that
 * SP_EL3 always points to an instance of this structure at exception
 * entry and exit.
 */
typedef struct cpu_context {
	gp_regs_t gpregs_ctx;
	el3_state_t el3state_ctx;

	cve_2018_3639_t cve_2018_3639_ctx;

#if ERRATA_SPECULATIVE_AT
	errata_speculative_at_t errata_speculative_at_ctx;
#endif

#if CTX_INCLUDE_PAUTH_REGS
	pauth_t pauth_ctx;
#endif

#if (CTX_INCLUDE_EL2_REGS && IMAGE_BL31)
	el2_sysregs_t el2_sysregs_ctx;
#else
	/*
	 * EL1 context should be included only for IMAGE_BL1, or for
	 * IMAGE_BL31 when CTX_INCLUDE_EL2_REGS=0. When SPMD_SPM_AT_SEL2=1,
	 * the SPMC at S-EL2 takes care of saving and restoring EL1
	 * registers, so BL31 at EL3 can exclude the save and restore of
	 * EL1 context registers.
	 */
	el1_sysregs_t el1_sysregs_ctx;
#endif

} cpu_context_t;

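/*
 * Illustrative sketch: instances of this structure are owned by the
 * context management library; cm_get_context() is declared in
 * context_mgmt.h, not in this header, and NON_SECURE comes from other
 * common headers:
 *
 *	cpu_context_t *ctx = cm_get_context(NON_SECURE);
 *	write_ctx_reg(get_el3state_ctx(ctx), CTX_SPSR_EL3, spsr);
 *
 * 'spsr' is assumed to be computed by the caller.
 */
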
/*
 * Per-World Context.
 * It stores registers whose values can be shared across CPUs.
 */
typedef struct per_world_context {
	uint64_t ctx_cptr_el3;
	uint64_t ctx_zcr_el3;
	uint64_t ctx_mpam3_el3;
} per_world_context_t;

extern per_world_context_t per_world_context[CPU_DATA_CONTEXT_NUM];

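/*
 * Illustrative sketch: per-world state is indexed by security state,
 * assuming the CPU_CONTEXT_* indices from cpu_data.h, e.g.
 *
 *	uint64_t ns_cptr = per_world_context[CPU_CONTEXT_NS].ctx_cptr_el3;
 */
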
/* Macros to access members of the 'cpu_context_t' structure */
#define get_el3state_ctx(h)	(&((cpu_context_t *) h)->el3state_ctx)

#if (CTX_INCLUDE_EL2_REGS && IMAGE_BL31)
#define get_el2_sysregs_ctx(h)	(&((cpu_context_t *) h)->el2_sysregs_ctx)
#else
#define get_el1_sysregs_ctx(h)	(&((cpu_context_t *) h)->el1_sysregs_ctx)
#endif

#define get_gpregs_ctx(h)		(&((cpu_context_t *) h)->gpregs_ctx)
#define get_cve_2018_3639_ctx(h)	(&((cpu_context_t *) h)->cve_2018_3639_ctx)

#if ERRATA_SPECULATIVE_AT
#define get_errata_speculative_at_ctx(h)	(&((cpu_context_t *) h)->errata_speculative_at_ctx)
#endif

#if CTX_INCLUDE_PAUTH_REGS
#define get_pauth_ctx(h)	(&((cpu_context_t *) h)->pauth_ctx)
#endif

/*
 * Compile time assertions related to the 'cpu_context' structure to
 * ensure that the assembler and the compiler view of the offsets of
 * the structure members is the same.
 */
CASSERT(CTX_GPREGS_OFFSET == __builtin_offsetof(cpu_context_t, gpregs_ctx),
	assert_core_context_gp_offset_mismatch);

CASSERT(CTX_EL3STATE_OFFSET == __builtin_offsetof(cpu_context_t, el3state_ctx),
	assert_core_context_el3state_offset_mismatch);

CASSERT(CTX_CVE_2018_3639_OFFSET == __builtin_offsetof(cpu_context_t, cve_2018_3639_ctx),
	assert_core_context_cve_2018_3639_offset_mismatch);

#if ERRATA_SPECULATIVE_AT
CASSERT(CTX_ERRATA_SPEC_AT_OFFSET == __builtin_offsetof(cpu_context_t, errata_speculative_at_ctx),
	assert_core_context_errata_speculative_at_offset_mismatch);
#endif

#if CTX_INCLUDE_PAUTH_REGS
CASSERT(CTX_PAUTH_REGS_OFFSET == __builtin_offsetof(cpu_context_t, pauth_ctx),
	assert_core_context_pauth_offset_mismatch);
#endif /* CTX_INCLUDE_PAUTH_REGS */

/*
 * Helper macros to set the general purpose registers that correspond to
 * parameters in an AAPCS64 call, i.e. x0-x7.
 */
#define set_aapcs_args0(ctx, x0)	do {	\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X0, x0);	\
	} while (0)
#define set_aapcs_args1(ctx, x0, x1)	do {	\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X1, x1);	\
		set_aapcs_args0(ctx, x0);	\
	} while (0)
#define set_aapcs_args2(ctx, x0, x1, x2)	do {	\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X2, x2);	\
		set_aapcs_args1(ctx, x0, x1);	\
	} while (0)
#define set_aapcs_args3(ctx, x0, x1, x2, x3)	do {	\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X3, x3);	\
		set_aapcs_args2(ctx, x0, x1, x2);	\
	} while (0)
#define set_aapcs_args4(ctx, x0, x1, x2, x3, x4)	do {	\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X4, x4);	\
		set_aapcs_args3(ctx, x0, x1, x2, x3);	\
	} while (0)
#define set_aapcs_args5(ctx, x0, x1, x2, x3, x4, x5)	do {	\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X5, x5);	\
		set_aapcs_args4(ctx, x0, x1, x2, x3, x4);	\
	} while (0)
#define set_aapcs_args6(ctx, x0, x1, x2, x3, x4, x5, x6)	do {	\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X6, x6);	\
		set_aapcs_args5(ctx, x0, x1, x2, x3, x4, x5);	\
	} while (0)
#define set_aapcs_args7(ctx, x0, x1, x2, x3, x4, x5, x6, x7)	do {	\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X7, x7);	\
		set_aapcs_args6(ctx, x0, x1, x2, x3, x4, x5, x6);	\
	} while (0)

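/*
 * Usage sketch: a runtime service preparing return values in x0-x3 of a
 * saved context might do (names are placeholders):
 *
 *	set_aapcs_args3(ctx, ret0, ret1, ret2, ret3);
 *
 * which populates x0-x3 of 'ctx' for the next exception return.
 */
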
/*******************************************************************************
 * Function prototypes
 ******************************************************************************/
#if CTX_INCLUDE_FPREGS
void fpregs_context_save(simd_regs_t *regs);
void fpregs_context_restore(simd_regs_t *regs);
#endif

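/*
 * Illustrative sketch, assuming CTX_INCLUDE_FPREGS=1 and a 'regs' pointer
 * to per-CPU simd_regs_t storage (managed by the simd_ctx layer):
 *
 *	fpregs_context_save(regs);
 *	(run code that may clobber the FP/SIMD state)
 *	fpregs_context_restore(regs);
 */
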
/*******************************************************************************
 * The next four inline functions are required for IMAGE_BL1, as well as for
 * IMAGE_BL31 for the combinations below.
 * ============================================================================
 * | ERRATA_SPECULATIVE_AT| CTX_INCLUDE_EL2_REGS |         Combination        |
 * ============================================================================
 * |          0           |          0           |      Valid (EL1 ctx)       |
 * |______________________|______________________|____________________________|
 * |                      |                      | Invalid (No Errata/EL1 Ctx)|
 * |          0           |          1           | Hence compiled out.        |
 * |______________________|______________________|____________________________|
 * |                      |                      |                            |
 * |          1           |          0           |     Valid (Errata ctx)     |
 * |______________________|______________________|____________________________|
 * |                      |                      |                            |
 * |          1           |          1           |     Valid (Errata ctx)     |
 * |______________________|______________________|____________________________|
 * ============================================================================
 ******************************************************************************/
#if (IMAGE_BL1 || ((ERRATA_SPECULATIVE_AT) || (!CTX_INCLUDE_EL2_REGS)))

static inline void write_ctx_sctlr_el1_reg_errata(cpu_context_t *ctx, u_register_t val)
{
#if (ERRATA_SPECULATIVE_AT)
	write_ctx_reg(get_errata_speculative_at_ctx(ctx),
		      CTX_ERRATA_SPEC_AT_SCTLR_EL1, val);
#else
	write_el1_ctx_common(get_el1_sysregs_ctx(ctx), sctlr_el1, val);
#endif /* ERRATA_SPECULATIVE_AT */
}

static inline void write_ctx_tcr_el1_reg_errata(cpu_context_t *ctx, u_register_t val)
{
#if (ERRATA_SPECULATIVE_AT)
	write_ctx_reg(get_errata_speculative_at_ctx(ctx),
		      CTX_ERRATA_SPEC_AT_TCR_EL1, val);
#else
	write_el1_ctx_common(get_el1_sysregs_ctx(ctx), tcr_el1, val);
#endif /* ERRATA_SPECULATIVE_AT */
}

static inline u_register_t read_ctx_sctlr_el1_reg_errata(cpu_context_t *ctx)
{
#if (ERRATA_SPECULATIVE_AT)
	return read_ctx_reg(get_errata_speculative_at_ctx(ctx),
			    CTX_ERRATA_SPEC_AT_SCTLR_EL1);
#else
	return read_el1_ctx_common(get_el1_sysregs_ctx(ctx), sctlr_el1);
#endif /* ERRATA_SPECULATIVE_AT */
}

static inline u_register_t read_ctx_tcr_el1_reg_errata(cpu_context_t *ctx)
{
#if (ERRATA_SPECULATIVE_AT)
	return read_ctx_reg(get_errata_speculative_at_ctx(ctx),
			    CTX_ERRATA_SPEC_AT_TCR_EL1);
#else
	return read_el1_ctx_common(get_el1_sysregs_ctx(ctx), tcr_el1);
#endif /* ERRATA_SPECULATIVE_AT */
}

#endif /* (IMAGE_BL1 || ((ERRATA_SPECULATIVE_AT) || (!CTX_INCLUDE_EL2_REGS))) */

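/*
 * Usage sketch: a read-modify-write of the saved SCTLR_EL1 through the
 * errata-aware helpers (SCTLR_M_BIT is defined in arch.h; 'ctx' is assumed
 * to come from the caller):
 *
 *	u_register_t sctlr = read_ctx_sctlr_el1_reg_errata(ctx);
 *	write_ctx_sctlr_el1_reg_errata(ctx, sctlr | SCTLR_M_BIT);
 */
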
#endif /* __ASSEMBLER__ */

#endif /* CONTEXT_H */