/*
 * Copyright (c) 2017-2020, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <context.h>
#include <services/arm_arch_svc.h>

	.globl	wa_cve_2017_5715_mmu_vbar

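/*
 * Expected ESR_EL3 values for an SMC #0 executed from AArch64 and AArch32
 * state respectively (EC = SMC, IL = 1, ISS = 0).
 */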
#define ESR_EL3_A64_SMC0	0x5e000000
#define ESR_EL3_A32_SMC0	0x4e000000

vector_base wa_cve_2017_5715_mmu_vbar

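	/*
	 * Apply the CVE-2017-5715 workaround. Disabling and re-enabling the
	 * MMU at EL3 is used here to invalidate the branch predictor on the
	 * CPUs that install this vector table. For synchronous exceptions the
	 * macro also fast-paths SMCCC_ARCH_WORKAROUND_1 SMC calls: the
	 * workaround has already been applied at that point, so the call can
	 * return without entering the runtime handler. X0 and X1 are stashed
	 * in the context GP register area while the workaround runs.
	 */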
	.macro	apply_cve_2017_5715_wa _is_sync_exception _esr_el3_val
	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	mrs	x1, sctlr_el3
	/* Disable MMU */
	bic	x1, x1, #SCTLR_M_BIT
	msr	sctlr_el3, x1
	isb
	/* Enable MMU */
	orr	x1, x1, #SCTLR_M_BIT
	msr	sctlr_el3, x1
	/*
	 * Defer the ISB to avoid synchronizing twice in case we hit the
	 * workaround SMC call, which will implicitly synchronize because of
	 * the ERET instruction.
	 */

	/*
	 * Ensure the SMC was executed from AArch64/AArch32 state with
	 * immediate #0 and with W0 = SMCCC_ARCH_WORKAROUND_1.
	 *
	 * This sequence evaluates as:
	 *    (W0==SMCCC_ARCH_WORKAROUND_1) ? (ESR_EL3==SMC#0) : (NE)
	 * allowing use of a single branch operation.
	 */
	.if \_is_sync_exception
		orr	w1, wzr, #SMCCC_ARCH_WORKAROUND_1
		cmp	w0, w1
		mrs	x0, esr_el3
		mov_imm	w1, \_esr_el3_val
		ccmp	w0, w1, #0, eq
		/* Static predictor will predict a fall through */
		bne	1f
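		/*
		 * SMCCC_ARCH_WORKAROUND_1: the workaround has already been
		 * applied above, so return to the caller straight away.
		 */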
		exception_return
1:
	.endif

	/*
	 * Synchronize now to enable the MMU.  This is required
	 * to ensure the load pair below reads the data stored earlier.
	 */
	isb
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	.endm

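	/*
	 * Exceptions taken from EL3 itself do not apply the workaround; the
	 * entries below branch directly to the default runtime exception
	 * handlers.
	 */
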
	/* ---------------------------------------------------------------------
	 * Current EL with SP_EL0 : 0x0 - 0x200
	 * ---------------------------------------------------------------------
	 */
vector_entry mmu_sync_exception_sp_el0
	b	sync_exception_sp_el0
end_vector_entry mmu_sync_exception_sp_el0

vector_entry mmu_irq_sp_el0
	b	irq_sp_el0
end_vector_entry mmu_irq_sp_el0

vector_entry mmu_fiq_sp_el0
	b	fiq_sp_el0
end_vector_entry mmu_fiq_sp_el0

vector_entry mmu_serror_sp_el0
	b	serror_sp_el0
end_vector_entry mmu_serror_sp_el0

	/* ---------------------------------------------------------------------
	 * Current EL with SP_ELx: 0x200 - 0x400
	 * ---------------------------------------------------------------------
	 */
vector_entry mmu_sync_exception_sp_elx
	b	sync_exception_sp_elx
end_vector_entry mmu_sync_exception_sp_elx

vector_entry mmu_irq_sp_elx
	b	irq_sp_elx
end_vector_entry mmu_irq_sp_elx

vector_entry mmu_fiq_sp_elx
	b	fiq_sp_elx
end_vector_entry mmu_fiq_sp_elx

vector_entry mmu_serror_sp_elx
	b	serror_sp_elx
end_vector_entry mmu_serror_sp_elx

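	/*
	 * For exceptions taken from a lower EL, the workaround is applied on
	 * every entry before branching to the corresponding runtime handler.
	 * The synchronous entries additionally return early for
	 * SMCCC_ARCH_WORKAROUND_1 SMC calls, as handled in the macro above.
	 */
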
	/* ---------------------------------------------------------------------
	 * Lower EL using AArch64 : 0x400 - 0x600
	 * ---------------------------------------------------------------------
	 */
vector_entry mmu_sync_exception_aarch64
	apply_cve_2017_5715_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A64_SMC0
	b	sync_exception_aarch64
end_vector_entry mmu_sync_exception_aarch64

vector_entry mmu_irq_aarch64
	apply_cve_2017_5715_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
	b	irq_aarch64
end_vector_entry mmu_irq_aarch64

vector_entry mmu_fiq_aarch64
	apply_cve_2017_5715_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
	b	fiq_aarch64
end_vector_entry mmu_fiq_aarch64

vector_entry mmu_serror_aarch64
	apply_cve_2017_5715_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
	b	serror_aarch64
end_vector_entry mmu_serror_aarch64

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch32 : 0x600 - 0x800
	 * ---------------------------------------------------------------------
	 */
vector_entry mmu_sync_exception_aarch32
	apply_cve_2017_5715_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A32_SMC0
	b	sync_exception_aarch32
end_vector_entry mmu_sync_exception_aarch32

vector_entry mmu_irq_aarch32
	apply_cve_2017_5715_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
	b	irq_aarch32
end_vector_entry mmu_irq_aarch32

vector_entry mmu_fiq_aarch32
	apply_cve_2017_5715_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
	b	fiq_aarch32
end_vector_entry mmu_fiq_aarch32

vector_entry mmu_serror_aarch32
	apply_cve_2017_5715_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
	b	serror_aarch32
end_vector_entry mmu_serror_aarch32