• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /* SPDX-License-Identifier: GPL-2.0 */
2 #ifndef __ASM_ALTERNATIVE_MACROS_H
3 #define __ASM_ALTERNATIVE_MACROS_H
4 
5 #include <linux/const.h>
6 #include <vdso/bits.h>
7 
8 #include <asm/cpucaps.h>
9 #include <asm/insn-def.h>
10 
/*
 * Binutils 2.27.0 can't handle a 'UL' suffix on constants, so for the
 * assembly macros below we must use `(1 << ARM64_CB_SHIFT)`.
 */
/* Bit position used to tag a cpucap value as a "callback" alternative. */
#define ARM64_CB_SHIFT	15
#define ARM64_CB_BIT	BIT(ARM64_CB_SHIFT)

/* The callback tag bit must lie above every valid cpucap number. */
#if ARM64_NCAPS >= ARM64_CB_BIT
#error "cpucaps have overflown ARM64_CB_BIT"
#endif
21 
22 #ifndef BUILD_FIPS140_KO
23 #ifndef __ASSEMBLY__
24 
25 #include <linux/stringify.h>
26 
/*
 * Emit one .altinstructions entry: PC-relative offsets to the original
 * (661) and replacement (663) sequences, the cpucap that selects the
 * replacement, and the byte lengths of both sequences.
 */
#define ALTINSTR_ENTRY(cpucap)					              \
	" .word 661b - .\n"				/* label           */ \
	" .word 663f - .\n"				/* new instruction */ \
	" .hword " __stringify(cpucap) "\n"		/* cpucap          */ \
	" .byte 662b-661b\n"				/* source len      */ \
	" .byte 664f-663f\n"				/* replacement len */
33 
/*
 * As ALTINSTR_ENTRY, but the second word records the address of a patch
 * callback instead of a replacement sequence (663/664 are emitted adjacent
 * by __ALTERNATIVE_CFG_CB, so the replacement length is 0).
 */
#define ALTINSTR_ENTRY_CB(cpucap, cb)					      \
	" .word 661b - .\n"				/* label           */ \
	" .word " __stringify(cb) "- .\n"		/* callback        */ \
	" .hword " __stringify(cpucap) "\n"		/* cpucap          */ \
	" .byte 662b-661b\n"				/* source len      */ \
	" .byte 664f-663f\n"				/* replacement len */
40 
41 /*
42  * alternative assembly primitive:
43  *
44  * If any of these .org directive fail, it means that insn1 and insn2
45  * don't have the same length. This used to be written as
46  *
47  * .if ((664b-663b) != (662b-661b))
48  * 	.error "Alternatives instruction length mismatch"
49  * .endif
50  *
51  * but most assemblers die if insn1 or insn2 have a .inst. This should
52  * be fixed in a binutils release posterior to 2.25.51.0.2 (anything
53  * containing commit 4e4d08cf7399b606 or c1baaddf8861).
54  *
55  * Alternatives with callbacks do not generate replacement instructions.
56  */
#define __ALTERNATIVE_CFG(oldinstr, newinstr, cpucap, cfg_enabled)	\
	".if "__stringify(cfg_enabled)" == 1\n"				\
	"661:\n\t"							\
	oldinstr "\n"							\
	"662:\n"							\
	".pushsection .altinstructions,\"a\"\n"				\
	ALTINSTR_ENTRY(cpucap)						\
	".popsection\n"							\
	".subsection 1\n"						\
	"663:\n\t"							\
	newinstr "\n"							\
	"664:\n\t"							\
	/* each .org faults if it would move '.' backwards: lengths equal */ \
	".org	. - (664b-663b) + (662b-661b)\n\t"			\
	".org	. - (662b-661b) + (664b-663b)\n\t"			\
	".previous\n"							\
	".endif\n"
73 
/*
 * Callback variant: no replacement sequence is emitted (663/664 coincide,
 * giving a replacement length of 0); the callback recorded by
 * ALTINSTR_ENTRY_CB patches the 661..662 range at runtime.
 */
#define __ALTERNATIVE_CFG_CB(oldinstr, cpucap, cfg_enabled, cb)	\
	".if "__stringify(cfg_enabled)" == 1\n"				\
	"661:\n\t"							\
	oldinstr "\n"							\
	"662:\n"							\
	".pushsection .altinstructions,\"a\"\n"				\
	ALTINSTR_ENTRY_CB(cpucap, cb)					\
	".popsection\n"							\
	"663:\n\t"							\
	"664:\n\t"							\
	".endif\n"
85 
/* Resolve the CONFIG_ argument to 0/1 before emitting the alternative. */
#define _ALTERNATIVE_CFG(oldinstr, newinstr, cpucap, cfg, ...)	\
	__ALTERNATIVE_CFG(oldinstr, newinstr, cpucap, IS_ENABLED(cfg))

/*
 * Callback alternative: tag the cpucap with the callback bit, written as
 * (1 << ARM64_CB_SHIFT) rather than ARM64_CB_BIT for old binutils (see
 * the comment at the top of this file).
 */
#define ALTERNATIVE_CB(oldinstr, cpucap, cb) \
	__ALTERNATIVE_CFG_CB(oldinstr, (1 << ARM64_CB_SHIFT) | (cpucap), 1, cb)
91 #else
92 
93 #include <asm/assembler.h>
94 
/*
 * Emit one .altinstructions entry (assembly flavour of ALTINSTR_ENTRY).
 */
.macro altinstruction_entry orig_offset alt_offset cpucap orig_len alt_len
	.word \orig_offset - .			/* original sequence */
	.word \alt_offset - .			/* replacement (or callback) */
	.hword (\cpucap)			/* cpucap, may carry the CB bit */
	.byte \orig_len				/* source len */
	.byte \alt_len				/* replacement len */
.endm
102 
/*
 * Emit \insn1 inline and \insn2 out of line (subsection 1), with a patch
 * entry selecting \insn2 when \cap is detected. \enable gates the whole
 * sequence at assembly time (e.g. IS_ENABLED(CONFIG_FOO)).
 */
.macro alternative_insn insn1, insn2, cap, enable = 1
	.if \enable
661:	\insn1
662:	.pushsection .altinstructions, "a"
	altinstruction_entry 661b, 663f, \cap, 662b-661b, 664f-663f
	.popsection
	.subsection 1
663:	\insn2
	/* both .org directives fault unless the sequences have equal length */
664:	.org	. - (664b-663b) + (662b-661b)
	.org	. - (662b-661b) + (664b-663b)
	.previous
	.endif
.endm
116 
117 /*
118  * Alternative sequences
119  *
120  * The code for the case where the capability is not present will be
121  * assembled and linked as normal. There are no restrictions on this
122  * code.
123  *
124  * The code for the case where the capability is present will be
125  * assembled into a special section to be used for dynamic patching.
126  * Code for that case must:
127  *
128  * 1. Be exactly the same length (in bytes) as the default code
129  *    sequence.
130  *
131  * 2. Not contain a branch target that is used outside of the
132  *    alternative sequence it is defined in (branches into an
133  *    alternative sequence are not fixed up).
134  */
135 
136 /*
137  * Begin an alternative code sequence.
138  */
.macro alternative_if_not cap
	.set .Lasm_alt_mode, 0			/* default code is inline */
	.pushsection .altinstructions, "a"
	altinstruction_entry 661f, 663f, \cap, 662f-661f, 664f-663f
	.popsection
661:
.endm
146 
.macro alternative_if cap
	.set .Lasm_alt_mode, 1			/* "if" body lives out of line */
	.pushsection .altinstructions, "a"
	/* orig/alt swapped vs alternative_if_not: the out-of-line 661..662
	 * body is patched in over the inline 663..664 body when \cap is set */
	altinstruction_entry 663f, 661f, \cap, 664f-663f, 662f-661f
	.popsection
	.subsection 1
	.align 2	/* So GAS knows label 661 is suitably aligned */
661:
.endm
156 
.macro alternative_cb cap, cb
	.set .Lasm_alt_mode, 0
	.pushsection .altinstructions, "a"
	/* replacement length 0: \cb patches the 661..662 range itself */
	altinstruction_entry 661f, \cb, (1 << ARM64_CB_SHIFT) | \cap, 662f-661f, 0
	.popsection
661:
.endm
164 
165 /*
166  * Provide the other half of the alternative code sequence.
167  */
.macro alternative_else
662:
	.if .Lasm_alt_mode==0
	.subsection 1			/* rest goes out of line (replacement) */
	.else
	.previous			/* back inline (default half) */
	.endif
663:
.endm
177 
178 /*
179  * Complete an alternative code sequence.
180  */
.macro alternative_endif
664:
	/* both .org directives fault unless the two halves have equal length */
	.org	. - (664b-663b) + (662b-661b)
	.org	. - (662b-661b) + (664b-663b)
	.if .Lasm_alt_mode==0
	.previous			/* leave subsection 1 */
	.endif
.endm
189 
190 /*
191  * Callback-based alternative epilogue
192  */
.macro alternative_cb_end
662:	/* end of the patchable region opened by alternative_cb */
.endm
196 
197 /*
198  * Provides a trivial alternative or default sequence consisting solely
199  * of NOPs. The number of NOPs is chosen automatically to match the
200  * previous case.
201  */
.macro alternative_else_nop_endif
alternative_else
	/* one NOP per instruction in the 661..662 default sequence */
	nops	(662b-661b) / AARCH64_INSN_SIZE
alternative_endif
.endm
207 
/* Preprocessor glue so ALTERNATIVE() also works from .S files. */
#define _ALTERNATIVE_CFG(insn1, insn2, cap, cfg, ...)	\
	alternative_insn insn1, insn2, cap, IS_ENABLED(cfg)
210 
211 #endif  /*  __ASSEMBLY__  */
212 
213 /*
214  * Usage: asm(ALTERNATIVE(oldinstr, newinstr, cpucap));
215  *
216  * Usage: asm(ALTERNATIVE(oldinstr, newinstr, cpucap, CONFIG_FOO));
217  * N.B. If CONFIG_FOO is specified, but not selected, the whole block
218  *      will be omitted, including oldinstr.
219  */
/* The trailing 1 is the default 'cfg' when no CONFIG_ argument is given. */
#define ALTERNATIVE(oldinstr, newinstr, ...)   \
	_ALTERNATIVE_CFG(oldinstr, newinstr, __VA_ARGS__, 1)
222 
223 #ifndef __ASSEMBLY__
224 
225 #include <linux/types.h>
226 
/*
 * Test a cpucap, optimised for it being present: the default instruction
 * is a branch to the "false" label, so this returns false until (unless)
 * the alternative is patched in, after which it falls through to true.
 */
static __always_inline bool
alternative_has_cap_likely(const unsigned long cpucap)
{
	/* Compile away entirely when the cap is configured out. */
	if (!cpucap_is_possible(cpucap))
		return false;

	asm goto(
#ifdef BUILD_VDSO
	/* vDSO variant: replace the branch with a plain NOP. */
	ALTERNATIVE("b	%l[l_no]", "nop", %[cpucap])
#else
	/* alt_cb_patch_nops presumably NOPs out the branch — defined elsewhere */
	ALTERNATIVE_CB("b	%l[l_no]", %[cpucap], alt_cb_patch_nops)
#endif
	:
	: [cpucap] "i" (cpucap)
	:
	: l_no);

	return true;
l_no:
	return false;
}
248 
249 static __always_inline bool
alternative_has_cap_unlikely(const unsigned long cpucap)250 alternative_has_cap_unlikely(const unsigned long cpucap)
251 {
252 	if (!cpucap_is_possible(cpucap))
253 		return false;
254 
255 	asm goto(
256 	ALTERNATIVE("nop", "b	%l[l_yes]", %[cpucap])
257 	:
258 	: [cpucap] "i" (cpucap)
259 	:
260 	: l_yes);
261 
262 	return false;
263 l_yes:
264 	return true;
265 }
266 
267 #endif /* __ASSEMBLY__ */
268 
269 #else
270 
271 /*
272  * The FIPS140 module does not support alternatives patching, as this
273  * invalidates the HMAC digest of the .text section. However, some alternatives
274  * are known to be irrelevant so we can tolerate them in the FIPS140 module, as
275  * they will never be applied in the first place in the use cases that the
276  * FIPS140 module targets (Android running on a production phone). Any other
277  * uses of alternatives should be avoided, as it is not safe in the general
278  * case to simply use the default sequence in one place (the fips module) and
279  * the alternative sequence everywhere else.
280  *
281  * Below is an allowlist of cpucaps that we can ignore, by simply taking the
282  * safe default instruction sequence. Note that this implies that the FIPS140
283  * module is not compatible with VHE, or with pseudo-NMI support.
284  */
285 
/* Allowlisted caps expand to "0," so _ALTERNATIVE() below picks oldinstr. */
#define __ALT_ARM64_HAS_LDAPR			0,
#define __ALT_ARM64_HAS_VIRT_HOST_EXTN		0,
#define __ALT_ARM64_HAS_GIC_PRIO_MASKING	0,
#define __ALT_ARM64_HAS_GIC_PRIO_RELAXED_SYNC	0,
290 
#define ALTERNATIVE(oldinstr, newinstr, cpucap, ...)   \
	_ALTERNATIVE(oldinstr, __ALT_ ## cpucap, #cpucap)

#define ALTERNATIVE_CB(oldinstr, cpucap, cb)	\
	_ALTERNATIVE(oldinstr, __ALT_ ## cpucap, #cpucap)

/*
 * If 'cpucap' expanded to "0," (allowlisted above), the argument list
 * shifts and __take_second_arg() yields oldinstr; otherwise it yields the
 * .err directive, failing the build for unsupported caps.
 */
#define _ALTERNATIVE(oldinstr, cpucap, cpucap_str)   \
	__take_second_arg(cpucap oldinstr, \
		".err CPU capability " cpucap_str " not supported in fips140 module")
300 
301 #ifndef __ASSEMBLY__
302 
303 #include <linux/types.h>
304 
/* Forward declaration; the slow-path implementation lives elsewhere. */
static bool __maybe_unused cpus_have_cap(unsigned int num);
306 
307 /*
308  * Return 'false' for all capabilities listed above, and use the slow path for
309  * the remaining ones. This ensures that the FIPS140 module is consistent with
310  * itself for all capabilities, and with the rest of the kernel at least for the
311  * ones not listed.
312  */
/* Allowlisted caps ("0," expansion) become compile-time false; all other
 * caps fall back to the cpus_have_cap() slow path. */
#define __alternative_has_cap(cpucap, altcap) \
	__take_second_arg(altcap false, cpus_have_cap(cpucap))

#define alternative_has_cap_likely(cpucap) \
	__alternative_has_cap(cpucap, __ALT_ ## cpucap)

#define alternative_has_cap_unlikely alternative_has_cap_likely
320 
321 #endif /* !__ASSEMBLY__ */
322 
323 #endif /* BUILD_FIPS140_KO */
324 
325 #endif /* __ASM_ALTERNATIVE_MACROS_H */
326