• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright (c) 2016-2020, ARM Limited and Contributors. All rights reserved.
3  *
4  * SPDX-License-Identifier: BSD-3-Clause
5  */
6 
7 #ifndef ARCH_HELPERS_H
8 #define ARCH_HELPERS_H
9 
10 #include <cdefs.h>
11 #include <stdbool.h>
12 #include <stdint.h>
13 #include <string.h>
14 
15 #include <arch.h>
16 
17 /**********************************************************************
18  * Macros which create inline functions to read or write CPU system
19  * registers
20  *********************************************************************/
21 
/*
 * Expands to "static inline void write_<_name>(u_register_t v)" which
 * issues an MCR instruction (32-bit coprocessor register write) with the
 * given encoding (coproc, opc1, CRn, CRm, opc2).
 */
22 #define _DEFINE_COPROCR_WRITE_FUNC(_name, coproc, opc1, CRn, CRm, opc2)	\
23 static inline void write_## _name(u_register_t v)			\
24 {									\
25 	__asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
26 }
27 
/*
 * Expands to "static inline u_register_t read_<_name>(void)" which issues
 * an MRC instruction (32-bit coprocessor register read) with the given
 * encoding.
 */
28 #define _DEFINE_COPROCR_READ_FUNC(_name, coproc, opc1, CRn, CRm, opc2)	\
29 static inline u_register_t read_ ## _name(void)				\
30 {									\
31 	u_register_t v;							\
32 	__asm__ volatile ("mrc "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : "=r" (v));\
33 	return v;							\
34 }
35 
36 /*
37  * The undocumented %Q and %R extended asm operand modifiers are used to
38  * implement the below 64 bit `mrrc` and `mcrr` instructions. %Q names
 * the register holding the least significant half of the 64-bit operand
 * and %R the register holding the most significant half.
39  */
40 
41 #define _DEFINE_COPROCR_WRITE_FUNC_64(_name, coproc, opc1, CRm)		\
42 static inline void write64_## _name(uint64_t v)				\
43 {									\
44 	__asm__ volatile ("mcrr "#coproc","#opc1", %Q0, %R0,"#CRm : : "r" (v));\
45 }
46 
47 #define _DEFINE_COPROCR_READ_FUNC_64(_name, coproc, opc1, CRm)		\
48 static inline uint64_t read64_## _name(void)				\
49 {	uint64_t v;							\
50 	__asm__ volatile ("mrrc "#coproc","#opc1", %Q0, %R0,"#CRm : "=r" (v));\
51 	return v;							\
52 }
53 
/* Emit "read_<_name>()" reading the named AArch32 register via MRS. */
54 #define _DEFINE_SYSREG_READ_FUNC(_name, _reg_name)			\
55 static inline u_register_t read_ ## _name(void)				\
56 {									\
57 	u_register_t v;							\
58 	__asm__ volatile ("mrs %0, " #_reg_name : "=r" (v));		\
59 	return v;							\
60 }
61 
/* Emit "write_<_name>(v)" writing the named AArch32 register via MSR. */
62 #define _DEFINE_SYSREG_WRITE_FUNC(_name, _reg_name)			\
63 static inline void write_ ## _name(u_register_t v)			\
64 {									\
65 	__asm__ volatile ("msr " #_reg_name ", %0" : : "r" (v));	\
66 }
67 
/*
 * Like _DEFINE_SYSREG_WRITE_FUNC but the value must be a compile-time
 * constant (the "i" immediate constraint).
 */
68 #define _DEFINE_SYSREG_WRITE_CONST_FUNC(_name, _reg_name)		\
69 static inline void write_ ## _name(const u_register_t v)		\
70 {									\
71 	__asm__ volatile ("msr " #_reg_name ", %0" : : "i" (v));	\
72 }
73 
/*
 * Public wrappers around the primitives above. The variadic arguments
 * carry the coprocessor encoding (coproc, opc1, CRn, CRm, opc2 — or
 * coproc, opc1, CRm for the 64-bit forms) and are forwarded unchanged.
 */
74 /* Define read function for coproc register */
75 #define DEFINE_COPROCR_READ_FUNC(_name, ...) 				\
76 	_DEFINE_COPROCR_READ_FUNC(_name, __VA_ARGS__)
77 
78 /* Define write function for coproc register */
79 #define DEFINE_COPROCR_WRITE_FUNC(_name, ...) 				\
80 	_DEFINE_COPROCR_WRITE_FUNC(_name, __VA_ARGS__)
81 
82 /* Define read & write function for coproc register */
83 #define DEFINE_COPROCR_RW_FUNCS(_name, ...) 				\
84 	_DEFINE_COPROCR_READ_FUNC(_name, __VA_ARGS__)			\
85 	_DEFINE_COPROCR_WRITE_FUNC(_name, __VA_ARGS__)
86 
87 /* Define 64 bit read function for coproc register */
88 #define DEFINE_COPROCR_READ_FUNC_64(_name, ...) 			\
89 	_DEFINE_COPROCR_READ_FUNC_64(_name, __VA_ARGS__)
90 
91 /* Define 64 bit write function for coproc register */
92 #define DEFINE_COPROCR_WRITE_FUNC_64(_name, ...) 			\
93 	_DEFINE_COPROCR_WRITE_FUNC_64(_name, __VA_ARGS__)
94 
95 /* Define 64 bit read & write function for coproc register */
96 #define DEFINE_COPROCR_RW_FUNCS_64(_name, ...) 				\
97 	_DEFINE_COPROCR_READ_FUNC_64(_name, __VA_ARGS__)		\
98 	_DEFINE_COPROCR_WRITE_FUNC_64(_name, __VA_ARGS__)
99 
/* Define read & write functions for a register accessed via MRS/MSR */
100 /* Define read & write function for system register */
101 #define DEFINE_SYSREG_RW_FUNCS(_name)					\
102 	_DEFINE_SYSREG_READ_FUNC(_name, _name)				\
103 	_DEFINE_SYSREG_WRITE_FUNC(_name, _name)
104 
105 /**********************************************************************
106  * Macros to create inline functions for tlbi operations
107  *********************************************************************/
108 
/*
 * tlbi<_op>(): TLB maintenance operation with no argument. The MCR still
 * requires a source register; its value is ignored by the operation, so
 * zero is written.
 */
109 #define _DEFINE_TLBIOP_FUNC(_op, coproc, opc1, CRn, CRm, opc2)		\
110 static inline void tlbi##_op(void)					\
111 {									\
112 	u_register_t v = 0;						\
113 	__asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
114 }
115 
/*
 * bpi<_op>(): branch predictor maintenance operation; the register
 * operand is ignored, so zero is written.
 */
116 #define _DEFINE_BPIOP_FUNC(_op, coproc, opc1, CRn, CRm, opc2)		\
117 static inline void bpi##_op(void)					\
118 {									\
119 	u_register_t v = 0;						\
120 	__asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
121 }
122 
/*
 * tlbi<_op>(v): TLB maintenance operation taking the operation's register
 * operand (e.g. a virtual address) in v.
 */
123 #define _DEFINE_TLBIOP_PARAM_FUNC(_op, coproc, opc1, CRn, CRm, opc2)	\
124 static inline void tlbi##_op(u_register_t v)				\
125 {									\
126 	__asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
127 }
128 
129 /* Define function for simple TLBI operation */
130 #define DEFINE_TLBIOP_FUNC(_op, ...)					\
131 	_DEFINE_TLBIOP_FUNC(_op, __VA_ARGS__)
132 
133 /* Define function for TLBI operation with register parameter */
134 #define DEFINE_TLBIOP_PARAM_FUNC(_op, ...)				\
135 	_DEFINE_TLBIOP_PARAM_FUNC(_op, __VA_ARGS__)
136 
137 /* Define function for simple BPI operation */
138 #define DEFINE_BPIOP_FUNC(_op, ...)					\
139 	_DEFINE_BPIOP_FUNC(_op, __VA_ARGS__)
140 
141 /**********************************************************************
142  * Macros to create inline functions for DC operations
143  *********************************************************************/
/* dc<_op>(v): data cache maintenance by VA, address passed in v. */
144 #define _DEFINE_DCOP_PARAM_FUNC(_op, coproc, opc1, CRn, CRm, opc2)	\
145 static inline void dc##_op(u_register_t v)				\
146 {									\
147 	__asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
148 }
149 
150 /* Define function for DC operation with register parameter */
151 #define DEFINE_DCOP_PARAM_FUNC(_op, ...)				\
152 	_DEFINE_DCOP_PARAM_FUNC(_op, __VA_ARGS__)
153 
154 /**********************************************************************
155  * Macros to create inline functions for system instructions
156  *********************************************************************/
157  /* Define function for simple system instruction */
/*
 * The macro name is stringified into the instruction mnemonic (wfi, wfe,
 * sev, isb below). No "volatile" needed: an asm with no output operands
 * is implicitly volatile per the GCC extended-asm rules.
 */
158 #define DEFINE_SYSOP_FUNC(_op)						\
159 static inline void _op(void)						\
160 {									\
161 	__asm__ (#_op);							\
162 }
163 
164 
165 /* Define function for system instruction with type specifier */
/* e.g. DEFINE_SYSOP_TYPE_FUNC(dsb, sy) emits dsbsy() -> "dsb sy". */
166 #define DEFINE_SYSOP_TYPE_FUNC(_op, _type)				\
167 static inline void _op ## _type(void)					\
168 {									\
169 	__asm__ (#_op " " #_type : : : "memory");			\
170 }
171 
172 /* Define function for system instruction with register parameter */
173 #define DEFINE_SYSOP_TYPE_PARAM_FUNC(_op, _type)			\
174 static inline void _op ## _type(u_register_t v)				\
175 {									\
176 	 __asm__ (#_op " " #_type ", %0" : : "r" (v));			\
177 }
178 
/*
 * Data cache maintenance by virtual-address range; implemented outside
 * this header (presumably in assembly — verify against the build).
 */
179 void flush_dcache_range(uintptr_t addr, size_t size);
180 void clean_dcache_range(uintptr_t addr, size_t size);
181 void inv_dcache_range(uintptr_t addr, size_t size);
182 bool is_dcache_enabled(void);
183 
/* Data cache maintenance by set/way: to Level of Unification (louis)
 * or to all levels; op_type selects clean/invalidate/clean+invalidate. */
184 void dcsw_op_louis(u_register_t op_type);
185 void dcsw_op_all(u_register_t op_type);
186 
/* Disable the MMU (and optionally the I-cache) for the Secure state. */
187 void disable_mmu_secure(void);
188 void disable_mmu_icache_secure(void);
189 
/* Wait-for-interrupt / wait-for-event / send-event wrappers. */
190 DEFINE_SYSOP_FUNC(wfi)
191 DEFINE_SYSOP_FUNC(wfe)
192 DEFINE_SYSOP_FUNC(sev)
/* Full-system and store-only barriers. */
193 DEFINE_SYSOP_TYPE_FUNC(dsb, sy)
194 DEFINE_SYSOP_TYPE_FUNC(dmb, sy)
195 DEFINE_SYSOP_TYPE_FUNC(dmb, st)
196 
197 /* dmb ld is not valid for armv7/thumb machines */
198 #if ARM_ARCH_MAJOR != 7
199 DEFINE_SYSOP_TYPE_FUNC(dmb, ld)
200 #endif
201 
/* Inner-shareable-domain barriers. */
202 DEFINE_SYSOP_TYPE_FUNC(dsb, ish)
203 DEFINE_SYSOP_TYPE_FUNC(dsb, ishst)
204 DEFINE_SYSOP_TYPE_FUNC(dmb, ish)
205 DEFINE_SYSOP_TYPE_FUNC(dmb, ishst)
206 DEFINE_SYSOP_FUNC(isb)
207 
/*
 * Issue an SMC with up to eight 32-bit arguments. Declared __dead2,
 * i.e. it is not expected to return to the caller.
 */
208 void __dead2 smc(uint32_t r0, uint32_t r1, uint32_t r2, uint32_t r3,
209 		 uint32_t r4, uint32_t r5, uint32_t r6, uint32_t r7);
211 DEFINE_SYSREG_RW_FUNCS(spsr)
DEFINE_SYSREG_RW_FUNCS(cpsr)212 DEFINE_SYSREG_RW_FUNCS(cpsr)
213 
214 /*******************************************************************************
215  * System register accessor prototypes
216  ******************************************************************************/
/* Read-only identification and status registers. */
217 DEFINE_COPROCR_READ_FUNC(mpidr, MPIDR)
218 DEFINE_COPROCR_READ_FUNC(midr, MIDR)
219 DEFINE_COPROCR_READ_FUNC(id_mmfr4, ID_MMFR4)
220 DEFINE_COPROCR_READ_FUNC(id_pfr0, ID_PFR0)
221 DEFINE_COPROCR_READ_FUNC(id_pfr1, ID_PFR1)
222 DEFINE_COPROCR_READ_FUNC(isr, ISR)
223 DEFINE_COPROCR_READ_FUNC(clidr, CLIDR)
224 DEFINE_COPROCR_READ_FUNC_64(cntpct, CNTPCT_64)
225 
/* Read/write control, translation and timer registers. */
226 DEFINE_COPROCR_RW_FUNCS(scr, SCR)
227 DEFINE_COPROCR_RW_FUNCS(ctr, CTR)
228 DEFINE_COPROCR_RW_FUNCS(sctlr, SCTLR)
229 DEFINE_COPROCR_RW_FUNCS(actlr, ACTLR)
230 DEFINE_COPROCR_RW_FUNCS(hsctlr, HSCTLR)
231 DEFINE_COPROCR_RW_FUNCS(hcr, HCR)
232 DEFINE_COPROCR_RW_FUNCS(hcptr, HCPTR)
233 DEFINE_COPROCR_RW_FUNCS(cntfrq, CNTFRQ)
234 DEFINE_COPROCR_RW_FUNCS(cnthctl, CNTHCTL)
235 DEFINE_COPROCR_RW_FUNCS(mair0, MAIR0)
236 DEFINE_COPROCR_RW_FUNCS(mair1, MAIR1)
237 DEFINE_COPROCR_RW_FUNCS(hmair0, HMAIR0)
238 DEFINE_COPROCR_RW_FUNCS(ttbcr, TTBCR)
239 DEFINE_COPROCR_RW_FUNCS(htcr, HTCR)
240 DEFINE_COPROCR_RW_FUNCS(ttbr0, TTBR0)
241 DEFINE_COPROCR_RW_FUNCS_64(ttbr0, TTBR0_64)
242 DEFINE_COPROCR_RW_FUNCS(ttbr1, TTBR1)
243 DEFINE_COPROCR_RW_FUNCS_64(httbr, HTTBR_64)
244 DEFINE_COPROCR_RW_FUNCS(vpidr, VPIDR)
245 DEFINE_COPROCR_RW_FUNCS(vmpidr, VMPIDR)
246 DEFINE_COPROCR_RW_FUNCS_64(vttbr, VTTBR_64)
247 DEFINE_COPROCR_RW_FUNCS_64(ttbr1, TTBR1_64)
248 DEFINE_COPROCR_RW_FUNCS_64(cntvoff, CNTVOFF_64)
249 DEFINE_COPROCR_RW_FUNCS(csselr, CSSELR)
250 DEFINE_COPROCR_RW_FUNCS(hstr, HSTR)
251 DEFINE_COPROCR_RW_FUNCS(cnthp_ctl_el2, CNTHP_CTL)
252 DEFINE_COPROCR_RW_FUNCS(cnthp_tval_el2, CNTHP_TVAL)
253 DEFINE_COPROCR_RW_FUNCS_64(cnthp_cval_el2, CNTHP_CVAL_64)
254 
/*
 * Field extract/update helpers for a CNTP_CTL register value held in x.
 * The get_ forms return the field; the set_/clr_ forms modify x in place
 * (x must be an lvalue).
 */
255 #define get_cntp_ctl_enable(x)  (((x) >> CNTP_CTL_ENABLE_SHIFT) & \
256 					 CNTP_CTL_ENABLE_MASK)
257 #define get_cntp_ctl_imask(x)   (((x) >> CNTP_CTL_IMASK_SHIFT) & \
258 					 CNTP_CTL_IMASK_MASK)
259 #define get_cntp_ctl_istatus(x) (((x) >> CNTP_CTL_ISTATUS_SHIFT) & \
260 					 CNTP_CTL_ISTATUS_MASK)
261 
262 #define set_cntp_ctl_enable(x)  ((x) |= U(1) << CNTP_CTL_ENABLE_SHIFT)
263 #define set_cntp_ctl_imask(x)   ((x) |= U(1) << CNTP_CTL_IMASK_SHIFT)
264 
265 #define clr_cntp_ctl_enable(x)  ((x) &= ~(U(1) << CNTP_CTL_ENABLE_SHIFT))
266 #define clr_cntp_ctl_imask(x)   ((x) &= ~(U(1) << CNTP_CTL_IMASK_SHIFT))
267 
/* GIC CPU interface (ICC_*) register accessors. */
268 DEFINE_COPROCR_RW_FUNCS(icc_sre_el1, ICC_SRE)
269 DEFINE_COPROCR_RW_FUNCS(icc_sre_el2, ICC_HSRE)
270 DEFINE_COPROCR_RW_FUNCS(icc_sre_el3, ICC_MSRE)
271 DEFINE_COPROCR_RW_FUNCS(icc_pmr_el1, ICC_PMR)
272 DEFINE_COPROCR_RW_FUNCS(icc_rpr_el1, ICC_RPR)
273 DEFINE_COPROCR_RW_FUNCS(icc_igrpen1_el3, ICC_MGRPEN1)
274 DEFINE_COPROCR_RW_FUNCS(icc_igrpen1_el1, ICC_IGRPEN1)
275 DEFINE_COPROCR_RW_FUNCS(icc_igrpen0_el1, ICC_IGRPEN0)
276 DEFINE_COPROCR_RW_FUNCS(icc_hppir0_el1, ICC_HPPIR0)
277 DEFINE_COPROCR_RW_FUNCS(icc_hppir1_el1, ICC_HPPIR1)
278 DEFINE_COPROCR_RW_FUNCS(icc_iar0_el1, ICC_IAR0)
279 DEFINE_COPROCR_RW_FUNCS(icc_iar1_el1, ICC_IAR1)
280 DEFINE_COPROCR_RW_FUNCS(icc_eoir0_el1, ICC_EOIR0)
281 DEFINE_COPROCR_RW_FUNCS(icc_eoir1_el1, ICC_EOIR1)
282 DEFINE_COPROCR_RW_FUNCS_64(icc_sgi0r_el1, ICC_SGI0R_EL1_64)
283 DEFINE_COPROCR_WRITE_FUNC_64(icc_sgi1r, ICC_SGI1R_EL1_64)
284 
/* Debug/timer control and performance monitor registers. */
285 DEFINE_COPROCR_RW_FUNCS(hdcr, HDCR)
286 DEFINE_COPROCR_RW_FUNCS(cnthp_ctl, CNTHP_CTL)
287 DEFINE_COPROCR_READ_FUNC(pmcr, PMCR)
288 
289 /*
290  * Address translation
291  */
292 DEFINE_COPROCR_WRITE_FUNC(ats1cpr, ATS1CPR)
293 DEFINE_COPROCR_WRITE_FUNC(ats1hr, ATS1HR)
294 DEFINE_COPROCR_RW_FUNCS_64(par, PAR_64)
295 
296 DEFINE_COPROCR_RW_FUNCS(nsacr, NSACR)
297 
298 /* AArch32 coproc registers for 32bit MMU descriptor support */
299 DEFINE_COPROCR_RW_FUNCS(prrr, PRRR)
300 DEFINE_COPROCR_RW_FUNCS(nmrr, NMRR)
301 DEFINE_COPROCR_RW_FUNCS(dacr, DACR)
302 
303 /* Coproc registers for 32bit AMU support */
304 DEFINE_COPROCR_READ_FUNC(amcfgr, AMCFGR)
305 DEFINE_COPROCR_READ_FUNC(amcgcr, AMCGCR)
306 
307 DEFINE_COPROCR_RW_FUNCS(amcntenset0, AMCNTENSET0)
308 DEFINE_COPROCR_RW_FUNCS(amcntenset1, AMCNTENSET1)
309 DEFINE_COPROCR_RW_FUNCS(amcntenclr0, AMCNTENCLR0)
310 DEFINE_COPROCR_RW_FUNCS(amcntenclr1, AMCNTENCLR1)
311 
312 /* Coproc registers for 64bit AMU support */
313 DEFINE_COPROCR_RW_FUNCS_64(amevcntr00, AMEVCNTR00)
314 DEFINE_COPROCR_RW_FUNCS_64(amevcntr01, AMEVCNTR01)
315 DEFINE_COPROCR_RW_FUNCS_64(amevcntr02, AMEVCNTR02)
316 DEFINE_COPROCR_RW_FUNCS_64(amevcntr03, AMEVCNTR03)
317 
318 /*
319  * TLBI operation prototypes
320  */
321 DEFINE_TLBIOP_FUNC(all, TLBIALL)
322 DEFINE_TLBIOP_FUNC(allis, TLBIALLIS)
323 DEFINE_TLBIOP_PARAM_FUNC(mva, TLBIMVA)
324 DEFINE_TLBIOP_PARAM_FUNC(mvaa, TLBIMVAA)
325 DEFINE_TLBIOP_PARAM_FUNC(mvaais, TLBIMVAAIS)
326 DEFINE_TLBIOP_PARAM_FUNC(mvahis, TLBIMVAHIS)
327 
328 /*
329  * BPI operation prototypes.
330  */
331 DEFINE_BPIOP_FUNC(allis, BPIALLIS)
332 
333 /*
334  * DC operation prototypes
335  */
336 DEFINE_DCOP_PARAM_FUNC(civac, DCCIMVAC)
337 DEFINE_DCOP_PARAM_FUNC(ivac, DCIMVAC)
/*
 * With any of these Cortex-A53 errata workarounds enabled, the plain
 * clean (DCCMVAC) is upgraded to clean+invalidate (DCCIMVAC).
 */
338 #if ERRATA_A53_819472 || ERRATA_A53_824069 || ERRATA_A53_827319
339 DEFINE_DCOP_PARAM_FUNC(cvac, DCCIMVAC)
340 #else
341 DEFINE_DCOP_PARAM_FUNC(cvac, DCCMVAC)
342 #endif
343 
344 /*
345  * DynamIQ Shared Unit power management
346  */
347 DEFINE_COPROCR_RW_FUNCS(clusterpwrdn, CLUSTERPWRDN)
348 
349 /* Previously defined accessor functions with incomplete register names  */
350 #define dsb()			dsbsy()
351 #define dmb()			dmbsy()
352 
353 /* dmb ld is not valid for armv7/thumb machines, so alias it to dmb */
354 #if ARM_ARCH_MAJOR == 7
355 #define	dmbld()			dmb()
356 #endif
357 
/* The core is in the Secure state when SCR.NS is 0. */
358 #define IS_IN_SECURE() \
359 	(GET_NS_BIT(read_scr()) == 0)
360 
/* Mode checks based on the CPSR.M mode field. */
361 #define IS_IN_HYP()	(GET_M32(read_cpsr()) == MODE32_hyp)
362 #define IS_IN_SVC()	(GET_M32(read_cpsr()) == MODE32_svc)
363 #define IS_IN_MON()	(GET_M32(read_cpsr()) == MODE32_mon)
364 #define IS_IN_EL2()	IS_IN_HYP()
365 /* If EL3 is AArch32, then secure PL1 and monitor mode correspond to EL3 */
366 #define IS_IN_EL3() \
367 	((GET_M32(read_cpsr()) == MODE32_mon) ||	\
368 		(IS_IN_SECURE() && (GET_M32(read_cpsr()) != MODE32_usr)))
369 
/*
 * Return the exception level the core is currently executing at (3, 2
 * or 1), derived from the CPSR mode field (and SCR.NS for secure PL1).
 */
static inline unsigned int get_current_el(void)
{
	unsigned int el = 1U;

	if (IS_IN_EL3()) {
		el = 3U;
	} else if (IS_IN_EL2()) {
		el = 2U;
	}

	return el;
}
380 
381 /* Macros for compatibility with AArch64 system registers */
/*
 * These map the AArch64-style names used by common code onto the
 * AArch32 accessors defined above.
 * NOTE(review): read_cpacr()/write_cpacr() are not defined in the
 * visible part of this header — verify they are provided elsewhere.
 */
382 #define read_mpidr_el1()	read_mpidr()
383 
384 #define read_scr_el3()		read_scr()
385 #define write_scr_el3(_v)	write_scr(_v)
386 
387 #define read_hcr_el2()		read_hcr()
388 #define write_hcr_el2(_v)	write_hcr(_v)
389 
390 #define read_cpacr_el1()	read_cpacr()
391 #define write_cpacr_el1(_v)	write_cpacr(_v)
392 
393 #define read_cntfrq_el0()	read_cntfrq()
394 #define write_cntfrq_el0(_v)	write_cntfrq(_v)
395 #define read_isr_el1()		read_isr()
396 
397 #define read_cntpct_el0()	read64_cntpct()
398 
399 #define read_ctr_el0()		read_ctr()
400 
401 #define write_icc_sgi0r_el1(_v)	write64_icc_sgi0r_el1(_v)
402 
/* On AArch32 the DAIF-equivalent mask bits live in the CPSR. */
403 #define read_daif()		read_cpsr()
404 #define write_daif(flags)	write_cpsr(flags)
405 
406 #define read_cnthp_cval_el2()	read64_cnthp_cval_el2()
407 #define write_cnthp_cval_el2(v)	write64_cnthp_cval_el2(v)
408 
409 #define read_amcntenset0_el0()	read_amcntenset0()
410 #define read_amcntenset1_el0()	read_amcntenset1()
411 
412 /* Helper functions to manipulate CPSR */
enable_irq(void)413 static inline void enable_irq(void)
414 {
415 	/*
416 	 * The compiler memory barrier will prevent the compiler from
417 	 * scheduling non-volatile memory access after the write to the
418 	 * register.
419 	 *
420 	 * This could happen if some initialization code issues non-volatile
421 	 * accesses to an area used by an interrupt handler, in the assumption
422 	 * that it is safe as the interrupts are disabled at the time it does
423 	 * that (according to program order). However, non-volatile accesses
424 	 * are not necessarily in program order relatively with volatile inline
425 	 * assembly statements (and volatile accesses).
426 	 */
427 	COMPILER_BARRIER();
428 	__asm__ volatile ("cpsie	i");
429 	isb();
430 }
431 
/* Unmask asynchronous aborts (cpsie a clears the CPSR.A mask bit). */
static inline void enable_serror(void)
{
	/* See the ordering rationale in enable_irq(). */
	COMPILER_BARRIER();
	__asm__ volatile ("cpsie	a");
	isb();
}
438 
/* Unmask FIQ exceptions (cpsie f clears the CPSR.F mask bit). */
static inline void enable_fiq(void)
{
	COMPILER_BARRIER();
	__asm__ volatile ("cpsie	f");
	isb();
}
445 
/* Mask IRQ exceptions (cpsid i sets the CPSR.I mask bit). */
static inline void disable_irq(void)
{
	COMPILER_BARRIER();
	__asm__ volatile ("cpsid	i");
	isb();
}
452 
/* Mask asynchronous aborts (cpsid a sets the CPSR.A mask bit). */
static inline void disable_serror(void)
{
	COMPILER_BARRIER();
	__asm__ volatile ("cpsid	a");
	isb();
}
459 
/* Mask FIQ exceptions (cpsid f sets the CPSR.F mask bit). */
static inline void disable_fiq(void)
{
	COMPILER_BARRIER();
	__asm__ volatile ("cpsid	f");
	isb();
}
466 
467 #endif /* ARCH_HELPERS_H */
468