/* SPDX-License-Identifier: BSD-3-Clause */
/*
 * cache.h: Cache maintenance API for ARM
 */

#ifndef ARM_CACHE_H
#define ARM_CACHE_H

#include <stddef.h>
#include <stdint.h>

/* SCTLR bits */
#define SCTLR_M		(1 << 0)	/* MMU enable */
#define SCTLR_A		(1 << 1)	/* Alignment check enable */
#define SCTLR_C		(1 << 2)	/* Data/unified cache enable */
/* Bits 4:3 are reserved */
#define SCTLR_CP15BEN	(1 << 5)	/* CP15 barrier enable */
/* Bit 6 is reserved */
#define SCTLR_B		(1 << 7)	/* Endianness */
/* Bits 9:8 are reserved */
#define SCTLR_SW	(1 << 10)	/* SWP and SWPB enable */
#define SCTLR_Z		(1 << 11)	/* Branch prediction enable */
#define SCTLR_I		(1 << 12)	/* Instruction cache enable */
#define SCTLR_V		(1 << 13)	/* Low/high exception vectors */
#define SCTLR_RR	(1 << 14)	/* Round Robin select */
/* Bits 16:15 are reserved */
#define SCTLR_HA	(1 << 17)	/* Hardware Access flag enable */
/* Bit 18 is reserved */
/* Bits 20:19 are WXN/UWXN; reserved when the Virtualization Extensions
   are not implemented */
#define SCTLR_WXN	(1 << 19)	/* Write permission implies XN */
#define SCTLR_UWXN	(1 << 20)	/* Unprivileged write permission
					   implies PL1 XN */
#define SCTLR_FI	(1 << 21)	/* Fast interrupt config enable */
#define SCTLR_U		(1 << 22)	/* Unaligned access behavior */
#define SCTLR_VE	(1 << 24)	/* Interrupt vectors enable */
#define SCTLR_EE	(1 << 25)	/* Exception endianness */
/* Bit 26 is reserved */
#define SCTLR_NMFI	(1 << 27)	/* Non-maskable FIQ support */
#define SCTLR_TRE	(1 << 28)	/* TEX remap enable */
#define SCTLR_AFE	(1 << 29)	/* Access flag enable */
#define SCTLR_TE	(1 << 30)	/* Thumb exception enable */
/* Bit 31 is reserved */

/*
 * Sync primitives
 */

/* data memory barrier */
static inline void dmb(void)
{
	asm volatile ("dmb" : : : "memory");
}

/* data sync barrier */
static inline void dsb(void)
{
	asm volatile ("dsb" : : : "memory");
}

/* instruction sync barrier */
static inline void isb(void)
{
	asm volatile ("isb" : : : "memory");
}
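
/*
 * Example (illustrative sketch, not part of the original API): use dmb()
 * to order two normal-memory stores as seen by another coherent observer,
 * e.g. publishing a payload before setting a ready flag. The names here
 * are hypothetical.
 */
static inline void example_publish_flag(volatile uint32_t *payload,
					volatile uint32_t *ready)
{
	*payload = 0x1234;	/* produce the data */
	dmb();			/* order the payload write before the flag */
	*ready = 1;		/* consumer may now read the payload */
}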

/*
 * Low-level TLB maintenance operations
 */

/* invalidate entire unified TLB */
static inline void tlbiall(void)
{
	asm volatile ("mcr p15, 0, %0, c8, c7, 0" : : "r" (0) : "memory");
}

/* invalidate unified TLB by MVA and ASID */
static inline void tlbimva(unsigned long mva)
{
	asm volatile ("mcr p15, 0, %0, c8, c7, 1" : : "r" (mva) : "memory");
}
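
/*
 * Example sketch (hypothetical helper): after rewriting a page table
 * entry, drop the stale translation for that page and resynchronize.
 * Assumes 4KB pages; tlbimva() takes the MVA (the low bits may carry
 * the ASID on ASID-aware cores).
 */
static inline void example_invalidate_page(unsigned long vaddr)
{
	tlbimva(vaddr & ~0xfffUL);	/* discard the stale 4KB entry */
	dsb();				/* wait for the invalidation */
	isb();				/* refetch with the new mapping */
}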

/* write domain access control register (DACR) */
static inline void write_dacr(uint32_t val)
{
	asm volatile ("mcr p15, 0, %0, c3, c0, 0" : : "r" (val));
}

/* read memory model feature register 0 (MMFR0) */
static inline uint32_t read_mmfr0(void)
{
	uint32_t mmfr;
	asm volatile ("mrc p15, 0, %0, c0, c1, 4" : "=r" (mmfr));
	return mmfr;
}

/* read MAIR0 (memory attribute indirection register 0) */
static inline uint32_t read_mair0(void)
{
	uint32_t mair;
	asm volatile ("mrc p15, 0, %0, c10, c2, 0" : "=r" (mair));
	return mair;
}

/* write MAIR0 (memory attribute indirection register 0) */
static inline void write_mair0(uint32_t val)
{
	asm volatile ("mcr p15, 0, %0, c10, c2, 0" : : "r" (val));
}

/* write translation table base register 0 (TTBR0) */
static inline void write_ttbr0(uint32_t val)
{
	if (CONFIG(ARM_LPAE))
		asm volatile ("mcrr p15, 0, %[val], %[zero], c2" : :
				[val] "r" (val), [zero] "r" (0) : "memory");
	else
		asm volatile ("mcr p15, 0, %0, c2, c0, 0" : : "r" (val) : "memory");
}

/* read translation table base control register (TTBCR) */
static inline uint32_t read_ttbcr(void)
{
	uint32_t val = 0;
	asm volatile ("mrc p15, 0, %0, c2, c0, 2" : "=r" (val));
	return val;
}

/* write translation table base control register (TTBCR) */
static inline void write_ttbcr(uint32_t val)
{
	asm volatile ("mcr p15, 0, %0, c2, c0, 2" : : "r" (val) : "memory");
}
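
/*
 * Example sketch (illustrative only): install a new translation table.
 * Cached translations from the old table must be discarded before the
 * new one can be relied on. 'table_pa' is a hypothetical physical
 * address with the alignment TTBR0 requires.
 */
static inline void example_install_table(uint32_t table_pa)
{
	write_ttbr0(table_pa);	/* point table walks at the new table */
	dsb();
	tlbiall();		/* old translations are now stale */
	dsb();			/* wait for the invalidation to complete */
	isb();
}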

/*
 * Low-level cache maintenance operations
 */

/* branch predictor invalidate all */
static inline void bpiall(void)
{
	asm volatile ("mcr p15, 0, %0, c7, c5, 6" : : "r" (0));
}

/* data cache clean and invalidate by MVA to PoC */
static inline void dccimvac(unsigned long mva)
{
	asm volatile ("mcr p15, 0, %0, c7, c14, 1" : : "r" (mva) : "memory");
}

/* data cache clean and invalidate by set/way */
static inline void dccisw(uint32_t val)
{
	asm volatile ("mcr p15, 0, %0, c7, c14, 2" : : "r" (val) : "memory");
}

/* data cache clean by MVA to PoC */
static inline void dccmvac(unsigned long mva)
{
	asm volatile ("mcr p15, 0, %0, c7, c10, 1" : : "r" (mva) : "memory");
}

/* data cache clean by set/way */
static inline void dccsw(uint32_t val)
{
	asm volatile ("mcr p15, 0, %0, c7, c10, 2" : : "r" (val) : "memory");
}

/* data cache invalidate by MVA to PoC */
static inline void dcimvac(unsigned long mva)
{
	asm volatile ("mcr p15, 0, %0, c7, c6, 1" : : "r" (mva) : "memory");
}

/* data cache invalidate by set/way */
static inline void dcisw(uint32_t val)
{
	asm volatile ("mcr p15, 0, %0, c7, c6, 2" : : "r" (val) : "memory");
}

/* instruction cache invalidate all by PoU */
static inline void iciallu(void)
{
	asm volatile ("mcr p15, 0, %0, c7, c5, 0" : : "r" (0));
}
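
/*
 * Example sketch (hypothetical helper, assumes a 64-byte line size):
 * clean a buffer to the point of coherency before handing it to a
 * non-coherent DMA master. The real API below (dcache_clean_by_mva())
 * queries the actual line size instead of assuming one.
 */
static inline void example_clean_for_dma(const void *buf, size_t len)
{
	const uintptr_t line = 64;	/* assumed, not queried */
	uintptr_t addr = (uintptr_t)buf & ~(line - 1);
	uintptr_t end = (uintptr_t)buf + len;

	for (; addr < end; addr += line)
		dccmvac(addr);		/* clean one line to PoC */
	dsb();				/* wait for the cleans to complete */
}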

/*
 * Cache co-processor (CP15) access functions
 */

/* read cache level ID register (CLIDR) */
static inline uint32_t read_clidr(void)
{
	uint32_t val = 0;
	asm volatile ("mrc p15, 1, %0, c0, c0, 1" : "=r" (val));
	return val;
}

/* read cache size ID register (CCSIDR) */
static inline uint32_t read_ccsidr(void)
{
	uint32_t val = 0;
	asm volatile ("mrc p15, 1, %0, c0, c0, 0" : "=r" (val));
	return val;
}

/* read cache size selection register (CSSELR) */
static inline uint32_t read_csselr(void)
{
	uint32_t val = 0;
	asm volatile ("mrc p15, 2, %0, c0, c0, 0" : "=r" (val));
	return val;
}

/* write to cache size selection register (CSSELR) */
static inline void write_csselr(uint32_t val)
{
	/*
	 * Bits [3:1] - Cache level + 1 (0b000 = L1, 0b110 = L7, 0b111 is rsvd)
	 * Bit 0 - 0 = data or unified cache, 1 = instruction cache
	 */
	asm volatile ("mcr p15, 2, %0, c0, c0, 0" : : "r" (val));
	isb();	/* ISB to sync the change to CCSIDR */
}
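
/*
 * Example sketch (hypothetical helper): select the L1 data cache via
 * CSSELR, then decode the line size from CCSIDR. CCSIDR[2:0] encodes
 * log2(words per line) - 2, so bytes per line = 1 << (LineSize + 4).
 */
static inline unsigned int example_l1d_line_bytes(void)
{
	write_csselr(0);	/* level 1, data/unified cache */
	return 1 << ((read_ccsidr() & 0x7) + 4);
}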

/* read L2 control register (L2CTLR) */
static inline uint32_t read_l2ctlr(void)
{
	uint32_t val = 0;
	asm volatile ("mrc p15, 1, %0, c9, c0, 2" : "=r" (val));
	return val;
}

/* write L2 control register (L2CTLR) */
static inline void write_l2ctlr(uint32_t val)
{
	/*
	 * Note: L2CTLR can only be written when the L2 memory system
	 * is idle, i.e. before the MMU is enabled.
	 */
	asm volatile ("mcr p15, 1, %0, c9, c0, 2" : : "r" (val) : "memory");
	isb();
}

/* read L2 auxiliary control register (L2ACTLR) */
static inline uint32_t read_l2actlr(void)
{
	uint32_t val = 0;
	asm volatile ("mrc p15, 1, %0, c15, c0, 0" : "=r" (val));
	return val;
}

/* write L2 auxiliary control register (L2ACTLR) */
static inline void write_l2actlr(uint32_t val)
{
	asm volatile ("mcr p15, 1, %0, c15, c0, 0" : : "r" (val) : "memory");
	isb();
}

/* read system control register (SCTLR) */
static inline uint32_t read_sctlr(void)
{
	uint32_t val;
	asm volatile ("mrc p15, 0, %0, c1, c0, 0" : "=r" (val));
	return val;
}

/* write system control register (SCTLR) */
static inline void write_sctlr(uint32_t val)
{
	asm volatile ("mcr p15, 0, %0, c1, c0, 0" : : "r" (val) : "cc");
	isb();
}
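
/*
 * Example (illustrative only): turn on the instruction cache and branch
 * predictor using the SCTLR bit definitions above. write_sctlr() already
 * issues the ISB that makes the change take effect.
 */
static inline void example_enable_icache(void)
{
	write_sctlr(read_sctlr() | SCTLR_I | SCTLR_Z);
}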

/* read data fault address register (DFAR) */
static inline uint32_t read_dfar(void)
{
	uint32_t val;
	asm volatile ("mrc p15, 0, %0, c6, c0, 0" : "=r" (val));
	return val;
}

/* read data fault status register (DFSR) */
static inline uint32_t read_dfsr(void)
{
	uint32_t val;
	asm volatile ("mrc p15, 0, %0, c5, c0, 0" : "=r" (val));
	return val;
}

/* read instruction fault address register (IFAR) */
static inline uint32_t read_ifar(void)
{
	uint32_t val;
	asm volatile ("mrc p15, 0, %0, c6, c0, 2" : "=r" (val));
	return val;
}

/* read instruction fault status register (IFSR) */
static inline uint32_t read_ifsr(void)
{
	uint32_t val;
	asm volatile ("mrc p15, 0, %0, c5, c0, 1" : "=r" (val));
	return val;
}

/* read auxiliary data fault status register (ADFSR) */
static inline uint32_t read_adfsr(void)
{
	uint32_t val;
	asm volatile ("mrc p15, 0, %0, c5, c1, 0" : "=r" (val));
	return val;
}

/* read auxiliary instruction fault status register (AIFSR) */
static inline uint32_t read_aifsr(void)
{
	uint32_t val;
	asm volatile ("mrc p15, 0, %0, c5, c1, 1" : "=r" (val));
	return val;
}

/*
 * Cache maintenance API
 */

/* dcache clean and invalidate all (on current level given by CSSELR) */
void dcache_clean_invalidate_all(void);

/* dcache clean by modified virtual address to PoC */
void dcache_clean_by_mva(void const *addr, size_t len);

/* dcache clean and invalidate by modified virtual address to PoC */
void dcache_clean_invalidate_by_mva(void const *addr, size_t len);

/* dcache invalidate by modified virtual address to PoC */
void dcache_invalidate_by_mva(void const *addr, size_t len);

/* dcache clean all (on current level given by CSSELR) */
void dcache_clean_all(void);

/* dcache invalidate all (on current level given by CSSELR) */
void dcache_invalidate_all(void);

/* returns number of bytes per cache line */
unsigned int dcache_line_bytes(void);

/* dcache and MMU disable */
void dcache_mmu_disable(void);

/* dcache and MMU enable */
void dcache_mmu_enable(void);

/* perform all icache/dcache maintenance needed after loading new code */
void cache_sync_instructions(void);

/* tlb invalidate all */
void tlb_invalidate_all(void);
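
/*
 * Example sketch (hypothetical): make freshly loaded code executable.
 * 'entry' and 'len' describe a code blob the caller has already copied
 * into place; the final jump is illustrative only.
 */
static inline void example_run_loaded_code(void *entry, size_t len)
{
	dcache_clean_by_mva(entry, len);	/* push the code out of the dcache */
	cache_sync_instructions();		/* invalidate icache/branch predictor */
	((void (*)(void))entry)();		/* hypothetical handoff */
}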

/*
 * Generalized setup/init functions
 */

/* MMU initialization (set page table base, permissions, initialize subtable
 * buffer, etc.). Must only be called ONCE PER BOOT, before any mappings. */
void mmu_init(void);

enum dcache_policy {
	DCACHE_OFF,
	DCACHE_WRITEBACK,
	DCACHE_WRITETHROUGH,
};

/* disable the MMU for a range. Primarily useful to lock out address 0. */
void mmu_disable_range(u32 start_mb, u32 size_mb);
/* MMU range configuration (set dcache policy) */
void mmu_config_range(u32 start_mb, u32 size_mb, enum dcache_policy policy);

/* Reconfigure memory mappings at the fine-grained (4K) page level. Must be
 * called on a range contained within a single, already mapped block/superpage.
 * Careful: Do NOT map over this address range with mmu_config_range() again
 * later, or you will leak resources and may desync your TLB! */
void mmu_config_range_kb(u32 start_kb, u32 size_kb, enum dcache_policy policy);
void mmu_disable_range_kb(u32 start_kb, u32 size_kb);
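
/*
 * Example sketch (illustrative only): a typical bring-up sequence using
 * the API above. The 1MB-granular ranges shown (DRAM at 0x40000000 for
 * 2GB, with page 0 locked out) are hypothetical board parameters.
 */
static inline void example_mmu_setup(void)
{
	mmu_init();					/* once per boot */
	mmu_config_range(0x400, 0x800, DCACHE_WRITEBACK); /* DRAM, in MB */
	mmu_disable_range(0, 1);			/* trap NULL dereferences */
	dcache_mmu_enable();				/* turn on MMU + dcache */
}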

#endif /* ARM_CACHE_H */