1 /*
2 * Copyright (c) 2020-2021 Arm Limited. All rights reserved.
3 *
4 * SPDX-License-Identifier: Apache-2.0
5 *
6 * Licensed under the Apache License, Version 2.0 (the License); you may
7 * not use this file except in compliance with the License.
8 * You may obtain a copy of the License at
9 *
10 * www.apache.org/licenses/LICENSE-2.0
11 *
12 * Unless required by applicable law or agreed to in writing, software
13 * distributed under the License is distributed on an AS IS BASIS, WITHOUT
14 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 * See the License for the specific language governing permissions and
16 * limitations under the License.
17 */
18
19 /*
20 * CMSIS-Core(M) Level 1 Cache API for Armv7-M and later
21 */
22
23 #ifndef ARM_ARMV7M_CACHEL1_H
24 #define ARM_ARMV7M_CACHEL1_H
25
26 #if defined ( __ICCARM__ )
27 #pragma system_include /* treat file as system include file for MISRA check */
28 #elif defined (__clang__)
29 #pragma clang system_header /* treat file as system include file */
30 #endif
31
32 /**
33 \ingroup CMSIS_Core_FunctionInterface
34 \defgroup CMSIS_Core_CacheFunctions Cache Functions
35 \brief Functions that configure Instruction and Data cache.
36 @{
37 */
38
39 /* Cache Size ID Register Macros */
40 #define CCSIDR_WAYS(x) (((x) & SCB_CCSIDR_ASSOCIATIVITY_Msk) >> SCB_CCSIDR_ASSOCIATIVITY_Pos)
41 #define CCSIDR_SETS(x) (((x) & SCB_CCSIDR_NUMSETS_Msk ) >> SCB_CCSIDR_NUMSETS_Pos )
42
43 #ifndef __SCB_DCACHE_LINE_SIZE
44 #define __SCB_DCACHE_LINE_SIZE 32U /*!< Cortex-M7 cache line size is fixed to 32 bytes (8 words). See also register SCB_CCSIDR */
45 #endif
46
47 #ifndef __SCB_ICACHE_LINE_SIZE
48 #define __SCB_ICACHE_LINE_SIZE 32U /*!< Cortex-M7 cache line size is fixed to 32 bytes (8 words). See also register SCB_CCSIDR */
49 #endif
50
51 /**
52 \brief Enable I-Cache
53 \details Turns on I-Cache
54 */
SCB_EnableICache(void)55 __STATIC_FORCEINLINE void SCB_EnableICache (void)
56 {
57 #if defined (__ICACHE_PRESENT) && (__ICACHE_PRESENT == 1U)
58 if (SCB->CCR & SCB_CCR_IC_Msk) return; /* return if ICache is already enabled */
59
60 __DSB();
61 __ISB();
62 SCB->ICIALLU = 0UL; /* invalidate I-Cache */
63 __DSB();
64 __ISB();
65 SCB->CCR |= (uint32_t)SCB_CCR_IC_Msk; /* enable I-Cache */
66 __DSB();
67 __ISB();
68 #endif
69 }
70
71
72 /**
73 \brief Disable I-Cache
74 \details Turns off I-Cache
75 */
SCB_DisableICache(void)76 __STATIC_FORCEINLINE void SCB_DisableICache (void)
77 {
78 #if defined (__ICACHE_PRESENT) && (__ICACHE_PRESENT == 1U)
79 __DSB();
80 __ISB();
81 SCB->CCR &= ~(uint32_t)SCB_CCR_IC_Msk; /* disable I-Cache */
82 SCB->ICIALLU = 0UL; /* invalidate I-Cache */
83 __DSB();
84 __ISB();
85 #endif
86 }
87
88
89 /**
90 \brief Invalidate I-Cache
91 \details Invalidates I-Cache
92 */
SCB_InvalidateICache(void)93 __STATIC_FORCEINLINE void SCB_InvalidateICache (void)
94 {
95 #if defined (__ICACHE_PRESENT) && (__ICACHE_PRESENT == 1U)
96 __DSB();
97 __ISB();
98 SCB->ICIALLU = 0UL;
99 __DSB();
100 __ISB();
101 #endif
102 }
103
104
105 /**
106 \brief I-Cache Invalidate by address
107 \details Invalidates I-Cache for the given address.
108 I-Cache is invalidated starting from a 32 byte aligned address in 32 byte granularity.
109 I-Cache memory blocks which are part of given address + given size are invalidated.
110 \param[in] addr address
111 \param[in] isize size of memory block (in number of bytes)
112 */
SCB_InvalidateICache_by_Addr(volatile void * addr,int32_t isize)113 __STATIC_FORCEINLINE void SCB_InvalidateICache_by_Addr (volatile void *addr, int32_t isize)
114 {
115 #if defined (__ICACHE_PRESENT) && (__ICACHE_PRESENT == 1U)
116 if ( isize > 0 ) {
117 int32_t op_size = isize + (((uint32_t)addr) & (__SCB_ICACHE_LINE_SIZE - 1U));
118 uint32_t op_addr = (uint32_t)addr /* & ~(__SCB_ICACHE_LINE_SIZE - 1U) */;
119
120 __DSB();
121
122 do {
123 SCB->ICIMVAU = op_addr; /* register accepts only 32byte aligned values, only bits 31..5 are valid */
124 op_addr += __SCB_ICACHE_LINE_SIZE;
125 op_size -= __SCB_ICACHE_LINE_SIZE;
126 } while ( op_size > 0 );
127
128 __DSB();
129 __ISB();
130 }
131 #endif
132 }
133
134
135 /**
136 \brief Enable D-Cache
137 \details Turns on D-Cache
138 */
SCB_EnableDCache(void)139 __STATIC_FORCEINLINE void SCB_EnableDCache (void)
140 {
141 #if defined (__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
142 uint32_t ccsidr;
143 uint32_t sets;
144 uint32_t ways;
145
146 if (SCB->CCR & SCB_CCR_DC_Msk) return; /* return if DCache is already enabled */
147
148 SCB->CSSELR = 0U; /* select Level 1 data cache */
149 __DSB();
150
151 ccsidr = SCB->CCSIDR;
152
153 /* invalidate D-Cache */
154 sets = (uint32_t)(CCSIDR_SETS(ccsidr));
155 do {
156 ways = (uint32_t)(CCSIDR_WAYS(ccsidr));
157 do {
158 SCB->DCISW = (((sets << SCB_DCISW_SET_Pos) & SCB_DCISW_SET_Msk) |
159 ((ways << SCB_DCISW_WAY_Pos) & SCB_DCISW_WAY_Msk) );
160 #if defined ( __CC_ARM )
161 __schedule_barrier();
162 #endif
163 } while (ways-- != 0U);
164 } while(sets-- != 0U);
165 __DSB();
166
167 SCB->CCR |= (uint32_t)SCB_CCR_DC_Msk; /* enable D-Cache */
168
169 __DSB();
170 __ISB();
171 #endif
172 }
173
174
175 /**
176 \brief Disable D-Cache
177 \details Turns off D-Cache
178 */
__STATIC_FORCEINLINE void SCB_DisableDCache (void)
{
#if defined (__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
  /* All locals live in one struct so they occupy a contiguous stack region
     that can be flushed with a single (aligned) DCCIMVAC below. */
  struct {
    uint32_t ccsidr;
    uint32_t sets;
    uint32_t ways;
  } locals
#if ((defined(__GNUC__) || defined(__clang__)) && !defined(__OPTIMIZE__))
     /* At -O0 the locals stay on the stack; aligning them to one cache line
        guarantees the single DCCIMVAC flush below covers all three fields. */
     __ALIGNED(__SCB_DCACHE_LINE_SIZE)
#endif
  ;

  SCB->CSSELR = 0U;                       /* select Level 1 data cache */
  __DSB();

  SCB->CCR &= ~(uint32_t)SCB_CCR_DC_Msk;  /* disable D-Cache */
  __DSB();

#if !defined(__OPTIMIZE__)
  /*
   * For the endless loop issue with no optimization builds.
   * More details, see https://github.com/ARM-software/CMSIS_5/issues/620
   *
   * The issue only happens when local variables are in stack. If
   * local variables are saved in general purpose register, then the function
   * is OK.
   *
   * When local variables are in stack, after disabling the cache, flush the
   * local variables cache line for data consistency.
   */
  /* Clean and invalidate the local variable cache. */
#if defined(__ICCARM__)
  /* As we can't align the stack to the cache line size, invalidate each of the variables */
  SCB->DCCIMVAC = (uint32_t)&locals.sets;
  SCB->DCCIMVAC = (uint32_t)&locals.ways;
  SCB->DCCIMVAC = (uint32_t)&locals.ccsidr;
#else
  SCB->DCCIMVAC = (uint32_t)&locals;
#endif
  __DSB();
  __ISB();
#endif

  locals.ccsidr = SCB->CCSIDR;
  /* clean & invalidate D-Cache: walk every set/way so dirty lines are
     written back before the cache contents are discarded */
  locals.sets = (uint32_t)(CCSIDR_SETS(locals.ccsidr));
  do {
    locals.ways = (uint32_t)(CCSIDR_WAYS(locals.ccsidr));
    do {
      SCB->DCCISW = (((locals.sets << SCB_DCCISW_SET_Pos) & SCB_DCCISW_SET_Msk) |
                     ((locals.ways << SCB_DCCISW_WAY_Pos) & SCB_DCCISW_WAY_Msk)  );
#if defined ( __CC_ARM )
      __schedule_barrier();            /* keep Arm Compiler 5 from re-ordering the loop */
#endif
    } while (locals.ways-- != 0U);
  } while(locals.sets-- != 0U);

  __DSB();
  __ISB();
#endif
}
241
242
243 /**
244 \brief Invalidate D-Cache
245 \details Invalidates D-Cache
246 */
SCB_InvalidateDCache(void)247 __STATIC_FORCEINLINE void SCB_InvalidateDCache (void)
248 {
249 #if defined (__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
250 uint32_t ccsidr;
251 uint32_t sets;
252 uint32_t ways;
253
254 SCB->CSSELR = 0U; /* select Level 1 data cache */
255 __DSB();
256
257 ccsidr = SCB->CCSIDR;
258
259 /* invalidate D-Cache */
260 sets = (uint32_t)(CCSIDR_SETS(ccsidr));
261 do {
262 ways = (uint32_t)(CCSIDR_WAYS(ccsidr));
263 do {
264 SCB->DCISW = (((sets << SCB_DCISW_SET_Pos) & SCB_DCISW_SET_Msk) |
265 ((ways << SCB_DCISW_WAY_Pos) & SCB_DCISW_WAY_Msk) );
266 #if defined ( __CC_ARM )
267 __schedule_barrier();
268 #endif
269 } while (ways-- != 0U);
270 } while(sets-- != 0U);
271
272 __DSB();
273 __ISB();
274 #endif
275 }
276
277
278 /**
279 \brief Clean D-Cache
280 \details Cleans D-Cache
281 */
SCB_CleanDCache(void)282 __STATIC_FORCEINLINE void SCB_CleanDCache (void)
283 {
284 #if defined (__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
285 uint32_t ccsidr;
286 uint32_t sets;
287 uint32_t ways;
288
289 SCB->CSSELR = 0U; /* select Level 1 data cache */
290 __DSB();
291
292 ccsidr = SCB->CCSIDR;
293
294 /* clean D-Cache */
295 sets = (uint32_t)(CCSIDR_SETS(ccsidr));
296 do {
297 ways = (uint32_t)(CCSIDR_WAYS(ccsidr));
298 do {
299 SCB->DCCSW = (((sets << SCB_DCCSW_SET_Pos) & SCB_DCCSW_SET_Msk) |
300 ((ways << SCB_DCCSW_WAY_Pos) & SCB_DCCSW_WAY_Msk) );
301 #if defined ( __CC_ARM )
302 __schedule_barrier();
303 #endif
304 } while (ways-- != 0U);
305 } while(sets-- != 0U);
306
307 __DSB();
308 __ISB();
309 #endif
310 }
311
312
313 /**
314 \brief Clean & Invalidate D-Cache
315 \details Cleans and Invalidates D-Cache
316 */
SCB_CleanInvalidateDCache(void)317 __STATIC_FORCEINLINE void SCB_CleanInvalidateDCache (void)
318 {
319 #if defined (__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
320 uint32_t ccsidr;
321 uint32_t sets;
322 uint32_t ways;
323
324 SCB->CSSELR = 0U; /* select Level 1 data cache */
325 __DSB();
326
327 ccsidr = SCB->CCSIDR;
328
329 /* clean & invalidate D-Cache */
330 sets = (uint32_t)(CCSIDR_SETS(ccsidr));
331 do {
332 ways = (uint32_t)(CCSIDR_WAYS(ccsidr));
333 do {
334 SCB->DCCISW = (((sets << SCB_DCCISW_SET_Pos) & SCB_DCCISW_SET_Msk) |
335 ((ways << SCB_DCCISW_WAY_Pos) & SCB_DCCISW_WAY_Msk) );
336 #if defined ( __CC_ARM )
337 __schedule_barrier();
338 #endif
339 } while (ways-- != 0U);
340 } while(sets-- != 0U);
341
342 __DSB();
343 __ISB();
344 #endif
345 }
346
347
348 /**
349 \brief D-Cache Invalidate by address
350 \details Invalidates D-Cache for the given address.
351 D-Cache is invalidated starting from a 32 byte aligned address in 32 byte granularity.
352 D-Cache memory blocks which are part of given address + given size are invalidated.
353 \param[in] addr address
354 \param[in] dsize size of memory block (in number of bytes)
355 */
SCB_InvalidateDCache_by_Addr(volatile void * addr,int32_t dsize)356 __STATIC_FORCEINLINE void SCB_InvalidateDCache_by_Addr (volatile void *addr, int32_t dsize)
357 {
358 #if defined (__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
359 if ( dsize > 0 ) {
360 int32_t op_size = dsize + (((uint32_t)addr) & (__SCB_DCACHE_LINE_SIZE - 1U));
361 uint32_t op_addr = (uint32_t)addr /* & ~(__SCB_DCACHE_LINE_SIZE - 1U) */;
362
363 __DSB();
364
365 do {
366 SCB->DCIMVAC = op_addr; /* register accepts only 32byte aligned values, only bits 31..5 are valid */
367 op_addr += __SCB_DCACHE_LINE_SIZE;
368 op_size -= __SCB_DCACHE_LINE_SIZE;
369 } while ( op_size > 0 );
370
371 __DSB();
372 __ISB();
373 }
374 #endif
375 }
376
377
378 /**
379 \brief D-Cache Clean by address
380 \details Cleans D-Cache for the given address
381 D-Cache is cleaned starting from a 32 byte aligned address in 32 byte granularity.
382 D-Cache memory blocks which are part of given address + given size are cleaned.
383 \param[in] addr address
384 \param[in] dsize size of memory block (in number of bytes)
385 */
SCB_CleanDCache_by_Addr(volatile void * addr,int32_t dsize)386 __STATIC_FORCEINLINE void SCB_CleanDCache_by_Addr (volatile void *addr, int32_t dsize)
387 {
388 #if defined (__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
389 if ( dsize > 0 ) {
390 int32_t op_size = dsize + (((uint32_t)addr) & (__SCB_DCACHE_LINE_SIZE - 1U));
391 uint32_t op_addr = (uint32_t)addr /* & ~(__SCB_DCACHE_LINE_SIZE - 1U) */;
392
393 __DSB();
394
395 do {
396 SCB->DCCMVAC = op_addr; /* register accepts only 32byte aligned values, only bits 31..5 are valid */
397 op_addr += __SCB_DCACHE_LINE_SIZE;
398 op_size -= __SCB_DCACHE_LINE_SIZE;
399 } while ( op_size > 0 );
400
401 __DSB();
402 __ISB();
403 }
404 #endif
405 }
406
407
408 /**
409 \brief D-Cache Clean and Invalidate by address
410 \details Cleans and invalidates D_Cache for the given address
411 D-Cache is cleaned and invalidated starting from a 32 byte aligned address in 32 byte granularity.
412 D-Cache memory blocks which are part of given address + given size are cleaned and invalidated.
413 \param[in] addr address (aligned to 32-byte boundary)
414 \param[in] dsize size of memory block (in number of bytes)
415 */
SCB_CleanInvalidateDCache_by_Addr(volatile void * addr,int32_t dsize)416 __STATIC_FORCEINLINE void SCB_CleanInvalidateDCache_by_Addr (volatile void *addr, int32_t dsize)
417 {
418 #if defined (__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
419 if ( dsize > 0 ) {
420 int32_t op_size = dsize + (((uint32_t)addr) & (__SCB_DCACHE_LINE_SIZE - 1U));
421 uint32_t op_addr = (uint32_t)addr /* & ~(__SCB_DCACHE_LINE_SIZE - 1U) */;
422
423 __DSB();
424
425 do {
426 SCB->DCCIMVAC = op_addr; /* register accepts only 32byte aligned values, only bits 31..5 are valid */
427 op_addr += __SCB_DCACHE_LINE_SIZE;
428 op_size -= __SCB_DCACHE_LINE_SIZE;
429 } while ( op_size > 0 );
430
431 __DSB();
432 __ISB();
433 }
434 #endif
435 }
436
437 /*@} end of CMSIS_Core_CacheFunctions */
438
439 #endif /* ARM_ARMV7M_CACHEL1_H */
440