/******************************************************************************
 * @file     cachel1_armv7.h
 * @brief    CMSIS Level 1 Cache API for Armv7-M and later
 * @version  V1.0.3
 * @date     17. March 2023
 ******************************************************************************/
/*
 * Copyright (c) 2020-2021 Arm Limited. All rights reserved.
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the License); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an AS IS BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#if defined ( __ICCARM__ )
  #pragma system_include                        /* treat file as system include file for MISRA check */
#elif defined (__clang__)
  #pragma clang system_header                   /* treat file as system include file */
#endif

#ifndef ARM_CACHEL1_ARMV7_H
#define ARM_CACHEL1_ARMV7_H

/**
  \ingroup  CMSIS_Core_FunctionInterface
  \defgroup CMSIS_Core_CacheFunctions Cache Functions
  \brief    Functions that configure Instruction and Data cache.
  @{
 */

/* Cache Size ID Register Macros */
#define CCSIDR_WAYS(x)         (((x) & SCB_CCSIDR_ASSOCIATIVITY_Msk) >> SCB_CCSIDR_ASSOCIATIVITY_Pos)
#define CCSIDR_SETS(x)         (((x) & SCB_CCSIDR_NUMSETS_Msk      ) >> SCB_CCSIDR_NUMSETS_Pos      )
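/*
 * Usage sketch (assumption, not part of CMSIS): reading the L1 D-Cache geometry
 * with the CCSIDR macros above. CSSELR must select the wanted cache before
 * CCSIDR is read; the macros return the maximum way/set index, so add 1 for
 * the actual counts.
 *
 *   SCB->CSSELR = 0U;                                 // select Level 1 data cache
 *   __DSB();
 *   uint32_t ccsidr   = SCB->CCSIDR;
 *   uint32_t num_ways = CCSIDR_WAYS(ccsidr) + 1U;     // associativity
 *   uint32_t num_sets = CCSIDR_SETS(ccsidr) + 1U;     // number of sets
 */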

#ifndef __SCB_DCACHE_LINE_SIZE
#define __SCB_DCACHE_LINE_SIZE  32U /*!< Cortex-M7 cache line size is fixed to 32 bytes (8 words). See also register SCB_CCSIDR */
#endif

#ifndef __SCB_ICACHE_LINE_SIZE
#define __SCB_ICACHE_LINE_SIZE  32U /*!< Cortex-M7 cache line size is fixed to 32 bytes (8 words). See also register SCB_CCSIDR */
#endif
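/*
 * Usage sketch (assumption, not part of CMSIS): declaring a DMA buffer that is
 * both aligned to and padded to the D-Cache line size, so that cache maintenance
 * by address never touches unrelated data sharing a cache line. The buffer name
 * and size are illustrative only.
 *
 *   #define RX_BUFFER_SIZE  ((64U + __SCB_DCACHE_LINE_SIZE - 1U) & ~(__SCB_DCACHE_LINE_SIZE - 1U))
 *   static uint8_t rx_buffer[RX_BUFFER_SIZE] __ALIGNED(__SCB_DCACHE_LINE_SIZE);
 */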

/**
  \brief   Enable I-Cache
  \details Turns on I-Cache
  */
__STATIC_FORCEINLINE void SCB_EnableICache (void)
{
  #if defined (__ICACHE_PRESENT) && (__ICACHE_PRESENT == 1U)
    if (SCB->CCR & SCB_CCR_IC_Msk) return;  /* return if ICache is already enabled */

    __DSB();
    __ISB();
    SCB->ICIALLU = 0UL;                     /* invalidate I-Cache */
    __DSB();
    __ISB();
    SCB->CCR |=  (uint32_t)SCB_CCR_IC_Msk;  /* enable I-Cache */
    __DSB();
    __ISB();
  #endif
}


/**
  \brief   Disable I-Cache
  \details Turns off I-Cache
  */
__STATIC_FORCEINLINE void SCB_DisableICache (void)
{
  #if defined (__ICACHE_PRESENT) && (__ICACHE_PRESENT == 1U)
    __DSB();
    __ISB();
    SCB->CCR &= ~(uint32_t)SCB_CCR_IC_Msk;  /* disable I-Cache */
    SCB->ICIALLU = 0UL;                     /* invalidate I-Cache */
    __DSB();
    __ISB();
  #endif
}


/**
  \brief   Invalidate I-Cache
  \details Invalidates I-Cache
  */
__STATIC_FORCEINLINE void SCB_InvalidateICache (void)
{
  #if defined (__ICACHE_PRESENT) && (__ICACHE_PRESENT == 1U)
    __DSB();
    __ISB();
    SCB->ICIALLU = 0UL;
    __DSB();
    __ISB();
  #endif
}

/**
  \brief   I-Cache Invalidate by address
  \details Invalidates I-Cache for the given address range.
           I-Cache is invalidated starting from a 32-byte aligned address in 32-byte granularity.
           All I-Cache lines that overlap the range [addr, addr + isize) are invalidated.
  \param[in]   addr    address
  \param[in]   isize   size of memory block (in number of bytes)
  */
__STATIC_FORCEINLINE void SCB_InvalidateICache_by_Addr (volatile void *addr, int32_t isize)
{
  #if defined (__ICACHE_PRESENT) && (__ICACHE_PRESENT == 1U)
    if ( isize > 0 ) {
      int32_t  op_size = isize + (((uint32_t)addr) & (__SCB_ICACHE_LINE_SIZE - 1U));
      uint32_t op_addr = (uint32_t)addr /* & ~(__SCB_ICACHE_LINE_SIZE - 1U) */;

      __DSB();

      do {
        SCB->ICIMVAU = op_addr;             /* register accepts only 32-byte aligned values, only bits 31..5 are valid */
        op_addr += __SCB_ICACHE_LINE_SIZE;
        op_size -= __SCB_ICACHE_LINE_SIZE;
      } while ( op_size > 0 );

      __DSB();
      __ISB();
    }
  #endif
}
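/*
 * Usage sketch (assumption, not part of CMSIS): after copying executable code
 * into RAM (e.g. a RAM function or a firmware overlay), clean the D-Cache and
 * invalidate the I-Cache for that range before jumping to it. The names
 * code_ram, code_flash and code_size are illustrative only.
 *
 *   memcpy(code_ram, code_flash, code_size);
 *   SCB_CleanDCache_by_Addr(code_ram, (int32_t)code_size);       // push the new code out to memory
 *   SCB_InvalidateICache_by_Addr(code_ram, (int32_t)code_size);  // drop any stale instructions
 */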


/**
  \brief   Enable D-Cache
  \details Turns on D-Cache
  */
__STATIC_FORCEINLINE void SCB_EnableDCache (void)
{
  #if defined (__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
    uint32_t ccsidr;
    uint32_t sets;
    uint32_t ways;

    if (SCB->CCR & SCB_CCR_DC_Msk) return;  /* return if DCache is already enabled */

    SCB->CSSELR = 0U;                       /* select Level 1 data cache */
    __DSB();

    ccsidr = SCB->CCSIDR;

    /* invalidate D-Cache */
    sets = (uint32_t)(CCSIDR_SETS(ccsidr));
    do {
      ways = (uint32_t)(CCSIDR_WAYS(ccsidr));
      do {
        SCB->DCISW = (((sets << SCB_DCISW_SET_Pos) & SCB_DCISW_SET_Msk) |
                      ((ways << SCB_DCISW_WAY_Pos) & SCB_DCISW_WAY_Msk)  );
        #if defined ( __CC_ARM )
          __schedule_barrier();
        #endif
      } while (ways-- != 0U);
    } while(sets-- != 0U);
    __DSB();

    SCB->CCR |=  (uint32_t)SCB_CCR_DC_Msk;  /* enable D-Cache */

    __DSB();
    __ISB();
  #endif
}
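/*
 * Usage sketch (assumption, not part of CMSIS): enabling both caches once during
 * early system initialization, before any cacheable DMA traffic is started. The
 * function name board_enable_caches is illustrative only.
 *
 *   static void board_enable_caches (void)
 *   {
 *     SCB_EnableICache();   // invalidates, then enables the I-Cache
 *     SCB_EnableDCache();   // invalidates, then enables the D-Cache
 *   }
 */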


/**
  \brief   Disable D-Cache
  \details Turns off D-Cache
  */
__STATIC_FORCEINLINE void SCB_DisableDCache (void)
{
  #if defined (__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
    struct {
      uint32_t ccsidr;
      uint32_t sets;
      uint32_t ways;
    } locals
    #if ((defined(__GNUC__) || defined(__clang__)) && !defined(__OPTIMIZE__))
      __ALIGNED(__SCB_DCACHE_LINE_SIZE)
    #endif
    ;

    SCB->CSSELR = 0U;                       /* select Level 1 data cache */
    __DSB();

    SCB->CCR &= ~(uint32_t)SCB_CCR_DC_Msk;  /* disable D-Cache */
    __DSB();

    #if !defined(__OPTIMIZE__)
      /*
       * Workaround for the endless-loop issue seen in builds without optimization.
       * For details, see https://github.com/ARM-software/CMSIS_5/issues/620
       *
       * The issue only occurs when the local variables are placed on the stack.
       * If they are kept in general-purpose registers, the function works as
       * expected.
       *
       * When the local variables are on the stack, clean and invalidate their
       * cache line after disabling the cache to keep the data consistent.
       */
      /* Clean and invalidate the local variable cache. */
      #if defined(__ICCARM__)
        /* As we can't align the stack to the cache line size, invalidate each of the variables */
        SCB->DCCIMVAC = (uint32_t)&locals.sets;
        SCB->DCCIMVAC = (uint32_t)&locals.ways;
        SCB->DCCIMVAC = (uint32_t)&locals.ccsidr;
      #else
        SCB->DCCIMVAC = (uint32_t)&locals;
      #endif
      __DSB();
      __ISB();
    #endif

    locals.ccsidr = SCB->CCSIDR;
    /* clean & invalidate D-Cache */
    locals.sets = (uint32_t)(CCSIDR_SETS(locals.ccsidr));
    do {
      locals.ways = (uint32_t)(CCSIDR_WAYS(locals.ccsidr));
      do {
        SCB->DCCISW = (((locals.sets << SCB_DCCISW_SET_Pos) & SCB_DCCISW_SET_Msk) |
                       ((locals.ways << SCB_DCCISW_WAY_Pos) & SCB_DCCISW_WAY_Msk)  );
        #if defined ( __CC_ARM )
          __schedule_barrier();
        #endif
      } while (locals.ways-- != 0U);
    } while(locals.sets-- != 0U);

    __DSB();
    __ISB();
  #endif
}
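/*
 * Usage sketch (assumption, not part of CMSIS): disabling the caches before
 * handing control to code that does not expect them to be enabled, e.g. before
 * jumping to a bootloader or triggering a software reset. enter_bootloader is
 * a hypothetical hand-over routine.
 *
 *   __disable_irq();          // no interrupt may touch cacheable data from here on
 *   SCB_DisableDCache();      // cleans, invalidates and turns off the D-Cache
 *   SCB_DisableICache();
 *   enter_bootloader();
 */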


/**
  \brief   Invalidate D-Cache
  \details Invalidates D-Cache
  */
__STATIC_FORCEINLINE void SCB_InvalidateDCache (void)
{
  #if defined (__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
    uint32_t ccsidr;
    uint32_t sets;
    uint32_t ways;

    SCB->CSSELR = 0U;                       /* select Level 1 data cache */
    __DSB();

    ccsidr = SCB->CCSIDR;

    /* invalidate D-Cache */
    sets = (uint32_t)(CCSIDR_SETS(ccsidr));
    do {
      ways = (uint32_t)(CCSIDR_WAYS(ccsidr));
      do {
        SCB->DCISW = (((sets << SCB_DCISW_SET_Pos) & SCB_DCISW_SET_Msk) |
                      ((ways << SCB_DCISW_WAY_Pos) & SCB_DCISW_WAY_Msk)  );
        #if defined ( __CC_ARM )
          __schedule_barrier();
        #endif
      } while (ways-- != 0U);
    } while(sets-- != 0U);

    __DSB();
    __ISB();
  #endif
}


/**
  \brief   Clean D-Cache
  \details Cleans D-Cache
  */
__STATIC_FORCEINLINE void SCB_CleanDCache (void)
{
  #if defined (__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
    uint32_t ccsidr;
    uint32_t sets;
    uint32_t ways;

    SCB->CSSELR = 0U;                       /* select Level 1 data cache */
    __DSB();

    ccsidr = SCB->CCSIDR;

    /* clean D-Cache */
    sets = (uint32_t)(CCSIDR_SETS(ccsidr));
    do {
      ways = (uint32_t)(CCSIDR_WAYS(ccsidr));
      do {
        SCB->DCCSW = (((sets << SCB_DCCSW_SET_Pos) & SCB_DCCSW_SET_Msk) |
                      ((ways << SCB_DCCSW_WAY_Pos) & SCB_DCCSW_WAY_Msk)  );
        #if defined ( __CC_ARM )
          __schedule_barrier();
        #endif
      } while (ways-- != 0U);
    } while(sets-- != 0U);

    __DSB();
    __ISB();
  #endif
}


/**
  \brief   Clean & Invalidate D-Cache
  \details Cleans and Invalidates D-Cache
  */
__STATIC_FORCEINLINE void SCB_CleanInvalidateDCache (void)
{
  #if defined (__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
    uint32_t ccsidr;
    uint32_t sets;
    uint32_t ways;

    SCB->CSSELR = 0U;                       /* select Level 1 data cache */
    __DSB();

    ccsidr = SCB->CCSIDR;

    /* clean & invalidate D-Cache */
    sets = (uint32_t)(CCSIDR_SETS(ccsidr));
    do {
      ways = (uint32_t)(CCSIDR_WAYS(ccsidr));
      do {
        SCB->DCCISW = (((sets << SCB_DCCISW_SET_Pos) & SCB_DCCISW_SET_Msk) |
                       ((ways << SCB_DCCISW_WAY_Pos) & SCB_DCCISW_WAY_Msk)  );
        #if defined ( __CC_ARM )
          __schedule_barrier();
        #endif
      } while (ways-- != 0U);
    } while(sets-- != 0U);

    __DSB();
    __ISB();
  #endif
}

/**
  \brief   D-Cache Invalidate by address
  \details Invalidates D-Cache for the given address range.
           D-Cache is invalidated starting from a 32-byte aligned address in 32-byte granularity.
           All D-Cache lines that overlap the range [addr, addr + dsize) are invalidated.
  \param[in]   addr    address
  \param[in]   dsize   size of memory block (in number of bytes)
  */
__STATIC_FORCEINLINE void SCB_InvalidateDCache_by_Addr (volatile void *addr, int32_t dsize)
{
  #if defined (__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
    if ( dsize > 0 ) {
      int32_t  op_size = dsize + (((uint32_t)addr) & (__SCB_DCACHE_LINE_SIZE - 1U));
      uint32_t op_addr = (uint32_t)addr /* & ~(__SCB_DCACHE_LINE_SIZE - 1U) */;

      __DSB();

      do {
        SCB->DCIMVAC = op_addr;             /* register accepts only 32-byte aligned values, only bits 31..5 are valid */
        op_addr += __SCB_DCACHE_LINE_SIZE;
        op_size -= __SCB_DCACHE_LINE_SIZE;
      } while ( op_size > 0 );

      __DSB();
      __ISB();
    }
  #endif
}
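/*
 * Usage sketch (assumption, not part of CMSIS): making data written to memory by
 * a DMA peripheral visible to the CPU. The buffer is invalidated after the DMA
 * receive completes so the CPU does not read stale cached data. rx_buffer and
 * process_frame are illustrative only; the buffer should be cache-line aligned
 * and padded (see __SCB_DCACHE_LINE_SIZE above).
 *
 *   // DMA has finished writing rx_buffer:
 *   SCB_InvalidateDCache_by_Addr(rx_buffer, (int32_t)sizeof(rx_buffer));
 *   process_frame(rx_buffer);
 */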


/**
  \brief   D-Cache Clean by address
  \details Cleans D-Cache for the given address range.
           D-Cache is cleaned starting from a 32-byte aligned address in 32-byte granularity.
           All D-Cache lines that overlap the range [addr, addr + dsize) are cleaned.
  \param[in]   addr    address
  \param[in]   dsize   size of memory block (in number of bytes)
  */
__STATIC_FORCEINLINE void SCB_CleanDCache_by_Addr (volatile void *addr, int32_t dsize)
{
  #if defined (__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
    if ( dsize > 0 ) {
      int32_t  op_size = dsize + (((uint32_t)addr) & (__SCB_DCACHE_LINE_SIZE - 1U));
      uint32_t op_addr = (uint32_t)addr /* & ~(__SCB_DCACHE_LINE_SIZE - 1U) */;

      __DSB();

      do {
        SCB->DCCMVAC = op_addr;             /* register accepts only 32-byte aligned values, only bits 31..5 are valid */
        op_addr += __SCB_DCACHE_LINE_SIZE;
        op_size -= __SCB_DCACHE_LINE_SIZE;
      } while ( op_size > 0 );

      __DSB();
      __ISB();
    }
  #endif
}
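/*
 * Usage sketch (assumption, not part of CMSIS): making CPU-written data visible
 * to a DMA peripheral before the transfer starts, so the DMA reads the current
 * contents from memory rather than stale data. tx_buffer, payload, payload_len
 * and dma_start_tx are illustrative only.
 *
 *   memcpy(tx_buffer, payload, payload_len);
 *   SCB_CleanDCache_by_Addr(tx_buffer, (int32_t)payload_len);  // write dirty lines back to memory
 *   dma_start_tx(tx_buffer, payload_len);                      // hypothetical DMA kick-off
 */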


/**
  \brief   D-Cache Clean and Invalidate by address
  \details Cleans and invalidates D-Cache for the given address range.
           D-Cache is cleaned and invalidated starting from a 32-byte aligned address in 32-byte granularity.
           All D-Cache lines that overlap the range [addr, addr + dsize) are cleaned and invalidated.
  \param[in]   addr    address
  \param[in]   dsize   size of memory block (in number of bytes)
  */
__STATIC_FORCEINLINE void SCB_CleanInvalidateDCache_by_Addr (volatile void *addr, int32_t dsize)
{
  #if defined (__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
    if ( dsize > 0 ) {
      int32_t  op_size = dsize + (((uint32_t)addr) & (__SCB_DCACHE_LINE_SIZE - 1U));
      uint32_t op_addr = (uint32_t)addr /* & ~(__SCB_DCACHE_LINE_SIZE - 1U) */;

      __DSB();

      do {
        SCB->DCCIMVAC = op_addr;            /* register accepts only 32-byte aligned values, only bits 31..5 are valid */
        op_addr += __SCB_DCACHE_LINE_SIZE;
        op_size -= __SCB_DCACHE_LINE_SIZE;
      } while ( op_size > 0 );

      __DSB();
      __ISB();
    }
  #endif
}
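/*
 * Usage sketch (assumption, not part of CMSIS): maintaining a buffer that the
 * CPU writes and a peripheral then updates in place (e.g. a descriptor or a
 * command/response block). Cleaning pushes the CPU's writes out; invalidating
 * ensures the next CPU read fetches what the peripheral wrote back. desc,
 * start_peripheral and wait_for_completion are illustrative only; desc should
 * be cache-line aligned and padded.
 *
 *   desc.command = CMD_READ;                                      // prepared by the CPU
 *   SCB_CleanInvalidateDCache_by_Addr(&desc, (int32_t)sizeof(desc));
 *   start_peripheral(&desc);                                      // peripheral updates desc.status
 *   wait_for_completion();
 *   SCB_InvalidateDCache_by_Addr(&desc, (int32_t)sizeof(desc));   // see the peripheral's update
 *   status = desc.status;
 */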

/*@} end of CMSIS_Core_CacheFunctions */

#endif /* ARM_CACHEL1_ARMV7_H */