/*
 * Copyright (c) 2021 HPMicro
 *
 * SPDX-License-Identifier: BSD-3-Clause
 *
 */

#ifndef _HPM_L1_CACHE_H
#define _HPM_L1_CACHE_H
#include "hpm_common.h"
#include "hpm_csr_drv.h"
#include "hpm_soc.h"

/**
 *
 * @brief L1CACHE driver APIs
 * @defgroup l1cache_interface L1CACHE driver APIs
 * @{
 */

/* cache size is 32KB */
#define HPM_L1C_CACHE_SIZE (uint32_t)(32 * SIZE_1KB)
#define HPM_L1C_ICACHE_SIZE (HPM_L1C_CACHE_SIZE)
#define HPM_L1C_DCACHE_SIZE (HPM_L1C_CACHE_SIZE)
/* cache line size is 64B */
#define HPM_L1C_CACHELINE_SIZE (64)
/* 128 cache lines per way */
#define HPM_L1C_CACHELINES_PER_WAY (128)

/* mcache_ctl register */
/*
 * Controls if the instruction cache is enabled or not.
 *
 * 0 I-Cache is disabled
 * 1 I-Cache is enabled
 */
#define HPM_MCACHE_CTL_IC_EN_SHIFT (0UL)
#define HPM_MCACHE_CTL_IC_EN_MASK (1UL << HPM_MCACHE_CTL_IC_EN_SHIFT)
#define HPM_MCACHE_CTL_IC_EN(x) \
    (uint32_t)(((x) << HPM_MCACHE_CTL_IC_EN_SHIFT) & HPM_MCACHE_CTL_IC_EN_MASK)

/*
 * Controls if the data cache is enabled or not.
 *
 * 0 D-Cache is disabled
 * 1 D-Cache is enabled
 */
#define HPM_MCACHE_CTL_DC_EN_SHIFT (1UL)
#define HPM_MCACHE_CTL_DC_EN_MASK (1UL << HPM_MCACHE_CTL_DC_EN_SHIFT)
#define HPM_MCACHE_CTL_DC_EN(x) \
    (uint32_t)(((x) << HPM_MCACHE_CTL_DC_EN_SHIFT) & HPM_MCACHE_CTL_DC_EN_MASK)

/*
 * Parity/ECC error checking enable control for the instruction cache.
 *
 * 0 Disable parity/ECC
 * 1 Reserved
 * 2 Generate exceptions only on uncorrectable parity/ECC errors
 * 3 Generate exceptions on any type of parity/ECC errors
 */
#define HPM_MCACHE_CTL_IC_ECCEN_SHIFT (0x2UL)
#define HPM_MCACHE_CTL_IC_ECCEN_MASK (0x3UL << HPM_MCACHE_CTL_IC_ECCEN_SHIFT)
#define HPM_MCACHE_CTL_IC_ECCEN(x) \
    (uint32_t)(((x) << HPM_MCACHE_CTL_IC_ECCEN_SHIFT) & HPM_MCACHE_CTL_IC_ECCEN_MASK)

/*
 *
 * Parity/ECC error checking enable control for the data cache.
 *
 * 0 Disable parity/ECC
 * 1 Reserved
 * 2 Generate exceptions only on uncorrectable parity/ECC errors
 * 3 Generate exceptions on any type of parity/ECC errors
 */
#define HPM_MCACHE_CTL_DC_ECCEN_SHIFT (0x4UL)
#define HPM_MCACHE_CTL_DC_ECCEN_MASK (0x3UL << HPM_MCACHE_CTL_DC_ECCEN_SHIFT)
#define HPM_MCACHE_CTL_DC_ECCEN(x) \
    (uint32_t)(((x) << HPM_MCACHE_CTL_DC_ECCEN_SHIFT) & HPM_MCACHE_CTL_DC_ECCEN_MASK)

/*
 *
 * Controls diagnostic accesses of ECC codes of the instruction cache RAMs.
 * It is set to enable CCTL operations to access the ECC codes. This bit
 * can be set for injecting ECC errors to test the ECC handler.
 *
 * 0 Disable diagnostic accesses of ECC codes
 * 1 Enable diagnostic accesses of ECC codes
 */
#define HPM_MCACHE_CTL_IC_RWECC_SHIFT (0x6UL)
#define HPM_MCACHE_CTL_IC_RWECC_MASK (0x1UL << HPM_MCACHE_CTL_IC_RWECC_SHIFT)
#define HPM_MCACHE_CTL_IC_RWECC(x) \
    (uint32_t)(((x) << HPM_MCACHE_CTL_IC_RWECC_SHIFT) & HPM_MCACHE_CTL_IC_RWECC_MASK)

/*
 *
 * Controls diagnostic accesses of ECC codes of the data cache RAMs. It is
 * set to enable CCTL operations to access the ECC codes. This bit can be
 * set for injecting ECC errors to test the ECC handler.
 *
 * 0 Disable diagnostic accesses of ECC codes
 * 1 Enable diagnostic accesses of ECC codes
 */
#define HPM_MCACHE_CTL_DC_RWECC_SHIFT (0x7UL)
#define HPM_MCACHE_CTL_DC_RWECC_MASK (0x1UL << HPM_MCACHE_CTL_DC_RWECC_SHIFT)
#define HPM_MCACHE_CTL_DC_RWECC(x) \
    (uint32_t)(((x) << HPM_MCACHE_CTL_DC_RWECC_SHIFT) & HPM_MCACHE_CTL_DC_RWECC_MASK)

/*
 * Enable bit for Superuser-mode and User-mode software to access
 * ucctlbeginaddr and ucctlcommand CSRs.
 *
 * 0 Disable ucctlbeginaddr and ucctlcommand accesses in S/U mode
 * 1 Enable ucctlbeginaddr and ucctlcommand accesses in S/U mode
 */
#define HPM_MCACHE_CTL_CCTL_SUEN_SHIFT (0x8UL)
#define HPM_MCACHE_CTL_CCTL_SUEN_MASK (0x1UL << HPM_MCACHE_CTL_CCTL_SUEN_SHIFT)
#define HPM_MCACHE_CTL_CCTL_SUEN(x) \
    (uint32_t)(((x) << HPM_MCACHE_CTL_CCTL_SUEN_SHIFT) & HPM_MCACHE_CTL_CCTL_SUEN_MASK)

/*
 * This bit controls hardware prefetch for instruction fetches to cacheable
 * memory regions when I-Cache size is not 0.
 *
 * 0 Disable hardware prefetch on instruction fetches
 * 1 Enable hardware prefetch on instruction fetches
 */
#define HPM_MCACHE_CTL_IPREF_EN_SHIFT (0x9UL)
#define HPM_MCACHE_CTL_IPREF_EN_MASK (0x1UL << HPM_MCACHE_CTL_IPREF_EN_SHIFT)
#define HPM_MCACHE_CTL_IPREF_EN(x) \
    (uint32_t)(((x) << HPM_MCACHE_CTL_IPREF_EN_SHIFT) & HPM_MCACHE_CTL_IPREF_EN_MASK)

/*
 * This bit controls hardware prefetch for load/store accesses to cacheable
 * memory regions when D-Cache size is not 0.
 *
 * 0 Disable hardware prefetch on load/store memory accesses.
 * 1 Enable hardware prefetch on load/store memory accesses.
 */
#define HPM_MCACHE_CTL_DPREF_EN_SHIFT (0x10UL)
#define HPM_MCACHE_CTL_DPREF_EN_MASK (0x1UL << HPM_MCACHE_CTL_DPREF_EN_SHIFT)
#define HPM_MCACHE_CTL_DPREF_EN(x) \
    (uint32_t)(((x) << HPM_MCACHE_CTL_DPREF_EN_SHIFT) & HPM_MCACHE_CTL_DPREF_EN_MASK)

/*
 * I-Cache miss allocation filling policy
 *
 * 0 Cache line data is returned critical (double) word first
 * 1 Cache line data is returned the lowest address (double) word first
 */
#define HPM_MCACHE_CTL_IC_FIRST_WORD_SHIFT (0x11UL)
#define HPM_MCACHE_CTL_IC_FIRST_WORD_MASK (0x1UL << HPM_MCACHE_CTL_IC_FIRST_WORD_SHIFT)
#define HPM_MCACHE_CTL_IC_FIRST_WORD(x) \
    (uint32_t)(((x) << HPM_MCACHE_CTL_IC_FIRST_WORD_SHIFT) & HPM_MCACHE_CTL_IC_FIRST_WORD_MASK)

/*
 * D-Cache miss allocation filling policy
 *
 * 0 Cache line data is returned critical (double) word first
 * 1 Cache line data is returned the lowest address (double) word first
 */
#define HPM_MCACHE_CTL_DC_FIRST_WORD_SHIFT (0x12UL)
#define HPM_MCACHE_CTL_DC_FIRST_WORD_MASK (0x1UL << HPM_MCACHE_CTL_DC_FIRST_WORD_SHIFT)
#define HPM_MCACHE_CTL_DC_FIRST_WORD(x) \
    (uint32_t)(((x) << HPM_MCACHE_CTL_DC_FIRST_WORD_SHIFT) & HPM_MCACHE_CTL_DC_FIRST_WORD_MASK)

/*
 * D-Cache Write-Around threshold
 *
 * 0 Disables streaming. All cacheable write misses allocate a cache line
 *   according to PMA settings.
 * 1 Override PMA setting and do not allocate D-Cache entries after
 *   consecutive stores to 4 cache lines.
 * 2 Override PMA setting and do not allocate D-Cache entries after
 *   consecutive stores to 64 cache lines.
 * 3 Override PMA setting and do not allocate D-Cache entries after
 *   consecutive stores to 128 cache lines.
 */
#define HPM_MCACHE_CTL_DC_WAROUND_SHIFT (0x13UL)
#define HPM_MCACHE_CTL_DC_WAROUND_MASK (0x3UL << HPM_MCACHE_CTL_DC_WAROUND_SHIFT)
#define HPM_MCACHE_CTL_DC_WAROUND(x) \
    (uint32_t)(((x) << HPM_MCACHE_CTL_DC_WAROUND_SHIFT) & HPM_MCACHE_CTL_DC_WAROUND_MASK)

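/*
 * The HPM_MCACHE_CTL_x(x) macros above are field encoders: each shifts a raw
 * value into its bit position and masks it to the field width. A minimal
 * sketch of a read-modify-write of mcache_ctl built from them (illustration
 * only, not part of the driver API; which fields are actually writable
 * depends on the SoC):
 *
 *   uint32_t ctl = read_csr(CSR_MCACHE_CTL);
 *   ctl &= ~(uint32_t)(HPM_MCACHE_CTL_IPREF_EN_MASK | HPM_MCACHE_CTL_DC_WAROUND_MASK);
 *   ctl |= HPM_MCACHE_CTL_IPREF_EN(1);
 *   ctl |= HPM_MCACHE_CTL_DC_WAROUND(1);
 *   write_csr(CSR_MCACHE_CTL, ctl);
 */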
/* CCTL command list */
#define HPM_L1C_CCTL_CMD_L1D_VA_INVAL (0UL)
#define HPM_L1C_CCTL_CMD_L1D_VA_WB (1UL)
#define HPM_L1C_CCTL_CMD_L1D_VA_WBINVAL (2UL)
#define HPM_L1C_CCTL_CMD_L1D_VA_LOCK (3UL)
#define HPM_L1C_CCTL_CMD_L1D_VA_UNLOCK (4UL)
#define HPM_L1C_CCTL_CMD_L1D_WBINVAL_ALL (6UL)
#define HPM_L1C_CCTL_CMD_L1D_WB_ALL (7UL)

#define HPM_L1C_CCTL_CMD_L1I_VA_INVAL (8UL)
#define HPM_L1C_CCTL_CMD_L1I_VA_LOCK (11UL)
#define HPM_L1C_CCTL_CMD_L1I_VA_UNLOCK (12UL)

#define HPM_L1C_CCTL_CMD_L1D_IX_INVAL (16UL)
#define HPM_L1C_CCTL_CMD_L1D_IX_WB (17UL)
#define HPM_L1C_CCTL_CMD_L1D_IX_WBINVAL (18UL)

#define HPM_L1C_CCTL_CMD_L1D_IX_RTAG (19UL)
#define HPM_L1C_CCTL_CMD_L1D_IX_RDATA (20UL)
#define HPM_L1C_CCTL_CMD_L1D_IX_WTAG (21UL)
#define HPM_L1C_CCTL_CMD_L1D_IX_WDATA (22UL)

#define HPM_L1C_CCTL_CMD_L1D_INVAL_ALL (23UL)

#define HPM_L1C_CCTL_CMD_L1I_IX_INVAL (24UL)
#define HPM_L1C_CCTL_CMD_L1I_IX_RTAG (27UL)
#define HPM_L1C_CCTL_CMD_L1I_IX_RDATA (28UL)
#define HPM_L1C_CCTL_CMD_L1I_IX_WTAG (29UL)
#define HPM_L1C_CCTL_CMD_L1I_IX_WDATA (30UL)

#define HPM_L1C_CCTL_CMD_SUCCESS (1UL)
#define HPM_L1C_CCTL_CMD_FAIL (0UL)

#ifdef __cplusplus
extern "C" {
#endif
/* get cache control register value */
__attribute__((always_inline)) static inline uint32_t l1c_get_control(void)
{
    return read_csr(CSR_MCACHE_CTL);
}

__attribute__((always_inline)) static inline bool l1c_dc_is_enabled(void)
{
    return l1c_get_control() & HPM_MCACHE_CTL_DC_EN_MASK;
}

__attribute__((always_inline)) static inline bool l1c_ic_is_enabled(void)
{
    return l1c_get_control() & HPM_MCACHE_CTL_IC_EN_MASK;
}
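
/*
 * Usage sketch (assumption, not part of the driver API): cache maintenance is
 * only meaningful while the cache is on, so callers may guard it, e.g.
 *
 *   if (l1c_dc_is_enabled()) {
 *       l1c_dc_writeback_all();
 *   }
 */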

/* mcctlbeginaddress register bitfield layout for CCTL IX type command */
#define HPM_MCCTLBEGINADDR_OFFSET_SHIFT (2UL)
#define HPM_MCCTLBEGINADDR_OFFSET_MASK ((uint32_t) 0xF << HPM_MCCTLBEGINADDR_OFFSET_SHIFT)
#define HPM_MCCTLBEGINADDR_OFFSET(x) \
    (uint32_t)(((x) << HPM_MCCTLBEGINADDR_OFFSET_SHIFT) & HPM_MCCTLBEGINADDR_OFFSET_MASK)
#define HPM_MCCTLBEGINADDR_INDEX_SHIFT (6UL)
#define HPM_MCCTLBEGINADDR_INDEX_MASK ((uint32_t) 0x3F << HPM_MCCTLBEGINADDR_INDEX_SHIFT)
#define HPM_MCCTLBEGINADDR_INDEX(x) \
    (uint32_t)(((x) << HPM_MCCTLBEGINADDR_INDEX_SHIFT) & HPM_MCCTLBEGINADDR_INDEX_MASK)
#define HPM_MCCTLBEGINADDR_WAY_SHIFT (13UL)
#define HPM_MCCTLBEGINADDR_WAY_MASK ((uint32_t) 0x3 << HPM_MCCTLBEGINADDR_WAY_SHIFT)
#define HPM_MCCTLBEGINADDR_WAY(x) \
    (uint32_t)(((x) << HPM_MCCTLBEGINADDR_WAY_SHIFT) & HPM_MCCTLBEGINADDR_WAY_MASK)
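
/*
 * For IX-type commands, mcctlbeginaddr selects a physical cache location
 * rather than a virtual address. A hypothetical target of way 1, set/index
 * 20, word offset 0 would be encoded as (sketch only):
 *
 *   uint32_t ix_addr = HPM_MCCTLBEGINADDR_WAY(1)
 *                    | HPM_MCCTLBEGINADDR_INDEX(20)
 *                    | HPM_MCCTLBEGINADDR_OFFSET(0);
 */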

/* set CCTL begin address */
__attribute__((always_inline)) static inline void l1c_cctl_address(uint32_t address)
{
    write_csr(CSR_MCCTLBEGINADDR, address);
}

/* send CCTL command */
__attribute__((always_inline)) static inline void l1c_cctl_cmd(uint8_t cmd)
{
    write_csr(CSR_MCCTLCOMMAND, cmd);
}

__attribute__((always_inline)) static inline uint32_t l1c_cctl_get_address(void)
{
    return read_csr(CSR_MCCTLBEGINADDR);
}

/* set CCTL begin address, then send command */
__attribute__((always_inline)) static inline
void l1c_cctl_address_cmd(uint8_t cmd, uint32_t address)
{
    write_csr(CSR_MCCTLBEGINADDR, address);
    write_csr(CSR_MCCTLCOMMAND, cmd);
}
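
/*
 * Example (sketch; 'some_func' is a hypothetical cacheline-aligned target):
 * the VA command codes above combine with this helper to operate on a single
 * line, e.g. invalidating one I-cache line by virtual address:
 *
 *   l1c_cctl_address_cmd(HPM_L1C_CCTL_CMD_L1I_VA_INVAL, (uint32_t)&some_func);
 *
 * The range-based wrappers declared later (l1c_dc_invalidate(), ...) apply
 * this pattern line by line over a whole address range.
 */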

#define HPM_MCCTLDATA_I_TAG_ADDRESS_SHIFT (2UL)
#define HPM_MCCTLDATA_I_TAG_ADDRESS_MASK (uint32_t)(0xFFFFF << HPM_MCCTLDATA_I_TAG_ADDRESS_SHIFT)
#define HPM_MCCTLDATA_I_TAG_ADDRESS(x) \
    (uint32_t)(((x) << HPM_MCCTLDATA_I_TAG_ADDRESS_SHIFT) & HPM_MCCTLDATA_I_TAG_ADDRESS_MASK)

#define HPM_MCCTLDATA_I_TAG_LOCK_DUP_SHIFT (29UL)
#define HPM_MCCTLDATA_I_TAG_LOCK_DUP_MASK (uint32_t)(1 << HPM_MCCTLDATA_I_TAG_LOCK_DUP_SHIFT)
#define HPM_MCCTLDATA_I_TAG_LOCK_DUP(x) \
    (uint32_t)(((x) << HPM_MCCTLDATA_I_TAG_LOCK_DUP_SHIFT) & HPM_MCCTLDATA_I_TAG_LOCK_DUP_MASK)

#define HPM_MCCTLDATA_I_TAG_LOCK_SHIFT (30UL)
#define HPM_MCCTLDATA_I_TAG_LOCK_MASK (uint32_t)(1 << HPM_MCCTLDATA_I_TAG_LOCK_SHIFT)
#define HPM_MCCTLDATA_I_TAG_LOCK(x) \
    (uint32_t)(((x) << HPM_MCCTLDATA_I_TAG_LOCK_SHIFT) & HPM_MCCTLDATA_I_TAG_LOCK_MASK)

#define HPM_MCCTLDATA_I_TAG_VALID_SHIFT (31UL)
#define HPM_MCCTLDATA_I_TAG_VALID_MASK (uint32_t)(1 << HPM_MCCTLDATA_I_TAG_VALID_SHIFT)
#define HPM_MCCTLDATA_I_TAG_VALID(x) \
    (uint32_t)(((x) << HPM_MCCTLDATA_I_TAG_VALID_SHIFT) & HPM_MCCTLDATA_I_TAG_VALID_MASK)

#define HPM_MCCTLDATA_D_TAG_MESI_SHIFT (0UL)
#define HPM_MCCTLDATA_D_TAG_MESI_MASK (uint32_t)(0x3 << HPM_MCCTLDATA_D_TAG_MESI_SHIFT)
#define HPM_MCCTLDATA_D_TAG_MESI(x) \
    (uint32_t)(((x) << HPM_MCCTLDATA_D_TAG_MESI_SHIFT) & HPM_MCCTLDATA_D_TAG_MESI_MASK)

#define HPM_MCCTLDATA_D_TAG_LOCK_SHIFT (3UL)
#define HPM_MCCTLDATA_D_TAG_LOCK_MASK (uint32_t)(0x1 << HPM_MCCTLDATA_D_TAG_LOCK_SHIFT)
#define HPM_MCCTLDATA_D_TAG_LOCK(x) \
    (uint32_t)(((x) << HPM_MCCTLDATA_D_TAG_LOCK_SHIFT) & HPM_MCCTLDATA_D_TAG_LOCK_MASK)

#define HPM_MCCTLDATA_D_TAG_TAG_SHIFT (4UL)
#define HPM_MCCTLDATA_D_TAG_TAG_MASK (uint32_t)(0xFFFF << HPM_MCCTLDATA_D_TAG_TAG_SHIFT)
#define HPM_MCCTLDATA_D_TAG_TAG(x) \
    (uint32_t)(((x) << HPM_MCCTLDATA_D_TAG_TAG_SHIFT) & HPM_MCCTLDATA_D_TAG_TAG_MASK)
309
310 /*
311 * @brief Cache control command read address
312 *
313 * Send IX read tag/data cmd
314 * @param[in] cmd Command code
315 * @param[in] address Target address
316 * @param[in] ecc_data ECC value
317 * @return data read
318 */
319 ATTR_ALWAYS_INLINE static inline
l1c_cctl_address_cmd_read(uint8_t cmd,uint32_t address,uint32_t * ecc_data)320 uint32_t l1c_cctl_address_cmd_read(uint8_t cmd, uint32_t address, uint32_t *ecc_data)
321 {
322 write_csr(CSR_MCCTLBEGINADDR, address);
323 write_csr(CSR_MCCTLCOMMAND, cmd);
324 *ecc_data = read_csr(CSR_MECC_CODE);
325 return read_csr(CSR_MCCTLDATA);
326 }
327
328 /*
329 * @brief Cache control command write address
330 *
331 * Send IX write tag/data cmd
332 * @param[in] cmd Command code
333 * @param[in] address Target address
334 * @param[in] data Data to be written
335 * @param[in] ecc_data ECC of data
336 */
337 ATTR_ALWAYS_INLINE static inline
l1c_cctl_address_cmd_write(uint8_t cmd,uint32_t address,uint32_t data,uint32_t ecc_data)338 void l1c_cctl_address_cmd_write(uint8_t cmd, uint32_t address, uint32_t data, uint32_t ecc_data)
339 {
340 write_csr(CSR_MCCTLBEGINADDR, address);
341 write_csr(CSR_MCCTLCOMMAND, cmd);
342 write_csr(CSR_MCCTLDATA, data);
343 write_csr(CSR_MECC_CODE, ecc_data);
344 }
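
/*
 * Sketch of a diagnostic D-cache tag read using the IX command and field
 * macros above (illustrative only; 'way' and 'index' are hypothetical values,
 * and the exact tag layout should be checked against the SoC manual):
 *
 *   uint32_t ecc;
 *   uint32_t addr = HPM_MCCTLBEGINADDR_WAY(way) | HPM_MCCTLBEGINADDR_INDEX(index);
 *   uint32_t tag = l1c_cctl_address_cmd_read(HPM_L1C_CCTL_CMD_L1D_IX_RTAG, addr, &ecc);
 *   bool locked = (tag & HPM_MCCTLDATA_D_TAG_LOCK_MASK) != 0;
 *   uint32_t mesi = (tag & HPM_MCCTLDATA_D_TAG_MESI_MASK) >> HPM_MCCTLDATA_D_TAG_MESI_SHIFT;
 */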

#define HPM_L1C_CFG_SET_SHIFT (0UL)
#define HPM_L1C_CFG_SET_MASK (uint32_t)(0x7 << HPM_L1C_CFG_SET_SHIFT)
#define HPM_L1C_CFG_WAY_SHIFT (3UL)
#define HPM_L1C_CFG_WAY_MASK (uint32_t)(0x7 << HPM_L1C_CFG_WAY_SHIFT)
#define HPM_L1C_CFG_SIZE_SHIFT (6UL)
#define HPM_L1C_CFG_SIZE_MASK (uint32_t)(0x7 << HPM_L1C_CFG_SIZE_SHIFT)
#define HPM_L1C_CFG_LOCK_SHIFT (9UL)
#define HPM_L1C_CFG_LOCK_MASK (uint32_t)(0x1 << HPM_L1C_CFG_LOCK_SHIFT)
#define HPM_L1C_CFG_ECC_SHIFT (10UL)
#define HPM_L1C_CFG_ECC_MASK (uint32_t)(0x3 << HPM_L1C_CFG_ECC_SHIFT)
#define HPM_L1C_CFG_LMB_SHIFT (12UL)
#define HPM_L1C_CFG_LMB_MASK (uint32_t)(0x7 << HPM_L1C_CFG_LMB_SHIFT)
#define HPM_L1C_CFG_LM_SIZE_SHIFT (15UL)
#define HPM_L1C_CFG_LM_SIZE_MASK (uint32_t)(0x1F << HPM_L1C_CFG_LM_SIZE_SHIFT)
#define HPM_L1C_CFG_LM_ECC_SHIFT (21UL)
#define HPM_L1C_CFG_LM_ECC_MASK (uint32_t)(0x3 << HPM_L1C_CFG_LM_ECC_SHIFT)
#define HPM_L1C_CFG_SETH_SHIFT (24UL)
#define HPM_L1C_CFG_SETH_MASK (uint32_t)(0x1 << HPM_L1C_CFG_SETH_SHIFT)

/**
 * @brief Align down based on cache line size
 */
#define HPM_L1C_CACHELINE_ALIGN_DOWN(n) ((uint32_t)(n) & ~(HPM_L1C_CACHELINE_SIZE - 1U))

/**
 * @brief Align up based on cache line size
 */
#define HPM_L1C_CACHELINE_ALIGN_UP(n) HPM_L1C_CACHELINE_ALIGN_DOWN((uint32_t)(n) + HPM_L1C_CACHELINE_SIZE - 1U)
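
/*
 * Cache maintenance operates on whole 64-byte lines, so arbitrary buffer
 * ranges are usually widened to line boundaries first. Sketch ('buf' and
 * 'len' are hypothetical):
 *
 *   uint32_t start = HPM_L1C_CACHELINE_ALIGN_DOWN((uint32_t)buf);
 *   uint32_t end = HPM_L1C_CACHELINE_ALIGN_UP((uint32_t)buf + len);
 *   uint32_t size = end - start;
 */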

/**
 * @brief Get I-cache configuration
 *
 * @return I-cache config register
 */
ATTR_ALWAYS_INLINE static inline uint32_t l1c_ic_get_config(void)
{
    return read_csr(CSR_MICM_CFG);
}

/**
 * @brief Get D-cache configuration
 *
 * @return D-cache config register
 */
ATTR_ALWAYS_INLINE static inline uint32_t l1c_dc_get_config(void)
{
    return read_csr(CSR_MDCM_CFG);
}
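
/*
 * The HPM_L1C_CFG_x masks above apply to the values returned by
 * l1c_ic_get_config()/l1c_dc_get_config(). A minimal sketch of extracting the
 * raw SET/WAY/SIZE fields of the D-cache (how each encoded value maps to an
 * actual geometry should be taken from the SoC/core manual):
 *
 *   uint32_t cfg = l1c_dc_get_config();
 *   uint32_t set_field = (cfg & HPM_L1C_CFG_SET_MASK) >> HPM_L1C_CFG_SET_SHIFT;
 *   uint32_t way_field = (cfg & HPM_L1C_CFG_WAY_MASK) >> HPM_L1C_CFG_WAY_SHIFT;
 *   uint32_t size_field = (cfg & HPM_L1C_CFG_SIZE_MASK) >> HPM_L1C_CFG_SIZE_SHIFT;
 */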

/*
 * @brief D-cache disable
 */
void l1c_dc_disable(void);

/*
 * @brief D-cache enable
 */
void l1c_dc_enable(void);

/*
 * @brief D-cache invalidate by address
 * @param[in] address Start address to be invalidated
 * @param[in] size Size of memory to be invalidated
 */
void l1c_dc_invalidate(uint32_t address, uint32_t size);

/*
 * @brief D-cache writeback by address
 * @param[in] address Start address to be written back
 * @param[in] size Size of memory to be written back
 */
void l1c_dc_writeback(uint32_t address, uint32_t size);

/*
 * @brief D-cache invalidate and writeback by address
 * @param[in] address Start address to be invalidated and written back
 * @param[in] size Size of memory to be invalidated and written back
 */
void l1c_dc_flush(uint32_t address, uint32_t size);

/*
 * @brief D-cache fill and lock by address
 * @param[in] address Start address to be filled and locked
 * @param[in] size Size of memory to be filled and locked
 */
void l1c_dc_fill_lock(uint32_t address, uint32_t size);
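
/*
 * Typical usage sketch around DMA (illustrative only; 'tx_buf' and 'rx_buf'
 * are hypothetical): write back a source buffer before a peripheral reads it,
 * and invalidate a destination buffer after the peripheral has written it, so
 * the core neither feeds stale data to the DMA nor consumes stale cached data.
 *
 *   l1c_dc_writeback((uint32_t)tx_buf, sizeof(tx_buf));
 *   ... start the DMA transfer and wait for completion ...
 *   l1c_dc_invalidate((uint32_t)rx_buf, sizeof(rx_buf));
 */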

/*
 * @brief I-cache disable
 */
void l1c_ic_disable(void);

/*
 * @brief I-cache enable
 */
void l1c_ic_enable(void);

/*
 * @brief I-cache invalidate by address
 * @param[in] address Start address to be invalidated
 * @param[in] size Size of memory to be invalidated
 */
void l1c_ic_invalidate(uint32_t address, uint32_t size);

/*
 * @brief I-cache fill and lock by address
 * @param[in] address Start address to be locked
 * @param[in] size Size of memory to be locked
 */
void l1c_ic_fill_lock(uint32_t address, uint32_t size);

/*
 * @brief Invalidate all I-cache and write back all D-cache
 */
void l1c_fence_i(void);
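
/*
 * Usage sketch (assumption): after copying executable code into RAM, push the
 * new instructions out of the D-cache and drop any stale I-cache contents
 * before jumping to them, e.g.
 *
 *   memcpy(ram_func, flash_image, image_size);
 *   l1c_fence_i();
 *   ((void (*)(void))ram_func)();
 *
 * 'ram_func', 'flash_image' and 'image_size' are hypothetical names.
 */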

/*
 * @brief Invalidate all d-cache
 */
void l1c_dc_invalidate_all(void);

/*
 * @brief Writeback all d-cache
 */
void l1c_dc_writeback_all(void);

/*
 * @brief Flush all d-cache
 */
void l1c_dc_flush_all(void);

#ifdef __cplusplus
}
#endif

/**
 * @}
 */

#endif /* _HPM_L1_CACHE_H */