1 // Copyright 2015-2016 Espressif Systems (Shanghai) PTE LTD
2 //
3 // Licensed under the Apache License, Version 2.0 (the "License");
4 // you may not use this file except in compliance with the License.
5 // You may obtain a copy of the License at
6 //
7 // http://www.apache.org/licenses/LICENSE-2.0
8 //
9 // Unless required by applicable law or agreed to in writing, software
10 // distributed under the License is distributed on an "AS IS" BASIS,
11 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 // See the License for the specific language governing permissions and
13 // limitations under the License.
14
15 #include <stdlib.h>
16 #include <assert.h>
17 #include <string.h>
18 #include <stdio.h>
19
20 #include <esp_osal/esp_osal.h>
21 #include <esp_osal/task.h>
22 #include <esp_osal/semphr.h>
23 #if CONFIG_IDF_TARGET_ESP32
24 #include "soc/dport_reg.h"
25 #include <esp32/rom/spi_flash.h>
26 #include <esp32/rom/cache.h>
27 #elif CONFIG_IDF_TARGET_ESP32S2
28 #include "esp32s2/rom/spi_flash.h"
29 #include "esp32s2/rom/cache.h"
30 #include "soc/extmem_reg.h"
31 #include "soc/cache_memory.h"
32 #elif CONFIG_IDF_TARGET_ESP32S3
33 #include "esp32s3/rom/spi_flash.h"
34 #include "esp32s3/rom/cache.h"
35 #include "soc/extmem_reg.h"
36 #include "soc/cache_memory.h"
37 #elif CONFIG_IDF_TARGET_ESP32C3
38 #include "esp32c3/rom/spi_flash.h"
39 #include "esp32c3/rom/cache.h"
40 #include "soc/extmem_reg.h"
41 #include "soc/cache_memory.h"
42 #endif
43 #include <soc/soc.h>
44 #include "sdkconfig.h"
45 #ifndef CONFIG_FREERTOS_UNICORE
46 #include "esp_ipc.h"
47 #endif
48 #include "esp_attr.h"
49 #include "esp_intr_alloc.h"
50 #include "esp_spi_flash.h"
51 #include "esp_log.h"
52
53 static __attribute__((unused)) const char *TAG = "cache";
54
// Build a DPORT cache register bit name from a CPU prefix (PRO/APP) and a suffix.
#define DPORT_CACHE_BIT(cpuid, regid) DPORT_ ## cpuid ## regid

// Mask covering every cache-mapping bit that is saved/restored in CACHE_CTRL1.
#define DPORT_CACHE_MASK(cpuid) (DPORT_CACHE_BIT(cpuid, _CACHE_MASK_OPSDRAM) | DPORT_CACHE_BIT(cpuid, _CACHE_MASK_DROM0) | \
                                 DPORT_CACHE_BIT(cpuid, _CACHE_MASK_DRAM1) | DPORT_CACHE_BIT(cpuid, _CACHE_MASK_IROM0) | \
                                 DPORT_CACHE_BIT(cpuid, _CACHE_MASK_IRAM1) | DPORT_CACHE_BIT(cpuid, _CACHE_MASK_IRAM0) )

// Default CACHE_CTRL1 value used when no saved state is available.
#define DPORT_CACHE_VAL(cpuid) (~(DPORT_CACHE_BIT(cpuid, _CACHE_MASK_DROM0) | \
                                  DPORT_CACHE_BIT(cpuid, _CACHE_MASK_DRAM1) | \
                                  DPORT_CACHE_BIT(cpuid, _CACHE_MASK_IRAM0)))

// Whole expansion and the `cpuid` argument are parenthesized so these macros
// compose safely inside larger expressions (the previous unparenthesized
// ternary could mis-parse when embedded in arithmetic/logical expressions).
#define DPORT_CACHE_GET_VAL(cpuid) (((cpuid) == 0) ? DPORT_CACHE_VAL(PRO) : DPORT_CACHE_VAL(APP))
#define DPORT_CACHE_GET_MASK(cpuid) (((cpuid) == 0) ? DPORT_CACHE_MASK(PRO) : DPORT_CACHE_MASK(APP))
67
68 static void IRAM_ATTR spi_flash_disable_cache(uint32_t cpuid, uint32_t *saved_state);
69 static void IRAM_ATTR spi_flash_restore_cache(uint32_t cpuid, uint32_t saved_state);
70
71 static uint32_t s_flash_op_cache_state[2];
72
73 #ifndef CONFIG_FREERTOS_UNICORE
74 static SemaphoreHandle_t s_flash_op_mutex;
75 static volatile bool s_flash_op_can_start = false;
76 static volatile bool s_flash_op_complete = false;
77 #ifndef NDEBUG
78 static volatile int s_flash_op_cpu = -1;
79 #endif
80
spi_flash_init_lock(void)81 void spi_flash_init_lock(void)
82 {
83 s_flash_op_mutex = xSemaphoreCreateRecursiveMutex();
84 assert(s_flash_op_mutex != NULL);
85 }
86
/* Acquire the flash-operation mutex (created in spi_flash_init_lock).
 * Recursive, so nested flash API calls from the same task do not deadlock. */
void spi_flash_op_lock(void)
{
    xSemaphoreTakeRecursive(s_flash_op_mutex, portMAX_DELAY);
}
91
/* Release one level of the recursive flash-operation mutex. */
void spi_flash_op_unlock(void)
{
    xSemaphoreGiveRecursive(s_flash_op_mutex);
}
/*
 If you're going to modify this, keep in mind that while the flash caches of the PRO and APP
 CPUs are separate, the PSRAM cache is *not*. If one of the CPUs returns from a flash routine
 with its cache enabled, but the other CPU's cache is not enabled yet, you will have problems
 when accessing PSRAM from the former CPU.
*/
102
/**
 * Runs (via esp_ipc_call) on the *other* CPU while this CPU performs a flash
 * operation. Parks that CPU in an IRAM busy loop with the scheduler and
 * non-IRAM interrupts disabled, then restores everything once the flash
 * operation signals completion through s_flash_op_complete.
 *
 * @param arg CPU id of the CPU this function executes on, cast to void*.
 */
void IRAM_ATTR spi_flash_op_block_func(void *arg)
{
    // Disable scheduler on this CPU
    vTaskSuspendAll();
    // Disable interrupts that aren't located in IRAM
    esp_intr_noniram_disable();
    uint32_t cpuid = (uint32_t) arg;
    // s_flash_op_complete flag is cleared on *this* CPU, otherwise the other
    // CPU may reset the flag back to false before IPC task has a chance to check it
    // (if it is preempted by an ISR taking non-trivial amount of time)
    s_flash_op_complete = false;
    s_flash_op_can_start = true;
    while (!s_flash_op_complete) {
        // busy loop here and wait for the other CPU to finish flash operation
    }
    // Flash operation is complete, re-enable cache
    spi_flash_restore_cache(cpuid, s_flash_op_cache_state[cpuid]);
    // Re-enable interrupts that aren't located in IRAM
    esp_intr_noniram_enable();
    // Re-enable scheduler
    xTaskResumeAll();
}
125
spi_flash_disable_interrupts_caches_and_other_cpu(void)126 void IRAM_ATTR spi_flash_disable_interrupts_caches_and_other_cpu(void)
127 {
128 assert(esp_ptr_in_dram((const void *)get_sp()));
129
130 spi_flash_op_lock();
131
132 const int cpuid = xPortGetCoreID();
133 const uint32_t other_cpuid = (cpuid == 0) ? 1 : 0;
134 #ifndef NDEBUG
135 // For sanity check later: record the CPU which has started doing flash operation
136 assert(s_flash_op_cpu == -1);
137 s_flash_op_cpu = cpuid;
138 #endif
139
140 if (xTaskGetSchedulerState() == taskSCHEDULER_NOT_STARTED) {
141 // Scheduler hasn't been started yet, it means that spi_flash API is being
142 // called from the 2nd stage bootloader or from user_start_cpu0, i.e. from
143 // PRO CPU. APP CPU is either in reset or spinning inside user_start_cpu1,
144 // which is in IRAM. So it is safe to disable cache for the other_cpuid after
145 // esp_intr_noniram_disable.
146 assert(other_cpuid == 1);
147 } else {
148 // Temporarily raise current task priority to prevent a deadlock while
149 // waiting for IPC task to start on the other CPU
150 int old_prio = uxTaskPriorityGet(NULL);
151 vTaskPrioritySet(NULL, configMAX_PRIORITIES - 1);
152 // Signal to the spi_flash_op_block_task on the other CPU that we need it to
153 // disable cache there and block other tasks from executing.
154 s_flash_op_can_start = false;
155 esp_err_t ret = esp_ipc_call(other_cpuid, &spi_flash_op_block_func, (void *) other_cpuid);
156 assert(ret == ESP_OK);
157 while (!s_flash_op_can_start) {
158 // Busy loop and wait for spi_flash_op_block_func to disable cache
159 // on the other CPU
160 }
161 // Disable scheduler on the current CPU
162 vTaskSuspendAll();
163 // Can now set the priority back to the normal one
164 vTaskPrioritySet(NULL, old_prio);
165 // This is guaranteed to run on CPU <cpuid> because the other CPU is now
166 // occupied by highest priority task
167 assert(xPortGetCoreID() == cpuid);
168 }
169 // Kill interrupts that aren't located in IRAM
170 esp_intr_noniram_disable();
171 // This CPU executes this routine, with non-IRAM interrupts and the scheduler
172 // disabled. The other CPU is spinning in the spi_flash_op_block_func task, also
173 // with non-iram interrupts and the scheduler disabled. None of these CPUs will
174 // touch external RAM or flash this way, so we can safely disable caches.
175 spi_flash_disable_cache(cpuid, &s_flash_op_cache_state[cpuid]);
176 spi_flash_disable_cache(other_cpuid, &s_flash_op_cache_state[other_cpuid]);
177 }
178
/**
 * Undo spi_flash_disable_interrupts_caches_and_other_cpu(): restore caches on
 * both CPUs, release the other CPU from its busy loop, re-enable non-IRAM
 * interrupts and the scheduler, and release the flash-op lock.
 * Must be called on the same CPU that performed the disable (asserted below).
 */
void IRAM_ATTR spi_flash_enable_interrupts_caches_and_other_cpu(void)
{
    const int cpuid = xPortGetCoreID();
    const uint32_t other_cpuid = (cpuid == 0) ? 1 : 0;
#ifndef NDEBUG
    // Sanity check: flash operation ends on the same CPU as it has started
    assert(cpuid == s_flash_op_cpu);
    // More sanity check: if scheduler isn't started, only CPU0 can call this.
    assert(!(xTaskGetSchedulerState() == taskSCHEDULER_NOT_STARTED && cpuid != 0));
    s_flash_op_cpu = -1;
#endif

    // Re-enable cache on both CPUs. After this, cache (flash and external RAM) should work again.
    spi_flash_restore_cache(cpuid, s_flash_op_cache_state[cpuid]);
    spi_flash_restore_cache(other_cpuid, s_flash_op_cache_state[other_cpuid]);

    if (xTaskGetSchedulerState() != taskSCHEDULER_NOT_STARTED) {
        // Signal to spi_flash_op_block_task that flash operation is complete
        s_flash_op_complete = true;
    }

    // Re-enable non-iram interrupts
    esp_intr_noniram_enable();

    // Resume tasks on the current CPU, if the scheduler has started.
    // NOTE: enabling non-IRAM interrupts has to happen before this,
    // because once the scheduler has started, due to preemption the
    // current task can end up being moved to the other CPU.
    // But esp_intr_noniram_enable has to be called on the same CPU which
    // called esp_intr_noniram_disable
    if (xTaskGetSchedulerState() != taskSCHEDULER_NOT_STARTED) {
        xTaskResumeAll();
    }
    // Release API lock
    spi_flash_op_unlock();
}
215
/**
 * RTOS-free variant of the disable routine: makes no scheduler/IPC calls and
 * assumes the other CPU has already been halted (e.g. by the panic handler).
 */
void IRAM_ATTR spi_flash_disable_interrupts_caches_and_other_cpu_no_os(void)
{
    const uint32_t cpuid = xPortGetCoreID();
    const uint32_t other_cpuid = (cpuid == 0) ? 1 : 0;

    // do not care about other CPU, it was halted upon entering panic handler
    spi_flash_disable_cache(other_cpuid, &s_flash_op_cache_state[other_cpuid]);
    // Kill interrupts that aren't located in IRAM
    esp_intr_noniram_disable();
    // Disable cache on this CPU as well
    spi_flash_disable_cache(cpuid, &s_flash_op_cache_state[cpuid]);
}
228
/**
 * RTOS-free counterpart of the routine above: restore the cache state saved
 * for this CPU and re-enable non-IRAM interrupts. The other CPU stays halted.
 */
void IRAM_ATTR spi_flash_enable_interrupts_caches_no_os(void)
{
    const uint32_t cpuid = xPortGetCoreID();

    // Re-enable cache on this CPU
    spi_flash_restore_cache(cpuid, s_flash_op_cache_state[cpuid]);
    // Re-enable non-iram interrupts
    esp_intr_noniram_enable();
}
238
239 #else // CONFIG_FREERTOS_UNICORE
240
// No-op on unicore builds: locking is done with the interrupt/task locks
// taken directly in spi_flash_op_lock(), so there is no mutex to create.
void spi_flash_init_lock(void)
{
}
// Saved interrupt state from ArchIntLock(), restored by spi_flash_op_unlock().
static UINT32 flash_op_lock = 0;
// Unicore lock: mask interrupts first, then lock the LiteOS scheduler.
// spi_flash_op_unlock() releases in the reverse order.
void spi_flash_op_lock(void)
{
    flash_op_lock = ArchIntLock();
    LOS_TaskLock();
}
251
// Undo spi_flash_op_lock(): unlock the scheduler, then restore the interrupt
// mask saved at lock time (reverse of the acquisition order).
void spi_flash_op_unlock(void)
{
    LOS_TaskUnlock();
    ArchIntRestore(flash_op_lock);
}
258
259
/**
 * Unicore variant: lock out other tasks, disable non-IRAM interrupts, then
 * disable the single CPU's flash cache (state saved into slot 0).
 */
void IRAM_ATTR spi_flash_disable_interrupts_caches_and_other_cpu(void)
{
    spi_flash_op_lock();
    esp_intr_noniram_disable();
    spi_flash_disable_cache(0, &s_flash_op_cache_state[0]);
}
266
/**
 * Unicore variant: restore the cache, re-enable non-IRAM interrupts and
 * release the lock — the exact reverse order of the disable routine above.
 */
void IRAM_ATTR spi_flash_enable_interrupts_caches_and_other_cpu(void)
{
    spi_flash_restore_cache(0, s_flash_op_cache_state[0]);
    esp_intr_noniram_enable();
    spi_flash_op_unlock();
}
273
/**
 * Unicore, RTOS-free variant: no locking, just disable non-IRAM interrupts
 * and the flash cache. Used when the scheduler cannot be trusted (panic path).
 */
void IRAM_ATTR spi_flash_disable_interrupts_caches_and_other_cpu_no_os(void)
{
    // Kill interrupts that aren't located in IRAM
    esp_intr_noniram_disable();
    // Disable cache on this CPU as well
    spi_flash_disable_cache(0, &s_flash_op_cache_state[0]);
}
281
/**
 * Unicore, RTOS-free counterpart: restore the saved cache state and
 * re-enable non-IRAM interrupts.
 */
void IRAM_ATTR spi_flash_enable_interrupts_caches_no_os(void)
{
    // Re-enable cache on this CPU
    spi_flash_restore_cache(0, s_flash_op_cache_state[0]);
    // Re-enable non-iram interrupts
    esp_intr_noniram_enable();
}
289
290 #endif // CONFIG_FREERTOS_UNICORE
291
292 /**
293 * The following two functions are replacements for Cache_Read_Disable and Cache_Read_Enable
294 * function in ROM. They are used to work around a bug where Cache_Read_Disable requires a call to
295 * Cache_Flush before Cache_Read_Enable, even if cached data was not modified.
296 */
/**
 * Disable the flash cache on CPU `cpuid`, saving enough state into
 * *saved_state for spi_flash_restore_cache() to undo it. On ESP32 this is a
 * ROM Cache_Read_Disable replacement that avoids the mandatory Cache_Flush;
 * on later targets it delegates to the ROM cache-suspend API.
 */
static void IRAM_ATTR spi_flash_disable_cache(uint32_t cpuid, uint32_t *saved_state)
{
#if CONFIG_IDF_TARGET_ESP32
    uint32_t ret = 0;
    const uint32_t cache_mask = DPORT_CACHE_GET_MASK(cpuid);
    if (cpuid == 0) {
        // Save the cache mapping bits before turning the cache off
        ret |= DPORT_GET_PERI_REG_BITS2(DPORT_PRO_CACHE_CTRL1_REG, cache_mask, 0);
        // Wait for the cache state machine to become idle before disabling
        while (DPORT_GET_PERI_REG_BITS2(DPORT_PRO_DCACHE_DBUG0_REG, DPORT_PRO_CACHE_STATE, DPORT_PRO_CACHE_STATE_S) != 1) {
            ;
        }
        DPORT_SET_PERI_REG_BITS(DPORT_PRO_CACHE_CTRL_REG, 1, 0, DPORT_PRO_CACHE_ENABLE_S);
    }
#if !CONFIG_FREERTOS_UNICORE
    else {
        ret |= DPORT_GET_PERI_REG_BITS2(DPORT_APP_CACHE_CTRL1_REG, cache_mask, 0);
        while (DPORT_GET_PERI_REG_BITS2(DPORT_APP_DCACHE_DBUG0_REG, DPORT_APP_CACHE_STATE, DPORT_APP_CACHE_STATE_S) != 1) {
            ;
        }
        DPORT_SET_PERI_REG_BITS(DPORT_APP_CACHE_CTRL_REG, 1, 0, DPORT_APP_CACHE_ENABLE_S);
    }
#endif
    *saved_state = ret;
#elif CONFIG_IDF_TARGET_ESP32S2
    *saved_state = Cache_Suspend_ICache();
#elif CONFIG_IDF_TARGET_ESP32S3
    // Pack both autoload flags into one word: icache in the high half,
    // dcache in the low half (unpacked by spi_flash_restore_cache).
    uint32_t icache_state, dcache_state;
    icache_state = Cache_Suspend_ICache() << 16;
    dcache_state = Cache_Suspend_DCache();
    *saved_state = icache_state | dcache_state;
#elif CONFIG_IDF_TARGET_ESP32C3
    // Only an icache exists; kept in the high half for symmetry with S3.
    uint32_t icache_state;
    icache_state = Cache_Suspend_ICache() << 16;
    *saved_state = icache_state;
#endif
}
332
/**
 * Re-enable the flash cache on CPU `cpuid` using the state previously saved
 * by spi_flash_disable_cache(). Counterpart of the function above; on ESP32
 * it replaces ROM Cache_Read_Enable without requiring a prior Cache_Flush.
 */
static void IRAM_ATTR spi_flash_restore_cache(uint32_t cpuid, uint32_t saved_state)
{
#if CONFIG_IDF_TARGET_ESP32
    const uint32_t cache_mask = DPORT_CACHE_GET_MASK(cpuid);
    if (cpuid == 0) {
        // Turn the cache back on, then restore the saved mapping bits
        DPORT_SET_PERI_REG_BITS(DPORT_PRO_CACHE_CTRL_REG, 1, 1, DPORT_PRO_CACHE_ENABLE_S);
        DPORT_SET_PERI_REG_BITS(DPORT_PRO_CACHE_CTRL1_REG, cache_mask, saved_state, 0);
    }
#if !CONFIG_FREERTOS_UNICORE
    else {
        DPORT_SET_PERI_REG_BITS(DPORT_APP_CACHE_CTRL_REG, 1, 1, DPORT_APP_CACHE_ENABLE_S);
        DPORT_SET_PERI_REG_BITS(DPORT_APP_CACHE_CTRL1_REG, cache_mask, saved_state, 0);
    }
#endif
#elif CONFIG_IDF_TARGET_ESP32S2
    Cache_Resume_ICache(saved_state);
#elif CONFIG_IDF_TARGET_ESP32S3
    // Unpack the two halves saved by spi_flash_disable_cache()
    Cache_Resume_DCache(saved_state & 0xffff);
    Cache_Resume_ICache(saved_state >> 16);
#elif CONFIG_IDF_TARGET_ESP32C3
    Cache_Resume_ICache(saved_state >> 16);
#endif
}
356
spi_flash_cache_enabled(void)357 IRAM_ATTR bool spi_flash_cache_enabled(void)
358 {
359 #if CONFIG_IDF_TARGET_ESP32
360 bool result = (DPORT_REG_GET_BIT(DPORT_PRO_CACHE_CTRL_REG, DPORT_PRO_CACHE_ENABLE) != 0);
361 #if portNUM_PROCESSORS == 2
362 result = result && (DPORT_REG_GET_BIT(DPORT_APP_CACHE_CTRL_REG, DPORT_APP_CACHE_ENABLE) != 0);
363 #endif
364 #elif CONFIG_IDF_TARGET_ESP32S2
365 bool result = (REG_GET_BIT(EXTMEM_PRO_ICACHE_CTRL_REG, EXTMEM_PRO_ICACHE_ENABLE) != 0);
366 #elif CONFIG_IDF_TARGET_ESP32S3 || CONFIG_IDF_TARGET_ESP32C3
367 bool result = (REG_GET_BIT(EXTMEM_ICACHE_CTRL_REG, EXTMEM_ICACHE_ENABLE) != 0);
368 #endif
369 return result;
370 }
371
372 #if CONFIG_IDF_TARGET_ESP32S2
/**
 * Configure the ESP32-S2 instruction cache geometry (size, associativity,
 * line size) from Kconfig, then invalidate and re-enable it.
 * IRAM_ATTR because the icache is briefly suspended while reconfiguring.
 */
IRAM_ATTR void esp_config_instruction_cache_mode(void)
{
    cache_size_t cache_size;
    cache_ways_t cache_ways;
    cache_line_size_t cache_line_size;

#if CONFIG_ESP32S2_INSTRUCTION_CACHE_8KB
    // 8KB: give the icache a single SRAM bank
    Cache_Allocate_SRAM(CACHE_MEMORY_ICACHE_LOW, CACHE_MEMORY_INVALID, CACHE_MEMORY_INVALID, CACHE_MEMORY_INVALID);
    cache_size = CACHE_SIZE_8KB;
#else
    // 16KB: both icache banks
    Cache_Allocate_SRAM(CACHE_MEMORY_ICACHE_LOW, CACHE_MEMORY_ICACHE_HIGH, CACHE_MEMORY_INVALID, CACHE_MEMORY_INVALID);
    cache_size = CACHE_SIZE_16KB;
#endif
    cache_ways = CACHE_4WAYS_ASSOC;
#if CONFIG_ESP32S2_INSTRUCTION_CACHE_LINE_16B
    cache_line_size = CACHE_LINE_SIZE_16B;
#else
    cache_line_size = CACHE_LINE_SIZE_32B;
#endif
    ESP_EARLY_LOGI(TAG, "Instruction cache \t: size %dKB, %dWays, cache line size %dByte", cache_size == CACHE_SIZE_8KB ? 8 : 16, 4, cache_line_size == CACHE_LINE_SIZE_16B ? 16 : 32);
    Cache_Suspend_ICache();
    Cache_Set_ICache_Mode(cache_size, cache_ways, cache_line_size);
    Cache_Invalidate_ICache_All();
    Cache_Resume_ICache(0);
}
398
/**
 * Configure the ESP32-S2 data cache geometry from Kconfig and invalidate it.
 * SRAM bank allocation must also repeat the icache banks, so the layout
 * depends on both the instruction- and data-cache size options.
 * NOTE: unlike the icache routine, this does not enable the dcache here.
 */
IRAM_ATTR void esp_config_data_cache_mode(void)
{
    cache_size_t cache_size;
    cache_ways_t cache_ways;
    cache_line_size_t cache_line_size;

#if CONFIG_ESP32S2_INSTRUCTION_CACHE_8KB
#if CONFIG_ESP32S2_DATA_CACHE_8KB
    Cache_Allocate_SRAM(CACHE_MEMORY_ICACHE_LOW, CACHE_MEMORY_DCACHE_LOW, CACHE_MEMORY_INVALID, CACHE_MEMORY_INVALID);
    cache_size = CACHE_SIZE_8KB;
#else
    Cache_Allocate_SRAM(CACHE_MEMORY_ICACHE_LOW, CACHE_MEMORY_DCACHE_LOW, CACHE_MEMORY_DCACHE_HIGH, CACHE_MEMORY_INVALID);
    cache_size = CACHE_SIZE_16KB;
#endif
#else
#if CONFIG_ESP32S2_DATA_CACHE_8KB
    Cache_Allocate_SRAM(CACHE_MEMORY_ICACHE_LOW, CACHE_MEMORY_ICACHE_HIGH, CACHE_MEMORY_DCACHE_LOW, CACHE_MEMORY_INVALID);
    cache_size = CACHE_SIZE_8KB;
#else
    Cache_Allocate_SRAM(CACHE_MEMORY_ICACHE_LOW, CACHE_MEMORY_ICACHE_HIGH, CACHE_MEMORY_DCACHE_LOW, CACHE_MEMORY_DCACHE_HIGH);
    cache_size = CACHE_SIZE_16KB;
#endif
#endif

    cache_ways = CACHE_4WAYS_ASSOC;
#if CONFIG_ESP32S2_DATA_CACHE_LINE_16B
    cache_line_size = CACHE_LINE_SIZE_16B;
#else
    cache_line_size = CACHE_LINE_SIZE_32B;
#endif
    ESP_EARLY_LOGI(TAG, "Data cache \t\t: size %dKB, %dWays, cache line size %dByte", cache_size == CACHE_SIZE_8KB ? 8 : 16, 4, cache_line_size == CACHE_LINE_SIZE_16B ? 16 : 32);
    Cache_Set_DCache_Mode(cache_size, cache_ways, cache_line_size);
    Cache_Invalidate_DCache_All();
}
433
esp_enable_cache_flash_wrap(bool icache,bool dcache)434 static IRAM_ATTR void esp_enable_cache_flash_wrap(bool icache, bool dcache)
435 {
436 uint32_t i_autoload, d_autoload;
437 if (icache) {
438 i_autoload = Cache_Suspend_ICache();
439 }
440 if (dcache) {
441 d_autoload = Cache_Suspend_DCache();
442 }
443 REG_SET_BIT(EXTMEM_PRO_CACHE_WRAP_AROUND_CTRL_REG, EXTMEM_PRO_CACHE_FLASH_WRAP_AROUND);
444 if (icache) {
445 Cache_Resume_ICache(i_autoload);
446 }
447 if (dcache) {
448 Cache_Resume_DCache(d_autoload);
449 }
450 }
451
452 #if CONFIG_ESP32S2_SPIRAM_SUPPORT
esp_enable_cache_spiram_wrap(bool icache,bool dcache)453 static IRAM_ATTR void esp_enable_cache_spiram_wrap(bool icache, bool dcache)
454 {
455 uint32_t i_autoload, d_autoload;
456 if (icache) {
457 i_autoload = Cache_Suspend_ICache();
458 }
459 if (dcache) {
460 d_autoload = Cache_Suspend_DCache();
461 }
462 REG_SET_BIT(EXTMEM_PRO_CACHE_WRAP_AROUND_CTRL_REG, EXTMEM_PRO_CACHE_SRAM_RD_WRAP_AROUND);
463 if (icache) {
464 Cache_Resume_ICache(i_autoload);
465 }
466 if (dcache) {
467 Cache_Resume_DCache(d_autoload);
468 }
469 }
470 #endif
471
/**
 * Configure flash / PSRAM wrap-around (burst read) mode for the ESP32-S2.
 *
 * Index 0 of the per-port arrays below refers to the icache port and index 1
 * to the dcache port; -1 marks "port not used for that memory". The function
 * works out which memory (flash or SPIRAM) backs each port, derives the wrap
 * size each memory must support (equal to the cache line size), rejects
 * inconsistent combinations, and finally programs the memory devices and the
 * cache controller.
 *
 * @param icache_wrap_enable enable wrap for instruction-cache fetches
 * @param dcache_wrap_enable enable wrap for data-cache fetches
 * @return ESP_OK on success, ESP_FAIL if the combination is unsupported
 */
esp_err_t esp_enable_cache_wrap(bool icache_wrap_enable, bool dcache_wrap_enable)
{
    int icache_wrap_size = 0, dcache_wrap_size = 0;
    int flash_wrap_sizes[2] = {-1, -1}, spiram_wrap_sizes[2] = {-1, -1};
    int flash_wrap_size = 0, spiram_wrap_size = 0;
    int flash_count = 0, spiram_count = 0;
    int i;
    bool flash_spiram_wrap_together, flash_support_wrap = true, spiram_support_wrap = true;
    uint32_t drom0_in_icache = 1;//always 1 in esp32s2
#if CONFIG_IDF_TARGET_ESP32S3 || CONFIG_IDF_TARGET_ESP32C3
    drom0_in_icache = 0;
#endif

    // Wrap size must match the configured cache line size
    if (icache_wrap_enable) {
#if CONFIG_ESP32S2_INSTRUCTION_CACHE_LINE_16B || CONFIG_ESP32S3_INSTRUCTION_CACHE_LINE_16B || CONFIG_ESP32C3_INSTRUCTION_CACHE_LINE_16B
        icache_wrap_size = 16;
#else
        icache_wrap_size = 32;
#endif
    }
    if (dcache_wrap_enable) {
        // NOTE(review): the C3 macro below says INSTRUCTION_CACHE rather than
        // DATA_CACHE — looks like a copy/paste slip; harmless for the S2
        // build of this function, but worth confirming upstream.
#if CONFIG_ESP32S2_DATA_CACHE_LINE_16B || CONFIG_ESP32S3_DATA_CACHE_LINE_16B || CONFIG_ESP32C3_INSTRUCTION_CACHE_LINE_16B
        dcache_wrap_size = 16;
#else
        dcache_wrap_size = 32;
#endif
    }

    // Determine which memory backs instructions and rodata
    uint32_t instruction_use_spiram = 0;
    uint32_t rodata_use_spiram = 0;
#if CONFIG_SPIRAM_FETCH_INSTRUCTIONS
    extern uint32_t esp_spiram_instruction_access_enabled(void);
    instruction_use_spiram = esp_spiram_instruction_access_enabled();
#endif
#if CONFIG_SPIRAM_RODATA
    extern uint32_t esp_spiram_rodata_access_enabled(void);
    rodata_use_spiram = esp_spiram_rodata_access_enabled();
#endif

    if (instruction_use_spiram) {
        spiram_wrap_sizes[0] = icache_wrap_size;
    } else {
        flash_wrap_sizes[0] = icache_wrap_size;
    }
    if (rodata_use_spiram) {
        if (drom0_in_icache) {
            spiram_wrap_sizes[0] = icache_wrap_size;
        } else {
            spiram_wrap_sizes[1] = dcache_wrap_size;
            flash_wrap_sizes[1] = dcache_wrap_size;
        }
#ifdef CONFIG_EXT_RODATA_SUPPORT
        spiram_wrap_sizes[1] = dcache_wrap_size;
#endif
    } else {
        if (drom0_in_icache) {
            flash_wrap_sizes[0] = icache_wrap_size;
        } else {
            flash_wrap_sizes[1] = dcache_wrap_size;
        }
#ifdef CONFIG_EXT_RODATA_SUPPORT
        flash_wrap_sizes[1] = dcache_wrap_size;
#endif
    }
#ifdef CONFIG_ESP32S2_SPIRAM_SUPPORT
    // PSRAM data accesses always go through the dcache port
    spiram_wrap_sizes[1] = dcache_wrap_size;
#endif
    // Collapse the per-port arrays into one wrap size per memory and count
    // how many ports use each memory
    for (i = 0; i < 2; i++) {
        if (flash_wrap_sizes[i] != -1) {
            flash_count++;
            flash_wrap_size = flash_wrap_sizes[i];
        }
    }
    for (i = 0; i < 2; i++) {
        if (spiram_wrap_sizes[i] != -1) {
            spiram_count++;
            spiram_wrap_size = spiram_wrap_sizes[i];
        }
    }
    // More than two port/memory pairings means flash and SPIRAM share a
    // cache port, so their wrap settings must agree
    if (flash_count + spiram_count <= 2) {
        flash_spiram_wrap_together = false;
    } else {
        flash_spiram_wrap_together = true;
    }
    ESP_EARLY_LOGI(TAG, "flash_count=%d, size=%d, spiram_count=%d, size=%d,together=%d", flash_count, flash_wrap_size, spiram_count, spiram_wrap_size, flash_spiram_wrap_together);
    if (flash_count > 1 && flash_wrap_sizes[0] != flash_wrap_sizes[1]) {
        ESP_EARLY_LOGW(TAG, "Flash wrap with different length %d and %d, abort wrap.", flash_wrap_sizes[0], flash_wrap_sizes[1]);
        if (spiram_wrap_size == 0) {
            return ESP_FAIL;
        }
        if (flash_spiram_wrap_together) {
            ESP_EARLY_LOGE(TAG, "Abort spiram wrap because flash wrap length not fixed.");
            return ESP_FAIL;
        }
    }
    if (spiram_count > 1 && spiram_wrap_sizes[0] != spiram_wrap_sizes[1]) {
        ESP_EARLY_LOGW(TAG, "SPIRAM wrap with different length %d and %d, abort wrap.", spiram_wrap_sizes[0], spiram_wrap_sizes[1]);
        if (flash_wrap_size == 0) {
            return ESP_FAIL;
        }
        if (flash_spiram_wrap_together) {
            ESP_EARLY_LOGW(TAG, "Abort flash wrap because spiram wrap length not fixed.");
            return ESP_FAIL;
        }
    }

    if (flash_spiram_wrap_together && flash_wrap_size != spiram_wrap_size) {
        ESP_EARLY_LOGW(TAG, "SPIRAM has different wrap length with flash, %d and %d, abort wrap.", spiram_wrap_size, flash_wrap_size);
        return ESP_FAIL;
    }

    // Wrap commands are only available when the flash runs in QIO mode
#ifdef CONFIG_FLASHMODE_QIO
    flash_support_wrap = true;
    extern bool spi_flash_support_wrap_size(uint32_t wrap_size);
    if (!spi_flash_support_wrap_size(flash_wrap_size)) {
        flash_support_wrap = false;
        ESP_EARLY_LOGW(TAG, "Flash do not support wrap size %d.", flash_wrap_size);
    }
#else
    ESP_EARLY_LOGW(TAG, "Flash is not in QIO mode, do not support wrap.");
#endif

#ifdef CONFIG_ESP32S2_SPIRAM_SUPPORT
    extern bool psram_support_wrap_size(uint32_t wrap_size);
    if (!psram_support_wrap_size(spiram_wrap_size)) {
        spiram_support_wrap = false;
        ESP_EARLY_LOGW(TAG, "SPIRAM do not support wrap size %d.", spiram_wrap_size);
    }
#endif

    if (flash_spiram_wrap_together && !(flash_support_wrap && spiram_support_wrap)) {
        ESP_EARLY_LOGW(TAG, "Flash and SPIRAM should support wrap together.");
        return ESP_FAIL;
    }

    // Apply: program the memory device first, then the cache controller
    extern esp_err_t spi_flash_enable_wrap(uint32_t wrap_size);
    if (flash_support_wrap && flash_wrap_size > 0) {
        ESP_EARLY_LOGI(TAG, "Flash wrap enabled, size = %d.", flash_wrap_size);
        spi_flash_enable_wrap(flash_wrap_size);
        esp_enable_cache_flash_wrap((flash_wrap_sizes[0] > 0), (flash_wrap_sizes[1] > 0));
    }
#if CONFIG_ESP32S2_SPIRAM_SUPPORT
    extern esp_err_t psram_enable_wrap(uint32_t wrap_size);
    if (spiram_support_wrap && spiram_wrap_size > 0) {
        ESP_EARLY_LOGI(TAG, "SPIRAM wrap enabled, size = %d.", spiram_wrap_size);
        psram_enable_wrap(spiram_wrap_size);
        esp_enable_cache_spiram_wrap((spiram_wrap_sizes[0] > 0), (spiram_wrap_sizes[1] > 0));
    }
#endif

    return ESP_OK;

}
625 #endif
626 #if CONFIG_IDF_TARGET_ESP32S3
/**
 * Configure the ESP32-S3 instruction cache geometry (size, associativity,
 * line size) from Kconfig, then invalidate and enable it.
 */
IRAM_ATTR void esp_config_instruction_cache_mode(void)
{
    cache_size_t cache_size;
    cache_ways_t cache_ways;
    cache_line_size_t cache_line_size;

#if CONFIG_ESP32S3_INSTRUCTION_CACHE_16KB
    // Half size: occupy only instruction bank 0
    Cache_Occupy_ICache_MEMORY(CACHE_MEMORY_IBANK0, CACHE_MEMORY_INVALID);
    cache_size = CACHE_SIZE_HALF;
#else
    Cache_Occupy_ICache_MEMORY(CACHE_MEMORY_IBANK0, CACHE_MEMORY_IBANK1);
    cache_size = CACHE_SIZE_FULL;
#endif
#if CONFIG_ESP32S3_INSTRUCTION_CACHE_4WAYS
    cache_ways = CACHE_4WAYS_ASSOC;
#else
    cache_ways = CACHE_8WAYS_ASSOC;
#endif
#if CONFIG_ESP32S3_INSTRUCTION_CACHE_LINE_16B
    cache_line_size = CACHE_LINE_SIZE_16B;
#elif CONFIG_ESP32S3_INSTRUCTION_CACHE_LINE_32B
    cache_line_size = CACHE_LINE_SIZE_32B;
#else
    cache_line_size = CACHE_LINE_SIZE_64B;
#endif
    ESP_EARLY_LOGI(TAG, "Instruction cache: size %dKB, %dWays, cache line size %dByte", cache_size == CACHE_SIZE_HALF ? 16 : 32, cache_ways == CACHE_4WAYS_ASSOC ? 4 : 8, cache_line_size == CACHE_LINE_SIZE_16B ? 16 : (cache_line_size == CACHE_LINE_SIZE_32B ? 32 : 64));
    Cache_Set_ICache_Mode(cache_size, cache_ways, cache_line_size);
    Cache_Invalidate_ICache_All();
    // Prototype not in the ROM header for this target, declared locally
    extern void Cache_Enable_ICache(uint32_t autoload);
    Cache_Enable_ICache(0);
}
658
/**
 * Configure the ESP32-S3 data cache geometry from Kconfig and invalidate it.
 * NOTE: the dcache is not enabled here (mirrors the S2 routine above).
 */
IRAM_ATTR void esp_config_data_cache_mode(void)
{
    cache_size_t cache_size;
    cache_ways_t cache_ways;
    cache_line_size_t cache_line_size;

#if CONFIG_ESP32S3_DATA_CACHE_32KB
    // Half size: occupy only data bank 1
    Cache_Occupy_DCache_MEMORY(CACHE_MEMORY_DBANK1, CACHE_MEMORY_INVALID);
    cache_size = CACHE_SIZE_HALF;
#else
    Cache_Occupy_DCache_MEMORY(CACHE_MEMORY_DBANK0, CACHE_MEMORY_DBANK1);
    cache_size = CACHE_SIZE_FULL;
#endif
#if CONFIG_ESP32S3_DATA_CACHE_4WAYS
    cache_ways = CACHE_4WAYS_ASSOC;
#else
    cache_ways = CACHE_8WAYS_ASSOC;
#endif
#if CONFIG_ESP32S3_DATA_CACHE_LINE_16B
    cache_line_size = CACHE_LINE_SIZE_16B;
#elif CONFIG_ESP32S3_DATA_CACHE_LINE_32B
    cache_line_size = CACHE_LINE_SIZE_32B;
#else
    cache_line_size = CACHE_LINE_SIZE_64B;
#endif
    // ESP_EARLY_LOGI(TAG, "Data cache: size %dKB, %dWays, cache line size %dByte", cache_size == CACHE_SIZE_HALF ? 32 : 64, cache_ways == CACHE_4WAYS_ASSOC ? 4 : 8, cache_line_size == CACHE_LINE_SIZE_16B ? 16 : (cache_line_size == CACHE_LINE_SIZE_32B ? 32 : 64));
    Cache_Set_DCache_Mode(cache_size, cache_ways, cache_line_size);
    Cache_Invalidate_DCache_All();
}
688
esp_enable_cache_flash_wrap(bool icache,bool dcache)689 static IRAM_ATTR void esp_enable_cache_flash_wrap(bool icache, bool dcache)
690 {
691 uint32_t i_autoload, d_autoload;
692 if (icache) {
693 i_autoload = Cache_Suspend_ICache();
694 }
695 if (dcache) {
696 d_autoload = Cache_Suspend_DCache();
697 }
698 REG_SET_BIT(EXTMEM_CACHE_WRAP_AROUND_CTRL_REG, EXTMEM_CACHE_FLASH_WRAP_AROUND);
699 if (icache) {
700 Cache_Resume_ICache(i_autoload);
701 }
702 if (dcache) {
703 Cache_Resume_DCache(d_autoload);
704 }
705 }
706
707 #if CONFIG_ESP32S3_SPIRAM_SUPPORT
esp_enable_cache_spiram_wrap(bool icache,bool dcache)708 static IRAM_ATTR void esp_enable_cache_spiram_wrap(bool icache, bool dcache)
709 {
710 uint32_t i_autoload, d_autoload;
711 if (icache) {
712 i_autoload = Cache_Suspend_ICache();
713 }
714 if (dcache) {
715 d_autoload = Cache_Suspend_DCache();
716 }
717 REG_SET_BIT(EXTMEM_CACHE_WRAP_AROUND_CTRL_REG, EXTMEM_CACHE_SRAM_RD_WRAP_AROUND);
718 if (icache) {
719 Cache_Resume_ICache(i_autoload);
720 }
721 if (dcache) {
722 Cache_Resume_DCache(d_autoload);
723 }
724 }
725 #endif
726
/**
 * Configure flash / PSRAM wrap-around (burst read) mode for the ESP32-S3.
 *
 * Same structure as the S2 version above: index 0 of the per-port arrays is
 * the icache port, index 1 the dcache port, -1 means "unused"; rodata always
 * goes through the dcache on this target (drom0_in_icache == 0).
 *
 * @param icache_wrap_enable enable wrap for instruction-cache fetches
 * @param dcache_wrap_enable enable wrap for data-cache fetches
 * @return ESP_OK on success, ESP_FAIL if the combination is unsupported
 */
esp_err_t esp_enable_cache_wrap(bool icache_wrap_enable, bool dcache_wrap_enable)
{
    int icache_wrap_size = 0, dcache_wrap_size = 0;
    int flash_wrap_sizes[2] = {-1, -1}, spiram_wrap_sizes[2] = {-1, -1};
    int flash_wrap_size = 0, spiram_wrap_size = 0;
    int flash_count = 0, spiram_count = 0;
    int i;
    bool flash_spiram_wrap_together, flash_support_wrap = false, spiram_support_wrap = true;
    uint32_t drom0_in_icache = 0;//always 0 in chip7.2.4

    // Wrap size must match the configured cache line size
    if (icache_wrap_enable) {
#if CONFIG_ESP32S3_INSTRUCTION_CACHE_LINE_16B
        icache_wrap_size = 16;
#elif CONFIG_ESP32S3_INSTRUCTION_CACHE_LINE_32B
        icache_wrap_size = 32;
#else
        icache_wrap_size = 64;
#endif
    }
    if (dcache_wrap_enable) {
#if CONFIG_ESP32S3_DATA_CACHE_LINE_16B
        dcache_wrap_size = 16;
#elif CONFIG_ESP32S3_DATA_CACHE_LINE_32B
        dcache_wrap_size = 32;
#else
        dcache_wrap_size = 64;
#endif
    }

    // Determine which memory backs instructions and rodata
    uint32_t instruction_use_spiram = 0;
    uint32_t rodata_use_spiram = 0;
#if CONFIG_SPIRAM_FETCH_INSTRUCTIONS
    extern uint32_t esp_spiram_instruction_access_enabled();
    instruction_use_spiram = esp_spiram_instruction_access_enabled();
#endif
#if CONFIG_SPIRAM_RODATA
    extern uint32_t esp_spiram_rodata_access_enabled();
    rodata_use_spiram = esp_spiram_rodata_access_enabled();
#endif

    if (instruction_use_spiram) {
        spiram_wrap_sizes[0] = icache_wrap_size;
    } else {
        flash_wrap_sizes[0] = icache_wrap_size;
    }
    if (rodata_use_spiram) {
        if (drom0_in_icache) {
            spiram_wrap_sizes[0] = icache_wrap_size;
        } else {
            spiram_wrap_sizes[1] = dcache_wrap_size;
        }
#ifdef CONFIG_EXT_RODATA_SUPPORT
        spiram_wrap_sizes[1] = dcache_wrap_size;
#endif
    } else {
        if (drom0_in_icache) {
            flash_wrap_sizes[0] = icache_wrap_size;
        } else {
            flash_wrap_sizes[1] = dcache_wrap_size;
        }
#ifdef CONFIG_EXT_RODATA_SUPPORT
        flash_wrap_sizes[1] = dcache_wrap_size;
#endif
    }
#ifdef CONFIG_ESP32S3_SPIRAM_SUPPORT
    // PSRAM data accesses always go through the dcache port
    spiram_wrap_sizes[1] = dcache_wrap_size;
#endif
    // Collapse the per-port arrays into one wrap size per memory and count
    // how many ports use each memory
    for (i = 0; i < 2; i++) {
        if (flash_wrap_sizes[i] != -1) {
            flash_count++;
            flash_wrap_size = flash_wrap_sizes[i];
        }
    }
    for (i = 0; i < 2; i++) {
        if (spiram_wrap_sizes[i] != -1) {
            spiram_count++;
            spiram_wrap_size = spiram_wrap_sizes[i];
        }
    }
    // More than two port/memory pairings means flash and SPIRAM share a
    // cache port, so their wrap settings must agree
    if (flash_count + spiram_count <= 2) {
        flash_spiram_wrap_together = false;
    } else {
        flash_spiram_wrap_together = true;
    }
    if (flash_count > 1 && flash_wrap_sizes[0] != flash_wrap_sizes[1]) {
        ESP_EARLY_LOGW(TAG, "Flash wrap with different length %d and %d, abort wrap.", flash_wrap_sizes[0], flash_wrap_sizes[1]);
        if (spiram_wrap_size == 0) {
            return ESP_FAIL;
        }
        if (flash_spiram_wrap_together) {
            ESP_EARLY_LOGE(TAG, "Abort spiram wrap because flash wrap length not fixed.");
            return ESP_FAIL;
        }
    }
    if (spiram_count > 1 && spiram_wrap_sizes[0] != spiram_wrap_sizes[1]) {
        ESP_EARLY_LOGW(TAG, "SPIRAM wrap with different length %d and %d, abort wrap.", spiram_wrap_sizes[0], spiram_wrap_sizes[1]);
        if (flash_wrap_size == 0) {
            return ESP_FAIL;
        }
        if (flash_spiram_wrap_together) {
            ESP_EARLY_LOGW(TAG, "Abort flash wrap because spiram wrap length not fixed.");
            return ESP_FAIL;
        }
    }

    if (flash_spiram_wrap_together && flash_wrap_size != spiram_wrap_size) {
        ESP_EARLY_LOGW(TAG, "SPIRAM has different wrap length with flash, %d and %d, abort wrap.", spiram_wrap_size, flash_wrap_size);
        return ESP_FAIL;
    }

    // Wrap commands are only available when the flash runs in QIO mode
#ifdef CONFIG_FLASHMODE_QIO
    flash_support_wrap = true;
    extern bool spi_flash_support_wrap_size(uint32_t wrap_size);
    if (!spi_flash_support_wrap_size(flash_wrap_size)) {
        flash_support_wrap = false;
        ESP_EARLY_LOGW(TAG, "Flash do not support wrap size %d.", flash_wrap_size);
    }
#else
    ESP_EARLY_LOGW(TAG, "Flash is not in QIO mode, do not support wrap.");
#endif


#ifdef CONFIG_ESP32S3_SPIRAM_SUPPORT
    extern bool psram_support_wrap_size(uint32_t wrap_size);
    if (!psram_support_wrap_size(spiram_wrap_size)) {
        spiram_support_wrap = false;
        ESP_EARLY_LOGW(TAG, "SPIRAM do not support wrap size %d.", spiram_wrap_size);
    }
#endif

    if (flash_spiram_wrap_together && !(flash_support_wrap && spiram_support_wrap)) {
        ESP_EARLY_LOGW(TAG, "Flash and SPIRAM should support wrap together.");
        return ESP_FAIL;
    }

    // Apply: program the memory device first, then the cache controller
    extern esp_err_t spi_flash_enable_wrap(uint32_t wrap_size);
    if (flash_support_wrap && flash_wrap_size > 0) {
        ESP_EARLY_LOGI(TAG, "Flash wrap enabled, size = %d.", flash_wrap_size);
        spi_flash_enable_wrap(flash_wrap_size);
        esp_enable_cache_flash_wrap((flash_wrap_sizes[0] > 0), (flash_wrap_sizes[1] > 0));
    }
#if CONFIG_ESP32S3_SPIRAM_SUPPORT
    extern esp_err_t psram_enable_wrap(uint32_t wrap_size);
    if (spiram_support_wrap && spiram_wrap_size > 0) {
        ESP_EARLY_LOGI(TAG, "SPIRAM wrap enabled, size = %d.", spiram_wrap_size);
        psram_enable_wrap(spiram_wrap_size);
        esp_enable_cache_spiram_wrap((spiram_wrap_sizes[0] > 0), (spiram_wrap_sizes[1] > 0));
    }
#endif

    return ESP_OK;

}
880 #endif
881
882 #if CONFIG_IDF_TARGET_ESP32C3
883
esp_enable_cache_flash_wrap(bool icache)884 static IRAM_ATTR void esp_enable_cache_flash_wrap(bool icache)
885 {
886 uint32_t i_autoload;
887 if (icache) {
888 i_autoload = Cache_Suspend_ICache();
889 }
890 REG_SET_BIT(EXTMEM_CACHE_WRAP_AROUND_CTRL_REG, EXTMEM_CACHE_FLASH_WRAP_AROUND);
891 if (icache) {
892 Cache_Resume_ICache(i_autoload);
893 }
894 }
895
esp_enable_cache_wrap(bool icache_wrap_enable)896 esp_err_t esp_enable_cache_wrap(bool icache_wrap_enable)
897 {
898 int flash_wrap_size = 0;
899 bool flash_support_wrap = false;
900
901 if (icache_wrap_enable) {
902 flash_wrap_size = 32;
903 }
904
905 #ifdef CONFIG_FLASHMODE_QIO
906 flash_support_wrap = true;
907 extern bool spi_flash_support_wrap_size(uint32_t wrap_size);
908 if (!spi_flash_support_wrap_size(flash_wrap_size)) {
909 flash_support_wrap = false;
910 ESP_EARLY_LOGW(TAG, "Flash do not support wrap size %d.", flash_wrap_size);
911 }
912 #else
913 ESP_EARLY_LOGW(TAG, "Flash is not in QIO mode, do not support wrap.");
914 #endif // CONFIG_FLASHMODE_QIO
915
916 extern esp_err_t spi_flash_enable_wrap(uint32_t wrap_size);
917 if (flash_support_wrap && flash_wrap_size > 0) {
918 ESP_EARLY_LOGI(TAG, "Flash wrap enabled, size = %d.", flash_wrap_size);
919 spi_flash_enable_wrap(flash_wrap_size);
920 esp_enable_cache_flash_wrap((flash_wrap_size > 0));
921 }
922 return ESP_OK;
923 }
924 #endif // CONFIG_IDF_TARGET_ESP32C3
925
/**
 * Re-enable the flash cache for the given CPU using a default register value
 * rather than any previously saved state (ESP32 only; other targets restore
 * with a zero placeholder — see TODO below).
 */
void IRAM_ATTR spi_flash_enable_cache(uint32_t cpuid)
{
#if CONFIG_IDF_TARGET_ESP32
    uint32_t cache_value = DPORT_CACHE_GET_VAL(cpuid);
    cache_value &= DPORT_CACHE_GET_MASK(cpuid);

    // Re-enable cache on this CPU
    spi_flash_restore_cache(cpuid, cache_value);
#else
    spi_flash_restore_cache(0, 0); // TODO cache_value should be non-zero
#endif
}
938