1 /*
2 * SPDX-FileCopyrightText: 2015-2021 Espressif Systems (Shanghai) CO LTD
3 *
4 * SPDX-License-Identifier: Apache-2.0
5 */
6
7 #include <stdlib.h>
8 #include <assert.h>
9 #include <string.h>
10 #include <stdio.h>
11
12 #include <freertos/FreeRTOS.h>
13 #include <freertos/task.h>
14 #include <freertos/semphr.h>
15 #if CONFIG_IDF_TARGET_ESP32
16 #include "soc/dport_reg.h"
17 #include <esp32/rom/spi_flash.h>
18 #include <esp32/rom/cache.h>
19 #elif CONFIG_IDF_TARGET_ESP32S2
20 #include "esp32s2/rom/spi_flash.h"
21 #include "esp32s2/rom/cache.h"
22 #include "soc/extmem_reg.h"
23 #include "soc/cache_memory.h"
24 #elif CONFIG_IDF_TARGET_ESP32S3
25 #include "esp32s3/rom/spi_flash.h"
26 #include "esp32s3/rom/cache.h"
27 #include "soc/extmem_reg.h"
28 #include "soc/cache_memory.h"
29 #elif CONFIG_IDF_TARGET_ESP32C3
30 #include "esp32c3/rom/spi_flash.h"
31 #include "esp32c3/rom/cache.h"
32 #include "soc/extmem_reg.h"
33 #include "soc/cache_memory.h"
34 #elif CONFIG_IDF_TARGET_ESP32H2
35 #include "esp32h2/rom/spi_flash.h"
36 #include "esp32h2/rom/cache.h"
37 #include "soc/extmem_reg.h"
38 #include "soc/cache_memory.h"
39 #endif
40 #include <soc/soc.h>
41 #include "sdkconfig.h"
42 #ifndef CONFIG_FREERTOS_UNICORE
43 #include "esp_ipc.h"
44 #endif
45 #include "esp_attr.h"
46 #include "esp_intr_alloc.h"
47 #include "esp_spi_flash.h"
48 #include "esp_log.h"
49
// Log tag. Marked unused because on some targets/configs every log call in this
// file is compiled out.
static __attribute__((unused)) const char *TAG = "cache";

// Token-pasting helper: expands to the DPORT_<PRO|APP><regid> register-field macro.
#define DPORT_CACHE_BIT(cpuid, regid) DPORT_ ## cpuid ## regid

// All cache region mask bits in the PRO/APP CPU's CACHE_CTRL1 register.
#define DPORT_CACHE_MASK(cpuid) (DPORT_CACHE_BIT(cpuid, _CACHE_MASK_OPSDRAM) | DPORT_CACHE_BIT(cpuid, _CACHE_MASK_DROM0) | \
                                 DPORT_CACHE_BIT(cpuid, _CACHE_MASK_DRAM1) | DPORT_CACHE_BIT(cpuid, _CACHE_MASK_IROM0) | \
                                 DPORT_CACHE_BIT(cpuid, _CACHE_MASK_IRAM1) | DPORT_CACHE_BIT(cpuid, _CACHE_MASK_IRAM0) )

// Default CACHE_CTRL1 value: DROM0/DRAM1/IRAM0 mask bits cleared (those regions
// cached), everything else set. NOTE(review): presumably the mask bits disable
// caching of a region when set — confirm against the ESP32 TRM DPORT chapter.
#define DPORT_CACHE_VAL(cpuid) (~(DPORT_CACHE_BIT(cpuid, _CACHE_MASK_DROM0) | \
                                  DPORT_CACHE_BIT(cpuid, _CACHE_MASK_DRAM1) | \
                                  DPORT_CACHE_BIT(cpuid, _CACHE_MASK_IRAM0)))
61
// Select the PRO or APP CPU variant of the cache value/mask for a runtime CPU id.
// The argument and the whole expansion are parenthesized so the macros expand
// safely inside larger expressions (the original unparenthesized ternary could
// mis-parse if combined with lower-precedence operators at the use site).
#define DPORT_CACHE_GET_VAL(cpuid) (((cpuid) == 0) ? DPORT_CACHE_VAL(PRO) : DPORT_CACHE_VAL(APP))
#define DPORT_CACHE_GET_MASK(cpuid) (((cpuid) == 0) ? DPORT_CACHE_MASK(PRO) : DPORT_CACHE_MASK(APP))
64
// Disable / restore the flash cache of the given CPU. *saved_state receives
// (disable) or supplies (restore) the register/autoload state needed to bring
// the cache back exactly as it was.
static void IRAM_ATTR spi_flash_disable_cache(uint32_t cpuid, uint32_t *saved_state);
static void IRAM_ATTR spi_flash_restore_cache(uint32_t cpuid, uint32_t saved_state);

// Per-CPU saved cache state while a flash operation is in progress.
static uint32_t s_flash_op_cache_state[2];
69
#ifndef CONFIG_FREERTOS_UNICORE
// Recursive mutex serializing flash operations between tasks.
static SemaphoreHandle_t s_flash_op_mutex;
// Set by the other CPU (in spi_flash_op_block_func) once it is parked and the
// flash operation may begin; cleared by the initiating CPU before the IPC call.
static volatile bool s_flash_op_can_start = false;
// Set by the initiating CPU to release the other CPU from its busy-wait loop.
static volatile bool s_flash_op_complete = false;
#ifndef NDEBUG
// CPU id which started the current flash operation (-1 = none); debug-only sanity check.
static volatile int s_flash_op_cpu = -1;
#endif
77
// Returns true if the current stack pointer lies in memory that remains
// accessible while the flash cache is disabled: internal DRAM, or RTC fast
// memory when it is configured for use as heap.
static inline bool esp_task_stack_is_sane_cache_disabled(void)
{
    const void *sp = (const void *)esp_cpu_get_sp();

    return esp_ptr_in_dram(sp)
#if CONFIG_ESP_SYSTEM_ALLOW_RTC_FAST_MEM_AS_HEAP
           || esp_ptr_in_rtc_dram_fast(sp)
#endif
           ;
}
88
spi_flash_init_lock(void)89 void spi_flash_init_lock(void)
90 {
91 s_flash_op_mutex = xSemaphoreCreateRecursiveMutex();
92 assert(s_flash_op_mutex != NULL);
93 }
94
// Acquire the flash-operation lock (recursive: the same task may nest calls).
void spi_flash_op_lock(void)
{
    xSemaphoreTakeRecursive(s_flash_op_mutex, portMAX_DELAY);
}
99
// Release the flash-operation lock taken by spi_flash_op_lock().
void spi_flash_op_unlock(void)
{
    xSemaphoreGiveRecursive(s_flash_op_mutex);
}
/*
 If you're going to modify this, keep in mind that while the flash caches of the pro and app
 CPU are separate, the PSRAM cache is *not*. If one of the CPUs returns from a flash routine
 with its cache enabled but the other CPU's cache is not enabled yet, you will have problems
 when accessing PSRAM from the former CPU.
*/
110
/**
 * Routine run (via IPC) on the other CPU while this CPU performs a flash
 * operation. It parks that CPU: scheduler suspended, non-IRAM interrupts
 * disabled, busy-waiting until s_flash_op_complete is set, then restores its
 * cache, interrupts and scheduler.
 * arg carries the id of the CPU this function executes on, cast to a pointer.
 */
void IRAM_ATTR spi_flash_op_block_func(void *arg)
{
    // Disable scheduler on this CPU
    vTaskSuspendAll();
    // Disable interrupts that aren't located in IRAM
    esp_intr_noniram_disable();
    uint32_t cpuid = (uint32_t) arg;
    // s_flash_op_complete flag is cleared on *this* CPU, otherwise the other
    // CPU may reset the flag back to false before IPC task has a chance to check it
    // (if it is preempted by an ISR taking non-trivial amount of time)
    s_flash_op_complete = false;
    // Signal the initiating CPU that this CPU is parked and flash work may begin
    s_flash_op_can_start = true;
    while (!s_flash_op_complete) {
        // busy loop here and wait for the other CPU to finish flash operation
    }
    // Flash operation is complete, re-enable cache
    spi_flash_restore_cache(cpuid, s_flash_op_cache_state[cpuid]);
    // Re-enable interrupts that aren't located in IRAM
    esp_intr_noniram_enable();
    // Re-enable scheduler
    xTaskResumeAll();
}
133
/**
 * Prepare for a flash operation: take the flash-op lock, park the other CPU
 * via IPC (scheduler + non-IRAM interrupts disabled there), then disable
 * non-IRAM interrupts, the scheduler and the flash cache on this CPU as well.
 * Must be balanced by spi_flash_enable_interrupts_caches_and_other_cpu().
 */
void IRAM_ATTR spi_flash_disable_interrupts_caches_and_other_cpu(void)
{
    // The stack must stay accessible with the cache off (DRAM / RTC fast mem)
    assert(esp_task_stack_is_sane_cache_disabled());

    spi_flash_op_lock();

    const int cpuid = xPortGetCoreID();
    const uint32_t other_cpuid = (cpuid == 0) ? 1 : 0;
#ifndef NDEBUG
    // For sanity check later: record the CPU which has started doing flash operation
    assert(s_flash_op_cpu == -1);
    s_flash_op_cpu = cpuid;
#endif

    if (xTaskGetSchedulerState() == taskSCHEDULER_NOT_STARTED) {
        // Scheduler hasn't been started yet, it means that spi_flash API is being
        // called from the 2nd stage bootloader or from user_start_cpu0, i.e. from
        // PRO CPU. APP CPU is either in reset or spinning inside user_start_cpu1,
        // which is in IRAM. So it is safe to disable cache for the other_cpuid after
        // esp_intr_noniram_disable.
        assert(other_cpuid == 1);
    } else {
        // Temporarily raise current task priority to prevent a deadlock while
        // waiting for IPC task to start on the other CPU
        int old_prio = uxTaskPriorityGet(NULL);
        vTaskPrioritySet(NULL, configMAX_PRIORITIES - 1);
        // Signal to the spi_flash_op_block_task on the other CPU that we need it to
        // disable cache there and block other tasks from executing.
        s_flash_op_can_start = false;
        ESP_ERROR_CHECK(esp_ipc_call(other_cpuid, &spi_flash_op_block_func, (void *) other_cpuid));

        while (!s_flash_op_can_start) {
            // Busy loop and wait for spi_flash_op_block_func to disable cache
            // on the other CPU
        }
        // Disable scheduler on the current CPU
        vTaskSuspendAll();
        // Can now set the priority back to the normal one
        vTaskPrioritySet(NULL, old_prio);
        // This is guaranteed to run on CPU <cpuid> because the other CPU is now
        // occupied by highest priority task
        assert(xPortGetCoreID() == cpuid);
    }
    // Kill interrupts that aren't located in IRAM
    esp_intr_noniram_disable();
    // This CPU executes this routine, with non-IRAM interrupts and the scheduler
    // disabled. The other CPU is spinning in the spi_flash_op_block_func task, also
    // with non-iram interrupts and the scheduler disabled. None of these CPUs will
    // touch external RAM or flash this way, so we can safely disable caches.
    spi_flash_disable_cache(cpuid, &s_flash_op_cache_state[cpuid]);
    spi_flash_disable_cache(other_cpuid, &s_flash_op_cache_state[other_cpuid]);
}
186
/**
 * Undo spi_flash_disable_interrupts_caches_and_other_cpu(): restore the cache
 * on both CPUs, release the other CPU from its busy loop, re-enable non-IRAM
 * interrupts, resume the scheduler, and drop the flash-op lock.
 * Must run on the same CPU that disabled the caches.
 */
void IRAM_ATTR spi_flash_enable_interrupts_caches_and_other_cpu(void)
{
    const int cpuid = xPortGetCoreID();
    const uint32_t other_cpuid = (cpuid == 0) ? 1 : 0;
#ifndef NDEBUG
    // Sanity check: flash operation ends on the same CPU as it has started
    assert(cpuid == s_flash_op_cpu);
    // More sanity check: if scheduler isn't started, only CPU0 can call this.
    assert(!(xTaskGetSchedulerState() == taskSCHEDULER_NOT_STARTED && cpuid != 0));
    s_flash_op_cpu = -1;
#endif

    // Re-enable cache on both CPUs. After this, cache (flash and external RAM) should work again.
    spi_flash_restore_cache(cpuid, s_flash_op_cache_state[cpuid]);
    spi_flash_restore_cache(other_cpuid, s_flash_op_cache_state[other_cpuid]);

    if (xTaskGetSchedulerState() != taskSCHEDULER_NOT_STARTED) {
        // Signal to spi_flash_op_block_task that flash operation is complete
        s_flash_op_complete = true;
    }

    // Re-enable non-iram interrupts
    esp_intr_noniram_enable();

    // Resume tasks on the current CPU, if the scheduler has started.
    // NOTE: enabling non-IRAM interrupts has to happen before this,
    // because once the scheduler has started, due to preemption the
    // current task can end up being moved to the other CPU.
    // But esp_intr_noniram_enable has to be called on the same CPU which
    // called esp_intr_noniram_disable
    if (xTaskGetSchedulerState() != taskSCHEDULER_NOT_STARTED) {
        xTaskResumeAll();
    }
    // Release API lock
    spi_flash_op_unlock();
}
223
/**
 * Panic-handler variant of the disable routine: no locking and no IPC, because
 * the other CPU has already been halted by the panic handler. Disables the
 * other CPU's cache, then non-IRAM interrupts and the cache on this CPU.
 */
void IRAM_ATTR spi_flash_disable_interrupts_caches_and_other_cpu_no_os(void)
{
    const uint32_t cpuid = xPortGetCoreID();
    const uint32_t other_cpuid = (cpuid == 0) ? 1 : 0;

    // do not care about other CPU, it was halted upon entering panic handler
    spi_flash_disable_cache(other_cpuid, &s_flash_op_cache_state[other_cpuid]);
    // Kill interrupts that aren't located in IRAM
    esp_intr_noniram_disable();
    // Disable cache on this CPU as well
    spi_flash_disable_cache(cpuid, &s_flash_op_cache_state[cpuid]);
}
236
/**
 * Panic-handler counterpart of the no_os disable routine: restores the cache
 * and non-IRAM interrupts on the current CPU only (the other CPU stays halted).
 */
void IRAM_ATTR spi_flash_enable_interrupts_caches_no_os(void)
{
    const uint32_t cpuid = xPortGetCoreID();

    // Re-enable cache on this CPU
    spi_flash_restore_cache(cpuid, s_flash_op_cache_state[cpuid]);
    // Re-enable non-iram interrupts
    esp_intr_noniram_enable();
}
246
247 #else // CONFIG_FREERTOS_UNICORE
248
// Single-core build: flash operations are serialized by suspending the
// scheduler (see spi_flash_op_lock below), so no lock object is needed.
void spi_flash_init_lock(void)
{
}
252
// Single-core build: "locking" is simply suspending the scheduler so no other
// task can run during the flash operation.
void spi_flash_op_lock(void)
{
    vTaskSuspendAll();
}
257
// Single-core build: release the "lock" by resuming the scheduler.
void spi_flash_op_unlock(void)
{
    xTaskResumeAll();
}
262
263
// Single-core build: suspend the scheduler, disable non-IRAM interrupts, and
// disable the cache of CPU 0 (the only CPU), saving its state for restore.
void IRAM_ATTR spi_flash_disable_interrupts_caches_and_other_cpu(void)
{
    spi_flash_op_lock();
    esp_intr_noniram_disable();
    spi_flash_disable_cache(0, &s_flash_op_cache_state[0]);
}
270
// Single-core build: restore the cache, re-enable non-IRAM interrupts, and
// resume the scheduler — exact inverse order of the disable routine above.
void IRAM_ATTR spi_flash_enable_interrupts_caches_and_other_cpu(void)
{
    spi_flash_restore_cache(0, s_flash_op_cache_state[0]);
    esp_intr_noniram_enable();
    spi_flash_op_unlock();
}
277
// Single-core panic-handler variant: no scheduler interaction, just interrupts
// off and cache off (state saved for later restore).
void IRAM_ATTR spi_flash_disable_interrupts_caches_and_other_cpu_no_os(void)
{
    // Kill interrupts that aren't located in IRAM
    esp_intr_noniram_disable();
    // Disable cache on this CPU as well
    spi_flash_disable_cache(0, &s_flash_op_cache_state[0]);
}
285
// Single-core panic-handler variant: restore the cache and re-enable non-IRAM
// interrupts on CPU 0.
void IRAM_ATTR spi_flash_enable_interrupts_caches_no_os(void)
{
    // Re-enable cache on this CPU
    spi_flash_restore_cache(0, s_flash_op_cache_state[0]);
    // Re-enable non-iram interrupts
    esp_intr_noniram_enable();
}
293
294 #endif // CONFIG_FREERTOS_UNICORE
295
296 /**
297 * The following two functions are replacements for Cache_Read_Disable and Cache_Read_Enable
298 * function in ROM. They are used to work around a bug where Cache_Read_Disable requires a call to
299 * Cache_Flush before Cache_Read_Enable, even if cached data was not modified.
300 */
/**
 * Disable the flash cache of the given CPU, storing in *saved_state whatever
 * is needed to restore it later with spi_flash_restore_cache().
 * On ESP32 this saves and reads the CACHE_CTRL1 mask bits, waits for the cache
 * state machine to go idle, then clears the cache enable bit. On newer targets
 * it uses the ROM Cache_Suspend_* functions; where both I- and D-caches exist,
 * the ICache autoload state is packed into the upper 16 bits of *saved_state.
 */
static void IRAM_ATTR spi_flash_disable_cache(uint32_t cpuid, uint32_t *saved_state)
{
#if CONFIG_IDF_TARGET_ESP32
    uint32_t ret = 0;
    const uint32_t cache_mask = DPORT_CACHE_GET_MASK(cpuid);
    if (cpuid == 0) {
        ret |= DPORT_GET_PERI_REG_BITS2(DPORT_PRO_CACHE_CTRL1_REG, cache_mask, 0);
        // Wait until the cache state machine reports idle before disabling
        while (DPORT_GET_PERI_REG_BITS2(DPORT_PRO_DCACHE_DBUG0_REG, DPORT_PRO_CACHE_STATE, DPORT_PRO_CACHE_STATE_S) != 1) {
            ;
        }
        DPORT_SET_PERI_REG_BITS(DPORT_PRO_CACHE_CTRL_REG, 1, 0, DPORT_PRO_CACHE_ENABLE_S);
    }
#if !CONFIG_FREERTOS_UNICORE
    else {
        ret |= DPORT_GET_PERI_REG_BITS2(DPORT_APP_CACHE_CTRL1_REG, cache_mask, 0);
        // Same idle wait for the APP CPU's cache
        while (DPORT_GET_PERI_REG_BITS2(DPORT_APP_DCACHE_DBUG0_REG, DPORT_APP_CACHE_STATE, DPORT_APP_CACHE_STATE_S) != 1) {
            ;
        }
        DPORT_SET_PERI_REG_BITS(DPORT_APP_CACHE_CTRL_REG, 1, 0, DPORT_APP_CACHE_ENABLE_S);
    }
#endif
    *saved_state = ret;
#elif CONFIG_IDF_TARGET_ESP32S2
    *saved_state = Cache_Suspend_ICache();
#elif CONFIG_IDF_TARGET_ESP32S3
    uint32_t icache_state, dcache_state;
    icache_state = Cache_Suspend_ICache() << 16;
    dcache_state = Cache_Suspend_DCache();
    *saved_state = icache_state | dcache_state;
#elif CONFIG_IDF_TARGET_ESP32C3 || CONFIG_IDF_TARGET_ESP32H2
    uint32_t icache_state;
    icache_state = Cache_Suspend_ICache() << 16;
    *saved_state = icache_state;
#endif
}
336
/**
 * Re-enable the flash cache of the given CPU using the state previously saved
 * by spi_flash_disable_cache(). On ESP32 this sets the cache enable bit and
 * writes back the saved CACHE_CTRL1 mask bits; on newer targets it resumes the
 * ROM caches (unpacking the ICache state from the upper 16 bits where packed).
 */
static void IRAM_ATTR spi_flash_restore_cache(uint32_t cpuid, uint32_t saved_state)
{
#if CONFIG_IDF_TARGET_ESP32
    const uint32_t cache_mask = DPORT_CACHE_GET_MASK(cpuid);
    if (cpuid == 0) {
        DPORT_SET_PERI_REG_BITS(DPORT_PRO_CACHE_CTRL_REG, 1, 1, DPORT_PRO_CACHE_ENABLE_S);
        DPORT_SET_PERI_REG_BITS(DPORT_PRO_CACHE_CTRL1_REG, cache_mask, saved_state, 0);
    }
#if !CONFIG_FREERTOS_UNICORE
    else {
        DPORT_SET_PERI_REG_BITS(DPORT_APP_CACHE_CTRL_REG, 1, 1, DPORT_APP_CACHE_ENABLE_S);
        DPORT_SET_PERI_REG_BITS(DPORT_APP_CACHE_CTRL1_REG, cache_mask, saved_state, 0);
    }
#endif
#elif CONFIG_IDF_TARGET_ESP32S2
    Cache_Resume_ICache(saved_state);
#elif CONFIG_IDF_TARGET_ESP32S3
    Cache_Resume_DCache(saved_state & 0xffff);
    Cache_Resume_ICache(saved_state >> 16);
#elif CONFIG_IDF_TARGET_ESP32C3 || CONFIG_IDF_TARGET_ESP32H2
    Cache_Resume_ICache(saved_state >> 16);
#endif
}
360
/**
 * Returns true if the flash cache is currently enabled — on dual-core ESP32,
 * only if it is enabled on both CPUs.
 */
IRAM_ATTR bool spi_flash_cache_enabled(void)
{
#if CONFIG_IDF_TARGET_ESP32
    bool result = (DPORT_REG_GET_BIT(DPORT_PRO_CACHE_CTRL_REG, DPORT_PRO_CACHE_ENABLE) != 0);
#if portNUM_PROCESSORS == 2
    result = result && (DPORT_REG_GET_BIT(DPORT_APP_CACHE_CTRL_REG, DPORT_APP_CACHE_ENABLE) != 0);
#endif
#elif CONFIG_IDF_TARGET_ESP32S2
    bool result = (REG_GET_BIT(EXTMEM_PRO_ICACHE_CTRL_REG, EXTMEM_PRO_ICACHE_ENABLE) != 0);
#elif CONFIG_IDF_TARGET_ESP32S3 || CONFIG_IDF_TARGET_ESP32C3 || CONFIG_IDF_TARGET_ESP32H2
    bool result = (REG_GET_BIT(EXTMEM_ICACHE_CTRL_REG, EXTMEM_ICACHE_ENABLE) != 0);
#endif
    return result;
}
375
376 #if CONFIG_IDF_TARGET_ESP32S2
/**
 * (ESP32-S2) Configure the instruction cache geometry from Kconfig: allocate
 * the SRAM banks, set size/associativity/line size, invalidate, and resume.
 */
IRAM_ATTR void esp_config_instruction_cache_mode(void)
{
    cache_size_t cache_size;
    cache_ways_t cache_ways;
    cache_line_size_t cache_line_size;

#if CONFIG_ESP32S2_INSTRUCTION_CACHE_8KB
    // 8 KB: one SRAM bank for the icache
    Cache_Allocate_SRAM(CACHE_MEMORY_ICACHE_LOW, CACHE_MEMORY_INVALID, CACHE_MEMORY_INVALID, CACHE_MEMORY_INVALID);
    cache_size = CACHE_SIZE_8KB;
#else
    // 16 KB: two SRAM banks for the icache
    Cache_Allocate_SRAM(CACHE_MEMORY_ICACHE_LOW, CACHE_MEMORY_ICACHE_HIGH, CACHE_MEMORY_INVALID, CACHE_MEMORY_INVALID);
    cache_size = CACHE_SIZE_16KB;
#endif
    cache_ways = CACHE_4WAYS_ASSOC;
#if CONFIG_ESP32S2_INSTRUCTION_CACHE_LINE_16B
    cache_line_size = CACHE_LINE_SIZE_16B;
#else
    cache_line_size = CACHE_LINE_SIZE_32B;
#endif
    ESP_EARLY_LOGI(TAG, "Instruction cache \t: size %dKB, %dWays, cache line size %dByte", cache_size == CACHE_SIZE_8KB ? 8 : 16, 4, cache_line_size == CACHE_LINE_SIZE_16B ? 16 : 32);
    // Suspend while reconfiguring, then invalidate so no stale lines survive
    Cache_Suspend_ICache();
    Cache_Set_ICache_Mode(cache_size, cache_ways, cache_line_size);
    Cache_Invalidate_ICache_All();
    Cache_Resume_ICache(0);
}
402
/**
 * (ESP32-S2) Configure the data cache geometry from Kconfig. The SRAM bank
 * layout depends on both the instruction-cache and data-cache size options,
 * since icache and dcache share the pool of cache SRAM banks.
 */
IRAM_ATTR void esp_config_data_cache_mode(void)
{
    cache_size_t cache_size;
    cache_ways_t cache_ways;
    cache_line_size_t cache_line_size;

#if CONFIG_ESP32S2_INSTRUCTION_CACHE_8KB
#if CONFIG_ESP32S2_DATA_CACHE_8KB
    // 8 KB icache + 8 KB dcache
    Cache_Allocate_SRAM(CACHE_MEMORY_ICACHE_LOW, CACHE_MEMORY_DCACHE_LOW, CACHE_MEMORY_INVALID, CACHE_MEMORY_INVALID);
    cache_size = CACHE_SIZE_8KB;
#else
    // 8 KB icache + 16 KB dcache
    Cache_Allocate_SRAM(CACHE_MEMORY_ICACHE_LOW, CACHE_MEMORY_DCACHE_LOW, CACHE_MEMORY_DCACHE_HIGH, CACHE_MEMORY_INVALID);
    cache_size = CACHE_SIZE_16KB;
#endif
#else
#if CONFIG_ESP32S2_DATA_CACHE_8KB
    // 16 KB icache + 8 KB dcache
    Cache_Allocate_SRAM(CACHE_MEMORY_ICACHE_LOW, CACHE_MEMORY_ICACHE_HIGH, CACHE_MEMORY_DCACHE_LOW, CACHE_MEMORY_INVALID);
    cache_size = CACHE_SIZE_8KB;
#else
    // 16 KB icache + 16 KB dcache
    Cache_Allocate_SRAM(CACHE_MEMORY_ICACHE_LOW, CACHE_MEMORY_ICACHE_HIGH, CACHE_MEMORY_DCACHE_LOW, CACHE_MEMORY_DCACHE_HIGH);
    cache_size = CACHE_SIZE_16KB;
#endif
#endif

    cache_ways = CACHE_4WAYS_ASSOC;
#if CONFIG_ESP32S2_DATA_CACHE_LINE_16B
    cache_line_size = CACHE_LINE_SIZE_16B;
#else
    cache_line_size = CACHE_LINE_SIZE_32B;
#endif
    ESP_EARLY_LOGI(TAG, "Data cache \t\t: size %dKB, %dWays, cache line size %dByte", cache_size == CACHE_SIZE_8KB ? 8 : 16, 4, cache_line_size == CACHE_LINE_SIZE_16B ? 16 : 32);
    Cache_Set_DCache_Mode(cache_size, cache_ways, cache_line_size);
    Cache_Invalidate_DCache_All();
}
437
esp_enable_cache_flash_wrap(bool icache,bool dcache)438 static IRAM_ATTR void esp_enable_cache_flash_wrap(bool icache, bool dcache)
439 {
440 uint32_t i_autoload, d_autoload;
441 if (icache) {
442 i_autoload = Cache_Suspend_ICache();
443 }
444 if (dcache) {
445 d_autoload = Cache_Suspend_DCache();
446 }
447 REG_SET_BIT(EXTMEM_PRO_CACHE_WRAP_AROUND_CTRL_REG, EXTMEM_PRO_CACHE_FLASH_WRAP_AROUND);
448 if (icache) {
449 Cache_Resume_ICache(i_autoload);
450 }
451 if (dcache) {
452 Cache_Resume_DCache(d_autoload);
453 }
454 }
455
456 #if CONFIG_ESP32S2_SPIRAM_SUPPORT
esp_enable_cache_spiram_wrap(bool icache,bool dcache)457 static IRAM_ATTR void esp_enable_cache_spiram_wrap(bool icache, bool dcache)
458 {
459 uint32_t i_autoload, d_autoload;
460 if (icache) {
461 i_autoload = Cache_Suspend_ICache();
462 }
463 if (dcache) {
464 d_autoload = Cache_Suspend_DCache();
465 }
466 REG_SET_BIT(EXTMEM_PRO_CACHE_WRAP_AROUND_CTRL_REG, EXTMEM_PRO_CACHE_SRAM_RD_WRAP_AROUND);
467 if (icache) {
468 Cache_Resume_ICache(i_autoload);
469 }
470 if (dcache) {
471 Cache_Resume_DCache(d_autoload);
472 }
473 }
474 #endif
475
/**
 * (ESP32-S2) Enable wrap-around burst mode for flash and/or SPIRAM cache line
 * refills, if the wrap lengths required by every consumer agree and the
 * flash/PSRAM chips support them.
 *
 * Index 0 of flash_wrap_sizes/spiram_wrap_sizes tracks accesses routed through
 * the icache, index 1 through the dcache; -1 means "no wrap needed there".
 * The wrap size must match the cache line size of the cache doing the fetch.
 *
 * @param icache_wrap_enable enable wrap for instruction-cache fetches
 * @param dcache_wrap_enable enable wrap for data-cache fetches
 * @return ESP_OK on success (including "nothing to do"), ESP_FAIL if the
 *         required wrap sizes conflict or cannot be enabled consistently.
 */
esp_err_t esp_enable_cache_wrap(bool icache_wrap_enable, bool dcache_wrap_enable)
{
    int icache_wrap_size = 0, dcache_wrap_size = 0;
    int flash_wrap_sizes[2] = {-1, -1}, spiram_wrap_sizes[2] = {-1, -1};
    int flash_wrap_size = 0, spiram_wrap_size = 0;
    int flash_count = 0, spiram_count = 0;
    int i;
    bool flash_spiram_wrap_together, flash_support_wrap = true, spiram_support_wrap = true;
    uint32_t drom0_in_icache = 1;//always 1 in esp32s2
#if CONFIG_IDF_TARGET_ESP32S3 || CONFIG_IDF_TARGET_ESP32C3 || CONFIG_IDF_TARGET_ESP32H2
    drom0_in_icache = 0;
#endif

    // Wrap length must equal the configured cache line size
    if (icache_wrap_enable) {
#if CONFIG_ESP32S2_INSTRUCTION_CACHE_LINE_16B || CONFIG_ESP32S3_INSTRUCTION_CACHE_LINE_16B || CONFIG_ESP32C3_INSTRUCTION_CACHE_LINE_16B || CONFIG_ESP32H2_INSTRUCTION_CACHE_LINE_16B
        icache_wrap_size = 16;
#else
        icache_wrap_size = 32;
#endif
    }
    if (dcache_wrap_enable) {
#if CONFIG_ESP32S2_DATA_CACHE_LINE_16B || CONFIG_ESP32S3_DATA_CACHE_LINE_16B || CONFIG_ESP32C3_INSTRUCTION_CACHE_LINE_16B || CONFIG_ESP32H2_INSTRUCTION_CACHE_LINE_16B
        dcache_wrap_size = 16;
#else
        dcache_wrap_size = 32;
#endif
    }

    // Work out which memory (flash or SPIRAM) each cache actually fetches from
    uint32_t instruction_use_spiram = 0;
    uint32_t rodata_use_spiram = 0;
#if CONFIG_SPIRAM_FETCH_INSTRUCTIONS
    extern uint32_t esp_spiram_instruction_access_enabled(void);
    instruction_use_spiram = esp_spiram_instruction_access_enabled();
#endif
#if CONFIG_SPIRAM_RODATA
    extern uint32_t esp_spiram_rodata_access_enabled(void);
    rodata_use_spiram = esp_spiram_rodata_access_enabled();
#endif

    if (instruction_use_spiram) {
        spiram_wrap_sizes[0] = icache_wrap_size;
    } else {
        flash_wrap_sizes[0] = icache_wrap_size;
    }
    if (rodata_use_spiram) {
        if (drom0_in_icache) {
            spiram_wrap_sizes[0] = icache_wrap_size;
        } else {
            spiram_wrap_sizes[1] = dcache_wrap_size;
            flash_wrap_sizes[1] = dcache_wrap_size;
        }
#ifdef CONFIG_EXT_RODATA_SUPPORT
        spiram_wrap_sizes[1] = dcache_wrap_size;
#endif
    } else {
        if (drom0_in_icache) {
            flash_wrap_sizes[0] = icache_wrap_size;
        } else {
            flash_wrap_sizes[1] = dcache_wrap_size;
        }
#ifdef CONFIG_EXT_RODATA_SUPPORT
        flash_wrap_sizes[1] = dcache_wrap_size;
#endif
    }
#ifdef CONFIG_ESP32S2_SPIRAM_SUPPORT
    spiram_wrap_sizes[1] = dcache_wrap_size;
#endif
    // Collapse the per-cache requirements into one wrap size per memory
    for (i = 0; i < 2; i++) {
        if (flash_wrap_sizes[i] != -1) {
            flash_count++;
            flash_wrap_size = flash_wrap_sizes[i];
        }
    }
    for (i = 0; i < 2; i++) {
        if (spiram_wrap_sizes[i] != -1) {
            spiram_count++;
            spiram_wrap_size = spiram_wrap_sizes[i];
        }
    }
    // More than two consumers means some cache fetches from both memories,
    // so flash and SPIRAM wrap must be enabled (or abandoned) together
    if (flash_count + spiram_count <= 2) {
        flash_spiram_wrap_together = false;
    } else {
        flash_spiram_wrap_together = true;
    }
    ESP_EARLY_LOGI(TAG, "flash_count=%d, size=%d, spiram_count=%d, size=%d,together=%d", flash_count, flash_wrap_size, spiram_count, spiram_wrap_size, flash_spiram_wrap_together);
    // Conflicting wrap lengths for the same memory: give up on that memory
    if (flash_count > 1 && flash_wrap_sizes[0] != flash_wrap_sizes[1]) {
        ESP_EARLY_LOGW(TAG, "Flash wrap with different length %d and %d, abort wrap.", flash_wrap_sizes[0], flash_wrap_sizes[1]);
        if (spiram_wrap_size == 0) {
            return ESP_FAIL;
        }
        if (flash_spiram_wrap_together) {
            ESP_EARLY_LOGE(TAG, "Abort spiram wrap because flash wrap length not fixed.");
            return ESP_FAIL;
        }
    }
    if (spiram_count > 1 && spiram_wrap_sizes[0] != spiram_wrap_sizes[1]) {
        ESP_EARLY_LOGW(TAG, "SPIRAM wrap with different length %d and %d, abort wrap.", spiram_wrap_sizes[0], spiram_wrap_sizes[1]);
        if (flash_wrap_size == 0) {
            return ESP_FAIL;
        }
        if (flash_spiram_wrap_together) {
            ESP_EARLY_LOGW(TAG, "Abort flash wrap because spiram wrap length not fixed.");
            return ESP_FAIL;
        }
    }

    if (flash_spiram_wrap_together && flash_wrap_size != spiram_wrap_size) {
        ESP_EARLY_LOGW(TAG, "SPIRAM has different wrap length with flash, %d and %d, abort wrap.", spiram_wrap_size, flash_wrap_size);
        return ESP_FAIL;
    }

    // Flash wrap requires QIO mode; also ask the chip whether it supports this size
#ifdef CONFIG_FLASHMODE_QIO
    flash_support_wrap = true;
    extern bool spi_flash_support_wrap_size(uint32_t wrap_size);
    if (!spi_flash_support_wrap_size(flash_wrap_size)) {
        flash_support_wrap = false;
        ESP_EARLY_LOGW(TAG, "Flash do not support wrap size %d.", flash_wrap_size);
    }
#else
    ESP_EARLY_LOGW(TAG, "Flash is not in QIO mode, do not support wrap.");
#endif

#ifdef CONFIG_ESP32S2_SPIRAM_SUPPORT
    extern bool psram_support_wrap_size(uint32_t wrap_size);
    if (!psram_support_wrap_size(spiram_wrap_size)) {
        spiram_support_wrap = false;
        ESP_EARLY_LOGW(TAG, "SPIRAM do not support wrap size %d.", spiram_wrap_size);
    }
#endif

    if (flash_spiram_wrap_together && !(flash_support_wrap && spiram_support_wrap)) {
        ESP_EARLY_LOGW(TAG, "Flash and SPIRAM should support wrap together.");
        return ESP_FAIL;
    }

    // Finally program the flash chip / PSRAM and the cache controller
    extern esp_err_t spi_flash_enable_wrap(uint32_t wrap_size);
    if (flash_support_wrap && flash_wrap_size > 0) {
        ESP_EARLY_LOGI(TAG, "Flash wrap enabled, size = %d.", flash_wrap_size);
        spi_flash_enable_wrap(flash_wrap_size);
        esp_enable_cache_flash_wrap((flash_wrap_sizes[0] > 0), (flash_wrap_sizes[1] > 0));
    }
#if CONFIG_ESP32S2_SPIRAM_SUPPORT
    extern esp_err_t psram_enable_wrap(uint32_t wrap_size);
    if (spiram_support_wrap && spiram_wrap_size > 0) {
        ESP_EARLY_LOGI(TAG, "SPIRAM wrap enabled, size = %d.", spiram_wrap_size);
        psram_enable_wrap(spiram_wrap_size);
        esp_enable_cache_spiram_wrap((spiram_wrap_sizes[0] > 0), (spiram_wrap_sizes[1] > 0));
    }
#endif

    return ESP_OK;

}
629 #endif
630 #if CONFIG_IDF_TARGET_ESP32S3
/**
 * (ESP32-S3) Configure the instruction cache geometry from Kconfig: occupy the
 * icache memory banks, set size/ways/line size, invalidate, then enable.
 */
IRAM_ATTR void esp_config_instruction_cache_mode(void)
{
    cache_size_t cache_size;
    cache_ways_t cache_ways;
    cache_line_size_t cache_line_size;

#if CONFIG_ESP32S3_INSTRUCTION_CACHE_16KB
    // 16 KB: single icache bank ("half" size)
    Cache_Occupy_ICache_MEMORY(CACHE_MEMORY_IBANK0, CACHE_MEMORY_INVALID);
    cache_size = CACHE_SIZE_HALF;
#else
    // 32 KB: both icache banks ("full" size)
    Cache_Occupy_ICache_MEMORY(CACHE_MEMORY_IBANK0, CACHE_MEMORY_IBANK1);
    cache_size = CACHE_SIZE_FULL;
#endif
#if CONFIG_ESP32S3_INSTRUCTION_CACHE_4WAYS
    cache_ways = CACHE_4WAYS_ASSOC;
#else
    cache_ways = CACHE_8WAYS_ASSOC;
#endif
#if CONFIG_ESP32S3_INSTRUCTION_CACHE_LINE_16B
    cache_line_size = CACHE_LINE_SIZE_16B;
#elif CONFIG_ESP32S3_INSTRUCTION_CACHE_LINE_32B
    cache_line_size = CACHE_LINE_SIZE_32B;
#else
    cache_line_size = CACHE_LINE_SIZE_64B;
#endif
    ESP_EARLY_LOGI(TAG, "Instruction cache: size %dKB, %dWays, cache line size %dByte", cache_size == CACHE_SIZE_HALF ? 16 : 32, cache_ways == CACHE_4WAYS_ASSOC ? 4 : 8, cache_line_size == CACHE_LINE_SIZE_16B ? 16 : (cache_line_size == CACHE_LINE_SIZE_32B ? 32 : 64));
    Cache_Set_ICache_Mode(cache_size, cache_ways, cache_line_size);
    Cache_Invalidate_ICache_All();
    // ROM function; declared here because no header exposes it
    extern void Cache_Enable_ICache(uint32_t autoload);
    Cache_Enable_ICache(0);
}
662
/**
 * (ESP32-S3) Configure the data cache geometry from Kconfig: occupy the dcache
 * memory banks, set size/ways/line size, and invalidate. (The dcache is
 * enabled elsewhere — this function only configures it.)
 */
IRAM_ATTR void esp_config_data_cache_mode(void)
{
    cache_size_t cache_size;
    cache_ways_t cache_ways;
    cache_line_size_t cache_line_size;

#if CONFIG_ESP32S3_DATA_CACHE_32KB
    // 32 KB: single dcache bank ("half" size)
    Cache_Occupy_DCache_MEMORY(CACHE_MEMORY_DBANK1, CACHE_MEMORY_INVALID);
    cache_size = CACHE_SIZE_HALF;
#else
    // 64 KB: both dcache banks ("full" size)
    Cache_Occupy_DCache_MEMORY(CACHE_MEMORY_DBANK0, CACHE_MEMORY_DBANK1);
    cache_size = CACHE_SIZE_FULL;
#endif
#if CONFIG_ESP32S3_DATA_CACHE_4WAYS
    cache_ways = CACHE_4WAYS_ASSOC;
#else
    cache_ways = CACHE_8WAYS_ASSOC;
#endif
#if CONFIG_ESP32S3_DATA_CACHE_LINE_16B
    cache_line_size = CACHE_LINE_SIZE_16B;
#elif CONFIG_ESP32S3_DATA_CACHE_LINE_32B
    cache_line_size = CACHE_LINE_SIZE_32B;
#else
    cache_line_size = CACHE_LINE_SIZE_64B;
#endif
    // ESP_EARLY_LOGI(TAG, "Data cache: size %dKB, %dWays, cache line size %dByte", cache_size == CACHE_SIZE_HALF ? 32 : 64, cache_ways == CACHE_4WAYS_ASSOC ? 4 : 8, cache_line_size == CACHE_LINE_SIZE_16B ? 16 : (cache_line_size == CACHE_LINE_SIZE_32B ? 32 : 64));
    Cache_Set_DCache_Mode(cache_size, cache_ways, cache_line_size);
    Cache_Invalidate_DCache_All();
}
692
esp_enable_cache_flash_wrap(bool icache,bool dcache)693 static IRAM_ATTR void esp_enable_cache_flash_wrap(bool icache, bool dcache)
694 {
695 uint32_t i_autoload, d_autoload;
696 if (icache) {
697 i_autoload = Cache_Suspend_ICache();
698 }
699 if (dcache) {
700 d_autoload = Cache_Suspend_DCache();
701 }
702 REG_SET_BIT(EXTMEM_CACHE_WRAP_AROUND_CTRL_REG, EXTMEM_CACHE_FLASH_WRAP_AROUND);
703 if (icache) {
704 Cache_Resume_ICache(i_autoload);
705 }
706 if (dcache) {
707 Cache_Resume_DCache(d_autoload);
708 }
709 }
710
711 #if CONFIG_ESP32S3_SPIRAM_SUPPORT
esp_enable_cache_spiram_wrap(bool icache,bool dcache)712 static IRAM_ATTR void esp_enable_cache_spiram_wrap(bool icache, bool dcache)
713 {
714 uint32_t i_autoload, d_autoload;
715 if (icache) {
716 i_autoload = Cache_Suspend_ICache();
717 }
718 if (dcache) {
719 d_autoload = Cache_Suspend_DCache();
720 }
721 REG_SET_BIT(EXTMEM_CACHE_WRAP_AROUND_CTRL_REG, EXTMEM_CACHE_SRAM_RD_WRAP_AROUND);
722 if (icache) {
723 Cache_Resume_ICache(i_autoload);
724 }
725 if (dcache) {
726 Cache_Resume_DCache(d_autoload);
727 }
728 }
729 #endif
730
/**
 * (ESP32-S3) Enable wrap-around burst mode for flash and/or SPIRAM cache line
 * refills, if the wrap lengths required by every consumer agree and the
 * flash/PSRAM chips support them.
 *
 * Index 0 of flash_wrap_sizes/spiram_wrap_sizes tracks accesses routed through
 * the icache, index 1 through the dcache; -1 means "no wrap needed there".
 * The wrap size must match the cache line size of the cache doing the fetch.
 *
 * @param icache_wrap_enable enable wrap for instruction-cache fetches
 * @param dcache_wrap_enable enable wrap for data-cache fetches
 * @return ESP_OK on success (including "nothing to do"), ESP_FAIL if the
 *         required wrap sizes conflict or cannot be enabled consistently.
 */
esp_err_t esp_enable_cache_wrap(bool icache_wrap_enable, bool dcache_wrap_enable)
{
    int icache_wrap_size = 0, dcache_wrap_size = 0;
    int flash_wrap_sizes[2] = {-1, -1}, spiram_wrap_sizes[2] = {-1, -1};
    int flash_wrap_size = 0, spiram_wrap_size = 0;
    int flash_count = 0, spiram_count = 0;
    int i;
    bool flash_spiram_wrap_together, flash_support_wrap = false, spiram_support_wrap = true;
    uint32_t drom0_in_icache = 0;//always 0 in chip7.2.4

    // Wrap length must equal the configured cache line size
    if (icache_wrap_enable) {
#if CONFIG_ESP32S3_INSTRUCTION_CACHE_LINE_16B
        icache_wrap_size = 16;
#elif CONFIG_ESP32S3_INSTRUCTION_CACHE_LINE_32B
        icache_wrap_size = 32;
#else
        icache_wrap_size = 64;
#endif
    }
    if (dcache_wrap_enable) {
#if CONFIG_ESP32S3_DATA_CACHE_LINE_16B
        dcache_wrap_size = 16;
#elif CONFIG_ESP32S3_DATA_CACHE_LINE_32B
        dcache_wrap_size = 32;
#else
        dcache_wrap_size = 64;
#endif
    }

    // Work out which memory (flash or SPIRAM) each cache actually fetches from
    uint32_t instruction_use_spiram = 0;
    uint32_t rodata_use_spiram = 0;
#if CONFIG_SPIRAM_FETCH_INSTRUCTIONS
    extern uint32_t esp_spiram_instruction_access_enabled();
    instruction_use_spiram = esp_spiram_instruction_access_enabled();
#endif
#if CONFIG_SPIRAM_RODATA
    extern uint32_t esp_spiram_rodata_access_enabled();
    rodata_use_spiram = esp_spiram_rodata_access_enabled();
#endif

    if (instruction_use_spiram) {
        spiram_wrap_sizes[0] = icache_wrap_size;
    } else {
        flash_wrap_sizes[0] = icache_wrap_size;
    }
    if (rodata_use_spiram) {
        if (drom0_in_icache) {
            spiram_wrap_sizes[0] = icache_wrap_size;
        } else {
            spiram_wrap_sizes[1] = dcache_wrap_size;
        }
#ifdef CONFIG_EXT_RODATA_SUPPORT
        spiram_wrap_sizes[1] = dcache_wrap_size;
#endif
    } else {
        if (drom0_in_icache) {
            flash_wrap_sizes[0] = icache_wrap_size;
        } else {
            flash_wrap_sizes[1] = dcache_wrap_size;
        }
#ifdef CONFIG_EXT_RODATA_SUPPORT
        flash_wrap_sizes[1] = dcache_wrap_size;
#endif
    }
#ifdef CONFIG_ESP32S3_SPIRAM_SUPPORT
    spiram_wrap_sizes[1] = dcache_wrap_size;
#endif
    // Collapse the per-cache requirements into one wrap size per memory
    for (i = 0; i < 2; i++) {
        if (flash_wrap_sizes[i] != -1) {
            flash_count++;
            flash_wrap_size = flash_wrap_sizes[i];
        }
    }
    for (i = 0; i < 2; i++) {
        if (spiram_wrap_sizes[i] != -1) {
            spiram_count++;
            spiram_wrap_size = spiram_wrap_sizes[i];
        }
    }
    // More than two consumers means some cache fetches from both memories,
    // so flash and SPIRAM wrap must be enabled (or abandoned) together
    if (flash_count + spiram_count <= 2) {
        flash_spiram_wrap_together = false;
    } else {
        flash_spiram_wrap_together = true;
    }
    // Conflicting wrap lengths for the same memory: give up on that memory
    if (flash_count > 1 && flash_wrap_sizes[0] != flash_wrap_sizes[1]) {
        ESP_EARLY_LOGW(TAG, "Flash wrap with different length %d and %d, abort wrap.", flash_wrap_sizes[0], flash_wrap_sizes[1]);
        if (spiram_wrap_size == 0) {
            return ESP_FAIL;
        }
        if (flash_spiram_wrap_together) {
            ESP_EARLY_LOGE(TAG, "Abort spiram wrap because flash wrap length not fixed.");
            return ESP_FAIL;
        }
    }
    if (spiram_count > 1 && spiram_wrap_sizes[0] != spiram_wrap_sizes[1]) {
        ESP_EARLY_LOGW(TAG, "SPIRAM wrap with different length %d and %d, abort wrap.", spiram_wrap_sizes[0], spiram_wrap_sizes[1]);
        if (flash_wrap_size == 0) {
            return ESP_FAIL;
        }
        if (flash_spiram_wrap_together) {
            ESP_EARLY_LOGW(TAG, "Abort flash wrap because spiram wrap length not fixed.");
            return ESP_FAIL;
        }
    }

    if (flash_spiram_wrap_together && flash_wrap_size != spiram_wrap_size) {
        ESP_EARLY_LOGW(TAG, "SPIRAM has different wrap length with flash, %d and %d, abort wrap.", spiram_wrap_size, flash_wrap_size);
        return ESP_FAIL;
    }

    // Flash wrap requires QIO mode; also ask the chip whether it supports this size
#ifdef CONFIG_FLASHMODE_QIO
    flash_support_wrap = true;
    extern bool spi_flash_support_wrap_size(uint32_t wrap_size);
    if (!spi_flash_support_wrap_size(flash_wrap_size)) {
        flash_support_wrap = false;
        ESP_EARLY_LOGW(TAG, "Flash do not support wrap size %d.", flash_wrap_size);
    }
#else
    ESP_EARLY_LOGW(TAG, "Flash is not in QIO mode, do not support wrap.");
#endif


#ifdef CONFIG_ESP32S3_SPIRAM_SUPPORT
    extern bool psram_support_wrap_size(uint32_t wrap_size);
    if (!psram_support_wrap_size(spiram_wrap_size)) {
        spiram_support_wrap = false;
        ESP_EARLY_LOGW(TAG, "SPIRAM do not support wrap size %d.", spiram_wrap_size);
    }
#endif

    if (flash_spiram_wrap_together && !(flash_support_wrap && spiram_support_wrap)) {
        ESP_EARLY_LOGW(TAG, "Flash and SPIRAM should support wrap together.");
        return ESP_FAIL;
    }

    // Finally program the flash chip / PSRAM and the cache controller
    extern esp_err_t spi_flash_enable_wrap(uint32_t wrap_size);
    if (flash_support_wrap && flash_wrap_size > 0) {
        ESP_EARLY_LOGI(TAG, "Flash wrap enabled, size = %d.", flash_wrap_size);
        spi_flash_enable_wrap(flash_wrap_size);
        esp_enable_cache_flash_wrap((flash_wrap_sizes[0] > 0), (flash_wrap_sizes[1] > 0));
    }
#if CONFIG_ESP32S3_SPIRAM_SUPPORT
    extern esp_err_t psram_enable_wrap(uint32_t wrap_size);
    if (spiram_support_wrap && spiram_wrap_size > 0) {
        ESP_EARLY_LOGI(TAG, "SPIRAM wrap enabled, size = %d.", spiram_wrap_size);
        psram_enable_wrap(spiram_wrap_size);
        esp_enable_cache_spiram_wrap((spiram_wrap_sizes[0] > 0), (spiram_wrap_sizes[1] > 0));
    }
#endif

    return ESP_OK;

}
884 #endif
885
886 #if CONFIG_IDF_TARGET_ESP32C3 || CONFIG_IDF_TARGET_ESP32H2
887
esp_enable_cache_flash_wrap(bool icache)888 static IRAM_ATTR void esp_enable_cache_flash_wrap(bool icache)
889 {
890 uint32_t i_autoload;
891 if (icache) {
892 i_autoload = Cache_Suspend_ICache();
893 }
894 REG_SET_BIT(EXTMEM_CACHE_WRAP_AROUND_CTRL_REG, EXTMEM_CACHE_FLASH_WRAP_AROUND);
895 if (icache) {
896 Cache_Resume_ICache(i_autoload);
897 }
898 }
899
/**
 * (ESP32-C3/H2) Enable flash wrap-around burst mode for instruction-cache
 * refills. Requires the flash to be in QIO mode and to support a 32-byte wrap.
 * @param icache_wrap_enable enable wrap for instruction-cache fetches
 * @return ESP_OK always (unsupported configurations are logged and skipped).
 */
esp_err_t esp_enable_cache_wrap(bool icache_wrap_enable)
{
    int flash_wrap_size = 0;
    bool flash_support_wrap = false;

    if (icache_wrap_enable) {
        // Wrap length equals the fixed 32-byte cache line size on these chips
        flash_wrap_size = 32;
    }

#ifdef CONFIG_FLASHMODE_QIO
    flash_support_wrap = true;
    extern bool spi_flash_support_wrap_size(uint32_t wrap_size);
    if (!spi_flash_support_wrap_size(flash_wrap_size)) {
        flash_support_wrap = false;
        ESP_EARLY_LOGW(TAG, "Flash do not support wrap size %d.", flash_wrap_size);
    }
#else
    ESP_EARLY_LOGW(TAG, "Flash is not in QIO mode, do not support wrap.");
#endif // CONFIG_FLASHMODE_QIO

    extern esp_err_t spi_flash_enable_wrap(uint32_t wrap_size);
    if (flash_support_wrap && flash_wrap_size > 0) {
        ESP_EARLY_LOGI(TAG, "Flash wrap enabled, size = %d.", flash_wrap_size);
        spi_flash_enable_wrap(flash_wrap_size);
        esp_enable_cache_flash_wrap((flash_wrap_size > 0));
    }
    return ESP_OK;
}
928 #endif // CONFIG_IDF_TARGET_ESP32C3 || CONFIG_IDF_TARGET_ESP32H2
929
/**
 * Enable the flash cache of the given CPU without a previously saved state —
 * used when the cache must be brought up "from scratch". On ESP32 the default
 * CACHE_CTRL1 value is reconstructed from the DPORT macros; on other targets
 * the cache is resumed with a zero autoload state.
 */
void IRAM_ATTR spi_flash_enable_cache(uint32_t cpuid)
{
#if CONFIG_IDF_TARGET_ESP32
    uint32_t cache_value = DPORT_CACHE_GET_VAL(cpuid);
    cache_value &= DPORT_CACHE_GET_MASK(cpuid);

    // Re-enable cache on this CPU
    spi_flash_restore_cache(cpuid, cache_value);
#else
    spi_flash_restore_cache(0, 0); // TODO cache_value should be non-zero
#endif
}
942