1 /*
2 * SPDX-FileCopyrightText: 2021 Espressif Systems (Shanghai) CO LTD
3 *
4 * SPDX-License-Identifier: Apache-2.0
5 */
6
7 /**
8 * @file cache_err_int.c
9 * @brief The cache has an interrupt that can be raised as soon as an access to a cached
10 * region (Flash, PSRAM) is done without the cache being enabled.
11 * We use that here to panic the CPU, which from a debugging perspective,
12 * is better than grabbing bad data from the bus.
13 */
14
15 #include <stdint.h>
16 #include "sdkconfig.h"
17 #include "esp_err.h"
18 #include "esp_attr.h"
19 #include "esp_intr_alloc.h"
20 #include "soc/soc.h"
21 #include "soc/extmem_reg.h"
22 #include "soc/periph_defs.h"
23 #include "hal/cpu_hal.h"
24 #include "esp32s3/dport_access.h"
25 #include "esp32s3/rom/ets_sys.h"
26
/**
 * @brief Route the cache illegal/invalid access interrupt sources to the
 *        calling CPU and enable them.
 *
 * Intended to be called once per CPU core during startup. The interrupt is
 * wired to line ETS_CACHEERR_INUM on the calling core; no C handler is
 * installed (see comment below).
 */
void esp_cache_err_int_init(void)
{
    uint32_t core_id = cpu_hal_get_core_id();
    /* Keep the interrupt line masked while the sources are (re)configured. */
    ESP_INTR_DISABLE(ETS_CACHEERR_INUM);

    // We do not register a handler for the interrupt because it is interrupt
    // level 4 which is not serviceable from C. Instead, xtensa_vectors.S has
    // a call to the panic handler for this interrupt.
    intr_matrix_set(core_id, ETS_CACHE_IA_INTR_SOURCE, ETS_CACHEERR_INUM);

    // Enable invalid cache access interrupt when the cache is disabled.
    // When the interrupt happens, we can not determine the CPU where the
    // invalid cache access has occurred. We enable the interrupt to catch
    // invalid access on both CPUs, but the interrupt is connected to the
    // CPU which happens to call this function.
    // For this reason, panic handler backtrace will not be correct if the
    // interrupt is connected to PRO CPU and invalid access happens on the APP CPU.

    /* Clear any stale illegal-access interrupt status, then enable the
     * corresponding interrupt sources (MMU entry fault, flash write through
     * dcache, and preload/sync operation faults on both caches). */
    SET_PERI_REG_MASK(EXTMEM_CACHE_ILG_INT_CLR_REG,
                      EXTMEM_MMU_ENTRY_FAULT_INT_CLR |
                      EXTMEM_DCACHE_WRITE_FLASH_INT_CLR |
                      EXTMEM_DCACHE_PRELOAD_OP_FAULT_INT_CLR |
                      EXTMEM_DCACHE_SYNC_OP_FAULT_INT_CLR |
                      EXTMEM_ICACHE_PRELOAD_OP_FAULT_INT_CLR |
                      EXTMEM_ICACHE_SYNC_OP_FAULT_INT_CLR);
    SET_PERI_REG_MASK(EXTMEM_CACHE_ILG_INT_ENA_REG,
                      EXTMEM_MMU_ENTRY_FAULT_INT_ENA |
                      EXTMEM_DCACHE_WRITE_FLASH_INT_ENA |
                      EXTMEM_DCACHE_PRELOAD_OP_FAULT_INT_ENA |
                      EXTMEM_DCACHE_SYNC_OP_FAULT_INT_ENA |
                      EXTMEM_ICACHE_PRELOAD_OP_FAULT_INT_ENA |
                      EXTMEM_ICACHE_SYNC_OP_FAULT_INT_ENA);

    /* Per-core access error sources: each core routes only its own
     * CORE0/CORE1 source to the interrupt line. */
    if (core_id == PRO_CPU_NUM) {
        intr_matrix_set(core_id, ETS_CACHE_CORE0_ACS_INTR_SOURCE, ETS_CACHEERR_INUM);
        /* On the hardware side, start by clearing any pending core-0 cache
         * access error interrupt status. */
        SET_PERI_REG_MASK(EXTMEM_CORE0_ACS_CACHE_INT_CLR_REG,
                          EXTMEM_CORE0_DBUS_REJECT_INT_CLR |
                          EXTMEM_CORE0_DBUS_ACS_MSK_DC_INT_CLR |
                          EXTMEM_CORE0_IBUS_REJECT_INT_CLR |
                          EXTMEM_CORE0_IBUS_WR_IC_INT_CLR |
                          EXTMEM_CORE0_IBUS_ACS_MSK_IC_INT_CLR);

        /* Enable cache access error interrupts */
        SET_PERI_REG_MASK(EXTMEM_CORE0_ACS_CACHE_INT_ENA_REG,
                          EXTMEM_CORE0_DBUS_REJECT_INT_ENA |
                          EXTMEM_CORE0_DBUS_ACS_MSK_DC_INT_ENA |
                          EXTMEM_CORE0_IBUS_REJECT_INT_ENA |
                          EXTMEM_CORE0_IBUS_WR_IC_INT_ENA |
                          EXTMEM_CORE0_IBUS_ACS_MSK_IC_INT_ENA);
    } else {
        intr_matrix_set(core_id, ETS_CACHE_CORE1_ACS_INTR_SOURCE, ETS_CACHEERR_INUM);

        /* On the hardware side, start by clearing any pending core-1 cache
         * access error interrupt status. */
        SET_PERI_REG_MASK(EXTMEM_CORE1_ACS_CACHE_INT_CLR_REG,
                          EXTMEM_CORE1_DBUS_REJECT_INT_CLR |
                          EXTMEM_CORE1_DBUS_ACS_MSK_DC_INT_CLR |
                          EXTMEM_CORE1_IBUS_REJECT_INT_CLR |
                          EXTMEM_CORE1_IBUS_WR_IC_INT_CLR |
                          EXTMEM_CORE1_IBUS_ACS_MSK_IC_INT_CLR);

        /* Enable cache access error interrupts */
        SET_PERI_REG_MASK(EXTMEM_CORE1_ACS_CACHE_INT_ENA_REG,
                          EXTMEM_CORE1_DBUS_REJECT_INT_ENA |
                          EXTMEM_CORE1_DBUS_ACS_MSK_DC_INT_ENA |
                          EXTMEM_CORE1_IBUS_REJECT_INT_ENA |
                          EXTMEM_CORE1_IBUS_WR_IC_INT_ENA |
                          EXTMEM_CORE1_IBUS_ACS_MSK_IC_INT_ENA);
    }

    /* Configuration done: unmask the interrupt line on this core. */
    ESP_INTR_ENABLE(ETS_CACHEERR_INUM);
}
101
esp_cache_err_get_cpuid(void)102 int IRAM_ATTR esp_cache_err_get_cpuid(void)
103 {
104 const uint32_t pro_mask = EXTMEM_CORE0_DBUS_REJECT_ST |
105 EXTMEM_CORE0_DBUS_ACS_MSK_DCACHE_ST |
106 EXTMEM_CORE0_IBUS_REJECT_ST |
107 EXTMEM_CORE0_IBUS_WR_ICACHE_ST |
108 EXTMEM_CORE0_IBUS_ACS_MSK_ICACHE_ST;
109
110 if (GET_PERI_REG_MASK(EXTMEM_CORE0_ACS_CACHE_INT_ST_REG, pro_mask)) {
111 return PRO_CPU_NUM;
112 }
113
114 const uint32_t app_mask = EXTMEM_CORE1_DBUS_REJECT_ST |
115 EXTMEM_CORE1_DBUS_ACS_MSK_DCACHE_ST |
116 EXTMEM_CORE1_IBUS_REJECT_ST |
117 EXTMEM_CORE1_IBUS_WR_ICACHE_ST |
118 EXTMEM_CORE1_IBUS_ACS_MSK_ICACHE_ST;
119
120 if (GET_PERI_REG_MASK(EXTMEM_CORE1_ACS_CACHE_INT_ST_REG, app_mask)) {
121 return APP_CPU_NUM;
122 }
123
124 return -1;
125 }
126