/*
 * SPDX-FileCopyrightText: 2022-2023 Espressif Systems (Shanghai) CO LTD
 *
 * SPDX-License-Identifier: Apache-2.0
 */

// The LL layer for Cache register operations

#pragma once

#include "soc/extmem_reg.h"
#include "soc/ext_mem_defs.h"
#include "hal/cache_types.h"
#include "hal/assert.h"

#ifdef __cplusplus
extern "C" {
#endif
#define CACHE_LL_ENABLE_DISABLE_STATE_SW            1   //There's no register indicating the cache enable/disable state, so this state has to be tracked in software.

#define CACHE_LL_DEFAULT_IBUS_MASK                  CACHE_BUS_IBUS0
#define CACHE_LL_DEFAULT_DBUS_MASK                  CACHE_BUS_DBUS0

#define CACHE_LL_L1_ACCESS_EVENT_MASK               (1<<4)
#define CACHE_LL_L1_ACCESS_EVENT_CACHE_FAIL         (1<<4)
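/*
 * Note on CACHE_LL_ENABLE_DISABLE_STATE_SW: since the hardware exposes no
 * register that reports whether the cache is enabled, the layer above has to
 * keep that state itself. A minimal sketch of such software tracking follows;
 * the names below are hypothetical and only for illustration.
 *
 *     static bool s_l1_cache_enabled = true;      // software shadow of the cache state
 *
 *     static inline void example_mark_cache_state(bool enabled)
 *     {
 *         s_l1_cache_enabled = enabled;           // updated on every enable/disable path
 *     }
 *
 *     static inline bool example_cache_is_enabled(void)
 *     {
 *         return s_l1_cache_enabled;              // no register to read this back from
 *     }
 */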
/**
 * @brief Get the buses of a particular cache that are mapped to a virtual address range
 *
 * External virtual addresses can only be accessed when the involved cache buses are enabled.
 * This API gets the cache buses through which the memory region (from `vaddr_start` to `vaddr_start + len`) is accessed.
 *
 * @param cache_id          cache ID (when l1 cache is per core)
 * @param vaddr_start       virtual address start
 * @param len               length of the virtual address range, in bytes
 */
#if !BOOTLOADER_BUILD
__attribute__((always_inline))
#endif
static inline cache_bus_mask_t cache_ll_l1_get_bus(uint32_t cache_id, uint32_t vaddr_start, uint32_t len)
{
    HAL_ASSERT(cache_id == 0);
    cache_bus_mask_t mask = 0;

    uint32_t vaddr_end = vaddr_start + len - 1;
    if (vaddr_start >= IRAM0_CACHE_ADDRESS_LOW && vaddr_end < IRAM0_CACHE_ADDRESS_HIGH) {
        //On C6 the I/D bus memories are shared, so we always return `CACHE_BUS_IBUS0 | CACHE_BUS_DBUS0`
        mask |= CACHE_BUS_IBUS0 | CACHE_BUS_DBUS0;
    } else {
        HAL_ASSERT(0);          //Out of region
    }

    return mask;
}
/**
 * Enable the Cache Buses
 *
 * @param cache_id    cache ID (when l1 cache is per core)
 * @param mask        bitmask of the buses to be enabled
 */
#if !BOOTLOADER_BUILD
__attribute__((always_inline))
#endif
static inline void cache_ll_l1_enable_bus(uint32_t cache_id, cache_bus_mask_t mask)
{
    HAL_ASSERT(cache_id == 0);
    //On esp32c6, only `CACHE_BUS_IBUS0` and `CACHE_BUS_DBUS0` are supported. Use `cache_ll_l1_get_bus()` to get your bus first
    HAL_ASSERT((mask & (CACHE_BUS_IBUS1 | CACHE_BUS_IBUS2 | CACHE_BUS_DBUS1 | CACHE_BUS_DBUS2)) == 0);

    uint32_t ibus_mask = 0;
    ibus_mask |= (mask & CACHE_BUS_IBUS0) ? EXTMEM_L1_CACHE_SHUT_IBUS : 0;
    REG_CLR_BIT(EXTMEM_L1_CACHE_CTRL_REG, ibus_mask);

    uint32_t dbus_mask = 0;
    dbus_mask |= (mask & CACHE_BUS_DBUS0) ? EXTMEM_L1_CACHE_SHUT_DBUS : 0;
    REG_CLR_BIT(EXTMEM_L1_CACHE_CTRL_REG, dbus_mask);
}
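/*
 * Illustrative usage sketch (the virtual address and length below are
 * hypothetical values, chosen only for the example): before accessing an
 * external memory region, query which buses cover it and enable them.
 *
 *     uint32_t vaddr = IRAM0_CACHE_ADDRESS_LOW;                       // start of the region
 *     uint32_t len = 0x1000;                                          // region length in bytes
 *     cache_bus_mask_t bus_mask = cache_ll_l1_get_bus(0, vaddr, len); // IBUS0 | DBUS0 on C6
 *     cache_ll_l1_enable_bus(0, bus_mask);
 */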
/**
 * Disable the Cache Buses
 *
 * @param cache_id    cache ID (when l1 cache is per core)
 * @param mask        bitmask of the buses to be disabled
 */
__attribute__((always_inline))
static inline void cache_ll_l1_disable_bus(uint32_t cache_id, cache_bus_mask_t mask)
{
    HAL_ASSERT(cache_id == 0);
    //On esp32c6, only `CACHE_BUS_IBUS0` and `CACHE_BUS_DBUS0` are supported. Use `cache_ll_l1_get_bus()` to get your bus first
    HAL_ASSERT((mask & (CACHE_BUS_IBUS1 | CACHE_BUS_IBUS2 | CACHE_BUS_DBUS1 | CACHE_BUS_DBUS2)) == 0);

    uint32_t ibus_mask = 0;
    ibus_mask |= (mask & CACHE_BUS_IBUS0) ? EXTMEM_L1_CACHE_SHUT_IBUS : 0;
    REG_SET_BIT(EXTMEM_L1_CACHE_CTRL_REG, ibus_mask);

    uint32_t dbus_mask = 0;
    dbus_mask |= (mask & CACHE_BUS_DBUS0) ? EXTMEM_L1_CACHE_SHUT_DBUS : 0;
    REG_SET_BIT(EXTMEM_L1_CACHE_CTRL_REG, dbus_mask);
}
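/*
 * Illustrative sketch of the reverse operation (whether both buses need to be
 * shut at the same time depends on the caller; this only shows the calls):
 *
 *     cache_ll_l1_disable_bus(0, CACHE_LL_DEFAULT_IBUS_MASK);
 *     cache_ll_l1_disable_bus(0, CACHE_LL_DEFAULT_DBUS_MASK);
 */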
/*------------------------------------------------------------------------------
 * Interrupt
 *----------------------------------------------------------------------------*/
/**
 * @brief Enable Cache access error interrupt
 *
 * @param cache_id    Cache ID, not used on C6. For compatibility
 * @param mask        Interrupt mask
 */
static inline void cache_ll_l1_enable_access_error_intr(uint32_t cache_id, uint32_t mask)
{
    SET_PERI_REG_MASK(EXTMEM_L1_CACHE_ACS_FAIL_INT_ENA_REG, mask);
}

/**
 * @brief Clear Cache access error interrupt status
 *
 * @param cache_id    Cache ID, not used on C6. For compatibility
 * @param mask        Interrupt mask
 */
static inline void cache_ll_l1_clear_access_error_intr(uint32_t cache_id, uint32_t mask)
{
    SET_PERI_REG_MASK(EXTMEM_L1_CACHE_ACS_FAIL_INT_CLR_REG, mask);
}

/**
 * @brief Get Cache access error interrupt status
 *
 * @param cache_id    Cache ID, not used on C6. For compatibility
 * @param mask        Interrupt mask
 *
 * @return            Status mask
 */
static inline uint32_t cache_ll_l1_get_access_error_intr_status(uint32_t cache_id, uint32_t mask)
{
    return GET_PERI_REG_MASK(EXTMEM_L1_CACHE_ACS_FAIL_INT_ST_REG, mask);
}
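/*
 * Illustrative sketch of how the three interrupt helpers above fit together.
 * Interrupt allocation/registration is done elsewhere and is not shown; the
 * handler code below is hypothetical and only for the example.
 *
 *     // during init
 *     cache_ll_l1_enable_access_error_intr(0, CACHE_LL_L1_ACCESS_EVENT_MASK);
 *
 *     // in the cache access error interrupt handler
 *     uint32_t status = cache_ll_l1_get_access_error_intr_status(0, CACHE_LL_L1_ACCESS_EVENT_MASK);
 *     if (status & CACHE_LL_L1_ACCESS_EVENT_CACHE_FAIL) {
 *         // ...report the failure, then clear the raw status
 *         cache_ll_l1_clear_access_error_intr(0, status);
 *     }
 */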
#ifdef __cplusplus
}
#endif