1 /*
2 * SPDX-FileCopyrightText: 2022-2023 Espressif Systems (Shanghai) CO LTD
3 *
4 * SPDX-License-Identifier: Apache-2.0
5 */
6
7 // The LL layer for Cache register operations
8
9 #pragma once
10
#include <stdbool.h>
#include <stdint.h>
#include "soc/dport_reg.h"
#include "soc/ext_mem_defs.h"
#include "hal/cache_types.h"
#include "hal/assert.h"
16
17
18 #ifdef __cplusplus
19 extern "C" {
20 #endif
21
22 /**
23 * @brief Get the buses of a particular cache that are mapped to a virtual address range
24 *
25 * External virtual address can only be accessed when the involved cache buses are enabled.
26 * This API is to get the cache buses where the memory region (from `vaddr_start` to `vaddr_start + len`) reside.
27 *
28 * @param cache_id cache ID (when l1 cache is per core)
29 * @param vaddr_start virtual address start
30 * @param len vaddr length
31 */
32 #if !BOOTLOADER_BUILD
33 __attribute__((always_inline))
34 #endif
cache_ll_l1_get_bus(uint32_t cache_id,uint32_t vaddr_start,uint32_t len)35 static inline cache_bus_mask_t cache_ll_l1_get_bus(uint32_t cache_id, uint32_t vaddr_start, uint32_t len)
36 {
37 HAL_ASSERT(cache_id == 0 || cache_id == 1);
38 cache_bus_mask_t mask = 0;
39
40 uint32_t vaddr_end = vaddr_start + len - 1;
41 if (vaddr_start >= IROM0_CACHE_ADDRESS_HIGH) {
42 HAL_ASSERT(false); //out of range
43 } else if (vaddr_start >= IROM0_CACHE_ADDRESS_LOW) {
44 mask |= CACHE_BUS_IBUS2;
45 } else if (vaddr_start >= IRAM1_CACHE_ADDRESS_LOW) {
46 mask |= CACHE_BUS_IBUS1;
47 mask |= (vaddr_end >= IROM0_CACHE_ADDRESS_LOW) ? CACHE_BUS_IBUS2 : 0;
48 } else if (vaddr_start >= IRAM0_CACHE_ADDRESS_LOW) {
49 mask |= CACHE_BUS_IBUS0;
50 mask |= (vaddr_end >= IRAM1_CACHE_ADDRESS_LOW) ? CACHE_BUS_IBUS1 : 0;
51 mask |= (vaddr_end >= IROM0_CACHE_ADDRESS_LOW) ? CACHE_BUS_IBUS2 : 0;
52 } else if (vaddr_start >= DRAM1_CACHE_ADDRESS_LOW) {
53 HAL_ASSERT(vaddr_end < DRAM1_CACHE_ADDRESS_HIGH); //out of range, vaddr should be consecutive, see `ext_mem_defs.h`
54 mask |= CACHE_BUS_DBUS1;
55 } else if (vaddr_start >= DROM0_CACHE_ADDRESS_LOW) {
56 HAL_ASSERT(vaddr_end < DROM0_CACHE_ADDRESS_HIGH); //out of range, vaddr should be consecutive, see `ext_mem_defs.h`
57 mask |= CACHE_BUS_DBUS0;
58 } else {
59 HAL_ASSERT(false);
60 }
61
62 return mask;
63 }
64
65 /**
66 * Enable the Cache Buses
67 *
68 * @param cache_id cache ID (when l1 cache is per core)
69 * @param mask To know which buses should be enabled
70 * @param enable 1: enable; 0: disable
71 */
72 #if !BOOTLOADER_BUILD
73 __attribute__((always_inline))
74 #endif
cache_ll_l1_enable_bus(uint32_t cache_id,cache_bus_mask_t mask)75 static inline void cache_ll_l1_enable_bus(uint32_t cache_id, cache_bus_mask_t mask)
76 {
77 (void) mask;
78 HAL_ASSERT(cache_id == 0 || cache_id == 1);
79
80 uint32_t bus_mask = 0;
81 if (cache_id == 0) {
82 bus_mask |= (mask & CACHE_BUS_IBUS0) ? DPORT_PRO_CACHE_MASK_IRAM0 : 0;
83 bus_mask |= (mask & CACHE_BUS_IBUS1) ? DPORT_PRO_CACHE_MASK_IRAM1 : 0;
84 bus_mask |= (mask & CACHE_BUS_IBUS2) ? DPORT_PRO_CACHE_MASK_IROM0 : 0;
85
86 bus_mask |= (mask & CACHE_BUS_DBUS0) ? DPORT_PRO_CACHE_MASK_DROM0 : 0;
87 bus_mask |= (mask & CACHE_BUS_DBUS1) ? DPORT_PRO_CACHE_MASK_DRAM1 : 0;
88
89 DPORT_REG_CLR_BIT(DPORT_PRO_CACHE_CTRL1_REG, bus_mask);
90 } else {
91 bus_mask |= (mask & CACHE_BUS_IBUS0) ? DPORT_APP_CACHE_MASK_IRAM0 : 0;
92 bus_mask |= (mask & CACHE_BUS_IBUS1) ? DPORT_APP_CACHE_MASK_IRAM1 : 0;
93 bus_mask |= (mask & CACHE_BUS_IBUS2) ? DPORT_APP_CACHE_MASK_IROM0 : 0;
94
95 bus_mask |= (mask & CACHE_BUS_DBUS0) ? DPORT_APP_CACHE_MASK_DROM0 : 0;
96 bus_mask |= (mask & CACHE_BUS_DBUS1) ? DPORT_APP_CACHE_MASK_DRAM1 : 0;
97
98 DPORT_REG_CLR_BIT(DPORT_APP_CACHE_CTRL1_REG, bus_mask);
99 }
100 }
101
102 /**
103 * Returns enabled buses for a given core
104 *
105 * @param cache_id cache ID (when l1 cache is per core)
106 *
107 * @return State of enabled buses
108 */
109 __attribute__((always_inline))
cache_ll_l1_get_enabled_bus(uint32_t cache_id)110 static inline cache_bus_mask_t cache_ll_l1_get_enabled_bus(uint32_t cache_id)
111 {
112 cache_bus_mask_t mask = 0;
113 HAL_ASSERT(cache_id == 0 || cache_id == 1);
114 if (cache_id == 0) {
115 uint32_t bus_mask= DPORT_REG_READ(DPORT_PRO_CACHE_CTRL1_REG);
116 mask |= (!(bus_mask & DPORT_PRO_CACHE_MASK_IRAM0)) ? CACHE_BUS_IBUS0 : 0;
117 mask |= (!(bus_mask & DPORT_PRO_CACHE_MASK_IRAM1)) ? CACHE_BUS_IBUS1 : 0;
118 mask |= (!(bus_mask & DPORT_PRO_CACHE_MASK_IROM0)) ? CACHE_BUS_IBUS2 : 0;
119
120 mask |= (!(bus_mask & DPORT_PRO_CACHE_MASK_DROM0)) ? CACHE_BUS_DBUS0 : 0;
121 mask |= (!(bus_mask & DPORT_PRO_CACHE_MASK_DRAM1)) ? CACHE_BUS_DBUS1 : 0;
122 } else {
123 uint32_t bus_mask= DPORT_REG_READ(DPORT_APP_CACHE_CTRL1_REG);
124 mask |= (!(bus_mask & DPORT_APP_CACHE_MASK_IRAM0)) ? CACHE_BUS_IBUS0 : 0;
125 mask |= (!(bus_mask & DPORT_APP_CACHE_MASK_IRAM1)) ? CACHE_BUS_IBUS1 : 0;
126 mask |= (!(bus_mask & DPORT_APP_CACHE_MASK_IROM0)) ? CACHE_BUS_IBUS2 : 0;
127
128 mask |= (!(bus_mask & DPORT_APP_CACHE_MASK_DROM0)) ? CACHE_BUS_DBUS0 : 0;
129 mask |= (!(bus_mask & DPORT_APP_CACHE_MASK_DRAM1)) ? CACHE_BUS_DBUS1 : 0;
130 }
131 return mask;
132 }
133
134 /**
135 * Disable the Cache Buses
136 *
137 * @param cache_id cache ID (when l1 cache is per core)
138 * @param mask To know which buses should be enabled
139 * @param enable 1: enable; 0: disable
140 */
141 __attribute__((always_inline))
cache_ll_l1_disable_bus(uint32_t cache_id,cache_bus_mask_t mask)142 static inline void cache_ll_l1_disable_bus(uint32_t cache_id, cache_bus_mask_t mask)
143 {
144 (void) mask;
145 HAL_ASSERT(cache_id == 0 || cache_id == 1);
146
147 uint32_t bus_mask = 0;
148 if (cache_id == 0) {
149 bus_mask |= (mask & CACHE_BUS_IBUS0) ? DPORT_PRO_CACHE_MASK_IRAM0 : 0;
150 bus_mask |= (mask & CACHE_BUS_IBUS1) ? DPORT_PRO_CACHE_MASK_IRAM1 : 0;
151 bus_mask |= (mask & CACHE_BUS_IBUS2) ? DPORT_PRO_CACHE_MASK_IROM0 : 0;
152
153 bus_mask |= (mask & CACHE_BUS_DBUS0) ? DPORT_PRO_CACHE_MASK_DROM0 : 0;
154 bus_mask |= (mask & CACHE_BUS_DBUS1) ? DPORT_PRO_CACHE_MASK_DRAM1 : 0;
155
156 DPORT_REG_SET_BIT(DPORT_PRO_CACHE_CTRL1_REG, bus_mask);
157 } else {
158 bus_mask |= (mask & CACHE_BUS_IBUS0) ? DPORT_APP_CACHE_MASK_IRAM0 : 0;
159 bus_mask |= (mask & CACHE_BUS_IBUS1) ? DPORT_APP_CACHE_MASK_IRAM1 : 0;
160 bus_mask |= (mask & CACHE_BUS_IBUS2) ? DPORT_APP_CACHE_MASK_IROM0 : 0;
161
162 bus_mask |= (mask & CACHE_BUS_DBUS0) ? DPORT_APP_CACHE_MASK_DROM0 : 0;
163 bus_mask |= (mask & CACHE_BUS_DBUS1) ? DPORT_APP_CACHE_MASK_DRAM1 : 0;
164
165 DPORT_REG_SET_BIT(DPORT_APP_CACHE_CTRL1_REG, bus_mask);
166 }
167 }
168
169 #ifdef __cplusplus
170 }
171 #endif
172