/*
 * Copyright 2016-2021 NXP
 * All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#ifndef _FSL_CACHE_H_
#define _FSL_CACHE_H_

#include "fsl_common.h"

/*!
 * @addtogroup cache64
 * @{
 */

/*******************************************************************************
 * Definitions
 ******************************************************************************/

/*! @name Driver version */
/*@{*/
/*! @brief cache driver version. */
#define FSL_CACHE_DRIVER_VERSION (MAKE_VERSION(2, 0, 6))
/*@}*/

/*! @brief cache line size. */
#define CACHE64_LINESIZE_BYTE (FSL_FEATURE_CACHE64_CTRL_LINESIZE_BYTE)

#if (defined(FSL_FEATURE_SOC_CACHE64_POLSEL_COUNT) && (FSL_FEATURE_SOC_CACHE64_POLSEL_COUNT > 0))
/*! @brief cache region number. */
#define CACHE64_REGION_NUM (3U)
/*! @brief cache region alignment. */
#define CACHE64_REGION_ALIGNMENT (0x400U)

/*! @brief Cache policy for a CACHE64 region. */
typedef enum _cache64_policy
{
    kCACHE64_PolicyNonCacheable = 0, /*!< Non-cacheable */
    kCACHE64_PolicyWriteThrough = 1, /*!< Write through */
    kCACHE64_PolicyWriteBack    = 2, /*!< Write back */
} cache64_policy_t;

/*! @brief CACHE64 configuration structure. */
typedef struct _cache64_config
{
    /*!< The cache controller can divide the whole memory into 3 regions.
     * A boundary address is a FlexSPI internal address (starting from 0) rather than a system
     * address (starting from the FlexSPI AMBA base); it splits two adjacent regions and must
     * be 1KB aligned. The boundary address itself belongs to the upper region. */
    uint32_t boundaryAddr[CACHE64_REGION_NUM - 1];
    /*!< Cacheable policy for each region. */
    cache64_policy_t policy[CACHE64_REGION_NUM];
} cache64_config_t;
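
/*
 * Configuration sketch: split the FlexSPI space into three regions with different
 * policies and pass the structure to CACHE64_Init(). The boundary values below are
 * illustrative assumptions, not silicon defaults; they only have to be 1KB
 * (CACHE64_REGION_ALIGNMENT) aligned FlexSPI-internal addresses.
 *
 * @code
 *   cache64_config_t config;
 *   config.boundaryAddr[0] = 0x00400000U; // region 0/1 boundary, FlexSPI internal address
 *   config.boundaryAddr[1] = 0x00800000U; // region 1/2 boundary
 *   config.policy[0]       = kCACHE64_PolicyWriteBack;
 *   config.policy[1]       = kCACHE64_PolicyWriteThrough;
 *   config.policy[2]       = kCACHE64_PolicyNonCacheable;
 * @endcode
 */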
#endif

/*******************************************************************************
 * API
 ******************************************************************************/

#if defined(__cplusplus)
extern "C" {
#endif

/*!
 * @name cache control for cache64
 *@{
 */

#if (defined(FSL_FEATURE_SOC_CACHE64_POLSEL_COUNT) && (FSL_FEATURE_SOC_CACHE64_POLSEL_COUNT > 0))
/*!
 * @brief Returns an instance number given peripheral base address.
 *
 * @param base The peripheral base address.
 * @return CACHE64_POLSEL instance number starting from 0.
 */
uint32_t CACHE64_GetInstance(CACHE64_POLSEL_Type *base);
#endif

/*!
 * @brief Returns an instance number given physical memory address.
 *
 * @param address The physical memory address.
 * @return CACHE64_CTRL instance number starting from 0.
 */
uint32_t CACHE64_GetInstanceByAddr(uint32_t address);

#if (defined(FSL_FEATURE_SOC_CACHE64_POLSEL_COUNT) && (FSL_FEATURE_SOC_CACHE64_POLSEL_COUNT > 0))
/*!
 * @brief Initializes a CACHE64 instance with the user configuration structure.
 *
 * This function configures the CACHE64 module with user-defined settings. Call
 * CACHE64_GetDefaultConfig() first to get the default configuration structure.
 *
 * @param base CACHE64_POLSEL peripheral base address.
 * @param config Pointer to a user-defined configuration structure.
 * @retval kStatus_Success CACHE64 initialization succeeded.
 */
status_t CACHE64_Init(CACHE64_POLSEL_Type *base, const cache64_config_t *config);

/*!
 * @brief Gets the default configuration structure.
 *
 * This function initializes the CACHE64 configuration structure to default values:
 * the first region covers the whole cacheable area and its policy is set to write-back.
 *
 * @param config Pointer to a configuration structure.
 */
void CACHE64_GetDefaultConfig(cache64_config_t *config);
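
/*
 * Usage sketch: start from the default configuration and initialize the policy
 * selector. Illustrative only; CACHE64_POLSEL0 stands for the device's
 * CACHE64_POLSEL instance base, whose exact name comes from the device header.
 *
 * @code
 *   cache64_config_t config;
 *   CACHE64_GetDefaultConfig(&config);
 *   (void)CACHE64_Init(CACHE64_POLSEL0, &config);
 * @endcode
 */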
#endif

/*!
 * @brief Enables the cache.
 *
 * @param base CACHE64_CTRL peripheral base address.
 */
void CACHE64_EnableCache(CACHE64_CTRL_Type *base);

/*!
 * @brief Disables the cache.
 *
 * @param base CACHE64_CTRL peripheral base address.
 */
void CACHE64_DisableCache(CACHE64_CTRL_Type *base);

/*!
 * @brief Invalidates the cache.
 *
 * @param base CACHE64_CTRL peripheral base address.
 */
void CACHE64_InvalidateCache(CACHE64_CTRL_Type *base);
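
/*
 * Usage sketch: drop all cached lines after the backing memory was changed behind
 * the cache (for example, after reprogramming the external flash). Illustrative
 * only; CACHE64_CTRL0 stands for the device's CACHE64_CTRL instance base, whose
 * exact name comes from the device header.
 *
 * @code
 *   CACHE64_InvalidateCache(CACHE64_CTRL0);
 * @endcode
 */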

/*!
 * @brief Invalidates cache by range.
 *
 * @param address The physical address of the memory to invalidate.
 * @param size_byte Size of the memory to be invalidated, in bytes.
 * @note The address and size should be aligned to CACHE64_LINESIZE_BYTE.
 * If address is not aligned, it is forced down to a CACHE64_LINESIZE_BYTE boundary.
 * For size_byte, the application must either ensure the alignment or order its
 * operations correctly when size_byte is not aligned.
 */
void CACHE64_InvalidateCacheByRange(uint32_t address, uint32_t size_byte);
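
/*
 * Alignment sketch: one way to satisfy the note above is to widen the range to
 * whole cache lines before calling the driver. APP_InvalidateAligned() is a
 * hypothetical application helper, not part of this driver. Note that widening an
 * invalidate can discard unflushed data in the partial first/last lines, so only
 * use this for buffers that own their cache lines entirely.
 *
 * @code
 *   static void APP_InvalidateAligned(uint32_t addr, uint32_t bytes)
 *   {
 *       uint32_t start = addr & ~((uint32_t)CACHE64_LINESIZE_BYTE - 1U);
 *       uint32_t end   = (addr + bytes + (uint32_t)CACHE64_LINESIZE_BYTE - 1U) &
 *                        ~((uint32_t)CACHE64_LINESIZE_BYTE - 1U);
 *       CACHE64_InvalidateCacheByRange(start, end - start);
 *   }
 * @endcode
 */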

/*!
 * @brief Cleans the cache.
 *
 * @param base CACHE64_CTRL peripheral base address.
 */
void CACHE64_CleanCache(CACHE64_CTRL_Type *base);

/*!
 * @brief Cleans cache by range.
 *
 * @param address The physical address of the memory to clean.
 * @param size_byte Size of the memory to be cleaned, in bytes.
 * @note The address and size should be aligned to CACHE64_LINESIZE_BYTE.
 * If address is not aligned, it is forced down to a CACHE64_LINESIZE_BYTE boundary.
 * For size_byte, the application must either ensure the alignment or order its
 * operations correctly when size_byte is not aligned.
 */
void CACHE64_CleanCacheByRange(uint32_t address, uint32_t size_byte);

/*!
 * @brief Cleans and invalidates the cache.
 *
 * @param base CACHE64_CTRL peripheral base address.
 */
void CACHE64_CleanInvalidateCache(CACHE64_CTRL_Type *base);

/*!
 * @brief Cleans and invalidates cache by range.
 *
 * @param address The physical address of the memory to clean and invalidate.
 * @param size_byte Size of the memory to be cleaned and invalidated, in bytes.
 * @note The address and size should be aligned to CACHE64_LINESIZE_BYTE.
 * If address is not aligned, it is forced down to a CACHE64_LINESIZE_BYTE boundary.
 * For size_byte, the application must either ensure the alignment or order its
 * operations correctly when size_byte is not aligned.
 */
void CACHE64_CleanInvalidateCacheByRange(uint32_t address, uint32_t size_byte);

#if !(defined(FSL_FEATURE_CACHE64_CTRL_HAS_NO_WRITE_BUF) && FSL_FEATURE_CACHE64_CTRL_HAS_NO_WRITE_BUF)
/*!
 * @brief Enables/disables the write buffer.
 *
 * @param base CACHE64_CTRL peripheral base address.
 * @param enable The enable or disable flag.
 *               true  - enable the write buffer.
 *               false - disable the write buffer.
 */
void CACHE64_EnableWriteBuffer(CACHE64_CTRL_Type *base, bool enable);
#endif

/*@}*/

/*!
 * @name Unified Cache Control for all caches
 *@{
 */

/*!
 * @brief Invalidates instruction cache by range.
 *
 * @param address The physical address.
 * @param size_byte Size of the memory to be invalidated, in bytes.
 * @note The address and size should be aligned to CACHE64_LINESIZE_BYTE, which is the cache
 * operation unit (FSL_FEATURE_CACHE64_CTRL_LINESIZE_BYTE). If address is not aligned, it is
 * forced down to a cache line boundary. For size_byte, the application must either ensure
 * the alignment or order its operations correctly when size_byte is not aligned.
 */
static inline void ICACHE_InvalidateByRange(uint32_t address, uint32_t size_byte)
{
    CACHE64_InvalidateCacheByRange(address, size_byte);
}

/*!
 * @brief Invalidates data cache by range.
 *
 * @param address The physical address.
 * @param size_byte Size of the memory to be invalidated, in bytes.
 * @note The address and size should be aligned to CACHE64_LINESIZE_BYTE, which is the cache
 * operation unit (FSL_FEATURE_CACHE64_CTRL_LINESIZE_BYTE). If address is not aligned, it is
 * forced down to a cache line boundary. For size_byte, the application must either ensure
 * the alignment or order its operations correctly when size_byte is not aligned.
 */
static inline void DCACHE_InvalidateByRange(uint32_t address, uint32_t size_byte)
{
    CACHE64_InvalidateCacheByRange(address, size_byte);
}
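
/*
 * Usage sketch: after another bus master (for example a DMA channel) has written
 * into a cacheable buffer, invalidate that range so the CPU reads the fresh data.
 * rxBuffer is a hypothetical, cache-line-aligned application buffer.
 *
 * @code
 *   DCACHE_InvalidateByRange((uint32_t)rxBuffer, sizeof(rxBuffer));
 * @endcode
 */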

/*!
 * @brief Cleans data cache by range.
 *
 * @param address The physical address.
 * @param size_byte Size of the memory to be cleaned, in bytes.
 * @note The address and size should be aligned to CACHE64_LINESIZE_BYTE, which is the cache
 * operation unit (FSL_FEATURE_CACHE64_CTRL_LINESIZE_BYTE). If address is not aligned, it is
 * forced down to a cache line boundary. For size_byte, the application must either ensure
 * the alignment or order its operations correctly when size_byte is not aligned.
 */
static inline void DCACHE_CleanByRange(uint32_t address, uint32_t size_byte)
{
    CACHE64_CleanCacheByRange(address, size_byte);
}
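
/*
 * Usage sketch: before another bus master (for example a DMA channel) reads from a
 * cacheable buffer, clean that range so pending write-back data reaches memory.
 * txBuffer is a hypothetical, cache-line-aligned application buffer.
 *
 * @code
 *   DCACHE_CleanByRange((uint32_t)txBuffer, sizeof(txBuffer));
 * @endcode
 */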

/*!
 * @brief Cleans and invalidates data cache by range.
 *
 * @param address The physical address.
 * @param size_byte Size of the memory to be cleaned and invalidated, in bytes.
 * @note The address and size should be aligned to CACHE64_LINESIZE_BYTE, which is the cache
 * operation unit (FSL_FEATURE_CACHE64_CTRL_LINESIZE_BYTE). If address is not aligned, it is
 * forced down to a cache line boundary. For size_byte, the application must either ensure
 * the alignment or order its operations correctly when size_byte is not aligned.
 */
static inline void DCACHE_CleanInvalidateByRange(uint32_t address, uint32_t size_byte)
{
    CACHE64_CleanInvalidateCacheByRange(address, size_byte);
}

/*@}*/

#if defined(__cplusplus)
}
#endif

/*! @}*/

#endif /* _FSL_CACHE_H_*/