1 /*
2 * Copyright (c) 2017-2020 Arm Limited
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 /**
18 * \file cache_drv.c
19 * \brief Driver for L1 instruction cache based on SSE-200 version r1p0
20 */
21
22 #include "cache_drv.h"
23
/**
 * \brief L1 cache register map structure.
 *
 * Matches the 4KB register frame of the SSE-200 r1p0 instruction cache.
 * Reserved gaps are padded so the ID registers land at the standard
 * 0xFD0-0xFFC component/peripheral ID offsets.
 */
struct _arm_cache_reg_map_t {
    volatile uint32_t cacheichwparams;
        /*!< Offset: 0x000 (R/ ) HW Parameters Register */
    volatile uint32_t cacheicctrl;
        /*!< Offset: 0x004 (R/W) Control Register */
    volatile uint32_t reserved_0[62];
        /*!< Offset: 0x008-0x0FC Reserved */
    volatile uint32_t cacheicirqstat;
        /*!< Offset: 0x100 (R/ ) Interrupt Request Status Register */
    volatile uint32_t cacheicirqsclr;
        /*!< Offset: 0x104 ( /W) Interrupt Status Clear Register */
    volatile uint32_t cacheicirqen;
        /*!< Offset: 0x108 (R/W) Interrupt Enable Register */
    volatile uint32_t cacheicdbgfillerr;
        /*!< Offset: 0x10C (R/ ) Fill Error Address Register */
    volatile uint32_t reserved_1[124];
        /*!< Offset: 0x110-0x2FC Reserved */
    volatile uint32_t cacheicsh;
        /*!< Offset: 0x300 (R/ ) Cache Statistic Hit Register */
    volatile uint32_t cacheicsm;
        /*!< Offset: 0x304 (R/ ) Cache Statistic Miss Register */
    volatile uint32_t cacheicsuc;
        /*!< Offset: 0x308 (R/ ) Cache Statistic Uncached Register */
    volatile uint32_t reserved_2[817];
        /*!< Offset: 0x30C-0xFCC Reserved
         *   817 words: (0xFD0 - 0x30C) / 4, so cachepidr4 sits at 0xFD0 */
    volatile uint32_t cachepidr4;
        /*!< Offset: 0xFD0 (R/ ) Product ID Register 4 */
    volatile uint32_t cachepidr5;
        /*!< Offset: 0xFD4 (R/ ) Product ID Register 5 */
    volatile uint32_t cachepidr6;
        /*!< Offset: 0xFD8 (R/ ) Product ID Register 6 */
    volatile uint32_t cachepidr7;
        /*!< Offset: 0xFDC (R/ ) Product ID Register 7 */
    volatile uint32_t cachepidr0;
        /*!< Offset: 0xFE0 (R/ ) Product ID Register 0 */
    volatile uint32_t cachepidr1;
        /*!< Offset: 0xFE4 (R/ ) Product ID Register 1 */
    volatile uint32_t cachepidr2;
        /*!< Offset: 0xFE8 (R/ ) Product ID Register 2 */
    volatile uint32_t cachepidr3;
        /*!< Offset: 0xFEC (R/ ) Product ID Register 3 */
    volatile uint32_t cachecidr0;
        /*!< Offset: 0xFF0 (R/ ) Component ID Register 0 */
    volatile uint32_t cachecidr1;
        /*!< Offset: 0xFF4 (R/ ) Component ID Register 1 */
    volatile uint32_t cachecidr2;
        /*!< Offset: 0xFF8 (R/ ) Component ID Register 2 */
    volatile uint32_t cachecidr3;
        /*!< Offset: 0xFFC (R/ ) Component ID Register 3 */
};
77
/* Field masks for the CACHEICHWPARAMS register; the *_OFF bit positions are
 * provided by cache_drv.h. CSIZE is a 4-bit field, the rest are single bits. */
#define ARM_CACHEICHWPARAMS_CSIZE_MASK (0xFu<<ARM_CACHEICHWPARAMS_CSIZE_OFF)
#define ARM_CACHEICHWPARAMS_STATS_MASK (0x1u<<ARM_CACHEICHWPARAMS_STATS_OFF)
#define ARM_CACHEICHWPARAMS_INVMAT_MASK (0x1u<<ARM_CACHEICHWPARAMS_INVMAT_OFF)

/* Single-bit masks for the CACHEICCTRL register; *_OFF positions come from
 * cache_drv.h. */
#define ARM_CACHEICCTRL_CACHEEN_MASK (0x1u<<ARM_CACHEICCTRL_CACHEEN_OFF)
#define ARM_CACHEICCTRL_PINV_MASK (0x1u<<ARM_CACHEICCTRL_PINV_OFF)
#define ARM_CACHEICCTRL_FINV_MASK (0x1u<<ARM_CACHEICCTRL_FINV_OFF)
#define ARM_CACHEICCTRL_STATEN_MASK (0x1u<<ARM_CACHEICCTRL_STATEN_OFF)
#define ARM_CACHEICCTRL_STATC_MASK (0x1u<<ARM_CACHEICCTRL_STATC_OFF)
#define ARM_CACHEICCTRL_HALLOC_MASK (0x1u<<ARM_CACHEICCTRL_HALLOC_OFF)
88
arm_cache_get_size(struct arm_cache_dev_t * dev)89 enum arm_cache_size_t arm_cache_get_size(struct arm_cache_dev_t* dev)
90 {
91 struct _arm_cache_reg_map_t* p_cache =
92 (struct _arm_cache_reg_map_t*)dev->cfg->base;
93
94 enum arm_cache_size_t val = (enum arm_cache_size_t)
95 (p_cache->cacheichwparams & ARM_CACHEICHWPARAMS_CSIZE_MASK);
96 /**
97 * 9: 512 byte
98 * 10: 1 KB
99 * 11: 2 KB
100 * 12: 4 KB
101 * 13: 8 KB
102 * 14: 16 KB
103 * Other values are reserved, returning error
104 */
105 if ((val < arm_cache_size_512B) || (val > arm_cache_size_16KB)) {
106 return arm_cache_size_err;
107 }
108
109 return val;
110 }
111
arm_cache_is_stat_func_available(struct arm_cache_dev_t * dev)112 bool arm_cache_is_stat_func_available(struct arm_cache_dev_t* dev)
113 {
114 struct _arm_cache_reg_map_t* p_cache =
115 (struct _arm_cache_reg_map_t*)dev->cfg->base;
116
117 return (p_cache->cacheichwparams & ARM_CACHEICHWPARAMS_STATS_MASK) != 0;
118 }
119
arm_cache_is_invalidate_cache_line_enabled(struct arm_cache_dev_t * dev)120 bool arm_cache_is_invalidate_cache_line_enabled(struct arm_cache_dev_t* dev)
121 {
122 struct _arm_cache_reg_map_t* p_cache =
123 (struct _arm_cache_reg_map_t*)dev->cfg->base;
124
125 return (p_cache->cacheichwparams & ARM_CACHEICHWPARAMS_INVMAT_MASK) != 0;
126 }
127
arm_cache_enable(struct arm_cache_dev_t * dev)128 void arm_cache_enable(struct arm_cache_dev_t* dev)
129 {
130 struct _arm_cache_reg_map_t* p_cache =
131 (struct _arm_cache_reg_map_t*)dev->cfg->base;
132
133 p_cache->cacheicctrl |= ARM_CACHEICCTRL_CACHEEN_MASK;
134 }
135
arm_cache_enable_blocking(struct arm_cache_dev_t * dev)136 void arm_cache_enable_blocking(struct arm_cache_dev_t* dev)
137 {
138 struct _arm_cache_reg_map_t* p_cache =
139 (struct _arm_cache_reg_map_t*)dev->cfg->base;
140
141 p_cache->cacheicctrl |= ARM_CACHEICCTRL_CACHEEN_MASK;
142
143 while ((arm_cache_get_raw_intr_status(dev) & arm_cache_cec_intr_mask) == 0) {
144 }
145
146 arm_cache_clear_intr(dev, arm_cache_cec_intr_mask);
147 }
148
arm_cache_disable(struct arm_cache_dev_t * dev)149 void arm_cache_disable(struct arm_cache_dev_t* dev)
150 {
151 struct _arm_cache_reg_map_t* p_cache =
152 (struct _arm_cache_reg_map_t*)dev->cfg->base;
153
154 p_cache->cacheicctrl &= ~ARM_CACHEICCTRL_CACHEEN_MASK;
155 }
156
arm_cache_disable_blocking(struct arm_cache_dev_t * dev)157 void arm_cache_disable_blocking(struct arm_cache_dev_t* dev)
158 {
159 struct _arm_cache_reg_map_t* p_cache =
160 (struct _arm_cache_reg_map_t*)dev->cfg->base;
161
162 p_cache->cacheicctrl &= ~ARM_CACHEICCTRL_CACHEEN_MASK;
163
164 while ((arm_cache_get_raw_intr_status(dev) & arm_cache_cdc_intr_mask) == 0) {
165 }
166
167 arm_cache_clear_intr(dev, arm_cache_cdc_intr_mask);
168 }
169
arm_cache_is_enabled(struct arm_cache_dev_t * dev)170 bool arm_cache_is_enabled(struct arm_cache_dev_t* dev)
171 {
172 struct _arm_cache_reg_map_t* p_cache =
173 (struct _arm_cache_reg_map_t*)dev->cfg->base;
174 return (p_cache->cacheicctrl & ARM_CACHEICCTRL_CACHEEN_MASK) != 0;
175 }
176
arm_cache_full_invalidate(struct arm_cache_dev_t * dev)177 void arm_cache_full_invalidate(struct arm_cache_dev_t* dev)
178 {
179 struct _arm_cache_reg_map_t* p_cache =
180 (struct _arm_cache_reg_map_t*)dev->cfg->base;
181
182 p_cache->cacheicctrl |= ARM_CACHEICCTRL_FINV_MASK;
183 }
184
arm_cache_full_invalidate_blocking(struct arm_cache_dev_t * dev)185 void arm_cache_full_invalidate_blocking(struct arm_cache_dev_t* dev)
186 {
187 struct _arm_cache_reg_map_t* p_cache =
188 (struct _arm_cache_reg_map_t*)dev->cfg->base;
189
190 p_cache->cacheicctrl |= ARM_CACHEICCTRL_FINV_MASK;
191
192 while ((arm_cache_get_raw_intr_status(dev) & arm_cache_ic_intr_mask) == 0) {
193 }
194
195 arm_cache_clear_intr(dev, arm_cache_ic_intr_mask);
196 }
197
arm_cache_statistic_enable(struct arm_cache_dev_t * dev)198 void arm_cache_statistic_enable(struct arm_cache_dev_t* dev)
199 {
200 struct _arm_cache_reg_map_t* p_cache =
201 (struct _arm_cache_reg_map_t*)dev->cfg->base;
202
203 p_cache->cacheicctrl |= ARM_CACHEICCTRL_STATEN_MASK;
204 }
205
arm_cache_statistic_disable(struct arm_cache_dev_t * dev)206 void arm_cache_statistic_disable(struct arm_cache_dev_t* dev)
207 {
208 struct _arm_cache_reg_map_t* p_cache =
209 (struct _arm_cache_reg_map_t*)dev->cfg->base;
210
211 p_cache->cacheicctrl &= ~ARM_CACHEICCTRL_STATEN_MASK;
212 }
213
arm_cache_clear_statistic_value(struct arm_cache_dev_t * dev)214 void arm_cache_clear_statistic_value(struct arm_cache_dev_t* dev)
215 {
216 struct _arm_cache_reg_map_t* p_cache =
217 (struct _arm_cache_reg_map_t*)dev->cfg->base;
218
219 p_cache->cacheicctrl |= ARM_CACHEICCTRL_STATC_MASK;
220 }
221
arm_cache_handler_alloc_enable(struct arm_cache_dev_t * dev)222 void arm_cache_handler_alloc_enable(struct arm_cache_dev_t* dev)
223 {
224 struct _arm_cache_reg_map_t* p_cache =
225 (struct _arm_cache_reg_map_t*)dev->cfg->base;
226
227 p_cache->cacheicctrl |= ARM_CACHEICCTRL_HALLOC_MASK;
228 }
229
arm_cache_handler_alloc_disable(struct arm_cache_dev_t * dev)230 void arm_cache_handler_alloc_disable(struct arm_cache_dev_t* dev)
231 {
232 struct _arm_cache_reg_map_t* p_cache =
233 (struct _arm_cache_reg_map_t*)dev->cfg->base;
234
235 p_cache->cacheicctrl &= ~ARM_CACHEICCTRL_HALLOC_MASK;
236 }
237
arm_cache_enable_intr(struct arm_cache_dev_t * dev,enum arm_cache_intr_t mask)238 void arm_cache_enable_intr(struct arm_cache_dev_t* dev,
239 enum arm_cache_intr_t mask)
240 {
241 struct _arm_cache_reg_map_t* p_cache =
242 (struct _arm_cache_reg_map_t*)dev->cfg->base;
243
244 p_cache->cacheicirqen |= (uint32_t)(mask);
245
246 }
247
arm_cache_disable_intr(struct arm_cache_dev_t * dev,enum arm_cache_intr_t mask)248 void arm_cache_disable_intr(struct arm_cache_dev_t* dev,
249 enum arm_cache_intr_t mask)
250 {
251 struct _arm_cache_reg_map_t* p_cache =
252 (struct _arm_cache_reg_map_t*)dev->cfg->base;
253
254 p_cache->cacheicirqen &= ~(uint32_t)(mask);
255 }
256
arm_cache_clear_intr(struct arm_cache_dev_t * dev,enum arm_cache_intr_t mask)257 void arm_cache_clear_intr(struct arm_cache_dev_t* dev,
258 enum arm_cache_intr_t mask)
259 {
260 struct _arm_cache_reg_map_t* p_cache =
261 (struct _arm_cache_reg_map_t*)dev->cfg->base;
262
263 p_cache->cacheicirqsclr = (uint32_t)mask;
264 }
265
arm_cache_get_masked_intr_status(struct arm_cache_dev_t * dev)266 enum arm_cache_intr_t arm_cache_get_masked_intr_status(
267 struct arm_cache_dev_t* dev)
268 {
269 struct _arm_cache_reg_map_t* p_cache =
270 (struct _arm_cache_reg_map_t*)dev->cfg->base;
271
272 return (enum arm_cache_intr_t)
273 (p_cache->cacheicirqstat & p_cache->cacheicirqen);
274 }
275
arm_cache_get_raw_intr_status(struct arm_cache_dev_t * dev)276 enum arm_cache_intr_t arm_cache_get_raw_intr_status(
277 struct arm_cache_dev_t* dev)
278 {
279 struct _arm_cache_reg_map_t* p_cache =
280 (struct _arm_cache_reg_map_t*)dev->cfg->base;
281
282 return (enum arm_cache_intr_t)(p_cache->cacheicirqstat);
283 }
284
arm_cache_get_debug_fill_address(struct arm_cache_dev_t * dev)285 uint32_t arm_cache_get_debug_fill_address(struct arm_cache_dev_t* dev)
286 {
287 struct _arm_cache_reg_map_t* p_cache =
288 (struct _arm_cache_reg_map_t*)dev->cfg->base;
289
290 return p_cache->cacheicdbgfillerr;
291 }
292
arm_cache_get_hit_count(struct arm_cache_dev_t * dev)293 uint32_t arm_cache_get_hit_count(struct arm_cache_dev_t* dev)
294 {
295 struct _arm_cache_reg_map_t* p_cache =
296 (struct _arm_cache_reg_map_t*)dev->cfg->base;
297
298 return p_cache->cacheicsh;
299 }
300
arm_cache_get_miss_count(struct arm_cache_dev_t * dev)301 uint32_t arm_cache_get_miss_count(struct arm_cache_dev_t* dev)
302 {
303 struct _arm_cache_reg_map_t* p_cache =
304 (struct _arm_cache_reg_map_t*)dev->cfg->base;
305
306 return p_cache->cacheicsm;
307 }
308
arm_cache_get_uncached_count(struct arm_cache_dev_t * dev)309 uint32_t arm_cache_get_uncached_count(struct arm_cache_dev_t* dev)
310 {
311 struct _arm_cache_reg_map_t* p_cache =
312 (struct _arm_cache_reg_map_t*)dev->cfg->base;
313
314 return p_cache->cacheicsuc;
315 }
316