1 /*
2 * Copyright 2021 Intel Corporation
3 * SPDX-License-Identifier: Apache-2.0
4 */
5 #ifndef ZEPHYR_INCLUDE_ARCH_XTENSA_CACHE_H_
6 #define ZEPHYR_INCLUDE_ARCH_XTENSA_CACHE_H_
7
#include <errno.h>

#include <xtensa/config/core-isa.h>
#include <zephyr/toolchain.h>
#include <zephyr/sys/util.h>
#include <zephyr/debug/sparse.h>
#include <xtensa/hal.h>
13
14 #ifdef __cplusplus
15 extern "C" {
16 #endif
17
18 #define Z_DCACHE_MAX (XCHAL_DCACHE_SIZE / XCHAL_DCACHE_WAYS)
19
20 #if XCHAL_DCACHE_SIZE
21 BUILD_ASSERT(Z_IS_POW2(XCHAL_DCACHE_LINESIZE));
22 BUILD_ASSERT(Z_IS_POW2(Z_DCACHE_MAX));
23 #endif
24
25 #if defined(CONFIG_DCACHE) || defined(__DOXYGEN__)
26
27 /** Implementation of @ref arch_dcache_flush_range. */
arch_dcache_flush_range(void * addr,size_t bytes)28 static ALWAYS_INLINE int arch_dcache_flush_range(void *addr, size_t bytes)
29 {
30 #if XCHAL_DCACHE_SIZE
31 size_t step = XCHAL_DCACHE_LINESIZE;
32 size_t first = ROUND_DOWN(addr, step);
33 size_t last = ROUND_UP(((long)addr) + bytes, step);
34 size_t line;
35
36 for (line = first; bytes && line < last; line += step) {
37 __asm__ volatile("dhwb %0, 0" :: "r"(line));
38 }
39 #endif
40 return 0;
41 }
42
43 /** Implementation of @ref arch_dcache_flush_and_invd_range. */
arch_dcache_flush_and_invd_range(void * addr,size_t bytes)44 static ALWAYS_INLINE int arch_dcache_flush_and_invd_range(void *addr, size_t bytes)
45 {
46 #if XCHAL_DCACHE_SIZE
47 size_t step = XCHAL_DCACHE_LINESIZE;
48 size_t first = ROUND_DOWN(addr, step);
49 size_t last = ROUND_UP(((long)addr) + bytes, step);
50 size_t line;
51
52 for (line = first; bytes && line < last; line += step) {
53 __asm__ volatile("dhwbi %0, 0" :: "r"(line));
54 }
55 #endif
56 return 0;
57 }
58
59 /** Implementation of @ref arch_dcache_invd_range. */
arch_dcache_invd_range(void * addr,size_t bytes)60 static ALWAYS_INLINE int arch_dcache_invd_range(void *addr, size_t bytes)
61 {
62 #if XCHAL_DCACHE_SIZE
63 size_t step = XCHAL_DCACHE_LINESIZE;
64 size_t first = ROUND_DOWN(addr, step);
65 size_t last = ROUND_UP(((long)addr) + bytes, step);
66 size_t line;
67
68 for (line = first; bytes && line < last; line += step) {
69 __asm__ volatile("dhi %0, 0" :: "r"(line));
70 }
71 #endif
72 return 0;
73 }
74
75 /** Implementation of @ref arch_dcache_invd_all. */
arch_dcache_invd_all(void)76 static ALWAYS_INLINE int arch_dcache_invd_all(void)
77 {
78 #if XCHAL_DCACHE_SIZE
79 size_t step = XCHAL_DCACHE_LINESIZE;
80 size_t line;
81
82 for (line = 0; line < XCHAL_DCACHE_SIZE; line += step) {
83 __asm__ volatile("dii %0, 0" :: "r"(line));
84 }
85 #endif
86 return 0;
87 }
88
89 /** Implementation of @ref arch_dcache_flush_all. */
arch_dcache_flush_all(void)90 static ALWAYS_INLINE int arch_dcache_flush_all(void)
91 {
92 #if XCHAL_DCACHE_SIZE
93 size_t step = XCHAL_DCACHE_LINESIZE;
94 size_t line;
95
96 for (line = 0; line < XCHAL_DCACHE_SIZE; line += step) {
97 __asm__ volatile("diwb %0, 0" :: "r"(line));
98 }
99 #endif
100 return 0;
101 }
102
103 /** Implementation of @ref arch_dcache_flush_and_invd_all. */
arch_dcache_flush_and_invd_all(void)104 static ALWAYS_INLINE int arch_dcache_flush_and_invd_all(void)
105 {
106 #if XCHAL_DCACHE_SIZE
107 size_t step = XCHAL_DCACHE_LINESIZE;
108 size_t line;
109
110 for (line = 0; line < XCHAL_DCACHE_SIZE; line += step) {
111 __asm__ volatile("diwbi %0, 0" :: "r"(line));
112 }
113 #endif
114 return 0;
115 }
116
/** Implementation of @ref arch_dcache_enable.
 *
 * No-op: this port exposes no runtime d-cache on/off control.
 */
static ALWAYS_INLINE void arch_dcache_enable(void)
{
	/* nothing */
}
122
/** Implementation of @ref arch_dcache_disable.
 *
 * No-op: this port exposes no runtime d-cache on/off control.
 */
static ALWAYS_INLINE void arch_dcache_disable(void)
{
	/* nothing */
}
128
129 #endif /* CONFIG_DCACHE */
130
131 #if defined(CONFIG_ICACHE) || defined(__DOXYGEN__)
132
/** Implementation of @ref arch_icache_line_size_get.
 *
 * NOTE(review): -ENOTSUP is returned through a size_t, so callers
 * actually observe a very large unsigned value; confirm callers treat
 * this as the "not supported" sentinel rather than a real line size.
 */
static ALWAYS_INLINE size_t arch_icache_line_size_get(void)
{
	return -ENOTSUP;
}
138
/** Implementation of @ref arch_icache_flush_all.
 *
 * Not supported: this port only implements i-cache invalidation
 * (see arch_icache_invd_all / arch_icache_invd_range).
 */
static ALWAYS_INLINE int arch_icache_flush_all(void)
{
	return -ENOTSUP;
}
144
/** Implementation of @ref arch_icache_invd_all.
 *
 * Invalidates the entire instruction cache via the Xtensa HAL.
 * Compiles to a no-op (still returning 0) when the core has no i-cache.
 */
static ALWAYS_INLINE int arch_icache_invd_all(void)
{
#if XCHAL_ICACHE_SIZE
	xthal_icache_all_invalidate();
#endif
	return 0;
}
153
/** Implementation of @ref arch_icache_flush_and_invd_all.
 *
 * Not supported: this port only implements i-cache invalidation.
 */
static ALWAYS_INLINE int arch_icache_flush_and_invd_all(void)
{
	return -ENOTSUP;
}
159
/** Implementation of @ref arch_icache_flush_range.
 *
 * Not supported: this port only implements i-cache invalidation.
 */
static ALWAYS_INLINE int arch_icache_flush_range(void *addr, size_t size)
{
	return -ENOTSUP;
}
165
/** Implementation of @ref arch_icache_invd_range.
 *
 * Invalidates the i-cache lines covering [addr, addr + size) via the
 * Xtensa HAL. Compiles to a no-op (still returning 0) when the core
 * has no i-cache.
 */
static ALWAYS_INLINE int arch_icache_invd_range(void *addr, size_t size)
{
#if XCHAL_ICACHE_SIZE
	xthal_icache_region_invalidate(addr, size);
#endif
	return 0;
}
174
/** Implementation of @ref arch_icache_flush_and_invd_range.
 *
 * Not supported: this port only implements i-cache invalidation.
 */
static ALWAYS_INLINE int arch_icache_flush_and_invd_range(void *addr, size_t size)
{
	return -ENOTSUP;
}
180
/** Implementation of @ref arch_icache_enable.
 *
 * No-op: this port exposes no runtime i-cache on/off control.
 */
static ALWAYS_INLINE void arch_icache_enable(void)
{
	/* nothing */
}
186
/** Implementation of @ref arch_icache_disable.
 *
 * No-op: this port exposes no runtime i-cache on/off control.
 */
static ALWAYS_INLINE void arch_icache_disable(void)
{
	/* nothing */
}
192
193 #endif /* CONFIG_ICACHE */
194
/** Implementation of @ref arch_cache_is_ptr_cached.
 *
 * This port does not distinguish a cached alias region, so no pointer
 * is ever reported as cached.
 */
static inline bool arch_cache_is_ptr_cached(void *ptr)
{
	(void)ptr;

	return false;
}
201
/** Implementation of @ref arch_cache_is_ptr_uncached.
 *
 * This port does not distinguish an uncached alias region, so no
 * pointer is ever reported as uncached.
 */
static inline bool arch_cache_is_ptr_uncached(void *ptr)
{
	(void)ptr;

	return false;
}
208
/** Implementation of @ref arch_cache_cached_ptr_get.
 *
 * Identity mapping: no separate cached alias exists in this port, so
 * the pointer is returned unchanged.
 */
static inline void *arch_cache_cached_ptr_get(void *ptr)
{
	return ptr;
}
213
/** Implementation of @ref arch_cache_uncached_ptr_get.
 *
 * Identity mapping: no separate uncached alias exists in this port, so
 * the pointer is returned unchanged.
 */
static inline void *arch_cache_uncached_ptr_get(void *ptr)
{
	return ptr;
}
218
/** Implementation of @ref arch_cache_init: nothing to set up here. */
static ALWAYS_INLINE void arch_cache_init(void)
{
}
222
223 #if defined(CONFIG_CACHE_CAN_SAY_MEM_COHERENCE)
/** Implementation of @ref arch_mem_coherent.
 *
 * When CONFIG_CACHE_CAN_SAY_MEM_COHERENCE is enabled, this port
 * reports every address as coherent.
 */
static ALWAYS_INLINE bool arch_mem_coherent(void *ptr)
{
	ARG_UNUSED(ptr);
	return true;
}
229 #endif
230
231 #ifdef __cplusplus
232 } /* extern "C" */
233 #endif
234
235 #endif /* ZEPHYR_INCLUDE_ARCH_XTENSA_CACHE_H_ */
236