/*
 * Copyright (c) 2018 Synopsys, Inc. All rights reserved.
 *
 * SPDX-License-Identifier: Apache-2.0
 */

#ifndef ZEPHYR_INCLUDE_ARCH_ARC_V2_SJLI_H
#define ZEPHYR_INCLUDE_ARCH_ARC_V2_SJLI_H

/* SJLI table index used by the normal world to enter the secure firmware */
#define SJLI_CALL_ARC_SECURE 0

/* Secure service call IDs, used to index arc_s_call_table */
#define ARC_S_CALL_AUX_READ 0
#define ARC_S_CALL_AUX_WRITE 1
#define ARC_S_CALL_IRQ_ALLOC 2
#define ARC_S_CALL_CLRI 3
#define ARC_S_CALL_SETI 4
#define ARC_S_CALL_LIMIT 5

/*
 * First interrupt priority level available to the normal world; the
 * higher-priority (numerically lower) levels are assumed to be reserved
 * for the secure firmware.
 */
#define ARC_N_IRQ_START_LEVEL ((CONFIG_NUM_IRQ_PRIO_LEVELS + 1) / 2)

#ifndef _ASMLANGUAGE

#include <zephyr/types.h>
#include <stdbool.h>

#include <toolchain.h>	/* compiler_barrier() */
#include <arch/arc/v2/aux_regs.h>

#ifdef __cplusplus
extern "C" {
#endif

/* Jump through entry 'id' of the SJLI (Secure Jump and Link Indexed) table */
#define arc_sjli(id) \
	(__asm__ volatile("sjli %[sjli_id]\n" :: [sjli_id] "i" (id)))
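
/*
 * Usage sketch (illustrative): with CONFIG_ARC_NORMAL_FIRMWARE, the normal
 * world enters the secure firmware through the SJLI table entry reserved
 * for secure calls, e.g.:
 *
 *	arc_sjli(SJLI_CALL_ARC_SECURE);
 *
 * The _arc_s_call_invoke* helpers below wrap this instruction and marshal
 * the arguments and service ID into r0-r6.
 */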

#ifdef CONFIG_ARC_SECURE_FIRMWARE

/* Prototype of a secure service handler registered in arc_s_call_table */
typedef uint32_t (*_arc_s_call_handler_t)(uint32_t arg1, uint32_t arg2, uint32_t arg3,
					   uint32_t arg4, uint32_t arg5, uint32_t arg6);

/* Hand execution over to the normal world, starting at 'addr' */
extern void arc_go_to_normal(uint32_t addr);
/* Secure-side entry that dispatches incoming secure calls */
extern void _arc_do_secure_call(void);
extern const _arc_s_call_handler_t arc_s_call_table[ARC_S_CALL_LIMIT];

#endif /* CONFIG_ARC_SECURE_FIRMWARE */
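
/*
 * Sketch of the secure-firmware side (illustrative only; the real handlers
 * and the definition of arc_s_call_table live in the secure firmware
 * sources, and the handler name below is hypothetical):
 *
 *	static uint32_t arc_s_aux_read_handler(uint32_t aux_reg, uint32_t arg2,
 *					       uint32_t arg3, uint32_t arg4,
 *					       uint32_t arg5, uint32_t arg6)
 *	{
 *		return z_arc_v2_aux_reg_read(aux_reg);
 *	}
 *
 *	const _arc_s_call_handler_t arc_s_call_table[ARC_S_CALL_LIMIT] = {
 *		[ARC_S_CALL_AUX_READ] = arc_s_aux_read_handler,
 *	};
 *
 * _arc_do_secure_call() is expected to look up the handler by the service
 * ID passed in r6 and invoke it with the caller's r0-r5 as arguments.
 */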

#ifdef CONFIG_ARC_NORMAL_FIRMWARE

/*
 * Secure call helpers for the normal world: arguments go in r0-r5, the
 * service ID in r6, and the result comes back in r0.
 */
static inline uint32_t _arc_s_call_invoke6(uint32_t arg1, uint32_t arg2, uint32_t arg3,
					   uint32_t arg4, uint32_t arg5, uint32_t arg6,
					   uint32_t call_id)
{
	register uint32_t ret __asm__("r0") = arg1;
	register uint32_t r1 __asm__("r1") = arg2;
	register uint32_t r2 __asm__("r2") = arg3;
	register uint32_t r3 __asm__("r3") = arg4;
	register uint32_t r4 __asm__("r4") = arg5;
	register uint32_t r5 __asm__("r5") = arg6;
	register uint32_t r6 __asm__("r6") = call_id;

	compiler_barrier();

	__asm__ volatile(
		"sjli %[id]\n"
		: "=r"(ret)
		: [id] "i" (SJLI_CALL_ARC_SECURE),
		  "r" (ret), "r" (r1), "r" (r2), "r" (r3),
		  "r" (r4), "r" (r5), "r" (r6));

	return ret;
}

static inline uint32_t _arc_s_call_invoke5(uint32_t arg1, uint32_t arg2, uint32_t arg3,
					   uint32_t arg4, uint32_t arg5, uint32_t call_id)
{
	register uint32_t ret __asm__("r0") = arg1;
	register uint32_t r1 __asm__("r1") = arg2;
	register uint32_t r2 __asm__("r2") = arg3;
	register uint32_t r3 __asm__("r3") = arg4;
	register uint32_t r4 __asm__("r4") = arg5;
	register uint32_t r6 __asm__("r6") = call_id;

	compiler_barrier();

	__asm__ volatile(
		"sjli %[id]\n"
		: "=r"(ret)
		: [id] "i" (SJLI_CALL_ARC_SECURE),
		  "r" (ret), "r" (r1), "r" (r2), "r" (r3),
		  "r" (r4), "r" (r6));

	return ret;
}

static inline uint32_t _arc_s_call_invoke4(uint32_t arg1, uint32_t arg2, uint32_t arg3,
					   uint32_t arg4, uint32_t call_id)
{
	register uint32_t ret __asm__("r0") = arg1;
	register uint32_t r1 __asm__("r1") = arg2;
	register uint32_t r2 __asm__("r2") = arg3;
	register uint32_t r3 __asm__("r3") = arg4;
	register uint32_t r6 __asm__("r6") = call_id;

	compiler_barrier();

	__asm__ volatile(
		"sjli %[id]\n"
		: "=r"(ret)
		: [id] "i" (SJLI_CALL_ARC_SECURE),
		  "r" (ret), "r" (r1), "r" (r2), "r" (r3),
		  "r" (r6));

	return ret;
}

static inline uint32_t _arc_s_call_invoke3(uint32_t arg1, uint32_t arg2, uint32_t arg3,
					   uint32_t call_id)
{
	register uint32_t ret __asm__("r0") = arg1;
	register uint32_t r1 __asm__("r1") = arg2;
	register uint32_t r2 __asm__("r2") = arg3;
	register uint32_t r6 __asm__("r6") = call_id;

	compiler_barrier();

	__asm__ volatile(
		"sjli %[id]\n"
		: "=r"(ret)
		: [id] "i" (SJLI_CALL_ARC_SECURE),
		  "r" (ret), "r" (r1), "r" (r2), "r" (r6));

	return ret;
}

static inline uint32_t _arc_s_call_invoke2(uint32_t arg1, uint32_t arg2, uint32_t call_id)
{
	register uint32_t ret __asm__("r0") = arg1;
	register uint32_t r1 __asm__("r1") = arg2;
	register uint32_t r6 __asm__("r6") = call_id;

	compiler_barrier();

	__asm__ volatile(
		"sjli %[id]\n"
		: "=r"(ret)
		: [id] "i" (SJLI_CALL_ARC_SECURE),
		  "r" (ret), "r" (r1), "r" (r6));

	return ret;
}

static inline uint32_t _arc_s_call_invoke1(uint32_t arg1, uint32_t call_id)
{
	register uint32_t ret __asm__("r0") = arg1;
	register uint32_t r6 __asm__("r6") = call_id;

	compiler_barrier();

	__asm__ volatile(
		"sjli %[id]\n"
		: "=r"(ret)
		: [id] "i" (SJLI_CALL_ARC_SECURE),
		  "r" (ret), "r" (r6));

	return ret;
}

static inline uint32_t _arc_s_call_invoke0(uint32_t call_id)
{
	register uint32_t ret __asm__("r0");
	register uint32_t r6 __asm__("r6") = call_id;

	compiler_barrier();

	__asm__ volatile(
		"sjli %[id]\n"
		: "=r"(ret)
		: [id] "i" (SJLI_CALL_ARC_SECURE),
		  "r" (ret), "r" (r6));

	return ret;
}
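
/*
 * Usage sketch (illustrative; the wrapper names below are hypothetical and
 * not declared by this header): normal-world code passes one of the
 * ARC_S_CALL_* IDs as the last argument of an _arc_s_call_invoke* helper,
 * for example:
 *
 *	static inline uint32_t arc_s_aux_read(uint32_t aux_reg)
 *	{
 *		return _arc_s_call_invoke1(aux_reg, ARC_S_CALL_AUX_READ);
 *	}
 *
 *	static inline void arc_s_aux_write(uint32_t aux_reg, uint32_t val)
 *	{
 *		(void)_arc_s_call_invoke2(aux_reg, val, ARC_S_CALL_AUX_WRITE);
 *	}
 */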

/* Derive the current execution context from the US (user mode) bit in STATUS32 */
static inline bool _arch_is_user_context(void)
{
	uint32_t status;

	compiler_barrier();

	__asm__ volatile("lr %0, [%[status32]]\n"
			 : "=r"(status)
			 : [status32] "i" (_ARC_V2_STATUS32));

	return !(status & _ARC_V2_STATUS32_US) ? true : false;
}

#endif /* CONFIG_ARC_NORMAL_FIRMWARE */

#ifdef __cplusplus
}
#endif

#endif /* _ASMLANGUAGE */

#endif /* ZEPHYR_INCLUDE_ARCH_ARC_V2_SJLI_H */