/*
 * Copyright (c) 2018 Linaro Limited.
 *
 * SPDX-License-Identifier: Apache-2.0
 */

/**
 * @file
 * @brief ARM AArch32 specific syscall header
 *
 * This header contains the ARM AArch32 specific syscall interface.  It is
 * included by the syscall interface architecture-abstraction header
 * (include/arch/syscall.h)
 */

#ifndef ZEPHYR_INCLUDE_ARCH_ARM_SYSCALL_H_
#define ZEPHYR_INCLUDE_ARCH_ARM_SYSCALL_H_

#define _SVC_CALL_CONTEXT_SWITCH	0
#define _SVC_CALL_IRQ_OFFLOAD		1
#define _SVC_CALL_RUNTIME_EXCEPT	2
#define _SVC_CALL_SYSTEM_CALL		3

#ifdef CONFIG_USERSPACE
#ifndef _ASMLANGUAGE

#include <zephyr/types.h>
#include <stdbool.h>
#include <zephyr/arch/arm/misc.h>

#ifdef __cplusplus
extern "C" {
#endif


/* Syscall invocation helpers. ARM-specific register assignments are used to
 * ensure that arguments land in the proper registers.
 */
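/*
 * Convention shared by all of the helpers below: arguments are passed in
 * r0-r5, the system call ID goes in r6, the SVC immediate selects
 * _SVC_CALL_SYSTEM_CALL, and the return value comes back in r0.  Each
 * helper also marks r8, ip and memory as clobbered across the SVC.  A
 * hypothetical usage sketch follows arch_syscall_invoke0() below.
 */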
static inline uintptr_t arch_syscall_invoke6(uintptr_t arg1, uintptr_t arg2,
					     uintptr_t arg3, uintptr_t arg4,
					     uintptr_t arg5, uintptr_t arg6,
					     uintptr_t call_id)
{
	register uint32_t ret __asm__("r0") = arg1;
	register uint32_t r1 __asm__("r1") = arg2;
	register uint32_t r2 __asm__("r2") = arg3;
	register uint32_t r3 __asm__("r3") = arg4;
	register uint32_t r4 __asm__("r4") = arg5;
	register uint32_t r5 __asm__("r5") = arg6;
	register uint32_t r6 __asm__("r6") = call_id;

	__asm__ volatile("svc %[svid]\n"
			 : "=r"(ret), "=r"(r1), "=r"(r2), "=r"(r3)
			 : [svid] "i" (_SVC_CALL_SYSTEM_CALL),
			   "r" (ret), "r" (r1), "r" (r2), "r" (r3),
			   "r" (r4), "r" (r5), "r" (r6)
			 : "r8", "memory", "ip");

	return ret;
}
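
/*
 * The lower-arity helpers below follow the same pattern; any of r1-r3
 * that is not used to carry an argument is listed as a clobber instead.
 */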

static inline uintptr_t arch_syscall_invoke5(uintptr_t arg1, uintptr_t arg2,
					     uintptr_t arg3, uintptr_t arg4,
					     uintptr_t arg5,
					     uintptr_t call_id)
{
	register uint32_t ret __asm__("r0") = arg1;
	register uint32_t r1 __asm__("r1") = arg2;
	register uint32_t r2 __asm__("r2") = arg3;
	register uint32_t r3 __asm__("r3") = arg4;
	register uint32_t r4 __asm__("r4") = arg5;
	register uint32_t r6 __asm__("r6") = call_id;

	__asm__ volatile("svc %[svid]\n"
			 : "=r"(ret), "=r"(r1), "=r"(r2), "=r"(r3)
			 : [svid] "i" (_SVC_CALL_SYSTEM_CALL),
			   "r" (ret), "r" (r1), "r" (r2), "r" (r3),
			   "r" (r4), "r" (r6)
			 : "r8", "memory", "ip");

	return ret;
}

static inline uintptr_t arch_syscall_invoke4(uintptr_t arg1, uintptr_t arg2,
					     uintptr_t arg3, uintptr_t arg4,
					     uintptr_t call_id)
{
	register uint32_t ret __asm__("r0") = arg1;
	register uint32_t r1 __asm__("r1") = arg2;
	register uint32_t r2 __asm__("r2") = arg3;
	register uint32_t r3 __asm__("r3") = arg4;
	register uint32_t r6 __asm__("r6") = call_id;

	__asm__ volatile("svc %[svid]\n"
			 : "=r"(ret), "=r"(r1), "=r"(r2), "=r"(r3)
			 : [svid] "i" (_SVC_CALL_SYSTEM_CALL),
			   "r" (ret), "r" (r1), "r" (r2), "r" (r3),
			   "r" (r6)
			 : "r8", "memory", "ip");

	return ret;
}

static inline uintptr_t arch_syscall_invoke3(uintptr_t arg1, uintptr_t arg2,
					     uintptr_t arg3,
					     uintptr_t call_id)
{
	register uint32_t ret __asm__("r0") = arg1;
	register uint32_t r1 __asm__("r1") = arg2;
	register uint32_t r2 __asm__("r2") = arg3;
	register uint32_t r6 __asm__("r6") = call_id;

	__asm__ volatile("svc %[svid]\n"
			 : "=r"(ret), "=r"(r1), "=r"(r2)
			 : [svid] "i" (_SVC_CALL_SYSTEM_CALL),
			   "r" (ret), "r" (r1), "r" (r2), "r" (r6)
			 : "r8", "memory", "r3", "ip");

	return ret;
}

static inline uintptr_t arch_syscall_invoke2(uintptr_t arg1, uintptr_t arg2,
					     uintptr_t call_id)
{
	register uint32_t ret __asm__("r0") = arg1;
	register uint32_t r1 __asm__("r1") = arg2;
	register uint32_t r6 __asm__("r6") = call_id;

	__asm__ volatile("svc %[svid]\n"
			 : "=r"(ret), "=r"(r1)
			 : [svid] "i" (_SVC_CALL_SYSTEM_CALL),
			   "r" (ret), "r" (r1), "r" (r6)
			 : "r8", "memory", "r2", "r3", "ip");

	return ret;
}

static inline uintptr_t arch_syscall_invoke1(uintptr_t arg1,
					     uintptr_t call_id)
{
	register uint32_t ret __asm__("r0") = arg1;
	register uint32_t r6 __asm__("r6") = call_id;

	__asm__ volatile("svc %[svid]\n"
			 : "=r"(ret)
			 : [svid] "i" (_SVC_CALL_SYSTEM_CALL),
			   "r" (ret), "r" (r6)
			 : "r8", "memory", "r1", "r2", "r3", "ip");
	return ret;
}

static inline uintptr_t arch_syscall_invoke0(uintptr_t call_id)
{
	register uint32_t ret __asm__("r0");
	register uint32_t r6 __asm__("r6") = call_id;

	__asm__ volatile("svc %[svid]\n"
			 : "=r"(ret)
			 : [svid] "i" (_SVC_CALL_SYSTEM_CALL),
			   "r" (ret), "r" (r6)
			 : "r8", "memory", "r1", "r2", "r3", "ip");

	return ret;
}
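
/*
 * Illustration only (not part of the kernel API): a user-mode syscall
 * wrapper would typically marshal its arguments through one of the
 * helpers above, along the lines of the hypothetical sketch below.
 * The wrapper name and the K_SYSCALL_EXAMPLE_PUT call ID are made up
 * for the example.
 *
 *	static inline int z_example_put(void *buf, size_t len)
 *	{
 *		return (int)arch_syscall_invoke2((uintptr_t)buf,
 *						 (uintptr_t)len,
 *						 K_SYSCALL_EXAMPLE_PUT);
 *	}
 */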
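/*
 * Returns true if the CPU is currently executing in user (unprivileged
 * thread) mode.  On Cortex-M a non-zero IPSR indicates handler mode,
 * which is never user context.
 */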
static inline bool arch_is_user_context(void)
{
#if defined(CONFIG_CPU_CORTEX_M)
	uint32_t value;

	/* check for handler mode */
	__asm__ volatile("mrs %0, IPSR\n\t" : "=r"(value));
	if (value) {
		return false;
	}
#endif

	return z_arm_thread_is_in_user_mode();
}

#ifdef __cplusplus
}
#endif

#endif /* _ASMLANGUAGE */
#endif /* CONFIG_USERSPACE */
#endif /* ZEPHYR_INCLUDE_ARCH_ARM_SYSCALL_H_ */