/*
 * Copyright (c) 2019 Carlo Caione <ccaione@baylibre.com>
 *
 * SPDX-License-Identifier: Apache-2.0
 */

/**
 * @file
 * @brief ARM64 specific syscall header
 *
 * This header contains the ARM64 specific syscall interface.  It is
 * included by the syscall interface architecture-abstraction header
 * (include/zephyr/arch/syscall.h).
 */

#ifndef ZEPHYR_INCLUDE_ARCH_ARM64_SYSCALL_H_
#define ZEPHYR_INCLUDE_ARCH_ARM64_SYSCALL_H_

#define _SVC_CALL_IRQ_OFFLOAD		1
#define _SVC_CALL_RUNTIME_EXCEPT	2
#define _SVC_CALL_SYSTEM_CALL		3
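/*
 * These values are encoded as the immediate operand of the SVC
 * instruction.  The AArch64 synchronous exception handler can recover
 * that immediate from the ISS field of ESR_EL1 and use it to dispatch
 * to the IRQ offload, runtime exception or system call path.
 */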

#ifdef CONFIG_USERSPACE
#ifndef _ASMLANGUAGE

#include <zephyr/types.h>
#include <stdbool.h>
#include <zephyr/arch/arm64/lib_helpers.h>
#include <zephyr/arch/arm64/tpidrro_el0.h>

#ifdef __cplusplus
extern "C" {
#endif

/*
 * Syscall invocation functions. ARM64-specific machine constraints are
 * used to ensure the arguments land in the proper registers.
 */
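/*
 * The convention mirrors the AAPCS64 argument registers: up to six
 * syscall arguments are passed in x0-x5, the call ID goes in x8 and the
 * return value comes back in x0.  The "memory" clobber keeps the
 * compiler from caching user buffers across the SVC, since the kernel
 * may read or write them during the call.
 */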
static inline uintptr_t arch_syscall_invoke6(uintptr_t arg1, uintptr_t arg2,
					     uintptr_t arg3, uintptr_t arg4,
					     uintptr_t arg5, uintptr_t arg6,
					     uintptr_t call_id)
{
	register uint64_t ret __asm__("x0") = arg1;
	register uint64_t r1 __asm__("x1") = arg2;
	register uint64_t r2 __asm__("x2") = arg3;
	register uint64_t r3 __asm__("x3") = arg4;
	register uint64_t r4 __asm__("x4") = arg5;
	register uint64_t r5 __asm__("x5") = arg6;
	register uint64_t r8 __asm__("x8") = call_id;

	__asm__ volatile("svc %[svid]\n"
			 : "=r"(ret)
			 : [svid] "i" (_SVC_CALL_SYSTEM_CALL),
			   "r" (ret), "r" (r1), "r" (r2), "r" (r3),
			   "r" (r4), "r" (r5), "r" (r8)
			 : "memory");

	return ret;
}
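
/*
 * Illustrative sketch only: a user-mode syscall stub generated by the
 * build would typically wrap one of these helpers as shown below.  The
 * names are hypothetical; real call IDs come from the generated
 * syscall headers.
 *
 *	static inline int example_write(const void *buf, size_t len)
 *	{
 *		return (int)arch_syscall_invoke2((uintptr_t)buf,
 *						 (uintptr_t)len,
 *						 K_SYSCALL_EXAMPLE_WRITE);
 *	}
 */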

static inline uintptr_t arch_syscall_invoke5(uintptr_t arg1, uintptr_t arg2,
					     uintptr_t arg3, uintptr_t arg4,
					     uintptr_t arg5,
					     uintptr_t call_id)
{
	register uint64_t ret __asm__("x0") = arg1;
	register uint64_t r1 __asm__("x1") = arg2;
	register uint64_t r2 __asm__("x2") = arg3;
	register uint64_t r3 __asm__("x3") = arg4;
	register uint64_t r4 __asm__("x4") = arg5;
	register uint64_t r8 __asm__("x8") = call_id;

	__asm__ volatile("svc %[svid]\n"
			 : "=r"(ret)
			 : [svid] "i" (_SVC_CALL_SYSTEM_CALL),
			   "r" (ret), "r" (r1), "r" (r2), "r" (r3),
			   "r" (r4), "r" (r8)
			 : "memory");

	return ret;
}

static inline uintptr_t arch_syscall_invoke4(uintptr_t arg1, uintptr_t arg2,
					     uintptr_t arg3, uintptr_t arg4,
					     uintptr_t call_id)
{
	register uint64_t ret __asm__("x0") = arg1;
	register uint64_t r1 __asm__("x1") = arg2;
	register uint64_t r2 __asm__("x2") = arg3;
	register uint64_t r3 __asm__("x3") = arg4;
	register uint64_t r8 __asm__("x8") = call_id;

	__asm__ volatile("svc %[svid]\n"
			 : "=r"(ret)
			 : [svid] "i" (_SVC_CALL_SYSTEM_CALL),
			   "r" (ret), "r" (r1), "r" (r2), "r" (r3),
			   "r" (r8)
			 : "memory");

	return ret;
}

static inline uintptr_t arch_syscall_invoke3(uintptr_t arg1, uintptr_t arg2,
					     uintptr_t arg3,
					     uintptr_t call_id)
{
	register uint64_t ret __asm__("x0") = arg1;
	register uint64_t r1 __asm__("x1") = arg2;
	register uint64_t r2 __asm__("x2") = arg3;
	register uint64_t r8 __asm__("x8") = call_id;

	__asm__ volatile("svc %[svid]\n"
			 : "=r"(ret)
			 : [svid] "i" (_SVC_CALL_SYSTEM_CALL),
			   "r" (ret), "r" (r1), "r" (r2), "r" (r8)
			 : "memory");

	return ret;
}

static inline uintptr_t arch_syscall_invoke2(uintptr_t arg1, uintptr_t arg2,
					     uintptr_t call_id)
{
	register uint64_t ret __asm__("x0") = arg1;
	register uint64_t r1 __asm__("x1") = arg2;
	register uint64_t r8 __asm__("x8") = call_id;

	__asm__ volatile("svc %[svid]\n"
			 : "=r"(ret)
			 : [svid] "i" (_SVC_CALL_SYSTEM_CALL),
			   "r" (ret), "r" (r1), "r" (r8)
			 : "memory");

	return ret;
}

static inline uintptr_t arch_syscall_invoke1(uintptr_t arg1,
					     uintptr_t call_id)
{
	register uint64_t ret __asm__("x0") = arg1;
	register uint64_t r8 __asm__("x8") = call_id;

	__asm__ volatile("svc %[svid]\n"
			 : "=r"(ret)
			 : [svid] "i" (_SVC_CALL_SYSTEM_CALL),
			   "r" (ret), "r" (r8)
			 : "memory");

	return ret;
}

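/*
 * With no arguments to marshal, x0 below only carries the return value
 * written by the kernel; it is not initialized before the SVC.
 */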
static inline uintptr_t arch_syscall_invoke0(uintptr_t call_id)
{
	register uint64_t ret __asm__("x0");
	register uint64_t r8 __asm__("x8") = call_id;

	__asm__ volatile("svc %[svid]\n"
			 : "=r"(ret)
			 : [svid] "i" (_SVC_CALL_SYSTEM_CALL),
			   "r" (ret), "r" (r8)
			 : "memory");

	return ret;
}

static inline bool arch_is_user_context(void)
{
	return (read_tpidrro_el0() & TPIDRROEL0_IN_EL0) != 0;
}
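
/*
 * TPIDRRO_EL0 is maintained by the kernel: the TPIDRROEL0_IN_EL0 bit is
 * set while the current thread executes in EL0.  A rough sketch of how
 * a syscall wrapper is expected to use this test (z_impl_some_call() is
 * a hypothetical direct implementation, not a real symbol):
 *
 *	if (arch_is_user_context()) {
 *		return arch_syscall_invoke1(arg, id);
 *	}
 *	return z_impl_some_call(arg);
 */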

#ifdef __cplusplus
}
#endif

#endif /* _ASMLANGUAGE */
#endif /* CONFIG_USERSPACE */

#endif /* ZEPHYR_INCLUDE_ARCH_ARM64_SYSCALL_H_ */