1 /*
2  * Copyright (c) 2023-2024 Arm Limited. All rights reserved.
3  *
4  * SPDX-License-Identifier: Apache-2.0
5  *
6  * Licensed under the Apache License, Version 2.0 (the License); you may
7  * not use this file except in compliance with the License.
8  * You may obtain a copy of the License at
9  *
10  * www.apache.org/licenses/LICENSE-2.0
11  *
12  * Unless required by applicable law or agreed to in writing, software
13  * distributed under the License is distributed on an AS IS BASIS, WITHOUT
14  * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15  * See the License for the specific language governing permissions and
16  * limitations under the License.
17  */
18 
19 /*
20  * CMSIS-Core(A) Compiler LLVM/Clang Header File
21  */
22 
23 #ifndef __CMSIS_CLANG_A_H
24 #define __CMSIS_CLANG_A_H
25 
26 #pragma clang system_header   /* treat file as system include file */
27 
28 #ifndef __CMSIS_CLANG_H
29   #error "This file must not be included directly"
30 #endif
31 /**
32   \brief   STRT Unprivileged (8 bit)
33   \details Executes a Unprivileged STRT instruction for 8 bit values.
34   \param [in]  value  Value to store
35   \param [in]    ptr  Pointer to location
36  */
__STRBT(uint8_t value,volatile uint8_t * ptr)37 __STATIC_FORCEINLINE void __STRBT(uint8_t value, volatile uint8_t *ptr)
38 {
39   __ASM volatile ("strbt %1, %0, #0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
40 }
41 
42 
43 /**
44   \brief   STRT Unprivileged (16 bit)
45   \details Executes a Unprivileged STRT instruction for 16 bit values.
46   \param [in]  value  Value to store
47   \param [in]    ptr  Pointer to location
48  */
__STRHT(uint16_t value,volatile uint16_t * ptr)49 __STATIC_FORCEINLINE void __STRHT(uint16_t value, volatile uint16_t *ptr)
50 {
51   __ASM volatile ("strht %1, %0, #0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
52 }
53 
54 
55 /**
56   \brief   STRT Unprivileged (32 bit)
57   \details Executes a Unprivileged STRT instruction for 32 bit values.
58   \param [in]  value  Value to store
59   \param [in]    ptr  Pointer to location
60  */
__STRT(uint32_t value,volatile uint32_t * ptr)61 __STATIC_FORCEINLINE void __STRT(uint32_t value, volatile uint32_t *ptr)
62 {
63   __ASM volatile ("strt %1, %0, #0" : "=Q" (*ptr) : "r" (value) );
64 }
65 
66 /* ###################  Compiler specific Intrinsics  ########################### */
67 /** \defgroup CMSIS_SIMD_intrinsics CMSIS SIMD Intrinsics
68   Access to dedicated SIMD instructions
69   @{
70 */
#if (defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1))
/* Map the CMSIS SIMD intrinsic names 1:1 onto the Clang ACLE builtins.
   Available only when the compiler reports DSP-extension support. */

/* Parallel 8-bit add: signed, saturating, halving, unsigned variants */
#define     __SADD8                 __builtin_arm_sadd8
#define     __QADD8                 __builtin_arm_qadd8
#define     __SHADD8                __builtin_arm_shadd8
#define     __UADD8                 __builtin_arm_uadd8
#define     __UQADD8                __builtin_arm_uqadd8
#define     __UHADD8                __builtin_arm_uhadd8
/* Parallel 8-bit subtract variants */
#define     __SSUB8                 __builtin_arm_ssub8
#define     __QSUB8                 __builtin_arm_qsub8
#define     __SHSUB8                __builtin_arm_shsub8
#define     __USUB8                 __builtin_arm_usub8
#define     __UQSUB8                __builtin_arm_uqsub8
#define     __UHSUB8                __builtin_arm_uhsub8
/* Parallel 16-bit add variants */
#define     __SADD16                __builtin_arm_sadd16
#define     __QADD16                __builtin_arm_qadd16
#define     __SHADD16               __builtin_arm_shadd16
#define     __UADD16                __builtin_arm_uadd16
#define     __UQADD16               __builtin_arm_uqadd16
#define     __UHADD16               __builtin_arm_uhadd16
/* Parallel 16-bit subtract variants */
#define     __SSUB16                __builtin_arm_ssub16
#define     __QSUB16                __builtin_arm_qsub16
#define     __SHSUB16               __builtin_arm_shsub16
#define     __USUB16                __builtin_arm_usub16
#define     __UQSUB16               __builtin_arm_uqsub16
#define     __UHSUB16               __builtin_arm_uhsub16
/* Parallel 16-bit add-and-subtract exchange (ASX) variants */
#define     __SASX                  __builtin_arm_sasx
#define     __QASX                  __builtin_arm_qasx
#define     __SHASX                 __builtin_arm_shasx
#define     __UASX                  __builtin_arm_uasx
#define     __UQASX                 __builtin_arm_uqasx
#define     __UHASX                 __builtin_arm_uhasx
/* Parallel 16-bit subtract-and-add exchange (SAX) variants */
#define     __SSAX                  __builtin_arm_ssax
#define     __QSAX                  __builtin_arm_qsax
#define     __SHSAX                 __builtin_arm_shsax
#define     __USAX                  __builtin_arm_usax
#define     __UQSAX                 __builtin_arm_uqsax
#define     __UHSAX                 __builtin_arm_uhsax
/* Sum of absolute differences */
#define     __USAD8                 __builtin_arm_usad8
#define     __USADA8                __builtin_arm_usada8
/* Dual 16-bit saturate */
#define     __SSAT16                __builtin_arm_ssat16
#define     __USAT16                __builtin_arm_usat16
/* Dual byte extend (and accumulate) */
#define     __UXTB16                __builtin_arm_uxtb16
#define     __UXTAB16               __builtin_arm_uxtab16
#define     __SXTB16                __builtin_arm_sxtb16
#define     __SXTAB16               __builtin_arm_sxtab16
/* Dual 16-bit signed multiply (accumulate) variants */
#define     __SMUAD                 __builtin_arm_smuad
#define     __SMUADX                __builtin_arm_smuadx
#define     __SMLAD                 __builtin_arm_smlad
#define     __SMLADX                __builtin_arm_smladx
#define     __SMLALD                __builtin_arm_smlald
#define     __SMLALDX               __builtin_arm_smlaldx
#define     __SMUSD                 __builtin_arm_smusd
#define     __SMUSDX                __builtin_arm_smusdx
#define     __SMLSD                 __builtin_arm_smlsd
#define     __SMLSDX                __builtin_arm_smlsdx
#define     __SMLSLD                __builtin_arm_smlsld
#define     __SMLSLDX               __builtin_arm_smlsldx
/* Byte select (uses GE flags set by the parallel ops above) */
#define     __SEL                   __builtin_arm_sel
/* 32-bit saturating add/subtract */
#define     __QADD                  __builtin_arm_qadd
#define     __QSUB                  __builtin_arm_qsub
131 
/**
  \brief   Pack Halfword Bottom-Top
  \details Combines bits[15:0] of ARG1 with bits[15:0] of (ARG2 << ARG3).
  \param [in]  ARG1  Value providing the bottom halfword
  \param [in]  ARG2  Value providing the top halfword (after left shift)
  \param [in]  ARG3  Shift amount; must be a compile-time constant ("I" constraint)
 */
#define __PKHBT(ARG1,ARG2,ARG3) \
__extension__ \
({                          \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  __ASM ("pkhbt %0, %1, %2, lsl %3" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2), "I" ((ARG3))  ); \
  __RES; \
 })

/**
  \brief   Pack Halfword Top-Bottom
  \details Combines bits[31:16] of ARG1 with bits[31:16] of (ARG2 >> ARG3).
  \param [in]  ARG1  Value providing the top halfword
  \param [in]  ARG2  Value providing the bottom halfword (after arithmetic right shift)
  \param [in]  ARG3  Shift amount; must be a compile-time constant ("I" constraint).
                     A shift of 0 is encoded without the shifter operand, since
                     "asr #0" would actually mean a shift of 32.
 */
#define __PKHTB(ARG1,ARG2,ARG3) \
__extension__ \
({                          \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  if ((ARG3) == 0) \
    __ASM ("pkhtb %0, %1, %2" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2)  ); \
  else \
    __ASM ("pkhtb %0, %1, %2, asr %3" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2), "I" ((ARG3))  ); \
  __RES; \
 })
150 
__SXTB16_RORn(uint32_t op1,uint32_t rotate)151 __STATIC_FORCEINLINE uint32_t __SXTB16_RORn(uint32_t op1, uint32_t rotate)
152 {
153     uint32_t result;
154     if (__builtin_constant_p(rotate) && ((rotate == 8U) || (rotate == 16U) || (rotate == 24U)))
155     {
156         __ASM volatile("sxtb16 %0, %1, ROR %2" : "=r"(result) : "r"(op1), "i"(rotate));
157     }
158     else
159     {
160         result = __SXTB16(__ROR(op1, rotate));
161     }
162     return result;
163 }
164 
__SXTAB16_RORn(uint32_t op1,uint32_t op2,uint32_t rotate)165 __STATIC_FORCEINLINE uint32_t __SXTAB16_RORn(uint32_t op1, uint32_t op2, uint32_t rotate)
166 {
167     uint32_t result;
168     if (__builtin_constant_p(rotate) && ((rotate == 8U) || (rotate == 16U) || (rotate == 24U)))
169     {
170         __ASM volatile("sxtab16 %0, %1, %2, ROR %3" : "=r"(result) : "r"(op1), "r"(op2), "i"(rotate));
171     }
172     else
173     {
174         result = __SXTAB16(op1, __ROR(op2, rotate));
175     }
176     return result;
177 }
178 
__SMMLA(int32_t op1,int32_t op2,int32_t op3)179 __STATIC_FORCEINLINE int32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
180 {
181   int32_t result;
182 
183   __ASM volatile ("smmla %0, %1, %2, %3" : "=r" (result): "r"  (op1), "r" (op2), "r" (op3) );
184   return (result);
185 }
186 
187 #endif /* (defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1)) */
188  /** @} end of group CMSIS_SIMD_intrinsics */
189 
190 /* ###########################  Core Function Access  ########################### */
191 /** \ingroup  CMSIS_Core_FunctionInterface
192     \defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions
193   @{
194  */
195 
196 /** \brief  Get CPSR Register
197     \return               CPSR Register value
198  */
__get_CPSR(void)199 __STATIC_FORCEINLINE uint32_t __get_CPSR(void)
200 {
201   uint32_t result;
202   __ASM volatile("MRS %0, cpsr" : "=r" (result) );
203   return(result);
204 }
205 
206 /** \brief  Set CPSR Register
207     \param [in]    cpsr  CPSR value to set
208  */
__set_CPSR(uint32_t cpsr)209 __STATIC_FORCEINLINE void __set_CPSR(uint32_t cpsr)
210 {
211   __ASM volatile ("MSR cpsr, %0" : : "r" (cpsr) : "cc", "memory");
212 }
213 
214 /** \brief  Get Mode
215     \return                Processor Mode
216  */
__get_mode(void)217 __STATIC_FORCEINLINE uint32_t __get_mode(void)
218 {
219   return (__get_CPSR() & 0x1FU);
220 }
221 
222 /** \brief  Set Mode
223     \param [in]    mode  Mode value to set
224  */
__set_mode(uint32_t mode)225 __STATIC_FORCEINLINE void __set_mode(uint32_t mode)
226 {
227   __ASM volatile("MSR  cpsr_c, %0" : : "r" (mode) : "memory");
228 }
229 
230 /** \brief  Get Stack Pointer
231     \return Stack Pointer value
232  */
__get_SP(void)233 __STATIC_FORCEINLINE uint32_t __get_SP(void)
234 {
235   uint32_t result;
236   __ASM volatile("MOV  %0, sp" : "=r" (result) : : "memory");
237   return result;
238 }
239 
240 /** \brief  Set Stack Pointer
241     \param [in]    stack  Stack Pointer value to set
242  */
__set_SP(uint32_t stack)243 __STATIC_FORCEINLINE void __set_SP(uint32_t stack)
244 {
245   __ASM volatile("MOV  sp, %0" : : "r" (stack) : "memory");
246 }
247 
248 /** \brief  Get USR/SYS Stack Pointer
249     \return USR/SYS Stack Pointer value
250  */
__get_SP_usr(void)251 __STATIC_FORCEINLINE uint32_t __get_SP_usr(void)
252 {
253   uint32_t cpsr;
254   uint32_t result;
255   __ASM volatile(
256     "MRS     %0, cpsr   \n"
257     "CPS     #0x1F      \n" // no effect in USR mode
258     "MOV     %1, sp     \n"
259     "MSR     cpsr_c, %0 \n" // no effect in USR mode
260     "ISB" :  "=r"(cpsr), "=r"(result) : : "memory"
261    );
262   return result;
263 }
264 
265 /** \brief  Set USR/SYS Stack Pointer
266     \param [in]    topOfProcStack  USR/SYS Stack Pointer value to set
267  */
__set_SP_usr(uint32_t topOfProcStack)268 __STATIC_FORCEINLINE void __set_SP_usr(uint32_t topOfProcStack)
269 {
270   uint32_t cpsr;
271   __ASM volatile(
272     "MRS     %0, cpsr   \n"
273     "CPS     #0x1F      \n" // no effect in USR mode
274     "MOV     sp, %1     \n"
275     "MSR     cpsr_c, %0 \n" // no effect in USR mode
276     "ISB" : "=r"(cpsr) : "r" (topOfProcStack) : "memory"
277    );
278 }
279 
280 /** \brief  Get FPEXC
281     \return               Floating Point Exception Control register value
282  */
__get_FPEXC(void)283 __STATIC_FORCEINLINE uint32_t __get_FPEXC(void)
284 {
285 #if (__FPU_PRESENT == 1)
286   uint32_t result;
287   __ASM volatile("VMRS %0, fpexc" : "=r" (result) : : "memory");
288   return(result);
289 #else
290   return(0);
291 #endif
292 }
293 
294 /** \brief  Set FPEXC
295     \param [in]    fpexc  Floating Point Exception Control value to set
296  */
__set_FPEXC(uint32_t fpexc)297 __STATIC_FORCEINLINE void __set_FPEXC(uint32_t fpexc)
298 {
299 #if (__FPU_PRESENT == 1)
300   __ASM volatile ("VMSR fpexc, %0" : : "r" (fpexc) : "memory");
301 #endif
302 }
303 
304 /** @} end of CMSIS_Core_RegAccFunctions */
305 
306 
307 /*
308  * Include common core functions to access Coprocessor 15 registers
309  */
310 
/* Coprocessor register accessors used by cmsis_cp15.h.
   cp  - coprocessor number (e.g. 15), op1/op2 - opcode fields,
   CRn/CRm - coprocessor register numbers, Rt - C lvalue read/written.
   The 64-bit variants move Rt as a register pair (%Q0 = low word, %R0 = high word). */
#define __get_CP(cp, op1, Rt, CRn, CRm, op2) __ASM volatile("MRC p" # cp ", " # op1 ", %0, c" # CRn ", c" # CRm ", " # op2 : "=r" (Rt) : : "memory" )
#define __set_CP(cp, op1, Rt, CRn, CRm, op2) __ASM volatile("MCR p" # cp ", " # op1 ", %0, c" # CRn ", c" # CRm ", " # op2 : : "r" (Rt) : "memory" )
#define __get_CP64(cp, op1, Rt, CRm)         __ASM volatile("MRRC p" # cp ", " # op1 ", %Q0, %R0, c" # CRm  : "=r" (Rt) : : "memory" )
#define __set_CP64(cp, op1, Rt, CRm)         __ASM volatile("MCRR p" # cp ", " # op1 ", %Q0, %R0, c" # CRm  : : "r" (Rt) : "memory" )
315 
316 #include "cmsis_cp15.h"
317 
318 /** \brief  Enable Floating Point Unit
319 
320   Critical section, called from undef handler, so systick is disabled
321  */
__FPU_Enable(void)322 __STATIC_INLINE void __FPU_Enable(void)
323 {
324   // Permit access to VFP/NEON, registers by modifying CPACR
325   const uint32_t cpacr = __get_CPACR();
326   __set_CPACR(cpacr | 0x00F00000ul);
327   __ISB();
328 
329   // Enable VFP/NEON
330   const uint32_t fpexc = __get_FPEXC();
331   __set_FPEXC(fpexc | 0x40000000ul);
332 
333   __ASM volatile(
334     // Initialise VFP/NEON registers to 0
335     "        MOV     R2,#0             \n"
336 
337     // Initialise D16 registers to 0
338     "        VMOV    D0, R2,R2         \n"
339     "        VMOV    D1, R2,R2         \n"
340     "        VMOV    D2, R2,R2         \n"
341     "        VMOV    D3, R2,R2         \n"
342     "        VMOV    D4, R2,R2         \n"
343     "        VMOV    D5, R2,R2         \n"
344     "        VMOV    D6, R2,R2         \n"
345     "        VMOV    D7, R2,R2         \n"
346     "        VMOV    D8, R2,R2         \n"
347     "        VMOV    D9, R2,R2         \n"
348     "        VMOV    D10,R2,R2         \n"
349     "        VMOV    D11,R2,R2         \n"
350     "        VMOV    D12,R2,R2         \n"
351     "        VMOV    D13,R2,R2         \n"
352     "        VMOV    D14,R2,R2         \n"
353     "        VMOV    D15,R2,R2         \n"
354 
355 #if (defined(__ARM_NEON) && (__ARM_NEON == 1))
356     // Initialise D32 registers to 0
357     "        VMOV    D16,R2,R2         \n"
358     "        VMOV    D17,R2,R2         \n"
359     "        VMOV    D18,R2,R2         \n"
360     "        VMOV    D19,R2,R2         \n"
361     "        VMOV    D20,R2,R2         \n"
362     "        VMOV    D21,R2,R2         \n"
363     "        VMOV    D22,R2,R2         \n"
364     "        VMOV    D23,R2,R2         \n"
365     "        VMOV    D24,R2,R2         \n"
366     "        VMOV    D25,R2,R2         \n"
367     "        VMOV    D26,R2,R2         \n"
368     "        VMOV    D27,R2,R2         \n"
369     "        VMOV    D28,R2,R2         \n"
370     "        VMOV    D29,R2,R2         \n"
371     "        VMOV    D30,R2,R2         \n"
372     "        VMOV    D31,R2,R2         \n"
373 #endif
374     : : : "cc", "r2"
375   );
376 
377   // Initialise FPSCR to a known state
378   const uint32_t fpscr = __get_FPSCR();
379   __set_FPSCR(fpscr & 0x00086060ul);
380 }
381 
382 /*@} end of group CMSIS_Core_intrinsics */
383 
384 #pragma clang diagnostic pop
385 
386 #endif /* __CMSIS_CLANG_A_H */
387