/*
 * Copyright (c) 2009-2024 Arm Limited. All rights reserved.
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the License); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an AS IS BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/*
 * CMSIS-Core(A) Compiler ARMClang (Arm Compiler 6) Header File
 */

#ifndef __CMSIS_ARMCLANG_A_H
#define __CMSIS_ARMCLANG_A_H

#pragma clang system_header   /* treat file as system include file */

#ifndef __CMSIS_ARMCLANG_H
  #error "This file must not be included directly"
#endif

/**
  \brief   STRBT Unprivileged (8 bit)
  \details Executes an unprivileged STRBT instruction for 8-bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STRBT(uint8_t value, volatile uint8_t *ptr)
{
  __ASM volatile ("strbt %1, %0, #0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


/**
  \brief   STRHT Unprivileged (16 bit)
  \details Executes an unprivileged STRHT instruction for 16-bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STRHT(uint16_t value, volatile uint16_t *ptr)
{
  __ASM volatile ("strht %1, %0, #0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


/**
  \brief   STRT Unprivileged (32 bit)
  \details Executes an unprivileged STRT instruction for 32-bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STRT(uint32_t value, volatile uint32_t *ptr)
{
  __ASM volatile ("strt %1, %0, #0" : "=Q" (*ptr) : "r" (value) );
}
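
/*
  Usage sketch (illustrative, not part of the original header; the address is a
  hypothetical example): from privileged code, __STRBT/__STRHT/__STRT store
  through a pointer using unprivileged (user-mode) access permissions, so a
  permission fault is raised instead of silently writing to memory that
  unprivileged code may not touch.

    volatile uint8_t *user_byte = (volatile uint8_t *)0x20001000U;
    __STRBT(0x5AU, user_byte);   // STRBT: 8-bit store checked against user permissions
*/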


/* ###################  Compiler specific Intrinsics  ########################### */
/** \defgroup CMSIS_SIMD_intrinsics CMSIS SIMD Intrinsics
  Access to dedicated SIMD instructions
  @{
*/
#if (defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1))

#define     __SADD8                 __builtin_arm_sadd8
#define     __QADD8                 __builtin_arm_qadd8
#define     __SHADD8                __builtin_arm_shadd8
#define     __UADD8                 __builtin_arm_uadd8
#define     __UQADD8                __builtin_arm_uqadd8
#define     __UHADD8                __builtin_arm_uhadd8
#define     __SSUB8                 __builtin_arm_ssub8
#define     __QSUB8                 __builtin_arm_qsub8
#define     __SHSUB8                __builtin_arm_shsub8
#define     __USUB8                 __builtin_arm_usub8
#define     __UQSUB8                __builtin_arm_uqsub8
#define     __UHSUB8                __builtin_arm_uhsub8
#define     __SADD16                __builtin_arm_sadd16
#define     __QADD16                __builtin_arm_qadd16
#define     __SHADD16               __builtin_arm_shadd16
#define     __UADD16                __builtin_arm_uadd16
#define     __UQADD16               __builtin_arm_uqadd16
#define     __UHADD16               __builtin_arm_uhadd16
#define     __SSUB16                __builtin_arm_ssub16
#define     __QSUB16                __builtin_arm_qsub16
#define     __SHSUB16               __builtin_arm_shsub16
#define     __USUB16                __builtin_arm_usub16
#define     __UQSUB16               __builtin_arm_uqsub16
#define     __UHSUB16               __builtin_arm_uhsub16
#define     __SASX                  __builtin_arm_sasx
#define     __QASX                  __builtin_arm_qasx
#define     __SHASX                 __builtin_arm_shasx
#define     __UASX                  __builtin_arm_uasx
#define     __UQASX                 __builtin_arm_uqasx
#define     __UHASX                 __builtin_arm_uhasx
#define     __SSAX                  __builtin_arm_ssax
#define     __QSAX                  __builtin_arm_qsax
#define     __SHSAX                 __builtin_arm_shsax
#define     __USAX                  __builtin_arm_usax
#define     __UQSAX                 __builtin_arm_uqsax
#define     __UHSAX                 __builtin_arm_uhsax
#define     __USAD8                 __builtin_arm_usad8
#define     __USADA8                __builtin_arm_usada8
#define     __SSAT16                __builtin_arm_ssat16
#define     __USAT16                __builtin_arm_usat16
#define     __UXTB16                __builtin_arm_uxtb16
#define     __UXTAB16               __builtin_arm_uxtab16
#define     __SXTB16                __builtin_arm_sxtb16
#define     __SXTAB16               __builtin_arm_sxtab16
#define     __SMUAD                 __builtin_arm_smuad
#define     __SMUADX                __builtin_arm_smuadx
#define     __SMLAD                 __builtin_arm_smlad
#define     __SMLADX                __builtin_arm_smladx
#define     __SMLALD                __builtin_arm_smlald
#define     __SMLALDX               __builtin_arm_smlaldx
#define     __SMUSD                 __builtin_arm_smusd
#define     __SMUSDX                __builtin_arm_smusdx
#define     __SMLSD                 __builtin_arm_smlsd
#define     __SMLSDX                __builtin_arm_smlsdx
#define     __SMLSLD                __builtin_arm_smlsld
#define     __SMLSLDX               __builtin_arm_smlsldx
#define     __SEL                   __builtin_arm_sel
#define     __QADD                  __builtin_arm_qadd
#define     __QSUB                  __builtin_arm_qsub
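
/*
  Usage sketch (illustrative, not part of the original header): the SIMD
  intrinsics operate on four 8-bit or two 16-bit lanes packed into a uint32_t.
  For example, __UQADD8 adds four byte lanes with unsigned saturation:

    uint32_t a = 0x80FF0102U;
    uint32_t b = 0x80010304U;
    uint32_t s = __UQADD8(a, b);   // per-lane: 0x80+0x80 and 0xFF+0x01 saturate at 0xFF
                                   // s == 0xFFFF0406U
*/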

#define __PKHBT(ARG1,ARG2,ARG3) \
__extension__ \
({                          \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  __ASM ("pkhbt %0, %1, %2, lsl %3" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2), "I" (ARG3)  ); \
  __RES; \
 })

#define __PKHTB(ARG1,ARG2,ARG3) \
__extension__ \
({                          \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  if (ARG3 == 0) \
    __ASM ("pkhtb %0, %1, %2" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2)  ); \
  else \
    __ASM ("pkhtb %0, %1, %2, asr %3" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2), "I" (ARG3)  ); \
  __RES; \
 })
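
/*
  Usage sketch (illustrative, not part of the original header): __PKHBT packs
  the bottom halfword of ARG1 with the top halfword of ARG2 shifted left by
  ARG3; the shift amount must be a compile-time constant because of the "I"
  inline-assembly operand constraint.

    uint32_t lo = 0x00001234U;
    uint32_t hi = 0x5678U;
    uint32_t packed = __PKHBT(lo, hi, 16);   // packed == 0x56781234U
*/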

/** \brief   SXTB16 with rotate
    \details Extracts two 8-bit values and sign extends them to 16-bit values,
             with the source operand rotated right by \p rotate bits first.
    \param [in]    op1  Value with two packed 8-bit values
    \param [in] rotate  Rotation applied to \p op1 (8, 16 or 24 for the single-instruction form)
    \return             Two sign-extended 16-bit values
 */
__STATIC_FORCEINLINE uint32_t __SXTB16_RORn(uint32_t op1, uint32_t rotate)
{
    uint32_t result;
    if (__builtin_constant_p(rotate) && ((rotate == 8U) || (rotate == 16U) || (rotate == 24U)))
    {
        __ASM volatile("sxtb16 %0, %1, ROR %2" : "=r"(result) : "r"(op1), "i"(rotate));
    }
    else
    {
        result = __SXTB16(__ROR(op1, rotate));
    }
    return result;
}
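
/*
  Usage sketch (illustrative, not part of the original header): SXTB16 sign
  extends bytes 0 and 2 of its (rotated) operand, so a rotate of 8 selects the
  odd bytes of the original word:

    uint32_t x = 0x12FF3480U;
    uint32_t r = __SXTB16_RORn(x, 8U);   // extracts bytes 1 and 3 (0x34, 0x12)
                                         // r == 0x00120034U
*/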

/** \brief   SXTAB16 with rotate
    \details Extracts two 8-bit values, sign extends them to 16-bit values, and
             adds them to \p op1, with \p op2 rotated right by \p rotate bits first.
    \param [in]    op1  Two packed 16-bit addends
    \param [in]    op2  Value with two packed 8-bit values
    \param [in] rotate  Rotation applied to \p op2 (8, 16 or 24 for the single-instruction form)
    \return             Two packed 16-bit sums
 */
__STATIC_FORCEINLINE uint32_t __SXTAB16_RORn(uint32_t op1, uint32_t op2, uint32_t rotate)
{
    uint32_t result;
    if (__builtin_constant_p(rotate) && ((rotate == 8U) || (rotate == 16U) || (rotate == 24U)))
    {
        __ASM volatile("sxtab16 %0, %1, %2, ROR %3" : "=r"(result) : "r"(op1), "r"(op2), "i"(rotate));
    }
    else
    {
        result = __SXTAB16(op1, __ROR(op2, rotate));
    }
    return result;
}

/** \brief   Signed Most Significant Word Multiply Accumulate (SMMLA)
    \param [in] op1  First multiplicand
    \param [in] op2  Second multiplicand
    \param [in] op3  Accumulate value
    \return          op3 plus the most significant 32 bits of op1 * op2
 */
__STATIC_FORCEINLINE int32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
{
  int32_t result;

  __ASM volatile ("smmla %0, %1, %2, %3" : "=r" (result): "r"  (op1), "r" (op2), "r" (op3) );
  return (result);
}
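
/*
  Usage sketch (illustrative, not part of the original header): __SMMLA returns
  op3 plus the top 32 bits of the 64-bit product op1 * op2, which is handy for
  Q31 fixed-point multiply-accumulate without a 64-bit accumulator:

    int32_t acc = 100;
    int32_t r = __SMMLA(0x40000000, 0x40000000, acc);  // (2^30 * 2^30) >> 32 == 2^28
                                                       // r == 0x10000000 + 100
*/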

#endif /* (defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1)) */
/** @} end of group CMSIS_SIMD_intrinsics */

/* ###########################  Core Function Access  ########################### */
/** \ingroup  CMSIS_Core_FunctionInterface
    \defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions
  @{
 */

/** \brief  Get CPSR Register
    \return               CPSR Register value
 */
__STATIC_FORCEINLINE uint32_t __get_CPSR(void)
{
  uint32_t result;
  __ASM volatile("MRS %0, cpsr" : "=r" (result) );
  return(result);
}

/** \brief  Set CPSR Register
    \param [in]    cpsr  CPSR value to set
 */
__STATIC_FORCEINLINE void __set_CPSR(uint32_t cpsr)
{
  __ASM volatile ("MSR cpsr, %0" : : "r" (cpsr) : "cc", "memory");
}

/** \brief  Get Mode
    \return                Processor Mode
 */
__STATIC_FORCEINLINE uint32_t __get_mode(void)
{
  return (__get_CPSR() & 0x1FU);
}
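
/*
  Usage sketch (illustrative, not part of the original header): the value
  returned by __get_mode() is the CPSR.M field, which can be compared against
  the A-profile mode encodings (e.g. 0x10 USR, 0x13 SVC, 0x1F SYS):

    if (__get_mode() == 0x1FU)   // currently in SYS mode?
    {
      __set_mode(0x13U);         // switch to SVC mode (privileged code only)
    }
*/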

/** \brief  Set Mode
    \param [in]    mode  Mode value to set
 */
__STATIC_FORCEINLINE void __set_mode(uint32_t mode)
{
  __ASM volatile("MSR  cpsr_c, %0" : : "r" (mode) : "memory");
}

/** \brief  Get Stack Pointer
    \return Stack Pointer value
 */
__STATIC_FORCEINLINE uint32_t __get_SP(void)
{
  uint32_t result;
  __ASM volatile("MOV  %0, sp" : "=r" (result) : : "memory");
  return result;
}

/** \brief  Set Stack Pointer
    \param [in]    stack  Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __set_SP(uint32_t stack)
{
  __ASM volatile("MOV  sp, %0" : : "r" (stack) : "memory");
}

/** \brief  Get USR/SYS Stack Pointer
    \return USR/SYS Stack Pointer value
 */
__STATIC_FORCEINLINE uint32_t __get_SP_usr(void)
{
  uint32_t cpsr;
  uint32_t result;
  __ASM volatile(
    "MRS     %0, cpsr   \n"
    "CPS     #0x1F      \n" // no effect in USR mode
    "MOV     %1, sp     \n"
    "MSR     cpsr_c, %0 \n" // no effect in USR mode
    "ISB" :  "=r"(cpsr), "=r"(result) : : "memory"
   );
  return result;
}
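
/*
  Usage sketch (illustrative, not part of the original header): __get_SP_usr()
  briefly switches to SYS mode (CPS #0x1F), which banks in the shared USR/SYS
  stack pointer, reads it, and restores the original mode. A kernel running in
  SVC mode can use it to inspect the interrupted task's stack:

    uint32_t task_sp = __get_SP_usr();   // SP of the USR/SYS-mode task
*/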

/** \brief  Set USR/SYS Stack Pointer
    \param [in]    topOfProcStack  USR/SYS Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __set_SP_usr(uint32_t topOfProcStack)
{
  uint32_t cpsr;
  __ASM volatile(
    "MRS     %0, cpsr   \n"
    "CPS     #0x1F      \n" // no effect in USR mode
    "MOV     sp, %1     \n"
    "MSR     cpsr_c, %0 \n" // no effect in USR mode
    "ISB" : "=r"(cpsr) : "r" (topOfProcStack) : "memory"
   );
}

/** \brief  Get FPEXC
    \return               Floating Point Exception Control register value
 */
__STATIC_FORCEINLINE uint32_t __get_FPEXC(void)
{
#if (__FPU_PRESENT == 1)
  uint32_t result;
  __ASM volatile("VMRS %0, fpexc" : "=r" (result) : : "memory");
  return(result);
#else
  return(0);
#endif
}

/** \brief  Set FPEXC
    \param [in]    fpexc  Floating Point Exception Control value to set
 */
__STATIC_FORCEINLINE void __set_FPEXC(uint32_t fpexc)
{
#if (__FPU_PRESENT == 1)
  __ASM volatile ("VMSR fpexc, %0" : : "r" (fpexc) : "memory");
#endif
}
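
/*
  Usage sketch (illustrative, not part of the original header): bit 30 of FPEXC
  (FPEXC.EN) globally enables the FPU/NEON unit, so a minimal enable sequence
  using these accessors is:

    __set_FPEXC(__get_FPEXC() | 0x40000000U);   // set FPEXC.EN
*/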

/** @} end of CMSIS_Core_RegAccFunctions */


/*
 * Include common core functions to access Coprocessor 15 registers
 */

#define __get_CP(cp, op1, Rt, CRn, CRm, op2) __ASM volatile("MRC p" # cp ", " # op1 ", %0, c" # CRn ", c" # CRm ", " # op2 : "=r" (Rt) : : "memory" )
#define __set_CP(cp, op1, Rt, CRn, CRm, op2) __ASM volatile("MCR p" # cp ", " # op1 ", %0, c" # CRn ", c" # CRm ", " # op2 : : "r" (Rt) : "memory" )
#define __get_CP64(cp, op1, Rt, CRm)         __ASM volatile("MRRC p" # cp ", " # op1 ", %Q0, %R0, c" # CRm  : "=r" (Rt) : : "memory" )
#define __set_CP64(cp, op1, Rt, CRm)         __ASM volatile("MCRR p" # cp ", " # op1 ", %Q0, %R0, c" # CRm  : : "r" (Rt) : "memory" )

#include "cmsis_cp15.h"
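
/*
  Usage sketch (illustrative, not part of the original header): __get_CP
  stringizes its arguments into an MRC instruction. Reading the Main ID
  Register (MIDR: CP15, opc1=0, c0, c0, opc2=0) looks like:

    uint32_t midr;
    __get_CP(15, 0, midr, 0, 0, 0);   // emits: MRC p15, 0, <Rt>, c0, c0, 0
*/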

/** \brief  Enable Floating Point Unit

  Critical section, called from the undefined-instruction handler, so SysTick is disabled
 */
__STATIC_INLINE void __FPU_Enable(void)
{
  __ASM volatile(
    // Permit access to VFP/NEON registers by modifying CPACR
    "        MRC     p15,0,R1,c1,c0,2  \n"
    "        ORR     R1,R1,#0x00F00000 \n"
    "        MCR     p15,0,R1,c1,c0,2  \n"

    // Ensure that subsequent instructions occur in the context of VFP/NEON access permitted
    "        ISB                       \n"

    // Enable VFP/NEON (set FPEXC.EN)
    "        VMRS    R1,FPEXC          \n"
    "        ORR     R1,R1,#0x40000000 \n"
    "        VMSR    FPEXC,R1          \n"

    // Initialise VFP/NEON registers to 0
    "        MOV     R2,#0             \n"

    // Initialise registers D0-D15 to 0
    "        VMOV    D0, R2,R2         \n"
    "        VMOV    D1, R2,R2         \n"
    "        VMOV    D2, R2,R2         \n"
    "        VMOV    D3, R2,R2         \n"
    "        VMOV    D4, R2,R2         \n"
    "        VMOV    D5, R2,R2         \n"
    "        VMOV    D6, R2,R2         \n"
    "        VMOV    D7, R2,R2         \n"
    "        VMOV    D8, R2,R2         \n"
    "        VMOV    D9, R2,R2         \n"
    "        VMOV    D10,R2,R2         \n"
    "        VMOV    D11,R2,R2         \n"
    "        VMOV    D12,R2,R2         \n"
    "        VMOV    D13,R2,R2         \n"
    "        VMOV    D14,R2,R2         \n"
    "        VMOV    D15,R2,R2         \n"

#if (defined(__ARM_NEON) && (__ARM_NEON == 1))
    // Initialise registers D16-D31 to 0 (upper half of the Advanced SIMD register file)
    "        VMOV    D16,R2,R2         \n"
    "        VMOV    D17,R2,R2         \n"
    "        VMOV    D18,R2,R2         \n"
    "        VMOV    D19,R2,R2         \n"
    "        VMOV    D20,R2,R2         \n"
    "        VMOV    D21,R2,R2         \n"
    "        VMOV    D22,R2,R2         \n"
    "        VMOV    D23,R2,R2         \n"
    "        VMOV    D24,R2,R2         \n"
    "        VMOV    D25,R2,R2         \n"
    "        VMOV    D26,R2,R2         \n"
    "        VMOV    D27,R2,R2         \n"
    "        VMOV    D28,R2,R2         \n"
    "        VMOV    D29,R2,R2         \n"
    "        VMOV    D30,R2,R2         \n"
    "        VMOV    D31,R2,R2         \n"
#endif

    // Initialise FPSCR to a known state
    "        VMRS    R1,FPSCR          \n"
    "        LDR     R2,=0x00086060    \n" // Mask off all bits that do not have to be preserved. Non-preserved bits can/should be zero.
    "        AND     R1,R1,R2          \n"
    "        VMSR    FPSCR,R1            "
    : : : "cc", "r1", "r2"
  );
}
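
/*
  Usage sketch (illustrative, not part of the original header): bare-metal
  startup code, or a lazy-enable undefined-instruction handler, typically calls
  this once before any floating-point code runs:

    #if (defined(__FPU_PRESENT) && (__FPU_PRESENT == 1))
      __FPU_Enable();   // grant CP10/CP11 access, set FPEXC.EN, zero the FP register bank
    #endif
*/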

#endif /* __CMSIS_ARMCLANG_A_H */