/**************************************************************************//**
 * @file     cmsis_gcc.h
 * @brief    CMSIS compiler GCC header file
 * @version  V6.0.0
 * @date     27. July 2024
 ******************************************************************************/
/*
 * Copyright (c) 2009-2023 Arm Limited. All rights reserved.
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the License); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an AS IS BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef __CMSIS_GCC_H
#define __CMSIS_GCC_H

#pragma GCC system_header   /* treat file as system include file */

#include <arm_acle.h>

/* Fallback for __has_builtin */
#ifndef __has_builtin
  #define __has_builtin(x) (0)
#endif

/* CMSIS compiler specific defines */
#ifndef   __ASM
  #define __ASM                                  __asm
#endif
#ifndef   __INLINE
  #define __INLINE                               inline
#endif
#ifndef   __STATIC_INLINE
  #define __STATIC_INLINE                        static inline
#endif
#ifndef   __STATIC_FORCEINLINE
  #define __STATIC_FORCEINLINE                   __attribute__((always_inline)) static inline
#endif
#ifndef   __NO_RETURN
  #define __NO_RETURN                            __attribute__((__noreturn__))
#endif
#ifndef   CMSIS_DEPRECATED
  #define CMSIS_DEPRECATED                       __attribute__((deprecated))
#endif
#ifndef   __USED
  #define __USED                                 __attribute__((used))
#endif
#ifndef   __WEAK
  #define __WEAK                                 __attribute__((weak))
#endif
#ifndef   __PACKED
  #define __PACKED                               __attribute__((packed, aligned(1)))
#endif
#ifndef   __PACKED_STRUCT
  #define __PACKED_STRUCT                        struct __attribute__((packed, aligned(1)))
#endif
#ifndef   __PACKED_UNION
  #define __PACKED_UNION                         union __attribute__((packed, aligned(1)))
#endif
#ifndef   __UNALIGNED_UINT16_WRITE
  #pragma GCC diagnostic push
  #pragma GCC diagnostic ignored "-Wpacked"
  #pragma GCC diagnostic ignored "-Wattributes"
  __PACKED_STRUCT T_UINT16_WRITE { uint16_t v; };
  #pragma GCC diagnostic pop
  #define __UNALIGNED_UINT16_WRITE(addr, val)    (void)((((struct T_UINT16_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef   __UNALIGNED_UINT16_READ
  #pragma GCC diagnostic push
  #pragma GCC diagnostic ignored "-Wpacked"
  #pragma GCC diagnostic ignored "-Wattributes"
  __PACKED_STRUCT T_UINT16_READ { uint16_t v; };
  #pragma GCC diagnostic pop
  #define __UNALIGNED_UINT16_READ(addr)          (((const struct T_UINT16_READ *)(const void *)(addr))->v)
#endif
#ifndef   __UNALIGNED_UINT32_WRITE
  #pragma GCC diagnostic push
  #pragma GCC diagnostic ignored "-Wpacked"
  #pragma GCC diagnostic ignored "-Wattributes"
  __PACKED_STRUCT T_UINT32_WRITE { uint32_t v; };
  #pragma GCC diagnostic pop
  #define __UNALIGNED_UINT32_WRITE(addr, val)    (void)((((struct T_UINT32_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef   __UNALIGNED_UINT32_READ
  #pragma GCC diagnostic push
  #pragma GCC diagnostic ignored "-Wpacked"
  #pragma GCC diagnostic ignored "-Wattributes"
  __PACKED_STRUCT T_UINT32_READ { uint32_t v; };
  #pragma GCC diagnostic pop
  #define __UNALIGNED_UINT32_READ(addr)          (((const struct T_UINT32_READ *)(const void *)(addr))->v)
#endif
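
/* Usage sketch (illustrative only, not part of the CMSIS API): the unaligned
 * access macros go through packed-struct pointers, so the compiler emits
 * accesses that are safe at any address.
 *   uint8_t buf[6];
 *   __UNALIGNED_UINT32_WRITE(&buf[1], 0x12345678U);  // store at an odd address
 *   uint32_t v = __UNALIGNED_UINT32_READ(&buf[1]);   // v == 0x12345678
 */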
#ifndef   __ALIGNED
  #define __ALIGNED(x)                           __attribute__((aligned(x)))
#endif
#ifndef   __RESTRICT
  #define __RESTRICT                             __restrict
#endif
#ifndef   __COMPILER_BARRIER
  #define __COMPILER_BARRIER()                   __ASM volatile("":::"memory")
#endif
#ifndef __NO_INIT
  #define __NO_INIT                              __attribute__ ((section (".noinit")))
#endif
#ifndef __ALIAS
  #define __ALIAS(x)                             __attribute__ ((alias(x)))
#endif

/* ##########################  Core Instruction Access  ######################### */
/** \defgroup CMSIS_Core_InstructionInterface CMSIS Core Instruction Interface
  Access to dedicated instructions
  @{
*/

/* Define macros for porting to both thumb1 and thumb2.
 * For thumb1, use low register (r0-r7), specified by constraint "l".
 * Otherwise, use general registers, specified by constraint "r". */
#if defined (__thumb__) && !defined (__thumb2__)
#define __CMSIS_GCC_OUT_REG(r) "=l" (r)
#define __CMSIS_GCC_RW_REG(r) "+l" (r)
#define __CMSIS_GCC_USE_REG(r) "l" (r)
#else
#define __CMSIS_GCC_OUT_REG(r) "=r" (r)
#define __CMSIS_GCC_RW_REG(r) "+r" (r)
#define __CMSIS_GCC_USE_REG(r) "r" (r)
#endif

/**
  \brief   No Operation
  \details No Operation does nothing. This instruction can be used for code alignment purposes.
 */
#define __NOP()         __ASM volatile ("nop")


/**
  \brief   Wait For Interrupt
  \details Wait For Interrupt is a hint instruction that suspends execution until one of a number of events occurs.
 */
#define __WFI()         __ASM volatile ("wfi":::"memory")


/**
  \brief   Wait For Event
  \details Wait For Event is a hint instruction that permits the processor to enter
           a low-power state until one of a number of events occurs.
 */
#define __WFE()         __ASM volatile ("wfe":::"memory")


/**
  \brief   Send Event
  \details Send Event is a hint instruction. It causes an event to be signaled to the CPU.
 */
#define __SEV()         __ASM volatile ("sev")


/**
  \brief   Instruction Synchronization Barrier
  \details Instruction Synchronization Barrier flushes the pipeline in the processor,
           so that all instructions following the ISB are fetched from cache or memory,
           after the instruction has been completed.
 */
__STATIC_FORCEINLINE void __ISB(void)
{
  __ASM volatile ("isb 0xF":::"memory");
}


/**
  \brief   Data Synchronization Barrier
  \details Acts as a special kind of Data Memory Barrier.
           It completes when all explicit memory accesses before this instruction complete.
 */
__STATIC_FORCEINLINE void __DSB(void)
{
  __ASM volatile ("dsb 0xF":::"memory");
}


/**
  \brief   Data Memory Barrier
  \details Ensures the apparent order of the explicit memory operations before
           and after the instruction, without ensuring their completion.
 */
__STATIC_FORCEINLINE void __DMB(void)
{
  __ASM volatile ("dmb 0xF":::"memory");
}


/**
  \brief   Reverse byte order (32 bit)
  \details Reverses the byte order in unsigned integer value. For example, 0x12345678 becomes 0x78563412.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
__STATIC_FORCEINLINE uint32_t __REV(uint32_t value)
{
  return __builtin_bswap32(value);
}


/**
  \brief   Reverse byte order (16 bit)
  \details Reverses the byte order within each halfword of a word. For example, 0x12345678 becomes 0x34127856.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
__STATIC_FORCEINLINE uint32_t __REV16(uint32_t value)
{
  uint32_t result;

  __ASM ("rev16 %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return (result);
}


/**
  \brief   Reverse byte order (16 bit)
  \details Reverses the byte order in a 16-bit value and returns the signed 16-bit result. For example, 0x0080 becomes 0x8000.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
__STATIC_FORCEINLINE int16_t __REVSH(int16_t value)
{
  return (int16_t)__builtin_bswap16(value);
}
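
/* Illustrative values for the byte-reverse family (a sketch, matching the
 * examples in the descriptions above):
 *   __REV  (0x12345678U)     == 0x78563412U      // whole word reversed
 *   __REV16(0x12345678U)     == 0x34127856U      // each halfword reversed
 *   __REVSH((int16_t)0x0080) == (int16_t)0x8000  // -32768 after sign extension
 */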


/**
  \brief   Rotate Right in unsigned value (32 bit)
  \details Rotate Right provides the value of the contents of a register rotated by a variable number of bits.
  \param [in]    op1  Value to rotate
  \param [in]    op2  Number of bits to rotate
  \return               Rotated value
 */
__STATIC_FORCEINLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
{
  op2 %= 32U;
  if (op2 == 0U)
  {
    return op1;
  }
  return (op1 >> op2) | (op1 << (32U - op2));
}
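
/* Example (illustrative): a right-rotation by 8 moves the low byte to the top;
 * op2 is reduced modulo 32, so rotating by 32 returns the input unchanged.
 *   __ROR(0x000000FFU, 8U)  == 0xFF000000U
 *   __ROR(0x12345678U, 32U) == 0x12345678U
 */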


/**
  \brief   Breakpoint
  \details Causes the processor to enter Debug state.
           Debug tools can use this to investigate system state when the instruction at a particular address is reached.
  \param [in]    value  is ignored by the processor.
                 If required, a debugger can use it to store additional information about the breakpoint.
 */
#define __BKPT(value) __ASM volatile ("bkpt "#value)


/**
  \brief   Reverse bit order of value
  \details Reverses the bit order of the given value.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
__STATIC_FORCEINLINE uint32_t __RBIT(uint32_t value)
{
  uint32_t result;

#if (__ARM_ARCH_ISA_THUMB >= 2)
   __ASM ("rbit %0, %1" : "=r" (result) : "r" (value) );
#else
  uint32_t s = (4U /*sizeof(v)*/ * 8U) - 1U; /* extra shift needed at end */

  result = value;                      /* r will be reversed bits of v; first get LSB of v */
  for (value >>= 1U; value != 0U; value >>= 1U)
  {
    result <<= 1U;
    result |= value & 1U;
    s--;
  }
  result <<= s;                        /* shift when v's highest bits are zero */
#endif
  return (result);
}
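
/* Example (illustrative): bit 0 swaps with bit 31, bit 1 with bit 30, and so on.
 *   __RBIT(0x00000001U) == 0x80000000U
 *   __RBIT(0xF0000000U) == 0x0000000FU
 */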


/**
  \brief   Count leading zeros
  \details Counts the number of leading zeros of a data value.
  \param [in]  value  Value to count the leading zeros
  \return             number of leading zeros in value
 */
__STATIC_FORCEINLINE uint8_t __CLZ(uint32_t value)
{
  /* Even though __builtin_clz produces a CLZ instruction on ARM, formally
     __builtin_clz(0) is undefined behaviour, so handle this case specially.
     This guarantees ARM-compatible results if happening to compile on a non-ARM
     target, and ensures the compiler doesn't decide to activate any
     optimisations using the logic "value was passed to __builtin_clz, so it
     is non-zero".
     ARM GCC 7.3 and possibly earlier will optimise this test away, leaving a
     single CLZ instruction.
   */
  if (value == 0U)
  {
    return 32U;
  }
  return __builtin_clz(value);
}
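
/* Example (illustrative):
 *   __CLZ(0x00010000U) == 15U   // highest set bit is bit 16
 *   __CLZ(0U)          == 32U   // the zero input is handled explicitly above
 */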


#if (__ARM_FEATURE_SAT    >= 1)
/**
  \brief   Signed Saturate
  \details Saturates a signed value.
  \param [in]  value  Value to be saturated
  \param [in]    sat  Bit position to saturate to (1..32)
  \return             Saturated value
 */
#define __SSAT(value, sat) __ssat(value, sat)


/**
  \brief   Unsigned Saturate
  \details Saturates an unsigned value.
  \param [in]  value  Value to be saturated
  \param [in]    sat  Bit position to saturate to (0..31)
  \return             Saturated value
 */
#define __USAT(value, sat) __usat(value, sat)

#else /* (__ARM_FEATURE_SAT >= 1) */
/**
  \brief   Signed Saturate
  \details Saturates a signed value.
  \param [in]  value  Value to be saturated
  \param [in]    sat  Bit position to saturate to (1..32)
  \return             Saturated value
 */
__STATIC_FORCEINLINE int32_t __SSAT(int32_t val, uint32_t sat)
{
  if ((sat >= 1U) && (sat <= 32U))
  {
    const int32_t max = (int32_t)((1U << (sat - 1U)) - 1U);
    const int32_t min = -1 - max;
    if (val > max)
    {
      return (max);
    }
    else if (val < min)
    {
      return (min);
    }
  }
  return (val);
}


/**
  \brief   Unsigned Saturate
  \details Saturates an unsigned value.
  \param [in]  value  Value to be saturated
  \param [in]    sat  Bit position to saturate to (0..31)
  \return             Saturated value
 */
__STATIC_FORCEINLINE uint32_t __USAT(int32_t val, uint32_t sat)
{
  if (sat <= 31U)
  {
    const uint32_t max = ((1U << sat) - 1U);
    if (val > (int32_t)max)
    {
      return (max);
    }
    else if (val < 0)
    {
      return (0U);
    }
  }
  return ((uint32_t)val);
}
#endif /* (__ARM_FEATURE_SAT >= 1) */
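
/* Example (illustrative): saturating into 8-bit ranges.
 *   __SSAT( 300, 8U) ==  127    // signed   8-bit range is -128..127
 *   __SSAT(-300, 8U) == -128
 *   __USAT( 300, 8U) ==  255U   // unsigned 8-bit range is 0..255
 *   __USAT(  -5, 8U) ==    0U
 */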


#if (__ARM_FEATURE_LDREX >= 1)
/**
  \brief   Remove the exclusive lock
  \details Removes the exclusive lock which is created by LDREX.
 */
__STATIC_FORCEINLINE void __CLREX(void)
{
  __ASM volatile ("clrex" ::: "memory");
}


/**
  \brief   LDR Exclusive (8 bit)
  \details Executes an exclusive LDR instruction for 8 bit values.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
__STATIC_FORCEINLINE uint8_t __LDREXB(volatile uint8_t *addr)
{
  uint32_t result;

  __ASM volatile ("ldrexb %0, %1" : "=r" (result) : "Q" (*addr) );
  return ((uint8_t) result);    /* Add explicit type cast here */
}


/**
  \brief   STR Exclusive (8 bit)
  \details Executes an exclusive STR instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
__STATIC_FORCEINLINE uint32_t __STREXB(uint8_t value, volatile uint8_t *addr)
{
  uint32_t result;

  __ASM volatile ("strexb %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
  return (result);
}
#endif /* (__ARM_FEATURE_LDREX >= 1) */
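
/* Usage sketch (illustrative, `flag` is a hypothetical variable): an exclusive
 * load/store pair is retried until the store succeeds, yielding an atomic
 * read-modify-write of the byte.
 *   volatile uint8_t flag;
 *   do {
 *     (void)__LDREXB(&flag);              // open the exclusive monitor
 *   } while (__STREXB(1U, &flag) != 0U);  // 0 means the store succeeded
 */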


#if (__ARM_FEATURE_LDREX >= 2)
/**
  \brief   LDR Exclusive (16 bit)
  \details Executes an exclusive LDR instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint16_t at (*ptr)
 */
__STATIC_FORCEINLINE uint16_t __LDREXH(volatile uint16_t *addr)
{
  uint32_t result;

  __ASM volatile ("ldrexh %0, %1" : "=r" (result) : "Q" (*addr) );
  return ((uint16_t)result);    /* Add explicit type cast here */
}


/**
  \brief   STR Exclusive (16 bit)
  \details Executes an exclusive STR instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
__STATIC_FORCEINLINE uint32_t __STREXH(uint16_t value, volatile uint16_t *addr)
{
  uint32_t result;

  __ASM volatile ("strexh %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
  return (result);
}
#endif /* (__ARM_FEATURE_LDREX >= 2) */


#if (__ARM_FEATURE_LDREX >= 4)
/**
  \brief   LDR Exclusive (32 bit)
  \details Executes an exclusive LDR instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint32_t at (*ptr)
 */
__STATIC_FORCEINLINE uint32_t __LDREXW(volatile uint32_t *addr)
{
  uint32_t result;

  __ASM volatile ("ldrex %0, %1" : "=r" (result) : "Q" (*addr) );
  return (result);
}


/**
  \brief   STR Exclusive (32 bit)
  \details Executes an exclusive STR instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
__STATIC_FORCEINLINE uint32_t __STREXW(uint32_t value, volatile uint32_t *addr)
{
  uint32_t result;

  __ASM volatile ("strex %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) );
  return (result);
}
#endif /* (__ARM_FEATURE_LDREX >= 4) */
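
/* Usage sketch (illustrative, `counter` is a hypothetical variable): a
 * lock-free increment built on the 32-bit exclusives.
 *   static volatile uint32_t counter;
 *   uint32_t v;
 *   do {
 *     v = __LDREXW(&counter);                    // load and open the monitor
 *   } while (__STREXW(v + 1U, &counter) != 0U);  // retry if the monitor was lost
 */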


#if (__ARM_ARCH_ISA_THUMB >= 2)
/**
  \brief   Rotate Right with Extend (32 bit)
  \details Moves each bit of a bitstring right by one bit.
           The carry input is shifted in at the left end of the bitstring.
  \param [in]    value  Value to rotate
  \return               Rotated value
 */
__STATIC_FORCEINLINE uint32_t __RRX(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("rrx %0, %1" : "=r" (result) : "r" (value));
  return (result);
}


/**
  \brief   LDRT Unprivileged (8 bit)
  \details Executes an unprivileged LDRT instruction for 8 bit values.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
__STATIC_FORCEINLINE uint8_t __LDRBT(volatile uint8_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint8_t)result);    /* Add explicit type cast here */
}


/**
  \brief   LDRT Unprivileged (16 bit)
  \details Executes an unprivileged LDRT instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint16_t at (*ptr)
 */
__STATIC_FORCEINLINE uint16_t __LDRHT(volatile uint16_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint16_t)result);    /* Add explicit type cast here */
}


/**
  \brief   LDRT Unprivileged (32 bit)
  \details Executes an unprivileged LDRT instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint32_t at (*ptr)
 */
__STATIC_FORCEINLINE uint32_t __LDRT(volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*ptr) );
  return (result);
}


/**
  \brief   STRT Unprivileged (8 bit)
  \details Executes an unprivileged STRT instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STRBT(uint8_t value, volatile uint8_t *ptr)
{
  __ASM volatile ("strbt %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


/**
  \brief   STRT Unprivileged (16 bit)
  \details Executes an unprivileged STRT instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STRHT(uint16_t value, volatile uint16_t *ptr)
{
  __ASM volatile ("strht %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


/**
  \brief   STRT Unprivileged (32 bit)
  \details Executes an unprivileged STRT instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STRT(uint32_t value, volatile uint32_t *ptr)
{
  __ASM volatile ("strt %1, %0" : "=Q" (*ptr) : "r" (value) );
}
#endif /* (__ARM_ARCH_ISA_THUMB >= 2) */
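
/* Usage sketch (illustrative, `user_ptr` is a hypothetical pointer): privileged
 * code can use the *T variants to access memory with unprivileged permissions,
 * e.g. when dereferencing a pointer handed over by unprivileged code.
 *   uint32_t word = __LDRT(user_ptr);   // faults if unprivileged access is not allowed
 *   __STRT(word + 1U, user_ptr);
 */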


#if (__ARM_ARCH >= 8)
/**
  \brief   Load-Acquire (8 bit)
  \details Executes an LDAB instruction for 8 bit values.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
__STATIC_FORCEINLINE uint8_t __LDAB(volatile uint8_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldab %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
  return ((uint8_t)result);    /* Add explicit type cast here */
}


/**
  \brief   Load-Acquire (16 bit)
  \details Executes an LDAH instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint16_t at (*ptr)
 */
__STATIC_FORCEINLINE uint16_t __LDAH(volatile uint16_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldah %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
  return ((uint16_t)result);    /* Add explicit type cast here */
}


/**
  \brief   Load-Acquire (32 bit)
  \details Executes an LDA instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint32_t at (*ptr)
 */
__STATIC_FORCEINLINE uint32_t __LDA(volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("lda %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
  return (result);
}


/**
  \brief   Store-Release (8 bit)
  \details Executes an STLB instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STLB(uint8_t value, volatile uint8_t *ptr)
{
  __ASM volatile ("stlb %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
}


/**
  \brief   Store-Release (16 bit)
  \details Executes an STLH instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STLH(uint16_t value, volatile uint16_t *ptr)
{
  __ASM volatile ("stlh %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
}


/**
  \brief   Store-Release (32 bit)
  \details Executes an STL instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STL(uint32_t value, volatile uint32_t *ptr)
{
  __ASM volatile ("stl %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
}


/**
  \brief   Load-Acquire Exclusive (8 bit)
  \details Executes an LDAB exclusive instruction for 8 bit values.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
__STATIC_FORCEINLINE uint8_t __LDAEXB(volatile uint8_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldaexb %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
  return ((uint8_t)result);    /* Add explicit type cast here */
}


/**
  \brief   Load-Acquire Exclusive (16 bit)
  \details Executes an LDAH exclusive instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint16_t at (*ptr)
 */
__STATIC_FORCEINLINE uint16_t __LDAEXH(volatile uint16_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldaexh %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
  return ((uint16_t)result);    /* Add explicit type cast here */
}


/**
  \brief   Load-Acquire Exclusive (32 bit)
  \details Executes an LDA exclusive instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint32_t at (*ptr)
 */
__STATIC_FORCEINLINE uint32_t __LDAEX(volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldaex %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
  return (result);
}


/**
  \brief   Store-Release Exclusive (8 bit)
  \details Executes an STLB exclusive instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
__STATIC_FORCEINLINE uint32_t __STLEXB(uint8_t value, volatile uint8_t *ptr)
{
  uint32_t result;

  __ASM volatile ("stlexb %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
  return (result);
}


/**
  \brief   Store-Release Exclusive (16 bit)
  \details Executes an STLH exclusive instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
__STATIC_FORCEINLINE uint32_t __STLEXH(uint16_t value, volatile uint16_t *ptr)
{
  uint32_t result;

  __ASM volatile ("stlexh %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
  return (result);
}


/**
  \brief   Store-Release Exclusive (32 bit)
  \details Executes an STL exclusive instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
__STATIC_FORCEINLINE uint32_t __STLEX(uint32_t value, volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("stlex %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
  return (result);
}

#endif /* (__ARM_ARCH >= 8) */
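
/* Usage sketch (illustrative, `lock` is a hypothetical variable): a minimal
 * spinlock built on the acquire/release exclusives.
 *   static volatile uint32_t lock;   // 0 = free, 1 = taken
 *   void lock_acquire(void) {
 *     while ((__LDAEX(&lock) != 0U) || (__STLEX(1U, &lock) != 0U)) { }
 *   }
 *   void lock_release(void) {
 *     __STL(0U, &lock);              // store-release orders prior accesses
 *   }
 */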

/** @}*/ /* end of group CMSIS_Core_InstructionInterface */


/* ###########################  Core Function Access  ########################### */
/** \ingroup  CMSIS_Core_FunctionInterface
    \defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions
  @{
 */

/**
  \brief   Enable IRQ Interrupts
  \details Enables IRQ interrupts by clearing special-purpose register PRIMASK.
           Can only be executed in Privileged modes.
 */
__STATIC_FORCEINLINE void __enable_irq(void)
{
  __ASM volatile ("cpsie i" : : : "memory");
}


/**
  \brief   Disable IRQ Interrupts
  \details Disables IRQ interrupts by setting special-purpose register PRIMASK.
           Can only be executed in Privileged modes.
 */
__STATIC_FORCEINLINE void __disable_irq(void)
{
  __ASM volatile ("cpsid i" : : : "memory");
}

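/* Usage sketch (illustrative): a simple critical section around data shared
 * with interrupt handlers. Saving and restoring the prior mask state is the
 * more composable pattern; the plain form is shown here.
 *   __disable_irq();   // mask IRQs (sets PRIMASK on M-profile)
 *   // ... touch the shared data ...
 *   __enable_irq();    // unmask IRQs again
 */
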
#if (__ARM_ARCH_ISA_THUMB >= 2)
  /**
    \brief   Enable FIQ
    \details Enables FIQ interrupts by clearing special-purpose register FAULTMASK.
             Can only be executed in Privileged modes.
   */
  __STATIC_FORCEINLINE void __enable_fault_irq(void)
  {
    __ASM volatile ("cpsie f" : : : "memory");
  }


  /**
    \brief   Disable FIQ
    \details Disables FIQ interrupts by setting special-purpose register FAULTMASK.
             Can only be executed in Privileged modes.
   */
  __STATIC_FORCEINLINE void __disable_fault_irq(void)
  {
    __ASM volatile ("cpsid f" : : : "memory");
  }
#endif


/**
  \brief   Get FPSCR
  \details Returns the current value of the Floating Point Status/Control register.
  \return               Floating Point Status/Control register value
 */
__STATIC_FORCEINLINE uint32_t __get_FPSCR(void)
{
#if (defined(__ARM_FP) && (__ARM_FP >= 1))
  return (__builtin_arm_get_fpscr());
#else
  return (0U);
#endif
}


/**
  \brief   Set FPSCR
  \details Assigns the given value to the Floating Point Status/Control register.
  \param [in]    fpscr  Floating Point Status/Control value to set
 */
__STATIC_FORCEINLINE void __set_FPSCR(uint32_t fpscr)
{
#if (defined(__ARM_FP) && (__ARM_FP >= 1))
  __builtin_arm_set_fpscr(fpscr);
#else
  (void)fpscr;
#endif
}
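
/* Usage sketch (illustrative): read-modify-write of the FPSCR, e.g. clearing
 * the cumulative exception flags held in bits [4:0] (IOC/DZC/OFC/UFC/IXC).
 *   uint32_t fpscr = __get_FPSCR();
 *   __set_FPSCR(fpscr & ~0x1FU);
 */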


/** @} end of CMSIS_Core_RegAccFunctions */


/* ###################  Compiler specific Intrinsics  ########################### */
/** \defgroup CMSIS_SIMD_intrinsics CMSIS SIMD Intrinsics
  Access to dedicated SIMD instructions
  @{
*/

#if (defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1))
  #define     __SADD8                 __sadd8
  #define     __QADD8                 __qadd8
  #define     __SHADD8                __shadd8
  #define     __UADD8                 __uadd8
  #define     __UQADD8                __uqadd8
  #define     __UHADD8                __uhadd8
  #define     __SSUB8                 __ssub8
  #define     __QSUB8                 __qsub8
  #define     __SHSUB8                __shsub8
  #define     __USUB8                 __usub8
  #define     __UQSUB8                __uqsub8
  #define     __UHSUB8                __uhsub8
  #define     __SADD16                __sadd16
  #define     __QADD16                __qadd16
  #define     __SHADD16               __shadd16
  #define     __UADD16                __uadd16
  #define     __UQADD16               __uqadd16
  #define     __UHADD16               __uhadd16
  #define     __SSUB16                __ssub16
  #define     __QSUB16                __qsub16
  #define     __SHSUB16               __shsub16
  #define     __USUB16                __usub16
  #define     __UQSUB16               __uqsub16
  #define     __UHSUB16               __uhsub16
  #define     __SASX                  __sasx
  #define     __QASX                  __qasx
  #define     __SHASX                 __shasx
  #define     __UASX                  __uasx
  #define     __UQASX                 __uqasx
  #define     __UHASX                 __uhasx
  #define     __SSAX                  __ssax
  #define     __QSAX                  __qsax
  #define     __SHSAX                 __shsax
  #define     __USAX                  __usax
  #define     __UQSAX                 __uqsax
  #define     __UHSAX                 __uhsax
  #define     __USAD8                 __usad8
  #define     __USADA8                __usada8
  #define     __SSAT16                __ssat16
  #define     __USAT16                __usat16
  #define     __UXTB16                __uxtb16
  #define     __UXTAB16               __uxtab16
  #define     __SXTB16                __sxtb16
  #define     __SXTAB16               __sxtab16
  #define     __SMUAD                 __smuad
  #define     __SMUADX                __smuadx
  #define     __SMLAD                 __smlad
  #define     __SMLADX                __smladx
  #define     __SMLALD                __smlald
  #define     __SMLALDX               __smlaldx
  #define     __SMUSD                 __smusd
  #define     __SMUSDX                __smusdx
  #define     __SMLSD                 __smlsd
  #define     __SMLSDX                __smlsdx
  #define     __SMLSLD                __smlsld
  #define     __SMLSLDX               __smlsldx
  #define     __SEL                   __sel
  #define     __QADD                  __qadd
  #define     __QSUB                  __qsub

  #define __PKHBT(ARG1,ARG2,ARG3) \
  __extension__ \
  ({                          \
    uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
    __ASM ("pkhbt %0, %1, %2, lsl %3" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2), "I" (ARG3)  ); \
    __RES; \
   })

  #define __PKHTB(ARG1,ARG2,ARG3) \
  __extension__ \
  ({                          \
    uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
    if (ARG3 == 0) \
      __ASM ("pkhtb %0, %1, %2" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2)  ); \
    else \
      __ASM ("pkhtb %0, %1, %2, asr %3" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2), "I" (ARG3)  ); \
    __RES; \
   })
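
  /* Example (illustrative): PKHBT keeps the bottom halfword of ARG1 and packs
   * it with the (shifted) top halfword of ARG2; PKHTB is the converse.
   *   __PKHBT(0x00001111U, 0x22220000U, 0U) == 0x22221111U
   *   __PKHTB(0x33330000U, 0x00004444U, 0U) == 0x33334444U
   */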

  __STATIC_FORCEINLINE uint32_t __SXTB16_RORn(uint32_t op1, uint32_t rotate)
  {
      uint32_t result;
      if (__builtin_constant_p(rotate) && ((rotate == 8U) || (rotate == 16U) || (rotate == 24U)))
      {
          __ASM volatile("sxtb16 %0, %1, ROR %2" : "=r"(result) : "r"(op1), "i"(rotate));
      }
      else
      {
          result = __SXTB16(__ROR(op1, rotate));
      }
      return result;
  }

  __STATIC_FORCEINLINE uint32_t __SXTAB16_RORn(uint32_t op1, uint32_t op2, uint32_t rotate)
  {
      uint32_t result;
      if (__builtin_constant_p(rotate) && ((rotate == 8U) || (rotate == 16U) || (rotate == 24U)))
      {
          __ASM volatile("sxtab16 %0, %1, %2, ROR %3" : "=r"(result) : "r"(op1), "r"(op2), "i"(rotate));
      }
      else
      {
          result = __SXTAB16(op1, __ROR(op2, rotate));
      }
      return result;
  }

  __STATIC_FORCEINLINE int32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
  {
    int32_t result;

    __ASM volatile ("smmla %0, %1, %2, %3" : "=r" (result): "r"  (op1), "r" (op2), "r" (op3) );
    return (result);
  }
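
  /* Example (illustrative) of the lane-wise SIMD behaviour:
   *   __UADD8(0x01020304U, 0x01010101U) == 0x02030405U  // four independent byte lanes
   *   __SMUAD(0x00030002U, 0x00050004U) == 23            // 3*5 + 2*4, dual 16-bit MAC
   *   __SMMLA(0x40000000, 0x40000000, 1) == 0x10000001   // high word of product, plus op3
   */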
#endif /* (__ARM_FEATURE_DSP == 1) */
/** @} end of group CMSIS_SIMD_intrinsics */

// Include the profile specific settings:
#if __ARM_ARCH_PROFILE == 'A'
  #include "a-profile/cmsis_gcc_a.h"
#elif __ARM_ARCH_PROFILE == 'R'
  #include "r-profile/cmsis_gcc_r.h"
#elif __ARM_ARCH_PROFILE == 'M'
  #include "m-profile/cmsis_gcc_m.h"
#else
  #error "Unknown Arm architecture profile"
#endif

#endif /* __CMSIS_GCC_H */