1 /*
2  * Copyright (c) 2009-2023 Arm Limited. All rights reserved.
3  *
4  * SPDX-License-Identifier: Apache-2.0
5  *
6  * Licensed under the Apache License, Version 2.0 (the License); you may
7  * not use this file except in compliance with the License.
8  * You may obtain a copy of the License at
9  *
10  * www.apache.org/licenses/LICENSE-2.0
11  *
12  * Unless required by applicable law or agreed to in writing, software
13  * distributed under the License is distributed on an AS IS BASIS, WITHOUT
14  * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15  * See the License for the specific language governing permissions and
16  * limitations under the License.
17  */
18 
19 /*
20  * CMSIS-Core(M) Compiler GCC Header File
21  */
22 
23 #ifndef __CMSIS_GCC_M_H
24 #define __CMSIS_GCC_M_H
25 
26 /* ignore some GCC warnings */
27 #pragma GCC diagnostic push
28 #pragma GCC diagnostic ignored "-Wsign-conversion"
29 #pragma GCC diagnostic ignored "-Wconversion"
30 #pragma GCC diagnostic ignored "-Wunused-parameter"
31 
32 #include <arm_acle.h>
33 
34 /* Fallback for __has_builtin */
35 #ifndef __has_builtin
36   #define __has_builtin(x) (0)
37 #endif
38 
39 /* CMSIS compiler specific defines */
40 #ifndef   __ASM
41   #define __ASM                                  __asm
42 #endif
43 #ifndef   __INLINE
44   #define __INLINE                               inline
45 #endif
46 #ifndef   __STATIC_INLINE
47   #define __STATIC_INLINE                        static inline
48 #endif
49 #ifndef   __STATIC_FORCEINLINE
50   #define __STATIC_FORCEINLINE                   __attribute__((always_inline)) static inline
51 #endif
52 #ifndef   __NO_RETURN
53   #define __NO_RETURN                            __attribute__((__noreturn__))
54 #endif
55 #ifndef   __USED
56   #define __USED                                 __attribute__((used))
57 #endif
58 #ifndef   __WEAK
59   #define __WEAK                                 __attribute__((weak))
60 #endif
61 #ifndef   __PACKED
62   #define __PACKED                               __attribute__((packed, aligned(1)))
63 #endif
64 #ifndef   __PACKED_STRUCT
65   #define __PACKED_STRUCT                        struct __attribute__((packed, aligned(1)))
66 #endif
67 #ifndef   __PACKED_UNION
68   #define __PACKED_UNION                         union __attribute__((packed, aligned(1)))
69 #endif
70 #ifndef   __UNALIGNED_UINT16_WRITE
71   #pragma GCC diagnostic push
72   #pragma GCC diagnostic ignored "-Wpacked"
73   #pragma GCC diagnostic ignored "-Wattributes"
74   __PACKED_STRUCT T_UINT16_WRITE { uint16_t v; };
75   #pragma GCC diagnostic pop
76   #define __UNALIGNED_UINT16_WRITE(addr, val)    (void)((((struct T_UINT16_WRITE *)(void *)(addr))->v) = (val))
77 #endif
78 #ifndef   __UNALIGNED_UINT16_READ
79   #pragma GCC diagnostic push
80   #pragma GCC diagnostic ignored "-Wpacked"
81   #pragma GCC diagnostic ignored "-Wattributes"
82   __PACKED_STRUCT T_UINT16_READ { uint16_t v; };
83   #pragma GCC diagnostic pop
84   #define __UNALIGNED_UINT16_READ(addr)          (((const struct T_UINT16_READ *)(const void *)(addr))->v)
85 #endif
86 #ifndef   __UNALIGNED_UINT32_WRITE
87   #pragma GCC diagnostic push
88   #pragma GCC diagnostic ignored "-Wpacked"
89   #pragma GCC diagnostic ignored "-Wattributes"
90   __PACKED_STRUCT T_UINT32_WRITE { uint32_t v; };
91   #pragma GCC diagnostic pop
92   #define __UNALIGNED_UINT32_WRITE(addr, val)    (void)((((struct T_UINT32_WRITE *)(void *)(addr))->v) = (val))
93 #endif
94 #ifndef   __UNALIGNED_UINT32_READ
95   #pragma GCC diagnostic push
96   #pragma GCC diagnostic ignored "-Wpacked"
97   #pragma GCC diagnostic ignored "-Wattributes"
98   __PACKED_STRUCT T_UINT32_READ { uint32_t v; };
99   #pragma GCC diagnostic pop
100   #define __UNALIGNED_UINT32_READ(addr)          (((const struct T_UINT32_READ *)(const void *)(addr))->v)
101 #endif
102 #ifndef   __ALIGNED
103   #define __ALIGNED(x)                           __attribute__((aligned(x)))
104 #endif
105 #ifndef   __RESTRICT
106   #define __RESTRICT                             __restrict
107 #endif
108 #ifndef   __COMPILER_BARRIER
109   #define __COMPILER_BARRIER()                   __ASM volatile("":::"memory")
110 #endif
111 #ifndef __NO_INIT
112   #define __NO_INIT                              __attribute__ ((section (".noinit")))
113 #endif
114 #ifndef __ALIAS
115   #define __ALIAS(x)                             __attribute__ ((alias(x)))
116 #endif
117 
118 /* #########################  Startup and Lowlevel Init  ######################## */
119 #ifndef __PROGRAM_START
120 
121 /**
122   \brief   Initializes data and bss sections
123   \details This default implementations initialized all data and additional bss
124            sections relying on .copy.table and .zero.table specified properly
125            in the used linker script.
126 
127  */
__cmsis_start(void)128 __STATIC_FORCEINLINE __NO_RETURN void __cmsis_start(void)
129 {
130   extern void _start(void) __NO_RETURN;
131 
132   typedef struct __copy_table {
133     uint32_t const* src;
134     uint32_t* dest;
135     uint32_t  wlen;
136   } __copy_table_t;
137 
138   typedef struct __zero_table {
139     uint32_t* dest;
140     uint32_t  wlen;
141   } __zero_table_t;
142 
143   extern const __copy_table_t __copy_table_start__;
144   extern const __copy_table_t __copy_table_end__;
145   extern const __zero_table_t __zero_table_start__;
146   extern const __zero_table_t __zero_table_end__;
147 
148   for (__copy_table_t const* pTable = &__copy_table_start__; pTable < &__copy_table_end__; ++pTable) {
149     for(uint32_t i=0u; i<pTable->wlen; ++i) {
150       pTable->dest[i] = pTable->src[i];
151     }
152   }
153 
154   for (__zero_table_t const* pTable = &__zero_table_start__; pTable < &__zero_table_end__; ++pTable) {
155     for(uint32_t i=0u; i<pTable->wlen; ++i) {
156       pTable->dest[i] = 0u;
157     }
158   }
159 
160   _start();
161 }
162 
163 #define __PROGRAM_START           __cmsis_start
164 #endif
165 
166 #ifndef __INITIAL_SP
167 #define __INITIAL_SP              __StackTop
168 #endif
169 
170 #ifndef __STACK_LIMIT
171 #define __STACK_LIMIT             __StackLimit
172 #endif
173 
174 #ifndef __VECTOR_TABLE
175 #define __VECTOR_TABLE            __Vectors
176 #endif
177 
178 #ifndef __VECTOR_TABLE_ATTRIBUTE
179 #define __VECTOR_TABLE_ATTRIBUTE  __attribute__((used, section(".vectors")))
180 #endif
181 
182 #if defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3)
183 #ifndef __STACK_SEAL
184 #define __STACK_SEAL              __StackSeal
185 #endif
186 
187 #ifndef __TZ_STACK_SEAL_SIZE
188 #define __TZ_STACK_SEAL_SIZE      8U
189 #endif
190 
191 #ifndef __TZ_STACK_SEAL_VALUE
192 #define __TZ_STACK_SEAL_VALUE     0xFEF5EDA5FEF5EDA5ULL
193 #endif
194 
195 
__TZ_set_STACKSEAL_S(uint32_t * stackTop)196 __STATIC_FORCEINLINE void __TZ_set_STACKSEAL_S (uint32_t* stackTop) {
197   *((uint64_t *)stackTop) = __TZ_STACK_SEAL_VALUE;
198 }
199 #endif
200 
201 
202 /* ##########################  Core Instruction Access  ######################### */
203 /** \defgroup CMSIS_Core_InstructionInterface CMSIS Core Instruction Interface
204   Access to dedicated instructions
205   @{
206 */
207 
208 /* Define macros for porting to both thumb1 and thumb2.
209  * For thumb1, use low register (r0-r7), specified by constraint "l"
210  * Otherwise, use general registers, specified by constraint "r" */
211 #if defined (__thumb__) && !defined (__thumb2__)
212 #define __CMSIS_GCC_OUT_REG(r) "=l" (r)
213 #define __CMSIS_GCC_RW_REG(r) "+l" (r)
214 #define __CMSIS_GCC_USE_REG(r) "l" (r)
215 #else
216 #define __CMSIS_GCC_OUT_REG(r) "=r" (r)
217 #define __CMSIS_GCC_RW_REG(r) "+r" (r)
218 #define __CMSIS_GCC_USE_REG(r) "r" (r)
219 #endif
220 
221 /**
222   \brief   No Operation
223   \details No Operation does nothing. This instruction can be used for code alignment purposes.
224  */
225 #define __NOP()         __ASM volatile ("nop")
226 
227 
228 /**
229   \brief   Wait For Interrupt
230   \details Wait For Interrupt is a hint instruction that suspends execution until one of a number of events occurs.
231  */
232 #define __WFI()         __ASM volatile ("wfi":::"memory")
233 
234 
235 /**
236   \brief   Wait For Event
237   \details Wait For Event is a hint instruction that permits the processor to enter
238            a low-power state until one of a number of events occurs.
239  */
240 #define __WFE()         __ASM volatile ("wfe":::"memory")
241 
242 
243 /**
244   \brief   Send Event
245   \details Send Event is a hint instruction. It causes an event to be signaled to the CPU.
246  */
247 #define __SEV()         __ASM volatile ("sev")
248 
249 
250 /**
251   \brief   Instruction Synchronization Barrier
252   \details Instruction Synchronization Barrier flushes the pipeline in the processor,
253            so that all instructions following the ISB are fetched from cache or memory,
254            after the instruction has been completed.
255  */
__ISB(void)256 __STATIC_FORCEINLINE void __ISB(void)
257 {
258   __ASM volatile ("isb 0xF":::"memory");
259 }
260 
261 
262 /**
263   \brief   Data Synchronization Barrier
264   \details Acts as a special kind of Data Memory Barrier.
265            It completes when all explicit memory accesses before this instruction complete.
266  */
__DSB(void)267 __STATIC_FORCEINLINE void __DSB(void)
268 {
269   __ASM volatile ("dsb 0xF":::"memory");
270 }
271 
272 
273 /**
274   \brief   Data Memory Barrier
275   \details Ensures the apparent order of the explicit memory operations before
276            and after the instruction, without ensuring their completion.
277  */
__DMB(void)278 __STATIC_FORCEINLINE void __DMB(void)
279 {
280   __ASM volatile ("dmb 0xF":::"memory");
281 }
282 
283 
284 /**
285   \brief   Reverse byte order (32 bit)
286   \details Reverses the byte order in unsigned integer value. For example, 0x12345678 becomes 0x78563412.
287   \param [in]    value  Value to reverse
288   \return               Reversed value
289  */
__REV(uint32_t value)290 __STATIC_FORCEINLINE uint32_t __REV(uint32_t value)
291 {
292   return __builtin_bswap32(value);
293 }
294 
295 
296 /**
297   \brief   Reverse byte order (16 bit)
298   \details Reverses the byte order within each halfword of a word. For example, 0x12345678 becomes 0x34127856.
299   \param [in]    value  Value to reverse
300   \return               Reversed value
301  */
__REV16(uint32_t value)302 __STATIC_FORCEINLINE uint32_t __REV16(uint32_t value)
303 {
304   uint32_t result;
305 
306   __ASM ("rev16 %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
307   return (result);
308 }
309 
310 
311 /**
312   \brief   Reverse byte order (16 bit)
313   \details Reverses the byte order in a 16-bit value and returns the signed 16-bit result. For example, 0x0080 becomes 0x8000.
314   \param [in]    value  Value to reverse
315   \return               Reversed value
316  */
__REVSH(int16_t value)317 __STATIC_FORCEINLINE int16_t __REVSH(int16_t value)
318 {
319   return (int16_t)__builtin_bswap16(value);
320 }
321 
322 
323 /**
324   \brief   Rotate Right in unsigned value (32 bit)
325   \details Rotate Right (immediate) provides the value of the contents of a register rotated by a variable number of bits.
326   \param [in]    op1  Value to rotate
327   \param [in]    op2  Number of Bits to rotate
328   \return               Rotated value
329  */
__ROR(uint32_t op1,uint32_t op2)330 __STATIC_FORCEINLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
331 {
332   op2 %= 32U;
333   if (op2 == 0U)
334   {
335     return op1;
336   }
337   return (op1 >> op2) | (op1 << (32U - op2));
338 }
339 
340 /**
341   \brief   Breakpoint
342   \details Causes the processor to enter Debug state.
343            Debug tools can use this to investigate system state when the instruction at a particular address is reached.
344   \param [in]    value  is ignored by the processor.
345                  If required, a debugger can use it to store additional information about the breakpoint.
346  */
347 #define __BKPT(value) __ASM volatile ("bkpt "#value)
348 
349 
350 /**
351   \brief   Reverse bit order of value
352   \details Reverses the bit order of the given value.
353   \param [in]    value  Value to reverse
354   \return               Reversed value
355  */
__RBIT(uint32_t value)356 __STATIC_FORCEINLINE uint32_t __RBIT(uint32_t value)
357 {
358   uint32_t result;
359 
360 #if (__ARM_ARCH_ISA_THUMB >= 2)
361    __ASM ("rbit %0, %1" : "=r" (result) : "r" (value) );
362 #else
363   uint32_t s = (4U /*sizeof(v)*/ * 8U) - 1U; /* extra shift needed at end */
364 
365   result = value;                      /* r will be reversed bits of v; first get LSB of v */
366   for (value >>= 1U; value != 0U; value >>= 1U)
367   {
368     result <<= 1U;
369     result |= value & 1U;
370     s--;
371   }
372   result <<= s;                        /* shift when v's highest bits are zero */
373 #endif
374   return (result);
375 }
376 
377 
378 /**
379   \brief   Count leading zeros
380   \details Counts the number of leading zeros of a data value.
381   \param [in]  value  Value to count the leading zeros
382   \return             number of leading zeros in value
383  */
__CLZ(uint32_t value)384 __STATIC_FORCEINLINE uint8_t __CLZ(uint32_t value)
385 {
386   /* Even though __builtin_clz produces a CLZ instruction on ARM, formally
387      __builtin_clz(0) is undefined behaviour, so handle this case specially.
388      This guarantees ARM-compatible results if happening to compile on a non-ARM
389      target, and ensures the compiler doesn't decide to activate any
390      optimisations using the logic "value was passed to __builtin_clz, so it
391      is non-zero".
392      ARM GCC 7.3 and possibly earlier will optimise this test away, leaving a
393      single CLZ instruction.
394    */
395   if (value == 0U)
396   {
397     return 32U;
398   }
399   return __builtin_clz(value);
400 }
401 
402 
403 #if (__ARM_FEATURE_SAT    >= 1)
404 /**
405   \brief   Signed Saturate
406   \details Saturates a signed value.
407   \param [in]  value  Value to be saturated
408   \param [in]    sat  Bit position to saturate to (1..32)
409   \return             Saturated value
410  */
411 #define __SSAT(value, sat) __ssat(value, sat)
412 
413 
414 /**
415   \brief   Unsigned Saturate
416   \details Saturates an unsigned value.
417   \param [in]  value  Value to be saturated
418   \param [in]    sat  Bit position to saturate to (0..31)
419   \return             Saturated value
420  */
421 #define __USAT(value, sat) __usat(value, sat)
422 
423 #else /* (__ARM_FEATURE_SAT >= 1) */
424 /**
425   \brief   Signed Saturate
426   \details Saturates a signed value.
427   \param [in]  value  Value to be saturated
428   \param [in]    sat  Bit position to saturate to (1..32)
429   \return             Saturated value
430  */
__SSAT(int32_t val,uint32_t sat)431 __STATIC_FORCEINLINE int32_t __SSAT(int32_t val, uint32_t sat)
432 {
433   if ((sat >= 1U) && (sat <= 32U))
434   {
435     const int32_t max = (int32_t)((1U << (sat - 1U)) - 1U);
436     const int32_t min = -1 - max ;
437     if (val > max)
438     {
439       return (max);
440     }
441     else if (val < min)
442     {
443       return (min);
444     }
445   }
446   return (val);
447 }
448 
449 
450 /**
451   \brief   Unsigned Saturate
452   \details Saturates an unsigned value.
453   \param [in]  value  Value to be saturated
454   \param [in]    sat  Bit position to saturate to (0..31)
455   \return             Saturated value
456  */
__USAT(int32_t val,uint32_t sat)457 __STATIC_FORCEINLINE uint32_t __USAT(int32_t val, uint32_t sat)
458 {
459   if (sat <= 31U)
460   {
461     const uint32_t max = ((1U << sat) - 1U);
462     if (val > (int32_t)max)
463     {
464       return (max);
465     }
466     else if (val < 0)
467     {
468       return (0U);
469     }
470   }
471   return ((uint32_t)val);
472 }
473 #endif /* (__ARM_FEATURE_SAT >= 1) */
474 
475 
476 #if (__ARM_FEATURE_LDREX >= 1)
477 /**
478   \brief   Remove the exclusive lock
479   \details Removes the exclusive lock which is created by LDREX.
480  */
__CLREX(void)481 __STATIC_FORCEINLINE void __CLREX(void)
482 {
483   __ASM volatile ("clrex" ::: "memory");
484 }
485 
486 
487 /**
488   \brief   LDR Exclusive (8 bit)
489   \details Executes a exclusive LDR instruction for 8 bit value.
490   \param [in]    ptr  Pointer to data
491   \return             value of type uint8_t at (*ptr)
492  */
__LDREXB(volatile uint8_t * addr)493 __STATIC_FORCEINLINE uint8_t __LDREXB(volatile uint8_t *addr)
494 {
495   uint32_t result;
496 
497   __ASM volatile ("ldrexb %0, %1" : "=r" (result) : "Q" (*addr) );
498   return ((uint8_t) result);    /* Add explicit type cast here */
499 }
500 
501 
502 /**
503   \brief   STR Exclusive (8 bit)
504   \details Executes a exclusive STR instruction for 8 bit values.
505   \param [in]  value  Value to store
506   \param [in]    ptr  Pointer to location
507   \return          0  Function succeeded
508   \return          1  Function failed
509  */
__STREXB(uint8_t value,volatile uint8_t * addr)510 __STATIC_FORCEINLINE uint32_t __STREXB(uint8_t value, volatile uint8_t *addr)
511 {
512   uint32_t result;
513 
514   __ASM volatile ("strexb %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
515   return (result);
516 }
517 #endif /* (__ARM_FEATURE_LDREX >= 1) */
518 
519 
520 #if (__ARM_FEATURE_LDREX >= 2)
521 /**
522   \brief   LDR Exclusive (16 bit)
523   \details Executes a exclusive LDR instruction for 16 bit values.
524   \param [in]    ptr  Pointer to data
525   \return        value of type uint16_t at (*ptr)
526  */
__LDREXH(volatile uint16_t * addr)527 __STATIC_FORCEINLINE uint16_t __LDREXH(volatile uint16_t *addr)
528 {
529   uint32_t result;
530 
531   __ASM volatile ("ldrexh %0, %1" : "=r" (result) : "Q" (*addr) );
532   return ((uint16_t)result);    /* Add explicit type cast here */
533 }
534 
535 
536 /**
537   \brief   STR Exclusive (16 bit)
538   \details Executes a exclusive STR instruction for 16 bit values.
539   \param [in]  value  Value to store
540   \param [in]    ptr  Pointer to location
541   \return          0  Function succeeded
542   \return          1  Function failed
543  */
__STREXH(uint16_t value,volatile uint16_t * addr)544 __STATIC_FORCEINLINE uint32_t __STREXH(uint16_t value, volatile uint16_t *addr)
545 {
546   uint32_t result;
547 
548   __ASM volatile ("strexh %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
549   return (result);
550 }
551 #endif /* (__ARM_FEATURE_LDREX >= 2) */
552 
553 
554 #if (__ARM_FEATURE_LDREX >= 4)
555 /**
556   \brief   LDR Exclusive (32 bit)
557   \details Executes a exclusive LDR instruction for 32 bit values.
558   \param [in]    ptr  Pointer to data
559   \return        value of type uint32_t at (*ptr)
560  */
__LDREXW(volatile uint32_t * addr)561 __STATIC_FORCEINLINE uint32_t __LDREXW(volatile uint32_t *addr)
562 {
563   uint32_t result;
564 
565   __ASM volatile ("ldrex %0, %1" : "=r" (result) : "Q" (*addr) );
566   return (result);
567 }
568 
569 
570 /**
571   \brief   STR Exclusive (32 bit)
572   \details Executes a exclusive STR instruction for 32 bit values.
573   \param [in]  value  Value to store
574   \param [in]    ptr  Pointer to location
575   \return          0  Function succeeded
576   \return          1  Function failed
577  */
__STREXW(uint32_t value,volatile uint32_t * addr)578 __STATIC_FORCEINLINE uint32_t __STREXW(uint32_t value, volatile uint32_t *addr)
579 {
580   uint32_t result;
581 
582   __ASM volatile ("strex %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) );
583   return (result);
584 }
585 #endif /* (__ARM_FEATURE_LDREX >= 4) */
586 
587 
588 #if (__ARM_ARCH_ISA_THUMB >= 2)
589 /**
590   \brief   Rotate Right with Extend (32 bit)
591   \details Moves each bit of a bitstring right by one bit.
592            The carry input is shifted in at the left end of the bitstring.
593   \param [in]    value  Value to rotate
594   \return               Rotated value
595  */
__RRX(uint32_t value)596 __STATIC_FORCEINLINE uint32_t __RRX(uint32_t value)
597 {
598   uint32_t result;
599 
600   __ASM volatile ("rrx %0, %1" : "=r" (result) : "r" (value));
601   return (result);
602 }
603 
604 
605 /**
606   \brief   LDRT Unprivileged (8 bit)
607   \details Executes a Unprivileged LDRT instruction for 8 bit value.
608   \param [in]    ptr  Pointer to data
609   \return             value of type uint8_t at (*ptr)
610  */
__LDRBT(volatile uint8_t * ptr)611 __STATIC_FORCEINLINE uint8_t __LDRBT(volatile uint8_t *ptr)
612 {
613   uint32_t result;
614 
615   __ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*ptr) );
616   return ((uint8_t)result);    /* Add explicit type cast here */
617 }
618 
619 
620 /**
621   \brief   LDRT Unprivileged (16 bit)
622   \details Executes a Unprivileged LDRT instruction for 16 bit values.
623   \param [in]    ptr  Pointer to data
624   \return        value of type uint16_t at (*ptr)
625  */
__LDRHT(volatile uint16_t * ptr)626 __STATIC_FORCEINLINE uint16_t __LDRHT(volatile uint16_t *ptr)
627 {
628   uint32_t result;
629 
630   __ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*ptr) );
631   return ((uint16_t)result);    /* Add explicit type cast here */
632 }
633 
634 
635 /**
636   \brief   LDRT Unprivileged (32 bit)
637   \details Executes a Unprivileged LDRT instruction for 32 bit values.
638   \param [in]    ptr  Pointer to data
639   \return        value of type uint32_t at (*ptr)
640  */
__LDRT(volatile uint32_t * ptr)641 __STATIC_FORCEINLINE uint32_t __LDRT(volatile uint32_t *ptr)
642 {
643   uint32_t result;
644 
645   __ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*ptr) );
646   return (result);
647 }
648 
649 
650 /**
651   \brief   STRT Unprivileged (8 bit)
652   \details Executes a Unprivileged STRT instruction for 8 bit values.
653   \param [in]  value  Value to store
654   \param [in]    ptr  Pointer to location
655  */
__STRBT(uint8_t value,volatile uint8_t * ptr)656 __STATIC_FORCEINLINE void __STRBT(uint8_t value, volatile uint8_t *ptr)
657 {
658   __ASM volatile ("strbt %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
659 }
660 
661 
662 /**
663   \brief   STRT Unprivileged (16 bit)
664   \details Executes a Unprivileged STRT instruction for 16 bit values.
665   \param [in]  value  Value to store
666   \param [in]    ptr  Pointer to location
667  */
__STRHT(uint16_t value,volatile uint16_t * ptr)668 __STATIC_FORCEINLINE void __STRHT(uint16_t value, volatile uint16_t *ptr)
669 {
670   __ASM volatile ("strht %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
671 }
672 
673 
674 /**
675   \brief   STRT Unprivileged (32 bit)
676   \details Executes a Unprivileged STRT instruction for 32 bit values.
677   \param [in]  value  Value to store
678   \param [in]    ptr  Pointer to location
679  */
__STRT(uint32_t value,volatile uint32_t * ptr)680 __STATIC_FORCEINLINE void __STRT(uint32_t value, volatile uint32_t *ptr)
681 {
682   __ASM volatile ("strt %1, %0" : "=Q" (*ptr) : "r" (value) );
683 }
684 #endif /* (__ARM_ARCH_ISA_THUMB >= 2) */
685 
686 
687 #if (__ARM_ARCH >= 8)
688 /**
689   \brief   Load-Acquire (8 bit)
690   \details Executes a LDAB instruction for 8 bit value.
691   \param [in]    ptr  Pointer to data
692   \return             value of type uint8_t at (*ptr)
693  */
__LDAB(volatile uint8_t * ptr)694 __STATIC_FORCEINLINE uint8_t __LDAB(volatile uint8_t *ptr)
695 {
696   uint32_t result;
697 
698   __ASM volatile ("ldab %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
699   return ((uint8_t)result);    /* Add explicit type cast here */
700 }
701 
702 
703 /**
704   \brief   Load-Acquire (16 bit)
705   \details Executes a LDAH instruction for 16 bit values.
706   \param [in]    ptr  Pointer to data
707   \return        value of type uint16_t at (*ptr)
708  */
__LDAH(volatile uint16_t * ptr)709 __STATIC_FORCEINLINE uint16_t __LDAH(volatile uint16_t *ptr)
710 {
711   uint32_t result;
712 
713   __ASM volatile ("ldah %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
714   return ((uint16_t)result);    /* Add explicit type cast here */
715 }
716 
717 
718 /**
719   \brief   Load-Acquire (32 bit)
720   \details Executes a LDA instruction for 32 bit values.
721   \param [in]    ptr  Pointer to data
722   \return        value of type uint32_t at (*ptr)
723  */
__LDA(volatile uint32_t * ptr)724 __STATIC_FORCEINLINE uint32_t __LDA(volatile uint32_t *ptr)
725 {
726   uint32_t result;
727 
728   __ASM volatile ("lda %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
729   return (result);
730 }
731 
732 
733 /**
734   \brief   Store-Release (8 bit)
735   \details Executes a STLB instruction for 8 bit values.
736   \param [in]  value  Value to store
737   \param [in]    ptr  Pointer to location
738  */
__STLB(uint8_t value,volatile uint8_t * ptr)739 __STATIC_FORCEINLINE void __STLB(uint8_t value, volatile uint8_t *ptr)
740 {
741   __ASM volatile ("stlb %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
742 }
743 
744 
745 /**
746   \brief   Store-Release (16 bit)
747   \details Executes a STLH instruction for 16 bit values.
748   \param [in]  value  Value to store
749   \param [in]    ptr  Pointer to location
750  */
__STLH(uint16_t value,volatile uint16_t * ptr)751 __STATIC_FORCEINLINE void __STLH(uint16_t value, volatile uint16_t *ptr)
752 {
753   __ASM volatile ("stlh %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
754 }
755 
756 
757 /**
758   \brief   Store-Release (32 bit)
759   \details Executes a STL instruction for 32 bit values.
760   \param [in]  value  Value to store
761   \param [in]    ptr  Pointer to location
762  */
__STL(uint32_t value,volatile uint32_t * ptr)763 __STATIC_FORCEINLINE void __STL(uint32_t value, volatile uint32_t *ptr)
764 {
765   __ASM volatile ("stl %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
766 }
767 
768 
769 /**
770   \brief   Load-Acquire Exclusive (8 bit)
771   \details Executes a LDAB exclusive instruction for 8 bit value.
772   \param [in]    ptr  Pointer to data
773   \return             value of type uint8_t at (*ptr)
774  */
__LDAEXB(volatile uint8_t * ptr)775 __STATIC_FORCEINLINE uint8_t __LDAEXB(volatile uint8_t *ptr)
776 {
777   uint32_t result;
778 
779   __ASM volatile ("ldaexb %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
780   return ((uint8_t)result);    /* Add explicit type cast here */
781 }
782 
783 
784 /**
785   \brief   Load-Acquire Exclusive (16 bit)
786   \details Executes a LDAH exclusive instruction for 16 bit values.
787   \param [in]    ptr  Pointer to data
788   \return        value of type uint16_t at (*ptr)
789  */
__LDAEXH(volatile uint16_t * ptr)790 __STATIC_FORCEINLINE uint16_t __LDAEXH(volatile uint16_t *ptr)
791 {
792   uint32_t result;
793 
794   __ASM volatile ("ldaexh %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
795   return ((uint16_t)result);    /* Add explicit type cast here */
796 }
797 
798 
799 /**
800   \brief   Load-Acquire Exclusive (32 bit)
801   \details Executes a LDA exclusive instruction for 32 bit values.
802   \param [in]    ptr  Pointer to data
803   \return        value of type uint32_t at (*ptr)
804  */
__LDAEX(volatile uint32_t * ptr)805 __STATIC_FORCEINLINE uint32_t __LDAEX(volatile uint32_t *ptr)
806 {
807   uint32_t result;
808 
809   __ASM volatile ("ldaex %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
810   return (result);
811 }
812 
813 
814 /**
815   \brief   Store-Release Exclusive (8 bit)
816   \details Executes a STLB exclusive instruction for 8 bit values.
817   \param [in]  value  Value to store
818   \param [in]    ptr  Pointer to location
819   \return          0  Function succeeded
820   \return          1  Function failed
821  */
__STLEXB(uint8_t value,volatile uint8_t * ptr)822 __STATIC_FORCEINLINE uint32_t __STLEXB(uint8_t value, volatile uint8_t *ptr)
823 {
824   uint32_t result;
825 
826   __ASM volatile ("stlexb %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
827   return (result);
828 }
829 
830 
831 /**
832   \brief   Store-Release Exclusive (16 bit)
833   \details Executes a STLH exclusive instruction for 16 bit values.
834   \param [in]  value  Value to store
835   \param [in]    ptr  Pointer to location
836   \return          0  Function succeeded
837   \return          1  Function failed
838  */
__STLEXH(uint16_t value,volatile uint16_t * ptr)839 __STATIC_FORCEINLINE uint32_t __STLEXH(uint16_t value, volatile uint16_t *ptr)
840 {
841   uint32_t result;
842 
843   __ASM volatile ("stlexh %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
844   return (result);
845 }
846 
847 
848 /**
849   \brief   Store-Release Exclusive (32 bit)
850   \details Executes a STL exclusive instruction for 32 bit values.
851   \param [in]  value  Value to store
852   \param [in]    ptr  Pointer to location
853   \return          0  Function succeeded
854   \return          1  Function failed
855  */
__STLEX(uint32_t value,volatile uint32_t * ptr)856 __STATIC_FORCEINLINE uint32_t __STLEX(uint32_t value, volatile uint32_t *ptr)
857 {
858   uint32_t result;
859 
860   __ASM volatile ("stlex %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
861   return (result);
862 }
863 
864 #endif /* (__ARM_ARCH >= 8) */
865 
866 /** @}*/ /* end of group CMSIS_Core_InstructionInterface */
867 
868 
869 /* ###########################  Core Function Access  ########################### */
870 /** \ingroup  CMSIS_Core_FunctionInterface
871     \defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions
872   @{
873  */
874 
875 /**
876   \brief   Enable IRQ Interrupts
877   \details Enables IRQ interrupts by clearing special-purpose register PRIMASK.
878            Can only be executed in Privileged modes.
879  */
__enable_irq(void)880 __STATIC_FORCEINLINE void __enable_irq(void)
881 {
882   __ASM volatile ("cpsie i" : : : "memory");
883 }
884 
885 
886 /**
887   \brief   Disable IRQ Interrupts
888   \details Disables IRQ interrupts by setting special-purpose register PRIMASK.
889            Can only be executed in Privileged modes.
890  */
__disable_irq(void)891 __STATIC_FORCEINLINE void __disable_irq(void)
892 {
893   __ASM volatile ("cpsid i" : : : "memory");
894 }
895 
896 
897 /**
898   \brief   Get Control Register
899   \details Returns the content of the Control Register.
900   \return               Control Register value
901  */
__get_CONTROL(void)902 __STATIC_FORCEINLINE uint32_t __get_CONTROL(void)
903 {
904   uint32_t result;
905 
906   __ASM volatile ("MRS %0, control" : "=r" (result) );
907   return (result);
908 }
909 
910 
911 #if defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3)
912 /**
913   \brief   Get Control Register (non-secure)
914   \details Returns the content of the non-secure Control Register when in secure mode.
915   \return               non-secure Control Register value
916  */
__TZ_get_CONTROL_NS(void)917 __STATIC_FORCEINLINE uint32_t __TZ_get_CONTROL_NS(void)
918 {
919   uint32_t result;
920 
921   __ASM volatile ("MRS %0, control_ns" : "=r" (result) );
922   return (result);
923 }
924 #endif
925 
926 
927 /**
928   \brief   Set Control Register
929   \details Writes the given value to the Control Register.
930   \param [in]    control  Control Register value to set
931  */
__set_CONTROL(uint32_t control)932 __STATIC_FORCEINLINE void __set_CONTROL(uint32_t control)
933 {
934   __ASM volatile ("MSR control, %0" : : "r" (control) : "memory");
935   __ISB();
936 }
937 
938 
939 #if defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3)
940 /**
941   \brief   Set Control Register (non-secure)
942   \details Writes the given value to the non-secure Control Register when in secure state.
943   \param [in]    control  Control Register value to set
944  */
__TZ_set_CONTROL_NS(uint32_t control)945 __STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control)
946 {
947   __ASM volatile ("MSR control_ns, %0" : : "r" (control) : "memory");
948   __ISB();
949 }
950 #endif
951 
952 
953 /**
954   \brief   Get IPSR Register
955   \details Returns the content of the IPSR Register.
956   \return               IPSR Register value
957  */
__get_IPSR(void)958 __STATIC_FORCEINLINE uint32_t __get_IPSR(void)
959 {
960   uint32_t result;
961 
962   __ASM volatile ("MRS %0, ipsr" : "=r" (result) );
963   return (result);
964 }
965 
966 
967 /**
968   \brief   Get APSR Register
969   \details Returns the content of the APSR Register.
970   \return               APSR Register value
971  */
__get_APSR(void)972 __STATIC_FORCEINLINE uint32_t __get_APSR(void)
973 {
974   uint32_t result;
975 
976   __ASM volatile ("MRS %0, apsr" : "=r" (result) );
977   return (result);
978 }
979 
980 
981 /**
982   \brief   Get xPSR Register
983   \details Returns the content of the xPSR Register.
984   \return               xPSR Register value
985  */
__get_xPSR(void)986 __STATIC_FORCEINLINE uint32_t __get_xPSR(void)
987 {
988   uint32_t result;
989 
990   __ASM volatile ("MRS %0, xpsr" : "=r" (result) );
991   return (result);
992 }
993 
994 
995 /**
996   \brief   Get Process Stack Pointer
997   \details Returns the current value of the Process Stack Pointer (PSP).
998   \return               PSP Register value
999  */
__get_PSP(void)1000 __STATIC_FORCEINLINE uint32_t __get_PSP(void)
1001 {
1002   uint32_t result;
1003 
1004   __ASM volatile ("MRS %0, psp"  : "=r" (result) );
1005   return (result);
1006 }
1007 
1008 
1009 #if defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3)
1010 /**
1011   \brief   Get Process Stack Pointer (non-secure)
1012   \details Returns the current value of the non-secure Process Stack Pointer (PSP) when in secure state.
1013   \return               PSP Register value
1014  */
__TZ_get_PSP_NS(void)1015 __STATIC_FORCEINLINE uint32_t __TZ_get_PSP_NS(void)
1016 {
1017   uint32_t result;
1018 
1019   __ASM volatile ("MRS %0, psp_ns"  : "=r" (result) );
1020   return (result);
1021 }
1022 #endif
1023 
1024 
1025 /**
1026   \brief   Set Process Stack Pointer
1027   \details Assigns the given value to the Process Stack Pointer (PSP).
1028   \param [in]    topOfProcStack  Process Stack Pointer value to set
1029  */
__set_PSP(uint32_t topOfProcStack)1030 __STATIC_FORCEINLINE void __set_PSP(uint32_t topOfProcStack)
1031 {
1032   __ASM volatile ("MSR psp, %0" : : "r" (topOfProcStack) : );
1033 }
1034 
1035 
1036 #if defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3)
1037 /**
1038   \brief   Set Process Stack Pointer (non-secure)
1039   \details Assigns the given value to the non-secure Process Stack Pointer (PSP) when in secure state.
1040   \param [in]    topOfProcStack  Process Stack Pointer value to set
1041  */
__TZ_set_PSP_NS(uint32_t topOfProcStack)1042 __STATIC_FORCEINLINE void __TZ_set_PSP_NS(uint32_t topOfProcStack)
1043 {
1044   __ASM volatile ("MSR psp_ns, %0" : : "r" (topOfProcStack) : );
1045 }
1046 #endif
1047 
1048 
1049 /**
1050   \brief   Get Main Stack Pointer
1051   \details Returns the current value of the Main Stack Pointer (MSP).
1052   \return               MSP Register value
1053  */
__get_MSP(void)1054 __STATIC_FORCEINLINE uint32_t __get_MSP(void)
1055 {
1056   uint32_t result;
1057 
1058   __ASM volatile ("MRS %0, msp" : "=r" (result) );
1059   return (result);
1060 }
1061 
1062 
1063 #if defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3)
1064 /**
1065   \brief   Get Main Stack Pointer (non-secure)
1066   \details Returns the current value of the non-secure Main Stack Pointer (MSP) when in secure state.
1067   \return               MSP Register value
1068  */
__TZ_get_MSP_NS(void)1069 __STATIC_FORCEINLINE uint32_t __TZ_get_MSP_NS(void)
1070 {
1071   uint32_t result;
1072 
1073   __ASM volatile ("MRS %0, msp_ns" : "=r" (result) );
1074   return (result);
1075 }
1076 #endif
1077 
1078 
1079 /**
1080   \brief   Set Main Stack Pointer
1081   \details Assigns the given value to the Main Stack Pointer (MSP).
1082   \param [in]    topOfMainStack  Main Stack Pointer value to set
1083  */
__set_MSP(uint32_t topOfMainStack)1084 __STATIC_FORCEINLINE void __set_MSP(uint32_t topOfMainStack)
1085 {
1086   __ASM volatile ("MSR msp, %0" : : "r" (topOfMainStack) : );
1087 }
1088 
1089 
1090 #if defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3)
1091 /**
1092   \brief   Set Main Stack Pointer (non-secure)
1093   \details Assigns the given value to the non-secure Main Stack Pointer (MSP) when in secure state.
1094   \param [in]    topOfMainStack  Main Stack Pointer value to set
1095  */
__TZ_set_MSP_NS(uint32_t topOfMainStack)1096 __STATIC_FORCEINLINE void __TZ_set_MSP_NS(uint32_t topOfMainStack)
1097 {
1098   __ASM volatile ("MSR msp_ns, %0" : : "r" (topOfMainStack) : );
1099 }
1100 #endif
1101 
1102 
1103 #if defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3)
1104 /**
1105   \brief   Get Stack Pointer (non-secure)
1106   \details Returns the current value of the non-secure Stack Pointer (SP) when in secure state.
1107   \return               SP Register value
1108  */
__TZ_get_SP_NS(void)1109 __STATIC_FORCEINLINE uint32_t __TZ_get_SP_NS(void)
1110 {
1111   uint32_t result;
1112 
1113   __ASM volatile ("MRS %0, sp_ns" : "=r" (result) );
1114   return (result);
1115 }
1116 
1117 
1118 /**
1119   \brief   Set Stack Pointer (non-secure)
1120   \details Assigns the given value to the non-secure Stack Pointer (SP) when in secure state.
1121   \param [in]    topOfStack  Stack Pointer value to set
1122  */
__TZ_set_SP_NS(uint32_t topOfStack)1123 __STATIC_FORCEINLINE void __TZ_set_SP_NS(uint32_t topOfStack)
1124 {
1125   __ASM volatile ("MSR sp_ns, %0" : : "r" (topOfStack) : );
1126 }
1127 #endif
1128 
1129 
1130 /**
1131   \brief   Get Priority Mask
1132   \details Returns the current state of the priority mask bit from the Priority Mask Register.
1133   \return               Priority Mask value
1134  */
__get_PRIMASK(void)1135 __STATIC_FORCEINLINE uint32_t __get_PRIMASK(void)
1136 {
1137   uint32_t result;
1138 
1139   __ASM volatile ("MRS %0, primask" : "=r" (result) );
1140   return (result);
1141 }
1142 
1143 
1144 #if defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3)
1145 /**
1146   \brief   Get Priority Mask (non-secure)
1147   \details Returns the current state of the non-secure priority mask bit from the Priority Mask Register when in secure state.
1148   \return               Priority Mask value
1149  */
__TZ_get_PRIMASK_NS(void)1150 __STATIC_FORCEINLINE uint32_t __TZ_get_PRIMASK_NS(void)
1151 {
1152   uint32_t result;
1153 
1154   __ASM volatile ("MRS %0, primask_ns" : "=r" (result) );
1155   return (result);
1156 }
1157 #endif
1158 
1159 
1160 /**
1161   \brief   Set Priority Mask
1162   \details Assigns the given value to the Priority Mask Register.
1163   \param [in]    priMask  Priority Mask
1164  */
__set_PRIMASK(uint32_t priMask)1165 __STATIC_FORCEINLINE void __set_PRIMASK(uint32_t priMask)
1166 {
1167   __ASM volatile ("MSR primask, %0" : : "r" (priMask) : "memory");
1168 }
1169 
1170 
1171 #if defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3)
1172 /**
1173   \brief   Set Priority Mask (non-secure)
1174   \details Assigns the given value to the non-secure Priority Mask Register when in secure state.
1175   \param [in]    priMask  Priority Mask
1176  */
__TZ_set_PRIMASK_NS(uint32_t priMask)1177 __STATIC_FORCEINLINE void __TZ_set_PRIMASK_NS(uint32_t priMask)
1178 {
1179   __ASM volatile ("MSR primask_ns, %0" : : "r" (priMask) : "memory");
1180 }
1181 #endif
1182 
1183 
1184 #if (__ARM_ARCH_ISA_THUMB >= 2)
1185 /**
1186   \brief   Enable FIQ
1187   \details Enables FIQ interrupts by clearing special-purpose register FAULTMASK.
1188            Can only be executed in Privileged modes.
1189  */
__enable_fault_irq(void)1190 __STATIC_FORCEINLINE void __enable_fault_irq(void)
1191 {
1192   __ASM volatile ("cpsie f" : : : "memory");
1193 }
1194 
1195 
1196 /**
1197   \brief   Disable FIQ
1198   \details Disables FIQ interrupts by setting special-purpose register FAULTMASK.
1199            Can only be executed in Privileged modes.
1200  */
__disable_fault_irq(void)1201 __STATIC_FORCEINLINE void __disable_fault_irq(void)
1202 {
1203   __ASM volatile ("cpsid f" : : : "memory");
1204 }
1205 
1206 
1207 /**
1208   \brief   Get Base Priority
1209   \details Returns the current value of the Base Priority register.
1210   \return               Base Priority register value
1211  */
__get_BASEPRI(void)1212 __STATIC_FORCEINLINE uint32_t __get_BASEPRI(void)
1213 {
1214   uint32_t result;
1215 
1216   __ASM volatile ("MRS %0, basepri" : "=r" (result) );
1217   return (result);
1218 }
1219 
1220 
1221 #if defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3)
1222 /**
1223   \brief   Get Base Priority (non-secure)
1224   \details Returns the current value of the non-secure Base Priority register when in secure state.
1225   \return               Base Priority register value
1226  */
__TZ_get_BASEPRI_NS(void)1227 __STATIC_FORCEINLINE uint32_t __TZ_get_BASEPRI_NS(void)
1228 {
1229   uint32_t result;
1230 
1231   __ASM volatile ("MRS %0, basepri_ns" : "=r" (result) );
1232   return (result);
1233 }
1234 #endif
1235 
1236 
1237 /**
1238   \brief   Set Base Priority
1239   \details Assigns the given value to the Base Priority register.
1240   \param [in]    basePri  Base Priority value to set
1241  */
__set_BASEPRI(uint32_t basePri)1242 __STATIC_FORCEINLINE void __set_BASEPRI(uint32_t basePri)
1243 {
1244   __ASM volatile ("MSR basepri, %0" : : "r" (basePri) : "memory");
1245 }
1246 
1247 
1248 #if defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3)
1249 /**
1250   \brief   Set Base Priority (non-secure)
1251   \details Assigns the given value to the non-secure Base Priority register when in secure state.
1252   \param [in]    basePri  Base Priority value to set
1253  */
__TZ_set_BASEPRI_NS(uint32_t basePri)1254 __STATIC_FORCEINLINE void __TZ_set_BASEPRI_NS(uint32_t basePri)
1255 {
1256   __ASM volatile ("MSR basepri_ns, %0" : : "r" (basePri) : "memory");
1257 }
1258 #endif
1259 
1260 
1261 /**
1262   \brief   Set Base Priority with condition
1263   \details Assigns the given value to the Base Priority register only if BASEPRI masking is disabled,
1264            or the new value increases the BASEPRI priority level.
1265   \param [in]    basePri  Base Priority value to set
1266  */
__set_BASEPRI_MAX(uint32_t basePri)1267 __STATIC_FORCEINLINE void __set_BASEPRI_MAX(uint32_t basePri)
1268 {
1269   __ASM volatile ("MSR basepri_max, %0" : : "r" (basePri) : "memory");
1270 }
1271 
1272 
1273 /**
1274   \brief   Get Fault Mask
1275   \details Returns the current value of the Fault Mask register.
1276   \return               Fault Mask register value
1277  */
__get_FAULTMASK(void)1278 __STATIC_FORCEINLINE uint32_t __get_FAULTMASK(void)
1279 {
1280   uint32_t result;
1281 
1282   __ASM volatile ("MRS %0, faultmask" : "=r" (result) );
1283   return (result);
1284 }
1285 
1286 
1287 #if defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3)
1288 /**
1289   \brief   Get Fault Mask (non-secure)
1290   \details Returns the current value of the non-secure Fault Mask register when in secure state.
1291   \return               Fault Mask register value
1292  */
__TZ_get_FAULTMASK_NS(void)1293 __STATIC_FORCEINLINE uint32_t __TZ_get_FAULTMASK_NS(void)
1294 {
1295   uint32_t result;
1296 
1297   __ASM volatile ("MRS %0, faultmask_ns" : "=r" (result) );
1298   return (result);
1299 }
1300 #endif
1301 
1302 
1303 /**
1304   \brief   Set Fault Mask
1305   \details Assigns the given value to the Fault Mask register.
1306   \param [in]    faultMask  Fault Mask value to set
1307  */
__set_FAULTMASK(uint32_t faultMask)1308 __STATIC_FORCEINLINE void __set_FAULTMASK(uint32_t faultMask)
1309 {
1310   __ASM volatile ("MSR faultmask, %0" : : "r" (faultMask) : "memory");
1311 }
1312 
1313 
1314 #if defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3)
1315 /**
1316   \brief   Set Fault Mask (non-secure)
1317   \details Assigns the given value to the non-secure Fault Mask register when in secure state.
1318   \param [in]    faultMask  Fault Mask value to set
1319  */
__TZ_set_FAULTMASK_NS(uint32_t faultMask)1320 __STATIC_FORCEINLINE void __TZ_set_FAULTMASK_NS(uint32_t faultMask)
1321 {
1322   __ASM volatile ("MSR faultmask_ns, %0" : : "r" (faultMask) : "memory");
1323 }
1324 #endif
1325 
1326 #endif /* (__ARM_ARCH_ISA_THUMB >= 2) */
1327 
1328 
1329 #if (__ARM_ARCH >= 8)
1330 /**
1331   \brief   Get Process Stack Pointer Limit
1332   Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
1333   Stack Pointer Limit register hence zero is returned always in non-secure
1334   mode.
1335 
1336   \details Returns the current value of the Process Stack Pointer Limit (PSPLIM).
1337   \return               PSPLIM Register value
1338  */
__get_PSPLIM(void)1339 __STATIC_FORCEINLINE uint32_t __get_PSPLIM(void)
1340 {
1341 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
1342       !(defined (__ARM_ARCH_8_1M_MAIN__ ) && (__ARM_ARCH_8_1M_MAIN__ == 1)) && \
1343        (!defined (__ARM_FEATURE_CMSE  ) || (__ARM_FEATURE_CMSE   < 3)))
1344   /* without main extensions, the non-secure PSPLIM is RAZ/WI */
1345   return (0U);
1346 #else
1347   uint32_t result;
1348   __ASM volatile ("MRS %0, psplim"  : "=r" (result) );
1349   return (result);
1350 #endif
1351 }
1352 
1353 #if defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3)
1354 /**
1355   \brief   Get Process Stack Pointer Limit (non-secure)
1356   Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
1357   Stack Pointer Limit register hence zero is returned always.
1358 
1359   \details Returns the current value of the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state.
1360   \return               PSPLIM Register value
1361  */
__TZ_get_PSPLIM_NS(void)1362 __STATIC_FORCEINLINE uint32_t __TZ_get_PSPLIM_NS(void)
1363 {
1364 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
1365       !(defined (__ARM_ARCH_8_1M_MAIN__ ) && (__ARM_ARCH_8_1M_MAIN__ == 1)))
1366   /* without main extensions, the non-secure PSPLIM is RAZ/WI */
1367   return (0U);
1368 #else
1369   uint32_t result;
1370   __ASM volatile ("MRS %0, psplim_ns"  : "=r" (result) );
1371   return (result);
1372 #endif
1373 }
1374 #endif
1375 
1376 
1377 /**
1378   \brief   Set Process Stack Pointer Limit
1379   Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
1380   Stack Pointer Limit register hence the write is silently ignored in non-secure
1381   mode.
1382 
1383   \details Assigns the given value to the Process Stack Pointer Limit (PSPLIM).
1384   \param [in]    ProcStackPtrLimit  Process Stack Pointer Limit value to set
1385  */
__set_PSPLIM(uint32_t ProcStackPtrLimit)1386 __STATIC_FORCEINLINE void __set_PSPLIM(uint32_t ProcStackPtrLimit)
1387 {
1388 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
1389       !(defined (__ARM_ARCH_8_1M_MAIN__ ) && (__ARM_ARCH_8_1M_MAIN__ == 1)) && \
1390        (!defined (__ARM_FEATURE_CMSE  ) || (__ARM_FEATURE_CMSE   < 3)))
1391   /* without main extensions, the non-secure PSPLIM is RAZ/WI */
1392   (void)ProcStackPtrLimit;
1393 #else
1394   __ASM volatile ("MSR psplim, %0" : : "r" (ProcStackPtrLimit));
1395 #endif
1396 }
1397 
1398 
1399 #if defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3)
1400 /**
1401   \brief   Set Process Stack Pointer (non-secure)
1402   Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
1403   Stack Pointer Limit register hence the write is silently ignored.
1404 
1405   \details Assigns the given value to the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state.
1406   \param [in]    ProcStackPtrLimit  Process Stack Pointer Limit value to set
1407  */
__TZ_set_PSPLIM_NS(uint32_t ProcStackPtrLimit)1408 __STATIC_FORCEINLINE void __TZ_set_PSPLIM_NS(uint32_t ProcStackPtrLimit)
1409 {
1410 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
1411       !(defined (__ARM_ARCH_8_1M_MAIN__ ) && (__ARM_ARCH_8_1M_MAIN__ == 1)))
1412   /* without main extensions, the non-secure PSPLIM is RAZ/WI */
1413   (void)ProcStackPtrLimit;
1414 #else
1415   __ASM volatile ("MSR psplim_ns, %0\n" : : "r" (ProcStackPtrLimit));
1416 #endif
1417 }
1418 #endif
1419 
1420 
1421 /**
1422   \brief   Get Main Stack Pointer Limit
1423   Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
1424   Stack Pointer Limit register hence zero is returned always.
1425 
1426   \details Returns the current value of the Main Stack Pointer Limit (MSPLIM).
1427   \return               MSPLIM Register value
1428  */
__get_MSPLIM(void)1429 __STATIC_FORCEINLINE uint32_t __get_MSPLIM(void)
1430 {
1431 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
1432       !(defined (__ARM_ARCH_8_1M_MAIN__ ) && (__ARM_ARCH_8_1M_MAIN__ == 1)) && \
1433        (!defined (__ARM_FEATURE_CMSE  ) || (__ARM_FEATURE_CMSE   < 3)))
1434   /* without main extensions, the non-secure MSPLIM is RAZ/WI */
1435   return (0U);
1436 #else
1437   uint32_t result;
1438   __ASM volatile ("MRS %0, msplim" : "=r" (result) );
1439   return (result);
1440 #endif
1441 }
1442 
1443 
1444 #if defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3)
1445 /**
1446   \brief   Get Main Stack Pointer Limit (non-secure)
1447   Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
1448   Stack Pointer Limit register hence zero is returned always.
1449 
1450   \details Returns the current value of the non-secure Main Stack Pointer Limit(MSPLIM) when in secure state.
1451   \return               MSPLIM Register value
1452  */
__TZ_get_MSPLIM_NS(void)1453 __STATIC_FORCEINLINE uint32_t __TZ_get_MSPLIM_NS(void)
1454 {
1455 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
1456       !(defined (__ARM_ARCH_8_1M_MAIN__ ) && (__ARM_ARCH_8_1M_MAIN__ == 1)))
1457   /* without main extensions, the non-secure MSPLIM is RAZ/WI */
1458   return (0U);
1459 #else
1460   uint32_t result;
1461   __ASM volatile ("MRS %0, msplim_ns" : "=r" (result) );
1462   return (result);
1463 #endif
1464 }
1465 #endif
1466 
1467 
1468 /**
1469   \brief   Set Main Stack Pointer Limit
1470   Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
1471   Stack Pointer Limit register hence the write is silently ignored.
1472 
1473   \details Assigns the given value to the Main Stack Pointer Limit (MSPLIM).
1474   \param [in]    MainStackPtrLimit  Main Stack Pointer Limit value to set
1475  */
__set_MSPLIM(uint32_t MainStackPtrLimit)1476 __STATIC_FORCEINLINE void __set_MSPLIM(uint32_t MainStackPtrLimit)
1477 {
1478 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
1479       !(defined (__ARM_ARCH_8_1M_MAIN__ ) && (__ARM_ARCH_8_1M_MAIN__ == 1)) && \
1480        (!defined (__ARM_FEATURE_CMSE  ) || (__ARM_FEATURE_CMSE   < 3)))
1481   /* without main extensions, the non-secure MSPLIM is RAZ/WI */
1482   (void)MainStackPtrLimit;
1483 #else
1484   __ASM volatile ("MSR msplim, %0" : : "r" (MainStackPtrLimit));
1485 #endif
1486 }
1487 
1488 
1489 #if defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3)
1490 /**
1491   \brief   Set Main Stack Pointer Limit (non-secure)
1492   Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
1493   Stack Pointer Limit register hence the write is silently ignored.
1494 
1495   \details Assigns the given value to the non-secure Main Stack Pointer Limit (MSPLIM) when in secure state.
1496   \param [in]    MainStackPtrLimit  Main Stack Pointer value to set
1497  */
__TZ_set_MSPLIM_NS(uint32_t MainStackPtrLimit)1498 __STATIC_FORCEINLINE void __TZ_set_MSPLIM_NS(uint32_t MainStackPtrLimit)
1499 {
1500 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
1501       !(defined (__ARM_ARCH_8_1M_MAIN__ ) && (__ARM_ARCH_8_1M_MAIN__ == 1)))
1502   /* without main extensions, the non-secure MSPLIM is RAZ/WI */
1503   (void)MainStackPtrLimit;
1504 #else
1505   __ASM volatile ("MSR msplim_ns, %0" : : "r" (MainStackPtrLimit));
1506 #endif
1507 }
1508 #endif
1509 
1510 #endif /* (__ARM_ARCH >= 8) */
1511 
1512 
1513 /**
1514   \brief   Get FPSCR
1515   \details Returns the current value of the Floating Point Status/Control register.
1516   \return               Floating Point Status/Control register value
1517  */
__get_FPSCR(void)1518 __STATIC_FORCEINLINE uint32_t __get_FPSCR(void)
1519 {
1520 #if (defined(__ARM_FP) && (__ARM_FP >= 1))
1521   return (__builtin_arm_get_fpscr());
1522 #else
1523   return (0U);
1524 #endif
1525 }
1526 
1527 
1528 /**
1529   \brief   Set FPSCR
1530   \details Assigns the given value to the Floating Point Status/Control register.
1531   \param [in]    fpscr  Floating Point Status/Control value to set
1532  */
__set_FPSCR(uint32_t fpscr)1533 __STATIC_FORCEINLINE void __set_FPSCR(uint32_t fpscr)
1534 {
1535 #if (defined(__ARM_FP) && (__ARM_FP >= 1))
1536   __builtin_arm_set_fpscr(fpscr);
1537 #else
1538   (void)fpscr;
1539 #endif
1540 }
1541 
1542 
1543 /** @} end of CMSIS_Core_RegAccFunctions */
1544 
1545 
1546 /* ###################  Compiler specific Intrinsics  ########################### */
1547 /** \defgroup CMSIS_SIMD_intrinsics CMSIS SIMD Intrinsics
1548   Access to dedicated SIMD instructions
1549   @{
1550 */
1551 
1552 #if (__ARM_FEATURE_DSP == 1)
1553 #define     __SADD8                 __sadd8
1554 #define     __QADD8                 __qadd8
1555 #define     __SHADD8                __shadd8
1556 #define     __UADD8                 __uadd8
1557 #define     __UQADD8                __uqadd8
1558 #define     __UHADD8                __uhadd8
1559 #define     __SSUB8                 __ssub8
1560 #define     __QSUB8                 __qsub8
1561 #define     __SHSUB8                __shsub8
1562 #define     __USUB8                 __usub8
1563 #define     __UQSUB8                __uqsub8
1564 #define     __UHSUB8                __uhsub8
1565 #define     __SADD16                __sadd16
1566 #define     __QADD16                __qadd16
1567 #define     __SHADD16               __shadd16
1568 #define     __UADD16                __uadd16
1569 #define     __UQADD16               __uqadd16
1570 #define     __UHADD16               __uhadd16
1571 #define     __SSUB16                __ssub16
1572 #define     __QSUB16                __qsub16
1573 #define     __SHSUB16               __shsub16
1574 #define     __USUB16                __usub16
1575 #define     __UQSUB16               __uqsub16
1576 #define     __UHSUB16               __uhsub16
1577 #define     __SASX                  __sasx
1578 #define     __QASX                  __qasx
1579 #define     __SHASX                 __shasx
1580 #define     __UASX                  __uasx
1581 #define     __UQASX                 __uqasx
1582 #define     __UHASX                 __uhasx
1583 #define     __SSAX                  __ssax
1584 #define     __QSAX                  __qsax
1585 #define     __SHSAX                 __shsax
1586 #define     __USAX                  __usax
1587 #define     __UQSAX                 __uqsax
1588 #define     __UHSAX                 __uhsax
1589 #define     __USAD8                 __usad8
1590 #define     __USADA8                __usada8
1591 #define     __SSAT16                __ssat16
1592 #define     __USAT16                __usat16
1593 #define     __UXTB16                __uxtb16
1594 #define     __UXTAB16               __uxtab16
1595 #define     __SXTB16                __sxtb16
1596 #define     __SXTAB16               __sxtab16
1597 #define     __SMUAD                 __smuad
1598 #define     __SMUADX                __smuadx
1599 #define     __SMLAD                 __smlad
1600 #define     __SMLADX                __smladx
1601 #define     __SMLALD                __smlald
1602 #define     __SMLALDX               __smlaldx
1603 #define     __SMUSD                 __smusd
1604 #define     __SMUSDX                __smusdx
1605 #define     __SMLSD                 __smlsd
1606 #define     __SMLSDX                __smlsdx
1607 #define     __SMLSLD                __smlsld
1608 #define     __SMLSLDX               __smlsldx
1609 #define     __SEL                   __sel
1610 #define     __QADD                  __qadd
1611 #define     __QSUB                  __qsub
1612 
1613 #define __PKHBT(ARG1,ARG2,ARG3) \
1614 __extension__ \
1615 ({                          \
1616   uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
1617   __ASM ("pkhbt %0, %1, %2, lsl %3" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2), "I" (ARG3)  ); \
1618   __RES; \
1619  })
1620 
1621 #define __PKHTB(ARG1,ARG2,ARG3) \
1622 __extension__ \
1623 ({                          \
1624   uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
1625   if (ARG3 == 0) \
1626     __ASM ("pkhtb %0, %1, %2" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2)  ); \
1627   else \
1628     __ASM ("pkhtb %0, %1, %2, asr %3" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2), "I" (ARG3)  ); \
1629   __RES; \
1630  })
1631 
__SXTB16_RORn(uint32_t op1,uint32_t rotate)1632 __STATIC_FORCEINLINE uint32_t __SXTB16_RORn(uint32_t op1, uint32_t rotate)
1633 {
1634     uint32_t result;
1635     if (__builtin_constant_p(rotate) && ((rotate == 8U) || (rotate == 16U) || (rotate == 24U)))
1636     {
1637         __ASM volatile("sxtb16 %0, %1, ROR %2" : "=r"(result) : "r"(op1), "i"(rotate));
1638     }
1639     else
1640     {
1641         result = __SXTB16(__ROR(op1, rotate));
1642     }
1643     return result;
1644 }
1645 
__SXTAB16_RORn(uint32_t op1,uint32_t op2,uint32_t rotate)1646 __STATIC_FORCEINLINE uint32_t __SXTAB16_RORn(uint32_t op1, uint32_t op2, uint32_t rotate)
1647 {
1648     uint32_t result;
1649     if (__builtin_constant_p(rotate) && ((rotate == 8U) || (rotate == 16U) || (rotate == 24U)))
1650     {
1651         __ASM volatile("sxtab16 %0, %1, %2, ROR %3" : "=r"(result) : "r"(op1), "r"(op2), "i"(rotate));
1652     }
1653     else
1654     {
1655         result = __SXTAB16(op1, __ROR(op2, rotate));
1656     }
1657     return result;
1658 }
1659 
__SMMLA(int32_t op1,int32_t op2,int32_t op3)1660 __STATIC_FORCEINLINE int32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
1661 {
1662   int32_t result;
1663 
1664   __ASM volatile ("smmla %0, %1, %2, %3" : "=r" (result): "r"  (op1), "r" (op2), "r" (op3) );
1665   return (result);
1666 }
1667 
1668 #endif /* (__ARM_FEATURE_DSP == 1) */
1669 /** @} end of group CMSIS_SIMD_intrinsics */
1670 
1671 
1672 #pragma GCC diagnostic pop
1673 
1674 #endif /* __CMSIS_GCC_M_H */
1675