1 /*
2 * Copyright (c) 2023-2024 Arm Limited. All rights reserved.
3 *
4 * SPDX-License-Identifier: Apache-2.0
5 *
6 * Licensed under the Apache License, Version 2.0 (the License); you may
7 * not use this file except in compliance with the License.
8 * You may obtain a copy of the License at
9 *
10 * www.apache.org/licenses/LICENSE-2.0
11 *
12 * Unless required by applicable law or agreed to in writing, software
13 * distributed under the License is distributed on an AS IS BASIS, WITHOUT
14 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 * See the License for the specific language governing permissions and
16 * limitations under the License.
17 */
18
19 /*
20 * CMSIS-Core(M) Compiler TIARMClang Header File
21 */
22
#ifndef __CMSIS_TIARMCLANG_M_H
#define __CMSIS_TIARMCLANG_M_H

#pragma clang system_header   /* treat file as system include file */

/* All intrinsics in this header rely on ACLE v2.0 builtins (__nop, __ssat, ...). */
#if (__ARM_ACLE >= 200)
  #include <arm_acle.h>
#else
  #error Compiler must support ACLE V2.0
#endif /* (__ARM_ACLE >= 200) */

/* CMSIS compiler specific defines */
#ifndef   __ASM
  #define __ASM                                  __asm
#endif
#ifndef   __INLINE
  #define __INLINE                               __inline
#endif
#ifndef   __STATIC_INLINE
  #define __STATIC_INLINE                        static __inline
#endif
#ifndef   __STATIC_FORCEINLINE
  #define __STATIC_FORCEINLINE                   __attribute__((always_inline)) static __inline
#endif
#ifndef   __NO_RETURN
  #define __NO_RETURN                            __attribute__((__noreturn__))
#endif
#ifndef   __USED
  #define __USED                                 __attribute__((used))
#endif
#ifndef   __WEAK
  #define __WEAK                                 __attribute__((weak))
#endif
/* packed + aligned(1): removes padding AND drops the type's natural alignment,
   so these types may be placed at any byte address. */
#ifndef   __PACKED
  #define __PACKED                               __attribute__((packed, aligned(1)))
#endif
#ifndef   __PACKED_STRUCT
  #define __PACKED_STRUCT                        struct __attribute__((packed, aligned(1)))
#endif
#ifndef   __PACKED_UNION
  #define __PACKED_UNION                         union __attribute__((packed, aligned(1)))
#endif
/* Unaligned access helpers: wrap the value in a packed single-member struct so the
   compiler emits an access that is legal at any alignment. */
#ifndef   __UNALIGNED_UINT16_WRITE
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
  __PACKED_STRUCT T_UINT16_WRITE { uint16_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT16_WRITE(addr, val)    (void)((((struct T_UINT16_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef   __UNALIGNED_UINT16_READ
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
  __PACKED_STRUCT T_UINT16_READ { uint16_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT16_READ(addr)          (((const struct T_UINT16_READ *)(const void *)(addr))->v)
#endif
#ifndef   __UNALIGNED_UINT32_WRITE
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
  __PACKED_STRUCT T_UINT32_WRITE { uint32_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT32_WRITE(addr, val)    (void)((((struct T_UINT32_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef   __UNALIGNED_UINT32_READ
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
  __PACKED_STRUCT T_UINT32_READ { uint32_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT32_READ(addr)          (((const struct T_UINT32_READ *)(const void *)(addr))->v)
#endif
#ifndef   __ALIGNED
  #define __ALIGNED(x)                           __attribute__((aligned(x)))
#endif
#ifndef   __RESTRICT
  #define __RESTRICT                             __restrict
#endif
/* Empty asm with a "memory" clobber: compiler-level barrier only, emits no instruction. */
#ifndef   __COMPILER_BARRIER
  #define __COMPILER_BARRIER()                   __ASM volatile("":::"memory")
#endif
/* Place object in the .noinit section, i.e. it is not zero/value initialized at startup. */
#ifndef   __NO_INIT
  #define __NO_INIT                              __attribute__ ((section (".noinit")))
#endif
#ifndef   __ALIAS
  #define __ALIAS(x)                             __attribute__ ((alias(x)))
#endif
108
/* #########################  Startup and Lowlevel Init  ######################## */
/* Program entry point: _c_int00 is the TI runtime boot routine. */
#ifndef __PROGRAM_START
#define __PROGRAM_START           _c_int00
#endif

#ifndef __INITIAL_SP
#define __INITIAL_SP              __STACK_END
#endif

#ifndef __STACK_LIMIT
#define __STACK_LIMIT             __STACK_SIZE
#endif

#ifndef __VECTOR_TABLE
#define __VECTOR_TABLE            __Vectors
#endif

#ifndef __VECTOR_TABLE_ATTRIBUTE
#define __VECTOR_TABLE_ATTRIBUTE  __attribute__((used, section(".intvecs")))
#endif

/* __ARM_FEATURE_CMSE == 3: compiling for the secure state with CMSE enabled */
#if (__ARM_FEATURE_CMSE == 3)
#ifndef __STACK_SEAL
#define __STACK_SEAL              Image$$STACKSEAL$$ZI$$Base
#endif

#ifndef __TZ_STACK_SEAL_SIZE
#define __TZ_STACK_SEAL_SIZE      8U
#endif

#ifndef __TZ_STACK_SEAL_VALUE
#define __TZ_STACK_SEAL_VALUE     0xFEF5EDA5FEF5EDA5ULL
#endif


/**
  \brief   Set the 64-bit stack seal value at the given secure stack location.
  \param [in] stackTop  Pointer to the stack seal location (8 bytes are written).
  \note  Performs a single 64-bit store; assumes stackTop is suitably (8-byte)
         aligned for a uint64_t access — TODO confirm at call sites.
 */
__STATIC_FORCEINLINE void __TZ_set_STACKSEAL_S (uint32_t* stackTop) {
  *((uint64_t *)stackTop) = __TZ_STACK_SEAL_VALUE;
}
#endif
148
149
/* ##########################  Core Instruction Access  ######################### */
/** \defgroup CMSIS_Core_InstructionInterface CMSIS Core Instruction Interface
  Access to dedicated instructions
  @{
*/

/* Define macros for porting to both thumb1 and thumb2.
 * For thumb1, use low register (r0-r7), specified by constraint "l"
 * Otherwise, use general registers, specified by constraint "r" */
#if defined (__thumb__) && !defined (__thumb2__)
#define __CMSIS_GCC_OUT_REG(r) "=l" (r)
#define __CMSIS_GCC_RW_REG(r) "+l" (r)
#define __CMSIS_GCC_USE_REG(r) "l" (r)
#else
#define __CMSIS_GCC_OUT_REG(r) "=r" (r)
#define __CMSIS_GCC_RW_REG(r) "+r" (r)
#define __CMSIS_GCC_USE_REG(r) "r" (r)
#endif

/**
  \brief   No Operation
  \details No Operation does nothing. This instruction can be used for code alignment purposes.
 */
#define __NOP()        __nop()


/**
  \brief   Wait For Interrupt
  \details Wait For Interrupt is a hint instruction that suspends execution until one of a number of events occurs.
 */
#define __WFI()        __wfi()


/**
  \brief   Wait For Event
  \details Wait For Event is a hint instruction that permits the processor to enter
           a low-power state until one of a number of events occurs.
 */
#define __WFE()        __wfe()


/**
  \brief   Send Event
  \details Send Event is a hint instruction. It causes an event to be signaled to the CPU.
 */
#define __SEV()        __sev()


/**
  \brief   Instruction Synchronization Barrier
  \details Instruction Synchronization Barrier flushes the pipeline in the processor,
           so that all instructions following the ISB are fetched from cache or memory,
           after the instruction has been completed.
 */
#define __ISB()        __isb(0xF)


/**
  \brief   Data Synchronization Barrier
  \details Acts as a special kind of Data Memory Barrier.
           It completes when all explicit memory accesses before this instruction complete.
 */
#define __DSB()        __dsb(0xF)


/**
  \brief   Data Memory Barrier
  \details Ensures the apparent order of the explicit memory operations before
           and after the instruction, without ensuring their completion.
 */
#define __DMB()        __dmb(0xF)


/**
  \brief   Reverse byte order (32 bit)
  \details Reverses the byte order in unsigned integer value. For example, 0x12345678 becomes 0x78563412.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#define __REV(value)   __rev(value)


/**
  \brief   Reverse byte order (16 bit)
  \details Reverses the byte order within each halfword of a word. For example, 0x12345678 becomes 0x34127856.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#define __REV16(value) __rev16(value)


/**
  \brief   Reverse byte order (16 bit)
  \details Reverses the byte order in a 16-bit value and returns the signed 16-bit result. For example, 0x0080 becomes 0x8000.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#define __REVSH(value) __revsh(value)


/**
  \brief   Rotate Right in unsigned value (32 bit)
  \details Rotate Right (immediate) provides the value of the contents of a register rotated by a variable number of bits.
  \param [in]    op1  Value to rotate
  \param [in]    op2  Number of Bits to rotate
  \return               Rotated value
 */
#define __ROR(op1, op2) __ror(op1, op2)


/**
  \brief   Breakpoint
  \details Causes the processor to enter Debug state.
           Debug tools can use this to investigate system state when the instruction at a particular address is reached.
  \param [in]    value  is ignored by the processor.
                 If required, a debugger can use it to store additional information about the breakpoint.
 */
#define __BKPT(value)  __ASM volatile ("bkpt "#value)


/**
  \brief   Reverse bit order of value
  \details Reverses the bit order of the given value.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#define __RBIT(value)  __rbit(value)


/**
  \brief   Count leading zeros
  \details Counts the number of leading zeros of a data value.
  \param [in]  value  Value to count the leading zeros
  \return             number of leading zeros in value
 */
#define __CLZ(value)   __clz(value)


/* __ARM_FEATURE_SAT is wrong for Armv8-M Baseline devices */
#if ((__ARM_FEATURE_SAT    >= 1) && \
     (__ARM_ARCH_ISA_THUMB >= 2)    )
/**
  \brief   Signed Saturate
  \details Saturates a signed value.
  \param [in]  value  Value to be saturated
  \param [in]    sat  Bit position to saturate to (1..32)
  \return             Saturated value
 */
#define __SSAT(value, sat) __ssat(value, sat)


/**
  \brief   Unsigned Saturate
  \details Saturates an unsigned value.
  \param [in]  value  Value to be saturated
  \param [in]    sat  Bit position to saturate to (0..31)
  \return             Saturated value
 */
#define __USAT(value, sat) __usat(value, sat)

#else /* (__ARM_FEATURE_SAT >= 1) */
/**
  \brief   Signed Saturate
  \details Saturates a signed value.
  \param [in]  value  Value to be saturated
  \param [in]    sat  Bit position to saturate to (1..32)
  \return             Saturated value
 */
__SSAT(int32_t val,uint32_t sat)318 __STATIC_FORCEINLINE int32_t __SSAT(int32_t val, uint32_t sat)
319 {
320 if ((sat >= 1U) && (sat <= 32U))
321 {
322 const int32_t max = (int32_t)((1U << (sat - 1U)) - 1U);
323 const int32_t min = -1 - max ;
324 if (val > max)
325 {
326 return (max);
327 }
328 else if (val < min)
329 {
330 return (min);
331 }
332 }
333 return (val);
334 }
335
336
337 /**
338 \brief Unsigned Saturate
339 \details Saturates an unsigned value.
340 \param [in] value Value to be saturated
341 \param [in] sat Bit position to saturate to (0..31)
342 \return Saturated value
343 */
__USAT(int32_t val,uint32_t sat)344 __STATIC_FORCEINLINE uint32_t __USAT(int32_t val, uint32_t sat)
345 {
346 if (sat <= 31U)
347 {
348 const uint32_t max = ((1U << sat) - 1U);
349 if (val > (int32_t)max)
350 {
351 return (max);
352 }
353 else if (val < 0)
354 {
355 return (0U);
356 }
357 }
358 return ((uint32_t)val);
359 }
360 #endif /* (__ARM_FEATURE_SAT >= 1) */
361
362
#if (__ARM_FEATURE_LDREX >= 1)
/**
  \brief   Remove the exclusive lock
  \details Removes the exclusive lock which is created by LDREX.
 */
#define __CLREX             __builtin_arm_clrex


/**
  \brief   LDR Exclusive (8 bit)
  \details Executes a exclusive LDR instruction for 8 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
  \note  The builtin selects the access width from the pointer argument's type;
         the cast narrows the builtin's result — presumably ACLE semantics, verify
         against the compiler's builtin documentation.
 */
#define __LDREXB        (uint8_t)__builtin_arm_ldrex


/**
  \brief   STR Exclusive (8 bit)
  \details Executes a exclusive STR instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#define __STREXB        (uint32_t)__builtin_arm_strex
#endif /* (__ARM_FEATURE_LDREX >= 1) */


#if (__ARM_FEATURE_LDREX >= 2)
/**
  \brief   LDR Exclusive (16 bit)
  \details Executes a exclusive LDR instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint16_t at (*ptr)
 */
#define __LDREXH        (uint16_t)__builtin_arm_ldrex


/**
  \brief   STR Exclusive (16 bit)
  \details Executes a exclusive STR instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#define __STREXH        (uint32_t)__builtin_arm_strex
#endif /* (__ARM_FEATURE_LDREX >= 2) */


#if (__ARM_FEATURE_LDREX >= 4)
/**
  \brief   LDR Exclusive (32 bit)
  \details Executes a exclusive LDR instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint32_t at (*ptr)
 */
#define __LDREXW        (uint32_t)__builtin_arm_ldrex


/**
  \brief   STR Exclusive (32 bit)
  \details Executes a exclusive STR instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#define __STREXW        (uint32_t)__builtin_arm_strex
#endif /* (__ARM_FEATURE_LDREX >= 4) */
434
435
#if (__ARM_ARCH_ISA_THUMB >= 2)
/**
  \brief   Rotate Right with Extend (32 bit)
  \details Moves each bit of a bitstring right by one bit.
           The carry input is shifted in at the left end of the bitstring.
  \param [in]    value  Value to rotate
  \return               Rotated value
 */
__STATIC_FORCEINLINE uint32_t __RRX(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("rrx %0, %1" : "=r" (result) : "r" (value));
  return (result);
}


/**
  \brief   LDRT Unprivileged (8 bit)
  \details Executes a Unprivileged LDRT instruction for 8 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
__STATIC_FORCEINLINE uint8_t __LDRBT(volatile uint8_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint8_t)result);    /* Add explicit type cast here */
}


/**
  \brief   LDRT Unprivileged (16 bit)
  \details Executes a Unprivileged LDRT instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint16_t at (*ptr)
 */
__STATIC_FORCEINLINE uint16_t __LDRHT(volatile uint16_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint16_t)result);   /* Add explicit type cast here */
}


/**
  \brief   LDRT Unprivileged (32 bit)
  \details Executes a Unprivileged LDRT instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint32_t at (*ptr)
 */
__STATIC_FORCEINLINE uint32_t __LDRT(volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*ptr) );
  return (result);
}


/**
  \brief   STRT Unprivileged (8 bit)
  \details Executes a Unprivileged STRT instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STRBT(uint8_t value, volatile uint8_t *ptr)
{
  __ASM volatile ("strbt %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


/**
  \brief   STRT Unprivileged (16 bit)
  \details Executes a Unprivileged STRT instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STRHT(uint16_t value, volatile uint16_t *ptr)
{
  __ASM volatile ("strht %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


/**
  \brief   STRT Unprivileged (32 bit)
  \details Executes a Unprivileged STRT instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STRT(uint32_t value, volatile uint32_t *ptr)
{
  __ASM volatile ("strt %1, %0" : "=Q" (*ptr) : "r" (value) );
}
#endif /* (__ARM_ARCH_ISA_THUMB >= 2) */
533
534
#if (__ARM_ARCH >= 8)
/**
  \brief   Load-Acquire (8 bit)
  \details Executes a LDAB instruction for 8 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
  \note  The "memory" clobber keeps the compiler from moving other memory
         accesses across the acquire operation.
 */
__STATIC_FORCEINLINE uint8_t __LDAB(volatile uint8_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldab %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
  return ((uint8_t)result);    /* Add explicit type cast here */
}


/**
  \brief   Load-Acquire (16 bit)
  \details Executes a LDAH instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint16_t at (*ptr)
 */
__STATIC_FORCEINLINE uint16_t __LDAH(volatile uint16_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldah %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
  return ((uint16_t)result);   /* Add explicit type cast here */
}


/**
  \brief   Load-Acquire (32 bit)
  \details Executes a LDA instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint32_t at (*ptr)
 */
__STATIC_FORCEINLINE uint32_t __LDA(volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("lda %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
  return (result);
}


/**
  \brief   Store-Release (8 bit)
  \details Executes a STLB instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STLB(uint8_t value, volatile uint8_t *ptr)
{
  __ASM volatile ("stlb %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
}


/**
  \brief   Store-Release (16 bit)
  \details Executes a STLH instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STLH(uint16_t value, volatile uint16_t *ptr)
{
  __ASM volatile ("stlh %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
}


/**
  \brief   Store-Release (32 bit)
  \details Executes a STL instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STL(uint32_t value, volatile uint32_t *ptr)
{
  __ASM volatile ("stl %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
}


/**
  \brief   Load-Acquire Exclusive (8 bit)
  \details Executes a LDAB exclusive instruction for 8 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
#define     __LDAEXB                 (uint8_t)__builtin_arm_ldaex


/**
  \brief   Load-Acquire Exclusive (16 bit)
  \details Executes a LDAH exclusive instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint16_t at (*ptr)
 */
#define     __LDAEXH                 (uint16_t)__builtin_arm_ldaex


/**
  \brief   Load-Acquire Exclusive (32 bit)
  \details Executes a LDA exclusive instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint32_t at (*ptr)
 */
#define     __LDAEX                  (uint32_t)__builtin_arm_ldaex


/**
  \brief   Store-Release Exclusive (8 bit)
  \details Executes a STLB exclusive instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#define     __STLEXB                 (uint32_t)__builtin_arm_stlex


/**
  \brief   Store-Release Exclusive (16 bit)
  \details Executes a STLH exclusive instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#define     __STLEXH                 (uint32_t)__builtin_arm_stlex


/**
  \brief   Store-Release Exclusive (32 bit)
  \details Executes a STL exclusive instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#define     __STLEX                  (uint32_t)__builtin_arm_stlex

#endif /* (__ARM_ARCH >= 8) */
677
/** @}*/ /* end of group CMSIS_Core_InstructionInterface */


/* ###########################  Core Function Access  ########################### */
/** \ingroup  CMSIS_Core_FunctionInterface
    \defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions
  @{
 */

/**
  \brief   Enable IRQ Interrupts
  \details Enables IRQ interrupts by clearing special-purpose register PRIMASK.
           Can only be executed in Privileged modes.
 */
/* Guarded because arm_compat.h (if included) already declares __enable_irq —
   TODO confirm against the toolchain's arm_compat.h. */
#ifndef __ARM_COMPAT_H
__STATIC_FORCEINLINE void __enable_irq(void)
{
  __ASM volatile ("cpsie i" : : : "memory");
}
#endif


/**
  \brief   Disable IRQ Interrupts
  \details Disables IRQ interrupts by setting special-purpose register PRIMASK.
           Can only be executed in Privileged modes.
 */
#ifndef __ARM_COMPAT_H
__STATIC_FORCEINLINE void __disable_irq(void)
{
  __ASM volatile ("cpsid i" : : : "memory");
}
#endif


/**
  \brief   Get Control Register
  \details Returns the content of the Control Register.
  \return               Control Register value
 */
__STATIC_FORCEINLINE uint32_t __get_CONTROL(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, control" : "=r" (result) );
  return (result);
}


#if (__ARM_FEATURE_CMSE == 3)
/**
  \brief   Get Control Register (non-secure)
  \details Returns the content of the non-secure Control Register when in secure mode.
  \return               non-secure Control Register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_CONTROL_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, control_ns" : "=r" (result) );
  return (result);
}
#endif


/**
  \brief   Set Control Register
  \details Writes the given value to the Control Register.
  \param [in]    control  Control Register value to set
 */
__STATIC_FORCEINLINE void __set_CONTROL(uint32_t control)
{
  __ASM volatile ("MSR control, %0" : : "r" (control) : "memory");
  /* ISB so the CONTROL update takes effect before subsequent instructions */
  __ISB();
}


#if (__ARM_FEATURE_CMSE == 3)
/**
  \brief   Set Control Register (non-secure)
  \details Writes the given value to the non-secure Control Register when in secure state.
  \param [in]    control  Control Register value to set
 */
__STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control)
{
  __ASM volatile ("MSR control_ns, %0" : : "r" (control) : "memory");
  __ISB();
}
#endif
767
768
/**
  \brief   Get IPSR Register
  \details Returns the content of the IPSR Register.
  \return               IPSR Register value
 */
__STATIC_FORCEINLINE uint32_t __get_IPSR(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, ipsr" : "=r" (result) );
  return (result);
}


/**
  \brief   Get APSR Register
  \details Returns the content of the APSR Register.
  \return               APSR Register value
 */
__STATIC_FORCEINLINE uint32_t __get_APSR(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, apsr" : "=r" (result) );
  return (result);
}


/**
  \brief   Get xPSR Register
  \details Returns the content of the xPSR Register.
  \return               xPSR Register value
 */
__STATIC_FORCEINLINE uint32_t __get_xPSR(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, xpsr" : "=r" (result) );
  return (result);
}


/**
  \brief   Get Process Stack Pointer
  \details Returns the current value of the Process Stack Pointer (PSP).
  \return               PSP Register value
 */
__STATIC_FORCEINLINE uint32_t __get_PSP(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, psp" : "=r" (result) );
  return (result);
}


#if (__ARM_FEATURE_CMSE == 3)
/**
  \brief   Get Process Stack Pointer (non-secure)
  \details Returns the current value of the non-secure Process Stack Pointer (PSP) when in secure state.
  \return               PSP Register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_PSP_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, psp_ns" : "=r" (result) );
  return (result);
}
#endif


/**
  \brief   Set Process Stack Pointer
  \details Assigns the given value to the Process Stack Pointer (PSP).
  \param [in]    topOfProcStack  Process Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __set_PSP(uint32_t topOfProcStack)
{
  __ASM volatile ("MSR psp, %0" : : "r" (topOfProcStack) : );
}


#if (__ARM_FEATURE_CMSE == 3)
/**
  \brief   Set Process Stack Pointer (non-secure)
  \details Assigns the given value to the non-secure Process Stack Pointer (PSP) when in secure state.
  \param [in]    topOfProcStack  Process Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __TZ_set_PSP_NS(uint32_t topOfProcStack)
{
  __ASM volatile ("MSR psp_ns, %0" : : "r" (topOfProcStack) : );
}
#endif
863
864
/**
  \brief   Get Main Stack Pointer
  \details Returns the current value of the Main Stack Pointer (MSP).
  \return               MSP Register value
 */
__STATIC_FORCEINLINE uint32_t __get_MSP(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, msp" : "=r" (result) );
  return (result);
}


#if (__ARM_FEATURE_CMSE == 3)
/**
  \brief   Get Main Stack Pointer (non-secure)
  \details Returns the current value of the non-secure Main Stack Pointer (MSP) when in secure state.
  \return               MSP Register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_MSP_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, msp_ns" : "=r" (result) );
  return (result);
}
#endif


/**
  \brief   Set Main Stack Pointer
  \details Assigns the given value to the Main Stack Pointer (MSP).
  \param [in]    topOfMainStack  Main Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __set_MSP(uint32_t topOfMainStack)
{
  __ASM volatile ("MSR msp, %0" : : "r" (topOfMainStack) : );
}


#if (__ARM_FEATURE_CMSE == 3)
/**
  \brief   Set Main Stack Pointer (non-secure)
  \details Assigns the given value to the non-secure Main Stack Pointer (MSP) when in secure state.
  \param [in]    topOfMainStack  Main Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __TZ_set_MSP_NS(uint32_t topOfMainStack)
{
  __ASM volatile ("MSR msp_ns, %0" : : "r" (topOfMainStack) : );
}
#endif


#if (__ARM_FEATURE_CMSE == 3)
/**
  \brief   Get Stack Pointer (non-secure)
  \details Returns the current value of the non-secure Stack Pointer (SP) when in secure state.
  \return               SP Register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_SP_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, sp_ns" : "=r" (result) );
  return (result);
}


/**
  \brief   Set Stack Pointer (non-secure)
  \details Assigns the given value to the non-secure Stack Pointer (SP) when in secure state.
  \param [in]    topOfStack  Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __TZ_set_SP_NS(uint32_t topOfStack)
{
  __ASM volatile ("MSR sp_ns, %0" : : "r" (topOfStack) : );
}
#endif


/**
  \brief   Get Priority Mask
  \details Returns the current state of the priority mask bit from the Priority Mask Register.
  \return               Priority Mask value
 */
__STATIC_FORCEINLINE uint32_t __get_PRIMASK(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, primask" : "=r" (result) );
  return (result);
}


#if (__ARM_FEATURE_CMSE == 3)
/**
  \brief   Get Priority Mask (non-secure)
  \details Returns the current state of the non-secure priority mask bit from the Priority Mask Register when in secure state.
  \return               Priority Mask value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_PRIMASK_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, primask_ns" : "=r" (result) );
  return (result);
}
#endif


/**
  \brief   Set Priority Mask
  \details Assigns the given value to the Priority Mask Register.
  \param [in]    priMask  Priority Mask
 */
__STATIC_FORCEINLINE void __set_PRIMASK(uint32_t priMask)
{
  __ASM volatile ("MSR primask, %0" : : "r" (priMask) : "memory");
}


#if (__ARM_FEATURE_CMSE == 3)
/**
  \brief   Set Priority Mask (non-secure)
  \details Assigns the given value to the non-secure Priority Mask Register when in secure state.
  \param [in]    priMask  Priority Mask
 */
__STATIC_FORCEINLINE void __TZ_set_PRIMASK_NS(uint32_t priMask)
{
  __ASM volatile ("MSR primask_ns, %0" : : "r" (priMask) : "memory");
}
#endif
998
999
1000 #if (__ARM_ARCH_ISA_THUMB >= 2)
1001 /**
1002 \brief Enable FIQ
1003 \details Enables FIQ interrupts by clearing special-purpose register FAULTMASK.
1004 Can only be executed in Privileged modes.
1005 */
__enable_fault_irq(void)1006 __STATIC_FORCEINLINE void __enable_fault_irq(void)
1007 {
1008 __ASM volatile ("cpsie f" : : : "memory");
1009 }
1010
1011
1012 /**
1013 \brief Disable FIQ
1014 \details Disables FIQ interrupts by setting special-purpose register FAULTMASK.
1015 Can only be executed in Privileged modes.
1016 */
__disable_fault_irq(void)1017 __STATIC_FORCEINLINE void __disable_fault_irq(void)
1018 {
1019 __ASM volatile ("cpsid f" : : : "memory");
1020 }
1021
1022
1023 /**
1024 \brief Get Base Priority
1025 \details Returns the current value of the Base Priority register.
1026 \return Base Priority register value
1027 */
__get_BASEPRI(void)1028 __STATIC_FORCEINLINE uint32_t __get_BASEPRI(void)
1029 {
1030 uint32_t result;
1031
1032 __ASM volatile ("MRS %0, basepri" : "=r" (result) );
1033 return (result);
1034 }
1035
1036
1037 #if (__ARM_FEATURE_CMSE == 3)
1038 /**
1039 \brief Get Base Priority (non-secure)
1040 \details Returns the current value of the non-secure Base Priority register when in secure state.
1041 \return Base Priority register value
1042 */
__TZ_get_BASEPRI_NS(void)1043 __STATIC_FORCEINLINE uint32_t __TZ_get_BASEPRI_NS(void)
1044 {
1045 uint32_t result;
1046
1047 __ASM volatile ("MRS %0, basepri_ns" : "=r" (result) );
1048 return (result);
1049 }
1050 #endif
1051
1052
1053 /**
1054 \brief Set Base Priority
1055 \details Assigns the given value to the Base Priority register.
1056 \param [in] basePri Base Priority value to set
1057 */
__set_BASEPRI(uint32_t basePri)1058 __STATIC_FORCEINLINE void __set_BASEPRI(uint32_t basePri)
1059 {
1060 __ASM volatile ("MSR basepri, %0" : : "r" (basePri) : "memory");
1061 }
1062
1063
#if (__ARM_FEATURE_CMSE == 3)
/**
  \brief   Set Base Priority (non-secure)
  \details Assigns the given value to the non-secure Base Priority register when in secure state.
  \param [in]    basePri  Base Priority value to set
 */
__STATIC_FORCEINLINE void __TZ_set_BASEPRI_NS(uint32_t basePri)
{
  __ASM volatile ("MSR basepri_ns, %0" : : "r" (basePri) : "memory");
}
#endif


1077 /**
1078 \brief Set Base Priority with condition
1079 \details Assigns the given value to the Base Priority register only if BASEPRI masking is disabled,
1080 or the new value increases the BASEPRI priority level.
1081 \param [in] basePri Base Priority value to set
1082 */
__set_BASEPRI_MAX(uint32_t basePri)1083 __STATIC_FORCEINLINE void __set_BASEPRI_MAX(uint32_t basePri)
1084 {
1085 __ASM volatile ("MSR basepri_max, %0" : : "r" (basePri) : "memory");
1086 }
1087
1088
1089 /**
1090 \brief Get Fault Mask
1091 \details Returns the current value of the Fault Mask register.
1092 \return Fault Mask register value
1093 */
__get_FAULTMASK(void)1094 __STATIC_FORCEINLINE uint32_t __get_FAULTMASK(void)
1095 {
1096 uint32_t result;
1097
1098 __ASM volatile ("MRS %0, faultmask" : "=r" (result) );
1099 return (result);
1100 }
1101
1102
#if (__ARM_FEATURE_CMSE == 3)
/**
  \brief   Get Fault Mask (non-secure)
  \details Returns the current value of the non-secure Fault Mask register when in secure state.
  \return               Fault Mask register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_FAULTMASK_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, faultmask_ns" : "=r" (result) );
  return (result);
}
#endif


1119 /**
1120 \brief Set Fault Mask
1121 \details Assigns the given value to the Fault Mask register.
1122 \param [in] faultMask Fault Mask value to set
1123 */
__set_FAULTMASK(uint32_t faultMask)1124 __STATIC_FORCEINLINE void __set_FAULTMASK(uint32_t faultMask)
1125 {
1126 __ASM volatile ("MSR faultmask, %0" : : "r" (faultMask) : "memory");
1127 }
1128
1129
1130 #if (__ARM_FEATURE_CMSE == 3)
1131 /**
1132 \brief Set Fault Mask (non-secure)
1133 \details Assigns the given value to the non-secure Fault Mask register when in secure state.
1134 \param [in] faultMask Fault Mask value to set
1135 */
__TZ_set_FAULTMASK_NS(uint32_t faultMask)1136 __STATIC_FORCEINLINE void __TZ_set_FAULTMASK_NS(uint32_t faultMask)
1137 {
1138 __ASM volatile ("MSR faultmask_ns, %0" : : "r" (faultMask) : "memory");
1139 }
1140 #endif
1141
1142 #endif /* (__ARM_ARCH_ISA_THUMB >= 2) */
1143
1144
1145 #if (__ARM_ARCH >= 8)
1146 /**
1147 \brief Get Process Stack Pointer Limit
1148 Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
1149 Stack Pointer Limit register hence zero is returned always in non-secure
1150 mode.
1151
1152 \details Returns the current value of the Process Stack Pointer Limit (PSPLIM).
1153 \return PSPLIM Register value
1154 */
__get_PSPLIM(void)1155 __STATIC_FORCEINLINE uint32_t __get_PSPLIM(void)
1156 {
1157 #if (((__ARM_ARCH_8M_MAIN__ < 1) && \
1158 (__ARM_ARCH_8_1M_MAIN__ < 1) ) && \
1159 (__ARM_FEATURE_CMSE < 3) )
1160 /* without main extensions, the non-secure PSPLIM is RAZ/WI */
1161 return (0U);
1162 #else
1163 uint32_t result;
1164 __ASM volatile ("MRS %0, psplim" : "=r" (result) );
1165 return (result);
1166 #endif
1167 }
1168
#if (__ARM_FEATURE_CMSE == 3)
/**
  \brief   Get Process Stack Pointer Limit (non-secure)
  Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
  Stack Pointer Limit register hence zero is returned always.

  \details Returns the current value of the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state.
  \return               PSPLIM Register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_PSPLIM_NS(void)
{
#if ((__ARM_ARCH_8M_MAIN__   < 1) && \
     (__ARM_ARCH_8_1M_MAIN__ < 1)    )
  /* without main extensions, the non-secure PSPLIM is RAZ/WI */
  return (0U);
#else
  uint32_t result;
  __ASM volatile ("MRS %0, psplim_ns"  : "=r" (result) );
  return (result);
#endif
}
#endif


1193 /**
1194 \brief Set Process Stack Pointer Limit
1195 Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
1196 Stack Pointer Limit register hence the write is silently ignored in non-secure
1197 mode.
1198
1199 \details Assigns the given value to the Process Stack Pointer Limit (PSPLIM).
1200 \param [in] ProcStackPtrLimit Process Stack Pointer Limit value to set
1201 */
__set_PSPLIM(uint32_t ProcStackPtrLimit)1202 __STATIC_FORCEINLINE void __set_PSPLIM(uint32_t ProcStackPtrLimit)
1203 {
1204 #if (((__ARM_ARCH_8M_MAIN__ < 1) && \
1205 (__ARM_ARCH_8_1M_MAIN__ < 1) ) && \
1206 (__ARM_FEATURE_CMSE < 3) )
1207 /* without main extensions, the non-secure PSPLIM is RAZ/WI */
1208 (void)ProcStackPtrLimit;
1209 #else
1210 __ASM volatile ("MSR psplim, %0" : : "r" (ProcStackPtrLimit));
1211 #endif
1212 }
1213
1214
#if (__ARM_FEATURE_CMSE == 3)
/**
  \brief   Set Process Stack Pointer (non-secure)
  Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
  Stack Pointer Limit register hence the write is silently ignored.

  \details Assigns the given value to the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state.
  \param [in]    ProcStackPtrLimit  Process Stack Pointer Limit value to set
 */
__STATIC_FORCEINLINE void __TZ_set_PSPLIM_NS(uint32_t ProcStackPtrLimit)
{
#if ((__ARM_ARCH_8M_MAIN__   < 1) && \
     (__ARM_ARCH_8_1M_MAIN__ < 1)    )
  /* without main extensions, the non-secure PSPLIM is RAZ/WI */
  (void)ProcStackPtrLimit;
#else
  __ASM volatile ("MSR psplim_ns, %0\n" : : "r" (ProcStackPtrLimit));
#endif
}
#endif


1237 /**
1238 \brief Get Main Stack Pointer Limit
1239 Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
1240 Stack Pointer Limit register hence zero is returned always.
1241
1242 \details Returns the current value of the Main Stack Pointer Limit (MSPLIM).
1243 \return MSPLIM Register value
1244 */
__get_MSPLIM(void)1245 __STATIC_FORCEINLINE uint32_t __get_MSPLIM(void)
1246 {
1247 #if (((__ARM_ARCH_8M_MAIN__ < 1) && \
1248 (__ARM_ARCH_8_1M_MAIN__ < 1) ) && \
1249 (__ARM_FEATURE_CMSE < 3) )
1250 /* without main extensions, the non-secure MSPLIM is RAZ/WI */
1251 return (0U);
1252 #else
1253 uint32_t result;
1254 __ASM volatile ("MRS %0, msplim" : "=r" (result) );
1255 return (result);
1256 #endif
1257 }
1258
1259
#if (__ARM_FEATURE_CMSE == 3)
/**
  \brief   Get Main Stack Pointer Limit (non-secure)
  Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
  Stack Pointer Limit register hence zero is returned always.

  \details Returns the current value of the non-secure Main Stack Pointer Limit (MSPLIM) when in secure state.
  \return               MSPLIM Register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_MSPLIM_NS(void)
{
#if ((__ARM_ARCH_8M_MAIN__   < 1) && \
     (__ARM_ARCH_8_1M_MAIN__ < 1)    )
  /* without main extensions, the non-secure MSPLIM is RAZ/WI */
  return (0U);
#else
  uint32_t result;
  __ASM volatile ("MRS %0, msplim_ns" : "=r" (result) );
  return (result);
#endif
}
#endif


1284 /**
1285 \brief Set Main Stack Pointer Limit
1286 Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
1287 Stack Pointer Limit register hence the write is silently ignored.
1288
1289 \details Assigns the given value to the Main Stack Pointer Limit (MSPLIM).
1290 \param [in] MainStackPtrLimit Main Stack Pointer Limit value to set
1291 */
__set_MSPLIM(uint32_t MainStackPtrLimit)1292 __STATIC_FORCEINLINE void __set_MSPLIM(uint32_t MainStackPtrLimit)
1293 {
1294 #if (((__ARM_ARCH_8M_MAIN__ < 1) && \
1295 (__ARM_ARCH_8_1M_MAIN__ < 1) ) && \
1296 (__ARM_FEATURE_CMSE < 3) )
1297 /* without main extensions, the non-secure MSPLIM is RAZ/WI */
1298 (void)MainStackPtrLimit;
1299 #else
1300 __ASM volatile ("MSR msplim, %0" : : "r" (MainStackPtrLimit));
1301 #endif
1302 }
1303
1304
1305 #if (__ARM_FEATURE_CMSE == 3)
1306 /**
1307 \brief Set Main Stack Pointer Limit (non-secure)
1308 Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
1309 Stack Pointer Limit register hence the write is silently ignored.
1310
1311 \details Assigns the given value to the non-secure Main Stack Pointer Limit (MSPLIM) when in secure state.
1312 \param [in] MainStackPtrLimit Main Stack Pointer value to set
1313 */
__TZ_set_MSPLIM_NS(uint32_t MainStackPtrLimit)1314 __STATIC_FORCEINLINE void __TZ_set_MSPLIM_NS(uint32_t MainStackPtrLimit)
1315 {
1316 #if ((__ARM_ARCH_8M_MAIN__ < 1) && \
1317 (__ARM_ARCH_8_1M_MAIN__ < 1) )
1318 /* without main extensions, the non-secure MSPLIM is RAZ/WI */
1319 (void)MainStackPtrLimit;
1320 #else
1321 __ASM volatile ("MSR msplim_ns, %0" : : "r" (MainStackPtrLimit));
1322 #endif
1323 }
1324 #endif
1325
1326 #endif /* (__ARM_ARCH >= 8) */
1327
1328
1329 /**
1330 \brief Get FPSCR
1331 \details Returns the current value of the Floating Point Status/Control register.
1332 \return Floating Point Status/Control register value
1333 */
__get_FPSCR(void)1334 __STATIC_FORCEINLINE uint32_t __get_FPSCR(void)
1335 {
1336 #if (defined(__ARM_FP) && (__ARM_FP >= 1))
1337 return (__builtin_arm_get_fpscr());
1338 #else
1339 return (0U);
1340 #endif
1341 }
1342
1343
1344 /**
1345 \brief Set FPSCR
1346 \details Assigns the given value to the Floating Point Status/Control register.
1347 \param [in] fpscr Floating Point Status/Control value to set
1348 */
__set_FPSCR(uint32_t fpscr)1349 __STATIC_FORCEINLINE void __set_FPSCR(uint32_t fpscr)
1350 {
1351 #if (defined(__ARM_FP) && (__ARM_FP >= 1))
1352 __builtin_arm_set_fpscr(fpscr);
1353 #else
1354 (void)fpscr;
1355 #endif
1356 }
1357
1358
/** @} end of CMSIS_Core_RegAccFunctions */


/* ###################  Compiler specific Intrinsics  ########################### */
/** \defgroup CMSIS_SIMD_intrinsics CMSIS SIMD Intrinsics
  Access to dedicated SIMD instructions
  @{
*/

#if (__ARM_FEATURE_DSP == 1)
/* Map CMSIS SIMD intrinsic names onto the ACLE builtins provided by arm_acle.h. */
#define __SADD8        __sadd8
#define __QADD8        __qadd8
#define __SHADD8       __shadd8
#define __UADD8        __uadd8
#define __UQADD8       __uqadd8
#define __UHADD8       __uhadd8
#define __SSUB8        __ssub8
#define __QSUB8        __qsub8
#define __SHSUB8       __shsub8
#define __USUB8        __usub8
#define __UQSUB8       __uqsub8
#define __UHSUB8       __uhsub8
#define __SADD16       __sadd16
#define __QADD16       __qadd16
#define __SHADD16      __shadd16
#define __UADD16       __uadd16
#define __UQADD16      __uqadd16
#define __UHADD16      __uhadd16
#define __SSUB16       __ssub16
#define __QSUB16       __qsub16
#define __SHSUB16      __shsub16
#define __USUB16       __usub16
#define __UQSUB16      __uqsub16
#define __UHSUB16      __uhsub16
#define __SASX         __sasx
#define __QASX         __qasx
#define __SHASX        __shasx
#define __UASX         __uasx
#define __UQASX        __uqasx
#define __UHASX        __uhasx
#define __SSAX         __ssax
#define __QSAX         __qsax
#define __SHSAX        __shsax
#define __USAX         __usax
#define __UQSAX        __uqsax
#define __UHSAX        __uhsax
#define __USAD8        __usad8
#define __USADA8       __usada8
#define __SSAT16       __ssat16
#define __USAT16       __usat16
#define __UXTB16       __uxtb16
#define __UXTAB16      __uxtab16
#define __SXTB16       __sxtb16
#define __SXTAB16      __sxtab16
#define __SMUAD        __smuad
#define __SMUADX       __smuadx
#define __SMLAD        __smlad
#define __SMLADX       __smladx
#define __SMLALD       __smlald
#define __SMLALDX      __smlaldx
#define __SMUSD        __smusd
#define __SMUSDX       __smusdx
#define __SMLSD        __smlsd
#define __SMLSDX       __smlsdx
#define __SMLSLD       __smlsld
#define __SMLSLDX      __smlsldx
#define __SEL          __sel
#define __QADD         __qadd
#define __QSUB         __qsub

/* PKHBT: bottom halfword of ARG1 combined with (ARG2 << ARG3) in the top halfword. */
#define __PKHBT(ARG1,ARG2,ARG3)  ( ((((uint32_t)(ARG1))          ) & 0x0000FFFFUL) |  \
                                   ((((uint32_t)(ARG2)) << (ARG3)) & 0xFFFF0000UL)  )

/* PKHTB: top halfword of ARG1 combined with (ARG2 >> ARG3) in the bottom halfword. */
#define __PKHTB(ARG1,ARG2,ARG3)  ( ((((uint32_t)(ARG1))          ) & 0xFFFF0000UL) |  \
                                   ((((uint32_t)(ARG2)) >> (ARG3)) & 0x0000FFFFUL)  )

#define __SXTB16_RORn(ARG1, ARG2)        __SXTB16(__ROR(ARG1, ARG2))

#define __SXTAB16_RORn(ARG1, ARG2, ARG3) __SXTAB16(ARG1, __ROR(ARG2, ARG3))

/**
  \brief   Signed Multiply Most significant word, Accumulate
  \details Multiplies op1 and op2 and adds the most significant 32 bits of the
           64-bit product to op3 (SMMLA instruction).
  \param [in]  op1  first multiplicand
  \param [in]  op2  second multiplicand
  \param [in]  op3  accumulate value
  \return     op3 + (((int64_t)op1 * op2) >> 32)
 */
__STATIC_FORCEINLINE int32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
{
  int32_t result;

  __ASM volatile ("smmla %0, %1, %2, %3" : "=r" (result): "r"  (op1), "r" (op2), "r" (op3) );
  return (result);
}

#endif /* (__ARM_FEATURE_DSP == 1) */
/** @} end of group CMSIS_SIMD_intrinsics */


#endif /* __CMSIS_TIARMCLANG_M_H */