/*
 * Copyright (c) 2009-2024 Arm Limited. All rights reserved.
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the License); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an AS IS BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/*
 * CMSIS-Core(A) Compiler ARMClang (Arm Compiler 6) Header File
 */

#ifndef __CMSIS_ARMCLANG_A_H
#define __CMSIS_ARMCLANG_A_H

#pragma clang system_header   /* treat file as system include file */

/* CMSIS compiler specific defines */
#ifndef   __ASM
  #define __ASM                     __asm
#endif
#ifndef   __INLINE
  #define __INLINE                  __inline
#endif
#ifndef   __FORCEINLINE
  #define __FORCEINLINE             __attribute__((always_inline))
#endif
#ifndef   __STATIC_INLINE
  #define __STATIC_INLINE           static __inline
#endif
#ifndef   __STATIC_FORCEINLINE
  #define __STATIC_FORCEINLINE      __attribute__((always_inline)) static __inline
#endif
#ifndef   __NO_RETURN
  #define __NO_RETURN               __attribute__((__noreturn__))
#endif
#ifndef   CMSIS_DEPRECATED
  #define CMSIS_DEPRECATED          __attribute__((deprecated))
#endif
#ifndef   __USED
  #define __USED                    __attribute__((used))
#endif
#ifndef   __WEAK
  #define __WEAK                    __attribute__((weak))
#endif
#ifndef   __PACKED
  #define __PACKED                  __attribute__((packed, aligned(1)))
#endif
#ifndef   __PACKED_STRUCT
  #define __PACKED_STRUCT           struct __attribute__((packed, aligned(1)))
#endif
#ifndef   __UNALIGNED_UINT16_WRITE
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
  /*lint -esym(9058, T_UINT16_WRITE)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT16_WRITE */
  __PACKED_STRUCT T_UINT16_WRITE { uint16_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT16_WRITE(addr, val) (void)((((struct T_UINT16_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef   __UNALIGNED_UINT16_READ
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
  /*lint -esym(9058, T_UINT16_READ)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT16_READ */
  __PACKED_STRUCT T_UINT16_READ { uint16_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT16_READ(addr) (((const struct T_UINT16_READ *)(const void *)(addr))->v)
#endif
#ifndef   __UNALIGNED_UINT32_WRITE
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
  /*lint -esym(9058, T_UINT32_WRITE)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT32_WRITE */
  __PACKED_STRUCT T_UINT32_WRITE { uint32_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT32_WRITE(addr, val) (void)((((struct T_UINT32_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef   __UNALIGNED_UINT32_READ
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
  __PACKED_STRUCT T_UINT32_READ { uint32_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT32_READ(addr) (((const struct T_UINT32_READ *)(const void *)(addr))->v)
#endif
#ifndef   __ALIGNED
  #define __ALIGNED(x)              __attribute__((aligned(x)))
#endif
#ifndef   __COMPILER_BARRIER
  #define __COMPILER_BARRIER()      __ASM volatile("":::"memory")
#endif
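
/* Usage sketch (illustrative, not part of CMSIS): __PACKED_STRUCT and the
   __UNALIGNED_UINT*_READ/WRITE helpers allow access to fields that are not
   naturally aligned, e.g. when parsing a byte-oriented wire format:

     uint8_t frame[6];                                   // hypothetical byte buffer
     __UNALIGNED_UINT32_WRITE(&frame[1], 0x12345678U);   // store at an odd offset
     uint32_t id = __UNALIGNED_UINT32_READ(&frame[1]);   // id == 0x12345678U
*/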


/* ##########################  Core Instruction Access  ######################### */
/**
  \brief   No Operation
  \details No Operation does nothing. This instruction can be used for code alignment purposes.
 */
#define __NOP    __builtin_arm_nop


/**
  \brief   Wait For Interrupt
  \details Wait For Interrupt is a hint instruction that suspends execution until one of a number of events occurs.
 */
#define __WFI    __builtin_arm_wfi


/**
  \brief   Wait For Event
  \details Wait For Event is a hint instruction that permits the processor to enter
           a low-power state until one of a number of events occurs.
 */
#define __WFE    __builtin_arm_wfe


/**
  \brief   Send Event
  \details Send Event is a hint instruction. It causes an event to be signaled to the CPU.
 */
#define __SEV    __builtin_arm_sev


/**
  \brief   Instruction Synchronization Barrier
  \details Instruction Synchronization Barrier flushes the pipeline in the processor,
           so that all instructions following the ISB are fetched from cache or memory,
           after the instruction has been completed.
 */
#define __ISB()  __builtin_arm_isb(0xF)

/**
  \brief   Data Synchronization Barrier
  \details Acts as a special kind of Data Memory Barrier.
           It completes when all explicit memory accesses before this instruction complete.
 */
#define __DSB()  __builtin_arm_dsb(0xF)


/**
  \brief   Data Memory Barrier
  \details Ensures the apparent order of the explicit memory operations before
           and after the instruction, without ensuring their completion.
 */
#define __DMB()  __builtin_arm_dmb(0xF)
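
/* Usage sketch (illustrative, not part of CMSIS): a typical barrier pattern
   after writing a device register whose side effect must take hold before
   execution continues:

     REG_CTRL = 1U;   // hypothetical memory-mapped register
     __DSB();         // wait until the store has completed
     __ISB();         // refetch subsequent instructions under the new state

   __DMB() alone suffices when only the ordering of memory accesses, not
   their completion, must be guaranteed. */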


/**
  \brief   Reverse byte order (32 bit)
  \details Reverses the byte order in an unsigned integer value. For example, 0x12345678 becomes 0x78563412.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#define __REV(value)   __builtin_bswap32(value)


/**
  \brief   Reverse byte order (16 bit)
  \details Reverses the byte order within each halfword of a word. For example, 0x12345678 becomes 0x34127856.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#define __REV16(value) __ROR(__REV(value), 16)


/**
  \brief   Reverse byte order (16 bit)
  \details Reverses the byte order in a 16-bit value and returns the signed 16-bit result. For example, 0x0080 becomes 0x8000.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#define __REVSH(value) (int16_t)__builtin_bswap16(value)


/**
  \brief   Rotate Right in unsigned value (32 bit)
  \details Rotate Right provides the value of the contents of a register rotated by a variable number of bits.
  \param [in]    op1  Value to rotate
  \param [in]    op2  Number of Bits to rotate
  \return             Rotated value
 */
__STATIC_FORCEINLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
{
  op2 %= 32U;
  if (op2 == 0U)
  {
    return op1;
  }
  return (op1 >> op2) | (op1 << (32U - op2));
}
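
/* Usage sketch (illustrative, not part of CMSIS):

     uint32_t r = __ROR(0x12345678U, 8U);   // r == 0x78123456U

   The low 8 bits wrap around to the top of the word. */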


/**
  \brief   Breakpoint
  \details Causes the processor to enter Debug state.
           Debug tools can use this to investigate system state when the instruction at a particular address is reached.
  \param [in]    value  is ignored by the processor.
                 If required, a debugger can use it to store additional information about the breakpoint.
 */
#define __BKPT(value)   __ASM volatile ("bkpt "#value)


/**
  \brief   Reverse bit order of value
  \details Reverses the bit order of the given value.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#define __RBIT          __builtin_arm_rbit


/**
  \brief   Count leading zeros
  \details Counts the number of leading zeros of a data value.
  \param [in]  value  Value to count the leading zeros
  \return             number of leading zeros in value
 */
__STATIC_FORCEINLINE uint8_t __CLZ(uint32_t value)
{
  /* Even though __builtin_clz produces a CLZ instruction on ARM, formally
     __builtin_clz(0) is undefined behaviour, so handle this case specially.
     This guarantees ARM-compatible results if happening to compile on a non-ARM
     target, and ensures the compiler doesn't decide to activate any
     optimisations using the logic "value was passed to __builtin_clz, so it
     is non-zero".
     ARM Compiler 6.10 and possibly earlier will optimise this test away, leaving a
     single CLZ instruction.
   */
  if (value == 0U)
  {
    return 32U;
  }
  return __builtin_clz(value);
}
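
/* Usage sketch (illustrative, not part of CMSIS): __CLZ can derive the index
   of the most significant set bit, e.g. for a simple integer log2:

     uint32_t v   = 0x00400000U;
     uint32_t msb = 31U - __CLZ(v);   // msb == 22U, since __CLZ(v) == 9U

   The zero-input case returns 32, so guard against v == 0 before using msb. */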


/**
  \brief   LDR Exclusive (8 bit)
  \details Executes an exclusive LDR instruction for 8 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
#define __LDREXB        (uint8_t)__builtin_arm_ldrex


/**
  \brief   LDR Exclusive (16 bit)
  \details Executes an exclusive LDR instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return             value of type uint16_t at (*ptr)
 */
#define __LDREXH        (uint16_t)__builtin_arm_ldrex


/**
  \brief   LDR Exclusive (32 bit)
  \details Executes an exclusive LDR instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return             value of type uint32_t at (*ptr)
 */
#define __LDREXW        (uint32_t)__builtin_arm_ldrex


/**
  \brief   STR Exclusive (8 bit)
  \details Executes an exclusive STR instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#define __STREXB        (uint32_t)__builtin_arm_strex


/**
  \brief   STR Exclusive (16 bit)
  \details Executes an exclusive STR instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#define __STREXH        (uint32_t)__builtin_arm_strex


/**
  \brief   STR Exclusive (32 bit)
  \details Executes an exclusive STR instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#define __STREXW        (uint32_t)__builtin_arm_strex


/**
  \brief   Remove the exclusive lock
  \details Removes the exclusive lock which is created by LDREX.
 */
#define __CLREX         __builtin_arm_clrex
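
/* Usage sketch (illustrative, not part of CMSIS): the canonical load/store
   exclusive retry loop for a lock-free increment of a shared counter:

     volatile uint32_t counter;                       // hypothetical shared variable
     uint32_t old;
     do {
       old = __LDREXW(&counter);                      // open the exclusive monitor
     } while (__STREXW(old + 1U, &counter) != 0U);    // retry if the store lost

   __CLREX() can be called on a context switch to clear a stale reservation. */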

/**
  \brief   Signed Saturate
  \details Saturates a signed value.
  \param [in]  value  Value to be saturated
  \param [in]    sat  Bit position to saturate to (1..32)
  \return             Saturated value
 */
#define __SSAT          __builtin_arm_ssat


/**
  \brief   Unsigned Saturate
  \details Saturates an unsigned value.
  \param [in]  value  Value to be saturated
  \param [in]    sat  Bit position to saturate to (0..31)
  \return             Saturated value
 */
#define __USAT          __builtin_arm_usat
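
/* Usage sketch (illustrative, not part of CMSIS): saturating an accumulator
   to the signed 16-bit range before writeback:

     int32_t acc = 40000;
     int16_t out = (int16_t)__SSAT(acc, 16);   // out == 32767 (clamped)

   Note that the saturation width must be a compile-time constant. */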

/**
  \brief   Rotate Right with Extend (32 bit)
  \details Moves each bit of a bitstring right by one bit.
           The carry input is shifted in at the left end of the bitstring.
  \param [in]  value  Value to rotate
  \return             Rotated value
 */
__STATIC_FORCEINLINE uint32_t __RRX(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("rrx %0, %1" : "=r" (result) : "r" (value));
  return (result);
}


/**
  \brief   LDRT Unprivileged (8 bit)
  \details Executes an unprivileged LDRT instruction for 8 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
__STATIC_FORCEINLINE uint8_t __LDRBT(volatile uint8_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint8_t)result);    /* Add explicit type cast here */
}


/**
  \brief   LDRT Unprivileged (16 bit)
  \details Executes an unprivileged LDRT instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return             value of type uint16_t at (*ptr)
 */
__STATIC_FORCEINLINE uint16_t __LDRHT(volatile uint16_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint16_t)result);   /* Add explicit type cast here */
}


/**
  \brief   LDRT Unprivileged (32 bit)
  \details Executes an unprivileged LDRT instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return             value of type uint32_t at (*ptr)
 */
__STATIC_FORCEINLINE uint32_t __LDRT(volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*ptr) );
  return (result);
}


/**
  \brief   STRT Unprivileged (8 bit)
  \details Executes an unprivileged STRT instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STRBT(uint8_t value, volatile uint8_t *ptr)
{
  __ASM volatile ("strbt %1, %0, #0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


/**
  \brief   STRT Unprivileged (16 bit)
  \details Executes an unprivileged STRT instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STRHT(uint16_t value, volatile uint16_t *ptr)
{
  __ASM volatile ("strht %1, %0, #0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


/**
  \brief   STRT Unprivileged (32 bit)
  \details Executes an unprivileged STRT instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STRT(uint32_t value, volatile uint32_t *ptr)
{
  __ASM volatile ("strt %1, %0, #0" : "=Q" (*ptr) : "r" (value) );
}

/* ###################  Compiler specific Intrinsics  ########################### */

#if (defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1))

#define __SADD8    __builtin_arm_sadd8
#define __QADD8    __builtin_arm_qadd8
#define __SHADD8   __builtin_arm_shadd8
#define __UADD8    __builtin_arm_uadd8
#define __UQADD8   __builtin_arm_uqadd8
#define __UHADD8   __builtin_arm_uhadd8
#define __SSUB8    __builtin_arm_ssub8
#define __QSUB8    __builtin_arm_qsub8
#define __SHSUB8   __builtin_arm_shsub8
#define __USUB8    __builtin_arm_usub8
#define __UQSUB8   __builtin_arm_uqsub8
#define __UHSUB8   __builtin_arm_uhsub8
#define __SADD16   __builtin_arm_sadd16
#define __QADD16   __builtin_arm_qadd16
#define __SHADD16  __builtin_arm_shadd16
#define __UADD16   __builtin_arm_uadd16
#define __UQADD16  __builtin_arm_uqadd16
#define __UHADD16  __builtin_arm_uhadd16
#define __SSUB16   __builtin_arm_ssub16
#define __QSUB16   __builtin_arm_qsub16
#define __SHSUB16  __builtin_arm_shsub16
#define __USUB16   __builtin_arm_usub16
#define __UQSUB16  __builtin_arm_uqsub16
#define __UHSUB16  __builtin_arm_uhsub16
#define __SASX     __builtin_arm_sasx
#define __QASX     __builtin_arm_qasx
#define __SHASX    __builtin_arm_shasx
#define __UASX     __builtin_arm_uasx
#define __UQASX    __builtin_arm_uqasx
#define __UHASX    __builtin_arm_uhasx
#define __SSAX     __builtin_arm_ssax
#define __QSAX     __builtin_arm_qsax
#define __SHSAX    __builtin_arm_shsax
#define __USAX     __builtin_arm_usax
#define __UQSAX    __builtin_arm_uqsax
#define __UHSAX    __builtin_arm_uhsax
#define __USAD8    __builtin_arm_usad8
#define __USADA8   __builtin_arm_usada8
#define __SSAT16   __builtin_arm_ssat16
#define __USAT16   __builtin_arm_usat16
#define __UXTB16   __builtin_arm_uxtb16
#define __UXTAB16  __builtin_arm_uxtab16
#define __SXTB16   __builtin_arm_sxtb16
#define __SXTAB16  __builtin_arm_sxtab16
#define __SMUAD    __builtin_arm_smuad
#define __SMUADX   __builtin_arm_smuadx
#define __SMLAD    __builtin_arm_smlad
#define __SMLADX   __builtin_arm_smladx
#define __SMLALD   __builtin_arm_smlald
#define __SMLALDX  __builtin_arm_smlaldx
#define __SMUSD    __builtin_arm_smusd
#define __SMUSDX   __builtin_arm_smusdx
#define __SMLSD    __builtin_arm_smlsd
#define __SMLSDX   __builtin_arm_smlsdx
#define __SMLSLD   __builtin_arm_smlsld
#define __SMLSLDX  __builtin_arm_smlsldx
#define __SEL      __builtin_arm_sel
#define __QADD     __builtin_arm_qadd
#define __QSUB     __builtin_arm_qsub

#define __PKHBT(ARG1,ARG2,ARG3)  ( ((((uint32_t)(ARG1))          ) & 0x0000FFFFUL) | \
                                   ((((uint32_t)(ARG2)) << (ARG3)) & 0xFFFF0000UL) )

#define __PKHTB(ARG1,ARG2,ARG3)  ( ((((uint32_t)(ARG1))          ) & 0xFFFF0000UL) | \
                                   ((((uint32_t)(ARG2)) >> (ARG3)) & 0x0000FFFFUL) )

#define __SXTB16_RORn(ARG1, ARG2)        __SXTB16(__ROR(ARG1, ARG2))

#define __SXTAB16_RORn(ARG1, ARG2, ARG3) __SXTAB16(ARG1, __ROR(ARG2, ARG3))

__STATIC_FORCEINLINE int32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
{
  int32_t result;

  __ASM volatile ("smmla %0, %1, %2, %3" : "=r" (result): "r" (op1), "r" (op2), "r" (op3) );
  return (result);
}
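
/* Usage sketch (illustrative, not part of CMSIS): the SIMD intrinsics operate
   on packed sub-words of a 32-bit register, e.g. four parallel saturating
   signed byte additions:

     uint32_t a   = 0x80FF0102U;
     uint32_t b   = 0x80010304U;
     uint32_t sum = __QADD8(a, b);   // each signed byte lane saturates independently

   Here the 0x80 + 0x80 lane saturates at the signed minimum 0x80. */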

#endif /* (__ARM_FEATURE_DSP == 1) */

/* ###########################  Core Function Access  ########################### */

/**
  \brief   Enable IRQ Interrupts
  \details Enables IRQ interrupts by clearing the I-bit in the CPSR.
           Can only be executed in Privileged modes.
 */
__STATIC_FORCEINLINE void __enable_irq(void)
{
  __ASM volatile ("cpsie i" : : : "memory");
}

/**
  \brief   Disable IRQ Interrupts
  \details Disables IRQ interrupts by setting the I-bit in the CPSR.
           Can only be executed in Privileged modes.
 */
__STATIC_FORCEINLINE void __disable_irq(void)
{
  __ASM volatile ("cpsid i" : : : "memory");
}
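
/* Usage sketch (illustrative, not part of CMSIS): a simple critical section
   that restores the previous masking state via the CPSR I-bit (bit 7):

     uint32_t cpsr = __get_CPSR();   // defined later in this file
     __disable_irq();
     ... critical work here ...
     if ((cpsr & 0x80U) == 0U)
     {
       __enable_irq();               // re-enable only if IRQs were enabled before
     }
*/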

/**
  \brief   Enable FIQ
  \details Enables FIQ interrupts by clearing the F-bit in the CPSR.
           Can only be executed in Privileged modes.
 */
__STATIC_FORCEINLINE void __enable_fault_irq(void)
{
  __ASM volatile ("cpsie f" : : : "memory");
}


/**
  \brief   Disable FIQ
  \details Disables FIQ interrupts by setting the F-bit in the CPSR.
           Can only be executed in Privileged modes.
 */
__STATIC_FORCEINLINE void __disable_fault_irq(void)
{
  __ASM volatile ("cpsid f" : : : "memory");
}


/**
  \brief   Get FPSCR
  \details Returns the current value of the Floating Point Status/Control register.
  \return               Floating Point Status/Control register value
 */
__STATIC_FORCEINLINE uint32_t __get_FPSCR(void)
{
#if (defined(__ARM_FP) && (__ARM_FP >= 1))
  return __builtin_arm_get_fpscr();
#else
  return (0U);
#endif
}


/**
  \brief   Set FPSCR
  \details Assigns the given value to the Floating Point Status/Control register.
  \param [in]  fpscr  Floating Point Status/Control value to set
 */
__STATIC_FORCEINLINE void __set_FPSCR(uint32_t fpscr)
{
#if (defined(__ARM_FP) && (__ARM_FP >= 1))
  __builtin_arm_set_fpscr(fpscr);
#else
  (void)fpscr;
#endif
}


/** \brief  Get CPSR Register
    \return               CPSR Register value
 */
__STATIC_FORCEINLINE uint32_t __get_CPSR(void)
{
  uint32_t result;
  __ASM volatile("MRS %0, cpsr" : "=r" (result) );
  return (result);
}

/** \brief  Set CPSR Register
    \param [in]  cpsr  CPSR value to set
 */
__STATIC_FORCEINLINE void __set_CPSR(uint32_t cpsr)
{
  __ASM volatile ("MSR cpsr, %0" : : "r" (cpsr) : "cc", "memory");
}

/** \brief  Get Mode
    \return                Processor Mode
 */
__STATIC_FORCEINLINE uint32_t __get_mode(void)
{
  return (__get_CPSR() & 0x1FU);
}

/** \brief  Set Mode
    \param [in]  mode  Mode value to set
 */
__STATIC_FORCEINLINE void __set_mode(uint32_t mode)
{
  __ASM volatile("MSR cpsr_c, %0" : : "r" (mode) : "memory");
}

/** \brief  Get Stack Pointer
    \return Stack Pointer value
 */
__STATIC_FORCEINLINE uint32_t __get_SP(void)
{
  uint32_t result;
  __ASM volatile("MOV %0, sp" : "=r" (result) : : "memory");
  return result;
}

/** \brief  Set Stack Pointer
    \param [in]  stack  Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __set_SP(uint32_t stack)
{
  __ASM volatile("MOV sp, %0" : : "r" (stack) : "memory");
}

/** \brief  Get USR/SYS Stack Pointer
    \return USR/SYS Stack Pointer value
 */
__STATIC_FORCEINLINE uint32_t __get_SP_usr(void)
{
  uint32_t cpsr;
  uint32_t result;
  __ASM volatile(
    "MRS     %0, cpsr   \n"
    "CPS     #0x1F      \n"   // no effect in USR mode
    "MOV     %1, sp     \n"
    "MSR     cpsr_c, %0 \n"   // no effect in USR mode
    "ISB" :  "=r"(cpsr), "=r"(result) : : "memory"
  );
  return result;
}

/** \brief  Set USR/SYS Stack Pointer
    \param [in]  topOfProcStack  USR/SYS Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __set_SP_usr(uint32_t topOfProcStack)
{
  uint32_t cpsr;
  __ASM volatile(
    "MRS     %0, cpsr   \n"
    "CPS     #0x1F      \n"   // no effect in USR mode
    "MOV     sp, %1     \n"
    "MSR     cpsr_c, %0 \n"   // no effect in USR mode
    "ISB" :  "=r"(cpsr) : "r" (topOfProcStack) : "memory"
  );
}

/** \brief  Get FPEXC
    \return               Floating Point Exception Control register value
 */
__STATIC_FORCEINLINE uint32_t __get_FPEXC(void)
{
#if (__FPU_PRESENT == 1)
  uint32_t result;
  __ASM volatile("VMRS %0, fpexc" : "=r" (result) : : "memory");
  return (result);
#else
  return (0);
#endif
}

/** \brief  Set FPEXC
    \param [in]  fpexc  Floating Point Exception Control value to set
 */
__STATIC_FORCEINLINE void __set_FPEXC(uint32_t fpexc)
{
#if (__FPU_PRESENT == 1)
  __ASM volatile ("VMSR fpexc, %0" : : "r" (fpexc) : "memory");
#endif
}

/*
 * Include common core functions to access Coprocessor 15 registers
 */

#define __get_CP(cp, op1, Rt, CRn, CRm, op2) __ASM volatile("MRC p" # cp ", " # op1 ", %0, c" # CRn ", c" # CRm ", " # op2 : "=r" (Rt) : : "memory" )
#define __set_CP(cp, op1, Rt, CRn, CRm, op2) __ASM volatile("MCR p" # cp ", " # op1 ", %0, c" # CRn ", c" # CRm ", " # op2 : : "r" (Rt) : "memory" )
#define __get_CP64(cp, op1, Rt, CRm)         __ASM volatile("MRRC p" # cp ", " # op1 ", %Q0, %R0, c" # CRm : "=r" (Rt) : : "memory" )
#define __set_CP64(cp, op1, Rt, CRm)         __ASM volatile("MCRR p" # cp ", " # op1 ", %Q0, %R0, c" # CRm : : "r" (Rt) : "memory" )
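
/* Usage sketch (illustrative, not part of CMSIS): reading a CP15 register
   with __get_CP, e.g. the Main ID Register MIDR (c0, c0, opc2 = 0):

     uint32_t midr;
     __get_CP(15, 0, midr, 0, 0, 0);   // expands to: MRC p15, 0, midr, c0, c0, 0

   cmsis_cp15.h, included below, wraps the common registers in named helpers. */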

#include "cmsis_cp15.h"

/** \brief  Enable Floating Point Unit

  Critical section, called from undef handler, so systick is disabled
 */
__STATIC_INLINE void __FPU_Enable(void)
{
  __ASM volatile(
    // Permit access to VFP/NEON registers by modifying CPACR
    "        MRC     p15,0,R1,c1,c0,2  \n"
    "        ORR     R1,R1,#0x00F00000 \n"
    "        MCR     p15,0,R1,c1,c0,2  \n"

    // Ensure that subsequent instructions occur in the context of VFP/NEON access permitted
    "        ISB                       \n"

    // Enable VFP/NEON
    "        VMRS    R1,FPEXC          \n"
    "        ORR     R1,R1,#0x40000000 \n"
    "        VMSR    FPEXC,R1          \n"

    // Initialise VFP/NEON registers to 0
    "        MOV     R2,#0             \n"

    // Initialise D0-D15 registers to 0
    "        VMOV    D0, R2,R2         \n"
    "        VMOV    D1, R2,R2         \n"
    "        VMOV    D2, R2,R2         \n"
    "        VMOV    D3, R2,R2         \n"
    "        VMOV    D4, R2,R2         \n"
    "        VMOV    D5, R2,R2         \n"
    "        VMOV    D6, R2,R2         \n"
    "        VMOV    D7, R2,R2         \n"
    "        VMOV    D8, R2,R2         \n"
    "        VMOV    D9, R2,R2         \n"
    "        VMOV    D10,R2,R2         \n"
    "        VMOV    D11,R2,R2         \n"
    "        VMOV    D12,R2,R2         \n"
    "        VMOV    D13,R2,R2         \n"
    "        VMOV    D14,R2,R2         \n"
    "        VMOV    D15,R2,R2         \n"

#if (defined(__ARM_NEON) && (__ARM_NEON == 1))
    // Initialise D16-D31 registers to 0
    "        VMOV    D16,R2,R2         \n"
    "        VMOV    D17,R2,R2         \n"
    "        VMOV    D18,R2,R2         \n"
    "        VMOV    D19,R2,R2         \n"
    "        VMOV    D20,R2,R2         \n"
    "        VMOV    D21,R2,R2         \n"
    "        VMOV    D22,R2,R2         \n"
    "        VMOV    D23,R2,R2         \n"
    "        VMOV    D24,R2,R2         \n"
    "        VMOV    D25,R2,R2         \n"
    "        VMOV    D26,R2,R2         \n"
    "        VMOV    D27,R2,R2         \n"
    "        VMOV    D28,R2,R2         \n"
    "        VMOV    D29,R2,R2         \n"
    "        VMOV    D30,R2,R2         \n"
    "        VMOV    D31,R2,R2         \n"
#endif

    // Initialise FPSCR to a known state
    "        VMRS    R1,FPSCR          \n"
    "        LDR     R2,=0x00086060    \n"   // Mask off all bits that do not have to be preserved. Non-preserved bits can/should be zero.
    "        AND     R1,R1,R2          \n"
    "        VMSR    FPSCR,R1          "
    : : : "cc", "r1", "r2"
  );
}
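
/* Usage sketch (illustrative, not part of CMSIS): __FPU_Enable() is typically
   invoked once, e.g. from the Undefined Instruction handler or early startup
   code, before any VFP/NEON instruction executes:

     __FPU_Enable();   // grant CP10/CP11 access and set FPEXC.EN
*/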

#endif /* __CMSIS_ARMCLANG_A_H */