/**************************************************************************//**
 * @file     cmsis_gcc_r.h
 * @brief    CMSIS compiler GCC header file
 * @version  V6.0.0
 * @date     4. August 2024
 ******************************************************************************/
/*
 * Copyright (c) 2009-2023 Arm Limited. All rights reserved.
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the License); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an AS IS BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef __CMSIS_GCC_R_H
#define __CMSIS_GCC_R_H

#ifndef __CMSIS_GCC_H
  #error "This file must not be included directly"
#endif

/* ignore some GCC warnings */
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wsign-conversion"
#pragma GCC diagnostic ignored "-Wconversion"
#pragma GCC diagnostic ignored "-Wunused-parameter"


/** \defgroup CMSIS_Core_intrinsics CMSIS Core Intrinsics
  Access to dedicated core register and coprocessor instructions
  @{
*/

/** \brief  Get CPSR Register
    \return CPSR Register value
 */
__STATIC_FORCEINLINE uint32_t __get_CPSR(void)
{
  uint32_t result;
  __ASM volatile("MRS %0, cpsr" : "=r" (result) );
  return(result);
}

/** \brief  Set CPSR Register
    \param [in] cpsr  CPSR value to set
 */
__STATIC_FORCEINLINE void __set_CPSR(uint32_t cpsr)
{
  __ASM volatile ("MSR cpsr, %0" : : "r" (cpsr) : "cc", "memory");
}

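/*
  Usage sketch (illustrative): preserve the current CPSR across a code
  region that may change processor state, then restore it afterwards.
  The variable name saved_cpsr is only an example.

    uint32_t saved_cpsr = __get_CPSR();
    // ... code that may alter the CPSR ...
    __set_CPSR(saved_cpsr);
*/
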
/** \brief  Get Mode
    \return Processor Mode
 */
__STATIC_FORCEINLINE uint32_t __get_mode(void)
{
  return (__get_CPSR() & 0x1FU);
}

/** \brief  Set Mode
    \param [in] mode  Mode value to set
 */
__STATIC_FORCEINLINE void __set_mode(uint32_t mode)
{
  __ASM volatile("MSR cpsr_c, %0" : : "r" (mode) : "memory");
}

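/*
  Usage sketch (illustrative): check for Supervisor mode and switch to
  System mode. The values 0x13U (SVC) and 0x1FU (SYS) are the architectural
  AArch32 mode encodings; symbolic constants for them may be provided by
  the device's core header.

    if (__get_mode() == 0x13U)   // currently in Supervisor (SVC) mode
    {
      __set_mode(0x1FU);         // switch to System (SYS) mode
    }
*/
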
/** \brief  Get Stack Pointer
    \return Stack Pointer value
 */
__STATIC_FORCEINLINE uint32_t __get_SP(void)
{
  uint32_t result;
  __ASM volatile("MOV %0, sp" : "=r" (result) : : "memory");
  return result;
}

/** \brief  Set Stack Pointer
    \param [in] stack  Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __set_SP(uint32_t stack)
{
  __ASM volatile("MOV sp, %0" : : "r" (stack) : "memory");
}

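/*
  Usage sketch (illustrative): estimate the remaining stack headroom by
  comparing the current stack pointer with the lowest address of the
  stack. The symbol __stack_limit is hypothetical and would typically be
  provided by the linker script.

    extern uint32_t __stack_limit;   // lowest address of the current stack
    uint32_t headroom = __get_SP() - (uint32_t)&__stack_limit;
*/
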
/** \brief  Get USR/SYS Stack Pointer
    \return USR/SYS Stack Pointer value
 */
__STATIC_FORCEINLINE uint32_t __get_SP_usr(void)
{
  uint32_t cpsr = __get_CPSR();
  uint32_t result;
  __ASM volatile(
    "CPS #0x1F  \n"
    "MOV %0, sp   " : "=r"(result) : : "memory"
  );
  __set_CPSR(cpsr);
  __ISB();
  return result;
}

/** \brief  Set USR/SYS Stack Pointer
    \param [in] topOfProcStack  USR/SYS Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __set_SP_usr(uint32_t topOfProcStack)
{
  uint32_t cpsr = __get_CPSR();
  __ASM volatile(
    "CPS #0x1F  \n"
    "MOV sp, %0   " : : "r" (topOfProcStack) : "memory"
  );
  __set_CPSR(cpsr);
  __ISB();
}

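/*
  Usage sketch (illustrative): from a privileged mode, initialise the
  banked USR/SYS stack pointer before executing code in System or User
  mode. The symbol __usr_stack_top is hypothetical and would typically be
  defined by the linker script.

    extern uint32_t __usr_stack_top;   // initial top of the USR/SYS stack
    __set_SP_usr((uint32_t)&__usr_stack_top);
*/
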
/** \brief  Get FPEXC
    \return Floating Point Exception Control register value
 */
__STATIC_FORCEINLINE uint32_t __get_FPEXC(void)
{
#if (__FPU_PRESENT == 1)
  uint32_t result;
  __ASM volatile("VMRS %0, fpexc" : "=r" (result) : : "memory");
  return(result);
#else
  return(0);
#endif
}

/** \brief  Set FPEXC
    \param [in] fpexc  Floating Point Exception Control value to set
 */
__STATIC_FORCEINLINE void __set_FPEXC(uint32_t fpexc)
{
#if (__FPU_PRESENT == 1)
  __ASM volatile ("VMSR fpexc, %0" : : "r" (fpexc) : "memory");
#endif
}

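/*
  Usage sketch (illustrative): enable the FPU by setting the EN bit
  (bit 30) of FPEXC. Access to CP10/CP11 must already have been granted
  via CPACR for this to take effect.

    __set_FPEXC(__get_FPEXC() | 0x40000000U);   // FPEXC.EN = 1
*/
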
/*
 * Include common core functions to access Coprocessor 15 registers
 */

#define __get_CP(cp, op1, Rt, CRn, CRm, op2) __ASM volatile("MRC p" # cp ", " # op1 ", %0, c" # CRn ", c" # CRm ", " # op2 : "=r" (Rt) : : "memory" )
#define __set_CP(cp, op1, Rt, CRn, CRm, op2) __ASM volatile("MCR p" # cp ", " # op1 ", %0, c" # CRn ", c" # CRm ", " # op2 : : "r" (Rt) : "memory" )
#define __get_CP64(cp, op1, Rt, CRm) __ASM volatile("MRRC p" # cp ", " # op1 ", %Q0, %R0, c" # CRm : "=r" (Rt) : : "memory" )
#define __set_CP64(cp, op1, Rt, CRm) __ASM volatile("MCRR p" # cp ", " # op1 ", %Q0, %R0, c" # CRm : : "r" (Rt) : "memory" )

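/*
  Usage sketch (illustrative): read the Main ID Register (MIDR), encoded as
  CP15 with opc1 = 0, CRn = c0, CRm = c0, opc2 = 0. The variable name midr
  is only an example.

    uint32_t midr;
    __get_CP(15, 0, midr, 0, 0, 0);   // MRC p15, 0, <Rt>, c0, c0, 0
*/
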
/*@} end of group CMSIS_Core_intrinsics */

#pragma GCC diagnostic pop

#endif /* __CMSIS_GCC_R_H */