1 /*
2  * Copyright (c) 2017-2024 IAR Systems
3  * Copyright (c) 2017-2024 Arm Limited. All rights reserved.
4  *
5  * SPDX-License-Identifier: Apache-2.0
6  *
7  * Licensed under the Apache License, Version 2.0 (the License); you may
8  * not use this file except in compliance with the License.
9  * You may obtain a copy of the License at
10  *
11  * www.apache.org/licenses/LICENSE-2.0
12  *
13  * Unless required by applicable law or agreed to in writing, software
14  * distributed under the License is distributed on an AS IS BASIS, WITHOUT
15  * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16  * See the License for the specific language governing permissions and
17  * limitations under the License.
18  */
19 
20 /*
21  * CMSIS-Core(R) Compiler ICCARM (IAR Compiler for Arm) Header File
22  */
23 
24 #ifndef __CMSIS_ICCARM_R_H
25 #define __CMSIS_ICCARM_R_H
26 
27 #pragma system_include   /* treat file as system include file */
28 
29 #ifndef __CMSIS_ICCARM_H
30   #error "This file must not be included directly"
31 #endif
32 
/* Read the Current Program Status Register. */
#define __get_CPSR() (__arm_rsr("CPSR"))
/* Extract the processor mode field M[4:0] from CPSR. */
#define __get_mode() (__get_CPSR() & 0x1FU)

/* Write the whole CPSR. */
#define __set_CPSR(VALUE) (__arm_wsr("CPSR", (VALUE)))
/* Write only the CPSR control field (CPSR_c), i.e. switch processor mode. */
#define __set_mode(VALUE) (__arm_wsr("CPSR_c", (VALUE)))
38 
#if (defined (__ARM_FP)      && (__ARM_FP >= 1))
  /* FP extension present: access the Floating-Point Exception Control register. */
  #define __get_FPEXC() (__arm_rsr("FPEXC"))
  #define __set_FPEXC(VALUE) (__arm_wsr("FPEXC", VALUE))
#else
  /* No FP extension: reads yield 0, writes are discarded. */
  #define __get_FPEXC()             ( 0 )
  #define __set_FPEXC(VALUE)        ((void)VALUE)
#endif
46 
/* Read a 32-bit coprocessor register into RT (MRC equivalent), using the
 * IAR "p<cp>:<op1>:c<CRn>:c<CRm>:<op2>" system-register name syntax. */
#define __get_CP(cp, op1, RT, CRn, CRm, op2)                                   \
  ((RT) = __arm_rsr("p" #cp ":" #op1 ":c" #CRn ":c" #CRm ":" #op2))

/* Write a 32-bit coprocessor register from RT (MCR equivalent). */
#define __set_CP(cp, op1, RT, CRn, CRm, op2)                                   \
  (__arm_wsr("p" #cp ":" #op1 ":c" #CRn ":c" #CRm ":" #op2, (RT)))
52 
/* Read a 64-bit coprocessor register pair into Rt (MRRC).
 * %Q0 / %R0 select the low / high 32-bit halves of the 64-bit operand. */
#define __get_CP64(cp, op1, Rt, CRm)                                           \
  __ASM volatile("MRRC p" #cp ", " #op1 ", %Q0, %R0, c" #CRm                   \
                 : "=r"(Rt)                                                    \
                 :                                                             \
                 : "memory")

/* Write a 64-bit coprocessor register pair from Rt (MCRR). */
#define __set_CP64(cp, op1, Rt, CRm)                                           \
  __ASM volatile("MCRR p" #cp ", " #op1 ", %Q0, %R0, c" #CRm                   \
                 :                                                             \
                 : "r"(Rt)                                                     \
                 : "memory")
64 
65 
__get_SP_usr(void)66 __IAR_FT uint32_t __get_SP_usr(void) {
67   uint32_t cpsr;
68   uint32_t result;
69   __ASM volatile("MRS     %0, cpsr   \n"
70                  "CPS     #0x1F      \n" // no effect in USR mode
71                  "MOV     %1, sp     \n"
72                  "MSR     cpsr_c, %2 \n" // no effect in USR mode
73                  "ISB"
74                  : "=r"(cpsr), "=r"(result)
75                  : "r"(cpsr)
76                  : "memory");
77   return result;
78 }
79 
__set_SP_usr(uint32_t topOfProcStack)80 __IAR_FT void __set_SP_usr(uint32_t topOfProcStack) {
81   uint32_t cpsr;
82   __ASM volatile("MRS     %0, cpsr   \n"
83                  "CPS     #0x1F      \n" // no effect in USR mode
84                  "MOV     sp, %1     \n"
85                  "MSR     cpsr_c, %2 \n" // no effect in USR mode
86                  "ISB"
87                  : "=r"(cpsr)
88                  : "r"(topOfProcStack), "r"(cpsr)
89                  : "memory");
90 }
91 
/* Note: a duplicate '#define __get_mode()' was removed here — the identical
 * definition already appears near the top of this file. */
93 
/* Enable the FPU/NEON unit:
 *   1. grant full CP10/CP11 access in CPACR (bits 20-23),
 *   2. set FPEXC.EN (bit 30) to turn the FP unit on,
 *   3. zero-initialize all D registers (D0-D15, and D16-D31 when
 *      Advanced SIMD is present) and clear the non-preserved FPSCR bits.
 * The ISB ensures the CPACR change takes effect before the first VFP
 * instruction.  Uses only r1/r2 and the flags, all declared as clobbers.
 * MOV32 is an assembler pseudo-instruction (expands to MOVW/MOVT). */
__STATIC_INLINE
void __FPU_Enable(void) {
  __ASM volatile(
      // Permit access to VFP/NEON, registers by modifying CPACR
      "        MRC     p15,0,R1,c1,c0,2  \n"
      "        ORR     R1,R1,#0x00F00000 \n"
      "        MCR     p15,0,R1,c1,c0,2  \n"

      // Ensure that subsequent instructions occur in the context of VFP/NEON
      // access permitted
      "        ISB                       \n"

      // Enable VFP/NEON
      "        VMRS    R1,FPEXC          \n"
      "        ORR     R1,R1,#0x40000000 \n"
      "        VMSR    FPEXC,R1          \n"

      // Initialise VFP/NEON registers to 0
      "        MOV     R2,#0             \n"

      // Initialise D16 registers to 0
      "        VMOV    D0, R2,R2         \n"
      "        VMOV    D1, R2,R2         \n"
      "        VMOV    D2, R2,R2         \n"
      "        VMOV    D3, R2,R2         \n"
      "        VMOV    D4, R2,R2         \n"
      "        VMOV    D5, R2,R2         \n"
      "        VMOV    D6, R2,R2         \n"
      "        VMOV    D7, R2,R2         \n"
      "        VMOV    D8, R2,R2         \n"
      "        VMOV    D9, R2,R2         \n"
      "        VMOV    D10,R2,R2         \n"
      "        VMOV    D11,R2,R2         \n"
      "        VMOV    D12,R2,R2         \n"
      "        VMOV    D13,R2,R2         \n"
      "        VMOV    D14,R2,R2         \n"
      "        VMOV    D15,R2,R2         \n"

#ifdef __ARM_ADVANCED_SIMD__
      // Initialise D32 registers to 0
      "        VMOV    D16,R2,R2         \n"
      "        VMOV    D17,R2,R2         \n"
      "        VMOV    D18,R2,R2         \n"
      "        VMOV    D19,R2,R2         \n"
      "        VMOV    D20,R2,R2         \n"
      "        VMOV    D21,R2,R2         \n"
      "        VMOV    D22,R2,R2         \n"
      "        VMOV    D23,R2,R2         \n"
      "        VMOV    D24,R2,R2         \n"
      "        VMOV    D25,R2,R2         \n"
      "        VMOV    D26,R2,R2         \n"
      "        VMOV    D27,R2,R2         \n"
      "        VMOV    D28,R2,R2         \n"
      "        VMOV    D29,R2,R2         \n"
      "        VMOV    D30,R2,R2         \n"
      "        VMOV    D31,R2,R2         \n"
#endif

      // Initialise FPSCR to a known state
      "        VMRS    R1,FPSCR          \n"
      "        MOV32   R2,#0x00086060    \n" // Mask off all bits that do not
                                             // have to be preserved.
                                             // Non-preserved bits can/should be
                                             // zero.
      "        AND     R1,R1,R2          \n"
      "        VMSR    FPSCR,R1          \n"
      :
      :
      : "cc", "r1", "r2");
}
164 
165 #undef __IAR_FT
166 #undef __ICCARM_V8
167 
168 #pragma diag_default = Pe940
169 #pragma diag_default = Pe177
#endif /* __CMSIS_ICCARM_R_H */
171