/*
 * Copyright (c) 2017-2018 IAR Systems
 * Copyright (c) 2018-2023 Arm Limited. All rights reserved.
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the License); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an AS IS BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/*
 * CMSIS-Core(A) Compiler ICCARM (IAR Compiler for Arm) Header File
 */

#ifndef __CMSIS_ICCARM_A_H__
#define __CMSIS_ICCARM_A_H__

#ifndef __ICCARM__
  #error This file should only be compiled by ICCARM
#endif

#pragma system_include

#define __IAR_FT _Pragma("inline=forced") __intrinsic

#if (__VER__ >= 8000000)
  #define __ICCARM_V8 1
#else
  #define __ICCARM_V8 0
#endif

#pragma language=extended

#ifndef __ALIGNED
  #if __ICCARM_V8
    #define __ALIGNED(x) __attribute__((aligned(x)))
  #elif (__VER__ >= 7080000)
    /* Needs IAR language extensions */
    #define __ALIGNED(x) __attribute__((aligned(x)))
  #else
    #warning No compiler specific solution for __ALIGNED. __ALIGNED is ignored.
    #define __ALIGNED(x)
  #endif
#endif
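
/* Usage sketch (illustrative only, not part of the CMSIS API): align a buffer
   for DMA or cache maintenance; the 32 here is just an example line size:

     static uint8_t dma_buffer[64] __ALIGNED(32);
*/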


/* Define compiler macros for CPU architecture, used in CMSIS 5. */
#if __ARM_ARCH_7A__
/* Macro already defined */
#else
  #if defined(__ARM7A__)
    #define __ARM_ARCH_7A__ 1
  #endif
#endif

#ifndef __ASM
  #define __ASM __asm
#endif

#ifndef   __COMPILER_BARRIER
  #define __COMPILER_BARRIER() __ASM volatile("":::"memory")
#endif
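
/* Usage sketch (illustrative only): keep the compiler from hoisting the flag
   update above the data write; this is a compiler-only fence, so a __DMB()
   is still needed when another core or bus master observes the memory.
   shared_data and shared_data_valid are hypothetical names:

     shared_data = value;
     __COMPILER_BARRIER();
     shared_data_valid = 1U;
*/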

#ifndef __INLINE
  #define __INLINE inline
#endif

#ifndef   __NO_RETURN
  #if __ICCARM_V8
    #define __NO_RETURN __attribute__((__noreturn__))
  #else
    #define __NO_RETURN _Pragma("object_attribute=__noreturn")
  #endif
#endif

#ifndef   __PACKED
  #if __ICCARM_V8
    #define __PACKED __attribute__((packed, aligned(1)))
  #else
    /* Needs IAR language extensions */
    #define __PACKED __packed
  #endif
#endif

#ifndef   __PACKED_STRUCT
  #if __ICCARM_V8
    #define __PACKED_STRUCT struct __attribute__((packed, aligned(1)))
  #else
    /* Needs IAR language extensions */
    #define __PACKED_STRUCT __packed struct
  #endif
#endif

#ifndef   __PACKED_UNION
  #if __ICCARM_V8
    #define __PACKED_UNION union __attribute__((packed, aligned(1)))
  #else
    /* Needs IAR language extensions */
    #define __PACKED_UNION __packed union
  #endif
#endif
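
/* Usage sketch (illustrative only): a byte-exact wire-format header with no
   padding between fields; the tag and field names are made up:

     __PACKED_STRUCT wire_header_t {
       uint8_t  type;
       uint16_t length;
       uint32_t sequence;
     };   // sizeof(struct wire_header_t) == 7, not 8 or 12
*/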

#ifndef   __RESTRICT
  #if __ICCARM_V8
    #define __RESTRICT            __restrict
  #else
    /* Needs IAR language extensions */
    #define __RESTRICT            restrict
  #endif
#endif

#ifndef   __STATIC_INLINE
  #define __STATIC_INLINE       static inline
#endif

#ifndef   __FORCEINLINE
  #define __FORCEINLINE         _Pragma("inline=forced")
#endif

#ifndef   __STATIC_FORCEINLINE
  #define __STATIC_FORCEINLINE  __FORCEINLINE __STATIC_INLINE
#endif

#ifndef   CMSIS_DEPRECATED
  #define CMSIS_DEPRECATED      __attribute__((deprecated))
#endif

#ifndef __UNALIGNED_UINT16_READ
  #pragma language=save
  #pragma language=extended
  __IAR_FT uint16_t __iar_uint16_read(void const *ptr)
  {
    return *(__packed uint16_t*)(ptr);
  }
  #pragma language=restore
  #define __UNALIGNED_UINT16_READ(PTR) __iar_uint16_read(PTR)
#endif


#ifndef __UNALIGNED_UINT16_WRITE
  #pragma language=save
  #pragma language=extended
  __IAR_FT void __iar_uint16_write(void const *ptr, uint16_t val)
  {
    *(__packed uint16_t*)(ptr) = val;
  }
  #pragma language=restore
  #define __UNALIGNED_UINT16_WRITE(PTR,VAL) __iar_uint16_write(PTR,VAL)
#endif

#ifndef __UNALIGNED_UINT32_READ
  #pragma language=save
  #pragma language=extended
  __IAR_FT uint32_t __iar_uint32_read(void const *ptr)
  {
    return *(__packed uint32_t*)(ptr);
  }
  #pragma language=restore
  #define __UNALIGNED_UINT32_READ(PTR) __iar_uint32_read(PTR)
#endif

#ifndef __UNALIGNED_UINT32_WRITE
  #pragma language=save
  #pragma language=extended
  __IAR_FT void __iar_uint32_write(void const *ptr, uint32_t val)
  {
    *(__packed uint32_t*)(ptr) = val;
  }
  #pragma language=restore
  #define __UNALIGNED_UINT32_WRITE(PTR,VAL) __iar_uint32_write(PTR,VAL)
#endif
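
/* Usage sketch (illustrative only): access a 32-bit field that starts at an
   odd offset inside a raw byte buffer without faulting on alignment:

     uint8_t  packet[16];
     uint32_t field = __UNALIGNED_UINT32_READ(&packet[3]);
     __UNALIGNED_UINT32_WRITE(&packet[3], field + 1U);
*/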

#ifndef   __USED
  #if __ICCARM_V8
    #define __USED __attribute__((used))
  #else
    #define __USED _Pragma("__root")
  #endif
#endif

#ifndef   __WEAK
  #if __ICCARM_V8
    #define __WEAK __attribute__((weak))
  #else
    #define __WEAK _Pragma("__weak")
  #endif
#endif


#ifndef __ICCARM_INTRINSICS_VERSION__
  #define __ICCARM_INTRINSICS_VERSION__  0
#endif

#if __ICCARM_INTRINSICS_VERSION__ == 2

  #if defined(__CLZ)
    #undef __CLZ
  #endif
  #if defined(__REVSH)
    #undef __REVSH
  #endif
  #if defined(__RBIT)
    #undef __RBIT
  #endif
  #if defined(__SSAT)
    #undef __SSAT
  #endif
  #if defined(__USAT)
    #undef __USAT
  #endif

  #include "iccarm_builtin.h"

  #define __disable_fault_irq   __iar_builtin_disable_fiq
  #define __disable_irq         __iar_builtin_disable_interrupt
  #define __enable_fault_irq    __iar_builtin_enable_fiq
  #define __enable_irq          __iar_builtin_enable_interrupt
  #define __arm_rsr             __iar_builtin_rsr
  #define __arm_wsr             __iar_builtin_wsr

  #if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)))
    #define __get_FPSCR()             (__arm_rsr("FPSCR"))
    #define __set_FPSCR(VALUE)        (__arm_wsr("FPSCR", (VALUE)))
  #else
    #define __get_FPSCR()             ( 0 )
    #define __set_FPSCR(VALUE)        ((void)VALUE)
  #endif

  #define __get_CPSR()                (__arm_rsr("CPSR"))
  #define __get_mode()                (__get_CPSR() & 0x1FU)

  #define __set_CPSR(VALUE)           (__arm_wsr("CPSR", (VALUE)))
  #define __set_mode(VALUE)           (__arm_wsr("CPSR_c", (VALUE)))
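
  /* Usage sketch (illustrative only): switch a privileged caller to System
     mode (0x1FU) while leaving the I, F and T bits of the control field
     untouched:

       __set_mode((__get_CPSR() & ~0x1FU) | 0x1FU);
  */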


  #define __get_FPEXC()               (__arm_rsr("FPEXC"))
  #define __set_FPEXC(VALUE)          (__arm_wsr("FPEXC", VALUE))

  #define __get_CP(cp, op1, RT, CRn, CRm, op2) \
    ((RT) = __arm_rsr("p" # cp ":" # op1 ":c" # CRn ":c" # CRm ":" # op2))

  #define __set_CP(cp, op1, RT, CRn, CRm, op2) \
    (__arm_wsr("p" # cp ":" # op1 ":c" # CRn ":c" # CRm ":" # op2, (RT)))

  #define __get_CP64(cp, op1, Rt, CRm) \
    __ASM volatile("MRRC p" # cp ", " # op1 ", %Q0, %R0, c" # CRm  : "=r" (Rt) : : "memory" )

  #define __set_CP64(cp, op1, Rt, CRm) \
    __ASM volatile("MCRR p" # cp ", " # op1 ", %Q0, %R0, c" # CRm  : : "r" (Rt) : "memory" )
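
  /* Usage sketch (illustrative only): read the CP15 SCTLR (encoded as
     MRC p15, 0, Rt, c1, c0, 0) into a local and write it straight back:

       uint32_t sctlr;
       __get_CP(15, 0, sctlr, 1, 0, 0);
       __set_CP(15, 0, sctlr, 1, 0, 0);
  */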

  #include "cmsis_cp15.h"

  #define __NOP     __iar_builtin_no_operation

  #define __CLZ     __iar_builtin_CLZ
  #define __CLREX   __iar_builtin_CLREX

  #define __DMB     __iar_builtin_DMB
  #define __DSB     __iar_builtin_DSB
  #define __ISB     __iar_builtin_ISB

  #define __LDREXB  __iar_builtin_LDREXB
  #define __LDREXH  __iar_builtin_LDREXH
  #define __LDREXW  __iar_builtin_LDREX

  #define __RBIT    __iar_builtin_RBIT
  #define __REV     __iar_builtin_REV
  #define __REV16   __iar_builtin_REV16

  __IAR_FT int16_t __REVSH(int16_t val)
  {
    return (int16_t) __iar_builtin_REVSH(val);
  }

  #define __ROR     __iar_builtin_ROR
  #define __RRX     __iar_builtin_RRX

  #define __SEV     __iar_builtin_SEV

  #define __SSAT    __iar_builtin_SSAT

  #define __STREXB  __iar_builtin_STREXB
  #define __STREXH  __iar_builtin_STREXH
  #define __STREXW  __iar_builtin_STREX
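
  /* Usage sketch (illustrative only): lock-free increment of a shared counter
     with the exclusive-access pair; the loop retries until STREX reports
     success (returns 0):

       static volatile uint32_t counter;
       uint32_t tmp;
       do {
         tmp = __LDREXW(&counter) + 1U;
       } while (__STREXW(tmp, &counter) != 0U);
  */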

  #define __USAT    __iar_builtin_USAT

  #define __WFE     __iar_builtin_WFE
  #define __WFI     __iar_builtin_WFI

  #define __SADD8   __iar_builtin_SADD8
  #define __QADD8   __iar_builtin_QADD8
  #define __SHADD8  __iar_builtin_SHADD8
  #define __UADD8   __iar_builtin_UADD8
  #define __UQADD8  __iar_builtin_UQADD8
  #define __UHADD8  __iar_builtin_UHADD8
  #define __SSUB8   __iar_builtin_SSUB8
  #define __QSUB8   __iar_builtin_QSUB8
  #define __SHSUB8  __iar_builtin_SHSUB8
  #define __USUB8   __iar_builtin_USUB8
  #define __UQSUB8  __iar_builtin_UQSUB8
  #define __UHSUB8  __iar_builtin_UHSUB8
  #define __SADD16  __iar_builtin_SADD16
  #define __QADD16  __iar_builtin_QADD16
  #define __SHADD16 __iar_builtin_SHADD16
  #define __UADD16  __iar_builtin_UADD16
  #define __UQADD16 __iar_builtin_UQADD16
  #define __UHADD16 __iar_builtin_UHADD16
  #define __SSUB16  __iar_builtin_SSUB16
  #define __QSUB16  __iar_builtin_QSUB16
  #define __SHSUB16 __iar_builtin_SHSUB16
  #define __USUB16  __iar_builtin_USUB16
  #define __UQSUB16 __iar_builtin_UQSUB16
  #define __UHSUB16 __iar_builtin_UHSUB16
  #define __SASX    __iar_builtin_SASX
  #define __QASX    __iar_builtin_QASX
  #define __SHASX   __iar_builtin_SHASX
  #define __UASX    __iar_builtin_UASX
  #define __UQASX   __iar_builtin_UQASX
  #define __UHASX   __iar_builtin_UHASX
  #define __SSAX    __iar_builtin_SSAX
  #define __QSAX    __iar_builtin_QSAX
  #define __SHSAX   __iar_builtin_SHSAX
  #define __USAX    __iar_builtin_USAX
  #define __UQSAX   __iar_builtin_UQSAX
  #define __UHSAX   __iar_builtin_UHSAX
  #define __USAD8   __iar_builtin_USAD8
  #define __USADA8  __iar_builtin_USADA8
  #define __SSAT16  __iar_builtin_SSAT16
  #define __USAT16  __iar_builtin_USAT16
  #define __UXTB16  __iar_builtin_UXTB16
  #define __UXTAB16 __iar_builtin_UXTAB16
  #define __SXTB16  __iar_builtin_SXTB16
  #define __SXTAB16 __iar_builtin_SXTAB16
  #define __SMUAD   __iar_builtin_SMUAD
  #define __SMUADX  __iar_builtin_SMUADX
  #define __SMMLA   __iar_builtin_SMMLA
  #define __SMLAD   __iar_builtin_SMLAD
  #define __SMLADX  __iar_builtin_SMLADX
  #define __SMLALD  __iar_builtin_SMLALD
  #define __SMLALDX __iar_builtin_SMLALDX
  #define __SMUSD   __iar_builtin_SMUSD
  #define __SMUSDX  __iar_builtin_SMUSDX
  #define __SMLSD   __iar_builtin_SMLSD
  #define __SMLSDX  __iar_builtin_SMLSDX
  #define __SMLSLD  __iar_builtin_SMLSLD
  #define __SMLSLDX __iar_builtin_SMLSLDX
  #define __SEL     __iar_builtin_SEL
  #define __QADD    __iar_builtin_QADD
  #define __QSUB    __iar_builtin_QSUB
  #define __PKHBT   __iar_builtin_PKHBT
  #define __PKHTB   __iar_builtin_PKHTB
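
  /* Usage sketch (illustrative only): one step of a Q15 dot product using the
     dual 16-bit multiply-accumulate; pSrcA, pSrcB and sum are hypothetical
     names for pointers into two sample buffers and a running accumulator:

       uint32_t xy = __UNALIGNED_UINT32_READ(pSrcA);  // packs two int16_t
       uint32_t ab = __UNALIGNED_UINT32_READ(pSrcB);
       sum = __SMLAD(xy, ab, sum);  // sum += x0*a0 + x1*a1
  */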

#else /* __ICCARM_INTRINSICS_VERSION__ == 2 */

  #if !((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)))
    #define __get_FPSCR __cmsis_iar_get_FPSR_not_active
  #endif

  #ifdef __INTRINSICS_INCLUDED
  #error intrinsics.h has already been included!
  #endif

  #include <intrinsics.h>

  #if !((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)))
    #define __get_FPSCR() (0)
  #endif

  /* Suppress diagnostics raised by the inline intrinsic wrappers below */
  #pragma diag_suppress=Pe940
  #pragma diag_suppress=Pe177

  #define __enable_irq          __enable_interrupt
  #define __disable_irq         __disable_interrupt
  #define __enable_fault_irq    __enable_fiq
  #define __disable_fault_irq   __disable_fiq
  #define __NOP                 __no_operation

  #define __get_xPSR            __get_PSR

  __IAR_FT void __set_mode(uint32_t mode)
  {
    __ASM volatile("MSR  cpsr_c, %0" : : "r" (mode) : "memory");
  }

  __IAR_FT uint32_t __LDREXW(uint32_t volatile *ptr)
  {
    return __LDREX((unsigned long *)ptr);
  }

  __IAR_FT uint32_t __STREXW(uint32_t value, uint32_t volatile *ptr)
  {
    return __STREX(value, (unsigned long *)ptr);
  }


  __IAR_FT uint32_t __RRX(uint32_t value)
  {
    uint32_t result;
    __ASM("RRX      %0, %1" : "=r"(result) : "r" (value) : "cc");
    return(result);
  }


  __IAR_FT uint32_t __ROR(uint32_t op1, uint32_t op2)
  {
    /* Normalise the shift: rotating by 0 must not shift by 32 (undefined). */
    op2 %= 32U;
    return (op2 == 0U) ? op1 : ((op1 >> op2) | (op1 << (32U - op2)));
  }

  __IAR_FT uint32_t __get_FPEXC(void)
  {
  #if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)))
    uint32_t result;
    __ASM volatile("VMRS %0, fpexc" : "=r" (result) : : "memory");
    return(result);
  #else
    return(0);
  #endif
  }

  __IAR_FT void __set_FPEXC(uint32_t fpexc)
  {
  #if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)))
    __ASM volatile ("VMSR fpexc, %0" : : "r" (fpexc) : "memory");
  #endif
  }


  #define __get_CP(cp, op1, Rt, CRn, CRm, op2) \
    __ASM volatile("MRC p" # cp ", " # op1 ", %0, c" # CRn ", c" # CRm ", " # op2 : "=r" (Rt) : : "memory" )
  #define __set_CP(cp, op1, Rt, CRn, CRm, op2) \
    __ASM volatile("MCR p" # cp ", " # op1 ", %0, c" # CRn ", c" # CRm ", " # op2 : : "r" (Rt) : "memory" )
  #define __get_CP64(cp, op1, Rt, CRm) \
    __ASM volatile("MRRC p" # cp ", " # op1 ", %Q0, %R0, c" # CRm  : "=r" (Rt) : : "memory" )
  #define __set_CP64(cp, op1, Rt, CRm) \
    __ASM volatile("MCRR p" # cp ", " # op1 ", %Q0, %R0, c" # CRm  : : "r" (Rt) : "memory" )

  #include "cmsis_cp15.h"

#endif   /* __ICCARM_INTRINSICS_VERSION__ == 2 */

#define __BKPT(value)    __asm volatile ("BKPT     %0" : : "i"(value))
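
/* Usage sketch (illustrative only): halt execution under a debugger; the
   immediate is ignored by the core and only visible to the debug tools:

     __BKPT(0x00);
*/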


__IAR_FT uint32_t __get_SP_usr(void)
{
  uint32_t cpsr;
  uint32_t result;
  __ASM volatile(
    "MRS     %0, cpsr   \n"
    "CPS     #0x1F      \n" // no effect in USR mode
    "MOV     %1, sp     \n"
    "MSR     cpsr_c, %2 \n" // no effect in USR mode
    "ISB" : "=r"(cpsr), "=r"(result) : "r"(cpsr) : "memory"
   );
  return result;
}

__IAR_FT void __set_SP_usr(uint32_t topOfProcStack)
{
  uint32_t cpsr;
  __ASM volatile(
    "MRS     %0, cpsr   \n"
    "CPS     #0x1F      \n" // no effect in USR mode
    "MOV     sp, %1     \n"
    "MSR     cpsr_c, %2 \n" // no effect in USR mode
    "ISB" : "=r"(cpsr) : "r" (topOfProcStack), "r"(cpsr) : "memory"
   );
}
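
/* Usage sketch (illustrative only): an RTOS would typically seed the banked
   User/System stack pointer from a privileged mode before dropping to User
   mode; thread_stack and THREAD_STACK_WORDS are hypothetical names:

     __set_SP_usr((uint32_t)&thread_stack[THREAD_STACK_WORDS]);
*/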

#define __get_mode()                (__get_CPSR() & 0x1FU)

__STATIC_INLINE
void __FPU_Enable(void)
{
  __ASM volatile(
    //Permit access to VFP/NEON registers by modifying CPACR
    "        MRC     p15,0,R1,c1,c0,2  \n"
    "        ORR     R1,R1,#0x00F00000 \n"
    "        MCR     p15,0,R1,c1,c0,2  \n"

    //Ensure that subsequent instructions occur in the context of VFP/NEON access permitted
    "        ISB                       \n"

    //Enable VFP/NEON
    "        VMRS    R1,FPEXC          \n"
    "        ORR     R1,R1,#0x40000000 \n"
    "        VMSR    FPEXC,R1          \n"

    //Initialise VFP/NEON registers to 0
    "        MOV     R2,#0             \n"

    //Initialise the 16 double-precision registers D0-D15 to 0
    "        VMOV    D0, R2,R2         \n"
    "        VMOV    D1, R2,R2         \n"
    "        VMOV    D2, R2,R2         \n"
    "        VMOV    D3, R2,R2         \n"
    "        VMOV    D4, R2,R2         \n"
    "        VMOV    D5, R2,R2         \n"
    "        VMOV    D6, R2,R2         \n"
    "        VMOV    D7, R2,R2         \n"
    "        VMOV    D8, R2,R2         \n"
    "        VMOV    D9, R2,R2         \n"
    "        VMOV    D10,R2,R2         \n"
    "        VMOV    D11,R2,R2         \n"
    "        VMOV    D12,R2,R2         \n"
    "        VMOV    D13,R2,R2         \n"
    "        VMOV    D14,R2,R2         \n"
    "        VMOV    D15,R2,R2         \n"

#ifdef __ARM_ADVANCED_SIMD__
    //Initialise the additional registers D16-D31 to 0
    "        VMOV    D16,R2,R2         \n"
    "        VMOV    D17,R2,R2         \n"
    "        VMOV    D18,R2,R2         \n"
    "        VMOV    D19,R2,R2         \n"
    "        VMOV    D20,R2,R2         \n"
    "        VMOV    D21,R2,R2         \n"
    "        VMOV    D22,R2,R2         \n"
    "        VMOV    D23,R2,R2         \n"
    "        VMOV    D24,R2,R2         \n"
    "        VMOV    D25,R2,R2         \n"
    "        VMOV    D26,R2,R2         \n"
    "        VMOV    D27,R2,R2         \n"
    "        VMOV    D28,R2,R2         \n"
    "        VMOV    D29,R2,R2         \n"
    "        VMOV    D30,R2,R2         \n"
    "        VMOV    D31,R2,R2         \n"
#endif

    //Initialise FPSCR to a known state
    "        VMRS    R1,FPSCR          \n"
    "        MOV32   R2,#0x00086060    \n" //Mask off all bits that do not have to be preserved. Non-preserved bits can/should be zero.
    "        AND     R1,R1,R2          \n"
    "        VMSR    FPSCR,R1          \n"
    : : : "cc", "r1", "r2"
  );
}
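
/* Usage sketch (illustrative only): call once from privileged startup code
   before any floating-point or NEON instruction executes, for example near
   the top of SystemInit():

     __FPU_Enable();
*/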



#undef __IAR_FT
#undef __ICCARM_V8

#pragma diag_default=Pe940
#pragma diag_default=Pe177

#endif /* __CMSIS_ICCARM_A_H__ */