1 /*
2 * Copyright (c) 2017-2021 IAR Systems
3 * Copyright (c) 2017-2024 Arm Limited. All rights reserved.
4 *
5 * SPDX-License-Identifier: Apache-2.0
6 *
7 * Licensed under the Apache License, Version 2.0 (the License); you may
8 * not use this file except in compliance with the License.
9 * You may obtain a copy of the License at
10 *
11 * www.apache.org/licenses/LICENSE-2.0
12 *
13 * Unless required by applicable law or agreed to in writing, software
14 * distributed under the License is distributed on an AS IS BASIS, WITHOUT
15 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 * See the License for the specific language governing permissions and
17 * limitations under the License.
18 */
19
20 /*
21 * CMSIS-Core(M) Compiler ICCARM (IAR Compiler for Arm) Header File
22 */
23
24 #ifndef __CMSIS_ICCARM_M_H__
25 #define __CMSIS_ICCARM_M_H__
26
27 #ifndef __ICCARM__
28 #error This file should only be compiled by ICCARM
29 #endif
30
31 #pragma system_include
32
33 #define __IAR_FT _Pragma("inline=forced") __intrinsic
34
35 #if (__VER__ >= 8000000)
36 #define __ICCARM_V8 1
37 #else
38 #define __ICCARM_V8 0
39 #endif
40
41 #ifndef __ALIGNED
42 #if __ICCARM_V8
43 #define __ALIGNED(x) __attribute__((aligned(x)))
44 #elif (__VER__ >= 7080000)
45 /* Needs IAR language extensions */
46 #define __ALIGNED(x) __attribute__((aligned(x)))
47 #else
48 #warning No compiler specific solution for __ALIGNED.__ALIGNED is ignored.
49 #define __ALIGNED(x)
50 #endif
51 #endif
52
53
54 /* Define compiler macros for CPU architecture, used in CMSIS 5.
55 */
56 #if __ARM_ARCH_6M__ || __ARM_ARCH_7M__ || __ARM_ARCH_7EM__ || __ARM_ARCH_8M_BASE__ || __ARM_ARCH_8M_MAIN__ || __ARM_ARCH_8_1M_MAIN__
57 /* Macros already defined */
58 #else
59 #if defined(__ARM8M_MAINLINE__) || defined(__ARM8EM_MAINLINE__)
60 #define __ARM_ARCH_8M_MAIN__ 1
61 #elif defined(__ARM8M_BASELINE__)
62 #define __ARM_ARCH_8M_BASE__ 1
63 #elif defined(__ARM_ARCH_PROFILE) && __ARM_ARCH_PROFILE == 'M'
64 #if __ARM_ARCH == 6
65 #define __ARM_ARCH_6M__ 1
66 #elif __ARM_ARCH == 7
67 #if __ARM_FEATURE_DSP
68 #define __ARM_ARCH_7EM__ 1
69 #else
70 #define __ARM_ARCH_7M__ 1
71 #endif
72 #elif __ARM_ARCH == 801
73 #define __ARM_ARCH_8_1M_MAIN__ 1
74 #endif /* __ARM_ARCH */
75 #endif /* __ARM_ARCH_PROFILE == 'M' */
76 #endif
77
/* Alternative core deduction for older ICCARM versions */
79 #if !defined(__ARM_ARCH_6M__) && !defined(__ARM_ARCH_7M__) && !defined(__ARM_ARCH_7EM__) && \
80 !defined(__ARM_ARCH_8M_BASE__) && !defined(__ARM_ARCH_8M_MAIN__) && !defined(__ARM_ARCH_8_1M_MAIN__)
81 #if defined(__ARM6M__) && (__CORE__ == __ARM6M__)
82 #define __ARM_ARCH_6M__ 1
83 #elif defined(__ARM7M__) && (__CORE__ == __ARM7M__)
84 #define __ARM_ARCH_7M__ 1
85 #elif defined(__ARM7EM__) && (__CORE__ == __ARM7EM__)
86 #define __ARM_ARCH_7EM__ 1
87 #elif defined(__ARM8M_BASELINE__) && (__CORE == __ARM8M_BASELINE__)
88 #define __ARM_ARCH_8M_BASE__ 1
89 #elif defined(__ARM8M_MAINLINE__) && (__CORE == __ARM8M_MAINLINE__)
90 #define __ARM_ARCH_8M_MAIN__ 1
91 #elif defined(__ARM8EM_MAINLINE__) && (__CORE == __ARM8EM_MAINLINE__)
92 #define __ARM_ARCH_8M_MAIN__ 1
93 #else
94 #error "Unknown target."
95 #endif
96 #endif
97
98
99
100 #if defined(__ARM_ARCH_6M__) && __ARM_ARCH_6M__==1
101 #define __IAR_M0_FAMILY 1
102 #elif defined(__ARM_ARCH_8M_BASE__) && __ARM_ARCH_8M_BASE__==1
103 #define __IAR_M0_FAMILY 1
104 #else
105 #define __IAR_M0_FAMILY 0
106 #endif
107
108 #ifndef __NO_INIT
109 #define __NO_INIT __attribute__ ((section (".noinit")))
110 #endif
111 #ifndef __ALIAS
112 #define __ALIAS(x) __attribute__ ((alias(x)))
113 #endif
114
115 #ifndef __ASM
116 #define __ASM __asm
117 #endif
118
119 #ifndef __COMPILER_BARRIER
120 #define __COMPILER_BARRIER() __ASM volatile("":::"memory")
121 #endif
122
123 #ifndef __INLINE
124 #define __INLINE inline
125 #endif
126
127 #ifndef __NO_RETURN
128 #if defined(__cplusplus) && __cplusplus >= 201103L
129 #define __NO_RETURN [[noreturn]]
130 #elif defined(__STDC_VERSION__) && __STDC_VERSION__ >= 201112L
131 #define __NO_RETURN _Noreturn
132 #else
133 #define __NO_RETURN _Pragma("object_attribute=__noreturn")
134 #endif
135 #endif
136
137 #ifndef __PACKED
138 #if __ICCARM_V8
139 #define __PACKED __attribute__((packed, aligned(1)))
140 #else
141 /* Needs IAR language extensions */
142 #define __PACKED __packed
143 #endif
144 #endif
145
146 #ifndef __PACKED_STRUCT
147 #if __ICCARM_V8
148 #define __PACKED_STRUCT struct __attribute__((packed, aligned(1)))
149 #else
150 /* Needs IAR language extensions */
151 #define __PACKED_STRUCT __packed struct
152 #endif
153 #endif
154
155 #ifndef __PACKED_UNION
156 #if __ICCARM_V8
157 #define __PACKED_UNION union __attribute__((packed, aligned(1)))
158 #else
159 /* Needs IAR language extensions */
160 #define __PACKED_UNION __packed union
161 #endif
162 #endif
163
164 #ifndef __RESTRICT
165 #if __ICCARM_V8
166 #define __RESTRICT __restrict
167 #else
168 /* Needs IAR language extensions */
169 #define __RESTRICT restrict
170 #endif
171 #endif
172
173 #ifndef __STATIC_INLINE
174 #define __STATIC_INLINE static inline
175 #endif
176
177 #ifndef __FORCEINLINE
178 #define __FORCEINLINE _Pragma("inline=forced")
179 #endif
180
181 #ifndef __STATIC_FORCEINLINE
182 #define __STATIC_FORCEINLINE __FORCEINLINE __STATIC_INLINE
183 #endif
184
#ifndef __UNALIGNED_UINT16_READ
#pragma language=save
#pragma language=extended
/* Read a 16-bit value from a potentially unaligned address.
   The __packed cast makes the compiler emit byte accesses where needed. */
__IAR_FT uint16_t __iar_uint16_read(void const *ptr)
{
  return *(__packed uint16_t*)(ptr);
}
#pragma language=restore
#define __UNALIGNED_UINT16_READ(PTR) __iar_uint16_read(PTR)
#endif
195
196
197 #ifndef __UNALIGNED_UINT16_WRITE
198 #pragma language=save
199 #pragma language=extended
__iar_uint16_write(void const * ptr,uint16_t val)200 __IAR_FT void __iar_uint16_write(void const *ptr, uint16_t val)
201 {
202 *(__packed uint16_t*)(ptr) = val;;
203 }
204 #pragma language=restore
205 #define __UNALIGNED_UINT16_WRITE(PTR,VAL) __iar_uint16_write(PTR,VAL)
206 #endif
207
#ifndef __UNALIGNED_UINT32_READ
#pragma language=save
#pragma language=extended
/* Read a 32-bit value from a potentially unaligned address.
   The __packed cast makes the compiler emit byte accesses where needed. */
__IAR_FT uint32_t __iar_uint32_read(void const *ptr)
{
  return *(__packed uint32_t*)(ptr);
}
#pragma language=restore
#define __UNALIGNED_UINT32_READ(PTR) __iar_uint32_read(PTR)
#endif
218
219 #ifndef __UNALIGNED_UINT32_WRITE
220 #pragma language=save
221 #pragma language=extended
__iar_uint32_write(void const * ptr,uint32_t val)222 __IAR_FT void __iar_uint32_write(void const *ptr, uint32_t val)
223 {
224 *(__packed uint32_t*)(ptr) = val;;
225 }
226 #pragma language=restore
227 #define __UNALIGNED_UINT32_WRITE(PTR,VAL) __iar_uint32_write(PTR,VAL)
228 #endif
229
#ifndef __UNALIGNED_UINT32 /* deprecated */
#pragma language=save
#pragma language=extended
/* Packed wrapper struct backing the deprecated __UNALIGNED_UINT32 accessor. */
__packed struct __iar_u32 { uint32_t v; };
#pragma language=restore
#define __UNALIGNED_UINT32(PTR) (((struct __iar_u32 *)(PTR))->v)
#endif
237
238 #ifndef __USED
239 #if __ICCARM_V8
240 #define __USED __attribute__((used))
241 #else
242 #define __USED _Pragma("__root")
243 #endif
244 #endif
245
246 #undef __WEAK /* undo the definition from DLib_Defaults.h */
247 #ifndef __WEAK
248 #if __ICCARM_V8
249 #define __WEAK __attribute__((weak))
250 #else
251 #define __WEAK _Pragma("__weak")
252 #endif
253 #endif
254
255 #ifndef __PROGRAM_START
256 #define __PROGRAM_START __iar_program_start
257 #endif
258
259 #ifndef __INITIAL_SP
260 #define __INITIAL_SP CSTACK$$Limit
261 #endif
262
263 #ifndef __STACK_LIMIT
264 #define __STACK_LIMIT CSTACK$$Base
265 #endif
266
267 #ifndef __VECTOR_TABLE
268 #define __VECTOR_TABLE __vector_table
269 #endif
270
271 #ifndef __VECTOR_TABLE_ATTRIBUTE
272 #define __VECTOR_TABLE_ATTRIBUTE @".intvec"
273 #endif
274
275 #if defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3U)
276 #ifndef __STACK_SEAL
277 #define __STACK_SEAL STACKSEAL$$Base
278 #endif
279
280 #ifndef __TZ_STACK_SEAL_SIZE
281 #define __TZ_STACK_SEAL_SIZE 8U
282 #endif
283
284 #ifndef __TZ_STACK_SEAL_VALUE
285 #define __TZ_STACK_SEAL_VALUE 0xFEF5EDA5FEF5EDA5ULL
286 #endif
287
/* Write the 64-bit stack-seal value at the given Secure stack top (CMSE only).
   NOTE(review): performed as a single uint64_t store — assumes stackTop is
   suitably aligned for a 64-bit access; confirm at the call site. */
__STATIC_FORCEINLINE void __TZ_set_STACKSEAL_S (uint32_t* stackTop) {
  *((uint64_t *)stackTop) = __TZ_STACK_SEAL_VALUE;
}
291 #endif
292
293 #ifndef __ICCARM_INTRINSICS_VERSION__
294 #define __ICCARM_INTRINSICS_VERSION__ 0
295 #endif
296
297 #if __ICCARM_INTRINSICS_VERSION__ == 2
298
299 #if defined(__CLZ)
300 #undef __CLZ
301 #endif
302 #if defined(__REVSH)
303 #undef __REVSH
304 #endif
305 #if defined(__RBIT)
306 #undef __RBIT
307 #endif
308 #if defined(__SSAT)
309 #undef __SSAT
310 #endif
311 #if defined(__USAT)
312 #undef __USAT
313 #endif
314
315 #include "iccarm_builtin.h"
316
317 #define __disable_irq __iar_builtin_disable_interrupt
318 #define __enable_irq __iar_builtin_enable_interrupt
319 #define __arm_rsr __iar_builtin_rsr
320 #define __arm_wsr __iar_builtin_wsr
321
322
#if (defined(__ARM_ARCH_ISA_THUMB) && __ARM_ARCH_ISA_THUMB >= 2)
/* Set FAULTMASK via CPSID F: disables all configurable exceptions (Thumb-2 only). */
__IAR_FT void __disable_fault_irq()
{
  __ASM volatile ("CPSID F" ::: "memory");
}

/* Clear FAULTMASK via CPSIE F: re-enables the configurable exceptions. */
__IAR_FT void __enable_fault_irq()
{
  __ASM volatile ("CPSIE F" ::: "memory");
}
#endif
334
335
336 #define __get_APSR() (__arm_rsr("APSR"))
337 #define __get_BASEPRI() (__arm_rsr("BASEPRI"))
338 #define __get_CONTROL() (__arm_rsr("CONTROL"))
339 #define __get_FAULTMASK() (__arm_rsr("FAULTMASK"))
340
341 #if (defined (__ARM_FP) && (__ARM_FP >= 1))
342 #define __get_FPSCR() (__arm_rsr("FPSCR"))
343 #define __set_FPSCR(VALUE) (__arm_wsr("FPSCR", (VALUE)))
344 #else
345 #define __get_FPSCR() ( 0 )
346 #define __set_FPSCR(VALUE) ((void)VALUE)
347 #endif
348
349 #define __get_IPSR() (__arm_rsr("IPSR"))
350 #define __get_MSP() (__arm_rsr("MSP"))
351 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
352 !(defined (__ARM_ARCH_8_1M_MAIN__ ) && (__ARM_ARCH_8_1M_MAIN__ == 1)) && \
353 (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
354 // without main extensions, the non-secure MSPLIM is RAZ/WI
355 #define __get_MSPLIM() (0U)
356 #else
357 #define __get_MSPLIM() (__arm_rsr("MSPLIM"))
358 #endif
359 #define __get_PRIMASK() (__arm_rsr("PRIMASK"))
360 #define __get_PSP() (__arm_rsr("PSP"))
361
362 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
363 !(defined (__ARM_ARCH_8_1M_MAIN__ ) && (__ARM_ARCH_8_1M_MAIN__ == 1)) && \
364 (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
365 // without main extensions, the non-secure PSPLIM is RAZ/WI
366 #define __get_PSPLIM() (0U)
367 #else
368 #define __get_PSPLIM() (__arm_rsr("PSPLIM"))
369 #endif
370
371 #define __get_xPSR() (__arm_rsr("xPSR"))
372
373 #define __set_BASEPRI(VALUE) (__arm_wsr("BASEPRI", (VALUE)))
374 #define __set_BASEPRI_MAX(VALUE) (__arm_wsr("BASEPRI_MAX", (VALUE)))
375
/* Write the CONTROL register, then ISB so the new stack/privilege
   configuration takes effect before subsequent instructions. */
__STATIC_FORCEINLINE void __set_CONTROL(uint32_t control)
{
  __arm_wsr("CONTROL", control);
  __iar_builtin_ISB();
}
381
382 #define __set_FAULTMASK(VALUE) (__arm_wsr("FAULTMASK", (VALUE)))
383 #define __set_MSP(VALUE) (__arm_wsr("MSP", (VALUE)))
384
385 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
386 !(defined (__ARM_ARCH_8_1M_MAIN__ ) && (__ARM_ARCH_8_1M_MAIN__ == 1)) && \
387 (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
388 // without main extensions, the non-secure MSPLIM is RAZ/WI
389 #define __set_MSPLIM(VALUE) ((void)(VALUE))
390 #else
391 #define __set_MSPLIM(VALUE) (__arm_wsr("MSPLIM", (VALUE)))
392 #endif
393 #define __set_PRIMASK(VALUE) (__arm_wsr("PRIMASK", (VALUE)))
394 #define __set_PSP(VALUE) (__arm_wsr("PSP", (VALUE)))
395 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
396 !(defined (__ARM_ARCH_8_1M_MAIN__ ) && (__ARM_ARCH_8_1M_MAIN__ == 1)) && \
397 (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
398 // without main extensions, the non-secure PSPLIM is RAZ/WI
399 #define __set_PSPLIM(VALUE) ((void)(VALUE))
400 #else
401 #define __set_PSPLIM(VALUE) (__arm_wsr("PSPLIM", (VALUE)))
402 #endif
403
404 #define __TZ_get_CONTROL_NS() (__arm_rsr("CONTROL_NS"))
405
/* Write the Non-secure CONTROL register (from Secure state), then ISB so
   the change takes effect before subsequent instructions. */
__STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control)
{
  __arm_wsr("CONTROL_NS", control);
  __iar_builtin_ISB();
}
411
412 #define __TZ_get_PSP_NS() (__arm_rsr("PSP_NS"))
413 #define __TZ_set_PSP_NS(VALUE) (__arm_wsr("PSP_NS", (VALUE)))
414 #define __TZ_get_MSP_NS() (__arm_rsr("MSP_NS"))
415 #define __TZ_set_MSP_NS(VALUE) (__arm_wsr("MSP_NS", (VALUE)))
416 #define __TZ_get_SP_NS() (__arm_rsr("SP_NS"))
417 #define __TZ_set_SP_NS(VALUE) (__arm_wsr("SP_NS", (VALUE)))
418 #define __TZ_get_PRIMASK_NS() (__arm_rsr("PRIMASK_NS"))
419 #define __TZ_set_PRIMASK_NS(VALUE) (__arm_wsr("PRIMASK_NS", (VALUE)))
420 #define __TZ_get_BASEPRI_NS() (__arm_rsr("BASEPRI_NS"))
421 #define __TZ_set_BASEPRI_NS(VALUE) (__arm_wsr("BASEPRI_NS", (VALUE)))
422 #define __TZ_get_FAULTMASK_NS() (__arm_rsr("FAULTMASK_NS"))
423 #define __TZ_set_FAULTMASK_NS(VALUE)(__arm_wsr("FAULTMASK_NS", (VALUE)))
424
425 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
426 !(defined (__ARM_ARCH_8_1M_MAIN__ ) && (__ARM_ARCH_8_1M_MAIN__ == 1)) && \
427 (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
428 // without main extensions, the non-secure PSPLIM is RAZ/WI
429 #define __TZ_get_PSPLIM_NS() (0U)
430 #define __TZ_set_PSPLIM_NS(VALUE) ((void)(VALUE))
431 #else
432 #define __TZ_get_PSPLIM_NS() (__arm_rsr("PSPLIM_NS"))
433 #define __TZ_set_PSPLIM_NS(VALUE) (__arm_wsr("PSPLIM_NS", (VALUE)))
434 #endif
435
436 #define __TZ_get_MSPLIM_NS() (__arm_rsr("MSPLIM_NS"))
437 #define __TZ_set_MSPLIM_NS(VALUE) (__arm_wsr("MSPLIM_NS", (VALUE)))
438
439 #define __NOP __iar_builtin_no_operation
440
441 #define __CLZ __iar_builtin_CLZ
442
443 /*
444 * __iar_builtin_CLREX can be reordered w.r.t. STREX during high optimizations.
445 * As a workaround we use inline assembly and a memory barrier.
446 * (IAR issue EWARM-11901)
447 */
448 #define __CLREX() (__ASM volatile ("CLREX" ::: "memory"))
449
450 #define __DMB __iar_builtin_DMB
451 #define __DSB __iar_builtin_DSB
452 #define __ISB __iar_builtin_ISB
453
454 #define __LDREXB __iar_builtin_LDREXB
455 #define __LDREXH __iar_builtin_LDREXH
456 #define __LDREXW __iar_builtin_LDREX
457
458 #define __RBIT __iar_builtin_RBIT
459 #define __REV __iar_builtin_REV
460 #define __REV16 __iar_builtin_REV16
461
/* Reverse the byte order of a signed 16-bit value; the cast restores the
   int16_t type expected by callers of the CMSIS __REVSH intrinsic. */
__IAR_FT int16_t __REVSH(int16_t val)
{
  return (int16_t) __iar_builtin_REVSH(val);
}
466
467 #define __ROR __iar_builtin_ROR
468 #define __RRX __iar_builtin_RRX
469
470 #define __SEV __iar_builtin_SEV
471
472 #if !__IAR_M0_FAMILY
473 #define __SSAT __iar_builtin_SSAT
474 #endif
475
476 #define __STREXB __iar_builtin_STREXB
477 #define __STREXH __iar_builtin_STREXH
478 #define __STREXW __iar_builtin_STREX
479
480 #if !__IAR_M0_FAMILY
481 #define __USAT __iar_builtin_USAT
482 #endif
483
484 #define __WFE __iar_builtin_WFE
485 #define __WFI __iar_builtin_WFI
486
487 #if __ARM_MEDIA__
488 #define __SADD8 __iar_builtin_SADD8
489 #define __QADD8 __iar_builtin_QADD8
490 #define __SHADD8 __iar_builtin_SHADD8
491 #define __UADD8 __iar_builtin_UADD8
492 #define __UQADD8 __iar_builtin_UQADD8
493 #define __UHADD8 __iar_builtin_UHADD8
494 #define __SSUB8 __iar_builtin_SSUB8
495 #define __QSUB8 __iar_builtin_QSUB8
496 #define __SHSUB8 __iar_builtin_SHSUB8
497 #define __USUB8 __iar_builtin_USUB8
498 #define __UQSUB8 __iar_builtin_UQSUB8
499 #define __UHSUB8 __iar_builtin_UHSUB8
500 #define __SADD16 __iar_builtin_SADD16
501 #define __QADD16 __iar_builtin_QADD16
502 #define __SHADD16 __iar_builtin_SHADD16
503 #define __UADD16 __iar_builtin_UADD16
504 #define __UQADD16 __iar_builtin_UQADD16
505 #define __UHADD16 __iar_builtin_UHADD16
506 #define __SSUB16 __iar_builtin_SSUB16
507 #define __QSUB16 __iar_builtin_QSUB16
508 #define __SHSUB16 __iar_builtin_SHSUB16
509 #define __USUB16 __iar_builtin_USUB16
510 #define __UQSUB16 __iar_builtin_UQSUB16
511 #define __UHSUB16 __iar_builtin_UHSUB16
512 #define __SASX __iar_builtin_SASX
513 #define __QASX __iar_builtin_QASX
514 #define __SHASX __iar_builtin_SHASX
515 #define __UASX __iar_builtin_UASX
516 #define __UQASX __iar_builtin_UQASX
517 #define __UHASX __iar_builtin_UHASX
518 #define __SSAX __iar_builtin_SSAX
519 #define __QSAX __iar_builtin_QSAX
520 #define __SHSAX __iar_builtin_SHSAX
521 #define __USAX __iar_builtin_USAX
522 #define __UQSAX __iar_builtin_UQSAX
523 #define __UHSAX __iar_builtin_UHSAX
524 #define __USAD8 __iar_builtin_USAD8
525 #define __USADA8 __iar_builtin_USADA8
526 #define __SSAT16 __iar_builtin_SSAT16
527 #define __USAT16 __iar_builtin_USAT16
528 #define __UXTB16 __iar_builtin_UXTB16
529 #define __UXTAB16 __iar_builtin_UXTAB16
530 #define __SXTB16 __iar_builtin_SXTB16
531 #define __SXTAB16 __iar_builtin_SXTAB16
532 #define __SMUAD __iar_builtin_SMUAD
533 #define __SMUADX __iar_builtin_SMUADX
534 #define __SMMLA __iar_builtin_SMMLA
535 #define __SMLAD __iar_builtin_SMLAD
536 #define __SMLADX __iar_builtin_SMLADX
537 #define __SMLALD __iar_builtin_SMLALD
538 #define __SMLALDX __iar_builtin_SMLALDX
539 #define __SMUSD __iar_builtin_SMUSD
540 #define __SMUSDX __iar_builtin_SMUSDX
541 #define __SMLSD __iar_builtin_SMLSD
542 #define __SMLSDX __iar_builtin_SMLSDX
543 #define __SMLSLD __iar_builtin_SMLSLD
544 #define __SMLSLDX __iar_builtin_SMLSLDX
545 #define __SEL __iar_builtin_SEL
546 #define __QADD __iar_builtin_QADD
547 #define __QSUB __iar_builtin_QSUB
548 #define __PKHBT __iar_builtin_PKHBT
549 #define __PKHTB __iar_builtin_PKHTB
550 #endif
551
552 #else /* __ICCARM_INTRINSICS_VERSION__ == 2 */
553
554 #if __IAR_M0_FAMILY
555 /* Avoid clash between intrinsics.h and arm_math.h when compiling for Cortex-M0. */
556 #define __CLZ __cmsis_iar_clz_not_active
557 #define __SSAT __cmsis_iar_ssat_not_active
558 #define __USAT __cmsis_iar_usat_not_active
559 #define __RBIT __cmsis_iar_rbit_not_active
560 #define __get_APSR __cmsis_iar_get_APSR_not_active
561 #endif
562
563
564 #if (!((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
565 (defined (__FPU_USED ) && (__FPU_USED == 1U)) ))
566 #define __get_FPSCR __cmsis_iar_get_FPSR_not_active
567 #define __set_FPSCR __cmsis_iar_set_FPSR_not_active
568 #endif
569
570 #ifdef __INTRINSICS_INCLUDED
571 #error intrinsics.h is already included previously!
572 #endif
573
574 #include <intrinsics.h>
575
576 #if __IAR_M0_FAMILY
577 /* Avoid clash between intrinsics.h and arm_math.h when compiling for Cortex-M0. */
578 #undef __CLZ
579 #undef __SSAT
580 #undef __USAT
581 #undef __RBIT
582 #undef __get_APSR
583
__CLZ(uint32_t data)584 __STATIC_INLINE uint8_t __CLZ(uint32_t data)
585 {
586 if (data == 0U) { return 32U; }
587
588 uint32_t count = 0U;
589 uint32_t mask = 0x80000000U;
590
591 while ((data & mask) == 0U)
592 {
593 count += 1U;
594 mask = mask >> 1U;
595 }
596 return count;
597 }
598
__RBIT(uint32_t v)599 __STATIC_INLINE uint32_t __RBIT(uint32_t v)
600 {
601 uint8_t sc = 31U;
602 uint32_t r = v;
603 for (v >>= 1U; v; v >>= 1U)
604 {
605 r <<= 1U;
606 r |= v & 1U;
607 sc--;
608 }
609 return (r << sc);
610 }
611
/* Read the Application Program Status Register via MRS. */
__STATIC_INLINE uint32_t __get_APSR(void)
{
  uint32_t res;
  __asm("MRS %0,APSR" : "=r" (res));
  return res;
}
618
619 #endif
620
621 #if (!(defined (__ARM_FP) && (__ARM_FP >= 1)))
622 #undef __get_FPSCR
623 #undef __set_FPSCR
624 #define __get_FPSCR() (0)
625 #define __set_FPSCR(VALUE) ((void)VALUE)
626 #endif
627
628 #pragma diag_suppress=Pe940
629 #pragma diag_suppress=Pe177
630
631 #define __enable_irq __enable_interrupt
632 #define __disable_irq __disable_interrupt
633 #define __NOP __no_operation
634
635 #define __get_xPSR __get_PSR
636
#if (!defined(__ARM_ARCH_6M__) || __ARM_ARCH_6M__==0)

/* Exclusive load of a 32-bit word; adapts the CMSIS pointer type to the
   intrinsics.h __LDREX signature. Not available on Armv6-M. */
__IAR_FT uint32_t __LDREXW(uint32_t volatile *ptr)
{
  return __LDREX((unsigned long *)ptr);
}

/* Exclusive store of a 32-bit word; returns 0 on success, 1 if the
   exclusive monitor was lost (wraps intrinsics.h __STREX). */
__IAR_FT uint32_t __STREXW(uint32_t value, uint32_t volatile *ptr)
{
  return __STREX(value, (unsigned long *)ptr);
}
#endif
649
650
651 /* __CORTEX_M is defined in core_cm0.h, core_cm3.h and core_cm4.h. */
#if (__CORTEX_M >= 0x03)

/* Rotate Right with Extend: shifts value right by one, inserting the
   Carry flag into bit 31. */
__IAR_FT uint32_t __RRX(uint32_t value)
{
  uint32_t result;
  __ASM volatile("RRX %0, %1" : "=r"(result) : "r" (value));
  return(result);
}

/* Write BASEPRI_MAX: raises BASEPRI only if the new value is a higher
   priority (lower number) than the current one. */
__IAR_FT void __set_BASEPRI_MAX(uint32_t value)
{
  __asm volatile("MSR BASEPRI_MAX,%0"::"r" (value));
}

/* Set FAULTMASK via CPSID F: disables all configurable exceptions. */
__IAR_FT void __disable_fault_irq()
{
  __ASM volatile ("CPSID F" ::: "memory");
}

/* Clear FAULTMASK via CPSIE F: re-enables the configurable exceptions. */
__IAR_FT void __enable_fault_irq()
{
  __ASM volatile ("CPSIE F" ::: "memory");
}


#endif /* (__CORTEX_M >= 0x03) */
678
__ROR(uint32_t op1,uint32_t op2)679 __IAR_FT uint32_t __ROR(uint32_t op1, uint32_t op2)
680 {
681 return (op1 >> op2) | (op1 << ((sizeof(op1)*8)-op2));
682 }
683
684 #if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
685 (defined (__ARM_ARCH_8_1M_MAIN__ ) && (__ARM_ARCH_8_1M_MAIN__ == 1)) || \
686 (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) )
687
/* Read the Main Stack Pointer Limit (MSPLIM). Returns 0 where the
   stack-limit registers are not implemented (no Main Extension and not
   compiling Secure code). */
__IAR_FT uint32_t __get_MSPLIM(void)
{
  uint32_t res;
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
     !(defined (__ARM_ARCH_8_1M_MAIN__ ) && (__ARM_ARCH_8_1M_MAIN__ == 1)) && \
     (!defined (__ARM_FEATURE_CMSE ) || (__ARM_FEATURE_CMSE < 3)))
  // without main extension and secure, there is no stack limit check.
  res = 0U;
#else
  __asm volatile("MRS %0,MSPLIM" : "=r" (res));
#endif
  return res;
}

/* Write the Main Stack Pointer Limit (MSPLIM); a no-op where the register
   is not implemented. */
__IAR_FT void __set_MSPLIM(uint32_t value)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
     !(defined (__ARM_ARCH_8_1M_MAIN__ ) && (__ARM_ARCH_8_1M_MAIN__ == 1)) && \
     (!defined (__ARM_FEATURE_CMSE ) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions and secure, there is no stack limit check.
  (void)value;
#else
  __asm volatile("MSR MSPLIM,%0" :: "r" (value));
#endif
}

/* Read the Process Stack Pointer Limit (PSPLIM). Returns 0 where the
   register is not implemented. */
__IAR_FT uint32_t __get_PSPLIM(void)
{
  uint32_t res;
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
     !(defined (__ARM_ARCH_8_1M_MAIN__ ) && (__ARM_ARCH_8_1M_MAIN__ == 1)) && \
     (!defined (__ARM_FEATURE_CMSE ) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions and secure, there is no stack limit check.
  res = 0U;
#else
  __asm volatile("MRS %0,PSPLIM" : "=r" (res));
#endif
  return res;
}

/* Write the Process Stack Pointer Limit (PSPLIM); a no-op where the
   register is not implemented. */
__IAR_FT void __set_PSPLIM(uint32_t value)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
     !(defined (__ARM_ARCH_8_1M_MAIN__ ) && (__ARM_ARCH_8_1M_MAIN__ == 1)) && \
     (!defined (__ARM_FEATURE_CMSE ) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions and secure, there is no stack limit check.
  (void)value;
#else
  __asm volatile("MSR PSPLIM,%0" :: "r" (value));
#endif
}
739
/* Read the Non-secure CONTROL register (from Secure state). */
__IAR_FT uint32_t __TZ_get_CONTROL_NS(void)
{
  uint32_t res;
  __asm volatile("MRS %0,CONTROL_NS" : "=r" (res));
  return res;
}

/* Write the Non-secure CONTROL register, then ISB so the change takes
   effect before subsequent instructions. */
__IAR_FT void __TZ_set_CONTROL_NS(uint32_t value)
{
  __asm volatile("MSR CONTROL_NS,%0" :: "r" (value));
  __iar_builtin_ISB();
}

/* Read the Non-secure Process Stack Pointer. */
__IAR_FT uint32_t __TZ_get_PSP_NS(void)
{
  uint32_t res;
  __asm volatile("MRS %0,PSP_NS" : "=r" (res));
  return res;
}

/* Write the Non-secure Process Stack Pointer. */
__IAR_FT void __TZ_set_PSP_NS(uint32_t value)
{
  __asm volatile("MSR PSP_NS,%0" :: "r" (value));
}

/* Read the Non-secure Main Stack Pointer. */
__IAR_FT uint32_t __TZ_get_MSP_NS(void)
{
  uint32_t res;
  __asm volatile("MRS %0,MSP_NS" : "=r" (res));
  return res;
}

/* Write the Non-secure Main Stack Pointer. */
__IAR_FT void __TZ_set_MSP_NS(uint32_t value)
{
  __asm volatile("MSR MSP_NS,%0" :: "r" (value));
}

/* Read the Non-secure Stack Pointer (current SP of the Non-secure state). */
__IAR_FT uint32_t __TZ_get_SP_NS(void)
{
  uint32_t res;
  __asm volatile("MRS %0,SP_NS" : "=r" (res));
  return res;
}
/* Write the Non-secure Stack Pointer. */
__IAR_FT void __TZ_set_SP_NS(uint32_t value)
{
  __asm volatile("MSR SP_NS,%0" :: "r" (value));
}
787
/* Read the Non-secure PRIMASK register. */
__IAR_FT uint32_t __TZ_get_PRIMASK_NS(void)
{
  uint32_t res;
  __asm volatile("MRS %0,PRIMASK_NS" : "=r" (res));
  return res;
}

/* Write the Non-secure PRIMASK register. */
__IAR_FT void __TZ_set_PRIMASK_NS(uint32_t value)
{
  __asm volatile("MSR PRIMASK_NS,%0" :: "r" (value));
}

/* Read the Non-secure BASEPRI register. */
__IAR_FT uint32_t __TZ_get_BASEPRI_NS(void)
{
  uint32_t res;
  __asm volatile("MRS %0,BASEPRI_NS" : "=r" (res));
  return res;
}

/* Write the Non-secure BASEPRI register. */
__IAR_FT void __TZ_set_BASEPRI_NS(uint32_t value)
{
  __asm volatile("MSR BASEPRI_NS,%0" :: "r" (value));
}

/* Read the Non-secure FAULTMASK register. */
__IAR_FT uint32_t __TZ_get_FAULTMASK_NS(void)
{
  uint32_t res;
  __asm volatile("MRS %0,FAULTMASK_NS" : "=r" (res));
  return res;
}

/* Write the Non-secure FAULTMASK register. */
__IAR_FT void __TZ_set_FAULTMASK_NS(uint32_t value)
{
  __asm volatile("MSR FAULTMASK_NS,%0" :: "r" (value));
}
823
/* Read the Non-secure Process Stack Pointer Limit. Without the Main
   Extension the Non-secure PSPLIM is RAZ/WI, so 0 is returned. */
__IAR_FT uint32_t __TZ_get_PSPLIM_NS(void)
{
  uint32_t res;
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
     !(defined (__ARM_ARCH_8_1M_MAIN__ ) && (__ARM_ARCH_8_1M_MAIN__ == 1)) && \
     (!defined (__ARM_FEATURE_CMSE ) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  res = 0U;
#else
  __asm volatile("MRS %0,PSPLIM_NS" : "=r" (res));
#endif
  return res;
}

/* Write the Non-secure Process Stack Pointer Limit; a no-op where the
   register is RAZ/WI. */
__IAR_FT void __TZ_set_PSPLIM_NS(uint32_t value)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
     !(defined (__ARM_ARCH_8_1M_MAIN__ ) && (__ARM_ARCH_8_1M_MAIN__ == 1)) && \
     (!defined (__ARM_FEATURE_CMSE ) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  (void)value;
#else
  __asm volatile("MSR PSPLIM_NS,%0" :: "r" (value));
#endif
}

/* Read the Non-secure Main Stack Pointer Limit. */
__IAR_FT uint32_t __TZ_get_MSPLIM_NS(void)
{
  uint32_t res;
  __asm volatile("MRS %0,MSPLIM_NS" : "=r" (res));
  return res;
}

/* Write the Non-secure Main Stack Pointer Limit. */
__IAR_FT void __TZ_set_MSPLIM_NS(uint32_t value)
{
  __asm volatile("MSR MSPLIM_NS,%0" :: "r" (value));
}
861
862 #endif /* __ARM_ARCH_8M_MAIN__ or __ARM_ARCH_8M_BASE__ or __ARM_ARCH_8_1M_MAIN__ */
863
864 #endif /* __ICCARM_INTRINSICS_VERSION__ == 2 */
865
866 #define __BKPT(value) __asm volatile ("BKPT %0" : : "i"(value))
867
868 #if __IAR_M0_FAMILY
__SSAT(int32_t val,uint32_t sat)869 __STATIC_INLINE int32_t __SSAT(int32_t val, uint32_t sat)
870 {
871 if ((sat >= 1U) && (sat <= 32U))
872 {
873 const int32_t max = (int32_t)((1U << (sat - 1U)) - 1U);
874 const int32_t min = -1 - max ;
875 if (val > max)
876 {
877 return max;
878 }
879 else if (val < min)
880 {
881 return min;
882 }
883 }
884 return val;
885 }
886
__USAT(int32_t val,uint32_t sat)887 __STATIC_INLINE uint32_t __USAT(int32_t val, uint32_t sat)
888 {
889 if (sat <= 31U)
890 {
891 const uint32_t max = ((1U << sat) - 1U);
892 if (val > (int32_t)max)
893 {
894 return max;
895 }
896 else if (val < 0)
897 {
898 return 0U;
899 }
900 }
901 return (uint32_t)val;
902 }
903 #endif
904
905 #if (__CORTEX_M >= 0x03) /* __CORTEX_M is defined in core_cm0.h, core_cm3.h and core_cm4.h. */
906
/* Unprivileged load of a byte (LDRBT): performs the access with
   unprivileged permissions regardless of the current privilege level. */
__IAR_FT uint8_t __LDRBT(volatile uint8_t *addr)
{
  uint32_t res;
  __ASM volatile ("LDRBT %0, [%1]" : "=r" (res) : "r" (addr) : "memory");
  return ((uint8_t)res);
}

/* Unprivileged load of a halfword (LDRHT). */
__IAR_FT uint16_t __LDRHT(volatile uint16_t *addr)
{
  uint32_t res;
  __ASM volatile ("LDRHT %0, [%1]" : "=r" (res) : "r" (addr) : "memory");
  return ((uint16_t)res);
}

/* Unprivileged load of a word (LDRT). */
__IAR_FT uint32_t __LDRT(volatile uint32_t *addr)
{
  uint32_t res;
  __ASM volatile ("LDRT %0, [%1]" : "=r" (res) : "r" (addr) : "memory");
  return res;
}

/* Unprivileged store of a byte (STRBT). */
__IAR_FT void __STRBT(uint8_t value, volatile uint8_t *addr)
{
  __ASM volatile ("STRBT %1, [%0]" : : "r" (addr), "r" ((uint32_t)value) : "memory");
}

/* Unprivileged store of a halfword (STRHT). */
__IAR_FT void __STRHT(uint16_t value, volatile uint16_t *addr)
{
  __ASM volatile ("STRHT %1, [%0]" : : "r" (addr), "r" ((uint32_t)value) : "memory");
}

/* Unprivileged store of a word (STRT). */
__IAR_FT void __STRT(uint32_t value, volatile uint32_t *addr)
{
  __ASM volatile ("STRT %1, [%0]" : : "r" (addr), "r" (value) : "memory");
}
942
943 #endif /* (__CORTEX_M >= 0x03) */
944
945 #if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
946 (defined (__ARM_ARCH_8_1M_MAIN__ ) && (__ARM_ARCH_8_1M_MAIN__ == 1)) || \
947 (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) )
948
949
/* Load-Acquire of a byte (LDAB, Armv8-M): load with acquire ordering. */
__IAR_FT uint8_t __LDAB(volatile uint8_t *ptr)
{
  uint32_t res;
  __ASM volatile ("LDAB %0, [%1]" : "=r" (res) : "r" (ptr) : "memory");
  return ((uint8_t)res);
}

/* Load-Acquire of a halfword (LDAH). */
__IAR_FT uint16_t __LDAH(volatile uint16_t *ptr)
{
  uint32_t res;
  __ASM volatile ("LDAH %0, [%1]" : "=r" (res) : "r" (ptr) : "memory");
  return ((uint16_t)res);
}

/* Load-Acquire of a word (LDA). */
__IAR_FT uint32_t __LDA(volatile uint32_t *ptr)
{
  uint32_t res;
  __ASM volatile ("LDA %0, [%1]" : "=r" (res) : "r" (ptr) : "memory");
  return res;
}

/* Store-Release of a byte (STLB): store with release ordering. */
__IAR_FT void __STLB(uint8_t value, volatile uint8_t *ptr)
{
  __ASM volatile ("STLB %1, [%0]" :: "r" (ptr), "r" (value) : "memory");
}

/* Store-Release of a halfword (STLH). */
__IAR_FT void __STLH(uint16_t value, volatile uint16_t *ptr)
{
  __ASM volatile ("STLH %1, [%0]" :: "r" (ptr), "r" (value) : "memory");
}

/* Store-Release of a word (STL). */
__IAR_FT void __STL(uint32_t value, volatile uint32_t *ptr)
{
  __ASM volatile ("STL %1, [%0]" :: "r" (ptr), "r" (value) : "memory");
}
985
/* Load-Acquire Exclusive of a byte (LDAEXB): exclusive load with acquire
   ordering, for use with the matching STLEX* store. */
__IAR_FT uint8_t __LDAEXB(volatile uint8_t *ptr)
{
  uint32_t res;
  __ASM volatile ("LDAEXB %0, [%1]" : "=r" (res) : "r" (ptr) : "memory");
  return ((uint8_t)res);
}

/* Load-Acquire Exclusive of a halfword (LDAEXH). */
__IAR_FT uint16_t __LDAEXH(volatile uint16_t *ptr)
{
  uint32_t res;
  __ASM volatile ("LDAEXH %0, [%1]" : "=r" (res) : "r" (ptr) : "memory");
  return ((uint16_t)res);
}

/* Load-Acquire Exclusive of a word (LDAEX). */
__IAR_FT uint32_t __LDAEX(volatile uint32_t *ptr)
{
  uint32_t res;
  __ASM volatile ("LDAEX %0, [%1]" : "=r" (res) : "r" (ptr) : "memory");
  return res;
}

/* Store-Release Exclusive of a byte (STLEXB): returns 0 on success, 1 if
   the exclusive monitor was lost. The "=&r" early-clobber keeps the status
   register distinct from the inputs. */
__IAR_FT uint32_t __STLEXB(uint8_t value, volatile uint8_t *ptr)
{
  uint32_t res;
  __ASM volatile ("STLEXB %0, %2, [%1]" : "=&r" (res) : "r" (ptr), "r" (value) : "memory");
  return res;
}

/* Store-Release Exclusive of a halfword (STLEXH); returns 0 on success. */
__IAR_FT uint32_t __STLEXH(uint16_t value, volatile uint16_t *ptr)
{
  uint32_t res;
  __ASM volatile ("STLEXH %0, %2, [%1]" : "=&r" (res) : "r" (ptr), "r" (value) : "memory");
  return res;
}

/* Store-Release Exclusive of a word (STLEX); returns 0 on success. */
__IAR_FT uint32_t __STLEX(uint32_t value, volatile uint32_t *ptr)
{
  uint32_t res;
  __ASM volatile ("STLEX %0, %2, [%1]" : "=&r" (res) : "r" (ptr), "r" (value) : "memory");
  return res;
}
1027
1028 #endif /* __ARM_ARCH_8M_MAIN__ or __ARM_ARCH_8M_BASE__ */
1029
1030 #undef __IAR_FT
1031 #undef __IAR_M0_FAMILY
1032 #undef __ICCARM_V8
1033
1034 #pragma diag_default=Pe940
1035 #pragma diag_default=Pe177
1036
1037 #define __SXTB16_RORn(ARG1, ARG2) __SXTB16(__ROR(ARG1, ARG2))
1038
1039 #define __SXTAB16_RORn(ARG1, ARG2, ARG3) __SXTAB16(ARG1, __ROR(ARG2, ARG3))
1040
1041 #endif /* __CMSIS_ICCARM_M_H__ */
1042