1 /* ARM AArch32 GCC specific public inline assembler functions and macros */
2
3 /*
4 * Copyright (c) 2015, Wind River Systems, Inc.
5 *
6 * SPDX-License-Identifier: Apache-2.0
7 */
8
9 /* Either public functions or macros or invoked by public functions */
10
11 #ifndef ZEPHYR_INCLUDE_ARCH_ARM_ASM_INLINE_GCC_H_
12 #define ZEPHYR_INCLUDE_ARCH_ARM_ASM_INLINE_GCC_H_
13
14 /*
15 * The file must not be included directly
16 * Include arch/cpu.h instead
17 */
18
19 #ifndef _ASMLANGUAGE
20
21 #include <zephyr/toolchain.h>
22 #include <zephyr/types.h>
23 #include <zephyr/arch/arm/exception.h>
24
25 #if defined(CONFIG_CPU_AARCH32_CORTEX_R) || defined(CONFIG_CPU_AARCH32_CORTEX_A)
26 #include <zephyr/arch/arm/cortex_a_r/cpu.h>
27 #endif
28
29 #ifdef __cplusplus
30 extern "C" {
31 #endif
32
33 /* On ARMv7-M and ARMv8-M Mainline CPUs, this function prevents regular
34 * exceptions (i.e. with interrupt priority lower than or equal to
35 * _EXC_IRQ_DEFAULT_PRIO) from interrupting the CPU. NMI, Faults, SVC,
36 * and Zero Latency IRQs (if supported) may still interrupt the CPU.
37 *
38 * On ARMv6-M and ARMv8-M Baseline CPUs, this function reads the value of
39 * PRIMASK which shows if interrupts are enabled, then disables all interrupts
40 * except NMI.
41 */
42
/**
 * @brief Lock interrupts and return the previous lock state.
 *
 * @return An architecture-dependent lock-state key to be passed to
 *         arch_irq_unlock(): the prior PRIMASK value on ARMv6-M/ARMv8-M
 *         Baseline, the prior BASEPRI value on ARMv7-M/ARMv8-M Mainline,
 *         or the CPSR I bit on Cortex-R/Cortex-A. In all cases a value of
 *         zero means interrupts were previously unlocked.
 */
static ALWAYS_INLINE unsigned int arch_irq_lock(void)
{
	unsigned int key;

#if defined(CONFIG_ARMV6_M_ARMV8_M_BASELINE)
#if CONFIG_MP_MAX_NUM_CPUS == 1 || defined(CONFIG_ARMV8_M_BASELINE)
	/* Save the current PRIMASK, then disable all interrupts except NMI. */
	__asm__ volatile("mrs %0, PRIMASK;"
		"cpsid i"
		: "=r" (key)
		:
		: "memory");
#else
#error "Cortex-M0 and Cortex-M0+ require SoC specific support for cross core synchronisation."
#endif
#elif defined(CONFIG_ARMV7_M_ARMV8_M_MAINLINE)
	unsigned int tmp;

	/* Save the current BASEPRI, then raise it to _EXC_IRQ_DEFAULT_PRIO
	 * via BASEPRI_MAX (which only ever raises the priority boundary,
	 * never lowers it). The ISB ensures the new boundary takes effect
	 * before any subsequent instruction executes.
	 */
	__asm__ volatile(
		"mov %1, %2;"
		"mrs %0, BASEPRI;"
		"msr BASEPRI_MAX, %1;"
		"isb;"
		: "=r"(key), "=r"(tmp)
		: "i"(_EXC_IRQ_DEFAULT_PRIO)
		: "memory");
#elif defined(CONFIG_ARMV7_R) || defined(CONFIG_AARCH32_ARMV8_R) \
	|| defined(CONFIG_ARMV7_A)
	/* Extract the I (IRQ disable) bit from the CPSR as the key, then
	 * disable IRQs. A non-zero key therefore means IRQs were already
	 * disabled on entry.
	 */
	__asm__ volatile(
		"mrs %0, cpsr;"
		"and %0, #" STRINGIFY(I_BIT) ";"
		"cpsid i;"
		: "=r" (key)
		:
		: "memory", "cc");
#else
#error Unknown ARM architecture
#endif /* CONFIG_ARMV6_M_ARMV8_M_BASELINE */

	return key;
}
83
84
85 /* On Cortex-M0/M0+, this enables all interrupts if they were not
86 * previously disabled.
87 */
88
/**
 * @brief Restore the interrupt lock state returned by arch_irq_lock().
 *
 * @param key Lock-state key previously obtained from arch_irq_lock().
 *            Interrupts are re-enabled only if they were unlocked when
 *            that key was captured (key == 0 on PRIMASK/CPSR targets;
 *            on BASEPRI targets the saved boundary is restored as-is).
 */
static ALWAYS_INLINE void arch_irq_unlock(unsigned int key)
{
#if defined(CONFIG_ARMV6_M_ARMV8_M_BASELINE)
	/* Non-zero key: interrupts were already disabled before the matching
	 * lock, so leave them disabled to support nested locking.
	 */
	if (key != 0U) {
		return;
	}
	__asm__ volatile(
		"cpsie i;"
		"isb"
		: : : "memory");
#elif defined(CONFIG_ARMV7_M_ARMV8_M_MAINLINE)
	/* Restore the saved BASEPRI boundary; the ISB makes the change take
	 * effect before subsequent instructions.
	 */
	__asm__ volatile(
		"msr BASEPRI, %0;"
		"isb;"
		: : "r"(key) : "memory");
#elif defined(CONFIG_ARMV7_R) || defined(CONFIG_AARCH32_ARMV8_R) \
	|| defined(CONFIG_ARMV7_A)
	/* Non-zero key: the CPSR I bit was set before the matching lock, so
	 * keep IRQs disabled (nested locking).
	 */
	if (key != 0U) {
		return;
	}
	__asm__ volatile(
		"cpsie i;"
		: : : "memory", "cc");
#else
#error Unknown ARM architecture
#endif /* CONFIG_ARMV6_M_ARMV8_M_BASELINE */
}
116
arch_irq_unlocked(unsigned int key)117 static ALWAYS_INLINE bool arch_irq_unlocked(unsigned int key)
118 {
119 /* This convention works for both PRIMASK and BASEPRI */
120 return key == 0U;
121 }
122
123 #ifdef __cplusplus
124 }
125 #endif
126
127 #endif /* _ASMLANGUAGE */
128
129 #endif /* ZEPHYR_INCLUDE_ARCH_ARM_ASM_INLINE_GCC_H_ */
130