/*
 * Copyright (c) 2021 Carlo Caione <ccaione@baylibre.com>
 *
 * SPDX-License-Identifier: Apache-2.0
 */

#ifndef ZEPHYR_INCLUDE_ARCH_ARM64_LIB_HELPERS_H_
#define ZEPHYR_INCLUDE_ARCH_ARM64_LIB_HELPERS_H_

#ifndef _ASMLANGUAGE

#include <zephyr/arch/arm64/cpu.h>
#include <stdint.h>

/*
 * All the macros below use a "memory" clobber so the compiler cannot cache
 * or reorder memory accesses across the system register operation.
 */

#define read_sysreg(reg) \
({ \
	uint64_t reg_val; \
	__asm__ volatile ("mrs %0, " STRINGIFY(reg) \
			  : "=r" (reg_val) :: "memory"); \
	reg_val; \
})

#define write_sysreg(val, reg) \
({ \
	__asm__ volatile ("msr " STRINGIFY(reg) ", %0" \
			  :: "r" (val) : "memory"); \
})

#define zero_sysreg(reg) \
({ \
	__asm__ volatile ("msr " STRINGIFY(reg) ", xzr" \
			  ::: "memory"); \
})
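
/*
 * Usage sketch (illustrative only): the register argument is pasted
 * directly into the MRS/MSR instruction, so it must be a name (or S-form
 * encoding) the assembler accepts.
 *
 *   uint64_t mpidr = read_sysreg(mpidr_el1);
 *   write_sysreg(mpidr, vmpidr_el2);
 *   zero_sysreg(tpidrro_el0);
 */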

#define MAKE_REG_HELPER(reg) \
	static ALWAYS_INLINE uint64_t read_##reg(void) \
	{ \
		return read_sysreg(reg); \
	} \
	static ALWAYS_INLINE void write_##reg(uint64_t val) \
	{ \
		write_sysreg(val, reg); \
	} \
	static ALWAYS_INLINE void zero_##reg(void) \
	{ \
		zero_sysreg(reg); \
	}

#define MAKE_REG_HELPER_EL123(reg) \
	MAKE_REG_HELPER(reg##_el1) \
	MAKE_REG_HELPER(reg##_el2) \
	MAKE_REG_HELPER(reg##_el3)
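
/*
 * Expansion example: MAKE_REG_HELPER(daif) defines read_daif(),
 * write_daif() and zero_daif(); MAKE_REG_HELPER_EL123(spsr) does the same
 * for spsr_el1, spsr_el2 and spsr_el3, e.g.:
 *
 *   uint64_t spsr = read_spsr_el1();
 */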

MAKE_REG_HELPER(ccsidr_el1);
MAKE_REG_HELPER(clidr_el1);
MAKE_REG_HELPER(cntfrq_el0);
MAKE_REG_HELPER(cnthctl_el2);
MAKE_REG_HELPER(cnthp_ctl_el2);
MAKE_REG_HELPER(cnthps_ctl_el2);
MAKE_REG_HELPER(cntv_ctl_el0);
MAKE_REG_HELPER(cntv_cval_el0);
MAKE_REG_HELPER(cntvct_el0);
MAKE_REG_HELPER(cntvoff_el2);
MAKE_REG_HELPER(currentel);
MAKE_REG_HELPER(csselr_el1);
MAKE_REG_HELPER(daif);
MAKE_REG_HELPER(hcr_el2);
MAKE_REG_HELPER(id_aa64pfr0_el1);
MAKE_REG_HELPER(id_aa64mmfr0_el1);
MAKE_REG_HELPER(mpidr_el1);
MAKE_REG_HELPER(par_el1);
#if !defined(CONFIG_ARMV8_R)
MAKE_REG_HELPER(scr_el3);
#endif /* !CONFIG_ARMV8_R */
MAKE_REG_HELPER(tpidrro_el0);
MAKE_REG_HELPER(vmpidr_el2);
MAKE_REG_HELPER(sp_el0);

MAKE_REG_HELPER_EL123(actlr)
MAKE_REG_HELPER_EL123(cpacr)
MAKE_REG_HELPER_EL123(cptr)
MAKE_REG_HELPER_EL123(elr)
MAKE_REG_HELPER_EL123(esr)
MAKE_REG_HELPER_EL123(far)
MAKE_REG_HELPER_EL123(mair)
MAKE_REG_HELPER_EL123(sctlr)
MAKE_REG_HELPER_EL123(spsr)
MAKE_REG_HELPER_EL123(tcr)
MAKE_REG_HELPER_EL123(ttbr0)
MAKE_REG_HELPER_EL123(vbar)

#if defined(CONFIG_ARM_MPU)
/* Armv8-R AArch64 MPU registers */
#define mpuir_el1	S3_0_c0_c0_4
#define prselr_el1	S3_0_c6_c2_1
#define prbar_el1	S3_0_c6_c8_0
#define prlar_el1	S3_0_c6_c8_1

MAKE_REG_HELPER(mpuir_el1);
MAKE_REG_HELPER(prselr_el1);
MAKE_REG_HELPER(prbar_el1);
MAKE_REG_HELPER(prlar_el1);
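
/*
 * Usage sketch (assumes EL1 on an Armv8-R core): select a region via
 * PRSELR_EL1, synchronize the context so the selection takes effect, then
 * access that region's base/limit registers:
 *
 *   write_prselr_el1(region_idx);
 *   __asm__ volatile ("isb" ::: "memory");
 *   uint64_t base  = read_prbar_el1();
 *   uint64_t limit = read_prlar_el1();
 */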
#endif /* CONFIG_ARM_MPU */

static ALWAYS_INLINE void enable_debug_exceptions(void)
{
	__asm__ volatile ("msr DAIFClr, %0"
			  :: "i" (DAIFCLR_DBG_BIT) : "memory");
}

static ALWAYS_INLINE void disable_debug_exceptions(void)
{
	__asm__ volatile ("msr DAIFSet, %0"
			  :: "i" (DAIFSET_DBG_BIT) : "memory");
}

static ALWAYS_INLINE void enable_serror_exceptions(void)
{
	__asm__ volatile ("msr DAIFClr, %0"
			  :: "i" (DAIFCLR_ABT_BIT) : "memory");
}

static ALWAYS_INLINE void disable_serror_exceptions(void)
{
	__asm__ volatile ("msr DAIFSet, %0"
			  :: "i" (DAIFSET_ABT_BIT) : "memory");
}

static ALWAYS_INLINE void enable_irq(void)
{
	__asm__ volatile ("msr DAIFClr, %0"
			  :: "i" (DAIFCLR_IRQ_BIT) : "memory");
}

static ALWAYS_INLINE void disable_irq(void)
{
	__asm__ volatile ("msr DAIFSet, %0"
			  :: "i" (DAIFSET_IRQ_BIT) : "memory");
}

static ALWAYS_INLINE void enable_fiq(void)
{
	__asm__ volatile ("msr DAIFClr, %0"
			  :: "i" (DAIFCLR_FIQ_BIT) : "memory");
}

static ALWAYS_INLINE void disable_fiq(void)
{
	__asm__ volatile ("msr DAIFSet, %0"
			  :: "i" (DAIFSET_FIQ_BIT) : "memory");
}
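
/*
 * Usage sketch: a minimal critical section built from the helpers above.
 * Real code would typically save and restore the prior DAIF state via
 * read_daif()/write_daif() rather than unconditionally re-enabling:
 *
 *   disable_irq();
 *   ... update data shared with IRQ context ...
 *   enable_irq();
 */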

#define sev()	__asm__ volatile("sev" : : : "memory")
#define wfe()	__asm__ volatile("wfe" : : : "memory")
#define wfi()	__asm__ volatile("wfi" : : : "memory")
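
/*
 * Usage sketch: wfe()/sev() form a low-power wait, assuming another CPU
 * sets the (hypothetical) flag and then issues sev():
 *
 *   while (!flag) {
 *           wfe();
 *   }
 */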

static inline bool is_el_implemented(unsigned int el)
{
	unsigned int shift;

	if (el > 3) {
		return false;
	}

	/* Each ELx field in ID_AA64PFR0_EL1 is 4 bits; non-zero means implemented */
	shift = ID_AA64PFR0_EL1_SHIFT * el;

	return (((read_id_aa64pfr0_el1() >> shift) & ID_AA64PFR0_ELX_MASK) != 0U);
}
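
/*
 * Example: gate an EL2-only feature on the result (sketch):
 *
 *   if (is_el_implemented(2)) {
 *           ... EL2 is available on this core ...
 *   }
 */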

static inline bool is_el_highest_implemented(void)
{
	uint32_t el_highest;
	uint32_t curr_el;

	/*
	 * The EL0..EL3 fields occupy bits [15:0] of ID_AA64PFR0_EL1, 4 bits
	 * each; the index of the highest non-zero field is the highest
	 * implemented EL (e.g. 0x0222 -> EL2).
	 */
	el_highest = read_id_aa64pfr0_el1() & 0xFFFF;
	el_highest = (31U - __builtin_clz(el_highest)) / 4;

	curr_el = GET_EL(read_currentel());

	if (curr_el < el_highest) {
		return false;
	}

	return true;
}

static inline bool is_el2_sec_supported(void)
{
	return (((read_id_aa64pfr0_el1() >> ID_AA64PFR0_SEL2_SHIFT) &
		ID_AA64PFR0_SEL2_MASK) != 0U);
}

static inline bool is_in_secure_state(void)
{
	/* We cannot read SCR_EL3 from EL2 or EL1 */
	return !IS_ENABLED(CONFIG_ARMV8_A_NS);
}

#endif /* !_ASMLANGUAGE */

#endif /* ZEPHYR_INCLUDE_ARCH_ARM64_LIB_HELPERS_H_ */