1 /*
2  * Copyright (c) 2021 Carlo Caione <ccaione@baylibre.com>
3  *
4  * SPDX-License-Identifier: Apache-2.0
5  */
6 
7 #ifndef ZEPHYR_INCLUDE_ARCH_ARM64_LIB_HELPERS_H_
8 #define ZEPHYR_INCLUDE_ARCH_ARM64_LIB_HELPERS_H_
9 
10 #ifndef _ASMLANGUAGE
11 
12 #include <arch/arm64/cpu.h>
13 #include <stdint.h>
14 
15 /* All the macros need a memory clobber */
16 
/*
 * Read a 64-bit AArch64 system register via MRS and yield its value
 * (statement-expression). The "memory" clobber orders the access against
 * surrounding memory operations so the compiler cannot move it.
 */
#define read_sysreg(reg)						\
({									\
	uint64_t val;							\
	__asm__ volatile ("mrs %0, " STRINGIFY(reg)			\
			  : "=r" (val) :: "memory");			\
	val;								\
})
24 
/*
 * Write @val to an AArch64 system register via MSR. The "memory" clobber
 * keeps the write ordered with respect to surrounding memory accesses.
 */
#define write_sysreg(val, reg)						\
({									\
	__asm__ volatile ("msr " STRINGIFY(reg) ", %0"			\
			  :: "r" (val) : "memory");			\
})
30 
/*
 * Clear an AArch64 system register by writing XZR (the zero register),
 * avoiding the need to materialize a zero in a GP register first.
 */
#define zero_sysreg(reg)						\
({									\
	__asm__ volatile ("msr " STRINGIFY(reg) ", xzr"			\
			  ::: "memory");				\
})
36 
/*
 * Generate typed accessors for system register @reg:
 *   read_<reg>()      -> uint64_t current value
 *   write_<reg>(val)  -> store @val to the register
 *   zero_<reg>()      -> clear the register via XZR
 */
#define MAKE_REG_HELPER(reg)						\
	static ALWAYS_INLINE uint64_t read_##reg(void)			\
	{								\
		return read_sysreg(reg);				\
	}								\
	static ALWAYS_INLINE void write_##reg(uint64_t val)		\
	{								\
		write_sysreg(val, reg);					\
	}								\
	static ALWAYS_INLINE void zero_##reg(void)			\
	{								\
		zero_sysreg(reg);					\
	}
50 
/*
 * Generate the accessor trio for the EL1, EL2 and EL3 banked variants of
 * a register (e.g. sctlr -> sctlr_el1/sctlr_el2/sctlr_el3).
 */
#define MAKE_REG_HELPER_EL123(reg) \
	MAKE_REG_HELPER(reg##_el1) \
	MAKE_REG_HELPER(reg##_el2) \
	MAKE_REG_HELPER(reg##_el3)
55 
/*
 * Accessors for commonly used system registers (timers, feature ID,
 * cache topology, multiprocessor affinity). Terminate every invocation
 * with ';' for consistency -- three entries previously lacked it.
 */
MAKE_REG_HELPER(cntfrq_el0);
MAKE_REG_HELPER(cnthctl_el2);
MAKE_REG_HELPER(cnthp_ctl_el2);
MAKE_REG_HELPER(cnthps_ctl_el2);
MAKE_REG_HELPER(cntv_ctl_el0);
MAKE_REG_HELPER(cntv_cval_el0);
MAKE_REG_HELPER(cntvct_el0);
MAKE_REG_HELPER(cntvoff_el2);
MAKE_REG_HELPER(currentel);
MAKE_REG_HELPER(daif);
MAKE_REG_HELPER(hcr_el2);
MAKE_REG_HELPER(id_aa64pfr0_el1);
MAKE_REG_HELPER(id_aa64mmfr0_el1);
MAKE_REG_HELPER(scr_el3);
MAKE_REG_HELPER(tpidrro_el0);
MAKE_REG_HELPER(clidr_el1);
MAKE_REG_HELPER(csselr_el1);
MAKE_REG_HELPER(ccsidr_el1);
MAKE_REG_HELPER(vmpidr_el2);
MAKE_REG_HELPER(mpidr_el1);
76 
/* EL1/EL2/EL3 banked control and fault registers. */
MAKE_REG_HELPER_EL123(actlr)
MAKE_REG_HELPER_EL123(cpacr)
MAKE_REG_HELPER_EL123(cptr)
MAKE_REG_HELPER_EL123(elr)
MAKE_REG_HELPER_EL123(esr)
MAKE_REG_HELPER_EL123(far)
MAKE_REG_HELPER_EL123(mair)
MAKE_REG_HELPER_EL123(sctlr)
MAKE_REG_HELPER_EL123(spsr)
MAKE_REG_HELPER_EL123(tcr)
MAKE_REG_HELPER_EL123(ttbr0)
MAKE_REG_HELPER_EL123(vbar)
89 
#if defined(CONFIG_ARM_MPU)
/* Armv8-R aarch64 mpu registers */
/*
 * Generic S<op0>_<op1>_c<CRn>_c<CRm>_<op2> encodings are used so the
 * assembler does not need to know these Armv8-R register names.
 */
#define mpuir_el1	S3_0_c0_c0_4
#define prselr_el1	S3_0_c6_c2_1
#define prbar_el1	S3_0_c6_c8_0
#define prlar_el1	S3_0_c6_c8_1

MAKE_REG_HELPER(mpuir_el1);
MAKE_REG_HELPER(prselr_el1);
MAKE_REG_HELPER(prbar_el1);
MAKE_REG_HELPER(prlar_el1);
#endif
102 
/* Unmask debug exceptions: clear the D bit in DAIF. */
static ALWAYS_INLINE void enable_debug_exceptions(void)
{
	__asm__ volatile ("msr DAIFClr, %0"
			  :: "i" (DAIFCLR_DBG_BIT) : "memory");
}
108 
/* Mask debug exceptions: set the D bit in DAIF. */
static ALWAYS_INLINE void disable_debug_exceptions(void)
{
	__asm__ volatile ("msr DAIFSet, %0"
			  :: "i" (DAIFSET_DBG_BIT) : "memory");
}
114 
/* Unmask SError (asynchronous abort) exceptions: clear the A bit in DAIF. */
static ALWAYS_INLINE void enable_serror_exceptions(void)
{
	__asm__ volatile ("msr DAIFClr, %0"
			  :: "i" (DAIFCLR_ABT_BIT) : "memory");
}
120 
/* Mask SError (asynchronous abort) exceptions: set the A bit in DAIF. */
static ALWAYS_INLINE void disable_serror_exceptions(void)
{
	__asm__ volatile ("msr DAIFSet, %0"
			  :: "i" (DAIFSET_ABT_BIT) : "memory");
}
126 
/* Unmask IRQs: clear the I bit in DAIF. */
static ALWAYS_INLINE void enable_irq(void)
{
	__asm__ volatile ("msr DAIFClr, %0"
			  :: "i" (DAIFCLR_IRQ_BIT) : "memory");
}
132 
/* Mask IRQs: set the I bit in DAIF. */
static ALWAYS_INLINE void disable_irq(void)
{
	__asm__ volatile ("msr DAIFSet, %0"
			  :: "i" (DAIFSET_IRQ_BIT) : "memory");
}
138 
/* Unmask FIQs: clear the F bit in DAIF. */
static ALWAYS_INLINE void enable_fiq(void)
{
	__asm__ volatile ("msr DAIFClr, %0"
			  :: "i" (DAIFCLR_FIQ_BIT) : "memory");
}
144 
/* Mask FIQs: set the F bit in DAIF. */
static ALWAYS_INLINE void disable_fiq(void)
{
	__asm__ volatile ("msr DAIFSet, %0"
			  :: "i" (DAIFSET_FIQ_BIT) : "memory");
}
150 
/* Event and wait instructions (used e.g. for spinning/idling). */
#define sev()	__asm__ volatile("sev" : : : "memory")
#define wfe()	__asm__ volatile("wfe" : : : "memory")
#define wfi()	__asm__ volatile("wfi" : : : "memory")

/* Full-system data/instruction barriers. */
#define dsb()	__asm__ volatile ("dsb sy" ::: "memory")
#define dmb()	__asm__ volatile ("dmb sy" ::: "memory")
#define isb()	__asm__ volatile ("isb" ::: "memory")

/* Zephyr needs these as well */
#define __ISB() isb()
#define __DMB() dmb()
#define __DSB() dsb()
163 
is_el_implemented(unsigned int el)164 static inline bool is_el_implemented(unsigned int el)
165 {
166 	unsigned int shift;
167 
168 	if (el > 3) {
169 		return false;
170 	}
171 
172 	shift = ID_AA64PFR0_EL1_SHIFT * el;
173 
174 	return (((read_id_aa64pfr0_el1() >> shift) & ID_AA64PFR0_ELX_MASK) != 0U);
175 }
176 
/*
 * Report whether the CPU is currently running at the highest exception
 * level it implements. The highest implemented EL is derived from the
 * topmost non-zero EL0..EL3 field of ID_AA64PFR0_EL1.
 *
 * NOTE(review): __builtin_clz(0) is undefined; this relies on the EL0
 * field always being non-zero (EL0 must be implemented) -- confirm.
 */
static inline bool is_el_highest_implemented(void)
{
	uint32_t el_highest;
	uint32_t curr_el;

	el_highest = read_id_aa64pfr0_el1() & 0xFFFF;
	el_highest = (31U - __builtin_clz(el_highest)) / 4;

	curr_el = GET_EL(read_currentel());

	/* Direct comparison replaces the unbraced if/return pair. */
	return curr_el >= el_highest;
}
192 
is_el2_sec_supported(void)193 static inline bool is_el2_sec_supported(void)
194 {
195 	return (((read_id_aa64pfr0_el1() >> ID_AA64PFR0_SEL2_SHIFT) &
196 		ID_AA64PFR0_SEL2_MASK) != 0U);
197 }
198 
/*
 * Report whether the CPU is in the Secure state. Decided at build time
 * from CONFIG_ARMV8_A_NS because SCR_EL3.NS is not readable below EL3.
 */
static inline bool is_in_secure_state(void)
{
	/* We cannot read SCR_EL3 from EL2 or EL1 */
	return !IS_ENABLED(CONFIG_ARMV8_A_NS);
}
204 
205 #endif /* !_ASMLANGUAGE */
206 
207 #endif /* ZEPHYR_INCLUDE_ARCH_ARM64_LIB_HELPERS_H_ */
208