1 /*
2  * Copyright (c) 2021 Carlo Caione <ccaione@baylibre.com>
3  *
4  * SPDX-License-Identifier: Apache-2.0
5  */
6 
7 #ifndef ZEPHYR_INCLUDE_ARCH_ARM64_LIB_HELPERS_H_
8 #define ZEPHYR_INCLUDE_ARCH_ARM64_LIB_HELPERS_H_
9 
10 #ifndef _ASMLANGUAGE
11 
12 #include <zephyr/arch/arm64/cpu.h>
13 #include <stdint.h>
14 
15 /* All the macros need a memory clobber */
16 
/*
 * Read the named system register and yield its 64-bit value.
 *
 * Expands to a statement expression, so it can be used like a function
 * call: uint64_t v = read_sysreg(sctlr_el1);
 * The "memory" clobber prevents the compiler from reordering the access
 * across surrounding memory operations.
 */
#define read_sysreg(reg)						\
({									\
	uint64_t reg_val;						\
	__asm__ volatile ("mrs %0, " STRINGIFY(reg)			\
			  : "=r" (reg_val) :: "memory");		\
	reg_val;							\
})
24 
/*
 * Write @val to the named system register.
 *
 * @val is evaluated exactly once (captured into a local) before the MSR.
 * The "memory" clobber orders the write against surrounding memory
 * operations.
 */
#define write_sysreg(val, reg)						\
({									\
	uint64_t reg_val = val;						\
	__asm__ volatile ("msr " STRINGIFY(reg) ", %0"			\
			  :: "r" (reg_val) : "memory");			\
})
31 
/*
 * Clear the named system register by writing the zero register (xzr),
 * avoiding the need for a scratch GPR.
 */
#define zero_sysreg(reg)						\
({									\
	__asm__ volatile ("msr " STRINGIFY(reg) ", xzr"			\
			  ::: "memory");				\
})
37 
/*
 * Generate typed accessors for one system register:
 *   read_<reg>()      - return the current 64-bit value
 *   write_<reg>(val)  - store @val into the register
 *   zero_<reg>()      - clear the register via xzr
 */
#define MAKE_REG_HELPER(reg)						\
	static ALWAYS_INLINE uint64_t read_##reg(void)			\
	{								\
		return read_sysreg(reg);				\
	}								\
	static ALWAYS_INLINE void write_##reg(uint64_t val)		\
	{								\
		write_sysreg(val, reg);					\
	}								\
	static ALWAYS_INLINE void zero_##reg(void)			\
	{								\
		zero_sysreg(reg);					\
	}
51 
/*
 * Generate the accessor triplet for all three banked variants
 * (<reg>_el1, <reg>_el2, <reg>_el3) of a register.
 */
#define MAKE_REG_HELPER_EL123(reg) \
	MAKE_REG_HELPER(reg##_el1) \
	MAKE_REG_HELPER(reg##_el2) \
	MAKE_REG_HELPER(reg##_el3)
56 
/*
 * Accessors for the individual system registers used by the AArch64
 * port.  Each invocation defines read_<reg>(), write_<reg>() and
 * zero_<reg>().
 *
 * Note: trailing semicolons were missing on three entries
 * (cntv_ctl_el0, cntv_cval_el0, daif); added for consistency with the
 * rest of the list.
 */
MAKE_REG_HELPER(ccsidr_el1);
MAKE_REG_HELPER(clidr_el1);
MAKE_REG_HELPER(cntfrq_el0);
MAKE_REG_HELPER(cnthctl_el2);
MAKE_REG_HELPER(cnthp_ctl_el2);
MAKE_REG_HELPER(cnthps_ctl_el2);
MAKE_REG_HELPER(cntv_ctl_el0);
MAKE_REG_HELPER(cntv_cval_el0);
MAKE_REG_HELPER(cntvct_el0);
MAKE_REG_HELPER(cntvoff_el2);
MAKE_REG_HELPER(currentel);
MAKE_REG_HELPER(csselr_el1);
MAKE_REG_HELPER(daif);
MAKE_REG_HELPER(hcr_el2);
MAKE_REG_HELPER(id_aa64pfr0_el1);
MAKE_REG_HELPER(id_aa64mmfr0_el1);
MAKE_REG_HELPER(mpidr_el1);
MAKE_REG_HELPER(par_el1);
#if !defined(CONFIG_ARMV8_R)
/* SCR_EL3 is only accessible where EL3 exists; not on Armv8-R */
MAKE_REG_HELPER(scr_el3);
#endif /* CONFIG_ARMV8_R */
MAKE_REG_HELPER(tpidrro_el0);
MAKE_REG_HELPER(vmpidr_el2);
MAKE_REG_HELPER(sp_el0);
81 
/* Accessors for registers banked per exception level (EL1/EL2/EL3) */
MAKE_REG_HELPER_EL123(actlr)
MAKE_REG_HELPER_EL123(cpacr)
MAKE_REG_HELPER_EL123(cptr)
MAKE_REG_HELPER_EL123(elr)
MAKE_REG_HELPER_EL123(esr)
MAKE_REG_HELPER_EL123(far)
MAKE_REG_HELPER_EL123(mair)
MAKE_REG_HELPER_EL123(sctlr)
MAKE_REG_HELPER_EL123(spsr)
MAKE_REG_HELPER_EL123(tcr)
MAKE_REG_HELPER_EL123(ttbr0)
MAKE_REG_HELPER_EL123(vbar)
94 
#if defined(CONFIG_ARM_MPU)
/* Armv8-R aarch64 mpu registers
 *
 * Spelled with the generic S<op0>_<op1>_c<n>_c<m>_<op2> encoding so the
 * assembler accepts them even when it does not know the register names.
 */
#define mpuir_el1	S3_0_c0_c0_4
#define prselr_el1	S3_0_c6_c2_1
#define prbar_el1	S3_0_c6_c8_0
#define prlar_el1	S3_0_c6_c8_1

MAKE_REG_HELPER(mpuir_el1);
MAKE_REG_HELPER(prselr_el1);
MAKE_REG_HELPER(prbar_el1);
MAKE_REG_HELPER(prlar_el1);
#endif
107 
/* Unmask debug exceptions at the current EL (clear DAIF.D). */
static ALWAYS_INLINE void enable_debug_exceptions(void)
{
	__asm__ volatile ("msr DAIFClr, %0"
			  :: "i" (DAIFCLR_DBG_BIT) : "memory");
}
113 
/* Mask debug exceptions at the current EL (set DAIF.D). */
static ALWAYS_INLINE void disable_debug_exceptions(void)
{
	__asm__ volatile ("msr DAIFSet, %0"
			  :: "i" (DAIFSET_DBG_BIT) : "memory");
}
119 
/* Unmask SError (asynchronous abort) exceptions (clear DAIF.A). */
static ALWAYS_INLINE void enable_serror_exceptions(void)
{
	__asm__ volatile ("msr DAIFClr, %0"
			  :: "i" (DAIFCLR_ABT_BIT) : "memory");
}
125 
/* Mask SError (asynchronous abort) exceptions (set DAIF.A). */
static ALWAYS_INLINE void disable_serror_exceptions(void)
{
	__asm__ volatile ("msr DAIFSet, %0"
			  :: "i" (DAIFSET_ABT_BIT) : "memory");
}
131 
/* Unmask IRQs at the current EL (clear DAIF.I). */
static ALWAYS_INLINE void enable_irq(void)
{
	__asm__ volatile ("msr DAIFClr, %0"
			  :: "i" (DAIFCLR_IRQ_BIT) : "memory");
}
137 
/* Mask IRQs at the current EL (set DAIF.I). */
static ALWAYS_INLINE void disable_irq(void)
{
	__asm__ volatile ("msr DAIFSet, %0"
			  :: "i" (DAIFSET_IRQ_BIT) : "memory");
}
143 
/* Unmask FIQs at the current EL (clear DAIF.F). */
static ALWAYS_INLINE void enable_fiq(void)
{
	__asm__ volatile ("msr DAIFClr, %0"
			  :: "i" (DAIFCLR_FIQ_BIT) : "memory");
}
149 
/* Mask FIQs at the current EL (set DAIF.F). */
static ALWAYS_INLINE void disable_fiq(void)
{
	__asm__ volatile ("msr DAIFSet, %0"
			  :: "i" (DAIFSET_FIQ_BIT) : "memory");
}
155 
/*
 * CPU hint instructions: send-event, wait-for-event, wait-for-interrupt.
 * The "memory" clobber keeps them ordered with surrounding accesses.
 */
#define sev()	__asm__ volatile("sev" : : : "memory")
#define wfe()	__asm__ volatile("wfe" : : : "memory")
#define wfi()	__asm__ volatile("wfi" : : : "memory")
159 
is_el_implemented(unsigned int el)160 static inline bool is_el_implemented(unsigned int el)
161 {
162 	unsigned int shift;
163 
164 	if (el > 3) {
165 		return false;
166 	}
167 
168 	shift = ID_AA64PFR0_EL1_SHIFT * el;
169 
170 	return (((read_id_aa64pfr0_el1() >> shift) & ID_AA64PFR0_ELX_MASK) != 0U);
171 }
172 
/*
 * Return true when the CPU is currently running at the highest
 * implemented exception level.
 *
 * Fixes: the original ended with an unbraced "if" returning false, in
 * conflict with the braced style used everywhere else in this file;
 * folded into a direct boolean return.
 */
static inline bool is_el_highest_implemented(void)
{
	uint32_t el_highest;
	uint32_t curr_el;

	/*
	 * Bits [15:0] of ID_AA64PFR0_EL1 hold the EL0..EL3 fields, 4 bits
	 * each; the index of the highest set bit divided by 4 gives the
	 * highest implemented EL.  NOTE(review): __builtin_clz(0) is
	 * undefined behavior — this assumes at least one ELx field is
	 * non-zero, which holds on any conformant implementation.
	 */
	el_highest = read_id_aa64pfr0_el1() & 0xFFFF;
	el_highest = (31U - __builtin_clz(el_highest)) / 4;

	curr_el = GET_EL(read_currentel());

	return curr_el >= el_highest;
}
188 
is_el2_sec_supported(void)189 static inline bool is_el2_sec_supported(void)
190 {
191 	return (((read_id_aa64pfr0_el1() >> ID_AA64PFR0_SEL2_SHIFT) &
192 		ID_AA64PFR0_SEL2_MASK) != 0U);
193 }
194 
/*
 * Report whether the CPU runs in the Secure state.
 *
 * Decided at build time from CONFIG_ARMV8_A_NS rather than probed from
 * hardware, because SCR_EL3.NS is not readable below EL3.
 */
static inline bool is_in_secure_state(void)
{
	/* We cannot read SCR_EL3 from EL2 or EL1 */
	return !IS_ENABLED(CONFIG_ARMV8_A_NS);
}
200 
201 #endif /* !_ASMLANGUAGE */
202 
203 #endif /* ZEPHYR_INCLUDE_ARCH_ARM64_LIB_HELPERS_H_ */
204