1 /*
2  * Copyright (c) 2021 Carlo Caione <ccaione@baylibre.com>
3  *
4  * SPDX-License-Identifier: Apache-2.0
5  */
6 
7 #include <kernel_internal.h>
8 #include <zephyr/sys/barrier.h>
9 #include "boot.h"
10 
11 void z_arm64_el2_init(void);
12 
/*
 * Default (weak) platform hook invoked while running at the highest
 * implemented exception level. SoC/board code may provide a strong
 * definition to perform platform-specific init at that level.
 */
void __weak z_arm64_el_highest_plat_init(void)
{
	/* do nothing */
}
17 
/*
 * Default (weak) platform hook invoked from z_arm64_el3_init(). SoC/board
 * code may provide a strong definition for EL3-specific setup.
 */
void __weak z_arm64_el3_plat_init(void)
{
	/* do nothing */
}
22 
/*
 * Default (weak) platform hook invoked from z_arm64_el2_init(). SoC/board
 * code may provide a strong definition for EL2-specific setup.
 */
void __weak z_arm64_el2_plat_init(void)
{
	/* do nothing */
}
27 
/*
 * Default (weak) platform hook invoked from z_arm64_el1_init(). SoC/board
 * code may provide a strong definition for EL1-specific setup.
 */
void __weak z_arm64_el1_plat_init(void)
{
	/* do nothing */
}
32 
/*
 * Early init performed at the highest implemented exception level.
 *
 * Programs the system counter frequency register (CNTFRQ_EL0) — which is
 * only writable from the highest implemented EL — then runs the platform
 * hook and synchronizes the context with an instruction barrier.
 */
void z_arm64_el_highest_init(void)
{
	if (is_el_highest_implemented()) {
		write_cntfrq_el0(CONFIG_SYS_CLOCK_HW_CYCLES_PER_SEC);
	}

	z_arm64_el_highest_plat_init();

	barrier_isync_fence_full();
}
43 
44 
45 #if !defined(CONFIG_ARMV8_R)
/* Possible destination exception levels when dropping out of EL3 */
enum el3_next_el {
	EL3_TO_EL2,		/* Drop from EL3 into EL2 */
	EL3_TO_EL1_NO_EL2,	/* Drop to EL1: EL2 not implemented */
	EL3_TO_EL1_SKIP_EL2	/* Drop to EL1: EL2 implemented but must be skipped */
};
51 
el3_get_next_el(void)52 static inline enum el3_next_el el3_get_next_el(void)
53 {
54 	if (!is_el_implemented(2)) {
55 		return EL3_TO_EL1_NO_EL2;
56 	} else if (is_in_secure_state() && !is_el2_sec_supported()) {
57 		/*
58 		 * Is considered an illegal return "[..] a return to EL2 when EL3 is
59 		 * implemented and the value of the SCR_EL3.NS bit is 0 if
60 		 * ARMv8.4-SecEL2 is not implemented" (D1.11.2 from ARM DDI 0487E.a)
61 		 */
62 		return EL3_TO_EL1_SKIP_EL2;
63 	} else {
64 		return EL3_TO_EL2;
65 	}
66 }
67 
/*
 * EL3 boot-time initialization.
 *
 * Installs the vector table, configures CPTR_EL3 and SCR_EL3 so that lower
 * exception levels can use FP/SIMD, the timers and HVC/SMC, optionally sets
 * up the GICv3 system register interface, then runs the platform hook.
 * When EL2 is implemented but is going to be skipped on the way down to
 * EL1, the EL2 init is performed from here as well.
 */
void z_arm64_el3_init(void)
{
	uint64_t reg;

	/* Setup vector table */
	write_vbar_el3((uint64_t)_vector_table);
	barrier_isync_fence_full();

	/*
	 * NOTE: clearing bits in a zeroed value is a no-op; the &= ~(...) is
	 * kept to document which traps are intentionally left disabled.
	 */
	reg = 0U;			/* Mostly RES0 */
	reg &= ~(CPTR_TTA_BIT |		/* Do not trap sysreg accesses */
		 CPTR_TFP_BIT |		/* Do not trap SVE, SIMD and FP */
		 CPTR_TCPAC_BIT);	/* Do not trap CPTR_EL2 / CPACR_EL1 accesses */
	write_cptr_el3(reg);

	reg = 0U;			/* Reset */
#ifdef CONFIG_ARMV8_A_NS
	reg |= SCR_NS_BIT;		/* EL2 / EL3 non-secure */
#else
	if (is_in_secure_state() && is_el2_sec_supported()) {
		reg |= SCR_EEL2_BIT;    /* Enable EL2 secure */
	}
#endif
	reg |= (SCR_RES1 |		/* RES1 */
		SCR_RW_BIT |		/* EL2 execution state is AArch64 */
		SCR_ST_BIT |		/* Do not trap EL1 accesses to timer */
		SCR_HCE_BIT |		/* Do not trap HVC */
		SCR_SMD_BIT);		/* Do not trap SMC */
	write_scr_el3(reg);

#if defined(CONFIG_GIC_V3)
	reg = read_sysreg(ICC_SRE_EL3);
	reg |= (ICC_SRE_ELx_DFB_BIT |	/* Disable FIQ bypass */
		ICC_SRE_ELx_DIB_BIT |	/* Disable IRQ bypass */
		ICC_SRE_ELx_SRE_BIT |	/* System register interface is used */
		ICC_SRE_EL3_EN_BIT);	/* Enables lower Exception level access to ICC_SRE_EL1 */
	write_sysreg(reg, ICC_SRE_EL3);
#endif

	z_arm64_el3_plat_init();

	barrier_isync_fence_full();

	if (el3_get_next_el() == EL3_TO_EL1_SKIP_EL2) {
		/*
		 * handle EL2 init in EL3, as it still needs to be done,
		 * but we are going to be skipping EL2.
		 */
		z_arm64_el2_init();
	}
}
118 #endif /* CONFIG_ARMV8_R */
119 
/*
 * EL2 boot-time initialization.
 *
 * Enables the i-cache and SP alignment checking, routes exceptions and
 * interrupts away from EL2 (TGE/AMO/IMO cleared), sets EL1 to AArch64,
 * disables FP/SIMD trapping, zeroes the virtual timer offset and hypervisor
 * timer controls, then runs the platform hook. Also called from EL3 when
 * EL2 is implemented but skipped (see z_arm64_el3_init()).
 */
void z_arm64_el2_init(void)
{
	uint64_t reg;

	reg = read_sctlr_el2();
	reg |= (SCTLR_EL2_RES1 |	/* RES1 */
		SCTLR_I_BIT |		/* Enable i-cache */
		SCTLR_SA_BIT);		/* Enable SP alignment check */
	write_sctlr_el2(reg);

	reg = read_hcr_el2();
	/* when EL2 is enable in current security status:
	 * Clear TGE bit: All exceptions that would not be routed to EL2;
	 * Clear AMO bit: Physical SError interrupts are not taken to EL2 and EL3.
	 * Clear IMO bit: Physical IRQ interrupts are not taken to EL2 and EL3.
	 */
	reg &= ~(HCR_IMO_BIT | HCR_AMO_BIT | HCR_TGE_BIT);
	reg |= HCR_RW_BIT;		/* EL1 Execution state is AArch64 */
	write_hcr_el2(reg);

	reg = 0U;			/* RES0 */
	reg |= CPTR_EL2_RES1;		/* RES1 */
	reg &= ~(CPTR_TFP_BIT |		/* Do not trap SVE, SIMD and FP */
		 CPTR_TCPAC_BIT);	/* Do not trap CPACR_EL1 accesses */
	write_cptr_el2(reg);

	zero_cntvoff_el2();		/* Set 64-bit virtual timer offset to 0 */
	zero_cnthctl_el2();
#ifdef CONFIG_CPU_AARCH64_CORTEX_R
	zero_cnthps_ctl_el2();
#else
	zero_cnthp_ctl_el2();
#endif

#ifdef CONFIG_ARM64_SET_VMPIDR_EL2
	/* Mirror the physical MPIDR into VMPIDR_EL2 so EL1 reads match */
	reg = read_mpidr_el1();
	write_vmpidr_el2(reg);
#endif

	/*
	 * Enable this if/when we use the hypervisor timer.
	 * write_cnthp_cval_el2(~(uint64_t)0);
	 */

	z_arm64_el2_plat_init();

	barrier_isync_fence_full();
}
168 
/*
 * EL1 boot-time initialization.
 *
 * Installs the vector table, un-traps FP/SIMD via CPACR_EL1, enables the
 * i-cache, d-cache and SP alignment checking, parks the virtual timer
 * compare value, then runs the platform hook.
 */
void z_arm64_el1_init(void)
{
	uint64_t reg;

	/* Setup vector table */
	write_vbar_el1((uint64_t)_vector_table);
	barrier_isync_fence_full();

	reg = 0U;			/* RES0 */
	reg |= CPACR_EL1_FPEN_NOTRAP;	/* Do not trap NEON/SIMD/FP initially */
					/* TODO: CONFIG_FLOAT_*_FORBIDDEN */
	write_cpacr_el1(reg);

	reg = read_sctlr_el1();
	reg |= (SCTLR_EL1_RES1 |	/* RES1 */
		SCTLR_I_BIT |		/* Enable i-cache */
		SCTLR_C_BIT |		/* Enable d-cache */
		SCTLR_SA_BIT);		/* Enable SP alignment check */
	write_sctlr_el1(reg);

	/* Park the virtual timer: max compare value so it never fires early */
	write_cntv_cval_el0(~(uint64_t)0);
	/*
	 * Enable these if/when we use the corresponding timers.
	 * write_cntp_cval_el0(~(uint64_t)0);
	 * write_cntps_cval_el1(~(uint64_t)0);
	 */

	z_arm64_el1_plat_init();

	barrier_isync_fence_full();
}
200 
201 #if !defined(CONFIG_ARMV8_R)
/*
 * Prepare the EL3 exception-return state for dropping to a lower EL.
 *
 * Programs ELR_EL3 with the address execution resumes at, and SPSR_EL3
 * with all DAIF bits masked plus the mode of the destination exception
 * level chosen by el3_get_next_el().
 */
void z_arm64_el3_get_next_el(uint64_t switch_addr)
{
	uint64_t spsr_val;

	write_elr_el3(switch_addr);

	/* Mask the DAIF and select the destination EL mode */
	spsr_val = SPSR_DAIF_MASK |
		   ((el3_get_next_el() == EL3_TO_EL2) ? SPSR_MODE_EL2T
						      : SPSR_MODE_EL1T);

	write_spsr_el3(spsr_val);
}
221 #endif /* CONFIG_ARMV8_R */
222