/*
 * Copyright (c) 2019 Carlo Caione <ccaione@baylibre.com>
 *
 * SPDX-License-Identifier: Apache-2.0
 */

#include <toolchain.h>
#include <linker/sections.h>
#include <arch/cpu.h>
#include "boot.h"
#include "macro_priv.inc"

_ASM_FILE_PROLOGUE

/*
 * Platform specific pre-C init code
 *
 * These hooks are weak symbols that a platform can override.
 *
 * Note: - Stack is not yet available
 *       - x23, x24 and x25 must be preserved
 */

WTEXT(z_arm64_el3_plat_prep_c)
SECTION_FUNC(TEXT,z_arm64_el3_plat_prep_c)
	ret

WTEXT(z_arm64_el2_plat_prep_c)
SECTION_FUNC(TEXT,z_arm64_el2_plat_prep_c)
	ret

WTEXT(z_arm64_el1_plat_prep_c)
SECTION_FUNC(TEXT,z_arm64_el1_plat_prep_c)
	ret

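/*
 * A sketch of a platform override (hypothetical SoC file, for illustration
 * only): provide a strong definition with the same name and the same
 * constraints (no stack available, x23-x25 preserved), e.g.:
 *
 *	GTEXT(z_arm64_el1_plat_prep_c)
 *	SECTION_FUNC(TEXT,z_arm64_el1_plat_prep_c)
 *		... early, stack-less EL1 platform setup ...
 *		ret
 */
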
/*
 * Set up the minimum state necessary to safely call C code
 */

GTEXT(__reset_prep_c)
SECTION_SUBSEC_FUNC(TEXT,_reset_section,__reset_prep_c)
	/* Save the return address in x23 (no stack yet) */
	mov	x23, x30

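	/*
	 * switch_el (per macro_priv.inc) compares CurrentEL, clobbering the
	 * scratch register given as its first argument, and branches to the
	 * 3f/2f/1f labels for EL3/EL2/EL1 respectively.
	 */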
	switch_el x0, 3f, 2f, 1f
3:
	/*
	 * Reinitialize SCTLR from scratch in EL3: keep the RES1 bits and
	 * enable the I-cache (I) and SP alignment checking (SA)
	 */
	ldr	w0, =(SCTLR_EL3_RES1 | SCTLR_I_BIT | SCTLR_SA_BIT)
	msr	sctlr_el3, x0

	/* Custom plat prep_c init */
	bl	z_arm64_el3_plat_prep_c

	/* Set SP_EL1 to the stack pointer passed in x24 */
	msr	sp_el1, x24

	b	out
2:
	/* Disable alignment fault checking (SCTLR_EL2.A) */
	mrs	x0, sctlr_el2
	bic	x0, x0, SCTLR_A_BIT
	msr	sctlr_el2, x0

	/* Custom plat prep_c init */
	bl	z_arm64_el2_plat_prep_c

	/* Set SP_EL1 to the stack pointer passed in x24 */
	msr	sp_el1, x24

	b	out
1:
	/* Disable alignment fault checking (SCTLR_EL1.A) */
	mrs	x0, sctlr_el1
	bic	x0, x0, SCTLR_A_BIT
	msr	sctlr_el1, x0

	/* Custom plat prep_c init */
	bl	z_arm64_el1_plat_prep_c

	/*
	 * Set SP_EL1. sp_el1 cannot be accessed directly at EL1, so select
	 * it via SPSel and write it through sp
	 */
	msr	SPSel, #1
	mov	sp, x24
out:
	/* Synchronize the system register writes above */
	isb

	/* Select SP_EL0: execution continues on the EL0 stack pointer */
	msr	SPSel, #0

	/* Initialize the stack from the value handed over in x24 */
	mov	sp, x24

	ret	x23

/*
 * Reset vector
 *
 * Run when the system comes out of reset. The processor is at a privileged
 * exception level. At this point, neither SP_EL0 nor SP_ELx points to a
 * valid area in SRAM.
 */

GTEXT(__reset)
SECTION_SUBSEC_FUNC(TEXT,_reset_section,__reset)

GTEXT(__start)
SECTION_SUBSEC_FUNC(TEXT,_reset_section,__start)
	/* Mask all exceptions (D, A, I and F) */
	msr	DAIFSet, #0xf

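	/*
	 * SMP boot flow (field offsets come from boot.h), assuming only the
	 * primary core comes out of reset initially: the MPID slot in
	 * arm64_cpu_boot_params starts at -1, so the primary core sees -1
	 * and claims the primary role. Each secondary core, once released,
	 * spins until its own MPID is published in that slot together with
	 * a per-CPU stack pointer, then proceeds on that stack.
	 */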
#if CONFIG_MP_NUM_CPUS > 1

	ldr	x0, =arm64_cpu_boot_params
	get_cpu_id x1
	ldr	x2, [x0, #BOOT_PARAM_MPID_OFFSET]
	cmp	x2, #-1
	beq	primary_core

	/* Secondary core: loop until our turn comes */
1:	dmb	ld
	ldr	x2, [x0, #BOOT_PARAM_MPID_OFFSET]
	cmp	x1, x2
	bne	1b

	/* we can now load our stack pointer value and move on */
	ldr	x24, [x0, #BOOT_PARAM_SP_OFFSET]
	ldr	x25, =z_arm64_secondary_prep_c
	b	2f

primary_core:
	/* advertise ourselves by publishing our MPID */
	str	x1, [x0, #BOOT_PARAM_MPID_OFFSET]
#endif
	/* load primary stack and entry point */
	ldr	x24, =(z_interrupt_stacks + CONFIG_ISR_STACK_SIZE)
	ldr	x25, =z_arm64_prep_c
2:
	/* Prepare for calling C code */
	bl	__reset_prep_c

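	/*
	 * From here on, x24 holds the initial stack pointer and x25 the
	 * final C entry point (z_arm64_prep_c or z_arm64_secondary_prep_c).
	 * Both survive the calls below (x24/x25 are callee-saved under
	 * AAPCS64), and SP now points at a valid stack, so C code is safe
	 * to call.
	 */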
	/* Platform hook for highest EL */
	bl	z_arm64_el_highest_init

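	/*
	 * EL switching: z_arm64_el3_get_next_el is expected to program
	 * spsr_el3/elr_el3 from the target address passed in x0, so the
	 * eret below re-enters this code at the switch_el label running in
	 * the next lower EL. The EL2 path sets up its own eret to the local
	 * 1f label at EL1.
	 */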
switch_el:
	switch_el x0, 3f, 2f, 1f
3:
	/* EL3 init */
	bl	z_arm64_el3_init

	/* Get next EL */
	adr	x0, switch_el
	bl	z_arm64_el3_get_next_el
	eret

2:
	/* EL2 init */
	bl	z_arm64_el2_init

	/* Move to EL1t (SP_EL0) with all exceptions masked */
	mov_imm	x0, (SPSR_DAIF_MASK | SPSR_MODE_EL1T)
	msr	spsr_el2, x0

	adr	x0, 1f
	msr	elr_el2, x0
	eret

1:
	/* EL1 init */
	bl	z_arm64_el1_init

	/* Enable SError interrupts (unmask only the A bit in DAIF) */
	msr	DAIFClr, #(DAIFCLR_ABT_BIT)
	isb

	ret	x25  /* either z_arm64_prep_c or z_arm64_secondary_prep_c */