/* This is a simple version of setjmp and longjmp.

   Copyright (c) 1997 Nick Clifton, Cygnus Solutions
 */

#include "machine/acle-compat.h"

/* ANSI concatenation macros.  */
#define CONCAT(a, b)  CONCAT2(a, b)
#define CONCAT2(a, b) a##b

#ifndef __USER_LABEL_PREFIX__
#error  __USER_LABEL_PREFIX__ not defined
#endif

#define SYM(x) CONCAT (__USER_LABEL_PREFIX__, x)

#ifdef __ELF__
#define TYPE(x) .type SYM(x),function
#define SIZE(x) .size SYM(x), . - SYM(x)
#else
#define TYPE(x)
#define SIZE(x)
#endif

/* Jump buffer allocation sizes.  */
#define JUMPBUF_CORE_REGS_SIZE (10 * 4)
#define JUMPBUF_FP_REGS_SIZE (8 * 8)
#define JUMPBUF_PAC (JUMPBUF_CORE_REGS_SIZE + JUMPBUF_FP_REGS_SIZE + 0)
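
/* A sketch of the jump-buffer layout implied by the sizes above and the
   store sequences below; the struct and field names are illustrative only,
   not part of any ABI:

	struct arm_jmp_buf_layout {
	  unsigned int core[10];   // r4-r10, fp, sp, lr       (offsets   0..39)
	  double       vfp[8];     // d8-d15                    (offsets  40..103)
	  unsigned int pac;        // return-address auth code  (offset 104, PAC builds only)
	};
*/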

/* Arm/Thumb interworking support:

   The interworking scheme expects functions to use a BX instruction
   to return control to their parent.  Since we need this code to work
   in both interworked and non-interworked environments, as well as with
   older processors that do not have the BX instruction, we do the
   following:
	Test the return address.
	If the bottom bit is clear, perform an "old style" function exit.
	(We know that we are in ARM mode and returning to an ARM-mode caller.)
	Otherwise, use the BX instruction to perform the function exit.

   We know that we will never attempt to perform the BX instruction on
   an older processor, because that kind of processor will never be
   interworked, and a return address with the bottom bit set will never
   be generated.

   In addition, we do not actually assemble the BX instruction, as this would
   require us to tell the assembler that the processor is an ARM7TDMI, and
   it would store this information in the binary.  We want this binary to be
   linkable with binaries compiled for older processors, however, so we do
   not want such information stored there.

   If we are running under the APCS-26 convention, however, we never test
   the bottom bit, because it is part of the processor status.  Instead we
   just do a normal return, since we know that we cannot be returning to a
   Thumb caller - Thumb does not support APCS-26.

   Function entry is much simpler.  If we are compiling for Thumb we just
   switch into ARM mode and then drop through into the rest of the function.
   The function exit code will take care of the switch back to Thumb mode.

   For Thumb-2, do everything in Thumb mode.  */
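
/* Concretely, in the Arm-mode, non-APCS-26 case the RET macro defined below
   expands to the sequence described above:

	tst	lr, #1			check the caller's bottom bit
	moveq	pc, lr			clear: classic Arm-mode return
	.inst	0xe12fff1e		set: a hand-encoded "bx lr", so the
					assembler need not be told about BX
*/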

	.syntax unified

/*  GCC 12.1 and later, as well as clang, tell the assembler exactly which
    floating-point (or MVE) unit is required, and we don't want to
    override that.  Conversely, older versions of the compiler don't
    pass this information, so we need to enable the most appropriate
    VFP version here.  The choice should support all suitable VFP
    versions that the older toolchains can handle.  */
#if __GNUC__ && __GNUC__ < 12 && !defined(__clang__)
/*  Ensure that FPU instructions are correctly compiled and, likewise,
    the appropriate build attributes are added to the resulting object
    file.  Check whether the MVE extension is present and whether
    we have support for hardware floating-point operations.  VFPxd
    covers all the cases we need in this file for hardware
    floating-point and should be compatible with all of the FPUs
    that we need to support.  */
# if __ARM_FP
	.fpu vfpxd
# endif
# if __ARM_FEATURE_MVE
	.arch_extension mve
# endif
#endif

#if __ARM_ARCH_ISA_THUMB == 1 && !__ARM_ARCH_ISA_ARM
/* ARMv6-M-like targets have no Arm state, so this has to be implemented
   in Thumb mode.  */

.thumb
.thumb_func
	.globl SYM (setjmp)
	TYPE (setjmp)
SYM (setjmp):
	/* Save registers in jump buffer.  */
	stmia	r0!, {r4, r5, r6, r7}
	mov	r1, r8
	mov	r2, r9
	mov	r3, r10
	mov	r4, fp
	mov	r5, sp
	mov	r6, lr
	stmia	r0!, {r1, r2, r3, r4, r5, r6}
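	/* The two stores above advanced r0 by 40 bytes (10 words), and r4-r6
	   were clobbered as staging registers for the high registers, so
	   rewind r0 and reload the low registers before returning.  */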
	subs	r0, r0, #40
	/* Restore the callee-saved low registers.  */
	ldmia	r0!, {r4, r5, r6, r7}
	/* Return zero.  */
	movs	r0, #0
	bx lr

.thumb_func
	.globl SYM (longjmp)
	TYPE (longjmp)
SYM (longjmp):
	/* Restore the high registers.  */
	adds	r0, r0, #16
	ldmia	r0!, {r2, r3, r4, r5, r6}
	mov	r8, r2
	mov	r9, r3
	mov	r10, r4
	mov	fp, r5
	mov	sp, r6
	ldmia	r0!, {r3} /* lr */
	/* Restore the low registers.  */
	subs	r0, r0, #40
	ldmia	r0!, {r4, r5, r6, r7}
	/* Return the result argument, or 1 if it is zero.  */
	movs	r0, r1
	bne	1f
	movs	r0, #1
1:
	bx	r3

#else

#ifdef __APCS_26__
#define RET	movs		pc, lr
#elif defined(__thumb2__)
#define RET	bx lr
#else
#define RET	tst		lr, #1; \
	        moveq		pc, lr ; \
.inst           0xe12fff1e	/* bx lr */
#endif
148
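/* COND wraps the IT block that Thumb-2 requires before a conditional
   instruction: "COND t eq" expands to "it eq" under Thumb-2 and to nothing
   for Arm, where instructions can be conditional without one.  */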
#ifdef __thumb2__
.macro COND where when
	i\where	\when
.endm
#else
.macro COND where when
.endm
#endif

#if defined(__thumb2__)
.macro MODE
	.thumb
	.thumb_func
.endm
.macro PROLOGUE name
.endm

#elif defined(__thumb__)
#define	MODE		.thumb_func
.macro PROLOGUE name
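	/* Thumb-1 entry stub: in Thumb state "bx pc" reads pc as the address
	   of this instruction plus 4 (with bit 0 clear), so it branches to
	   the Arm code that follows and switches the core to Arm state; the
	   nop pads that code out to the required word alignment.  */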
	.code 16
	bx	pc
	nop
	.code 32
SYM (.arm_start_of.\name):
.endm
#else /* Arm */
#define	MODE		.code 32
.macro PROLOGUE name
.endm
#endif

.macro FUNC_START name
	.text
	.align 2
	MODE
	.globl SYM (\name)
	.cfi_startproc
	TYPE (\name)
SYM (\name):
	PROLOGUE \name
.endm

.macro FUNC_END name
	RET
	.cfi_endproc
	SIZE (\name)
.endm

/* --------------------------------------------------------------------
                 int setjmp (jmp_buf);
   -------------------------------------------------------------------- */
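
/* An illustrative C usage sketch (not part of this file; the function and
   variable names are made up for the example).  setjmp returns 0 when it
   records the context, and longjmp transfers control back so that setjmp
   appears to return a second time with the value passed to longjmp (forced
   to 1 if that value was 0):

	#include <setjmp.h>

	static jmp_buf env;

	static void fail (void)
	{
	  longjmp (env, 42);		// unwinds back to the setjmp below
	}

	int run (void)
	{
	  int rc = setjmp (env);	// 0 on the first, direct return
	  if (rc == 0)
	    fail ();
	  return rc;			// 42 once longjmp brings us back
	}
*/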

	FUNC_START setjmp

#if __ARM_FEATURE_PAC_DEFAULT
# if __ARM_FEATURE_BTI_DEFAULT
	pacbti	ip, lr, sp
# else
	pac	ip, lr, sp
# endif /* __ARM_FEATURE_BTI_DEFAULT */
	mov r3, ip
	str r3, [r0, #JUMPBUF_PAC]
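	/* Let the unwinder know that the return-address authentication code
	   (DWARF register 143, RA_AUTH_CODE) currently lives in ip (r12).  */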
	.cfi_register 143, 12
#else
# if __ARM_FEATURE_BTI_DEFAULT
	bti
# endif /* __ARM_FEATURE_BTI_DEFAULT */
#endif /* __ARM_FEATURE_PAC_DEFAULT */

	/* Save all the callee-preserved registers into the jump buffer.  */
#ifdef __thumb2__
	mov		ip, sp
	stmia		r0!, { r4-r10, fp, ip, lr }
#else
	stmia		r0!, { r4-r10, fp, sp, lr }
#endif
#if defined __ARM_FP || defined __ARM_FEATURE_MVE
	vstm		r0, { d8-d15 }
#endif

	/* When setting up the jump buffer, return 0.  */
	mov		r0, #0
#if __ARM_FEATURE_PAC_DEFAULT
	mov ip, r3
	aut ip, lr, sp
#endif /* __ARM_FEATURE_PAC_DEFAULT */

	FUNC_END setjmp

/* --------------------------------------------------------------------
		volatile void longjmp (jmp_buf, int);
   -------------------------------------------------------------------- */

	FUNC_START longjmp

#if __ARM_FEATURE_BTI_DEFAULT
	bti
#endif /* __ARM_FEATURE_BTI_DEFAULT */

#if __ARM_FEATURE_PAC_DEFAULT
	/* Keep the original jmp_buf address so the PAC code can be
	   retrieved later for authentication.  */
	mov	r2, r0
#endif /* __ARM_FEATURE_PAC_DEFAULT */

	/* If we have stack extension code, it ought to be handled here.  */

	/* Restore the registers, recovering the state at the time
	   setjmp() was called.  */
#ifdef __thumb2__
	ldmia		r0!, { r4-r10, fp, ip, lr }
	mov		sp, ip
#else
	ldmia		r0!, { r4-r10, fp, sp, lr }
#endif
#if defined __ARM_FP || defined __ARM_FEATURE_MVE
	vldm		r0, { d8-d15 }
#endif

	/* Put the return value into the integer result register.
	   But if it is zero then return 1 instead.  */
	movs		r0, r1
	it		eq
	moveq		r0, #1

#if __ARM_FEATURE_PAC_DEFAULT
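	/* aut recomputes the authentication code from lr and sp and checks it
	   against the value reloaded into ip, so a corrupted return address
	   or stack pointer is caught here rather than silently used.  */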
	ldr ip, [r2, #JUMPBUF_PAC]
	aut ip, lr, sp
#endif /* __ARM_FEATURE_PAC_DEFAULT */

	FUNC_END longjmp
#endif