1/*
2 * Assembly macros and helpers
3 *
4 * Copyright (c) 2022 BayLibre, SAS
5 *
6 * SPDX-License-Identifier: Apache-2.0
7 */
8
#ifdef CONFIG_64BIT
	/* register-wide load/store based on ld/sd (XLEN = 64) */

	/*
	 * lr rd, mem — load one register-width (64-bit) value from mem.
	 * NOTE(review): "lr" also names the load-reserved instruction of
	 * the A extension; the explicit lr.w/lr.d mnemonics are distinct
	 * and unaffected, but confirm no bare "lr" instruction use is
	 * intended anywhere this file is included.
	 */
	.macro lr, rd, mem
	ld \rd, \mem
	.endm

	/* sr rs, mem — store one register-width (64-bit) value to mem. */
	.macro sr, rs, mem
	sd \rs, \mem
	.endm

#else
	/* register-wide load/store based on lw/sw (XLEN = 32) */

	/* lr rd, mem — load one register-width (32-bit) value from mem. */
	.macro lr, rd, mem
	lw \rd, \mem
	.endm

	/* sr rs, mem — store one register-width (32-bit) value to mem. */
	.macro sr, rs, mem
	sw \rs, \mem
	.endm

#endif
32
#ifdef CONFIG_CPU_HAS_FPU_DOUBLE_PRECISION

	/*
	 * flr rd, mem — FP-register-wide load: with a double-precision FPU
	 * the f-registers are 64 bits wide, so use fld.
	 */
	.macro flr, rd, mem
	fld \rd, \mem
	.endm

	/* fsr rs, mem — FP-register-wide store (64-bit, fsd). */
	.macro fsr, rs, mem
	fsd \rs, \mem
	.endm

#else

	/*
	 * flr rd, mem — FP-register-wide load: single-precision-only FPU,
	 * f-registers are 32 bits wide, so use flw.
	 */
	.macro flr, rd, mem
	flw \rd, \mem
	.endm

	/* fsr rs, mem — FP-register-wide store (32-bit, fsw). */
	.macro fsr, rs, mem
	fsw \rs, \mem
	.endm

#endif
54
55	/*
56	 * Perform rd += rs * mult using only shifts and adds.
57	 * Useful when the mul instruction isn't available.
58	 * mult must be a constant. rs will be clobbered.
59	 */
60	.macro shiftmul_add rd, rs, mult
61
62	beqz \rs, 999f
63
64	.set _bitpos, 0
65	.set _lastbitpos, 0
66
67	.rept 32
68	.if ((\mult) & (1 << _bitpos))
69	.if (_bitpos - _lastbitpos) != 0
70	slli \rs, \rs, (_bitpos - _lastbitpos)
71	.set _lastbitpos, _bitpos
72	.endif
73	add \rd, \rd, \rs
74	.endif
75	.set _bitpos, _bitpos + 1
76	.endr
77999:
78	.endm
79
/* lowest common denominator for register availability */
#if defined(CONFIG_RISCV_ISA_RV32E)
/*
 * RV32E provides only registers x0-x15. RV_E() wraps code that is valid
 * on both register files; RV_I() wraps code that touches x16-x31 and is
 * therefore compiled out on RV32E builds.
 */
#define RV_E(op...) op
#define RV_I(op...) /* unavailable */
#else
/* Full I-base register file (x0-x31): both wrappers emit their code. */
#define RV_E(op...) op
#define RV_I(op...) op
#endif
88