/* SPDX-License-Identifier: GPL-2.0-only */

#include <linux/linkage.h>
#include <asm/asm.h>
#include <asm/alternative-macros.h>
#include <asm/hwcap.h>

/* int strcmp(const char *cs, const char *ct) */
SYM_FUNC_START(strcmp)

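	/*
	 * The ALTERNATIVE below is patched at boot: when the CPU reports
	 * the Zbb extension and CONFIG_RISCV_ISA_ZBB is enabled, the nop
	 * is replaced with a jump to strcmp_zbb.
	 */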
	ALTERNATIVE("nop", "j strcmp_zbb", 0, RISCV_ISA_EXT_ZBB, CONFIG_RISCV_ISA_ZBB)

	/*
	 * Returns
	 *   a0 - comparison result, sign convention as in strcmp
	 *
	 * Parameters
	 *   a0 - string1
	 *   a1 - string2
	 *
	 * Clobbers
	 *   t0, t1
	 */
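	/*
	 * Generic byte-by-byte loop; roughly the following C (a sketch for
	 * reference only, variable names are illustrative):
	 *
	 *	const unsigned char *u1 = (const unsigned char *)cs;
	 *	const unsigned char *u2 = (const unsigned char *)ct;
	 *	unsigned char c1, c2;
	 *
	 *	do {
	 *		c1 = *u1++;
	 *		c2 = *u2++;
	 *	} while (c1 == c2 && c1 != '\0');
	 *	return c1 - c2;
	 */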
1:
	lbu	t0, 0(a0)
	lbu	t1, 0(a1)
	addi	a0, a0, 1
	addi	a1, a1, 1
	bne	t0, t1, 2f
	bnez	t0, 1b
	li	a0, 0
	ret
2:
	/*
	 * strcmp only needs to return (< 0, 0, > 0) values,
	 * not necessarily -1, 0 or +1.
	 */
	sub	a0, t0, t1
	ret

/*
 * Variant of strcmp using the ZBB extension if available.
 * The code was published as part of the bitmanip manual
 * in Appendix A.
 */
#ifdef CONFIG_RISCV_ISA_ZBB
strcmp_zbb:

.option push
.option arch,+zbb
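
	/*
	 * The .option push/pop pair keeps the +zbb architecture extension
	 * enabled only for this block, which needs the Zbb instructions
	 * orc.b and rev8.
	 */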

	/*
	 * Returns
	 *   a0 - comparison result, sign convention as in strcmp
	 *
	 * Parameters
	 *   a0 - string1
	 *   a1 - string2
	 *
	 * Clobbers
	 *   t0, t1, t2, t3, t4
	 */

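	/*
	 * If either pointer is not SZREG-aligned (SZREG is the register
	 * width in bytes, from asm/asm.h), OR-ing the two addresses leaves
	 * non-zero low bits and we take the byte-at-a-time loop at 3:
	 * instead of the word loop.
	 */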
	or	t2, a0, a1
	li	t4, -1
	and	t2, t2, SZREG-1
	bnez	t2, 3f

	/* Main loop for aligned strings. */
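	/*
	 * Null-byte detection: orc.b sets each byte of t3 to 0xff if the
	 * corresponding byte of t0 is non-zero and to 0x00 if it is zero,
	 * so t3 differs from t4 (-1, all ones) exactly when the word from
	 * the first string contains a NUL.
	 */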
	.p2align 3
1:
	REG_L	t0, 0(a0)
	REG_L	t1, 0(a1)
	orc.b	t3, t0
	bne	t3, t4, 2f
	addi	a0, a0, SZREG
	addi	a1, a1, SZREG
	beq	t0, t1, 1b

	/*
	 * The words don't match and there is no null byte in the word
	 * loaded from the first string. Get the bytes in big-endian
	 * order and compare.
	 */
#ifndef CONFIG_CPU_BIG_ENDIAN
	rev8	t0, t0
	rev8	t1, t1
#endif
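	/*
	 * After the byte swap (a no-op on big-endian kernels) the first
	 * string byte sits in the most-significant position, so an
	 * unsigned word comparison orders the strings by their first
	 * differing character.
	 */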

	/* Synthesize (t0 >= t1) ? 1 : -1 in a branchless sequence. */
	sltu	a0, t0, t1	/* a0 = (t0 < t1) ? 1 : 0 */
	neg	a0, a0		/* a0 = (t0 < t1) ? -1 : 0 */
	ori	a0, a0, 1	/* a0 = (t0 < t1) ? -1 : +1 */
	ret

2:
	/*
	 * Found a null byte in the word from the first string.
	 * If the words don't match, fall back to the byte loop: a0/a1
	 * still point at this word, so it is re-scanned byte by byte.
	 */
	bne	t0, t1, 3f

	/* Otherwise, the strings are equal. */
	li	a0, 0
	ret

	/*
	 * Byte-at-a-time loop, used for misaligned strings and as the
	 * fall-back when a word containing a null byte differs.
	 */
	.p2align 3
3:
	lbu	t0, 0(a0)
	lbu	t1, 0(a1)
	addi	a0, a0, 1
	addi	a1, a1, 1
	bne	t0, t1, 4f
	bnez	t0, 3b

4:
	sub	a0, t0, t1	/* first differing bytes, or 0 when both hit the NUL */
	ret

.option pop
#endif
SYM_FUNC_END(strcmp)