/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_FUTEX_H
#define _ASM_FUTEX_H

#include <linux/futex.h>
#include <linux/uaccess.h>
#include <asm/errno.h>

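/*
 * __futex_atomic_op1() performs a futex operation that maps to a single
 * atomic ia64 instruction (e.g. xchg4).  "mf" is a full memory fence; the
 * ".xdata4 __ex_table" entry makes a faulting user access resume at label
 * 2 with an error code in r8 instead of oopsing, so r8 is zero on success.
 */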
#define __futex_atomic_op1(insn, ret, oldval, uaddr, oparg) \
do {									\
	register unsigned long r8 __asm ("r8") = 0;			\
	__asm__ __volatile__(						\
		"	mf;;					\n"	\
		"[1:] "	insn ";;				\n"	\
		"	.xdata4 \"__ex_table\", 1b-., 2f-.	\n"	\
		"[2:]"							\
		: "+r" (r8), "=r" (oldval)				\
		: "r" (uaddr), "r" (oparg)				\
		: "memory");						\
	ret = r8;							\
} while (0)

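/*
 * __futex_atomic_op2() implements a read-modify-write futex operation
 * (add/or/and/xor) as a cmpxchg4.acq loop: load the old value (label 1),
 * compute the new value with "insn", then attempt to install it (label 2),
 * retrying until the word is unchanged underneath us.  A fault at either
 * tagged instruction resumes at label 3 with a non-zero error code in r8.
 */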
#define __futex_atomic_op2(insn, ret, oldval, uaddr, oparg) \
do {									\
	register unsigned long r8 __asm ("r8") = 0;			\
	int val, newval;						\
	do {								\
		__asm__ __volatile__(					\
			"	mf;;				  \n"	\
			"[1:]	ld4 %3=[%4];;			  \n"	\
			"	mov %2=%3			  \n"	\
				insn	";;			  \n"	\
			"	mov ar.ccv=%2;;			  \n"	\
			"[2:]	cmpxchg4.acq %1=[%4],%3,ar.ccv;;  \n"	\
			"	.xdata4 \"__ex_table\", 1b-., 3f-.\n"	\
			"	.xdata4 \"__ex_table\", 2b-., 3f-.\n"	\
			"[3:]"						\
			: "+r" (r8), "=r" (val), "=&r" (oldval),	\
			   "=&r" (newval)				\
			: "r" (uaddr), "r" (oparg)			\
			: "memory");					\
		if (unlikely (r8))					\
			break;						\
	} while (unlikely (val != oldval));				\
	ret = r8;							\
} while (0)

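/*
 * arch_futex_atomic_op_inuser() atomically applies the futex operation
 * "op" with operand "oparg" to the user word at "uaddr".  On success the
 * previous value is stored in *oval and 0 is returned; a faulting access
 * returns -EFAULT and an unknown opcode returns -ENOSYS.
 */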
static inline int
arch_futex_atomic_op_inuser(int op, int oparg, int *oval, u32 __user *uaddr)
{
	int oldval = 0, ret;

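	/*
	 * Run with page faults disabled: a faulting user access is reported
	 * through the exception table as -EFAULT rather than handled (and
	 * possibly slept on) here.
	 */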
	pagefault_disable();

	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op1("xchg4 %1=[%2],%3", ret, oldval, uaddr,
				   oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op2("add %3=%3,%5", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op2("or %3=%3,%5", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ANDN:
		__futex_atomic_op2("and %3=%3,%5", ret, oldval, uaddr,
				   ~oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op2("xor %3=%3,%5", ret, oldval, uaddr, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	pagefault_enable();

	if (!ret)
		*oval = oldval;

	return ret;
}

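/*
 * futex_atomic_cmpxchg_inatomic() atomically compares the user word at
 * "uaddr" with "oldval" and, if equal, replaces it with "newval" using
 * cmpxchg4.acq.  The value actually found is stored in *uval.  Returns 0
 * if the access succeeded (whether or not the exchange happened) and
 * -EFAULT if the user access faulted.
 */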
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
	if (!access_ok(uaddr, sizeof(u32)))
		return -EFAULT;

	{
		register unsigned long r8 __asm ("r8") = 0;
		unsigned long prev;
		__asm__ __volatile__(
			"	mf;;					\n"
			"	mov ar.ccv=%4;;				\n"
			"[1:]	cmpxchg4.acq %1=[%2],%3,ar.ccv		\n"
			"	.xdata4 \"__ex_table\", 1b-., 2f-.	\n"
			"[2:]"
			: "+r" (r8), "=&r" (prev)
			: "r" (uaddr), "r" (newval),
			  "rO" ((long) (unsigned) oldval)
			: "memory");
		*uval = prev;
		return r8;
	}
}

#endif /* _ASM_FUTEX_H */