/* SPDX-License-Identifier: GPL-2.0 */
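/*
 * Atomic futex operations on user memory, implemented with the ia64
 * xchg4/cmpxchg4.acq instructions operating directly on the user
 * address.  Faulting accesses are caught via __ex_table fixup entries.
 */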
#ifndef _ASM_FUTEX_H
#define _ASM_FUTEX_H

#include <linux/futex.h>
#include <linux/uaccess.h>
#include <asm/errno.h>

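/*
 * Run a single atomic instruction (e.g. xchg4) on the user word at
 * uaddr.  On success r8 stays 0 and oldval receives the previous
 * value of the word; if the access faults, the __ex_table entry skips
 * past the instruction and the fixup leaves -EFAULT in r8, which is
 * then returned through ret.
 */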
#define __futex_atomic_op1(insn, ret, oldval, uaddr, oparg) \
do {									\
	register unsigned long r8 __asm ("r8") = 0;			\
	__asm__ __volatile__(						\
		"	mf;;					\n"	\
		"[1:] "	insn ";;				\n"	\
		"	.xdata4 \"__ex_table\", 1b-., 2f-.	\n"	\
		"[2:]"							\
		: "+r" (r8), "=r" (oldval)				\
		: "r" (uaddr), "r" (oparg)				\
		: "memory");						\
	ret = r8;							\
} while (0)

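/*
 * Read-modify-write on the user word: load the current value, apply
 * insn to compute the new value, then store it with cmpxchg4.acq,
 * retrying if another thread changed the word in the meantime.  Both
 * user accesses are covered by __ex_table entries, so a fault ends
 * the loop with -EFAULT in r8 instead of an oops.
 */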
#define __futex_atomic_op2(insn, ret, oldval, uaddr, oparg) \
do {									\
	register unsigned long r8 __asm ("r8") = 0;			\
	int val, newval;						\
	do {								\
		__asm__ __volatile__(					\
			"	mf;;				  \n"	\
			"[1:]	ld4 %3=[%4];;			  \n"	\
			"	mov %2=%3			  \n"	\
				insn	";;			  \n"	\
			"	mov ar.ccv=%2;;			  \n"	\
			"[2:]	cmpxchg4.acq %1=[%4],%3,ar.ccv;;  \n"	\
			"	.xdata4 \"__ex_table\", 1b-., 3f-.\n"	\
			"	.xdata4 \"__ex_table\", 2b-., 3f-.\n"	\
			"[3:]"						\
			: "+r" (r8), "=r" (val), "=&r" (oldval),	\
			   "=&r" (newval)				\
			: "r" (uaddr), "r" (oparg)			\
			: "memory");					\
		if (unlikely (r8))					\
			break;						\
	} while (unlikely (val != oldval));				\
	ret = r8;							\
} while (0)

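/*
 * Atomically perform op with oparg on the futex word at uaddr and
 * return the word's previous value through oval.  Returns 0 on
 * success, -EFAULT if the user access is invalid or faults, and
 * -ENOSYS for an unknown op.
 */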
static inline int
arch_futex_atomic_op_inuser(int op, int oparg, int *oval, u32 __user *uaddr)
{
	int oldval = 0, ret;

	if (!access_ok(uaddr, sizeof(u32)))
		return -EFAULT;

	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op1("xchg4 %1=[%2],%3", ret, oldval, uaddr,
				   oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op2("add %3=%3,%5", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op2("or %3=%3,%5", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ANDN:
		__futex_atomic_op2("and %3=%3,%5", ret, oldval, uaddr,
				   ~oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op2("xor %3=%3,%5", ret, oldval, uaddr, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	if (!ret)
		*oval = oldval;

	return ret;
}

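/*
 * If the futex word at uaddr still contains oldval, atomically
 * replace it with newval.  The value actually read is returned
 * through uval so the caller can tell whether the exchange took
 * place.  Returns 0 on success and -EFAULT if the user access is
 * invalid or faults.
 */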
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
	if (!access_ok(uaddr, sizeof(u32)))
		return -EFAULT;

	{
		register unsigned long r8 __asm ("r8") = 0;
		unsigned long prev;
		__asm__ __volatile__(
			"	mf;;					\n"
			"	mov ar.ccv=%4;;				\n"
			"[1:]	cmpxchg4.acq %1=[%2],%3,ar.ccv		\n"
			"	.xdata4 \"__ex_table\", 1b-., 2f-.	\n"
			"[2:]"
			: "+r" (r8), "=&r" (prev)
			: "r" (uaddr), "r" (newval),
			  "rO" ((long) (unsigned) oldval)
			: "memory");
		*uval = prev;
		return r8;
	}
}

#endif /* _ASM_FUTEX_H */