xref: /OK3568_Linux_fs/kernel/arch/nds32/include/asm/futex.h (revision 4882a59341e53eb6f0b4789bf948001014eff981)
/* SPDX-License-Identifier: GPL-2.0 */
// Copyright (C) 2005-2017 Andes Technology Corporation

#ifndef __NDS32_FUTEX_H__
#define __NDS32_FUTEX_H__

#include <linux/futex.h>
#include <linux/uaccess.h>
#include <asm/errno.h>

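/*
 * Exception-table boilerplate shared by the futex helpers below.  Labels
 * 1: and 2: mark the llw/scw user accesses; if either faults, the fixup
 * at label 4: moves the -EFAULT operand named by err_reg into %0 (ret)
 * and branches back to label 3:, the normal exit path.
 */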
#define __futex_atomic_ex_table(err_reg)			\
	"	.pushsection __ex_table,\"a\"\n"		\
	"	.align	3\n"					\
	"	.long	1b, 4f\n"				\
	"	.long	2b, 4f\n"				\
	"	.popsection\n"					\
	"	.pushsection .fixup,\"ax\"\n"			\
	"4:	move	%0, " err_reg "\n"			\
	"	b	3b\n"					\
	"	.popsection"

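/*
 * LL/SC loop used by arch_futex_atomic_op_inuser().  llw loads the old
 * value of the user word into %1 (oldval), "insn" computes the new value
 * into %0, and scw attempts to store it back; beqz retries the sequence
 * if the store-conditional failed, and the final movi clears %0 (ret) to
 * 0 on success.  Operands: %0 = ret, %1 = oldval, %2 = uaddr, %3 = oparg,
 * %4 = -EFAULT.  The tmp parameter is not referenced by the macro body.
 */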
#define __futex_atomic_op(insn, ret, oldval, tmp, uaddr, oparg)	\
	smp_mb();						\
	asm volatile(					\
	"	movi	$ta, #0\n"				\
	"1:	llw	%1, [%2+$ta]\n"				\
	"	" insn "\n"					\
	"2:	scw	%0, [%2+$ta]\n"				\
	"	beqz	%0, 1b\n"				\
	"	movi	%0, #0\n"				\
	"3:\n"							\
	__futex_atomic_ex_table("%4")				\
	: "=&r" (ret), "=&r" (oldval)				\
	: "r" (uaddr), "r" (oparg), "i" (-EFAULT)		\
	: "cc", "memory")
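
/*
 * Atomically compare the user word at uaddr with oldval and, if they
 * match, replace it with newval.  The value actually found in the word
 * is returned through *uval; the return value is 0 on success or
 * -EFAULT if the user access faults.  cmovz/cmovn pick newval or the
 * loaded value depending on whether the sub result (flags) is zero.
 */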
static inline int
futex_atomic_cmpxchg_inatomic(u32 * uval, u32 __user * uaddr,
			      u32 oldval, u32 newval)
{
	int ret = 0;
	u32 val, tmp, flags;

	if (!access_ok(uaddr, sizeof(u32)))
		return -EFAULT;

	smp_mb();
	asm volatile ("       movi    $ta, #0\n"
		      "1:     llw     %1, [%6 + $ta]\n"
		      "       sub     %3, %1, %4\n"
		      "       cmovz   %2, %5, %3\n"
		      "       cmovn   %2, %1, %3\n"
		      "2:     scw     %2, [%6 + $ta]\n"
		      "       beqz    %2, 1b\n"
		      "3:\n                   " __futex_atomic_ex_table("%7")
		      :"+&r"(ret), "=&r"(val), "=&r"(tmp), "=&r"(flags)
		      :"r"(oldval), "r"(newval), "r"(uaddr), "i"(-EFAULT)
		      :"$ta", "memory");
	smp_mb();

	*uval = val;
	return ret;
}

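/*
 * Perform the futex operation encoded by op (FUTEX_OP_SET/ADD/OR/ANDN/
 * XOR) on the user word at uaddr with operand oparg, returning the
 * previous value of the word through *oval.  Returns 0 on success,
 * -EFAULT on a faulting user access and -ENOSYS for an unknown op.
 * FUTEX_OP_ANDN is implemented as an AND with the complemented operand
 * (~oparg).
 */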
static inline int
arch_futex_atomic_op_inuser(int op, int oparg, int *oval, u32 __user *uaddr)
{
	int oldval = 0, ret;

	if (!access_ok(uaddr, sizeof(u32)))
		return -EFAULT;
	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op("move	%0, %3", ret, oldval, tmp, uaddr,
				  oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op("add	%0, %1, %3", ret, oldval, tmp, uaddr,
				  oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op("or	%0, %1, %3", ret, oldval, tmp, uaddr,
				  oparg);
		break;
	case FUTEX_OP_ANDN:
		__futex_atomic_op("and	%0, %1, %3", ret, oldval, tmp, uaddr,
				  ~oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op("xor	%0, %1, %3", ret, oldval, tmp, uaddr,
				  oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	if (!ret)
		*oval = oldval;

	return ret;
}
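
/*
 * Illustrative only: a minimal sketch of how the generic futex code is
 * expected to call the helpers above; the local variables here are made
 * up for the example and are not part of this header.
 *
 *	u32 __user *uaddr;	// futex word in user memory
 *	u32 curval, oldval, newval;
 *	int old, ret;
 *
 *	// FUTEX_OP_ADD: atomically add 1 to the user word; the value
 *	// seen before the update is returned through &old.
 *	ret = arch_futex_atomic_op_inuser(FUTEX_OP_ADD, 1, &old, uaddr);
 *
 *	// Install newval only if the word still holds oldval; the value
 *	// actually observed is returned through &curval.
 *	ret = futex_atomic_cmpxchg_inatomic(&curval, uaddr, oldval, newval);
 */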
#endif /* __NDS32_FUTEX_H__ */