/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_ARM_FUTEX_H
#define _ASM_ARM_FUTEX_H

#ifdef __KERNEL__

#include <linux/futex.h>
#include <linux/uaccess.h>
#include <asm/errno.h>

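/*
 * Shared exception-table fixup for the futex assembly below: labels
 * 1 and 2 mark the user-access instructions, and the __ex_table
 * entries send a fault at either one to label 4, which loads -EFAULT
 * (passed in err_reg) into the result register and resumes at label 3.
 */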
#define __futex_atomic_ex_table(err_reg)			\
	"3:\n"							\
	"	.pushsection __ex_table,\"a\"\n"		\
	"	.align	3\n"					\
	"	.long	1b, 4f, 2b, 4f\n"			\
	"	.popsection\n"					\
	"	.pushsection .text.fixup,\"ax\"\n"		\
	"	.align	2\n"					\
	"4:	mov	%0, " err_reg "\n"			\
	"	b	3b\n"					\
	"	.popsection"

#ifdef CONFIG_SMP

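/*
 * On SMP the operation is an exclusive load/store loop: ldrex reads
 * the old value, "insn" computes the new value from it, and strex
 * attempts the store, retrying from label 1 whenever the exclusive
 * monitor reports contention (non-zero status in "tmp").
 */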
#define __futex_atomic_op(insn, ret, oldval, tmp, uaddr, oparg)	\
({								\
	unsigned int __ua_flags;				\
	smp_mb();						\
	prefetchw(uaddr);					\
	__ua_flags = uaccess_save_and_enable();			\
	__asm__ __volatile__(					\
	"1:	ldrex	%1, [%3]\n"				\
	"	" insn "\n"					\
	"2:	strex	%2, %0, [%3]\n"				\
	"	teq	%2, #0\n"				\
	"	bne	1b\n"					\
	"	mov	%0, #0\n"				\
	__futex_atomic_ex_table("%5")				\
	: "=&r" (ret), "=&r" (oldval), "=&r" (tmp)		\
	: "r" (uaddr), "r" (oparg), "Ir" (-EFAULT)		\
	: "cc", "memory");					\
	uaccess_restore(__ua_flags);				\
})

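/*
 * Compare-and-exchange on the user futex word: store newval only if
 * the current value matches oldval, retrying when the exclusive store
 * loses its reservation.  The smp_mb() calls on entry and exit give
 * the operation full-barrier semantics, and the value actually read
 * is reported through *uval so the caller can detect a mismatch.
 */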
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
	unsigned int __ua_flags;
	int ret;
	u32 val;

	if (!access_ok(uaddr, sizeof(u32)))
		return -EFAULT;

	smp_mb();
	/* Prefetching cannot fault */
	prefetchw(uaddr);
	__ua_flags = uaccess_save_and_enable();
	__asm__ __volatile__("@futex_atomic_cmpxchg_inatomic\n"
	"1:	ldrex	%1, [%4]\n"
	"	teq	%1, %2\n"
	"	ite	eq	@ explicit IT needed for the 2b label\n"
	"2:	strexeq	%0, %3, [%4]\n"
	"	movne	%0, #0\n"
	"	teq	%0, #0\n"
	"	bne	1b\n"
	__futex_atomic_ex_table("%5")
	: "=&r" (ret), "=&r" (val)
	: "r" (oldval), "r" (newval), "r" (uaddr), "Ir" (-EFAULT)
	: "cc", "memory");
	uaccess_restore(__ua_flags);
	smp_mb();

	*uval = val;
	return ret;
}

#else /* !SMP, we can work around lack of atomic ops by disabling preemption */

#include <linux/preempt.h>
#include <asm/domain.h>

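/*
 * On UP the caller runs with preemption disabled, so a plain
 * load/modify/store sequence is already atomic with respect to other
 * threads on this CPU; TUSER() selects the unprivileged user-access
 * form of the load and store.
 */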
#define __futex_atomic_op(insn, ret, oldval, tmp, uaddr, oparg)	\
({								\
	unsigned int __ua_flags = uaccess_save_and_enable();	\
	__asm__ __volatile__(					\
	"1:	" TUSER(ldr) "	%1, [%3]\n"			\
	"	" insn "\n"					\
	"2:	" TUSER(str) "	%0, [%3]\n"			\
	"	mov	%0, #0\n"				\
	__futex_atomic_ex_table("%5")				\
	: "=&r" (ret), "=&r" (oldval), "=&r" (tmp)		\
	: "r" (uaddr), "r" (oparg), "Ir" (-EFAULT)		\
	: "cc", "memory");					\
	uaccess_restore(__ua_flags);				\
})

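/*
 * UP compare-and-exchange: with preemption disabled the
 * load/compare/conditional-store sequence cannot be interleaved with
 * another thread.  "ret" is preinitialised to 0 and constrained "+r"
 * because only the fault fixup ever writes to it.
 */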
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
	unsigned int __ua_flags;
	int ret = 0;
	u32 val;

	if (!access_ok(uaddr, sizeof(u32)))
		return -EFAULT;

	preempt_disable();
	__ua_flags = uaccess_save_and_enable();
	__asm__ __volatile__("@futex_atomic_cmpxchg_inatomic\n"
	"	.syntax unified\n"
	"1:	" TUSER(ldr) "	%1, [%4]\n"
	"	teq	%1, %2\n"
	"	it	eq	@ explicit IT needed for the 2b label\n"
	"2:	" TUSERCOND(str, eq) "	%3, [%4]\n"
	__futex_atomic_ex_table("%5")
	: "+r" (ret), "=&r" (val)
	: "r" (oldval), "r" (newval), "r" (uaddr), "Ir" (-EFAULT)
	: "cc", "memory");
	uaccess_restore(__ua_flags);

	*uval = val;
	preempt_enable();

	return ret;
}

#endif /* !SMP */

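/*
 * Dispatch a futex operation on the user word at uaddr.  Each case
 * feeds a single instruction template into __futex_atomic_op();
 * FUTEX_OP_ANDN reuses the "and" template by inverting oparg up
 * front.  The old value is reported through *oval.
 */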
static inline int
arch_futex_atomic_op_inuser(int op, int oparg, int *oval, u32 __user *uaddr)
{
	int oldval = 0, ret, tmp;

	if (!access_ok(uaddr, sizeof(u32)))
		return -EFAULT;

#ifndef CONFIG_SMP
	preempt_disable();
#endif

	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op("mov	%0, %4", ret, oldval, tmp, uaddr, oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op("add	%0, %1, %4", ret, oldval, tmp, uaddr, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op("orr	%0, %1, %4", ret, oldval, tmp, uaddr, oparg);
		break;
	case FUTEX_OP_ANDN:
		__futex_atomic_op("and	%0, %1, %4", ret, oldval, tmp, uaddr, ~oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op("eor	%0, %1, %4", ret, oldval, tmp, uaddr, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

#ifndef CONFIG_SMP
	preempt_enable();
#endif

	/*
	 * Store unconditionally. If ret != 0 the extra store is the least
	 * of the worries, but GCC cannot figure out that __futex_atomic_op()
	 * is either setting ret to -EFAULT or storing the old value in
	 * oldval, which results in an uninitialized warning at the call site.
	 */
	*oval = oldval;

	return ret;
}

#endif /* __KERNEL__ */
#endif /* _ASM_ARM_FUTEX_H */