xref: /OK3568_Linux_fs/kernel/arch/arm64/include/asm/futex.h (revision 4882a59341e53eb6f0b4789bf948001014eff981)
/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2012 ARM Ltd.
 */
#ifndef __ASM_FUTEX_H
#define __ASM_FUTEX_H

#include <linux/futex.h>
#include <linux/uaccess.h>

#include <asm/errno.h>

#define FUTEX_MAX_LOOPS	128 /* What's the largest number you can think of? */

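/*
 * __futex_atomic_op() performs one futex read-modify-write as an
 * LDXR/STLXR exclusive-access loop, with 'insn' computing the new value.
 * Operand mapping (a summary of the asm below, for reference):
 *
 *	%w0 (ret)	store-exclusive status, then the return value
 *	%w1 (oldval)	value loaded from the user word
 *	%2  (*uaddr)	the user word itself
 *	%w3 (tmp)	new value produced by 'insn'
 *	%w4 (loops)	remaining retry budget
 *	%w5 (oparg)	operand consumed by 'insn'
 *	%w6, %w7	-EFAULT and -EAGAIN immediates
 *
 * A failed STLXR writes 1 to %w0, which is subtracted from the budget
 * before retrying from the load; after FUTEX_MAX_LOOPS failures the loop
 * bails out with -EAGAIN instead of spinning indefinitely, and a faulting
 * access is steered through the exception table to the fixup at label 4,
 * which returns -EFAULT.
 */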
#define __futex_atomic_op(insn, ret, oldval, uaddr, tmp, oparg)		\
do {									\
	unsigned int loops = FUTEX_MAX_LOOPS;				\
									\
	uaccess_enable_privileged();					\
	asm volatile(							\
"	prfm	pstl1strm, %2\n"					\
"1:	ldxr	%w1, %2\n"						\
	insn "\n"							\
"2:	stlxr	%w0, %w3, %2\n"						\
"	cbz	%w0, 3f\n"						\
"	sub	%w4, %w4, %w0\n"					\
"	cbnz	%w4, 1b\n"						\
"	mov	%w0, %w7\n"						\
"3:\n"									\
"	dmb	ish\n"							\
"	.pushsection .fixup,\"ax\"\n"					\
"	.align	2\n"							\
"4:	mov	%w0, %w6\n"						\
"	b	3b\n"							\
"	.popsection\n"							\
	_ASM_EXTABLE(1b, 4b)						\
	_ASM_EXTABLE(2b, 4b)						\
	: "=&r" (ret), "=&r" (oldval), "+Q" (*uaddr), "=&r" (tmp),	\
	  "+r" (loops)							\
	: "r" (oparg), "Ir" (-EFAULT), "Ir" (-EAGAIN)			\
	: "memory");							\
	uaccess_disable_privileged();					\
} while (0)

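/*
 * Apply 'op' to the user word at _uaddr and return the previous value
 * through *oval. The user pointer is sanitised with __uaccess_mask_ptr()
 * so that a mispredicted access_ok() check cannot be exploited to
 * speculatively access a kernel address; FUTEX_OP_ANDN reuses the plain
 * AND instruction by inverting oparg.
 */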
static inline int
arch_futex_atomic_op_inuser(int op, int oparg, int *oval, u32 __user *_uaddr)
{
	int oldval = 0, ret, tmp;
	u32 __user *uaddr = __uaccess_mask_ptr(_uaddr);

	if (!access_ok(_uaddr, sizeof(u32)))
		return -EFAULT;

	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op("mov	%w3, %w5",
				  ret, oldval, uaddr, tmp, oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op("add	%w3, %w1, %w5",
				  ret, oldval, uaddr, tmp, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op("orr	%w3, %w1, %w5",
				  ret, oldval, uaddr, tmp, oparg);
		break;
	case FUTEX_OP_ANDN:
		__futex_atomic_op("and	%w3, %w1, %w5",
				  ret, oldval, uaddr, tmp, ~oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op("eor	%w3, %w1, %w5",
				  ret, oldval, uaddr, tmp, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	if (!ret)
		*oval = oldval;

	return ret;
}

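/*
 * Atomically replace the user word at _uaddr with 'newval' if it equals
 * 'oldval', returning the value actually observed through *uval. A value
 * mismatch exits immediately via label 4; only failed store-exclusives
 * consume the retry budget, and exhausting it yields -EAGAIN.
 */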
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *_uaddr,
			      u32 oldval, u32 newval)
{
	int ret = 0;
	unsigned int loops = FUTEX_MAX_LOOPS;
	u32 val, tmp;
	u32 __user *uaddr;

	if (!access_ok(_uaddr, sizeof(u32)))
		return -EFAULT;

	uaddr = __uaccess_mask_ptr(_uaddr);
	uaccess_enable_privileged();
	asm volatile("// futex_atomic_cmpxchg_inatomic\n"
"	prfm	pstl1strm, %2\n"
"1:	ldxr	%w1, %2\n"
"	sub	%w3, %w1, %w5\n"
"	cbnz	%w3, 4f\n"
"2:	stlxr	%w3, %w6, %2\n"
"	cbz	%w3, 3f\n"
"	sub	%w4, %w4, %w3\n"
"	cbnz	%w4, 1b\n"
"	mov	%w0, %w8\n"
"3:\n"
"	dmb	ish\n"
"4:\n"
"	.pushsection .fixup,\"ax\"\n"
"5:	mov	%w0, %w7\n"
"	b	4b\n"
"	.popsection\n"
	_ASM_EXTABLE(1b, 5b)
	_ASM_EXTABLE(2b, 5b)
	: "+r" (ret), "=&r" (val), "+Q" (*uaddr), "=&r" (tmp), "+r" (loops)
	: "r" (oldval), "r" (newval), "Ir" (-EFAULT), "Ir" (-EAGAIN)
	: "memory");
	uaccess_disable_privileged();

	if (!ret)
		*uval = val;

	return ret;
}
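
/*
 * Illustrative sketch only: these helpers are meant to be driven by the
 * generic futex code (kernel/futex.c), not called directly. A plausible
 * FUTEX_WAKE_OP-style sequence, with hypothetical variable names:
 *
 *	u32 uval;
 *	int oldval, ret;
 *
 *	ret = arch_futex_atomic_op_inuser(FUTEX_OP_ADD, 1, &oldval, uaddr);
 *	if (ret == -EAGAIN)
 *		goto retry;	// store-exclusive budget exhausted
 *
 *	ret = futex_atomic_cmpxchg_inatomic(&uval, uaddr, oldval, oldval + 1);
 *	if (!ret && uval != oldval)
 *		goto retry;	// word changed under us: compare failed
 */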

#endif /* __ASM_FUTEX_H */