/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_MICROBLAZE_FUTEX_H
#define _ASM_MICROBLAZE_FUTEX_H

#ifdef __KERNEL__

#include <linux/futex.h>
#include <linux/uaccess.h>
#include <asm/errno.h>

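/*
 * Atomically apply "insn" to the user futex word at "uaddr" using the
 * MicroBlaze exclusive load/store pair: lwx loads the old value into %0
 * and sets a reservation, "insn" computes the new value into %1 (reused
 * as a scratch), and swx stores it back, setting carry if the reservation
 * was lost so that addic/bnei retry from label 1.  A fault on either user
 * access is routed through the __ex_table fixup at label 4, which returns
 * -EFAULT in "ret".
 */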
#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg)		\
({									\
	__asm__ __volatile__ (						\
			"1:	lwx	%0, %2, r0; "			\
				insn					\
			"2:	swx	%1, %2, r0;			\
				addic	%1, r0, 0;			\
				bnei	%1, 1b;				\
			3:						\
			.section .fixup,\"ax\";			\
			4:	brid	3b;				\
				addik	%1, r0, %3;			\
			.previous;					\
			.section __ex_table,\"a\";			\
			.word	1b,4b,2b,4b;				\
			.previous;"					\
	: "=&r" (oldval), "=&r" (ret)					\
	: "r" (uaddr), "i" (-EFAULT), "r" (oparg)			\
	);								\
})

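/*
 * Perform the futex operation "op" with argument "oparg" atomically on the
 * user word at "uaddr" and, on success, return the word's previous value
 * through "oval".  Unknown operations return -ENOSYS; a faulting user
 * access returns -EFAULT.
 */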
static inline int
arch_futex_atomic_op_inuser(int op, int oparg, int *oval, u32 __user *uaddr)
{
	int oldval = 0, ret;

	if (!access_ok(uaddr, sizeof(u32)))
		return -EFAULT;

	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op("or %1,%4,%4;", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op("add %1,%0,%4;", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op("or %1,%0,%4;", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ANDN:
		__futex_atomic_op("andn %1,%0,%4;", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op("xor %1,%0,%4;", ret, oldval, uaddr, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	if (!ret)
		*oval = oldval;

	return ret;
}
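
/*
 * A minimal usage sketch (not taken from the generic futex code, shown only
 * for illustration), assuming "uaddr" points at a mapped user futex word:
 *
 *	int oldval;
 *	int ret = arch_futex_atomic_op_inuser(FUTEX_OP_ADD, 1, &oldval, uaddr);
 *
 *	if (!ret)
 *		... "oldval" now holds the value *uaddr had before the add ...
 */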

static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
	int ret = 0, cmp;
	u32 prev;

	if (!access_ok(uaddr, sizeof(u32)))
		return -EFAULT;

	__asm__ __volatile__ ("1:	lwx	%1, %3, r0;		\
					cmp	%2, %1, %4;		\
					bnei	%2, 3f;			\
				2:	swx	%5, %3, r0;		\
					addic	%2, r0, 0;		\
					bnei	%2, 1b;			\
				3:					\
				.section .fixup,\"ax\";		\
				4:	brid	3b;			\
					addik	%0, r0, %6;		\
				.previous;				\
				.section __ex_table,\"a\";		\
				.word	1b,4b,2b,4b;			\
				.previous;"				\
		: "+r" (ret), "=&r" (prev), "=&r"(cmp)			\
		: "r" (uaddr), "r" (oldval), "r" (newval), "i" (-EFAULT));

	*uval = prev;
	return ret;
}
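
/*
 * A minimal usage sketch (shown only for illustration), assuming "uaddr"
 * points at a mapped user futex word and "expected"/"desired" are
 * caller-chosen values:
 *
 *	u32 cur;
 *	int ret = futex_atomic_cmpxchg_inatomic(&cur, uaddr, expected, desired);
 *
 *	On success (ret == 0), "cur" holds the value that was read; the store
 *	of "desired" happened only if that value equalled "expected".
 */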

#endif /* __KERNEL__ */

#endif