/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Atomic futex routines
 *
 * Based on the PowerPC implementation
 *
 * Copyright (C) 2013 TangoTec Ltd.
 *
 * Baruch Siach <baruch@tkos.co.il>
 */

#ifndef _ASM_XTENSA_FUTEX_H
#define _ASM_XTENSA_FUTEX_H

#include <linux/futex.h>
#include <linux/uaccess.h>
#include <linux/errno.h>

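/*
 * __futex_atomic_op(insn, ret, old, uaddr, arg) atomically applies
 * 'insn' (which must compute %[newval] from %[oldval] and %[oparg]) to
 * the user word at uaddr.  On success 'ret' is 0 and 'old' holds the
 * previous value; a fault on the user access is redirected through the
 * .fixup/__ex_table machinery below and leaves -EFAULT in 'ret'.
 */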
#if XCHAL_HAVE_EXCLUSIVE
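/*
 * Exclusive-access variant: l32ex loads the word and arms the
 * exclusive monitor, s32ex stores the new value only if the monitor is
 * still held, and getex fetches the store result; a zero result means
 * the store was lost to another agent, so we retry from the load.
 */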
#define __futex_atomic_op(insn, ret, old, uaddr, arg)	\
	__asm__ __volatile(				\
	"1:	l32ex	%[oldval], %[addr]\n"		\
		insn "\n"				\
	"2:	s32ex	%[newval], %[addr]\n"		\
	"	getex	%[newval]\n"			\
	"	beqz	%[newval], 1b\n"		\
	"	movi	%[newval], 0\n"			\
	"3:\n"						\
	"	.section .fixup,\"ax\"\n"		\
	"	.align 4\n"				\
	"	.literal_position\n"			\
	"5:	movi	%[oldval], 3b\n"		\
	"	movi	%[newval], %[fault]\n"		\
	"	jx	%[oldval]\n"			\
	"	.previous\n"				\
	"	.section __ex_table,\"a\"\n"		\
	"	.long 1b, 5b, 2b, 5b\n"			\
	"	.previous\n"				\
	: [oldval] "=&r" (old), [newval] "=&r" (ret)	\
	: [addr] "r" (uaddr), [oparg] "r" (arg),	\
	  [fault] "I" (-EFAULT)				\
	: "memory")
#elif XCHAL_HAVE_S32C1I
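/*
 * Compare-and-swap variant: the value just loaded is placed in the
 * SCOMPARE1 special register, and s32c1i stores %[newval] only if the
 * word in memory still equals SCOMPARE1, always leaving the observed
 * memory value in the register; a mismatch with %[oldval] means the
 * word changed underneath us and the sequence retries from the load.
 */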
#define __futex_atomic_op(insn, ret, old, uaddr, arg)	\
	__asm__ __volatile(				\
	"1:	l32i	%[oldval], %[mem]\n"		\
		insn "\n"				\
	"	wsr	%[oldval], scompare1\n"		\
	"2:	s32c1i	%[newval], %[mem]\n"		\
	"	bne	%[newval], %[oldval], 1b\n"	\
	"	movi	%[newval], 0\n"			\
	"3:\n"						\
	"	.section .fixup,\"ax\"\n"		\
	"	.align 4\n"				\
	"	.literal_position\n"			\
	"5:	movi	%[oldval], 3b\n"		\
	"	movi	%[newval], %[fault]\n"		\
	"	jx	%[oldval]\n"			\
	"	.previous\n"				\
	"	.section __ex_table,\"a\"\n"		\
	"	.long 1b, 5b, 2b, 5b\n"			\
	"	.previous\n"				\
	: [oldval] "=&r" (old), [newval] "=&r" (ret),	\
	  [mem] "+m" (*(uaddr))				\
	: [oparg] "r" (arg), [fault] "I" (-EFAULT)	\
	: "memory")
#endif

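/*
 * Apply a futex operation to the user word at uaddr and return the
 * previous value in *oval.  Returns 0 on success, -EFAULT if the user
 * access faults, and -ENOSYS for unknown ops (or, in the #else branch,
 * for cores with neither atomic option).  For example, FUTEX_OP_ADD is
 * equivalent to an atomic { *oval = *uaddr; *uaddr += oparg; }.  Note
 * that FUTEX_OP_ANDN reuses the plain 'and' instruction by passing
 * ~oparg as the argument.
 */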
static inline int arch_futex_atomic_op_inuser(int op, int oparg, int *oval,
		u32 __user *uaddr)
{
#if XCHAL_HAVE_S32C1I || XCHAL_HAVE_EXCLUSIVE
	int oldval = 0, ret;

	if (!access_ok(uaddr, sizeof(u32)))
		return -EFAULT;

	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op("mov %[newval], %[oparg]",
				  ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op("add %[newval], %[oldval], %[oparg]",
				  ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op("or %[newval], %[oldval], %[oparg]",
				  ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ANDN:
		__futex_atomic_op("and %[newval], %[oldval], %[oparg]",
				  ret, oldval, uaddr, ~oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op("xor %[newval], %[oldval], %[oparg]",
				  ret, oldval, uaddr, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	if (!ret)
		*oval = oldval;

	return ret;
#else
	return -ENOSYS;
#endif
}

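/*
 * Atomic compare-and-exchange on a user word: store newval to *uaddr
 * only if it currently contains oldval.  The value actually observed
 * is written to *uval either way, so the caller can tell whether the
 * exchange took place.  Returns 0 on success or -EFAULT on a faulting
 * user access.
 */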
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
#if XCHAL_HAVE_S32C1I || XCHAL_HAVE_EXCLUSIVE
	unsigned long tmp;
	int ret = 0;

	if (!access_ok(uaddr, sizeof(u32)))
		return -EFAULT;

	__asm__ __volatile__ (
	"	# futex_atomic_cmpxchg_inatomic\n"
#if XCHAL_HAVE_EXCLUSIVE
	"1:	l32ex	%[tmp], %[addr]\n"
	"	s32i	%[tmp], %[uval], 0\n"
	"	bne	%[tmp], %[oldval], 2f\n"
	"	mov	%[tmp], %[newval]\n"
	"3:	s32ex	%[tmp], %[addr]\n"
	"	getex	%[tmp]\n"
	"	beqz	%[tmp], 1b\n"
#elif XCHAL_HAVE_S32C1I
	"	wsr	%[oldval], scompare1\n"
	"1:	s32c1i	%[newval], %[addr], 0\n"
	"	s32i	%[newval], %[uval], 0\n"
#endif
	"2:\n"
	"	.section .fixup,\"ax\"\n"
	"	.align 4\n"
	"	.literal_position\n"
	"4:	movi	%[tmp], 2b\n"
	"	movi	%[ret], %[fault]\n"
	"	jx	%[tmp]\n"
	"	.previous\n"
	"	.section __ex_table,\"a\"\n"
	"	.long 1b, 4b\n"
#if XCHAL_HAVE_EXCLUSIVE
	"	.long 3b, 4b\n"
#endif
	"	.previous\n"
	: [ret] "+r" (ret), [newval] "+r" (newval), [tmp] "=&r" (tmp)
	: [addr] "r" (uaddr), [oldval] "r" (oldval), [uval] "r" (uval),
	  [fault] "I" (-EFAULT)
	: "memory");

	return ret;
#else
	return -ENOSYS;
#endif
}

#endif /* _ASM_XTENSA_FUTEX_H */