/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_FUTEX_H
#define _ASM_FUTEX_H

#include <linux/futex.h>
#include <linux/uaccess.h>
#include <asm/errno.h>
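
/*
 * Apply a single atomic instruction ("insn") to the user word at "uaddr"
 * with operand "oparg", returning the previous value in "oldval".  r8 is
 * cleared before the access; if the user access faults, the __ex_table
 * fixup resumes execution at the 2: label with an error code (-EFAULT)
 * in r8, which is then reported through "ret" (0 means success).
 */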
#define __futex_atomic_op1(insn, ret, oldval, uaddr, oparg)		\
do {									\
	register unsigned long r8 __asm ("r8") = 0;			\
	__asm__ __volatile__(						\
		"	mf;;					\n"	\
		"[1:]	" insn ";;				\n"	\
		"	.xdata4 \"__ex_table\", 1b-., 2f-.	\n"	\
		"[2:]"							\
		: "+r" (r8), "=r" (oldval)				\
		: "r" (uaddr), "r" (oparg)				\
		: "memory");						\
	ret = r8;							\
} while (0)
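
/*
 * Read-modify-write variant: load the user word, apply "insn" to compute
 * the new value, then attempt to store it back with cmpxchg4.acq.  The
 * loop retries until the compare-and-exchange observes an unchanged word,
 * or until a user access faults (the __ex_table fixup resumes at the 3:
 * label with a nonzero error code in r8).
 */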
#define __futex_atomic_op2(insn, ret, oldval, uaddr, oparg)		\
do {									\
	register unsigned long r8 __asm ("r8") = 0;			\
	int val, newval;						\
	do {								\
		__asm__ __volatile__(					\
			"	mf;;				\n"	\
			"[1:]	ld4 %3=[%4];;			\n"	\
			"	mov %2=%3			\n"	\
			insn ";;				\n"	\
			"	mov ar.ccv=%2;;			\n"	\
			"[2:]	cmpxchg4.acq %1=[%4],%3,ar.ccv;;\n"	\
			"	.xdata4 \"__ex_table\", 1b-., 3f-.\n"	\
			"	.xdata4 \"__ex_table\", 2b-., 3f-.\n"	\
			"[3:]"						\
			: "+r" (r8), "=r" (val), "=&r" (oldval),	\
			  "=&r" (newval)				\
			: "r" (uaddr), "r" (oparg)			\
			: "memory");					\
		if (unlikely (r8))					\
			break;						\
	} while (unlikely (val != oldval));				\
	ret = r8;							\
} while (0)
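
/*
 * Atomically apply the futex operation "op" with argument "oparg" to the
 * user word at "uaddr".  On success the previous value of the word is
 * stored in *oval and 0 is returned; -EFAULT is returned if the user
 * address is not accessible, -ENOSYS if the operation is not recognized.
 */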
static inline int
arch_futex_atomic_op_inuser(int op, int oparg, int *oval, u32 __user *uaddr)
{
	int oldval = 0, ret;

	if (!access_ok(uaddr, sizeof(u32)))
		return -EFAULT;

	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op1("xchg4 %1=[%2],%3", ret, oldval, uaddr,
				   oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op2("add %3=%3,%5", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op2("or %3=%3,%5", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ANDN:
		__futex_atomic_op2("and %3=%3,%5", ret, oldval, uaddr,
				   ~oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op2("xor %3=%3,%5", ret, oldval, uaddr, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	if (!ret)
		*oval = oldval;

	return ret;
}
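
/*
 * If the user word at "uaddr" equals "oldval", atomically replace it with
 * "newval".  The value observed at "uaddr" is returned through *uval.
 * Returns 0 on success, or -EFAULT if the user access faults.
 */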
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
	if (!access_ok(uaddr, sizeof(u32)))
		return -EFAULT;

	{
		register unsigned long r8 __asm ("r8") = 0;
		unsigned long prev;
		__asm__ __volatile__(
			"	mf;;					\n"
			"	mov ar.ccv=%4;;				\n"
			"[1:]	cmpxchg4.acq %1=[%2],%3,ar.ccv		\n"
			"	.xdata4 \"__ex_table\", 1b-., 2f-.	\n"
			"[2:]"
			: "+r" (r8), "=&r" (prev)
			: "r" (uaddr), "r" (newval),
			  "rO" ((long) (unsigned) oldval)
			: "memory");
		*uval = prev;
		return r8;
	}
}

#endif /* _ASM_FUTEX_H */