1*4882a593Smuzhiyun /*
2*4882a593Smuzhiyun * 1,2 and 4 byte cmpxchg and xchg implementations for OpenRISC.
3*4882a593Smuzhiyun *
4*4882a593Smuzhiyun * Copyright (C) 2014 Stefan Kristiansson <stefan.kristiansson@saunalahti.fi>
5*4882a593Smuzhiyun * Copyright (C) 2017 Stafford Horne <shorne@gmail.com>
6*4882a593Smuzhiyun *
7*4882a593Smuzhiyun * This file is licensed under the terms of the GNU General Public License
8*4882a593Smuzhiyun * version 2. This program is licensed "as is" without any warranty of any
9*4882a593Smuzhiyun * kind, whether express or implied.
10*4882a593Smuzhiyun *
11*4882a593Smuzhiyun * Note:
12*4882a593Smuzhiyun * The portable implementations of 1 and 2 byte xchg and cmpxchg using a 4
13*4882a593Smuzhiyun * byte cmpxchg is sourced heavily from the sh and mips implementations.
14*4882a593Smuzhiyun */
15*4882a593Smuzhiyun
16*4882a593Smuzhiyun #ifndef __ASM_OPENRISC_CMPXCHG_H
17*4882a593Smuzhiyun #define __ASM_OPENRISC_CMPXCHG_H
18*4882a593Smuzhiyun
19*4882a593Smuzhiyun #include <linux/bits.h>
20*4882a593Smuzhiyun #include <linux/compiler.h>
21*4882a593Smuzhiyun #include <linux/types.h>
22*4882a593Smuzhiyun
23*4882a593Smuzhiyun #define __HAVE_ARCH_CMPXCHG 1
24*4882a593Smuzhiyun
/*
 * Atomic 32-bit compare-and-exchange.
 *
 * l.lwa loads the word at ptr and places a hardware reservation on the
 * address; if the loaded value equals 'old', the matching l.swa
 * conditionally stores 'new'.  l.swa sets the flag only when the
 * reservation is still intact; on a lost reservation l.bnf loops back
 * to retry the whole sequence.  If the loaded value differs from
 * 'old', the sequence exits at label 2 without storing.
 *
 * Returns the value actually loaded from ptr: equal to 'old' on
 * success, the conflicting current value on failure.
 */
static inline unsigned long cmpxchg_u32(volatile void *ptr,
		unsigned long old, unsigned long new)
{
	__asm__ __volatile__(
		"1: l.lwa %0, 0(%1) \n"	/* load + set reservation */
		" l.sfeq %0, %2 \n"	/* flag = (loaded == old) */
		" l.bnf 2f \n"		/* mismatch -> bail out */
		" l.nop \n"		/* branch delay slot */
		" l.swa 0(%1), %3 \n"	/* store-conditional 'new' */
		" l.bnf 1b \n"		/* store failed -> retry */
		" l.nop \n"
		"2: \n"
		: "=&r"(old)		/* %0 reuses 'old' as the result */
		: "r"(ptr), "r"(old), "r"(new)
		: "cc", "memory");

	return old;
}
43*4882a593Smuzhiyun
/*
 * Atomic 32-bit exchange: unconditionally store 'val' at ptr and
 * return the previous contents.
 *
 * l.lwa loads the old value and sets a reservation; l.swa stores the
 * new value only while the reservation is intact, otherwise the flag
 * stays clear and l.bnf loops back to retry.
 */
static inline unsigned long xchg_u32(volatile void *ptr,
		unsigned long val)
{
	__asm__ __volatile__(
		"1: l.lwa %0, 0(%1) \n"	/* load old value + reservation */
		" l.swa 0(%1), %2 \n"	/* store-conditional 'val' */
		" l.bnf 1b \n"		/* lost reservation -> retry */
		" l.nop \n"		/* branch delay slot */
		: "=&r"(val)		/* %0 reuses 'val' as the result */
		: "r"(ptr), "r"(val)
		: "cc", "memory");

	return val;
}
58*4882a593Smuzhiyun
/*
 * Emulate a 1- or 2-byte cmpxchg with the native 32-bit one: operate
 * on the naturally aligned word containing the sub-word and keep the
 * neighbouring bytes unchanged.  Returns the sub-word value observed
 * at ptr ('old' on success).
 */
static inline u32 cmpxchg_small(volatile void *ptr, u32 old, u32 new,
		int size)
{
	int byteoff = (unsigned long)ptr % sizeof(u32);
	volatile u32 *word = ptr - byteoff;
#ifdef __BIG_ENDIAN
	/* On big-endian, byte 0 occupies the most significant bits. */
	int shift = (sizeof(u32) - size - byteoff) * BITS_PER_BYTE;
#else
	int shift = byteoff * BITS_PER_BYTE;
#endif
	u32 mask = ((0x1 << size * BITS_PER_BYTE) - 1) << shift;
	u32 cur32 = READ_ONCE(*word);

	for (;;) {
		u32 cur = (cur32 & mask) >> shift;
		u32 exp32, new32;

		/* Target sub-word no longer matches: report what we saw. */
		if (cur != old)
			return cur;

		exp32 = (cur32 & ~mask) | (old << shift);
		new32 = (cur32 & ~mask) | (new << shift);

		/*
		 * Full-word cmpxchg.  A concurrent change to the
		 * neighbouring bytes makes it fail; retry with the
		 * freshly observed word.
		 */
		cur32 = cmpxchg_u32(word, exp32, new32);
		if (cur32 == exp32)
			return old;
	}
}
89*4882a593Smuzhiyun
90*4882a593Smuzhiyun /* xchg */
91*4882a593Smuzhiyun
/*
 * Emulate a 1- or 2-byte xchg with a 32-bit cmpxchg on the enclosing
 * aligned word, leaving the neighbouring bytes untouched.  Returns
 * the previous sub-word value.
 */
static inline u32 xchg_small(volatile void *ptr, u32 x, int size)
{
	int byteoff = (unsigned long)ptr % sizeof(u32);
	volatile u32 *word = ptr - byteoff;
#ifdef __BIG_ENDIAN
	/* On big-endian, byte 0 occupies the most significant bits. */
	int shift = (sizeof(u32) - size - byteoff) * BITS_PER_BYTE;
#else
	int shift = byteoff * BITS_PER_BYTE;
#endif
	u32 mask = ((0x1 << size * BITS_PER_BYTE) - 1) << shift;
	u32 prev32, next32;

	/* Retry until the word is swapped without a concurrent change. */
	do {
		prev32 = READ_ONCE(*word);
		next32 = (prev32 & ~mask) | (x << shift);
	} while (cmpxchg_u32(word, prev32, next32) != prev32);

	/* prev32 is the word the successful cmpxchg observed. */
	return (prev32 & mask) >> shift;
}
113*4882a593Smuzhiyun
114*4882a593Smuzhiyun /*
115*4882a593Smuzhiyun * This function doesn't exist, so you'll get a linker error
116*4882a593Smuzhiyun * if something tries to do an invalid cmpxchg().
117*4882a593Smuzhiyun */
118*4882a593Smuzhiyun extern unsigned long __cmpxchg_called_with_bad_pointer(void)
119*4882a593Smuzhiyun __compiletime_error("Bad argument size for cmpxchg");
120*4882a593Smuzhiyun
__cmpxchg(volatile void * ptr,unsigned long old,unsigned long new,int size)121*4882a593Smuzhiyun static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
122*4882a593Smuzhiyun unsigned long new, int size)
123*4882a593Smuzhiyun {
124*4882a593Smuzhiyun switch (size) {
125*4882a593Smuzhiyun case 1:
126*4882a593Smuzhiyun case 2:
127*4882a593Smuzhiyun return cmpxchg_small(ptr, old, new, size);
128*4882a593Smuzhiyun case 4:
129*4882a593Smuzhiyun return cmpxchg_u32(ptr, old, new);
130*4882a593Smuzhiyun default:
131*4882a593Smuzhiyun return __cmpxchg_called_with_bad_pointer();
132*4882a593Smuzhiyun }
133*4882a593Smuzhiyun }
134*4882a593Smuzhiyun
/*
 * cmpxchg - atomically store n at ptr if *ptr currently equals o.
 * Evaluates to the value previously read from *ptr, cast back to the
 * pointee type.  Dispatches on sizeof(*(ptr)); only 1, 2 and 4 byte
 * objects are supported.
 */
#define cmpxchg(ptr, o, n)						\
	({								\
		(__typeof__(*(ptr))) __cmpxchg((ptr),			\
					       (unsigned long)(o),	\
					       (unsigned long)(n),	\
					       sizeof(*(ptr)));		\
	})
142*4882a593Smuzhiyun
143*4882a593Smuzhiyun /*
144*4882a593Smuzhiyun * This function doesn't exist, so you'll get a linker error if
145*4882a593Smuzhiyun * something tries to do an invalidly-sized xchg().
146*4882a593Smuzhiyun */
147*4882a593Smuzhiyun extern unsigned long __xchg_called_with_bad_pointer(void)
148*4882a593Smuzhiyun __compiletime_error("Bad argument size for xchg");
149*4882a593Smuzhiyun
__xchg(volatile void * ptr,unsigned long with,int size)150*4882a593Smuzhiyun static inline unsigned long __xchg(volatile void *ptr, unsigned long with,
151*4882a593Smuzhiyun int size)
152*4882a593Smuzhiyun {
153*4882a593Smuzhiyun switch (size) {
154*4882a593Smuzhiyun case 1:
155*4882a593Smuzhiyun case 2:
156*4882a593Smuzhiyun return xchg_small(ptr, with, size);
157*4882a593Smuzhiyun case 4:
158*4882a593Smuzhiyun return xchg_u32(ptr, with);
159*4882a593Smuzhiyun default:
160*4882a593Smuzhiyun return __xchg_called_with_bad_pointer();
161*4882a593Smuzhiyun }
162*4882a593Smuzhiyun }
163*4882a593Smuzhiyun
/*
 * xchg - atomically store 'with' at ptr and evaluate to the previous
 * value, cast back to the pointee type.  Dispatches on
 * sizeof(*(ptr)); only 1, 2 and 4 byte objects are supported.
 */
#define xchg(ptr, with) 						\
	({								\
		(__typeof__(*(ptr))) __xchg((ptr),			\
					    (unsigned long)(with),	\
					    sizeof(*(ptr)));		\
	})
170*4882a593Smuzhiyun
171*4882a593Smuzhiyun #endif /* __ASM_OPENRISC_CMPXCHG_H */
172