/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ALPHA_CMPXCHG_H
#error Do not include xchg.h directly!
#else
/*
 * xchg/xchg_local and cmpxchg/cmpxchg_local share the same code
 * except that the local versions do not have the expensive memory barrier.
 * So this file is included twice from asm/cmpxchg.h.
 */
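
/*
 * Roughly, asm/cmpxchg.h does something like the following (a sketch only;
 * the exact macro names and wrapper details live in asm/cmpxchg.h and may
 * differ):
 *
 *	#define ____xchg(type, args...)		__xchg ## type ## _local(args)
 *	#define ____cmpxchg(type, args...)	__cmpxchg ## type ## _local(args)
 *	#include <asm/xchg.h>			(first pass: the *_local names)
 *	#undef ____xchg
 *	#undef ____cmpxchg
 *	#define ____xchg(type, args...)		__xchg ## type(args)
 *	#define ____cmpxchg(type, args...)	__cmpxchg ## type(args)
 *	#include <asm/xchg.h>			(second pass: the names used by
 *						 the barrier-wrapped xchg/cmpxchg)
 *
 * so every ____xchg()/____cmpxchg() definition below is emitted twice, under
 * two different names.
 */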

/*
 * Atomic exchange.
 * Since it can be used to implement critical sections
 * it must clobber "memory" (also for interrupts in UP).
 */

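/*
 * All of the routines below follow the same load-locked/store-conditional
 * pattern: ldq_l/ldl_l reads the location and sets the lock flag, stq_c/stl_c
 * stores only if nothing else touched the locked range in between (the
 * register is written back as 0 on failure), and a failed store branches to
 * the out-of-line "br 1b" stub to retry.  In C-like pseudocode (a sketch with
 * made-up helper names, not real code):
 *
 *	do {
 *		old = load_locked(m);
 *	} while (!store_conditional(m, new));
 *
 * The _u8/_u16 variants operate on the containing aligned quadword, since the
 * LL/SC instructions only exist in 32- and 64-bit forms: insbl/inswl position
 * the new byte/word, extbl/extwl pull the old one out for the return value,
 * and mskbl/mskwl clear the target field before the new value is OR-ed in.
 */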
static inline unsigned long
____xchg(_u8, volatile char *m, unsigned long val)
{
	unsigned long ret, tmp, addr64;

	__asm__ __volatile__(
	"	andnot	%4,7,%3\n"
	"	insbl	%1,%4,%1\n"
	"1:	ldq_l	%2,0(%3)\n"
	"	extbl	%2,%4,%0\n"
	"	mskbl	%2,%4,%2\n"
	"	or	%1,%2,%2\n"
	"	stq_c	%2,0(%3)\n"
	"	beq	%2,2f\n"
	".subsection 2\n"
	"2:	br	1b\n"
	".previous"
	: "=&r" (ret), "=&r" (val), "=&r" (tmp), "=&r" (addr64)
	: "r" ((long)m), "1" (val) : "memory");

	return ret;
}

static inline unsigned long
____xchg(_u16, volatile short *m, unsigned long val)
{
	unsigned long ret, tmp, addr64;

	__asm__ __volatile__(
	"	andnot	%4,7,%3\n"
	"	inswl	%1,%4,%1\n"
	"1:	ldq_l	%2,0(%3)\n"
	"	extwl	%2,%4,%0\n"
	"	mskwl	%2,%4,%2\n"
	"	or	%1,%2,%2\n"
	"	stq_c	%2,0(%3)\n"
	"	beq	%2,2f\n"
	".subsection 2\n"
	"2:	br	1b\n"
	".previous"
	: "=&r" (ret), "=&r" (val), "=&r" (tmp), "=&r" (addr64)
	: "r" ((long)m), "1" (val) : "memory");

	return ret;
}

static inline unsigned long
____xchg(_u32, volatile int *m, unsigned long val)
{
	unsigned long dummy;

	__asm__ __volatile__(
	"1:	ldl_l	%0,%4\n"
	"	bis	$31,%3,%1\n"
	"	stl_c	%1,%2\n"
	"	beq	%1,2f\n"
	".subsection 2\n"
	"2:	br	1b\n"
	".previous"
	: "=&r" (val), "=&r" (dummy), "=m" (*m)
	: "rI" (val), "m" (*m) : "memory");

	return val;
}

static inline unsigned long
____xchg(_u64, volatile long *m, unsigned long val)
{
	unsigned long dummy;

	__asm__ __volatile__(
	"1:	ldq_l	%0,%4\n"
	"	bis	$31,%3,%1\n"
	"	stq_c	%1,%2\n"
	"	beq	%1,2f\n"
	".subsection 2\n"
	"2:	br	1b\n"
	".previous"
	: "=&r" (val), "=&r" (dummy), "=m" (*m)
	: "rI" (val), "m" (*m) : "memory");

	return val;
}

/* This function doesn't exist, so you'll get a linker error
   if something tries to do an invalid xchg(). */
extern void __xchg_called_with_bad_pointer(void);

static __always_inline unsigned long
____xchg(, volatile void *ptr, unsigned long x, int size)
{
	switch (size) {
	case 1:
		return ____xchg(_u8, ptr, x);
	case 2:
		return ____xchg(_u16, ptr, x);
	case 4:
		return ____xchg(_u32, ptr, x);
	case 8:
		return ____xchg(_u64, ptr, x);
	}
	__xchg_called_with_bad_pointer();
	return x;
}
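
/*
 * Since this helper is __always_inline and "size" is the compile-time
 * constant sizeof(*ptr) supplied by the xchg() wrapper (see asm/cmpxchg.h),
 * the switch is resolved at compile time: only one variant is inlined, and
 * the __xchg_called_with_bad_pointer() call survives, and breaks the link,
 * only when xchg() is used on an unsupported size.  Illustrative use, with
 * hypothetical variable names:
 *
 *	unsigned long flag = 0;
 *	unsigned long was_set = xchg(&flag, 1UL);	(8-byte case above)
 */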

/*
 * Atomic compare and exchange.  Compare OLD with MEM, if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 */
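
/*
 * Typical caller pattern (illustrative only; the cmpxchg() wrapper itself
 * is defined in asm/cmpxchg.h, and "counter" is a made-up variable):
 *
 *	long old, new;
 *	do {
 *		old = READ_ONCE(*counter);
 *		new = old + 1;
 *	} while (cmpxchg(counter, old, new) != old);
 *
 * The loop retries until the value read as "old" is still in place when the
 * store happens, which is exactly the "compare RETURN with OLD" rule above.
 */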

static inline unsigned long
____cmpxchg(_u8, volatile char *m, unsigned char old, unsigned char new)
{
	unsigned long prev, tmp, cmp, addr64;

	__asm__ __volatile__(
	"	andnot	%5,7,%4\n"
	"	insbl	%1,%5,%1\n"
	"1:	ldq_l	%2,0(%4)\n"
	"	extbl	%2,%5,%0\n"
	"	cmpeq	%0,%6,%3\n"
	"	beq	%3,2f\n"
	"	mskbl	%2,%5,%2\n"
	"	or	%1,%2,%2\n"
	"	stq_c	%2,0(%4)\n"
	"	beq	%2,3f\n"
	"2:\n"
	".subsection 2\n"
	"3:	br	1b\n"
	".previous"
	: "=&r" (prev), "=&r" (new), "=&r" (tmp), "=&r" (cmp), "=&r" (addr64)
	: "r" ((long)m), "Ir" (old), "1" (new) : "memory");

	return prev;
}

static inline unsigned long
____cmpxchg(_u16, volatile short *m, unsigned short old, unsigned short new)
{
	unsigned long prev, tmp, cmp, addr64;

	__asm__ __volatile__(
	"	andnot	%5,7,%4\n"
	"	inswl	%1,%5,%1\n"
	"1:	ldq_l	%2,0(%4)\n"
	"	extwl	%2,%5,%0\n"
	"	cmpeq	%0,%6,%3\n"
	"	beq	%3,2f\n"
	"	mskwl	%2,%5,%2\n"
	"	or	%1,%2,%2\n"
	"	stq_c	%2,0(%4)\n"
	"	beq	%2,3f\n"
	"2:\n"
	".subsection 2\n"
	"3:	br	1b\n"
	".previous"
	: "=&r" (prev), "=&r" (new), "=&r" (tmp), "=&r" (cmp), "=&r" (addr64)
	: "r" ((long)m), "Ir" (old), "1" (new) : "memory");

	return prev;
}

static inline unsigned long
____cmpxchg(_u32, volatile int *m, int old, int new)
{
	unsigned long prev, cmp;

	__asm__ __volatile__(
	"1:	ldl_l	%0,%5\n"
	"	cmpeq	%0,%3,%1\n"
	"	beq	%1,2f\n"
	"	mov	%4,%1\n"
	"	stl_c	%1,%2\n"
	"	beq	%1,3f\n"
	"2:\n"
	".subsection 2\n"
	"3:	br	1b\n"
	".previous"
	: "=&r"(prev), "=&r"(cmp), "=m"(*m)
	: "r"((long) old), "r"(new), "m"(*m) : "memory");

	return prev;
}

static inline unsigned long
____cmpxchg(_u64, volatile long *m, unsigned long old, unsigned long new)
{
	unsigned long prev, cmp;

	__asm__ __volatile__(
	"1:	ldq_l	%0,%5\n"
	"	cmpeq	%0,%3,%1\n"
	"	beq	%1,2f\n"
	"	mov	%4,%1\n"
	"	stq_c	%1,%2\n"
	"	beq	%1,3f\n"
	"2:\n"
	".subsection 2\n"
	"3:	br	1b\n"
	".previous"
	: "=&r"(prev), "=&r"(cmp), "=m"(*m)
	: "r"((long) old), "r"(new), "m"(*m) : "memory");

	return prev;
}

/* This function doesn't exist, so you'll get a linker error
   if something tries to do an invalid cmpxchg(). */
extern void __cmpxchg_called_with_bad_pointer(void);

static __always_inline unsigned long
____cmpxchg(, volatile void *ptr, unsigned long old, unsigned long new,
	    int size)
{
	switch (size) {
	case 1:
		return ____cmpxchg(_u8, ptr, old, new);
	case 2:
		return ____cmpxchg(_u16, ptr, old, new);
	case 4:
		return ____cmpxchg(_u32, ptr, old, new);
	case 8:
		return ____cmpxchg(_u64, ptr, old, new);
	}
	__cmpxchg_called_with_bad_pointer();
	return old;
}

#endif