xref: /OK3568_Linux_fs/kernel/arch/riscv/include/asm/cmpxchg.h (revision 4882a59341e53eb6f0b4789bf948001014eff981)
/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2014 Regents of the University of California
 */

#ifndef _ASM_RISCV_CMPXCHG_H
#define _ASM_RISCV_CMPXCHG_H

#include <linux/bug.h>

#include <asm/barrier.h>
#include <asm/fence.h>

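/*
 * Exchange helpers.  Each xchg flavour below swaps *ptr with a new value
 * atomically via an AMO instruction and returns the previous contents:
 *
 *   - xchg_relaxed(): plain amoswap.w/d, no ordering beyond the atomicity
 *     of the AMO itself.
 *   - xchg_acquire(): the AMO followed by RISCV_ACQUIRE_BARRIER (from
 *     <asm/fence.h>), so later accesses cannot be reordered before it.
 *   - xchg_release(): RISCV_RELEASE_BARRIER before the AMO, so earlier
 *     accesses cannot be reordered after it.
 *   - xchg(): amoswap.w/d.aqrl, the fully ordered form.
 *
 * Only 4- and 8-byte objects are supported; any other size is rejected at
 * build time via BUILD_BUG().
 */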
#define __xchg_relaxed(ptr, new, size)					\
({									\
	__typeof__(ptr) __ptr = (ptr);					\
	__typeof__(new) __new = (new);					\
	__typeof__(*(ptr)) __ret;					\
	switch (size) {							\
	case 4:								\
		__asm__ __volatile__ (					\
			"	amoswap.w %0, %2, %1\n"			\
			: "=r" (__ret), "+A" (*__ptr)			\
			: "r" (__new)					\
			: "memory");					\
		break;							\
	case 8:								\
		__asm__ __volatile__ (					\
			"	amoswap.d %0, %2, %1\n"			\
			: "=r" (__ret), "+A" (*__ptr)			\
			: "r" (__new)					\
			: "memory");					\
		break;							\
	default:							\
		BUILD_BUG();						\
	}								\
	__ret;								\
})

#define xchg_relaxed(ptr, x)						\
({									\
	__typeof__(*(ptr)) _x_ = (x);					\
	(__typeof__(*(ptr))) __xchg_relaxed((ptr),			\
					    _x_, sizeof(*(ptr)));	\
})

#define __xchg_acquire(ptr, new, size)					\
({									\
	__typeof__(ptr) __ptr = (ptr);					\
	__typeof__(new) __new = (new);					\
	__typeof__(*(ptr)) __ret;					\
	switch (size) {							\
	case 4:								\
		__asm__ __volatile__ (					\
			"	amoswap.w %0, %2, %1\n"			\
			RISCV_ACQUIRE_BARRIER				\
			: "=r" (__ret), "+A" (*__ptr)			\
			: "r" (__new)					\
			: "memory");					\
		break;							\
	case 8:								\
		__asm__ __volatile__ (					\
			"	amoswap.d %0, %2, %1\n"			\
			RISCV_ACQUIRE_BARRIER				\
			: "=r" (__ret), "+A" (*__ptr)			\
			: "r" (__new)					\
			: "memory");					\
		break;							\
	default:							\
		BUILD_BUG();						\
	}								\
	__ret;								\
})

#define xchg_acquire(ptr, x)						\
({									\
	__typeof__(*(ptr)) _x_ = (x);					\
	(__typeof__(*(ptr))) __xchg_acquire((ptr),			\
					    _x_, sizeof(*(ptr)));	\
})

#define __xchg_release(ptr, new, size)					\
({									\
	__typeof__(ptr) __ptr = (ptr);					\
	__typeof__(new) __new = (new);					\
	__typeof__(*(ptr)) __ret;					\
	switch (size) {							\
	case 4:								\
		__asm__ __volatile__ (					\
			RISCV_RELEASE_BARRIER				\
			"	amoswap.w %0, %2, %1\n"			\
			: "=r" (__ret), "+A" (*__ptr)			\
			: "r" (__new)					\
			: "memory");					\
		break;							\
	case 8:								\
		__asm__ __volatile__ (					\
			RISCV_RELEASE_BARRIER				\
			"	amoswap.d %0, %2, %1\n"			\
			: "=r" (__ret), "+A" (*__ptr)			\
			: "r" (__new)					\
			: "memory");					\
		break;							\
	default:							\
		BUILD_BUG();						\
	}								\
	__ret;								\
})

#define xchg_release(ptr, x)						\
({									\
	__typeof__(*(ptr)) _x_ = (x);					\
	(__typeof__(*(ptr))) __xchg_release((ptr),			\
					    _x_, sizeof(*(ptr)));	\
})

#define __xchg(ptr, new, size)						\
({									\
	__typeof__(ptr) __ptr = (ptr);					\
	__typeof__(new) __new = (new);					\
	__typeof__(*(ptr)) __ret;					\
	switch (size) {							\
	case 4:								\
		__asm__ __volatile__ (					\
			"	amoswap.w.aqrl %0, %2, %1\n"		\
			: "=r" (__ret), "+A" (*__ptr)			\
			: "r" (__new)					\
			: "memory");					\
		break;							\
	case 8:								\
		__asm__ __volatile__ (					\
			"	amoswap.d.aqrl %0, %2, %1\n"		\
			: "=r" (__ret), "+A" (*__ptr)			\
			: "r" (__new)					\
			: "memory");					\
		break;							\
	default:							\
		BUILD_BUG();						\
	}								\
	__ret;								\
})

#define xchg(ptr, x)							\
({									\
	__typeof__(*(ptr)) _x_ = (x);					\
	(__typeof__(*(ptr))) __xchg((ptr), _x_, sizeof(*(ptr)));	\
})
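
/*
 * Illustrative only (not part of this file): a minimal sketch of how the
 * fully ordered xchg() is typically used, e.g. handing off a pending value
 * and acting on whatever was there before.  The names pending_word and
 * handle_previous() are invented for the example.
 *
 *	unsigned long prev, next = 1UL;
 *
 *	prev = xchg(&pending_word, next);
 *	if (prev)
 *		handle_previous(prev);
 *
 * The size-suffixed xchg32()/xchg64() wrappers below just assert the
 * operand width at build time before falling through to xchg().
 */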

#define xchg32(ptr, x)							\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 4);				\
	xchg((ptr), (x));						\
})

#define xchg64(ptr, x)							\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	xchg((ptr), (x));						\
})

/*
 * Atomic compare and exchange.  Compare OLD with MEM, if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 */
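/*
 * Illustrative only: success is detected by comparing the returned value
 * with the expected old value, e.g. claiming a word that is still zero
 * (the names owner and me are invented for the example):
 *
 *	if (cmpxchg(&owner, 0UL, me) == 0UL)
 *		... ownership acquired ...
 *
 * A note on the asm operands used below: "+A" marks *__ptr as a memory
 * operand whose address is held in a register, as the LR/SC forms require,
 * while "rJ" together with the %z output modifier lets a constant-zero
 * old/new value be emitted as the x0 register instead of tying up a
 * scratch register.
 */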
#define __cmpxchg_relaxed(ptr, old, new, size)				\
({									\
	__typeof__(ptr) __ptr = (ptr);					\
	__typeof__(*(ptr)) __old = (old);				\
	__typeof__(*(ptr)) __new = (new);				\
	__typeof__(*(ptr)) __ret;					\
	register unsigned int __rc;					\
	switch (size) {							\
	case 4:								\
		__asm__ __volatile__ (					\
			"0:	lr.w %0, %2\n"				\
			"	bne  %0, %z3, 1f\n"			\
			"	sc.w %1, %z4, %2\n"			\
			"	bnez %1, 0b\n"				\
			"1:\n"						\
			: "=&r" (__ret), "=&r" (__rc), "+A" (*__ptr)	\
			: "rJ" ((long)__old), "rJ" (__new)		\
			: "memory");					\
		break;							\
	case 8:								\
		__asm__ __volatile__ (					\
			"0:	lr.d %0, %2\n"				\
			"	bne %0, %z3, 1f\n"			\
			"	sc.d %1, %z4, %2\n"			\
			"	bnez %1, 0b\n"				\
			"1:\n"						\
			: "=&r" (__ret), "=&r" (__rc), "+A" (*__ptr)	\
			: "rJ" (__old), "rJ" (__new)			\
			: "memory");					\
		break;							\
	default:							\
		BUILD_BUG();						\
	}								\
	__ret;								\
})

#define cmpxchg_relaxed(ptr, o, n)					\
({									\
	__typeof__(*(ptr)) _o_ = (o);					\
	__typeof__(*(ptr)) _n_ = (n);					\
	(__typeof__(*(ptr))) __cmpxchg_relaxed((ptr),			\
					_o_, _n_, sizeof(*(ptr)));	\
})

#define __cmpxchg_acquire(ptr, old, new, size)				\
({									\
	__typeof__(ptr) __ptr = (ptr);					\
	__typeof__(*(ptr)) __old = (old);				\
	__typeof__(*(ptr)) __new = (new);				\
	__typeof__(*(ptr)) __ret;					\
	register unsigned int __rc;					\
	switch (size) {							\
	case 4:								\
		__asm__ __volatile__ (					\
			"0:	lr.w %0, %2\n"				\
			"	bne  %0, %z3, 1f\n"			\
			"	sc.w %1, %z4, %2\n"			\
			"	bnez %1, 0b\n"				\
			RISCV_ACQUIRE_BARRIER				\
			"1:\n"						\
			: "=&r" (__ret), "=&r" (__rc), "+A" (*__ptr)	\
			: "rJ" ((long)__old), "rJ" (__new)		\
			: "memory");					\
		break;							\
	case 8:								\
		__asm__ __volatile__ (					\
			"0:	lr.d %0, %2\n"				\
			"	bne %0, %z3, 1f\n"			\
			"	sc.d %1, %z4, %2\n"			\
			"	bnez %1, 0b\n"				\
			RISCV_ACQUIRE_BARRIER				\
			"1:\n"						\
			: "=&r" (__ret), "=&r" (__rc), "+A" (*__ptr)	\
			: "rJ" (__old), "rJ" (__new)			\
			: "memory");					\
		break;							\
	default:							\
		BUILD_BUG();						\
	}								\
	__ret;								\
})

#define cmpxchg_acquire(ptr, o, n)					\
({									\
	__typeof__(*(ptr)) _o_ = (o);					\
	__typeof__(*(ptr)) _n_ = (n);					\
	(__typeof__(*(ptr))) __cmpxchg_acquire((ptr),			\
					_o_, _n_, sizeof(*(ptr)));	\
})

#define __cmpxchg_release(ptr, old, new, size)				\
({									\
	__typeof__(ptr) __ptr = (ptr);					\
	__typeof__(*(ptr)) __old = (old);				\
	__typeof__(*(ptr)) __new = (new);				\
	__typeof__(*(ptr)) __ret;					\
	register unsigned int __rc;					\
	switch (size) {							\
	case 4:								\
		__asm__ __volatile__ (					\
			RISCV_RELEASE_BARRIER				\
			"0:	lr.w %0, %2\n"				\
			"	bne  %0, %z3, 1f\n"			\
			"	sc.w %1, %z4, %2\n"			\
			"	bnez %1, 0b\n"				\
			"1:\n"						\
			: "=&r" (__ret), "=&r" (__rc), "+A" (*__ptr)	\
			: "rJ" ((long)__old), "rJ" (__new)		\
			: "memory");					\
		break;							\
	case 8:								\
		__asm__ __volatile__ (					\
			RISCV_RELEASE_BARRIER				\
			"0:	lr.d %0, %2\n"				\
			"	bne %0, %z3, 1f\n"			\
			"	sc.d %1, %z4, %2\n"			\
			"	bnez %1, 0b\n"				\
			"1:\n"						\
			: "=&r" (__ret), "=&r" (__rc), "+A" (*__ptr)	\
			: "rJ" (__old), "rJ" (__new)			\
			: "memory");					\
		break;							\
	default:							\
		BUILD_BUG();						\
	}								\
	__ret;								\
})

#define cmpxchg_release(ptr, o, n)					\
({									\
	__typeof__(*(ptr)) _o_ = (o);					\
	__typeof__(*(ptr)) _n_ = (n);					\
	(__typeof__(*(ptr))) __cmpxchg_release((ptr),			\
					_o_, _n_, sizeof(*(ptr)));	\
})

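/*
 * Fully ordered variant: the store-conditional carries a .rl annotation
 * and, on the success path, is followed by a full "fence rw, rw".
 * Presumably the trailing fence is what makes a successful cmpxchg() act
 * as a full barrier rather than relying on .aq/.rl alone; on failure the
 * branch skips the fence, so no extra ordering is provided.
 */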
#define __cmpxchg(ptr, old, new, size)					\
({									\
	__typeof__(ptr) __ptr = (ptr);					\
	__typeof__(*(ptr)) __old = (old);				\
	__typeof__(*(ptr)) __new = (new);				\
	__typeof__(*(ptr)) __ret;					\
	register unsigned int __rc;					\
	switch (size) {							\
	case 4:								\
		__asm__ __volatile__ (					\
			"0:	lr.w %0, %2\n"				\
			"	bne  %0, %z3, 1f\n"			\
			"	sc.w.rl %1, %z4, %2\n"			\
			"	bnez %1, 0b\n"				\
			"	fence rw, rw\n"				\
			"1:\n"						\
			: "=&r" (__ret), "=&r" (__rc), "+A" (*__ptr)	\
			: "rJ" ((long)__old), "rJ" (__new)		\
			: "memory");					\
		break;							\
	case 8:								\
		__asm__ __volatile__ (					\
			"0:	lr.d %0, %2\n"				\
			"	bne %0, %z3, 1f\n"			\
			"	sc.d.rl %1, %z4, %2\n"			\
			"	bnez %1, 0b\n"				\
			"	fence rw, rw\n"				\
			"1:\n"						\
			: "=&r" (__ret), "=&r" (__rc), "+A" (*__ptr)	\
			: "rJ" (__old), "rJ" (__new)			\
			: "memory");					\
		break;							\
	default:							\
		BUILD_BUG();						\
	}								\
	__ret;								\
})

#define cmpxchg(ptr, o, n)						\
({									\
	__typeof__(*(ptr)) _o_ = (o);					\
	__typeof__(*(ptr)) _n_ = (n);					\
	(__typeof__(*(ptr))) __cmpxchg((ptr),				\
				       _o_, _n_, sizeof(*(ptr)));	\
})

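/*
 * The *_local and size-suffixed helpers below are thin wrappers: the
 * _local forms only need to be atomic with respect to the current CPU,
 * so they map to the relaxed implementation, while cmpxchg32()/cmpxchg64()
 * merely add a build-time check of the operand size.
 */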
#define cmpxchg_local(ptr, o, n)					\
	(__cmpxchg_relaxed((ptr), (o), (n), sizeof(*(ptr))))

#define cmpxchg32(ptr, o, n)						\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 4);				\
	cmpxchg((ptr), (o), (n));					\
})

#define cmpxchg32_local(ptr, o, n)					\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 4);				\
	cmpxchg_relaxed((ptr), (o), (n));				\
})

#define cmpxchg64(ptr, o, n)						\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg((ptr), (o), (n));					\
})

#define cmpxchg64_local(ptr, o, n)					\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg_relaxed((ptr), (o), (n));				\
})

#endif /* _ASM_RISCV_CMPXCHG_H */