/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Generic UP xchg and cmpxchg using interrupt disablement.  Does not
 * support SMP.
 */
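/*
 * On a uniprocessor kernel, masking local interrupts around a plain
 * load/store pair is enough to make it atomic with respect to anything
 * else that can run on that CPU; that is the mechanism the fallback
 * paths below rely on.
 */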

#ifndef __ASM_GENERIC_CMPXCHG_H
#define __ASM_GENERIC_CMPXCHG_H

#ifdef CONFIG_SMP
#error "Cannot use generic cmpxchg on SMP"
#endif

#include <linux/types.h>
#include <linux/irqflags.h>

#ifndef xchg

/*
 * This function doesn't exist, so you'll get a linker error if
 * something tries to do an invalidly-sized xchg().
 */
extern void __xchg_called_with_bad_pointer(void);
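/*
 * Every supported operand size is handled by a case in the switch
 * below, so for valid callers the default branch is dead code and the
 * call above is never emitted; an unsupported size leaves an
 * unresolved reference that shows up at link time rather than as a
 * silent miscompile.
 */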

static inline
unsigned long __xchg(unsigned long x, volatile void *ptr, int size)
{
	unsigned long ret, flags;

	switch (size) {
	case 1:
#ifdef __xchg_u8
		return __xchg_u8(x, ptr);
#else
		local_irq_save(flags);
		ret = *(volatile u8 *)ptr;
		*(volatile u8 *)ptr = x;
		local_irq_restore(flags);
		return ret;
#endif /* __xchg_u8 */

	case 2:
#ifdef __xchg_u16
		return __xchg_u16(x, ptr);
#else
		local_irq_save(flags);
		ret = *(volatile u16 *)ptr;
		*(volatile u16 *)ptr = x;
		local_irq_restore(flags);
		return ret;
#endif /* __xchg_u16 */

	case 4:
#ifdef __xchg_u32
		return __xchg_u32(x, ptr);
#else
		local_irq_save(flags);
		ret = *(volatile u32 *)ptr;
		*(volatile u32 *)ptr = x;
		local_irq_restore(flags);
		return ret;
#endif /* __xchg_u32 */

#ifdef CONFIG_64BIT
	case 8:
#ifdef __xchg_u64
		return __xchg_u64(x, ptr);
#else
		local_irq_save(flags);
		ret = *(volatile u64 *)ptr;
		*(volatile u64 *)ptr = x;
		local_irq_restore(flags);
		return ret;
#endif /* __xchg_u64 */
#endif /* CONFIG_64BIT */

	default:
		__xchg_called_with_bad_pointer();
		return x;
	}
}

#define xchg(ptr, x) ({							\
	((__typeof__(*(ptr)))						\
		__xchg((unsigned long)(x), (ptr), sizeof(*(ptr))));	\
})

#endif /* xchg */
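/*
 * Example of the resulting interface (illustrative only; the helper
 * named below is made up): xchg() stores the new value and hands back
 * the previous one in a single atomic step, so e.g. a one-shot flag
 * can be claimed without a lock:
 *
 *	static unsigned int initialised;
 *
 *	if (!xchg(&initialised, 1))
 *		do_one_time_setup();
 *
 * The __typeof__ cast in the macro gives the result the pointee's
 * type.
 */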

/*
 * Atomic compare and exchange.
 */
#include <asm-generic/cmpxchg-local.h>

#ifndef cmpxchg_local
#define cmpxchg_local(ptr, o, n) ({					       \
	((__typeof__(*(ptr)))__cmpxchg_local_generic((ptr), (unsigned long)(o),\
			(unsigned long)(n), sizeof(*(ptr))));		       \
})
#endif

#ifndef cmpxchg64_local
#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
#endif

#define cmpxchg(ptr, o, n)	cmpxchg_local((ptr), (o), (n))
#define cmpxchg64(ptr, o, n)	cmpxchg64_local((ptr), (o), (n))
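/*
 * Example of a compare-and-swap retry loop built on cmpxchg()
 * (illustrative only; counter, old and new are local example
 * variables): cmpxchg() writes 'new' only if the current value still
 * equals 'old', and always returns the value it found:
 *
 *	unsigned long old, new;
 *
 *	do {
 *		old = counter;
 *		new = old + 1;
 *	} while (cmpxchg(&counter, old, new) != old);
 *
 * Since this header only supports UP, cmpxchg() and cmpxchg64() simply
 * map to their _local variants above; there is no other CPU to order
 * against.
 */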

#endif /* __ASM_GENERIC_CMPXCHG_H */