xref: /OK3568_Linux_fs/kernel/arch/csky/include/asm/atomic.h (revision 4882a59341e53eb6f0b4789bf948001014eff981)
/* SPDX-License-Identifier: GPL-2.0 */

#ifndef __ASM_CSKY_ATOMIC_H
#define __ASM_CSKY_ATOMIC_H

#include <linux/version.h>
#include <asm/cmpxchg.h>
#include <asm/barrier.h>

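/*
 * Architecture-specific atomic_t operations for C-SKY.  Two variants
 * are provided: an ldex/stex based one for CPUs with exclusive
 * load/store support, and an interrupt-disabling fallback for CPUs
 * without it.  Anything not defined here is picked up from the
 * generic atomic headers included at the end of this file.
 */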
#ifdef CONFIG_CPU_HAS_LDSTEX

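/*
 * CONFIG_CPU_HAS_LDSTEX: the helpers below are built on the C-SKY
 * ldex.w/stex.w (load-exclusive/store-exclusive) pair.  stex.w leaves
 * a success flag in its source register, so "bez %0, 1b" retries the
 * whole read-modify-write sequence until the store commits.
 *
 * __atomic_add_unless() adds @a to @v unless @v already equals @u and
 * returns the old value.  The smp_mb() before the loop and the
 * conditional smp_mb() after it give the operation full ordering
 * whenever the counter is actually modified.
 */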
#define __atomic_add_unless __atomic_add_unless
static inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
	unsigned long tmp, ret;

	smp_mb();

	asm volatile (
	"1:	ldex.w		%0, (%3) \n"
	"	mov		%1, %0   \n"
	"	cmpne		%0, %4   \n"
	"	bf		2f	 \n"
	"	add		%0, %2   \n"
	"	stex.w		%0, (%3) \n"
	"	bez		%0, 1b   \n"
	"2:				 \n"
		: "=&r" (tmp), "=&r" (ret)
		: "r" (a), "r"(&v->counter), "r"(u)
		: "memory");

	if (ret != u)
		smp_mb();

	return ret;
}

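/*
 * ATOMIC_OP() generates the plain, non-value-returning atomic_<op>()
 * helpers.  These have no ordering requirements, so the ldex/stex
 * retry loop is issued without barriers.
 */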
#define ATOMIC_OP(op, c_op)						\
static inline void atomic_##op(int i, atomic_t *v)			\
{									\
	unsigned long tmp;						\
									\
	asm volatile (							\
	"1:	ldex.w		%0, (%2) \n"				\
	"	" #op "		%0, %1   \n"				\
	"	stex.w		%0, (%2) \n"				\
	"	bez		%0, 1b   \n"				\
		: "=&r" (tmp)						\
		: "r" (i), "r"(&v->counter)				\
		: "memory");						\
}

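/*
 * ATOMIC_OP_RETURN() generates atomic_<op>_return(), which returns the
 * new value.  The smp_mb() on both sides of the retry loop makes the
 * access fully ordered.
 */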
#define ATOMIC_OP_RETURN(op, c_op)					\
static inline int atomic_##op##_return(int i, atomic_t *v)		\
{									\
	unsigned long tmp, ret;						\
									\
	smp_mb();							\
	asm volatile (							\
	"1:	ldex.w		%0, (%3) \n"				\
	"	" #op "		%0, %2   \n"				\
	"	mov		%1, %0   \n"				\
	"	stex.w		%0, (%3) \n"				\
	"	bez		%0, 1b   \n"				\
		: "=&r" (tmp), "=&r" (ret)				\
		: "r" (i), "r"(&v->counter)				\
		: "memory");						\
	smp_mb();							\
									\
	return ret;							\
}

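/*
 * ATOMIC_FETCH_OP() generates atomic_fetch_<op>(), which returns the
 * old value (copied into %1 before the operation is applied) and is
 * likewise fully ordered by the surrounding smp_mb() calls.
 */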
#define ATOMIC_FETCH_OP(op, c_op)					\
static inline int atomic_fetch_##op(int i, atomic_t *v)		\
{									\
	unsigned long tmp, ret;						\
									\
	smp_mb();							\
	asm volatile (							\
	"1:	ldex.w		%0, (%3) \n"				\
	"	mov		%1, %0   \n"				\
	"	" #op "		%0, %2   \n"				\
	"	stex.w		%0, (%3) \n"				\
	"	bez		%0, 1b   \n"				\
		: "=&r" (tmp), "=&r" (ret)				\
		: "r" (i), "r"(&v->counter)				\
		: "memory");						\
	smp_mb();							\
									\
	return ret;							\
}

#else /* CONFIG_CPU_HAS_LDSTEX */

#include <linux/irqflags.h>

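/*
 * Without ldex/stex the same operations are implemented with plain
 * ldw/stw wrapped in raw_local_irq_save()/raw_local_irq_restore().
 * Disabling local interrupts only protects against the current CPU,
 * so this variant is suitable for uniprocessor configurations only.
 */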
#define __atomic_add_unless __atomic_add_unless
static inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
	unsigned long tmp, ret, flags;

	raw_local_irq_save(flags);

	asm volatile (
	"	ldw		%0, (%3) \n"
	"	mov		%1, %0   \n"
	"	cmpne		%0, %4   \n"
	"	bf		2f	 \n"
	"	add		%0, %2   \n"
	"	stw		%0, (%3) \n"
	"2:				 \n"
		: "=&r" (tmp), "=&r" (ret)
		: "r" (a), "r"(&v->counter), "r"(u)
		: "memory");

	raw_local_irq_restore(flags);

	return ret;
}

#define ATOMIC_OP(op, c_op)						\
static inline void atomic_##op(int i, atomic_t *v)			\
{									\
	unsigned long tmp, flags;					\
									\
	raw_local_irq_save(flags);					\
									\
	asm volatile (							\
	"	ldw		%0, (%2) \n"				\
	"	" #op "		%0, %1   \n"				\
	"	stw		%0, (%2) \n"				\
		: "=&r" (tmp)						\
		: "r" (i), "r"(&v->counter)				\
		: "memory");						\
									\
	raw_local_irq_restore(flags);					\
}

#define ATOMIC_OP_RETURN(op, c_op)					\
static inline int atomic_##op##_return(int i, atomic_t *v)		\
{									\
	unsigned long tmp, ret, flags;					\
									\
	raw_local_irq_save(flags);					\
									\
	asm volatile (							\
	"	ldw		%0, (%3) \n"				\
	"	" #op "		%0, %2   \n"				\
	"	stw		%0, (%3) \n"				\
	"	mov		%1, %0   \n"				\
		: "=&r" (tmp), "=&r" (ret)				\
		: "r" (i), "r"(&v->counter)				\
		: "memory");						\
									\
	raw_local_irq_restore(flags);					\
									\
	return ret;							\
}

#define ATOMIC_FETCH_OP(op, c_op)					\
static inline int atomic_fetch_##op(int i, atomic_t *v)		\
{									\
	unsigned long tmp, ret, flags;					\
									\
	raw_local_irq_save(flags);					\
									\
	asm volatile (							\
	"	ldw		%0, (%3) \n"				\
	"	mov		%1, %0   \n"				\
	"	" #op "		%0, %2   \n"				\
	"	stw		%0, (%3) \n"				\
		: "=&r" (tmp), "=&r" (ret)				\
		: "r" (i), "r"(&v->counter)				\
		: "memory");						\
									\
	raw_local_irq_restore(flags);					\
									\
	return ret;							\
}

#endif /* CONFIG_CPU_HAS_LDSTEX */

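/*
 * Instantiate the helpers.  Each "#define atomic_<op> atomic_<op>"
 * line tells the generic atomic headers included below that this
 * operation already has an architecture-specific implementation, so
 * the generic fallback is not used for it.
 */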
#define atomic_add_return atomic_add_return
ATOMIC_OP_RETURN(add, +)
#define atomic_sub_return atomic_sub_return
ATOMIC_OP_RETURN(sub, -)

#define atomic_fetch_add atomic_fetch_add
ATOMIC_FETCH_OP(add, +)
#define atomic_fetch_sub atomic_fetch_sub
ATOMIC_FETCH_OP(sub, -)
#define atomic_fetch_and atomic_fetch_and
ATOMIC_FETCH_OP(and, &)
#define atomic_fetch_or atomic_fetch_or
ATOMIC_FETCH_OP(or, |)
#define atomic_fetch_xor atomic_fetch_xor
ATOMIC_FETCH_OP(xor, ^)

#define atomic_and atomic_and
ATOMIC_OP(and, &)
#define atomic_or atomic_or
ATOMIC_OP(or, |)
#define atomic_xor atomic_xor
ATOMIC_OP(xor, ^)

#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

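/*
 * Usage sketch (illustrative only, not part of this header): callers
 * operate on an atomic_t initialised with ATOMIC_INIT, e.g. a
 * hypothetical reference counter:
 *
 *	static atomic_t refcnt = ATOMIC_INIT(1);
 *
 *	atomic_add(2, &refcnt);			(unordered RMW)
 *	old = atomic_fetch_sub(1, &refcnt);	(returns the old value)
 *	new = atomic_sub_return(1, &refcnt);	(returns the new value)
 */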
#include <asm-generic/atomic.h>

#endif /* __ASM_CSKY_ATOMIC_H */