xref: /OK3568_Linux_fs/kernel/include/asm-generic/atomic.h (revision 4882a59341e53eb6f0b4789bf948001014eff981)
/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Generic C implementation of atomic counter operations. Usable on
 * UP systems only. Do not include in machine independent code.
 *
 * Copyright (C) 2007 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 */
#ifndef __ASM_GENERIC_ATOMIC_H
#define __ASM_GENERIC_ATOMIC_H

#include <asm/cmpxchg.h>
#include <asm/barrier.h>

/*
 * atomic_$op() - $op integer to atomic variable
 * @i: integer value to $op
 * @v: pointer to the atomic variable
 *
 * Atomically $ops @i to @v. Does not strictly guarantee a memory barrier;
 * use smp_mb__{before,after}_atomic() where ordering is required.
 */

/*
 * atomic_$op_return() - $op integer to atomic variable and return the result
 * @i: integer value to $op
 * @v: pointer to the atomic variable
 *
 * Atomically $ops @i to @v and returns the new value. Implies a full memory
 * barrier.
 */
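
/*
 * A minimal usage sketch contrasting the two families above (illustrative
 * only; v is a hypothetical counter, not part of this header):
 *
 *	atomic_t v = ATOMIC_INIT(0);
 *
 *	atomic_add(1, &v);		// no ordering implied
 *	smp_mb__after_atomic();		// order against later accesses
 *
 *	int n = atomic_add_return(1, &v);	// n == 2, full barrier implied
 */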

#ifdef CONFIG_SMP

/* we can build all atomic primitives from cmpxchg */

#define ATOMIC_OP(op, c_op)						\
static inline void atomic_##op(int i, atomic_t *v)			\
{									\
	int c, old;							\
									\
	c = v->counter;							\
	while ((old = cmpxchg(&v->counter, c, c c_op i)) != c)		\
		c = old;						\
}
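
/*
 * How the cmpxchg() loop works, shown as an illustrative expansion of
 * ATOMIC_OP(or, |) (one of the instantiations below): read the current
 * value, then keep retrying the compare-and-swap until no other CPU has
 * modified the counter between the read and the update.
 *
 *	c = v->counter;
 *	while ((old = cmpxchg(&v->counter, c, c | i)) != c)
 *		c = old;	// lost a race; retry with the value we saw
 */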

#define ATOMIC_OP_RETURN(op, c_op)					\
static inline int atomic_##op##_return(int i, atomic_t *v)		\
{									\
	int c, old;							\
									\
	c = v->counter;							\
	while ((old = cmpxchg(&v->counter, c, c c_op i)) != c)		\
		c = old;						\
									\
	return c c_op i;						\
}

#define ATOMIC_FETCH_OP(op, c_op)					\
static inline int atomic_fetch_##op(int i, atomic_t *v)		\
{									\
	int c, old;							\
									\
	c = v->counter;							\
	while ((old = cmpxchg(&v->counter, c, c c_op i)) != c)		\
		c = old;						\
									\
	return c;							\
}
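
/*
 * Note the difference in return values between the two generators above
 * (illustrative, starting from a counter holding 1):
 *
 *	atomic_add_return(1, &v);	// returns 2 (the new value)
 *	atomic_fetch_add(1, &v);	// returns 1 (the old value)
 */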

#else

#include <linux/irqflags.h>

#define ATOMIC_OP(op, c_op)						\
static inline void atomic_##op(int i, atomic_t *v)			\
{									\
	unsigned long flags;						\
									\
	raw_local_irq_save(flags);					\
	v->counter = v->counter c_op i;					\
	raw_local_irq_restore(flags);					\
}

#define ATOMIC_OP_RETURN(op, c_op)					\
static inline int atomic_##op##_return(int i, atomic_t *v)		\
{									\
	unsigned long flags;						\
	int ret;							\
									\
	raw_local_irq_save(flags);					\
	ret = (v->counter = v->counter c_op i);				\
	raw_local_irq_restore(flags);					\
									\
	return ret;							\
}

#define ATOMIC_FETCH_OP(op, c_op)					\
static inline int atomic_fetch_##op(int i, atomic_t *v)		\
{									\
	unsigned long flags;						\
	int ret;							\
									\
	raw_local_irq_save(flags);					\
	ret = v->counter;						\
	v->counter = v->counter c_op i;					\
	raw_local_irq_restore(flags);					\
									\
	return ret;							\
}
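
/*
 * On UP the only source of concurrency on a counter is an interrupt handler
 * preempting the read-modify-write sequence, so disabling local interrupts
 * around the plain C operation is enough to make it atomic. This is also
 * exactly why these variants must never be used on SMP systems.
 */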

#endif /* CONFIG_SMP */

#ifndef atomic_add_return
ATOMIC_OP_RETURN(add, +)
#endif

#ifndef atomic_sub_return
ATOMIC_OP_RETURN(sub, -)
#endif

#ifndef atomic_fetch_add
ATOMIC_FETCH_OP(add, +)
#endif

#ifndef atomic_fetch_sub
ATOMIC_FETCH_OP(sub, -)
#endif

#ifndef atomic_fetch_and
ATOMIC_FETCH_OP(and, &)
#endif

#ifndef atomic_fetch_or
ATOMIC_FETCH_OP(or, |)
#endif

#ifndef atomic_fetch_xor
ATOMIC_FETCH_OP(xor, ^)
#endif

#ifndef atomic_and
ATOMIC_OP(and, &)
#endif

#ifndef atomic_or
ATOMIC_OP(or, |)
#endif

#ifndef atomic_xor
ATOMIC_OP(xor, ^)
#endif
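
/*
 * Unless the architecture has already provided its own definitions, the
 * instantiations above generate:
 *
 *	atomic_add_return(), atomic_sub_return(),
 *	atomic_fetch_add(), atomic_fetch_sub(),
 *	atomic_fetch_and(), atomic_fetch_or(), atomic_fetch_xor(),
 *	atomic_and(), atomic_or(), atomic_xor()
 */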

#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

/*
 * Atomic operations that C can't guarantee for us. Useful for
 * resource counting etc.
 */

/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#ifndef atomic_read
#define atomic_read(v)	READ_ONCE((v)->counter)
#endif

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i) WRITE_ONCE(((v)->counter), (i))
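
/*
 * Minimal usage sketch (illustrative only; refcnt is a hypothetical
 * counter, not part of this header):
 *
 *	static atomic_t refcnt = ATOMIC_INIT(0);
 *
 *	atomic_set(&refcnt, 1);
 *	if (atomic_read(&refcnt) > 0)
 *		...
 */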

#include <linux/irqflags.h>

static inline void atomic_add(int i, atomic_t *v)
{
	atomic_add_return(i, v);
}

static inline void atomic_sub(int i, atomic_t *v)
{
	atomic_sub_return(i, v);
}
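
/*
 * atomic_add() and atomic_sub() above simply reuse the *_return variants
 * and discard the result, so in this generic implementation they inherit
 * those variants' full-barrier semantics even though plain atomic_$op()
 * is not required to provide one.
 */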

#define atomic_xchg(ptr, v)		(xchg(&(ptr)->counter, (v)))
#define atomic_cmpxchg(v, old, new)	(cmpxchg(&((v)->counter), (old), (new)))
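
/*
 * A sketch of the classic compare-and-swap retry pattern built on
 * atomic_cmpxchg() (illustrative only; `a` is a hypothetical amount to add
 * and `u` a hypothetical excluded value, as in add-unless-style helpers):
 *
 *	int old = atomic_read(v);
 *	for (;;) {
 *		if (old == u)
 *			break;			// hit the excluded value
 *		int seen = atomic_cmpxchg(v, old, old + a);
 *		if (seen == old)
 *			break;			// update succeeded
 *		old = seen;			// raced; retry with fresh value
 *	}
 */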

#endif /* __ASM_GENERIC_ATOMIC_H */