/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_ATOMIC_H
#define _ASM_X86_ATOMIC_H

#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/alternative.h>
#include <asm/cmpxchg.h>
#include <asm/rmwcc.h>
#include <asm/barrier.h>

/*
 * Atomic operations that C can't guarantee us. Useful for
 * resource counting etc..
 */

/**
 * arch_atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
static __always_inline int arch_atomic_read(const atomic_t *v)
{
        /*
         * Note for KASAN: we deliberately don't use READ_ONCE_NOCHECK() here,
         * since it's a non-inlined function that increases binary size and
         * stack usage.
         */
        return __READ_ONCE((v)->counter);
}

/**
 * arch_atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
static __always_inline void arch_atomic_set(atomic_t *v, int i)
{
        __WRITE_ONCE(v->counter, i);
}
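
/*
 * Illustrative sketch (not part of the upstream header): how the plain
 * read/set helpers above are used. Kernel code normally goes through the
 * generic atomic_read()/atomic_set() wrappers in <linux/atomic.h> rather
 * than the arch_ variants; the hypothetical example_reset_counter() below
 * exists only to show the calling convention.
 */
static __always_inline int example_reset_counter(atomic_t *v)
{
        int old = arch_atomic_read(v);  /* plain (relaxed) load of v->counter */

        arch_atomic_set(v, 0);          /* plain (relaxed) store of 0 */

        /*
         * Note: this read+set pair is NOT one atomic operation; a single
         * atomic swap would use arch_atomic_xchg() instead.
         */
        return old;
}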

/**
 * arch_atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __always_inline void arch_atomic_add(int i, atomic_t *v)
{
        asm volatile(LOCK_PREFIX "addl %1,%0"
                     : "+m" (v->counter)
                     : "ir" (i) : "memory");
}

/**
 * arch_atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __always_inline void arch_atomic_sub(int i, atomic_t *v)
{
        asm volatile(LOCK_PREFIX "subl %1,%0"
                     : "+m" (v->counter)
                     : "ir" (i) : "memory");
}

/**
 * arch_atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static __always_inline bool arch_atomic_sub_and_test(int i, atomic_t *v)
{
        return GEN_BINARY_RMWcc(LOCK_PREFIX "subl", v->counter, e, "er", i);
}
#define arch_atomic_sub_and_test arch_atomic_sub_and_test

/**
 * arch_atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
static __always_inline void arch_atomic_inc(atomic_t *v)
{
        asm volatile(LOCK_PREFIX "incl %0"
                     : "+m" (v->counter) :: "memory");
}
#define arch_atomic_inc arch_atomic_inc

/**
 * arch_atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
static __always_inline void arch_atomic_dec(atomic_t *v)
{
        asm volatile(LOCK_PREFIX "decl %0"
                     : "+m" (v->counter) :: "memory");
}
#define arch_atomic_dec arch_atomic_dec

/**
 * arch_atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static __always_inline bool arch_atomic_dec_and_test(atomic_t *v)
{
        return GEN_UNARY_RMWcc(LOCK_PREFIX "decl", v->counter, e);
}
#define arch_atomic_dec_and_test arch_atomic_dec_and_test

/**
 * arch_atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static __always_inline bool arch_atomic_inc_and_test(atomic_t *v)
{
        return GEN_UNARY_RMWcc(LOCK_PREFIX "incl", v->counter, e);
}
#define arch_atomic_inc_and_test arch_atomic_inc_and_test

/**
 * arch_atomic_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static __always_inline bool arch_atomic_add_negative(int i, atomic_t *v)
{
        return GEN_BINARY_RMWcc(LOCK_PREFIX "addl", v->counter, s, "er", i);
}
#define arch_atomic_add_negative arch_atomic_add_negative
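
/*
 * Illustrative sketch (hypothetical helper, not part of the upstream header):
 * the *_and_test()/add_negative() helpers above fold the LOCK'ed RMW and the
 * flag test into a single instruction via GEN_*_RMWcc(). A put-style helper
 * built on them might look like this; real code would use the generic
 * atomic_dec_and_test() wrapper (or refcount_t) instead.
 */
static __always_inline bool example_put_ref(atomic_t *refs)
{
        /* Returns true exactly once, for the caller dropping the last reference. */
        return arch_atomic_dec_and_test(refs);
}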

/**
 * arch_atomic_add_return - add integer and return
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static __always_inline int arch_atomic_add_return(int i, atomic_t *v)
{
        return i + xadd(&v->counter, i);
}
#define arch_atomic_add_return arch_atomic_add_return

/**
 * arch_atomic_sub_return - subtract integer and return
 * @v: pointer of type atomic_t
 * @i: integer value to subtract
 *
 * Atomically subtracts @i from @v and returns @v - @i
 */
static __always_inline int arch_atomic_sub_return(int i, atomic_t *v)
{
        return arch_atomic_add_return(-i, v);
}
#define arch_atomic_sub_return arch_atomic_sub_return

static __always_inline int arch_atomic_fetch_add(int i, atomic_t *v)
{
        return xadd(&v->counter, i);
}
#define arch_atomic_fetch_add arch_atomic_fetch_add
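
/*
 * Illustrative sketch (hypothetical helper, not from the upstream header):
 * both helpers above are built on XADD, but they differ in what they hand
 * back - add_return() yields the *new* value, fetch_add() the *old* one.
 */
static __always_inline void example_xadd_results(atomic_t *v)
{
        int newval = arch_atomic_add_return(5, v);      /* value after the add */
        int oldval = arch_atomic_fetch_add(5, v);       /* value before the add */

        /*
         * Absent concurrent updates, oldval == newval here and the counter
         * has advanced by 10 in total.
         */
        (void)newval;
        (void)oldval;
}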

static __always_inline int arch_atomic_fetch_sub(int i, atomic_t *v)
{
        return xadd(&v->counter, -i);
}
#define arch_atomic_fetch_sub arch_atomic_fetch_sub

static __always_inline int arch_atomic_cmpxchg(atomic_t *v, int old, int new)
{
        return arch_cmpxchg(&v->counter, old, new);
}
#define arch_atomic_cmpxchg arch_atomic_cmpxchg

static __always_inline bool arch_atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
        return try_cmpxchg(&v->counter, old, new);
}
#define arch_atomic_try_cmpxchg arch_atomic_try_cmpxchg
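
/*
 * Illustrative sketch (hypothetical helper, not part of the upstream header):
 * arch_atomic_try_cmpxchg() updates *old on failure, which keeps compare-and-
 * swap loops compact - the arch_atomic_fetch_and/or/xor() helpers below use
 * exactly this shape. Here: increment @v only while it stays below @limit.
 */
static __always_inline bool example_inc_below(atomic_t *v, int limit)
{
        int val = arch_atomic_read(v);

        do {
                if (val >= limit)
                        return false;
        } while (!arch_atomic_try_cmpxchg(v, &val, val + 1));

        return true;
}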

static __always_inline int arch_atomic_xchg(atomic_t *v, int new)
{
        return arch_xchg(&v->counter, new);
}
#define arch_atomic_xchg arch_atomic_xchg

static __always_inline void arch_atomic_and(int i, atomic_t *v)
{
        asm volatile(LOCK_PREFIX "andl %1,%0"
                     : "+m" (v->counter)
                     : "ir" (i)
                     : "memory");
}

static __always_inline int arch_atomic_fetch_and(int i, atomic_t *v)
{
        int val = arch_atomic_read(v);

        do { } while (!arch_atomic_try_cmpxchg(v, &val, val & i));

        return val;
}
#define arch_atomic_fetch_and arch_atomic_fetch_and

static __always_inline void arch_atomic_or(int i, atomic_t *v)
{
        asm volatile(LOCK_PREFIX "orl %1,%0"
                     : "+m" (v->counter)
                     : "ir" (i)
                     : "memory");
}

static __always_inline int arch_atomic_fetch_or(int i, atomic_t *v)
{
        int val = arch_atomic_read(v);

        do { } while (!arch_atomic_try_cmpxchg(v, &val, val | i));

        return val;
}
#define arch_atomic_fetch_or arch_atomic_fetch_or

static __always_inline void arch_atomic_xor(int i, atomic_t *v)
{
        asm volatile(LOCK_PREFIX "xorl %1,%0"
                     : "+m" (v->counter)
                     : "ir" (i)
                     : "memory");
}

static __always_inline int arch_atomic_fetch_xor(int i, atomic_t *v)
{
        int val = arch_atomic_read(v);

        do { } while (!arch_atomic_try_cmpxchg(v, &val, val ^ i));

        return val;
}
#define arch_atomic_fetch_xor arch_atomic_fetch_xor
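
/*
 * Illustrative sketch (hypothetical flag bit and helper, not from the
 * upstream header): arch_atomic_fetch_or() returns the value seen *before*
 * the OR, so it can double as a test-and-set for a flag bit.
 */
#define EXAMPLE_FLAG_CLOSED     0x1     /* assumed flag layout for this sketch */

static __always_inline bool example_mark_closed(atomic_t *flags)
{
        /* True only for the caller that actually transitioned the bit 0->1. */
        return !(arch_atomic_fetch_or(EXAMPLE_FLAG_CLOSED, flags) & EXAMPLE_FLAG_CLOSED);
}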

#ifdef CONFIG_X86_32
# include <asm/atomic64_32.h>
#else
# include <asm/atomic64_64.h>
#endif

#define ARCH_ATOMIC

#endif /* _ASM_X86_ATOMIC_H */