/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_ATOMIC64_64_H
#define _ASM_X86_ATOMIC64_64_H

#include <linux/types.h>
#include <asm/alternative.h>
#include <asm/cmpxchg.h>

/* The 64-bit atomic type */

#define ATOMIC64_INIT(i)	{ (i) }

/**
 * arch_atomic64_read - read atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically reads the value of @v.
 * Doesn't imply a read memory barrier.
 */
static inline s64 arch_atomic64_read(const atomic64_t *v)
{
	return __READ_ONCE((v)->counter);
}

/**
 * arch_atomic64_set - set atomic64 variable
 * @v: pointer to type atomic64_t
 * @i: value to set
 *
 * Atomically sets the value of @v to @i.
 */
static inline void arch_atomic64_set(atomic64_t *v, s64 i)
{
	__WRITE_ONCE(v->counter, i);
}
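
/*
 * Usage sketch (illustrative only; rx_bytes and the stats_* helpers are
 * hypothetical). Kernel code normally reaches these primitives through the
 * generic atomic64_read()/atomic64_set() wrappers rather than the arch_
 * names shown here:
 *
 *	static atomic64_t rx_bytes = ATOMIC64_INIT(0);
 *
 *	static void stats_reset(void)
 *	{
 *		arch_atomic64_set(&rx_bytes, 0);
 *	}
 *
 *	static s64 stats_snapshot(void)
 *	{
 *		return arch_atomic64_read(&rx_bytes);
 *	}
 */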

/**
 * arch_atomic64_add - add integer to atomic64 variable
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static __always_inline void arch_atomic64_add(s64 i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "addq %1,%0"
		     : "=m" (v->counter)
		     : "er" (i), "m" (v->counter) : "memory");
}

/**
 * arch_atomic64_sub - subtract integer from atomic64 variable
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static inline void arch_atomic64_sub(s64 i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "subq %1,%0"
		     : "=m" (v->counter)
		     : "er" (i), "m" (v->counter) : "memory");
}

/**
 * arch_atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static inline bool arch_atomic64_sub_and_test(s64 i, atomic64_t *v)
{
	return GEN_BINARY_RMWcc(LOCK_PREFIX "subq", v->counter, e, "er", i);
}
#define arch_atomic64_sub_and_test arch_atomic64_sub_and_test
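
/*
 * Usage sketch (illustrative; pending_work and flush_work_items() are
 * hypothetical): retiring a batch of @n outstanding items and detecting
 * the moment the count hits zero, without a racy read-after-subtract:
 *
 *	static atomic64_t pending_work = ATOMIC64_INIT(0);
 *
 *	static void complete_items(s64 n)
 *	{
 *		if (arch_atomic64_sub_and_test(n, &pending_work))
 *			flush_work_items();	// count reached exactly zero here
 *	}
 */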

/**
 * arch_atomic64_inc - increment atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1.
 */
static __always_inline void arch_atomic64_inc(atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "incq %0"
		     : "=m" (v->counter)
		     : "m" (v->counter) : "memory");
}
#define arch_atomic64_inc arch_atomic64_inc

/**
 * arch_atomic64_dec - decrement atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
static __always_inline void arch_atomic64_dec(atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "decq %0"
		     : "=m" (v->counter)
		     : "m" (v->counter) : "memory");
}
#define arch_atomic64_dec arch_atomic64_dec

/**
 * arch_atomic64_dec_and_test - decrement and test
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static inline bool arch_atomic64_dec_and_test(atomic64_t *v)
{
	return GEN_UNARY_RMWcc(LOCK_PREFIX "decq", v->counter, e);
}
#define arch_atomic64_dec_and_test arch_atomic64_dec_and_test
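
/*
 * Usage sketch (illustrative; struct obj and obj_free() are hypothetical):
 * the classic reference-counting pattern built from inc/dec_and_test. Real
 * kernel code should prefer refcount_t, which adds overflow checking:
 *
 *	struct obj {
 *		atomic64_t refs;
 *	};
 *
 *	static void obj_get(struct obj *o)
 *	{
 *		arch_atomic64_inc(&o->refs);
 *	}
 *
 *	static void obj_put(struct obj *o)
 *	{
 *		if (arch_atomic64_dec_and_test(&o->refs))
 *			obj_free(o);	// this CPU saw the 1 -> 0 transition
 *	}
 */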

/**
 * arch_atomic64_inc_and_test - increment and test
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static inline bool arch_atomic64_inc_and_test(atomic64_t *v)
{
	return GEN_UNARY_RMWcc(LOCK_PREFIX "incq", v->counter, e);
}
#define arch_atomic64_inc_and_test arch_atomic64_inc_and_test

/**
 * arch_atomic64_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * the result is greater than or equal to zero.
 */
static inline bool arch_atomic64_add_negative(s64 i, atomic64_t *v)
{
	return GEN_BINARY_RMWcc(LOCK_PREFIX "addq", v->counter, s, "er", i);
}
#define arch_atomic64_add_negative arch_atomic64_add_negative
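
/*
 * Usage sketch (illustrative; budget and throttle() are hypothetical):
 * charging a cost against a signed budget and reacting the instant it
 * dips below zero, in a single atomic step:
 *
 *	static atomic64_t budget = ATOMIC64_INIT(1024);
 *
 *	static void charge(s64 cost)
 *	{
 *		if (arch_atomic64_add_negative(cost, &budget))
 *			throttle();	// budget just went negative
 *	}
 */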

/**
 * arch_atomic64_add_return - add and return
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns @i + @v.
 */
static __always_inline s64 arch_atomic64_add_return(s64 i, atomic64_t *v)
{
	return i + xadd(&v->counter, i);
}
#define arch_atomic64_add_return arch_atomic64_add_return
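
/*
 * Usage sketch (illustrative; next_seq and alloc_seq() are hypothetical):
 * because the xadd-based add-and-return hands back the post-add value, a
 * lock-free sequence-number allocator is a one-liner:
 *
 *	static atomic64_t next_seq = ATOMIC64_INIT(0);
 *
 *	static s64 alloc_seq(void)
 *	{
 *		return arch_atomic64_add_return(1, &next_seq);	// 1, 2, 3, ...
 *	}
 */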

static inline s64 arch_atomic64_sub_return(s64 i, atomic64_t *v)
{
	return arch_atomic64_add_return(-i, v);
}
#define arch_atomic64_sub_return arch_atomic64_sub_return

static inline s64 arch_atomic64_fetch_add(s64 i, atomic64_t *v)
{
	return xadd(&v->counter, i);
}
#define arch_atomic64_fetch_add arch_atomic64_fetch_add

static inline s64 arch_atomic64_fetch_sub(s64 i, atomic64_t *v)
{
	return xadd(&v->counter, -i);
}
#define arch_atomic64_fetch_sub arch_atomic64_fetch_sub

static inline s64 arch_atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
{
	return arch_cmpxchg(&v->counter, old, new);
}
#define arch_atomic64_cmpxchg arch_atomic64_cmpxchg

static __always_inline bool arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
	return try_cmpxchg(&v->counter, old, new);
}
#define arch_atomic64_try_cmpxchg arch_atomic64_try_cmpxchg
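
/*
 * Usage sketch (illustrative; add_unless_ge() is a hypothetical helper):
 * on failure, try_cmpxchg() writes the value it found back into *old, so
 * retry loops need no explicit re-read. A bounded increment looks like:
 *
 *	static bool add_unless_ge(atomic64_t *v, s64 limit)
 *	{
 *		s64 old = arch_atomic64_read(v);
 *
 *		do {
 *			if (old >= limit)
 *				return false;	// refuse to cross the limit
 *		} while (!arch_atomic64_try_cmpxchg(v, &old, old + 1));
 *		return true;
 *	}
 */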

static inline s64 arch_atomic64_xchg(atomic64_t *v, s64 new)
{
	return arch_xchg(&v->counter, new);
}
#define arch_atomic64_xchg arch_atomic64_xchg

static inline void arch_atomic64_and(s64 i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "andq %1,%0"
			: "+m" (v->counter)
			: "er" (i)
			: "memory");
}

static inline s64 arch_atomic64_fetch_and(s64 i, atomic64_t *v)
{
	s64 val = arch_atomic64_read(v);

	do {
		/* try_cmpxchg() reloads 'val' on failure, so the body is empty */
	} while (!arch_atomic64_try_cmpxchg(v, &val, val & i));
	return val;
}
#define arch_atomic64_fetch_and arch_atomic64_fetch_and
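
/*
 * Usage sketch (illustrative; FLAG_PENDING and test_and_clear_pending()
 * are hypothetical): fetch_and returns the value *before* the AND, so it
 * can clear a flag and report whether it had been set, atomically:
 *
 *	static bool test_and_clear_pending(atomic64_t *flags)
 *	{
 *		return arch_atomic64_fetch_and(~FLAG_PENDING, flags) & FLAG_PENDING;
 *	}
 */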

static inline void arch_atomic64_or(s64 i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "orq %1,%0"
			: "+m" (v->counter)
			: "er" (i)
			: "memory");
}

static inline s64 arch_atomic64_fetch_or(s64 i, atomic64_t *v)
{
	s64 val = arch_atomic64_read(v);

	do {
	} while (!arch_atomic64_try_cmpxchg(v, &val, val | i));
	return val;
}
#define arch_atomic64_fetch_or arch_atomic64_fetch_or

static inline void arch_atomic64_xor(s64 i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "xorq %1,%0"
			: "+m" (v->counter)
			: "er" (i)
			: "memory");
}

static inline s64 arch_atomic64_fetch_xor(s64 i, atomic64_t *v)
{
	s64 val = arch_atomic64_read(v);

	do {
	} while (!arch_atomic64_try_cmpxchg(v, &val, val ^ i));
	return val;
}
#define arch_atomic64_fetch_xor arch_atomic64_fetch_xor

#endif /* _ASM_X86_ATOMIC64_64_H */