/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_ATOMIC64_32_H
#define _ASM_X86_ATOMIC64_32_H

#include <linux/compiler.h>
#include <linux/types.h>
//#include <asm/cmpxchg.h>

/* A 64-bit atomic type */

typedef struct {
	s64 __aligned(8) counter;
} atomic64_t;

#define ATOMIC64_INIT(val)	{ (val) }

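/*
 * Usage sketch (illustrative only, not part of this header; total_bytes and
 * len are made-up names): an atomic64_t is declared and initialized like the
 * 32-bit atomic_t, and the __aligned(8) member keeps the counter naturally
 * aligned so the 64-bit accesses can be performed atomically.
 *
 *	static atomic64_t total_bytes = ATOMIC64_INIT(0);
 *
 *	atomic64_add(len, &total_bytes);
 *	pr_info("total: %lld\n", atomic64_read(&total_bytes));
 *
 * Callers normally use the generic atomic64_*() wrappers; the
 * arch_atomic64_*() functions in this file are the backends behind them.
 */
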
#define __ATOMIC64_DECL(sym) void atomic64_##sym(atomic64_t *, ...)
#ifndef ATOMIC64_EXPORT
#define ATOMIC64_DECL_ONE __ATOMIC64_DECL
#else
#define ATOMIC64_DECL_ONE(sym) __ATOMIC64_DECL(sym); \
	ATOMIC64_EXPORT(atomic64_##sym)
#endif

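/*
 * Note on the call mechanism below: when CONFIG_X86_CMPXCHG64 is set, the CPU
 * is known to have CMPXCHG8B, so the out-of-line atomic64_*_cx8 helpers are
 * called directly.  Otherwise alternative_call() patches the call site at
 * boot: CPUs with X86_FEATURE_CX8 get the _cx8 helpers, older CPUs fall back
 * to the generic _386 implementations.
 */
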
#ifdef CONFIG_X86_CMPXCHG64
#define __alternative_atomic64(f, g, out, in...) \
	asm volatile("call %P[func]" \
		     : out : [func] "i" (atomic64_##g##_cx8), ## in)

#define ATOMIC64_DECL(sym) ATOMIC64_DECL_ONE(sym##_cx8)
#else
#define __alternative_atomic64(f, g, out, in...) \
	alternative_call(atomic64_##f##_386, atomic64_##g##_cx8, \
			 X86_FEATURE_CX8, ASM_OUTPUT2(out), ## in)

#define ATOMIC64_DECL(sym) ATOMIC64_DECL_ONE(sym##_cx8); \
	ATOMIC64_DECL_ONE(sym##_386)

ATOMIC64_DECL_ONE(add_386);
ATOMIC64_DECL_ONE(sub_386);
ATOMIC64_DECL_ONE(inc_386);
ATOMIC64_DECL_ONE(dec_386);
#endif

#define alternative_atomic64(f, out, in...) \
	__alternative_atomic64(f, f, ASM_OUTPUT2(out), ## in)

ATOMIC64_DECL(read);
ATOMIC64_DECL(set);
ATOMIC64_DECL(xchg);
ATOMIC64_DECL(add_return);
ATOMIC64_DECL(sub_return);
ATOMIC64_DECL(inc_return);
ATOMIC64_DECL(dec_return);
ATOMIC64_DECL(dec_if_positive);
ATOMIC64_DECL(inc_not_zero);
ATOMIC64_DECL(add_unless);

#undef ATOMIC64_DECL
#undef ATOMIC64_DECL_ONE
#undef __ATOMIC64_DECL
#undef ATOMIC64_EXPORT

/**
 * arch_atomic64_cmpxchg - cmpxchg atomic64 variable
 * @v: pointer to type atomic64_t
 * @o: expected value
 * @n: new value
 *
 * Atomically sets @v to @n if it was equal to @o and returns
 * the old value.
 */

static inline s64 arch_atomic64_cmpxchg(atomic64_t *v, s64 o, s64 n)
{
	return arch_cmpxchg64(&v->counter, o, n);
}
#define arch_atomic64_cmpxchg arch_atomic64_cmpxchg
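
/*
 * Illustrative sketch (hypothetical caller, not part of this header; v,
 * delta and limit are made-up names): arch_atomic64_cmpxchg() is typically
 * used in a retry loop when the new value depends on the old one, e.g. a
 * saturating add:
 *
 *	s64 old = arch_atomic64_read(v);
 *	for (;;) {
 *		s64 new = min_t(s64, old + delta, limit);
 *		s64 prev = arch_atomic64_cmpxchg(v, old, new);
 *		if (prev == old)
 *			break;
 *		old = prev;
 *	}
 */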

/**
 * arch_atomic64_xchg - xchg atomic64 variable
 * @v: pointer to type atomic64_t
 * @n: value to assign
 *
 * Atomically xchgs the value of @v to @n and returns
 * the old value.
 */
static inline s64 arch_atomic64_xchg(atomic64_t *v, s64 n)
{
	s64 o;
	unsigned high = (unsigned)(n >> 32);
	unsigned low = (unsigned)n;
	alternative_atomic64(xchg, "=&A" (o),
			     "S" (v), "b" (low), "c" (high)
			     : "memory");
	return o;
}
#define arch_atomic64_xchg arch_atomic64_xchg

/**
 * arch_atomic64_set - set atomic64 variable
 * @v: pointer to type atomic64_t
 * @i: value to assign
 *
 * Atomically sets the value of @v to @i.
 */
static inline void arch_atomic64_set(atomic64_t *v, s64 i)
{
	unsigned high = (unsigned)(i >> 32);
	unsigned low = (unsigned)i;
	alternative_atomic64(set, /* no output */,
			     "S" (v), "b" (low), "c" (high)
			     : "eax", "edx", "memory");
}

/**
 * arch_atomic64_read - read atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically reads the value of @v and returns it.
 */
static inline s64 arch_atomic64_read(const atomic64_t *v)
{
	s64 r;
	alternative_atomic64(read, "=&A" (r), "c" (v) : "memory");
	return r;
}

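/*
 * Note that even a plain read needs the helper above: on 32-bit x86 a 64-bit
 * load is split into two 32-bit loads, so the value has to be fetched via the
 * cmpxchg8b-based helper (or the spinlock-protected _386 fallback) to be read
 * atomically.
 */
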
/**
 * arch_atomic64_add_return - add and return
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns @i + *@v
 */
static inline s64 arch_atomic64_add_return(s64 i, atomic64_t *v)
{
	alternative_atomic64(add_return,
			     ASM_OUTPUT2("+A" (i), "+c" (v)),
			     ASM_NO_INPUT_CLOBBER("memory"));
	return i;
}
#define arch_atomic64_add_return arch_atomic64_add_return

/*
 * Other variants with different arithmetic operators:
 */
static inline s64 arch_atomic64_sub_return(s64 i, atomic64_t *v)
{
	alternative_atomic64(sub_return,
			     ASM_OUTPUT2("+A" (i), "+c" (v)),
			     ASM_NO_INPUT_CLOBBER("memory"));
	return i;
}
#define arch_atomic64_sub_return arch_atomic64_sub_return

static inline s64 arch_atomic64_inc_return(atomic64_t *v)
{
	s64 a;
	alternative_atomic64(inc_return, "=&A" (a),
			     "S" (v) : "memory", "ecx");
	return a;
}
#define arch_atomic64_inc_return arch_atomic64_inc_return

static inline s64 arch_atomic64_dec_return(atomic64_t *v)
{
	s64 a;
	alternative_atomic64(dec_return, "=&A" (a),
			     "S" (v) : "memory", "ecx");
	return a;
}
#define arch_atomic64_dec_return arch_atomic64_dec_return

/**
 * arch_atomic64_add - add integer to atomic64 variable
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static inline s64 arch_atomic64_add(s64 i, atomic64_t *v)
{
	__alternative_atomic64(add, add_return,
			       ASM_OUTPUT2("+A" (i), "+c" (v)),
			       ASM_NO_INPUT_CLOBBER("memory"));
	return i;
}

/**
 * arch_atomic64_sub - subtract integer from atomic64 variable
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static inline s64 arch_atomic64_sub(s64 i, atomic64_t *v)
{
	__alternative_atomic64(sub, sub_return,
			       ASM_OUTPUT2("+A" (i), "+c" (v)),
			       ASM_NO_INPUT_CLOBBER("memory"));
	return i;
}

/**
 * arch_atomic64_inc - increment atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1.
 */
static inline void arch_atomic64_inc(atomic64_t *v)
{
	__alternative_atomic64(inc, inc_return, /* no output */,
			       "S" (v) : "memory", "eax", "ecx", "edx");
}
#define arch_atomic64_inc arch_atomic64_inc

/**
 * arch_atomic64_dec - decrement atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
static inline void arch_atomic64_dec(atomic64_t *v)
{
	__alternative_atomic64(dec, dec_return, /* no output */,
			       "S" (v) : "memory", "eax", "ecx", "edx");
}
#define arch_atomic64_dec arch_atomic64_dec

/**
 * arch_atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if the add was done, zero otherwise.
 */
static inline int arch_atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
{
	unsigned low = (unsigned)u;
	unsigned high = (unsigned)(u >> 32);
	alternative_atomic64(add_unless,
			     ASM_OUTPUT2("+A" (a), "+c" (low), "+D" (high)),
			     "S" (v) : "memory");
	return (int)a;
}
#define arch_atomic64_add_unless arch_atomic64_add_unless

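/*
 * Illustrative sketch (hypothetical caller, not part of this header; obj and
 * refcnt are made-up names): the classic "take a reference unless the count
 * already hit zero" pattern, normally reached through the generic
 * atomic64_add_unless() wrapper:
 *
 *	if (!atomic64_add_unless(&obj->refcnt, 1, 0))
 *		return NULL;	// object already dying, do not touch it
 *
 * arch_atomic64_inc_not_zero() below is the specialised form of the same
 * operation.
 */
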
static inline int arch_atomic64_inc_not_zero(atomic64_t *v)
{
	int r;
	alternative_atomic64(inc_not_zero, "=&a" (r),
			     "S" (v) : "ecx", "edx", "memory");
	return r;
}
#define arch_atomic64_inc_not_zero arch_atomic64_inc_not_zero

static inline s64 arch_atomic64_dec_if_positive(atomic64_t *v)
{
	s64 r;
	alternative_atomic64(dec_if_positive, "=&A" (r),
			     "S" (v) : "ecx", "memory");
	return r;
}
#define arch_atomic64_dec_if_positive arch_atomic64_dec_if_positive

#undef alternative_atomic64
#undef __alternative_atomic64

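/*
 * The bitwise and fetch_* operations below have no dedicated assembly
 * helpers; they are built as compare-and-exchange loops on top of
 * arch_atomic64_cmpxchg(), retrying until the value observed before the
 * update matches the expected one.
 */
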
static inline void arch_atomic64_and(s64 i, atomic64_t *v)
{
	s64 old, c = 0;

	while ((old = arch_atomic64_cmpxchg(v, c, c & i)) != c)
		c = old;
}

static inline s64 arch_atomic64_fetch_and(s64 i, atomic64_t *v)
{
	s64 old, c = 0;

	while ((old = arch_atomic64_cmpxchg(v, c, c & i)) != c)
		c = old;

	return old;
}
#define arch_atomic64_fetch_and arch_atomic64_fetch_and

static inline void arch_atomic64_or(s64 i, atomic64_t *v)
{
	s64 old, c = 0;

	while ((old = arch_atomic64_cmpxchg(v, c, c | i)) != c)
		c = old;
}

static inline s64 arch_atomic64_fetch_or(s64 i, atomic64_t *v)
{
	s64 old, c = 0;

	while ((old = arch_atomic64_cmpxchg(v, c, c | i)) != c)
		c = old;

	return old;
}
#define arch_atomic64_fetch_or arch_atomic64_fetch_or

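/*
 * Illustrative sketch (hypothetical flag word and bit, not part of this
 * header; state and PENDING_BIT are made-up names): the fetch_* forms return
 * the value seen before the update, so a caller can set a flag and learn
 * whether it was already set in a single atomic step:
 *
 *	old = arch_atomic64_fetch_or(BIT_ULL(PENDING_BIT), &state);
 *	if (old & BIT_ULL(PENDING_BIT))
 *		return;		// already pending, nothing to do
 */
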
static inline void arch_atomic64_xor(s64 i, atomic64_t *v)
{
	s64 old, c = 0;

	while ((old = arch_atomic64_cmpxchg(v, c, c ^ i)) != c)
		c = old;
}

static inline s64 arch_atomic64_fetch_xor(s64 i, atomic64_t *v)
{
	s64 old, c = 0;

	while ((old = arch_atomic64_cmpxchg(v, c, c ^ i)) != c)
		c = old;

	return old;
}
#define arch_atomic64_fetch_xor arch_atomic64_fetch_xor

static inline s64 arch_atomic64_fetch_add(s64 i, atomic64_t *v)
{
	s64 old, c = 0;

	while ((old = arch_atomic64_cmpxchg(v, c, c + i)) != c)
		c = old;

	return old;
}
#define arch_atomic64_fetch_add arch_atomic64_fetch_add

#define arch_atomic64_fetch_sub(i, v)	arch_atomic64_fetch_add(-(i), (v))

#endif /* _ASM_X86_ATOMIC64_32_H */