1*4882a593Smuzhiyun // SPDX-License-Identifier: GPL-2.0
2*4882a593Smuzhiyun
3*4882a593Smuzhiyun // Generated by scripts/atomic/gen-atomic-fallback.sh
4*4882a593Smuzhiyun // DO NOT MODIFY THIS FILE DIRECTLY
5*4882a593Smuzhiyun
6*4882a593Smuzhiyun #ifndef _LINUX_ATOMIC_FALLBACK_H
7*4882a593Smuzhiyun #define _LINUX_ATOMIC_FALLBACK_H
8*4882a593Smuzhiyun
9*4882a593Smuzhiyun #include <linux/compiler.h>
10*4882a593Smuzhiyun
/*
 * xchg() ordering fallbacks: if the architecture provides only a
 * fully-ordered xchg(), reuse it for all orderings; otherwise derive
 * the acquire/release/full forms from the _relaxed primitive via the
 * generic __atomic_op_*() wrappers.
 */
#ifndef xchg_relaxed
#define xchg_relaxed xchg
#define xchg_acquire xchg
#define xchg_release xchg
#else /* xchg_relaxed */

#ifndef xchg_acquire
#define xchg_acquire(...) \
	__atomic_op_acquire(xchg, __VA_ARGS__)
#endif

#ifndef xchg_release
#define xchg_release(...) \
	__atomic_op_release(xchg, __VA_ARGS__)
#endif

#ifndef xchg
#define xchg(...) \
	__atomic_op_fence(xchg, __VA_ARGS__)
#endif

#endif /* xchg_relaxed */
33*4882a593Smuzhiyun
/* cmpxchg() ordering fallbacks; same scheme as xchg() above. */
#ifndef cmpxchg_relaxed
#define cmpxchg_relaxed cmpxchg
#define cmpxchg_acquire cmpxchg
#define cmpxchg_release cmpxchg
#else /* cmpxchg_relaxed */

#ifndef cmpxchg_acquire
#define cmpxchg_acquire(...) \
	__atomic_op_acquire(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg_release
#define cmpxchg_release(...) \
	__atomic_op_release(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg
#define cmpxchg(...) \
	__atomic_op_fence(cmpxchg, __VA_ARGS__)
#endif

#endif /* cmpxchg_relaxed */
56*4882a593Smuzhiyun
/* cmpxchg64() ordering fallbacks; same scheme as xchg() above. */
#ifndef cmpxchg64_relaxed
#define cmpxchg64_relaxed cmpxchg64
#define cmpxchg64_acquire cmpxchg64
#define cmpxchg64_release cmpxchg64
#else /* cmpxchg64_relaxed */

#ifndef cmpxchg64_acquire
#define cmpxchg64_acquire(...) \
	__atomic_op_acquire(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64_release
#define cmpxchg64_release(...) \
	__atomic_op_release(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64
#define cmpxchg64(...) \
	__atomic_op_fence(cmpxchg64, __VA_ARGS__)
#endif

#endif /* cmpxchg64_relaxed */
79*4882a593Smuzhiyun
80*4882a593Smuzhiyun #define arch_atomic_read atomic_read
81*4882a593Smuzhiyun #define arch_atomic_read_acquire atomic_read_acquire
82*4882a593Smuzhiyun
83*4882a593Smuzhiyun #ifndef atomic_read_acquire
84*4882a593Smuzhiyun static __always_inline int
atomic_read_acquire(const atomic_t * v)85*4882a593Smuzhiyun atomic_read_acquire(const atomic_t *v)
86*4882a593Smuzhiyun {
87*4882a593Smuzhiyun return smp_load_acquire(&(v)->counter);
88*4882a593Smuzhiyun }
89*4882a593Smuzhiyun #define atomic_read_acquire atomic_read_acquire
90*4882a593Smuzhiyun #endif
91*4882a593Smuzhiyun
92*4882a593Smuzhiyun #define arch_atomic_set atomic_set
93*4882a593Smuzhiyun #define arch_atomic_set_release atomic_set_release
94*4882a593Smuzhiyun
95*4882a593Smuzhiyun #ifndef atomic_set_release
96*4882a593Smuzhiyun static __always_inline void
atomic_set_release(atomic_t * v,int i)97*4882a593Smuzhiyun atomic_set_release(atomic_t *v, int i)
98*4882a593Smuzhiyun {
99*4882a593Smuzhiyun smp_store_release(&(v)->counter, i);
100*4882a593Smuzhiyun }
101*4882a593Smuzhiyun #define atomic_set_release atomic_set_release
102*4882a593Smuzhiyun #endif
103*4882a593Smuzhiyun
#define arch_atomic_add atomic_add

#define arch_atomic_add_return atomic_add_return
#define arch_atomic_add_return_acquire atomic_add_return_acquire
#define arch_atomic_add_return_release atomic_add_return_release
#define arch_atomic_add_return_relaxed atomic_add_return_relaxed

/*
 * atomic_add_return() ordering fallbacks: with no _relaxed primitive,
 * the fully-ordered op serves all orderings; otherwise bracket the
 * _relaxed op with the appropriate fences.
 */
#ifndef atomic_add_return_relaxed
#define atomic_add_return_acquire atomic_add_return
#define atomic_add_return_release atomic_add_return
#define atomic_add_return_relaxed atomic_add_return
#else /* atomic_add_return_relaxed */

#ifndef atomic_add_return_acquire
static __always_inline int
atomic_add_return_acquire(int i, atomic_t *v)
{
	int ret = atomic_add_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_add_return_acquire atomic_add_return_acquire
#endif

#ifndef atomic_add_return_release
static __always_inline int
atomic_add_return_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_add_return_relaxed(i, v);
}
#define atomic_add_return_release atomic_add_return_release
#endif

#ifndef atomic_add_return
static __always_inline int
atomic_add_return(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_add_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_add_return atomic_add_return
#endif

#endif /* atomic_add_return_relaxed */
152*4882a593Smuzhiyun
#define arch_atomic_fetch_add atomic_fetch_add
#define arch_atomic_fetch_add_acquire atomic_fetch_add_acquire
#define arch_atomic_fetch_add_release atomic_fetch_add_release
#define arch_atomic_fetch_add_relaxed atomic_fetch_add_relaxed

/* atomic_fetch_add() ordering fallbacks; same scheme as atomic_add_return(). */
#ifndef atomic_fetch_add_relaxed
#define atomic_fetch_add_acquire atomic_fetch_add
#define atomic_fetch_add_release atomic_fetch_add
#define atomic_fetch_add_relaxed atomic_fetch_add
#else /* atomic_fetch_add_relaxed */

#ifndef atomic_fetch_add_acquire
static __always_inline int
atomic_fetch_add_acquire(int i, atomic_t *v)
{
	int ret = atomic_fetch_add_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_add_acquire atomic_fetch_add_acquire
#endif

#ifndef atomic_fetch_add_release
static __always_inline int
atomic_fetch_add_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_add_relaxed(i, v);
}
#define atomic_fetch_add_release atomic_fetch_add_release
#endif

#ifndef atomic_fetch_add
static __always_inline int
atomic_fetch_add(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_add_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_add atomic_fetch_add
#endif

#endif /* atomic_fetch_add_relaxed */
199*4882a593Smuzhiyun
#define arch_atomic_sub atomic_sub

#define arch_atomic_sub_return atomic_sub_return
#define arch_atomic_sub_return_acquire atomic_sub_return_acquire
#define arch_atomic_sub_return_release atomic_sub_return_release
#define arch_atomic_sub_return_relaxed atomic_sub_return_relaxed

/* atomic_sub_return() ordering fallbacks; same scheme as atomic_add_return(). */
#ifndef atomic_sub_return_relaxed
#define atomic_sub_return_acquire atomic_sub_return
#define atomic_sub_return_release atomic_sub_return
#define atomic_sub_return_relaxed atomic_sub_return
#else /* atomic_sub_return_relaxed */

#ifndef atomic_sub_return_acquire
static __always_inline int
atomic_sub_return_acquire(int i, atomic_t *v)
{
	int ret = atomic_sub_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_sub_return_acquire atomic_sub_return_acquire
#endif

#ifndef atomic_sub_return_release
static __always_inline int
atomic_sub_return_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_sub_return_relaxed(i, v);
}
#define atomic_sub_return_release atomic_sub_return_release
#endif

#ifndef atomic_sub_return
static __always_inline int
atomic_sub_return(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_sub_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_sub_return atomic_sub_return
#endif

#endif /* atomic_sub_return_relaxed */
248*4882a593Smuzhiyun
#define arch_atomic_fetch_sub atomic_fetch_sub
#define arch_atomic_fetch_sub_acquire atomic_fetch_sub_acquire
#define arch_atomic_fetch_sub_release atomic_fetch_sub_release
#define arch_atomic_fetch_sub_relaxed atomic_fetch_sub_relaxed

/* atomic_fetch_sub() ordering fallbacks; same scheme as atomic_add_return(). */
#ifndef atomic_fetch_sub_relaxed
#define atomic_fetch_sub_acquire atomic_fetch_sub
#define atomic_fetch_sub_release atomic_fetch_sub
#define atomic_fetch_sub_relaxed atomic_fetch_sub
#else /* atomic_fetch_sub_relaxed */

#ifndef atomic_fetch_sub_acquire
static __always_inline int
atomic_fetch_sub_acquire(int i, atomic_t *v)
{
	int ret = atomic_fetch_sub_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_sub_acquire atomic_fetch_sub_acquire
#endif

#ifndef atomic_fetch_sub_release
static __always_inline int
atomic_fetch_sub_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_sub_relaxed(i, v);
}
#define atomic_fetch_sub_release atomic_fetch_sub_release
#endif

#ifndef atomic_fetch_sub
static __always_inline int
atomic_fetch_sub(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_sub_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_sub atomic_fetch_sub
#endif

#endif /* atomic_fetch_sub_relaxed */
295*4882a593Smuzhiyun
296*4882a593Smuzhiyun #define arch_atomic_inc atomic_inc
297*4882a593Smuzhiyun
298*4882a593Smuzhiyun #ifndef atomic_inc
299*4882a593Smuzhiyun static __always_inline void
atomic_inc(atomic_t * v)300*4882a593Smuzhiyun atomic_inc(atomic_t *v)
301*4882a593Smuzhiyun {
302*4882a593Smuzhiyun atomic_add(1, v);
303*4882a593Smuzhiyun }
304*4882a593Smuzhiyun #define atomic_inc atomic_inc
305*4882a593Smuzhiyun #endif
306*4882a593Smuzhiyun
307*4882a593Smuzhiyun #define arch_atomic_inc_return atomic_inc_return
308*4882a593Smuzhiyun #define arch_atomic_inc_return_acquire atomic_inc_return_acquire
309*4882a593Smuzhiyun #define arch_atomic_inc_return_release atomic_inc_return_release
310*4882a593Smuzhiyun #define arch_atomic_inc_return_relaxed atomic_inc_return_relaxed
311*4882a593Smuzhiyun
312*4882a593Smuzhiyun #ifndef atomic_inc_return_relaxed
313*4882a593Smuzhiyun #ifdef atomic_inc_return
314*4882a593Smuzhiyun #define atomic_inc_return_acquire atomic_inc_return
315*4882a593Smuzhiyun #define atomic_inc_return_release atomic_inc_return
316*4882a593Smuzhiyun #define atomic_inc_return_relaxed atomic_inc_return
317*4882a593Smuzhiyun #endif /* atomic_inc_return */
318*4882a593Smuzhiyun
319*4882a593Smuzhiyun #ifndef atomic_inc_return
320*4882a593Smuzhiyun static __always_inline int
atomic_inc_return(atomic_t * v)321*4882a593Smuzhiyun atomic_inc_return(atomic_t *v)
322*4882a593Smuzhiyun {
323*4882a593Smuzhiyun return atomic_add_return(1, v);
324*4882a593Smuzhiyun }
325*4882a593Smuzhiyun #define atomic_inc_return atomic_inc_return
326*4882a593Smuzhiyun #endif
327*4882a593Smuzhiyun
328*4882a593Smuzhiyun #ifndef atomic_inc_return_acquire
329*4882a593Smuzhiyun static __always_inline int
atomic_inc_return_acquire(atomic_t * v)330*4882a593Smuzhiyun atomic_inc_return_acquire(atomic_t *v)
331*4882a593Smuzhiyun {
332*4882a593Smuzhiyun return atomic_add_return_acquire(1, v);
333*4882a593Smuzhiyun }
334*4882a593Smuzhiyun #define atomic_inc_return_acquire atomic_inc_return_acquire
335*4882a593Smuzhiyun #endif
336*4882a593Smuzhiyun
337*4882a593Smuzhiyun #ifndef atomic_inc_return_release
338*4882a593Smuzhiyun static __always_inline int
atomic_inc_return_release(atomic_t * v)339*4882a593Smuzhiyun atomic_inc_return_release(atomic_t *v)
340*4882a593Smuzhiyun {
341*4882a593Smuzhiyun return atomic_add_return_release(1, v);
342*4882a593Smuzhiyun }
343*4882a593Smuzhiyun #define atomic_inc_return_release atomic_inc_return_release
344*4882a593Smuzhiyun #endif
345*4882a593Smuzhiyun
346*4882a593Smuzhiyun #ifndef atomic_inc_return_relaxed
347*4882a593Smuzhiyun static __always_inline int
atomic_inc_return_relaxed(atomic_t * v)348*4882a593Smuzhiyun atomic_inc_return_relaxed(atomic_t *v)
349*4882a593Smuzhiyun {
350*4882a593Smuzhiyun return atomic_add_return_relaxed(1, v);
351*4882a593Smuzhiyun }
352*4882a593Smuzhiyun #define atomic_inc_return_relaxed atomic_inc_return_relaxed
353*4882a593Smuzhiyun #endif
354*4882a593Smuzhiyun
355*4882a593Smuzhiyun #else /* atomic_inc_return_relaxed */
356*4882a593Smuzhiyun
357*4882a593Smuzhiyun #ifndef atomic_inc_return_acquire
358*4882a593Smuzhiyun static __always_inline int
atomic_inc_return_acquire(atomic_t * v)359*4882a593Smuzhiyun atomic_inc_return_acquire(atomic_t *v)
360*4882a593Smuzhiyun {
361*4882a593Smuzhiyun int ret = atomic_inc_return_relaxed(v);
362*4882a593Smuzhiyun __atomic_acquire_fence();
363*4882a593Smuzhiyun return ret;
364*4882a593Smuzhiyun }
365*4882a593Smuzhiyun #define atomic_inc_return_acquire atomic_inc_return_acquire
366*4882a593Smuzhiyun #endif
367*4882a593Smuzhiyun
368*4882a593Smuzhiyun #ifndef atomic_inc_return_release
369*4882a593Smuzhiyun static __always_inline int
atomic_inc_return_release(atomic_t * v)370*4882a593Smuzhiyun atomic_inc_return_release(atomic_t *v)
371*4882a593Smuzhiyun {
372*4882a593Smuzhiyun __atomic_release_fence();
373*4882a593Smuzhiyun return atomic_inc_return_relaxed(v);
374*4882a593Smuzhiyun }
375*4882a593Smuzhiyun #define atomic_inc_return_release atomic_inc_return_release
376*4882a593Smuzhiyun #endif
377*4882a593Smuzhiyun
378*4882a593Smuzhiyun #ifndef atomic_inc_return
379*4882a593Smuzhiyun static __always_inline int
atomic_inc_return(atomic_t * v)380*4882a593Smuzhiyun atomic_inc_return(atomic_t *v)
381*4882a593Smuzhiyun {
382*4882a593Smuzhiyun int ret;
383*4882a593Smuzhiyun __atomic_pre_full_fence();
384*4882a593Smuzhiyun ret = atomic_inc_return_relaxed(v);
385*4882a593Smuzhiyun __atomic_post_full_fence();
386*4882a593Smuzhiyun return ret;
387*4882a593Smuzhiyun }
388*4882a593Smuzhiyun #define atomic_inc_return atomic_inc_return
389*4882a593Smuzhiyun #endif
390*4882a593Smuzhiyun
391*4882a593Smuzhiyun #endif /* atomic_inc_return_relaxed */
392*4882a593Smuzhiyun
393*4882a593Smuzhiyun #define arch_atomic_fetch_inc atomic_fetch_inc
394*4882a593Smuzhiyun #define arch_atomic_fetch_inc_acquire atomic_fetch_inc_acquire
395*4882a593Smuzhiyun #define arch_atomic_fetch_inc_release atomic_fetch_inc_release
396*4882a593Smuzhiyun #define arch_atomic_fetch_inc_relaxed atomic_fetch_inc_relaxed
397*4882a593Smuzhiyun
398*4882a593Smuzhiyun #ifndef atomic_fetch_inc_relaxed
399*4882a593Smuzhiyun #ifdef atomic_fetch_inc
400*4882a593Smuzhiyun #define atomic_fetch_inc_acquire atomic_fetch_inc
401*4882a593Smuzhiyun #define atomic_fetch_inc_release atomic_fetch_inc
402*4882a593Smuzhiyun #define atomic_fetch_inc_relaxed atomic_fetch_inc
403*4882a593Smuzhiyun #endif /* atomic_fetch_inc */
404*4882a593Smuzhiyun
405*4882a593Smuzhiyun #ifndef atomic_fetch_inc
406*4882a593Smuzhiyun static __always_inline int
atomic_fetch_inc(atomic_t * v)407*4882a593Smuzhiyun atomic_fetch_inc(atomic_t *v)
408*4882a593Smuzhiyun {
409*4882a593Smuzhiyun return atomic_fetch_add(1, v);
410*4882a593Smuzhiyun }
411*4882a593Smuzhiyun #define atomic_fetch_inc atomic_fetch_inc
412*4882a593Smuzhiyun #endif
413*4882a593Smuzhiyun
414*4882a593Smuzhiyun #ifndef atomic_fetch_inc_acquire
415*4882a593Smuzhiyun static __always_inline int
atomic_fetch_inc_acquire(atomic_t * v)416*4882a593Smuzhiyun atomic_fetch_inc_acquire(atomic_t *v)
417*4882a593Smuzhiyun {
418*4882a593Smuzhiyun return atomic_fetch_add_acquire(1, v);
419*4882a593Smuzhiyun }
420*4882a593Smuzhiyun #define atomic_fetch_inc_acquire atomic_fetch_inc_acquire
421*4882a593Smuzhiyun #endif
422*4882a593Smuzhiyun
423*4882a593Smuzhiyun #ifndef atomic_fetch_inc_release
424*4882a593Smuzhiyun static __always_inline int
atomic_fetch_inc_release(atomic_t * v)425*4882a593Smuzhiyun atomic_fetch_inc_release(atomic_t *v)
426*4882a593Smuzhiyun {
427*4882a593Smuzhiyun return atomic_fetch_add_release(1, v);
428*4882a593Smuzhiyun }
429*4882a593Smuzhiyun #define atomic_fetch_inc_release atomic_fetch_inc_release
430*4882a593Smuzhiyun #endif
431*4882a593Smuzhiyun
432*4882a593Smuzhiyun #ifndef atomic_fetch_inc_relaxed
433*4882a593Smuzhiyun static __always_inline int
atomic_fetch_inc_relaxed(atomic_t * v)434*4882a593Smuzhiyun atomic_fetch_inc_relaxed(atomic_t *v)
435*4882a593Smuzhiyun {
436*4882a593Smuzhiyun return atomic_fetch_add_relaxed(1, v);
437*4882a593Smuzhiyun }
438*4882a593Smuzhiyun #define atomic_fetch_inc_relaxed atomic_fetch_inc_relaxed
439*4882a593Smuzhiyun #endif
440*4882a593Smuzhiyun
441*4882a593Smuzhiyun #else /* atomic_fetch_inc_relaxed */
442*4882a593Smuzhiyun
443*4882a593Smuzhiyun #ifndef atomic_fetch_inc_acquire
444*4882a593Smuzhiyun static __always_inline int
atomic_fetch_inc_acquire(atomic_t * v)445*4882a593Smuzhiyun atomic_fetch_inc_acquire(atomic_t *v)
446*4882a593Smuzhiyun {
447*4882a593Smuzhiyun int ret = atomic_fetch_inc_relaxed(v);
448*4882a593Smuzhiyun __atomic_acquire_fence();
449*4882a593Smuzhiyun return ret;
450*4882a593Smuzhiyun }
451*4882a593Smuzhiyun #define atomic_fetch_inc_acquire atomic_fetch_inc_acquire
452*4882a593Smuzhiyun #endif
453*4882a593Smuzhiyun
454*4882a593Smuzhiyun #ifndef atomic_fetch_inc_release
455*4882a593Smuzhiyun static __always_inline int
atomic_fetch_inc_release(atomic_t * v)456*4882a593Smuzhiyun atomic_fetch_inc_release(atomic_t *v)
457*4882a593Smuzhiyun {
458*4882a593Smuzhiyun __atomic_release_fence();
459*4882a593Smuzhiyun return atomic_fetch_inc_relaxed(v);
460*4882a593Smuzhiyun }
461*4882a593Smuzhiyun #define atomic_fetch_inc_release atomic_fetch_inc_release
462*4882a593Smuzhiyun #endif
463*4882a593Smuzhiyun
464*4882a593Smuzhiyun #ifndef atomic_fetch_inc
465*4882a593Smuzhiyun static __always_inline int
atomic_fetch_inc(atomic_t * v)466*4882a593Smuzhiyun atomic_fetch_inc(atomic_t *v)
467*4882a593Smuzhiyun {
468*4882a593Smuzhiyun int ret;
469*4882a593Smuzhiyun __atomic_pre_full_fence();
470*4882a593Smuzhiyun ret = atomic_fetch_inc_relaxed(v);
471*4882a593Smuzhiyun __atomic_post_full_fence();
472*4882a593Smuzhiyun return ret;
473*4882a593Smuzhiyun }
474*4882a593Smuzhiyun #define atomic_fetch_inc atomic_fetch_inc
475*4882a593Smuzhiyun #endif
476*4882a593Smuzhiyun
477*4882a593Smuzhiyun #endif /* atomic_fetch_inc_relaxed */
478*4882a593Smuzhiyun
479*4882a593Smuzhiyun #define arch_atomic_dec atomic_dec
480*4882a593Smuzhiyun
481*4882a593Smuzhiyun #ifndef atomic_dec
482*4882a593Smuzhiyun static __always_inline void
atomic_dec(atomic_t * v)483*4882a593Smuzhiyun atomic_dec(atomic_t *v)
484*4882a593Smuzhiyun {
485*4882a593Smuzhiyun atomic_sub(1, v);
486*4882a593Smuzhiyun }
487*4882a593Smuzhiyun #define atomic_dec atomic_dec
488*4882a593Smuzhiyun #endif
489*4882a593Smuzhiyun
490*4882a593Smuzhiyun #define arch_atomic_dec_return atomic_dec_return
491*4882a593Smuzhiyun #define arch_atomic_dec_return_acquire atomic_dec_return_acquire
492*4882a593Smuzhiyun #define arch_atomic_dec_return_release atomic_dec_return_release
493*4882a593Smuzhiyun #define arch_atomic_dec_return_relaxed atomic_dec_return_relaxed
494*4882a593Smuzhiyun
495*4882a593Smuzhiyun #ifndef atomic_dec_return_relaxed
496*4882a593Smuzhiyun #ifdef atomic_dec_return
497*4882a593Smuzhiyun #define atomic_dec_return_acquire atomic_dec_return
498*4882a593Smuzhiyun #define atomic_dec_return_release atomic_dec_return
499*4882a593Smuzhiyun #define atomic_dec_return_relaxed atomic_dec_return
500*4882a593Smuzhiyun #endif /* atomic_dec_return */
501*4882a593Smuzhiyun
502*4882a593Smuzhiyun #ifndef atomic_dec_return
503*4882a593Smuzhiyun static __always_inline int
atomic_dec_return(atomic_t * v)504*4882a593Smuzhiyun atomic_dec_return(atomic_t *v)
505*4882a593Smuzhiyun {
506*4882a593Smuzhiyun return atomic_sub_return(1, v);
507*4882a593Smuzhiyun }
508*4882a593Smuzhiyun #define atomic_dec_return atomic_dec_return
509*4882a593Smuzhiyun #endif
510*4882a593Smuzhiyun
511*4882a593Smuzhiyun #ifndef atomic_dec_return_acquire
512*4882a593Smuzhiyun static __always_inline int
atomic_dec_return_acquire(atomic_t * v)513*4882a593Smuzhiyun atomic_dec_return_acquire(atomic_t *v)
514*4882a593Smuzhiyun {
515*4882a593Smuzhiyun return atomic_sub_return_acquire(1, v);
516*4882a593Smuzhiyun }
517*4882a593Smuzhiyun #define atomic_dec_return_acquire atomic_dec_return_acquire
518*4882a593Smuzhiyun #endif
519*4882a593Smuzhiyun
520*4882a593Smuzhiyun #ifndef atomic_dec_return_release
521*4882a593Smuzhiyun static __always_inline int
atomic_dec_return_release(atomic_t * v)522*4882a593Smuzhiyun atomic_dec_return_release(atomic_t *v)
523*4882a593Smuzhiyun {
524*4882a593Smuzhiyun return atomic_sub_return_release(1, v);
525*4882a593Smuzhiyun }
526*4882a593Smuzhiyun #define atomic_dec_return_release atomic_dec_return_release
527*4882a593Smuzhiyun #endif
528*4882a593Smuzhiyun
529*4882a593Smuzhiyun #ifndef atomic_dec_return_relaxed
530*4882a593Smuzhiyun static __always_inline int
atomic_dec_return_relaxed(atomic_t * v)531*4882a593Smuzhiyun atomic_dec_return_relaxed(atomic_t *v)
532*4882a593Smuzhiyun {
533*4882a593Smuzhiyun return atomic_sub_return_relaxed(1, v);
534*4882a593Smuzhiyun }
535*4882a593Smuzhiyun #define atomic_dec_return_relaxed atomic_dec_return_relaxed
536*4882a593Smuzhiyun #endif
537*4882a593Smuzhiyun
538*4882a593Smuzhiyun #else /* atomic_dec_return_relaxed */
539*4882a593Smuzhiyun
540*4882a593Smuzhiyun #ifndef atomic_dec_return_acquire
541*4882a593Smuzhiyun static __always_inline int
atomic_dec_return_acquire(atomic_t * v)542*4882a593Smuzhiyun atomic_dec_return_acquire(atomic_t *v)
543*4882a593Smuzhiyun {
544*4882a593Smuzhiyun int ret = atomic_dec_return_relaxed(v);
545*4882a593Smuzhiyun __atomic_acquire_fence();
546*4882a593Smuzhiyun return ret;
547*4882a593Smuzhiyun }
548*4882a593Smuzhiyun #define atomic_dec_return_acquire atomic_dec_return_acquire
549*4882a593Smuzhiyun #endif
550*4882a593Smuzhiyun
551*4882a593Smuzhiyun #ifndef atomic_dec_return_release
552*4882a593Smuzhiyun static __always_inline int
atomic_dec_return_release(atomic_t * v)553*4882a593Smuzhiyun atomic_dec_return_release(atomic_t *v)
554*4882a593Smuzhiyun {
555*4882a593Smuzhiyun __atomic_release_fence();
556*4882a593Smuzhiyun return atomic_dec_return_relaxed(v);
557*4882a593Smuzhiyun }
558*4882a593Smuzhiyun #define atomic_dec_return_release atomic_dec_return_release
559*4882a593Smuzhiyun #endif
560*4882a593Smuzhiyun
561*4882a593Smuzhiyun #ifndef atomic_dec_return
562*4882a593Smuzhiyun static __always_inline int
atomic_dec_return(atomic_t * v)563*4882a593Smuzhiyun atomic_dec_return(atomic_t *v)
564*4882a593Smuzhiyun {
565*4882a593Smuzhiyun int ret;
566*4882a593Smuzhiyun __atomic_pre_full_fence();
567*4882a593Smuzhiyun ret = atomic_dec_return_relaxed(v);
568*4882a593Smuzhiyun __atomic_post_full_fence();
569*4882a593Smuzhiyun return ret;
570*4882a593Smuzhiyun }
571*4882a593Smuzhiyun #define atomic_dec_return atomic_dec_return
572*4882a593Smuzhiyun #endif
573*4882a593Smuzhiyun
574*4882a593Smuzhiyun #endif /* atomic_dec_return_relaxed */
575*4882a593Smuzhiyun
576*4882a593Smuzhiyun #define arch_atomic_fetch_dec atomic_fetch_dec
577*4882a593Smuzhiyun #define arch_atomic_fetch_dec_acquire atomic_fetch_dec_acquire
578*4882a593Smuzhiyun #define arch_atomic_fetch_dec_release atomic_fetch_dec_release
579*4882a593Smuzhiyun #define arch_atomic_fetch_dec_relaxed atomic_fetch_dec_relaxed
580*4882a593Smuzhiyun
581*4882a593Smuzhiyun #ifndef atomic_fetch_dec_relaxed
582*4882a593Smuzhiyun #ifdef atomic_fetch_dec
583*4882a593Smuzhiyun #define atomic_fetch_dec_acquire atomic_fetch_dec
584*4882a593Smuzhiyun #define atomic_fetch_dec_release atomic_fetch_dec
585*4882a593Smuzhiyun #define atomic_fetch_dec_relaxed atomic_fetch_dec
586*4882a593Smuzhiyun #endif /* atomic_fetch_dec */
587*4882a593Smuzhiyun
588*4882a593Smuzhiyun #ifndef atomic_fetch_dec
589*4882a593Smuzhiyun static __always_inline int
atomic_fetch_dec(atomic_t * v)590*4882a593Smuzhiyun atomic_fetch_dec(atomic_t *v)
591*4882a593Smuzhiyun {
592*4882a593Smuzhiyun return atomic_fetch_sub(1, v);
593*4882a593Smuzhiyun }
594*4882a593Smuzhiyun #define atomic_fetch_dec atomic_fetch_dec
595*4882a593Smuzhiyun #endif
596*4882a593Smuzhiyun
597*4882a593Smuzhiyun #ifndef atomic_fetch_dec_acquire
598*4882a593Smuzhiyun static __always_inline int
atomic_fetch_dec_acquire(atomic_t * v)599*4882a593Smuzhiyun atomic_fetch_dec_acquire(atomic_t *v)
600*4882a593Smuzhiyun {
601*4882a593Smuzhiyun return atomic_fetch_sub_acquire(1, v);
602*4882a593Smuzhiyun }
603*4882a593Smuzhiyun #define atomic_fetch_dec_acquire atomic_fetch_dec_acquire
604*4882a593Smuzhiyun #endif
605*4882a593Smuzhiyun
606*4882a593Smuzhiyun #ifndef atomic_fetch_dec_release
607*4882a593Smuzhiyun static __always_inline int
atomic_fetch_dec_release(atomic_t * v)608*4882a593Smuzhiyun atomic_fetch_dec_release(atomic_t *v)
609*4882a593Smuzhiyun {
610*4882a593Smuzhiyun return atomic_fetch_sub_release(1, v);
611*4882a593Smuzhiyun }
612*4882a593Smuzhiyun #define atomic_fetch_dec_release atomic_fetch_dec_release
613*4882a593Smuzhiyun #endif
614*4882a593Smuzhiyun
615*4882a593Smuzhiyun #ifndef atomic_fetch_dec_relaxed
616*4882a593Smuzhiyun static __always_inline int
atomic_fetch_dec_relaxed(atomic_t * v)617*4882a593Smuzhiyun atomic_fetch_dec_relaxed(atomic_t *v)
618*4882a593Smuzhiyun {
619*4882a593Smuzhiyun return atomic_fetch_sub_relaxed(1, v);
620*4882a593Smuzhiyun }
621*4882a593Smuzhiyun #define atomic_fetch_dec_relaxed atomic_fetch_dec_relaxed
622*4882a593Smuzhiyun #endif
623*4882a593Smuzhiyun
624*4882a593Smuzhiyun #else /* atomic_fetch_dec_relaxed */
625*4882a593Smuzhiyun
626*4882a593Smuzhiyun #ifndef atomic_fetch_dec_acquire
627*4882a593Smuzhiyun static __always_inline int
atomic_fetch_dec_acquire(atomic_t * v)628*4882a593Smuzhiyun atomic_fetch_dec_acquire(atomic_t *v)
629*4882a593Smuzhiyun {
630*4882a593Smuzhiyun int ret = atomic_fetch_dec_relaxed(v);
631*4882a593Smuzhiyun __atomic_acquire_fence();
632*4882a593Smuzhiyun return ret;
633*4882a593Smuzhiyun }
634*4882a593Smuzhiyun #define atomic_fetch_dec_acquire atomic_fetch_dec_acquire
635*4882a593Smuzhiyun #endif
636*4882a593Smuzhiyun
637*4882a593Smuzhiyun #ifndef atomic_fetch_dec_release
638*4882a593Smuzhiyun static __always_inline int
atomic_fetch_dec_release(atomic_t * v)639*4882a593Smuzhiyun atomic_fetch_dec_release(atomic_t *v)
640*4882a593Smuzhiyun {
641*4882a593Smuzhiyun __atomic_release_fence();
642*4882a593Smuzhiyun return atomic_fetch_dec_relaxed(v);
643*4882a593Smuzhiyun }
644*4882a593Smuzhiyun #define atomic_fetch_dec_release atomic_fetch_dec_release
645*4882a593Smuzhiyun #endif
646*4882a593Smuzhiyun
647*4882a593Smuzhiyun #ifndef atomic_fetch_dec
648*4882a593Smuzhiyun static __always_inline int
atomic_fetch_dec(atomic_t * v)649*4882a593Smuzhiyun atomic_fetch_dec(atomic_t *v)
650*4882a593Smuzhiyun {
651*4882a593Smuzhiyun int ret;
652*4882a593Smuzhiyun __atomic_pre_full_fence();
653*4882a593Smuzhiyun ret = atomic_fetch_dec_relaxed(v);
654*4882a593Smuzhiyun __atomic_post_full_fence();
655*4882a593Smuzhiyun return ret;
656*4882a593Smuzhiyun }
657*4882a593Smuzhiyun #define atomic_fetch_dec atomic_fetch_dec
658*4882a593Smuzhiyun #endif
659*4882a593Smuzhiyun
660*4882a593Smuzhiyun #endif /* atomic_fetch_dec_relaxed */
661*4882a593Smuzhiyun
#define arch_atomic_and atomic_and

#define arch_atomic_fetch_and atomic_fetch_and
#define arch_atomic_fetch_and_acquire atomic_fetch_and_acquire
#define arch_atomic_fetch_and_release atomic_fetch_and_release
#define arch_atomic_fetch_and_relaxed atomic_fetch_and_relaxed

/*
 * If the architecture only provides a fully-ordered atomic_fetch_and(),
 * it is a valid implementation of every weaker ordering; otherwise derive
 * acquire/release/fully-ordered variants from the relaxed op plus fences.
 */
#ifndef atomic_fetch_and_relaxed
#define atomic_fetch_and_acquire atomic_fetch_and
#define atomic_fetch_and_release atomic_fetch_and
#define atomic_fetch_and_relaxed atomic_fetch_and
#else /* atomic_fetch_and_relaxed */

#ifndef atomic_fetch_and_acquire
/* Relaxed AND followed by an acquire fence; returns the old value. */
static __always_inline int
atomic_fetch_and_acquire(int i, atomic_t *v)
{
	int ret = atomic_fetch_and_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_and_acquire atomic_fetch_and_acquire
#endif

#ifndef atomic_fetch_and_release
/* Release fence before the relaxed AND; returns the old value. */
static __always_inline int
atomic_fetch_and_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_and_relaxed(i, v);
}
#define atomic_fetch_and_release atomic_fetch_and_release
#endif

#ifndef atomic_fetch_and
/* Relaxed AND bracketed by full fences; returns the old value. */
static __always_inline int
atomic_fetch_and(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_and_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_and atomic_fetch_and
#endif

#endif /* atomic_fetch_and_relaxed */
710*4882a593Smuzhiyun
#define arch_atomic_andnot atomic_andnot

#ifndef atomic_andnot
/*
 * atomic_andnot - atomically clear the bits set in @i from @v
 * @i: mask of bits to clear
 * @v: pointer of type atomic_t
 *
 * Implemented as an atomic AND with the complement of @i.  No return value.
 */
static __always_inline void
atomic_andnot(int i, atomic_t *v)
{
	atomic_and(~i, v);
}
#define atomic_andnot atomic_andnot
#endif
721*4882a593Smuzhiyun
#define arch_atomic_fetch_andnot atomic_fetch_andnot
#define arch_atomic_fetch_andnot_acquire atomic_fetch_andnot_acquire
#define arch_atomic_fetch_andnot_release atomic_fetch_andnot_release
#define arch_atomic_fetch_andnot_relaxed atomic_fetch_andnot_relaxed

/*
 * andnot has no relaxed arch implementation here: either reuse a
 * fully-ordered arch atomic_fetch_andnot() for all orderings, or build
 * each ordering from the matching atomic_fetch_and_*() with ~@i.
 */
#ifndef atomic_fetch_andnot_relaxed
#ifdef atomic_fetch_andnot
#define atomic_fetch_andnot_acquire atomic_fetch_andnot
#define atomic_fetch_andnot_release atomic_fetch_andnot
#define atomic_fetch_andnot_relaxed atomic_fetch_andnot
#endif /* atomic_fetch_andnot */

#ifndef atomic_fetch_andnot
/* Fully-ordered andnot via atomic_fetch_and(); returns the old value. */
static __always_inline int
atomic_fetch_andnot(int i, atomic_t *v)
{
	return atomic_fetch_and(~i, v);
}
#define atomic_fetch_andnot atomic_fetch_andnot
#endif

#ifndef atomic_fetch_andnot_acquire
/* Acquire andnot via atomic_fetch_and_acquire(); returns the old value. */
static __always_inline int
atomic_fetch_andnot_acquire(int i, atomic_t *v)
{
	return atomic_fetch_and_acquire(~i, v);
}
#define atomic_fetch_andnot_acquire atomic_fetch_andnot_acquire
#endif

#ifndef atomic_fetch_andnot_release
/* Release andnot via atomic_fetch_and_release(); returns the old value. */
static __always_inline int
atomic_fetch_andnot_release(int i, atomic_t *v)
{
	return atomic_fetch_and_release(~i, v);
}
#define atomic_fetch_andnot_release atomic_fetch_andnot_release
#endif

#ifndef atomic_fetch_andnot_relaxed
/* Relaxed andnot via atomic_fetch_and_relaxed(); returns the old value. */
static __always_inline int
atomic_fetch_andnot_relaxed(int i, atomic_t *v)
{
	return atomic_fetch_and_relaxed(~i, v);
}
#define atomic_fetch_andnot_relaxed atomic_fetch_andnot_relaxed
#endif

#else /* atomic_fetch_andnot_relaxed */

#ifndef atomic_fetch_andnot_acquire
/* Relaxed andnot followed by an acquire fence; returns the old value. */
static __always_inline int
atomic_fetch_andnot_acquire(int i, atomic_t *v)
{
	int ret = atomic_fetch_andnot_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_andnot_acquire atomic_fetch_andnot_acquire
#endif

#ifndef atomic_fetch_andnot_release
/* Release fence before the relaxed andnot; returns the old value. */
static __always_inline int
atomic_fetch_andnot_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_andnot_relaxed(i, v);
}
#define atomic_fetch_andnot_release atomic_fetch_andnot_release
#endif

#ifndef atomic_fetch_andnot
/* Relaxed andnot bracketed by full fences; returns the old value. */
static __always_inline int
atomic_fetch_andnot(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_andnot_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_andnot atomic_fetch_andnot
#endif

#endif /* atomic_fetch_andnot_relaxed */
807*4882a593Smuzhiyun
#define arch_atomic_or atomic_or

#define arch_atomic_fetch_or atomic_fetch_or
#define arch_atomic_fetch_or_acquire atomic_fetch_or_acquire
#define arch_atomic_fetch_or_release atomic_fetch_or_release
#define arch_atomic_fetch_or_relaxed atomic_fetch_or_relaxed

/*
 * If only a fully-ordered atomic_fetch_or() exists, use it for every
 * ordering; otherwise derive the stronger orderings from the relaxed op.
 */
#ifndef atomic_fetch_or_relaxed
#define atomic_fetch_or_acquire atomic_fetch_or
#define atomic_fetch_or_release atomic_fetch_or
#define atomic_fetch_or_relaxed atomic_fetch_or
#else /* atomic_fetch_or_relaxed */

#ifndef atomic_fetch_or_acquire
/* Relaxed OR followed by an acquire fence; returns the old value. */
static __always_inline int
atomic_fetch_or_acquire(int i, atomic_t *v)
{
	int ret = atomic_fetch_or_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_or_acquire atomic_fetch_or_acquire
#endif

#ifndef atomic_fetch_or_release
/* Release fence before the relaxed OR; returns the old value. */
static __always_inline int
atomic_fetch_or_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_or_relaxed(i, v);
}
#define atomic_fetch_or_release atomic_fetch_or_release
#endif

#ifndef atomic_fetch_or
/* Relaxed OR bracketed by full fences; returns the old value. */
static __always_inline int
atomic_fetch_or(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_or_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_or atomic_fetch_or
#endif

#endif /* atomic_fetch_or_relaxed */
856*4882a593Smuzhiyun
#define arch_atomic_xor atomic_xor

#define arch_atomic_fetch_xor atomic_fetch_xor
#define arch_atomic_fetch_xor_acquire atomic_fetch_xor_acquire
#define arch_atomic_fetch_xor_release atomic_fetch_xor_release
#define arch_atomic_fetch_xor_relaxed atomic_fetch_xor_relaxed

/*
 * If only a fully-ordered atomic_fetch_xor() exists, use it for every
 * ordering; otherwise derive the stronger orderings from the relaxed op.
 */
#ifndef atomic_fetch_xor_relaxed
#define atomic_fetch_xor_acquire atomic_fetch_xor
#define atomic_fetch_xor_release atomic_fetch_xor
#define atomic_fetch_xor_relaxed atomic_fetch_xor
#else /* atomic_fetch_xor_relaxed */

#ifndef atomic_fetch_xor_acquire
/* Relaxed XOR followed by an acquire fence; returns the old value. */
static __always_inline int
atomic_fetch_xor_acquire(int i, atomic_t *v)
{
	int ret = atomic_fetch_xor_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_xor_acquire atomic_fetch_xor_acquire
#endif

#ifndef atomic_fetch_xor_release
/* Release fence before the relaxed XOR; returns the old value. */
static __always_inline int
atomic_fetch_xor_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_xor_relaxed(i, v);
}
#define atomic_fetch_xor_release atomic_fetch_xor_release
#endif

#ifndef atomic_fetch_xor
/* Relaxed XOR bracketed by full fences; returns the old value. */
static __always_inline int
atomic_fetch_xor(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_xor_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_xor atomic_fetch_xor
#endif

#endif /* atomic_fetch_xor_relaxed */
905*4882a593Smuzhiyun
#define arch_atomic_xchg atomic_xchg
#define arch_atomic_xchg_acquire atomic_xchg_acquire
#define arch_atomic_xchg_release atomic_xchg_release
#define arch_atomic_xchg_relaxed atomic_xchg_relaxed

/*
 * If only a fully-ordered atomic_xchg() exists, use it for every
 * ordering; otherwise derive the stronger orderings from the relaxed op.
 */
#ifndef atomic_xchg_relaxed
#define atomic_xchg_acquire atomic_xchg
#define atomic_xchg_release atomic_xchg
#define atomic_xchg_relaxed atomic_xchg
#else /* atomic_xchg_relaxed */

#ifndef atomic_xchg_acquire
/* Relaxed exchange followed by an acquire fence; returns the old value. */
static __always_inline int
atomic_xchg_acquire(atomic_t *v, int i)
{
	int ret = atomic_xchg_relaxed(v, i);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_xchg_acquire atomic_xchg_acquire
#endif

#ifndef atomic_xchg_release
/* Release fence before the relaxed exchange; returns the old value. */
static __always_inline int
atomic_xchg_release(atomic_t *v, int i)
{
	__atomic_release_fence();
	return atomic_xchg_relaxed(v, i);
}
#define atomic_xchg_release atomic_xchg_release
#endif

#ifndef atomic_xchg
/* Relaxed exchange bracketed by full fences; returns the old value. */
static __always_inline int
atomic_xchg(atomic_t *v, int i)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_xchg_relaxed(v, i);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_xchg atomic_xchg
#endif

#endif /* atomic_xchg_relaxed */
952*4882a593Smuzhiyun
#define arch_atomic_cmpxchg atomic_cmpxchg
#define arch_atomic_cmpxchg_acquire atomic_cmpxchg_acquire
#define arch_atomic_cmpxchg_release atomic_cmpxchg_release
#define arch_atomic_cmpxchg_relaxed atomic_cmpxchg_relaxed

/*
 * If only a fully-ordered atomic_cmpxchg() exists, use it for every
 * ordering; otherwise derive the stronger orderings from the relaxed op.
 * All variants return the value @v held before the operation.
 */
#ifndef atomic_cmpxchg_relaxed
#define atomic_cmpxchg_acquire atomic_cmpxchg
#define atomic_cmpxchg_release atomic_cmpxchg
#define atomic_cmpxchg_relaxed atomic_cmpxchg
#else /* atomic_cmpxchg_relaxed */

#ifndef atomic_cmpxchg_acquire
/* Relaxed cmpxchg followed by an acquire fence. */
static __always_inline int
atomic_cmpxchg_acquire(atomic_t *v, int old, int new)
{
	int ret = atomic_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_cmpxchg_acquire atomic_cmpxchg_acquire
#endif

#ifndef atomic_cmpxchg_release
/* Release fence before the relaxed cmpxchg. */
static __always_inline int
atomic_cmpxchg_release(atomic_t *v, int old, int new)
{
	__atomic_release_fence();
	return atomic_cmpxchg_relaxed(v, old, new);
}
#define atomic_cmpxchg_release atomic_cmpxchg_release
#endif

#ifndef atomic_cmpxchg
/* Relaxed cmpxchg bracketed by full fences. */
static __always_inline int
atomic_cmpxchg(atomic_t *v, int old, int new)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_cmpxchg atomic_cmpxchg
#endif

#endif /* atomic_cmpxchg_relaxed */
999*4882a593Smuzhiyun
#define arch_atomic_try_cmpxchg atomic_try_cmpxchg
#define arch_atomic_try_cmpxchg_acquire atomic_try_cmpxchg_acquire
#define arch_atomic_try_cmpxchg_release atomic_try_cmpxchg_release
#define arch_atomic_try_cmpxchg_relaxed atomic_try_cmpxchg_relaxed

/*
 * try_cmpxchg(v, old, new): attempt to change *v from *old to new.
 * Returns true on success; on failure, writes the value actually found
 * into *old and returns false.  When the architecture provides no
 * try_cmpxchg at all, emulate each ordering on top of the matching
 * cmpxchg; when only relaxed exists, add the fences here.
 */
#ifndef atomic_try_cmpxchg_relaxed
#ifdef atomic_try_cmpxchg
#define atomic_try_cmpxchg_acquire atomic_try_cmpxchg
#define atomic_try_cmpxchg_release atomic_try_cmpxchg
#define atomic_try_cmpxchg_relaxed atomic_try_cmpxchg
#endif /* atomic_try_cmpxchg */

#ifndef atomic_try_cmpxchg
/* Emulate fully-ordered try_cmpxchg via atomic_cmpxchg(). */
static __always_inline bool
atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
	int r, o = *old;
	r = atomic_cmpxchg(v, o, new);
	if (unlikely(r != o))
		*old = r;	/* report the observed value back to the caller */
	return likely(r == o);
}
#define atomic_try_cmpxchg atomic_try_cmpxchg
#endif

#ifndef atomic_try_cmpxchg_acquire
/* Emulate acquire try_cmpxchg via atomic_cmpxchg_acquire(). */
static __always_inline bool
atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
{
	int r, o = *old;
	r = atomic_cmpxchg_acquire(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define atomic_try_cmpxchg_acquire atomic_try_cmpxchg_acquire
#endif

#ifndef atomic_try_cmpxchg_release
/* Emulate release try_cmpxchg via atomic_cmpxchg_release(). */
static __always_inline bool
atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
{
	int r, o = *old;
	r = atomic_cmpxchg_release(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define atomic_try_cmpxchg_release atomic_try_cmpxchg_release
#endif

#ifndef atomic_try_cmpxchg_relaxed
/* Emulate relaxed try_cmpxchg via atomic_cmpxchg_relaxed(). */
static __always_inline bool
atomic_try_cmpxchg_relaxed(atomic_t *v, int *old, int new)
{
	int r, o = *old;
	r = atomic_cmpxchg_relaxed(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define atomic_try_cmpxchg_relaxed atomic_try_cmpxchg_relaxed
#endif

#else /* atomic_try_cmpxchg_relaxed */

#ifndef atomic_try_cmpxchg_acquire
/* Relaxed try_cmpxchg followed by an acquire fence. */
static __always_inline bool
atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
{
	bool ret = atomic_try_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_try_cmpxchg_acquire atomic_try_cmpxchg_acquire
#endif

#ifndef atomic_try_cmpxchg_release
/* Release fence before the relaxed try_cmpxchg. */
static __always_inline bool
atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
{
	__atomic_release_fence();
	return atomic_try_cmpxchg_relaxed(v, old, new);
}
#define atomic_try_cmpxchg_release atomic_try_cmpxchg_release
#endif

#ifndef atomic_try_cmpxchg
/* Relaxed try_cmpxchg bracketed by full fences. */
static __always_inline bool
atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
	bool ret;
	__atomic_pre_full_fence();
	ret = atomic_try_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_try_cmpxchg atomic_try_cmpxchg
#endif

#endif /* atomic_try_cmpxchg_relaxed */
1101*4882a593Smuzhiyun
#define arch_atomic_sub_and_test atomic_sub_and_test

#ifndef atomic_sub_and_test
/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static __always_inline bool
atomic_sub_and_test(int i, atomic_t *v)
{
	/* Built on the fully-ordered atomic_sub_return(). */
	return atomic_sub_return(i, v) == 0;
}
#define atomic_sub_and_test atomic_sub_and_test
#endif
1121*4882a593Smuzhiyun
#define arch_atomic_dec_and_test atomic_dec_and_test

#ifndef atomic_dec_and_test
/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static __always_inline bool
atomic_dec_and_test(atomic_t *v)
{
	/* Built on the fully-ordered atomic_dec_return(). */
	return atomic_dec_return(v) == 0;
}
#define atomic_dec_and_test atomic_dec_and_test
#endif
1140*4882a593Smuzhiyun
#define arch_atomic_inc_and_test atomic_inc_and_test

#ifndef atomic_inc_and_test
/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static __always_inline bool
atomic_inc_and_test(atomic_t *v)
{
	/* Built on the fully-ordered atomic_inc_return(). */
	return atomic_inc_return(v) == 0;
}
#define atomic_inc_and_test atomic_inc_and_test
#endif
1159*4882a593Smuzhiyun
#define arch_atomic_add_negative atomic_add_negative

#ifndef atomic_add_negative
/**
 * atomic_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static __always_inline bool
atomic_add_negative(int i, atomic_t *v)
{
	/* Built on the fully-ordered atomic_add_return(). */
	return atomic_add_return(i, v) < 0;
}
#define atomic_add_negative atomic_add_negative
#endif
1179*4882a593Smuzhiyun
#define arch_atomic_fetch_add_unless atomic_fetch_add_unless

#ifndef atomic_fetch_add_unless
/**
 * atomic_fetch_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns original value of @v
 */
static __always_inline int
atomic_fetch_add_unless(atomic_t *v, int a, int u)
{
	int c = atomic_read(v);

	/*
	 * try_cmpxchg() reloads @c with the current value on failure,
	 * so the loop re-checks against @u each iteration.
	 */
	do {
		if (unlikely(c == u))
			break;
	} while (!atomic_try_cmpxchg(v, &c, c + a));

	return c;
}
#define atomic_fetch_add_unless atomic_fetch_add_unless
#endif
1206*4882a593Smuzhiyun
#define arch_atomic_add_unless atomic_add_unless

#ifndef atomic_add_unless
/**
 * atomic_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, if @v was not already @u.
 * Returns true if the addition was done.
 */
static __always_inline bool
atomic_add_unless(atomic_t *v, int a, int u)
{
	/* Thin boolean wrapper around atomic_fetch_add_unless(). */
	return atomic_fetch_add_unless(v, a, u) != u;
}
#define atomic_add_unless atomic_add_unless
#endif
1226*4882a593Smuzhiyun
#define arch_atomic_inc_not_zero atomic_inc_not_zero

#ifndef atomic_inc_not_zero
/**
 * atomic_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1, if @v is non-zero.
 * Returns true if the increment was done.
 */
static __always_inline bool
atomic_inc_not_zero(atomic_t *v)
{
	/* Special case of atomic_add_unless(): +1 unless @v == 0. */
	return atomic_add_unless(v, 1, 0);
}
#define atomic_inc_not_zero atomic_inc_not_zero
#endif
1244*4882a593Smuzhiyun
#define arch_atomic_inc_unless_negative atomic_inc_unless_negative

#ifndef atomic_inc_unless_negative
/**
 * atomic_inc_unless_negative - increment unless the value is negative
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1, so long as @v is non-negative.
 * Returns true if the increment was done.
 */
static __always_inline bool
atomic_inc_unless_negative(atomic_t *v)
{
	int c = atomic_read(v);

	/* try_cmpxchg() refreshes @c on failure, re-checking the sign. */
	do {
		if (unlikely(c < 0))
			return false;
	} while (!atomic_try_cmpxchg(v, &c, c + 1));

	return true;
}
#define atomic_inc_unless_negative atomic_inc_unless_negative
#endif
1262*4882a593Smuzhiyun
#define arch_atomic_dec_unless_positive atomic_dec_unless_positive

#ifndef atomic_dec_unless_positive
/**
 * atomic_dec_unless_positive - decrement unless the value is positive
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1, so long as @v is not greater than zero
 * (i.e. @v is zero or negative).  Returns true if the decrement was done.
 */
static __always_inline bool
atomic_dec_unless_positive(atomic_t *v)
{
	int c = atomic_read(v);

	/* try_cmpxchg() refreshes @c on failure, re-checking the sign. */
	do {
		if (unlikely(c > 0))
			return false;
	} while (!atomic_try_cmpxchg(v, &c, c - 1));

	return true;
}
#define atomic_dec_unless_positive atomic_dec_unless_positive
#endif
1280*4882a593Smuzhiyun
#define arch_atomic_dec_if_positive atomic_dec_if_positive

#ifndef atomic_dec_if_positive
/**
 * atomic_dec_if_positive - decrement if the result would be non-negative
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 only if the resulting value would not be
 * negative.  Returns the new value; a return value less than zero means
 * @v was not modified.
 */
static __always_inline int
atomic_dec_if_positive(atomic_t *v)
{
	int dec, c = atomic_read(v);

	do {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;	/* would go negative: leave @v untouched */
	} while (!atomic_try_cmpxchg(v, &c, dec));

	return dec;
}
#define atomic_dec_if_positive atomic_dec_if_positive
#endif
1299*4882a593Smuzhiyun
1300*4882a593Smuzhiyun #ifdef CONFIG_GENERIC_ATOMIC64
1301*4882a593Smuzhiyun #include <asm-generic/atomic64.h>
1302*4882a593Smuzhiyun #endif
1303*4882a593Smuzhiyun
#define arch_atomic64_read atomic64_read
#define arch_atomic64_read_acquire atomic64_read_acquire

#ifndef atomic64_read_acquire
/*
 * atomic64_read_acquire() fallback: a plain load of the counter with
 * acquire ordering via smp_load_acquire().
 */
static __always_inline s64
atomic64_read_acquire(const atomic64_t *v)
{
	return smp_load_acquire(&(v)->counter);
}
#define atomic64_read_acquire atomic64_read_acquire
#endif
1315*4882a593Smuzhiyun
#define arch_atomic64_set atomic64_set
#define arch_atomic64_set_release atomic64_set_release

#ifndef atomic64_set_release
/* Fallback: a release set is a store-release of the counter word. */
static __always_inline void
atomic64_set_release(atomic64_t *v, s64 i)
{
	smp_store_release(&(v)->counter, i);
}
#define atomic64_set_release atomic64_set_release
#endif
1327*4882a593Smuzhiyun
#define arch_atomic64_add atomic64_add

#define arch_atomic64_add_return atomic64_add_return
#define arch_atomic64_add_return_acquire atomic64_add_return_acquire
#define arch_atomic64_add_return_release atomic64_add_return_release
#define arch_atomic64_add_return_relaxed atomic64_add_return_relaxed

#ifndef atomic64_add_return_relaxed
/*
 * The architecture only supplies a fully-ordered atomic64_add_return();
 * a fully-ordered op satisfies every weaker ordering, so alias them all.
 */
#define atomic64_add_return_acquire atomic64_add_return
#define atomic64_add_return_release atomic64_add_return
#define atomic64_add_return_relaxed atomic64_add_return
#else /* atomic64_add_return_relaxed */

#ifndef atomic64_add_return_acquire
/* acquire = relaxed op followed by an acquire fence. */
static __always_inline s64
atomic64_add_return_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_add_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_add_return_acquire atomic64_add_return_acquire
#endif

#ifndef atomic64_add_return_release
/* release = release fence preceding the relaxed op. */
static __always_inline s64
atomic64_add_return_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_add_return_relaxed(i, v);
}
#define atomic64_add_return_release atomic64_add_return_release
#endif

#ifndef atomic64_add_return
/* fully ordered = relaxed op bracketed by full fences. */
static __always_inline s64
atomic64_add_return(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_add_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_add_return atomic64_add_return
#endif

#endif /* atomic64_add_return_relaxed */
1376*4882a593Smuzhiyun
#define arch_atomic64_fetch_add atomic64_fetch_add
#define arch_atomic64_fetch_add_acquire atomic64_fetch_add_acquire
#define arch_atomic64_fetch_add_release atomic64_fetch_add_release
#define arch_atomic64_fetch_add_relaxed atomic64_fetch_add_relaxed

#ifndef atomic64_fetch_add_relaxed
/* Only the fully-ordered op exists; it satisfies the weaker orderings. */
#define atomic64_fetch_add_acquire atomic64_fetch_add
#define atomic64_fetch_add_release atomic64_fetch_add
#define atomic64_fetch_add_relaxed atomic64_fetch_add
#else /* atomic64_fetch_add_relaxed */

#ifndef atomic64_fetch_add_acquire
/* acquire = relaxed op followed by an acquire fence. */
static __always_inline s64
atomic64_fetch_add_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_fetch_add_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_add_acquire atomic64_fetch_add_acquire
#endif

#ifndef atomic64_fetch_add_release
/* release = release fence preceding the relaxed op. */
static __always_inline s64
atomic64_fetch_add_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_add_relaxed(i, v);
}
#define atomic64_fetch_add_release atomic64_fetch_add_release
#endif

#ifndef atomic64_fetch_add
/* fully ordered = relaxed op bracketed by full fences. */
static __always_inline s64
atomic64_fetch_add(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_add_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_add atomic64_fetch_add
#endif

#endif /* atomic64_fetch_add_relaxed */
1423*4882a593Smuzhiyun
#define arch_atomic64_sub atomic64_sub

#define arch_atomic64_sub_return atomic64_sub_return
#define arch_atomic64_sub_return_acquire atomic64_sub_return_acquire
#define arch_atomic64_sub_return_release atomic64_sub_return_release
#define arch_atomic64_sub_return_relaxed atomic64_sub_return_relaxed

#ifndef atomic64_sub_return_relaxed
/* Only the fully-ordered op exists; it satisfies the weaker orderings. */
#define atomic64_sub_return_acquire atomic64_sub_return
#define atomic64_sub_return_release atomic64_sub_return
#define atomic64_sub_return_relaxed atomic64_sub_return
#else /* atomic64_sub_return_relaxed */

#ifndef atomic64_sub_return_acquire
/* acquire = relaxed op followed by an acquire fence. */
static __always_inline s64
atomic64_sub_return_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_sub_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_sub_return_acquire atomic64_sub_return_acquire
#endif

#ifndef atomic64_sub_return_release
/* release = release fence preceding the relaxed op. */
static __always_inline s64
atomic64_sub_return_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_sub_return_relaxed(i, v);
}
#define atomic64_sub_return_release atomic64_sub_return_release
#endif

#ifndef atomic64_sub_return
/* fully ordered = relaxed op bracketed by full fences. */
static __always_inline s64
atomic64_sub_return(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_sub_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_sub_return atomic64_sub_return
#endif

#endif /* atomic64_sub_return_relaxed */
1472*4882a593Smuzhiyun
#define arch_atomic64_fetch_sub atomic64_fetch_sub
#define arch_atomic64_fetch_sub_acquire atomic64_fetch_sub_acquire
#define arch_atomic64_fetch_sub_release atomic64_fetch_sub_release
#define arch_atomic64_fetch_sub_relaxed atomic64_fetch_sub_relaxed

#ifndef atomic64_fetch_sub_relaxed
/* Only the fully-ordered op exists; it satisfies the weaker orderings. */
#define atomic64_fetch_sub_acquire atomic64_fetch_sub
#define atomic64_fetch_sub_release atomic64_fetch_sub
#define atomic64_fetch_sub_relaxed atomic64_fetch_sub
#else /* atomic64_fetch_sub_relaxed */

#ifndef atomic64_fetch_sub_acquire
/* acquire = relaxed op followed by an acquire fence. */
static __always_inline s64
atomic64_fetch_sub_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_fetch_sub_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_sub_acquire atomic64_fetch_sub_acquire
#endif

#ifndef atomic64_fetch_sub_release
/* release = release fence preceding the relaxed op. */
static __always_inline s64
atomic64_fetch_sub_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_sub_relaxed(i, v);
}
#define atomic64_fetch_sub_release atomic64_fetch_sub_release
#endif

#ifndef atomic64_fetch_sub
/* fully ordered = relaxed op bracketed by full fences. */
static __always_inline s64
atomic64_fetch_sub(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_sub_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_sub atomic64_fetch_sub
#endif

#endif /* atomic64_fetch_sub_relaxed */
1519*4882a593Smuzhiyun
#define arch_atomic64_inc atomic64_inc

#ifndef atomic64_inc
/* Fallback: increment is an add of 1. */
static __always_inline void
atomic64_inc(atomic64_t *v)
{
	atomic64_add(1, v);
}
#define atomic64_inc atomic64_inc
#endif
1530*4882a593Smuzhiyun
#define arch_atomic64_inc_return atomic64_inc_return
#define arch_atomic64_inc_return_acquire atomic64_inc_return_acquire
#define arch_atomic64_inc_return_release atomic64_inc_return_release
#define arch_atomic64_inc_return_relaxed atomic64_inc_return_relaxed

#ifndef atomic64_inc_return_relaxed
#ifdef atomic64_inc_return
/* Only the fully-ordered op exists; it satisfies the weaker orderings. */
#define atomic64_inc_return_acquire atomic64_inc_return
#define atomic64_inc_return_release atomic64_inc_return
#define atomic64_inc_return_relaxed atomic64_inc_return
#endif /* atomic64_inc_return */

/* No arch inc_return at all: derive every variant from add_return(1, v). */

#ifndef atomic64_inc_return
static __always_inline s64
atomic64_inc_return(atomic64_t *v)
{
	return atomic64_add_return(1, v);
}
#define atomic64_inc_return atomic64_inc_return
#endif

#ifndef atomic64_inc_return_acquire
static __always_inline s64
atomic64_inc_return_acquire(atomic64_t *v)
{
	return atomic64_add_return_acquire(1, v);
}
#define atomic64_inc_return_acquire atomic64_inc_return_acquire
#endif

#ifndef atomic64_inc_return_release
static __always_inline s64
atomic64_inc_return_release(atomic64_t *v)
{
	return atomic64_add_return_release(1, v);
}
#define atomic64_inc_return_release atomic64_inc_return_release
#endif

#ifndef atomic64_inc_return_relaxed
static __always_inline s64
atomic64_inc_return_relaxed(atomic64_t *v)
{
	return atomic64_add_return_relaxed(1, v);
}
#define atomic64_inc_return_relaxed atomic64_inc_return_relaxed
#endif

#else /* atomic64_inc_return_relaxed */

#ifndef atomic64_inc_return_acquire
/* acquire = relaxed op followed by an acquire fence. */
static __always_inline s64
atomic64_inc_return_acquire(atomic64_t *v)
{
	s64 ret = atomic64_inc_return_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_inc_return_acquire atomic64_inc_return_acquire
#endif

#ifndef atomic64_inc_return_release
/* release = release fence preceding the relaxed op. */
static __always_inline s64
atomic64_inc_return_release(atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_inc_return_relaxed(v);
}
#define atomic64_inc_return_release atomic64_inc_return_release
#endif

#ifndef atomic64_inc_return
/* fully ordered = relaxed op bracketed by full fences. */
static __always_inline s64
atomic64_inc_return(atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_inc_return_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_inc_return atomic64_inc_return
#endif

#endif /* atomic64_inc_return_relaxed */
1616*4882a593Smuzhiyun
#define arch_atomic64_fetch_inc atomic64_fetch_inc
#define arch_atomic64_fetch_inc_acquire atomic64_fetch_inc_acquire
#define arch_atomic64_fetch_inc_release atomic64_fetch_inc_release
#define arch_atomic64_fetch_inc_relaxed atomic64_fetch_inc_relaxed

#ifndef atomic64_fetch_inc_relaxed
#ifdef atomic64_fetch_inc
/* Only the fully-ordered op exists; it satisfies the weaker orderings. */
#define atomic64_fetch_inc_acquire atomic64_fetch_inc
#define atomic64_fetch_inc_release atomic64_fetch_inc
#define atomic64_fetch_inc_relaxed atomic64_fetch_inc
#endif /* atomic64_fetch_inc */

/* No arch fetch_inc at all: derive every variant from fetch_add(1, v). */

#ifndef atomic64_fetch_inc
static __always_inline s64
atomic64_fetch_inc(atomic64_t *v)
{
	return atomic64_fetch_add(1, v);
}
#define atomic64_fetch_inc atomic64_fetch_inc
#endif

#ifndef atomic64_fetch_inc_acquire
static __always_inline s64
atomic64_fetch_inc_acquire(atomic64_t *v)
{
	return atomic64_fetch_add_acquire(1, v);
}
#define atomic64_fetch_inc_acquire atomic64_fetch_inc_acquire
#endif

#ifndef atomic64_fetch_inc_release
static __always_inline s64
atomic64_fetch_inc_release(atomic64_t *v)
{
	return atomic64_fetch_add_release(1, v);
}
#define atomic64_fetch_inc_release atomic64_fetch_inc_release
#endif

#ifndef atomic64_fetch_inc_relaxed
static __always_inline s64
atomic64_fetch_inc_relaxed(atomic64_t *v)
{
	return atomic64_fetch_add_relaxed(1, v);
}
#define atomic64_fetch_inc_relaxed atomic64_fetch_inc_relaxed
#endif

#else /* atomic64_fetch_inc_relaxed */

#ifndef atomic64_fetch_inc_acquire
/* acquire = relaxed op followed by an acquire fence. */
static __always_inline s64
atomic64_fetch_inc_acquire(atomic64_t *v)
{
	s64 ret = atomic64_fetch_inc_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_inc_acquire atomic64_fetch_inc_acquire
#endif

#ifndef atomic64_fetch_inc_release
/* release = release fence preceding the relaxed op. */
static __always_inline s64
atomic64_fetch_inc_release(atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_inc_relaxed(v);
}
#define atomic64_fetch_inc_release atomic64_fetch_inc_release
#endif

#ifndef atomic64_fetch_inc
/* fully ordered = relaxed op bracketed by full fences. */
static __always_inline s64
atomic64_fetch_inc(atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_inc_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_inc atomic64_fetch_inc
#endif

#endif /* atomic64_fetch_inc_relaxed */
1702*4882a593Smuzhiyun
#define arch_atomic64_dec atomic64_dec

#ifndef atomic64_dec
/* Fallback: decrement is a subtract of 1. */
static __always_inline void
atomic64_dec(atomic64_t *v)
{
	atomic64_sub(1, v);
}
#define atomic64_dec atomic64_dec
#endif
1713*4882a593Smuzhiyun
#define arch_atomic64_dec_return atomic64_dec_return
#define arch_atomic64_dec_return_acquire atomic64_dec_return_acquire
#define arch_atomic64_dec_return_release atomic64_dec_return_release
#define arch_atomic64_dec_return_relaxed atomic64_dec_return_relaxed

#ifndef atomic64_dec_return_relaxed
#ifdef atomic64_dec_return
/* Only the fully-ordered op exists; it satisfies the weaker orderings. */
#define atomic64_dec_return_acquire atomic64_dec_return
#define atomic64_dec_return_release atomic64_dec_return
#define atomic64_dec_return_relaxed atomic64_dec_return
#endif /* atomic64_dec_return */

/* No arch dec_return at all: derive every variant from sub_return(1, v). */

#ifndef atomic64_dec_return
static __always_inline s64
atomic64_dec_return(atomic64_t *v)
{
	return atomic64_sub_return(1, v);
}
#define atomic64_dec_return atomic64_dec_return
#endif

#ifndef atomic64_dec_return_acquire
static __always_inline s64
atomic64_dec_return_acquire(atomic64_t *v)
{
	return atomic64_sub_return_acquire(1, v);
}
#define atomic64_dec_return_acquire atomic64_dec_return_acquire
#endif

#ifndef atomic64_dec_return_release
static __always_inline s64
atomic64_dec_return_release(atomic64_t *v)
{
	return atomic64_sub_return_release(1, v);
}
#define atomic64_dec_return_release atomic64_dec_return_release
#endif

#ifndef atomic64_dec_return_relaxed
static __always_inline s64
atomic64_dec_return_relaxed(atomic64_t *v)
{
	return atomic64_sub_return_relaxed(1, v);
}
#define atomic64_dec_return_relaxed atomic64_dec_return_relaxed
#endif

#else /* atomic64_dec_return_relaxed */

#ifndef atomic64_dec_return_acquire
/* acquire = relaxed op followed by an acquire fence. */
static __always_inline s64
atomic64_dec_return_acquire(atomic64_t *v)
{
	s64 ret = atomic64_dec_return_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_dec_return_acquire atomic64_dec_return_acquire
#endif

#ifndef atomic64_dec_return_release
/* release = release fence preceding the relaxed op. */
static __always_inline s64
atomic64_dec_return_release(atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_dec_return_relaxed(v);
}
#define atomic64_dec_return_release atomic64_dec_return_release
#endif

#ifndef atomic64_dec_return
/* fully ordered = relaxed op bracketed by full fences. */
static __always_inline s64
atomic64_dec_return(atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_dec_return_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_dec_return atomic64_dec_return
#endif

#endif /* atomic64_dec_return_relaxed */
1799*4882a593Smuzhiyun
#define arch_atomic64_fetch_dec atomic64_fetch_dec
#define arch_atomic64_fetch_dec_acquire atomic64_fetch_dec_acquire
#define arch_atomic64_fetch_dec_release atomic64_fetch_dec_release
#define arch_atomic64_fetch_dec_relaxed atomic64_fetch_dec_relaxed

#ifndef atomic64_fetch_dec_relaxed
#ifdef atomic64_fetch_dec
/* Only the fully-ordered op exists; it satisfies the weaker orderings. */
#define atomic64_fetch_dec_acquire atomic64_fetch_dec
#define atomic64_fetch_dec_release atomic64_fetch_dec
#define atomic64_fetch_dec_relaxed atomic64_fetch_dec
#endif /* atomic64_fetch_dec */

/* No arch fetch_dec at all: derive every variant from fetch_sub(1, v). */

#ifndef atomic64_fetch_dec
static __always_inline s64
atomic64_fetch_dec(atomic64_t *v)
{
	return atomic64_fetch_sub(1, v);
}
#define atomic64_fetch_dec atomic64_fetch_dec
#endif

#ifndef atomic64_fetch_dec_acquire
static __always_inline s64
atomic64_fetch_dec_acquire(atomic64_t *v)
{
	return atomic64_fetch_sub_acquire(1, v);
}
#define atomic64_fetch_dec_acquire atomic64_fetch_dec_acquire
#endif

#ifndef atomic64_fetch_dec_release
static __always_inline s64
atomic64_fetch_dec_release(atomic64_t *v)
{
	return atomic64_fetch_sub_release(1, v);
}
#define atomic64_fetch_dec_release atomic64_fetch_dec_release
#endif

#ifndef atomic64_fetch_dec_relaxed
static __always_inline s64
atomic64_fetch_dec_relaxed(atomic64_t *v)
{
	return atomic64_fetch_sub_relaxed(1, v);
}
#define atomic64_fetch_dec_relaxed atomic64_fetch_dec_relaxed
#endif

#else /* atomic64_fetch_dec_relaxed */

#ifndef atomic64_fetch_dec_acquire
/* acquire = relaxed op followed by an acquire fence. */
static __always_inline s64
atomic64_fetch_dec_acquire(atomic64_t *v)
{
	s64 ret = atomic64_fetch_dec_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_dec_acquire atomic64_fetch_dec_acquire
#endif

#ifndef atomic64_fetch_dec_release
/* release = release fence preceding the relaxed op. */
static __always_inline s64
atomic64_fetch_dec_release(atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_dec_relaxed(v);
}
#define atomic64_fetch_dec_release atomic64_fetch_dec_release
#endif

#ifndef atomic64_fetch_dec
/* fully ordered = relaxed op bracketed by full fences. */
static __always_inline s64
atomic64_fetch_dec(atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_dec_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_dec atomic64_fetch_dec
#endif

#endif /* atomic64_fetch_dec_relaxed */
1885*4882a593Smuzhiyun
#define arch_atomic64_and atomic64_and

#define arch_atomic64_fetch_and atomic64_fetch_and
#define arch_atomic64_fetch_and_acquire atomic64_fetch_and_acquire
#define arch_atomic64_fetch_and_release atomic64_fetch_and_release
#define arch_atomic64_fetch_and_relaxed atomic64_fetch_and_relaxed

#ifndef atomic64_fetch_and_relaxed
/* Only the fully-ordered op exists; it satisfies the weaker orderings. */
#define atomic64_fetch_and_acquire atomic64_fetch_and
#define atomic64_fetch_and_release atomic64_fetch_and
#define atomic64_fetch_and_relaxed atomic64_fetch_and
#else /* atomic64_fetch_and_relaxed */

#ifndef atomic64_fetch_and_acquire
/* acquire = relaxed op followed by an acquire fence. */
static __always_inline s64
atomic64_fetch_and_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_fetch_and_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_and_acquire atomic64_fetch_and_acquire
#endif

#ifndef atomic64_fetch_and_release
/* release = release fence preceding the relaxed op. */
static __always_inline s64
atomic64_fetch_and_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_and_relaxed(i, v);
}
#define atomic64_fetch_and_release atomic64_fetch_and_release
#endif

#ifndef atomic64_fetch_and
/* fully ordered = relaxed op bracketed by full fences. */
static __always_inline s64
atomic64_fetch_and(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_and_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_and atomic64_fetch_and
#endif

#endif /* atomic64_fetch_and_relaxed */
1934*4882a593Smuzhiyun
#define arch_atomic64_andnot atomic64_andnot

#ifndef atomic64_andnot
/* Fallback: and-not is simply AND with the complemented operand. */
static __always_inline void
atomic64_andnot(s64 i, atomic64_t *v)
{
	atomic64_and(~i, v);
}
#define atomic64_andnot atomic64_andnot
#endif
1945*4882a593Smuzhiyun
#define arch_atomic64_fetch_andnot atomic64_fetch_andnot
#define arch_atomic64_fetch_andnot_acquire atomic64_fetch_andnot_acquire
#define arch_atomic64_fetch_andnot_release atomic64_fetch_andnot_release
#define arch_atomic64_fetch_andnot_relaxed atomic64_fetch_andnot_relaxed

#ifndef atomic64_fetch_andnot_relaxed
#ifdef atomic64_fetch_andnot
/* Only the fully-ordered op exists: reuse it for every weaker ordering. */
#define atomic64_fetch_andnot_acquire atomic64_fetch_andnot
#define atomic64_fetch_andnot_release atomic64_fetch_andnot
#define atomic64_fetch_andnot_relaxed atomic64_fetch_andnot
#endif /* atomic64_fetch_andnot */

#ifndef atomic64_fetch_andnot
/* Fallback: fetch_and with the operand inverted, same ordering. */
static __always_inline s64
atomic64_fetch_andnot(s64 i, atomic64_t *v)
{
	return atomic64_fetch_and(~i, v);
}
#define atomic64_fetch_andnot atomic64_fetch_andnot
#endif

#ifndef atomic64_fetch_andnot_acquire
static __always_inline s64
atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
{
	return atomic64_fetch_and_acquire(~i, v);
}
#define atomic64_fetch_andnot_acquire atomic64_fetch_andnot_acquire
#endif

#ifndef atomic64_fetch_andnot_release
static __always_inline s64
atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
{
	return atomic64_fetch_and_release(~i, v);
}
#define atomic64_fetch_andnot_release atomic64_fetch_andnot_release
#endif

#ifndef atomic64_fetch_andnot_relaxed
static __always_inline s64
atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v)
{
	return atomic64_fetch_and_relaxed(~i, v);
}
#define atomic64_fetch_andnot_relaxed atomic64_fetch_andnot_relaxed
#endif

#else /* atomic64_fetch_andnot_relaxed */

#ifndef atomic64_fetch_andnot_acquire
/* Acquire: relaxed op first, then an acquire fence. */
static __always_inline s64
atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_fetch_andnot_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_andnot_acquire atomic64_fetch_andnot_acquire
#endif

#ifndef atomic64_fetch_andnot_release
/* Release: a release fence, then the relaxed op. */
static __always_inline s64
atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_andnot_relaxed(i, v);
}
#define atomic64_fetch_andnot_release atomic64_fetch_andnot_release
#endif

#ifndef atomic64_fetch_andnot
/* Fully ordered: bracket the relaxed op with full fences. */
static __always_inline s64
atomic64_fetch_andnot(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_andnot_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_andnot atomic64_fetch_andnot
#endif

#endif /* atomic64_fetch_andnot_relaxed */
2031*4882a593Smuzhiyun
#define arch_atomic64_or atomic64_or

#define arch_atomic64_fetch_or atomic64_fetch_or
#define arch_atomic64_fetch_or_acquire atomic64_fetch_or_acquire
#define arch_atomic64_fetch_or_release atomic64_fetch_or_release
#define arch_atomic64_fetch_or_relaxed atomic64_fetch_or_relaxed

#ifndef atomic64_fetch_or_relaxed
/* No relaxed form: the fully-ordered op satisfies every weaker ordering. */
#define atomic64_fetch_or_acquire atomic64_fetch_or
#define atomic64_fetch_or_release atomic64_fetch_or
#define atomic64_fetch_or_relaxed atomic64_fetch_or
#else /* atomic64_fetch_or_relaxed */

#ifndef atomic64_fetch_or_acquire
/* Acquire: relaxed op first, then an acquire fence. */
static __always_inline s64
atomic64_fetch_or_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_fetch_or_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_or_acquire atomic64_fetch_or_acquire
#endif

#ifndef atomic64_fetch_or_release
/* Release: a release fence, then the relaxed op. */
static __always_inline s64
atomic64_fetch_or_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_or_relaxed(i, v);
}
#define atomic64_fetch_or_release atomic64_fetch_or_release
#endif

#ifndef atomic64_fetch_or
/* Fully ordered: bracket the relaxed op with full fences. */
static __always_inline s64
atomic64_fetch_or(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_or_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_or atomic64_fetch_or
#endif

#endif /* atomic64_fetch_or_relaxed */
2080*4882a593Smuzhiyun
#define arch_atomic64_xor atomic64_xor

#define arch_atomic64_fetch_xor atomic64_fetch_xor
#define arch_atomic64_fetch_xor_acquire atomic64_fetch_xor_acquire
#define arch_atomic64_fetch_xor_release atomic64_fetch_xor_release
#define arch_atomic64_fetch_xor_relaxed atomic64_fetch_xor_relaxed

#ifndef atomic64_fetch_xor_relaxed
/* No relaxed form: the fully-ordered op satisfies every weaker ordering. */
#define atomic64_fetch_xor_acquire atomic64_fetch_xor
#define atomic64_fetch_xor_release atomic64_fetch_xor
#define atomic64_fetch_xor_relaxed atomic64_fetch_xor
#else /* atomic64_fetch_xor_relaxed */

#ifndef atomic64_fetch_xor_acquire
/* Acquire: relaxed op first, then an acquire fence. */
static __always_inline s64
atomic64_fetch_xor_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_fetch_xor_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_xor_acquire atomic64_fetch_xor_acquire
#endif

#ifndef atomic64_fetch_xor_release
/* Release: a release fence, then the relaxed op. */
static __always_inline s64
atomic64_fetch_xor_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_xor_relaxed(i, v);
}
#define atomic64_fetch_xor_release atomic64_fetch_xor_release
#endif

#ifndef atomic64_fetch_xor
/* Fully ordered: bracket the relaxed op with full fences. */
static __always_inline s64
atomic64_fetch_xor(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_xor_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_xor atomic64_fetch_xor
#endif

#endif /* atomic64_fetch_xor_relaxed */
2129*4882a593Smuzhiyun
#define arch_atomic64_xchg atomic64_xchg
#define arch_atomic64_xchg_acquire atomic64_xchg_acquire
#define arch_atomic64_xchg_release atomic64_xchg_release
#define arch_atomic64_xchg_relaxed atomic64_xchg_relaxed

#ifndef atomic64_xchg_relaxed
/* No relaxed form: the fully-ordered op satisfies every weaker ordering. */
#define atomic64_xchg_acquire atomic64_xchg
#define atomic64_xchg_release atomic64_xchg
#define atomic64_xchg_relaxed atomic64_xchg
#else /* atomic64_xchg_relaxed */

#ifndef atomic64_xchg_acquire
/* Acquire exchange: relaxed xchg first, then an acquire fence. */
static __always_inline s64
atomic64_xchg_acquire(atomic64_t *v, s64 i)
{
	s64 ret = atomic64_xchg_relaxed(v, i);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_xchg_acquire atomic64_xchg_acquire
#endif

#ifndef atomic64_xchg_release
/* Release exchange: a release fence, then the relaxed xchg. */
static __always_inline s64
atomic64_xchg_release(atomic64_t *v, s64 i)
{
	__atomic_release_fence();
	return atomic64_xchg_relaxed(v, i);
}
#define atomic64_xchg_release atomic64_xchg_release
#endif

#ifndef atomic64_xchg
/* Fully ordered exchange: bracket the relaxed xchg with full fences. */
static __always_inline s64
atomic64_xchg(atomic64_t *v, s64 i)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_xchg_relaxed(v, i);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_xchg atomic64_xchg
#endif

#endif /* atomic64_xchg_relaxed */
2176*4882a593Smuzhiyun
#define arch_atomic64_cmpxchg atomic64_cmpxchg
#define arch_atomic64_cmpxchg_acquire atomic64_cmpxchg_acquire
#define arch_atomic64_cmpxchg_release atomic64_cmpxchg_release
#define arch_atomic64_cmpxchg_relaxed atomic64_cmpxchg_relaxed

#ifndef atomic64_cmpxchg_relaxed
/* No relaxed form: the fully-ordered op satisfies every weaker ordering. */
#define atomic64_cmpxchg_acquire atomic64_cmpxchg
#define atomic64_cmpxchg_release atomic64_cmpxchg
#define atomic64_cmpxchg_relaxed atomic64_cmpxchg
#else /* atomic64_cmpxchg_relaxed */

#ifndef atomic64_cmpxchg_acquire
/* Acquire cmpxchg: relaxed cmpxchg first, then an acquire fence. */
static __always_inline s64
atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new)
{
	s64 ret = atomic64_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_cmpxchg_acquire atomic64_cmpxchg_acquire
#endif

#ifndef atomic64_cmpxchg_release
/* Release cmpxchg: a release fence, then the relaxed cmpxchg. */
static __always_inline s64
atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new)
{
	__atomic_release_fence();
	return atomic64_cmpxchg_relaxed(v, old, new);
}
#define atomic64_cmpxchg_release atomic64_cmpxchg_release
#endif

#ifndef atomic64_cmpxchg
/* Fully ordered cmpxchg: bracket the relaxed cmpxchg with full fences. */
static __always_inline s64
atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_cmpxchg atomic64_cmpxchg
#endif

#endif /* atomic64_cmpxchg_relaxed */
2223*4882a593Smuzhiyun
#define arch_atomic64_try_cmpxchg atomic64_try_cmpxchg
#define arch_atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg_acquire
#define arch_atomic64_try_cmpxchg_release atomic64_try_cmpxchg_release
#define arch_atomic64_try_cmpxchg_relaxed atomic64_try_cmpxchg_relaxed

#ifndef atomic64_try_cmpxchg_relaxed
#ifdef atomic64_try_cmpxchg
/* Only the fully-ordered op exists: reuse it for every weaker ordering. */
#define atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_release atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_relaxed atomic64_try_cmpxchg
#endif /* atomic64_try_cmpxchg */

#ifndef atomic64_try_cmpxchg
/*
 * Fallback built on cmpxchg(): returns true if the swap happened; on
 * failure, writes the value actually observed back to *old so callers
 * can retry without re-reading @v.
 */
static __always_inline bool
atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
	s64 r, o = *old;
	r = atomic64_cmpxchg(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define atomic64_try_cmpxchg atomic64_try_cmpxchg
#endif

#ifndef atomic64_try_cmpxchg_acquire
/* As atomic64_try_cmpxchg(), with acquire ordering from cmpxchg_acquire(). */
static __always_inline bool
atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
{
	s64 r, o = *old;
	r = atomic64_cmpxchg_acquire(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg_acquire
#endif

#ifndef atomic64_try_cmpxchg_release
/* As atomic64_try_cmpxchg(), with release ordering from cmpxchg_release(). */
static __always_inline bool
atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
{
	s64 r, o = *old;
	r = atomic64_cmpxchg_release(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define atomic64_try_cmpxchg_release atomic64_try_cmpxchg_release
#endif

#ifndef atomic64_try_cmpxchg_relaxed
/* As atomic64_try_cmpxchg(), with no ordering from cmpxchg_relaxed(). */
static __always_inline bool
atomic64_try_cmpxchg_relaxed(atomic64_t *v, s64 *old, s64 new)
{
	s64 r, o = *old;
	r = atomic64_cmpxchg_relaxed(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define atomic64_try_cmpxchg_relaxed atomic64_try_cmpxchg_relaxed
#endif

#else /* atomic64_try_cmpxchg_relaxed */

#ifndef atomic64_try_cmpxchg_acquire
/* Acquire: relaxed op first, then an acquire fence. */
static __always_inline bool
atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
{
	bool ret = atomic64_try_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg_acquire
#endif

#ifndef atomic64_try_cmpxchg_release
/* Release: a release fence, then the relaxed op. */
static __always_inline bool
atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
{
	__atomic_release_fence();
	return atomic64_try_cmpxchg_relaxed(v, old, new);
}
#define atomic64_try_cmpxchg_release atomic64_try_cmpxchg_release
#endif

#ifndef atomic64_try_cmpxchg
/* Fully ordered: bracket the relaxed op with full fences. */
static __always_inline bool
atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
	bool ret;
	__atomic_pre_full_fence();
	ret = atomic64_try_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_try_cmpxchg atomic64_try_cmpxchg
#endif

#endif /* atomic64_try_cmpxchg_relaxed */
2325*4882a593Smuzhiyun
#define arch_atomic64_sub_and_test atomic64_sub_and_test

#ifndef atomic64_sub_and_test
/**
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static __always_inline bool
atomic64_sub_and_test(s64 i, atomic64_t *v)
{
	/* Built on the fully-ordered sub_return(). */
	return atomic64_sub_return(i, v) == 0;
}
#define atomic64_sub_and_test atomic64_sub_and_test
#endif
2345*4882a593Smuzhiyun
#define arch_atomic64_dec_and_test atomic64_dec_and_test

#ifndef atomic64_dec_and_test
/**
 * atomic64_dec_and_test - decrement and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static __always_inline bool
atomic64_dec_and_test(atomic64_t *v)
{
	/* Built on the fully-ordered dec_return(). */
	return atomic64_dec_return(v) == 0;
}
#define atomic64_dec_and_test atomic64_dec_and_test
#endif
2364*4882a593Smuzhiyun
#define arch_atomic64_inc_and_test atomic64_inc_and_test

#ifndef atomic64_inc_and_test
/**
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static __always_inline bool
atomic64_inc_and_test(atomic64_t *v)
{
	/* Built on the fully-ordered inc_return(). */
	return atomic64_inc_return(v) == 0;
}
#define atomic64_inc_and_test atomic64_inc_and_test
#endif
2383*4882a593Smuzhiyun
#define arch_atomic64_add_negative atomic64_add_negative

#ifndef atomic64_add_negative
/**
 * atomic64_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static __always_inline bool
atomic64_add_negative(s64 i, atomic64_t *v)
{
	/* Built on the fully-ordered add_return(). */
	return atomic64_add_return(i, v) < 0;
}
#define atomic64_add_negative atomic64_add_negative
#endif
2403*4882a593Smuzhiyun
#define arch_atomic64_fetch_add_unless atomic64_fetch_add_unless

#ifndef atomic64_fetch_add_unless
/**
 * atomic64_fetch_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns original value of @v
 */
static __always_inline s64
atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
{
	s64 c = atomic64_read(v);

	/*
	 * try_cmpxchg() updates @c with the current value on failure, so
	 * each iteration re-tests the freshly observed value against @u.
	 */
	do {
		if (unlikely(c == u))
			break;
	} while (!atomic64_try_cmpxchg(v, &c, c + a));

	return c;
}
#define atomic64_fetch_add_unless atomic64_fetch_add_unless
#endif
2430*4882a593Smuzhiyun
#define arch_atomic64_add_unless atomic64_add_unless

#ifndef atomic64_add_unless
/**
 * atomic64_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, if @v was not already @u.
 * Returns true if the addition was done.
 */
static __always_inline bool
atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
{
	/* fetch_add_unless() returns @u exactly when no addition happened. */
	return atomic64_fetch_add_unless(v, a, u) != u;
}
#define atomic64_add_unless atomic64_add_unless
#endif
2450*4882a593Smuzhiyun
#define arch_atomic64_inc_not_zero atomic64_inc_not_zero

#ifndef atomic64_inc_not_zero
/**
 * atomic64_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1, if @v is non-zero.
 * Returns true if the increment was done.
 */
static __always_inline bool
atomic64_inc_not_zero(atomic64_t *v)
{
	/* Special case of add_unless(): add 1 unless the value is 0. */
	return atomic64_add_unless(v, 1, 0);
}
#define atomic64_inc_not_zero atomic64_inc_not_zero
#endif
2468*4882a593Smuzhiyun
#define arch_atomic64_inc_unless_negative atomic64_inc_unless_negative

#ifndef atomic64_inc_unless_negative
/**
 * atomic64_inc_unless_negative - increment @v unless it is negative
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1, so long as @v is not negative.
 * Returns true if the increment was done.
 */
static __always_inline bool
atomic64_inc_unless_negative(atomic64_t *v)
{
	s64 c = atomic64_read(v);

	/* try_cmpxchg() refreshes @c on failure; re-check the sign each time. */
	do {
		if (unlikely(c < 0))
			return false;
	} while (!atomic64_try_cmpxchg(v, &c, c + 1));

	return true;
}
#define atomic64_inc_unless_negative atomic64_inc_unless_negative
#endif
2486*4882a593Smuzhiyun
#define arch_atomic64_dec_unless_positive atomic64_dec_unless_positive

#ifndef atomic64_dec_unless_positive
/**
 * atomic64_dec_unless_positive - decrement @v unless it is positive
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1, so long as @v is not greater than zero.
 * Returns true if the decrement was done.
 */
static __always_inline bool
atomic64_dec_unless_positive(atomic64_t *v)
{
	s64 c = atomic64_read(v);

	/* try_cmpxchg() refreshes @c on failure; re-check the sign each time. */
	do {
		if (unlikely(c > 0))
			return false;
	} while (!atomic64_try_cmpxchg(v, &c, c - 1));

	return true;
}
#define atomic64_dec_unless_positive atomic64_dec_unless_positive
#endif
2504*4882a593Smuzhiyun
#define arch_atomic64_dec_if_positive atomic64_dec_if_positive

#ifndef atomic64_dec_if_positive
/**
 * atomic64_dec_if_positive - decrement @v if the result stays non-negative
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 unless that would make it negative.
 * Returns the decremented value; if that value is negative, @v was
 * left unchanged.
 */
static __always_inline s64
atomic64_dec_if_positive(atomic64_t *v)
{
	s64 dec, c = atomic64_read(v);

	/* try_cmpxchg() refreshes @c on failure, so @dec is recomputed. */
	do {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
	} while (!atomic64_try_cmpxchg(v, &c, dec));

	return dec;
}
#define atomic64_dec_if_positive atomic64_dec_if_positive
#endif
2523*4882a593Smuzhiyun
2524*4882a593Smuzhiyun #endif /* _LINUX_ATOMIC_FALLBACK_H */
2525*4882a593Smuzhiyun // 9d95b56f98d82a2a26c7b79ccdd0c47572d50a6f
2526