// SPDX-License-Identifier: GPL-2.0
/*
 * atomic32.c: 32-bit atomic_t implementation
 *
 * Copyright (C) 2004 Keith M Wesolowski
 * Copyright (C) 2007 Kyle McMartin
 *
 * Based on asm-parisc/atomic.h Copyright (C) 2000 Philipp Rumpf
 */

#include <linux/atomic.h>
#include <linux/spinlock.h>
#include <linux/module.h>

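/*
 * These out-of-line helpers emulate the atomic_t and bitop primitives with
 * spinlocks.  On SMP a small hash of locks, indexed by the operand's
 * address, spreads contention across unrelated variables; on UP a single
 * dummy lock suffices, since disabling IRQs around the critical section
 * already serializes every user.
 */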
#ifdef CONFIG_SMP
#define ATOMIC_HASH_SIZE	4
#define ATOMIC_HASH(a)	(&__atomic_hash[(((unsigned long)a)>>8) & (ATOMIC_HASH_SIZE-1)])

spinlock_t __atomic_hash[ATOMIC_HASH_SIZE] = {
	[0 ... (ATOMIC_HASH_SIZE-1)] = __SPIN_LOCK_UNLOCKED(__atomic_hash)
};

#else /* SMP */

static DEFINE_SPINLOCK(dummy);
#define ATOMIC_HASH_SIZE	1
#define ATOMIC_HASH(a)		(&dummy)

#endif /* SMP */

#define ATOMIC_FETCH_OP(op, c_op)					\
int atomic_fetch_##op(int i, atomic_t *v)				\
{									\
	int ret;							\
	unsigned long flags;						\
	spin_lock_irqsave(ATOMIC_HASH(v), flags);			\
									\
	ret = v->counter;						\
	v->counter c_op i;						\
									\
	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);			\
	return ret;							\
}									\
EXPORT_SYMBOL(atomic_fetch_##op);

#define ATOMIC_OP_RETURN(op, c_op)					\
int atomic_##op##_return(int i, atomic_t *v)				\
{									\
	int ret;							\
	unsigned long flags;						\
	spin_lock_irqsave(ATOMIC_HASH(v), flags);			\
									\
	ret = (v->counter c_op i);					\
									\
	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);			\
	return ret;							\
}									\
EXPORT_SYMBOL(atomic_##op##_return);

ATOMIC_OP_RETURN(add, +=)

ATOMIC_FETCH_OP(add, +=)
ATOMIC_FETCH_OP(and, &=)
ATOMIC_FETCH_OP(or, |=)
ATOMIC_FETCH_OP(xor, ^=)

#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
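
/*
 * For reference, ATOMIC_OP_RETURN(add, +=) above expands to roughly:
 *
 *	int atomic_add_return(int i, atomic_t *v)
 *	{
 *		int ret;
 *		unsigned long flags;
 *		spin_lock_irqsave(ATOMIC_HASH(v), flags);
 *		ret = (v->counter += i);
 *		spin_unlock_irqrestore(ATOMIC_HASH(v), flags);
 *		return ret;
 *	}
 *	EXPORT_SYMBOL(atomic_add_return);
 *
 * i.e. one out-of-line, lock-protected function per operation.
 */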

int atomic_xchg(atomic_t *v, int new)
{
	int ret;
	unsigned long flags;

	spin_lock_irqsave(ATOMIC_HASH(v), flags);
	ret = v->counter;
	v->counter = new;
	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);
	return ret;
}
EXPORT_SYMBOL(atomic_xchg);

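/*
 * cmpxchg-style primitives return the value that was read before the
 * (possibly skipped) store, so the caller compares the result against
 * 'old' to tell whether the exchange happened.
 */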
int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	int ret;
	unsigned long flags;

	spin_lock_irqsave(ATOMIC_HASH(v), flags);
	ret = v->counter;
	if (likely(ret == old))
		v->counter = new;

	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);
	return ret;
}
EXPORT_SYMBOL(atomic_cmpxchg);

int atomic_fetch_add_unless(atomic_t *v, int a, int u)
{
	int ret;
	unsigned long flags;

	spin_lock_irqsave(ATOMIC_HASH(v), flags);
	ret = v->counter;
	if (ret != u)
		v->counter += a;
	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);
	return ret;
}
EXPORT_SYMBOL(atomic_fetch_add_unless);

/* Atomic operations are already serializing */
void atomic_set(atomic_t *v, int i)
{
	unsigned long flags;

	spin_lock_irqsave(ATOMIC_HASH(v), flags);
	v->counter = i;
	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);
}
EXPORT_SYMBOL(atomic_set);

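/*
 * Bit helpers: each takes the address of the word containing the bit and a
 * pre-shifted mask, updates the word under the hashed lock, and returns the
 * previous value of the masked bits (nonzero iff a masked bit was set).
 */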
unsigned long ___set_bit(unsigned long *addr, unsigned long mask)
{
	unsigned long old, flags;

	spin_lock_irqsave(ATOMIC_HASH(addr), flags);
	old = *addr;
	*addr = old | mask;
	spin_unlock_irqrestore(ATOMIC_HASH(addr), flags);

	return old & mask;
}
EXPORT_SYMBOL(___set_bit);

unsigned long ___clear_bit(unsigned long *addr, unsigned long mask)
{
	unsigned long old, flags;

	spin_lock_irqsave(ATOMIC_HASH(addr), flags);
	old = *addr;
	*addr = old & ~mask;
	spin_unlock_irqrestore(ATOMIC_HASH(addr), flags);

	return old & mask;
}
EXPORT_SYMBOL(___clear_bit);

unsigned long ___change_bit(unsigned long *addr, unsigned long mask)
{
	unsigned long old, flags;

	spin_lock_irqsave(ATOMIC_HASH(addr), flags);
	old = *addr;
	*addr = old ^ mask;
	spin_unlock_irqrestore(ATOMIC_HASH(addr), flags);

	return old & mask;
}
EXPORT_SYMBOL(___change_bit);

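/*
 * Plain 32-bit and 64-bit compare-and-exchange / exchange on arbitrary
 * words, protected by the same hashed locks as the atomic_t operations
 * above.
 */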
unsigned long __cmpxchg_u32(volatile u32 *ptr, u32 old, u32 new)
{
	unsigned long flags;
	u32 prev;

	spin_lock_irqsave(ATOMIC_HASH(ptr), flags);
	if ((prev = *ptr) == old)
		*ptr = new;
	spin_unlock_irqrestore(ATOMIC_HASH(ptr), flags);

	return (unsigned long)prev;
}
EXPORT_SYMBOL(__cmpxchg_u32);

u64 __cmpxchg_u64(u64 *ptr, u64 old, u64 new)
{
	unsigned long flags;
	u64 prev;

	spin_lock_irqsave(ATOMIC_HASH(ptr), flags);
	if ((prev = *ptr) == old)
		*ptr = new;
	spin_unlock_irqrestore(ATOMIC_HASH(ptr), flags);

	return prev;
}
EXPORT_SYMBOL(__cmpxchg_u64);

unsigned long __xchg_u32(volatile u32 *ptr, u32 new)
{
	unsigned long flags;
	u32 prev;

	spin_lock_irqsave(ATOMIC_HASH(ptr), flags);
	prev = *ptr;
	*ptr = new;
	spin_unlock_irqrestore(ATOMIC_HASH(ptr), flags);

	return (unsigned long)prev;
}
EXPORT_SYMBOL(__xchg_u32);