/*
 * linux/include/asm-arm/atomic.h
 *
 * Copyright (c) 1996 Russell King.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * Changelog:
 *   27-06-1996	RMK	Created
 *   13-04-1997	RMK	Made functions atomic!
 *   07-12-1997	RMK	Upgraded for v2.1.
 *   26-08-1998	PJB	Added #ifdef __KERNEL__
 */
#ifndef __ASM_ARM_ATOMIC_H
#define __ASM_ARM_ATOMIC_H

#ifdef CONFIG_SMP
#error SMP not supported
#endif
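
/*
 * The operations below gain atomicity by masking interrupts around a
 * plain read-modify-write of the counter. That is sufficient only on
 * uniprocessor systems, hence the CONFIG_SMP check above.
 */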

typedef struct { volatile int counter; } atomic_t;
#if BITS_PER_LONG == 32
typedef struct { volatile long long counter; } atomic64_t;
#else /* BITS_PER_LONG != 32 */
typedef struct { volatile long counter; } atomic64_t;
#endif

#define ATOMIC_INIT(i)	{ (i) }

#ifdef __KERNEL__
#include <asm/proc-armv/system.h>

#define atomic_read(v)	((v)->counter)
#define atomic_set(v, i)	(((v)->counter) = (i))
#define atomic64_read(v)	atomic_read(v)
#define atomic64_set(v, i)	atomic_set(v, i)
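
/*
 * On a uniprocessor, a single aligned word access cannot be torn by an
 * interrupt, so atomic_read() and atomic_set() are plain accesses with
 * no interrupt masking. Note that atomic64_read()/atomic64_set() expand
 * to the same plain access, which is not a single-copy-atomic operation
 * for a 64-bit counter on 32-bit ARM.
 */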

static inline void atomic_add(int i, volatile atomic_t *v)
{
	unsigned long flags = 0;

	local_irq_save(flags);
	v->counter += i;
	local_irq_restore(flags);
}

static inline void atomic_sub(int i, volatile atomic_t *v)
{
	unsigned long flags = 0;

	local_irq_save(flags);
	v->counter -= i;
	local_irq_restore(flags);
}

static inline void atomic_inc(volatile atomic_t *v)
{
	unsigned long flags = 0;

	local_irq_save(flags);
	v->counter += 1;
	local_irq_restore(flags);
}

static inline void atomic_dec(volatile atomic_t *v)
{
	unsigned long flags = 0;

	local_irq_save(flags);
	v->counter -= 1;
	local_irq_restore(flags);
}

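/* Decrement v and return true if the result is zero. */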
static inline int atomic_dec_and_test(volatile atomic_t *v)
{
	unsigned long flags = 0;
	int val;

	local_irq_save(flags);
	val = v->counter;
	v->counter = val -= 1;
	local_irq_restore(flags);

	return val == 0;
}

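/* Add i to v and return true if the result is negative. */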
static inline int atomic_add_negative(int i, volatile atomic_t *v)
{
	unsigned long flags = 0;
	int val;

	local_irq_save(flags);
	val = v->counter;
	v->counter = val += i;
	local_irq_restore(flags);

	return val < 0;
}

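/* Atomically clear the bits given by mask in *addr. */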
static inline void atomic_clear_mask(unsigned long mask, unsigned long *addr)
{
	unsigned long flags = 0;

	local_irq_save(flags);
	*addr &= ~mask;
	local_irq_restore(flags);
}

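/*
 * 64-bit variants: the same interrupt-masking scheme, with the counter
 * width selected by BITS_PER_LONG.
 */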
#if BITS_PER_LONG == 32

static inline void atomic64_add(long long i, volatile atomic64_t *v)
{
	unsigned long flags = 0;

	local_irq_save(flags);
	v->counter += i;
	local_irq_restore(flags);
}

static inline void atomic64_sub(long long i, volatile atomic64_t *v)
{
	unsigned long flags = 0;

	local_irq_save(flags);
	v->counter -= i;
	local_irq_restore(flags);
}

#else /* BITS_PER_LONG != 32 */

static inline void atomic64_add(long i, volatile atomic64_t *v)
{
	unsigned long flags = 0;

	local_irq_save(flags);
	v->counter += i;
	local_irq_restore(flags);
}

static inline void atomic64_sub(long i, volatile atomic64_t *v)
{
	unsigned long flags = 0;

	local_irq_save(flags);
	v->counter -= i;
	local_irq_restore(flags);
}
#endif

static inline void atomic64_inc(volatile atomic64_t *v)
{
	unsigned long flags = 0;

	local_irq_save(flags);
	v->counter += 1;
	local_irq_restore(flags);
}

static inline void atomic64_dec(volatile atomic64_t *v)
{
	unsigned long flags = 0;

	local_irq_save(flags);
	v->counter -= 1;
	local_irq_restore(flags);
}

/* Atomic operations are already serializing on ARM */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()
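
/*
 * Minimal usage sketch (illustrative only; release_resource() is a
 * hypothetical cleanup hook, not part of this header):
 *
 *	static atomic_t refcount = ATOMIC_INIT(1);
 *
 *	atomic_inc(&refcount);			// take a reference
 *	if (atomic_dec_and_test(&refcount))	// drop it; true at zero
 *		release_resource();
 */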

#endif /* __KERNEL__ */
#endif /* __ASM_ARM_ATOMIC_H */