/*
 * linux/include/asm-arm/atomic.h
 *
 * Copyright (c) 1996 Russell King.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * Changelog:
 *   27-06-1996	RMK	Created
 *   13-04-1997	RMK	Made functions atomic!
 *   07-12-1997	RMK	Upgraded for v2.1.
 *   26-08-1998	PJB	Added #ifdef __KERNEL__
 */
#ifndef __ASM_ARM_ATOMIC_H
#define __ASM_ARM_ATOMIC_H

#ifndef CONFIG_ARCH_ROCKCHIP
#ifdef CONFIG_SMP
#error SMP not supported
#endif
#endif

typedef struct { volatile int counter; } atomic_t;
#if BITS_PER_LONG == 32
typedef struct { volatile long long counter; } atomic64_t;
#else /* BITS_PER_LONG != 32 */
typedef struct { volatile long counter; } atomic64_t;
#endif

#define ATOMIC_INIT(i)	{ (i) }

#ifdef __KERNEL__
#include <asm/proc-armv/system.h>

/* Reads and writes of the counter are plain, unlocked accesses. */
#define atomic_read(v)		((v)->counter)
#define atomic_set(v, i)	(((v)->counter) = (i))
#define atomic64_read(v)	atomic_read(v)
#define atomic64_set(v, i)	atomic_set(v, i)
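
/*
 * Illustrative sketch only (not part of this header): declaring and
 * initializing a counter with ATOMIC_INIT and accessing it through
 * atomic_set()/atomic_read().  The identifier names are hypothetical.
 *
 *	static atomic_t nr_requests = ATOMIC_INIT(0);
 *	int pending;
 *
 *	atomic_set(&nr_requests, 16);
 *	pending = atomic_read(&nr_requests);
 */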

/*
 * Atomicity is provided by disabling interrupts around the
 * read-modify-write of the counter; this protects against local
 * interrupts only (uniprocessor semantics).
 */
static inline void atomic_add(int i, volatile atomic_t *v)
{
	unsigned long flags = 0;

	local_irq_save(flags);
	v->counter += i;
	local_irq_restore(flags);
}

static inline void atomic_sub(int i, volatile atomic_t *v)
{
	unsigned long flags = 0;

	local_irq_save(flags);
	v->counter -= i;
	local_irq_restore(flags);
}

static inline void atomic_inc(volatile atomic_t *v)
{
	unsigned long flags = 0;

	local_irq_save(flags);
	v->counter += 1;
	local_irq_restore(flags);
}

static inline void atomic_dec(volatile atomic_t *v)
{
	unsigned long flags = 0;

	local_irq_save(flags);
	v->counter -= 1;
	local_irq_restore(flags);
}

/* Decrement and return true (non-zero) if the result is zero. */
static inline int atomic_dec_and_test(volatile atomic_t *v)
{
	unsigned long flags = 0;
	int val;

	local_irq_save(flags);
	val = v->counter;
	v->counter = val -= 1;
	local_irq_restore(flags);

	return val == 0;
}

/* Add i and return true if the result is negative. */
static inline int atomic_add_negative(int i, volatile atomic_t *v)
{
	unsigned long flags = 0;
	int val;

	local_irq_save(flags);
	val = v->counter;
	v->counter = val += i;
	local_irq_restore(flags);

	return val < 0;
}

static inline void atomic_clear_mask(unsigned long mask, unsigned long *addr)
{
	unsigned long flags = 0;

	local_irq_save(flags);
	*addr &= ~mask;
	local_irq_restore(flags);
}
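
/*
 * Illustrative sketch only (not part of this header): a typical
 * reference-count pattern built on atomic_inc() and atomic_dec_and_test().
 * obj_get(), obj_put() and obj_free() are hypothetical helpers.
 *
 *	static atomic_t refcount = ATOMIC_INIT(1);
 *
 *	static void obj_get(void)
 *	{
 *		atomic_inc(&refcount);
 *	}
 *
 *	static void obj_put(void)
 *	{
 *		if (atomic_dec_and_test(&refcount))
 *			obj_free();
 *	}
 */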

#if BITS_PER_LONG == 32

static inline void atomic64_add(long long i, volatile atomic64_t *v)
{
	unsigned long flags = 0;

	local_irq_save(flags);
	v->counter += i;
	local_irq_restore(flags);
}

static inline void atomic64_sub(long long i, volatile atomic64_t *v)
{
	unsigned long flags = 0;

	local_irq_save(flags);
	v->counter -= i;
	local_irq_restore(flags);
}

#else /* BITS_PER_LONG != 32 */

static inline void atomic64_add(long i, volatile atomic64_t *v)
{
	unsigned long flags = 0;

	local_irq_save(flags);
	v->counter += i;
	local_irq_restore(flags);
}

static inline void atomic64_sub(long i, volatile atomic64_t *v)
{
	unsigned long flags = 0;

	local_irq_save(flags);
	v->counter -= i;
	local_irq_restore(flags);
}
#endif

static inline void atomic64_inc(volatile atomic64_t *v)
{
	unsigned long flags = 0;

	local_irq_save(flags);
	v->counter += 1;
	local_irq_restore(flags);
}

static inline void atomic64_dec(volatile atomic64_t *v)
{
	unsigned long flags = 0;

	local_irq_save(flags);
	v->counter -= 1;
	local_irq_restore(flags);
}

/* Atomic operations are already serializing on ARM */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()
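
/*
 * Illustrative sketch only (not part of this header): code that needs
 * ordering around an atomic decrement brackets it with the macros above,
 * which here reduce to compiler barriers.  'done' and 'pending' are
 * hypothetical variables.
 *
 *	done = 1;
 *	smp_mb__before_atomic_dec();
 *	atomic_dec(&pending);
 */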

#endif /* __KERNEL__ */
#endif /* __ASM_ARM_ATOMIC_H */