#ifndef _ASM_X86_ATOMIC_H
#define _ASM_X86_ATOMIC_H

#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/processor.h>
#include <asm/alternative.h>
#include <asm/cmpxchg.h>

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 */

#define ATOMIC_INIT(i)	{ (i) }

/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
static inline int atomic_read(const atomic_t *v)
{
	return (*(volatile int *)&(v)->counter);
}

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
static inline void atomic_set(atomic_t *v, int i)
{
	v->counter = i;
}

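/*
 * Usage sketch (illustrative; the name nr_events is hypothetical):
 * declaring, reading and resetting a counter. Note that atomic_read()
 * is a single torn-free load and atomic_set() a plain store; neither
 * implies a memory barrier.
 *
 *	static atomic_t nr_events = ATOMIC_INIT(0);
 *
 *	int snapshot = atomic_read(&nr_events);
 *	atomic_set(&nr_events, 0);
 */
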
/**
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static inline void atomic_add(int i, atomic_t *v)
{
	asm volatile(LOCK_PREFIX "addl %1,%0"
		     : "+m" (v->counter)
		     : "ir" (i));
}

/**
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static inline void atomic_sub(int i, atomic_t *v)
{
	asm volatile(LOCK_PREFIX "subl %1,%0"
		     : "+m" (v->counter)
		     : "ir" (i));
}

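/*
 * Usage sketch (illustrative; nr_inflight is hypothetical): these two
 * are fire-and-forget counters. Neither call returns a value; use the
 * *_return or *_and_test variants below when the result matters.
 *
 *	static atomic_t nr_inflight = ATOMIC_INIT(0);
 *
 *	atomic_add(len, &nr_inflight);	// submission path
 *	atomic_sub(len, &nr_inflight);	// completion path
 */
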
/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic_sub_and_test(int i, atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "subl %2,%0; sete %1"
		     : "+m" (v->counter), "=qm" (c)
		     : "ir" (i) : "memory");
	return c;
}

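/*
 * Usage sketch (illustrative; obj and obj_free() are hypothetical):
 * drop several references in one locked operation and free on the
 * final one.
 *
 *	if (atomic_sub_and_test(nr_refs, &obj->refcount))
 *		obj_free(obj);
 */
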
/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
static inline void atomic_inc(atomic_t *v)
{
	asm volatile(LOCK_PREFIX "incl %0"
		     : "+m" (v->counter));
}

/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
static inline void atomic_dec(atomic_t *v)
{
	asm volatile(LOCK_PREFIX "decl %0"
		     : "+m" (v->counter));
}

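/*
 * Usage sketch (illustrative; obj is hypothetical): the "get" half of
 * reference counting. Pair with atomic_dec_and_test() below when the
 * zero crossing must be observed; bare atomic_dec() fits when it need
 * not be.
 *
 *	atomic_inc(&obj->refcount);	// take a reference
 *	atomic_dec(&obj->refcount);	// drop it, result unobserved
 */
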
/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static inline int atomic_dec_and_test(atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "decl %0; sete %1"
		     : "+m" (v->counter), "=qm" (c)
		     : : "memory");
	return c != 0;
}

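/*
 * Usage sketch (illustrative; struct my_obj and my_obj_free() are
 * hypothetical): the canonical "put" side of reference counting.
 * Exactly one CPU observes the transition to zero, so the free path
 * runs exactly once.
 *
 *	static void my_obj_put(struct my_obj *obj)
 *	{
 *		if (atomic_dec_and_test(&obj->refcount))
 *			my_obj_free(obj);
 *	}
 */
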
/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic_inc_and_test(atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "incl %0; sete %1"
		     : "+m" (v->counter), "=qm" (c)
		     : : "memory");
	return c != 0;
}

/**
 * atomic_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static inline int atomic_add_negative(int i, atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "addl %2,%0; sets %1"
		     : "+m" (v->counter), "=qm" (c)
		     : "ir" (i) : "memory");
	return c;
}

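/*
 * Usage sketch (illustrative; budget and throttle() are hypothetical):
 * charge against a signed budget and catch the overdraft in the same
 * locked operation.
 *
 *	if (atomic_add_negative(-cost, &budget))
 *		throttle();		// balance dropped below zero
 */
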
/**
 * atomic_add_return - add integer and return
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static inline int atomic_add_return(int i, atomic_t *v)
{
#ifdef CONFIG_M386
	int __i;
	unsigned long flags;
	if (unlikely(boot_cpu_data.x86 <= 3))
		goto no_xadd;
#endif
	/* Modern 486+ processor */
	return i + xadd(&v->counter, i);

#ifdef CONFIG_M386
no_xadd: /* Legacy 386 processor */
	raw_local_irq_save(flags);
	__i = atomic_read(v);
	atomic_set(v, i + __i);
	raw_local_irq_restore(flags);
	return i + __i;
#endif
}

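/*
 * Usage sketch (illustrative; seq is hypothetical): unlike atomic_add(),
 * the _return form hands back the post-operation value, so it can mint
 * unique sequence numbers.
 *
 *	static atomic_t seq = ATOMIC_INIT(0);
 *
 *	int ticket = atomic_add_return(1, &seq);	// same as atomic_inc_return(&seq)
 */
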
/**
 * atomic_sub_return - subtract integer and return
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns @v - @i
 */
static inline int atomic_sub_return(int i, atomic_t *v)
{
	return atomic_add_return(-i, v);
}

#define atomic_inc_return(v)  (atomic_add_return(1, v))
#define atomic_dec_return(v)  (atomic_sub_return(1, v))

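/*
 * Usage sketch (illustrative; nr_active and wake_waiters() are
 * hypothetical): the _return forms are handy when the new value decides
 * control flow.
 *
 *	if (atomic_dec_return(&nr_active) == 0)
 *		wake_waiters();		// last one out
 */
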
static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	return cmpxchg(&v->counter, old, new);
}

static inline int atomic_xchg(atomic_t *v, int new)
{
	return xchg(&v->counter, new);
}

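/*
 * Usage sketch (illustrative): the compare-and-swap retry loop that the
 * helpers below are built from, here doubling the counter atomically.
 *
 *	int old, prev;
 *
 *	old = atomic_read(v);
 *	for (;;) {
 *		prev = atomic_cmpxchg(v, old, old * 2);
 *		if (prev == old)
 *			break;		// swap succeeded
 *		old = prev;		// lost the race; retry with the fresh value
 *	}
 */
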
/**
 * __atomic_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns the old value of @v.
 */
static inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;
	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c;
}

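/*
 * Usage sketch (illustrative; obj and use_obj() are hypothetical): the
 * common consumer of this helper is an "increment unless already zero"
 * test, the pattern behind atomic_inc_not_zero(), useful when a count
 * of zero means the object is already being torn down.
 *
 *	if (__atomic_add_unless(&obj->refcount, 1, 0) != 0)
 *		use_obj(obj);		// got a reference
 */
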
/**
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 *
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */
static inline int atomic_dec_if_positive(atomic_t *v)
{
	int c, old, dec;
	c = atomic_read(v);
	for (;;) {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
		old = atomic_cmpxchg((v), c, dec);
		if (likely(old == c))
			break;
		c = old;
	}
	return dec;
}

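/*
 * Usage sketch (illustrative; tokens, do_work() and wait_for_token()
 * are hypothetical): a semaphore-like "take one if any remain". A
 * return value >= 0 means the decrement actually happened.
 *
 *	if (atomic_dec_if_positive(&tokens) >= 0)
 *		do_work();		// consumed one token
 *	else
 *		wait_for_token();	// count was already 0; nothing taken
 */
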
/**
 * atomic_inc_short - increment of a short integer
 * @v: pointer to type short int
 *
 * Atomically adds 1 to @v
 * Returns the new value of @v
 */
static inline short int atomic_inc_short(short int *v)
{
	asm(LOCK_PREFIX "addw $1, %0" : "+m" (*v));
	return *v;
}

#ifdef CONFIG_X86_64
/**
 * atomic_or_long - OR of two long integers
 * @v1: pointer to type unsigned long
 * @v2: unsigned long value to OR into *@v1
 *
 * Atomically ORs @v2 into *@v1.  Returns nothing.
 */
static inline void atomic_or_long(unsigned long *v1, unsigned long v2)
{
	asm(LOCK_PREFIX "orq %1, %0" : "+m" (*v1) : "r" (v2));
}
#endif

/* These are x86-specific, used by some header files */
#define atomic_clear_mask(mask, addr)				\
	asm volatile(LOCK_PREFIX "andl %0,%1"			\
		     : : "r" (~(mask)), "m" (*(addr)) : "memory")

#define atomic_set_mask(mask, addr)				\
	asm volatile(LOCK_PREFIX "orl %0,%1"			\
		     : : "r" ((unsigned)(mask)), "m" (*(addr))	\
		     : "memory")

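/*
 * Usage sketch (illustrative; status_flags is hypothetical): unlike the
 * atomic_t helpers above, these macros act on a plain 32-bit word.
 *
 *	static unsigned int status_flags;
 *
 *	atomic_set_mask(0x4, &status_flags);	// atomically set bit 2
 *	atomic_clear_mask(0x4, &status_flags);	// atomically clear bit 2
 */
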
/* Atomic operations are already serializing on x86 */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()

#ifdef CONFIG_X86_32
# include "atomic64_32.h"
#else
# include "atomic64_64.h"
#endif

#endif /* _ASM_X86_ATOMIC_H */