include/asm-x86/atomic_64.h
#ifndef __ARCH_X86_64_ATOMIC__
#define __ARCH_X86_64_ATOMIC__

#include <asm/alternative.h>
#include <asm/cmpxchg.h>

/* atomic_t should be a 32-bit signed type */

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 */

#ifdef CONFIG_SMP
#define LOCK "lock ; "
#else
#define LOCK ""
#endif
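
/*
 * Note: the operations below use LOCK_PREFIX from <asm/alternative.h>,
 * not this local LOCK macro.  LOCK_PREFIX records the location of each
 * lock prefix so the kernel can patch it out when running on a single
 * processor.
 */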

/*
 * Make sure gcc doesn't try to be clever and move things around
 * on us. We need to use _exactly_ the address the user gave us,
 * not some alias that contains the same information.
 */
typedef struct {
	int counter;
} atomic_t;

#define ATOMIC_INIT(i)	{ (i) }

/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)		((v)->counter)

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i)	(((v)->counter) = (i))
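
/*
 * Illustrative sketch (not part of the original header): declaring,
 * initializing and accessing an atomic counter.  Guarded out with
 * "#if 0" so the header is functionally unchanged; the names are made
 * up, and atomic_inc() is defined further down in this header.
 */
#if 0
static atomic_t example_count = ATOMIC_INIT(0);

static void example_usage(void)
{
	atomic_set(&example_count, 5);		/* counter is now 5 */
	if (atomic_read(&example_count) == 5)
		atomic_inc(&example_count);	/* counter is now 6 */
}
#endif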

/**
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static inline void atomic_add(int i, atomic_t *v)
{
	asm volatile(LOCK_PREFIX "addl %1,%0"
		     : "=m" (v->counter)
		     : "ir" (i), "m" (v->counter));
}

/**
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static inline void atomic_sub(int i, atomic_t *v)
{
	asm volatile(LOCK_PREFIX "subl %1,%0"
		     : "=m" (v->counter)
		     : "ir" (i), "m" (v->counter));
}

/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic_sub_and_test(int i, atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "subl %2,%0; sete %1"
		     : "=m" (v->counter), "=qm" (c)
		     : "ir" (i), "m" (v->counter) : "memory");
	return c;
}

/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
static inline void atomic_inc(atomic_t *v)
{
	asm volatile(LOCK_PREFIX "incl %0"
		     : "=m" (v->counter)
		     : "m" (v->counter));
}

/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
static inline void atomic_dec(atomic_t *v)
{
	asm volatile(LOCK_PREFIX "decl %0"
		     : "=m" (v->counter)
		     : "m" (v->counter));
}

/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static inline int atomic_dec_and_test(atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "decl %0; sete %1"
		     : "=m" (v->counter), "=qm" (c)
		     : "m" (v->counter) : "memory");
	return c != 0;
}
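
/*
 * Usage sketch (illustrative only): atomic_dec_and_test() is the
 * classic building block for reference counting - the thread that
 * drops the count to zero frees the object.  struct my_obj and
 * my_obj_put() are hypothetical names.
 */
#if 0
struct my_obj {
	atomic_t refcnt;
	/* ... payload ... */
};

static void my_obj_put(struct my_obj *obj)
{
	if (atomic_dec_and_test(&obj->refcnt))
		kfree(obj);
}
#endif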

/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic_inc_and_test(atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "incl %0; sete %1"
		     : "=m" (v->counter), "=qm" (c)
		     : "m" (v->counter) : "memory");
	return c != 0;
}

/**
 * atomic_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static inline int atomic_add_negative(int i, atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "addl %2,%0; sets %1"
		     : "=m" (v->counter), "=qm" (c)
		     : "ir" (i), "m" (v->counter) : "memory");
	return c;
}

/**
 * atomic_add_return - add and return
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static inline int atomic_add_return(int i, atomic_t *v)
{
	int __i = i;
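	/*
	 * xadd stores the sum in memory and leaves the previous value
	 * of v->counter in the register operand, so after the asm @i
	 * holds the old counter value; adding back __i yields the new
	 * value.
	 */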
	asm volatile(LOCK_PREFIX "xaddl %0, %1"
		     : "+r" (i), "+m" (v->counter)
		     : : "memory");
	return i + __i;
}

static inline int atomic_sub_return(int i, atomic_t *v)
{
	return atomic_add_return(-i, v);
}

#define atomic_inc_return(v)	(atomic_add_return(1, v))
#define atomic_dec_return(v)	(atomic_sub_return(1, v))

/* A 64-bit atomic type */

typedef struct {
	long counter;
} atomic64_t;

#define ATOMIC64_INIT(i)	{ (i) }

/**
 * atomic64_read - read atomic64 variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 * Doesn't imply a read memory barrier.
 */
#define atomic64_read(v)	((v)->counter)

/**
 * atomic64_set - set atomic64 variable
 * @v: pointer to type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic64_set(v, i)	(((v)->counter) = (i))

/**
 * atomic64_add - add integer to atomic64 variable
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static inline void atomic64_add(long i, atomic64_t *v)
{
	/*
	 * The immediate constraint on the q-suffixed instructions must
	 * be "e", not "i": addq/subq only accept a sign-extended
	 * 32-bit immediate, which is exactly what "e" guarantees,
	 * whereas "i" would let gcc pick an unencodable 64-bit
	 * immediate.
	 */
	asm volatile(LOCK_PREFIX "addq %1,%0"
		     : "=m" (v->counter)
		     : "er" (i), "m" (v->counter));
}

/**
 * atomic64_sub - subtract integer from atomic64 variable
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static inline void atomic64_sub(long i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "subq %1,%0"
		     : "=m" (v->counter)
		     : "er" (i), "m" (v->counter));
}

/**
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic64_sub_and_test(long i, atomic64_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "subq %2,%0; sete %1"
		     : "=m" (v->counter), "=qm" (c)
		     : "er" (i), "m" (v->counter) : "memory");
	return c;
}

/**
 * atomic64_inc - increment atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1.
 */
static inline void atomic64_inc(atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "incq %0"
		     : "=m" (v->counter)
		     : "m" (v->counter));
}

/**
 * atomic64_dec - decrement atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
static inline void atomic64_dec(atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "decq %0"
		     : "=m" (v->counter)
		     : "m" (v->counter));
}

/**
 * atomic64_dec_and_test - decrement and test
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static inline int atomic64_dec_and_test(atomic64_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "decq %0; sete %1"
		     : "=m" (v->counter), "=qm" (c)
		     : "m" (v->counter) : "memory");
	return c != 0;
}

/**
 * atomic64_inc_and_test - increment and test
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic64_inc_and_test(atomic64_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "incq %0; sete %1"
		     : "=m" (v->counter), "=qm" (c)
		     : "m" (v->counter) : "memory");
	return c != 0;
}

/**
 * atomic64_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static inline int atomic64_add_negative(long i, atomic64_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "addq %2,%0; sets %1"
		     : "=m" (v->counter), "=qm" (c)
		     : "er" (i), "m" (v->counter) : "memory");
	return c;
}

/**
 * atomic64_add_return - add and return
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static inline long atomic64_add_return(long i, atomic64_t *v)
{
	long __i = i;
	asm volatile(LOCK_PREFIX "xaddq %0, %1"
		     : "+r" (i), "+m" (v->counter)
		     : : "memory");
	return i + __i;
}

static inline long atomic64_sub_return(long i, atomic64_t *v)
{
	return atomic64_add_return(-i, v);
}

#define atomic64_inc_return(v)	(atomic64_add_return(1, (v)))
#define atomic64_dec_return(v)	(atomic64_sub_return(1, (v)))

#define atomic64_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), (old), (new)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), (new)))

#define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), (old), (new)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))
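
/*
 * Illustrative sketch (not part of the original header): the typical
 * atomic_cmpxchg() retry loop.  cmpxchg returns the value that was in
 * memory before the operation, so the update succeeded iff the return
 * value equals the expected old value.  atomic_add_unless() below is
 * exactly this pattern.
 */
#if 0
static void example_atomic_double(atomic_t *v)
{
	int old, new;

	do {
		old = atomic_read(v);
		new = old * 2;
	} while (atomic_cmpxchg(v, old, new) != old);
}
#endif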

/**
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;
	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}

#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
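
/*
 * Usage sketch (illustrative only): atomic_inc_not_zero() lets a
 * lockless lookup take a reference only while the object is still
 * live, i.e. its refcount has not already hit zero.  my_obj,
 * my_table_find() and the refcnt field are hypothetical.
 */
#if 0
static struct my_obj *my_obj_lookup_get(int key)
{
	struct my_obj *obj = my_table_find(key);

	if (obj && !atomic_inc_not_zero(&obj->refcnt))
		obj = NULL;	/* lost the race with the final put */
	return obj;
}
#endif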

/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static inline int atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long c, old;
	c = atomic64_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic64_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

/* These are x86-specific, used by some header files */
#define atomic_clear_mask(mask, addr)					\
	asm volatile(LOCK_PREFIX "andl %0,%1"				\
		     : : "r" (~(mask)), "m" (*(addr)) : "memory")

#define atomic_set_mask(mask, addr)					\
	asm volatile(LOCK_PREFIX "orl %0,%1"				\
		     : : "r" ((unsigned)(mask)), "m" (*(addr))		\
		     : "memory")
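
/*
 * Illustrative sketch (not part of the original header): unlike the
 * functions above, these mask helpers operate on a plain 32-bit word
 * rather than an atomic_t.  The flag value is made up.
 */
#if 0
static unsigned int example_flags;

static void example_mask_usage(void)
{
	atomic_set_mask(0x1, &example_flags);	/* set bit 0 */
	atomic_clear_mask(0x1, &example_flags);	/* clear bit 0 */
}
#endif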

/*
 * Atomic operations are already serializing on x86: a lock-prefixed
 * instruction acts as a full memory barrier, so these only need to
 * keep the compiler from reordering.
 */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()

#include <asm-generic/atomic.h>
#endif /* __ARCH_X86_64_ATOMIC__ */