locking, mips: Fix atomics
[deliverable/linux.git] arch/mips/include/asm/atomic.h

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04, 06 by Ralf Baechle
 */
#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <linux/irqflags.h>
#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/cpu-features.h>
#include <asm/cmpxchg.h>
#include <asm/war.h>

#define ATOMIC_INIT(i)		{ (i) }

/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)		(*(volatile int *)&(v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i)	((v)->counter = (i))
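/*
 * Usage sketch (illustrative only, not part of the original header), showing
 * how an atomic_t is declared, initialised and accessed with the helpers
 * above.  The foo_* names are made up for the example.
 *
 *	static atomic_t foo_errors = ATOMIC_INIT(0);
 *
 *	static void foo_clear_errors(void)
 *	{
 *		atomic_set(&foo_errors, 0);
 *	}
 *
 *	static int foo_error_count(void)
 *	{
 *		return atomic_read(&foo_errors);
 *	}
 */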

#define ATOMIC_OP(op, c_op, asm_op)					\
static __inline__ void atomic_##op(int i, atomic_t * v)		\
{									\
	if (kernel_uses_llsc && R10000_LLSC_WAR) {			\
		int temp;						\
									\
		__asm__ __volatile__(					\
		"	.set	arch=r4000			\n"	\
		"1:	ll	%0, %1		# atomic_" #op "\n"	\
		"	" #asm_op " %0, %2			\n"	\
		"	sc	%0, %1				\n"	\
		"	beqzl	%0, 1b				\n"	\
		"	.set	mips0				\n"	\
		: "=&r" (temp), "+m" (v->counter)			\
		: "Ir" (i));						\
	} else if (kernel_uses_llsc) {					\
		int temp;						\
									\
		do {							\
			__asm__ __volatile__(				\
			"	.set	arch=r4000		\n"	\
			"	ll	%0, %1	# atomic_" #op "\n"	\
			"	" #asm_op " %0, %2		\n"	\
			"	sc	%0, %1			\n"	\
			"	.set	mips0			\n"	\
			: "=&r" (temp), "+m" (v->counter)		\
			: "Ir" (i));					\
		} while (unlikely(!temp));				\
	} else {							\
		unsigned long flags;					\
									\
		raw_local_irq_save(flags);				\
		v->counter c_op i;					\
		raw_local_irq_restore(flags);				\
	}								\
}

#define ATOMIC_OP_RETURN(op, c_op, asm_op)				\
static __inline__ int atomic_##op##_return(int i, atomic_t * v)	\
{									\
	int result;							\
									\
	smp_mb__before_llsc();						\
									\
	if (kernel_uses_llsc && R10000_LLSC_WAR) {			\
		int temp;						\
									\
		__asm__ __volatile__(					\
		"	.set	arch=r4000			\n"	\
		"1:	ll	%1, %2	# atomic_" #op "_return	\n"	\
		"	" #asm_op " %0, %1, %3			\n"	\
		"	sc	%0, %2				\n"	\
		"	beqzl	%0, 1b				\n"	\
		"	" #asm_op " %0, %1, %3			\n"	\
		"	.set	mips0				\n"	\
		: "=&r" (result), "=&r" (temp), "+m" (v->counter)	\
		: "Ir" (i));						\
	} else if (kernel_uses_llsc) {					\
		int temp;						\
									\
		do {							\
			__asm__ __volatile__(				\
			"	.set	arch=r4000		\n"	\
			"	ll	%1, %2	# atomic_" #op "_return	\n" \
			"	" #asm_op " %0, %1, %3		\n"	\
			"	sc	%0, %2			\n"	\
			"	.set	mips0			\n"	\
			: "=&r" (result), "=&r" (temp), "+m" (v->counter) \
			: "Ir" (i));					\
		} while (unlikely(!result));				\
									\
		result = temp; result c_op i;				\
	} else {							\
		unsigned long flags;					\
									\
		raw_local_irq_save(flags);				\
		result = v->counter;					\
		result c_op i;						\
		v->counter = result;					\
		raw_local_irq_restore(flags);				\
	}								\
									\
	smp_llsc_mb();							\
									\
	return result;							\
}

#define ATOMIC_OPS(op, c_op, asm_op)					\
	ATOMIC_OP(op, c_op, asm_op)					\
	ATOMIC_OP_RETURN(op, c_op, asm_op)

ATOMIC_OPS(add, +=, addu)
ATOMIC_OPS(sub, -=, subu)

#undef ATOMIC_OPS
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP
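/*
 * Illustrative sketch, not part of the original header: the ATOMIC_OPS()
 * expansions above generate atomic_add(), atomic_sub(), atomic_add_return()
 * and atomic_sub_return().  A made-up usage example:
 *
 *	static atomic_t foo_users = ATOMIC_INIT(0);
 *
 *	static int foo_open(void)
 *	{
 *		// atomic_add_return() returns the new value and implies
 *		// full barrier semantics around the update.
 *		if (atomic_add_return(1, &foo_users) == 1)
 *			return 1;	// first user
 *		return 0;
 *	}
 *
 *	static void foo_close(void)
 *	{
 *		atomic_sub(1, &foo_users);
 *	}
 */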

/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
	int result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		int temp;

		__asm__ __volatile__(
		"	.set	arch=r4000			\n"
		"1:	ll	%1, %2	# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3			\n"
		"	bltz	%0, 1f				\n"
		"	sc	%0, %2				\n"
		"	.set	noreorder			\n"
		"	beqzl	%0, 1b				\n"
		"	 subu	%0, %1, %3			\n"
		"	.set	reorder				\n"
		"1:						\n"
		"	.set	mips0				\n"
		: "=&r" (result), "=&r" (temp), "+m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (kernel_uses_llsc) {
		int temp;

		__asm__ __volatile__(
		"	.set	arch=r4000			\n"
		"1:	ll	%1, %2	# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3			\n"
		"	bltz	%0, 1f				\n"
		"	sc	%0, %2				\n"
		"	.set	noreorder			\n"
		"	beqz	%0, 1b				\n"
		"	 subu	%0, %1, %3			\n"
		"	.set	reorder				\n"
		"1:						\n"
		"	.set	mips0				\n"
		: "=&r" (result), "=&r" (temp), "+m" (v->counter)
		: "Ir" (i));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}
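/*
 * Illustrative sketch, not part of the original header:
 * atomic_sub_if_positive() suits simple credit/token pools and is the
 * building block for atomic_dec_if_positive() further down.  The foo_*
 * names are made up.
 *
 *	static atomic_t foo_credits = ATOMIC_INIT(8);
 *
 *	static int foo_take_credit(void)
 *	{
 *		// The old value minus one is returned; a negative result
 *		// means no credit was available and the counter was left
 *		// untouched.
 *		return atomic_sub_if_positive(1, &foo_credits) >= 0;
 *	}
 *
 *	static void foo_return_credit(void)
 *	{
 *		atomic_add(1, &foo_credits);
 *	}
 */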

#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))

/**
 * __atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns the old value of @v.
 */
static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;
	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c;
}
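/*
 * Illustrative sketch, not part of the original header: the generic
 * atomic_add_unless()/atomic_inc_not_zero() wrappers in <linux/atomic.h>
 * are typically built on top of __atomic_add_unless().  An open-coded
 * "take a reference only if still live" helper (struct foo and its refcnt
 * field are made up) would look like:
 *
 *	static int foo_get(struct foo *f)
 *	{
 *		// An old value of 0 means the object was already dead and
 *		// nothing was added.
 *		return __atomic_add_unless(&f->refcnt, 1, 0) != 0;
 *	}
 */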

#define atomic_dec_return(v) atomic_sub_return(1, (v))
#define atomic_inc_return(v) atomic_add_return(1, (v))

/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i, v) (atomic_sub_return((i), (v)) == 0)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v)	atomic_sub_if_positive(1, v)

/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) atomic_add(1, (v))

/*
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) atomic_sub(1, (v))

/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i, v) (atomic_add_return(i, (v)) < 0)
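/*
 * Illustrative sketch, not part of the original header: the classic
 * reference-count pattern built from the helpers above.  struct foo and
 * foo_destroy() are made-up names.
 *
 *	static void foo_get(struct foo *f)
 *	{
 *		atomic_inc(&f->refcnt);
 *	}
 *
 *	static void foo_put(struct foo *f)
 *	{
 *		// Only the caller dropping the final reference sees true.
 *		if (atomic_dec_and_test(&f->refcnt))
 *			foo_destroy(f);
 *	}
 */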

#ifdef CONFIG_64BIT

#define ATOMIC64_INIT(i)	{ (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
#define atomic64_read(v)	(*(volatile long *)&(v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 */
#define atomic64_set(v, i)	((v)->counter = (i))

#define ATOMIC64_OP(op, c_op, asm_op)					\
static __inline__ void atomic64_##op(long i, atomic64_t * v)		\
{									\
	if (kernel_uses_llsc && R10000_LLSC_WAR) {			\
		long temp;						\
									\
		__asm__ __volatile__(					\
		"	.set	arch=r4000			\n"	\
		"1:	lld	%0, %1	# atomic64_" #op "	\n"	\
		"	" #asm_op " %0, %2			\n"	\
		"	scd	%0, %1				\n"	\
		"	beqzl	%0, 1b				\n"	\
		"	.set	mips0				\n"	\
		: "=&r" (temp), "+m" (v->counter)			\
		: "Ir" (i));						\
	} else if (kernel_uses_llsc) {					\
		long temp;						\
									\
		do {							\
			__asm__ __volatile__(				\
			"	.set	arch=r4000		\n"	\
			"	lld	%0, %1	# atomic64_" #op "\n"	\
			"	" #asm_op " %0, %2		\n"	\
			"	scd	%0, %1			\n"	\
			"	.set	mips0			\n"	\
			: "=&r" (temp), "+m" (v->counter)		\
			: "Ir" (i));					\
		} while (unlikely(!temp));				\
	} else {							\
		unsigned long flags;					\
									\
		raw_local_irq_save(flags);				\
		v->counter c_op i;					\
		raw_local_irq_restore(flags);				\
	}								\
}

#define ATOMIC64_OP_RETURN(op, c_op, asm_op)				\
static __inline__ long atomic64_##op##_return(long i, atomic64_t * v)	\
{									\
	long result;							\
									\
	smp_mb__before_llsc();						\
									\
	if (kernel_uses_llsc && R10000_LLSC_WAR) {			\
		long temp;						\
									\
		__asm__ __volatile__(					\
		"	.set	arch=r4000			\n"	\
		"1:	lld	%1, %2	# atomic64_" #op "_return\n"	\
		"	" #asm_op " %0, %1, %3			\n"	\
		"	scd	%0, %2				\n"	\
		"	beqzl	%0, 1b				\n"	\
		"	" #asm_op " %0, %1, %3			\n"	\
		"	.set	mips0				\n"	\
		: "=&r" (result), "=&r" (temp), "+m" (v->counter)	\
		: "Ir" (i));						\
	} else if (kernel_uses_llsc) {					\
		long temp;						\
									\
		do {							\
			__asm__ __volatile__(				\
			"	.set	arch=r4000		\n"	\
			"	lld	%1, %2	# atomic64_" #op "_return\n" \
			"	" #asm_op " %0, %1, %3		\n"	\
			"	scd	%0, %2			\n"	\
			"	.set	mips0			\n"	\
			: "=&r" (result), "=&r" (temp), "=m" (v->counter) \
			: "Ir" (i), "m" (v->counter)			\
			: "memory");					\
		} while (unlikely(!result));				\
									\
		result = temp; result c_op i;				\
	} else {							\
		unsigned long flags;					\
									\
		raw_local_irq_save(flags);				\
		result = v->counter;					\
		result c_op i;						\
		v->counter = result;					\
		raw_local_irq_restore(flags);				\
	}								\
									\
	smp_llsc_mb();							\
									\
	return result;							\
}

#define ATOMIC64_OPS(op, c_op, asm_op)					\
	ATOMIC64_OP(op, c_op, asm_op)					\
	ATOMIC64_OP_RETURN(op, c_op, asm_op)

ATOMIC64_OPS(add, +=, daddu)
ATOMIC64_OPS(sub, -=, dsubu)

#undef ATOMIC64_OPS
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP
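/*
 * Illustrative sketch, not part of the original header: atomic64_t suits
 * 64-bit-only statistics counters that must not wrap at 32 bits.  The
 * foo_* names are made up.
 *
 *	static atomic64_t foo_bytes = ATOMIC64_INIT(0);
 *
 *	static void foo_account(long nbytes)
 *	{
 *		atomic64_add(nbytes, &foo_bytes);
 *	}
 *
 *	static long foo_total(void)
 *	{
 *		return atomic64_read(&foo_bytes);
 *	}
 */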

/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
{
	long result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		long temp;

		__asm__ __volatile__(
		"	.set	arch=r4000			\n"
		"1:	lld	%1, %2	# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3			\n"
		"	bltz	%0, 1f				\n"
		"	scd	%0, %2				\n"
		"	.set	noreorder			\n"
		"	beqzl	%0, 1b				\n"
		"	 dsubu	%0, %1, %3			\n"
		"	.set	reorder				\n"
		"1:						\n"
		"	.set	mips0				\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (kernel_uses_llsc) {
		long temp;

		__asm__ __volatile__(
		"	.set	arch=r4000			\n"
		"1:	lld	%1, %2	# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3			\n"
		"	bltz	%0, 1f				\n"
		"	scd	%0, %2				\n"
		"	.set	noreorder			\n"
		"	beqz	%0, 1b				\n"
		"	 dsubu	%0, %1, %3			\n"
		"	.set	reorder				\n"
		"1:						\n"
		"	.set	mips0				\n"
		: "=&r" (result), "=&r" (temp), "+m" (v->counter)
		: "Ir" (i));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}

#define atomic64_cmpxchg(v, o, n) \
	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), (new)))

/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if the addition was performed, zero otherwise.
 */
static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long c, old;
	c = atomic64_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic64_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

#define atomic64_dec_return(v) atomic64_sub_return(1, (v))
#define atomic64_inc_return(v) atomic64_add_return(1, (v))

/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i, v) (atomic64_sub_return((i), (v)) == 0)

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v)	atomic64_sub_if_positive(1, v)

/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.
 */
#define atomic64_inc(v) atomic64_add(1, (v))

/*
 * atomic64_dec - decrement atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic64_dec(v) atomic64_sub(1, (v))

/*
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic64_add_negative(i, v) (atomic64_add_return(i, (v)) < 0)

#endif /* CONFIG_64BIT */

#endif /* _ASM_ATOMIC_H */