/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04, 06 by Ralf Baechle
 */
#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <linux/irqflags.h>
#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/compiler.h>
#include <asm/cpu-features.h>
#include <asm/cmpxchg.h>
#include <asm/war.h>

#define ATOMIC_INIT(i)	{ (i) }
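
/*
 * Example (illustrative only, not part of this file): an atomic_t is
 * statically initialised with ATOMIC_INIT, e.g.
 *
 *	static atomic_t hypothetical_count = ATOMIC_INIT(0);
 */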

/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)		ACCESS_ONCE((v)->counter)
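
/*
 * Note: ACCESS_ONCE() only forces the compiler to emit the load exactly
 * once and keeps it from caching or refetching the value; it implies no
 * memory ordering.
 */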

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i)	((v)->counter = (i))

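/*
 * The operations below come in three flavours, selected at run time by
 * kernel_uses_llsc:
 *
 *  - R10000_LLSC_WAR: the LL/SC retry loop lives inside the asm and uses
 *    the branch-likely instruction beqzl, working around an R10000 erratum.
 *  - plain LL/SC: the retry loop is written in C (do { ... } while the
 *    store-conditional failed), keeping the asm block branch-free.
 *  - no LL/SC: fall back to disabling local interrupts around a plain
 *    read-modify-write, which suffices on such (uniprocessor) CPUs.
 */
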
#define ATOMIC_OP(op, c_op, asm_op) \
static __inline__ void atomic_##op(int i, atomic_t * v) \
{ \
	if (kernel_uses_llsc && R10000_LLSC_WAR) { \
		int temp; \
\
		__asm__ __volatile__( \
		"	.set	arch=r4000			\n" \
		"1:	ll	%0, %1		# atomic_" #op "	\n" \
		"	" #asm_op " %0, %2			\n" \
		"	sc	%0, %1				\n" \
		"	beqzl	%0, 1b				\n" \
		"	.set	mips0				\n" \
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \
		: "Ir" (i)); \
	} else if (kernel_uses_llsc) { \
		int temp; \
\
		do { \
			__asm__ __volatile__( \
			"	.set	"MIPS_ISA_LEVEL"	\n" \
			"	ll	%0, %1	# atomic_" #op "\n" \
			"	" #asm_op " %0, %2		\n" \
			"	sc	%0, %1			\n" \
			"	.set	mips0			\n" \
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \
			: "Ir" (i)); \
		} while (unlikely(!temp)); \
	} else { \
		unsigned long flags; \
\
		raw_local_irq_save(flags); \
		v->counter c_op i; \
		raw_local_irq_restore(flags); \
	} \
}

#define ATOMIC_OP_RETURN(op, c_op, asm_op) \
static __inline__ int atomic_##op##_return(int i, atomic_t * v) \
{ \
	int result; \
\
	smp_mb__before_llsc(); \
\
	if (kernel_uses_llsc && R10000_LLSC_WAR) { \
		int temp; \
\
		__asm__ __volatile__( \
		"	.set	arch=r4000			\n" \
		"1:	ll	%1, %2		# atomic_" #op "_return	\n" \
		"	" #asm_op " %0, %1, %3			\n" \
		"	sc	%0, %2				\n" \
		"	beqzl	%0, 1b				\n" \
		"	" #asm_op " %0, %1, %3			\n" \
		"	.set	mips0				\n" \
		: "=&r" (result), "=&r" (temp), \
		  "+" GCC_OFF_SMALL_ASM() (v->counter) \
		: "Ir" (i)); \
	} else if (kernel_uses_llsc) { \
		int temp; \
\
		do { \
			__asm__ __volatile__( \
			"	.set	"MIPS_ISA_LEVEL"	\n" \
			"	ll	%1, %2	# atomic_" #op "_return	\n" \
			"	" #asm_op " %0, %1, %3		\n" \
			"	sc	%0, %2			\n" \
			"	.set	mips0			\n" \
			: "=&r" (result), "=&r" (temp), \
			  "+" GCC_OFF_SMALL_ASM() (v->counter) \
			: "Ir" (i)); \
		} while (unlikely(!result)); \
\
		result = temp; result c_op i; \
	} else { \
		unsigned long flags; \
\
		raw_local_irq_save(flags); \
		result = v->counter; \
		result c_op i; \
		v->counter = result; \
		raw_local_irq_restore(flags); \
	} \
\
	smp_llsc_mb(); \
\
	return result; \
}

#define ATOMIC_OPS(op, c_op, asm_op) \
	ATOMIC_OP(op, c_op, asm_op) \
	ATOMIC_OP_RETURN(op, c_op, asm_op)

ATOMIC_OPS(add, +=, addu)
ATOMIC_OPS(sub, -=, subu)
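
/*
 * The two instantiations above expand to four functions:
 *
 *	void atomic_add(int i, atomic_t *v);
 *	void atomic_sub(int i, atomic_t *v);
 *	int atomic_add_return(int i, atomic_t *v);
 *	int atomic_sub_return(int i, atomic_t *v);
 *
 * atomic_add_return(i, v), for example, atomically performs
 * v->counter += i and returns the new value, with barrier semantics
 * provided by smp_mb__before_llsc()/smp_llsc_mb().
 */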

#undef ATOMIC_OPS
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
	int result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		int temp;

		__asm__ __volatile__(
		"	.set	arch=r4000			\n"
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3			\n"
		"	bltz	%0, 1f				\n"
		"	sc	%0, %2				\n"
		"	.set	noreorder			\n"
		"	beqzl	%0, 1b				\n"
		"	 subu	%0, %1, %3			\n"
		"	.set	reorder				\n"
		"1:						\n"
		"	.set	mips0				\n"
		: "=&r" (result), "=&r" (temp),
		  "+" GCC_OFF_SMALL_ASM() (v->counter)
		: "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter)
		: "memory");
	} else if (kernel_uses_llsc) {
		int temp;

		__asm__ __volatile__(
		"	.set	"MIPS_ISA_LEVEL"		\n"
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3			\n"
		"	bltz	%0, 1f				\n"
		"	sc	%0, %2				\n"
		"	.set	noreorder			\n"
		"	beqz	%0, 1b				\n"
		"	 subu	%0, %1, %3			\n"
		"	.set	reorder				\n"
		"1:						\n"
		"	.set	mips0				\n"
		: "=&r" (result), "=&r" (temp),
		  "+" GCC_OFF_SMALL_ASM() (v->counter)
		: "Ir" (i));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}
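
/*
 * Illustrative use (hypothetical, not from this file): consuming one
 * unit of a semaphore-like counter and detecting exhaustion:
 *
 *	if (atomic_sub_if_positive(1, &hypothetical_sem_count) < 0)
 *		... counter was already 0, take the slow path ...
 */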

#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))

/**
 * __atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns the old value of @v.
 */
static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;
	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c;
}
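
/*
 * The generic <linux/atomic.h> layers atomic_add_unless() and
 * atomic_inc_not_zero() on top of this helper.  The loop above is the
 * usual lock-free retry pattern: re-read, attempt the cmpxchg(), and
 * start over whenever another CPU changed the counter in between.
 */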

#define atomic_dec_return(v) atomic_sub_return(1, (v))
#define atomic_inc_return(v) atomic_add_return(1, (v))

/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i, v) (atomic_sub_return((i), (v)) == 0)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v)	atomic_sub_if_positive(1, v)

/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) atomic_add(1, (v))

/*
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) atomic_sub(1, (v))

/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i, v) (atomic_add_return(i, (v)) < 0)

#ifdef CONFIG_64BIT

#define ATOMIC64_INIT(i)	{ (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
#define atomic64_read(v)	ACCESS_ONCE((v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic64_set(v, i)	((v)->counter = (i))

#define ATOMIC64_OP(op, c_op, asm_op) \
static __inline__ void atomic64_##op(long i, atomic64_t * v) \
{ \
	if (kernel_uses_llsc && R10000_LLSC_WAR) { \
		long temp; \
\
		__asm__ __volatile__( \
		"	.set	arch=r4000			\n" \
		"1:	lld	%0, %1		# atomic64_" #op "	\n" \
		"	" #asm_op " %0, %2			\n" \
		"	scd	%0, %1				\n" \
		"	beqzl	%0, 1b				\n" \
		"	.set	mips0				\n" \
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \
		: "Ir" (i)); \
	} else if (kernel_uses_llsc) { \
		long temp; \
\
		do { \
			__asm__ __volatile__( \
			"	.set	"MIPS_ISA_LEVEL"	\n" \
			"	lld	%0, %1	# atomic64_" #op "\n" \
			"	" #asm_op " %0, %2		\n" \
			"	scd	%0, %1			\n" \
			"	.set	mips0			\n" \
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \
			: "Ir" (i)); \
		} while (unlikely(!temp)); \
	} else { \
		unsigned long flags; \
\
		raw_local_irq_save(flags); \
		v->counter c_op i; \
		raw_local_irq_restore(flags); \
	} \
}

#define ATOMIC64_OP_RETURN(op, c_op, asm_op) \
static __inline__ long atomic64_##op##_return(long i, atomic64_t * v) \
{ \
	long result; \
\
	smp_mb__before_llsc(); \
\
	if (kernel_uses_llsc && R10000_LLSC_WAR) { \
		long temp; \
\
		__asm__ __volatile__( \
		"	.set	arch=r4000			\n" \
		"1:	lld	%1, %2		# atomic64_" #op "_return\n" \
		"	" #asm_op " %0, %1, %3			\n" \
		"	scd	%0, %2				\n" \
		"	beqzl	%0, 1b				\n" \
		"	" #asm_op " %0, %1, %3			\n" \
		"	.set	mips0				\n" \
		: "=&r" (result), "=&r" (temp), \
		  "+" GCC_OFF_SMALL_ASM() (v->counter) \
		: "Ir" (i)); \
	} else if (kernel_uses_llsc) { \
		long temp; \
\
		do { \
			__asm__ __volatile__( \
			"	.set	"MIPS_ISA_LEVEL"	\n" \
			"	lld	%1, %2	# atomic64_" #op "_return\n" \
			"	" #asm_op " %0, %1, %3		\n" \
			"	scd	%0, %2			\n" \
			"	.set	mips0			\n" \
			: "=&r" (result), "=&r" (temp), \
			  "=" GCC_OFF_SMALL_ASM() (v->counter) \
			: "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter) \
			: "memory"); \
		} while (unlikely(!result)); \
\
		result = temp; result c_op i; \
	} else { \
		unsigned long flags; \
\
		raw_local_irq_save(flags); \
		result = v->counter; \
		result c_op i; \
		v->counter = result; \
		raw_local_irq_restore(flags); \
	} \
\
	smp_llsc_mb(); \
\
	return result; \
}

#define ATOMIC64_OPS(op, c_op, asm_op) \
	ATOMIC64_OP(op, c_op, asm_op) \
	ATOMIC64_OP_RETURN(op, c_op, asm_op)

ATOMIC64_OPS(add, +=, daddu)
ATOMIC64_OPS(sub, -=, dsubu)
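
/*
 * As in the 32-bit case, the two instantiations above expand to
 * atomic64_add(), atomic64_sub(), atomic64_add_return() and
 * atomic64_sub_return(), here built on the doubleword lld/scd LL/SC
 * pair and the daddu/dsubu ALU instructions.
 */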

#undef ATOMIC64_OPS
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP

/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic
 *                            variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
{
	long result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		long temp;

		__asm__ __volatile__(
		"	.set	arch=r4000			\n"
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3			\n"
		"	bltz	%0, 1f				\n"
		"	scd	%0, %2				\n"
		"	.set	noreorder			\n"
		"	beqzl	%0, 1b				\n"
		"	 dsubu	%0, %1, %3			\n"
		"	.set	reorder				\n"
		"1:						\n"
		"	.set	mips0				\n"
		: "=&r" (result), "=&r" (temp),
		  "=" GCC_OFF_SMALL_ASM() (v->counter)
		: "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter)
		: "memory");
	} else if (kernel_uses_llsc) {
		long temp;

		__asm__ __volatile__(
		"	.set	"MIPS_ISA_LEVEL"		\n"
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3			\n"
		"	bltz	%0, 1f				\n"
		"	scd	%0, %2				\n"
		"	.set	noreorder			\n"
		"	beqz	%0, 1b				\n"
		"	 dsubu	%0, %1, %3			\n"
		"	.set	reorder				\n"
		"1:						\n"
		"	.set	mips0				\n"
		: "=&r" (result), "=&r" (temp),
		  "+" GCC_OFF_SMALL_ASM() (v->counter)
		: "Ir" (i));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}

#define atomic64_cmpxchg(v, o, n) \
	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), (new)))

/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns true if the addition was performed, i.e. @v was not @u
 * (note: unlike __atomic_add_unless() above, this returns a boolean,
 * not the old value).
 */
static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long c, old;
	c = atomic64_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic64_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
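
/*
 * Illustrative use (hypothetical, not from this file): taking a
 * reference only while an object is still live:
 *
 *	if (!atomic64_inc_not_zero(&hypothetical_refcnt))
 *		... object is already dead, leave it alone ...
 */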

#define atomic64_dec_return(v) atomic64_sub_return(1, (v))
#define atomic64_inc_return(v) atomic64_add_return(1, (v))

/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i, v) (atomic64_sub_return((i), (v)) == 0)

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v)	atomic64_sub_if_positive(1, v)

/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.
 */
#define atomic64_inc(v) atomic64_add(1, (v))

/*
 * atomic64_dec - decrement atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic64_dec(v) atomic64_sub(1, (v))

/*
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic64_add_negative(i, v) (atomic64_add_return(i, (v)) < 0)

#endif /* CONFIG_64BIT */

#endif /* _ASM_ATOMIC_H */