/* atomic.h: Thankfully the V9 is at least reasonable for this
 *           stuff.
 *
 * Copyright (C) 1996, 1997, 2000, 2012 David S. Miller (davem@redhat.com)
 */

#ifndef __ARCH_SPARC64_ATOMIC__
#define __ARCH_SPARC64_ATOMIC__

#include <linux/types.h>
#include <asm/cmpxchg.h>
#include <asm/barrier.h>

#define ATOMIC_INIT(i) { (i) }
#define ATOMIC64_INIT(i) { (i) }

#define atomic_read(v) (*(volatile int *)&(v)->counter)
#define atomic64_read(v) (*(volatile long *)&(v)->counter)

#define atomic_set(v, i) (((v)->counter) = i)
#define atomic64_set(v, i) (((v)->counter) = i)
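
/*
 * The arithmetic operations below are implemented out of line in
 * sparc64 assembly (arch/sparc/lib/atomic_64.S).  atomic_read() and
 * atomic_set() above are plain volatile accesses and imply no memory
 * barriers.
 */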
void atomic_add(int, atomic_t *);
void atomic64_add(long, atomic64_t *);
void atomic_sub(int, atomic_t *);
void atomic64_sub(long, atomic64_t *);

int atomic_add_ret(int, atomic_t *);
long atomic64_add_ret(long, atomic64_t *);
int atomic_sub_ret(int, atomic_t *);
long atomic64_sub_ret(long, atomic64_t *);
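
/*
 * The *_ret() operations return the new value of the counter, which is
 * what the generic *_return() interfaces wrapped below expect.
 */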

#define atomic_dec_return(v) atomic_sub_ret(1, v)
#define atomic64_dec_return(v) atomic64_sub_ret(1, v)

#define atomic_inc_return(v) atomic_add_ret(1, v)
#define atomic64_inc_return(v) atomic64_add_ret(1, v)

#define atomic_sub_return(i, v) atomic_sub_ret(i, v)
#define atomic64_sub_return(i, v) atomic64_sub_ret(i, v)

#define atomic_add_return(i, v) atomic_add_ret(i, v)
#define atomic64_add_return(i, v) atomic64_add_ret(i, v)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

#define atomic_sub_and_test(i, v) (atomic_sub_ret(i, v) == 0)
#define atomic64_sub_and_test(i, v) (atomic64_sub_ret(i, v) == 0)

#define atomic_dec_and_test(v) (atomic_sub_ret(1, v) == 0)
#define atomic64_dec_and_test(v) (atomic64_sub_ret(1, v) == 0)

#define atomic_inc(v) atomic_add(1, v)
#define atomic64_inc(v) atomic64_add(1, v)

#define atomic_dec(v) atomic_sub(1, v)
#define atomic64_dec(v) atomic64_sub(1, v)

#define atomic_add_negative(i, v) (atomic_add_ret(i, v) < 0)
#define atomic64_add_negative(i, v) (atomic64_add_ret(i, v) < 0)

#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
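
/*
 * __atomic_add_unless - add unless the counter is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to @v...
 * @u: ...unless @v is equal to @u
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns the old value of @v.
 */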
static inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;
	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c;
}

#define atomic64_cmpxchg(v, o, n) \
	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))
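
/*
 * atomic64_add_unless - add unless the counter is already a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to @v...
 * @u: ...unless @v is equal to @u
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns true if the addition was performed, false otherwise.
 */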
static inline long atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long c, old;
	c = atomic64_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic64_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
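
/*
 * atomic64_dec_if_positive() is implemented out of line; following the
 * generic atomic_dec_if_positive() convention it only stores the
 * decremented value when the result is non-negative, and returns the
 * old value minus one either way.
 */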
long atomic64_dec_if_positive(atomic64_t *);

#endif /* !(__ARCH_SPARC64_ATOMIC__) */