include/asm-powerpc/atomic.h
#ifndef _ASM_POWERPC_ATOMIC_H_
#define _ASM_POWERPC_ATOMIC_H_

/*
 * PowerPC atomic operations
 */

typedef struct { volatile int counter; } atomic_t;

#ifdef __KERNEL__
#include <asm/synch.h>

#define ATOMIC_INIT(i)		{ (i) }

#define atomic_read(v)		((v)->counter)
#define atomic_set(v,i)		(((v)->counter) = (i))

/* Erratum #77 on the 405 means we need a sync or dcbt before every stwcx.
 * The old ATOMIC_SYNC_FIX covered some but not all of this.
 */
#ifdef CONFIG_IBM405_ERR77
#define PPC405_ERR77(ra,rb)	"dcbt " #ra "," #rb ";"
#else
#define PPC405_ERR77(ra,rb)
#endif

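/*
 * Illustrative expansion (not part of the interface): with
 * CONFIG_IBM405_ERR77 set, PPC405_ERR77(0,%3) pastes "dcbt 0,%3;" into
 * the asm string, so the loop in atomic_add() below assembles roughly as
 *
 *	1:	lwarx	%0,0,%3
 *		add	%0,%2,%0
 *		dcbt	0,%3		# workaround touch before stwcx.
 *		stwcx.	%0,0,%3
 *		bne-	1b
 *
 * Without the config option the macro expands to nothing.
 */
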
static __inline__ void atomic_add(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_add\n\
	add	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (a), "r" (&v->counter), "m" (v->counter)
	: "cc");
}

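/*
 * A sketch of what the lwarx/stwcx. loops in this file do, in C-like
 * pseudocode (store_conditional is a hypothetical helper, not a real
 * kernel function):
 *
 *	do {
 *		t = v->counter;		// lwarx: load and reserve
 *		t += a;			// add
 *	} while (!store_conditional(&v->counter, t));	// stwcx./bne-
 *
 * The stwcx. fails, and the loop retries, if any other CPU wrote to the
 * reservation granule between the lwarx and the stwcx.
 */
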
static __inline__ int atomic_add_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	lwarx	%0,0,%2		# atomic_add_return\n\
	add	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

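/*
 * Note: EIEIO_ON_SMP/ISYNC_ON_SMP from <asm/synch.h> bracket every
 * value-returning operation in this file, so the *_return() variants
 * (and the tests built on them) are intended to behave as memory
 * barriers on SMP; the void atomic_add()/atomic_sub()/atomic_inc()/
 * atomic_dec() give no such ordering.
 */
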
#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)

static __inline__ void atomic_sub(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_sub\n\
	subf	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (a), "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ int atomic_sub_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	lwarx	%0,0,%2		# atomic_sub_return\n\
	subf	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

static __inline__ void atomic_inc(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_inc\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ int atomic_inc_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_inc_return\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v)	(atomic_inc_return(v) == 0)

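/*
 * Illustrative use only (do_first_time_setup is a hypothetical helper):
 * with a counter initialized to -1, exactly the first increment returns
 * true, even under concurrency:
 *
 *	static atomic_t once = ATOMIC_INIT(-1);
 *
 *	if (atomic_inc_and_test(&once))
 *		do_first_time_setup();
 */
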
static __inline__ void atomic_dec(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_dec\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ int atomic_dec_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_dec_return\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic_sub_and_test(a, v)	(atomic_sub_return((a), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_dec_return((v)) == 0)

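/*
 * Illustrative use only (obj and its refcnt field are hypothetical):
 * the classic reference-count pattern,
 *
 *	if (atomic_dec_and_test(&obj->refcnt))
 *		kfree(obj);
 *
 * which leans on atomic_dec_return()'s SMP barriers so that earlier
 * stores to *obj are ordered before the free.
 */
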
/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1.
 */
static __inline__ int atomic_dec_if_positive(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_dec_if_positive\n\
	addic.	%0,%0,-1\n\
	blt-	2f\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	"\n\
2:"	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

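/*
 * Illustrative use only (use_slot is a hypothetical helper): a
 * trylock-style consumer of a count of free slots.  A negative return
 * value means the counter was left untouched:
 *
 *	static atomic_t slots = ATOMIC_INIT(4);
 *
 *	if (atomic_dec_if_positive(&slots) >= 0)
 *		use_slot();
 */
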
#define smp_mb__before_atomic_dec()	smp_mb()
#define smp_mb__after_atomic_dec()	smp_mb()
#define smp_mb__before_atomic_inc()	smp_mb()
#define smp_mb__after_atomic_inc()	smp_mb()
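
/*
 * Illustrative use only (obj->dead is a hypothetical flag): the void
 * atomics imply no barriers, so callers order them explicitly:
 *
 *	obj->dead = 1;
 *	smp_mb__before_atomic_dec();
 *	atomic_dec(&obj->refcnt);
 */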

#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_ATOMIC_H_ */