/*
 * Copyright IBM Corp. 1999, 2009
 * Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>,
 *	      Denis Joseph Barrow,
 *	      Arnd Bergmann <arndb@de.ibm.com>,
 *
 * Atomic operations that C can't guarantee us.
 * Useful for resource counting etc.
 * s390 uses 'Compare And Swap' for atomicity in SMP environment.
 *
 */

#ifndef __ARCH_S390_ATOMIC__
#define __ARCH_S390_ATOMIC__

#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/cmpxchg.h>

#define ATOMIC_INIT(i)  { (i) }

#define __ATOMIC_NO_BARRIER	"\n"

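/*
 * __ATOMIC_LOOP() is the common building block for the 32-bit atomic
 * operations.  With CONFIG_HAVE_MARCH_Z196_FEATURES the counter is
 * updated with a single interlocked-access instruction (laa/lan/lao/lax)
 * and "bcr 14,0" serves as the memory barrier where one is requested;
 * older machines fall back to a compare-and-swap (cs) retry loop.
 */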
#ifdef CONFIG_HAVE_MARCH_Z196_FEATURES

#define __ATOMIC_OR	"lao"
#define __ATOMIC_AND	"lan"
#define __ATOMIC_ADD	"laa"
#define __ATOMIC_XOR	"lax"
#define __ATOMIC_BARRIER "bcr	14,0\n"

#define __ATOMIC_LOOP(ptr, op_val, op_string, __barrier)		\
({									\
	int old_val;							\
									\
	typecheck(atomic_t *, ptr);					\
	asm volatile(							\
		op_string "	%0,%2,%1\n"				\
		__barrier						\
		: "=d" (old_val), "+Q" ((ptr)->counter)			\
		: "d" (op_val)						\
		: "cc", "memory");					\
	old_val;							\
})

#else /* CONFIG_HAVE_MARCH_Z196_FEATURES */

#define __ATOMIC_OR	"or"
#define __ATOMIC_AND	"nr"
#define __ATOMIC_ADD	"ar"
#define __ATOMIC_XOR	"xr"
#define __ATOMIC_BARRIER "\n"

#define __ATOMIC_LOOP(ptr, op_val, op_string, __barrier)		\
({									\
	int old_val, new_val;						\
									\
	typecheck(atomic_t *, ptr);					\
	asm volatile(							\
		"	l	%0,%2\n"				\
		"0:	lr	%1,%0\n"				\
		op_string "	%1,%3\n"				\
		"	cs	%0,%1,%2\n"				\
		"	jl	0b"					\
		: "=&d" (old_val), "=&d" (new_val), "+Q" ((ptr)->counter)\
		: "d" (op_val)						\
		: "cc", "memory");					\
	old_val;							\
})

#endif /* CONFIG_HAVE_MARCH_Z196_FEATURES */

static inline int atomic_read(const atomic_t *v)
{
	int c;

	asm volatile(
		"	l	%0,%1\n"
		: "=d" (c) : "Q" (v->counter));
	return c;
}

static inline void atomic_set(atomic_t *v, int i)
{
	asm volatile(
		"	st	%1,%0\n"
		: "=Q" (v->counter) : "d" (i));
}

static inline int atomic_add_return(int i, atomic_t *v)
{
	return __ATOMIC_LOOP(v, i, __ATOMIC_ADD, __ATOMIC_BARRIER) + i;
}

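/*
 * atomic_add() has a fast path on z196 and newer: a compile-time
 * constant in the range -128..127 can be applied with a single
 * "add signed immediate" (asi) instruction instead of the loop.
 */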
static inline void atomic_add(int i, atomic_t *v)
{
#ifdef CONFIG_HAVE_MARCH_Z196_FEATURES
	if (__builtin_constant_p(i) && (i > -129) && (i < 128)) {
		asm volatile(
			"asi	%0,%1\n"
			: "+Q" (v->counter)
			: "i" (i)
			: "cc", "memory");
		return;
	}
#endif
	__ATOMIC_LOOP(v, i, __ATOMIC_ADD, __ATOMIC_NO_BARRIER);
}

#define atomic_add_negative(_i, _v)	(atomic_add_return(_i, _v) < 0)
#define atomic_inc(_v)			atomic_add(1, _v)
#define atomic_inc_return(_v)		atomic_add_return(1, _v)
#define atomic_inc_and_test(_v)		(atomic_add_return(1, _v) == 0)
#define atomic_sub(_i, _v)		atomic_add(-(int)(_i), _v)
#define atomic_sub_return(_i, _v)	atomic_add_return(-(int)(_i), _v)
#define atomic_sub_and_test(_i, _v)	(atomic_sub_return(_i, _v) == 0)
#define atomic_dec(_v)			atomic_sub(1, _v)
#define atomic_dec_return(_v)		atomic_sub_return(1, _v)
#define atomic_dec_and_test(_v)		(atomic_sub_return(1, _v) == 0)

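/*
 * ATOMIC_OP() generates the non-value-returning bitwise helpers
 * atomic_and(), atomic_or() and atomic_xor() on top of __ATOMIC_LOOP().
 */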
#define ATOMIC_OP(op, OP)						\
static inline void atomic_##op(int i, atomic_t *v)			\
{									\
	__ATOMIC_LOOP(v, i, __ATOMIC_##OP, __ATOMIC_NO_BARRIER);	\
}

ATOMIC_OP(and, AND)
ATOMIC_OP(or, OR)
ATOMIC_OP(xor, XOR)

#undef ATOMIC_OP

#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	asm volatile(
		"	cs	%0,%2,%1"
		: "+d" (old), "+Q" (v->counter)
		: "d" (new)
		: "cc", "memory");
	return old;
}

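/*
 * __atomic_add_unless(): add @a to @v unless @v currently holds @u.
 * Implemented as an atomic_cmpxchg() retry loop; returns the value
 * observed before any addition took place.
 */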
static inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;
	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == u))
			break;
		old = atomic_cmpxchg(v, c, c + a);
		if (likely(old == c))
			break;
		c = old;
	}
	return c;
}


#undef __ATOMIC_LOOP

#define ATOMIC64_INIT(i)  { (i) }

#define __ATOMIC64_NO_BARRIER	"\n"

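/*
 * The 64-bit variants mirror the 32-bit ones: interlocked-access
 * instructions (laag/lang/laog/laxg) on z196 and newer, a csg
 * (64-bit compare-and-swap) retry loop otherwise.
 */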
#ifdef CONFIG_HAVE_MARCH_Z196_FEATURES

#define __ATOMIC64_OR	"laog"
#define __ATOMIC64_AND	"lang"
#define __ATOMIC64_ADD	"laag"
#define __ATOMIC64_XOR	"laxg"
#define __ATOMIC64_BARRIER "bcr	14,0\n"

#define __ATOMIC64_LOOP(ptr, op_val, op_string, __barrier)		\
({									\
	long long old_val;						\
									\
	typecheck(atomic64_t *, ptr);					\
	asm volatile(							\
		op_string "	%0,%2,%1\n"				\
		__barrier						\
		: "=d" (old_val), "+Q" ((ptr)->counter)			\
		: "d" (op_val)						\
		: "cc", "memory");					\
	old_val;							\
})

#else /* CONFIG_HAVE_MARCH_Z196_FEATURES */

#define __ATOMIC64_OR	"ogr"
#define __ATOMIC64_AND	"ngr"
#define __ATOMIC64_ADD	"agr"
#define __ATOMIC64_XOR	"xgr"
#define __ATOMIC64_BARRIER "\n"

#define __ATOMIC64_LOOP(ptr, op_val, op_string, __barrier)		\
({									\
	long long old_val, new_val;					\
									\
	typecheck(atomic64_t *, ptr);					\
	asm volatile(							\
		"	lg	%0,%2\n"				\
		"0:	lgr	%1,%0\n"				\
		op_string "	%1,%3\n"				\
		"	csg	%0,%1,%2\n"				\
		"	jl	0b"					\
		: "=&d" (old_val), "=&d" (new_val), "+Q" ((ptr)->counter)\
		: "d" (op_val)						\
		: "cc", "memory");					\
	old_val;							\
})

#endif /* CONFIG_HAVE_MARCH_Z196_FEATURES */

static inline long long atomic64_read(const atomic64_t *v)
{
	long long c;

	asm volatile(
		"	lg	%0,%1\n"
		: "=d" (c) : "Q" (v->counter));
	return c;
}

static inline void atomic64_set(atomic64_t *v, long long i)
{
	asm volatile(
		"	stg	%1,%0\n"
		: "=Q" (v->counter) : "d" (i));
}

static inline long long atomic64_add_return(long long i, atomic64_t *v)
{
	return __ATOMIC64_LOOP(v, i, __ATOMIC64_ADD, __ATOMIC64_BARRIER) + i;
}

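/*
 * As with atomic_add(): compile-time constants in the range -128..127
 * are applied with a single agsi instruction on z196 and newer.
 */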
static inline void atomic64_add(long long i, atomic64_t *v)
{
#ifdef CONFIG_HAVE_MARCH_Z196_FEATURES
	if (__builtin_constant_p(i) && (i > -129) && (i < 128)) {
		asm volatile(
			"agsi	%0,%1\n"
			: "+Q" (v->counter)
			: "i" (i)
			: "cc", "memory");
		return;
	}
#endif
	__ATOMIC64_LOOP(v, i, __ATOMIC64_ADD, __ATOMIC64_NO_BARRIER);
}

#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))

static inline long long atomic64_cmpxchg(atomic64_t *v,
					  long long old, long long new)
{
	asm volatile(
		"	csg	%0,%2,%1"
		: "+d" (old), "+Q" (v->counter)
		: "d" (new)
		: "cc", "memory");
	return old;
}

#define ATOMIC64_OP(op, OP)						\
static inline void atomic64_##op(long i, atomic64_t *v)		\
{									\
	__ATOMIC64_LOOP(v, i, __ATOMIC64_##OP, __ATOMIC64_NO_BARRIER);	\
}

ATOMIC64_OP(and, AND)
ATOMIC64_OP(or, OR)
ATOMIC64_OP(xor, XOR)

#undef ATOMIC64_OP
#undef __ATOMIC64_LOOP

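/*
 * atomic64_add_unless(): add @i to @v unless @v currently holds @u.
 * Returns non-zero if the addition was performed.
 */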
static inline int atomic64_add_unless(atomic64_t *v, long long i, long long u)
{
	long long c, old;

	c = atomic64_read(v);
	for (;;) {
		if (unlikely(c == u))
			break;
		old = atomic64_cmpxchg(v, c, c + i);
		if (likely(old == c))
			break;
		c = old;
	}
	return c != u;
}

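/*
 * atomic64_dec_if_positive(): decrement @v only if the result does not
 * go negative.  Returns the decremented value, which is negative when
 * no decrement was performed.
 */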
static inline long long atomic64_dec_if_positive(atomic64_t *v)
{
	long long c, old, dec;

	c = atomic64_read(v);
	for (;;) {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
		old = atomic64_cmpxchg((v), c, dec);
		if (likely(old == c))
			break;
		c = old;
	}
	return dec;
}

#define atomic64_add_negative(_i, _v)	(atomic64_add_return(_i, _v) < 0)
#define atomic64_inc(_v)		atomic64_add(1, _v)
#define atomic64_inc_return(_v)		atomic64_add_return(1, _v)
#define atomic64_inc_and_test(_v)	(atomic64_add_return(1, _v) == 0)
#define atomic64_sub_return(_i, _v)	atomic64_add_return(-(long long)(_i), _v)
#define atomic64_sub(_i, _v)		atomic64_add(-(long long)(_i), _v)
#define atomic64_sub_and_test(_i, _v)	(atomic64_sub_return(_i, _v) == 0)
#define atomic64_dec(_v)		atomic64_sub(1, _v)
#define atomic64_dec_return(_v)		atomic64_sub_return(1, _v)
#define atomic64_dec_and_test(_v)	(atomic64_sub_return(1, _v) == 0)
#define atomic64_inc_not_zero(v)	atomic64_add_unless((v), 1, 0)

#endif /* __ARCH_S390_ATOMIC__ */