/*
 * Copyright IBM Corp. 1999, 2009
 * Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>,
 *	      Arnd Bergmann <arndb@de.ibm.com>,
 *
 * Atomic operations that C can't guarantee us.
 * Useful for resource counting etc.
 * s390 uses 'Compare And Swap' for atomicity in SMP environment.
 */
#ifndef __ARCH_S390_ATOMIC__
#define __ARCH_S390_ATOMIC__

#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/cmpxchg.h>

#define ATOMIC_INIT(i)	{ (i) }

#define __ATOMIC_NO_BARRIER	"\n"
#ifdef CONFIG_HAVE_MARCH_Z196_FEATURES

#define __ATOMIC_OR	"lao"
#define __ATOMIC_AND	"lan"
#define __ATOMIC_ADD	"laa"
#define __ATOMIC_XOR	"lax"
#define __ATOMIC_BARRIER "bcr	14,0\n"
#define __ATOMIC_LOOP(ptr, op_val, op_string, __barrier)		\
({									\
	int old_val;							\
									\
	typecheck(atomic_t *, ptr);					\
	asm volatile(							\
		op_string "	%0,%2,%1\n"				\
		__barrier						\
		: "=d" (old_val), "+Q" ((ptr)->counter)			\
		: "d" (op_val)						\
		: "cc", "memory");					\
	old_val;							\
})
#else /* CONFIG_HAVE_MARCH_Z196_FEATURES */

#define __ATOMIC_OR	"or"
#define __ATOMIC_AND	"nr"
#define __ATOMIC_ADD	"ar"
#define __ATOMIC_XOR	"xr"
#define __ATOMIC_BARRIER "\n"
#define __ATOMIC_LOOP(ptr, op_val, op_string, __barrier)		\
({									\
	int old_val, new_val;						\
									\
	typecheck(atomic_t *, ptr);					\
	asm volatile(							\
		"	l	%0,%2\n"				\
		"0:	lr	%1,%0\n"				\
		op_string "	%1,%3\n"				\
		"	cs	%0,%1,%2\n"				\
		"	jl	0b"					\
		: "=&d" (old_val), "=&d" (new_val), "+Q" ((ptr)->counter)\
		: "d" (op_val)						\
		: "cc", "memory");					\
	old_val;							\
})
#endif /* CONFIG_HAVE_MARCH_Z196_FEATURES */
static inline int atomic_read(const atomic_t *v)
{
	int c;

	asm volatile(
		"	l	%0,%1\n"
		: "=d" (c) : "Q" (v->counter));
	return c;
}
static inline void atomic_set(atomic_t *v, int i)
{
	asm volatile(
		"	st	%1,%0\n"
		: "=Q" (v->counter) : "d" (i));
}
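
/*
 * Usage sketch (hypothetical caller, not part of this header):
 *
 *	static atomic_t nr_requests = ATOMIC_INIT(0);
 *
 *	atomic_set(&nr_requests, 0);
 *	pr_info("pending: %d\n", atomic_read(&nr_requests));
 *
 * read/set are single aligned load/store instructions; they are
 * atomic but imply no ordering against other memory accesses.
 */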
static inline int atomic_add_return(int i, atomic_t *v)
{
	return __ATOMIC_LOOP(v, i, __ATOMIC_ADD, __ATOMIC_BARRIER) + i;
}
static inline void atomic_add(int i, atomic_t *v)
{
#ifdef CONFIG_HAVE_MARCH_Z196_FEATURES
	if (__builtin_constant_p(i) && (i > -129) && (i < 128)) {
		asm volatile(
			"asi	%0,%1\n"
			: "+Q" (v->counter)
			: "i" (i)
			: "cc", "memory");
		return;
	}
#endif
	__ATOMIC_LOOP(v, i, __ATOMIC_ADD, __ATOMIC_NO_BARRIER);
}
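
/*
 * Usage sketch (hypothetical caller): atomic_add is for pure
 * counting where the result is not needed; atomic_add_return is
 * for when the updated value matters and full ordering is wanted:
 *
 *	atomic_add(16, &nr_pages);			// unordered count
 *	if (atomic_add_return(1, &nr_users) == 1)	// fully ordered
 *		first_user_init();			// hypothetical helper
 *
 * The constant-operand fast path above uses ASI (add immediate to
 * storage) for constants in [-128, 127], avoiding the loop entirely.
 */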
#define atomic_add_negative(_i, _v)	(atomic_add_return(_i, _v) < 0)
#define atomic_inc(_v)			atomic_add(1, _v)
#define atomic_inc_return(_v)		atomic_add_return(1, _v)
#define atomic_inc_and_test(_v)		(atomic_add_return(1, _v) == 0)
#define atomic_sub(_i, _v)		atomic_add(-(int)(_i), _v)
#define atomic_sub_return(_i, _v)	atomic_add_return(-(int)(_i), _v)
#define atomic_sub_and_test(_i, _v)	(atomic_sub_return(_i, _v) == 0)
#define atomic_dec(_v)			atomic_sub(1, _v)
#define atomic_dec_return(_v)		atomic_sub_return(1, _v)
#define atomic_dec_and_test(_v)		(atomic_sub_return(1, _v) == 0)
#define ATOMIC_OP(op, OP)						\
static inline void atomic_##op(int i, atomic_t *v)			\
{									\
	__ATOMIC_LOOP(v, i, __ATOMIC_##OP, __ATOMIC_NO_BARRIER);	\
}

ATOMIC_OP(and, AND)
ATOMIC_OP(or, OR)
ATOMIC_OP(xor, XOR)

#undef ATOMIC_OP
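
/*
 * ATOMIC_OP(or, OR), for example, expands to:
 *
 *	static inline void atomic_or(int i, atomic_t *v)
 *	{
 *		__ATOMIC_LOOP(v, i, __ATOMIC_OR, __ATOMIC_NO_BARRIER);
 *	}
 */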
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	asm volatile(
		"	cs	%0,%2,%1"
		: "+d" (old), "+Q" (v->counter)
		: "d" (new)
		: "cc", "memory");
	return old;
}
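
/*
 * Usage sketch (hypothetical caller): atomic_cmpxchg returns the
 * value found in memory, so success is detected by comparing it
 * with the expected old value. 'state' and FLAG are hypothetical:
 *
 *	int c = atomic_read(&state), old;
 *
 *	while ((old = atomic_cmpxchg(&state, c, c | FLAG)) != c)
 *		c = old;
 */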
static inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;

	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == u))
			break;
		old = atomic_cmpxchg(v, c, c + a);
		if (likely(old == c))
			break;
		c = old;
	}
	return c;
}

#undef __ATOMIC_LOOP
#define ATOMIC64_INIT(i)  { (i) }

#define __ATOMIC64_NO_BARRIER	"\n"
#ifdef CONFIG_HAVE_MARCH_Z196_FEATURES

#define __ATOMIC64_OR	"laog"
#define __ATOMIC64_AND	"lang"
#define __ATOMIC64_ADD	"laag"
#define __ATOMIC64_XOR	"laxg"
#define __ATOMIC64_BARRIER "bcr	14,0\n"
#define __ATOMIC64_LOOP(ptr, op_val, op_string, __barrier)		\
({									\
	long long old_val;						\
									\
	typecheck(atomic64_t *, ptr);					\
	asm volatile(							\
		op_string "	%0,%2,%1\n"				\
		__barrier						\
		: "=d" (old_val), "+Q" ((ptr)->counter)			\
		: "d" (op_val)						\
		: "cc", "memory");					\
	old_val;							\
})
#else /* CONFIG_HAVE_MARCH_Z196_FEATURES */

#define __ATOMIC64_OR	"ogr"
#define __ATOMIC64_AND	"ngr"
#define __ATOMIC64_ADD	"agr"
#define __ATOMIC64_XOR	"xgr"
#define __ATOMIC64_BARRIER "\n"
#define __ATOMIC64_LOOP(ptr, op_val, op_string, __barrier)		\
({									\
	long long old_val, new_val;					\
									\
	typecheck(atomic64_t *, ptr);					\
	asm volatile(							\
		"	lg	%0,%2\n"				\
		"0:	lgr	%1,%0\n"				\
		op_string "	%1,%3\n"				\
		"	csg	%0,%1,%2\n"				\
		"	jl	0b"					\
		: "=&d" (old_val), "=&d" (new_val), "+Q" ((ptr)->counter)\
		: "d" (op_val)						\
		: "cc", "memory");					\
	old_val;							\
})
#endif /* CONFIG_HAVE_MARCH_Z196_FEATURES */
static inline long long atomic64_read(const atomic64_t *v)
{
	long long c;

	asm volatile(
		"	lg	%0,%1\n"
		: "=d" (c) : "Q" (v->counter));
	return c;
}
static inline void atomic64_set(atomic64_t *v, long long i)
{
	asm volatile(
		"	stg	%1,%0\n"
		: "=Q" (v->counter) : "d" (i));
}
static inline long long atomic64_add_return(long long i, atomic64_t *v)
{
	return __ATOMIC64_LOOP(v, i, __ATOMIC64_ADD, __ATOMIC64_BARRIER) + i;
}
static inline void atomic64_add(long long i, atomic64_t *v)
{
#ifdef CONFIG_HAVE_MARCH_Z196_FEATURES
	if (__builtin_constant_p(i) && (i > -129) && (i < 128)) {
		asm volatile(
			"agsi	%0,%1\n"
			: "+Q" (v->counter)
			: "i" (i)
			: "cc", "memory");
		return;
	}
#endif
	__ATOMIC64_LOOP(v, i, __ATOMIC64_ADD, __ATOMIC64_NO_BARRIER);
}
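
/*
 * Usage sketch (hypothetical caller), mirroring the 32-bit API;
 * 'bytes_written' and 'quota' are illustrative names:
 *
 *	static atomic64_t bytes_written = ATOMIC64_INIT(0);
 *
 *	if (atomic64_add_return(len, &bytes_written) > quota)
 *		throttle_writer();	// hypothetical helper
 *
 * AGSI handles constant increments in [-128, 127] without a loop.
 */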
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))
static inline long long atomic64_cmpxchg(atomic64_t *v,
					 long long old, long long new)
{
	asm volatile(
		"	csg	%0,%2,%1"
		: "+d" (old), "+Q" (v->counter)
		: "d" (new)
		: "cc", "memory");
	return old;
}
#define ATOMIC64_OP(op, OP)						\
static inline void atomic64_##op(long i, atomic64_t *v)		\
{									\
	__ATOMIC64_LOOP(v, i, __ATOMIC64_##OP, __ATOMIC64_NO_BARRIER);	\
}
ATOMIC64_OP(and, AND)
ATOMIC64_OP(or, OR)
ATOMIC64_OP(xor, XOR)
#undef ATOMIC64_OP
#undef __ATOMIC64_LOOP
static inline int atomic64_add_unless(atomic64_t *v, long long i,
				      long long u)
{
	long long c, old;

	c = atomic64_read(v);
	for (;;) {
		if (unlikely(c == u))
			break;
		old = atomic64_cmpxchg(v, c, c + i);
		if (likely(old == c))
			break;
		c = old;
	}
	return c != u;
}
static inline long long atomic64_dec_if_positive(atomic64_t *v)
{
	long long c, old, dec;

	c = atomic64_read(v);
	for (;;) {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
		old = atomic64_cmpxchg((v), c, dec);
		if (likely(old == c))
			break;
		c = old;
	}
	return dec;
}
#define atomic64_add_negative(_i, _v)	(atomic64_add_return(_i, _v) < 0)
#define atomic64_inc(_v)		atomic64_add(1, _v)
#define atomic64_inc_return(_v)		atomic64_add_return(1, _v)
#define atomic64_inc_and_test(_v)	(atomic64_add_return(1, _v) == 0)
#define atomic64_sub_return(_i, _v)	atomic64_add_return(-(long long)(_i), _v)
#define atomic64_sub(_i, _v)		atomic64_add(-(long long)(_i), _v)
#define atomic64_sub_and_test(_i, _v)	(atomic64_sub_return(_i, _v) == 0)
#define atomic64_dec(_v)		atomic64_sub(1, _v)
#define atomic64_dec_return(_v)		atomic64_sub_return(1, _v)
#define atomic64_dec_and_test(_v)	(atomic64_sub_return(1, _v) == 0)
#define atomic64_inc_not_zero(v)	atomic64_add_unless((v), 1, 0)
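
/*
 * atomic64_inc_not_zero is the usual building block for taking a
 * reference only while an object is still live, e.g. (hypothetical):
 *
 *	if (!atomic64_inc_not_zero(&obj->refs))
 *		return NULL;	// object already being torn down
 */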
#endif /* __ARCH_S390_ATOMIC__ */