arch/s390/include/asm/barrier.h
/*
 * Copyright IBM Corp. 1999, 2009
 *
 * Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>
 */

#ifndef __ASM_BARRIER_H
#define __ASM_BARRIER_H

/*
 * Force strict CPU ordering.
 * And yes, this is required on UP too when we're talking
 * to devices.
 */

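/*
 * A "branch on condition" to register 0 (bcr) with a non-zero mask acts
 * as a serializing instruction on s390: all storage accesses issued
 * before it complete before any issued after it. Machines with the
 * z196 facilities provide a fast variant (mask 14) that skips
 * checkpoint synchronization and is therefore cheaper.
 */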
#ifdef CONFIG_HAVE_MARCH_Z196_FEATURES
/* Fast-BCR without checkpoint synchronization */
#define __ASM_BARRIER "bcr 14,0\n"
#else
#define __ASM_BARRIER "bcr 15,0\n"
#endif

#define mb() do { asm volatile(__ASM_BARRIER : : : "memory"); } while (0)

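/*
 * The read, write and SMP barrier flavours all map to the same full
 * serializing barrier. Only the data-dependency barriers expand to
 * nothing, since (unlike Alpha) the hardware honours address
 * dependencies between loads.
 */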
#define rmb() mb()
#define wmb() mb()
#define read_barrier_depends() do { } while (0)
#define smp_mb() mb()
#define smp_rmb() rmb()
#define smp_wmb() wmb()
#define smp_read_barrier_depends() read_barrier_depends()

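/* Ordering around atomic read-modify-write operations uses a full barrier. */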
#define smp_mb__before_atomic() smp_mb()
#define smp_mb__after_atomic() smp_mb()

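/* Assign the value, then order the store against all later accesses. */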
#define set_mb(var, value) do { var = value; mb(); } while (0)

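/*
 * Store-release / load-acquire: only a compiler barrier is used here,
 * relying on the strong hardware ordering of s390 to keep the CPU from
 * reordering the surrounding accesses. The
 * compiletime_assert_atomic_type() check rejects types that cannot be
 * loaded or stored in a single access.
 */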
#define smp_store_release(p, v)					\
do {								\
	compiletime_assert_atomic_type(*p);			\
	barrier();						\
	ACCESS_ONCE(*p) = (v);					\
} while (0)

#define smp_load_acquire(p)					\
({								\
	typeof(*p) ___p1 = ACCESS_ONCE(*p);			\
	compiletime_assert_atomic_type(*p);			\
	barrier();						\
	___p1;							\
})

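/*
 * A minimal usage sketch (not part of this header): a producer/consumer
 * handoff built on the release/acquire pair above. The names data,
 * ready, producer, consumer and do_something are hypothetical.
 *
 *	static int data;
 *	static int ready;
 *
 *	void producer(void)
 *	{
 *		data = 42;
 *		smp_store_release(&ready, 1);	// publish data
 *	}
 *
 *	void consumer(void)
 *	{
 *		if (smp_load_acquire(&ready))	// observe the publication
 *			do_something(data);	// guaranteed to see 42
 *	}
 */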
#endif /* __ASM_BARRIER_H */