/* arch/powerpc/include/asm/cmpxchg.h */
#ifndef _ASM_POWERPC_CMPXCHG_H_
#define _ASM_POWERPC_CMPXCHG_H_

#ifdef __KERNEL__
#include <linux/compiler.h>
#include <asm/synch.h>
#include <asm/asm-compat.h>

/*
 * Atomic exchange
 *
 * Changes the memory location '*ptr' to be val and returns
 * the previous value stored there.
 */
static __always_inline unsigned long
__xchg_u32(volatile void *p, unsigned long val)
{
	unsigned long prev;

	__asm__ __volatile__(
	PPC_RELEASE_BARRIER
"1:	lwarx	%0,0,%2 \n"
	PPC405_ERR77(0,%2)
"	stwcx.	%3,0,%2 \n\
	bne-	1b"
	PPC_ACQUIRE_BARRIER
	: "=&r" (prev), "+m" (*(volatile unsigned int *)p)
	: "r" (p), "r" (val)
	: "cc", "memory");

	return prev;
}
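
/*
 * The PPC_RELEASE_BARRIER before the larx and the PPC_ACQUIRE_BARRIER
 * after the stcx. (both from asm/synch.h) give the exchange the full
 * memory-barrier semantics that the kernel's xchg() contract requires.
 * PPC405_ERR77() expands to a workaround for a PPC405 erratum on the
 * larx/stcx. sequence on platforms that need it, and to nothing
 * elsewhere.
 */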

/*
 * Atomic exchange, CPU-local variant
 *
 * Changes the memory location '*ptr' to be val and returns the
 * previous value stored there, without the memory barriers of
 * __xchg_u32(), so it only orders against the local CPU.
 */
static __always_inline unsigned long
__xchg_u32_local(volatile void *p, unsigned long val)
{
	unsigned long prev;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2 \n"
	PPC405_ERR77(0,%2)
"	stwcx.	%3,0,%2 \n\
	bne-	1b"
	: "=&r" (prev), "+m" (*(volatile unsigned int *)p)
	: "r" (p), "r" (val)
	: "cc", "memory");

	return prev;
}

#ifdef CONFIG_PPC64
static __always_inline unsigned long
__xchg_u64(volatile void *p, unsigned long val)
{
	unsigned long prev;

	__asm__ __volatile__(
	PPC_RELEASE_BARRIER
"1:	ldarx	%0,0,%2 \n"
	PPC405_ERR77(0,%2)
"	stdcx.	%3,0,%2 \n\
	bne-	1b"
	PPC_ACQUIRE_BARRIER
	: "=&r" (prev), "+m" (*(volatile unsigned long *)p)
	: "r" (p), "r" (val)
	: "cc", "memory");

	return prev;
}

static __always_inline unsigned long
__xchg_u64_local(volatile void *p, unsigned long val)
{
	unsigned long prev;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2 \n"
	PPC405_ERR77(0,%2)
"	stdcx.	%3,0,%2 \n\
	bne-	1b"
	: "=&r" (prev), "+m" (*(volatile unsigned long *)p)
	: "r" (p), "r" (val)
	: "cc", "memory");

	return prev;
}
#endif

/*
 * This function doesn't exist, so you'll get a linker error
 * if something tries to do an invalid xchg().
 */
extern void __xchg_called_with_bad_pointer(void);

static __always_inline unsigned long
__xchg(volatile void *ptr, unsigned long x, unsigned int size)
{
	switch (size) {
	case 4:
		return __xchg_u32(ptr, x);
#ifdef CONFIG_PPC64
	case 8:
		return __xchg_u64(ptr, x);
#endif
	}
	__xchg_called_with_bad_pointer();
	return x;
}
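
/*
 * Only 4-byte (and, with CONFIG_PPC64, 8-byte) quantities are handled:
 * a 1- or 2-byte xchg() compiles but fails at link time through
 * __xchg_called_with_bad_pointer().
 */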

static __always_inline unsigned long
__xchg_local(volatile void *ptr, unsigned long x, unsigned int size)
{
	switch (size) {
	case 4:
		return __xchg_u32_local(ptr, x);
#ifdef CONFIG_PPC64
	case 8:
		return __xchg_u64_local(ptr, x);
#endif
	}
	__xchg_called_with_bad_pointer();
	return x;
}

#define xchg(ptr,x)							     \
  ({									     \
     __typeof__(*(ptr)) _x_ = (x);					     \
     (__typeof__(*(ptr))) __xchg((ptr), (unsigned long)_x_, sizeof(*(ptr))); \
  })

#define xchg_local(ptr,x)						     \
  ({									     \
     __typeof__(*(ptr)) _x_ = (x);					     \
     (__typeof__(*(ptr))) __xchg_local((ptr),				     \
		(unsigned long)_x_, sizeof(*(ptr)));			     \
  })
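
/*
 * Usage sketch (illustrative only; 'flag' is a made-up variable, not
 * something defined by this header):
 *
 *	static unsigned int flag;
 *
 *	if (xchg(&flag, 1) == 0) {
 *		// flag was 0 and is now 1, atomically: we won the race
 *	}
 */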

/*
 * Compare and exchange - if *p == old, set it to new,
 * and return the old value of *p.
 */
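/*
 * Executed atomically, the operation above is:
 *
 *	prev = *p;
 *	if (prev == old)
 *		*p = new;
 *	return prev;
 */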
#define __HAVE_ARCH_CMPXCHG 1

static __always_inline unsigned long
__cmpxchg_u32(volatile unsigned int *p, unsigned long old, unsigned long new)
{
	unsigned int prev;

	__asm__ __volatile__ (
	PPC_RELEASE_BARRIER
"1:	lwarx	%0,0,%2		# __cmpxchg_u32\n\
	cmpw	0,%0,%3\n\
	bne-	2f\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%4,0,%2\n\
	bne-	1b"
	PPC_ACQUIRE_BARRIER
	"\n\
2:"
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (old), "r" (new)
	: "cc", "memory");

	return prev;
}

static __always_inline unsigned long
__cmpxchg_u32_local(volatile unsigned int *p, unsigned long old,
			unsigned long new)
{
	unsigned int prev;

	__asm__ __volatile__ (
"1:	lwarx	%0,0,%2		# __cmpxchg_u32_local\n\
	cmpw	0,%0,%3\n\
	bne-	2f\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%4,0,%2\n\
	bne-	1b"
	"\n\
2:"
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (old), "r" (new)
	: "cc", "memory");

	return prev;
}

#ifdef CONFIG_PPC64
static __always_inline unsigned long
__cmpxchg_u64(volatile unsigned long *p, unsigned long old, unsigned long new)
{
	unsigned long prev;

	__asm__ __volatile__ (
	PPC_RELEASE_BARRIER
"1:	ldarx	%0,0,%2		# __cmpxchg_u64\n\
	cmpd	0,%0,%3\n\
	bne-	2f\n\
	stdcx.	%4,0,%2\n\
	bne-	1b"
	PPC_ACQUIRE_BARRIER
	"\n\
2:"
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (old), "r" (new)
	: "cc", "memory");

	return prev;
}

static __always_inline unsigned long
__cmpxchg_u64_local(volatile unsigned long *p, unsigned long old,
			unsigned long new)
{
	unsigned long prev;

	__asm__ __volatile__ (
"1:	ldarx	%0,0,%2		# __cmpxchg_u64_local\n\
	cmpd	0,%0,%3\n\
	bne-	2f\n\
	stdcx.	%4,0,%2\n\
	bne-	1b"
	"\n\
2:"
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (old), "r" (new)
	: "cc", "memory");

	return prev;
}
#endif

/*
 * This function doesn't exist, so you'll get a linker error
 * if something tries to do an invalid cmpxchg().
 */
extern void __cmpxchg_called_with_bad_pointer(void);

static __always_inline unsigned long
__cmpxchg(volatile void *ptr, unsigned long old, unsigned long new,
	  unsigned int size)
{
	switch (size) {
	case 4:
		return __cmpxchg_u32(ptr, old, new);
#ifdef CONFIG_PPC64
	case 8:
		return __cmpxchg_u64(ptr, old, new);
#endif
	}
	__cmpxchg_called_with_bad_pointer();
	return old;
}

static __always_inline unsigned long
__cmpxchg_local(volatile void *ptr, unsigned long old, unsigned long new,
		unsigned int size)
{
	switch (size) {
	case 4:
		return __cmpxchg_u32_local(ptr, old, new);
#ifdef CONFIG_PPC64
	case 8:
		return __cmpxchg_u64_local(ptr, old, new);
#endif
	}
	__cmpxchg_called_with_bad_pointer();
	return old;
}

#define cmpxchg(ptr, o, n)						 \
  ({									 \
     __typeof__(*(ptr)) _o_ = (o);					 \
     __typeof__(*(ptr)) _n_ = (n);					 \
     (__typeof__(*(ptr))) __cmpxchg((ptr), (unsigned long)_o_,		 \
				    (unsigned long)_n_, sizeof(*(ptr))); \
  })

#define cmpxchg_local(ptr, o, n)					 \
  ({									 \
     __typeof__(*(ptr)) _o_ = (o);					 \
     __typeof__(*(ptr)) _n_ = (n);					 \
     (__typeof__(*(ptr))) __cmpxchg_local((ptr), (unsigned long)_o_,	 \
				    (unsigned long)_n_, sizeof(*(ptr))); \
  })
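
/*
 * Typical retry-loop usage (illustrative only; 'v' is a made-up
 * variable):
 *
 *	unsigned int old, new;
 *
 *	do {
 *		old = v;
 *		new = old + 1;
 *	} while (cmpxchg(&v, old, new) != old);
 */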

#ifdef CONFIG_PPC64
#define cmpxchg64(ptr, o, n)						\
  ({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg((ptr), (o), (n));					\
  })
#define cmpxchg64_local(ptr, o, n)					\
  ({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg_local((ptr), (o), (n));					\
  })
#else
#include <asm-generic/cmpxchg-local.h>
#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
#endif
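
/*
 * On 32-bit there is no 8-byte larx/stcx., so cmpxchg64_local() falls
 * back to __cmpxchg64_local_generic() from asm-generic/cmpxchg-local.h,
 * which performs the compare-and-store with interrupts disabled on the
 * local CPU; no cmpxchg64() is defined in that configuration.
 */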

#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_CMPXCHG_H_ */