/* include/asm-generic/percpu.h */
#ifndef _ASM_GENERIC_PERCPU_H_
#define _ASM_GENERIC_PERCPU_H_

#include <linux/compiler.h>
#include <linux/threads.h>
#include <linux/percpu-defs.h>

#ifdef CONFIG_SMP

/*
 * per_cpu_offset() is the offset that has to be added to a
 * percpu variable to get to the instance for a certain processor.
 *
 * Most arches use the __per_cpu_offset array for those offsets but
 * some arches have their own ways of determining the offset (x86_64, s390).
 */
#ifndef __per_cpu_offset
extern unsigned long __per_cpu_offset[NR_CPUS];

#define per_cpu_offset(x) (__per_cpu_offset[x])
#endif
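/*
 * Illustrative sketch (not part of this header): with the generic
 * definitions above, the instance of a per-cpu variable for CPU "cpu"
 * lives at its link-time address plus per_cpu_offset(cpu).  That is
 * what per_cpu()/per_cpu_ptr() from <linux/percpu-defs.h> compute via
 * SHIFT_PERCPU_PTR().  For example:
 *
 *	DEFINE_PER_CPU(int, pkt_count);
 *
 *	int pkt_count_on(int cpu)
 *	{
 *		// roughly *SHIFT_PERCPU_PTR(&pkt_count, per_cpu_offset(cpu))
 *		return per_cpu(pkt_count, cpu);
 *	}
 */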

/*
 * Determine the offset for the currently active processor.
 * An arch may define __my_cpu_offset to provide a more efficient
 * means of obtaining the offset to the per cpu variables of the
 * current processor.
 */
#ifndef __my_cpu_offset
#define __my_cpu_offset per_cpu_offset(raw_smp_processor_id())
#endif
#ifdef CONFIG_DEBUG_PREEMPT
#define my_cpu_offset per_cpu_offset(smp_processor_id())
#else
#define my_cpu_offset __my_cpu_offset
#endif
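/*
 * Illustrative note: the two flavours above differ only in how the CPU
 * number is obtained.  my_cpu_offset goes through smp_processor_id(),
 * so CONFIG_DEBUG_PREEMPT can warn when the caller could still be
 * migrated; __my_cpu_offset uses raw_smp_processor_id() and skips that
 * check.  An arch that keeps the offset in a dedicated register would
 * override the default from its asm/percpu.h before including this
 * file, e.g. (hypothetical helper name):
 *
 *	#define __my_cpu_offset	(__my_arch_read_percpu_reg())
 */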

/*
 * Arch may define arch_raw_cpu_ptr() to provide more efficient address
 * translations for raw_cpu_ptr().
 */
#ifndef arch_raw_cpu_ptr
#define arch_raw_cpu_ptr(ptr)	SHIFT_PERCPU_PTR(ptr, __my_cpu_offset)
#endif
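/*
 * Usage sketch: raw_cpu_ptr() in <linux/percpu-defs.h> resolves to
 * arch_raw_cpu_ptr(), so with the generic fallback above a caller that
 * has already disabled preemption gets a plain offset addition
 * (variable name illustrative):
 *
 *	DEFINE_PER_CPU(u64, hits);
 *
 *	preempt_disable();
 *	(*raw_cpu_ptr(&hits))++;	// this CPU's copy, no atomicity implied
 *	preempt_enable();
 */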

#ifdef CONFIG_HAVE_SETUP_PER_CPU_AREA
extern void setup_per_cpu_areas(void);
#endif

#endif	/* SMP */

#ifndef PER_CPU_BASE_SECTION
#ifdef CONFIG_SMP
#define PER_CPU_BASE_SECTION ".data..percpu"
#else
#define PER_CPU_BASE_SECTION ".data"
#endif
#endif
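/*
 * Rough sketch of how this section name is consumed (the real macros
 * live in <linux/percpu-defs.h>): DEFINE_PER_CPU() places the variable
 * in PER_CPU_BASE_SECTION, which the linker script gathers into the
 * per-cpu template that is then instantiated once per possible CPU:
 *
 *	DEFINE_PER_CPU(int, foo);
 *	// approximately: __attribute__((section(".data..percpu"))) int foo;
 */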

#ifndef PER_CPU_ATTRIBUTES
#define PER_CPU_ATTRIBUTES
#endif

#ifndef PER_CPU_DEF_ATTRIBUTES
#define PER_CPU_DEF_ATTRIBUTES
#endif

#define raw_cpu_generic_to_op(pcp, val, op)				\
do {									\
	*raw_cpu_ptr(&(pcp)) op val;					\
} while (0)
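/*
 * Example expansion (illustrative, nr_events stands for any 4-byte
 * per-cpu variable): on an arch without a specialised raw_cpu_add_4(),
 * raw_cpu_add() falls through to raw_cpu_generic_to_op() and becomes a
 * plain, non-atomic read-modify-write of this CPU's copy:
 *
 *	raw_cpu_add(nr_events, 1);
 *	// ~ *raw_cpu_ptr(&nr_events) += 1;
 */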

#define raw_cpu_generic_add_return(pcp, val)				\
({									\
	raw_cpu_add(pcp, val);						\
	raw_cpu_read(pcp);						\
})

#define raw_cpu_generic_xchg(pcp, nval)					\
({	typeof(pcp) ret__;						\
	ret__ = raw_cpu_read(pcp);					\
	raw_cpu_write(pcp, nval);					\
	ret__;								\
})

#define raw_cpu_generic_cmpxchg(pcp, oval, nval)			\
({									\
	typeof(pcp) ret__;						\
	ret__ = raw_cpu_read(pcp);					\
	if (ret__ == (oval))						\
		raw_cpu_write(pcp, nval);				\
	ret__;								\
})

#define raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)	\
({									\
	int __ret = 0;							\
	if (raw_cpu_read(pcp1) == (oval1) &&				\
			raw_cpu_read(pcp2) == (oval2)) {		\
		raw_cpu_write(pcp1, (nval1));				\
		raw_cpu_write(pcp2, (nval2));				\
		__ret = 1;						\
	}								\
	(__ret);							\
})
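/*
 * Note on all of the raw_cpu_generic_*() helpers above: they provide no
 * protection of their own.  The caller must already have ruled out
 * preemption (and interrupts, if interrupt handlers touch the same
 * per-cpu data).  A hedged usage sketch, assuming a per-cpu slot
 * "cached_obj" declared elsewhere:
 *
 *	unsigned long flags, old;
 *
 *	local_irq_save(flags);
 *	old = raw_cpu_cmpxchg(cached_obj, 0, (unsigned long)obj);
 *	local_irq_restore(flags);
 */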

#define _this_cpu_generic_read(pcp)					\
({	typeof(pcp) ret__;						\
	preempt_disable();						\
	ret__ = *this_cpu_ptr(&(pcp));					\
	preempt_enable();						\
	ret__;								\
})
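/*
 * Why preempt_disable() is enough here: a read is a single access, so
 * it only has to make sure the task is not migrated between computing
 * this CPU's pointer and dereferencing it; it does not need to be
 * atomic with respect to interrupts.  Typical use from preemptible
 * context (variable name illustrative):
 *
 *	DEFINE_PER_CPU(unsigned long, rx_bytes);
 *
 *	unsigned long snapshot = this_cpu_read(rx_bytes);
 */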

#define _this_cpu_generic_to_op(pcp, val, op)				\
do {									\
	unsigned long flags;						\
	raw_local_irq_save(flags);					\
	*raw_cpu_ptr(&(pcp)) op val;					\
	raw_local_irq_restore(flags);					\
} while (0)
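/*
 * The modifying fallbacks disable interrupts rather than just
 * preemption: with only preemption off, the read-modify-write could
 * still race with an interrupt handler updating the same per-cpu
 * variable on this CPU.  With the irq-save above, a pattern like the
 * following (counter name illustrative) stays consistent even when an
 * IRQ handler also increments it:
 *
 *	this_cpu_add(nr_seen, 1);
 */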

#define _this_cpu_generic_add_return(pcp, val)				\
({									\
	typeof(pcp) ret__;						\
	unsigned long flags;						\
	raw_local_irq_save(flags);					\
	raw_cpu_add(pcp, val);						\
	ret__ = raw_cpu_read(pcp);					\
	raw_local_irq_restore(flags);					\
	ret__;								\
})

#define _this_cpu_generic_xchg(pcp, nval)				\
({	typeof(pcp) ret__;						\
	unsigned long flags;						\
	raw_local_irq_save(flags);					\
	ret__ = raw_cpu_read(pcp);					\
	raw_cpu_write(pcp, nval);					\
	raw_local_irq_restore(flags);					\
	ret__;								\
})

#define _this_cpu_generic_cmpxchg(pcp, oval, nval)			\
({									\
	typeof(pcp) ret__;						\
	unsigned long flags;						\
	raw_local_irq_save(flags);					\
	ret__ = raw_cpu_read(pcp);					\
	if (ret__ == (oval))						\
		raw_cpu_write(pcp, nval);				\
	raw_local_irq_restore(flags);					\
	ret__;								\
})

#define _this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)	\
({									\
	int ret__;							\
	unsigned long flags;						\
	raw_local_irq_save(flags);					\
	ret__ = raw_cpu_generic_cmpxchg_double(pcp1, pcp2,		\
			oval1, oval2, nval1, nval2);			\
	raw_local_irq_restore(flags);					\
	ret__;								\
})
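/*
 * Usage sketch for the value-returning this_cpu_*() operations that end
 * up in the fallbacks above (batch_left, BATCH_SIZE and refill_batch()
 * are illustrative names, not existing kernel symbols):
 *
 *	DEFINE_PER_CPU(int, batch_left);
 *
 *	// consume one item from this CPU's batch, refilling when empty
 *	if (this_cpu_add_return(batch_left, -1) < 0) {
 *		this_cpu_write(batch_left, BATCH_SIZE - 1);
 *		refill_batch();
 *	}
 */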

# ifndef raw_cpu_read_1
# define raw_cpu_read_1(pcp)	(*raw_cpu_ptr(&(pcp)))
# endif
# ifndef raw_cpu_read_2
# define raw_cpu_read_2(pcp)	(*raw_cpu_ptr(&(pcp)))
# endif
# ifndef raw_cpu_read_4
# define raw_cpu_read_4(pcp)	(*raw_cpu_ptr(&(pcp)))
# endif
# ifndef raw_cpu_read_8
# define raw_cpu_read_8(pcp)	(*raw_cpu_ptr(&(pcp)))
# endif
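/*
 * None of the size-suffixed _1/_2/_4/_8 fallbacks in this file are used
 * directly.  The raw_cpu_*() and this_cpu_*() operations (defined in
 * <linux/percpu-defs.h> or <linux/percpu.h>, depending on kernel
 * version) select the right one from sizeof() of the variable, roughly:
 *
 *	switch (sizeof(pcp)) {
 *	case 1: ... raw_cpu_read_1(pcp); break;
 *	case 2: ... raw_cpu_read_2(pcp); break;
 *	case 4: ... raw_cpu_read_4(pcp); break;
 *	case 8: ... raw_cpu_read_8(pcp); break;
 *	default: __bad_size_call_parameter();
 *	}
 */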

# ifndef raw_cpu_write_1
# define raw_cpu_write_1(pcp, val)	raw_cpu_generic_to_op((pcp), (val), =)
# endif
# ifndef raw_cpu_write_2
# define raw_cpu_write_2(pcp, val)	raw_cpu_generic_to_op((pcp), (val), =)
# endif
# ifndef raw_cpu_write_4
# define raw_cpu_write_4(pcp, val)	raw_cpu_generic_to_op((pcp), (val), =)
# endif
# ifndef raw_cpu_write_8
# define raw_cpu_write_8(pcp, val)	raw_cpu_generic_to_op((pcp), (val), =)
# endif

# ifndef raw_cpu_add_1
# define raw_cpu_add_1(pcp, val)	raw_cpu_generic_to_op((pcp), (val), +=)
# endif
# ifndef raw_cpu_add_2
# define raw_cpu_add_2(pcp, val)	raw_cpu_generic_to_op((pcp), (val), +=)
# endif
# ifndef raw_cpu_add_4
# define raw_cpu_add_4(pcp, val)	raw_cpu_generic_to_op((pcp), (val), +=)
# endif
# ifndef raw_cpu_add_8
# define raw_cpu_add_8(pcp, val)	raw_cpu_generic_to_op((pcp), (val), +=)
# endif

# ifndef raw_cpu_and_1
# define raw_cpu_and_1(pcp, val)	raw_cpu_generic_to_op((pcp), (val), &=)
# endif
# ifndef raw_cpu_and_2
# define raw_cpu_and_2(pcp, val)	raw_cpu_generic_to_op((pcp), (val), &=)
# endif
# ifndef raw_cpu_and_4
# define raw_cpu_and_4(pcp, val)	raw_cpu_generic_to_op((pcp), (val), &=)
# endif
# ifndef raw_cpu_and_8
# define raw_cpu_and_8(pcp, val)	raw_cpu_generic_to_op((pcp), (val), &=)
# endif

# ifndef raw_cpu_or_1
# define raw_cpu_or_1(pcp, val)		raw_cpu_generic_to_op((pcp), (val), |=)
# endif
# ifndef raw_cpu_or_2
# define raw_cpu_or_2(pcp, val)		raw_cpu_generic_to_op((pcp), (val), |=)
# endif
# ifndef raw_cpu_or_4
# define raw_cpu_or_4(pcp, val)		raw_cpu_generic_to_op((pcp), (val), |=)
# endif
# ifndef raw_cpu_or_8
# define raw_cpu_or_8(pcp, val)		raw_cpu_generic_to_op((pcp), (val), |=)
# endif

# ifndef raw_cpu_add_return_1
# define raw_cpu_add_return_1(pcp, val)	raw_cpu_generic_add_return(pcp, val)
# endif
# ifndef raw_cpu_add_return_2
# define raw_cpu_add_return_2(pcp, val)	raw_cpu_generic_add_return(pcp, val)
# endif
# ifndef raw_cpu_add_return_4
# define raw_cpu_add_return_4(pcp, val)	raw_cpu_generic_add_return(pcp, val)
# endif
# ifndef raw_cpu_add_return_8
# define raw_cpu_add_return_8(pcp, val)	raw_cpu_generic_add_return(pcp, val)
# endif

# ifndef raw_cpu_xchg_1
# define raw_cpu_xchg_1(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
# endif
# ifndef raw_cpu_xchg_2
# define raw_cpu_xchg_2(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
# endif
# ifndef raw_cpu_xchg_4
# define raw_cpu_xchg_4(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
# endif
# ifndef raw_cpu_xchg_8
# define raw_cpu_xchg_8(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
# endif

# ifndef raw_cpu_cmpxchg_1
# define raw_cpu_cmpxchg_1(pcp, oval, nval)	raw_cpu_generic_cmpxchg(pcp, oval, nval)
# endif
# ifndef raw_cpu_cmpxchg_2
# define raw_cpu_cmpxchg_2(pcp, oval, nval)	raw_cpu_generic_cmpxchg(pcp, oval, nval)
# endif
# ifndef raw_cpu_cmpxchg_4
# define raw_cpu_cmpxchg_4(pcp, oval, nval)	raw_cpu_generic_cmpxchg(pcp, oval, nval)
# endif
# ifndef raw_cpu_cmpxchg_8
# define raw_cpu_cmpxchg_8(pcp, oval, nval)	raw_cpu_generic_cmpxchg(pcp, oval, nval)
# endif

# ifndef raw_cpu_cmpxchg_double_1
# define raw_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2)	\
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
# endif
# ifndef raw_cpu_cmpxchg_double_2
# define raw_cpu_cmpxchg_double_2(pcp1, pcp2, oval1, oval2, nval1, nval2)	\
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
# endif
# ifndef raw_cpu_cmpxchg_double_4
# define raw_cpu_cmpxchg_double_4(pcp1, pcp2, oval1, oval2, nval1, nval2)	\
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
# endif
# ifndef raw_cpu_cmpxchg_double_8
# define raw_cpu_cmpxchg_double_8(pcp1, pcp2, oval1, oval2, nval1, nval2)	\
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
# endif
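
/*
 * An architecture that has a cheaper instruction sequence for any of
 * the size-specific operations in this file supplies its own macro from
 * asm/percpu.h before this header is included, and only the missing
 * ones fall back to the generic versions.  Hypothetical sketch (the
 * helper name is made up for illustration):
 *
 *	#define raw_cpu_add_4(pcp, val)	__my_arch_percpu_add_4(&(pcp), val)
 *
 *	#include <asm-generic/percpu.h>
 */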

# ifndef this_cpu_read_1
# define this_cpu_read_1(pcp)	_this_cpu_generic_read(pcp)
# endif
# ifndef this_cpu_read_2
# define this_cpu_read_2(pcp)	_this_cpu_generic_read(pcp)
# endif
# ifndef this_cpu_read_4
# define this_cpu_read_4(pcp)	_this_cpu_generic_read(pcp)
# endif
# ifndef this_cpu_read_8
# define this_cpu_read_8(pcp)	_this_cpu_generic_read(pcp)
# endif

# ifndef this_cpu_write_1
# define this_cpu_write_1(pcp, val)	_this_cpu_generic_to_op((pcp), (val), =)
# endif
# ifndef this_cpu_write_2
# define this_cpu_write_2(pcp, val)	_this_cpu_generic_to_op((pcp), (val), =)
# endif
# ifndef this_cpu_write_4
# define this_cpu_write_4(pcp, val)	_this_cpu_generic_to_op((pcp), (val), =)
# endif
# ifndef this_cpu_write_8
# define this_cpu_write_8(pcp, val)	_this_cpu_generic_to_op((pcp), (val), =)
# endif

# ifndef this_cpu_add_1
# define this_cpu_add_1(pcp, val)	_this_cpu_generic_to_op((pcp), (val), +=)
# endif
# ifndef this_cpu_add_2
# define this_cpu_add_2(pcp, val)	_this_cpu_generic_to_op((pcp), (val), +=)
# endif
# ifndef this_cpu_add_4
# define this_cpu_add_4(pcp, val)	_this_cpu_generic_to_op((pcp), (val), +=)
# endif
# ifndef this_cpu_add_8
# define this_cpu_add_8(pcp, val)	_this_cpu_generic_to_op((pcp), (val), +=)
# endif

# ifndef this_cpu_and_1
# define this_cpu_and_1(pcp, val)	_this_cpu_generic_to_op((pcp), (val), &=)
# endif
# ifndef this_cpu_and_2
# define this_cpu_and_2(pcp, val)	_this_cpu_generic_to_op((pcp), (val), &=)
# endif
# ifndef this_cpu_and_4
# define this_cpu_and_4(pcp, val)	_this_cpu_generic_to_op((pcp), (val), &=)
# endif
# ifndef this_cpu_and_8
# define this_cpu_and_8(pcp, val)	_this_cpu_generic_to_op((pcp), (val), &=)
# endif

# ifndef this_cpu_or_1
# define this_cpu_or_1(pcp, val)	_this_cpu_generic_to_op((pcp), (val), |=)
# endif
# ifndef this_cpu_or_2
# define this_cpu_or_2(pcp, val)	_this_cpu_generic_to_op((pcp), (val), |=)
# endif
# ifndef this_cpu_or_4
# define this_cpu_or_4(pcp, val)	_this_cpu_generic_to_op((pcp), (val), |=)
# endif
# ifndef this_cpu_or_8
# define this_cpu_or_8(pcp, val)	_this_cpu_generic_to_op((pcp), (val), |=)
# endif

# ifndef this_cpu_add_return_1
# define this_cpu_add_return_1(pcp, val)	_this_cpu_generic_add_return(pcp, val)
# endif
# ifndef this_cpu_add_return_2
# define this_cpu_add_return_2(pcp, val)	_this_cpu_generic_add_return(pcp, val)
# endif
# ifndef this_cpu_add_return_4
# define this_cpu_add_return_4(pcp, val)	_this_cpu_generic_add_return(pcp, val)
# endif
# ifndef this_cpu_add_return_8
# define this_cpu_add_return_8(pcp, val)	_this_cpu_generic_add_return(pcp, val)
# endif

# ifndef this_cpu_xchg_1
# define this_cpu_xchg_1(pcp, nval)	_this_cpu_generic_xchg(pcp, nval)
# endif
# ifndef this_cpu_xchg_2
# define this_cpu_xchg_2(pcp, nval)	_this_cpu_generic_xchg(pcp, nval)
# endif
# ifndef this_cpu_xchg_4
# define this_cpu_xchg_4(pcp, nval)	_this_cpu_generic_xchg(pcp, nval)
# endif
# ifndef this_cpu_xchg_8
# define this_cpu_xchg_8(pcp, nval)	_this_cpu_generic_xchg(pcp, nval)
# endif

# ifndef this_cpu_cmpxchg_1
# define this_cpu_cmpxchg_1(pcp, oval, nval)	_this_cpu_generic_cmpxchg(pcp, oval, nval)
# endif
# ifndef this_cpu_cmpxchg_2
# define this_cpu_cmpxchg_2(pcp, oval, nval)	_this_cpu_generic_cmpxchg(pcp, oval, nval)
# endif
# ifndef this_cpu_cmpxchg_4
# define this_cpu_cmpxchg_4(pcp, oval, nval)	_this_cpu_generic_cmpxchg(pcp, oval, nval)
# endif
# ifndef this_cpu_cmpxchg_8
# define this_cpu_cmpxchg_8(pcp, oval, nval)	_this_cpu_generic_cmpxchg(pcp, oval, nval)
# endif

# ifndef this_cpu_cmpxchg_double_1
# define this_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2)	\
	_this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
# endif
# ifndef this_cpu_cmpxchg_double_2
# define this_cpu_cmpxchg_double_2(pcp1, pcp2, oval1, oval2, nval1, nval2)	\
	_this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
# endif
# ifndef this_cpu_cmpxchg_double_4
# define this_cpu_cmpxchg_double_4(pcp1, pcp2, oval1, oval2, nval1, nval2)	\
	_this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
# endif
# ifndef this_cpu_cmpxchg_double_8
# define this_cpu_cmpxchg_double_8(pcp1, pcp2, oval1, oval2, nval1, nval2)	\
	_this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
# endif

#endif /* _ASM_GENERIC_PERCPU_H_ */