Merge tag 'pci-v3.15-changes' of git://git.kernel.org/pub/scm/linux/kernel/git/helgaa...
[deliverable/linux.git] / arch / s390 / include / asm / bitops.h
1 /*
2 * Copyright IBM Corp. 1999,2013
3 *
4 * Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>,
5 *
6 * The description below was taken in large parts from the powerpc
7 * bitops header file:
 * Within a word, bits are numbered LSB first. Lots of places make
9 * this assumption by directly testing bits with (val & (1<<nr)).
10 * This can cause confusion for large (> 1 word) bitmaps on a
11 * big-endian system because, unlike little endian, the number of each
12 * bit depends on the word size.
13 *
14 * The bitop functions are defined to work on unsigned longs, so for an
15 * s390x system the bits end up numbered:
16 * |63..............0|127............64|191...........128|255...........192|
17 * and on s390:
18 * |31.....0|63....32|95....64|127...96|159..128|191..160|223..192|255..224|
19 *
20 * There are a few little-endian macros used mostly for filesystem
 * bitmaps, these work on similar bit array layouts, but
22 * byte-oriented:
23 * |7...0|15...8|23...16|31...24|39...32|47...40|55...48|63...56|
24 *
25 * The main difference is that bit 3-5 (64b) or 3-4 (32b) in the bit
26 * number field needs to be reversed compared to the big-endian bit
27 * fields. This can be achieved by XOR with 0x38 (64b) or 0x18 (32b).
28 *
29 * We also have special functions which work with an MSB0 encoding:
30 * on an s390x system the bits are numbered:
31 * |0..............63|64............127|128...........191|192...........255|
32 * and on s390:
33 * |0.....31|32....63|64....95|96...127|128..159|160..191|192..223|224..255|
34 *
35 * The main difference is that bit 0-63 (64b) or 0-31 (32b) in the bit
36 * number field needs to be reversed compared to the LSB0 encoded bit
37 * fields. This can be achieved by XOR with 0x3f (64b) or 0x1f (32b).
38 *
39 */
40
41 #ifndef _S390_BITOPS_H
42 #define _S390_BITOPS_H
43
44 #ifndef _LINUX_BITOPS_H
45 #error only <linux/bitops.h> can be included directly
46 #endif
47
48 #include <linux/typecheck.h>
49 #include <linux/compiler.h>
50
51 #ifndef CONFIG_64BIT
52
53 #define __BITOPS_OR "or"
54 #define __BITOPS_AND "nr"
55 #define __BITOPS_XOR "xr"
56
57 #define __BITOPS_LOOP(__addr, __val, __op_string) \
58 ({ \
59 unsigned long __old, __new; \
60 \
61 typecheck(unsigned long *, (__addr)); \
62 asm volatile( \
63 " l %0,%2\n" \
64 "0: lr %1,%0\n" \
65 __op_string " %1,%3\n" \
66 " cs %0,%1,%2\n" \
67 " jl 0b" \
68 : "=&d" (__old), "=&d" (__new), "+Q" (*(__addr))\
69 : "d" (__val) \
70 : "cc"); \
71 __old; \
72 })
73
74 #else /* CONFIG_64BIT */
75
#ifdef CONFIG_HAVE_MARCH_Z196_FEATURES

/* z196+: interlocked-access facility LOAD AND OR/AND/EXCLUSIVE OR. */
#define __BITOPS_OR		"laog"
#define __BITOPS_AND		"lang"
#define __BITOPS_XOR		"laxg"

/*
 * Single interlocked instruction: applies the operation to *__addr
 * atomically and expands to the previous value of the word.  No retry
 * loop is needed on this machine generation.
 */
#define __BITOPS_LOOP(__addr, __val, __op_string)		\
({								\
	unsigned long __old;					\
								\
	typecheck(unsigned long *, (__addr));			\
	asm volatile(						\
		__op_string " %0,%2,%1\n"			\
		: "=d" (__old), "+Q" (*(__addr))		\
		: "d" (__val)					\
		: "cc");					\
	__old;							\
})
94
95 #else /* CONFIG_HAVE_MARCH_Z196_FEATURES */
96
/* 64-bit pre-z196: RRE-format logical instructions inside the CSG loop. */
#define __BITOPS_OR		"ogr"
#define __BITOPS_AND		"ngr"
#define __BITOPS_XOR		"xgr"

/*
 * Atomically apply __op_string (ogr/ngr/xgr) with __val to the word at
 * __addr using a load + COMPARE AND SWAP (64 bit) retry loop.  Expands
 * to the previous value of the word.
 */
#define __BITOPS_LOOP(__addr, __val, __op_string)		\
({								\
	unsigned long __old, __new;				\
								\
	typecheck(unsigned long *, (__addr));			\
	asm volatile(						\
		" lg %0,%2\n"					\
		"0: lgr %1,%0\n"				\
		__op_string " %1,%3\n"				\
		" csg %0,%1,%2\n"				\
		" jl 0b"					\
		: "=&d" (__old), "=&d" (__new), "+Q" (*(__addr))\
		: "d" (__val)					\
		: "cc");					\
	__old;							\
})
117
118 #endif /* CONFIG_HAVE_MARCH_Z196_FEATURES */
119
120 #endif /* CONFIG_64BIT */
121
/* Number of unsigned longs needed to hold a bitmap of 'bits' bits. */
#define __BITOPS_WORDS(bits) (((bits) + BITS_PER_LONG - 1) / BITS_PER_LONG)

/*
 * Return the address of the unsigned long word that contains bit @nr.
 * (nr ^ (nr & (BITS_PER_LONG - 1))) clears the in-word bit index, i.e.
 * rounds nr down to a multiple of BITS_PER_LONG; ">> 3" then converts
 * the bit offset to a byte offset.
 */
static inline unsigned long *
__bitops_word(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr;

	addr = (unsigned long)ptr + ((nr ^ (nr & (BITS_PER_LONG - 1))) >> 3);
	return (unsigned long *)addr;
}
132
/*
 * Return the address of the byte that contains bit @nr under the LSB0
 * numbering described in the header comment: the XOR with
 * (BITS_PER_LONG - 8) mirrors the byte index within the big-endian word
 * (see the "XOR with 0x38 (64b) or 0x18 (32b)" note above).
 */
static inline unsigned char *
__bitops_byte(unsigned long nr, volatile unsigned long *ptr)
{
	return ((unsigned char *)ptr) + ((nr ^ (BITS_PER_LONG - 8)) >> 3);
}
138
/*
 * set_bit - atomically set bit @nr in the bitmap at @ptr.
 *
 * On zEC12, a compile-time constant @nr is handled with a single
 * OR IMMEDIATE on the containing byte; otherwise the word-wide atomic
 * __BITOPS_LOOP is used.
 */
static inline void set_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask;

#ifdef CONFIG_HAVE_MARCH_ZEC12_FEATURES
	if (__builtin_constant_p(nr)) {
		unsigned char *caddr = __bitops_byte(nr, ptr);

		/* oi: OR IMMEDIATE of the in-byte bit mask. */
		asm volatile(
			"oi %0,%b1\n"
			: "+Q" (*caddr)
			: "i" (1 << (nr & 7))
			: "cc");
		return;
	}
#endif
	mask = 1UL << (nr & (BITS_PER_LONG - 1));
	__BITOPS_LOOP(addr, mask, __BITOPS_OR);
}
159
/*
 * clear_bit - atomically clear bit @nr in the bitmap at @ptr.
 *
 * On zEC12, a compile-time constant @nr is handled with a single
 * AND IMMEDIATE on the containing byte; otherwise the word-wide atomic
 * __BITOPS_LOOP is used with an inverted mask.
 */
static inline void clear_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask;

#ifdef CONFIG_HAVE_MARCH_ZEC12_FEATURES
	if (__builtin_constant_p(nr)) {
		unsigned char *caddr = __bitops_byte(nr, ptr);

		/* ni: AND IMMEDIATE with the complement of the bit mask. */
		asm volatile(
			"ni %0,%b1\n"
			: "+Q" (*caddr)
			: "i" (~(1 << (nr & 7)))
			: "cc");
		return;
	}
#endif
	mask = ~(1UL << (nr & (BITS_PER_LONG - 1)));
	__BITOPS_LOOP(addr, mask, __BITOPS_AND);
}
180
/*
 * change_bit - atomically toggle bit @nr in the bitmap at @ptr.
 *
 * On zEC12, a compile-time constant @nr is handled with a single
 * EXCLUSIVE OR IMMEDIATE on the containing byte; otherwise the
 * word-wide atomic __BITOPS_LOOP is used.
 */
static inline void change_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask;

#ifdef CONFIG_HAVE_MARCH_ZEC12_FEATURES
	if (__builtin_constant_p(nr)) {
		unsigned char *caddr = __bitops_byte(nr, ptr);

		/* xi: EXCLUSIVE OR IMMEDIATE of the in-byte bit mask. */
		asm volatile(
			"xi %0,%b1\n"
			: "+Q" (*caddr)
			: "i" (1 << (nr & 7))
			: "cc");
		return;
	}
#endif
	mask = 1UL << (nr & (BITS_PER_LONG - 1));
	__BITOPS_LOOP(addr, mask, __BITOPS_XOR);
}
201
/*
 * test_and_set_bit - atomically set bit @nr and return its old value.
 * Returns nonzero iff the bit was already set.
 */
static inline int
test_and_set_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long old, mask;

	mask = 1UL << (nr & (BITS_PER_LONG - 1));
	old = __BITOPS_LOOP(addr, mask, __BITOPS_OR);
	barrier();	/* compiler barrier: no reordering of accesses around the atomic op */
	return (old & mask) != 0;
}
213
/*
 * test_and_clear_bit - atomically clear bit @nr and return its old value.
 * Returns nonzero iff the bit was set before.  Note that here @mask is
 * the *inverted* bit mask (used for the AND), so the tested bit is
 * extracted from @old with ~mask.
 */
static inline int
test_and_clear_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long old, mask;

	mask = ~(1UL << (nr & (BITS_PER_LONG - 1)));
	old = __BITOPS_LOOP(addr, mask, __BITOPS_AND);
	barrier();	/* compiler barrier: no reordering of accesses around the atomic op */
	return (old & ~mask) != 0;
}
225
/*
 * test_and_change_bit - atomically toggle bit @nr and return its old value.
 * Returns nonzero iff the bit was set before the toggle.
 */
static inline int
test_and_change_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long old, mask;

	mask = 1UL << (nr & (BITS_PER_LONG - 1));
	old = __BITOPS_LOOP(addr, mask, __BITOPS_XOR);
	barrier();	/* compiler barrier: no reordering of accesses around the atomic op */
	return (old & mask) != 0;
}
237
/*
 * __set_bit - non-atomic version of set_bit.
 * Must not race with other bit operations on the same word.
 */
static inline void __set_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned char *byte = __bitops_byte(nr, ptr);
	unsigned char bitmask = 1 << (nr & 7);

	*byte |= bitmask;
}
244
/*
 * __clear_bit - non-atomic version of clear_bit.
 * Must not race with other bit operations on the same word.
 */
static inline void
__clear_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned char *byte = __bitops_byte(nr, ptr);
	unsigned char bitmask = 1 << (nr & 7);

	*byte &= ~bitmask;
}
252
/*
 * __change_bit - non-atomic version of change_bit.
 * Must not race with other bit operations on the same word.
 */
static inline void __change_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned char *byte = __bitops_byte(nr, ptr);
	unsigned char bitmask = 1 << (nr & 7);

	*byte ^= bitmask;
}
259
/*
 * __test_and_set_bit - non-atomic version of test_and_set_bit.
 * Returns the previous value (0 or 1) of bit @nr.
 */
static inline int
__test_and_set_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned char *byte = __bitops_byte(nr, ptr);
	unsigned char bitmask = 1 << (nr & 7);
	int old = (*byte & bitmask) != 0;

	*byte |= bitmask;
	return old;
}
270
/*
 * __test_and_clear_bit - non-atomic version of test_and_clear_bit.
 * Returns the previous value (0 or 1) of bit @nr.
 */
static inline int
__test_and_clear_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned char *byte = __bitops_byte(nr, ptr);
	unsigned char bitmask = 1 << (nr & 7);
	int old = (*byte & bitmask) != 0;

	*byte &= ~bitmask;
	return old;
}
281
/*
 * __test_and_change_bit - non-atomic version of test_and_change_bit.
 * Returns the previous value (0 or 1) of bit @nr.
 */
static inline int
__test_and_change_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned char *byte = __bitops_byte(nr, ptr);
	unsigned char bitmask = 1 << (nr & 7);
	int old = (*byte & bitmask) != 0;

	*byte ^= bitmask;
	return old;
}
292
293 static inline int test_bit(unsigned long nr, const volatile unsigned long *ptr)
294 {
295 const volatile unsigned char *addr;
296
297 addr = ((const volatile unsigned char *)ptr);
298 addr += (nr ^ (BITS_PER_LONG - 8)) >> 3;
299 return (*addr >> (nr & 7)) & 1;
300 }
301
302 /*
303 * Functions which use MSB0 bit numbering.
304 * On an s390x system the bits are numbered:
305 * |0..............63|64............127|128...........191|192...........255|
306 * and on s390:
307 * |0.....31|32....63|64....95|96...127|128..159|160..191|192..223|224..255|
308 */
309 unsigned long find_first_bit_inv(const unsigned long *addr, unsigned long size);
310 unsigned long find_next_bit_inv(const unsigned long *addr, unsigned long size,
311 unsigned long offset);
312
313 static inline void set_bit_inv(unsigned long nr, volatile unsigned long *ptr)
314 {
315 return set_bit(nr ^ (BITS_PER_LONG - 1), ptr);
316 }
317
318 static inline void clear_bit_inv(unsigned long nr, volatile unsigned long *ptr)
319 {
320 return clear_bit(nr ^ (BITS_PER_LONG - 1), ptr);
321 }
322
323 static inline void __set_bit_inv(unsigned long nr, volatile unsigned long *ptr)
324 {
325 return __set_bit(nr ^ (BITS_PER_LONG - 1), ptr);
326 }
327
328 static inline void __clear_bit_inv(unsigned long nr, volatile unsigned long *ptr)
329 {
330 return __clear_bit(nr ^ (BITS_PER_LONG - 1), ptr);
331 }
332
333 static inline int test_bit_inv(unsigned long nr,
334 const volatile unsigned long *ptr)
335 {
336 return test_bit(nr ^ (BITS_PER_LONG - 1), ptr);
337 }
338
339 #ifdef CONFIG_HAVE_MARCH_Z9_109_FEATURES
340
/**
 * __flogr - find leftmost one
 * @word - The word to search
 *
 * Returns the bit number of the most significant bit set,
 * where the most significant bit has bit number 0.
 * If no bit is set this function returns 64.
 */
static inline unsigned char __flogr(unsigned long word)
{
	if (__builtin_constant_p(word)) {
		/*
		 * Constant input: compute the result at compile time via
		 * a 6-step binary search instead of emitting flogr.
		 */
		unsigned long bit = 0;

		if (!word)
			return 64;
		if (!(word & 0xffffffff00000000UL)) {
			word <<= 32;
			bit += 32;
		}
		if (!(word & 0xffff000000000000UL)) {
			word <<= 16;
			bit += 16;
		}
		if (!(word & 0xff00000000000000UL)) {
			word <<= 8;
			bit += 8;
		}
		if (!(word & 0xf000000000000000UL)) {
			word <<= 4;
			bit += 4;
		}
		if (!(word & 0xc000000000000000UL)) {
			word <<= 2;
			bit += 2;
		}
		if (!(word & 0x8000000000000000UL)) {
			word <<= 1;
			bit += 1;
		}
		return bit;
	} else {
		/*
		 * FIND LEFTMOST ONE writes an even/odd register pair:
		 * the bit number lands in r4; r5 is also written by the
		 * instruction and must be declared so it is not reused.
		 */
		register unsigned long bit asm("4") = word;
		register unsigned long out asm("5");

		asm volatile(
			" flogr %[bit],%[bit]\n"
			: [bit] "+d" (bit), [out] "=d" (out) : : "cc");
		return bit;
	}
}
391
392 /**
393 * __ffs - find first bit in word.
394 * @word: The word to search
395 *
396 * Undefined if no bit exists, so code should check against 0 first.
397 */
398 static inline unsigned long __ffs(unsigned long word)
399 {
400 return __flogr(-word & word) ^ (BITS_PER_LONG - 1);
401 }
402
403 /**
404 * ffs - find first bit set
405 * @word: the word to search
406 *
407 * This is defined the same way as the libc and
408 * compiler builtin ffs routines (man ffs).
409 */
410 static inline int ffs(int word)
411 {
412 unsigned long mask = 2 * BITS_PER_LONG - 1;
413 unsigned int val = (unsigned int)word;
414
415 return (1 + (__flogr(-val & val) ^ (BITS_PER_LONG - 1))) & mask;
416 }
417
418 /**
419 * __fls - find last (most-significant) set bit in a long word
420 * @word: the word to search
421 *
422 * Undefined if no set bit exists, so code should check against 0 first.
423 */
424 static inline unsigned long __fls(unsigned long word)
425 {
426 return __flogr(word) ^ (BITS_PER_LONG - 1);
427 }
428
429 /**
430 * fls64 - find last set bit in a 64-bit word
431 * @word: the word to search
432 *
433 * This is defined in a similar way as the libc and compiler builtin
434 * ffsll, but returns the position of the most significant set bit.
435 *
436 * fls64(value) returns 0 if value is 0 or the position of the last
437 * set bit if value is nonzero. The last (most significant) bit is
438 * at position 64.
439 */
440 static inline int fls64(unsigned long word)
441 {
442 unsigned long mask = 2 * BITS_PER_LONG - 1;
443
444 return (1 + (__flogr(word) ^ (BITS_PER_LONG - 1))) & mask;
445 }
446
/**
 * fls - find last (most-significant) bit set
 * @word: the word to search
 *
 * This is defined the same way as ffs.
 * Note fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32.
 */
static inline int fls(int word)
{
	unsigned int val = (unsigned int)word;	/* avoid sign extension */

	return fls64(val);
}
458
459 #else /* CONFIG_HAVE_MARCH_Z9_109_FEATURES */
460
461 #include <asm-generic/bitops/__ffs.h>
462 #include <asm-generic/bitops/ffs.h>
463 #include <asm-generic/bitops/__fls.h>
464 #include <asm-generic/bitops/fls.h>
465 #include <asm-generic/bitops/fls64.h>
466
467 #endif /* CONFIG_HAVE_MARCH_Z9_109_FEATURES */
468
469 #include <asm-generic/bitops/ffz.h>
470 #include <asm-generic/bitops/find.h>
471 #include <asm-generic/bitops/hweight.h>
472 #include <asm-generic/bitops/lock.h>
473 #include <asm-generic/bitops/sched.h>
474 #include <asm-generic/bitops/le.h>
475 #include <asm-generic/bitops/ext2-atomic-setbit.h>
476
477 #endif /* _S390_BITOPS_H */
This page took 0.048134 seconds and 5 git commands to generate.