Commit | Line | Data |
---|---|---|
1da177e4 | 1 | /* |
1da177e4 | 2 | * S390 version |
a53c8fab | 3 | * Copyright IBM Corp. 1999 |
1da177e4 LT |
4 | * Author(s): Martin Schwidefsky (schwidefsky@de.ibm.com) |
5 | * | |
6 | * Derived from "include/asm-i386/bitops.h" | |
7 | * Copyright (C) 1992, Linus Torvalds | |
8 | * | |
9 | */ | |
c406abd3 | 10 | |
a53c8fab HC |
11 | #ifndef _S390_BITOPS_H |
12 | #define _S390_BITOPS_H | |
13 | ||
0624517d JS |
14 | #ifndef _LINUX_BITOPS_H |
15 | #error only <linux/bitops.h> can be included directly | |
16 | #endif | |
17 | ||
370b0b5f | 18 | #include <linux/typecheck.h> |
1da177e4 LT |
19 | #include <linux/compiler.h> |
20 | ||
21 | /* | |
22 | * 32 bit bitops format: | |
23 | * bit 0 is the LSB of *addr; bit 31 is the MSB of *addr; | |
24 | * bit 32 is the LSB of *(addr+4). That combined with the | |
25 | * big endian byte order on S390 give the following bit | |
26 | * order in memory: | |
27 | * 1f 1e 1d 1c 1b 1a 19 18 17 16 15 14 13 12 11 10 \ | |
28 | * 0f 0e 0d 0c 0b 0a 09 08 07 06 05 04 03 02 01 00 | |
29 | * after that follows the next long with bit numbers | |
30 | * 3f 3e 3d 3c 3b 3a 39 38 37 36 35 34 33 32 31 30 | |
31 | * 2f 2e 2d 2c 2b 2a 29 28 27 26 25 24 23 22 21 20 | |
32 | * The reason for this bit ordering is the fact that | |
33 | * in the architecture independent code bits operations | |
34 | * of the form "flags |= (1 << bitnr)" are used INTERMIXED | |
35 | * with operation of the form "set_bit(bitnr, flags)". | |
36 | * | |
37 | * 64 bit bitops format: | |
38 | * bit 0 is the LSB of *addr; bit 63 is the MSB of *addr; | |
39 | * bit 64 is the LSB of *(addr+8). That combined with the | |
40 | * big endian byte order on S390 give the following bit | |
41 | * order in memory: | |
42 | * 3f 3e 3d 3c 3b 3a 39 38 37 36 35 34 33 32 31 30 | |
43 | * 2f 2e 2d 2c 2b 2a 29 28 27 26 25 24 23 22 21 20 | |
44 | * 1f 1e 1d 1c 1b 1a 19 18 17 16 15 14 13 12 11 10 | |
45 | * 0f 0e 0d 0c 0b 0a 09 08 07 06 05 04 03 02 01 00 | |
46 | * after that follows the next long with bit numbers | |
47 | * 7f 7e 7d 7c 7b 7a 79 78 77 76 75 74 73 72 71 70 | |
48 | * 6f 6e 6d 6c 6b 6a 69 68 67 66 65 64 63 62 61 60 | |
49 | * 5f 5e 5d 5c 5b 5a 59 58 57 56 55 54 53 52 51 50 | |
50 | * 4f 4e 4d 4c 4b 4a 49 48 47 46 45 44 43 42 41 40 | |
51 | * The reason for this bit ordering is the fact that | |
52 | * in the architecture independent code bits operations | |
53 | * of the form "flags |= (1 << bitnr)" are used INTERMIXED | |
54 | * with operation of the form "set_bit(bitnr, flags)". | |
55 | */ | |
56 | ||
e3dd9c2d | 57 | /* bitmap tables from arch/s390/kernel/bitmap.c */ |
1da177e4 LT |
58 | extern const char _zb_findmap[]; |
59 | extern const char _sb_findmap[]; | |
60 | ||
/*
 * Atomic read-modify-write primitive used by the bit operations below.
 * __BITOPS_LOOP(addr, val, op) applies the machine instruction named by
 * op to *addr with operand val and yields the previous value of *addr.
 * Three variants:
 *  - 31 bit:            compare-and-swap (cs) retry loop
 *  - 64 bit, z196+:     single interlocked-access instruction (laog/...)
 *  - 64 bit, pre-z196:  compare-and-swap (csg) retry loop
 */
#ifndef CONFIG_64BIT

#define __BITOPS_OR		"or"
#define __BITOPS_AND		"nr"
#define __BITOPS_XOR		"xr"

#define __BITOPS_LOOP(__addr, __val, __op_string)		\
({								\
	unsigned long __old, __new;				\
								\
	typecheck(unsigned long *, (__addr));			\
	asm volatile(						\
		"	l	%0,%2\n"			\
		"0:	lr	%1,%0\n"			\
		__op_string "	%1,%3\n"			\
		"	cs	%0,%1,%2\n"			\
		"	jl	0b"				\
		: "=&d" (__old), "=&d" (__new), "+Q" (*(__addr))\
		: "d" (__val)					\
		: "cc");					\
	__old;							\
})

#else /* CONFIG_64BIT */

#ifdef CONFIG_HAVE_MARCH_Z196_FEATURES

#define __BITOPS_OR		"laog"
#define __BITOPS_AND		"lang"
#define __BITOPS_XOR		"laxg"

#define __BITOPS_LOOP(__addr, __val, __op_string)		\
({								\
	unsigned long __old;					\
								\
	typecheck(unsigned long *, (__addr));			\
	asm volatile(						\
		__op_string "	%0,%2,%1\n"			\
		: "=d" (__old), "+Q" (*(__addr))		\
		: "d" (__val)					\
		: "cc");					\
	__old;						 	\
})

#else /* CONFIG_HAVE_MARCH_Z196_FEATURES */

#define __BITOPS_OR		"ogr"
#define __BITOPS_AND		"ngr"
#define __BITOPS_XOR		"xgr"

#define __BITOPS_LOOP(__addr, __val, __op_string)		\
({								\
	unsigned long __old, __new;				\
								\
	typecheck(unsigned long *, (__addr));			\
	asm volatile(						\
		"	lg	%0,%2\n"			\
		"0:	lgr	%1,%0\n"			\
		__op_string "	%1,%3\n"			\
		"	csg	%0,%1,%2\n"			\
		"	jl	0b"				\
		: "=&d" (__old), "=&d" (__new), "+Q" (*(__addr))\
		: "d" (__val)					\
		: "cc");					\
	__old;							\
})

#endif /* CONFIG_HAVE_MARCH_Z196_FEATURES */

#endif /* CONFIG_64BIT */

/* number of unsigned longs needed to hold "bits" bits, rounded up */
#define __BITOPS_WORDS(bits) (((bits) + BITS_PER_LONG - 1) / BITS_PER_LONG)
1da177e4 | 133 | |
370b0b5f HC |
134 | static inline unsigned long * |
135 | __bitops_word(unsigned long nr, volatile unsigned long *ptr) | |
136 | { | |
137 | unsigned long addr; | |
138 | ||
139 | addr = (unsigned long)ptr + ((nr ^ (nr & (BITS_PER_LONG - 1))) >> 3); | |
140 | return (unsigned long *)addr; | |
141 | } | |
142 | ||
143 | static inline unsigned char * | |
144 | __bitops_byte(unsigned long nr, volatile unsigned long *ptr) | |
145 | { | |
146 | return ((unsigned char *)ptr) + ((nr ^ (BITS_PER_LONG - 8)) >> 3); | |
147 | } | |
148 | ||
149 | static inline void set_bit(unsigned long nr, volatile unsigned long *ptr) | |
1da177e4 | 150 | { |
370b0b5f HC |
151 | unsigned long *addr = __bitops_word(nr, ptr); |
152 | unsigned long mask; | |
1da177e4 | 153 | |
4ae80325 HC |
154 | #ifdef CONFIG_HAVE_MARCH_ZEC12_FEATURES |
155 | if (__builtin_constant_p(nr)) { | |
156 | unsigned char *caddr = __bitops_byte(nr, ptr); | |
157 | ||
158 | asm volatile( | |
159 | "oi %0,%b1\n" | |
160 | : "+Q" (*caddr) | |
161 | : "i" (1 << (nr & 7)) | |
162 | : "cc"); | |
163 | return; | |
164 | } | |
165 | #endif | |
01c2475f | 166 | mask = 1UL << (nr & (BITS_PER_LONG - 1)); |
e344e52c | 167 | __BITOPS_LOOP(addr, mask, __BITOPS_OR); |
1da177e4 LT |
168 | } |
169 | ||
370b0b5f | 170 | static inline void clear_bit(unsigned long nr, volatile unsigned long *ptr) |
1da177e4 | 171 | { |
370b0b5f HC |
172 | unsigned long *addr = __bitops_word(nr, ptr); |
173 | unsigned long mask; | |
1da177e4 | 174 | |
4ae80325 HC |
175 | #ifdef CONFIG_HAVE_MARCH_ZEC12_FEATURES |
176 | if (__builtin_constant_p(nr)) { | |
177 | unsigned char *caddr = __bitops_byte(nr, ptr); | |
178 | ||
179 | asm volatile( | |
180 | "ni %0,%b1\n" | |
181 | : "+Q" (*caddr) | |
182 | : "i" (~(1 << (nr & 7))) | |
183 | : "cc"); | |
184 | return; | |
185 | } | |
186 | #endif | |
01c2475f | 187 | mask = ~(1UL << (nr & (BITS_PER_LONG - 1))); |
e344e52c | 188 | __BITOPS_LOOP(addr, mask, __BITOPS_AND); |
1da177e4 LT |
189 | } |
190 | ||
370b0b5f | 191 | static inline void change_bit(unsigned long nr, volatile unsigned long *ptr) |
1da177e4 | 192 | { |
370b0b5f HC |
193 | unsigned long *addr = __bitops_word(nr, ptr); |
194 | unsigned long mask; | |
1da177e4 | 195 | |
4ae80325 HC |
196 | #ifdef CONFIG_HAVE_MARCH_ZEC12_FEATURES |
197 | if (__builtin_constant_p(nr)) { | |
198 | unsigned char *caddr = __bitops_byte(nr, ptr); | |
199 | ||
200 | asm volatile( | |
201 | "xi %0,%b1\n" | |
202 | : "+Q" (*caddr) | |
203 | : "i" (1 << (nr & 7)) | |
204 | : "cc"); | |
205 | return; | |
206 | } | |
207 | #endif | |
01c2475f | 208 | mask = 1UL << (nr & (BITS_PER_LONG - 1)); |
e344e52c | 209 | __BITOPS_LOOP(addr, mask, __BITOPS_XOR); |
1da177e4 LT |
210 | } |
211 | ||
1da177e4 | 212 | static inline int |
370b0b5f | 213 | test_and_set_bit(unsigned long nr, volatile unsigned long *ptr) |
1da177e4 | 214 | { |
370b0b5f HC |
215 | unsigned long *addr = __bitops_word(nr, ptr); |
216 | unsigned long old, mask; | |
1da177e4 | 217 | |
01c2475f | 218 | mask = 1UL << (nr & (BITS_PER_LONG - 1)); |
e344e52c | 219 | old = __BITOPS_LOOP(addr, mask, __BITOPS_OR); |
5294ee00 | 220 | barrier(); |
1da177e4 LT |
221 | return (old & mask) != 0; |
222 | } | |
223 | ||
1da177e4 | 224 | static inline int |
370b0b5f | 225 | test_and_clear_bit(unsigned long nr, volatile unsigned long *ptr) |
1da177e4 | 226 | { |
370b0b5f HC |
227 | unsigned long *addr = __bitops_word(nr, ptr); |
228 | unsigned long old, mask; | |
1da177e4 | 229 | |
01c2475f | 230 | mask = ~(1UL << (nr & (BITS_PER_LONG - 1))); |
e344e52c | 231 | old = __BITOPS_LOOP(addr, mask, __BITOPS_AND); |
5294ee00 | 232 | barrier(); |
e344e52c | 233 | return (old & ~mask) != 0; |
1da177e4 LT |
234 | } |
235 | ||
1da177e4 | 236 | static inline int |
370b0b5f | 237 | test_and_change_bit(unsigned long nr, volatile unsigned long *ptr) |
1da177e4 | 238 | { |
370b0b5f HC |
239 | unsigned long *addr = __bitops_word(nr, ptr); |
240 | unsigned long old, mask; | |
1da177e4 | 241 | |
01c2475f | 242 | mask = 1UL << (nr & (BITS_PER_LONG - 1)); |
e344e52c | 243 | old = __BITOPS_LOOP(addr, mask, __BITOPS_XOR); |
5294ee00 | 244 | barrier(); |
1da177e4 LT |
245 | return (old & mask) != 0; |
246 | } | |
1da177e4 | 247 | |
1da177e4 LT |
/* Non-atomic variant of set_bit: plain byte-wide read-modify-write. */
static inline void __set_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned char *byte = __bitops_byte(nr, ptr);

	*byte |= 1 << (nr & 7);
}
254 | ||
1da177e4 LT |
/* Non-atomic variant of clear_bit: plain byte-wide read-modify-write. */
static inline void
__clear_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned char *byte = __bitops_byte(nr, ptr);

	*byte &= ~(1 << (nr & 7));
}
262 | ||
1da177e4 LT |
/* Non-atomic variant of change_bit: plain byte-wide read-modify-write. */
static inline void __change_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned char *byte = __bitops_byte(nr, ptr);

	*byte ^= 1 << (nr & 7);
}
269 | ||
/* Non-atomic test_and_set_bit working on the containing byte. */
static inline int
__test_and_set_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned char *byte = __bitops_byte(nr, ptr);
	unsigned char old = *byte;

	*byte |= 1 << (nr & 7);
	return (old >> (nr & 7)) & 1;
}
1da177e4 | 280 | |
/* Non-atomic test_and_clear_bit working on the containing byte. */
static inline int
__test_and_clear_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned char *byte = __bitops_byte(nr, ptr);
	unsigned char old = *byte;

	*byte &= ~(1 << (nr & 7));
	return (old >> (nr & 7)) & 1;
}
1da177e4 | 291 | |
/* Non-atomic test_and_change_bit working on the containing byte. */
static inline int
__test_and_change_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned char *byte = __bitops_byte(nr, ptr);
	unsigned char old = *byte;

	*byte ^= 1 << (nr & 7);
	return (old >> (nr & 7)) & 1;
}
1da177e4 | 302 | |
370b0b5f | 303 | static inline int test_bit(unsigned long nr, const volatile unsigned long *ptr) |
1da177e4 | 304 | { |
370b0b5f | 305 | const volatile unsigned char *addr; |
1da177e4 | 306 | |
370b0b5f HC |
307 | addr = ((const volatile unsigned char *)ptr); |
308 | addr += (nr ^ (BITS_PER_LONG - 8)) >> 3; | |
309 | return (*addr >> (nr & 7)) & 1; | |
1da177e4 LT |
310 | } |
311 | ||
afff7e2b | 312 | /* |
0abbf05c | 313 | * Optimized find bit helper functions. |
afff7e2b | 314 | */ |
0abbf05c MS |
315 | |
316 | /** | |
317 | * __ffz_word_loop - find byte offset of first long != -1UL | |
318 | * @addr: pointer to array of unsigned long | |
319 | * @size: size of the array in bits | |
320 | */ | |
321 | static inline unsigned long __ffz_word_loop(const unsigned long *addr, | |
322 | unsigned long size) | |
323 | { | |
324 | typedef struct { long _[__BITOPS_WORDS(size)]; } addrtype; | |
325 | unsigned long bytes = 0; | |
326 | ||
327 | asm volatile( | |
f4815ac6 | 328 | #ifndef CONFIG_64BIT |
a22fb7ff MS |
329 | " ahi %1,-1\n" |
330 | " sra %1,5\n" | |
331 | " jz 1f\n" | |
0abbf05c MS |
332 | "0: c %2,0(%0,%3)\n" |
333 | " jne 1f\n" | |
334 | " la %0,4(%0)\n" | |
335 | " brct %1,0b\n" | |
336 | "1:\n" | |
337 | #else | |
a22fb7ff MS |
338 | " aghi %1,-1\n" |
339 | " srag %1,%1,6\n" | |
340 | " jz 1f\n" | |
0abbf05c MS |
341 | "0: cg %2,0(%0,%3)\n" |
342 | " jne 1f\n" | |
343 | " la %0,8(%0)\n" | |
344 | " brct %1,0b\n" | |
345 | "1:\n" | |
346 | #endif | |
6d88f827 | 347 | : "+&a" (bytes), "+&d" (size) |
0abbf05c MS |
348 | : "d" (-1UL), "a" (addr), "m" (*(addrtype *) addr) |
349 | : "cc" ); | |
350 | return bytes; | |
351 | } | |
352 | ||
353 | /** | |
354 | * __ffs_word_loop - find byte offset of first long != 0UL | |
355 | * @addr: pointer to array of unsigned long | |
356 | * @size: size of the array in bits | |
357 | */ | |
358 | static inline unsigned long __ffs_word_loop(const unsigned long *addr, | |
359 | unsigned long size) | |
afff7e2b | 360 | { |
0abbf05c MS |
361 | typedef struct { long _[__BITOPS_WORDS(size)]; } addrtype; |
362 | unsigned long bytes = 0; | |
afff7e2b | 363 | |
0abbf05c | 364 | asm volatile( |
f4815ac6 | 365 | #ifndef CONFIG_64BIT |
a22fb7ff MS |
366 | " ahi %1,-1\n" |
367 | " sra %1,5\n" | |
368 | " jz 1f\n" | |
0abbf05c MS |
369 | "0: c %2,0(%0,%3)\n" |
370 | " jne 1f\n" | |
371 | " la %0,4(%0)\n" | |
372 | " brct %1,0b\n" | |
373 | "1:\n" | |
374 | #else | |
a22fb7ff MS |
375 | " aghi %1,-1\n" |
376 | " srag %1,%1,6\n" | |
377 | " jz 1f\n" | |
0abbf05c MS |
378 | "0: cg %2,0(%0,%3)\n" |
379 | " jne 1f\n" | |
380 | " la %0,8(%0)\n" | |
381 | " brct %1,0b\n" | |
382 | "1:\n" | |
383 | #endif | |
6d88f827 | 384 | : "+&a" (bytes), "+&a" (size) |
0abbf05c MS |
385 | : "d" (0UL), "a" (addr), "m" (*(addrtype *) addr) |
386 | : "cc" ); | |
387 | return bytes; | |
388 | } | |
389 | ||
390 | /** | |
391 | * __ffz_word - add number of the first unset bit | |
392 | * @nr: base value the bit number is added to | |
393 | * @word: the word that is searched for unset bits | |
394 | */ | |
395 | static inline unsigned long __ffz_word(unsigned long nr, unsigned long word) | |
396 | { | |
f4815ac6 | 397 | #ifdef CONFIG_64BIT |
e13ed9b2 | 398 | if ((word & 0xffffffff) == 0xffffffff) { |
afff7e2b | 399 | word >>= 32; |
0abbf05c | 400 | nr += 32; |
afff7e2b MS |
401 | } |
402 | #endif | |
e13ed9b2 | 403 | if ((word & 0xffff) == 0xffff) { |
afff7e2b | 404 | word >>= 16; |
0abbf05c | 405 | nr += 16; |
afff7e2b | 406 | } |
e13ed9b2 | 407 | if ((word & 0xff) == 0xff) { |
afff7e2b | 408 | word >>= 8; |
0abbf05c | 409 | nr += 8; |
afff7e2b | 410 | } |
0abbf05c | 411 | return nr + _zb_findmap[(unsigned char) word]; |
afff7e2b MS |
412 | } |
413 | ||
0abbf05c MS |
414 | /** |
415 | * __ffs_word - add number of the first set bit | |
416 | * @nr: base value the bit number is added to | |
417 | * @word: the word that is searched for set bits | |
afff7e2b | 418 | */ |
0abbf05c | 419 | static inline unsigned long __ffs_word(unsigned long nr, unsigned long word) |
afff7e2b | 420 | { |
f4815ac6 | 421 | #ifdef CONFIG_64BIT |
e13ed9b2 | 422 | if ((word & 0xffffffff) == 0) { |
afff7e2b | 423 | word >>= 32; |
0abbf05c | 424 | nr += 32; |
afff7e2b MS |
425 | } |
426 | #endif | |
e13ed9b2 | 427 | if ((word & 0xffff) == 0) { |
afff7e2b | 428 | word >>= 16; |
0abbf05c | 429 | nr += 16; |
afff7e2b | 430 | } |
e13ed9b2 | 431 | if ((word & 0xff) == 0) { |
afff7e2b | 432 | word >>= 8; |
0abbf05c | 433 | nr += 8; |
afff7e2b | 434 | } |
0abbf05c | 435 | return nr + _sb_findmap[(unsigned char) word]; |
afff7e2b | 436 | } |
1da177e4 | 437 | |
afff7e2b | 438 | |
0abbf05c MS |
/**
 * __load_ulong_be - load big endian unsigned long
 * @p: pointer to array of unsigned long
 * @offset: byte offset of source value in the array
 *
 * Native load; s390 stores big endian, so no byte swapping is needed.
 */
static inline unsigned long __load_ulong_be(const unsigned long *p,
					    unsigned long offset)
{
	const unsigned char *base = (const unsigned char *)p;

	return *(const unsigned long *)(base + offset);
}
afff7e2b | 450 | |
0abbf05c MS |
451 | /** |
452 | * __load_ulong_le - load little endian unsigned long | |
453 | * @p: pointer to array of unsigned long | |
454 | * @offset: byte offset of source value in the array | |
455 | */ | |
456 | static inline unsigned long __load_ulong_le(const unsigned long *p, | |
457 | unsigned long offset) | |
1da177e4 | 458 | { |
0abbf05c | 459 | unsigned long word; |
1da177e4 | 460 | |
0abbf05c | 461 | p = (unsigned long *)((unsigned long) p + offset); |
f4815ac6 | 462 | #ifndef CONFIG_64BIT |
94c12cc7 | 463 | asm volatile( |
987bcdac MS |
464 | " ic %0,%O1(%R1)\n" |
465 | " icm %0,2,%O1+1(%R1)\n" | |
466 | " icm %0,4,%O1+2(%R1)\n" | |
467 | " icm %0,8,%O1+3(%R1)" | |
468 | : "=&d" (word) : "Q" (*p) : "cc"); | |
0abbf05c MS |
469 | #else |
470 | asm volatile( | |
471 | " lrvg %0,%1" | |
472 | : "=d" (word) : "m" (*p) ); | |
473 | #endif | |
474 | return word; | |
1da177e4 LT |
475 | } |
476 | ||
0abbf05c MS |
/*
 * The various find bit functions.
 */

/*
 * ffz - find first zero in word.
 * @word: The word to search
 *
 * Undefined if no zero exists, so code should check against ~0UL first.
 */
static inline unsigned long ffz(unsigned long word)
{
	return __ffz_word(0, word);
}
1da177e4 | 491 | |
0abbf05c MS |
/**
 * __ffs - find first bit in word.
 * @word: The word to search
 *
 * Undefined if no bit exists, so code should check against 0 first.
 */
static inline unsigned long __ffs(unsigned long word)
{
	return __ffs_word(0, word);
}
502 | ||
0abbf05c MS |
/**
 * ffs - find first bit set
 * @x: the word to search
 *
 * Same convention as the libc and compiler builtin ffs routines
 * (1-based result, 0 for no bits set), therefore differs in spirit
 * from the above ffz (man ffs).
 */
static inline int ffs(int x)
{
	return x ? __ffs_word(1, x) : 0;
}
1da177e4 | 517 | |
0abbf05c MS |
/**
 * find_first_zero_bit - find the first zero bit in a memory region
 * @addr: The address to start the search at
 * @size: The maximum size to search
 *
 * Returns the bit-number of the first zero bit (not a byte number),
 * or @size if every bit is set.
 */
static inline unsigned long find_first_zero_bit(const unsigned long *addr,
						unsigned long size)
{
	unsigned long bytes, bits;

	if (!size)
		return 0;
	bytes = __ffz_word_loop(addr, size);
	bits = __ffz_word(bytes * 8, __load_ulong_be(addr, bytes));
	return bits < size ? bits : size;
}
#define find_first_zero_bit find_first_zero_bit
0abbf05c MS |
538 | |
/**
 * find_first_bit - find the first set bit in a memory region
 * @addr: The address to start the search at
 * @size: The maximum size to search
 *
 * Returns the bit-number of the first set bit (not a byte number),
 * or @size if no bit is set.
 */
static inline unsigned long find_first_bit(const unsigned long *addr,
					   unsigned long size)
{
	unsigned long bytes, bits;

	if (!size)
		return 0;
	bytes = __ffs_word_loop(addr, size);
	bits = __ffs_word(bytes * 8, __load_ulong_be(addr, bytes));
	return bits < size ? bits : size;
}
#define find_first_bit find_first_bit
1da177e4 | 559 | |
e56e4e87 JG |
560 | /* |
561 | * Big endian variant whichs starts bit counting from left using | |
562 | * the flogr (find leftmost one) instruction. | |
563 | */ | |
564 | static inline unsigned long __flo_word(unsigned long nr, unsigned long val) | |
565 | { | |
566 | register unsigned long bit asm("2") = val; | |
567 | register unsigned long out asm("3"); | |
568 | ||
569 | asm volatile ( | |
570 | " .insn rre,0xb9830000,%[bit],%[bit]\n" | |
571 | : [bit] "+d" (bit), [out] "=d" (out) : : "cc"); | |
572 | return nr + bit; | |
573 | } | |
574 | ||
/*
 * 64 bit special left bitops format:
 * order in memory:
 *    00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f
 *    10 11 12 13 14 15 16 17 18 19 1a 1b 1c 1d 1e 1f
 *    20 21 22 23 24 25 26 27 28 29 2a 2b 2c 2d 2e 2f
 *    30 31 32 33 34 35 36 37 38 39 3a 3b 3c 3d 3e 3f
 * after that follows the next long with bit numbers
 *    40 41 42 43 44 45 46 47 48 49 4a 4b 4c 4d 4e 4f
 *    50 51 52 53 54 55 56 57 58 59 5a 5b 5c 5d 5e 5f
 *    60 61 62 63 64 65 66 67 68 69 6a 6b 6c 6d 6e 6f
 *    70 71 72 73 74 75 76 77 78 79 7a 7b 7c 7d 7e 7f
 * The reason for this bit ordering is the fact that
 * the hardware sets bits in a bitmap starting at bit 0
 * and we don't want to scan the bitmap from the 'wrong
 * end'.
 */
static inline unsigned long find_first_bit_left(const unsigned long *addr,
						unsigned long size)
{
	unsigned long bytes, bits;

	if (!size)
		return 0;
	bytes = __ffs_word_loop(addr, size);
	bits = __flo_word(bytes * 8, __load_ulong_be(addr, bytes));
	return bits < size ? bits : size;
}
603 | ||
604 | static inline int find_next_bit_left(const unsigned long *addr, | |
605 | unsigned long size, | |
606 | unsigned long offset) | |
607 | { | |
608 | const unsigned long *p; | |
609 | unsigned long bit, set; | |
610 | ||
611 | if (offset >= size) | |
612 | return size; | |
01c2475f | 613 | bit = offset & (BITS_PER_LONG - 1); |
e56e4e87 JG |
614 | offset -= bit; |
615 | size -= offset; | |
01c2475f | 616 | p = addr + offset / BITS_PER_LONG; |
e56e4e87 | 617 | if (bit) { |
3b0040a4 | 618 | set = __flo_word(0, *p & (~0UL >> bit)); |
e56e4e87 JG |
619 | if (set >= size) |
620 | return size + offset; | |
01c2475f | 621 | if (set < BITS_PER_LONG) |
e56e4e87 | 622 | return set + offset; |
01c2475f AM |
623 | offset += BITS_PER_LONG; |
624 | size -= BITS_PER_LONG; | |
e56e4e87 JG |
625 | p++; |
626 | } | |
627 | return offset + find_first_bit_left(p, size); | |
628 | } | |
629 | ||
/* Iterate over all set bits of the left-ordered bitmap. */
#define for_each_set_bit_left(bit, addr, size)				\
	for ((bit) = find_first_bit_left((addr), (size));		\
	     (bit) < (size);						\
	     (bit) = find_next_bit_left((addr), (size), (bit) + 1))

/* same as for_each_set_bit() but use bit as value to start with */
#define for_each_set_bit_left_cont(bit, addr, size)			\
	for ((bit) = find_next_bit_left((addr), (size), (bit));		\
	     (bit) < (size);						\
	     (bit) = find_next_bit_left((addr), (size), (bit) + 1))
640 | ||
0abbf05c MS |
641 | /** |
642 | * find_next_zero_bit - find the first zero bit in a memory region | |
643 | * @addr: The address to base the search on | |
644 | * @offset: The bitnumber to start searching at | |
645 | * @size: The maximum size to search | |
646 | */ | |
647 | static inline int find_next_zero_bit (const unsigned long * addr, | |
648 | unsigned long size, | |
649 | unsigned long offset) | |
1da177e4 | 650 | { |
afff7e2b MS |
651 | const unsigned long *p; |
652 | unsigned long bit, set; | |
653 | ||
654 | if (offset >= size) | |
655 | return size; | |
01c2475f | 656 | bit = offset & (BITS_PER_LONG - 1); |
afff7e2b MS |
657 | offset -= bit; |
658 | size -= offset; | |
01c2475f | 659 | p = addr + offset / BITS_PER_LONG; |
afff7e2b MS |
660 | if (bit) { |
661 | /* | |
01c2475f | 662 | * __ffz_word returns BITS_PER_LONG |
afff7e2b MS |
663 | * if no zero bit is present in the word. |
664 | */ | |
152382af | 665 | set = __ffz_word(bit, *p >> bit); |
afff7e2b MS |
666 | if (set >= size) |
667 | return size + offset; | |
01c2475f | 668 | if (set < BITS_PER_LONG) |
afff7e2b | 669 | return set + offset; |
01c2475f AM |
670 | offset += BITS_PER_LONG; |
671 | size -= BITS_PER_LONG; | |
afff7e2b | 672 | p++; |
1da177e4 | 673 | } |
afff7e2b | 674 | return offset + find_first_zero_bit(p, size); |
1da177e4 | 675 | } |
a2812e17 | 676 | #define find_next_zero_bit find_next_zero_bit |
1da177e4 | 677 | |
0abbf05c MS |
678 | /** |
679 | * find_next_bit - find the first set bit in a memory region | |
680 | * @addr: The address to base the search on | |
681 | * @offset: The bitnumber to start searching at | |
682 | * @size: The maximum size to search | |
683 | */ | |
684 | static inline int find_next_bit (const unsigned long * addr, | |
685 | unsigned long size, | |
686 | unsigned long offset) | |
1da177e4 | 687 | { |
afff7e2b MS |
688 | const unsigned long *p; |
689 | unsigned long bit, set; | |
690 | ||
691 | if (offset >= size) | |
692 | return size; | |
01c2475f | 693 | bit = offset & (BITS_PER_LONG - 1); |
afff7e2b MS |
694 | offset -= bit; |
695 | size -= offset; | |
01c2475f | 696 | p = addr + offset / BITS_PER_LONG; |
afff7e2b MS |
697 | if (bit) { |
698 | /* | |
01c2475f | 699 | * __ffs_word returns BITS_PER_LONG |
afff7e2b MS |
700 | * if no one bit is present in the word. |
701 | */ | |
0abbf05c | 702 | set = __ffs_word(0, *p & (~0UL << bit)); |
afff7e2b MS |
703 | if (set >= size) |
704 | return size + offset; | |
01c2475f | 705 | if (set < BITS_PER_LONG) |
afff7e2b | 706 | return set + offset; |
01c2475f AM |
707 | offset += BITS_PER_LONG; |
708 | size -= BITS_PER_LONG; | |
afff7e2b | 709 | p++; |
1da177e4 | 710 | } |
afff7e2b | 711 | return offset + find_first_bit(p, size); |
1da177e4 | 712 | } |
a2812e17 | 713 | #define find_next_bit find_next_bit |
1da177e4 LT |
714 | |
/*
 * Every architecture must define this function. It's the fastest
 * way of searching a 140-bit bitmap where the first 100 bits are
 * unlikely to be set. It's guaranteed that at least one of the 140
 * bits is cleared.
 */
static inline int sched_find_first_bit(unsigned long *b)
{
	return find_first_bit(b, 140);
}
725 | ||
7e33db4e | 726 | #include <asm-generic/bitops/fls.h> |
56a6b1eb | 727 | #include <asm-generic/bitops/__fls.h> |
7e33db4e | 728 | #include <asm-generic/bitops/fls64.h> |
1da177e4 | 729 | |
7e33db4e | 730 | #include <asm-generic/bitops/hweight.h> |
26333576 | 731 | #include <asm-generic/bitops/lock.h> |
1da177e4 | 732 | |
1da177e4 LT |
/*
 * ATTENTION: intel byte ordering convention for ext2 and minix !!
 * bit 0 is the LSB of addr; bit 31 is the MSB of addr;
 * bit 32 is the LSB of (addr+4).
 * That combined with the little endian byte order of Intel gives the
 * following bit order in memory:
 *    07 06 05 04 03 02 01 00 15 14 13 12 11 10 09 08 \
 *    23 22 21 20 19 18 17 16 31 30 29 28 27 26 25 24
 */

static inline int find_first_zero_bit_le(void *vaddr, unsigned int size)
{
	unsigned long bytes, bits;

	if (!size)
		return 0;
	bytes = __ffz_word_loop(vaddr, size);
	bits = __ffz_word(bytes * 8, __load_ulong_le(vaddr, bytes));
	return bits < size ? bits : size;
}
#define find_first_zero_bit_le find_first_zero_bit_le
1da177e4 | 754 | |
50b9b475 | 755 | static inline int find_next_zero_bit_le(void *vaddr, unsigned long size, |
0abbf05c | 756 | unsigned long offset) |
1da177e4 | 757 | { |
afff7e2b | 758 | unsigned long *addr = vaddr, *p; |
0abbf05c | 759 | unsigned long bit, set; |
1da177e4 LT |
760 | |
761 | if (offset >= size) | |
762 | return size; | |
01c2475f | 763 | bit = offset & (BITS_PER_LONG - 1); |
afff7e2b MS |
764 | offset -= bit; |
765 | size -= offset; | |
01c2475f | 766 | p = addr + offset / BITS_PER_LONG; |
1da177e4 | 767 | if (bit) { |
afff7e2b | 768 | /* |
01c2475f | 769 | * s390 version of ffz returns BITS_PER_LONG |
afff7e2b MS |
770 | * if no zero bit is present in the word. |
771 | */ | |
152382af | 772 | set = __ffz_word(bit, __load_ulong_le(p, 0) >> bit); |
afff7e2b MS |
773 | if (set >= size) |
774 | return size + offset; | |
01c2475f | 775 | if (set < BITS_PER_LONG) |
afff7e2b | 776 | return set + offset; |
01c2475f AM |
777 | offset += BITS_PER_LONG; |
778 | size -= BITS_PER_LONG; | |
afff7e2b | 779 | p++; |
1da177e4 | 780 | } |
50b9b475 | 781 | return offset + find_first_zero_bit_le(p, size); |
1da177e4 | 782 | } |
a2812e17 | 783 | #define find_next_zero_bit_le find_next_zero_bit_le |
1da177e4 | 784 | |
/* Little endian variant of find_first_bit. */
static inline unsigned long find_first_bit_le(void *vaddr, unsigned long size)
{
	unsigned long bytes, bits;

	if (!size)
		return 0;
	bytes = __ffs_word_loop(vaddr, size);
	bits = __ffs_word(bytes * 8, __load_ulong_le(vaddr, bytes));
	return bits < size ? bits : size;
}
#define find_first_bit_le find_first_bit_le
67fe9251 | 796 | |
50b9b475 | 797 | static inline int find_next_bit_le(void *vaddr, unsigned long size, |
67fe9251 HC |
798 | unsigned long offset) |
799 | { | |
800 | unsigned long *addr = vaddr, *p; | |
801 | unsigned long bit, set; | |
802 | ||
803 | if (offset >= size) | |
804 | return size; | |
01c2475f | 805 | bit = offset & (BITS_PER_LONG - 1); |
67fe9251 HC |
806 | offset -= bit; |
807 | size -= offset; | |
01c2475f | 808 | p = addr + offset / BITS_PER_LONG; |
67fe9251 HC |
809 | if (bit) { |
810 | /* | |
01c2475f | 811 | * s390 version of ffz returns BITS_PER_LONG |
67fe9251 HC |
812 | * if no zero bit is present in the word. |
813 | */ | |
152382af | 814 | set = __ffs_word(0, __load_ulong_le(p, 0) & (~0UL << bit)); |
67fe9251 HC |
815 | if (set >= size) |
816 | return size + offset; | |
01c2475f | 817 | if (set < BITS_PER_LONG) |
67fe9251 | 818 | return set + offset; |
01c2475f AM |
819 | offset += BITS_PER_LONG; |
820 | size -= BITS_PER_LONG; | |
67fe9251 HC |
821 | p++; |
822 | } | |
50b9b475 AM |
823 | return offset + find_first_bit_le(p, size); |
824 | } | |
a2812e17 | 825 | #define find_next_bit_le find_next_bit_le |
50b9b475 | 826 | |
802caabb AM |
827 | #include <asm-generic/bitops/le.h> |
828 | ||
148817ba | 829 | #include <asm-generic/bitops/ext2-atomic-setbit.h> |
67fe9251 | 830 | |
1da177e4 | 831 | #endif /* _S390_BITOPS_H */ |