remove asm/bitops.h includes
[deliverable/linux.git] / include / asm-s390 / bitops.h
CommitLineData
1da177e4
LT
1#ifndef _S390_BITOPS_H
2#define _S390_BITOPS_H
3
4/*
5 * include/asm-s390/bitops.h
6 *
7 * S390 version
8 * Copyright (C) 1999 IBM Deutschland Entwicklung GmbH, IBM Corporation
9 * Author(s): Martin Schwidefsky (schwidefsky@de.ibm.com)
10 *
11 * Derived from "include/asm-i386/bitops.h"
12 * Copyright (C) 1992, Linus Torvalds
13 *
14 */
c406abd3
HC
15
16#ifdef __KERNEL__
17
1da177e4
LT
18#include <linux/compiler.h>
19
20/*
21 * 32 bit bitops format:
22 * bit 0 is the LSB of *addr; bit 31 is the MSB of *addr;
23 * bit 32 is the LSB of *(addr+4). That combined with the
24 * big endian byte order on S390 give the following bit
25 * order in memory:
26 * 1f 1e 1d 1c 1b 1a 19 18 17 16 15 14 13 12 11 10 \
27 * 0f 0e 0d 0c 0b 0a 09 08 07 06 05 04 03 02 01 00
28 * after that follows the next long with bit numbers
29 * 3f 3e 3d 3c 3b 3a 39 38 37 36 35 34 33 32 31 30
30 * 2f 2e 2d 2c 2b 2a 29 28 27 26 25 24 23 22 21 20
31 * The reason for this bit ordering is the fact that
32 * in the architecture independent code bits operations
33 * of the form "flags |= (1 << bitnr)" are used INTERMIXED
34 * with operation of the form "set_bit(bitnr, flags)".
35 *
36 * 64 bit bitops format:
37 * bit 0 is the LSB of *addr; bit 63 is the MSB of *addr;
38 * bit 64 is the LSB of *(addr+8). That combined with the
39 * big endian byte order on S390 give the following bit
40 * order in memory:
41 * 3f 3e 3d 3c 3b 3a 39 38 37 36 35 34 33 32 31 30
42 * 2f 2e 2d 2c 2b 2a 29 28 27 26 25 24 23 22 21 20
43 * 1f 1e 1d 1c 1b 1a 19 18 17 16 15 14 13 12 11 10
44 * 0f 0e 0d 0c 0b 0a 09 08 07 06 05 04 03 02 01 00
45 * after that follows the next long with bit numbers
46 * 7f 7e 7d 7c 7b 7a 79 78 77 76 75 74 73 72 71 70
47 * 6f 6e 6d 6c 6b 6a 69 68 67 66 65 64 63 62 61 60
48 * 5f 5e 5d 5c 5b 5a 59 58 57 56 55 54 53 52 51 50
49 * 4f 4e 4d 4c 4b 4a 49 48 47 46 45 44 43 42 41 40
50 * The reason for this bit ordering is the fact that
51 * in the architecture independent code bits operations
52 * of the form "flags |= (1 << bitnr)" are used INTERMIXED
53 * with operation of the form "set_bit(bitnr, flags)".
54 */
55
1da177e4
LT
56/* bitmap tables from arch/S390/kernel/bitmap.S */
57extern const char _oi_bitmap[];
58extern const char _ni_bitmap[];
59extern const char _zb_findmap[];
60extern const char _sb_findmap[];
61
#ifndef __s390x__

/* 31-bit mode: bit operations work on 32-bit words. */
#define __BITOPS_ALIGN 3
#define __BITOPS_WORDSIZE 32
#define __BITOPS_OR "or"
#define __BITOPS_AND "nr"
#define __BITOPS_XOR "xr"

#if __GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ > 2)

/*
 * Interlocked read-modify-write loop: load the word, apply __op_string
 * to a copy, then try to store it back with compare-and-swap (cs);
 * "jl 0b" retries as long as another CPU changed the word in between.
 * This variant uses the "Q" memory constraint available on gcc > 3.2.
 */
#define __BITOPS_LOOP(__old, __new, __addr, __val, __op_string) \
	asm volatile( \
		" l %0,%2\n" \
		"0: lr %1,%0\n" \
		__op_string " %1,%3\n" \
		" cs %0,%1,%2\n" \
		" jl 0b" \
		: "=&d" (__old), "=&d" (__new), \
		"=Q" (*(unsigned long *) __addr) \
		: "d" (__val), "Q" (*(unsigned long *) __addr) \
		: "cc");

#else /* __GNUC__ */

/* Same CS loop for older gcc: the address is passed in a register ("a"). */
#define __BITOPS_LOOP(__old, __new, __addr, __val, __op_string) \
	asm volatile( \
		" l %0,0(%4)\n" \
		"0: lr %1,%0\n" \
		__op_string " %1,%3\n" \
		" cs %0,%1,0(%4)\n" \
		" jl 0b" \
		: "=&d" (__old), "=&d" (__new), \
		"=m" (*(unsigned long *) __addr) \
		: "d" (__val), "a" (__addr), \
		"m" (*(unsigned long *) __addr) : "cc");

#endif /* __GNUC__ */

#else /* __s390x__ */

/* 64-bit mode: bit operations work on 64-bit words, using CSG. */
#define __BITOPS_ALIGN 7
#define __BITOPS_WORDSIZE 64
#define __BITOPS_OR "ogr"
#define __BITOPS_AND "ngr"
#define __BITOPS_XOR "xgr"

#if __GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ > 2)

/* 64-bit CS loop, "Q" constraint variant (gcc > 3.2). */
#define __BITOPS_LOOP(__old, __new, __addr, __val, __op_string) \
	asm volatile( \
		" lg %0,%2\n" \
		"0: lgr %1,%0\n" \
		__op_string " %1,%3\n" \
		" csg %0,%1,%2\n" \
		" jl 0b" \
		: "=&d" (__old), "=&d" (__new), \
		"=Q" (*(unsigned long *) __addr) \
		: "d" (__val), "Q" (*(unsigned long *) __addr) \
		: "cc");

#else /* __GNUC__ */

/* 64-bit CS loop for older gcc: address in a register. */
#define __BITOPS_LOOP(__old, __new, __addr, __val, __op_string) \
	asm volatile( \
		" lg %0,0(%4)\n" \
		"0: lgr %1,%0\n" \
		__op_string " %1,%3\n" \
		" csg %0,%1,0(%4)\n" \
		" jl 0b" \
		: "=&d" (__old), "=&d" (__new), \
		"=m" (*(unsigned long *) __addr) \
		: "d" (__val), "a" (__addr), \
		"m" (*(unsigned long *) __addr) : "cc");


#endif /* __GNUC__ */

#endif /* __s390x__ */

/* Number of words needed to hold a bitmap of 'bits' bits. */
#define __BITOPS_WORDS(bits) (((bits)+__BITOPS_WORDSIZE-1)/__BITOPS_WORDSIZE)
/* Compiler barrier: prevents the compiler from reordering memory accesses. */
#define __BITOPS_BARRIER() asm volatile("" : : : "memory")
1da177e4
LT
143
144#ifdef CONFIG_SMP
/*
 * SMP safe set_bit routine based on compare and swap (CS)
 */
static inline void set_bit_cs(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr, old, new, mask;

	addr = (unsigned long) ptr;
	/* calculate address for CS: byte offset of the word containing nr
	 * (nr with its in-word bit index masked out, divided by 8) */
	addr += (nr ^ (nr & (__BITOPS_WORDSIZE - 1))) >> 3;
	/* make OR mask for the bit within the word */
	mask = 1UL << (nr & (__BITOPS_WORDSIZE - 1));
	/* Do the atomic update. */
	__BITOPS_LOOP(old, new, addr, mask, __BITOPS_OR);
}
160
/*
 * SMP safe clear_bit routine based on compare and swap (CS)
 */
static inline void clear_bit_cs(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr, old, new, mask;

	addr = (unsigned long) ptr;
	/* calculate address for CS: byte offset of the word containing nr */
	addr += (nr ^ (nr & (__BITOPS_WORDSIZE - 1))) >> 3;
	/* make AND mask: all bits set except the one to clear */
	mask = ~(1UL << (nr & (__BITOPS_WORDSIZE - 1)));
	/* Do the atomic update. */
	__BITOPS_LOOP(old, new, addr, mask, __BITOPS_AND);
}
176
/*
 * SMP safe change_bit routine based on compare and swap (CS)
 */
static inline void change_bit_cs(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr, old, new, mask;

	addr = (unsigned long) ptr;
	/* calculate address for CS: byte offset of the word containing nr */
	addr += (nr ^ (nr & (__BITOPS_WORDSIZE - 1))) >> 3;
	/* make XOR mask for toggling the bit */
	mask = 1UL << (nr & (__BITOPS_WORDSIZE - 1));
	/* Do the atomic update. */
	__BITOPS_LOOP(old, new, addr, mask, __BITOPS_XOR);
}
192
/*
 * SMP safe test_and_set_bit routine based on compare and swap (CS).
 * Returns the previous value of the bit (nonzero if it was set).
 */
static inline int
test_and_set_bit_cs(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr, old, new, mask;

	addr = (unsigned long) ptr;
	/* calculate address for CS: byte offset of the word containing nr */
	addr += (nr ^ (nr & (__BITOPS_WORDSIZE - 1))) >> 3;
	/* make OR/test mask */
	mask = 1UL << (nr & (__BITOPS_WORDSIZE - 1));
	/* Do the atomic update. */
	__BITOPS_LOOP(old, new, addr, mask, __BITOPS_OR);
	/* Keep the compiler from moving memory accesses past the update. */
	__BITOPS_BARRIER();
	return (old & mask) != 0;
}
211
/*
 * SMP safe test_and_clear_bit routine based on compare and swap (CS).
 * Returns the previous value of the bit (nonzero if it was set).
 */
static inline int
test_and_clear_bit_cs(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr, old, new, mask;

	addr = (unsigned long) ptr;
	/* calculate address for CS: byte offset of the word containing nr */
	addr += (nr ^ (nr & (__BITOPS_WORDSIZE - 1))) >> 3;
	/* make AND/test mask */
	mask = ~(1UL << (nr & (__BITOPS_WORDSIZE - 1)));
	/* Do the atomic update. */
	__BITOPS_LOOP(old, new, addr, mask, __BITOPS_AND);
	__BITOPS_BARRIER();
	/* old and new differ exactly when the bit was set before clearing. */
	return (old ^ new) != 0;
}
230
/*
 * SMP safe test_and_change_bit routine based on compare and swap (CS).
 * Returns the previous value of the bit (nonzero if it was set).
 */
static inline int
test_and_change_bit_cs(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr, old, new, mask;

	addr = (unsigned long) ptr;
	/* calculate address for CS: byte offset of the word containing nr */
	addr += (nr ^ (nr & (__BITOPS_WORDSIZE - 1))) >> 3;
	/* make XOR/test mask */
	mask = 1UL << (nr & (__BITOPS_WORDSIZE - 1));
	/* Do the atomic update. */
	__BITOPS_LOOP(old, new, addr, mask, __BITOPS_XOR);
	__BITOPS_BARRIER();
	return (old & mask) != 0;
}
249#endif /* CONFIG_SMP */
250
/*
 * fast, non-SMP set_bit routine: OR a single byte in memory with a
 * one-byte mask taken from the _oi_bitmap table (not interlocked).
 */
static inline void __set_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr;

	/* Byte address of bit nr in the big-endian word layout. */
	addr = (unsigned long) ptr + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
	asm volatile(
		" oc 0(1,%1),0(%2)"
		: "=m" (*(char *) addr) : "a" (addr),
		"a" (_oi_bitmap + (nr & 7)), "m" (*(char *) addr) : "cc" );
}
264
265static inline void
266__constant_set_bit(const unsigned long nr, volatile unsigned long *ptr)
267{
268 unsigned long addr;
269
270 addr = ((unsigned long) ptr) + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
94c12cc7 271 *(unsigned char *) addr |= 1 << (nr & 7);
1da177e4
LT
272}
273
/* Pick the constant-folded variant when nr is a compile-time constant. */
#define set_bit_simple(nr,addr) \
(__builtin_constant_p((nr)) ? \
 __constant_set_bit((nr),(addr)) : \
 __set_bit((nr),(addr)) )
278
/*
 * fast, non-SMP clear_bit routine: AND a single byte in memory with a
 * one-byte mask taken from the _ni_bitmap table (not interlocked).
 */
static inline void
__clear_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr;

	/* Byte address of bit nr in the big-endian word layout. */
	addr = (unsigned long) ptr + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
	asm volatile(
		" nc 0(1,%1),0(%2)"
		: "=m" (*(char *) addr) : "a" (addr),
		"a" (_ni_bitmap + (nr & 7)), "m" (*(char *) addr) : "cc");
}
293
294static inline void
295__constant_clear_bit(const unsigned long nr, volatile unsigned long *ptr)
296{
297 unsigned long addr;
298
299 addr = ((unsigned long) ptr) + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
94c12cc7 300 *(unsigned char *) addr &= ~(1 << (nr & 7));
1da177e4
LT
301}
302
/* Pick the constant-folded variant when nr is a compile-time constant. */
#define clear_bit_simple(nr,addr) \
(__builtin_constant_p((nr)) ? \
 __constant_clear_bit((nr),(addr)) : \
 __clear_bit((nr),(addr)) )
307
/*
 * fast, non-SMP change_bit routine: XOR a single byte in memory with a
 * one-byte mask taken from the _oi_bitmap table (not interlocked).
 */
static inline void __change_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr;

	/* Byte address of bit nr in the big-endian word layout. */
	addr = (unsigned long) ptr + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
	asm volatile(
		" xc 0(1,%1),0(%2)"
		: "=m" (*(char *) addr) : "a" (addr),
		"a" (_oi_bitmap + (nr & 7)), "m" (*(char *) addr) : "cc" );
}
321
322static inline void
323__constant_change_bit(const unsigned long nr, volatile unsigned long *ptr)
324{
325 unsigned long addr;
326
327 addr = ((unsigned long) ptr) + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
94c12cc7 328 *(unsigned char *) addr ^= 1 << (nr & 7);
1da177e4
LT
329}
330
/* Pick the constant-folded variant when nr is a compile-time constant. */
#define change_bit_simple(nr,addr) \
(__builtin_constant_p((nr)) ? \
 __constant_change_bit((nr),(addr)) : \
 __change_bit((nr),(addr)) )
335
/*
 * fast, non-SMP test_and_set_bit routine: read the old byte value,
 * then OR in the bit via the _oi_bitmap table.  Not atomic.
 */
static inline int
test_and_set_bit_simple(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr;
	unsigned char ch;

	/* Byte address of bit nr in the big-endian word layout. */
	addr = (unsigned long) ptr + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
	ch = *(unsigned char *) addr;	/* sample old value before the OR */
	asm volatile(
		" oc 0(1,%1),0(%2)"
		: "=m" (*(char *) addr)
		: "a" (addr), "a" (_oi_bitmap + (nr & 7)),
		"m" (*(char *) addr) : "cc", "memory");
	return (ch >> (nr & 7)) & 1;
}
354#define __test_and_set_bit(X,Y) test_and_set_bit_simple(X,Y)
355
/*
 * fast, non-SMP test_and_clear_bit routine: read the old byte value,
 * then AND out the bit via the _ni_bitmap table.  Not atomic.
 */
static inline int
test_and_clear_bit_simple(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr;
	unsigned char ch;

	/* Byte address of bit nr in the big-endian word layout. */
	addr = (unsigned long) ptr + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
	ch = *(unsigned char *) addr;	/* sample old value before the AND */
	asm volatile(
		" nc 0(1,%1),0(%2)"
		: "=m" (*(char *) addr)
		: "a" (addr), "a" (_ni_bitmap + (nr & 7)),
		"m" (*(char *) addr) : "cc", "memory");
	return (ch >> (nr & 7)) & 1;
}
374#define __test_and_clear_bit(X,Y) test_and_clear_bit_simple(X,Y)
375
/*
 * fast, non-SMP test_and_change_bit routine: read the old byte value,
 * then XOR the bit via the _oi_bitmap table.  Not atomic.
 */
static inline int
test_and_change_bit_simple(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr;
	unsigned char ch;

	/* Byte address of bit nr in the big-endian word layout. */
	addr = (unsigned long) ptr + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
	ch = *(unsigned char *) addr;	/* sample old value before the XOR */
	asm volatile(
		" xc 0(1,%1),0(%2)"
		: "=m" (*(char *) addr)
		: "a" (addr), "a" (_oi_bitmap + (nr & 7)),
		"m" (*(char *) addr) : "cc", "memory");
	return (ch >> (nr & 7)) & 1;
}
394#define __test_and_change_bit(X,Y) test_and_change_bit_simple(X,Y)
395
#ifdef CONFIG_SMP
/* SMP: use the interlocked compare-and-swap based variants. */
#define set_bit set_bit_cs
#define clear_bit clear_bit_cs
#define change_bit change_bit_cs
#define test_and_set_bit test_and_set_bit_cs
#define test_and_clear_bit test_and_clear_bit_cs
#define test_and_change_bit test_and_change_bit_cs
#else
/* UP: the cheaper non-interlocked variants suffice. */
#define set_bit set_bit_simple
#define clear_bit clear_bit_simple
#define change_bit change_bit_simple
#define test_and_set_bit test_and_set_bit_simple
#define test_and_clear_bit test_and_clear_bit_simple
#define test_and_change_bit test_and_change_bit_simple
#endif
411
412
413/*
414 * This routine doesn't need to be atomic.
415 */
416
417static inline int __test_bit(unsigned long nr, const volatile unsigned long *ptr)
418{
419 unsigned long addr;
420 unsigned char ch;
421
422 addr = (unsigned long) ptr + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
423 ch = *(volatile unsigned char *) addr;
424 return (ch >> (nr & 7)) & 1;
425}
426
427static inline int
428__constant_test_bit(unsigned long nr, const volatile unsigned long *addr) {
ef1bea9e
EP
429 return (((volatile char *) addr)
430 [(nr^(__BITOPS_WORDSIZE-8))>>3] & (1<<(nr&7))) != 0;
1da177e4
LT
431}
432
433#define test_bit(nr,addr) \
434(__builtin_constant_p((nr)) ? \
435 __constant_test_bit((nr),(addr)) : \
436 __test_bit((nr),(addr)) )
437
afff7e2b
MS
438/*
439 * ffz = Find First Zero in word. Undefined if no zero exists,
440 * so code should check against ~0UL first..
441 */
442static inline unsigned long ffz(unsigned long word)
443{
444 unsigned long bit = 0;
445
446#ifdef __s390x__
447 if (likely((word & 0xffffffff) == 0xffffffff)) {
448 word >>= 32;
449 bit += 32;
450 }
451#endif
452 if (likely((word & 0xffff) == 0xffff)) {
453 word >>= 16;
454 bit += 16;
455 }
456 if (likely((word & 0xff) == 0xff)) {
457 word >>= 8;
458 bit += 8;
459 }
460 return bit + _zb_findmap[word & 0xff];
461}
462
463/*
464 * __ffs = find first bit in word. Undefined if no bit exists,
465 * so code should check against 0UL first..
466 */
467static inline unsigned long __ffs (unsigned long word)
468{
469 unsigned long bit = 0;
470
471#ifdef __s390x__
472 if (likely((word & 0xffffffff) == 0)) {
473 word >>= 32;
474 bit += 32;
475 }
476#endif
477 if (likely((word & 0xffff) == 0)) {
478 word >>= 16;
479 bit += 16;
480 }
481 if (likely((word & 0xff) == 0)) {
482 word >>= 8;
483 bit += 8;
484 }
485 return bit + _sb_findmap[word & 0xff];
486}
1da177e4
LT
487
488/*
489 * Find-bit routines..
490 */
afff7e2b
MS
491
492#ifndef __s390x__
493
/*
 * find_first_zero_bit (31-bit): scan the bitmap word-wise for a word
 * that is not all ones, then locate the first zero bit inside it via
 * the _zb_findmap byte table.  Returns size if no zero bit is found.
 */
static inline int
find_first_zero_bit(const unsigned long * addr, unsigned long size)
{
	/* Dummy type so the whole bitmap can be named as one "m" operand. */
	typedef struct { long _[__BITOPS_WORDS(size)]; } addrtype;
	unsigned long cmp, count;
	unsigned int res;

	if (!size)
		return 0;
	asm volatile(
		" lhi %1,-1\n"
		" lr %2,%3\n"
		" slr %0,%0\n"
		" ahi %2,31\n"
		" srl %2,5\n"
		"0: c %1,0(%0,%4)\n"
		" jne 1f\n"
		" la %0,4(%0)\n"
		" brct %2,0b\n"
		" lr %0,%3\n"
		" j 4f\n"
		"1: l %2,0(%0,%4)\n"
		" sll %0,3\n"
		" lhi %1,0xff\n"
		" tml %2,0xffff\n"
		" jno 2f\n"
		" ahi %0,16\n"
		" srl %2,16\n"
		"2: tml %2,0x00ff\n"
		" jno 3f\n"
		" ahi %0,8\n"
		" srl %2,8\n"
		"3: nr %2,%1\n"
		" ic %2,0(%2,%5)\n"
		" alr %0,%2\n"
		"4:"
		: "=&a" (res), "=&d" (cmp), "=&a" (count)
		: "a" (size), "a" (addr), "a" (&_zb_findmap),
		"m" (*(addrtype *) addr) : "cc");
	/* res may point past the bitmap; clamp to size. */
	return (res < size) ? res : size;
}
535
/*
 * find_first_bit (31-bit): scan the bitmap word-wise for a non-zero
 * word, then locate the first set bit inside it via the _sb_findmap
 * byte table.  Returns size if no bit is set.
 */
static inline int
find_first_bit(const unsigned long * addr, unsigned long size)
{
	/* Dummy type so the whole bitmap can be named as one "m" operand. */
	typedef struct { long _[__BITOPS_WORDS(size)]; } addrtype;
	unsigned long cmp, count;
	unsigned int res;

	if (!size)
		return 0;
	asm volatile(
		" slr %1,%1\n"
		" lr %2,%3\n"
		" slr %0,%0\n"
		" ahi %2,31\n"
		" srl %2,5\n"
		"0: c %1,0(%0,%4)\n"
		" jne 1f\n"
		" la %0,4(%0)\n"
		" brct %2,0b\n"
		" lr %0,%3\n"
		" j 4f\n"
		"1: l %2,0(%0,%4)\n"
		" sll %0,3\n"
		" lhi %1,0xff\n"
		" tml %2,0xffff\n"
		" jnz 2f\n"
		" ahi %0,16\n"
		" srl %2,16\n"
		"2: tml %2,0x00ff\n"
		" jnz 3f\n"
		" ahi %0,8\n"
		" srl %2,8\n"
		"3: nr %2,%1\n"
		" ic %2,0(%2,%5)\n"
		" alr %0,%2\n"
		"4:"
		: "=&a" (res), "=&d" (cmp), "=&a" (count)
		: "a" (size), "a" (addr), "a" (&_sb_findmap),
		"m" (*(addrtype *) addr) : "cc");
	/* res may point past the bitmap; clamp to size. */
	return (res < size) ? res : size;
}
577
1da177e4
LT
578#else /* __s390x__ */
579
1da177e4
LT
/*
 * find_first_zero_bit (64-bit): scan the bitmap doubleword-wise for a
 * doubleword that is not all ones, then narrow down to the first zero
 * bit via the _zb_findmap byte table.  Returns size if none is found.
 */
static inline unsigned long
find_first_zero_bit(const unsigned long * addr, unsigned long size)
{
	/* Dummy type so the whole bitmap can be named as one "m" operand. */
	typedef struct { long _[__BITOPS_WORDS(size)]; } addrtype;
	unsigned long res, cmp, count;

	if (!size)
		return 0;
	asm volatile(
		" lghi %1,-1\n"
		" lgr %2,%3\n"
		" slgr %0,%0\n"
		" aghi %2,63\n"
		" srlg %2,%2,6\n"
		"0: cg %1,0(%0,%4)\n"
		" jne 1f\n"
		" la %0,8(%0)\n"
		" brct %2,0b\n"
		" lgr %0,%3\n"
		" j 5f\n"
		"1: lg %2,0(%0,%4)\n"
		" sllg %0,%0,3\n"
		" clr %2,%1\n"
		" jne 2f\n"
		" aghi %0,32\n"
		" srlg %2,%2,32\n"
		"2: lghi %1,0xff\n"
		" tmll %2,0xffff\n"
		" jno 3f\n"
		" aghi %0,16\n"
		" srl %2,16\n"
		"3: tmll %2,0x00ff\n"
		" jno 4f\n"
		" aghi %0,8\n"
		" srl %2,8\n"
		"4: ngr %2,%1\n"
		" ic %2,0(%2,%5)\n"
		" algr %0,%2\n"
		"5:"
		: "=&a" (res), "=&d" (cmp), "=&a" (count)
		: "a" (size), "a" (addr), "a" (&_zb_findmap),
		"m" (*(addrtype *) addr) : "cc");
	/* res may point past the bitmap; clamp to size. */
	return (res < size) ? res : size;
}
624
/*
 * find_first_bit (64-bit): scan the bitmap doubleword-wise for a
 * non-zero doubleword, then narrow down to the first set bit via the
 * _sb_findmap byte table.  Returns size if no bit is set.
 */
static inline unsigned long
find_first_bit(const unsigned long * addr, unsigned long size)
{
	/* Dummy type so the whole bitmap can be named as one "m" operand. */
	typedef struct { long _[__BITOPS_WORDS(size)]; } addrtype;
	unsigned long res, cmp, count;

	if (!size)
		return 0;
	asm volatile(
		" slgr %1,%1\n"
		" lgr %2,%3\n"
		" slgr %0,%0\n"
		" aghi %2,63\n"
		" srlg %2,%2,6\n"
		"0: cg %1,0(%0,%4)\n"
		" jne 1f\n"
		" aghi %0,8\n"
		" brct %2,0b\n"
		" lgr %0,%3\n"
		" j 5f\n"
		"1: lg %2,0(%0,%4)\n"
		" sllg %0,%0,3\n"
		" clr %2,%1\n"
		" jne 2f\n"
		" aghi %0,32\n"
		" srlg %2,%2,32\n"
		"2: lghi %1,0xff\n"
		" tmll %2,0xffff\n"
		" jnz 3f\n"
		" aghi %0,16\n"
		" srl %2,16\n"
		"3: tmll %2,0x00ff\n"
		" jnz 4f\n"
		" aghi %0,8\n"
		" srl %2,8\n"
		"4: ngr %2,%1\n"
		" ic %2,0(%2,%5)\n"
		" algr %0,%2\n"
		"5:"
		: "=&a" (res), "=&d" (cmp), "=&a" (count)
		: "a" (size), "a" (addr), "a" (&_sb_findmap),
		"m" (*(addrtype *) addr) : "cc");
	/* res may point past the bitmap; clamp to size. */
	return (res < size) ? res : size;
}
669
1da177e4
LT
670#endif /* __s390x__ */
671
afff7e2b
MS
672static inline int
673find_next_zero_bit (const unsigned long * addr, unsigned long size,
674 unsigned long offset)
1da177e4 675{
afff7e2b
MS
676 const unsigned long *p;
677 unsigned long bit, set;
678
679 if (offset >= size)
680 return size;
681 bit = offset & (__BITOPS_WORDSIZE - 1);
682 offset -= bit;
683 size -= offset;
684 p = addr + offset / __BITOPS_WORDSIZE;
685 if (bit) {
686 /*
687 * s390 version of ffz returns __BITOPS_WORDSIZE
688 * if no zero bit is present in the word.
689 */
690 set = ffz(*p >> bit) + bit;
691 if (set >= size)
692 return size + offset;
693 if (set < __BITOPS_WORDSIZE)
694 return set + offset;
695 offset += __BITOPS_WORDSIZE;
696 size -= __BITOPS_WORDSIZE;
697 p++;
1da177e4 698 }
afff7e2b 699 return offset + find_first_zero_bit(p, size);
1da177e4
LT
700}
701
afff7e2b
MS
702static inline int
703find_next_bit (const unsigned long * addr, unsigned long size,
704 unsigned long offset)
1da177e4 705{
afff7e2b
MS
706 const unsigned long *p;
707 unsigned long bit, set;
708
709 if (offset >= size)
710 return size;
711 bit = offset & (__BITOPS_WORDSIZE - 1);
712 offset -= bit;
713 size -= offset;
714 p = addr + offset / __BITOPS_WORDSIZE;
715 if (bit) {
716 /*
717 * s390 version of __ffs returns __BITOPS_WORDSIZE
718 * if no one bit is present in the word.
719 */
720 set = __ffs(*p & (~0UL << bit));
721 if (set >= size)
722 return size + offset;
723 if (set < __BITOPS_WORDSIZE)
724 return set + offset;
725 offset += __BITOPS_WORDSIZE;
726 size -= __BITOPS_WORDSIZE;
727 p++;
1da177e4 728 }
afff7e2b 729 return offset + find_first_bit(p, size);
1da177e4
LT
730}
731
/*
 * Every architecture must define this function. It's the fastest
 * way of searching a 140-bit bitmap where the first 100 bits are
 * unlikely to be set. It's guaranteed that at least one of the 140
 * bits is cleared.
 */
static inline int sched_find_first_bit(unsigned long *b)
{
	/* The generic find_first_bit is already fast enough on s390. */
	return find_first_bit(b, 140);
}
742
7e33db4e 743#include <asm-generic/bitops/ffs.h>
1da177e4 744
7e33db4e
AM
745#include <asm-generic/bitops/fls.h>
746#include <asm-generic/bitops/fls64.h>
1da177e4 747
7e33db4e 748#include <asm-generic/bitops/hweight.h>
26333576 749#include <asm-generic/bitops/lock.h>
1da177e4 750
1da177e4
LT
751/*
752 * ATTENTION: intel byte ordering convention for ext2 and minix !!
753 * bit 0 is the LSB of addr; bit 31 is the MSB of addr;
754 * bit 32 is the LSB of (addr+4).
755 * That combined with the little endian byte order of Intel gives the
756 * following bit order in memory:
757 * 07 06 05 04 03 02 01 00 15 14 13 12 11 10 09 08 \
758 * 23 22 21 20 19 18 17 16 31 30 29 28 27 26 25 24
759 */
760
761#define ext2_set_bit(nr, addr) \
67b0ad57 762 __test_and_set_bit((nr)^(__BITOPS_WORDSIZE - 8), (unsigned long *)addr)
1da177e4
LT
763#define ext2_set_bit_atomic(lock, nr, addr) \
764 test_and_set_bit((nr)^(__BITOPS_WORDSIZE - 8), (unsigned long *)addr)
765#define ext2_clear_bit(nr, addr) \
67b0ad57 766 __test_and_clear_bit((nr)^(__BITOPS_WORDSIZE - 8), (unsigned long *)addr)
1da177e4
LT
767#define ext2_clear_bit_atomic(lock, nr, addr) \
768 test_and_clear_bit((nr)^(__BITOPS_WORDSIZE - 8), (unsigned long *)addr)
769#define ext2_test_bit(nr, addr) \
770 test_bit((nr)^(__BITOPS_WORDSIZE - 8), (unsigned long *)addr)
771
772#ifndef __s390x__
773
/*
 * ext2_find_first_zero_bit (31-bit): like find_first_zero_bit but for
 * the little-endian ext2 bit layout — within a word the byte order is
 * reversed relative to s390, hence the "+24 then step down" byte walk.
 */
static inline int
ext2_find_first_zero_bit(void *vaddr, unsigned int size)
{
	/* Dummy type so the whole bitmap can be named as one "m" operand. */
	typedef struct { long _[__BITOPS_WORDS(size)]; } addrtype;
	unsigned long cmp, count;
	unsigned int res;

	if (!size)
		return 0;
	asm volatile(
		" lhi %1,-1\n"
		" lr %2,%3\n"
		" ahi %2,31\n"
		" srl %2,5\n"
		" slr %0,%0\n"
		"0: cl %1,0(%0,%4)\n"
		" jne 1f\n"
		" ahi %0,4\n"
		" brct %2,0b\n"
		" lr %0,%3\n"
		" j 4f\n"
		"1: l %2,0(%0,%4)\n"
		" sll %0,3\n"
		" ahi %0,24\n"
		" lhi %1,0xff\n"
		" tmh %2,0xffff\n"
		" jo 2f\n"
		" ahi %0,-16\n"
		" srl %2,16\n"
		"2: tml %2,0xff00\n"
		" jo 3f\n"
		" ahi %0,-8\n"
		" srl %2,8\n"
		"3: nr %2,%1\n"
		" ic %2,0(%2,%5)\n"
		" alr %0,%2\n"
		"4:"
		: "=&a" (res), "=&d" (cmp), "=&a" (count)
		: "a" (size), "a" (vaddr), "a" (&_zb_findmap),
		"m" (*(addrtype *) vaddr) : "cc");
	/* res may point past the bitmap; clamp to size. */
	return (res < size) ? res : size;
}
816
1da177e4
LT
817#else /* __s390x__ */
818
/*
 * ext2_find_first_zero_bit (64-bit): little-endian ext2 bit layout
 * variant — scans doubleword-wise, then narrows to the 32-bit half
 * and walks bytes in reversed order ("+24 then step down").
 */
static inline unsigned long
ext2_find_first_zero_bit(void *vaddr, unsigned long size)
{
	/* Dummy type so the whole bitmap can be named as one "m" operand. */
	typedef struct { long _[__BITOPS_WORDS(size)]; } addrtype;
	unsigned long res, cmp, count;

	if (!size)
		return 0;
	asm volatile(
		" lghi %1,-1\n"
		" lgr %2,%3\n"
		" aghi %2,63\n"
		" srlg %2,%2,6\n"
		" slgr %0,%0\n"
		"0: clg %1,0(%0,%4)\n"
		" jne 1f\n"
		" aghi %0,8\n"
		" brct %2,0b\n"
		" lgr %0,%3\n"
		" j 5f\n"
		"1: cl %1,0(%0,%4)\n"
		" jne 2f\n"
		" aghi %0,4\n"
		"2: l %2,0(%0,%4)\n"
		" sllg %0,%0,3\n"
		" aghi %0,24\n"
		" lghi %1,0xff\n"
		" tmlh %2,0xffff\n"
		" jo 3f\n"
		" aghi %0,-16\n"
		" srl %2,16\n"
		"3: tmll %2,0xff00\n"
		" jo 4f\n"
		" aghi %0,-8\n"
		" srl %2,8\n"
		"4: ngr %2,%1\n"
		" ic %2,0(%2,%5)\n"
		" algr %0,%2\n"
		"5:"
		: "=&a" (res), "=&d" (cmp), "=&a" (count)
		: "a" (size), "a" (vaddr), "a" (&_zb_findmap),
		"m" (*(addrtype *) vaddr) : "cc");
	/* res may point past the bitmap; clamp to size. */
	return (res < size) ? res : size;
}
863
afff7e2b
MS
864#endif /* __s390x__ */
865
/*
 * ext2_find_next_zero_bit: like find_next_zero_bit but for the
 * little-endian ext2 bit layout.  A partial leading word is loaded
 * byte-reversed so the native ffz can be applied; the aligned
 * remainder is delegated to ext2_find_first_zero_bit.
 */
static inline int
ext2_find_next_zero_bit(void *vaddr, unsigned long size, unsigned long offset)
{
	unsigned long *addr = vaddr, *p;
	unsigned long word, bit, set;

	if (offset >= size)
		return size;
	/* Split offset into a word-aligned part and an in-word shift. */
	bit = offset & (__BITOPS_WORDSIZE - 1);
	offset -= bit;
	size -= offset;
	p = addr + offset / __BITOPS_WORDSIZE;
	if (bit) {
#ifndef __s390x__
		/* Load the word with its four bytes reversed (ic/icm). */
		asm volatile(
			" ic %0,0(%1)\n"
			" icm %0,2,1(%1)\n"
			" icm %0,4,2(%1)\n"
			" icm %0,8,3(%1)"
			: "=&a" (word) : "a" (p), "m" (*p) : "cc");
#else
		/* lrvg = load 8 bytes byte-reversed. */
		asm volatile(
			" lrvg %0,%1"
			: "=a" (word) : "m" (*p) );
#endif
		/*
		 * s390 version of ffz returns __BITOPS_WORDSIZE
		 * if no zero bit is present in the word.
		 */
		set = ffz(word >> bit) + bit;
		if (set >= size)
			return size + offset;
		if (set < __BITOPS_WORDSIZE)
			return set + offset;
		offset += __BITOPS_WORDSIZE;
		size -= __BITOPS_WORDSIZE;
		p++;
	}
	return offset + ext2_find_first_zero_bit(p, size);
}
906
7e33db4e 907#include <asm-generic/bitops/minix.h>
1da177e4
LT
908
909#endif /* __KERNEL__ */
910
911#endif /* _S390_BITOPS_H */
This page took 0.299544 seconds and 5 git commands to generate.