Commit | Line | Data |
---|---|---|
8c56cc8b WD |
1 | #ifndef __ASM_ARM_WORD_AT_A_TIME_H |
2 | #define __ASM_ARM_WORD_AT_A_TIME_H | |
3 | ||
4 | #ifndef __ARMEB__ | |
5 | ||
6 | /* | |
7 | * Little-endian word-at-a-time zero byte handling. | |
8 | * Heavily based on the x86 algorithm. | |
9 | */ | |
10 | #include <linux/kernel.h> | |
11 | ||
struct word_at_a_time {
	const unsigned long one_bits, high_bits;
};

#define WORD_AT_A_TIME_CONSTANTS { REPEAT_BYTE(0x01), REPEAT_BYTE(0x80) }

/*
 * Detect whether @a contains a zero byte, using the classic
 * subtract-and-mask trick: (a - 0x01..01) borrows out of exactly the
 * bytes that were zero (or had a borrow propagate through them), and
 * masking with ~a and the 0x80..80 pattern keeps only the genuinely
 * zero bytes.  The raw detector word is also stored in *bits for the
 * later prep_zero_mask()/create_zero_mask() steps.
 */
static inline unsigned long has_zero(unsigned long a, unsigned long *bits,
				     const struct word_at_a_time *c)
{
	unsigned long borrows = a - c->one_bits;
	unsigned long detector = borrows & ~a & c->high_bits;

	*bits = detector;
	return detector;
}

#define prep_zero_mask(a, bits, c) (bits)
27 | ||
/*
 * Convert the raw zero-byte detector word (0x80 at each zero byte,
 * as produced by has_zero()) into a mask of 0xff covering every byte
 * strictly below the first (least significant) zero byte.
 */
static inline unsigned long create_zero_mask(unsigned long bits)
{
	/* All-ones below the lowest set bit, zeroes at and above it. */
	unsigned long below_first = (bits - 1) & ~bits;

	/* Shift the per-byte 0x7f remainder down into whole 0xff bytes. */
	return below_first >> 7;
}
33 | ||
/*
 * Return the byte index of the first zero byte, given the mask
 * produced by create_zero_mask() (0xff in every byte below the
 * first zero byte).
 */
static inline unsigned long find_zero(unsigned long mask)
{
	unsigned long ret;

#if __LINUX_ARM_ARCH__ >= 5
	/* We have clz available. */
	/* Highest set bit position, divided by 8 = count of 0xff bytes. */
	ret = fls(mask) >> 3;
#else
	/* (000000 0000ff 00ffff ffffff) -> ( 1 1 2 3 ) */
	ret = (0x0ff0001 + mask) >> 23;
	/* Fix the 1 for 00 case */
	ret &= mask;
#endif

	return ret;
}
50 | ||
b9a50f74 WD |
/*
 * The create_zero_mask() result is already in byte-mask form
 * (0xff per byte before the first zero), so no conversion needed.
 */
#define zero_bytemask(mask) (mask)
52 | ||
cb601185 WD |
53 | #else /* __ARMEB__ */ |
54 | #include <asm-generic/word-at-a-time.h> | |
55 | #endif | |
56 | ||
57 | #ifdef CONFIG_DCACHE_WORD_ACCESS | |
58 | ||
b9a50f74 WD |
59 | /* |
60 | * Load an unaligned word from kernel space. | |
61 | * | |
62 | * In the (very unlikely) case of the word being a page-crosser | |
63 | * and the next page not being mapped, take the exception and | |
64 | * return zeroes in the non-existing part. | |
65 | */ | |
static inline unsigned long load_unaligned_zeropad(const void *addr)
{
	unsigned long ret, offset;

	/* Load word from unaligned pointer addr */
	asm(
	"1:	ldr	%0, [%2]\n"
	"2:\n"
	/*
	 * Fixup path, entered via the exception table if the load at
	 * 1: faults (word straddles into an unmapped page): redo it as
	 * an aligned load of the word containing the first byte, then
	 * shift so the mapped bytes land correctly and the missing part
	 * reads as zeroes.
	 */
	"	.pushsection .text.fixup,\"ax\"\n"
	"	.align 2\n"
	"3:	and	%1, %2, #0x3\n"		/* offset = addr & 3 */
	"	bic	%2, %2, #0x3\n"		/* align addr down to word */
	"	ldr	%0, [%2]\n"		/* aligned load cannot cross the page */
	"	lsl	%1, %1, #0x3\n"		/* byte offset -> bit count */
#ifndef __ARMEB__
	"	lsr	%0, %0, %1\n"		/* little-endian: zero-fill from the top */
#else
	"	lsl	%0, %0, %1\n"		/* big-endian: zero-fill from the bottom */
#endif
	"	b	2b\n"
	"	.popsection\n"
	/* Exception-table entry mapping faulting insn 1: to fixup 3:. */
	"	.pushsection __ex_table,\"a\"\n"
	"	.align	3\n"
	"	.long	1b, 3b\n"
	"	.popsection"
	/*
	 * NOTE(review): the fixup path writes %2 (bic) even though it is
	 * declared as an input operand ("r" (addr)); this relies on the
	 * fixup being the final use of that register — confirm against
	 * the kernel's extended-asm conventions.  The "Qo" dummy memory
	 * input tells the compiler the pointed-to word is read.
	 */
	: "=&r" (ret), "=&r" (offset)
	: "r" (addr), "Qo" (*(unsigned long *)addr));

	return ret;
}
96 | ||
b9a50f74 | 97 | #endif /* DCACHE_WORD_ACCESS */ |
8c56cc8b | 98 | #endif /* __ASM_ARM_WORD_AT_A_TIME_H */ |