ARM: bitops: ensure set/clear/change bitops take a word-aligned pointer
author: Russell King <rmk+kernel@arm.linux.org.uk>
Sun, 16 Jan 2011 17:59:44 +0000 (17:59 +0000)
committer: Russell King <rmk+kernel@arm.linux.org.uk>
Wed, 2 Feb 2011 21:21:53 +0000 (21:21 +0000)
Add additional instructions to our assembly bitops functions to ensure
that they only operate on word-aligned pointers.  This will be necessary
when we switch these operations to use the word-based exclusive
operations.

Signed-off-by: Russell King <rmk+kernel@arm.linux.org.uk>
arch/arm/lib/bitops.h

index d42252918bfb4e7b944dbda70e205ccb0ebcc87f..bd00551fb79761430343d6ab93c09510bf59924c 100644 (file)
@@ -1,6 +1,8 @@
 
 #if __LINUX_ARM_ARCH__ >= 6 && defined(CONFIG_CPU_32v6K)
        .macro  bitop, instr
+       ands    ip, r1, #3
+       strneb  r1, [ip]                @ assert word-aligned
        mov     r2, #1
        and     r3, r0, #7              @ Get bit offset
        add     r1, r1, r0, lsr #3      @ Get byte offset
@@ -14,6 +16,8 @@
        .endm
 
        .macro  testop, instr, store
+       ands    ip, r1, #3
+       strneb  r1, [ip]                @ assert word-aligned
        and     r3, r0, #7              @ Get bit offset
        mov     r2, #1
        add     r1, r1, r0, lsr #3      @ Get byte offset
@@ -32,6 +36,8 @@
        .endm
 #else
        .macro  bitop, instr
+       ands    ip, r1, #3
+       strneb  r1, [ip]                @ assert word-aligned
        and     r2, r0, #7
        mov     r3, #1
        mov     r3, r3, lsl r2
@@ -52,6 +58,8 @@
  * to avoid dirtying the data cache.
  */
        .macro  testop, instr, store
+       ands    ip, r1, #3
+       strneb  r1, [ip]                @ assert word-aligned
        add     r1, r1, r0, lsr #3
        and     r3, r0, #7
        mov     r0, #1
This page took 0.025239 seconds and 5 git commands to generate.