powerpc: Fix Unaligned Loads and Stores
authorTom Musta <tommusta@gmail.com>
Fri, 18 Oct 2013 19:42:08 +0000 (14:42 -0500)
committerBenjamin Herrenschmidt <benh@kernel.crashing.org>
Wed, 30 Oct 2013 05:01:30 +0000 (16:01 +1100)
This patch modifies the unaligned access routines of the sstep.c
module so that it properly reverses the bytes of storage operands
in the little endian kernel.  This is implemented by
breaking an unaligned little endian access into a combination of
single byte accesses plus an overall byte reversal operation.

Signed-off-by: Tom Musta <tmusta@gmail.com>
Signed-off-by: Benjamin Herrenschmidt <benh@kernel.crashing.org>
arch/powerpc/lib/sstep.c

index b1faa1593c9067e68995e1cdc54dbb8465dce587..0121d2140ab9f92fb1e6d68d8e16829f2707431f 100644 (file)
@@ -212,11 +212,19 @@ static int __kprobes read_mem_unaligned(unsigned long *dest, unsigned long ea,
 {
        int err;
        unsigned long x, b, c;
+#ifdef __LITTLE_ENDIAN__
+       int len = nb; /* save a copy of the length for byte reversal */
+#endif
 
        /* unaligned, do this in pieces */
        x = 0;
        for (; nb > 0; nb -= c) {
+#ifdef __LITTLE_ENDIAN__
+               c = 1;
+#endif
+#ifdef __BIG_ENDIAN__
                c = max_align(ea);
+#endif
                if (c > nb)
                        c = max_align(nb);
                err = read_mem_aligned(&b, ea, c);
@@ -225,7 +233,24 @@ static int __kprobes read_mem_unaligned(unsigned long *dest, unsigned long ea,
                x = (x << (8 * c)) + b;
                ea += c;
        }
+#ifdef __LITTLE_ENDIAN__
+       switch (len) {
+       case 2:
+               *dest = byterev_2(x);
+               break;
+       case 4:
+               *dest = byterev_4(x);
+               break;
+#ifdef __powerpc64__
+       case 8:
+               *dest = byterev_8(x);
+               break;
+#endif
+       }
+#endif
+#ifdef __BIG_ENDIAN__
        *dest = x;
+#endif
        return 0;
 }
 
@@ -273,9 +298,29 @@ static int __kprobes write_mem_unaligned(unsigned long val, unsigned long ea,
        int err;
        unsigned long c;
 
+#ifdef __LITTLE_ENDIAN__
+       switch (nb) {
+       case 2:
+               val = byterev_2(val);
+               break;
+       case 4:
+               val = byterev_4(val);
+               break;
+#ifdef __powerpc64__
+       case 8:
+               val = byterev_8(val);
+               break;
+#endif
+       }
+#endif
        /* unaligned or little-endian, do this in pieces */
        for (; nb > 0; nb -= c) {
+#ifdef __LITTLE_ENDIAN__
+               c = 1;
+#endif
+#ifdef __BIG_ENDIAN__
                c = max_align(ea);
+#endif
                if (c > nb)
                        c = max_align(nb);
                err = write_mem_aligned(val >> (nb - c) * 8, ea, c);
This page took 0.02609 seconds and 5 git commands to generate.