+/* Instruction masks used during single-stepping of atomic
+ sequences.  A single mask, LWARX_MASK, matches both the word
+ (lwarx) and doubleword (ldarx) load-and-reserve opcodes below.
+ STWCX_MASK and the store-conditional opcodes are not referenced in
+ this hunk — presumably used by the software single-step code
+ elsewhere in this file; verify against the full file. */
+#define LWARX_MASK 0xfc0007fe
+#define LWARX_INSTRUCTION 0x7c000028
+#define LDARX_INSTRUCTION 0x7c0000A8
+#define STWCX_MASK 0xfc0007ff
+#define STWCX_INSTRUCTION 0x7c00012d
+#define STDCX_INSTRUCTION 0x7c0001ad
+
+/* We can't displaced step atomic sequences.  Otherwise this is just
+ like simple_displaced_step_copy_insn.
+
+ Copy the instruction at FROM into the scratch area at TO and return
+ a closure — here simply the heap-allocated copy of the bytes —
+ whose ownership passes to the caller (the cleanup that would free
+ it is discarded before returning).  Return NULL to tell the caller
+ that displaced stepping must not be used for this instruction. */
+
+static struct displaced_step_closure *
+ppc_displaced_step_copy_insn (struct gdbarch *gdbarch,
+ CORE_ADDR from, CORE_ADDR to,
+ struct regcache *regs)
+{
+ size_t len = gdbarch_max_insn_length (gdbarch);
+ gdb_byte *buf = (gdb_byte *) xmalloc (len);
+ struct cleanup *old_chain = make_cleanup (xfree, buf);
+ enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
+ int insn;
+
+ /* Read the full LEN (maximum instruction length) bytes for
+ copying, but decode only the first PPC_INSN_SIZE bytes below. */
+ read_memory (from, buf, len);
+
+ insn = extract_signed_integer (buf, PPC_INSN_SIZE, byte_order);
+
+ /* Assume all atomic sequences start with a lwarx/ldarx instruction. */
+ if ((insn & LWARX_MASK) == LWARX_INSTRUCTION
+ || (insn & LWARX_MASK) == LDARX_INSTRUCTION)
+ {
+ if (debug_displaced)
+ {
+ fprintf_unfiltered (gdb_stdlog,
+ "displaced: can't displaced step "
+ "atomic sequence at %s\n",
+ paddress (gdbarch, from));
+ }
+ /* NULL means displaced stepping is refused here; the core is
+ presumed to fall back to another stepping method — confirm
+ against the displaced-step caller.  Free BUF via the cleanup
+ since no closure is returned. */
+ do_cleanups (old_chain);
+ return NULL;
+ }
+
+ write_memory (to, buf, len);
+
+ if (debug_displaced)
+ {
+ fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
+ paddress (gdbarch, from), paddress (gdbarch, to));
+ displaced_step_dump_bytes (gdb_stdlog, buf, len);
+ }
+
+ /* BUF now doubles as the closure returned to the caller, so drop
+ the cleanup that would have freed it. */
+ discard_cleanups (old_chain);
+ return (struct displaced_step_closure *) buf;
+}
+