1 /* Common target dependent code for GDB on ARM systems.
3 Copyright (C) 1988-2015 Free Software Foundation, Inc.
5 This file is part of GDB.
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
22 #include <ctype.h> /* XXX for isupper (). */
29 #include "dis-asm.h" /* For register styles. */
31 #include "reggroups.h"
34 #include "arch-utils.h"
36 #include "frame-unwind.h"
37 #include "frame-base.h"
38 #include "trad-frame.h"
40 #include "dwarf2-frame.h"
42 #include "prologue-value.h"
44 #include "target-descriptions.h"
45 #include "user-regs.h"
49 #include "gdb/sim-arm.h"
52 #include "coff/internal.h"
58 #include "record-full.h"
60 #include "features/arm-with-m.c"
61 #include "features/arm-with-m-fpa-layout.c"
62 #include "features/arm-with-m-vfp-d16.c"
63 #include "features/arm-with-iwmmxt.c"
64 #include "features/arm-with-vfpv2.c"
65 #include "features/arm-with-vfpv3.c"
66 #include "features/arm-with-neon.c"
70 /* Macros for setting and testing a bit in a minimal symbol that marks
71 it as Thumb function. The MSB of the minimal symbol's "info" field
72 is used for this purpose.
74 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
75 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
77 #define MSYMBOL_SET_SPECIAL(msym) \
78 MSYMBOL_TARGET_FLAG_1 (msym) = 1
80 #define MSYMBOL_IS_SPECIAL(msym) \
81 MSYMBOL_TARGET_FLAG_1 (msym)
83 /* Per-objfile data used for mapping symbols. */
84 static const struct objfile_data
*arm_objfile_data_key
;
86 struct arm_mapping_symbol
91 typedef struct arm_mapping_symbol arm_mapping_symbol_s
;
92 DEF_VEC_O(arm_mapping_symbol_s
);
94 struct arm_per_objfile
96 VEC(arm_mapping_symbol_s
) **section_maps
;
99 /* The list of available "set arm ..." and "show arm ..." commands. */
100 static struct cmd_list_element
*setarmcmdlist
= NULL
;
101 static struct cmd_list_element
*showarmcmdlist
= NULL
;
103 /* The type of floating-point to use. Keep this in sync with enum
104 arm_float_model, and the help string in _initialize_arm_tdep. */
105 static const char *const fp_model_strings
[] =
115 /* A variable that can be configured by the user. */
116 static enum arm_float_model arm_fp_model
= ARM_FLOAT_AUTO
;
117 static const char *current_fp_model
= "auto";
119 /* The ABI to use. Keep this in sync with arm_abi_kind. */
120 static const char *const arm_abi_strings
[] =
128 /* A variable that can be configured by the user. */
129 static enum arm_abi_kind arm_abi_global
= ARM_ABI_AUTO
;
130 static const char *arm_abi_string
= "auto";
132 /* The execution mode to assume. */
133 static const char *const arm_mode_strings
[] =
141 static const char *arm_fallback_mode_string
= "auto";
142 static const char *arm_force_mode_string
= "auto";
144 /* Internal override of the execution mode. -1 means no override,
145 0 means override to ARM mode, 1 means override to Thumb mode.
146 The effect is the same as if arm_force_mode has been set by the
147 user (except the internal override has precedence over a user's
148 arm_force_mode override). */
149 static int arm_override_mode
= -1;
151 /* Number of different reg name sets (options). */
152 static int num_disassembly_options
;
154 /* The standard register names, and all the valid aliases for them. Note
155 that `fp', `sp' and `pc' are not added in this alias list, because they
156 have been added as builtin user registers in
157 std-regs.c:_initialize_frame_reg. */
162 } arm_register_aliases
[] = {
163 /* Basic register numbers. */
180 /* Synonyms (argument and variable registers). */
193 /* Other platform-specific names for r9. */
199 /* Names used by GCC (not listed in the ARM EABI). */
201 /* A special name from the older ATPCS. */
/* Canonical names for registers 0-25: the sixteen core registers,
   the eight FPA registers, FPS and CPSR.  */
static const char *const arm_register_names[] =
{"r0",  "r1",  "r2",  "r3",	/*  0  1  2  3 */
 "r4",  "r5",  "r6",  "r7",	/*  4  5  6  7 */
 "r8",  "r9",  "r10", "r11",	/*  8  9 10 11 */
 "r12", "sp",  "lr",  "pc",	/* 12 13 14 15 */
 "f0",  "f1",  "f2",  "f3",	/* 16 17 18 19 */
 "f4",  "f5",  "f6",  "f7",	/* 20 21 22 23 */
 "fps", "cpsr" };		/* 24 25       */
214 /* Valid register name styles. */
215 static const char **valid_disassembly_styles
;
217 /* Disassembly style to use. Default to "std" register names. */
218 static const char *disassembly_style
;
220 /* This is used to keep the bfd arch_info in sync with the disassembly
222 static void set_disassembly_style_sfunc(char *, int,
223 struct cmd_list_element
*);
224 static void set_disassembly_style (void);
226 static void convert_from_extended (const struct floatformat
*, const void *,
228 static void convert_to_extended (const struct floatformat
*, void *,
231 static enum register_status
arm_neon_quad_read (struct gdbarch
*gdbarch
,
232 struct regcache
*regcache
,
233 int regnum
, gdb_byte
*buf
);
234 static void arm_neon_quad_write (struct gdbarch
*gdbarch
,
235 struct regcache
*regcache
,
236 int regnum
, const gdb_byte
*buf
);
238 static int thumb_insn_size (unsigned short inst1
);
240 struct arm_prologue_cache
242 /* The stack pointer at the time this frame was created; i.e. the
243 caller's stack pointer when this function was called. It is used
244 to identify this frame. */
247 /* The frame base for this frame is just prev_sp - frame size.
248 FRAMESIZE is the distance from the frame pointer to the
249 initial stack pointer. */
253 /* The register used to hold the frame pointer for this frame. */
256 /* Saved register offsets. */
257 struct trad_frame_saved_reg
*saved_regs
;
260 static CORE_ADDR
arm_analyze_prologue (struct gdbarch
*gdbarch
,
261 CORE_ADDR prologue_start
,
262 CORE_ADDR prologue_end
,
263 struct arm_prologue_cache
*cache
);
265 /* Architecture version for displaced stepping. This effects the behaviour of
266 certain instructions, and really should not be hard-wired. */
268 #define DISPLACED_STEPPING_ARCH_VERSION 5
270 /* Addresses for calling Thumb functions have the bit 0 set.
271 Here are some macros to test, set, or clear bit 0 of addresses. */
272 #define IS_THUMB_ADDR(addr) ((addr) & 1)
273 #define MAKE_THUMB_ADDR(addr) ((addr) | 1)
274 #define UNMAKE_THUMB_ADDR(addr) ((addr) & ~1)
276 /* Set to true if the 32-bit mode is in use. */
280 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
283 arm_psr_thumb_bit (struct gdbarch
*gdbarch
)
285 if (gdbarch_tdep (gdbarch
)->is_m
)
291 /* Determine if FRAME is executing in Thumb mode. */
294 arm_frame_is_thumb (struct frame_info
*frame
)
297 ULONGEST t_bit
= arm_psr_thumb_bit (get_frame_arch (frame
));
299 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
300 directly (from a signal frame or dummy frame) or by interpreting
301 the saved LR (from a prologue or DWARF frame). So consult it and
302 trust the unwinders. */
303 cpsr
= get_frame_register_unsigned (frame
, ARM_PS_REGNUM
);
305 return (cpsr
& t_bit
) != 0;
308 /* Callback for VEC_lower_bound. */
311 arm_compare_mapping_symbols (const struct arm_mapping_symbol
*lhs
,
312 const struct arm_mapping_symbol
*rhs
)
314 return lhs
->value
< rhs
->value
;
317 /* Search for the mapping symbol covering MEMADDR. If one is found,
318 return its type. Otherwise, return 0. If START is non-NULL,
319 set *START to the location of the mapping symbol. */
322 arm_find_mapping_symbol (CORE_ADDR memaddr
, CORE_ADDR
*start
)
324 struct obj_section
*sec
;
326 /* If there are mapping symbols, consult them. */
327 sec
= find_pc_section (memaddr
);
330 struct arm_per_objfile
*data
;
331 VEC(arm_mapping_symbol_s
) *map
;
332 struct arm_mapping_symbol map_key
= { memaddr
- obj_section_addr (sec
),
336 data
= objfile_data (sec
->objfile
, arm_objfile_data_key
);
339 map
= data
->section_maps
[sec
->the_bfd_section
->index
];
340 if (!VEC_empty (arm_mapping_symbol_s
, map
))
342 struct arm_mapping_symbol
*map_sym
;
344 idx
= VEC_lower_bound (arm_mapping_symbol_s
, map
, &map_key
,
345 arm_compare_mapping_symbols
);
347 /* VEC_lower_bound finds the earliest ordered insertion
348 point. If the following symbol starts at this exact
349 address, we use that; otherwise, the preceding
350 mapping symbol covers this address. */
351 if (idx
< VEC_length (arm_mapping_symbol_s
, map
))
353 map_sym
= VEC_index (arm_mapping_symbol_s
, map
, idx
);
354 if (map_sym
->value
== map_key
.value
)
357 *start
= map_sym
->value
+ obj_section_addr (sec
);
358 return map_sym
->type
;
364 map_sym
= VEC_index (arm_mapping_symbol_s
, map
, idx
- 1);
366 *start
= map_sym
->value
+ obj_section_addr (sec
);
367 return map_sym
->type
;
376 /* Determine if the program counter specified in MEMADDR is in a Thumb
377 function. This function should be called for addresses unrelated to
378 any executing frame; otherwise, prefer arm_frame_is_thumb. */
381 arm_pc_is_thumb (struct gdbarch
*gdbarch
, CORE_ADDR memaddr
)
383 struct bound_minimal_symbol sym
;
385 struct displaced_step_closure
* dsc
386 = get_displaced_step_closure_by_addr(memaddr
);
388 /* If checking the mode of displaced instruction in copy area, the mode
389 should be determined by instruction on the original address. */
393 fprintf_unfiltered (gdb_stdlog
,
394 "displaced: check mode of %.8lx instead of %.8lx\n",
395 (unsigned long) dsc
->insn_addr
,
396 (unsigned long) memaddr
);
397 memaddr
= dsc
->insn_addr
;
400 /* If bit 0 of the address is set, assume this is a Thumb address. */
401 if (IS_THUMB_ADDR (memaddr
))
404 /* Respect internal mode override if active. */
405 if (arm_override_mode
!= -1)
406 return arm_override_mode
;
408 /* If the user wants to override the symbol table, let him. */
409 if (strcmp (arm_force_mode_string
, "arm") == 0)
411 if (strcmp (arm_force_mode_string
, "thumb") == 0)
414 /* ARM v6-M and v7-M are always in Thumb mode. */
415 if (gdbarch_tdep (gdbarch
)->is_m
)
418 /* If there are mapping symbols, consult them. */
419 type
= arm_find_mapping_symbol (memaddr
, NULL
);
423 /* Thumb functions have a "special" bit set in minimal symbols. */
424 sym
= lookup_minimal_symbol_by_pc (memaddr
);
426 return (MSYMBOL_IS_SPECIAL (sym
.minsym
));
428 /* If the user wants to override the fallback mode, let them. */
429 if (strcmp (arm_fallback_mode_string
, "arm") == 0)
431 if (strcmp (arm_fallback_mode_string
, "thumb") == 0)
434 /* If we couldn't find any symbol, but we're talking to a running
435 target, then trust the current value of $cpsr. This lets
436 "display/i $pc" always show the correct mode (though if there is
437 a symbol table we will not reach here, so it still may not be
438 displayed in the mode it will be executed). */
439 if (target_has_registers
)
440 return arm_frame_is_thumb (get_current_frame ());
442 /* Otherwise we're out of luck; we assume ARM. */
446 /* Remove useless bits from addresses in a running program. */
448 arm_addr_bits_remove (struct gdbarch
*gdbarch
, CORE_ADDR val
)
450 /* On M-profile devices, do not strip the low bit from EXC_RETURN
451 (the magic exception return address). */
452 if (gdbarch_tdep (gdbarch
)->is_m
453 && (val
& 0xfffffff0) == 0xfffffff0)
457 return UNMAKE_THUMB_ADDR (val
);
459 return (val
& 0x03fffffc);
462 /* Return 1 if PC is the start of a compiler helper function which
463 can be safely ignored during prologue skipping. IS_THUMB is true
464 if the function is known to be a Thumb function due to the way it
467 skip_prologue_function (struct gdbarch
*gdbarch
, CORE_ADDR pc
, int is_thumb
)
469 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
470 struct bound_minimal_symbol msym
;
472 msym
= lookup_minimal_symbol_by_pc (pc
);
473 if (msym
.minsym
!= NULL
474 && BMSYMBOL_VALUE_ADDRESS (msym
) == pc
475 && MSYMBOL_LINKAGE_NAME (msym
.minsym
) != NULL
)
477 const char *name
= MSYMBOL_LINKAGE_NAME (msym
.minsym
);
479 /* The GNU linker's Thumb call stub to foo is named
481 if (strstr (name
, "_from_thumb") != NULL
)
484 /* On soft-float targets, __truncdfsf2 is called to convert promoted
485 arguments to their argument types in non-prototyped
487 if (startswith (name
, "__truncdfsf2"))
489 if (startswith (name
, "__aeabi_d2f"))
492 /* Internal functions related to thread-local storage. */
493 if (startswith (name
, "__tls_get_addr"))
495 if (startswith (name
, "__aeabi_read_tp"))
500 /* If we run against a stripped glibc, we may be unable to identify
501 special functions by name. Check for one important case,
502 __aeabi_read_tp, by comparing the *code* against the default
503 implementation (this is hand-written ARM assembler in glibc). */
506 && read_memory_unsigned_integer (pc
, 4, byte_order_for_code
)
507 == 0xe3e00a0f /* mov r0, #0xffff0fff */
508 && read_memory_unsigned_integer (pc
+ 4, 4, byte_order_for_code
)
509 == 0xe240f01f) /* sub pc, r0, #31 */
516 /* Support routines for instruction parsing. */
517 #define submask(x) ((1L << ((x) + 1)) - 1)
518 #define bit(obj,st) (((obj) >> (st)) & 1)
519 #define bits(obj,st,fn) (((obj) >> (st)) & submask ((fn) - (st)))
520 #define sbits(obj,st,fn) \
521 ((long) (bits(obj,st,fn) | ((long) bit(obj,fn) * ~ submask (fn - st))))
522 #define BranchDest(addr,instr) \
523 ((CORE_ADDR) (((unsigned long) (addr)) + 8 + (sbits (instr, 0, 23) << 2)))
525 /* Extract the immediate from instruction movw/movt of encoding T. INSN1 is
526 the first 16-bit of instruction, and INSN2 is the second 16-bit of
528 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
529 ((bits ((insn1), 0, 3) << 12) \
530 | (bits ((insn1), 10, 10) << 11) \
531 | (bits ((insn2), 12, 14) << 8) \
532 | bits ((insn2), 0, 7))
534 /* Extract the immediate from instruction movw/movt of encoding A. INSN is
535 the 32-bit instruction. */
536 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \
537 ((bits ((insn), 16, 19) << 12) \
538 | bits ((insn), 0, 11))
/* Decode immediate value; implements ThumbExpandImmediate pseudo-op.
   IMM is the 12-bit modified immediate field (i:imm3:imm8); the top
   five bits select the encoding: < 8 means one of four byte-replication
   patterns, otherwise a rotated 8-bit value.  */

static unsigned int
thumb_expand_immediate (unsigned int imm)
{
  unsigned int count = imm >> 7;

  if (count < 8)
    switch (count / 2)
      {
      case 0:
	/* 00000000 00000000 00000000 abcdefgh  */
	return (imm & 0xff);
      case 1:
	/* 00000000 abcdefgh 00000000 abcdefgh  */
	return (imm & 0xff) | ((imm & 0xff) << 16);
      case 2:
	/* abcdefgh 00000000 abcdefgh 00000000  */
	return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
      case 3:
	/* abcdefgh abcdefgh abcdefgh abcdefgh  */
	return (imm & 0xff) | ((imm & 0xff) << 8)
		| ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
      }

  /* An 8-bit value (with the high bit forced set) rotated right by
     COUNT bit positions.  */
  return (0x80 | (imm & 0x7f)) << (32 - count);
}
/* Return 1 if the 16-bit Thumb instruction INST might change
   control flow, 0 otherwise.  */

static int
thumb_instruction_changes_pc (unsigned short inst)
{
  if ((inst & 0xff00) == 0xbd00)	/* pop {rlist, pc} */
    return 1;

  if ((inst & 0xf000) == 0xd000)	/* conditional branch */
    return 1;

  if ((inst & 0xf800) == 0xe000)	/* unconditional branch */
    return 1;

  if ((inst & 0xff00) == 0x4700)	/* bx REG, blx REG */
    return 1;

  if ((inst & 0xff87) == 0x4687)	/* mov pc, REG */
    return 1;

  if ((inst & 0xf500) == 0xb100)	/* CBNZ or CBZ.  */
    return 1;

  return 0;
}
/* Return 1 if the 32-bit Thumb instruction in INST1 and INST2
   might change control flow, 0 otherwise.  */

static int
thumb2_instruction_changes_pc (unsigned short inst1, unsigned short inst2)
{
  if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
    {
      /* Branches and miscellaneous control instructions.  */

      if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
	{
	  /* B, BL, BLX.  */
	  return 1;
	}
      else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)
	{
	  /* SUBS PC, LR, #imm8.  */
	  return 1;
	}
      else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
	{
	  /* Conditional branch.  */
	  return 1;
	}

      return 0;
    }

  if ((inst1 & 0xfe50) == 0xe810)
    {
      /* Load multiple or RFE.  */

      if (bit (inst1, 7) && !bit (inst1, 8))
	{
	  /* LDMIA or POP */
	  if (bit (inst2, 15))
	    return 1;
	}
      else if (!bit (inst1, 7) && bit (inst1, 8))
	{
	  /* LDMDB */
	  if (bit (inst2, 15))
	    return 1;
	}
      else if (bit (inst1, 7) && bit (inst1, 8))
	{
	  /* RFEIA */
	  return 1;
	}
      else if (!bit (inst1, 7) && !bit (inst1, 8))
	{
	  /* RFEDB */
	  return 1;
	}

      return 0;
    }

  if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
    {
      /* MOV PC or MOVS PC.  */
      return 1;
    }

  if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
    {
      /* LDR PC.  */
      if (bits (inst1, 0, 3) == 15)
	return 1;
      if (bit (inst1, 7))
	return 1;
      if (bit (inst2, 11))
	return 1;
      if ((inst2 & 0x0fc0) == 0x0000)
	return 1;

      return 0;
    }

  if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)
    {
      /* TBB.  */
      return 1;
    }

  if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)
    {
      /* TBH.  */
      return 1;
    }

  return 0;
}
/* Return 1 if the 16-bit Thumb instruction INSN restores SP in
   epilogue, 0 otherwise.  */

static int
thumb_instruction_restores_sp (unsigned short insn)
{
  return (insn == 0x46bd		/* mov sp, r7 */
	  || (insn & 0xff80) == 0xb000	/* add sp, imm */
	  || (insn & 0xfe00) == 0xbc00); /* pop <registers> */
}
697 /* Analyze a Thumb prologue, looking for a recognizable stack frame
698 and frame pointer. Scan until we encounter a store that could
699 clobber the stack frame unexpectedly, or an unknown instruction.
700 Return the last address which is definitely safe to skip for an
701 initial breakpoint. */
704 thumb_analyze_prologue (struct gdbarch
*gdbarch
,
705 CORE_ADDR start
, CORE_ADDR limit
,
706 struct arm_prologue_cache
*cache
)
708 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
709 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
712 struct pv_area
*stack
;
713 struct cleanup
*back_to
;
715 CORE_ADDR unrecognized_pc
= 0;
717 for (i
= 0; i
< 16; i
++)
718 regs
[i
] = pv_register (i
, 0);
719 stack
= make_pv_area (ARM_SP_REGNUM
, gdbarch_addr_bit (gdbarch
));
720 back_to
= make_cleanup_free_pv_area (stack
);
722 while (start
< limit
)
726 insn
= read_memory_unsigned_integer (start
, 2, byte_order_for_code
);
728 if ((insn
& 0xfe00) == 0xb400) /* push { rlist } */
733 if (pv_area_store_would_trash (stack
, regs
[ARM_SP_REGNUM
]))
736 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
737 whether to save LR (R14). */
738 mask
= (insn
& 0xff) | ((insn
& 0x100) << 6);
740 /* Calculate offsets of saved R0-R7 and LR. */
741 for (regno
= ARM_LR_REGNUM
; regno
>= 0; regno
--)
742 if (mask
& (1 << regno
))
744 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
],
746 pv_area_store (stack
, regs
[ARM_SP_REGNUM
], 4, regs
[regno
]);
749 else if ((insn
& 0xff80) == 0xb080) /* sub sp, #imm */
751 offset
= (insn
& 0x7f) << 2; /* get scaled offset */
752 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
],
755 else if (thumb_instruction_restores_sp (insn
))
757 /* Don't scan past the epilogue. */
760 else if ((insn
& 0xf800) == 0xa800) /* add Rd, sp, #imm */
761 regs
[bits (insn
, 8, 10)] = pv_add_constant (regs
[ARM_SP_REGNUM
],
763 else if ((insn
& 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
764 && pv_is_register (regs
[bits (insn
, 3, 5)], ARM_SP_REGNUM
))
765 regs
[bits (insn
, 0, 2)] = pv_add_constant (regs
[bits (insn
, 3, 5)],
767 else if ((insn
& 0xf800) == 0x3000 /* add Rd, #imm */
768 && pv_is_register (regs
[bits (insn
, 8, 10)], ARM_SP_REGNUM
))
769 regs
[bits (insn
, 8, 10)] = pv_add_constant (regs
[bits (insn
, 8, 10)],
771 else if ((insn
& 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
772 && pv_is_register (regs
[bits (insn
, 6, 8)], ARM_SP_REGNUM
)
773 && pv_is_constant (regs
[bits (insn
, 3, 5)]))
774 regs
[bits (insn
, 0, 2)] = pv_add (regs
[bits (insn
, 3, 5)],
775 regs
[bits (insn
, 6, 8)]);
776 else if ((insn
& 0xff00) == 0x4400 /* add Rd, Rm */
777 && pv_is_constant (regs
[bits (insn
, 3, 6)]))
779 int rd
= (bit (insn
, 7) << 3) + bits (insn
, 0, 2);
780 int rm
= bits (insn
, 3, 6);
781 regs
[rd
] = pv_add (regs
[rd
], regs
[rm
]);
783 else if ((insn
& 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
785 int dst_reg
= (insn
& 0x7) + ((insn
& 0x80) >> 4);
786 int src_reg
= (insn
& 0x78) >> 3;
787 regs
[dst_reg
] = regs
[src_reg
];
789 else if ((insn
& 0xf800) == 0x9000) /* str rd, [sp, #off] */
791 /* Handle stores to the stack. Normally pushes are used,
792 but with GCC -mtpcs-frame, there may be other stores
793 in the prologue to create the frame. */
794 int regno
= (insn
>> 8) & 0x7;
797 offset
= (insn
& 0xff) << 2;
798 addr
= pv_add_constant (regs
[ARM_SP_REGNUM
], offset
);
800 if (pv_area_store_would_trash (stack
, addr
))
803 pv_area_store (stack
, addr
, 4, regs
[regno
]);
805 else if ((insn
& 0xf800) == 0x6000) /* str rd, [rn, #off] */
807 int rd
= bits (insn
, 0, 2);
808 int rn
= bits (insn
, 3, 5);
811 offset
= bits (insn
, 6, 10) << 2;
812 addr
= pv_add_constant (regs
[rn
], offset
);
814 if (pv_area_store_would_trash (stack
, addr
))
817 pv_area_store (stack
, addr
, 4, regs
[rd
]);
819 else if (((insn
& 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
820 || (insn
& 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
821 && pv_is_register (regs
[bits (insn
, 3, 5)], ARM_SP_REGNUM
))
822 /* Ignore stores of argument registers to the stack. */
824 else if ((insn
& 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
825 && pv_is_register (regs
[bits (insn
, 8, 10)], ARM_SP_REGNUM
))
826 /* Ignore block loads from the stack, potentially copying
827 parameters from memory. */
829 else if ((insn
& 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
830 || ((insn
& 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
831 && pv_is_register (regs
[bits (insn
, 3, 5)], ARM_SP_REGNUM
)))
832 /* Similarly ignore single loads from the stack. */
834 else if ((insn
& 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
835 || (insn
& 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
836 /* Skip register copies, i.e. saves to another register
837 instead of the stack. */
839 else if ((insn
& 0xf800) == 0x2000) /* movs Rd, #imm */
840 /* Recognize constant loads; even with small stacks these are necessary
842 regs
[bits (insn
, 8, 10)] = pv_constant (bits (insn
, 0, 7));
843 else if ((insn
& 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
845 /* Constant pool loads, for the same reason. */
846 unsigned int constant
;
849 loc
= start
+ 4 + bits (insn
, 0, 7) * 4;
850 constant
= read_memory_unsigned_integer (loc
, 4, byte_order
);
851 regs
[bits (insn
, 8, 10)] = pv_constant (constant
);
853 else if (thumb_insn_size (insn
) == 4) /* 32-bit Thumb-2 instructions. */
855 unsigned short inst2
;
857 inst2
= read_memory_unsigned_integer (start
+ 2, 2,
858 byte_order_for_code
);
860 if ((insn
& 0xf800) == 0xf000 && (inst2
& 0xe800) == 0xe800)
862 /* BL, BLX. Allow some special function calls when
863 skipping the prologue; GCC generates these before
864 storing arguments to the stack. */
866 int j1
, j2
, imm1
, imm2
;
868 imm1
= sbits (insn
, 0, 10);
869 imm2
= bits (inst2
, 0, 10);
870 j1
= bit (inst2
, 13);
871 j2
= bit (inst2
, 11);
873 offset
= ((imm1
<< 12) + (imm2
<< 1));
874 offset
^= ((!j2
) << 22) | ((!j1
) << 23);
876 nextpc
= start
+ 4 + offset
;
877 /* For BLX make sure to clear the low bits. */
878 if (bit (inst2
, 12) == 0)
879 nextpc
= nextpc
& 0xfffffffc;
881 if (!skip_prologue_function (gdbarch
, nextpc
,
882 bit (inst2
, 12) != 0))
886 else if ((insn
& 0xffd0) == 0xe900 /* stmdb Rn{!},
888 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
890 pv_t addr
= regs
[bits (insn
, 0, 3)];
893 if (pv_area_store_would_trash (stack
, addr
))
896 /* Calculate offsets of saved registers. */
897 for (regno
= ARM_LR_REGNUM
; regno
>= 0; regno
--)
898 if (inst2
& (1 << regno
))
900 addr
= pv_add_constant (addr
, -4);
901 pv_area_store (stack
, addr
, 4, regs
[regno
]);
905 regs
[bits (insn
, 0, 3)] = addr
;
908 else if ((insn
& 0xff50) == 0xe940 /* strd Rt, Rt2,
910 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
912 int regno1
= bits (inst2
, 12, 15);
913 int regno2
= bits (inst2
, 8, 11);
914 pv_t addr
= regs
[bits (insn
, 0, 3)];
916 offset
= inst2
& 0xff;
918 addr
= pv_add_constant (addr
, offset
);
920 addr
= pv_add_constant (addr
, -offset
);
922 if (pv_area_store_would_trash (stack
, addr
))
925 pv_area_store (stack
, addr
, 4, regs
[regno1
]);
926 pv_area_store (stack
, pv_add_constant (addr
, 4),
930 regs
[bits (insn
, 0, 3)] = addr
;
933 else if ((insn
& 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
934 && (inst2
& 0x0c00) == 0x0c00
935 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
937 int regno
= bits (inst2
, 12, 15);
938 pv_t addr
= regs
[bits (insn
, 0, 3)];
940 offset
= inst2
& 0xff;
942 addr
= pv_add_constant (addr
, offset
);
944 addr
= pv_add_constant (addr
, -offset
);
946 if (pv_area_store_would_trash (stack
, addr
))
949 pv_area_store (stack
, addr
, 4, regs
[regno
]);
952 regs
[bits (insn
, 0, 3)] = addr
;
955 else if ((insn
& 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
956 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
958 int regno
= bits (inst2
, 12, 15);
961 offset
= inst2
& 0xfff;
962 addr
= pv_add_constant (regs
[bits (insn
, 0, 3)], offset
);
964 if (pv_area_store_would_trash (stack
, addr
))
967 pv_area_store (stack
, addr
, 4, regs
[regno
]);
970 else if ((insn
& 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
971 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
972 /* Ignore stores of argument registers to the stack. */
975 else if ((insn
& 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
976 && (inst2
& 0x0d00) == 0x0c00
977 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
978 /* Ignore stores of argument registers to the stack. */
981 else if ((insn
& 0xffd0) == 0xe890 /* ldmia Rn[!],
983 && (inst2
& 0x8000) == 0x0000
984 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
985 /* Ignore block loads from the stack, potentially copying
986 parameters from memory. */
989 else if ((insn
& 0xffb0) == 0xe950 /* ldrd Rt, Rt2,
991 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
992 /* Similarly ignore dual loads from the stack. */
995 else if ((insn
& 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
996 && (inst2
& 0x0d00) == 0x0c00
997 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
998 /* Similarly ignore single loads from the stack. */
1001 else if ((insn
& 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
1002 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
1003 /* Similarly ignore single loads from the stack. */
1006 else if ((insn
& 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
1007 && (inst2
& 0x8000) == 0x0000)
1009 unsigned int imm
= ((bits (insn
, 10, 10) << 11)
1010 | (bits (inst2
, 12, 14) << 8)
1011 | bits (inst2
, 0, 7));
1013 regs
[bits (inst2
, 8, 11)]
1014 = pv_add_constant (regs
[bits (insn
, 0, 3)],
1015 thumb_expand_immediate (imm
));
1018 else if ((insn
& 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
1019 && (inst2
& 0x8000) == 0x0000)
1021 unsigned int imm
= ((bits (insn
, 10, 10) << 11)
1022 | (bits (inst2
, 12, 14) << 8)
1023 | bits (inst2
, 0, 7));
1025 regs
[bits (inst2
, 8, 11)]
1026 = pv_add_constant (regs
[bits (insn
, 0, 3)], imm
);
1029 else if ((insn
& 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
1030 && (inst2
& 0x8000) == 0x0000)
1032 unsigned int imm
= ((bits (insn
, 10, 10) << 11)
1033 | (bits (inst2
, 12, 14) << 8)
1034 | bits (inst2
, 0, 7));
1036 regs
[bits (inst2
, 8, 11)]
1037 = pv_add_constant (regs
[bits (insn
, 0, 3)],
1038 - (CORE_ADDR
) thumb_expand_immediate (imm
));
1041 else if ((insn
& 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
1042 && (inst2
& 0x8000) == 0x0000)
1044 unsigned int imm
= ((bits (insn
, 10, 10) << 11)
1045 | (bits (inst2
, 12, 14) << 8)
1046 | bits (inst2
, 0, 7));
1048 regs
[bits (inst2
, 8, 11)]
1049 = pv_add_constant (regs
[bits (insn
, 0, 3)], - (CORE_ADDR
) imm
);
1052 else if ((insn
& 0xfbff) == 0xf04f) /* mov.w Rd, #const */
1054 unsigned int imm
= ((bits (insn
, 10, 10) << 11)
1055 | (bits (inst2
, 12, 14) << 8)
1056 | bits (inst2
, 0, 7));
1058 regs
[bits (inst2
, 8, 11)]
1059 = pv_constant (thumb_expand_immediate (imm
));
1062 else if ((insn
& 0xfbf0) == 0xf240) /* movw Rd, #const */
1065 = EXTRACT_MOVW_MOVT_IMM_T (insn
, inst2
);
1067 regs
[bits (inst2
, 8, 11)] = pv_constant (imm
);
1070 else if (insn
== 0xea5f /* mov.w Rd,Rm */
1071 && (inst2
& 0xf0f0) == 0)
1073 int dst_reg
= (inst2
& 0x0f00) >> 8;
1074 int src_reg
= inst2
& 0xf;
1075 regs
[dst_reg
] = regs
[src_reg
];
1078 else if ((insn
& 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1080 /* Constant pool loads. */
1081 unsigned int constant
;
1084 offset
= bits (inst2
, 0, 11);
1086 loc
= start
+ 4 + offset
;
1088 loc
= start
+ 4 - offset
;
1090 constant
= read_memory_unsigned_integer (loc
, 4, byte_order
);
1091 regs
[bits (inst2
, 12, 15)] = pv_constant (constant
);
1094 else if ((insn
& 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1096 /* Constant pool loads. */
1097 unsigned int constant
;
1100 offset
= bits (inst2
, 0, 7) << 2;
1102 loc
= start
+ 4 + offset
;
1104 loc
= start
+ 4 - offset
;
1106 constant
= read_memory_unsigned_integer (loc
, 4, byte_order
);
1107 regs
[bits (inst2
, 12, 15)] = pv_constant (constant
);
1109 constant
= read_memory_unsigned_integer (loc
+ 4, 4, byte_order
);
1110 regs
[bits (inst2
, 8, 11)] = pv_constant (constant
);
1113 else if (thumb2_instruction_changes_pc (insn
, inst2
))
1115 /* Don't scan past anything that might change control flow. */
1120 /* The optimizer might shove anything into the prologue,
1121 so we just skip what we don't recognize. */
1122 unrecognized_pc
= start
;
1127 else if (thumb_instruction_changes_pc (insn
))
1129 /* Don't scan past anything that might change control flow. */
1134 /* The optimizer might shove anything into the prologue,
1135 so we just skip what we don't recognize. */
1136 unrecognized_pc
= start
;
1143 fprintf_unfiltered (gdb_stdlog
, "Prologue scan stopped at %s\n",
1144 paddress (gdbarch
, start
));
1146 if (unrecognized_pc
== 0)
1147 unrecognized_pc
= start
;
1151 do_cleanups (back_to
);
1152 return unrecognized_pc
;
1155 if (pv_is_register (regs
[ARM_FP_REGNUM
], ARM_SP_REGNUM
))
1157 /* Frame pointer is fp. Frame size is constant. */
1158 cache
->framereg
= ARM_FP_REGNUM
;
1159 cache
->framesize
= -regs
[ARM_FP_REGNUM
].k
;
1161 else if (pv_is_register (regs
[THUMB_FP_REGNUM
], ARM_SP_REGNUM
))
1163 /* Frame pointer is r7. Frame size is constant. */
1164 cache
->framereg
= THUMB_FP_REGNUM
;
1165 cache
->framesize
= -regs
[THUMB_FP_REGNUM
].k
;
1169 /* Try the stack pointer... this is a bit desperate. */
1170 cache
->framereg
= ARM_SP_REGNUM
;
1171 cache
->framesize
= -regs
[ARM_SP_REGNUM
].k
;
1174 for (i
= 0; i
< 16; i
++)
1175 if (pv_area_find_reg (stack
, gdbarch
, i
, &offset
))
1176 cache
->saved_regs
[i
].addr
= offset
;
1178 do_cleanups (back_to
);
1179 return unrecognized_pc
;
1183 /* Try to analyze the instructions starting from PC, which load symbol
1184 __stack_chk_guard. Return the address of instruction after loading this
1185 symbol, set the dest register number to *BASEREG, and set the size of
1186 instructions for loading symbol in OFFSET. Return 0 if instructions are
1190 arm_analyze_load_stack_chk_guard(CORE_ADDR pc
, struct gdbarch
*gdbarch
,
1191 unsigned int *destreg
, int *offset
)
1193 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
1194 int is_thumb
= arm_pc_is_thumb (gdbarch
, pc
);
1195 unsigned int low
, high
, address
;
1200 unsigned short insn1
1201 = read_memory_unsigned_integer (pc
, 2, byte_order_for_code
);
1203 if ((insn1
& 0xf800) == 0x4800) /* ldr Rd, #immed */
1205 *destreg
= bits (insn1
, 8, 10);
1207 address
= (pc
& 0xfffffffc) + 4 + (bits (insn1
, 0, 7) << 2);
1208 address
= read_memory_unsigned_integer (address
, 4,
1209 byte_order_for_code
);
1211 else if ((insn1
& 0xfbf0) == 0xf240) /* movw Rd, #const */
1213 unsigned short insn2
1214 = read_memory_unsigned_integer (pc
+ 2, 2, byte_order_for_code
);
1216 low
= EXTRACT_MOVW_MOVT_IMM_T (insn1
, insn2
);
1219 = read_memory_unsigned_integer (pc
+ 4, 2, byte_order_for_code
);
1221 = read_memory_unsigned_integer (pc
+ 6, 2, byte_order_for_code
);
1223 /* movt Rd, #const */
1224 if ((insn1
& 0xfbc0) == 0xf2c0)
1226 high
= EXTRACT_MOVW_MOVT_IMM_T (insn1
, insn2
);
1227 *destreg
= bits (insn2
, 8, 11);
1229 address
= (high
<< 16 | low
);
1236 = read_memory_unsigned_integer (pc
, 4, byte_order_for_code
);
1238 if ((insn
& 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
1240 address
= bits (insn
, 0, 11) + pc
+ 8;
1241 address
= read_memory_unsigned_integer (address
, 4,
1242 byte_order_for_code
);
1244 *destreg
= bits (insn
, 12, 15);
1247 else if ((insn
& 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1249 low
= EXTRACT_MOVW_MOVT_IMM_A (insn
);
1252 = read_memory_unsigned_integer (pc
+ 4, 4, byte_order_for_code
);
1254 if ((insn
& 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1256 high
= EXTRACT_MOVW_MOVT_IMM_A (insn
);
1257 *destreg
= bits (insn
, 12, 15);
1259 address
= (high
<< 16 | low
);
1267 /* Try to skip a sequence of instructions used for stack protector. If PC
1268 points to the first instruction of this sequence, return the address of
1269 first instruction after this sequence, otherwise, return original PC.
1271 On arm, this sequence of instructions is composed of mainly three steps,
1272 Step 1: load symbol __stack_chk_guard,
1273 Step 2: load from address of __stack_chk_guard,
1274 Step 3: store it to somewhere else.
1276 Usually, instructions on step 2 and step 3 are the same on various ARM
1277 architectures. On step 2, it is one instruction 'ldr Rx, [Rn, #0]', and
1278 on step 3, it is also one instruction 'str Rx, [r7, #immd]'. However,
1279 instructions in step 1 vary from different ARM architectures. On ARMv7,
1282 movw Rn, #:lower16:__stack_chk_guard
1283 movt Rn, #:upper16:__stack_chk_guard
1290 .word __stack_chk_guard
1292 Since ldr/str is a very popular instruction, we can't use them as
1293 'fingerprint' or 'signature' of stack protector sequence. Here we choose
1294 sequence {movw/movt, ldr}/ldr/str plus symbol __stack_chk_guard, if not
1295 stripped, as the 'fingerprint' of a stack protector cdoe sequence. */
1298 arm_skip_stack_protector(CORE_ADDR pc
, struct gdbarch
*gdbarch
)
1300 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
1301 unsigned int basereg
;
1302 struct bound_minimal_symbol stack_chk_guard
;
1304 int is_thumb
= arm_pc_is_thumb (gdbarch
, pc
);
1307 /* Try to parse the instructions in Step 1. */
1308 addr
= arm_analyze_load_stack_chk_guard (pc
, gdbarch
,
1313 stack_chk_guard
= lookup_minimal_symbol_by_pc (addr
);
1314 /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
1315 Otherwise, this sequence cannot be for stack protector. */
1316 if (stack_chk_guard
.minsym
== NULL
1317 || !startswith (MSYMBOL_LINKAGE_NAME (stack_chk_guard
.minsym
), "__stack_chk_guard"))
1322 unsigned int destreg
;
1324 = read_memory_unsigned_integer (pc
+ offset
, 2, byte_order_for_code
);
1326 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1327 if ((insn
& 0xf800) != 0x6800)
1329 if (bits (insn
, 3, 5) != basereg
)
1331 destreg
= bits (insn
, 0, 2);
1333 insn
= read_memory_unsigned_integer (pc
+ offset
+ 2, 2,
1334 byte_order_for_code
);
1335 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1336 if ((insn
& 0xf800) != 0x6000)
1338 if (destreg
!= bits (insn
, 0, 2))
1343 unsigned int destreg
;
1345 = read_memory_unsigned_integer (pc
+ offset
, 4, byte_order_for_code
);
1347 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1348 if ((insn
& 0x0e500000) != 0x04100000)
1350 if (bits (insn
, 16, 19) != basereg
)
1352 destreg
= bits (insn
, 12, 15);
1353 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1354 insn
= read_memory_unsigned_integer (pc
+ offset
+ 4,
1355 4, byte_order_for_code
);
1356 if ((insn
& 0x0e500000) != 0x04000000)
1358 if (bits (insn
, 12, 15) != destreg
)
1361 /* The size of total two instructions ldr/str is 4 on Thumb-2, while 8
1364 return pc
+ offset
+ 4;
1366 return pc
+ offset
+ 8;
1369 /* Advance the PC across any function entry prologue instructions to
1370 reach some "real" code.
1372 The APCS (ARM Procedure Call Standard) defines the following
1376 [stmfd sp!, {a1,a2,a3,a4}]
1377 stmfd sp!, {...,fp,ip,lr,pc}
1378 [stfe f7, [sp, #-12]!]
1379 [stfe f6, [sp, #-12]!]
1380 [stfe f5, [sp, #-12]!]
1381 [stfe f4, [sp, #-12]!]
1382 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1385 arm_skip_prologue (struct gdbarch
*gdbarch
, CORE_ADDR pc
)
1387 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
1389 CORE_ADDR func_addr
, limit_pc
;
1391 /* See if we can determine the end of the prologue via the symbol table.
1392 If so, then return either PC, or the PC after the prologue, whichever
1394 if (find_pc_partial_function (pc
, NULL
, &func_addr
, NULL
))
1396 CORE_ADDR post_prologue_pc
1397 = skip_prologue_using_sal (gdbarch
, func_addr
);
1398 struct compunit_symtab
*cust
= find_pc_compunit_symtab (func_addr
);
1400 if (post_prologue_pc
)
1402 = arm_skip_stack_protector (post_prologue_pc
, gdbarch
);
1405 /* GCC always emits a line note before the prologue and another
1406 one after, even if the two are at the same address or on the
1407 same line. Take advantage of this so that we do not need to
1408 know every instruction that might appear in the prologue. We
1409 will have producer information for most binaries; if it is
1410 missing (e.g. for -gstabs), assuming the GNU tools. */
1411 if (post_prologue_pc
1413 || COMPUNIT_PRODUCER (cust
) == NULL
1414 || startswith (COMPUNIT_PRODUCER (cust
), "GNU ")
1415 || startswith (COMPUNIT_PRODUCER (cust
), "clang ")))
1416 return post_prologue_pc
;
1418 if (post_prologue_pc
!= 0)
1420 CORE_ADDR analyzed_limit
;
1422 /* For non-GCC compilers, make sure the entire line is an
1423 acceptable prologue; GDB will round this function's
1424 return value up to the end of the following line so we
1425 can not skip just part of a line (and we do not want to).
1427 RealView does not treat the prologue specially, but does
1428 associate prologue code with the opening brace; so this
1429 lets us skip the first line if we think it is the opening
1431 if (arm_pc_is_thumb (gdbarch
, func_addr
))
1432 analyzed_limit
= thumb_analyze_prologue (gdbarch
, func_addr
,
1433 post_prologue_pc
, NULL
);
1435 analyzed_limit
= arm_analyze_prologue (gdbarch
, func_addr
,
1436 post_prologue_pc
, NULL
);
1438 if (analyzed_limit
!= post_prologue_pc
)
1441 return post_prologue_pc
;
1445 /* Can't determine prologue from the symbol table, need to examine
1448 /* Find an upper limit on the function prologue using the debug
1449 information. If the debug information could not be used to provide
1450 that bound, then use an arbitrary large number as the upper bound. */
1451 /* Like arm_scan_prologue, stop no later than pc + 64. */
1452 limit_pc
= skip_prologue_using_sal (gdbarch
, pc
);
1454 limit_pc
= pc
+ 64; /* Magic. */
1457 /* Check if this is Thumb code. */
1458 if (arm_pc_is_thumb (gdbarch
, pc
))
1459 return thumb_analyze_prologue (gdbarch
, pc
, limit_pc
, NULL
);
1461 return arm_analyze_prologue (gdbarch
, pc
, limit_pc
, NULL
);
1465 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1466 This function decodes a Thumb function prologue to determine:
1467 1) the size of the stack frame
1468 2) which registers are saved on it
1469 3) the offsets of saved regs
1470 4) the offset from the stack pointer to the frame pointer
1472 A typical Thumb function prologue would create this stack frame
1473 (offsets relative to FP)
1474 old SP -> 24 stack parameters
1477 R7 -> 0 local variables (16 bytes)
1478 SP -> -12 additional stack space (12 bytes)
1479 The frame size would thus be 36 bytes, and the frame offset would be
1480 12 bytes. The frame register is R7.
1482 The comments for thumb_skip_prolog() describe the algorithm we use
1483 to detect the end of the prolog. */
1487 thumb_scan_prologue (struct gdbarch
*gdbarch
, CORE_ADDR prev_pc
,
1488 CORE_ADDR block_addr
, struct arm_prologue_cache
*cache
)
1490 CORE_ADDR prologue_start
;
1491 CORE_ADDR prologue_end
;
1493 if (find_pc_partial_function (block_addr
, NULL
, &prologue_start
,
1496 /* See comment in arm_scan_prologue for an explanation of
1498 if (prologue_end
> prologue_start
+ 64)
1500 prologue_end
= prologue_start
+ 64;
1504 /* We're in the boondocks: we have no idea where the start of the
1508 prologue_end
= min (prologue_end
, prev_pc
);
1510 thumb_analyze_prologue (gdbarch
, prologue_start
, prologue_end
, cache
);
1513 /* Return 1 if THIS_INSTR might change control flow, 0 otherwise. */
1516 arm_instruction_changes_pc (uint32_t this_instr
)
1518 if (bits (this_instr
, 28, 31) == INST_NV
)
1519 /* Unconditional instructions. */
1520 switch (bits (this_instr
, 24, 27))
1524 /* Branch with Link and change to Thumb. */
1529 /* Coprocessor register transfer. */
1530 if (bits (this_instr
, 12, 15) == 15)
1531 error (_("Invalid update to pc in instruction"));
1537 switch (bits (this_instr
, 25, 27))
1540 if (bits (this_instr
, 23, 24) == 2 && bit (this_instr
, 20) == 0)
1542 /* Multiplies and extra load/stores. */
1543 if (bit (this_instr
, 4) == 1 && bit (this_instr
, 7) == 1)
1544 /* Neither multiplies nor extension load/stores are allowed
1548 /* Otherwise, miscellaneous instructions. */
1550 /* BX <reg>, BXJ <reg>, BLX <reg> */
1551 if (bits (this_instr
, 4, 27) == 0x12fff1
1552 || bits (this_instr
, 4, 27) == 0x12fff2
1553 || bits (this_instr
, 4, 27) == 0x12fff3)
1556 /* Other miscellaneous instructions are unpredictable if they
1560 /* Data processing instruction. Fall through. */
1563 if (bits (this_instr
, 12, 15) == 15)
1570 /* Media instructions and architecturally undefined instructions. */
1571 if (bits (this_instr
, 25, 27) == 3 && bit (this_instr
, 4) == 1)
1575 if (bit (this_instr
, 20) == 0)
1579 if (bits (this_instr
, 12, 15) == ARM_PC_REGNUM
)
1585 /* Load/store multiple. */
1586 if (bit (this_instr
, 20) == 1 && bit (this_instr
, 15) == 1)
1592 /* Branch and branch with link. */
1597 /* Coprocessor transfers or SWIs can not affect PC. */
1601 internal_error (__FILE__
, __LINE__
, _("bad value in switch"));
1605 /* Return 1 if the ARM instruction INSN restores SP in epilogue, 0
1609 arm_instruction_restores_sp (unsigned int insn
)
1611 if (bits (insn
, 28, 31) != INST_NV
)
1613 if ((insn
& 0x0df0f000) == 0x0080d000
1614 /* ADD SP (register or immediate). */
1615 || (insn
& 0x0df0f000) == 0x0040d000
1616 /* SUB SP (register or immediate). */
1617 || (insn
& 0x0ffffff0) == 0x01a0d000
1619 || (insn
& 0x0fff0000) == 0x08bd0000
1621 || (insn
& 0x0fff0000) == 0x049d0000)
1622 /* POP of a single register. */
1629 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1630 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1631 fill it in. Return the first address not recognized as a prologue
1634 We recognize all the instructions typically found in ARM prologues,
1635 plus harmless instructions which can be skipped (either for analysis
1636 purposes, or a more restrictive set that can be skipped when finding
1637 the end of the prologue). */
1640 arm_analyze_prologue (struct gdbarch
*gdbarch
,
1641 CORE_ADDR prologue_start
, CORE_ADDR prologue_end
,
1642 struct arm_prologue_cache
*cache
)
1644 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
1645 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
1647 CORE_ADDR offset
, current_pc
;
1648 pv_t regs
[ARM_FPS_REGNUM
];
1649 struct pv_area
*stack
;
1650 struct cleanup
*back_to
;
1651 CORE_ADDR unrecognized_pc
= 0;
1653 /* Search the prologue looking for instructions that set up the
1654 frame pointer, adjust the stack pointer, and save registers.
1656 Be careful, however, and if it doesn't look like a prologue,
1657 don't try to scan it. If, for instance, a frameless function
1658 begins with stmfd sp!, then we will tell ourselves there is
1659 a frame, which will confuse stack traceback, as well as "finish"
1660 and other operations that rely on a knowledge of the stack
1663 for (regno
= 0; regno
< ARM_FPS_REGNUM
; regno
++)
1664 regs
[regno
] = pv_register (regno
, 0);
1665 stack
= make_pv_area (ARM_SP_REGNUM
, gdbarch_addr_bit (gdbarch
));
1666 back_to
= make_cleanup_free_pv_area (stack
);
1668 for (current_pc
= prologue_start
;
1669 current_pc
< prologue_end
;
1673 = read_memory_unsigned_integer (current_pc
, 4, byte_order_for_code
);
1675 if (insn
== 0xe1a0c00d) /* mov ip, sp */
1677 regs
[ARM_IP_REGNUM
] = regs
[ARM_SP_REGNUM
];
1680 else if ((insn
& 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1681 && pv_is_register (regs
[bits (insn
, 16, 19)], ARM_SP_REGNUM
))
1683 unsigned imm
= insn
& 0xff; /* immediate value */
1684 unsigned rot
= (insn
& 0xf00) >> 7; /* rotate amount */
1685 int rd
= bits (insn
, 12, 15);
1686 imm
= (imm
>> rot
) | (imm
<< (32 - rot
));
1687 regs
[rd
] = pv_add_constant (regs
[bits (insn
, 16, 19)], imm
);
1690 else if ((insn
& 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1691 && pv_is_register (regs
[bits (insn
, 16, 19)], ARM_SP_REGNUM
))
1693 unsigned imm
= insn
& 0xff; /* immediate value */
1694 unsigned rot
= (insn
& 0xf00) >> 7; /* rotate amount */
1695 int rd
= bits (insn
, 12, 15);
1696 imm
= (imm
>> rot
) | (imm
<< (32 - rot
));
1697 regs
[rd
] = pv_add_constant (regs
[bits (insn
, 16, 19)], -imm
);
1700 else if ((insn
& 0xffff0fff) == 0xe52d0004) /* str Rd,
1703 if (pv_area_store_would_trash (stack
, regs
[ARM_SP_REGNUM
]))
1705 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
], -4);
1706 pv_area_store (stack
, regs
[ARM_SP_REGNUM
], 4,
1707 regs
[bits (insn
, 12, 15)]);
1710 else if ((insn
& 0xffff0000) == 0xe92d0000)
1711 /* stmfd sp!, {..., fp, ip, lr, pc}
1713 stmfd sp!, {a1, a2, a3, a4} */
1715 int mask
= insn
& 0xffff;
1717 if (pv_area_store_would_trash (stack
, regs
[ARM_SP_REGNUM
]))
1720 /* Calculate offsets of saved registers. */
1721 for (regno
= ARM_PC_REGNUM
; regno
>= 0; regno
--)
1722 if (mask
& (1 << regno
))
1725 = pv_add_constant (regs
[ARM_SP_REGNUM
], -4);
1726 pv_area_store (stack
, regs
[ARM_SP_REGNUM
], 4, regs
[regno
]);
1729 else if ((insn
& 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1730 || (insn
& 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1731 || (insn
& 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1733 /* No need to add this to saved_regs -- it's just an arg reg. */
1736 else if ((insn
& 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1737 || (insn
& 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1738 || (insn
& 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1740 /* No need to add this to saved_regs -- it's just an arg reg. */
1743 else if ((insn
& 0xfff00000) == 0xe8800000 /* stm Rn,
1745 && pv_is_register (regs
[bits (insn
, 16, 19)], ARM_SP_REGNUM
))
1747 /* No need to add this to saved_regs -- it's just arg regs. */
1750 else if ((insn
& 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1752 unsigned imm
= insn
& 0xff; /* immediate value */
1753 unsigned rot
= (insn
& 0xf00) >> 7; /* rotate amount */
1754 imm
= (imm
>> rot
) | (imm
<< (32 - rot
));
1755 regs
[ARM_FP_REGNUM
] = pv_add_constant (regs
[ARM_IP_REGNUM
], -imm
);
1757 else if ((insn
& 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1759 unsigned imm
= insn
& 0xff; /* immediate value */
1760 unsigned rot
= (insn
& 0xf00) >> 7; /* rotate amount */
1761 imm
= (imm
>> rot
) | (imm
<< (32 - rot
));
1762 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
], -imm
);
1764 else if ((insn
& 0xffff7fff) == 0xed6d0103 /* stfe f?,
1766 && gdbarch_tdep (gdbarch
)->have_fpa_registers
)
1768 if (pv_area_store_would_trash (stack
, regs
[ARM_SP_REGNUM
]))
1771 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
], -12);
1772 regno
= ARM_F0_REGNUM
+ ((insn
>> 12) & 0x07);
1773 pv_area_store (stack
, regs
[ARM_SP_REGNUM
], 12, regs
[regno
]);
1775 else if ((insn
& 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1777 && gdbarch_tdep (gdbarch
)->have_fpa_registers
)
1779 int n_saved_fp_regs
;
1780 unsigned int fp_start_reg
, fp_bound_reg
;
1782 if (pv_area_store_would_trash (stack
, regs
[ARM_SP_REGNUM
]))
1785 if ((insn
& 0x800) == 0x800) /* N0 is set */
1787 if ((insn
& 0x40000) == 0x40000) /* N1 is set */
1788 n_saved_fp_regs
= 3;
1790 n_saved_fp_regs
= 1;
1794 if ((insn
& 0x40000) == 0x40000) /* N1 is set */
1795 n_saved_fp_regs
= 2;
1797 n_saved_fp_regs
= 4;
1800 fp_start_reg
= ARM_F0_REGNUM
+ ((insn
>> 12) & 0x7);
1801 fp_bound_reg
= fp_start_reg
+ n_saved_fp_regs
;
1802 for (; fp_start_reg
< fp_bound_reg
; fp_start_reg
++)
1804 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
], -12);
1805 pv_area_store (stack
, regs
[ARM_SP_REGNUM
], 12,
1806 regs
[fp_start_reg
++]);
1809 else if ((insn
& 0xff000000) == 0xeb000000 && cache
== NULL
) /* bl */
1811 /* Allow some special function calls when skipping the
1812 prologue; GCC generates these before storing arguments to
1814 CORE_ADDR dest
= BranchDest (current_pc
, insn
);
1816 if (skip_prologue_function (gdbarch
, dest
, 0))
1821 else if ((insn
& 0xf0000000) != 0xe0000000)
1822 break; /* Condition not true, exit early. */
1823 else if (arm_instruction_changes_pc (insn
))
1824 /* Don't scan past anything that might change control flow. */
1826 else if (arm_instruction_restores_sp (insn
))
1828 /* Don't scan past the epilogue. */
1831 else if ((insn
& 0xfe500000) == 0xe8100000 /* ldm */
1832 && pv_is_register (regs
[bits (insn
, 16, 19)], ARM_SP_REGNUM
))
1833 /* Ignore block loads from the stack, potentially copying
1834 parameters from memory. */
1836 else if ((insn
& 0xfc500000) == 0xe4100000
1837 && pv_is_register (regs
[bits (insn
, 16, 19)], ARM_SP_REGNUM
))
1838 /* Similarly ignore single loads from the stack. */
1840 else if ((insn
& 0xffff0ff0) == 0xe1a00000)
1841 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1842 register instead of the stack. */
1846 /* The optimizer might shove anything into the prologue, if
1847 we build up cache (cache != NULL) from scanning prologue,
1848 we just skip what we don't recognize and scan further to
1849 make cache as complete as possible. However, if we skip
1850 prologue, we'll stop immediately on unrecognized
1852 unrecognized_pc
= current_pc
;
1860 if (unrecognized_pc
== 0)
1861 unrecognized_pc
= current_pc
;
1865 int framereg
, framesize
;
1867 /* The frame size is just the distance from the frame register
1868 to the original stack pointer. */
1869 if (pv_is_register (regs
[ARM_FP_REGNUM
], ARM_SP_REGNUM
))
1871 /* Frame pointer is fp. */
1872 framereg
= ARM_FP_REGNUM
;
1873 framesize
= -regs
[ARM_FP_REGNUM
].k
;
1877 /* Try the stack pointer... this is a bit desperate. */
1878 framereg
= ARM_SP_REGNUM
;
1879 framesize
= -regs
[ARM_SP_REGNUM
].k
;
1882 cache
->framereg
= framereg
;
1883 cache
->framesize
= framesize
;
1885 for (regno
= 0; regno
< ARM_FPS_REGNUM
; regno
++)
1886 if (pv_area_find_reg (stack
, gdbarch
, regno
, &offset
))
1887 cache
->saved_regs
[regno
].addr
= offset
;
1891 fprintf_unfiltered (gdb_stdlog
, "Prologue scan stopped at %s\n",
1892 paddress (gdbarch
, unrecognized_pc
));
1894 do_cleanups (back_to
);
1895 return unrecognized_pc
;
1899 arm_scan_prologue (struct frame_info
*this_frame
,
1900 struct arm_prologue_cache
*cache
)
1902 struct gdbarch
*gdbarch
= get_frame_arch (this_frame
);
1903 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
1905 CORE_ADDR prologue_start
, prologue_end
, current_pc
;
1906 CORE_ADDR prev_pc
= get_frame_pc (this_frame
);
1907 CORE_ADDR block_addr
= get_frame_address_in_block (this_frame
);
1908 pv_t regs
[ARM_FPS_REGNUM
];
1909 struct pv_area
*stack
;
1910 struct cleanup
*back_to
;
1913 /* Assume there is no frame until proven otherwise. */
1914 cache
->framereg
= ARM_SP_REGNUM
;
1915 cache
->framesize
= 0;
1917 /* Check for Thumb prologue. */
1918 if (arm_frame_is_thumb (this_frame
))
1920 thumb_scan_prologue (gdbarch
, prev_pc
, block_addr
, cache
);
1924 /* Find the function prologue. If we can't find the function in
1925 the symbol table, peek in the stack frame to find the PC. */
1926 if (find_pc_partial_function (block_addr
, NULL
, &prologue_start
,
1929 /* One way to find the end of the prologue (which works well
1930 for unoptimized code) is to do the following:
1932 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1935 prologue_end = prev_pc;
1936 else if (sal.end < prologue_end)
1937 prologue_end = sal.end;
1939 This mechanism is very accurate so long as the optimizer
1940 doesn't move any instructions from the function body into the
1941 prologue. If this happens, sal.end will be the last
1942 instruction in the first hunk of prologue code just before
1943 the first instruction that the scheduler has moved from
1944 the body to the prologue.
1946 In order to make sure that we scan all of the prologue
1947 instructions, we use a slightly less accurate mechanism which
1948 may scan more than necessary. To help compensate for this
1949 lack of accuracy, the prologue scanning loop below contains
1950 several clauses which'll cause the loop to terminate early if
1951 an implausible prologue instruction is encountered.
1957 is a suitable endpoint since it accounts for the largest
1958 possible prologue plus up to five instructions inserted by
1961 if (prologue_end
> prologue_start
+ 64)
1963 prologue_end
= prologue_start
+ 64; /* See above. */
1968 /* We have no symbol information. Our only option is to assume this
1969 function has a standard stack frame and the normal frame register.
1970 Then, we can find the value of our frame pointer on entrance to
1971 the callee (or at the present moment if this is the innermost frame).
1972 The value stored there should be the address of the stmfd + 8. */
1973 CORE_ADDR frame_loc
;
1974 LONGEST return_value
;
1976 frame_loc
= get_frame_register_unsigned (this_frame
, ARM_FP_REGNUM
);
1977 if (!safe_read_memory_integer (frame_loc
, 4, byte_order
, &return_value
))
1981 prologue_start
= gdbarch_addr_bits_remove
1982 (gdbarch
, return_value
) - 8;
1983 prologue_end
= prologue_start
+ 64; /* See above. */
1987 if (prev_pc
< prologue_end
)
1988 prologue_end
= prev_pc
;
1990 arm_analyze_prologue (gdbarch
, prologue_start
, prologue_end
, cache
);
1993 static struct arm_prologue_cache
*
1994 arm_make_prologue_cache (struct frame_info
*this_frame
)
1997 struct arm_prologue_cache
*cache
;
1998 CORE_ADDR unwound_fp
;
2000 cache
= FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache
);
2001 cache
->saved_regs
= trad_frame_alloc_saved_regs (this_frame
);
2003 arm_scan_prologue (this_frame
, cache
);
2005 unwound_fp
= get_frame_register_unsigned (this_frame
, cache
->framereg
);
2006 if (unwound_fp
== 0)
2009 cache
->prev_sp
= unwound_fp
+ cache
->framesize
;
2011 /* Calculate actual addresses of saved registers using offsets
2012 determined by arm_scan_prologue. */
2013 for (reg
= 0; reg
< gdbarch_num_regs (get_frame_arch (this_frame
)); reg
++)
2014 if (trad_frame_addr_p (cache
->saved_regs
, reg
))
2015 cache
->saved_regs
[reg
].addr
+= cache
->prev_sp
;
2020 /* Implementation of the stop_reason hook for arm_prologue frames. */
2022 static enum unwind_stop_reason
2023 arm_prologue_unwind_stop_reason (struct frame_info
*this_frame
,
2026 struct arm_prologue_cache
*cache
;
2029 if (*this_cache
== NULL
)
2030 *this_cache
= arm_make_prologue_cache (this_frame
);
2031 cache
= *this_cache
;
2033 /* This is meant to halt the backtrace at "_start". */
2034 pc
= get_frame_pc (this_frame
);
2035 if (pc
<= gdbarch_tdep (get_frame_arch (this_frame
))->lowest_pc
)
2036 return UNWIND_OUTERMOST
;
2038 /* If we've hit a wall, stop. */
2039 if (cache
->prev_sp
== 0)
2040 return UNWIND_OUTERMOST
;
2042 return UNWIND_NO_REASON
;
2045 /* Our frame ID for a normal frame is the current function's starting PC
2046 and the caller's SP when we were called. */
2049 arm_prologue_this_id (struct frame_info
*this_frame
,
2051 struct frame_id
*this_id
)
2053 struct arm_prologue_cache
*cache
;
2057 if (*this_cache
== NULL
)
2058 *this_cache
= arm_make_prologue_cache (this_frame
);
2059 cache
= *this_cache
;
2061 /* Use function start address as part of the frame ID. If we cannot
2062 identify the start address (due to missing symbol information),
2063 fall back to just using the current PC. */
2064 pc
= get_frame_pc (this_frame
);
2065 func
= get_frame_func (this_frame
);
2069 id
= frame_id_build (cache
->prev_sp
, func
);
2073 static struct value
*
2074 arm_prologue_prev_register (struct frame_info
*this_frame
,
2078 struct gdbarch
*gdbarch
= get_frame_arch (this_frame
);
2079 struct arm_prologue_cache
*cache
;
2081 if (*this_cache
== NULL
)
2082 *this_cache
= arm_make_prologue_cache (this_frame
);
2083 cache
= *this_cache
;
2085 /* If we are asked to unwind the PC, then we need to return the LR
2086 instead. The prologue may save PC, but it will point into this
2087 frame's prologue, not the next frame's resume location. Also
2088 strip the saved T bit. A valid LR may have the low bit set, but
2089 a valid PC never does. */
2090 if (prev_regnum
== ARM_PC_REGNUM
)
2094 lr
= frame_unwind_register_unsigned (this_frame
, ARM_LR_REGNUM
);
2095 return frame_unwind_got_constant (this_frame
, prev_regnum
,
2096 arm_addr_bits_remove (gdbarch
, lr
));
2099 /* SP is generally not saved to the stack, but this frame is
2100 identified by the next frame's stack pointer at the time of the call.
2101 The value was already reconstructed into PREV_SP. */
2102 if (prev_regnum
== ARM_SP_REGNUM
)
2103 return frame_unwind_got_constant (this_frame
, prev_regnum
, cache
->prev_sp
);
2105 /* The CPSR may have been changed by the call instruction and by the
2106 called function. The only bit we can reconstruct is the T bit,
2107 by checking the low bit of LR as of the call. This is a reliable
2108 indicator of Thumb-ness except for some ARM v4T pre-interworking
2109 Thumb code, which could get away with a clear low bit as long as
2110 the called function did not use bx. Guess that all other
2111 bits are unchanged; the condition flags are presumably lost,
2112 but the processor status is likely valid. */
2113 if (prev_regnum
== ARM_PS_REGNUM
)
2116 ULONGEST t_bit
= arm_psr_thumb_bit (gdbarch
);
2118 cpsr
= get_frame_register_unsigned (this_frame
, prev_regnum
);
2119 lr
= frame_unwind_register_unsigned (this_frame
, ARM_LR_REGNUM
);
2120 if (IS_THUMB_ADDR (lr
))
2124 return frame_unwind_got_constant (this_frame
, prev_regnum
, cpsr
);
2127 return trad_frame_get_prev_register (this_frame
, cache
->saved_regs
,
2131 struct frame_unwind arm_prologue_unwind
= {
2133 arm_prologue_unwind_stop_reason
,
2134 arm_prologue_this_id
,
2135 arm_prologue_prev_register
,
2137 default_frame_sniffer
2140 /* Maintain a list of ARM exception table entries per objfile, similar to the
2141 list of mapping symbols. We only cache entries for standard ARM-defined
2142 personality routines; the cache will contain only the frame unwinding
2143 instructions associated with the entry (not the descriptors). */
2145 static const struct objfile_data
*arm_exidx_data_key
;
2147 struct arm_exidx_entry
2152 typedef struct arm_exidx_entry arm_exidx_entry_s
;
2153 DEF_VEC_O(arm_exidx_entry_s
);
2155 struct arm_exidx_data
2157 VEC(arm_exidx_entry_s
) **section_maps
;
2161 arm_exidx_data_free (struct objfile
*objfile
, void *arg
)
2163 struct arm_exidx_data
*data
= arg
;
2166 for (i
= 0; i
< objfile
->obfd
->section_count
; i
++)
2167 VEC_free (arm_exidx_entry_s
, data
->section_maps
[i
]);
2171 arm_compare_exidx_entries (const struct arm_exidx_entry
*lhs
,
2172 const struct arm_exidx_entry
*rhs
)
2174 return lhs
->addr
< rhs
->addr
;
2177 static struct obj_section
*
2178 arm_obj_section_from_vma (struct objfile
*objfile
, bfd_vma vma
)
2180 struct obj_section
*osect
;
2182 ALL_OBJFILE_OSECTIONS (objfile
, osect
)
2183 if (bfd_get_section_flags (objfile
->obfd
,
2184 osect
->the_bfd_section
) & SEC_ALLOC
)
2186 bfd_vma start
, size
;
2187 start
= bfd_get_section_vma (objfile
->obfd
, osect
->the_bfd_section
);
2188 size
= bfd_get_section_size (osect
->the_bfd_section
);
2190 if (start
<= vma
&& vma
< start
+ size
)
2197 /* Parse contents of exception table and exception index sections
2198 of OBJFILE, and fill in the exception table entry cache.
2200 For each entry that refers to a standard ARM-defined personality
2201 routine, extract the frame unwinding instructions (from either
2202 the index or the table section). The unwinding instructions
2204 - extracting them from the rest of the table data
2205 - converting to host endianness
2206 - appending the implicit 0xb0 ("Finish") code
2208 The extracted and normalized instructions are stored for later
2209 retrieval by the arm_find_exidx_entry routine. */
2212 arm_exidx_new_objfile (struct objfile
*objfile
)
2214 struct cleanup
*cleanups
;
2215 struct arm_exidx_data
*data
;
2216 asection
*exidx
, *extab
;
2217 bfd_vma exidx_vma
= 0, extab_vma
= 0;
2218 bfd_size_type exidx_size
= 0, extab_size
= 0;
2219 gdb_byte
*exidx_data
= NULL
, *extab_data
= NULL
;
2222 /* If we've already touched this file, do nothing. */
2223 if (!objfile
|| objfile_data (objfile
, arm_exidx_data_key
) != NULL
)
2225 cleanups
= make_cleanup (null_cleanup
, NULL
);
2227 /* Read contents of exception table and index. */
2228 exidx
= bfd_get_section_by_name (objfile
->obfd
, ".ARM.exidx");
2231 exidx_vma
= bfd_section_vma (objfile
->obfd
, exidx
);
2232 exidx_size
= bfd_get_section_size (exidx
);
2233 exidx_data
= xmalloc (exidx_size
);
2234 make_cleanup (xfree
, exidx_data
);
2236 if (!bfd_get_section_contents (objfile
->obfd
, exidx
,
2237 exidx_data
, 0, exidx_size
))
2239 do_cleanups (cleanups
);
2244 extab
= bfd_get_section_by_name (objfile
->obfd
, ".ARM.extab");
2247 extab_vma
= bfd_section_vma (objfile
->obfd
, extab
);
2248 extab_size
= bfd_get_section_size (extab
);
2249 extab_data
= xmalloc (extab_size
);
2250 make_cleanup (xfree
, extab_data
);
2252 if (!bfd_get_section_contents (objfile
->obfd
, extab
,
2253 extab_data
, 0, extab_size
))
2255 do_cleanups (cleanups
);
2260 /* Allocate exception table data structure. */
2261 data
= OBSTACK_ZALLOC (&objfile
->objfile_obstack
, struct arm_exidx_data
);
2262 set_objfile_data (objfile
, arm_exidx_data_key
, data
);
2263 data
->section_maps
= OBSTACK_CALLOC (&objfile
->objfile_obstack
,
2264 objfile
->obfd
->section_count
,
2265 VEC(arm_exidx_entry_s
) *);
2267 /* Fill in exception table. */
2268 for (i
= 0; i
< exidx_size
/ 8; i
++)
2270 struct arm_exidx_entry new_exidx_entry
;
2271 bfd_vma idx
= bfd_h_get_32 (objfile
->obfd
, exidx_data
+ i
* 8);
2272 bfd_vma val
= bfd_h_get_32 (objfile
->obfd
, exidx_data
+ i
* 8 + 4);
2273 bfd_vma addr
= 0, word
= 0;
2274 int n_bytes
= 0, n_words
= 0;
2275 struct obj_section
*sec
;
2276 gdb_byte
*entry
= NULL
;
2278 /* Extract address of start of function. */
2279 idx
= ((idx
& 0x7fffffff) ^ 0x40000000) - 0x40000000;
2280 idx
+= exidx_vma
+ i
* 8;
2282 /* Find section containing function and compute section offset. */
2283 sec
= arm_obj_section_from_vma (objfile
, idx
);
2286 idx
-= bfd_get_section_vma (objfile
->obfd
, sec
->the_bfd_section
);
2288 /* Determine address of exception table entry. */
2291 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2293 else if ((val
& 0xff000000) == 0x80000000)
2295 /* Exception table entry embedded in .ARM.exidx
2296 -- must be short form. */
2300 else if (!(val
& 0x80000000))
2302 /* Exception table entry in .ARM.extab. */
2303 addr
= ((val
& 0x7fffffff) ^ 0x40000000) - 0x40000000;
2304 addr
+= exidx_vma
+ i
* 8 + 4;
2306 if (addr
>= extab_vma
&& addr
+ 4 <= extab_vma
+ extab_size
)
2308 word
= bfd_h_get_32 (objfile
->obfd
,
2309 extab_data
+ addr
- extab_vma
);
2312 if ((word
& 0xff000000) == 0x80000000)
2317 else if ((word
& 0xff000000) == 0x81000000
2318 || (word
& 0xff000000) == 0x82000000)
2322 n_words
= ((word
>> 16) & 0xff);
2324 else if (!(word
& 0x80000000))
2327 struct obj_section
*pers_sec
;
2328 int gnu_personality
= 0;
2330 /* Custom personality routine. */
2331 pers
= ((word
& 0x7fffffff) ^ 0x40000000) - 0x40000000;
2332 pers
= UNMAKE_THUMB_ADDR (pers
+ addr
- 4);
2334 /* Check whether we've got one of the variants of the
2335 GNU personality routines. */
2336 pers_sec
= arm_obj_section_from_vma (objfile
, pers
);
2339 static const char *personality
[] =
2341 "__gcc_personality_v0",
2342 "__gxx_personality_v0",
2343 "__gcj_personality_v0",
2344 "__gnu_objc_personality_v0",
2348 CORE_ADDR pc
= pers
+ obj_section_offset (pers_sec
);
2351 for (k
= 0; personality
[k
]; k
++)
2352 if (lookup_minimal_symbol_by_pc_name
2353 (pc
, personality
[k
], objfile
))
2355 gnu_personality
= 1;
2360 /* If so, the next word contains a word count in the high
2361 byte, followed by the same unwind instructions as the
2362 pre-defined forms. */
2364 && addr
+ 4 <= extab_vma
+ extab_size
)
2366 word
= bfd_h_get_32 (objfile
->obfd
,
2367 extab_data
+ addr
- extab_vma
);
2370 n_words
= ((word
>> 24) & 0xff);
2376 /* Sanity check address. */
2378 if (addr
< extab_vma
|| addr
+ 4 * n_words
> extab_vma
+ extab_size
)
2379 n_words
= n_bytes
= 0;
2381 /* The unwind instructions reside in WORD (only the N_BYTES least
2382 significant bytes are valid), followed by N_WORDS words in the
2383 extab section starting at ADDR. */
2384 if (n_bytes
|| n_words
)
2386 gdb_byte
*p
= entry
= obstack_alloc (&objfile
->objfile_obstack
,
2387 n_bytes
+ n_words
* 4 + 1);
2390 *p
++ = (gdb_byte
) ((word
>> (8 * n_bytes
)) & 0xff);
2394 word
= bfd_h_get_32 (objfile
->obfd
,
2395 extab_data
+ addr
- extab_vma
);
2398 *p
++ = (gdb_byte
) ((word
>> 24) & 0xff);
2399 *p
++ = (gdb_byte
) ((word
>> 16) & 0xff);
2400 *p
++ = (gdb_byte
) ((word
>> 8) & 0xff);
2401 *p
++ = (gdb_byte
) (word
& 0xff);
2404 /* Implied "Finish" to terminate the list. */
2408 /* Push entry onto vector. They are guaranteed to always
2409 appear in order of increasing addresses. */
2410 new_exidx_entry
.addr
= idx
;
2411 new_exidx_entry
.entry
= entry
;
2412 VEC_safe_push (arm_exidx_entry_s
,
2413 data
->section_maps
[sec
->the_bfd_section
->index
],
2417 do_cleanups (cleanups
);
2420 /* Search for the exception table entry covering MEMADDR. If one is found,
2421 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2422 set *START to the start of the region covered by this entry. */
2425 arm_find_exidx_entry (CORE_ADDR memaddr
, CORE_ADDR
*start
)
2427 struct obj_section
*sec
;
2429 sec
= find_pc_section (memaddr
);
2432 struct arm_exidx_data
*data
;
2433 VEC(arm_exidx_entry_s
) *map
;
2434 struct arm_exidx_entry map_key
= { memaddr
- obj_section_addr (sec
), 0 };
2437 data
= objfile_data (sec
->objfile
, arm_exidx_data_key
);
2440 map
= data
->section_maps
[sec
->the_bfd_section
->index
];
2441 if (!VEC_empty (arm_exidx_entry_s
, map
))
2443 struct arm_exidx_entry
*map_sym
;
2445 idx
= VEC_lower_bound (arm_exidx_entry_s
, map
, &map_key
,
2446 arm_compare_exidx_entries
);
2448 /* VEC_lower_bound finds the earliest ordered insertion
2449 point. If the following symbol starts at this exact
2450 address, we use that; otherwise, the preceding
2451 exception table entry covers this address. */
2452 if (idx
< VEC_length (arm_exidx_entry_s
, map
))
2454 map_sym
= VEC_index (arm_exidx_entry_s
, map
, idx
);
2455 if (map_sym
->addr
== map_key
.addr
)
2458 *start
= map_sym
->addr
+ obj_section_addr (sec
);
2459 return map_sym
->entry
;
2465 map_sym
= VEC_index (arm_exidx_entry_s
, map
, idx
- 1);
2467 *start
= map_sym
->addr
+ obj_section_addr (sec
);
2468 return map_sym
->entry
;
2477 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2478 instruction list from the ARM exception table entry ENTRY, allocate and
2479 return a prologue cache structure describing how to unwind this frame.
2481 Return NULL if the unwinding instruction list contains a "spare",
2482 "reserved" or "refuse to unwind" instruction as defined in section
2483 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2484 for the ARM Architecture" document. */
2486 static struct arm_prologue_cache
*
2487 arm_exidx_fill_cache (struct frame_info
*this_frame
, gdb_byte
*entry
)
2492 struct arm_prologue_cache
*cache
;
2493 cache
= FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache
);
2494 cache
->saved_regs
= trad_frame_alloc_saved_regs (this_frame
);
2500 /* Whenever we reload SP, we actually have to retrieve its
2501 actual value in the current frame. */
2504 if (trad_frame_realreg_p (cache
->saved_regs
, ARM_SP_REGNUM
))
2506 int reg
= cache
->saved_regs
[ARM_SP_REGNUM
].realreg
;
2507 vsp
= get_frame_register_unsigned (this_frame
, reg
);
2511 CORE_ADDR addr
= cache
->saved_regs
[ARM_SP_REGNUM
].addr
;
2512 vsp
= get_frame_memory_unsigned (this_frame
, addr
, 4);
2518 /* Decode next unwind instruction. */
2521 if ((insn
& 0xc0) == 0)
2523 int offset
= insn
& 0x3f;
2524 vsp
+= (offset
<< 2) + 4;
2526 else if ((insn
& 0xc0) == 0x40)
2528 int offset
= insn
& 0x3f;
2529 vsp
-= (offset
<< 2) + 4;
2531 else if ((insn
& 0xf0) == 0x80)
2533 int mask
= ((insn
& 0xf) << 8) | *entry
++;
2536 /* The special case of an all-zero mask identifies
2537 "Refuse to unwind". We return NULL to fall back
2538 to the prologue analyzer. */
2542 /* Pop registers r4..r15 under mask. */
2543 for (i
= 0; i
< 12; i
++)
2544 if (mask
& (1 << i
))
2546 cache
->saved_regs
[4 + i
].addr
= vsp
;
2550 /* Special-case popping SP -- we need to reload vsp. */
2551 if (mask
& (1 << (ARM_SP_REGNUM
- 4)))
2554 else if ((insn
& 0xf0) == 0x90)
2556 int reg
= insn
& 0xf;
2558 /* Reserved cases. */
2559 if (reg
== ARM_SP_REGNUM
|| reg
== ARM_PC_REGNUM
)
2562 /* Set SP from another register and mark VSP for reload. */
2563 cache
->saved_regs
[ARM_SP_REGNUM
] = cache
->saved_regs
[reg
];
2566 else if ((insn
& 0xf0) == 0xa0)
2568 int count
= insn
& 0x7;
2569 int pop_lr
= (insn
& 0x8) != 0;
2572 /* Pop r4..r[4+count]. */
2573 for (i
= 0; i
<= count
; i
++)
2575 cache
->saved_regs
[4 + i
].addr
= vsp
;
2579 /* If indicated by flag, pop LR as well. */
2582 cache
->saved_regs
[ARM_LR_REGNUM
].addr
= vsp
;
2586 else if (insn
== 0xb0)
2588 /* We could only have updated PC by popping into it; if so, it
2589 will show up as address. Otherwise, copy LR into PC. */
2590 if (!trad_frame_addr_p (cache
->saved_regs
, ARM_PC_REGNUM
))
2591 cache
->saved_regs
[ARM_PC_REGNUM
]
2592 = cache
->saved_regs
[ARM_LR_REGNUM
];
2597 else if (insn
== 0xb1)
2599 int mask
= *entry
++;
2602 /* All-zero mask and mask >= 16 is "spare". */
2603 if (mask
== 0 || mask
>= 16)
2606 /* Pop r0..r3 under mask. */
2607 for (i
= 0; i
< 4; i
++)
2608 if (mask
& (1 << i
))
2610 cache
->saved_regs
[i
].addr
= vsp
;
2614 else if (insn
== 0xb2)
2616 ULONGEST offset
= 0;
2621 offset
|= (*entry
& 0x7f) << shift
;
2624 while (*entry
++ & 0x80);
2626 vsp
+= 0x204 + (offset
<< 2);
2628 else if (insn
== 0xb3)
2630 int start
= *entry
>> 4;
2631 int count
= (*entry
++) & 0xf;
2634 /* Only registers D0..D15 are valid here. */
2635 if (start
+ count
>= 16)
2638 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2639 for (i
= 0; i
<= count
; i
++)
2641 cache
->saved_regs
[ARM_D0_REGNUM
+ start
+ i
].addr
= vsp
;
2645 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2648 else if ((insn
& 0xf8) == 0xb8)
2650 int count
= insn
& 0x7;
2653 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2654 for (i
= 0; i
<= count
; i
++)
2656 cache
->saved_regs
[ARM_D0_REGNUM
+ 8 + i
].addr
= vsp
;
2660 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2663 else if (insn
== 0xc6)
2665 int start
= *entry
>> 4;
2666 int count
= (*entry
++) & 0xf;
2669 /* Only registers WR0..WR15 are valid. */
2670 if (start
+ count
>= 16)
2673 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2674 for (i
= 0; i
<= count
; i
++)
2676 cache
->saved_regs
[ARM_WR0_REGNUM
+ start
+ i
].addr
= vsp
;
2680 else if (insn
== 0xc7)
2682 int mask
= *entry
++;
2685 /* All-zero mask and mask >= 16 is "spare". */
2686 if (mask
== 0 || mask
>= 16)
2689 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2690 for (i
= 0; i
< 4; i
++)
2691 if (mask
& (1 << i
))
2693 cache
->saved_regs
[ARM_WCGR0_REGNUM
+ i
].addr
= vsp
;
2697 else if ((insn
& 0xf8) == 0xc0)
2699 int count
= insn
& 0x7;
2702 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2703 for (i
= 0; i
<= count
; i
++)
2705 cache
->saved_regs
[ARM_WR0_REGNUM
+ 10 + i
].addr
= vsp
;
2709 else if (insn
== 0xc8)
2711 int start
= *entry
>> 4;
2712 int count
= (*entry
++) & 0xf;
2715 /* Only registers D0..D31 are valid. */
2716 if (start
+ count
>= 16)
2719 /* Pop VFP double-precision registers
2720 D[16+start]..D[16+start+count]. */
2721 for (i
= 0; i
<= count
; i
++)
2723 cache
->saved_regs
[ARM_D0_REGNUM
+ 16 + start
+ i
].addr
= vsp
;
2727 else if (insn
== 0xc9)
2729 int start
= *entry
>> 4;
2730 int count
= (*entry
++) & 0xf;
2733 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2734 for (i
= 0; i
<= count
; i
++)
2736 cache
->saved_regs
[ARM_D0_REGNUM
+ start
+ i
].addr
= vsp
;
2740 else if ((insn
& 0xf8) == 0xd0)
2742 int count
= insn
& 0x7;
2745 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2746 for (i
= 0; i
<= count
; i
++)
2748 cache
->saved_regs
[ARM_D0_REGNUM
+ 8 + i
].addr
= vsp
;
2754 /* Everything else is "spare". */
2759 /* If we restore SP from a register, assume this was the frame register.
2760 Otherwise just fall back to SP as frame register. */
2761 if (trad_frame_realreg_p (cache
->saved_regs
, ARM_SP_REGNUM
))
2762 cache
->framereg
= cache
->saved_regs
[ARM_SP_REGNUM
].realreg
;
2764 cache
->framereg
= ARM_SP_REGNUM
;
2766 /* Determine offset to previous frame. */
2768 = vsp
- get_frame_register_unsigned (this_frame
, cache
->framereg
);
2770 /* We already got the previous SP. */
2771 cache
->prev_sp
= vsp
;
2776 /* Unwinding via ARM exception table entries. Note that the sniffer
2777 already computes a filled-in prologue cache, which is then used
2778 with the same arm_prologue_this_id and arm_prologue_prev_register
2779 routines also used for prologue-parsing based unwinding. */
2782 arm_exidx_unwind_sniffer (const struct frame_unwind
*self
,
2783 struct frame_info
*this_frame
,
2784 void **this_prologue_cache
)
2786 struct gdbarch
*gdbarch
= get_frame_arch (this_frame
);
2787 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
2788 CORE_ADDR addr_in_block
, exidx_region
, func_start
;
2789 struct arm_prologue_cache
*cache
;
2792 /* See if we have an ARM exception table entry covering this address. */
2793 addr_in_block
= get_frame_address_in_block (this_frame
);
2794 entry
= arm_find_exidx_entry (addr_in_block
, &exidx_region
);
2798 /* The ARM exception table does not describe unwind information
2799 for arbitrary PC values, but is guaranteed to be correct only
2800 at call sites. We have to decide here whether we want to use
2801 ARM exception table information for this frame, or fall back
2802 to using prologue parsing. (Note that if we have DWARF CFI,
2803 this sniffer isn't even called -- CFI is always preferred.)
2805 Before we make this decision, however, we check whether we
2806 actually have *symbol* information for the current frame.
2807 If not, prologue parsing would not work anyway, so we might
2808 as well use the exception table and hope for the best. */
2809 if (find_pc_partial_function (addr_in_block
, NULL
, &func_start
, NULL
))
2813 /* If the next frame is "normal", we are at a call site in this
2814 frame, so exception information is guaranteed to be valid. */
2815 if (get_next_frame (this_frame
)
2816 && get_frame_type (get_next_frame (this_frame
)) == NORMAL_FRAME
)
2819 /* We also assume exception information is valid if we're currently
2820 blocked in a system call. The system library is supposed to
2821 ensure this, so that e.g. pthread cancellation works. */
2822 if (arm_frame_is_thumb (this_frame
))
2826 if (safe_read_memory_integer (get_frame_pc (this_frame
) - 2, 2,
2827 byte_order_for_code
, &insn
)
2828 && (insn
& 0xff00) == 0xdf00 /* svc */)
2835 if (safe_read_memory_integer (get_frame_pc (this_frame
) - 4, 4,
2836 byte_order_for_code
, &insn
)
2837 && (insn
& 0x0f000000) == 0x0f000000 /* svc */)
2841 /* Bail out if we don't know that exception information is valid. */
2845 /* The ARM exception index does not mark the *end* of the region
2846 covered by the entry, and some functions will not have any entry.
2847 To correctly recognize the end of the covered region, the linker
2848 should have inserted dummy records with a CANTUNWIND marker.
2850 Unfortunately, current versions of GNU ld do not reliably do
2851 this, and thus we may have found an incorrect entry above.
2852 As a (temporary) sanity check, we only use the entry if it
2853 lies *within* the bounds of the function. Note that this check
2854 might reject perfectly valid entries that just happen to cover
2855 multiple functions; therefore this check ought to be removed
2856 once the linker is fixed. */
2857 if (func_start
> exidx_region
)
2861 /* Decode the list of unwinding instructions into a prologue cache.
2862 Note that this may fail due to e.g. a "refuse to unwind" code. */
2863 cache
= arm_exidx_fill_cache (this_frame
, entry
);
2867 *this_prologue_cache
= cache
;
2871 struct frame_unwind arm_exidx_unwind
= {
2873 default_frame_unwind_stop_reason
,
2874 arm_prologue_this_id
,
2875 arm_prologue_prev_register
,
2877 arm_exidx_unwind_sniffer
2880 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2881 trampoline, return the target PC. Otherwise return 0.
2883 void call0a (char c, short s, int i, long l) {}
2887 (*pointer_to_call0a) (c, s, i, l);
2890 Instead of calling a stub library function _call_via_xx (xx is
2891 the register name), GCC may inline the trampoline in the object
2892 file as below (register r2 has the address of call0a).
2895 .type main, %function
2904 The trampoline 'bx r2' doesn't belong to main. */
2907 arm_skip_bx_reg (struct frame_info
*frame
, CORE_ADDR pc
)
2909 /* The heuristics of recognizing such trampoline is that FRAME is
2910 executing in Thumb mode and the instruction on PC is 'bx Rm'. */
2911 if (arm_frame_is_thumb (frame
))
2915 if (target_read_memory (pc
, buf
, 2) == 0)
2917 struct gdbarch
*gdbarch
= get_frame_arch (frame
);
2918 enum bfd_endian byte_order_for_code
2919 = gdbarch_byte_order_for_code (gdbarch
);
2921 = extract_unsigned_integer (buf
, 2, byte_order_for_code
);
2923 if ((insn
& 0xff80) == 0x4700) /* bx <Rm> */
2926 = get_frame_register_unsigned (frame
, bits (insn
, 3, 6));
2928 /* Clear the LSB so that gdb core sets step-resume
2929 breakpoint at the right address. */
2930 return UNMAKE_THUMB_ADDR (dest
);
2938 static struct arm_prologue_cache
*
2939 arm_make_stub_cache (struct frame_info
*this_frame
)
2941 struct arm_prologue_cache
*cache
;
2943 cache
= FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache
);
2944 cache
->saved_regs
= trad_frame_alloc_saved_regs (this_frame
);
2946 cache
->prev_sp
= get_frame_register_unsigned (this_frame
, ARM_SP_REGNUM
);
2951 /* Our frame ID for a stub frame is the current SP and LR. */
2954 arm_stub_this_id (struct frame_info
*this_frame
,
2956 struct frame_id
*this_id
)
2958 struct arm_prologue_cache
*cache
;
2960 if (*this_cache
== NULL
)
2961 *this_cache
= arm_make_stub_cache (this_frame
);
2962 cache
= *this_cache
;
2964 *this_id
= frame_id_build (cache
->prev_sp
, get_frame_pc (this_frame
));
2968 arm_stub_unwind_sniffer (const struct frame_unwind
*self
,
2969 struct frame_info
*this_frame
,
2970 void **this_prologue_cache
)
2972 CORE_ADDR addr_in_block
;
2974 CORE_ADDR pc
, start_addr
;
2977 addr_in_block
= get_frame_address_in_block (this_frame
);
2978 pc
= get_frame_pc (this_frame
);
2979 if (in_plt_section (addr_in_block
)
2980 /* We also use the stub winder if the target memory is unreadable
2981 to avoid having the prologue unwinder trying to read it. */
2982 || target_read_memory (pc
, dummy
, 4) != 0)
2985 if (find_pc_partial_function (pc
, &name
, &start_addr
, NULL
) == 0
2986 && arm_skip_bx_reg (this_frame
, pc
) != 0)
2992 struct frame_unwind arm_stub_unwind
= {
2994 default_frame_unwind_stop_reason
,
2996 arm_prologue_prev_register
,
2998 arm_stub_unwind_sniffer
3001 /* Put here the code to store, into CACHE->saved_regs, the addresses
3002 of the saved registers of frame described by THIS_FRAME. CACHE is
3005 static struct arm_prologue_cache
*
3006 arm_m_exception_cache (struct frame_info
*this_frame
)
3008 struct gdbarch
*gdbarch
= get_frame_arch (this_frame
);
3009 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
3010 struct arm_prologue_cache
*cache
;
3011 CORE_ADDR unwound_sp
;
3014 cache
= FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache
);
3015 cache
->saved_regs
= trad_frame_alloc_saved_regs (this_frame
);
3017 unwound_sp
= get_frame_register_unsigned (this_frame
,
3020 /* The hardware saves eight 32-bit words, comprising xPSR,
3021 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
3022 "B1.5.6 Exception entry behavior" in
3023 "ARMv7-M Architecture Reference Manual". */
3024 cache
->saved_regs
[0].addr
= unwound_sp
;
3025 cache
->saved_regs
[1].addr
= unwound_sp
+ 4;
3026 cache
->saved_regs
[2].addr
= unwound_sp
+ 8;
3027 cache
->saved_regs
[3].addr
= unwound_sp
+ 12;
3028 cache
->saved_regs
[12].addr
= unwound_sp
+ 16;
3029 cache
->saved_regs
[14].addr
= unwound_sp
+ 20;
3030 cache
->saved_regs
[15].addr
= unwound_sp
+ 24;
3031 cache
->saved_regs
[ARM_PS_REGNUM
].addr
= unwound_sp
+ 28;
3033 /* If bit 9 of the saved xPSR is set, then there is a four-byte
3034 aligner between the top of the 32-byte stack frame and the
3035 previous context's stack pointer. */
3036 cache
->prev_sp
= unwound_sp
+ 32;
3037 if (safe_read_memory_integer (unwound_sp
+ 28, 4, byte_order
, &xpsr
)
3038 && (xpsr
& (1 << 9)) != 0)
3039 cache
->prev_sp
+= 4;
3044 /* Implementation of function hook 'this_id' in
3045 'struct frame_uwnind'. */
3048 arm_m_exception_this_id (struct frame_info
*this_frame
,
3050 struct frame_id
*this_id
)
3052 struct arm_prologue_cache
*cache
;
3054 if (*this_cache
== NULL
)
3055 *this_cache
= arm_m_exception_cache (this_frame
);
3056 cache
= *this_cache
;
3058 /* Our frame ID for a stub frame is the current SP and LR. */
3059 *this_id
= frame_id_build (cache
->prev_sp
,
3060 get_frame_pc (this_frame
));
3063 /* Implementation of function hook 'prev_register' in
3064 'struct frame_uwnind'. */
3066 static struct value
*
3067 arm_m_exception_prev_register (struct frame_info
*this_frame
,
3071 struct gdbarch
*gdbarch
= get_frame_arch (this_frame
);
3072 struct arm_prologue_cache
*cache
;
3074 if (*this_cache
== NULL
)
3075 *this_cache
= arm_m_exception_cache (this_frame
);
3076 cache
= *this_cache
;
3078 /* The value was already reconstructed into PREV_SP. */
3079 if (prev_regnum
== ARM_SP_REGNUM
)
3080 return frame_unwind_got_constant (this_frame
, prev_regnum
,
3083 return trad_frame_get_prev_register (this_frame
, cache
->saved_regs
,
3087 /* Implementation of function hook 'sniffer' in
3088 'struct frame_uwnind'. */
3091 arm_m_exception_unwind_sniffer (const struct frame_unwind
*self
,
3092 struct frame_info
*this_frame
,
3093 void **this_prologue_cache
)
3095 CORE_ADDR this_pc
= get_frame_pc (this_frame
);
3097 /* No need to check is_m; this sniffer is only registered for
3098 M-profile architectures. */
3100 /* Exception frames return to one of these magic PCs. Other values
3101 are not defined as of v7-M. See details in "B1.5.8 Exception
3102 return behavior" in "ARMv7-M Architecture Reference Manual". */
3103 if (this_pc
== 0xfffffff1 || this_pc
== 0xfffffff9
3104 || this_pc
== 0xfffffffd)
3110 /* Frame unwinder for M-profile exceptions. */
3112 struct frame_unwind arm_m_exception_unwind
=
3115 default_frame_unwind_stop_reason
,
3116 arm_m_exception_this_id
,
3117 arm_m_exception_prev_register
,
3119 arm_m_exception_unwind_sniffer
3123 arm_normal_frame_base (struct frame_info
*this_frame
, void **this_cache
)
3125 struct arm_prologue_cache
*cache
;
3127 if (*this_cache
== NULL
)
3128 *this_cache
= arm_make_prologue_cache (this_frame
);
3129 cache
= *this_cache
;
3131 return cache
->prev_sp
- cache
->framesize
;
3134 struct frame_base arm_normal_base
= {
3135 &arm_prologue_unwind
,
3136 arm_normal_frame_base
,
3137 arm_normal_frame_base
,
3138 arm_normal_frame_base
3141 /* Assuming THIS_FRAME is a dummy, return the frame ID of that
3142 dummy frame. The frame ID's base needs to match the TOS value
3143 saved by save_dummy_frame_tos() and returned from
3144 arm_push_dummy_call, and the PC needs to match the dummy frame's
3147 static struct frame_id
3148 arm_dummy_id (struct gdbarch
*gdbarch
, struct frame_info
*this_frame
)
3150 return frame_id_build (get_frame_register_unsigned (this_frame
,
3152 get_frame_pc (this_frame
));
3155 /* Given THIS_FRAME, find the previous frame's resume PC (which will
3156 be used to construct the previous frame's ID, after looking up the
3157 containing function). */
3160 arm_unwind_pc (struct gdbarch
*gdbarch
, struct frame_info
*this_frame
)
3163 pc
= frame_unwind_register_unsigned (this_frame
, ARM_PC_REGNUM
);
3164 return arm_addr_bits_remove (gdbarch
, pc
);
3168 arm_unwind_sp (struct gdbarch
*gdbarch
, struct frame_info
*this_frame
)
3170 return frame_unwind_register_unsigned (this_frame
, ARM_SP_REGNUM
);
3173 static struct value
*
3174 arm_dwarf2_prev_register (struct frame_info
*this_frame
, void **this_cache
,
3177 struct gdbarch
* gdbarch
= get_frame_arch (this_frame
);
3179 ULONGEST t_bit
= arm_psr_thumb_bit (gdbarch
);
3184 /* The PC is normally copied from the return column, which
3185 describes saves of LR. However, that version may have an
3186 extra bit set to indicate Thumb state. The bit is not
3188 lr
= frame_unwind_register_unsigned (this_frame
, ARM_LR_REGNUM
);
3189 return frame_unwind_got_constant (this_frame
, regnum
,
3190 arm_addr_bits_remove (gdbarch
, lr
));
3193 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3194 cpsr
= get_frame_register_unsigned (this_frame
, regnum
);
3195 lr
= frame_unwind_register_unsigned (this_frame
, ARM_LR_REGNUM
);
3196 if (IS_THUMB_ADDR (lr
))
3200 return frame_unwind_got_constant (this_frame
, regnum
, cpsr
);
3203 internal_error (__FILE__
, __LINE__
,
3204 _("Unexpected register %d"), regnum
);
3209 arm_dwarf2_frame_init_reg (struct gdbarch
*gdbarch
, int regnum
,
3210 struct dwarf2_frame_state_reg
*reg
,
3211 struct frame_info
*this_frame
)
3217 reg
->how
= DWARF2_FRAME_REG_FN
;
3218 reg
->loc
.fn
= arm_dwarf2_prev_register
;
3221 reg
->how
= DWARF2_FRAME_REG_CFA
;
3226 /* Implement the stack_frame_destroyed_p gdbarch method. */
3229 thumb_stack_frame_destroyed_p (struct gdbarch
*gdbarch
, CORE_ADDR pc
)
3231 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
3232 unsigned int insn
, insn2
;
3233 int found_return
= 0, found_stack_adjust
= 0;
3234 CORE_ADDR func_start
, func_end
;
3238 if (!find_pc_partial_function (pc
, NULL
, &func_start
, &func_end
))
3241 /* The epilogue is a sequence of instructions along the following lines:
3243 - add stack frame size to SP or FP
3244 - [if frame pointer used] restore SP from FP
3245 - restore registers from SP [may include PC]
3246 - a return-type instruction [if PC wasn't already restored]
3248 In a first pass, we scan forward from the current PC and verify the
3249 instructions we find as compatible with this sequence, ending in a
3252 However, this is not sufficient to distinguish indirect function calls
3253 within a function from indirect tail calls in the epilogue in some cases.
3254 Therefore, if we didn't already find any SP-changing instruction during
3255 forward scan, we add a backward scanning heuristic to ensure we actually
3256 are in the epilogue. */
3259 while (scan_pc
< func_end
&& !found_return
)
3261 if (target_read_memory (scan_pc
, buf
, 2))
3265 insn
= extract_unsigned_integer (buf
, 2, byte_order_for_code
);
3267 if ((insn
& 0xff80) == 0x4700) /* bx <Rm> */
3269 else if (insn
== 0x46f7) /* mov pc, lr */
3271 else if (thumb_instruction_restores_sp (insn
))
3273 if ((insn
& 0xff00) == 0xbd00) /* pop <registers, PC> */
3276 else if (thumb_insn_size (insn
) == 4) /* 32-bit Thumb-2 instruction */
3278 if (target_read_memory (scan_pc
, buf
, 2))
3282 insn2
= extract_unsigned_integer (buf
, 2, byte_order_for_code
);
3284 if (insn
== 0xe8bd) /* ldm.w sp!, <registers> */
3286 if (insn2
& 0x8000) /* <registers> include PC. */
3289 else if (insn
== 0xf85d /* ldr.w <Rt>, [sp], #4 */
3290 && (insn2
& 0x0fff) == 0x0b04)
3292 if ((insn2
& 0xf000) == 0xf000) /* <Rt> is PC. */
3295 else if ((insn
& 0xffbf) == 0xecbd /* vldm sp!, <list> */
3296 && (insn2
& 0x0e00) == 0x0a00)
3308 /* Since any instruction in the epilogue sequence, with the possible
3309 exception of return itself, updates the stack pointer, we need to
3310 scan backwards for at most one instruction. Try either a 16-bit or
3311 a 32-bit instruction. This is just a heuristic, so we do not worry
3312 too much about false positives. */
3314 if (pc
- 4 < func_start
)
3316 if (target_read_memory (pc
- 4, buf
, 4))
3319 insn
= extract_unsigned_integer (buf
, 2, byte_order_for_code
);
3320 insn2
= extract_unsigned_integer (buf
+ 2, 2, byte_order_for_code
);
3322 if (thumb_instruction_restores_sp (insn2
))
3323 found_stack_adjust
= 1;
3324 else if (insn
== 0xe8bd) /* ldm.w sp!, <registers> */
3325 found_stack_adjust
= 1;
3326 else if (insn
== 0xf85d /* ldr.w <Rt>, [sp], #4 */
3327 && (insn2
& 0x0fff) == 0x0b04)
3328 found_stack_adjust
= 1;
3329 else if ((insn
& 0xffbf) == 0xecbd /* vldm sp!, <list> */
3330 && (insn2
& 0x0e00) == 0x0a00)
3331 found_stack_adjust
= 1;
3333 return found_stack_adjust
;
3336 /* Implement the stack_frame_destroyed_p gdbarch method. */
3339 arm_stack_frame_destroyed_p (struct gdbarch
*gdbarch
, CORE_ADDR pc
)
3341 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
3344 CORE_ADDR func_start
, func_end
;
3346 if (arm_pc_is_thumb (gdbarch
, pc
))
3347 return thumb_stack_frame_destroyed_p (gdbarch
, pc
);
3349 if (!find_pc_partial_function (pc
, NULL
, &func_start
, &func_end
))
3352 /* We are in the epilogue if the previous instruction was a stack
3353 adjustment and the next instruction is a possible return (bx, mov
3354 pc, or pop). We could have to scan backwards to find the stack
3355 adjustment, or forwards to find the return, but this is a decent
3356 approximation. First scan forwards. */
3359 insn
= read_memory_unsigned_integer (pc
, 4, byte_order_for_code
);
3360 if (bits (insn
, 28, 31) != INST_NV
)
3362 if ((insn
& 0x0ffffff0) == 0x012fff10)
3365 else if ((insn
& 0x0ffffff0) == 0x01a0f000)
3368 else if ((insn
& 0x0fff0000) == 0x08bd0000
3369 && (insn
& 0x0000c000) != 0)
3370 /* POP (LDMIA), including PC or LR. */
3377 /* Scan backwards. This is just a heuristic, so do not worry about
3378 false positives from mode changes. */
3380 if (pc
< func_start
+ 4)
3383 insn
= read_memory_unsigned_integer (pc
- 4, 4, byte_order_for_code
);
3384 if (arm_instruction_restores_sp (insn
))
/* When arguments must be pushed onto the stack, they go on in reverse
   order.  The code below implements a FILO (stack) to do this.  */

struct stack_item
{
  int len;			/* Size of DATA in bytes.  */
  struct stack_item *prev;	/* Next-older item (stack link).  */
  void *data;			/* Owned copy of the argument bytes.  */
};
3401 static struct stack_item
*
3402 push_stack_item (struct stack_item
*prev
, const void *contents
, int len
)
3404 struct stack_item
*si
;
3405 si
= xmalloc (sizeof (struct stack_item
));
3406 si
->data
= xmalloc (len
);
3409 memcpy (si
->data
, contents
, len
);
3413 static struct stack_item
*
3414 pop_stack_item (struct stack_item
*si
)
3416 struct stack_item
*dead
= si
;
3424 /* Return the alignment (in bytes) of the given type. */
3427 arm_type_align (struct type
*t
)
3433 t
= check_typedef (t
);
3434 switch (TYPE_CODE (t
))
3437 /* Should never happen. */
3438 internal_error (__FILE__
, __LINE__
, _("unknown type alignment"));
3442 case TYPE_CODE_ENUM
:
3446 case TYPE_CODE_RANGE
:
3448 case TYPE_CODE_CHAR
:
3449 case TYPE_CODE_BOOL
:
3450 return TYPE_LENGTH (t
);
3452 case TYPE_CODE_ARRAY
:
3453 case TYPE_CODE_COMPLEX
:
3454 /* TODO: What about vector types? */
3455 return arm_type_align (TYPE_TARGET_TYPE (t
));
3457 case TYPE_CODE_STRUCT
:
3458 case TYPE_CODE_UNION
:
3460 for (n
= 0; n
< TYPE_NFIELDS (t
); n
++)
3462 falign
= arm_type_align (TYPE_FIELD_TYPE (t
, n
));
/* Possible base types for a candidate for passing and returning in
   VFP registers.  */

enum arm_vfp_cprc_base_type
  {
    VFP_CPRC_UNKNOWN,
    VFP_CPRC_SINGLE,
    VFP_CPRC_DOUBLE,
    VFP_CPRC_VEC64,
    VFP_CPRC_VEC128
  };
3482 /* The length of one element of base type B. */
3485 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b
)
3489 case VFP_CPRC_SINGLE
:
3491 case VFP_CPRC_DOUBLE
:
3493 case VFP_CPRC_VEC64
:
3495 case VFP_CPRC_VEC128
:
3498 internal_error (__FILE__
, __LINE__
, _("Invalid VFP CPRC type: %d."),
3503 /* The character ('s', 'd' or 'q') for the type of VFP register used
3504 for passing base type B. */
3507 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b
)
3511 case VFP_CPRC_SINGLE
:
3513 case VFP_CPRC_DOUBLE
:
3515 case VFP_CPRC_VEC64
:
3517 case VFP_CPRC_VEC128
:
3520 internal_error (__FILE__
, __LINE__
, _("Invalid VFP CPRC type: %d."),
3525 /* Determine whether T may be part of a candidate for passing and
3526 returning in VFP registers, ignoring the limit on the total number
3527 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3528 classification of the first valid component found; if it is not
3529 VFP_CPRC_UNKNOWN, all components must have the same classification
3530 as *BASE_TYPE. If it is found that T contains a type not permitted
3531 for passing and returning in VFP registers, a type differently
3532 classified from *BASE_TYPE, or two types differently classified
3533 from each other, return -1, otherwise return the total number of
3534 base-type elements found (possibly 0 in an empty structure or
3535 array). Vector types are not currently supported, matching the
3536 generic AAPCS support. */
3539 arm_vfp_cprc_sub_candidate (struct type
*t
,
3540 enum arm_vfp_cprc_base_type
*base_type
)
3542 t
= check_typedef (t
);
3543 switch (TYPE_CODE (t
))
3546 switch (TYPE_LENGTH (t
))
3549 if (*base_type
== VFP_CPRC_UNKNOWN
)
3550 *base_type
= VFP_CPRC_SINGLE
;
3551 else if (*base_type
!= VFP_CPRC_SINGLE
)
3556 if (*base_type
== VFP_CPRC_UNKNOWN
)
3557 *base_type
= VFP_CPRC_DOUBLE
;
3558 else if (*base_type
!= VFP_CPRC_DOUBLE
)
3567 case TYPE_CODE_COMPLEX
:
3568 /* Arguments of complex T where T is one of the types float or
3569 double get treated as if they are implemented as:
3578 switch (TYPE_LENGTH (t
))
3581 if (*base_type
== VFP_CPRC_UNKNOWN
)
3582 *base_type
= VFP_CPRC_SINGLE
;
3583 else if (*base_type
!= VFP_CPRC_SINGLE
)
3588 if (*base_type
== VFP_CPRC_UNKNOWN
)
3589 *base_type
= VFP_CPRC_DOUBLE
;
3590 else if (*base_type
!= VFP_CPRC_DOUBLE
)
3599 case TYPE_CODE_ARRAY
:
3603 count
= arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t
), base_type
);
3606 if (TYPE_LENGTH (t
) == 0)
3608 gdb_assert (count
== 0);
3611 else if (count
== 0)
3613 unitlen
= arm_vfp_cprc_unit_length (*base_type
);
3614 gdb_assert ((TYPE_LENGTH (t
) % unitlen
) == 0);
3615 return TYPE_LENGTH (t
) / unitlen
;
3619 case TYPE_CODE_STRUCT
:
3624 for (i
= 0; i
< TYPE_NFIELDS (t
); i
++)
3626 int sub_count
= arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t
, i
),
3628 if (sub_count
== -1)
3632 if (TYPE_LENGTH (t
) == 0)
3634 gdb_assert (count
== 0);
3637 else if (count
== 0)
3639 unitlen
= arm_vfp_cprc_unit_length (*base_type
);
3640 if (TYPE_LENGTH (t
) != unitlen
* count
)
3645 case TYPE_CODE_UNION
:
3650 for (i
= 0; i
< TYPE_NFIELDS (t
); i
++)
3652 int sub_count
= arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t
, i
),
3654 if (sub_count
== -1)
3656 count
= (count
> sub_count
? count
: sub_count
);
3658 if (TYPE_LENGTH (t
) == 0)
3660 gdb_assert (count
== 0);
3663 else if (count
== 0)
3665 unitlen
= arm_vfp_cprc_unit_length (*base_type
);
3666 if (TYPE_LENGTH (t
) != unitlen
* count
)
3678 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3679 if passed to or returned from a non-variadic function with the VFP
3680 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3681 *BASE_TYPE to the base type for T and *COUNT to the number of
3682 elements of that base type before returning. */
3685 arm_vfp_call_candidate (struct type
*t
, enum arm_vfp_cprc_base_type
*base_type
,
3688 enum arm_vfp_cprc_base_type b
= VFP_CPRC_UNKNOWN
;
3689 int c
= arm_vfp_cprc_sub_candidate (t
, &b
);
3690 if (c
<= 0 || c
> 4)
3697 /* Return 1 if the VFP ABI should be used for passing arguments to and
3698 returning values from a function of type FUNC_TYPE, 0
3702 arm_vfp_abi_for_function (struct gdbarch
*gdbarch
, struct type
*func_type
)
3704 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
3705 /* Variadic functions always use the base ABI. Assume that functions
3706 without debug info are not variadic. */
3707 if (func_type
&& TYPE_VARARGS (check_typedef (func_type
)))
3709 /* The VFP ABI is only supported as a variant of AAPCS. */
3710 if (tdep
->arm_abi
!= ARM_ABI_AAPCS
)
3712 return gdbarch_tdep (gdbarch
)->fp_model
== ARM_FLOAT_VFP
;
3715 /* We currently only support passing parameters in integer registers, which
3716 conforms with GCC's default model, and VFP argument passing following
3717 the VFP variant of AAPCS. Several other variants exist and
3718 we should probably support some of them based on the selected ABI. */
3721 arm_push_dummy_call (struct gdbarch
*gdbarch
, struct value
*function
,
3722 struct regcache
*regcache
, CORE_ADDR bp_addr
, int nargs
,
3723 struct value
**args
, CORE_ADDR sp
, int struct_return
,
3724 CORE_ADDR struct_addr
)
3726 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
3730 struct stack_item
*si
= NULL
;
3733 unsigned vfp_regs_free
= (1 << 16) - 1;
3735 /* Determine the type of this function and whether the VFP ABI
3737 ftype
= check_typedef (value_type (function
));
3738 if (TYPE_CODE (ftype
) == TYPE_CODE_PTR
)
3739 ftype
= check_typedef (TYPE_TARGET_TYPE (ftype
));
3740 use_vfp_abi
= arm_vfp_abi_for_function (gdbarch
, ftype
);
3742 /* Set the return address. For the ARM, the return breakpoint is
3743 always at BP_ADDR. */
3744 if (arm_pc_is_thumb (gdbarch
, bp_addr
))
3746 regcache_cooked_write_unsigned (regcache
, ARM_LR_REGNUM
, bp_addr
);
3748 /* Walk through the list of args and determine how large a temporary
3749 stack is required. Need to take care here as structs may be
3750 passed on the stack, and we have to push them. */
3753 argreg
= ARM_A1_REGNUM
;
3756 /* The struct_return pointer occupies the first parameter
3757 passing register. */
3761 fprintf_unfiltered (gdb_stdlog
, "struct return in %s = %s\n",
3762 gdbarch_register_name (gdbarch
, argreg
),
3763 paddress (gdbarch
, struct_addr
));
3764 regcache_cooked_write_unsigned (regcache
, argreg
, struct_addr
);
3768 for (argnum
= 0; argnum
< nargs
; argnum
++)
3771 struct type
*arg_type
;
3772 struct type
*target_type
;
3773 enum type_code typecode
;
3774 const bfd_byte
*val
;
3776 enum arm_vfp_cprc_base_type vfp_base_type
;
3778 int may_use_core_reg
= 1;
3780 arg_type
= check_typedef (value_type (args
[argnum
]));
3781 len
= TYPE_LENGTH (arg_type
);
3782 target_type
= TYPE_TARGET_TYPE (arg_type
);
3783 typecode
= TYPE_CODE (arg_type
);
3784 val
= value_contents (args
[argnum
]);
3786 align
= arm_type_align (arg_type
);
3787 /* Round alignment up to a whole number of words. */
3788 align
= (align
+ INT_REGISTER_SIZE
- 1) & ~(INT_REGISTER_SIZE
- 1);
3789 /* Different ABIs have different maximum alignments. */
3790 if (gdbarch_tdep (gdbarch
)->arm_abi
== ARM_ABI_APCS
)
3792 /* The APCS ABI only requires word alignment. */
3793 align
= INT_REGISTER_SIZE
;
3797 /* The AAPCS requires at most doubleword alignment. */
3798 if (align
> INT_REGISTER_SIZE
* 2)
3799 align
= INT_REGISTER_SIZE
* 2;
3803 && arm_vfp_call_candidate (arg_type
, &vfp_base_type
,
3811 /* Because this is a CPRC it cannot go in a core register or
3812 cause a core register to be skipped for alignment.
3813 Either it goes in VFP registers and the rest of this loop
3814 iteration is skipped for this argument, or it goes on the
3815 stack (and the stack alignment code is correct for this
3817 may_use_core_reg
= 0;
3819 unit_length
= arm_vfp_cprc_unit_length (vfp_base_type
);
3820 shift
= unit_length
/ 4;
3821 mask
= (1 << (shift
* vfp_base_count
)) - 1;
3822 for (regno
= 0; regno
< 16; regno
+= shift
)
3823 if (((vfp_regs_free
>> regno
) & mask
) == mask
)
3832 vfp_regs_free
&= ~(mask
<< regno
);
3833 reg_scaled
= regno
/ shift
;
3834 reg_char
= arm_vfp_cprc_reg_char (vfp_base_type
);
3835 for (i
= 0; i
< vfp_base_count
; i
++)
3839 if (reg_char
== 'q')
3840 arm_neon_quad_write (gdbarch
, regcache
, reg_scaled
+ i
,
3841 val
+ i
* unit_length
);
3844 xsnprintf (name_buf
, sizeof (name_buf
), "%c%d",
3845 reg_char
, reg_scaled
+ i
);
3846 regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
3848 regcache_cooked_write (regcache
, regnum
,
3849 val
+ i
* unit_length
);
3856 /* This CPRC could not go in VFP registers, so all VFP
3857 registers are now marked as used. */
3862 /* Push stack padding for doubleword alignment. */
3863 if (nstack
& (align
- 1))
3865 si
= push_stack_item (si
, val
, INT_REGISTER_SIZE
);
3866 nstack
+= INT_REGISTER_SIZE
;
3869 /* Doubleword aligned quantities must go in even register pairs. */
3870 if (may_use_core_reg
3871 && argreg
<= ARM_LAST_ARG_REGNUM
3872 && align
> INT_REGISTER_SIZE
3876 /* If the argument is a pointer to a function, and it is a
3877 Thumb function, create a LOCAL copy of the value and set
3878 the THUMB bit in it. */
3879 if (TYPE_CODE_PTR
== typecode
3880 && target_type
!= NULL
3881 && TYPE_CODE_FUNC
== TYPE_CODE (check_typedef (target_type
)))
3883 CORE_ADDR regval
= extract_unsigned_integer (val
, len
, byte_order
);
3884 if (arm_pc_is_thumb (gdbarch
, regval
))
3886 bfd_byte
*copy
= alloca (len
);
3887 store_unsigned_integer (copy
, len
, byte_order
,
3888 MAKE_THUMB_ADDR (regval
));
3893 /* Copy the argument to general registers or the stack in
3894 register-sized pieces. Large arguments are split between
3895 registers and stack. */
3898 int partial_len
= len
< INT_REGISTER_SIZE
? len
: INT_REGISTER_SIZE
;
3900 if (may_use_core_reg
&& argreg
<= ARM_LAST_ARG_REGNUM
)
3902 /* The argument is being passed in a general purpose
3905 = extract_unsigned_integer (val
, partial_len
, byte_order
);
3906 if (byte_order
== BFD_ENDIAN_BIG
)
3907 regval
<<= (INT_REGISTER_SIZE
- partial_len
) * 8;
3909 fprintf_unfiltered (gdb_stdlog
, "arg %d in %s = 0x%s\n",
3911 gdbarch_register_name
3913 phex (regval
, INT_REGISTER_SIZE
));
3914 regcache_cooked_write_unsigned (regcache
, argreg
, regval
);
3919 /* Push the arguments onto the stack. */
3921 fprintf_unfiltered (gdb_stdlog
, "arg %d @ sp + %d\n",
3923 si
= push_stack_item (si
, val
, INT_REGISTER_SIZE
);
3924 nstack
+= INT_REGISTER_SIZE
;
3931 /* If we have an odd number of words to push, then decrement the stack
3932 by one word now, so first stack argument will be dword aligned. */
3939 write_memory (sp
, si
->data
, si
->len
);
3940 si
= pop_stack_item (si
);
3943 /* Finally, update the SP register. */
3944 regcache_cooked_write_unsigned (regcache
, ARM_SP_REGNUM
, sp
);
3950 /* Always align the frame to an 8-byte boundary. This is required on
3951 some platforms and harmless on the rest. */
3954 arm_frame_align (struct gdbarch
*gdbarch
, CORE_ADDR sp
)
3956 /* Align the stack to eight bytes. */
3957 return sp
& ~ (CORE_ADDR
) 7;
/* Print the names of the FPA exception flags that are set in FLAGS
   (bits 0-4: invalid op, divide by zero, overflow, underflow,
   inexact) to FILE, followed by a newline.  */

static void
print_fpu_flags (struct ui_file *file, int flags)
{
  if (flags & (1 << 0))
    fputs_filtered ("IVO ", file);
  if (flags & (1 << 1))
    fputs_filtered ("DVZ ", file);
  if (flags & (1 << 2))
    fputs_filtered ("OFL ", file);
  if (flags & (1 << 3))
    fputs_filtered ("UFL ", file);
  if (flags & (1 << 4))
    fputs_filtered ("INX ", file);
  fputc_filtered ('\n', file);
}
3976 /* Print interesting information about the floating point processor
3977 (if present) or emulator. */
3979 arm_print_float_info (struct gdbarch
*gdbarch
, struct ui_file
*file
,
3980 struct frame_info
*frame
, const char *args
)
3982 unsigned long status
= get_frame_register_unsigned (frame
, ARM_FPS_REGNUM
);
3985 type
= (status
>> 24) & 127;
3986 if (status
& (1 << 31))
3987 fprintf_filtered (file
, _("Hardware FPU type %d\n"), type
);
3989 fprintf_filtered (file
, _("Software FPU type %d\n"), type
);
3990 /* i18n: [floating point unit] mask */
3991 fputs_filtered (_("mask: "), file
);
3992 print_fpu_flags (file
, status
>> 16);
3993 /* i18n: [floating point unit] flags */
3994 fputs_filtered (_("flags: "), file
);
3995 print_fpu_flags (file
, status
);
3998 /* Construct the ARM extended floating point type. */
3999 static struct type
*
4000 arm_ext_type (struct gdbarch
*gdbarch
)
4002 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
4004 if (!tdep
->arm_ext_type
)
4006 = arch_float_type (gdbarch
, -1, "builtin_type_arm_ext",
4007 floatformats_arm_ext
);
4009 return tdep
->arm_ext_type
;
4012 static struct type
*
4013 arm_neon_double_type (struct gdbarch
*gdbarch
)
4015 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
4017 if (tdep
->neon_double_type
== NULL
)
4019 struct type
*t
, *elem
;
4021 t
= arch_composite_type (gdbarch
, "__gdb_builtin_type_neon_d",
4023 elem
= builtin_type (gdbarch
)->builtin_uint8
;
4024 append_composite_type_field (t
, "u8", init_vector_type (elem
, 8));
4025 elem
= builtin_type (gdbarch
)->builtin_uint16
;
4026 append_composite_type_field (t
, "u16", init_vector_type (elem
, 4));
4027 elem
= builtin_type (gdbarch
)->builtin_uint32
;
4028 append_composite_type_field (t
, "u32", init_vector_type (elem
, 2));
4029 elem
= builtin_type (gdbarch
)->builtin_uint64
;
4030 append_composite_type_field (t
, "u64", elem
);
4031 elem
= builtin_type (gdbarch
)->builtin_float
;
4032 append_composite_type_field (t
, "f32", init_vector_type (elem
, 2));
4033 elem
= builtin_type (gdbarch
)->builtin_double
;
4034 append_composite_type_field (t
, "f64", elem
);
4036 TYPE_VECTOR (t
) = 1;
4037 TYPE_NAME (t
) = "neon_d";
4038 tdep
->neon_double_type
= t
;
4041 return tdep
->neon_double_type
;
4044 /* FIXME: The vector types are not correctly ordered on big-endian
4045 targets. Just as s0 is the low bits of d0, d0[0] is also the low
4046 bits of d0 - regardless of what unit size is being held in d0. So
4047 the offset of the first uint8 in d0 is 7, but the offset of the
4048 first float is 4. This code works as-is for little-endian
4051 static struct type
*
4052 arm_neon_quad_type (struct gdbarch
*gdbarch
)
4054 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
4056 if (tdep
->neon_quad_type
== NULL
)
4058 struct type
*t
, *elem
;
4060 t
= arch_composite_type (gdbarch
, "__gdb_builtin_type_neon_q",
4062 elem
= builtin_type (gdbarch
)->builtin_uint8
;
4063 append_composite_type_field (t
, "u8", init_vector_type (elem
, 16));
4064 elem
= builtin_type (gdbarch
)->builtin_uint16
;
4065 append_composite_type_field (t
, "u16", init_vector_type (elem
, 8));
4066 elem
= builtin_type (gdbarch
)->builtin_uint32
;
4067 append_composite_type_field (t
, "u32", init_vector_type (elem
, 4));
4068 elem
= builtin_type (gdbarch
)->builtin_uint64
;
4069 append_composite_type_field (t
, "u64", init_vector_type (elem
, 2));
4070 elem
= builtin_type (gdbarch
)->builtin_float
;
4071 append_composite_type_field (t
, "f32", init_vector_type (elem
, 4));
4072 elem
= builtin_type (gdbarch
)->builtin_double
;
4073 append_composite_type_field (t
, "f64", init_vector_type (elem
, 2));
4075 TYPE_VECTOR (t
) = 1;
4076 TYPE_NAME (t
) = "neon_q";
4077 tdep
->neon_quad_type
= t
;
4080 return tdep
->neon_quad_type
;
4083 /* Return the GDB type object for the "standard" data type of data in
4086 static struct type
*
4087 arm_register_type (struct gdbarch
*gdbarch
, int regnum
)
4089 int num_regs
= gdbarch_num_regs (gdbarch
);
4091 if (gdbarch_tdep (gdbarch
)->have_vfp_pseudos
4092 && regnum
>= num_regs
&& regnum
< num_regs
+ 32)
4093 return builtin_type (gdbarch
)->builtin_float
;
4095 if (gdbarch_tdep (gdbarch
)->have_neon_pseudos
4096 && regnum
>= num_regs
+ 32 && regnum
< num_regs
+ 32 + 16)
4097 return arm_neon_quad_type (gdbarch
);
4099 /* If the target description has register information, we are only
4100 in this function so that we can override the types of
4101 double-precision registers for NEON. */
4102 if (tdesc_has_registers (gdbarch_target_desc (gdbarch
)))
4104 struct type
*t
= tdesc_register_type (gdbarch
, regnum
);
4106 if (regnum
>= ARM_D0_REGNUM
&& regnum
< ARM_D0_REGNUM
+ 32
4107 && TYPE_CODE (t
) == TYPE_CODE_FLT
4108 && gdbarch_tdep (gdbarch
)->have_neon
)
4109 return arm_neon_double_type (gdbarch
);
4114 if (regnum
>= ARM_F0_REGNUM
&& regnum
< ARM_F0_REGNUM
+ NUM_FREGS
)
4116 if (!gdbarch_tdep (gdbarch
)->have_fpa_registers
)
4117 return builtin_type (gdbarch
)->builtin_void
;
4119 return arm_ext_type (gdbarch
);
4121 else if (regnum
== ARM_SP_REGNUM
)
4122 return builtin_type (gdbarch
)->builtin_data_ptr
;
4123 else if (regnum
== ARM_PC_REGNUM
)
4124 return builtin_type (gdbarch
)->builtin_func_ptr
;
4125 else if (regnum
>= ARRAY_SIZE (arm_register_names
))
4126 /* These registers are only supported on targets which supply
4127 an XML description. */
4128 return builtin_type (gdbarch
)->builtin_int0
;
4130 return builtin_type (gdbarch
)->builtin_uint32
;
4133 /* Map a DWARF register REGNUM onto the appropriate GDB register
4137 arm_dwarf_reg_to_regnum (struct gdbarch
*gdbarch
, int reg
)
4139 /* Core integer regs. */
4140 if (reg
>= 0 && reg
<= 15)
4143 /* Legacy FPA encoding. These were once used in a way which
4144 overlapped with VFP register numbering, so their use is
4145 discouraged, but GDB doesn't support the ARM toolchain
4146 which used them for VFP. */
4147 if (reg
>= 16 && reg
<= 23)
4148 return ARM_F0_REGNUM
+ reg
- 16;
4150 /* New assignments for the FPA registers. */
4151 if (reg
>= 96 && reg
<= 103)
4152 return ARM_F0_REGNUM
+ reg
- 96;
4154 /* WMMX register assignments. */
4155 if (reg
>= 104 && reg
<= 111)
4156 return ARM_WCGR0_REGNUM
+ reg
- 104;
4158 if (reg
>= 112 && reg
<= 127)
4159 return ARM_WR0_REGNUM
+ reg
- 112;
4161 if (reg
>= 192 && reg
<= 199)
4162 return ARM_WC0_REGNUM
+ reg
- 192;
4164 /* VFP v2 registers. A double precision value is actually
4165 in d1 rather than s2, but the ABI only defines numbering
4166 for the single precision registers. This will "just work"
4167 in GDB for little endian targets (we'll read eight bytes,
4168 starting in s0 and then progressing to s1), but will be
4169 reversed on big endian targets with VFP. This won't
4170 be a problem for the new Neon quad registers; you're supposed
4171 to use DW_OP_piece for those. */
4172 if (reg
>= 64 && reg
<= 95)
4176 xsnprintf (name_buf
, sizeof (name_buf
), "s%d", reg
- 64);
4177 return user_reg_map_name_to_regnum (gdbarch
, name_buf
,
4181 /* VFP v3 / Neon registers. This range is also used for VFP v2
4182 registers, except that it now describes d0 instead of s0. */
4183 if (reg
>= 256 && reg
<= 287)
4187 xsnprintf (name_buf
, sizeof (name_buf
), "d%d", reg
- 256);
4188 return user_reg_map_name_to_regnum (gdbarch
, name_buf
,
4195 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4197 arm_register_sim_regno (struct gdbarch
*gdbarch
, int regnum
)
4200 gdb_assert (reg
>= 0 && reg
< gdbarch_num_regs (gdbarch
));
4202 if (regnum
>= ARM_WR0_REGNUM
&& regnum
<= ARM_WR15_REGNUM
)
4203 return regnum
- ARM_WR0_REGNUM
+ SIM_ARM_IWMMXT_COP0R0_REGNUM
;
4205 if (regnum
>= ARM_WC0_REGNUM
&& regnum
<= ARM_WC7_REGNUM
)
4206 return regnum
- ARM_WC0_REGNUM
+ SIM_ARM_IWMMXT_COP1R0_REGNUM
;
4208 if (regnum
>= ARM_WCGR0_REGNUM
&& regnum
<= ARM_WCGR7_REGNUM
)
4209 return regnum
- ARM_WCGR0_REGNUM
+ SIM_ARM_IWMMXT_COP1R8_REGNUM
;
4211 if (reg
< NUM_GREGS
)
4212 return SIM_ARM_R0_REGNUM
+ reg
;
4215 if (reg
< NUM_FREGS
)
4216 return SIM_ARM_FP0_REGNUM
+ reg
;
4219 if (reg
< NUM_SREGS
)
4220 return SIM_ARM_FPS_REGNUM
+ reg
;
4223 internal_error (__FILE__
, __LINE__
, _("Bad REGNUM %d"), regnum
);
4226 /* NOTE: cagney/2001-08-20: Both convert_from_extended() and
4227 convert_to_extended() use floatformat_arm_ext_littlebyte_bigword.
4228 It is thought that this is the floating-point register format on
4229 little-endian systems. */
4232 convert_from_extended (const struct floatformat
*fmt
, const void *ptr
,
4233 void *dbl
, int endianess
)
4237 if (endianess
== BFD_ENDIAN_BIG
)
4238 floatformat_to_doublest (&floatformat_arm_ext_big
, ptr
, &d
);
4240 floatformat_to_doublest (&floatformat_arm_ext_littlebyte_bigword
,
4242 floatformat_from_doublest (fmt
, &d
, dbl
);
4246 convert_to_extended (const struct floatformat
*fmt
, void *dbl
, const void *ptr
,
4251 floatformat_to_doublest (fmt
, ptr
, &d
);
4252 if (endianess
== BFD_ENDIAN_BIG
)
4253 floatformat_from_doublest (&floatformat_arm_ext_big
, &d
, dbl
);
4255 floatformat_from_doublest (&floatformat_arm_ext_littlebyte_bigword
,
4260 condition_true (unsigned long cond
, unsigned long status_reg
)
4262 if (cond
== INST_AL
|| cond
== INST_NV
)
4268 return ((status_reg
& FLAG_Z
) != 0);
4270 return ((status_reg
& FLAG_Z
) == 0);
4272 return ((status_reg
& FLAG_C
) != 0);
4274 return ((status_reg
& FLAG_C
) == 0);
4276 return ((status_reg
& FLAG_N
) != 0);
4278 return ((status_reg
& FLAG_N
) == 0);
4280 return ((status_reg
& FLAG_V
) != 0);
4282 return ((status_reg
& FLAG_V
) == 0);
4284 return ((status_reg
& (FLAG_C
| FLAG_Z
)) == FLAG_C
);
4286 return ((status_reg
& (FLAG_C
| FLAG_Z
)) != FLAG_C
);
4288 return (((status_reg
& FLAG_N
) == 0) == ((status_reg
& FLAG_V
) == 0));
4290 return (((status_reg
& FLAG_N
) == 0) != ((status_reg
& FLAG_V
) == 0));
4292 return (((status_reg
& FLAG_Z
) == 0)
4293 && (((status_reg
& FLAG_N
) == 0)
4294 == ((status_reg
& FLAG_V
) == 0)));
4296 return (((status_reg
& FLAG_Z
) != 0)
4297 || (((status_reg
& FLAG_N
) == 0)
4298 != ((status_reg
& FLAG_V
) == 0)));
4303 static unsigned long
4304 shifted_reg_val (struct frame_info
*frame
, unsigned long inst
, int carry
,
4305 unsigned long pc_val
, unsigned long status_reg
)
4307 unsigned long res
, shift
;
4308 int rm
= bits (inst
, 0, 3);
4309 unsigned long shifttype
= bits (inst
, 5, 6);
4313 int rs
= bits (inst
, 8, 11);
4314 shift
= (rs
== 15 ? pc_val
+ 8
4315 : get_frame_register_unsigned (frame
, rs
)) & 0xFF;
4318 shift
= bits (inst
, 7, 11);
4320 res
= (rm
== ARM_PC_REGNUM
4321 ? (pc_val
+ (bit (inst
, 4) ? 12 : 8))
4322 : get_frame_register_unsigned (frame
, rm
));
4327 res
= shift
>= 32 ? 0 : res
<< shift
;
4331 res
= shift
>= 32 ? 0 : res
>> shift
;
4337 res
= ((res
& 0x80000000L
)
4338 ? ~((~res
) >> shift
) : res
>> shift
);
4341 case 3: /* ROR/RRX */
4344 res
= (res
>> 1) | (carry
? 0x80000000L
: 0);
4346 res
= (res
>> shift
) | (res
<< (32 - shift
));
4350 return res
& 0xffffffff;
/* Return number of 1-bits in VAL.  */

static int
bitcount (unsigned long val)
{
  int nbits;

  /* Kernighan's trick: each iteration clears the lowest set bit, so
     the loop runs once per 1-bit.  */
  for (nbits = 0; val != 0; nbits++)
    val &= val - 1;		/* Delete rightmost 1-bit in val.  */
  return nbits;
}
/* Return the size in bytes of the complete Thumb instruction whose
   first halfword is INST1.  A 32-bit Thumb-2 instruction starts with
   one of the halfword patterns 0b11101, 0b11110 or 0b11111.  */

static int
thumb_insn_size (unsigned short inst1)
{
  if ((inst1 & 0xe000) == 0xe000 && (inst1 & 0x1800) != 0)
    return 4;
  else
    return 2;
}
/* Advance the state bits of an IT block by one executed instruction
   and return the new state.  ITSTATE uses the CPSR IT-bits layout:
   the high three bits hold the base condition, the low five bits the
   per-instruction condition flags and remaining-count.  */

static unsigned int
thumb_advance_itstate (unsigned int itstate)
{
  /* Preserve IT[7:5], the first three bits of the condition.  Shift
     the upcoming condition flags left by one bit.  */
  itstate = (itstate & 0xe0) | ((itstate << 1) & 0x1f);

  /* If we have finished the IT block, clear the state.  */
  if ((itstate & 0x0f) == 0)
    itstate = 0;

  return itstate;
}
4390 /* Find the next PC after the current instruction executes. In some
4391 cases we can not statically determine the answer (see the IT state
4392 handling in this function); in that case, a breakpoint may be
4393 inserted in addition to the returned PC, which will be used to set
4394 another breakpoint by our caller. */
4397 thumb_get_next_pc_raw (struct frame_info
*frame
, CORE_ADDR pc
)
4399 struct gdbarch
*gdbarch
= get_frame_arch (frame
);
4400 struct address_space
*aspace
= get_frame_address_space (frame
);
4401 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
4402 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
4403 unsigned long pc_val
= ((unsigned long) pc
) + 4; /* PC after prefetch */
4404 unsigned short inst1
;
4405 CORE_ADDR nextpc
= pc
+ 2; /* Default is next instruction. */
4406 unsigned long offset
;
4407 ULONGEST status
, itstate
;
4409 nextpc
= MAKE_THUMB_ADDR (nextpc
);
4410 pc_val
= MAKE_THUMB_ADDR (pc_val
);
4412 inst1
= read_memory_unsigned_integer (pc
, 2, byte_order_for_code
);
4414 /* Thumb-2 conditional execution support. There are eight bits in
4415 the CPSR which describe conditional execution state. Once
4416 reconstructed (they're in a funny order), the low five bits
4417 describe the low bit of the condition for each instruction and
4418 how many instructions remain. The high three bits describe the
4419 base condition. One of the low four bits will be set if an IT
4420 block is active. These bits read as zero on earlier
4422 status
= get_frame_register_unsigned (frame
, ARM_PS_REGNUM
);
4423 itstate
= ((status
>> 8) & 0xfc) | ((status
>> 25) & 0x3);
4425 /* If-Then handling. On GNU/Linux, where this routine is used, we
4426 use an undefined instruction as a breakpoint. Unlike BKPT, IT
4427 can disable execution of the undefined instruction. So we might
4428 miss the breakpoint if we set it on a skipped conditional
4429 instruction. Because conditional instructions can change the
4430 flags, affecting the execution of further instructions, we may
4431 need to set two breakpoints. */
4433 if (gdbarch_tdep (gdbarch
)->thumb2_breakpoint
!= NULL
)
4435 if ((inst1
& 0xff00) == 0xbf00 && (inst1
& 0x000f) != 0)
4437 /* An IT instruction. Because this instruction does not
4438 modify the flags, we can accurately predict the next
4439 executed instruction. */
4440 itstate
= inst1
& 0x00ff;
4441 pc
+= thumb_insn_size (inst1
);
4443 while (itstate
!= 0 && ! condition_true (itstate
>> 4, status
))
4445 inst1
= read_memory_unsigned_integer (pc
, 2,
4446 byte_order_for_code
);
4447 pc
+= thumb_insn_size (inst1
);
4448 itstate
= thumb_advance_itstate (itstate
);
4451 return MAKE_THUMB_ADDR (pc
);
4453 else if (itstate
!= 0)
4455 /* We are in a conditional block. Check the condition. */
4456 if (! condition_true (itstate
>> 4, status
))
4458 /* Advance to the next executed instruction. */
4459 pc
+= thumb_insn_size (inst1
);
4460 itstate
= thumb_advance_itstate (itstate
);
4462 while (itstate
!= 0 && ! condition_true (itstate
>> 4, status
))
4464 inst1
= read_memory_unsigned_integer (pc
, 2,
4465 byte_order_for_code
);
4466 pc
+= thumb_insn_size (inst1
);
4467 itstate
= thumb_advance_itstate (itstate
);
4470 return MAKE_THUMB_ADDR (pc
);
4472 else if ((itstate
& 0x0f) == 0x08)
4474 /* This is the last instruction of the conditional
4475 block, and it is executed. We can handle it normally
4476 because the following instruction is not conditional,
4477 and we must handle it normally because it is
4478 permitted to branch. Fall through. */
4484 /* There are conditional instructions after this one.
4485 If this instruction modifies the flags, then we can
4486 not predict what the next executed instruction will
4487 be. Fortunately, this instruction is architecturally
4488 forbidden to branch; we know it will fall through.
4489 Start by skipping past it. */
4490 pc
+= thumb_insn_size (inst1
);
4491 itstate
= thumb_advance_itstate (itstate
);
4493 /* Set a breakpoint on the following instruction. */
4494 gdb_assert ((itstate
& 0x0f) != 0);
4495 arm_insert_single_step_breakpoint (gdbarch
, aspace
,
4496 MAKE_THUMB_ADDR (pc
));
4497 cond_negated
= (itstate
>> 4) & 1;
4499 /* Skip all following instructions with the same
4500 condition. If there is a later instruction in the IT
4501 block with the opposite condition, set the other
4502 breakpoint there. If not, then set a breakpoint on
4503 the instruction after the IT block. */
4506 inst1
= read_memory_unsigned_integer (pc
, 2,
4507 byte_order_for_code
);
4508 pc
+= thumb_insn_size (inst1
);
4509 itstate
= thumb_advance_itstate (itstate
);
4511 while (itstate
!= 0 && ((itstate
>> 4) & 1) == cond_negated
);
4513 return MAKE_THUMB_ADDR (pc
);
4517 else if (itstate
& 0x0f)
4519 /* We are in a conditional block. Check the condition. */
4520 int cond
= itstate
>> 4;
4522 if (! condition_true (cond
, status
))
4523 /* Advance to the next instruction. All the 32-bit
4524 instructions share a common prefix. */
4525 return MAKE_THUMB_ADDR (pc
+ thumb_insn_size (inst1
));
4527 /* Otherwise, handle the instruction normally. */
4530 if ((inst1
& 0xff00) == 0xbd00) /* pop {rlist, pc} */
4534 /* Fetch the saved PC from the stack. It's stored above
4535 all of the other registers. */
4536 offset
= bitcount (bits (inst1
, 0, 7)) * INT_REGISTER_SIZE
;
4537 sp
= get_frame_register_unsigned (frame
, ARM_SP_REGNUM
);
4538 nextpc
= read_memory_unsigned_integer (sp
+ offset
, 4, byte_order
);
4540 else if ((inst1
& 0xf000) == 0xd000) /* conditional branch */
4542 unsigned long cond
= bits (inst1
, 8, 11);
4543 if (cond
== 0x0f) /* 0x0f = SWI */
4545 struct gdbarch_tdep
*tdep
;
4546 tdep
= gdbarch_tdep (gdbarch
);
4548 if (tdep
->syscall_next_pc
!= NULL
)
4549 nextpc
= tdep
->syscall_next_pc (frame
);
4552 else if (cond
!= 0x0f && condition_true (cond
, status
))
4553 nextpc
= pc_val
+ (sbits (inst1
, 0, 7) << 1);
4555 else if ((inst1
& 0xf800) == 0xe000) /* unconditional branch */
4557 nextpc
= pc_val
+ (sbits (inst1
, 0, 10) << 1);
4559 else if (thumb_insn_size (inst1
) == 4) /* 32-bit instruction */
4561 unsigned short inst2
;
4562 inst2
= read_memory_unsigned_integer (pc
+ 2, 2, byte_order_for_code
);
4564 /* Default to the next instruction. */
4566 nextpc
= MAKE_THUMB_ADDR (nextpc
);
4568 if ((inst1
& 0xf800) == 0xf000 && (inst2
& 0x8000) == 0x8000)
4570 /* Branches and miscellaneous control instructions. */
4572 if ((inst2
& 0x1000) != 0 || (inst2
& 0xd001) == 0xc000)
4575 int j1
, j2
, imm1
, imm2
;
4577 imm1
= sbits (inst1
, 0, 10);
4578 imm2
= bits (inst2
, 0, 10);
4579 j1
= bit (inst2
, 13);
4580 j2
= bit (inst2
, 11);
4582 offset
= ((imm1
<< 12) + (imm2
<< 1));
4583 offset
^= ((!j2
) << 22) | ((!j1
) << 23);
4585 nextpc
= pc_val
+ offset
;
4586 /* For BLX make sure to clear the low bits. */
4587 if (bit (inst2
, 12) == 0)
4588 nextpc
= nextpc
& 0xfffffffc;
4590 else if (inst1
== 0xf3de && (inst2
& 0xff00) == 0x3f00)
4592 /* SUBS PC, LR, #imm8. */
4593 nextpc
= get_frame_register_unsigned (frame
, ARM_LR_REGNUM
);
4594 nextpc
-= inst2
& 0x00ff;
4596 else if ((inst2
& 0xd000) == 0x8000 && (inst1
& 0x0380) != 0x0380)
4598 /* Conditional branch. */
4599 if (condition_true (bits (inst1
, 6, 9), status
))
4601 int sign
, j1
, j2
, imm1
, imm2
;
4603 sign
= sbits (inst1
, 10, 10);
4604 imm1
= bits (inst1
, 0, 5);
4605 imm2
= bits (inst2
, 0, 10);
4606 j1
= bit (inst2
, 13);
4607 j2
= bit (inst2
, 11);
4609 offset
= (sign
<< 20) + (j2
<< 19) + (j1
<< 18);
4610 offset
+= (imm1
<< 12) + (imm2
<< 1);
4612 nextpc
= pc_val
+ offset
;
4616 else if ((inst1
& 0xfe50) == 0xe810)
4618 /* Load multiple or RFE. */
4619 int rn
, offset
, load_pc
= 1;
4621 rn
= bits (inst1
, 0, 3);
4622 if (bit (inst1
, 7) && !bit (inst1
, 8))
4625 if (!bit (inst2
, 15))
4627 offset
= bitcount (inst2
) * 4 - 4;
4629 else if (!bit (inst1
, 7) && bit (inst1
, 8))
4632 if (!bit (inst2
, 15))
4636 else if (bit (inst1
, 7) && bit (inst1
, 8))
4641 else if (!bit (inst1
, 7) && !bit (inst1
, 8))
4651 CORE_ADDR addr
= get_frame_register_unsigned (frame
, rn
);
4652 nextpc
= get_frame_memory_unsigned (frame
, addr
+ offset
, 4);
4655 else if ((inst1
& 0xffef) == 0xea4f && (inst2
& 0xfff0) == 0x0f00)
4657 /* MOV PC or MOVS PC. */
4658 nextpc
= get_frame_register_unsigned (frame
, bits (inst2
, 0, 3));
4659 nextpc
= MAKE_THUMB_ADDR (nextpc
);
4661 else if ((inst1
& 0xff70) == 0xf850 && (inst2
& 0xf000) == 0xf000)
4665 int rn
, load_pc
= 1;
4667 rn
= bits (inst1
, 0, 3);
4668 base
= get_frame_register_unsigned (frame
, rn
);
4669 if (rn
== ARM_PC_REGNUM
)
4671 base
= (base
+ 4) & ~(CORE_ADDR
) 0x3;
4673 base
+= bits (inst2
, 0, 11);
4675 base
-= bits (inst2
, 0, 11);
4677 else if (bit (inst1
, 7))
4678 base
+= bits (inst2
, 0, 11);
4679 else if (bit (inst2
, 11))
4681 if (bit (inst2
, 10))
4684 base
+= bits (inst2
, 0, 7);
4686 base
-= bits (inst2
, 0, 7);
4689 else if ((inst2
& 0x0fc0) == 0x0000)
4691 int shift
= bits (inst2
, 4, 5), rm
= bits (inst2
, 0, 3);
4692 base
+= get_frame_register_unsigned (frame
, rm
) << shift
;
4699 nextpc
= get_frame_memory_unsigned (frame
, base
, 4);
4701 else if ((inst1
& 0xfff0) == 0xe8d0 && (inst2
& 0xfff0) == 0xf000)
4704 CORE_ADDR tbl_reg
, table
, offset
, length
;
4706 tbl_reg
= bits (inst1
, 0, 3);
4707 if (tbl_reg
== 0x0f)
4708 table
= pc
+ 4; /* Regcache copy of PC isn't right yet. */
4710 table
= get_frame_register_unsigned (frame
, tbl_reg
);
4712 offset
= get_frame_register_unsigned (frame
, bits (inst2
, 0, 3));
4713 length
= 2 * get_frame_memory_unsigned (frame
, table
+ offset
, 1);
4714 nextpc
= pc_val
+ length
;
4716 else if ((inst1
& 0xfff0) == 0xe8d0 && (inst2
& 0xfff0) == 0xf010)
4719 CORE_ADDR tbl_reg
, table
, offset
, length
;
4721 tbl_reg
= bits (inst1
, 0, 3);
4722 if (tbl_reg
== 0x0f)
4723 table
= pc
+ 4; /* Regcache copy of PC isn't right yet. */
4725 table
= get_frame_register_unsigned (frame
, tbl_reg
);
4727 offset
= 2 * get_frame_register_unsigned (frame
, bits (inst2
, 0, 3));
4728 length
= 2 * get_frame_memory_unsigned (frame
, table
+ offset
, 2);
4729 nextpc
= pc_val
+ length
;
4732 else if ((inst1
& 0xff00) == 0x4700) /* bx REG, blx REG */
4734 if (bits (inst1
, 3, 6) == 0x0f)
4735 nextpc
= UNMAKE_THUMB_ADDR (pc_val
);
4737 nextpc
= get_frame_register_unsigned (frame
, bits (inst1
, 3, 6));
4739 else if ((inst1
& 0xff87) == 0x4687) /* mov pc, REG */
4741 if (bits (inst1
, 3, 6) == 0x0f)
4744 nextpc
= get_frame_register_unsigned (frame
, bits (inst1
, 3, 6));
4746 nextpc
= MAKE_THUMB_ADDR (nextpc
);
4748 else if ((inst1
& 0xf500) == 0xb100)
4751 int imm
= (bit (inst1
, 9) << 6) + (bits (inst1
, 3, 7) << 1);
4752 ULONGEST reg
= get_frame_register_unsigned (frame
, bits (inst1
, 0, 2));
4754 if (bit (inst1
, 11) && reg
!= 0)
4755 nextpc
= pc_val
+ imm
;
4756 else if (!bit (inst1
, 11) && reg
== 0)
4757 nextpc
= pc_val
+ imm
;
4762 /* Get the raw next address. PC is the current program counter, in
4763 FRAME, which is assumed to be executing in ARM mode.
4765 The value returned has the execution state of the next instruction
4766 encoded in it. Use IS_THUMB_ADDR () to see whether the instruction is
4767 in Thumb-State, and gdbarch_addr_bits_remove () to get the plain memory
4771 arm_get_next_pc_raw (struct frame_info
*frame
, CORE_ADDR pc
)
4773 struct gdbarch
*gdbarch
= get_frame_arch (frame
);
4774 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
4775 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
4776 unsigned long pc_val
;
4777 unsigned long this_instr
;
4778 unsigned long status
;
4781 pc_val
= (unsigned long) pc
;
4782 this_instr
= read_memory_unsigned_integer (pc
, 4, byte_order_for_code
);
4784 status
= get_frame_register_unsigned (frame
, ARM_PS_REGNUM
);
4785 nextpc
= (CORE_ADDR
) (pc_val
+ 4); /* Default case */
4787 if (bits (this_instr
, 28, 31) == INST_NV
)
4788 switch (bits (this_instr
, 24, 27))
4793 /* Branch with Link and change to Thumb. */
4794 nextpc
= BranchDest (pc
, this_instr
);
4795 nextpc
|= bit (this_instr
, 24) << 1;
4796 nextpc
= MAKE_THUMB_ADDR (nextpc
);
4802 /* Coprocessor register transfer. */
4803 if (bits (this_instr
, 12, 15) == 15)
4804 error (_("Invalid update to pc in instruction"));
4807 else if (condition_true (bits (this_instr
, 28, 31), status
))
4809 switch (bits (this_instr
, 24, 27))
4812 case 0x1: /* data processing */
4816 unsigned long operand1
, operand2
, result
= 0;
4820 if (bits (this_instr
, 12, 15) != 15)
4823 if (bits (this_instr
, 22, 25) == 0
4824 && bits (this_instr
, 4, 7) == 9) /* multiply */
4825 error (_("Invalid update to pc in instruction"));
4827 /* BX <reg>, BLX <reg> */
4828 if (bits (this_instr
, 4, 27) == 0x12fff1
4829 || bits (this_instr
, 4, 27) == 0x12fff3)
4831 rn
= bits (this_instr
, 0, 3);
4832 nextpc
= ((rn
== ARM_PC_REGNUM
)
4834 : get_frame_register_unsigned (frame
, rn
));
4839 /* Multiply into PC. */
4840 c
= (status
& FLAG_C
) ? 1 : 0;
4841 rn
= bits (this_instr
, 16, 19);
4842 operand1
= ((rn
== ARM_PC_REGNUM
)
4844 : get_frame_register_unsigned (frame
, rn
));
4846 if (bit (this_instr
, 25))
4848 unsigned long immval
= bits (this_instr
, 0, 7);
4849 unsigned long rotate
= 2 * bits (this_instr
, 8, 11);
4850 operand2
= ((immval
>> rotate
) | (immval
<< (32 - rotate
)))
4853 else /* operand 2 is a shifted register. */
4854 operand2
= shifted_reg_val (frame
, this_instr
, c
,
4857 switch (bits (this_instr
, 21, 24))
4860 result
= operand1
& operand2
;
4864 result
= operand1
^ operand2
;
4868 result
= operand1
- operand2
;
4872 result
= operand2
- operand1
;
4876 result
= operand1
+ operand2
;
4880 result
= operand1
+ operand2
+ c
;
4884 result
= operand1
- operand2
+ c
;
4888 result
= operand2
- operand1
+ c
;
4894 case 0xb: /* tst, teq, cmp, cmn */
4895 result
= (unsigned long) nextpc
;
4899 result
= operand1
| operand2
;
4903 /* Always step into a function. */
4908 result
= operand1
& ~operand2
;
4916 /* In 26-bit APCS the bottom two bits of the result are
4917 ignored, and we always end up in ARM state. */
4919 nextpc
= arm_addr_bits_remove (gdbarch
, result
);
4927 case 0x5: /* data transfer */
4930 if (bits (this_instr
, 25, 27) == 0x3 && bit (this_instr
, 4) == 1)
4932 /* Media instructions and architecturally undefined
4937 if (bit (this_instr
, 20))
4940 if (bits (this_instr
, 12, 15) == 15)
4946 if (bit (this_instr
, 22))
4947 error (_("Invalid update to pc in instruction"));
4949 /* byte write to PC */
4950 rn
= bits (this_instr
, 16, 19);
4951 base
= ((rn
== ARM_PC_REGNUM
)
4953 : get_frame_register_unsigned (frame
, rn
));
4955 if (bit (this_instr
, 24))
4958 int c
= (status
& FLAG_C
) ? 1 : 0;
4959 unsigned long offset
=
4960 (bit (this_instr
, 25)
4961 ? shifted_reg_val (frame
, this_instr
, c
, pc_val
, status
)
4962 : bits (this_instr
, 0, 11));
4964 if (bit (this_instr
, 23))
4970 (CORE_ADDR
) read_memory_unsigned_integer ((CORE_ADDR
) base
,
4977 case 0x9: /* block transfer */
4978 if (bit (this_instr
, 20))
4981 if (bit (this_instr
, 15))
4985 unsigned long rn_val
4986 = get_frame_register_unsigned (frame
,
4987 bits (this_instr
, 16, 19));
4989 if (bit (this_instr
, 23))
4992 unsigned long reglist
= bits (this_instr
, 0, 14);
4993 offset
= bitcount (reglist
) * 4;
4994 if (bit (this_instr
, 24)) /* pre */
4997 else if (bit (this_instr
, 24))
5001 (CORE_ADDR
) read_memory_unsigned_integer ((CORE_ADDR
)
5008 case 0xb: /* branch & link */
5009 case 0xa: /* branch */
5011 nextpc
= BranchDest (pc
, this_instr
);
5017 case 0xe: /* coproc ops */
5021 struct gdbarch_tdep
*tdep
;
5022 tdep
= gdbarch_tdep (gdbarch
);
5024 if (tdep
->syscall_next_pc
!= NULL
)
5025 nextpc
= tdep
->syscall_next_pc (frame
);
5031 fprintf_filtered (gdb_stderr
, _("Bad bit-field extraction\n"));
5039 /* Determine next PC after current instruction executes. Will call either
5040 arm_get_next_pc_raw or thumb_get_next_pc_raw. Error out if infinite
5041 loop is detected. */
5044 arm_get_next_pc (struct frame_info
*frame
, CORE_ADDR pc
)
5048 if (arm_frame_is_thumb (frame
))
5049 nextpc
= thumb_get_next_pc_raw (frame
, pc
);
5051 nextpc
= arm_get_next_pc_raw (frame
, pc
);
5056 /* Like insert_single_step_breakpoint, but make sure we use a breakpoint
5057 of the appropriate mode (as encoded in the PC value), even if this
5058 differs from what would be expected according to the symbol tables. */
5061 arm_insert_single_step_breakpoint (struct gdbarch
*gdbarch
,
5062 struct address_space
*aspace
,
5065 struct cleanup
*old_chain
5066 = make_cleanup_restore_integer (&arm_override_mode
);
5068 arm_override_mode
= IS_THUMB_ADDR (pc
);
5069 pc
= gdbarch_addr_bits_remove (gdbarch
, pc
);
5071 insert_single_step_breakpoint (gdbarch
, aspace
, pc
);
5073 do_cleanups (old_chain
);
5076 /* Checks for an atomic sequence of instructions beginning with a LDREX{,B,H,D}
5077 instruction and ending with a STREX{,B,H,D} instruction. If such a sequence
5078 is found, attempt to step through it. A breakpoint is placed at the end of
5082 thumb_deal_with_atomic_sequence_raw (struct frame_info
*frame
)
5084 struct gdbarch
*gdbarch
= get_frame_arch (frame
);
5085 struct address_space
*aspace
= get_frame_address_space (frame
);
5086 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
5087 CORE_ADDR pc
= get_frame_pc (frame
);
5088 CORE_ADDR breaks
[2] = {-1, -1};
5090 unsigned short insn1
, insn2
;
5093 int last_breakpoint
= 0; /* Defaults to 0 (no breakpoints placed). */
5094 const int atomic_sequence_length
= 16; /* Instruction sequence length. */
5095 ULONGEST status
, itstate
;
5097 /* We currently do not support atomic sequences within an IT block. */
5098 status
= get_frame_register_unsigned (frame
, ARM_PS_REGNUM
);
5099 itstate
= ((status
>> 8) & 0xfc) | ((status
>> 25) & 0x3);
5103 /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction. */
5104 insn1
= read_memory_unsigned_integer (loc
, 2, byte_order_for_code
);
5106 if (thumb_insn_size (insn1
) != 4)
5109 insn2
= read_memory_unsigned_integer (loc
, 2, byte_order_for_code
);
5111 if (!((insn1
& 0xfff0) == 0xe850
5112 || ((insn1
& 0xfff0) == 0xe8d0 && (insn2
& 0x00c0) == 0x0040)))
5115 /* Assume that no atomic sequence is longer than "atomic_sequence_length"
5117 for (insn_count
= 0; insn_count
< atomic_sequence_length
; ++insn_count
)
5119 insn1
= read_memory_unsigned_integer (loc
, 2, byte_order_for_code
);
5122 if (thumb_insn_size (insn1
) != 4)
5124 /* Assume that there is at most one conditional branch in the
5125 atomic sequence. If a conditional branch is found, put a
5126 breakpoint in its destination address. */
5127 if ((insn1
& 0xf000) == 0xd000 && bits (insn1
, 8, 11) != 0x0f)
5129 if (last_breakpoint
> 0)
5130 return 0; /* More than one conditional branch found,
5131 fallback to the standard code. */
5133 breaks
[1] = loc
+ 2 + (sbits (insn1
, 0, 7) << 1);
5137 /* We do not support atomic sequences that use any *other*
5138 instructions but conditional branches to change the PC.
5139 Fall back to standard code to avoid losing control of
5141 else if (thumb_instruction_changes_pc (insn1
))
5146 insn2
= read_memory_unsigned_integer (loc
, 2, byte_order_for_code
);
5149 /* Assume that there is at most one conditional branch in the
5150 atomic sequence. If a conditional branch is found, put a
5151 breakpoint in its destination address. */
5152 if ((insn1
& 0xf800) == 0xf000
5153 && (insn2
& 0xd000) == 0x8000
5154 && (insn1
& 0x0380) != 0x0380)
5156 int sign
, j1
, j2
, imm1
, imm2
;
5157 unsigned int offset
;
5159 sign
= sbits (insn1
, 10, 10);
5160 imm1
= bits (insn1
, 0, 5);
5161 imm2
= bits (insn2
, 0, 10);
5162 j1
= bit (insn2
, 13);
5163 j2
= bit (insn2
, 11);
5165 offset
= (sign
<< 20) + (j2
<< 19) + (j1
<< 18);
5166 offset
+= (imm1
<< 12) + (imm2
<< 1);
5168 if (last_breakpoint
> 0)
5169 return 0; /* More than one conditional branch found,
5170 fallback to the standard code. */
5172 breaks
[1] = loc
+ offset
;
5176 /* We do not support atomic sequences that use any *other*
5177 instructions but conditional branches to change the PC.
5178 Fall back to standard code to avoid losing control of
5180 else if (thumb2_instruction_changes_pc (insn1
, insn2
))
5183 /* If we find a strex{,b,h,d}, we're done. */
5184 if ((insn1
& 0xfff0) == 0xe840
5185 || ((insn1
& 0xfff0) == 0xe8c0 && (insn2
& 0x00c0) == 0x0040))
5190 /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence. */
5191 if (insn_count
== atomic_sequence_length
)
5194 /* Insert a breakpoint right after the end of the atomic sequence. */
5197 /* Check for duplicated breakpoints. Check also for a breakpoint
5198 placed (branch instruction's destination) anywhere in sequence. */
5200 && (breaks
[1] == breaks
[0]
5201 || (breaks
[1] >= pc
&& breaks
[1] < loc
)))
5202 last_breakpoint
= 0;
5204 /* Effectively inserts the breakpoints. */
5205 for (index
= 0; index
<= last_breakpoint
; index
++)
5206 arm_insert_single_step_breakpoint (gdbarch
, aspace
,
5207 MAKE_THUMB_ADDR (breaks
[index
]));
5213 arm_deal_with_atomic_sequence_raw (struct frame_info
*frame
)
5215 struct gdbarch
*gdbarch
= get_frame_arch (frame
);
5216 struct address_space
*aspace
= get_frame_address_space (frame
);
5217 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
5218 CORE_ADDR pc
= get_frame_pc (frame
);
5219 CORE_ADDR breaks
[2] = {-1, -1};
5224 int last_breakpoint
= 0; /* Defaults to 0 (no breakpoints placed). */
5225 const int atomic_sequence_length
= 16; /* Instruction sequence length. */
5227 /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction.
5228 Note that we do not currently support conditionally executed atomic
5230 insn
= read_memory_unsigned_integer (loc
, 4, byte_order_for_code
);
5232 if ((insn
& 0xff9000f0) != 0xe1900090)
5235 /* Assume that no atomic sequence is longer than "atomic_sequence_length"
5237 for (insn_count
= 0; insn_count
< atomic_sequence_length
; ++insn_count
)
5239 insn
= read_memory_unsigned_integer (loc
, 4, byte_order_for_code
);
5242 /* Assume that there is at most one conditional branch in the atomic
5243 sequence. If a conditional branch is found, put a breakpoint in
5244 its destination address. */
5245 if (bits (insn
, 24, 27) == 0xa)
5247 if (last_breakpoint
> 0)
5248 return 0; /* More than one conditional branch found, fallback
5249 to the standard single-step code. */
5251 breaks
[1] = BranchDest (loc
- 4, insn
);
5255 /* We do not support atomic sequences that use any *other* instructions
5256 but conditional branches to change the PC. Fall back to standard
5257 code to avoid losing control of execution. */
5258 else if (arm_instruction_changes_pc (insn
))
5261 /* If we find a strex{,b,h,d}, we're done. */
5262 if ((insn
& 0xff9000f0) == 0xe1800090)
5266 /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence. */
5267 if (insn_count
== atomic_sequence_length
)
5270 /* Insert a breakpoint right after the end of the atomic sequence. */
5273 /* Check for duplicated breakpoints. Check also for a breakpoint
5274 placed (branch instruction's destination) anywhere in sequence. */
5276 && (breaks
[1] == breaks
[0]
5277 || (breaks
[1] >= pc
&& breaks
[1] < loc
)))
5278 last_breakpoint
= 0;
5280 /* Effectively inserts the breakpoints. */
5281 for (index
= 0; index
<= last_breakpoint
; index
++)
5282 arm_insert_single_step_breakpoint (gdbarch
, aspace
, breaks
[index
]);
/* Dispatch atomic-sequence (ldrex..strex) single-stepping to the
   Thumb or ARM handler according to FRAME's execution state.  Returns
   nonzero if a sequence was recognized and breakpoints were placed.  */

static int
arm_deal_with_atomic_sequence (struct frame_info *frame)
{
  if (arm_frame_is_thumb (frame))
    return thumb_deal_with_atomic_sequence_raw (frame);
  else
    return arm_deal_with_atomic_sequence_raw (frame);
}
5296 /* single_step() is called just before we want to resume the inferior,
5297 if we want to single-step it but there is no hardware or kernel
5298 single-step support. We find the target of the coming instruction
5299 and breakpoint it. */
5302 arm_software_single_step (struct frame_info
*frame
)
5304 struct gdbarch
*gdbarch
= get_frame_arch (frame
);
5305 struct address_space
*aspace
= get_frame_address_space (frame
);
5308 if (arm_deal_with_atomic_sequence (frame
))
5311 next_pc
= arm_get_next_pc (frame
, get_frame_pc (frame
));
5312 arm_insert_single_step_breakpoint (gdbarch
, aspace
, next_pc
);
5317 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
5318 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
5319 NULL if an error occurs. BUF is freed. */
5322 extend_buffer_earlier (gdb_byte
*buf
, CORE_ADDR endaddr
,
5323 int old_len
, int new_len
)
5326 int bytes_to_read
= new_len
- old_len
;
5328 new_buf
= xmalloc (new_len
);
5329 memcpy (new_buf
+ bytes_to_read
, buf
, old_len
);
5331 if (target_read_memory (endaddr
- new_len
, new_buf
, bytes_to_read
) != 0)
5339 /* An IT block is at most the 2-byte IT instruction followed by
5340 four 4-byte instructions. The furthest back we must search to
5341 find an IT block that affects the current instruction is thus
5342 2 + 3 * 4 == 14 bytes. */
5343 #define MAX_IT_BLOCK_PREFIX 14
5345 /* Use a quick scan if there are more than this many bytes of
5347 #define IT_SCAN_THRESHOLD 32
5349 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
5350 A breakpoint in an IT block may not be hit, depending on the
5353 arm_adjust_breakpoint_address (struct gdbarch
*gdbarch
, CORE_ADDR bpaddr
)
5357 CORE_ADDR boundary
, func_start
;
5359 enum bfd_endian order
= gdbarch_byte_order_for_code (gdbarch
);
5360 int i
, any
, last_it
, last_it_count
;
5362 /* If we are using BKPT breakpoints, none of this is necessary. */
5363 if (gdbarch_tdep (gdbarch
)->thumb2_breakpoint
== NULL
)
5366 /* ARM mode does not have this problem. */
5367 if (!arm_pc_is_thumb (gdbarch
, bpaddr
))
5370 /* We are setting a breakpoint in Thumb code that could potentially
5371 contain an IT block. The first step is to find how much Thumb
5372 code there is; we do not need to read outside of known Thumb
5374 map_type
= arm_find_mapping_symbol (bpaddr
, &boundary
);
5376 /* Thumb-2 code must have mapping symbols to have a chance. */
5379 bpaddr
= gdbarch_addr_bits_remove (gdbarch
, bpaddr
);
5381 if (find_pc_partial_function (bpaddr
, NULL
, &func_start
, NULL
)
5382 && func_start
> boundary
)
5383 boundary
= func_start
;
5385 /* Search for a candidate IT instruction. We have to do some fancy
5386 footwork to distinguish a real IT instruction from the second
5387 half of a 32-bit instruction, but there is no need for that if
5388 there's no candidate. */
5389 buf_len
= min (bpaddr
- boundary
, MAX_IT_BLOCK_PREFIX
);
5391 /* No room for an IT instruction. */
5394 buf
= xmalloc (buf_len
);
5395 if (target_read_memory (bpaddr
- buf_len
, buf
, buf_len
) != 0)
5398 for (i
= 0; i
< buf_len
; i
+= 2)
5400 unsigned short inst1
= extract_unsigned_integer (&buf
[i
], 2, order
);
5401 if ((inst1
& 0xff00) == 0xbf00 && (inst1
& 0x000f) != 0)
5413 /* OK, the code bytes before this instruction contain at least one
5414 halfword which resembles an IT instruction. We know that it's
5415 Thumb code, but there are still two possibilities. Either the
5416 halfword really is an IT instruction, or it is the second half of
5417 a 32-bit Thumb instruction. The only way we can tell is to
5418 scan forwards from a known instruction boundary. */
5419 if (bpaddr
- boundary
> IT_SCAN_THRESHOLD
)
5423 /* There's a lot of code before this instruction. Start with an
5424 optimistic search; it's easy to recognize halfwords that can
5425 not be the start of a 32-bit instruction, and use that to
5426 lock on to the instruction boundaries. */
5427 buf
= extend_buffer_earlier (buf
, bpaddr
, buf_len
, IT_SCAN_THRESHOLD
);
5430 buf_len
= IT_SCAN_THRESHOLD
;
5433 for (i
= 0; i
< buf_len
- sizeof (buf
) && ! definite
; i
+= 2)
5435 unsigned short inst1
= extract_unsigned_integer (&buf
[i
], 2, order
);
5436 if (thumb_insn_size (inst1
) == 2)
5443 /* At this point, if DEFINITE, BUF[I] is the first place we
5444 are sure that we know the instruction boundaries, and it is far
5445 enough from BPADDR that we could not miss an IT instruction
5446 affecting BPADDR. If ! DEFINITE, give up - start from a
5450 buf
= extend_buffer_earlier (buf
, bpaddr
, buf_len
,
5454 buf_len
= bpaddr
- boundary
;
5460 buf
= extend_buffer_earlier (buf
, bpaddr
, buf_len
, bpaddr
- boundary
);
5463 buf_len
= bpaddr
- boundary
;
5467 /* Scan forwards. Find the last IT instruction before BPADDR. */
5472 unsigned short inst1
= extract_unsigned_integer (&buf
[i
], 2, order
);
5474 if ((inst1
& 0xff00) == 0xbf00 && (inst1
& 0x000f) != 0)
5479 else if (inst1
& 0x0002)
5481 else if (inst1
& 0x0004)
5486 i
+= thumb_insn_size (inst1
);
5492 /* There wasn't really an IT instruction after all. */
5495 if (last_it_count
< 1)
5496 /* It was too far away. */
5499 /* This really is a trouble spot. Move the breakpoint to the IT
5501 return bpaddr
- buf_len
+ last_it
;
5504 /* ARM displaced stepping support.
5506 Generally ARM displaced stepping works as follows:
5508 1. When an instruction is to be single-stepped, it is first decoded by
5509 arm_process_displaced_insn (called from arm_displaced_step_copy_insn).
5510 Depending on the type of instruction, it is then copied to a scratch
5511 location, possibly in a modified form. The copy_* set of functions
5512 performs such modification, as necessary. A breakpoint is placed after
5513 the modified instruction in the scratch space to return control to GDB.
5514 Note in particular that instructions which modify the PC will no longer
5515 do so after modification.
5517 2. The instruction is single-stepped, by setting the PC to the scratch
5518 location address, and resuming. Control returns to GDB when the
5521 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
5522 function used for the current instruction. This function's job is to
5523 put the CPU/memory state back to what it would have been if the
5524 instruction had been executed unmodified in its original location. */
5526 /* NOP instruction (mov r0, r0). */
5527 #define ARM_NOP 0xe1a00000
5528 #define THUMB_NOP 0x4600
5530 /* Helper for register reads for displaced stepping. In particular, this
5531 returns the PC as it would be seen by the instruction at its original
5535 displaced_read_reg (struct regcache
*regs
, struct displaced_step_closure
*dsc
,
5539 CORE_ADDR from
= dsc
->insn_addr
;
5541 if (regno
== ARM_PC_REGNUM
)
5543 /* Compute pipeline offset:
5544 - When executing an ARM instruction, PC reads as the address of the
5545 current instruction plus 8.
5546 - When executing a Thumb instruction, PC reads as the address of the
5547 current instruction plus 4. */
5554 if (debug_displaced
)
5555 fprintf_unfiltered (gdb_stdlog
, "displaced: read pc value %.8lx\n",
5556 (unsigned long) from
);
5557 return (ULONGEST
) from
;
5561 regcache_cooked_read_unsigned (regs
, regno
, &ret
);
5562 if (debug_displaced
)
5563 fprintf_unfiltered (gdb_stdlog
, "displaced: read r%d value %.8lx\n",
5564 regno
, (unsigned long) ret
);
5570 displaced_in_arm_mode (struct regcache
*regs
)
5573 ULONGEST t_bit
= arm_psr_thumb_bit (get_regcache_arch (regs
));
5575 regcache_cooked_read_unsigned (regs
, ARM_PS_REGNUM
, &ps
);
5577 return (ps
& t_bit
) == 0;
5580 /* Write to the PC as from a branch instruction. */
5583 branch_write_pc (struct regcache
*regs
, struct displaced_step_closure
*dsc
,
5587 /* Note: If bits 0/1 are set, this branch would be unpredictable for
5588 architecture versions < 6. */
5589 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
,
5590 val
& ~(ULONGEST
) 0x3);
5592 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
,
5593 val
& ~(ULONGEST
) 0x1);
5596 /* Write to the PC as from a branch-exchange instruction. */
5599 bx_write_pc (struct regcache
*regs
, ULONGEST val
)
5602 ULONGEST t_bit
= arm_psr_thumb_bit (get_regcache_arch (regs
));
5604 regcache_cooked_read_unsigned (regs
, ARM_PS_REGNUM
, &ps
);
5608 regcache_cooked_write_unsigned (regs
, ARM_PS_REGNUM
, ps
| t_bit
);
5609 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
, val
& 0xfffffffe);
5611 else if ((val
& 2) == 0)
5613 regcache_cooked_write_unsigned (regs
, ARM_PS_REGNUM
, ps
& ~t_bit
);
5614 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
, val
);
5618 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
5619 mode, align dest to 4 bytes). */
5620 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
5621 regcache_cooked_write_unsigned (regs
, ARM_PS_REGNUM
, ps
& ~t_bit
);
5622 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
, val
& 0xfffffffc);
5626 /* Write to the PC as if from a load instruction. */
5629 load_write_pc (struct regcache
*regs
, struct displaced_step_closure
*dsc
,
5632 if (DISPLACED_STEPPING_ARCH_VERSION
>= 5)
5633 bx_write_pc (regs
, val
);
5635 branch_write_pc (regs
, dsc
, val
);
5638 /* Write to the PC as if from an ALU instruction. */
5641 alu_write_pc (struct regcache
*regs
, struct displaced_step_closure
*dsc
,
5644 if (DISPLACED_STEPPING_ARCH_VERSION
>= 7 && !dsc
->is_thumb
)
5645 bx_write_pc (regs
, val
);
5647 branch_write_pc (regs
, dsc
, val
);
5650 /* Helper for writing to registers for displaced stepping. Writing to the PC
5651 has a varying effects depending on the instruction which does the write:
5652 this is controlled by the WRITE_PC argument. */
5655 displaced_write_reg (struct regcache
*regs
, struct displaced_step_closure
*dsc
,
5656 int regno
, ULONGEST val
, enum pc_write_style write_pc
)
5658 if (regno
== ARM_PC_REGNUM
)
5660 if (debug_displaced
)
5661 fprintf_unfiltered (gdb_stdlog
, "displaced: writing pc %.8lx\n",
5662 (unsigned long) val
);
5665 case BRANCH_WRITE_PC
:
5666 branch_write_pc (regs
, dsc
, val
);
5670 bx_write_pc (regs
, val
);
5674 load_write_pc (regs
, dsc
, val
);
5678 alu_write_pc (regs
, dsc
, val
);
5681 case CANNOT_WRITE_PC
:
5682 warning (_("Instruction wrote to PC in an unexpected way when "
5683 "single-stepping"));
5687 internal_error (__FILE__
, __LINE__
,
5688 _("Invalid argument to displaced_write_reg"));
5691 dsc
->wrote_to_pc
= 1;
5695 if (debug_displaced
)
5696 fprintf_unfiltered (gdb_stdlog
, "displaced: writing r%d value %.8lx\n",
5697 regno
, (unsigned long) val
);
5698 regcache_cooked_write_unsigned (regs
, regno
, val
);
5702 /* This function is used to concisely determine if an instruction INSN
5703 references PC. Register fields of interest in INSN should have the
5704 corresponding fields of BITMASK set to 0b1111. The function
5705 returns return 1 if any of these fields in INSN reference the PC
5706 (also 0b1111, r15), else it returns 0. */
/* This function is used to concisely determine if an instruction INSN
   references PC.  Register fields of interest in INSN should have the
   corresponding fields of BITMASK set to 0b1111.  The function returns
   1 if any of these fields in INSN reference the PC (also 0b1111,
   r15), else it returns 0.  */

static int
insn_references_pc (uint32_t insn, uint32_t bitmask)
{
  uint32_t lowbit = 1;

  while (bitmask != 0)
    {
      uint32_t mask;

      /* Skip to the lowest set bit of BITMASK; it marks the bottom of
	 the next 4-bit register field.  */
      for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
	;

      if (!lowbit)
	return 0;

      mask = lowbit * 0xf;

      /* The field references PC iff all four of its bits are set.  */
      if ((insn & mask) == mask)
	return 1;

      bitmask &= ~mask;
    }

  return 0;
}
5734 /* The simplest copy function. Many instructions have the same effect no
5735 matter what address they are executed at: in those cases, use this. */
5738 arm_copy_unmodified (struct gdbarch
*gdbarch
, uint32_t insn
,
5739 const char *iname
, struct displaced_step_closure
*dsc
)
5741 if (debug_displaced
)
5742 fprintf_unfiltered (gdb_stdlog
, "displaced: copying insn %.8lx, "
5743 "opcode/class '%s' unmodified\n", (unsigned long) insn
,
5746 dsc
->modinsn
[0] = insn
;
5752 thumb_copy_unmodified_32bit (struct gdbarch
*gdbarch
, uint16_t insn1
,
5753 uint16_t insn2
, const char *iname
,
5754 struct displaced_step_closure
*dsc
)
5756 if (debug_displaced
)
5757 fprintf_unfiltered (gdb_stdlog
, "displaced: copying insn %.4x %.4x, "
5758 "opcode/class '%s' unmodified\n", insn1
, insn2
,
5761 dsc
->modinsn
[0] = insn1
;
5762 dsc
->modinsn
[1] = insn2
;
5768 /* Copy 16-bit Thumb(Thumb and 16-bit Thumb-2) instruction without any
5771 thumb_copy_unmodified_16bit (struct gdbarch
*gdbarch
, unsigned int insn
,
5773 struct displaced_step_closure
*dsc
)
5775 if (debug_displaced
)
5776 fprintf_unfiltered (gdb_stdlog
, "displaced: copying insn %.4x, "
5777 "opcode/class '%s' unmodified\n", insn
,
5780 dsc
->modinsn
[0] = insn
;
5785 /* Preload instructions with immediate offset. */
5788 cleanup_preload (struct gdbarch
*gdbarch
,
5789 struct regcache
*regs
, struct displaced_step_closure
*dsc
)
5791 displaced_write_reg (regs
, dsc
, 0, dsc
->tmp
[0], CANNOT_WRITE_PC
);
5792 if (!dsc
->u
.preload
.immed
)
5793 displaced_write_reg (regs
, dsc
, 1, dsc
->tmp
[1], CANNOT_WRITE_PC
);
5797 install_preload (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5798 struct displaced_step_closure
*dsc
, unsigned int rn
)
5801 /* Preload instructions:
5803 {pli/pld} [rn, #+/-imm]
5805 {pli/pld} [r0, #+/-imm]. */
5807 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5808 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
5809 displaced_write_reg (regs
, dsc
, 0, rn_val
, CANNOT_WRITE_PC
);
5810 dsc
->u
.preload
.immed
= 1;
5812 dsc
->cleanup
= &cleanup_preload
;
5816 arm_copy_preload (struct gdbarch
*gdbarch
, uint32_t insn
, struct regcache
*regs
,
5817 struct displaced_step_closure
*dsc
)
5819 unsigned int rn
= bits (insn
, 16, 19);
5821 if (!insn_references_pc (insn
, 0x000f0000ul
))
5822 return arm_copy_unmodified (gdbarch
, insn
, "preload", dsc
);
5824 if (debug_displaced
)
5825 fprintf_unfiltered (gdb_stdlog
, "displaced: copying preload insn %.8lx\n",
5826 (unsigned long) insn
);
5828 dsc
->modinsn
[0] = insn
& 0xfff0ffff;
5830 install_preload (gdbarch
, regs
, dsc
, rn
);
5836 thumb2_copy_preload (struct gdbarch
*gdbarch
, uint16_t insn1
, uint16_t insn2
,
5837 struct regcache
*regs
, struct displaced_step_closure
*dsc
)
5839 unsigned int rn
= bits (insn1
, 0, 3);
5840 unsigned int u_bit
= bit (insn1
, 7);
5841 int imm12
= bits (insn2
, 0, 11);
5844 if (rn
!= ARM_PC_REGNUM
)
5845 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
, "preload", dsc
);
5847 /* PC is only allowed to use in PLI (immediate,literal) Encoding T3, and
5848 PLD (literal) Encoding T1. */
5849 if (debug_displaced
)
5850 fprintf_unfiltered (gdb_stdlog
,
5851 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
5852 (unsigned int) dsc
->insn_addr
, u_bit
? '+' : '-',
5858 /* Rewrite instruction {pli/pld} PC imm12 into:
5859 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
5863 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
5865 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5866 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
5868 pc_val
= displaced_read_reg (regs
, dsc
, ARM_PC_REGNUM
);
5870 displaced_write_reg (regs
, dsc
, 0, pc_val
, CANNOT_WRITE_PC
);
5871 displaced_write_reg (regs
, dsc
, 1, imm12
, CANNOT_WRITE_PC
);
5872 dsc
->u
.preload
.immed
= 0;
5874 /* {pli/pld} [r0, r1] */
5875 dsc
->modinsn
[0] = insn1
& 0xfff0;
5876 dsc
->modinsn
[1] = 0xf001;
5879 dsc
->cleanup
= &cleanup_preload
;
5883 /* Preload instructions with register offset. */
5886 install_preload_reg(struct gdbarch
*gdbarch
, struct regcache
*regs
,
5887 struct displaced_step_closure
*dsc
, unsigned int rn
,
5890 ULONGEST rn_val
, rm_val
;
5892 /* Preload register-offset instructions:
5894 {pli/pld} [rn, rm {, shift}]
5896 {pli/pld} [r0, r1 {, shift}]. */
5898 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5899 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
5900 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
5901 rm_val
= displaced_read_reg (regs
, dsc
, rm
);
5902 displaced_write_reg (regs
, dsc
, 0, rn_val
, CANNOT_WRITE_PC
);
5903 displaced_write_reg (regs
, dsc
, 1, rm_val
, CANNOT_WRITE_PC
);
5904 dsc
->u
.preload
.immed
= 0;
5906 dsc
->cleanup
= &cleanup_preload
;
5910 arm_copy_preload_reg (struct gdbarch
*gdbarch
, uint32_t insn
,
5911 struct regcache
*regs
,
5912 struct displaced_step_closure
*dsc
)
5914 unsigned int rn
= bits (insn
, 16, 19);
5915 unsigned int rm
= bits (insn
, 0, 3);
5918 if (!insn_references_pc (insn
, 0x000f000ful
))
5919 return arm_copy_unmodified (gdbarch
, insn
, "preload reg", dsc
);
5921 if (debug_displaced
)
5922 fprintf_unfiltered (gdb_stdlog
, "displaced: copying preload insn %.8lx\n",
5923 (unsigned long) insn
);
5925 dsc
->modinsn
[0] = (insn
& 0xfff0fff0) | 0x1;
5927 install_preload_reg (gdbarch
, regs
, dsc
, rn
, rm
);
5931 /* Copy/cleanup coprocessor load and store instructions. */
5934 cleanup_copro_load_store (struct gdbarch
*gdbarch
,
5935 struct regcache
*regs
,
5936 struct displaced_step_closure
*dsc
)
5938 ULONGEST rn_val
= displaced_read_reg (regs
, dsc
, 0);
5940 displaced_write_reg (regs
, dsc
, 0, dsc
->tmp
[0], CANNOT_WRITE_PC
);
5942 if (dsc
->u
.ldst
.writeback
)
5943 displaced_write_reg (regs
, dsc
, dsc
->u
.ldst
.rn
, rn_val
, LOAD_WRITE_PC
);
5947 install_copro_load_store (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5948 struct displaced_step_closure
*dsc
,
5949 int writeback
, unsigned int rn
)
5953 /* Coprocessor load/store instructions:
5955 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
5957 {stc/stc2} [r0, #+/-imm].
5959 ldc/ldc2 are handled identically. */
5961 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5962 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
5963 /* PC should be 4-byte aligned. */
5964 rn_val
= rn_val
& 0xfffffffc;
5965 displaced_write_reg (regs
, dsc
, 0, rn_val
, CANNOT_WRITE_PC
);
5967 dsc
->u
.ldst
.writeback
= writeback
;
5968 dsc
->u
.ldst
.rn
= rn
;
5970 dsc
->cleanup
= &cleanup_copro_load_store
;
5974 arm_copy_copro_load_store (struct gdbarch
*gdbarch
, uint32_t insn
,
5975 struct regcache
*regs
,
5976 struct displaced_step_closure
*dsc
)
5978 unsigned int rn
= bits (insn
, 16, 19);
5980 if (!insn_references_pc (insn
, 0x000f0000ul
))
5981 return arm_copy_unmodified (gdbarch
, insn
, "copro load/store", dsc
);
5983 if (debug_displaced
)
5984 fprintf_unfiltered (gdb_stdlog
, "displaced: copying coprocessor "
5985 "load/store insn %.8lx\n", (unsigned long) insn
);
5987 dsc
->modinsn
[0] = insn
& 0xfff0ffff;
5989 install_copro_load_store (gdbarch
, regs
, dsc
, bit (insn
, 25), rn
);
5995 thumb2_copy_copro_load_store (struct gdbarch
*gdbarch
, uint16_t insn1
,
5996 uint16_t insn2
, struct regcache
*regs
,
5997 struct displaced_step_closure
*dsc
)
5999 unsigned int rn
= bits (insn1
, 0, 3);
6001 if (rn
!= ARM_PC_REGNUM
)
6002 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
6003 "copro load/store", dsc
);
6005 if (debug_displaced
)
6006 fprintf_unfiltered (gdb_stdlog
, "displaced: copying coprocessor "
6007 "load/store insn %.4x%.4x\n", insn1
, insn2
);
6009 dsc
->modinsn
[0] = insn1
& 0xfff0;
6010 dsc
->modinsn
[1] = insn2
;
6013 /* This function is called for copying instruction LDC/LDC2/VLDR, which
6014 doesn't support writeback, so pass 0. */
6015 install_copro_load_store (gdbarch
, regs
, dsc
, 0, rn
);
6020 /* Clean up branch instructions (actually perform the branch, by setting
6024 cleanup_branch (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6025 struct displaced_step_closure
*dsc
)
6027 uint32_t status
= displaced_read_reg (regs
, dsc
, ARM_PS_REGNUM
);
6028 int branch_taken
= condition_true (dsc
->u
.branch
.cond
, status
);
6029 enum pc_write_style write_pc
= dsc
->u
.branch
.exchange
6030 ? BX_WRITE_PC
: BRANCH_WRITE_PC
;
6035 if (dsc
->u
.branch
.link
)
6037 /* The value of LR should be the next insn of current one. In order
6038 not to confuse logic hanlding later insn `bx lr', if current insn mode
6039 is Thumb, the bit 0 of LR value should be set to 1. */
6040 ULONGEST next_insn_addr
= dsc
->insn_addr
+ dsc
->insn_size
;
6043 next_insn_addr
|= 0x1;
6045 displaced_write_reg (regs
, dsc
, ARM_LR_REGNUM
, next_insn_addr
,
6049 displaced_write_reg (regs
, dsc
, ARM_PC_REGNUM
, dsc
->u
.branch
.dest
, write_pc
);
6052 /* Copy B/BL/BLX instructions with immediate destinations. */
6055 install_b_bl_blx (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6056 struct displaced_step_closure
*dsc
,
6057 unsigned int cond
, int exchange
, int link
, long offset
)
6059 /* Implement "BL<cond> <label>" as:
6061 Preparation: cond <- instruction condition
6062 Insn: mov r0, r0 (nop)
6063 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
6065 B<cond> similar, but don't set r14 in cleanup. */
6067 dsc
->u
.branch
.cond
= cond
;
6068 dsc
->u
.branch
.link
= link
;
6069 dsc
->u
.branch
.exchange
= exchange
;
6071 dsc
->u
.branch
.dest
= dsc
->insn_addr
;
6072 if (link
&& exchange
)
6073 /* For BLX, offset is computed from the Align (PC, 4). */
6074 dsc
->u
.branch
.dest
= dsc
->u
.branch
.dest
& 0xfffffffc;
6077 dsc
->u
.branch
.dest
+= 4 + offset
;
6079 dsc
->u
.branch
.dest
+= 8 + offset
;
6081 dsc
->cleanup
= &cleanup_branch
;
6084 arm_copy_b_bl_blx (struct gdbarch
*gdbarch
, uint32_t insn
,
6085 struct regcache
*regs
, struct displaced_step_closure
*dsc
)
6087 unsigned int cond
= bits (insn
, 28, 31);
6088 int exchange
= (cond
== 0xf);
6089 int link
= exchange
|| bit (insn
, 24);
6092 if (debug_displaced
)
6093 fprintf_unfiltered (gdb_stdlog
, "displaced: copying %s immediate insn "
6094 "%.8lx\n", (exchange
) ? "blx" : (link
) ? "bl" : "b",
6095 (unsigned long) insn
);
6097 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
6098 then arrange the switch into Thumb mode. */
6099 offset
= (bits (insn
, 0, 23) << 2) | (bit (insn
, 24) << 1) | 1;
6101 offset
= bits (insn
, 0, 23) << 2;
6103 if (bit (offset
, 25))
6104 offset
= offset
| ~0x3ffffff;
6106 dsc
->modinsn
[0] = ARM_NOP
;
6108 install_b_bl_blx (gdbarch
, regs
, dsc
, cond
, exchange
, link
, offset
);
6113 thumb2_copy_b_bl_blx (struct gdbarch
*gdbarch
, uint16_t insn1
,
6114 uint16_t insn2
, struct regcache
*regs
,
6115 struct displaced_step_closure
*dsc
)
6117 int link
= bit (insn2
, 14);
6118 int exchange
= link
&& !bit (insn2
, 12);
6121 int j1
= bit (insn2
, 13);
6122 int j2
= bit (insn2
, 11);
6123 int s
= sbits (insn1
, 10, 10);
6124 int i1
= !(j1
^ bit (insn1
, 10));
6125 int i2
= !(j2
^ bit (insn1
, 10));
6127 if (!link
&& !exchange
) /* B */
6129 offset
= (bits (insn2
, 0, 10) << 1);
6130 if (bit (insn2
, 12)) /* Encoding T4 */
6132 offset
|= (bits (insn1
, 0, 9) << 12)
6138 else /* Encoding T3 */
6140 offset
|= (bits (insn1
, 0, 5) << 12)
6144 cond
= bits (insn1
, 6, 9);
6149 offset
= (bits (insn1
, 0, 9) << 12);
6150 offset
|= ((i2
<< 22) | (i1
<< 23) | (s
<< 24));
6151 offset
|= exchange
?
6152 (bits (insn2
, 1, 10) << 2) : (bits (insn2
, 0, 10) << 1);
6155 if (debug_displaced
)
6156 fprintf_unfiltered (gdb_stdlog
, "displaced: copying %s insn "
6157 "%.4x %.4x with offset %.8lx\n",
6158 link
? (exchange
) ? "blx" : "bl" : "b",
6159 insn1
, insn2
, offset
);
6161 dsc
->modinsn
[0] = THUMB_NOP
;
6163 install_b_bl_blx (gdbarch
, regs
, dsc
, cond
, exchange
, link
, offset
);
6167 /* Copy B Thumb instructions. */
6169 thumb_copy_b (struct gdbarch
*gdbarch
, unsigned short insn
,
6170 struct displaced_step_closure
*dsc
)
6172 unsigned int cond
= 0;
6174 unsigned short bit_12_15
= bits (insn
, 12, 15);
6175 CORE_ADDR from
= dsc
->insn_addr
;
6177 if (bit_12_15
== 0xd)
6179 /* offset = SignExtend (imm8:0, 32) */
6180 offset
= sbits ((insn
<< 1), 0, 8);
6181 cond
= bits (insn
, 8, 11);
6183 else if (bit_12_15
== 0xe) /* Encoding T2 */
6185 offset
= sbits ((insn
<< 1), 0, 11);
6189 if (debug_displaced
)
6190 fprintf_unfiltered (gdb_stdlog
,
6191 "displaced: copying b immediate insn %.4x "
6192 "with offset %d\n", insn
, offset
);
6194 dsc
->u
.branch
.cond
= cond
;
6195 dsc
->u
.branch
.link
= 0;
6196 dsc
->u
.branch
.exchange
= 0;
6197 dsc
->u
.branch
.dest
= from
+ 4 + offset
;
6199 dsc
->modinsn
[0] = THUMB_NOP
;
6201 dsc
->cleanup
= &cleanup_branch
;
6206 /* Copy BX/BLX with register-specified destinations. */
6209 install_bx_blx_reg (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6210 struct displaced_step_closure
*dsc
, int link
,
6211 unsigned int cond
, unsigned int rm
)
6213 /* Implement {BX,BLX}<cond> <reg>" as:
6215 Preparation: cond <- instruction condition
6216 Insn: mov r0, r0 (nop)
6217 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
6219 Don't set r14 in cleanup for BX. */
6221 dsc
->u
.branch
.dest
= displaced_read_reg (regs
, dsc
, rm
);
6223 dsc
->u
.branch
.cond
= cond
;
6224 dsc
->u
.branch
.link
= link
;
6226 dsc
->u
.branch
.exchange
= 1;
6228 dsc
->cleanup
= &cleanup_branch
;
6232 arm_copy_bx_blx_reg (struct gdbarch
*gdbarch
, uint32_t insn
,
6233 struct regcache
*regs
, struct displaced_step_closure
*dsc
)
6235 unsigned int cond
= bits (insn
, 28, 31);
6238 int link
= bit (insn
, 5);
6239 unsigned int rm
= bits (insn
, 0, 3);
6241 if (debug_displaced
)
6242 fprintf_unfiltered (gdb_stdlog
, "displaced: copying insn %.8lx",
6243 (unsigned long) insn
);
6245 dsc
->modinsn
[0] = ARM_NOP
;
6247 install_bx_blx_reg (gdbarch
, regs
, dsc
, link
, cond
, rm
);
6252 thumb_copy_bx_blx_reg (struct gdbarch
*gdbarch
, uint16_t insn
,
6253 struct regcache
*regs
,
6254 struct displaced_step_closure
*dsc
)
6256 int link
= bit (insn
, 7);
6257 unsigned int rm
= bits (insn
, 3, 6);
6259 if (debug_displaced
)
6260 fprintf_unfiltered (gdb_stdlog
, "displaced: copying insn %.4x",
6261 (unsigned short) insn
);
6263 dsc
->modinsn
[0] = THUMB_NOP
;
6265 install_bx_blx_reg (gdbarch
, regs
, dsc
, link
, INST_AL
, rm
);
6271 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
6274 cleanup_alu_imm (struct gdbarch
*gdbarch
,
6275 struct regcache
*regs
, struct displaced_step_closure
*dsc
)
6277 ULONGEST rd_val
= displaced_read_reg (regs
, dsc
, 0);
6278 displaced_write_reg (regs
, dsc
, 0, dsc
->tmp
[0], CANNOT_WRITE_PC
);
6279 displaced_write_reg (regs
, dsc
, 1, dsc
->tmp
[1], CANNOT_WRITE_PC
);
6280 displaced_write_reg (regs
, dsc
, dsc
->rd
, rd_val
, ALU_WRITE_PC
);
6284 arm_copy_alu_imm (struct gdbarch
*gdbarch
, uint32_t insn
, struct regcache
*regs
,
6285 struct displaced_step_closure
*dsc
)
6287 unsigned int rn
= bits (insn
, 16, 19);
6288 unsigned int rd
= bits (insn
, 12, 15);
6289 unsigned int op
= bits (insn
, 21, 24);
6290 int is_mov
= (op
== 0xd);
6291 ULONGEST rd_val
, rn_val
;
6293 if (!insn_references_pc (insn
, 0x000ff000ul
))
6294 return arm_copy_unmodified (gdbarch
, insn
, "ALU immediate", dsc
);
6296 if (debug_displaced
)
6297 fprintf_unfiltered (gdb_stdlog
, "displaced: copying immediate %s insn "
6298 "%.8lx\n", is_mov
? "move" : "ALU",
6299 (unsigned long) insn
);
6301 /* Instruction is of form:
6303 <op><cond> rd, [rn,] #imm
6307 Preparation: tmp1, tmp2 <- r0, r1;
6309 Insn: <op><cond> r0, r1, #imm
6310 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
6313 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
6314 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
6315 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
6316 rd_val
= displaced_read_reg (regs
, dsc
, rd
);
6317 displaced_write_reg (regs
, dsc
, 0, rd_val
, CANNOT_WRITE_PC
);
6318 displaced_write_reg (regs
, dsc
, 1, rn_val
, CANNOT_WRITE_PC
);
6322 dsc
->modinsn
[0] = insn
& 0xfff00fff;
6324 dsc
->modinsn
[0] = (insn
& 0xfff00fff) | 0x10000;
6326 dsc
->cleanup
= &cleanup_alu_imm
;
6332 thumb2_copy_alu_imm (struct gdbarch
*gdbarch
, uint16_t insn1
,
6333 uint16_t insn2
, struct regcache
*regs
,
6334 struct displaced_step_closure
*dsc
)
6336 unsigned int op
= bits (insn1
, 5, 8);
6337 unsigned int rn
, rm
, rd
;
6338 ULONGEST rd_val
, rn_val
;
6340 rn
= bits (insn1
, 0, 3); /* Rn */
6341 rm
= bits (insn2
, 0, 3); /* Rm */
6342 rd
= bits (insn2
, 8, 11); /* Rd */
6344 /* This routine is only called for instruction MOV. */
6345 gdb_assert (op
== 0x2 && rn
== 0xf);
6347 if (rm
!= ARM_PC_REGNUM
&& rd
!= ARM_PC_REGNUM
)
6348 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
, "ALU imm", dsc
);
6350 if (debug_displaced
)
6351 fprintf_unfiltered (gdb_stdlog
, "displaced: copying reg %s insn %.4x%.4x\n",
6352 "ALU", insn1
, insn2
);
6354 /* Instruction is of form:
6356 <op><cond> rd, [rn,] #imm
6360 Preparation: tmp1, tmp2 <- r0, r1;
6362 Insn: <op><cond> r0, r1, #imm
6363 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
6366 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
6367 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
6368 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
6369 rd_val
= displaced_read_reg (regs
, dsc
, rd
);
6370 displaced_write_reg (regs
, dsc
, 0, rd_val
, CANNOT_WRITE_PC
);
6371 displaced_write_reg (regs
, dsc
, 1, rn_val
, CANNOT_WRITE_PC
);
6374 dsc
->modinsn
[0] = insn1
;
6375 dsc
->modinsn
[1] = ((insn2
& 0xf0f0) | 0x1);
6378 dsc
->cleanup
= &cleanup_alu_imm
;
6383 /* Copy/cleanup arithmetic/logic insns with register RHS. */
6386 cleanup_alu_reg (struct gdbarch
*gdbarch
,
6387 struct regcache
*regs
, struct displaced_step_closure
*dsc
)
6392 rd_val
= displaced_read_reg (regs
, dsc
, 0);
6394 for (i
= 0; i
< 3; i
++)
6395 displaced_write_reg (regs
, dsc
, i
, dsc
->tmp
[i
], CANNOT_WRITE_PC
);
6397 displaced_write_reg (regs
, dsc
, dsc
->rd
, rd_val
, ALU_WRITE_PC
);
6401 install_alu_reg (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6402 struct displaced_step_closure
*dsc
,
6403 unsigned int rd
, unsigned int rn
, unsigned int rm
)
6405 ULONGEST rd_val
, rn_val
, rm_val
;
6407 /* Instruction is of form:
6409 <op><cond> rd, [rn,] rm [, <shift>]
6413 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
6414 r0, r1, r2 <- rd, rn, rm
6415 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
6416 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
6419 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
6420 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
6421 dsc
->tmp
[2] = displaced_read_reg (regs
, dsc
, 2);
6422 rd_val
= displaced_read_reg (regs
, dsc
, rd
);
6423 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
6424 rm_val
= displaced_read_reg (regs
, dsc
, rm
);
6425 displaced_write_reg (regs
, dsc
, 0, rd_val
, CANNOT_WRITE_PC
);
6426 displaced_write_reg (regs
, dsc
, 1, rn_val
, CANNOT_WRITE_PC
);
6427 displaced_write_reg (regs
, dsc
, 2, rm_val
, CANNOT_WRITE_PC
);
6430 dsc
->cleanup
= &cleanup_alu_reg
;
6434 arm_copy_alu_reg (struct gdbarch
*gdbarch
, uint32_t insn
, struct regcache
*regs
,
6435 struct displaced_step_closure
*dsc
)
6437 unsigned int op
= bits (insn
, 21, 24);
6438 int is_mov
= (op
== 0xd);
6440 if (!insn_references_pc (insn
, 0x000ff00ful
))
6441 return arm_copy_unmodified (gdbarch
, insn
, "ALU reg", dsc
);
6443 if (debug_displaced
)
6444 fprintf_unfiltered (gdb_stdlog
, "displaced: copying reg %s insn %.8lx\n",
6445 is_mov
? "move" : "ALU", (unsigned long) insn
);
6448 dsc
->modinsn
[0] = (insn
& 0xfff00ff0) | 0x2;
6450 dsc
->modinsn
[0] = (insn
& 0xfff00ff0) | 0x10002;
6452 install_alu_reg (gdbarch
, regs
, dsc
, bits (insn
, 12, 15), bits (insn
, 16, 19),
6458 thumb_copy_alu_reg (struct gdbarch
*gdbarch
, uint16_t insn
,
6459 struct regcache
*regs
,
6460 struct displaced_step_closure
*dsc
)
6464 rm
= bits (insn
, 3, 6);
6465 rd
= (bit (insn
, 7) << 3) | bits (insn
, 0, 2);
6467 if (rd
!= ARM_PC_REGNUM
&& rm
!= ARM_PC_REGNUM
)
6468 return thumb_copy_unmodified_16bit (gdbarch
, insn
, "ALU reg", dsc
);
6470 if (debug_displaced
)
6471 fprintf_unfiltered (gdb_stdlog
, "displaced: copying ALU reg insn %.4x\n",
6472 (unsigned short) insn
);
6474 dsc
->modinsn
[0] = ((insn
& 0xff00) | 0x10);
6476 install_alu_reg (gdbarch
, regs
, dsc
, rd
, rd
, rm
);
6481 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
6484 cleanup_alu_shifted_reg (struct gdbarch
*gdbarch
,
6485 struct regcache
*regs
,
6486 struct displaced_step_closure
*dsc
)
6488 ULONGEST rd_val
= displaced_read_reg (regs
, dsc
, 0);
6491 for (i
= 0; i
< 4; i
++)
6492 displaced_write_reg (regs
, dsc
, i
, dsc
->tmp
[i
], CANNOT_WRITE_PC
);
6494 displaced_write_reg (regs
, dsc
, dsc
->rd
, rd_val
, ALU_WRITE_PC
);
6498 install_alu_shifted_reg (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6499 struct displaced_step_closure
*dsc
,
6500 unsigned int rd
, unsigned int rn
, unsigned int rm
,
6504 ULONGEST rd_val
, rn_val
, rm_val
, rs_val
;
6506 /* Instruction is of form:
6508 <op><cond> rd, [rn,] rm, <shift> rs
6512 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
6513 r0, r1, r2, r3 <- rd, rn, rm, rs
6514 Insn: <op><cond> r0, r1, r2, <shift> r3
6516 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
6520 for (i
= 0; i
< 4; i
++)
6521 dsc
->tmp
[i
] = displaced_read_reg (regs
, dsc
, i
);
6523 rd_val
= displaced_read_reg (regs
, dsc
, rd
);
6524 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
6525 rm_val
= displaced_read_reg (regs
, dsc
, rm
);
6526 rs_val
= displaced_read_reg (regs
, dsc
, rs
);
6527 displaced_write_reg (regs
, dsc
, 0, rd_val
, CANNOT_WRITE_PC
);
6528 displaced_write_reg (regs
, dsc
, 1, rn_val
, CANNOT_WRITE_PC
);
6529 displaced_write_reg (regs
, dsc
, 2, rm_val
, CANNOT_WRITE_PC
);
6530 displaced_write_reg (regs
, dsc
, 3, rs_val
, CANNOT_WRITE_PC
);
6532 dsc
->cleanup
= &cleanup_alu_shifted_reg
;
6536 arm_copy_alu_shifted_reg (struct gdbarch
*gdbarch
, uint32_t insn
,
6537 struct regcache
*regs
,
6538 struct displaced_step_closure
*dsc
)
6540 unsigned int op
= bits (insn
, 21, 24);
6541 int is_mov
= (op
== 0xd);
6542 unsigned int rd
, rn
, rm
, rs
;
6544 if (!insn_references_pc (insn
, 0x000fff0ful
))
6545 return arm_copy_unmodified (gdbarch
, insn
, "ALU shifted reg", dsc
);
6547 if (debug_displaced
)
6548 fprintf_unfiltered (gdb_stdlog
, "displaced: copying shifted reg %s insn "
6549 "%.8lx\n", is_mov
? "move" : "ALU",
6550 (unsigned long) insn
);
6552 rn
= bits (insn
, 16, 19);
6553 rm
= bits (insn
, 0, 3);
6554 rs
= bits (insn
, 8, 11);
6555 rd
= bits (insn
, 12, 15);
6558 dsc
->modinsn
[0] = (insn
& 0xfff000f0) | 0x302;
6560 dsc
->modinsn
[0] = (insn
& 0xfff000f0) | 0x10302;
6562 install_alu_shifted_reg (gdbarch
, regs
, dsc
, rd
, rn
, rm
, rs
);
6567 /* Clean up load instructions. */
6570 cleanup_load (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6571 struct displaced_step_closure
*dsc
)
6573 ULONGEST rt_val
, rt_val2
= 0, rn_val
;
6575 rt_val
= displaced_read_reg (regs
, dsc
, 0);
6576 if (dsc
->u
.ldst
.xfersize
== 8)
6577 rt_val2
= displaced_read_reg (regs
, dsc
, 1);
6578 rn_val
= displaced_read_reg (regs
, dsc
, 2);
6580 displaced_write_reg (regs
, dsc
, 0, dsc
->tmp
[0], CANNOT_WRITE_PC
);
6581 if (dsc
->u
.ldst
.xfersize
> 4)
6582 displaced_write_reg (regs
, dsc
, 1, dsc
->tmp
[1], CANNOT_WRITE_PC
);
6583 displaced_write_reg (regs
, dsc
, 2, dsc
->tmp
[2], CANNOT_WRITE_PC
);
6584 if (!dsc
->u
.ldst
.immed
)
6585 displaced_write_reg (regs
, dsc
, 3, dsc
->tmp
[3], CANNOT_WRITE_PC
);
6587 /* Handle register writeback. */
6588 if (dsc
->u
.ldst
.writeback
)
6589 displaced_write_reg (regs
, dsc
, dsc
->u
.ldst
.rn
, rn_val
, CANNOT_WRITE_PC
);
6590 /* Put result in right place. */
6591 displaced_write_reg (regs
, dsc
, dsc
->rd
, rt_val
, LOAD_WRITE_PC
);
6592 if (dsc
->u
.ldst
.xfersize
== 8)
6593 displaced_write_reg (regs
, dsc
, dsc
->rd
+ 1, rt_val2
, LOAD_WRITE_PC
);
6596 /* Clean up store instructions. */
6599 cleanup_store (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6600 struct displaced_step_closure
*dsc
)
6602 ULONGEST rn_val
= displaced_read_reg (regs
, dsc
, 2);
6604 displaced_write_reg (regs
, dsc
, 0, dsc
->tmp
[0], CANNOT_WRITE_PC
);
6605 if (dsc
->u
.ldst
.xfersize
> 4)
6606 displaced_write_reg (regs
, dsc
, 1, dsc
->tmp
[1], CANNOT_WRITE_PC
);
6607 displaced_write_reg (regs
, dsc
, 2, dsc
->tmp
[2], CANNOT_WRITE_PC
);
6608 if (!dsc
->u
.ldst
.immed
)
6609 displaced_write_reg (regs
, dsc
, 3, dsc
->tmp
[3], CANNOT_WRITE_PC
);
6610 if (!dsc
->u
.ldst
.restore_r4
)
6611 displaced_write_reg (regs
, dsc
, 4, dsc
->tmp
[4], CANNOT_WRITE_PC
);
6614 if (dsc
->u
.ldst
.writeback
)
6615 displaced_write_reg (regs
, dsc
, dsc
->u
.ldst
.rn
, rn_val
, CANNOT_WRITE_PC
);
6618 /* Copy "extra" load/store instructions. These are halfword/doubleword
6619 transfers, which have a different encoding to byte/word transfers. */
6622 arm_copy_extra_ld_st (struct gdbarch
*gdbarch
, uint32_t insn
, int unpriveleged
,
6623 struct regcache
*regs
, struct displaced_step_closure
*dsc
)
6625 unsigned int op1
= bits (insn
, 20, 24);
6626 unsigned int op2
= bits (insn
, 5, 6);
6627 unsigned int rt
= bits (insn
, 12, 15);
6628 unsigned int rn
= bits (insn
, 16, 19);
6629 unsigned int rm
= bits (insn
, 0, 3);
6630 char load
[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
6631 char bytesize
[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
6632 int immed
= (op1
& 0x4) != 0;
6634 ULONGEST rt_val
, rt_val2
= 0, rn_val
, rm_val
= 0;
6636 if (!insn_references_pc (insn
, 0x000ff00ful
))
6637 return arm_copy_unmodified (gdbarch
, insn
, "extra load/store", dsc
);
6639 if (debug_displaced
)
6640 fprintf_unfiltered (gdb_stdlog
, "displaced: copying %sextra load/store "
6641 "insn %.8lx\n", unpriveleged
? "unpriveleged " : "",
6642 (unsigned long) insn
);
6644 opcode
= ((op2
<< 2) | (op1
& 0x1) | ((op1
& 0x4) >> 1)) - 4;
6647 internal_error (__FILE__
, __LINE__
,
6648 _("copy_extra_ld_st: instruction decode error"));
6650 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
6651 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
6652 dsc
->tmp
[2] = displaced_read_reg (regs
, dsc
, 2);
6654 dsc
->tmp
[3] = displaced_read_reg (regs
, dsc
, 3);
6656 rt_val
= displaced_read_reg (regs
, dsc
, rt
);
6657 if (bytesize
[opcode
] == 8)
6658 rt_val2
= displaced_read_reg (regs
, dsc
, rt
+ 1);
6659 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
6661 rm_val
= displaced_read_reg (regs
, dsc
, rm
);
6663 displaced_write_reg (regs
, dsc
, 0, rt_val
, CANNOT_WRITE_PC
);
6664 if (bytesize
[opcode
] == 8)
6665 displaced_write_reg (regs
, dsc
, 1, rt_val2
, CANNOT_WRITE_PC
);
6666 displaced_write_reg (regs
, dsc
, 2, rn_val
, CANNOT_WRITE_PC
);
6668 displaced_write_reg (regs
, dsc
, 3, rm_val
, CANNOT_WRITE_PC
);
6671 dsc
->u
.ldst
.xfersize
= bytesize
[opcode
];
6672 dsc
->u
.ldst
.rn
= rn
;
6673 dsc
->u
.ldst
.immed
= immed
;
6674 dsc
->u
.ldst
.writeback
= bit (insn
, 24) == 0 || bit (insn
, 21) != 0;
6675 dsc
->u
.ldst
.restore_r4
= 0;
6678 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
6680 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
6681 dsc
->modinsn
[0] = (insn
& 0xfff00fff) | 0x20000;
6683 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
6685 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
6686 dsc
->modinsn
[0] = (insn
& 0xfff00ff0) | 0x20003;
6688 dsc
->cleanup
= load
[opcode
] ? &cleanup_load
: &cleanup_store
;
6693 /* Copy byte/half word/word loads and stores. */
6696 install_load_store (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6697 struct displaced_step_closure
*dsc
, int load
,
6698 int immed
, int writeback
, int size
, int usermode
,
6699 int rt
, int rm
, int rn
)
6701 ULONGEST rt_val
, rn_val
, rm_val
= 0;
6703 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
6704 dsc
->tmp
[2] = displaced_read_reg (regs
, dsc
, 2);
6706 dsc
->tmp
[3] = displaced_read_reg (regs
, dsc
, 3);
6708 dsc
->tmp
[4] = displaced_read_reg (regs
, dsc
, 4);
6710 rt_val
= displaced_read_reg (regs
, dsc
, rt
);
6711 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
6713 rm_val
= displaced_read_reg (regs
, dsc
, rm
);
6715 displaced_write_reg (regs
, dsc
, 0, rt_val
, CANNOT_WRITE_PC
);
6716 displaced_write_reg (regs
, dsc
, 2, rn_val
, CANNOT_WRITE_PC
);
6718 displaced_write_reg (regs
, dsc
, 3, rm_val
, CANNOT_WRITE_PC
);
6720 dsc
->u
.ldst
.xfersize
= size
;
6721 dsc
->u
.ldst
.rn
= rn
;
6722 dsc
->u
.ldst
.immed
= immed
;
6723 dsc
->u
.ldst
.writeback
= writeback
;
6725 /* To write PC we can do:
6727 Before this sequence of instructions:
6728 r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
6729 r2 is the Rn value got from dispalced_read_reg.
6731 Insn1: push {pc} Write address of STR instruction + offset on stack
6732 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
6733 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
6734 = addr(Insn1) + offset - addr(Insn3) - 8
6736 Insn4: add r4, r4, #8 r4 = offset - 8
6737 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
6739 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
6741 Otherwise we don't know what value to write for PC, since the offset is
6742 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
6743 of this can be found in Section "Saving from r15" in
6744 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
6746 dsc
->cleanup
= load
? &cleanup_load
: &cleanup_store
;
6751 thumb2_copy_load_literal (struct gdbarch
*gdbarch
, uint16_t insn1
,
6752 uint16_t insn2
, struct regcache
*regs
,
6753 struct displaced_step_closure
*dsc
, int size
)
6755 unsigned int u_bit
= bit (insn1
, 7);
6756 unsigned int rt
= bits (insn2
, 12, 15);
6757 int imm12
= bits (insn2
, 0, 11);
6760 if (debug_displaced
)
6761 fprintf_unfiltered (gdb_stdlog
,
6762 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
6763 (unsigned int) dsc
->insn_addr
, rt
, u_bit
? '+' : '-',
6769 /* Rewrite instruction LDR Rt imm12 into:
6771 Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12
6775 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2]. */
6778 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
6779 dsc
->tmp
[2] = displaced_read_reg (regs
, dsc
, 2);
6780 dsc
->tmp
[3] = displaced_read_reg (regs
, dsc
, 3);
6782 pc_val
= displaced_read_reg (regs
, dsc
, ARM_PC_REGNUM
);
6784 pc_val
= pc_val
& 0xfffffffc;
6786 displaced_write_reg (regs
, dsc
, 2, pc_val
, CANNOT_WRITE_PC
);
6787 displaced_write_reg (regs
, dsc
, 3, imm12
, CANNOT_WRITE_PC
);
6791 dsc
->u
.ldst
.xfersize
= size
;
6792 dsc
->u
.ldst
.immed
= 0;
6793 dsc
->u
.ldst
.writeback
= 0;
6794 dsc
->u
.ldst
.restore_r4
= 0;
6796 /* LDR R0, R2, R3 */
6797 dsc
->modinsn
[0] = 0xf852;
6798 dsc
->modinsn
[1] = 0x3;
6801 dsc
->cleanup
= &cleanup_load
;
6807 thumb2_copy_load_reg_imm (struct gdbarch
*gdbarch
, uint16_t insn1
,
6808 uint16_t insn2
, struct regcache
*regs
,
6809 struct displaced_step_closure
*dsc
,
6810 int writeback
, int immed
)
6812 unsigned int rt
= bits (insn2
, 12, 15);
6813 unsigned int rn
= bits (insn1
, 0, 3);
6814 unsigned int rm
= bits (insn2
, 0, 3); /* Only valid if !immed. */
6815 /* In LDR (register), there is also a register Rm, which is not allowed to
6816 be PC, so we don't have to check it. */
6818 if (rt
!= ARM_PC_REGNUM
&& rn
!= ARM_PC_REGNUM
)
6819 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
, "load",
6822 if (debug_displaced
)
6823 fprintf_unfiltered (gdb_stdlog
,
6824 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
6825 rt
, rn
, insn1
, insn2
);
6827 install_load_store (gdbarch
, regs
, dsc
, 1, immed
, writeback
, 4,
6830 dsc
->u
.ldst
.restore_r4
= 0;
6833 /* ldr[b]<cond> rt, [rn, #imm], etc.
6835 ldr[b]<cond> r0, [r2, #imm]. */
6837 dsc
->modinsn
[0] = (insn1
& 0xfff0) | 0x2;
6838 dsc
->modinsn
[1] = insn2
& 0x0fff;
6841 /* ldr[b]<cond> rt, [rn, rm], etc.
6843 ldr[b]<cond> r0, [r2, r3]. */
6845 dsc
->modinsn
[0] = (insn1
& 0xfff0) | 0x2;
6846 dsc
->modinsn
[1] = (insn2
& 0x0ff0) | 0x3;
6856 arm_copy_ldr_str_ldrb_strb (struct gdbarch
*gdbarch
, uint32_t insn
,
6857 struct regcache
*regs
,
6858 struct displaced_step_closure
*dsc
,
6859 int load
, int size
, int usermode
)
6861 int immed
= !bit (insn
, 25);
6862 int writeback
= (bit (insn
, 24) == 0 || bit (insn
, 21) != 0);
6863 unsigned int rt
= bits (insn
, 12, 15);
6864 unsigned int rn
= bits (insn
, 16, 19);
6865 unsigned int rm
= bits (insn
, 0, 3); /* Only valid if !immed. */
6867 if (!insn_references_pc (insn
, 0x000ff00ful
))
6868 return arm_copy_unmodified (gdbarch
, insn
, "load/store", dsc
);
6870 if (debug_displaced
)
6871 fprintf_unfiltered (gdb_stdlog
,
6872 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
6873 load
? (size
== 1 ? "ldrb" : "ldr")
6874 : (size
== 1 ? "strb" : "str"), usermode
? "t" : "",
6876 (unsigned long) insn
);
6878 install_load_store (gdbarch
, regs
, dsc
, load
, immed
, writeback
, size
,
6879 usermode
, rt
, rm
, rn
);
6881 if (load
|| rt
!= ARM_PC_REGNUM
)
6883 dsc
->u
.ldst
.restore_r4
= 0;
6886 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
6888 {ldr,str}[b]<cond> r0, [r2, #imm]. */
6889 dsc
->modinsn
[0] = (insn
& 0xfff00fff) | 0x20000;
6891 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
6893 {ldr,str}[b]<cond> r0, [r2, r3]. */
6894 dsc
->modinsn
[0] = (insn
& 0xfff00ff0) | 0x20003;
6898 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
6899 dsc
->u
.ldst
.restore_r4
= 1;
6900 dsc
->modinsn
[0] = 0xe92d8000; /* push {pc} */
6901 dsc
->modinsn
[1] = 0xe8bd0010; /* pop {r4} */
6902 dsc
->modinsn
[2] = 0xe044400f; /* sub r4, r4, pc. */
6903 dsc
->modinsn
[3] = 0xe2844008; /* add r4, r4, #8. */
6904 dsc
->modinsn
[4] = 0xe0800004; /* add r0, r0, r4. */
6908 dsc
->modinsn
[5] = (insn
& 0xfff00fff) | 0x20000;
6910 dsc
->modinsn
[5] = (insn
& 0xfff00ff0) | 0x20003;
6915 dsc
->cleanup
= load
? &cleanup_load
: &cleanup_store
;
6920 /* Cleanup LDM instructions with fully-populated register list. This is an
6921 unfortunate corner case: it's impossible to implement correctly by modifying
6922 the instruction. The issue is as follows: we have an instruction,
6926 which we must rewrite to avoid loading PC. A possible solution would be to
6927 do the load in two halves, something like (with suitable cleanup
6931 ldm[id][ab] r8!, {r0-r7}
6933 ldm[id][ab] r8, {r7-r14}
6936 but at present there's no suitable place for <temp>, since the scratch space
6937 is overwritten before the cleanup routine is called. For now, we simply
6938 emulate the instruction. */
6941 cleanup_block_load_all (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6942 struct displaced_step_closure
*dsc
)
6944 int inc
= dsc
->u
.block
.increment
;
6945 int bump_before
= dsc
->u
.block
.before
? (inc
? 4 : -4) : 0;
6946 int bump_after
= dsc
->u
.block
.before
? 0 : (inc
? 4 : -4);
6947 uint32_t regmask
= dsc
->u
.block
.regmask
;
6948 int regno
= inc
? 0 : 15;
6949 CORE_ADDR xfer_addr
= dsc
->u
.block
.xfer_addr
;
6950 int exception_return
= dsc
->u
.block
.load
&& dsc
->u
.block
.user
6951 && (regmask
& 0x8000) != 0;
6952 uint32_t status
= displaced_read_reg (regs
, dsc
, ARM_PS_REGNUM
);
6953 int do_transfer
= condition_true (dsc
->u
.block
.cond
, status
);
6954 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
6959 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
6960 sensible we can do here. Complain loudly. */
6961 if (exception_return
)
6962 error (_("Cannot single-step exception return"));
6964 /* We don't handle any stores here for now. */
6965 gdb_assert (dsc
->u
.block
.load
!= 0);
6967 if (debug_displaced
)
6968 fprintf_unfiltered (gdb_stdlog
, "displaced: emulating block transfer: "
6969 "%s %s %s\n", dsc
->u
.block
.load
? "ldm" : "stm",
6970 dsc
->u
.block
.increment
? "inc" : "dec",
6971 dsc
->u
.block
.before
? "before" : "after");
6978 while (regno
<= ARM_PC_REGNUM
&& (regmask
& (1 << regno
)) == 0)
6981 while (regno
>= 0 && (regmask
& (1 << regno
)) == 0)
6984 xfer_addr
+= bump_before
;
6986 memword
= read_memory_unsigned_integer (xfer_addr
, 4, byte_order
);
6987 displaced_write_reg (regs
, dsc
, regno
, memword
, LOAD_WRITE_PC
);
6989 xfer_addr
+= bump_after
;
6991 regmask
&= ~(1 << regno
);
6994 if (dsc
->u
.block
.writeback
)
6995 displaced_write_reg (regs
, dsc
, dsc
->u
.block
.rn
, xfer_addr
,
6999 /* Clean up an STM which included the PC in the register list. */
7002 cleanup_block_store_pc (struct gdbarch
*gdbarch
, struct regcache
*regs
,
7003 struct displaced_step_closure
*dsc
)
7005 uint32_t status
= displaced_read_reg (regs
, dsc
, ARM_PS_REGNUM
);
7006 int store_executed
= condition_true (dsc
->u
.block
.cond
, status
);
7007 CORE_ADDR pc_stored_at
, transferred_regs
= bitcount (dsc
->u
.block
.regmask
);
7008 CORE_ADDR stm_insn_addr
;
7011 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
7013 /* If condition code fails, there's nothing else to do. */
7014 if (!store_executed
)
7017 if (dsc
->u
.block
.increment
)
7019 pc_stored_at
= dsc
->u
.block
.xfer_addr
+ 4 * transferred_regs
;
7021 if (dsc
->u
.block
.before
)
7026 pc_stored_at
= dsc
->u
.block
.xfer_addr
;
7028 if (dsc
->u
.block
.before
)
7032 pc_val
= read_memory_unsigned_integer (pc_stored_at
, 4, byte_order
);
7033 stm_insn_addr
= dsc
->scratch_base
;
7034 offset
= pc_val
- stm_insn_addr
;
7036 if (debug_displaced
)
7037 fprintf_unfiltered (gdb_stdlog
, "displaced: detected PC offset %.8lx for "
7038 "STM instruction\n", offset
);
7040 /* Rewrite the stored PC to the proper value for the non-displaced original
7042 write_memory_unsigned_integer (pc_stored_at
, 4, byte_order
,
7043 dsc
->insn_addr
+ offset
);
7046 /* Clean up an LDM which includes the PC in the register list. We clumped all
7047 the registers in the transferred list into a contiguous range r0...rX (to
7048 avoid loading PC directly and losing control of the debugged program), so we
7049 must undo that here. */
7052 cleanup_block_load_pc (struct gdbarch
*gdbarch
,
7053 struct regcache
*regs
,
7054 struct displaced_step_closure
*dsc
)
7056 uint32_t status
= displaced_read_reg (regs
, dsc
, ARM_PS_REGNUM
);
7057 int load_executed
= condition_true (dsc
->u
.block
.cond
, status
);
7058 unsigned int mask
= dsc
->u
.block
.regmask
, write_reg
= ARM_PC_REGNUM
;
7059 unsigned int regs_loaded
= bitcount (mask
);
7060 unsigned int num_to_shuffle
= regs_loaded
, clobbered
;
7062 /* The method employed here will fail if the register list is fully populated
7063 (we need to avoid loading PC directly). */
7064 gdb_assert (num_to_shuffle
< 16);
7069 clobbered
= (1 << num_to_shuffle
) - 1;
7071 while (num_to_shuffle
> 0)
7073 if ((mask
& (1 << write_reg
)) != 0)
7075 unsigned int read_reg
= num_to_shuffle
- 1;
7077 if (read_reg
!= write_reg
)
7079 ULONGEST rval
= displaced_read_reg (regs
, dsc
, read_reg
);
7080 displaced_write_reg (regs
, dsc
, write_reg
, rval
, LOAD_WRITE_PC
);
7081 if (debug_displaced
)
7082 fprintf_unfiltered (gdb_stdlog
, _("displaced: LDM: move "
7083 "loaded register r%d to r%d\n"), read_reg
,
7086 else if (debug_displaced
)
7087 fprintf_unfiltered (gdb_stdlog
, _("displaced: LDM: register "
7088 "r%d already in the right place\n"),
7091 clobbered
&= ~(1 << write_reg
);
7099 /* Restore any registers we scribbled over. */
7100 for (write_reg
= 0; clobbered
!= 0; write_reg
++)
7102 if ((clobbered
& (1 << write_reg
)) != 0)
7104 displaced_write_reg (regs
, dsc
, write_reg
, dsc
->tmp
[write_reg
],
7106 if (debug_displaced
)
7107 fprintf_unfiltered (gdb_stdlog
, _("displaced: LDM: restored "
7108 "clobbered register r%d\n"), write_reg
);
7109 clobbered
&= ~(1 << write_reg
);
7113 /* Perform register writeback manually. */
7114 if (dsc
->u
.block
.writeback
)
7116 ULONGEST new_rn_val
= dsc
->u
.block
.xfer_addr
;
7118 if (dsc
->u
.block
.increment
)
7119 new_rn_val
+= regs_loaded
* 4;
7121 new_rn_val
-= regs_loaded
* 4;
7123 displaced_write_reg (regs
, dsc
, dsc
->u
.block
.rn
, new_rn_val
,
7128 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
7129 in user-level code (in particular exception return, ldm rn, {...pc}^). */
7132 arm_copy_block_xfer (struct gdbarch
*gdbarch
, uint32_t insn
,
7133 struct regcache
*regs
,
7134 struct displaced_step_closure
*dsc
)
7136 int load
= bit (insn
, 20);
7137 int user
= bit (insn
, 22);
7138 int increment
= bit (insn
, 23);
7139 int before
= bit (insn
, 24);
7140 int writeback
= bit (insn
, 21);
7141 int rn
= bits (insn
, 16, 19);
7143 /* Block transfers which don't mention PC can be run directly
7145 if (rn
!= ARM_PC_REGNUM
&& (insn
& 0x8000) == 0)
7146 return arm_copy_unmodified (gdbarch
, insn
, "ldm/stm", dsc
);
7148 if (rn
== ARM_PC_REGNUM
)
7150 warning (_("displaced: Unpredictable LDM or STM with "
7151 "base register r15"));
7152 return arm_copy_unmodified (gdbarch
, insn
, "unpredictable ldm/stm", dsc
);
7155 if (debug_displaced
)
7156 fprintf_unfiltered (gdb_stdlog
, "displaced: copying block transfer insn "
7157 "%.8lx\n", (unsigned long) insn
);
7159 dsc
->u
.block
.xfer_addr
= displaced_read_reg (regs
, dsc
, rn
);
7160 dsc
->u
.block
.rn
= rn
;
7162 dsc
->u
.block
.load
= load
;
7163 dsc
->u
.block
.user
= user
;
7164 dsc
->u
.block
.increment
= increment
;
7165 dsc
->u
.block
.before
= before
;
7166 dsc
->u
.block
.writeback
= writeback
;
7167 dsc
->u
.block
.cond
= bits (insn
, 28, 31);
7169 dsc
->u
.block
.regmask
= insn
& 0xffff;
7173 if ((insn
& 0xffff) == 0xffff)
7175 /* LDM with a fully-populated register list. This case is
7176 particularly tricky. Implement for now by fully emulating the
7177 instruction (which might not behave perfectly in all cases, but
7178 these instructions should be rare enough for that not to matter
7180 dsc
->modinsn
[0] = ARM_NOP
;
7182 dsc
->cleanup
= &cleanup_block_load_all
;
7186 /* LDM of a list of registers which includes PC. Implement by
7187 rewriting the list of registers to be transferred into a
7188 contiguous chunk r0...rX before doing the transfer, then shuffling
7189 registers into the correct places in the cleanup routine. */
7190 unsigned int regmask
= insn
& 0xffff;
7191 unsigned int num_in_list
= bitcount (regmask
), new_regmask
, bit
= 1;
7192 unsigned int to
= 0, from
= 0, i
, new_rn
;
7194 for (i
= 0; i
< num_in_list
; i
++)
7195 dsc
->tmp
[i
] = displaced_read_reg (regs
, dsc
, i
);
7197 /* Writeback makes things complicated. We need to avoid clobbering
7198 the base register with one of the registers in our modified
7199 register list, but just using a different register can't work in
7202 ldm r14!, {r0-r13,pc}
7204 which would need to be rewritten as:
7208 but that can't work, because there's no free register for N.
7210 Solve this by turning off the writeback bit, and emulating
7211 writeback manually in the cleanup routine. */
7216 new_regmask
= (1 << num_in_list
) - 1;
7218 if (debug_displaced
)
7219 fprintf_unfiltered (gdb_stdlog
, _("displaced: LDM r%d%s, "
7220 "{..., pc}: original reg list %.4x, modified "
7221 "list %.4x\n"), rn
, writeback
? "!" : "",
7222 (int) insn
& 0xffff, new_regmask
);
7224 dsc
->modinsn
[0] = (insn
& ~0xffff) | (new_regmask
& 0xffff);
7226 dsc
->cleanup
= &cleanup_block_load_pc
;
7231 /* STM of a list of registers which includes PC. Run the instruction
7232 as-is, but out of line: this will store the wrong value for the PC,
7233 so we must manually fix up the memory in the cleanup routine.
7234 Doing things this way has the advantage that we can auto-detect
7235 the offset of the PC write (which is architecture-dependent) in
7236 the cleanup routine. */
7237 dsc
->modinsn
[0] = insn
;
7239 dsc
->cleanup
= &cleanup_block_store_pc
;
7246 thumb2_copy_block_xfer (struct gdbarch
*gdbarch
, uint16_t insn1
, uint16_t insn2
,
7247 struct regcache
*regs
,
7248 struct displaced_step_closure
*dsc
)
7250 int rn
= bits (insn1
, 0, 3);
7251 int load
= bit (insn1
, 4);
7252 int writeback
= bit (insn1
, 5);
7254 /* Block transfers which don't mention PC can be run directly
7256 if (rn
!= ARM_PC_REGNUM
&& (insn2
& 0x8000) == 0)
7257 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
, "ldm/stm", dsc
);
7259 if (rn
== ARM_PC_REGNUM
)
7261 warning (_("displaced: Unpredictable LDM or STM with "
7262 "base register r15"));
7263 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7264 "unpredictable ldm/stm", dsc
);
7267 if (debug_displaced
)
7268 fprintf_unfiltered (gdb_stdlog
, "displaced: copying block transfer insn "
7269 "%.4x%.4x\n", insn1
, insn2
);
7271 /* Clear bit 13, since it should be always zero. */
7272 dsc
->u
.block
.regmask
= (insn2
& 0xdfff);
7273 dsc
->u
.block
.rn
= rn
;
7275 dsc
->u
.block
.load
= load
;
7276 dsc
->u
.block
.user
= 0;
7277 dsc
->u
.block
.increment
= bit (insn1
, 7);
7278 dsc
->u
.block
.before
= bit (insn1
, 8);
7279 dsc
->u
.block
.writeback
= writeback
;
7280 dsc
->u
.block
.cond
= INST_AL
;
7281 dsc
->u
.block
.xfer_addr
= displaced_read_reg (regs
, dsc
, rn
);
7285 if (dsc
->u
.block
.regmask
== 0xffff)
7287 /* This branch is impossible to happen. */
7292 unsigned int regmask
= dsc
->u
.block
.regmask
;
7293 unsigned int num_in_list
= bitcount (regmask
), new_regmask
, bit
= 1;
7294 unsigned int to
= 0, from
= 0, i
, new_rn
;
7296 for (i
= 0; i
< num_in_list
; i
++)
7297 dsc
->tmp
[i
] = displaced_read_reg (regs
, dsc
, i
);
7302 new_regmask
= (1 << num_in_list
) - 1;
7304 if (debug_displaced
)
7305 fprintf_unfiltered (gdb_stdlog
, _("displaced: LDM r%d%s, "
7306 "{..., pc}: original reg list %.4x, modified "
7307 "list %.4x\n"), rn
, writeback
? "!" : "",
7308 (int) dsc
->u
.block
.regmask
, new_regmask
);
7310 dsc
->modinsn
[0] = insn1
;
7311 dsc
->modinsn
[1] = (new_regmask
& 0xffff);
7314 dsc
->cleanup
= &cleanup_block_load_pc
;
7319 dsc
->modinsn
[0] = insn1
;
7320 dsc
->modinsn
[1] = insn2
;
7322 dsc
->cleanup
= &cleanup_block_store_pc
;
7327 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
7328 for Linux, where some SVC instructions must be treated specially. */
7331 cleanup_svc (struct gdbarch
*gdbarch
, struct regcache
*regs
,
7332 struct displaced_step_closure
*dsc
)
7334 CORE_ADDR resume_addr
= dsc
->insn_addr
+ dsc
->insn_size
;
7336 if (debug_displaced
)
7337 fprintf_unfiltered (gdb_stdlog
, "displaced: cleanup for svc, resume at "
7338 "%.8lx\n", (unsigned long) resume_addr
);
7340 displaced_write_reg (regs
, dsc
, ARM_PC_REGNUM
, resume_addr
, BRANCH_WRITE_PC
);
7344 /* Common copy routine for svc instruciton. */
7347 install_svc (struct gdbarch
*gdbarch
, struct regcache
*regs
,
7348 struct displaced_step_closure
*dsc
)
7350 /* Preparation: none.
7351 Insn: unmodified svc.
7352 Cleanup: pc <- insn_addr + insn_size. */
7354 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
7356 dsc
->wrote_to_pc
= 1;
7358 /* Allow OS-specific code to override SVC handling. */
7359 if (dsc
->u
.svc
.copy_svc_os
)
7360 return dsc
->u
.svc
.copy_svc_os (gdbarch
, regs
, dsc
);
7363 dsc
->cleanup
= &cleanup_svc
;
7369 arm_copy_svc (struct gdbarch
*gdbarch
, uint32_t insn
,
7370 struct regcache
*regs
, struct displaced_step_closure
*dsc
)
7373 if (debug_displaced
)
7374 fprintf_unfiltered (gdb_stdlog
, "displaced: copying svc insn %.8lx\n",
7375 (unsigned long) insn
);
7377 dsc
->modinsn
[0] = insn
;
7379 return install_svc (gdbarch
, regs
, dsc
);
7383 thumb_copy_svc (struct gdbarch
*gdbarch
, uint16_t insn
,
7384 struct regcache
*regs
, struct displaced_step_closure
*dsc
)
7387 if (debug_displaced
)
7388 fprintf_unfiltered (gdb_stdlog
, "displaced: copying svc insn %.4x\n",
7391 dsc
->modinsn
[0] = insn
;
7393 return install_svc (gdbarch
, regs
, dsc
);
7396 /* Copy undefined instructions. */
7399 arm_copy_undef (struct gdbarch
*gdbarch
, uint32_t insn
,
7400 struct displaced_step_closure
*dsc
)
7402 if (debug_displaced
)
7403 fprintf_unfiltered (gdb_stdlog
,
7404 "displaced: copying undefined insn %.8lx\n",
7405 (unsigned long) insn
);
7407 dsc
->modinsn
[0] = insn
;
7413 thumb_32bit_copy_undef (struct gdbarch
*gdbarch
, uint16_t insn1
, uint16_t insn2
,
7414 struct displaced_step_closure
*dsc
)
7417 if (debug_displaced
)
7418 fprintf_unfiltered (gdb_stdlog
, "displaced: copying undefined insn "
7419 "%.4x %.4x\n", (unsigned short) insn1
,
7420 (unsigned short) insn2
);
7422 dsc
->modinsn
[0] = insn1
;
7423 dsc
->modinsn
[1] = insn2
;
7429 /* Copy unpredictable instructions. */
7432 arm_copy_unpred (struct gdbarch
*gdbarch
, uint32_t insn
,
7433 struct displaced_step_closure
*dsc
)
7435 if (debug_displaced
)
7436 fprintf_unfiltered (gdb_stdlog
, "displaced: copying unpredictable insn "
7437 "%.8lx\n", (unsigned long) insn
);
7439 dsc
->modinsn
[0] = insn
;
/* The decode_* functions are instruction decoding helpers.  They mostly follow
   the presentation in the ARM ARM.  */

/* Decode the "miscellaneous, memory hints, and Advanced SIMD" space of the
   unconditional ARM encodings and dispatch to the appropriate copy helper.
   NOTE(review): braces and a few case labels in this region were dropped by
   extraction and have been reconstructed — confirm against upstream.  */

static int
arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
			      struct regcache *regs,
			      struct displaced_step_closure *dsc)
{
  unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
  unsigned int rn = bits (insn, 16, 19);

  if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0xe) == 0x0)
    return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
  else if (op1 == 0x10 && op2 == 0x0 && (rn & 0xe) == 0x1)
    return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
  else if ((op1 & 0x60) == 0x20)
    return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
  else if ((op1 & 0x71) == 0x40)
    return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
				dsc);
  else if ((op1 & 0x77) == 0x41)
    return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
  else if ((op1 & 0x77) == 0x45)
    return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pli.  */
  else if ((op1 & 0x77) == 0x51)
    {
      if (rn != 0xf)
	return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pld/pldw.  */
      else
	return arm_copy_unpred (gdbarch, insn, dsc);
    }
  else if ((op1 & 0x77) == 0x55)
    return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pld/pldw.  */
  else if (op1 == 0x57)
    switch (op2)
      {
      case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
      case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
      case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
      case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
      default: return arm_copy_unpred (gdbarch, insn, dsc);
      }
  else if ((op1 & 0x63) == 0x43)
    return arm_copy_unpred (gdbarch, insn, dsc);
  else if ((op2 & 0x1) == 0x0)
    switch (op1 & ~0x80)
      {
      case 0x61:
	return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
      case 0x65:
	return arm_copy_preload_reg (gdbarch, insn, regs, dsc);  /* pli reg.  */
      case 0x71: case 0x75:
	/* pld/pldw reg.  */
	return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
      case 0x63: case 0x67: case 0x73: case 0x77:
	return arm_copy_unpred (gdbarch, insn, dsc);
      default:
	return arm_copy_undef (gdbarch, insn, dsc);
      }
  else
    return arm_copy_undef (gdbarch, insn, dsc);  /* Probably unreachable.  */
}
/* Decode the ARM "unconditional" (0xF-prefixed) instruction space and
   dispatch to the appropriate displaced-stepping copy helper.
   NOTE(review): dropped case labels/braces reconstructed — confirm against
   upstream.  */

static int
arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  struct displaced_step_closure *dsc)
{
  if (bit (insn, 27) == 0)
    return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
  /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx.  */
  else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
    {
    case 0x0: case 0x2:
      return arm_copy_unmodified (gdbarch, insn, "srs", dsc);

    case 0x1: case 0x3:
      return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);

    case 0x4: case 0x5: case 0x6: case 0x7:
      return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);

    case 0x8:
      switch ((insn & 0xe00000) >> 21)
	{
	case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
	  /* stc/stc2.  */
	  return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);

	case 0x2:
	  return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);

	default:
	  return arm_copy_undef (gdbarch, insn, dsc);
	}

    case 0x9:
      {
	int rn_f = (bits (insn, 16, 19) == 0xf);

	switch ((insn & 0xe00000) >> 21)
	  {
	  case 0x1: case 0x3:
	    /* ldc/ldc2 imm (undefined for rn == pc).  */
	    return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
			: arm_copy_copro_load_store (gdbarch, insn, regs, dsc);

	  case 0x2:
	    return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);

	  case 0x4: case 0x5: case 0x6: case 0x7:
	    /* ldc/ldc2 lit (undefined for rn != pc).  */
	    return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
			: arm_copy_undef (gdbarch, insn, dsc);

	  default:
	    return arm_copy_undef (gdbarch, insn, dsc);
	  }
      }

    case 0xa:
      return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);

    case 0xb:
      if (bits (insn, 16, 19) == 0xf)
	/* ldc/ldc2 lit.  */
	return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0xc:
      if (bit (insn, 4))
	return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
      else
	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);

    case 0xd:
      if (bit (insn, 4))
	return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
      else
	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);

    default:
      return arm_copy_undef (gdbarch, insn, dsc);
    }
}
/* Decode miscellaneous instructions in dp/misc encoding space.  */

/* NOTE(review): switch skeleton (case labels and some op tests) was
   reconstructed after extraction damage — confirm against upstream.  */

static int
arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  struct displaced_step_closure *dsc)
{
  unsigned int op2 = bits (insn, 4, 6);
  unsigned int op = bits (insn, 21, 22);
  unsigned int op1 = bits (insn, 16, 19);  /* Unused; kept for reference.  */

  switch (op2)
    {
    case 0x0:
      return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);

    case 0x1:
      if (op == 0x1)  /* bx.  */
	return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
      else if (op == 0x3)
	return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x2:
      if (op == 0x1)
	/* Not really supported.  */
	return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x3:
      if (op == 0x1)
	return arm_copy_bx_blx_reg (gdbarch, insn,
				    regs, dsc);  /* blx register.  */
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x5:
      return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);

    case 0x7:
      if (op == 0x1)
	return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
      else if (op == 0x3)
	/* Not really supported.  */
	return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
      /* Fall through to the undefined case for other op values.  */

    default:
      return arm_copy_undef (gdbarch, insn, dsc);
    }
}
/* Decode the data-processing/miscellaneous ARM encoding space:
   immediate forms are dispatched by bits 20-24, register forms by the
   (op1, op2) sub-fields.
   NOTE(review): the leading "bit (insn, 25)" test and some case labels
   were reconstructed after extraction damage — confirm against upstream.  */

static int
arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
		    struct regcache *regs,
		    struct displaced_step_closure *dsc)
{
  if (bit (insn, 25))
    switch (bits (insn, 20, 24))
      {
      case 0x10:
	return arm_copy_unmodified (gdbarch, insn, "movw", dsc);

      case 0x14:
	return arm_copy_unmodified (gdbarch, insn, "movt", dsc);

      case 0x12: case 0x16:
	return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);

      default:
	return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
      }
  else
    {
      uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);

      if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
	return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
	return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
	return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
	return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
      else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
	return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
      else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
	return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
      else if (op2 == 0xb || (op2 & 0xd) == 0xd)
	/* 2nd arg means "unprivileged".  */
	return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
				     dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
/* Decode ARM word/unsigned-byte load/store encodings and forward to
   arm_copy_ldr_str_ldrb_strb with (load, size, usermode) flags.
   A is bit 25 (register offset), B is bit 4 (which makes the register
   form UNPREDICTABLE/other when set).  */

static int
arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
			     struct regcache *regs,
			     struct displaced_step_closure *dsc)
{
  int a = bit (insn, 25), b = bit (insn, 4);
  uint32_t op1 = bits (insn, 20, 24);
  int rn_f = bits (insn, 16, 19) == 0xf;

  if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
      || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
  else if ((!a && (op1 & 0x17) == 0x02)
	   || (a && (op1 & 0x17) == 0x02 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
  else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
	   || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
  else if ((!a && (op1 & 0x17) == 0x03)
	   || (a && (op1 & 0x17) == 0x03 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
  else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
	   || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
  else if ((!a && (op1 & 0x17) == 0x06)
	   || (a && (op1 & 0x17) == 0x06 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
  else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
	   || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
  else if ((!a && (op1 & 0x17) == 0x07)
	   || (a && (op1 & 0x17) == 0x07 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);

  /* Should be unreachable.  */
  return 1;
}
/* Decode the ARM "media" encoding space (parallel add/sub, pack/unpack,
   usad8/usada8, bit-field insns) and dispatch to the copy helpers.
   NOTE(review): case labels/braces reconstructed after extraction damage —
   confirm against upstream.  */

static int
arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
		  struct displaced_step_closure *dsc)
{
  switch (bits (insn, 20, 24))
    {
    case 0x00: case 0x01: case 0x02: case 0x03:
      return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);

    case 0x04: case 0x05: case 0x06: case 0x07:
      return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);

    case 0x08: case 0x09: case 0x0a: case 0x0b:
    case 0x0c: case 0x0d: case 0x0e: case 0x0f:
      return arm_copy_unmodified (gdbarch, insn,
				  "decode/pack/unpack/saturate/reverse", dsc);

    case 0x18:
      if (bits (insn, 5, 7) == 0)  /* op2.  */
	{
	  if (bits (insn, 12, 15) == 0xf)
	    return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
	  else
	    return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
	}
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1a: case 0x1b:
      if (bits (insn, 5, 6) == 0x2)  /* op2[1:0].  */
	return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1c: case 0x1d:
      if (bits (insn, 5, 6) == 0x0)  /* op2[1:0].  */
	{
	  if (bits (insn, 0, 3) == 0xf)
	    return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
	  else
	    return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
	}
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1e: case 0x1f:
      if (bits (insn, 5, 6) == 0x2)  /* op2[1:0].  */
	return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
/* Dispatch the branch/block-transfer ARM encoding space: bit 25 selects
   B/BL/BLX versus LDM/STM.
   NOTE(review): the bit-25 test was dropped by extraction and is
   reconstructed — confirm against upstream.  */

static int
arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, int32_t insn,
			struct regcache *regs,
			struct displaced_step_closure *dsc)
{
  if (bit (insn, 25))
    return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
  else
    return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
}
/* Decode VFP/Neon extension-register load/store encodings (vstm/vldm,
   vstr/vldr, mrrc/mcrr) and dispatch to the copy helpers.  */

static int
arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  struct displaced_step_closure *dsc)
{
  unsigned int opcode = bits (insn, 20, 24);

  switch (opcode)
    {
    case 0x04: case 0x05:  /* VFP/Neon mrrc/mcrr.  */
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);

    case 0x08: case 0x0a: case 0x0c: case 0x0e:
    case 0x12: case 0x16:
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);

    case 0x09: case 0x0b: case 0x0d: case 0x0f:
    case 0x13: case 0x17:
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);

    case 0x10: case 0x14: case 0x18: case 0x1c:  /* vstr.  */
    case 0x11: case 0x15: case 0x19: case 0x1d:  /* vldr.  */
      /* Note: no writeback for these instructions.  Bit 25 will always be
	 zero though (via caller), so the following works OK.  */
      return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
/* Decode shifted register instructions.  */

static int
thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
			    uint16_t insn2, struct regcache *regs,
			    struct displaced_step_closure *dsc)
{
  /* PC is only allowed to be used in instruction MOV.  */

  unsigned int op = bits (insn1, 5, 8);
  unsigned int rn = bits (insn1, 0, 3);

  if (op == 0x2 && rn == 0xf)  /* MOV */
    return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
  else
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					"dp (shift reg)", dsc);
}
7845 /* Decode extension register load/store. Exactly the same as
7846 arm_decode_ext_reg_ld_st. */
/* NOTE(review): the text below was damaged by extraction — statements are
   split across lines and some tokens (the return type, switch skeleton,
   and the string argument of the vstr case around original line 7875) are
   missing.  Preserved verbatim; restore from upstream history before
   compiling.  */
7849 thumb2_decode_ext_reg_ld_st (struct gdbarch
*gdbarch
, uint16_t insn1
,
7850 uint16_t insn2
, struct regcache
*regs
,
7851 struct displaced_step_closure
*dsc
)
/* Opcode is insn1 bits 4-8; dispatch below follows the VFP/Neon
   extension-register load/store table.  */
7853 unsigned int opcode
= bits (insn1
, 4, 8);
7857 case 0x04: case 0x05:
7858 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7859 "vfp/neon vmov", dsc
);
7861 case 0x08: case 0x0c: /* 01x00 */
7862 case 0x0a: case 0x0e: /* 01x10 */
7863 case 0x12: case 0x16: /* 10x10 */
7864 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7865 "vfp/neon vstm/vpush", dsc
);
7867 case 0x09: case 0x0d: /* 01x01 */
7868 case 0x0b: case 0x0f: /* 01x11 */
7869 case 0x13: case 0x17: /* 10x11 */
7870 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7871 "vfp/neon vldm/vpop", dsc
);
7873 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
7874 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
/* NOTE(review): the string argument for the vstr case (original line
   7875) is missing here.  */
7876 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
7877 return thumb2_copy_copro_load_store (gdbarch
, insn1
, insn2
, regs
, dsc
);
7880 /* Should be unreachable. */
7885 arm_decode_svc_copro (struct gdbarch
*gdbarch
, uint32_t insn
, CORE_ADDR to
,
7886 struct regcache
*regs
, struct displaced_step_closure
*dsc
)
7888 unsigned int op1
= bits (insn
, 20, 25);
7889 int op
= bit (insn
, 4);
7890 unsigned int coproc
= bits (insn
, 8, 11);
7891 unsigned int rn
= bits (insn
, 16, 19);
7893 if ((op1
& 0x20) == 0x00 && (op1
& 0x3a) != 0x00 && (coproc
& 0xe) == 0xa)
7894 return arm_decode_ext_reg_ld_st (gdbarch
, insn
, regs
, dsc
);
7895 else if ((op1
& 0x21) == 0x00 && (op1
& 0x3a) != 0x00
7896 && (coproc
& 0xe) != 0xa)
7898 return arm_copy_copro_load_store (gdbarch
, insn
, regs
, dsc
);
7899 else if ((op1
& 0x21) == 0x01 && (op1
& 0x3a) != 0x00
7900 && (coproc
& 0xe) != 0xa)
7901 /* ldc/ldc2 imm/lit. */
7902 return arm_copy_copro_load_store (gdbarch
, insn
, regs
, dsc
);
7903 else if ((op1
& 0x3e) == 0x00)
7904 return arm_copy_undef (gdbarch
, insn
, dsc
);
7905 else if ((op1
& 0x3e) == 0x04 && (coproc
& 0xe) == 0xa)
7906 return arm_copy_unmodified (gdbarch
, insn
, "neon 64bit xfer", dsc
);
7907 else if (op1
== 0x04 && (coproc
& 0xe) != 0xa)
7908 return arm_copy_unmodified (gdbarch
, insn
, "mcrr/mcrr2", dsc
);
7909 else if (op1
== 0x05 && (coproc
& 0xe) != 0xa)
7910 return arm_copy_unmodified (gdbarch
, insn
, "mrrc/mrrc2", dsc
);
7911 else if ((op1
& 0x30) == 0x20 && !op
)
7913 if ((coproc
& 0xe) == 0xa)
7914 return arm_copy_unmodified (gdbarch
, insn
, "vfp dataproc", dsc
);
7916 return arm_copy_unmodified (gdbarch
, insn
, "cdp/cdp2", dsc
);
7918 else if ((op1
& 0x30) == 0x20 && op
)
7919 return arm_copy_unmodified (gdbarch
, insn
, "neon 8/16/32 bit xfer", dsc
);
7920 else if ((op1
& 0x31) == 0x20 && op
&& (coproc
& 0xe) != 0xa)
7921 return arm_copy_unmodified (gdbarch
, insn
, "mcr/mcr2", dsc
);
7922 else if ((op1
& 0x31) == 0x21 && op
&& (coproc
& 0xe) != 0xa)
7923 return arm_copy_unmodified (gdbarch
, insn
, "mrc/mrc2", dsc
);
7924 else if ((op1
& 0x30) == 0x30)
7925 return arm_copy_svc (gdbarch
, insn
, regs
, dsc
);
7927 return arm_copy_undef (gdbarch
, insn
, dsc
); /* Possibly unreachable. */
/* NOTE(review): the text below was damaged by extraction — statements are
   split across lines and some tokens (the return type, the outer
   bit_9/bit_5_8 guard conditions around original lines 7941-7944, braces,
   and the STC/STC2 string argument around line 7960) are missing.
   Preserved verbatim; restore from upstream history before compiling.  */
7931 thumb2_decode_svc_copro (struct gdbarch
*gdbarch
, uint16_t insn1
,
7932 uint16_t insn2
, struct regcache
*regs
,
7933 struct displaced_step_closure
*dsc
)
/* Sub-fields of the first halfword used to classify the coprocessor
   encoding; coproc comes from insn2 bits 8-11.  */
7935 unsigned int coproc
= bits (insn2
, 8, 11);
7936 unsigned int op1
= bits (insn1
, 4, 9);
7937 unsigned int bit_5_8
= bits (insn1
, 5, 8);
7938 unsigned int bit_9
= bit (insn1
, 9);
7939 unsigned int bit_4
= bit (insn1
, 4);
7940 unsigned int rn
= bits (insn1
, 0, 3);
7945 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7946 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
7948 else if (bit_5_8
== 0) /* UNDEFINED. */
7949 return thumb_32bit_copy_undef (gdbarch
, insn1
, insn2
, dsc
);
7952 /*coproc is 101x. SIMD/VFP, ext registers load/store. */
7953 if ((coproc
& 0xe) == 0xa)
7954 return thumb2_decode_ext_reg_ld_st (gdbarch
, insn1
, insn2
, regs
,
7956 else /* coproc is not 101x. */
7958 if (bit_4
== 0) /* STC/STC2. */
7959 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7961 else /* LDC/LDC2 {literal, immeidate}. */
7962 return thumb2_copy_copro_load_store (gdbarch
, insn1
, insn2
,
7968 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
, "coproc", dsc
);
7974 install_pc_relative (struct gdbarch
*gdbarch
, struct regcache
*regs
,
7975 struct displaced_step_closure
*dsc
, int rd
)
7981 Preparation: Rd <- PC
7987 int val
= displaced_read_reg (regs
, dsc
, ARM_PC_REGNUM
);
7988 displaced_write_reg (regs
, dsc
, rd
, val
, CANNOT_WRITE_PC
);
7992 thumb_copy_pc_relative_16bit (struct gdbarch
*gdbarch
, struct regcache
*regs
,
7993 struct displaced_step_closure
*dsc
,
7994 int rd
, unsigned int imm
)
7997 /* Encoding T2: ADDS Rd, #imm */
7998 dsc
->modinsn
[0] = (0x3000 | (rd
<< 8) | imm
);
8000 install_pc_relative (gdbarch
, regs
, dsc
, rd
);
8006 thumb_decode_pc_relative_16bit (struct gdbarch
*gdbarch
, uint16_t insn
,
8007 struct regcache
*regs
,
8008 struct displaced_step_closure
*dsc
)
8010 unsigned int rd
= bits (insn
, 8, 10);
8011 unsigned int imm8
= bits (insn
, 0, 7);
8013 if (debug_displaced
)
8014 fprintf_unfiltered (gdb_stdlog
,
8015 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
8018 return thumb_copy_pc_relative_16bit (gdbarch
, regs
, dsc
, rd
, imm8
);
8022 thumb_copy_pc_relative_32bit (struct gdbarch
*gdbarch
, uint16_t insn1
,
8023 uint16_t insn2
, struct regcache
*regs
,
8024 struct displaced_step_closure
*dsc
)
8026 unsigned int rd
= bits (insn2
, 8, 11);
8027 /* Since immediate has the same encoding in ADR ADD and SUB, so we simply
8028 extract raw immediate encoding rather than computing immediate. When
8029 generating ADD or SUB instruction, we can simply perform OR operation to
8030 set immediate into ADD. */
8031 unsigned int imm_3_8
= insn2
& 0x70ff;
8032 unsigned int imm_i
= insn1
& 0x0400; /* Clear all bits except bit 10. */
8034 if (debug_displaced
)
8035 fprintf_unfiltered (gdb_stdlog
,
8036 "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
8037 rd
, imm_i
, imm_3_8
, insn1
, insn2
);
8039 if (bit (insn1
, 7)) /* Encoding T2 */
8041 /* Encoding T3: SUB Rd, Rd, #imm */
8042 dsc
->modinsn
[0] = (0xf1a0 | rd
| imm_i
);
8043 dsc
->modinsn
[1] = ((rd
<< 8) | imm_3_8
);
8045 else /* Encoding T3 */
8047 /* Encoding T3: ADD Rd, Rd, #imm */
8048 dsc
->modinsn
[0] = (0xf100 | rd
| imm_i
);
8049 dsc
->modinsn
[1] = ((rd
<< 8) | imm_3_8
);
8053 install_pc_relative (gdbarch
, regs
, dsc
, rd
);
8059 thumb_copy_16bit_ldr_literal (struct gdbarch
*gdbarch
, unsigned short insn1
,
8060 struct regcache
*regs
,
8061 struct displaced_step_closure
*dsc
)
8063 unsigned int rt
= bits (insn1
, 8, 10);
8065 int imm8
= (bits (insn1
, 0, 7) << 2);
8066 CORE_ADDR from
= dsc
->insn_addr
;
8072 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
8074 Insn: LDR R0, [R2, R3];
8075 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
8077 if (debug_displaced
)
8078 fprintf_unfiltered (gdb_stdlog
,
8079 "displaced: copying thumb ldr r%d [pc #%d]\n"
8082 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
8083 dsc
->tmp
[2] = displaced_read_reg (regs
, dsc
, 2);
8084 dsc
->tmp
[3] = displaced_read_reg (regs
, dsc
, 3);
8085 pc
= displaced_read_reg (regs
, dsc
, ARM_PC_REGNUM
);
8086 /* The assembler calculates the required value of the offset from the
8087 Align(PC,4) value of this instruction to the label. */
8088 pc
= pc
& 0xfffffffc;
8090 displaced_write_reg (regs
, dsc
, 2, pc
, CANNOT_WRITE_PC
);
8091 displaced_write_reg (regs
, dsc
, 3, imm8
, CANNOT_WRITE_PC
);
8094 dsc
->u
.ldst
.xfersize
= 4;
8096 dsc
->u
.ldst
.immed
= 0;
8097 dsc
->u
.ldst
.writeback
= 0;
8098 dsc
->u
.ldst
.restore_r4
= 0;
8100 dsc
->modinsn
[0] = 0x58d0; /* ldr r0, [r2, r3]*/
8102 dsc
->cleanup
= &cleanup_load
;
8107 /* Copy Thumb cbnz/cbz insruction. */
8110 thumb_copy_cbnz_cbz (struct gdbarch
*gdbarch
, uint16_t insn1
,
8111 struct regcache
*regs
,
8112 struct displaced_step_closure
*dsc
)
8114 int non_zero
= bit (insn1
, 11);
8115 unsigned int imm5
= (bit (insn1
, 9) << 6) | (bits (insn1
, 3, 7) << 1);
8116 CORE_ADDR from
= dsc
->insn_addr
;
8117 int rn
= bits (insn1
, 0, 2);
8118 int rn_val
= displaced_read_reg (regs
, dsc
, rn
);
8120 dsc
->u
.branch
.cond
= (rn_val
&& non_zero
) || (!rn_val
&& !non_zero
);
8121 /* CBNZ and CBZ do not affect the condition flags. If condition is true,
8122 set it INST_AL, so cleanup_branch will know branch is taken, otherwise,
8123 condition is false, let it be, cleanup_branch will do nothing. */
8124 if (dsc
->u
.branch
.cond
)
8126 dsc
->u
.branch
.cond
= INST_AL
;
8127 dsc
->u
.branch
.dest
= from
+ 4 + imm5
;
8130 dsc
->u
.branch
.dest
= from
+ 2;
8132 dsc
->u
.branch
.link
= 0;
8133 dsc
->u
.branch
.exchange
= 0;
8135 if (debug_displaced
)
8136 fprintf_unfiltered (gdb_stdlog
, "displaced: copying %s [r%d = 0x%x]"
8137 " insn %.4x to %.8lx\n", non_zero
? "cbnz" : "cbz",
8138 rn
, rn_val
, insn1
, dsc
->u
.branch
.dest
);
8140 dsc
->modinsn
[0] = THUMB_NOP
;
8142 dsc
->cleanup
= &cleanup_branch
;
8146 /* Copy Table Branch Byte/Halfword */
8148 thumb2_copy_table_branch (struct gdbarch
*gdbarch
, uint16_t insn1
,
8149 uint16_t insn2
, struct regcache
*regs
,
8150 struct displaced_step_closure
*dsc
)
8152 ULONGEST rn_val
, rm_val
;
8153 int is_tbh
= bit (insn2
, 4);
8154 CORE_ADDR halfwords
= 0;
8155 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
8157 rn_val
= displaced_read_reg (regs
, dsc
, bits (insn1
, 0, 3));
8158 rm_val
= displaced_read_reg (regs
, dsc
, bits (insn2
, 0, 3));
8164 target_read_memory (rn_val
+ 2 * rm_val
, buf
, 2);
8165 halfwords
= extract_unsigned_integer (buf
, 2, byte_order
);
8171 target_read_memory (rn_val
+ rm_val
, buf
, 1);
8172 halfwords
= extract_unsigned_integer (buf
, 1, byte_order
);
8175 if (debug_displaced
)
8176 fprintf_unfiltered (gdb_stdlog
, "displaced: %s base 0x%x offset 0x%x"
8177 " offset 0x%x\n", is_tbh
? "tbh" : "tbb",
8178 (unsigned int) rn_val
, (unsigned int) rm_val
,
8179 (unsigned int) halfwords
);
8181 dsc
->u
.branch
.cond
= INST_AL
;
8182 dsc
->u
.branch
.link
= 0;
8183 dsc
->u
.branch
.exchange
= 0;
8184 dsc
->u
.branch
.dest
= dsc
->insn_addr
+ 4 + 2 * halfwords
;
8186 dsc
->cleanup
= &cleanup_branch
;
8192 cleanup_pop_pc_16bit_all (struct gdbarch
*gdbarch
, struct regcache
*regs
,
8193 struct displaced_step_closure
*dsc
)
8196 int val
= displaced_read_reg (regs
, dsc
, 7);
8197 displaced_write_reg (regs
, dsc
, ARM_PC_REGNUM
, val
, BX_WRITE_PC
);
8200 val
= displaced_read_reg (regs
, dsc
, 8);
8201 displaced_write_reg (regs
, dsc
, 7, val
, CANNOT_WRITE_PC
);
8204 displaced_write_reg (regs
, dsc
, 8, dsc
->tmp
[0], CANNOT_WRITE_PC
);
8209 thumb_copy_pop_pc_16bit (struct gdbarch
*gdbarch
, unsigned short insn1
,
8210 struct regcache
*regs
,
8211 struct displaced_step_closure
*dsc
)
8213 dsc
->u
.block
.regmask
= insn1
& 0x00ff;
8215 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
8218 (1) register list is full, that is, r0-r7 are used.
8219 Prepare: tmp[0] <- r8
8221 POP {r0, r1, ...., r6, r7}; remove PC from reglist
8222 MOV r8, r7; Move value of r7 to r8;
8223 POP {r7}; Store PC value into r7.
8225 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
8227 (2) register list is not full, supposing there are N registers in
8228 register list (except PC, 0 <= N <= 7).
8229 Prepare: for each i, 0 - N, tmp[i] <- ri.
8231 POP {r0, r1, ...., rN};
8233 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
8234 from tmp[] properly.
8236 if (debug_displaced
)
8237 fprintf_unfiltered (gdb_stdlog
,
8238 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
8239 dsc
->u
.block
.regmask
, insn1
);
8241 if (dsc
->u
.block
.regmask
== 0xff)
8243 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 8);
8245 dsc
->modinsn
[0] = (insn1
& 0xfeff); /* POP {r0,r1,...,r6, r7} */
8246 dsc
->modinsn
[1] = 0x46b8; /* MOV r8, r7 */
8247 dsc
->modinsn
[2] = 0xbc80; /* POP {r7} */
8250 dsc
->cleanup
= &cleanup_pop_pc_16bit_all
;
8254 unsigned int num_in_list
= bitcount (dsc
->u
.block
.regmask
);
8255 unsigned int new_regmask
, bit
= 1;
8256 unsigned int to
= 0, from
= 0, i
, new_rn
;
8258 for (i
= 0; i
< num_in_list
+ 1; i
++)
8259 dsc
->tmp
[i
] = displaced_read_reg (regs
, dsc
, i
);
8261 new_regmask
= (1 << (num_in_list
+ 1)) - 1;
8263 if (debug_displaced
)
8264 fprintf_unfiltered (gdb_stdlog
, _("displaced: POP "
8265 "{..., pc}: original reg list %.4x,"
8266 " modified list %.4x\n"),
8267 (int) dsc
->u
.block
.regmask
, new_regmask
);
8269 dsc
->u
.block
.regmask
|= 0x8000;
8270 dsc
->u
.block
.writeback
= 0;
8271 dsc
->u
.block
.cond
= INST_AL
;
8273 dsc
->modinsn
[0] = (insn1
& ~0x1ff) | (new_regmask
& 0xff);
8275 dsc
->cleanup
= &cleanup_block_load_pc
;
8282 thumb_process_displaced_16bit_insn (struct gdbarch
*gdbarch
, uint16_t insn1
,
8283 struct regcache
*regs
,
8284 struct displaced_step_closure
*dsc
)
8286 unsigned short op_bit_12_15
= bits (insn1
, 12, 15);
8287 unsigned short op_bit_10_11
= bits (insn1
, 10, 11);
8290 /* 16-bit thumb instructions. */
8291 switch (op_bit_12_15
)
8293 /* Shift (imme), add, subtract, move and compare. */
8294 case 0: case 1: case 2: case 3:
8295 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
,
8296 "shift/add/sub/mov/cmp",
8300 switch (op_bit_10_11
)
8302 case 0: /* Data-processing */
8303 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
,
8307 case 1: /* Special data instructions and branch and exchange. */
8309 unsigned short op
= bits (insn1
, 7, 9);
8310 if (op
== 6 || op
== 7) /* BX or BLX */
8311 err
= thumb_copy_bx_blx_reg (gdbarch
, insn1
, regs
, dsc
);
8312 else if (bits (insn1
, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
8313 err
= thumb_copy_alu_reg (gdbarch
, insn1
, regs
, dsc
);
8315 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
, "special data",
8319 default: /* LDR (literal) */
8320 err
= thumb_copy_16bit_ldr_literal (gdbarch
, insn1
, regs
, dsc
);
8323 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
8324 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
, "ldr/str", dsc
);
8327 if (op_bit_10_11
< 2) /* Generate PC-relative address */
8328 err
= thumb_decode_pc_relative_16bit (gdbarch
, insn1
, regs
, dsc
);
8329 else /* Generate SP-relative address */
8330 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
, "sp-relative", dsc
);
8332 case 11: /* Misc 16-bit instructions */
8334 switch (bits (insn1
, 8, 11))
8336 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
8337 err
= thumb_copy_cbnz_cbz (gdbarch
, insn1
, regs
, dsc
);
8339 case 12: case 13: /* POP */
8340 if (bit (insn1
, 8)) /* PC is in register list. */
8341 err
= thumb_copy_pop_pc_16bit (gdbarch
, insn1
, regs
, dsc
);
8343 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
, "pop", dsc
);
8345 case 15: /* If-Then, and hints */
8346 if (bits (insn1
, 0, 3))
8347 /* If-Then makes up to four following instructions conditional.
8348 IT instruction itself is not conditional, so handle it as a
8349 common unmodified instruction. */
8350 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
, "If-Then",
8353 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
, "hints", dsc
);
8356 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
, "misc", dsc
);
8361 if (op_bit_10_11
< 2) /* Store multiple registers */
8362 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
, "stm", dsc
);
8363 else /* Load multiple registers */
8364 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
, "ldm", dsc
);
8366 case 13: /* Conditional branch and supervisor call */
8367 if (bits (insn1
, 9, 11) != 7) /* conditional branch */
8368 err
= thumb_copy_b (gdbarch
, insn1
, dsc
);
8370 err
= thumb_copy_svc (gdbarch
, insn1
, regs
, dsc
);
8372 case 14: /* Unconditional branch */
8373 err
= thumb_copy_b (gdbarch
, insn1
, dsc
);
8380 internal_error (__FILE__
, __LINE__
,
8381 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
8385 decode_thumb_32bit_ld_mem_hints (struct gdbarch
*gdbarch
,
8386 uint16_t insn1
, uint16_t insn2
,
8387 struct regcache
*regs
,
8388 struct displaced_step_closure
*dsc
)
8390 int rt
= bits (insn2
, 12, 15);
8391 int rn
= bits (insn1
, 0, 3);
8392 int op1
= bits (insn1
, 7, 8);
8395 switch (bits (insn1
, 5, 6))
8397 case 0: /* Load byte and memory hints */
8398 if (rt
== 0xf) /* PLD/PLI */
8401 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
8402 return thumb2_copy_preload (gdbarch
, insn1
, insn2
, regs
, dsc
);
8404 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
8409 if (rn
== 0xf) /* LDRB/LDRSB (literal) */
8410 return thumb2_copy_load_literal (gdbarch
, insn1
, insn2
, regs
, dsc
,
8413 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
8414 "ldrb{reg, immediate}/ldrbt",
8419 case 1: /* Load halfword and memory hints. */
8420 if (rt
== 0xf) /* PLD{W} and Unalloc memory hint. */
8421 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
8422 "pld/unalloc memhint", dsc
);
8426 return thumb2_copy_load_literal (gdbarch
, insn1
, insn2
, regs
, dsc
,
8429 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
8433 case 2: /* Load word */
8435 int insn2_bit_8_11
= bits (insn2
, 8, 11);
8438 return thumb2_copy_load_literal (gdbarch
, insn1
, insn2
, regs
, dsc
, 4);
8439 else if (op1
== 0x1) /* Encoding T3 */
8440 return thumb2_copy_load_reg_imm (gdbarch
, insn1
, insn2
, regs
, dsc
,
8442 else /* op1 == 0x0 */
8444 if (insn2_bit_8_11
== 0xc || (insn2_bit_8_11
& 0x9) == 0x9)
8445 /* LDR (immediate) */
8446 return thumb2_copy_load_reg_imm (gdbarch
, insn1
, insn2
, regs
,
8447 dsc
, bit (insn2
, 8), 1);
8448 else if (insn2_bit_8_11
== 0xe) /* LDRT */
8449 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
8452 /* LDR (register) */
8453 return thumb2_copy_load_reg_imm (gdbarch
, insn1
, insn2
, regs
,
8459 return thumb_32bit_copy_undef (gdbarch
, insn1
, insn2
, dsc
);
8466 thumb_process_displaced_32bit_insn (struct gdbarch
*gdbarch
, uint16_t insn1
,
8467 uint16_t insn2
, struct regcache
*regs
,
8468 struct displaced_step_closure
*dsc
)
8471 unsigned short op
= bit (insn2
, 15);
8472 unsigned int op1
= bits (insn1
, 11, 12);
8478 switch (bits (insn1
, 9, 10))
8483 /* Load/store {dual, execlusive}, table branch. */
8484 if (bits (insn1
, 7, 8) == 1 && bits (insn1
, 4, 5) == 1
8485 && bits (insn2
, 5, 7) == 0)
8486 err
= thumb2_copy_table_branch (gdbarch
, insn1
, insn2
, regs
,
8489 /* PC is not allowed to use in load/store {dual, exclusive}
8491 err
= thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
8492 "load/store dual/ex", dsc
);
8494 else /* load/store multiple */
8496 switch (bits (insn1
, 7, 8))
8498 case 0: case 3: /* SRS, RFE */
8499 err
= thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
8502 case 1: case 2: /* LDM/STM/PUSH/POP */
8503 err
= thumb2_copy_block_xfer (gdbarch
, insn1
, insn2
, regs
, dsc
);
8510 /* Data-processing (shift register). */
8511 err
= thumb2_decode_dp_shift_reg (gdbarch
, insn1
, insn2
, regs
,
8514 default: /* Coprocessor instructions. */
8515 err
= thumb2_decode_svc_copro (gdbarch
, insn1
, insn2
, regs
, dsc
);
8520 case 2: /* op1 = 2 */
8521 if (op
) /* Branch and misc control. */
8523 if (bit (insn2
, 14) /* BLX/BL */
8524 || bit (insn2
, 12) /* Unconditional branch */
8525 || (bits (insn1
, 7, 9) != 0x7)) /* Conditional branch */
8526 err
= thumb2_copy_b_bl_blx (gdbarch
, insn1
, insn2
, regs
, dsc
);
8528 err
= thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
8533 if (bit (insn1
, 9)) /* Data processing (plain binary imm). */
8535 int op
= bits (insn1
, 4, 8);
8536 int rn
= bits (insn1
, 0, 3);
8537 if ((op
== 0 || op
== 0xa) && rn
== 0xf)
8538 err
= thumb_copy_pc_relative_32bit (gdbarch
, insn1
, insn2
,
8541 err
= thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
8544 else /* Data processing (modified immeidate) */
8545 err
= thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
8549 case 3: /* op1 = 3 */
8550 switch (bits (insn1
, 9, 10))
8554 err
= decode_thumb_32bit_ld_mem_hints (gdbarch
, insn1
, insn2
,
8556 else /* NEON Load/Store and Store single data item */
8557 err
= thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
8558 "neon elt/struct load/store",
8561 case 1: /* op1 = 3, bits (9, 10) == 1 */
8562 switch (bits (insn1
, 7, 8))
8564 case 0: case 1: /* Data processing (register) */
8565 err
= thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
8568 case 2: /* Multiply and absolute difference */
8569 err
= thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
8570 "mul/mua/diff", dsc
);
8572 case 3: /* Long multiply and divide */
8573 err
= thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
8578 default: /* Coprocessor instructions */
8579 err
= thumb2_decode_svc_copro (gdbarch
, insn1
, insn2
, regs
, dsc
);
8588 internal_error (__FILE__
, __LINE__
,
8589 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
8594 thumb_process_displaced_insn (struct gdbarch
*gdbarch
, CORE_ADDR from
,
8595 CORE_ADDR to
, struct regcache
*regs
,
8596 struct displaced_step_closure
*dsc
)
8598 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
8600 = read_memory_unsigned_integer (from
, 2, byte_order_for_code
);
8602 if (debug_displaced
)
8603 fprintf_unfiltered (gdb_stdlog
, "displaced: process thumb insn %.4x "
8604 "at %.8lx\n", insn1
, (unsigned long) from
);
8607 dsc
->insn_size
= thumb_insn_size (insn1
);
8608 if (thumb_insn_size (insn1
) == 4)
8611 = read_memory_unsigned_integer (from
+ 2, 2, byte_order_for_code
);
8612 thumb_process_displaced_32bit_insn (gdbarch
, insn1
, insn2
, regs
, dsc
);
8615 thumb_process_displaced_16bit_insn (gdbarch
, insn1
, regs
, dsc
);
8619 arm_process_displaced_insn (struct gdbarch
*gdbarch
, CORE_ADDR from
,
8620 CORE_ADDR to
, struct regcache
*regs
,
8621 struct displaced_step_closure
*dsc
)
8624 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
8627 /* Most displaced instructions use a 1-instruction scratch space, so set this
8628 here and override below if/when necessary. */
8630 dsc
->insn_addr
= from
;
8631 dsc
->scratch_base
= to
;
8632 dsc
->cleanup
= NULL
;
8633 dsc
->wrote_to_pc
= 0;
8635 if (!displaced_in_arm_mode (regs
))
8636 return thumb_process_displaced_insn (gdbarch
, from
, to
, regs
, dsc
);
8640 insn
= read_memory_unsigned_integer (from
, 4, byte_order_for_code
);
8641 if (debug_displaced
)
8642 fprintf_unfiltered (gdb_stdlog
, "displaced: stepping insn %.8lx "
8643 "at %.8lx\n", (unsigned long) insn
,
8644 (unsigned long) from
);
8646 if ((insn
& 0xf0000000) == 0xf0000000)
8647 err
= arm_decode_unconditional (gdbarch
, insn
, regs
, dsc
);
8648 else switch (((insn
& 0x10) >> 4) | ((insn
& 0xe000000) >> 24))
8650 case 0x0: case 0x1: case 0x2: case 0x3:
8651 err
= arm_decode_dp_misc (gdbarch
, insn
, regs
, dsc
);
8654 case 0x4: case 0x5: case 0x6:
8655 err
= arm_decode_ld_st_word_ubyte (gdbarch
, insn
, regs
, dsc
);
8659 err
= arm_decode_media (gdbarch
, insn
, dsc
);
8662 case 0x8: case 0x9: case 0xa: case 0xb:
8663 err
= arm_decode_b_bl_ldmstm (gdbarch
, insn
, regs
, dsc
);
8666 case 0xc: case 0xd: case 0xe: case 0xf:
8667 err
= arm_decode_svc_copro (gdbarch
, insn
, to
, regs
, dsc
);
8672 internal_error (__FILE__
, __LINE__
,
8673 _("arm_process_displaced_insn: Instruction decode error"));
8676 /* Actually set up the scratch space for a displaced instruction. */
8679 arm_displaced_init_closure (struct gdbarch
*gdbarch
, CORE_ADDR from
,
8680 CORE_ADDR to
, struct displaced_step_closure
*dsc
)
8682 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
8683 unsigned int i
, len
, offset
;
8684 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
8685 int size
= dsc
->is_thumb
? 2 : 4;
8686 const gdb_byte
*bkp_insn
;
8689 /* Poke modified instruction(s). */
8690 for (i
= 0; i
< dsc
->numinsns
; i
++)
8692 if (debug_displaced
)
8694 fprintf_unfiltered (gdb_stdlog
, "displaced: writing insn ");
8696 fprintf_unfiltered (gdb_stdlog
, "%.8lx",
8699 fprintf_unfiltered (gdb_stdlog
, "%.4x",
8700 (unsigned short)dsc
->modinsn
[i
]);
8702 fprintf_unfiltered (gdb_stdlog
, " at %.8lx\n",
8703 (unsigned long) to
+ offset
);
8706 write_memory_unsigned_integer (to
+ offset
, size
,
8707 byte_order_for_code
,
8712 /* Choose the correct breakpoint instruction. */
8715 bkp_insn
= tdep
->thumb_breakpoint
;
8716 len
= tdep
->thumb_breakpoint_size
;
8720 bkp_insn
= tdep
->arm_breakpoint
;
8721 len
= tdep
->arm_breakpoint_size
;
8724 /* Put breakpoint afterwards. */
8725 write_memory (to
+ offset
, bkp_insn
, len
);
8727 if (debug_displaced
)
8728 fprintf_unfiltered (gdb_stdlog
, "displaced: copy %s->%s: ",
8729 paddress (gdbarch
, from
), paddress (gdbarch
, to
));
8732 /* Entry point for copying an instruction into scratch space for displaced
8735 struct displaced_step_closure
*
8736 arm_displaced_step_copy_insn (struct gdbarch
*gdbarch
,
8737 CORE_ADDR from
, CORE_ADDR to
,
8738 struct regcache
*regs
)
8740 struct displaced_step_closure
*dsc
8741 = xmalloc (sizeof (struct displaced_step_closure
));
8742 arm_process_displaced_insn (gdbarch
, from
, to
, regs
, dsc
);
8743 arm_displaced_init_closure (gdbarch
, from
, to
, dsc
);
8748 /* Entry point for cleaning things up after a displaced instruction has been
8752 arm_displaced_step_fixup (struct gdbarch
*gdbarch
,
8753 struct displaced_step_closure
*dsc
,
8754 CORE_ADDR from
, CORE_ADDR to
,
8755 struct regcache
*regs
)
8758 dsc
->cleanup (gdbarch
, regs
, dsc
);
8760 if (!dsc
->wrote_to_pc
)
8761 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
,
8762 dsc
->insn_addr
+ dsc
->insn_size
);
8766 #include "bfd-in2.h"
8767 #include "libcoff.h"
8770 gdb_print_insn_arm (bfd_vma memaddr
, disassemble_info
*info
)
8772 struct gdbarch
*gdbarch
= info
->application_data
;
8774 if (arm_pc_is_thumb (gdbarch
, memaddr
))
8776 static asymbol
*asym
;
8777 static combined_entry_type ce
;
8778 static struct coff_symbol_struct csym
;
8779 static struct bfd fake_bfd
;
8780 static bfd_target fake_target
;
8782 if (csym
.native
== NULL
)
8784 /* Create a fake symbol vector containing a Thumb symbol.
8785 This is solely so that the code in print_insn_little_arm()
8786 and print_insn_big_arm() in opcodes/arm-dis.c will detect
8787 the presence of a Thumb symbol and switch to decoding
8788 Thumb instructions. */
8790 fake_target
.flavour
= bfd_target_coff_flavour
;
8791 fake_bfd
.xvec
= &fake_target
;
8792 ce
.u
.syment
.n_sclass
= C_THUMBEXTFUNC
;
8794 csym
.symbol
.the_bfd
= &fake_bfd
;
8795 csym
.symbol
.name
= "fake";
8796 asym
= (asymbol
*) & csym
;
8799 memaddr
= UNMAKE_THUMB_ADDR (memaddr
);
8800 info
->symbols
= &asym
;
8803 info
->symbols
= NULL
;
8805 if (info
->endian
== BFD_ENDIAN_BIG
)
8806 return print_insn_big_arm (memaddr
, info
);
8808 return print_insn_little_arm (memaddr
, info
);
8811 /* The following define instruction sequences that will cause ARM
8812 cpu's to take an undefined instruction trap. These are used to
8813 signal a breakpoint to GDB.
8815 The newer ARMv4T cpu's are capable of operating in ARM or Thumb
8816 modes. A different instruction is required for each mode. The ARM
8817 cpu's can also be big or little endian. Thus four different
8818 instructions are needed to support all cases.
8820 Note: ARMv4 defines several new instructions that will take the
8821 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
8822 not in fact add the new instructions. The new undefined
8823 instructions in ARMv4 are all instructions that had no defined
8824 behaviour in earlier chips. There is no guarantee that they will
8825 raise an exception, but may be treated as NOP's. In practice, it
8826 may only safe to rely on instructions matching:
8828 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
8829 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
8830 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
8832 Even this may only true if the condition predicate is true. The
8833 following use a condition predicate of ALWAYS so it is always TRUE.
8835 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
8836 and NetBSD all use a software interrupt rather than an undefined
8837 instruction to force a trap. This can be handled by by the
8838 abi-specific code during establishment of the gdbarch vector. */
8840 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
8841 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
8842 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
8843 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
8845 static const gdb_byte arm_default_arm_le_breakpoint
[] = ARM_LE_BREAKPOINT
;
8846 static const gdb_byte arm_default_arm_be_breakpoint
[] = ARM_BE_BREAKPOINT
;
8847 static const gdb_byte arm_default_thumb_le_breakpoint
[] = THUMB_LE_BREAKPOINT
;
8848 static const gdb_byte arm_default_thumb_be_breakpoint
[] = THUMB_BE_BREAKPOINT
;
8850 /* Determine the type and size of breakpoint to insert at PCPTR. Uses
8851 the program counter value to determine whether a 16-bit or 32-bit
8852 breakpoint should be used. It returns a pointer to a string of
8853 bytes that encode a breakpoint instruction, stores the length of
8854 the string to *lenptr, and adjusts the program counter (if
8855 necessary) to point to the actual memory location where the
8856 breakpoint should be inserted. */
8858 static const unsigned char *
8859 arm_breakpoint_from_pc (struct gdbarch
*gdbarch
, CORE_ADDR
*pcptr
, int *lenptr
)
8861 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
8862 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
8864 if (arm_pc_is_thumb (gdbarch
, *pcptr
))
8866 *pcptr
= UNMAKE_THUMB_ADDR (*pcptr
);
8868 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
8869 check whether we are replacing a 32-bit instruction. */
8870 if (tdep
->thumb2_breakpoint
!= NULL
)
8873 if (target_read_memory (*pcptr
, buf
, 2) == 0)
8875 unsigned short inst1
;
8876 inst1
= extract_unsigned_integer (buf
, 2, byte_order_for_code
);
8877 if (thumb_insn_size (inst1
) == 4)
8879 *lenptr
= tdep
->thumb2_breakpoint_size
;
8880 return tdep
->thumb2_breakpoint
;
8885 *lenptr
= tdep
->thumb_breakpoint_size
;
8886 return tdep
->thumb_breakpoint
;
8890 *lenptr
= tdep
->arm_breakpoint_size
;
8891 return tdep
->arm_breakpoint
;
8896 arm_remote_breakpoint_from_pc (struct gdbarch
*gdbarch
, CORE_ADDR
*pcptr
,
8899 arm_breakpoint_from_pc (gdbarch
, pcptr
, kindptr
);
8901 if (arm_pc_is_thumb (gdbarch
, *pcptr
) && *kindptr
== 4)
8902 /* The documented magic value for a 32-bit Thumb-2 breakpoint, so
8903 that this is not confused with a 32-bit ARM breakpoint. */
8907 /* Extract from an array REGBUF containing the (raw) register state a
8908 function return value of type TYPE, and copy that, in virtual
8909 format, into VALBUF. */
8912 arm_extract_return_value (struct type
*type
, struct regcache
*regs
,
8915 struct gdbarch
*gdbarch
= get_regcache_arch (regs
);
8916 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
8918 if (TYPE_CODE_FLT
== TYPE_CODE (type
))
8920 switch (gdbarch_tdep (gdbarch
)->fp_model
)
8924 /* The value is in register F0 in internal format. We need to
8925 extract the raw value and then convert it to the desired
8927 bfd_byte tmpbuf
[FP_REGISTER_SIZE
];
8929 regcache_cooked_read (regs
, ARM_F0_REGNUM
, tmpbuf
);
8930 convert_from_extended (floatformat_from_type (type
), tmpbuf
,
8931 valbuf
, gdbarch_byte_order (gdbarch
));
8935 case ARM_FLOAT_SOFT_FPA
:
8936 case ARM_FLOAT_SOFT_VFP
:
8937 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8938 not using the VFP ABI code. */
8940 regcache_cooked_read (regs
, ARM_A1_REGNUM
, valbuf
);
8941 if (TYPE_LENGTH (type
) > 4)
8942 regcache_cooked_read (regs
, ARM_A1_REGNUM
+ 1,
8943 valbuf
+ INT_REGISTER_SIZE
);
8947 internal_error (__FILE__
, __LINE__
,
8948 _("arm_extract_return_value: "
8949 "Floating point model not supported"));
8953 else if (TYPE_CODE (type
) == TYPE_CODE_INT
8954 || TYPE_CODE (type
) == TYPE_CODE_CHAR
8955 || TYPE_CODE (type
) == TYPE_CODE_BOOL
8956 || TYPE_CODE (type
) == TYPE_CODE_PTR
8957 || TYPE_CODE (type
) == TYPE_CODE_REF
8958 || TYPE_CODE (type
) == TYPE_CODE_ENUM
)
8960 /* If the type is a plain integer, then the access is
8961 straight-forward. Otherwise we have to play around a bit
8963 int len
= TYPE_LENGTH (type
);
8964 int regno
= ARM_A1_REGNUM
;
8969 /* By using store_unsigned_integer we avoid having to do
8970 anything special for small big-endian values. */
8971 regcache_cooked_read_unsigned (regs
, regno
++, &tmp
);
8972 store_unsigned_integer (valbuf
,
8973 (len
> INT_REGISTER_SIZE
8974 ? INT_REGISTER_SIZE
: len
),
8976 len
-= INT_REGISTER_SIZE
;
8977 valbuf
+= INT_REGISTER_SIZE
;
8982 /* For a structure or union the behaviour is as if the value had
8983 been stored to word-aligned memory and then loaded into
8984 registers with 32-bit load instruction(s). */
8985 int len
= TYPE_LENGTH (type
);
8986 int regno
= ARM_A1_REGNUM
;
8987 bfd_byte tmpbuf
[INT_REGISTER_SIZE
];
8991 regcache_cooked_read (regs
, regno
++, tmpbuf
);
8992 memcpy (valbuf
, tmpbuf
,
8993 len
> INT_REGISTER_SIZE
? INT_REGISTER_SIZE
: len
);
8994 len
-= INT_REGISTER_SIZE
;
8995 valbuf
+= INT_REGISTER_SIZE
;
9001 /* Will a function return an aggregate type in memory or in a
9002 register? Return 0 if an aggregate type can be returned in a
9003 register, 1 if it must be returned in memory. */
9006 arm_return_in_memory (struct gdbarch
*gdbarch
, struct type
*type
)
9009 enum type_code code
;
9011 type
= check_typedef (type
);
9013 /* In the ARM ABI, "integer" like aggregate types are returned in
9014 registers. For an aggregate type to be integer like, its size
9015 must be less than or equal to INT_REGISTER_SIZE and the
9016 offset of each addressable subfield must be zero. Note that bit
9017 fields are not addressable, and all addressable subfields of
9018 unions always start at offset zero.
9020 This function is based on the behaviour of GCC 2.95.1.
9021 See: gcc/arm.c: arm_return_in_memory() for details.
9023 Note: All versions of GCC before GCC 2.95.2 do not set up the
9024 parameters correctly for a function returning the following
9025 structure: struct { float f;}; This should be returned in memory,
9026 not a register. Richard Earnshaw sent me a patch, but I do not
9027 know of any way to detect if a function like the above has been
9028 compiled with the correct calling convention. */
9030 /* All aggregate types that won't fit in a register must be returned
9032 if (TYPE_LENGTH (type
) > INT_REGISTER_SIZE
)
9037 /* The AAPCS says all aggregates not larger than a word are returned
9039 if (gdbarch_tdep (gdbarch
)->arm_abi
!= ARM_ABI_APCS
)
9042 /* The only aggregate types that can be returned in a register are
9043 structs and unions. Arrays must be returned in memory. */
9044 code
= TYPE_CODE (type
);
9045 if ((TYPE_CODE_STRUCT
!= code
) && (TYPE_CODE_UNION
!= code
))
9050 /* Assume all other aggregate types can be returned in a register.
9051 Run a check for structures, unions and arrays. */
9054 if ((TYPE_CODE_STRUCT
== code
) || (TYPE_CODE_UNION
== code
))
9057 /* Need to check if this struct/union is "integer" like. For
9058 this to be true, its size must be less than or equal to
9059 INT_REGISTER_SIZE and the offset of each addressable
9060 subfield must be zero. Note that bit fields are not
9061 addressable, and unions always start at offset zero. If any
9062 of the subfields is a floating point type, the struct/union
9063 cannot be an integer type. */
9065 /* For each field in the object, check:
9066 1) Is it FP? --> yes, nRc = 1;
9067 2) Is it addressable (bitpos != 0) and
9068 not packed (bitsize == 0)?
9072 for (i
= 0; i
< TYPE_NFIELDS (type
); i
++)
9074 enum type_code field_type_code
;
9075 field_type_code
= TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type
,
9078 /* Is it a floating point type field? */
9079 if (field_type_code
== TYPE_CODE_FLT
)
9085 /* If bitpos != 0, then we have to care about it. */
9086 if (TYPE_FIELD_BITPOS (type
, i
) != 0)
9088 /* Bitfields are not addressable. If the field bitsize is
9089 zero, then the field is not packed. Hence it cannot be
9090 a bitfield or any other packed type. */
9091 if (TYPE_FIELD_BITSIZE (type
, i
) == 0)
9103 /* Write into appropriate registers a function return value of type
9104 TYPE, given in virtual format. */
9107 arm_store_return_value (struct type
*type
, struct regcache
*regs
,
9108 const gdb_byte
*valbuf
)
9110 struct gdbarch
*gdbarch
= get_regcache_arch (regs
);
9111 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
9113 if (TYPE_CODE (type
) == TYPE_CODE_FLT
)
9115 gdb_byte buf
[MAX_REGISTER_SIZE
];
9117 switch (gdbarch_tdep (gdbarch
)->fp_model
)
9121 convert_to_extended (floatformat_from_type (type
), buf
, valbuf
,
9122 gdbarch_byte_order (gdbarch
));
9123 regcache_cooked_write (regs
, ARM_F0_REGNUM
, buf
);
9126 case ARM_FLOAT_SOFT_FPA
:
9127 case ARM_FLOAT_SOFT_VFP
:
9128 /* ARM_FLOAT_VFP can arise if this is a variadic function so
9129 not using the VFP ABI code. */
9131 regcache_cooked_write (regs
, ARM_A1_REGNUM
, valbuf
);
9132 if (TYPE_LENGTH (type
) > 4)
9133 regcache_cooked_write (regs
, ARM_A1_REGNUM
+ 1,
9134 valbuf
+ INT_REGISTER_SIZE
);
9138 internal_error (__FILE__
, __LINE__
,
9139 _("arm_store_return_value: Floating "
9140 "point model not supported"));
9144 else if (TYPE_CODE (type
) == TYPE_CODE_INT
9145 || TYPE_CODE (type
) == TYPE_CODE_CHAR
9146 || TYPE_CODE (type
) == TYPE_CODE_BOOL
9147 || TYPE_CODE (type
) == TYPE_CODE_PTR
9148 || TYPE_CODE (type
) == TYPE_CODE_REF
9149 || TYPE_CODE (type
) == TYPE_CODE_ENUM
)
9151 if (TYPE_LENGTH (type
) <= 4)
9153 /* Values of one word or less are zero/sign-extended and
9155 bfd_byte tmpbuf
[INT_REGISTER_SIZE
];
9156 LONGEST val
= unpack_long (type
, valbuf
);
9158 store_signed_integer (tmpbuf
, INT_REGISTER_SIZE
, byte_order
, val
);
9159 regcache_cooked_write (regs
, ARM_A1_REGNUM
, tmpbuf
);
9163 /* Integral values greater than one word are stored in consecutive
9164 registers starting with r0. This will always be a multiple of
9165 the regiser size. */
9166 int len
= TYPE_LENGTH (type
);
9167 int regno
= ARM_A1_REGNUM
;
9171 regcache_cooked_write (regs
, regno
++, valbuf
);
9172 len
-= INT_REGISTER_SIZE
;
9173 valbuf
+= INT_REGISTER_SIZE
;
9179 /* For a structure or union the behaviour is as if the value had
9180 been stored to word-aligned memory and then loaded into
9181 registers with 32-bit load instruction(s). */
9182 int len
= TYPE_LENGTH (type
);
9183 int regno
= ARM_A1_REGNUM
;
9184 bfd_byte tmpbuf
[INT_REGISTER_SIZE
];
9188 memcpy (tmpbuf
, valbuf
,
9189 len
> INT_REGISTER_SIZE
? INT_REGISTER_SIZE
: len
);
9190 regcache_cooked_write (regs
, regno
++, tmpbuf
);
9191 len
-= INT_REGISTER_SIZE
;
9192 valbuf
+= INT_REGISTER_SIZE
;
9198 /* Handle function return values. */
9200 static enum return_value_convention
9201 arm_return_value (struct gdbarch
*gdbarch
, struct value
*function
,
9202 struct type
*valtype
, struct regcache
*regcache
,
9203 gdb_byte
*readbuf
, const gdb_byte
*writebuf
)
9205 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
9206 struct type
*func_type
= function
? value_type (function
) : NULL
;
9207 enum arm_vfp_cprc_base_type vfp_base_type
;
9210 if (arm_vfp_abi_for_function (gdbarch
, func_type
)
9211 && arm_vfp_call_candidate (valtype
, &vfp_base_type
, &vfp_base_count
))
9213 int reg_char
= arm_vfp_cprc_reg_char (vfp_base_type
);
9214 int unit_length
= arm_vfp_cprc_unit_length (vfp_base_type
);
9216 for (i
= 0; i
< vfp_base_count
; i
++)
9218 if (reg_char
== 'q')
9221 arm_neon_quad_write (gdbarch
, regcache
, i
,
9222 writebuf
+ i
* unit_length
);
9225 arm_neon_quad_read (gdbarch
, regcache
, i
,
9226 readbuf
+ i
* unit_length
);
9233 xsnprintf (name_buf
, sizeof (name_buf
), "%c%d", reg_char
, i
);
9234 regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
9237 regcache_cooked_write (regcache
, regnum
,
9238 writebuf
+ i
* unit_length
);
9240 regcache_cooked_read (regcache
, regnum
,
9241 readbuf
+ i
* unit_length
);
9244 return RETURN_VALUE_REGISTER_CONVENTION
;
9247 if (TYPE_CODE (valtype
) == TYPE_CODE_STRUCT
9248 || TYPE_CODE (valtype
) == TYPE_CODE_UNION
9249 || TYPE_CODE (valtype
) == TYPE_CODE_ARRAY
)
9251 if (tdep
->struct_return
== pcc_struct_return
9252 || arm_return_in_memory (gdbarch
, valtype
))
9253 return RETURN_VALUE_STRUCT_CONVENTION
;
9256 /* AAPCS returns complex types longer than a register in memory. */
9257 if (tdep
->arm_abi
!= ARM_ABI_APCS
9258 && TYPE_CODE (valtype
) == TYPE_CODE_COMPLEX
9259 && TYPE_LENGTH (valtype
) > INT_REGISTER_SIZE
)
9260 return RETURN_VALUE_STRUCT_CONVENTION
;
9263 arm_store_return_value (valtype
, regcache
, writebuf
);
9266 arm_extract_return_value (valtype
, regcache
, readbuf
);
9268 return RETURN_VALUE_REGISTER_CONVENTION
;
9273 arm_get_longjmp_target (struct frame_info
*frame
, CORE_ADDR
*pc
)
9275 struct gdbarch
*gdbarch
= get_frame_arch (frame
);
9276 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
9277 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
9279 gdb_byte buf
[INT_REGISTER_SIZE
];
9281 jb_addr
= get_frame_register_unsigned (frame
, ARM_A1_REGNUM
);
9283 if (target_read_memory (jb_addr
+ tdep
->jb_pc
* tdep
->jb_elt_size
, buf
,
9287 *pc
= extract_unsigned_integer (buf
, INT_REGISTER_SIZE
, byte_order
);
9291 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
9292 return the target PC. Otherwise return 0. */
9295 arm_skip_stub (struct frame_info
*frame
, CORE_ADDR pc
)
9299 CORE_ADDR start_addr
;
9301 /* Find the starting address and name of the function containing the PC. */
9302 if (find_pc_partial_function (pc
, &name
, &start_addr
, NULL
) == 0)
9304 /* Trampoline 'bx reg' doesn't belong to any functions. Do the
9306 start_addr
= arm_skip_bx_reg (frame
, pc
);
9307 if (start_addr
!= 0)
9313 /* If PC is in a Thumb call or return stub, return the address of the
9314 target PC, which is in a register. The thunk functions are called
9315 _call_via_xx, where x is the register name. The possible names
9316 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
9317 functions, named __ARM_call_via_r[0-7]. */
9318 if (startswith (name
, "_call_via_")
9319 || startswith (name
, "__ARM_call_via_"))
9321 /* Use the name suffix to determine which register contains the
9323 static char *table
[15] =
9324 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
9325 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
9328 int offset
= strlen (name
) - 2;
9330 for (regno
= 0; regno
<= 14; regno
++)
9331 if (strcmp (&name
[offset
], table
[regno
]) == 0)
9332 return get_frame_register_unsigned (frame
, regno
);
9335 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
9336 non-interworking calls to foo. We could decode the stubs
9337 to find the target but it's easier to use the symbol table. */
9338 namelen
= strlen (name
);
9339 if (name
[0] == '_' && name
[1] == '_'
9340 && ((namelen
> 2 + strlen ("_from_thumb")
9341 && startswith (name
+ namelen
- strlen ("_from_thumb"), "_from_thumb"))
9342 || (namelen
> 2 + strlen ("_from_arm")
9343 && startswith (name
+ namelen
- strlen ("_from_arm"), "_from_arm"))))
9346 int target_len
= namelen
- 2;
9347 struct bound_minimal_symbol minsym
;
9348 struct objfile
*objfile
;
9349 struct obj_section
*sec
;
9351 if (name
[namelen
- 1] == 'b')
9352 target_len
-= strlen ("_from_thumb");
9354 target_len
-= strlen ("_from_arm");
9356 target_name
= alloca (target_len
+ 1);
9357 memcpy (target_name
, name
+ 2, target_len
);
9358 target_name
[target_len
] = '\0';
9360 sec
= find_pc_section (pc
);
9361 objfile
= (sec
== NULL
) ? NULL
: sec
->objfile
;
9362 minsym
= lookup_minimal_symbol (target_name
, NULL
, objfile
);
9363 if (minsym
.minsym
!= NULL
)
9364 return BMSYMBOL_VALUE_ADDRESS (minsym
);
9369 return 0; /* not a stub */
9373 set_arm_command (char *args
, int from_tty
)
9375 printf_unfiltered (_("\
9376 \"set arm\" must be followed by an apporpriate subcommand.\n"));
9377 help_list (setarmcmdlist
, "set arm ", all_commands
, gdb_stdout
);
9381 show_arm_command (char *args
, int from_tty
)
9383 cmd_show_list (showarmcmdlist
, from_tty
, "");
9387 arm_update_current_architecture (void)
9389 struct gdbarch_info info
;
9391 /* If the current architecture is not ARM, we have nothing to do. */
9392 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch
!= bfd_arch_arm
)
9395 /* Update the architecture. */
9396 gdbarch_info_init (&info
);
9398 if (!gdbarch_update_p (info
))
9399 internal_error (__FILE__
, __LINE__
, _("could not update architecture"));
9403 set_fp_model_sfunc (char *args
, int from_tty
,
9404 struct cmd_list_element
*c
)
9408 for (fp_model
= ARM_FLOAT_AUTO
; fp_model
!= ARM_FLOAT_LAST
; fp_model
++)
9409 if (strcmp (current_fp_model
, fp_model_strings
[fp_model
]) == 0)
9411 arm_fp_model
= fp_model
;
9415 if (fp_model
== ARM_FLOAT_LAST
)
9416 internal_error (__FILE__
, __LINE__
, _("Invalid fp model accepted: %s."),
9419 arm_update_current_architecture ();
9423 show_fp_model (struct ui_file
*file
, int from_tty
,
9424 struct cmd_list_element
*c
, const char *value
)
9426 struct gdbarch_tdep
*tdep
= gdbarch_tdep (target_gdbarch ());
9428 if (arm_fp_model
== ARM_FLOAT_AUTO
9429 && gdbarch_bfd_arch_info (target_gdbarch ())->arch
== bfd_arch_arm
)
9430 fprintf_filtered (file
, _("\
9431 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
9432 fp_model_strings
[tdep
->fp_model
]);
9434 fprintf_filtered (file
, _("\
9435 The current ARM floating point model is \"%s\".\n"),
9436 fp_model_strings
[arm_fp_model
]);
9440 arm_set_abi (char *args
, int from_tty
,
9441 struct cmd_list_element
*c
)
9445 for (arm_abi
= ARM_ABI_AUTO
; arm_abi
!= ARM_ABI_LAST
; arm_abi
++)
9446 if (strcmp (arm_abi_string
, arm_abi_strings
[arm_abi
]) == 0)
9448 arm_abi_global
= arm_abi
;
9452 if (arm_abi
== ARM_ABI_LAST
)
9453 internal_error (__FILE__
, __LINE__
, _("Invalid ABI accepted: %s."),
9456 arm_update_current_architecture ();
9460 arm_show_abi (struct ui_file
*file
, int from_tty
,
9461 struct cmd_list_element
*c
, const char *value
)
9463 struct gdbarch_tdep
*tdep
= gdbarch_tdep (target_gdbarch ());
9465 if (arm_abi_global
== ARM_ABI_AUTO
9466 && gdbarch_bfd_arch_info (target_gdbarch ())->arch
== bfd_arch_arm
)
9467 fprintf_filtered (file
, _("\
9468 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
9469 arm_abi_strings
[tdep
->arm_abi
]);
9471 fprintf_filtered (file
, _("The current ARM ABI is \"%s\".\n"),
9476 arm_show_fallback_mode (struct ui_file
*file
, int from_tty
,
9477 struct cmd_list_element
*c
, const char *value
)
9479 fprintf_filtered (file
,
9480 _("The current execution mode assumed "
9481 "(when symbols are unavailable) is \"%s\".\n"),
9482 arm_fallback_mode_string
);
9486 arm_show_force_mode (struct ui_file
*file
, int from_tty
,
9487 struct cmd_list_element
*c
, const char *value
)
9489 struct gdbarch_tdep
*tdep
= gdbarch_tdep (target_gdbarch ());
9491 fprintf_filtered (file
,
9492 _("The current execution mode assumed "
9493 "(even when symbols are available) is \"%s\".\n"),
9494 arm_force_mode_string
);
/* If the user changes the register disassembly style used for info
   register and other commands, we have to also switch the style used
   in opcodes for disassembly output.  This function is run in the "set
   arm disassembly" command, and does that.  */

static void
set_disassembly_style_sfunc (char *args, int from_tty,
			     struct cmd_list_element *c)
{
  set_disassembly_style ();
}
9509 /* Return the ARM register name corresponding to register I. */
9511 arm_register_name (struct gdbarch
*gdbarch
, int i
)
9513 const int num_regs
= gdbarch_num_regs (gdbarch
);
9515 if (gdbarch_tdep (gdbarch
)->have_vfp_pseudos
9516 && i
>= num_regs
&& i
< num_regs
+ 32)
9518 static const char *const vfp_pseudo_names
[] = {
9519 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
9520 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
9521 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
9522 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
9525 return vfp_pseudo_names
[i
- num_regs
];
9528 if (gdbarch_tdep (gdbarch
)->have_neon_pseudos
9529 && i
>= num_regs
+ 32 && i
< num_regs
+ 32 + 16)
9531 static const char *const neon_pseudo_names
[] = {
9532 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
9533 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
9536 return neon_pseudo_names
[i
- num_regs
- 32];
9539 if (i
>= ARRAY_SIZE (arm_register_names
))
9540 /* These registers are only supported on targets which supply
9541 an XML description. */
9544 return arm_register_names
[i
];
9548 set_disassembly_style (void)
9552 /* Find the style that the user wants. */
9553 for (current
= 0; current
< num_disassembly_options
; current
++)
9554 if (disassembly_style
== valid_disassembly_styles
[current
])
9556 gdb_assert (current
< num_disassembly_options
);
9558 /* Synchronize the disassembler. */
9559 set_arm_regname_option (current
);
9562 /* Test whether the coff symbol specific value corresponds to a Thumb
9566 coff_sym_is_thumb (int val
)
9568 return (val
== C_THUMBEXT
9569 || val
== C_THUMBSTAT
9570 || val
== C_THUMBEXTFUNC
9571 || val
== C_THUMBSTATFUNC
9572 || val
== C_THUMBLABEL
);
9575 /* arm_coff_make_msymbol_special()
9576 arm_elf_make_msymbol_special()
9578 These functions test whether the COFF or ELF symbol corresponds to
9579 an address in thumb code, and set a "special" bit in a minimal
9580 symbol to indicate that it does. */
9583 arm_elf_make_msymbol_special(asymbol
*sym
, struct minimal_symbol
*msym
)
9585 if (ARM_SYM_BRANCH_TYPE (&((elf_symbol_type
*)sym
)->internal_elf_sym
)
9586 == ST_BRANCH_TO_THUMB
)
9587 MSYMBOL_SET_SPECIAL (msym
);
/* Mark MSYM as Thumb when the COFF symbol class VAL denotes a Thumb
   symbol (see coff_sym_is_thumb).  */

static void
arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
{
  if (coff_sym_is_thumb (val))
    MSYMBOL_SET_SPECIAL (msym);
}
9598 arm_objfile_data_free (struct objfile
*objfile
, void *arg
)
9600 struct arm_per_objfile
*data
= arg
;
9603 for (i
= 0; i
< objfile
->obfd
->section_count
; i
++)
9604 VEC_free (arm_mapping_symbol_s
, data
->section_maps
[i
]);
9608 arm_record_special_symbol (struct gdbarch
*gdbarch
, struct objfile
*objfile
,
9611 const char *name
= bfd_asymbol_name (sym
);
9612 struct arm_per_objfile
*data
;
9613 VEC(arm_mapping_symbol_s
) **map_p
;
9614 struct arm_mapping_symbol new_map_sym
;
9616 gdb_assert (name
[0] == '$');
9617 if (name
[1] != 'a' && name
[1] != 't' && name
[1] != 'd')
9620 data
= objfile_data (objfile
, arm_objfile_data_key
);
9623 data
= OBSTACK_ZALLOC (&objfile
->objfile_obstack
,
9624 struct arm_per_objfile
);
9625 set_objfile_data (objfile
, arm_objfile_data_key
, data
);
9626 data
->section_maps
= OBSTACK_CALLOC (&objfile
->objfile_obstack
,
9627 objfile
->obfd
->section_count
,
9628 VEC(arm_mapping_symbol_s
) *);
9630 map_p
= &data
->section_maps
[bfd_get_section (sym
)->index
];
9632 new_map_sym
.value
= sym
->value
;
9633 new_map_sym
.type
= name
[1];
9635 /* Assume that most mapping symbols appear in order of increasing
9636 value. If they were randomly distributed, it would be faster to
9637 always push here and then sort at first use. */
9638 if (!VEC_empty (arm_mapping_symbol_s
, *map_p
))
9640 struct arm_mapping_symbol
*prev_map_sym
;
9642 prev_map_sym
= VEC_last (arm_mapping_symbol_s
, *map_p
);
9643 if (prev_map_sym
->value
>= sym
->value
)
9646 idx
= VEC_lower_bound (arm_mapping_symbol_s
, *map_p
, &new_map_sym
,
9647 arm_compare_mapping_symbols
);
9648 VEC_safe_insert (arm_mapping_symbol_s
, *map_p
, idx
, &new_map_sym
);
9653 VEC_safe_push (arm_mapping_symbol_s
, *map_p
, &new_map_sym
);
9657 arm_write_pc (struct regcache
*regcache
, CORE_ADDR pc
)
9659 struct gdbarch
*gdbarch
= get_regcache_arch (regcache
);
9660 regcache_cooked_write_unsigned (regcache
, ARM_PC_REGNUM
, pc
);
9662 /* If necessary, set the T bit. */
9665 ULONGEST val
, t_bit
;
9666 regcache_cooked_read_unsigned (regcache
, ARM_PS_REGNUM
, &val
);
9667 t_bit
= arm_psr_thumb_bit (gdbarch
);
9668 if (arm_pc_is_thumb (gdbarch
, pc
))
9669 regcache_cooked_write_unsigned (regcache
, ARM_PS_REGNUM
,
9672 regcache_cooked_write_unsigned (regcache
, ARM_PS_REGNUM
,
9677 /* Read the contents of a NEON quad register, by reading from two
9678 double registers. This is used to implement the quad pseudo
9679 registers, and for argument passing in case the quad registers are
9680 missing; vectors are passed in quad registers when using the VFP
9681 ABI, even if a NEON unit is not present. REGNUM is the index of
9682 the quad register, in [0, 15]. */
9684 static enum register_status
9685 arm_neon_quad_read (struct gdbarch
*gdbarch
, struct regcache
*regcache
,
9686 int regnum
, gdb_byte
*buf
)
9689 gdb_byte reg_buf
[8];
9690 int offset
, double_regnum
;
9691 enum register_status status
;
9693 xsnprintf (name_buf
, sizeof (name_buf
), "d%d", regnum
<< 1);
9694 double_regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
9697 /* d0 is always the least significant half of q0. */
9698 if (gdbarch_byte_order (gdbarch
) == BFD_ENDIAN_BIG
)
9703 status
= regcache_raw_read (regcache
, double_regnum
, reg_buf
);
9704 if (status
!= REG_VALID
)
9706 memcpy (buf
+ offset
, reg_buf
, 8);
9708 offset
= 8 - offset
;
9709 status
= regcache_raw_read (regcache
, double_regnum
+ 1, reg_buf
);
9710 if (status
!= REG_VALID
)
9712 memcpy (buf
+ offset
, reg_buf
, 8);
9717 static enum register_status
9718 arm_pseudo_read (struct gdbarch
*gdbarch
, struct regcache
*regcache
,
9719 int regnum
, gdb_byte
*buf
)
9721 const int num_regs
= gdbarch_num_regs (gdbarch
);
9723 gdb_byte reg_buf
[8];
9724 int offset
, double_regnum
;
9726 gdb_assert (regnum
>= num_regs
);
9729 if (gdbarch_tdep (gdbarch
)->have_neon_pseudos
&& regnum
>= 32 && regnum
< 48)
9730 /* Quad-precision register. */
9731 return arm_neon_quad_read (gdbarch
, regcache
, regnum
- 32, buf
);
9734 enum register_status status
;
9736 /* Single-precision register. */
9737 gdb_assert (regnum
< 32);
9739 /* s0 is always the least significant half of d0. */
9740 if (gdbarch_byte_order (gdbarch
) == BFD_ENDIAN_BIG
)
9741 offset
= (regnum
& 1) ? 0 : 4;
9743 offset
= (regnum
& 1) ? 4 : 0;
9745 xsnprintf (name_buf
, sizeof (name_buf
), "d%d", regnum
>> 1);
9746 double_regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
9749 status
= regcache_raw_read (regcache
, double_regnum
, reg_buf
);
9750 if (status
== REG_VALID
)
9751 memcpy (buf
, reg_buf
+ offset
, 4);
9756 /* Store the contents of BUF to a NEON quad register, by writing to
9757 two double registers. This is used to implement the quad pseudo
9758 registers, and for argument passing in case the quad registers are
9759 missing; vectors are passed in quad registers when using the VFP
9760 ABI, even if a NEON unit is not present. REGNUM is the index
9761 of the quad register, in [0, 15]. */
9764 arm_neon_quad_write (struct gdbarch
*gdbarch
, struct regcache
*regcache
,
9765 int regnum
, const gdb_byte
*buf
)
9768 int offset
, double_regnum
;
9770 xsnprintf (name_buf
, sizeof (name_buf
), "d%d", regnum
<< 1);
9771 double_regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
9774 /* d0 is always the least significant half of q0. */
9775 if (gdbarch_byte_order (gdbarch
) == BFD_ENDIAN_BIG
)
9780 regcache_raw_write (regcache
, double_regnum
, buf
+ offset
);
9781 offset
= 8 - offset
;
9782 regcache_raw_write (regcache
, double_regnum
+ 1, buf
+ offset
);
9786 arm_pseudo_write (struct gdbarch
*gdbarch
, struct regcache
*regcache
,
9787 int regnum
, const gdb_byte
*buf
)
9789 const int num_regs
= gdbarch_num_regs (gdbarch
);
9791 gdb_byte reg_buf
[8];
9792 int offset
, double_regnum
;
9794 gdb_assert (regnum
>= num_regs
);
9797 if (gdbarch_tdep (gdbarch
)->have_neon_pseudos
&& regnum
>= 32 && regnum
< 48)
9798 /* Quad-precision register. */
9799 arm_neon_quad_write (gdbarch
, regcache
, regnum
- 32, buf
);
9802 /* Single-precision register. */
9803 gdb_assert (regnum
< 32);
9805 /* s0 is always the least significant half of d0. */
9806 if (gdbarch_byte_order (gdbarch
) == BFD_ENDIAN_BIG
)
9807 offset
= (regnum
& 1) ? 0 : 4;
9809 offset
= (regnum
& 1) ? 4 : 0;
9811 xsnprintf (name_buf
, sizeof (name_buf
), "d%d", regnum
>> 1);
9812 double_regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
9815 regcache_raw_read (regcache
, double_regnum
, reg_buf
);
9816 memcpy (reg_buf
+ offset
, buf
, 4);
9817 regcache_raw_write (regcache
, double_regnum
, reg_buf
);
/* user_reg_add callback: BATON points at the register number of an
   aliased register; return its value in FRAME.  */

static struct value *
value_of_arm_user_reg (struct frame_info *frame, const void *baton)
{
  const int *regnum_p = baton;

  return value_of_register (*regnum_p, frame);
}
9828 static enum gdb_osabi
9829 arm_elf_osabi_sniffer (bfd
*abfd
)
9831 unsigned int elfosabi
;
9832 enum gdb_osabi osabi
= GDB_OSABI_UNKNOWN
;
9834 elfosabi
= elf_elfheader (abfd
)->e_ident
[EI_OSABI
];
9836 if (elfosabi
== ELFOSABI_ARM
)
9837 /* GNU tools use this value. Check note sections in this case,
9839 bfd_map_over_sections (abfd
,
9840 generic_elf_osabi_sniff_abi_tag_sections
,
9843 /* Anything else will be handled by the generic ELF sniffer. */
9848 arm_register_reggroup_p (struct gdbarch
*gdbarch
, int regnum
,
9849 struct reggroup
*group
)
9851 /* FPS register's type is INT, but belongs to float_reggroup. Beside
9852 this, FPS register belongs to save_regroup, restore_reggroup, and
9853 all_reggroup, of course. */
9854 if (regnum
== ARM_FPS_REGNUM
)
9855 return (group
== float_reggroup
9856 || group
== save_reggroup
9857 || group
== restore_reggroup
9858 || group
== all_reggroup
);
9860 return default_register_reggroup_p (gdbarch
, regnum
, group
);
9864 /* For backward-compatibility we allow two 'g' packet lengths with
9865 the remote protocol depending on whether FPA registers are
9866 supplied. M-profile targets do not have FPA registers, but some
9867 stubs already exist in the wild which use a 'g' packet which
9868 supplies them albeit with dummy values. The packet format which
9869 includes FPA registers should be considered deprecated for
9870 M-profile targets. */
9873 arm_register_g_packet_guesses (struct gdbarch
*gdbarch
)
9875 if (gdbarch_tdep (gdbarch
)->is_m
)
9877 /* If we know from the executable this is an M-profile target,
9878 cater for remote targets whose register set layout is the
9879 same as the FPA layout. */
9880 register_remote_g_packet_guess (gdbarch
,
9881 /* r0-r12,sp,lr,pc; f0-f7; fps,xpsr */
9882 (16 * INT_REGISTER_SIZE
)
9883 + (8 * FP_REGISTER_SIZE
)
9884 + (2 * INT_REGISTER_SIZE
),
9885 tdesc_arm_with_m_fpa_layout
);
9887 /* The regular M-profile layout. */
9888 register_remote_g_packet_guess (gdbarch
,
9889 /* r0-r12,sp,lr,pc; xpsr */
9890 (16 * INT_REGISTER_SIZE
)
9891 + INT_REGISTER_SIZE
,
9894 /* M-profile plus M4F VFP. */
9895 register_remote_g_packet_guess (gdbarch
,
9896 /* r0-r12,sp,lr,pc; d0-d15; fpscr,xpsr */
9897 (16 * INT_REGISTER_SIZE
)
9898 + (16 * VFP_REGISTER_SIZE
)
9899 + (2 * INT_REGISTER_SIZE
),
9900 tdesc_arm_with_m_vfp_d16
);
9903 /* Otherwise we don't have a useful guess. */
9907 /* Initialize the current architecture based on INFO. If possible,
9908 re-use an architecture from ARCHES, which is a list of
9909 architectures already created during this debugging session.
9911 Called e.g. at program startup, when reading a core file, and when
9912 reading a binary file. */
9914 static struct gdbarch
*
9915 arm_gdbarch_init (struct gdbarch_info info
, struct gdbarch_list
*arches
)
9917 struct gdbarch_tdep
*tdep
;
9918 struct gdbarch
*gdbarch
;
9919 struct gdbarch_list
*best_arch
;
9920 enum arm_abi_kind arm_abi
= arm_abi_global
;
9921 enum arm_float_model fp_model
= arm_fp_model
;
9922 struct tdesc_arch_data
*tdesc_data
= NULL
;
9924 int vfp_register_count
= 0, have_vfp_pseudos
= 0, have_neon_pseudos
= 0;
9925 int have_wmmx_registers
= 0;
9927 int have_fpa_registers
= 1;
9928 const struct target_desc
*tdesc
= info
.target_desc
;
9930 /* If we have an object to base this architecture on, try to determine
9933 if (arm_abi
== ARM_ABI_AUTO
&& info
.abfd
!= NULL
)
9935 int ei_osabi
, e_flags
;
9937 switch (bfd_get_flavour (info
.abfd
))
9939 case bfd_target_aout_flavour
:
9940 /* Assume it's an old APCS-style ABI. */
9941 arm_abi
= ARM_ABI_APCS
;
9944 case bfd_target_coff_flavour
:
9945 /* Assume it's an old APCS-style ABI. */
9947 arm_abi
= ARM_ABI_APCS
;
9950 case bfd_target_elf_flavour
:
9951 ei_osabi
= elf_elfheader (info
.abfd
)->e_ident
[EI_OSABI
];
9952 e_flags
= elf_elfheader (info
.abfd
)->e_flags
;
9954 if (ei_osabi
== ELFOSABI_ARM
)
9956 /* GNU tools used to use this value, but do not for EABI
9957 objects. There's nowhere to tag an EABI version
9958 anyway, so assume APCS. */
9959 arm_abi
= ARM_ABI_APCS
;
9961 else if (ei_osabi
== ELFOSABI_NONE
|| ei_osabi
== ELFOSABI_GNU
)
9963 int eabi_ver
= EF_ARM_EABI_VERSION (e_flags
);
9964 int attr_arch
, attr_profile
;
9968 case EF_ARM_EABI_UNKNOWN
:
9969 /* Assume GNU tools. */
9970 arm_abi
= ARM_ABI_APCS
;
9973 case EF_ARM_EABI_VER4
:
9974 case EF_ARM_EABI_VER5
:
9975 arm_abi
= ARM_ABI_AAPCS
;
9976 /* EABI binaries default to VFP float ordering.
9977 They may also contain build attributes that can
9978 be used to identify if the VFP argument-passing
9980 if (fp_model
== ARM_FLOAT_AUTO
)
9983 switch (bfd_elf_get_obj_attr_int (info
.abfd
,
9987 case AEABI_VFP_args_base
:
9988 /* "The user intended FP parameter/result
9989 passing to conform to AAPCS, base
9991 fp_model
= ARM_FLOAT_SOFT_VFP
;
9993 case AEABI_VFP_args_vfp
:
9994 /* "The user intended FP parameter/result
9995 passing to conform to AAPCS, VFP
9997 fp_model
= ARM_FLOAT_VFP
;
9999 case AEABI_VFP_args_toolchain
:
10000 /* "The user intended FP parameter/result
10001 passing to conform to tool chain-specific
10002 conventions" - we don't know any such
10003 conventions, so leave it as "auto". */
10005 case AEABI_VFP_args_compatible
:
10006 /* "Code is compatible with both the base
10007 and VFP variants; the user did not permit
10008 non-variadic functions to pass FP
10009 parameters/results" - leave it as
10013 /* Attribute value not mentioned in the
10014 November 2012 ABI, so leave it as
10019 fp_model
= ARM_FLOAT_SOFT_VFP
;
10025 /* Leave it as "auto". */
10026 warning (_("unknown ARM EABI version 0x%x"), eabi_ver
);
10031 /* Detect M-profile programs. This only works if the
10032 executable file includes build attributes; GCC does
10033 copy them to the executable, but e.g. RealView does
10035 attr_arch
= bfd_elf_get_obj_attr_int (info
.abfd
, OBJ_ATTR_PROC
,
10037 attr_profile
= bfd_elf_get_obj_attr_int (info
.abfd
,
10039 Tag_CPU_arch_profile
);
10040 /* GCC specifies the profile for v6-M; RealView only
10041 specifies the profile for architectures starting with
10042 V7 (as opposed to architectures with a tag
10043 numerically greater than TAG_CPU_ARCH_V7). */
10044 if (!tdesc_has_registers (tdesc
)
10045 && (attr_arch
== TAG_CPU_ARCH_V6_M
10046 || attr_arch
== TAG_CPU_ARCH_V6S_M
10047 || attr_profile
== 'M'))
10052 if (fp_model
== ARM_FLOAT_AUTO
)
10054 int e_flags
= elf_elfheader (info
.abfd
)->e_flags
;
10056 switch (e_flags
& (EF_ARM_SOFT_FLOAT
| EF_ARM_VFP_FLOAT
))
10059 /* Leave it as "auto". Strictly speaking this case
10060 means FPA, but almost nobody uses that now, and
10061 many toolchains fail to set the appropriate bits
10062 for the floating-point model they use. */
10064 case EF_ARM_SOFT_FLOAT
:
10065 fp_model
= ARM_FLOAT_SOFT_FPA
;
10067 case EF_ARM_VFP_FLOAT
:
10068 fp_model
= ARM_FLOAT_VFP
;
10070 case EF_ARM_SOFT_FLOAT
| EF_ARM_VFP_FLOAT
:
10071 fp_model
= ARM_FLOAT_SOFT_VFP
;
10076 if (e_flags
& EF_ARM_BE8
)
10077 info
.byte_order_for_code
= BFD_ENDIAN_LITTLE
;
10082 /* Leave it as "auto". */
10087 /* Check any target description for validity. */
10088 if (tdesc_has_registers (tdesc
))
10090 /* For most registers we require GDB's default names; but also allow
10091 the numeric names for sp / lr / pc, as a convenience. */
10092 static const char *const arm_sp_names
[] = { "r13", "sp", NULL
};
10093 static const char *const arm_lr_names
[] = { "r14", "lr", NULL
};
10094 static const char *const arm_pc_names
[] = { "r15", "pc", NULL
};
10096 const struct tdesc_feature
*feature
;
10099 feature
= tdesc_find_feature (tdesc
,
10100 "org.gnu.gdb.arm.core");
10101 if (feature
== NULL
)
10103 feature
= tdesc_find_feature (tdesc
,
10104 "org.gnu.gdb.arm.m-profile");
10105 if (feature
== NULL
)
10111 tdesc_data
= tdesc_data_alloc ();
10114 for (i
= 0; i
< ARM_SP_REGNUM
; i
++)
10115 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
, i
,
10116 arm_register_names
[i
]);
10117 valid_p
&= tdesc_numbered_register_choices (feature
, tdesc_data
,
10120 valid_p
&= tdesc_numbered_register_choices (feature
, tdesc_data
,
10123 valid_p
&= tdesc_numbered_register_choices (feature
, tdesc_data
,
10127 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
,
10128 ARM_PS_REGNUM
, "xpsr");
10130 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
,
10131 ARM_PS_REGNUM
, "cpsr");
10135 tdesc_data_cleanup (tdesc_data
);
10139 feature
= tdesc_find_feature (tdesc
,
10140 "org.gnu.gdb.arm.fpa");
10141 if (feature
!= NULL
)
10144 for (i
= ARM_F0_REGNUM
; i
<= ARM_FPS_REGNUM
; i
++)
10145 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
, i
,
10146 arm_register_names
[i
]);
10149 tdesc_data_cleanup (tdesc_data
);
10154 have_fpa_registers
= 0;
10156 feature
= tdesc_find_feature (tdesc
,
10157 "org.gnu.gdb.xscale.iwmmxt");
10158 if (feature
!= NULL
)
10160 static const char *const iwmmxt_names
[] = {
10161 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
10162 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
10163 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
10164 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
10168 for (i
= ARM_WR0_REGNUM
; i
<= ARM_WR15_REGNUM
; i
++)
10170 &= tdesc_numbered_register (feature
, tdesc_data
, i
,
10171 iwmmxt_names
[i
- ARM_WR0_REGNUM
]);
10173 /* Check for the control registers, but do not fail if they
10175 for (i
= ARM_WC0_REGNUM
; i
<= ARM_WCASF_REGNUM
; i
++)
10176 tdesc_numbered_register (feature
, tdesc_data
, i
,
10177 iwmmxt_names
[i
- ARM_WR0_REGNUM
]);
10179 for (i
= ARM_WCGR0_REGNUM
; i
<= ARM_WCGR3_REGNUM
; i
++)
10181 &= tdesc_numbered_register (feature
, tdesc_data
, i
,
10182 iwmmxt_names
[i
- ARM_WR0_REGNUM
]);
10186 tdesc_data_cleanup (tdesc_data
);
10190 have_wmmx_registers
= 1;
10193 /* If we have a VFP unit, check whether the single precision registers
10194 are present. If not, then we will synthesize them as pseudo
10196 feature
= tdesc_find_feature (tdesc
,
10197 "org.gnu.gdb.arm.vfp");
10198 if (feature
!= NULL
)
10200 static const char *const vfp_double_names
[] = {
10201 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
10202 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
10203 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
10204 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
10207 /* Require the double precision registers. There must be either
10210 for (i
= 0; i
< 32; i
++)
10212 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
,
10214 vfp_double_names
[i
]);
10218 if (!valid_p
&& i
== 16)
10221 /* Also require FPSCR. */
10222 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
,
10223 ARM_FPSCR_REGNUM
, "fpscr");
10226 tdesc_data_cleanup (tdesc_data
);
10230 if (tdesc_unnumbered_register (feature
, "s0") == 0)
10231 have_vfp_pseudos
= 1;
10233 vfp_register_count
= i
;
10235 /* If we have VFP, also check for NEON. The architecture allows
10236 NEON without VFP (integer vector operations only), but GDB
10237 does not support that. */
10238 feature
= tdesc_find_feature (tdesc
,
10239 "org.gnu.gdb.arm.neon");
10240 if (feature
!= NULL
)
10242 /* NEON requires 32 double-precision registers. */
10245 tdesc_data_cleanup (tdesc_data
);
10249 /* If there are quad registers defined by the stub, use
10250 their type; otherwise (normally) provide them with
10251 the default type. */
10252 if (tdesc_unnumbered_register (feature
, "q0") == 0)
10253 have_neon_pseudos
= 1;
10260 /* If there is already a candidate, use it. */
10261 for (best_arch
= gdbarch_list_lookup_by_info (arches
, &info
);
10263 best_arch
= gdbarch_list_lookup_by_info (best_arch
->next
, &info
))
10265 if (arm_abi
!= ARM_ABI_AUTO
10266 && arm_abi
!= gdbarch_tdep (best_arch
->gdbarch
)->arm_abi
)
10269 if (fp_model
!= ARM_FLOAT_AUTO
10270 && fp_model
!= gdbarch_tdep (best_arch
->gdbarch
)->fp_model
)
10273 /* There are various other properties in tdep that we do not
10274 need to check here: those derived from a target description,
10275 since gdbarches with a different target description are
10276 automatically disqualified. */
10278 /* Do check is_m, though, since it might come from the binary. */
10279 if (is_m
!= gdbarch_tdep (best_arch
->gdbarch
)->is_m
)
10282 /* Found a match. */
10286 if (best_arch
!= NULL
)
10288 if (tdesc_data
!= NULL
)
10289 tdesc_data_cleanup (tdesc_data
);
10290 return best_arch
->gdbarch
;
10293 tdep
= xcalloc (1, sizeof (struct gdbarch_tdep
));
10294 gdbarch
= gdbarch_alloc (&info
, tdep
);
10296 /* Record additional information about the architecture we are defining.
10297 These are gdbarch discriminators, like the OSABI. */
10298 tdep
->arm_abi
= arm_abi
;
10299 tdep
->fp_model
= fp_model
;
10301 tdep
->have_fpa_registers
= have_fpa_registers
;
10302 tdep
->have_wmmx_registers
= have_wmmx_registers
;
10303 gdb_assert (vfp_register_count
== 0
10304 || vfp_register_count
== 16
10305 || vfp_register_count
== 32);
10306 tdep
->vfp_register_count
= vfp_register_count
;
10307 tdep
->have_vfp_pseudos
= have_vfp_pseudos
;
10308 tdep
->have_neon_pseudos
= have_neon_pseudos
;
10309 tdep
->have_neon
= have_neon
;
10311 arm_register_g_packet_guesses (gdbarch
);
10314 switch (info
.byte_order_for_code
)
10316 case BFD_ENDIAN_BIG
:
10317 tdep
->arm_breakpoint
= arm_default_arm_be_breakpoint
;
10318 tdep
->arm_breakpoint_size
= sizeof (arm_default_arm_be_breakpoint
);
10319 tdep
->thumb_breakpoint
= arm_default_thumb_be_breakpoint
;
10320 tdep
->thumb_breakpoint_size
= sizeof (arm_default_thumb_be_breakpoint
);
10324 case BFD_ENDIAN_LITTLE
:
10325 tdep
->arm_breakpoint
= arm_default_arm_le_breakpoint
;
10326 tdep
->arm_breakpoint_size
= sizeof (arm_default_arm_le_breakpoint
);
10327 tdep
->thumb_breakpoint
= arm_default_thumb_le_breakpoint
;
10328 tdep
->thumb_breakpoint_size
= sizeof (arm_default_thumb_le_breakpoint
);
10333 internal_error (__FILE__
, __LINE__
,
10334 _("arm_gdbarch_init: bad byte order for float format"));
10337 /* On ARM targets char defaults to unsigned. */
10338 set_gdbarch_char_signed (gdbarch
, 0);
10340 /* Note: for displaced stepping, this includes the breakpoint, and one word
10341 of additional scratch space. This setting isn't used for anything beside
10342 displaced stepping at present. */
10343 set_gdbarch_max_insn_length (gdbarch
, 4 * DISPLACED_MODIFIED_INSNS
);
10345 /* This should be low enough for everything. */
10346 tdep
->lowest_pc
= 0x20;
10347 tdep
->jb_pc
= -1; /* Longjump support not enabled by default. */
10349 /* The default, for both APCS and AAPCS, is to return small
10350 structures in registers. */
10351 tdep
->struct_return
= reg_struct_return
;
10353 set_gdbarch_push_dummy_call (gdbarch
, arm_push_dummy_call
);
10354 set_gdbarch_frame_align (gdbarch
, arm_frame_align
);
10356 set_gdbarch_write_pc (gdbarch
, arm_write_pc
);
10358 /* Frame handling. */
10359 set_gdbarch_dummy_id (gdbarch
, arm_dummy_id
);
10360 set_gdbarch_unwind_pc (gdbarch
, arm_unwind_pc
);
10361 set_gdbarch_unwind_sp (gdbarch
, arm_unwind_sp
);
10363 frame_base_set_default (gdbarch
, &arm_normal_base
);
10365 /* Address manipulation. */
10366 set_gdbarch_addr_bits_remove (gdbarch
, arm_addr_bits_remove
);
10368 /* Advance PC across function entry code. */
10369 set_gdbarch_skip_prologue (gdbarch
, arm_skip_prologue
);
10371 /* Detect whether PC is at a point where the stack has been destroyed. */
10372 set_gdbarch_stack_frame_destroyed_p (gdbarch
, arm_stack_frame_destroyed_p
);
10374 /* Skip trampolines. */
10375 set_gdbarch_skip_trampoline_code (gdbarch
, arm_skip_stub
);
10377 /* The stack grows downward. */
10378 set_gdbarch_inner_than (gdbarch
, core_addr_lessthan
);
10380 /* Breakpoint manipulation. */
10381 set_gdbarch_breakpoint_from_pc (gdbarch
, arm_breakpoint_from_pc
);
10382 set_gdbarch_remote_breakpoint_from_pc (gdbarch
,
10383 arm_remote_breakpoint_from_pc
);
10385 /* Information about registers, etc. */
10386 set_gdbarch_sp_regnum (gdbarch
, ARM_SP_REGNUM
);
10387 set_gdbarch_pc_regnum (gdbarch
, ARM_PC_REGNUM
);
10388 set_gdbarch_num_regs (gdbarch
, ARM_NUM_REGS
);
10389 set_gdbarch_register_type (gdbarch
, arm_register_type
);
10390 set_gdbarch_register_reggroup_p (gdbarch
, arm_register_reggroup_p
);
10392 /* This "info float" is FPA-specific. Use the generic version if we
10393 do not have FPA. */
10394 if (gdbarch_tdep (gdbarch
)->have_fpa_registers
)
10395 set_gdbarch_print_float_info (gdbarch
, arm_print_float_info
);
10397 /* Internal <-> external register number maps. */
10398 set_gdbarch_dwarf2_reg_to_regnum (gdbarch
, arm_dwarf_reg_to_regnum
);
10399 set_gdbarch_register_sim_regno (gdbarch
, arm_register_sim_regno
);
10401 set_gdbarch_register_name (gdbarch
, arm_register_name
);
10403 /* Returning results. */
10404 set_gdbarch_return_value (gdbarch
, arm_return_value
);
10407 set_gdbarch_print_insn (gdbarch
, gdb_print_insn_arm
);
10409 /* Minsymbol frobbing. */
10410 set_gdbarch_elf_make_msymbol_special (gdbarch
, arm_elf_make_msymbol_special
);
10411 set_gdbarch_coff_make_msymbol_special (gdbarch
,
10412 arm_coff_make_msymbol_special
);
10413 set_gdbarch_record_special_symbol (gdbarch
, arm_record_special_symbol
);
10415 /* Thumb-2 IT block support. */
10416 set_gdbarch_adjust_breakpoint_address (gdbarch
,
10417 arm_adjust_breakpoint_address
);
10419 /* Virtual tables. */
10420 set_gdbarch_vbit_in_delta (gdbarch
, 1);
10422 /* Hook in the ABI-specific overrides, if they have been registered. */
10423 gdbarch_init_osabi (info
, gdbarch
);
10425 dwarf2_frame_set_init_reg (gdbarch
, arm_dwarf2_frame_init_reg
);
10427 /* Add some default predicates. */
10429 frame_unwind_append_unwinder (gdbarch
, &arm_m_exception_unwind
);
10430 frame_unwind_append_unwinder (gdbarch
, &arm_stub_unwind
);
10431 dwarf2_append_unwinders (gdbarch
);
10432 frame_unwind_append_unwinder (gdbarch
, &arm_exidx_unwind
);
10433 frame_unwind_append_unwinder (gdbarch
, &arm_prologue_unwind
);
10435 /* Now we have tuned the configuration, set a few final things,
10436 based on what the OS ABI has told us. */
10438 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
10439 binaries are always marked. */
10440 if (tdep
->arm_abi
== ARM_ABI_AUTO
)
10441 tdep
->arm_abi
= ARM_ABI_APCS
;
10443 /* Watchpoints are not steppable. */
10444 set_gdbarch_have_nonsteppable_watchpoint (gdbarch
, 1);
10446 /* We used to default to FPA for generic ARM, but almost nobody
10447 uses that now, and we now provide a way for the user to force
10448 the model. So default to the most useful variant. */
10449 if (tdep
->fp_model
== ARM_FLOAT_AUTO
)
10450 tdep
->fp_model
= ARM_FLOAT_SOFT_FPA
;
10452 if (tdep
->jb_pc
>= 0)
10453 set_gdbarch_get_longjmp_target (gdbarch
, arm_get_longjmp_target
);
10455 /* Floating point sizes and format. */
10456 set_gdbarch_float_format (gdbarch
, floatformats_ieee_single
);
10457 if (tdep
->fp_model
== ARM_FLOAT_SOFT_FPA
|| tdep
->fp_model
== ARM_FLOAT_FPA
)
10459 set_gdbarch_double_format
10460 (gdbarch
, floatformats_ieee_double_littlebyte_bigword
);
10461 set_gdbarch_long_double_format
10462 (gdbarch
, floatformats_ieee_double_littlebyte_bigword
);
10466 set_gdbarch_double_format (gdbarch
, floatformats_ieee_double
);
10467 set_gdbarch_long_double_format (gdbarch
, floatformats_ieee_double
);
10470 if (have_vfp_pseudos
)
10472 /* NOTE: These are the only pseudo registers used by
10473 the ARM target at the moment. If more are added, a
10474 little more care in numbering will be needed. */
10476 int num_pseudos
= 32;
10477 if (have_neon_pseudos
)
10479 set_gdbarch_num_pseudo_regs (gdbarch
, num_pseudos
);
10480 set_gdbarch_pseudo_register_read (gdbarch
, arm_pseudo_read
);
10481 set_gdbarch_pseudo_register_write (gdbarch
, arm_pseudo_write
);
10486 set_tdesc_pseudo_register_name (gdbarch
, arm_register_name
);
10488 tdesc_use_registers (gdbarch
, tdesc
, tdesc_data
);
10490 /* Override tdesc_register_type to adjust the types of VFP
10491 registers for NEON. */
10492 set_gdbarch_register_type (gdbarch
, arm_register_type
);
10495 /* Add standard register aliases. We add aliases even for those
10496 nanes which are used by the current architecture - it's simpler,
10497 and does no harm, since nothing ever lists user registers. */
10498 for (i
= 0; i
< ARRAY_SIZE (arm_register_aliases
); i
++)
10499 user_reg_add (gdbarch
, arm_register_aliases
[i
].name
,
10500 value_of_arm_user_reg
, &arm_register_aliases
[i
].regnum
);
10506 arm_dump_tdep (struct gdbarch
*gdbarch
, struct ui_file
*file
)
10508 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
10513 fprintf_unfiltered (file
, _("arm_dump_tdep: Lowest pc = 0x%lx"),
10514 (unsigned long) tdep
->lowest_pc
);
/* Provide a prototype to silence -Wmissing-prototypes.  */
extern initialize_file_ftype _initialize_arm_tdep; /* -Wmissing-prototypes */

/* Module initializer, run once at GDB startup.  Registers the ARM
   gdbarch with the architecture framework, attaches objfile data
   keys/observers for exception-table (exidx) handling, registers the
   ELF OS ABI sniffer, loads the builtin target descriptions, and
   creates all "set arm ..." / "show arm ..." CLI commands.  */

void
_initialize_arm_tdep (void)
{
  struct ui_file *stb;
  long length;
  struct cmd_list_element *new_set, *new_show;
  const char *setname;
  const char *setdesc;
  const char *const *regnames;
  int numregs, i, j;
  static char *helptext;
  char regdesc[1024], *rdptr = regdesc;
  size_t rest = sizeof (regdesc);

  gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);

  arm_objfile_data_key
    = register_objfile_data_with_cleanup (NULL, arm_objfile_data_free);

  /* Add ourselves to objfile event chain.  */
  observer_attach_new_objfile (arm_exidx_new_objfile);
  arm_exidx_data_key
    = register_objfile_data_with_cleanup (NULL, arm_exidx_data_free);

  /* Register an ELF OS ABI sniffer for ARM binaries.  */
  gdbarch_register_osabi_sniffer (bfd_arch_arm,
                                  bfd_target_elf_flavour,
                                  arm_elf_osabi_sniffer);

  /* Initialize the standard target descriptions.  */
  initialize_tdesc_arm_with_m ();
  initialize_tdesc_arm_with_m_fpa_layout ();
  initialize_tdesc_arm_with_m_vfp_d16 ();
  initialize_tdesc_arm_with_iwmmxt ();
  initialize_tdesc_arm_with_vfpv2 ();
  initialize_tdesc_arm_with_vfpv3 ();
  initialize_tdesc_arm_with_neon ();

  /* Get the number of possible sets of register names defined in opcodes.  */
  num_disassembly_options = get_arm_regname_num_options ();

  /* Add root prefix command for all "set arm"/"show arm" commands.  */
  add_prefix_cmd ("arm", no_class, set_arm_command,
                  _("Various ARM-specific commands."),
                  &setarmcmdlist, "set arm ", 0, &setlist);

  add_prefix_cmd ("arm", no_class, show_arm_command,
                  _("Various ARM-specific commands."),
                  &showarmcmdlist, "show arm ", 0, &showlist);

  /* Sync the opcode insn printer with our register viewer.  */
  parse_arm_disassembler_option ("reg-names-std");

  /* Initialize the array that will be passed to
     add_setshow_enum_cmd().  */
  valid_disassembly_styles
    = xmalloc ((num_disassembly_options + 1) * sizeof (char *));
  for (i = 0; i < num_disassembly_options; i++)
    {
      numregs = get_arm_regnames (i, &setname, &setdesc, &regnames);
      valid_disassembly_styles[i] = setname;
      length = snprintf (rdptr, rest, "%s - %s\n", setname, setdesc);
      rdptr += length;
      rest -= length;
      /* When we find the default names, tell the disassembler to use
         them.  */
      if (!strcmp (setname, "std"))
        {
          disassembly_style = setname;
          set_arm_regname_option (i);
        }
    }
  /* Mark the end of valid options.  */
  valid_disassembly_styles[num_disassembly_options] = NULL;

  /* Create the help text.  */
  stb = mem_fileopen ();
  fprintf_unfiltered (stb, "%s%s%s",
                      _("The valid values are:\n"),
                      regdesc,
                      _("The default is \"std\"."));
  helptext = ui_file_xstrdup (stb, NULL);
  ui_file_delete (stb);

  add_setshow_enum_cmd("disassembler", no_class,
                       valid_disassembly_styles, &disassembly_style,
                       _("Set the disassembly style."),
                       _("Show the disassembly style."),
                       helptext,
                       set_disassembly_style_sfunc,
                       NULL, /* FIXME: i18n: The disassembly style is
                                "%s".  */
                       &setarmcmdlist, &showarmcmdlist);

  add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
                           _("Set usage of ARM 32-bit mode."),
                           _("Show usage of ARM 32-bit mode."),
                           _("When off, a 26-bit PC will be used."),
                           NULL,
                           NULL, /* FIXME: i18n: Usage of ARM 32-bit
                                    mode is %s.  */
                           &setarmcmdlist, &showarmcmdlist);

  /* Add a command to allow the user to force the FPU model.  */
  add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
                        _("Set the floating point type."),
                        _("Show the floating point type."),
                        _("auto - Determine the FP typefrom the OS-ABI.\n\
softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
fpa - FPA co-processor (GCC compiled).\n\
softvfp - Software FP with pure-endian doubles.\n\
vfp - VFP co-processor."),
                        set_fp_model_sfunc, show_fp_model,
                        &setarmcmdlist, &showarmcmdlist);

  /* Add a command to allow the user to force the ABI.  */
  add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
                        _("Set the ABI."),
                        _("Show the ABI."),
                        NULL, arm_set_abi, arm_show_abi,
                        &setarmcmdlist, &showarmcmdlist);

  /* Add two commands to allow the user to force the assumed
     execution mode.  */
  add_setshow_enum_cmd ("fallback-mode", class_support,
                        arm_mode_strings, &arm_fallback_mode_string,
                        _("Set the mode assumed when symbols are unavailable."),
                        _("Show the mode assumed when symbols are unavailable."),
                        NULL, NULL, arm_show_fallback_mode,
                        &setarmcmdlist, &showarmcmdlist);
  add_setshow_enum_cmd ("force-mode", class_support,
                        arm_mode_strings, &arm_force_mode_string,
                        _("Set the mode assumed even when symbols are available."),
                        _("Show the mode assumed even when symbols are available."),
                        NULL, NULL, arm_show_force_mode,
                        &setarmcmdlist, &showarmcmdlist);

  /* Debugging flag.  */
  add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
                           _("Set ARM debugging."),
                           _("Show ARM debugging."),
                           _("When on, arm-specific debugging is enabled."),
                           NULL,
                           NULL, /* FIXME: i18n: "ARM debugging is %s.  */
                           &setdebuglist, &showdebuglist);
}
/* ARM-reversible process record data structures.  */

/* Instruction sizes (in bytes) used by the record/replay decoder.  */
#define ARM_INSN_SIZE_BYTES 4
#define THUMB_INSN_SIZE_BYTES 2
#define THUMB2_INSN_SIZE_BYTES 4

/* Position of the bit within a 32-bit ARM instruction
   that defines whether the instruction is a load or store.  */
#define INSN_S_L_BIT_NUM 20

/* Copy LENGTH register numbers from RECORD_BUF into a freshly
   XNEWVEC'd array assigned to REGS (no-op when LENGTH is 0).
   NOTE: LENGTH is evaluated twice — pass a side-effect-free
   expression.  Caller owns the allocated array.  */
#define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
        do  \
          { \
            unsigned int reg_len = LENGTH; \
            if (reg_len) \
              { \
                REGS = XNEWVEC (uint32_t, reg_len); \
                memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
              } \
          } \
        while (0)

/* Copy LENGTH memory records from RECORD_BUF into a freshly XNEWVEC'd
   array assigned to MEMS (no-op when LENGTH is 0).  RECORD_BUF is laid
   out as alternating length/address words, which must match the field
   layout of struct arm_mem_r for the memcpy into &MEMS->len to be
   valid.  NOTE: LENGTH is evaluated twice.  Caller owns the array.  */
#define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
        do  \
          { \
            unsigned int mem_len = LENGTH; \
            if (mem_len) \
              { \
                MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
                memcpy(&MEMS->len, &RECORD_BUF[0], \
                       sizeof(struct arm_mem_r) * LENGTH); \
              } \
          } \
        while (0)

/* Checks whether insn is already recorded or yet to be decoded. (boolean expression).  */
#define INSN_RECORDED(ARM_RECORD) \
        (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
/* ARM memory record structure.  One record describes a contiguous
   span of memory that an instruction is about to overwrite.  The
   field order (len before addr) is relied upon by MEM_ALLOC's bulk
   memcpy from its flat length/address buffer.  */
struct arm_mem_r
{
  uint32_t len;    /* Record length.  */
  uint32_t addr;   /* Memory address.  */
};

/* ARM instruction record contains opcode of current insn
   and execution state (before entry to decode_insn()),
   contains list of to-be-modified registers and
   memory blocks (on return from decode_insn()).  */

typedef struct insn_decode_record_t
{
  struct gdbarch *gdbarch;
  struct regcache *regcache;
  CORE_ADDR this_addr;          /* Address of the insn being decoded.  */
  uint32_t arm_insn;            /* Should accommodate thumb.  */
  uint32_t cond;                /* Condition code.  */
  uint32_t opcode;              /* Insn opcode.  */
  uint32_t decode;              /* Insn decode bits.  */
  uint32_t mem_rec_count;       /* No of mem records.  */
  uint32_t reg_rec_count;       /* No of reg records.  */
  uint32_t *arm_regs;           /* Registers to be saved for this record.  */
  struct arm_mem_r *arm_mems;   /* Memory to be saved for this record.  */
} insn_decode_record;
/* Checks ARM SBZ and SBO mandatory fields.

   INSN    - the instruction word.
   BIT_NUM - 1-based position of the first bit of the field.
   LEN     - number of bits in the field.
   SBO     - non-zero: field must be "should-be-one";
             zero: field must be "should-be-zero".
   Returns non-zero when the field satisfies the constraint.

   NOTE(review): body below the `ones' extraction was elided by the
   source extraction and has been restored from the upstream GDB
   sources — verify against the project history.  */

static int
sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
{
  uint32_t ones = bits (insn, bit_num - 1, (bit_num -1) + (len - 1));

  if (!len)
    return 1;

  /* For SBZ, invert so that the check below uniformly requires
     every bit of the field to be set.  */
  if (!sbo)
    ones = ~ones;

  while (ones)
    {
      if (!(ones & sbo))
        {
          return 0;
        }
      ones = ones >> 1;
    }

  return 1;
}
/* Result codes returned by the ARM process-record helpers.  */
enum arm_record_result
{
  ARM_RECORD_SUCCESS = 0,
  ARM_RECORD_FAILURE = 1
};

/* Store flavours handled by arm_record_strx: halfword store (STRH)
   or doubleword store (STRD).  */
typedef enum
{
  ARM_RECORD_STRH=1,
  ARM_RECORD_STRD
} arm_record_strx_t;
/* Record the memory and register side effects of an ARM "extra
   load/store" store instruction (STRH/STRD) for process record.

   ARM_INSN_R     - decode state; its opcode/decode fields are
                    (re)computed here from the raw instruction.
   RECORD_BUF     - out: register numbers the insn clobbers (Rn for
                    the writeback addressing modes).
   RECORD_BUF_MEM - out: (length, address) pairs for memory written;
                    consumed by MEM_ALLOC in the caller.
   STR_TYPE       - ARM_RECORD_STRH records one 2-byte span,
                    ARM_RECORD_STRD records two 4-byte spans.  */

static void
arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
                 uint32_t *record_buf_mem, arm_record_strx_t str_type)
{
  struct regcache *reg_cache = arm_insn_r->regcache;
  ULONGEST u_regval[2]= {0};

  uint32_t reg_src1 = 0, reg_src2 = 0;
  uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
  uint32_t opcode1 = 0;

  /* Cache the P/U/W/op field (bits 21-24) and decode nibble (4-7).  */
  arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
  arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
  opcode1 = bits (arm_insn_r->arm_insn, 20, 24);

  if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
    {
      /* 1) Handle misc store, immediate offset.  */
      immed_low = bits (arm_insn_r->arm_insn, 0, 3);
      immed_high = bits (arm_insn_r->arm_insn, 8, 11);
      reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
      regcache_raw_read_unsigned (reg_cache, reg_src1,
                                  &u_regval[0]);
      if (ARM_PC_REGNUM == reg_src1)
        {
          /* If R15 was used as Rn, hence current PC+8.  */
          u_regval[0] = u_regval[0] + 8;
        }
      offset_8 = (immed_high << 4) | immed_low;
      /* Calculate target store address.  */
      if (14 == arm_insn_r->opcode)
        {
          tgt_mem_addr = u_regval[0] + offset_8;
        }
      else
        {
          tgt_mem_addr = u_regval[0] - offset_8;
        }
      if (ARM_RECORD_STRH == str_type)
        {
          record_buf_mem[0] = 2;
          record_buf_mem[1] = tgt_mem_addr;
          arm_insn_r->mem_rec_count = 1;
        }
      else if (ARM_RECORD_STRD == str_type)
        {
          record_buf_mem[0] = 4;
          record_buf_mem[1] = tgt_mem_addr;
          record_buf_mem[2] = 4;
          record_buf_mem[3] = tgt_mem_addr + 4;
          arm_insn_r->mem_rec_count = 2;
        }
    }
  else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
    {
      /* 2) Store, register offset.  */
      /* Rm is in bits 0-3, Rn in bits 16-19.  */
      reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
      reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
      regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
      regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
      if (15 == reg_src2)
        {
          /* If R15 was used as Rn, hence current PC+8.  */
          /* NOTE(review): the adjustment is applied to u_regval[0]
             (Rm's value) although the test is on Rn (reg_src2) —
             looks like it should adjust u_regval[1]; confirm against
             the ARM ARM and upstream history.  */
          u_regval[0] = u_regval[0] + 8;
        }
      /* Calculate target store address, Rn +/- Rm, register offset.  */
      if (12 == arm_insn_r->opcode)
        {
          tgt_mem_addr = u_regval[0] + u_regval[1];
        }
      else
        {
          tgt_mem_addr = u_regval[1] - u_regval[0];
        }
      if (ARM_RECORD_STRH == str_type)
        {
          record_buf_mem[0] = 2;
          record_buf_mem[1] = tgt_mem_addr;
          arm_insn_r->mem_rec_count = 1;
        }
      else if (ARM_RECORD_STRD == str_type)
        {
          record_buf_mem[0] = 4;
          record_buf_mem[1] = tgt_mem_addr;
          record_buf_mem[2] = 4;
          record_buf_mem[3] = tgt_mem_addr + 4;
          arm_insn_r->mem_rec_count = 2;
        }
    }
  else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
           || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
    {
      /* 3) Store, immediate pre-indexed.  */
      /* 5) Store, immediate post-indexed.  */
      immed_low = bits (arm_insn_r->arm_insn, 0, 3);
      immed_high = bits (arm_insn_r->arm_insn, 8, 11);
      offset_8 = (immed_high << 4) | immed_low;
      reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
      regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
      /* Calculate target store address, Rn +/- Rm, register offset.  */
      if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
        {
          tgt_mem_addr = u_regval[0] + offset_8;
        }
      else
        {
          tgt_mem_addr = u_regval[0] - offset_8;
        }
      if (ARM_RECORD_STRH == str_type)
        {
          record_buf_mem[0] = 2;
          record_buf_mem[1] = tgt_mem_addr;
          arm_insn_r->mem_rec_count = 1;
        }
      else if (ARM_RECORD_STRD == str_type)
        {
          record_buf_mem[0] = 4;
          record_buf_mem[1] = tgt_mem_addr;
          record_buf_mem[2] = 4;
          record_buf_mem[3] = tgt_mem_addr + 4;
          arm_insn_r->mem_rec_count = 2;
        }
      /* Record Rn also as it changes.  */
      *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
      arm_insn_r->reg_rec_count = 1;
    }
  else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
           || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
    {
      /* 4) Store, register pre-indexed.  */
      /* 6) Store, register post -indexed.  */
      reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
      reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
      regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
      regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
      /* Calculate target store address, Rn +/- Rm, register offset.  */
      if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
        {
          tgt_mem_addr = u_regval[0] + u_regval[1];
        }
      else
        {
          tgt_mem_addr = u_regval[1] - u_regval[0];
        }
      if (ARM_RECORD_STRH == str_type)
        {
          record_buf_mem[0] = 2;
          record_buf_mem[1] = tgt_mem_addr;
          arm_insn_r->mem_rec_count = 1;
        }
      else if (ARM_RECORD_STRD == str_type)
        {
          record_buf_mem[0] = 4;
          record_buf_mem[1] = tgt_mem_addr;
          record_buf_mem[2] = 4;
          record_buf_mem[3] = tgt_mem_addr + 4;
          arm_insn_r->mem_rec_count = 2;
        }
      /* Record Rn also as it changes.  */
      *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
      arm_insn_r->reg_rec_count = 1;
    }
}
/* Handling ARM extension space insns.  */

/* Record insns from the ARM "extension spaces": unconditional,
   arithmetic (multiplies), control (MRS/MSR/BX/BLX/CLZ/BKPT/
   saturating and signed multiplies), extra load/store (SWP/STRH/
   LDRD/STRD/LDRH...) and coprocessor encodings.  Must only be called
   before anything has been recorded (asserted below).  Fills
   ARM_INSN_R's register/memory record lists via REG_ALLOC/MEM_ALLOC.
   Returns 0 on success, -1 for unsupported insns.  */

static int
arm_record_extension_space (insn_decode_record *arm_insn_r)
{
  /* NOTE(review): ret is uint32_t yet is assigned -1 and returned
     through an int return type; relies on wraparound/truncation —
     works, but an int would be cleaner.  */
  uint32_t ret = 0;  /* Return value: -1:record failure ;  0:success  */
  uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
  uint32_t record_buf[8], record_buf_mem[8];
  uint32_t reg_src1 = 0;
  uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
  struct regcache *reg_cache = arm_insn_r->regcache;
  ULONGEST u_regval = 0;

  gdb_assert (!INSN_RECORDED(arm_insn_r));

  /* Handle unconditional insn extension space.  */

  opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
  opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
  if (arm_insn_r->cond)
    {
      /* PLD has no affect on architectural state, it just affects
         the caches.  */
      if (5 == ((opcode1 & 0xE0) >> 5))
        {
          /* BLX(1): CPSR and the link register change.  */
          record_buf[0] = ARM_PS_REGNUM;
          record_buf[1] = ARM_LR_REGNUM;
          arm_insn_r->reg_rec_count = 2;
        }
      /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn.  */
    }

  opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
  if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
    {
      ret = -1;
      /* Undefined instruction on ARM V5; need to handle if later
         versions define it.  */
    }

  opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
  opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
  insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);

  /* Handle arithmetic insn extension space.  */
  if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
      && !INSN_RECORDED(arm_insn_r))
    {
      /* Handle MLA(S) and MUL(S).  */
      /* NOTE(review): insn_op1 is unsigned, so "0 <= insn_op1" is
         always true.  */
      if (0 <= insn_op1 && 3 >= insn_op1)
        {
          record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
          record_buf[1] = ARM_PS_REGNUM;
          arm_insn_r->reg_rec_count = 2;
        }
      else if (4 <= insn_op1 && 15 >= insn_op1)
        {
          /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S).  */
          record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
          record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
          record_buf[2] = ARM_PS_REGNUM;
          arm_insn_r->reg_rec_count = 3;
        }
    }

  opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
  opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
  insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);

  /* Handle control insn extension space.  */

  if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
      && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
    {
      if (!bit (arm_insn_r->arm_insn,25))
        {
          if (!bits (arm_insn_r->arm_insn, 4, 7))
            {
              if ((0 == insn_op1) || (2 == insn_op1))
                {
                  /* MRS: Rd (bits 12-15) receives the status register.  */
                  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
                  arm_insn_r->reg_rec_count = 1;
                }
              else if (1 == insn_op1)
                {
                  /* CSPR is going to be changed.  */
                  record_buf[0] = ARM_PS_REGNUM;
                  arm_insn_r->reg_rec_count = 1;
                }
              else if (3 == insn_op1)
                {
                  /* SPSR is going to be changed.  */
                  /* We need to get SPSR value, which is yet to be done.  */
                  printf_unfiltered (_("Process record does not support "
                                       "instruction 0x%0x at address %s.\n"),
                                     arm_insn_r->arm_insn,
                                     paddress (arm_insn_r->gdbarch,
                                               arm_insn_r->this_addr));
                  ret = -1;
                }
            }
          else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
            {
              /* BX: only CPSR (the T bit) is architecturally written.  */
              record_buf[0] = ARM_PS_REGNUM;
              arm_insn_r->reg_rec_count = 1;
            }
          else if (3 == insn_op1)
            {
              /* Rd is written (CLZ-style encoding).  NOTE(review):
                 CLZ proper has bits 4-7 == 1 and is caught by the
                 branch above; confirm which encodings reach here.  */
              record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
              arm_insn_r->reg_rec_count = 1;
            }
          else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
            {
              /* BLX (register): CPSR and LR change.  */
              record_buf[0] = ARM_PS_REGNUM;
              record_buf[1] = ARM_LR_REGNUM;
              arm_insn_r->reg_rec_count = 2;
            }
          else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
            {
              /* QADD, QSUB, QDADD, QDSUB */
              record_buf[0] = ARM_PS_REGNUM;
              record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
              arm_insn_r->reg_rec_count = 2;
            }
          else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
            {
              /* BKPT: CPSR and LR change on entry to abort mode.  */
              record_buf[0] = ARM_PS_REGNUM;
              record_buf[1] = ARM_LR_REGNUM;
              arm_insn_r->reg_rec_count = 2;

              /* Save SPSR also;how?  */
              printf_unfiltered (_("Process record does not support "
                                   "instruction 0x%0x at address %s.\n"),
                                 arm_insn_r->arm_insn,
                                 paddress (arm_insn_r->gdbarch,
                                           arm_insn_r->this_addr));
              ret = -1;
            }
          else if(8 == bits (arm_insn_r->arm_insn, 4, 7)
                  || 10 == bits (arm_insn_r->arm_insn, 4, 7)
                  || 12 == bits (arm_insn_r->arm_insn, 4, 7)
                  || 14 == bits (arm_insn_r->arm_insn, 4, 7)
                 )
            {
              if (0 == insn_op1 || 1 == insn_op1)
                {
                  /* SMLA<x><y>, SMLAW<y>, SMULW<y>.  */
                  /* We dont do optimization for SMULW<y> where we
                     would need only the destination register.  */
                  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
                  record_buf[1] = ARM_PS_REGNUM;
                  arm_insn_r->reg_rec_count = 2;
                }
              else if (2 == insn_op1)
                {
                  /* SMLAL<x><y>: both halves of the accumulator.  */
                  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
                  record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
                  arm_insn_r->reg_rec_count = 2;
                }
              else if (3 == insn_op1)
                {
                  /* SMUL<x><y>: only Rd.  */
                  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
                  arm_insn_r->reg_rec_count = 1;
                }
            }
        }
      else
        {
          /* MSR : immediate form.  */
          if (1 == insn_op1)
            {
              /* CSPR is going to be changed.  */
              record_buf[0] = ARM_PS_REGNUM;
              arm_insn_r->reg_rec_count = 1;
            }
          else if (3 == insn_op1)
            {
              /* SPSR is going to be changed.  */
              /* we need to get SPSR value, which is yet to be done  */
              printf_unfiltered (_("Process record does not support "
                                   "instruction 0x%0x at address %s.\n"),
                                 arm_insn_r->arm_insn,
                                 paddress (arm_insn_r->gdbarch,
                                           arm_insn_r->this_addr));
              ret = -1;
            }
        }
    }

  opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
  opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
  insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);

  /* Handle load/store insn extension space.  */

  if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
      && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
      && !INSN_RECORDED(arm_insn_r))
    {
      if (!insn_op1)
        {
          /* These insn, changes register and memory as well.  */
          /* SWP or SWPB insn.  */
          /* Get memory address given by Rn.  */
          reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
          regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);

          /* SWP insn ?, swaps word.  */
          if (8 == arm_insn_r->opcode)
            {
              record_buf_mem[0] = 4;
            }
          else
            {
              /* SWPB insn, swaps only byte.  */
              record_buf_mem[0] = 1;
            }
          record_buf_mem[1] = u_regval;
          arm_insn_r->mem_rec_count = 1;
          record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
          arm_insn_r->reg_rec_count = 1;
        }
      else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
        {
          /* STRH.  */
          arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
                          ARM_RECORD_STRH);
        }
      else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
        {
          /* LDRD: loads Rt and Rt+1.  */
          record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
          record_buf[1] = record_buf[0] + 1;
          arm_insn_r->reg_rec_count = 2;
        }
      else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
        {
          /* STRD.  */
          arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
                          ARM_RECORD_STRD);
        }
      else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
        {
          /* LDRH, LDRSB, LDRSH.  */
          record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
          arm_insn_r->reg_rec_count = 1;
        }
    }

  opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
  if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
      && !INSN_RECORDED(arm_insn_r))
    {
      ret = -1;
      /* Handle coprocessor insn extension space.  */
    }

  /* To be done for ARMv5 and later; as of now we return -1.  */
  if (-1 == ret)
    printf_unfiltered (_("Process record does not support instruction x%0x "
                         "at address %s.\n"),arm_insn_r->arm_insn,
                       paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));

  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);

  return ret;
}
/* Handling opcode 000 insns.  */

/* Record data-processing, multiply, MRS/MSR, SWP/SWPB, BX/BLX, CLZ,
   BKPT and enhanced store insns from the ARM opcode-000 space.
   Fills ARM_INSN_R's register/memory record lists.  Returns 0 on
   success, -1 when the insn cannot be recorded.  */

static int
arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
{
  struct regcache *reg_cache = arm_insn_r->regcache;
  uint32_t record_buf[8], record_buf_mem[8];
  ULONGEST u_regval[2] = {0};

  uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
  uint32_t immed_high = 0, immed_low = 0, offset_8 = 0, tgt_mem_addr = 0;
  uint32_t opcode1 = 0;

  arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
  arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
  opcode1 = bits (arm_insn_r->arm_insn, 20, 24);

  /* Data processing insn /multiply insn.  */
  if (9 == arm_insn_r->decode
      && ((4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
          || (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)))
    {
      /* Handle multiply instructions.  */
      /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
      if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
        {
          /* Handle MLA and MUL.  */
          record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
          record_buf[1] = ARM_PS_REGNUM;
          arm_insn_r->reg_rec_count = 2;
        }
      else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
        {
          /* Handle SMLAL, SMULL, UMLAL, UMULL.  */
          record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
          record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
          record_buf[2] = ARM_PS_REGNUM;
          arm_insn_r->reg_rec_count = 3;
        }
    }
  else if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
           && (11 == arm_insn_r->decode || 13 == arm_insn_r->decode))
    {
      /* Handle misc load insns, as 20th bit (L = 1).  */
      /* LDR insn has a capability to do branching, if
         MOV LR, PC is precceded by LDR insn having Rn as R15
         in that case, it emulates branch and link insn, and hence we
         need to save CSPR and PC as well. I am not sure this is right
         place; as opcode = 010 LDR insn make this happen, if R15 was
         used.  */
      reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
      if (15 != reg_dest)
        {
          record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
          arm_insn_r->reg_rec_count = 1;
        }
      else
        {
          /* Load into PC: also record CPSR (branch emulation).  */
          record_buf[0] = reg_dest;
          record_buf[1] = ARM_PS_REGNUM;
          arm_insn_r->reg_rec_count = 2;
        }
    }
  else if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
           && sbo_sbz (arm_insn_r->arm_insn, 5, 12, 0)
           && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
           && 2 == bits (arm_insn_r->arm_insn, 20, 21))
    {
      /* Handle MSR insn.  */
      if (9 == arm_insn_r->opcode)
        {
          /* CSPR is going to be changed.  */
          record_buf[0] = ARM_PS_REGNUM;
          arm_insn_r->reg_rec_count = 1;
        }
      else
        {
          /* SPSR is going to be changed.  */
          /* How to read SPSR value?  */
          printf_unfiltered (_("Process record does not support instruction "
                               "0x%0x at address %s.\n"),
                             arm_insn_r->arm_insn,
                             paddress (arm_insn_r->gdbarch,
                                       arm_insn_r->this_addr));
          return -1;
        }
    }
  else if (9 == arm_insn_r->decode
           && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
           && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
    {
      /* Handling SWP, SWPB.  */
      /* These insn, changes register and memory as well.  */
      /* SWP or SWPB insn.  */

      reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
      regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);

      /* SWP insn ?, swaps word.  */
      if (8 == arm_insn_r->opcode)
        {
          record_buf_mem[0] = 4;
        }
      else
        {
          /* SWPB insn, swaps only byte.  */
          record_buf_mem[0] = 1;
        }

      record_buf_mem[1] = u_regval[0];
      arm_insn_r->mem_rec_count = 1;

      record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
      arm_insn_r->reg_rec_count = 1;
    }
  else if (3 == arm_insn_r->decode && 0x12 == opcode1
           && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
    {
      /* Handle BLX, branch and link/exchange.  */
      if (9 == arm_insn_r->opcode)
        {
          /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm,
             and R14 stores the return address.  */
          record_buf[0] = ARM_PS_REGNUM;
          record_buf[1] = ARM_LR_REGNUM;
          arm_insn_r->reg_rec_count = 2;
        }
    }
  else if (7 == arm_insn_r->decode && 0x12 == opcode1)
    {
      /* Handle enhanced software breakpoint insn, BKPT.  */
      /* CPSR is changed to be executed in ARM state, disabling normal
         interrupts, entering abort mode.  */
      /* According to high vector configuration PC is set.  */
      /* user hit breakpoint and type reverse, in
         that case, we need to go back with previous CPSR and
         Program Counter.  */
      record_buf[0] = ARM_PS_REGNUM;
      record_buf[1] = ARM_LR_REGNUM;
      arm_insn_r->reg_rec_count = 2;

      /* Save SPSR also; how?  */
      printf_unfiltered (_("Process record does not support instruction "
                           "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
                         paddress (arm_insn_r->gdbarch,
                                   arm_insn_r->this_addr));
      return -1;
    }
  else if (11 == arm_insn_r->decode
           && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
    {
      /* Handle enhanced store insns and DSP insns (e.g. LDRD).  */

      /* Handle str(x) insn */
      arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
                      ARM_RECORD_STRH);
    }
  else if (1 == arm_insn_r->decode && 0x12 == opcode1
           && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
    {
      /* Handle BX, branch and link/exchange.  */
      /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm.  */
      record_buf[0] = ARM_PS_REGNUM;
      arm_insn_r->reg_rec_count = 1;
    }
  else if (1 == arm_insn_r->decode && 0x16 == opcode1
           && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
           && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
    {
      /* Count leading zeros: CLZ.  */
      record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
      arm_insn_r->reg_rec_count = 1;
    }
  else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
           && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
           && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
           && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0)
          )
    {
      /* Handle MRS insn.  */
      record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
      arm_insn_r->reg_rec_count = 1;
    }
  else if (arm_insn_r->opcode <= 15)
    {
      /* Normal data processing insns.  */
      /* Out of 11 shifter operands mode, all the insn modifies destination
         register, which is specified by 13-16 decode.  */
      record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
      record_buf[1] = ARM_PS_REGNUM;
      arm_insn_r->reg_rec_count = 2;
    }
  else
    {
      return -1;
    }

  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
  return 0;
}
/* Handling opcode 001 insns.  */

/* Record data-processing-immediate insns (including MSR immediate).
   Fills ARM_INSN_R's register record list.  Returns 0 on success,
   -1 for encodings that cannot be recorded.  */

static int
arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
{
  uint32_t record_buf[8], record_buf_mem[8];

  arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
  arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);

  if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
      && 2 == bits (arm_insn_r->arm_insn, 20, 21)
      && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
     )
    {
      /* Handle MSR insn.  */
      if (9 == arm_insn_r->opcode)
        {
          /* CSPR is going to be changed.  */
          record_buf[0] = ARM_PS_REGNUM;
          arm_insn_r->reg_rec_count = 1;
        }
      else
        {
          /* SPSR is going to be changed.  */
        }
    }
  else if (arm_insn_r->opcode <= 15)
    {
      /* Normal data processing insns.  */
      /* Out of 11 shifter operands mode, all the insn modifies destination
         register, which is specified by 13-16 decode.  */
      record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
      record_buf[1] = ARM_PS_REGNUM;
      arm_insn_r->reg_rec_count = 2;
    }
  else
    {
      return -1;
    }

  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
  return 0;
}
/* Handle ARM mode instructions with opcode 010.  */

/* Record load/store with immediate offset (LDR/LDRB/STR/STRB and
   their translated/literal variants).  For loads, the destination
   register (plus CPSR for PC loads and the base register on
   writeback) is recorded; for stores, the written memory span and
   the base register on writeback are recorded.  Returns 0.  */

static int
arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
{
  struct regcache *reg_cache = arm_insn_r->regcache;

  uint32_t reg_base, reg_dest;
  uint32_t offset_12, tgt_mem_addr;
  uint32_t record_buf[8], record_buf_mem[8];
  unsigned char wback;
  ULONGEST u_regval;

  /* Calculate wback: base is written back when P == 0 or W == 1.  */
  wback = (bit (arm_insn_r->arm_insn, 24) == 0)
          || (bit (arm_insn_r->arm_insn, 21) == 1);

  arm_insn_r->reg_rec_count = 0;
  reg_base = bits (arm_insn_r->arm_insn, 16, 19);

  if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
    {
      /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
         and LDRT.  */

      reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
      record_buf[arm_insn_r->reg_rec_count++] = reg_dest;

      /* The LDR instruction is capable of doing branching.  If MOV LR, PC
         preceeds a LDR instruction having R15 as reg_base, it
         emulates a branch and link instruction, and hence we need to save
         CPSR and PC as well.  */
      if (ARM_PC_REGNUM == reg_dest)
        record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;

      /* If wback is true, also save the base register, which is going to be
         written back.  */
      if (wback)
        record_buf[arm_insn_r->reg_rec_count++] = reg_base;
    }
  else
    {
      /* STR (immediate), STRB (immediate), STRBT and STRT.  */

      offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
      regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);

      /* Handle bit U.  */
      if (bit (arm_insn_r->arm_insn, 23))
        {
          /* U == 1: Add the offset. */
          tgt_mem_addr = (uint32_t) u_regval + offset_12;
        }
      else
        {
          /* U == 0: subtract the offset. */
          tgt_mem_addr = (uint32_t) u_regval - offset_12;
        }

      /* Bit 22 tells us whether the store instruction writes 1 byte or 4
         bytes.  */
      if (bit (arm_insn_r->arm_insn, 22))
        {
          /* STRB and STRBT: 1 byte.  */
          record_buf_mem[0] = 1;
        }
      else
        {
          /* STR and STRT: 4 bytes.  */
          record_buf_mem[0] = 4;
        }

      /* Handle bit P: pre-indexed stores write at the offset address,
         post-indexed stores write at the unmodified base.  */
      if (bit (arm_insn_r->arm_insn, 24))
        record_buf_mem[1] = tgt_mem_addr;
      else
        record_buf_mem[1] = (uint32_t) u_regval;

      arm_insn_r->mem_rec_count = 1;

      /* If wback is true, also save the base register, which is going to be
         written back.  */
      if (wback)
        record_buf[arm_insn_r->reg_rec_count++] = reg_base;
    }

  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
  return 0;
}
11566 arm_record_ld_st_reg_offset (insn_decode_record
*arm_insn_r
)
11568 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
11570 uint32_t shift_imm
= 0;
11571 uint32_t reg_src1
= 0, reg_src2
= 0, reg_dest
= 0;
11572 uint32_t offset_12
= 0, tgt_mem_addr
= 0;
11573 uint32_t record_buf
[8], record_buf_mem
[8];
11576 ULONGEST u_regval
[2];
11578 arm_insn_r
->opcode
= bits (arm_insn_r
->arm_insn
, 21, 24);
11579 arm_insn_r
->decode
= bits (arm_insn_r
->arm_insn
, 4, 7);
11581 /* Handle enhanced store insns and LDRD DSP insn,
11582 order begins according to addressing modes for store insns
11586 if (bit (arm_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
11588 reg_dest
= bits (arm_insn_r
->arm_insn
, 12, 15);
11589 /* LDR insn has a capability to do branching, if
11590 MOV LR, PC is precedded by LDR insn having Rn as R15
11591 in that case, it emulates branch and link insn, and hence we
11592 need to save CSPR and PC as well. */
11593 if (15 != reg_dest
)
11595 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11596 arm_insn_r
->reg_rec_count
= 1;
11600 record_buf
[0] = reg_dest
;
11601 record_buf
[1] = ARM_PS_REGNUM
;
11602 arm_insn_r
->reg_rec_count
= 2;
11607 if (! bits (arm_insn_r
->arm_insn
, 4, 11))
11609 /* Store insn, register offset and register pre-indexed,
11610 register post-indexed. */
11612 reg_src1
= bits (arm_insn_r
->arm_insn
, 0, 3);
11614 reg_src2
= bits (arm_insn_r
->arm_insn
, 16, 19);
11615 regcache_raw_read_unsigned (reg_cache
, reg_src1
11617 regcache_raw_read_unsigned (reg_cache
, reg_src2
11619 if (15 == reg_src2
)
11621 /* If R15 was used as Rn, hence current PC+8. */
11622 /* Pre-indexed mode doesnt reach here ; illegal insn. */
11623 u_regval
[0] = u_regval
[0] + 8;
11625 /* Calculate target store address, Rn +/- Rm, register offset. */
11627 if (bit (arm_insn_r
->arm_insn
, 23))
11629 tgt_mem_addr
= u_regval
[0] + u_regval
[1];
11633 tgt_mem_addr
= u_regval
[1] - u_regval
[0];
11636 switch (arm_insn_r
->opcode
)
11650 record_buf_mem
[0] = 4;
11665 record_buf_mem
[0] = 1;
11669 gdb_assert_not_reached ("no decoding pattern found");
11672 record_buf_mem
[1] = tgt_mem_addr
;
11673 arm_insn_r
->mem_rec_count
= 1;
11675 if (9 == arm_insn_r
->opcode
|| 11 == arm_insn_r
->opcode
11676 || 13 == arm_insn_r
->opcode
|| 15 == arm_insn_r
->opcode
11677 || 0 == arm_insn_r
->opcode
|| 2 == arm_insn_r
->opcode
11678 || 4 == arm_insn_r
->opcode
|| 6 == arm_insn_r
->opcode
11679 || 1 == arm_insn_r
->opcode
|| 3 == arm_insn_r
->opcode
11680 || 5 == arm_insn_r
->opcode
|| 7 == arm_insn_r
->opcode
11683 /* Rn is going to be changed in pre-indexed mode and
11684 post-indexed mode as well. */
11685 record_buf
[0] = reg_src2
;
11686 arm_insn_r
->reg_rec_count
= 1;
11691 /* Store insn, scaled register offset; scaled pre-indexed. */
11692 offset_12
= bits (arm_insn_r
->arm_insn
, 5, 6);
11694 reg_src1
= bits (arm_insn_r
->arm_insn
, 0, 3);
11696 reg_src2
= bits (arm_insn_r
->arm_insn
, 16, 19);
11697 /* Get shift_imm. */
11698 shift_imm
= bits (arm_insn_r
->arm_insn
, 7, 11);
11699 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
[0]);
11700 regcache_raw_read_signed (reg_cache
, reg_src1
, &s_word
);
11701 regcache_raw_read_unsigned (reg_cache
, reg_src2
, &u_regval
[1]);
11702 /* Offset_12 used as shift. */
11706 /* Offset_12 used as index. */
11707 offset_12
= u_regval
[0] << shift_imm
;
11711 offset_12
= (!shift_imm
)?0:u_regval
[0] >> shift_imm
;
11717 if (bit (u_regval
[0], 31))
11719 offset_12
= 0xFFFFFFFF;
11728 /* This is arithmetic shift. */
11729 offset_12
= s_word
>> shift_imm
;
11736 regcache_raw_read_unsigned (reg_cache
, ARM_PS_REGNUM
,
11738 /* Get C flag value and shift it by 31. */
11739 offset_12
= (((bit (u_regval
[1], 29)) << 31) \
11740 | (u_regval
[0]) >> 1);
11744 offset_12
= (u_regval
[0] >> shift_imm
) \
11746 (sizeof(uint32_t) - shift_imm
));
11751 gdb_assert_not_reached ("no decoding pattern found");
11755 regcache_raw_read_unsigned (reg_cache
, reg_src2
, &u_regval
[1]);
11757 if (bit (arm_insn_r
->arm_insn
, 23))
11759 tgt_mem_addr
= u_regval
[1] + offset_12
;
11763 tgt_mem_addr
= u_regval
[1] - offset_12
;
11766 switch (arm_insn_r
->opcode
)
11780 record_buf_mem
[0] = 4;
11795 record_buf_mem
[0] = 1;
11799 gdb_assert_not_reached ("no decoding pattern found");
11802 record_buf_mem
[1] = tgt_mem_addr
;
11803 arm_insn_r
->mem_rec_count
= 1;
11805 if (9 == arm_insn_r
->opcode
|| 11 == arm_insn_r
->opcode
11806 || 13 == arm_insn_r
->opcode
|| 15 == arm_insn_r
->opcode
11807 || 0 == arm_insn_r
->opcode
|| 2 == arm_insn_r
->opcode
11808 || 4 == arm_insn_r
->opcode
|| 6 == arm_insn_r
->opcode
11809 || 1 == arm_insn_r
->opcode
|| 3 == arm_insn_r
->opcode
11810 || 5 == arm_insn_r
->opcode
|| 7 == arm_insn_r
->opcode
11813 /* Rn is going to be changed in register scaled pre-indexed
11814 mode,and scaled post indexed mode. */
11815 record_buf
[0] = reg_src2
;
11816 arm_insn_r
->reg_rec_count
= 1;
11821 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
11822 MEM_ALLOC (arm_insn_r
->arm_mems
, arm_insn_r
->mem_rec_count
, record_buf_mem
);
11826 /* Handle ARM mode instructions with opcode 100. */
11829 arm_record_ld_st_multiple (insn_decode_record
*arm_insn_r
)
11831 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
11832 uint32_t register_count
= 0, register_bits
;
11833 uint32_t reg_base
, addr_mode
;
11834 uint32_t record_buf
[24], record_buf_mem
[48];
11838 /* Fetch the list of registers. */
11839 register_bits
= bits (arm_insn_r
->arm_insn
, 0, 15);
11840 arm_insn_r
->reg_rec_count
= 0;
11842 /* Fetch the base register that contains the address we are loading data
11844 reg_base
= bits (arm_insn_r
->arm_insn
, 16, 19);
11846 /* Calculate wback. */
11847 wback
= (bit (arm_insn_r
->arm_insn
, 21) == 1);
11849 if (bit (arm_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
11851 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
11853 /* Find out which registers are going to be loaded from memory. */
11854 while (register_bits
)
11856 if (register_bits
& 0x00000001)
11857 record_buf
[arm_insn_r
->reg_rec_count
++] = register_count
;
11858 register_bits
= register_bits
>> 1;
11863 /* If wback is true, also save the base register, which is going to be
11866 record_buf
[arm_insn_r
->reg_rec_count
++] = reg_base
;
11868 /* Save the CPSR register. */
11869 record_buf
[arm_insn_r
->reg_rec_count
++] = ARM_PS_REGNUM
;
11873 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
11875 addr_mode
= bits (arm_insn_r
->arm_insn
, 23, 24);
11877 regcache_raw_read_unsigned (reg_cache
, reg_base
, &u_regval
);
11879 /* Find out how many registers are going to be stored to memory. */
11880 while (register_bits
)
11882 if (register_bits
& 0x00000001)
11884 register_bits
= register_bits
>> 1;
11889 /* STMDA (STMED): Decrement after. */
11891 record_buf_mem
[1] = (uint32_t) u_regval
11892 - register_count
* INT_REGISTER_SIZE
+ 4;
11894 /* STM (STMIA, STMEA): Increment after. */
11896 record_buf_mem
[1] = (uint32_t) u_regval
;
11898 /* STMDB (STMFD): Decrement before. */
11900 record_buf_mem
[1] = (uint32_t) u_regval
11901 - register_count
* INT_REGISTER_SIZE
;
11903 /* STMIB (STMFA): Increment before. */
11905 record_buf_mem
[1] = (uint32_t) u_regval
+ INT_REGISTER_SIZE
;
11908 gdb_assert_not_reached ("no decoding pattern found");
11912 record_buf_mem
[0] = register_count
* INT_REGISTER_SIZE
;
11913 arm_insn_r
->mem_rec_count
= 1;
11915 /* If wback is true, also save the base register, which is going to be
11918 record_buf
[arm_insn_r
->reg_rec_count
++] = reg_base
;
11921 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
11922 MEM_ALLOC (arm_insn_r
->arm_mems
, arm_insn_r
->mem_rec_count
, record_buf_mem
);
11926 /* Handling opcode 101 insns. */
11929 arm_record_b_bl (insn_decode_record
*arm_insn_r
)
11931 uint32_t record_buf
[8];
11933 /* Handle B, BL, BLX(1) insns. */
11934 /* B simply branches so we do nothing here. */
11935 /* Note: BLX(1) doesnt fall here but instead it falls into
11936 extension space. */
11937 if (bit (arm_insn_r
->arm_insn
, 24))
11939 record_buf
[0] = ARM_LR_REGNUM
;
11940 arm_insn_r
->reg_rec_count
= 1;
11943 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
11948 /* Handling opcode 110 insns. */
11951 arm_record_unsupported_insn (insn_decode_record
*arm_insn_r
)
11953 printf_unfiltered (_("Process record does not support instruction "
11954 "0x%0x at address %s.\n"),arm_insn_r
->arm_insn
,
11955 paddress (arm_insn_r
->gdbarch
, arm_insn_r
->this_addr
));
11960 /* Record handler for vector data transfer instructions. */
11963 arm_record_vdata_transfer_insn (insn_decode_record
*arm_insn_r
)
11965 uint32_t bits_a
, bit_c
, bit_l
, reg_t
, reg_v
;
11966 uint32_t record_buf
[4];
11968 const int num_regs
= gdbarch_num_regs (arm_insn_r
->gdbarch
);
11969 reg_t
= bits (arm_insn_r
->arm_insn
, 12, 15);
11970 reg_v
= bits (arm_insn_r
->arm_insn
, 21, 23);
11971 bits_a
= bits (arm_insn_r
->arm_insn
, 21, 23);
11972 bit_l
= bit (arm_insn_r
->arm_insn
, 20);
11973 bit_c
= bit (arm_insn_r
->arm_insn
, 8);
11975 /* Handle VMOV instruction. */
11976 if (bit_l
&& bit_c
)
11978 record_buf
[0] = reg_t
;
11979 arm_insn_r
->reg_rec_count
= 1;
11981 else if (bit_l
&& !bit_c
)
11983 /* Handle VMOV instruction. */
11984 if (bits_a
== 0x00)
11986 if (bit (arm_insn_r
->arm_insn
, 20))
11987 record_buf
[0] = reg_t
;
11989 record_buf
[0] = num_regs
+ (bit (arm_insn_r
->arm_insn
, 7) |
11992 arm_insn_r
->reg_rec_count
= 1;
11994 /* Handle VMRS instruction. */
11995 else if (bits_a
== 0x07)
11998 reg_t
= ARM_PS_REGNUM
;
12000 record_buf
[0] = reg_t
;
12001 arm_insn_r
->reg_rec_count
= 1;
12004 else if (!bit_l
&& !bit_c
)
12006 /* Handle VMOV instruction. */
12007 if (bits_a
== 0x00)
12009 if (bit (arm_insn_r
->arm_insn
, 20))
12010 record_buf
[0] = reg_t
;
12012 record_buf
[0] = num_regs
+ (bit (arm_insn_r
->arm_insn
, 7) |
12015 arm_insn_r
->reg_rec_count
= 1;
12017 /* Handle VMSR instruction. */
12018 else if (bits_a
== 0x07)
12020 record_buf
[0] = ARM_FPSCR_REGNUM
;
12021 arm_insn_r
->reg_rec_count
= 1;
12024 else if (!bit_l
&& bit_c
)
12026 /* Handle VMOV instruction. */
12027 if (!(bits_a
& 0x04))
12029 record_buf
[0] = (reg_v
| (bit (arm_insn_r
->arm_insn
, 7) << 4))
12031 arm_insn_r
->reg_rec_count
= 1;
12033 /* Handle VDUP instruction. */
12036 if (bit (arm_insn_r
->arm_insn
, 21))
12038 reg_v
= reg_v
| (bit (arm_insn_r
->arm_insn
, 7) << 4);
12039 record_buf
[0] = reg_v
+ ARM_D0_REGNUM
;
12040 record_buf
[1] = reg_v
+ ARM_D0_REGNUM
+ 1;
12041 arm_insn_r
->reg_rec_count
= 2;
12045 reg_v
= reg_v
| (bit (arm_insn_r
->arm_insn
, 7) << 4);
12046 record_buf
[0] = reg_v
+ ARM_D0_REGNUM
;
12047 arm_insn_r
->reg_rec_count
= 1;
12052 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
12056 /* Record handler for extension register load/store instructions. */
12059 arm_record_exreg_ld_st_insn (insn_decode_record
*arm_insn_r
)
12061 uint32_t opcode
, single_reg
;
12062 uint8_t op_vldm_vstm
;
12063 uint32_t record_buf
[8], record_buf_mem
[128];
12064 ULONGEST u_regval
= 0;
12066 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
12067 const int num_regs
= gdbarch_num_regs (arm_insn_r
->gdbarch
);
12069 opcode
= bits (arm_insn_r
->arm_insn
, 20, 24);
12070 single_reg
= bit (arm_insn_r
->arm_insn
, 8);
12071 op_vldm_vstm
= opcode
& 0x1b;
12073 /* Handle VMOV instructions. */
12074 if ((opcode
& 0x1e) == 0x04)
12076 if (bit (arm_insn_r
->arm_insn
, 4))
12078 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
12079 record_buf
[1] = bits (arm_insn_r
->arm_insn
, 16, 19);
12080 arm_insn_r
->reg_rec_count
= 2;
12084 uint8_t reg_m
= (bits (arm_insn_r
->arm_insn
, 0, 3) << 1)
12085 | bit (arm_insn_r
->arm_insn
, 5);
12089 record_buf
[0] = num_regs
+ reg_m
;
12090 record_buf
[1] = num_regs
+ reg_m
+ 1;
12091 arm_insn_r
->reg_rec_count
= 2;
12095 record_buf
[0] = reg_m
+ ARM_D0_REGNUM
;
12096 arm_insn_r
->reg_rec_count
= 1;
12100 /* Handle VSTM and VPUSH instructions. */
12101 else if (op_vldm_vstm
== 0x08 || op_vldm_vstm
== 0x0a
12102 || op_vldm_vstm
== 0x12)
12104 uint32_t start_address
, reg_rn
, imm_off32
, imm_off8
, memory_count
;
12105 uint32_t memory_index
= 0;
12107 reg_rn
= bits (arm_insn_r
->arm_insn
, 16, 19);
12108 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
);
12109 imm_off8
= bits (arm_insn_r
->arm_insn
, 0, 7);
12110 imm_off32
= imm_off8
<< 24;
12111 memory_count
= imm_off8
;
12113 if (bit (arm_insn_r
->arm_insn
, 23))
12114 start_address
= u_regval
;
12116 start_address
= u_regval
- imm_off32
;
12118 if (bit (arm_insn_r
->arm_insn
, 21))
12120 record_buf
[0] = reg_rn
;
12121 arm_insn_r
->reg_rec_count
= 1;
12124 while (memory_count
> 0)
12128 record_buf_mem
[memory_index
] = start_address
;
12129 record_buf_mem
[memory_index
+ 1] = 4;
12130 start_address
= start_address
+ 4;
12131 memory_index
= memory_index
+ 2;
12135 record_buf_mem
[memory_index
] = start_address
;
12136 record_buf_mem
[memory_index
+ 1] = 4;
12137 record_buf_mem
[memory_index
+ 2] = start_address
+ 4;
12138 record_buf_mem
[memory_index
+ 3] = 4;
12139 start_address
= start_address
+ 8;
12140 memory_index
= memory_index
+ 4;
12144 arm_insn_r
->mem_rec_count
= (memory_index
>> 1);
12146 /* Handle VLDM instructions. */
12147 else if (op_vldm_vstm
== 0x09 || op_vldm_vstm
== 0x0b
12148 || op_vldm_vstm
== 0x13)
12150 uint32_t reg_count
, reg_vd
;
12151 uint32_t reg_index
= 0;
12153 reg_vd
= bits (arm_insn_r
->arm_insn
, 12, 15);
12154 reg_count
= bits (arm_insn_r
->arm_insn
, 0, 7);
12157 reg_vd
= reg_vd
| (bit (arm_insn_r
->arm_insn
, 22) << 4);
12159 reg_vd
= (reg_vd
<< 1) | bit (arm_insn_r
->arm_insn
, 22);
12161 if (bit (arm_insn_r
->arm_insn
, 21))
12162 record_buf
[reg_index
++] = bits (arm_insn_r
->arm_insn
, 16, 19);
12164 while (reg_count
> 0)
12167 record_buf
[reg_index
++] = num_regs
+ reg_vd
+ reg_count
- 1;
12169 record_buf
[reg_index
++] = ARM_D0_REGNUM
+ reg_vd
+ reg_count
- 1;
12173 arm_insn_r
->reg_rec_count
= reg_index
;
12175 /* VSTR Vector store register. */
12176 else if ((opcode
& 0x13) == 0x10)
12178 uint32_t start_address
, reg_rn
, imm_off32
, imm_off8
, memory_count
;
12179 uint32_t memory_index
= 0;
12181 reg_rn
= bits (arm_insn_r
->arm_insn
, 16, 19);
12182 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
);
12183 imm_off8
= bits (arm_insn_r
->arm_insn
, 0, 7);
12184 imm_off32
= imm_off8
<< 24;
12185 memory_count
= imm_off8
;
12187 if (bit (arm_insn_r
->arm_insn
, 23))
12188 start_address
= u_regval
+ imm_off32
;
12190 start_address
= u_regval
- imm_off32
;
12194 record_buf_mem
[memory_index
] = start_address
;
12195 record_buf_mem
[memory_index
+ 1] = 4;
12196 arm_insn_r
->mem_rec_count
= 1;
12200 record_buf_mem
[memory_index
] = start_address
;
12201 record_buf_mem
[memory_index
+ 1] = 4;
12202 record_buf_mem
[memory_index
+ 2] = start_address
+ 4;
12203 record_buf_mem
[memory_index
+ 3] = 4;
12204 arm_insn_r
->mem_rec_count
= 2;
12207 /* VLDR Vector load register. */
12208 else if ((opcode
& 0x13) == 0x11)
12210 uint32_t reg_vd
= bits (arm_insn_r
->arm_insn
, 12, 15);
12214 reg_vd
= reg_vd
| (bit (arm_insn_r
->arm_insn
, 22) << 4);
12215 record_buf
[0] = ARM_D0_REGNUM
+ reg_vd
;
12219 reg_vd
= (reg_vd
<< 1) | bit (arm_insn_r
->arm_insn
, 22);
12220 record_buf
[0] = num_regs
+ reg_vd
;
12222 arm_insn_r
->reg_rec_count
= 1;
12225 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
12226 MEM_ALLOC (arm_insn_r
->arm_mems
, arm_insn_r
->mem_rec_count
, record_buf_mem
);
12230 /* Record handler for arm/thumb mode VFP data processing instructions. */
12233 arm_record_vfp_data_proc_insn (insn_decode_record
*arm_insn_r
)
12235 uint32_t opc1
, opc2
, opc3
, dp_op_sz
, bit_d
, reg_vd
;
12236 uint32_t record_buf
[4];
12237 enum insn_types
{INSN_T0
, INSN_T1
, INSN_T2
, INSN_T3
, INSN_INV
};
12238 enum insn_types curr_insn_type
= INSN_INV
;
12240 reg_vd
= bits (arm_insn_r
->arm_insn
, 12, 15);
12241 opc1
= bits (arm_insn_r
->arm_insn
, 20, 23);
12242 opc2
= bits (arm_insn_r
->arm_insn
, 16, 19);
12243 opc3
= bits (arm_insn_r
->arm_insn
, 6, 7);
12244 dp_op_sz
= bit (arm_insn_r
->arm_insn
, 8);
12245 bit_d
= bit (arm_insn_r
->arm_insn
, 22);
12246 opc1
= opc1
& 0x04;
12248 /* Handle VMLA, VMLS. */
12251 if (bit (arm_insn_r
->arm_insn
, 10))
12253 if (bit (arm_insn_r
->arm_insn
, 6))
12254 curr_insn_type
= INSN_T0
;
12256 curr_insn_type
= INSN_T1
;
12261 curr_insn_type
= INSN_T1
;
12263 curr_insn_type
= INSN_T2
;
12266 /* Handle VNMLA, VNMLS, VNMUL. */
12267 else if (opc1
== 0x01)
12270 curr_insn_type
= INSN_T1
;
12272 curr_insn_type
= INSN_T2
;
12275 else if (opc1
== 0x02 && !(opc3
& 0x01))
12277 if (bit (arm_insn_r
->arm_insn
, 10))
12279 if (bit (arm_insn_r
->arm_insn
, 6))
12280 curr_insn_type
= INSN_T0
;
12282 curr_insn_type
= INSN_T1
;
12287 curr_insn_type
= INSN_T1
;
12289 curr_insn_type
= INSN_T2
;
12292 /* Handle VADD, VSUB. */
12293 else if (opc1
== 0x03)
12295 if (!bit (arm_insn_r
->arm_insn
, 9))
12297 if (bit (arm_insn_r
->arm_insn
, 6))
12298 curr_insn_type
= INSN_T0
;
12300 curr_insn_type
= INSN_T1
;
12305 curr_insn_type
= INSN_T1
;
12307 curr_insn_type
= INSN_T2
;
12311 else if (opc1
== 0x0b)
12314 curr_insn_type
= INSN_T1
;
12316 curr_insn_type
= INSN_T2
;
12318 /* Handle all other vfp data processing instructions. */
12319 else if (opc1
== 0x0b)
12322 if (!(opc3
& 0x01) || (opc2
== 0x00 && opc3
== 0x01))
12324 if (bit (arm_insn_r
->arm_insn
, 4))
12326 if (bit (arm_insn_r
->arm_insn
, 6))
12327 curr_insn_type
= INSN_T0
;
12329 curr_insn_type
= INSN_T1
;
12334 curr_insn_type
= INSN_T1
;
12336 curr_insn_type
= INSN_T2
;
12339 /* Handle VNEG and VABS. */
12340 else if ((opc2
== 0x01 && opc3
== 0x01)
12341 || (opc2
== 0x00 && opc3
== 0x03))
12343 if (!bit (arm_insn_r
->arm_insn
, 11))
12345 if (bit (arm_insn_r
->arm_insn
, 6))
12346 curr_insn_type
= INSN_T0
;
12348 curr_insn_type
= INSN_T1
;
12353 curr_insn_type
= INSN_T1
;
12355 curr_insn_type
= INSN_T2
;
12358 /* Handle VSQRT. */
12359 else if (opc2
== 0x01 && opc3
== 0x03)
12362 curr_insn_type
= INSN_T1
;
12364 curr_insn_type
= INSN_T2
;
12367 else if (opc2
== 0x07 && opc3
== 0x03)
12370 curr_insn_type
= INSN_T1
;
12372 curr_insn_type
= INSN_T2
;
12374 else if (opc3
& 0x01)
12377 if ((opc2
== 0x08) || (opc2
& 0x0e) == 0x0c)
12379 if (!bit (arm_insn_r
->arm_insn
, 18))
12380 curr_insn_type
= INSN_T2
;
12384 curr_insn_type
= INSN_T1
;
12386 curr_insn_type
= INSN_T2
;
12390 else if ((opc2
& 0x0e) == 0x0a || (opc2
& 0x0e) == 0x0e)
12393 curr_insn_type
= INSN_T1
;
12395 curr_insn_type
= INSN_T2
;
12397 /* Handle VCVTB, VCVTT. */
12398 else if ((opc2
& 0x0e) == 0x02)
12399 curr_insn_type
= INSN_T2
;
12400 /* Handle VCMP, VCMPE. */
12401 else if ((opc2
& 0x0e) == 0x04)
12402 curr_insn_type
= INSN_T3
;
12406 switch (curr_insn_type
)
12409 reg_vd
= reg_vd
| (bit_d
<< 4);
12410 record_buf
[0] = reg_vd
+ ARM_D0_REGNUM
;
12411 record_buf
[1] = reg_vd
+ ARM_D0_REGNUM
+ 1;
12412 arm_insn_r
->reg_rec_count
= 2;
12416 reg_vd
= reg_vd
| (bit_d
<< 4);
12417 record_buf
[0] = reg_vd
+ ARM_D0_REGNUM
;
12418 arm_insn_r
->reg_rec_count
= 1;
12422 reg_vd
= (reg_vd
<< 1) | bit_d
;
12423 record_buf
[0] = reg_vd
+ ARM_D0_REGNUM
;
12424 arm_insn_r
->reg_rec_count
= 1;
12428 record_buf
[0] = ARM_FPSCR_REGNUM
;
12429 arm_insn_r
->reg_rec_count
= 1;
12433 gdb_assert_not_reached ("no decoding pattern found");
12437 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
12441 /* Handling opcode 110 insns. */
12444 arm_record_asimd_vfp_coproc (insn_decode_record
*arm_insn_r
)
12446 uint32_t op
, op1
, op1_sbit
, op1_ebit
, coproc
;
12448 coproc
= bits (arm_insn_r
->arm_insn
, 8, 11);
12449 op1
= bits (arm_insn_r
->arm_insn
, 20, 25);
12450 op1_ebit
= bit (arm_insn_r
->arm_insn
, 20);
12452 if ((coproc
& 0x0e) == 0x0a)
12454 /* Handle extension register ld/st instructions. */
12456 return arm_record_exreg_ld_st_insn (arm_insn_r
);
12458 /* 64-bit transfers between arm core and extension registers. */
12459 if ((op1
& 0x3e) == 0x04)
12460 return arm_record_exreg_ld_st_insn (arm_insn_r
);
12464 /* Handle coprocessor ld/st instructions. */
12469 return arm_record_unsupported_insn (arm_insn_r
);
12472 return arm_record_unsupported_insn (arm_insn_r
);
12475 /* Move to coprocessor from two arm core registers. */
12477 return arm_record_unsupported_insn (arm_insn_r
);
12479 /* Move to two arm core registers from coprocessor. */
12484 reg_t
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
12485 reg_t
[1] = bits (arm_insn_r
->arm_insn
, 16, 19);
12486 arm_insn_r
->reg_rec_count
= 2;
12488 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, reg_t
);
12492 return arm_record_unsupported_insn (arm_insn_r
);
12495 /* Handling opcode 111 insns. */
12498 arm_record_coproc_data_proc (insn_decode_record
*arm_insn_r
)
12500 uint32_t op
, op1_sbit
, op1_ebit
, coproc
;
12501 struct gdbarch_tdep
*tdep
= gdbarch_tdep (arm_insn_r
->gdbarch
);
12502 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
12503 ULONGEST u_regval
= 0;
12505 arm_insn_r
->opcode
= bits (arm_insn_r
->arm_insn
, 24, 27);
12506 coproc
= bits (arm_insn_r
->arm_insn
, 8, 11);
12507 op1_sbit
= bit (arm_insn_r
->arm_insn
, 24);
12508 op1_ebit
= bit (arm_insn_r
->arm_insn
, 20);
12509 op
= bit (arm_insn_r
->arm_insn
, 4);
12511 /* Handle arm SWI/SVC system call instructions. */
12514 if (tdep
->arm_syscall_record
!= NULL
)
12516 ULONGEST svc_operand
, svc_number
;
12518 svc_operand
= (0x00ffffff & arm_insn_r
->arm_insn
);
12520 if (svc_operand
) /* OABI. */
12521 svc_number
= svc_operand
- 0x900000;
12523 regcache_raw_read_unsigned (reg_cache
, 7, &svc_number
);
12525 return tdep
->arm_syscall_record (reg_cache
, svc_number
);
12529 printf_unfiltered (_("no syscall record support\n"));
12534 if ((coproc
& 0x0e) == 0x0a)
12536 /* VFP data-processing instructions. */
12537 if (!op1_sbit
&& !op
)
12538 return arm_record_vfp_data_proc_insn (arm_insn_r
);
12540 /* Advanced SIMD, VFP instructions. */
12541 if (!op1_sbit
&& op
)
12542 return arm_record_vdata_transfer_insn (arm_insn_r
);
12546 /* Coprocessor data operations. */
12547 if (!op1_sbit
&& !op
)
12548 return arm_record_unsupported_insn (arm_insn_r
);
12550 /* Move to Coprocessor from ARM core register. */
12551 if (!op1_sbit
&& !op1_ebit
&& op
)
12552 return arm_record_unsupported_insn (arm_insn_r
);
12554 /* Move to arm core register from coprocessor. */
12555 if (!op1_sbit
&& op1_ebit
&& op
)
12557 uint32_t record_buf
[1];
12559 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
12560 if (record_buf
[0] == 15)
12561 record_buf
[0] = ARM_PS_REGNUM
;
12563 arm_insn_r
->reg_rec_count
= 1;
12564 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
,
12570 return arm_record_unsupported_insn (arm_insn_r
);
12573 /* Handling opcode 000 insns. */
12576 thumb_record_shift_add_sub (insn_decode_record
*thumb_insn_r
)
12578 uint32_t record_buf
[8];
12579 uint32_t reg_src1
= 0;
12581 reg_src1
= bits (thumb_insn_r
->arm_insn
, 0, 2);
12583 record_buf
[0] = ARM_PS_REGNUM
;
12584 record_buf
[1] = reg_src1
;
12585 thumb_insn_r
->reg_rec_count
= 2;
12587 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
12593 /* Handling opcode 001 insns. */
12596 thumb_record_add_sub_cmp_mov (insn_decode_record
*thumb_insn_r
)
12598 uint32_t record_buf
[8];
12599 uint32_t reg_src1
= 0;
12601 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
12603 record_buf
[0] = ARM_PS_REGNUM
;
12604 record_buf
[1] = reg_src1
;
12605 thumb_insn_r
->reg_rec_count
= 2;
12607 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
12612 /* Handling opcode 010 insns. */
12615 thumb_record_ld_st_reg_offset (insn_decode_record
*thumb_insn_r
)
12617 struct regcache
*reg_cache
= thumb_insn_r
->regcache
;
12618 uint32_t record_buf
[8], record_buf_mem
[8];
12620 uint32_t reg_src1
= 0, reg_src2
= 0;
12621 uint32_t opcode1
= 0, opcode2
= 0, opcode3
= 0;
12623 ULONGEST u_regval
[2] = {0};
12625 opcode1
= bits (thumb_insn_r
->arm_insn
, 10, 12);
12627 if (bit (thumb_insn_r
->arm_insn
, 12))
12629 /* Handle load/store register offset. */
12630 opcode2
= bits (thumb_insn_r
->arm_insn
, 9, 10);
12631 if (opcode2
>= 12 && opcode2
<= 15)
12633 /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH. */
12634 reg_src1
= bits (thumb_insn_r
->arm_insn
,0, 2);
12635 record_buf
[0] = reg_src1
;
12636 thumb_insn_r
->reg_rec_count
= 1;
12638 else if (opcode2
>= 8 && opcode2
<= 10)
12640 /* STR(2), STRB(2), STRH(2) . */
12641 reg_src1
= bits (thumb_insn_r
->arm_insn
, 3, 5);
12642 reg_src2
= bits (thumb_insn_r
->arm_insn
, 6, 8);
12643 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
[0]);
12644 regcache_raw_read_unsigned (reg_cache
, reg_src2
, &u_regval
[1]);
12646 record_buf_mem
[0] = 4; /* STR (2). */
12647 else if (10 == opcode2
)
12648 record_buf_mem
[0] = 1; /* STRB (2). */
12649 else if (9 == opcode2
)
12650 record_buf_mem
[0] = 2; /* STRH (2). */
12651 record_buf_mem
[1] = u_regval
[0] + u_regval
[1];
12652 thumb_insn_r
->mem_rec_count
= 1;
12655 else if (bit (thumb_insn_r
->arm_insn
, 11))
12657 /* Handle load from literal pool. */
12659 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
12660 record_buf
[0] = reg_src1
;
12661 thumb_insn_r
->reg_rec_count
= 1;
12665 opcode2
= bits (thumb_insn_r
->arm_insn
, 8, 9);
12666 opcode3
= bits (thumb_insn_r
->arm_insn
, 0, 2);
12667 if ((3 == opcode2
) && (!opcode3
))
12669 /* Branch with exchange. */
12670 record_buf
[0] = ARM_PS_REGNUM
;
12671 thumb_insn_r
->reg_rec_count
= 1;
12675 /* Format 8; special data processing insns. */
12676 reg_src1
= bits (thumb_insn_r
->arm_insn
, 0, 2);
12677 record_buf
[0] = ARM_PS_REGNUM
;
12678 record_buf
[1] = reg_src1
;
12679 thumb_insn_r
->reg_rec_count
= 2;
12684 /* Format 5; data processing insns. */
12685 reg_src1
= bits (thumb_insn_r
->arm_insn
, 0, 2);
12686 if (bit (thumb_insn_r
->arm_insn
, 7))
12688 reg_src1
= reg_src1
+ 8;
12690 record_buf
[0] = ARM_PS_REGNUM
;
12691 record_buf
[1] = reg_src1
;
12692 thumb_insn_r
->reg_rec_count
= 2;
12695 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
12696 MEM_ALLOC (thumb_insn_r
->arm_mems
, thumb_insn_r
->mem_rec_count
,
12702 /* Handling opcode 001 insns. */
12705 thumb_record_ld_st_imm_offset (insn_decode_record
*thumb_insn_r
)
12707 struct regcache
*reg_cache
= thumb_insn_r
->regcache
;
12708 uint32_t record_buf
[8], record_buf_mem
[8];
12710 uint32_t reg_src1
= 0;
12711 uint32_t opcode
= 0, immed_5
= 0;
12713 ULONGEST u_regval
= 0;
12715 opcode
= bits (thumb_insn_r
->arm_insn
, 11, 12);
12720 reg_src1
= bits (thumb_insn_r
->arm_insn
, 0, 2);
12721 record_buf
[0] = reg_src1
;
12722 thumb_insn_r
->reg_rec_count
= 1;
12727 reg_src1
= bits (thumb_insn_r
->arm_insn
, 3, 5);
12728 immed_5
= bits (thumb_insn_r
->arm_insn
, 6, 10);
12729 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
);
12730 record_buf_mem
[0] = 4;
12731 record_buf_mem
[1] = u_regval
+ (immed_5
* 4);
12732 thumb_insn_r
->mem_rec_count
= 1;
12735 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
12736 MEM_ALLOC (thumb_insn_r
->arm_mems
, thumb_insn_r
->mem_rec_count
,
12742 /* Handling opcode 100 insns. */
12745 thumb_record_ld_st_stack (insn_decode_record
*thumb_insn_r
)
12747 struct regcache
*reg_cache
= thumb_insn_r
->regcache
;
12748 uint32_t record_buf
[8], record_buf_mem
[8];
12750 uint32_t reg_src1
= 0;
12751 uint32_t opcode
= 0, immed_8
= 0, immed_5
= 0;
12753 ULONGEST u_regval
= 0;
12755 opcode
= bits (thumb_insn_r
->arm_insn
, 11, 12);
12760 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
12761 record_buf
[0] = reg_src1
;
12762 thumb_insn_r
->reg_rec_count
= 1;
12764 else if (1 == opcode
)
12767 reg_src1
= bits (thumb_insn_r
->arm_insn
, 0, 2);
12768 record_buf
[0] = reg_src1
;
12769 thumb_insn_r
->reg_rec_count
= 1;
12771 else if (2 == opcode
)
12774 immed_8
= bits (thumb_insn_r
->arm_insn
, 0, 7);
12775 regcache_raw_read_unsigned (reg_cache
, ARM_SP_REGNUM
, &u_regval
);
12776 record_buf_mem
[0] = 4;
12777 record_buf_mem
[1] = u_regval
+ (immed_8
* 4);
12778 thumb_insn_r
->mem_rec_count
= 1;
12780 else if (0 == opcode
)
12783 immed_5
= bits (thumb_insn_r
->arm_insn
, 6, 10);
12784 reg_src1
= bits (thumb_insn_r
->arm_insn
, 3, 5);
12785 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
);
12786 record_buf_mem
[0] = 2;
12787 record_buf_mem
[1] = u_regval
+ (immed_5
* 2);
12788 thumb_insn_r
->mem_rec_count
= 1;
12791 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
12792 MEM_ALLOC (thumb_insn_r
->arm_mems
, thumb_insn_r
->mem_rec_count
,
12798 /* Handling opcode 101 insns. */
12801 thumb_record_misc (insn_decode_record
*thumb_insn_r
)
12803 struct regcache
*reg_cache
= thumb_insn_r
->regcache
;
12805 uint32_t opcode
= 0, opcode1
= 0, opcode2
= 0;
12806 uint32_t register_bits
= 0, register_count
= 0;
12807 uint32_t register_list
[8] = {0}, index
= 0, start_address
= 0;
12808 uint32_t record_buf
[24], record_buf_mem
[48];
12811 ULONGEST u_regval
= 0;
12813 opcode
= bits (thumb_insn_r
->arm_insn
, 11, 12);
12814 opcode1
= bits (thumb_insn_r
->arm_insn
, 8, 12);
12815 opcode2
= bits (thumb_insn_r
->arm_insn
, 9, 12);
12820 register_bits
= bits (thumb_insn_r
->arm_insn
, 0, 7);
12821 while (register_bits
)
12823 if (register_bits
& 0x00000001)
12824 record_buf
[index
++] = register_count
;
12825 register_bits
= register_bits
>> 1;
12828 record_buf
[index
++] = ARM_PS_REGNUM
;
12829 record_buf
[index
++] = ARM_SP_REGNUM
;
12830 thumb_insn_r
->reg_rec_count
= index
;
12832 else if (10 == opcode2
)
12835 register_bits
= bits (thumb_insn_r
->arm_insn
, 0, 7);
12836 regcache_raw_read_unsigned (reg_cache
, ARM_SP_REGNUM
, &u_regval
);
12837 while (register_bits
)
12839 if (register_bits
& 0x00000001)
12841 register_bits
= register_bits
>> 1;
12843 start_address
= u_regval
- \
12844 (4 * (bit (thumb_insn_r
->arm_insn
, 8) + register_count
));
12845 thumb_insn_r
->mem_rec_count
= register_count
;
12846 while (register_count
)
12848 record_buf_mem
[(register_count
* 2) - 1] = start_address
;
12849 record_buf_mem
[(register_count
* 2) - 2] = 4;
12850 start_address
= start_address
+ 4;
12853 record_buf
[0] = ARM_SP_REGNUM
;
12854 thumb_insn_r
->reg_rec_count
= 1;
12856 else if (0x1E == opcode1
)
12859 /* Handle enhanced software breakpoint insn, BKPT. */
12860 /* CPSR is changed to be executed in ARM state, disabling normal
12861 interrupts, entering abort mode. */
12862 /* According to high vector configuration PC is set. */
12863 /* User hits breakpoint and type reverse, in that case, we need to go back with
12864 previous CPSR and Program Counter. */
12865 record_buf
[0] = ARM_PS_REGNUM
;
12866 record_buf
[1] = ARM_LR_REGNUM
;
12867 thumb_insn_r
->reg_rec_count
= 2;
12868 /* We need to save SPSR value, which is not yet done. */
12869 printf_unfiltered (_("Process record does not support instruction "
12870 "0x%0x at address %s.\n"),
12871 thumb_insn_r
->arm_insn
,
12872 paddress (thumb_insn_r
->gdbarch
,
12873 thumb_insn_r
->this_addr
));
12876 else if ((0 == opcode
) || (1 == opcode
))
12878 /* ADD(5), ADD(6). */
12879 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
12880 record_buf
[0] = reg_src1
;
12881 thumb_insn_r
->reg_rec_count
= 1;
12883 else if (2 == opcode
)
12885 /* ADD(7), SUB(4). */
12886 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
12887 record_buf
[0] = ARM_SP_REGNUM
;
12888 thumb_insn_r
->reg_rec_count
= 1;
12891 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
12892 MEM_ALLOC (thumb_insn_r
->arm_mems
, thumb_insn_r
->mem_rec_count
,
12898 /* Handling opcode 110 insns. */
12901 thumb_record_ldm_stm_swi (insn_decode_record
*thumb_insn_r
)
12903 struct gdbarch_tdep
*tdep
= gdbarch_tdep (thumb_insn_r
->gdbarch
);
12904 struct regcache
*reg_cache
= thumb_insn_r
->regcache
;
12906 uint32_t ret
= 0; /* function return value: -1:record failure ; 0:success */
12907 uint32_t reg_src1
= 0;
12908 uint32_t opcode1
= 0, opcode2
= 0, register_bits
= 0, register_count
= 0;
12909 uint32_t register_list
[8] = {0}, index
= 0, start_address
= 0;
12910 uint32_t record_buf
[24], record_buf_mem
[48];
12912 ULONGEST u_regval
= 0;
12914 opcode1
= bits (thumb_insn_r
->arm_insn
, 8, 12);
12915 opcode2
= bits (thumb_insn_r
->arm_insn
, 11, 12);
12921 register_bits
= bits (thumb_insn_r
->arm_insn
, 0, 7);
12923 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
12924 while (register_bits
)
12926 if (register_bits
& 0x00000001)
12927 record_buf
[index
++] = register_count
;
12928 register_bits
= register_bits
>> 1;
12931 record_buf
[index
++] = reg_src1
;
12932 thumb_insn_r
->reg_rec_count
= index
;
12934 else if (0 == opcode2
)
12936 /* It handles both STMIA. */
12937 register_bits
= bits (thumb_insn_r
->arm_insn
, 0, 7);
12939 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
12940 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
);
12941 while (register_bits
)
12943 if (register_bits
& 0x00000001)
12945 register_bits
= register_bits
>> 1;
12947 start_address
= u_regval
;
12948 thumb_insn_r
->mem_rec_count
= register_count
;
12949 while (register_count
)
12951 record_buf_mem
[(register_count
* 2) - 1] = start_address
;
12952 record_buf_mem
[(register_count
* 2) - 2] = 4;
12953 start_address
= start_address
+ 4;
12957 else if (0x1F == opcode1
)
12959 /* Handle arm syscall insn. */
12960 if (tdep
->arm_syscall_record
!= NULL
)
12962 regcache_raw_read_unsigned (reg_cache
, 7, &u_regval
);
12963 ret
= tdep
->arm_syscall_record (reg_cache
, u_regval
);
12967 printf_unfiltered (_("no syscall record support\n"));
12972 /* B (1), conditional branch is automatically taken care in process_record,
12973 as PC is saved there. */
12975 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
12976 MEM_ALLOC (thumb_insn_r
->arm_mems
, thumb_insn_r
->mem_rec_count
,
12982 /* Handling opcode 111 insns. */
12985 thumb_record_branch (insn_decode_record
*thumb_insn_r
)
12987 uint32_t record_buf
[8];
12988 uint32_t bits_h
= 0;
12990 bits_h
= bits (thumb_insn_r
->arm_insn
, 11, 12);
12992 if (2 == bits_h
|| 3 == bits_h
)
12995 record_buf
[0] = ARM_LR_REGNUM
;
12996 thumb_insn_r
->reg_rec_count
= 1;
12998 else if (1 == bits_h
)
13001 record_buf
[0] = ARM_PS_REGNUM
;
13002 record_buf
[1] = ARM_LR_REGNUM
;
13003 thumb_insn_r
->reg_rec_count
= 2;
13006 /* B(2) is automatically taken care in process_record, as PC is
13009 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
13014 /* Handler for thumb2 load/store multiple instructions. */
13017 thumb2_record_ld_st_multiple (insn_decode_record
*thumb2_insn_r
)
13019 struct regcache
*reg_cache
= thumb2_insn_r
->regcache
;
13021 uint32_t reg_rn
, op
;
13022 uint32_t register_bits
= 0, register_count
= 0;
13023 uint32_t index
= 0, start_address
= 0;
13024 uint32_t record_buf
[24], record_buf_mem
[48];
13026 ULONGEST u_regval
= 0;
13028 reg_rn
= bits (thumb2_insn_r
->arm_insn
, 16, 19);
13029 op
= bits (thumb2_insn_r
->arm_insn
, 23, 24);
13031 if (0 == op
|| 3 == op
)
13033 if (bit (thumb2_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
13035 /* Handle RFE instruction. */
13036 record_buf
[0] = ARM_PS_REGNUM
;
13037 thumb2_insn_r
->reg_rec_count
= 1;
13041 /* Handle SRS instruction after reading banked SP. */
13042 return arm_record_unsupported_insn (thumb2_insn_r
);
13045 else if (1 == op
|| 2 == op
)
13047 if (bit (thumb2_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
13049 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
13050 register_bits
= bits (thumb2_insn_r
->arm_insn
, 0, 15);
13051 while (register_bits
)
13053 if (register_bits
& 0x00000001)
13054 record_buf
[index
++] = register_count
;
13057 register_bits
= register_bits
>> 1;
13059 record_buf
[index
++] = reg_rn
;
13060 record_buf
[index
++] = ARM_PS_REGNUM
;
13061 thumb2_insn_r
->reg_rec_count
= index
;
13065 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
13066 register_bits
= bits (thumb2_insn_r
->arm_insn
, 0, 15);
13067 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
);
13068 while (register_bits
)
13070 if (register_bits
& 0x00000001)
13073 register_bits
= register_bits
>> 1;
13078 /* Start address calculation for LDMDB/LDMEA. */
13079 start_address
= u_regval
;
13083 /* Start address calculation for LDMDB/LDMEA. */
13084 start_address
= u_regval
- register_count
* 4;
13087 thumb2_insn_r
->mem_rec_count
= register_count
;
13088 while (register_count
)
13090 record_buf_mem
[register_count
* 2 - 1] = start_address
;
13091 record_buf_mem
[register_count
* 2 - 2] = 4;
13092 start_address
= start_address
+ 4;
13095 record_buf
[0] = reg_rn
;
13096 record_buf
[1] = ARM_PS_REGNUM
;
13097 thumb2_insn_r
->reg_rec_count
= 2;
13101 MEM_ALLOC (thumb2_insn_r
->arm_mems
, thumb2_insn_r
->mem_rec_count
,
13103 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
13105 return ARM_RECORD_SUCCESS
;
13108 /* Handler for thumb2 load/store (dual/exclusive) and table branch
13112 thumb2_record_ld_st_dual_ex_tbb (insn_decode_record
*thumb2_insn_r
)
13114 struct regcache
*reg_cache
= thumb2_insn_r
->regcache
;
13116 uint32_t reg_rd
, reg_rn
, offset_imm
;
13117 uint32_t reg_dest1
, reg_dest2
;
13118 uint32_t address
, offset_addr
;
13119 uint32_t record_buf
[8], record_buf_mem
[8];
13120 uint32_t op1
, op2
, op3
;
13123 ULONGEST u_regval
[2];
13125 op1
= bits (thumb2_insn_r
->arm_insn
, 23, 24);
13126 op2
= bits (thumb2_insn_r
->arm_insn
, 20, 21);
13127 op3
= bits (thumb2_insn_r
->arm_insn
, 4, 7);
13129 if (bit (thumb2_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
13131 if(!(1 == op1
&& 1 == op2
&& (0 == op3
|| 1 == op3
)))
13133 reg_dest1
= bits (thumb2_insn_r
->arm_insn
, 12, 15);
13134 record_buf
[0] = reg_dest1
;
13135 record_buf
[1] = ARM_PS_REGNUM
;
13136 thumb2_insn_r
->reg_rec_count
= 2;
13139 if (3 == op2
|| (op1
& 2) || (1 == op1
&& 1 == op2
&& 7 == op3
))
13141 reg_dest2
= bits (thumb2_insn_r
->arm_insn
, 8, 11);
13142 record_buf
[2] = reg_dest2
;
13143 thumb2_insn_r
->reg_rec_count
= 3;
13148 reg_rn
= bits (thumb2_insn_r
->arm_insn
, 16, 19);
13149 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
[0]);
13151 if (0 == op1
&& 0 == op2
)
13153 /* Handle STREX. */
13154 offset_imm
= bits (thumb2_insn_r
->arm_insn
, 0, 7);
13155 address
= u_regval
[0] + (offset_imm
* 4);
13156 record_buf_mem
[0] = 4;
13157 record_buf_mem
[1] = address
;
13158 thumb2_insn_r
->mem_rec_count
= 1;
13159 reg_rd
= bits (thumb2_insn_r
->arm_insn
, 0, 3);
13160 record_buf
[0] = reg_rd
;
13161 thumb2_insn_r
->reg_rec_count
= 1;
13163 else if (1 == op1
&& 0 == op2
)
13165 reg_rd
= bits (thumb2_insn_r
->arm_insn
, 0, 3);
13166 record_buf
[0] = reg_rd
;
13167 thumb2_insn_r
->reg_rec_count
= 1;
13168 address
= u_regval
[0];
13169 record_buf_mem
[1] = address
;
13173 /* Handle STREXB. */
13174 record_buf_mem
[0] = 1;
13175 thumb2_insn_r
->mem_rec_count
= 1;
13179 /* Handle STREXH. */
13180 record_buf_mem
[0] = 2 ;
13181 thumb2_insn_r
->mem_rec_count
= 1;
13185 /* Handle STREXD. */
13186 address
= u_regval
[0];
13187 record_buf_mem
[0] = 4;
13188 record_buf_mem
[2] = 4;
13189 record_buf_mem
[3] = address
+ 4;
13190 thumb2_insn_r
->mem_rec_count
= 2;
13195 offset_imm
= bits (thumb2_insn_r
->arm_insn
, 0, 7);
13197 if (bit (thumb2_insn_r
->arm_insn
, 24))
13199 if (bit (thumb2_insn_r
->arm_insn
, 23))
13200 offset_addr
= u_regval
[0] + (offset_imm
* 4);
13202 offset_addr
= u_regval
[0] - (offset_imm
* 4);
13204 address
= offset_addr
;
13207 address
= u_regval
[0];
13209 record_buf_mem
[0] = 4;
13210 record_buf_mem
[1] = address
;
13211 record_buf_mem
[2] = 4;
13212 record_buf_mem
[3] = address
+ 4;
13213 thumb2_insn_r
->mem_rec_count
= 2;
13214 record_buf
[0] = reg_rn
;
13215 thumb2_insn_r
->reg_rec_count
= 1;
13219 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
13221 MEM_ALLOC (thumb2_insn_r
->arm_mems
, thumb2_insn_r
->mem_rec_count
,
13223 return ARM_RECORD_SUCCESS
;
13226 /* Handler for thumb2 data processing (shift register and modified immediate)
13230 thumb2_record_data_proc_sreg_mimm (insn_decode_record
*thumb2_insn_r
)
13232 uint32_t reg_rd
, op
;
13233 uint32_t record_buf
[8];
13235 op
= bits (thumb2_insn_r
->arm_insn
, 21, 24);
13236 reg_rd
= bits (thumb2_insn_r
->arm_insn
, 8, 11);
13238 if ((0 == op
|| 4 == op
|| 8 == op
|| 13 == op
) && 15 == reg_rd
)
13240 record_buf
[0] = ARM_PS_REGNUM
;
13241 thumb2_insn_r
->reg_rec_count
= 1;
13245 record_buf
[0] = reg_rd
;
13246 record_buf
[1] = ARM_PS_REGNUM
;
13247 thumb2_insn_r
->reg_rec_count
= 2;
13250 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
13252 return ARM_RECORD_SUCCESS
;
13255 /* Generic handler for thumb2 instructions which effect destination and PS
13259 thumb2_record_ps_dest_generic (insn_decode_record
*thumb2_insn_r
)
13262 uint32_t record_buf
[8];
13264 reg_rd
= bits (thumb2_insn_r
->arm_insn
, 8, 11);
13266 record_buf
[0] = reg_rd
;
13267 record_buf
[1] = ARM_PS_REGNUM
;
13268 thumb2_insn_r
->reg_rec_count
= 2;
13270 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
13272 return ARM_RECORD_SUCCESS
;
13275 /* Handler for thumb2 branch and miscellaneous control instructions. */
13278 thumb2_record_branch_misc_cntrl (insn_decode_record
*thumb2_insn_r
)
13280 uint32_t op
, op1
, op2
;
13281 uint32_t record_buf
[8];
13283 op
= bits (thumb2_insn_r
->arm_insn
, 20, 26);
13284 op1
= bits (thumb2_insn_r
->arm_insn
, 12, 14);
13285 op2
= bits (thumb2_insn_r
->arm_insn
, 8, 11);
13287 /* Handle MSR insn. */
13288 if (!(op1
& 0x2) && 0x38 == op
)
13292 /* CPSR is going to be changed. */
13293 record_buf
[0] = ARM_PS_REGNUM
;
13294 thumb2_insn_r
->reg_rec_count
= 1;
13298 arm_record_unsupported_insn(thumb2_insn_r
);
13302 else if (4 == (op1
& 0x5) || 5 == (op1
& 0x5))
13305 record_buf
[0] = ARM_PS_REGNUM
;
13306 record_buf
[1] = ARM_LR_REGNUM
;
13307 thumb2_insn_r
->reg_rec_count
= 2;
13310 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
13312 return ARM_RECORD_SUCCESS
;
13315 /* Handler for thumb2 store single data item instructions. */
13318 thumb2_record_str_single_data (insn_decode_record
*thumb2_insn_r
)
13320 struct regcache
*reg_cache
= thumb2_insn_r
->regcache
;
13322 uint32_t reg_rn
, reg_rm
, offset_imm
, shift_imm
;
13323 uint32_t address
, offset_addr
;
13324 uint32_t record_buf
[8], record_buf_mem
[8];
13327 ULONGEST u_regval
[2];
13329 op1
= bits (thumb2_insn_r
->arm_insn
, 21, 23);
13330 op2
= bits (thumb2_insn_r
->arm_insn
, 6, 11);
13331 reg_rn
= bits (thumb2_insn_r
->arm_insn
, 16, 19);
13332 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
[0]);
13334 if (bit (thumb2_insn_r
->arm_insn
, 23))
13337 offset_imm
= bits (thumb2_insn_r
->arm_insn
, 0, 11);
13338 offset_addr
= u_regval
[0] + offset_imm
;
13339 address
= offset_addr
;
13344 if ((0 == op1
|| 1 == op1
|| 2 == op1
) && !(op2
& 0x20))
13346 /* Handle STRB (register). */
13347 reg_rm
= bits (thumb2_insn_r
->arm_insn
, 0, 3);
13348 regcache_raw_read_unsigned (reg_cache
, reg_rm
, &u_regval
[1]);
13349 shift_imm
= bits (thumb2_insn_r
->arm_insn
, 4, 5);
13350 offset_addr
= u_regval
[1] << shift_imm
;
13351 address
= u_regval
[0] + offset_addr
;
13355 offset_imm
= bits (thumb2_insn_r
->arm_insn
, 0, 7);
13356 if (bit (thumb2_insn_r
->arm_insn
, 10))
13358 if (bit (thumb2_insn_r
->arm_insn
, 9))
13359 offset_addr
= u_regval
[0] + offset_imm
;
13361 offset_addr
= u_regval
[0] - offset_imm
;
13363 address
= offset_addr
;
13366 address
= u_regval
[0];
13372 /* Store byte instructions. */
13375 record_buf_mem
[0] = 1;
13377 /* Store half word instructions. */
13380 record_buf_mem
[0] = 2;
13382 /* Store word instructions. */
13385 record_buf_mem
[0] = 4;
13389 gdb_assert_not_reached ("no decoding pattern found");
13393 record_buf_mem
[1] = address
;
13394 thumb2_insn_r
->mem_rec_count
= 1;
13395 record_buf
[0] = reg_rn
;
13396 thumb2_insn_r
->reg_rec_count
= 1;
13398 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
13400 MEM_ALLOC (thumb2_insn_r
->arm_mems
, thumb2_insn_r
->mem_rec_count
,
13402 return ARM_RECORD_SUCCESS
;
13405 /* Handler for thumb2 load memory hints instructions. */
13408 thumb2_record_ld_mem_hints (insn_decode_record
*thumb2_insn_r
)
13410 uint32_t record_buf
[8];
13411 uint32_t reg_rt
, reg_rn
;
13413 reg_rt
= bits (thumb2_insn_r
->arm_insn
, 12, 15);
13414 reg_rn
= bits (thumb2_insn_r
->arm_insn
, 16, 19);
13416 if (ARM_PC_REGNUM
!= reg_rt
)
13418 record_buf
[0] = reg_rt
;
13419 record_buf
[1] = reg_rn
;
13420 record_buf
[2] = ARM_PS_REGNUM
;
13421 thumb2_insn_r
->reg_rec_count
= 3;
13423 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
13425 return ARM_RECORD_SUCCESS
;
13428 return ARM_RECORD_FAILURE
;
13431 /* Handler for thumb2 load word instructions. */
13434 thumb2_record_ld_word (insn_decode_record
*thumb2_insn_r
)
13436 uint32_t opcode1
= 0, opcode2
= 0;
13437 uint32_t record_buf
[8];
13439 record_buf
[0] = bits (thumb2_insn_r
->arm_insn
, 12, 15);
13440 record_buf
[1] = ARM_PS_REGNUM
;
13441 thumb2_insn_r
->reg_rec_count
= 2;
13443 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
13445 return ARM_RECORD_SUCCESS
;
13448 /* Handler for thumb2 long multiply, long multiply accumulate, and
13449 divide instructions. */
13452 thumb2_record_lmul_lmla_div (insn_decode_record
*thumb2_insn_r
)
13454 uint32_t opcode1
= 0, opcode2
= 0;
13455 uint32_t record_buf
[8];
13456 uint32_t reg_src1
= 0;
13458 opcode1
= bits (thumb2_insn_r
->arm_insn
, 20, 22);
13459 opcode2
= bits (thumb2_insn_r
->arm_insn
, 4, 7);
13461 if (0 == opcode1
|| 2 == opcode1
|| (opcode1
>= 4 && opcode1
<= 6))
13463 /* Handle SMULL, UMULL, SMULAL. */
13464 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
13465 record_buf
[0] = bits (thumb2_insn_r
->arm_insn
, 16, 19);
13466 record_buf
[1] = bits (thumb2_insn_r
->arm_insn
, 12, 15);
13467 record_buf
[2] = ARM_PS_REGNUM
;
13468 thumb2_insn_r
->reg_rec_count
= 3;
13470 else if (1 == opcode1
|| 3 == opcode2
)
13472 /* Handle SDIV and UDIV. */
13473 record_buf
[0] = bits (thumb2_insn_r
->arm_insn
, 16, 19);
13474 record_buf
[1] = bits (thumb2_insn_r
->arm_insn
, 12, 15);
13475 record_buf
[2] = ARM_PS_REGNUM
;
13476 thumb2_insn_r
->reg_rec_count
= 3;
13479 return ARM_RECORD_FAILURE
;
13481 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
13483 return ARM_RECORD_SUCCESS
;
13486 /* Record handler for thumb32 coprocessor instructions. */
13489 thumb2_record_coproc_insn (insn_decode_record
*thumb2_insn_r
)
13491 if (bit (thumb2_insn_r
->arm_insn
, 25))
13492 return arm_record_coproc_data_proc (thumb2_insn_r
);
13494 return arm_record_asimd_vfp_coproc (thumb2_insn_r
);
13497 /* Record handler for advance SIMD structure load/store instructions. */
13500 thumb2_record_asimd_struct_ld_st (insn_decode_record
*thumb2_insn_r
)
13502 struct regcache
*reg_cache
= thumb2_insn_r
->regcache
;
13503 uint32_t l_bit
, a_bit
, b_bits
;
13504 uint32_t record_buf
[128], record_buf_mem
[128];
13505 uint32_t reg_rn
, reg_vd
, address
, f_esize
, f_elem
;
13506 uint32_t index_r
= 0, index_e
= 0, bf_regs
= 0, index_m
= 0, loop_t
= 0;
13509 l_bit
= bit (thumb2_insn_r
->arm_insn
, 21);
13510 a_bit
= bit (thumb2_insn_r
->arm_insn
, 23);
13511 b_bits
= bits (thumb2_insn_r
->arm_insn
, 8, 11);
13512 reg_rn
= bits (thumb2_insn_r
->arm_insn
, 16, 19);
13513 reg_vd
= bits (thumb2_insn_r
->arm_insn
, 12, 15);
13514 reg_vd
= (bit (thumb2_insn_r
->arm_insn
, 22) << 4) | reg_vd
;
13515 f_ebytes
= (1 << bits (thumb2_insn_r
->arm_insn
, 6, 7));
13516 f_esize
= 8 * f_ebytes
;
13517 f_elem
= 8 / f_ebytes
;
13521 ULONGEST u_regval
= 0;
13522 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
);
13523 address
= u_regval
;
13528 if (b_bits
== 0x02 || b_bits
== 0x0a || (b_bits
& 0x0e) == 0x06)
13530 if (b_bits
== 0x07)
13532 else if (b_bits
== 0x0a)
13534 else if (b_bits
== 0x06)
13536 else if (b_bits
== 0x02)
13541 for (index_r
= 0; index_r
< bf_regs
; index_r
++)
13543 for (index_e
= 0; index_e
< f_elem
; index_e
++)
13545 record_buf_mem
[index_m
++] = f_ebytes
;
13546 record_buf_mem
[index_m
++] = address
;
13547 address
= address
+ f_ebytes
;
13548 thumb2_insn_r
->mem_rec_count
+= 1;
13553 else if (b_bits
== 0x03 || (b_bits
& 0x0e) == 0x08)
13555 if (b_bits
== 0x09 || b_bits
== 0x08)
13557 else if (b_bits
== 0x03)
13562 for (index_r
= 0; index_r
< bf_regs
; index_r
++)
13563 for (index_e
= 0; index_e
< f_elem
; index_e
++)
13565 for (loop_t
= 0; loop_t
< 2; loop_t
++)
13567 record_buf_mem
[index_m
++] = f_ebytes
;
13568 record_buf_mem
[index_m
++] = address
+ (loop_t
* f_ebytes
);
13569 thumb2_insn_r
->mem_rec_count
+= 1;
13571 address
= address
+ (2 * f_ebytes
);
13575 else if ((b_bits
& 0x0e) == 0x04)
13577 for (index_e
= 0; index_e
< f_elem
; index_e
++)
13579 for (loop_t
= 0; loop_t
< 3; loop_t
++)
13581 record_buf_mem
[index_m
++] = f_ebytes
;
13582 record_buf_mem
[index_m
++] = address
+ (loop_t
* f_ebytes
);
13583 thumb2_insn_r
->mem_rec_count
+= 1;
13585 address
= address
+ (3 * f_ebytes
);
13589 else if (!(b_bits
& 0x0e))
13591 for (index_e
= 0; index_e
< f_elem
; index_e
++)
13593 for (loop_t
= 0; loop_t
< 4; loop_t
++)
13595 record_buf_mem
[index_m
++] = f_ebytes
;
13596 record_buf_mem
[index_m
++] = address
+ (loop_t
* f_ebytes
);
13597 thumb2_insn_r
->mem_rec_count
+= 1;
13599 address
= address
+ (4 * f_ebytes
);
13605 uint8_t bft_size
= bits (thumb2_insn_r
->arm_insn
, 10, 11);
13607 if (bft_size
== 0x00)
13609 else if (bft_size
== 0x01)
13611 else if (bft_size
== 0x02)
13617 if (!(b_bits
& 0x0b) || b_bits
== 0x08)
13618 thumb2_insn_r
->mem_rec_count
= 1;
13620 else if ((b_bits
& 0x0b) == 0x01 || b_bits
== 0x09)
13621 thumb2_insn_r
->mem_rec_count
= 2;
13623 else if ((b_bits
& 0x0b) == 0x02 || b_bits
== 0x0a)
13624 thumb2_insn_r
->mem_rec_count
= 3;
13626 else if ((b_bits
& 0x0b) == 0x03 || b_bits
== 0x0b)
13627 thumb2_insn_r
->mem_rec_count
= 4;
13629 for (index_m
= 0; index_m
< thumb2_insn_r
->mem_rec_count
; index_m
++)
13631 record_buf_mem
[index_m
] = f_ebytes
;
13632 record_buf_mem
[index_m
] = address
+ (index_m
* f_ebytes
);
13641 if (b_bits
== 0x02 || b_bits
== 0x0a || (b_bits
& 0x0e) == 0x06)
13642 thumb2_insn_r
->reg_rec_count
= 1;
13644 else if (b_bits
== 0x03 || (b_bits
& 0x0e) == 0x08)
13645 thumb2_insn_r
->reg_rec_count
= 2;
13647 else if ((b_bits
& 0x0e) == 0x04)
13648 thumb2_insn_r
->reg_rec_count
= 3;
13650 else if (!(b_bits
& 0x0e))
13651 thumb2_insn_r
->reg_rec_count
= 4;
13656 if (!(b_bits
& 0x0b) || b_bits
== 0x08 || b_bits
== 0x0c)
13657 thumb2_insn_r
->reg_rec_count
= 1;
13659 else if ((b_bits
& 0x0b) == 0x01 || b_bits
== 0x09 || b_bits
== 0x0d)
13660 thumb2_insn_r
->reg_rec_count
= 2;
13662 else if ((b_bits
& 0x0b) == 0x02 || b_bits
== 0x0a || b_bits
== 0x0e)
13663 thumb2_insn_r
->reg_rec_count
= 3;
13665 else if ((b_bits
& 0x0b) == 0x03 || b_bits
== 0x0b || b_bits
== 0x0f)
13666 thumb2_insn_r
->reg_rec_count
= 4;
13668 for (index_r
= 0; index_r
< thumb2_insn_r
->reg_rec_count
; index_r
++)
13669 record_buf
[index_r
] = reg_vd
+ ARM_D0_REGNUM
+ index_r
;
13673 if (bits (thumb2_insn_r
->arm_insn
, 0, 3) != 15)
13675 record_buf
[index_r
] = reg_rn
;
13676 thumb2_insn_r
->reg_rec_count
+= 1;
13679 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
13681 MEM_ALLOC (thumb2_insn_r
->arm_mems
, thumb2_insn_r
->mem_rec_count
,
13686 /* Decodes thumb2 instruction type and invokes its record handler. */
13688 static unsigned int
13689 thumb2_record_decode_insn_handler (insn_decode_record
*thumb2_insn_r
)
13691 uint32_t op
, op1
, op2
;
13693 op
= bit (thumb2_insn_r
->arm_insn
, 15);
13694 op1
= bits (thumb2_insn_r
->arm_insn
, 27, 28);
13695 op2
= bits (thumb2_insn_r
->arm_insn
, 20, 26);
13699 if (!(op2
& 0x64 ))
13701 /* Load/store multiple instruction. */
13702 return thumb2_record_ld_st_multiple (thumb2_insn_r
);
13704 else if (!((op2
& 0x64) ^ 0x04))
13706 /* Load/store (dual/exclusive) and table branch instruction. */
13707 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r
);
13709 else if (!((op2
& 0x20) ^ 0x20))
13711 /* Data-processing (shifted register). */
13712 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r
);
13714 else if (op2
& 0x40)
13716 /* Co-processor instructions. */
13717 return thumb2_record_coproc_insn (thumb2_insn_r
);
13720 else if (op1
== 0x02)
13724 /* Branches and miscellaneous control instructions. */
13725 return thumb2_record_branch_misc_cntrl (thumb2_insn_r
);
13727 else if (op2
& 0x20)
13729 /* Data-processing (plain binary immediate) instruction. */
13730 return thumb2_record_ps_dest_generic (thumb2_insn_r
);
13734 /* Data-processing (modified immediate). */
13735 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r
);
13738 else if (op1
== 0x03)
13740 if (!(op2
& 0x71 ))
13742 /* Store single data item. */
13743 return thumb2_record_str_single_data (thumb2_insn_r
);
13745 else if (!((op2
& 0x71) ^ 0x10))
13747 /* Advanced SIMD or structure load/store instructions. */
13748 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r
);
13750 else if (!((op2
& 0x67) ^ 0x01))
13752 /* Load byte, memory hints instruction. */
13753 return thumb2_record_ld_mem_hints (thumb2_insn_r
);
13755 else if (!((op2
& 0x67) ^ 0x03))
13757 /* Load halfword, memory hints instruction. */
13758 return thumb2_record_ld_mem_hints (thumb2_insn_r
);
13760 else if (!((op2
& 0x67) ^ 0x05))
13762 /* Load word instruction. */
13763 return thumb2_record_ld_word (thumb2_insn_r
);
13765 else if (!((op2
& 0x70) ^ 0x20))
13767 /* Data-processing (register) instruction. */
13768 return thumb2_record_ps_dest_generic (thumb2_insn_r
);
13770 else if (!((op2
& 0x78) ^ 0x30))
13772 /* Multiply, multiply accumulate, abs diff instruction. */
13773 return thumb2_record_ps_dest_generic (thumb2_insn_r
);
13775 else if (!((op2
& 0x78) ^ 0x38))
13777 /* Long multiply, long multiply accumulate, and divide. */
13778 return thumb2_record_lmul_lmla_div (thumb2_insn_r
);
13780 else if (op2
& 0x40)
13782 /* Co-processor instructions. */
13783 return thumb2_record_coproc_insn (thumb2_insn_r
);
13790 /* Extracts arm/thumb/thumb2 insn depending on the size, and returns 0 on success
13791 and positive val on fauilure. */
13794 extract_arm_insn (insn_decode_record
*insn_record
, uint32_t insn_size
)
13796 gdb_byte buf
[insn_size
];
13798 memset (&buf
[0], 0, insn_size
);
13800 if (target_read_memory (insn_record
->this_addr
, &buf
[0], insn_size
))
13802 insn_record
->arm_insn
= (uint32_t) extract_unsigned_integer (&buf
[0],
13804 gdbarch_byte_order_for_code (insn_record
->gdbarch
));
13808 typedef int (*sti_arm_hdl_fp_t
) (insn_decode_record
*);
13810 /* Decode arm/thumb insn depending on condition cods and opcodes; and
13814 decode_insn (insn_decode_record
*arm_record
, record_type_t record_type
,
13815 uint32_t insn_size
)
13818 /* (Starting from numerical 0); bits 25, 26, 27 decodes type of arm instruction. */
13819 static const sti_arm_hdl_fp_t arm_handle_insn
[8] =
13821 arm_record_data_proc_misc_ld_str
, /* 000. */
13822 arm_record_data_proc_imm
, /* 001. */
13823 arm_record_ld_st_imm_offset
, /* 010. */
13824 arm_record_ld_st_reg_offset
, /* 011. */
13825 arm_record_ld_st_multiple
, /* 100. */
13826 arm_record_b_bl
, /* 101. */
13827 arm_record_asimd_vfp_coproc
, /* 110. */
13828 arm_record_coproc_data_proc
/* 111. */
13831 /* (Starting from numerical 0); bits 13,14,15 decodes type of thumb instruction. */
13832 static const sti_arm_hdl_fp_t thumb_handle_insn
[8] =
13834 thumb_record_shift_add_sub
, /* 000. */
13835 thumb_record_add_sub_cmp_mov
, /* 001. */
13836 thumb_record_ld_st_reg_offset
, /* 010. */
13837 thumb_record_ld_st_imm_offset
, /* 011. */
13838 thumb_record_ld_st_stack
, /* 100. */
13839 thumb_record_misc
, /* 101. */
13840 thumb_record_ldm_stm_swi
, /* 110. */
13841 thumb_record_branch
/* 111. */
13844 uint32_t ret
= 0; /* return value: negative:failure 0:success. */
13845 uint32_t insn_id
= 0;
13847 if (extract_arm_insn (arm_record
, insn_size
))
13851 printf_unfiltered (_("Process record: error reading memory at "
13852 "addr %s len = %d.\n"),
13853 paddress (arm_record
->gdbarch
, arm_record
->this_addr
), insn_size
);
13857 else if (ARM_RECORD
== record_type
)
13859 arm_record
->cond
= bits (arm_record
->arm_insn
, 28, 31);
13860 insn_id
= bits (arm_record
->arm_insn
, 25, 27);
13861 ret
= arm_record_extension_space (arm_record
);
13862 /* If this insn has fallen into extension space
13863 then we need not decode it anymore. */
13864 if (ret
!= -1 && !INSN_RECORDED(arm_record
))
13866 ret
= arm_handle_insn
[insn_id
] (arm_record
);
13869 else if (THUMB_RECORD
== record_type
)
13871 /* As thumb does not have condition codes, we set negative. */
13872 arm_record
->cond
= -1;
13873 insn_id
= bits (arm_record
->arm_insn
, 13, 15);
13874 ret
= thumb_handle_insn
[insn_id
] (arm_record
);
13876 else if (THUMB2_RECORD
== record_type
)
13878 /* As thumb does not have condition codes, we set negative. */
13879 arm_record
->cond
= -1;
13881 /* Swap first half of 32bit thumb instruction with second half. */
13882 arm_record
->arm_insn
13883 = (arm_record
->arm_insn
>> 16) | (arm_record
->arm_insn
<< 16);
13885 insn_id
= thumb2_record_decode_insn_handler (arm_record
);
13887 if (insn_id
!= ARM_RECORD_SUCCESS
)
13889 arm_record_unsupported_insn (arm_record
);
13895 /* Throw assertion. */
13896 gdb_assert_not_reached ("not a valid instruction, could not decode");
13903 /* Cleans up local record registers and memory allocations. */
13906 deallocate_reg_mem (insn_decode_record
*record
)
13908 xfree (record
->arm_regs
);
13909 xfree (record
->arm_mems
);
13913 /* Parse the current instruction and record the values of the registers and
13914 memory that will be changed in current instruction to record_arch_list".
13915 Return -1 if something is wrong. */
13918 arm_process_record (struct gdbarch
*gdbarch
, struct regcache
*regcache
,
13919 CORE_ADDR insn_addr
)
13922 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
13923 uint32_t no_of_rec
= 0;
13924 uint32_t ret
= 0; /* return value: -1:record failure ; 0:success */
13925 ULONGEST t_bit
= 0, insn_id
= 0;
13927 ULONGEST u_regval
= 0;
13929 insn_decode_record arm_record
;
13931 memset (&arm_record
, 0, sizeof (insn_decode_record
));
13932 arm_record
.regcache
= regcache
;
13933 arm_record
.this_addr
= insn_addr
;
13934 arm_record
.gdbarch
= gdbarch
;
13937 if (record_debug
> 1)
13939 fprintf_unfiltered (gdb_stdlog
, "Process record: arm_process_record "
13941 paddress (gdbarch
, arm_record
.this_addr
));
13944 if (extract_arm_insn (&arm_record
, 2))
13948 printf_unfiltered (_("Process record: error reading memory at "
13949 "addr %s len = %d.\n"),
13950 paddress (arm_record
.gdbarch
,
13951 arm_record
.this_addr
), 2);
13956 /* Check the insn, whether it is thumb or arm one. */
13958 t_bit
= arm_psr_thumb_bit (arm_record
.gdbarch
);
13959 regcache_raw_read_unsigned (arm_record
.regcache
, ARM_PS_REGNUM
, &u_regval
);
13962 if (!(u_regval
& t_bit
))
13964 /* We are decoding arm insn. */
13965 ret
= decode_insn (&arm_record
, ARM_RECORD
, ARM_INSN_SIZE_BYTES
);
13969 insn_id
= bits (arm_record
.arm_insn
, 11, 15);
13970 /* is it thumb2 insn? */
13971 if ((0x1D == insn_id
) || (0x1E == insn_id
) || (0x1F == insn_id
))
13973 ret
= decode_insn (&arm_record
, THUMB2_RECORD
,
13974 THUMB2_INSN_SIZE_BYTES
);
13978 /* We are decoding thumb insn. */
13979 ret
= decode_insn (&arm_record
, THUMB_RECORD
, THUMB_INSN_SIZE_BYTES
);
13985 /* Record registers. */
13986 record_full_arch_list_add_reg (arm_record
.regcache
, ARM_PC_REGNUM
);
13987 if (arm_record
.arm_regs
)
13989 for (no_of_rec
= 0; no_of_rec
< arm_record
.reg_rec_count
; no_of_rec
++)
13991 if (record_full_arch_list_add_reg
13992 (arm_record
.regcache
, arm_record
.arm_regs
[no_of_rec
]))
13996 /* Record memories. */
13997 if (arm_record
.arm_mems
)
13999 for (no_of_rec
= 0; no_of_rec
< arm_record
.mem_rec_count
; no_of_rec
++)
14001 if (record_full_arch_list_add_mem
14002 ((CORE_ADDR
)arm_record
.arm_mems
[no_of_rec
].addr
,
14003 arm_record
.arm_mems
[no_of_rec
].len
))
14008 if (record_full_arch_list_add_end ())
14013 deallocate_reg_mem (&arm_record
);