1 /* Common target dependent code for GDB on ARM systems.
3 Copyright (C) 1988, 1989, 1991, 1992, 1993, 1995, 1996, 1998, 1999, 2000,
4 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
5 Free Software Foundation, Inc.
7 This file is part of GDB.
9 This program is free software; you can redistribute it and/or modify
10 it under the terms of the GNU General Public License as published by
11 the Free Software Foundation; either version 3 of the License, or
12 (at your option) any later version.
14 This program is distributed in the hope that it will be useful,
15 but WITHOUT ANY WARRANTY; without even the implied warranty of
16 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 GNU General Public License for more details.
19 You should have received a copy of the GNU General Public License
20 along with this program. If not, see <http://www.gnu.org/licenses/>. */
22 #include <ctype.h> /* XXX for isupper (). */
29 #include "gdb_string.h"
30 #include "dis-asm.h" /* For register styles. */
32 #include "reggroups.h"
35 #include "arch-utils.h"
37 #include "frame-unwind.h"
38 #include "frame-base.h"
39 #include "trad-frame.h"
41 #include "dwarf2-frame.h"
43 #include "prologue-value.h"
44 #include "target-descriptions.h"
45 #include "user-regs.h"
49 #include "gdb/sim-arm.h"
52 #include "coff/internal.h"
55 #include "gdb_assert.h"
58 #include "features/arm-with-m.c"
59 #include "features/arm-with-iwmmxt.c"
60 #include "features/arm-with-vfpv2.c"
61 #include "features/arm-with-vfpv3.c"
62 #include "features/arm-with-neon.c"
66 /* Macros for setting and testing a bit in a minimal symbol that marks
67 it as Thumb function. The MSB of the minimal symbol's "info" field
68 is used for this purpose.
70 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
71 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
/* NOTE(review): the implementation below actually rides on the generic
   MSYMBOL_TARGET_FLAG_1 accessor rather than touching the "info" field
   directly, so the "MSB of the info field" wording above may be stale;
   confirm against the minimal-symbol definitions.  */
/* Mark MSYM as naming a Thumb-mode function.  */
73 #define MSYMBOL_SET_SPECIAL(msym) \
74 MSYMBOL_TARGET_FLAG_1 (msym) = 1
/* Evaluate to nonzero when MSYM has been marked as a Thumb function.  */
76 #define MSYMBOL_IS_SPECIAL(msym) \
77 MSYMBOL_TARGET_FLAG_1 (msym)
79 /* Per-objfile data used for mapping symbols. */
80 static const struct objfile_data
*arm_objfile_data_key
;
82 struct arm_mapping_symbol
87 typedef struct arm_mapping_symbol arm_mapping_symbol_s
;
88 DEF_VEC_O(arm_mapping_symbol_s
);
90 struct arm_per_objfile
92 VEC(arm_mapping_symbol_s
) **section_maps
;
95 /* The list of available "set arm ..." and "show arm ..." commands. */
/* Heads of the command sublists for the "set arm" and "show arm"
   prefix commands; populated at initialization time (the code that
   registers the individual commands is not visible in this chunk).  */
96 static struct cmd_list_element
*setarmcmdlist
= NULL
;
97 static struct cmd_list_element
*showarmcmdlist
= NULL
;
99 /* The type of floating-point to use. Keep this in sync with enum
100 arm_float_model, and the help string in _initialize_arm_tdep. */
101 static const char *fp_model_strings
[] =
111 /* A variable that can be configured by the user. */
/* The floating-point model in effect, plus its user-visible string
   form.  NOTE(review): presumably the string is kept in sync with the
   enum by the set/show handlers (not visible in this chunk) -- the
   valid strings are listed in fp_model_strings above.  */
112 static enum arm_float_model arm_fp_model
= ARM_FLOAT_AUTO
;
113 static const char *current_fp_model
= "auto";
115 /* The ABI to use. Keep this in sync with arm_abi_kind. */
116 static const char *arm_abi_strings
[] =
124 /* A variable that can be configured by the user. */
/* The ABI in effect, plus its user-visible string form (valid strings
   are listed in arm_abi_strings above).  Both default to automatic
   selection.  */
125 static enum arm_abi_kind arm_abi_global
= ARM_ABI_AUTO
;
126 static const char *arm_abi_string
= "auto";
128 /* The execution mode to assume. */
129 static const char *arm_mode_strings
[] =
/* User-configurable execution-mode settings, compared against "arm"
   and "thumb" in arm_pc_is_thumb: the fallback applies only when no
   other evidence identifies the mode, while the force setting
   overrides the symbol table.  */
137 static const char *arm_fallback_mode_string
= "auto";
138 static const char *arm_force_mode_string
= "auto";
140 /* Internal override of the execution mode. -1 means no override,
141 0 means override to ARM mode, 1 means override to Thumb mode.
142 The effect is the same as if arm_force_mode has been set by the
143 user (except the internal override has precedence over a user's
144 arm_force_mode override). */
/* When != -1, arm_pc_is_thumb returns this value directly.  */
145 static int arm_override_mode
= -1;
147 /* Number of different reg name sets (options). */
/* NOTE(review): presumably the element count of
   valid_disassembly_styles; the code that computes it is not visible
   in this chunk -- confirm before relying on the relationship.  */
148 static int num_disassembly_options
;
150 /* The standard register names, and all the valid aliases for them. Note
151 that `fp', `sp' and `pc' are not added in this alias list, because they
152 have been added as builtin user registers in
153 std-regs.c:_initialize_frame_reg. */
158 } arm_register_aliases
[] = {
159 /* Basic register numbers. */
176 /* Synonyms (argument and variable registers). */
189 /* Other platform-specific names for r9. */
195 /* Names used by GCC (not listed in the ARM EABI). */
197 /* A special name from the older ATPCS. */
/* Canonical register names indexed by GDB register number: the core
   registers r0-r12, sp, lr, pc (0-15), the floating-point registers
   f0-f7 (16-23), and fps and cpsr (24-25).  */
201 static const char *const arm_register_names
[] =
202 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
203 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
204 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
205 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
206 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
207 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
208 "fps", "cpsr" }; /* 24 25 */
210 /* Valid register name styles. */
/* NOTE(review): allocated and filled elsewhere (not visible in this
   chunk); see also num_disassembly_options above.  */
211 static const char **valid_disassembly_styles
;
213 /* Disassembly style to use. Default to "std" register names. */
214 static const char *disassembly_style
;
216 /* This is used to keep the bfd arch_info in sync with the disassembly
218 static void set_disassembly_style_sfunc(char *, int,
219 struct cmd_list_element
*);
220 static void set_disassembly_style (void);
222 static void convert_from_extended (const struct floatformat
*, const void *,
224 static void convert_to_extended (const struct floatformat
*, void *,
227 static enum register_status
arm_neon_quad_read (struct gdbarch
*gdbarch
,
228 struct regcache
*regcache
,
229 int regnum
, gdb_byte
*buf
);
230 static void arm_neon_quad_write (struct gdbarch
*gdbarch
,
231 struct regcache
*regcache
,
232 int regnum
, const gdb_byte
*buf
);
234 struct arm_prologue_cache
236 /* The stack pointer at the time this frame was created; i.e. the
237 caller's stack pointer when this function was called. It is used
238 to identify this frame. */
241 /* The frame base for this frame is just prev_sp - frame size.
242 FRAMESIZE is the distance from the frame pointer to the
243 initial stack pointer. */
247 /* The register used to hold the frame pointer for this frame. */
250 /* Saved register offsets. */
251 struct trad_frame_saved_reg
*saved_regs
;
254 static CORE_ADDR
arm_analyze_prologue (struct gdbarch
*gdbarch
,
255 CORE_ADDR prologue_start
,
256 CORE_ADDR prologue_end
,
257 struct arm_prologue_cache
*cache
);
259 /* Architecture version for displaced stepping. This affects the behaviour of
260 certain instructions, and really should not be hard-wired. */
/* NOTE(review): the consumers of this constant (the displaced-stepping
   instruction decoder, presumably) are not visible in this chunk.  */
262 #define DISPLACED_STEPPING_ARCH_VERSION 5
264 /* Addresses for calling Thumb functions have the bit 0 set.
265 Here are some macros to test, set, or clear bit 0 of addresses. */
/* Nonzero iff ADDR carries the Thumb interworking bit (bit 0).  */
266 #define IS_THUMB_ADDR(addr) ((addr) & 1)
/* Return ADDR with bit 0 set, marking it as a Thumb address.  */
267 #define MAKE_THUMB_ADDR(addr) ((addr) | 1)
/* Return ADDR with bit 0 cleared, yielding the plain code address.  */
268 #define UNMAKE_THUMB_ADDR(addr) ((addr) & ~1)
270 /* Set to true if the 32-bit mode is in use. */
274 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
277 arm_psr_thumb_bit (struct gdbarch
*gdbarch
)
279 if (gdbarch_tdep (gdbarch
)->is_m
)
285 /* Determine if FRAME is executing in Thumb mode. */
288 arm_frame_is_thumb (struct frame_info
*frame
)
291 ULONGEST t_bit
= arm_psr_thumb_bit (get_frame_arch (frame
));
293 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
294 directly (from a signal frame or dummy frame) or by interpreting
295 the saved LR (from a prologue or DWARF frame). So consult it and
296 trust the unwinders. */
297 cpsr
= get_frame_register_unsigned (frame
, ARM_PS_REGNUM
);
299 return (cpsr
& t_bit
) != 0;
302 /* Callback for VEC_lower_bound. */
305 arm_compare_mapping_symbols (const struct arm_mapping_symbol
*lhs
,
306 const struct arm_mapping_symbol
*rhs
)
308 return lhs
->value
< rhs
->value
;
311 /* Search for the mapping symbol covering MEMADDR. If one is found,
312 return its type. Otherwise, return 0. If START is non-NULL,
313 set *START to the location of the mapping symbol. */
316 arm_find_mapping_symbol (CORE_ADDR memaddr
, CORE_ADDR
*start
)
318 struct obj_section
*sec
;
320 /* If there are mapping symbols, consult them. */
321 sec
= find_pc_section (memaddr
);
324 struct arm_per_objfile
*data
;
325 VEC(arm_mapping_symbol_s
) *map
;
326 struct arm_mapping_symbol map_key
= { memaddr
- obj_section_addr (sec
),
330 data
= objfile_data (sec
->objfile
, arm_objfile_data_key
);
333 map
= data
->section_maps
[sec
->the_bfd_section
->index
];
334 if (!VEC_empty (arm_mapping_symbol_s
, map
))
336 struct arm_mapping_symbol
*map_sym
;
338 idx
= VEC_lower_bound (arm_mapping_symbol_s
, map
, &map_key
,
339 arm_compare_mapping_symbols
);
341 /* VEC_lower_bound finds the earliest ordered insertion
342 point. If the following symbol starts at this exact
343 address, we use that; otherwise, the preceding
344 mapping symbol covers this address. */
345 if (idx
< VEC_length (arm_mapping_symbol_s
, map
))
347 map_sym
= VEC_index (arm_mapping_symbol_s
, map
, idx
);
348 if (map_sym
->value
== map_key
.value
)
351 *start
= map_sym
->value
+ obj_section_addr (sec
);
352 return map_sym
->type
;
358 map_sym
= VEC_index (arm_mapping_symbol_s
, map
, idx
- 1);
360 *start
= map_sym
->value
+ obj_section_addr (sec
);
361 return map_sym
->type
;
370 /* Determine if the program counter specified in MEMADDR is in a Thumb
371 function. This function should be called for addresses unrelated to
372 any executing frame; otherwise, prefer arm_frame_is_thumb. */
375 arm_pc_is_thumb (struct gdbarch
*gdbarch
, CORE_ADDR memaddr
)
377 struct obj_section
*sec
;
378 struct minimal_symbol
*sym
;
380 struct displaced_step_closure
* dsc
381 = get_displaced_step_closure_by_addr(memaddr
);
383 /* If checking the mode of displaced instruction in copy area, the mode
384 should be determined by instruction on the original address. */
388 fprintf_unfiltered (gdb_stdlog
,
389 "displaced: check mode of %.8lx instead of %.8lx\n",
390 (unsigned long) dsc
->insn_addr
,
391 (unsigned long) memaddr
);
392 memaddr
= dsc
->insn_addr
;
395 /* If bit 0 of the address is set, assume this is a Thumb address. */
396 if (IS_THUMB_ADDR (memaddr
))
399 /* Respect internal mode override if active. */
400 if (arm_override_mode
!= -1)
401 return arm_override_mode
;
403 /* If the user wants to override the symbol table, let him. */
404 if (strcmp (arm_force_mode_string
, "arm") == 0)
406 if (strcmp (arm_force_mode_string
, "thumb") == 0)
409 /* ARM v6-M and v7-M are always in Thumb mode. */
410 if (gdbarch_tdep (gdbarch
)->is_m
)
413 /* If there are mapping symbols, consult them. */
414 type
= arm_find_mapping_symbol (memaddr
, NULL
);
418 /* Thumb functions have a "special" bit set in minimal symbols. */
419 sym
= lookup_minimal_symbol_by_pc (memaddr
);
421 return (MSYMBOL_IS_SPECIAL (sym
));
423 /* If the user wants to override the fallback mode, let them. */
424 if (strcmp (arm_fallback_mode_string
, "arm") == 0)
426 if (strcmp (arm_fallback_mode_string
, "thumb") == 0)
429 /* If we couldn't find any symbol, but we're talking to a running
430 target, then trust the current value of $cpsr. This lets
431 "display/i $pc" always show the correct mode (though if there is
432 a symbol table we will not reach here, so it still may not be
433 displayed in the mode it will be executed). */
434 if (target_has_registers
)
435 return arm_frame_is_thumb (get_current_frame ());
437 /* Otherwise we're out of luck; we assume ARM. */
441 /* Remove useless bits from addresses in a running program. */
443 arm_addr_bits_remove (struct gdbarch
*gdbarch
, CORE_ADDR val
)
446 return UNMAKE_THUMB_ADDR (val
);
448 return (val
& 0x03fffffc);
451 /* When reading symbols, we need to zap the low bit of the address,
452 which may be set to 1 for Thumb functions. */
454 arm_smash_text_address (struct gdbarch
*gdbarch
, CORE_ADDR val
)
459 /* Return 1 if PC is the start of a compiler helper function which
460 can be safely ignored during prologue skipping. IS_THUMB is true
461 if the function is known to be a Thumb function due to the way it
464 skip_prologue_function (struct gdbarch
*gdbarch
, CORE_ADDR pc
, int is_thumb
)
466 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
467 struct minimal_symbol
*msym
;
469 msym
= lookup_minimal_symbol_by_pc (pc
);
471 && SYMBOL_VALUE_ADDRESS (msym
) == pc
472 && SYMBOL_LINKAGE_NAME (msym
) != NULL
)
474 const char *name
= SYMBOL_LINKAGE_NAME (msym
);
476 /* The GNU linker's Thumb call stub to foo is named
478 if (strstr (name
, "_from_thumb") != NULL
)
481 /* On soft-float targets, __truncdfsf2 is called to convert promoted
482 arguments to their argument types in non-prototyped
484 if (strncmp (name
, "__truncdfsf2", strlen ("__truncdfsf2")) == 0)
486 if (strncmp (name
, "__aeabi_d2f", strlen ("__aeabi_d2f")) == 0)
489 /* Internal functions related to thread-local storage. */
490 if (strncmp (name
, "__tls_get_addr", strlen ("__tls_get_addr")) == 0)
492 if (strncmp (name
, "__aeabi_read_tp", strlen ("__aeabi_read_tp")) == 0)
497 /* If we run against a stripped glibc, we may be unable to identify
498 special functions by name. Check for one important case,
499 __aeabi_read_tp, by comparing the *code* against the default
500 implementation (this is hand-written ARM assembler in glibc). */
503 && read_memory_unsigned_integer (pc
, 4, byte_order_for_code
)
504 == 0xe3e00a0f /* mov r0, #0xffff0fff */
505 && read_memory_unsigned_integer (pc
+ 4, 4, byte_order_for_code
)
506 == 0xe240f01f) /* sub pc, r0, #31 */
513 /* Support routines for instruction parsing. */
/* Mask with the low (x + 1) bits set, i.e. bits [0 .. x].
   NOTE(review): for x == 31 this shifts 1L by 32, which is undefined
   behavior when long is 32 bits -- the visible callers keep x < 31,
   but confirm before using it at the full register width.  */
514 #define submask(x) ((1L << ((x) + 1)) - 1)
/* Extract bit ST of OBJ (result is 0 or 1).  */
515 #define bit(obj,st) (((obj) >> (st)) & 1)
/* Extract the inclusive bit field [ST .. FN] of OBJ.  */
516 #define bits(obj,st,fn) (((obj) >> (st)) & submask ((fn) - (st)))
/* Like bits, but sign-extended: bit FN is replicated into all higher
   bits of the (long) result.  */
517 #define sbits(obj,st,fn) \
518 ((long) (bits(obj,st,fn) | ((long) bit(obj,fn) * ~ submask (fn - st))))
/* Destination of an ARM branch at ADDR: the signed 24-bit word offset
   in bits [0 .. 23] of INSTR, scaled by 4, relative to ADDR + 8 (the
   ARM pipeline PC offset).  */
519 #define BranchDest(addr,instr) \
520 ((CORE_ADDR) (((long) (addr)) + 8 + (sbits (instr, 0, 23) << 2)))
522 /* Extract the immediate from instruction movw/movt of encoding T. INSN1 is
523 the first 16-bit of instruction, and INSN2 is the second 16-bit of
   the instruction.  The 16-bit result is assembled, per the visible
   shifts, from imm4 (insn1 bits 3:0, -> result 15:12), i (insn1 bit
   10, -> result bit 11), imm3 (insn2 bits 14:12, -> result 10:8) and
   imm8 (insn2 bits 7:0, -> result 7:0).  */
525 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
526 ((bits ((insn1), 0, 3) << 12) \
527 | (bits ((insn1), 10, 10) << 11) \
528 | (bits ((insn2), 12, 14) << 8) \
529 | bits ((insn2), 0, 7))
531 /* Extract the immediate from instruction movw/movt of encoding A. INSN is
532 the 32-bit instruction. */
/* The 16-bit result is assembled from imm4 (insn bits 19:16, ->
   result 15:12) and imm12 (insn bits 11:0, -> result 11:0).  */
533 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \
534 ((bits ((insn), 16, 19) << 12) \
535 | bits ((insn), 0, 11))
537 /* Decode immediate value; implements ThumbExpandImmediate pseudo-op. */
540 thumb_expand_immediate (unsigned int imm
)
542 unsigned int count
= imm
>> 7;
550 return (imm
& 0xff) | ((imm
& 0xff) << 16);
552 return ((imm
& 0xff) << 8) | ((imm
& 0xff) << 24);
554 return (imm
& 0xff) | ((imm
& 0xff) << 8)
555 | ((imm
& 0xff) << 16) | ((imm
& 0xff) << 24);
558 return (0x80 | (imm
& 0x7f)) << (32 - count
);
561 /* Return 1 if the 16-bit Thumb instruction INST might change
562 control flow, 0 otherwise. */
565 thumb_instruction_changes_pc (unsigned short inst
)
567 if ((inst
& 0xff00) == 0xbd00) /* pop {rlist, pc} */
570 if ((inst
& 0xf000) == 0xd000) /* conditional branch */
573 if ((inst
& 0xf800) == 0xe000) /* unconditional branch */
576 if ((inst
& 0xff00) == 0x4700) /* bx REG, blx REG */
579 if ((inst
& 0xff87) == 0x4687) /* mov pc, REG */
582 if ((inst
& 0xf500) == 0xb100) /* CBNZ or CBZ. */
588 /* Return 1 if the 32-bit Thumb instruction in INST1 and INST2
589 might change control flow, 0 otherwise. */
592 thumb2_instruction_changes_pc (unsigned short inst1
, unsigned short inst2
)
594 if ((inst1
& 0xf800) == 0xf000 && (inst2
& 0x8000) == 0x8000)
596 /* Branches and miscellaneous control instructions. */
598 if ((inst2
& 0x1000) != 0 || (inst2
& 0xd001) == 0xc000)
603 else if (inst1
== 0xf3de && (inst2
& 0xff00) == 0x3f00)
605 /* SUBS PC, LR, #imm8. */
608 else if ((inst2
& 0xd000) == 0x8000 && (inst1
& 0x0380) != 0x0380)
610 /* Conditional branch. */
617 if ((inst1
& 0xfe50) == 0xe810)
619 /* Load multiple or RFE. */
621 if (bit (inst1
, 7) && !bit (inst1
, 8))
627 else if (!bit (inst1
, 7) && bit (inst1
, 8))
633 else if (bit (inst1
, 7) && bit (inst1
, 8))
638 else if (!bit (inst1
, 7) && !bit (inst1
, 8))
647 if ((inst1
& 0xffef) == 0xea4f && (inst2
& 0xfff0) == 0x0f00)
649 /* MOV PC or MOVS PC. */
653 if ((inst1
& 0xff70) == 0xf850 && (inst2
& 0xf000) == 0xf000)
656 if (bits (inst1
, 0, 3) == 15)
662 if ((inst2
& 0x0fc0) == 0x0000)
668 if ((inst1
& 0xfff0) == 0xe8d0 && (inst2
& 0xfff0) == 0xf000)
674 if ((inst1
& 0xfff0) == 0xe8d0 && (inst2
& 0xfff0) == 0xf010)
683 /* Analyze a Thumb prologue, looking for a recognizable stack frame
684 and frame pointer. Scan until we encounter a store that could
685 clobber the stack frame unexpectedly, or an unknown instruction.
686 Return the last address which is definitely safe to skip for an
687 initial breakpoint. */
690 thumb_analyze_prologue (struct gdbarch
*gdbarch
,
691 CORE_ADDR start
, CORE_ADDR limit
,
692 struct arm_prologue_cache
*cache
)
694 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
695 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
698 struct pv_area
*stack
;
699 struct cleanup
*back_to
;
701 CORE_ADDR unrecognized_pc
= 0;
703 for (i
= 0; i
< 16; i
++)
704 regs
[i
] = pv_register (i
, 0);
705 stack
= make_pv_area (ARM_SP_REGNUM
, gdbarch_addr_bit (gdbarch
));
706 back_to
= make_cleanup_free_pv_area (stack
);
708 while (start
< limit
)
712 insn
= read_memory_unsigned_integer (start
, 2, byte_order_for_code
);
714 if ((insn
& 0xfe00) == 0xb400) /* push { rlist } */
719 if (pv_area_store_would_trash (stack
, regs
[ARM_SP_REGNUM
]))
722 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
723 whether to save LR (R14). */
724 mask
= (insn
& 0xff) | ((insn
& 0x100) << 6);
726 /* Calculate offsets of saved R0-R7 and LR. */
727 for (regno
= ARM_LR_REGNUM
; regno
>= 0; regno
--)
728 if (mask
& (1 << regno
))
730 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
],
732 pv_area_store (stack
, regs
[ARM_SP_REGNUM
], 4, regs
[regno
]);
735 else if ((insn
& 0xff00) == 0xb000) /* add sp, #simm OR
738 offset
= (insn
& 0x7f) << 2; /* get scaled offset */
739 if (insn
& 0x80) /* Check for SUB. */
740 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
],
743 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
],
746 else if ((insn
& 0xf800) == 0xa800) /* add Rd, sp, #imm */
747 regs
[bits (insn
, 8, 10)] = pv_add_constant (regs
[ARM_SP_REGNUM
],
749 else if ((insn
& 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
750 && pv_is_register (regs
[bits (insn
, 3, 5)], ARM_SP_REGNUM
))
751 regs
[bits (insn
, 0, 2)] = pv_add_constant (regs
[bits (insn
, 3, 5)],
753 else if ((insn
& 0xf800) == 0x3000 /* add Rd, #imm */
754 && pv_is_register (regs
[bits (insn
, 8, 10)], ARM_SP_REGNUM
))
755 regs
[bits (insn
, 8, 10)] = pv_add_constant (regs
[bits (insn
, 8, 10)],
757 else if ((insn
& 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
758 && pv_is_register (regs
[bits (insn
, 6, 8)], ARM_SP_REGNUM
)
759 && pv_is_constant (regs
[bits (insn
, 3, 5)]))
760 regs
[bits (insn
, 0, 2)] = pv_add (regs
[bits (insn
, 3, 5)],
761 regs
[bits (insn
, 6, 8)]);
762 else if ((insn
& 0xff00) == 0x4400 /* add Rd, Rm */
763 && pv_is_constant (regs
[bits (insn
, 3, 6)]))
765 int rd
= (bit (insn
, 7) << 3) + bits (insn
, 0, 2);
766 int rm
= bits (insn
, 3, 6);
767 regs
[rd
] = pv_add (regs
[rd
], regs
[rm
]);
769 else if ((insn
& 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
771 int dst_reg
= (insn
& 0x7) + ((insn
& 0x80) >> 4);
772 int src_reg
= (insn
& 0x78) >> 3;
773 regs
[dst_reg
] = regs
[src_reg
];
775 else if ((insn
& 0xf800) == 0x9000) /* str rd, [sp, #off] */
777 /* Handle stores to the stack. Normally pushes are used,
778 but with GCC -mtpcs-frame, there may be other stores
779 in the prologue to create the frame. */
780 int regno
= (insn
>> 8) & 0x7;
783 offset
= (insn
& 0xff) << 2;
784 addr
= pv_add_constant (regs
[ARM_SP_REGNUM
], offset
);
786 if (pv_area_store_would_trash (stack
, addr
))
789 pv_area_store (stack
, addr
, 4, regs
[regno
]);
791 else if ((insn
& 0xf800) == 0x6000) /* str rd, [rn, #off] */
793 int rd
= bits (insn
, 0, 2);
794 int rn
= bits (insn
, 3, 5);
797 offset
= bits (insn
, 6, 10) << 2;
798 addr
= pv_add_constant (regs
[rn
], offset
);
800 if (pv_area_store_would_trash (stack
, addr
))
803 pv_area_store (stack
, addr
, 4, regs
[rd
]);
805 else if (((insn
& 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
806 || (insn
& 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
807 && pv_is_register (regs
[bits (insn
, 3, 5)], ARM_SP_REGNUM
))
808 /* Ignore stores of argument registers to the stack. */
810 else if ((insn
& 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
811 && pv_is_register (regs
[bits (insn
, 8, 10)], ARM_SP_REGNUM
))
812 /* Ignore block loads from the stack, potentially copying
813 parameters from memory. */
815 else if ((insn
& 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
816 || ((insn
& 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
817 && pv_is_register (regs
[bits (insn
, 3, 5)], ARM_SP_REGNUM
)))
818 /* Similarly ignore single loads from the stack. */
820 else if ((insn
& 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
821 || (insn
& 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
822 /* Skip register copies, i.e. saves to another register
823 instead of the stack. */
825 else if ((insn
& 0xf800) == 0x2000) /* movs Rd, #imm */
826 /* Recognize constant loads; even with small stacks these are necessary
828 regs
[bits (insn
, 8, 10)] = pv_constant (bits (insn
, 0, 7));
829 else if ((insn
& 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
831 /* Constant pool loads, for the same reason. */
832 unsigned int constant
;
835 loc
= start
+ 4 + bits (insn
, 0, 7) * 4;
836 constant
= read_memory_unsigned_integer (loc
, 4, byte_order
);
837 regs
[bits (insn
, 8, 10)] = pv_constant (constant
);
839 else if ((insn
& 0xe000) == 0xe000)
841 unsigned short inst2
;
843 inst2
= read_memory_unsigned_integer (start
+ 2, 2,
844 byte_order_for_code
);
846 if ((insn
& 0xf800) == 0xf000 && (inst2
& 0xe800) == 0xe800)
848 /* BL, BLX. Allow some special function calls when
849 skipping the prologue; GCC generates these before
850 storing arguments to the stack. */
852 int j1
, j2
, imm1
, imm2
;
854 imm1
= sbits (insn
, 0, 10);
855 imm2
= bits (inst2
, 0, 10);
856 j1
= bit (inst2
, 13);
857 j2
= bit (inst2
, 11);
859 offset
= ((imm1
<< 12) + (imm2
<< 1));
860 offset
^= ((!j2
) << 22) | ((!j1
) << 23);
862 nextpc
= start
+ 4 + offset
;
863 /* For BLX make sure to clear the low bits. */
864 if (bit (inst2
, 12) == 0)
865 nextpc
= nextpc
& 0xfffffffc;
867 if (!skip_prologue_function (gdbarch
, nextpc
,
868 bit (inst2
, 12) != 0))
872 else if ((insn
& 0xffd0) == 0xe900 /* stmdb Rn{!},
874 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
876 pv_t addr
= regs
[bits (insn
, 0, 3)];
879 if (pv_area_store_would_trash (stack
, addr
))
882 /* Calculate offsets of saved registers. */
883 for (regno
= ARM_LR_REGNUM
; regno
>= 0; regno
--)
884 if (inst2
& (1 << regno
))
886 addr
= pv_add_constant (addr
, -4);
887 pv_area_store (stack
, addr
, 4, regs
[regno
]);
891 regs
[bits (insn
, 0, 3)] = addr
;
894 else if ((insn
& 0xff50) == 0xe940 /* strd Rt, Rt2,
896 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
898 int regno1
= bits (inst2
, 12, 15);
899 int regno2
= bits (inst2
, 8, 11);
900 pv_t addr
= regs
[bits (insn
, 0, 3)];
902 offset
= inst2
& 0xff;
904 addr
= pv_add_constant (addr
, offset
);
906 addr
= pv_add_constant (addr
, -offset
);
908 if (pv_area_store_would_trash (stack
, addr
))
911 pv_area_store (stack
, addr
, 4, regs
[regno1
]);
912 pv_area_store (stack
, pv_add_constant (addr
, 4),
916 regs
[bits (insn
, 0, 3)] = addr
;
919 else if ((insn
& 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
920 && (inst2
& 0x0c00) == 0x0c00
921 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
923 int regno
= bits (inst2
, 12, 15);
924 pv_t addr
= regs
[bits (insn
, 0, 3)];
926 offset
= inst2
& 0xff;
928 addr
= pv_add_constant (addr
, offset
);
930 addr
= pv_add_constant (addr
, -offset
);
932 if (pv_area_store_would_trash (stack
, addr
))
935 pv_area_store (stack
, addr
, 4, regs
[regno
]);
938 regs
[bits (insn
, 0, 3)] = addr
;
941 else if ((insn
& 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
942 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
944 int regno
= bits (inst2
, 12, 15);
947 offset
= inst2
& 0xfff;
948 addr
= pv_add_constant (regs
[bits (insn
, 0, 3)], offset
);
950 if (pv_area_store_would_trash (stack
, addr
))
953 pv_area_store (stack
, addr
, 4, regs
[regno
]);
956 else if ((insn
& 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
957 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
958 /* Ignore stores of argument registers to the stack. */
961 else if ((insn
& 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
962 && (inst2
& 0x0d00) == 0x0c00
963 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
964 /* Ignore stores of argument registers to the stack. */
967 else if ((insn
& 0xffd0) == 0xe890 /* ldmia Rn[!],
969 && (inst2
& 0x8000) == 0x0000
970 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
971 /* Ignore block loads from the stack, potentially copying
972 parameters from memory. */
975 else if ((insn
& 0xffb0) == 0xe950 /* ldrd Rt, Rt2,
977 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
978 /* Similarly ignore dual loads from the stack. */
981 else if ((insn
& 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
982 && (inst2
& 0x0d00) == 0x0c00
983 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
984 /* Similarly ignore single loads from the stack. */
987 else if ((insn
& 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
988 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
989 /* Similarly ignore single loads from the stack. */
992 else if ((insn
& 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
993 && (inst2
& 0x8000) == 0x0000)
995 unsigned int imm
= ((bits (insn
, 10, 10) << 11)
996 | (bits (inst2
, 12, 14) << 8)
997 | bits (inst2
, 0, 7));
999 regs
[bits (inst2
, 8, 11)]
1000 = pv_add_constant (regs
[bits (insn
, 0, 3)],
1001 thumb_expand_immediate (imm
));
1004 else if ((insn
& 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
1005 && (inst2
& 0x8000) == 0x0000)
1007 unsigned int imm
= ((bits (insn
, 10, 10) << 11)
1008 | (bits (inst2
, 12, 14) << 8)
1009 | bits (inst2
, 0, 7));
1011 regs
[bits (inst2
, 8, 11)]
1012 = pv_add_constant (regs
[bits (insn
, 0, 3)], imm
);
1015 else if ((insn
& 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
1016 && (inst2
& 0x8000) == 0x0000)
1018 unsigned int imm
= ((bits (insn
, 10, 10) << 11)
1019 | (bits (inst2
, 12, 14) << 8)
1020 | bits (inst2
, 0, 7));
1022 regs
[bits (inst2
, 8, 11)]
1023 = pv_add_constant (regs
[bits (insn
, 0, 3)],
1024 - (CORE_ADDR
) thumb_expand_immediate (imm
));
1027 else if ((insn
& 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
1028 && (inst2
& 0x8000) == 0x0000)
1030 unsigned int imm
= ((bits (insn
, 10, 10) << 11)
1031 | (bits (inst2
, 12, 14) << 8)
1032 | bits (inst2
, 0, 7));
1034 regs
[bits (inst2
, 8, 11)]
1035 = pv_add_constant (regs
[bits (insn
, 0, 3)], - (CORE_ADDR
) imm
);
1038 else if ((insn
& 0xfbff) == 0xf04f) /* mov.w Rd, #const */
1040 unsigned int imm
= ((bits (insn
, 10, 10) << 11)
1041 | (bits (inst2
, 12, 14) << 8)
1042 | bits (inst2
, 0, 7));
1044 regs
[bits (inst2
, 8, 11)]
1045 = pv_constant (thumb_expand_immediate (imm
));
1048 else if ((insn
& 0xfbf0) == 0xf240) /* movw Rd, #const */
1051 = EXTRACT_MOVW_MOVT_IMM_T (insn
, inst2
);
1053 regs
[bits (inst2
, 8, 11)] = pv_constant (imm
);
1056 else if (insn
== 0xea5f /* mov.w Rd,Rm */
1057 && (inst2
& 0xf0f0) == 0)
1059 int dst_reg
= (inst2
& 0x0f00) >> 8;
1060 int src_reg
= inst2
& 0xf;
1061 regs
[dst_reg
] = regs
[src_reg
];
1064 else if ((insn
& 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1066 /* Constant pool loads. */
1067 unsigned int constant
;
1070 offset
= bits (insn
, 0, 11);
1072 loc
= start
+ 4 + offset
;
1074 loc
= start
+ 4 - offset
;
1076 constant
= read_memory_unsigned_integer (loc
, 4, byte_order
);
1077 regs
[bits (inst2
, 12, 15)] = pv_constant (constant
);
1080 else if ((insn
& 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1082 /* Constant pool loads. */
1083 unsigned int constant
;
1086 offset
= bits (insn
, 0, 7) << 2;
1088 loc
= start
+ 4 + offset
;
1090 loc
= start
+ 4 - offset
;
1092 constant
= read_memory_unsigned_integer (loc
, 4, byte_order
);
1093 regs
[bits (inst2
, 12, 15)] = pv_constant (constant
);
1095 constant
= read_memory_unsigned_integer (loc
+ 4, 4, byte_order
);
1096 regs
[bits (inst2
, 8, 11)] = pv_constant (constant
);
1099 else if (thumb2_instruction_changes_pc (insn
, inst2
))
1101 /* Don't scan past anything that might change control flow. */
1106 /* The optimizer might shove anything into the prologue,
1107 so we just skip what we don't recognize. */
1108 unrecognized_pc
= start
;
1113 else if (thumb_instruction_changes_pc (insn
))
1115 /* Don't scan past anything that might change control flow. */
1120 /* The optimizer might shove anything into the prologue,
1121 so we just skip what we don't recognize. */
1122 unrecognized_pc
= start
;
1129 fprintf_unfiltered (gdb_stdlog
, "Prologue scan stopped at %s\n",
1130 paddress (gdbarch
, start
));
1132 if (unrecognized_pc
== 0)
1133 unrecognized_pc
= start
;
1137 do_cleanups (back_to
);
1138 return unrecognized_pc
;
1141 if (pv_is_register (regs
[ARM_FP_REGNUM
], ARM_SP_REGNUM
))
1143 /* Frame pointer is fp. Frame size is constant. */
1144 cache
->framereg
= ARM_FP_REGNUM
;
1145 cache
->framesize
= -regs
[ARM_FP_REGNUM
].k
;
1147 else if (pv_is_register (regs
[THUMB_FP_REGNUM
], ARM_SP_REGNUM
))
1149 /* Frame pointer is r7. Frame size is constant. */
1150 cache
->framereg
= THUMB_FP_REGNUM
;
1151 cache
->framesize
= -regs
[THUMB_FP_REGNUM
].k
;
1153 else if (pv_is_register (regs
[ARM_SP_REGNUM
], ARM_SP_REGNUM
))
1155 /* Try the stack pointer... this is a bit desperate. */
1156 cache
->framereg
= ARM_SP_REGNUM
;
1157 cache
->framesize
= -regs
[ARM_SP_REGNUM
].k
;
1161 /* We're just out of luck. We don't know where the frame is. */
1162 cache
->framereg
= -1;
1163 cache
->framesize
= 0;
1166 for (i
= 0; i
< 16; i
++)
1167 if (pv_area_find_reg (stack
, gdbarch
, i
, &offset
))
1168 cache
->saved_regs
[i
].addr
= offset
;
1170 do_cleanups (back_to
);
1171 return unrecognized_pc
;
1175 /* Try to analyze the instructions starting from PC, which load symbol
1176 __stack_chk_guard. Return the address of instruction after loading this
1177 symbol, set the dest register number to *BASEREG, and set the size of
1178 instructions for loading symbol in OFFSET. Return 0 if instructions are
1182 arm_analyze_load_stack_chk_guard(CORE_ADDR pc
, struct gdbarch
*gdbarch
,
1183 unsigned int *destreg
, int *offset
)
1185 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
1186 int is_thumb
= arm_pc_is_thumb (gdbarch
, pc
);
1187 unsigned int low
, high
, address
;
1192 unsigned short insn1
1193 = read_memory_unsigned_integer (pc
, 2, byte_order_for_code
);
1195 if ((insn1
& 0xf800) == 0x4800) /* ldr Rd, #immed */
1197 *destreg
= bits (insn1
, 8, 10);
1199 address
= bits (insn1
, 0, 7);
1201 else if ((insn1
& 0xfbf0) == 0xf240) /* movw Rd, #const */
1203 unsigned short insn2
1204 = read_memory_unsigned_integer (pc
+ 2, 2, byte_order_for_code
);
1206 low
= EXTRACT_MOVW_MOVT_IMM_T (insn1
, insn2
);
1209 = read_memory_unsigned_integer (pc
+ 4, 2, byte_order_for_code
);
1211 = read_memory_unsigned_integer (pc
+ 6, 2, byte_order_for_code
);
1213 /* movt Rd, #const */
1214 if ((insn1
& 0xfbc0) == 0xf2c0)
1216 high
= EXTRACT_MOVW_MOVT_IMM_T (insn1
, insn2
);
1217 *destreg
= bits (insn2
, 8, 11);
1219 address
= (high
<< 16 | low
);
1226 = read_memory_unsigned_integer (pc
, 4, byte_order_for_code
);
1228 if ((insn
& 0x0e5f0000) == 0x041f0000) /* ldr Rd, #immed */
1230 address
= bits (insn
, 0, 11);
1231 *destreg
= bits (insn
, 12, 15);
1234 else if ((insn
& 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1236 low
= EXTRACT_MOVW_MOVT_IMM_A (insn
);
1239 = read_memory_unsigned_integer (pc
+ 4, 4, byte_order_for_code
);
1241 if ((insn
& 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1243 high
= EXTRACT_MOVW_MOVT_IMM_A (insn
);
1244 *destreg
= bits (insn
, 12, 15);
1246 address
= (high
<< 16 | low
);
1254 /* Try to skip a sequence of instructions used for stack protector. If PC
1255 points to the first instruction of this sequence, return the address of
1256 first instruction after this sequence, otherwise, return original PC.
1258 On arm, this sequence of instructions is composed of mainly three steps,
1259 Step 1: load symbol __stack_chk_guard,
1260 Step 2: load from address of __stack_chk_guard,
1261 Step 3: store it to somewhere else.
1263 Usually, instructions on step 2 and step 3 are the same on various ARM
1264 architectures. On step 2, it is one instruction 'ldr Rx, [Rn, #0]', and
1265 on step 3, it is also one instruction 'str Rx, [r7, #immd]'. However,
1266 instructions in step 1 vary from different ARM architectures. On ARMv7,
1269 movw Rn, #:lower16:__stack_chk_guard
1270 movt Rn, #:upper16:__stack_chk_guard
1277 .word __stack_chk_guard
1279 Since ldr/str is a very popular instruction, we can't use them as
1280 'fingerprint' or 'signature' of stack protector sequence. Here we choose
1281 sequence {movw/movt, ldr}/ldr/str plus symbol __stack_chk_guard, if not
1282 stripped, as the 'fingerprint' of a stack protector cdoe sequence. */
1285 arm_skip_stack_protector(CORE_ADDR pc
, struct gdbarch
*gdbarch
)
1287 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
1288 unsigned int address
, basereg
;
1289 struct minimal_symbol
*stack_chk_guard
;
1291 int is_thumb
= arm_pc_is_thumb (gdbarch
, pc
);
1294 /* Try to parse the instructions in Step 1. */
1295 addr
= arm_analyze_load_stack_chk_guard (pc
, gdbarch
,
1300 stack_chk_guard
= lookup_minimal_symbol_by_pc (addr
);
1301 /* If name of symbol doesn't start with '__stack_chk_guard', this
1302 instruction sequence is not for stack protector. If symbol is
1303 removed, we conservatively think this sequence is for stack protector. */
1305 && strncmp (SYMBOL_LINKAGE_NAME (stack_chk_guard
), "__stack_chk_guard",
1306 strlen ("__stack_chk_guard")) != 0)
1311 unsigned int destreg
;
1313 = read_memory_unsigned_integer (pc
+ offset
, 2, byte_order_for_code
);
1315 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1316 if ((insn
& 0xf800) != 0x6800)
1318 if (bits (insn
, 3, 5) != basereg
)
1320 destreg
= bits (insn
, 0, 2);
1322 insn
= read_memory_unsigned_integer (pc
+ offset
+ 2, 2,
1323 byte_order_for_code
);
1324 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1325 if ((insn
& 0xf800) != 0x6000)
1327 if (destreg
!= bits (insn
, 0, 2))
1332 unsigned int destreg
;
1334 = read_memory_unsigned_integer (pc
+ offset
, 4, byte_order_for_code
);
1336 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1337 if ((insn
& 0x0e500000) != 0x04100000)
1339 if (bits (insn
, 16, 19) != basereg
)
1341 destreg
= bits (insn
, 12, 15);
1342 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1343 insn
= read_memory_unsigned_integer (pc
+ offset
+ 4,
1344 4, byte_order_for_code
);
1345 if ((insn
& 0x0e500000) != 0x04000000)
1347 if (bits (insn
, 12, 15) != destreg
)
1350 /* The size of total two instructions ldr/str is 4 on Thumb-2, while 8
1353 return pc
+ offset
+ 4;
1355 return pc
+ offset
+ 8;
1358 /* Advance the PC across any function entry prologue instructions to
1359 reach some "real" code.
1361 The APCS (ARM Procedure Call Standard) defines the following
1365 [stmfd sp!, {a1,a2,a3,a4}]
1366 stmfd sp!, {...,fp,ip,lr,pc}
1367 [stfe f7, [sp, #-12]!]
1368 [stfe f6, [sp, #-12]!]
1369 [stfe f5, [sp, #-12]!]
1370 [stfe f4, [sp, #-12]!]
1371 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1374 arm_skip_prologue (struct gdbarch
*gdbarch
, CORE_ADDR pc
)
1376 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
1379 CORE_ADDR func_addr
, limit_pc
;
1380 struct symtab_and_line sal
;
1382 /* See if we can determine the end of the prologue via the symbol table.
1383 If so, then return either PC, or the PC after the prologue, whichever
1385 if (find_pc_partial_function (pc
, NULL
, &func_addr
, NULL
))
1387 CORE_ADDR post_prologue_pc
1388 = skip_prologue_using_sal (gdbarch
, func_addr
);
1389 struct symtab
*s
= find_pc_symtab (func_addr
);
1391 if (post_prologue_pc
)
1393 = arm_skip_stack_protector (post_prologue_pc
, gdbarch
);
1396 /* GCC always emits a line note before the prologue and another
1397 one after, even if the two are at the same address or on the
1398 same line. Take advantage of this so that we do not need to
1399 know every instruction that might appear in the prologue. We
1400 will have producer information for most binaries; if it is
1401 missing (e.g. for -gstabs), assuming the GNU tools. */
1402 if (post_prologue_pc
1404 || s
->producer
== NULL
1405 || strncmp (s
->producer
, "GNU ", sizeof ("GNU ") - 1) == 0))
1406 return post_prologue_pc
;
1408 if (post_prologue_pc
!= 0)
1410 CORE_ADDR analyzed_limit
;
1412 /* For non-GCC compilers, make sure the entire line is an
1413 acceptable prologue; GDB will round this function's
1414 return value up to the end of the following line so we
1415 can not skip just part of a line (and we do not want to).
1417 RealView does not treat the prologue specially, but does
1418 associate prologue code with the opening brace; so this
1419 lets us skip the first line if we think it is the opening
1421 if (arm_pc_is_thumb (gdbarch
, func_addr
))
1422 analyzed_limit
= thumb_analyze_prologue (gdbarch
, func_addr
,
1423 post_prologue_pc
, NULL
);
1425 analyzed_limit
= arm_analyze_prologue (gdbarch
, func_addr
,
1426 post_prologue_pc
, NULL
);
1428 if (analyzed_limit
!= post_prologue_pc
)
1431 return post_prologue_pc
;
1435 /* Can't determine prologue from the symbol table, need to examine
1438 /* Find an upper limit on the function prologue using the debug
1439 information. If the debug information could not be used to provide
1440 that bound, then use an arbitrary large number as the upper bound. */
1441 /* Like arm_scan_prologue, stop no later than pc + 64. */
1442 limit_pc
= skip_prologue_using_sal (gdbarch
, pc
);
1444 limit_pc
= pc
+ 64; /* Magic. */
1447 /* Check if this is Thumb code. */
1448 if (arm_pc_is_thumb (gdbarch
, pc
))
1449 return thumb_analyze_prologue (gdbarch
, pc
, limit_pc
, NULL
);
1451 for (skip_pc
= pc
; skip_pc
< limit_pc
; skip_pc
+= 4)
1453 inst
= read_memory_unsigned_integer (skip_pc
, 4, byte_order_for_code
);
1455 /* "mov ip, sp" is no longer a required part of the prologue. */
1456 if (inst
== 0xe1a0c00d) /* mov ip, sp */
1459 if ((inst
& 0xfffff000) == 0xe28dc000) /* add ip, sp #n */
1462 if ((inst
& 0xfffff000) == 0xe24dc000) /* sub ip, sp #n */
1465 /* Some prologues begin with "str lr, [sp, #-4]!". */
1466 if (inst
== 0xe52de004) /* str lr, [sp, #-4]! */
1469 if ((inst
& 0xfffffff0) == 0xe92d0000) /* stmfd sp!,{a1,a2,a3,a4} */
1472 if ((inst
& 0xfffff800) == 0xe92dd800) /* stmfd sp!,{fp,ip,lr,pc} */
1475 /* Any insns after this point may float into the code, if it makes
1476 for better instruction scheduling, so we skip them only if we
1477 find them, but still consider the function to be frame-ful. */
1479 /* We may have either one sfmfd instruction here, or several stfe
1480 insns, depending on the version of floating point code we
1482 if ((inst
& 0xffbf0fff) == 0xec2d0200) /* sfmfd fn, <cnt>, [sp]! */
1485 if ((inst
& 0xffff8fff) == 0xed6d0103) /* stfe fn, [sp, #-12]! */
1488 if ((inst
& 0xfffff000) == 0xe24cb000) /* sub fp, ip, #nn */
1491 if ((inst
& 0xfffff000) == 0xe24dd000) /* sub sp, sp, #nn */
1494 if ((inst
& 0xffffc000) == 0xe54b0000 /* strb r(0123),[r11,#-nn] */
1495 || (inst
& 0xffffc0f0) == 0xe14b00b0 /* strh r(0123),[r11,#-nn] */
1496 || (inst
& 0xffffc000) == 0xe50b0000) /* str r(0123),[r11,#-nn] */
1499 if ((inst
& 0xffffc000) == 0xe5cd0000 /* strb r(0123),[sp,#nn] */
1500 || (inst
& 0xffffc0f0) == 0xe1cd00b0 /* strh r(0123),[sp,#nn] */
1501 || (inst
& 0xffffc000) == 0xe58d0000) /* str r(0123),[sp,#nn] */
1504 /* Un-recognized instruction; stop scanning. */
1508 return skip_pc
; /* End of prologue. */
1512 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1513 This function decodes a Thumb function prologue to determine:
1514 1) the size of the stack frame
1515 2) which registers are saved on it
1516 3) the offsets of saved regs
1517 4) the offset from the stack pointer to the frame pointer
1519 A typical Thumb function prologue would create this stack frame
1520 (offsets relative to FP)
1521 old SP -> 24 stack parameters
1524 R7 -> 0 local variables (16 bytes)
1525 SP -> -12 additional stack space (12 bytes)
1526 The frame size would thus be 36 bytes, and the frame offset would be
1527 12 bytes. The frame register is R7.
1529 The comments for thumb_skip_prolog() describe the algorithm we use
1530 to detect the end of the prolog. */
1534 thumb_scan_prologue (struct gdbarch
*gdbarch
, CORE_ADDR prev_pc
,
1535 CORE_ADDR block_addr
, struct arm_prologue_cache
*cache
)
1537 CORE_ADDR prologue_start
;
1538 CORE_ADDR prologue_end
;
1539 CORE_ADDR current_pc
;
1541 if (find_pc_partial_function (block_addr
, NULL
, &prologue_start
,
1544 /* See comment in arm_scan_prologue for an explanation of
1546 if (prologue_end
> prologue_start
+ 64)
1548 prologue_end
= prologue_start
+ 64;
1552 /* We're in the boondocks: we have no idea where the start of the
1556 prologue_end
= min (prologue_end
, prev_pc
);
1558 thumb_analyze_prologue (gdbarch
, prologue_start
, prologue_end
, cache
);
1561 /* Return 1 if THIS_INSTR might change control flow, 0 otherwise. */
1564 arm_instruction_changes_pc (uint32_t this_instr
)
1566 if (bits (this_instr
, 28, 31) == INST_NV
)
1567 /* Unconditional instructions. */
1568 switch (bits (this_instr
, 24, 27))
1572 /* Branch with Link and change to Thumb. */
1577 /* Coprocessor register transfer. */
1578 if (bits (this_instr
, 12, 15) == 15)
1579 error (_("Invalid update to pc in instruction"));
1585 switch (bits (this_instr
, 25, 27))
1588 if (bits (this_instr
, 23, 24) == 2 && bit (this_instr
, 20) == 0)
1590 /* Multiplies and extra load/stores. */
1591 if (bit (this_instr
, 4) == 1 && bit (this_instr
, 7) == 1)
1592 /* Neither multiplies nor extension load/stores are allowed
1596 /* Otherwise, miscellaneous instructions. */
1598 /* BX <reg>, BXJ <reg>, BLX <reg> */
1599 if (bits (this_instr
, 4, 27) == 0x12fff1
1600 || bits (this_instr
, 4, 27) == 0x12fff2
1601 || bits (this_instr
, 4, 27) == 0x12fff3)
1604 /* Other miscellaneous instructions are unpredictable if they
1608 /* Data processing instruction. Fall through. */
1611 if (bits (this_instr
, 12, 15) == 15)
1618 /* Media instructions and architecturally undefined instructions. */
1619 if (bits (this_instr
, 25, 27) == 3 && bit (this_instr
, 4) == 1)
1623 if (bit (this_instr
, 20) == 0)
1627 if (bits (this_instr
, 12, 15) == ARM_PC_REGNUM
)
1633 /* Load/store multiple. */
1634 if (bit (this_instr
, 20) == 1 && bit (this_instr
, 15) == 1)
1640 /* Branch and branch with link. */
1645 /* Coprocessor transfers or SWIs can not affect PC. */
1649 internal_error (__FILE__
, __LINE__
, _("bad value in switch"));
1653 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1654 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1655 fill it in. Return the first address not recognized as a prologue
1658 We recognize all the instructions typically found in ARM prologues,
1659 plus harmless instructions which can be skipped (either for analysis
1660 purposes, or a more restrictive set that can be skipped when finding
1661 the end of the prologue). */
1664 arm_analyze_prologue (struct gdbarch
*gdbarch
,
1665 CORE_ADDR prologue_start
, CORE_ADDR prologue_end
,
1666 struct arm_prologue_cache
*cache
)
1668 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
1669 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
1671 CORE_ADDR offset
, current_pc
;
1672 pv_t regs
[ARM_FPS_REGNUM
];
1673 struct pv_area
*stack
;
1674 struct cleanup
*back_to
;
1675 int framereg
, framesize
;
1676 CORE_ADDR unrecognized_pc
= 0;
1678 /* Search the prologue looking for instructions that set up the
1679 frame pointer, adjust the stack pointer, and save registers.
1681 Be careful, however, and if it doesn't look like a prologue,
1682 don't try to scan it. If, for instance, a frameless function
1683 begins with stmfd sp!, then we will tell ourselves there is
1684 a frame, which will confuse stack traceback, as well as "finish"
1685 and other operations that rely on a knowledge of the stack
1688 for (regno
= 0; regno
< ARM_FPS_REGNUM
; regno
++)
1689 regs
[regno
] = pv_register (regno
, 0);
1690 stack
= make_pv_area (ARM_SP_REGNUM
, gdbarch_addr_bit (gdbarch
));
1691 back_to
= make_cleanup_free_pv_area (stack
);
1693 for (current_pc
= prologue_start
;
1694 current_pc
< prologue_end
;
1698 = read_memory_unsigned_integer (current_pc
, 4, byte_order_for_code
);
1700 if (insn
== 0xe1a0c00d) /* mov ip, sp */
1702 regs
[ARM_IP_REGNUM
] = regs
[ARM_SP_REGNUM
];
1705 else if ((insn
& 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1706 && pv_is_register (regs
[bits (insn
, 16, 19)], ARM_SP_REGNUM
))
1708 unsigned imm
= insn
& 0xff; /* immediate value */
1709 unsigned rot
= (insn
& 0xf00) >> 7; /* rotate amount */
1710 int rd
= bits (insn
, 12, 15);
1711 imm
= (imm
>> rot
) | (imm
<< (32 - rot
));
1712 regs
[rd
] = pv_add_constant (regs
[bits (insn
, 16, 19)], imm
);
1715 else if ((insn
& 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1716 && pv_is_register (regs
[bits (insn
, 16, 19)], ARM_SP_REGNUM
))
1718 unsigned imm
= insn
& 0xff; /* immediate value */
1719 unsigned rot
= (insn
& 0xf00) >> 7; /* rotate amount */
1720 int rd
= bits (insn
, 12, 15);
1721 imm
= (imm
>> rot
) | (imm
<< (32 - rot
));
1722 regs
[rd
] = pv_add_constant (regs
[bits (insn
, 16, 19)], -imm
);
1725 else if ((insn
& 0xffff0fff) == 0xe52d0004) /* str Rd,
1728 if (pv_area_store_would_trash (stack
, regs
[ARM_SP_REGNUM
]))
1730 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
], -4);
1731 pv_area_store (stack
, regs
[ARM_SP_REGNUM
], 4,
1732 regs
[bits (insn
, 12, 15)]);
1735 else if ((insn
& 0xffff0000) == 0xe92d0000)
1736 /* stmfd sp!, {..., fp, ip, lr, pc}
1738 stmfd sp!, {a1, a2, a3, a4} */
1740 int mask
= insn
& 0xffff;
1742 if (pv_area_store_would_trash (stack
, regs
[ARM_SP_REGNUM
]))
1745 /* Calculate offsets of saved registers. */
1746 for (regno
= ARM_PC_REGNUM
; regno
>= 0; regno
--)
1747 if (mask
& (1 << regno
))
1750 = pv_add_constant (regs
[ARM_SP_REGNUM
], -4);
1751 pv_area_store (stack
, regs
[ARM_SP_REGNUM
], 4, regs
[regno
]);
1754 else if ((insn
& 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1755 || (insn
& 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1756 || (insn
& 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1758 /* No need to add this to saved_regs -- it's just an arg reg. */
1761 else if ((insn
& 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1762 || (insn
& 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1763 || (insn
& 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1765 /* No need to add this to saved_regs -- it's just an arg reg. */
1768 else if ((insn
& 0xfff00000) == 0xe8800000 /* stm Rn,
1770 && pv_is_register (regs
[bits (insn
, 16, 19)], ARM_SP_REGNUM
))
1772 /* No need to add this to saved_regs -- it's just arg regs. */
1775 else if ((insn
& 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1777 unsigned imm
= insn
& 0xff; /* immediate value */
1778 unsigned rot
= (insn
& 0xf00) >> 7; /* rotate amount */
1779 imm
= (imm
>> rot
) | (imm
<< (32 - rot
));
1780 regs
[ARM_FP_REGNUM
] = pv_add_constant (regs
[ARM_IP_REGNUM
], -imm
);
1782 else if ((insn
& 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1784 unsigned imm
= insn
& 0xff; /* immediate value */
1785 unsigned rot
= (insn
& 0xf00) >> 7; /* rotate amount */
1786 imm
= (imm
>> rot
) | (imm
<< (32 - rot
));
1787 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
], -imm
);
1789 else if ((insn
& 0xffff7fff) == 0xed6d0103 /* stfe f?,
1791 && gdbarch_tdep (gdbarch
)->have_fpa_registers
)
1793 if (pv_area_store_would_trash (stack
, regs
[ARM_SP_REGNUM
]))
1796 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
], -12);
1797 regno
= ARM_F0_REGNUM
+ ((insn
>> 12) & 0x07);
1798 pv_area_store (stack
, regs
[ARM_SP_REGNUM
], 12, regs
[regno
]);
1800 else if ((insn
& 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1802 && gdbarch_tdep (gdbarch
)->have_fpa_registers
)
1804 int n_saved_fp_regs
;
1805 unsigned int fp_start_reg
, fp_bound_reg
;
1807 if (pv_area_store_would_trash (stack
, regs
[ARM_SP_REGNUM
]))
1810 if ((insn
& 0x800) == 0x800) /* N0 is set */
1812 if ((insn
& 0x40000) == 0x40000) /* N1 is set */
1813 n_saved_fp_regs
= 3;
1815 n_saved_fp_regs
= 1;
1819 if ((insn
& 0x40000) == 0x40000) /* N1 is set */
1820 n_saved_fp_regs
= 2;
1822 n_saved_fp_regs
= 4;
1825 fp_start_reg
= ARM_F0_REGNUM
+ ((insn
>> 12) & 0x7);
1826 fp_bound_reg
= fp_start_reg
+ n_saved_fp_regs
;
1827 for (; fp_start_reg
< fp_bound_reg
; fp_start_reg
++)
1829 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
], -12);
1830 pv_area_store (stack
, regs
[ARM_SP_REGNUM
], 12,
1831 regs
[fp_start_reg
++]);
1834 else if ((insn
& 0xff000000) == 0xeb000000 && cache
== NULL
) /* bl */
1836 /* Allow some special function calls when skipping the
1837 prologue; GCC generates these before storing arguments to
1839 CORE_ADDR dest
= BranchDest (current_pc
, insn
);
1841 if (skip_prologue_function (gdbarch
, dest
, 0))
1846 else if ((insn
& 0xf0000000) != 0xe0000000)
1847 break; /* Condition not true, exit early. */
1848 else if (arm_instruction_changes_pc (insn
))
1849 /* Don't scan past anything that might change control flow. */
1851 else if ((insn
& 0xfe500000) == 0xe8100000 /* ldm */
1852 && pv_is_register (regs
[bits (insn
, 16, 19)], ARM_SP_REGNUM
))
1853 /* Ignore block loads from the stack, potentially copying
1854 parameters from memory. */
1856 else if ((insn
& 0xfc500000) == 0xe4100000
1857 && pv_is_register (regs
[bits (insn
, 16, 19)], ARM_SP_REGNUM
))
1858 /* Similarly ignore single loads from the stack. */
1860 else if ((insn
& 0xffff0ff0) == 0xe1a00000)
1861 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1862 register instead of the stack. */
1866 /* The optimizer might shove anything into the prologue,
1867 so we just skip what we don't recognize. */
1868 unrecognized_pc
= current_pc
;
1873 if (unrecognized_pc
== 0)
1874 unrecognized_pc
= current_pc
;
1876 /* The frame size is just the distance from the frame register
1877 to the original stack pointer. */
1878 if (pv_is_register (regs
[ARM_FP_REGNUM
], ARM_SP_REGNUM
))
1880 /* Frame pointer is fp. */
1881 framereg
= ARM_FP_REGNUM
;
1882 framesize
= -regs
[ARM_FP_REGNUM
].k
;
1884 else if (pv_is_register (regs
[ARM_SP_REGNUM
], ARM_SP_REGNUM
))
1886 /* Try the stack pointer... this is a bit desperate. */
1887 framereg
= ARM_SP_REGNUM
;
1888 framesize
= -regs
[ARM_SP_REGNUM
].k
;
1892 /* We're just out of luck. We don't know where the frame is. */
1899 cache
->framereg
= framereg
;
1900 cache
->framesize
= framesize
;
1902 for (regno
= 0; regno
< ARM_FPS_REGNUM
; regno
++)
1903 if (pv_area_find_reg (stack
, gdbarch
, regno
, &offset
))
1904 cache
->saved_regs
[regno
].addr
= offset
;
1908 fprintf_unfiltered (gdb_stdlog
, "Prologue scan stopped at %s\n",
1909 paddress (gdbarch
, unrecognized_pc
));
1911 do_cleanups (back_to
);
1912 return unrecognized_pc
;
1916 arm_scan_prologue (struct frame_info
*this_frame
,
1917 struct arm_prologue_cache
*cache
)
1919 struct gdbarch
*gdbarch
= get_frame_arch (this_frame
);
1920 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
1922 CORE_ADDR prologue_start
, prologue_end
, current_pc
;
1923 CORE_ADDR prev_pc
= get_frame_pc (this_frame
);
1924 CORE_ADDR block_addr
= get_frame_address_in_block (this_frame
);
1925 pv_t regs
[ARM_FPS_REGNUM
];
1926 struct pv_area
*stack
;
1927 struct cleanup
*back_to
;
1930 /* Assume there is no frame until proven otherwise. */
1931 cache
->framereg
= ARM_SP_REGNUM
;
1932 cache
->framesize
= 0;
1934 /* Check for Thumb prologue. */
1935 if (arm_frame_is_thumb (this_frame
))
1937 thumb_scan_prologue (gdbarch
, prev_pc
, block_addr
, cache
);
1941 /* Find the function prologue. If we can't find the function in
1942 the symbol table, peek in the stack frame to find the PC. */
1943 if (find_pc_partial_function (block_addr
, NULL
, &prologue_start
,
1946 /* One way to find the end of the prologue (which works well
1947 for unoptimized code) is to do the following:
1949 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1952 prologue_end = prev_pc;
1953 else if (sal.end < prologue_end)
1954 prologue_end = sal.end;
1956 This mechanism is very accurate so long as the optimizer
1957 doesn't move any instructions from the function body into the
1958 prologue. If this happens, sal.end will be the last
1959 instruction in the first hunk of prologue code just before
1960 the first instruction that the scheduler has moved from
1961 the body to the prologue.
1963 In order to make sure that we scan all of the prologue
1964 instructions, we use a slightly less accurate mechanism which
1965 may scan more than necessary. To help compensate for this
1966 lack of accuracy, the prologue scanning loop below contains
1967 several clauses which'll cause the loop to terminate early if
1968 an implausible prologue instruction is encountered.
1974 is a suitable endpoint since it accounts for the largest
1975 possible prologue plus up to five instructions inserted by
1978 if (prologue_end
> prologue_start
+ 64)
1980 prologue_end
= prologue_start
+ 64; /* See above. */
1985 /* We have no symbol information. Our only option is to assume this
1986 function has a standard stack frame and the normal frame register.
1987 Then, we can find the value of our frame pointer on entrance to
1988 the callee (or at the present moment if this is the innermost frame).
1989 The value stored there should be the address of the stmfd + 8. */
1990 CORE_ADDR frame_loc
;
1991 LONGEST return_value
;
1993 frame_loc
= get_frame_register_unsigned (this_frame
, ARM_FP_REGNUM
);
1994 if (!safe_read_memory_integer (frame_loc
, 4, byte_order
, &return_value
))
1998 prologue_start
= gdbarch_addr_bits_remove
1999 (gdbarch
, return_value
) - 8;
2000 prologue_end
= prologue_start
+ 64; /* See above. */
2004 if (prev_pc
< prologue_end
)
2005 prologue_end
= prev_pc
;
2007 arm_analyze_prologue (gdbarch
, prologue_start
, prologue_end
, cache
);
2010 static struct arm_prologue_cache
*
2011 arm_make_prologue_cache (struct frame_info
*this_frame
)
2014 struct arm_prologue_cache
*cache
;
2015 CORE_ADDR unwound_fp
;
2017 cache
= FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache
);
2018 cache
->saved_regs
= trad_frame_alloc_saved_regs (this_frame
);
2020 arm_scan_prologue (this_frame
, cache
);
2022 unwound_fp
= get_frame_register_unsigned (this_frame
, cache
->framereg
);
2023 if (unwound_fp
== 0)
2026 cache
->prev_sp
= unwound_fp
+ cache
->framesize
;
2028 /* Calculate actual addresses of saved registers using offsets
2029 determined by arm_scan_prologue. */
2030 for (reg
= 0; reg
< gdbarch_num_regs (get_frame_arch (this_frame
)); reg
++)
2031 if (trad_frame_addr_p (cache
->saved_regs
, reg
))
2032 cache
->saved_regs
[reg
].addr
+= cache
->prev_sp
;
2037 /* Our frame ID for a normal frame is the current function's starting PC
2038 and the caller's SP when we were called. */
2041 arm_prologue_this_id (struct frame_info
*this_frame
,
2043 struct frame_id
*this_id
)
2045 struct arm_prologue_cache
*cache
;
2049 if (*this_cache
== NULL
)
2050 *this_cache
= arm_make_prologue_cache (this_frame
);
2051 cache
= *this_cache
;
2053 /* This is meant to halt the backtrace at "_start". */
2054 pc
= get_frame_pc (this_frame
);
2055 if (pc
<= gdbarch_tdep (get_frame_arch (this_frame
))->lowest_pc
)
2058 /* If we've hit a wall, stop. */
2059 if (cache
->prev_sp
== 0)
2062 /* Use function start address as part of the frame ID. If we cannot
2063 identify the start address (due to missing symbol information),
2064 fall back to just using the current PC. */
2065 func
= get_frame_func (this_frame
);
2069 id
= frame_id_build (cache
->prev_sp
, func
);
2073 static struct value
*
2074 arm_prologue_prev_register (struct frame_info
*this_frame
,
2078 struct gdbarch
*gdbarch
= get_frame_arch (this_frame
);
2079 struct arm_prologue_cache
*cache
;
2081 if (*this_cache
== NULL
)
2082 *this_cache
= arm_make_prologue_cache (this_frame
);
2083 cache
= *this_cache
;
2085 /* If we are asked to unwind the PC, then we need to return the LR
2086 instead. The prologue may save PC, but it will point into this
2087 frame's prologue, not the next frame's resume location. Also
2088 strip the saved T bit. A valid LR may have the low bit set, but
2089 a valid PC never does. */
2090 if (prev_regnum
== ARM_PC_REGNUM
)
2094 lr
= frame_unwind_register_unsigned (this_frame
, ARM_LR_REGNUM
);
2095 return frame_unwind_got_constant (this_frame
, prev_regnum
,
2096 arm_addr_bits_remove (gdbarch
, lr
));
2099 /* SP is generally not saved to the stack, but this frame is
2100 identified by the next frame's stack pointer at the time of the call.
2101 The value was already reconstructed into PREV_SP. */
2102 if (prev_regnum
== ARM_SP_REGNUM
)
2103 return frame_unwind_got_constant (this_frame
, prev_regnum
, cache
->prev_sp
);
2105 /* The CPSR may have been changed by the call instruction and by the
2106 called function. The only bit we can reconstruct is the T bit,
2107 by checking the low bit of LR as of the call. This is a reliable
2108 indicator of Thumb-ness except for some ARM v4T pre-interworking
2109 Thumb code, which could get away with a clear low bit as long as
2110 the called function did not use bx. Guess that all other
2111 bits are unchanged; the condition flags are presumably lost,
2112 but the processor status is likely valid. */
2113 if (prev_regnum
== ARM_PS_REGNUM
)
2116 ULONGEST t_bit
= arm_psr_thumb_bit (gdbarch
);
2118 cpsr
= get_frame_register_unsigned (this_frame
, prev_regnum
);
2119 lr
= frame_unwind_register_unsigned (this_frame
, ARM_LR_REGNUM
);
2120 if (IS_THUMB_ADDR (lr
))
2124 return frame_unwind_got_constant (this_frame
, prev_regnum
, cpsr
);
2127 return trad_frame_get_prev_register (this_frame
, cache
->saved_regs
,
2131 struct frame_unwind arm_prologue_unwind
= {
2133 default_frame_unwind_stop_reason
,
2134 arm_prologue_this_id
,
2135 arm_prologue_prev_register
,
2137 default_frame_sniffer
2140 /* Maintain a list of ARM exception table entries per objfile, similar to the
2141 list of mapping symbols. We only cache entries for standard ARM-defined
2142 personality routines; the cache will contain only the frame unwinding
2143 instructions associated with the entry (not the descriptors). */
2145 static const struct objfile_data
*arm_exidx_data_key
;
2147 struct arm_exidx_entry
2152 typedef struct arm_exidx_entry arm_exidx_entry_s
;
2153 DEF_VEC_O(arm_exidx_entry_s
);
2155 struct arm_exidx_data
2157 VEC(arm_exidx_entry_s
) **section_maps
;
2161 arm_exidx_data_free (struct objfile
*objfile
, void *arg
)
2163 struct arm_exidx_data
*data
= arg
;
2166 for (i
= 0; i
< objfile
->obfd
->section_count
; i
++)
2167 VEC_free (arm_exidx_entry_s
, data
->section_maps
[i
]);
2171 arm_compare_exidx_entries (const struct arm_exidx_entry
*lhs
,
2172 const struct arm_exidx_entry
*rhs
)
2174 return lhs
->addr
< rhs
->addr
;
2177 static struct obj_section
*
2178 arm_obj_section_from_vma (struct objfile
*objfile
, bfd_vma vma
)
2180 struct obj_section
*osect
;
2182 ALL_OBJFILE_OSECTIONS (objfile
, osect
)
2183 if (bfd_get_section_flags (objfile
->obfd
,
2184 osect
->the_bfd_section
) & SEC_ALLOC
)
2186 bfd_vma start
, size
;
2187 start
= bfd_get_section_vma (objfile
->obfd
, osect
->the_bfd_section
);
2188 size
= bfd_get_section_size (osect
->the_bfd_section
);
2190 if (start
<= vma
&& vma
< start
+ size
)
2197 /* Parse contents of exception table and exception index sections
2198 of OBJFILE, and fill in the exception table entry cache.
2200 For each entry that refers to a standard ARM-defined personality
2201 routine, extract the frame unwinding instructions (from either
2202 the index or the table section). The unwinding instructions
2204 - extracting them from the rest of the table data
2205 - converting to host endianness
2206 - appending the implicit 0xb0 ("Finish") code
2208 The extracted and normalized instructions are stored for later
2209 retrieval by the arm_find_exidx_entry routine. */
2212 arm_exidx_new_objfile (struct objfile
*objfile
)
2214 struct cleanup
*cleanups
;
2215 struct arm_exidx_data
*data
;
2216 asection
*exidx
, *extab
;
2217 bfd_vma exidx_vma
= 0, extab_vma
= 0;
2218 bfd_size_type exidx_size
= 0, extab_size
= 0;
2219 gdb_byte
*exidx_data
= NULL
, *extab_data
= NULL
;
2222 /* If we've already touched this file, do nothing. */
2223 if (!objfile
|| objfile_data (objfile
, arm_exidx_data_key
) != NULL
)
2225 cleanups
= make_cleanup (null_cleanup
, NULL
);
2227 /* Read contents of exception table and index. */
2228 exidx
= bfd_get_section_by_name (objfile
->obfd
, ".ARM.exidx");
2231 exidx_vma
= bfd_section_vma (objfile
->obfd
, exidx
);
2232 exidx_size
= bfd_get_section_size (exidx
);
2233 exidx_data
= xmalloc (exidx_size
);
2234 make_cleanup (xfree
, exidx_data
);
2236 if (!bfd_get_section_contents (objfile
->obfd
, exidx
,
2237 exidx_data
, 0, exidx_size
))
2239 do_cleanups (cleanups
);
2244 extab
= bfd_get_section_by_name (objfile
->obfd
, ".ARM.extab");
2247 extab_vma
= bfd_section_vma (objfile
->obfd
, extab
);
2248 extab_size
= bfd_get_section_size (extab
);
2249 extab_data
= xmalloc (extab_size
);
2250 make_cleanup (xfree
, extab_data
);
2252 if (!bfd_get_section_contents (objfile
->obfd
, extab
,
2253 extab_data
, 0, extab_size
))
2255 do_cleanups (cleanups
);
2260 /* Allocate exception table data structure. */
2261 data
= OBSTACK_ZALLOC (&objfile
->objfile_obstack
, struct arm_exidx_data
);
2262 set_objfile_data (objfile
, arm_exidx_data_key
, data
);
2263 data
->section_maps
= OBSTACK_CALLOC (&objfile
->objfile_obstack
,
2264 objfile
->obfd
->section_count
,
2265 VEC(arm_exidx_entry_s
) *);
2267 /* Fill in exception table. */
2268 for (i
= 0; i
< exidx_size
/ 8; i
++)
2270 struct arm_exidx_entry new_exidx_entry
;
2271 bfd_vma idx
= bfd_h_get_32 (objfile
->obfd
, exidx_data
+ i
* 8);
2272 bfd_vma val
= bfd_h_get_32 (objfile
->obfd
, exidx_data
+ i
* 8 + 4);
2273 bfd_vma addr
= 0, word
= 0;
2274 int n_bytes
= 0, n_words
= 0;
2275 struct obj_section
*sec
;
2276 gdb_byte
*entry
= NULL
;
2278 /* Extract address of start of function. */
2279 idx
= ((idx
& 0x7fffffff) ^ 0x40000000) - 0x40000000;
2280 idx
+= exidx_vma
+ i
* 8;
2282 /* Find section containing function and compute section offset. */
2283 sec
= arm_obj_section_from_vma (objfile
, idx
);
2286 idx
-= bfd_get_section_vma (objfile
->obfd
, sec
->the_bfd_section
);
2288 /* Determine address of exception table entry. */
2291 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2293 else if ((val
& 0xff000000) == 0x80000000)
2295 /* Exception table entry embedded in .ARM.exidx
2296 -- must be short form. */
2300 else if (!(val
& 0x80000000))
2302 /* Exception table entry in .ARM.extab. */
2303 addr
= ((val
& 0x7fffffff) ^ 0x40000000) - 0x40000000;
2304 addr
+= exidx_vma
+ i
* 8 + 4;
2306 if (addr
>= extab_vma
&& addr
+ 4 <= extab_vma
+ extab_size
)
2308 word
= bfd_h_get_32 (objfile
->obfd
,
2309 extab_data
+ addr
- extab_vma
);
2312 if ((word
& 0xff000000) == 0x80000000)
2317 else if ((word
& 0xff000000) == 0x81000000
2318 || (word
& 0xff000000) == 0x82000000)
2322 n_words
= ((word
>> 16) & 0xff);
2324 else if (!(word
& 0x80000000))
2327 struct obj_section
*pers_sec
;
2328 int gnu_personality
= 0;
2330 /* Custom personality routine. */
2331 pers
= ((word
& 0x7fffffff) ^ 0x40000000) - 0x40000000;
2332 pers
= UNMAKE_THUMB_ADDR (pers
+ addr
- 4);
2334 /* Check whether we've got one of the variants of the
2335 GNU personality routines. */
2336 pers_sec
= arm_obj_section_from_vma (objfile
, pers
);
2339 static const char *personality
[] =
2341 "__gcc_personality_v0",
2342 "__gxx_personality_v0",
2343 "__gcj_personality_v0",
2344 "__gnu_objc_personality_v0",
2348 CORE_ADDR pc
= pers
+ obj_section_offset (pers_sec
);
2351 for (k
= 0; personality
[k
]; k
++)
2352 if (lookup_minimal_symbol_by_pc_name
2353 (pc
, personality
[k
], objfile
))
2355 gnu_personality
= 1;
2360 /* If so, the next word contains a word count in the high
2361 byte, followed by the same unwind instructions as the
2362 pre-defined forms. */
2364 && addr
+ 4 <= extab_vma
+ extab_size
)
2366 word
= bfd_h_get_32 (objfile
->obfd
,
2367 extab_data
+ addr
- extab_vma
);
2370 n_words
= ((word
>> 24) & 0xff);
2376 /* Sanity check address. */
2378 if (addr
< extab_vma
|| addr
+ 4 * n_words
> extab_vma
+ extab_size
)
2379 n_words
= n_bytes
= 0;
2381 /* The unwind instructions reside in WORD (only the N_BYTES least
2382 significant bytes are valid), followed by N_WORDS words in the
2383 extab section starting at ADDR. */
2384 if (n_bytes
|| n_words
)
2386 gdb_byte
*p
= entry
= obstack_alloc (&objfile
->objfile_obstack
,
2387 n_bytes
+ n_words
* 4 + 1);
2390 *p
++ = (gdb_byte
) ((word
>> (8 * n_bytes
)) & 0xff);
2394 word
= bfd_h_get_32 (objfile
->obfd
,
2395 extab_data
+ addr
- extab_vma
);
2398 *p
++ = (gdb_byte
) ((word
>> 24) & 0xff);
2399 *p
++ = (gdb_byte
) ((word
>> 16) & 0xff);
2400 *p
++ = (gdb_byte
) ((word
>> 8) & 0xff);
2401 *p
++ = (gdb_byte
) (word
& 0xff);
2404 /* Implied "Finish" to terminate the list. */
2408 /* Push entry onto vector. They are guaranteed to always
2409 appear in order of increasing addresses. */
2410 new_exidx_entry
.addr
= idx
;
2411 new_exidx_entry
.entry
= entry
;
2412 VEC_safe_push (arm_exidx_entry_s
,
2413 data
->section_maps
[sec
->the_bfd_section
->index
],
2417 do_cleanups (cleanups
);
2420 /* Search for the exception table entry covering MEMADDR. If one is found,
2421 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2422 set *START to the start of the region covered by this entry. */
2425 arm_find_exidx_entry (CORE_ADDR memaddr
, CORE_ADDR
*start
)
2427 struct obj_section
*sec
;
2429 sec
= find_pc_section (memaddr
);
2432 struct arm_exidx_data
*data
;
2433 VEC(arm_exidx_entry_s
) *map
;
2434 struct arm_exidx_entry map_key
= { memaddr
- obj_section_addr (sec
), 0 };
2437 data
= objfile_data (sec
->objfile
, arm_exidx_data_key
);
2440 map
= data
->section_maps
[sec
->the_bfd_section
->index
];
2441 if (!VEC_empty (arm_exidx_entry_s
, map
))
2443 struct arm_exidx_entry
*map_sym
;
2445 idx
= VEC_lower_bound (arm_exidx_entry_s
, map
, &map_key
,
2446 arm_compare_exidx_entries
);
2448 /* VEC_lower_bound finds the earliest ordered insertion
2449 point. If the following symbol starts at this exact
2450 address, we use that; otherwise, the preceding
2451 exception table entry covers this address. */
2452 if (idx
< VEC_length (arm_exidx_entry_s
, map
))
2454 map_sym
= VEC_index (arm_exidx_entry_s
, map
, idx
);
2455 if (map_sym
->addr
== map_key
.addr
)
2458 *start
= map_sym
->addr
+ obj_section_addr (sec
);
2459 return map_sym
->entry
;
2465 map_sym
= VEC_index (arm_exidx_entry_s
, map
, idx
- 1);
2467 *start
= map_sym
->addr
+ obj_section_addr (sec
);
2468 return map_sym
->entry
;
2477 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2478 instruction list from the ARM exception table entry ENTRY, allocate and
2479 return a prologue cache structure describing how to unwind this frame.
2481 Return NULL if the unwinding instruction list contains a "spare",
2482 "reserved" or "refuse to unwind" instruction as defined in section
2483 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2484 for the ARM Architecture" document. */
2486 static struct arm_prologue_cache
*
2487 arm_exidx_fill_cache (struct frame_info
*this_frame
, gdb_byte
*entry
)
2492 struct arm_prologue_cache
*cache
;
2493 cache
= FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache
);
2494 cache
->saved_regs
= trad_frame_alloc_saved_regs (this_frame
);
2500 /* Whenever we reload SP, we actually have to retrieve its
2501 actual value in the current frame. */
2504 if (trad_frame_realreg_p (cache
->saved_regs
, ARM_SP_REGNUM
))
2506 int reg
= cache
->saved_regs
[ARM_SP_REGNUM
].realreg
;
2507 vsp
= get_frame_register_unsigned (this_frame
, reg
);
2511 CORE_ADDR addr
= cache
->saved_regs
[ARM_SP_REGNUM
].addr
;
2512 vsp
= get_frame_memory_unsigned (this_frame
, addr
, 4);
2518 /* Decode next unwind instruction. */
2521 if ((insn
& 0xc0) == 0)
2523 int offset
= insn
& 0x3f;
2524 vsp
+= (offset
<< 2) + 4;
2526 else if ((insn
& 0xc0) == 0x40)
2528 int offset
= insn
& 0x3f;
2529 vsp
-= (offset
<< 2) + 4;
2531 else if ((insn
& 0xf0) == 0x80)
2533 int mask
= ((insn
& 0xf) << 8) | *entry
++;
2536 /* The special case of an all-zero mask identifies
2537 "Refuse to unwind". We return NULL to fall back
2538 to the prologue analyzer. */
2542 /* Pop registers r4..r15 under mask. */
2543 for (i
= 0; i
< 12; i
++)
2544 if (mask
& (1 << i
))
2546 cache
->saved_regs
[4 + i
].addr
= vsp
;
2550 /* Special-case popping SP -- we need to reload vsp. */
2551 if (mask
& (1 << (ARM_SP_REGNUM
- 4)))
2554 else if ((insn
& 0xf0) == 0x90)
2556 int reg
= insn
& 0xf;
2558 /* Reserved cases. */
2559 if (reg
== ARM_SP_REGNUM
|| reg
== ARM_PC_REGNUM
)
2562 /* Set SP from another register and mark VSP for reload. */
2563 cache
->saved_regs
[ARM_SP_REGNUM
] = cache
->saved_regs
[reg
];
2566 else if ((insn
& 0xf0) == 0xa0)
2568 int count
= insn
& 0x7;
2569 int pop_lr
= (insn
& 0x8) != 0;
2572 /* Pop r4..r[4+count]. */
2573 for (i
= 0; i
<= count
; i
++)
2575 cache
->saved_regs
[4 + i
].addr
= vsp
;
2579 /* If indicated by flag, pop LR as well. */
2582 cache
->saved_regs
[ARM_LR_REGNUM
].addr
= vsp
;
2586 else if (insn
== 0xb0)
2588 /* We could only have updated PC by popping into it; if so, it
2589 will show up as address. Otherwise, copy LR into PC. */
2590 if (!trad_frame_addr_p (cache
->saved_regs
, ARM_PC_REGNUM
))
2591 cache
->saved_regs
[ARM_PC_REGNUM
]
2592 = cache
->saved_regs
[ARM_LR_REGNUM
];
2597 else if (insn
== 0xb1)
2599 int mask
= *entry
++;
2602 /* All-zero mask and mask >= 16 is "spare". */
2603 if (mask
== 0 || mask
>= 16)
2606 /* Pop r0..r3 under mask. */
2607 for (i
= 0; i
< 4; i
++)
2608 if (mask
& (1 << i
))
2610 cache
->saved_regs
[i
].addr
= vsp
;
2614 else if (insn
== 0xb2)
2616 ULONGEST offset
= 0;
2621 offset
|= (*entry
& 0x7f) << shift
;
2624 while (*entry
++ & 0x80);
2626 vsp
+= 0x204 + (offset
<< 2);
2628 else if (insn
== 0xb3)
2630 int start
= *entry
>> 4;
2631 int count
= (*entry
++) & 0xf;
2634 /* Only registers D0..D15 are valid here. */
2635 if (start
+ count
>= 16)
2638 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2639 for (i
= 0; i
<= count
; i
++)
2641 cache
->saved_regs
[ARM_D0_REGNUM
+ start
+ i
].addr
= vsp
;
2645 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2648 else if ((insn
& 0xf8) == 0xb8)
2650 int count
= insn
& 0x7;
2653 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2654 for (i
= 0; i
<= count
; i
++)
2656 cache
->saved_regs
[ARM_D0_REGNUM
+ 8 + i
].addr
= vsp
;
2660 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2663 else if (insn
== 0xc6)
2665 int start
= *entry
>> 4;
2666 int count
= (*entry
++) & 0xf;
2669 /* Only registers WR0..WR15 are valid. */
2670 if (start
+ count
>= 16)
2673 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2674 for (i
= 0; i
<= count
; i
++)
2676 cache
->saved_regs
[ARM_WR0_REGNUM
+ start
+ i
].addr
= vsp
;
2680 else if (insn
== 0xc7)
2682 int mask
= *entry
++;
2685 /* All-zero mask and mask >= 16 is "spare". */
2686 if (mask
== 0 || mask
>= 16)
2689 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2690 for (i
= 0; i
< 4; i
++)
2691 if (mask
& (1 << i
))
2693 cache
->saved_regs
[ARM_WCGR0_REGNUM
+ i
].addr
= vsp
;
2697 else if ((insn
& 0xf8) == 0xc0)
2699 int count
= insn
& 0x7;
2702 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2703 for (i
= 0; i
<= count
; i
++)
2705 cache
->saved_regs
[ARM_WR0_REGNUM
+ 10 + i
].addr
= vsp
;
2709 else if (insn
== 0xc8)
2711 int start
= *entry
>> 4;
2712 int count
= (*entry
++) & 0xf;
2715 /* Only registers D0..D31 are valid. */
2716 if (start
+ count
>= 16)
2719 /* Pop VFP double-precision registers
2720 D[16+start]..D[16+start+count]. */
2721 for (i
= 0; i
<= count
; i
++)
2723 cache
->saved_regs
[ARM_D0_REGNUM
+ 16 + start
+ i
].addr
= vsp
;
2727 else if (insn
== 0xc9)
2729 int start
= *entry
>> 4;
2730 int count
= (*entry
++) & 0xf;
2733 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2734 for (i
= 0; i
<= count
; i
++)
2736 cache
->saved_regs
[ARM_D0_REGNUM
+ start
+ i
].addr
= vsp
;
2740 else if ((insn
& 0xf8) == 0xd0)
2742 int count
= insn
& 0x7;
2745 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2746 for (i
= 0; i
<= count
; i
++)
2748 cache
->saved_regs
[ARM_D0_REGNUM
+ 8 + i
].addr
= vsp
;
2754 /* Everything else is "spare". */
2759 /* If we restore SP from a register, assume this was the frame register.
2760 Otherwise just fall back to SP as frame register. */
2761 if (trad_frame_realreg_p (cache
->saved_regs
, ARM_SP_REGNUM
))
2762 cache
->framereg
= cache
->saved_regs
[ARM_SP_REGNUM
].realreg
;
2764 cache
->framereg
= ARM_SP_REGNUM
;
2766 /* Determine offset to previous frame. */
2768 = vsp
- get_frame_register_unsigned (this_frame
, cache
->framereg
);
2770 /* We already got the previous SP. */
2771 cache
->prev_sp
= vsp
;
2776 /* Unwinding via ARM exception table entries. Note that the sniffer
2777 already computes a filled-in prologue cache, which is then used
2778 with the same arm_prologue_this_id and arm_prologue_prev_register
2779 routines also used for prologue-parsing based unwinding. */
2782 arm_exidx_unwind_sniffer (const struct frame_unwind
*self
,
2783 struct frame_info
*this_frame
,
2784 void **this_prologue_cache
)
2786 struct gdbarch
*gdbarch
= get_frame_arch (this_frame
);
2787 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
2788 CORE_ADDR addr_in_block
, exidx_region
, func_start
;
2789 struct arm_prologue_cache
*cache
;
2792 /* See if we have an ARM exception table entry covering this address. */
2793 addr_in_block
= get_frame_address_in_block (this_frame
);
2794 entry
= arm_find_exidx_entry (addr_in_block
, &exidx_region
);
2798 /* The ARM exception table does not describe unwind information
2799 for arbitrary PC values, but is guaranteed to be correct only
2800 at call sites. We have to decide here whether we want to use
2801 ARM exception table information for this frame, or fall back
2802 to using prologue parsing. (Note that if we have DWARF CFI,
2803 this sniffer isn't even called -- CFI is always preferred.)
2805 Before we make this decision, however, we check whether we
2806 actually have *symbol* information for the current frame.
2807 If not, prologue parsing would not work anyway, so we might
2808 as well use the exception table and hope for the best. */
2809 if (find_pc_partial_function (addr_in_block
, NULL
, &func_start
, NULL
))
2813 /* If the next frame is "normal", we are at a call site in this
2814 frame, so exception information is guaranteed to be valid. */
2815 if (get_next_frame (this_frame
)
2816 && get_frame_type (get_next_frame (this_frame
)) == NORMAL_FRAME
)
2819 /* We also assume exception information is valid if we're currently
2820 blocked in a system call. The system library is supposed to
2821 ensure this, so that e.g. pthread cancellation works. */
2822 if (arm_frame_is_thumb (this_frame
))
2826 if (safe_read_memory_integer (get_frame_pc (this_frame
) - 2, 2,
2827 byte_order_for_code
, &insn
)
2828 && (insn
& 0xff00) == 0xdf00 /* svc */)
2835 if (safe_read_memory_integer (get_frame_pc (this_frame
) - 4, 4,
2836 byte_order_for_code
, &insn
)
2837 && (insn
& 0x0f000000) == 0x0f000000 /* svc */)
2841 /* Bail out if we don't know that exception information is valid. */
2845 /* The ARM exception index does not mark the *end* of the region
2846 covered by the entry, and some functions will not have any entry.
2847 To correctly recognize the end of the covered region, the linker
2848 should have inserted dummy records with a CANTUNWIND marker.
2850 Unfortunately, current versions of GNU ld do not reliably do
2851 this, and thus we may have found an incorrect entry above.
2852 As a (temporary) sanity check, we only use the entry if it
2853 lies *within* the bounds of the function. Note that this check
2854 might reject perfectly valid entries that just happen to cover
2855 multiple functions; therefore this check ought to be removed
2856 once the linker is fixed. */
2857 if (func_start
> exidx_region
)
2861 /* Decode the list of unwinding instructions into a prologue cache.
2862 Note that this may fail due to e.g. a "refuse to unwind" code. */
2863 cache
= arm_exidx_fill_cache (this_frame
, entry
);
2867 *this_prologue_cache
= cache
;
2871 struct frame_unwind arm_exidx_unwind
= {
2873 default_frame_unwind_stop_reason
,
2874 arm_prologue_this_id
,
2875 arm_prologue_prev_register
,
2877 arm_exidx_unwind_sniffer
2880 static struct arm_prologue_cache
*
2881 arm_make_stub_cache (struct frame_info
*this_frame
)
2883 struct arm_prologue_cache
*cache
;
2885 cache
= FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache
);
2886 cache
->saved_regs
= trad_frame_alloc_saved_regs (this_frame
);
2888 cache
->prev_sp
= get_frame_register_unsigned (this_frame
, ARM_SP_REGNUM
);
2893 /* Our frame ID for a stub frame is the current SP and LR. */
2896 arm_stub_this_id (struct frame_info
*this_frame
,
2898 struct frame_id
*this_id
)
2900 struct arm_prologue_cache
*cache
;
2902 if (*this_cache
== NULL
)
2903 *this_cache
= arm_make_stub_cache (this_frame
);
2904 cache
= *this_cache
;
2906 *this_id
= frame_id_build (cache
->prev_sp
, get_frame_pc (this_frame
));
2910 arm_stub_unwind_sniffer (const struct frame_unwind
*self
,
2911 struct frame_info
*this_frame
,
2912 void **this_prologue_cache
)
2914 CORE_ADDR addr_in_block
;
2917 addr_in_block
= get_frame_address_in_block (this_frame
);
2918 if (in_plt_section (addr_in_block
, NULL
)
2919 /* We also use the stub winder if the target memory is unreadable
2920 to avoid having the prologue unwinder trying to read it. */
2921 || target_read_memory (get_frame_pc (this_frame
), dummy
, 4) != 0)
2927 struct frame_unwind arm_stub_unwind
= {
2929 default_frame_unwind_stop_reason
,
2931 arm_prologue_prev_register
,
2933 arm_stub_unwind_sniffer
2937 arm_normal_frame_base (struct frame_info
*this_frame
, void **this_cache
)
2939 struct arm_prologue_cache
*cache
;
2941 if (*this_cache
== NULL
)
2942 *this_cache
= arm_make_prologue_cache (this_frame
);
2943 cache
= *this_cache
;
2945 return cache
->prev_sp
- cache
->framesize
;
2948 struct frame_base arm_normal_base
= {
2949 &arm_prologue_unwind
,
2950 arm_normal_frame_base
,
2951 arm_normal_frame_base
,
2952 arm_normal_frame_base
2955 /* Assuming THIS_FRAME is a dummy, return the frame ID of that
2956 dummy frame. The frame ID's base needs to match the TOS value
2957 saved by save_dummy_frame_tos() and returned from
2958 arm_push_dummy_call, and the PC needs to match the dummy frame's
2961 static struct frame_id
2962 arm_dummy_id (struct gdbarch
*gdbarch
, struct frame_info
*this_frame
)
2964 return frame_id_build (get_frame_register_unsigned (this_frame
,
2966 get_frame_pc (this_frame
));
2969 /* Given THIS_FRAME, find the previous frame's resume PC (which will
2970 be used to construct the previous frame's ID, after looking up the
2971 containing function). */
2974 arm_unwind_pc (struct gdbarch
*gdbarch
, struct frame_info
*this_frame
)
2977 pc
= frame_unwind_register_unsigned (this_frame
, ARM_PC_REGNUM
);
2978 return arm_addr_bits_remove (gdbarch
, pc
);
2982 arm_unwind_sp (struct gdbarch
*gdbarch
, struct frame_info
*this_frame
)
2984 return frame_unwind_register_unsigned (this_frame
, ARM_SP_REGNUM
);
2987 static struct value
*
2988 arm_dwarf2_prev_register (struct frame_info
*this_frame
, void **this_cache
,
2991 struct gdbarch
* gdbarch
= get_frame_arch (this_frame
);
2993 ULONGEST t_bit
= arm_psr_thumb_bit (gdbarch
);
2998 /* The PC is normally copied from the return column, which
2999 describes saves of LR. However, that version may have an
3000 extra bit set to indicate Thumb state. The bit is not
3002 lr
= frame_unwind_register_unsigned (this_frame
, ARM_LR_REGNUM
);
3003 return frame_unwind_got_constant (this_frame
, regnum
,
3004 arm_addr_bits_remove (gdbarch
, lr
));
3007 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3008 cpsr
= get_frame_register_unsigned (this_frame
, regnum
);
3009 lr
= frame_unwind_register_unsigned (this_frame
, ARM_LR_REGNUM
);
3010 if (IS_THUMB_ADDR (lr
))
3014 return frame_unwind_got_constant (this_frame
, regnum
, cpsr
);
3017 internal_error (__FILE__
, __LINE__
,
3018 _("Unexpected register %d"), regnum
);
3023 arm_dwarf2_frame_init_reg (struct gdbarch
*gdbarch
, int regnum
,
3024 struct dwarf2_frame_state_reg
*reg
,
3025 struct frame_info
*this_frame
)
3031 reg
->how
= DWARF2_FRAME_REG_FN
;
3032 reg
->loc
.fn
= arm_dwarf2_prev_register
;
3035 reg
->how
= DWARF2_FRAME_REG_CFA
;
3040 /* Return true if we are in the function's epilogue, i.e. after the
3041 instruction that destroyed the function's stack frame. */
3044 thumb_in_function_epilogue_p (struct gdbarch
*gdbarch
, CORE_ADDR pc
)
3046 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
3047 unsigned int insn
, insn2
;
3048 int found_return
= 0, found_stack_adjust
= 0;
3049 CORE_ADDR func_start
, func_end
;
3053 if (!find_pc_partial_function (pc
, NULL
, &func_start
, &func_end
))
3056 /* The epilogue is a sequence of instructions along the following lines:
3058 - add stack frame size to SP or FP
3059 - [if frame pointer used] restore SP from FP
3060 - restore registers from SP [may include PC]
3061 - a return-type instruction [if PC wasn't already restored]
3063 In a first pass, we scan forward from the current PC and verify the
3064 instructions we find as compatible with this sequence, ending in a
3067 However, this is not sufficient to distinguish indirect function calls
3068 within a function from indirect tail calls in the epilogue in some cases.
3069 Therefore, if we didn't already find any SP-changing instruction during
3070 forward scan, we add a backward scanning heuristic to ensure we actually
3071 are in the epilogue. */
3074 while (scan_pc
< func_end
&& !found_return
)
3076 if (target_read_memory (scan_pc
, buf
, 2))
3080 insn
= extract_unsigned_integer (buf
, 2, byte_order_for_code
);
3082 if ((insn
& 0xff80) == 0x4700) /* bx <Rm> */
3084 else if (insn
== 0x46f7) /* mov pc, lr */
3086 else if (insn
== 0x46bd) /* mov sp, r7 */
3087 found_stack_adjust
= 1;
3088 else if ((insn
& 0xff00) == 0xb000) /* add sp, imm or sub sp, imm */
3089 found_stack_adjust
= 1;
3090 else if ((insn
& 0xfe00) == 0xbc00) /* pop <registers> */
3092 found_stack_adjust
= 1;
3093 if (insn
& 0x0100) /* <registers> include PC. */
3096 else if ((insn
& 0xe000) == 0xe000) /* 32-bit Thumb-2 instruction */
3098 if (target_read_memory (scan_pc
, buf
, 2))
3102 insn2
= extract_unsigned_integer (buf
, 2, byte_order_for_code
);
3104 if (insn
== 0xe8bd) /* ldm.w sp!, <registers> */
3106 found_stack_adjust
= 1;
3107 if (insn2
& 0x8000) /* <registers> include PC. */
3110 else if (insn
== 0xf85d /* ldr.w <Rt>, [sp], #4 */
3111 && (insn2
& 0x0fff) == 0x0b04)
3113 found_stack_adjust
= 1;
3114 if ((insn2
& 0xf000) == 0xf000) /* <Rt> is PC. */
3117 else if ((insn
& 0xffbf) == 0xecbd /* vldm sp!, <list> */
3118 && (insn2
& 0x0e00) == 0x0a00)
3119 found_stack_adjust
= 1;
3130 /* Since any instruction in the epilogue sequence, with the possible
3131 exception of return itself, updates the stack pointer, we need to
3132 scan backwards for at most one instruction. Try either a 16-bit or
3133 a 32-bit instruction. This is just a heuristic, so we do not worry
3134 too much about false positives. */
3136 if (!found_stack_adjust
)
3138 if (pc
- 4 < func_start
)
3140 if (target_read_memory (pc
- 4, buf
, 4))
3143 insn
= extract_unsigned_integer (buf
, 2, byte_order_for_code
);
3144 insn2
= extract_unsigned_integer (buf
+ 2, 2, byte_order_for_code
);
3146 if (insn2
== 0x46bd) /* mov sp, r7 */
3147 found_stack_adjust
= 1;
3148 else if ((insn2
& 0xff00) == 0xb000) /* add sp, imm or sub sp, imm */
3149 found_stack_adjust
= 1;
3150 else if ((insn2
& 0xff00) == 0xbc00) /* pop <registers> without PC */
3151 found_stack_adjust
= 1;
3152 else if (insn
== 0xe8bd) /* ldm.w sp!, <registers> */
3153 found_stack_adjust
= 1;
3154 else if (insn
== 0xf85d /* ldr.w <Rt>, [sp], #4 */
3155 && (insn2
& 0x0fff) == 0x0b04)
3156 found_stack_adjust
= 1;
3157 else if ((insn
& 0xffbf) == 0xecbd /* vldm sp!, <list> */
3158 && (insn2
& 0x0e00) == 0x0a00)
3159 found_stack_adjust
= 1;
3162 return found_stack_adjust
;
3165 /* Return true if we are in the function's epilogue, i.e. after the
3166 instruction that destroyed the function's stack frame. */
3169 arm_in_function_epilogue_p (struct gdbarch
*gdbarch
, CORE_ADDR pc
)
3171 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
3173 int found_return
, found_stack_adjust
;
3174 CORE_ADDR func_start
, func_end
;
3176 if (arm_pc_is_thumb (gdbarch
, pc
))
3177 return thumb_in_function_epilogue_p (gdbarch
, pc
);
3179 if (!find_pc_partial_function (pc
, NULL
, &func_start
, &func_end
))
3182 /* We are in the epilogue if the previous instruction was a stack
3183 adjustment and the next instruction is a possible return (bx, mov
3184 pc, or pop). We could have to scan backwards to find the stack
3185 adjustment, or forwards to find the return, but this is a decent
3186 approximation. First scan forwards. */
3189 insn
= read_memory_unsigned_integer (pc
, 4, byte_order_for_code
);
3190 if (bits (insn
, 28, 31) != INST_NV
)
3192 if ((insn
& 0x0ffffff0) == 0x012fff10)
3195 else if ((insn
& 0x0ffffff0) == 0x01a0f000)
3198 else if ((insn
& 0x0fff0000) == 0x08bd0000
3199 && (insn
& 0x0000c000) != 0)
3200 /* POP (LDMIA), including PC or LR. */
3207 /* Scan backwards. This is just a heuristic, so do not worry about
3208 false positives from mode changes. */
3210 if (pc
< func_start
+ 4)
3213 found_stack_adjust
= 0;
3214 insn
= read_memory_unsigned_integer (pc
- 4, 4, byte_order_for_code
);
3215 if (bits (insn
, 28, 31) != INST_NV
)
3217 if ((insn
& 0x0df0f000) == 0x0080d000)
3218 /* ADD SP (register or immediate). */
3219 found_stack_adjust
= 1;
3220 else if ((insn
& 0x0df0f000) == 0x0040d000)
3221 /* SUB SP (register or immediate). */
3222 found_stack_adjust
= 1;
3223 else if ((insn
& 0x0ffffff0) == 0x01a0d000)
3225 found_stack_adjust
= 1;
3226 else if ((insn
& 0x0fff0000) == 0x08bd0000)
3228 found_stack_adjust
= 1;
3231 if (found_stack_adjust
)
/* When arguments must be pushed onto the stack, they go on in reverse
   order.  The code below implements a FILO (stack) to do this.  */

struct stack_item
{
  int len;			/* Number of bytes in DATA.  */
  struct stack_item *prev;	/* Next item down the stack, or NULL.  */
  void *data;			/* Owned copy of the argument bytes.  */
};

/* Push a copy of CONTENTS (LEN bytes) onto the stack headed by PREV;
   return the new head.  Ownership of the copy stays with the stack.  */

static struct stack_item *
push_stack_item (struct stack_item *prev, const void *contents, int len)
{
  struct stack_item *si;

  si = xmalloc (sizeof *si);
  si->data = xmalloc (len);
  si->len = len;
  si->prev = prev;
  memcpy (si->data, contents, len);

  return si;
}
3260 static struct stack_item
*
3261 pop_stack_item (struct stack_item
*si
)
3263 struct stack_item
*dead
= si
;
3271 /* Return the alignment (in bytes) of the given type. */
3274 arm_type_align (struct type
*t
)
3280 t
= check_typedef (t
);
3281 switch (TYPE_CODE (t
))
3284 /* Should never happen. */
3285 internal_error (__FILE__
, __LINE__
, _("unknown type alignment"));
3289 case TYPE_CODE_ENUM
:
3293 case TYPE_CODE_RANGE
:
3294 case TYPE_CODE_BITSTRING
:
3296 case TYPE_CODE_CHAR
:
3297 case TYPE_CODE_BOOL
:
3298 return TYPE_LENGTH (t
);
3300 case TYPE_CODE_ARRAY
:
3301 case TYPE_CODE_COMPLEX
:
3302 /* TODO: What about vector types? */
3303 return arm_type_align (TYPE_TARGET_TYPE (t
));
3305 case TYPE_CODE_STRUCT
:
3306 case TYPE_CODE_UNION
:
3308 for (n
= 0; n
< TYPE_NFIELDS (t
); n
++)
3310 falign
= arm_type_align (TYPE_FIELD_TYPE (t
, n
));
/* Possible base types for a candidate for passing and returning in
   VFP registers.  */

enum arm_vfp_cprc_base_type
{
  VFP_CPRC_UNKNOWN,
  VFP_CPRC_SINGLE,
  VFP_CPRC_DOUBLE,
  VFP_CPRC_VEC64,
  VFP_CPRC_VEC128
};
3330 /* The length of one element of base type B. */
3333 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b
)
3337 case VFP_CPRC_SINGLE
:
3339 case VFP_CPRC_DOUBLE
:
3341 case VFP_CPRC_VEC64
:
3343 case VFP_CPRC_VEC128
:
3346 internal_error (__FILE__
, __LINE__
, _("Invalid VFP CPRC type: %d."),
3351 /* The character ('s', 'd' or 'q') for the type of VFP register used
3352 for passing base type B. */
3355 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b
)
3359 case VFP_CPRC_SINGLE
:
3361 case VFP_CPRC_DOUBLE
:
3363 case VFP_CPRC_VEC64
:
3365 case VFP_CPRC_VEC128
:
3368 internal_error (__FILE__
, __LINE__
, _("Invalid VFP CPRC type: %d."),
3373 /* Determine whether T may be part of a candidate for passing and
3374 returning in VFP registers, ignoring the limit on the total number
3375 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3376 classification of the first valid component found; if it is not
3377 VFP_CPRC_UNKNOWN, all components must have the same classification
3378 as *BASE_TYPE. If it is found that T contains a type not permitted
3379 for passing and returning in VFP registers, a type differently
3380 classified from *BASE_TYPE, or two types differently classified
3381 from each other, return -1, otherwise return the total number of
3382 base-type elements found (possibly 0 in an empty structure or
3383 array). Vectors and complex types are not currently supported,
3384 matching the generic AAPCS support. */
3387 arm_vfp_cprc_sub_candidate (struct type
*t
,
3388 enum arm_vfp_cprc_base_type
*base_type
)
3390 t
= check_typedef (t
);
3391 switch (TYPE_CODE (t
))
3394 switch (TYPE_LENGTH (t
))
3397 if (*base_type
== VFP_CPRC_UNKNOWN
)
3398 *base_type
= VFP_CPRC_SINGLE
;
3399 else if (*base_type
!= VFP_CPRC_SINGLE
)
3404 if (*base_type
== VFP_CPRC_UNKNOWN
)
3405 *base_type
= VFP_CPRC_DOUBLE
;
3406 else if (*base_type
!= VFP_CPRC_DOUBLE
)
3415 case TYPE_CODE_ARRAY
:
3419 count
= arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t
), base_type
);
3422 if (TYPE_LENGTH (t
) == 0)
3424 gdb_assert (count
== 0);
3427 else if (count
== 0)
3429 unitlen
= arm_vfp_cprc_unit_length (*base_type
);
3430 gdb_assert ((TYPE_LENGTH (t
) % unitlen
) == 0);
3431 return TYPE_LENGTH (t
) / unitlen
;
3435 case TYPE_CODE_STRUCT
:
3440 for (i
= 0; i
< TYPE_NFIELDS (t
); i
++)
3442 int sub_count
= arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t
, i
),
3444 if (sub_count
== -1)
3448 if (TYPE_LENGTH (t
) == 0)
3450 gdb_assert (count
== 0);
3453 else if (count
== 0)
3455 unitlen
= arm_vfp_cprc_unit_length (*base_type
);
3456 if (TYPE_LENGTH (t
) != unitlen
* count
)
3461 case TYPE_CODE_UNION
:
3466 for (i
= 0; i
< TYPE_NFIELDS (t
); i
++)
3468 int sub_count
= arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t
, i
),
3470 if (sub_count
== -1)
3472 count
= (count
> sub_count
? count
: sub_count
);
3474 if (TYPE_LENGTH (t
) == 0)
3476 gdb_assert (count
== 0);
3479 else if (count
== 0)
3481 unitlen
= arm_vfp_cprc_unit_length (*base_type
);
3482 if (TYPE_LENGTH (t
) != unitlen
* count
)
3494 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3495 if passed to or returned from a non-variadic function with the VFP
3496 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3497 *BASE_TYPE to the base type for T and *COUNT to the number of
3498 elements of that base type before returning. */
3501 arm_vfp_call_candidate (struct type
*t
, enum arm_vfp_cprc_base_type
*base_type
,
3504 enum arm_vfp_cprc_base_type b
= VFP_CPRC_UNKNOWN
;
3505 int c
= arm_vfp_cprc_sub_candidate (t
, &b
);
3506 if (c
<= 0 || c
> 4)
3513 /* Return 1 if the VFP ABI should be used for passing arguments to and
3514 returning values from a function of type FUNC_TYPE, 0
3518 arm_vfp_abi_for_function (struct gdbarch
*gdbarch
, struct type
*func_type
)
3520 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
3521 /* Variadic functions always use the base ABI. Assume that functions
3522 without debug info are not variadic. */
3523 if (func_type
&& TYPE_VARARGS (check_typedef (func_type
)))
3525 /* The VFP ABI is only supported as a variant of AAPCS. */
3526 if (tdep
->arm_abi
!= ARM_ABI_AAPCS
)
3528 return gdbarch_tdep (gdbarch
)->fp_model
== ARM_FLOAT_VFP
;
3531 /* We currently only support passing parameters in integer registers, which
3532 conforms with GCC's default model, and VFP argument passing following
3533 the VFP variant of AAPCS. Several other variants exist and
3534 we should probably support some of them based on the selected ABI. */
3537 arm_push_dummy_call (struct gdbarch
*gdbarch
, struct value
*function
,
3538 struct regcache
*regcache
, CORE_ADDR bp_addr
, int nargs
,
3539 struct value
**args
, CORE_ADDR sp
, int struct_return
,
3540 CORE_ADDR struct_addr
)
3542 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
3546 struct stack_item
*si
= NULL
;
3549 unsigned vfp_regs_free
= (1 << 16) - 1;
3551 /* Determine the type of this function and whether the VFP ABI
3553 ftype
= check_typedef (value_type (function
));
3554 if (TYPE_CODE (ftype
) == TYPE_CODE_PTR
)
3555 ftype
= check_typedef (TYPE_TARGET_TYPE (ftype
));
3556 use_vfp_abi
= arm_vfp_abi_for_function (gdbarch
, ftype
);
3558 /* Set the return address. For the ARM, the return breakpoint is
3559 always at BP_ADDR. */
3560 if (arm_pc_is_thumb (gdbarch
, bp_addr
))
3562 regcache_cooked_write_unsigned (regcache
, ARM_LR_REGNUM
, bp_addr
);
3564 /* Walk through the list of args and determine how large a temporary
3565 stack is required. Need to take care here as structs may be
3566 passed on the stack, and we have to push them. */
3569 argreg
= ARM_A1_REGNUM
;
3572 /* The struct_return pointer occupies the first parameter
3573 passing register. */
3577 fprintf_unfiltered (gdb_stdlog
, "struct return in %s = %s\n",
3578 gdbarch_register_name (gdbarch
, argreg
),
3579 paddress (gdbarch
, struct_addr
));
3580 regcache_cooked_write_unsigned (regcache
, argreg
, struct_addr
);
3584 for (argnum
= 0; argnum
< nargs
; argnum
++)
3587 struct type
*arg_type
;
3588 struct type
*target_type
;
3589 enum type_code typecode
;
3590 const bfd_byte
*val
;
3592 enum arm_vfp_cprc_base_type vfp_base_type
;
3594 int may_use_core_reg
= 1;
3596 arg_type
= check_typedef (value_type (args
[argnum
]));
3597 len
= TYPE_LENGTH (arg_type
);
3598 target_type
= TYPE_TARGET_TYPE (arg_type
);
3599 typecode
= TYPE_CODE (arg_type
);
3600 val
= value_contents (args
[argnum
]);
3602 align
= arm_type_align (arg_type
);
3603 /* Round alignment up to a whole number of words. */
3604 align
= (align
+ INT_REGISTER_SIZE
- 1) & ~(INT_REGISTER_SIZE
- 1);
3605 /* Different ABIs have different maximum alignments. */
3606 if (gdbarch_tdep (gdbarch
)->arm_abi
== ARM_ABI_APCS
)
3608 /* The APCS ABI only requires word alignment. */
3609 align
= INT_REGISTER_SIZE
;
3613 /* The AAPCS requires at most doubleword alignment. */
3614 if (align
> INT_REGISTER_SIZE
* 2)
3615 align
= INT_REGISTER_SIZE
* 2;
3619 && arm_vfp_call_candidate (arg_type
, &vfp_base_type
,
3627 /* Because this is a CPRC it cannot go in a core register or
3628 cause a core register to be skipped for alignment.
3629 Either it goes in VFP registers and the rest of this loop
3630 iteration is skipped for this argument, or it goes on the
3631 stack (and the stack alignment code is correct for this
3633 may_use_core_reg
= 0;
3635 unit_length
= arm_vfp_cprc_unit_length (vfp_base_type
);
3636 shift
= unit_length
/ 4;
3637 mask
= (1 << (shift
* vfp_base_count
)) - 1;
3638 for (regno
= 0; regno
< 16; regno
+= shift
)
3639 if (((vfp_regs_free
>> regno
) & mask
) == mask
)
3648 vfp_regs_free
&= ~(mask
<< regno
);
3649 reg_scaled
= regno
/ shift
;
3650 reg_char
= arm_vfp_cprc_reg_char (vfp_base_type
);
3651 for (i
= 0; i
< vfp_base_count
; i
++)
3655 if (reg_char
== 'q')
3656 arm_neon_quad_write (gdbarch
, regcache
, reg_scaled
+ i
,
3657 val
+ i
* unit_length
);
3660 sprintf (name_buf
, "%c%d", reg_char
, reg_scaled
+ i
);
3661 regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
3663 regcache_cooked_write (regcache
, regnum
,
3664 val
+ i
* unit_length
);
3671 /* This CPRC could not go in VFP registers, so all VFP
3672 registers are now marked as used. */
3677 /* Push stack padding for doubleword alignment. */
3678 if (nstack
& (align
- 1))
3680 si
= push_stack_item (si
, val
, INT_REGISTER_SIZE
);
3681 nstack
+= INT_REGISTER_SIZE
;
3684 /* Doubleword aligned quantities must go in even register pairs. */
3685 if (may_use_core_reg
3686 && argreg
<= ARM_LAST_ARG_REGNUM
3687 && align
> INT_REGISTER_SIZE
3691 /* If the argument is a pointer to a function, and it is a
3692 Thumb function, create a LOCAL copy of the value and set
3693 the THUMB bit in it. */
3694 if (TYPE_CODE_PTR
== typecode
3695 && target_type
!= NULL
3696 && TYPE_CODE_FUNC
== TYPE_CODE (check_typedef (target_type
)))
3698 CORE_ADDR regval
= extract_unsigned_integer (val
, len
, byte_order
);
3699 if (arm_pc_is_thumb (gdbarch
, regval
))
3701 bfd_byte
*copy
= alloca (len
);
3702 store_unsigned_integer (copy
, len
, byte_order
,
3703 MAKE_THUMB_ADDR (regval
));
3708 /* Copy the argument to general registers or the stack in
3709 register-sized pieces. Large arguments are split between
3710 registers and stack. */
3713 int partial_len
= len
< INT_REGISTER_SIZE
? len
: INT_REGISTER_SIZE
;
3715 if (may_use_core_reg
&& argreg
<= ARM_LAST_ARG_REGNUM
)
3717 /* The argument is being passed in a general purpose
3720 = extract_unsigned_integer (val
, partial_len
, byte_order
);
3721 if (byte_order
== BFD_ENDIAN_BIG
)
3722 regval
<<= (INT_REGISTER_SIZE
- partial_len
) * 8;
3724 fprintf_unfiltered (gdb_stdlog
, "arg %d in %s = 0x%s\n",
3726 gdbarch_register_name
3728 phex (regval
, INT_REGISTER_SIZE
));
3729 regcache_cooked_write_unsigned (regcache
, argreg
, regval
);
3734 /* Push the arguments onto the stack. */
3736 fprintf_unfiltered (gdb_stdlog
, "arg %d @ sp + %d\n",
3738 si
= push_stack_item (si
, val
, INT_REGISTER_SIZE
);
3739 nstack
+= INT_REGISTER_SIZE
;
3746 /* If we have an odd number of words to push, then decrement the stack
3747 by one word now, so first stack argument will be dword aligned. */
3754 write_memory (sp
, si
->data
, si
->len
);
3755 si
= pop_stack_item (si
);
3758 /* Finally, update the SP register. */
3759 regcache_cooked_write_unsigned (regcache
, ARM_SP_REGNUM
, sp
);
3765 /* Always align the frame to an 8-byte boundary. This is required on
3766 some platforms and harmless on the rest. */
3769 arm_frame_align (struct gdbarch
*gdbarch
, CORE_ADDR sp
)
3771 /* Align the stack to eight bytes. */
3772 return sp
& ~ (CORE_ADDR
) 7;
/* Print to stdout a mnemonic for each exception flag set in the low
   five bits of FLAGS, followed by a newline.  The names are presumably
   the IEEE exception set in standard order — invalid operation, divide
   by zero, overflow, underflow, inexact (TODO confirm against the FPA
   documentation).  */

static void
print_fpu_flags (int flags)
{
  if (flags & (1 << 0))
    fputs ("IVO ", stdout);
  if (flags & (1 << 1))
    fputs ("DVZ ", stdout);
  if (flags & (1 << 2))
    fputs ("OFL ", stdout);
  if (flags & (1 << 3))
    fputs ("UFL ", stdout);
  if (flags & (1 << 4))
    fputs ("INX ", stdout);
  putc ('\n', stdout);
}
3791 /* Print interesting information about the floating point processor
3792 (if present) or emulator. */
3794 arm_print_float_info (struct gdbarch
*gdbarch
, struct ui_file
*file
,
3795 struct frame_info
*frame
, const char *args
)
3797 unsigned long status
= get_frame_register_unsigned (frame
, ARM_FPS_REGNUM
);
3800 type
= (status
>> 24) & 127;
3801 if (status
& (1 << 31))
3802 printf (_("Hardware FPU type %d\n"), type
);
3804 printf (_("Software FPU type %d\n"), type
);
3805 /* i18n: [floating point unit] mask */
3806 fputs (_("mask: "), stdout
);
3807 print_fpu_flags (status
>> 16);
3808 /* i18n: [floating point unit] flags */
3809 fputs (_("flags: "), stdout
);
3810 print_fpu_flags (status
);
3813 /* Construct the ARM extended floating point type. */
3814 static struct type
*
3815 arm_ext_type (struct gdbarch
*gdbarch
)
3817 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
3819 if (!tdep
->arm_ext_type
)
3821 = arch_float_type (gdbarch
, -1, "builtin_type_arm_ext",
3822 floatformats_arm_ext
);
3824 return tdep
->arm_ext_type
;
3827 static struct type
*
3828 arm_neon_double_type (struct gdbarch
*gdbarch
)
3830 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
3832 if (tdep
->neon_double_type
== NULL
)
3834 struct type
*t
, *elem
;
3836 t
= arch_composite_type (gdbarch
, "__gdb_builtin_type_neon_d",
3838 elem
= builtin_type (gdbarch
)->builtin_uint8
;
3839 append_composite_type_field (t
, "u8", init_vector_type (elem
, 8));
3840 elem
= builtin_type (gdbarch
)->builtin_uint16
;
3841 append_composite_type_field (t
, "u16", init_vector_type (elem
, 4));
3842 elem
= builtin_type (gdbarch
)->builtin_uint32
;
3843 append_composite_type_field (t
, "u32", init_vector_type (elem
, 2));
3844 elem
= builtin_type (gdbarch
)->builtin_uint64
;
3845 append_composite_type_field (t
, "u64", elem
);
3846 elem
= builtin_type (gdbarch
)->builtin_float
;
3847 append_composite_type_field (t
, "f32", init_vector_type (elem
, 2));
3848 elem
= builtin_type (gdbarch
)->builtin_double
;
3849 append_composite_type_field (t
, "f64", elem
);
3851 TYPE_VECTOR (t
) = 1;
3852 TYPE_NAME (t
) = "neon_d";
3853 tdep
->neon_double_type
= t
;
3856 return tdep
->neon_double_type
;
3859 /* FIXME: The vector types are not correctly ordered on big-endian
3860 targets. Just as s0 is the low bits of d0, d0[0] is also the low
3861 bits of d0 - regardless of what unit size is being held in d0. So
3862 the offset of the first uint8 in d0 is 7, but the offset of the
3863 first float is 4. This code works as-is for little-endian
3866 static struct type
*
3867 arm_neon_quad_type (struct gdbarch
*gdbarch
)
3869 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
3871 if (tdep
->neon_quad_type
== NULL
)
3873 struct type
*t
, *elem
;
3875 t
= arch_composite_type (gdbarch
, "__gdb_builtin_type_neon_q",
3877 elem
= builtin_type (gdbarch
)->builtin_uint8
;
3878 append_composite_type_field (t
, "u8", init_vector_type (elem
, 16));
3879 elem
= builtin_type (gdbarch
)->builtin_uint16
;
3880 append_composite_type_field (t
, "u16", init_vector_type (elem
, 8));
3881 elem
= builtin_type (gdbarch
)->builtin_uint32
;
3882 append_composite_type_field (t
, "u32", init_vector_type (elem
, 4));
3883 elem
= builtin_type (gdbarch
)->builtin_uint64
;
3884 append_composite_type_field (t
, "u64", init_vector_type (elem
, 2));
3885 elem
= builtin_type (gdbarch
)->builtin_float
;
3886 append_composite_type_field (t
, "f32", init_vector_type (elem
, 4));
3887 elem
= builtin_type (gdbarch
)->builtin_double
;
3888 append_composite_type_field (t
, "f64", init_vector_type (elem
, 2));
3890 TYPE_VECTOR (t
) = 1;
3891 TYPE_NAME (t
) = "neon_q";
3892 tdep
->neon_quad_type
= t
;
3895 return tdep
->neon_quad_type
;
3898 /* Return the GDB type object for the "standard" data type of data in
3901 static struct type
*
3902 arm_register_type (struct gdbarch
*gdbarch
, int regnum
)
3904 int num_regs
= gdbarch_num_regs (gdbarch
);
3906 if (gdbarch_tdep (gdbarch
)->have_vfp_pseudos
3907 && regnum
>= num_regs
&& regnum
< num_regs
+ 32)
3908 return builtin_type (gdbarch
)->builtin_float
;
3910 if (gdbarch_tdep (gdbarch
)->have_neon_pseudos
3911 && regnum
>= num_regs
+ 32 && regnum
< num_regs
+ 32 + 16)
3912 return arm_neon_quad_type (gdbarch
);
3914 /* If the target description has register information, we are only
3915 in this function so that we can override the types of
3916 double-precision registers for NEON. */
3917 if (tdesc_has_registers (gdbarch_target_desc (gdbarch
)))
3919 struct type
*t
= tdesc_register_type (gdbarch
, regnum
);
3921 if (regnum
>= ARM_D0_REGNUM
&& regnum
< ARM_D0_REGNUM
+ 32
3922 && TYPE_CODE (t
) == TYPE_CODE_FLT
3923 && gdbarch_tdep (gdbarch
)->have_neon
)
3924 return arm_neon_double_type (gdbarch
);
3929 if (regnum
>= ARM_F0_REGNUM
&& regnum
< ARM_F0_REGNUM
+ NUM_FREGS
)
3931 if (!gdbarch_tdep (gdbarch
)->have_fpa_registers
)
3932 return builtin_type (gdbarch
)->builtin_void
;
3934 return arm_ext_type (gdbarch
);
3936 else if (regnum
== ARM_SP_REGNUM
)
3937 return builtin_type (gdbarch
)->builtin_data_ptr
;
3938 else if (regnum
== ARM_PC_REGNUM
)
3939 return builtin_type (gdbarch
)->builtin_func_ptr
;
3940 else if (regnum
>= ARRAY_SIZE (arm_register_names
))
3941 /* These registers are only supported on targets which supply
3942 an XML description. */
3943 return builtin_type (gdbarch
)->builtin_int0
;
3945 return builtin_type (gdbarch
)->builtin_uint32
;
3948 /* Map a DWARF register REGNUM onto the appropriate GDB register
3952 arm_dwarf_reg_to_regnum (struct gdbarch
*gdbarch
, int reg
)
3954 /* Core integer regs. */
3955 if (reg
>= 0 && reg
<= 15)
3958 /* Legacy FPA encoding. These were once used in a way which
3959 overlapped with VFP register numbering, so their use is
3960 discouraged, but GDB doesn't support the ARM toolchain
3961 which used them for VFP. */
3962 if (reg
>= 16 && reg
<= 23)
3963 return ARM_F0_REGNUM
+ reg
- 16;
3965 /* New assignments for the FPA registers. */
3966 if (reg
>= 96 && reg
<= 103)
3967 return ARM_F0_REGNUM
+ reg
- 96;
3969 /* WMMX register assignments. */
3970 if (reg
>= 104 && reg
<= 111)
3971 return ARM_WCGR0_REGNUM
+ reg
- 104;
3973 if (reg
>= 112 && reg
<= 127)
3974 return ARM_WR0_REGNUM
+ reg
- 112;
3976 if (reg
>= 192 && reg
<= 199)
3977 return ARM_WC0_REGNUM
+ reg
- 192;
3979 /* VFP v2 registers. A double precision value is actually
3980 in d1 rather than s2, but the ABI only defines numbering
3981 for the single precision registers. This will "just work"
3982 in GDB for little endian targets (we'll read eight bytes,
3983 starting in s0 and then progressing to s1), but will be
3984 reversed on big endian targets with VFP. This won't
3985 be a problem for the new Neon quad registers; you're supposed
3986 to use DW_OP_piece for those. */
3987 if (reg
>= 64 && reg
<= 95)
3991 sprintf (name_buf
, "s%d", reg
- 64);
3992 return user_reg_map_name_to_regnum (gdbarch
, name_buf
,
3996 /* VFP v3 / Neon registers. This range is also used for VFP v2
3997 registers, except that it now describes d0 instead of s0. */
3998 if (reg
>= 256 && reg
<= 287)
4002 sprintf (name_buf
, "d%d", reg
- 256);
4003 return user_reg_map_name_to_regnum (gdbarch
, name_buf
,
4010 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4012 arm_register_sim_regno (struct gdbarch
*gdbarch
, int regnum
)
4015 gdb_assert (reg
>= 0 && reg
< gdbarch_num_regs (gdbarch
));
4017 if (regnum
>= ARM_WR0_REGNUM
&& regnum
<= ARM_WR15_REGNUM
)
4018 return regnum
- ARM_WR0_REGNUM
+ SIM_ARM_IWMMXT_COP0R0_REGNUM
;
4020 if (regnum
>= ARM_WC0_REGNUM
&& regnum
<= ARM_WC7_REGNUM
)
4021 return regnum
- ARM_WC0_REGNUM
+ SIM_ARM_IWMMXT_COP1R0_REGNUM
;
4023 if (regnum
>= ARM_WCGR0_REGNUM
&& regnum
<= ARM_WCGR7_REGNUM
)
4024 return regnum
- ARM_WCGR0_REGNUM
+ SIM_ARM_IWMMXT_COP1R8_REGNUM
;
4026 if (reg
< NUM_GREGS
)
4027 return SIM_ARM_R0_REGNUM
+ reg
;
4030 if (reg
< NUM_FREGS
)
4031 return SIM_ARM_FP0_REGNUM
+ reg
;
4034 if (reg
< NUM_SREGS
)
4035 return SIM_ARM_FPS_REGNUM
+ reg
;
4038 internal_error (__FILE__
, __LINE__
, _("Bad REGNUM %d"), regnum
);
4041 /* NOTE: cagney/2001-08-20: Both convert_from_extended() and
4042 convert_to_extended() use floatformat_arm_ext_littlebyte_bigword.
4043 It is thought that this is is the floating-point register format on
4044 little-endian systems. */
4047 convert_from_extended (const struct floatformat
*fmt
, const void *ptr
,
4048 void *dbl
, int endianess
)
4052 if (endianess
== BFD_ENDIAN_BIG
)
4053 floatformat_to_doublest (&floatformat_arm_ext_big
, ptr
, &d
);
4055 floatformat_to_doublest (&floatformat_arm_ext_littlebyte_bigword
,
4057 floatformat_from_doublest (fmt
, &d
, dbl
);
4061 convert_to_extended (const struct floatformat
*fmt
, void *dbl
, const void *ptr
,
4066 floatformat_to_doublest (fmt
, ptr
, &d
);
4067 if (endianess
== BFD_ENDIAN_BIG
)
4068 floatformat_from_doublest (&floatformat_arm_ext_big
, &d
, dbl
);
4070 floatformat_from_doublest (&floatformat_arm_ext_littlebyte_bigword
,
4075 condition_true (unsigned long cond
, unsigned long status_reg
)
4077 if (cond
== INST_AL
|| cond
== INST_NV
)
4083 return ((status_reg
& FLAG_Z
) != 0);
4085 return ((status_reg
& FLAG_Z
) == 0);
4087 return ((status_reg
& FLAG_C
) != 0);
4089 return ((status_reg
& FLAG_C
) == 0);
4091 return ((status_reg
& FLAG_N
) != 0);
4093 return ((status_reg
& FLAG_N
) == 0);
4095 return ((status_reg
& FLAG_V
) != 0);
4097 return ((status_reg
& FLAG_V
) == 0);
4099 return ((status_reg
& (FLAG_C
| FLAG_Z
)) == FLAG_C
);
4101 return ((status_reg
& (FLAG_C
| FLAG_Z
)) != FLAG_C
);
4103 return (((status_reg
& FLAG_N
) == 0) == ((status_reg
& FLAG_V
) == 0));
4105 return (((status_reg
& FLAG_N
) == 0) != ((status_reg
& FLAG_V
) == 0));
4107 return (((status_reg
& FLAG_Z
) == 0)
4108 && (((status_reg
& FLAG_N
) == 0)
4109 == ((status_reg
& FLAG_V
) == 0)));
4111 return (((status_reg
& FLAG_Z
) != 0)
4112 || (((status_reg
& FLAG_N
) == 0)
4113 != ((status_reg
& FLAG_V
) == 0)));
4118 static unsigned long
4119 shifted_reg_val (struct frame_info
*frame
, unsigned long inst
, int carry
,
4120 unsigned long pc_val
, unsigned long status_reg
)
4122 unsigned long res
, shift
;
4123 int rm
= bits (inst
, 0, 3);
4124 unsigned long shifttype
= bits (inst
, 5, 6);
4128 int rs
= bits (inst
, 8, 11);
4129 shift
= (rs
== 15 ? pc_val
+ 8
4130 : get_frame_register_unsigned (frame
, rs
)) & 0xFF;
4133 shift
= bits (inst
, 7, 11);
4135 res
= (rm
== ARM_PC_REGNUM
4136 ? (pc_val
+ (bit (inst
, 4) ? 12 : 8))
4137 : get_frame_register_unsigned (frame
, rm
));
4142 res
= shift
>= 32 ? 0 : res
<< shift
;
4146 res
= shift
>= 32 ? 0 : res
>> shift
;
4152 res
= ((res
& 0x80000000L
)
4153 ? ~((~res
) >> shift
) : res
>> shift
);
4156 case 3: /* ROR/RRX */
4159 res
= (res
>> 1) | (carry
? 0x80000000L
: 0);
4161 res
= (res
>> shift
) | (res
<< (32 - shift
));
4165 return res
& 0xffffffff;
/* Return number of 1-bits in VAL.  */

static int
bitcount (unsigned long val)
{
  int nbits;

  /* Kernighan's trick: each iteration clears the rightmost set bit,
     so the loop runs once per 1-bit.  */
  for (nbits = 0; val != 0; nbits++)
    val &= val - 1;		/* Delete rightmost 1-bit in val.  */
  return nbits;
}
/* Return the size in bytes of the complete Thumb instruction whose
   first halfword is INST1.  A halfword whose top five bits are
   0b11101, 0b11110 or 0b11111 introduces a 32-bit Thumb-2
   instruction; everything else is a 16-bit instruction.  */

static int
thumb_insn_size (unsigned short inst1)
{
  if ((inst1 & 0xe000) == 0xe000 && (inst1 & 0x1800) != 0)
    return 4;
  else
    return 2;
}
/* Advance the Thumb-2 IT state ITSTATE past one instruction and
   return the new state, or zero once the IT block is finished.  */

static unsigned int
thumb_advance_itstate (unsigned int itstate)
{
  /* Preserve IT[7:5], the first three bits of the condition.  Shift
     the upcoming condition flags left by one bit.  */
  itstate = (itstate & 0xe0) | ((itstate << 1) & 0x1f);

  /* If we have finished the IT block, clear the state.  */
  if ((itstate & 0x0f) == 0)
    itstate = 0;

  return itstate;
}
4205 /* Find the next PC after the current instruction executes. In some
4206 cases we can not statically determine the answer (see the IT state
4207 handling in this function); in that case, a breakpoint may be
4208 inserted in addition to the returned PC, which will be used to set
4209 another breakpoint by our caller. */
4212 thumb_get_next_pc_raw (struct frame_info
*frame
, CORE_ADDR pc
)
4214 struct gdbarch
*gdbarch
= get_frame_arch (frame
);
4215 struct address_space
*aspace
= get_frame_address_space (frame
);
4216 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
4217 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
4218 unsigned long pc_val
= ((unsigned long) pc
) + 4; /* PC after prefetch */
4219 unsigned short inst1
;
4220 CORE_ADDR nextpc
= pc
+ 2; /* Default is next instruction. */
4221 unsigned long offset
;
4222 ULONGEST status
, itstate
;
4224 nextpc
= MAKE_THUMB_ADDR (nextpc
);
4225 pc_val
= MAKE_THUMB_ADDR (pc_val
);
4227 inst1
= read_memory_unsigned_integer (pc
, 2, byte_order_for_code
);
4229 /* Thumb-2 conditional execution support. There are eight bits in
4230 the CPSR which describe conditional execution state. Once
4231 reconstructed (they're in a funny order), the low five bits
4232 describe the low bit of the condition for each instruction and
4233 how many instructions remain. The high three bits describe the
4234 base condition. One of the low four bits will be set if an IT
4235 block is active. These bits read as zero on earlier
4237 status
= get_frame_register_unsigned (frame
, ARM_PS_REGNUM
);
4238 itstate
= ((status
>> 8) & 0xfc) | ((status
>> 25) & 0x3);
4240 /* If-Then handling. On GNU/Linux, where this routine is used, we
4241 use an undefined instruction as a breakpoint. Unlike BKPT, IT
4242 can disable execution of the undefined instruction. So we might
4243 miss the breakpoint if we set it on a skipped conditional
4244 instruction. Because conditional instructions can change the
4245 flags, affecting the execution of further instructions, we may
4246 need to set two breakpoints. */
4248 if (gdbarch_tdep (gdbarch
)->thumb2_breakpoint
!= NULL
)
4250 if ((inst1
& 0xff00) == 0xbf00 && (inst1
& 0x000f) != 0)
4252 /* An IT instruction. Because this instruction does not
4253 modify the flags, we can accurately predict the next
4254 executed instruction. */
4255 itstate
= inst1
& 0x00ff;
4256 pc
+= thumb_insn_size (inst1
);
4258 while (itstate
!= 0 && ! condition_true (itstate
>> 4, status
))
4260 inst1
= read_memory_unsigned_integer (pc
, 2,
4261 byte_order_for_code
);
4262 pc
+= thumb_insn_size (inst1
);
4263 itstate
= thumb_advance_itstate (itstate
);
4266 return MAKE_THUMB_ADDR (pc
);
4268 else if (itstate
!= 0)
4270 /* We are in a conditional block. Check the condition. */
4271 if (! condition_true (itstate
>> 4, status
))
4273 /* Advance to the next executed instruction. */
4274 pc
+= thumb_insn_size (inst1
);
4275 itstate
= thumb_advance_itstate (itstate
);
4277 while (itstate
!= 0 && ! condition_true (itstate
>> 4, status
))
4279 inst1
= read_memory_unsigned_integer (pc
, 2,
4280 byte_order_for_code
);
4281 pc
+= thumb_insn_size (inst1
);
4282 itstate
= thumb_advance_itstate (itstate
);
4285 return MAKE_THUMB_ADDR (pc
);
4287 else if ((itstate
& 0x0f) == 0x08)
4289 /* This is the last instruction of the conditional
4290 block, and it is executed. We can handle it normally
4291 because the following instruction is not conditional,
4292 and we must handle it normally because it is
4293 permitted to branch. Fall through. */
4299 /* There are conditional instructions after this one.
4300 If this instruction modifies the flags, then we can
4301 not predict what the next executed instruction will
4302 be. Fortunately, this instruction is architecturally
4303 forbidden to branch; we know it will fall through.
4304 Start by skipping past it. */
4305 pc
+= thumb_insn_size (inst1
);
4306 itstate
= thumb_advance_itstate (itstate
);
4308 /* Set a breakpoint on the following instruction. */
4309 gdb_assert ((itstate
& 0x0f) != 0);
4310 arm_insert_single_step_breakpoint (gdbarch
, aspace
,
4311 MAKE_THUMB_ADDR (pc
));
4312 cond_negated
= (itstate
>> 4) & 1;
4314 /* Skip all following instructions with the same
4315 condition. If there is a later instruction in the IT
4316 block with the opposite condition, set the other
4317 breakpoint there. If not, then set a breakpoint on
4318 the instruction after the IT block. */
4321 inst1
= read_memory_unsigned_integer (pc
, 2,
4322 byte_order_for_code
);
4323 pc
+= thumb_insn_size (inst1
);
4324 itstate
= thumb_advance_itstate (itstate
);
4326 while (itstate
!= 0 && ((itstate
>> 4) & 1) == cond_negated
);
4328 return MAKE_THUMB_ADDR (pc
);
4332 else if (itstate
& 0x0f)
4334 /* We are in a conditional block. Check the condition. */
4335 int cond
= itstate
>> 4;
4337 if (! condition_true (cond
, status
))
4339 /* Advance to the next instruction. All the 32-bit
4340 instructions share a common prefix. */
4341 if ((inst1
& 0xe000) == 0xe000 && (inst1
& 0x1800) != 0)
4342 return MAKE_THUMB_ADDR (pc
+ 4);
4344 return MAKE_THUMB_ADDR (pc
+ 2);
4347 /* Otherwise, handle the instruction normally. */
4350 if ((inst1
& 0xff00) == 0xbd00) /* pop {rlist, pc} */
4354 /* Fetch the saved PC from the stack. It's stored above
4355 all of the other registers. */
4356 offset
= bitcount (bits (inst1
, 0, 7)) * INT_REGISTER_SIZE
;
4357 sp
= get_frame_register_unsigned (frame
, ARM_SP_REGNUM
);
4358 nextpc
= read_memory_unsigned_integer (sp
+ offset
, 4, byte_order
);
4360 else if ((inst1
& 0xf000) == 0xd000) /* conditional branch */
4362 unsigned long cond
= bits (inst1
, 8, 11);
4363 if (cond
== 0x0f) /* 0x0f = SWI */
4365 struct gdbarch_tdep
*tdep
;
4366 tdep
= gdbarch_tdep (gdbarch
);
4368 if (tdep
->syscall_next_pc
!= NULL
)
4369 nextpc
= tdep
->syscall_next_pc (frame
);
4372 else if (cond
!= 0x0f && condition_true (cond
, status
))
4373 nextpc
= pc_val
+ (sbits (inst1
, 0, 7) << 1);
4375 else if ((inst1
& 0xf800) == 0xe000) /* unconditional branch */
4377 nextpc
= pc_val
+ (sbits (inst1
, 0, 10) << 1);
4379 else if ((inst1
& 0xe000) == 0xe000) /* 32-bit instruction */
4381 unsigned short inst2
;
4382 inst2
= read_memory_unsigned_integer (pc
+ 2, 2, byte_order_for_code
);
4384 /* Default to the next instruction. */
4386 nextpc
= MAKE_THUMB_ADDR (nextpc
);
4388 if ((inst1
& 0xf800) == 0xf000 && (inst2
& 0x8000) == 0x8000)
4390 /* Branches and miscellaneous control instructions. */
4392 if ((inst2
& 0x1000) != 0 || (inst2
& 0xd001) == 0xc000)
4395 int j1
, j2
, imm1
, imm2
;
4397 imm1
= sbits (inst1
, 0, 10);
4398 imm2
= bits (inst2
, 0, 10);
4399 j1
= bit (inst2
, 13);
4400 j2
= bit (inst2
, 11);
4402 offset
= ((imm1
<< 12) + (imm2
<< 1));
4403 offset
^= ((!j2
) << 22) | ((!j1
) << 23);
4405 nextpc
= pc_val
+ offset
;
4406 /* For BLX make sure to clear the low bits. */
4407 if (bit (inst2
, 12) == 0)
4408 nextpc
= nextpc
& 0xfffffffc;
4410 else if (inst1
== 0xf3de && (inst2
& 0xff00) == 0x3f00)
4412 /* SUBS PC, LR, #imm8. */
4413 nextpc
= get_frame_register_unsigned (frame
, ARM_LR_REGNUM
);
4414 nextpc
-= inst2
& 0x00ff;
4416 else if ((inst2
& 0xd000) == 0x8000 && (inst1
& 0x0380) != 0x0380)
4418 /* Conditional branch. */
4419 if (condition_true (bits (inst1
, 6, 9), status
))
4421 int sign
, j1
, j2
, imm1
, imm2
;
4423 sign
= sbits (inst1
, 10, 10);
4424 imm1
= bits (inst1
, 0, 5);
4425 imm2
= bits (inst2
, 0, 10);
4426 j1
= bit (inst2
, 13);
4427 j2
= bit (inst2
, 11);
4429 offset
= (sign
<< 20) + (j2
<< 19) + (j1
<< 18);
4430 offset
+= (imm1
<< 12) + (imm2
<< 1);
4432 nextpc
= pc_val
+ offset
;
4436 else if ((inst1
& 0xfe50) == 0xe810)
4438 /* Load multiple or RFE. */
4439 int rn
, offset
, load_pc
= 1;
4441 rn
= bits (inst1
, 0, 3);
4442 if (bit (inst1
, 7) && !bit (inst1
, 8))
4445 if (!bit (inst2
, 15))
4447 offset
= bitcount (inst2
) * 4 - 4;
4449 else if (!bit (inst1
, 7) && bit (inst1
, 8))
4452 if (!bit (inst2
, 15))
4456 else if (bit (inst1
, 7) && bit (inst1
, 8))
4461 else if (!bit (inst1
, 7) && !bit (inst1
, 8))
4471 CORE_ADDR addr
= get_frame_register_unsigned (frame
, rn
);
4472 nextpc
= get_frame_memory_unsigned (frame
, addr
+ offset
, 4);
4475 else if ((inst1
& 0xffef) == 0xea4f && (inst2
& 0xfff0) == 0x0f00)
4477 /* MOV PC or MOVS PC. */
4478 nextpc
= get_frame_register_unsigned (frame
, bits (inst2
, 0, 3));
4479 nextpc
= MAKE_THUMB_ADDR (nextpc
);
4481 else if ((inst1
& 0xff70) == 0xf850 && (inst2
& 0xf000) == 0xf000)
4485 int rn
, load_pc
= 1;
4487 rn
= bits (inst1
, 0, 3);
4488 base
= get_frame_register_unsigned (frame
, rn
);
4489 if (rn
== ARM_PC_REGNUM
)
4491 base
= (base
+ 4) & ~(CORE_ADDR
) 0x3;
4493 base
+= bits (inst2
, 0, 11);
4495 base
-= bits (inst2
, 0, 11);
4497 else if (bit (inst1
, 7))
4498 base
+= bits (inst2
, 0, 11);
4499 else if (bit (inst2
, 11))
4501 if (bit (inst2
, 10))
4504 base
+= bits (inst2
, 0, 7);
4506 base
-= bits (inst2
, 0, 7);
4509 else if ((inst2
& 0x0fc0) == 0x0000)
4511 int shift
= bits (inst2
, 4, 5), rm
= bits (inst2
, 0, 3);
4512 base
+= get_frame_register_unsigned (frame
, rm
) << shift
;
4519 nextpc
= get_frame_memory_unsigned (frame
, base
, 4);
4521 else if ((inst1
& 0xfff0) == 0xe8d0 && (inst2
& 0xfff0) == 0xf000)
4524 CORE_ADDR tbl_reg
, table
, offset
, length
;
4526 tbl_reg
= bits (inst1
, 0, 3);
4527 if (tbl_reg
== 0x0f)
4528 table
= pc
+ 4; /* Regcache copy of PC isn't right yet. */
4530 table
= get_frame_register_unsigned (frame
, tbl_reg
);
4532 offset
= get_frame_register_unsigned (frame
, bits (inst2
, 0, 3));
4533 length
= 2 * get_frame_memory_unsigned (frame
, table
+ offset
, 1);
4534 nextpc
= pc_val
+ length
;
4536 else if ((inst1
& 0xfff0) == 0xe8d0 && (inst2
& 0xfff0) == 0xf010)
4539 CORE_ADDR tbl_reg
, table
, offset
, length
;
4541 tbl_reg
= bits (inst1
, 0, 3);
4542 if (tbl_reg
== 0x0f)
4543 table
= pc
+ 4; /* Regcache copy of PC isn't right yet. */
4545 table
= get_frame_register_unsigned (frame
, tbl_reg
);
4547 offset
= 2 * get_frame_register_unsigned (frame
, bits (inst2
, 0, 3));
4548 length
= 2 * get_frame_memory_unsigned (frame
, table
+ offset
, 2);
4549 nextpc
= pc_val
+ length
;
4552 else if ((inst1
& 0xff00) == 0x4700) /* bx REG, blx REG */
4554 if (bits (inst1
, 3, 6) == 0x0f)
4557 nextpc
= get_frame_register_unsigned (frame
, bits (inst1
, 3, 6));
4559 else if ((inst1
& 0xff87) == 0x4687) /* mov pc, REG */
4561 if (bits (inst1
, 3, 6) == 0x0f)
4564 nextpc
= get_frame_register_unsigned (frame
, bits (inst1
, 3, 6));
4566 nextpc
= MAKE_THUMB_ADDR (nextpc
);
4568 else if ((inst1
& 0xf500) == 0xb100)
4571 int imm
= (bit (inst1
, 9) << 6) + (bits (inst1
, 3, 7) << 1);
4572 ULONGEST reg
= get_frame_register_unsigned (frame
, bits (inst1
, 0, 2));
4574 if (bit (inst1
, 11) && reg
!= 0)
4575 nextpc
= pc_val
+ imm
;
4576 else if (!bit (inst1
, 11) && reg
== 0)
4577 nextpc
= pc_val
+ imm
;
4582 /* Get the raw next address. PC is the current program counter, in
4583 FRAME, which is assumed to be executing in ARM mode.
4585 The value returned has the execution state of the next instruction
4586 encoded in it. Use IS_THUMB_ADDR () to see whether the instruction is
4587 in Thumb-State, and gdbarch_addr_bits_remove () to get the plain memory
4591 arm_get_next_pc_raw (struct frame_info
*frame
, CORE_ADDR pc
)
4593 struct gdbarch
*gdbarch
= get_frame_arch (frame
);
4594 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
4595 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
4596 unsigned long pc_val
;
4597 unsigned long this_instr
;
4598 unsigned long status
;
4601 pc_val
= (unsigned long) pc
;
4602 this_instr
= read_memory_unsigned_integer (pc
, 4, byte_order_for_code
);
4604 status
= get_frame_register_unsigned (frame
, ARM_PS_REGNUM
);
4605 nextpc
= (CORE_ADDR
) (pc_val
+ 4); /* Default case */
4607 if (bits (this_instr
, 28, 31) == INST_NV
)
4608 switch (bits (this_instr
, 24, 27))
4613 /* Branch with Link and change to Thumb. */
4614 nextpc
= BranchDest (pc
, this_instr
);
4615 nextpc
|= bit (this_instr
, 24) << 1;
4616 nextpc
= MAKE_THUMB_ADDR (nextpc
);
4622 /* Coprocessor register transfer. */
4623 if (bits (this_instr
, 12, 15) == 15)
4624 error (_("Invalid update to pc in instruction"));
4627 else if (condition_true (bits (this_instr
, 28, 31), status
))
4629 switch (bits (this_instr
, 24, 27))
4632 case 0x1: /* data processing */
4636 unsigned long operand1
, operand2
, result
= 0;
4640 if (bits (this_instr
, 12, 15) != 15)
4643 if (bits (this_instr
, 22, 25) == 0
4644 && bits (this_instr
, 4, 7) == 9) /* multiply */
4645 error (_("Invalid update to pc in instruction"));
4647 /* BX <reg>, BLX <reg> */
4648 if (bits (this_instr
, 4, 27) == 0x12fff1
4649 || bits (this_instr
, 4, 27) == 0x12fff3)
4651 rn
= bits (this_instr
, 0, 3);
4652 nextpc
= ((rn
== ARM_PC_REGNUM
)
4654 : get_frame_register_unsigned (frame
, rn
));
4659 /* Multiply into PC. */
4660 c
= (status
& FLAG_C
) ? 1 : 0;
4661 rn
= bits (this_instr
, 16, 19);
4662 operand1
= ((rn
== ARM_PC_REGNUM
)
4664 : get_frame_register_unsigned (frame
, rn
));
4666 if (bit (this_instr
, 25))
4668 unsigned long immval
= bits (this_instr
, 0, 7);
4669 unsigned long rotate
= 2 * bits (this_instr
, 8, 11);
4670 operand2
= ((immval
>> rotate
) | (immval
<< (32 - rotate
)))
4673 else /* operand 2 is a shifted register. */
4674 operand2
= shifted_reg_val (frame
, this_instr
, c
,
4677 switch (bits (this_instr
, 21, 24))
4680 result
= operand1
& operand2
;
4684 result
= operand1
^ operand2
;
4688 result
= operand1
- operand2
;
4692 result
= operand2
- operand1
;
4696 result
= operand1
+ operand2
;
4700 result
= operand1
+ operand2
+ c
;
4704 result
= operand1
- operand2
+ c
;
4708 result
= operand2
- operand1
+ c
;
4714 case 0xb: /* tst, teq, cmp, cmn */
4715 result
= (unsigned long) nextpc
;
4719 result
= operand1
| operand2
;
4723 /* Always step into a function. */
4728 result
= operand1
& ~operand2
;
4736 /* In 26-bit APCS the bottom two bits of the result are
4737 ignored, and we always end up in ARM state. */
4739 nextpc
= arm_addr_bits_remove (gdbarch
, result
);
4747 case 0x5: /* data transfer */
4750 if (bit (this_instr
, 20))
4753 if (bits (this_instr
, 12, 15) == 15)
4759 if (bit (this_instr
, 22))
4760 error (_("Invalid update to pc in instruction"));
4762 /* byte write to PC */
4763 rn
= bits (this_instr
, 16, 19);
4764 base
= ((rn
== ARM_PC_REGNUM
)
4766 : get_frame_register_unsigned (frame
, rn
));
4768 if (bit (this_instr
, 24))
4771 int c
= (status
& FLAG_C
) ? 1 : 0;
4772 unsigned long offset
=
4773 (bit (this_instr
, 25)
4774 ? shifted_reg_val (frame
, this_instr
, c
, pc_val
, status
)
4775 : bits (this_instr
, 0, 11));
4777 if (bit (this_instr
, 23))
4783 (CORE_ADDR
) read_memory_unsigned_integer ((CORE_ADDR
) base
,
4790 case 0x9: /* block transfer */
4791 if (bit (this_instr
, 20))
4794 if (bit (this_instr
, 15))
4798 unsigned long rn_val
4799 = get_frame_register_unsigned (frame
,
4800 bits (this_instr
, 16, 19));
4802 if (bit (this_instr
, 23))
4805 unsigned long reglist
= bits (this_instr
, 0, 14);
4806 offset
= bitcount (reglist
) * 4;
4807 if (bit (this_instr
, 24)) /* pre */
4810 else if (bit (this_instr
, 24))
4814 (CORE_ADDR
) read_memory_unsigned_integer ((CORE_ADDR
)
4821 case 0xb: /* branch & link */
4822 case 0xa: /* branch */
4824 nextpc
= BranchDest (pc
, this_instr
);
4830 case 0xe: /* coproc ops */
4834 struct gdbarch_tdep
*tdep
;
4835 tdep
= gdbarch_tdep (gdbarch
);
4837 if (tdep
->syscall_next_pc
!= NULL
)
4838 nextpc
= tdep
->syscall_next_pc (frame
);
4844 fprintf_filtered (gdb_stderr
, _("Bad bit-field extraction\n"));
4852 /* Determine next PC after current instruction executes. Will call either
4853 arm_get_next_pc_raw or thumb_get_next_pc_raw. Error out if infinite
4854 loop is detected. */
4857 arm_get_next_pc (struct frame_info
*frame
, CORE_ADDR pc
)
4861 if (arm_frame_is_thumb (frame
))
4863 nextpc
= thumb_get_next_pc_raw (frame
, pc
);
4864 if (nextpc
== MAKE_THUMB_ADDR (pc
))
4865 error (_("Infinite loop detected"));
4869 nextpc
= arm_get_next_pc_raw (frame
, pc
);
4871 error (_("Infinite loop detected"));
4877 /* Like insert_single_step_breakpoint, but make sure we use a breakpoint
4878 of the appropriate mode (as encoded in the PC value), even if this
4879 differs from what would be expected according to the symbol tables. */
4882 arm_insert_single_step_breakpoint (struct gdbarch
*gdbarch
,
4883 struct address_space
*aspace
,
4886 struct cleanup
*old_chain
4887 = make_cleanup_restore_integer (&arm_override_mode
);
4889 arm_override_mode
= IS_THUMB_ADDR (pc
);
4890 pc
= gdbarch_addr_bits_remove (gdbarch
, pc
);
4892 insert_single_step_breakpoint (gdbarch
, aspace
, pc
);
4894 do_cleanups (old_chain
);
4897 /* single_step() is called just before we want to resume the inferior,
4898 if we want to single-step it but there is no hardware or kernel
4899 single-step support. We find the target of the coming instruction
4900 and breakpoint it. */
4903 arm_software_single_step (struct frame_info
*frame
)
4905 struct gdbarch
*gdbarch
= get_frame_arch (frame
);
4906 struct address_space
*aspace
= get_frame_address_space (frame
);
4907 CORE_ADDR next_pc
= arm_get_next_pc (frame
, get_frame_pc (frame
));
4909 arm_insert_single_step_breakpoint (gdbarch
, aspace
, next_pc
);
4914 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
4915 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
4916 NULL if an error occurs. BUF is freed. */
4919 extend_buffer_earlier (gdb_byte
*buf
, CORE_ADDR endaddr
,
4920 int old_len
, int new_len
)
4922 gdb_byte
*new_buf
, *middle
;
4923 int bytes_to_read
= new_len
- old_len
;
4925 new_buf
= xmalloc (new_len
);
4926 memcpy (new_buf
+ bytes_to_read
, buf
, old_len
);
4928 if (target_read_memory (endaddr
- new_len
, new_buf
, bytes_to_read
) != 0)
4936 /* An IT block is at most the 2-byte IT instruction followed by
4937 four 4-byte instructions. The furthest back we must search to
4938 find an IT block that affects the current instruction is thus
4939 2 + 3 * 4 == 14 bytes. */
4940 #define MAX_IT_BLOCK_PREFIX 14
4942 /* Use a quick scan if there are more than this many bytes of
4944 #define IT_SCAN_THRESHOLD 32
4946 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
4947 A breakpoint in an IT block may not be hit, depending on the
4950 arm_adjust_breakpoint_address (struct gdbarch
*gdbarch
, CORE_ADDR bpaddr
)
4954 CORE_ADDR boundary
, func_start
;
4955 int buf_len
, buf2_len
;
4956 enum bfd_endian order
= gdbarch_byte_order_for_code (gdbarch
);
4957 int i
, any
, last_it
, last_it_count
;
4959 /* If we are using BKPT breakpoints, none of this is necessary. */
4960 if (gdbarch_tdep (gdbarch
)->thumb2_breakpoint
== NULL
)
4963 /* ARM mode does not have this problem. */
4964 if (!arm_pc_is_thumb (gdbarch
, bpaddr
))
4967 /* We are setting a breakpoint in Thumb code that could potentially
4968 contain an IT block. The first step is to find how much Thumb
4969 code there is; we do not need to read outside of known Thumb
4971 map_type
= arm_find_mapping_symbol (bpaddr
, &boundary
);
4973 /* Thumb-2 code must have mapping symbols to have a chance. */
4976 bpaddr
= gdbarch_addr_bits_remove (gdbarch
, bpaddr
);
4978 if (find_pc_partial_function (bpaddr
, NULL
, &func_start
, NULL
)
4979 && func_start
> boundary
)
4980 boundary
= func_start
;
4982 /* Search for a candidate IT instruction. We have to do some fancy
4983 footwork to distinguish a real IT instruction from the second
4984 half of a 32-bit instruction, but there is no need for that if
4985 there's no candidate. */
4986 buf_len
= min (bpaddr
- boundary
, MAX_IT_BLOCK_PREFIX
);
4988 /* No room for an IT instruction. */
4991 buf
= xmalloc (buf_len
);
4992 if (target_read_memory (bpaddr
- buf_len
, buf
, buf_len
) != 0)
4995 for (i
= 0; i
< buf_len
; i
+= 2)
4997 unsigned short inst1
= extract_unsigned_integer (&buf
[i
], 2, order
);
4998 if ((inst1
& 0xff00) == 0xbf00 && (inst1
& 0x000f) != 0)
5010 /* OK, the code bytes before this instruction contain at least one
5011 halfword which resembles an IT instruction. We know that it's
5012 Thumb code, but there are still two possibilities. Either the
5013 halfword really is an IT instruction, or it is the second half of
5014 a 32-bit Thumb instruction. The only way we can tell is to
5015 scan forwards from a known instruction boundary. */
5016 if (bpaddr
- boundary
> IT_SCAN_THRESHOLD
)
5020 /* There's a lot of code before this instruction. Start with an
5021 optimistic search; it's easy to recognize halfwords that can
5022 not be the start of a 32-bit instruction, and use that to
5023 lock on to the instruction boundaries. */
5024 buf
= extend_buffer_earlier (buf
, bpaddr
, buf_len
, IT_SCAN_THRESHOLD
);
5027 buf_len
= IT_SCAN_THRESHOLD
;
5030 for (i
= 0; i
< buf_len
- sizeof (buf
) && ! definite
; i
+= 2)
5032 unsigned short inst1
= extract_unsigned_integer (&buf
[i
], 2, order
);
5033 if (thumb_insn_size (inst1
) == 2)
5040 /* At this point, if DEFINITE, BUF[I] is the first place we
5041 are sure that we know the instruction boundaries, and it is far
5042 enough from BPADDR that we could not miss an IT instruction
5043 affecting BPADDR. If ! DEFINITE, give up - start from a
5047 buf
= extend_buffer_earlier (buf
, bpaddr
, buf_len
,
5051 buf_len
= bpaddr
- boundary
;
5057 buf
= extend_buffer_earlier (buf
, bpaddr
, buf_len
, bpaddr
- boundary
);
5060 buf_len
= bpaddr
- boundary
;
5064 /* Scan forwards. Find the last IT instruction before BPADDR. */
5069 unsigned short inst1
= extract_unsigned_integer (&buf
[i
], 2, order
);
5071 if ((inst1
& 0xff00) == 0xbf00 && (inst1
& 0x000f) != 0)
5076 else if (inst1
& 0x0002)
5078 else if (inst1
& 0x0004)
5083 i
+= thumb_insn_size (inst1
);
5089 /* There wasn't really an IT instruction after all. */
5092 if (last_it_count
< 1)
5093 /* It was too far away. */
5096 /* This really is a trouble spot. Move the breakpoint to the IT
5098 return bpaddr
- buf_len
+ last_it
;
5101 /* ARM displaced stepping support.
5103 Generally ARM displaced stepping works as follows:
5105 1. When an instruction is to be single-stepped, it is first decoded by
5106 arm_process_displaced_insn (called from arm_displaced_step_copy_insn).
5107 Depending on the type of instruction, it is then copied to a scratch
5108 location, possibly in a modified form. The copy_* set of functions
5109 performs such modification, as necessary. A breakpoint is placed after
5110 the modified instruction in the scratch space to return control to GDB.
5111 Note in particular that instructions which modify the PC will no longer
5112 do so after modification.
5114 2. The instruction is single-stepped, by setting the PC to the scratch
5115 location address, and resuming. Control returns to GDB when the
5118 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
5119 function used for the current instruction. This function's job is to
5120 put the CPU/memory state back to what it would have been if the
5121 instruction had been executed unmodified in its original location. */
5123 /* NOP instruction (mov r0, r0). */
5124 #define ARM_NOP 0xe1a00000
5125 #define THUMB_NOP 0x4600
5127 /* Helper for register reads for displaced stepping. In particular, this
5128 returns the PC as it would be seen by the instruction at its original
5132 displaced_read_reg (struct regcache
*regs
, struct displaced_step_closure
*dsc
,
5136 CORE_ADDR from
= dsc
->insn_addr
;
5138 if (regno
== ARM_PC_REGNUM
)
5140 /* Compute pipeline offset:
5141 - When executing an ARM instruction, PC reads as the address of the
5142 current instruction plus 8.
5143 - When executing a Thumb instruction, PC reads as the address of the
5144 current instruction plus 4. */
5151 if (debug_displaced
)
5152 fprintf_unfiltered (gdb_stdlog
, "displaced: read pc value %.8lx\n",
5153 (unsigned long) from
);
5154 return (ULONGEST
) from
;
5158 regcache_cooked_read_unsigned (regs
, regno
, &ret
);
5159 if (debug_displaced
)
5160 fprintf_unfiltered (gdb_stdlog
, "displaced: read r%d value %.8lx\n",
5161 regno
, (unsigned long) ret
);
5167 displaced_in_arm_mode (struct regcache
*regs
)
5170 ULONGEST t_bit
= arm_psr_thumb_bit (get_regcache_arch (regs
));
5172 regcache_cooked_read_unsigned (regs
, ARM_PS_REGNUM
, &ps
);
5174 return (ps
& t_bit
) == 0;
5177 /* Write to the PC as from a branch instruction. */
5180 branch_write_pc (struct regcache
*regs
, struct displaced_step_closure
*dsc
,
5184 /* Note: If bits 0/1 are set, this branch would be unpredictable for
5185 architecture versions < 6. */
5186 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
,
5187 val
& ~(ULONGEST
) 0x3);
5189 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
,
5190 val
& ~(ULONGEST
) 0x1);
5193 /* Write to the PC as from a branch-exchange instruction. */
5196 bx_write_pc (struct regcache
*regs
, ULONGEST val
)
5199 ULONGEST t_bit
= arm_psr_thumb_bit (get_regcache_arch (regs
));
5201 regcache_cooked_read_unsigned (regs
, ARM_PS_REGNUM
, &ps
);
5205 regcache_cooked_write_unsigned (regs
, ARM_PS_REGNUM
, ps
| t_bit
);
5206 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
, val
& 0xfffffffe);
5208 else if ((val
& 2) == 0)
5210 regcache_cooked_write_unsigned (regs
, ARM_PS_REGNUM
, ps
& ~t_bit
);
5211 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
, val
);
5215 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
5216 mode, align dest to 4 bytes). */
5217 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
5218 regcache_cooked_write_unsigned (regs
, ARM_PS_REGNUM
, ps
& ~t_bit
);
5219 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
, val
& 0xfffffffc);
5223 /* Write to the PC as if from a load instruction. */
5226 load_write_pc (struct regcache
*regs
, struct displaced_step_closure
*dsc
,
5229 if (DISPLACED_STEPPING_ARCH_VERSION
>= 5)
5230 bx_write_pc (regs
, val
);
5232 branch_write_pc (regs
, dsc
, val
);
5235 /* Write to the PC as if from an ALU instruction. */
5238 alu_write_pc (struct regcache
*regs
, struct displaced_step_closure
*dsc
,
5241 if (DISPLACED_STEPPING_ARCH_VERSION
>= 7 && !dsc
->is_thumb
)
5242 bx_write_pc (regs
, val
);
5244 branch_write_pc (regs
, dsc
, val
);
5247 /* Helper for writing to registers for displaced stepping. Writing to the PC
5248 has a varying effects depending on the instruction which does the write:
5249 this is controlled by the WRITE_PC argument. */
5252 displaced_write_reg (struct regcache
*regs
, struct displaced_step_closure
*dsc
,
5253 int regno
, ULONGEST val
, enum pc_write_style write_pc
)
5255 if (regno
== ARM_PC_REGNUM
)
5257 if (debug_displaced
)
5258 fprintf_unfiltered (gdb_stdlog
, "displaced: writing pc %.8lx\n",
5259 (unsigned long) val
);
5262 case BRANCH_WRITE_PC
:
5263 branch_write_pc (regs
, dsc
, val
);
5267 bx_write_pc (regs
, val
);
5271 load_write_pc (regs
, dsc
, val
);
5275 alu_write_pc (regs
, dsc
, val
);
5278 case CANNOT_WRITE_PC
:
5279 warning (_("Instruction wrote to PC in an unexpected way when "
5280 "single-stepping"));
5284 internal_error (__FILE__
, __LINE__
,
5285 _("Invalid argument to displaced_write_reg"));
5288 dsc
->wrote_to_pc
= 1;
5292 if (debug_displaced
)
5293 fprintf_unfiltered (gdb_stdlog
, "displaced: writing r%d value %.8lx\n",
5294 regno
, (unsigned long) val
);
5295 regcache_cooked_write_unsigned (regs
, regno
, val
);
5299 /* This function is used to concisely determine if an instruction INSN
5300 references PC. Register fields of interest in INSN should have the
5301 corresponding fields of BITMASK set to 0b1111. The function
5302 returns return 1 if any of these fields in INSN reference the PC
5303 (also 0b1111, r15), else it returns 0. */
static int
insn_references_pc (uint32_t insn, uint32_t bitmask)
{
  uint32_t lowbit = 1;

  while (bitmask != 0)
    {
      uint32_t mask;

      /* Advance LOWBIT to the least significant set bit of BITMASK.  */
      for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
	;

      if (!lowbit)
	return 0;

      /* MASK covers the 4-bit register field anchored at LOWBIT.  */
      mask = lowbit * 0xf;

      if ((insn & mask) == mask)
	return 1;

      bitmask &= ~mask;
    }

  return 0;
}
5331 /* The simplest copy function. Many instructions have the same effect no
5332 matter what address they are executed at: in those cases, use this. */
5335 arm_copy_unmodified (struct gdbarch
*gdbarch
, uint32_t insn
,
5336 const char *iname
, struct displaced_step_closure
*dsc
)
5338 if (debug_displaced
)
5339 fprintf_unfiltered (gdb_stdlog
, "displaced: copying insn %.8lx, "
5340 "opcode/class '%s' unmodified\n", (unsigned long) insn
,
5343 dsc
->modinsn
[0] = insn
;
5349 thumb_copy_unmodified_32bit (struct gdbarch
*gdbarch
, uint16_t insn1
,
5350 uint16_t insn2
, const char *iname
,
5351 struct displaced_step_closure
*dsc
)
5353 if (debug_displaced
)
5354 fprintf_unfiltered (gdb_stdlog
, "displaced: copying insn %.4x %.4x, "
5355 "opcode/class '%s' unmodified\n", insn1
, insn2
,
5358 dsc
->modinsn
[0] = insn1
;
5359 dsc
->modinsn
[1] = insn2
;
5365 /* Copy 16-bit Thumb(Thumb and 16-bit Thumb-2) instruction without any
5368 thumb_copy_unmodified_16bit (struct gdbarch
*gdbarch
, unsigned int insn
,
5370 struct displaced_step_closure
*dsc
)
5372 if (debug_displaced
)
5373 fprintf_unfiltered (gdb_stdlog
, "displaced: copying insn %.4x, "
5374 "opcode/class '%s' unmodified\n", insn
,
5377 dsc
->modinsn
[0] = insn
;
5382 /* Preload instructions with immediate offset. */
5385 cleanup_preload (struct gdbarch
*gdbarch
,
5386 struct regcache
*regs
, struct displaced_step_closure
*dsc
)
5388 displaced_write_reg (regs
, dsc
, 0, dsc
->tmp
[0], CANNOT_WRITE_PC
);
5389 if (!dsc
->u
.preload
.immed
)
5390 displaced_write_reg (regs
, dsc
, 1, dsc
->tmp
[1], CANNOT_WRITE_PC
);
5394 install_preload (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5395 struct displaced_step_closure
*dsc
, unsigned int rn
)
5398 /* Preload instructions:
5400 {pli/pld} [rn, #+/-imm]
5402 {pli/pld} [r0, #+/-imm]. */
5404 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5405 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
5406 displaced_write_reg (regs
, dsc
, 0, rn_val
, CANNOT_WRITE_PC
);
5407 dsc
->u
.preload
.immed
= 1;
5409 dsc
->cleanup
= &cleanup_preload
;
5413 arm_copy_preload (struct gdbarch
*gdbarch
, uint32_t insn
, struct regcache
*regs
,
5414 struct displaced_step_closure
*dsc
)
5416 unsigned int rn
= bits (insn
, 16, 19);
5418 if (!insn_references_pc (insn
, 0x000f0000ul
))
5419 return arm_copy_unmodified (gdbarch
, insn
, "preload", dsc
);
5421 if (debug_displaced
)
5422 fprintf_unfiltered (gdb_stdlog
, "displaced: copying preload insn %.8lx\n",
5423 (unsigned long) insn
);
5425 dsc
->modinsn
[0] = insn
& 0xfff0ffff;
5427 install_preload (gdbarch
, regs
, dsc
, rn
);
5433 thumb2_copy_preload (struct gdbarch
*gdbarch
, uint16_t insn1
, uint16_t insn2
,
5434 struct regcache
*regs
, struct displaced_step_closure
*dsc
)
5436 unsigned int rn
= bits (insn1
, 0, 3);
5437 unsigned int u_bit
= bit (insn1
, 7);
5438 int imm12
= bits (insn2
, 0, 11);
5441 if (rn
!= ARM_PC_REGNUM
)
5442 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
, "preload", dsc
);
5444 /* PC is only allowed to use in PLI (immediate,literal) Encoding T3, and
5445 PLD (literal) Encoding T1. */
5446 if (debug_displaced
)
5447 fprintf_unfiltered (gdb_stdlog
,
5448 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
5449 (unsigned int) dsc
->insn_addr
, u_bit
? '+' : '-',
5455 /* Rewrite instruction {pli/pld} PC imm12 into:
5456 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
5460 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
5462 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5463 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
5465 pc_val
= displaced_read_reg (regs
, dsc
, ARM_PC_REGNUM
);
5467 displaced_write_reg (regs
, dsc
, 0, pc_val
, CANNOT_WRITE_PC
);
5468 displaced_write_reg (regs
, dsc
, 1, imm12
, CANNOT_WRITE_PC
);
5469 dsc
->u
.preload
.immed
= 0;
5471 /* {pli/pld} [r0, r1] */
5472 dsc
->modinsn
[0] = insn1
& 0xfff0;
5473 dsc
->modinsn
[1] = 0xf001;
5476 dsc
->cleanup
= &cleanup_preload
;
5480 /* Preload instructions with register offset. */
5483 install_preload_reg(struct gdbarch
*gdbarch
, struct regcache
*regs
,
5484 struct displaced_step_closure
*dsc
, unsigned int rn
,
5487 ULONGEST rn_val
, rm_val
;
5489 /* Preload register-offset instructions:
5491 {pli/pld} [rn, rm {, shift}]
5493 {pli/pld} [r0, r1 {, shift}]. */
5495 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5496 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
5497 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
5498 rm_val
= displaced_read_reg (regs
, dsc
, rm
);
5499 displaced_write_reg (regs
, dsc
, 0, rn_val
, CANNOT_WRITE_PC
);
5500 displaced_write_reg (regs
, dsc
, 1, rm_val
, CANNOT_WRITE_PC
);
5501 dsc
->u
.preload
.immed
= 0;
5503 dsc
->cleanup
= &cleanup_preload
;
5507 arm_copy_preload_reg (struct gdbarch
*gdbarch
, uint32_t insn
,
5508 struct regcache
*regs
,
5509 struct displaced_step_closure
*dsc
)
5511 unsigned int rn
= bits (insn
, 16, 19);
5512 unsigned int rm
= bits (insn
, 0, 3);
5515 if (!insn_references_pc (insn
, 0x000f000ful
))
5516 return arm_copy_unmodified (gdbarch
, insn
, "preload reg", dsc
);
5518 if (debug_displaced
)
5519 fprintf_unfiltered (gdb_stdlog
, "displaced: copying preload insn %.8lx\n",
5520 (unsigned long) insn
);
5522 dsc
->modinsn
[0] = (insn
& 0xfff0fff0) | 0x1;
5524 install_preload_reg (gdbarch
, regs
, dsc
, rn
, rm
);
5528 /* Copy/cleanup coprocessor load and store instructions. */
5531 cleanup_copro_load_store (struct gdbarch
*gdbarch
,
5532 struct regcache
*regs
,
5533 struct displaced_step_closure
*dsc
)
5535 ULONGEST rn_val
= displaced_read_reg (regs
, dsc
, 0);
5537 displaced_write_reg (regs
, dsc
, 0, dsc
->tmp
[0], CANNOT_WRITE_PC
);
5539 if (dsc
->u
.ldst
.writeback
)
5540 displaced_write_reg (regs
, dsc
, dsc
->u
.ldst
.rn
, rn_val
, LOAD_WRITE_PC
);
5544 install_copro_load_store (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5545 struct displaced_step_closure
*dsc
,
5546 int writeback
, unsigned int rn
)
5550 /* Coprocessor load/store instructions:
5552 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
5554 {stc/stc2} [r0, #+/-imm].
5556 ldc/ldc2 are handled identically. */
5558 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5559 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
5560 /* PC should be 4-byte aligned. */
5561 rn_val
= rn_val
& 0xfffffffc;
5562 displaced_write_reg (regs
, dsc
, 0, rn_val
, CANNOT_WRITE_PC
);
5564 dsc
->u
.ldst
.writeback
= writeback
;
5565 dsc
->u
.ldst
.rn
= rn
;
5567 dsc
->cleanup
= &cleanup_copro_load_store
;
5571 arm_copy_copro_load_store (struct gdbarch
*gdbarch
, uint32_t insn
,
5572 struct regcache
*regs
,
5573 struct displaced_step_closure
*dsc
)
5575 unsigned int rn
= bits (insn
, 16, 19);
5577 if (!insn_references_pc (insn
, 0x000f0000ul
))
5578 return arm_copy_unmodified (gdbarch
, insn
, "copro load/store", dsc
);
5580 if (debug_displaced
)
5581 fprintf_unfiltered (gdb_stdlog
, "displaced: copying coprocessor "
5582 "load/store insn %.8lx\n", (unsigned long) insn
);
5584 dsc
->modinsn
[0] = insn
& 0xfff0ffff;
5586 install_copro_load_store (gdbarch
, regs
, dsc
, bit (insn
, 25), rn
);
5592 thumb2_copy_copro_load_store (struct gdbarch
*gdbarch
, uint16_t insn1
,
5593 uint16_t insn2
, struct regcache
*regs
,
5594 struct displaced_step_closure
*dsc
)
5596 unsigned int rn
= bits (insn1
, 0, 3);
5598 if (rn
!= ARM_PC_REGNUM
)
5599 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
5600 "copro load/store", dsc
);
5602 if (debug_displaced
)
5603 fprintf_unfiltered (gdb_stdlog
, "displaced: copying coprocessor "
5604 "load/store insn %.4x%.4x\n", insn1
, insn2
);
5606 dsc
->modinsn
[0] = insn1
& 0xfff0;
5607 dsc
->modinsn
[1] = insn2
;
5610 /* This function is called for copying instruction LDC/LDC2/VLDR, which
5611 doesn't support writeback, so pass 0. */
5612 install_copro_load_store (gdbarch
, regs
, dsc
, 0, rn
);
5617 /* Clean up branch instructions (actually perform the branch, by setting
5621 cleanup_branch (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5622 struct displaced_step_closure
*dsc
)
5624 uint32_t status
= displaced_read_reg (regs
, dsc
, ARM_PS_REGNUM
);
5625 int branch_taken
= condition_true (dsc
->u
.branch
.cond
, status
);
5626 enum pc_write_style write_pc
= dsc
->u
.branch
.exchange
5627 ? BX_WRITE_PC
: BRANCH_WRITE_PC
;
5632 if (dsc
->u
.branch
.link
)
5634 /* The value of LR should be the next insn of current one. In order
5635 not to confuse logic hanlding later insn `bx lr', if current insn mode
5636 is Thumb, the bit 0 of LR value should be set to 1. */
5637 ULONGEST next_insn_addr
= dsc
->insn_addr
+ dsc
->insn_size
;
5640 next_insn_addr
|= 0x1;
5642 displaced_write_reg (regs
, dsc
, ARM_LR_REGNUM
, next_insn_addr
,
5646 displaced_write_reg (regs
, dsc
, ARM_PC_REGNUM
, dsc
->u
.branch
.dest
, write_pc
);
5649 /* Copy B/BL/BLX instructions with immediate destinations. */
5652 install_b_bl_blx (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5653 struct displaced_step_closure
*dsc
,
5654 unsigned int cond
, int exchange
, int link
, long offset
)
5656 /* Implement "BL<cond> <label>" as:
5658 Preparation: cond <- instruction condition
5659 Insn: mov r0, r0 (nop)
5660 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
5662 B<cond> similar, but don't set r14 in cleanup. */
5664 dsc
->u
.branch
.cond
= cond
;
5665 dsc
->u
.branch
.link
= link
;
5666 dsc
->u
.branch
.exchange
= exchange
;
5668 dsc
->u
.branch
.dest
= dsc
->insn_addr
;
5669 if (link
&& exchange
)
5670 /* For BLX, offset is computed from the Align (PC, 4). */
5671 dsc
->u
.branch
.dest
= dsc
->u
.branch
.dest
& 0xfffffffc;
5674 dsc
->u
.branch
.dest
+= 4 + offset
;
5676 dsc
->u
.branch
.dest
+= 8 + offset
;
5678 dsc
->cleanup
= &cleanup_branch
;
5681 arm_copy_b_bl_blx (struct gdbarch
*gdbarch
, uint32_t insn
,
5682 struct regcache
*regs
, struct displaced_step_closure
*dsc
)
5684 unsigned int cond
= bits (insn
, 28, 31);
5685 int exchange
= (cond
== 0xf);
5686 int link
= exchange
|| bit (insn
, 24);
5689 if (debug_displaced
)
5690 fprintf_unfiltered (gdb_stdlog
, "displaced: copying %s immediate insn "
5691 "%.8lx\n", (exchange
) ? "blx" : (link
) ? "bl" : "b",
5692 (unsigned long) insn
);
5694 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
5695 then arrange the switch into Thumb mode. */
5696 offset
= (bits (insn
, 0, 23) << 2) | (bit (insn
, 24) << 1) | 1;
5698 offset
= bits (insn
, 0, 23) << 2;
5700 if (bit (offset
, 25))
5701 offset
= offset
| ~0x3ffffff;
5703 dsc
->modinsn
[0] = ARM_NOP
;
5705 install_b_bl_blx (gdbarch
, regs
, dsc
, cond
, exchange
, link
, offset
);
5710 thumb2_copy_b_bl_blx (struct gdbarch
*gdbarch
, uint16_t insn1
,
5711 uint16_t insn2
, struct regcache
*regs
,
5712 struct displaced_step_closure
*dsc
)
5714 int link
= bit (insn2
, 14);
5715 int exchange
= link
&& !bit (insn2
, 12);
5718 int j1
= bit (insn2
, 13);
5719 int j2
= bit (insn2
, 11);
5720 int s
= sbits (insn1
, 10, 10);
5721 int i1
= !(j1
^ bit (insn1
, 10));
5722 int i2
= !(j2
^ bit (insn1
, 10));
5724 if (!link
&& !exchange
) /* B */
5726 offset
= (bits (insn2
, 0, 10) << 1);
5727 if (bit (insn2
, 12)) /* Encoding T4 */
5729 offset
|= (bits (insn1
, 0, 9) << 12)
5735 else /* Encoding T3 */
5737 offset
|= (bits (insn1
, 0, 5) << 12)
5741 cond
= bits (insn1
, 6, 9);
5746 offset
= (bits (insn1
, 0, 9) << 12);
5747 offset
|= ((i2
<< 22) | (i1
<< 23) | (s
<< 24));
5748 offset
|= exchange
?
5749 (bits (insn2
, 1, 10) << 2) : (bits (insn2
, 0, 10) << 1);
5752 if (debug_displaced
)
5753 fprintf_unfiltered (gdb_stdlog
, "displaced: copying %s insn "
5754 "%.4x %.4x with offset %.8lx\n",
5755 link
? (exchange
) ? "blx" : "bl" : "b",
5756 insn1
, insn2
, offset
);
5758 dsc
->modinsn
[0] = THUMB_NOP
;
5760 install_b_bl_blx (gdbarch
, regs
, dsc
, cond
, exchange
, link
, offset
);
5764 /* Copy B Thumb instructions. */
5766 thumb_copy_b (struct gdbarch
*gdbarch
, unsigned short insn
,
5767 struct displaced_step_closure
*dsc
)
5769 unsigned int cond
= 0;
5771 unsigned short bit_12_15
= bits (insn
, 12, 15);
5772 CORE_ADDR from
= dsc
->insn_addr
;
5774 if (bit_12_15
== 0xd)
5776 /* offset = SignExtend (imm8:0, 32) */
5777 offset
= sbits ((insn
<< 1), 0, 8);
5778 cond
= bits (insn
, 8, 11);
5780 else if (bit_12_15
== 0xe) /* Encoding T2 */
5782 offset
= sbits ((insn
<< 1), 0, 11);
5786 if (debug_displaced
)
5787 fprintf_unfiltered (gdb_stdlog
,
5788 "displaced: copying b immediate insn %.4x "
5789 "with offset %d\n", insn
, offset
);
5791 dsc
->u
.branch
.cond
= cond
;
5792 dsc
->u
.branch
.link
= 0;
5793 dsc
->u
.branch
.exchange
= 0;
5794 dsc
->u
.branch
.dest
= from
+ 4 + offset
;
5796 dsc
->modinsn
[0] = THUMB_NOP
;
5798 dsc
->cleanup
= &cleanup_branch
;
5803 /* Copy BX/BLX with register-specified destinations. */
5806 install_bx_blx_reg (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5807 struct displaced_step_closure
*dsc
, int link
,
5808 unsigned int cond
, unsigned int rm
)
5810 /* Implement {BX,BLX}<cond> <reg>" as:
5812 Preparation: cond <- instruction condition
5813 Insn: mov r0, r0 (nop)
5814 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5816 Don't set r14 in cleanup for BX. */
5818 dsc
->u
.branch
.dest
= displaced_read_reg (regs
, dsc
, rm
);
5820 dsc
->u
.branch
.cond
= cond
;
5821 dsc
->u
.branch
.link
= link
;
5823 dsc
->u
.branch
.exchange
= 1;
5825 dsc
->cleanup
= &cleanup_branch
;
5829 arm_copy_bx_blx_reg (struct gdbarch
*gdbarch
, uint32_t insn
,
5830 struct regcache
*regs
, struct displaced_step_closure
*dsc
)
5832 unsigned int cond
= bits (insn
, 28, 31);
5835 int link
= bit (insn
, 5);
5836 unsigned int rm
= bits (insn
, 0, 3);
5838 if (debug_displaced
)
5839 fprintf_unfiltered (gdb_stdlog
, "displaced: copying insn %.8lx",
5840 (unsigned long) insn
);
5842 dsc
->modinsn
[0] = ARM_NOP
;
5844 install_bx_blx_reg (gdbarch
, regs
, dsc
, link
, cond
, rm
);
5849 thumb_copy_bx_blx_reg (struct gdbarch
*gdbarch
, uint16_t insn
,
5850 struct regcache
*regs
,
5851 struct displaced_step_closure
*dsc
)
5853 int link
= bit (insn
, 7);
5854 unsigned int rm
= bits (insn
, 3, 6);
5856 if (debug_displaced
)
5857 fprintf_unfiltered (gdb_stdlog
, "displaced: copying insn %.4x",
5858 (unsigned short) insn
);
5860 dsc
->modinsn
[0] = THUMB_NOP
;
5862 install_bx_blx_reg (gdbarch
, regs
, dsc
, link
, INST_AL
, rm
);
5868 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
5871 cleanup_alu_imm (struct gdbarch
*gdbarch
,
5872 struct regcache
*regs
, struct displaced_step_closure
*dsc
)
5874 ULONGEST rd_val
= displaced_read_reg (regs
, dsc
, 0);
5875 displaced_write_reg (regs
, dsc
, 0, dsc
->tmp
[0], CANNOT_WRITE_PC
);
5876 displaced_write_reg (regs
, dsc
, 1, dsc
->tmp
[1], CANNOT_WRITE_PC
);
5877 displaced_write_reg (regs
, dsc
, dsc
->rd
, rd_val
, ALU_WRITE_PC
);
5881 arm_copy_alu_imm (struct gdbarch
*gdbarch
, uint32_t insn
, struct regcache
*regs
,
5882 struct displaced_step_closure
*dsc
)
5884 unsigned int rn
= bits (insn
, 16, 19);
5885 unsigned int rd
= bits (insn
, 12, 15);
5886 unsigned int op
= bits (insn
, 21, 24);
5887 int is_mov
= (op
== 0xd);
5888 ULONGEST rd_val
, rn_val
;
5890 if (!insn_references_pc (insn
, 0x000ff000ul
))
5891 return arm_copy_unmodified (gdbarch
, insn
, "ALU immediate", dsc
);
5893 if (debug_displaced
)
5894 fprintf_unfiltered (gdb_stdlog
, "displaced: copying immediate %s insn "
5895 "%.8lx\n", is_mov
? "move" : "ALU",
5896 (unsigned long) insn
);
5898 /* Instruction is of form:
5900 <op><cond> rd, [rn,] #imm
5904 Preparation: tmp1, tmp2 <- r0, r1;
5906 Insn: <op><cond> r0, r1, #imm
5907 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5910 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5911 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
5912 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
5913 rd_val
= displaced_read_reg (regs
, dsc
, rd
);
5914 displaced_write_reg (regs
, dsc
, 0, rd_val
, CANNOT_WRITE_PC
);
5915 displaced_write_reg (regs
, dsc
, 1, rn_val
, CANNOT_WRITE_PC
);
5919 dsc
->modinsn
[0] = insn
& 0xfff00fff;
5921 dsc
->modinsn
[0] = (insn
& 0xfff00fff) | 0x10000;
5923 dsc
->cleanup
= &cleanup_alu_imm
;
5929 thumb2_copy_alu_imm (struct gdbarch
*gdbarch
, uint16_t insn1
,
5930 uint16_t insn2
, struct regcache
*regs
,
5931 struct displaced_step_closure
*dsc
)
5933 unsigned int op
= bits (insn1
, 5, 8);
5934 unsigned int rn
, rm
, rd
;
5935 ULONGEST rd_val
, rn_val
;
5937 rn
= bits (insn1
, 0, 3); /* Rn */
5938 rm
= bits (insn2
, 0, 3); /* Rm */
5939 rd
= bits (insn2
, 8, 11); /* Rd */
5941 /* This routine is only called for instruction MOV. */
5942 gdb_assert (op
== 0x2 && rn
== 0xf);
5944 if (rm
!= ARM_PC_REGNUM
&& rd
!= ARM_PC_REGNUM
)
5945 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
, "ALU imm", dsc
);
5947 if (debug_displaced
)
5948 fprintf_unfiltered (gdb_stdlog
, "displaced: copying reg %s insn %.4x%.4x\n",
5949 "ALU", insn1
, insn2
);
5951 /* Instruction is of form:
5953 <op><cond> rd, [rn,] #imm
5957 Preparation: tmp1, tmp2 <- r0, r1;
5959 Insn: <op><cond> r0, r1, #imm
5960 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5963 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5964 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
5965 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
5966 rd_val
= displaced_read_reg (regs
, dsc
, rd
);
5967 displaced_write_reg (regs
, dsc
, 0, rd_val
, CANNOT_WRITE_PC
);
5968 displaced_write_reg (regs
, dsc
, 1, rn_val
, CANNOT_WRITE_PC
);
5971 dsc
->modinsn
[0] = insn1
;
5972 dsc
->modinsn
[1] = ((insn2
& 0xf0f0) | 0x1);
5975 dsc
->cleanup
= &cleanup_alu_imm
;
5980 /* Copy/cleanup arithmetic/logic insns with register RHS. */
5983 cleanup_alu_reg (struct gdbarch
*gdbarch
,
5984 struct regcache
*regs
, struct displaced_step_closure
*dsc
)
5989 rd_val
= displaced_read_reg (regs
, dsc
, 0);
5991 for (i
= 0; i
< 3; i
++)
5992 displaced_write_reg (regs
, dsc
, i
, dsc
->tmp
[i
], CANNOT_WRITE_PC
);
5994 displaced_write_reg (regs
, dsc
, dsc
->rd
, rd_val
, ALU_WRITE_PC
);
5998 install_alu_reg (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5999 struct displaced_step_closure
*dsc
,
6000 unsigned int rd
, unsigned int rn
, unsigned int rm
)
6002 ULONGEST rd_val
, rn_val
, rm_val
;
6004 /* Instruction is of form:
6006 <op><cond> rd, [rn,] rm [, <shift>]
6010 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
6011 r0, r1, r2 <- rd, rn, rm
6012 Insn: <op><cond> r0, r1, r2 [, <shift>]
6013 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
6016 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
6017 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
6018 dsc
->tmp
[2] = displaced_read_reg (regs
, dsc
, 2);
6019 rd_val
= displaced_read_reg (regs
, dsc
, rd
);
6020 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
6021 rm_val
= displaced_read_reg (regs
, dsc
, rm
);
6022 displaced_write_reg (regs
, dsc
, 0, rd_val
, CANNOT_WRITE_PC
);
6023 displaced_write_reg (regs
, dsc
, 1, rn_val
, CANNOT_WRITE_PC
);
6024 displaced_write_reg (regs
, dsc
, 2, rm_val
, CANNOT_WRITE_PC
);
6027 dsc
->cleanup
= &cleanup_alu_reg
;
6031 arm_copy_alu_reg (struct gdbarch
*gdbarch
, uint32_t insn
, struct regcache
*regs
,
6032 struct displaced_step_closure
*dsc
)
6034 unsigned int op
= bits (insn
, 21, 24);
6035 int is_mov
= (op
== 0xd);
6037 if (!insn_references_pc (insn
, 0x000ff00ful
))
6038 return arm_copy_unmodified (gdbarch
, insn
, "ALU reg", dsc
);
6040 if (debug_displaced
)
6041 fprintf_unfiltered (gdb_stdlog
, "displaced: copying reg %s insn %.8lx\n",
6042 is_mov
? "move" : "ALU", (unsigned long) insn
);
6045 dsc
->modinsn
[0] = (insn
& 0xfff00ff0) | 0x2;
6047 dsc
->modinsn
[0] = (insn
& 0xfff00ff0) | 0x10002;
6049 install_alu_reg (gdbarch
, regs
, dsc
, bits (insn
, 12, 15), bits (insn
, 16, 19),
6055 thumb_copy_alu_reg (struct gdbarch
*gdbarch
, uint16_t insn
,
6056 struct regcache
*regs
,
6057 struct displaced_step_closure
*dsc
)
6059 unsigned rn
, rm
, rd
;
6061 rd
= bits (insn
, 3, 6);
6062 rn
= (bit (insn
, 7) << 3) | bits (insn
, 0, 2);
6065 if (rd
!= ARM_PC_REGNUM
&& rn
!= ARM_PC_REGNUM
)
6066 return thumb_copy_unmodified_16bit (gdbarch
, insn
, "ALU reg", dsc
);
6068 if (debug_displaced
)
6069 fprintf_unfiltered (gdb_stdlog
, "displaced: copying reg %s insn %.4x\n",
6070 "ALU", (unsigned short) insn
);
6072 dsc
->modinsn
[0] = ((insn
& 0xff00) | 0x08);
6074 install_alu_reg (gdbarch
, regs
, dsc
, rd
, rn
, rm
);
6079 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
6082 cleanup_alu_shifted_reg (struct gdbarch
*gdbarch
,
6083 struct regcache
*regs
,
6084 struct displaced_step_closure
*dsc
)
6086 ULONGEST rd_val
= displaced_read_reg (regs
, dsc
, 0);
6089 for (i
= 0; i
< 4; i
++)
6090 displaced_write_reg (regs
, dsc
, i
, dsc
->tmp
[i
], CANNOT_WRITE_PC
);
6092 displaced_write_reg (regs
, dsc
, dsc
->rd
, rd_val
, ALU_WRITE_PC
);
6096 install_alu_shifted_reg (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6097 struct displaced_step_closure
*dsc
,
6098 unsigned int rd
, unsigned int rn
, unsigned int rm
,
6102 ULONGEST rd_val
, rn_val
, rm_val
, rs_val
;
6104 /* Instruction is of form:
6106 <op><cond> rd, [rn,] rm, <shift> rs
6110 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
6111 r0, r1, r2, r3 <- rd, rn, rm, rs
6112 Insn: <op><cond> r0, r1, r2, <shift> r3
6114 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
6118 for (i
= 0; i
< 4; i
++)
6119 dsc
->tmp
[i
] = displaced_read_reg (regs
, dsc
, i
);
6121 rd_val
= displaced_read_reg (regs
, dsc
, rd
);
6122 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
6123 rm_val
= displaced_read_reg (regs
, dsc
, rm
);
6124 rs_val
= displaced_read_reg (regs
, dsc
, rs
);
6125 displaced_write_reg (regs
, dsc
, 0, rd_val
, CANNOT_WRITE_PC
);
6126 displaced_write_reg (regs
, dsc
, 1, rn_val
, CANNOT_WRITE_PC
);
6127 displaced_write_reg (regs
, dsc
, 2, rm_val
, CANNOT_WRITE_PC
);
6128 displaced_write_reg (regs
, dsc
, 3, rs_val
, CANNOT_WRITE_PC
);
6130 dsc
->cleanup
= &cleanup_alu_shifted_reg
;
6134 arm_copy_alu_shifted_reg (struct gdbarch
*gdbarch
, uint32_t insn
,
6135 struct regcache
*regs
,
6136 struct displaced_step_closure
*dsc
)
6138 unsigned int op
= bits (insn
, 21, 24);
6139 int is_mov
= (op
== 0xd);
6140 unsigned int rd
, rn
, rm
, rs
;
6142 if (!insn_references_pc (insn
, 0x000fff0ful
))
6143 return arm_copy_unmodified (gdbarch
, insn
, "ALU shifted reg", dsc
);
6145 if (debug_displaced
)
6146 fprintf_unfiltered (gdb_stdlog
, "displaced: copying shifted reg %s insn "
6147 "%.8lx\n", is_mov
? "move" : "ALU",
6148 (unsigned long) insn
);
6150 rn
= bits (insn
, 16, 19);
6151 rm
= bits (insn
, 0, 3);
6152 rs
= bits (insn
, 8, 11);
6153 rd
= bits (insn
, 12, 15);
6156 dsc
->modinsn
[0] = (insn
& 0xfff000f0) | 0x302;
6158 dsc
->modinsn
[0] = (insn
& 0xfff000f0) | 0x10302;
6160 install_alu_shifted_reg (gdbarch
, regs
, dsc
, rd
, rn
, rm
, rs
);
6165 /* Clean up load instructions. */
6168 cleanup_load (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6169 struct displaced_step_closure
*dsc
)
6171 ULONGEST rt_val
, rt_val2
= 0, rn_val
;
6173 rt_val
= displaced_read_reg (regs
, dsc
, 0);
6174 if (dsc
->u
.ldst
.xfersize
== 8)
6175 rt_val2
= displaced_read_reg (regs
, dsc
, 1);
6176 rn_val
= displaced_read_reg (regs
, dsc
, 2);
6178 displaced_write_reg (regs
, dsc
, 0, dsc
->tmp
[0], CANNOT_WRITE_PC
);
6179 if (dsc
->u
.ldst
.xfersize
> 4)
6180 displaced_write_reg (regs
, dsc
, 1, dsc
->tmp
[1], CANNOT_WRITE_PC
);
6181 displaced_write_reg (regs
, dsc
, 2, dsc
->tmp
[2], CANNOT_WRITE_PC
);
6182 if (!dsc
->u
.ldst
.immed
)
6183 displaced_write_reg (regs
, dsc
, 3, dsc
->tmp
[3], CANNOT_WRITE_PC
);
6185 /* Handle register writeback. */
6186 if (dsc
->u
.ldst
.writeback
)
6187 displaced_write_reg (regs
, dsc
, dsc
->u
.ldst
.rn
, rn_val
, CANNOT_WRITE_PC
);
6188 /* Put result in right place. */
6189 displaced_write_reg (regs
, dsc
, dsc
->rd
, rt_val
, LOAD_WRITE_PC
);
6190 if (dsc
->u
.ldst
.xfersize
== 8)
6191 displaced_write_reg (regs
, dsc
, dsc
->rd
+ 1, rt_val2
, LOAD_WRITE_PC
);
6194 /* Clean up store instructions. */
6197 cleanup_store (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6198 struct displaced_step_closure
*dsc
)
6200 ULONGEST rn_val
= displaced_read_reg (regs
, dsc
, 2);
6202 displaced_write_reg (regs
, dsc
, 0, dsc
->tmp
[0], CANNOT_WRITE_PC
);
6203 if (dsc
->u
.ldst
.xfersize
> 4)
6204 displaced_write_reg (regs
, dsc
, 1, dsc
->tmp
[1], CANNOT_WRITE_PC
);
6205 displaced_write_reg (regs
, dsc
, 2, dsc
->tmp
[2], CANNOT_WRITE_PC
);
6206 if (!dsc
->u
.ldst
.immed
)
6207 displaced_write_reg (regs
, dsc
, 3, dsc
->tmp
[3], CANNOT_WRITE_PC
);
6208 if (!dsc
->u
.ldst
.restore_r4
)
6209 displaced_write_reg (regs
, dsc
, 4, dsc
->tmp
[4], CANNOT_WRITE_PC
);
6212 if (dsc
->u
.ldst
.writeback
)
6213 displaced_write_reg (regs
, dsc
, dsc
->u
.ldst
.rn
, rn_val
, CANNOT_WRITE_PC
);
6216 /* Copy "extra" load/store instructions. These are halfword/doubleword
6217 transfers, which have a different encoding to byte/word transfers. */
6220 arm_copy_extra_ld_st (struct gdbarch
*gdbarch
, uint32_t insn
, int unpriveleged
,
6221 struct regcache
*regs
, struct displaced_step_closure
*dsc
)
6223 unsigned int op1
= bits (insn
, 20, 24);
6224 unsigned int op2
= bits (insn
, 5, 6);
6225 unsigned int rt
= bits (insn
, 12, 15);
6226 unsigned int rn
= bits (insn
, 16, 19);
6227 unsigned int rm
= bits (insn
, 0, 3);
6228 char load
[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
6229 char bytesize
[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
6230 int immed
= (op1
& 0x4) != 0;
6232 ULONGEST rt_val
, rt_val2
= 0, rn_val
, rm_val
= 0;
6234 if (!insn_references_pc (insn
, 0x000ff00ful
))
6235 return arm_copy_unmodified (gdbarch
, insn
, "extra load/store", dsc
);
6237 if (debug_displaced
)
6238 fprintf_unfiltered (gdb_stdlog
, "displaced: copying %sextra load/store "
6239 "insn %.8lx\n", unpriveleged
? "unpriveleged " : "",
6240 (unsigned long) insn
);
6242 opcode
= ((op2
<< 2) | (op1
& 0x1) | ((op1
& 0x4) >> 1)) - 4;
6245 internal_error (__FILE__
, __LINE__
,
6246 _("copy_extra_ld_st: instruction decode error"));
6248 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
6249 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
6250 dsc
->tmp
[2] = displaced_read_reg (regs
, dsc
, 2);
6252 dsc
->tmp
[3] = displaced_read_reg (regs
, dsc
, 3);
6254 rt_val
= displaced_read_reg (regs
, dsc
, rt
);
6255 if (bytesize
[opcode
] == 8)
6256 rt_val2
= displaced_read_reg (regs
, dsc
, rt
+ 1);
6257 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
6259 rm_val
= displaced_read_reg (regs
, dsc
, rm
);
6261 displaced_write_reg (regs
, dsc
, 0, rt_val
, CANNOT_WRITE_PC
);
6262 if (bytesize
[opcode
] == 8)
6263 displaced_write_reg (regs
, dsc
, 1, rt_val2
, CANNOT_WRITE_PC
);
6264 displaced_write_reg (regs
, dsc
, 2, rn_val
, CANNOT_WRITE_PC
);
6266 displaced_write_reg (regs
, dsc
, 3, rm_val
, CANNOT_WRITE_PC
);
6269 dsc
->u
.ldst
.xfersize
= bytesize
[opcode
];
6270 dsc
->u
.ldst
.rn
= rn
;
6271 dsc
->u
.ldst
.immed
= immed
;
6272 dsc
->u
.ldst
.writeback
= bit (insn
, 24) == 0 || bit (insn
, 21) != 0;
6273 dsc
->u
.ldst
.restore_r4
= 0;
6276 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
6278 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
6279 dsc
->modinsn
[0] = (insn
& 0xfff00fff) | 0x20000;
6281 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
6283 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
6284 dsc
->modinsn
[0] = (insn
& 0xfff00ff0) | 0x20003;
6286 dsc
->cleanup
= load
[opcode
] ? &cleanup_load
: &cleanup_store
;
6291 /* Copy byte/half word/word loads and stores. */
6294 install_load_store (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6295 struct displaced_step_closure
*dsc
, int load
,
6296 int immed
, int writeback
, int size
, int usermode
,
6297 int rt
, int rm
, int rn
)
6299 ULONGEST rt_val
, rn_val
, rm_val
= 0;
6301 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
6302 dsc
->tmp
[2] = displaced_read_reg (regs
, dsc
, 2);
6304 dsc
->tmp
[3] = displaced_read_reg (regs
, dsc
, 3);
6306 dsc
->tmp
[4] = displaced_read_reg (regs
, dsc
, 4);
6308 rt_val
= displaced_read_reg (regs
, dsc
, rt
);
6309 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
6311 rm_val
= displaced_read_reg (regs
, dsc
, rm
);
6313 displaced_write_reg (regs
, dsc
, 0, rt_val
, CANNOT_WRITE_PC
);
6314 displaced_write_reg (regs
, dsc
, 2, rn_val
, CANNOT_WRITE_PC
);
6316 displaced_write_reg (regs
, dsc
, 3, rm_val
, CANNOT_WRITE_PC
);
6318 dsc
->u
.ldst
.xfersize
= size
;
6319 dsc
->u
.ldst
.rn
= rn
;
6320 dsc
->u
.ldst
.immed
= immed
;
6321 dsc
->u
.ldst
.writeback
= writeback
;
6323 /* To write PC we can do:
6325 Before this sequence of instructions:
6326 r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
6327 r2 is the Rn value got from dispalced_read_reg.
6329 Insn1: push {pc} Write address of STR instruction + offset on stack
6330 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
6331 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
6332 = addr(Insn1) + offset - addr(Insn3) - 8
6334 Insn4: add r4, r4, #8 r4 = offset - 8
6335 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
6337 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
6339 Otherwise we don't know what value to write for PC, since the offset is
6340 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
6341 of this can be found in Section "Saving from r15" in
6342 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
6344 dsc
->cleanup
= load
? &cleanup_load
: &cleanup_store
;
6349 thumb2_copy_load_literal (struct gdbarch
*gdbarch
, uint16_t insn1
,
6350 uint16_t insn2
, struct regcache
*regs
,
6351 struct displaced_step_closure
*dsc
, int size
)
6353 unsigned int u_bit
= bit (insn1
, 7);
6354 unsigned int rt
= bits (insn2
, 12, 15);
6355 int imm12
= bits (insn2
, 0, 11);
6358 if (debug_displaced
)
6359 fprintf_unfiltered (gdb_stdlog
,
6360 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
6361 (unsigned int) dsc
->insn_addr
, rt
, u_bit
? '+' : '-',
6367 /* Rewrite instruction LDR Rt imm12 into:
6369 Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12
6373 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2]. */
6376 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
6377 dsc
->tmp
[2] = displaced_read_reg (regs
, dsc
, 2);
6378 dsc
->tmp
[3] = displaced_read_reg (regs
, dsc
, 3);
6380 pc_val
= displaced_read_reg (regs
, dsc
, ARM_PC_REGNUM
);
6382 pc_val
= pc_val
& 0xfffffffc;
6384 displaced_write_reg (regs
, dsc
, 2, pc_val
, CANNOT_WRITE_PC
);
6385 displaced_write_reg (regs
, dsc
, 3, imm12
, CANNOT_WRITE_PC
);
6389 dsc
->u
.ldst
.xfersize
= size
;
6390 dsc
->u
.ldst
.immed
= 0;
6391 dsc
->u
.ldst
.writeback
= 0;
6392 dsc
->u
.ldst
.restore_r4
= 0;
6394 /* LDR R0, R2, R3 */
6395 dsc
->modinsn
[0] = 0xf852;
6396 dsc
->modinsn
[1] = 0x3;
6399 dsc
->cleanup
= &cleanup_load
;
6405 thumb2_copy_load_reg_imm (struct gdbarch
*gdbarch
, uint16_t insn1
,
6406 uint16_t insn2
, struct regcache
*regs
,
6407 struct displaced_step_closure
*dsc
,
6408 int writeback
, int immed
)
6410 unsigned int rt
= bits (insn2
, 12, 15);
6411 unsigned int rn
= bits (insn1
, 0, 3);
6412 unsigned int rm
= bits (insn2
, 0, 3); /* Only valid if !immed. */
6413 /* In LDR (register), there is also a register Rm, which is not allowed to
6414 be PC, so we don't have to check it. */
6416 if (rt
!= ARM_PC_REGNUM
&& rn
!= ARM_PC_REGNUM
)
6417 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
, "load",
6420 if (debug_displaced
)
6421 fprintf_unfiltered (gdb_stdlog
,
6422 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
6423 rt
, rn
, insn1
, insn2
);
6425 install_load_store (gdbarch
, regs
, dsc
, 1, immed
, writeback
, 4,
6428 dsc
->u
.ldst
.restore_r4
= 0;
6431 /* ldr[b]<cond> rt, [rn, #imm], etc.
6433 ldr[b]<cond> r0, [r2, #imm]. */
6435 dsc
->modinsn
[0] = (insn1
& 0xfff0) | 0x2;
6436 dsc
->modinsn
[1] = insn2
& 0x0fff;
6439 /* ldr[b]<cond> rt, [rn, rm], etc.
6441 ldr[b]<cond> r0, [r2, r3]. */
6443 dsc
->modinsn
[0] = (insn1
& 0xfff0) | 0x2;
6444 dsc
->modinsn
[1] = (insn2
& 0x0ff0) | 0x3;
6454 arm_copy_ldr_str_ldrb_strb (struct gdbarch
*gdbarch
, uint32_t insn
,
6455 struct regcache
*regs
,
6456 struct displaced_step_closure
*dsc
,
6457 int load
, int size
, int usermode
)
6459 int immed
= !bit (insn
, 25);
6460 int writeback
= (bit (insn
, 24) == 0 || bit (insn
, 21) != 0);
6461 unsigned int rt
= bits (insn
, 12, 15);
6462 unsigned int rn
= bits (insn
, 16, 19);
6463 unsigned int rm
= bits (insn
, 0, 3); /* Only valid if !immed. */
6465 if (!insn_references_pc (insn
, 0x000ff00ful
))
6466 return arm_copy_unmodified (gdbarch
, insn
, "load/store", dsc
);
6468 if (debug_displaced
)
6469 fprintf_unfiltered (gdb_stdlog
,
6470 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
6471 load
? (size
== 1 ? "ldrb" : "ldr")
6472 : (size
== 1 ? "strb" : "str"), usermode
? "t" : "",
6474 (unsigned long) insn
);
6476 install_load_store (gdbarch
, regs
, dsc
, load
, immed
, writeback
, size
,
6477 usermode
, rt
, rm
, rn
);
6479 if (load
|| rt
!= ARM_PC_REGNUM
)
6481 dsc
->u
.ldst
.restore_r4
= 0;
6484 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
6486 {ldr,str}[b]<cond> r0, [r2, #imm]. */
6487 dsc
->modinsn
[0] = (insn
& 0xfff00fff) | 0x20000;
6489 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
6491 {ldr,str}[b]<cond> r0, [r2, r3]. */
6492 dsc
->modinsn
[0] = (insn
& 0xfff00ff0) | 0x20003;
6496 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
6497 dsc
->u
.ldst
.restore_r4
= 1;
6498 dsc
->modinsn
[0] = 0xe92d8000; /* push {pc} */
6499 dsc
->modinsn
[1] = 0xe8bd0010; /* pop {r4} */
6500 dsc
->modinsn
[2] = 0xe044400f; /* sub r4, r4, pc. */
6501 dsc
->modinsn
[3] = 0xe2844008; /* add r4, r4, #8. */
6502 dsc
->modinsn
[4] = 0xe0800004; /* add r0, r0, r4. */
6506 dsc
->modinsn
[5] = (insn
& 0xfff00fff) | 0x20000;
6508 dsc
->modinsn
[5] = (insn
& 0xfff00ff0) | 0x20003;
6513 dsc
->cleanup
= load
? &cleanup_load
: &cleanup_store
;
6518 /* Cleanup LDM instructions with fully-populated register list. This is an
6519 unfortunate corner case: it's impossible to implement correctly by modifying
6520 the instruction. The issue is as follows: we have an instruction,
6524 which we must rewrite to avoid loading PC. A possible solution would be to
6525 do the load in two halves, something like (with suitable cleanup
6529 ldm[id][ab] r8!, {r0-r7}
6531 ldm[id][ab] r8, {r7-r14}
6534 but at present there's no suitable place for <temp>, since the scratch space
6535 is overwritten before the cleanup routine is called. For now, we simply
6536 emulate the instruction. */
6539 cleanup_block_load_all (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6540 struct displaced_step_closure
*dsc
)
6542 int inc
= dsc
->u
.block
.increment
;
6543 int bump_before
= dsc
->u
.block
.before
? (inc
? 4 : -4) : 0;
6544 int bump_after
= dsc
->u
.block
.before
? 0 : (inc
? 4 : -4);
6545 uint32_t regmask
= dsc
->u
.block
.regmask
;
6546 int regno
= inc
? 0 : 15;
6547 CORE_ADDR xfer_addr
= dsc
->u
.block
.xfer_addr
;
6548 int exception_return
= dsc
->u
.block
.load
&& dsc
->u
.block
.user
6549 && (regmask
& 0x8000) != 0;
6550 uint32_t status
= displaced_read_reg (regs
, dsc
, ARM_PS_REGNUM
);
6551 int do_transfer
= condition_true (dsc
->u
.block
.cond
, status
);
6552 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
6557 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
6558 sensible we can do here. Complain loudly. */
6559 if (exception_return
)
6560 error (_("Cannot single-step exception return"));
6562 /* We don't handle any stores here for now. */
6563 gdb_assert (dsc
->u
.block
.load
!= 0);
6565 if (debug_displaced
)
6566 fprintf_unfiltered (gdb_stdlog
, "displaced: emulating block transfer: "
6567 "%s %s %s\n", dsc
->u
.block
.load
? "ldm" : "stm",
6568 dsc
->u
.block
.increment
? "inc" : "dec",
6569 dsc
->u
.block
.before
? "before" : "after");
6576 while (regno
<= ARM_PC_REGNUM
&& (regmask
& (1 << regno
)) == 0)
6579 while (regno
>= 0 && (regmask
& (1 << regno
)) == 0)
6582 xfer_addr
+= bump_before
;
6584 memword
= read_memory_unsigned_integer (xfer_addr
, 4, byte_order
);
6585 displaced_write_reg (regs
, dsc
, regno
, memword
, LOAD_WRITE_PC
);
6587 xfer_addr
+= bump_after
;
6589 regmask
&= ~(1 << regno
);
6592 if (dsc
->u
.block
.writeback
)
6593 displaced_write_reg (regs
, dsc
, dsc
->u
.block
.rn
, xfer_addr
,
6597 /* Clean up an STM which included the PC in the register list. */
6600 cleanup_block_store_pc (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6601 struct displaced_step_closure
*dsc
)
6603 uint32_t status
= displaced_read_reg (regs
, dsc
, ARM_PS_REGNUM
);
6604 int store_executed
= condition_true (dsc
->u
.block
.cond
, status
);
6605 CORE_ADDR pc_stored_at
, transferred_regs
= bitcount (dsc
->u
.block
.regmask
);
6606 CORE_ADDR stm_insn_addr
;
6609 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
6611 /* If condition code fails, there's nothing else to do. */
6612 if (!store_executed
)
6615 if (dsc
->u
.block
.increment
)
6617 pc_stored_at
= dsc
->u
.block
.xfer_addr
+ 4 * transferred_regs
;
6619 if (dsc
->u
.block
.before
)
6624 pc_stored_at
= dsc
->u
.block
.xfer_addr
;
6626 if (dsc
->u
.block
.before
)
6630 pc_val
= read_memory_unsigned_integer (pc_stored_at
, 4, byte_order
);
6631 stm_insn_addr
= dsc
->scratch_base
;
6632 offset
= pc_val
- stm_insn_addr
;
6634 if (debug_displaced
)
6635 fprintf_unfiltered (gdb_stdlog
, "displaced: detected PC offset %.8lx for "
6636 "STM instruction\n", offset
);
6638 /* Rewrite the stored PC to the proper value for the non-displaced original
6640 write_memory_unsigned_integer (pc_stored_at
, 4, byte_order
,
6641 dsc
->insn_addr
+ offset
);
6644 /* Clean up an LDM which includes the PC in the register list. We clumped all
6645 the registers in the transferred list into a contiguous range r0...rX (to
6646 avoid loading PC directly and losing control of the debugged program), so we
6647 must undo that here. */
6650 cleanup_block_load_pc (struct gdbarch
*gdbarch
,
6651 struct regcache
*regs
,
6652 struct displaced_step_closure
*dsc
)
6654 uint32_t status
= displaced_read_reg (regs
, dsc
, ARM_PS_REGNUM
);
6655 int load_executed
= condition_true (dsc
->u
.block
.cond
, status
), i
;
6656 unsigned int mask
= dsc
->u
.block
.regmask
, write_reg
= ARM_PC_REGNUM
;
6657 unsigned int regs_loaded
= bitcount (mask
);
6658 unsigned int num_to_shuffle
= regs_loaded
, clobbered
;
6660 /* The method employed here will fail if the register list is fully populated
6661 (we need to avoid loading PC directly). */
6662 gdb_assert (num_to_shuffle
< 16);
6667 clobbered
= (1 << num_to_shuffle
) - 1;
6669 while (num_to_shuffle
> 0)
6671 if ((mask
& (1 << write_reg
)) != 0)
6673 unsigned int read_reg
= num_to_shuffle
- 1;
6675 if (read_reg
!= write_reg
)
6677 ULONGEST rval
= displaced_read_reg (regs
, dsc
, read_reg
);
6678 displaced_write_reg (regs
, dsc
, write_reg
, rval
, LOAD_WRITE_PC
);
6679 if (debug_displaced
)
6680 fprintf_unfiltered (gdb_stdlog
, _("displaced: LDM: move "
6681 "loaded register r%d to r%d\n"), read_reg
,
6684 else if (debug_displaced
)
6685 fprintf_unfiltered (gdb_stdlog
, _("displaced: LDM: register "
6686 "r%d already in the right place\n"),
6689 clobbered
&= ~(1 << write_reg
);
6697 /* Restore any registers we scribbled over. */
6698 for (write_reg
= 0; clobbered
!= 0; write_reg
++)
6700 if ((clobbered
& (1 << write_reg
)) != 0)
6702 displaced_write_reg (regs
, dsc
, write_reg
, dsc
->tmp
[write_reg
],
6704 if (debug_displaced
)
6705 fprintf_unfiltered (gdb_stdlog
, _("displaced: LDM: restored "
6706 "clobbered register r%d\n"), write_reg
);
6707 clobbered
&= ~(1 << write_reg
);
6711 /* Perform register writeback manually. */
6712 if (dsc
->u
.block
.writeback
)
6714 ULONGEST new_rn_val
= dsc
->u
.block
.xfer_addr
;
6716 if (dsc
->u
.block
.increment
)
6717 new_rn_val
+= regs_loaded
* 4;
6719 new_rn_val
-= regs_loaded
* 4;
6721 displaced_write_reg (regs
, dsc
, dsc
->u
.block
.rn
, new_rn_val
,
6726 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
6727 in user-level code (in particular exception return, ldm rn, {...pc}^). */
6730 arm_copy_block_xfer (struct gdbarch
*gdbarch
, uint32_t insn
,
6731 struct regcache
*regs
,
6732 struct displaced_step_closure
*dsc
)
6734 int load
= bit (insn
, 20);
6735 int user
= bit (insn
, 22);
6736 int increment
= bit (insn
, 23);
6737 int before
= bit (insn
, 24);
6738 int writeback
= bit (insn
, 21);
6739 int rn
= bits (insn
, 16, 19);
6741 /* Block transfers which don't mention PC can be run directly
6743 if (rn
!= ARM_PC_REGNUM
&& (insn
& 0x8000) == 0)
6744 return arm_copy_unmodified (gdbarch
, insn
, "ldm/stm", dsc
);
6746 if (rn
== ARM_PC_REGNUM
)
6748 warning (_("displaced: Unpredictable LDM or STM with "
6749 "base register r15"));
6750 return arm_copy_unmodified (gdbarch
, insn
, "unpredictable ldm/stm", dsc
);
6753 if (debug_displaced
)
6754 fprintf_unfiltered (gdb_stdlog
, "displaced: copying block transfer insn "
6755 "%.8lx\n", (unsigned long) insn
);
6757 dsc
->u
.block
.xfer_addr
= displaced_read_reg (regs
, dsc
, rn
);
6758 dsc
->u
.block
.rn
= rn
;
6760 dsc
->u
.block
.load
= load
;
6761 dsc
->u
.block
.user
= user
;
6762 dsc
->u
.block
.increment
= increment
;
6763 dsc
->u
.block
.before
= before
;
6764 dsc
->u
.block
.writeback
= writeback
;
6765 dsc
->u
.block
.cond
= bits (insn
, 28, 31);
6767 dsc
->u
.block
.regmask
= insn
& 0xffff;
6771 if ((insn
& 0xffff) == 0xffff)
6773 /* LDM with a fully-populated register list. This case is
6774 particularly tricky. Implement for now by fully emulating the
6775 instruction (which might not behave perfectly in all cases, but
6776 these instructions should be rare enough for that not to matter
6778 dsc
->modinsn
[0] = ARM_NOP
;
6780 dsc
->cleanup
= &cleanup_block_load_all
;
6784 /* LDM of a list of registers which includes PC. Implement by
6785 rewriting the list of registers to be transferred into a
6786 contiguous chunk r0...rX before doing the transfer, then shuffling
6787 registers into the correct places in the cleanup routine. */
6788 unsigned int regmask
= insn
& 0xffff;
6789 unsigned int num_in_list
= bitcount (regmask
), new_regmask
, bit
= 1;
6790 unsigned int to
= 0, from
= 0, i
, new_rn
;
6792 for (i
= 0; i
< num_in_list
; i
++)
6793 dsc
->tmp
[i
] = displaced_read_reg (regs
, dsc
, i
);
6795 /* Writeback makes things complicated. We need to avoid clobbering
6796 the base register with one of the registers in our modified
6797 register list, but just using a different register can't work in
6800 ldm r14!, {r0-r13,pc}
6802 which would need to be rewritten as:
6806 but that can't work, because there's no free register for N.
6808 Solve this by turning off the writeback bit, and emulating
6809 writeback manually in the cleanup routine. */
6814 new_regmask
= (1 << num_in_list
) - 1;
6816 if (debug_displaced
)
6817 fprintf_unfiltered (gdb_stdlog
, _("displaced: LDM r%d%s, "
6818 "{..., pc}: original reg list %.4x, modified "
6819 "list %.4x\n"), rn
, writeback
? "!" : "",
6820 (int) insn
& 0xffff, new_regmask
);
6822 dsc
->modinsn
[0] = (insn
& ~0xffff) | (new_regmask
& 0xffff);
6824 dsc
->cleanup
= &cleanup_block_load_pc
;
6829 /* STM of a list of registers which includes PC. Run the instruction
6830 as-is, but out of line: this will store the wrong value for the PC,
6831 so we must manually fix up the memory in the cleanup routine.
6832 Doing things this way has the advantage that we can auto-detect
6833 the offset of the PC write (which is architecture-dependent) in
6834 the cleanup routine. */
6835 dsc
->modinsn
[0] = insn
;
6837 dsc
->cleanup
= &cleanup_block_store_pc
;
6844 thumb2_copy_block_xfer (struct gdbarch
*gdbarch
, uint16_t insn1
, uint16_t insn2
,
6845 struct regcache
*regs
,
6846 struct displaced_step_closure
*dsc
)
6848 int rn
= bits (insn1
, 0, 3);
6849 int load
= bit (insn1
, 4);
6850 int writeback
= bit (insn1
, 5);
6852 /* Block transfers which don't mention PC can be run directly
6854 if (rn
!= ARM_PC_REGNUM
&& (insn2
& 0x8000) == 0)
6855 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
, "ldm/stm", dsc
);
6857 if (rn
== ARM_PC_REGNUM
)
6859 warning (_("displaced: Unpredictable LDM or STM with "
6860 "base register r15"));
6861 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
6862 "unpredictable ldm/stm", dsc
);
6865 if (debug_displaced
)
6866 fprintf_unfiltered (gdb_stdlog
, "displaced: copying block transfer insn "
6867 "%.4x%.4x\n", insn1
, insn2
);
6869 /* Clear bit 13, since it should be always zero. */
6870 dsc
->u
.block
.regmask
= (insn2
& 0xdfff);
6871 dsc
->u
.block
.rn
= rn
;
6873 dsc
->u
.block
.load
= load
;
6874 dsc
->u
.block
.user
= 0;
6875 dsc
->u
.block
.increment
= bit (insn1
, 7);
6876 dsc
->u
.block
.before
= bit (insn1
, 8);
6877 dsc
->u
.block
.writeback
= writeback
;
6878 dsc
->u
.block
.cond
= INST_AL
;
6879 dsc
->u
.block
.xfer_addr
= displaced_read_reg (regs
, dsc
, rn
);
6883 if (dsc
->u
.block
.regmask
== 0xffff)
6885 /* This branch is impossible to happen. */
6890 unsigned int regmask
= dsc
->u
.block
.regmask
;
6891 unsigned int num_in_list
= bitcount (regmask
), new_regmask
, bit
= 1;
6892 unsigned int to
= 0, from
= 0, i
, new_rn
;
6894 for (i
= 0; i
< num_in_list
; i
++)
6895 dsc
->tmp
[i
] = displaced_read_reg (regs
, dsc
, i
);
6900 new_regmask
= (1 << num_in_list
) - 1;
6902 if (debug_displaced
)
6903 fprintf_unfiltered (gdb_stdlog
, _("displaced: LDM r%d%s, "
6904 "{..., pc}: original reg list %.4x, modified "
6905 "list %.4x\n"), rn
, writeback
? "!" : "",
6906 (int) dsc
->u
.block
.regmask
, new_regmask
);
6908 dsc
->modinsn
[0] = insn1
;
6909 dsc
->modinsn
[1] = (new_regmask
& 0xffff);
6912 dsc
->cleanup
= &cleanup_block_load_pc
;
6917 dsc
->modinsn
[0] = insn1
;
6918 dsc
->modinsn
[1] = insn2
;
6920 dsc
->cleanup
= &cleanup_block_store_pc
;
6925 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6926 for Linux, where some SVC instructions must be treated specially. */
6929 cleanup_svc (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6930 struct displaced_step_closure
*dsc
)
6932 CORE_ADDR resume_addr
= dsc
->insn_addr
+ dsc
->insn_size
;
6934 if (debug_displaced
)
6935 fprintf_unfiltered (gdb_stdlog
, "displaced: cleanup for svc, resume at "
6936 "%.8lx\n", (unsigned long) resume_addr
);
6938 displaced_write_reg (regs
, dsc
, ARM_PC_REGNUM
, resume_addr
, BRANCH_WRITE_PC
);
6942 /* Common copy routine for svc instruciton. */
6945 install_svc (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6946 struct displaced_step_closure
*dsc
)
6948 /* Preparation: none.
6949 Insn: unmodified svc.
6950 Cleanup: pc <- insn_addr + insn_size. */
6952 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
6954 dsc
->wrote_to_pc
= 1;
6956 /* Allow OS-specific code to override SVC handling. */
6957 if (dsc
->u
.svc
.copy_svc_os
)
6958 return dsc
->u
.svc
.copy_svc_os (gdbarch
, regs
, dsc
);
6961 dsc
->cleanup
= &cleanup_svc
;
6967 arm_copy_svc (struct gdbarch
*gdbarch
, uint32_t insn
,
6968 struct regcache
*regs
, struct displaced_step_closure
*dsc
)
6971 if (debug_displaced
)
6972 fprintf_unfiltered (gdb_stdlog
, "displaced: copying svc insn %.8lx\n",
6973 (unsigned long) insn
);
6975 dsc
->modinsn
[0] = insn
;
6977 return install_svc (gdbarch
, regs
, dsc
);
6981 thumb_copy_svc (struct gdbarch
*gdbarch
, uint16_t insn
,
6982 struct regcache
*regs
, struct displaced_step_closure
*dsc
)
6985 if (debug_displaced
)
6986 fprintf_unfiltered (gdb_stdlog
, "displaced: copying svc insn %.4x\n",
6989 dsc
->modinsn
[0] = insn
;
6991 return install_svc (gdbarch
, regs
, dsc
);
6994 /* Copy undefined instructions. */
6997 arm_copy_undef (struct gdbarch
*gdbarch
, uint32_t insn
,
6998 struct displaced_step_closure
*dsc
)
7000 if (debug_displaced
)
7001 fprintf_unfiltered (gdb_stdlog
,
7002 "displaced: copying undefined insn %.8lx\n",
7003 (unsigned long) insn
);
7005 dsc
->modinsn
[0] = insn
;
7011 thumb_32bit_copy_undef (struct gdbarch
*gdbarch
, uint16_t insn1
, uint16_t insn2
,
7012 struct displaced_step_closure
*dsc
)
7015 if (debug_displaced
)
7016 fprintf_unfiltered (gdb_stdlog
, "displaced: copying undefined insn "
7017 "%.4x %.4x\n", (unsigned short) insn1
,
7018 (unsigned short) insn2
);
7020 dsc
->modinsn
[0] = insn1
;
7021 dsc
->modinsn
[1] = insn2
;
7027 /* Copy unpredictable instructions. */
7030 arm_copy_unpred (struct gdbarch
*gdbarch
, uint32_t insn
,
7031 struct displaced_step_closure
*dsc
)
7033 if (debug_displaced
)
7034 fprintf_unfiltered (gdb_stdlog
, "displaced: copying unpredictable insn "
7035 "%.8lx\n", (unsigned long) insn
);
7037 dsc
->modinsn
[0] = insn
;
/* The decode_* functions are instruction decoding helpers.  They mostly follow
   the presentation in the ARM ARM.  */

static int
arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
			      struct regcache *regs,
			      struct displaced_step_closure *dsc)
{
  unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
  unsigned int rn = bits (insn, 16, 19);

  if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0xe) == 0x0)
    return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
  else if (op1 == 0x10 && op2 == 0x0 && (rn & 0xe) == 0x1)
    return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
  else if ((op1 & 0x60) == 0x20)
    return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
  else if ((op1 & 0x71) == 0x40)
    return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
				dsc);
  else if ((op1 & 0x77) == 0x41)
    return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
  else if ((op1 & 0x77) == 0x45)
    return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pli.  */
  else if ((op1 & 0x77) == 0x51)
    {
      if (rn != 0xf)
	return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pld/pldw.  */
      else
	return arm_copy_unpred (gdbarch, insn, dsc);
    }
  else if ((op1 & 0x77) == 0x55)
    return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pld/pldw.  */
  else if (op1 == 0x57)
    switch (op2)
      {
      case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
      case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
      case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
      case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
      default: return arm_copy_unpred (gdbarch, insn, dsc);
      }
  else if ((op1 & 0x63) == 0x43)
    return arm_copy_unpred (gdbarch, insn, dsc);
  else if ((op2 & 0x1) == 0x0)
    switch (op1 & ~0x80)
      {
      case 0x61:
	return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
      case 0x65:
	return arm_copy_preload_reg (gdbarch, insn, regs, dsc);  /* pli reg.  */
      case 0x71: case 0x75:
	/* pld/pldw reg.  */
	return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
      case 0x63: case 0x67: case 0x73: case 0x77:
	return arm_copy_unpred (gdbarch, insn, dsc);
      default:
	return arm_copy_undef (gdbarch, insn, dsc);
      }
  else
    return arm_copy_undef (gdbarch, insn, dsc);  /* Probably unreachable.  */
}
/* Decode the unconditional (cond == 0b1111) ARM instruction space and
   dispatch to the appropriate copy routine.  */
static int
arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  struct displaced_step_closure *dsc)
{
  if (bit (insn, 27) == 0)
    return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
  /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx.  */
  else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
    {
    case 0x0: case 0x2:
      return arm_copy_unmodified (gdbarch, insn, "srs", dsc);

    case 0x1: case 0x3:
      return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);

    case 0x4: case 0x5: case 0x6: case 0x7:
      return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);

    case 0x8:
      switch ((insn & 0xe00000) >> 21)
	{
	case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
	  /* stc/stc2.  */
	  return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);

	case 0x2:
	  return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);

	default:
	  return arm_copy_undef (gdbarch, insn, dsc);
	}

    case 0x9:
      {
	int rn_f = (bits (insn, 16, 19) == 0xf);
	switch ((insn & 0xe00000) >> 21)
	  {
	  case 0x1: case 0x3:
	    /* ldc/ldc2 imm (undefined for rn == pc).  */
	    return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
			: arm_copy_copro_load_store (gdbarch, insn, regs, dsc);

	  case 0x2:
	    return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);

	  case 0x4: case 0x5: case 0x6: case 0x7:
	    /* ldc/ldc2 lit (undefined for rn != pc).  */
	    return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
			: arm_copy_undef (gdbarch, insn, dsc);

	  default:
	    return arm_copy_undef (gdbarch, insn, dsc);
	  }
      }

    case 0xa:
      return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);

    case 0xb:
      if (bits (insn, 16, 19) == 0xf)
	/* ldc/ldc2 lit.  */
	return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0xc:
      if (bit (insn, 4))
	return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
      else
	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);

    case 0xd:
      if (bit (insn, 4))
	return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
      else
	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);

    default:
      return arm_copy_undef (gdbarch, insn, dsc);
    }
}
/* Decode miscellaneous instructions in dp/misc encoding space.  */

static int
arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  struct displaced_step_closure *dsc)
{
  unsigned int op2 = bits (insn, 4, 6);
  unsigned int op = bits (insn, 21, 22);

  switch (op2)
    {
    case 0x0:
      return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);

    case 0x1:
      if (op == 0x1)  /* bx.  */
	return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
      else if (op == 0x3)
	return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x2:
      if (op == 0x1)
	/* Not really supported.  */
	return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x3:
      if (op == 0x1)
	return arm_copy_bx_blx_reg (gdbarch, insn,
				    regs, dsc);  /* blx register.  */
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x5:
      return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);

    case 0x7:
      if (op == 0x1)
	return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
      else if (op == 0x3)
	/* Not really supported.  */
	return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
      /* Fall through to undefined for other op values.  */

    default:
      return arm_copy_undef (gdbarch, insn, dsc);
    }
}
/* Decode data-processing/miscellaneous instructions (immediate and
   register forms) and dispatch to the appropriate copy routine.  */

static int
arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
		    struct regcache *regs,
		    struct displaced_step_closure *dsc)
{
  if (bit (insn, 25))
    switch (bits (insn, 20, 24))
      {
      case 0x10:
	return arm_copy_unmodified (gdbarch, insn, "movw", dsc);

      case 0x14:
	return arm_copy_unmodified (gdbarch, insn, "movt", dsc);

      case 0x12: case 0x16:
	return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);

      default:
	return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
      }
  else
    {
      uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);

      if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
	return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
	return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
	return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
	return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
      else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
	return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
      else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
	return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
      else if (op2 == 0xb || (op2 & 0xd) == 0xd)
	/* 2nd arg means "unprivileged".  */
	return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
				     dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
/* Decode load/store word and unsigned byte instructions.  The trailing
   three arguments to arm_copy_ldr_str_ldrb_strb select load-vs-store,
   transfer size in bytes, and the user-mode (translated) variant.  */

static int
arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
			     struct regcache *regs,
			     struct displaced_step_closure *dsc)
{
  int a = bit (insn, 25), b = bit (insn, 4);
  uint32_t op1 = bits (insn, 20, 24);
  int rn_f = bits (insn, 16, 19) == 0xf;

  if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
      || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
  else if ((!a && (op1 & 0x17) == 0x02)
	   || (a && (op1 & 0x17) == 0x02 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
  else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
	   || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
  else if ((!a && (op1 & 0x17) == 0x03)
	   || (a && (op1 & 0x17) == 0x03 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
  else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
	   || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
  else if ((!a && (op1 & 0x17) == 0x06)
	   || (a && (op1 & 0x17) == 0x06 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
  else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
	   || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
  else if ((!a && (op1 & 0x17) == 0x07)
	   || (a && (op1 & 0x17) == 0x07 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);

  /* Should be unreachable.  */
  return 1;
}
/* Decode media instructions (parallel add/sub, pack/unpack, saturate,
   usad8/usada8, bit-field ops).  None of these can use the PC, so they
   are all copied unmodified or flagged undefined.  */

static int
arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
		  struct displaced_step_closure *dsc)
{
  switch (bits (insn, 20, 24))
    {
    case 0x00: case 0x01: case 0x02: case 0x03:
      return arm_copy_unmodified (gdbarch, insn,
				  "parallel add/sub signed", dsc);

    case 0x04: case 0x05: case 0x06: case 0x07:
      return arm_copy_unmodified (gdbarch, insn,
				  "parallel add/sub unsigned", dsc);

    case 0x08: case 0x09: case 0x0a: case 0x0b:
    case 0x0c: case 0x0d: case 0x0e: case 0x0f:
      return arm_copy_unmodified (gdbarch, insn,
				  "decode/pack/unpack/saturate/reverse", dsc);

    case 0x18:
      if (bits (insn, 5, 7) == 0)  /* op2.  */
	{
	  if (bits (insn, 12, 15) == 0xf)
	    return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
	  else
	    return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
	}
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1a: case 0x1b:
      if (bits (insn, 5, 6) == 0x2)  /* op2[1:0].  */
	return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1c: case 0x1d:
      if (bits (insn, 5, 6) == 0x0)  /* op2[1:0].  */
	{
	  if (bits (insn, 0, 3) == 0xf)
	    return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
	  else
	    return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
	}
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1e: case 0x1f:
      if (bits (insn, 5, 6) == 0x2)  /* op2[1:0].  */
	return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
/* Decode branch / branch-with-link / block-transfer instructions.

   Fix: the parameter was declared int32_t while every sibling decoder
   takes uint32_t; instruction words with bit 31 set do not fit in
   int32_t and the conversion is implementation-defined.  Use uint32_t
   for consistency and well-defined bit extraction.  */

static int
arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn,
			struct regcache *regs,
			struct displaced_step_closure *dsc)
{
  if (bit (insn, 25))
    return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
  else
    return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
}
/* Decode VFP/Neon extension register load/store instructions.  */

static int
arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  struct displaced_step_closure *dsc)
{
  unsigned int opcode = bits (insn, 20, 24);

  switch (opcode)
    {
    case 0x04: case 0x05:  /* VFP/Neon mrrc/mcrr.  */
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);

    case 0x08: case 0x0a: case 0x0c: case 0x0e:
    case 0x12: case 0x16:
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);

    case 0x09: case 0x0b: case 0x0d: case 0x0f:
    case 0x13: case 0x17:
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);

    case 0x10: case 0x14: case 0x18: case 0x1c:  /* vstr.  */
    case 0x11: case 0x15: case 0x19: case 0x1d:  /* vldr.  */
      /* Note: no writeback for these instructions.  Bit 25 will always be
	 zero though (via caller), so the following works OK.  */
      return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
/* Decode shifted register instructions.  */

static int
thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
			    uint16_t insn2, struct regcache *regs,
			    struct displaced_step_closure *dsc)
{
  /* PC is only allowed to be used in instruction MOV.  */

  unsigned int op = bits (insn1, 5, 8);
  unsigned int rn = bits (insn1, 0, 3);

  if (op == 0x2 && rn == 0xf)  /* MOV */
    return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
  else
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					"dp (shift reg)", dsc);
}
/* Decode extension register load/store.  Exactly the same as
   arm_decode_ext_reg_ld_st.  */

static int
thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
			     uint16_t insn2, struct regcache *regs,
			     struct displaced_step_closure *dsc)
{
  unsigned int opcode = bits (insn1, 4, 8);

  switch (opcode)
    {
    case 0x04: case 0x05:
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vfp/neon vmov", dsc);

    case 0x08: case 0x0c: /* 01x00 */
    case 0x0a: case 0x0e: /* 01x10 */
    case 0x12: case 0x16: /* 10x10 */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vfp/neon vstm/vpush", dsc);

    case 0x09: case 0x0d: /* 01x01 */
    case 0x0b: case 0x0f: /* 01x11 */
    case 0x13: case 0x17: /* 10x11 */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vfp/neon vldm/vpop", dsc);

    case 0x10: case 0x14: case 0x18: case 0x1c:  /* vstr.  */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vstr", dsc);
    case 0x11: case 0x15: case 0x19: case 0x1d:  /* vldr.  */
      return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
7483 arm_decode_svc_copro (struct gdbarch
*gdbarch
, uint32_t insn
, CORE_ADDR to
,
7484 struct regcache
*regs
, struct displaced_step_closure
*dsc
)
7486 unsigned int op1
= bits (insn
, 20, 25);
7487 int op
= bit (insn
, 4);
7488 unsigned int coproc
= bits (insn
, 8, 11);
7489 unsigned int rn
= bits (insn
, 16, 19);
7491 if ((op1
& 0x20) == 0x00 && (op1
& 0x3a) != 0x00 && (coproc
& 0xe) == 0xa)
7492 return arm_decode_ext_reg_ld_st (gdbarch
, insn
, regs
, dsc
);
7493 else if ((op1
& 0x21) == 0x00 && (op1
& 0x3a) != 0x00
7494 && (coproc
& 0xe) != 0xa)
7496 return arm_copy_copro_load_store (gdbarch
, insn
, regs
, dsc
);
7497 else if ((op1
& 0x21) == 0x01 && (op1
& 0x3a) != 0x00
7498 && (coproc
& 0xe) != 0xa)
7499 /* ldc/ldc2 imm/lit. */
7500 return arm_copy_copro_load_store (gdbarch
, insn
, regs
, dsc
);
7501 else if ((op1
& 0x3e) == 0x00)
7502 return arm_copy_undef (gdbarch
, insn
, dsc
);
7503 else if ((op1
& 0x3e) == 0x04 && (coproc
& 0xe) == 0xa)
7504 return arm_copy_unmodified (gdbarch
, insn
, "neon 64bit xfer", dsc
);
7505 else if (op1
== 0x04 && (coproc
& 0xe) != 0xa)
7506 return arm_copy_unmodified (gdbarch
, insn
, "mcrr/mcrr2", dsc
);
7507 else if (op1
== 0x05 && (coproc
& 0xe) != 0xa)
7508 return arm_copy_unmodified (gdbarch
, insn
, "mrrc/mrrc2", dsc
);
7509 else if ((op1
& 0x30) == 0x20 && !op
)
7511 if ((coproc
& 0xe) == 0xa)
7512 return arm_copy_unmodified (gdbarch
, insn
, "vfp dataproc", dsc
);
7514 return arm_copy_unmodified (gdbarch
, insn
, "cdp/cdp2", dsc
);
7516 else if ((op1
& 0x30) == 0x20 && op
)
7517 return arm_copy_unmodified (gdbarch
, insn
, "neon 8/16/32 bit xfer", dsc
);
7518 else if ((op1
& 0x31) == 0x20 && op
&& (coproc
& 0xe) != 0xa)
7519 return arm_copy_unmodified (gdbarch
, insn
, "mcr/mcr2", dsc
);
7520 else if ((op1
& 0x31) == 0x21 && op
&& (coproc
& 0xe) != 0xa)
7521 return arm_copy_unmodified (gdbarch
, insn
, "mrc/mrc2", dsc
);
7522 else if ((op1
& 0x30) == 0x30)
7523 return arm_copy_svc (gdbarch
, insn
, regs
, dsc
);
7525 return arm_copy_undef (gdbarch
, insn
, dsc
); /* Possibly unreachable. */
/* Decode Thumb-2 coprocessor-space instructions (SIMD/VFP transfers,
   extension register load/store, stc/ldc).  */

static int
thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
			 uint16_t insn2, struct regcache *regs,
			 struct displaced_step_closure *dsc)
{
  unsigned int coproc = bits (insn2, 8, 11);
  unsigned int op1 = bits (insn1, 4, 9);
  unsigned int bit_5_8 = bits (insn1, 5, 8);
  unsigned int bit_9 = bit (insn1, 9);
  unsigned int bit_4 = bit (insn1, 4);
  unsigned int rn = bits (insn1, 0, 3);

  if (bit_9 == 0)
    {
      if (bit_5_8 == 2)
	return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					    "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
					    dsc);
      else if (bit_5_8 == 0)  /* UNDEFINED.  */
	return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
      else
	{
	  /* coproc is 101x.  SIMD/VFP, ext registers load/store.  */
	  if ((coproc & 0xe) == 0xa)
	    return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
						dsc);
	  else  /* coproc is not 101x.  */
	    {
	      if (bit_4 == 0)  /* STC/STC2.  */
		return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						    "stc/stc2", dsc);
	      else  /* LDC/LDC2 {literal, immediate}.  */
		return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
						     regs, dsc);
	    }
	}
    }
  else
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
}
7572 install_pc_relative (struct gdbarch
*gdbarch
, struct regcache
*regs
,
7573 struct displaced_step_closure
*dsc
, int rd
)
7579 Preparation: Rd <- PC
7585 int val
= displaced_read_reg (regs
, dsc
, ARM_PC_REGNUM
);
7586 displaced_write_reg (regs
, dsc
, rd
, val
, CANNOT_WRITE_PC
);
7590 thumb_copy_pc_relative_16bit (struct gdbarch
*gdbarch
, struct regcache
*regs
,
7591 struct displaced_step_closure
*dsc
,
7592 int rd
, unsigned int imm
)
7595 /* Encoding T2: ADDS Rd, #imm */
7596 dsc
->modinsn
[0] = (0x3000 | (rd
<< 8) | imm
);
7598 install_pc_relative (gdbarch
, regs
, dsc
, rd
);
7604 thumb_decode_pc_relative_16bit (struct gdbarch
*gdbarch
, uint16_t insn
,
7605 struct regcache
*regs
,
7606 struct displaced_step_closure
*dsc
)
7608 unsigned int rd
= bits (insn
, 8, 10);
7609 unsigned int imm8
= bits (insn
, 0, 7);
7611 if (debug_displaced
)
7612 fprintf_unfiltered (gdb_stdlog
,
7613 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
7616 return thumb_copy_pc_relative_16bit (gdbarch
, regs
, dsc
, rd
, imm8
);
7620 thumb_copy_pc_relative_32bit (struct gdbarch
*gdbarch
, uint16_t insn1
,
7621 uint16_t insn2
, struct regcache
*regs
,
7622 struct displaced_step_closure
*dsc
)
7624 unsigned int rd
= bits (insn2
, 8, 11);
7625 /* Since immediate has the same encoding in ADR ADD and SUB, so we simply
7626 extract raw immediate encoding rather than computing immediate. When
7627 generating ADD or SUB instruction, we can simply perform OR operation to
7628 set immediate into ADD. */
7629 unsigned int imm_3_8
= insn2
& 0x70ff;
7630 unsigned int imm_i
= insn1
& 0x0400; /* Clear all bits except bit 10. */
7632 if (debug_displaced
)
7633 fprintf_unfiltered (gdb_stdlog
,
7634 "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
7635 rd
, imm_i
, imm_3_8
, insn1
, insn2
);
7637 if (bit (insn1
, 7)) /* Encoding T2 */
7639 /* Encoding T3: SUB Rd, Rd, #imm */
7640 dsc
->modinsn
[0] = (0xf1a0 | rd
| imm_i
);
7641 dsc
->modinsn
[1] = ((rd
<< 8) | imm_3_8
);
7643 else /* Encoding T3 */
7645 /* Encoding T3: ADD Rd, Rd, #imm */
7646 dsc
->modinsn
[0] = (0xf100 | rd
| imm_i
);
7647 dsc
->modinsn
[1] = ((rd
<< 8) | imm_3_8
);
7651 install_pc_relative (gdbarch
, regs
, dsc
, rd
);
7657 thumb_copy_16bit_ldr_literal (struct gdbarch
*gdbarch
, unsigned short insn1
,
7658 struct regcache
*regs
,
7659 struct displaced_step_closure
*dsc
)
7661 unsigned int rt
= bits (insn1
, 8, 10);
7663 int imm8
= (bits (insn1
, 0, 7) << 2);
7664 CORE_ADDR from
= dsc
->insn_addr
;
7670 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
7672 Insn: LDR R0, [R2, R3];
7673 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
7675 if (debug_displaced
)
7676 fprintf_unfiltered (gdb_stdlog
,
7677 "displaced: copying thumb ldr r%d [pc #%d]\n"
7680 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
7681 dsc
->tmp
[2] = displaced_read_reg (regs
, dsc
, 2);
7682 dsc
->tmp
[3] = displaced_read_reg (regs
, dsc
, 3);
7683 pc
= displaced_read_reg (regs
, dsc
, ARM_PC_REGNUM
);
7684 /* The assembler calculates the required value of the offset from the
7685 Align(PC,4) value of this instruction to the label. */
7686 pc
= pc
& 0xfffffffc;
7688 displaced_write_reg (regs
, dsc
, 2, pc
, CANNOT_WRITE_PC
);
7689 displaced_write_reg (regs
, dsc
, 3, imm8
, CANNOT_WRITE_PC
);
7692 dsc
->u
.ldst
.xfersize
= 4;
7694 dsc
->u
.ldst
.immed
= 0;
7695 dsc
->u
.ldst
.writeback
= 0;
7696 dsc
->u
.ldst
.restore_r4
= 0;
7698 dsc
->modinsn
[0] = 0x58d0; /* ldr r0, [r2, r3]*/
7700 dsc
->cleanup
= &cleanup_load
;
7705 /* Copy Thumb cbnz/cbz insruction. */
7708 thumb_copy_cbnz_cbz (struct gdbarch
*gdbarch
, uint16_t insn1
,
7709 struct regcache
*regs
,
7710 struct displaced_step_closure
*dsc
)
7712 int non_zero
= bit (insn1
, 11);
7713 unsigned int imm5
= (bit (insn1
, 9) << 6) | (bits (insn1
, 3, 7) << 1);
7714 CORE_ADDR from
= dsc
->insn_addr
;
7715 int rn
= bits (insn1
, 0, 2);
7716 int rn_val
= displaced_read_reg (regs
, dsc
, rn
);
7718 dsc
->u
.branch
.cond
= (rn_val
&& non_zero
) || (!rn_val
&& !non_zero
);
7719 /* CBNZ and CBZ do not affect the condition flags. If condition is true,
7720 set it INST_AL, so cleanup_branch will know branch is taken, otherwise,
7721 condition is false, let it be, cleanup_branch will do nothing. */
7722 if (dsc
->u
.branch
.cond
)
7724 dsc
->u
.branch
.cond
= INST_AL
;
7725 dsc
->u
.branch
.dest
= from
+ 4 + imm5
;
7728 dsc
->u
.branch
.dest
= from
+ 2;
7730 dsc
->u
.branch
.link
= 0;
7731 dsc
->u
.branch
.exchange
= 0;
7733 if (debug_displaced
)
7734 fprintf_unfiltered (gdb_stdlog
, "displaced: copying %s [r%d = 0x%x]"
7735 " insn %.4x to %.8lx\n", non_zero
? "cbnz" : "cbz",
7736 rn
, rn_val
, insn1
, dsc
->u
.branch
.dest
);
7738 dsc
->modinsn
[0] = THUMB_NOP
;
7740 dsc
->cleanup
= &cleanup_branch
;
7744 /* Copy Table Branch Byte/Halfword */
7746 thumb2_copy_table_branch (struct gdbarch
*gdbarch
, uint16_t insn1
,
7747 uint16_t insn2
, struct regcache
*regs
,
7748 struct displaced_step_closure
*dsc
)
7750 ULONGEST rn_val
, rm_val
;
7751 int is_tbh
= bit (insn2
, 4);
7752 CORE_ADDR halfwords
= 0;
7753 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
7755 rn_val
= displaced_read_reg (regs
, dsc
, bits (insn1
, 0, 3));
7756 rm_val
= displaced_read_reg (regs
, dsc
, bits (insn2
, 0, 3));
7762 target_read_memory (rn_val
+ 2 * rm_val
, buf
, 2);
7763 halfwords
= extract_unsigned_integer (buf
, 2, byte_order
);
7769 target_read_memory (rn_val
+ rm_val
, buf
, 1);
7770 halfwords
= extract_unsigned_integer (buf
, 1, byte_order
);
7773 if (debug_displaced
)
7774 fprintf_unfiltered (gdb_stdlog
, "displaced: %s base 0x%x offset 0x%x"
7775 " offset 0x%x\n", is_tbh
? "tbh" : "tbb",
7776 (unsigned int) rn_val
, (unsigned int) rm_val
,
7777 (unsigned int) halfwords
);
7779 dsc
->u
.branch
.cond
= INST_AL
;
7780 dsc
->u
.branch
.link
= 0;
7781 dsc
->u
.branch
.exchange
= 0;
7782 dsc
->u
.branch
.dest
= dsc
->insn_addr
+ 4 + 2 * halfwords
;
7784 dsc
->cleanup
= &cleanup_branch
;
7790 cleanup_pop_pc_16bit_all (struct gdbarch
*gdbarch
, struct regcache
*regs
,
7791 struct displaced_step_closure
*dsc
)
7794 int val
= displaced_read_reg (regs
, dsc
, 7);
7795 displaced_write_reg (regs
, dsc
, ARM_PC_REGNUM
, val
, BX_WRITE_PC
);
7798 val
= displaced_read_reg (regs
, dsc
, 8);
7799 displaced_write_reg (regs
, dsc
, 7, val
, CANNOT_WRITE_PC
);
7802 displaced_write_reg (regs
, dsc
, 8, dsc
->tmp
[0], CANNOT_WRITE_PC
);
7807 thumb_copy_pop_pc_16bit (struct gdbarch
*gdbarch
, unsigned short insn1
,
7808 struct regcache
*regs
,
7809 struct displaced_step_closure
*dsc
)
7811 dsc
->u
.block
.regmask
= insn1
& 0x00ff;
7813 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
7816 (1) register list is full, that is, r0-r7 are used.
7817 Prepare: tmp[0] <- r8
7819 POP {r0, r1, ...., r6, r7}; remove PC from reglist
7820 MOV r8, r7; Move value of r7 to r8;
7821 POP {r7}; Store PC value into r7.
7823 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
7825 (2) register list is not full, supposing there are N registers in
7826 register list (except PC, 0 <= N <= 7).
7827 Prepare: for each i, 0 - N, tmp[i] <- ri.
7829 POP {r0, r1, ...., rN};
7831 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
7832 from tmp[] properly.
7834 if (debug_displaced
)
7835 fprintf_unfiltered (gdb_stdlog
,
7836 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
7837 dsc
->u
.block
.regmask
, insn1
);
7839 if (dsc
->u
.block
.regmask
== 0xff)
7841 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 8);
7843 dsc
->modinsn
[0] = (insn1
& 0xfeff); /* POP {r0,r1,...,r6, r7} */
7844 dsc
->modinsn
[1] = 0x46b8; /* MOV r8, r7 */
7845 dsc
->modinsn
[2] = 0xbc80; /* POP {r7} */
7848 dsc
->cleanup
= &cleanup_pop_pc_16bit_all
;
7852 unsigned int num_in_list
= bitcount (dsc
->u
.block
.regmask
);
7853 unsigned int new_regmask
, bit
= 1;
7854 unsigned int to
= 0, from
= 0, i
, new_rn
;
7856 for (i
= 0; i
< num_in_list
+ 1; i
++)
7857 dsc
->tmp
[i
] = displaced_read_reg (regs
, dsc
, i
);
7859 new_regmask
= (1 << (num_in_list
+ 1)) - 1;
7861 if (debug_displaced
)
7862 fprintf_unfiltered (gdb_stdlog
, _("displaced: POP "
7863 "{..., pc}: original reg list %.4x,"
7864 " modified list %.4x\n"),
7865 (int) dsc
->u
.block
.regmask
, new_regmask
);
7867 dsc
->u
.block
.regmask
|= 0x8000;
7868 dsc
->u
.block
.writeback
= 0;
7869 dsc
->u
.block
.cond
= INST_AL
;
7871 dsc
->modinsn
[0] = (insn1
& ~0x1ff) | (new_regmask
& 0xff);
7873 dsc
->cleanup
= &cleanup_block_load_pc
;
/* Decode and copy one 16-bit Thumb instruction for displaced stepping.
   Raises an internal error if no copy routine accepts the instruction.  */

static void
thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
				    struct regcache *regs,
				    struct displaced_step_closure *dsc)
{
  unsigned short op_bit_12_15 = bits (insn1, 12, 15);
  unsigned short op_bit_10_11 = bits (insn1, 10, 11);
  int err = 0;

  /* 16-bit thumb instructions.  */
  switch (op_bit_12_15)
    {
      /* Shift (imme), add, subtract, move and compare.  */
    case 0: case 1: case 2: case 3:
      err = thumb_copy_unmodified_16bit (gdbarch, insn1,
					 "shift/add/sub/mov/cmp",
					 dsc);
      break;
    case 4:
      switch (op_bit_10_11)
	{
	case 0: /* Data-processing */
	  err = thumb_copy_unmodified_16bit (gdbarch, insn1,
					     "data-processing",
					     dsc);
	  break;
	case 1: /* Special data instructions and branch and exchange.  */
	  {
	    unsigned short op = bits (insn1, 7, 9);
	    if (op == 6 || op == 7) /* BX or BLX */
	      err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
	    else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers.  */
	      err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
	    else
	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
						 dsc);
	  }
	  break;
	default: /* LDR (literal) */
	  err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
	}
      break;
    case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
      break;
    case 10:
      if (op_bit_10_11 < 2) /* Generate PC-relative address */
	err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
      else /* Generate SP-relative address */
	err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
      break;
    case 11: /* Misc 16-bit instructions */
      switch (bits (insn1, 8, 11))
	{
	case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
	  err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
	  break;
	case 12: case 13: /* POP */
	  if (bit (insn1, 8)) /* PC is in register list.  */
	    err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
	  else
	    err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
	  break;
	case 15: /* If-Then, and hints */
	  if (bits (insn1, 0, 3))
	    /* If-Then makes up to four following instructions conditional.
	       IT instruction itself is not conditional, so handle it as a
	       common unmodified instruction.  */
	    err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
					       dsc);
	  else
	    err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
	  break;
	default:
	  err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
	}
      break;
    case 12:
      if (op_bit_10_11 < 2) /* Store multiple registers */
	err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
      else /* Load multiple registers */
	err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
      break;
    case 13: /* Conditional branch and supervisor call */
      if (bits (insn1, 9, 11) != 7) /* conditional branch */
	err = thumb_copy_b (gdbarch, insn1, dsc);
      else
	err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
      break;
    case 14: /* Unconditional branch */
      err = thumb_copy_b (gdbarch, insn1, dsc);
      break;
    default:
      err = 1;
    }

  if (err)
    internal_error (__FILE__, __LINE__,
		    _("thumb_process_displaced_16bit_insn: Instruction decode error"));
}
/* Decode 32-bit Thumb loads and memory hints (PLD/PLI, LDRB/LDRH/LDR
   and their literal/translated variants).  */

static int
decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
				 uint16_t insn1, uint16_t insn2,
				 struct regcache *regs,
				 struct displaced_step_closure *dsc)
{
  int rt = bits (insn2, 12, 15);
  int rn = bits (insn1, 0, 3);
  int op1 = bits (insn1, 7, 8);

  switch (bits (insn1, 5, 6))
    {
    case 0: /* Load byte and memory hints */
      if (rt == 0xf) /* PLD/PLI */
	{
	  if (rn == 0xf)
	    /* PLD literal or Encoding T3 of PLI(immediate, literal).  */
	    return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
	  else
	    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						"pli/pld", dsc);
	}
      else
	{
	  if (rn == 0xf) /* LDRB/LDRSB (literal) */
	    return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
					     1);
	  else
	    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						"ldrb{reg, immediate}/ldrbt",
						dsc);
	}

    case 1: /* Load halfword and memory hints.  */
      if (rt == 0xf) /* PLD{W} and Unalloc memory hint.  */
	return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					    "pld/unalloc memhint", dsc);
      else
	{
	  if (rn == 0xf) /* LDRH/LDRSH (literal) */
	    return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
					     2);
	  else
	    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						"ldrh/ldrht", dsc);
	}

    case 2: /* Load word */
      {
	int insn2_bit_8_11 = bits (insn2, 8, 11);

	if (rn == 0xf) /* LDR (literal) */
	  return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
	else if (op1 == 0x1) /* Encoding T3 */
	  return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
					   0, 1);
	else /* op1 == 0x0 */
	  {
	    if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
	      /* LDR (immediate) */
	      return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
					       dsc, bit (insn2, 8), 1);
	    else if (insn2_bit_8_11 == 0xe) /* LDRT */
	      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						  "ldrt", dsc);
	    else
	      /* LDR (register) */
	      return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
					       dsc, 0, 0);
	  }
      }

    default:
      return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
    }
}
/* Decode and copy one 32-bit Thumb instruction for displaced stepping.
   Raises an internal error if no copy routine accepts the instruction.  */

static void
thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
				    uint16_t insn2, struct regcache *regs,
				    struct displaced_step_closure *dsc)
{
  int err = 0;
  unsigned short op = bit (insn2, 15);
  unsigned int op1 = bits (insn1, 11, 12);

  switch (op1)
    {
    case 1:
      switch (bits (insn1, 9, 10))
	{
	case 0:
	  if (bit (insn1, 6))
	    {
	      /* Load/store {dual, exclusive}, table branch.  */
	      if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
		  && bits (insn2, 5, 7) == 0)
		err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
						dsc);
	      else
		/* PC is not allowed to use in load/store {dual, exclusive}
		   instructions.  */
		err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						   "load/store dual/ex", dsc);
	    }
	  else /* load/store multiple */
	    {
	      switch (bits (insn1, 7, 8))
		{
		case 0: case 3: /* SRS, RFE */
		  err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						     "srs/rfe", dsc);
		  break;
		case 1: case 2: /* LDM/STM/PUSH/POP */
		  err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
		  break;
		}
	    }
	  break;
	case 1:
	  /* Data-processing (shift register).  */
	  err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
					    dsc);
	  break;
	default: /* Coprocessor instructions.  */
	  err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
	  break;
	}
      break;
    case 2: /* op1 = 2 */
      if (op) /* Branch and misc control.  */
	{
	  if (bit (insn2, 14)  /* BLX/BL */
	      || bit (insn2, 12)  /* Unconditional branch */
	      || (bits (insn1, 7, 9) != 0x7))  /* Conditional branch */
	    err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
	  else
	    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					       "misc ctrl", dsc);
	}
      else
	{
	  if (bit (insn1, 9)) /* Data processing (plain binary imm).  */
	    {
	      int op = bits (insn1, 4, 8);
	      int rn = bits (insn1, 0, 3);
	      if ((op == 0 || op == 0xa) && rn == 0xf)
		err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
						    regs, dsc);
	      else
		err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						   "dp/pb", dsc);
	    }
	  else /* Data processing (modified immediate) */
	    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					       "dp/mi", dsc);
	}
      break;
    case 3: /* op1 = 3 */
      switch (bits (insn1, 9, 10))
	{
	case 0:
	  if (bit (insn1, 4))
	    err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
						   regs, dsc);
	  else /* NEON Load/Store and Store single data item */
	    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					       "neon elt/struct load/store",
					       dsc);
	  break;
	case 1: /* op1 = 3, bits (9, 10) == 1 */
	  switch (bits (insn1, 7, 8))
	    {
	    case 0: case 1: /* Data processing (register) */
	      err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						 "dp(reg)", dsc);
	      break;
	    case 2: /* Multiply and absolute difference */
	      err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						 "mul/mua/diff", dsc);
	      break;
	    case 3: /* Long multiply and divide */
	      err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						 "lmul/lmua", dsc);
	      break;
	    }
	  break;
	default: /* Coprocessor instructions */
	  err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
	  break;
	}
      break;
    default:
      err = 1;
    }

  if (err)
    internal_error (__FILE__, __LINE__,
		    _("thumb_process_displaced_32bit_insn: Instruction decode error"));
}
8192 thumb_process_displaced_insn (struct gdbarch
*gdbarch
, CORE_ADDR from
,
8193 CORE_ADDR to
, struct regcache
*regs
,
8194 struct displaced_step_closure
*dsc
)
8196 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
8198 = read_memory_unsigned_integer (from
, 2, byte_order_for_code
);
8200 if (debug_displaced
)
8201 fprintf_unfiltered (gdb_stdlog
, "displaced: process thumb insn %.4x "
8202 "at %.8lx\n", insn1
, (unsigned long) from
);
8205 dsc
->insn_size
= thumb_insn_size (insn1
);
8206 if (thumb_insn_size (insn1
) == 4)
8209 = read_memory_unsigned_integer (from
+ 2, 2, byte_order_for_code
);
8210 thumb_process_displaced_32bit_insn (gdbarch
, insn1
, insn2
, regs
, dsc
);
8213 thumb_process_displaced_16bit_insn (gdbarch
, insn1
, regs
, dsc
);
8217 arm_process_displaced_insn (struct gdbarch
*gdbarch
, CORE_ADDR from
,
8218 CORE_ADDR to
, struct regcache
*regs
,
8219 struct displaced_step_closure
*dsc
)
8222 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
8225 /* Most displaced instructions use a 1-instruction scratch space, so set this
8226 here and override below if/when necessary. */
8228 dsc
->insn_addr
= from
;
8229 dsc
->scratch_base
= to
;
8230 dsc
->cleanup
= NULL
;
8231 dsc
->wrote_to_pc
= 0;
8233 if (!displaced_in_arm_mode (regs
))
8234 return thumb_process_displaced_insn (gdbarch
, from
, to
, regs
, dsc
);
8238 insn
= read_memory_unsigned_integer (from
, 4, byte_order_for_code
);
8239 if (debug_displaced
)
8240 fprintf_unfiltered (gdb_stdlog
, "displaced: stepping insn %.8lx "
8241 "at %.8lx\n", (unsigned long) insn
,
8242 (unsigned long) from
);
8244 if ((insn
& 0xf0000000) == 0xf0000000)
8245 err
= arm_decode_unconditional (gdbarch
, insn
, regs
, dsc
);
8246 else switch (((insn
& 0x10) >> 4) | ((insn
& 0xe000000) >> 24))
8248 case 0x0: case 0x1: case 0x2: case 0x3:
8249 err
= arm_decode_dp_misc (gdbarch
, insn
, regs
, dsc
);
8252 case 0x4: case 0x5: case 0x6:
8253 err
= arm_decode_ld_st_word_ubyte (gdbarch
, insn
, regs
, dsc
);
8257 err
= arm_decode_media (gdbarch
, insn
, dsc
);
8260 case 0x8: case 0x9: case 0xa: case 0xb:
8261 err
= arm_decode_b_bl_ldmstm (gdbarch
, insn
, regs
, dsc
);
8264 case 0xc: case 0xd: case 0xe: case 0xf:
8265 err
= arm_decode_svc_copro (gdbarch
, insn
, to
, regs
, dsc
);
8270 internal_error (__FILE__
, __LINE__
,
8271 _("arm_process_displaced_insn: Instruction decode error"));
8274 /* Actually set up the scratch space for a displaced instruction. */
8277 arm_displaced_init_closure (struct gdbarch
*gdbarch
, CORE_ADDR from
,
8278 CORE_ADDR to
, struct displaced_step_closure
*dsc
)
8280 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
8281 unsigned int i
, len
, offset
;
8282 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
8283 int size
= dsc
->is_thumb
? 2 : 4;
8284 const unsigned char *bkp_insn
;
8287 /* Poke modified instruction(s). */
8288 for (i
= 0; i
< dsc
->numinsns
; i
++)
8290 if (debug_displaced
)
8292 fprintf_unfiltered (gdb_stdlog
, "displaced: writing insn ");
8294 fprintf_unfiltered (gdb_stdlog
, "%.8lx",
8297 fprintf_unfiltered (gdb_stdlog
, "%.4x",
8298 (unsigned short)dsc
->modinsn
[i
]);
8300 fprintf_unfiltered (gdb_stdlog
, " at %.8lx\n",
8301 (unsigned long) to
+ offset
);
8304 write_memory_unsigned_integer (to
+ offset
, size
,
8305 byte_order_for_code
,
8310 /* Choose the correct breakpoint instruction. */
8313 bkp_insn
= tdep
->thumb_breakpoint
;
8314 len
= tdep
->thumb_breakpoint_size
;
8318 bkp_insn
= tdep
->arm_breakpoint
;
8319 len
= tdep
->arm_breakpoint_size
;
8322 /* Put breakpoint afterwards. */
8323 write_memory (to
+ offset
, bkp_insn
, len
);
8325 if (debug_displaced
)
8326 fprintf_unfiltered (gdb_stdlog
, "displaced: copy %s->%s: ",
8327 paddress (gdbarch
, from
), paddress (gdbarch
, to
));
8330 /* Entry point for copying an instruction into scratch space for displaced
8333 struct displaced_step_closure
*
8334 arm_displaced_step_copy_insn (struct gdbarch
*gdbarch
,
8335 CORE_ADDR from
, CORE_ADDR to
,
8336 struct regcache
*regs
)
8338 struct displaced_step_closure
*dsc
8339 = xmalloc (sizeof (struct displaced_step_closure
));
8340 arm_process_displaced_insn (gdbarch
, from
, to
, regs
, dsc
);
8341 arm_displaced_init_closure (gdbarch
, from
, to
, dsc
);
8346 /* Entry point for cleaning things up after a displaced instruction has been
8350 arm_displaced_step_fixup (struct gdbarch
*gdbarch
,
8351 struct displaced_step_closure
*dsc
,
8352 CORE_ADDR from
, CORE_ADDR to
,
8353 struct regcache
*regs
)
8356 dsc
->cleanup (gdbarch
, regs
, dsc
);
8358 if (!dsc
->wrote_to_pc
)
8359 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
,
8360 dsc
->insn_addr
+ dsc
->insn_size
);
8364 #include "bfd-in2.h"
8365 #include "libcoff.h"
8368 gdb_print_insn_arm (bfd_vma memaddr
, disassemble_info
*info
)
8370 struct gdbarch
*gdbarch
= info
->application_data
;
8372 if (arm_pc_is_thumb (gdbarch
, memaddr
))
8374 static asymbol
*asym
;
8375 static combined_entry_type ce
;
8376 static struct coff_symbol_struct csym
;
8377 static struct bfd fake_bfd
;
8378 static bfd_target fake_target
;
8380 if (csym
.native
== NULL
)
8382 /* Create a fake symbol vector containing a Thumb symbol.
8383 This is solely so that the code in print_insn_little_arm()
8384 and print_insn_big_arm() in opcodes/arm-dis.c will detect
8385 the presence of a Thumb symbol and switch to decoding
8386 Thumb instructions. */
8388 fake_target
.flavour
= bfd_target_coff_flavour
;
8389 fake_bfd
.xvec
= &fake_target
;
8390 ce
.u
.syment
.n_sclass
= C_THUMBEXTFUNC
;
8392 csym
.symbol
.the_bfd
= &fake_bfd
;
8393 csym
.symbol
.name
= "fake";
8394 asym
= (asymbol
*) & csym
;
8397 memaddr
= UNMAKE_THUMB_ADDR (memaddr
);
8398 info
->symbols
= &asym
;
8401 info
->symbols
= NULL
;
8403 if (info
->endian
== BFD_ENDIAN_BIG
)
8404 return print_insn_big_arm (memaddr
, info
);
8406 return print_insn_little_arm (memaddr
, info
);
/* The following define instruction sequences that will cause ARM
   cpu's to take an undefined instruction trap.  These are used to
   signal a breakpoint to GDB.

   The newer ARMv4T cpu's are capable of operating in ARM or Thumb
   modes.  A different instruction is required for each mode.  The ARM
   cpu's can also be big or little endian.  Thus four different
   instructions are needed to support all cases.

   Note: ARMv4 defines several new instructions that will take the
   undefined instruction trap.  ARM7TDMI is nominally ARMv4T, but does
   not in fact add the new instructions.  The new undefined
   instructions in ARMv4 are all instructions that had no defined
   behaviour in earlier chips.  There is no guarantee that they will
   raise an exception, but may be treated as NOP's.  In practice, it
   may only be safe to rely on instructions matching:

   3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
   1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
   C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x

   Even this may only be true if the condition predicate is true.  The
   following use a condition predicate of ALWAYS so it is always TRUE.

   There are other ways of forcing a breakpoint.  GNU/Linux, RISC iX,
   and NetBSD all use a software interrupt rather than an undefined
   instruction to force a trap.  This can be handled by the
   abi-specific code during establishment of the gdbarch vector.  */

#define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
#define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
#define THUMB_LE_BREAKPOINT {0xbe,0xbe}
#define THUMB_BE_BREAKPOINT {0xbe,0xbe}

static const char arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
static const char arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
static const char arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
static const char arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
8448 /* Determine the type and size of breakpoint to insert at PCPTR. Uses
8449 the program counter value to determine whether a 16-bit or 32-bit
8450 breakpoint should be used. It returns a pointer to a string of
8451 bytes that encode a breakpoint instruction, stores the length of
8452 the string to *lenptr, and adjusts the program counter (if
8453 necessary) to point to the actual memory location where the
8454 breakpoint should be inserted. */
8456 static const unsigned char *
8457 arm_breakpoint_from_pc (struct gdbarch
*gdbarch
, CORE_ADDR
*pcptr
, int *lenptr
)
8459 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
8460 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
8462 if (arm_pc_is_thumb (gdbarch
, *pcptr
))
8464 *pcptr
= UNMAKE_THUMB_ADDR (*pcptr
);
8466 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
8467 check whether we are replacing a 32-bit instruction. */
8468 if (tdep
->thumb2_breakpoint
!= NULL
)
8471 if (target_read_memory (*pcptr
, buf
, 2) == 0)
8473 unsigned short inst1
;
8474 inst1
= extract_unsigned_integer (buf
, 2, byte_order_for_code
);
8475 if ((inst1
& 0xe000) == 0xe000 && (inst1
& 0x1800) != 0)
8477 *lenptr
= tdep
->thumb2_breakpoint_size
;
8478 return tdep
->thumb2_breakpoint
;
8483 *lenptr
= tdep
->thumb_breakpoint_size
;
8484 return tdep
->thumb_breakpoint
;
8488 *lenptr
= tdep
->arm_breakpoint_size
;
8489 return tdep
->arm_breakpoint
;
8494 arm_remote_breakpoint_from_pc (struct gdbarch
*gdbarch
, CORE_ADDR
*pcptr
,
8497 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
8499 arm_breakpoint_from_pc (gdbarch
, pcptr
, kindptr
);
8501 if (arm_pc_is_thumb (gdbarch
, *pcptr
) && *kindptr
== 4)
8502 /* The documented magic value for a 32-bit Thumb-2 breakpoint, so
8503 that this is not confused with a 32-bit ARM breakpoint. */
8507 /* Extract from an array REGBUF containing the (raw) register state a
8508 function return value of type TYPE, and copy that, in virtual
8509 format, into VALBUF. */
8512 arm_extract_return_value (struct type
*type
, struct regcache
*regs
,
8515 struct gdbarch
*gdbarch
= get_regcache_arch (regs
);
8516 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
8518 if (TYPE_CODE_FLT
== TYPE_CODE (type
))
8520 switch (gdbarch_tdep (gdbarch
)->fp_model
)
8524 /* The value is in register F0 in internal format. We need to
8525 extract the raw value and then convert it to the desired
8527 bfd_byte tmpbuf
[FP_REGISTER_SIZE
];
8529 regcache_cooked_read (regs
, ARM_F0_REGNUM
, tmpbuf
);
8530 convert_from_extended (floatformat_from_type (type
), tmpbuf
,
8531 valbuf
, gdbarch_byte_order (gdbarch
));
8535 case ARM_FLOAT_SOFT_FPA
:
8536 case ARM_FLOAT_SOFT_VFP
:
8537 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8538 not using the VFP ABI code. */
8540 regcache_cooked_read (regs
, ARM_A1_REGNUM
, valbuf
);
8541 if (TYPE_LENGTH (type
) > 4)
8542 regcache_cooked_read (regs
, ARM_A1_REGNUM
+ 1,
8543 valbuf
+ INT_REGISTER_SIZE
);
8547 internal_error (__FILE__
, __LINE__
,
8548 _("arm_extract_return_value: "
8549 "Floating point model not supported"));
8553 else if (TYPE_CODE (type
) == TYPE_CODE_INT
8554 || TYPE_CODE (type
) == TYPE_CODE_CHAR
8555 || TYPE_CODE (type
) == TYPE_CODE_BOOL
8556 || TYPE_CODE (type
) == TYPE_CODE_PTR
8557 || TYPE_CODE (type
) == TYPE_CODE_REF
8558 || TYPE_CODE (type
) == TYPE_CODE_ENUM
)
8560 /* If the type is a plain integer, then the access is
8561 straight-forward. Otherwise we have to play around a bit
8563 int len
= TYPE_LENGTH (type
);
8564 int regno
= ARM_A1_REGNUM
;
8569 /* By using store_unsigned_integer we avoid having to do
8570 anything special for small big-endian values. */
8571 regcache_cooked_read_unsigned (regs
, regno
++, &tmp
);
8572 store_unsigned_integer (valbuf
,
8573 (len
> INT_REGISTER_SIZE
8574 ? INT_REGISTER_SIZE
: len
),
8576 len
-= INT_REGISTER_SIZE
;
8577 valbuf
+= INT_REGISTER_SIZE
;
8582 /* For a structure or union the behaviour is as if the value had
8583 been stored to word-aligned memory and then loaded into
8584 registers with 32-bit load instruction(s). */
8585 int len
= TYPE_LENGTH (type
);
8586 int regno
= ARM_A1_REGNUM
;
8587 bfd_byte tmpbuf
[INT_REGISTER_SIZE
];
8591 regcache_cooked_read (regs
, regno
++, tmpbuf
);
8592 memcpy (valbuf
, tmpbuf
,
8593 len
> INT_REGISTER_SIZE
? INT_REGISTER_SIZE
: len
);
8594 len
-= INT_REGISTER_SIZE
;
8595 valbuf
+= INT_REGISTER_SIZE
;
8601 /* Will a function return an aggregate type in memory or in a
8602 register? Return 0 if an aggregate type can be returned in a
8603 register, 1 if it must be returned in memory. */
8606 arm_return_in_memory (struct gdbarch
*gdbarch
, struct type
*type
)
8609 enum type_code code
;
8611 CHECK_TYPEDEF (type
);
8613 /* In the ARM ABI, "integer" like aggregate types are returned in
8614 registers. For an aggregate type to be integer like, its size
8615 must be less than or equal to INT_REGISTER_SIZE and the
8616 offset of each addressable subfield must be zero. Note that bit
8617 fields are not addressable, and all addressable subfields of
8618 unions always start at offset zero.
8620 This function is based on the behaviour of GCC 2.95.1.
8621 See: gcc/arm.c: arm_return_in_memory() for details.
8623 Note: All versions of GCC before GCC 2.95.2 do not set up the
8624 parameters correctly for a function returning the following
8625 structure: struct { float f;}; This should be returned in memory,
8626 not a register. Richard Earnshaw sent me a patch, but I do not
8627 know of any way to detect if a function like the above has been
8628 compiled with the correct calling convention. */
8630 /* All aggregate types that won't fit in a register must be returned
8632 if (TYPE_LENGTH (type
) > INT_REGISTER_SIZE
)
8637 /* The AAPCS says all aggregates not larger than a word are returned
8639 if (gdbarch_tdep (gdbarch
)->arm_abi
!= ARM_ABI_APCS
)
8642 /* The only aggregate types that can be returned in a register are
8643 structs and unions. Arrays must be returned in memory. */
8644 code
= TYPE_CODE (type
);
8645 if ((TYPE_CODE_STRUCT
!= code
) && (TYPE_CODE_UNION
!= code
))
8650 /* Assume all other aggregate types can be returned in a register.
8651 Run a check for structures, unions and arrays. */
8654 if ((TYPE_CODE_STRUCT
== code
) || (TYPE_CODE_UNION
== code
))
8657 /* Need to check if this struct/union is "integer" like. For
8658 this to be true, its size must be less than or equal to
8659 INT_REGISTER_SIZE and the offset of each addressable
8660 subfield must be zero. Note that bit fields are not
8661 addressable, and unions always start at offset zero. If any
8662 of the subfields is a floating point type, the struct/union
8663 cannot be an integer type. */
8665 /* For each field in the object, check:
8666 1) Is it FP? --> yes, nRc = 1;
8667 2) Is it addressable (bitpos != 0) and
8668 not packed (bitsize == 0)?
8672 for (i
= 0; i
< TYPE_NFIELDS (type
); i
++)
8674 enum type_code field_type_code
;
8675 field_type_code
= TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type
,
8678 /* Is it a floating point type field? */
8679 if (field_type_code
== TYPE_CODE_FLT
)
8685 /* If bitpos != 0, then we have to care about it. */
8686 if (TYPE_FIELD_BITPOS (type
, i
) != 0)
8688 /* Bitfields are not addressable. If the field bitsize is
8689 zero, then the field is not packed. Hence it cannot be
8690 a bitfield or any other packed type. */
8691 if (TYPE_FIELD_BITSIZE (type
, i
) == 0)
8703 /* Write into appropriate registers a function return value of type
8704 TYPE, given in virtual format. */
8707 arm_store_return_value (struct type
*type
, struct regcache
*regs
,
8708 const gdb_byte
*valbuf
)
8710 struct gdbarch
*gdbarch
= get_regcache_arch (regs
);
8711 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
8713 if (TYPE_CODE (type
) == TYPE_CODE_FLT
)
8715 char buf
[MAX_REGISTER_SIZE
];
8717 switch (gdbarch_tdep (gdbarch
)->fp_model
)
8721 convert_to_extended (floatformat_from_type (type
), buf
, valbuf
,
8722 gdbarch_byte_order (gdbarch
));
8723 regcache_cooked_write (regs
, ARM_F0_REGNUM
, buf
);
8726 case ARM_FLOAT_SOFT_FPA
:
8727 case ARM_FLOAT_SOFT_VFP
:
8728 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8729 not using the VFP ABI code. */
8731 regcache_cooked_write (regs
, ARM_A1_REGNUM
, valbuf
);
8732 if (TYPE_LENGTH (type
) > 4)
8733 regcache_cooked_write (regs
, ARM_A1_REGNUM
+ 1,
8734 valbuf
+ INT_REGISTER_SIZE
);
8738 internal_error (__FILE__
, __LINE__
,
8739 _("arm_store_return_value: Floating "
8740 "point model not supported"));
8744 else if (TYPE_CODE (type
) == TYPE_CODE_INT
8745 || TYPE_CODE (type
) == TYPE_CODE_CHAR
8746 || TYPE_CODE (type
) == TYPE_CODE_BOOL
8747 || TYPE_CODE (type
) == TYPE_CODE_PTR
8748 || TYPE_CODE (type
) == TYPE_CODE_REF
8749 || TYPE_CODE (type
) == TYPE_CODE_ENUM
)
8751 if (TYPE_LENGTH (type
) <= 4)
8753 /* Values of one word or less are zero/sign-extended and
8755 bfd_byte tmpbuf
[INT_REGISTER_SIZE
];
8756 LONGEST val
= unpack_long (type
, valbuf
);
8758 store_signed_integer (tmpbuf
, INT_REGISTER_SIZE
, byte_order
, val
);
8759 regcache_cooked_write (regs
, ARM_A1_REGNUM
, tmpbuf
);
8763 /* Integral values greater than one word are stored in consecutive
8764 registers starting with r0. This will always be a multiple of
8765 the regiser size. */
8766 int len
= TYPE_LENGTH (type
);
8767 int regno
= ARM_A1_REGNUM
;
8771 regcache_cooked_write (regs
, regno
++, valbuf
);
8772 len
-= INT_REGISTER_SIZE
;
8773 valbuf
+= INT_REGISTER_SIZE
;
8779 /* For a structure or union the behaviour is as if the value had
8780 been stored to word-aligned memory and then loaded into
8781 registers with 32-bit load instruction(s). */
8782 int len
= TYPE_LENGTH (type
);
8783 int regno
= ARM_A1_REGNUM
;
8784 bfd_byte tmpbuf
[INT_REGISTER_SIZE
];
8788 memcpy (tmpbuf
, valbuf
,
8789 len
> INT_REGISTER_SIZE
? INT_REGISTER_SIZE
: len
);
8790 regcache_cooked_write (regs
, regno
++, tmpbuf
);
8791 len
-= INT_REGISTER_SIZE
;
8792 valbuf
+= INT_REGISTER_SIZE
;
8798 /* Handle function return values. */
8800 static enum return_value_convention
8801 arm_return_value (struct gdbarch
*gdbarch
, struct type
*func_type
,
8802 struct type
*valtype
, struct regcache
*regcache
,
8803 gdb_byte
*readbuf
, const gdb_byte
*writebuf
)
8805 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
8806 enum arm_vfp_cprc_base_type vfp_base_type
;
8809 if (arm_vfp_abi_for_function (gdbarch
, func_type
)
8810 && arm_vfp_call_candidate (valtype
, &vfp_base_type
, &vfp_base_count
))
8812 int reg_char
= arm_vfp_cprc_reg_char (vfp_base_type
);
8813 int unit_length
= arm_vfp_cprc_unit_length (vfp_base_type
);
8815 for (i
= 0; i
< vfp_base_count
; i
++)
8817 if (reg_char
== 'q')
8820 arm_neon_quad_write (gdbarch
, regcache
, i
,
8821 writebuf
+ i
* unit_length
);
8824 arm_neon_quad_read (gdbarch
, regcache
, i
,
8825 readbuf
+ i
* unit_length
);
8832 sprintf (name_buf
, "%c%d", reg_char
, i
);
8833 regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
8836 regcache_cooked_write (regcache
, regnum
,
8837 writebuf
+ i
* unit_length
);
8839 regcache_cooked_read (regcache
, regnum
,
8840 readbuf
+ i
* unit_length
);
8843 return RETURN_VALUE_REGISTER_CONVENTION
;
8846 if (TYPE_CODE (valtype
) == TYPE_CODE_STRUCT
8847 || TYPE_CODE (valtype
) == TYPE_CODE_UNION
8848 || TYPE_CODE (valtype
) == TYPE_CODE_ARRAY
)
8850 if (tdep
->struct_return
== pcc_struct_return
8851 || arm_return_in_memory (gdbarch
, valtype
))
8852 return RETURN_VALUE_STRUCT_CONVENTION
;
8856 arm_store_return_value (valtype
, regcache
, writebuf
);
8859 arm_extract_return_value (valtype
, regcache
, readbuf
);
8861 return RETURN_VALUE_REGISTER_CONVENTION
;
8866 arm_get_longjmp_target (struct frame_info
*frame
, CORE_ADDR
*pc
)
8868 struct gdbarch
*gdbarch
= get_frame_arch (frame
);
8869 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
8870 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
8872 char buf
[INT_REGISTER_SIZE
];
8874 jb_addr
= get_frame_register_unsigned (frame
, ARM_A1_REGNUM
);
8876 if (target_read_memory (jb_addr
+ tdep
->jb_pc
* tdep
->jb_elt_size
, buf
,
8880 *pc
= extract_unsigned_integer (buf
, INT_REGISTER_SIZE
, byte_order
);
8884 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
8885 return the target PC. Otherwise return 0. */
8888 arm_skip_stub (struct frame_info
*frame
, CORE_ADDR pc
)
8892 CORE_ADDR start_addr
;
8894 /* Find the starting address and name of the function containing the PC. */
8895 if (find_pc_partial_function (pc
, &name
, &start_addr
, NULL
) == 0)
8898 /* If PC is in a Thumb call or return stub, return the address of the
8899 target PC, which is in a register. The thunk functions are called
8900 _call_via_xx, where x is the register name. The possible names
8901 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
8902 functions, named __ARM_call_via_r[0-7]. */
8903 if (strncmp (name
, "_call_via_", 10) == 0
8904 || strncmp (name
, "__ARM_call_via_", strlen ("__ARM_call_via_")) == 0)
8906 /* Use the name suffix to determine which register contains the
8908 static char *table
[15] =
8909 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
8910 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
8913 int offset
= strlen (name
) - 2;
8915 for (regno
= 0; regno
<= 14; regno
++)
8916 if (strcmp (&name
[offset
], table
[regno
]) == 0)
8917 return get_frame_register_unsigned (frame
, regno
);
8920 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
8921 non-interworking calls to foo. We could decode the stubs
8922 to find the target but it's easier to use the symbol table. */
8923 namelen
= strlen (name
);
8924 if (name
[0] == '_' && name
[1] == '_'
8925 && ((namelen
> 2 + strlen ("_from_thumb")
8926 && strncmp (name
+ namelen
- strlen ("_from_thumb"), "_from_thumb",
8927 strlen ("_from_thumb")) == 0)
8928 || (namelen
> 2 + strlen ("_from_arm")
8929 && strncmp (name
+ namelen
- strlen ("_from_arm"), "_from_arm",
8930 strlen ("_from_arm")) == 0)))
8933 int target_len
= namelen
- 2;
8934 struct minimal_symbol
*minsym
;
8935 struct objfile
*objfile
;
8936 struct obj_section
*sec
;
8938 if (name
[namelen
- 1] == 'b')
8939 target_len
-= strlen ("_from_thumb");
8941 target_len
-= strlen ("_from_arm");
8943 target_name
= alloca (target_len
+ 1);
8944 memcpy (target_name
, name
+ 2, target_len
);
8945 target_name
[target_len
] = '\0';
8947 sec
= find_pc_section (pc
);
8948 objfile
= (sec
== NULL
) ? NULL
: sec
->objfile
;
8949 minsym
= lookup_minimal_symbol (target_name
, NULL
, objfile
);
8951 return SYMBOL_VALUE_ADDRESS (minsym
);
8956 return 0; /* not a stub */
8960 set_arm_command (char *args
, int from_tty
)
8962 printf_unfiltered (_("\
8963 \"set arm\" must be followed by an apporpriate subcommand.\n"));
8964 help_list (setarmcmdlist
, "set arm ", all_commands
, gdb_stdout
);
8968 show_arm_command (char *args
, int from_tty
)
8970 cmd_show_list (showarmcmdlist
, from_tty
, "");
8974 arm_update_current_architecture (void)
8976 struct gdbarch_info info
;
8978 /* If the current architecture is not ARM, we have nothing to do. */
8979 if (gdbarch_bfd_arch_info (target_gdbarch
)->arch
!= bfd_arch_arm
)
8982 /* Update the architecture. */
8983 gdbarch_info_init (&info
);
8985 if (!gdbarch_update_p (info
))
8986 internal_error (__FILE__
, __LINE__
, _("could not update architecture"));
8990 set_fp_model_sfunc (char *args
, int from_tty
,
8991 struct cmd_list_element
*c
)
8993 enum arm_float_model fp_model
;
8995 for (fp_model
= ARM_FLOAT_AUTO
; fp_model
!= ARM_FLOAT_LAST
; fp_model
++)
8996 if (strcmp (current_fp_model
, fp_model_strings
[fp_model
]) == 0)
8998 arm_fp_model
= fp_model
;
9002 if (fp_model
== ARM_FLOAT_LAST
)
9003 internal_error (__FILE__
, __LINE__
, _("Invalid fp model accepted: %s."),
9006 arm_update_current_architecture ();
9010 show_fp_model (struct ui_file
*file
, int from_tty
,
9011 struct cmd_list_element
*c
, const char *value
)
9013 struct gdbarch_tdep
*tdep
= gdbarch_tdep (target_gdbarch
);
9015 if (arm_fp_model
== ARM_FLOAT_AUTO
9016 && gdbarch_bfd_arch_info (target_gdbarch
)->arch
== bfd_arch_arm
)
9017 fprintf_filtered (file
, _("\
9018 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
9019 fp_model_strings
[tdep
->fp_model
]);
9021 fprintf_filtered (file
, _("\
9022 The current ARM floating point model is \"%s\".\n"),
9023 fp_model_strings
[arm_fp_model
]);
9027 arm_set_abi (char *args
, int from_tty
,
9028 struct cmd_list_element
*c
)
9030 enum arm_abi_kind arm_abi
;
9032 for (arm_abi
= ARM_ABI_AUTO
; arm_abi
!= ARM_ABI_LAST
; arm_abi
++)
9033 if (strcmp (arm_abi_string
, arm_abi_strings
[arm_abi
]) == 0)
9035 arm_abi_global
= arm_abi
;
9039 if (arm_abi
== ARM_ABI_LAST
)
9040 internal_error (__FILE__
, __LINE__
, _("Invalid ABI accepted: %s."),
9043 arm_update_current_architecture ();
9047 arm_show_abi (struct ui_file
*file
, int from_tty
,
9048 struct cmd_list_element
*c
, const char *value
)
9050 struct gdbarch_tdep
*tdep
= gdbarch_tdep (target_gdbarch
);
9052 if (arm_abi_global
== ARM_ABI_AUTO
9053 && gdbarch_bfd_arch_info (target_gdbarch
)->arch
== bfd_arch_arm
)
9054 fprintf_filtered (file
, _("\
9055 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
9056 arm_abi_strings
[tdep
->arm_abi
]);
9058 fprintf_filtered (file
, _("The current ARM ABI is \"%s\".\n"),
9063 arm_show_fallback_mode (struct ui_file
*file
, int from_tty
,
9064 struct cmd_list_element
*c
, const char *value
)
9066 struct gdbarch_tdep
*tdep
= gdbarch_tdep (target_gdbarch
);
9068 fprintf_filtered (file
,
9069 _("The current execution mode assumed "
9070 "(when symbols are unavailable) is \"%s\".\n"),
9071 arm_fallback_mode_string
);
9075 arm_show_force_mode (struct ui_file
*file
, int from_tty
,
9076 struct cmd_list_element
*c
, const char *value
)
9078 struct gdbarch_tdep
*tdep
= gdbarch_tdep (target_gdbarch
);
9080 fprintf_filtered (file
,
9081 _("The current execution mode assumed "
9082 "(even when symbols are available) is \"%s\".\n"),
9083 arm_force_mode_string
);
/* If the user changes the register disassembly style used for info
   register and other commands, we have to also switch the style used
   in opcodes for disassembly output.  This function is run in the "set
   arm disassembly" command, and does that.  */

static void
set_disassembly_style_sfunc (char *args, int from_tty,
			     struct cmd_list_element *c)
{
  set_disassembly_style ();
}
9098 /* Return the ARM register name corresponding to register I. */
9100 arm_register_name (struct gdbarch
*gdbarch
, int i
)
9102 const int num_regs
= gdbarch_num_regs (gdbarch
);
9104 if (gdbarch_tdep (gdbarch
)->have_vfp_pseudos
9105 && i
>= num_regs
&& i
< num_regs
+ 32)
9107 static const char *const vfp_pseudo_names
[] = {
9108 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
9109 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
9110 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
9111 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
9114 return vfp_pseudo_names
[i
- num_regs
];
9117 if (gdbarch_tdep (gdbarch
)->have_neon_pseudos
9118 && i
>= num_regs
+ 32 && i
< num_regs
+ 32 + 16)
9120 static const char *const neon_pseudo_names
[] = {
9121 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
9122 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
9125 return neon_pseudo_names
[i
- num_regs
- 32];
9128 if (i
>= ARRAY_SIZE (arm_register_names
))
9129 /* These registers are only supported on targets which supply
9130 an XML description. */
9133 return arm_register_names
[i
];
9137 set_disassembly_style (void)
9141 /* Find the style that the user wants. */
9142 for (current
= 0; current
< num_disassembly_options
; current
++)
9143 if (disassembly_style
== valid_disassembly_styles
[current
])
9145 gdb_assert (current
< num_disassembly_options
);
9147 /* Synchronize the disassembler. */
9148 set_arm_regname_option (current
);
9151 /* Test whether the coff symbol specific value corresponds to a Thumb
9155 coff_sym_is_thumb (int val
)
9157 return (val
== C_THUMBEXT
9158 || val
== C_THUMBSTAT
9159 || val
== C_THUMBEXTFUNC
9160 || val
== C_THUMBSTATFUNC
9161 || val
== C_THUMBLABEL
);
9164 /* arm_coff_make_msymbol_special()
9165 arm_elf_make_msymbol_special()
9167 These functions test whether the COFF or ELF symbol corresponds to
9168 an address in thumb code, and set a "special" bit in a minimal
9169 symbol to indicate that it does. */
9172 arm_elf_make_msymbol_special(asymbol
*sym
, struct minimal_symbol
*msym
)
9174 if (ARM_SYM_BRANCH_TYPE (&((elf_symbol_type
*)sym
)->internal_elf_sym
)
9175 == ST_BRANCH_TO_THUMB
)
9176 MSYMBOL_SET_SPECIAL (msym
);
/* Mark MSYM special when the COFF storage class VAL denotes a Thumb
   symbol.  */

static void
arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
{
  if (coff_sym_is_thumb (val))
    MSYMBOL_SET_SPECIAL (msym);
}
9187 arm_objfile_data_free (struct objfile
*objfile
, void *arg
)
9189 struct arm_per_objfile
*data
= arg
;
9192 for (i
= 0; i
< objfile
->obfd
->section_count
; i
++)
9193 VEC_free (arm_mapping_symbol_s
, data
->section_maps
[i
]);
9197 arm_record_special_symbol (struct gdbarch
*gdbarch
, struct objfile
*objfile
,
9200 const char *name
= bfd_asymbol_name (sym
);
9201 struct arm_per_objfile
*data
;
9202 VEC(arm_mapping_symbol_s
) **map_p
;
9203 struct arm_mapping_symbol new_map_sym
;
9205 gdb_assert (name
[0] == '$');
9206 if (name
[1] != 'a' && name
[1] != 't' && name
[1] != 'd')
9209 data
= objfile_data (objfile
, arm_objfile_data_key
);
9212 data
= OBSTACK_ZALLOC (&objfile
->objfile_obstack
,
9213 struct arm_per_objfile
);
9214 set_objfile_data (objfile
, arm_objfile_data_key
, data
);
9215 data
->section_maps
= OBSTACK_CALLOC (&objfile
->objfile_obstack
,
9216 objfile
->obfd
->section_count
,
9217 VEC(arm_mapping_symbol_s
) *);
9219 map_p
= &data
->section_maps
[bfd_get_section (sym
)->index
];
9221 new_map_sym
.value
= sym
->value
;
9222 new_map_sym
.type
= name
[1];
9224 /* Assume that most mapping symbols appear in order of increasing
9225 value. If they were randomly distributed, it would be faster to
9226 always push here and then sort at first use. */
9227 if (!VEC_empty (arm_mapping_symbol_s
, *map_p
))
9229 struct arm_mapping_symbol
*prev_map_sym
;
9231 prev_map_sym
= VEC_last (arm_mapping_symbol_s
, *map_p
);
9232 if (prev_map_sym
->value
>= sym
->value
)
9235 idx
= VEC_lower_bound (arm_mapping_symbol_s
, *map_p
, &new_map_sym
,
9236 arm_compare_mapping_symbols
);
9237 VEC_safe_insert (arm_mapping_symbol_s
, *map_p
, idx
, &new_map_sym
);
9242 VEC_safe_push (arm_mapping_symbol_s
, *map_p
, &new_map_sym
);
9246 arm_write_pc (struct regcache
*regcache
, CORE_ADDR pc
)
9248 struct gdbarch
*gdbarch
= get_regcache_arch (regcache
);
9249 regcache_cooked_write_unsigned (regcache
, ARM_PC_REGNUM
, pc
);
9251 /* If necessary, set the T bit. */
9254 ULONGEST val
, t_bit
;
9255 regcache_cooked_read_unsigned (regcache
, ARM_PS_REGNUM
, &val
);
9256 t_bit
= arm_psr_thumb_bit (gdbarch
);
9257 if (arm_pc_is_thumb (gdbarch
, pc
))
9258 regcache_cooked_write_unsigned (regcache
, ARM_PS_REGNUM
,
9261 regcache_cooked_write_unsigned (regcache
, ARM_PS_REGNUM
,
9266 /* Read the contents of a NEON quad register, by reading from two
9267 double registers. This is used to implement the quad pseudo
9268 registers, and for argument passing in case the quad registers are
9269 missing; vectors are passed in quad registers when using the VFP
9270 ABI, even if a NEON unit is not present. REGNUM is the index of
9271 the quad register, in [0, 15]. */
9273 static enum register_status
9274 arm_neon_quad_read (struct gdbarch
*gdbarch
, struct regcache
*regcache
,
9275 int regnum
, gdb_byte
*buf
)
9278 gdb_byte reg_buf
[8];
9279 int offset
, double_regnum
;
9280 enum register_status status
;
9282 sprintf (name_buf
, "d%d", regnum
<< 1);
9283 double_regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
9286 /* d0 is always the least significant half of q0. */
9287 if (gdbarch_byte_order (gdbarch
) == BFD_ENDIAN_BIG
)
9292 status
= regcache_raw_read (regcache
, double_regnum
, reg_buf
);
9293 if (status
!= REG_VALID
)
9295 memcpy (buf
+ offset
, reg_buf
, 8);
9297 offset
= 8 - offset
;
9298 status
= regcache_raw_read (regcache
, double_regnum
+ 1, reg_buf
);
9299 if (status
!= REG_VALID
)
9301 memcpy (buf
+ offset
, reg_buf
, 8);
9306 static enum register_status
9307 arm_pseudo_read (struct gdbarch
*gdbarch
, struct regcache
*regcache
,
9308 int regnum
, gdb_byte
*buf
)
9310 const int num_regs
= gdbarch_num_regs (gdbarch
);
9312 gdb_byte reg_buf
[8];
9313 int offset
, double_regnum
;
9315 gdb_assert (regnum
>= num_regs
);
9318 if (gdbarch_tdep (gdbarch
)->have_neon_pseudos
&& regnum
>= 32 && regnum
< 48)
9319 /* Quad-precision register. */
9320 return arm_neon_quad_read (gdbarch
, regcache
, regnum
- 32, buf
);
9323 enum register_status status
;
9325 /* Single-precision register. */
9326 gdb_assert (regnum
< 32);
9328 /* s0 is always the least significant half of d0. */
9329 if (gdbarch_byte_order (gdbarch
) == BFD_ENDIAN_BIG
)
9330 offset
= (regnum
& 1) ? 0 : 4;
9332 offset
= (regnum
& 1) ? 4 : 0;
9334 sprintf (name_buf
, "d%d", regnum
>> 1);
9335 double_regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
9338 status
= regcache_raw_read (regcache
, double_regnum
, reg_buf
);
9339 if (status
== REG_VALID
)
9340 memcpy (buf
, reg_buf
+ offset
, 4);
9345 /* Store the contents of BUF to a NEON quad register, by writing to
9346 two double registers. This is used to implement the quad pseudo
9347 registers, and for argument passing in case the quad registers are
9348 missing; vectors are passed in quad registers when using the VFP
9349 ABI, even if a NEON unit is not present. REGNUM is the index
9350 of the quad register, in [0, 15]. */
9353 arm_neon_quad_write (struct gdbarch
*gdbarch
, struct regcache
*regcache
,
9354 int regnum
, const gdb_byte
*buf
)
9357 gdb_byte reg_buf
[8];
9358 int offset
, double_regnum
;
9360 sprintf (name_buf
, "d%d", regnum
<< 1);
9361 double_regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
9364 /* d0 is always the least significant half of q0. */
9365 if (gdbarch_byte_order (gdbarch
) == BFD_ENDIAN_BIG
)
9370 regcache_raw_write (regcache
, double_regnum
, buf
+ offset
);
9371 offset
= 8 - offset
;
9372 regcache_raw_write (regcache
, double_regnum
+ 1, buf
+ offset
);
9376 arm_pseudo_write (struct gdbarch
*gdbarch
, struct regcache
*regcache
,
9377 int regnum
, const gdb_byte
*buf
)
9379 const int num_regs
= gdbarch_num_regs (gdbarch
);
9381 gdb_byte reg_buf
[8];
9382 int offset
, double_regnum
;
9384 gdb_assert (regnum
>= num_regs
);
9387 if (gdbarch_tdep (gdbarch
)->have_neon_pseudos
&& regnum
>= 32 && regnum
< 48)
9388 /* Quad-precision register. */
9389 arm_neon_quad_write (gdbarch
, regcache
, regnum
- 32, buf
);
9392 /* Single-precision register. */
9393 gdb_assert (regnum
< 32);
9395 /* s0 is always the least significant half of d0. */
9396 if (gdbarch_byte_order (gdbarch
) == BFD_ENDIAN_BIG
)
9397 offset
= (regnum
& 1) ? 0 : 4;
9399 offset
= (regnum
& 1) ? 4 : 0;
9401 sprintf (name_buf
, "d%d", regnum
>> 1);
9402 double_regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
9405 regcache_raw_read (regcache
, double_regnum
, reg_buf
);
9406 memcpy (reg_buf
+ offset
, buf
, 4);
9407 regcache_raw_write (regcache
, double_regnum
, reg_buf
);
/* Callback for user-register aliases: BATON points at the register
   number to read from FRAME.  */

static struct value *
value_of_arm_user_reg (struct frame_info *frame, const void *baton)
{
  const int *reg_p = baton;

  return value_of_register (*reg_p, frame);
}
9418 static enum gdb_osabi
9419 arm_elf_osabi_sniffer (bfd
*abfd
)
9421 unsigned int elfosabi
;
9422 enum gdb_osabi osabi
= GDB_OSABI_UNKNOWN
;
9424 elfosabi
= elf_elfheader (abfd
)->e_ident
[EI_OSABI
];
9426 if (elfosabi
== ELFOSABI_ARM
)
9427 /* GNU tools use this value. Check note sections in this case,
9429 bfd_map_over_sections (abfd
,
9430 generic_elf_osabi_sniff_abi_tag_sections
,
9433 /* Anything else will be handled by the generic ELF sniffer. */
9438 arm_register_reggroup_p (struct gdbarch
*gdbarch
, int regnum
,
9439 struct reggroup
*group
)
9441 /* FPS register's type is INT, but belongs to float_reggroup. Beside
9442 this, FPS register belongs to save_regroup, restore_reggroup, and
9443 all_reggroup, of course. */
9444 if (regnum
== ARM_FPS_REGNUM
)
9445 return (group
== float_reggroup
9446 || group
== save_reggroup
9447 || group
== restore_reggroup
9448 || group
== all_reggroup
);
9450 return default_register_reggroup_p (gdbarch
, regnum
, group
);
9454 /* Initialize the current architecture based on INFO. If possible,
9455 re-use an architecture from ARCHES, which is a list of
9456 architectures already created during this debugging session.
9458 Called e.g. at program startup, when reading a core file, and when
9459 reading a binary file. */
9461 static struct gdbarch
*
9462 arm_gdbarch_init (struct gdbarch_info info
, struct gdbarch_list
*arches
)
9464 struct gdbarch_tdep
*tdep
;
9465 struct gdbarch
*gdbarch
;
9466 struct gdbarch_list
*best_arch
;
9467 enum arm_abi_kind arm_abi
= arm_abi_global
;
9468 enum arm_float_model fp_model
= arm_fp_model
;
9469 struct tdesc_arch_data
*tdesc_data
= NULL
;
9471 int have_vfp_registers
= 0, have_vfp_pseudos
= 0, have_neon_pseudos
= 0;
9473 int have_fpa_registers
= 1;
9474 const struct target_desc
*tdesc
= info
.target_desc
;
9476 /* If we have an object to base this architecture on, try to determine
9479 if (arm_abi
== ARM_ABI_AUTO
&& info
.abfd
!= NULL
)
9481 int ei_osabi
, e_flags
;
9483 switch (bfd_get_flavour (info
.abfd
))
9485 case bfd_target_aout_flavour
:
9486 /* Assume it's an old APCS-style ABI. */
9487 arm_abi
= ARM_ABI_APCS
;
9490 case bfd_target_coff_flavour
:
9491 /* Assume it's an old APCS-style ABI. */
9493 arm_abi
= ARM_ABI_APCS
;
9496 case bfd_target_elf_flavour
:
9497 ei_osabi
= elf_elfheader (info
.abfd
)->e_ident
[EI_OSABI
];
9498 e_flags
= elf_elfheader (info
.abfd
)->e_flags
;
9500 if (ei_osabi
== ELFOSABI_ARM
)
9502 /* GNU tools used to use this value, but do not for EABI
9503 objects. There's nowhere to tag an EABI version
9504 anyway, so assume APCS. */
9505 arm_abi
= ARM_ABI_APCS
;
9507 else if (ei_osabi
== ELFOSABI_NONE
)
9509 int eabi_ver
= EF_ARM_EABI_VERSION (e_flags
);
9510 int attr_arch
, attr_profile
;
9514 case EF_ARM_EABI_UNKNOWN
:
9515 /* Assume GNU tools. */
9516 arm_abi
= ARM_ABI_APCS
;
9519 case EF_ARM_EABI_VER4
:
9520 case EF_ARM_EABI_VER5
:
9521 arm_abi
= ARM_ABI_AAPCS
;
9522 /* EABI binaries default to VFP float ordering.
9523 They may also contain build attributes that can
9524 be used to identify if the VFP argument-passing
9526 if (fp_model
== ARM_FLOAT_AUTO
)
9529 switch (bfd_elf_get_obj_attr_int (info
.abfd
,
9534 /* "The user intended FP parameter/result
9535 passing to conform to AAPCS, base
9537 fp_model
= ARM_FLOAT_SOFT_VFP
;
9540 /* "The user intended FP parameter/result
9541 passing to conform to AAPCS, VFP
9543 fp_model
= ARM_FLOAT_VFP
;
9546 /* "The user intended FP parameter/result
9547 passing to conform to tool chain-specific
9548 conventions" - we don't know any such
9549 conventions, so leave it as "auto". */
9552 /* Attribute value not mentioned in the
9553 October 2008 ABI, so leave it as
9558 fp_model
= ARM_FLOAT_SOFT_VFP
;
9564 /* Leave it as "auto". */
9565 warning (_("unknown ARM EABI version 0x%x"), eabi_ver
);
9570 /* Detect M-profile programs. This only works if the
9571 executable file includes build attributes; GCC does
9572 copy them to the executable, but e.g. RealView does
9574 attr_arch
= bfd_elf_get_obj_attr_int (info
.abfd
, OBJ_ATTR_PROC
,
9576 attr_profile
= bfd_elf_get_obj_attr_int (info
.abfd
,
9578 Tag_CPU_arch_profile
);
9579 /* GCC specifies the profile for v6-M; RealView only
9580 specifies the profile for architectures starting with
9581 V7 (as opposed to architectures with a tag
9582 numerically greater than TAG_CPU_ARCH_V7). */
9583 if (!tdesc_has_registers (tdesc
)
9584 && (attr_arch
== TAG_CPU_ARCH_V6_M
9585 || attr_arch
== TAG_CPU_ARCH_V6S_M
9586 || attr_profile
== 'M'))
9587 tdesc
= tdesc_arm_with_m
;
9591 if (fp_model
== ARM_FLOAT_AUTO
)
9593 int e_flags
= elf_elfheader (info
.abfd
)->e_flags
;
9595 switch (e_flags
& (EF_ARM_SOFT_FLOAT
| EF_ARM_VFP_FLOAT
))
9598 /* Leave it as "auto". Strictly speaking this case
9599 means FPA, but almost nobody uses that now, and
9600 many toolchains fail to set the appropriate bits
9601 for the floating-point model they use. */
9603 case EF_ARM_SOFT_FLOAT
:
9604 fp_model
= ARM_FLOAT_SOFT_FPA
;
9606 case EF_ARM_VFP_FLOAT
:
9607 fp_model
= ARM_FLOAT_VFP
;
9609 case EF_ARM_SOFT_FLOAT
| EF_ARM_VFP_FLOAT
:
9610 fp_model
= ARM_FLOAT_SOFT_VFP
;
9615 if (e_flags
& EF_ARM_BE8
)
9616 info
.byte_order_for_code
= BFD_ENDIAN_LITTLE
;
9621 /* Leave it as "auto". */
9626 /* Check any target description for validity. */
9627 if (tdesc_has_registers (tdesc
))
9629 /* For most registers we require GDB's default names; but also allow
9630 the numeric names for sp / lr / pc, as a convenience. */
9631 static const char *const arm_sp_names
[] = { "r13", "sp", NULL
};
9632 static const char *const arm_lr_names
[] = { "r14", "lr", NULL
};
9633 static const char *const arm_pc_names
[] = { "r15", "pc", NULL
};
9635 const struct tdesc_feature
*feature
;
9638 feature
= tdesc_find_feature (tdesc
,
9639 "org.gnu.gdb.arm.core");
9640 if (feature
== NULL
)
9642 feature
= tdesc_find_feature (tdesc
,
9643 "org.gnu.gdb.arm.m-profile");
9644 if (feature
== NULL
)
9650 tdesc_data
= tdesc_data_alloc ();
9653 for (i
= 0; i
< ARM_SP_REGNUM
; i
++)
9654 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
, i
,
9655 arm_register_names
[i
]);
9656 valid_p
&= tdesc_numbered_register_choices (feature
, tdesc_data
,
9659 valid_p
&= tdesc_numbered_register_choices (feature
, tdesc_data
,
9662 valid_p
&= tdesc_numbered_register_choices (feature
, tdesc_data
,
9666 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
,
9667 ARM_PS_REGNUM
, "xpsr");
9669 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
,
9670 ARM_PS_REGNUM
, "cpsr");
9674 tdesc_data_cleanup (tdesc_data
);
9678 feature
= tdesc_find_feature (tdesc
,
9679 "org.gnu.gdb.arm.fpa");
9680 if (feature
!= NULL
)
9683 for (i
= ARM_F0_REGNUM
; i
<= ARM_FPS_REGNUM
; i
++)
9684 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
, i
,
9685 arm_register_names
[i
]);
9688 tdesc_data_cleanup (tdesc_data
);
9693 have_fpa_registers
= 0;
9695 feature
= tdesc_find_feature (tdesc
,
9696 "org.gnu.gdb.xscale.iwmmxt");
9697 if (feature
!= NULL
)
9699 static const char *const iwmmxt_names
[] = {
9700 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9701 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9702 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9703 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9707 for (i
= ARM_WR0_REGNUM
; i
<= ARM_WR15_REGNUM
; i
++)
9709 &= tdesc_numbered_register (feature
, tdesc_data
, i
,
9710 iwmmxt_names
[i
- ARM_WR0_REGNUM
]);
9712 /* Check for the control registers, but do not fail if they
9714 for (i
= ARM_WC0_REGNUM
; i
<= ARM_WCASF_REGNUM
; i
++)
9715 tdesc_numbered_register (feature
, tdesc_data
, i
,
9716 iwmmxt_names
[i
- ARM_WR0_REGNUM
]);
9718 for (i
= ARM_WCGR0_REGNUM
; i
<= ARM_WCGR3_REGNUM
; i
++)
9720 &= tdesc_numbered_register (feature
, tdesc_data
, i
,
9721 iwmmxt_names
[i
- ARM_WR0_REGNUM
]);
9725 tdesc_data_cleanup (tdesc_data
);
9730 /* If we have a VFP unit, check whether the single precision registers
9731 are present. If not, then we will synthesize them as pseudo
9733 feature
= tdesc_find_feature (tdesc
,
9734 "org.gnu.gdb.arm.vfp");
9735 if (feature
!= NULL
)
9737 static const char *const vfp_double_names
[] = {
9738 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9739 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9740 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9741 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9744 /* Require the double precision registers. There must be either
9747 for (i
= 0; i
< 32; i
++)
9749 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
,
9751 vfp_double_names
[i
]);
9755 if (!valid_p
&& i
== 16)
9758 /* Also require FPSCR. */
9759 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
,
9760 ARM_FPSCR_REGNUM
, "fpscr");
9763 tdesc_data_cleanup (tdesc_data
);
9767 if (tdesc_unnumbered_register (feature
, "s0") == 0)
9768 have_vfp_pseudos
= 1;
9770 have_vfp_registers
= 1;
9772 /* If we have VFP, also check for NEON. The architecture allows
9773 NEON without VFP (integer vector operations only), but GDB
9774 does not support that. */
9775 feature
= tdesc_find_feature (tdesc
,
9776 "org.gnu.gdb.arm.neon");
9777 if (feature
!= NULL
)
9779 /* NEON requires 32 double-precision registers. */
9782 tdesc_data_cleanup (tdesc_data
);
9786 /* If there are quad registers defined by the stub, use
9787 their type; otherwise (normally) provide them with
9788 the default type. */
9789 if (tdesc_unnumbered_register (feature
, "q0") == 0)
9790 have_neon_pseudos
= 1;
9797 /* If there is already a candidate, use it. */
9798 for (best_arch
= gdbarch_list_lookup_by_info (arches
, &info
);
9800 best_arch
= gdbarch_list_lookup_by_info (best_arch
->next
, &info
))
9802 if (arm_abi
!= ARM_ABI_AUTO
9803 && arm_abi
!= gdbarch_tdep (best_arch
->gdbarch
)->arm_abi
)
9806 if (fp_model
!= ARM_FLOAT_AUTO
9807 && fp_model
!= gdbarch_tdep (best_arch
->gdbarch
)->fp_model
)
9810 /* There are various other properties in tdep that we do not
9811 need to check here: those derived from a target description,
9812 since gdbarches with a different target description are
9813 automatically disqualified. */
9815 /* Do check is_m, though, since it might come from the binary. */
9816 if (is_m
!= gdbarch_tdep (best_arch
->gdbarch
)->is_m
)
9819 /* Found a match. */
9823 if (best_arch
!= NULL
)
9825 if (tdesc_data
!= NULL
)
9826 tdesc_data_cleanup (tdesc_data
);
9827 return best_arch
->gdbarch
;
9830 tdep
= xcalloc (1, sizeof (struct gdbarch_tdep
));
9831 gdbarch
= gdbarch_alloc (&info
, tdep
);
9833 /* Record additional information about the architecture we are defining.
9834 These are gdbarch discriminators, like the OSABI. */
9835 tdep
->arm_abi
= arm_abi
;
9836 tdep
->fp_model
= fp_model
;
9838 tdep
->have_fpa_registers
= have_fpa_registers
;
9839 tdep
->have_vfp_registers
= have_vfp_registers
;
9840 tdep
->have_vfp_pseudos
= have_vfp_pseudos
;
9841 tdep
->have_neon_pseudos
= have_neon_pseudos
;
9842 tdep
->have_neon
= have_neon
;
9845 switch (info
.byte_order_for_code
)
9847 case BFD_ENDIAN_BIG
:
9848 tdep
->arm_breakpoint
= arm_default_arm_be_breakpoint
;
9849 tdep
->arm_breakpoint_size
= sizeof (arm_default_arm_be_breakpoint
);
9850 tdep
->thumb_breakpoint
= arm_default_thumb_be_breakpoint
;
9851 tdep
->thumb_breakpoint_size
= sizeof (arm_default_thumb_be_breakpoint
);
9855 case BFD_ENDIAN_LITTLE
:
9856 tdep
->arm_breakpoint
= arm_default_arm_le_breakpoint
;
9857 tdep
->arm_breakpoint_size
= sizeof (arm_default_arm_le_breakpoint
);
9858 tdep
->thumb_breakpoint
= arm_default_thumb_le_breakpoint
;
9859 tdep
->thumb_breakpoint_size
= sizeof (arm_default_thumb_le_breakpoint
);
9864 internal_error (__FILE__
, __LINE__
,
9865 _("arm_gdbarch_init: bad byte order for float format"));
9868 /* On ARM targets char defaults to unsigned. */
9869 set_gdbarch_char_signed (gdbarch
, 0);
9871 /* Note: for displaced stepping, this includes the breakpoint, and one word
9872 of additional scratch space. This setting isn't used for anything beside
9873 displaced stepping at present. */
9874 set_gdbarch_max_insn_length (gdbarch
, 4 * DISPLACED_MODIFIED_INSNS
);
9876 /* This should be low enough for everything. */
9877 tdep
->lowest_pc
= 0x20;
9878 tdep
->jb_pc
= -1; /* Longjump support not enabled by default. */
9880 /* The default, for both APCS and AAPCS, is to return small
9881 structures in registers. */
9882 tdep
->struct_return
= reg_struct_return
;
9884 set_gdbarch_push_dummy_call (gdbarch
, arm_push_dummy_call
);
9885 set_gdbarch_frame_align (gdbarch
, arm_frame_align
);
9887 set_gdbarch_write_pc (gdbarch
, arm_write_pc
);
9889 /* Frame handling. */
9890 set_gdbarch_dummy_id (gdbarch
, arm_dummy_id
);
9891 set_gdbarch_unwind_pc (gdbarch
, arm_unwind_pc
);
9892 set_gdbarch_unwind_sp (gdbarch
, arm_unwind_sp
);
9894 frame_base_set_default (gdbarch
, &arm_normal_base
);
9896 /* Address manipulation. */
9897 set_gdbarch_smash_text_address (gdbarch
, arm_smash_text_address
);
9898 set_gdbarch_addr_bits_remove (gdbarch
, arm_addr_bits_remove
);
9900 /* Advance PC across function entry code. */
9901 set_gdbarch_skip_prologue (gdbarch
, arm_skip_prologue
);
9903 /* Detect whether PC is in function epilogue. */
9904 set_gdbarch_in_function_epilogue_p (gdbarch
, arm_in_function_epilogue_p
);
9906 /* Skip trampolines. */
9907 set_gdbarch_skip_trampoline_code (gdbarch
, arm_skip_stub
);
9909 /* The stack grows downward. */
9910 set_gdbarch_inner_than (gdbarch
, core_addr_lessthan
);
9912 /* Breakpoint manipulation. */
9913 set_gdbarch_breakpoint_from_pc (gdbarch
, arm_breakpoint_from_pc
);
9914 set_gdbarch_remote_breakpoint_from_pc (gdbarch
,
9915 arm_remote_breakpoint_from_pc
);
9917 /* Information about registers, etc. */
9918 set_gdbarch_sp_regnum (gdbarch
, ARM_SP_REGNUM
);
9919 set_gdbarch_pc_regnum (gdbarch
, ARM_PC_REGNUM
);
9920 set_gdbarch_num_regs (gdbarch
, ARM_NUM_REGS
);
9921 set_gdbarch_register_type (gdbarch
, arm_register_type
);
9922 set_gdbarch_register_reggroup_p (gdbarch
, arm_register_reggroup_p
);
9924 /* This "info float" is FPA-specific. Use the generic version if we
9926 if (gdbarch_tdep (gdbarch
)->have_fpa_registers
)
9927 set_gdbarch_print_float_info (gdbarch
, arm_print_float_info
);
9929 /* Internal <-> external register number maps. */
9930 set_gdbarch_dwarf2_reg_to_regnum (gdbarch
, arm_dwarf_reg_to_regnum
);
9931 set_gdbarch_register_sim_regno (gdbarch
, arm_register_sim_regno
);
9933 set_gdbarch_register_name (gdbarch
, arm_register_name
);
9935 /* Returning results. */
9936 set_gdbarch_return_value (gdbarch
, arm_return_value
);
9939 set_gdbarch_print_insn (gdbarch
, gdb_print_insn_arm
);
9941 /* Minsymbol frobbing. */
9942 set_gdbarch_elf_make_msymbol_special (gdbarch
, arm_elf_make_msymbol_special
);
9943 set_gdbarch_coff_make_msymbol_special (gdbarch
,
9944 arm_coff_make_msymbol_special
);
9945 set_gdbarch_record_special_symbol (gdbarch
, arm_record_special_symbol
);
9947 /* Thumb-2 IT block support. */
9948 set_gdbarch_adjust_breakpoint_address (gdbarch
,
9949 arm_adjust_breakpoint_address
);
9951 /* Virtual tables. */
9952 set_gdbarch_vbit_in_delta (gdbarch
, 1);
9954 /* Hook in the ABI-specific overrides, if they have been registered. */
9955 gdbarch_init_osabi (info
, gdbarch
);
9957 dwarf2_frame_set_init_reg (gdbarch
, arm_dwarf2_frame_init_reg
);
9959 /* Add some default predicates. */
9960 frame_unwind_append_unwinder (gdbarch
, &arm_stub_unwind
);
9961 dwarf2_append_unwinders (gdbarch
);
9962 frame_unwind_append_unwinder (gdbarch
, &arm_exidx_unwind
);
9963 frame_unwind_append_unwinder (gdbarch
, &arm_prologue_unwind
);
9965 /* Now we have tuned the configuration, set a few final things,
9966 based on what the OS ABI has told us. */
9968 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
9969 binaries are always marked. */
9970 if (tdep
->arm_abi
== ARM_ABI_AUTO
)
9971 tdep
->arm_abi
= ARM_ABI_APCS
;
9973 /* Watchpoints are not steppable. */
9974 set_gdbarch_have_nonsteppable_watchpoint (gdbarch
, 1);
9976 /* We used to default to FPA for generic ARM, but almost nobody
9977 uses that now, and we now provide a way for the user to force
9978 the model. So default to the most useful variant. */
9979 if (tdep
->fp_model
== ARM_FLOAT_AUTO
)
9980 tdep
->fp_model
= ARM_FLOAT_SOFT_FPA
;
9982 if (tdep
->jb_pc
>= 0)
9983 set_gdbarch_get_longjmp_target (gdbarch
, arm_get_longjmp_target
);
9985 /* Floating point sizes and format. */
9986 set_gdbarch_float_format (gdbarch
, floatformats_ieee_single
);
9987 if (tdep
->fp_model
== ARM_FLOAT_SOFT_FPA
|| tdep
->fp_model
== ARM_FLOAT_FPA
)
9989 set_gdbarch_double_format
9990 (gdbarch
, floatformats_ieee_double_littlebyte_bigword
);
9991 set_gdbarch_long_double_format
9992 (gdbarch
, floatformats_ieee_double_littlebyte_bigword
);
9996 set_gdbarch_double_format (gdbarch
, floatformats_ieee_double
);
9997 set_gdbarch_long_double_format (gdbarch
, floatformats_ieee_double
);
10000 if (have_vfp_pseudos
)
10002 /* NOTE: These are the only pseudo registers used by
10003 the ARM target at the moment. If more are added, a
10004 little more care in numbering will be needed. */
10006 int num_pseudos
= 32;
10007 if (have_neon_pseudos
)
10009 set_gdbarch_num_pseudo_regs (gdbarch
, num_pseudos
);
10010 set_gdbarch_pseudo_register_read (gdbarch
, arm_pseudo_read
);
10011 set_gdbarch_pseudo_register_write (gdbarch
, arm_pseudo_write
);
10016 set_tdesc_pseudo_register_name (gdbarch
, arm_register_name
);
10018 tdesc_use_registers (gdbarch
, tdesc
, tdesc_data
);
10020 /* Override tdesc_register_type to adjust the types of VFP
10021 registers for NEON. */
10022 set_gdbarch_register_type (gdbarch
, arm_register_type
);
10025 /* Add standard register aliases. We add aliases even for those
10026 nanes which are used by the current architecture - it's simpler,
10027 and does no harm, since nothing ever lists user registers. */
10028 for (i
= 0; i
< ARRAY_SIZE (arm_register_aliases
); i
++)
10029 user_reg_add (gdbarch
, arm_register_aliases
[i
].name
,
10030 value_of_arm_user_reg
, &arm_register_aliases
[i
].regnum
);
10036 arm_dump_tdep (struct gdbarch
*gdbarch
, struct ui_file
*file
)
10038 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
10043 fprintf_unfiltered (file
, _("arm_dump_tdep: Lowest pc = 0x%lx"),
10044 (unsigned long) tdep
->lowest_pc
);
10047 extern initialize_file_ftype _initialize_arm_tdep
; /* -Wmissing-prototypes */
10050 _initialize_arm_tdep (void)
10052 struct ui_file
*stb
;
10054 struct cmd_list_element
*new_set
, *new_show
;
10055 const char *setname
;
10056 const char *setdesc
;
10057 const char *const *regnames
;
10059 static char *helptext
;
10060 char regdesc
[1024], *rdptr
= regdesc
;
10061 size_t rest
= sizeof (regdesc
);
10063 gdbarch_register (bfd_arch_arm
, arm_gdbarch_init
, arm_dump_tdep
);
10065 arm_objfile_data_key
10066 = register_objfile_data_with_cleanup (NULL
, arm_objfile_data_free
);
10068 /* Add ourselves to objfile event chain. */
10069 observer_attach_new_objfile (arm_exidx_new_objfile
);
10071 = register_objfile_data_with_cleanup (NULL
, arm_exidx_data_free
);
10073 /* Register an ELF OS ABI sniffer for ARM binaries. */
10074 gdbarch_register_osabi_sniffer (bfd_arch_arm
,
10075 bfd_target_elf_flavour
,
10076 arm_elf_osabi_sniffer
);
10078 /* Initialize the standard target descriptions. */
10079 initialize_tdesc_arm_with_m ();
10080 initialize_tdesc_arm_with_iwmmxt ();
10081 initialize_tdesc_arm_with_vfpv2 ();
10082 initialize_tdesc_arm_with_vfpv3 ();
10083 initialize_tdesc_arm_with_neon ();
10085 /* Get the number of possible sets of register names defined in opcodes. */
10086 num_disassembly_options
= get_arm_regname_num_options ();
10088 /* Add root prefix command for all "set arm"/"show arm" commands. */
10089 add_prefix_cmd ("arm", no_class
, set_arm_command
,
10090 _("Various ARM-specific commands."),
10091 &setarmcmdlist
, "set arm ", 0, &setlist
);
10093 add_prefix_cmd ("arm", no_class
, show_arm_command
,
10094 _("Various ARM-specific commands."),
10095 &showarmcmdlist
, "show arm ", 0, &showlist
);
10097 /* Sync the opcode insn printer with our register viewer. */
10098 parse_arm_disassembler_option ("reg-names-std");
10100 /* Initialize the array that will be passed to
10101 add_setshow_enum_cmd(). */
10102 valid_disassembly_styles
10103 = xmalloc ((num_disassembly_options
+ 1) * sizeof (char *));
10104 for (i
= 0; i
< num_disassembly_options
; i
++)
10106 numregs
= get_arm_regnames (i
, &setname
, &setdesc
, ®names
);
10107 valid_disassembly_styles
[i
] = setname
;
10108 length
= snprintf (rdptr
, rest
, "%s - %s\n", setname
, setdesc
);
10111 /* When we find the default names, tell the disassembler to use
10113 if (!strcmp (setname
, "std"))
10115 disassembly_style
= setname
;
10116 set_arm_regname_option (i
);
10119 /* Mark the end of valid options. */
10120 valid_disassembly_styles
[num_disassembly_options
] = NULL
;
10122 /* Create the help text. */
10123 stb
= mem_fileopen ();
10124 fprintf_unfiltered (stb
, "%s%s%s",
10125 _("The valid values are:\n"),
10127 _("The default is \"std\"."));
10128 helptext
= ui_file_xstrdup (stb
, NULL
);
10129 ui_file_delete (stb
);
10131 add_setshow_enum_cmd("disassembler", no_class
,
10132 valid_disassembly_styles
, &disassembly_style
,
10133 _("Set the disassembly style."),
10134 _("Show the disassembly style."),
10136 set_disassembly_style_sfunc
,
10137 NULL
, /* FIXME: i18n: The disassembly style is
10139 &setarmcmdlist
, &showarmcmdlist
);
10141 add_setshow_boolean_cmd ("apcs32", no_class
, &arm_apcs_32
,
10142 _("Set usage of ARM 32-bit mode."),
10143 _("Show usage of ARM 32-bit mode."),
10144 _("When off, a 26-bit PC will be used."),
10146 NULL
, /* FIXME: i18n: Usage of ARM 32-bit
10148 &setarmcmdlist
, &showarmcmdlist
);
10150 /* Add a command to allow the user to force the FPU model. */
10151 add_setshow_enum_cmd ("fpu", no_class
, fp_model_strings
, ¤t_fp_model
,
10152 _("Set the floating point type."),
10153 _("Show the floating point type."),
10154 _("auto - Determine the FP typefrom the OS-ABI.\n\
10155 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
10156 fpa - FPA co-processor (GCC compiled).\n\
10157 softvfp - Software FP with pure-endian doubles.\n\
10158 vfp - VFP co-processor."),
10159 set_fp_model_sfunc
, show_fp_model
,
10160 &setarmcmdlist
, &showarmcmdlist
);
10162 /* Add a command to allow the user to force the ABI. */
10163 add_setshow_enum_cmd ("abi", class_support
, arm_abi_strings
, &arm_abi_string
,
10165 _("Show the ABI."),
10166 NULL
, arm_set_abi
, arm_show_abi
,
10167 &setarmcmdlist
, &showarmcmdlist
);
10169 /* Add two commands to allow the user to force the assumed
10171 add_setshow_enum_cmd ("fallback-mode", class_support
,
10172 arm_mode_strings
, &arm_fallback_mode_string
,
10173 _("Set the mode assumed when symbols are unavailable."),
10174 _("Show the mode assumed when symbols are unavailable."),
10175 NULL
, NULL
, arm_show_fallback_mode
,
10176 &setarmcmdlist
, &showarmcmdlist
);
10177 add_setshow_enum_cmd ("force-mode", class_support
,
10178 arm_mode_strings
, &arm_force_mode_string
,
10179 _("Set the mode assumed even when symbols are available."),
10180 _("Show the mode assumed even when symbols are available."),
10181 NULL
, NULL
, arm_show_force_mode
,
10182 &setarmcmdlist
, &showarmcmdlist
);
10184 /* Debugging flag. */
10185 add_setshow_boolean_cmd ("arm", class_maintenance
, &arm_debug
,
10186 _("Set ARM debugging."),
10187 _("Show ARM debugging."),
10188 _("When on, arm-specific debugging is enabled."),
10190 NULL
, /* FIXME: i18n: "ARM debugging is %s. */
10191 &setdebuglist
, &showdebuglist
);