/* Common target dependent code for GDB on ARM systems.

   Copyright (C) 1988-2019 Free Software Foundation, Inc.

   This file is part of GDB.

   This program is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3 of the License, or
   (at your option) any later version.

   This program is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program.  If not, see <http://www.gnu.org/licenses/>.  */
22 #include <ctype.h> /* XXX for isupper (). */
29 #include "dis-asm.h" /* For register styles. */
32 #include "reggroups.h"
33 #include "target-float.h"
35 #include "arch-utils.h"
37 #include "frame-unwind.h"
38 #include "frame-base.h"
39 #include "trad-frame.h"
41 #include "dwarf2-frame.h"
43 #include "prologue-value.h"
45 #include "target-descriptions.h"
46 #include "user-regs.h"
47 #include "observable.h"
50 #include "arch/arm-get-next-pcs.h"
52 #include "gdb/sim-arm.h"
55 #include "coff/internal.h"
58 #include "gdbsupport/vec.h"
61 #include "record-full.h"
64 #include "features/arm/arm-with-m.c"
65 #include "features/arm/arm-with-m-fpa-layout.c"
66 #include "features/arm/arm-with-m-vfp-d16.c"
67 #include "features/arm/arm-with-iwmmxt.c"
68 #include "features/arm/arm-with-vfpv2.c"
69 #include "features/arm/arm-with-vfpv3.c"
70 #include "features/arm/arm-with-neon.c"
73 #include "gdbsupport/selftest.h"
/* Macros for setting and testing a bit in a minimal symbol that marks
   it as a Thumb function.  The MSB of the minimal symbol's "info" field
   is used for this purpose.

   MSYMBOL_SET_SPECIAL   Actually sets the "special" bit.
   MSYMBOL_IS_SPECIAL    Tests the "special" bit in a minimal symbol.  */
/* Mark MSYM as a Thumb function by setting its "special" target flag.  */
#define MSYMBOL_SET_SPECIAL(msym) \
  MSYMBOL_TARGET_FLAG_1 (msym) = 1

/* Test whether MSYM carries the Thumb "special" target flag.  */
#define MSYMBOL_IS_SPECIAL(msym) \
  MSYMBOL_TARGET_FLAG_1 (msym)
91 struct arm_mapping_symbol
96 bool operator< (const arm_mapping_symbol
&other
) const
97 { return this->value
< other
.value
; }
100 typedef std::vector
<arm_mapping_symbol
> arm_mapping_symbol_vec
;
102 struct arm_per_objfile
104 explicit arm_per_objfile (size_t num_sections
)
105 : section_maps (new arm_mapping_symbol_vec
[num_sections
]),
106 section_maps_sorted (new bool[num_sections
] ())
109 DISABLE_COPY_AND_ASSIGN (arm_per_objfile
);
111 /* Information about mapping symbols ($a, $d, $t) in the objfile.
113 The format is an array of vectors of arm_mapping_symbols, there is one
114 vector for each section of the objfile (the array is index by BFD section
117 For each section, the vector of arm_mapping_symbol is sorted by
118 symbol value (address). */
119 std::unique_ptr
<arm_mapping_symbol_vec
[]> section_maps
;
121 /* For each corresponding element of section_maps above, is this vector
123 std::unique_ptr
<bool[]> section_maps_sorted
;
126 /* Per-objfile data used for mapping symbols. */
127 static objfile_key
<arm_per_objfile
> arm_objfile_data_key
;
129 /* The list of available "set arm ..." and "show arm ..." commands. */
130 static struct cmd_list_element
*setarmcmdlist
= NULL
;
131 static struct cmd_list_element
*showarmcmdlist
= NULL
;
133 /* The type of floating-point to use. Keep this in sync with enum
134 arm_float_model, and the help string in _initialize_arm_tdep. */
135 static const char *const fp_model_strings
[] =
145 /* A variable that can be configured by the user. */
146 static enum arm_float_model arm_fp_model
= ARM_FLOAT_AUTO
;
147 static const char *current_fp_model
= "auto";
149 /* The ABI to use. Keep this in sync with arm_abi_kind. */
150 static const char *const arm_abi_strings
[] =
158 /* A variable that can be configured by the user. */
159 static enum arm_abi_kind arm_abi_global
= ARM_ABI_AUTO
;
160 static const char *arm_abi_string
= "auto";
162 /* The execution mode to assume. */
163 static const char *const arm_mode_strings
[] =
171 static const char *arm_fallback_mode_string
= "auto";
172 static const char *arm_force_mode_string
= "auto";
174 /* The standard register names, and all the valid aliases for them. Note
175 that `fp', `sp' and `pc' are not added in this alias list, because they
176 have been added as builtin user registers in
177 std-regs.c:_initialize_frame_reg. */
182 } arm_register_aliases
[] = {
183 /* Basic register numbers. */
200 /* Synonyms (argument and variable registers). */
213 /* Other platform-specific names for r9. */
219 /* Names used by GCC (not listed in the ARM EABI). */
221 /* A special name from the older ATPCS. */
/* Canonical names for the 26 core ARM registers, indexed by GDB
   register number: r0-r12, sp, lr, pc, the FPA registers f0-f7, the FPA
   status register and the program status register.  */
static const char *const arm_register_names[] =
{
  "r0",  "r1",  "r2",  "r3",	/*  0  1  2  3 */
  "r4",  "r5",  "r6",  "r7",	/*  4  5  6  7 */
  "r8",  "r9",  "r10", "r11",	/*  8  9 10 11 */
  "r12", "sp",  "lr",  "pc",	/* 12 13 14 15 */
  "f0",  "f1",  "f2",  "f3",	/* 16 17 18 19 */
  "f4",  "f5",  "f6",  "f7",	/* 20 21 22 23 */
  "fps", "cpsr"			/* 24 25       */
};
234 /* Holds the current set of options to be passed to the disassembler. */
235 static char *arm_disassembler_options
;
237 /* Valid register name styles. */
238 static const char **valid_disassembly_styles
;
240 /* Disassembly style to use. Default to "std" register names. */
241 static const char *disassembly_style
;
243 /* This is used to keep the bfd arch_info in sync with the disassembly
245 static void set_disassembly_style_sfunc (const char *, int,
246 struct cmd_list_element
*);
247 static void show_disassembly_style_sfunc (struct ui_file
*, int,
248 struct cmd_list_element
*,
251 static enum register_status
arm_neon_quad_read (struct gdbarch
*gdbarch
,
252 readable_regcache
*regcache
,
253 int regnum
, gdb_byte
*buf
);
254 static void arm_neon_quad_write (struct gdbarch
*gdbarch
,
255 struct regcache
*regcache
,
256 int regnum
, const gdb_byte
*buf
);
259 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs
*self
);
262 /* get_next_pcs operations. */
263 static struct arm_get_next_pcs_ops arm_get_next_pcs_ops
= {
264 arm_get_next_pcs_read_memory_unsigned_integer
,
265 arm_get_next_pcs_syscall_next_pc
,
266 arm_get_next_pcs_addr_bits_remove
,
267 arm_get_next_pcs_is_thumb
,
271 struct arm_prologue_cache
273 /* The stack pointer at the time this frame was created; i.e. the
274 caller's stack pointer when this function was called. It is used
275 to identify this frame. */
278 /* The frame base for this frame is just prev_sp - frame size.
279 FRAMESIZE is the distance from the frame pointer to the
280 initial stack pointer. */
284 /* The register used to hold the frame pointer for this frame. */
287 /* Saved register offsets. */
288 struct trad_frame_saved_reg
*saved_regs
;
291 static CORE_ADDR
arm_analyze_prologue (struct gdbarch
*gdbarch
,
292 CORE_ADDR prologue_start
,
293 CORE_ADDR prologue_end
,
294 struct arm_prologue_cache
*cache
);
296 /* Architecture version for displaced stepping. This effects the behaviour of
297 certain instructions, and really should not be hard-wired. */
299 #define DISPLACED_STEPPING_ARCH_VERSION 5
301 /* Set to true if the 32-bit mode is in use. */
305 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
308 arm_psr_thumb_bit (struct gdbarch
*gdbarch
)
310 if (gdbarch_tdep (gdbarch
)->is_m
)
316 /* Determine if the processor is currently executing in Thumb mode. */
319 arm_is_thumb (struct regcache
*regcache
)
322 ULONGEST t_bit
= arm_psr_thumb_bit (regcache
->arch ());
324 cpsr
= regcache_raw_get_unsigned (regcache
, ARM_PS_REGNUM
);
326 return (cpsr
& t_bit
) != 0;
329 /* Determine if FRAME is executing in Thumb mode. */
332 arm_frame_is_thumb (struct frame_info
*frame
)
335 ULONGEST t_bit
= arm_psr_thumb_bit (get_frame_arch (frame
));
337 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
338 directly (from a signal frame or dummy frame) or by interpreting
339 the saved LR (from a prologue or DWARF frame). So consult it and
340 trust the unwinders. */
341 cpsr
= get_frame_register_unsigned (frame
, ARM_PS_REGNUM
);
343 return (cpsr
& t_bit
) != 0;
346 /* Search for the mapping symbol covering MEMADDR. If one is found,
347 return its type. Otherwise, return 0. If START is non-NULL,
348 set *START to the location of the mapping symbol. */
351 arm_find_mapping_symbol (CORE_ADDR memaddr
, CORE_ADDR
*start
)
353 struct obj_section
*sec
;
355 /* If there are mapping symbols, consult them. */
356 sec
= find_pc_section (memaddr
);
359 arm_per_objfile
*data
= arm_objfile_data_key
.get (sec
->objfile
);
362 unsigned int section_idx
= sec
->the_bfd_section
->index
;
363 arm_mapping_symbol_vec
&map
364 = data
->section_maps
[section_idx
];
366 /* Sort the vector on first use. */
367 if (!data
->section_maps_sorted
[section_idx
])
369 std::sort (map
.begin (), map
.end ());
370 data
->section_maps_sorted
[section_idx
] = true;
373 struct arm_mapping_symbol map_key
374 = { memaddr
- obj_section_addr (sec
), 0 };
375 arm_mapping_symbol_vec::const_iterator it
376 = std::lower_bound (map
.begin (), map
.end (), map_key
);
378 /* std::lower_bound finds the earliest ordered insertion
379 point. If the symbol at this position starts at this exact
380 address, we use that; otherwise, the preceding
381 mapping symbol covers this address. */
384 if (it
->value
== map_key
.value
)
387 *start
= it
->value
+ obj_section_addr (sec
);
392 if (it
> map
.begin ())
394 arm_mapping_symbol_vec::const_iterator prev_it
398 *start
= prev_it
->value
+ obj_section_addr (sec
);
399 return prev_it
->type
;
407 /* Determine if the program counter specified in MEMADDR is in a Thumb
408 function. This function should be called for addresses unrelated to
409 any executing frame; otherwise, prefer arm_frame_is_thumb. */
412 arm_pc_is_thumb (struct gdbarch
*gdbarch
, CORE_ADDR memaddr
)
414 struct bound_minimal_symbol sym
;
416 arm_displaced_step_closure
*dsc
417 = ((arm_displaced_step_closure
* )
418 get_displaced_step_closure_by_addr (memaddr
));
420 /* If checking the mode of displaced instruction in copy area, the mode
421 should be determined by instruction on the original address. */
425 fprintf_unfiltered (gdb_stdlog
,
426 "displaced: check mode of %.8lx instead of %.8lx\n",
427 (unsigned long) dsc
->insn_addr
,
428 (unsigned long) memaddr
);
429 memaddr
= dsc
->insn_addr
;
432 /* If bit 0 of the address is set, assume this is a Thumb address. */
433 if (IS_THUMB_ADDR (memaddr
))
436 /* If the user wants to override the symbol table, let him. */
437 if (strcmp (arm_force_mode_string
, "arm") == 0)
439 if (strcmp (arm_force_mode_string
, "thumb") == 0)
442 /* ARM v6-M and v7-M are always in Thumb mode. */
443 if (gdbarch_tdep (gdbarch
)->is_m
)
446 /* If there are mapping symbols, consult them. */
447 type
= arm_find_mapping_symbol (memaddr
, NULL
);
451 /* Thumb functions have a "special" bit set in minimal symbols. */
452 sym
= lookup_minimal_symbol_by_pc (memaddr
);
454 return (MSYMBOL_IS_SPECIAL (sym
.minsym
));
456 /* If the user wants to override the fallback mode, let them. */
457 if (strcmp (arm_fallback_mode_string
, "arm") == 0)
459 if (strcmp (arm_fallback_mode_string
, "thumb") == 0)
462 /* If we couldn't find any symbol, but we're talking to a running
463 target, then trust the current value of $cpsr. This lets
464 "display/i $pc" always show the correct mode (though if there is
465 a symbol table we will not reach here, so it still may not be
466 displayed in the mode it will be executed). */
467 if (target_has_registers
)
468 return arm_frame_is_thumb (get_current_frame ());
470 /* Otherwise we're out of luck; we assume ARM. */
474 /* Determine if the address specified equals any of these magic return
475 values, called EXC_RETURN, defined by the ARM v6-M and v7-M
478 From ARMv6-M Reference Manual B1.5.8
479 Table B1-5 Exception return behavior
481 EXC_RETURN Return To Return Stack
482 0xFFFFFFF1 Handler mode Main
483 0xFFFFFFF9 Thread mode Main
484 0xFFFFFFFD Thread mode Process
486 From ARMv7-M Reference Manual B1.5.8
487 Table B1-8 EXC_RETURN definition of exception return behavior, no FP
489 EXC_RETURN Return To Return Stack
490 0xFFFFFFF1 Handler mode Main
491 0xFFFFFFF9 Thread mode Main
492 0xFFFFFFFD Thread mode Process
494 Table B1-9 EXC_RETURN definition of exception return behavior, with
497 EXC_RETURN Return To Return Stack Frame Type
498 0xFFFFFFE1 Handler mode Main Extended
499 0xFFFFFFE9 Thread mode Main Extended
500 0xFFFFFFED Thread mode Process Extended
501 0xFFFFFFF1 Handler mode Main Basic
502 0xFFFFFFF9 Thread mode Main Basic
503 0xFFFFFFFD Thread mode Process Basic
505 For more details see "B1.5.8 Exception return behavior"
506 in both ARMv6-M and ARMv7-M Architecture Reference Manuals. */
509 arm_m_addr_is_magic (CORE_ADDR addr
)
513 /* Values from Tables in B1.5.8 the EXC_RETURN definitions of
514 the exception return behavior. */
521 /* Address is magic. */
525 /* Address is not magic. */
530 /* Remove useless bits from addresses in a running program. */
532 arm_addr_bits_remove (struct gdbarch
*gdbarch
, CORE_ADDR val
)
534 /* On M-profile devices, do not strip the low bit from EXC_RETURN
535 (the magic exception return address). */
536 if (gdbarch_tdep (gdbarch
)->is_m
537 && arm_m_addr_is_magic (val
))
541 return UNMAKE_THUMB_ADDR (val
);
543 return (val
& 0x03fffffc);
546 /* Return 1 if PC is the start of a compiler helper function which
547 can be safely ignored during prologue skipping. IS_THUMB is true
548 if the function is known to be a Thumb function due to the way it
551 skip_prologue_function (struct gdbarch
*gdbarch
, CORE_ADDR pc
, int is_thumb
)
553 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
554 struct bound_minimal_symbol msym
;
556 msym
= lookup_minimal_symbol_by_pc (pc
);
557 if (msym
.minsym
!= NULL
558 && BMSYMBOL_VALUE_ADDRESS (msym
) == pc
559 && MSYMBOL_LINKAGE_NAME (msym
.minsym
) != NULL
)
561 const char *name
= MSYMBOL_LINKAGE_NAME (msym
.minsym
);
563 /* The GNU linker's Thumb call stub to foo is named
565 if (strstr (name
, "_from_thumb") != NULL
)
568 /* On soft-float targets, __truncdfsf2 is called to convert promoted
569 arguments to their argument types in non-prototyped
571 if (startswith (name
, "__truncdfsf2"))
573 if (startswith (name
, "__aeabi_d2f"))
576 /* Internal functions related to thread-local storage. */
577 if (startswith (name
, "__tls_get_addr"))
579 if (startswith (name
, "__aeabi_read_tp"))
584 /* If we run against a stripped glibc, we may be unable to identify
585 special functions by name. Check for one important case,
586 __aeabi_read_tp, by comparing the *code* against the default
587 implementation (this is hand-written ARM assembler in glibc). */
590 && read_code_unsigned_integer (pc
, 4, byte_order_for_code
)
591 == 0xe3e00a0f /* mov r0, #0xffff0fff */
592 && read_code_unsigned_integer (pc
+ 4, 4, byte_order_for_code
)
593 == 0xe240f01f) /* sub pc, r0, #31 */
/* Extract the immediate from instruction movw/movt of encoding T.  INSN1 is
   the first 16-bit of instruction, and INSN2 is the second 16-bit of
   instruction.  */
#define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
  ((bits ((insn1), 0, 3) << 12)		      \
   | (bits ((insn1), 10, 10) << 11)	      \
   | (bits ((insn2), 12, 14) << 8)	      \
   | bits ((insn2), 0, 7))

/* Extract the immediate from instruction movw/movt of encoding A.  INSN is
   the 32-bit instruction.  */
#define EXTRACT_MOVW_MOVT_IMM_A(insn) \
  ((bits ((insn), 16, 19) << 12)      \
   | bits ((insn), 0, 11))
/* Decode immediate value; implements ThumbExpandImmediate pseudo-op.  */

static unsigned int
thumb_expand_immediate (unsigned int imm)
{
  unsigned int count = imm >> 7;
  unsigned int byte = imm & 0xff;

  if (count < 8)
    {
      /* Unrotated forms: the low byte replicated into zero, one, two
	 or all four byte lanes.  */
      switch (count / 2)
	{
	case 0:
	  return byte;
	case 1:
	  return byte | (byte << 16);
	case 2:
	  return (byte << 8) | (byte << 24);
	case 3:
	  return byte | (byte << 8) | (byte << 16) | (byte << 24);
	}
    }

  /* Rotated form: 0b1xxxxxxx rotated right by COUNT bits.  */
  return (0x80 | (imm & 0x7f)) << (32 - count);
}
/* Return 1 if the 16-bit Thumb instruction INSN restores SP in
   epilogue, 0 otherwise.  */

static int
thumb_instruction_restores_sp (unsigned short insn)
{
  if (insn == 0x46bd)			/* mov sp, r7 */
    return 1;
  if ((insn & 0xff80) == 0xb000)	/* add sp, imm */
    return 1;
  if ((insn & 0xfe00) == 0xbc00)	/* pop <registers> */
    return 1;
  return 0;
}
650 /* Analyze a Thumb prologue, looking for a recognizable stack frame
651 and frame pointer. Scan until we encounter a store that could
652 clobber the stack frame unexpectedly, or an unknown instruction.
653 Return the last address which is definitely safe to skip for an
654 initial breakpoint. */
657 thumb_analyze_prologue (struct gdbarch
*gdbarch
,
658 CORE_ADDR start
, CORE_ADDR limit
,
659 struct arm_prologue_cache
*cache
)
661 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
662 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
666 CORE_ADDR unrecognized_pc
= 0;
668 for (i
= 0; i
< 16; i
++)
669 regs
[i
] = pv_register (i
, 0);
670 pv_area
stack (ARM_SP_REGNUM
, gdbarch_addr_bit (gdbarch
));
672 while (start
< limit
)
676 insn
= read_code_unsigned_integer (start
, 2, byte_order_for_code
);
678 if ((insn
& 0xfe00) == 0xb400) /* push { rlist } */
683 if (stack
.store_would_trash (regs
[ARM_SP_REGNUM
]))
686 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
687 whether to save LR (R14). */
688 mask
= (insn
& 0xff) | ((insn
& 0x100) << 6);
690 /* Calculate offsets of saved R0-R7 and LR. */
691 for (regno
= ARM_LR_REGNUM
; regno
>= 0; regno
--)
692 if (mask
& (1 << regno
))
694 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
],
696 stack
.store (regs
[ARM_SP_REGNUM
], 4, regs
[regno
]);
699 else if ((insn
& 0xff80) == 0xb080) /* sub sp, #imm */
701 offset
= (insn
& 0x7f) << 2; /* get scaled offset */
702 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
],
705 else if (thumb_instruction_restores_sp (insn
))
707 /* Don't scan past the epilogue. */
710 else if ((insn
& 0xf800) == 0xa800) /* add Rd, sp, #imm */
711 regs
[bits (insn
, 8, 10)] = pv_add_constant (regs
[ARM_SP_REGNUM
],
713 else if ((insn
& 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
714 && pv_is_register (regs
[bits (insn
, 3, 5)], ARM_SP_REGNUM
))
715 regs
[bits (insn
, 0, 2)] = pv_add_constant (regs
[bits (insn
, 3, 5)],
717 else if ((insn
& 0xf800) == 0x3000 /* add Rd, #imm */
718 && pv_is_register (regs
[bits (insn
, 8, 10)], ARM_SP_REGNUM
))
719 regs
[bits (insn
, 8, 10)] = pv_add_constant (regs
[bits (insn
, 8, 10)],
721 else if ((insn
& 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
722 && pv_is_register (regs
[bits (insn
, 6, 8)], ARM_SP_REGNUM
)
723 && pv_is_constant (regs
[bits (insn
, 3, 5)]))
724 regs
[bits (insn
, 0, 2)] = pv_add (regs
[bits (insn
, 3, 5)],
725 regs
[bits (insn
, 6, 8)]);
726 else if ((insn
& 0xff00) == 0x4400 /* add Rd, Rm */
727 && pv_is_constant (regs
[bits (insn
, 3, 6)]))
729 int rd
= (bit (insn
, 7) << 3) + bits (insn
, 0, 2);
730 int rm
= bits (insn
, 3, 6);
731 regs
[rd
] = pv_add (regs
[rd
], regs
[rm
]);
733 else if ((insn
& 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
735 int dst_reg
= (insn
& 0x7) + ((insn
& 0x80) >> 4);
736 int src_reg
= (insn
& 0x78) >> 3;
737 regs
[dst_reg
] = regs
[src_reg
];
739 else if ((insn
& 0xf800) == 0x9000) /* str rd, [sp, #off] */
741 /* Handle stores to the stack. Normally pushes are used,
742 but with GCC -mtpcs-frame, there may be other stores
743 in the prologue to create the frame. */
744 int regno
= (insn
>> 8) & 0x7;
747 offset
= (insn
& 0xff) << 2;
748 addr
= pv_add_constant (regs
[ARM_SP_REGNUM
], offset
);
750 if (stack
.store_would_trash (addr
))
753 stack
.store (addr
, 4, regs
[regno
]);
755 else if ((insn
& 0xf800) == 0x6000) /* str rd, [rn, #off] */
757 int rd
= bits (insn
, 0, 2);
758 int rn
= bits (insn
, 3, 5);
761 offset
= bits (insn
, 6, 10) << 2;
762 addr
= pv_add_constant (regs
[rn
], offset
);
764 if (stack
.store_would_trash (addr
))
767 stack
.store (addr
, 4, regs
[rd
]);
769 else if (((insn
& 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
770 || (insn
& 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
771 && pv_is_register (regs
[bits (insn
, 3, 5)], ARM_SP_REGNUM
))
772 /* Ignore stores of argument registers to the stack. */
774 else if ((insn
& 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
775 && pv_is_register (regs
[bits (insn
, 8, 10)], ARM_SP_REGNUM
))
776 /* Ignore block loads from the stack, potentially copying
777 parameters from memory. */
779 else if ((insn
& 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
780 || ((insn
& 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
781 && pv_is_register (regs
[bits (insn
, 3, 5)], ARM_SP_REGNUM
)))
782 /* Similarly ignore single loads from the stack. */
784 else if ((insn
& 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
785 || (insn
& 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
786 /* Skip register copies, i.e. saves to another register
787 instead of the stack. */
789 else if ((insn
& 0xf800) == 0x2000) /* movs Rd, #imm */
790 /* Recognize constant loads; even with small stacks these are necessary
792 regs
[bits (insn
, 8, 10)] = pv_constant (bits (insn
, 0, 7));
793 else if ((insn
& 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
795 /* Constant pool loads, for the same reason. */
796 unsigned int constant
;
799 loc
= start
+ 4 + bits (insn
, 0, 7) * 4;
800 constant
= read_memory_unsigned_integer (loc
, 4, byte_order
);
801 regs
[bits (insn
, 8, 10)] = pv_constant (constant
);
803 else if (thumb_insn_size (insn
) == 4) /* 32-bit Thumb-2 instructions. */
805 unsigned short inst2
;
807 inst2
= read_code_unsigned_integer (start
+ 2, 2,
808 byte_order_for_code
);
810 if ((insn
& 0xf800) == 0xf000 && (inst2
& 0xe800) == 0xe800)
812 /* BL, BLX. Allow some special function calls when
813 skipping the prologue; GCC generates these before
814 storing arguments to the stack. */
816 int j1
, j2
, imm1
, imm2
;
818 imm1
= sbits (insn
, 0, 10);
819 imm2
= bits (inst2
, 0, 10);
820 j1
= bit (inst2
, 13);
821 j2
= bit (inst2
, 11);
823 offset
= ((imm1
<< 12) + (imm2
<< 1));
824 offset
^= ((!j2
) << 22) | ((!j1
) << 23);
826 nextpc
= start
+ 4 + offset
;
827 /* For BLX make sure to clear the low bits. */
828 if (bit (inst2
, 12) == 0)
829 nextpc
= nextpc
& 0xfffffffc;
831 if (!skip_prologue_function (gdbarch
, nextpc
,
832 bit (inst2
, 12) != 0))
836 else if ((insn
& 0xffd0) == 0xe900 /* stmdb Rn{!},
838 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
840 pv_t addr
= regs
[bits (insn
, 0, 3)];
843 if (stack
.store_would_trash (addr
))
846 /* Calculate offsets of saved registers. */
847 for (regno
= ARM_LR_REGNUM
; regno
>= 0; regno
--)
848 if (inst2
& (1 << regno
))
850 addr
= pv_add_constant (addr
, -4);
851 stack
.store (addr
, 4, regs
[regno
]);
855 regs
[bits (insn
, 0, 3)] = addr
;
858 else if ((insn
& 0xff50) == 0xe940 /* strd Rt, Rt2,
860 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
862 int regno1
= bits (inst2
, 12, 15);
863 int regno2
= bits (inst2
, 8, 11);
864 pv_t addr
= regs
[bits (insn
, 0, 3)];
866 offset
= inst2
& 0xff;
868 addr
= pv_add_constant (addr
, offset
);
870 addr
= pv_add_constant (addr
, -offset
);
872 if (stack
.store_would_trash (addr
))
875 stack
.store (addr
, 4, regs
[regno1
]);
876 stack
.store (pv_add_constant (addr
, 4),
880 regs
[bits (insn
, 0, 3)] = addr
;
883 else if ((insn
& 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
884 && (inst2
& 0x0c00) == 0x0c00
885 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
887 int regno
= bits (inst2
, 12, 15);
888 pv_t addr
= regs
[bits (insn
, 0, 3)];
890 offset
= inst2
& 0xff;
892 addr
= pv_add_constant (addr
, offset
);
894 addr
= pv_add_constant (addr
, -offset
);
896 if (stack
.store_would_trash (addr
))
899 stack
.store (addr
, 4, regs
[regno
]);
902 regs
[bits (insn
, 0, 3)] = addr
;
905 else if ((insn
& 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
906 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
908 int regno
= bits (inst2
, 12, 15);
911 offset
= inst2
& 0xfff;
912 addr
= pv_add_constant (regs
[bits (insn
, 0, 3)], offset
);
914 if (stack
.store_would_trash (addr
))
917 stack
.store (addr
, 4, regs
[regno
]);
920 else if ((insn
& 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
921 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
922 /* Ignore stores of argument registers to the stack. */
925 else if ((insn
& 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
926 && (inst2
& 0x0d00) == 0x0c00
927 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
928 /* Ignore stores of argument registers to the stack. */
931 else if ((insn
& 0xffd0) == 0xe890 /* ldmia Rn[!],
933 && (inst2
& 0x8000) == 0x0000
934 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
935 /* Ignore block loads from the stack, potentially copying
936 parameters from memory. */
939 else if ((insn
& 0xffb0) == 0xe950 /* ldrd Rt, Rt2,
941 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
942 /* Similarly ignore dual loads from the stack. */
945 else if ((insn
& 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
946 && (inst2
& 0x0d00) == 0x0c00
947 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
948 /* Similarly ignore single loads from the stack. */
951 else if ((insn
& 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
952 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
953 /* Similarly ignore single loads from the stack. */
956 else if ((insn
& 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
957 && (inst2
& 0x8000) == 0x0000)
959 unsigned int imm
= ((bits (insn
, 10, 10) << 11)
960 | (bits (inst2
, 12, 14) << 8)
961 | bits (inst2
, 0, 7));
963 regs
[bits (inst2
, 8, 11)]
964 = pv_add_constant (regs
[bits (insn
, 0, 3)],
965 thumb_expand_immediate (imm
));
968 else if ((insn
& 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
969 && (inst2
& 0x8000) == 0x0000)
971 unsigned int imm
= ((bits (insn
, 10, 10) << 11)
972 | (bits (inst2
, 12, 14) << 8)
973 | bits (inst2
, 0, 7));
975 regs
[bits (inst2
, 8, 11)]
976 = pv_add_constant (regs
[bits (insn
, 0, 3)], imm
);
979 else if ((insn
& 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
980 && (inst2
& 0x8000) == 0x0000)
982 unsigned int imm
= ((bits (insn
, 10, 10) << 11)
983 | (bits (inst2
, 12, 14) << 8)
984 | bits (inst2
, 0, 7));
986 regs
[bits (inst2
, 8, 11)]
987 = pv_add_constant (regs
[bits (insn
, 0, 3)],
988 - (CORE_ADDR
) thumb_expand_immediate (imm
));
991 else if ((insn
& 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
992 && (inst2
& 0x8000) == 0x0000)
994 unsigned int imm
= ((bits (insn
, 10, 10) << 11)
995 | (bits (inst2
, 12, 14) << 8)
996 | bits (inst2
, 0, 7));
998 regs
[bits (inst2
, 8, 11)]
999 = pv_add_constant (regs
[bits (insn
, 0, 3)], - (CORE_ADDR
) imm
);
1002 else if ((insn
& 0xfbff) == 0xf04f) /* mov.w Rd, #const */
1004 unsigned int imm
= ((bits (insn
, 10, 10) << 11)
1005 | (bits (inst2
, 12, 14) << 8)
1006 | bits (inst2
, 0, 7));
1008 regs
[bits (inst2
, 8, 11)]
1009 = pv_constant (thumb_expand_immediate (imm
));
1012 else if ((insn
& 0xfbf0) == 0xf240) /* movw Rd, #const */
1015 = EXTRACT_MOVW_MOVT_IMM_T (insn
, inst2
);
1017 regs
[bits (inst2
, 8, 11)] = pv_constant (imm
);
1020 else if (insn
== 0xea5f /* mov.w Rd,Rm */
1021 && (inst2
& 0xf0f0) == 0)
1023 int dst_reg
= (inst2
& 0x0f00) >> 8;
1024 int src_reg
= inst2
& 0xf;
1025 regs
[dst_reg
] = regs
[src_reg
];
1028 else if ((insn
& 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1030 /* Constant pool loads. */
1031 unsigned int constant
;
1034 offset
= bits (inst2
, 0, 11);
1036 loc
= start
+ 4 + offset
;
1038 loc
= start
+ 4 - offset
;
1040 constant
= read_memory_unsigned_integer (loc
, 4, byte_order
);
1041 regs
[bits (inst2
, 12, 15)] = pv_constant (constant
);
1044 else if ((insn
& 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1046 /* Constant pool loads. */
1047 unsigned int constant
;
1050 offset
= bits (inst2
, 0, 7) << 2;
1052 loc
= start
+ 4 + offset
;
1054 loc
= start
+ 4 - offset
;
1056 constant
= read_memory_unsigned_integer (loc
, 4, byte_order
);
1057 regs
[bits (inst2
, 12, 15)] = pv_constant (constant
);
1059 constant
= read_memory_unsigned_integer (loc
+ 4, 4, byte_order
);
1060 regs
[bits (inst2
, 8, 11)] = pv_constant (constant
);
1063 else if (thumb2_instruction_changes_pc (insn
, inst2
))
1065 /* Don't scan past anything that might change control flow. */
1070 /* The optimizer might shove anything into the prologue,
1071 so we just skip what we don't recognize. */
1072 unrecognized_pc
= start
;
1077 else if (thumb_instruction_changes_pc (insn
))
1079 /* Don't scan past anything that might change control flow. */
1084 /* The optimizer might shove anything into the prologue,
1085 so we just skip what we don't recognize. */
1086 unrecognized_pc
= start
;
1093 fprintf_unfiltered (gdb_stdlog
, "Prologue scan stopped at %s\n",
1094 paddress (gdbarch
, start
));
1096 if (unrecognized_pc
== 0)
1097 unrecognized_pc
= start
;
1100 return unrecognized_pc
;
1102 if (pv_is_register (regs
[ARM_FP_REGNUM
], ARM_SP_REGNUM
))
1104 /* Frame pointer is fp. Frame size is constant. */
1105 cache
->framereg
= ARM_FP_REGNUM
;
1106 cache
->framesize
= -regs
[ARM_FP_REGNUM
].k
;
1108 else if (pv_is_register (regs
[THUMB_FP_REGNUM
], ARM_SP_REGNUM
))
1110 /* Frame pointer is r7. Frame size is constant. */
1111 cache
->framereg
= THUMB_FP_REGNUM
;
1112 cache
->framesize
= -regs
[THUMB_FP_REGNUM
].k
;
1116 /* Try the stack pointer... this is a bit desperate. */
1117 cache
->framereg
= ARM_SP_REGNUM
;
1118 cache
->framesize
= -regs
[ARM_SP_REGNUM
].k
;
1121 for (i
= 0; i
< 16; i
++)
1122 if (stack
.find_reg (gdbarch
, i
, &offset
))
1123 cache
->saved_regs
[i
].addr
= offset
;
1125 return unrecognized_pc
;
1129 /* Try to analyze the instructions starting from PC, which load symbol
1130 __stack_chk_guard. Return the address of instruction after loading this
1131 symbol, set the dest register number to *BASEREG, and set the size of
1132 instructions for loading symbol in OFFSET. Return 0 if instructions are
1136 arm_analyze_load_stack_chk_guard(CORE_ADDR pc
, struct gdbarch
*gdbarch
,
1137 unsigned int *destreg
, int *offset
)
1139 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
1140 int is_thumb
= arm_pc_is_thumb (gdbarch
, pc
);
1141 unsigned int low
, high
, address
;
1146 unsigned short insn1
1147 = read_code_unsigned_integer (pc
, 2, byte_order_for_code
);
1149 if ((insn1
& 0xf800) == 0x4800) /* ldr Rd, #immed */
1151 *destreg
= bits (insn1
, 8, 10);
1153 address
= (pc
& 0xfffffffc) + 4 + (bits (insn1
, 0, 7) << 2);
1154 address
= read_memory_unsigned_integer (address
, 4,
1155 byte_order_for_code
);
1157 else if ((insn1
& 0xfbf0) == 0xf240) /* movw Rd, #const */
1159 unsigned short insn2
1160 = read_code_unsigned_integer (pc
+ 2, 2, byte_order_for_code
);
1162 low
= EXTRACT_MOVW_MOVT_IMM_T (insn1
, insn2
);
1165 = read_code_unsigned_integer (pc
+ 4, 2, byte_order_for_code
);
1167 = read_code_unsigned_integer (pc
+ 6, 2, byte_order_for_code
);
1169 /* movt Rd, #const */
1170 if ((insn1
& 0xfbc0) == 0xf2c0)
1172 high
= EXTRACT_MOVW_MOVT_IMM_T (insn1
, insn2
);
1173 *destreg
= bits (insn2
, 8, 11);
1175 address
= (high
<< 16 | low
);
1182 = read_code_unsigned_integer (pc
, 4, byte_order_for_code
);
1184 if ((insn
& 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
1186 address
= bits (insn
, 0, 11) + pc
+ 8;
1187 address
= read_memory_unsigned_integer (address
, 4,
1188 byte_order_for_code
);
1190 *destreg
= bits (insn
, 12, 15);
1193 else if ((insn
& 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1195 low
= EXTRACT_MOVW_MOVT_IMM_A (insn
);
1198 = read_code_unsigned_integer (pc
+ 4, 4, byte_order_for_code
);
1200 if ((insn
& 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1202 high
= EXTRACT_MOVW_MOVT_IMM_A (insn
);
1203 *destreg
= bits (insn
, 12, 15);
1205 address
= (high
<< 16 | low
);
1213 /* Try to skip a sequence of instructions used for stack protector. If PC
1214 points to the first instruction of this sequence, return the address of
1215 first instruction after this sequence, otherwise, return original PC.
1217 On arm, this sequence of instructions is composed of mainly three steps,
1218 Step 1: load symbol __stack_chk_guard,
1219 Step 2: load from address of __stack_chk_guard,
1220 Step 3: store it to somewhere else.
1222 Usually, instructions on step 2 and step 3 are the same on various ARM
1223 architectures. On step 2, it is one instruction 'ldr Rx, [Rn, #0]', and
1224 on step 3, it is also one instruction 'str Rx, [r7, #immd]'. However,
1225 instructions in step 1 vary from different ARM architectures. On ARMv7,
1228 movw Rn, #:lower16:__stack_chk_guard
1229 movt Rn, #:upper16:__stack_chk_guard
1236 .word __stack_chk_guard
1238 Since ldr/str is a very popular instruction, we can't use them as
1239 'fingerprint' or 'signature' of stack protector sequence. Here we choose
1240 sequence {movw/movt, ldr}/ldr/str plus symbol __stack_chk_guard, if not
1241 stripped, as the 'fingerprint' of a stack protector code sequence. */
1244 arm_skip_stack_protector(CORE_ADDR pc
, struct gdbarch
*gdbarch
)
1246 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
1247 unsigned int basereg
;
1248 struct bound_minimal_symbol stack_chk_guard
;
1250 int is_thumb
= arm_pc_is_thumb (gdbarch
, pc
);
1253 /* Try to parse the instructions in Step 1. */
1254 addr
= arm_analyze_load_stack_chk_guard (pc
, gdbarch
,
1259 stack_chk_guard
= lookup_minimal_symbol_by_pc (addr
);
1260 /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
1261 Otherwise, this sequence cannot be for stack protector. */
1262 if (stack_chk_guard
.minsym
== NULL
1263 || !startswith (MSYMBOL_LINKAGE_NAME (stack_chk_guard
.minsym
), "__stack_chk_guard"))
1268 unsigned int destreg
;
1270 = read_code_unsigned_integer (pc
+ offset
, 2, byte_order_for_code
);
1272 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1273 if ((insn
& 0xf800) != 0x6800)
1275 if (bits (insn
, 3, 5) != basereg
)
1277 destreg
= bits (insn
, 0, 2);
1279 insn
= read_code_unsigned_integer (pc
+ offset
+ 2, 2,
1280 byte_order_for_code
);
1281 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1282 if ((insn
& 0xf800) != 0x6000)
1284 if (destreg
!= bits (insn
, 0, 2))
1289 unsigned int destreg
;
1291 = read_code_unsigned_integer (pc
+ offset
, 4, byte_order_for_code
);
1293 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1294 if ((insn
& 0x0e500000) != 0x04100000)
1296 if (bits (insn
, 16, 19) != basereg
)
1298 destreg
= bits (insn
, 12, 15);
1299 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1300 insn
= read_code_unsigned_integer (pc
+ offset
+ 4,
1301 4, byte_order_for_code
);
1302 if ((insn
& 0x0e500000) != 0x04000000)
1304 if (bits (insn
, 12, 15) != destreg
)
1307 /* The size of total two instructions ldr/str is 4 on Thumb-2, while 8
1310 return pc
+ offset
+ 4;
1312 return pc
+ offset
+ 8;
1315 /* Advance the PC across any function entry prologue instructions to
1316 reach some "real" code.
1318 The APCS (ARM Procedure Call Standard) defines the following
1322 [stmfd sp!, {a1,a2,a3,a4}]
1323 stmfd sp!, {...,fp,ip,lr,pc}
1324 [stfe f7, [sp, #-12]!]
1325 [stfe f6, [sp, #-12]!]
1326 [stfe f5, [sp, #-12]!]
1327 [stfe f4, [sp, #-12]!]
1328 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1331 arm_skip_prologue (struct gdbarch
*gdbarch
, CORE_ADDR pc
)
1333 CORE_ADDR func_addr
, limit_pc
;
1335 /* See if we can determine the end of the prologue via the symbol table.
1336 If so, then return either PC, or the PC after the prologue, whichever
1338 if (find_pc_partial_function (pc
, NULL
, &func_addr
, NULL
))
1340 CORE_ADDR post_prologue_pc
1341 = skip_prologue_using_sal (gdbarch
, func_addr
);
1342 struct compunit_symtab
*cust
= find_pc_compunit_symtab (func_addr
);
1344 if (post_prologue_pc
)
1346 = arm_skip_stack_protector (post_prologue_pc
, gdbarch
);
1349 /* GCC always emits a line note before the prologue and another
1350 one after, even if the two are at the same address or on the
1351 same line. Take advantage of this so that we do not need to
1352 know every instruction that might appear in the prologue. We
1353 will have producer information for most binaries; if it is
1354 missing (e.g. for -gstabs), assuming the GNU tools. */
1355 if (post_prologue_pc
1357 || COMPUNIT_PRODUCER (cust
) == NULL
1358 || startswith (COMPUNIT_PRODUCER (cust
), "GNU ")
1359 || startswith (COMPUNIT_PRODUCER (cust
), "clang ")))
1360 return post_prologue_pc
;
1362 if (post_prologue_pc
!= 0)
1364 CORE_ADDR analyzed_limit
;
1366 /* For non-GCC compilers, make sure the entire line is an
1367 acceptable prologue; GDB will round this function's
1368 return value up to the end of the following line so we
1369 can not skip just part of a line (and we do not want to).
1371 RealView does not treat the prologue specially, but does
1372 associate prologue code with the opening brace; so this
1373 lets us skip the first line if we think it is the opening
1375 if (arm_pc_is_thumb (gdbarch
, func_addr
))
1376 analyzed_limit
= thumb_analyze_prologue (gdbarch
, func_addr
,
1377 post_prologue_pc
, NULL
);
1379 analyzed_limit
= arm_analyze_prologue (gdbarch
, func_addr
,
1380 post_prologue_pc
, NULL
);
1382 if (analyzed_limit
!= post_prologue_pc
)
1385 return post_prologue_pc
;
1389 /* Can't determine prologue from the symbol table, need to examine
1392 /* Find an upper limit on the function prologue using the debug
1393 information. If the debug information could not be used to provide
1394 that bound, then use an arbitrary large number as the upper bound. */
1395 /* Like arm_scan_prologue, stop no later than pc + 64. */
1396 limit_pc
= skip_prologue_using_sal (gdbarch
, pc
);
1398 limit_pc
= pc
+ 64; /* Magic. */
1401 /* Check if this is Thumb code. */
1402 if (arm_pc_is_thumb (gdbarch
, pc
))
1403 return thumb_analyze_prologue (gdbarch
, pc
, limit_pc
, NULL
);
1405 return arm_analyze_prologue (gdbarch
, pc
, limit_pc
, NULL
);
1409 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1410 This function decodes a Thumb function prologue to determine:
1411 1) the size of the stack frame
1412 2) which registers are saved on it
1413 3) the offsets of saved regs
1414 4) the offset from the stack pointer to the frame pointer
1416 A typical Thumb function prologue would create this stack frame
1417 (offsets relative to FP)
1418 old SP -> 24 stack parameters
1421 R7 -> 0 local variables (16 bytes)
1422 SP -> -12 additional stack space (12 bytes)
1423 The frame size would thus be 36 bytes, and the frame offset would be
1424 12 bytes. The frame register is R7.
1426 The comments for thumb_skip_prolog() describe the algorithm we use
1427 to detect the end of the prolog. */
1431 thumb_scan_prologue (struct gdbarch
*gdbarch
, CORE_ADDR prev_pc
,
1432 CORE_ADDR block_addr
, struct arm_prologue_cache
*cache
)
1434 CORE_ADDR prologue_start
;
1435 CORE_ADDR prologue_end
;
1437 if (find_pc_partial_function (block_addr
, NULL
, &prologue_start
,
1440 /* See comment in arm_scan_prologue for an explanation of
1442 if (prologue_end
> prologue_start
+ 64)
1444 prologue_end
= prologue_start
+ 64;
1448 /* We're in the boondocks: we have no idea where the start of the
1452 prologue_end
= std::min (prologue_end
, prev_pc
);
1454 thumb_analyze_prologue (gdbarch
, prologue_start
, prologue_end
, cache
);
1457 /* Return 1 if the ARM instruction INSN restores SP in epilogue, 0
1461 arm_instruction_restores_sp (unsigned int insn
)
1463 if (bits (insn
, 28, 31) != INST_NV
)
1465 if ((insn
& 0x0df0f000) == 0x0080d000
1466 /* ADD SP (register or immediate). */
1467 || (insn
& 0x0df0f000) == 0x0040d000
1468 /* SUB SP (register or immediate). */
1469 || (insn
& 0x0ffffff0) == 0x01a0d000
1471 || (insn
& 0x0fff0000) == 0x08bd0000
1473 || (insn
& 0x0fff0000) == 0x049d0000)
1474 /* POP of a single register. */
1481 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1482 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1483 fill it in. Return the first address not recognized as a prologue
1486 We recognize all the instructions typically found in ARM prologues,
1487 plus harmless instructions which can be skipped (either for analysis
1488 purposes, or a more restrictive set that can be skipped when finding
1489 the end of the prologue). */
1492 arm_analyze_prologue (struct gdbarch
*gdbarch
,
1493 CORE_ADDR prologue_start
, CORE_ADDR prologue_end
,
1494 struct arm_prologue_cache
*cache
)
1496 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
1498 CORE_ADDR offset
, current_pc
;
1499 pv_t regs
[ARM_FPS_REGNUM
];
1500 CORE_ADDR unrecognized_pc
= 0;
1502 /* Search the prologue looking for instructions that set up the
1503 frame pointer, adjust the stack pointer, and save registers.
1505 Be careful, however, and if it doesn't look like a prologue,
1506 don't try to scan it. If, for instance, a frameless function
1507 begins with stmfd sp!, then we will tell ourselves there is
1508 a frame, which will confuse stack traceback, as well as "finish"
1509 and other operations that rely on a knowledge of the stack
1512 for (regno
= 0; regno
< ARM_FPS_REGNUM
; regno
++)
1513 regs
[regno
] = pv_register (regno
, 0);
1514 pv_area
stack (ARM_SP_REGNUM
, gdbarch_addr_bit (gdbarch
));
1516 for (current_pc
= prologue_start
;
1517 current_pc
< prologue_end
;
1521 = read_code_unsigned_integer (current_pc
, 4, byte_order_for_code
);
1523 if (insn
== 0xe1a0c00d) /* mov ip, sp */
1525 regs
[ARM_IP_REGNUM
] = regs
[ARM_SP_REGNUM
];
1528 else if ((insn
& 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1529 && pv_is_register (regs
[bits (insn
, 16, 19)], ARM_SP_REGNUM
))
1531 unsigned imm
= insn
& 0xff; /* immediate value */
1532 unsigned rot
= (insn
& 0xf00) >> 7; /* rotate amount */
1533 int rd
= bits (insn
, 12, 15);
1534 imm
= (imm
>> rot
) | (imm
<< (32 - rot
));
1535 regs
[rd
] = pv_add_constant (regs
[bits (insn
, 16, 19)], imm
);
1538 else if ((insn
& 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1539 && pv_is_register (regs
[bits (insn
, 16, 19)], ARM_SP_REGNUM
))
1541 unsigned imm
= insn
& 0xff; /* immediate value */
1542 unsigned rot
= (insn
& 0xf00) >> 7; /* rotate amount */
1543 int rd
= bits (insn
, 12, 15);
1544 imm
= (imm
>> rot
) | (imm
<< (32 - rot
));
1545 regs
[rd
] = pv_add_constant (regs
[bits (insn
, 16, 19)], -imm
);
1548 else if ((insn
& 0xffff0fff) == 0xe52d0004) /* str Rd,
1551 if (stack
.store_would_trash (regs
[ARM_SP_REGNUM
]))
1553 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
], -4);
1554 stack
.store (regs
[ARM_SP_REGNUM
], 4,
1555 regs
[bits (insn
, 12, 15)]);
1558 else if ((insn
& 0xffff0000) == 0xe92d0000)
1559 /* stmfd sp!, {..., fp, ip, lr, pc}
1561 stmfd sp!, {a1, a2, a3, a4} */
1563 int mask
= insn
& 0xffff;
1565 if (stack
.store_would_trash (regs
[ARM_SP_REGNUM
]))
1568 /* Calculate offsets of saved registers. */
1569 for (regno
= ARM_PC_REGNUM
; regno
>= 0; regno
--)
1570 if (mask
& (1 << regno
))
1573 = pv_add_constant (regs
[ARM_SP_REGNUM
], -4);
1574 stack
.store (regs
[ARM_SP_REGNUM
], 4, regs
[regno
]);
1577 else if ((insn
& 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1578 || (insn
& 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1579 || (insn
& 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1581 /* No need to add this to saved_regs -- it's just an arg reg. */
1584 else if ((insn
& 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1585 || (insn
& 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1586 || (insn
& 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1588 /* No need to add this to saved_regs -- it's just an arg reg. */
1591 else if ((insn
& 0xfff00000) == 0xe8800000 /* stm Rn,
1593 && pv_is_register (regs
[bits (insn
, 16, 19)], ARM_SP_REGNUM
))
1595 /* No need to add this to saved_regs -- it's just arg regs. */
1598 else if ((insn
& 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1600 unsigned imm
= insn
& 0xff; /* immediate value */
1601 unsigned rot
= (insn
& 0xf00) >> 7; /* rotate amount */
1602 imm
= (imm
>> rot
) | (imm
<< (32 - rot
));
1603 regs
[ARM_FP_REGNUM
] = pv_add_constant (regs
[ARM_IP_REGNUM
], -imm
);
1605 else if ((insn
& 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1607 unsigned imm
= insn
& 0xff; /* immediate value */
1608 unsigned rot
= (insn
& 0xf00) >> 7; /* rotate amount */
1609 imm
= (imm
>> rot
) | (imm
<< (32 - rot
));
1610 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
], -imm
);
1612 else if ((insn
& 0xffff7fff) == 0xed6d0103 /* stfe f?,
1614 && gdbarch_tdep (gdbarch
)->have_fpa_registers
)
1616 if (stack
.store_would_trash (regs
[ARM_SP_REGNUM
]))
1619 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
], -12);
1620 regno
= ARM_F0_REGNUM
+ ((insn
>> 12) & 0x07);
1621 stack
.store (regs
[ARM_SP_REGNUM
], 12, regs
[regno
]);
1623 else if ((insn
& 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1625 && gdbarch_tdep (gdbarch
)->have_fpa_registers
)
1627 int n_saved_fp_regs
;
1628 unsigned int fp_start_reg
, fp_bound_reg
;
1630 if (stack
.store_would_trash (regs
[ARM_SP_REGNUM
]))
1633 if ((insn
& 0x800) == 0x800) /* N0 is set */
1635 if ((insn
& 0x40000) == 0x40000) /* N1 is set */
1636 n_saved_fp_regs
= 3;
1638 n_saved_fp_regs
= 1;
1642 if ((insn
& 0x40000) == 0x40000) /* N1 is set */
1643 n_saved_fp_regs
= 2;
1645 n_saved_fp_regs
= 4;
1648 fp_start_reg
= ARM_F0_REGNUM
+ ((insn
>> 12) & 0x7);
1649 fp_bound_reg
= fp_start_reg
+ n_saved_fp_regs
;
1650 for (; fp_start_reg
< fp_bound_reg
; fp_start_reg
++)
1652 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
], -12);
1653 stack
.store (regs
[ARM_SP_REGNUM
], 12,
1654 regs
[fp_start_reg
++]);
1657 else if ((insn
& 0xff000000) == 0xeb000000 && cache
== NULL
) /* bl */
1659 /* Allow some special function calls when skipping the
1660 prologue; GCC generates these before storing arguments to
1662 CORE_ADDR dest
= BranchDest (current_pc
, insn
);
1664 if (skip_prologue_function (gdbarch
, dest
, 0))
1669 else if ((insn
& 0xf0000000) != 0xe0000000)
1670 break; /* Condition not true, exit early. */
1671 else if (arm_instruction_changes_pc (insn
))
1672 /* Don't scan past anything that might change control flow. */
1674 else if (arm_instruction_restores_sp (insn
))
1676 /* Don't scan past the epilogue. */
1679 else if ((insn
& 0xfe500000) == 0xe8100000 /* ldm */
1680 && pv_is_register (regs
[bits (insn
, 16, 19)], ARM_SP_REGNUM
))
1681 /* Ignore block loads from the stack, potentially copying
1682 parameters from memory. */
1684 else if ((insn
& 0xfc500000) == 0xe4100000
1685 && pv_is_register (regs
[bits (insn
, 16, 19)], ARM_SP_REGNUM
))
1686 /* Similarly ignore single loads from the stack. */
1688 else if ((insn
& 0xffff0ff0) == 0xe1a00000)
1689 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1690 register instead of the stack. */
1694 /* The optimizer might shove anything into the prologue, if
1695 we build up cache (cache != NULL) from scanning prologue,
1696 we just skip what we don't recognize and scan further to
1697 make cache as complete as possible. However, if we skip
1698 prologue, we'll stop immediately on unrecognized
1700 unrecognized_pc
= current_pc
;
1708 if (unrecognized_pc
== 0)
1709 unrecognized_pc
= current_pc
;
1713 int framereg
, framesize
;
1715 /* The frame size is just the distance from the frame register
1716 to the original stack pointer. */
1717 if (pv_is_register (regs
[ARM_FP_REGNUM
], ARM_SP_REGNUM
))
1719 /* Frame pointer is fp. */
1720 framereg
= ARM_FP_REGNUM
;
1721 framesize
= -regs
[ARM_FP_REGNUM
].k
;
1725 /* Try the stack pointer... this is a bit desperate. */
1726 framereg
= ARM_SP_REGNUM
;
1727 framesize
= -regs
[ARM_SP_REGNUM
].k
;
1730 cache
->framereg
= framereg
;
1731 cache
->framesize
= framesize
;
1733 for (regno
= 0; regno
< ARM_FPS_REGNUM
; regno
++)
1734 if (stack
.find_reg (gdbarch
, regno
, &offset
))
1735 cache
->saved_regs
[regno
].addr
= offset
;
1739 fprintf_unfiltered (gdb_stdlog
, "Prologue scan stopped at %s\n",
1740 paddress (gdbarch
, unrecognized_pc
));
1742 return unrecognized_pc
;
1746 arm_scan_prologue (struct frame_info
*this_frame
,
1747 struct arm_prologue_cache
*cache
)
1749 struct gdbarch
*gdbarch
= get_frame_arch (this_frame
);
1750 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
1751 CORE_ADDR prologue_start
, prologue_end
;
1752 CORE_ADDR prev_pc
= get_frame_pc (this_frame
);
1753 CORE_ADDR block_addr
= get_frame_address_in_block (this_frame
);
1755 /* Assume there is no frame until proven otherwise. */
1756 cache
->framereg
= ARM_SP_REGNUM
;
1757 cache
->framesize
= 0;
1759 /* Check for Thumb prologue. */
1760 if (arm_frame_is_thumb (this_frame
))
1762 thumb_scan_prologue (gdbarch
, prev_pc
, block_addr
, cache
);
1766 /* Find the function prologue. If we can't find the function in
1767 the symbol table, peek in the stack frame to find the PC. */
1768 if (find_pc_partial_function (block_addr
, NULL
, &prologue_start
,
1771 /* One way to find the end of the prologue (which works well
1772 for unoptimized code) is to do the following:
1774 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1777 prologue_end = prev_pc;
1778 else if (sal.end < prologue_end)
1779 prologue_end = sal.end;
1781 This mechanism is very accurate so long as the optimizer
1782 doesn't move any instructions from the function body into the
1783 prologue. If this happens, sal.end will be the last
1784 instruction in the first hunk of prologue code just before
1785 the first instruction that the scheduler has moved from
1786 the body to the prologue.
1788 In order to make sure that we scan all of the prologue
1789 instructions, we use a slightly less accurate mechanism which
1790 may scan more than necessary. To help compensate for this
1791 lack of accuracy, the prologue scanning loop below contains
1792 several clauses which'll cause the loop to terminate early if
1793 an implausible prologue instruction is encountered.
1799 is a suitable endpoint since it accounts for the largest
1800 possible prologue plus up to five instructions inserted by
1803 if (prologue_end
> prologue_start
+ 64)
1805 prologue_end
= prologue_start
+ 64; /* See above. */
1810 /* We have no symbol information. Our only option is to assume this
1811 function has a standard stack frame and the normal frame register.
1812 Then, we can find the value of our frame pointer on entrance to
1813 the callee (or at the present moment if this is the innermost frame).
1814 The value stored there should be the address of the stmfd + 8. */
1815 CORE_ADDR frame_loc
;
1816 ULONGEST return_value
;
1818 /* AAPCS does not use a frame register, so we can abort here. */
1819 if (gdbarch_tdep (gdbarch
)->arm_abi
== ARM_ABI_AAPCS
)
1822 frame_loc
= get_frame_register_unsigned (this_frame
, ARM_FP_REGNUM
);
1823 if (!safe_read_memory_unsigned_integer (frame_loc
, 4, byte_order
,
1828 prologue_start
= gdbarch_addr_bits_remove
1829 (gdbarch
, return_value
) - 8;
1830 prologue_end
= prologue_start
+ 64; /* See above. */
1834 if (prev_pc
< prologue_end
)
1835 prologue_end
= prev_pc
;
1837 arm_analyze_prologue (gdbarch
, prologue_start
, prologue_end
, cache
);
1840 static struct arm_prologue_cache
*
1841 arm_make_prologue_cache (struct frame_info
*this_frame
)
1844 struct arm_prologue_cache
*cache
;
1845 CORE_ADDR unwound_fp
;
1847 cache
= FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache
);
1848 cache
->saved_regs
= trad_frame_alloc_saved_regs (this_frame
);
1850 arm_scan_prologue (this_frame
, cache
);
1852 unwound_fp
= get_frame_register_unsigned (this_frame
, cache
->framereg
);
1853 if (unwound_fp
== 0)
1856 cache
->prev_sp
= unwound_fp
+ cache
->framesize
;
1858 /* Calculate actual addresses of saved registers using offsets
1859 determined by arm_scan_prologue. */
1860 for (reg
= 0; reg
< gdbarch_num_regs (get_frame_arch (this_frame
)); reg
++)
1861 if (trad_frame_addr_p (cache
->saved_regs
, reg
))
1862 cache
->saved_regs
[reg
].addr
+= cache
->prev_sp
;
1867 /* Implementation of the stop_reason hook for arm_prologue frames. */
1869 static enum unwind_stop_reason
1870 arm_prologue_unwind_stop_reason (struct frame_info
*this_frame
,
1873 struct arm_prologue_cache
*cache
;
1876 if (*this_cache
== NULL
)
1877 *this_cache
= arm_make_prologue_cache (this_frame
);
1878 cache
= (struct arm_prologue_cache
*) *this_cache
;
1880 /* This is meant to halt the backtrace at "_start". */
1881 pc
= get_frame_pc (this_frame
);
1882 if (pc
<= gdbarch_tdep (get_frame_arch (this_frame
))->lowest_pc
)
1883 return UNWIND_OUTERMOST
;
1885 /* If we've hit a wall, stop. */
1886 if (cache
->prev_sp
== 0)
1887 return UNWIND_OUTERMOST
;
1889 return UNWIND_NO_REASON
;
1892 /* Our frame ID for a normal frame is the current function's starting PC
1893 and the caller's SP when we were called. */
1896 arm_prologue_this_id (struct frame_info
*this_frame
,
1898 struct frame_id
*this_id
)
1900 struct arm_prologue_cache
*cache
;
1904 if (*this_cache
== NULL
)
1905 *this_cache
= arm_make_prologue_cache (this_frame
);
1906 cache
= (struct arm_prologue_cache
*) *this_cache
;
1908 /* Use function start address as part of the frame ID. If we cannot
1909 identify the start address (due to missing symbol information),
1910 fall back to just using the current PC. */
1911 pc
= get_frame_pc (this_frame
);
1912 func
= get_frame_func (this_frame
);
1916 id
= frame_id_build (cache
->prev_sp
, func
);
1920 static struct value
*
1921 arm_prologue_prev_register (struct frame_info
*this_frame
,
1925 struct gdbarch
*gdbarch
= get_frame_arch (this_frame
);
1926 struct arm_prologue_cache
*cache
;
1928 if (*this_cache
== NULL
)
1929 *this_cache
= arm_make_prologue_cache (this_frame
);
1930 cache
= (struct arm_prologue_cache
*) *this_cache
;
1932 /* If we are asked to unwind the PC, then we need to return the LR
1933 instead. The prologue may save PC, but it will point into this
1934 frame's prologue, not the next frame's resume location. Also
1935 strip the saved T bit. A valid LR may have the low bit set, but
1936 a valid PC never does. */
1937 if (prev_regnum
== ARM_PC_REGNUM
)
1941 lr
= frame_unwind_register_unsigned (this_frame
, ARM_LR_REGNUM
);
1942 return frame_unwind_got_constant (this_frame
, prev_regnum
,
1943 arm_addr_bits_remove (gdbarch
, lr
));
1946 /* SP is generally not saved to the stack, but this frame is
1947 identified by the next frame's stack pointer at the time of the call.
1948 The value was already reconstructed into PREV_SP. */
1949 if (prev_regnum
== ARM_SP_REGNUM
)
1950 return frame_unwind_got_constant (this_frame
, prev_regnum
, cache
->prev_sp
);
1952 /* The CPSR may have been changed by the call instruction and by the
1953 called function. The only bit we can reconstruct is the T bit,
1954 by checking the low bit of LR as of the call. This is a reliable
1955 indicator of Thumb-ness except for some ARM v4T pre-interworking
1956 Thumb code, which could get away with a clear low bit as long as
1957 the called function did not use bx. Guess that all other
1958 bits are unchanged; the condition flags are presumably lost,
1959 but the processor status is likely valid. */
1960 if (prev_regnum
== ARM_PS_REGNUM
)
1963 ULONGEST t_bit
= arm_psr_thumb_bit (gdbarch
);
1965 cpsr
= get_frame_register_unsigned (this_frame
, prev_regnum
);
1966 lr
= frame_unwind_register_unsigned (this_frame
, ARM_LR_REGNUM
);
1967 if (IS_THUMB_ADDR (lr
))
1971 return frame_unwind_got_constant (this_frame
, prev_regnum
, cpsr
);
1974 return trad_frame_get_prev_register (this_frame
, cache
->saved_regs
,
1978 struct frame_unwind arm_prologue_unwind
= {
1980 arm_prologue_unwind_stop_reason
,
1981 arm_prologue_this_id
,
1982 arm_prologue_prev_register
,
1984 default_frame_sniffer
1987 /* Maintain a list of ARM exception table entries per objfile, similar to the
1988 list of mapping symbols. We only cache entries for standard ARM-defined
1989 personality routines; the cache will contain only the frame unwinding
1990 instructions associated with the entry (not the descriptors). */
1992 static const struct objfile_data
*arm_exidx_data_key
;
1994 struct arm_exidx_entry
1999 typedef struct arm_exidx_entry arm_exidx_entry_s
;
2000 DEF_VEC_O(arm_exidx_entry_s
);
2002 struct arm_exidx_data
2004 VEC(arm_exidx_entry_s
) **section_maps
;
2008 arm_exidx_data_free (struct objfile
*objfile
, void *arg
)
2010 struct arm_exidx_data
*data
= (struct arm_exidx_data
*) arg
;
2013 for (i
= 0; i
< objfile
->obfd
->section_count
; i
++)
2014 VEC_free (arm_exidx_entry_s
, data
->section_maps
[i
]);
2018 arm_compare_exidx_entries (const struct arm_exidx_entry
*lhs
,
2019 const struct arm_exidx_entry
*rhs
)
2021 return lhs
->addr
< rhs
->addr
;
2024 static struct obj_section
*
2025 arm_obj_section_from_vma (struct objfile
*objfile
, bfd_vma vma
)
2027 struct obj_section
*osect
;
2029 ALL_OBJFILE_OSECTIONS (objfile
, osect
)
2030 if (bfd_get_section_flags (objfile
->obfd
,
2031 osect
->the_bfd_section
) & SEC_ALLOC
)
2033 bfd_vma start
, size
;
2034 start
= bfd_get_section_vma (objfile
->obfd
, osect
->the_bfd_section
);
2035 size
= bfd_get_section_size (osect
->the_bfd_section
);
2037 if (start
<= vma
&& vma
< start
+ size
)
2044 /* Parse contents of exception table and exception index sections
2045 of OBJFILE, and fill in the exception table entry cache.
2047 For each entry that refers to a standard ARM-defined personality
2048 routine, extract the frame unwinding instructions (from either
2049 the index or the table section). The unwinding instructions
2051 - extracting them from the rest of the table data
2052 - converting to host endianness
2053 - appending the implicit 0xb0 ("Finish") code
2055 The extracted and normalized instructions are stored for later
2056 retrieval by the arm_find_exidx_entry routine. */
2059 arm_exidx_new_objfile (struct objfile
*objfile
)
2061 struct arm_exidx_data
*data
;
2062 asection
*exidx
, *extab
;
2063 bfd_vma exidx_vma
= 0, extab_vma
= 0;
2066 /* If we've already touched this file, do nothing. */
2067 if (!objfile
|| objfile_data (objfile
, arm_exidx_data_key
) != NULL
)
2070 /* Read contents of exception table and index. */
2071 exidx
= bfd_get_section_by_name (objfile
->obfd
, ELF_STRING_ARM_unwind
);
2072 gdb::byte_vector exidx_data
;
2075 exidx_vma
= bfd_section_vma (objfile
->obfd
, exidx
);
2076 exidx_data
.resize (bfd_get_section_size (exidx
));
2078 if (!bfd_get_section_contents (objfile
->obfd
, exidx
,
2079 exidx_data
.data (), 0,
2080 exidx_data
.size ()))
2084 extab
= bfd_get_section_by_name (objfile
->obfd
, ".ARM.extab");
2085 gdb::byte_vector extab_data
;
2088 extab_vma
= bfd_section_vma (objfile
->obfd
, extab
);
2089 extab_data
.resize (bfd_get_section_size (extab
));
2091 if (!bfd_get_section_contents (objfile
->obfd
, extab
,
2092 extab_data
.data (), 0,
2093 extab_data
.size ()))
2097 /* Allocate exception table data structure. */
2098 data
= OBSTACK_ZALLOC (&objfile
->objfile_obstack
, struct arm_exidx_data
);
2099 set_objfile_data (objfile
, arm_exidx_data_key
, data
);
2100 data
->section_maps
= OBSTACK_CALLOC (&objfile
->objfile_obstack
,
2101 objfile
->obfd
->section_count
,
2102 VEC(arm_exidx_entry_s
) *);
2104 /* Fill in exception table. */
2105 for (i
= 0; i
< exidx_data
.size () / 8; i
++)
2107 struct arm_exidx_entry new_exidx_entry
;
2108 bfd_vma idx
= bfd_h_get_32 (objfile
->obfd
, exidx_data
.data () + i
* 8);
2109 bfd_vma val
= bfd_h_get_32 (objfile
->obfd
,
2110 exidx_data
.data () + i
* 8 + 4);
2111 bfd_vma addr
= 0, word
= 0;
2112 int n_bytes
= 0, n_words
= 0;
2113 struct obj_section
*sec
;
2114 gdb_byte
*entry
= NULL
;
2116 /* Extract address of start of function. */
2117 idx
= ((idx
& 0x7fffffff) ^ 0x40000000) - 0x40000000;
2118 idx
+= exidx_vma
+ i
* 8;
2120 /* Find section containing function and compute section offset. */
2121 sec
= arm_obj_section_from_vma (objfile
, idx
);
2124 idx
-= bfd_get_section_vma (objfile
->obfd
, sec
->the_bfd_section
);
2126 /* Determine address of exception table entry. */
2129 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2131 else if ((val
& 0xff000000) == 0x80000000)
2133 /* Exception table entry embedded in .ARM.exidx
2134 -- must be short form. */
2138 else if (!(val
& 0x80000000))
2140 /* Exception table entry in .ARM.extab. */
2141 addr
= ((val
& 0x7fffffff) ^ 0x40000000) - 0x40000000;
2142 addr
+= exidx_vma
+ i
* 8 + 4;
2144 if (addr
>= extab_vma
&& addr
+ 4 <= extab_vma
+ extab_data
.size ())
2146 word
= bfd_h_get_32 (objfile
->obfd
,
2147 extab_data
.data () + addr
- extab_vma
);
2150 if ((word
& 0xff000000) == 0x80000000)
2155 else if ((word
& 0xff000000) == 0x81000000
2156 || (word
& 0xff000000) == 0x82000000)
2160 n_words
= ((word
>> 16) & 0xff);
2162 else if (!(word
& 0x80000000))
2165 struct obj_section
*pers_sec
;
2166 int gnu_personality
= 0;
2168 /* Custom personality routine. */
2169 pers
= ((word
& 0x7fffffff) ^ 0x40000000) - 0x40000000;
2170 pers
= UNMAKE_THUMB_ADDR (pers
+ addr
- 4);
2172 /* Check whether we've got one of the variants of the
2173 GNU personality routines. */
2174 pers_sec
= arm_obj_section_from_vma (objfile
, pers
);
2177 static const char *personality
[] =
2179 "__gcc_personality_v0",
2180 "__gxx_personality_v0",
2181 "__gcj_personality_v0",
2182 "__gnu_objc_personality_v0",
2186 CORE_ADDR pc
= pers
+ obj_section_offset (pers_sec
);
2189 for (k
= 0; personality
[k
]; k
++)
2190 if (lookup_minimal_symbol_by_pc_name
2191 (pc
, personality
[k
], objfile
))
2193 gnu_personality
= 1;
2198 /* If so, the next word contains a word count in the high
2199 byte, followed by the same unwind instructions as the
2200 pre-defined forms. */
2202 && addr
+ 4 <= extab_vma
+ extab_data
.size ())
2204 word
= bfd_h_get_32 (objfile
->obfd
,
2206 + addr
- extab_vma
));
2209 n_words
= ((word
>> 24) & 0xff);
2215 /* Sanity check address. */
2217 if (addr
< extab_vma
2218 || addr
+ 4 * n_words
> extab_vma
+ extab_data
.size ())
2219 n_words
= n_bytes
= 0;
2221 /* The unwind instructions reside in WORD (only the N_BYTES least
2222 significant bytes are valid), followed by N_WORDS words in the
2223 extab section starting at ADDR. */
2224 if (n_bytes
|| n_words
)
2227 = (gdb_byte
*) obstack_alloc (&objfile
->objfile_obstack
,
2228 n_bytes
+ n_words
* 4 + 1);
2231 *p
++ = (gdb_byte
) ((word
>> (8 * n_bytes
)) & 0xff);
2235 word
= bfd_h_get_32 (objfile
->obfd
,
2236 extab_data
.data () + addr
- extab_vma
);
2239 *p
++ = (gdb_byte
) ((word
>> 24) & 0xff);
2240 *p
++ = (gdb_byte
) ((word
>> 16) & 0xff);
2241 *p
++ = (gdb_byte
) ((word
>> 8) & 0xff);
2242 *p
++ = (gdb_byte
) (word
& 0xff);
2245 /* Implied "Finish" to terminate the list. */
2249 /* Push entry onto vector. They are guaranteed to always
2250 appear in order of increasing addresses. */
2251 new_exidx_entry
.addr
= idx
;
2252 new_exidx_entry
.entry
= entry
;
2253 VEC_safe_push (arm_exidx_entry_s
,
2254 data
->section_maps
[sec
->the_bfd_section
->index
],
2259 /* Search for the exception table entry covering MEMADDR. If one is found,
2260 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2261 set *START to the start of the region covered by this entry. */
2264 arm_find_exidx_entry (CORE_ADDR memaddr
, CORE_ADDR
*start
)
2266 struct obj_section
*sec
;
2268 sec
= find_pc_section (memaddr
);
2271 struct arm_exidx_data
*data
;
2272 VEC(arm_exidx_entry_s
) *map
;
2273 struct arm_exidx_entry map_key
= { memaddr
- obj_section_addr (sec
), 0 };
2276 data
= ((struct arm_exidx_data
*)
2277 objfile_data (sec
->objfile
, arm_exidx_data_key
));
2280 map
= data
->section_maps
[sec
->the_bfd_section
->index
];
2281 if (!VEC_empty (arm_exidx_entry_s
, map
))
2283 struct arm_exidx_entry
*map_sym
;
2285 idx
= VEC_lower_bound (arm_exidx_entry_s
, map
, &map_key
,
2286 arm_compare_exidx_entries
);
2288 /* VEC_lower_bound finds the earliest ordered insertion
2289 point. If the following symbol starts at this exact
2290 address, we use that; otherwise, the preceding
2291 exception table entry covers this address. */
2292 if (idx
< VEC_length (arm_exidx_entry_s
, map
))
2294 map_sym
= VEC_index (arm_exidx_entry_s
, map
, idx
);
2295 if (map_sym
->addr
== map_key
.addr
)
2298 *start
= map_sym
->addr
+ obj_section_addr (sec
);
2299 return map_sym
->entry
;
2305 map_sym
= VEC_index (arm_exidx_entry_s
, map
, idx
- 1);
2307 *start
= map_sym
->addr
+ obj_section_addr (sec
);
2308 return map_sym
->entry
;
2317 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2318 instruction list from the ARM exception table entry ENTRY, allocate and
2319 return a prologue cache structure describing how to unwind this frame.
2321 Return NULL if the unwinding instruction list contains a "spare",
2322 "reserved" or "refuse to unwind" instruction as defined in section
2323 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2324 for the ARM Architecture" document. */
2326 static struct arm_prologue_cache
*
2327 arm_exidx_fill_cache (struct frame_info
*this_frame
, gdb_byte
*entry
)
2332 struct arm_prologue_cache
*cache
;
2333 cache
= FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache
);
2334 cache
->saved_regs
= trad_frame_alloc_saved_regs (this_frame
);
2340 /* Whenever we reload SP, we actually have to retrieve its
2341 actual value in the current frame. */
2344 if (trad_frame_realreg_p (cache
->saved_regs
, ARM_SP_REGNUM
))
2346 int reg
= cache
->saved_regs
[ARM_SP_REGNUM
].realreg
;
2347 vsp
= get_frame_register_unsigned (this_frame
, reg
);
2351 CORE_ADDR addr
= cache
->saved_regs
[ARM_SP_REGNUM
].addr
;
2352 vsp
= get_frame_memory_unsigned (this_frame
, addr
, 4);
2358 /* Decode next unwind instruction. */
2361 if ((insn
& 0xc0) == 0)
2363 int offset
= insn
& 0x3f;
2364 vsp
+= (offset
<< 2) + 4;
2366 else if ((insn
& 0xc0) == 0x40)
2368 int offset
= insn
& 0x3f;
2369 vsp
-= (offset
<< 2) + 4;
2371 else if ((insn
& 0xf0) == 0x80)
2373 int mask
= ((insn
& 0xf) << 8) | *entry
++;
2376 /* The special case of an all-zero mask identifies
2377 "Refuse to unwind". We return NULL to fall back
2378 to the prologue analyzer. */
2382 /* Pop registers r4..r15 under mask. */
2383 for (i
= 0; i
< 12; i
++)
2384 if (mask
& (1 << i
))
2386 cache
->saved_regs
[4 + i
].addr
= vsp
;
2390 /* Special-case popping SP -- we need to reload vsp. */
2391 if (mask
& (1 << (ARM_SP_REGNUM
- 4)))
2394 else if ((insn
& 0xf0) == 0x90)
2396 int reg
= insn
& 0xf;
2398 /* Reserved cases. */
2399 if (reg
== ARM_SP_REGNUM
|| reg
== ARM_PC_REGNUM
)
2402 /* Set SP from another register and mark VSP for reload. */
2403 cache
->saved_regs
[ARM_SP_REGNUM
] = cache
->saved_regs
[reg
];
2406 else if ((insn
& 0xf0) == 0xa0)
2408 int count
= insn
& 0x7;
2409 int pop_lr
= (insn
& 0x8) != 0;
2412 /* Pop r4..r[4+count]. */
2413 for (i
= 0; i
<= count
; i
++)
2415 cache
->saved_regs
[4 + i
].addr
= vsp
;
2419 /* If indicated by flag, pop LR as well. */
2422 cache
->saved_regs
[ARM_LR_REGNUM
].addr
= vsp
;
2426 else if (insn
== 0xb0)
2428 /* We could only have updated PC by popping into it; if so, it
2429 will show up as address. Otherwise, copy LR into PC. */
2430 if (!trad_frame_addr_p (cache
->saved_regs
, ARM_PC_REGNUM
))
2431 cache
->saved_regs
[ARM_PC_REGNUM
]
2432 = cache
->saved_regs
[ARM_LR_REGNUM
];
2437 else if (insn
== 0xb1)
2439 int mask
= *entry
++;
2442 /* All-zero mask and mask >= 16 is "spare". */
2443 if (mask
== 0 || mask
>= 16)
2446 /* Pop r0..r3 under mask. */
2447 for (i
= 0; i
< 4; i
++)
2448 if (mask
& (1 << i
))
2450 cache
->saved_regs
[i
].addr
= vsp
;
2454 else if (insn
== 0xb2)
2456 ULONGEST offset
= 0;
2461 offset
|= (*entry
& 0x7f) << shift
;
2464 while (*entry
++ & 0x80);
2466 vsp
+= 0x204 + (offset
<< 2);
2468 else if (insn
== 0xb3)
2470 int start
= *entry
>> 4;
2471 int count
= (*entry
++) & 0xf;
2474 /* Only registers D0..D15 are valid here. */
2475 if (start
+ count
>= 16)
2478 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2479 for (i
= 0; i
<= count
; i
++)
2481 cache
->saved_regs
[ARM_D0_REGNUM
+ start
+ i
].addr
= vsp
;
2485 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2488 else if ((insn
& 0xf8) == 0xb8)
2490 int count
= insn
& 0x7;
2493 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2494 for (i
= 0; i
<= count
; i
++)
2496 cache
->saved_regs
[ARM_D0_REGNUM
+ 8 + i
].addr
= vsp
;
2500 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2503 else if (insn
== 0xc6)
2505 int start
= *entry
>> 4;
2506 int count
= (*entry
++) & 0xf;
2509 /* Only registers WR0..WR15 are valid. */
2510 if (start
+ count
>= 16)
2513 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2514 for (i
= 0; i
<= count
; i
++)
2516 cache
->saved_regs
[ARM_WR0_REGNUM
+ start
+ i
].addr
= vsp
;
2520 else if (insn
== 0xc7)
2522 int mask
= *entry
++;
2525 /* All-zero mask and mask >= 16 is "spare". */
2526 if (mask
== 0 || mask
>= 16)
2529 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2530 for (i
= 0; i
< 4; i
++)
2531 if (mask
& (1 << i
))
2533 cache
->saved_regs
[ARM_WCGR0_REGNUM
+ i
].addr
= vsp
;
2537 else if ((insn
& 0xf8) == 0xc0)
2539 int count
= insn
& 0x7;
2542 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2543 for (i
= 0; i
<= count
; i
++)
2545 cache
->saved_regs
[ARM_WR0_REGNUM
+ 10 + i
].addr
= vsp
;
2549 else if (insn
== 0xc8)
2551 int start
= *entry
>> 4;
2552 int count
= (*entry
++) & 0xf;
2555 /* Only registers D0..D31 are valid. */
2556 if (start
+ count
>= 16)
2559 /* Pop VFP double-precision registers
2560 D[16+start]..D[16+start+count]. */
2561 for (i
= 0; i
<= count
; i
++)
2563 cache
->saved_regs
[ARM_D0_REGNUM
+ 16 + start
+ i
].addr
= vsp
;
2567 else if (insn
== 0xc9)
2569 int start
= *entry
>> 4;
2570 int count
= (*entry
++) & 0xf;
2573 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2574 for (i
= 0; i
<= count
; i
++)
2576 cache
->saved_regs
[ARM_D0_REGNUM
+ start
+ i
].addr
= vsp
;
2580 else if ((insn
& 0xf8) == 0xd0)
2582 int count
= insn
& 0x7;
2585 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2586 for (i
= 0; i
<= count
; i
++)
2588 cache
->saved_regs
[ARM_D0_REGNUM
+ 8 + i
].addr
= vsp
;
2594 /* Everything else is "spare". */
2599 /* If we restore SP from a register, assume this was the frame register.
2600 Otherwise just fall back to SP as frame register. */
2601 if (trad_frame_realreg_p (cache
->saved_regs
, ARM_SP_REGNUM
))
2602 cache
->framereg
= cache
->saved_regs
[ARM_SP_REGNUM
].realreg
;
2604 cache
->framereg
= ARM_SP_REGNUM
;
2606 /* Determine offset to previous frame. */
2608 = vsp
- get_frame_register_unsigned (this_frame
, cache
->framereg
);
2610 /* We already got the previous SP. */
2611 cache
->prev_sp
= vsp
;
2616 /* Unwinding via ARM exception table entries. Note that the sniffer
2617 already computes a filled-in prologue cache, which is then used
2618 with the same arm_prologue_this_id and arm_prologue_prev_register
2619 routines also used for prologue-parsing based unwinding. */
2622 arm_exidx_unwind_sniffer (const struct frame_unwind
*self
,
2623 struct frame_info
*this_frame
,
2624 void **this_prologue_cache
)
2626 struct gdbarch
*gdbarch
= get_frame_arch (this_frame
);
2627 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
2628 CORE_ADDR addr_in_block
, exidx_region
, func_start
;
2629 struct arm_prologue_cache
*cache
;
2632 /* See if we have an ARM exception table entry covering this address. */
2633 addr_in_block
= get_frame_address_in_block (this_frame
);
2634 entry
= arm_find_exidx_entry (addr_in_block
, &exidx_region
);
2638 /* The ARM exception table does not describe unwind information
2639 for arbitrary PC values, but is guaranteed to be correct only
2640 at call sites. We have to decide here whether we want to use
2641 ARM exception table information for this frame, or fall back
2642 to using prologue parsing. (Note that if we have DWARF CFI,
2643 this sniffer isn't even called -- CFI is always preferred.)
2645 Before we make this decision, however, we check whether we
2646 actually have *symbol* information for the current frame.
2647 If not, prologue parsing would not work anyway, so we might
2648 as well use the exception table and hope for the best. */
2649 if (find_pc_partial_function (addr_in_block
, NULL
, &func_start
, NULL
))
2653 /* If the next frame is "normal", we are at a call site in this
2654 frame, so exception information is guaranteed to be valid. */
2655 if (get_next_frame (this_frame
)
2656 && get_frame_type (get_next_frame (this_frame
)) == NORMAL_FRAME
)
2659 /* We also assume exception information is valid if we're currently
2660 blocked in a system call. The system library is supposed to
2661 ensure this, so that e.g. pthread cancellation works. */
2662 if (arm_frame_is_thumb (this_frame
))
2666 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame
) - 2,
2667 2, byte_order_for_code
, &insn
)
2668 && (insn
& 0xff00) == 0xdf00 /* svc */)
2675 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame
) - 4,
2676 4, byte_order_for_code
, &insn
)
2677 && (insn
& 0x0f000000) == 0x0f000000 /* svc */)
2681 /* Bail out if we don't know that exception information is valid. */
2685 /* The ARM exception index does not mark the *end* of the region
2686 covered by the entry, and some functions will not have any entry.
2687 To correctly recognize the end of the covered region, the linker
2688 should have inserted dummy records with a CANTUNWIND marker.
2690 Unfortunately, current versions of GNU ld do not reliably do
2691 this, and thus we may have found an incorrect entry above.
2692 As a (temporary) sanity check, we only use the entry if it
2693 lies *within* the bounds of the function. Note that this check
2694 might reject perfectly valid entries that just happen to cover
2695 multiple functions; therefore this check ought to be removed
2696 once the linker is fixed. */
2697 if (func_start
> exidx_region
)
2701 /* Decode the list of unwinding instructions into a prologue cache.
2702 Note that this may fail due to e.g. a "refuse to unwind" code. */
2703 cache
= arm_exidx_fill_cache (this_frame
, entry
);
2707 *this_prologue_cache
= cache
;
2711 struct frame_unwind arm_exidx_unwind
= {
2713 default_frame_unwind_stop_reason
,
2714 arm_prologue_this_id
,
2715 arm_prologue_prev_register
,
2717 arm_exidx_unwind_sniffer
2720 static struct arm_prologue_cache
*
2721 arm_make_epilogue_frame_cache (struct frame_info
*this_frame
)
2723 struct arm_prologue_cache
*cache
;
2726 cache
= FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache
);
2727 cache
->saved_regs
= trad_frame_alloc_saved_regs (this_frame
);
2729 /* Still rely on the offset calculated from prologue. */
2730 arm_scan_prologue (this_frame
, cache
);
2732 /* Since we are in epilogue, the SP has been restored. */
2733 cache
->prev_sp
= get_frame_register_unsigned (this_frame
, ARM_SP_REGNUM
);
2735 /* Calculate actual addresses of saved registers using offsets
2736 determined by arm_scan_prologue. */
2737 for (reg
= 0; reg
< gdbarch_num_regs (get_frame_arch (this_frame
)); reg
++)
2738 if (trad_frame_addr_p (cache
->saved_regs
, reg
))
2739 cache
->saved_regs
[reg
].addr
+= cache
->prev_sp
;
2744 /* Implementation of function hook 'this_id' in
2745 'struct frame_uwnind' for epilogue unwinder. */
2748 arm_epilogue_frame_this_id (struct frame_info
*this_frame
,
2750 struct frame_id
*this_id
)
2752 struct arm_prologue_cache
*cache
;
2755 if (*this_cache
== NULL
)
2756 *this_cache
= arm_make_epilogue_frame_cache (this_frame
);
2757 cache
= (struct arm_prologue_cache
*) *this_cache
;
2759 /* Use function start address as part of the frame ID. If we cannot
2760 identify the start address (due to missing symbol information),
2761 fall back to just using the current PC. */
2762 pc
= get_frame_pc (this_frame
);
2763 func
= get_frame_func (this_frame
);
2767 (*this_id
) = frame_id_build (cache
->prev_sp
, pc
);
2770 /* Implementation of function hook 'prev_register' in
2771 'struct frame_uwnind' for epilogue unwinder. */
2773 static struct value
*
2774 arm_epilogue_frame_prev_register (struct frame_info
*this_frame
,
2775 void **this_cache
, int regnum
)
2777 if (*this_cache
== NULL
)
2778 *this_cache
= arm_make_epilogue_frame_cache (this_frame
);
2780 return arm_prologue_prev_register (this_frame
, this_cache
, regnum
);
2783 static int arm_stack_frame_destroyed_p_1 (struct gdbarch
*gdbarch
,
2785 static int thumb_stack_frame_destroyed_p (struct gdbarch
*gdbarch
,
2788 /* Implementation of function hook 'sniffer' in
2789 'struct frame_uwnind' for epilogue unwinder. */
2792 arm_epilogue_frame_sniffer (const struct frame_unwind
*self
,
2793 struct frame_info
*this_frame
,
2794 void **this_prologue_cache
)
2796 if (frame_relative_level (this_frame
) == 0)
2798 struct gdbarch
*gdbarch
= get_frame_arch (this_frame
);
2799 CORE_ADDR pc
= get_frame_pc (this_frame
);
2801 if (arm_frame_is_thumb (this_frame
))
2802 return thumb_stack_frame_destroyed_p (gdbarch
, pc
);
2804 return arm_stack_frame_destroyed_p_1 (gdbarch
, pc
);
2810 /* Frame unwinder from epilogue. */
2812 static const struct frame_unwind arm_epilogue_frame_unwind
=
2815 default_frame_unwind_stop_reason
,
2816 arm_epilogue_frame_this_id
,
2817 arm_epilogue_frame_prev_register
,
2819 arm_epilogue_frame_sniffer
,
2822 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2823 trampoline, return the target PC. Otherwise return 0.
2825 void call0a (char c, short s, int i, long l) {}
2829 (*pointer_to_call0a) (c, s, i, l);
2832 Instead of calling a stub library function _call_via_xx (xx is
2833 the register name), GCC may inline the trampoline in the object
2834 file as below (register r2 has the address of call0a).
2837 .type main, %function
2846 The trampoline 'bx r2' doesn't belong to main. */
2849 arm_skip_bx_reg (struct frame_info
*frame
, CORE_ADDR pc
)
2851 /* The heuristics of recognizing such trampoline is that FRAME is
2852 executing in Thumb mode and the instruction on PC is 'bx Rm'. */
2853 if (arm_frame_is_thumb (frame
))
2857 if (target_read_memory (pc
, buf
, 2) == 0)
2859 struct gdbarch
*gdbarch
= get_frame_arch (frame
);
2860 enum bfd_endian byte_order_for_code
2861 = gdbarch_byte_order_for_code (gdbarch
);
2863 = extract_unsigned_integer (buf
, 2, byte_order_for_code
);
2865 if ((insn
& 0xff80) == 0x4700) /* bx <Rm> */
2868 = get_frame_register_unsigned (frame
, bits (insn
, 3, 6));
2870 /* Clear the LSB so that gdb core sets step-resume
2871 breakpoint at the right address. */
2872 return UNMAKE_THUMB_ADDR (dest
);
2880 static struct arm_prologue_cache
*
2881 arm_make_stub_cache (struct frame_info
*this_frame
)
2883 struct arm_prologue_cache
*cache
;
2885 cache
= FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache
);
2886 cache
->saved_regs
= trad_frame_alloc_saved_regs (this_frame
);
2888 cache
->prev_sp
= get_frame_register_unsigned (this_frame
, ARM_SP_REGNUM
);
2893 /* Our frame ID for a stub frame is the current SP and LR. */
2896 arm_stub_this_id (struct frame_info
*this_frame
,
2898 struct frame_id
*this_id
)
2900 struct arm_prologue_cache
*cache
;
2902 if (*this_cache
== NULL
)
2903 *this_cache
= arm_make_stub_cache (this_frame
);
2904 cache
= (struct arm_prologue_cache
*) *this_cache
;
2906 *this_id
= frame_id_build (cache
->prev_sp
, get_frame_pc (this_frame
));
2910 arm_stub_unwind_sniffer (const struct frame_unwind
*self
,
2911 struct frame_info
*this_frame
,
2912 void **this_prologue_cache
)
2914 CORE_ADDR addr_in_block
;
2916 CORE_ADDR pc
, start_addr
;
2919 addr_in_block
= get_frame_address_in_block (this_frame
);
2920 pc
= get_frame_pc (this_frame
);
2921 if (in_plt_section (addr_in_block
)
2922 /* We also use the stub winder if the target memory is unreadable
2923 to avoid having the prologue unwinder trying to read it. */
2924 || target_read_memory (pc
, dummy
, 4) != 0)
2927 if (find_pc_partial_function (pc
, &name
, &start_addr
, NULL
) == 0
2928 && arm_skip_bx_reg (this_frame
, pc
) != 0)
2934 struct frame_unwind arm_stub_unwind
= {
2936 default_frame_unwind_stop_reason
,
2938 arm_prologue_prev_register
,
2940 arm_stub_unwind_sniffer
2943 /* Put here the code to store, into CACHE->saved_regs, the addresses
2944 of the saved registers of frame described by THIS_FRAME. CACHE is
2947 static struct arm_prologue_cache
*
2948 arm_m_exception_cache (struct frame_info
*this_frame
)
2950 struct gdbarch
*gdbarch
= get_frame_arch (this_frame
);
2951 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
2952 struct arm_prologue_cache
*cache
;
2953 CORE_ADDR unwound_sp
;
2956 cache
= FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache
);
2957 cache
->saved_regs
= trad_frame_alloc_saved_regs (this_frame
);
2959 unwound_sp
= get_frame_register_unsigned (this_frame
,
2962 /* The hardware saves eight 32-bit words, comprising xPSR,
2963 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
2964 "B1.5.6 Exception entry behavior" in
2965 "ARMv7-M Architecture Reference Manual". */
2966 cache
->saved_regs
[0].addr
= unwound_sp
;
2967 cache
->saved_regs
[1].addr
= unwound_sp
+ 4;
2968 cache
->saved_regs
[2].addr
= unwound_sp
+ 8;
2969 cache
->saved_regs
[3].addr
= unwound_sp
+ 12;
2970 cache
->saved_regs
[12].addr
= unwound_sp
+ 16;
2971 cache
->saved_regs
[14].addr
= unwound_sp
+ 20;
2972 cache
->saved_regs
[15].addr
= unwound_sp
+ 24;
2973 cache
->saved_regs
[ARM_PS_REGNUM
].addr
= unwound_sp
+ 28;
2975 /* If bit 9 of the saved xPSR is set, then there is a four-byte
2976 aligner between the top of the 32-byte stack frame and the
2977 previous context's stack pointer. */
2978 cache
->prev_sp
= unwound_sp
+ 32;
2979 if (safe_read_memory_integer (unwound_sp
+ 28, 4, byte_order
, &xpsr
)
2980 && (xpsr
& (1 << 9)) != 0)
2981 cache
->prev_sp
+= 4;
2986 /* Implementation of function hook 'this_id' in
2987 'struct frame_uwnind'. */
2990 arm_m_exception_this_id (struct frame_info
*this_frame
,
2992 struct frame_id
*this_id
)
2994 struct arm_prologue_cache
*cache
;
2996 if (*this_cache
== NULL
)
2997 *this_cache
= arm_m_exception_cache (this_frame
);
2998 cache
= (struct arm_prologue_cache
*) *this_cache
;
3000 /* Our frame ID for a stub frame is the current SP and LR. */
3001 *this_id
= frame_id_build (cache
->prev_sp
,
3002 get_frame_pc (this_frame
));
3005 /* Implementation of function hook 'prev_register' in
3006 'struct frame_uwnind'. */
3008 static struct value
*
3009 arm_m_exception_prev_register (struct frame_info
*this_frame
,
3013 struct arm_prologue_cache
*cache
;
3015 if (*this_cache
== NULL
)
3016 *this_cache
= arm_m_exception_cache (this_frame
);
3017 cache
= (struct arm_prologue_cache
*) *this_cache
;
3019 /* The value was already reconstructed into PREV_SP. */
3020 if (prev_regnum
== ARM_SP_REGNUM
)
3021 return frame_unwind_got_constant (this_frame
, prev_regnum
,
3024 return trad_frame_get_prev_register (this_frame
, cache
->saved_regs
,
3028 /* Implementation of function hook 'sniffer' in
3029 'struct frame_uwnind'. */
3032 arm_m_exception_unwind_sniffer (const struct frame_unwind
*self
,
3033 struct frame_info
*this_frame
,
3034 void **this_prologue_cache
)
3036 CORE_ADDR this_pc
= get_frame_pc (this_frame
);
3038 /* No need to check is_m; this sniffer is only registered for
3039 M-profile architectures. */
3041 /* Check if exception frame returns to a magic PC value. */
3042 return arm_m_addr_is_magic (this_pc
);
3045 /* Frame unwinder for M-profile exceptions. */
3047 struct frame_unwind arm_m_exception_unwind
=
3050 default_frame_unwind_stop_reason
,
3051 arm_m_exception_this_id
,
3052 arm_m_exception_prev_register
,
3054 arm_m_exception_unwind_sniffer
3058 arm_normal_frame_base (struct frame_info
*this_frame
, void **this_cache
)
3060 struct arm_prologue_cache
*cache
;
3062 if (*this_cache
== NULL
)
3063 *this_cache
= arm_make_prologue_cache (this_frame
);
3064 cache
= (struct arm_prologue_cache
*) *this_cache
;
3066 return cache
->prev_sp
- cache
->framesize
;
3069 struct frame_base arm_normal_base
= {
3070 &arm_prologue_unwind
,
3071 arm_normal_frame_base
,
3072 arm_normal_frame_base
,
3073 arm_normal_frame_base
3076 static struct value
*
3077 arm_dwarf2_prev_register (struct frame_info
*this_frame
, void **this_cache
,
3080 struct gdbarch
* gdbarch
= get_frame_arch (this_frame
);
3082 ULONGEST t_bit
= arm_psr_thumb_bit (gdbarch
);
3087 /* The PC is normally copied from the return column, which
3088 describes saves of LR. However, that version may have an
3089 extra bit set to indicate Thumb state. The bit is not
3091 lr
= frame_unwind_register_unsigned (this_frame
, ARM_LR_REGNUM
);
3092 return frame_unwind_got_constant (this_frame
, regnum
,
3093 arm_addr_bits_remove (gdbarch
, lr
));
3096 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3097 cpsr
= get_frame_register_unsigned (this_frame
, regnum
);
3098 lr
= frame_unwind_register_unsigned (this_frame
, ARM_LR_REGNUM
);
3099 if (IS_THUMB_ADDR (lr
))
3103 return frame_unwind_got_constant (this_frame
, regnum
, cpsr
);
3106 internal_error (__FILE__
, __LINE__
,
3107 _("Unexpected register %d"), regnum
);
3112 arm_dwarf2_frame_init_reg (struct gdbarch
*gdbarch
, int regnum
,
3113 struct dwarf2_frame_state_reg
*reg
,
3114 struct frame_info
*this_frame
)
3120 reg
->how
= DWARF2_FRAME_REG_FN
;
3121 reg
->loc
.fn
= arm_dwarf2_prev_register
;
3124 reg
->how
= DWARF2_FRAME_REG_CFA
;
3129 /* Implement the stack_frame_destroyed_p gdbarch method. */
3132 thumb_stack_frame_destroyed_p (struct gdbarch
*gdbarch
, CORE_ADDR pc
)
3134 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
3135 unsigned int insn
, insn2
;
3136 int found_return
= 0, found_stack_adjust
= 0;
3137 CORE_ADDR func_start
, func_end
;
3141 if (!find_pc_partial_function (pc
, NULL
, &func_start
, &func_end
))
3144 /* The epilogue is a sequence of instructions along the following lines:
3146 - add stack frame size to SP or FP
3147 - [if frame pointer used] restore SP from FP
3148 - restore registers from SP [may include PC]
3149 - a return-type instruction [if PC wasn't already restored]
3151 In a first pass, we scan forward from the current PC and verify the
3152 instructions we find as compatible with this sequence, ending in a
3155 However, this is not sufficient to distinguish indirect function calls
3156 within a function from indirect tail calls in the epilogue in some cases.
3157 Therefore, if we didn't already find any SP-changing instruction during
3158 forward scan, we add a backward scanning heuristic to ensure we actually
3159 are in the epilogue. */
3162 while (scan_pc
< func_end
&& !found_return
)
3164 if (target_read_memory (scan_pc
, buf
, 2))
3168 insn
= extract_unsigned_integer (buf
, 2, byte_order_for_code
);
3170 if ((insn
& 0xff80) == 0x4700) /* bx <Rm> */
3172 else if (insn
== 0x46f7) /* mov pc, lr */
3174 else if (thumb_instruction_restores_sp (insn
))
3176 if ((insn
& 0xff00) == 0xbd00) /* pop <registers, PC> */
3179 else if (thumb_insn_size (insn
) == 4) /* 32-bit Thumb-2 instruction */
3181 if (target_read_memory (scan_pc
, buf
, 2))
3185 insn2
= extract_unsigned_integer (buf
, 2, byte_order_for_code
);
3187 if (insn
== 0xe8bd) /* ldm.w sp!, <registers> */
3189 if (insn2
& 0x8000) /* <registers> include PC. */
3192 else if (insn
== 0xf85d /* ldr.w <Rt>, [sp], #4 */
3193 && (insn2
& 0x0fff) == 0x0b04)
3195 if ((insn2
& 0xf000) == 0xf000) /* <Rt> is PC. */
3198 else if ((insn
& 0xffbf) == 0xecbd /* vldm sp!, <list> */
3199 && (insn2
& 0x0e00) == 0x0a00)
3211 /* Since any instruction in the epilogue sequence, with the possible
3212 exception of return itself, updates the stack pointer, we need to
3213 scan backwards for at most one instruction. Try either a 16-bit or
3214 a 32-bit instruction. This is just a heuristic, so we do not worry
3215 too much about false positives. */
3217 if (pc
- 4 < func_start
)
3219 if (target_read_memory (pc
- 4, buf
, 4))
3222 insn
= extract_unsigned_integer (buf
, 2, byte_order_for_code
);
3223 insn2
= extract_unsigned_integer (buf
+ 2, 2, byte_order_for_code
);
3225 if (thumb_instruction_restores_sp (insn2
))
3226 found_stack_adjust
= 1;
3227 else if (insn
== 0xe8bd) /* ldm.w sp!, <registers> */
3228 found_stack_adjust
= 1;
3229 else if (insn
== 0xf85d /* ldr.w <Rt>, [sp], #4 */
3230 && (insn2
& 0x0fff) == 0x0b04)
3231 found_stack_adjust
= 1;
3232 else if ((insn
& 0xffbf) == 0xecbd /* vldm sp!, <list> */
3233 && (insn2
& 0x0e00) == 0x0a00)
3234 found_stack_adjust
= 1;
3236 return found_stack_adjust
;
3240 arm_stack_frame_destroyed_p_1 (struct gdbarch
*gdbarch
, CORE_ADDR pc
)
3242 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
3245 CORE_ADDR func_start
, func_end
;
3247 if (!find_pc_partial_function (pc
, NULL
, &func_start
, &func_end
))
3250 /* We are in the epilogue if the previous instruction was a stack
3251 adjustment and the next instruction is a possible return (bx, mov
3252 pc, or pop). We could have to scan backwards to find the stack
3253 adjustment, or forwards to find the return, but this is a decent
3254 approximation. First scan forwards. */
3257 insn
= read_memory_unsigned_integer (pc
, 4, byte_order_for_code
);
3258 if (bits (insn
, 28, 31) != INST_NV
)
3260 if ((insn
& 0x0ffffff0) == 0x012fff10)
3263 else if ((insn
& 0x0ffffff0) == 0x01a0f000)
3266 else if ((insn
& 0x0fff0000) == 0x08bd0000
3267 && (insn
& 0x0000c000) != 0)
3268 /* POP (LDMIA), including PC or LR. */
3275 /* Scan backwards. This is just a heuristic, so do not worry about
3276 false positives from mode changes. */
3278 if (pc
< func_start
+ 4)
3281 insn
= read_memory_unsigned_integer (pc
- 4, 4, byte_order_for_code
);
3282 if (arm_instruction_restores_sp (insn
))
3288 /* Implement the stack_frame_destroyed_p gdbarch method. */
3291 arm_stack_frame_destroyed_p (struct gdbarch
*gdbarch
, CORE_ADDR pc
)
3293 if (arm_pc_is_thumb (gdbarch
, pc
))
3294 return thumb_stack_frame_destroyed_p (gdbarch
, pc
);
3296 return arm_stack_frame_destroyed_p_1 (gdbarch
, pc
);
3299 /* When arguments must be pushed onto the stack, they go on in reverse
3300 order. The code below implements a FILO (stack) to do this. */
3305 struct stack_item
*prev
;
3309 static struct stack_item
*
3310 push_stack_item (struct stack_item
*prev
, const gdb_byte
*contents
, int len
)
3312 struct stack_item
*si
;
3313 si
= XNEW (struct stack_item
);
3314 si
->data
= (gdb_byte
*) xmalloc (len
);
3317 memcpy (si
->data
, contents
, len
);
3321 static struct stack_item
*
3322 pop_stack_item (struct stack_item
*si
)
3324 struct stack_item
*dead
= si
;
3331 /* Implement the gdbarch type alignment method, overrides the generic
3332 alignment algorithm for anything that is arm specific. */
3335 arm_type_align (gdbarch
*gdbarch
, struct type
*t
)
3337 t
= check_typedef (t
);
3338 if (TYPE_CODE (t
) == TYPE_CODE_ARRAY
&& TYPE_VECTOR (t
))
3340 /* Use the natural alignment for vector types (the same for
3341 scalar type), but the maximum alignment is 64-bit. */
3342 if (TYPE_LENGTH (t
) > 8)
3345 return TYPE_LENGTH (t
);
3348 /* Allow the common code to calculate the alignment. */
/* Possible base types for a candidate for passing and returning in
   VFP registers.
   NOTE(review): enumerator list reconstructed from the switch users
   below; verify against upstream gdb/arm-tdep.c.  */

enum arm_vfp_cprc_base_type
{
  VFP_CPRC_UNKNOWN,
  VFP_CPRC_SINGLE,
  VFP_CPRC_DOUBLE,
  VFP_CPRC_VEC64,
  VFP_CPRC_VEC128
};
3367 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b
)
3371 case VFP_CPRC_SINGLE
:
3373 case VFP_CPRC_DOUBLE
:
3375 case VFP_CPRC_VEC64
:
3377 case VFP_CPRC_VEC128
:
3380 internal_error (__FILE__
, __LINE__
, _("Invalid VFP CPRC type: %d."),
3385 /* The character ('s', 'd' or 'q') for the type of VFP register used
3386 for passing base type B. */
3389 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b
)
3393 case VFP_CPRC_SINGLE
:
3395 case VFP_CPRC_DOUBLE
:
3397 case VFP_CPRC_VEC64
:
3399 case VFP_CPRC_VEC128
:
3402 internal_error (__FILE__
, __LINE__
, _("Invalid VFP CPRC type: %d."),
3407 /* Determine whether T may be part of a candidate for passing and
3408 returning in VFP registers, ignoring the limit on the total number
3409 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3410 classification of the first valid component found; if it is not
3411 VFP_CPRC_UNKNOWN, all components must have the same classification
3412 as *BASE_TYPE. If it is found that T contains a type not permitted
3413 for passing and returning in VFP registers, a type differently
3414 classified from *BASE_TYPE, or two types differently classified
3415 from each other, return -1, otherwise return the total number of
3416 base-type elements found (possibly 0 in an empty structure or
3417 array). Vector types are not currently supported, matching the
3418 generic AAPCS support. */
3421 arm_vfp_cprc_sub_candidate (struct type
*t
,
3422 enum arm_vfp_cprc_base_type
*base_type
)
3424 t
= check_typedef (t
);
3425 switch (TYPE_CODE (t
))
3428 switch (TYPE_LENGTH (t
))
3431 if (*base_type
== VFP_CPRC_UNKNOWN
)
3432 *base_type
= VFP_CPRC_SINGLE
;
3433 else if (*base_type
!= VFP_CPRC_SINGLE
)
3438 if (*base_type
== VFP_CPRC_UNKNOWN
)
3439 *base_type
= VFP_CPRC_DOUBLE
;
3440 else if (*base_type
!= VFP_CPRC_DOUBLE
)
3449 case TYPE_CODE_COMPLEX
:
3450 /* Arguments of complex T where T is one of the types float or
3451 double get treated as if they are implemented as:
3460 switch (TYPE_LENGTH (t
))
3463 if (*base_type
== VFP_CPRC_UNKNOWN
)
3464 *base_type
= VFP_CPRC_SINGLE
;
3465 else if (*base_type
!= VFP_CPRC_SINGLE
)
3470 if (*base_type
== VFP_CPRC_UNKNOWN
)
3471 *base_type
= VFP_CPRC_DOUBLE
;
3472 else if (*base_type
!= VFP_CPRC_DOUBLE
)
3481 case TYPE_CODE_ARRAY
:
3483 if (TYPE_VECTOR (t
))
3485 /* A 64-bit or 128-bit containerized vector type are VFP
3487 switch (TYPE_LENGTH (t
))
3490 if (*base_type
== VFP_CPRC_UNKNOWN
)
3491 *base_type
= VFP_CPRC_VEC64
;
3494 if (*base_type
== VFP_CPRC_UNKNOWN
)
3495 *base_type
= VFP_CPRC_VEC128
;
3506 count
= arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t
),
3510 if (TYPE_LENGTH (t
) == 0)
3512 gdb_assert (count
== 0);
3515 else if (count
== 0)
3517 unitlen
= arm_vfp_cprc_unit_length (*base_type
);
3518 gdb_assert ((TYPE_LENGTH (t
) % unitlen
) == 0);
3519 return TYPE_LENGTH (t
) / unitlen
;
3524 case TYPE_CODE_STRUCT
:
3529 for (i
= 0; i
< TYPE_NFIELDS (t
); i
++)
3533 if (!field_is_static (&TYPE_FIELD (t
, i
)))
3534 sub_count
= arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t
, i
),
3536 if (sub_count
== -1)
3540 if (TYPE_LENGTH (t
) == 0)
3542 gdb_assert (count
== 0);
3545 else if (count
== 0)
3547 unitlen
= arm_vfp_cprc_unit_length (*base_type
);
3548 if (TYPE_LENGTH (t
) != unitlen
* count
)
3553 case TYPE_CODE_UNION
:
3558 for (i
= 0; i
< TYPE_NFIELDS (t
); i
++)
3560 int sub_count
= arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t
, i
),
3562 if (sub_count
== -1)
3564 count
= (count
> sub_count
? count
: sub_count
);
3566 if (TYPE_LENGTH (t
) == 0)
3568 gdb_assert (count
== 0);
3571 else if (count
== 0)
3573 unitlen
= arm_vfp_cprc_unit_length (*base_type
);
3574 if (TYPE_LENGTH (t
) != unitlen
* count
)
3586 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3587 if passed to or returned from a non-variadic function with the VFP
3588 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3589 *BASE_TYPE to the base type for T and *COUNT to the number of
3590 elements of that base type before returning. */
3593 arm_vfp_call_candidate (struct type
*t
, enum arm_vfp_cprc_base_type
*base_type
,
3596 enum arm_vfp_cprc_base_type b
= VFP_CPRC_UNKNOWN
;
3597 int c
= arm_vfp_cprc_sub_candidate (t
, &b
);
3598 if (c
<= 0 || c
> 4)
3605 /* Return 1 if the VFP ABI should be used for passing arguments to and
3606 returning values from a function of type FUNC_TYPE, 0
3610 arm_vfp_abi_for_function (struct gdbarch
*gdbarch
, struct type
*func_type
)
3612 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
3613 /* Variadic functions always use the base ABI. Assume that functions
3614 without debug info are not variadic. */
3615 if (func_type
&& TYPE_VARARGS (check_typedef (func_type
)))
3617 /* The VFP ABI is only supported as a variant of AAPCS. */
3618 if (tdep
->arm_abi
!= ARM_ABI_AAPCS
)
3620 return gdbarch_tdep (gdbarch
)->fp_model
== ARM_FLOAT_VFP
;
3623 /* We currently only support passing parameters in integer registers, which
3624 conforms with GCC's default model, and VFP argument passing following
3625 the VFP variant of AAPCS. Several other variants exist and
3626 we should probably support some of them based on the selected ABI. */
3629 arm_push_dummy_call (struct gdbarch
*gdbarch
, struct value
*function
,
3630 struct regcache
*regcache
, CORE_ADDR bp_addr
, int nargs
,
3631 struct value
**args
, CORE_ADDR sp
,
3632 function_call_return_method return_method
,
3633 CORE_ADDR struct_addr
)
3635 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
3639 struct stack_item
*si
= NULL
;
3642 unsigned vfp_regs_free
= (1 << 16) - 1;
3644 /* Determine the type of this function and whether the VFP ABI
3646 ftype
= check_typedef (value_type (function
));
3647 if (TYPE_CODE (ftype
) == TYPE_CODE_PTR
)
3648 ftype
= check_typedef (TYPE_TARGET_TYPE (ftype
));
3649 use_vfp_abi
= arm_vfp_abi_for_function (gdbarch
, ftype
);
3651 /* Set the return address. For the ARM, the return breakpoint is
3652 always at BP_ADDR. */
3653 if (arm_pc_is_thumb (gdbarch
, bp_addr
))
3655 regcache_cooked_write_unsigned (regcache
, ARM_LR_REGNUM
, bp_addr
);
3657 /* Walk through the list of args and determine how large a temporary
3658 stack is required. Need to take care here as structs may be
3659 passed on the stack, and we have to push them. */
3662 argreg
= ARM_A1_REGNUM
;
3665 /* The struct_return pointer occupies the first parameter
3666 passing register. */
3667 if (return_method
== return_method_struct
)
3670 fprintf_unfiltered (gdb_stdlog
, "struct return in %s = %s\n",
3671 gdbarch_register_name (gdbarch
, argreg
),
3672 paddress (gdbarch
, struct_addr
));
3673 regcache_cooked_write_unsigned (regcache
, argreg
, struct_addr
);
3677 for (argnum
= 0; argnum
< nargs
; argnum
++)
3680 struct type
*arg_type
;
3681 struct type
*target_type
;
3682 enum type_code typecode
;
3683 const bfd_byte
*val
;
3685 enum arm_vfp_cprc_base_type vfp_base_type
;
3687 int may_use_core_reg
= 1;
3689 arg_type
= check_typedef (value_type (args
[argnum
]));
3690 len
= TYPE_LENGTH (arg_type
);
3691 target_type
= TYPE_TARGET_TYPE (arg_type
);
3692 typecode
= TYPE_CODE (arg_type
);
3693 val
= value_contents (args
[argnum
]);
3695 align
= type_align (arg_type
);
3696 /* Round alignment up to a whole number of words. */
3697 align
= (align
+ ARM_INT_REGISTER_SIZE
- 1)
3698 & ~(ARM_INT_REGISTER_SIZE
- 1);
3699 /* Different ABIs have different maximum alignments. */
3700 if (gdbarch_tdep (gdbarch
)->arm_abi
== ARM_ABI_APCS
)
3702 /* The APCS ABI only requires word alignment. */
3703 align
= ARM_INT_REGISTER_SIZE
;
3707 /* The AAPCS requires at most doubleword alignment. */
3708 if (align
> ARM_INT_REGISTER_SIZE
* 2)
3709 align
= ARM_INT_REGISTER_SIZE
* 2;
3713 && arm_vfp_call_candidate (arg_type
, &vfp_base_type
,
3721 /* Because this is a CPRC it cannot go in a core register or
3722 cause a core register to be skipped for alignment.
3723 Either it goes in VFP registers and the rest of this loop
3724 iteration is skipped for this argument, or it goes on the
3725 stack (and the stack alignment code is correct for this
3727 may_use_core_reg
= 0;
3729 unit_length
= arm_vfp_cprc_unit_length (vfp_base_type
);
3730 shift
= unit_length
/ 4;
3731 mask
= (1 << (shift
* vfp_base_count
)) - 1;
3732 for (regno
= 0; regno
< 16; regno
+= shift
)
3733 if (((vfp_regs_free
>> regno
) & mask
) == mask
)
3742 vfp_regs_free
&= ~(mask
<< regno
);
3743 reg_scaled
= regno
/ shift
;
3744 reg_char
= arm_vfp_cprc_reg_char (vfp_base_type
);
3745 for (i
= 0; i
< vfp_base_count
; i
++)
3749 if (reg_char
== 'q')
3750 arm_neon_quad_write (gdbarch
, regcache
, reg_scaled
+ i
,
3751 val
+ i
* unit_length
);
3754 xsnprintf (name_buf
, sizeof (name_buf
), "%c%d",
3755 reg_char
, reg_scaled
+ i
);
3756 regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
3758 regcache
->cooked_write (regnum
, val
+ i
* unit_length
);
3765 /* This CPRC could not go in VFP registers, so all VFP
3766 registers are now marked as used. */
3771 /* Push stack padding for doubleword alignment. */
3772 if (nstack
& (align
- 1))
3774 si
= push_stack_item (si
, val
, ARM_INT_REGISTER_SIZE
);
3775 nstack
+= ARM_INT_REGISTER_SIZE
;
3778 /* Doubleword aligned quantities must go in even register pairs. */
3779 if (may_use_core_reg
3780 && argreg
<= ARM_LAST_ARG_REGNUM
3781 && align
> ARM_INT_REGISTER_SIZE
3785 /* If the argument is a pointer to a function, and it is a
3786 Thumb function, create a LOCAL copy of the value and set
3787 the THUMB bit in it. */
3788 if (TYPE_CODE_PTR
== typecode
3789 && target_type
!= NULL
3790 && TYPE_CODE_FUNC
== TYPE_CODE (check_typedef (target_type
)))
3792 CORE_ADDR regval
= extract_unsigned_integer (val
, len
, byte_order
);
3793 if (arm_pc_is_thumb (gdbarch
, regval
))
3795 bfd_byte
*copy
= (bfd_byte
*) alloca (len
);
3796 store_unsigned_integer (copy
, len
, byte_order
,
3797 MAKE_THUMB_ADDR (regval
));
3802 /* Copy the argument to general registers or the stack in
3803 register-sized pieces. Large arguments are split between
3804 registers and stack. */
3807 int partial_len
= len
< ARM_INT_REGISTER_SIZE
3808 ? len
: ARM_INT_REGISTER_SIZE
;
3810 = extract_unsigned_integer (val
, partial_len
, byte_order
);
3812 if (may_use_core_reg
&& argreg
<= ARM_LAST_ARG_REGNUM
)
3814 /* The argument is being passed in a general purpose
3816 if (byte_order
== BFD_ENDIAN_BIG
)
3817 regval
<<= (ARM_INT_REGISTER_SIZE
- partial_len
) * 8;
3819 fprintf_unfiltered (gdb_stdlog
, "arg %d in %s = 0x%s\n",
3821 gdbarch_register_name
3823 phex (regval
, ARM_INT_REGISTER_SIZE
));
3824 regcache_cooked_write_unsigned (regcache
, argreg
, regval
);
3829 gdb_byte buf
[ARM_INT_REGISTER_SIZE
];
3831 memset (buf
, 0, sizeof (buf
));
3832 store_unsigned_integer (buf
, partial_len
, byte_order
, regval
);
3834 /* Push the arguments onto the stack. */
3836 fprintf_unfiltered (gdb_stdlog
, "arg %d @ sp + %d\n",
3838 si
= push_stack_item (si
, buf
, ARM_INT_REGISTER_SIZE
);
3839 nstack
+= ARM_INT_REGISTER_SIZE
;
3846 /* If we have an odd number of words to push, then decrement the stack
3847 by one word now, so first stack argument will be dword aligned. */
3854 write_memory (sp
, si
->data
, si
->len
);
3855 si
= pop_stack_item (si
);
3858 /* Finally, update the SP register. */
3859 regcache_cooked_write_unsigned (regcache
, ARM_SP_REGNUM
, sp
);
3865 /* Always align the frame to an 8-byte boundary. This is required on
3866 some platforms and harmless on the rest. */
3869 arm_frame_align (struct gdbarch
*gdbarch
, CORE_ADDR sp
)
3871 /* Align the stack to eight bytes. */
3872 return sp
& ~ (CORE_ADDR
) 7;
3876 print_fpu_flags (struct ui_file
*file
, int flags
)
3878 if (flags
& (1 << 0))
3879 fputs_filtered ("IVO ", file
);
3880 if (flags
& (1 << 1))
3881 fputs_filtered ("DVZ ", file
);
3882 if (flags
& (1 << 2))
3883 fputs_filtered ("OFL ", file
);
3884 if (flags
& (1 << 3))
3885 fputs_filtered ("UFL ", file
);
3886 if (flags
& (1 << 4))
3887 fputs_filtered ("INX ", file
);
3888 fputc_filtered ('\n', file
);
3891 /* Print interesting information about the floating point processor
3892 (if present) or emulator. */
3894 arm_print_float_info (struct gdbarch
*gdbarch
, struct ui_file
*file
,
3895 struct frame_info
*frame
, const char *args
)
3897 unsigned long status
= get_frame_register_unsigned (frame
, ARM_FPS_REGNUM
);
3900 type
= (status
>> 24) & 127;
3901 if (status
& (1 << 31))
3902 fprintf_filtered (file
, _("Hardware FPU type %d\n"), type
);
3904 fprintf_filtered (file
, _("Software FPU type %d\n"), type
);
3905 /* i18n: [floating point unit] mask */
3906 fputs_filtered (_("mask: "), file
);
3907 print_fpu_flags (file
, status
>> 16);
3908 /* i18n: [floating point unit] flags */
3909 fputs_filtered (_("flags: "), file
);
3910 print_fpu_flags (file
, status
);
3913 /* Construct the ARM extended floating point type. */
3914 static struct type
*
3915 arm_ext_type (struct gdbarch
*gdbarch
)
3917 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
3919 if (!tdep
->arm_ext_type
)
3921 = arch_float_type (gdbarch
, -1, "builtin_type_arm_ext",
3922 floatformats_arm_ext
);
3924 return tdep
->arm_ext_type
;
3927 static struct type
*
3928 arm_neon_double_type (struct gdbarch
*gdbarch
)
3930 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
3932 if (tdep
->neon_double_type
== NULL
)
3934 struct type
*t
, *elem
;
3936 t
= arch_composite_type (gdbarch
, "__gdb_builtin_type_neon_d",
3938 elem
= builtin_type (gdbarch
)->builtin_uint8
;
3939 append_composite_type_field (t
, "u8", init_vector_type (elem
, 8));
3940 elem
= builtin_type (gdbarch
)->builtin_uint16
;
3941 append_composite_type_field (t
, "u16", init_vector_type (elem
, 4));
3942 elem
= builtin_type (gdbarch
)->builtin_uint32
;
3943 append_composite_type_field (t
, "u32", init_vector_type (elem
, 2));
3944 elem
= builtin_type (gdbarch
)->builtin_uint64
;
3945 append_composite_type_field (t
, "u64", elem
);
3946 elem
= builtin_type (gdbarch
)->builtin_float
;
3947 append_composite_type_field (t
, "f32", init_vector_type (elem
, 2));
3948 elem
= builtin_type (gdbarch
)->builtin_double
;
3949 append_composite_type_field (t
, "f64", elem
);
3951 TYPE_VECTOR (t
) = 1;
3952 TYPE_NAME (t
) = "neon_d";
3953 tdep
->neon_double_type
= t
;
3956 return tdep
->neon_double_type
;
3959 /* FIXME: The vector types are not correctly ordered on big-endian
3960 targets. Just as s0 is the low bits of d0, d0[0] is also the low
3961 bits of d0 - regardless of what unit size is being held in d0. So
3962 the offset of the first uint8 in d0 is 7, but the offset of the
3963 first float is 4. This code works as-is for little-endian
3966 static struct type
*
3967 arm_neon_quad_type (struct gdbarch
*gdbarch
)
3969 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
3971 if (tdep
->neon_quad_type
== NULL
)
3973 struct type
*t
, *elem
;
3975 t
= arch_composite_type (gdbarch
, "__gdb_builtin_type_neon_q",
3977 elem
= builtin_type (gdbarch
)->builtin_uint8
;
3978 append_composite_type_field (t
, "u8", init_vector_type (elem
, 16));
3979 elem
= builtin_type (gdbarch
)->builtin_uint16
;
3980 append_composite_type_field (t
, "u16", init_vector_type (elem
, 8));
3981 elem
= builtin_type (gdbarch
)->builtin_uint32
;
3982 append_composite_type_field (t
, "u32", init_vector_type (elem
, 4));
3983 elem
= builtin_type (gdbarch
)->builtin_uint64
;
3984 append_composite_type_field (t
, "u64", init_vector_type (elem
, 2));
3985 elem
= builtin_type (gdbarch
)->builtin_float
;
3986 append_composite_type_field (t
, "f32", init_vector_type (elem
, 4));
3987 elem
= builtin_type (gdbarch
)->builtin_double
;
3988 append_composite_type_field (t
, "f64", init_vector_type (elem
, 2));
3990 TYPE_VECTOR (t
) = 1;
3991 TYPE_NAME (t
) = "neon_q";
3992 tdep
->neon_quad_type
= t
;
3995 return tdep
->neon_quad_type
;
3998 /* Return the GDB type object for the "standard" data type of data in
4001 static struct type
*
4002 arm_register_type (struct gdbarch
*gdbarch
, int regnum
)
4004 int num_regs
= gdbarch_num_regs (gdbarch
);
4006 if (gdbarch_tdep (gdbarch
)->have_vfp_pseudos
4007 && regnum
>= num_regs
&& regnum
< num_regs
+ 32)
4008 return builtin_type (gdbarch
)->builtin_float
;
4010 if (gdbarch_tdep (gdbarch
)->have_neon_pseudos
4011 && regnum
>= num_regs
+ 32 && regnum
< num_regs
+ 32 + 16)
4012 return arm_neon_quad_type (gdbarch
);
4014 /* If the target description has register information, we are only
4015 in this function so that we can override the types of
4016 double-precision registers for NEON. */
4017 if (tdesc_has_registers (gdbarch_target_desc (gdbarch
)))
4019 struct type
*t
= tdesc_register_type (gdbarch
, regnum
);
4021 if (regnum
>= ARM_D0_REGNUM
&& regnum
< ARM_D0_REGNUM
+ 32
4022 && TYPE_CODE (t
) == TYPE_CODE_FLT
4023 && gdbarch_tdep (gdbarch
)->have_neon
)
4024 return arm_neon_double_type (gdbarch
);
4029 if (regnum
>= ARM_F0_REGNUM
&& regnum
< ARM_F0_REGNUM
+ NUM_FREGS
)
4031 if (!gdbarch_tdep (gdbarch
)->have_fpa_registers
)
4032 return builtin_type (gdbarch
)->builtin_void
;
4034 return arm_ext_type (gdbarch
);
4036 else if (regnum
== ARM_SP_REGNUM
)
4037 return builtin_type (gdbarch
)->builtin_data_ptr
;
4038 else if (regnum
== ARM_PC_REGNUM
)
4039 return builtin_type (gdbarch
)->builtin_func_ptr
;
4040 else if (regnum
>= ARRAY_SIZE (arm_register_names
))
4041 /* These registers are only supported on targets which supply
4042 an XML description. */
4043 return builtin_type (gdbarch
)->builtin_int0
;
4045 return builtin_type (gdbarch
)->builtin_uint32
;
4048 /* Map a DWARF register REGNUM onto the appropriate GDB register
4052 arm_dwarf_reg_to_regnum (struct gdbarch
*gdbarch
, int reg
)
4054 /* Core integer regs. */
4055 if (reg
>= 0 && reg
<= 15)
4058 /* Legacy FPA encoding. These were once used in a way which
4059 overlapped with VFP register numbering, so their use is
4060 discouraged, but GDB doesn't support the ARM toolchain
4061 which used them for VFP. */
4062 if (reg
>= 16 && reg
<= 23)
4063 return ARM_F0_REGNUM
+ reg
- 16;
4065 /* New assignments for the FPA registers. */
4066 if (reg
>= 96 && reg
<= 103)
4067 return ARM_F0_REGNUM
+ reg
- 96;
4069 /* WMMX register assignments. */
4070 if (reg
>= 104 && reg
<= 111)
4071 return ARM_WCGR0_REGNUM
+ reg
- 104;
4073 if (reg
>= 112 && reg
<= 127)
4074 return ARM_WR0_REGNUM
+ reg
- 112;
4076 if (reg
>= 192 && reg
<= 199)
4077 return ARM_WC0_REGNUM
+ reg
- 192;
4079 /* VFP v2 registers. A double precision value is actually
4080 in d1 rather than s2, but the ABI only defines numbering
4081 for the single precision registers. This will "just work"
4082 in GDB for little endian targets (we'll read eight bytes,
4083 starting in s0 and then progressing to s1), but will be
4084 reversed on big endian targets with VFP. This won't
4085 be a problem for the new Neon quad registers; you're supposed
4086 to use DW_OP_piece for those. */
4087 if (reg
>= 64 && reg
<= 95)
4091 xsnprintf (name_buf
, sizeof (name_buf
), "s%d", reg
- 64);
4092 return user_reg_map_name_to_regnum (gdbarch
, name_buf
,
4096 /* VFP v3 / Neon registers. This range is also used for VFP v2
4097 registers, except that it now describes d0 instead of s0. */
4098 if (reg
>= 256 && reg
<= 287)
4102 xsnprintf (name_buf
, sizeof (name_buf
), "d%d", reg
- 256);
4103 return user_reg_map_name_to_regnum (gdbarch
, name_buf
,
4110 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4112 arm_register_sim_regno (struct gdbarch
*gdbarch
, int regnum
)
4115 gdb_assert (reg
>= 0 && reg
< gdbarch_num_regs (gdbarch
));
4117 if (regnum
>= ARM_WR0_REGNUM
&& regnum
<= ARM_WR15_REGNUM
)
4118 return regnum
- ARM_WR0_REGNUM
+ SIM_ARM_IWMMXT_COP0R0_REGNUM
;
4120 if (regnum
>= ARM_WC0_REGNUM
&& regnum
<= ARM_WC7_REGNUM
)
4121 return regnum
- ARM_WC0_REGNUM
+ SIM_ARM_IWMMXT_COP1R0_REGNUM
;
4123 if (regnum
>= ARM_WCGR0_REGNUM
&& regnum
<= ARM_WCGR7_REGNUM
)
4124 return regnum
- ARM_WCGR0_REGNUM
+ SIM_ARM_IWMMXT_COP1R8_REGNUM
;
4126 if (reg
< NUM_GREGS
)
4127 return SIM_ARM_R0_REGNUM
+ reg
;
4130 if (reg
< NUM_FREGS
)
4131 return SIM_ARM_FP0_REGNUM
+ reg
;
4134 if (reg
< NUM_SREGS
)
4135 return SIM_ARM_FPS_REGNUM
+ reg
;
4138 internal_error (__FILE__
, __LINE__
, _("Bad REGNUM %d"), regnum
);
4141 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
4142 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
4143 NULL if an error occurs. BUF is freed. */
4146 extend_buffer_earlier (gdb_byte
*buf
, CORE_ADDR endaddr
,
4147 int old_len
, int new_len
)
4150 int bytes_to_read
= new_len
- old_len
;
4152 new_buf
= (gdb_byte
*) xmalloc (new_len
);
4153 memcpy (new_buf
+ bytes_to_read
, buf
, old_len
);
4155 if (target_read_code (endaddr
- new_len
, new_buf
, bytes_to_read
) != 0)
4163 /* An IT block is at most the 2-byte IT instruction followed by
4164 four 4-byte instructions. The furthest back we must search to
4165 find an IT block that affects the current instruction is thus
4166 2 + 3 * 4 == 14 bytes. */
4167 #define MAX_IT_BLOCK_PREFIX 14
4169 /* Use a quick scan if there are more than this many bytes of
4171 #define IT_SCAN_THRESHOLD 32
4173 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
4174 A breakpoint in an IT block may not be hit, depending on the
4177 arm_adjust_breakpoint_address (struct gdbarch
*gdbarch
, CORE_ADDR bpaddr
)
4181 CORE_ADDR boundary
, func_start
;
4183 enum bfd_endian order
= gdbarch_byte_order_for_code (gdbarch
);
4184 int i
, any
, last_it
, last_it_count
;
4186 /* If we are using BKPT breakpoints, none of this is necessary. */
4187 if (gdbarch_tdep (gdbarch
)->thumb2_breakpoint
== NULL
)
4190 /* ARM mode does not have this problem. */
4191 if (!arm_pc_is_thumb (gdbarch
, bpaddr
))
4194 /* We are setting a breakpoint in Thumb code that could potentially
4195 contain an IT block. The first step is to find how much Thumb
4196 code there is; we do not need to read outside of known Thumb
4198 map_type
= arm_find_mapping_symbol (bpaddr
, &boundary
);
4200 /* Thumb-2 code must have mapping symbols to have a chance. */
4203 bpaddr
= gdbarch_addr_bits_remove (gdbarch
, bpaddr
);
4205 if (find_pc_partial_function (bpaddr
, NULL
, &func_start
, NULL
)
4206 && func_start
> boundary
)
4207 boundary
= func_start
;
4209 /* Search for a candidate IT instruction. We have to do some fancy
4210 footwork to distinguish a real IT instruction from the second
4211 half of a 32-bit instruction, but there is no need for that if
4212 there's no candidate. */
4213 buf_len
= std::min (bpaddr
- boundary
, (CORE_ADDR
) MAX_IT_BLOCK_PREFIX
);
4215 /* No room for an IT instruction. */
4218 buf
= (gdb_byte
*) xmalloc (buf_len
);
4219 if (target_read_code (bpaddr
- buf_len
, buf
, buf_len
) != 0)
4222 for (i
= 0; i
< buf_len
; i
+= 2)
4224 unsigned short inst1
= extract_unsigned_integer (&buf
[i
], 2, order
);
4225 if ((inst1
& 0xff00) == 0xbf00 && (inst1
& 0x000f) != 0)
4238 /* OK, the code bytes before this instruction contain at least one
4239 halfword which resembles an IT instruction. We know that it's
4240 Thumb code, but there are still two possibilities. Either the
4241 halfword really is an IT instruction, or it is the second half of
4242 a 32-bit Thumb instruction. The only way we can tell is to
4243 scan forwards from a known instruction boundary. */
4244 if (bpaddr
- boundary
> IT_SCAN_THRESHOLD
)
4248 /* There's a lot of code before this instruction. Start with an
4249 optimistic search; it's easy to recognize halfwords that can
4250 not be the start of a 32-bit instruction, and use that to
4251 lock on to the instruction boundaries. */
4252 buf
= extend_buffer_earlier (buf
, bpaddr
, buf_len
, IT_SCAN_THRESHOLD
);
4255 buf_len
= IT_SCAN_THRESHOLD
;
4258 for (i
= 0; i
< buf_len
- sizeof (buf
) && ! definite
; i
+= 2)
4260 unsigned short inst1
= extract_unsigned_integer (&buf
[i
], 2, order
);
4261 if (thumb_insn_size (inst1
) == 2)
4268 /* At this point, if DEFINITE, BUF[I] is the first place we
4269 are sure that we know the instruction boundaries, and it is far
4270 enough from BPADDR that we could not miss an IT instruction
4271 affecting BPADDR. If ! DEFINITE, give up - start from a
4275 buf
= extend_buffer_earlier (buf
, bpaddr
, buf_len
,
4279 buf_len
= bpaddr
- boundary
;
4285 buf
= extend_buffer_earlier (buf
, bpaddr
, buf_len
, bpaddr
- boundary
);
4288 buf_len
= bpaddr
- boundary
;
4292 /* Scan forwards. Find the last IT instruction before BPADDR. */
4297 unsigned short inst1
= extract_unsigned_integer (&buf
[i
], 2, order
);
4299 if ((inst1
& 0xff00) == 0xbf00 && (inst1
& 0x000f) != 0)
4304 else if (inst1
& 0x0002)
4306 else if (inst1
& 0x0004)
4311 i
+= thumb_insn_size (inst1
);
4317 /* There wasn't really an IT instruction after all. */
4320 if (last_it_count
< 1)
4321 /* It was too far away. */
4324 /* This really is a trouble spot. Move the breakpoint to the IT
4326 return bpaddr
- buf_len
+ last_it
;
4329 /* ARM displaced stepping support.
4331 Generally ARM displaced stepping works as follows:
4333 1. When an instruction is to be single-stepped, it is first decoded by
4334 arm_process_displaced_insn. Depending on the type of instruction, it is
4335 then copied to a scratch location, possibly in a modified form. The
4336 copy_* set of functions performs such modification, as necessary. A
4337 breakpoint is placed after the modified instruction in the scratch space
4338 to return control to GDB. Note in particular that instructions which
4339 modify the PC will no longer do so after modification.
4341 2. The instruction is single-stepped, by setting the PC to the scratch
4342 location address, and resuming. Control returns to GDB when the
4345 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
4346 function used for the current instruction. This function's job is to
4347 put the CPU/memory state back to what it would have been if the
4348 instruction had been executed unmodified in its original location. */
4350 /* NOP instruction (mov r0, r0). */
4351 #define ARM_NOP 0xe1a00000
4352 #define THUMB_NOP 0x4600
4354 /* Helper for register reads for displaced stepping. In particular, this
4355 returns the PC as it would be seen by the instruction at its original
4359 displaced_read_reg (struct regcache
*regs
, arm_displaced_step_closure
*dsc
,
4363 CORE_ADDR from
= dsc
->insn_addr
;
4365 if (regno
== ARM_PC_REGNUM
)
4367 /* Compute pipeline offset:
4368 - When executing an ARM instruction, PC reads as the address of the
4369 current instruction plus 8.
4370 - When executing a Thumb instruction, PC reads as the address of the
4371 current instruction plus 4. */
4378 if (debug_displaced
)
4379 fprintf_unfiltered (gdb_stdlog
, "displaced: read pc value %.8lx\n",
4380 (unsigned long) from
);
4381 return (ULONGEST
) from
;
4385 regcache_cooked_read_unsigned (regs
, regno
, &ret
);
4386 if (debug_displaced
)
4387 fprintf_unfiltered (gdb_stdlog
, "displaced: read r%d value %.8lx\n",
4388 regno
, (unsigned long) ret
);
4394 displaced_in_arm_mode (struct regcache
*regs
)
4397 ULONGEST t_bit
= arm_psr_thumb_bit (regs
->arch ());
4399 regcache_cooked_read_unsigned (regs
, ARM_PS_REGNUM
, &ps
);
4401 return (ps
& t_bit
) == 0;
4404 /* Write to the PC as from a branch instruction. */
4407 branch_write_pc (struct regcache
*regs
, arm_displaced_step_closure
*dsc
,
4411 /* Note: If bits 0/1 are set, this branch would be unpredictable for
4412 architecture versions < 6. */
4413 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
,
4414 val
& ~(ULONGEST
) 0x3);
4416 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
,
4417 val
& ~(ULONGEST
) 0x1);
4420 /* Write to the PC as from a branch-exchange instruction. */
4423 bx_write_pc (struct regcache
*regs
, ULONGEST val
)
4426 ULONGEST t_bit
= arm_psr_thumb_bit (regs
->arch ());
4428 regcache_cooked_read_unsigned (regs
, ARM_PS_REGNUM
, &ps
);
4432 regcache_cooked_write_unsigned (regs
, ARM_PS_REGNUM
, ps
| t_bit
);
4433 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
, val
& 0xfffffffe);
4435 else if ((val
& 2) == 0)
4437 regcache_cooked_write_unsigned (regs
, ARM_PS_REGNUM
, ps
& ~t_bit
);
4438 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
, val
);
4442 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
4443 mode, align dest to 4 bytes). */
4444 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
4445 regcache_cooked_write_unsigned (regs
, ARM_PS_REGNUM
, ps
& ~t_bit
);
4446 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
, val
& 0xfffffffc);
4450 /* Write to the PC as if from a load instruction. */
4453 load_write_pc (struct regcache
*regs
, arm_displaced_step_closure
*dsc
,
4456 if (DISPLACED_STEPPING_ARCH_VERSION
>= 5)
4457 bx_write_pc (regs
, val
);
4459 branch_write_pc (regs
, dsc
, val
);
4462 /* Write to the PC as if from an ALU instruction. */
4465 alu_write_pc (struct regcache
*regs
, arm_displaced_step_closure
*dsc
,
4468 if (DISPLACED_STEPPING_ARCH_VERSION
>= 7 && !dsc
->is_thumb
)
4469 bx_write_pc (regs
, val
);
4471 branch_write_pc (regs
, dsc
, val
);
4474 /* Helper for writing to registers for displaced stepping. Writing to the PC
4475 has varying effects depending on the instruction which does the write:
4476 this is controlled by the WRITE_PC argument. */
4479 displaced_write_reg (struct regcache
*regs
, arm_displaced_step_closure
*dsc
,
4480 int regno
, ULONGEST val
, enum pc_write_style write_pc
)
4482 if (regno
== ARM_PC_REGNUM
)
4484 if (debug_displaced
)
4485 fprintf_unfiltered (gdb_stdlog
, "displaced: writing pc %.8lx\n",
4486 (unsigned long) val
);
4489 case BRANCH_WRITE_PC
:
4490 branch_write_pc (regs
, dsc
, val
);
4494 bx_write_pc (regs
, val
);
4498 load_write_pc (regs
, dsc
, val
);
4502 alu_write_pc (regs
, dsc
, val
);
4505 case CANNOT_WRITE_PC
:
4506 warning (_("Instruction wrote to PC in an unexpected way when "
4507 "single-stepping"));
4511 internal_error (__FILE__
, __LINE__
,
4512 _("Invalid argument to displaced_write_reg"));
4515 dsc
->wrote_to_pc
= 1;
4519 if (debug_displaced
)
4520 fprintf_unfiltered (gdb_stdlog
, "displaced: writing r%d value %.8lx\n",
4521 regno
, (unsigned long) val
);
4522 regcache_cooked_write_unsigned (regs
, regno
, val
);
4526 /* This function is used to concisely determine if an instruction INSN
4527 references PC. Register fields of interest in INSN should have the
4528 corresponding fields of BITMASK set to 0b1111. The function
4529 returns 1 if any of these fields in INSN reference the PC
4530 (also 0b1111, r15), else it returns 0. */
4533 insn_references_pc (uint32_t insn
, uint32_t bitmask
)
4535 uint32_t lowbit
= 1;
4537 while (bitmask
!= 0)
4541 for (; lowbit
&& (bitmask
& lowbit
) == 0; lowbit
<<= 1)
4547 mask
= lowbit
* 0xf;
4549 if ((insn
& mask
) == mask
)
4558 /* The simplest copy function. Many instructions have the same effect no
4559 matter what address they are executed at: in those cases, use this. */
4562 arm_copy_unmodified (struct gdbarch
*gdbarch
, uint32_t insn
,
4563 const char *iname
, arm_displaced_step_closure
*dsc
)
4565 if (debug_displaced
)
4566 fprintf_unfiltered (gdb_stdlog
, "displaced: copying insn %.8lx, "
4567 "opcode/class '%s' unmodified\n", (unsigned long) insn
,
4570 dsc
->modinsn
[0] = insn
;
4576 thumb_copy_unmodified_32bit (struct gdbarch
*gdbarch
, uint16_t insn1
,
4577 uint16_t insn2
, const char *iname
,
4578 arm_displaced_step_closure
*dsc
)
4580 if (debug_displaced
)
4581 fprintf_unfiltered (gdb_stdlog
, "displaced: copying insn %.4x %.4x, "
4582 "opcode/class '%s' unmodified\n", insn1
, insn2
,
4585 dsc
->modinsn
[0] = insn1
;
4586 dsc
->modinsn
[1] = insn2
;
4592 /* Copy 16-bit Thumb (Thumb and 16-bit Thumb-2) instruction without any
4595 thumb_copy_unmodified_16bit (struct gdbarch
*gdbarch
, uint16_t insn
,
4597 arm_displaced_step_closure
*dsc
)
4599 if (debug_displaced
)
4600 fprintf_unfiltered (gdb_stdlog
, "displaced: copying insn %.4x, "
4601 "opcode/class '%s' unmodified\n", insn
,
4604 dsc
->modinsn
[0] = insn
;
4609 /* Preload instructions with immediate offset. */
4612 cleanup_preload (struct gdbarch
*gdbarch
,
4613 struct regcache
*regs
, arm_displaced_step_closure
*dsc
)
4615 displaced_write_reg (regs
, dsc
, 0, dsc
->tmp
[0], CANNOT_WRITE_PC
);
4616 if (!dsc
->u
.preload
.immed
)
4617 displaced_write_reg (regs
, dsc
, 1, dsc
->tmp
[1], CANNOT_WRITE_PC
);
4621 install_preload (struct gdbarch
*gdbarch
, struct regcache
*regs
,
4622 arm_displaced_step_closure
*dsc
, unsigned int rn
)
4625 /* Preload instructions:
4627 {pli/pld} [rn, #+/-imm]
4629 {pli/pld} [r0, #+/-imm]. */
4631 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
4632 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
4633 displaced_write_reg (regs
, dsc
, 0, rn_val
, CANNOT_WRITE_PC
);
4634 dsc
->u
.preload
.immed
= 1;
4636 dsc
->cleanup
= &cleanup_preload
;
4640 arm_copy_preload (struct gdbarch
*gdbarch
, uint32_t insn
, struct regcache
*regs
,
4641 arm_displaced_step_closure
*dsc
)
4643 unsigned int rn
= bits (insn
, 16, 19);
4645 if (!insn_references_pc (insn
, 0x000f0000ul
))
4646 return arm_copy_unmodified (gdbarch
, insn
, "preload", dsc
);
4648 if (debug_displaced
)
4649 fprintf_unfiltered (gdb_stdlog
, "displaced: copying preload insn %.8lx\n",
4650 (unsigned long) insn
);
4652 dsc
->modinsn
[0] = insn
& 0xfff0ffff;
4654 install_preload (gdbarch
, regs
, dsc
, rn
);
4660 thumb2_copy_preload (struct gdbarch
*gdbarch
, uint16_t insn1
, uint16_t insn2
,
4661 struct regcache
*regs
, arm_displaced_step_closure
*dsc
)
4663 unsigned int rn
= bits (insn1
, 0, 3);
4664 unsigned int u_bit
= bit (insn1
, 7);
4665 int imm12
= bits (insn2
, 0, 11);
4668 if (rn
!= ARM_PC_REGNUM
)
4669 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
, "preload", dsc
);
4671 /* PC is only allowed to use in PLI (immediate,literal) Encoding T3, and
4672 PLD (literal) Encoding T1. */
4673 if (debug_displaced
)
4674 fprintf_unfiltered (gdb_stdlog
,
4675 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
4676 (unsigned int) dsc
->insn_addr
, u_bit
? '+' : '-',
4682 /* Rewrite instruction {pli/pld} PC imm12 into:
4683 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
4687 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
4689 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
4690 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
4692 pc_val
= displaced_read_reg (regs
, dsc
, ARM_PC_REGNUM
);
4694 displaced_write_reg (regs
, dsc
, 0, pc_val
, CANNOT_WRITE_PC
);
4695 displaced_write_reg (regs
, dsc
, 1, imm12
, CANNOT_WRITE_PC
);
4696 dsc
->u
.preload
.immed
= 0;
4698 /* {pli/pld} [r0, r1] */
4699 dsc
->modinsn
[0] = insn1
& 0xfff0;
4700 dsc
->modinsn
[1] = 0xf001;
4703 dsc
->cleanup
= &cleanup_preload
;
4707 /* Preload instructions with register offset. */
4710 install_preload_reg(struct gdbarch
*gdbarch
, struct regcache
*regs
,
4711 arm_displaced_step_closure
*dsc
, unsigned int rn
,
4714 ULONGEST rn_val
, rm_val
;
4716 /* Preload register-offset instructions:
4718 {pli/pld} [rn, rm {, shift}]
4720 {pli/pld} [r0, r1 {, shift}]. */
4722 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
4723 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
4724 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
4725 rm_val
= displaced_read_reg (regs
, dsc
, rm
);
4726 displaced_write_reg (regs
, dsc
, 0, rn_val
, CANNOT_WRITE_PC
);
4727 displaced_write_reg (regs
, dsc
, 1, rm_val
, CANNOT_WRITE_PC
);
4728 dsc
->u
.preload
.immed
= 0;
4730 dsc
->cleanup
= &cleanup_preload
;
4734 arm_copy_preload_reg (struct gdbarch
*gdbarch
, uint32_t insn
,
4735 struct regcache
*regs
,
4736 arm_displaced_step_closure
*dsc
)
4738 unsigned int rn
= bits (insn
, 16, 19);
4739 unsigned int rm
= bits (insn
, 0, 3);
4742 if (!insn_references_pc (insn
, 0x000f000ful
))
4743 return arm_copy_unmodified (gdbarch
, insn
, "preload reg", dsc
);
4745 if (debug_displaced
)
4746 fprintf_unfiltered (gdb_stdlog
, "displaced: copying preload insn %.8lx\n",
4747 (unsigned long) insn
);
4749 dsc
->modinsn
[0] = (insn
& 0xfff0fff0) | 0x1;
4751 install_preload_reg (gdbarch
, regs
, dsc
, rn
, rm
);
4755 /* Copy/cleanup coprocessor load and store instructions. */
4758 cleanup_copro_load_store (struct gdbarch
*gdbarch
,
4759 struct regcache
*regs
,
4760 arm_displaced_step_closure
*dsc
)
4762 ULONGEST rn_val
= displaced_read_reg (regs
, dsc
, 0);
4764 displaced_write_reg (regs
, dsc
, 0, dsc
->tmp
[0], CANNOT_WRITE_PC
);
4766 if (dsc
->u
.ldst
.writeback
)
4767 displaced_write_reg (regs
, dsc
, dsc
->u
.ldst
.rn
, rn_val
, LOAD_WRITE_PC
);
4771 install_copro_load_store (struct gdbarch
*gdbarch
, struct regcache
*regs
,
4772 arm_displaced_step_closure
*dsc
,
4773 int writeback
, unsigned int rn
)
4777 /* Coprocessor load/store instructions:
4779 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
4781 {stc/stc2} [r0, #+/-imm].
4783 ldc/ldc2 are handled identically. */
4785 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
4786 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
4787 /* PC should be 4-byte aligned. */
4788 rn_val
= rn_val
& 0xfffffffc;
4789 displaced_write_reg (regs
, dsc
, 0, rn_val
, CANNOT_WRITE_PC
);
4791 dsc
->u
.ldst
.writeback
= writeback
;
4792 dsc
->u
.ldst
.rn
= rn
;
4794 dsc
->cleanup
= &cleanup_copro_load_store
;
4798 arm_copy_copro_load_store (struct gdbarch
*gdbarch
, uint32_t insn
,
4799 struct regcache
*regs
,
4800 arm_displaced_step_closure
*dsc
)
4802 unsigned int rn
= bits (insn
, 16, 19);
4804 if (!insn_references_pc (insn
, 0x000f0000ul
))
4805 return arm_copy_unmodified (gdbarch
, insn
, "copro load/store", dsc
);
4807 if (debug_displaced
)
4808 fprintf_unfiltered (gdb_stdlog
, "displaced: copying coprocessor "
4809 "load/store insn %.8lx\n", (unsigned long) insn
);
4811 dsc
->modinsn
[0] = insn
& 0xfff0ffff;
4813 install_copro_load_store (gdbarch
, regs
, dsc
, bit (insn
, 25), rn
);
4819 thumb2_copy_copro_load_store (struct gdbarch
*gdbarch
, uint16_t insn1
,
4820 uint16_t insn2
, struct regcache
*regs
,
4821 arm_displaced_step_closure
*dsc
)
4823 unsigned int rn
= bits (insn1
, 0, 3);
4825 if (rn
!= ARM_PC_REGNUM
)
4826 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
4827 "copro load/store", dsc
);
4829 if (debug_displaced
)
4830 fprintf_unfiltered (gdb_stdlog
, "displaced: copying coprocessor "
4831 "load/store insn %.4x%.4x\n", insn1
, insn2
);
4833 dsc
->modinsn
[0] = insn1
& 0xfff0;
4834 dsc
->modinsn
[1] = insn2
;
4837 /* This function is called for copying instruction LDC/LDC2/VLDR, which
4838 doesn't support writeback, so pass 0. */
4839 install_copro_load_store (gdbarch
, regs
, dsc
, 0, rn
);
4844 /* Clean up branch instructions (actually perform the branch, by setting
4848 cleanup_branch (struct gdbarch
*gdbarch
, struct regcache
*regs
,
4849 arm_displaced_step_closure
*dsc
)
4851 uint32_t status
= displaced_read_reg (regs
, dsc
, ARM_PS_REGNUM
);
4852 int branch_taken
= condition_true (dsc
->u
.branch
.cond
, status
);
4853 enum pc_write_style write_pc
= dsc
->u
.branch
.exchange
4854 ? BX_WRITE_PC
: BRANCH_WRITE_PC
;
4859 if (dsc
->u
.branch
.link
)
4861 /* The value of LR should be the next insn of current one. In order
4862 not to confuse logic hanlding later insn `bx lr', if current insn mode
4863 is Thumb, the bit 0 of LR value should be set to 1. */
4864 ULONGEST next_insn_addr
= dsc
->insn_addr
+ dsc
->insn_size
;
4867 next_insn_addr
|= 0x1;
4869 displaced_write_reg (regs
, dsc
, ARM_LR_REGNUM
, next_insn_addr
,
4873 displaced_write_reg (regs
, dsc
, ARM_PC_REGNUM
, dsc
->u
.branch
.dest
, write_pc
);
4876 /* Copy B/BL/BLX instructions with immediate destinations. */
4879 install_b_bl_blx (struct gdbarch
*gdbarch
, struct regcache
*regs
,
4880 arm_displaced_step_closure
*dsc
,
4881 unsigned int cond
, int exchange
, int link
, long offset
)
4883 /* Implement "BL<cond> <label>" as:
4885 Preparation: cond <- instruction condition
4886 Insn: mov r0, r0 (nop)
4887 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
4889 B<cond> similar, but don't set r14 in cleanup. */
4891 dsc
->u
.branch
.cond
= cond
;
4892 dsc
->u
.branch
.link
= link
;
4893 dsc
->u
.branch
.exchange
= exchange
;
4895 dsc
->u
.branch
.dest
= dsc
->insn_addr
;
4896 if (link
&& exchange
)
4897 /* For BLX, offset is computed from the Align (PC, 4). */
4898 dsc
->u
.branch
.dest
= dsc
->u
.branch
.dest
& 0xfffffffc;
4901 dsc
->u
.branch
.dest
+= 4 + offset
;
4903 dsc
->u
.branch
.dest
+= 8 + offset
;
4905 dsc
->cleanup
= &cleanup_branch
;
4908 arm_copy_b_bl_blx (struct gdbarch
*gdbarch
, uint32_t insn
,
4909 struct regcache
*regs
, arm_displaced_step_closure
*dsc
)
4911 unsigned int cond
= bits (insn
, 28, 31);
4912 int exchange
= (cond
== 0xf);
4913 int link
= exchange
|| bit (insn
, 24);
4916 if (debug_displaced
)
4917 fprintf_unfiltered (gdb_stdlog
, "displaced: copying %s immediate insn "
4918 "%.8lx\n", (exchange
) ? "blx" : (link
) ? "bl" : "b",
4919 (unsigned long) insn
);
4921 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
4922 then arrange the switch into Thumb mode. */
4923 offset
= (bits (insn
, 0, 23) << 2) | (bit (insn
, 24) << 1) | 1;
4925 offset
= bits (insn
, 0, 23) << 2;
4927 if (bit (offset
, 25))
4928 offset
= offset
| ~0x3ffffff;
4930 dsc
->modinsn
[0] = ARM_NOP
;
4932 install_b_bl_blx (gdbarch
, regs
, dsc
, cond
, exchange
, link
, offset
);
4937 thumb2_copy_b_bl_blx (struct gdbarch
*gdbarch
, uint16_t insn1
,
4938 uint16_t insn2
, struct regcache
*regs
,
4939 arm_displaced_step_closure
*dsc
)
4941 int link
= bit (insn2
, 14);
4942 int exchange
= link
&& !bit (insn2
, 12);
4945 int j1
= bit (insn2
, 13);
4946 int j2
= bit (insn2
, 11);
4947 int s
= sbits (insn1
, 10, 10);
4948 int i1
= !(j1
^ bit (insn1
, 10));
4949 int i2
= !(j2
^ bit (insn1
, 10));
4951 if (!link
&& !exchange
) /* B */
4953 offset
= (bits (insn2
, 0, 10) << 1);
4954 if (bit (insn2
, 12)) /* Encoding T4 */
4956 offset
|= (bits (insn1
, 0, 9) << 12)
4962 else /* Encoding T3 */
4964 offset
|= (bits (insn1
, 0, 5) << 12)
4968 cond
= bits (insn1
, 6, 9);
4973 offset
= (bits (insn1
, 0, 9) << 12);
4974 offset
|= ((i2
<< 22) | (i1
<< 23) | (s
<< 24));
4975 offset
|= exchange
?
4976 (bits (insn2
, 1, 10) << 2) : (bits (insn2
, 0, 10) << 1);
4979 if (debug_displaced
)
4980 fprintf_unfiltered (gdb_stdlog
, "displaced: copying %s insn "
4981 "%.4x %.4x with offset %.8lx\n",
4982 link
? (exchange
) ? "blx" : "bl" : "b",
4983 insn1
, insn2
, offset
);
4985 dsc
->modinsn
[0] = THUMB_NOP
;
4987 install_b_bl_blx (gdbarch
, regs
, dsc
, cond
, exchange
, link
, offset
);
4991 /* Copy B Thumb instructions. */
4993 thumb_copy_b (struct gdbarch
*gdbarch
, uint16_t insn
,
4994 arm_displaced_step_closure
*dsc
)
4996 unsigned int cond
= 0;
4998 unsigned short bit_12_15
= bits (insn
, 12, 15);
4999 CORE_ADDR from
= dsc
->insn_addr
;
5001 if (bit_12_15
== 0xd)
5003 /* offset = SignExtend (imm8:0, 32) */
5004 offset
= sbits ((insn
<< 1), 0, 8);
5005 cond
= bits (insn
, 8, 11);
5007 else if (bit_12_15
== 0xe) /* Encoding T2 */
5009 offset
= sbits ((insn
<< 1), 0, 11);
5013 if (debug_displaced
)
5014 fprintf_unfiltered (gdb_stdlog
,
5015 "displaced: copying b immediate insn %.4x "
5016 "with offset %d\n", insn
, offset
);
5018 dsc
->u
.branch
.cond
= cond
;
5019 dsc
->u
.branch
.link
= 0;
5020 dsc
->u
.branch
.exchange
= 0;
5021 dsc
->u
.branch
.dest
= from
+ 4 + offset
;
5023 dsc
->modinsn
[0] = THUMB_NOP
;
5025 dsc
->cleanup
= &cleanup_branch
;
5030 /* Copy BX/BLX with register-specified destinations. */
5033 install_bx_blx_reg (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5034 arm_displaced_step_closure
*dsc
, int link
,
5035 unsigned int cond
, unsigned int rm
)
5037 /* Implement {BX,BLX}<cond> <reg>" as:
5039 Preparation: cond <- instruction condition
5040 Insn: mov r0, r0 (nop)
5041 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5043 Don't set r14 in cleanup for BX. */
5045 dsc
->u
.branch
.dest
= displaced_read_reg (regs
, dsc
, rm
);
5047 dsc
->u
.branch
.cond
= cond
;
5048 dsc
->u
.branch
.link
= link
;
5050 dsc
->u
.branch
.exchange
= 1;
5052 dsc
->cleanup
= &cleanup_branch
;
5056 arm_copy_bx_blx_reg (struct gdbarch
*gdbarch
, uint32_t insn
,
5057 struct regcache
*regs
, arm_displaced_step_closure
*dsc
)
5059 unsigned int cond
= bits (insn
, 28, 31);
5062 int link
= bit (insn
, 5);
5063 unsigned int rm
= bits (insn
, 0, 3);
5065 if (debug_displaced
)
5066 fprintf_unfiltered (gdb_stdlog
, "displaced: copying insn %.8lx",
5067 (unsigned long) insn
);
5069 dsc
->modinsn
[0] = ARM_NOP
;
5071 install_bx_blx_reg (gdbarch
, regs
, dsc
, link
, cond
, rm
);
5076 thumb_copy_bx_blx_reg (struct gdbarch
*gdbarch
, uint16_t insn
,
5077 struct regcache
*regs
,
5078 arm_displaced_step_closure
*dsc
)
5080 int link
= bit (insn
, 7);
5081 unsigned int rm
= bits (insn
, 3, 6);
5083 if (debug_displaced
)
5084 fprintf_unfiltered (gdb_stdlog
, "displaced: copying insn %.4x",
5085 (unsigned short) insn
);
5087 dsc
->modinsn
[0] = THUMB_NOP
;
5089 install_bx_blx_reg (gdbarch
, regs
, dsc
, link
, INST_AL
, rm
);
5095 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
5098 cleanup_alu_imm (struct gdbarch
*gdbarch
,
5099 struct regcache
*regs
, arm_displaced_step_closure
*dsc
)
5101 ULONGEST rd_val
= displaced_read_reg (regs
, dsc
, 0);
5102 displaced_write_reg (regs
, dsc
, 0, dsc
->tmp
[0], CANNOT_WRITE_PC
);
5103 displaced_write_reg (regs
, dsc
, 1, dsc
->tmp
[1], CANNOT_WRITE_PC
);
5104 displaced_write_reg (regs
, dsc
, dsc
->rd
, rd_val
, ALU_WRITE_PC
);
5108 arm_copy_alu_imm (struct gdbarch
*gdbarch
, uint32_t insn
, struct regcache
*regs
,
5109 arm_displaced_step_closure
*dsc
)
5111 unsigned int rn
= bits (insn
, 16, 19);
5112 unsigned int rd
= bits (insn
, 12, 15);
5113 unsigned int op
= bits (insn
, 21, 24);
5114 int is_mov
= (op
== 0xd);
5115 ULONGEST rd_val
, rn_val
;
5117 if (!insn_references_pc (insn
, 0x000ff000ul
))
5118 return arm_copy_unmodified (gdbarch
, insn
, "ALU immediate", dsc
);
5120 if (debug_displaced
)
5121 fprintf_unfiltered (gdb_stdlog
, "displaced: copying immediate %s insn "
5122 "%.8lx\n", is_mov
? "move" : "ALU",
5123 (unsigned long) insn
);
5125 /* Instruction is of form:
5127 <op><cond> rd, [rn,] #imm
5131 Preparation: tmp1, tmp2 <- r0, r1;
5133 Insn: <op><cond> r0, r1, #imm
5134 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5137 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5138 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
5139 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
5140 rd_val
= displaced_read_reg (regs
, dsc
, rd
);
5141 displaced_write_reg (regs
, dsc
, 0, rd_val
, CANNOT_WRITE_PC
);
5142 displaced_write_reg (regs
, dsc
, 1, rn_val
, CANNOT_WRITE_PC
);
5146 dsc
->modinsn
[0] = insn
& 0xfff00fff;
5148 dsc
->modinsn
[0] = (insn
& 0xfff00fff) | 0x10000;
5150 dsc
->cleanup
= &cleanup_alu_imm
;
5156 thumb2_copy_alu_imm (struct gdbarch
*gdbarch
, uint16_t insn1
,
5157 uint16_t insn2
, struct regcache
*regs
,
5158 arm_displaced_step_closure
*dsc
)
5160 unsigned int op
= bits (insn1
, 5, 8);
5161 unsigned int rn
, rm
, rd
;
5162 ULONGEST rd_val
, rn_val
;
5164 rn
= bits (insn1
, 0, 3); /* Rn */
5165 rm
= bits (insn2
, 0, 3); /* Rm */
5166 rd
= bits (insn2
, 8, 11); /* Rd */
5168 /* This routine is only called for instruction MOV. */
5169 gdb_assert (op
== 0x2 && rn
== 0xf);
5171 if (rm
!= ARM_PC_REGNUM
&& rd
!= ARM_PC_REGNUM
)
5172 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
, "ALU imm", dsc
);
5174 if (debug_displaced
)
5175 fprintf_unfiltered (gdb_stdlog
, "displaced: copying reg %s insn %.4x%.4x\n",
5176 "ALU", insn1
, insn2
);
5178 /* Instruction is of form:
5180 <op><cond> rd, [rn,] #imm
5184 Preparation: tmp1, tmp2 <- r0, r1;
5186 Insn: <op><cond> r0, r1, #imm
5187 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5190 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5191 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
5192 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
5193 rd_val
= displaced_read_reg (regs
, dsc
, rd
);
5194 displaced_write_reg (regs
, dsc
, 0, rd_val
, CANNOT_WRITE_PC
);
5195 displaced_write_reg (regs
, dsc
, 1, rn_val
, CANNOT_WRITE_PC
);
5198 dsc
->modinsn
[0] = insn1
;
5199 dsc
->modinsn
[1] = ((insn2
& 0xf0f0) | 0x1);
5202 dsc
->cleanup
= &cleanup_alu_imm
;
5207 /* Copy/cleanup arithmetic/logic insns with register RHS. */
5210 cleanup_alu_reg (struct gdbarch
*gdbarch
,
5211 struct regcache
*regs
, arm_displaced_step_closure
*dsc
)
5216 rd_val
= displaced_read_reg (regs
, dsc
, 0);
5218 for (i
= 0; i
< 3; i
++)
5219 displaced_write_reg (regs
, dsc
, i
, dsc
->tmp
[i
], CANNOT_WRITE_PC
);
5221 displaced_write_reg (regs
, dsc
, dsc
->rd
, rd_val
, ALU_WRITE_PC
);
5225 install_alu_reg (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5226 arm_displaced_step_closure
*dsc
,
5227 unsigned int rd
, unsigned int rn
, unsigned int rm
)
5229 ULONGEST rd_val
, rn_val
, rm_val
;
5231 /* Instruction is of form:
5233 <op><cond> rd, [rn,] rm [, <shift>]
5237 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
5238 r0, r1, r2 <- rd, rn, rm
5239 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
5240 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
5243 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5244 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
5245 dsc
->tmp
[2] = displaced_read_reg (regs
, dsc
, 2);
5246 rd_val
= displaced_read_reg (regs
, dsc
, rd
);
5247 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
5248 rm_val
= displaced_read_reg (regs
, dsc
, rm
);
5249 displaced_write_reg (regs
, dsc
, 0, rd_val
, CANNOT_WRITE_PC
);
5250 displaced_write_reg (regs
, dsc
, 1, rn_val
, CANNOT_WRITE_PC
);
5251 displaced_write_reg (regs
, dsc
, 2, rm_val
, CANNOT_WRITE_PC
);
5254 dsc
->cleanup
= &cleanup_alu_reg
;
5258 arm_copy_alu_reg (struct gdbarch
*gdbarch
, uint32_t insn
, struct regcache
*regs
,
5259 arm_displaced_step_closure
*dsc
)
5261 unsigned int op
= bits (insn
, 21, 24);
5262 int is_mov
= (op
== 0xd);
5264 if (!insn_references_pc (insn
, 0x000ff00ful
))
5265 return arm_copy_unmodified (gdbarch
, insn
, "ALU reg", dsc
);
5267 if (debug_displaced
)
5268 fprintf_unfiltered (gdb_stdlog
, "displaced: copying reg %s insn %.8lx\n",
5269 is_mov
? "move" : "ALU", (unsigned long) insn
);
5272 dsc
->modinsn
[0] = (insn
& 0xfff00ff0) | 0x2;
5274 dsc
->modinsn
[0] = (insn
& 0xfff00ff0) | 0x10002;
5276 install_alu_reg (gdbarch
, regs
, dsc
, bits (insn
, 12, 15), bits (insn
, 16, 19),
5282 thumb_copy_alu_reg (struct gdbarch
*gdbarch
, uint16_t insn
,
5283 struct regcache
*regs
,
5284 arm_displaced_step_closure
*dsc
)
5288 rm
= bits (insn
, 3, 6);
5289 rd
= (bit (insn
, 7) << 3) | bits (insn
, 0, 2);
5291 if (rd
!= ARM_PC_REGNUM
&& rm
!= ARM_PC_REGNUM
)
5292 return thumb_copy_unmodified_16bit (gdbarch
, insn
, "ALU reg", dsc
);
5294 if (debug_displaced
)
5295 fprintf_unfiltered (gdb_stdlog
, "displaced: copying ALU reg insn %.4x\n",
5296 (unsigned short) insn
);
5298 dsc
->modinsn
[0] = ((insn
& 0xff00) | 0x10);
5300 install_alu_reg (gdbarch
, regs
, dsc
, rd
, rd
, rm
);
5305 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
5308 cleanup_alu_shifted_reg (struct gdbarch
*gdbarch
,
5309 struct regcache
*regs
,
5310 arm_displaced_step_closure
*dsc
)
5312 ULONGEST rd_val
= displaced_read_reg (regs
, dsc
, 0);
5315 for (i
= 0; i
< 4; i
++)
5316 displaced_write_reg (regs
, dsc
, i
, dsc
->tmp
[i
], CANNOT_WRITE_PC
);
5318 displaced_write_reg (regs
, dsc
, dsc
->rd
, rd_val
, ALU_WRITE_PC
);
5322 install_alu_shifted_reg (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5323 arm_displaced_step_closure
*dsc
,
5324 unsigned int rd
, unsigned int rn
, unsigned int rm
,
5328 ULONGEST rd_val
, rn_val
, rm_val
, rs_val
;
5330 /* Instruction is of form:
5332 <op><cond> rd, [rn,] rm, <shift> rs
5336 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
5337 r0, r1, r2, r3 <- rd, rn, rm, rs
5338 Insn: <op><cond> r0, r1, r2, <shift> r3
5340 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
5344 for (i
= 0; i
< 4; i
++)
5345 dsc
->tmp
[i
] = displaced_read_reg (regs
, dsc
, i
);
5347 rd_val
= displaced_read_reg (regs
, dsc
, rd
);
5348 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
5349 rm_val
= displaced_read_reg (regs
, dsc
, rm
);
5350 rs_val
= displaced_read_reg (regs
, dsc
, rs
);
5351 displaced_write_reg (regs
, dsc
, 0, rd_val
, CANNOT_WRITE_PC
);
5352 displaced_write_reg (regs
, dsc
, 1, rn_val
, CANNOT_WRITE_PC
);
5353 displaced_write_reg (regs
, dsc
, 2, rm_val
, CANNOT_WRITE_PC
);
5354 displaced_write_reg (regs
, dsc
, 3, rs_val
, CANNOT_WRITE_PC
);
5356 dsc
->cleanup
= &cleanup_alu_shifted_reg
;
5360 arm_copy_alu_shifted_reg (struct gdbarch
*gdbarch
, uint32_t insn
,
5361 struct regcache
*regs
,
5362 arm_displaced_step_closure
*dsc
)
5364 unsigned int op
= bits (insn
, 21, 24);
5365 int is_mov
= (op
== 0xd);
5366 unsigned int rd
, rn
, rm
, rs
;
5368 if (!insn_references_pc (insn
, 0x000fff0ful
))
5369 return arm_copy_unmodified (gdbarch
, insn
, "ALU shifted reg", dsc
);
5371 if (debug_displaced
)
5372 fprintf_unfiltered (gdb_stdlog
, "displaced: copying shifted reg %s insn "
5373 "%.8lx\n", is_mov
? "move" : "ALU",
5374 (unsigned long) insn
);
5376 rn
= bits (insn
, 16, 19);
5377 rm
= bits (insn
, 0, 3);
5378 rs
= bits (insn
, 8, 11);
5379 rd
= bits (insn
, 12, 15);
5382 dsc
->modinsn
[0] = (insn
& 0xfff000f0) | 0x302;
5384 dsc
->modinsn
[0] = (insn
& 0xfff000f0) | 0x10302;
5386 install_alu_shifted_reg (gdbarch
, regs
, dsc
, rd
, rn
, rm
, rs
);
5391 /* Clean up load instructions. */
5394 cleanup_load (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5395 arm_displaced_step_closure
*dsc
)
5397 ULONGEST rt_val
, rt_val2
= 0, rn_val
;
5399 rt_val
= displaced_read_reg (regs
, dsc
, 0);
5400 if (dsc
->u
.ldst
.xfersize
== 8)
5401 rt_val2
= displaced_read_reg (regs
, dsc
, 1);
5402 rn_val
= displaced_read_reg (regs
, dsc
, 2);
5404 displaced_write_reg (regs
, dsc
, 0, dsc
->tmp
[0], CANNOT_WRITE_PC
);
5405 if (dsc
->u
.ldst
.xfersize
> 4)
5406 displaced_write_reg (regs
, dsc
, 1, dsc
->tmp
[1], CANNOT_WRITE_PC
);
5407 displaced_write_reg (regs
, dsc
, 2, dsc
->tmp
[2], CANNOT_WRITE_PC
);
5408 if (!dsc
->u
.ldst
.immed
)
5409 displaced_write_reg (regs
, dsc
, 3, dsc
->tmp
[3], CANNOT_WRITE_PC
);
5411 /* Handle register writeback. */
5412 if (dsc
->u
.ldst
.writeback
)
5413 displaced_write_reg (regs
, dsc
, dsc
->u
.ldst
.rn
, rn_val
, CANNOT_WRITE_PC
);
5414 /* Put result in right place. */
5415 displaced_write_reg (regs
, dsc
, dsc
->rd
, rt_val
, LOAD_WRITE_PC
);
5416 if (dsc
->u
.ldst
.xfersize
== 8)
5417 displaced_write_reg (regs
, dsc
, dsc
->rd
+ 1, rt_val2
, LOAD_WRITE_PC
);
5420 /* Clean up store instructions. */
5423 cleanup_store (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5424 arm_displaced_step_closure
*dsc
)
5426 ULONGEST rn_val
= displaced_read_reg (regs
, dsc
, 2);
5428 displaced_write_reg (regs
, dsc
, 0, dsc
->tmp
[0], CANNOT_WRITE_PC
);
5429 if (dsc
->u
.ldst
.xfersize
> 4)
5430 displaced_write_reg (regs
, dsc
, 1, dsc
->tmp
[1], CANNOT_WRITE_PC
);
5431 displaced_write_reg (regs
, dsc
, 2, dsc
->tmp
[2], CANNOT_WRITE_PC
);
5432 if (!dsc
->u
.ldst
.immed
)
5433 displaced_write_reg (regs
, dsc
, 3, dsc
->tmp
[3], CANNOT_WRITE_PC
);
5434 if (!dsc
->u
.ldst
.restore_r4
)
5435 displaced_write_reg (regs
, dsc
, 4, dsc
->tmp
[4], CANNOT_WRITE_PC
);
5438 if (dsc
->u
.ldst
.writeback
)
5439 displaced_write_reg (regs
, dsc
, dsc
->u
.ldst
.rn
, rn_val
, CANNOT_WRITE_PC
);
5442 /* Copy "extra" load/store instructions. These are halfword/doubleword
5443 transfers, which have a different encoding to byte/word transfers. */
5446 arm_copy_extra_ld_st (struct gdbarch
*gdbarch
, uint32_t insn
, int unprivileged
,
5447 struct regcache
*regs
, arm_displaced_step_closure
*dsc
)
5449 unsigned int op1
= bits (insn
, 20, 24);
5450 unsigned int op2
= bits (insn
, 5, 6);
5451 unsigned int rt
= bits (insn
, 12, 15);
5452 unsigned int rn
= bits (insn
, 16, 19);
5453 unsigned int rm
= bits (insn
, 0, 3);
5454 char load
[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
5455 char bytesize
[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
5456 int immed
= (op1
& 0x4) != 0;
5458 ULONGEST rt_val
, rt_val2
= 0, rn_val
, rm_val
= 0;
5460 if (!insn_references_pc (insn
, 0x000ff00ful
))
5461 return arm_copy_unmodified (gdbarch
, insn
, "extra load/store", dsc
);
5463 if (debug_displaced
)
5464 fprintf_unfiltered (gdb_stdlog
, "displaced: copying %sextra load/store "
5465 "insn %.8lx\n", unprivileged
? "unprivileged " : "",
5466 (unsigned long) insn
);
5468 opcode
= ((op2
<< 2) | (op1
& 0x1) | ((op1
& 0x4) >> 1)) - 4;
5471 internal_error (__FILE__
, __LINE__
,
5472 _("copy_extra_ld_st: instruction decode error"));
5474 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5475 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
5476 dsc
->tmp
[2] = displaced_read_reg (regs
, dsc
, 2);
5478 dsc
->tmp
[3] = displaced_read_reg (regs
, dsc
, 3);
5480 rt_val
= displaced_read_reg (regs
, dsc
, rt
);
5481 if (bytesize
[opcode
] == 8)
5482 rt_val2
= displaced_read_reg (regs
, dsc
, rt
+ 1);
5483 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
5485 rm_val
= displaced_read_reg (regs
, dsc
, rm
);
5487 displaced_write_reg (regs
, dsc
, 0, rt_val
, CANNOT_WRITE_PC
);
5488 if (bytesize
[opcode
] == 8)
5489 displaced_write_reg (regs
, dsc
, 1, rt_val2
, CANNOT_WRITE_PC
);
5490 displaced_write_reg (regs
, dsc
, 2, rn_val
, CANNOT_WRITE_PC
);
5492 displaced_write_reg (regs
, dsc
, 3, rm_val
, CANNOT_WRITE_PC
);
5495 dsc
->u
.ldst
.xfersize
= bytesize
[opcode
];
5496 dsc
->u
.ldst
.rn
= rn
;
5497 dsc
->u
.ldst
.immed
= immed
;
5498 dsc
->u
.ldst
.writeback
= bit (insn
, 24) == 0 || bit (insn
, 21) != 0;
5499 dsc
->u
.ldst
.restore_r4
= 0;
5502 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
5504 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
5505 dsc
->modinsn
[0] = (insn
& 0xfff00fff) | 0x20000;
5507 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
5509 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
5510 dsc
->modinsn
[0] = (insn
& 0xfff00ff0) | 0x20003;
5512 dsc
->cleanup
= load
[opcode
] ? &cleanup_load
: &cleanup_store
;
5517 /* Copy byte/half word/word loads and stores. */
5520 install_load_store (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5521 arm_displaced_step_closure
*dsc
, int load
,
5522 int immed
, int writeback
, int size
, int usermode
,
5523 int rt
, int rm
, int rn
)
5525 ULONGEST rt_val
, rn_val
, rm_val
= 0;
5527 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5528 dsc
->tmp
[2] = displaced_read_reg (regs
, dsc
, 2);
5530 dsc
->tmp
[3] = displaced_read_reg (regs
, dsc
, 3);
5532 dsc
->tmp
[4] = displaced_read_reg (regs
, dsc
, 4);
5534 rt_val
= displaced_read_reg (regs
, dsc
, rt
);
5535 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
5537 rm_val
= displaced_read_reg (regs
, dsc
, rm
);
5539 displaced_write_reg (regs
, dsc
, 0, rt_val
, CANNOT_WRITE_PC
);
5540 displaced_write_reg (regs
, dsc
, 2, rn_val
, CANNOT_WRITE_PC
);
5542 displaced_write_reg (regs
, dsc
, 3, rm_val
, CANNOT_WRITE_PC
);
5544 dsc
->u
.ldst
.xfersize
= size
;
5545 dsc
->u
.ldst
.rn
= rn
;
5546 dsc
->u
.ldst
.immed
= immed
;
5547 dsc
->u
.ldst
.writeback
= writeback
;
5549 /* To write PC we can do:
5551 Before this sequence of instructions:
5552 r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
5553 r2 is the Rn value got from dispalced_read_reg.
5555 Insn1: push {pc} Write address of STR instruction + offset on stack
5556 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
5557 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
5558 = addr(Insn1) + offset - addr(Insn3) - 8
5560 Insn4: add r4, r4, #8 r4 = offset - 8
5561 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
5563 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
5565 Otherwise we don't know what value to write for PC, since the offset is
5566 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
5567 of this can be found in Section "Saving from r15" in
5568 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
5570 dsc
->cleanup
= load
? &cleanup_load
: &cleanup_store
;
5575 thumb2_copy_load_literal (struct gdbarch
*gdbarch
, uint16_t insn1
,
5576 uint16_t insn2
, struct regcache
*regs
,
5577 arm_displaced_step_closure
*dsc
, int size
)
5579 unsigned int u_bit
= bit (insn1
, 7);
5580 unsigned int rt
= bits (insn2
, 12, 15);
5581 int imm12
= bits (insn2
, 0, 11);
5584 if (debug_displaced
)
5585 fprintf_unfiltered (gdb_stdlog
,
5586 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
5587 (unsigned int) dsc
->insn_addr
, rt
, u_bit
? '+' : '-',
5593 /* Rewrite instruction LDR Rt imm12 into:
5595 Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12
5599 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2]. */
5602 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5603 dsc
->tmp
[2] = displaced_read_reg (regs
, dsc
, 2);
5604 dsc
->tmp
[3] = displaced_read_reg (regs
, dsc
, 3);
5606 pc_val
= displaced_read_reg (regs
, dsc
, ARM_PC_REGNUM
);
5608 pc_val
= pc_val
& 0xfffffffc;
5610 displaced_write_reg (regs
, dsc
, 2, pc_val
, CANNOT_WRITE_PC
);
5611 displaced_write_reg (regs
, dsc
, 3, imm12
, CANNOT_WRITE_PC
);
5615 dsc
->u
.ldst
.xfersize
= size
;
5616 dsc
->u
.ldst
.immed
= 0;
5617 dsc
->u
.ldst
.writeback
= 0;
5618 dsc
->u
.ldst
.restore_r4
= 0;
5620 /* LDR R0, R2, R3 */
5621 dsc
->modinsn
[0] = 0xf852;
5622 dsc
->modinsn
[1] = 0x3;
5625 dsc
->cleanup
= &cleanup_load
;
5631 thumb2_copy_load_reg_imm (struct gdbarch
*gdbarch
, uint16_t insn1
,
5632 uint16_t insn2
, struct regcache
*regs
,
5633 arm_displaced_step_closure
*dsc
,
5634 int writeback
, int immed
)
5636 unsigned int rt
= bits (insn2
, 12, 15);
5637 unsigned int rn
= bits (insn1
, 0, 3);
5638 unsigned int rm
= bits (insn2
, 0, 3); /* Only valid if !immed. */
5639 /* In LDR (register), there is also a register Rm, which is not allowed to
5640 be PC, so we don't have to check it. */
5642 if (rt
!= ARM_PC_REGNUM
&& rn
!= ARM_PC_REGNUM
)
5643 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
, "load",
5646 if (debug_displaced
)
5647 fprintf_unfiltered (gdb_stdlog
,
5648 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
5649 rt
, rn
, insn1
, insn2
);
5651 install_load_store (gdbarch
, regs
, dsc
, 1, immed
, writeback
, 4,
5654 dsc
->u
.ldst
.restore_r4
= 0;
5657 /* ldr[b]<cond> rt, [rn, #imm], etc.
5659 ldr[b]<cond> r0, [r2, #imm]. */
5661 dsc
->modinsn
[0] = (insn1
& 0xfff0) | 0x2;
5662 dsc
->modinsn
[1] = insn2
& 0x0fff;
5665 /* ldr[b]<cond> rt, [rn, rm], etc.
5667 ldr[b]<cond> r0, [r2, r3]. */
5669 dsc
->modinsn
[0] = (insn1
& 0xfff0) | 0x2;
5670 dsc
->modinsn
[1] = (insn2
& 0x0ff0) | 0x3;
5680 arm_copy_ldr_str_ldrb_strb (struct gdbarch
*gdbarch
, uint32_t insn
,
5681 struct regcache
*regs
,
5682 arm_displaced_step_closure
*dsc
,
5683 int load
, int size
, int usermode
)
5685 int immed
= !bit (insn
, 25);
5686 int writeback
= (bit (insn
, 24) == 0 || bit (insn
, 21) != 0);
5687 unsigned int rt
= bits (insn
, 12, 15);
5688 unsigned int rn
= bits (insn
, 16, 19);
5689 unsigned int rm
= bits (insn
, 0, 3); /* Only valid if !immed. */
5691 if (!insn_references_pc (insn
, 0x000ff00ful
))
5692 return arm_copy_unmodified (gdbarch
, insn
, "load/store", dsc
);
5694 if (debug_displaced
)
5695 fprintf_unfiltered (gdb_stdlog
,
5696 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
5697 load
? (size
== 1 ? "ldrb" : "ldr")
5698 : (size
== 1 ? "strb" : "str"), usermode
? "t" : "",
5700 (unsigned long) insn
);
5702 install_load_store (gdbarch
, regs
, dsc
, load
, immed
, writeback
, size
,
5703 usermode
, rt
, rm
, rn
);
5705 if (load
|| rt
!= ARM_PC_REGNUM
)
5707 dsc
->u
.ldst
.restore_r4
= 0;
5710 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
5712 {ldr,str}[b]<cond> r0, [r2, #imm]. */
5713 dsc
->modinsn
[0] = (insn
& 0xfff00fff) | 0x20000;
5715 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
5717 {ldr,str}[b]<cond> r0, [r2, r3]. */
5718 dsc
->modinsn
[0] = (insn
& 0xfff00ff0) | 0x20003;
5722 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
5723 dsc
->u
.ldst
.restore_r4
= 1;
5724 dsc
->modinsn
[0] = 0xe92d8000; /* push {pc} */
5725 dsc
->modinsn
[1] = 0xe8bd0010; /* pop {r4} */
5726 dsc
->modinsn
[2] = 0xe044400f; /* sub r4, r4, pc. */
5727 dsc
->modinsn
[3] = 0xe2844008; /* add r4, r4, #8. */
5728 dsc
->modinsn
[4] = 0xe0800004; /* add r0, r0, r4. */
5732 dsc
->modinsn
[5] = (insn
& 0xfff00fff) | 0x20000;
5734 dsc
->modinsn
[5] = (insn
& 0xfff00ff0) | 0x20003;
5739 dsc
->cleanup
= load
? &cleanup_load
: &cleanup_store
;
5744 /* Cleanup LDM instructions with fully-populated register list. This is an
5745 unfortunate corner case: it's impossible to implement correctly by modifying
5746 the instruction. The issue is as follows: we have an instruction,
5750 which we must rewrite to avoid loading PC. A possible solution would be to
5751 do the load in two halves, something like (with suitable cleanup
5755 ldm[id][ab] r8!, {r0-r7}
5757 ldm[id][ab] r8, {r7-r14}
5760 but at present there's no suitable place for <temp>, since the scratch space
5761 is overwritten before the cleanup routine is called. For now, we simply
5762 emulate the instruction. */
5765 cleanup_block_load_all (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5766 arm_displaced_step_closure
*dsc
)
5768 int inc
= dsc
->u
.block
.increment
;
5769 int bump_before
= dsc
->u
.block
.before
? (inc
? 4 : -4) : 0;
5770 int bump_after
= dsc
->u
.block
.before
? 0 : (inc
? 4 : -4);
5771 uint32_t regmask
= dsc
->u
.block
.regmask
;
5772 int regno
= inc
? 0 : 15;
5773 CORE_ADDR xfer_addr
= dsc
->u
.block
.xfer_addr
;
5774 int exception_return
= dsc
->u
.block
.load
&& dsc
->u
.block
.user
5775 && (regmask
& 0x8000) != 0;
5776 uint32_t status
= displaced_read_reg (regs
, dsc
, ARM_PS_REGNUM
);
5777 int do_transfer
= condition_true (dsc
->u
.block
.cond
, status
);
5778 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
5783 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
5784 sensible we can do here. Complain loudly. */
5785 if (exception_return
)
5786 error (_("Cannot single-step exception return"));
5788 /* We don't handle any stores here for now. */
5789 gdb_assert (dsc
->u
.block
.load
!= 0);
5791 if (debug_displaced
)
5792 fprintf_unfiltered (gdb_stdlog
, "displaced: emulating block transfer: "
5793 "%s %s %s\n", dsc
->u
.block
.load
? "ldm" : "stm",
5794 dsc
->u
.block
.increment
? "inc" : "dec",
5795 dsc
->u
.block
.before
? "before" : "after");
5802 while (regno
<= ARM_PC_REGNUM
&& (regmask
& (1 << regno
)) == 0)
5805 while (regno
>= 0 && (regmask
& (1 << regno
)) == 0)
5808 xfer_addr
+= bump_before
;
5810 memword
= read_memory_unsigned_integer (xfer_addr
, 4, byte_order
);
5811 displaced_write_reg (regs
, dsc
, regno
, memword
, LOAD_WRITE_PC
);
5813 xfer_addr
+= bump_after
;
5815 regmask
&= ~(1 << regno
);
5818 if (dsc
->u
.block
.writeback
)
5819 displaced_write_reg (regs
, dsc
, dsc
->u
.block
.rn
, xfer_addr
,
5823 /* Clean up an STM which included the PC in the register list. */
5826 cleanup_block_store_pc (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5827 arm_displaced_step_closure
*dsc
)
5829 uint32_t status
= displaced_read_reg (regs
, dsc
, ARM_PS_REGNUM
);
5830 int store_executed
= condition_true (dsc
->u
.block
.cond
, status
);
5831 CORE_ADDR pc_stored_at
, transferred_regs
= bitcount (dsc
->u
.block
.regmask
);
5832 CORE_ADDR stm_insn_addr
;
5835 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
5837 /* If condition code fails, there's nothing else to do. */
5838 if (!store_executed
)
5841 if (dsc
->u
.block
.increment
)
5843 pc_stored_at
= dsc
->u
.block
.xfer_addr
+ 4 * transferred_regs
;
5845 if (dsc
->u
.block
.before
)
5850 pc_stored_at
= dsc
->u
.block
.xfer_addr
;
5852 if (dsc
->u
.block
.before
)
5856 pc_val
= read_memory_unsigned_integer (pc_stored_at
, 4, byte_order
);
5857 stm_insn_addr
= dsc
->scratch_base
;
5858 offset
= pc_val
- stm_insn_addr
;
5860 if (debug_displaced
)
5861 fprintf_unfiltered (gdb_stdlog
, "displaced: detected PC offset %.8lx for "
5862 "STM instruction\n", offset
);
5864 /* Rewrite the stored PC to the proper value for the non-displaced original
5866 write_memory_unsigned_integer (pc_stored_at
, 4, byte_order
,
5867 dsc
->insn_addr
+ offset
);
5870 /* Clean up an LDM which includes the PC in the register list. We clumped all
5871 the registers in the transferred list into a contiguous range r0...rX (to
5872 avoid loading PC directly and losing control of the debugged program), so we
5873 must undo that here. */
5876 cleanup_block_load_pc (struct gdbarch
*gdbarch
,
5877 struct regcache
*regs
,
5878 arm_displaced_step_closure
*dsc
)
5880 uint32_t status
= displaced_read_reg (regs
, dsc
, ARM_PS_REGNUM
);
5881 int load_executed
= condition_true (dsc
->u
.block
.cond
, status
);
5882 unsigned int mask
= dsc
->u
.block
.regmask
, write_reg
= ARM_PC_REGNUM
;
5883 unsigned int regs_loaded
= bitcount (mask
);
5884 unsigned int num_to_shuffle
= regs_loaded
, clobbered
;
5886 /* The method employed here will fail if the register list is fully populated
5887 (we need to avoid loading PC directly). */
5888 gdb_assert (num_to_shuffle
< 16);
5893 clobbered
= (1 << num_to_shuffle
) - 1;
5895 while (num_to_shuffle
> 0)
5897 if ((mask
& (1 << write_reg
)) != 0)
5899 unsigned int read_reg
= num_to_shuffle
- 1;
5901 if (read_reg
!= write_reg
)
5903 ULONGEST rval
= displaced_read_reg (regs
, dsc
, read_reg
);
5904 displaced_write_reg (regs
, dsc
, write_reg
, rval
, LOAD_WRITE_PC
);
5905 if (debug_displaced
)
5906 fprintf_unfiltered (gdb_stdlog
, _("displaced: LDM: move "
5907 "loaded register r%d to r%d\n"), read_reg
,
5910 else if (debug_displaced
)
5911 fprintf_unfiltered (gdb_stdlog
, _("displaced: LDM: register "
5912 "r%d already in the right place\n"),
5915 clobbered
&= ~(1 << write_reg
);
5923 /* Restore any registers we scribbled over. */
5924 for (write_reg
= 0; clobbered
!= 0; write_reg
++)
5926 if ((clobbered
& (1 << write_reg
)) != 0)
5928 displaced_write_reg (regs
, dsc
, write_reg
, dsc
->tmp
[write_reg
],
5930 if (debug_displaced
)
5931 fprintf_unfiltered (gdb_stdlog
, _("displaced: LDM: restored "
5932 "clobbered register r%d\n"), write_reg
);
5933 clobbered
&= ~(1 << write_reg
);
5937 /* Perform register writeback manually. */
5938 if (dsc
->u
.block
.writeback
)
5940 ULONGEST new_rn_val
= dsc
->u
.block
.xfer_addr
;
5942 if (dsc
->u
.block
.increment
)
5943 new_rn_val
+= regs_loaded
* 4;
5945 new_rn_val
-= regs_loaded
* 4;
5947 displaced_write_reg (regs
, dsc
, dsc
->u
.block
.rn
, new_rn_val
,
5952 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
5953 in user-level code (in particular exception return, ldm rn, {...pc}^). */
5956 arm_copy_block_xfer (struct gdbarch
*gdbarch
, uint32_t insn
,
5957 struct regcache
*regs
,
5958 arm_displaced_step_closure
*dsc
)
5960 int load
= bit (insn
, 20);
5961 int user
= bit (insn
, 22);
5962 int increment
= bit (insn
, 23);
5963 int before
= bit (insn
, 24);
5964 int writeback
= bit (insn
, 21);
5965 int rn
= bits (insn
, 16, 19);
5967 /* Block transfers which don't mention PC can be run directly
5969 if (rn
!= ARM_PC_REGNUM
&& (insn
& 0x8000) == 0)
5970 return arm_copy_unmodified (gdbarch
, insn
, "ldm/stm", dsc
);
5972 if (rn
== ARM_PC_REGNUM
)
5974 warning (_("displaced: Unpredictable LDM or STM with "
5975 "base register r15"));
5976 return arm_copy_unmodified (gdbarch
, insn
, "unpredictable ldm/stm", dsc
);
5979 if (debug_displaced
)
5980 fprintf_unfiltered (gdb_stdlog
, "displaced: copying block transfer insn "
5981 "%.8lx\n", (unsigned long) insn
);
5983 dsc
->u
.block
.xfer_addr
= displaced_read_reg (regs
, dsc
, rn
);
5984 dsc
->u
.block
.rn
= rn
;
5986 dsc
->u
.block
.load
= load
;
5987 dsc
->u
.block
.user
= user
;
5988 dsc
->u
.block
.increment
= increment
;
5989 dsc
->u
.block
.before
= before
;
5990 dsc
->u
.block
.writeback
= writeback
;
5991 dsc
->u
.block
.cond
= bits (insn
, 28, 31);
5993 dsc
->u
.block
.regmask
= insn
& 0xffff;
5997 if ((insn
& 0xffff) == 0xffff)
5999 /* LDM with a fully-populated register list. This case is
6000 particularly tricky. Implement for now by fully emulating the
6001 instruction (which might not behave perfectly in all cases, but
6002 these instructions should be rare enough for that not to matter
6004 dsc
->modinsn
[0] = ARM_NOP
;
6006 dsc
->cleanup
= &cleanup_block_load_all
;
6010 /* LDM of a list of registers which includes PC. Implement by
6011 rewriting the list of registers to be transferred into a
6012 contiguous chunk r0...rX before doing the transfer, then shuffling
6013 registers into the correct places in the cleanup routine. */
6014 unsigned int regmask
= insn
& 0xffff;
6015 unsigned int num_in_list
= bitcount (regmask
), new_regmask
;
6018 for (i
= 0; i
< num_in_list
; i
++)
6019 dsc
->tmp
[i
] = displaced_read_reg (regs
, dsc
, i
);
6021 /* Writeback makes things complicated. We need to avoid clobbering
6022 the base register with one of the registers in our modified
6023 register list, but just using a different register can't work in
6026 ldm r14!, {r0-r13,pc}
6028 which would need to be rewritten as:
6032 but that can't work, because there's no free register for N.
6034 Solve this by turning off the writeback bit, and emulating
6035 writeback manually in the cleanup routine. */
6040 new_regmask
= (1 << num_in_list
) - 1;
6042 if (debug_displaced
)
6043 fprintf_unfiltered (gdb_stdlog
, _("displaced: LDM r%d%s, "
6044 "{..., pc}: original reg list %.4x, modified "
6045 "list %.4x\n"), rn
, writeback
? "!" : "",
6046 (int) insn
& 0xffff, new_regmask
);
6048 dsc
->modinsn
[0] = (insn
& ~0xffff) | (new_regmask
& 0xffff);
6050 dsc
->cleanup
= &cleanup_block_load_pc
;
6055 /* STM of a list of registers which includes PC. Run the instruction
6056 as-is, but out of line: this will store the wrong value for the PC,
6057 so we must manually fix up the memory in the cleanup routine.
6058 Doing things this way has the advantage that we can auto-detect
6059 the offset of the PC write (which is architecture-dependent) in
6060 the cleanup routine. */
6061 dsc
->modinsn
[0] = insn
;
6063 dsc
->cleanup
= &cleanup_block_store_pc
;
6070 thumb2_copy_block_xfer (struct gdbarch
*gdbarch
, uint16_t insn1
, uint16_t insn2
,
6071 struct regcache
*regs
,
6072 arm_displaced_step_closure
*dsc
)
6074 int rn
= bits (insn1
, 0, 3);
6075 int load
= bit (insn1
, 4);
6076 int writeback
= bit (insn1
, 5);
6078 /* Block transfers which don't mention PC can be run directly
6080 if (rn
!= ARM_PC_REGNUM
&& (insn2
& 0x8000) == 0)
6081 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
, "ldm/stm", dsc
);
6083 if (rn
== ARM_PC_REGNUM
)
6085 warning (_("displaced: Unpredictable LDM or STM with "
6086 "base register r15"));
6087 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
6088 "unpredictable ldm/stm", dsc
);
6091 if (debug_displaced
)
6092 fprintf_unfiltered (gdb_stdlog
, "displaced: copying block transfer insn "
6093 "%.4x%.4x\n", insn1
, insn2
);
6095 /* Clear bit 13, since it should be always zero. */
6096 dsc
->u
.block
.regmask
= (insn2
& 0xdfff);
6097 dsc
->u
.block
.rn
= rn
;
6099 dsc
->u
.block
.load
= load
;
6100 dsc
->u
.block
.user
= 0;
6101 dsc
->u
.block
.increment
= bit (insn1
, 7);
6102 dsc
->u
.block
.before
= bit (insn1
, 8);
6103 dsc
->u
.block
.writeback
= writeback
;
6104 dsc
->u
.block
.cond
= INST_AL
;
6105 dsc
->u
.block
.xfer_addr
= displaced_read_reg (regs
, dsc
, rn
);
6109 if (dsc
->u
.block
.regmask
== 0xffff)
6111 /* This branch is impossible to happen. */
6116 unsigned int regmask
= dsc
->u
.block
.regmask
;
6117 unsigned int num_in_list
= bitcount (regmask
), new_regmask
;
6120 for (i
= 0; i
< num_in_list
; i
++)
6121 dsc
->tmp
[i
] = displaced_read_reg (regs
, dsc
, i
);
6126 new_regmask
= (1 << num_in_list
) - 1;
6128 if (debug_displaced
)
6129 fprintf_unfiltered (gdb_stdlog
, _("displaced: LDM r%d%s, "
6130 "{..., pc}: original reg list %.4x, modified "
6131 "list %.4x\n"), rn
, writeback
? "!" : "",
6132 (int) dsc
->u
.block
.regmask
, new_regmask
);
6134 dsc
->modinsn
[0] = insn1
;
6135 dsc
->modinsn
[1] = (new_regmask
& 0xffff);
6138 dsc
->cleanup
= &cleanup_block_load_pc
;
6143 dsc
->modinsn
[0] = insn1
;
6144 dsc
->modinsn
[1] = insn2
;
6146 dsc
->cleanup
= &cleanup_block_store_pc
;
6151 /* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
6152 This is used to avoid a dependency on BFD's bfd_endian enum. */
6155 arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr
, int len
,
6158 return read_memory_unsigned_integer (memaddr
, len
,
6159 (enum bfd_endian
) byte_order
);
6162 /* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
6165 arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs
*self
,
6168 return gdbarch_addr_bits_remove (self
->regcache
->arch (), val
);
6171 /* Wrapper over syscall_next_pc for use in get_next_pcs. */
6174 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs
*self
)
6179 /* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
6182 arm_get_next_pcs_is_thumb (struct arm_get_next_pcs
*self
)
6184 return arm_is_thumb (self
->regcache
);
6187 /* single_step() is called just before we want to resume the inferior,
6188 if we want to single-step it but there is no hardware or kernel
6189 single-step support. We find the target of the coming instructions
6190 and breakpoint them. */
6192 std::vector
<CORE_ADDR
>
6193 arm_software_single_step (struct regcache
*regcache
)
6195 struct gdbarch
*gdbarch
= regcache
->arch ();
6196 struct arm_get_next_pcs next_pcs_ctx
;
6198 arm_get_next_pcs_ctor (&next_pcs_ctx
,
6199 &arm_get_next_pcs_ops
,
6200 gdbarch_byte_order (gdbarch
),
6201 gdbarch_byte_order_for_code (gdbarch
),
6205 std::vector
<CORE_ADDR
> next_pcs
= arm_get_next_pcs (&next_pcs_ctx
);
6207 for (CORE_ADDR
&pc_ref
: next_pcs
)
6208 pc_ref
= gdbarch_addr_bits_remove (gdbarch
, pc_ref
);
6213 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6214 for Linux, where some SVC instructions must be treated specially. */
6217 cleanup_svc (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6218 arm_displaced_step_closure
*dsc
)
6220 CORE_ADDR resume_addr
= dsc
->insn_addr
+ dsc
->insn_size
;
6222 if (debug_displaced
)
6223 fprintf_unfiltered (gdb_stdlog
, "displaced: cleanup for svc, resume at "
6224 "%.8lx\n", (unsigned long) resume_addr
);
6226 displaced_write_reg (regs
, dsc
, ARM_PC_REGNUM
, resume_addr
, BRANCH_WRITE_PC
);
6230 /* Common copy routine for svc instruciton. */
6233 install_svc (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6234 arm_displaced_step_closure
*dsc
)
6236 /* Preparation: none.
6237 Insn: unmodified svc.
6238 Cleanup: pc <- insn_addr + insn_size. */
6240 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
6242 dsc
->wrote_to_pc
= 1;
6244 /* Allow OS-specific code to override SVC handling. */
6245 if (dsc
->u
.svc
.copy_svc_os
)
6246 return dsc
->u
.svc
.copy_svc_os (gdbarch
, regs
, dsc
);
6249 dsc
->cleanup
= &cleanup_svc
;
6255 arm_copy_svc (struct gdbarch
*gdbarch
, uint32_t insn
,
6256 struct regcache
*regs
, arm_displaced_step_closure
*dsc
)
6259 if (debug_displaced
)
6260 fprintf_unfiltered (gdb_stdlog
, "displaced: copying svc insn %.8lx\n",
6261 (unsigned long) insn
);
6263 dsc
->modinsn
[0] = insn
;
6265 return install_svc (gdbarch
, regs
, dsc
);
6269 thumb_copy_svc (struct gdbarch
*gdbarch
, uint16_t insn
,
6270 struct regcache
*regs
, arm_displaced_step_closure
*dsc
)
6273 if (debug_displaced
)
6274 fprintf_unfiltered (gdb_stdlog
, "displaced: copying svc insn %.4x\n",
6277 dsc
->modinsn
[0] = insn
;
6279 return install_svc (gdbarch
, regs
, dsc
);
6282 /* Copy undefined instructions. */
6285 arm_copy_undef (struct gdbarch
*gdbarch
, uint32_t insn
,
6286 arm_displaced_step_closure
*dsc
)
6288 if (debug_displaced
)
6289 fprintf_unfiltered (gdb_stdlog
,
6290 "displaced: copying undefined insn %.8lx\n",
6291 (unsigned long) insn
);
6293 dsc
->modinsn
[0] = insn
;
6299 thumb_32bit_copy_undef (struct gdbarch
*gdbarch
, uint16_t insn1
, uint16_t insn2
,
6300 arm_displaced_step_closure
*dsc
)
6303 if (debug_displaced
)
6304 fprintf_unfiltered (gdb_stdlog
, "displaced: copying undefined insn "
6305 "%.4x %.4x\n", (unsigned short) insn1
,
6306 (unsigned short) insn2
);
6308 dsc
->modinsn
[0] = insn1
;
6309 dsc
->modinsn
[1] = insn2
;
6315 /* Copy unpredictable instructions. */
6318 arm_copy_unpred (struct gdbarch
*gdbarch
, uint32_t insn
,
6319 arm_displaced_step_closure
*dsc
)
6321 if (debug_displaced
)
6322 fprintf_unfiltered (gdb_stdlog
, "displaced: copying unpredictable insn "
6323 "%.8lx\n", (unsigned long) insn
);
6325 dsc
->modinsn
[0] = insn
;
6330 /* The decode_* functions are instruction decoding helpers. They mostly follow
6331 the presentation in the ARM ARM. */
6334 arm_decode_misc_memhint_neon (struct gdbarch
*gdbarch
, uint32_t insn
,
6335 struct regcache
*regs
,
6336 arm_displaced_step_closure
*dsc
)
6338 unsigned int op1
= bits (insn
, 20, 26), op2
= bits (insn
, 4, 7);
6339 unsigned int rn
= bits (insn
, 16, 19);
6341 if (op1
== 0x10 && (op2
& 0x2) == 0x0 && (rn
& 0x1) == 0x0)
6342 return arm_copy_unmodified (gdbarch
, insn
, "cps", dsc
);
6343 else if (op1
== 0x10 && op2
== 0x0 && (rn
& 0x1) == 0x1)
6344 return arm_copy_unmodified (gdbarch
, insn
, "setend", dsc
);
6345 else if ((op1
& 0x60) == 0x20)
6346 return arm_copy_unmodified (gdbarch
, insn
, "neon dataproc", dsc
);
6347 else if ((op1
& 0x71) == 0x40)
6348 return arm_copy_unmodified (gdbarch
, insn
, "neon elt/struct load/store",
6350 else if ((op1
& 0x77) == 0x41)
6351 return arm_copy_unmodified (gdbarch
, insn
, "unallocated mem hint", dsc
);
6352 else if ((op1
& 0x77) == 0x45)
6353 return arm_copy_preload (gdbarch
, insn
, regs
, dsc
); /* pli. */
6354 else if ((op1
& 0x77) == 0x51)
6357 return arm_copy_preload (gdbarch
, insn
, regs
, dsc
); /* pld/pldw. */
6359 return arm_copy_unpred (gdbarch
, insn
, dsc
);
6361 else if ((op1
& 0x77) == 0x55)
6362 return arm_copy_preload (gdbarch
, insn
, regs
, dsc
); /* pld/pldw. */
6363 else if (op1
== 0x57)
6366 case 0x1: return arm_copy_unmodified (gdbarch
, insn
, "clrex", dsc
);
6367 case 0x4: return arm_copy_unmodified (gdbarch
, insn
, "dsb", dsc
);
6368 case 0x5: return arm_copy_unmodified (gdbarch
, insn
, "dmb", dsc
);
6369 case 0x6: return arm_copy_unmodified (gdbarch
, insn
, "isb", dsc
);
6370 default: return arm_copy_unpred (gdbarch
, insn
, dsc
);
6372 else if ((op1
& 0x63) == 0x43)
6373 return arm_copy_unpred (gdbarch
, insn
, dsc
);
6374 else if ((op2
& 0x1) == 0x0)
6375 switch (op1
& ~0x80)
6378 return arm_copy_unmodified (gdbarch
, insn
, "unallocated mem hint", dsc
);
6380 return arm_copy_preload_reg (gdbarch
, insn
, regs
, dsc
); /* pli reg. */
6381 case 0x71: case 0x75:
6383 return arm_copy_preload_reg (gdbarch
, insn
, regs
, dsc
);
6384 case 0x63: case 0x67: case 0x73: case 0x77:
6385 return arm_copy_unpred (gdbarch
, insn
, dsc
);
6387 return arm_copy_undef (gdbarch
, insn
, dsc
);
6390 return arm_copy_undef (gdbarch
, insn
, dsc
); /* Probably unreachable. */
6394 arm_decode_unconditional (struct gdbarch
*gdbarch
, uint32_t insn
,
6395 struct regcache
*regs
,
6396 arm_displaced_step_closure
*dsc
)
6398 if (bit (insn
, 27) == 0)
6399 return arm_decode_misc_memhint_neon (gdbarch
, insn
, regs
, dsc
);
6400 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
6401 else switch (((insn
& 0x7000000) >> 23) | ((insn
& 0x100000) >> 20))
6404 return arm_copy_unmodified (gdbarch
, insn
, "srs", dsc
);
6407 return arm_copy_unmodified (gdbarch
, insn
, "rfe", dsc
);
6409 case 0x4: case 0x5: case 0x6: case 0x7:
6410 return arm_copy_b_bl_blx (gdbarch
, insn
, regs
, dsc
);
6413 switch ((insn
& 0xe00000) >> 21)
6415 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
6417 return arm_copy_copro_load_store (gdbarch
, insn
, regs
, dsc
);
6420 return arm_copy_unmodified (gdbarch
, insn
, "mcrr/mcrr2", dsc
);
6423 return arm_copy_undef (gdbarch
, insn
, dsc
);
6428 int rn_f
= (bits (insn
, 16, 19) == 0xf);
6429 switch ((insn
& 0xe00000) >> 21)
6432 /* ldc/ldc2 imm (undefined for rn == pc). */
6433 return rn_f
? arm_copy_undef (gdbarch
, insn
, dsc
)
6434 : arm_copy_copro_load_store (gdbarch
, insn
, regs
, dsc
);
6437 return arm_copy_unmodified (gdbarch
, insn
, "mrrc/mrrc2", dsc
);
6439 case 0x4: case 0x5: case 0x6: case 0x7:
6440 /* ldc/ldc2 lit (undefined for rn != pc). */
6441 return rn_f
? arm_copy_copro_load_store (gdbarch
, insn
, regs
, dsc
)
6442 : arm_copy_undef (gdbarch
, insn
, dsc
);
6445 return arm_copy_undef (gdbarch
, insn
, dsc
);
6450 return arm_copy_unmodified (gdbarch
, insn
, "stc/stc2", dsc
);
6453 if (bits (insn
, 16, 19) == 0xf)
6455 return arm_copy_copro_load_store (gdbarch
, insn
, regs
, dsc
);
6457 return arm_copy_undef (gdbarch
, insn
, dsc
);
6461 return arm_copy_unmodified (gdbarch
, insn
, "mcr/mcr2", dsc
);
6463 return arm_copy_unmodified (gdbarch
, insn
, "cdp/cdp2", dsc
);
6467 return arm_copy_unmodified (gdbarch
, insn
, "mrc/mrc2", dsc
);
6469 return arm_copy_unmodified (gdbarch
, insn
, "cdp/cdp2", dsc
);
6472 return arm_copy_undef (gdbarch
, insn
, dsc
);
6476 /* Decode miscellaneous instructions in dp/misc encoding space. */
6479 arm_decode_miscellaneous (struct gdbarch
*gdbarch
, uint32_t insn
,
6480 struct regcache
*regs
,
6481 arm_displaced_step_closure
*dsc
)
6483 unsigned int op2
= bits (insn
, 4, 6);
6484 unsigned int op
= bits (insn
, 21, 22);
6489 return arm_copy_unmodified (gdbarch
, insn
, "mrs/msr", dsc
);
6492 if (op
== 0x1) /* bx. */
6493 return arm_copy_bx_blx_reg (gdbarch
, insn
, regs
, dsc
);
6495 return arm_copy_unmodified (gdbarch
, insn
, "clz", dsc
);
6497 return arm_copy_undef (gdbarch
, insn
, dsc
);
6501 /* Not really supported. */
6502 return arm_copy_unmodified (gdbarch
, insn
, "bxj", dsc
);
6504 return arm_copy_undef (gdbarch
, insn
, dsc
);
6508 return arm_copy_bx_blx_reg (gdbarch
, insn
,
6509 regs
, dsc
); /* blx register. */
6511 return arm_copy_undef (gdbarch
, insn
, dsc
);
6514 return arm_copy_unmodified (gdbarch
, insn
, "saturating add/sub", dsc
);
6518 return arm_copy_unmodified (gdbarch
, insn
, "bkpt", dsc
);
6520 /* Not really supported. */
6521 return arm_copy_unmodified (gdbarch
, insn
, "smc", dsc
);
6525 return arm_copy_undef (gdbarch
, insn
, dsc
);
6530 arm_decode_dp_misc (struct gdbarch
*gdbarch
, uint32_t insn
,
6531 struct regcache
*regs
,
6532 arm_displaced_step_closure
*dsc
)
6535 switch (bits (insn
, 20, 24))
6538 return arm_copy_unmodified (gdbarch
, insn
, "movw", dsc
);
6541 return arm_copy_unmodified (gdbarch
, insn
, "movt", dsc
);
6543 case 0x12: case 0x16:
6544 return arm_copy_unmodified (gdbarch
, insn
, "msr imm", dsc
);
6547 return arm_copy_alu_imm (gdbarch
, insn
, regs
, dsc
);
6551 uint32_t op1
= bits (insn
, 20, 24), op2
= bits (insn
, 4, 7);
6553 if ((op1
& 0x19) != 0x10 && (op2
& 0x1) == 0x0)
6554 return arm_copy_alu_reg (gdbarch
, insn
, regs
, dsc
);
6555 else if ((op1
& 0x19) != 0x10 && (op2
& 0x9) == 0x1)
6556 return arm_copy_alu_shifted_reg (gdbarch
, insn
, regs
, dsc
);
6557 else if ((op1
& 0x19) == 0x10 && (op2
& 0x8) == 0x0)
6558 return arm_decode_miscellaneous (gdbarch
, insn
, regs
, dsc
);
6559 else if ((op1
& 0x19) == 0x10 && (op2
& 0x9) == 0x8)
6560 return arm_copy_unmodified (gdbarch
, insn
, "halfword mul/mla", dsc
);
6561 else if ((op1
& 0x10) == 0x00 && op2
== 0x9)
6562 return arm_copy_unmodified (gdbarch
, insn
, "mul/mla", dsc
);
6563 else if ((op1
& 0x10) == 0x10 && op2
== 0x9)
6564 return arm_copy_unmodified (gdbarch
, insn
, "synch", dsc
);
6565 else if (op2
== 0xb || (op2
& 0xd) == 0xd)
6566 /* 2nd arg means "unprivileged". */
6567 return arm_copy_extra_ld_st (gdbarch
, insn
, (op1
& 0x12) == 0x02, regs
,
6571 /* Should be unreachable. */
6576 arm_decode_ld_st_word_ubyte (struct gdbarch
*gdbarch
, uint32_t insn
,
6577 struct regcache
*regs
,
6578 arm_displaced_step_closure
*dsc
)
6580 int a
= bit (insn
, 25), b
= bit (insn
, 4);
6581 uint32_t op1
= bits (insn
, 20, 24);
6583 if ((!a
&& (op1
& 0x05) == 0x00 && (op1
& 0x17) != 0x02)
6584 || (a
&& (op1
& 0x05) == 0x00 && (op1
& 0x17) != 0x02 && !b
))
6585 return arm_copy_ldr_str_ldrb_strb (gdbarch
, insn
, regs
, dsc
, 0, 4, 0);
6586 else if ((!a
&& (op1
& 0x17) == 0x02)
6587 || (a
&& (op1
& 0x17) == 0x02 && !b
))
6588 return arm_copy_ldr_str_ldrb_strb (gdbarch
, insn
, regs
, dsc
, 0, 4, 1);
6589 else if ((!a
&& (op1
& 0x05) == 0x01 && (op1
& 0x17) != 0x03)
6590 || (a
&& (op1
& 0x05) == 0x01 && (op1
& 0x17) != 0x03 && !b
))
6591 return arm_copy_ldr_str_ldrb_strb (gdbarch
, insn
, regs
, dsc
, 1, 4, 0);
6592 else if ((!a
&& (op1
& 0x17) == 0x03)
6593 || (a
&& (op1
& 0x17) == 0x03 && !b
))
6594 return arm_copy_ldr_str_ldrb_strb (gdbarch
, insn
, regs
, dsc
, 1, 4, 1);
6595 else if ((!a
&& (op1
& 0x05) == 0x04 && (op1
& 0x17) != 0x06)
6596 || (a
&& (op1
& 0x05) == 0x04 && (op1
& 0x17) != 0x06 && !b
))
6597 return arm_copy_ldr_str_ldrb_strb (gdbarch
, insn
, regs
, dsc
, 0, 1, 0);
6598 else if ((!a
&& (op1
& 0x17) == 0x06)
6599 || (a
&& (op1
& 0x17) == 0x06 && !b
))
6600 return arm_copy_ldr_str_ldrb_strb (gdbarch
, insn
, regs
, dsc
, 0, 1, 1);
6601 else if ((!a
&& (op1
& 0x05) == 0x05 && (op1
& 0x17) != 0x07)
6602 || (a
&& (op1
& 0x05) == 0x05 && (op1
& 0x17) != 0x07 && !b
))
6603 return arm_copy_ldr_str_ldrb_strb (gdbarch
, insn
, regs
, dsc
, 1, 1, 0);
6604 else if ((!a
&& (op1
& 0x17) == 0x07)
6605 || (a
&& (op1
& 0x17) == 0x07 && !b
))
6606 return arm_copy_ldr_str_ldrb_strb (gdbarch
, insn
, regs
, dsc
, 1, 1, 1);
6608 /* Should be unreachable. */
6613 arm_decode_media (struct gdbarch
*gdbarch
, uint32_t insn
,
6614 arm_displaced_step_closure
*dsc
)
6616 switch (bits (insn
, 20, 24))
6618 case 0x00: case 0x01: case 0x02: case 0x03:
6619 return arm_copy_unmodified (gdbarch
, insn
, "parallel add/sub signed", dsc
);
6621 case 0x04: case 0x05: case 0x06: case 0x07:
6622 return arm_copy_unmodified (gdbarch
, insn
, "parallel add/sub unsigned", dsc
);
6624 case 0x08: case 0x09: case 0x0a: case 0x0b:
6625 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
6626 return arm_copy_unmodified (gdbarch
, insn
,
6627 "decode/pack/unpack/saturate/reverse", dsc
);
6630 if (bits (insn
, 5, 7) == 0) /* op2. */
6632 if (bits (insn
, 12, 15) == 0xf)
6633 return arm_copy_unmodified (gdbarch
, insn
, "usad8", dsc
);
6635 return arm_copy_unmodified (gdbarch
, insn
, "usada8", dsc
);
6638 return arm_copy_undef (gdbarch
, insn
, dsc
);
6640 case 0x1a: case 0x1b:
6641 if (bits (insn
, 5, 6) == 0x2) /* op2[1:0]. */
6642 return arm_copy_unmodified (gdbarch
, insn
, "sbfx", dsc
);
6644 return arm_copy_undef (gdbarch
, insn
, dsc
);
6646 case 0x1c: case 0x1d:
6647 if (bits (insn
, 5, 6) == 0x0) /* op2[1:0]. */
6649 if (bits (insn
, 0, 3) == 0xf)
6650 return arm_copy_unmodified (gdbarch
, insn
, "bfc", dsc
);
6652 return arm_copy_unmodified (gdbarch
, insn
, "bfi", dsc
);
6655 return arm_copy_undef (gdbarch
, insn
, dsc
);
6657 case 0x1e: case 0x1f:
6658 if (bits (insn
, 5, 6) == 0x2) /* op2[1:0]. */
6659 return arm_copy_unmodified (gdbarch
, insn
, "ubfx", dsc
);
6661 return arm_copy_undef (gdbarch
, insn
, dsc
);
6664 /* Should be unreachable. */
6669 arm_decode_b_bl_ldmstm (struct gdbarch
*gdbarch
, uint32_t insn
,
6670 struct regcache
*regs
,
6671 arm_displaced_step_closure
*dsc
)
6674 return arm_copy_b_bl_blx (gdbarch
, insn
, regs
, dsc
);
6676 return arm_copy_block_xfer (gdbarch
, insn
, regs
, dsc
);
6680 arm_decode_ext_reg_ld_st (struct gdbarch
*gdbarch
, uint32_t insn
,
6681 struct regcache
*regs
,
6682 arm_displaced_step_closure
*dsc
)
6684 unsigned int opcode
= bits (insn
, 20, 24);
6688 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
6689 return arm_copy_unmodified (gdbarch
, insn
, "vfp/neon mrrc/mcrr", dsc
);
6691 case 0x08: case 0x0a: case 0x0c: case 0x0e:
6692 case 0x12: case 0x16:
6693 return arm_copy_unmodified (gdbarch
, insn
, "vfp/neon vstm/vpush", dsc
);
6695 case 0x09: case 0x0b: case 0x0d: case 0x0f:
6696 case 0x13: case 0x17:
6697 return arm_copy_unmodified (gdbarch
, insn
, "vfp/neon vldm/vpop", dsc
);
6699 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6700 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6701 /* Note: no writeback for these instructions. Bit 25 will always be
6702 zero though (via caller), so the following works OK. */
6703 return arm_copy_copro_load_store (gdbarch
, insn
, regs
, dsc
);
6706 /* Should be unreachable. */
6710 /* Decode shifted register instructions. */
6713 thumb2_decode_dp_shift_reg (struct gdbarch
*gdbarch
, uint16_t insn1
,
6714 uint16_t insn2
, struct regcache
*regs
,
6715 arm_displaced_step_closure
*dsc
)
6717 /* PC is only allowed to be used in instruction MOV. */
6719 unsigned int op
= bits (insn1
, 5, 8);
6720 unsigned int rn
= bits (insn1
, 0, 3);
6722 if (op
== 0x2 && rn
== 0xf) /* MOV */
6723 return thumb2_copy_alu_imm (gdbarch
, insn1
, insn2
, regs
, dsc
);
6725 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
6726 "dp (shift reg)", dsc
);
6730 /* Decode extension register load/store. Exactly the same as
6731 arm_decode_ext_reg_ld_st. */
6734 thumb2_decode_ext_reg_ld_st (struct gdbarch
*gdbarch
, uint16_t insn1
,
6735 uint16_t insn2
, struct regcache
*regs
,
6736 arm_displaced_step_closure
*dsc
)
6738 unsigned int opcode
= bits (insn1
, 4, 8);
6742 case 0x04: case 0x05:
6743 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
6744 "vfp/neon vmov", dsc
);
6746 case 0x08: case 0x0c: /* 01x00 */
6747 case 0x0a: case 0x0e: /* 01x10 */
6748 case 0x12: case 0x16: /* 10x10 */
6749 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
6750 "vfp/neon vstm/vpush", dsc
);
6752 case 0x09: case 0x0d: /* 01x01 */
6753 case 0x0b: case 0x0f: /* 01x11 */
6754 case 0x13: case 0x17: /* 10x11 */
6755 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
6756 "vfp/neon vldm/vpop", dsc
);
6758 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6759 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
6761 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6762 return thumb2_copy_copro_load_store (gdbarch
, insn1
, insn2
, regs
, dsc
);
6765 /* Should be unreachable. */
6770 arm_decode_svc_copro (struct gdbarch
*gdbarch
, uint32_t insn
,
6771 struct regcache
*regs
, arm_displaced_step_closure
*dsc
)
6773 unsigned int op1
= bits (insn
, 20, 25);
6774 int op
= bit (insn
, 4);
6775 unsigned int coproc
= bits (insn
, 8, 11);
6777 if ((op1
& 0x20) == 0x00 && (op1
& 0x3a) != 0x00 && (coproc
& 0xe) == 0xa)
6778 return arm_decode_ext_reg_ld_st (gdbarch
, insn
, regs
, dsc
);
6779 else if ((op1
& 0x21) == 0x00 && (op1
& 0x3a) != 0x00
6780 && (coproc
& 0xe) != 0xa)
6782 return arm_copy_copro_load_store (gdbarch
, insn
, regs
, dsc
);
6783 else if ((op1
& 0x21) == 0x01 && (op1
& 0x3a) != 0x00
6784 && (coproc
& 0xe) != 0xa)
6785 /* ldc/ldc2 imm/lit. */
6786 return arm_copy_copro_load_store (gdbarch
, insn
, regs
, dsc
);
6787 else if ((op1
& 0x3e) == 0x00)
6788 return arm_copy_undef (gdbarch
, insn
, dsc
);
6789 else if ((op1
& 0x3e) == 0x04 && (coproc
& 0xe) == 0xa)
6790 return arm_copy_unmodified (gdbarch
, insn
, "neon 64bit xfer", dsc
);
6791 else if (op1
== 0x04 && (coproc
& 0xe) != 0xa)
6792 return arm_copy_unmodified (gdbarch
, insn
, "mcrr/mcrr2", dsc
);
6793 else if (op1
== 0x05 && (coproc
& 0xe) != 0xa)
6794 return arm_copy_unmodified (gdbarch
, insn
, "mrrc/mrrc2", dsc
);
6795 else if ((op1
& 0x30) == 0x20 && !op
)
6797 if ((coproc
& 0xe) == 0xa)
6798 return arm_copy_unmodified (gdbarch
, insn
, "vfp dataproc", dsc
);
6800 return arm_copy_unmodified (gdbarch
, insn
, "cdp/cdp2", dsc
);
6802 else if ((op1
& 0x30) == 0x20 && op
)
6803 return arm_copy_unmodified (gdbarch
, insn
, "neon 8/16/32 bit xfer", dsc
);
6804 else if ((op1
& 0x31) == 0x20 && op
&& (coproc
& 0xe) != 0xa)
6805 return arm_copy_unmodified (gdbarch
, insn
, "mcr/mcr2", dsc
);
6806 else if ((op1
& 0x31) == 0x21 && op
&& (coproc
& 0xe) != 0xa)
6807 return arm_copy_unmodified (gdbarch
, insn
, "mrc/mrc2", dsc
);
6808 else if ((op1
& 0x30) == 0x30)
6809 return arm_copy_svc (gdbarch
, insn
, regs
, dsc
);
6811 return arm_copy_undef (gdbarch
, insn
, dsc
); /* Possibly unreachable. */
6815 thumb2_decode_svc_copro (struct gdbarch
*gdbarch
, uint16_t insn1
,
6816 uint16_t insn2
, struct regcache
*regs
,
6817 arm_displaced_step_closure
*dsc
)
6819 unsigned int coproc
= bits (insn2
, 8, 11);
6820 unsigned int bit_5_8
= bits (insn1
, 5, 8);
6821 unsigned int bit_9
= bit (insn1
, 9);
6822 unsigned int bit_4
= bit (insn1
, 4);
6827 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
6828 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
6830 else if (bit_5_8
== 0) /* UNDEFINED. */
6831 return thumb_32bit_copy_undef (gdbarch
, insn1
, insn2
, dsc
);
6834 /*coproc is 101x. SIMD/VFP, ext registers load/store. */
6835 if ((coproc
& 0xe) == 0xa)
6836 return thumb2_decode_ext_reg_ld_st (gdbarch
, insn1
, insn2
, regs
,
6838 else /* coproc is not 101x. */
6840 if (bit_4
== 0) /* STC/STC2. */
6841 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
6843 else /* LDC/LDC2 {literal, immeidate}. */
6844 return thumb2_copy_copro_load_store (gdbarch
, insn1
, insn2
,
6850 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
, "coproc", dsc
);
6856 install_pc_relative (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6857 arm_displaced_step_closure
*dsc
, int rd
)
6863 Preparation: Rd <- PC
6869 int val
= displaced_read_reg (regs
, dsc
, ARM_PC_REGNUM
);
6870 displaced_write_reg (regs
, dsc
, rd
, val
, CANNOT_WRITE_PC
);
6874 thumb_copy_pc_relative_16bit (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6875 arm_displaced_step_closure
*dsc
,
6876 int rd
, unsigned int imm
)
6879 /* Encoding T2: ADDS Rd, #imm */
6880 dsc
->modinsn
[0] = (0x3000 | (rd
<< 8) | imm
);
6882 install_pc_relative (gdbarch
, regs
, dsc
, rd
);
6888 thumb_decode_pc_relative_16bit (struct gdbarch
*gdbarch
, uint16_t insn
,
6889 struct regcache
*regs
,
6890 arm_displaced_step_closure
*dsc
)
6892 unsigned int rd
= bits (insn
, 8, 10);
6893 unsigned int imm8
= bits (insn
, 0, 7);
6895 if (debug_displaced
)
6896 fprintf_unfiltered (gdb_stdlog
,
6897 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
6900 return thumb_copy_pc_relative_16bit (gdbarch
, regs
, dsc
, rd
, imm8
);
6904 thumb_copy_pc_relative_32bit (struct gdbarch
*gdbarch
, uint16_t insn1
,
6905 uint16_t insn2
, struct regcache
*regs
,
6906 arm_displaced_step_closure
*dsc
)
6908 unsigned int rd
= bits (insn2
, 8, 11);
6909 /* Since immediate has the same encoding in ADR ADD and SUB, so we simply
6910 extract raw immediate encoding rather than computing immediate. When
6911 generating ADD or SUB instruction, we can simply perform OR operation to
6912 set immediate into ADD. */
6913 unsigned int imm_3_8
= insn2
& 0x70ff;
6914 unsigned int imm_i
= insn1
& 0x0400; /* Clear all bits except bit 10. */
6916 if (debug_displaced
)
6917 fprintf_unfiltered (gdb_stdlog
,
6918 "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
6919 rd
, imm_i
, imm_3_8
, insn1
, insn2
);
6921 if (bit (insn1
, 7)) /* Encoding T2 */
6923 /* Encoding T3: SUB Rd, Rd, #imm */
6924 dsc
->modinsn
[0] = (0xf1a0 | rd
| imm_i
);
6925 dsc
->modinsn
[1] = ((rd
<< 8) | imm_3_8
);
6927 else /* Encoding T3 */
6929 /* Encoding T3: ADD Rd, Rd, #imm */
6930 dsc
->modinsn
[0] = (0xf100 | rd
| imm_i
);
6931 dsc
->modinsn
[1] = ((rd
<< 8) | imm_3_8
);
6935 install_pc_relative (gdbarch
, regs
, dsc
, rd
);
6941 thumb_copy_16bit_ldr_literal (struct gdbarch
*gdbarch
, uint16_t insn1
,
6942 struct regcache
*regs
,
6943 arm_displaced_step_closure
*dsc
)
6945 unsigned int rt
= bits (insn1
, 8, 10);
6947 int imm8
= (bits (insn1
, 0, 7) << 2);
6953 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
6955 Insn: LDR R0, [R2, R3];
6956 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
6958 if (debug_displaced
)
6959 fprintf_unfiltered (gdb_stdlog
,
6960 "displaced: copying thumb ldr r%d [pc #%d]\n"
6963 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
6964 dsc
->tmp
[2] = displaced_read_reg (regs
, dsc
, 2);
6965 dsc
->tmp
[3] = displaced_read_reg (regs
, dsc
, 3);
6966 pc
= displaced_read_reg (regs
, dsc
, ARM_PC_REGNUM
);
6967 /* The assembler calculates the required value of the offset from the
6968 Align(PC,4) value of this instruction to the label. */
6969 pc
= pc
& 0xfffffffc;
6971 displaced_write_reg (regs
, dsc
, 2, pc
, CANNOT_WRITE_PC
);
6972 displaced_write_reg (regs
, dsc
, 3, imm8
, CANNOT_WRITE_PC
);
6975 dsc
->u
.ldst
.xfersize
= 4;
6977 dsc
->u
.ldst
.immed
= 0;
6978 dsc
->u
.ldst
.writeback
= 0;
6979 dsc
->u
.ldst
.restore_r4
= 0;
6981 dsc
->modinsn
[0] = 0x58d0; /* ldr r0, [r2, r3]*/
6983 dsc
->cleanup
= &cleanup_load
;
6988 /* Copy Thumb cbnz/cbz insruction. */
6991 thumb_copy_cbnz_cbz (struct gdbarch
*gdbarch
, uint16_t insn1
,
6992 struct regcache
*regs
,
6993 arm_displaced_step_closure
*dsc
)
6995 int non_zero
= bit (insn1
, 11);
6996 unsigned int imm5
= (bit (insn1
, 9) << 6) | (bits (insn1
, 3, 7) << 1);
6997 CORE_ADDR from
= dsc
->insn_addr
;
6998 int rn
= bits (insn1
, 0, 2);
6999 int rn_val
= displaced_read_reg (regs
, dsc
, rn
);
7001 dsc
->u
.branch
.cond
= (rn_val
&& non_zero
) || (!rn_val
&& !non_zero
);
7002 /* CBNZ and CBZ do not affect the condition flags. If condition is true,
7003 set it INST_AL, so cleanup_branch will know branch is taken, otherwise,
7004 condition is false, let it be, cleanup_branch will do nothing. */
7005 if (dsc
->u
.branch
.cond
)
7007 dsc
->u
.branch
.cond
= INST_AL
;
7008 dsc
->u
.branch
.dest
= from
+ 4 + imm5
;
7011 dsc
->u
.branch
.dest
= from
+ 2;
7013 dsc
->u
.branch
.link
= 0;
7014 dsc
->u
.branch
.exchange
= 0;
7016 if (debug_displaced
)
7017 fprintf_unfiltered (gdb_stdlog
, "displaced: copying %s [r%d = 0x%x]"
7018 " insn %.4x to %.8lx\n", non_zero
? "cbnz" : "cbz",
7019 rn
, rn_val
, insn1
, dsc
->u
.branch
.dest
);
7021 dsc
->modinsn
[0] = THUMB_NOP
;
7023 dsc
->cleanup
= &cleanup_branch
;
7027 /* Copy Table Branch Byte/Halfword */
7029 thumb2_copy_table_branch (struct gdbarch
*gdbarch
, uint16_t insn1
,
7030 uint16_t insn2
, struct regcache
*regs
,
7031 arm_displaced_step_closure
*dsc
)
7033 ULONGEST rn_val
, rm_val
;
7034 int is_tbh
= bit (insn2
, 4);
7035 CORE_ADDR halfwords
= 0;
7036 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
7038 rn_val
= displaced_read_reg (regs
, dsc
, bits (insn1
, 0, 3));
7039 rm_val
= displaced_read_reg (regs
, dsc
, bits (insn2
, 0, 3));
7045 target_read_memory (rn_val
+ 2 * rm_val
, buf
, 2);
7046 halfwords
= extract_unsigned_integer (buf
, 2, byte_order
);
7052 target_read_memory (rn_val
+ rm_val
, buf
, 1);
7053 halfwords
= extract_unsigned_integer (buf
, 1, byte_order
);
7056 if (debug_displaced
)
7057 fprintf_unfiltered (gdb_stdlog
, "displaced: %s base 0x%x offset 0x%x"
7058 " offset 0x%x\n", is_tbh
? "tbh" : "tbb",
7059 (unsigned int) rn_val
, (unsigned int) rm_val
,
7060 (unsigned int) halfwords
);
7062 dsc
->u
.branch
.cond
= INST_AL
;
7063 dsc
->u
.branch
.link
= 0;
7064 dsc
->u
.branch
.exchange
= 0;
7065 dsc
->u
.branch
.dest
= dsc
->insn_addr
+ 4 + 2 * halfwords
;
7067 dsc
->cleanup
= &cleanup_branch
;
7073 cleanup_pop_pc_16bit_all (struct gdbarch
*gdbarch
, struct regcache
*regs
,
7074 arm_displaced_step_closure
*dsc
)
7077 int val
= displaced_read_reg (regs
, dsc
, 7);
7078 displaced_write_reg (regs
, dsc
, ARM_PC_REGNUM
, val
, BX_WRITE_PC
);
7081 val
= displaced_read_reg (regs
, dsc
, 8);
7082 displaced_write_reg (regs
, dsc
, 7, val
, CANNOT_WRITE_PC
);
7085 displaced_write_reg (regs
, dsc
, 8, dsc
->tmp
[0], CANNOT_WRITE_PC
);
7090 thumb_copy_pop_pc_16bit (struct gdbarch
*gdbarch
, uint16_t insn1
,
7091 struct regcache
*regs
,
7092 arm_displaced_step_closure
*dsc
)
7094 dsc
->u
.block
.regmask
= insn1
& 0x00ff;
7096 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
7099 (1) register list is full, that is, r0-r7 are used.
7100 Prepare: tmp[0] <- r8
7102 POP {r0, r1, ...., r6, r7}; remove PC from reglist
7103 MOV r8, r7; Move value of r7 to r8;
7104 POP {r7}; Store PC value into r7.
7106 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
7108 (2) register list is not full, supposing there are N registers in
7109 register list (except PC, 0 <= N <= 7).
7110 Prepare: for each i, 0 - N, tmp[i] <- ri.
7112 POP {r0, r1, ...., rN};
7114 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
7115 from tmp[] properly.
7117 if (debug_displaced
)
7118 fprintf_unfiltered (gdb_stdlog
,
7119 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
7120 dsc
->u
.block
.regmask
, insn1
);
7122 if (dsc
->u
.block
.regmask
== 0xff)
7124 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 8);
7126 dsc
->modinsn
[0] = (insn1
& 0xfeff); /* POP {r0,r1,...,r6, r7} */
7127 dsc
->modinsn
[1] = 0x46b8; /* MOV r8, r7 */
7128 dsc
->modinsn
[2] = 0xbc80; /* POP {r7} */
7131 dsc
->cleanup
= &cleanup_pop_pc_16bit_all
;
7135 unsigned int num_in_list
= bitcount (dsc
->u
.block
.regmask
);
7137 unsigned int new_regmask
;
7139 for (i
= 0; i
< num_in_list
+ 1; i
++)
7140 dsc
->tmp
[i
] = displaced_read_reg (regs
, dsc
, i
);
7142 new_regmask
= (1 << (num_in_list
+ 1)) - 1;
7144 if (debug_displaced
)
7145 fprintf_unfiltered (gdb_stdlog
, _("displaced: POP "
7146 "{..., pc}: original reg list %.4x,"
7147 " modified list %.4x\n"),
7148 (int) dsc
->u
.block
.regmask
, new_regmask
);
7150 dsc
->u
.block
.regmask
|= 0x8000;
7151 dsc
->u
.block
.writeback
= 0;
7152 dsc
->u
.block
.cond
= INST_AL
;
7154 dsc
->modinsn
[0] = (insn1
& ~0x1ff) | (new_regmask
& 0xff);
7156 dsc
->cleanup
= &cleanup_block_load_pc
;
7163 thumb_process_displaced_16bit_insn (struct gdbarch
*gdbarch
, uint16_t insn1
,
7164 struct regcache
*regs
,
7165 arm_displaced_step_closure
*dsc
)
7167 unsigned short op_bit_12_15
= bits (insn1
, 12, 15);
7168 unsigned short op_bit_10_11
= bits (insn1
, 10, 11);
7171 /* 16-bit thumb instructions. */
7172 switch (op_bit_12_15
)
7174 /* Shift (imme), add, subtract, move and compare. */
7175 case 0: case 1: case 2: case 3:
7176 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
,
7177 "shift/add/sub/mov/cmp",
7181 switch (op_bit_10_11
)
7183 case 0: /* Data-processing */
7184 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
,
7188 case 1: /* Special data instructions and branch and exchange. */
7190 unsigned short op
= bits (insn1
, 7, 9);
7191 if (op
== 6 || op
== 7) /* BX or BLX */
7192 err
= thumb_copy_bx_blx_reg (gdbarch
, insn1
, regs
, dsc
);
7193 else if (bits (insn1
, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
7194 err
= thumb_copy_alu_reg (gdbarch
, insn1
, regs
, dsc
);
7196 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
, "special data",
7200 default: /* LDR (literal) */
7201 err
= thumb_copy_16bit_ldr_literal (gdbarch
, insn1
, regs
, dsc
);
7204 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
7205 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
, "ldr/str", dsc
);
7208 if (op_bit_10_11
< 2) /* Generate PC-relative address */
7209 err
= thumb_decode_pc_relative_16bit (gdbarch
, insn1
, regs
, dsc
);
7210 else /* Generate SP-relative address */
7211 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
, "sp-relative", dsc
);
7213 case 11: /* Misc 16-bit instructions */
7215 switch (bits (insn1
, 8, 11))
7217 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
7218 err
= thumb_copy_cbnz_cbz (gdbarch
, insn1
, regs
, dsc
);
7220 case 12: case 13: /* POP */
7221 if (bit (insn1
, 8)) /* PC is in register list. */
7222 err
= thumb_copy_pop_pc_16bit (gdbarch
, insn1
, regs
, dsc
);
7224 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
, "pop", dsc
);
7226 case 15: /* If-Then, and hints */
7227 if (bits (insn1
, 0, 3))
7228 /* If-Then makes up to four following instructions conditional.
7229 IT instruction itself is not conditional, so handle it as a
7230 common unmodified instruction. */
7231 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
, "If-Then",
7234 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
, "hints", dsc
);
7237 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
, "misc", dsc
);
7242 if (op_bit_10_11
< 2) /* Store multiple registers */
7243 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
, "stm", dsc
);
7244 else /* Load multiple registers */
7245 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
, "ldm", dsc
);
7247 case 13: /* Conditional branch and supervisor call */
7248 if (bits (insn1
, 9, 11) != 7) /* conditional branch */
7249 err
= thumb_copy_b (gdbarch
, insn1
, dsc
);
7251 err
= thumb_copy_svc (gdbarch
, insn1
, regs
, dsc
);
7253 case 14: /* Unconditional branch */
7254 err
= thumb_copy_b (gdbarch
, insn1
, dsc
);
7261 internal_error (__FILE__
, __LINE__
,
7262 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
7266 decode_thumb_32bit_ld_mem_hints (struct gdbarch
*gdbarch
,
7267 uint16_t insn1
, uint16_t insn2
,
7268 struct regcache
*regs
,
7269 arm_displaced_step_closure
*dsc
)
7271 int rt
= bits (insn2
, 12, 15);
7272 int rn
= bits (insn1
, 0, 3);
7273 int op1
= bits (insn1
, 7, 8);
7275 switch (bits (insn1
, 5, 6))
7277 case 0: /* Load byte and memory hints */
7278 if (rt
== 0xf) /* PLD/PLI */
7281 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
7282 return thumb2_copy_preload (gdbarch
, insn1
, insn2
, regs
, dsc
);
7284 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7289 if (rn
== 0xf) /* LDRB/LDRSB (literal) */
7290 return thumb2_copy_load_literal (gdbarch
, insn1
, insn2
, regs
, dsc
,
7293 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7294 "ldrb{reg, immediate}/ldrbt",
7299 case 1: /* Load halfword and memory hints. */
7300 if (rt
== 0xf) /* PLD{W} and Unalloc memory hint. */
7301 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7302 "pld/unalloc memhint", dsc
);
7306 return thumb2_copy_load_literal (gdbarch
, insn1
, insn2
, regs
, dsc
,
7309 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7313 case 2: /* Load word */
7315 int insn2_bit_8_11
= bits (insn2
, 8, 11);
7318 return thumb2_copy_load_literal (gdbarch
, insn1
, insn2
, regs
, dsc
, 4);
7319 else if (op1
== 0x1) /* Encoding T3 */
7320 return thumb2_copy_load_reg_imm (gdbarch
, insn1
, insn2
, regs
, dsc
,
7322 else /* op1 == 0x0 */
7324 if (insn2_bit_8_11
== 0xc || (insn2_bit_8_11
& 0x9) == 0x9)
7325 /* LDR (immediate) */
7326 return thumb2_copy_load_reg_imm (gdbarch
, insn1
, insn2
, regs
,
7327 dsc
, bit (insn2
, 8), 1);
7328 else if (insn2_bit_8_11
== 0xe) /* LDRT */
7329 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7332 /* LDR (register) */
7333 return thumb2_copy_load_reg_imm (gdbarch
, insn1
, insn2
, regs
,
7339 return thumb_32bit_copy_undef (gdbarch
, insn1
, insn2
, dsc
);
7346 thumb_process_displaced_32bit_insn (struct gdbarch
*gdbarch
, uint16_t insn1
,
7347 uint16_t insn2
, struct regcache
*regs
,
7348 arm_displaced_step_closure
*dsc
)
7351 unsigned short op
= bit (insn2
, 15);
7352 unsigned int op1
= bits (insn1
, 11, 12);
7358 switch (bits (insn1
, 9, 10))
7363 /* Load/store {dual, execlusive}, table branch. */
7364 if (bits (insn1
, 7, 8) == 1 && bits (insn1
, 4, 5) == 1
7365 && bits (insn2
, 5, 7) == 0)
7366 err
= thumb2_copy_table_branch (gdbarch
, insn1
, insn2
, regs
,
7369 /* PC is not allowed to use in load/store {dual, exclusive}
7371 err
= thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7372 "load/store dual/ex", dsc
);
7374 else /* load/store multiple */
7376 switch (bits (insn1
, 7, 8))
7378 case 0: case 3: /* SRS, RFE */
7379 err
= thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7382 case 1: case 2: /* LDM/STM/PUSH/POP */
7383 err
= thumb2_copy_block_xfer (gdbarch
, insn1
, insn2
, regs
, dsc
);
7390 /* Data-processing (shift register). */
7391 err
= thumb2_decode_dp_shift_reg (gdbarch
, insn1
, insn2
, regs
,
7394 default: /* Coprocessor instructions. */
7395 err
= thumb2_decode_svc_copro (gdbarch
, insn1
, insn2
, regs
, dsc
);
7400 case 2: /* op1 = 2 */
7401 if (op
) /* Branch and misc control. */
7403 if (bit (insn2
, 14) /* BLX/BL */
7404 || bit (insn2
, 12) /* Unconditional branch */
7405 || (bits (insn1
, 7, 9) != 0x7)) /* Conditional branch */
7406 err
= thumb2_copy_b_bl_blx (gdbarch
, insn1
, insn2
, regs
, dsc
);
7408 err
= thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7413 if (bit (insn1
, 9)) /* Data processing (plain binary imm). */
7415 int dp_op
= bits (insn1
, 4, 8);
7416 int rn
= bits (insn1
, 0, 3);
7417 if ((dp_op
== 0 || dp_op
== 0xa) && rn
== 0xf)
7418 err
= thumb_copy_pc_relative_32bit (gdbarch
, insn1
, insn2
,
7421 err
= thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7424 else /* Data processing (modified immeidate) */
7425 err
= thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7429 case 3: /* op1 = 3 */
7430 switch (bits (insn1
, 9, 10))
7434 err
= decode_thumb_32bit_ld_mem_hints (gdbarch
, insn1
, insn2
,
7436 else /* NEON Load/Store and Store single data item */
7437 err
= thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7438 "neon elt/struct load/store",
7441 case 1: /* op1 = 3, bits (9, 10) == 1 */
7442 switch (bits (insn1
, 7, 8))
7444 case 0: case 1: /* Data processing (register) */
7445 err
= thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7448 case 2: /* Multiply and absolute difference */
7449 err
= thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7450 "mul/mua/diff", dsc
);
7452 case 3: /* Long multiply and divide */
7453 err
= thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7458 default: /* Coprocessor instructions */
7459 err
= thumb2_decode_svc_copro (gdbarch
, insn1
, insn2
, regs
, dsc
);
7468 internal_error (__FILE__
, __LINE__
,
7469 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
7474 thumb_process_displaced_insn (struct gdbarch
*gdbarch
, CORE_ADDR from
,
7475 struct regcache
*regs
,
7476 arm_displaced_step_closure
*dsc
)
7478 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
7480 = read_memory_unsigned_integer (from
, 2, byte_order_for_code
);
7482 if (debug_displaced
)
7483 fprintf_unfiltered (gdb_stdlog
, "displaced: process thumb insn %.4x "
7484 "at %.8lx\n", insn1
, (unsigned long) from
);
7487 dsc
->insn_size
= thumb_insn_size (insn1
);
7488 if (thumb_insn_size (insn1
) == 4)
7491 = read_memory_unsigned_integer (from
+ 2, 2, byte_order_for_code
);
7492 thumb_process_displaced_32bit_insn (gdbarch
, insn1
, insn2
, regs
, dsc
);
7495 thumb_process_displaced_16bit_insn (gdbarch
, insn1
, regs
, dsc
);
7499 arm_process_displaced_insn (struct gdbarch
*gdbarch
, CORE_ADDR from
,
7500 CORE_ADDR to
, struct regcache
*regs
,
7501 arm_displaced_step_closure
*dsc
)
7504 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
7507 /* Most displaced instructions use a 1-instruction scratch space, so set this
7508 here and override below if/when necessary. */
7510 dsc
->insn_addr
= from
;
7511 dsc
->scratch_base
= to
;
7512 dsc
->cleanup
= NULL
;
7513 dsc
->wrote_to_pc
= 0;
7515 if (!displaced_in_arm_mode (regs
))
7516 return thumb_process_displaced_insn (gdbarch
, from
, regs
, dsc
);
7520 insn
= read_memory_unsigned_integer (from
, 4, byte_order_for_code
);
7521 if (debug_displaced
)
7522 fprintf_unfiltered (gdb_stdlog
, "displaced: stepping insn %.8lx "
7523 "at %.8lx\n", (unsigned long) insn
,
7524 (unsigned long) from
);
7526 if ((insn
& 0xf0000000) == 0xf0000000)
7527 err
= arm_decode_unconditional (gdbarch
, insn
, regs
, dsc
);
7528 else switch (((insn
& 0x10) >> 4) | ((insn
& 0xe000000) >> 24))
7530 case 0x0: case 0x1: case 0x2: case 0x3:
7531 err
= arm_decode_dp_misc (gdbarch
, insn
, regs
, dsc
);
7534 case 0x4: case 0x5: case 0x6:
7535 err
= arm_decode_ld_st_word_ubyte (gdbarch
, insn
, regs
, dsc
);
7539 err
= arm_decode_media (gdbarch
, insn
, dsc
);
7542 case 0x8: case 0x9: case 0xa: case 0xb:
7543 err
= arm_decode_b_bl_ldmstm (gdbarch
, insn
, regs
, dsc
);
7546 case 0xc: case 0xd: case 0xe: case 0xf:
7547 err
= arm_decode_svc_copro (gdbarch
, insn
, regs
, dsc
);
7552 internal_error (__FILE__
, __LINE__
,
7553 _("arm_process_displaced_insn: Instruction decode error"));
7556 /* Actually set up the scratch space for a displaced instruction. */
7559 arm_displaced_init_closure (struct gdbarch
*gdbarch
, CORE_ADDR from
,
7560 CORE_ADDR to
, arm_displaced_step_closure
*dsc
)
7562 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
7563 unsigned int i
, len
, offset
;
7564 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
7565 int size
= dsc
->is_thumb
? 2 : 4;
7566 const gdb_byte
*bkp_insn
;
7569 /* Poke modified instruction(s). */
7570 for (i
= 0; i
< dsc
->numinsns
; i
++)
7572 if (debug_displaced
)
7574 fprintf_unfiltered (gdb_stdlog
, "displaced: writing insn ");
7576 fprintf_unfiltered (gdb_stdlog
, "%.8lx",
7579 fprintf_unfiltered (gdb_stdlog
, "%.4x",
7580 (unsigned short)dsc
->modinsn
[i
]);
7582 fprintf_unfiltered (gdb_stdlog
, " at %.8lx\n",
7583 (unsigned long) to
+ offset
);
7586 write_memory_unsigned_integer (to
+ offset
, size
,
7587 byte_order_for_code
,
7592 /* Choose the correct breakpoint instruction. */
7595 bkp_insn
= tdep
->thumb_breakpoint
;
7596 len
= tdep
->thumb_breakpoint_size
;
7600 bkp_insn
= tdep
->arm_breakpoint
;
7601 len
= tdep
->arm_breakpoint_size
;
7604 /* Put breakpoint afterwards. */
7605 write_memory (to
+ offset
, bkp_insn
, len
);
7607 if (debug_displaced
)
7608 fprintf_unfiltered (gdb_stdlog
, "displaced: copy %s->%s: ",
7609 paddress (gdbarch
, from
), paddress (gdbarch
, to
));
7612 /* Entry point for cleaning things up after a displaced instruction has been
7616 arm_displaced_step_fixup (struct gdbarch
*gdbarch
,
7617 struct displaced_step_closure
*dsc_
,
7618 CORE_ADDR from
, CORE_ADDR to
,
7619 struct regcache
*regs
)
7621 arm_displaced_step_closure
*dsc
= (arm_displaced_step_closure
*) dsc_
;
7624 dsc
->cleanup (gdbarch
, regs
, dsc
);
7626 if (!dsc
->wrote_to_pc
)
7627 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
,
7628 dsc
->insn_addr
+ dsc
->insn_size
);
7632 #include "bfd-in2.h"
7633 #include "libcoff.h"
7636 gdb_print_insn_arm (bfd_vma memaddr
, disassemble_info
*info
)
7638 gdb_disassembler
*di
7639 = static_cast<gdb_disassembler
*>(info
->application_data
);
7640 struct gdbarch
*gdbarch
= di
->arch ();
7642 if (arm_pc_is_thumb (gdbarch
, memaddr
))
7644 static asymbol
*asym
;
7645 static combined_entry_type ce
;
7646 static struct coff_symbol_struct csym
;
7647 static struct bfd fake_bfd
;
7648 static bfd_target fake_target
;
7650 if (csym
.native
== NULL
)
7652 /* Create a fake symbol vector containing a Thumb symbol.
7653 This is solely so that the code in print_insn_little_arm()
7654 and print_insn_big_arm() in opcodes/arm-dis.c will detect
7655 the presence of a Thumb symbol and switch to decoding
7656 Thumb instructions. */
7658 fake_target
.flavour
= bfd_target_coff_flavour
;
7659 fake_bfd
.xvec
= &fake_target
;
7660 ce
.u
.syment
.n_sclass
= C_THUMBEXTFUNC
;
7662 csym
.symbol
.the_bfd
= &fake_bfd
;
7663 csym
.symbol
.name
= "fake";
7664 asym
= (asymbol
*) & csym
;
7667 memaddr
= UNMAKE_THUMB_ADDR (memaddr
);
7668 info
->symbols
= &asym
;
7671 info
->symbols
= NULL
;
7673 /* GDB is able to get bfd_mach from the exe_bfd, info->mach is
7674 accurate, so mark USER_SPECIFIED_MACHINE_TYPE bit. Otherwise,
7675 opcodes/arm-dis.c:print_insn reset info->mach, and it will trigger
7676 the assert on the mismatch of info->mach and bfd_get_mach (exec_bfd)
7677 in default_print_insn. */
7678 if (exec_bfd
!= NULL
)
7679 info
->flags
|= USER_SPECIFIED_MACHINE_TYPE
;
7681 return default_print_insn (memaddr
, info
);
7684 /* The following define instruction sequences that will cause ARM
7685 cpu's to take an undefined instruction trap. These are used to
7686 signal a breakpoint to GDB.
7688 The newer ARMv4T cpu's are capable of operating in ARM or Thumb
7689 modes. A different instruction is required for each mode. The ARM
7690 cpu's can also be big or little endian. Thus four different
7691 instructions are needed to support all cases.
7693 Note: ARMv4 defines several new instructions that will take the
7694 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
7695 not in fact add the new instructions. The new undefined
7696 instructions in ARMv4 are all instructions that had no defined
7697 behaviour in earlier chips. There is no guarantee that they will
7698 raise an exception, but may be treated as NOP's. In practice, it
7699 may only be safe to rely on instructions matching:
7701 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
7702 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
7703 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
7705 Even this may only true if the condition predicate is true. The
7706 following use a condition predicate of ALWAYS so it is always TRUE.
7708 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
7709 and NetBSD all use a software interrupt rather than an undefined
7710 instruction to force a trap. This can be handled by the
7711 abi-specific code during establishment of the gdbarch vector. */
7713 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
7714 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
7715 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
7716 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
7718 static const gdb_byte arm_default_arm_le_breakpoint
[] = ARM_LE_BREAKPOINT
;
7719 static const gdb_byte arm_default_arm_be_breakpoint
[] = ARM_BE_BREAKPOINT
;
7720 static const gdb_byte arm_default_thumb_le_breakpoint
[] = THUMB_LE_BREAKPOINT
;
7721 static const gdb_byte arm_default_thumb_be_breakpoint
[] = THUMB_BE_BREAKPOINT
;
7723 /* Implement the breakpoint_kind_from_pc gdbarch method. */
7726 arm_breakpoint_kind_from_pc (struct gdbarch
*gdbarch
, CORE_ADDR
*pcptr
)
7728 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
7729 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
7731 if (arm_pc_is_thumb (gdbarch
, *pcptr
))
7733 *pcptr
= UNMAKE_THUMB_ADDR (*pcptr
);
7735 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
7736 check whether we are replacing a 32-bit instruction. */
7737 if (tdep
->thumb2_breakpoint
!= NULL
)
7741 if (target_read_memory (*pcptr
, buf
, 2) == 0)
7743 unsigned short inst1
;
7745 inst1
= extract_unsigned_integer (buf
, 2, byte_order_for_code
);
7746 if (thumb_insn_size (inst1
) == 4)
7747 return ARM_BP_KIND_THUMB2
;
7751 return ARM_BP_KIND_THUMB
;
7754 return ARM_BP_KIND_ARM
;
7758 /* Implement the sw_breakpoint_from_kind gdbarch method. */
7760 static const gdb_byte
*
7761 arm_sw_breakpoint_from_kind (struct gdbarch
*gdbarch
, int kind
, int *size
)
7763 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
7767 case ARM_BP_KIND_ARM
:
7768 *size
= tdep
->arm_breakpoint_size
;
7769 return tdep
->arm_breakpoint
;
7770 case ARM_BP_KIND_THUMB
:
7771 *size
= tdep
->thumb_breakpoint_size
;
7772 return tdep
->thumb_breakpoint
;
7773 case ARM_BP_KIND_THUMB2
:
7774 *size
= tdep
->thumb2_breakpoint_size
;
7775 return tdep
->thumb2_breakpoint
;
7777 gdb_assert_not_reached ("unexpected arm breakpoint kind");
7781 /* Implement the breakpoint_kind_from_current_state gdbarch method. */
7784 arm_breakpoint_kind_from_current_state (struct gdbarch
*gdbarch
,
7785 struct regcache
*regcache
,
7790 /* Check the memory pointed by PC is readable. */
7791 if (target_read_memory (regcache_read_pc (regcache
), buf
, 4) == 0)
7793 struct arm_get_next_pcs next_pcs_ctx
;
7795 arm_get_next_pcs_ctor (&next_pcs_ctx
,
7796 &arm_get_next_pcs_ops
,
7797 gdbarch_byte_order (gdbarch
),
7798 gdbarch_byte_order_for_code (gdbarch
),
7802 std::vector
<CORE_ADDR
> next_pcs
= arm_get_next_pcs (&next_pcs_ctx
);
7804 /* If MEMADDR is the next instruction of current pc, do the
7805 software single step computation, and get the thumb mode by
7806 the destination address. */
7807 for (CORE_ADDR pc
: next_pcs
)
7809 if (UNMAKE_THUMB_ADDR (pc
) == *pcptr
)
7811 if (IS_THUMB_ADDR (pc
))
7813 *pcptr
= MAKE_THUMB_ADDR (*pcptr
);
7814 return arm_breakpoint_kind_from_pc (gdbarch
, pcptr
);
7817 return ARM_BP_KIND_ARM
;
7822 return arm_breakpoint_kind_from_pc (gdbarch
, pcptr
);
7825 /* Extract from an array REGBUF containing the (raw) register state a
7826 function return value of type TYPE, and copy that, in virtual
7827 format, into VALBUF. */
7830 arm_extract_return_value (struct type
*type
, struct regcache
*regs
,
7833 struct gdbarch
*gdbarch
= regs
->arch ();
7834 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
7836 if (TYPE_CODE_FLT
== TYPE_CODE (type
))
7838 switch (gdbarch_tdep (gdbarch
)->fp_model
)
7842 /* The value is in register F0 in internal format. We need to
7843 extract the raw value and then convert it to the desired
7845 bfd_byte tmpbuf
[ARM_FP_REGISTER_SIZE
];
7847 regs
->cooked_read (ARM_F0_REGNUM
, tmpbuf
);
7848 target_float_convert (tmpbuf
, arm_ext_type (gdbarch
),
7853 case ARM_FLOAT_SOFT_FPA
:
7854 case ARM_FLOAT_SOFT_VFP
:
7855 /* ARM_FLOAT_VFP can arise if this is a variadic function so
7856 not using the VFP ABI code. */
7858 regs
->cooked_read (ARM_A1_REGNUM
, valbuf
);
7859 if (TYPE_LENGTH (type
) > 4)
7860 regs
->cooked_read (ARM_A1_REGNUM
+ 1,
7861 valbuf
+ ARM_INT_REGISTER_SIZE
);
7865 internal_error (__FILE__
, __LINE__
,
7866 _("arm_extract_return_value: "
7867 "Floating point model not supported"));
7871 else if (TYPE_CODE (type
) == TYPE_CODE_INT
7872 || TYPE_CODE (type
) == TYPE_CODE_CHAR
7873 || TYPE_CODE (type
) == TYPE_CODE_BOOL
7874 || TYPE_CODE (type
) == TYPE_CODE_PTR
7875 || TYPE_IS_REFERENCE (type
)
7876 || TYPE_CODE (type
) == TYPE_CODE_ENUM
)
7878 /* If the type is a plain integer, then the access is
7879 straight-forward. Otherwise we have to play around a bit
7881 int len
= TYPE_LENGTH (type
);
7882 int regno
= ARM_A1_REGNUM
;
7887 /* By using store_unsigned_integer we avoid having to do
7888 anything special for small big-endian values. */
7889 regcache_cooked_read_unsigned (regs
, regno
++, &tmp
);
7890 store_unsigned_integer (valbuf
,
7891 (len
> ARM_INT_REGISTER_SIZE
7892 ? ARM_INT_REGISTER_SIZE
: len
),
7894 len
-= ARM_INT_REGISTER_SIZE
;
7895 valbuf
+= ARM_INT_REGISTER_SIZE
;
7900 /* For a structure or union the behaviour is as if the value had
7901 been stored to word-aligned memory and then loaded into
7902 registers with 32-bit load instruction(s). */
7903 int len
= TYPE_LENGTH (type
);
7904 int regno
= ARM_A1_REGNUM
;
7905 bfd_byte tmpbuf
[ARM_INT_REGISTER_SIZE
];
7909 regs
->cooked_read (regno
++, tmpbuf
);
7910 memcpy (valbuf
, tmpbuf
,
7911 len
> ARM_INT_REGISTER_SIZE
? ARM_INT_REGISTER_SIZE
: len
);
7912 len
-= ARM_INT_REGISTER_SIZE
;
7913 valbuf
+= ARM_INT_REGISTER_SIZE
;
7919 /* Will a function return an aggregate type in memory or in a
7920 register? Return 0 if an aggregate type can be returned in a
7921 register, 1 if it must be returned in memory. */
7924 arm_return_in_memory (struct gdbarch
*gdbarch
, struct type
*type
)
7926 enum type_code code
;
7928 type
= check_typedef (type
);
7930 /* Simple, non-aggregate types (ie not including vectors and
7931 complex) are always returned in a register (or registers). */
7932 code
= TYPE_CODE (type
);
7933 if (TYPE_CODE_STRUCT
!= code
&& TYPE_CODE_UNION
!= code
7934 && TYPE_CODE_ARRAY
!= code
&& TYPE_CODE_COMPLEX
!= code
)
7937 if (TYPE_CODE_ARRAY
== code
&& TYPE_VECTOR (type
))
7939 /* Vector values should be returned using ARM registers if they
7940 are not over 16 bytes. */
7941 return (TYPE_LENGTH (type
) > 16);
7944 if (gdbarch_tdep (gdbarch
)->arm_abi
!= ARM_ABI_APCS
)
7946 /* The AAPCS says all aggregates not larger than a word are returned
7948 if (TYPE_LENGTH (type
) <= ARM_INT_REGISTER_SIZE
)
7957 /* All aggregate types that won't fit in a register must be returned
7959 if (TYPE_LENGTH (type
) > ARM_INT_REGISTER_SIZE
)
7962 /* In the ARM ABI, "integer" like aggregate types are returned in
7963 registers. For an aggregate type to be integer like, its size
7964 must be less than or equal to ARM_INT_REGISTER_SIZE and the
7965 offset of each addressable subfield must be zero. Note that bit
7966 fields are not addressable, and all addressable subfields of
7967 unions always start at offset zero.
7969 This function is based on the behaviour of GCC 2.95.1.
7970 See: gcc/arm.c: arm_return_in_memory() for details.
7972 Note: All versions of GCC before GCC 2.95.2 do not set up the
7973 parameters correctly for a function returning the following
7974 structure: struct { float f;}; This should be returned in memory,
7975 not a register. Richard Earnshaw sent me a patch, but I do not
7976 know of any way to detect if a function like the above has been
7977 compiled with the correct calling convention. */
7979 /* Assume all other aggregate types can be returned in a register.
7980 Run a check for structures, unions and arrays. */
7983 if ((TYPE_CODE_STRUCT
== code
) || (TYPE_CODE_UNION
== code
))
7986 /* Need to check if this struct/union is "integer" like. For
7987 this to be true, its size must be less than or equal to
7988 ARM_INT_REGISTER_SIZE and the offset of each addressable
7989 subfield must be zero. Note that bit fields are not
7990 addressable, and unions always start at offset zero. If any
7991 of the subfields is a floating point type, the struct/union
7992 cannot be an integer type. */
7994 /* For each field in the object, check:
7995 1) Is it FP? --> yes, nRc = 1;
7996 2) Is it addressable (bitpos != 0) and
7997 not packed (bitsize == 0)?
8001 for (i
= 0; i
< TYPE_NFIELDS (type
); i
++)
8003 enum type_code field_type_code
;
8006 = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type
,
8009 /* Is it a floating point type field? */
8010 if (field_type_code
== TYPE_CODE_FLT
)
8016 /* If bitpos != 0, then we have to care about it. */
8017 if (TYPE_FIELD_BITPOS (type
, i
) != 0)
8019 /* Bitfields are not addressable. If the field bitsize is
8020 zero, then the field is not packed. Hence it cannot be
8021 a bitfield or any other packed type. */
8022 if (TYPE_FIELD_BITSIZE (type
, i
) == 0)
8035 /* Write into appropriate registers a function return value of type
8036 TYPE, given in virtual format. */
8039 arm_store_return_value (struct type
*type
, struct regcache
*regs
,
8040 const gdb_byte
*valbuf
)
8042 struct gdbarch
*gdbarch
= regs
->arch ();
8043 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
8045 if (TYPE_CODE (type
) == TYPE_CODE_FLT
)
8047 gdb_byte buf
[ARM_FP_REGISTER_SIZE
];
8049 switch (gdbarch_tdep (gdbarch
)->fp_model
)
8053 target_float_convert (valbuf
, type
, buf
, arm_ext_type (gdbarch
));
8054 regs
->cooked_write (ARM_F0_REGNUM
, buf
);
8057 case ARM_FLOAT_SOFT_FPA
:
8058 case ARM_FLOAT_SOFT_VFP
:
8059 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8060 not using the VFP ABI code. */
8062 regs
->cooked_write (ARM_A1_REGNUM
, valbuf
);
8063 if (TYPE_LENGTH (type
) > 4)
8064 regs
->cooked_write (ARM_A1_REGNUM
+ 1,
8065 valbuf
+ ARM_INT_REGISTER_SIZE
);
8069 internal_error (__FILE__
, __LINE__
,
8070 _("arm_store_return_value: Floating "
8071 "point model not supported"));
8075 else if (TYPE_CODE (type
) == TYPE_CODE_INT
8076 || TYPE_CODE (type
) == TYPE_CODE_CHAR
8077 || TYPE_CODE (type
) == TYPE_CODE_BOOL
8078 || TYPE_CODE (type
) == TYPE_CODE_PTR
8079 || TYPE_IS_REFERENCE (type
)
8080 || TYPE_CODE (type
) == TYPE_CODE_ENUM
)
8082 if (TYPE_LENGTH (type
) <= 4)
8084 /* Values of one word or less are zero/sign-extended and
8086 bfd_byte tmpbuf
[ARM_INT_REGISTER_SIZE
];
8087 LONGEST val
= unpack_long (type
, valbuf
);
8089 store_signed_integer (tmpbuf
, ARM_INT_REGISTER_SIZE
, byte_order
, val
);
8090 regs
->cooked_write (ARM_A1_REGNUM
, tmpbuf
);
8094 /* Integral values greater than one word are stored in consecutive
8095 registers starting with r0. This will always be a multiple of
8096 the regiser size. */
8097 int len
= TYPE_LENGTH (type
);
8098 int regno
= ARM_A1_REGNUM
;
8102 regs
->cooked_write (regno
++, valbuf
);
8103 len
-= ARM_INT_REGISTER_SIZE
;
8104 valbuf
+= ARM_INT_REGISTER_SIZE
;
8110 /* For a structure or union the behaviour is as if the value had
8111 been stored to word-aligned memory and then loaded into
8112 registers with 32-bit load instruction(s). */
8113 int len
= TYPE_LENGTH (type
);
8114 int regno
= ARM_A1_REGNUM
;
8115 bfd_byte tmpbuf
[ARM_INT_REGISTER_SIZE
];
8119 memcpy (tmpbuf
, valbuf
,
8120 len
> ARM_INT_REGISTER_SIZE
? ARM_INT_REGISTER_SIZE
: len
);
8121 regs
->cooked_write (regno
++, tmpbuf
);
8122 len
-= ARM_INT_REGISTER_SIZE
;
8123 valbuf
+= ARM_INT_REGISTER_SIZE
;
8129 /* Handle function return values. */
8131 static enum return_value_convention
8132 arm_return_value (struct gdbarch
*gdbarch
, struct value
*function
,
8133 struct type
*valtype
, struct regcache
*regcache
,
8134 gdb_byte
*readbuf
, const gdb_byte
*writebuf
)
8136 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
8137 struct type
*func_type
= function
? value_type (function
) : NULL
;
8138 enum arm_vfp_cprc_base_type vfp_base_type
;
8141 if (arm_vfp_abi_for_function (gdbarch
, func_type
)
8142 && arm_vfp_call_candidate (valtype
, &vfp_base_type
, &vfp_base_count
))
8144 int reg_char
= arm_vfp_cprc_reg_char (vfp_base_type
);
8145 int unit_length
= arm_vfp_cprc_unit_length (vfp_base_type
);
8147 for (i
= 0; i
< vfp_base_count
; i
++)
8149 if (reg_char
== 'q')
8152 arm_neon_quad_write (gdbarch
, regcache
, i
,
8153 writebuf
+ i
* unit_length
);
8156 arm_neon_quad_read (gdbarch
, regcache
, i
,
8157 readbuf
+ i
* unit_length
);
8164 xsnprintf (name_buf
, sizeof (name_buf
), "%c%d", reg_char
, i
);
8165 regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
8168 regcache
->cooked_write (regnum
, writebuf
+ i
* unit_length
);
8170 regcache
->cooked_read (regnum
, readbuf
+ i
* unit_length
);
8173 return RETURN_VALUE_REGISTER_CONVENTION
;
8176 if (TYPE_CODE (valtype
) == TYPE_CODE_STRUCT
8177 || TYPE_CODE (valtype
) == TYPE_CODE_UNION
8178 || TYPE_CODE (valtype
) == TYPE_CODE_ARRAY
)
8180 if (tdep
->struct_return
== pcc_struct_return
8181 || arm_return_in_memory (gdbarch
, valtype
))
8182 return RETURN_VALUE_STRUCT_CONVENTION
;
8184 else if (TYPE_CODE (valtype
) == TYPE_CODE_COMPLEX
)
8186 if (arm_return_in_memory (gdbarch
, valtype
))
8187 return RETURN_VALUE_STRUCT_CONVENTION
;
8191 arm_store_return_value (valtype
, regcache
, writebuf
);
8194 arm_extract_return_value (valtype
, regcache
, readbuf
);
8196 return RETURN_VALUE_REGISTER_CONVENTION
;
8201 arm_get_longjmp_target (struct frame_info
*frame
, CORE_ADDR
*pc
)
8203 struct gdbarch
*gdbarch
= get_frame_arch (frame
);
8204 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
8205 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
8207 gdb_byte buf
[ARM_INT_REGISTER_SIZE
];
8209 jb_addr
= get_frame_register_unsigned (frame
, ARM_A1_REGNUM
);
8211 if (target_read_memory (jb_addr
+ tdep
->jb_pc
* tdep
->jb_elt_size
, buf
,
8212 ARM_INT_REGISTER_SIZE
))
8215 *pc
= extract_unsigned_integer (buf
, ARM_INT_REGISTER_SIZE
, byte_order
);
8219 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
8220 return the target PC. Otherwise return 0. */
8223 arm_skip_stub (struct frame_info
*frame
, CORE_ADDR pc
)
8227 CORE_ADDR start_addr
;
8229 /* Find the starting address and name of the function containing the PC. */
8230 if (find_pc_partial_function (pc
, &name
, &start_addr
, NULL
) == 0)
8232 /* Trampoline 'bx reg' doesn't belong to any functions. Do the
8234 start_addr
= arm_skip_bx_reg (frame
, pc
);
8235 if (start_addr
!= 0)
8241 /* If PC is in a Thumb call or return stub, return the address of the
8242 target PC, which is in a register. The thunk functions are called
8243 _call_via_xx, where x is the register name. The possible names
8244 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
8245 functions, named __ARM_call_via_r[0-7]. */
8246 if (startswith (name
, "_call_via_")
8247 || startswith (name
, "__ARM_call_via_"))
8249 /* Use the name suffix to determine which register contains the
8251 static const char *table
[15] =
8252 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
8253 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
8256 int offset
= strlen (name
) - 2;
8258 for (regno
= 0; regno
<= 14; regno
++)
8259 if (strcmp (&name
[offset
], table
[regno
]) == 0)
8260 return get_frame_register_unsigned (frame
, regno
);
8263 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
8264 non-interworking calls to foo. We could decode the stubs
8265 to find the target but it's easier to use the symbol table. */
8266 namelen
= strlen (name
);
8267 if (name
[0] == '_' && name
[1] == '_'
8268 && ((namelen
> 2 + strlen ("_from_thumb")
8269 && startswith (name
+ namelen
- strlen ("_from_thumb"), "_from_thumb"))
8270 || (namelen
> 2 + strlen ("_from_arm")
8271 && startswith (name
+ namelen
- strlen ("_from_arm"), "_from_arm"))))
8274 int target_len
= namelen
- 2;
8275 struct bound_minimal_symbol minsym
;
8276 struct objfile
*objfile
;
8277 struct obj_section
*sec
;
8279 if (name
[namelen
- 1] == 'b')
8280 target_len
-= strlen ("_from_thumb");
8282 target_len
-= strlen ("_from_arm");
8284 target_name
= (char *) alloca (target_len
+ 1);
8285 memcpy (target_name
, name
+ 2, target_len
);
8286 target_name
[target_len
] = '\0';
8288 sec
= find_pc_section (pc
);
8289 objfile
= (sec
== NULL
) ? NULL
: sec
->objfile
;
8290 minsym
= lookup_minimal_symbol (target_name
, NULL
, objfile
);
8291 if (minsym
.minsym
!= NULL
)
8292 return BMSYMBOL_VALUE_ADDRESS (minsym
);
8297 return 0; /* not a stub */
8301 set_arm_command (const char *args
, int from_tty
)
8303 printf_unfiltered (_("\
8304 \"set arm\" must be followed by an apporpriate subcommand.\n"));
8305 help_list (setarmcmdlist
, "set arm ", all_commands
, gdb_stdout
);
8309 show_arm_command (const char *args
, int from_tty
)
8311 cmd_show_list (showarmcmdlist
, from_tty
, "");
8315 arm_update_current_architecture (void)
8317 struct gdbarch_info info
;
8319 /* If the current architecture is not ARM, we have nothing to do. */
8320 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch
!= bfd_arch_arm
)
8323 /* Update the architecture. */
8324 gdbarch_info_init (&info
);
8326 if (!gdbarch_update_p (info
))
8327 internal_error (__FILE__
, __LINE__
, _("could not update architecture"));
8331 set_fp_model_sfunc (const char *args
, int from_tty
,
8332 struct cmd_list_element
*c
)
8336 for (fp_model
= ARM_FLOAT_AUTO
; fp_model
!= ARM_FLOAT_LAST
; fp_model
++)
8337 if (strcmp (current_fp_model
, fp_model_strings
[fp_model
]) == 0)
8339 arm_fp_model
= (enum arm_float_model
) fp_model
;
8343 if (fp_model
== ARM_FLOAT_LAST
)
8344 internal_error (__FILE__
, __LINE__
, _("Invalid fp model accepted: %s."),
8347 arm_update_current_architecture ();
8351 show_fp_model (struct ui_file
*file
, int from_tty
,
8352 struct cmd_list_element
*c
, const char *value
)
8354 struct gdbarch_tdep
*tdep
= gdbarch_tdep (target_gdbarch ());
8356 if (arm_fp_model
== ARM_FLOAT_AUTO
8357 && gdbarch_bfd_arch_info (target_gdbarch ())->arch
== bfd_arch_arm
)
8358 fprintf_filtered (file
, _("\
8359 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
8360 fp_model_strings
[tdep
->fp_model
]);
8362 fprintf_filtered (file
, _("\
8363 The current ARM floating point model is \"%s\".\n"),
8364 fp_model_strings
[arm_fp_model
]);
8368 arm_set_abi (const char *args
, int from_tty
,
8369 struct cmd_list_element
*c
)
8373 for (arm_abi
= ARM_ABI_AUTO
; arm_abi
!= ARM_ABI_LAST
; arm_abi
++)
8374 if (strcmp (arm_abi_string
, arm_abi_strings
[arm_abi
]) == 0)
8376 arm_abi_global
= (enum arm_abi_kind
) arm_abi
;
8380 if (arm_abi
== ARM_ABI_LAST
)
8381 internal_error (__FILE__
, __LINE__
, _("Invalid ABI accepted: %s."),
8384 arm_update_current_architecture ();
8388 arm_show_abi (struct ui_file
*file
, int from_tty
,
8389 struct cmd_list_element
*c
, const char *value
)
8391 struct gdbarch_tdep
*tdep
= gdbarch_tdep (target_gdbarch ());
8393 if (arm_abi_global
== ARM_ABI_AUTO
8394 && gdbarch_bfd_arch_info (target_gdbarch ())->arch
== bfd_arch_arm
)
8395 fprintf_filtered (file
, _("\
8396 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
8397 arm_abi_strings
[tdep
->arm_abi
]);
8399 fprintf_filtered (file
, _("The current ARM ABI is \"%s\".\n"),
8404 arm_show_fallback_mode (struct ui_file
*file
, int from_tty
,
8405 struct cmd_list_element
*c
, const char *value
)
8407 fprintf_filtered (file
,
8408 _("The current execution mode assumed "
8409 "(when symbols are unavailable) is \"%s\".\n"),
8410 arm_fallback_mode_string
);
8414 arm_show_force_mode (struct ui_file
*file
, int from_tty
,
8415 struct cmd_list_element
*c
, const char *value
)
8417 fprintf_filtered (file
,
8418 _("The current execution mode assumed "
8419 "(even when symbols are available) is \"%s\".\n"),
8420 arm_force_mode_string
);
8423 /* If the user changes the register disassembly style used for info
8424 register and other commands, we have to also switch the style used
8425 in opcodes for disassembly output. This function is run in the "set
8426 arm disassembly" command, and does that. */
8429 set_disassembly_style_sfunc (const char *args
, int from_tty
,
8430 struct cmd_list_element
*c
)
8432 /* Convert the short style name into the long style name (eg, reg-names-*)
8433 before calling the generic set_disassembler_options() function. */
8434 std::string long_name
= std::string ("reg-names-") + disassembly_style
;
8435 set_disassembler_options (&long_name
[0]);
8439 show_disassembly_style_sfunc (struct ui_file
*file
, int from_tty
,
8440 struct cmd_list_element
*c
, const char *value
)
8442 struct gdbarch
*gdbarch
= get_current_arch ();
8443 char *options
= get_disassembler_options (gdbarch
);
8444 const char *style
= "";
8448 FOR_EACH_DISASSEMBLER_OPTION (opt
, options
)
8449 if (CONST_STRNEQ (opt
, "reg-names-"))
8451 style
= &opt
[strlen ("reg-names-")];
8452 len
= strcspn (style
, ",");
8455 fprintf_unfiltered (file
, "The disassembly style is \"%.*s\".\n", len
, style
);
8458 /* Return the ARM register name corresponding to register I. */
8460 arm_register_name (struct gdbarch
*gdbarch
, int i
)
8462 const int num_regs
= gdbarch_num_regs (gdbarch
);
8464 if (gdbarch_tdep (gdbarch
)->have_vfp_pseudos
8465 && i
>= num_regs
&& i
< num_regs
+ 32)
8467 static const char *const vfp_pseudo_names
[] = {
8468 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
8469 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
8470 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
8471 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
8474 return vfp_pseudo_names
[i
- num_regs
];
8477 if (gdbarch_tdep (gdbarch
)->have_neon_pseudos
8478 && i
>= num_regs
+ 32 && i
< num_regs
+ 32 + 16)
8480 static const char *const neon_pseudo_names
[] = {
8481 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
8482 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
8485 return neon_pseudo_names
[i
- num_regs
- 32];
8488 if (i
>= ARRAY_SIZE (arm_register_names
))
8489 /* These registers are only supported on targets which supply
8490 an XML description. */
8493 return arm_register_names
[i
];
8496 /* Test whether the coff symbol specific value corresponds to a Thumb
8500 coff_sym_is_thumb (int val
)
8502 return (val
== C_THUMBEXT
8503 || val
== C_THUMBSTAT
8504 || val
== C_THUMBEXTFUNC
8505 || val
== C_THUMBSTATFUNC
8506 || val
== C_THUMBLABEL
);
8509 /* arm_coff_make_msymbol_special()
8510 arm_elf_make_msymbol_special()
8512 These functions test whether the COFF or ELF symbol corresponds to
8513 an address in thumb code, and set a "special" bit in a minimal
8514 symbol to indicate that it does. */
8517 arm_elf_make_msymbol_special(asymbol
*sym
, struct minimal_symbol
*msym
)
8519 elf_symbol_type
*elfsym
= (elf_symbol_type
*) sym
;
8521 if (ARM_GET_SYM_BRANCH_TYPE (elfsym
->internal_elf_sym
.st_target_internal
)
8522 == ST_BRANCH_TO_THUMB
)
8523 MSYMBOL_SET_SPECIAL (msym
);
8527 arm_coff_make_msymbol_special(int val
, struct minimal_symbol
*msym
)
8529 if (coff_sym_is_thumb (val
))
8530 MSYMBOL_SET_SPECIAL (msym
);
8534 arm_record_special_symbol (struct gdbarch
*gdbarch
, struct objfile
*objfile
,
8537 const char *name
= bfd_asymbol_name (sym
);
8538 struct arm_per_objfile
*data
;
8539 struct arm_mapping_symbol new_map_sym
;
8541 gdb_assert (name
[0] == '$');
8542 if (name
[1] != 'a' && name
[1] != 't' && name
[1] != 'd')
8545 data
= arm_objfile_data_key
.get (objfile
);
8547 data
= arm_objfile_data_key
.emplace (objfile
,
8548 objfile
->obfd
->section_count
);
8549 arm_mapping_symbol_vec
&map
8550 = data
->section_maps
[bfd_get_section (sym
)->index
];
8552 new_map_sym
.value
= sym
->value
;
8553 new_map_sym
.type
= name
[1];
8555 /* Insert at the end, the vector will be sorted on first use. */
8556 map
.push_back (new_map_sym
);
8560 arm_write_pc (struct regcache
*regcache
, CORE_ADDR pc
)
8562 struct gdbarch
*gdbarch
= regcache
->arch ();
8563 regcache_cooked_write_unsigned (regcache
, ARM_PC_REGNUM
, pc
);
8565 /* If necessary, set the T bit. */
8568 ULONGEST val
, t_bit
;
8569 regcache_cooked_read_unsigned (regcache
, ARM_PS_REGNUM
, &val
);
8570 t_bit
= arm_psr_thumb_bit (gdbarch
);
8571 if (arm_pc_is_thumb (gdbarch
, pc
))
8572 regcache_cooked_write_unsigned (regcache
, ARM_PS_REGNUM
,
8575 regcache_cooked_write_unsigned (regcache
, ARM_PS_REGNUM
,
8580 /* Read the contents of a NEON quad register, by reading from two
8581 double registers. This is used to implement the quad pseudo
8582 registers, and for argument passing in case the quad registers are
8583 missing; vectors are passed in quad registers when using the VFP
8584 ABI, even if a NEON unit is not present. REGNUM is the index of
8585 the quad register, in [0, 15]. */
8587 static enum register_status
8588 arm_neon_quad_read (struct gdbarch
*gdbarch
, readable_regcache
*regcache
,
8589 int regnum
, gdb_byte
*buf
)
8592 gdb_byte reg_buf
[8];
8593 int offset
, double_regnum
;
8594 enum register_status status
;
8596 xsnprintf (name_buf
, sizeof (name_buf
), "d%d", regnum
<< 1);
8597 double_regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
8600 /* d0 is always the least significant half of q0. */
8601 if (gdbarch_byte_order (gdbarch
) == BFD_ENDIAN_BIG
)
8606 status
= regcache
->raw_read (double_regnum
, reg_buf
);
8607 if (status
!= REG_VALID
)
8609 memcpy (buf
+ offset
, reg_buf
, 8);
8611 offset
= 8 - offset
;
8612 status
= regcache
->raw_read (double_regnum
+ 1, reg_buf
);
8613 if (status
!= REG_VALID
)
8615 memcpy (buf
+ offset
, reg_buf
, 8);
8620 static enum register_status
8621 arm_pseudo_read (struct gdbarch
*gdbarch
, readable_regcache
*regcache
,
8622 int regnum
, gdb_byte
*buf
)
8624 const int num_regs
= gdbarch_num_regs (gdbarch
);
8626 gdb_byte reg_buf
[8];
8627 int offset
, double_regnum
;
8629 gdb_assert (regnum
>= num_regs
);
8632 if (gdbarch_tdep (gdbarch
)->have_neon_pseudos
&& regnum
>= 32 && regnum
< 48)
8633 /* Quad-precision register. */
8634 return arm_neon_quad_read (gdbarch
, regcache
, regnum
- 32, buf
);
8637 enum register_status status
;
8639 /* Single-precision register. */
8640 gdb_assert (regnum
< 32);
8642 /* s0 is always the least significant half of d0. */
8643 if (gdbarch_byte_order (gdbarch
) == BFD_ENDIAN_BIG
)
8644 offset
= (regnum
& 1) ? 0 : 4;
8646 offset
= (regnum
& 1) ? 4 : 0;
8648 xsnprintf (name_buf
, sizeof (name_buf
), "d%d", regnum
>> 1);
8649 double_regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
8652 status
= regcache
->raw_read (double_regnum
, reg_buf
);
8653 if (status
== REG_VALID
)
8654 memcpy (buf
, reg_buf
+ offset
, 4);
8659 /* Store the contents of BUF to a NEON quad register, by writing to
8660 two double registers. This is used to implement the quad pseudo
8661 registers, and for argument passing in case the quad registers are
8662 missing; vectors are passed in quad registers when using the VFP
8663 ABI, even if a NEON unit is not present. REGNUM is the index
8664 of the quad register, in [0, 15]. */
8667 arm_neon_quad_write (struct gdbarch
*gdbarch
, struct regcache
*regcache
,
8668 int regnum
, const gdb_byte
*buf
)
8671 int offset
, double_regnum
;
8673 xsnprintf (name_buf
, sizeof (name_buf
), "d%d", regnum
<< 1);
8674 double_regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
8677 /* d0 is always the least significant half of q0. */
8678 if (gdbarch_byte_order (gdbarch
) == BFD_ENDIAN_BIG
)
8683 regcache
->raw_write (double_regnum
, buf
+ offset
);
8684 offset
= 8 - offset
;
8685 regcache
->raw_write (double_regnum
+ 1, buf
+ offset
);
8689 arm_pseudo_write (struct gdbarch
*gdbarch
, struct regcache
*regcache
,
8690 int regnum
, const gdb_byte
*buf
)
8692 const int num_regs
= gdbarch_num_regs (gdbarch
);
8694 gdb_byte reg_buf
[8];
8695 int offset
, double_regnum
;
8697 gdb_assert (regnum
>= num_regs
);
8700 if (gdbarch_tdep (gdbarch
)->have_neon_pseudos
&& regnum
>= 32 && regnum
< 48)
8701 /* Quad-precision register. */
8702 arm_neon_quad_write (gdbarch
, regcache
, regnum
- 32, buf
);
8705 /* Single-precision register. */
8706 gdb_assert (regnum
< 32);
8708 /* s0 is always the least significant half of d0. */
8709 if (gdbarch_byte_order (gdbarch
) == BFD_ENDIAN_BIG
)
8710 offset
= (regnum
& 1) ? 0 : 4;
8712 offset
= (regnum
& 1) ? 4 : 0;
8714 xsnprintf (name_buf
, sizeof (name_buf
), "d%d", regnum
>> 1);
8715 double_regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
8718 regcache
->raw_read (double_regnum
, reg_buf
);
8719 memcpy (reg_buf
+ offset
, buf
, 4);
8720 regcache
->raw_write (double_regnum
, reg_buf
);
8724 static struct value
*
8725 value_of_arm_user_reg (struct frame_info
*frame
, const void *baton
)
8727 const int *reg_p
= (const int *) baton
;
8728 return value_of_register (*reg_p
, frame
);
8731 static enum gdb_osabi
8732 arm_elf_osabi_sniffer (bfd
*abfd
)
8734 unsigned int elfosabi
;
8735 enum gdb_osabi osabi
= GDB_OSABI_UNKNOWN
;
8737 elfosabi
= elf_elfheader (abfd
)->e_ident
[EI_OSABI
];
8739 if (elfosabi
== ELFOSABI_ARM
)
8740 /* GNU tools use this value. Check note sections in this case,
8742 bfd_map_over_sections (abfd
,
8743 generic_elf_osabi_sniff_abi_tag_sections
,
8746 /* Anything else will be handled by the generic ELF sniffer. */
8751 arm_register_reggroup_p (struct gdbarch
*gdbarch
, int regnum
,
8752 struct reggroup
*group
)
8754 /* FPS register's type is INT, but belongs to float_reggroup. Beside
8755 this, FPS register belongs to save_regroup, restore_reggroup, and
8756 all_reggroup, of course. */
8757 if (regnum
== ARM_FPS_REGNUM
)
8758 return (group
== float_reggroup
8759 || group
== save_reggroup
8760 || group
== restore_reggroup
8761 || group
== all_reggroup
);
8763 return default_register_reggroup_p (gdbarch
, regnum
, group
);
8767 /* For backward-compatibility we allow two 'g' packet lengths with
8768 the remote protocol depending on whether FPA registers are
8769 supplied. M-profile targets do not have FPA registers, but some
8770 stubs already exist in the wild which use a 'g' packet which
8771 supplies them albeit with dummy values. The packet format which
8772 includes FPA registers should be considered deprecated for
8773 M-profile targets. */
8776 arm_register_g_packet_guesses (struct gdbarch
*gdbarch
)
8778 if (gdbarch_tdep (gdbarch
)->is_m
)
8780 /* If we know from the executable this is an M-profile target,
8781 cater for remote targets whose register set layout is the
8782 same as the FPA layout. */
8783 register_remote_g_packet_guess (gdbarch
,
8784 ARM_CORE_REGS_SIZE
+ ARM_FP_REGS_SIZE
,
8785 tdesc_arm_with_m_fpa_layout
);
8787 /* The regular M-profile layout. */
8788 register_remote_g_packet_guess (gdbarch
, ARM_CORE_REGS_SIZE
,
8791 /* M-profile plus M4F VFP. */
8792 register_remote_g_packet_guess (gdbarch
,
8793 ARM_CORE_REGS_SIZE
+ ARM_VFP2_REGS_SIZE
,
8794 tdesc_arm_with_m_vfp_d16
);
8797 /* Otherwise we don't have a useful guess. */
8800 /* Implement the code_of_frame_writable gdbarch method. */
8803 arm_code_of_frame_writable (struct gdbarch
*gdbarch
, struct frame_info
*frame
)
8805 if (gdbarch_tdep (gdbarch
)->is_m
8806 && get_frame_type (frame
) == SIGTRAMP_FRAME
)
8808 /* M-profile exception frames return to some magic PCs, where
8809 isn't writable at all. */
8816 /* Implement gdbarch_gnu_triplet_regexp. If the arch name is arm then allow it
8817 to be postfixed by a version (eg armv7hl). */
8820 arm_gnu_triplet_regexp (struct gdbarch
*gdbarch
)
8822 if (strcmp (gdbarch_bfd_arch_info (gdbarch
)->arch_name
, "arm") == 0)
8823 return "arm(v[^- ]*)?";
8824 return gdbarch_bfd_arch_info (gdbarch
)->arch_name
;
8827 /* Initialize the current architecture based on INFO. If possible,
8828 re-use an architecture from ARCHES, which is a list of
8829 architectures already created during this debugging session.
8831 Called e.g. at program startup, when reading a core file, and when
8832 reading a binary file. */
8834 static struct gdbarch
*
8835 arm_gdbarch_init (struct gdbarch_info info
, struct gdbarch_list
*arches
)
8837 struct gdbarch_tdep
*tdep
;
8838 struct gdbarch
*gdbarch
;
8839 struct gdbarch_list
*best_arch
;
8840 enum arm_abi_kind arm_abi
= arm_abi_global
;
8841 enum arm_float_model fp_model
= arm_fp_model
;
8842 struct tdesc_arch_data
*tdesc_data
= NULL
;
8844 int vfp_register_count
= 0, have_vfp_pseudos
= 0, have_neon_pseudos
= 0;
8845 int have_wmmx_registers
= 0;
8847 int have_fpa_registers
= 1;
8848 const struct target_desc
*tdesc
= info
.target_desc
;
8850 /* If we have an object to base this architecture on, try to determine
8853 if (arm_abi
== ARM_ABI_AUTO
&& info
.abfd
!= NULL
)
8855 int ei_osabi
, e_flags
;
8857 switch (bfd_get_flavour (info
.abfd
))
8859 case bfd_target_coff_flavour
:
8860 /* Assume it's an old APCS-style ABI. */
8862 arm_abi
= ARM_ABI_APCS
;
8865 case bfd_target_elf_flavour
:
8866 ei_osabi
= elf_elfheader (info
.abfd
)->e_ident
[EI_OSABI
];
8867 e_flags
= elf_elfheader (info
.abfd
)->e_flags
;
8869 if (ei_osabi
== ELFOSABI_ARM
)
8871 /* GNU tools used to use this value, but do not for EABI
8872 objects. There's nowhere to tag an EABI version
8873 anyway, so assume APCS. */
8874 arm_abi
= ARM_ABI_APCS
;
8876 else if (ei_osabi
== ELFOSABI_NONE
|| ei_osabi
== ELFOSABI_GNU
)
8878 int eabi_ver
= EF_ARM_EABI_VERSION (e_flags
);
8882 case EF_ARM_EABI_UNKNOWN
:
8883 /* Assume GNU tools. */
8884 arm_abi
= ARM_ABI_APCS
;
8887 case EF_ARM_EABI_VER4
:
8888 case EF_ARM_EABI_VER5
:
8889 arm_abi
= ARM_ABI_AAPCS
;
8890 /* EABI binaries default to VFP float ordering.
8891 They may also contain build attributes that can
8892 be used to identify if the VFP argument-passing
8894 if (fp_model
== ARM_FLOAT_AUTO
)
8897 switch (bfd_elf_get_obj_attr_int (info
.abfd
,
8901 case AEABI_VFP_args_base
:
8902 /* "The user intended FP parameter/result
8903 passing to conform to AAPCS, base
8905 fp_model
= ARM_FLOAT_SOFT_VFP
;
8907 case AEABI_VFP_args_vfp
:
8908 /* "The user intended FP parameter/result
8909 passing to conform to AAPCS, VFP
8911 fp_model
= ARM_FLOAT_VFP
;
8913 case AEABI_VFP_args_toolchain
:
8914 /* "The user intended FP parameter/result
8915 passing to conform to tool chain-specific
8916 conventions" - we don't know any such
8917 conventions, so leave it as "auto". */
8919 case AEABI_VFP_args_compatible
:
8920 /* "Code is compatible with both the base
8921 and VFP variants; the user did not permit
8922 non-variadic functions to pass FP
8923 parameters/results" - leave it as
8927 /* Attribute value not mentioned in the
8928 November 2012 ABI, so leave it as
8933 fp_model
= ARM_FLOAT_SOFT_VFP
;
8939 /* Leave it as "auto". */
8940 warning (_("unknown ARM EABI version 0x%x"), eabi_ver
);
8945 /* Detect M-profile programs. This only works if the
8946 executable file includes build attributes; GCC does
8947 copy them to the executable, but e.g. RealView does
8950 = bfd_elf_get_obj_attr_int (info
.abfd
, OBJ_ATTR_PROC
,
8953 = bfd_elf_get_obj_attr_int (info
.abfd
, OBJ_ATTR_PROC
,
8954 Tag_CPU_arch_profile
);
8956 /* GCC specifies the profile for v6-M; RealView only
8957 specifies the profile for architectures starting with
8958 V7 (as opposed to architectures with a tag
8959 numerically greater than TAG_CPU_ARCH_V7). */
8960 if (!tdesc_has_registers (tdesc
)
8961 && (attr_arch
== TAG_CPU_ARCH_V6_M
8962 || attr_arch
== TAG_CPU_ARCH_V6S_M
8963 || attr_profile
== 'M'))
8968 if (fp_model
== ARM_FLOAT_AUTO
)
8970 switch (e_flags
& (EF_ARM_SOFT_FLOAT
| EF_ARM_VFP_FLOAT
))
8973 /* Leave it as "auto". Strictly speaking this case
8974 means FPA, but almost nobody uses that now, and
8975 many toolchains fail to set the appropriate bits
8976 for the floating-point model they use. */
8978 case EF_ARM_SOFT_FLOAT
:
8979 fp_model
= ARM_FLOAT_SOFT_FPA
;
8981 case EF_ARM_VFP_FLOAT
:
8982 fp_model
= ARM_FLOAT_VFP
;
8984 case EF_ARM_SOFT_FLOAT
| EF_ARM_VFP_FLOAT
:
8985 fp_model
= ARM_FLOAT_SOFT_VFP
;
8990 if (e_flags
& EF_ARM_BE8
)
8991 info
.byte_order_for_code
= BFD_ENDIAN_LITTLE
;
8996 /* Leave it as "auto". */
9001 /* Check any target description for validity. */
9002 if (tdesc_has_registers (tdesc
))
9004 /* For most registers we require GDB's default names; but also allow
9005 the numeric names for sp / lr / pc, as a convenience. */
9006 static const char *const arm_sp_names
[] = { "r13", "sp", NULL
};
9007 static const char *const arm_lr_names
[] = { "r14", "lr", NULL
};
9008 static const char *const arm_pc_names
[] = { "r15", "pc", NULL
};
9010 const struct tdesc_feature
*feature
;
9013 feature
= tdesc_find_feature (tdesc
,
9014 "org.gnu.gdb.arm.core");
9015 if (feature
== NULL
)
9017 feature
= tdesc_find_feature (tdesc
,
9018 "org.gnu.gdb.arm.m-profile");
9019 if (feature
== NULL
)
9025 tdesc_data
= tdesc_data_alloc ();
9028 for (i
= 0; i
< ARM_SP_REGNUM
; i
++)
9029 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
, i
,
9030 arm_register_names
[i
]);
9031 valid_p
&= tdesc_numbered_register_choices (feature
, tdesc_data
,
9034 valid_p
&= tdesc_numbered_register_choices (feature
, tdesc_data
,
9037 valid_p
&= tdesc_numbered_register_choices (feature
, tdesc_data
,
9041 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
,
9042 ARM_PS_REGNUM
, "xpsr");
9044 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
,
9045 ARM_PS_REGNUM
, "cpsr");
9049 tdesc_data_cleanup (tdesc_data
);
9053 feature
= tdesc_find_feature (tdesc
,
9054 "org.gnu.gdb.arm.fpa");
9055 if (feature
!= NULL
)
9058 for (i
= ARM_F0_REGNUM
; i
<= ARM_FPS_REGNUM
; i
++)
9059 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
, i
,
9060 arm_register_names
[i
]);
9063 tdesc_data_cleanup (tdesc_data
);
9068 have_fpa_registers
= 0;
9070 feature
= tdesc_find_feature (tdesc
,
9071 "org.gnu.gdb.xscale.iwmmxt");
9072 if (feature
!= NULL
)
9074 static const char *const iwmmxt_names
[] = {
9075 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9076 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9077 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9078 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9082 for (i
= ARM_WR0_REGNUM
; i
<= ARM_WR15_REGNUM
; i
++)
9084 &= tdesc_numbered_register (feature
, tdesc_data
, i
,
9085 iwmmxt_names
[i
- ARM_WR0_REGNUM
]);
9087 /* Check for the control registers, but do not fail if they
9089 for (i
= ARM_WC0_REGNUM
; i
<= ARM_WCASF_REGNUM
; i
++)
9090 tdesc_numbered_register (feature
, tdesc_data
, i
,
9091 iwmmxt_names
[i
- ARM_WR0_REGNUM
]);
9093 for (i
= ARM_WCGR0_REGNUM
; i
<= ARM_WCGR3_REGNUM
; i
++)
9095 &= tdesc_numbered_register (feature
, tdesc_data
, i
,
9096 iwmmxt_names
[i
- ARM_WR0_REGNUM
]);
9100 tdesc_data_cleanup (tdesc_data
);
9104 have_wmmx_registers
= 1;
9107 /* If we have a VFP unit, check whether the single precision registers
9108 are present. If not, then we will synthesize them as pseudo
9110 feature
= tdesc_find_feature (tdesc
,
9111 "org.gnu.gdb.arm.vfp");
9112 if (feature
!= NULL
)
9114 static const char *const vfp_double_names
[] = {
9115 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9116 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9117 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9118 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9121 /* Require the double precision registers. There must be either
9124 for (i
= 0; i
< 32; i
++)
9126 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
,
9128 vfp_double_names
[i
]);
9132 if (!valid_p
&& i
== 16)
9135 /* Also require FPSCR. */
9136 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
,
9137 ARM_FPSCR_REGNUM
, "fpscr");
9140 tdesc_data_cleanup (tdesc_data
);
9144 if (tdesc_unnumbered_register (feature
, "s0") == 0)
9145 have_vfp_pseudos
= 1;
9147 vfp_register_count
= i
;
9149 /* If we have VFP, also check for NEON. The architecture allows
9150 NEON without VFP (integer vector operations only), but GDB
9151 does not support that. */
9152 feature
= tdesc_find_feature (tdesc
,
9153 "org.gnu.gdb.arm.neon");
9154 if (feature
!= NULL
)
9156 /* NEON requires 32 double-precision registers. */
9159 tdesc_data_cleanup (tdesc_data
);
9163 /* If there are quad registers defined by the stub, use
9164 their type; otherwise (normally) provide them with
9165 the default type. */
9166 if (tdesc_unnumbered_register (feature
, "q0") == 0)
9167 have_neon_pseudos
= 1;
9174 /* If there is already a candidate, use it. */
9175 for (best_arch
= gdbarch_list_lookup_by_info (arches
, &info
);
9177 best_arch
= gdbarch_list_lookup_by_info (best_arch
->next
, &info
))
9179 if (arm_abi
!= ARM_ABI_AUTO
9180 && arm_abi
!= gdbarch_tdep (best_arch
->gdbarch
)->arm_abi
)
9183 if (fp_model
!= ARM_FLOAT_AUTO
9184 && fp_model
!= gdbarch_tdep (best_arch
->gdbarch
)->fp_model
)
9187 /* There are various other properties in tdep that we do not
9188 need to check here: those derived from a target description,
9189 since gdbarches with a different target description are
9190 automatically disqualified. */
9192 /* Do check is_m, though, since it might come from the binary. */
9193 if (is_m
!= gdbarch_tdep (best_arch
->gdbarch
)->is_m
)
9196 /* Found a match. */
9200 if (best_arch
!= NULL
)
9202 if (tdesc_data
!= NULL
)
9203 tdesc_data_cleanup (tdesc_data
);
9204 return best_arch
->gdbarch
;
9207 tdep
= XCNEW (struct gdbarch_tdep
);
9208 gdbarch
= gdbarch_alloc (&info
, tdep
);
9210 /* Record additional information about the architecture we are defining.
9211 These are gdbarch discriminators, like the OSABI. */
9212 tdep
->arm_abi
= arm_abi
;
9213 tdep
->fp_model
= fp_model
;
9215 tdep
->have_fpa_registers
= have_fpa_registers
;
9216 tdep
->have_wmmx_registers
= have_wmmx_registers
;
9217 gdb_assert (vfp_register_count
== 0
9218 || vfp_register_count
== 16
9219 || vfp_register_count
== 32);
9220 tdep
->vfp_register_count
= vfp_register_count
;
9221 tdep
->have_vfp_pseudos
= have_vfp_pseudos
;
9222 tdep
->have_neon_pseudos
= have_neon_pseudos
;
9223 tdep
->have_neon
= have_neon
;
9225 arm_register_g_packet_guesses (gdbarch
);
9228 switch (info
.byte_order_for_code
)
9230 case BFD_ENDIAN_BIG
:
9231 tdep
->arm_breakpoint
= arm_default_arm_be_breakpoint
;
9232 tdep
->arm_breakpoint_size
= sizeof (arm_default_arm_be_breakpoint
);
9233 tdep
->thumb_breakpoint
= arm_default_thumb_be_breakpoint
;
9234 tdep
->thumb_breakpoint_size
= sizeof (arm_default_thumb_be_breakpoint
);
9238 case BFD_ENDIAN_LITTLE
:
9239 tdep
->arm_breakpoint
= arm_default_arm_le_breakpoint
;
9240 tdep
->arm_breakpoint_size
= sizeof (arm_default_arm_le_breakpoint
);
9241 tdep
->thumb_breakpoint
= arm_default_thumb_le_breakpoint
;
9242 tdep
->thumb_breakpoint_size
= sizeof (arm_default_thumb_le_breakpoint
);
9247 internal_error (__FILE__
, __LINE__
,
9248 _("arm_gdbarch_init: bad byte order for float format"));
9251 /* On ARM targets char defaults to unsigned. */
9252 set_gdbarch_char_signed (gdbarch
, 0);
9254 /* wchar_t is unsigned under the AAPCS. */
9255 if (tdep
->arm_abi
== ARM_ABI_AAPCS
)
9256 set_gdbarch_wchar_signed (gdbarch
, 0);
9258 set_gdbarch_wchar_signed (gdbarch
, 1);
9260 /* Compute type alignment. */
9261 set_gdbarch_type_align (gdbarch
, arm_type_align
);
9263 /* Note: for displaced stepping, this includes the breakpoint, and one word
9264 of additional scratch space. This setting isn't used for anything beside
9265 displaced stepping at present. */
9266 set_gdbarch_max_insn_length (gdbarch
, 4 * ARM_DISPLACED_MODIFIED_INSNS
);
9268 /* This should be low enough for everything. */
9269 tdep
->lowest_pc
= 0x20;
9270 tdep
->jb_pc
= -1; /* Longjump support not enabled by default. */
9272 /* The default, for both APCS and AAPCS, is to return small
9273 structures in registers. */
9274 tdep
->struct_return
= reg_struct_return
;
9276 set_gdbarch_push_dummy_call (gdbarch
, arm_push_dummy_call
);
9277 set_gdbarch_frame_align (gdbarch
, arm_frame_align
);
9280 set_gdbarch_code_of_frame_writable (gdbarch
, arm_code_of_frame_writable
);
9282 set_gdbarch_write_pc (gdbarch
, arm_write_pc
);
9284 frame_base_set_default (gdbarch
, &arm_normal_base
);
9286 /* Address manipulation. */
9287 set_gdbarch_addr_bits_remove (gdbarch
, arm_addr_bits_remove
);
9289 /* Advance PC across function entry code. */
9290 set_gdbarch_skip_prologue (gdbarch
, arm_skip_prologue
);
9292 /* Detect whether PC is at a point where the stack has been destroyed. */
9293 set_gdbarch_stack_frame_destroyed_p (gdbarch
, arm_stack_frame_destroyed_p
);
9295 /* Skip trampolines. */
9296 set_gdbarch_skip_trampoline_code (gdbarch
, arm_skip_stub
);
9298 /* The stack grows downward. */
9299 set_gdbarch_inner_than (gdbarch
, core_addr_lessthan
);
9301 /* Breakpoint manipulation. */
9302 set_gdbarch_breakpoint_kind_from_pc (gdbarch
, arm_breakpoint_kind_from_pc
);
9303 set_gdbarch_sw_breakpoint_from_kind (gdbarch
, arm_sw_breakpoint_from_kind
);
9304 set_gdbarch_breakpoint_kind_from_current_state (gdbarch
,
9305 arm_breakpoint_kind_from_current_state
);
9307 /* Information about registers, etc. */
9308 set_gdbarch_sp_regnum (gdbarch
, ARM_SP_REGNUM
);
9309 set_gdbarch_pc_regnum (gdbarch
, ARM_PC_REGNUM
);
9310 set_gdbarch_num_regs (gdbarch
, ARM_NUM_REGS
);
9311 set_gdbarch_register_type (gdbarch
, arm_register_type
);
9312 set_gdbarch_register_reggroup_p (gdbarch
, arm_register_reggroup_p
);
9314 /* This "info float" is FPA-specific. Use the generic version if we
9316 if (gdbarch_tdep (gdbarch
)->have_fpa_registers
)
9317 set_gdbarch_print_float_info (gdbarch
, arm_print_float_info
);
9319 /* Internal <-> external register number maps. */
9320 set_gdbarch_dwarf2_reg_to_regnum (gdbarch
, arm_dwarf_reg_to_regnum
);
9321 set_gdbarch_register_sim_regno (gdbarch
, arm_register_sim_regno
);
9323 set_gdbarch_register_name (gdbarch
, arm_register_name
);
9325 /* Returning results. */
9326 set_gdbarch_return_value (gdbarch
, arm_return_value
);
9329 set_gdbarch_print_insn (gdbarch
, gdb_print_insn_arm
);
9331 /* Minsymbol frobbing. */
9332 set_gdbarch_elf_make_msymbol_special (gdbarch
, arm_elf_make_msymbol_special
);
9333 set_gdbarch_coff_make_msymbol_special (gdbarch
,
9334 arm_coff_make_msymbol_special
);
9335 set_gdbarch_record_special_symbol (gdbarch
, arm_record_special_symbol
);
9337 /* Thumb-2 IT block support. */
9338 set_gdbarch_adjust_breakpoint_address (gdbarch
,
9339 arm_adjust_breakpoint_address
);
9341 /* Virtual tables. */
9342 set_gdbarch_vbit_in_delta (gdbarch
, 1);
9344 /* Hook in the ABI-specific overrides, if they have been registered. */
9345 gdbarch_init_osabi (info
, gdbarch
);
9347 dwarf2_frame_set_init_reg (gdbarch
, arm_dwarf2_frame_init_reg
);
9349 /* Add some default predicates. */
9351 frame_unwind_append_unwinder (gdbarch
, &arm_m_exception_unwind
);
9352 frame_unwind_append_unwinder (gdbarch
, &arm_stub_unwind
);
9353 dwarf2_append_unwinders (gdbarch
);
9354 frame_unwind_append_unwinder (gdbarch
, &arm_exidx_unwind
);
9355 frame_unwind_append_unwinder (gdbarch
, &arm_epilogue_frame_unwind
);
9356 frame_unwind_append_unwinder (gdbarch
, &arm_prologue_unwind
);
9358 /* Now we have tuned the configuration, set a few final things,
9359 based on what the OS ABI has told us. */
9361 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
9362 binaries are always marked. */
9363 if (tdep
->arm_abi
== ARM_ABI_AUTO
)
9364 tdep
->arm_abi
= ARM_ABI_APCS
;
9366 /* Watchpoints are not steppable. */
9367 set_gdbarch_have_nonsteppable_watchpoint (gdbarch
, 1);
9369 /* We used to default to FPA for generic ARM, but almost nobody
9370 uses that now, and we now provide a way for the user to force
9371 the model. So default to the most useful variant. */
9372 if (tdep
->fp_model
== ARM_FLOAT_AUTO
)
9373 tdep
->fp_model
= ARM_FLOAT_SOFT_FPA
;
9375 if (tdep
->jb_pc
>= 0)
9376 set_gdbarch_get_longjmp_target (gdbarch
, arm_get_longjmp_target
);
9378 /* Floating point sizes and format. */
9379 set_gdbarch_float_format (gdbarch
, floatformats_ieee_single
);
9380 if (tdep
->fp_model
== ARM_FLOAT_SOFT_FPA
|| tdep
->fp_model
== ARM_FLOAT_FPA
)
9382 set_gdbarch_double_format
9383 (gdbarch
, floatformats_ieee_double_littlebyte_bigword
);
9384 set_gdbarch_long_double_format
9385 (gdbarch
, floatformats_ieee_double_littlebyte_bigword
);
9389 set_gdbarch_double_format (gdbarch
, floatformats_ieee_double
);
9390 set_gdbarch_long_double_format (gdbarch
, floatformats_ieee_double
);
9393 if (have_vfp_pseudos
)
9395 /* NOTE: These are the only pseudo registers used by
9396 the ARM target at the moment. If more are added, a
9397 little more care in numbering will be needed. */
9399 int num_pseudos
= 32;
9400 if (have_neon_pseudos
)
9402 set_gdbarch_num_pseudo_regs (gdbarch
, num_pseudos
);
9403 set_gdbarch_pseudo_register_read (gdbarch
, arm_pseudo_read
);
9404 set_gdbarch_pseudo_register_write (gdbarch
, arm_pseudo_write
);
9409 set_tdesc_pseudo_register_name (gdbarch
, arm_register_name
);
9411 tdesc_use_registers (gdbarch
, tdesc
, tdesc_data
);
9413 /* Override tdesc_register_type to adjust the types of VFP
9414 registers for NEON. */
9415 set_gdbarch_register_type (gdbarch
, arm_register_type
);
9418 /* Add standard register aliases. We add aliases even for those
9419 nanes which are used by the current architecture - it's simpler,
9420 and does no harm, since nothing ever lists user registers. */
9421 for (i
= 0; i
< ARRAY_SIZE (arm_register_aliases
); i
++)
9422 user_reg_add (gdbarch
, arm_register_aliases
[i
].name
,
9423 value_of_arm_user_reg
, &arm_register_aliases
[i
].regnum
);
9425 set_gdbarch_disassembler_options (gdbarch
, &arm_disassembler_options
);
9426 set_gdbarch_valid_disassembler_options (gdbarch
, disassembler_options_arm ());
9428 set_gdbarch_gnu_triplet_regexp (gdbarch
, arm_gnu_triplet_regexp
);
/* Dump the target-specific state held in GDBARCH's tdep to FILE.
   Installed via gdbarch_register and invoked by
   "maintenance print architecture".  */

static void
arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);

  if (tdep == NULL)
    return;

  /* lowest_pc is the lowest address treated as code; it is set in
     arm_gdbarch_init (default 0x20).  */
  fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
		      (unsigned long) tdep->lowest_pc);
}
#if GDB_SELF_TEST
namespace selftests
{
/* Defined later in this file; declared here so that _initialize_arm_tdep
   can register it as a self-test.  */
static void arm_record_test (void);
}
#endif

/* Module initialization: register the ARM gdbarch, the OS ABI sniffer,
   the standard target descriptions, and all "set/show arm" commands.  */

void
_initialize_arm_tdep (void)
{
  long length;
  int i, j;
  /* Scratch buffer used to accumulate the per-style help text for the
     "set arm disassembler" command.  */
  char regdesc[1024], *rdptr = regdesc;
  size_t rest = sizeof (regdesc);

  gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);

  /* Add ourselves to objfile event chain.  */
  gdb::observers::new_objfile.attach (arm_exidx_new_objfile);
  arm_exidx_data_key
    = register_objfile_data_with_cleanup (NULL, arm_exidx_data_free);

  /* Register an ELF OS ABI sniffer for ARM binaries.  */
  gdbarch_register_osabi_sniffer (bfd_arch_arm,
				  bfd_target_elf_flavour,
				  arm_elf_osabi_sniffer);

  /* Initialize the standard target descriptions.  */
  initialize_tdesc_arm_with_m ();
  initialize_tdesc_arm_with_m_fpa_layout ();
  initialize_tdesc_arm_with_m_vfp_d16 ();
  initialize_tdesc_arm_with_iwmmxt ();
  initialize_tdesc_arm_with_vfpv2 ();
  initialize_tdesc_arm_with_vfpv3 ();
  initialize_tdesc_arm_with_neon ();

  /* Add root prefix command for all "set arm"/"show arm" commands.  */
  add_prefix_cmd ("arm", no_class, set_arm_command,
		  _("Various ARM-specific commands."),
		  &setarmcmdlist, "set arm ", 0, &setlist);

  add_prefix_cmd ("arm", no_class, show_arm_command,
		  _("Various ARM-specific commands."),
		  &showarmcmdlist, "show arm ", 0, &showlist);

  /* Count the "reg-names-*" styles the disassembler offers, then build
     both the enum list and a help line for each style.  */
  arm_disassembler_options = xstrdup ("reg-names-std");
  const disasm_options_t *disasm_options
    = &disassembler_options_arm ()->options;
  int num_disassembly_styles = 0;
  for (i = 0; disasm_options->name[i] != NULL; i++)
    if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
      num_disassembly_styles++;

  /* Initialize the array that will be passed to add_setshow_enum_cmd().  */
  valid_disassembly_styles = XNEWVEC (const char *,
				      num_disassembly_styles + 1);
  for (i = j = 0; disasm_options->name[i] != NULL; i++)
    if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
      {
	size_t offset = strlen ("reg-names-");
	const char *style = disasm_options->name[i];
	/* Expose each style without its "reg-names-" prefix.  */
	valid_disassembly_styles[j++] = &style[offset];
	length = snprintf (rdptr, rest, "%s - %s\n", &style[offset],
			   disasm_options->description[i]);
	rest -= length;
	rdptr += length;
      }
  /* Mark the end of valid options.  */
  valid_disassembly_styles[num_disassembly_styles] = NULL;

  /* Create the help text.  */
  std::string helptext = string_printf ("%s%s%s",
					_("The valid values are:\n"),
					regdesc,
					_("The default is \"std\"."));

  add_setshow_enum_cmd("disassembler", no_class,
		       valid_disassembly_styles, &disassembly_style,
		       _("Set the disassembly style."),
		       _("Show the disassembly style."),
		       helptext.c_str (),
		       set_disassembly_style_sfunc,
		       show_disassembly_style_sfunc,
		       &setarmcmdlist, &showarmcmdlist);

  add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
			   _("Set usage of ARM 32-bit mode."),
			   _("Show usage of ARM 32-bit mode."),
			   _("When off, a 26-bit PC will be used."),
			   NULL,
			   NULL, /* FIXME: i18n: Usage of ARM 32-bit
				    mode is %s.  */
			   &setarmcmdlist, &showarmcmdlist);

  /* Add a command to allow the user to force the FPU model.  */
  add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
			_("Set the floating point type."),
			_("Show the floating point type."),
			_("auto - Determine the FP typefrom the OS-ABI.\n\
softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
fpa - FPA co-processor (GCC compiled).\n\
softvfp - Software FP with pure-endian doubles.\n\
vfp - VFP co-processor."),
			set_fp_model_sfunc, show_fp_model,
			&setarmcmdlist, &showarmcmdlist);

  /* Add a command to allow the user to force the ABI.  */
  add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
			_("Set the ABI."),
			_("Show the ABI."),
			NULL, arm_set_abi, arm_show_abi,
			&setarmcmdlist, &showarmcmdlist);

  /* Add two commands to allow the user to force the assumed
     execution mode (ARM vs Thumb) when symbol information is missing
     or is to be overridden.  */
  add_setshow_enum_cmd ("fallback-mode", class_support,
			arm_mode_strings, &arm_fallback_mode_string,
			_("Set the mode assumed when symbols are unavailable."),
			_("Show the mode assumed when symbols are unavailable."),
			NULL, NULL, arm_show_fallback_mode,
			&setarmcmdlist, &showarmcmdlist);
  add_setshow_enum_cmd ("force-mode", class_support,
			arm_mode_strings, &arm_force_mode_string,
			_("Set the mode assumed even when symbols are available."),
			_("Show the mode assumed even when symbols are available."),
			NULL, NULL, arm_show_force_mode,
			&setarmcmdlist, &showarmcmdlist);

  /* Debugging flag.  */
  add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
			   _("Set ARM debugging."),
			   _("Show ARM debugging."),
			   _("When on, arm-specific debugging is enabled."),
			   NULL,
			   NULL, /* FIXME: i18n: "ARM debugging is %s.  */
			   &setdebuglist, &showdebuglist);

#if GDB_SELF_TEST
  selftests::register_test ("arm-record", selftests::arm_record_test);
#endif
}
/* ARM-reversible process record data structures.  */

#define ARM_INSN_SIZE_BYTES 4
#define THUMB_INSN_SIZE_BYTES 2
#define THUMB2_INSN_SIZE_BYTES 4


/* Position of the bit within a 32-bit ARM instruction
   that defines whether the instruction is a load or store.  */
#define INSN_S_L_BIT_NUM 20

/* Copy LENGTH register numbers out of RECORD_BUF into a freshly
   allocated array assigned to REGS.  No-op when LENGTH is zero.  */
#define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
        do  \
          { \
            unsigned int reg_len = LENGTH; \
            if (reg_len) \
              { \
                REGS = XNEWVEC (uint32_t, reg_len); \
                memcpy (&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
              } \
          } \
        while (0)

/* Copy LENGTH memory records out of RECORD_BUF into a freshly allocated
   array assigned to MEMS.  RECORD_BUF is laid out as (len, addr) uint32_t
   pairs, matching struct arm_mem_r, so a flat memcpy is valid.  */
#define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
        do  \
          { \
            unsigned int mem_len = LENGTH; \
            if (mem_len) \
              { \
                MEMS =  XNEWVEC (struct arm_mem_r, mem_len);  \
                memcpy(&MEMS->len, &RECORD_BUF[0], \
                       sizeof(struct arm_mem_r) * LENGTH); \
              } \
          } \
        while (0)

/* Checks whether insn is already recorded or yet to be decoded. (boolean expression).  */
#define INSN_RECORDED(ARM_RECORD) \
        (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)

/* ARM memory record structure.  */
struct arm_mem_r
{
  uint32_t len;    /* Record length.  */
  uint32_t addr;   /* Memory address.  */
};

/* ARM instruction record contains opcode of current insn
   and execution state (before entry to decode_insn()),
   contains list of to-be-modified registers and
   memory blocks (on return from decode_insn()).  */

typedef struct insn_decode_record_t
{
  struct gdbarch *gdbarch;
  struct regcache *regcache;
  CORE_ADDR this_addr;          /* Address of the insn being decoded.  */
  uint32_t arm_insn;            /* Should accommodate thumb.  */
  uint32_t cond;                /* Condition code.  */
  uint32_t opcode;              /* Insn opcode.  */
  uint32_t decode;              /* Insn decode bits.  */
  uint32_t mem_rec_count;       /* No of mem records.  */
  uint32_t reg_rec_count;       /* No of reg records.  */
  uint32_t *arm_regs;           /* Registers to be saved for this record.  */
  struct arm_mem_r *arm_mems;   /* Memory to be saved for this record.  */
} insn_decode_record;
/* Checks ARM SBZ and SBO mandatory fields.
   Verify that the LEN bits of INSN starting at (1-based) bit BIT_NUM are
   all-ones (SBO == 1, "should be one") or all-zeros (SBO == 0, "should be
   zero").  Returns 1 when the field is as required, 0 otherwise.  */

static int
sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
{
  uint32_t ones = bits (insn, bit_num - 1, (bit_num -1) + (len - 1));

  /* An empty field is trivially valid.  */
  if (!len)
    return 1;

  /* For SBZ, invert so both checks become "every bit is set".  */
  if (!sbo)
    ones = ~ones;

  while (ones)
    {
      if (!(ones & sbo))
	{
	  return 0;
	}
      ones = ones >> 1;
    }

  return 1;
}
/* Outcome of a process-record attempt for a single instruction.  */

enum arm_record_result
{
  ARM_RECORD_SUCCESS = 0,
  ARM_RECORD_FAILURE = 1
};

/* Which flavour of store arm_record_strx is recording: halfword (STRH)
   or doubleword (STRD).  */

typedef enum
{
  ARM_RECORD_STRH=1,
  ARM_RECORD_STRD
} arm_record_strx_t;
/* Record the memory (and, for writeback forms, register) side effects of
   the ARM "extra" store instructions STRH/STRD, for process record/replay.

   ARM_INSN_R describes the instruction being decoded; RECORD_BUF receives
   register numbers that the insn modifies, RECORD_BUF_MEM receives
   (length, address) pairs for the memory it writes, and STR_TYPE selects
   halfword (2-byte) vs doubleword (2 x 4-byte) recording.

   The addressing-mode cases below are selected by bits 21-24 of the insn
   (stored in arm_insn_r->opcode).  */

static void
arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
		 uint32_t *record_buf_mem, arm_record_strx_t str_type)
{
  struct regcache *reg_cache = arm_insn_r->regcache;
  ULONGEST u_regval[2]= {0};

  uint32_t reg_src1 = 0, reg_src2 = 0;
  uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;

  arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
  arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);

  if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
    {
      /* 1) Handle misc store, immediate offset.  */
      immed_low = bits (arm_insn_r->arm_insn, 0, 3);
      immed_high = bits (arm_insn_r->arm_insn, 8, 11);
      reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
      regcache_raw_read_unsigned (reg_cache, reg_src1,
				  &u_regval[0]);
      if (ARM_PC_REGNUM == reg_src1)
	{
	  /* If R15 was used as Rn, hence current PC+8.  */
	  u_regval[0] = u_regval[0] + 8;
	}
      /* The 8-bit immediate is split across bits 8-11 and 0-3.  */
      offset_8 = (immed_high << 4) | immed_low;
      /* Calculate target store address.  */
      if (14 == arm_insn_r->opcode)
	{
	  tgt_mem_addr = u_regval[0] + offset_8;
	}
      else
	{
	  tgt_mem_addr = u_regval[0] - offset_8;
	}
      if (ARM_RECORD_STRH == str_type)
	{
	  record_buf_mem[0] = 2;
	  record_buf_mem[1] = tgt_mem_addr;
	  arm_insn_r->mem_rec_count = 1;
	}
      else if (ARM_RECORD_STRD == str_type)
	{
	  /* Doubleword store clobbers two consecutive words.  */
	  record_buf_mem[0] = 4;
	  record_buf_mem[1] = tgt_mem_addr;
	  record_buf_mem[2] = 4;
	  record_buf_mem[3] = tgt_mem_addr + 4;
	  arm_insn_r->mem_rec_count = 2;
	}
    }
  else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
    {
      /* 2) Store, register offset.  */
      /* Get Rm (bits 0-3) and Rn (bits 16-19).  */
      reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
      reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
      regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
      regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
      if (15 == reg_src2)
	{
	  /* If R15 was used as Rn, hence current PC+8.  */
	  /* NOTE(review): the adjustment below is applied to u_regval[0]
	     (Rm's value) although the condition tests Rn — looks
	     suspicious; confirm against the ARM ARM before changing.  */
	  u_regval[0] = u_regval[0] + 8;
	}
      /* Calculate target store address, Rn +/- Rm, register offset.  */
      if (12 == arm_insn_r->opcode)
	{
	  tgt_mem_addr = u_regval[0] + u_regval[1];
	}
      else
	{
	  tgt_mem_addr = u_regval[1] - u_regval[0];
	}
      if (ARM_RECORD_STRH == str_type)
	{
	  record_buf_mem[0] = 2;
	  record_buf_mem[1] = tgt_mem_addr;
	  arm_insn_r->mem_rec_count = 1;
	}
      else if (ARM_RECORD_STRD == str_type)
	{
	  record_buf_mem[0] = 4;
	  record_buf_mem[1] = tgt_mem_addr;
	  record_buf_mem[2] = 4;
	  record_buf_mem[3] = tgt_mem_addr + 4;
	  arm_insn_r->mem_rec_count = 2;
	}
    }
  else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
	   || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
    {
      /* 3) Store, immediate pre-indexed.  */
      /* 5) Store, immediate post-indexed.  */
      immed_low = bits (arm_insn_r->arm_insn, 0, 3);
      immed_high = bits (arm_insn_r->arm_insn, 8, 11);
      offset_8 = (immed_high << 4) | immed_low;
      reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
      regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
      /* Calculate target store address, Rn +/- Rm, register offset.  */
      if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
	{
	  tgt_mem_addr = u_regval[0] + offset_8;
	}
      else
	{
	  tgt_mem_addr = u_regval[0] - offset_8;
	}
      if (ARM_RECORD_STRH == str_type)
	{
	  record_buf_mem[0] = 2;
	  record_buf_mem[1] = tgt_mem_addr;
	  arm_insn_r->mem_rec_count = 1;
	}
      else if (ARM_RECORD_STRD == str_type)
	{
	  record_buf_mem[0] = 4;
	  record_buf_mem[1] = tgt_mem_addr;
	  record_buf_mem[2] = 4;
	  record_buf_mem[3] = tgt_mem_addr + 4;
	  arm_insn_r->mem_rec_count = 2;
	}
      /* Record Rn also as it changes (writeback).  */
      *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
      arm_insn_r->reg_rec_count = 1;
    }
  else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
	   || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
    {
      /* 4) Store, register pre-indexed.  */
      /* 6) Store, register post -indexed.  */
      reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
      reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
      regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
      regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
      /* Calculate target store address, Rn +/- Rm, register offset.  */
      if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
	{
	  tgt_mem_addr = u_regval[0] + u_regval[1];
	}
      else
	{
	  tgt_mem_addr = u_regval[1] - u_regval[0];
	}
      if (ARM_RECORD_STRH == str_type)
	{
	  record_buf_mem[0] = 2;
	  record_buf_mem[1] = tgt_mem_addr;
	  arm_insn_r->mem_rec_count = 1;
	}
      else if (ARM_RECORD_STRD == str_type)
	{
	  record_buf_mem[0] = 4;
	  record_buf_mem[1] = tgt_mem_addr;
	  record_buf_mem[2] = 4;
	  record_buf_mem[3] = tgt_mem_addr + 4;
	  arm_insn_r->mem_rec_count = 2;
	}
      /* Record Rn also as it changes (writeback).  */
      *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
      arm_insn_r->reg_rec_count = 1;
    }
}
/* Handling ARM extension space insns.
   Decodes the "extension space" encodings (unconditional, arithmetic,
   control, load/store and coprocessor extension groups) and fills in
   ARM_INSN_R's register/memory record lists.  Returns 0 on success,
   -1 when the insn cannot be recorded (e.g. SPSR handling or
   coprocessor insns, which are not yet supported).  */

static int
arm_record_extension_space (insn_decode_record *arm_insn_r)
{
  int ret = 0;  /* Return value: -1:record failure ;  0:success  */
  uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
  uint32_t record_buf[8], record_buf_mem[8];
  uint32_t reg_src1 = 0;
  struct regcache *reg_cache = arm_insn_r->regcache;
  ULONGEST u_regval = 0;

  /* Caller must not have recorded anything for this insn yet.  */
  gdb_assert (!INSN_RECORDED(arm_insn_r));

  /* Handle unconditional insn extension space.  */
  opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
  opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
  if (arm_insn_r->cond)
    {
      /* PLD has no affect on architectural state, it just affects
	 the caches.  */
      if (5 == ((opcode1 & 0xE0) >> 5))
	{
	  /* BLX(1): writes the flags and the return address.  */
	  record_buf[0] = ARM_PS_REGNUM;
	  record_buf[1] = ARM_LR_REGNUM;
	  arm_insn_r->reg_rec_count = 2;
	}
      /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn.  */
    }

  opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
  if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
    {
      ret = -1;
      /* Undefined instruction on ARM V5; need to handle if later
	 versions define it.  */
    }

  opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
  opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
  insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);

  /* Handle arithmetic insn extension space.  */
  if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
      && !INSN_RECORDED(arm_insn_r))
    {
      /* Handle MLA(S) and MUL(S).  */
      if (in_inclusive_range (insn_op1, 0U, 3U))
	{
	  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
	  record_buf[1] = ARM_PS_REGNUM;
	  arm_insn_r->reg_rec_count = 2;
	}
      else if (in_inclusive_range (insn_op1, 4U, 15U))
	{
	  /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S): the 64-bit
	     result occupies RdHi (16-19) and RdLo (12-15).  */
	  record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
	  record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
	  record_buf[2] = ARM_PS_REGNUM;
	  arm_insn_r->reg_rec_count = 3;
	}
    }

  opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
  opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
  insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);

  /* Handle control insn extension space.  */
  if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
      && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
    {
      if (!bit (arm_insn_r->arm_insn,25))
	{
	  if (!bits (arm_insn_r->arm_insn, 4, 7))
	    {
	      if ((0 == insn_op1) || (2 == insn_op1))
		{
		  /* MRS: destination register is modified.  */
		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  arm_insn_r->reg_rec_count = 1;
		}
	      else if (1 == insn_op1)
		{
		  /* CSPR is going to be changed.  */
		  record_buf[0] = ARM_PS_REGNUM;
		  arm_insn_r->reg_rec_count = 1;
		}
	      else if (3 == insn_op1)
		{
		  /* SPSR is going to be changed.  */
		  /* We need to get SPSR value, which is yet to be done.  */
		  return -1;
		}
	    }
	  else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
	    {
	      if (1 == insn_op1)
		{
		  /* BX: only the flags (T bit) change.  */
		  record_buf[0] = ARM_PS_REGNUM;
		  arm_insn_r->reg_rec_count = 1;
		}
	      else if (3 == insn_op1)
		{
		  /* CLZ: destination register is modified.  */
		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  arm_insn_r->reg_rec_count = 1;
		}
	    }
	  else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
	    {
	      /* BLX(2): flags and link register change.  */
	      record_buf[0] = ARM_PS_REGNUM;
	      record_buf[1] = ARM_LR_REGNUM;
	      arm_insn_r->reg_rec_count = 2;
	    }
	  else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
	    {
	      /* QADD, QSUB, QDADD, QDSUB */
	      record_buf[0] = ARM_PS_REGNUM;
	      record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
	      arm_insn_r->reg_rec_count = 2;
	    }
	  else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
	    {
	      /* BKPT: CPSR and LR are clobbered on exception entry.  */
	      record_buf[0] = ARM_PS_REGNUM;
	      record_buf[1] = ARM_LR_REGNUM;
	      arm_insn_r->reg_rec_count = 2;

	      /* Save SPSR also;how?  */
	      return -1;
	    }
	  else if(8 == bits (arm_insn_r->arm_insn, 4, 7)
		  || 10 == bits (arm_insn_r->arm_insn, 4, 7)
		  || 12 == bits (arm_insn_r->arm_insn, 4, 7)
		  || 14 == bits (arm_insn_r->arm_insn, 4, 7))
	    {
	      if (0 == insn_op1 || 1 == insn_op1)
		{
		  /* SMLA<x><y>, SMLAW<y>, SMULW<y>.  */
		  /* We dont do optimization for SMULW<y> where we
		     need only 'd' reg.  */
		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  record_buf[1] = ARM_PS_REGNUM;
		  arm_insn_r->reg_rec_count = 2;
		}
	      else if (2 == insn_op1)
		{
		  /* SMLAL<x><y>: 64-bit accumulate uses two registers.  */
		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
		  arm_insn_r->reg_rec_count = 2;
		}
	      else if (3 == insn_op1)
		{
		  /* SMUL<x><y>.  */
		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  arm_insn_r->reg_rec_count = 1;
		}
	    }
	}
      else
	{
	  /* MSR : immediate form.  */
	  if (1 == insn_op1)
	    {
	      /* CSPR is going to be changed.  */
	      record_buf[0] = ARM_PS_REGNUM;
	      arm_insn_r->reg_rec_count = 1;
	    }
	  else if (3 == insn_op1)
	    {
	      /* SPSR is going to be changed.  */
	      /* we need to get SPSR value, which is yet to be done  */
	      return -1;
	    }
	}
    }

  opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
  opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
  insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);

  /* Handle load/store insn extension space.  */
  if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
      && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
      && !INSN_RECORDED(arm_insn_r))
    {
      if (!insn_op1)
	{
	  /* These insn, changes register and memory as well.  */
	  /* SWP or SWPB insn.  */
	  /* Get memory address given by Rn.  */
	  reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
	  regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
	  /* SWP insn ?, swaps word.  */
	  if (8 == arm_insn_r->opcode)
	    {
	      record_buf_mem[0] = 4;
	    }
	  else
	    {
	      /* SWPB insn, swaps only byte.  */
	      record_buf_mem[0] = 1;
	    }
	  record_buf_mem[1] = u_regval;
	  arm_insn_r->mem_rec_count = 1;
	  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
	  arm_insn_r->reg_rec_count = 1;
	}
      else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
	{
	  /* STRH.  */
	  arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
			  ARM_RECORD_STRH);
	}
      else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
	{
	  /* LDRD: loads Rt and Rt+1.  */
	  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
	  record_buf[1] = record_buf[0] + 1;
	  arm_insn_r->reg_rec_count = 2;
	}
      else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
	{
	  /* STRD.  */
	  arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
			  ARM_RECORD_STRD);
	}
      else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
	{
	  /* LDRH, LDRSB, LDRSH.  */
	  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
	  arm_insn_r->reg_rec_count = 1;
	}
    }

  opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
  if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
      && !INSN_RECORDED(arm_insn_r))
    {
      ret = -1;
      /* Handle coprocessor insn extension space.  */
    }

  /* To be done for ARMv5 and later; as of now we return -1.  */
  if (-1 == ret)
    return ret;

  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);

  return ret;
}
/* Handling opcode 000 insns.
   Records data-processing (register), miscellaneous, multiply,
   synchronization (SWP/SWPB) and "extra" load/store instructions.
   Returns 0 on success, -1 when the insn cannot be recorded.  */

static int
arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
{
  struct regcache *reg_cache = arm_insn_r->regcache;
  uint32_t record_buf[8], record_buf_mem[8];
  ULONGEST u_regval[2] = {0};

  uint32_t reg_src1 = 0;
  uint32_t opcode1 = 0;

  arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
  arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
  opcode1 = bits (arm_insn_r->arm_insn, 20, 24);

  if (!((opcode1 & 0x19) == 0x10))
    {
      /* Data-processing (register) and Data-processing (register-shifted
	 register */
      /* Out of 11 shifter operands mode, all the insn modifies destination
	 register, which is specified by 13-16 decode.  */
      record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
      record_buf[1] = ARM_PS_REGNUM;
      arm_insn_r->reg_rec_count = 2;
    }
  else if ((arm_insn_r->decode < 8) && ((opcode1 & 0x19) == 0x10))
    {
      /* Miscellaneous instructions */

      if (3 == arm_insn_r->decode && 0x12 == opcode1
	  && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
	{
	  /* Handle BLX, branch and link/exchange.  */
	  if (9 == arm_insn_r->opcode)
	    {
	      /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm,
		 and R14 stores the return address.  */
	      record_buf[0] = ARM_PS_REGNUM;
	      record_buf[1] = ARM_LR_REGNUM;
	      arm_insn_r->reg_rec_count = 2;
	    }
	}
      else if (7 == arm_insn_r->decode && 0x12 == opcode1)
	{
	  /* Handle enhanced software breakpoint insn, BKPT.  */
	  /* CPSR is changed to be executed in ARM state,  disabling normal
	     interrupts, entering abort mode.  */
	  /* According to high vector configuration PC is set.  */
	  /* user hit breakpoint and type reverse, in
	     that case, we need to go back with previous CPSR and
	     Program Counter.  */
	  record_buf[0] = ARM_PS_REGNUM;
	  record_buf[1] = ARM_LR_REGNUM;
	  arm_insn_r->reg_rec_count = 2;

	  /* Save SPSR also; how?  */
	  return -1;
	}
      else if (1 == arm_insn_r->decode && 0x12 == opcode1
	       && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
	{
	  /* Handle BX, branch and link/exchange.  */
	  /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm.  */
	  record_buf[0] = ARM_PS_REGNUM;
	  arm_insn_r->reg_rec_count = 1;
	}
      else if (1 == arm_insn_r->decode && 0x16 == opcode1
	       && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
	       && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
	{
	  /* Count leading zeros: CLZ.  */
	  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
	  arm_insn_r->reg_rec_count = 1;
	}
      else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
	       && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
	       && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
	       && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0))
	{
	  /* Handle MRS insn.  */
	  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
	  arm_insn_r->reg_rec_count = 1;
	}
    }
  else if (9 == arm_insn_r->decode && opcode1 < 0x10)
    {
      /* Multiply and multiply-accumulate */

      /* Handle multiply instructions.  */
      /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL.  */
      if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
	{
	  /* Handle MLA and MUL.  */
	  record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
	  record_buf[1] = ARM_PS_REGNUM;
	  arm_insn_r->reg_rec_count = 2;
	}
      else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
	{
	  /* Handle SMLAL, SMULL, UMLAL, UMULL.  */
	  record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
	  record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
	  record_buf[2] = ARM_PS_REGNUM;
	  arm_insn_r->reg_rec_count = 3;
	}
    }
  else if (9 == arm_insn_r->decode && opcode1 > 0x10)
    {
      /* Synchronization primitives */

      /* Handling SWP, SWPB.  */
      /* These insn, changes register and memory as well.  */
      /* SWP or SWPB insn.  */
      /* Get memory address given by Rn.  */
      reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
      regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);

      /* SWP insn ?, swaps word.  */
      if (8 == arm_insn_r->opcode)
	{
	  record_buf_mem[0] = 4;
	}
      else
	{
	  /* SWPB insn, swaps only byte.  */
	  record_buf_mem[0] = 1;
	}
      record_buf_mem[1] = u_regval[0];
      arm_insn_r->mem_rec_count = 1;
      record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
      arm_insn_r->reg_rec_count = 1;
    }
  else if (11 == arm_insn_r->decode || 13 == arm_insn_r->decode
	   || 15 == arm_insn_r->decode)
    {
      if ((opcode1 & 0x12) == 2)
	{
	  /* Extra load/store (unprivileged) */
	  return -1;
	}
      else
	{
	  /* Extra load/store */
	  /* Bits 5-6 select halfword / dual / signed-halfword forms.  */
	  switch (bits (arm_insn_r->arm_insn, 5, 6))
	    {
	    case 1:
	      if ((opcode1 & 0x05) == 0x0 || (opcode1 & 0x05) == 0x4)
		{
		  /* STRH (register), STRH (immediate) */
		  arm_record_strx (arm_insn_r, &record_buf[0],
				   &record_buf_mem[0], ARM_RECORD_STRH);
		}
	      else if ((opcode1 & 0x05) == 0x1)
		{
		  /* LDRH (register) */
		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  arm_insn_r->reg_rec_count = 1;

		  if (bit (arm_insn_r->arm_insn, 21))
		    {
		      /* Write back to Rn.  */
		      record_buf[arm_insn_r->reg_rec_count++]
			= bits (arm_insn_r->arm_insn, 16, 19);
		    }
		}
	      else if ((opcode1 & 0x05) == 0x5)
		{
		  /* LDRH (immediate), LDRH (literal) */
		  int rn = bits (arm_insn_r->arm_insn, 16, 19);

		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  arm_insn_r->reg_rec_count = 1;

		  /* Rn == PC is the literal form, which has no
		     writeback.  */
		  if (rn != 15)
		    {
		      /*LDRH (immediate) */
		      if (bit (arm_insn_r->arm_insn, 21))
			{
			  /* Write back to Rn.  */
			  record_buf[arm_insn_r->reg_rec_count++] = rn;
			}
		    }
		}
	      else
		return -1;
	      break;
	    case 2:
	      if ((opcode1 & 0x05) == 0x0)
		{
		  /* LDRD (register) */
		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  record_buf[1] = record_buf[0] + 1;
		  arm_insn_r->reg_rec_count = 2;

		  if (bit (arm_insn_r->arm_insn, 21))
		    {
		      /* Write back to Rn.  */
		      record_buf[arm_insn_r->reg_rec_count++]
			= bits (arm_insn_r->arm_insn, 16, 19);
		    }
		}
	      else if ((opcode1 & 0x05) == 0x1)
		{
		  /* LDRSB (register) */
		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  arm_insn_r->reg_rec_count = 1;

		  if (bit (arm_insn_r->arm_insn, 21))
		    {
		      /* Write back to Rn.  */
		      record_buf[arm_insn_r->reg_rec_count++]
			= bits (arm_insn_r->arm_insn, 16, 19);
		    }
		}
	      else if ((opcode1 & 0x05) == 0x4 || (opcode1 & 0x05) == 0x5)
		{
		  /* LDRD (immediate), LDRD (literal), LDRSB (immediate),
		     LDRSB (literal) */
		  int rn = bits (arm_insn_r->arm_insn, 16, 19);

		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  arm_insn_r->reg_rec_count = 1;

		  if (rn != 15)
		    {
		      /*LDRD (immediate), LDRSB (immediate) */
		      if (bit (arm_insn_r->arm_insn, 21))
			{
			  /* Write back to Rn.  */
			  record_buf[arm_insn_r->reg_rec_count++] = rn;
			}
		    }
		}
	      else
		return -1;
	      break;
	    case 3:
	      if ((opcode1 & 0x05) == 0x0)
		{
		  /* STRD (register) */
		  arm_record_strx (arm_insn_r, &record_buf[0],
				   &record_buf_mem[0], ARM_RECORD_STRD);
		}
	      else if ((opcode1 & 0x05) == 0x1)
		{
		  /* LDRSH (register) */
		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  arm_insn_r->reg_rec_count = 1;

		  if (bit (arm_insn_r->arm_insn, 21))
		    {
		      /* Write back to Rn.  */
		      record_buf[arm_insn_r->reg_rec_count++]
			= bits (arm_insn_r->arm_insn, 16, 19);
		    }
		}
	      else if ((opcode1 & 0x05) == 0x4)
		{
		  /* STRD (immediate) */
		  arm_record_strx (arm_insn_r, &record_buf[0],
				   &record_buf_mem[0], ARM_RECORD_STRD);
		}
	      else if ((opcode1 & 0x05) == 0x5)
		{
		  /* LDRSH (immediate), LDRSH (literal) */
		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  arm_insn_r->reg_rec_count = 1;

		  if (bit (arm_insn_r->arm_insn, 21))
		    {
		      /* Write back to Rn.  */
		      record_buf[arm_insn_r->reg_rec_count++]
			= bits (arm_insn_r->arm_insn, 16, 19);
		    }
		}
	      else
		return -1;
	      break;
	    default:
	      return -1;
	    }
	}
    }

  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
  return 0;
}
/* Handling opcode 001 insns.  */

/* Record handler for ARM data-processing (immediate) instructions and
   MSR (immediate).  Notes which registers the instruction is about to
   modify so process record can save them, then hands the lists to
   REG_ALLOC/MEM_ALLOC on ARM_INSN_R.
   NOTE(review): braces/return lines are not visible in this extraction;
   nesting below is indicated by indentation only.  */
arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
  uint32_t record_buf[8], record_buf_mem[8];

  /* Cache major opcode (bits 21-24) and decode field (bits 4-7).  */
  arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
  arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);

  /* MSR (immediate): opcode 9/11 with bits 20-21 == 2 and the
     should-be-one/zero bits in place (checked by sbo_sbz).  */
  if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
      && 2 == bits (arm_insn_r->arm_insn, 20, 21)
      && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
    /* Handle MSR insn.  */
    if (9 == arm_insn_r->opcode)
      /* CPSR is going to be changed.  */
      record_buf[0] = ARM_PS_REGNUM;
      arm_insn_r->reg_rec_count = 1;
    /* SPSR is going to be changed.  */
    /* NOTE(review): no register is recorded for the SPSR branch in the
       visible code — confirm against the full source.  */
  else if (arm_insn_r->opcode <= 15)
    /* Normal data processing insns.  */
    /* Out of 11 shifter operands mode, all the insn modifies destination
       register, which is specified by 13-16 decode.  */
    record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
    record_buf[1] = ARM_PS_REGNUM;    /* Flags may be updated too.  */
    arm_insn_r->reg_rec_count = 2;

  /* Commit the collected register/memory records.  */
  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
/* Record handler for ARM media instructions (parallel add/sub, packing,
   signed multiplies, bit-field ops, USAD8/USADA8).  Dispatches on
   bits 22-24 and records the destination registers the instruction
   will clobber.
   NOTE(review): braces and switch case labels are missing from this
   extraction; indentation below reflects the apparent structure.  */
arm_record_media (insn_decode_record *arm_insn_r)
  uint32_t record_buf[8];

  switch (bits (arm_insn_r->arm_insn, 22, 24))
    /* Parallel addition and subtraction, signed */
    /* Parallel addition and subtraction, unsigned */
    /* Packing, unpacking, saturation and reversal */
      /* Rd (bits 12-15) is the only register written.  */
      int rd = bits (arm_insn_r->arm_insn, 12, 15);
      record_buf[arm_insn_r->reg_rec_count++] = rd;

    /* Signed multiplies */
      /* For these encodings Rd lives in bits 16-19.  */
      int rd = bits (arm_insn_r->arm_insn, 16, 19);
      unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 22);
      record_buf[arm_insn_r->reg_rec_count++] = rd;
      /* Some variants also set the Q flag in CPSR.  */
      record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
      else if (op1 == 0x4)
        /* Accumulating variant also writes Ra (bits 12-15).  */
        record_buf[arm_insn_r->reg_rec_count++]
          = bits (arm_insn_r->arm_insn, 12, 15);

      if (bit (arm_insn_r->arm_insn, 21)
          && bits (arm_insn_r->arm_insn, 5, 6) == 0x2)
        /* Destination in bits 12-15.  */
        record_buf[arm_insn_r->reg_rec_count++]
          = bits (arm_insn_r->arm_insn, 12, 15);
      else if (bits (arm_insn_r->arm_insn, 20, 21) == 0x0
               && bits (arm_insn_r->arm_insn, 5, 7) == 0x0)
        /* USAD8 and USADA8 */
        record_buf[arm_insn_r->reg_rec_count++]
          = bits (arm_insn_r->arm_insn, 16, 19);

      if (bits (arm_insn_r->arm_insn, 20, 21) == 0x3
          && bits (arm_insn_r->arm_insn, 5, 7) == 0x7)
        /* Permanently UNDEFINED */
      /* BFC, BFI and UBFX */
      record_buf[arm_insn_r->reg_rec_count++]
        = bits (arm_insn_r->arm_insn, 12, 15);

  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
/* Handle ARM mode instructions with opcode 010.  */

/* Record handler for single load/store with immediate offset
   (LDR/LDRB/STR/STRB and their T/literal variants).  Loads record the
   destination register (plus CPSR/PC when the load targets PC); stores
   record the target memory address and width, and both record the base
   register when write-back is in effect.
   NOTE(review): braces and the u_regval declaration are not visible in
   this extraction; indentation shows the apparent structure.  */
arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
  struct regcache *reg_cache = arm_insn_r->regcache;
  uint32_t reg_base, reg_dest;
  uint32_t offset_12, tgt_mem_addr;
  uint32_t record_buf[8], record_buf_mem[8];
  unsigned char wback;

  /* Calculate wback: P == 0 (post-indexed) or W == 1 both write back
     to the base register.  */
  wback = (bit (arm_insn_r->arm_insn, 24) == 0)
          || (bit (arm_insn_r->arm_insn, 21) == 1);

  arm_insn_r->reg_rec_count = 0;
  reg_base = bits (arm_insn_r->arm_insn, 16, 19);

  if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
    /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal),
       LDRBT and LDRT: record the loaded register.  */
    reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
    record_buf[arm_insn_r->reg_rec_count++] = reg_dest;

    /* The LDR instruction is capable of doing branching.  If MOV LR, PC
       precedes a LDR instruction having R15 as reg_base, it
       emulates a branch and link instruction, and hence we need to save
       CPSR and PC as well.  */
    if (ARM_PC_REGNUM == reg_dest)
      record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;

    /* If wback is true, also save the base register, which is going to be
       written back.  */
    record_buf[arm_insn_r->reg_rec_count++] = reg_base;

    /* STR (immediate), STRB (immediate), STRBT and STRT.  */
    offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
    regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);

    /* Handle bit U.  */
    if (bit (arm_insn_r->arm_insn, 23))
      /* U == 1: Add the offset. */
      tgt_mem_addr = (uint32_t) u_regval + offset_12;
      /* U == 0: subtract the offset. */
      tgt_mem_addr = (uint32_t) u_regval - offset_12;

    /* Bit 22 tells us whether the store instruction writes 1 byte or 4
       bytes.  */
    if (bit (arm_insn_r->arm_insn, 22))
      /* STRB and STRBT: 1 byte.  */
      record_buf_mem[0] = 1;
      /* STR and STRT: 4 bytes.  */
      record_buf_mem[0] = 4;

    /* Handle bit P: pre-indexed uses the computed address, post-indexed
       stores at the unmodified base value.  */
    if (bit (arm_insn_r->arm_insn, 24))
      record_buf_mem[1] = tgt_mem_addr;
      record_buf_mem[1] = (uint32_t) u_regval;

    arm_insn_r->mem_rec_count = 1;

    /* If wback is true, also save the base register, which is going to be
       written back.  */
    record_buf[arm_insn_r->reg_rec_count++] = reg_base;

  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
/* Handling opcode 011 insns.  */

/* Record handler for single load/store with register (or scaled
   register) offset.  Bit 4 set means this is actually a media
   instruction, which is delegated to arm_record_media.  For loads the
   destination register (plus CPSR when Rd is PC) is recorded; for
   stores the effective address Rn +/- (possibly shifted) Rm and the
   access width are recorded, plus Rn itself for the write-back
   addressing modes.
   NOTE(review): braces, case labels and the s_word declaration are not
   visible in this extraction; indentation shows the apparent
   structure.  */
arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
  struct regcache *reg_cache = arm_insn_r->regcache;
  uint32_t shift_imm = 0;
  uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
  uint32_t offset_12 = 0, tgt_mem_addr = 0;
  uint32_t record_buf[8], record_buf_mem[8];
  ULONGEST u_regval[2];

  /* Bit 4 set distinguishes media instructions from register-offset
     load/store in this opcode space.  */
  if (bit (arm_insn_r->arm_insn, 4))
    return arm_record_media (arm_insn_r);

  arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
  arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);

  /* Handle enhanced store insns and LDRD DSP insn,
     order begins according to addressing modes for store insns.  */
  if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
    reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
    /* LDR insn has a capability to do branching, if
       MOV LR, PC is preceded by LDR insn having Rn as R15
       in that case, it emulates branch and link insn, and hence we
       need to save CPSR and PC as well.  */
    if (15 != reg_dest)
      record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
      arm_insn_r->reg_rec_count = 1;
      /* Loading into PC: also save CPSR.  */
      record_buf[0] = reg_dest;
      record_buf[1] = ARM_PS_REGNUM;
      arm_insn_r->reg_rec_count = 2;

    /* Bits 4-11 all zero: plain (unscaled) register offset.  */
    if (! bits (arm_insn_r->arm_insn, 4, 11))
      /* Store insn, register offset and register pre-indexed,
         register post-indexed.  */
      /* Get Rm.  */
      reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
      /* Get Rn.  */
      reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
      regcache_raw_read_unsigned (reg_cache, reg_src1
      regcache_raw_read_unsigned (reg_cache, reg_src2
      if (15 == reg_src2)
        /* If R15 was used as Rn, hence current PC+8.  */
        /* Pre-indexed mode doesn't reach here; illegal insn.  */
        u_regval[0] = u_regval[0] + 8;
      /* Calculate target store address, Rn +/- Rm, register offset.  */
      if (bit (arm_insn_r->arm_insn, 23))
        tgt_mem_addr = u_regval[0] + u_regval[1];
        tgt_mem_addr = u_regval[1] - u_regval[0];

      /* Pick access width (4 bytes for word, 1 for byte) by opcode.  */
      switch (arm_insn_r->opcode)
        record_buf_mem[0] = 4;
        record_buf_mem[0] = 1;
        gdb_assert_not_reached ("no decoding pattern found");
      record_buf_mem[1] = tgt_mem_addr;
      arm_insn_r->mem_rec_count = 1;

      /* All pre/post-indexed opcodes write the base register back.  */
      if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
          || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
          || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
          || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
          || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
          || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
        /* Rn is going to be changed in pre-indexed mode and
           post-indexed mode as well.  */
        record_buf[0] = reg_src2;
        arm_insn_r->reg_rec_count = 1;

      /* Store insn, scaled register offset; scaled pre-indexed.  */
      /* Shift type lives in bits 5-6 (LSL/LSR/ASR/ROR).  */
      offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
      /* Get Rm.  */
      reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
      /* Get Rn.  */
      reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
      /* Get shift_imm.  */
      shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
      regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
      regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
      regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
      /* Offset_12 used as shift.  */
        /* Offset_12 used as index: LSL case.  */
        offset_12 = u_regval[0] << shift_imm;

        /* LSR: shift_imm == 0 encodes a shift of 32, i.e. result 0.  */
        offset_12 = (!shift_imm)?0:u_regval[0] >> shift_imm;

        /* ASR with shift 32: result is all sign bits.  */
        if (bit (u_regval[0], 31))
          offset_12 = 0xFFFFFFFF;

          /* This is arithmetic shift.  */
          offset_12 = s_word >> shift_imm;

        /* RRX: rotate right with extend through the carry flag.  */
        regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
        /* Get C flag value and shift it by 31.  */
        offset_12 = (((bit (u_regval[1], 29)) << 31) \
                     | (u_regval[0]) >> 1);

        /* ROR: rotate right by shift_imm.  */
        offset_12 = (u_regval[0] >> shift_imm) \
                    (sizeof(uint32_t) - shift_imm));

        gdb_assert_not_reached ("no decoding pattern found");

      /* Re-read Rn for the address computation.  */
      regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
      if (bit (arm_insn_r->arm_insn, 23))
        tgt_mem_addr = u_regval[1] + offset_12;
        tgt_mem_addr = u_regval[1] - offset_12;

      /* Access width by opcode, as in the unscaled case.  */
      switch (arm_insn_r->opcode)
        record_buf_mem[0] = 4;
        record_buf_mem[0] = 1;
        gdb_assert_not_reached ("no decoding pattern found");
      record_buf_mem[1] = tgt_mem_addr;
      arm_insn_r->mem_rec_count = 1;

      if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
          || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
          || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
          || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
          || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
          || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
        /* Rn is going to be changed in register scaled pre-indexed
           mode, and scaled post indexed mode.  */
        record_buf[0] = reg_src2;
        arm_insn_r->reg_rec_count = 1;

  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
/* Handle ARM mode instructions with opcode 100.  */

/* Record handler for load/store multiple (LDM*/STM* in all four
   addressing modes).  Loads record every register in the register
   list (plus base on write-back and CPSR); stores record one memory
   range whose start depends on the addressing mode in bits 23-24.
   NOTE(review): braces and the wback/u_regval declarations are not
   visible in this extraction; indentation shows the apparent
   structure.  */
arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
  struct regcache *reg_cache = arm_insn_r->regcache;
  uint32_t register_count = 0, register_bits;
  uint32_t reg_base, addr_mode;
  /* Up to 16 list registers + extras; stores can span many words.  */
  uint32_t record_buf[24], record_buf_mem[48];

  /* Fetch the list of registers.  */
  register_bits = bits (arm_insn_r->arm_insn, 0, 15);
  arm_insn_r->reg_rec_count = 0;

  /* Fetch the base register that contains the address we are loading data
     from/storing data to.  */
  reg_base = bits (arm_insn_r->arm_insn, 16, 19);

  /* Calculate wback.  */
  wback = (bit (arm_insn_r->arm_insn, 21) == 1);

  if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
    /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB.  */

    /* Find out which registers are going to be loaded from memory.  */
    while (register_bits)
      if (register_bits & 0x00000001)
        record_buf[arm_insn_r->reg_rec_count++] = register_count;
      register_bits = register_bits >> 1;

    /* If wback is true, also save the base register, which is going to be
       written back.  */
    record_buf[arm_insn_r->reg_rec_count++] = reg_base;

    /* Save the CPSR register.  */
    record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;

    /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA).  */
    addr_mode = bits (arm_insn_r->arm_insn, 23, 24);

    regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);

    /* Find out how many registers are going to be stored to memory.  */
    while (register_bits)
      if (register_bits & 0x00000001)
      register_bits = register_bits >> 1;

      /* STMDA (STMED): Decrement after.  */
      record_buf_mem[1] = (uint32_t) u_regval
                          - register_count * ARM_INT_REGISTER_SIZE + 4;
      /* STM (STMIA, STMEA): Increment after.  */
      record_buf_mem[1] = (uint32_t) u_regval;
      /* STMDB (STMFD): Decrement before.  */
      record_buf_mem[1] = (uint32_t) u_regval
                          - register_count * ARM_INT_REGISTER_SIZE;
      /* STMIB (STMFA): Increment before.  */
      record_buf_mem[1] = (uint32_t) u_regval + ARM_INT_REGISTER_SIZE;
      gdb_assert_not_reached ("no decoding pattern found");

    /* One contiguous range covering the whole register list.  */
    record_buf_mem[0] = register_count * ARM_INT_REGISTER_SIZE;
    arm_insn_r->mem_rec_count = 1;

    /* If wback is true, also save the base register, which is going to be
       written back.  */
    record_buf[arm_insn_r->reg_rec_count++] = reg_base;

  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
/* Handling opcode 101 insns.  */

/* Record handler for B, BL and BLX(1): only BL (bit 24 set) modifies
   state visible to record/replay, namely the link register.  */
arm_record_b_bl (insn_decode_record *arm_insn_r)
  uint32_t record_buf[8];

  /* Handle B, BL, BLX(1) insns.  */
  /* B simply branches so we do nothing here.  */
  /* Note: BLX(1) doesn't fall here but instead it falls into
     extension space.  */
  if (bit (arm_insn_r->arm_insn, 24))
    /* BL: LR receives the return address.  */
    record_buf[0] = ARM_LR_REGNUM;
    arm_insn_r->reg_rec_count = 1;

  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
/* Fallback record handler: report an instruction that process record
   cannot handle, showing the raw opcode and its address.  */
arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
  printf_unfiltered (_("Process record does not support instruction "
                       "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
                     paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
/* Record handler for vector data transfer instructions.  */

/* Decides which ARM core / VFP registers a VMOV/VMRS/VMSR/VDUP
   core<->extension-register transfer will modify, keyed on the L bit
   (bit 20), the C bit (bit 8) and the A field (bits 21-23).
   NOTE(review): reg_v and bits_a are both read from bits 21-23 here —
   verify against the full source whether reg_v should use a different
   field.  Braces are not visible in this extraction; indentation shows
   the apparent structure.  */
arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
  uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
  uint32_t record_buf[4];

  reg_t = bits (arm_insn_r->arm_insn, 12, 15);
  reg_v = bits (arm_insn_r->arm_insn, 21, 23);
  bits_a = bits (arm_insn_r->arm_insn, 21, 23);
  bit_l = bit (arm_insn_r->arm_insn, 20);
  bit_c = bit (arm_insn_r->arm_insn, 8);

  /* Handle VMOV instruction.  */
  if (bit_l && bit_c)
    record_buf[0] = reg_t;
    arm_insn_r->reg_rec_count = 1;
  else if (bit_l && !bit_c)
    /* Handle VMOV instruction.  */
    if (bits_a == 0x00)
      record_buf[0] = reg_t;
      arm_insn_r->reg_rec_count = 1;
    /* Handle VMRS instruction.  */
    else if (bits_a == 0x07)
      /* VMRS APSR_nzcv: flags go to CPSR rather than a core reg.  */
      reg_t = ARM_PS_REGNUM;
      record_buf[0] = reg_t;
      arm_insn_r->reg_rec_count = 1;
  else if (!bit_l && !bit_c)
    /* Handle VMOV instruction.  */
    if (bits_a == 0x00)
      record_buf[0] = ARM_D0_REGNUM + reg_v;
      arm_insn_r->reg_rec_count = 1;
    /* Handle VMSR instruction.  */
    else if (bits_a == 0x07)
      record_buf[0] = ARM_FPSCR_REGNUM;
      arm_insn_r->reg_rec_count = 1;
  else if (!bit_l && bit_c)
    /* Handle VMOV instruction.  */
    if (!(bits_a & 0x04))
      /* Scalar destination: D register number is reg_v with bit 7 of
         the insn as the high bit.  */
      record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
      arm_insn_r->reg_rec_count = 1;
    /* Handle VDUP instruction.  */
      if (bit (arm_insn_r->arm_insn, 21))
        /* Q-register VDUP clobbers two consecutive D registers.  */
        reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
        record_buf[0] = reg_v + ARM_D0_REGNUM;
        record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
        arm_insn_r->reg_rec_count = 2;
        /* D-register VDUP clobbers a single D register.  */
        reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
        record_buf[0] = reg_v + ARM_D0_REGNUM;
        arm_insn_r->reg_rec_count = 1;

  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
/* Record handler for extension register load/store instructions.  */

/* Handles 64-bit VMOV between core and extension registers, VSTM/VPUSH,
   VLDM/VPOP, VSTR and VLDR.  Stores record the affected memory words;
   loads record the D registers (S registers are folded onto their
   containing D register).  SINGLE_REG is set when bit 8 is clear
   (single-precision form).
   NOTE(review): braces are not visible in this extraction; indentation
   shows the apparent structure.  */
arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
  uint32_t opcode, single_reg;
  uint8_t op_vldm_vstm;
  uint32_t record_buf[8], record_buf_mem[128];
  ULONGEST u_regval = 0;

  struct regcache *reg_cache = arm_insn_r->regcache;

  opcode = bits (arm_insn_r->arm_insn, 20, 24);
  single_reg = !bit (arm_insn_r->arm_insn, 8);
  op_vldm_vstm = opcode & 0x1b;

  /* Handle VMOV instructions.  */
  if ((opcode & 0x1e) == 0x04)
    if (bit (arm_insn_r->arm_insn, 20)) /* to_arm_registers bit 20? */
      /* Transfer to two ARM core registers Rt and Rt2.  */
      record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
      record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
      arm_insn_r->reg_rec_count = 2;
      /* Transfer to extension registers.  */
      uint8_t reg_m = bits (arm_insn_r->arm_insn, 0, 3);
      uint8_t bit_m = bit (arm_insn_r->arm_insn, 5);

      /* The first S register number m is REG_M:M (M is bit 5),
         the corresponding D register number is REG_M:M / 2, which
         is REG_M.  */
      record_buf[arm_insn_r->reg_rec_count++] = ARM_D0_REGNUM + reg_m;
      /* The second S register number is REG_M:M + 1, the
         corresponding D register number is (REG_M:M + 1) / 2.
         IOW, if bit M is 1, the first and second S registers
         are mapped to different D registers, otherwise, they are
         in the same D register.  */
      record_buf[arm_insn_r->reg_rec_count++]
        = ARM_D0_REGNUM + reg_m + 1;

      /* Double-precision form: one D register.  */
      record_buf[0] = ((bit_m << 4) + reg_m + ARM_D0_REGNUM);
      arm_insn_r->reg_rec_count = 1;

  /* Handle VSTM and VPUSH instructions.  */
  else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
           || op_vldm_vstm == 0x12)
    uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
    uint32_t memory_index = 0;

    reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
    regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
    imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
    imm_off32 = imm_off8 << 2;
    memory_count = imm_off8;

    /* Bit 23 (U): add=store upward from Rn, otherwise downward.  */
    if (bit (arm_insn_r->arm_insn, 23))
      start_address = u_regval;
      start_address = u_regval - imm_off32;

    /* Write-back of the base register.  */
    if (bit (arm_insn_r->arm_insn, 21))
      record_buf[0] = reg_rn;
      arm_insn_r->reg_rec_count = 1;

    /* Record each stored word (pairs of words for double regs).  */
    while (memory_count > 0)
        record_buf_mem[memory_index] = 4;
        record_buf_mem[memory_index + 1] = start_address;
        start_address = start_address + 4;
        memory_index = memory_index + 2;

        record_buf_mem[memory_index] = 4;
        record_buf_mem[memory_index + 1] = start_address;
        record_buf_mem[memory_index + 2] = 4;
        record_buf_mem[memory_index + 3] = start_address + 4;
        start_address = start_address + 8;
        memory_index = memory_index + 4;

    arm_insn_r->mem_rec_count = (memory_index >> 1);

  /* Handle VLDM instructions.  */
  else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
           || op_vldm_vstm == 0x13)
    uint32_t reg_count, reg_vd;
    uint32_t reg_index = 0;
    uint32_t bit_d = bit (arm_insn_r->arm_insn, 22);

    reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
    reg_count = bits (arm_insn_r->arm_insn, 0, 7);

    /* REG_VD is the first D register number.  If the instruction
       loads memory to S registers (SINGLE_REG is TRUE), the register
       number is (REG_VD << 1 | bit D), so the corresponding D
       register number is (REG_VD << 1 | bit D) / 2 = REG_VD.  */
    reg_vd = reg_vd | (bit_d << 4);

    if (bit (arm_insn_r->arm_insn, 21) /* write back */)
      record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);

    /* If the instruction loads memory to D register, REG_COUNT should
       be divided by 2, according to the ARM Architecture Reference
       Manual.  If the instruction loads memory to S register, divide by
       2 as well because two S registers are mapped to D register.  */
    reg_count = reg_count / 2;
    if (single_reg && bit_d)
      /* Increase the register count if S register list starts from
         an odd number (bit d is one).  */

    while (reg_count > 0)
      record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
    arm_insn_r->reg_rec_count = reg_index;

  /* VSTR Vector store register.  */
  else if ((opcode & 0x13) == 0x10)
    uint32_t start_address, reg_rn, imm_off32, imm_off8;
    uint32_t memory_index = 0;

    reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
    regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
    imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
    imm_off32 = imm_off8 << 2;

    if (bit (arm_insn_r->arm_insn, 23))
      start_address = u_regval + imm_off32;
      start_address = u_regval - imm_off32;

      /* Single-precision: one word stored.  */
      record_buf_mem[memory_index] = 4;
      record_buf_mem[memory_index + 1] = start_address;
      arm_insn_r->mem_rec_count = 1;

      /* Double-precision: two consecutive words stored.  */
      record_buf_mem[memory_index] = 4;
      record_buf_mem[memory_index + 1] = start_address;
      record_buf_mem[memory_index + 2] = 4;
      record_buf_mem[memory_index + 3] = start_address + 4;
      arm_insn_r->mem_rec_count = 2;

  /* VLDR Vector load register.  */
  else if ((opcode & 0x13) == 0x11)
    uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);

      /* Double-precision: bit 22 is the high bit of the D register.  */
      reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
      record_buf[0] = ARM_D0_REGNUM + reg_vd;

      reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
      /* Record register D rather than pseudo register S.  */
      record_buf[0] = ARM_D0_REGNUM + reg_vd / 2;
    arm_insn_r->reg_rec_count = 1;

  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
/* Record handler for arm/thumb mode VFP data processing instructions.  */

/* Classifies each VFP data-processing instruction (VMLA/VMLS, VNMLA/
   VNMLS/VNMUL, VMUL, VADD/VSUB, VDIV, VNEG/VABS, VSQRT, VCVT variants,
   VCMP/VCMPE, ...) into one of four record shapes, then records the
   destination accordingly: INSN_T0 = two consecutive D registers,
   INSN_T1 = one D register (high bit from D bit), INSN_T2 = D register
   holding an S destination, INSN_T3 = FPSCR only.
   NOTE(review): braces, case labels and some else branches are not
   visible in this extraction; indentation shows the apparent
   structure.  */
arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
  uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
  uint32_t record_buf[4];
  enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
  enum insn_types curr_insn_type = INSN_INV;

  reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
  opc1 = bits (arm_insn_r->arm_insn, 20, 23);
  opc2 = bits (arm_insn_r->arm_insn, 16, 19);
  opc3 = bits (arm_insn_r->arm_insn, 6, 7);
  dp_op_sz = bit (arm_insn_r->arm_insn, 8);
  bit_d = bit (arm_insn_r->arm_insn, 22);
  /* Mask off the "D" bit.  */
  opc1 = opc1 & ~0x04;

  /* Handle VMLA, VMLS.  */
    if (bit (arm_insn_r->arm_insn, 10))
      if (bit (arm_insn_r->arm_insn, 6))
        curr_insn_type = INSN_T0;
        curr_insn_type = INSN_T1;
      curr_insn_type = INSN_T1;
      curr_insn_type = INSN_T2;
  /* Handle VNMLA, VNMLS, VNMUL.  */
  else if (opc1 == 0x01)
      curr_insn_type = INSN_T1;
      curr_insn_type = INSN_T2;
  /* Handle VMUL.  */
  else if (opc1 == 0x02 && !(opc3 & 0x01))
    if (bit (arm_insn_r->arm_insn, 10))
      if (bit (arm_insn_r->arm_insn, 6))
        curr_insn_type = INSN_T0;
        curr_insn_type = INSN_T1;
      curr_insn_type = INSN_T1;
      curr_insn_type = INSN_T2;
  /* Handle VADD, VSUB.  */
  else if (opc1 == 0x03)
    if (!bit (arm_insn_r->arm_insn, 9))
      if (bit (arm_insn_r->arm_insn, 6))
        curr_insn_type = INSN_T0;
        curr_insn_type = INSN_T1;
      curr_insn_type = INSN_T1;
      curr_insn_type = INSN_T2;
  /* Handle VDIV.  */
  else if (opc1 == 0x08)
      curr_insn_type = INSN_T1;
      curr_insn_type = INSN_T2;
  /* Handle all other vfp data processing instructions.  */
  else if (opc1 == 0x0b)
    /* Handle VMOV.  */
    if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
      if (bit (arm_insn_r->arm_insn, 4))
        if (bit (arm_insn_r->arm_insn, 6))
          curr_insn_type = INSN_T0;
          curr_insn_type = INSN_T1;
        curr_insn_type = INSN_T1;
        curr_insn_type = INSN_T2;
    /* Handle VNEG and VABS.  */
    else if ((opc2 == 0x01 && opc3 == 0x01)
             || (opc2 == 0x00 && opc3 == 0x03))
      if (!bit (arm_insn_r->arm_insn, 11))
        if (bit (arm_insn_r->arm_insn, 6))
          curr_insn_type = INSN_T0;
          curr_insn_type = INSN_T1;
        curr_insn_type = INSN_T1;
        curr_insn_type = INSN_T2;
    /* Handle VSQRT.  */
    else if (opc2 == 0x01 && opc3 == 0x03)
        curr_insn_type = INSN_T1;
        curr_insn_type = INSN_T2;
    /* Handle VCVT between double and single precision.  */
    else if (opc2 == 0x07 && opc3 == 0x03)
        curr_insn_type = INSN_T1;
        curr_insn_type = INSN_T2;
    else if (opc3 & 0x01)
      /* Handle VCVT between floating point and integer/fixed point.  */
      if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
        if (!bit (arm_insn_r->arm_insn, 18))
          curr_insn_type = INSN_T2;
            curr_insn_type = INSN_T1;
            curr_insn_type = INSN_T2;
      /* Handle VCVT between floating point and fixed point.  */
      else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
          curr_insn_type = INSN_T1;
          curr_insn_type = INSN_T2;
      /* Handle VCVTB, VCVTT.  */
      else if ((opc2 & 0x0e) == 0x02)
        curr_insn_type = INSN_T2;
      /* Handle VCMP, VCMPE.  */
      else if ((opc2 & 0x0e) == 0x04)
        curr_insn_type = INSN_T3;

  switch (curr_insn_type)
      /* INSN_T0: destination spans two consecutive D registers.  */
      reg_vd = reg_vd | (bit_d << 4);
      record_buf[0] = reg_vd + ARM_D0_REGNUM;
      record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
      arm_insn_r->reg_rec_count = 2;

      /* INSN_T1: one D register, D bit is the high bit.  */
      reg_vd = reg_vd | (bit_d << 4);
      record_buf[0] = reg_vd + ARM_D0_REGNUM;
      arm_insn_r->reg_rec_count = 1;

      /* INSN_T2: S destination folded onto its containing D register.  */
      reg_vd = (reg_vd << 1) | bit_d;
      record_buf[0] = reg_vd + ARM_D0_REGNUM;
      arm_insn_r->reg_rec_count = 1;

      /* INSN_T3: only the status register is modified.  */
      record_buf[0] = ARM_FPSCR_REGNUM;
      arm_insn_r->reg_rec_count = 1;

      gdb_assert_not_reached ("no decoding pattern found");

  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
/* Handling opcode 110 insns.  */

/* Record handler for Advanced SIMD / VFP / coprocessor load-store and
   64-bit transfer space.  VFP-coprocessor (coproc 10/11) ld/st and
   64-bit core<->extension transfers are delegated to
   arm_record_exreg_ld_st_insn; MRRC-style moves to two core registers
   record both destination registers; everything else is reported
   unsupported.
   NOTE(review): braces, some condition lines and the reg_t declaration
   are not visible in this extraction.  */
arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
  uint32_t op1, op1_ebit, coproc;

  coproc = bits (arm_insn_r->arm_insn, 8, 11);
  op1 = bits (arm_insn_r->arm_insn, 20, 25);
  op1_ebit = bit (arm_insn_r->arm_insn, 20);

  if ((coproc & 0x0e) == 0x0a)
    /* Handle extension register ld/st instructions.  */
      return arm_record_exreg_ld_st_insn (arm_insn_r);

    /* 64-bit transfers between arm core and extension registers.  */
    if ((op1 & 0x3e) == 0x04)
      return arm_record_exreg_ld_st_insn (arm_insn_r);

    /* Handle coprocessor ld/st instructions.  */
        /* Store: nothing to record that we support.  */
        return arm_record_unsupported_insn (arm_insn_r);
        /* Load.  */
        return arm_record_unsupported_insn (arm_insn_r);

    /* Move to coprocessor from two arm core registers.  */
      return arm_record_unsupported_insn (arm_insn_r);

    /* Move to two arm core registers from coprocessor.  */
      /* Rt (bits 12-15) and Rt2 (bits 16-19) are both written.  */
      reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
      reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
      arm_insn_r->reg_rec_count = 2;

      REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
  return arm_record_unsupported_insn (arm_insn_r);
/* Handling opcode 111 insns.  */

/* Record handler for the SWI/SVC and coprocessor data-processing space.
   SVC is forwarded to the OS-specific syscall recorder when available
   (OABI encodes the syscall number in the insn, EABI passes it in r7);
   VFP subspaces are forwarded to the VFP data-processing and vector
   transfer handlers; MRC-style moves record the destination core
   register (CPSR when Rt is 15).
   NOTE(review): braces, case labels and several return/else lines are
   not visible in this extraction; indentation shows the apparent
   structure.  */
arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
  uint32_t op, op1_ebit, coproc, bits_24_25;
  struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
  struct regcache *reg_cache = arm_insn_r->regcache;

  arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
  coproc = bits (arm_insn_r->arm_insn, 8, 11);
  op1_ebit = bit (arm_insn_r->arm_insn, 20);
  op = bit (arm_insn_r->arm_insn, 4);
  bits_24_25 = bits (arm_insn_r->arm_insn, 24, 25);

  /* Handle arm SWI/SVC system call instructions.  */
  if (bits_24_25 == 0x3)
    if (tdep->arm_syscall_record != NULL)
      ULONGEST svc_operand, svc_number;

      svc_operand = (0x00ffffff & arm_insn_r->arm_insn);

      if (svc_operand)  /* OABI.  */
        svc_number = svc_operand - 0x900000;
        /* EABI: syscall number is in r7.  */
        regcache_raw_read_unsigned (reg_cache, 7, &svc_number);

      return tdep->arm_syscall_record (reg_cache, svc_number);

      printf_unfiltered (_("no syscall record support\n"));
  else if (bits_24_25 == 0x02)
      if ((coproc & 0x0e) == 0x0a)
        /* 8, 16, and 32-bit transfer */
        return arm_record_vdata_transfer_insn (arm_insn_r);
          /* MRC-style move to an ARM core register.  */
          uint32_t record_buf[1];

          record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
          /* Rt == 15 means the flags, i.e. CPSR, receive the value.  */
          if (record_buf[0] == 15)
            record_buf[0] = ARM_PS_REGNUM;

          arm_insn_r->reg_rec_count = 1;
          REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
      if ((coproc & 0x0e) == 0x0a)
        /* VFP data-processing instructions.  */
        return arm_record_vfp_data_proc_insn (arm_insn_r);
    unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 25);
      if ((coproc & 0x0e) != 0x0a)
    else if (op1 == 4 || op1 == 5)
      if ((coproc & 0x0e) == 0x0a)
        /* 64-bit transfers between ARM core and extension */
    else if (op1 == 0 || op1 == 1)
    if ((coproc & 0x0e) == 0x0a)
      /* Extension register load/store */
      /* STC, STC2, LDC, LDC2 */
/* Handling opcode 000 insns.  */

/* Record handler for Thumb shift/add/sub (format 1/2): the destination
   register Rd (bits 0-2) and the flags are modified.  */
thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
  uint32_t record_buf[8];
  uint32_t reg_src1 = 0;

  reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
  record_buf[0] = ARM_PS_REGNUM;
  record_buf[1] = reg_src1;
  thumb_insn_r->reg_rec_count = 2;

  REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
/* Handling opcode 001 insns.  */

/* Record handler for Thumb MOV/CMP/ADD/SUB with 8-bit immediate
   (format 3): Rd (bits 8-10) and the flags are modified.  */
thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
  uint32_t record_buf[8];
  uint32_t reg_src1 = 0;

  reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
  record_buf[0] = ARM_PS_REGNUM;
  record_buf[1] = reg_src1;
  thumb_insn_r->reg_rec_count = 2;

  REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
/* Handling opcode 010 insns.  */

/* Record handler for Thumb load/store with register offset, literal
   pool loads, special data processing and BX.  Loads record the
   destination register; stores record the target address (sum of the
   two offset registers) and the access width; data-processing forms
   record the destination register and CPSR.
   NOTE(review): braces and some else lines are not visible in this
   extraction; indentation shows the apparent structure.  */
thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
  struct regcache *reg_cache = thumb_insn_r->regcache;
  uint32_t record_buf[8], record_buf_mem[8];

  uint32_t reg_src1 = 0, reg_src2 = 0;
  uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;

  ULONGEST u_regval[2] = {0};

  opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);

  if (bit (thumb_insn_r->arm_insn, 12))
    /* Handle load/store register offset.  */
    uint32_t opB = bits (thumb_insn_r->arm_insn, 9, 11);

    if (in_inclusive_range (opB, 4U, 7U))
      /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH.  */
      reg_src1 = bits (thumb_insn_r->arm_insn,0, 2);
      record_buf[0] = reg_src1;
      thumb_insn_r->reg_rec_count = 1;
    else if (in_inclusive_range (opB, 0U, 2U))
      /* STR(2), STRB(2), STRH(2) .  */
      reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
      reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
      regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
      regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);

      /* Access width selected by opB.  */
      record_buf_mem[0] = 4;    /* STR (2).  */
      record_buf_mem[0] = 1;    /* STRB (2).  */
      record_buf_mem[0] = 2;    /* STRH (2).  */

      record_buf_mem[1] = u_regval[0] + u_regval[1];
      thumb_insn_r->mem_rec_count = 1;

  else if (bit (thumb_insn_r->arm_insn, 11))
    /* Handle load from literal pool.  */
    /* LDR(3).  */
    reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
    record_buf[0] = reg_src1;
    thumb_insn_r->reg_rec_count = 1;

    /* Special data instructions and branch and exchange */
    opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
    opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
    if ((3 == opcode2) && (!opcode3))
      /* Branch with exchange.  */
      record_buf[0] = ARM_PS_REGNUM;
      thumb_insn_r->reg_rec_count = 1;

      /* Format 8; special data processing insns.  */
      record_buf[0] = ARM_PS_REGNUM;
      /* Destination is H1:Rd (bit 7 is the high register bit).  */
      record_buf[1] = (bit (thumb_insn_r->arm_insn, 7) << 3
                       | bits (thumb_insn_r->arm_insn, 0, 2));
      thumb_insn_r->reg_rec_count = 2;

    /* Format 5; data processing insns.  */
    reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
    if (bit (thumb_insn_r->arm_insn, 7))
      /* High register operand.  */
      reg_src1 = reg_src1 + 8;

    record_buf[0] = ARM_PS_REGNUM;
    record_buf[1] = reg_src1;
    thumb_insn_r->reg_rec_count = 2;

  REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11848 /* Handling opcode 001 insns. */
11851 thumb_record_ld_st_imm_offset (insn_decode_record
*thumb_insn_r
)
11853 struct regcache
*reg_cache
= thumb_insn_r
->regcache
;
11854 uint32_t record_buf
[8], record_buf_mem
[8];
11856 uint32_t reg_src1
= 0;
11857 uint32_t opcode
= 0, immed_5
= 0;
11859 ULONGEST u_regval
= 0;
11861 opcode
= bits (thumb_insn_r
->arm_insn
, 11, 12);
11866 reg_src1
= bits (thumb_insn_r
->arm_insn
, 0, 2);
11867 record_buf
[0] = reg_src1
;
11868 thumb_insn_r
->reg_rec_count
= 1;
11873 reg_src1
= bits (thumb_insn_r
->arm_insn
, 3, 5);
11874 immed_5
= bits (thumb_insn_r
->arm_insn
, 6, 10);
11875 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
);
11876 record_buf_mem
[0] = 4;
11877 record_buf_mem
[1] = u_regval
+ (immed_5
* 4);
11878 thumb_insn_r
->mem_rec_count
= 1;
11881 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
11882 MEM_ALLOC (thumb_insn_r
->arm_mems
, thumb_insn_r
->mem_rec_count
,
11888 /* Handling opcode 100 insns. */
11891 thumb_record_ld_st_stack (insn_decode_record
*thumb_insn_r
)
11893 struct regcache
*reg_cache
= thumb_insn_r
->regcache
;
11894 uint32_t record_buf
[8], record_buf_mem
[8];
11896 uint32_t reg_src1
= 0;
11897 uint32_t opcode
= 0, immed_8
= 0, immed_5
= 0;
11899 ULONGEST u_regval
= 0;
11901 opcode
= bits (thumb_insn_r
->arm_insn
, 11, 12);
11906 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
11907 record_buf
[0] = reg_src1
;
11908 thumb_insn_r
->reg_rec_count
= 1;
11910 else if (1 == opcode
)
11913 reg_src1
= bits (thumb_insn_r
->arm_insn
, 0, 2);
11914 record_buf
[0] = reg_src1
;
11915 thumb_insn_r
->reg_rec_count
= 1;
11917 else if (2 == opcode
)
11920 immed_8
= bits (thumb_insn_r
->arm_insn
, 0, 7);
11921 regcache_raw_read_unsigned (reg_cache
, ARM_SP_REGNUM
, &u_regval
);
11922 record_buf_mem
[0] = 4;
11923 record_buf_mem
[1] = u_regval
+ (immed_8
* 4);
11924 thumb_insn_r
->mem_rec_count
= 1;
11926 else if (0 == opcode
)
11929 immed_5
= bits (thumb_insn_r
->arm_insn
, 6, 10);
11930 reg_src1
= bits (thumb_insn_r
->arm_insn
, 3, 5);
11931 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
);
11932 record_buf_mem
[0] = 2;
11933 record_buf_mem
[1] = u_regval
+ (immed_5
* 2);
11934 thumb_insn_r
->mem_rec_count
= 1;
11937 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
11938 MEM_ALLOC (thumb_insn_r
->arm_mems
, thumb_insn_r
->mem_rec_count
,
11944 /* Handling opcode 101 insns. */
11947 thumb_record_misc (insn_decode_record
*thumb_insn_r
)
11949 struct regcache
*reg_cache
= thumb_insn_r
->regcache
;
11951 uint32_t opcode
= 0;
11952 uint32_t register_bits
= 0, register_count
= 0;
11953 uint32_t index
= 0, start_address
= 0;
11954 uint32_t record_buf
[24], record_buf_mem
[48];
11957 ULONGEST u_regval
= 0;
11959 opcode
= bits (thumb_insn_r
->arm_insn
, 11, 12);
11961 if (opcode
== 0 || opcode
== 1)
11963 /* ADR and ADD (SP plus immediate) */
11965 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
11966 record_buf
[0] = reg_src1
;
11967 thumb_insn_r
->reg_rec_count
= 1;
11971 /* Miscellaneous 16-bit instructions */
11972 uint32_t opcode2
= bits (thumb_insn_r
->arm_insn
, 8, 11);
11977 /* SETEND and CPS */
11980 /* ADD/SUB (SP plus immediate) */
11981 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
11982 record_buf
[0] = ARM_SP_REGNUM
;
11983 thumb_insn_r
->reg_rec_count
= 1;
11985 case 1: /* fall through */
11986 case 3: /* fall through */
11987 case 9: /* fall through */
11992 /* SXTH, SXTB, UXTH, UXTB */
11993 record_buf
[0] = bits (thumb_insn_r
->arm_insn
, 0, 2);
11994 thumb_insn_r
->reg_rec_count
= 1;
11996 case 4: /* fall through */
11999 register_bits
= bits (thumb_insn_r
->arm_insn
, 0, 7);
12000 regcache_raw_read_unsigned (reg_cache
, ARM_SP_REGNUM
, &u_regval
);
12001 while (register_bits
)
12003 if (register_bits
& 0x00000001)
12005 register_bits
= register_bits
>> 1;
12007 start_address
= u_regval
- \
12008 (4 * (bit (thumb_insn_r
->arm_insn
, 8) + register_count
));
12009 thumb_insn_r
->mem_rec_count
= register_count
;
12010 while (register_count
)
12012 record_buf_mem
[(register_count
* 2) - 1] = start_address
;
12013 record_buf_mem
[(register_count
* 2) - 2] = 4;
12014 start_address
= start_address
+ 4;
12017 record_buf
[0] = ARM_SP_REGNUM
;
12018 thumb_insn_r
->reg_rec_count
= 1;
12021 /* REV, REV16, REVSH */
12022 record_buf
[0] = bits (thumb_insn_r
->arm_insn
, 0, 2);
12023 thumb_insn_r
->reg_rec_count
= 1;
12025 case 12: /* fall through */
12028 register_bits
= bits (thumb_insn_r
->arm_insn
, 0, 7);
12029 while (register_bits
)
12031 if (register_bits
& 0x00000001)
12032 record_buf
[index
++] = register_count
;
12033 register_bits
= register_bits
>> 1;
12036 record_buf
[index
++] = ARM_PS_REGNUM
;
12037 record_buf
[index
++] = ARM_SP_REGNUM
;
12038 thumb_insn_r
->reg_rec_count
= index
;
12042 /* Handle enhanced software breakpoint insn, BKPT. */
12043 /* CPSR is changed to be executed in ARM state, disabling normal
12044 interrupts, entering abort mode. */
12045 /* According to high vector configuration PC is set. */
12046 /* User hits breakpoint and type reverse, in that case, we need to go back with
12047 previous CPSR and Program Counter. */
12048 record_buf
[0] = ARM_PS_REGNUM
;
12049 record_buf
[1] = ARM_LR_REGNUM
;
12050 thumb_insn_r
->reg_rec_count
= 2;
12051 /* We need to save SPSR value, which is not yet done. */
12052 printf_unfiltered (_("Process record does not support instruction "
12053 "0x%0x at address %s.\n"),
12054 thumb_insn_r
->arm_insn
,
12055 paddress (thumb_insn_r
->gdbarch
,
12056 thumb_insn_r
->this_addr
));
12060 /* If-Then, and hints */
12067 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
12068 MEM_ALLOC (thumb_insn_r
->arm_mems
, thumb_insn_r
->mem_rec_count
,
12074 /* Handling opcode 110 insns. */
12077 thumb_record_ldm_stm_swi (insn_decode_record
*thumb_insn_r
)
12079 struct gdbarch_tdep
*tdep
= gdbarch_tdep (thumb_insn_r
->gdbarch
);
12080 struct regcache
*reg_cache
= thumb_insn_r
->regcache
;
12082 uint32_t ret
= 0; /* function return value: -1:record failure ; 0:success */
12083 uint32_t reg_src1
= 0;
12084 uint32_t opcode1
= 0, opcode2
= 0, register_bits
= 0, register_count
= 0;
12085 uint32_t index
= 0, start_address
= 0;
12086 uint32_t record_buf
[24], record_buf_mem
[48];
12088 ULONGEST u_regval
= 0;
12090 opcode1
= bits (thumb_insn_r
->arm_insn
, 8, 12);
12091 opcode2
= bits (thumb_insn_r
->arm_insn
, 11, 12);
12097 register_bits
= bits (thumb_insn_r
->arm_insn
, 0, 7);
12099 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
12100 while (register_bits
)
12102 if (register_bits
& 0x00000001)
12103 record_buf
[index
++] = register_count
;
12104 register_bits
= register_bits
>> 1;
12107 record_buf
[index
++] = reg_src1
;
12108 thumb_insn_r
->reg_rec_count
= index
;
12110 else if (0 == opcode2
)
12112 /* It handles both STMIA. */
12113 register_bits
= bits (thumb_insn_r
->arm_insn
, 0, 7);
12115 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
12116 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
);
12117 while (register_bits
)
12119 if (register_bits
& 0x00000001)
12121 register_bits
= register_bits
>> 1;
12123 start_address
= u_regval
;
12124 thumb_insn_r
->mem_rec_count
= register_count
;
12125 while (register_count
)
12127 record_buf_mem
[(register_count
* 2) - 1] = start_address
;
12128 record_buf_mem
[(register_count
* 2) - 2] = 4;
12129 start_address
= start_address
+ 4;
12133 else if (0x1F == opcode1
)
12135 /* Handle arm syscall insn. */
12136 if (tdep
->arm_syscall_record
!= NULL
)
12138 regcache_raw_read_unsigned (reg_cache
, 7, &u_regval
);
12139 ret
= tdep
->arm_syscall_record (reg_cache
, u_regval
);
12143 printf_unfiltered (_("no syscall record support\n"));
12148 /* B (1), conditional branch is automatically taken care in process_record,
12149 as PC is saved there. */
12151 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
12152 MEM_ALLOC (thumb_insn_r
->arm_mems
, thumb_insn_r
->mem_rec_count
,
12158 /* Handling opcode 111 insns. */
12161 thumb_record_branch (insn_decode_record
*thumb_insn_r
)
12163 uint32_t record_buf
[8];
12164 uint32_t bits_h
= 0;
12166 bits_h
= bits (thumb_insn_r
->arm_insn
, 11, 12);
12168 if (2 == bits_h
|| 3 == bits_h
)
12171 record_buf
[0] = ARM_LR_REGNUM
;
12172 thumb_insn_r
->reg_rec_count
= 1;
12174 else if (1 == bits_h
)
12177 record_buf
[0] = ARM_PS_REGNUM
;
12178 record_buf
[1] = ARM_LR_REGNUM
;
12179 thumb_insn_r
->reg_rec_count
= 2;
12182 /* B(2) is automatically taken care in process_record, as PC is
12185 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
12190 /* Handler for thumb2 load/store multiple instructions. */
12193 thumb2_record_ld_st_multiple (insn_decode_record
*thumb2_insn_r
)
12195 struct regcache
*reg_cache
= thumb2_insn_r
->regcache
;
12197 uint32_t reg_rn
, op
;
12198 uint32_t register_bits
= 0, register_count
= 0;
12199 uint32_t index
= 0, start_address
= 0;
12200 uint32_t record_buf
[24], record_buf_mem
[48];
12202 ULONGEST u_regval
= 0;
12204 reg_rn
= bits (thumb2_insn_r
->arm_insn
, 16, 19);
12205 op
= bits (thumb2_insn_r
->arm_insn
, 23, 24);
12207 if (0 == op
|| 3 == op
)
12209 if (bit (thumb2_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
12211 /* Handle RFE instruction. */
12212 record_buf
[0] = ARM_PS_REGNUM
;
12213 thumb2_insn_r
->reg_rec_count
= 1;
12217 /* Handle SRS instruction after reading banked SP. */
12218 return arm_record_unsupported_insn (thumb2_insn_r
);
12221 else if (1 == op
|| 2 == op
)
12223 if (bit (thumb2_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
12225 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
12226 register_bits
= bits (thumb2_insn_r
->arm_insn
, 0, 15);
12227 while (register_bits
)
12229 if (register_bits
& 0x00000001)
12230 record_buf
[index
++] = register_count
;
12233 register_bits
= register_bits
>> 1;
12235 record_buf
[index
++] = reg_rn
;
12236 record_buf
[index
++] = ARM_PS_REGNUM
;
12237 thumb2_insn_r
->reg_rec_count
= index
;
12241 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
12242 register_bits
= bits (thumb2_insn_r
->arm_insn
, 0, 15);
12243 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
);
12244 while (register_bits
)
12246 if (register_bits
& 0x00000001)
12249 register_bits
= register_bits
>> 1;
12254 /* Start address calculation for LDMDB/LDMEA. */
12255 start_address
= u_regval
;
12259 /* Start address calculation for LDMDB/LDMEA. */
12260 start_address
= u_regval
- register_count
* 4;
12263 thumb2_insn_r
->mem_rec_count
= register_count
;
12264 while (register_count
)
12266 record_buf_mem
[register_count
* 2 - 1] = start_address
;
12267 record_buf_mem
[register_count
* 2 - 2] = 4;
12268 start_address
= start_address
+ 4;
12271 record_buf
[0] = reg_rn
;
12272 record_buf
[1] = ARM_PS_REGNUM
;
12273 thumb2_insn_r
->reg_rec_count
= 2;
12277 MEM_ALLOC (thumb2_insn_r
->arm_mems
, thumb2_insn_r
->mem_rec_count
,
12279 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
12281 return ARM_RECORD_SUCCESS
;
12284 /* Handler for thumb2 load/store (dual/exclusive) and table branch
12288 thumb2_record_ld_st_dual_ex_tbb (insn_decode_record
*thumb2_insn_r
)
12290 struct regcache
*reg_cache
= thumb2_insn_r
->regcache
;
12292 uint32_t reg_rd
, reg_rn
, offset_imm
;
12293 uint32_t reg_dest1
, reg_dest2
;
12294 uint32_t address
, offset_addr
;
12295 uint32_t record_buf
[8], record_buf_mem
[8];
12296 uint32_t op1
, op2
, op3
;
12298 ULONGEST u_regval
[2];
12300 op1
= bits (thumb2_insn_r
->arm_insn
, 23, 24);
12301 op2
= bits (thumb2_insn_r
->arm_insn
, 20, 21);
12302 op3
= bits (thumb2_insn_r
->arm_insn
, 4, 7);
12304 if (bit (thumb2_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
12306 if(!(1 == op1
&& 1 == op2
&& (0 == op3
|| 1 == op3
)))
12308 reg_dest1
= bits (thumb2_insn_r
->arm_insn
, 12, 15);
12309 record_buf
[0] = reg_dest1
;
12310 record_buf
[1] = ARM_PS_REGNUM
;
12311 thumb2_insn_r
->reg_rec_count
= 2;
12314 if (3 == op2
|| (op1
& 2) || (1 == op1
&& 1 == op2
&& 7 == op3
))
12316 reg_dest2
= bits (thumb2_insn_r
->arm_insn
, 8, 11);
12317 record_buf
[2] = reg_dest2
;
12318 thumb2_insn_r
->reg_rec_count
= 3;
12323 reg_rn
= bits (thumb2_insn_r
->arm_insn
, 16, 19);
12324 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
[0]);
12326 if (0 == op1
&& 0 == op2
)
12328 /* Handle STREX. */
12329 offset_imm
= bits (thumb2_insn_r
->arm_insn
, 0, 7);
12330 address
= u_regval
[0] + (offset_imm
* 4);
12331 record_buf_mem
[0] = 4;
12332 record_buf_mem
[1] = address
;
12333 thumb2_insn_r
->mem_rec_count
= 1;
12334 reg_rd
= bits (thumb2_insn_r
->arm_insn
, 0, 3);
12335 record_buf
[0] = reg_rd
;
12336 thumb2_insn_r
->reg_rec_count
= 1;
12338 else if (1 == op1
&& 0 == op2
)
12340 reg_rd
= bits (thumb2_insn_r
->arm_insn
, 0, 3);
12341 record_buf
[0] = reg_rd
;
12342 thumb2_insn_r
->reg_rec_count
= 1;
12343 address
= u_regval
[0];
12344 record_buf_mem
[1] = address
;
12348 /* Handle STREXB. */
12349 record_buf_mem
[0] = 1;
12350 thumb2_insn_r
->mem_rec_count
= 1;
12354 /* Handle STREXH. */
12355 record_buf_mem
[0] = 2 ;
12356 thumb2_insn_r
->mem_rec_count
= 1;
12360 /* Handle STREXD. */
12361 address
= u_regval
[0];
12362 record_buf_mem
[0] = 4;
12363 record_buf_mem
[2] = 4;
12364 record_buf_mem
[3] = address
+ 4;
12365 thumb2_insn_r
->mem_rec_count
= 2;
12370 offset_imm
= bits (thumb2_insn_r
->arm_insn
, 0, 7);
12372 if (bit (thumb2_insn_r
->arm_insn
, 24))
12374 if (bit (thumb2_insn_r
->arm_insn
, 23))
12375 offset_addr
= u_regval
[0] + (offset_imm
* 4);
12377 offset_addr
= u_regval
[0] - (offset_imm
* 4);
12379 address
= offset_addr
;
12382 address
= u_regval
[0];
12384 record_buf_mem
[0] = 4;
12385 record_buf_mem
[1] = address
;
12386 record_buf_mem
[2] = 4;
12387 record_buf_mem
[3] = address
+ 4;
12388 thumb2_insn_r
->mem_rec_count
= 2;
12389 record_buf
[0] = reg_rn
;
12390 thumb2_insn_r
->reg_rec_count
= 1;
12394 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
12396 MEM_ALLOC (thumb2_insn_r
->arm_mems
, thumb2_insn_r
->mem_rec_count
,
12398 return ARM_RECORD_SUCCESS
;
12401 /* Handler for thumb2 data processing (shift register and modified immediate)
12405 thumb2_record_data_proc_sreg_mimm (insn_decode_record
*thumb2_insn_r
)
12407 uint32_t reg_rd
, op
;
12408 uint32_t record_buf
[8];
12410 op
= bits (thumb2_insn_r
->arm_insn
, 21, 24);
12411 reg_rd
= bits (thumb2_insn_r
->arm_insn
, 8, 11);
12413 if ((0 == op
|| 4 == op
|| 8 == op
|| 13 == op
) && 15 == reg_rd
)
12415 record_buf
[0] = ARM_PS_REGNUM
;
12416 thumb2_insn_r
->reg_rec_count
= 1;
12420 record_buf
[0] = reg_rd
;
12421 record_buf
[1] = ARM_PS_REGNUM
;
12422 thumb2_insn_r
->reg_rec_count
= 2;
12425 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
12427 return ARM_RECORD_SUCCESS
;
12430 /* Generic handler for thumb2 instructions which effect destination and PS
12434 thumb2_record_ps_dest_generic (insn_decode_record
*thumb2_insn_r
)
12437 uint32_t record_buf
[8];
12439 reg_rd
= bits (thumb2_insn_r
->arm_insn
, 8, 11);
12441 record_buf
[0] = reg_rd
;
12442 record_buf
[1] = ARM_PS_REGNUM
;
12443 thumb2_insn_r
->reg_rec_count
= 2;
12445 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
12447 return ARM_RECORD_SUCCESS
;
12450 /* Handler for thumb2 branch and miscellaneous control instructions. */
12453 thumb2_record_branch_misc_cntrl (insn_decode_record
*thumb2_insn_r
)
12455 uint32_t op
, op1
, op2
;
12456 uint32_t record_buf
[8];
12458 op
= bits (thumb2_insn_r
->arm_insn
, 20, 26);
12459 op1
= bits (thumb2_insn_r
->arm_insn
, 12, 14);
12460 op2
= bits (thumb2_insn_r
->arm_insn
, 8, 11);
12462 /* Handle MSR insn. */
12463 if (!(op1
& 0x2) && 0x38 == op
)
12467 /* CPSR is going to be changed. */
12468 record_buf
[0] = ARM_PS_REGNUM
;
12469 thumb2_insn_r
->reg_rec_count
= 1;
12473 arm_record_unsupported_insn(thumb2_insn_r
);
12477 else if (4 == (op1
& 0x5) || 5 == (op1
& 0x5))
12480 record_buf
[0] = ARM_PS_REGNUM
;
12481 record_buf
[1] = ARM_LR_REGNUM
;
12482 thumb2_insn_r
->reg_rec_count
= 2;
12485 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
12487 return ARM_RECORD_SUCCESS
;
12490 /* Handler for thumb2 store single data item instructions. */
12493 thumb2_record_str_single_data (insn_decode_record
*thumb2_insn_r
)
12495 struct regcache
*reg_cache
= thumb2_insn_r
->regcache
;
12497 uint32_t reg_rn
, reg_rm
, offset_imm
, shift_imm
;
12498 uint32_t address
, offset_addr
;
12499 uint32_t record_buf
[8], record_buf_mem
[8];
12502 ULONGEST u_regval
[2];
12504 op1
= bits (thumb2_insn_r
->arm_insn
, 21, 23);
12505 op2
= bits (thumb2_insn_r
->arm_insn
, 6, 11);
12506 reg_rn
= bits (thumb2_insn_r
->arm_insn
, 16, 19);
12507 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
[0]);
12509 if (bit (thumb2_insn_r
->arm_insn
, 23))
12512 offset_imm
= bits (thumb2_insn_r
->arm_insn
, 0, 11);
12513 offset_addr
= u_regval
[0] + offset_imm
;
12514 address
= offset_addr
;
12519 if ((0 == op1
|| 1 == op1
|| 2 == op1
) && !(op2
& 0x20))
12521 /* Handle STRB (register). */
12522 reg_rm
= bits (thumb2_insn_r
->arm_insn
, 0, 3);
12523 regcache_raw_read_unsigned (reg_cache
, reg_rm
, &u_regval
[1]);
12524 shift_imm
= bits (thumb2_insn_r
->arm_insn
, 4, 5);
12525 offset_addr
= u_regval
[1] << shift_imm
;
12526 address
= u_regval
[0] + offset_addr
;
12530 offset_imm
= bits (thumb2_insn_r
->arm_insn
, 0, 7);
12531 if (bit (thumb2_insn_r
->arm_insn
, 10))
12533 if (bit (thumb2_insn_r
->arm_insn
, 9))
12534 offset_addr
= u_regval
[0] + offset_imm
;
12536 offset_addr
= u_regval
[0] - offset_imm
;
12538 address
= offset_addr
;
12541 address
= u_regval
[0];
12547 /* Store byte instructions. */
12550 record_buf_mem
[0] = 1;
12552 /* Store half word instructions. */
12555 record_buf_mem
[0] = 2;
12557 /* Store word instructions. */
12560 record_buf_mem
[0] = 4;
12564 gdb_assert_not_reached ("no decoding pattern found");
12568 record_buf_mem
[1] = address
;
12569 thumb2_insn_r
->mem_rec_count
= 1;
12570 record_buf
[0] = reg_rn
;
12571 thumb2_insn_r
->reg_rec_count
= 1;
12573 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
12575 MEM_ALLOC (thumb2_insn_r
->arm_mems
, thumb2_insn_r
->mem_rec_count
,
12577 return ARM_RECORD_SUCCESS
;
12580 /* Handler for thumb2 load memory hints instructions. */
12583 thumb2_record_ld_mem_hints (insn_decode_record
*thumb2_insn_r
)
12585 uint32_t record_buf
[8];
12586 uint32_t reg_rt
, reg_rn
;
12588 reg_rt
= bits (thumb2_insn_r
->arm_insn
, 12, 15);
12589 reg_rn
= bits (thumb2_insn_r
->arm_insn
, 16, 19);
12591 if (ARM_PC_REGNUM
!= reg_rt
)
12593 record_buf
[0] = reg_rt
;
12594 record_buf
[1] = reg_rn
;
12595 record_buf
[2] = ARM_PS_REGNUM
;
12596 thumb2_insn_r
->reg_rec_count
= 3;
12598 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
12600 return ARM_RECORD_SUCCESS
;
12603 return ARM_RECORD_FAILURE
;
12606 /* Handler for thumb2 load word instructions. */
12609 thumb2_record_ld_word (insn_decode_record
*thumb2_insn_r
)
12611 uint32_t record_buf
[8];
12613 record_buf
[0] = bits (thumb2_insn_r
->arm_insn
, 12, 15);
12614 record_buf
[1] = ARM_PS_REGNUM
;
12615 thumb2_insn_r
->reg_rec_count
= 2;
12617 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
12619 return ARM_RECORD_SUCCESS
;
12622 /* Handler for thumb2 long multiply, long multiply accumulate, and
12623 divide instructions. */
12626 thumb2_record_lmul_lmla_div (insn_decode_record
*thumb2_insn_r
)
12628 uint32_t opcode1
= 0, opcode2
= 0;
12629 uint32_t record_buf
[8];
12631 opcode1
= bits (thumb2_insn_r
->arm_insn
, 20, 22);
12632 opcode2
= bits (thumb2_insn_r
->arm_insn
, 4, 7);
12634 if (0 == opcode1
|| 2 == opcode1
|| (opcode1
>= 4 && opcode1
<= 6))
12636 /* Handle SMULL, UMULL, SMULAL. */
12637 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
12638 record_buf
[0] = bits (thumb2_insn_r
->arm_insn
, 16, 19);
12639 record_buf
[1] = bits (thumb2_insn_r
->arm_insn
, 12, 15);
12640 record_buf
[2] = ARM_PS_REGNUM
;
12641 thumb2_insn_r
->reg_rec_count
= 3;
12643 else if (1 == opcode1
|| 3 == opcode2
)
12645 /* Handle SDIV and UDIV. */
12646 record_buf
[0] = bits (thumb2_insn_r
->arm_insn
, 16, 19);
12647 record_buf
[1] = bits (thumb2_insn_r
->arm_insn
, 12, 15);
12648 record_buf
[2] = ARM_PS_REGNUM
;
12649 thumb2_insn_r
->reg_rec_count
= 3;
12652 return ARM_RECORD_FAILURE
;
12654 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
12656 return ARM_RECORD_SUCCESS
;
12659 /* Record handler for thumb32 coprocessor instructions. */
12662 thumb2_record_coproc_insn (insn_decode_record
*thumb2_insn_r
)
12664 if (bit (thumb2_insn_r
->arm_insn
, 25))
12665 return arm_record_coproc_data_proc (thumb2_insn_r
);
12667 return arm_record_asimd_vfp_coproc (thumb2_insn_r
);
12670 /* Record handler for advance SIMD structure load/store instructions. */
12673 thumb2_record_asimd_struct_ld_st (insn_decode_record
*thumb2_insn_r
)
12675 struct regcache
*reg_cache
= thumb2_insn_r
->regcache
;
12676 uint32_t l_bit
, a_bit
, b_bits
;
12677 uint32_t record_buf
[128], record_buf_mem
[128];
12678 uint32_t reg_rn
, reg_vd
, address
, f_elem
;
12679 uint32_t index_r
= 0, index_e
= 0, bf_regs
= 0, index_m
= 0, loop_t
= 0;
12682 l_bit
= bit (thumb2_insn_r
->arm_insn
, 21);
12683 a_bit
= bit (thumb2_insn_r
->arm_insn
, 23);
12684 b_bits
= bits (thumb2_insn_r
->arm_insn
, 8, 11);
12685 reg_rn
= bits (thumb2_insn_r
->arm_insn
, 16, 19);
12686 reg_vd
= bits (thumb2_insn_r
->arm_insn
, 12, 15);
12687 reg_vd
= (bit (thumb2_insn_r
->arm_insn
, 22) << 4) | reg_vd
;
12688 f_ebytes
= (1 << bits (thumb2_insn_r
->arm_insn
, 6, 7));
12689 f_elem
= 8 / f_ebytes
;
12693 ULONGEST u_regval
= 0;
12694 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
);
12695 address
= u_regval
;
12700 if (b_bits
== 0x02 || b_bits
== 0x0a || (b_bits
& 0x0e) == 0x06)
12702 if (b_bits
== 0x07)
12704 else if (b_bits
== 0x0a)
12706 else if (b_bits
== 0x06)
12708 else if (b_bits
== 0x02)
12713 for (index_r
= 0; index_r
< bf_regs
; index_r
++)
12715 for (index_e
= 0; index_e
< f_elem
; index_e
++)
12717 record_buf_mem
[index_m
++] = f_ebytes
;
12718 record_buf_mem
[index_m
++] = address
;
12719 address
= address
+ f_ebytes
;
12720 thumb2_insn_r
->mem_rec_count
+= 1;
12725 else if (b_bits
== 0x03 || (b_bits
& 0x0e) == 0x08)
12727 if (b_bits
== 0x09 || b_bits
== 0x08)
12729 else if (b_bits
== 0x03)
12734 for (index_r
= 0; index_r
< bf_regs
; index_r
++)
12735 for (index_e
= 0; index_e
< f_elem
; index_e
++)
12737 for (loop_t
= 0; loop_t
< 2; loop_t
++)
12739 record_buf_mem
[index_m
++] = f_ebytes
;
12740 record_buf_mem
[index_m
++] = address
+ (loop_t
* f_ebytes
);
12741 thumb2_insn_r
->mem_rec_count
+= 1;
12743 address
= address
+ (2 * f_ebytes
);
12747 else if ((b_bits
& 0x0e) == 0x04)
12749 for (index_e
= 0; index_e
< f_elem
; index_e
++)
12751 for (loop_t
= 0; loop_t
< 3; loop_t
++)
12753 record_buf_mem
[index_m
++] = f_ebytes
;
12754 record_buf_mem
[index_m
++] = address
+ (loop_t
* f_ebytes
);
12755 thumb2_insn_r
->mem_rec_count
+= 1;
12757 address
= address
+ (3 * f_ebytes
);
12761 else if (!(b_bits
& 0x0e))
12763 for (index_e
= 0; index_e
< f_elem
; index_e
++)
12765 for (loop_t
= 0; loop_t
< 4; loop_t
++)
12767 record_buf_mem
[index_m
++] = f_ebytes
;
12768 record_buf_mem
[index_m
++] = address
+ (loop_t
* f_ebytes
);
12769 thumb2_insn_r
->mem_rec_count
+= 1;
12771 address
= address
+ (4 * f_ebytes
);
12777 uint8_t bft_size
= bits (thumb2_insn_r
->arm_insn
, 10, 11);
12779 if (bft_size
== 0x00)
12781 else if (bft_size
== 0x01)
12783 else if (bft_size
== 0x02)
12789 if (!(b_bits
& 0x0b) || b_bits
== 0x08)
12790 thumb2_insn_r
->mem_rec_count
= 1;
12792 else if ((b_bits
& 0x0b) == 0x01 || b_bits
== 0x09)
12793 thumb2_insn_r
->mem_rec_count
= 2;
12795 else if ((b_bits
& 0x0b) == 0x02 || b_bits
== 0x0a)
12796 thumb2_insn_r
->mem_rec_count
= 3;
12798 else if ((b_bits
& 0x0b) == 0x03 || b_bits
== 0x0b)
12799 thumb2_insn_r
->mem_rec_count
= 4;
12801 for (index_m
= 0; index_m
< thumb2_insn_r
->mem_rec_count
; index_m
++)
12803 record_buf_mem
[index_m
] = f_ebytes
;
12804 record_buf_mem
[index_m
] = address
+ (index_m
* f_ebytes
);
12813 if (b_bits
== 0x02 || b_bits
== 0x0a || (b_bits
& 0x0e) == 0x06)
12814 thumb2_insn_r
->reg_rec_count
= 1;
12816 else if (b_bits
== 0x03 || (b_bits
& 0x0e) == 0x08)
12817 thumb2_insn_r
->reg_rec_count
= 2;
12819 else if ((b_bits
& 0x0e) == 0x04)
12820 thumb2_insn_r
->reg_rec_count
= 3;
12822 else if (!(b_bits
& 0x0e))
12823 thumb2_insn_r
->reg_rec_count
= 4;
12828 if (!(b_bits
& 0x0b) || b_bits
== 0x08 || b_bits
== 0x0c)
12829 thumb2_insn_r
->reg_rec_count
= 1;
12831 else if ((b_bits
& 0x0b) == 0x01 || b_bits
== 0x09 || b_bits
== 0x0d)
12832 thumb2_insn_r
->reg_rec_count
= 2;
12834 else if ((b_bits
& 0x0b) == 0x02 || b_bits
== 0x0a || b_bits
== 0x0e)
12835 thumb2_insn_r
->reg_rec_count
= 3;
12837 else if ((b_bits
& 0x0b) == 0x03 || b_bits
== 0x0b || b_bits
== 0x0f)
12838 thumb2_insn_r
->reg_rec_count
= 4;
12840 for (index_r
= 0; index_r
< thumb2_insn_r
->reg_rec_count
; index_r
++)
12841 record_buf
[index_r
] = reg_vd
+ ARM_D0_REGNUM
+ index_r
;
12845 if (bits (thumb2_insn_r
->arm_insn
, 0, 3) != 15)
12847 record_buf
[index_r
] = reg_rn
;
12848 thumb2_insn_r
->reg_rec_count
+= 1;
12851 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
12853 MEM_ALLOC (thumb2_insn_r
->arm_mems
, thumb2_insn_r
->mem_rec_count
,
12858 /* Decodes thumb2 instruction type and invokes its record handler. */
12860 static unsigned int
12861 thumb2_record_decode_insn_handler (insn_decode_record
*thumb2_insn_r
)
12863 uint32_t op
, op1
, op2
;
12865 op
= bit (thumb2_insn_r
->arm_insn
, 15);
12866 op1
= bits (thumb2_insn_r
->arm_insn
, 27, 28);
12867 op2
= bits (thumb2_insn_r
->arm_insn
, 20, 26);
12871 if (!(op2
& 0x64 ))
12873 /* Load/store multiple instruction. */
12874 return thumb2_record_ld_st_multiple (thumb2_insn_r
);
12876 else if ((op2
& 0x64) == 0x4)
12878 /* Load/store (dual/exclusive) and table branch instruction. */
12879 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r
);
12881 else if ((op2
& 0x60) == 0x20)
12883 /* Data-processing (shifted register). */
12884 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r
);
12886 else if (op2
& 0x40)
12888 /* Co-processor instructions. */
12889 return thumb2_record_coproc_insn (thumb2_insn_r
);
12892 else if (op1
== 0x02)
12896 /* Branches and miscellaneous control instructions. */
12897 return thumb2_record_branch_misc_cntrl (thumb2_insn_r
);
12899 else if (op2
& 0x20)
12901 /* Data-processing (plain binary immediate) instruction. */
12902 return thumb2_record_ps_dest_generic (thumb2_insn_r
);
12906 /* Data-processing (modified immediate). */
12907 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r
);
12910 else if (op1
== 0x03)
12912 if (!(op2
& 0x71 ))
12914 /* Store single data item. */
12915 return thumb2_record_str_single_data (thumb2_insn_r
);
12917 else if (!((op2
& 0x71) ^ 0x10))
12919 /* Advanced SIMD or structure load/store instructions. */
12920 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r
);
12922 else if (!((op2
& 0x67) ^ 0x01))
12924 /* Load byte, memory hints instruction. */
12925 return thumb2_record_ld_mem_hints (thumb2_insn_r
);
12927 else if (!((op2
& 0x67) ^ 0x03))
12929 /* Load halfword, memory hints instruction. */
12930 return thumb2_record_ld_mem_hints (thumb2_insn_r
);
12932 else if (!((op2
& 0x67) ^ 0x05))
12934 /* Load word instruction. */
12935 return thumb2_record_ld_word (thumb2_insn_r
);
12937 else if (!((op2
& 0x70) ^ 0x20))
12939 /* Data-processing (register) instruction. */
12940 return thumb2_record_ps_dest_generic (thumb2_insn_r
);
12942 else if (!((op2
& 0x78) ^ 0x30))
12944 /* Multiply, multiply accumulate, abs diff instruction. */
12945 return thumb2_record_ps_dest_generic (thumb2_insn_r
);
12947 else if (!((op2
& 0x78) ^ 0x38))
12949 /* Long multiply, long multiply accumulate, and divide. */
12950 return thumb2_record_lmul_lmla_div (thumb2_insn_r
);
12952 else if (op2
& 0x40)
12954 /* Co-processor instructions. */
12955 return thumb2_record_coproc_insn (thumb2_insn_r
);
12963 /* Abstract memory reader. */
12965 class abstract_memory_reader
12968 /* Read LEN bytes of target memory at address MEMADDR, placing the
12969 results in GDB's memory at BUF. Return true on success. */
12971 virtual bool read (CORE_ADDR memaddr
, gdb_byte
*buf
, const size_t len
) = 0;
12974 /* Instruction reader from real target. */
12976 class instruction_reader
: public abstract_memory_reader
12979 bool read (CORE_ADDR memaddr
, gdb_byte
*buf
, const size_t len
) override
12981 if (target_read_memory (memaddr
, buf
, len
))
12990 /* Extracts arm/thumb/thumb2 insn depending on the size, and returns 0 on success
12991 and positive val on fauilure. */
12994 extract_arm_insn (abstract_memory_reader
& reader
,
12995 insn_decode_record
*insn_record
, uint32_t insn_size
)
12997 gdb_byte buf
[insn_size
];
12999 memset (&buf
[0], 0, insn_size
);
13001 if (!reader
.read (insn_record
->this_addr
, buf
, insn_size
))
13003 insn_record
->arm_insn
= (uint32_t) extract_unsigned_integer (&buf
[0],
13005 gdbarch_byte_order_for_code (insn_record
->gdbarch
));
13009 typedef int (*sti_arm_hdl_fp_t
) (insn_decode_record
*);
13011 /* Decode arm/thumb insn depending on condition cods and opcodes; and
/* Dispatch the instruction at ARM_RECORD->this_addr to the matching
   record handler.  RECORD_TYPE selects the decode mode (ARM_RECORD,
   THUMB_RECORD or THUMB2_RECORD) and INSN_SIZE the number of bytes to
   fetch.  Returns 0 on success, non-zero on failure (see ret below).  */
13015 decode_insn (abstract_memory_reader
&reader
, insn_decode_record
*arm_record
,
13016 record_type_t record_type
, uint32_t insn_size
)
13019 /* (Starting from numerical 0); bits 25, 26, 27 decodes type of arm
/* Dispatch table for ARM-mode instructions, indexed by bits 25-27 of
   the instruction word.  */
13021 static const sti_arm_hdl_fp_t arm_handle_insn
[8] =
13023 arm_record_data_proc_misc_ld_str
, /* 000. */
13024 arm_record_data_proc_imm
, /* 001. */
13025 arm_record_ld_st_imm_offset
, /* 010. */
13026 arm_record_ld_st_reg_offset
, /* 011. */
13027 arm_record_ld_st_multiple
, /* 100. */
13028 arm_record_b_bl
, /* 101. */
13029 arm_record_asimd_vfp_coproc
, /* 110. */
13030 arm_record_coproc_data_proc
/* 111. */
13033 /* (Starting from numerical 0); bits 13,14,15 decodes type of thumb
/* Dispatch table for 16-bit Thumb instructions, indexed by bits 13-15
   of the halfword.  */
13035 static const sti_arm_hdl_fp_t thumb_handle_insn
[8] =
13037 thumb_record_shift_add_sub
, /* 000. */
13038 thumb_record_add_sub_cmp_mov
, /* 001. */
13039 thumb_record_ld_st_reg_offset
, /* 010. */
13040 thumb_record_ld_st_imm_offset
, /* 011. */
13041 thumb_record_ld_st_stack
, /* 100. */
13042 thumb_record_misc
, /* 101. */
13043 thumb_record_ldm_stm_swi
, /* 110. */
13044 thumb_record_branch
/* 111. */
13047 uint32_t ret
= 0; /* return value: negative:failure 0:success. */
13048 uint32_t insn_id
= 0;
/* Fetch the raw instruction bytes into arm_record->arm_insn; on a
   failed memory read, report the faulting address and length.  */
13050 if (extract_arm_insn (reader
, arm_record
, insn_size
))
13054 printf_unfiltered (_("Process record: error reading memory at "
13055 "addr %s len = %d.\n"),
13056 paddress (arm_record
->gdbarch
,
13057 arm_record
->this_addr
), insn_size
);
13061 else if (ARM_RECORD
== record_type
)
/* ARM mode: condition code lives in bits 28-31, the major opcode
   group in bits 25-27.  */
13063 arm_record
->cond
= bits (arm_record
->arm_insn
, 28, 31);
13064 insn_id
= bits (arm_record
->arm_insn
, 25, 27);
/* Condition field 0xf selects the unconditional/extension encoding
   space, which has its own decoder.  */
13066 if (arm_record
->cond
== 0xf)
13067 ret
= arm_record_extension_space (arm_record
);
13070 /* If this insn has fallen into extension space
13071 then we need not decode it anymore. */
13072 ret
= arm_handle_insn
[insn_id
] (arm_record
);
13074 if (ret
!= ARM_RECORD_SUCCESS
)
13076 arm_record_unsupported_insn (arm_record
);
/* 16-bit Thumb: dispatch on bits 13-15 of the halfword.  */
13080 else if (THUMB_RECORD
== record_type
)
13082 /* As thumb does not have condition codes, we set negative. */
13083 arm_record
->cond
= -1;
13084 insn_id
= bits (arm_record
->arm_insn
, 13, 15);
13085 ret
= thumb_handle_insn
[insn_id
] (arm_record
);
13086 if (ret
!= ARM_RECORD_SUCCESS
)
13088 arm_record_unsupported_insn (arm_record
);
/* 32-bit Thumb-2: the two halfwords were fetched low-half first, so
   swap them into encoding order before handing the full word to the
   Thumb-2 decoder.  */
13092 else if (THUMB2_RECORD
== record_type
)
13094 /* As thumb does not have condition codes, we set negative. */
13095 arm_record
->cond
= -1;
13097 /* Swap first half of 32bit thumb instruction with second half. */
13098 arm_record
->arm_insn
13099 = (arm_record
->arm_insn
>> 16) | (arm_record
->arm_insn
<< 16);
13101 ret
= thumb2_record_decode_insn_handler (arm_record
);
13103 if (ret
!= ARM_RECORD_SUCCESS
)
13105 arm_record_unsupported_insn (arm_record
);
/* Any other record_type value is a caller bug.  */
13111 /* Throw assertion. */
13112 gdb_assert_not_reached ("not a valid instruction, could not decode");
13119 namespace selftests
{
13121 /* Provide both 16-bit and 32-bit thumb instructions. */
/* Test double for abstract_memory_reader: serves instruction bytes
   from a fixed in-memory array of 16-bit Thumb halfwords instead of
   reading target memory.  */
13123 class instruction_reader_thumb
: public abstract_memory_reader
/* Bind a reference to the caller's halfword array; SIZE is deduced
   from the array type, so no separate length argument is needed.  */
13126 template<size_t SIZE
>
13127 instruction_reader_thumb (enum bfd_endian endian
,
13128 const uint16_t (&insns
)[SIZE
])
13129 : m_endian (endian
), m_insns (insns
), m_insns_size (SIZE
)
/* Copy LEN bytes (one or two halfwords) starting at MEMADDR into BUF
   in target byte order.  MEMADDR is treated as a byte address into
   the halfword array, so it must be 2-byte aligned and in range.  */
13132 bool read (CORE_ADDR memaddr
, gdb_byte
*buf
, const size_t len
) override
13134 SELF_CHECK (len
== 4 || len
== 2);
13135 SELF_CHECK (memaddr
% 2 == 0);
13136 SELF_CHECK ((memaddr
/ 2) < m_insns_size
);
/* First (or only) halfword.  */
13138 store_unsigned_integer (buf
, 2, m_endian
, m_insns
[memaddr
/ 2]);
/* Second halfword of a 32-bit Thumb-2 instruction.  */
13141 store_unsigned_integer (&buf
[2], 2, m_endian
,
13142 m_insns
[memaddr
/ 2 + 1]);
/* Byte order used when encoding halfwords into the output buffer.  */
13148 enum bfd_endian m_endian
;
/* Backing instruction array (not owned).  */
13149 const uint16_t *m_insns
;
/* Number of halfwords in m_insns.  */
13150 size_t m_insns_size
;
/* Self test: decode known Thumb and Thumb-2 instructions through
   decode_insn and verify the register/memory side effects that were
   recorded.  */
13154 arm_record_test (void)
/* Build a bare ARM gdbarch to decode against.  */
13156 struct gdbarch_info info
;
13157 gdbarch_info_init (&info
);
13158 info
.bfd_arch_info
= bfd_scan_arch ("arm");
13160 struct gdbarch
*gdbarch
= gdbarch_find_by_info (info
);
13162 SELF_CHECK (gdbarch
!= NULL
);
13164 /* 16-bit Thumb instructions. */
13166 insn_decode_record arm_record
;
13168 memset (&arm_record
, 0, sizeof (insn_decode_record
));
13169 arm_record
.gdbarch
= gdbarch
;
13171 static const uint16_t insns
[] = {
13172 /* db b2 uxtb r3, r3 */
13174 /* cd 58 ldr r5, [r1, r3] */
13178 enum bfd_endian endian
= gdbarch_byte_order_for_code (arm_record
.gdbarch
);
13179 instruction_reader_thumb
reader (endian
, insns
);
13180 int ret
= decode_insn (reader
, &arm_record
, THUMB_RECORD
,
13181 THUMB_INSN_SIZE_BYTES
);
/* uxtb r3, r3 writes only r3: one register record, no memory
   records.  */
13183 SELF_CHECK (ret
== 0);
13184 SELF_CHECK (arm_record
.mem_rec_count
== 0);
13185 SELF_CHECK (arm_record
.reg_rec_count
== 1);
13186 SELF_CHECK (arm_record
.arm_regs
[0] == 3);
/* Advance to the second halfword and decode the load.  */
13188 arm_record
.this_addr
+= 2;
13189 ret
= decode_insn (reader
, &arm_record
, THUMB_RECORD
,
13190 THUMB_INSN_SIZE_BYTES
);
/* ldr r5, [r1, r3] only writes r5; a load records the destination
   register, not memory.  */
13192 SELF_CHECK (ret
== 0);
13193 SELF_CHECK (arm_record
.mem_rec_count
== 0);
13194 SELF_CHECK (arm_record
.reg_rec_count
== 1);
13195 SELF_CHECK (arm_record
.arm_regs
[0] == 5);
13198 /* 32-bit Thumb-2 instructions. */
/* Fresh record state for the Thumb-2 case.  */
13200 insn_decode_record arm_record
;
13202 memset (&arm_record
, 0, sizeof (insn_decode_record
));
13203 arm_record
.gdbarch
= gdbarch
;
13205 static const uint16_t insns
[] = {
13206 /* 1d ee 70 7f mrc 15, 0, r7, cr13, cr0, {3} */
13210 enum bfd_endian endian
= gdbarch_byte_order_for_code (arm_record
.gdbarch
);
13211 instruction_reader_thumb
reader (endian
, insns
);
13212 int ret
= decode_insn (reader
, &arm_record
, THUMB2_RECORD
,
13213 THUMB2_INSN_SIZE_BYTES
);
/* The mrc transfers a coprocessor register into r7: one register
   record, no memory records.  */
13215 SELF_CHECK (ret
== 0);
13216 SELF_CHECK (arm_record
.mem_rec_count
== 0);
13217 SELF_CHECK (arm_record
.reg_rec_count
== 1);
13218 SELF_CHECK (arm_record
.arm_regs
[0] == 7);
13221 } // namespace selftests
13222 #endif /* GDB_SELF_TEST */
13224 /* Cleans up local record registers and memory allocations. */
13227 deallocate_reg_mem (insn_decode_record
*record
)
13229 xfree (record
->arm_regs
);
13230 xfree (record
->arm_mems
);
13234 /* Parse the current instruction and record the values of the registers and
13235 memory that will be changed in current instruction to record_arch_list".
13236 Return -1 if something is wrong. */
13239 arm_process_record (struct gdbarch
*gdbarch
, struct regcache
*regcache
,
13240 CORE_ADDR insn_addr
)
13243 uint32_t no_of_rec
= 0;
13244 uint32_t ret
= 0; /* return value: -1:record failure ; 0:success */
13245 ULONGEST t_bit
= 0, insn_id
= 0;
13247 ULONGEST u_regval
= 0;
/* Per-instruction decode state, seeded with the regcache, address and
   architecture the handlers will consult.  */
13249 insn_decode_record arm_record
;
13251 memset (&arm_record
, 0, sizeof (insn_decode_record
));
13252 arm_record
.regcache
= regcache
;
13253 arm_record
.this_addr
= insn_addr
;
13254 arm_record
.gdbarch
= gdbarch
;
/* Verbose record debugging: trace the address being processed.  */
13257 if (record_debug
> 1)
13259 fprintf_unfiltered (gdb_stdlog
, "Process record: arm_process_record "
13261 paddress (gdbarch
, arm_record
.this_addr
));
/* Peek at the first two bytes so the Thumb/Thumb-2 width check below
   can inspect the halfword prefix.  */
13264 instruction_reader reader
;
13265 if (extract_arm_insn (reader
, &arm_record
, 2))
13269 printf_unfiltered (_("Process record: error reading memory at "
13270 "addr %s len = %d.\n"),
13271 paddress (arm_record
.gdbarch
,
13272 arm_record
.this_addr
), 2);
13277 /* Check the insn, whether it is thumb or arm one. */
/* The T bit of the PSR distinguishes Thumb state from ARM state.  */
13279 t_bit
= arm_psr_thumb_bit (arm_record
.gdbarch
);
13280 regcache_raw_read_unsigned (arm_record
.regcache
, ARM_PS_REGNUM
, &u_regval
);
13283 if (!(u_regval
& t_bit
))
13285 /* We are decoding arm insn. */
13286 ret
= decode_insn (reader
, &arm_record
, ARM_RECORD
, ARM_INSN_SIZE_BYTES
);
/* Halfword prefixes 0b11101/0b11110/0b11111 (0x1D/0x1E/0x1F in bits
   11-15) mark the first half of a 32-bit Thumb-2 encoding.  */
13290 insn_id
= bits (arm_record
.arm_insn
, 11, 15);
13291 /* is it thumb2 insn? */
13292 if ((0x1D == insn_id
) || (0x1E == insn_id
) || (0x1F == insn_id
))
13294 ret
= decode_insn (reader
, &arm_record
, THUMB2_RECORD
,
13295 THUMB2_INSN_SIZE_BYTES
);
13299 /* We are decoding thumb insn. */
13300 ret
= decode_insn (reader
, &arm_record
, THUMB_RECORD
,
13301 THUMB_INSN_SIZE_BYTES
);
13307 /* Record registers. */
/* The PC is always recorded, since every instruction advances it.  */
13308 record_full_arch_list_add_reg (arm_record
.regcache
, ARM_PC_REGNUM
);
/* Push each register number the handlers flagged as modified.  */
13309 if (arm_record
.arm_regs
)
13311 for (no_of_rec
= 0; no_of_rec
< arm_record
.reg_rec_count
; no_of_rec
++)
13313 if (record_full_arch_list_add_reg
13314 (arm_record
.regcache
, arm_record
.arm_regs
[no_of_rec
]))
13318 /* Record memories. */
/* Push each (address, length) span the handlers flagged as written.  */
13319 if (arm_record
.arm_mems
)
13321 for (no_of_rec
= 0; no_of_rec
< arm_record
.mem_rec_count
; no_of_rec
++)
13323 if (record_full_arch_list_add_mem
13324 ((CORE_ADDR
)arm_record
.arm_mems
[no_of_rec
].addr
,
13325 arm_record
.arm_mems
[no_of_rec
].len
))
/* Seal this instruction's entry in the record list.  */
13330 if (record_full_arch_list_add_end ())
/* Free the handler-allocated register/memory lists.  */
13335 deallocate_reg_mem (&arm_record
);