/* Common target dependent code for GDB on ARM systems.

   Copyright (C) 1988-2017 Free Software Foundation, Inc.

   This file is part of GDB.

   This program is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3 of the License, or
   (at your option) any later version.

   This program is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program.  If not, see <http://www.gnu.org/licenses/>.  */
22 #include <ctype.h> /* XXX for isupper (). */
29 #include "dis-asm.h" /* For register styles. */
32 #include "reggroups.h"
35 #include "arch-utils.h"
37 #include "frame-unwind.h"
38 #include "frame-base.h"
39 #include "trad-frame.h"
41 #include "dwarf2-frame.h"
43 #include "prologue-value.h"
45 #include "target-descriptions.h"
46 #include "user-regs.h"
50 #include "arch/arm-get-next-pcs.h"
52 #include "gdb/sim-arm.h"
55 #include "coff/internal.h"
61 #include "record-full.h"
64 #include "features/arm/arm-with-m.c"
65 #include "features/arm/arm-with-m-fpa-layout.c"
66 #include "features/arm/arm-with-m-vfp-d16.c"
67 #include "features/arm/arm-with-iwmmxt.c"
68 #include "features/arm/arm-with-vfpv2.c"
69 #include "features/arm/arm-with-vfpv3.c"
70 #include "features/arm/arm-with-neon.c"
/* Macros for setting and testing a bit in a minimal symbol that marks
   it as Thumb function.  The MSB of the minimal symbol's "info" field
   is used for this purpose.

   MSYMBOL_SET_SPECIAL	Actually sets the "special" bit.
   MSYMBOL_IS_SPECIAL	Tests the "special" bit in a minimal symbol.  */

#define MSYMBOL_SET_SPECIAL(msym)				\
	MSYMBOL_TARGET_FLAG_1 (msym) = 1

#define MSYMBOL_IS_SPECIAL(msym)				\
	MSYMBOL_TARGET_FLAG_1 (msym)
87 /* Per-objfile data used for mapping symbols. */
88 static const struct objfile_data
*arm_objfile_data_key
;
90 struct arm_mapping_symbol
95 typedef struct arm_mapping_symbol arm_mapping_symbol_s
;
96 DEF_VEC_O(arm_mapping_symbol_s
);
98 struct arm_per_objfile
100 VEC(arm_mapping_symbol_s
) **section_maps
;
103 /* The list of available "set arm ..." and "show arm ..." commands. */
104 static struct cmd_list_element
*setarmcmdlist
= NULL
;
105 static struct cmd_list_element
*showarmcmdlist
= NULL
;
107 /* The type of floating-point to use. Keep this in sync with enum
108 arm_float_model, and the help string in _initialize_arm_tdep. */
109 static const char *const fp_model_strings
[] =
119 /* A variable that can be configured by the user. */
120 static enum arm_float_model arm_fp_model
= ARM_FLOAT_AUTO
;
121 static const char *current_fp_model
= "auto";
123 /* The ABI to use. Keep this in sync with arm_abi_kind. */
124 static const char *const arm_abi_strings
[] =
132 /* A variable that can be configured by the user. */
133 static enum arm_abi_kind arm_abi_global
= ARM_ABI_AUTO
;
134 static const char *arm_abi_string
= "auto";
136 /* The execution mode to assume. */
137 static const char *const arm_mode_strings
[] =
145 static const char *arm_fallback_mode_string
= "auto";
146 static const char *arm_force_mode_string
= "auto";
148 /* Number of different reg name sets (options). */
149 static int num_disassembly_options
;
/* NOTE(review): garbled extraction.  The arm_register_aliases table's
   struct header (original lines ~155-158) and all of its entries
   (~160-201) are missing from this view; only the section comments
   survive.  Tokens below are kept verbatim, including the stray
   original-line numbers; consult upstream arm-tdep.c before editing.  */
151 /* The standard register names, and all the valid aliases for them. Note
152 that `fp', `sp' and `pc' are not added in this alias list, because they
153 have been added as builtin user registers in
154 std-regs.c:_initialize_frame_reg. */
159 } arm_register_aliases
[] = {
160 /* Basic register numbers. */
177 /* Synonyms (argument and variable registers). */
190 /* Other platform-specific names for r9. */
196 /* Names used by GCC (not listed in the ARM EABI). */
198 /* A special name from the older ATPCS. */
/* Canonical names for raw registers 0-25: r0-r12, sp, lr, pc, the
   FPA registers f0-f7 plus fps, and cpsr (per the inline comments).  */
202 static const char *const arm_register_names
[] =
203 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
204 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
205 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
206 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
207 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
208 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
209 "fps", "cpsr" }; /* 24 25 */
211 /* Valid register name styles. */
212 static const char **valid_disassembly_styles
;
214 /* Disassembly style to use. Default to "std" register names. */
215 static const char *disassembly_style
;
217 /* This is used to keep the bfd arch_info in sync with the disassembly
219 static void set_disassembly_style_sfunc(char *, int,
220 struct cmd_list_element
*);
221 static void set_disassembly_style (void);
223 static void convert_from_extended (const struct floatformat
*, const void *,
225 static void convert_to_extended (const struct floatformat
*, void *,
228 static enum register_status
arm_neon_quad_read (struct gdbarch
*gdbarch
,
229 struct regcache
*regcache
,
230 int regnum
, gdb_byte
*buf
);
231 static void arm_neon_quad_write (struct gdbarch
*gdbarch
,
232 struct regcache
*regcache
,
233 int regnum
, const gdb_byte
*buf
);
236 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs
*self
);
239 /* get_next_pcs operations. */
240 static struct arm_get_next_pcs_ops arm_get_next_pcs_ops
= {
241 arm_get_next_pcs_read_memory_unsigned_integer
,
242 arm_get_next_pcs_syscall_next_pc
,
243 arm_get_next_pcs_addr_bits_remove
,
244 arm_get_next_pcs_is_thumb
,
248 struct arm_prologue_cache
250 /* The stack pointer at the time this frame was created; i.e. the
251 caller's stack pointer when this function was called. It is used
252 to identify this frame. */
255 /* The frame base for this frame is just prev_sp - frame size.
256 FRAMESIZE is the distance from the frame pointer to the
257 initial stack pointer. */
261 /* The register used to hold the frame pointer for this frame. */
264 /* Saved register offsets. */
265 struct trad_frame_saved_reg
*saved_regs
;
268 static CORE_ADDR
arm_analyze_prologue (struct gdbarch
*gdbarch
,
269 CORE_ADDR prologue_start
,
270 CORE_ADDR prologue_end
,
271 struct arm_prologue_cache
*cache
);
273 /* Architecture version for displaced stepping. This effects the behaviour of
274 certain instructions, and really should not be hard-wired. */
276 #define DISPLACED_STEPPING_ARCH_VERSION 5
278 /* Set to true if the 32-bit mode is in use. */
282 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
285 arm_psr_thumb_bit (struct gdbarch
*gdbarch
)
287 if (gdbarch_tdep (gdbarch
)->is_m
)
293 /* Determine if the processor is currently executing in Thumb mode. */
296 arm_is_thumb (struct regcache
*regcache
)
299 ULONGEST t_bit
= arm_psr_thumb_bit (get_regcache_arch (regcache
));
301 cpsr
= regcache_raw_get_unsigned (regcache
, ARM_PS_REGNUM
);
303 return (cpsr
& t_bit
) != 0;
306 /* Determine if FRAME is executing in Thumb mode. */
309 arm_frame_is_thumb (struct frame_info
*frame
)
312 ULONGEST t_bit
= arm_psr_thumb_bit (get_frame_arch (frame
));
314 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
315 directly (from a signal frame or dummy frame) or by interpreting
316 the saved LR (from a prologue or DWARF frame). So consult it and
317 trust the unwinders. */
318 cpsr
= get_frame_register_unsigned (frame
, ARM_PS_REGNUM
);
320 return (cpsr
& t_bit
) != 0;
323 /* Callback for VEC_lower_bound. */
326 arm_compare_mapping_symbols (const struct arm_mapping_symbol
*lhs
,
327 const struct arm_mapping_symbol
*rhs
)
329 return lhs
->value
< rhs
->value
;
332 /* Search for the mapping symbol covering MEMADDR. If one is found,
333 return its type. Otherwise, return 0. If START is non-NULL,
334 set *START to the location of the mapping symbol. */
337 arm_find_mapping_symbol (CORE_ADDR memaddr
, CORE_ADDR
*start
)
339 struct obj_section
*sec
;
341 /* If there are mapping symbols, consult them. */
342 sec
= find_pc_section (memaddr
);
345 struct arm_per_objfile
*data
;
346 VEC(arm_mapping_symbol_s
) *map
;
347 struct arm_mapping_symbol map_key
= { memaddr
- obj_section_addr (sec
),
351 data
= (struct arm_per_objfile
*) objfile_data (sec
->objfile
,
352 arm_objfile_data_key
);
355 map
= data
->section_maps
[sec
->the_bfd_section
->index
];
356 if (!VEC_empty (arm_mapping_symbol_s
, map
))
358 struct arm_mapping_symbol
*map_sym
;
360 idx
= VEC_lower_bound (arm_mapping_symbol_s
, map
, &map_key
,
361 arm_compare_mapping_symbols
);
363 /* VEC_lower_bound finds the earliest ordered insertion
364 point. If the following symbol starts at this exact
365 address, we use that; otherwise, the preceding
366 mapping symbol covers this address. */
367 if (idx
< VEC_length (arm_mapping_symbol_s
, map
))
369 map_sym
= VEC_index (arm_mapping_symbol_s
, map
, idx
);
370 if (map_sym
->value
== map_key
.value
)
373 *start
= map_sym
->value
+ obj_section_addr (sec
);
374 return map_sym
->type
;
380 map_sym
= VEC_index (arm_mapping_symbol_s
, map
, idx
- 1);
382 *start
= map_sym
->value
+ obj_section_addr (sec
);
383 return map_sym
->type
;
392 /* Determine if the program counter specified in MEMADDR is in a Thumb
393 function. This function should be called for addresses unrelated to
394 any executing frame; otherwise, prefer arm_frame_is_thumb. */
397 arm_pc_is_thumb (struct gdbarch
*gdbarch
, CORE_ADDR memaddr
)
399 struct bound_minimal_symbol sym
;
401 struct displaced_step_closure
* dsc
402 = get_displaced_step_closure_by_addr(memaddr
);
404 /* If checking the mode of displaced instruction in copy area, the mode
405 should be determined by instruction on the original address. */
409 fprintf_unfiltered (gdb_stdlog
,
410 "displaced: check mode of %.8lx instead of %.8lx\n",
411 (unsigned long) dsc
->insn_addr
,
412 (unsigned long) memaddr
);
413 memaddr
= dsc
->insn_addr
;
416 /* If bit 0 of the address is set, assume this is a Thumb address. */
417 if (IS_THUMB_ADDR (memaddr
))
420 /* If the user wants to override the symbol table, let him. */
421 if (strcmp (arm_force_mode_string
, "arm") == 0)
423 if (strcmp (arm_force_mode_string
, "thumb") == 0)
426 /* ARM v6-M and v7-M are always in Thumb mode. */
427 if (gdbarch_tdep (gdbarch
)->is_m
)
430 /* If there are mapping symbols, consult them. */
431 type
= arm_find_mapping_symbol (memaddr
, NULL
);
435 /* Thumb functions have a "special" bit set in minimal symbols. */
436 sym
= lookup_minimal_symbol_by_pc (memaddr
);
438 return (MSYMBOL_IS_SPECIAL (sym
.minsym
));
440 /* If the user wants to override the fallback mode, let them. */
441 if (strcmp (arm_fallback_mode_string
, "arm") == 0)
443 if (strcmp (arm_fallback_mode_string
, "thumb") == 0)
446 /* If we couldn't find any symbol, but we're talking to a running
447 target, then trust the current value of $cpsr. This lets
448 "display/i $pc" always show the correct mode (though if there is
449 a symbol table we will not reach here, so it still may not be
450 displayed in the mode it will be executed). */
451 if (target_has_registers
)
452 return arm_frame_is_thumb (get_current_frame ());
454 /* Otherwise we're out of luck; we assume ARM. */
458 /* Determine if the address specified equals any of these magic return
459 values, called EXC_RETURN, defined by the ARM v6-M and v7-M
462 From ARMv6-M Reference Manual B1.5.8
463 Table B1-5 Exception return behavior
465 EXC_RETURN Return To Return Stack
466 0xFFFFFFF1 Handler mode Main
467 0xFFFFFFF9 Thread mode Main
468 0xFFFFFFFD Thread mode Process
470 From ARMv7-M Reference Manual B1.5.8
471 Table B1-8 EXC_RETURN definition of exception return behavior, no FP
473 EXC_RETURN Return To Return Stack
474 0xFFFFFFF1 Handler mode Main
475 0xFFFFFFF9 Thread mode Main
476 0xFFFFFFFD Thread mode Process
478 Table B1-9 EXC_RETURN definition of exception return behavior, with
481 EXC_RETURN Return To Return Stack Frame Type
482 0xFFFFFFE1 Handler mode Main Extended
483 0xFFFFFFE9 Thread mode Main Extended
484 0xFFFFFFED Thread mode Process Extended
485 0xFFFFFFF1 Handler mode Main Basic
486 0xFFFFFFF9 Thread mode Main Basic
487 0xFFFFFFFD Thread mode Process Basic
489 For more details see "B1.5.8 Exception return behavior"
490 in both ARMv6-M and ARMv7-M Architecture Reference Manuals. */
493 arm_m_addr_is_magic (CORE_ADDR addr
)
497 /* Values from Tables in B1.5.8 the EXC_RETURN definitions of
498 the exception return behavior. */
505 /* Address is magic. */
509 /* Address is not magic. */
514 /* Remove useless bits from addresses in a running program. */
516 arm_addr_bits_remove (struct gdbarch
*gdbarch
, CORE_ADDR val
)
518 /* On M-profile devices, do not strip the low bit from EXC_RETURN
519 (the magic exception return address). */
520 if (gdbarch_tdep (gdbarch
)->is_m
521 && arm_m_addr_is_magic (val
))
525 return UNMAKE_THUMB_ADDR (val
);
527 return (val
& 0x03fffffc);
530 /* Return 1 if PC is the start of a compiler helper function which
531 can be safely ignored during prologue skipping. IS_THUMB is true
532 if the function is known to be a Thumb function due to the way it
535 skip_prologue_function (struct gdbarch
*gdbarch
, CORE_ADDR pc
, int is_thumb
)
537 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
538 struct bound_minimal_symbol msym
;
540 msym
= lookup_minimal_symbol_by_pc (pc
);
541 if (msym
.minsym
!= NULL
542 && BMSYMBOL_VALUE_ADDRESS (msym
) == pc
543 && MSYMBOL_LINKAGE_NAME (msym
.minsym
) != NULL
)
545 const char *name
= MSYMBOL_LINKAGE_NAME (msym
.minsym
);
547 /* The GNU linker's Thumb call stub to foo is named
549 if (strstr (name
, "_from_thumb") != NULL
)
552 /* On soft-float targets, __truncdfsf2 is called to convert promoted
553 arguments to their argument types in non-prototyped
555 if (startswith (name
, "__truncdfsf2"))
557 if (startswith (name
, "__aeabi_d2f"))
560 /* Internal functions related to thread-local storage. */
561 if (startswith (name
, "__tls_get_addr"))
563 if (startswith (name
, "__aeabi_read_tp"))
568 /* If we run against a stripped glibc, we may be unable to identify
569 special functions by name. Check for one important case,
570 __aeabi_read_tp, by comparing the *code* against the default
571 implementation (this is hand-written ARM assembler in glibc). */
574 && read_code_unsigned_integer (pc
, 4, byte_order_for_code
)
575 == 0xe3e00a0f /* mov r0, #0xffff0fff */
576 && read_code_unsigned_integer (pc
+ 4, 4, byte_order_for_code
)
577 == 0xe240f01f) /* sub pc, r0, #31 */
/* Extract the immediate from instruction movw/movt of encoding T.  INSN1 is
   the first 16-bit of instruction, and INSN2 is the second 16-bit of
   instruction.  */
#define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
  ((bits ((insn1), 0, 3) << 12)               \
   | (bits ((insn1), 10, 10) << 11)           \
   | (bits ((insn2), 12, 14) << 8)            \
   | bits ((insn2), 0, 7))

/* Extract the immediate from instruction movw/movt of encoding A.  INSN is
   the 32-bit instruction.  */
#define EXTRACT_MOVW_MOVT_IMM_A(insn) \
  ((bits ((insn), 16, 19) << 12) \
   | bits ((insn), 0, 11))
/* Decode immediate value; implements ThumbExpandImmediate pseudo-op.

   IMM is the raw 12-bit modified immediate (i:imm3:imm8).  When the top
   five bits (COUNT) are below 8, the 8-bit payload is replicated into
   one of four byte patterns selected by COUNT/2; otherwise the result
   is 0x80|imm<6:0> rotated right by COUNT (equivalently shifted left
   by 32-COUNT, since COUNT >= 8 shifts the byte out on the right).

   NOTE(review): the `if (count < 8)' / `switch (count / 2)' scaffolding
   and the case-0 return were dropped by the extraction; reconstructed
   per the ARM ARM ThumbExpandImm pseudocode -- verify.  */

static unsigned int
thumb_expand_immediate (unsigned int imm)
{
  unsigned int count = imm >> 7;

  if (count < 8)
    switch (count / 2)
      {
      case 0:
	/* 00000000 00000000 00000000 abcdefgh.  */
	return (imm & 0xff);
      case 1:
	/* 00000000 abcdefgh 00000000 abcdefgh.  */
	return (imm & 0xff) | ((imm & 0xff) << 16);
      case 2:
	/* abcdefgh 00000000 abcdefgh 00000000.  */
	return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
      case 3:
	/* abcdefgh abcdefgh abcdefgh abcdefgh.  */
	return (imm & 0xff) | ((imm & 0xff) << 8)
		| ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
      }

  /* Rotated constant: COUNT is in [8, 31], so the shift below is in
     [1, 24] and well-defined.  */
  return (0x80 | (imm & 0x7f)) << (32 - count);
}
/* Return 1 if the 16-bit Thumb instruction INSN restores SP in
   epilogue, 0 otherwise.  */

static int
thumb_instruction_restores_sp (unsigned short insn)
{
  return (insn == 0x46bd		/* mov sp, r7 */
	  || (insn & 0xff80) == 0xb000	/* add sp, imm */
	  || (insn & 0xfe00) == 0xbc00);  /* pop <registers> */
}
634 /* Analyze a Thumb prologue, looking for a recognizable stack frame
635 and frame pointer. Scan until we encounter a store that could
636 clobber the stack frame unexpectedly, or an unknown instruction.
637 Return the last address which is definitely safe to skip for an
638 initial breakpoint. */
641 thumb_analyze_prologue (struct gdbarch
*gdbarch
,
642 CORE_ADDR start
, CORE_ADDR limit
,
643 struct arm_prologue_cache
*cache
)
645 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
646 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
649 struct pv_area
*stack
;
650 struct cleanup
*back_to
;
652 CORE_ADDR unrecognized_pc
= 0;
654 for (i
= 0; i
< 16; i
++)
655 regs
[i
] = pv_register (i
, 0);
656 stack
= make_pv_area (ARM_SP_REGNUM
, gdbarch_addr_bit (gdbarch
));
657 back_to
= make_cleanup_free_pv_area (stack
);
659 while (start
< limit
)
663 insn
= read_code_unsigned_integer (start
, 2, byte_order_for_code
);
665 if ((insn
& 0xfe00) == 0xb400) /* push { rlist } */
670 if (pv_area_store_would_trash (stack
, regs
[ARM_SP_REGNUM
]))
673 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
674 whether to save LR (R14). */
675 mask
= (insn
& 0xff) | ((insn
& 0x100) << 6);
677 /* Calculate offsets of saved R0-R7 and LR. */
678 for (regno
= ARM_LR_REGNUM
; regno
>= 0; regno
--)
679 if (mask
& (1 << regno
))
681 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
],
683 pv_area_store (stack
, regs
[ARM_SP_REGNUM
], 4, regs
[regno
]);
686 else if ((insn
& 0xff80) == 0xb080) /* sub sp, #imm */
688 offset
= (insn
& 0x7f) << 2; /* get scaled offset */
689 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
],
692 else if (thumb_instruction_restores_sp (insn
))
694 /* Don't scan past the epilogue. */
697 else if ((insn
& 0xf800) == 0xa800) /* add Rd, sp, #imm */
698 regs
[bits (insn
, 8, 10)] = pv_add_constant (regs
[ARM_SP_REGNUM
],
700 else if ((insn
& 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
701 && pv_is_register (regs
[bits (insn
, 3, 5)], ARM_SP_REGNUM
))
702 regs
[bits (insn
, 0, 2)] = pv_add_constant (regs
[bits (insn
, 3, 5)],
704 else if ((insn
& 0xf800) == 0x3000 /* add Rd, #imm */
705 && pv_is_register (regs
[bits (insn
, 8, 10)], ARM_SP_REGNUM
))
706 regs
[bits (insn
, 8, 10)] = pv_add_constant (regs
[bits (insn
, 8, 10)],
708 else if ((insn
& 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
709 && pv_is_register (regs
[bits (insn
, 6, 8)], ARM_SP_REGNUM
)
710 && pv_is_constant (regs
[bits (insn
, 3, 5)]))
711 regs
[bits (insn
, 0, 2)] = pv_add (regs
[bits (insn
, 3, 5)],
712 regs
[bits (insn
, 6, 8)]);
713 else if ((insn
& 0xff00) == 0x4400 /* add Rd, Rm */
714 && pv_is_constant (regs
[bits (insn
, 3, 6)]))
716 int rd
= (bit (insn
, 7) << 3) + bits (insn
, 0, 2);
717 int rm
= bits (insn
, 3, 6);
718 regs
[rd
] = pv_add (regs
[rd
], regs
[rm
]);
720 else if ((insn
& 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
722 int dst_reg
= (insn
& 0x7) + ((insn
& 0x80) >> 4);
723 int src_reg
= (insn
& 0x78) >> 3;
724 regs
[dst_reg
] = regs
[src_reg
];
726 else if ((insn
& 0xf800) == 0x9000) /* str rd, [sp, #off] */
728 /* Handle stores to the stack. Normally pushes are used,
729 but with GCC -mtpcs-frame, there may be other stores
730 in the prologue to create the frame. */
731 int regno
= (insn
>> 8) & 0x7;
734 offset
= (insn
& 0xff) << 2;
735 addr
= pv_add_constant (regs
[ARM_SP_REGNUM
], offset
);
737 if (pv_area_store_would_trash (stack
, addr
))
740 pv_area_store (stack
, addr
, 4, regs
[regno
]);
742 else if ((insn
& 0xf800) == 0x6000) /* str rd, [rn, #off] */
744 int rd
= bits (insn
, 0, 2);
745 int rn
= bits (insn
, 3, 5);
748 offset
= bits (insn
, 6, 10) << 2;
749 addr
= pv_add_constant (regs
[rn
], offset
);
751 if (pv_area_store_would_trash (stack
, addr
))
754 pv_area_store (stack
, addr
, 4, regs
[rd
]);
756 else if (((insn
& 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
757 || (insn
& 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
758 && pv_is_register (regs
[bits (insn
, 3, 5)], ARM_SP_REGNUM
))
759 /* Ignore stores of argument registers to the stack. */
761 else if ((insn
& 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
762 && pv_is_register (regs
[bits (insn
, 8, 10)], ARM_SP_REGNUM
))
763 /* Ignore block loads from the stack, potentially copying
764 parameters from memory. */
766 else if ((insn
& 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
767 || ((insn
& 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
768 && pv_is_register (regs
[bits (insn
, 3, 5)], ARM_SP_REGNUM
)))
769 /* Similarly ignore single loads from the stack. */
771 else if ((insn
& 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
772 || (insn
& 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
773 /* Skip register copies, i.e. saves to another register
774 instead of the stack. */
776 else if ((insn
& 0xf800) == 0x2000) /* movs Rd, #imm */
777 /* Recognize constant loads; even with small stacks these are necessary
779 regs
[bits (insn
, 8, 10)] = pv_constant (bits (insn
, 0, 7));
780 else if ((insn
& 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
782 /* Constant pool loads, for the same reason. */
783 unsigned int constant
;
786 loc
= start
+ 4 + bits (insn
, 0, 7) * 4;
787 constant
= read_memory_unsigned_integer (loc
, 4, byte_order
);
788 regs
[bits (insn
, 8, 10)] = pv_constant (constant
);
790 else if (thumb_insn_size (insn
) == 4) /* 32-bit Thumb-2 instructions. */
792 unsigned short inst2
;
794 inst2
= read_code_unsigned_integer (start
+ 2, 2,
795 byte_order_for_code
);
797 if ((insn
& 0xf800) == 0xf000 && (inst2
& 0xe800) == 0xe800)
799 /* BL, BLX. Allow some special function calls when
800 skipping the prologue; GCC generates these before
801 storing arguments to the stack. */
803 int j1
, j2
, imm1
, imm2
;
805 imm1
= sbits (insn
, 0, 10);
806 imm2
= bits (inst2
, 0, 10);
807 j1
= bit (inst2
, 13);
808 j2
= bit (inst2
, 11);
810 offset
= ((imm1
<< 12) + (imm2
<< 1));
811 offset
^= ((!j2
) << 22) | ((!j1
) << 23);
813 nextpc
= start
+ 4 + offset
;
814 /* For BLX make sure to clear the low bits. */
815 if (bit (inst2
, 12) == 0)
816 nextpc
= nextpc
& 0xfffffffc;
818 if (!skip_prologue_function (gdbarch
, nextpc
,
819 bit (inst2
, 12) != 0))
823 else if ((insn
& 0xffd0) == 0xe900 /* stmdb Rn{!},
825 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
827 pv_t addr
= regs
[bits (insn
, 0, 3)];
830 if (pv_area_store_would_trash (stack
, addr
))
833 /* Calculate offsets of saved registers. */
834 for (regno
= ARM_LR_REGNUM
; regno
>= 0; regno
--)
835 if (inst2
& (1 << regno
))
837 addr
= pv_add_constant (addr
, -4);
838 pv_area_store (stack
, addr
, 4, regs
[regno
]);
842 regs
[bits (insn
, 0, 3)] = addr
;
845 else if ((insn
& 0xff50) == 0xe940 /* strd Rt, Rt2,
847 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
849 int regno1
= bits (inst2
, 12, 15);
850 int regno2
= bits (inst2
, 8, 11);
851 pv_t addr
= regs
[bits (insn
, 0, 3)];
853 offset
= inst2
& 0xff;
855 addr
= pv_add_constant (addr
, offset
);
857 addr
= pv_add_constant (addr
, -offset
);
859 if (pv_area_store_would_trash (stack
, addr
))
862 pv_area_store (stack
, addr
, 4, regs
[regno1
]);
863 pv_area_store (stack
, pv_add_constant (addr
, 4),
867 regs
[bits (insn
, 0, 3)] = addr
;
870 else if ((insn
& 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
871 && (inst2
& 0x0c00) == 0x0c00
872 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
874 int regno
= bits (inst2
, 12, 15);
875 pv_t addr
= regs
[bits (insn
, 0, 3)];
877 offset
= inst2
& 0xff;
879 addr
= pv_add_constant (addr
, offset
);
881 addr
= pv_add_constant (addr
, -offset
);
883 if (pv_area_store_would_trash (stack
, addr
))
886 pv_area_store (stack
, addr
, 4, regs
[regno
]);
889 regs
[bits (insn
, 0, 3)] = addr
;
892 else if ((insn
& 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
893 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
895 int regno
= bits (inst2
, 12, 15);
898 offset
= inst2
& 0xfff;
899 addr
= pv_add_constant (regs
[bits (insn
, 0, 3)], offset
);
901 if (pv_area_store_would_trash (stack
, addr
))
904 pv_area_store (stack
, addr
, 4, regs
[regno
]);
907 else if ((insn
& 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
908 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
909 /* Ignore stores of argument registers to the stack. */
912 else if ((insn
& 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
913 && (inst2
& 0x0d00) == 0x0c00
914 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
915 /* Ignore stores of argument registers to the stack. */
918 else if ((insn
& 0xffd0) == 0xe890 /* ldmia Rn[!],
920 && (inst2
& 0x8000) == 0x0000
921 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
922 /* Ignore block loads from the stack, potentially copying
923 parameters from memory. */
926 else if ((insn
& 0xffb0) == 0xe950 /* ldrd Rt, Rt2,
928 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
929 /* Similarly ignore dual loads from the stack. */
932 else if ((insn
& 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
933 && (inst2
& 0x0d00) == 0x0c00
934 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
935 /* Similarly ignore single loads from the stack. */
938 else if ((insn
& 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
939 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
940 /* Similarly ignore single loads from the stack. */
943 else if ((insn
& 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
944 && (inst2
& 0x8000) == 0x0000)
946 unsigned int imm
= ((bits (insn
, 10, 10) << 11)
947 | (bits (inst2
, 12, 14) << 8)
948 | bits (inst2
, 0, 7));
950 regs
[bits (inst2
, 8, 11)]
951 = pv_add_constant (regs
[bits (insn
, 0, 3)],
952 thumb_expand_immediate (imm
));
955 else if ((insn
& 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
956 && (inst2
& 0x8000) == 0x0000)
958 unsigned int imm
= ((bits (insn
, 10, 10) << 11)
959 | (bits (inst2
, 12, 14) << 8)
960 | bits (inst2
, 0, 7));
962 regs
[bits (inst2
, 8, 11)]
963 = pv_add_constant (regs
[bits (insn
, 0, 3)], imm
);
966 else if ((insn
& 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
967 && (inst2
& 0x8000) == 0x0000)
969 unsigned int imm
= ((bits (insn
, 10, 10) << 11)
970 | (bits (inst2
, 12, 14) << 8)
971 | bits (inst2
, 0, 7));
973 regs
[bits (inst2
, 8, 11)]
974 = pv_add_constant (regs
[bits (insn
, 0, 3)],
975 - (CORE_ADDR
) thumb_expand_immediate (imm
));
978 else if ((insn
& 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
979 && (inst2
& 0x8000) == 0x0000)
981 unsigned int imm
= ((bits (insn
, 10, 10) << 11)
982 | (bits (inst2
, 12, 14) << 8)
983 | bits (inst2
, 0, 7));
985 regs
[bits (inst2
, 8, 11)]
986 = pv_add_constant (regs
[bits (insn
, 0, 3)], - (CORE_ADDR
) imm
);
989 else if ((insn
& 0xfbff) == 0xf04f) /* mov.w Rd, #const */
991 unsigned int imm
= ((bits (insn
, 10, 10) << 11)
992 | (bits (inst2
, 12, 14) << 8)
993 | bits (inst2
, 0, 7));
995 regs
[bits (inst2
, 8, 11)]
996 = pv_constant (thumb_expand_immediate (imm
));
999 else if ((insn
& 0xfbf0) == 0xf240) /* movw Rd, #const */
1002 = EXTRACT_MOVW_MOVT_IMM_T (insn
, inst2
);
1004 regs
[bits (inst2
, 8, 11)] = pv_constant (imm
);
1007 else if (insn
== 0xea5f /* mov.w Rd,Rm */
1008 && (inst2
& 0xf0f0) == 0)
1010 int dst_reg
= (inst2
& 0x0f00) >> 8;
1011 int src_reg
= inst2
& 0xf;
1012 regs
[dst_reg
] = regs
[src_reg
];
1015 else if ((insn
& 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1017 /* Constant pool loads. */
1018 unsigned int constant
;
1021 offset
= bits (inst2
, 0, 11);
1023 loc
= start
+ 4 + offset
;
1025 loc
= start
+ 4 - offset
;
1027 constant
= read_memory_unsigned_integer (loc
, 4, byte_order
);
1028 regs
[bits (inst2
, 12, 15)] = pv_constant (constant
);
1031 else if ((insn
& 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1033 /* Constant pool loads. */
1034 unsigned int constant
;
1037 offset
= bits (inst2
, 0, 7) << 2;
1039 loc
= start
+ 4 + offset
;
1041 loc
= start
+ 4 - offset
;
1043 constant
= read_memory_unsigned_integer (loc
, 4, byte_order
);
1044 regs
[bits (inst2
, 12, 15)] = pv_constant (constant
);
1046 constant
= read_memory_unsigned_integer (loc
+ 4, 4, byte_order
);
1047 regs
[bits (inst2
, 8, 11)] = pv_constant (constant
);
1050 else if (thumb2_instruction_changes_pc (insn
, inst2
))
1052 /* Don't scan past anything that might change control flow. */
1057 /* The optimizer might shove anything into the prologue,
1058 so we just skip what we don't recognize. */
1059 unrecognized_pc
= start
;
1064 else if (thumb_instruction_changes_pc (insn
))
1066 /* Don't scan past anything that might change control flow. */
1071 /* The optimizer might shove anything into the prologue,
1072 so we just skip what we don't recognize. */
1073 unrecognized_pc
= start
;
1080 fprintf_unfiltered (gdb_stdlog
, "Prologue scan stopped at %s\n",
1081 paddress (gdbarch
, start
));
1083 if (unrecognized_pc
== 0)
1084 unrecognized_pc
= start
;
1088 do_cleanups (back_to
);
1089 return unrecognized_pc
;
1092 if (pv_is_register (regs
[ARM_FP_REGNUM
], ARM_SP_REGNUM
))
1094 /* Frame pointer is fp. Frame size is constant. */
1095 cache
->framereg
= ARM_FP_REGNUM
;
1096 cache
->framesize
= -regs
[ARM_FP_REGNUM
].k
;
1098 else if (pv_is_register (regs
[THUMB_FP_REGNUM
], ARM_SP_REGNUM
))
1100 /* Frame pointer is r7. Frame size is constant. */
1101 cache
->framereg
= THUMB_FP_REGNUM
;
1102 cache
->framesize
= -regs
[THUMB_FP_REGNUM
].k
;
1106 /* Try the stack pointer... this is a bit desperate. */
1107 cache
->framereg
= ARM_SP_REGNUM
;
1108 cache
->framesize
= -regs
[ARM_SP_REGNUM
].k
;
1111 for (i
= 0; i
< 16; i
++)
1112 if (pv_area_find_reg (stack
, gdbarch
, i
, &offset
))
1113 cache
->saved_regs
[i
].addr
= offset
;
1115 do_cleanups (back_to
);
1116 return unrecognized_pc
;
1120 /* Try to analyze the instructions starting from PC, which load symbol
1121 __stack_chk_guard. Return the address of instruction after loading this
1122 symbol, set the dest register number to *BASEREG, and set the size of
1123 instructions for loading symbol in OFFSET. Return 0 if instructions are
1127 arm_analyze_load_stack_chk_guard(CORE_ADDR pc
, struct gdbarch
*gdbarch
,
1128 unsigned int *destreg
, int *offset
)
1130 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
1131 int is_thumb
= arm_pc_is_thumb (gdbarch
, pc
);
1132 unsigned int low
, high
, address
;
1137 unsigned short insn1
1138 = read_code_unsigned_integer (pc
, 2, byte_order_for_code
);
1140 if ((insn1
& 0xf800) == 0x4800) /* ldr Rd, #immed */
1142 *destreg
= bits (insn1
, 8, 10);
1144 address
= (pc
& 0xfffffffc) + 4 + (bits (insn1
, 0, 7) << 2);
1145 address
= read_memory_unsigned_integer (address
, 4,
1146 byte_order_for_code
);
1148 else if ((insn1
& 0xfbf0) == 0xf240) /* movw Rd, #const */
1150 unsigned short insn2
1151 = read_code_unsigned_integer (pc
+ 2, 2, byte_order_for_code
);
1153 low
= EXTRACT_MOVW_MOVT_IMM_T (insn1
, insn2
);
1156 = read_code_unsigned_integer (pc
+ 4, 2, byte_order_for_code
);
1158 = read_code_unsigned_integer (pc
+ 6, 2, byte_order_for_code
);
1160 /* movt Rd, #const */
1161 if ((insn1
& 0xfbc0) == 0xf2c0)
1163 high
= EXTRACT_MOVW_MOVT_IMM_T (insn1
, insn2
);
1164 *destreg
= bits (insn2
, 8, 11);
1166 address
= (high
<< 16 | low
);
1173 = read_code_unsigned_integer (pc
, 4, byte_order_for_code
);
1175 if ((insn
& 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
1177 address
= bits (insn
, 0, 11) + pc
+ 8;
1178 address
= read_memory_unsigned_integer (address
, 4,
1179 byte_order_for_code
);
1181 *destreg
= bits (insn
, 12, 15);
1184 else if ((insn
& 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1186 low
= EXTRACT_MOVW_MOVT_IMM_A (insn
);
1189 = read_code_unsigned_integer (pc
+ 4, 4, byte_order_for_code
);
1191 if ((insn
& 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1193 high
= EXTRACT_MOVW_MOVT_IMM_A (insn
);
1194 *destreg
= bits (insn
, 12, 15);
1196 address
= (high
<< 16 | low
);
1204 /* Try to skip a sequence of instructions used for stack protector. If PC
1205 points to the first instruction of this sequence, return the address of
1206 first instruction after this sequence, otherwise, return original PC.
1208 On arm, this sequence of instructions is composed of mainly three steps,
1209 Step 1: load symbol __stack_chk_guard,
1210 Step 2: load from address of __stack_chk_guard,
1211 Step 3: store it to somewhere else.
1213 Usually, instructions on step 2 and step 3 are the same on various ARM
1214 architectures. On step 2, it is one instruction 'ldr Rx, [Rn, #0]', and
1215 on step 3, it is also one instruction 'str Rx, [r7, #immd]'. However,
1216 instructions in step 1 vary from different ARM architectures. On ARMv7,
1219 movw Rn, #:lower16:__stack_chk_guard
1220 movt Rn, #:upper16:__stack_chk_guard
1227 .word __stack_chk_guard
1229 Since ldr/str is a very popular instruction, we can't use them as
1230 'fingerprint' or 'signature' of stack protector sequence. Here we choose
1231 sequence {movw/movt, ldr}/ldr/str plus symbol __stack_chk_guard, if not
1232 stripped, as the 'fingerprint' of a stack protector cdoe sequence. */
1235 arm_skip_stack_protector(CORE_ADDR pc
, struct gdbarch
*gdbarch
)
1237 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
1238 unsigned int basereg
;
1239 struct bound_minimal_symbol stack_chk_guard
;
1241 int is_thumb
= arm_pc_is_thumb (gdbarch
, pc
);
1244 /* Try to parse the instructions in Step 1. */
1245 addr
= arm_analyze_load_stack_chk_guard (pc
, gdbarch
,
1250 stack_chk_guard
= lookup_minimal_symbol_by_pc (addr
);
1251 /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
1252 Otherwise, this sequence cannot be for stack protector. */
1253 if (stack_chk_guard
.minsym
== NULL
1254 || !startswith (MSYMBOL_LINKAGE_NAME (stack_chk_guard
.minsym
), "__stack_chk_guard"))
1259 unsigned int destreg
;
1261 = read_code_unsigned_integer (pc
+ offset
, 2, byte_order_for_code
);
1263 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1264 if ((insn
& 0xf800) != 0x6800)
1266 if (bits (insn
, 3, 5) != basereg
)
1268 destreg
= bits (insn
, 0, 2);
1270 insn
= read_code_unsigned_integer (pc
+ offset
+ 2, 2,
1271 byte_order_for_code
);
1272 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1273 if ((insn
& 0xf800) != 0x6000)
1275 if (destreg
!= bits (insn
, 0, 2))
1280 unsigned int destreg
;
1282 = read_code_unsigned_integer (pc
+ offset
, 4, byte_order_for_code
);
1284 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1285 if ((insn
& 0x0e500000) != 0x04100000)
1287 if (bits (insn
, 16, 19) != basereg
)
1289 destreg
= bits (insn
, 12, 15);
1290 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1291 insn
= read_code_unsigned_integer (pc
+ offset
+ 4,
1292 4, byte_order_for_code
);
1293 if ((insn
& 0x0e500000) != 0x04000000)
1295 if (bits (insn
, 12, 15) != destreg
)
1298 /* The size of total two instructions ldr/str is 4 on Thumb-2, while 8
1301 return pc
+ offset
+ 4;
1303 return pc
+ offset
+ 8;
1306 /* Advance the PC across any function entry prologue instructions to
1307 reach some "real" code.
1309 The APCS (ARM Procedure Call Standard) defines the following
1313 [stmfd sp!, {a1,a2,a3,a4}]
1314 stmfd sp!, {...,fp,ip,lr,pc}
1315 [stfe f7, [sp, #-12]!]
1316 [stfe f6, [sp, #-12]!]
1317 [stfe f5, [sp, #-12]!]
1318 [stfe f4, [sp, #-12]!]
1319 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1322 arm_skip_prologue (struct gdbarch
*gdbarch
, CORE_ADDR pc
)
1324 CORE_ADDR func_addr
, limit_pc
;
1326 /* See if we can determine the end of the prologue via the symbol table.
1327 If so, then return either PC, or the PC after the prologue, whichever
1329 if (find_pc_partial_function (pc
, NULL
, &func_addr
, NULL
))
1331 CORE_ADDR post_prologue_pc
1332 = skip_prologue_using_sal (gdbarch
, func_addr
);
1333 struct compunit_symtab
*cust
= find_pc_compunit_symtab (func_addr
);
1335 if (post_prologue_pc
)
1337 = arm_skip_stack_protector (post_prologue_pc
, gdbarch
);
1340 /* GCC always emits a line note before the prologue and another
1341 one after, even if the two are at the same address or on the
1342 same line. Take advantage of this so that we do not need to
1343 know every instruction that might appear in the prologue. We
1344 will have producer information for most binaries; if it is
1345 missing (e.g. for -gstabs), assuming the GNU tools. */
1346 if (post_prologue_pc
1348 || COMPUNIT_PRODUCER (cust
) == NULL
1349 || startswith (COMPUNIT_PRODUCER (cust
), "GNU ")
1350 || startswith (COMPUNIT_PRODUCER (cust
), "clang ")))
1351 return post_prologue_pc
;
1353 if (post_prologue_pc
!= 0)
1355 CORE_ADDR analyzed_limit
;
1357 /* For non-GCC compilers, make sure the entire line is an
1358 acceptable prologue; GDB will round this function's
1359 return value up to the end of the following line so we
1360 can not skip just part of a line (and we do not want to).
1362 RealView does not treat the prologue specially, but does
1363 associate prologue code with the opening brace; so this
1364 lets us skip the first line if we think it is the opening
1366 if (arm_pc_is_thumb (gdbarch
, func_addr
))
1367 analyzed_limit
= thumb_analyze_prologue (gdbarch
, func_addr
,
1368 post_prologue_pc
, NULL
);
1370 analyzed_limit
= arm_analyze_prologue (gdbarch
, func_addr
,
1371 post_prologue_pc
, NULL
);
1373 if (analyzed_limit
!= post_prologue_pc
)
1376 return post_prologue_pc
;
1380 /* Can't determine prologue from the symbol table, need to examine
1383 /* Find an upper limit on the function prologue using the debug
1384 information. If the debug information could not be used to provide
1385 that bound, then use an arbitrary large number as the upper bound. */
1386 /* Like arm_scan_prologue, stop no later than pc + 64. */
1387 limit_pc
= skip_prologue_using_sal (gdbarch
, pc
);
1389 limit_pc
= pc
+ 64; /* Magic. */
1392 /* Check if this is Thumb code. */
1393 if (arm_pc_is_thumb (gdbarch
, pc
))
1394 return thumb_analyze_prologue (gdbarch
, pc
, limit_pc
, NULL
);
1396 return arm_analyze_prologue (gdbarch
, pc
, limit_pc
, NULL
);
1400 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1401 This function decodes a Thumb function prologue to determine:
1402 1) the size of the stack frame
1403 2) which registers are saved on it
1404 3) the offsets of saved regs
1405 4) the offset from the stack pointer to the frame pointer
1407 A typical Thumb function prologue would create this stack frame
1408 (offsets relative to FP)
1409 old SP -> 24 stack parameters
1412 R7 -> 0 local variables (16 bytes)
1413 SP -> -12 additional stack space (12 bytes)
1414 The frame size would thus be 36 bytes, and the frame offset would be
1415 12 bytes. The frame register is R7.
1417 The comments for thumb_skip_prolog() describe the algorithm we use
1418 to detect the end of the prolog. */
1422 thumb_scan_prologue (struct gdbarch
*gdbarch
, CORE_ADDR prev_pc
,
1423 CORE_ADDR block_addr
, struct arm_prologue_cache
*cache
)
1425 CORE_ADDR prologue_start
;
1426 CORE_ADDR prologue_end
;
1428 if (find_pc_partial_function (block_addr
, NULL
, &prologue_start
,
1431 /* See comment in arm_scan_prologue for an explanation of
1433 if (prologue_end
> prologue_start
+ 64)
1435 prologue_end
= prologue_start
+ 64;
1439 /* We're in the boondocks: we have no idea where the start of the
1443 prologue_end
= std::min (prologue_end
, prev_pc
);
1445 thumb_analyze_prologue (gdbarch
, prologue_start
, prologue_end
, cache
);
1448 /* Return 1 if the ARM instruction INSN restores SP in epilogue, 0
1452 arm_instruction_restores_sp (unsigned int insn
)
1454 if (bits (insn
, 28, 31) != INST_NV
)
1456 if ((insn
& 0x0df0f000) == 0x0080d000
1457 /* ADD SP (register or immediate). */
1458 || (insn
& 0x0df0f000) == 0x0040d000
1459 /* SUB SP (register or immediate). */
1460 || (insn
& 0x0ffffff0) == 0x01a0d000
1462 || (insn
& 0x0fff0000) == 0x08bd0000
1464 || (insn
& 0x0fff0000) == 0x049d0000)
1465 /* POP of a single register. */
1472 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1473 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1474 fill it in. Return the first address not recognized as a prologue
1477 We recognize all the instructions typically found in ARM prologues,
1478 plus harmless instructions which can be skipped (either for analysis
1479 purposes, or a more restrictive set that can be skipped when finding
1480 the end of the prologue). */
1483 arm_analyze_prologue (struct gdbarch
*gdbarch
,
1484 CORE_ADDR prologue_start
, CORE_ADDR prologue_end
,
1485 struct arm_prologue_cache
*cache
)
1487 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
1489 CORE_ADDR offset
, current_pc
;
1490 pv_t regs
[ARM_FPS_REGNUM
];
1491 struct pv_area
*stack
;
1492 struct cleanup
*back_to
;
1493 CORE_ADDR unrecognized_pc
= 0;
1495 /* Search the prologue looking for instructions that set up the
1496 frame pointer, adjust the stack pointer, and save registers.
1498 Be careful, however, and if it doesn't look like a prologue,
1499 don't try to scan it. If, for instance, a frameless function
1500 begins with stmfd sp!, then we will tell ourselves there is
1501 a frame, which will confuse stack traceback, as well as "finish"
1502 and other operations that rely on a knowledge of the stack
1505 for (regno
= 0; regno
< ARM_FPS_REGNUM
; regno
++)
1506 regs
[regno
] = pv_register (regno
, 0);
1507 stack
= make_pv_area (ARM_SP_REGNUM
, gdbarch_addr_bit (gdbarch
));
1508 back_to
= make_cleanup_free_pv_area (stack
);
1510 for (current_pc
= prologue_start
;
1511 current_pc
< prologue_end
;
1515 = read_code_unsigned_integer (current_pc
, 4, byte_order_for_code
);
1517 if (insn
== 0xe1a0c00d) /* mov ip, sp */
1519 regs
[ARM_IP_REGNUM
] = regs
[ARM_SP_REGNUM
];
1522 else if ((insn
& 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1523 && pv_is_register (regs
[bits (insn
, 16, 19)], ARM_SP_REGNUM
))
1525 unsigned imm
= insn
& 0xff; /* immediate value */
1526 unsigned rot
= (insn
& 0xf00) >> 7; /* rotate amount */
1527 int rd
= bits (insn
, 12, 15);
1528 imm
= (imm
>> rot
) | (imm
<< (32 - rot
));
1529 regs
[rd
] = pv_add_constant (regs
[bits (insn
, 16, 19)], imm
);
1532 else if ((insn
& 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1533 && pv_is_register (regs
[bits (insn
, 16, 19)], ARM_SP_REGNUM
))
1535 unsigned imm
= insn
& 0xff; /* immediate value */
1536 unsigned rot
= (insn
& 0xf00) >> 7; /* rotate amount */
1537 int rd
= bits (insn
, 12, 15);
1538 imm
= (imm
>> rot
) | (imm
<< (32 - rot
));
1539 regs
[rd
] = pv_add_constant (regs
[bits (insn
, 16, 19)], -imm
);
1542 else if ((insn
& 0xffff0fff) == 0xe52d0004) /* str Rd,
1545 if (pv_area_store_would_trash (stack
, regs
[ARM_SP_REGNUM
]))
1547 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
], -4);
1548 pv_area_store (stack
, regs
[ARM_SP_REGNUM
], 4,
1549 regs
[bits (insn
, 12, 15)]);
1552 else if ((insn
& 0xffff0000) == 0xe92d0000)
1553 /* stmfd sp!, {..., fp, ip, lr, pc}
1555 stmfd sp!, {a1, a2, a3, a4} */
1557 int mask
= insn
& 0xffff;
1559 if (pv_area_store_would_trash (stack
, regs
[ARM_SP_REGNUM
]))
1562 /* Calculate offsets of saved registers. */
1563 for (regno
= ARM_PC_REGNUM
; regno
>= 0; regno
--)
1564 if (mask
& (1 << regno
))
1567 = pv_add_constant (regs
[ARM_SP_REGNUM
], -4);
1568 pv_area_store (stack
, regs
[ARM_SP_REGNUM
], 4, regs
[regno
]);
1571 else if ((insn
& 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1572 || (insn
& 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1573 || (insn
& 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1575 /* No need to add this to saved_regs -- it's just an arg reg. */
1578 else if ((insn
& 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1579 || (insn
& 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1580 || (insn
& 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1582 /* No need to add this to saved_regs -- it's just an arg reg. */
1585 else if ((insn
& 0xfff00000) == 0xe8800000 /* stm Rn,
1587 && pv_is_register (regs
[bits (insn
, 16, 19)], ARM_SP_REGNUM
))
1589 /* No need to add this to saved_regs -- it's just arg regs. */
1592 else if ((insn
& 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1594 unsigned imm
= insn
& 0xff; /* immediate value */
1595 unsigned rot
= (insn
& 0xf00) >> 7; /* rotate amount */
1596 imm
= (imm
>> rot
) | (imm
<< (32 - rot
));
1597 regs
[ARM_FP_REGNUM
] = pv_add_constant (regs
[ARM_IP_REGNUM
], -imm
);
1599 else if ((insn
& 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1601 unsigned imm
= insn
& 0xff; /* immediate value */
1602 unsigned rot
= (insn
& 0xf00) >> 7; /* rotate amount */
1603 imm
= (imm
>> rot
) | (imm
<< (32 - rot
));
1604 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
], -imm
);
1606 else if ((insn
& 0xffff7fff) == 0xed6d0103 /* stfe f?,
1608 && gdbarch_tdep (gdbarch
)->have_fpa_registers
)
1610 if (pv_area_store_would_trash (stack
, regs
[ARM_SP_REGNUM
]))
1613 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
], -12);
1614 regno
= ARM_F0_REGNUM
+ ((insn
>> 12) & 0x07);
1615 pv_area_store (stack
, regs
[ARM_SP_REGNUM
], 12, regs
[regno
]);
1617 else if ((insn
& 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1619 && gdbarch_tdep (gdbarch
)->have_fpa_registers
)
1621 int n_saved_fp_regs
;
1622 unsigned int fp_start_reg
, fp_bound_reg
;
1624 if (pv_area_store_would_trash (stack
, regs
[ARM_SP_REGNUM
]))
1627 if ((insn
& 0x800) == 0x800) /* N0 is set */
1629 if ((insn
& 0x40000) == 0x40000) /* N1 is set */
1630 n_saved_fp_regs
= 3;
1632 n_saved_fp_regs
= 1;
1636 if ((insn
& 0x40000) == 0x40000) /* N1 is set */
1637 n_saved_fp_regs
= 2;
1639 n_saved_fp_regs
= 4;
1642 fp_start_reg
= ARM_F0_REGNUM
+ ((insn
>> 12) & 0x7);
1643 fp_bound_reg
= fp_start_reg
+ n_saved_fp_regs
;
1644 for (; fp_start_reg
< fp_bound_reg
; fp_start_reg
++)
1646 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
], -12);
1647 pv_area_store (stack
, regs
[ARM_SP_REGNUM
], 12,
1648 regs
[fp_start_reg
++]);
1651 else if ((insn
& 0xff000000) == 0xeb000000 && cache
== NULL
) /* bl */
1653 /* Allow some special function calls when skipping the
1654 prologue; GCC generates these before storing arguments to
1656 CORE_ADDR dest
= BranchDest (current_pc
, insn
);
1658 if (skip_prologue_function (gdbarch
, dest
, 0))
1663 else if ((insn
& 0xf0000000) != 0xe0000000)
1664 break; /* Condition not true, exit early. */
1665 else if (arm_instruction_changes_pc (insn
))
1666 /* Don't scan past anything that might change control flow. */
1668 else if (arm_instruction_restores_sp (insn
))
1670 /* Don't scan past the epilogue. */
1673 else if ((insn
& 0xfe500000) == 0xe8100000 /* ldm */
1674 && pv_is_register (regs
[bits (insn
, 16, 19)], ARM_SP_REGNUM
))
1675 /* Ignore block loads from the stack, potentially copying
1676 parameters from memory. */
1678 else if ((insn
& 0xfc500000) == 0xe4100000
1679 && pv_is_register (regs
[bits (insn
, 16, 19)], ARM_SP_REGNUM
))
1680 /* Similarly ignore single loads from the stack. */
1682 else if ((insn
& 0xffff0ff0) == 0xe1a00000)
1683 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1684 register instead of the stack. */
1688 /* The optimizer might shove anything into the prologue, if
1689 we build up cache (cache != NULL) from scanning prologue,
1690 we just skip what we don't recognize and scan further to
1691 make cache as complete as possible. However, if we skip
1692 prologue, we'll stop immediately on unrecognized
1694 unrecognized_pc
= current_pc
;
1702 if (unrecognized_pc
== 0)
1703 unrecognized_pc
= current_pc
;
1707 int framereg
, framesize
;
1709 /* The frame size is just the distance from the frame register
1710 to the original stack pointer. */
1711 if (pv_is_register (regs
[ARM_FP_REGNUM
], ARM_SP_REGNUM
))
1713 /* Frame pointer is fp. */
1714 framereg
= ARM_FP_REGNUM
;
1715 framesize
= -regs
[ARM_FP_REGNUM
].k
;
1719 /* Try the stack pointer... this is a bit desperate. */
1720 framereg
= ARM_SP_REGNUM
;
1721 framesize
= -regs
[ARM_SP_REGNUM
].k
;
1724 cache
->framereg
= framereg
;
1725 cache
->framesize
= framesize
;
1727 for (regno
= 0; regno
< ARM_FPS_REGNUM
; regno
++)
1728 if (pv_area_find_reg (stack
, gdbarch
, regno
, &offset
))
1729 cache
->saved_regs
[regno
].addr
= offset
;
1733 fprintf_unfiltered (gdb_stdlog
, "Prologue scan stopped at %s\n",
1734 paddress (gdbarch
, unrecognized_pc
));
1736 do_cleanups (back_to
);
1737 return unrecognized_pc
;
1741 arm_scan_prologue (struct frame_info
*this_frame
,
1742 struct arm_prologue_cache
*cache
)
1744 struct gdbarch
*gdbarch
= get_frame_arch (this_frame
);
1745 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
1746 CORE_ADDR prologue_start
, prologue_end
;
1747 CORE_ADDR prev_pc
= get_frame_pc (this_frame
);
1748 CORE_ADDR block_addr
= get_frame_address_in_block (this_frame
);
1750 /* Assume there is no frame until proven otherwise. */
1751 cache
->framereg
= ARM_SP_REGNUM
;
1752 cache
->framesize
= 0;
1754 /* Check for Thumb prologue. */
1755 if (arm_frame_is_thumb (this_frame
))
1757 thumb_scan_prologue (gdbarch
, prev_pc
, block_addr
, cache
);
1761 /* Find the function prologue. If we can't find the function in
1762 the symbol table, peek in the stack frame to find the PC. */
1763 if (find_pc_partial_function (block_addr
, NULL
, &prologue_start
,
1766 /* One way to find the end of the prologue (which works well
1767 for unoptimized code) is to do the following:
1769 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1772 prologue_end = prev_pc;
1773 else if (sal.end < prologue_end)
1774 prologue_end = sal.end;
1776 This mechanism is very accurate so long as the optimizer
1777 doesn't move any instructions from the function body into the
1778 prologue. If this happens, sal.end will be the last
1779 instruction in the first hunk of prologue code just before
1780 the first instruction that the scheduler has moved from
1781 the body to the prologue.
1783 In order to make sure that we scan all of the prologue
1784 instructions, we use a slightly less accurate mechanism which
1785 may scan more than necessary. To help compensate for this
1786 lack of accuracy, the prologue scanning loop below contains
1787 several clauses which'll cause the loop to terminate early if
1788 an implausible prologue instruction is encountered.
1794 is a suitable endpoint since it accounts for the largest
1795 possible prologue plus up to five instructions inserted by
1798 if (prologue_end
> prologue_start
+ 64)
1800 prologue_end
= prologue_start
+ 64; /* See above. */
1805 /* We have no symbol information. Our only option is to assume this
1806 function has a standard stack frame and the normal frame register.
1807 Then, we can find the value of our frame pointer on entrance to
1808 the callee (or at the present moment if this is the innermost frame).
1809 The value stored there should be the address of the stmfd + 8. */
1810 CORE_ADDR frame_loc
;
1811 ULONGEST return_value
;
1813 frame_loc
= get_frame_register_unsigned (this_frame
, ARM_FP_REGNUM
);
1814 if (!safe_read_memory_unsigned_integer (frame_loc
, 4, byte_order
,
1819 prologue_start
= gdbarch_addr_bits_remove
1820 (gdbarch
, return_value
) - 8;
1821 prologue_end
= prologue_start
+ 64; /* See above. */
1825 if (prev_pc
< prologue_end
)
1826 prologue_end
= prev_pc
;
1828 arm_analyze_prologue (gdbarch
, prologue_start
, prologue_end
, cache
);
1831 static struct arm_prologue_cache
*
1832 arm_make_prologue_cache (struct frame_info
*this_frame
)
1835 struct arm_prologue_cache
*cache
;
1836 CORE_ADDR unwound_fp
;
1838 cache
= FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache
);
1839 cache
->saved_regs
= trad_frame_alloc_saved_regs (this_frame
);
1841 arm_scan_prologue (this_frame
, cache
);
1843 unwound_fp
= get_frame_register_unsigned (this_frame
, cache
->framereg
);
1844 if (unwound_fp
== 0)
1847 cache
->prev_sp
= unwound_fp
+ cache
->framesize
;
1849 /* Calculate actual addresses of saved registers using offsets
1850 determined by arm_scan_prologue. */
1851 for (reg
= 0; reg
< gdbarch_num_regs (get_frame_arch (this_frame
)); reg
++)
1852 if (trad_frame_addr_p (cache
->saved_regs
, reg
))
1853 cache
->saved_regs
[reg
].addr
+= cache
->prev_sp
;
1858 /* Implementation of the stop_reason hook for arm_prologue frames. */
1860 static enum unwind_stop_reason
1861 arm_prologue_unwind_stop_reason (struct frame_info
*this_frame
,
1864 struct arm_prologue_cache
*cache
;
1867 if (*this_cache
== NULL
)
1868 *this_cache
= arm_make_prologue_cache (this_frame
);
1869 cache
= (struct arm_prologue_cache
*) *this_cache
;
1871 /* This is meant to halt the backtrace at "_start". */
1872 pc
= get_frame_pc (this_frame
);
1873 if (pc
<= gdbarch_tdep (get_frame_arch (this_frame
))->lowest_pc
)
1874 return UNWIND_OUTERMOST
;
1876 /* If we've hit a wall, stop. */
1877 if (cache
->prev_sp
== 0)
1878 return UNWIND_OUTERMOST
;
1880 return UNWIND_NO_REASON
;
1883 /* Our frame ID for a normal frame is the current function's starting PC
1884 and the caller's SP when we were called. */
1887 arm_prologue_this_id (struct frame_info
*this_frame
,
1889 struct frame_id
*this_id
)
1891 struct arm_prologue_cache
*cache
;
1895 if (*this_cache
== NULL
)
1896 *this_cache
= arm_make_prologue_cache (this_frame
);
1897 cache
= (struct arm_prologue_cache
*) *this_cache
;
1899 /* Use function start address as part of the frame ID. If we cannot
1900 identify the start address (due to missing symbol information),
1901 fall back to just using the current PC. */
1902 pc
= get_frame_pc (this_frame
);
1903 func
= get_frame_func (this_frame
);
1907 id
= frame_id_build (cache
->prev_sp
, func
);
1911 static struct value
*
1912 arm_prologue_prev_register (struct frame_info
*this_frame
,
1916 struct gdbarch
*gdbarch
= get_frame_arch (this_frame
);
1917 struct arm_prologue_cache
*cache
;
1919 if (*this_cache
== NULL
)
1920 *this_cache
= arm_make_prologue_cache (this_frame
);
1921 cache
= (struct arm_prologue_cache
*) *this_cache
;
1923 /* If we are asked to unwind the PC, then we need to return the LR
1924 instead. The prologue may save PC, but it will point into this
1925 frame's prologue, not the next frame's resume location. Also
1926 strip the saved T bit. A valid LR may have the low bit set, but
1927 a valid PC never does. */
1928 if (prev_regnum
== ARM_PC_REGNUM
)
1932 lr
= frame_unwind_register_unsigned (this_frame
, ARM_LR_REGNUM
);
1933 return frame_unwind_got_constant (this_frame
, prev_regnum
,
1934 arm_addr_bits_remove (gdbarch
, lr
));
1937 /* SP is generally not saved to the stack, but this frame is
1938 identified by the next frame's stack pointer at the time of the call.
1939 The value was already reconstructed into PREV_SP. */
1940 if (prev_regnum
== ARM_SP_REGNUM
)
1941 return frame_unwind_got_constant (this_frame
, prev_regnum
, cache
->prev_sp
);
1943 /* The CPSR may have been changed by the call instruction and by the
1944 called function. The only bit we can reconstruct is the T bit,
1945 by checking the low bit of LR as of the call. This is a reliable
1946 indicator of Thumb-ness except for some ARM v4T pre-interworking
1947 Thumb code, which could get away with a clear low bit as long as
1948 the called function did not use bx. Guess that all other
1949 bits are unchanged; the condition flags are presumably lost,
1950 but the processor status is likely valid. */
1951 if (prev_regnum
== ARM_PS_REGNUM
)
1954 ULONGEST t_bit
= arm_psr_thumb_bit (gdbarch
);
1956 cpsr
= get_frame_register_unsigned (this_frame
, prev_regnum
);
1957 lr
= frame_unwind_register_unsigned (this_frame
, ARM_LR_REGNUM
);
1958 if (IS_THUMB_ADDR (lr
))
1962 return frame_unwind_got_constant (this_frame
, prev_regnum
, cpsr
);
1965 return trad_frame_get_prev_register (this_frame
, cache
->saved_regs
,
1969 struct frame_unwind arm_prologue_unwind
= {
1971 arm_prologue_unwind_stop_reason
,
1972 arm_prologue_this_id
,
1973 arm_prologue_prev_register
,
1975 default_frame_sniffer
1978 /* Maintain a list of ARM exception table entries per objfile, similar to the
1979 list of mapping symbols. We only cache entries for standard ARM-defined
1980 personality routines; the cache will contain only the frame unwinding
1981 instructions associated with the entry (not the descriptors). */
1983 static const struct objfile_data
*arm_exidx_data_key
;
1985 struct arm_exidx_entry
1990 typedef struct arm_exidx_entry arm_exidx_entry_s
;
1991 DEF_VEC_O(arm_exidx_entry_s
);
1993 struct arm_exidx_data
1995 VEC(arm_exidx_entry_s
) **section_maps
;
1999 arm_exidx_data_free (struct objfile
*objfile
, void *arg
)
2001 struct arm_exidx_data
*data
= (struct arm_exidx_data
*) arg
;
2004 for (i
= 0; i
< objfile
->obfd
->section_count
; i
++)
2005 VEC_free (arm_exidx_entry_s
, data
->section_maps
[i
]);
2009 arm_compare_exidx_entries (const struct arm_exidx_entry
*lhs
,
2010 const struct arm_exidx_entry
*rhs
)
2012 return lhs
->addr
< rhs
->addr
;
2015 static struct obj_section
*
2016 arm_obj_section_from_vma (struct objfile
*objfile
, bfd_vma vma
)
2018 struct obj_section
*osect
;
2020 ALL_OBJFILE_OSECTIONS (objfile
, osect
)
2021 if (bfd_get_section_flags (objfile
->obfd
,
2022 osect
->the_bfd_section
) & SEC_ALLOC
)
2024 bfd_vma start
, size
;
2025 start
= bfd_get_section_vma (objfile
->obfd
, osect
->the_bfd_section
);
2026 size
= bfd_get_section_size (osect
->the_bfd_section
);
2028 if (start
<= vma
&& vma
< start
+ size
)
2035 /* Parse contents of exception table and exception index sections
2036 of OBJFILE, and fill in the exception table entry cache.
2038 For each entry that refers to a standard ARM-defined personality
2039 routine, extract the frame unwinding instructions (from either
2040 the index or the table section). The unwinding instructions
2042 - extracting them from the rest of the table data
2043 - converting to host endianness
2044 - appending the implicit 0xb0 ("Finish") code
2046 The extracted and normalized instructions are stored for later
2047 retrieval by the arm_find_exidx_entry routine. */
2050 arm_exidx_new_objfile (struct objfile
*objfile
)
2052 struct cleanup
*cleanups
;
2053 struct arm_exidx_data
*data
;
2054 asection
*exidx
, *extab
;
2055 bfd_vma exidx_vma
= 0, extab_vma
= 0;
2056 bfd_size_type exidx_size
= 0, extab_size
= 0;
2057 gdb_byte
*exidx_data
= NULL
, *extab_data
= NULL
;
2060 /* If we've already touched this file, do nothing. */
2061 if (!objfile
|| objfile_data (objfile
, arm_exidx_data_key
) != NULL
)
2063 cleanups
= make_cleanup (null_cleanup
, NULL
);
2065 /* Read contents of exception table and index. */
2066 exidx
= bfd_get_section_by_name (objfile
->obfd
, ELF_STRING_ARM_unwind
);
2069 exidx_vma
= bfd_section_vma (objfile
->obfd
, exidx
);
2070 exidx_size
= bfd_get_section_size (exidx
);
2071 exidx_data
= (gdb_byte
*) xmalloc (exidx_size
);
2072 make_cleanup (xfree
, exidx_data
);
2074 if (!bfd_get_section_contents (objfile
->obfd
, exidx
,
2075 exidx_data
, 0, exidx_size
))
2077 do_cleanups (cleanups
);
2082 extab
= bfd_get_section_by_name (objfile
->obfd
, ".ARM.extab");
2085 extab_vma
= bfd_section_vma (objfile
->obfd
, extab
);
2086 extab_size
= bfd_get_section_size (extab
);
2087 extab_data
= (gdb_byte
*) xmalloc (extab_size
);
2088 make_cleanup (xfree
, extab_data
);
2090 if (!bfd_get_section_contents (objfile
->obfd
, extab
,
2091 extab_data
, 0, extab_size
))
2093 do_cleanups (cleanups
);
2098 /* Allocate exception table data structure. */
2099 data
= OBSTACK_ZALLOC (&objfile
->objfile_obstack
, struct arm_exidx_data
);
2100 set_objfile_data (objfile
, arm_exidx_data_key
, data
);
2101 data
->section_maps
= OBSTACK_CALLOC (&objfile
->objfile_obstack
,
2102 objfile
->obfd
->section_count
,
2103 VEC(arm_exidx_entry_s
) *);
2105 /* Fill in exception table. */
2106 for (i
= 0; i
< exidx_size
/ 8; i
++)
2108 struct arm_exidx_entry new_exidx_entry
;
2109 bfd_vma idx
= bfd_h_get_32 (objfile
->obfd
, exidx_data
+ i
* 8);
2110 bfd_vma val
= bfd_h_get_32 (objfile
->obfd
, exidx_data
+ i
* 8 + 4);
2111 bfd_vma addr
= 0, word
= 0;
2112 int n_bytes
= 0, n_words
= 0;
2113 struct obj_section
*sec
;
2114 gdb_byte
*entry
= NULL
;
2116 /* Extract address of start of function. */
2117 idx
= ((idx
& 0x7fffffff) ^ 0x40000000) - 0x40000000;
2118 idx
+= exidx_vma
+ i
* 8;
2120 /* Find section containing function and compute section offset. */
2121 sec
= arm_obj_section_from_vma (objfile
, idx
);
2124 idx
-= bfd_get_section_vma (objfile
->obfd
, sec
->the_bfd_section
);
2126 /* Determine address of exception table entry. */
2129 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2131 else if ((val
& 0xff000000) == 0x80000000)
2133 /* Exception table entry embedded in .ARM.exidx
2134 -- must be short form. */
2138 else if (!(val
& 0x80000000))
2140 /* Exception table entry in .ARM.extab. */
2141 addr
= ((val
& 0x7fffffff) ^ 0x40000000) - 0x40000000;
2142 addr
+= exidx_vma
+ i
* 8 + 4;
2144 if (addr
>= extab_vma
&& addr
+ 4 <= extab_vma
+ extab_size
)
2146 word
= bfd_h_get_32 (objfile
->obfd
,
2147 extab_data
+ addr
- extab_vma
);
2150 if ((word
& 0xff000000) == 0x80000000)
2155 else if ((word
& 0xff000000) == 0x81000000
2156 || (word
& 0xff000000) == 0x82000000)
2160 n_words
= ((word
>> 16) & 0xff);
2162 else if (!(word
& 0x80000000))
2165 struct obj_section
*pers_sec
;
2166 int gnu_personality
= 0;
2168 /* Custom personality routine. */
2169 pers
= ((word
& 0x7fffffff) ^ 0x40000000) - 0x40000000;
2170 pers
= UNMAKE_THUMB_ADDR (pers
+ addr
- 4);
2172 /* Check whether we've got one of the variants of the
2173 GNU personality routines. */
2174 pers_sec
= arm_obj_section_from_vma (objfile
, pers
);
2177 static const char *personality
[] =
2179 "__gcc_personality_v0",
2180 "__gxx_personality_v0",
2181 "__gcj_personality_v0",
2182 "__gnu_objc_personality_v0",
2186 CORE_ADDR pc
= pers
+ obj_section_offset (pers_sec
);
2189 for (k
= 0; personality
[k
]; k
++)
2190 if (lookup_minimal_symbol_by_pc_name
2191 (pc
, personality
[k
], objfile
))
2193 gnu_personality
= 1;
2198 /* If so, the next word contains a word count in the high
2199 byte, followed by the same unwind instructions as the
2200 pre-defined forms. */
2202 && addr
+ 4 <= extab_vma
+ extab_size
)
2204 word
= bfd_h_get_32 (objfile
->obfd
,
2205 extab_data
+ addr
- extab_vma
);
2208 n_words
= ((word
>> 24) & 0xff);
2214 /* Sanity check address. */
2216 if (addr
< extab_vma
|| addr
+ 4 * n_words
> extab_vma
+ extab_size
)
2217 n_words
= n_bytes
= 0;
2219 /* The unwind instructions reside in WORD (only the N_BYTES least
2220 significant bytes are valid), followed by N_WORDS words in the
2221 extab section starting at ADDR. */
2222 if (n_bytes
|| n_words
)
2225 = (gdb_byte
*) obstack_alloc (&objfile
->objfile_obstack
,
2226 n_bytes
+ n_words
* 4 + 1);
2229 *p
++ = (gdb_byte
) ((word
>> (8 * n_bytes
)) & 0xff);
2233 word
= bfd_h_get_32 (objfile
->obfd
,
2234 extab_data
+ addr
- extab_vma
);
2237 *p
++ = (gdb_byte
) ((word
>> 24) & 0xff);
2238 *p
++ = (gdb_byte
) ((word
>> 16) & 0xff);
2239 *p
++ = (gdb_byte
) ((word
>> 8) & 0xff);
2240 *p
++ = (gdb_byte
) (word
& 0xff);
2243 /* Implied "Finish" to terminate the list. */
2247 /* Push entry onto vector. They are guaranteed to always
2248 appear in order of increasing addresses. */
2249 new_exidx_entry
.addr
= idx
;
2250 new_exidx_entry
.entry
= entry
;
2251 VEC_safe_push (arm_exidx_entry_s
,
2252 data
->section_maps
[sec
->the_bfd_section
->index
],
2256 do_cleanups (cleanups
);
2259 /* Search for the exception table entry covering MEMADDR. If one is found,
2260 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2261 set *START to the start of the region covered by this entry. */
2264 arm_find_exidx_entry (CORE_ADDR memaddr
, CORE_ADDR
*start
)
2266 struct obj_section
*sec
;
2268 sec
= find_pc_section (memaddr
);
2271 struct arm_exidx_data
*data
;
2272 VEC(arm_exidx_entry_s
) *map
;
2273 struct arm_exidx_entry map_key
= { memaddr
- obj_section_addr (sec
), 0 };
2276 data
= ((struct arm_exidx_data
*)
2277 objfile_data (sec
->objfile
, arm_exidx_data_key
));
2280 map
= data
->section_maps
[sec
->the_bfd_section
->index
];
2281 if (!VEC_empty (arm_exidx_entry_s
, map
))
2283 struct arm_exidx_entry
*map_sym
;
2285 idx
= VEC_lower_bound (arm_exidx_entry_s
, map
, &map_key
,
2286 arm_compare_exidx_entries
);
2288 /* VEC_lower_bound finds the earliest ordered insertion
2289 point. If the following symbol starts at this exact
2290 address, we use that; otherwise, the preceding
2291 exception table entry covers this address. */
2292 if (idx
< VEC_length (arm_exidx_entry_s
, map
))
2294 map_sym
= VEC_index (arm_exidx_entry_s
, map
, idx
);
2295 if (map_sym
->addr
== map_key
.addr
)
2298 *start
= map_sym
->addr
+ obj_section_addr (sec
);
2299 return map_sym
->entry
;
2305 map_sym
= VEC_index (arm_exidx_entry_s
, map
, idx
- 1);
2307 *start
= map_sym
->addr
+ obj_section_addr (sec
);
2308 return map_sym
->entry
;
2317 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2318 instruction list from the ARM exception table entry ENTRY, allocate and
2319 return a prologue cache structure describing how to unwind this frame.
2321 Return NULL if the unwinding instruction list contains a "spare",
2322 "reserved" or "refuse to unwind" instruction as defined in section
2323 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2324 for the ARM Architecture" document. */
2326 static struct arm_prologue_cache
*
2327 arm_exidx_fill_cache (struct frame_info
*this_frame
, gdb_byte
*entry
)
2332 struct arm_prologue_cache
*cache
;
2333 cache
= FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache
);
2334 cache
->saved_regs
= trad_frame_alloc_saved_regs (this_frame
);
2340 /* Whenever we reload SP, we actually have to retrieve its
2341 actual value in the current frame. */
2344 if (trad_frame_realreg_p (cache
->saved_regs
, ARM_SP_REGNUM
))
2346 int reg
= cache
->saved_regs
[ARM_SP_REGNUM
].realreg
;
2347 vsp
= get_frame_register_unsigned (this_frame
, reg
);
2351 CORE_ADDR addr
= cache
->saved_regs
[ARM_SP_REGNUM
].addr
;
2352 vsp
= get_frame_memory_unsigned (this_frame
, addr
, 4);
2358 /* Decode next unwind instruction. */
2361 if ((insn
& 0xc0) == 0)
2363 int offset
= insn
& 0x3f;
2364 vsp
+= (offset
<< 2) + 4;
2366 else if ((insn
& 0xc0) == 0x40)
2368 int offset
= insn
& 0x3f;
2369 vsp
-= (offset
<< 2) + 4;
2371 else if ((insn
& 0xf0) == 0x80)
2373 int mask
= ((insn
& 0xf) << 8) | *entry
++;
2376 /* The special case of an all-zero mask identifies
2377 "Refuse to unwind". We return NULL to fall back
2378 to the prologue analyzer. */
2382 /* Pop registers r4..r15 under mask. */
2383 for (i
= 0; i
< 12; i
++)
2384 if (mask
& (1 << i
))
2386 cache
->saved_regs
[4 + i
].addr
= vsp
;
2390 /* Special-case popping SP -- we need to reload vsp. */
2391 if (mask
& (1 << (ARM_SP_REGNUM
- 4)))
2394 else if ((insn
& 0xf0) == 0x90)
2396 int reg
= insn
& 0xf;
2398 /* Reserved cases. */
2399 if (reg
== ARM_SP_REGNUM
|| reg
== ARM_PC_REGNUM
)
2402 /* Set SP from another register and mark VSP for reload. */
2403 cache
->saved_regs
[ARM_SP_REGNUM
] = cache
->saved_regs
[reg
];
2406 else if ((insn
& 0xf0) == 0xa0)
2408 int count
= insn
& 0x7;
2409 int pop_lr
= (insn
& 0x8) != 0;
2412 /* Pop r4..r[4+count]. */
2413 for (i
= 0; i
<= count
; i
++)
2415 cache
->saved_regs
[4 + i
].addr
= vsp
;
2419 /* If indicated by flag, pop LR as well. */
2422 cache
->saved_regs
[ARM_LR_REGNUM
].addr
= vsp
;
2426 else if (insn
== 0xb0)
2428 /* We could only have updated PC by popping into it; if so, it
2429 will show up as address. Otherwise, copy LR into PC. */
2430 if (!trad_frame_addr_p (cache
->saved_regs
, ARM_PC_REGNUM
))
2431 cache
->saved_regs
[ARM_PC_REGNUM
]
2432 = cache
->saved_regs
[ARM_LR_REGNUM
];
2437 else if (insn
== 0xb1)
2439 int mask
= *entry
++;
2442 /* All-zero mask and mask >= 16 is "spare". */
2443 if (mask
== 0 || mask
>= 16)
2446 /* Pop r0..r3 under mask. */
2447 for (i
= 0; i
< 4; i
++)
2448 if (mask
& (1 << i
))
2450 cache
->saved_regs
[i
].addr
= vsp
;
2454 else if (insn
== 0xb2)
2456 ULONGEST offset
= 0;
2461 offset
|= (*entry
& 0x7f) << shift
;
2464 while (*entry
++ & 0x80);
2466 vsp
+= 0x204 + (offset
<< 2);
2468 else if (insn
== 0xb3)
2470 int start
= *entry
>> 4;
2471 int count
= (*entry
++) & 0xf;
2474 /* Only registers D0..D15 are valid here. */
2475 if (start
+ count
>= 16)
2478 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2479 for (i
= 0; i
<= count
; i
++)
2481 cache
->saved_regs
[ARM_D0_REGNUM
+ start
+ i
].addr
= vsp
;
2485 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2488 else if ((insn
& 0xf8) == 0xb8)
2490 int count
= insn
& 0x7;
2493 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2494 for (i
= 0; i
<= count
; i
++)
2496 cache
->saved_regs
[ARM_D0_REGNUM
+ 8 + i
].addr
= vsp
;
2500 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2503 else if (insn
== 0xc6)
2505 int start
= *entry
>> 4;
2506 int count
= (*entry
++) & 0xf;
2509 /* Only registers WR0..WR15 are valid. */
2510 if (start
+ count
>= 16)
2513 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2514 for (i
= 0; i
<= count
; i
++)
2516 cache
->saved_regs
[ARM_WR0_REGNUM
+ start
+ i
].addr
= vsp
;
2520 else if (insn
== 0xc7)
2522 int mask
= *entry
++;
2525 /* All-zero mask and mask >= 16 is "spare". */
2526 if (mask
== 0 || mask
>= 16)
2529 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2530 for (i
= 0; i
< 4; i
++)
2531 if (mask
& (1 << i
))
2533 cache
->saved_regs
[ARM_WCGR0_REGNUM
+ i
].addr
= vsp
;
2537 else if ((insn
& 0xf8) == 0xc0)
2539 int count
= insn
& 0x7;
2542 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2543 for (i
= 0; i
<= count
; i
++)
2545 cache
->saved_regs
[ARM_WR0_REGNUM
+ 10 + i
].addr
= vsp
;
2549 else if (insn
== 0xc8)
2551 int start
= *entry
>> 4;
2552 int count
= (*entry
++) & 0xf;
2555 /* Only registers D0..D31 are valid. */
2556 if (start
+ count
>= 16)
2559 /* Pop VFP double-precision registers
2560 D[16+start]..D[16+start+count]. */
2561 for (i
= 0; i
<= count
; i
++)
2563 cache
->saved_regs
[ARM_D0_REGNUM
+ 16 + start
+ i
].addr
= vsp
;
2567 else if (insn
== 0xc9)
2569 int start
= *entry
>> 4;
2570 int count
= (*entry
++) & 0xf;
2573 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2574 for (i
= 0; i
<= count
; i
++)
2576 cache
->saved_regs
[ARM_D0_REGNUM
+ start
+ i
].addr
= vsp
;
2580 else if ((insn
& 0xf8) == 0xd0)
2582 int count
= insn
& 0x7;
2585 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2586 for (i
= 0; i
<= count
; i
++)
2588 cache
->saved_regs
[ARM_D0_REGNUM
+ 8 + i
].addr
= vsp
;
2594 /* Everything else is "spare". */
2599 /* If we restore SP from a register, assume this was the frame register.
2600 Otherwise just fall back to SP as frame register. */
2601 if (trad_frame_realreg_p (cache
->saved_regs
, ARM_SP_REGNUM
))
2602 cache
->framereg
= cache
->saved_regs
[ARM_SP_REGNUM
].realreg
;
2604 cache
->framereg
= ARM_SP_REGNUM
;
2606 /* Determine offset to previous frame. */
2608 = vsp
- get_frame_register_unsigned (this_frame
, cache
->framereg
);
2610 /* We already got the previous SP. */
2611 cache
->prev_sp
= vsp
;
2616 /* Unwinding via ARM exception table entries. Note that the sniffer
2617 already computes a filled-in prologue cache, which is then used
2618 with the same arm_prologue_this_id and arm_prologue_prev_register
2619 routines also used for prologue-parsing based unwinding. */
2622 arm_exidx_unwind_sniffer (const struct frame_unwind
*self
,
2623 struct frame_info
*this_frame
,
2624 void **this_prologue_cache
)
2626 struct gdbarch
*gdbarch
= get_frame_arch (this_frame
);
2627 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
2628 CORE_ADDR addr_in_block
, exidx_region
, func_start
;
2629 struct arm_prologue_cache
*cache
;
2632 /* See if we have an ARM exception table entry covering this address. */
2633 addr_in_block
= get_frame_address_in_block (this_frame
);
2634 entry
= arm_find_exidx_entry (addr_in_block
, &exidx_region
);
2638 /* The ARM exception table does not describe unwind information
2639 for arbitrary PC values, but is guaranteed to be correct only
2640 at call sites. We have to decide here whether we want to use
2641 ARM exception table information for this frame, or fall back
2642 to using prologue parsing. (Note that if we have DWARF CFI,
2643 this sniffer isn't even called -- CFI is always preferred.)
2645 Before we make this decision, however, we check whether we
2646 actually have *symbol* information for the current frame.
2647 If not, prologue parsing would not work anyway, so we might
2648 as well use the exception table and hope for the best. */
2649 if (find_pc_partial_function (addr_in_block
, NULL
, &func_start
, NULL
))
2653 /* If the next frame is "normal", we are at a call site in this
2654 frame, so exception information is guaranteed to be valid. */
2655 if (get_next_frame (this_frame
)
2656 && get_frame_type (get_next_frame (this_frame
)) == NORMAL_FRAME
)
2659 /* We also assume exception information is valid if we're currently
2660 blocked in a system call. The system library is supposed to
2661 ensure this, so that e.g. pthread cancellation works. */
2662 if (arm_frame_is_thumb (this_frame
))
2666 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame
) - 2,
2667 2, byte_order_for_code
, &insn
)
2668 && (insn
& 0xff00) == 0xdf00 /* svc */)
2675 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame
) - 4,
2676 4, byte_order_for_code
, &insn
)
2677 && (insn
& 0x0f000000) == 0x0f000000 /* svc */)
2681 /* Bail out if we don't know that exception information is valid. */
2685 /* The ARM exception index does not mark the *end* of the region
2686 covered by the entry, and some functions will not have any entry.
2687 To correctly recognize the end of the covered region, the linker
2688 should have inserted dummy records with a CANTUNWIND marker.
2690 Unfortunately, current versions of GNU ld do not reliably do
2691 this, and thus we may have found an incorrect entry above.
2692 As a (temporary) sanity check, we only use the entry if it
2693 lies *within* the bounds of the function. Note that this check
2694 might reject perfectly valid entries that just happen to cover
2695 multiple functions; therefore this check ought to be removed
2696 once the linker is fixed. */
2697 if (func_start
> exidx_region
)
2701 /* Decode the list of unwinding instructions into a prologue cache.
2702 Note that this may fail due to e.g. a "refuse to unwind" code. */
2703 cache
= arm_exidx_fill_cache (this_frame
, entry
);
2707 *this_prologue_cache
= cache
;
2711 struct frame_unwind arm_exidx_unwind
= {
2713 default_frame_unwind_stop_reason
,
2714 arm_prologue_this_id
,
2715 arm_prologue_prev_register
,
2717 arm_exidx_unwind_sniffer
2720 static struct arm_prologue_cache
*
2721 arm_make_epilogue_frame_cache (struct frame_info
*this_frame
)
2723 struct arm_prologue_cache
*cache
;
2726 cache
= FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache
);
2727 cache
->saved_regs
= trad_frame_alloc_saved_regs (this_frame
);
2729 /* Still rely on the offset calculated from prologue. */
2730 arm_scan_prologue (this_frame
, cache
);
2732 /* Since we are in epilogue, the SP has been restored. */
2733 cache
->prev_sp
= get_frame_register_unsigned (this_frame
, ARM_SP_REGNUM
);
2735 /* Calculate actual addresses of saved registers using offsets
2736 determined by arm_scan_prologue. */
2737 for (reg
= 0; reg
< gdbarch_num_regs (get_frame_arch (this_frame
)); reg
++)
2738 if (trad_frame_addr_p (cache
->saved_regs
, reg
))
2739 cache
->saved_regs
[reg
].addr
+= cache
->prev_sp
;
2744 /* Implementation of function hook 'this_id' in
2745 'struct frame_unwind' for epilogue unwinder. */
2748 arm_epilogue_frame_this_id (struct frame_info
*this_frame
,
2750 struct frame_id
*this_id
)
2752 struct arm_prologue_cache
*cache
;
2755 if (*this_cache
== NULL
)
2756 *this_cache
= arm_make_epilogue_frame_cache (this_frame
);
2757 cache
= (struct arm_prologue_cache
*) *this_cache
;
2759 /* Use function start address as part of the frame ID. If we cannot
2760 identify the start address (due to missing symbol information),
2761 fall back to just using the current PC. */
2762 pc
= get_frame_pc (this_frame
);
2763 func
= get_frame_func (this_frame
);
2767 (*this_id
) = frame_id_build (cache
->prev_sp
, pc
);
2770 /* Implementation of function hook 'prev_register' in
2771 'struct frame_unwind' for epilogue unwinder. */
2773 static struct value
*
2774 arm_epilogue_frame_prev_register (struct frame_info
*this_frame
,
2775 void **this_cache
, int regnum
)
2777 if (*this_cache
== NULL
)
2778 *this_cache
= arm_make_epilogue_frame_cache (this_frame
);
2780 return arm_prologue_prev_register (this_frame
, this_cache
, regnum
);
2783 static int arm_stack_frame_destroyed_p_1 (struct gdbarch
*gdbarch
,
2785 static int thumb_stack_frame_destroyed_p (struct gdbarch
*gdbarch
,
2788 /* Implementation of function hook 'sniffer' in
2789 'struct frame_unwind' for epilogue unwinder. */
2792 arm_epilogue_frame_sniffer (const struct frame_unwind
*self
,
2793 struct frame_info
*this_frame
,
2794 void **this_prologue_cache
)
2796 if (frame_relative_level (this_frame
) == 0)
2798 struct gdbarch
*gdbarch
= get_frame_arch (this_frame
);
2799 CORE_ADDR pc
= get_frame_pc (this_frame
);
2801 if (arm_frame_is_thumb (this_frame
))
2802 return thumb_stack_frame_destroyed_p (gdbarch
, pc
);
2804 return arm_stack_frame_destroyed_p_1 (gdbarch
, pc
);
2810 /* Frame unwinder from epilogue. */
2812 static const struct frame_unwind arm_epilogue_frame_unwind
=
2815 default_frame_unwind_stop_reason
,
2816 arm_epilogue_frame_this_id
,
2817 arm_epilogue_frame_prev_register
,
2819 arm_epilogue_frame_sniffer
,
2822 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2823 trampoline, return the target PC. Otherwise return 0.
2825 void call0a (char c, short s, int i, long l) {}
2829 (*pointer_to_call0a) (c, s, i, l);
2832 Instead of calling a stub library function _call_via_xx (xx is
2833 the register name), GCC may inline the trampoline in the object
2834 file as below (register r2 has the address of call0a).
2837 .type main, %function
2846 The trampoline 'bx r2' doesn't belong to main. */
2849 arm_skip_bx_reg (struct frame_info
*frame
, CORE_ADDR pc
)
2851 /* The heuristics of recognizing such trampoline is that FRAME is
2852 executing in Thumb mode and the instruction on PC is 'bx Rm'. */
2853 if (arm_frame_is_thumb (frame
))
2857 if (target_read_memory (pc
, buf
, 2) == 0)
2859 struct gdbarch
*gdbarch
= get_frame_arch (frame
);
2860 enum bfd_endian byte_order_for_code
2861 = gdbarch_byte_order_for_code (gdbarch
);
2863 = extract_unsigned_integer (buf
, 2, byte_order_for_code
);
2865 if ((insn
& 0xff80) == 0x4700) /* bx <Rm> */
2868 = get_frame_register_unsigned (frame
, bits (insn
, 3, 6));
2870 /* Clear the LSB so that gdb core sets step-resume
2871 breakpoint at the right address. */
2872 return UNMAKE_THUMB_ADDR (dest
);
2880 static struct arm_prologue_cache
*
2881 arm_make_stub_cache (struct frame_info
*this_frame
)
2883 struct arm_prologue_cache
*cache
;
2885 cache
= FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache
);
2886 cache
->saved_regs
= trad_frame_alloc_saved_regs (this_frame
);
2888 cache
->prev_sp
= get_frame_register_unsigned (this_frame
, ARM_SP_REGNUM
);
2893 /* Our frame ID for a stub frame is the current SP and LR. */
2896 arm_stub_this_id (struct frame_info
*this_frame
,
2898 struct frame_id
*this_id
)
2900 struct arm_prologue_cache
*cache
;
2902 if (*this_cache
== NULL
)
2903 *this_cache
= arm_make_stub_cache (this_frame
);
2904 cache
= (struct arm_prologue_cache
*) *this_cache
;
2906 *this_id
= frame_id_build (cache
->prev_sp
, get_frame_pc (this_frame
));
2910 arm_stub_unwind_sniffer (const struct frame_unwind
*self
,
2911 struct frame_info
*this_frame
,
2912 void **this_prologue_cache
)
2914 CORE_ADDR addr_in_block
;
2916 CORE_ADDR pc
, start_addr
;
2919 addr_in_block
= get_frame_address_in_block (this_frame
);
2920 pc
= get_frame_pc (this_frame
);
2921 if (in_plt_section (addr_in_block
)
2922 /* We also use the stub unwinder if the target memory is unreadable
2923 to avoid having the prologue unwinder trying to read it. */
2924 || target_read_memory (pc
, dummy
, 4) != 0)
2927 if (find_pc_partial_function (pc
, &name
, &start_addr
, NULL
) == 0
2928 && arm_skip_bx_reg (this_frame
, pc
) != 0)
2934 struct frame_unwind arm_stub_unwind
= {
2936 default_frame_unwind_stop_reason
,
2938 arm_prologue_prev_register
,
2940 arm_stub_unwind_sniffer
2943 /* Put here the code to store, into CACHE->saved_regs, the addresses
2944 of the saved registers of frame described by THIS_FRAME. CACHE is
2947 static struct arm_prologue_cache
*
2948 arm_m_exception_cache (struct frame_info
*this_frame
)
2950 struct gdbarch
*gdbarch
= get_frame_arch (this_frame
);
2951 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
2952 struct arm_prologue_cache
*cache
;
2953 CORE_ADDR unwound_sp
;
2956 cache
= FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache
);
2957 cache
->saved_regs
= trad_frame_alloc_saved_regs (this_frame
);
2959 unwound_sp
= get_frame_register_unsigned (this_frame
,
2962 /* The hardware saves eight 32-bit words, comprising xPSR,
2963 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
2964 "B1.5.6 Exception entry behavior" in
2965 "ARMv7-M Architecture Reference Manual". */
2966 cache
->saved_regs
[0].addr
= unwound_sp
;
2967 cache
->saved_regs
[1].addr
= unwound_sp
+ 4;
2968 cache
->saved_regs
[2].addr
= unwound_sp
+ 8;
2969 cache
->saved_regs
[3].addr
= unwound_sp
+ 12;
2970 cache
->saved_regs
[12].addr
= unwound_sp
+ 16;
2971 cache
->saved_regs
[14].addr
= unwound_sp
+ 20;
2972 cache
->saved_regs
[15].addr
= unwound_sp
+ 24;
2973 cache
->saved_regs
[ARM_PS_REGNUM
].addr
= unwound_sp
+ 28;
2975 /* If bit 9 of the saved xPSR is set, then there is a four-byte
2976 aligner between the top of the 32-byte stack frame and the
2977 previous context's stack pointer. */
2978 cache
->prev_sp
= unwound_sp
+ 32;
2979 if (safe_read_memory_integer (unwound_sp
+ 28, 4, byte_order
, &xpsr
)
2980 && (xpsr
& (1 << 9)) != 0)
2981 cache
->prev_sp
+= 4;
2986 /* Implementation of function hook 'this_id' in
2987 'struct frame_unwind'. */
2990 arm_m_exception_this_id (struct frame_info
*this_frame
,
2992 struct frame_id
*this_id
)
2994 struct arm_prologue_cache
*cache
;
2996 if (*this_cache
== NULL
)
2997 *this_cache
= arm_m_exception_cache (this_frame
);
2998 cache
= (struct arm_prologue_cache
*) *this_cache
;
3000 /* Our frame ID for a stub frame is the current SP and LR. */
3001 *this_id
= frame_id_build (cache
->prev_sp
,
3002 get_frame_pc (this_frame
));
3005 /* Implementation of function hook 'prev_register' in
3006 'struct frame_unwind'. */
3008 static struct value
*
3009 arm_m_exception_prev_register (struct frame_info
*this_frame
,
3013 struct arm_prologue_cache
*cache
;
3015 if (*this_cache
== NULL
)
3016 *this_cache
= arm_m_exception_cache (this_frame
);
3017 cache
= (struct arm_prologue_cache
*) *this_cache
;
3019 /* The value was already reconstructed into PREV_SP. */
3020 if (prev_regnum
== ARM_SP_REGNUM
)
3021 return frame_unwind_got_constant (this_frame
, prev_regnum
,
3024 return trad_frame_get_prev_register (this_frame
, cache
->saved_regs
,
3028 /* Implementation of function hook 'sniffer' in
3029 'struct frame_unwind'. */
3032 arm_m_exception_unwind_sniffer (const struct frame_unwind
*self
,
3033 struct frame_info
*this_frame
,
3034 void **this_prologue_cache
)
3036 CORE_ADDR this_pc
= get_frame_pc (this_frame
);
3038 /* No need to check is_m; this sniffer is only registered for
3039 M-profile architectures. */
3041 /* Check if exception frame returns to a magic PC value. */
3042 return arm_m_addr_is_magic (this_pc
);
3045 /* Frame unwinder for M-profile exceptions. */
3047 struct frame_unwind arm_m_exception_unwind
=
3050 default_frame_unwind_stop_reason
,
3051 arm_m_exception_this_id
,
3052 arm_m_exception_prev_register
,
3054 arm_m_exception_unwind_sniffer
3058 arm_normal_frame_base (struct frame_info
*this_frame
, void **this_cache
)
3060 struct arm_prologue_cache
*cache
;
3062 if (*this_cache
== NULL
)
3063 *this_cache
= arm_make_prologue_cache (this_frame
);
3064 cache
= (struct arm_prologue_cache
*) *this_cache
;
3066 return cache
->prev_sp
- cache
->framesize
;
3069 struct frame_base arm_normal_base
= {
3070 &arm_prologue_unwind
,
3071 arm_normal_frame_base
,
3072 arm_normal_frame_base
,
3073 arm_normal_frame_base
3076 /* Assuming THIS_FRAME is a dummy, return the frame ID of that
3077 dummy frame. The frame ID's base needs to match the TOS value
3078 saved by save_dummy_frame_tos() and returned from
3079 arm_push_dummy_call, and the PC needs to match the dummy frame's
3082 static struct frame_id
3083 arm_dummy_id (struct gdbarch
*gdbarch
, struct frame_info
*this_frame
)
3085 return frame_id_build (get_frame_register_unsigned (this_frame
,
3087 get_frame_pc (this_frame
));
3090 /* Given THIS_FRAME, find the previous frame's resume PC (which will
3091 be used to construct the previous frame's ID, after looking up the
3092 containing function). */
3095 arm_unwind_pc (struct gdbarch
*gdbarch
, struct frame_info
*this_frame
)
3098 pc
= frame_unwind_register_unsigned (this_frame
, ARM_PC_REGNUM
);
3099 return arm_addr_bits_remove (gdbarch
, pc
);
3103 arm_unwind_sp (struct gdbarch
*gdbarch
, struct frame_info
*this_frame
)
3105 return frame_unwind_register_unsigned (this_frame
, ARM_SP_REGNUM
);
3108 static struct value
*
3109 arm_dwarf2_prev_register (struct frame_info
*this_frame
, void **this_cache
,
3112 struct gdbarch
* gdbarch
= get_frame_arch (this_frame
);
3114 ULONGEST t_bit
= arm_psr_thumb_bit (gdbarch
);
3119 /* The PC is normally copied from the return column, which
3120 describes saves of LR. However, that version may have an
3121 extra bit set to indicate Thumb state. The bit is not
3123 lr
= frame_unwind_register_unsigned (this_frame
, ARM_LR_REGNUM
);
3124 return frame_unwind_got_constant (this_frame
, regnum
,
3125 arm_addr_bits_remove (gdbarch
, lr
));
3128 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3129 cpsr
= get_frame_register_unsigned (this_frame
, regnum
);
3130 lr
= frame_unwind_register_unsigned (this_frame
, ARM_LR_REGNUM
);
3131 if (IS_THUMB_ADDR (lr
))
3135 return frame_unwind_got_constant (this_frame
, regnum
, cpsr
);
3138 internal_error (__FILE__
, __LINE__
,
3139 _("Unexpected register %d"), regnum
);
3144 arm_dwarf2_frame_init_reg (struct gdbarch
*gdbarch
, int regnum
,
3145 struct dwarf2_frame_state_reg
*reg
,
3146 struct frame_info
*this_frame
)
3152 reg
->how
= DWARF2_FRAME_REG_FN
;
3153 reg
->loc
.fn
= arm_dwarf2_prev_register
;
3156 reg
->how
= DWARF2_FRAME_REG_CFA
;
3161 /* Implement the stack_frame_destroyed_p gdbarch method. */
3164 thumb_stack_frame_destroyed_p (struct gdbarch
*gdbarch
, CORE_ADDR pc
)
3166 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
3167 unsigned int insn
, insn2
;
3168 int found_return
= 0, found_stack_adjust
= 0;
3169 CORE_ADDR func_start
, func_end
;
3173 if (!find_pc_partial_function (pc
, NULL
, &func_start
, &func_end
))
3176 /* The epilogue is a sequence of instructions along the following lines:
3178 - add stack frame size to SP or FP
3179 - [if frame pointer used] restore SP from FP
3180 - restore registers from SP [may include PC]
3181 - a return-type instruction [if PC wasn't already restored]
3183 In a first pass, we scan forward from the current PC and verify the
3184 instructions we find as compatible with this sequence, ending in a
3187 However, this is not sufficient to distinguish indirect function calls
3188 within a function from indirect tail calls in the epilogue in some cases.
3189 Therefore, if we didn't already find any SP-changing instruction during
3190 forward scan, we add a backward scanning heuristic to ensure we actually
3191 are in the epilogue. */
3194 while (scan_pc
< func_end
&& !found_return
)
3196 if (target_read_memory (scan_pc
, buf
, 2))
3200 insn
= extract_unsigned_integer (buf
, 2, byte_order_for_code
);
3202 if ((insn
& 0xff80) == 0x4700) /* bx <Rm> */
3204 else if (insn
== 0x46f7) /* mov pc, lr */
3206 else if (thumb_instruction_restores_sp (insn
))
3208 if ((insn
& 0xff00) == 0xbd00) /* pop <registers, PC> */
3211 else if (thumb_insn_size (insn
) == 4) /* 32-bit Thumb-2 instruction */
3213 if (target_read_memory (scan_pc
, buf
, 2))
3217 insn2
= extract_unsigned_integer (buf
, 2, byte_order_for_code
);
3219 if (insn
== 0xe8bd) /* ldm.w sp!, <registers> */
3221 if (insn2
& 0x8000) /* <registers> include PC. */
3224 else if (insn
== 0xf85d /* ldr.w <Rt>, [sp], #4 */
3225 && (insn2
& 0x0fff) == 0x0b04)
3227 if ((insn2
& 0xf000) == 0xf000) /* <Rt> is PC. */
3230 else if ((insn
& 0xffbf) == 0xecbd /* vldm sp!, <list> */
3231 && (insn2
& 0x0e00) == 0x0a00)
3243 /* Since any instruction in the epilogue sequence, with the possible
3244 exception of return itself, updates the stack pointer, we need to
3245 scan backwards for at most one instruction. Try either a 16-bit or
3246 a 32-bit instruction. This is just a heuristic, so we do not worry
3247 too much about false positives. */
3249 if (pc
- 4 < func_start
)
3251 if (target_read_memory (pc
- 4, buf
, 4))
3254 insn
= extract_unsigned_integer (buf
, 2, byte_order_for_code
);
3255 insn2
= extract_unsigned_integer (buf
+ 2, 2, byte_order_for_code
);
3257 if (thumb_instruction_restores_sp (insn2
))
3258 found_stack_adjust
= 1;
3259 else if (insn
== 0xe8bd) /* ldm.w sp!, <registers> */
3260 found_stack_adjust
= 1;
3261 else if (insn
== 0xf85d /* ldr.w <Rt>, [sp], #4 */
3262 && (insn2
& 0x0fff) == 0x0b04)
3263 found_stack_adjust
= 1;
3264 else if ((insn
& 0xffbf) == 0xecbd /* vldm sp!, <list> */
3265 && (insn2
& 0x0e00) == 0x0a00)
3266 found_stack_adjust
= 1;
3268 return found_stack_adjust
;
3272 arm_stack_frame_destroyed_p_1 (struct gdbarch
*gdbarch
, CORE_ADDR pc
)
3274 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
3277 CORE_ADDR func_start
, func_end
;
3279 if (!find_pc_partial_function (pc
, NULL
, &func_start
, &func_end
))
3282 /* We are in the epilogue if the previous instruction was a stack
3283 adjustment and the next instruction is a possible return (bx, mov
3284 pc, or pop). We could have to scan backwards to find the stack
3285 adjustment, or forwards to find the return, but this is a decent
3286 approximation. First scan forwards. */
3289 insn
= read_memory_unsigned_integer (pc
, 4, byte_order_for_code
);
3290 if (bits (insn
, 28, 31) != INST_NV
)
3292 if ((insn
& 0x0ffffff0) == 0x012fff10)
3295 else if ((insn
& 0x0ffffff0) == 0x01a0f000)
3298 else if ((insn
& 0x0fff0000) == 0x08bd0000
3299 && (insn
& 0x0000c000) != 0)
3300 /* POP (LDMIA), including PC or LR. */
3307 /* Scan backwards. This is just a heuristic, so do not worry about
3308 false positives from mode changes. */
3310 if (pc
< func_start
+ 4)
3313 insn
= read_memory_unsigned_integer (pc
- 4, 4, byte_order_for_code
);
3314 if (arm_instruction_restores_sp (insn
))
3320 /* Implement the stack_frame_destroyed_p gdbarch method. */
3323 arm_stack_frame_destroyed_p (struct gdbarch
*gdbarch
, CORE_ADDR pc
)
3325 if (arm_pc_is_thumb (gdbarch
, pc
))
3326 return thumb_stack_frame_destroyed_p (gdbarch
, pc
);
3328 return arm_stack_frame_destroyed_p_1 (gdbarch
, pc
);
3331 /* When arguments must be pushed onto the stack, they go on in reverse
3332 order. The code below implements a FILO (stack) to do this. */
3337 struct stack_item
*prev
;
3341 static struct stack_item
*
3342 push_stack_item (struct stack_item
*prev
, const gdb_byte
*contents
, int len
)
3344 struct stack_item
*si
;
3345 si
= XNEW (struct stack_item
);
3346 si
->data
= (gdb_byte
*) xmalloc (len
);
3349 memcpy (si
->data
, contents
, len
);
3353 static struct stack_item
*
3354 pop_stack_item (struct stack_item
*si
)
3356 struct stack_item
*dead
= si
;
3364 /* Return the alignment (in bytes) of the given type. */
3367 arm_type_align (struct type
*t
)
3373 t
= check_typedef (t
);
3374 switch (TYPE_CODE (t
))
3377 /* Should never happen. */
3378 internal_error (__FILE__
, __LINE__
, _("unknown type alignment"));
3382 case TYPE_CODE_ENUM
:
3386 case TYPE_CODE_RANGE
:
3388 case TYPE_CODE_CHAR
:
3389 case TYPE_CODE_BOOL
:
3390 return TYPE_LENGTH (t
);
3392 case TYPE_CODE_ARRAY
:
3393 if (TYPE_VECTOR (t
))
3395 /* Use the natural alignment for vector types (the same for
3396 scalar type), but the maximum alignment is 64-bit. */
3397 if (TYPE_LENGTH (t
) > 8)
3400 return TYPE_LENGTH (t
);
3403 return arm_type_align (TYPE_TARGET_TYPE (t
));
3404 case TYPE_CODE_COMPLEX
:
3405 return arm_type_align (TYPE_TARGET_TYPE (t
));
3407 case TYPE_CODE_STRUCT
:
3408 case TYPE_CODE_UNION
:
3410 for (n
= 0; n
< TYPE_NFIELDS (t
); n
++)
3412 falign
= arm_type_align (TYPE_FIELD_TYPE (t
, n
));
3420 /* Possible base types for a candidate for passing and returning in
3423 enum arm_vfp_cprc_base_type
3432 /* The length of one element of base type B. */
3435 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b
)
3439 case VFP_CPRC_SINGLE
:
3441 case VFP_CPRC_DOUBLE
:
3443 case VFP_CPRC_VEC64
:
3445 case VFP_CPRC_VEC128
:
3448 internal_error (__FILE__
, __LINE__
, _("Invalid VFP CPRC type: %d."),
3453 /* The character ('s', 'd' or 'q') for the type of VFP register used
3454 for passing base type B. */
3457 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b
)
3461 case VFP_CPRC_SINGLE
:
3463 case VFP_CPRC_DOUBLE
:
3465 case VFP_CPRC_VEC64
:
3467 case VFP_CPRC_VEC128
:
3470 internal_error (__FILE__
, __LINE__
, _("Invalid VFP CPRC type: %d."),
3475 /* Determine whether T may be part of a candidate for passing and
3476 returning in VFP registers, ignoring the limit on the total number
3477 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3478 classification of the first valid component found; if it is not
3479 VFP_CPRC_UNKNOWN, all components must have the same classification
3480 as *BASE_TYPE. If it is found that T contains a type not permitted
3481 for passing and returning in VFP registers, a type differently
3482 classified from *BASE_TYPE, or two types differently classified
3483 from each other, return -1, otherwise return the total number of
3484 base-type elements found (possibly 0 in an empty structure or
3485 array). Vector types are not currently supported, matching the
3486 generic AAPCS support. */
3489 arm_vfp_cprc_sub_candidate (struct type
*t
,
3490 enum arm_vfp_cprc_base_type
*base_type
)
3492 t
= check_typedef (t
);
3493 switch (TYPE_CODE (t
))
3496 switch (TYPE_LENGTH (t
))
3499 if (*base_type
== VFP_CPRC_UNKNOWN
)
3500 *base_type
= VFP_CPRC_SINGLE
;
3501 else if (*base_type
!= VFP_CPRC_SINGLE
)
3506 if (*base_type
== VFP_CPRC_UNKNOWN
)
3507 *base_type
= VFP_CPRC_DOUBLE
;
3508 else if (*base_type
!= VFP_CPRC_DOUBLE
)
3517 case TYPE_CODE_COMPLEX
:
3518 /* Arguments of complex T where T is one of the types float or
3519 double get treated as if they are implemented as:
3528 switch (TYPE_LENGTH (t
))
3531 if (*base_type
== VFP_CPRC_UNKNOWN
)
3532 *base_type
= VFP_CPRC_SINGLE
;
3533 else if (*base_type
!= VFP_CPRC_SINGLE
)
3538 if (*base_type
== VFP_CPRC_UNKNOWN
)
3539 *base_type
= VFP_CPRC_DOUBLE
;
3540 else if (*base_type
!= VFP_CPRC_DOUBLE
)
3549 case TYPE_CODE_ARRAY
:
3551 if (TYPE_VECTOR (t
))
3553 /* A 64-bit or 128-bit containerized vector type are VFP
3555 switch (TYPE_LENGTH (t
))
3558 if (*base_type
== VFP_CPRC_UNKNOWN
)
3559 *base_type
= VFP_CPRC_VEC64
;
3562 if (*base_type
== VFP_CPRC_UNKNOWN
)
3563 *base_type
= VFP_CPRC_VEC128
;
3574 count
= arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t
),
3578 if (TYPE_LENGTH (t
) == 0)
3580 gdb_assert (count
== 0);
3583 else if (count
== 0)
3585 unitlen
= arm_vfp_cprc_unit_length (*base_type
);
3586 gdb_assert ((TYPE_LENGTH (t
) % unitlen
) == 0);
3587 return TYPE_LENGTH (t
) / unitlen
;
3592 case TYPE_CODE_STRUCT
:
3597 for (i
= 0; i
< TYPE_NFIELDS (t
); i
++)
3601 if (!field_is_static (&TYPE_FIELD (t
, i
)))
3602 sub_count
= arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t
, i
),
3604 if (sub_count
== -1)
3608 if (TYPE_LENGTH (t
) == 0)
3610 gdb_assert (count
== 0);
3613 else if (count
== 0)
3615 unitlen
= arm_vfp_cprc_unit_length (*base_type
);
3616 if (TYPE_LENGTH (t
) != unitlen
* count
)
3621 case TYPE_CODE_UNION
:
3626 for (i
= 0; i
< TYPE_NFIELDS (t
); i
++)
3628 int sub_count
= arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t
, i
),
3630 if (sub_count
== -1)
3632 count
= (count
> sub_count
? count
: sub_count
);
3634 if (TYPE_LENGTH (t
) == 0)
3636 gdb_assert (count
== 0);
3639 else if (count
== 0)
3641 unitlen
= arm_vfp_cprc_unit_length (*base_type
);
3642 if (TYPE_LENGTH (t
) != unitlen
* count
)
3654 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3655 if passed to or returned from a non-variadic function with the VFP
3656 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3657 *BASE_TYPE to the base type for T and *COUNT to the number of
3658 elements of that base type before returning. */
3661 arm_vfp_call_candidate (struct type
*t
, enum arm_vfp_cprc_base_type
*base_type
,
3664 enum arm_vfp_cprc_base_type b
= VFP_CPRC_UNKNOWN
;
3665 int c
= arm_vfp_cprc_sub_candidate (t
, &b
);
3666 if (c
<= 0 || c
> 4)
3673 /* Return 1 if the VFP ABI should be used for passing arguments to and
3674 returning values from a function of type FUNC_TYPE, 0
3678 arm_vfp_abi_for_function (struct gdbarch
*gdbarch
, struct type
*func_type
)
3680 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
3681 /* Variadic functions always use the base ABI. Assume that functions
3682 without debug info are not variadic. */
3683 if (func_type
&& TYPE_VARARGS (check_typedef (func_type
)))
3685 /* The VFP ABI is only supported as a variant of AAPCS. */
3686 if (tdep
->arm_abi
!= ARM_ABI_AAPCS
)
3688 return gdbarch_tdep (gdbarch
)->fp_model
== ARM_FLOAT_VFP
;
3691 /* We currently only support passing parameters in integer registers, which
3692 conforms with GCC's default model, and VFP argument passing following
3693 the VFP variant of AAPCS. Several other variants exist and
3694 we should probably support some of them based on the selected ABI. */
3697 arm_push_dummy_call (struct gdbarch
*gdbarch
, struct value
*function
,
3698 struct regcache
*regcache
, CORE_ADDR bp_addr
, int nargs
,
3699 struct value
**args
, CORE_ADDR sp
, int struct_return
,
3700 CORE_ADDR struct_addr
)
3702 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
3706 struct stack_item
*si
= NULL
;
3709 unsigned vfp_regs_free
= (1 << 16) - 1;
3711 /* Determine the type of this function and whether the VFP ABI
3713 ftype
= check_typedef (value_type (function
));
3714 if (TYPE_CODE (ftype
) == TYPE_CODE_PTR
)
3715 ftype
= check_typedef (TYPE_TARGET_TYPE (ftype
));
3716 use_vfp_abi
= arm_vfp_abi_for_function (gdbarch
, ftype
);
3718 /* Set the return address. For the ARM, the return breakpoint is
3719 always at BP_ADDR. */
3720 if (arm_pc_is_thumb (gdbarch
, bp_addr
))
3722 regcache_cooked_write_unsigned (regcache
, ARM_LR_REGNUM
, bp_addr
);
3724 /* Walk through the list of args and determine how large a temporary
3725 stack is required. Need to take care here as structs may be
3726 passed on the stack, and we have to push them. */
3729 argreg
= ARM_A1_REGNUM
;
3732 /* The struct_return pointer occupies the first parameter
3733 passing register. */
3737 fprintf_unfiltered (gdb_stdlog
, "struct return in %s = %s\n",
3738 gdbarch_register_name (gdbarch
, argreg
),
3739 paddress (gdbarch
, struct_addr
));
3740 regcache_cooked_write_unsigned (regcache
, argreg
, struct_addr
);
3744 for (argnum
= 0; argnum
< nargs
; argnum
++)
3747 struct type
*arg_type
;
3748 struct type
*target_type
;
3749 enum type_code typecode
;
3750 const bfd_byte
*val
;
3752 enum arm_vfp_cprc_base_type vfp_base_type
;
3754 int may_use_core_reg
= 1;
3756 arg_type
= check_typedef (value_type (args
[argnum
]));
3757 len
= TYPE_LENGTH (arg_type
);
3758 target_type
= TYPE_TARGET_TYPE (arg_type
);
3759 typecode
= TYPE_CODE (arg_type
);
3760 val
= value_contents (args
[argnum
]);
3762 align
= arm_type_align (arg_type
);
3763 /* Round alignment up to a whole number of words. */
3764 align
= (align
+ INT_REGISTER_SIZE
- 1) & ~(INT_REGISTER_SIZE
- 1);
3765 /* Different ABIs have different maximum alignments. */
3766 if (gdbarch_tdep (gdbarch
)->arm_abi
== ARM_ABI_APCS
)
3768 /* The APCS ABI only requires word alignment. */
3769 align
= INT_REGISTER_SIZE
;
3773 /* The AAPCS requires at most doubleword alignment. */
3774 if (align
> INT_REGISTER_SIZE
* 2)
3775 align
= INT_REGISTER_SIZE
* 2;
3779 && arm_vfp_call_candidate (arg_type
, &vfp_base_type
,
3787 /* Because this is a CPRC it cannot go in a core register or
3788 cause a core register to be skipped for alignment.
3789 Either it goes in VFP registers and the rest of this loop
3790 iteration is skipped for this argument, or it goes on the
3791 stack (and the stack alignment code is correct for this
3793 may_use_core_reg
= 0;
3795 unit_length
= arm_vfp_cprc_unit_length (vfp_base_type
);
3796 shift
= unit_length
/ 4;
3797 mask
= (1 << (shift
* vfp_base_count
)) - 1;
3798 for (regno
= 0; regno
< 16; regno
+= shift
)
3799 if (((vfp_regs_free
>> regno
) & mask
) == mask
)
3808 vfp_regs_free
&= ~(mask
<< regno
);
3809 reg_scaled
= regno
/ shift
;
3810 reg_char
= arm_vfp_cprc_reg_char (vfp_base_type
);
3811 for (i
= 0; i
< vfp_base_count
; i
++)
3815 if (reg_char
== 'q')
3816 arm_neon_quad_write (gdbarch
, regcache
, reg_scaled
+ i
,
3817 val
+ i
* unit_length
);
3820 xsnprintf (name_buf
, sizeof (name_buf
), "%c%d",
3821 reg_char
, reg_scaled
+ i
);
3822 regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
3824 regcache_cooked_write (regcache
, regnum
,
3825 val
+ i
* unit_length
);
3832 /* This CPRC could not go in VFP registers, so all VFP
3833 registers are now marked as used. */
3838 /* Push stack padding for doubleword alignment. */
3839 if (nstack
& (align
- 1))
3841 si
= push_stack_item (si
, val
, INT_REGISTER_SIZE
);
3842 nstack
+= INT_REGISTER_SIZE
;
3845 /* Doubleword aligned quantities must go in even register pairs. */
3846 if (may_use_core_reg
3847 && argreg
<= ARM_LAST_ARG_REGNUM
3848 && align
> INT_REGISTER_SIZE
3852 /* If the argument is a pointer to a function, and it is a
3853 Thumb function, create a LOCAL copy of the value and set
3854 the THUMB bit in it. */
3855 if (TYPE_CODE_PTR
== typecode
3856 && target_type
!= NULL
3857 && TYPE_CODE_FUNC
== TYPE_CODE (check_typedef (target_type
)))
3859 CORE_ADDR regval
= extract_unsigned_integer (val
, len
, byte_order
);
3860 if (arm_pc_is_thumb (gdbarch
, regval
))
3862 bfd_byte
*copy
= (bfd_byte
*) alloca (len
);
3863 store_unsigned_integer (copy
, len
, byte_order
,
3864 MAKE_THUMB_ADDR (regval
));
3869 /* Copy the argument to general registers or the stack in
3870 register-sized pieces. Large arguments are split between
3871 registers and stack. */
3874 int partial_len
= len
< INT_REGISTER_SIZE
? len
: INT_REGISTER_SIZE
;
3876 = extract_unsigned_integer (val
, partial_len
, byte_order
);
3878 if (may_use_core_reg
&& argreg
<= ARM_LAST_ARG_REGNUM
)
3880 /* The argument is being passed in a general purpose
3882 if (byte_order
== BFD_ENDIAN_BIG
)
3883 regval
<<= (INT_REGISTER_SIZE
- partial_len
) * 8;
3885 fprintf_unfiltered (gdb_stdlog
, "arg %d in %s = 0x%s\n",
3887 gdbarch_register_name
3889 phex (regval
, INT_REGISTER_SIZE
));
3890 regcache_cooked_write_unsigned (regcache
, argreg
, regval
);
3895 gdb_byte buf
[INT_REGISTER_SIZE
];
3897 memset (buf
, 0, sizeof (buf
));
3898 store_unsigned_integer (buf
, partial_len
, byte_order
, regval
);
3900 /* Push the arguments onto the stack. */
3902 fprintf_unfiltered (gdb_stdlog
, "arg %d @ sp + %d\n",
3904 si
= push_stack_item (si
, buf
, INT_REGISTER_SIZE
);
3905 nstack
+= INT_REGISTER_SIZE
;
3912 /* If we have an odd number of words to push, then decrement the stack
3913 by one word now, so first stack argument will be dword aligned. */
3920 write_memory (sp
, si
->data
, si
->len
);
3921 si
= pop_stack_item (si
);
3924 /* Finally, update the SP register. */
3925 regcache_cooked_write_unsigned (regcache
, ARM_SP_REGNUM
, sp
);
3931 /* Always align the frame to an 8-byte boundary. This is required on
3932 some platforms and harmless on the rest. */
3935 arm_frame_align (struct gdbarch
*gdbarch
, CORE_ADDR sp
)
3937 /* Align the stack to eight bytes. */
3938 return sp
& ~ (CORE_ADDR
) 7;
/* Print the set of FPA exception flags encoded in the low five bits of
   FLAGS (IVO, DVZ, OFL, UFL, INX), followed by a newline.  Used for both
   the exception mask and the cumulative flag fields of the FPS status.  */

static void
print_fpu_flags (struct ui_file *file, int flags)
{
  if (flags & (1 << 0))
    fputs_filtered ("IVO ", file);
  if (flags & (1 << 1))
    fputs_filtered ("DVZ ", file);
  if (flags & (1 << 2))
    fputs_filtered ("OFL ", file);
  if (flags & (1 << 3))
    fputs_filtered ("UFL ", file);
  if (flags & (1 << 4))
    fputs_filtered ("INX ", file);
  fputc_filtered ('\n', file);
}
3957 /* Print interesting information about the floating point processor
3958 (if present) or emulator. */
3960 arm_print_float_info (struct gdbarch
*gdbarch
, struct ui_file
*file
,
3961 struct frame_info
*frame
, const char *args
)
3963 unsigned long status
= get_frame_register_unsigned (frame
, ARM_FPS_REGNUM
);
3966 type
= (status
>> 24) & 127;
3967 if (status
& (1 << 31))
3968 fprintf_filtered (file
, _("Hardware FPU type %d\n"), type
);
3970 fprintf_filtered (file
, _("Software FPU type %d\n"), type
);
3971 /* i18n: [floating point unit] mask */
3972 fputs_filtered (_("mask: "), file
);
3973 print_fpu_flags (file
, status
>> 16);
3974 /* i18n: [floating point unit] flags */
3975 fputs_filtered (_("flags: "), file
);
3976 print_fpu_flags (file
, status
);
3979 /* Construct the ARM extended floating point type. */
3980 static struct type
*
3981 arm_ext_type (struct gdbarch
*gdbarch
)
3983 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
3985 if (!tdep
->arm_ext_type
)
3987 = arch_float_type (gdbarch
, -1, "builtin_type_arm_ext",
3988 floatformats_arm_ext
);
3990 return tdep
->arm_ext_type
;
3993 static struct type
*
3994 arm_neon_double_type (struct gdbarch
*gdbarch
)
3996 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
3998 if (tdep
->neon_double_type
== NULL
)
4000 struct type
*t
, *elem
;
4002 t
= arch_composite_type (gdbarch
, "__gdb_builtin_type_neon_d",
4004 elem
= builtin_type (gdbarch
)->builtin_uint8
;
4005 append_composite_type_field (t
, "u8", init_vector_type (elem
, 8));
4006 elem
= builtin_type (gdbarch
)->builtin_uint16
;
4007 append_composite_type_field (t
, "u16", init_vector_type (elem
, 4));
4008 elem
= builtin_type (gdbarch
)->builtin_uint32
;
4009 append_composite_type_field (t
, "u32", init_vector_type (elem
, 2));
4010 elem
= builtin_type (gdbarch
)->builtin_uint64
;
4011 append_composite_type_field (t
, "u64", elem
);
4012 elem
= builtin_type (gdbarch
)->builtin_float
;
4013 append_composite_type_field (t
, "f32", init_vector_type (elem
, 2));
4014 elem
= builtin_type (gdbarch
)->builtin_double
;
4015 append_composite_type_field (t
, "f64", elem
);
4017 TYPE_VECTOR (t
) = 1;
4018 TYPE_NAME (t
) = "neon_d";
4019 tdep
->neon_double_type
= t
;
4022 return tdep
->neon_double_type
;
4025 /* FIXME: The vector types are not correctly ordered on big-endian
4026 targets. Just as s0 is the low bits of d0, d0[0] is also the low
4027 bits of d0 - regardless of what unit size is being held in d0. So
4028 the offset of the first uint8 in d0 is 7, but the offset of the
4029 first float is 4. This code works as-is for little-endian
4032 static struct type
*
4033 arm_neon_quad_type (struct gdbarch
*gdbarch
)
4035 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
4037 if (tdep
->neon_quad_type
== NULL
)
4039 struct type
*t
, *elem
;
4041 t
= arch_composite_type (gdbarch
, "__gdb_builtin_type_neon_q",
4043 elem
= builtin_type (gdbarch
)->builtin_uint8
;
4044 append_composite_type_field (t
, "u8", init_vector_type (elem
, 16));
4045 elem
= builtin_type (gdbarch
)->builtin_uint16
;
4046 append_composite_type_field (t
, "u16", init_vector_type (elem
, 8));
4047 elem
= builtin_type (gdbarch
)->builtin_uint32
;
4048 append_composite_type_field (t
, "u32", init_vector_type (elem
, 4));
4049 elem
= builtin_type (gdbarch
)->builtin_uint64
;
4050 append_composite_type_field (t
, "u64", init_vector_type (elem
, 2));
4051 elem
= builtin_type (gdbarch
)->builtin_float
;
4052 append_composite_type_field (t
, "f32", init_vector_type (elem
, 4));
4053 elem
= builtin_type (gdbarch
)->builtin_double
;
4054 append_composite_type_field (t
, "f64", init_vector_type (elem
, 2));
4056 TYPE_VECTOR (t
) = 1;
4057 TYPE_NAME (t
) = "neon_q";
4058 tdep
->neon_quad_type
= t
;
4061 return tdep
->neon_quad_type
;
4064 /* Return the GDB type object for the "standard" data type of data in
4067 static struct type
*
4068 arm_register_type (struct gdbarch
*gdbarch
, int regnum
)
4070 int num_regs
= gdbarch_num_regs (gdbarch
);
4072 if (gdbarch_tdep (gdbarch
)->have_vfp_pseudos
4073 && regnum
>= num_regs
&& regnum
< num_regs
+ 32)
4074 return builtin_type (gdbarch
)->builtin_float
;
4076 if (gdbarch_tdep (gdbarch
)->have_neon_pseudos
4077 && regnum
>= num_regs
+ 32 && regnum
< num_regs
+ 32 + 16)
4078 return arm_neon_quad_type (gdbarch
);
4080 /* If the target description has register information, we are only
4081 in this function so that we can override the types of
4082 double-precision registers for NEON. */
4083 if (tdesc_has_registers (gdbarch_target_desc (gdbarch
)))
4085 struct type
*t
= tdesc_register_type (gdbarch
, regnum
);
4087 if (regnum
>= ARM_D0_REGNUM
&& regnum
< ARM_D0_REGNUM
+ 32
4088 && TYPE_CODE (t
) == TYPE_CODE_FLT
4089 && gdbarch_tdep (gdbarch
)->have_neon
)
4090 return arm_neon_double_type (gdbarch
);
4095 if (regnum
>= ARM_F0_REGNUM
&& regnum
< ARM_F0_REGNUM
+ NUM_FREGS
)
4097 if (!gdbarch_tdep (gdbarch
)->have_fpa_registers
)
4098 return builtin_type (gdbarch
)->builtin_void
;
4100 return arm_ext_type (gdbarch
);
4102 else if (regnum
== ARM_SP_REGNUM
)
4103 return builtin_type (gdbarch
)->builtin_data_ptr
;
4104 else if (regnum
== ARM_PC_REGNUM
)
4105 return builtin_type (gdbarch
)->builtin_func_ptr
;
4106 else if (regnum
>= ARRAY_SIZE (arm_register_names
))
4107 /* These registers are only supported on targets which supply
4108 an XML description. */
4109 return builtin_type (gdbarch
)->builtin_int0
;
4111 return builtin_type (gdbarch
)->builtin_uint32
;
4114 /* Map a DWARF register REGNUM onto the appropriate GDB register
4118 arm_dwarf_reg_to_regnum (struct gdbarch
*gdbarch
, int reg
)
4120 /* Core integer regs. */
4121 if (reg
>= 0 && reg
<= 15)
4124 /* Legacy FPA encoding. These were once used in a way which
4125 overlapped with VFP register numbering, so their use is
4126 discouraged, but GDB doesn't support the ARM toolchain
4127 which used them for VFP. */
4128 if (reg
>= 16 && reg
<= 23)
4129 return ARM_F0_REGNUM
+ reg
- 16;
4131 /* New assignments for the FPA registers. */
4132 if (reg
>= 96 && reg
<= 103)
4133 return ARM_F0_REGNUM
+ reg
- 96;
4135 /* WMMX register assignments. */
4136 if (reg
>= 104 && reg
<= 111)
4137 return ARM_WCGR0_REGNUM
+ reg
- 104;
4139 if (reg
>= 112 && reg
<= 127)
4140 return ARM_WR0_REGNUM
+ reg
- 112;
4142 if (reg
>= 192 && reg
<= 199)
4143 return ARM_WC0_REGNUM
+ reg
- 192;
4145 /* VFP v2 registers. A double precision value is actually
4146 in d1 rather than s2, but the ABI only defines numbering
4147 for the single precision registers. This will "just work"
4148 in GDB for little endian targets (we'll read eight bytes,
4149 starting in s0 and then progressing to s1), but will be
4150 reversed on big endian targets with VFP. This won't
4151 be a problem for the new Neon quad registers; you're supposed
4152 to use DW_OP_piece for those. */
4153 if (reg
>= 64 && reg
<= 95)
4157 xsnprintf (name_buf
, sizeof (name_buf
), "s%d", reg
- 64);
4158 return user_reg_map_name_to_regnum (gdbarch
, name_buf
,
4162 /* VFP v3 / Neon registers. This range is also used for VFP v2
4163 registers, except that it now describes d0 instead of s0. */
4164 if (reg
>= 256 && reg
<= 287)
4168 xsnprintf (name_buf
, sizeof (name_buf
), "d%d", reg
- 256);
4169 return user_reg_map_name_to_regnum (gdbarch
, name_buf
,
4176 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4178 arm_register_sim_regno (struct gdbarch
*gdbarch
, int regnum
)
4181 gdb_assert (reg
>= 0 && reg
< gdbarch_num_regs (gdbarch
));
4183 if (regnum
>= ARM_WR0_REGNUM
&& regnum
<= ARM_WR15_REGNUM
)
4184 return regnum
- ARM_WR0_REGNUM
+ SIM_ARM_IWMMXT_COP0R0_REGNUM
;
4186 if (regnum
>= ARM_WC0_REGNUM
&& regnum
<= ARM_WC7_REGNUM
)
4187 return regnum
- ARM_WC0_REGNUM
+ SIM_ARM_IWMMXT_COP1R0_REGNUM
;
4189 if (regnum
>= ARM_WCGR0_REGNUM
&& regnum
<= ARM_WCGR7_REGNUM
)
4190 return regnum
- ARM_WCGR0_REGNUM
+ SIM_ARM_IWMMXT_COP1R8_REGNUM
;
4192 if (reg
< NUM_GREGS
)
4193 return SIM_ARM_R0_REGNUM
+ reg
;
4196 if (reg
< NUM_FREGS
)
4197 return SIM_ARM_FP0_REGNUM
+ reg
;
4200 if (reg
< NUM_SREGS
)
4201 return SIM_ARM_FPS_REGNUM
+ reg
;
4204 internal_error (__FILE__
, __LINE__
, _("Bad REGNUM %d"), regnum
);
4207 /* NOTE: cagney/2001-08-20: Both convert_from_extended() and
4208 convert_to_extended() use floatformat_arm_ext_littlebyte_bigword.
4209 It is thought that this is is the floating-point register format on
4210 little-endian systems. */
4213 convert_from_extended (const struct floatformat
*fmt
, const void *ptr
,
4214 void *dbl
, int endianess
)
4218 if (endianess
== BFD_ENDIAN_BIG
)
4219 floatformat_to_doublest (&floatformat_arm_ext_big
, ptr
, &d
);
4221 floatformat_to_doublest (&floatformat_arm_ext_littlebyte_bigword
,
4223 floatformat_from_doublest (fmt
, &d
, dbl
);
4227 convert_to_extended (const struct floatformat
*fmt
, void *dbl
, const void *ptr
,
4232 floatformat_to_doublest (fmt
, ptr
, &d
);
4233 if (endianess
== BFD_ENDIAN_BIG
)
4234 floatformat_from_doublest (&floatformat_arm_ext_big
, &d
, dbl
);
4236 floatformat_from_doublest (&floatformat_arm_ext_littlebyte_bigword
,
4240 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
4241 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
4242 NULL if an error occurs. BUF is freed. */
4245 extend_buffer_earlier (gdb_byte
*buf
, CORE_ADDR endaddr
,
4246 int old_len
, int new_len
)
4249 int bytes_to_read
= new_len
- old_len
;
4251 new_buf
= (gdb_byte
*) xmalloc (new_len
);
4252 memcpy (new_buf
+ bytes_to_read
, buf
, old_len
);
4254 if (target_read_code (endaddr
- new_len
, new_buf
, bytes_to_read
) != 0)
4262 /* An IT block is at most the 2-byte IT instruction followed by
4263 four 4-byte instructions. The furthest back we must search to
4264 find an IT block that affects the current instruction is thus
4265 2 + 3 * 4 == 14 bytes. */
4266 #define MAX_IT_BLOCK_PREFIX 14
4268 /* Use a quick scan if there are more than this many bytes of
4270 #define IT_SCAN_THRESHOLD 32
4272 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
4273 A breakpoint in an IT block may not be hit, depending on the
4276 arm_adjust_breakpoint_address (struct gdbarch
*gdbarch
, CORE_ADDR bpaddr
)
4280 CORE_ADDR boundary
, func_start
;
4282 enum bfd_endian order
= gdbarch_byte_order_for_code (gdbarch
);
4283 int i
, any
, last_it
, last_it_count
;
4285 /* If we are using BKPT breakpoints, none of this is necessary. */
4286 if (gdbarch_tdep (gdbarch
)->thumb2_breakpoint
== NULL
)
4289 /* ARM mode does not have this problem. */
4290 if (!arm_pc_is_thumb (gdbarch
, bpaddr
))
4293 /* We are setting a breakpoint in Thumb code that could potentially
4294 contain an IT block. The first step is to find how much Thumb
4295 code there is; we do not need to read outside of known Thumb
4297 map_type
= arm_find_mapping_symbol (bpaddr
, &boundary
);
4299 /* Thumb-2 code must have mapping symbols to have a chance. */
4302 bpaddr
= gdbarch_addr_bits_remove (gdbarch
, bpaddr
);
4304 if (find_pc_partial_function (bpaddr
, NULL
, &func_start
, NULL
)
4305 && func_start
> boundary
)
4306 boundary
= func_start
;
4308 /* Search for a candidate IT instruction. We have to do some fancy
4309 footwork to distinguish a real IT instruction from the second
4310 half of a 32-bit instruction, but there is no need for that if
4311 there's no candidate. */
4312 buf_len
= std::min (bpaddr
- boundary
, (CORE_ADDR
) MAX_IT_BLOCK_PREFIX
);
4314 /* No room for an IT instruction. */
4317 buf
= (gdb_byte
*) xmalloc (buf_len
);
4318 if (target_read_code (bpaddr
- buf_len
, buf
, buf_len
) != 0)
4321 for (i
= 0; i
< buf_len
; i
+= 2)
4323 unsigned short inst1
= extract_unsigned_integer (&buf
[i
], 2, order
);
4324 if ((inst1
& 0xff00) == 0xbf00 && (inst1
& 0x000f) != 0)
4337 /* OK, the code bytes before this instruction contain at least one
4338 halfword which resembles an IT instruction. We know that it's
4339 Thumb code, but there are still two possibilities. Either the
4340 halfword really is an IT instruction, or it is the second half of
4341 a 32-bit Thumb instruction. The only way we can tell is to
4342 scan forwards from a known instruction boundary. */
4343 if (bpaddr
- boundary
> IT_SCAN_THRESHOLD
)
4347 /* There's a lot of code before this instruction. Start with an
4348 optimistic search; it's easy to recognize halfwords that can
4349 not be the start of a 32-bit instruction, and use that to
4350 lock on to the instruction boundaries. */
4351 buf
= extend_buffer_earlier (buf
, bpaddr
, buf_len
, IT_SCAN_THRESHOLD
);
4354 buf_len
= IT_SCAN_THRESHOLD
;
4357 for (i
= 0; i
< buf_len
- sizeof (buf
) && ! definite
; i
+= 2)
4359 unsigned short inst1
= extract_unsigned_integer (&buf
[i
], 2, order
);
4360 if (thumb_insn_size (inst1
) == 2)
4367 /* At this point, if DEFINITE, BUF[I] is the first place we
4368 are sure that we know the instruction boundaries, and it is far
4369 enough from BPADDR that we could not miss an IT instruction
4370 affecting BPADDR. If ! DEFINITE, give up - start from a
4374 buf
= extend_buffer_earlier (buf
, bpaddr
, buf_len
,
4378 buf_len
= bpaddr
- boundary
;
4384 buf
= extend_buffer_earlier (buf
, bpaddr
, buf_len
, bpaddr
- boundary
);
4387 buf_len
= bpaddr
- boundary
;
4391 /* Scan forwards. Find the last IT instruction before BPADDR. */
4396 unsigned short inst1
= extract_unsigned_integer (&buf
[i
], 2, order
);
4398 if ((inst1
& 0xff00) == 0xbf00 && (inst1
& 0x000f) != 0)
4403 else if (inst1
& 0x0002)
4405 else if (inst1
& 0x0004)
4410 i
+= thumb_insn_size (inst1
);
4416 /* There wasn't really an IT instruction after all. */
4419 if (last_it_count
< 1)
4420 /* It was too far away. */
4423 /* This really is a trouble spot. Move the breakpoint to the IT
4425 return bpaddr
- buf_len
+ last_it
;
4428 /* ARM displaced stepping support.
4430 Generally ARM displaced stepping works as follows:
4432 1. When an instruction is to be single-stepped, it is first decoded by
4433 arm_process_displaced_insn. Depending on the type of instruction, it is
4434 then copied to a scratch location, possibly in a modified form. The
4435 copy_* set of functions performs such modification, as necessary. A
4436 breakpoint is placed after the modified instruction in the scratch space
4437 to return control to GDB. Note in particular that instructions which
4438 modify the PC will no longer do so after modification.
4440 2. The instruction is single-stepped, by setting the PC to the scratch
4441 location address, and resuming. Control returns to GDB when the
4444 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
4445 function used for the current instruction. This function's job is to
4446 put the CPU/memory state back to what it would have been if the
4447 instruction had been executed unmodified in its original location. */
4449 /* NOP instruction (mov r0, r0). */
4450 #define ARM_NOP 0xe1a00000
4451 #define THUMB_NOP 0x4600
4453 /* Helper for register reads for displaced stepping. In particular, this
4454 returns the PC as it would be seen by the instruction at its original
4458 displaced_read_reg (struct regcache
*regs
, struct displaced_step_closure
*dsc
,
4462 CORE_ADDR from
= dsc
->insn_addr
;
4464 if (regno
== ARM_PC_REGNUM
)
4466 /* Compute pipeline offset:
4467 - When executing an ARM instruction, PC reads as the address of the
4468 current instruction plus 8.
4469 - When executing a Thumb instruction, PC reads as the address of the
4470 current instruction plus 4. */
4477 if (debug_displaced
)
4478 fprintf_unfiltered (gdb_stdlog
, "displaced: read pc value %.8lx\n",
4479 (unsigned long) from
);
4480 return (ULONGEST
) from
;
4484 regcache_cooked_read_unsigned (regs
, regno
, &ret
);
4485 if (debug_displaced
)
4486 fprintf_unfiltered (gdb_stdlog
, "displaced: read r%d value %.8lx\n",
4487 regno
, (unsigned long) ret
);
4493 displaced_in_arm_mode (struct regcache
*regs
)
4496 ULONGEST t_bit
= arm_psr_thumb_bit (get_regcache_arch (regs
));
4498 regcache_cooked_read_unsigned (regs
, ARM_PS_REGNUM
, &ps
);
4500 return (ps
& t_bit
) == 0;
4503 /* Write to the PC as from a branch instruction. */
4506 branch_write_pc (struct regcache
*regs
, struct displaced_step_closure
*dsc
,
4510 /* Note: If bits 0/1 are set, this branch would be unpredictable for
4511 architecture versions < 6. */
4512 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
,
4513 val
& ~(ULONGEST
) 0x3);
4515 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
,
4516 val
& ~(ULONGEST
) 0x1);
4519 /* Write to the PC as from a branch-exchange instruction. */
4522 bx_write_pc (struct regcache
*regs
, ULONGEST val
)
4525 ULONGEST t_bit
= arm_psr_thumb_bit (get_regcache_arch (regs
));
4527 regcache_cooked_read_unsigned (regs
, ARM_PS_REGNUM
, &ps
);
4531 regcache_cooked_write_unsigned (regs
, ARM_PS_REGNUM
, ps
| t_bit
);
4532 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
, val
& 0xfffffffe);
4534 else if ((val
& 2) == 0)
4536 regcache_cooked_write_unsigned (regs
, ARM_PS_REGNUM
, ps
& ~t_bit
);
4537 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
, val
);
4541 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
4542 mode, align dest to 4 bytes). */
4543 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
4544 regcache_cooked_write_unsigned (regs
, ARM_PS_REGNUM
, ps
& ~t_bit
);
4545 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
, val
& 0xfffffffc);
4549 /* Write to the PC as if from a load instruction. */
4552 load_write_pc (struct regcache
*regs
, struct displaced_step_closure
*dsc
,
4555 if (DISPLACED_STEPPING_ARCH_VERSION
>= 5)
4556 bx_write_pc (regs
, val
);
4558 branch_write_pc (regs
, dsc
, val
);
4561 /* Write to the PC as if from an ALU instruction. */
4564 alu_write_pc (struct regcache
*regs
, struct displaced_step_closure
*dsc
,
4567 if (DISPLACED_STEPPING_ARCH_VERSION
>= 7 && !dsc
->is_thumb
)
4568 bx_write_pc (regs
, val
);
4570 branch_write_pc (regs
, dsc
, val
);
4573 /* Helper for writing to registers for displaced stepping. Writing to the PC
4574 has a varying effects depending on the instruction which does the write:
4575 this is controlled by the WRITE_PC argument. */
4578 displaced_write_reg (struct regcache
*regs
, struct displaced_step_closure
*dsc
,
4579 int regno
, ULONGEST val
, enum pc_write_style write_pc
)
4581 if (regno
== ARM_PC_REGNUM
)
4583 if (debug_displaced
)
4584 fprintf_unfiltered (gdb_stdlog
, "displaced: writing pc %.8lx\n",
4585 (unsigned long) val
);
4588 case BRANCH_WRITE_PC
:
4589 branch_write_pc (regs
, dsc
, val
);
4593 bx_write_pc (regs
, val
);
4597 load_write_pc (regs
, dsc
, val
);
4601 alu_write_pc (regs
, dsc
, val
);
4604 case CANNOT_WRITE_PC
:
4605 warning (_("Instruction wrote to PC in an unexpected way when "
4606 "single-stepping"));
4610 internal_error (__FILE__
, __LINE__
,
4611 _("Invalid argument to displaced_write_reg"));
4614 dsc
->wrote_to_pc
= 1;
4618 if (debug_displaced
)
4619 fprintf_unfiltered (gdb_stdlog
, "displaced: writing r%d value %.8lx\n",
4620 regno
, (unsigned long) val
);
4621 regcache_cooked_write_unsigned (regs
, regno
, val
);
/* This function is used to concisely determine if an instruction INSN
   references PC.  Register fields of interest in INSN should have the
   corresponding fields of BITMASK set to 0b1111.  The function
   returns return 1 if any of these fields in INSN reference the PC
   (also 0b1111, r15), else it returns 0.  */

static int
insn_references_pc (uint32_t insn, uint32_t bitmask)
{
  uint32_t lowbit = 1;

  while (bitmask != 0)
    {
      uint32_t mask;

      /* Find the lowest set bit of BITMASK: the start of the next
	 4-bit register field to examine.  */
      for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
	;

      if (!lowbit)
	return 0;

      mask = lowbit * 0xf;

      /* A field of all ones (0b1111) names r15, the PC.  */
      if ((insn & mask) == mask)
	return 1;

      bitmask &= ~mask;
    }

  return 0;
}
4657 /* The simplest copy function. Many instructions have the same effect no
4658 matter what address they are executed at: in those cases, use this. */
4661 arm_copy_unmodified (struct gdbarch
*gdbarch
, uint32_t insn
,
4662 const char *iname
, struct displaced_step_closure
*dsc
)
4664 if (debug_displaced
)
4665 fprintf_unfiltered (gdb_stdlog
, "displaced: copying insn %.8lx, "
4666 "opcode/class '%s' unmodified\n", (unsigned long) insn
,
4669 dsc
->modinsn
[0] = insn
;
4675 thumb_copy_unmodified_32bit (struct gdbarch
*gdbarch
, uint16_t insn1
,
4676 uint16_t insn2
, const char *iname
,
4677 struct displaced_step_closure
*dsc
)
4679 if (debug_displaced
)
4680 fprintf_unfiltered (gdb_stdlog
, "displaced: copying insn %.4x %.4x, "
4681 "opcode/class '%s' unmodified\n", insn1
, insn2
,
4684 dsc
->modinsn
[0] = insn1
;
4685 dsc
->modinsn
[1] = insn2
;
4691 /* Copy 16-bit Thumb(Thumb and 16-bit Thumb-2) instruction without any
4694 thumb_copy_unmodified_16bit (struct gdbarch
*gdbarch
, uint16_t insn
,
4696 struct displaced_step_closure
*dsc
)
4698 if (debug_displaced
)
4699 fprintf_unfiltered (gdb_stdlog
, "displaced: copying insn %.4x, "
4700 "opcode/class '%s' unmodified\n", insn
,
4703 dsc
->modinsn
[0] = insn
;
4708 /* Preload instructions with immediate offset. */
4711 cleanup_preload (struct gdbarch
*gdbarch
,
4712 struct regcache
*regs
, struct displaced_step_closure
*dsc
)
4714 displaced_write_reg (regs
, dsc
, 0, dsc
->tmp
[0], CANNOT_WRITE_PC
);
4715 if (!dsc
->u
.preload
.immed
)
4716 displaced_write_reg (regs
, dsc
, 1, dsc
->tmp
[1], CANNOT_WRITE_PC
);
4720 install_preload (struct gdbarch
*gdbarch
, struct regcache
*regs
,
4721 struct displaced_step_closure
*dsc
, unsigned int rn
)
4724 /* Preload instructions:
4726 {pli/pld} [rn, #+/-imm]
4728 {pli/pld} [r0, #+/-imm]. */
4730 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
4731 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
4732 displaced_write_reg (regs
, dsc
, 0, rn_val
, CANNOT_WRITE_PC
);
4733 dsc
->u
.preload
.immed
= 1;
4735 dsc
->cleanup
= &cleanup_preload
;
4739 arm_copy_preload (struct gdbarch
*gdbarch
, uint32_t insn
, struct regcache
*regs
,
4740 struct displaced_step_closure
*dsc
)
4742 unsigned int rn
= bits (insn
, 16, 19);
4744 if (!insn_references_pc (insn
, 0x000f0000ul
))
4745 return arm_copy_unmodified (gdbarch
, insn
, "preload", dsc
);
4747 if (debug_displaced
)
4748 fprintf_unfiltered (gdb_stdlog
, "displaced: copying preload insn %.8lx\n",
4749 (unsigned long) insn
);
4751 dsc
->modinsn
[0] = insn
& 0xfff0ffff;
4753 install_preload (gdbarch
, regs
, dsc
, rn
);
4759 thumb2_copy_preload (struct gdbarch
*gdbarch
, uint16_t insn1
, uint16_t insn2
,
4760 struct regcache
*regs
, struct displaced_step_closure
*dsc
)
4762 unsigned int rn
= bits (insn1
, 0, 3);
4763 unsigned int u_bit
= bit (insn1
, 7);
4764 int imm12
= bits (insn2
, 0, 11);
4767 if (rn
!= ARM_PC_REGNUM
)
4768 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
, "preload", dsc
);
4770 /* PC is only allowed to use in PLI (immediate,literal) Encoding T3, and
4771 PLD (literal) Encoding T1. */
4772 if (debug_displaced
)
4773 fprintf_unfiltered (gdb_stdlog
,
4774 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
4775 (unsigned int) dsc
->insn_addr
, u_bit
? '+' : '-',
4781 /* Rewrite instruction {pli/pld} PC imm12 into:
4782 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
4786 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
4788 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
4789 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
4791 pc_val
= displaced_read_reg (regs
, dsc
, ARM_PC_REGNUM
);
4793 displaced_write_reg (regs
, dsc
, 0, pc_val
, CANNOT_WRITE_PC
);
4794 displaced_write_reg (regs
, dsc
, 1, imm12
, CANNOT_WRITE_PC
);
4795 dsc
->u
.preload
.immed
= 0;
4797 /* {pli/pld} [r0, r1] */
4798 dsc
->modinsn
[0] = insn1
& 0xfff0;
4799 dsc
->modinsn
[1] = 0xf001;
4802 dsc
->cleanup
= &cleanup_preload
;
4806 /* Preload instructions with register offset. */
4809 install_preload_reg(struct gdbarch
*gdbarch
, struct regcache
*regs
,
4810 struct displaced_step_closure
*dsc
, unsigned int rn
,
4813 ULONGEST rn_val
, rm_val
;
4815 /* Preload register-offset instructions:
4817 {pli/pld} [rn, rm {, shift}]
4819 {pli/pld} [r0, r1 {, shift}]. */
4821 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
4822 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
4823 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
4824 rm_val
= displaced_read_reg (regs
, dsc
, rm
);
4825 displaced_write_reg (regs
, dsc
, 0, rn_val
, CANNOT_WRITE_PC
);
4826 displaced_write_reg (regs
, dsc
, 1, rm_val
, CANNOT_WRITE_PC
);
4827 dsc
->u
.preload
.immed
= 0;
4829 dsc
->cleanup
= &cleanup_preload
;
4833 arm_copy_preload_reg (struct gdbarch
*gdbarch
, uint32_t insn
,
4834 struct regcache
*regs
,
4835 struct displaced_step_closure
*dsc
)
4837 unsigned int rn
= bits (insn
, 16, 19);
4838 unsigned int rm
= bits (insn
, 0, 3);
4841 if (!insn_references_pc (insn
, 0x000f000ful
))
4842 return arm_copy_unmodified (gdbarch
, insn
, "preload reg", dsc
);
4844 if (debug_displaced
)
4845 fprintf_unfiltered (gdb_stdlog
, "displaced: copying preload insn %.8lx\n",
4846 (unsigned long) insn
);
4848 dsc
->modinsn
[0] = (insn
& 0xfff0fff0) | 0x1;
4850 install_preload_reg (gdbarch
, regs
, dsc
, rn
, rm
);
4854 /* Copy/cleanup coprocessor load and store instructions. */
4857 cleanup_copro_load_store (struct gdbarch
*gdbarch
,
4858 struct regcache
*regs
,
4859 struct displaced_step_closure
*dsc
)
4861 ULONGEST rn_val
= displaced_read_reg (regs
, dsc
, 0);
4863 displaced_write_reg (regs
, dsc
, 0, dsc
->tmp
[0], CANNOT_WRITE_PC
);
4865 if (dsc
->u
.ldst
.writeback
)
4866 displaced_write_reg (regs
, dsc
, dsc
->u
.ldst
.rn
, rn_val
, LOAD_WRITE_PC
);
4870 install_copro_load_store (struct gdbarch
*gdbarch
, struct regcache
*regs
,
4871 struct displaced_step_closure
*dsc
,
4872 int writeback
, unsigned int rn
)
4876 /* Coprocessor load/store instructions:
4878 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
4880 {stc/stc2} [r0, #+/-imm].
4882 ldc/ldc2 are handled identically. */
4884 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
4885 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
4886 /* PC should be 4-byte aligned. */
4887 rn_val
= rn_val
& 0xfffffffc;
4888 displaced_write_reg (regs
, dsc
, 0, rn_val
, CANNOT_WRITE_PC
);
4890 dsc
->u
.ldst
.writeback
= writeback
;
4891 dsc
->u
.ldst
.rn
= rn
;
4893 dsc
->cleanup
= &cleanup_copro_load_store
;
4897 arm_copy_copro_load_store (struct gdbarch
*gdbarch
, uint32_t insn
,
4898 struct regcache
*regs
,
4899 struct displaced_step_closure
*dsc
)
4901 unsigned int rn
= bits (insn
, 16, 19);
4903 if (!insn_references_pc (insn
, 0x000f0000ul
))
4904 return arm_copy_unmodified (gdbarch
, insn
, "copro load/store", dsc
);
4906 if (debug_displaced
)
4907 fprintf_unfiltered (gdb_stdlog
, "displaced: copying coprocessor "
4908 "load/store insn %.8lx\n", (unsigned long) insn
);
4910 dsc
->modinsn
[0] = insn
& 0xfff0ffff;
4912 install_copro_load_store (gdbarch
, regs
, dsc
, bit (insn
, 25), rn
);
4918 thumb2_copy_copro_load_store (struct gdbarch
*gdbarch
, uint16_t insn1
,
4919 uint16_t insn2
, struct regcache
*regs
,
4920 struct displaced_step_closure
*dsc
)
4922 unsigned int rn
= bits (insn1
, 0, 3);
4924 if (rn
!= ARM_PC_REGNUM
)
4925 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
4926 "copro load/store", dsc
);
4928 if (debug_displaced
)
4929 fprintf_unfiltered (gdb_stdlog
, "displaced: copying coprocessor "
4930 "load/store insn %.4x%.4x\n", insn1
, insn2
);
4932 dsc
->modinsn
[0] = insn1
& 0xfff0;
4933 dsc
->modinsn
[1] = insn2
;
4936 /* This function is called for copying instruction LDC/LDC2/VLDR, which
4937 doesn't support writeback, so pass 0. */
4938 install_copro_load_store (gdbarch
, regs
, dsc
, 0, rn
);
4943 /* Clean up branch instructions (actually perform the branch, by setting
4947 cleanup_branch (struct gdbarch
*gdbarch
, struct regcache
*regs
,
4948 struct displaced_step_closure
*dsc
)
4950 uint32_t status
= displaced_read_reg (regs
, dsc
, ARM_PS_REGNUM
);
4951 int branch_taken
= condition_true (dsc
->u
.branch
.cond
, status
);
4952 enum pc_write_style write_pc
= dsc
->u
.branch
.exchange
4953 ? BX_WRITE_PC
: BRANCH_WRITE_PC
;
4958 if (dsc
->u
.branch
.link
)
4960 /* The value of LR should be the next insn of current one. In order
4961 not to confuse logic hanlding later insn `bx lr', if current insn mode
4962 is Thumb, the bit 0 of LR value should be set to 1. */
4963 ULONGEST next_insn_addr
= dsc
->insn_addr
+ dsc
->insn_size
;
4966 next_insn_addr
|= 0x1;
4968 displaced_write_reg (regs
, dsc
, ARM_LR_REGNUM
, next_insn_addr
,
4972 displaced_write_reg (regs
, dsc
, ARM_PC_REGNUM
, dsc
->u
.branch
.dest
, write_pc
);
4975 /* Copy B/BL/BLX instructions with immediate destinations. */
4978 install_b_bl_blx (struct gdbarch
*gdbarch
, struct regcache
*regs
,
4979 struct displaced_step_closure
*dsc
,
4980 unsigned int cond
, int exchange
, int link
, long offset
)
4982 /* Implement "BL<cond> <label>" as:
4984 Preparation: cond <- instruction condition
4985 Insn: mov r0, r0 (nop)
4986 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
4988 B<cond> similar, but don't set r14 in cleanup. */
4990 dsc
->u
.branch
.cond
= cond
;
4991 dsc
->u
.branch
.link
= link
;
4992 dsc
->u
.branch
.exchange
= exchange
;
4994 dsc
->u
.branch
.dest
= dsc
->insn_addr
;
4995 if (link
&& exchange
)
4996 /* For BLX, offset is computed from the Align (PC, 4). */
4997 dsc
->u
.branch
.dest
= dsc
->u
.branch
.dest
& 0xfffffffc;
5000 dsc
->u
.branch
.dest
+= 4 + offset
;
5002 dsc
->u
.branch
.dest
+= 8 + offset
;
5004 dsc
->cleanup
= &cleanup_branch
;
5007 arm_copy_b_bl_blx (struct gdbarch
*gdbarch
, uint32_t insn
,
5008 struct regcache
*regs
, struct displaced_step_closure
*dsc
)
5010 unsigned int cond
= bits (insn
, 28, 31);
5011 int exchange
= (cond
== 0xf);
5012 int link
= exchange
|| bit (insn
, 24);
5015 if (debug_displaced
)
5016 fprintf_unfiltered (gdb_stdlog
, "displaced: copying %s immediate insn "
5017 "%.8lx\n", (exchange
) ? "blx" : (link
) ? "bl" : "b",
5018 (unsigned long) insn
);
5020 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
5021 then arrange the switch into Thumb mode. */
5022 offset
= (bits (insn
, 0, 23) << 2) | (bit (insn
, 24) << 1) | 1;
5024 offset
= bits (insn
, 0, 23) << 2;
5026 if (bit (offset
, 25))
5027 offset
= offset
| ~0x3ffffff;
5029 dsc
->modinsn
[0] = ARM_NOP
;
5031 install_b_bl_blx (gdbarch
, regs
, dsc
, cond
, exchange
, link
, offset
);
5036 thumb2_copy_b_bl_blx (struct gdbarch
*gdbarch
, uint16_t insn1
,
5037 uint16_t insn2
, struct regcache
*regs
,
5038 struct displaced_step_closure
*dsc
)
5040 int link
= bit (insn2
, 14);
5041 int exchange
= link
&& !bit (insn2
, 12);
5044 int j1
= bit (insn2
, 13);
5045 int j2
= bit (insn2
, 11);
5046 int s
= sbits (insn1
, 10, 10);
5047 int i1
= !(j1
^ bit (insn1
, 10));
5048 int i2
= !(j2
^ bit (insn1
, 10));
5050 if (!link
&& !exchange
) /* B */
5052 offset
= (bits (insn2
, 0, 10) << 1);
5053 if (bit (insn2
, 12)) /* Encoding T4 */
5055 offset
|= (bits (insn1
, 0, 9) << 12)
5061 else /* Encoding T3 */
5063 offset
|= (bits (insn1
, 0, 5) << 12)
5067 cond
= bits (insn1
, 6, 9);
5072 offset
= (bits (insn1
, 0, 9) << 12);
5073 offset
|= ((i2
<< 22) | (i1
<< 23) | (s
<< 24));
5074 offset
|= exchange
?
5075 (bits (insn2
, 1, 10) << 2) : (bits (insn2
, 0, 10) << 1);
5078 if (debug_displaced
)
5079 fprintf_unfiltered (gdb_stdlog
, "displaced: copying %s insn "
5080 "%.4x %.4x with offset %.8lx\n",
5081 link
? (exchange
) ? "blx" : "bl" : "b",
5082 insn1
, insn2
, offset
);
5084 dsc
->modinsn
[0] = THUMB_NOP
;
5086 install_b_bl_blx (gdbarch
, regs
, dsc
, cond
, exchange
, link
, offset
);
5090 /* Copy B Thumb instructions. */
5092 thumb_copy_b (struct gdbarch
*gdbarch
, uint16_t insn
,
5093 struct displaced_step_closure
*dsc
)
5095 unsigned int cond
= 0;
5097 unsigned short bit_12_15
= bits (insn
, 12, 15);
5098 CORE_ADDR from
= dsc
->insn_addr
;
5100 if (bit_12_15
== 0xd)
5102 /* offset = SignExtend (imm8:0, 32) */
5103 offset
= sbits ((insn
<< 1), 0, 8);
5104 cond
= bits (insn
, 8, 11);
5106 else if (bit_12_15
== 0xe) /* Encoding T2 */
5108 offset
= sbits ((insn
<< 1), 0, 11);
5112 if (debug_displaced
)
5113 fprintf_unfiltered (gdb_stdlog
,
5114 "displaced: copying b immediate insn %.4x "
5115 "with offset %d\n", insn
, offset
);
5117 dsc
->u
.branch
.cond
= cond
;
5118 dsc
->u
.branch
.link
= 0;
5119 dsc
->u
.branch
.exchange
= 0;
5120 dsc
->u
.branch
.dest
= from
+ 4 + offset
;
5122 dsc
->modinsn
[0] = THUMB_NOP
;
5124 dsc
->cleanup
= &cleanup_branch
;
5129 /* Copy BX/BLX with register-specified destinations. */
5132 install_bx_blx_reg (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5133 struct displaced_step_closure
*dsc
, int link
,
5134 unsigned int cond
, unsigned int rm
)
5136 /* Implement {BX,BLX}<cond> <reg>" as:
5138 Preparation: cond <- instruction condition
5139 Insn: mov r0, r0 (nop)
5140 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5142 Don't set r14 in cleanup for BX. */
5144 dsc
->u
.branch
.dest
= displaced_read_reg (regs
, dsc
, rm
);
5146 dsc
->u
.branch
.cond
= cond
;
5147 dsc
->u
.branch
.link
= link
;
5149 dsc
->u
.branch
.exchange
= 1;
5151 dsc
->cleanup
= &cleanup_branch
;
5155 arm_copy_bx_blx_reg (struct gdbarch
*gdbarch
, uint32_t insn
,
5156 struct regcache
*regs
, struct displaced_step_closure
*dsc
)
5158 unsigned int cond
= bits (insn
, 28, 31);
5161 int link
= bit (insn
, 5);
5162 unsigned int rm
= bits (insn
, 0, 3);
5164 if (debug_displaced
)
5165 fprintf_unfiltered (gdb_stdlog
, "displaced: copying insn %.8lx",
5166 (unsigned long) insn
);
5168 dsc
->modinsn
[0] = ARM_NOP
;
5170 install_bx_blx_reg (gdbarch
, regs
, dsc
, link
, cond
, rm
);
5175 thumb_copy_bx_blx_reg (struct gdbarch
*gdbarch
, uint16_t insn
,
5176 struct regcache
*regs
,
5177 struct displaced_step_closure
*dsc
)
5179 int link
= bit (insn
, 7);
5180 unsigned int rm
= bits (insn
, 3, 6);
5182 if (debug_displaced
)
5183 fprintf_unfiltered (gdb_stdlog
, "displaced: copying insn %.4x",
5184 (unsigned short) insn
);
5186 dsc
->modinsn
[0] = THUMB_NOP
;
5188 install_bx_blx_reg (gdbarch
, regs
, dsc
, link
, INST_AL
, rm
);
5194 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
5197 cleanup_alu_imm (struct gdbarch
*gdbarch
,
5198 struct regcache
*regs
, struct displaced_step_closure
*dsc
)
5200 ULONGEST rd_val
= displaced_read_reg (regs
, dsc
, 0);
5201 displaced_write_reg (regs
, dsc
, 0, dsc
->tmp
[0], CANNOT_WRITE_PC
);
5202 displaced_write_reg (regs
, dsc
, 1, dsc
->tmp
[1], CANNOT_WRITE_PC
);
5203 displaced_write_reg (regs
, dsc
, dsc
->rd
, rd_val
, ALU_WRITE_PC
);
5207 arm_copy_alu_imm (struct gdbarch
*gdbarch
, uint32_t insn
, struct regcache
*regs
,
5208 struct displaced_step_closure
*dsc
)
5210 unsigned int rn
= bits (insn
, 16, 19);
5211 unsigned int rd
= bits (insn
, 12, 15);
5212 unsigned int op
= bits (insn
, 21, 24);
5213 int is_mov
= (op
== 0xd);
5214 ULONGEST rd_val
, rn_val
;
5216 if (!insn_references_pc (insn
, 0x000ff000ul
))
5217 return arm_copy_unmodified (gdbarch
, insn
, "ALU immediate", dsc
);
5219 if (debug_displaced
)
5220 fprintf_unfiltered (gdb_stdlog
, "displaced: copying immediate %s insn "
5221 "%.8lx\n", is_mov
? "move" : "ALU",
5222 (unsigned long) insn
);
5224 /* Instruction is of form:
5226 <op><cond> rd, [rn,] #imm
5230 Preparation: tmp1, tmp2 <- r0, r1;
5232 Insn: <op><cond> r0, r1, #imm
5233 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5236 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5237 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
5238 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
5239 rd_val
= displaced_read_reg (regs
, dsc
, rd
);
5240 displaced_write_reg (regs
, dsc
, 0, rd_val
, CANNOT_WRITE_PC
);
5241 displaced_write_reg (regs
, dsc
, 1, rn_val
, CANNOT_WRITE_PC
);
5245 dsc
->modinsn
[0] = insn
& 0xfff00fff;
5247 dsc
->modinsn
[0] = (insn
& 0xfff00fff) | 0x10000;
5249 dsc
->cleanup
= &cleanup_alu_imm
;
5255 thumb2_copy_alu_imm (struct gdbarch
*gdbarch
, uint16_t insn1
,
5256 uint16_t insn2
, struct regcache
*regs
,
5257 struct displaced_step_closure
*dsc
)
5259 unsigned int op
= bits (insn1
, 5, 8);
5260 unsigned int rn
, rm
, rd
;
5261 ULONGEST rd_val
, rn_val
;
5263 rn
= bits (insn1
, 0, 3); /* Rn */
5264 rm
= bits (insn2
, 0, 3); /* Rm */
5265 rd
= bits (insn2
, 8, 11); /* Rd */
5267 /* This routine is only called for instruction MOV. */
5268 gdb_assert (op
== 0x2 && rn
== 0xf);
5270 if (rm
!= ARM_PC_REGNUM
&& rd
!= ARM_PC_REGNUM
)
5271 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
, "ALU imm", dsc
);
5273 if (debug_displaced
)
5274 fprintf_unfiltered (gdb_stdlog
, "displaced: copying reg %s insn %.4x%.4x\n",
5275 "ALU", insn1
, insn2
);
5277 /* Instruction is of form:
5279 <op><cond> rd, [rn,] #imm
5283 Preparation: tmp1, tmp2 <- r0, r1;
5285 Insn: <op><cond> r0, r1, #imm
5286 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5289 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5290 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
5291 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
5292 rd_val
= displaced_read_reg (regs
, dsc
, rd
);
5293 displaced_write_reg (regs
, dsc
, 0, rd_val
, CANNOT_WRITE_PC
);
5294 displaced_write_reg (regs
, dsc
, 1, rn_val
, CANNOT_WRITE_PC
);
5297 dsc
->modinsn
[0] = insn1
;
5298 dsc
->modinsn
[1] = ((insn2
& 0xf0f0) | 0x1);
5301 dsc
->cleanup
= &cleanup_alu_imm
;
5306 /* Copy/cleanup arithmetic/logic insns with register RHS. */
5309 cleanup_alu_reg (struct gdbarch
*gdbarch
,
5310 struct regcache
*regs
, struct displaced_step_closure
*dsc
)
5315 rd_val
= displaced_read_reg (regs
, dsc
, 0);
5317 for (i
= 0; i
< 3; i
++)
5318 displaced_write_reg (regs
, dsc
, i
, dsc
->tmp
[i
], CANNOT_WRITE_PC
);
5320 displaced_write_reg (regs
, dsc
, dsc
->rd
, rd_val
, ALU_WRITE_PC
);
5324 install_alu_reg (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5325 struct displaced_step_closure
*dsc
,
5326 unsigned int rd
, unsigned int rn
, unsigned int rm
)
5328 ULONGEST rd_val
, rn_val
, rm_val
;
5330 /* Instruction is of form:
5332 <op><cond> rd, [rn,] rm [, <shift>]
5336 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
5337 r0, r1, r2 <- rd, rn, rm
5338 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
5339 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
5342 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5343 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
5344 dsc
->tmp
[2] = displaced_read_reg (regs
, dsc
, 2);
5345 rd_val
= displaced_read_reg (regs
, dsc
, rd
);
5346 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
5347 rm_val
= displaced_read_reg (regs
, dsc
, rm
);
5348 displaced_write_reg (regs
, dsc
, 0, rd_val
, CANNOT_WRITE_PC
);
5349 displaced_write_reg (regs
, dsc
, 1, rn_val
, CANNOT_WRITE_PC
);
5350 displaced_write_reg (regs
, dsc
, 2, rm_val
, CANNOT_WRITE_PC
);
5353 dsc
->cleanup
= &cleanup_alu_reg
;
5357 arm_copy_alu_reg (struct gdbarch
*gdbarch
, uint32_t insn
, struct regcache
*regs
,
5358 struct displaced_step_closure
*dsc
)
5360 unsigned int op
= bits (insn
, 21, 24);
5361 int is_mov
= (op
== 0xd);
5363 if (!insn_references_pc (insn
, 0x000ff00ful
))
5364 return arm_copy_unmodified (gdbarch
, insn
, "ALU reg", dsc
);
5366 if (debug_displaced
)
5367 fprintf_unfiltered (gdb_stdlog
, "displaced: copying reg %s insn %.8lx\n",
5368 is_mov
? "move" : "ALU", (unsigned long) insn
);
5371 dsc
->modinsn
[0] = (insn
& 0xfff00ff0) | 0x2;
5373 dsc
->modinsn
[0] = (insn
& 0xfff00ff0) | 0x10002;
5375 install_alu_reg (gdbarch
, regs
, dsc
, bits (insn
, 12, 15), bits (insn
, 16, 19),
5381 thumb_copy_alu_reg (struct gdbarch
*gdbarch
, uint16_t insn
,
5382 struct regcache
*regs
,
5383 struct displaced_step_closure
*dsc
)
5387 rm
= bits (insn
, 3, 6);
5388 rd
= (bit (insn
, 7) << 3) | bits (insn
, 0, 2);
5390 if (rd
!= ARM_PC_REGNUM
&& rm
!= ARM_PC_REGNUM
)
5391 return thumb_copy_unmodified_16bit (gdbarch
, insn
, "ALU reg", dsc
);
5393 if (debug_displaced
)
5394 fprintf_unfiltered (gdb_stdlog
, "displaced: copying ALU reg insn %.4x\n",
5395 (unsigned short) insn
);
5397 dsc
->modinsn
[0] = ((insn
& 0xff00) | 0x10);
5399 install_alu_reg (gdbarch
, regs
, dsc
, rd
, rd
, rm
);
5404 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
5407 cleanup_alu_shifted_reg (struct gdbarch
*gdbarch
,
5408 struct regcache
*regs
,
5409 struct displaced_step_closure
*dsc
)
5411 ULONGEST rd_val
= displaced_read_reg (regs
, dsc
, 0);
5414 for (i
= 0; i
< 4; i
++)
5415 displaced_write_reg (regs
, dsc
, i
, dsc
->tmp
[i
], CANNOT_WRITE_PC
);
5417 displaced_write_reg (regs
, dsc
, dsc
->rd
, rd_val
, ALU_WRITE_PC
);
5421 install_alu_shifted_reg (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5422 struct displaced_step_closure
*dsc
,
5423 unsigned int rd
, unsigned int rn
, unsigned int rm
,
5427 ULONGEST rd_val
, rn_val
, rm_val
, rs_val
;
5429 /* Instruction is of form:
5431 <op><cond> rd, [rn,] rm, <shift> rs
5435 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
5436 r0, r1, r2, r3 <- rd, rn, rm, rs
5437 Insn: <op><cond> r0, r1, r2, <shift> r3
5439 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
5443 for (i
= 0; i
< 4; i
++)
5444 dsc
->tmp
[i
] = displaced_read_reg (regs
, dsc
, i
);
5446 rd_val
= displaced_read_reg (regs
, dsc
, rd
);
5447 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
5448 rm_val
= displaced_read_reg (regs
, dsc
, rm
);
5449 rs_val
= displaced_read_reg (regs
, dsc
, rs
);
5450 displaced_write_reg (regs
, dsc
, 0, rd_val
, CANNOT_WRITE_PC
);
5451 displaced_write_reg (regs
, dsc
, 1, rn_val
, CANNOT_WRITE_PC
);
5452 displaced_write_reg (regs
, dsc
, 2, rm_val
, CANNOT_WRITE_PC
);
5453 displaced_write_reg (regs
, dsc
, 3, rs_val
, CANNOT_WRITE_PC
);
5455 dsc
->cleanup
= &cleanup_alu_shifted_reg
;
5459 arm_copy_alu_shifted_reg (struct gdbarch
*gdbarch
, uint32_t insn
,
5460 struct regcache
*regs
,
5461 struct displaced_step_closure
*dsc
)
5463 unsigned int op
= bits (insn
, 21, 24);
5464 int is_mov
= (op
== 0xd);
5465 unsigned int rd
, rn
, rm
, rs
;
5467 if (!insn_references_pc (insn
, 0x000fff0ful
))
5468 return arm_copy_unmodified (gdbarch
, insn
, "ALU shifted reg", dsc
);
5470 if (debug_displaced
)
5471 fprintf_unfiltered (gdb_stdlog
, "displaced: copying shifted reg %s insn "
5472 "%.8lx\n", is_mov
? "move" : "ALU",
5473 (unsigned long) insn
);
5475 rn
= bits (insn
, 16, 19);
5476 rm
= bits (insn
, 0, 3);
5477 rs
= bits (insn
, 8, 11);
5478 rd
= bits (insn
, 12, 15);
5481 dsc
->modinsn
[0] = (insn
& 0xfff000f0) | 0x302;
5483 dsc
->modinsn
[0] = (insn
& 0xfff000f0) | 0x10302;
5485 install_alu_shifted_reg (gdbarch
, regs
, dsc
, rd
, rn
, rm
, rs
);
5490 /* Clean up load instructions. */
5493 cleanup_load (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5494 struct displaced_step_closure
*dsc
)
5496 ULONGEST rt_val
, rt_val2
= 0, rn_val
;
5498 rt_val
= displaced_read_reg (regs
, dsc
, 0);
5499 if (dsc
->u
.ldst
.xfersize
== 8)
5500 rt_val2
= displaced_read_reg (regs
, dsc
, 1);
5501 rn_val
= displaced_read_reg (regs
, dsc
, 2);
5503 displaced_write_reg (regs
, dsc
, 0, dsc
->tmp
[0], CANNOT_WRITE_PC
);
5504 if (dsc
->u
.ldst
.xfersize
> 4)
5505 displaced_write_reg (regs
, dsc
, 1, dsc
->tmp
[1], CANNOT_WRITE_PC
);
5506 displaced_write_reg (regs
, dsc
, 2, dsc
->tmp
[2], CANNOT_WRITE_PC
);
5507 if (!dsc
->u
.ldst
.immed
)
5508 displaced_write_reg (regs
, dsc
, 3, dsc
->tmp
[3], CANNOT_WRITE_PC
);
5510 /* Handle register writeback. */
5511 if (dsc
->u
.ldst
.writeback
)
5512 displaced_write_reg (regs
, dsc
, dsc
->u
.ldst
.rn
, rn_val
, CANNOT_WRITE_PC
);
5513 /* Put result in right place. */
5514 displaced_write_reg (regs
, dsc
, dsc
->rd
, rt_val
, LOAD_WRITE_PC
);
5515 if (dsc
->u
.ldst
.xfersize
== 8)
5516 displaced_write_reg (regs
, dsc
, dsc
->rd
+ 1, rt_val2
, LOAD_WRITE_PC
);
5519 /* Clean up store instructions. */
5522 cleanup_store (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5523 struct displaced_step_closure
*dsc
)
5525 ULONGEST rn_val
= displaced_read_reg (regs
, dsc
, 2);
5527 displaced_write_reg (regs
, dsc
, 0, dsc
->tmp
[0], CANNOT_WRITE_PC
);
5528 if (dsc
->u
.ldst
.xfersize
> 4)
5529 displaced_write_reg (regs
, dsc
, 1, dsc
->tmp
[1], CANNOT_WRITE_PC
);
5530 displaced_write_reg (regs
, dsc
, 2, dsc
->tmp
[2], CANNOT_WRITE_PC
);
5531 if (!dsc
->u
.ldst
.immed
)
5532 displaced_write_reg (regs
, dsc
, 3, dsc
->tmp
[3], CANNOT_WRITE_PC
);
5533 if (!dsc
->u
.ldst
.restore_r4
)
5534 displaced_write_reg (regs
, dsc
, 4, dsc
->tmp
[4], CANNOT_WRITE_PC
);
5537 if (dsc
->u
.ldst
.writeback
)
5538 displaced_write_reg (regs
, dsc
, dsc
->u
.ldst
.rn
, rn_val
, CANNOT_WRITE_PC
);
5541 /* Copy "extra" load/store instructions. These are halfword/doubleword
5542 transfers, which have a different encoding to byte/word transfers. */
5545 arm_copy_extra_ld_st (struct gdbarch
*gdbarch
, uint32_t insn
, int unprivileged
,
5546 struct regcache
*regs
, struct displaced_step_closure
*dsc
)
5548 unsigned int op1
= bits (insn
, 20, 24);
5549 unsigned int op2
= bits (insn
, 5, 6);
5550 unsigned int rt
= bits (insn
, 12, 15);
5551 unsigned int rn
= bits (insn
, 16, 19);
5552 unsigned int rm
= bits (insn
, 0, 3);
5553 char load
[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
5554 char bytesize
[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
5555 int immed
= (op1
& 0x4) != 0;
5557 ULONGEST rt_val
, rt_val2
= 0, rn_val
, rm_val
= 0;
5559 if (!insn_references_pc (insn
, 0x000ff00ful
))
5560 return arm_copy_unmodified (gdbarch
, insn
, "extra load/store", dsc
);
5562 if (debug_displaced
)
5563 fprintf_unfiltered (gdb_stdlog
, "displaced: copying %sextra load/store "
5564 "insn %.8lx\n", unprivileged
? "unprivileged " : "",
5565 (unsigned long) insn
);
5567 opcode
= ((op2
<< 2) | (op1
& 0x1) | ((op1
& 0x4) >> 1)) - 4;
5570 internal_error (__FILE__
, __LINE__
,
5571 _("copy_extra_ld_st: instruction decode error"));
5573 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5574 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
5575 dsc
->tmp
[2] = displaced_read_reg (regs
, dsc
, 2);
5577 dsc
->tmp
[3] = displaced_read_reg (regs
, dsc
, 3);
5579 rt_val
= displaced_read_reg (regs
, dsc
, rt
);
5580 if (bytesize
[opcode
] == 8)
5581 rt_val2
= displaced_read_reg (regs
, dsc
, rt
+ 1);
5582 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
5584 rm_val
= displaced_read_reg (regs
, dsc
, rm
);
5586 displaced_write_reg (regs
, dsc
, 0, rt_val
, CANNOT_WRITE_PC
);
5587 if (bytesize
[opcode
] == 8)
5588 displaced_write_reg (regs
, dsc
, 1, rt_val2
, CANNOT_WRITE_PC
);
5589 displaced_write_reg (regs
, dsc
, 2, rn_val
, CANNOT_WRITE_PC
);
5591 displaced_write_reg (regs
, dsc
, 3, rm_val
, CANNOT_WRITE_PC
);
5594 dsc
->u
.ldst
.xfersize
= bytesize
[opcode
];
5595 dsc
->u
.ldst
.rn
= rn
;
5596 dsc
->u
.ldst
.immed
= immed
;
5597 dsc
->u
.ldst
.writeback
= bit (insn
, 24) == 0 || bit (insn
, 21) != 0;
5598 dsc
->u
.ldst
.restore_r4
= 0;
5601 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
5603 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
5604 dsc
->modinsn
[0] = (insn
& 0xfff00fff) | 0x20000;
5606 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
5608 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
5609 dsc
->modinsn
[0] = (insn
& 0xfff00ff0) | 0x20003;
5611 dsc
->cleanup
= load
[opcode
] ? &cleanup_load
: &cleanup_store
;
5616 /* Copy byte/half word/word loads and stores. */
5619 install_load_store (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5620 struct displaced_step_closure
*dsc
, int load
,
5621 int immed
, int writeback
, int size
, int usermode
,
5622 int rt
, int rm
, int rn
)
5624 ULONGEST rt_val
, rn_val
, rm_val
= 0;
5626 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5627 dsc
->tmp
[2] = displaced_read_reg (regs
, dsc
, 2);
5629 dsc
->tmp
[3] = displaced_read_reg (regs
, dsc
, 3);
5631 dsc
->tmp
[4] = displaced_read_reg (regs
, dsc
, 4);
5633 rt_val
= displaced_read_reg (regs
, dsc
, rt
);
5634 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
5636 rm_val
= displaced_read_reg (regs
, dsc
, rm
);
5638 displaced_write_reg (regs
, dsc
, 0, rt_val
, CANNOT_WRITE_PC
);
5639 displaced_write_reg (regs
, dsc
, 2, rn_val
, CANNOT_WRITE_PC
);
5641 displaced_write_reg (regs
, dsc
, 3, rm_val
, CANNOT_WRITE_PC
);
5643 dsc
->u
.ldst
.xfersize
= size
;
5644 dsc
->u
.ldst
.rn
= rn
;
5645 dsc
->u
.ldst
.immed
= immed
;
5646 dsc
->u
.ldst
.writeback
= writeback
;
5648 /* To write PC we can do:
5650 Before this sequence of instructions:
5651 r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
5652 r2 is the Rn value got from dispalced_read_reg.
5654 Insn1: push {pc} Write address of STR instruction + offset on stack
5655 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
5656 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
5657 = addr(Insn1) + offset - addr(Insn3) - 8
5659 Insn4: add r4, r4, #8 r4 = offset - 8
5660 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
5662 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
5664 Otherwise we don't know what value to write for PC, since the offset is
5665 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
5666 of this can be found in Section "Saving from r15" in
5667 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
5669 dsc
->cleanup
= load
? &cleanup_load
: &cleanup_store
;
5674 thumb2_copy_load_literal (struct gdbarch
*gdbarch
, uint16_t insn1
,
5675 uint16_t insn2
, struct regcache
*regs
,
5676 struct displaced_step_closure
*dsc
, int size
)
5678 unsigned int u_bit
= bit (insn1
, 7);
5679 unsigned int rt
= bits (insn2
, 12, 15);
5680 int imm12
= bits (insn2
, 0, 11);
5683 if (debug_displaced
)
5684 fprintf_unfiltered (gdb_stdlog
,
5685 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
5686 (unsigned int) dsc
->insn_addr
, rt
, u_bit
? '+' : '-',
5692 /* Rewrite instruction LDR Rt imm12 into:
5694 Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12
5698 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2]. */
5701 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5702 dsc
->tmp
[2] = displaced_read_reg (regs
, dsc
, 2);
5703 dsc
->tmp
[3] = displaced_read_reg (regs
, dsc
, 3);
5705 pc_val
= displaced_read_reg (regs
, dsc
, ARM_PC_REGNUM
);
5707 pc_val
= pc_val
& 0xfffffffc;
5709 displaced_write_reg (regs
, dsc
, 2, pc_val
, CANNOT_WRITE_PC
);
5710 displaced_write_reg (regs
, dsc
, 3, imm12
, CANNOT_WRITE_PC
);
5714 dsc
->u
.ldst
.xfersize
= size
;
5715 dsc
->u
.ldst
.immed
= 0;
5716 dsc
->u
.ldst
.writeback
= 0;
5717 dsc
->u
.ldst
.restore_r4
= 0;
5719 /* LDR R0, R2, R3 */
5720 dsc
->modinsn
[0] = 0xf852;
5721 dsc
->modinsn
[1] = 0x3;
5724 dsc
->cleanup
= &cleanup_load
;
5730 thumb2_copy_load_reg_imm (struct gdbarch
*gdbarch
, uint16_t insn1
,
5731 uint16_t insn2
, struct regcache
*regs
,
5732 struct displaced_step_closure
*dsc
,
5733 int writeback
, int immed
)
5735 unsigned int rt
= bits (insn2
, 12, 15);
5736 unsigned int rn
= bits (insn1
, 0, 3);
5737 unsigned int rm
= bits (insn2
, 0, 3); /* Only valid if !immed. */
5738 /* In LDR (register), there is also a register Rm, which is not allowed to
5739 be PC, so we don't have to check it. */
5741 if (rt
!= ARM_PC_REGNUM
&& rn
!= ARM_PC_REGNUM
)
5742 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
, "load",
5745 if (debug_displaced
)
5746 fprintf_unfiltered (gdb_stdlog
,
5747 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
5748 rt
, rn
, insn1
, insn2
);
5750 install_load_store (gdbarch
, regs
, dsc
, 1, immed
, writeback
, 4,
5753 dsc
->u
.ldst
.restore_r4
= 0;
5756 /* ldr[b]<cond> rt, [rn, #imm], etc.
5758 ldr[b]<cond> r0, [r2, #imm]. */
5760 dsc
->modinsn
[0] = (insn1
& 0xfff0) | 0x2;
5761 dsc
->modinsn
[1] = insn2
& 0x0fff;
5764 /* ldr[b]<cond> rt, [rn, rm], etc.
5766 ldr[b]<cond> r0, [r2, r3]. */
5768 dsc
->modinsn
[0] = (insn1
& 0xfff0) | 0x2;
5769 dsc
->modinsn
[1] = (insn2
& 0x0ff0) | 0x3;
5779 arm_copy_ldr_str_ldrb_strb (struct gdbarch
*gdbarch
, uint32_t insn
,
5780 struct regcache
*regs
,
5781 struct displaced_step_closure
*dsc
,
5782 int load
, int size
, int usermode
)
5784 int immed
= !bit (insn
, 25);
5785 int writeback
= (bit (insn
, 24) == 0 || bit (insn
, 21) != 0);
5786 unsigned int rt
= bits (insn
, 12, 15);
5787 unsigned int rn
= bits (insn
, 16, 19);
5788 unsigned int rm
= bits (insn
, 0, 3); /* Only valid if !immed. */
5790 if (!insn_references_pc (insn
, 0x000ff00ful
))
5791 return arm_copy_unmodified (gdbarch
, insn
, "load/store", dsc
);
5793 if (debug_displaced
)
5794 fprintf_unfiltered (gdb_stdlog
,
5795 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
5796 load
? (size
== 1 ? "ldrb" : "ldr")
5797 : (size
== 1 ? "strb" : "str"), usermode
? "t" : "",
5799 (unsigned long) insn
);
5801 install_load_store (gdbarch
, regs
, dsc
, load
, immed
, writeback
, size
,
5802 usermode
, rt
, rm
, rn
);
5804 if (load
|| rt
!= ARM_PC_REGNUM
)
5806 dsc
->u
.ldst
.restore_r4
= 0;
5809 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
5811 {ldr,str}[b]<cond> r0, [r2, #imm]. */
5812 dsc
->modinsn
[0] = (insn
& 0xfff00fff) | 0x20000;
5814 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
5816 {ldr,str}[b]<cond> r0, [r2, r3]. */
5817 dsc
->modinsn
[0] = (insn
& 0xfff00ff0) | 0x20003;
5821 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
5822 dsc
->u
.ldst
.restore_r4
= 1;
5823 dsc
->modinsn
[0] = 0xe92d8000; /* push {pc} */
5824 dsc
->modinsn
[1] = 0xe8bd0010; /* pop {r4} */
5825 dsc
->modinsn
[2] = 0xe044400f; /* sub r4, r4, pc. */
5826 dsc
->modinsn
[3] = 0xe2844008; /* add r4, r4, #8. */
5827 dsc
->modinsn
[4] = 0xe0800004; /* add r0, r0, r4. */
5831 dsc
->modinsn
[5] = (insn
& 0xfff00fff) | 0x20000;
5833 dsc
->modinsn
[5] = (insn
& 0xfff00ff0) | 0x20003;
5838 dsc
->cleanup
= load
? &cleanup_load
: &cleanup_store
;
5843 /* Cleanup LDM instructions with fully-populated register list. This is an
5844 unfortunate corner case: it's impossible to implement correctly by modifying
5845 the instruction. The issue is as follows: we have an instruction,
5849 which we must rewrite to avoid loading PC. A possible solution would be to
5850 do the load in two halves, something like (with suitable cleanup
5854 ldm[id][ab] r8!, {r0-r7}
5856 ldm[id][ab] r8, {r7-r14}
5859 but at present there's no suitable place for <temp>, since the scratch space
5860 is overwritten before the cleanup routine is called. For now, we simply
5861 emulate the instruction. */
5864 cleanup_block_load_all (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5865 struct displaced_step_closure
*dsc
)
5867 int inc
= dsc
->u
.block
.increment
;
5868 int bump_before
= dsc
->u
.block
.before
? (inc
? 4 : -4) : 0;
5869 int bump_after
= dsc
->u
.block
.before
? 0 : (inc
? 4 : -4);
5870 uint32_t regmask
= dsc
->u
.block
.regmask
;
5871 int regno
= inc
? 0 : 15;
5872 CORE_ADDR xfer_addr
= dsc
->u
.block
.xfer_addr
;
5873 int exception_return
= dsc
->u
.block
.load
&& dsc
->u
.block
.user
5874 && (regmask
& 0x8000) != 0;
5875 uint32_t status
= displaced_read_reg (regs
, dsc
, ARM_PS_REGNUM
);
5876 int do_transfer
= condition_true (dsc
->u
.block
.cond
, status
);
5877 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
5882 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
5883 sensible we can do here. Complain loudly. */
5884 if (exception_return
)
5885 error (_("Cannot single-step exception return"));
5887 /* We don't handle any stores here for now. */
5888 gdb_assert (dsc
->u
.block
.load
!= 0);
5890 if (debug_displaced
)
5891 fprintf_unfiltered (gdb_stdlog
, "displaced: emulating block transfer: "
5892 "%s %s %s\n", dsc
->u
.block
.load
? "ldm" : "stm",
5893 dsc
->u
.block
.increment
? "inc" : "dec",
5894 dsc
->u
.block
.before
? "before" : "after");
5901 while (regno
<= ARM_PC_REGNUM
&& (regmask
& (1 << regno
)) == 0)
5904 while (regno
>= 0 && (regmask
& (1 << regno
)) == 0)
5907 xfer_addr
+= bump_before
;
5909 memword
= read_memory_unsigned_integer (xfer_addr
, 4, byte_order
);
5910 displaced_write_reg (regs
, dsc
, regno
, memword
, LOAD_WRITE_PC
);
5912 xfer_addr
+= bump_after
;
5914 regmask
&= ~(1 << regno
);
5917 if (dsc
->u
.block
.writeback
)
5918 displaced_write_reg (regs
, dsc
, dsc
->u
.block
.rn
, xfer_addr
,
5922 /* Clean up an STM which included the PC in the register list. */
5925 cleanup_block_store_pc (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5926 struct displaced_step_closure
*dsc
)
5928 uint32_t status
= displaced_read_reg (regs
, dsc
, ARM_PS_REGNUM
);
5929 int store_executed
= condition_true (dsc
->u
.block
.cond
, status
);
5930 CORE_ADDR pc_stored_at
, transferred_regs
= bitcount (dsc
->u
.block
.regmask
);
5931 CORE_ADDR stm_insn_addr
;
5934 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
5936 /* If condition code fails, there's nothing else to do. */
5937 if (!store_executed
)
5940 if (dsc
->u
.block
.increment
)
5942 pc_stored_at
= dsc
->u
.block
.xfer_addr
+ 4 * transferred_regs
;
5944 if (dsc
->u
.block
.before
)
5949 pc_stored_at
= dsc
->u
.block
.xfer_addr
;
5951 if (dsc
->u
.block
.before
)
5955 pc_val
= read_memory_unsigned_integer (pc_stored_at
, 4, byte_order
);
5956 stm_insn_addr
= dsc
->scratch_base
;
5957 offset
= pc_val
- stm_insn_addr
;
5959 if (debug_displaced
)
5960 fprintf_unfiltered (gdb_stdlog
, "displaced: detected PC offset %.8lx for "
5961 "STM instruction\n", offset
);
5963 /* Rewrite the stored PC to the proper value for the non-displaced original
5965 write_memory_unsigned_integer (pc_stored_at
, 4, byte_order
,
5966 dsc
->insn_addr
+ offset
);
5969 /* Clean up an LDM which includes the PC in the register list. We clumped all
5970 the registers in the transferred list into a contiguous range r0...rX (to
5971 avoid loading PC directly and losing control of the debugged program), so we
5972 must undo that here. */
5975 cleanup_block_load_pc (struct gdbarch
*gdbarch
,
5976 struct regcache
*regs
,
5977 struct displaced_step_closure
*dsc
)
5979 uint32_t status
= displaced_read_reg (regs
, dsc
, ARM_PS_REGNUM
);
5980 int load_executed
= condition_true (dsc
->u
.block
.cond
, status
);
5981 unsigned int mask
= dsc
->u
.block
.regmask
, write_reg
= ARM_PC_REGNUM
;
5982 unsigned int regs_loaded
= bitcount (mask
);
5983 unsigned int num_to_shuffle
= regs_loaded
, clobbered
;
5985 /* The method employed here will fail if the register list is fully populated
5986 (we need to avoid loading PC directly). */
5987 gdb_assert (num_to_shuffle
< 16);
5992 clobbered
= (1 << num_to_shuffle
) - 1;
5994 while (num_to_shuffle
> 0)
5996 if ((mask
& (1 << write_reg
)) != 0)
5998 unsigned int read_reg
= num_to_shuffle
- 1;
6000 if (read_reg
!= write_reg
)
6002 ULONGEST rval
= displaced_read_reg (regs
, dsc
, read_reg
);
6003 displaced_write_reg (regs
, dsc
, write_reg
, rval
, LOAD_WRITE_PC
);
6004 if (debug_displaced
)
6005 fprintf_unfiltered (gdb_stdlog
, _("displaced: LDM: move "
6006 "loaded register r%d to r%d\n"), read_reg
,
6009 else if (debug_displaced
)
6010 fprintf_unfiltered (gdb_stdlog
, _("displaced: LDM: register "
6011 "r%d already in the right place\n"),
6014 clobbered
&= ~(1 << write_reg
);
6022 /* Restore any registers we scribbled over. */
6023 for (write_reg
= 0; clobbered
!= 0; write_reg
++)
6025 if ((clobbered
& (1 << write_reg
)) != 0)
6027 displaced_write_reg (regs
, dsc
, write_reg
, dsc
->tmp
[write_reg
],
6029 if (debug_displaced
)
6030 fprintf_unfiltered (gdb_stdlog
, _("displaced: LDM: restored "
6031 "clobbered register r%d\n"), write_reg
);
6032 clobbered
&= ~(1 << write_reg
);
6036 /* Perform register writeback manually. */
6037 if (dsc
->u
.block
.writeback
)
6039 ULONGEST new_rn_val
= dsc
->u
.block
.xfer_addr
;
6041 if (dsc
->u
.block
.increment
)
6042 new_rn_val
+= regs_loaded
* 4;
6044 new_rn_val
-= regs_loaded
* 4;
6046 displaced_write_reg (regs
, dsc
, dsc
->u
.block
.rn
, new_rn_val
,
6051 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
6052 in user-level code (in particular exception return, ldm rn, {...pc}^). */
6055 arm_copy_block_xfer (struct gdbarch
*gdbarch
, uint32_t insn
,
6056 struct regcache
*regs
,
6057 struct displaced_step_closure
*dsc
)
6059 int load
= bit (insn
, 20);
6060 int user
= bit (insn
, 22);
6061 int increment
= bit (insn
, 23);
6062 int before
= bit (insn
, 24);
6063 int writeback
= bit (insn
, 21);
6064 int rn
= bits (insn
, 16, 19);
6066 /* Block transfers which don't mention PC can be run directly
6068 if (rn
!= ARM_PC_REGNUM
&& (insn
& 0x8000) == 0)
6069 return arm_copy_unmodified (gdbarch
, insn
, "ldm/stm", dsc
);
6071 if (rn
== ARM_PC_REGNUM
)
6073 warning (_("displaced: Unpredictable LDM or STM with "
6074 "base register r15"));
6075 return arm_copy_unmodified (gdbarch
, insn
, "unpredictable ldm/stm", dsc
);
6078 if (debug_displaced
)
6079 fprintf_unfiltered (gdb_stdlog
, "displaced: copying block transfer insn "
6080 "%.8lx\n", (unsigned long) insn
);
6082 dsc
->u
.block
.xfer_addr
= displaced_read_reg (regs
, dsc
, rn
);
6083 dsc
->u
.block
.rn
= rn
;
6085 dsc
->u
.block
.load
= load
;
6086 dsc
->u
.block
.user
= user
;
6087 dsc
->u
.block
.increment
= increment
;
6088 dsc
->u
.block
.before
= before
;
6089 dsc
->u
.block
.writeback
= writeback
;
6090 dsc
->u
.block
.cond
= bits (insn
, 28, 31);
6092 dsc
->u
.block
.regmask
= insn
& 0xffff;
6096 if ((insn
& 0xffff) == 0xffff)
6098 /* LDM with a fully-populated register list. This case is
6099 particularly tricky. Implement for now by fully emulating the
6100 instruction (which might not behave perfectly in all cases, but
6101 these instructions should be rare enough for that not to matter
6103 dsc
->modinsn
[0] = ARM_NOP
;
6105 dsc
->cleanup
= &cleanup_block_load_all
;
6109 /* LDM of a list of registers which includes PC. Implement by
6110 rewriting the list of registers to be transferred into a
6111 contiguous chunk r0...rX before doing the transfer, then shuffling
6112 registers into the correct places in the cleanup routine. */
6113 unsigned int regmask
= insn
& 0xffff;
6114 unsigned int num_in_list
= bitcount (regmask
), new_regmask
;
6117 for (i
= 0; i
< num_in_list
; i
++)
6118 dsc
->tmp
[i
] = displaced_read_reg (regs
, dsc
, i
);
6120 /* Writeback makes things complicated. We need to avoid clobbering
6121 the base register with one of the registers in our modified
6122 register list, but just using a different register can't work in
6125 ldm r14!, {r0-r13,pc}
6127 which would need to be rewritten as:
6131 but that can't work, because there's no free register for N.
6133 Solve this by turning off the writeback bit, and emulating
6134 writeback manually in the cleanup routine. */
6139 new_regmask
= (1 << num_in_list
) - 1;
6141 if (debug_displaced
)
6142 fprintf_unfiltered (gdb_stdlog
, _("displaced: LDM r%d%s, "
6143 "{..., pc}: original reg list %.4x, modified "
6144 "list %.4x\n"), rn
, writeback
? "!" : "",
6145 (int) insn
& 0xffff, new_regmask
);
6147 dsc
->modinsn
[0] = (insn
& ~0xffff) | (new_regmask
& 0xffff);
6149 dsc
->cleanup
= &cleanup_block_load_pc
;
6154 /* STM of a list of registers which includes PC. Run the instruction
6155 as-is, but out of line: this will store the wrong value for the PC,
6156 so we must manually fix up the memory in the cleanup routine.
6157 Doing things this way has the advantage that we can auto-detect
6158 the offset of the PC write (which is architecture-dependent) in
6159 the cleanup routine. */
6160 dsc
->modinsn
[0] = insn
;
6162 dsc
->cleanup
= &cleanup_block_store_pc
;
6169 thumb2_copy_block_xfer (struct gdbarch
*gdbarch
, uint16_t insn1
, uint16_t insn2
,
6170 struct regcache
*regs
,
6171 struct displaced_step_closure
*dsc
)
6173 int rn
= bits (insn1
, 0, 3);
6174 int load
= bit (insn1
, 4);
6175 int writeback
= bit (insn1
, 5);
6177 /* Block transfers which don't mention PC can be run directly
6179 if (rn
!= ARM_PC_REGNUM
&& (insn2
& 0x8000) == 0)
6180 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
, "ldm/stm", dsc
);
6182 if (rn
== ARM_PC_REGNUM
)
6184 warning (_("displaced: Unpredictable LDM or STM with "
6185 "base register r15"));
6186 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
6187 "unpredictable ldm/stm", dsc
);
6190 if (debug_displaced
)
6191 fprintf_unfiltered (gdb_stdlog
, "displaced: copying block transfer insn "
6192 "%.4x%.4x\n", insn1
, insn2
);
6194 /* Clear bit 13, since it should be always zero. */
6195 dsc
->u
.block
.regmask
= (insn2
& 0xdfff);
6196 dsc
->u
.block
.rn
= rn
;
6198 dsc
->u
.block
.load
= load
;
6199 dsc
->u
.block
.user
= 0;
6200 dsc
->u
.block
.increment
= bit (insn1
, 7);
6201 dsc
->u
.block
.before
= bit (insn1
, 8);
6202 dsc
->u
.block
.writeback
= writeback
;
6203 dsc
->u
.block
.cond
= INST_AL
;
6204 dsc
->u
.block
.xfer_addr
= displaced_read_reg (regs
, dsc
, rn
);
6208 if (dsc
->u
.block
.regmask
== 0xffff)
6210 /* This branch is impossible to happen. */
6215 unsigned int regmask
= dsc
->u
.block
.regmask
;
6216 unsigned int num_in_list
= bitcount (regmask
), new_regmask
;
6219 for (i
= 0; i
< num_in_list
; i
++)
6220 dsc
->tmp
[i
] = displaced_read_reg (regs
, dsc
, i
);
6225 new_regmask
= (1 << num_in_list
) - 1;
6227 if (debug_displaced
)
6228 fprintf_unfiltered (gdb_stdlog
, _("displaced: LDM r%d%s, "
6229 "{..., pc}: original reg list %.4x, modified "
6230 "list %.4x\n"), rn
, writeback
? "!" : "",
6231 (int) dsc
->u
.block
.regmask
, new_regmask
);
6233 dsc
->modinsn
[0] = insn1
;
6234 dsc
->modinsn
[1] = (new_regmask
& 0xffff);
6237 dsc
->cleanup
= &cleanup_block_load_pc
;
6242 dsc
->modinsn
[0] = insn1
;
6243 dsc
->modinsn
[1] = insn2
;
6245 dsc
->cleanup
= &cleanup_block_store_pc
;
6250 /* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
6251 This is used to avoid a dependency on BFD's bfd_endian enum. */
6254 arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr
, int len
,
6257 return read_memory_unsigned_integer (memaddr
, len
,
6258 (enum bfd_endian
) byte_order
);
6261 /* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
6264 arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs
*self
,
6267 return gdbarch_addr_bits_remove (get_regcache_arch (self
->regcache
), val
);
6270 /* Wrapper over syscall_next_pc for use in get_next_pcs. */
6273 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs
*self
)
6278 /* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
6281 arm_get_next_pcs_is_thumb (struct arm_get_next_pcs
*self
)
6283 return arm_is_thumb (self
->regcache
);
6286 /* single_step() is called just before we want to resume the inferior,
6287 if we want to single-step it but there is no hardware or kernel
6288 single-step support. We find the target of the coming instructions
6289 and breakpoint them. */
6292 arm_software_single_step (struct regcache
*regcache
)
6294 struct gdbarch
*gdbarch
= get_regcache_arch (regcache
);
6295 struct arm_get_next_pcs next_pcs_ctx
;
6298 VEC (CORE_ADDR
) *next_pcs
= NULL
;
6299 struct cleanup
*old_chain
= make_cleanup (VEC_cleanup (CORE_ADDR
), &next_pcs
);
6301 arm_get_next_pcs_ctor (&next_pcs_ctx
,
6302 &arm_get_next_pcs_ops
,
6303 gdbarch_byte_order (gdbarch
),
6304 gdbarch_byte_order_for_code (gdbarch
),
6308 next_pcs
= arm_get_next_pcs (&next_pcs_ctx
);
6310 for (i
= 0; VEC_iterate (CORE_ADDR
, next_pcs
, i
, pc
); i
++)
6312 pc
= gdbarch_addr_bits_remove (gdbarch
, pc
);
6313 VEC_replace (CORE_ADDR
, next_pcs
, i
, pc
);
6316 discard_cleanups (old_chain
);
6321 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6322 for Linux, where some SVC instructions must be treated specially. */
6325 cleanup_svc (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6326 struct displaced_step_closure
*dsc
)
6328 CORE_ADDR resume_addr
= dsc
->insn_addr
+ dsc
->insn_size
;
6330 if (debug_displaced
)
6331 fprintf_unfiltered (gdb_stdlog
, "displaced: cleanup for svc, resume at "
6332 "%.8lx\n", (unsigned long) resume_addr
);
6334 displaced_write_reg (regs
, dsc
, ARM_PC_REGNUM
, resume_addr
, BRANCH_WRITE_PC
);
6338 /* Common copy routine for svc instruciton. */
6341 install_svc (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6342 struct displaced_step_closure
*dsc
)
6344 /* Preparation: none.
6345 Insn: unmodified svc.
6346 Cleanup: pc <- insn_addr + insn_size. */
6348 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
6350 dsc
->wrote_to_pc
= 1;
6352 /* Allow OS-specific code to override SVC handling. */
6353 if (dsc
->u
.svc
.copy_svc_os
)
6354 return dsc
->u
.svc
.copy_svc_os (gdbarch
, regs
, dsc
);
6357 dsc
->cleanup
= &cleanup_svc
;
6363 arm_copy_svc (struct gdbarch
*gdbarch
, uint32_t insn
,
6364 struct regcache
*regs
, struct displaced_step_closure
*dsc
)
6367 if (debug_displaced
)
6368 fprintf_unfiltered (gdb_stdlog
, "displaced: copying svc insn %.8lx\n",
6369 (unsigned long) insn
);
6371 dsc
->modinsn
[0] = insn
;
6373 return install_svc (gdbarch
, regs
, dsc
);
6377 thumb_copy_svc (struct gdbarch
*gdbarch
, uint16_t insn
,
6378 struct regcache
*regs
, struct displaced_step_closure
*dsc
)
6381 if (debug_displaced
)
6382 fprintf_unfiltered (gdb_stdlog
, "displaced: copying svc insn %.4x\n",
6385 dsc
->modinsn
[0] = insn
;
6387 return install_svc (gdbarch
, regs
, dsc
);
6390 /* Copy undefined instructions. */
6393 arm_copy_undef (struct gdbarch
*gdbarch
, uint32_t insn
,
6394 struct displaced_step_closure
*dsc
)
6396 if (debug_displaced
)
6397 fprintf_unfiltered (gdb_stdlog
,
6398 "displaced: copying undefined insn %.8lx\n",
6399 (unsigned long) insn
);
6401 dsc
->modinsn
[0] = insn
;
6407 thumb_32bit_copy_undef (struct gdbarch
*gdbarch
, uint16_t insn1
, uint16_t insn2
,
6408 struct displaced_step_closure
*dsc
)
6411 if (debug_displaced
)
6412 fprintf_unfiltered (gdb_stdlog
, "displaced: copying undefined insn "
6413 "%.4x %.4x\n", (unsigned short) insn1
,
6414 (unsigned short) insn2
);
6416 dsc
->modinsn
[0] = insn1
;
6417 dsc
->modinsn
[1] = insn2
;
6423 /* Copy unpredictable instructions. */
6426 arm_copy_unpred (struct gdbarch
*gdbarch
, uint32_t insn
,
6427 struct displaced_step_closure
*dsc
)
6429 if (debug_displaced
)
6430 fprintf_unfiltered (gdb_stdlog
, "displaced: copying unpredictable insn "
6431 "%.8lx\n", (unsigned long) insn
);
6433 dsc
->modinsn
[0] = insn
;
/* The decode_* functions are instruction decoding helpers.  They mostly follow
   the presentation in the ARM ARM.  */

static int
arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
			      struct regcache *regs,
			      struct displaced_step_closure *dsc)
{
  unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
  unsigned int rn = bits (insn, 16, 19);

  if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0xe) == 0x0)
    return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
  else if (op1 == 0x10 && op2 == 0x0 && (rn & 0xe) == 0x1)
    return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
  else if ((op1 & 0x60) == 0x20)
    return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
  else if ((op1 & 0x71) == 0x40)
    return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
				dsc);
  else if ((op1 & 0x77) == 0x41)
    return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
  else if ((op1 & 0x77) == 0x45)
    return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pli.  */
  else if ((op1 & 0x77) == 0x51)
    {
      if (rn != 0xf)
	return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pld/pldw.  */
      else
	return arm_copy_unpred (gdbarch, insn, dsc);
    }
  else if ((op1 & 0x77) == 0x55)
    return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pld/pldw.  */
  else if (op1 == 0x57)
    switch (op2)
      {
      case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
      case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
      case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
      case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
      default: return arm_copy_unpred (gdbarch, insn, dsc);
      }
  else if ((op1 & 0x63) == 0x43)
    return arm_copy_unpred (gdbarch, insn, dsc);
  else if ((op2 & 0x1) == 0x0)
    switch (op1 & ~0x80)
      {
      case 0x61:
	return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
      case 0x65:
	return arm_copy_preload_reg (gdbarch, insn, regs, dsc);  /* pli reg.  */
      case 0x71: case 0x75:
	/* pld/pldw reg.  */
	return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
      case 0x63: case 0x67: case 0x73: case 0x77:
	return arm_copy_unpred (gdbarch, insn, dsc);
      default:
	return arm_copy_undef (gdbarch, insn, dsc);
      }
  else
    return arm_copy_undef (gdbarch, insn, dsc);  /* Probably unreachable.  */
}
/* Decode the unconditional (cond == 0b1111) ARM instruction space and
   dispatch to the appropriate copy routine.  */

static int
arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  struct displaced_step_closure *dsc)
{
  if (bit (insn, 27) == 0)
    return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
  /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx.  */
  else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
    {
    case 0x0: case 0x2:
      return arm_copy_unmodified (gdbarch, insn, "srs", dsc);

    case 0x1: case 0x3:
      return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);

    case 0x4: case 0x5: case 0x6: case 0x7:
      return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);

    case 0x8:
      switch ((insn & 0xe00000) >> 21)
	{
	case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
	  /* stc/stc2.  */
	  return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);

	case 0x2:
	  return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);

	default:
	  return arm_copy_undef (gdbarch, insn, dsc);
	}

    case 0x9:
      {
	int rn_f = (bits (insn, 16, 19) == 0xf);
	switch ((insn & 0xe00000) >> 21)
	  {
	  case 0x1: case 0x3:
	    /* ldc/ldc2 imm (undefined for rn == pc).  */
	    return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
			: arm_copy_copro_load_store (gdbarch, insn, regs, dsc);

	  case 0x2:
	    return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);

	  case 0x4: case 0x5: case 0x6: case 0x7:
	    /* ldc/ldc2 lit (undefined for rn != pc).  */
	    return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
			: arm_copy_undef (gdbarch, insn, dsc);

	  default:
	    return arm_copy_undef (gdbarch, insn, dsc);
	  }
      }

    case 0xa:
      return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);

    case 0xb:
      if (bits (insn, 16, 19) == 0xf)
	/* ldc/ldc2 lit.  */
	return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0xc:
      if (bit (insn, 4))
	return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
      else
	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);

    case 0xd:
      if (bit (insn, 4))
	return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
      else
	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);

    default:
      return arm_copy_undef (gdbarch, insn, dsc);
    }
}
/* Decode miscellaneous instructions in dp/misc encoding space.  */

static int
arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  struct displaced_step_closure *dsc)
{
  unsigned int op2 = bits (insn, 4, 6);
  unsigned int op = bits (insn, 21, 22);

  switch (op2)
    {
    case 0x0:
      return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);

    case 0x1:
      if (op == 0x1)  /* bx.  */
	return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
      else if (op == 0x3)
	return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x2:
      if (op == 0x1)
	/* Not really supported.  */
	return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x3:
      if (op == 0x1)
	return arm_copy_bx_blx_reg (gdbarch, insn,
				    regs, dsc);  /* blx register.  */
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x5:
      return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);

    case 0x7:
      if (op == 0x1)
	return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
      else if (op == 0x3)
	/* Not really supported.  */
	return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
      /* fallthrough */

    default:
      return arm_copy_undef (gdbarch, insn, dsc);
    }
}
/* Decode the data-processing / miscellaneous ARM encoding space and
   dispatch to the appropriate copy routine.  */

static int
arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
		    struct regcache *regs,
		    struct displaced_step_closure *dsc)
{
  if (bit (insn, 25))
    switch (bits (insn, 20, 24))
      {
      case 0x10:  /* movw.  */
	return arm_copy_unmodified (gdbarch, insn, "movw", dsc);

      case 0x14:  /* movt.  */
	return arm_copy_unmodified (gdbarch, insn, "movt", dsc);

      case 0x12: case 0x16:
	return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);

      default:
	return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
      }
  else
    {
      uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);

      if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
	return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
	return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
	return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
	return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
      else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
	return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
      else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
	return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
      else if (op2 == 0xb || (op2 & 0xd) == 0xd)
	/* 2nd arg means "unprivileged".  */
	return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
				     dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
/* Decode word/unsigned-byte load/store instructions.  The trailing three
   arguments to arm_copy_ldr_str_ldrb_strb are (load, size, usermode).  */

static int
arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
			     struct regcache *regs,
			     struct displaced_step_closure *dsc)
{
  int a = bit (insn, 25), b = bit (insn, 4);
  uint32_t op1 = bits (insn, 20, 24);

  if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
      || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
  else if ((!a && (op1 & 0x17) == 0x02)
	   || (a && (op1 & 0x17) == 0x02 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
  else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
	   || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
  else if ((!a && (op1 & 0x17) == 0x03)
	   || (a && (op1 & 0x17) == 0x03 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
  else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
	   || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
  else if ((!a && (op1 & 0x17) == 0x06)
	   || (a && (op1 & 0x17) == 0x06 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
  else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
	   || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
  else if ((!a && (op1 & 0x17) == 0x07)
	   || (a && (op1 & 0x17) == 0x07 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);

  /* Should be unreachable.  */
  return 1;
}
/* Decode the ARM media-instruction encoding space.  */

static int
arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
		  struct displaced_step_closure *dsc)
{
  switch (bits (insn, 20, 24))
    {
    case 0x00: case 0x01: case 0x02: case 0x03:
      return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);

    case 0x04: case 0x05: case 0x06: case 0x07:
      return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);

    case 0x08: case 0x09: case 0x0a: case 0x0b:
    case 0x0c: case 0x0d: case 0x0e: case 0x0f:
      return arm_copy_unmodified (gdbarch, insn,
				  "decode/pack/unpack/saturate/reverse", dsc);

    case 0x18:
      if (bits (insn, 5, 7) == 0)  /* op2.  */
	{
	  if (bits (insn, 12, 15) == 0xf)
	    return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
	  else
	    return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
	}
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1a: case 0x1b:
      if (bits (insn, 5, 6) == 0x2)  /* op2[1:0].  */
	return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1c: case 0x1d:
      if (bits (insn, 5, 6) == 0x0)  /* op2[1:0].  */
	{
	  if (bits (insn, 0, 3) == 0xf)
	    return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
	  else
	    return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
	}
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1e: case 0x1f:
      if (bits (insn, 5, 6) == 0x2)  /* op2[1:0].  */
	return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
/* Decode branch (b/bl/blx) versus block-transfer (ldm/stm) encodings and
   dispatch accordingly.  */

static int
arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn,
			struct regcache *regs,
			struct displaced_step_closure *dsc)
{
  if (bit (insn, 25))
    return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
  else
    return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
}
/* Decode extension-register (VFP/Neon) load/store instructions.  */

static int
arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  struct displaced_step_closure *dsc)
{
  unsigned int opcode = bits (insn, 20, 24);

  switch (opcode)
    {
    case 0x04: case 0x05:  /* VFP/Neon mrrc/mcrr.  */
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);

    case 0x08: case 0x0a: case 0x0c: case 0x0e:
    case 0x12: case 0x16:
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);

    case 0x09: case 0x0b: case 0x0d: case 0x0f:
    case 0x13: case 0x17:
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);

    case 0x10: case 0x14: case 0x18: case 0x1c:  /* vstr.  */
    case 0x11: case 0x15: case 0x19: case 0x1d:  /* vldr.  */
      /* Note: no writeback for these instructions.  Bit 25 will always be
	 zero though (via caller), so the following works OK.  */
      return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
/* Decode shifted register instructions.  */

static int
thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
			    uint16_t insn2,  struct regcache *regs,
			    struct displaced_step_closure *dsc)
{
  /* PC is only allowed to be used in instruction MOV.  */

  unsigned int op = bits (insn1, 5, 8);
  unsigned int rn = bits (insn1, 0, 3);

  if (op == 0x2 && rn == 0xf) /* MOV */
    return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
  else
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					"dp (shift reg)", dsc);
}
/* Decode extension register load/store.  Exactly the same as
   arm_decode_ext_reg_ld_st.  */

static int
thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
			     uint16_t insn2,  struct regcache *regs,
			     struct displaced_step_closure *dsc)
{
  unsigned int opcode = bits (insn1, 4, 8);

  switch (opcode)
    {
    case 0x04: case 0x05:
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vfp/neon vmov", dsc);

    case 0x08: case 0x0c: /* 01x00 */
    case 0x0a: case 0x0e: /* 01x10 */
    case 0x12: case 0x16: /* 10x10 */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vfp/neon vstm/vpush", dsc);

    case 0x09: case 0x0d: /* 01x01 */
    case 0x0b: case 0x0f: /* 01x11 */
    case 0x13: case 0x17: /* 10x11 */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vfp/neon vldm/vpop", dsc);

    case 0x10: case 0x14: case 0x18: case 0x1c:  /* vstr.  */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vstr", dsc);
    case 0x11: case 0x15: case 0x19: case 0x1d:  /* vldr.  */
      return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
/* Decode the SVC/coprocessor ARM encoding space and dispatch to the
   appropriate copy routine.  */

static int
arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
		      struct regcache *regs, struct displaced_step_closure *dsc)
{
  unsigned int op1 = bits (insn, 20, 25);
  int op = bit (insn, 4);
  unsigned int coproc = bits (insn, 8, 11);

  if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
    return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
  else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
	   && (coproc & 0xe) != 0xa)
    /* stc/stc2.  */
    return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
  else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
	   && (coproc & 0xe) != 0xa)
    /* ldc/ldc2 imm/lit.  */
    return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
  else if ((op1 & 0x3e) == 0x00)
    return arm_copy_undef (gdbarch, insn, dsc);
  else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
    return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
  else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
    return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
  else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
    return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
  else if ((op1 & 0x30) == 0x20 && !op)
    {
      if ((coproc & 0xe) == 0xa)
	return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
      else
	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
    }
  else if ((op1 & 0x30) == 0x20 && op)
    return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
  else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
    return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
  else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
    return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
  else if ((op1 & 0x30) == 0x30)
    return arm_copy_svc (gdbarch, insn, regs, dsc);
  else
    return arm_copy_undef (gdbarch, insn, dsc);  /* Possibly unreachable.  */
}
/* Decode the Thumb-2 coprocessor/SIMD encoding space and dispatch to the
   appropriate copy routine.  */

static int
thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
			 uint16_t insn2, struct regcache *regs,
			 struct displaced_step_closure *dsc)
{
  unsigned int coproc = bits (insn2, 8, 11);
  unsigned int bit_5_8 = bits (insn1, 5, 8);
  unsigned int bit_9 = bit (insn1, 9);
  unsigned int bit_4 = bit (insn1, 4);

  if (bit_9 == 0)
    {
      if (bit_5_8 == 2)
	return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					    "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
					    dsc);
      else if (bit_5_8 == 0) /* UNDEFINED.  */
	return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
      else
	{
	  /* coproc is 101x.  SIMD/VFP, ext registers load/store.  */
	  if ((coproc & 0xe) == 0xa)
	    return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
						dsc);
	  else /* coproc is not 101x.  */
	    {
	      if (bit_4 == 0) /* STC/STC2.  */
		return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						    "stc/stc2", dsc);
	      else /* LDC/LDC2 {literal, immediate}.  */
		return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
						     regs, dsc);
	    }
	}
    }
  else
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);

  return 0;
}
6963 install_pc_relative (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6964 struct displaced_step_closure
*dsc
, int rd
)
6970 Preparation: Rd <- PC
6976 int val
= displaced_read_reg (regs
, dsc
, ARM_PC_REGNUM
);
6977 displaced_write_reg (regs
, dsc
, rd
, val
, CANNOT_WRITE_PC
);
6981 thumb_copy_pc_relative_16bit (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6982 struct displaced_step_closure
*dsc
,
6983 int rd
, unsigned int imm
)
6986 /* Encoding T2: ADDS Rd, #imm */
6987 dsc
->modinsn
[0] = (0x3000 | (rd
<< 8) | imm
);
6989 install_pc_relative (gdbarch
, regs
, dsc
, rd
);
6995 thumb_decode_pc_relative_16bit (struct gdbarch
*gdbarch
, uint16_t insn
,
6996 struct regcache
*regs
,
6997 struct displaced_step_closure
*dsc
)
6999 unsigned int rd
= bits (insn
, 8, 10);
7000 unsigned int imm8
= bits (insn
, 0, 7);
7002 if (debug_displaced
)
7003 fprintf_unfiltered (gdb_stdlog
,
7004 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
7007 return thumb_copy_pc_relative_16bit (gdbarch
, regs
, dsc
, rd
, imm8
);
7011 thumb_copy_pc_relative_32bit (struct gdbarch
*gdbarch
, uint16_t insn1
,
7012 uint16_t insn2
, struct regcache
*regs
,
7013 struct displaced_step_closure
*dsc
)
7015 unsigned int rd
= bits (insn2
, 8, 11);
7016 /* Since immediate has the same encoding in ADR ADD and SUB, so we simply
7017 extract raw immediate encoding rather than computing immediate. When
7018 generating ADD or SUB instruction, we can simply perform OR operation to
7019 set immediate into ADD. */
7020 unsigned int imm_3_8
= insn2
& 0x70ff;
7021 unsigned int imm_i
= insn1
& 0x0400; /* Clear all bits except bit 10. */
7023 if (debug_displaced
)
7024 fprintf_unfiltered (gdb_stdlog
,
7025 "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
7026 rd
, imm_i
, imm_3_8
, insn1
, insn2
);
7028 if (bit (insn1
, 7)) /* Encoding T2 */
7030 /* Encoding T3: SUB Rd, Rd, #imm */
7031 dsc
->modinsn
[0] = (0xf1a0 | rd
| imm_i
);
7032 dsc
->modinsn
[1] = ((rd
<< 8) | imm_3_8
);
7034 else /* Encoding T3 */
7036 /* Encoding T3: ADD Rd, Rd, #imm */
7037 dsc
->modinsn
[0] = (0xf100 | rd
| imm_i
);
7038 dsc
->modinsn
[1] = ((rd
<< 8) | imm_3_8
);
7042 install_pc_relative (gdbarch
, regs
, dsc
, rd
);
7048 thumb_copy_16bit_ldr_literal (struct gdbarch
*gdbarch
, uint16_t insn1
,
7049 struct regcache
*regs
,
7050 struct displaced_step_closure
*dsc
)
7052 unsigned int rt
= bits (insn1
, 8, 10);
7054 int imm8
= (bits (insn1
, 0, 7) << 2);
7060 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
7062 Insn: LDR R0, [R2, R3];
7063 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
7065 if (debug_displaced
)
7066 fprintf_unfiltered (gdb_stdlog
,
7067 "displaced: copying thumb ldr r%d [pc #%d]\n"
7070 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
7071 dsc
->tmp
[2] = displaced_read_reg (regs
, dsc
, 2);
7072 dsc
->tmp
[3] = displaced_read_reg (regs
, dsc
, 3);
7073 pc
= displaced_read_reg (regs
, dsc
, ARM_PC_REGNUM
);
7074 /* The assembler calculates the required value of the offset from the
7075 Align(PC,4) value of this instruction to the label. */
7076 pc
= pc
& 0xfffffffc;
7078 displaced_write_reg (regs
, dsc
, 2, pc
, CANNOT_WRITE_PC
);
7079 displaced_write_reg (regs
, dsc
, 3, imm8
, CANNOT_WRITE_PC
);
7082 dsc
->u
.ldst
.xfersize
= 4;
7084 dsc
->u
.ldst
.immed
= 0;
7085 dsc
->u
.ldst
.writeback
= 0;
7086 dsc
->u
.ldst
.restore_r4
= 0;
7088 dsc
->modinsn
[0] = 0x58d0; /* ldr r0, [r2, r3]*/
7090 dsc
->cleanup
= &cleanup_load
;
7095 /* Copy Thumb cbnz/cbz insruction. */
7098 thumb_copy_cbnz_cbz (struct gdbarch
*gdbarch
, uint16_t insn1
,
7099 struct regcache
*regs
,
7100 struct displaced_step_closure
*dsc
)
7102 int non_zero
= bit (insn1
, 11);
7103 unsigned int imm5
= (bit (insn1
, 9) << 6) | (bits (insn1
, 3, 7) << 1);
7104 CORE_ADDR from
= dsc
->insn_addr
;
7105 int rn
= bits (insn1
, 0, 2);
7106 int rn_val
= displaced_read_reg (regs
, dsc
, rn
);
7108 dsc
->u
.branch
.cond
= (rn_val
&& non_zero
) || (!rn_val
&& !non_zero
);
7109 /* CBNZ and CBZ do not affect the condition flags. If condition is true,
7110 set it INST_AL, so cleanup_branch will know branch is taken, otherwise,
7111 condition is false, let it be, cleanup_branch will do nothing. */
7112 if (dsc
->u
.branch
.cond
)
7114 dsc
->u
.branch
.cond
= INST_AL
;
7115 dsc
->u
.branch
.dest
= from
+ 4 + imm5
;
7118 dsc
->u
.branch
.dest
= from
+ 2;
7120 dsc
->u
.branch
.link
= 0;
7121 dsc
->u
.branch
.exchange
= 0;
7123 if (debug_displaced
)
7124 fprintf_unfiltered (gdb_stdlog
, "displaced: copying %s [r%d = 0x%x]"
7125 " insn %.4x to %.8lx\n", non_zero
? "cbnz" : "cbz",
7126 rn
, rn_val
, insn1
, dsc
->u
.branch
.dest
);
7128 dsc
->modinsn
[0] = THUMB_NOP
;
7130 dsc
->cleanup
= &cleanup_branch
;
7134 /* Copy Table Branch Byte/Halfword */
7136 thumb2_copy_table_branch (struct gdbarch
*gdbarch
, uint16_t insn1
,
7137 uint16_t insn2
, struct regcache
*regs
,
7138 struct displaced_step_closure
*dsc
)
7140 ULONGEST rn_val
, rm_val
;
7141 int is_tbh
= bit (insn2
, 4);
7142 CORE_ADDR halfwords
= 0;
7143 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
7145 rn_val
= displaced_read_reg (regs
, dsc
, bits (insn1
, 0, 3));
7146 rm_val
= displaced_read_reg (regs
, dsc
, bits (insn2
, 0, 3));
7152 target_read_memory (rn_val
+ 2 * rm_val
, buf
, 2);
7153 halfwords
= extract_unsigned_integer (buf
, 2, byte_order
);
7159 target_read_memory (rn_val
+ rm_val
, buf
, 1);
7160 halfwords
= extract_unsigned_integer (buf
, 1, byte_order
);
7163 if (debug_displaced
)
7164 fprintf_unfiltered (gdb_stdlog
, "displaced: %s base 0x%x offset 0x%x"
7165 " offset 0x%x\n", is_tbh
? "tbh" : "tbb",
7166 (unsigned int) rn_val
, (unsigned int) rm_val
,
7167 (unsigned int) halfwords
);
7169 dsc
->u
.branch
.cond
= INST_AL
;
7170 dsc
->u
.branch
.link
= 0;
7171 dsc
->u
.branch
.exchange
= 0;
7172 dsc
->u
.branch
.dest
= dsc
->insn_addr
+ 4 + 2 * halfwords
;
7174 dsc
->cleanup
= &cleanup_branch
;
7180 cleanup_pop_pc_16bit_all (struct gdbarch
*gdbarch
, struct regcache
*regs
,
7181 struct displaced_step_closure
*dsc
)
7184 int val
= displaced_read_reg (regs
, dsc
, 7);
7185 displaced_write_reg (regs
, dsc
, ARM_PC_REGNUM
, val
, BX_WRITE_PC
);
7188 val
= displaced_read_reg (regs
, dsc
, 8);
7189 displaced_write_reg (regs
, dsc
, 7, val
, CANNOT_WRITE_PC
);
7192 displaced_write_reg (regs
, dsc
, 8, dsc
->tmp
[0], CANNOT_WRITE_PC
);
7197 thumb_copy_pop_pc_16bit (struct gdbarch
*gdbarch
, uint16_t insn1
,
7198 struct regcache
*regs
,
7199 struct displaced_step_closure
*dsc
)
7201 dsc
->u
.block
.regmask
= insn1
& 0x00ff;
7203 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
7206 (1) register list is full, that is, r0-r7 are used.
7207 Prepare: tmp[0] <- r8
7209 POP {r0, r1, ...., r6, r7}; remove PC from reglist
7210 MOV r8, r7; Move value of r7 to r8;
7211 POP {r7}; Store PC value into r7.
7213 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
7215 (2) register list is not full, supposing there are N registers in
7216 register list (except PC, 0 <= N <= 7).
7217 Prepare: for each i, 0 - N, tmp[i] <- ri.
7219 POP {r0, r1, ...., rN};
7221 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
7222 from tmp[] properly.
7224 if (debug_displaced
)
7225 fprintf_unfiltered (gdb_stdlog
,
7226 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
7227 dsc
->u
.block
.regmask
, insn1
);
7229 if (dsc
->u
.block
.regmask
== 0xff)
7231 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 8);
7233 dsc
->modinsn
[0] = (insn1
& 0xfeff); /* POP {r0,r1,...,r6, r7} */
7234 dsc
->modinsn
[1] = 0x46b8; /* MOV r8, r7 */
7235 dsc
->modinsn
[2] = 0xbc80; /* POP {r7} */
7238 dsc
->cleanup
= &cleanup_pop_pc_16bit_all
;
7242 unsigned int num_in_list
= bitcount (dsc
->u
.block
.regmask
);
7244 unsigned int new_regmask
;
7246 for (i
= 0; i
< num_in_list
+ 1; i
++)
7247 dsc
->tmp
[i
] = displaced_read_reg (regs
, dsc
, i
);
7249 new_regmask
= (1 << (num_in_list
+ 1)) - 1;
7251 if (debug_displaced
)
7252 fprintf_unfiltered (gdb_stdlog
, _("displaced: POP "
7253 "{..., pc}: original reg list %.4x,"
7254 " modified list %.4x\n"),
7255 (int) dsc
->u
.block
.regmask
, new_regmask
);
7257 dsc
->u
.block
.regmask
|= 0x8000;
7258 dsc
->u
.block
.writeback
= 0;
7259 dsc
->u
.block
.cond
= INST_AL
;
7261 dsc
->modinsn
[0] = (insn1
& ~0x1ff) | (new_regmask
& 0xff);
7263 dsc
->cleanup
= &cleanup_block_load_pc
;
7270 thumb_process_displaced_16bit_insn (struct gdbarch
*gdbarch
, uint16_t insn1
,
7271 struct regcache
*regs
,
7272 struct displaced_step_closure
*dsc
)
7274 unsigned short op_bit_12_15
= bits (insn1
, 12, 15);
7275 unsigned short op_bit_10_11
= bits (insn1
, 10, 11);
7278 /* 16-bit thumb instructions. */
7279 switch (op_bit_12_15
)
7281 /* Shift (imme), add, subtract, move and compare. */
7282 case 0: case 1: case 2: case 3:
7283 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
,
7284 "shift/add/sub/mov/cmp",
7288 switch (op_bit_10_11
)
7290 case 0: /* Data-processing */
7291 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
,
7295 case 1: /* Special data instructions and branch and exchange. */
7297 unsigned short op
= bits (insn1
, 7, 9);
7298 if (op
== 6 || op
== 7) /* BX or BLX */
7299 err
= thumb_copy_bx_blx_reg (gdbarch
, insn1
, regs
, dsc
);
7300 else if (bits (insn1
, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
7301 err
= thumb_copy_alu_reg (gdbarch
, insn1
, regs
, dsc
);
7303 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
, "special data",
7307 default: /* LDR (literal) */
7308 err
= thumb_copy_16bit_ldr_literal (gdbarch
, insn1
, regs
, dsc
);
7311 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
7312 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
, "ldr/str", dsc
);
7315 if (op_bit_10_11
< 2) /* Generate PC-relative address */
7316 err
= thumb_decode_pc_relative_16bit (gdbarch
, insn1
, regs
, dsc
);
7317 else /* Generate SP-relative address */
7318 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
, "sp-relative", dsc
);
7320 case 11: /* Misc 16-bit instructions */
7322 switch (bits (insn1
, 8, 11))
7324 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
7325 err
= thumb_copy_cbnz_cbz (gdbarch
, insn1
, regs
, dsc
);
7327 case 12: case 13: /* POP */
7328 if (bit (insn1
, 8)) /* PC is in register list. */
7329 err
= thumb_copy_pop_pc_16bit (gdbarch
, insn1
, regs
, dsc
);
7331 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
, "pop", dsc
);
7333 case 15: /* If-Then, and hints */
7334 if (bits (insn1
, 0, 3))
7335 /* If-Then makes up to four following instructions conditional.
7336 IT instruction itself is not conditional, so handle it as a
7337 common unmodified instruction. */
7338 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
, "If-Then",
7341 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
, "hints", dsc
);
7344 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
, "misc", dsc
);
7349 if (op_bit_10_11
< 2) /* Store multiple registers */
7350 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
, "stm", dsc
);
7351 else /* Load multiple registers */
7352 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
, "ldm", dsc
);
7354 case 13: /* Conditional branch and supervisor call */
7355 if (bits (insn1
, 9, 11) != 7) /* conditional branch */
7356 err
= thumb_copy_b (gdbarch
, insn1
, dsc
);
7358 err
= thumb_copy_svc (gdbarch
, insn1
, regs
, dsc
);
7360 case 14: /* Unconditional branch */
7361 err
= thumb_copy_b (gdbarch
, insn1
, dsc
);
7368 internal_error (__FILE__
, __LINE__
,
7369 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
7373 decode_thumb_32bit_ld_mem_hints (struct gdbarch
*gdbarch
,
7374 uint16_t insn1
, uint16_t insn2
,
7375 struct regcache
*regs
,
7376 struct displaced_step_closure
*dsc
)
7378 int rt
= bits (insn2
, 12, 15);
7379 int rn
= bits (insn1
, 0, 3);
7380 int op1
= bits (insn1
, 7, 8);
7382 switch (bits (insn1
, 5, 6))
7384 case 0: /* Load byte and memory hints */
7385 if (rt
== 0xf) /* PLD/PLI */
7388 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
7389 return thumb2_copy_preload (gdbarch
, insn1
, insn2
, regs
, dsc
);
7391 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7396 if (rn
== 0xf) /* LDRB/LDRSB (literal) */
7397 return thumb2_copy_load_literal (gdbarch
, insn1
, insn2
, regs
, dsc
,
7400 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7401 "ldrb{reg, immediate}/ldrbt",
7406 case 1: /* Load halfword and memory hints. */
7407 if (rt
== 0xf) /* PLD{W} and Unalloc memory hint. */
7408 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7409 "pld/unalloc memhint", dsc
);
7413 return thumb2_copy_load_literal (gdbarch
, insn1
, insn2
, regs
, dsc
,
7416 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7420 case 2: /* Load word */
7422 int insn2_bit_8_11
= bits (insn2
, 8, 11);
7425 return thumb2_copy_load_literal (gdbarch
, insn1
, insn2
, regs
, dsc
, 4);
7426 else if (op1
== 0x1) /* Encoding T3 */
7427 return thumb2_copy_load_reg_imm (gdbarch
, insn1
, insn2
, regs
, dsc
,
7429 else /* op1 == 0x0 */
7431 if (insn2_bit_8_11
== 0xc || (insn2_bit_8_11
& 0x9) == 0x9)
7432 /* LDR (immediate) */
7433 return thumb2_copy_load_reg_imm (gdbarch
, insn1
, insn2
, regs
,
7434 dsc
, bit (insn2
, 8), 1);
7435 else if (insn2_bit_8_11
== 0xe) /* LDRT */
7436 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7439 /* LDR (register) */
7440 return thumb2_copy_load_reg_imm (gdbarch
, insn1
, insn2
, regs
,
7446 return thumb_32bit_copy_undef (gdbarch
, insn1
, insn2
, dsc
);
7453 thumb_process_displaced_32bit_insn (struct gdbarch
*gdbarch
, uint16_t insn1
,
7454 uint16_t insn2
, struct regcache
*regs
,
7455 struct displaced_step_closure
*dsc
)
7458 unsigned short op
= bit (insn2
, 15);
7459 unsigned int op1
= bits (insn1
, 11, 12);
7465 switch (bits (insn1
, 9, 10))
7470 /* Load/store {dual, execlusive}, table branch. */
7471 if (bits (insn1
, 7, 8) == 1 && bits (insn1
, 4, 5) == 1
7472 && bits (insn2
, 5, 7) == 0)
7473 err
= thumb2_copy_table_branch (gdbarch
, insn1
, insn2
, regs
,
7476 /* PC is not allowed to use in load/store {dual, exclusive}
7478 err
= thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7479 "load/store dual/ex", dsc
);
7481 else /* load/store multiple */
7483 switch (bits (insn1
, 7, 8))
7485 case 0: case 3: /* SRS, RFE */
7486 err
= thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7489 case 1: case 2: /* LDM/STM/PUSH/POP */
7490 err
= thumb2_copy_block_xfer (gdbarch
, insn1
, insn2
, regs
, dsc
);
7497 /* Data-processing (shift register). */
7498 err
= thumb2_decode_dp_shift_reg (gdbarch
, insn1
, insn2
, regs
,
7501 default: /* Coprocessor instructions. */
7502 err
= thumb2_decode_svc_copro (gdbarch
, insn1
, insn2
, regs
, dsc
);
7507 case 2: /* op1 = 2 */
7508 if (op
) /* Branch and misc control. */
7510 if (bit (insn2
, 14) /* BLX/BL */
7511 || bit (insn2
, 12) /* Unconditional branch */
7512 || (bits (insn1
, 7, 9) != 0x7)) /* Conditional branch */
7513 err
= thumb2_copy_b_bl_blx (gdbarch
, insn1
, insn2
, regs
, dsc
);
7515 err
= thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7520 if (bit (insn1
, 9)) /* Data processing (plain binary imm). */
7522 int op
= bits (insn1
, 4, 8);
7523 int rn
= bits (insn1
, 0, 3);
7524 if ((op
== 0 || op
== 0xa) && rn
== 0xf)
7525 err
= thumb_copy_pc_relative_32bit (gdbarch
, insn1
, insn2
,
7528 err
= thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7531 else /* Data processing (modified immeidate) */
7532 err
= thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7536 case 3: /* op1 = 3 */
7537 switch (bits (insn1
, 9, 10))
7541 err
= decode_thumb_32bit_ld_mem_hints (gdbarch
, insn1
, insn2
,
7543 else /* NEON Load/Store and Store single data item */
7544 err
= thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7545 "neon elt/struct load/store",
7548 case 1: /* op1 = 3, bits (9, 10) == 1 */
7549 switch (bits (insn1
, 7, 8))
7551 case 0: case 1: /* Data processing (register) */
7552 err
= thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7555 case 2: /* Multiply and absolute difference */
7556 err
= thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7557 "mul/mua/diff", dsc
);
7559 case 3: /* Long multiply and divide */
7560 err
= thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7565 default: /* Coprocessor instructions */
7566 err
= thumb2_decode_svc_copro (gdbarch
, insn1
, insn2
, regs
, dsc
);
7575 internal_error (__FILE__
, __LINE__
,
7576 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
7581 thumb_process_displaced_insn (struct gdbarch
*gdbarch
, CORE_ADDR from
,
7582 struct regcache
*regs
,
7583 struct displaced_step_closure
*dsc
)
7585 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
7587 = read_memory_unsigned_integer (from
, 2, byte_order_for_code
);
7589 if (debug_displaced
)
7590 fprintf_unfiltered (gdb_stdlog
, "displaced: process thumb insn %.4x "
7591 "at %.8lx\n", insn1
, (unsigned long) from
);
7594 dsc
->insn_size
= thumb_insn_size (insn1
);
7595 if (thumb_insn_size (insn1
) == 4)
7598 = read_memory_unsigned_integer (from
+ 2, 2, byte_order_for_code
);
7599 thumb_process_displaced_32bit_insn (gdbarch
, insn1
, insn2
, regs
, dsc
);
7602 thumb_process_displaced_16bit_insn (gdbarch
, insn1
, regs
, dsc
);
7606 arm_process_displaced_insn (struct gdbarch
*gdbarch
, CORE_ADDR from
,
7607 CORE_ADDR to
, struct regcache
*regs
,
7608 struct displaced_step_closure
*dsc
)
7611 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
7614 /* Most displaced instructions use a 1-instruction scratch space, so set this
7615 here and override below if/when necessary. */
7617 dsc
->insn_addr
= from
;
7618 dsc
->scratch_base
= to
;
7619 dsc
->cleanup
= NULL
;
7620 dsc
->wrote_to_pc
= 0;
7622 if (!displaced_in_arm_mode (regs
))
7623 return thumb_process_displaced_insn (gdbarch
, from
, regs
, dsc
);
7627 insn
= read_memory_unsigned_integer (from
, 4, byte_order_for_code
);
7628 if (debug_displaced
)
7629 fprintf_unfiltered (gdb_stdlog
, "displaced: stepping insn %.8lx "
7630 "at %.8lx\n", (unsigned long) insn
,
7631 (unsigned long) from
);
7633 if ((insn
& 0xf0000000) == 0xf0000000)
7634 err
= arm_decode_unconditional (gdbarch
, insn
, regs
, dsc
);
7635 else switch (((insn
& 0x10) >> 4) | ((insn
& 0xe000000) >> 24))
7637 case 0x0: case 0x1: case 0x2: case 0x3:
7638 err
= arm_decode_dp_misc (gdbarch
, insn
, regs
, dsc
);
7641 case 0x4: case 0x5: case 0x6:
7642 err
= arm_decode_ld_st_word_ubyte (gdbarch
, insn
, regs
, dsc
);
7646 err
= arm_decode_media (gdbarch
, insn
, dsc
);
7649 case 0x8: case 0x9: case 0xa: case 0xb:
7650 err
= arm_decode_b_bl_ldmstm (gdbarch
, insn
, regs
, dsc
);
7653 case 0xc: case 0xd: case 0xe: case 0xf:
7654 err
= arm_decode_svc_copro (gdbarch
, insn
, regs
, dsc
);
7659 internal_error (__FILE__
, __LINE__
,
7660 _("arm_process_displaced_insn: Instruction decode error"));
7663 /* Actually set up the scratch space for a displaced instruction. */
7666 arm_displaced_init_closure (struct gdbarch
*gdbarch
, CORE_ADDR from
,
7667 CORE_ADDR to
, struct displaced_step_closure
*dsc
)
7669 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
7670 unsigned int i
, len
, offset
;
7671 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
7672 int size
= dsc
->is_thumb
? 2 : 4;
7673 const gdb_byte
*bkp_insn
;
7676 /* Poke modified instruction(s). */
7677 for (i
= 0; i
< dsc
->numinsns
; i
++)
7679 if (debug_displaced
)
7681 fprintf_unfiltered (gdb_stdlog
, "displaced: writing insn ");
7683 fprintf_unfiltered (gdb_stdlog
, "%.8lx",
7686 fprintf_unfiltered (gdb_stdlog
, "%.4x",
7687 (unsigned short)dsc
->modinsn
[i
]);
7689 fprintf_unfiltered (gdb_stdlog
, " at %.8lx\n",
7690 (unsigned long) to
+ offset
);
7693 write_memory_unsigned_integer (to
+ offset
, size
,
7694 byte_order_for_code
,
7699 /* Choose the correct breakpoint instruction. */
7702 bkp_insn
= tdep
->thumb_breakpoint
;
7703 len
= tdep
->thumb_breakpoint_size
;
7707 bkp_insn
= tdep
->arm_breakpoint
;
7708 len
= tdep
->arm_breakpoint_size
;
7711 /* Put breakpoint afterwards. */
7712 write_memory (to
+ offset
, bkp_insn
, len
);
7714 if (debug_displaced
)
7715 fprintf_unfiltered (gdb_stdlog
, "displaced: copy %s->%s: ",
7716 paddress (gdbarch
, from
), paddress (gdbarch
, to
));
7719 /* Entry point for cleaning things up after a displaced instruction has been
7723 arm_displaced_step_fixup (struct gdbarch
*gdbarch
,
7724 struct displaced_step_closure
*dsc
,
7725 CORE_ADDR from
, CORE_ADDR to
,
7726 struct regcache
*regs
)
7729 dsc
->cleanup (gdbarch
, regs
, dsc
);
7731 if (!dsc
->wrote_to_pc
)
7732 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
,
7733 dsc
->insn_addr
+ dsc
->insn_size
);
7737 #include "bfd-in2.h"
7738 #include "libcoff.h"
7741 gdb_print_insn_arm (bfd_vma memaddr
, disassemble_info
*info
)
7743 gdb_disassembler
*di
7744 = static_cast<gdb_disassembler
*>(info
->application_data
);
7745 struct gdbarch
*gdbarch
= di
->arch ();
7747 if (arm_pc_is_thumb (gdbarch
, memaddr
))
7749 static asymbol
*asym
;
7750 static combined_entry_type ce
;
7751 static struct coff_symbol_struct csym
;
7752 static struct bfd fake_bfd
;
7753 static bfd_target fake_target
;
7755 if (csym
.native
== NULL
)
7757 /* Create a fake symbol vector containing a Thumb symbol.
7758 This is solely so that the code in print_insn_little_arm()
7759 and print_insn_big_arm() in opcodes/arm-dis.c will detect
7760 the presence of a Thumb symbol and switch to decoding
7761 Thumb instructions. */
7763 fake_target
.flavour
= bfd_target_coff_flavour
;
7764 fake_bfd
.xvec
= &fake_target
;
7765 ce
.u
.syment
.n_sclass
= C_THUMBEXTFUNC
;
7767 csym
.symbol
.the_bfd
= &fake_bfd
;
7768 csym
.symbol
.name
= "fake";
7769 asym
= (asymbol
*) & csym
;
7772 memaddr
= UNMAKE_THUMB_ADDR (memaddr
);
7773 info
->symbols
= &asym
;
7776 info
->symbols
= NULL
;
7778 if (info
->endian
== BFD_ENDIAN_BIG
)
7779 return print_insn_big_arm (memaddr
, info
);
7781 return print_insn_little_arm (memaddr
, info
);
7784 /* The following define instruction sequences that will cause ARM
7785 cpu's to take an undefined instruction trap. These are used to
7786 signal a breakpoint to GDB.
7788 The newer ARMv4T cpu's are capable of operating in ARM or Thumb
7789 modes. A different instruction is required for each mode. The ARM
7790 cpu's can also be big or little endian. Thus four different
7791 instructions are needed to support all cases.
7793 Note: ARMv4 defines several new instructions that will take the
7794 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
7795 not in fact add the new instructions. The new undefined
7796 instructions in ARMv4 are all instructions that had no defined
7797 behaviour in earlier chips. There is no guarantee that they will
7798 raise an exception, but may be treated as NOP's. In practice, it
7799 may only safe to rely on instructions matching:
7801 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
7802 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
7803 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
7805 Even this may only true if the condition predicate is true. The
7806 following use a condition predicate of ALWAYS so it is always TRUE.
7808 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
7809 and NetBSD all use a software interrupt rather than an undefined
7810 instruction to force a trap. This can be handled by by the
7811 abi-specific code during establishment of the gdbarch vector. */
7813 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
7814 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
7815 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
7816 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
7818 static const gdb_byte arm_default_arm_le_breakpoint
[] = ARM_LE_BREAKPOINT
;
7819 static const gdb_byte arm_default_arm_be_breakpoint
[] = ARM_BE_BREAKPOINT
;
7820 static const gdb_byte arm_default_thumb_le_breakpoint
[] = THUMB_LE_BREAKPOINT
;
7821 static const gdb_byte arm_default_thumb_be_breakpoint
[] = THUMB_BE_BREAKPOINT
;
7823 /* Implement the breakpoint_kind_from_pc gdbarch method. */
7826 arm_breakpoint_kind_from_pc (struct gdbarch
*gdbarch
, CORE_ADDR
*pcptr
)
7828 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
7829 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
7831 if (arm_pc_is_thumb (gdbarch
, *pcptr
))
7833 *pcptr
= UNMAKE_THUMB_ADDR (*pcptr
);
7835 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
7836 check whether we are replacing a 32-bit instruction. */
7837 if (tdep
->thumb2_breakpoint
!= NULL
)
7841 if (target_read_memory (*pcptr
, buf
, 2) == 0)
7843 unsigned short inst1
;
7845 inst1
= extract_unsigned_integer (buf
, 2, byte_order_for_code
);
7846 if (thumb_insn_size (inst1
) == 4)
7847 return ARM_BP_KIND_THUMB2
;
7851 return ARM_BP_KIND_THUMB
;
7854 return ARM_BP_KIND_ARM
;
7858 /* Implement the sw_breakpoint_from_kind gdbarch method. */
7860 static const gdb_byte
*
7861 arm_sw_breakpoint_from_kind (struct gdbarch
*gdbarch
, int kind
, int *size
)
7863 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
7867 case ARM_BP_KIND_ARM
:
7868 *size
= tdep
->arm_breakpoint_size
;
7869 return tdep
->arm_breakpoint
;
7870 case ARM_BP_KIND_THUMB
:
7871 *size
= tdep
->thumb_breakpoint_size
;
7872 return tdep
->thumb_breakpoint
;
7873 case ARM_BP_KIND_THUMB2
:
7874 *size
= tdep
->thumb2_breakpoint_size
;
7875 return tdep
->thumb2_breakpoint
;
7877 gdb_assert_not_reached ("unexpected arm breakpoint kind");
7881 /* Implement the breakpoint_kind_from_current_state gdbarch method. */
7884 arm_breakpoint_kind_from_current_state (struct gdbarch
*gdbarch
,
7885 struct regcache
*regcache
,
7890 /* Check the memory pointed by PC is readable. */
7891 if (target_read_memory (regcache_read_pc (regcache
), buf
, 4) == 0)
7893 struct arm_get_next_pcs next_pcs_ctx
;
7896 VEC (CORE_ADDR
) *next_pcs
= NULL
;
7897 struct cleanup
*old_chain
7898 = make_cleanup (VEC_cleanup (CORE_ADDR
), &next_pcs
);
7900 arm_get_next_pcs_ctor (&next_pcs_ctx
,
7901 &arm_get_next_pcs_ops
,
7902 gdbarch_byte_order (gdbarch
),
7903 gdbarch_byte_order_for_code (gdbarch
),
7907 next_pcs
= arm_get_next_pcs (&next_pcs_ctx
);
7909 /* If MEMADDR is the next instruction of current pc, do the
7910 software single step computation, and get the thumb mode by
7911 the destination address. */
7912 for (i
= 0; VEC_iterate (CORE_ADDR
, next_pcs
, i
, pc
); i
++)
7914 if (UNMAKE_THUMB_ADDR (pc
) == *pcptr
)
7916 do_cleanups (old_chain
);
7918 if (IS_THUMB_ADDR (pc
))
7920 *pcptr
= MAKE_THUMB_ADDR (*pcptr
);
7921 return arm_breakpoint_kind_from_pc (gdbarch
, pcptr
);
7924 return ARM_BP_KIND_ARM
;
7928 do_cleanups (old_chain
);
7931 return arm_breakpoint_kind_from_pc (gdbarch
, pcptr
);
7934 /* Extract from an array REGBUF containing the (raw) register state a
7935 function return value of type TYPE, and copy that, in virtual
7936 format, into VALBUF. */
7939 arm_extract_return_value (struct type
*type
, struct regcache
*regs
,
7942 struct gdbarch
*gdbarch
= get_regcache_arch (regs
);
7943 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
7945 if (TYPE_CODE_FLT
== TYPE_CODE (type
))
7947 switch (gdbarch_tdep (gdbarch
)->fp_model
)
7951 /* The value is in register F0 in internal format. We need to
7952 extract the raw value and then convert it to the desired
7954 bfd_byte tmpbuf
[FP_REGISTER_SIZE
];
7956 regcache_cooked_read (regs
, ARM_F0_REGNUM
, tmpbuf
);
7957 convert_from_extended (floatformat_from_type (type
), tmpbuf
,
7958 valbuf
, gdbarch_byte_order (gdbarch
));
7962 case ARM_FLOAT_SOFT_FPA
:
7963 case ARM_FLOAT_SOFT_VFP
:
7964 /* ARM_FLOAT_VFP can arise if this is a variadic function so
7965 not using the VFP ABI code. */
7967 regcache_cooked_read (regs
, ARM_A1_REGNUM
, valbuf
);
7968 if (TYPE_LENGTH (type
) > 4)
7969 regcache_cooked_read (regs
, ARM_A1_REGNUM
+ 1,
7970 valbuf
+ INT_REGISTER_SIZE
);
7974 internal_error (__FILE__
, __LINE__
,
7975 _("arm_extract_return_value: "
7976 "Floating point model not supported"));
7980 else if (TYPE_CODE (type
) == TYPE_CODE_INT
7981 || TYPE_CODE (type
) == TYPE_CODE_CHAR
7982 || TYPE_CODE (type
) == TYPE_CODE_BOOL
7983 || TYPE_CODE (type
) == TYPE_CODE_PTR
7984 || TYPE_CODE (type
) == TYPE_CODE_REF
7985 || TYPE_CODE (type
) == TYPE_CODE_ENUM
)
7987 /* If the type is a plain integer, then the access is
7988 straight-forward. Otherwise we have to play around a bit
7990 int len
= TYPE_LENGTH (type
);
7991 int regno
= ARM_A1_REGNUM
;
7996 /* By using store_unsigned_integer we avoid having to do
7997 anything special for small big-endian values. */
7998 regcache_cooked_read_unsigned (regs
, regno
++, &tmp
);
7999 store_unsigned_integer (valbuf
,
8000 (len
> INT_REGISTER_SIZE
8001 ? INT_REGISTER_SIZE
: len
),
8003 len
-= INT_REGISTER_SIZE
;
8004 valbuf
+= INT_REGISTER_SIZE
;
8009 /* For a structure or union the behaviour is as if the value had
8010 been stored to word-aligned memory and then loaded into
8011 registers with 32-bit load instruction(s). */
8012 int len
= TYPE_LENGTH (type
);
8013 int regno
= ARM_A1_REGNUM
;
8014 bfd_byte tmpbuf
[INT_REGISTER_SIZE
];
8018 regcache_cooked_read (regs
, regno
++, tmpbuf
);
8019 memcpy (valbuf
, tmpbuf
,
8020 len
> INT_REGISTER_SIZE
? INT_REGISTER_SIZE
: len
);
8021 len
-= INT_REGISTER_SIZE
;
8022 valbuf
+= INT_REGISTER_SIZE
;
8028 /* Will a function return an aggregate type in memory or in a
8029 register? Return 0 if an aggregate type can be returned in a
8030 register, 1 if it must be returned in memory. */
8033 arm_return_in_memory (struct gdbarch
*gdbarch
, struct type
*type
)
8035 enum type_code code
;
8037 type
= check_typedef (type
);
8039 /* Simple, non-aggregate types (ie not including vectors and
8040 complex) are always returned in a register (or registers). */
8041 code
= TYPE_CODE (type
);
8042 if (TYPE_CODE_STRUCT
!= code
&& TYPE_CODE_UNION
!= code
8043 && TYPE_CODE_ARRAY
!= code
&& TYPE_CODE_COMPLEX
!= code
)
8046 if (TYPE_CODE_ARRAY
== code
&& TYPE_VECTOR (type
))
8048 /* Vector values should be returned using ARM registers if they
8049 are not over 16 bytes. */
8050 return (TYPE_LENGTH (type
) > 16);
8053 if (gdbarch_tdep (gdbarch
)->arm_abi
!= ARM_ABI_APCS
)
8055 /* The AAPCS says all aggregates not larger than a word are returned
8057 if (TYPE_LENGTH (type
) <= INT_REGISTER_SIZE
)
8066 /* All aggregate types that won't fit in a register must be returned
8068 if (TYPE_LENGTH (type
) > INT_REGISTER_SIZE
)
8071 /* In the ARM ABI, "integer" like aggregate types are returned in
8072 registers. For an aggregate type to be integer like, its size
8073 must be less than or equal to INT_REGISTER_SIZE and the
8074 offset of each addressable subfield must be zero. Note that bit
8075 fields are not addressable, and all addressable subfields of
8076 unions always start at offset zero.
8078 This function is based on the behaviour of GCC 2.95.1.
8079 See: gcc/arm.c: arm_return_in_memory() for details.
8081 Note: All versions of GCC before GCC 2.95.2 do not set up the
8082 parameters correctly for a function returning the following
8083 structure: struct { float f;}; This should be returned in memory,
8084 not a register. Richard Earnshaw sent me a patch, but I do not
8085 know of any way to detect if a function like the above has been
8086 compiled with the correct calling convention. */
8088 /* Assume all other aggregate types can be returned in a register.
8089 Run a check for structures, unions and arrays. */
8092 if ((TYPE_CODE_STRUCT
== code
) || (TYPE_CODE_UNION
== code
))
8095 /* Need to check if this struct/union is "integer" like. For
8096 this to be true, its size must be less than or equal to
8097 INT_REGISTER_SIZE and the offset of each addressable
8098 subfield must be zero. Note that bit fields are not
8099 addressable, and unions always start at offset zero. If any
8100 of the subfields is a floating point type, the struct/union
8101 cannot be an integer type. */
8103 /* For each field in the object, check:
8104 1) Is it FP? --> yes, nRc = 1;
8105 2) Is it addressable (bitpos != 0) and
8106 not packed (bitsize == 0)?
8110 for (i
= 0; i
< TYPE_NFIELDS (type
); i
++)
8112 enum type_code field_type_code
;
8115 = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type
,
8118 /* Is it a floating point type field? */
8119 if (field_type_code
== TYPE_CODE_FLT
)
8125 /* If bitpos != 0, then we have to care about it. */
8126 if (TYPE_FIELD_BITPOS (type
, i
) != 0)
8128 /* Bitfields are not addressable. If the field bitsize is
8129 zero, then the field is not packed. Hence it cannot be
8130 a bitfield or any other packed type. */
8131 if (TYPE_FIELD_BITSIZE (type
, i
) == 0)
8144 /* Write into appropriate registers a function return value of type
8145 TYPE, given in virtual format. */
8148 arm_store_return_value (struct type
*type
, struct regcache
*regs
,
8149 const gdb_byte
*valbuf
)
8151 struct gdbarch
*gdbarch
= get_regcache_arch (regs
);
8152 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
8154 if (TYPE_CODE (type
) == TYPE_CODE_FLT
)
8156 gdb_byte buf
[MAX_REGISTER_SIZE
];
8158 switch (gdbarch_tdep (gdbarch
)->fp_model
)
8162 convert_to_extended (floatformat_from_type (type
), buf
, valbuf
,
8163 gdbarch_byte_order (gdbarch
));
8164 regcache_cooked_write (regs
, ARM_F0_REGNUM
, buf
);
8167 case ARM_FLOAT_SOFT_FPA
:
8168 case ARM_FLOAT_SOFT_VFP
:
8169 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8170 not using the VFP ABI code. */
8172 regcache_cooked_write (regs
, ARM_A1_REGNUM
, valbuf
);
8173 if (TYPE_LENGTH (type
) > 4)
8174 regcache_cooked_write (regs
, ARM_A1_REGNUM
+ 1,
8175 valbuf
+ INT_REGISTER_SIZE
);
8179 internal_error (__FILE__
, __LINE__
,
8180 _("arm_store_return_value: Floating "
8181 "point model not supported"));
8185 else if (TYPE_CODE (type
) == TYPE_CODE_INT
8186 || TYPE_CODE (type
) == TYPE_CODE_CHAR
8187 || TYPE_CODE (type
) == TYPE_CODE_BOOL
8188 || TYPE_CODE (type
) == TYPE_CODE_PTR
8189 || TYPE_CODE (type
) == TYPE_CODE_REF
8190 || TYPE_CODE (type
) == TYPE_CODE_ENUM
)
8192 if (TYPE_LENGTH (type
) <= 4)
8194 /* Values of one word or less are zero/sign-extended and
8196 bfd_byte tmpbuf
[INT_REGISTER_SIZE
];
8197 LONGEST val
= unpack_long (type
, valbuf
);
8199 store_signed_integer (tmpbuf
, INT_REGISTER_SIZE
, byte_order
, val
);
8200 regcache_cooked_write (regs
, ARM_A1_REGNUM
, tmpbuf
);
8204 /* Integral values greater than one word are stored in consecutive
8205 registers starting with r0. This will always be a multiple of
8206 the regiser size. */
8207 int len
= TYPE_LENGTH (type
);
8208 int regno
= ARM_A1_REGNUM
;
8212 regcache_cooked_write (regs
, regno
++, valbuf
);
8213 len
-= INT_REGISTER_SIZE
;
8214 valbuf
+= INT_REGISTER_SIZE
;
8220 /* For a structure or union the behaviour is as if the value had
8221 been stored to word-aligned memory and then loaded into
8222 registers with 32-bit load instruction(s). */
8223 int len
= TYPE_LENGTH (type
);
8224 int regno
= ARM_A1_REGNUM
;
8225 bfd_byte tmpbuf
[INT_REGISTER_SIZE
];
8229 memcpy (tmpbuf
, valbuf
,
8230 len
> INT_REGISTER_SIZE
? INT_REGISTER_SIZE
: len
);
8231 regcache_cooked_write (regs
, regno
++, tmpbuf
);
8232 len
-= INT_REGISTER_SIZE
;
8233 valbuf
+= INT_REGISTER_SIZE
;
8239 /* Handle function return values. */
8241 static enum return_value_convention
8242 arm_return_value (struct gdbarch
*gdbarch
, struct value
*function
,
8243 struct type
*valtype
, struct regcache
*regcache
,
8244 gdb_byte
*readbuf
, const gdb_byte
*writebuf
)
8246 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
8247 struct type
*func_type
= function
? value_type (function
) : NULL
;
8248 enum arm_vfp_cprc_base_type vfp_base_type
;
8251 if (arm_vfp_abi_for_function (gdbarch
, func_type
)
8252 && arm_vfp_call_candidate (valtype
, &vfp_base_type
, &vfp_base_count
))
8254 int reg_char
= arm_vfp_cprc_reg_char (vfp_base_type
);
8255 int unit_length
= arm_vfp_cprc_unit_length (vfp_base_type
);
8257 for (i
= 0; i
< vfp_base_count
; i
++)
8259 if (reg_char
== 'q')
8262 arm_neon_quad_write (gdbarch
, regcache
, i
,
8263 writebuf
+ i
* unit_length
);
8266 arm_neon_quad_read (gdbarch
, regcache
, i
,
8267 readbuf
+ i
* unit_length
);
8274 xsnprintf (name_buf
, sizeof (name_buf
), "%c%d", reg_char
, i
);
8275 regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
8278 regcache_cooked_write (regcache
, regnum
,
8279 writebuf
+ i
* unit_length
);
8281 regcache_cooked_read (regcache
, regnum
,
8282 readbuf
+ i
* unit_length
);
8285 return RETURN_VALUE_REGISTER_CONVENTION
;
8288 if (TYPE_CODE (valtype
) == TYPE_CODE_STRUCT
8289 || TYPE_CODE (valtype
) == TYPE_CODE_UNION
8290 || TYPE_CODE (valtype
) == TYPE_CODE_ARRAY
)
8292 if (tdep
->struct_return
== pcc_struct_return
8293 || arm_return_in_memory (gdbarch
, valtype
))
8294 return RETURN_VALUE_STRUCT_CONVENTION
;
8296 else if (TYPE_CODE (valtype
) == TYPE_CODE_COMPLEX
)
8298 if (arm_return_in_memory (gdbarch
, valtype
))
8299 return RETURN_VALUE_STRUCT_CONVENTION
;
8303 arm_store_return_value (valtype
, regcache
, writebuf
);
8306 arm_extract_return_value (valtype
, regcache
, readbuf
);
8308 return RETURN_VALUE_REGISTER_CONVENTION
;
8313 arm_get_longjmp_target (struct frame_info
*frame
, CORE_ADDR
*pc
)
8315 struct gdbarch
*gdbarch
= get_frame_arch (frame
);
8316 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
8317 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
8319 gdb_byte buf
[INT_REGISTER_SIZE
];
8321 jb_addr
= get_frame_register_unsigned (frame
, ARM_A1_REGNUM
);
8323 if (target_read_memory (jb_addr
+ tdep
->jb_pc
* tdep
->jb_elt_size
, buf
,
8327 *pc
= extract_unsigned_integer (buf
, INT_REGISTER_SIZE
, byte_order
);
8331 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
8332 return the target PC. Otherwise return 0. */
8335 arm_skip_stub (struct frame_info
*frame
, CORE_ADDR pc
)
8339 CORE_ADDR start_addr
;
8341 /* Find the starting address and name of the function containing the PC. */
8342 if (find_pc_partial_function (pc
, &name
, &start_addr
, NULL
) == 0)
8344 /* Trampoline 'bx reg' doesn't belong to any functions. Do the
8346 start_addr
= arm_skip_bx_reg (frame
, pc
);
8347 if (start_addr
!= 0)
8353 /* If PC is in a Thumb call or return stub, return the address of the
8354 target PC, which is in a register. The thunk functions are called
8355 _call_via_xx, where x is the register name. The possible names
8356 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
8357 functions, named __ARM_call_via_r[0-7]. */
8358 if (startswith (name
, "_call_via_")
8359 || startswith (name
, "__ARM_call_via_"))
8361 /* Use the name suffix to determine which register contains the
8363 static char *table
[15] =
8364 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
8365 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
8368 int offset
= strlen (name
) - 2;
8370 for (regno
= 0; regno
<= 14; regno
++)
8371 if (strcmp (&name
[offset
], table
[regno
]) == 0)
8372 return get_frame_register_unsigned (frame
, regno
);
8375 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
8376 non-interworking calls to foo. We could decode the stubs
8377 to find the target but it's easier to use the symbol table. */
8378 namelen
= strlen (name
);
8379 if (name
[0] == '_' && name
[1] == '_'
8380 && ((namelen
> 2 + strlen ("_from_thumb")
8381 && startswith (name
+ namelen
- strlen ("_from_thumb"), "_from_thumb"))
8382 || (namelen
> 2 + strlen ("_from_arm")
8383 && startswith (name
+ namelen
- strlen ("_from_arm"), "_from_arm"))))
8386 int target_len
= namelen
- 2;
8387 struct bound_minimal_symbol minsym
;
8388 struct objfile
*objfile
;
8389 struct obj_section
*sec
;
8391 if (name
[namelen
- 1] == 'b')
8392 target_len
-= strlen ("_from_thumb");
8394 target_len
-= strlen ("_from_arm");
8396 target_name
= (char *) alloca (target_len
+ 1);
8397 memcpy (target_name
, name
+ 2, target_len
);
8398 target_name
[target_len
] = '\0';
8400 sec
= find_pc_section (pc
);
8401 objfile
= (sec
== NULL
) ? NULL
: sec
->objfile
;
8402 minsym
= lookup_minimal_symbol (target_name
, NULL
, objfile
);
8403 if (minsym
.minsym
!= NULL
)
8404 return BMSYMBOL_VALUE_ADDRESS (minsym
);
8409 return 0; /* not a stub */
8413 set_arm_command (char *args
, int from_tty
)
8415 printf_unfiltered (_("\
8416 \"set arm\" must be followed by an apporpriate subcommand.\n"));
8417 help_list (setarmcmdlist
, "set arm ", all_commands
, gdb_stdout
);
/* Implement the top-level "show arm" command: display the value of
   every "show arm" subcommand.  */

static void
show_arm_command (char *args, int from_tty)
{
  cmd_show_list (showarmcmdlist, from_tty, "");
}
8427 arm_update_current_architecture (void)
8429 struct gdbarch_info info
;
8431 /* If the current architecture is not ARM, we have nothing to do. */
8432 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch
!= bfd_arch_arm
)
8435 /* Update the architecture. */
8436 gdbarch_info_init (&info
);
8438 if (!gdbarch_update_p (info
))
8439 internal_error (__FILE__
, __LINE__
, _("could not update architecture"));
8443 set_fp_model_sfunc (char *args
, int from_tty
,
8444 struct cmd_list_element
*c
)
8448 for (fp_model
= ARM_FLOAT_AUTO
; fp_model
!= ARM_FLOAT_LAST
; fp_model
++)
8449 if (strcmp (current_fp_model
, fp_model_strings
[fp_model
]) == 0)
8451 arm_fp_model
= (enum arm_float_model
) fp_model
;
8455 if (fp_model
== ARM_FLOAT_LAST
)
8456 internal_error (__FILE__
, __LINE__
, _("Invalid fp model accepted: %s."),
8459 arm_update_current_architecture ();
8463 show_fp_model (struct ui_file
*file
, int from_tty
,
8464 struct cmd_list_element
*c
, const char *value
)
8466 struct gdbarch_tdep
*tdep
= gdbarch_tdep (target_gdbarch ());
8468 if (arm_fp_model
== ARM_FLOAT_AUTO
8469 && gdbarch_bfd_arch_info (target_gdbarch ())->arch
== bfd_arch_arm
)
8470 fprintf_filtered (file
, _("\
8471 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
8472 fp_model_strings
[tdep
->fp_model
]);
8474 fprintf_filtered (file
, _("\
8475 The current ARM floating point model is \"%s\".\n"),
8476 fp_model_strings
[arm_fp_model
]);
8480 arm_set_abi (char *args
, int from_tty
,
8481 struct cmd_list_element
*c
)
8485 for (arm_abi
= ARM_ABI_AUTO
; arm_abi
!= ARM_ABI_LAST
; arm_abi
++)
8486 if (strcmp (arm_abi_string
, arm_abi_strings
[arm_abi
]) == 0)
8488 arm_abi_global
= (enum arm_abi_kind
) arm_abi
;
8492 if (arm_abi
== ARM_ABI_LAST
)
8493 internal_error (__FILE__
, __LINE__
, _("Invalid ABI accepted: %s."),
8496 arm_update_current_architecture ();
8500 arm_show_abi (struct ui_file
*file
, int from_tty
,
8501 struct cmd_list_element
*c
, const char *value
)
8503 struct gdbarch_tdep
*tdep
= gdbarch_tdep (target_gdbarch ());
8505 if (arm_abi_global
== ARM_ABI_AUTO
8506 && gdbarch_bfd_arch_info (target_gdbarch ())->arch
== bfd_arch_arm
)
8507 fprintf_filtered (file
, _("\
8508 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
8509 arm_abi_strings
[tdep
->arm_abi
]);
8511 fprintf_filtered (file
, _("The current ARM ABI is \"%s\".\n"),
/* "show" callback reporting which execution mode (ARM or Thumb) GDB
   assumes when symbol information is unavailable; the current setting
   is held in arm_fallback_mode_string.  */

static void
arm_show_fallback_mode (struct ui_file *file, int from_tty,
			struct cmd_list_element *c, const char *value)
{
  fprintf_filtered (file,
		    _("The current execution mode assumed "
		      "(when symbols are unavailable) is \"%s\".\n"),
		    arm_fallback_mode_string);
}
/* "show" callback reporting which execution mode (ARM or Thumb) GDB
   assumes even when symbol information is available; the current
   setting is held in arm_force_mode_string.  */

static void
arm_show_force_mode (struct ui_file *file, int from_tty,
		     struct cmd_list_element *c, const char *value)
{
  fprintf_filtered (file,
		    _("The current execution mode assumed "
		      "(even when symbols are available) is \"%s\".\n"),
		    arm_force_mode_string);
}
/* If the user changes the register disassembly style used for info
   register and other commands, we have to also switch the style used
   in opcodes for disassembly output.  This function is run in the "set
   arm disassembly" command, and does that.  */

static void
set_disassembly_style_sfunc (char *args, int from_tty,
			     struct cmd_list_element *c)
{
  /* All the work is done by the helper, which reads the new value of
     the "disassembly-style" setting from its global.  */
  set_disassembly_style ();
}
8547 /* Return the ARM register name corresponding to register I. */
8549 arm_register_name (struct gdbarch
*gdbarch
, int i
)
8551 const int num_regs
= gdbarch_num_regs (gdbarch
);
8553 if (gdbarch_tdep (gdbarch
)->have_vfp_pseudos
8554 && i
>= num_regs
&& i
< num_regs
+ 32)
8556 static const char *const vfp_pseudo_names
[] = {
8557 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
8558 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
8559 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
8560 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
8563 return vfp_pseudo_names
[i
- num_regs
];
8566 if (gdbarch_tdep (gdbarch
)->have_neon_pseudos
8567 && i
>= num_regs
+ 32 && i
< num_regs
+ 32 + 16)
8569 static const char *const neon_pseudo_names
[] = {
8570 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
8571 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
8574 return neon_pseudo_names
[i
- num_regs
- 32];
8577 if (i
>= ARRAY_SIZE (arm_register_names
))
8578 /* These registers are only supported on targets which supply
8579 an XML description. */
8582 return arm_register_names
[i
];
8586 set_disassembly_style (void)
8590 /* Find the style that the user wants. */
8591 for (current
= 0; current
< num_disassembly_options
; current
++)
8592 if (disassembly_style
== valid_disassembly_styles
[current
])
8594 gdb_assert (current
< num_disassembly_options
);
8596 /* Synchronize the disassembler. */
8597 set_arm_regname_option (current
);
8600 /* Test whether the coff symbol specific value corresponds to a Thumb
8604 coff_sym_is_thumb (int val
)
8606 return (val
== C_THUMBEXT
8607 || val
== C_THUMBSTAT
8608 || val
== C_THUMBEXTFUNC
8609 || val
== C_THUMBSTATFUNC
8610 || val
== C_THUMBLABEL
);
/* arm_coff_make_msymbol_special()
   arm_elf_make_msymbol_special()

   These functions test whether the COFF or ELF symbol corresponds to
   an address in thumb code, and set a "special" bit in a minimal
   symbol to indicate that it does.  */

static void
arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
{
  elf_symbol_type *elfsym = (elf_symbol_type *) sym;

  /* The linker records the symbol's branch type in the BFD-internal
     st_target_internal field; ST_BRANCH_TO_THUMB marks Thumb code.  */
  if (ARM_GET_SYM_BRANCH_TYPE (elfsym->internal_elf_sym.st_target_internal)
      == ST_BRANCH_TO_THUMB)
    MSYMBOL_SET_SPECIAL (msym);
}
/* Mark MSYM as Thumb ("special") when the COFF storage class VAL is
   one of the Thumb-specific classes; see coff_sym_is_thumb.  */

static void
arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
{
  if (coff_sym_is_thumb (val))
    MSYMBOL_SET_SPECIAL (msym);
}
/* Per-objfile data destructor for the ARM mapping-symbol tables.  ARG
   is the struct arm_per_objfile attached to OBJFILE; release the
   mapping-symbol vector of every BFD section.  The struct itself is
   obstack-allocated, so only the vectors need freeing.  */

static void
arm_objfile_data_free (struct objfile *objfile, void *arg)
{
  struct arm_per_objfile *data = (struct arm_per_objfile *) arg;
  unsigned int i;

  for (i = 0; i < objfile->obfd->section_count; i++)
    VEC_free (arm_mapping_symbol_s, data->section_maps[i]);
}
8648 arm_record_special_symbol (struct gdbarch
*gdbarch
, struct objfile
*objfile
,
8651 const char *name
= bfd_asymbol_name (sym
);
8652 struct arm_per_objfile
*data
;
8653 VEC(arm_mapping_symbol_s
) **map_p
;
8654 struct arm_mapping_symbol new_map_sym
;
8656 gdb_assert (name
[0] == '$');
8657 if (name
[1] != 'a' && name
[1] != 't' && name
[1] != 'd')
8660 data
= (struct arm_per_objfile
*) objfile_data (objfile
,
8661 arm_objfile_data_key
);
8664 data
= OBSTACK_ZALLOC (&objfile
->objfile_obstack
,
8665 struct arm_per_objfile
);
8666 set_objfile_data (objfile
, arm_objfile_data_key
, data
);
8667 data
->section_maps
= OBSTACK_CALLOC (&objfile
->objfile_obstack
,
8668 objfile
->obfd
->section_count
,
8669 VEC(arm_mapping_symbol_s
) *);
8671 map_p
= &data
->section_maps
[bfd_get_section (sym
)->index
];
8673 new_map_sym
.value
= sym
->value
;
8674 new_map_sym
.type
= name
[1];
8676 /* Assume that most mapping symbols appear in order of increasing
8677 value. If they were randomly distributed, it would be faster to
8678 always push here and then sort at first use. */
8679 if (!VEC_empty (arm_mapping_symbol_s
, *map_p
))
8681 struct arm_mapping_symbol
*prev_map_sym
;
8683 prev_map_sym
= VEC_last (arm_mapping_symbol_s
, *map_p
);
8684 if (prev_map_sym
->value
>= sym
->value
)
8687 idx
= VEC_lower_bound (arm_mapping_symbol_s
, *map_p
, &new_map_sym
,
8688 arm_compare_mapping_symbols
);
8689 VEC_safe_insert (arm_mapping_symbol_s
, *map_p
, idx
, &new_map_sym
);
8694 VEC_safe_push (arm_mapping_symbol_s
, *map_p
, &new_map_sym
);
8698 arm_write_pc (struct regcache
*regcache
, CORE_ADDR pc
)
8700 struct gdbarch
*gdbarch
= get_regcache_arch (regcache
);
8701 regcache_cooked_write_unsigned (regcache
, ARM_PC_REGNUM
, pc
);
8703 /* If necessary, set the T bit. */
8706 ULONGEST val
, t_bit
;
8707 regcache_cooked_read_unsigned (regcache
, ARM_PS_REGNUM
, &val
);
8708 t_bit
= arm_psr_thumb_bit (gdbarch
);
8709 if (arm_pc_is_thumb (gdbarch
, pc
))
8710 regcache_cooked_write_unsigned (regcache
, ARM_PS_REGNUM
,
8713 regcache_cooked_write_unsigned (regcache
, ARM_PS_REGNUM
,
8718 /* Read the contents of a NEON quad register, by reading from two
8719 double registers. This is used to implement the quad pseudo
8720 registers, and for argument passing in case the quad registers are
8721 missing; vectors are passed in quad registers when using the VFP
8722 ABI, even if a NEON unit is not present. REGNUM is the index of
8723 the quad register, in [0, 15]. */
8725 static enum register_status
8726 arm_neon_quad_read (struct gdbarch
*gdbarch
, struct regcache
*regcache
,
8727 int regnum
, gdb_byte
*buf
)
8730 gdb_byte reg_buf
[8];
8731 int offset
, double_regnum
;
8732 enum register_status status
;
8734 xsnprintf (name_buf
, sizeof (name_buf
), "d%d", regnum
<< 1);
8735 double_regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
8738 /* d0 is always the least significant half of q0. */
8739 if (gdbarch_byte_order (gdbarch
) == BFD_ENDIAN_BIG
)
8744 status
= regcache_raw_read (regcache
, double_regnum
, reg_buf
);
8745 if (status
!= REG_VALID
)
8747 memcpy (buf
+ offset
, reg_buf
, 8);
8749 offset
= 8 - offset
;
8750 status
= regcache_raw_read (regcache
, double_regnum
+ 1, reg_buf
);
8751 if (status
!= REG_VALID
)
8753 memcpy (buf
+ offset
, reg_buf
, 8);
8758 static enum register_status
8759 arm_pseudo_read (struct gdbarch
*gdbarch
, struct regcache
*regcache
,
8760 int regnum
, gdb_byte
*buf
)
8762 const int num_regs
= gdbarch_num_regs (gdbarch
);
8764 gdb_byte reg_buf
[8];
8765 int offset
, double_regnum
;
8767 gdb_assert (regnum
>= num_regs
);
8770 if (gdbarch_tdep (gdbarch
)->have_neon_pseudos
&& regnum
>= 32 && regnum
< 48)
8771 /* Quad-precision register. */
8772 return arm_neon_quad_read (gdbarch
, regcache
, regnum
- 32, buf
);
8775 enum register_status status
;
8777 /* Single-precision register. */
8778 gdb_assert (regnum
< 32);
8780 /* s0 is always the least significant half of d0. */
8781 if (gdbarch_byte_order (gdbarch
) == BFD_ENDIAN_BIG
)
8782 offset
= (regnum
& 1) ? 0 : 4;
8784 offset
= (regnum
& 1) ? 4 : 0;
8786 xsnprintf (name_buf
, sizeof (name_buf
), "d%d", regnum
>> 1);
8787 double_regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
8790 status
= regcache_raw_read (regcache
, double_regnum
, reg_buf
);
8791 if (status
== REG_VALID
)
8792 memcpy (buf
, reg_buf
+ offset
, 4);
8797 /* Store the contents of BUF to a NEON quad register, by writing to
8798 two double registers. This is used to implement the quad pseudo
8799 registers, and for argument passing in case the quad registers are
8800 missing; vectors are passed in quad registers when using the VFP
8801 ABI, even if a NEON unit is not present. REGNUM is the index
8802 of the quad register, in [0, 15]. */
8805 arm_neon_quad_write (struct gdbarch
*gdbarch
, struct regcache
*regcache
,
8806 int regnum
, const gdb_byte
*buf
)
8809 int offset
, double_regnum
;
8811 xsnprintf (name_buf
, sizeof (name_buf
), "d%d", regnum
<< 1);
8812 double_regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
8815 /* d0 is always the least significant half of q0. */
8816 if (gdbarch_byte_order (gdbarch
) == BFD_ENDIAN_BIG
)
8821 regcache_raw_write (regcache
, double_regnum
, buf
+ offset
);
8822 offset
= 8 - offset
;
8823 regcache_raw_write (regcache
, double_regnum
+ 1, buf
+ offset
);
8827 arm_pseudo_write (struct gdbarch
*gdbarch
, struct regcache
*regcache
,
8828 int regnum
, const gdb_byte
*buf
)
8830 const int num_regs
= gdbarch_num_regs (gdbarch
);
8832 gdb_byte reg_buf
[8];
8833 int offset
, double_regnum
;
8835 gdb_assert (regnum
>= num_regs
);
8838 if (gdbarch_tdep (gdbarch
)->have_neon_pseudos
&& regnum
>= 32 && regnum
< 48)
8839 /* Quad-precision register. */
8840 arm_neon_quad_write (gdbarch
, regcache
, regnum
- 32, buf
);
8843 /* Single-precision register. */
8844 gdb_assert (regnum
< 32);
8846 /* s0 is always the least significant half of d0. */
8847 if (gdbarch_byte_order (gdbarch
) == BFD_ENDIAN_BIG
)
8848 offset
= (regnum
& 1) ? 0 : 4;
8850 offset
= (regnum
& 1) ? 4 : 0;
8852 xsnprintf (name_buf
, sizeof (name_buf
), "d%d", regnum
>> 1);
8853 double_regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
8856 regcache_raw_read (regcache
, double_regnum
, reg_buf
);
8857 memcpy (reg_buf
+ offset
, buf
, 4);
8858 regcache_raw_write (regcache
, double_regnum
, reg_buf
);
/* Frame-value hook for user-defined ARM register aliases.  BATON
   points at the register number to read from FRAME.  */

static struct value *
value_of_arm_user_reg (struct frame_info *frame, const void *baton)
{
  int regnum = *(const int *) baton;

  return value_of_register (regnum, frame);
}
8869 static enum gdb_osabi
8870 arm_elf_osabi_sniffer (bfd
*abfd
)
8872 unsigned int elfosabi
;
8873 enum gdb_osabi osabi
= GDB_OSABI_UNKNOWN
;
8875 elfosabi
= elf_elfheader (abfd
)->e_ident
[EI_OSABI
];
8877 if (elfosabi
== ELFOSABI_ARM
)
8878 /* GNU tools use this value. Check note sections in this case,
8880 bfd_map_over_sections (abfd
,
8881 generic_elf_osabi_sniff_abi_tag_sections
,
8884 /* Anything else will be handled by the generic ELF sniffer. */
/* Implement the register_reggroup_p gdbarch method: return non-zero
   if register REGNUM should be listed as a member of GROUP.  */

static int
arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
			 struct reggroup *group)
{
  /* FPS register's type is INT, but belongs to float_reggroup.  Beside
     this, FPS register belongs to save_regroup, restore_reggroup, and
     all_reggroup, of course.  */
  if (regnum == ARM_FPS_REGNUM)
    return (group == float_reggroup
	    || group == save_reggroup
	    || group == restore_reggroup
	    || group == all_reggroup);
  else
    return default_register_reggroup_p (gdbarch, regnum, group);
}
8905 /* For backward-compatibility we allow two 'g' packet lengths with
8906 the remote protocol depending on whether FPA registers are
8907 supplied. M-profile targets do not have FPA registers, but some
8908 stubs already exist in the wild which use a 'g' packet which
8909 supplies them albeit with dummy values. The packet format which
8910 includes FPA registers should be considered deprecated for
8911 M-profile targets. */
8914 arm_register_g_packet_guesses (struct gdbarch
*gdbarch
)
8916 if (gdbarch_tdep (gdbarch
)->is_m
)
8918 /* If we know from the executable this is an M-profile target,
8919 cater for remote targets whose register set layout is the
8920 same as the FPA layout. */
8921 register_remote_g_packet_guess (gdbarch
,
8922 /* r0-r12,sp,lr,pc; f0-f7; fps,xpsr */
8923 (16 * INT_REGISTER_SIZE
)
8924 + (8 * FP_REGISTER_SIZE
)
8925 + (2 * INT_REGISTER_SIZE
),
8926 tdesc_arm_with_m_fpa_layout
);
8928 /* The regular M-profile layout. */
8929 register_remote_g_packet_guess (gdbarch
,
8930 /* r0-r12,sp,lr,pc; xpsr */
8931 (16 * INT_REGISTER_SIZE
)
8932 + INT_REGISTER_SIZE
,
8935 /* M-profile plus M4F VFP. */
8936 register_remote_g_packet_guess (gdbarch
,
8937 /* r0-r12,sp,lr,pc; d0-d15; fpscr,xpsr */
8938 (16 * INT_REGISTER_SIZE
)
8939 + (16 * VFP_REGISTER_SIZE
)
8940 + (2 * INT_REGISTER_SIZE
),
8941 tdesc_arm_with_m_vfp_d16
);
8944 /* Otherwise we don't have a useful guess. */
8947 /* Implement the code_of_frame_writable gdbarch method. */
8950 arm_code_of_frame_writable (struct gdbarch
*gdbarch
, struct frame_info
*frame
)
8952 if (gdbarch_tdep (gdbarch
)->is_m
8953 && get_frame_type (frame
) == SIGTRAMP_FRAME
)
8955 /* M-profile exception frames return to some magic PCs, where
8956 isn't writable at all. */
8964 /* Initialize the current architecture based on INFO. If possible,
8965 re-use an architecture from ARCHES, which is a list of
8966 architectures already created during this debugging session.
8968 Called e.g. at program startup, when reading a core file, and when
8969 reading a binary file. */
8971 static struct gdbarch
*
8972 arm_gdbarch_init (struct gdbarch_info info
, struct gdbarch_list
*arches
)
8974 struct gdbarch_tdep
*tdep
;
8975 struct gdbarch
*gdbarch
;
8976 struct gdbarch_list
*best_arch
;
8977 enum arm_abi_kind arm_abi
= arm_abi_global
;
8978 enum arm_float_model fp_model
= arm_fp_model
;
8979 struct tdesc_arch_data
*tdesc_data
= NULL
;
8981 int vfp_register_count
= 0, have_vfp_pseudos
= 0, have_neon_pseudos
= 0;
8982 int have_wmmx_registers
= 0;
8984 int have_fpa_registers
= 1;
8985 const struct target_desc
*tdesc
= info
.target_desc
;
8987 /* If we have an object to base this architecture on, try to determine
8990 if (arm_abi
== ARM_ABI_AUTO
&& info
.abfd
!= NULL
)
8992 int ei_osabi
, e_flags
;
8994 switch (bfd_get_flavour (info
.abfd
))
8996 case bfd_target_coff_flavour
:
8997 /* Assume it's an old APCS-style ABI. */
8999 arm_abi
= ARM_ABI_APCS
;
9002 case bfd_target_elf_flavour
:
9003 ei_osabi
= elf_elfheader (info
.abfd
)->e_ident
[EI_OSABI
];
9004 e_flags
= elf_elfheader (info
.abfd
)->e_flags
;
9006 if (ei_osabi
== ELFOSABI_ARM
)
9008 /* GNU tools used to use this value, but do not for EABI
9009 objects. There's nowhere to tag an EABI version
9010 anyway, so assume APCS. */
9011 arm_abi
= ARM_ABI_APCS
;
9013 else if (ei_osabi
== ELFOSABI_NONE
|| ei_osabi
== ELFOSABI_GNU
)
9015 int eabi_ver
= EF_ARM_EABI_VERSION (e_flags
);
9016 int attr_arch
, attr_profile
;
9020 case EF_ARM_EABI_UNKNOWN
:
9021 /* Assume GNU tools. */
9022 arm_abi
= ARM_ABI_APCS
;
9025 case EF_ARM_EABI_VER4
:
9026 case EF_ARM_EABI_VER5
:
9027 arm_abi
= ARM_ABI_AAPCS
;
9028 /* EABI binaries default to VFP float ordering.
9029 They may also contain build attributes that can
9030 be used to identify if the VFP argument-passing
9032 if (fp_model
== ARM_FLOAT_AUTO
)
9035 switch (bfd_elf_get_obj_attr_int (info
.abfd
,
9039 case AEABI_VFP_args_base
:
9040 /* "The user intended FP parameter/result
9041 passing to conform to AAPCS, base
9043 fp_model
= ARM_FLOAT_SOFT_VFP
;
9045 case AEABI_VFP_args_vfp
:
9046 /* "The user intended FP parameter/result
9047 passing to conform to AAPCS, VFP
9049 fp_model
= ARM_FLOAT_VFP
;
9051 case AEABI_VFP_args_toolchain
:
9052 /* "The user intended FP parameter/result
9053 passing to conform to tool chain-specific
9054 conventions" - we don't know any such
9055 conventions, so leave it as "auto". */
9057 case AEABI_VFP_args_compatible
:
9058 /* "Code is compatible with both the base
9059 and VFP variants; the user did not permit
9060 non-variadic functions to pass FP
9061 parameters/results" - leave it as
9065 /* Attribute value not mentioned in the
9066 November 2012 ABI, so leave it as
9071 fp_model
= ARM_FLOAT_SOFT_VFP
;
9077 /* Leave it as "auto". */
9078 warning (_("unknown ARM EABI version 0x%x"), eabi_ver
);
9083 /* Detect M-profile programs. This only works if the
9084 executable file includes build attributes; GCC does
9085 copy them to the executable, but e.g. RealView does
9087 attr_arch
= bfd_elf_get_obj_attr_int (info
.abfd
, OBJ_ATTR_PROC
,
9089 attr_profile
= bfd_elf_get_obj_attr_int (info
.abfd
,
9091 Tag_CPU_arch_profile
);
9092 /* GCC specifies the profile for v6-M; RealView only
9093 specifies the profile for architectures starting with
9094 V7 (as opposed to architectures with a tag
9095 numerically greater than TAG_CPU_ARCH_V7). */
9096 if (!tdesc_has_registers (tdesc
)
9097 && (attr_arch
== TAG_CPU_ARCH_V6_M
9098 || attr_arch
== TAG_CPU_ARCH_V6S_M
9099 || attr_profile
== 'M'))
9104 if (fp_model
== ARM_FLOAT_AUTO
)
9106 int e_flags
= elf_elfheader (info
.abfd
)->e_flags
;
9108 switch (e_flags
& (EF_ARM_SOFT_FLOAT
| EF_ARM_VFP_FLOAT
))
9111 /* Leave it as "auto". Strictly speaking this case
9112 means FPA, but almost nobody uses that now, and
9113 many toolchains fail to set the appropriate bits
9114 for the floating-point model they use. */
9116 case EF_ARM_SOFT_FLOAT
:
9117 fp_model
= ARM_FLOAT_SOFT_FPA
;
9119 case EF_ARM_VFP_FLOAT
:
9120 fp_model
= ARM_FLOAT_VFP
;
9122 case EF_ARM_SOFT_FLOAT
| EF_ARM_VFP_FLOAT
:
9123 fp_model
= ARM_FLOAT_SOFT_VFP
;
9128 if (e_flags
& EF_ARM_BE8
)
9129 info
.byte_order_for_code
= BFD_ENDIAN_LITTLE
;
9134 /* Leave it as "auto". */
9139 /* Check any target description for validity. */
9140 if (tdesc_has_registers (tdesc
))
9142 /* For most registers we require GDB's default names; but also allow
9143 the numeric names for sp / lr / pc, as a convenience. */
9144 static const char *const arm_sp_names
[] = { "r13", "sp", NULL
};
9145 static const char *const arm_lr_names
[] = { "r14", "lr", NULL
};
9146 static const char *const arm_pc_names
[] = { "r15", "pc", NULL
};
9148 const struct tdesc_feature
*feature
;
9151 feature
= tdesc_find_feature (tdesc
,
9152 "org.gnu.gdb.arm.core");
9153 if (feature
== NULL
)
9155 feature
= tdesc_find_feature (tdesc
,
9156 "org.gnu.gdb.arm.m-profile");
9157 if (feature
== NULL
)
9163 tdesc_data
= tdesc_data_alloc ();
9166 for (i
= 0; i
< ARM_SP_REGNUM
; i
++)
9167 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
, i
,
9168 arm_register_names
[i
]);
9169 valid_p
&= tdesc_numbered_register_choices (feature
, tdesc_data
,
9172 valid_p
&= tdesc_numbered_register_choices (feature
, tdesc_data
,
9175 valid_p
&= tdesc_numbered_register_choices (feature
, tdesc_data
,
9179 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
,
9180 ARM_PS_REGNUM
, "xpsr");
9182 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
,
9183 ARM_PS_REGNUM
, "cpsr");
9187 tdesc_data_cleanup (tdesc_data
);
9191 feature
= tdesc_find_feature (tdesc
,
9192 "org.gnu.gdb.arm.fpa");
9193 if (feature
!= NULL
)
9196 for (i
= ARM_F0_REGNUM
; i
<= ARM_FPS_REGNUM
; i
++)
9197 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
, i
,
9198 arm_register_names
[i
]);
9201 tdesc_data_cleanup (tdesc_data
);
9206 have_fpa_registers
= 0;
9208 feature
= tdesc_find_feature (tdesc
,
9209 "org.gnu.gdb.xscale.iwmmxt");
9210 if (feature
!= NULL
)
9212 static const char *const iwmmxt_names
[] = {
9213 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9214 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9215 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9216 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9220 for (i
= ARM_WR0_REGNUM
; i
<= ARM_WR15_REGNUM
; i
++)
9222 &= tdesc_numbered_register (feature
, tdesc_data
, i
,
9223 iwmmxt_names
[i
- ARM_WR0_REGNUM
]);
9225 /* Check for the control registers, but do not fail if they
9227 for (i
= ARM_WC0_REGNUM
; i
<= ARM_WCASF_REGNUM
; i
++)
9228 tdesc_numbered_register (feature
, tdesc_data
, i
,
9229 iwmmxt_names
[i
- ARM_WR0_REGNUM
]);
9231 for (i
= ARM_WCGR0_REGNUM
; i
<= ARM_WCGR3_REGNUM
; i
++)
9233 &= tdesc_numbered_register (feature
, tdesc_data
, i
,
9234 iwmmxt_names
[i
- ARM_WR0_REGNUM
]);
9238 tdesc_data_cleanup (tdesc_data
);
9242 have_wmmx_registers
= 1;
9245 /* If we have a VFP unit, check whether the single precision registers
9246 are present. If not, then we will synthesize them as pseudo
9248 feature
= tdesc_find_feature (tdesc
,
9249 "org.gnu.gdb.arm.vfp");
9250 if (feature
!= NULL
)
9252 static const char *const vfp_double_names
[] = {
9253 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9254 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9255 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9256 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9259 /* Require the double precision registers. There must be either
9262 for (i
= 0; i
< 32; i
++)
9264 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
,
9266 vfp_double_names
[i
]);
9270 if (!valid_p
&& i
== 16)
9273 /* Also require FPSCR. */
9274 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
,
9275 ARM_FPSCR_REGNUM
, "fpscr");
9278 tdesc_data_cleanup (tdesc_data
);
9282 if (tdesc_unnumbered_register (feature
, "s0") == 0)
9283 have_vfp_pseudos
= 1;
9285 vfp_register_count
= i
;
9287 /* If we have VFP, also check for NEON. The architecture allows
9288 NEON without VFP (integer vector operations only), but GDB
9289 does not support that. */
9290 feature
= tdesc_find_feature (tdesc
,
9291 "org.gnu.gdb.arm.neon");
9292 if (feature
!= NULL
)
9294 /* NEON requires 32 double-precision registers. */
9297 tdesc_data_cleanup (tdesc_data
);
9301 /* If there are quad registers defined by the stub, use
9302 their type; otherwise (normally) provide them with
9303 the default type. */
9304 if (tdesc_unnumbered_register (feature
, "q0") == 0)
9305 have_neon_pseudos
= 1;
9312 /* If there is already a candidate, use it. */
9313 for (best_arch
= gdbarch_list_lookup_by_info (arches
, &info
);
9315 best_arch
= gdbarch_list_lookup_by_info (best_arch
->next
, &info
))
9317 if (arm_abi
!= ARM_ABI_AUTO
9318 && arm_abi
!= gdbarch_tdep (best_arch
->gdbarch
)->arm_abi
)
9321 if (fp_model
!= ARM_FLOAT_AUTO
9322 && fp_model
!= gdbarch_tdep (best_arch
->gdbarch
)->fp_model
)
9325 /* There are various other properties in tdep that we do not
9326 need to check here: those derived from a target description,
9327 since gdbarches with a different target description are
9328 automatically disqualified. */
9330 /* Do check is_m, though, since it might come from the binary. */
9331 if (is_m
!= gdbarch_tdep (best_arch
->gdbarch
)->is_m
)
9334 /* Found a match. */
9338 if (best_arch
!= NULL
)
9340 if (tdesc_data
!= NULL
)
9341 tdesc_data_cleanup (tdesc_data
);
9342 return best_arch
->gdbarch
;
9345 tdep
= XCNEW (struct gdbarch_tdep
);
9346 gdbarch
= gdbarch_alloc (&info
, tdep
);
9348 /* Record additional information about the architecture we are defining.
9349 These are gdbarch discriminators, like the OSABI. */
9350 tdep
->arm_abi
= arm_abi
;
9351 tdep
->fp_model
= fp_model
;
9353 tdep
->have_fpa_registers
= have_fpa_registers
;
9354 tdep
->have_wmmx_registers
= have_wmmx_registers
;
9355 gdb_assert (vfp_register_count
== 0
9356 || vfp_register_count
== 16
9357 || vfp_register_count
== 32);
9358 tdep
->vfp_register_count
= vfp_register_count
;
9359 tdep
->have_vfp_pseudos
= have_vfp_pseudos
;
9360 tdep
->have_neon_pseudos
= have_neon_pseudos
;
9361 tdep
->have_neon
= have_neon
;
9363 arm_register_g_packet_guesses (gdbarch
);
9366 switch (info
.byte_order_for_code
)
9368 case BFD_ENDIAN_BIG
:
9369 tdep
->arm_breakpoint
= arm_default_arm_be_breakpoint
;
9370 tdep
->arm_breakpoint_size
= sizeof (arm_default_arm_be_breakpoint
);
9371 tdep
->thumb_breakpoint
= arm_default_thumb_be_breakpoint
;
9372 tdep
->thumb_breakpoint_size
= sizeof (arm_default_thumb_be_breakpoint
);
9376 case BFD_ENDIAN_LITTLE
:
9377 tdep
->arm_breakpoint
= arm_default_arm_le_breakpoint
;
9378 tdep
->arm_breakpoint_size
= sizeof (arm_default_arm_le_breakpoint
);
9379 tdep
->thumb_breakpoint
= arm_default_thumb_le_breakpoint
;
9380 tdep
->thumb_breakpoint_size
= sizeof (arm_default_thumb_le_breakpoint
);
9385 internal_error (__FILE__
, __LINE__
,
9386 _("arm_gdbarch_init: bad byte order for float format"));
9389 /* On ARM targets char defaults to unsigned. */
9390 set_gdbarch_char_signed (gdbarch
, 0);
9392 /* Note: for displaced stepping, this includes the breakpoint, and one word
9393 of additional scratch space. This setting isn't used for anything beside
9394 displaced stepping at present. */
9395 set_gdbarch_max_insn_length (gdbarch
, 4 * DISPLACED_MODIFIED_INSNS
);
9397 /* This should be low enough for everything. */
9398 tdep
->lowest_pc
= 0x20;
9399 tdep
->jb_pc
= -1; /* Longjump support not enabled by default. */
9401 /* The default, for both APCS and AAPCS, is to return small
9402 structures in registers. */
9403 tdep
->struct_return
= reg_struct_return
;
9405 set_gdbarch_push_dummy_call (gdbarch
, arm_push_dummy_call
);
9406 set_gdbarch_frame_align (gdbarch
, arm_frame_align
);
9409 set_gdbarch_code_of_frame_writable (gdbarch
, arm_code_of_frame_writable
);
9411 set_gdbarch_write_pc (gdbarch
, arm_write_pc
);
9413 /* Frame handling. */
9414 set_gdbarch_dummy_id (gdbarch
, arm_dummy_id
);
9415 set_gdbarch_unwind_pc (gdbarch
, arm_unwind_pc
);
9416 set_gdbarch_unwind_sp (gdbarch
, arm_unwind_sp
);
9418 frame_base_set_default (gdbarch
, &arm_normal_base
);
9420 /* Address manipulation. */
9421 set_gdbarch_addr_bits_remove (gdbarch
, arm_addr_bits_remove
);
9423 /* Advance PC across function entry code. */
9424 set_gdbarch_skip_prologue (gdbarch
, arm_skip_prologue
);
9426 /* Detect whether PC is at a point where the stack has been destroyed. */
9427 set_gdbarch_stack_frame_destroyed_p (gdbarch
, arm_stack_frame_destroyed_p
);
9429 /* Skip trampolines. */
9430 set_gdbarch_skip_trampoline_code (gdbarch
, arm_skip_stub
);
9432 /* The stack grows downward. */
9433 set_gdbarch_inner_than (gdbarch
, core_addr_lessthan
);
9435 /* Breakpoint manipulation. */
9436 set_gdbarch_breakpoint_kind_from_pc (gdbarch
, arm_breakpoint_kind_from_pc
);
9437 set_gdbarch_sw_breakpoint_from_kind (gdbarch
, arm_sw_breakpoint_from_kind
);
9438 set_gdbarch_breakpoint_kind_from_current_state (gdbarch
,
9439 arm_breakpoint_kind_from_current_state
);
9441 /* Information about registers, etc. */
9442 set_gdbarch_sp_regnum (gdbarch
, ARM_SP_REGNUM
);
9443 set_gdbarch_pc_regnum (gdbarch
, ARM_PC_REGNUM
);
9444 set_gdbarch_num_regs (gdbarch
, ARM_NUM_REGS
);
9445 set_gdbarch_register_type (gdbarch
, arm_register_type
);
9446 set_gdbarch_register_reggroup_p (gdbarch
, arm_register_reggroup_p
);
9448 /* This "info float" is FPA-specific. Use the generic version if we
9450 if (gdbarch_tdep (gdbarch
)->have_fpa_registers
)
9451 set_gdbarch_print_float_info (gdbarch
, arm_print_float_info
);
9453 /* Internal <-> external register number maps. */
9454 set_gdbarch_dwarf2_reg_to_regnum (gdbarch
, arm_dwarf_reg_to_regnum
);
9455 set_gdbarch_register_sim_regno (gdbarch
, arm_register_sim_regno
);
9457 set_gdbarch_register_name (gdbarch
, arm_register_name
);
9459 /* Returning results. */
9460 set_gdbarch_return_value (gdbarch
, arm_return_value
);
9463 set_gdbarch_print_insn (gdbarch
, gdb_print_insn_arm
);
9465 /* Minsymbol frobbing. */
9466 set_gdbarch_elf_make_msymbol_special (gdbarch
, arm_elf_make_msymbol_special
);
9467 set_gdbarch_coff_make_msymbol_special (gdbarch
,
9468 arm_coff_make_msymbol_special
);
9469 set_gdbarch_record_special_symbol (gdbarch
, arm_record_special_symbol
);
9471 /* Thumb-2 IT block support. */
9472 set_gdbarch_adjust_breakpoint_address (gdbarch
,
9473 arm_adjust_breakpoint_address
);
9475 /* Virtual tables. */
9476 set_gdbarch_vbit_in_delta (gdbarch
, 1);
9478 /* Hook in the ABI-specific overrides, if they have been registered. */
9479 gdbarch_init_osabi (info
, gdbarch
);
9481 dwarf2_frame_set_init_reg (gdbarch
, arm_dwarf2_frame_init_reg
);
9483 /* Add some default predicates. */
9485 frame_unwind_append_unwinder (gdbarch
, &arm_m_exception_unwind
);
9486 frame_unwind_append_unwinder (gdbarch
, &arm_stub_unwind
);
9487 dwarf2_append_unwinders (gdbarch
);
9488 frame_unwind_append_unwinder (gdbarch
, &arm_exidx_unwind
);
9489 frame_unwind_append_unwinder (gdbarch
, &arm_epilogue_frame_unwind
);
9490 frame_unwind_append_unwinder (gdbarch
, &arm_prologue_unwind
);
9492 /* Now we have tuned the configuration, set a few final things,
9493 based on what the OS ABI has told us. */
9495 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
9496 binaries are always marked. */
9497 if (tdep
->arm_abi
== ARM_ABI_AUTO
)
9498 tdep
->arm_abi
= ARM_ABI_APCS
;
9500 /* Watchpoints are not steppable. */
9501 set_gdbarch_have_nonsteppable_watchpoint (gdbarch
, 1);
9503 /* We used to default to FPA for generic ARM, but almost nobody
9504 uses that now, and we now provide a way for the user to force
9505 the model. So default to the most useful variant. */
9506 if (tdep
->fp_model
== ARM_FLOAT_AUTO
)
9507 tdep
->fp_model
= ARM_FLOAT_SOFT_FPA
;
9509 if (tdep
->jb_pc
>= 0)
9510 set_gdbarch_get_longjmp_target (gdbarch
, arm_get_longjmp_target
);
9512 /* Floating point sizes and format. */
9513 set_gdbarch_float_format (gdbarch
, floatformats_ieee_single
);
9514 if (tdep
->fp_model
== ARM_FLOAT_SOFT_FPA
|| tdep
->fp_model
== ARM_FLOAT_FPA
)
9516 set_gdbarch_double_format
9517 (gdbarch
, floatformats_ieee_double_littlebyte_bigword
);
9518 set_gdbarch_long_double_format
9519 (gdbarch
, floatformats_ieee_double_littlebyte_bigword
);
9523 set_gdbarch_double_format (gdbarch
, floatformats_ieee_double
);
9524 set_gdbarch_long_double_format (gdbarch
, floatformats_ieee_double
);
9527 if (have_vfp_pseudos
)
9529 /* NOTE: These are the only pseudo registers used by
9530 the ARM target at the moment. If more are added, a
9531 little more care in numbering will be needed. */
9533 int num_pseudos
= 32;
9534 if (have_neon_pseudos
)
9536 set_gdbarch_num_pseudo_regs (gdbarch
, num_pseudos
);
9537 set_gdbarch_pseudo_register_read (gdbarch
, arm_pseudo_read
);
9538 set_gdbarch_pseudo_register_write (gdbarch
, arm_pseudo_write
);
9543 set_tdesc_pseudo_register_name (gdbarch
, arm_register_name
);
9545 tdesc_use_registers (gdbarch
, tdesc
, tdesc_data
);
9547 /* Override tdesc_register_type to adjust the types of VFP
9548 registers for NEON. */
9549 set_gdbarch_register_type (gdbarch
, arm_register_type
);
9552 /* Add standard register aliases. We add aliases even for those
9553 nanes which are used by the current architecture - it's simpler,
9554 and does no harm, since nothing ever lists user registers. */
9555 for (i
= 0; i
< ARRAY_SIZE (arm_register_aliases
); i
++)
9556 user_reg_add (gdbarch
, arm_register_aliases
[i
].name
,
9557 value_of_arm_user_reg
, &arm_register_aliases
[i
].regnum
);
9563 arm_dump_tdep (struct gdbarch
*gdbarch
, struct ui_file
*file
)
9565 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
9570 fprintf_unfiltered (file
, _("arm_dump_tdep: Lowest pc = 0x%lx"),
9571 (unsigned long) tdep
->lowest_pc
);
9574 extern initialize_file_ftype _initialize_arm_tdep
; /* -Wmissing-prototypes */
9577 _initialize_arm_tdep (void)
9580 const char *setname
;
9581 const char *setdesc
;
9582 const char *const *regnames
;
9584 char regdesc
[1024], *rdptr
= regdesc
;
9585 size_t rest
= sizeof (regdesc
);
9587 gdbarch_register (bfd_arch_arm
, arm_gdbarch_init
, arm_dump_tdep
);
9589 arm_objfile_data_key
9590 = register_objfile_data_with_cleanup (NULL
, arm_objfile_data_free
);
9592 /* Add ourselves to objfile event chain. */
9593 observer_attach_new_objfile (arm_exidx_new_objfile
);
9595 = register_objfile_data_with_cleanup (NULL
, arm_exidx_data_free
);
9597 /* Register an ELF OS ABI sniffer for ARM binaries. */
9598 gdbarch_register_osabi_sniffer (bfd_arch_arm
,
9599 bfd_target_elf_flavour
,
9600 arm_elf_osabi_sniffer
);
9602 /* Initialize the standard target descriptions. */
9603 initialize_tdesc_arm_with_m ();
9604 initialize_tdesc_arm_with_m_fpa_layout ();
9605 initialize_tdesc_arm_with_m_vfp_d16 ();
9606 initialize_tdesc_arm_with_iwmmxt ();
9607 initialize_tdesc_arm_with_vfpv2 ();
9608 initialize_tdesc_arm_with_vfpv3 ();
9609 initialize_tdesc_arm_with_neon ();
9611 /* Get the number of possible sets of register names defined in opcodes. */
9612 num_disassembly_options
= get_arm_regname_num_options ();
9614 /* Add root prefix command for all "set arm"/"show arm" commands. */
9615 add_prefix_cmd ("arm", no_class
, set_arm_command
,
9616 _("Various ARM-specific commands."),
9617 &setarmcmdlist
, "set arm ", 0, &setlist
);
9619 add_prefix_cmd ("arm", no_class
, show_arm_command
,
9620 _("Various ARM-specific commands."),
9621 &showarmcmdlist
, "show arm ", 0, &showlist
);
9623 /* Sync the opcode insn printer with our register viewer. */
9624 parse_arm_disassembler_option ("reg-names-std");
9626 /* Initialize the array that will be passed to
9627 add_setshow_enum_cmd(). */
9628 valid_disassembly_styles
= XNEWVEC (const char *,
9629 num_disassembly_options
+ 1);
9630 for (i
= 0; i
< num_disassembly_options
; i
++)
9632 get_arm_regnames (i
, &setname
, &setdesc
, ®names
);
9633 valid_disassembly_styles
[i
] = setname
;
9634 length
= snprintf (rdptr
, rest
, "%s - %s\n", setname
, setdesc
);
9637 /* When we find the default names, tell the disassembler to use
9639 if (!strcmp (setname
, "std"))
9641 disassembly_style
= setname
;
9642 set_arm_regname_option (i
);
9645 /* Mark the end of valid options. */
9646 valid_disassembly_styles
[num_disassembly_options
] = NULL
;
9648 /* Create the help text. */
9649 std::string helptext
= string_printf ("%s%s%s",
9650 _("The valid values are:\n"),
9652 _("The default is \"std\"."));
9654 add_setshow_enum_cmd("disassembler", no_class
,
9655 valid_disassembly_styles
, &disassembly_style
,
9656 _("Set the disassembly style."),
9657 _("Show the disassembly style."),
9659 set_disassembly_style_sfunc
,
9660 NULL
, /* FIXME: i18n: The disassembly style is
9662 &setarmcmdlist
, &showarmcmdlist
);
9664 add_setshow_boolean_cmd ("apcs32", no_class
, &arm_apcs_32
,
9665 _("Set usage of ARM 32-bit mode."),
9666 _("Show usage of ARM 32-bit mode."),
9667 _("When off, a 26-bit PC will be used."),
9669 NULL
, /* FIXME: i18n: Usage of ARM 32-bit
9671 &setarmcmdlist
, &showarmcmdlist
);
9673 /* Add a command to allow the user to force the FPU model. */
9674 add_setshow_enum_cmd ("fpu", no_class
, fp_model_strings
, ¤t_fp_model
,
9675 _("Set the floating point type."),
9676 _("Show the floating point type."),
9677 _("auto - Determine the FP typefrom the OS-ABI.\n\
9678 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
9679 fpa - FPA co-processor (GCC compiled).\n\
9680 softvfp - Software FP with pure-endian doubles.\n\
9681 vfp - VFP co-processor."),
9682 set_fp_model_sfunc
, show_fp_model
,
9683 &setarmcmdlist
, &showarmcmdlist
);
9685 /* Add a command to allow the user to force the ABI. */
9686 add_setshow_enum_cmd ("abi", class_support
, arm_abi_strings
, &arm_abi_string
,
9689 NULL
, arm_set_abi
, arm_show_abi
,
9690 &setarmcmdlist
, &showarmcmdlist
);
9692 /* Add two commands to allow the user to force the assumed
9694 add_setshow_enum_cmd ("fallback-mode", class_support
,
9695 arm_mode_strings
, &arm_fallback_mode_string
,
9696 _("Set the mode assumed when symbols are unavailable."),
9697 _("Show the mode assumed when symbols are unavailable."),
9698 NULL
, NULL
, arm_show_fallback_mode
,
9699 &setarmcmdlist
, &showarmcmdlist
);
9700 add_setshow_enum_cmd ("force-mode", class_support
,
9701 arm_mode_strings
, &arm_force_mode_string
,
9702 _("Set the mode assumed even when symbols are available."),
9703 _("Show the mode assumed even when symbols are available."),
9704 NULL
, NULL
, arm_show_force_mode
,
9705 &setarmcmdlist
, &showarmcmdlist
);
9707 /* Debugging flag. */
9708 add_setshow_boolean_cmd ("arm", class_maintenance
, &arm_debug
,
9709 _("Set ARM debugging."),
9710 _("Show ARM debugging."),
9711 _("When on, arm-specific debugging is enabled."),
9713 NULL
, /* FIXME: i18n: "ARM debugging is %s. */
9714 &setdebuglist
, &showdebuglist
);
/* ARM-reversible process record data structures.  */

#define ARM_INSN_SIZE_BYTES 4
#define THUMB_INSN_SIZE_BYTES 2
#define THUMB2_INSN_SIZE_BYTES 4


/* Position of the bit within a 32-bit ARM instruction
   that defines whether the instruction is a load or store.  */
#define INSN_S_L_BIT_NUM 20

/* Copy LENGTH register numbers from RECORD_BUF into a freshly
   XNEWVEC'd uint32_t array assigned to REGS.  No-op when LENGTH is
   zero; REGS then keeps its previous value.  */
#define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
        do  \
          { \
            unsigned int reg_len = LENGTH; \
            if (reg_len) \
              { \
                REGS = XNEWVEC (uint32_t, reg_len); \
                memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
              } \
          } \
        while (0)

/* Copy LENGTH (len, addr) pairs from RECORD_BUF into a freshly
   XNEWVEC'd struct arm_mem_r array assigned to MEMS.  No-op when
   LENGTH is zero.  RECORD_BUF is laid out as len/addr word pairs
   matching struct arm_mem_r.  */
#define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
        do  \
          { \
            unsigned int mem_len = LENGTH; \
            if (mem_len) \
              { \
                MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
                memcpy(&MEMS->len, &RECORD_BUF[0], \
                       sizeof(struct arm_mem_r) * LENGTH); \
              } \
          } \
        while (0)

/* Checks whether insn is already recorded or yet to be decoded. (boolean expression).  */
#define INSN_RECORDED(ARM_RECORD) \
        (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
9757 /* ARM memory record structure. */
9760 uint32_t len
; /* Record length. */
9761 uint32_t addr
; /* Memory address. */
9764 /* ARM instruction record contains opcode of current insn
9765 and execution state (before entry to decode_insn()),
9766 contains list of to-be-modified registers and
9767 memory blocks (on return from decode_insn()). */
9769 typedef struct insn_decode_record_t
9771 struct gdbarch
*gdbarch
;
9772 struct regcache
*regcache
;
9773 CORE_ADDR this_addr
; /* Address of the insn being decoded. */
9774 uint32_t arm_insn
; /* Should accommodate thumb. */
9775 uint32_t cond
; /* Condition code. */
9776 uint32_t opcode
; /* Insn opcode. */
9777 uint32_t decode
; /* Insn decode bits. */
9778 uint32_t mem_rec_count
; /* No of mem records. */
9779 uint32_t reg_rec_count
; /* No of reg records. */
9780 uint32_t *arm_regs
; /* Registers to be saved for this record. */
9781 struct arm_mem_r
*arm_mems
; /* Memory to be saved for this record. */
9782 } insn_decode_record
;
/* Checks ARM SBZ and SBO mandatory fields.

   Examine the LEN-bit field of INSN starting at bit BIT_NUM (1-based).
   When SBO is non-zero the field must be all ones ("should be one");
   when SBO is zero it must be all zeros ("should be zero").  Returns 1
   when the field satisfies the constraint (or LEN is 0), else 0.
   NOTE(review): body partially reconstructed — verify against the
   upstream definition.  */

static int
sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
{
  uint32_t ones = bits (insn, bit_num - 1, (bit_num -1) + (len - 1));

  if (!len)
    return 1;

  /* For a "should be zero" field, complement so that a conforming
     field again reads as all ones.  */
  if (!sbo)
    ones = ~ones;

  while (ones)
    {
      if (!(ones & sbo))
	return 0;
      ones = ones >> 1;
    }

  return 1;
}
/* Outcome codes for the ARM process-record handlers.  */
enum arm_record_result
{
  ARM_RECORD_SUCCESS = 0,
  ARM_RECORD_FAILURE = 1
};

/* Discriminates the store flavour handled by arm_record_strx.  */
typedef enum arm_record_strx_t
{
  ARM_RECORD_STRH=1,
  ARM_RECORD_STRD
} arm_record_strx_t;
9830 arm_record_strx (insn_decode_record
*arm_insn_r
, uint32_t *record_buf
,
9831 uint32_t *record_buf_mem
, arm_record_strx_t str_type
)
9834 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
9835 ULONGEST u_regval
[2]= {0};
9837 uint32_t reg_src1
= 0, reg_src2
= 0;
9838 uint32_t immed_high
= 0, immed_low
= 0,offset_8
= 0, tgt_mem_addr
= 0;
9840 arm_insn_r
->opcode
= bits (arm_insn_r
->arm_insn
, 21, 24);
9841 arm_insn_r
->decode
= bits (arm_insn_r
->arm_insn
, 4, 7);
9843 if (14 == arm_insn_r
->opcode
|| 10 == arm_insn_r
->opcode
)
9845 /* 1) Handle misc store, immediate offset. */
9846 immed_low
= bits (arm_insn_r
->arm_insn
, 0, 3);
9847 immed_high
= bits (arm_insn_r
->arm_insn
, 8, 11);
9848 reg_src1
= bits (arm_insn_r
->arm_insn
, 16, 19);
9849 regcache_raw_read_unsigned (reg_cache
, reg_src1
,
9851 if (ARM_PC_REGNUM
== reg_src1
)
9853 /* If R15 was used as Rn, hence current PC+8. */
9854 u_regval
[0] = u_regval
[0] + 8;
9856 offset_8
= (immed_high
<< 4) | immed_low
;
9857 /* Calculate target store address. */
9858 if (14 == arm_insn_r
->opcode
)
9860 tgt_mem_addr
= u_regval
[0] + offset_8
;
9864 tgt_mem_addr
= u_regval
[0] - offset_8
;
9866 if (ARM_RECORD_STRH
== str_type
)
9868 record_buf_mem
[0] = 2;
9869 record_buf_mem
[1] = tgt_mem_addr
;
9870 arm_insn_r
->mem_rec_count
= 1;
9872 else if (ARM_RECORD_STRD
== str_type
)
9874 record_buf_mem
[0] = 4;
9875 record_buf_mem
[1] = tgt_mem_addr
;
9876 record_buf_mem
[2] = 4;
9877 record_buf_mem
[3] = tgt_mem_addr
+ 4;
9878 arm_insn_r
->mem_rec_count
= 2;
9881 else if (12 == arm_insn_r
->opcode
|| 8 == arm_insn_r
->opcode
)
9883 /* 2) Store, register offset. */
9885 reg_src1
= bits (arm_insn_r
->arm_insn
, 0, 3);
9887 reg_src2
= bits (arm_insn_r
->arm_insn
, 16, 19);
9888 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
[0]);
9889 regcache_raw_read_unsigned (reg_cache
, reg_src2
, &u_regval
[1]);
9892 /* If R15 was used as Rn, hence current PC+8. */
9893 u_regval
[0] = u_regval
[0] + 8;
9895 /* Calculate target store address, Rn +/- Rm, register offset. */
9896 if (12 == arm_insn_r
->opcode
)
9898 tgt_mem_addr
= u_regval
[0] + u_regval
[1];
9902 tgt_mem_addr
= u_regval
[1] - u_regval
[0];
9904 if (ARM_RECORD_STRH
== str_type
)
9906 record_buf_mem
[0] = 2;
9907 record_buf_mem
[1] = tgt_mem_addr
;
9908 arm_insn_r
->mem_rec_count
= 1;
9910 else if (ARM_RECORD_STRD
== str_type
)
9912 record_buf_mem
[0] = 4;
9913 record_buf_mem
[1] = tgt_mem_addr
;
9914 record_buf_mem
[2] = 4;
9915 record_buf_mem
[3] = tgt_mem_addr
+ 4;
9916 arm_insn_r
->mem_rec_count
= 2;
9919 else if (11 == arm_insn_r
->opcode
|| 15 == arm_insn_r
->opcode
9920 || 2 == arm_insn_r
->opcode
|| 6 == arm_insn_r
->opcode
)
9922 /* 3) Store, immediate pre-indexed. */
9923 /* 5) Store, immediate post-indexed. */
9924 immed_low
= bits (arm_insn_r
->arm_insn
, 0, 3);
9925 immed_high
= bits (arm_insn_r
->arm_insn
, 8, 11);
9926 offset_8
= (immed_high
<< 4) | immed_low
;
9927 reg_src1
= bits (arm_insn_r
->arm_insn
, 16, 19);
9928 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
[0]);
9929 /* Calculate target store address, Rn +/- Rm, register offset. */
9930 if (15 == arm_insn_r
->opcode
|| 6 == arm_insn_r
->opcode
)
9932 tgt_mem_addr
= u_regval
[0] + offset_8
;
9936 tgt_mem_addr
= u_regval
[0] - offset_8
;
9938 if (ARM_RECORD_STRH
== str_type
)
9940 record_buf_mem
[0] = 2;
9941 record_buf_mem
[1] = tgt_mem_addr
;
9942 arm_insn_r
->mem_rec_count
= 1;
9944 else if (ARM_RECORD_STRD
== str_type
)
9946 record_buf_mem
[0] = 4;
9947 record_buf_mem
[1] = tgt_mem_addr
;
9948 record_buf_mem
[2] = 4;
9949 record_buf_mem
[3] = tgt_mem_addr
+ 4;
9950 arm_insn_r
->mem_rec_count
= 2;
9952 /* Record Rn also as it changes. */
9953 *(record_buf
) = bits (arm_insn_r
->arm_insn
, 16, 19);
9954 arm_insn_r
->reg_rec_count
= 1;
9956 else if (9 == arm_insn_r
->opcode
|| 13 == arm_insn_r
->opcode
9957 || 0 == arm_insn_r
->opcode
|| 4 == arm_insn_r
->opcode
)
9959 /* 4) Store, register pre-indexed. */
9960 /* 6) Store, register post -indexed. */
9961 reg_src1
= bits (arm_insn_r
->arm_insn
, 0, 3);
9962 reg_src2
= bits (arm_insn_r
->arm_insn
, 16, 19);
9963 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
[0]);
9964 regcache_raw_read_unsigned (reg_cache
, reg_src2
, &u_regval
[1]);
9965 /* Calculate target store address, Rn +/- Rm, register offset. */
9966 if (13 == arm_insn_r
->opcode
|| 4 == arm_insn_r
->opcode
)
9968 tgt_mem_addr
= u_regval
[0] + u_regval
[1];
9972 tgt_mem_addr
= u_regval
[1] - u_regval
[0];
9974 if (ARM_RECORD_STRH
== str_type
)
9976 record_buf_mem
[0] = 2;
9977 record_buf_mem
[1] = tgt_mem_addr
;
9978 arm_insn_r
->mem_rec_count
= 1;
9980 else if (ARM_RECORD_STRD
== str_type
)
9982 record_buf_mem
[0] = 4;
9983 record_buf_mem
[1] = tgt_mem_addr
;
9984 record_buf_mem
[2] = 4;
9985 record_buf_mem
[3] = tgt_mem_addr
+ 4;
9986 arm_insn_r
->mem_rec_count
= 2;
9988 /* Record Rn also as it changes. */
9989 *(record_buf
) = bits (arm_insn_r
->arm_insn
, 16, 19);
9990 arm_insn_r
->reg_rec_count
= 1;
9995 /* Handling ARM extension space insns. */
9998 arm_record_extension_space (insn_decode_record
*arm_insn_r
)
10000 uint32_t ret
= 0; /* Return value: -1:record failure ; 0:success */
10001 uint32_t opcode1
= 0, opcode2
= 0, insn_op1
= 0;
10002 uint32_t record_buf
[8], record_buf_mem
[8];
10003 uint32_t reg_src1
= 0;
10004 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
10005 ULONGEST u_regval
= 0;
10007 gdb_assert (!INSN_RECORDED(arm_insn_r
));
10008 /* Handle unconditional insn extension space. */
10010 opcode1
= bits (arm_insn_r
->arm_insn
, 20, 27);
10011 opcode2
= bits (arm_insn_r
->arm_insn
, 4, 7);
10012 if (arm_insn_r
->cond
)
10014 /* PLD has no affect on architectural state, it just affects
10016 if (5 == ((opcode1
& 0xE0) >> 5))
10019 record_buf
[0] = ARM_PS_REGNUM
;
10020 record_buf
[1] = ARM_LR_REGNUM
;
10021 arm_insn_r
->reg_rec_count
= 2;
10023 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
10027 opcode1
= bits (arm_insn_r
->arm_insn
, 25, 27);
10028 if (3 == opcode1
&& bit (arm_insn_r
->arm_insn
, 4))
10031 /* Undefined instruction on ARM V5; need to handle if later
10032 versions define it. */
10035 opcode1
= bits (arm_insn_r
->arm_insn
, 24, 27);
10036 opcode2
= bits (arm_insn_r
->arm_insn
, 4, 7);
10037 insn_op1
= bits (arm_insn_r
->arm_insn
, 20, 23);
10039 /* Handle arithmetic insn extension space. */
10040 if (!opcode1
&& 9 == opcode2
&& 1 != arm_insn_r
->cond
10041 && !INSN_RECORDED(arm_insn_r
))
10043 /* Handle MLA(S) and MUL(S). */
10044 if (0 <= insn_op1
&& 3 >= insn_op1
)
10046 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10047 record_buf
[1] = ARM_PS_REGNUM
;
10048 arm_insn_r
->reg_rec_count
= 2;
10050 else if (4 <= insn_op1
&& 15 >= insn_op1
)
10052 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
10053 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 16, 19);
10054 record_buf
[1] = bits (arm_insn_r
->arm_insn
, 12, 15);
10055 record_buf
[2] = ARM_PS_REGNUM
;
10056 arm_insn_r
->reg_rec_count
= 3;
10060 opcode1
= bits (arm_insn_r
->arm_insn
, 26, 27);
10061 opcode2
= bits (arm_insn_r
->arm_insn
, 23, 24);
10062 insn_op1
= bits (arm_insn_r
->arm_insn
, 21, 22);
10064 /* Handle control insn extension space. */
10066 if (!opcode1
&& 2 == opcode2
&& !bit (arm_insn_r
->arm_insn
, 20)
10067 && 1 != arm_insn_r
->cond
&& !INSN_RECORDED(arm_insn_r
))
10069 if (!bit (arm_insn_r
->arm_insn
,25))
10071 if (!bits (arm_insn_r
->arm_insn
, 4, 7))
10073 if ((0 == insn_op1
) || (2 == insn_op1
))
10076 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10077 arm_insn_r
->reg_rec_count
= 1;
10079 else if (1 == insn_op1
)
10081 /* CSPR is going to be changed. */
10082 record_buf
[0] = ARM_PS_REGNUM
;
10083 arm_insn_r
->reg_rec_count
= 1;
10085 else if (3 == insn_op1
)
10087 /* SPSR is going to be changed. */
10088 /* We need to get SPSR value, which is yet to be done. */
10092 else if (1 == bits (arm_insn_r
->arm_insn
, 4, 7))
10097 record_buf
[0] = ARM_PS_REGNUM
;
10098 arm_insn_r
->reg_rec_count
= 1;
10100 else if (3 == insn_op1
)
10103 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10104 arm_insn_r
->reg_rec_count
= 1;
10107 else if (3 == bits (arm_insn_r
->arm_insn
, 4, 7))
10110 record_buf
[0] = ARM_PS_REGNUM
;
10111 record_buf
[1] = ARM_LR_REGNUM
;
10112 arm_insn_r
->reg_rec_count
= 2;
10114 else if (5 == bits (arm_insn_r
->arm_insn
, 4, 7))
10116 /* QADD, QSUB, QDADD, QDSUB */
10117 record_buf
[0] = ARM_PS_REGNUM
;
10118 record_buf
[1] = bits (arm_insn_r
->arm_insn
, 12, 15);
10119 arm_insn_r
->reg_rec_count
= 2;
10121 else if (7 == bits (arm_insn_r
->arm_insn
, 4, 7))
10124 record_buf
[0] = ARM_PS_REGNUM
;
10125 record_buf
[1] = ARM_LR_REGNUM
;
10126 arm_insn_r
->reg_rec_count
= 2;
10128 /* Save SPSR also;how? */
10131 else if(8 == bits (arm_insn_r
->arm_insn
, 4, 7)
10132 || 10 == bits (arm_insn_r
->arm_insn
, 4, 7)
10133 || 12 == bits (arm_insn_r
->arm_insn
, 4, 7)
10134 || 14 == bits (arm_insn_r
->arm_insn
, 4, 7)
10137 if (0 == insn_op1
|| 1 == insn_op1
)
10139 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
10140 /* We dont do optimization for SMULW<y> where we
10142 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10143 record_buf
[1] = ARM_PS_REGNUM
;
10144 arm_insn_r
->reg_rec_count
= 2;
10146 else if (2 == insn_op1
)
10149 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10150 record_buf
[1] = bits (arm_insn_r
->arm_insn
, 16, 19);
10151 arm_insn_r
->reg_rec_count
= 2;
10153 else if (3 == insn_op1
)
10156 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10157 arm_insn_r
->reg_rec_count
= 1;
10163 /* MSR : immediate form. */
10166 /* CSPR is going to be changed. */
10167 record_buf
[0] = ARM_PS_REGNUM
;
10168 arm_insn_r
->reg_rec_count
= 1;
10170 else if (3 == insn_op1
)
10172 /* SPSR is going to be changed. */
10173 /* we need to get SPSR value, which is yet to be done */
10179 opcode1
= bits (arm_insn_r
->arm_insn
, 25, 27);
10180 opcode2
= bits (arm_insn_r
->arm_insn
, 20, 24);
10181 insn_op1
= bits (arm_insn_r
->arm_insn
, 5, 6);
10183 /* Handle load/store insn extension space. */
10185 if (!opcode1
&& bit (arm_insn_r
->arm_insn
, 7)
10186 && bit (arm_insn_r
->arm_insn
, 4) && 1 != arm_insn_r
->cond
10187 && !INSN_RECORDED(arm_insn_r
))
10192 /* These insn, changes register and memory as well. */
10193 /* SWP or SWPB insn. */
10194 /* Get memory address given by Rn. */
10195 reg_src1
= bits (arm_insn_r
->arm_insn
, 16, 19);
10196 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
);
10197 /* SWP insn ?, swaps word. */
10198 if (8 == arm_insn_r
->opcode
)
10200 record_buf_mem
[0] = 4;
10204 /* SWPB insn, swaps only byte. */
10205 record_buf_mem
[0] = 1;
10207 record_buf_mem
[1] = u_regval
;
10208 arm_insn_r
->mem_rec_count
= 1;
10209 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10210 arm_insn_r
->reg_rec_count
= 1;
10212 else if (1 == insn_op1
&& !bit (arm_insn_r
->arm_insn
, 20))
10215 arm_record_strx(arm_insn_r
, &record_buf
[0], &record_buf_mem
[0],
10218 else if (2 == insn_op1
&& !bit (arm_insn_r
->arm_insn
, 20))
10221 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10222 record_buf
[1] = record_buf
[0] + 1;
10223 arm_insn_r
->reg_rec_count
= 2;
10225 else if (3 == insn_op1
&& !bit (arm_insn_r
->arm_insn
, 20))
10228 arm_record_strx(arm_insn_r
, &record_buf
[0], &record_buf_mem
[0],
10231 else if (bit (arm_insn_r
->arm_insn
, 20) && insn_op1
<= 3)
10233 /* LDRH, LDRSB, LDRSH. */
10234 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10235 arm_insn_r
->reg_rec_count
= 1;
10240 opcode1
= bits (arm_insn_r
->arm_insn
, 23, 27);
10241 if (24 == opcode1
&& bit (arm_insn_r
->arm_insn
, 21)
10242 && !INSN_RECORDED(arm_insn_r
))
10245 /* Handle coprocessor insn extension space. */
10248 /* To be done for ARMv5 and later; as of now we return -1. */
10252 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
10253 MEM_ALLOC (arm_insn_r
->arm_mems
, arm_insn_r
->mem_rec_count
, record_buf_mem
);
10258 /* Handling opcode 000 insns. */
10261 arm_record_data_proc_misc_ld_str (insn_decode_record
*arm_insn_r
)
10263 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
10264 uint32_t record_buf
[8], record_buf_mem
[8];
10265 ULONGEST u_regval
[2] = {0};
10267 uint32_t reg_src1
= 0, reg_dest
= 0;
10268 uint32_t opcode1
= 0;
10270 arm_insn_r
->opcode
= bits (arm_insn_r
->arm_insn
, 21, 24);
10271 arm_insn_r
->decode
= bits (arm_insn_r
->arm_insn
, 4, 7);
10272 opcode1
= bits (arm_insn_r
->arm_insn
, 20, 24);
10274 /* Data processing insn /multiply insn. */
10275 if (9 == arm_insn_r
->decode
10276 && ((4 <= arm_insn_r
->opcode
&& 7 >= arm_insn_r
->opcode
)
10277 || (0 == arm_insn_r
->opcode
|| 1 == arm_insn_r
->opcode
)))
10279 /* Handle multiply instructions. */
10280 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
10281 if (0 == arm_insn_r
->opcode
|| 1 == arm_insn_r
->opcode
)
10283 /* Handle MLA and MUL. */
10284 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 16, 19);
10285 record_buf
[1] = ARM_PS_REGNUM
;
10286 arm_insn_r
->reg_rec_count
= 2;
10288 else if (4 <= arm_insn_r
->opcode
&& 7 >= arm_insn_r
->opcode
)
10290 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
10291 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 16, 19);
10292 record_buf
[1] = bits (arm_insn_r
->arm_insn
, 12, 15);
10293 record_buf
[2] = ARM_PS_REGNUM
;
10294 arm_insn_r
->reg_rec_count
= 3;
10297 else if (bit (arm_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
)
10298 && (11 == arm_insn_r
->decode
|| 13 == arm_insn_r
->decode
))
10300 /* Handle misc load insns, as 20th bit (L = 1). */
10301 /* LDR insn has a capability to do branching, if
10302 MOV LR, PC is precceded by LDR insn having Rn as R15
10303 in that case, it emulates branch and link insn, and hence we
10304 need to save CSPR and PC as well. I am not sure this is right
10305 place; as opcode = 010 LDR insn make this happen, if R15 was
10307 reg_dest
= bits (arm_insn_r
->arm_insn
, 12, 15);
10308 if (15 != reg_dest
)
10310 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10311 arm_insn_r
->reg_rec_count
= 1;
10315 record_buf
[0] = reg_dest
;
10316 record_buf
[1] = ARM_PS_REGNUM
;
10317 arm_insn_r
->reg_rec_count
= 2;
10320 else if ((9 == arm_insn_r
->opcode
|| 11 == arm_insn_r
->opcode
)
10321 && sbo_sbz (arm_insn_r
->arm_insn
, 5, 12, 0)
10322 && sbo_sbz (arm_insn_r
->arm_insn
, 13, 4, 1)
10323 && 2 == bits (arm_insn_r
->arm_insn
, 20, 21))
10325 /* Handle MSR insn. */
10326 if (9 == arm_insn_r
->opcode
)
10328 /* CSPR is going to be changed. */
10329 record_buf
[0] = ARM_PS_REGNUM
;
10330 arm_insn_r
->reg_rec_count
= 1;
10334 /* SPSR is going to be changed. */
10335 /* How to read SPSR value? */
10339 else if (9 == arm_insn_r
->decode
10340 && (8 == arm_insn_r
->opcode
|| 10 == arm_insn_r
->opcode
)
10341 && !bit (arm_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
10343 /* Handling SWP, SWPB. */
10344 /* These insn, changes register and memory as well. */
10345 /* SWP or SWPB insn. */
10347 reg_src1
= bits (arm_insn_r
->arm_insn
, 16, 19);
10348 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
[0]);
10349 /* SWP insn ?, swaps word. */
10350 if (8 == arm_insn_r
->opcode
)
10352 record_buf_mem
[0] = 4;
10356 /* SWPB insn, swaps only byte. */
10357 record_buf_mem
[0] = 1;
10359 record_buf_mem
[1] = u_regval
[0];
10360 arm_insn_r
->mem_rec_count
= 1;
10361 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10362 arm_insn_r
->reg_rec_count
= 1;
10364 else if (3 == arm_insn_r
->decode
&& 0x12 == opcode1
10365 && sbo_sbz (arm_insn_r
->arm_insn
, 9, 12, 1))
10367 /* Handle BLX, branch and link/exchange. */
10368 if (9 == arm_insn_r
->opcode
)
10370 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm,
10371 and R14 stores the return address. */
10372 record_buf
[0] = ARM_PS_REGNUM
;
10373 record_buf
[1] = ARM_LR_REGNUM
;
10374 arm_insn_r
->reg_rec_count
= 2;
10377 else if (7 == arm_insn_r
->decode
&& 0x12 == opcode1
)
10379 /* Handle enhanced software breakpoint insn, BKPT. */
10380 /* CPSR is changed to be executed in ARM state, disabling normal
10381 interrupts, entering abort mode. */
10382 /* According to high vector configuration PC is set. */
10383 /* user hit breakpoint and type reverse, in
10384 that case, we need to go back with previous CPSR and
10385 Program Counter. */
10386 record_buf
[0] = ARM_PS_REGNUM
;
10387 record_buf
[1] = ARM_LR_REGNUM
;
10388 arm_insn_r
->reg_rec_count
= 2;
10390 /* Save SPSR also; how? */
10393 else if (11 == arm_insn_r
->decode
10394 && !bit (arm_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
10396 /* Handle enhanced store insns and DSP insns (e.g. LDRD). */
10398 /* Handle str(x) insn */
10399 arm_record_strx(arm_insn_r
, &record_buf
[0], &record_buf_mem
[0],
10402 else if (1 == arm_insn_r
->decode
&& 0x12 == opcode1
10403 && sbo_sbz (arm_insn_r
->arm_insn
, 9, 12, 1))
10405 /* Handle BX, branch and link/exchange. */
10406 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm. */
10407 record_buf
[0] = ARM_PS_REGNUM
;
10408 arm_insn_r
->reg_rec_count
= 1;
10410 else if (1 == arm_insn_r
->decode
&& 0x16 == opcode1
10411 && sbo_sbz (arm_insn_r
->arm_insn
, 9, 4, 1)
10412 && sbo_sbz (arm_insn_r
->arm_insn
, 17, 4, 1))
10414 /* Count leading zeros: CLZ. */
10415 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10416 arm_insn_r
->reg_rec_count
= 1;
10418 else if (!bit (arm_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
)
10419 && (8 == arm_insn_r
->opcode
|| 10 == arm_insn_r
->opcode
)
10420 && sbo_sbz (arm_insn_r
->arm_insn
, 17, 4, 1)
10421 && sbo_sbz (arm_insn_r
->arm_insn
, 1, 12, 0)
10424 /* Handle MRS insn. */
10425 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10426 arm_insn_r
->reg_rec_count
= 1;
10428 else if (arm_insn_r
->opcode
<= 15)
10430 /* Normal data processing insns. */
10431 /* Out of 11 shifter operands mode, all the insn modifies destination
10432 register, which is specified by 13-16 decode. */
10433 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10434 record_buf
[1] = ARM_PS_REGNUM
;
10435 arm_insn_r
->reg_rec_count
= 2;
10442 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
10443 MEM_ALLOC (arm_insn_r
->arm_mems
, arm_insn_r
->mem_rec_count
, record_buf_mem
);
10447 /* Handling opcode 001 insns. */
10450 arm_record_data_proc_imm (insn_decode_record
*arm_insn_r
)
10452 uint32_t record_buf
[8], record_buf_mem
[8];
10454 arm_insn_r
->opcode
= bits (arm_insn_r
->arm_insn
, 21, 24);
10455 arm_insn_r
->decode
= bits (arm_insn_r
->arm_insn
, 4, 7);
10457 if ((9 == arm_insn_r
->opcode
|| 11 == arm_insn_r
->opcode
)
10458 && 2 == bits (arm_insn_r
->arm_insn
, 20, 21)
10459 && sbo_sbz (arm_insn_r
->arm_insn
, 13, 4, 1)
10462 /* Handle MSR insn. */
10463 if (9 == arm_insn_r
->opcode
)
10465 /* CSPR is going to be changed. */
10466 record_buf
[0] = ARM_PS_REGNUM
;
10467 arm_insn_r
->reg_rec_count
= 1;
10471 /* SPSR is going to be changed. */
10474 else if (arm_insn_r
->opcode
<= 15)
10476 /* Normal data processing insns. */
10477 /* Out of 11 shifter operands mode, all the insn modifies destination
10478 register, which is specified by 13-16 decode. */
10479 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10480 record_buf
[1] = ARM_PS_REGNUM
;
10481 arm_insn_r
->reg_rec_count
= 2;
10488 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
10489 MEM_ALLOC (arm_insn_r
->arm_mems
, arm_insn_r
->mem_rec_count
, record_buf_mem
);
10494 arm_record_media (insn_decode_record
*arm_insn_r
)
10496 uint32_t record_buf
[8];
10498 switch (bits (arm_insn_r
->arm_insn
, 22, 24))
10501 /* Parallel addition and subtraction, signed */
10503 /* Parallel addition and subtraction, unsigned */
10506 /* Packing, unpacking, saturation and reversal */
10508 int rd
= bits (arm_insn_r
->arm_insn
, 12, 15);
10510 record_buf
[arm_insn_r
->reg_rec_count
++] = rd
;
10516 /* Signed multiplies */
10518 int rd
= bits (arm_insn_r
->arm_insn
, 16, 19);
10519 unsigned int op1
= bits (arm_insn_r
->arm_insn
, 20, 22);
10521 record_buf
[arm_insn_r
->reg_rec_count
++] = rd
;
10523 record_buf
[arm_insn_r
->reg_rec_count
++] = ARM_PS_REGNUM
;
10524 else if (op1
== 0x4)
10525 record_buf
[arm_insn_r
->reg_rec_count
++]
10526 = bits (arm_insn_r
->arm_insn
, 12, 15);
10532 if (bit (arm_insn_r
->arm_insn
, 21)
10533 && bits (arm_insn_r
->arm_insn
, 5, 6) == 0x2)
10536 record_buf
[arm_insn_r
->reg_rec_count
++]
10537 = bits (arm_insn_r
->arm_insn
, 12, 15);
10539 else if (bits (arm_insn_r
->arm_insn
, 20, 21) == 0x0
10540 && bits (arm_insn_r
->arm_insn
, 5, 7) == 0x0)
10542 /* USAD8 and USADA8 */
10543 record_buf
[arm_insn_r
->reg_rec_count
++]
10544 = bits (arm_insn_r
->arm_insn
, 16, 19);
10551 if (bits (arm_insn_r
->arm_insn
, 20, 21) == 0x3
10552 && bits (arm_insn_r
->arm_insn
, 5, 7) == 0x7)
10554 /* Permanently UNDEFINED */
10559 /* BFC, BFI and UBFX */
10560 record_buf
[arm_insn_r
->reg_rec_count
++]
10561 = bits (arm_insn_r
->arm_insn
, 12, 15);
10570 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
10575 /* Handle ARM mode instructions with opcode 010. */
10578 arm_record_ld_st_imm_offset (insn_decode_record
*arm_insn_r
)
10580 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
10582 uint32_t reg_base
, reg_dest
;
10583 uint32_t offset_12
, tgt_mem_addr
;
10584 uint32_t record_buf
[8], record_buf_mem
[8];
10585 unsigned char wback
;
10588 /* Calculate wback. */
10589 wback
= (bit (arm_insn_r
->arm_insn
, 24) == 0)
10590 || (bit (arm_insn_r
->arm_insn
, 21) == 1);
10592 arm_insn_r
->reg_rec_count
= 0;
10593 reg_base
= bits (arm_insn_r
->arm_insn
, 16, 19);
10595 if (bit (arm_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
10597 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
10600 reg_dest
= bits (arm_insn_r
->arm_insn
, 12, 15);
10601 record_buf
[arm_insn_r
->reg_rec_count
++] = reg_dest
;
10603 /* The LDR instruction is capable of doing branching. If MOV LR, PC
10604 preceeds a LDR instruction having R15 as reg_base, it
10605 emulates a branch and link instruction, and hence we need to save
10606 CPSR and PC as well. */
10607 if (ARM_PC_REGNUM
== reg_dest
)
10608 record_buf
[arm_insn_r
->reg_rec_count
++] = ARM_PS_REGNUM
;
10610 /* If wback is true, also save the base register, which is going to be
10613 record_buf
[arm_insn_r
->reg_rec_count
++] = reg_base
;
10617 /* STR (immediate), STRB (immediate), STRBT and STRT. */
10619 offset_12
= bits (arm_insn_r
->arm_insn
, 0, 11);
10620 regcache_raw_read_unsigned (reg_cache
, reg_base
, &u_regval
);
10622 /* Handle bit U. */
10623 if (bit (arm_insn_r
->arm_insn
, 23))
10625 /* U == 1: Add the offset. */
10626 tgt_mem_addr
= (uint32_t) u_regval
+ offset_12
;
10630 /* U == 0: subtract the offset. */
10631 tgt_mem_addr
= (uint32_t) u_regval
- offset_12
;
10634 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
10636 if (bit (arm_insn_r
->arm_insn
, 22))
10638 /* STRB and STRBT: 1 byte. */
10639 record_buf_mem
[0] = 1;
10643 /* STR and STRT: 4 bytes. */
10644 record_buf_mem
[0] = 4;
10647 /* Handle bit P. */
10648 if (bit (arm_insn_r
->arm_insn
, 24))
10649 record_buf_mem
[1] = tgt_mem_addr
;
10651 record_buf_mem
[1] = (uint32_t) u_regval
;
10653 arm_insn_r
->mem_rec_count
= 1;
10655 /* If wback is true, also save the base register, which is going to be
10658 record_buf
[arm_insn_r
->reg_rec_count
++] = reg_base
;
10661 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
10662 MEM_ALLOC (arm_insn_r
->arm_mems
, arm_insn_r
->mem_rec_count
, record_buf_mem
);
10666 /* Handling opcode 011 insns. */
10669 arm_record_ld_st_reg_offset (insn_decode_record
*arm_insn_r
)
10671 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
10673 uint32_t shift_imm
= 0;
10674 uint32_t reg_src1
= 0, reg_src2
= 0, reg_dest
= 0;
10675 uint32_t offset_12
= 0, tgt_mem_addr
= 0;
10676 uint32_t record_buf
[8], record_buf_mem
[8];
10679 ULONGEST u_regval
[2];
10681 if (bit (arm_insn_r
->arm_insn
, 4))
10682 return arm_record_media (arm_insn_r
);
10684 arm_insn_r
->opcode
= bits (arm_insn_r
->arm_insn
, 21, 24);
10685 arm_insn_r
->decode
= bits (arm_insn_r
->arm_insn
, 4, 7);
10687 /* Handle enhanced store insns and LDRD DSP insn,
10688 order begins according to addressing modes for store insns
10692 if (bit (arm_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
10694 reg_dest
= bits (arm_insn_r
->arm_insn
, 12, 15);
10695 /* LDR insn has a capability to do branching, if
10696 MOV LR, PC is precedded by LDR insn having Rn as R15
10697 in that case, it emulates branch and link insn, and hence we
10698 need to save CSPR and PC as well. */
10699 if (15 != reg_dest
)
10701 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10702 arm_insn_r
->reg_rec_count
= 1;
10706 record_buf
[0] = reg_dest
;
10707 record_buf
[1] = ARM_PS_REGNUM
;
10708 arm_insn_r
->reg_rec_count
= 2;
10713 if (! bits (arm_insn_r
->arm_insn
, 4, 11))
10715 /* Store insn, register offset and register pre-indexed,
10716 register post-indexed. */
10718 reg_src1
= bits (arm_insn_r
->arm_insn
, 0, 3);
10720 reg_src2
= bits (arm_insn_r
->arm_insn
, 16, 19);
10721 regcache_raw_read_unsigned (reg_cache
, reg_src1
10723 regcache_raw_read_unsigned (reg_cache
, reg_src2
10725 if (15 == reg_src2
)
10727 /* If R15 was used as Rn, hence current PC+8. */
10728 /* Pre-indexed mode doesnt reach here ; illegal insn. */
10729 u_regval
[0] = u_regval
[0] + 8;
10731 /* Calculate target store address, Rn +/- Rm, register offset. */
10733 if (bit (arm_insn_r
->arm_insn
, 23))
10735 tgt_mem_addr
= u_regval
[0] + u_regval
[1];
10739 tgt_mem_addr
= u_regval
[1] - u_regval
[0];
10742 switch (arm_insn_r
->opcode
)
10756 record_buf_mem
[0] = 4;
10771 record_buf_mem
[0] = 1;
10775 gdb_assert_not_reached ("no decoding pattern found");
10778 record_buf_mem
[1] = tgt_mem_addr
;
10779 arm_insn_r
->mem_rec_count
= 1;
10781 if (9 == arm_insn_r
->opcode
|| 11 == arm_insn_r
->opcode
10782 || 13 == arm_insn_r
->opcode
|| 15 == arm_insn_r
->opcode
10783 || 0 == arm_insn_r
->opcode
|| 2 == arm_insn_r
->opcode
10784 || 4 == arm_insn_r
->opcode
|| 6 == arm_insn_r
->opcode
10785 || 1 == arm_insn_r
->opcode
|| 3 == arm_insn_r
->opcode
10786 || 5 == arm_insn_r
->opcode
|| 7 == arm_insn_r
->opcode
10789 /* Rn is going to be changed in pre-indexed mode and
10790 post-indexed mode as well. */
10791 record_buf
[0] = reg_src2
;
10792 arm_insn_r
->reg_rec_count
= 1;
10797 /* Store insn, scaled register offset; scaled pre-indexed. */
10798 offset_12
= bits (arm_insn_r
->arm_insn
, 5, 6);
10800 reg_src1
= bits (arm_insn_r
->arm_insn
, 0, 3);
10802 reg_src2
= bits (arm_insn_r
->arm_insn
, 16, 19);
10803 /* Get shift_imm. */
10804 shift_imm
= bits (arm_insn_r
->arm_insn
, 7, 11);
10805 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
[0]);
10806 regcache_raw_read_signed (reg_cache
, reg_src1
, &s_word
);
10807 regcache_raw_read_unsigned (reg_cache
, reg_src2
, &u_regval
[1]);
10808 /* Offset_12 used as shift. */
10812 /* Offset_12 used as index. */
10813 offset_12
= u_regval
[0] << shift_imm
;
10817 offset_12
= (!shift_imm
)?0:u_regval
[0] >> shift_imm
;
10823 if (bit (u_regval
[0], 31))
10825 offset_12
= 0xFFFFFFFF;
10834 /* This is arithmetic shift. */
10835 offset_12
= s_word
>> shift_imm
;
10842 regcache_raw_read_unsigned (reg_cache
, ARM_PS_REGNUM
,
10844 /* Get C flag value and shift it by 31. */
10845 offset_12
= (((bit (u_regval
[1], 29)) << 31) \
10846 | (u_regval
[0]) >> 1);
10850 offset_12
= (u_regval
[0] >> shift_imm
) \
10852 (sizeof(uint32_t) - shift_imm
));
10857 gdb_assert_not_reached ("no decoding pattern found");
10861 regcache_raw_read_unsigned (reg_cache
, reg_src2
, &u_regval
[1]);
10863 if (bit (arm_insn_r
->arm_insn
, 23))
10865 tgt_mem_addr
= u_regval
[1] + offset_12
;
10869 tgt_mem_addr
= u_regval
[1] - offset_12
;
10872 switch (arm_insn_r
->opcode
)
10886 record_buf_mem
[0] = 4;
10901 record_buf_mem
[0] = 1;
10905 gdb_assert_not_reached ("no decoding pattern found");
10908 record_buf_mem
[1] = tgt_mem_addr
;
10909 arm_insn_r
->mem_rec_count
= 1;
10911 if (9 == arm_insn_r
->opcode
|| 11 == arm_insn_r
->opcode
10912 || 13 == arm_insn_r
->opcode
|| 15 == arm_insn_r
->opcode
10913 || 0 == arm_insn_r
->opcode
|| 2 == arm_insn_r
->opcode
10914 || 4 == arm_insn_r
->opcode
|| 6 == arm_insn_r
->opcode
10915 || 1 == arm_insn_r
->opcode
|| 3 == arm_insn_r
->opcode
10916 || 5 == arm_insn_r
->opcode
|| 7 == arm_insn_r
->opcode
10919 /* Rn is going to be changed in register scaled pre-indexed
10920 mode,and scaled post indexed mode. */
10921 record_buf
[0] = reg_src2
;
10922 arm_insn_r
->reg_rec_count
= 1;
10927 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
10928 MEM_ALLOC (arm_insn_r
->arm_mems
, arm_insn_r
->mem_rec_count
, record_buf_mem
);
10932 /* Handle ARM mode instructions with opcode 100. */
10935 arm_record_ld_st_multiple (insn_decode_record
*arm_insn_r
)
10937 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
10938 uint32_t register_count
= 0, register_bits
;
10939 uint32_t reg_base
, addr_mode
;
10940 uint32_t record_buf
[24], record_buf_mem
[48];
10944 /* Fetch the list of registers. */
10945 register_bits
= bits (arm_insn_r
->arm_insn
, 0, 15);
10946 arm_insn_r
->reg_rec_count
= 0;
10948 /* Fetch the base register that contains the address we are loading data
10950 reg_base
= bits (arm_insn_r
->arm_insn
, 16, 19);
10952 /* Calculate wback. */
10953 wback
= (bit (arm_insn_r
->arm_insn
, 21) == 1);
10955 if (bit (arm_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
10957 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
10959 /* Find out which registers are going to be loaded from memory. */
10960 while (register_bits
)
10962 if (register_bits
& 0x00000001)
10963 record_buf
[arm_insn_r
->reg_rec_count
++] = register_count
;
10964 register_bits
= register_bits
>> 1;
10969 /* If wback is true, also save the base register, which is going to be
10972 record_buf
[arm_insn_r
->reg_rec_count
++] = reg_base
;
10974 /* Save the CPSR register. */
10975 record_buf
[arm_insn_r
->reg_rec_count
++] = ARM_PS_REGNUM
;
10979 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
10981 addr_mode
= bits (arm_insn_r
->arm_insn
, 23, 24);
10983 regcache_raw_read_unsigned (reg_cache
, reg_base
, &u_regval
);
10985 /* Find out how many registers are going to be stored to memory. */
10986 while (register_bits
)
10988 if (register_bits
& 0x00000001)
10990 register_bits
= register_bits
>> 1;
10995 /* STMDA (STMED): Decrement after. */
10997 record_buf_mem
[1] = (uint32_t) u_regval
10998 - register_count
* INT_REGISTER_SIZE
+ 4;
11000 /* STM (STMIA, STMEA): Increment after. */
11002 record_buf_mem
[1] = (uint32_t) u_regval
;
11004 /* STMDB (STMFD): Decrement before. */
11006 record_buf_mem
[1] = (uint32_t) u_regval
11007 - register_count
* INT_REGISTER_SIZE
;
11009 /* STMIB (STMFA): Increment before. */
11011 record_buf_mem
[1] = (uint32_t) u_regval
+ INT_REGISTER_SIZE
;
11014 gdb_assert_not_reached ("no decoding pattern found");
11018 record_buf_mem
[0] = register_count
* INT_REGISTER_SIZE
;
11019 arm_insn_r
->mem_rec_count
= 1;
11021 /* If wback is true, also save the base register, which is going to be
11024 record_buf
[arm_insn_r
->reg_rec_count
++] = reg_base
;
11027 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
11028 MEM_ALLOC (arm_insn_r
->arm_mems
, arm_insn_r
->mem_rec_count
, record_buf_mem
);
11032 /* Handling opcode 101 insns. */
11035 arm_record_b_bl (insn_decode_record
*arm_insn_r
)
11037 uint32_t record_buf
[8];
11039 /* Handle B, BL, BLX(1) insns. */
11040 /* B simply branches so we do nothing here. */
11041 /* Note: BLX(1) doesnt fall here but instead it falls into
11042 extension space. */
11043 if (bit (arm_insn_r
->arm_insn
, 24))
11045 record_buf
[0] = ARM_LR_REGNUM
;
11046 arm_insn_r
->reg_rec_count
= 1;
11049 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
11055 arm_record_unsupported_insn (insn_decode_record
*arm_insn_r
)
11057 printf_unfiltered (_("Process record does not support instruction "
11058 "0x%0x at address %s.\n"),arm_insn_r
->arm_insn
,
11059 paddress (arm_insn_r
->gdbarch
, arm_insn_r
->this_addr
));
11064 /* Record handler for vector data transfer instructions. */
11067 arm_record_vdata_transfer_insn (insn_decode_record
*arm_insn_r
)
11069 uint32_t bits_a
, bit_c
, bit_l
, reg_t
, reg_v
;
11070 uint32_t record_buf
[4];
11072 reg_t
= bits (arm_insn_r
->arm_insn
, 12, 15);
11073 reg_v
= bits (arm_insn_r
->arm_insn
, 21, 23);
11074 bits_a
= bits (arm_insn_r
->arm_insn
, 21, 23);
11075 bit_l
= bit (arm_insn_r
->arm_insn
, 20);
11076 bit_c
= bit (arm_insn_r
->arm_insn
, 8);
11078 /* Handle VMOV instruction. */
11079 if (bit_l
&& bit_c
)
11081 record_buf
[0] = reg_t
;
11082 arm_insn_r
->reg_rec_count
= 1;
11084 else if (bit_l
&& !bit_c
)
11086 /* Handle VMOV instruction. */
11087 if (bits_a
== 0x00)
11089 record_buf
[0] = reg_t
;
11090 arm_insn_r
->reg_rec_count
= 1;
11092 /* Handle VMRS instruction. */
11093 else if (bits_a
== 0x07)
11096 reg_t
= ARM_PS_REGNUM
;
11098 record_buf
[0] = reg_t
;
11099 arm_insn_r
->reg_rec_count
= 1;
11102 else if (!bit_l
&& !bit_c
)
11104 /* Handle VMOV instruction. */
11105 if (bits_a
== 0x00)
11107 record_buf
[0] = ARM_D0_REGNUM
+ reg_v
;
11109 arm_insn_r
->reg_rec_count
= 1;
11111 /* Handle VMSR instruction. */
11112 else if (bits_a
== 0x07)
11114 record_buf
[0] = ARM_FPSCR_REGNUM
;
11115 arm_insn_r
->reg_rec_count
= 1;
11118 else if (!bit_l
&& bit_c
)
11120 /* Handle VMOV instruction. */
11121 if (!(bits_a
& 0x04))
11123 record_buf
[0] = (reg_v
| (bit (arm_insn_r
->arm_insn
, 7) << 4))
11125 arm_insn_r
->reg_rec_count
= 1;
11127 /* Handle VDUP instruction. */
11130 if (bit (arm_insn_r
->arm_insn
, 21))
11132 reg_v
= reg_v
| (bit (arm_insn_r
->arm_insn
, 7) << 4);
11133 record_buf
[0] = reg_v
+ ARM_D0_REGNUM
;
11134 record_buf
[1] = reg_v
+ ARM_D0_REGNUM
+ 1;
11135 arm_insn_r
->reg_rec_count
= 2;
11139 reg_v
= reg_v
| (bit (arm_insn_r
->arm_insn
, 7) << 4);
11140 record_buf
[0] = reg_v
+ ARM_D0_REGNUM
;
11141 arm_insn_r
->reg_rec_count
= 1;
11146 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
11150 /* Record handler for extension register load/store instructions. */
11153 arm_record_exreg_ld_st_insn (insn_decode_record
*arm_insn_r
)
11155 uint32_t opcode
, single_reg
;
11156 uint8_t op_vldm_vstm
;
11157 uint32_t record_buf
[8], record_buf_mem
[128];
11158 ULONGEST u_regval
= 0;
11160 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
11162 opcode
= bits (arm_insn_r
->arm_insn
, 20, 24);
11163 single_reg
= !bit (arm_insn_r
->arm_insn
, 8);
11164 op_vldm_vstm
= opcode
& 0x1b;
11166 /* Handle VMOV instructions. */
11167 if ((opcode
& 0x1e) == 0x04)
11169 if (bit (arm_insn_r
->arm_insn
, 20)) /* to_arm_registers bit 20? */
11171 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11172 record_buf
[1] = bits (arm_insn_r
->arm_insn
, 16, 19);
11173 arm_insn_r
->reg_rec_count
= 2;
11177 uint8_t reg_m
= bits (arm_insn_r
->arm_insn
, 0, 3);
11178 uint8_t bit_m
= bit (arm_insn_r
->arm_insn
, 5);
11182 /* The first S register number m is REG_M:M (M is bit 5),
11183 the corresponding D register number is REG_M:M / 2, which
11185 record_buf
[arm_insn_r
->reg_rec_count
++] = ARM_D0_REGNUM
+ reg_m
;
11186 /* The second S register number is REG_M:M + 1, the
11187 corresponding D register number is (REG_M:M + 1) / 2.
11188 IOW, if bit M is 1, the first and second S registers
11189 are mapped to different D registers, otherwise, they are
11190 in the same D register. */
11193 record_buf
[arm_insn_r
->reg_rec_count
++]
11194 = ARM_D0_REGNUM
+ reg_m
+ 1;
11199 record_buf
[0] = ((bit_m
<< 4) + reg_m
+ ARM_D0_REGNUM
);
11200 arm_insn_r
->reg_rec_count
= 1;
11204 /* Handle VSTM and VPUSH instructions. */
11205 else if (op_vldm_vstm
== 0x08 || op_vldm_vstm
== 0x0a
11206 || op_vldm_vstm
== 0x12)
11208 uint32_t start_address
, reg_rn
, imm_off32
, imm_off8
, memory_count
;
11209 uint32_t memory_index
= 0;
11211 reg_rn
= bits (arm_insn_r
->arm_insn
, 16, 19);
11212 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
);
11213 imm_off8
= bits (arm_insn_r
->arm_insn
, 0, 7);
11214 imm_off32
= imm_off8
<< 2;
11215 memory_count
= imm_off8
;
11217 if (bit (arm_insn_r
->arm_insn
, 23))
11218 start_address
= u_regval
;
11220 start_address
= u_regval
- imm_off32
;
11222 if (bit (arm_insn_r
->arm_insn
, 21))
11224 record_buf
[0] = reg_rn
;
11225 arm_insn_r
->reg_rec_count
= 1;
11228 while (memory_count
> 0)
11232 record_buf_mem
[memory_index
] = 4;
11233 record_buf_mem
[memory_index
+ 1] = start_address
;
11234 start_address
= start_address
+ 4;
11235 memory_index
= memory_index
+ 2;
11239 record_buf_mem
[memory_index
] = 4;
11240 record_buf_mem
[memory_index
+ 1] = start_address
;
11241 record_buf_mem
[memory_index
+ 2] = 4;
11242 record_buf_mem
[memory_index
+ 3] = start_address
+ 4;
11243 start_address
= start_address
+ 8;
11244 memory_index
= memory_index
+ 4;
11248 arm_insn_r
->mem_rec_count
= (memory_index
>> 1);
11250 /* Handle VLDM instructions. */
11251 else if (op_vldm_vstm
== 0x09 || op_vldm_vstm
== 0x0b
11252 || op_vldm_vstm
== 0x13)
11254 uint32_t reg_count
, reg_vd
;
11255 uint32_t reg_index
= 0;
11256 uint32_t bit_d
= bit (arm_insn_r
->arm_insn
, 22);
11258 reg_vd
= bits (arm_insn_r
->arm_insn
, 12, 15);
11259 reg_count
= bits (arm_insn_r
->arm_insn
, 0, 7);
11261 /* REG_VD is the first D register number. If the instruction
11262 loads memory to S registers (SINGLE_REG is TRUE), the register
11263 number is (REG_VD << 1 | bit D), so the corresponding D
11264 register number is (REG_VD << 1 | bit D) / 2 = REG_VD. */
11266 reg_vd
= reg_vd
| (bit_d
<< 4);
11268 if (bit (arm_insn_r
->arm_insn
, 21) /* write back */)
11269 record_buf
[reg_index
++] = bits (arm_insn_r
->arm_insn
, 16, 19);
11271 /* If the instruction loads memory to D register, REG_COUNT should
11272 be divided by 2, according to the ARM Architecture Reference
11273 Manual. If the instruction loads memory to S register, divide by
11274 2 as well because two S registers are mapped to D register. */
11275 reg_count
= reg_count
/ 2;
11276 if (single_reg
&& bit_d
)
11278 /* Increase the register count if S register list starts from
11279 an odd number (bit d is one). */
11283 while (reg_count
> 0)
11285 record_buf
[reg_index
++] = ARM_D0_REGNUM
+ reg_vd
+ reg_count
- 1;
11288 arm_insn_r
->reg_rec_count
= reg_index
;
11290 /* VSTR Vector store register. */
11291 else if ((opcode
& 0x13) == 0x10)
11293 uint32_t start_address
, reg_rn
, imm_off32
, imm_off8
;
11294 uint32_t memory_index
= 0;
11296 reg_rn
= bits (arm_insn_r
->arm_insn
, 16, 19);
11297 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
);
11298 imm_off8
= bits (arm_insn_r
->arm_insn
, 0, 7);
11299 imm_off32
= imm_off8
<< 2;
11301 if (bit (arm_insn_r
->arm_insn
, 23))
11302 start_address
= u_regval
+ imm_off32
;
11304 start_address
= u_regval
- imm_off32
;
11308 record_buf_mem
[memory_index
] = 4;
11309 record_buf_mem
[memory_index
+ 1] = start_address
;
11310 arm_insn_r
->mem_rec_count
= 1;
11314 record_buf_mem
[memory_index
] = 4;
11315 record_buf_mem
[memory_index
+ 1] = start_address
;
11316 record_buf_mem
[memory_index
+ 2] = 4;
11317 record_buf_mem
[memory_index
+ 3] = start_address
+ 4;
11318 arm_insn_r
->mem_rec_count
= 2;
11321 /* VLDR Vector load register. */
11322 else if ((opcode
& 0x13) == 0x11)
11324 uint32_t reg_vd
= bits (arm_insn_r
->arm_insn
, 12, 15);
11328 reg_vd
= reg_vd
| (bit (arm_insn_r
->arm_insn
, 22) << 4);
11329 record_buf
[0] = ARM_D0_REGNUM
+ reg_vd
;
11333 reg_vd
= (reg_vd
<< 1) | bit (arm_insn_r
->arm_insn
, 22);
11334 /* Record register D rather than pseudo register S. */
11335 record_buf
[0] = ARM_D0_REGNUM
+ reg_vd
/ 2;
11337 arm_insn_r
->reg_rec_count
= 1;
11340 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
11341 MEM_ALLOC (arm_insn_r
->arm_mems
, arm_insn_r
->mem_rec_count
, record_buf_mem
);
11345 /* Record handler for arm/thumb mode VFP data processing instructions. */
11348 arm_record_vfp_data_proc_insn (insn_decode_record
*arm_insn_r
)
11350 uint32_t opc1
, opc2
, opc3
, dp_op_sz
, bit_d
, reg_vd
;
11351 uint32_t record_buf
[4];
11352 enum insn_types
{INSN_T0
, INSN_T1
, INSN_T2
, INSN_T3
, INSN_INV
};
11353 enum insn_types curr_insn_type
= INSN_INV
;
11355 reg_vd
= bits (arm_insn_r
->arm_insn
, 12, 15);
11356 opc1
= bits (arm_insn_r
->arm_insn
, 20, 23);
11357 opc2
= bits (arm_insn_r
->arm_insn
, 16, 19);
11358 opc3
= bits (arm_insn_r
->arm_insn
, 6, 7);
11359 dp_op_sz
= bit (arm_insn_r
->arm_insn
, 8);
11360 bit_d
= bit (arm_insn_r
->arm_insn
, 22);
11361 opc1
= opc1
& 0x04;
11363 /* Handle VMLA, VMLS. */
11366 if (bit (arm_insn_r
->arm_insn
, 10))
11368 if (bit (arm_insn_r
->arm_insn
, 6))
11369 curr_insn_type
= INSN_T0
;
11371 curr_insn_type
= INSN_T1
;
11376 curr_insn_type
= INSN_T1
;
11378 curr_insn_type
= INSN_T2
;
11381 /* Handle VNMLA, VNMLS, VNMUL. */
11382 else if (opc1
== 0x01)
11385 curr_insn_type
= INSN_T1
;
11387 curr_insn_type
= INSN_T2
;
11390 else if (opc1
== 0x02 && !(opc3
& 0x01))
11392 if (bit (arm_insn_r
->arm_insn
, 10))
11394 if (bit (arm_insn_r
->arm_insn
, 6))
11395 curr_insn_type
= INSN_T0
;
11397 curr_insn_type
= INSN_T1
;
11402 curr_insn_type
= INSN_T1
;
11404 curr_insn_type
= INSN_T2
;
11407 /* Handle VADD, VSUB. */
11408 else if (opc1
== 0x03)
11410 if (!bit (arm_insn_r
->arm_insn
, 9))
11412 if (bit (arm_insn_r
->arm_insn
, 6))
11413 curr_insn_type
= INSN_T0
;
11415 curr_insn_type
= INSN_T1
;
11420 curr_insn_type
= INSN_T1
;
11422 curr_insn_type
= INSN_T2
;
11426 else if (opc1
== 0x0b)
11429 curr_insn_type
= INSN_T1
;
11431 curr_insn_type
= INSN_T2
;
11433 /* Handle all other vfp data processing instructions. */
11434 else if (opc1
== 0x0b)
11437 if (!(opc3
& 0x01) || (opc2
== 0x00 && opc3
== 0x01))
11439 if (bit (arm_insn_r
->arm_insn
, 4))
11441 if (bit (arm_insn_r
->arm_insn
, 6))
11442 curr_insn_type
= INSN_T0
;
11444 curr_insn_type
= INSN_T1
;
11449 curr_insn_type
= INSN_T1
;
11451 curr_insn_type
= INSN_T2
;
11454 /* Handle VNEG and VABS. */
11455 else if ((opc2
== 0x01 && opc3
== 0x01)
11456 || (opc2
== 0x00 && opc3
== 0x03))
11458 if (!bit (arm_insn_r
->arm_insn
, 11))
11460 if (bit (arm_insn_r
->arm_insn
, 6))
11461 curr_insn_type
= INSN_T0
;
11463 curr_insn_type
= INSN_T1
;
11468 curr_insn_type
= INSN_T1
;
11470 curr_insn_type
= INSN_T2
;
11473 /* Handle VSQRT. */
11474 else if (opc2
== 0x01 && opc3
== 0x03)
11477 curr_insn_type
= INSN_T1
;
11479 curr_insn_type
= INSN_T2
;
11482 else if (opc2
== 0x07 && opc3
== 0x03)
11485 curr_insn_type
= INSN_T1
;
11487 curr_insn_type
= INSN_T2
;
11489 else if (opc3
& 0x01)
11492 if ((opc2
== 0x08) || (opc2
& 0x0e) == 0x0c)
11494 if (!bit (arm_insn_r
->arm_insn
, 18))
11495 curr_insn_type
= INSN_T2
;
11499 curr_insn_type
= INSN_T1
;
11501 curr_insn_type
= INSN_T2
;
11505 else if ((opc2
& 0x0e) == 0x0a || (opc2
& 0x0e) == 0x0e)
11508 curr_insn_type
= INSN_T1
;
11510 curr_insn_type
= INSN_T2
;
11512 /* Handle VCVTB, VCVTT. */
11513 else if ((opc2
& 0x0e) == 0x02)
11514 curr_insn_type
= INSN_T2
;
11515 /* Handle VCMP, VCMPE. */
11516 else if ((opc2
& 0x0e) == 0x04)
11517 curr_insn_type
= INSN_T3
;
11521 switch (curr_insn_type
)
11524 reg_vd
= reg_vd
| (bit_d
<< 4);
11525 record_buf
[0] = reg_vd
+ ARM_D0_REGNUM
;
11526 record_buf
[1] = reg_vd
+ ARM_D0_REGNUM
+ 1;
11527 arm_insn_r
->reg_rec_count
= 2;
11531 reg_vd
= reg_vd
| (bit_d
<< 4);
11532 record_buf
[0] = reg_vd
+ ARM_D0_REGNUM
;
11533 arm_insn_r
->reg_rec_count
= 1;
11537 reg_vd
= (reg_vd
<< 1) | bit_d
;
11538 record_buf
[0] = reg_vd
+ ARM_D0_REGNUM
;
11539 arm_insn_r
->reg_rec_count
= 1;
11543 record_buf
[0] = ARM_FPSCR_REGNUM
;
11544 arm_insn_r
->reg_rec_count
= 1;
11548 gdb_assert_not_reached ("no decoding pattern found");
11552 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
11556 /* Handling opcode 110 insns. */
11559 arm_record_asimd_vfp_coproc (insn_decode_record
*arm_insn_r
)
11561 uint32_t op1
, op1_ebit
, coproc
;
11563 coproc
= bits (arm_insn_r
->arm_insn
, 8, 11);
11564 op1
= bits (arm_insn_r
->arm_insn
, 20, 25);
11565 op1_ebit
= bit (arm_insn_r
->arm_insn
, 20);
11567 if ((coproc
& 0x0e) == 0x0a)
11569 /* Handle extension register ld/st instructions. */
11571 return arm_record_exreg_ld_st_insn (arm_insn_r
);
11573 /* 64-bit transfers between arm core and extension registers. */
11574 if ((op1
& 0x3e) == 0x04)
11575 return arm_record_exreg_ld_st_insn (arm_insn_r
);
11579 /* Handle coprocessor ld/st instructions. */
11584 return arm_record_unsupported_insn (arm_insn_r
);
11587 return arm_record_unsupported_insn (arm_insn_r
);
11590 /* Move to coprocessor from two arm core registers. */
11592 return arm_record_unsupported_insn (arm_insn_r
);
11594 /* Move to two arm core registers from coprocessor. */
11599 reg_t
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11600 reg_t
[1] = bits (arm_insn_r
->arm_insn
, 16, 19);
11601 arm_insn_r
->reg_rec_count
= 2;
11603 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, reg_t
);
11607 return arm_record_unsupported_insn (arm_insn_r
);
11610 /* Handling opcode 111 insns. */
11613 arm_record_coproc_data_proc (insn_decode_record
*arm_insn_r
)
11615 uint32_t op
, op1_sbit
, op1_ebit
, coproc
;
11616 struct gdbarch_tdep
*tdep
= gdbarch_tdep (arm_insn_r
->gdbarch
);
11617 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
11619 arm_insn_r
->opcode
= bits (arm_insn_r
->arm_insn
, 24, 27);
11620 coproc
= bits (arm_insn_r
->arm_insn
, 8, 11);
11621 op1_sbit
= bit (arm_insn_r
->arm_insn
, 24);
11622 op1_ebit
= bit (arm_insn_r
->arm_insn
, 20);
11623 op
= bit (arm_insn_r
->arm_insn
, 4);
11625 /* Handle arm SWI/SVC system call instructions. */
11628 if (tdep
->arm_syscall_record
!= NULL
)
11630 ULONGEST svc_operand
, svc_number
;
11632 svc_operand
= (0x00ffffff & arm_insn_r
->arm_insn
);
11634 if (svc_operand
) /* OABI. */
11635 svc_number
= svc_operand
- 0x900000;
11637 regcache_raw_read_unsigned (reg_cache
, 7, &svc_number
);
11639 return tdep
->arm_syscall_record (reg_cache
, svc_number
);
11643 printf_unfiltered (_("no syscall record support\n"));
11648 if ((coproc
& 0x0e) == 0x0a)
11650 /* VFP data-processing instructions. */
11651 if (!op1_sbit
&& !op
)
11652 return arm_record_vfp_data_proc_insn (arm_insn_r
);
11654 /* Advanced SIMD, VFP instructions. */
11655 if (!op1_sbit
&& op
)
11656 return arm_record_vdata_transfer_insn (arm_insn_r
);
11660 /* Coprocessor data operations. */
11661 if (!op1_sbit
&& !op
)
11662 return arm_record_unsupported_insn (arm_insn_r
);
11664 /* Move to Coprocessor from ARM core register. */
11665 if (!op1_sbit
&& !op1_ebit
&& op
)
11666 return arm_record_unsupported_insn (arm_insn_r
);
11668 /* Move to arm core register from coprocessor. */
11669 if (!op1_sbit
&& op1_ebit
&& op
)
11671 uint32_t record_buf
[1];
11673 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11674 if (record_buf
[0] == 15)
11675 record_buf
[0] = ARM_PS_REGNUM
;
11677 arm_insn_r
->reg_rec_count
= 1;
11678 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
,
11684 return arm_record_unsupported_insn (arm_insn_r
);
11687 /* Handling opcode 000 insns. */
11690 thumb_record_shift_add_sub (insn_decode_record
*thumb_insn_r
)
11692 uint32_t record_buf
[8];
11693 uint32_t reg_src1
= 0;
11695 reg_src1
= bits (thumb_insn_r
->arm_insn
, 0, 2);
11697 record_buf
[0] = ARM_PS_REGNUM
;
11698 record_buf
[1] = reg_src1
;
11699 thumb_insn_r
->reg_rec_count
= 2;
11701 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
11707 /* Handling opcode 001 insns. */
11710 thumb_record_add_sub_cmp_mov (insn_decode_record
*thumb_insn_r
)
11712 uint32_t record_buf
[8];
11713 uint32_t reg_src1
= 0;
11715 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
11717 record_buf
[0] = ARM_PS_REGNUM
;
11718 record_buf
[1] = reg_src1
;
11719 thumb_insn_r
->reg_rec_count
= 2;
11721 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
11726 /* Handling opcode 010 insns. */
11729 thumb_record_ld_st_reg_offset (insn_decode_record
*thumb_insn_r
)
11731 struct regcache
*reg_cache
= thumb_insn_r
->regcache
;
11732 uint32_t record_buf
[8], record_buf_mem
[8];
11734 uint32_t reg_src1
= 0, reg_src2
= 0;
11735 uint32_t opcode1
= 0, opcode2
= 0, opcode3
= 0;
11737 ULONGEST u_regval
[2] = {0};
11739 opcode1
= bits (thumb_insn_r
->arm_insn
, 10, 12);
11741 if (bit (thumb_insn_r
->arm_insn
, 12))
11743 /* Handle load/store register offset. */
11744 opcode2
= bits (thumb_insn_r
->arm_insn
, 9, 10);
11745 if (opcode2
>= 12 && opcode2
<= 15)
11747 /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH. */
11748 reg_src1
= bits (thumb_insn_r
->arm_insn
,0, 2);
11749 record_buf
[0] = reg_src1
;
11750 thumb_insn_r
->reg_rec_count
= 1;
11752 else if (opcode2
>= 8 && opcode2
<= 10)
11754 /* STR(2), STRB(2), STRH(2) . */
11755 reg_src1
= bits (thumb_insn_r
->arm_insn
, 3, 5);
11756 reg_src2
= bits (thumb_insn_r
->arm_insn
, 6, 8);
11757 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
[0]);
11758 regcache_raw_read_unsigned (reg_cache
, reg_src2
, &u_regval
[1]);
11760 record_buf_mem
[0] = 4; /* STR (2). */
11761 else if (10 == opcode2
)
11762 record_buf_mem
[0] = 1; /* STRB (2). */
11763 else if (9 == opcode2
)
11764 record_buf_mem
[0] = 2; /* STRH (2). */
11765 record_buf_mem
[1] = u_regval
[0] + u_regval
[1];
11766 thumb_insn_r
->mem_rec_count
= 1;
11769 else if (bit (thumb_insn_r
->arm_insn
, 11))
11771 /* Handle load from literal pool. */
11773 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
11774 record_buf
[0] = reg_src1
;
11775 thumb_insn_r
->reg_rec_count
= 1;
11779 opcode2
= bits (thumb_insn_r
->arm_insn
, 8, 9);
11780 opcode3
= bits (thumb_insn_r
->arm_insn
, 0, 2);
11781 if ((3 == opcode2
) && (!opcode3
))
11783 /* Branch with exchange. */
11784 record_buf
[0] = ARM_PS_REGNUM
;
11785 thumb_insn_r
->reg_rec_count
= 1;
11789 /* Format 8; special data processing insns. */
11790 record_buf
[0] = ARM_PS_REGNUM
;
11791 record_buf
[1] = (bit (thumb_insn_r
->arm_insn
, 7) << 3
11792 | bits (thumb_insn_r
->arm_insn
, 0, 2));
11793 thumb_insn_r
->reg_rec_count
= 2;
11798 /* Format 5; data processing insns. */
11799 reg_src1
= bits (thumb_insn_r
->arm_insn
, 0, 2);
11800 if (bit (thumb_insn_r
->arm_insn
, 7))
11802 reg_src1
= reg_src1
+ 8;
11804 record_buf
[0] = ARM_PS_REGNUM
;
11805 record_buf
[1] = reg_src1
;
11806 thumb_insn_r
->reg_rec_count
= 2;
11809 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
11810 MEM_ALLOC (thumb_insn_r
->arm_mems
, thumb_insn_r
->mem_rec_count
,
11816 /* Handling opcode 001 insns. */
11819 thumb_record_ld_st_imm_offset (insn_decode_record
*thumb_insn_r
)
11821 struct regcache
*reg_cache
= thumb_insn_r
->regcache
;
11822 uint32_t record_buf
[8], record_buf_mem
[8];
11824 uint32_t reg_src1
= 0;
11825 uint32_t opcode
= 0, immed_5
= 0;
11827 ULONGEST u_regval
= 0;
11829 opcode
= bits (thumb_insn_r
->arm_insn
, 11, 12);
11834 reg_src1
= bits (thumb_insn_r
->arm_insn
, 0, 2);
11835 record_buf
[0] = reg_src1
;
11836 thumb_insn_r
->reg_rec_count
= 1;
11841 reg_src1
= bits (thumb_insn_r
->arm_insn
, 3, 5);
11842 immed_5
= bits (thumb_insn_r
->arm_insn
, 6, 10);
11843 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
);
11844 record_buf_mem
[0] = 4;
11845 record_buf_mem
[1] = u_regval
+ (immed_5
* 4);
11846 thumb_insn_r
->mem_rec_count
= 1;
11849 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
11850 MEM_ALLOC (thumb_insn_r
->arm_mems
, thumb_insn_r
->mem_rec_count
,
11856 /* Handling opcode 100 insns. */
11859 thumb_record_ld_st_stack (insn_decode_record
*thumb_insn_r
)
11861 struct regcache
*reg_cache
= thumb_insn_r
->regcache
;
11862 uint32_t record_buf
[8], record_buf_mem
[8];
11864 uint32_t reg_src1
= 0;
11865 uint32_t opcode
= 0, immed_8
= 0, immed_5
= 0;
11867 ULONGEST u_regval
= 0;
11869 opcode
= bits (thumb_insn_r
->arm_insn
, 11, 12);
11874 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
11875 record_buf
[0] = reg_src1
;
11876 thumb_insn_r
->reg_rec_count
= 1;
11878 else if (1 == opcode
)
11881 reg_src1
= bits (thumb_insn_r
->arm_insn
, 0, 2);
11882 record_buf
[0] = reg_src1
;
11883 thumb_insn_r
->reg_rec_count
= 1;
11885 else if (2 == opcode
)
11888 immed_8
= bits (thumb_insn_r
->arm_insn
, 0, 7);
11889 regcache_raw_read_unsigned (reg_cache
, ARM_SP_REGNUM
, &u_regval
);
11890 record_buf_mem
[0] = 4;
11891 record_buf_mem
[1] = u_regval
+ (immed_8
* 4);
11892 thumb_insn_r
->mem_rec_count
= 1;
11894 else if (0 == opcode
)
11897 immed_5
= bits (thumb_insn_r
->arm_insn
, 6, 10);
11898 reg_src1
= bits (thumb_insn_r
->arm_insn
, 3, 5);
11899 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
);
11900 record_buf_mem
[0] = 2;
11901 record_buf_mem
[1] = u_regval
+ (immed_5
* 2);
11902 thumb_insn_r
->mem_rec_count
= 1;
11905 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
11906 MEM_ALLOC (thumb_insn_r
->arm_mems
, thumb_insn_r
->mem_rec_count
,
11912 /* Handling opcode 101 insns. */
11915 thumb_record_misc (insn_decode_record
*thumb_insn_r
)
11917 struct regcache
*reg_cache
= thumb_insn_r
->regcache
;
11919 uint32_t opcode
= 0, opcode1
= 0, opcode2
= 0;
11920 uint32_t register_bits
= 0, register_count
= 0;
11921 uint32_t index
= 0, start_address
= 0;
11922 uint32_t record_buf
[24], record_buf_mem
[48];
11925 ULONGEST u_regval
= 0;
11927 opcode
= bits (thumb_insn_r
->arm_insn
, 11, 12);
11928 opcode1
= bits (thumb_insn_r
->arm_insn
, 8, 12);
11929 opcode2
= bits (thumb_insn_r
->arm_insn
, 9, 12);
11934 register_bits
= bits (thumb_insn_r
->arm_insn
, 0, 7);
11935 while (register_bits
)
11937 if (register_bits
& 0x00000001)
11938 record_buf
[index
++] = register_count
;
11939 register_bits
= register_bits
>> 1;
11942 record_buf
[index
++] = ARM_PS_REGNUM
;
11943 record_buf
[index
++] = ARM_SP_REGNUM
;
11944 thumb_insn_r
->reg_rec_count
= index
;
11946 else if (10 == opcode2
)
11949 register_bits
= bits (thumb_insn_r
->arm_insn
, 0, 7);
11950 regcache_raw_read_unsigned (reg_cache
, ARM_SP_REGNUM
, &u_regval
);
11951 while (register_bits
)
11953 if (register_bits
& 0x00000001)
11955 register_bits
= register_bits
>> 1;
11957 start_address
= u_regval
- \
11958 (4 * (bit (thumb_insn_r
->arm_insn
, 8) + register_count
));
11959 thumb_insn_r
->mem_rec_count
= register_count
;
11960 while (register_count
)
11962 record_buf_mem
[(register_count
* 2) - 1] = start_address
;
11963 record_buf_mem
[(register_count
* 2) - 2] = 4;
11964 start_address
= start_address
+ 4;
11967 record_buf
[0] = ARM_SP_REGNUM
;
11968 thumb_insn_r
->reg_rec_count
= 1;
11970 else if (0x1E == opcode1
)
11973 /* Handle enhanced software breakpoint insn, BKPT. */
11974 /* CPSR is changed to be executed in ARM state, disabling normal
11975 interrupts, entering abort mode. */
11976 /* According to high vector configuration PC is set. */
11977 /* User hits breakpoint and type reverse, in that case, we need to go back with
11978 previous CPSR and Program Counter. */
11979 record_buf
[0] = ARM_PS_REGNUM
;
11980 record_buf
[1] = ARM_LR_REGNUM
;
11981 thumb_insn_r
->reg_rec_count
= 2;
11982 /* We need to save SPSR value, which is not yet done. */
11983 printf_unfiltered (_("Process record does not support instruction "
11984 "0x%0x at address %s.\n"),
11985 thumb_insn_r
->arm_insn
,
11986 paddress (thumb_insn_r
->gdbarch
,
11987 thumb_insn_r
->this_addr
));
11990 else if ((0 == opcode
) || (1 == opcode
))
11992 /* ADD(5), ADD(6). */
11993 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
11994 record_buf
[0] = reg_src1
;
11995 thumb_insn_r
->reg_rec_count
= 1;
11997 else if (2 == opcode
)
11999 /* ADD(7), SUB(4). */
12000 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
12001 record_buf
[0] = ARM_SP_REGNUM
;
12002 thumb_insn_r
->reg_rec_count
= 1;
12005 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
12006 MEM_ALLOC (thumb_insn_r
->arm_mems
, thumb_insn_r
->mem_rec_count
,
12012 /* Handling opcode 110 insns. */
12015 thumb_record_ldm_stm_swi (insn_decode_record
*thumb_insn_r
)
12017 struct gdbarch_tdep
*tdep
= gdbarch_tdep (thumb_insn_r
->gdbarch
);
12018 struct regcache
*reg_cache
= thumb_insn_r
->regcache
;
12020 uint32_t ret
= 0; /* function return value: -1:record failure ; 0:success */
12021 uint32_t reg_src1
= 0;
12022 uint32_t opcode1
= 0, opcode2
= 0, register_bits
= 0, register_count
= 0;
12023 uint32_t index
= 0, start_address
= 0;
12024 uint32_t record_buf
[24], record_buf_mem
[48];
12026 ULONGEST u_regval
= 0;
12028 opcode1
= bits (thumb_insn_r
->arm_insn
, 8, 12);
12029 opcode2
= bits (thumb_insn_r
->arm_insn
, 11, 12);
12035 register_bits
= bits (thumb_insn_r
->arm_insn
, 0, 7);
12037 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
12038 while (register_bits
)
12040 if (register_bits
& 0x00000001)
12041 record_buf
[index
++] = register_count
;
12042 register_bits
= register_bits
>> 1;
12045 record_buf
[index
++] = reg_src1
;
12046 thumb_insn_r
->reg_rec_count
= index
;
12048 else if (0 == opcode2
)
12050 /* It handles both STMIA. */
12051 register_bits
= bits (thumb_insn_r
->arm_insn
, 0, 7);
12053 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
12054 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
);
12055 while (register_bits
)
12057 if (register_bits
& 0x00000001)
12059 register_bits
= register_bits
>> 1;
12061 start_address
= u_regval
;
12062 thumb_insn_r
->mem_rec_count
= register_count
;
12063 while (register_count
)
12065 record_buf_mem
[(register_count
* 2) - 1] = start_address
;
12066 record_buf_mem
[(register_count
* 2) - 2] = 4;
12067 start_address
= start_address
+ 4;
12071 else if (0x1F == opcode1
)
12073 /* Handle arm syscall insn. */
12074 if (tdep
->arm_syscall_record
!= NULL
)
12076 regcache_raw_read_unsigned (reg_cache
, 7, &u_regval
);
12077 ret
= tdep
->arm_syscall_record (reg_cache
, u_regval
);
12081 printf_unfiltered (_("no syscall record support\n"));
12086 /* B (1), conditional branch is automatically taken care in process_record,
12087 as PC is saved there. */
12089 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
12090 MEM_ALLOC (thumb_insn_r
->arm_mems
, thumb_insn_r
->mem_rec_count
,
12096 /* Handling opcode 111 insns. */
12099 thumb_record_branch (insn_decode_record
*thumb_insn_r
)
12101 uint32_t record_buf
[8];
12102 uint32_t bits_h
= 0;
12104 bits_h
= bits (thumb_insn_r
->arm_insn
, 11, 12);
12106 if (2 == bits_h
|| 3 == bits_h
)
12109 record_buf
[0] = ARM_LR_REGNUM
;
12110 thumb_insn_r
->reg_rec_count
= 1;
12112 else if (1 == bits_h
)
12115 record_buf
[0] = ARM_PS_REGNUM
;
12116 record_buf
[1] = ARM_LR_REGNUM
;
12117 thumb_insn_r
->reg_rec_count
= 2;
12120 /* B(2) is automatically taken care in process_record, as PC is
12123 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
12128 /* Handler for thumb2 load/store multiple instructions. */
12131 thumb2_record_ld_st_multiple (insn_decode_record
*thumb2_insn_r
)
12133 struct regcache
*reg_cache
= thumb2_insn_r
->regcache
;
12135 uint32_t reg_rn
, op
;
12136 uint32_t register_bits
= 0, register_count
= 0;
12137 uint32_t index
= 0, start_address
= 0;
12138 uint32_t record_buf
[24], record_buf_mem
[48];
12140 ULONGEST u_regval
= 0;
12142 reg_rn
= bits (thumb2_insn_r
->arm_insn
, 16, 19);
12143 op
= bits (thumb2_insn_r
->arm_insn
, 23, 24);
12145 if (0 == op
|| 3 == op
)
12147 if (bit (thumb2_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
12149 /* Handle RFE instruction. */
12150 record_buf
[0] = ARM_PS_REGNUM
;
12151 thumb2_insn_r
->reg_rec_count
= 1;
12155 /* Handle SRS instruction after reading banked SP. */
12156 return arm_record_unsupported_insn (thumb2_insn_r
);
12159 else if (1 == op
|| 2 == op
)
12161 if (bit (thumb2_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
12163 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
12164 register_bits
= bits (thumb2_insn_r
->arm_insn
, 0, 15);
12165 while (register_bits
)
12167 if (register_bits
& 0x00000001)
12168 record_buf
[index
++] = register_count
;
12171 register_bits
= register_bits
>> 1;
12173 record_buf
[index
++] = reg_rn
;
12174 record_buf
[index
++] = ARM_PS_REGNUM
;
12175 thumb2_insn_r
->reg_rec_count
= index
;
12179 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
12180 register_bits
= bits (thumb2_insn_r
->arm_insn
, 0, 15);
12181 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
);
12182 while (register_bits
)
12184 if (register_bits
& 0x00000001)
12187 register_bits
= register_bits
>> 1;
12192 /* Start address calculation for LDMDB/LDMEA. */
12193 start_address
= u_regval
;
12197 /* Start address calculation for LDMDB/LDMEA. */
12198 start_address
= u_regval
- register_count
* 4;
12201 thumb2_insn_r
->mem_rec_count
= register_count
;
12202 while (register_count
)
12204 record_buf_mem
[register_count
* 2 - 1] = start_address
;
12205 record_buf_mem
[register_count
* 2 - 2] = 4;
12206 start_address
= start_address
+ 4;
12209 record_buf
[0] = reg_rn
;
12210 record_buf
[1] = ARM_PS_REGNUM
;
12211 thumb2_insn_r
->reg_rec_count
= 2;
12215 MEM_ALLOC (thumb2_insn_r
->arm_mems
, thumb2_insn_r
->mem_rec_count
,
12217 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
12219 return ARM_RECORD_SUCCESS
;
12222 /* Handler for thumb2 load/store (dual/exclusive) and table branch
12226 thumb2_record_ld_st_dual_ex_tbb (insn_decode_record
*thumb2_insn_r
)
12228 struct regcache
*reg_cache
= thumb2_insn_r
->regcache
;
12230 uint32_t reg_rd
, reg_rn
, offset_imm
;
12231 uint32_t reg_dest1
, reg_dest2
;
12232 uint32_t address
, offset_addr
;
12233 uint32_t record_buf
[8], record_buf_mem
[8];
12234 uint32_t op1
, op2
, op3
;
12236 ULONGEST u_regval
[2];
12238 op1
= bits (thumb2_insn_r
->arm_insn
, 23, 24);
12239 op2
= bits (thumb2_insn_r
->arm_insn
, 20, 21);
12240 op3
= bits (thumb2_insn_r
->arm_insn
, 4, 7);
12242 if (bit (thumb2_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
12244 if(!(1 == op1
&& 1 == op2
&& (0 == op3
|| 1 == op3
)))
12246 reg_dest1
= bits (thumb2_insn_r
->arm_insn
, 12, 15);
12247 record_buf
[0] = reg_dest1
;
12248 record_buf
[1] = ARM_PS_REGNUM
;
12249 thumb2_insn_r
->reg_rec_count
= 2;
12252 if (3 == op2
|| (op1
& 2) || (1 == op1
&& 1 == op2
&& 7 == op3
))
12254 reg_dest2
= bits (thumb2_insn_r
->arm_insn
, 8, 11);
12255 record_buf
[2] = reg_dest2
;
12256 thumb2_insn_r
->reg_rec_count
= 3;
12261 reg_rn
= bits (thumb2_insn_r
->arm_insn
, 16, 19);
12262 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
[0]);
12264 if (0 == op1
&& 0 == op2
)
12266 /* Handle STREX. */
12267 offset_imm
= bits (thumb2_insn_r
->arm_insn
, 0, 7);
12268 address
= u_regval
[0] + (offset_imm
* 4);
12269 record_buf_mem
[0] = 4;
12270 record_buf_mem
[1] = address
;
12271 thumb2_insn_r
->mem_rec_count
= 1;
12272 reg_rd
= bits (thumb2_insn_r
->arm_insn
, 0, 3);
12273 record_buf
[0] = reg_rd
;
12274 thumb2_insn_r
->reg_rec_count
= 1;
12276 else if (1 == op1
&& 0 == op2
)
12278 reg_rd
= bits (thumb2_insn_r
->arm_insn
, 0, 3);
12279 record_buf
[0] = reg_rd
;
12280 thumb2_insn_r
->reg_rec_count
= 1;
12281 address
= u_regval
[0];
12282 record_buf_mem
[1] = address
;
12286 /* Handle STREXB. */
12287 record_buf_mem
[0] = 1;
12288 thumb2_insn_r
->mem_rec_count
= 1;
12292 /* Handle STREXH. */
12293 record_buf_mem
[0] = 2 ;
12294 thumb2_insn_r
->mem_rec_count
= 1;
12298 /* Handle STREXD. */
12299 address
= u_regval
[0];
12300 record_buf_mem
[0] = 4;
12301 record_buf_mem
[2] = 4;
12302 record_buf_mem
[3] = address
+ 4;
12303 thumb2_insn_r
->mem_rec_count
= 2;
12308 offset_imm
= bits (thumb2_insn_r
->arm_insn
, 0, 7);
12310 if (bit (thumb2_insn_r
->arm_insn
, 24))
12312 if (bit (thumb2_insn_r
->arm_insn
, 23))
12313 offset_addr
= u_regval
[0] + (offset_imm
* 4);
12315 offset_addr
= u_regval
[0] - (offset_imm
* 4);
12317 address
= offset_addr
;
12320 address
= u_regval
[0];
12322 record_buf_mem
[0] = 4;
12323 record_buf_mem
[1] = address
;
12324 record_buf_mem
[2] = 4;
12325 record_buf_mem
[3] = address
+ 4;
12326 thumb2_insn_r
->mem_rec_count
= 2;
12327 record_buf
[0] = reg_rn
;
12328 thumb2_insn_r
->reg_rec_count
= 1;
12332 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
12334 MEM_ALLOC (thumb2_insn_r
->arm_mems
, thumb2_insn_r
->mem_rec_count
,
12336 return ARM_RECORD_SUCCESS
;
12339 /* Handler for thumb2 data processing (shift register and modified immediate)
12343 thumb2_record_data_proc_sreg_mimm (insn_decode_record
*thumb2_insn_r
)
12345 uint32_t reg_rd
, op
;
12346 uint32_t record_buf
[8];
12348 op
= bits (thumb2_insn_r
->arm_insn
, 21, 24);
12349 reg_rd
= bits (thumb2_insn_r
->arm_insn
, 8, 11);
12351 if ((0 == op
|| 4 == op
|| 8 == op
|| 13 == op
) && 15 == reg_rd
)
12353 record_buf
[0] = ARM_PS_REGNUM
;
12354 thumb2_insn_r
->reg_rec_count
= 1;
12358 record_buf
[0] = reg_rd
;
12359 record_buf
[1] = ARM_PS_REGNUM
;
12360 thumb2_insn_r
->reg_rec_count
= 2;
12363 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
12365 return ARM_RECORD_SUCCESS
;
12368 /* Generic handler for thumb2 instructions which effect destination and PS
12372 thumb2_record_ps_dest_generic (insn_decode_record
*thumb2_insn_r
)
12375 uint32_t record_buf
[8];
12377 reg_rd
= bits (thumb2_insn_r
->arm_insn
, 8, 11);
12379 record_buf
[0] = reg_rd
;
12380 record_buf
[1] = ARM_PS_REGNUM
;
12381 thumb2_insn_r
->reg_rec_count
= 2;
12383 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
12385 return ARM_RECORD_SUCCESS
;
12388 /* Handler for thumb2 branch and miscellaneous control instructions. */
12391 thumb2_record_branch_misc_cntrl (insn_decode_record
*thumb2_insn_r
)
12393 uint32_t op
, op1
, op2
;
12394 uint32_t record_buf
[8];
12396 op
= bits (thumb2_insn_r
->arm_insn
, 20, 26);
12397 op1
= bits (thumb2_insn_r
->arm_insn
, 12, 14);
12398 op2
= bits (thumb2_insn_r
->arm_insn
, 8, 11);
12400 /* Handle MSR insn. */
12401 if (!(op1
& 0x2) && 0x38 == op
)
12405 /* CPSR is going to be changed. */
12406 record_buf
[0] = ARM_PS_REGNUM
;
12407 thumb2_insn_r
->reg_rec_count
= 1;
12411 arm_record_unsupported_insn(thumb2_insn_r
);
12415 else if (4 == (op1
& 0x5) || 5 == (op1
& 0x5))
12418 record_buf
[0] = ARM_PS_REGNUM
;
12419 record_buf
[1] = ARM_LR_REGNUM
;
12420 thumb2_insn_r
->reg_rec_count
= 2;
12423 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
12425 return ARM_RECORD_SUCCESS
;
12428 /* Handler for thumb2 store single data item instructions. */
12431 thumb2_record_str_single_data (insn_decode_record
*thumb2_insn_r
)
12433 struct regcache
*reg_cache
= thumb2_insn_r
->regcache
;
12435 uint32_t reg_rn
, reg_rm
, offset_imm
, shift_imm
;
12436 uint32_t address
, offset_addr
;
12437 uint32_t record_buf
[8], record_buf_mem
[8];
12440 ULONGEST u_regval
[2];
12442 op1
= bits (thumb2_insn_r
->arm_insn
, 21, 23);
12443 op2
= bits (thumb2_insn_r
->arm_insn
, 6, 11);
12444 reg_rn
= bits (thumb2_insn_r
->arm_insn
, 16, 19);
12445 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
[0]);
12447 if (bit (thumb2_insn_r
->arm_insn
, 23))
12450 offset_imm
= bits (thumb2_insn_r
->arm_insn
, 0, 11);
12451 offset_addr
= u_regval
[0] + offset_imm
;
12452 address
= offset_addr
;
12457 if ((0 == op1
|| 1 == op1
|| 2 == op1
) && !(op2
& 0x20))
12459 /* Handle STRB (register). */
12460 reg_rm
= bits (thumb2_insn_r
->arm_insn
, 0, 3);
12461 regcache_raw_read_unsigned (reg_cache
, reg_rm
, &u_regval
[1]);
12462 shift_imm
= bits (thumb2_insn_r
->arm_insn
, 4, 5);
12463 offset_addr
= u_regval
[1] << shift_imm
;
12464 address
= u_regval
[0] + offset_addr
;
12468 offset_imm
= bits (thumb2_insn_r
->arm_insn
, 0, 7);
12469 if (bit (thumb2_insn_r
->arm_insn
, 10))
12471 if (bit (thumb2_insn_r
->arm_insn
, 9))
12472 offset_addr
= u_regval
[0] + offset_imm
;
12474 offset_addr
= u_regval
[0] - offset_imm
;
12476 address
= offset_addr
;
12479 address
= u_regval
[0];
12485 /* Store byte instructions. */
12488 record_buf_mem
[0] = 1;
12490 /* Store half word instructions. */
12493 record_buf_mem
[0] = 2;
12495 /* Store word instructions. */
12498 record_buf_mem
[0] = 4;
12502 gdb_assert_not_reached ("no decoding pattern found");
12506 record_buf_mem
[1] = address
;
12507 thumb2_insn_r
->mem_rec_count
= 1;
12508 record_buf
[0] = reg_rn
;
12509 thumb2_insn_r
->reg_rec_count
= 1;
12511 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
12513 MEM_ALLOC (thumb2_insn_r
->arm_mems
, thumb2_insn_r
->mem_rec_count
,
12515 return ARM_RECORD_SUCCESS
;
12518 /* Handler for thumb2 load memory hints instructions. */
12521 thumb2_record_ld_mem_hints (insn_decode_record
*thumb2_insn_r
)
12523 uint32_t record_buf
[8];
12524 uint32_t reg_rt
, reg_rn
;
12526 reg_rt
= bits (thumb2_insn_r
->arm_insn
, 12, 15);
12527 reg_rn
= bits (thumb2_insn_r
->arm_insn
, 16, 19);
12529 if (ARM_PC_REGNUM
!= reg_rt
)
12531 record_buf
[0] = reg_rt
;
12532 record_buf
[1] = reg_rn
;
12533 record_buf
[2] = ARM_PS_REGNUM
;
12534 thumb2_insn_r
->reg_rec_count
= 3;
12536 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
12538 return ARM_RECORD_SUCCESS
;
12541 return ARM_RECORD_FAILURE
;
12544 /* Handler for thumb2 load word instructions. */
12547 thumb2_record_ld_word (insn_decode_record
*thumb2_insn_r
)
12549 uint32_t record_buf
[8];
12551 record_buf
[0] = bits (thumb2_insn_r
->arm_insn
, 12, 15);
12552 record_buf
[1] = ARM_PS_REGNUM
;
12553 thumb2_insn_r
->reg_rec_count
= 2;
12555 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
12557 return ARM_RECORD_SUCCESS
;
12560 /* Handler for thumb2 long multiply, long multiply accumulate, and
12561 divide instructions. */
12564 thumb2_record_lmul_lmla_div (insn_decode_record
*thumb2_insn_r
)
12566 uint32_t opcode1
= 0, opcode2
= 0;
12567 uint32_t record_buf
[8];
12569 opcode1
= bits (thumb2_insn_r
->arm_insn
, 20, 22);
12570 opcode2
= bits (thumb2_insn_r
->arm_insn
, 4, 7);
12572 if (0 == opcode1
|| 2 == opcode1
|| (opcode1
>= 4 && opcode1
<= 6))
12574 /* Handle SMULL, UMULL, SMULAL. */
12575 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
12576 record_buf
[0] = bits (thumb2_insn_r
->arm_insn
, 16, 19);
12577 record_buf
[1] = bits (thumb2_insn_r
->arm_insn
, 12, 15);
12578 record_buf
[2] = ARM_PS_REGNUM
;
12579 thumb2_insn_r
->reg_rec_count
= 3;
12581 else if (1 == opcode1
|| 3 == opcode2
)
12583 /* Handle SDIV and UDIV. */
12584 record_buf
[0] = bits (thumb2_insn_r
->arm_insn
, 16, 19);
12585 record_buf
[1] = bits (thumb2_insn_r
->arm_insn
, 12, 15);
12586 record_buf
[2] = ARM_PS_REGNUM
;
12587 thumb2_insn_r
->reg_rec_count
= 3;
12590 return ARM_RECORD_FAILURE
;
12592 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
12594 return ARM_RECORD_SUCCESS
;
12597 /* Record handler for thumb32 coprocessor instructions. */
12600 thumb2_record_coproc_insn (insn_decode_record
*thumb2_insn_r
)
12602 if (bit (thumb2_insn_r
->arm_insn
, 25))
12603 return arm_record_coproc_data_proc (thumb2_insn_r
);
12605 return arm_record_asimd_vfp_coproc (thumb2_insn_r
);
12608 /* Record handler for advance SIMD structure load/store instructions. */
12611 thumb2_record_asimd_struct_ld_st (insn_decode_record
*thumb2_insn_r
)
12613 struct regcache
*reg_cache
= thumb2_insn_r
->regcache
;
12614 uint32_t l_bit
, a_bit
, b_bits
;
12615 uint32_t record_buf
[128], record_buf_mem
[128];
12616 uint32_t reg_rn
, reg_vd
, address
, f_elem
;
12617 uint32_t index_r
= 0, index_e
= 0, bf_regs
= 0, index_m
= 0, loop_t
= 0;
12620 l_bit
= bit (thumb2_insn_r
->arm_insn
, 21);
12621 a_bit
= bit (thumb2_insn_r
->arm_insn
, 23);
12622 b_bits
= bits (thumb2_insn_r
->arm_insn
, 8, 11);
12623 reg_rn
= bits (thumb2_insn_r
->arm_insn
, 16, 19);
12624 reg_vd
= bits (thumb2_insn_r
->arm_insn
, 12, 15);
12625 reg_vd
= (bit (thumb2_insn_r
->arm_insn
, 22) << 4) | reg_vd
;
12626 f_ebytes
= (1 << bits (thumb2_insn_r
->arm_insn
, 6, 7));
12627 f_elem
= 8 / f_ebytes
;
12631 ULONGEST u_regval
= 0;
12632 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
);
12633 address
= u_regval
;
12638 if (b_bits
== 0x02 || b_bits
== 0x0a || (b_bits
& 0x0e) == 0x06)
12640 if (b_bits
== 0x07)
12642 else if (b_bits
== 0x0a)
12644 else if (b_bits
== 0x06)
12646 else if (b_bits
== 0x02)
12651 for (index_r
= 0; index_r
< bf_regs
; index_r
++)
12653 for (index_e
= 0; index_e
< f_elem
; index_e
++)
12655 record_buf_mem
[index_m
++] = f_ebytes
;
12656 record_buf_mem
[index_m
++] = address
;
12657 address
= address
+ f_ebytes
;
12658 thumb2_insn_r
->mem_rec_count
+= 1;
12663 else if (b_bits
== 0x03 || (b_bits
& 0x0e) == 0x08)
12665 if (b_bits
== 0x09 || b_bits
== 0x08)
12667 else if (b_bits
== 0x03)
12672 for (index_r
= 0; index_r
< bf_regs
; index_r
++)
12673 for (index_e
= 0; index_e
< f_elem
; index_e
++)
12675 for (loop_t
= 0; loop_t
< 2; loop_t
++)
12677 record_buf_mem
[index_m
++] = f_ebytes
;
12678 record_buf_mem
[index_m
++] = address
+ (loop_t
* f_ebytes
);
12679 thumb2_insn_r
->mem_rec_count
+= 1;
12681 address
= address
+ (2 * f_ebytes
);
12685 else if ((b_bits
& 0x0e) == 0x04)
12687 for (index_e
= 0; index_e
< f_elem
; index_e
++)
12689 for (loop_t
= 0; loop_t
< 3; loop_t
++)
12691 record_buf_mem
[index_m
++] = f_ebytes
;
12692 record_buf_mem
[index_m
++] = address
+ (loop_t
* f_ebytes
);
12693 thumb2_insn_r
->mem_rec_count
+= 1;
12695 address
= address
+ (3 * f_ebytes
);
12699 else if (!(b_bits
& 0x0e))
12701 for (index_e
= 0; index_e
< f_elem
; index_e
++)
12703 for (loop_t
= 0; loop_t
< 4; loop_t
++)
12705 record_buf_mem
[index_m
++] = f_ebytes
;
12706 record_buf_mem
[index_m
++] = address
+ (loop_t
* f_ebytes
);
12707 thumb2_insn_r
->mem_rec_count
+= 1;
12709 address
= address
+ (4 * f_ebytes
);
12715 uint8_t bft_size
= bits (thumb2_insn_r
->arm_insn
, 10, 11);
12717 if (bft_size
== 0x00)
12719 else if (bft_size
== 0x01)
12721 else if (bft_size
== 0x02)
12727 if (!(b_bits
& 0x0b) || b_bits
== 0x08)
12728 thumb2_insn_r
->mem_rec_count
= 1;
12730 else if ((b_bits
& 0x0b) == 0x01 || b_bits
== 0x09)
12731 thumb2_insn_r
->mem_rec_count
= 2;
12733 else if ((b_bits
& 0x0b) == 0x02 || b_bits
== 0x0a)
12734 thumb2_insn_r
->mem_rec_count
= 3;
12736 else if ((b_bits
& 0x0b) == 0x03 || b_bits
== 0x0b)
12737 thumb2_insn_r
->mem_rec_count
= 4;
12739 for (index_m
= 0; index_m
< thumb2_insn_r
->mem_rec_count
; index_m
++)
12741 record_buf_mem
[index_m
] = f_ebytes
;
12742 record_buf_mem
[index_m
] = address
+ (index_m
* f_ebytes
);
12751 if (b_bits
== 0x02 || b_bits
== 0x0a || (b_bits
& 0x0e) == 0x06)
12752 thumb2_insn_r
->reg_rec_count
= 1;
12754 else if (b_bits
== 0x03 || (b_bits
& 0x0e) == 0x08)
12755 thumb2_insn_r
->reg_rec_count
= 2;
12757 else if ((b_bits
& 0x0e) == 0x04)
12758 thumb2_insn_r
->reg_rec_count
= 3;
12760 else if (!(b_bits
& 0x0e))
12761 thumb2_insn_r
->reg_rec_count
= 4;
12766 if (!(b_bits
& 0x0b) || b_bits
== 0x08 || b_bits
== 0x0c)
12767 thumb2_insn_r
->reg_rec_count
= 1;
12769 else if ((b_bits
& 0x0b) == 0x01 || b_bits
== 0x09 || b_bits
== 0x0d)
12770 thumb2_insn_r
->reg_rec_count
= 2;
12772 else if ((b_bits
& 0x0b) == 0x02 || b_bits
== 0x0a || b_bits
== 0x0e)
12773 thumb2_insn_r
->reg_rec_count
= 3;
12775 else if ((b_bits
& 0x0b) == 0x03 || b_bits
== 0x0b || b_bits
== 0x0f)
12776 thumb2_insn_r
->reg_rec_count
= 4;
12778 for (index_r
= 0; index_r
< thumb2_insn_r
->reg_rec_count
; index_r
++)
12779 record_buf
[index_r
] = reg_vd
+ ARM_D0_REGNUM
+ index_r
;
12783 if (bits (thumb2_insn_r
->arm_insn
, 0, 3) != 15)
12785 record_buf
[index_r
] = reg_rn
;
12786 thumb2_insn_r
->reg_rec_count
+= 1;
12789 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
12791 MEM_ALLOC (thumb2_insn_r
->arm_mems
, thumb2_insn_r
->mem_rec_count
,
12796 /* Decodes thumb2 instruction type and invokes its record handler. */
12798 static unsigned int
12799 thumb2_record_decode_insn_handler (insn_decode_record
*thumb2_insn_r
)
12801 uint32_t op
, op1
, op2
;
12803 op
= bit (thumb2_insn_r
->arm_insn
, 15);
12804 op1
= bits (thumb2_insn_r
->arm_insn
, 27, 28);
12805 op2
= bits (thumb2_insn_r
->arm_insn
, 20, 26);
12809 if (!(op2
& 0x64 ))
12811 /* Load/store multiple instruction. */
12812 return thumb2_record_ld_st_multiple (thumb2_insn_r
);
12814 else if (!((op2
& 0x64) ^ 0x04))
12816 /* Load/store (dual/exclusive) and table branch instruction. */
12817 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r
);
12819 else if (!((op2
& 0x20) ^ 0x20))
12821 /* Data-processing (shifted register). */
12822 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r
);
12824 else if (op2
& 0x40)
12826 /* Co-processor instructions. */
12827 return thumb2_record_coproc_insn (thumb2_insn_r
);
12830 else if (op1
== 0x02)
12834 /* Branches and miscellaneous control instructions. */
12835 return thumb2_record_branch_misc_cntrl (thumb2_insn_r
);
12837 else if (op2
& 0x20)
12839 /* Data-processing (plain binary immediate) instruction. */
12840 return thumb2_record_ps_dest_generic (thumb2_insn_r
);
12844 /* Data-processing (modified immediate). */
12845 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r
);
12848 else if (op1
== 0x03)
12850 if (!(op2
& 0x71 ))
12852 /* Store single data item. */
12853 return thumb2_record_str_single_data (thumb2_insn_r
);
12855 else if (!((op2
& 0x71) ^ 0x10))
12857 /* Advanced SIMD or structure load/store instructions. */
12858 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r
);
12860 else if (!((op2
& 0x67) ^ 0x01))
12862 /* Load byte, memory hints instruction. */
12863 return thumb2_record_ld_mem_hints (thumb2_insn_r
);
12865 else if (!((op2
& 0x67) ^ 0x03))
12867 /* Load halfword, memory hints instruction. */
12868 return thumb2_record_ld_mem_hints (thumb2_insn_r
);
12870 else if (!((op2
& 0x67) ^ 0x05))
12872 /* Load word instruction. */
12873 return thumb2_record_ld_word (thumb2_insn_r
);
12875 else if (!((op2
& 0x70) ^ 0x20))
12877 /* Data-processing (register) instruction. */
12878 return thumb2_record_ps_dest_generic (thumb2_insn_r
);
12880 else if (!((op2
& 0x78) ^ 0x30))
12882 /* Multiply, multiply accumulate, abs diff instruction. */
12883 return thumb2_record_ps_dest_generic (thumb2_insn_r
);
12885 else if (!((op2
& 0x78) ^ 0x38))
12887 /* Long multiply, long multiply accumulate, and divide. */
12888 return thumb2_record_lmul_lmla_div (thumb2_insn_r
);
12890 else if (op2
& 0x40)
12892 /* Co-processor instructions. */
12893 return thumb2_record_coproc_insn (thumb2_insn_r
);
12900 /* Extracts arm/thumb/thumb2 insn depending on the size, and returns 0 on success
12901 and positive val on fauilure. */
12904 extract_arm_insn (insn_decode_record
*insn_record
, uint32_t insn_size
)
12906 gdb_byte buf
[insn_size
];
12908 memset (&buf
[0], 0, insn_size
);
12910 if (target_read_memory (insn_record
->this_addr
, &buf
[0], insn_size
))
12912 insn_record
->arm_insn
= (uint32_t) extract_unsigned_integer (&buf
[0],
12914 gdbarch_byte_order_for_code (insn_record
->gdbarch
));
/* Signature of an arm/thumb record handler: takes the decoded instruction
   record and returns ARM_RECORD_SUCCESS (0) on success, non-zero on
   failure.  Used for the dispatch tables in decode_insn below.  */
typedef int (*sti_arm_hdl_fp_t) (insn_decode_record *);
12920 /* Decode arm/thumb insn depending on condition cods and opcodes; and
12924 decode_insn (insn_decode_record
*arm_record
, record_type_t record_type
,
12925 uint32_t insn_size
)
12928 /* (Starting from numerical 0); bits 25, 26, 27 decodes type of arm
12930 static const sti_arm_hdl_fp_t arm_handle_insn
[8] =
12932 arm_record_data_proc_misc_ld_str
, /* 000. */
12933 arm_record_data_proc_imm
, /* 001. */
12934 arm_record_ld_st_imm_offset
, /* 010. */
12935 arm_record_ld_st_reg_offset
, /* 011. */
12936 arm_record_ld_st_multiple
, /* 100. */
12937 arm_record_b_bl
, /* 101. */
12938 arm_record_asimd_vfp_coproc
, /* 110. */
12939 arm_record_coproc_data_proc
/* 111. */
12942 /* (Starting from numerical 0); bits 13,14,15 decodes type of thumb
12944 static const sti_arm_hdl_fp_t thumb_handle_insn
[8] =
12946 thumb_record_shift_add_sub
, /* 000. */
12947 thumb_record_add_sub_cmp_mov
, /* 001. */
12948 thumb_record_ld_st_reg_offset
, /* 010. */
12949 thumb_record_ld_st_imm_offset
, /* 011. */
12950 thumb_record_ld_st_stack
, /* 100. */
12951 thumb_record_misc
, /* 101. */
12952 thumb_record_ldm_stm_swi
, /* 110. */
12953 thumb_record_branch
/* 111. */
12956 uint32_t ret
= 0; /* return value: negative:failure 0:success. */
12957 uint32_t insn_id
= 0;
12959 if (extract_arm_insn (arm_record
, insn_size
))
12963 printf_unfiltered (_("Process record: error reading memory at "
12964 "addr %s len = %d.\n"),
12965 paddress (arm_record
->gdbarch
,
12966 arm_record
->this_addr
), insn_size
);
12970 else if (ARM_RECORD
== record_type
)
12972 arm_record
->cond
= bits (arm_record
->arm_insn
, 28, 31);
12973 insn_id
= bits (arm_record
->arm_insn
, 25, 27);
12975 if (arm_record
->cond
== 0xf)
12976 ret
= arm_record_extension_space (arm_record
);
12979 /* If this insn has fallen into extension space
12980 then we need not decode it anymore. */
12981 ret
= arm_handle_insn
[insn_id
] (arm_record
);
12983 if (ret
!= ARM_RECORD_SUCCESS
)
12985 arm_record_unsupported_insn (arm_record
);
12989 else if (THUMB_RECORD
== record_type
)
12991 /* As thumb does not have condition codes, we set negative. */
12992 arm_record
->cond
= -1;
12993 insn_id
= bits (arm_record
->arm_insn
, 13, 15);
12994 ret
= thumb_handle_insn
[insn_id
] (arm_record
);
12995 if (ret
!= ARM_RECORD_SUCCESS
)
12997 arm_record_unsupported_insn (arm_record
);
13001 else if (THUMB2_RECORD
== record_type
)
13003 /* As thumb does not have condition codes, we set negative. */
13004 arm_record
->cond
= -1;
13006 /* Swap first half of 32bit thumb instruction with second half. */
13007 arm_record
->arm_insn
13008 = (arm_record
->arm_insn
>> 16) | (arm_record
->arm_insn
<< 16);
13010 ret
= thumb2_record_decode_insn_handler (arm_record
);
13012 if (ret
!= ARM_RECORD_SUCCESS
)
13014 arm_record_unsupported_insn (arm_record
);
13020 /* Throw assertion. */
13021 gdb_assert_not_reached ("not a valid instruction, could not decode");
13028 /* Cleans up local record registers and memory allocations. */
13031 deallocate_reg_mem (insn_decode_record
*record
)
13033 xfree (record
->arm_regs
);
13034 xfree (record
->arm_mems
);
13038 /* Parse the current instruction and record the values of the registers and
13039 memory that will be changed in current instruction to record_arch_list".
13040 Return -1 if something is wrong. */
13043 arm_process_record (struct gdbarch
*gdbarch
, struct regcache
*regcache
,
13044 CORE_ADDR insn_addr
)
13047 uint32_t no_of_rec
= 0;
13048 uint32_t ret
= 0; /* return value: -1:record failure ; 0:success */
13049 ULONGEST t_bit
= 0, insn_id
= 0;
13051 ULONGEST u_regval
= 0;
13053 insn_decode_record arm_record
;
13055 memset (&arm_record
, 0, sizeof (insn_decode_record
));
13056 arm_record
.regcache
= regcache
;
13057 arm_record
.this_addr
= insn_addr
;
13058 arm_record
.gdbarch
= gdbarch
;
13061 if (record_debug
> 1)
13063 fprintf_unfiltered (gdb_stdlog
, "Process record: arm_process_record "
13065 paddress (gdbarch
, arm_record
.this_addr
));
13068 if (extract_arm_insn (&arm_record
, 2))
13072 printf_unfiltered (_("Process record: error reading memory at "
13073 "addr %s len = %d.\n"),
13074 paddress (arm_record
.gdbarch
,
13075 arm_record
.this_addr
), 2);
13080 /* Check the insn, whether it is thumb or arm one. */
13082 t_bit
= arm_psr_thumb_bit (arm_record
.gdbarch
);
13083 regcache_raw_read_unsigned (arm_record
.regcache
, ARM_PS_REGNUM
, &u_regval
);
13086 if (!(u_regval
& t_bit
))
13088 /* We are decoding arm insn. */
13089 ret
= decode_insn (&arm_record
, ARM_RECORD
, ARM_INSN_SIZE_BYTES
);
13093 insn_id
= bits (arm_record
.arm_insn
, 11, 15);
13094 /* is it thumb2 insn? */
13095 if ((0x1D == insn_id
) || (0x1E == insn_id
) || (0x1F == insn_id
))
13097 ret
= decode_insn (&arm_record
, THUMB2_RECORD
,
13098 THUMB2_INSN_SIZE_BYTES
);
13102 /* We are decoding thumb insn. */
13103 ret
= decode_insn (&arm_record
, THUMB_RECORD
, THUMB_INSN_SIZE_BYTES
);
13109 /* Record registers. */
13110 record_full_arch_list_add_reg (arm_record
.regcache
, ARM_PC_REGNUM
);
13111 if (arm_record
.arm_regs
)
13113 for (no_of_rec
= 0; no_of_rec
< arm_record
.reg_rec_count
; no_of_rec
++)
13115 if (record_full_arch_list_add_reg
13116 (arm_record
.regcache
, arm_record
.arm_regs
[no_of_rec
]))
13120 /* Record memories. */
13121 if (arm_record
.arm_mems
)
13123 for (no_of_rec
= 0; no_of_rec
< arm_record
.mem_rec_count
; no_of_rec
++)
13125 if (record_full_arch_list_add_mem
13126 ((CORE_ADDR
)arm_record
.arm_mems
[no_of_rec
].addr
,
13127 arm_record
.arm_mems
[no_of_rec
].len
))
13132 if (record_full_arch_list_add_end ())
13137 deallocate_reg_mem (&arm_record
);