1 /* Common target dependent code for GDB on ARM systems.
3 Copyright (C) 1988-2015 Free Software Foundation, Inc.
5 This file is part of GDB.
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
22 #include <ctype.h> /* XXX for isupper (). */
29 #include "dis-asm.h" /* For register styles. */
31 #include "reggroups.h"
34 #include "arch-utils.h"
36 #include "frame-unwind.h"
37 #include "frame-base.h"
38 #include "trad-frame.h"
40 #include "dwarf2-frame.h"
42 #include "prologue-value.h"
44 #include "target-descriptions.h"
45 #include "user-regs.h"
50 #include "gdb/sim-arm.h"
53 #include "coff/internal.h"
59 #include "record-full.h"
61 #include "features/arm-with-m.c"
62 #include "features/arm-with-m-fpa-layout.c"
63 #include "features/arm-with-m-vfp-d16.c"
64 #include "features/arm-with-iwmmxt.c"
65 #include "features/arm-with-vfpv2.c"
66 #include "features/arm-with-vfpv3.c"
67 #include "features/arm-with-neon.c"
71 /* Macros for setting and testing a bit in a minimal symbol that marks
72 it as Thumb function. The MSB of the minimal symbol's "info" field
73 is used for this purpose.
75 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
76 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
78 #define MSYMBOL_SET_SPECIAL(msym) \
79 MSYMBOL_TARGET_FLAG_1 (msym) = 1
81 #define MSYMBOL_IS_SPECIAL(msym) \
82 MSYMBOL_TARGET_FLAG_1 (msym)
84 /* Per-objfile data used for mapping symbols. */
85 static const struct objfile_data
*arm_objfile_data_key
;
87 struct arm_mapping_symbol
92 typedef struct arm_mapping_symbol arm_mapping_symbol_s
;
93 DEF_VEC_O(arm_mapping_symbol_s
);
95 struct arm_per_objfile
97 VEC(arm_mapping_symbol_s
) **section_maps
;
100 /* The list of available "set arm ..." and "show arm ..." commands. */
101 static struct cmd_list_element
*setarmcmdlist
= NULL
;
102 static struct cmd_list_element
*showarmcmdlist
= NULL
;
104 /* The type of floating-point to use. Keep this in sync with enum
105 arm_float_model, and the help string in _initialize_arm_tdep. */
106 static const char *const fp_model_strings
[] =
116 /* A variable that can be configured by the user. */
117 static enum arm_float_model arm_fp_model
= ARM_FLOAT_AUTO
;
118 static const char *current_fp_model
= "auto";
120 /* The ABI to use. Keep this in sync with arm_abi_kind. */
121 static const char *const arm_abi_strings
[] =
129 /* A variable that can be configured by the user. */
130 static enum arm_abi_kind arm_abi_global
= ARM_ABI_AUTO
;
131 static const char *arm_abi_string
= "auto";
133 /* The execution mode to assume. */
134 static const char *const arm_mode_strings
[] =
142 static const char *arm_fallback_mode_string
= "auto";
143 static const char *arm_force_mode_string
= "auto";
145 /* Internal override of the execution mode. -1 means no override,
146 0 means override to ARM mode, 1 means override to Thumb mode.
147 The effect is the same as if arm_force_mode has been set by the
148 user (except the internal override has precedence over a user's
149 arm_force_mode override). */
150 static int arm_override_mode
= -1;
152 /* Number of different reg name sets (options). */
153 static int num_disassembly_options
;
155 /* The standard register names, and all the valid aliases for them. Note
156 that `fp', `sp' and `pc' are not added in this alias list, because they
157 have been added as builtin user registers in
158 std-regs.c:_initialize_frame_reg. */
163 } arm_register_aliases
[] = {
164 /* Basic register numbers. */
181 /* Synonyms (argument and variable registers). */
194 /* Other platform-specific names for r9. */
200 /* Names used by GCC (not listed in the ARM EABI). */
202 /* A special name from the older ATPCS. */
206 static const char *const arm_register_names
[] =
207 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
208 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
209 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
210 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
211 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
212 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
213 "fps", "cpsr" }; /* 24 25 */
215 /* Valid register name styles. */
216 static const char **valid_disassembly_styles
;
218 /* Disassembly style to use. Default to "std" register names. */
219 static const char *disassembly_style
;
221 /* This is used to keep the bfd arch_info in sync with the disassembly
223 static void set_disassembly_style_sfunc(char *, int,
224 struct cmd_list_element
*);
225 static void set_disassembly_style (void);
227 static void convert_from_extended (const struct floatformat
*, const void *,
229 static void convert_to_extended (const struct floatformat
*, void *,
232 static enum register_status
arm_neon_quad_read (struct gdbarch
*gdbarch
,
233 struct regcache
*regcache
,
234 int regnum
, gdb_byte
*buf
);
235 static void arm_neon_quad_write (struct gdbarch
*gdbarch
,
236 struct regcache
*regcache
,
237 int regnum
, const gdb_byte
*buf
);
239 struct arm_prologue_cache
241 /* The stack pointer at the time this frame was created; i.e. the
242 caller's stack pointer when this function was called. It is used
243 to identify this frame. */
246 /* The frame base for this frame is just prev_sp - frame size.
247 FRAMESIZE is the distance from the frame pointer to the
248 initial stack pointer. */
252 /* The register used to hold the frame pointer for this frame. */
255 /* Saved register offsets. */
256 struct trad_frame_saved_reg
*saved_regs
;
259 static CORE_ADDR
arm_analyze_prologue (struct gdbarch
*gdbarch
,
260 CORE_ADDR prologue_start
,
261 CORE_ADDR prologue_end
,
262 struct arm_prologue_cache
*cache
);
264 /* Architecture version for displaced stepping. This effects the behaviour of
265 certain instructions, and really should not be hard-wired. */
267 #define DISPLACED_STEPPING_ARCH_VERSION 5
269 /* Set to true if the 32-bit mode is in use. */
273 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
276 arm_psr_thumb_bit (struct gdbarch
*gdbarch
)
278 if (gdbarch_tdep (gdbarch
)->is_m
)
284 /* Determine if FRAME is executing in Thumb mode. */
287 arm_frame_is_thumb (struct frame_info
*frame
)
290 ULONGEST t_bit
= arm_psr_thumb_bit (get_frame_arch (frame
));
292 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
293 directly (from a signal frame or dummy frame) or by interpreting
294 the saved LR (from a prologue or DWARF frame). So consult it and
295 trust the unwinders. */
296 cpsr
= get_frame_register_unsigned (frame
, ARM_PS_REGNUM
);
298 return (cpsr
& t_bit
) != 0;
301 /* Callback for VEC_lower_bound. */
304 arm_compare_mapping_symbols (const struct arm_mapping_symbol
*lhs
,
305 const struct arm_mapping_symbol
*rhs
)
307 return lhs
->value
< rhs
->value
;
310 /* Search for the mapping symbol covering MEMADDR. If one is found,
311 return its type. Otherwise, return 0. If START is non-NULL,
312 set *START to the location of the mapping symbol. */
315 arm_find_mapping_symbol (CORE_ADDR memaddr
, CORE_ADDR
*start
)
317 struct obj_section
*sec
;
319 /* If there are mapping symbols, consult them. */
320 sec
= find_pc_section (memaddr
);
323 struct arm_per_objfile
*data
;
324 VEC(arm_mapping_symbol_s
) *map
;
325 struct arm_mapping_symbol map_key
= { memaddr
- obj_section_addr (sec
),
329 data
= (struct arm_per_objfile
*) objfile_data (sec
->objfile
,
330 arm_objfile_data_key
);
333 map
= data
->section_maps
[sec
->the_bfd_section
->index
];
334 if (!VEC_empty (arm_mapping_symbol_s
, map
))
336 struct arm_mapping_symbol
*map_sym
;
338 idx
= VEC_lower_bound (arm_mapping_symbol_s
, map
, &map_key
,
339 arm_compare_mapping_symbols
);
341 /* VEC_lower_bound finds the earliest ordered insertion
342 point. If the following symbol starts at this exact
343 address, we use that; otherwise, the preceding
344 mapping symbol covers this address. */
345 if (idx
< VEC_length (arm_mapping_symbol_s
, map
))
347 map_sym
= VEC_index (arm_mapping_symbol_s
, map
, idx
);
348 if (map_sym
->value
== map_key
.value
)
351 *start
= map_sym
->value
+ obj_section_addr (sec
);
352 return map_sym
->type
;
358 map_sym
= VEC_index (arm_mapping_symbol_s
, map
, idx
- 1);
360 *start
= map_sym
->value
+ obj_section_addr (sec
);
361 return map_sym
->type
;
370 /* Determine if the program counter specified in MEMADDR is in a Thumb
371 function. This function should be called for addresses unrelated to
372 any executing frame; otherwise, prefer arm_frame_is_thumb. */
375 arm_pc_is_thumb (struct gdbarch
*gdbarch
, CORE_ADDR memaddr
)
377 struct bound_minimal_symbol sym
;
379 struct displaced_step_closure
* dsc
380 = get_displaced_step_closure_by_addr(memaddr
);
382 /* If checking the mode of displaced instruction in copy area, the mode
383 should be determined by instruction on the original address. */
387 fprintf_unfiltered (gdb_stdlog
,
388 "displaced: check mode of %.8lx instead of %.8lx\n",
389 (unsigned long) dsc
->insn_addr
,
390 (unsigned long) memaddr
);
391 memaddr
= dsc
->insn_addr
;
394 /* If bit 0 of the address is set, assume this is a Thumb address. */
395 if (IS_THUMB_ADDR (memaddr
))
398 /* Respect internal mode override if active. */
399 if (arm_override_mode
!= -1)
400 return arm_override_mode
;
402 /* If the user wants to override the symbol table, let him. */
403 if (strcmp (arm_force_mode_string
, "arm") == 0)
405 if (strcmp (arm_force_mode_string
, "thumb") == 0)
408 /* ARM v6-M and v7-M are always in Thumb mode. */
409 if (gdbarch_tdep (gdbarch
)->is_m
)
412 /* If there are mapping symbols, consult them. */
413 type
= arm_find_mapping_symbol (memaddr
, NULL
);
417 /* Thumb functions have a "special" bit set in minimal symbols. */
418 sym
= lookup_minimal_symbol_by_pc (memaddr
);
420 return (MSYMBOL_IS_SPECIAL (sym
.minsym
));
422 /* If the user wants to override the fallback mode, let them. */
423 if (strcmp (arm_fallback_mode_string
, "arm") == 0)
425 if (strcmp (arm_fallback_mode_string
, "thumb") == 0)
428 /* If we couldn't find any symbol, but we're talking to a running
429 target, then trust the current value of $cpsr. This lets
430 "display/i $pc" always show the correct mode (though if there is
431 a symbol table we will not reach here, so it still may not be
432 displayed in the mode it will be executed). */
433 if (target_has_registers
)
434 return arm_frame_is_thumb (get_current_frame ());
436 /* Otherwise we're out of luck; we assume ARM. */
440 /* Remove useless bits from addresses in a running program. */
442 arm_addr_bits_remove (struct gdbarch
*gdbarch
, CORE_ADDR val
)
444 /* On M-profile devices, do not strip the low bit from EXC_RETURN
445 (the magic exception return address). */
446 if (gdbarch_tdep (gdbarch
)->is_m
447 && (val
& 0xfffffff0) == 0xfffffff0)
451 return UNMAKE_THUMB_ADDR (val
);
453 return (val
& 0x03fffffc);
456 /* Return 1 if PC is the start of a compiler helper function which
457 can be safely ignored during prologue skipping. IS_THUMB is true
458 if the function is known to be a Thumb function due to the way it
461 skip_prologue_function (struct gdbarch
*gdbarch
, CORE_ADDR pc
, int is_thumb
)
463 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
464 struct bound_minimal_symbol msym
;
466 msym
= lookup_minimal_symbol_by_pc (pc
);
467 if (msym
.minsym
!= NULL
468 && BMSYMBOL_VALUE_ADDRESS (msym
) == pc
469 && MSYMBOL_LINKAGE_NAME (msym
.minsym
) != NULL
)
471 const char *name
= MSYMBOL_LINKAGE_NAME (msym
.minsym
);
473 /* The GNU linker's Thumb call stub to foo is named
475 if (strstr (name
, "_from_thumb") != NULL
)
478 /* On soft-float targets, __truncdfsf2 is called to convert promoted
479 arguments to their argument types in non-prototyped
481 if (startswith (name
, "__truncdfsf2"))
483 if (startswith (name
, "__aeabi_d2f"))
486 /* Internal functions related to thread-local storage. */
487 if (startswith (name
, "__tls_get_addr"))
489 if (startswith (name
, "__aeabi_read_tp"))
494 /* If we run against a stripped glibc, we may be unable to identify
495 special functions by name. Check for one important case,
496 __aeabi_read_tp, by comparing the *code* against the default
497 implementation (this is hand-written ARM assembler in glibc). */
500 && read_memory_unsigned_integer (pc
, 4, byte_order_for_code
)
501 == 0xe3e00a0f /* mov r0, #0xffff0fff */
502 && read_memory_unsigned_integer (pc
+ 4, 4, byte_order_for_code
)
503 == 0xe240f01f) /* sub pc, r0, #31 */
/* Support routines for instruction parsing.  */

/* A mask of the low (x + 1) bits.  Use an unsigned constant so that a
   shift of up to the full field width never left-shifts a signed type
   out of range (signed left-shift overflow is undefined behavior).  */
#define submask(x) ((1UL << ((x) + 1)) - 1)

/* Bit ST of OBJ.  */
#define bit(obj,st) (((obj) >> (st)) & 1)

/* Bits ST..FN (inclusive) of OBJ.  */
#define bits(obj,st,fn) (((obj) >> (st)) & submask ((fn) - (st)))

/* Bits ST..FN of OBJ, sign-extended from bit FN.  */
#define sbits(obj,st,fn) \
  ((long) (bits(obj,st,fn) | ((long) bit(obj,fn) * ~ submask (fn - st))))

/* Destination of an ARM-mode branch: ADDR plus 8 (the pipeline
   offset) plus the sign-extended 24-bit offset scaled by 4.  */
#define BranchDest(addr,instr) \
  ((CORE_ADDR) (((unsigned long) (addr)) + 8 + (sbits (instr, 0, 23) << 2)))

/* Extract the immediate from instruction movw/movt of encoding T.  INSN1 is
   the first 16-bit of instruction, and INSN2 is the second 16-bit of
   instruction.  */
#define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
  ((bits ((insn1), 0, 3) << 12)		      \
   | (bits ((insn1), 10, 10) << 11)	      \
   | (bits ((insn2), 12, 14) << 8)	      \
   | bits ((insn2), 0, 7))

/* Extract the immediate from instruction movw/movt of encoding A.  INSN is
   the 32-bit instruction.  */
#define EXTRACT_MOVW_MOVT_IMM_A(insn) \
  ((bits ((insn), 16, 19) << 12) \
   | bits ((insn), 0, 11))
/* Decode immediate value; implements ThumbExpandImmediate pseudo-op.
   IMM is the 12-bit modified immediate field (i:imm3:imm8); the result
   is the expanded 32-bit constant.  */

static unsigned int
thumb_expand_immediate (unsigned int imm)
{
  unsigned int count = imm >> 7;

  if (count < 8)
    switch (count / 2)
      {
      case 0:
	/* 00000000 00000000 00000000 abcdefgh */
	return (imm & 0xff);
      case 1:
	/* 00000000 abcdefgh 00000000 abcdefgh */
	return (imm & 0xff) | ((imm & 0xff) << 16);
      case 2:
	/* abcdefgh 00000000 abcdefgh 00000000 */
	return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
      case 3:
	/* abcdefgh abcdefgh abcdefgh abcdefgh */
	return (imm & 0xff) | ((imm & 0xff) << 8)
	  | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
      }

  /* An 8-bit value, with its top bit forced on, rotated right by COUNT.
     The value occupies only 8 bits and COUNT >= 8 here, so the bits
     rotated off the bottom are always zero; a plain left shift by
     (32 - COUNT) therefore implements the rotation.  */
  return (0x80 | (imm & 0x7f)) << (32 - count);
}
/* Return 1 if the 16-bit Thumb instruction INST might change
   control flow, 0 otherwise.  */

static int
thumb_instruction_changes_pc (unsigned short inst)
{
  if ((inst & 0xff00) == 0xbd00)	/* pop {rlist, pc} */
    return 1;

  if ((inst & 0xf000) == 0xd000)	/* conditional branch */
    return 1;

  if ((inst & 0xf800) == 0xe000)	/* unconditional branch */
    return 1;

  if ((inst & 0xff00) == 0x4700)	/* bx REG, blx REG */
    return 1;

  if ((inst & 0xff87) == 0x4687)	/* mov pc, REG */
    return 1;

  if ((inst & 0xf500) == 0xb100)	/* CBNZ or CBZ.  */
    return 1;

  return 0;
}
585 /* Return 1 if the 32-bit Thumb instruction in INST1 and INST2
586 might change control flow, 0 otherwise. */
589 thumb2_instruction_changes_pc (unsigned short inst1
, unsigned short inst2
)
591 if ((inst1
& 0xf800) == 0xf000 && (inst2
& 0x8000) == 0x8000)
593 /* Branches and miscellaneous control instructions. */
595 if ((inst2
& 0x1000) != 0 || (inst2
& 0xd001) == 0xc000)
600 else if (inst1
== 0xf3de && (inst2
& 0xff00) == 0x3f00)
602 /* SUBS PC, LR, #imm8. */
605 else if ((inst2
& 0xd000) == 0x8000 && (inst1
& 0x0380) != 0x0380)
607 /* Conditional branch. */
614 if ((inst1
& 0xfe50) == 0xe810)
616 /* Load multiple or RFE. */
618 if (bit (inst1
, 7) && !bit (inst1
, 8))
624 else if (!bit (inst1
, 7) && bit (inst1
, 8))
630 else if (bit (inst1
, 7) && bit (inst1
, 8))
635 else if (!bit (inst1
, 7) && !bit (inst1
, 8))
644 if ((inst1
& 0xffef) == 0xea4f && (inst2
& 0xfff0) == 0x0f00)
646 /* MOV PC or MOVS PC. */
650 if ((inst1
& 0xff70) == 0xf850 && (inst2
& 0xf000) == 0xf000)
653 if (bits (inst1
, 0, 3) == 15)
659 if ((inst2
& 0x0fc0) == 0x0000)
665 if ((inst1
& 0xfff0) == 0xe8d0 && (inst2
& 0xfff0) == 0xf000)
671 if ((inst1
& 0xfff0) == 0xe8d0 && (inst2
& 0xfff0) == 0xf010)
/* Return 1 if the 16-bit Thumb instruction INSN restores SP in
   epilogue, 0 otherwise.  */

static int
thumb_instruction_restores_sp (unsigned short insn)
{
  return (insn == 0x46bd		 /* mov sp, r7 */
	  || (insn & 0xff80) == 0xb000	 /* add sp, imm */
	  || (insn & 0xfe00) == 0xbc00); /* pop <registers> */
}
691 /* Analyze a Thumb prologue, looking for a recognizable stack frame
692 and frame pointer. Scan until we encounter a store that could
693 clobber the stack frame unexpectedly, or an unknown instruction.
694 Return the last address which is definitely safe to skip for an
695 initial breakpoint. */
698 thumb_analyze_prologue (struct gdbarch
*gdbarch
,
699 CORE_ADDR start
, CORE_ADDR limit
,
700 struct arm_prologue_cache
*cache
)
702 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
703 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
706 struct pv_area
*stack
;
707 struct cleanup
*back_to
;
709 CORE_ADDR unrecognized_pc
= 0;
711 for (i
= 0; i
< 16; i
++)
712 regs
[i
] = pv_register (i
, 0);
713 stack
= make_pv_area (ARM_SP_REGNUM
, gdbarch_addr_bit (gdbarch
));
714 back_to
= make_cleanup_free_pv_area (stack
);
716 while (start
< limit
)
720 insn
= read_memory_unsigned_integer (start
, 2, byte_order_for_code
);
722 if ((insn
& 0xfe00) == 0xb400) /* push { rlist } */
727 if (pv_area_store_would_trash (stack
, regs
[ARM_SP_REGNUM
]))
730 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
731 whether to save LR (R14). */
732 mask
= (insn
& 0xff) | ((insn
& 0x100) << 6);
734 /* Calculate offsets of saved R0-R7 and LR. */
735 for (regno
= ARM_LR_REGNUM
; regno
>= 0; regno
--)
736 if (mask
& (1 << regno
))
738 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
],
740 pv_area_store (stack
, regs
[ARM_SP_REGNUM
], 4, regs
[regno
]);
743 else if ((insn
& 0xff80) == 0xb080) /* sub sp, #imm */
745 offset
= (insn
& 0x7f) << 2; /* get scaled offset */
746 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
],
749 else if (thumb_instruction_restores_sp (insn
))
751 /* Don't scan past the epilogue. */
754 else if ((insn
& 0xf800) == 0xa800) /* add Rd, sp, #imm */
755 regs
[bits (insn
, 8, 10)] = pv_add_constant (regs
[ARM_SP_REGNUM
],
757 else if ((insn
& 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
758 && pv_is_register (regs
[bits (insn
, 3, 5)], ARM_SP_REGNUM
))
759 regs
[bits (insn
, 0, 2)] = pv_add_constant (regs
[bits (insn
, 3, 5)],
761 else if ((insn
& 0xf800) == 0x3000 /* add Rd, #imm */
762 && pv_is_register (regs
[bits (insn
, 8, 10)], ARM_SP_REGNUM
))
763 regs
[bits (insn
, 8, 10)] = pv_add_constant (regs
[bits (insn
, 8, 10)],
765 else if ((insn
& 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
766 && pv_is_register (regs
[bits (insn
, 6, 8)], ARM_SP_REGNUM
)
767 && pv_is_constant (regs
[bits (insn
, 3, 5)]))
768 regs
[bits (insn
, 0, 2)] = pv_add (regs
[bits (insn
, 3, 5)],
769 regs
[bits (insn
, 6, 8)]);
770 else if ((insn
& 0xff00) == 0x4400 /* add Rd, Rm */
771 && pv_is_constant (regs
[bits (insn
, 3, 6)]))
773 int rd
= (bit (insn
, 7) << 3) + bits (insn
, 0, 2);
774 int rm
= bits (insn
, 3, 6);
775 regs
[rd
] = pv_add (regs
[rd
], regs
[rm
]);
777 else if ((insn
& 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
779 int dst_reg
= (insn
& 0x7) + ((insn
& 0x80) >> 4);
780 int src_reg
= (insn
& 0x78) >> 3;
781 regs
[dst_reg
] = regs
[src_reg
];
783 else if ((insn
& 0xf800) == 0x9000) /* str rd, [sp, #off] */
785 /* Handle stores to the stack. Normally pushes are used,
786 but with GCC -mtpcs-frame, there may be other stores
787 in the prologue to create the frame. */
788 int regno
= (insn
>> 8) & 0x7;
791 offset
= (insn
& 0xff) << 2;
792 addr
= pv_add_constant (regs
[ARM_SP_REGNUM
], offset
);
794 if (pv_area_store_would_trash (stack
, addr
))
797 pv_area_store (stack
, addr
, 4, regs
[regno
]);
799 else if ((insn
& 0xf800) == 0x6000) /* str rd, [rn, #off] */
801 int rd
= bits (insn
, 0, 2);
802 int rn
= bits (insn
, 3, 5);
805 offset
= bits (insn
, 6, 10) << 2;
806 addr
= pv_add_constant (regs
[rn
], offset
);
808 if (pv_area_store_would_trash (stack
, addr
))
811 pv_area_store (stack
, addr
, 4, regs
[rd
]);
813 else if (((insn
& 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
814 || (insn
& 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
815 && pv_is_register (regs
[bits (insn
, 3, 5)], ARM_SP_REGNUM
))
816 /* Ignore stores of argument registers to the stack. */
818 else if ((insn
& 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
819 && pv_is_register (regs
[bits (insn
, 8, 10)], ARM_SP_REGNUM
))
820 /* Ignore block loads from the stack, potentially copying
821 parameters from memory. */
823 else if ((insn
& 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
824 || ((insn
& 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
825 && pv_is_register (regs
[bits (insn
, 3, 5)], ARM_SP_REGNUM
)))
826 /* Similarly ignore single loads from the stack. */
828 else if ((insn
& 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
829 || (insn
& 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
830 /* Skip register copies, i.e. saves to another register
831 instead of the stack. */
833 else if ((insn
& 0xf800) == 0x2000) /* movs Rd, #imm */
834 /* Recognize constant loads; even with small stacks these are necessary
836 regs
[bits (insn
, 8, 10)] = pv_constant (bits (insn
, 0, 7));
837 else if ((insn
& 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
839 /* Constant pool loads, for the same reason. */
840 unsigned int constant
;
843 loc
= start
+ 4 + bits (insn
, 0, 7) * 4;
844 constant
= read_memory_unsigned_integer (loc
, 4, byte_order
);
845 regs
[bits (insn
, 8, 10)] = pv_constant (constant
);
847 else if (thumb_insn_size (insn
) == 4) /* 32-bit Thumb-2 instructions. */
849 unsigned short inst2
;
851 inst2
= read_memory_unsigned_integer (start
+ 2, 2,
852 byte_order_for_code
);
854 if ((insn
& 0xf800) == 0xf000 && (inst2
& 0xe800) == 0xe800)
856 /* BL, BLX. Allow some special function calls when
857 skipping the prologue; GCC generates these before
858 storing arguments to the stack. */
860 int j1
, j2
, imm1
, imm2
;
862 imm1
= sbits (insn
, 0, 10);
863 imm2
= bits (inst2
, 0, 10);
864 j1
= bit (inst2
, 13);
865 j2
= bit (inst2
, 11);
867 offset
= ((imm1
<< 12) + (imm2
<< 1));
868 offset
^= ((!j2
) << 22) | ((!j1
) << 23);
870 nextpc
= start
+ 4 + offset
;
871 /* For BLX make sure to clear the low bits. */
872 if (bit (inst2
, 12) == 0)
873 nextpc
= nextpc
& 0xfffffffc;
875 if (!skip_prologue_function (gdbarch
, nextpc
,
876 bit (inst2
, 12) != 0))
880 else if ((insn
& 0xffd0) == 0xe900 /* stmdb Rn{!},
882 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
884 pv_t addr
= regs
[bits (insn
, 0, 3)];
887 if (pv_area_store_would_trash (stack
, addr
))
890 /* Calculate offsets of saved registers. */
891 for (regno
= ARM_LR_REGNUM
; regno
>= 0; regno
--)
892 if (inst2
& (1 << regno
))
894 addr
= pv_add_constant (addr
, -4);
895 pv_area_store (stack
, addr
, 4, regs
[regno
]);
899 regs
[bits (insn
, 0, 3)] = addr
;
902 else if ((insn
& 0xff50) == 0xe940 /* strd Rt, Rt2,
904 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
906 int regno1
= bits (inst2
, 12, 15);
907 int regno2
= bits (inst2
, 8, 11);
908 pv_t addr
= regs
[bits (insn
, 0, 3)];
910 offset
= inst2
& 0xff;
912 addr
= pv_add_constant (addr
, offset
);
914 addr
= pv_add_constant (addr
, -offset
);
916 if (pv_area_store_would_trash (stack
, addr
))
919 pv_area_store (stack
, addr
, 4, regs
[regno1
]);
920 pv_area_store (stack
, pv_add_constant (addr
, 4),
924 regs
[bits (insn
, 0, 3)] = addr
;
927 else if ((insn
& 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
928 && (inst2
& 0x0c00) == 0x0c00
929 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
931 int regno
= bits (inst2
, 12, 15);
932 pv_t addr
= regs
[bits (insn
, 0, 3)];
934 offset
= inst2
& 0xff;
936 addr
= pv_add_constant (addr
, offset
);
938 addr
= pv_add_constant (addr
, -offset
);
940 if (pv_area_store_would_trash (stack
, addr
))
943 pv_area_store (stack
, addr
, 4, regs
[regno
]);
946 regs
[bits (insn
, 0, 3)] = addr
;
949 else if ((insn
& 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
950 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
952 int regno
= bits (inst2
, 12, 15);
955 offset
= inst2
& 0xfff;
956 addr
= pv_add_constant (regs
[bits (insn
, 0, 3)], offset
);
958 if (pv_area_store_would_trash (stack
, addr
))
961 pv_area_store (stack
, addr
, 4, regs
[regno
]);
964 else if ((insn
& 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
965 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
966 /* Ignore stores of argument registers to the stack. */
969 else if ((insn
& 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
970 && (inst2
& 0x0d00) == 0x0c00
971 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
972 /* Ignore stores of argument registers to the stack. */
975 else if ((insn
& 0xffd0) == 0xe890 /* ldmia Rn[!],
977 && (inst2
& 0x8000) == 0x0000
978 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
979 /* Ignore block loads from the stack, potentially copying
980 parameters from memory. */
983 else if ((insn
& 0xffb0) == 0xe950 /* ldrd Rt, Rt2,
985 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
986 /* Similarly ignore dual loads from the stack. */
989 else if ((insn
& 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
990 && (inst2
& 0x0d00) == 0x0c00
991 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
992 /* Similarly ignore single loads from the stack. */
995 else if ((insn
& 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
996 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
997 /* Similarly ignore single loads from the stack. */
1000 else if ((insn
& 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
1001 && (inst2
& 0x8000) == 0x0000)
1003 unsigned int imm
= ((bits (insn
, 10, 10) << 11)
1004 | (bits (inst2
, 12, 14) << 8)
1005 | bits (inst2
, 0, 7));
1007 regs
[bits (inst2
, 8, 11)]
1008 = pv_add_constant (regs
[bits (insn
, 0, 3)],
1009 thumb_expand_immediate (imm
));
1012 else if ((insn
& 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
1013 && (inst2
& 0x8000) == 0x0000)
1015 unsigned int imm
= ((bits (insn
, 10, 10) << 11)
1016 | (bits (inst2
, 12, 14) << 8)
1017 | bits (inst2
, 0, 7));
1019 regs
[bits (inst2
, 8, 11)]
1020 = pv_add_constant (regs
[bits (insn
, 0, 3)], imm
);
1023 else if ((insn
& 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
1024 && (inst2
& 0x8000) == 0x0000)
1026 unsigned int imm
= ((bits (insn
, 10, 10) << 11)
1027 | (bits (inst2
, 12, 14) << 8)
1028 | bits (inst2
, 0, 7));
1030 regs
[bits (inst2
, 8, 11)]
1031 = pv_add_constant (regs
[bits (insn
, 0, 3)],
1032 - (CORE_ADDR
) thumb_expand_immediate (imm
));
1035 else if ((insn
& 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
1036 && (inst2
& 0x8000) == 0x0000)
1038 unsigned int imm
= ((bits (insn
, 10, 10) << 11)
1039 | (bits (inst2
, 12, 14) << 8)
1040 | bits (inst2
, 0, 7));
1042 regs
[bits (inst2
, 8, 11)]
1043 = pv_add_constant (regs
[bits (insn
, 0, 3)], - (CORE_ADDR
) imm
);
1046 else if ((insn
& 0xfbff) == 0xf04f) /* mov.w Rd, #const */
1048 unsigned int imm
= ((bits (insn
, 10, 10) << 11)
1049 | (bits (inst2
, 12, 14) << 8)
1050 | bits (inst2
, 0, 7));
1052 regs
[bits (inst2
, 8, 11)]
1053 = pv_constant (thumb_expand_immediate (imm
));
1056 else if ((insn
& 0xfbf0) == 0xf240) /* movw Rd, #const */
1059 = EXTRACT_MOVW_MOVT_IMM_T (insn
, inst2
);
1061 regs
[bits (inst2
, 8, 11)] = pv_constant (imm
);
1064 else if (insn
== 0xea5f /* mov.w Rd,Rm */
1065 && (inst2
& 0xf0f0) == 0)
1067 int dst_reg
= (inst2
& 0x0f00) >> 8;
1068 int src_reg
= inst2
& 0xf;
1069 regs
[dst_reg
] = regs
[src_reg
];
1072 else if ((insn
& 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1074 /* Constant pool loads. */
1075 unsigned int constant
;
1078 offset
= bits (inst2
, 0, 11);
1080 loc
= start
+ 4 + offset
;
1082 loc
= start
+ 4 - offset
;
1084 constant
= read_memory_unsigned_integer (loc
, 4, byte_order
);
1085 regs
[bits (inst2
, 12, 15)] = pv_constant (constant
);
1088 else if ((insn
& 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1090 /* Constant pool loads. */
1091 unsigned int constant
;
1094 offset
= bits (inst2
, 0, 7) << 2;
1096 loc
= start
+ 4 + offset
;
1098 loc
= start
+ 4 - offset
;
1100 constant
= read_memory_unsigned_integer (loc
, 4, byte_order
);
1101 regs
[bits (inst2
, 12, 15)] = pv_constant (constant
);
1103 constant
= read_memory_unsigned_integer (loc
+ 4, 4, byte_order
);
1104 regs
[bits (inst2
, 8, 11)] = pv_constant (constant
);
1107 else if (thumb2_instruction_changes_pc (insn
, inst2
))
1109 /* Don't scan past anything that might change control flow. */
1114 /* The optimizer might shove anything into the prologue,
1115 so we just skip what we don't recognize. */
1116 unrecognized_pc
= start
;
1121 else if (thumb_instruction_changes_pc (insn
))
1123 /* Don't scan past anything that might change control flow. */
1128 /* The optimizer might shove anything into the prologue,
1129 so we just skip what we don't recognize. */
1130 unrecognized_pc
= start
;
1137 fprintf_unfiltered (gdb_stdlog
, "Prologue scan stopped at %s\n",
1138 paddress (gdbarch
, start
));
1140 if (unrecognized_pc
== 0)
1141 unrecognized_pc
= start
;
1145 do_cleanups (back_to
);
1146 return unrecognized_pc
;
1149 if (pv_is_register (regs
[ARM_FP_REGNUM
], ARM_SP_REGNUM
))
1151 /* Frame pointer is fp. Frame size is constant. */
1152 cache
->framereg
= ARM_FP_REGNUM
;
1153 cache
->framesize
= -regs
[ARM_FP_REGNUM
].k
;
1155 else if (pv_is_register (regs
[THUMB_FP_REGNUM
], ARM_SP_REGNUM
))
1157 /* Frame pointer is r7. Frame size is constant. */
1158 cache
->framereg
= THUMB_FP_REGNUM
;
1159 cache
->framesize
= -regs
[THUMB_FP_REGNUM
].k
;
1163 /* Try the stack pointer... this is a bit desperate. */
1164 cache
->framereg
= ARM_SP_REGNUM
;
1165 cache
->framesize
= -regs
[ARM_SP_REGNUM
].k
;
1168 for (i
= 0; i
< 16; i
++)
1169 if (pv_area_find_reg (stack
, gdbarch
, i
, &offset
))
1170 cache
->saved_regs
[i
].addr
= offset
;
1172 do_cleanups (back_to
);
1173 return unrecognized_pc
;
/* Try to analyze the instructions starting from PC, which load symbol
   __stack_chk_guard.  Return the address of instruction after loading this
   symbol, set the dest register number to *BASEREG, and set the size of
   instructions for loading symbol in OFFSET.  Return 0 if instructions are
   not recognized.  */

static CORE_ADDR
arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
				 unsigned int *destreg, int *offset)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  int is_thumb = arm_pc_is_thumb (gdbarch, pc);
  unsigned int low, high, address;

  address = 0;
  if (is_thumb)
    {
      unsigned short insn1
	= read_memory_unsigned_integer (pc, 2, byte_order_for_code);

      if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
	{
	  /* PC-relative Thumb load: the literal pool slot is at
	     Align(PC, 4) + 4 + imm8*4.  */
	  *destreg = bits (insn1, 8, 10);
	  *offset = 2;
	  address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
	  address = read_memory_unsigned_integer (address, 4,
						  byte_order_for_code);
	}
      else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
	{
	  unsigned short insn2
	    = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);

	  low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);

	  /* Fetch the following instruction pair, expected to be the
	     matching movt that supplies the upper halfword.  */
	  insn1
	    = read_memory_unsigned_integer (pc + 4, 2, byte_order_for_code);
	  insn2
	    = read_memory_unsigned_integer (pc + 6, 2, byte_order_for_code);

	  /* movt Rd, #const */
	  if ((insn1 & 0xfbc0) == 0xf2c0)
	    {
	      high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
	      *destreg = bits (insn2, 8, 11);
	      *offset = 8;
	      address = (high << 16 | low);
	    }
	}
    }
  else
    {
      unsigned int insn
	= read_memory_unsigned_integer (pc, 4, byte_order_for_code);

      if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
	{
	  /* ARM PC reads as the instruction address + 8.  */
	  address = bits (insn, 0, 11) + pc + 8;
	  address = read_memory_unsigned_integer (address, 4,
						  byte_order_for_code);

	  *destreg = bits (insn, 12, 15);
	  *offset = 4;
	}
      else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
	{
	  low = EXTRACT_MOVW_MOVT_IMM_A (insn);

	  insn
	    = read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code);

	  if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
	    {
	      high = EXTRACT_MOVW_MOVT_IMM_A (insn);
	      *destreg = bits (insn, 12, 15);
	      *offset = 8;
	      address = (high << 16 | low);
	    }
	}
    }

  /* ADDRESS is still 0 if no recognized load sequence was found.  */
  return address;
}
/* Try to skip a sequence of instructions used for stack protector.  If PC
   points to the first instruction of this sequence, return the address of
   first instruction after this sequence, otherwise, return original PC.

   On arm, this sequence of instructions is composed of mainly three steps,
     Step 1: load symbol __stack_chk_guard,
     Step 2: load from address of __stack_chk_guard,
     Step 3: store it to somewhere else.

   Usually, instructions on step 2 and step 3 are the same on various ARM
   architectures.  On step 2, it is one instruction 'ldr Rx, [Rn, #0]', and
   on step 3, it is also one instruction 'str Rx, [r7, #immd]'.  However,
   instructions in step 1 vary from different ARM architectures.  On ARMv7,
   they are,

	movw	Rn, #:lower16:__stack_chk_guard
	movt	Rn, #:upper16:__stack_chk_guard

   On ARMv5t, it is,

	ldr	Rn, .Label
	....
	.Lable:
	.word	__stack_chk_guard

   Since ldr/str is a very popular instruction, we can't use them as
   'fingerprint' or 'signature' of stack protector sequence.  Here we choose
   sequence {movw/movt, ldr}/ldr/str plus symbol __stack_chk_guard, if not
   stripped, as the 'fingerprint' of a stack protector cdoe sequence.  */

static CORE_ADDR
arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned int basereg;
  struct bound_minimal_symbol stack_chk_guard;
  int offset;
  int is_thumb = arm_pc_is_thumb (gdbarch, pc);
  CORE_ADDR addr;

  /* Try to parse the instructions in Step 1.  */
  addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
					   &basereg, &offset);
  if (!addr)
    return pc;

  stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
  /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
     Otherwise, this sequence cannot be for stack protector.  */
  if (stack_chk_guard.minsym == NULL
      || !startswith (MSYMBOL_LINKAGE_NAME (stack_chk_guard.minsym),
		      "__stack_chk_guard"))
    return pc;

  if (is_thumb)
    {
      unsigned int destreg;
      unsigned short insn
	= read_memory_unsigned_integer (pc + offset, 2, byte_order_for_code);

      /* Step 2: ldr Rd, [Rn, #immed], encoding T1.  */
      if ((insn & 0xf800) != 0x6800)
	return pc;
      /* The load must read through the register that holds the guard
	 address computed in Step 1.  */
      if (bits (insn, 3, 5) != basereg)
	return pc;
      destreg = bits (insn, 0, 2);

      insn = read_memory_unsigned_integer (pc + offset + 2, 2,
					   byte_order_for_code);
      /* Step 3: str Rd, [Rn, #immed], encoding T1.  */
      if ((insn & 0xf800) != 0x6000)
	return pc;
      if (destreg != bits (insn, 0, 2))
	return pc;
    }
  else
    {
      unsigned int destreg;
      unsigned int insn
	= read_memory_unsigned_integer (pc + offset, 4, byte_order_for_code);

      /* Step 2: ldr Rd, [Rn, #immed], encoding A1.  */
      if ((insn & 0x0e500000) != 0x04100000)
	return pc;
      if (bits (insn, 16, 19) != basereg)
	return pc;
      destreg = bits (insn, 12, 15);
      /* Step 3: str Rd, [Rn, #immed], encoding A1.  */
      insn = read_memory_unsigned_integer (pc + offset + 4,
					   4, byte_order_for_code);
      if ((insn & 0x0e500000) != 0x04000000)
	return pc;
      if (bits (insn, 12, 15) != destreg)
	return pc;
    }

  /* The size of total two instructions ldr/str is 4 on Thumb-2, while 8
     on arm.  */
  if (is_thumb)
    return pc + offset + 4;
  else
    return pc + offset + 8;
}
/* Advance the PC across any function entry prologue instructions to
   reach some "real" code.

   The APCS (ARM Procedure Call Standard) defines the following
   prologue:

   mov          ip, sp
   [stmfd       sp!, {a1,a2,a3,a4}]
   stmfd        sp!, {...,fp,ip,lr,pc}
   [stfe        f7, [sp, #-12]!]
   [stfe        f6, [sp, #-12]!]
   [stfe        f5, [sp, #-12]!]
   [stfe        f4, [sp, #-12]!]
   sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn.  */

static CORE_ADDR
arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  CORE_ADDR func_addr, limit_pc;

  /* See if we can determine the end of the prologue via the symbol table.
     If so, then return either PC, or the PC after the prologue, whichever
     is greater.  */
  if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
    {
      CORE_ADDR post_prologue_pc
	= skip_prologue_using_sal (gdbarch, func_addr);
      struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);

      /* The line-table prologue end may point into a stack-protector
	 sequence; skip over it if so.  */
      if (post_prologue_pc)
	post_prologue_pc
	  = arm_skip_stack_protector (post_prologue_pc, gdbarch);

      /* GCC always emits a line note before the prologue and another
	 one after, even if the two are at the same address or on the
	 same line.  Take advantage of this so that we do not need to
	 know every instruction that might appear in the prologue.  We
	 will have producer information for most binaries; if it is
	 missing (e.g. for -gstabs), assuming the GNU tools.  */
      if (post_prologue_pc
	  && (cust == NULL
	      || COMPUNIT_PRODUCER (cust) == NULL
	      || startswith (COMPUNIT_PRODUCER (cust), "GNU ")
	      || startswith (COMPUNIT_PRODUCER (cust), "clang ")))
	return post_prologue_pc;

      if (post_prologue_pc != 0)
	{
	  CORE_ADDR analyzed_limit;

	  /* For non-GCC compilers, make sure the entire line is an
	     acceptable prologue; GDB will round this function's
	     return value up to the end of the following line so we
	     can not skip just part of a line (and we do not want to).

	     RealView does not treat the prologue specially, but does
	     associate prologue code with the opening brace; so this
	     lets us skip the first line if we think it is the opening
	     brace.  */
	  if (arm_pc_is_thumb (gdbarch, func_addr))
	    analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
						     post_prologue_pc, NULL);
	  else
	    analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
						   post_prologue_pc, NULL);

	  /* If the instruction analysis disagrees with the line table,
	     trust the analysis and do not skip anything.  */
	  if (analyzed_limit != post_prologue_pc)
	    return func_addr;

	  return post_prologue_pc;
	}
    }

  /* Can't determine prologue from the symbol table, need to examine
     instructions.  */

  /* Find an upper limit on the function prologue using the debug
     information.  If the debug information could not be used to provide
     that bound, then use an arbitrary large number as the upper bound.  */
  /* Like arm_scan_prologue, stop no later than pc + 64.  */
  limit_pc = skip_prologue_using_sal (gdbarch, pc);
  if (limit_pc == 0)
    limit_pc = pc + 64;          /* Magic.  */

  /* Check if this is Thumb code.  */
  if (arm_pc_is_thumb (gdbarch, pc))
    return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
  else
    return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL);
}
/* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
   This function decodes a Thumb function prologue to determine:
     1) the size of the stack frame
     2) which registers are saved on it
     3) the offsets of saved regs
     4) the offset from the stack pointer to the frame pointer

   A typical Thumb function prologue would create this stack frame
   (offsets relative to FP)
     old SP ->	24  stack parameters
		20  LR
		16  R7
     R7 ->       0  local variables (16 bytes)
     SP ->     -12  additional stack space (12 bytes)
   The frame size would thus be 36 bytes, and the frame offset would be
   12 bytes.  The frame register is R7.

   The comments for thumb_skip_prolog() describe the algorithm we use
   to detect the end of the prolog.  */

static void
thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
		     CORE_ADDR block_addr, struct arm_prologue_cache *cache)
{
  CORE_ADDR prologue_start;
  CORE_ADDR prologue_end;

  if (find_pc_partial_function (block_addr, NULL, &prologue_start,
				&prologue_end))
    {
      /* See comment in arm_scan_prologue for an explanation of
	 this heuristics.  */
      if (prologue_end > prologue_start + 64)
	{
	  prologue_end = prologue_start + 64;
	}
    }
  else
    /* We're in the boondocks: we have no idea where the start of the
       function is.  */
    return;

  /* Never scan past the point we are unwinding from.  */
  prologue_end = min (prologue_end, prev_pc);

  thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
}
/* Return 1 if THIS_INSTR might change control flow, 0 otherwise.  */

static int
arm_instruction_changes_pc (uint32_t this_instr)
{
  if (bits (this_instr, 28, 31) == INST_NV)
    /* Unconditional instructions.  */
    switch (bits (this_instr, 24, 27))
      {
      default:
	return 1;

	/* Branch with Link and change to Thumb.  */
      case 0xa:
      case 0xb:
	return 1;

      case 0xe:
	/* Coprocessor register transfer.  */
	/* NOTE(review): the exact case labels in this NV-space switch
	   should be confirmed against the upstream source; the visible
	   text only preserves the comments and the PC check below.  */
	if (bits (this_instr, 12, 15) == 15)
	  error (_("Invalid update to pc in instruction"));
	return 0;
      }
  else
    switch (bits (this_instr, 25, 27))
      {
      case 0x0:
	if (bits (this_instr, 23, 24) == 2 && bit (this_instr, 20) == 0)
	  {
	    /* Multiplies and extra load/stores.  */
	    if (bit (this_instr, 4) == 1 && bit (this_instr, 7) == 1)
	      /* Neither multiplies nor extension load/stores are allowed
		 to modify PC.  */
	      return 0;

	    /* Otherwise, miscellaneous instructions.  */

	    /* BX <reg>, BXJ <reg>, BLX <reg> */
	    if (bits (this_instr, 4, 27) == 0x12fff1
		|| bits (this_instr, 4, 27) == 0x12fff2
		|| bits (this_instr, 4, 27) == 0x12fff3)
	      return 1;

	    /* Other miscellaneous instructions are unpredictable if they
	       modify PC.  */
	    return 0;
	  }
	/* Data processing instruction.  Fall through.  */

      case 0x1:
	/* Data processing writes to PC iff the destination field is 15.  */
	if (bits (this_instr, 12, 15) == 15)
	  return 1;
	else
	  return 0;

      case 0x2:
      case 0x3:
	/* Media instructions and architecturally undefined instructions.  */
	if (bits (this_instr, 25, 27) == 3 && bit (this_instr, 4) == 1)
	  return 0;

	/* Stores.  */
	if (bit (this_instr, 20) == 0)
	  return 0;

	/* Loads.  */
	if (bits (this_instr, 12, 15) == ARM_PC_REGNUM)
	  return 1;
	else
	  return 0;

      case 0x4:
	/* Load/store multiple.  */
	if (bit (this_instr, 20) == 1 && bit (this_instr, 15) == 1)
	  return 1;
	else
	  return 0;

      case 0x5:
	/* Branch and branch with link.  */
	return 1;

      case 0x6:
      case 0x7:
	/* Coprocessor transfers or SWIs can not affect PC.  */
	return 0;

      default:
	internal_error (__FILE__, __LINE__, _("bad value in switch"));
      }
}
1599 /* Return 1 if the ARM instruction INSN restores SP in epilogue, 0
1603 arm_instruction_restores_sp (unsigned int insn
)
1605 if (bits (insn
, 28, 31) != INST_NV
)
1607 if ((insn
& 0x0df0f000) == 0x0080d000
1608 /* ADD SP (register or immediate). */
1609 || (insn
& 0x0df0f000) == 0x0040d000
1610 /* SUB SP (register or immediate). */
1611 || (insn
& 0x0ffffff0) == 0x01a0d000
1613 || (insn
& 0x0fff0000) == 0x08bd0000
1615 || (insn
& 0x0fff0000) == 0x049d0000)
1616 /* POP of a single register. */
/* Analyze an ARM mode prologue starting at PROLOGUE_START and
   continuing no further than PROLOGUE_END.  If CACHE is non-NULL,
   fill it in.  Return the first address not recognized as a prologue
   instruction.

   We recognize all the instructions typically found in ARM prologues,
   plus harmless instructions which can be skipped (either for analysis
   purposes, or a more restrictive set that can be skipped when finding
   the end of the prologue).  */

static CORE_ADDR
arm_analyze_prologue (struct gdbarch *gdbarch,
		      CORE_ADDR prologue_start, CORE_ADDR prologue_end,
		      struct arm_prologue_cache *cache)
{
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  int regno;
  CORE_ADDR offset, current_pc;
  pv_t regs[ARM_FPS_REGNUM];
  struct pv_area *stack;
  struct cleanup *back_to;
  CORE_ADDR unrecognized_pc = 0;

  /* Search the prologue looking for instructions that set up the
     frame pointer, adjust the stack pointer, and save registers.

     Be careful, however, and if it doesn't look like a prologue,
     don't try to scan it.  If, for instance, a frameless function
     begins with stmfd sp!, then we will tell ourselves there is
     a frame, which will confuse stack traceback, as well as "finish"
     and other operations that rely on a knowledge of the stack
     traceback.  */

  /* Symbolic execution state: each register starts as "itself + 0".  */
  for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
    regs[regno] = pv_register (regno, 0);
  stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
  back_to = make_cleanup_free_pv_area (stack);

  for (current_pc = prologue_start;
       current_pc < prologue_end;
       current_pc += 4)
    {
      unsigned int insn
	= read_memory_unsigned_integer (current_pc, 4, byte_order_for_code);

      if (insn == 0xe1a0c00d)		/* mov ip, sp */
	{
	  regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
	  continue;
	}
      else if ((insn & 0xfff00000) == 0xe2800000	/* add Rd, Rn, #n */
	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
	{
	  unsigned imm = insn & 0xff;			/* immediate value */
	  unsigned rot = (insn & 0xf00) >> 7;		/* rotate amount */
	  int rd = bits (insn, 12, 15);
	  /* Decode the ARM rotated-immediate form.  */
	  imm = (imm >> rot) | (imm << (32 - rot));
	  regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
	  continue;
	}
      else if ((insn & 0xfff00000) == 0xe2400000	/* sub Rd, Rn, #n */
	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
	{
	  unsigned imm = insn & 0xff;			/* immediate value */
	  unsigned rot = (insn & 0xf00) >> 7;		/* rotate amount */
	  int rd = bits (insn, 12, 15);
	  imm = (imm >> rot) | (imm << (32 - rot));
	  regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
	  continue;
	}
      else if ((insn & 0xffff0fff) == 0xe52d0004)	/* str Rd,
							   [sp, #-4]! */
	{
	  if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
	    break;
	  regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
	  pv_area_store (stack, regs[ARM_SP_REGNUM], 4,
			 regs[bits (insn, 12, 15)]);
	  continue;
	}
      else if ((insn & 0xffff0000) == 0xe92d0000)
	/* stmfd sp!, {..., fp, ip, lr, pc}
	   or
	   stmfd sp!, {a1, a2, a3, a4}  */
	{
	  int mask = insn & 0xffff;

	  if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
	    break;

	  /* Calculate offsets of saved registers.  */
	  for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
	    if (mask & (1 << regno))
	      {
		regs[ARM_SP_REGNUM]
		  = pv_add_constant (regs[ARM_SP_REGNUM], -4);
		pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
	      }
	}
      else if ((insn & 0xffff0000) == 0xe54b0000	/* strb rx,[r11,#-n] */
	       || (insn & 0xffff00f0) == 0xe14b00b0	/* strh rx,[r11,#-n] */
	       || (insn & 0xffffc000) == 0xe50b0000)	/* str rx,[r11,#-n] */
	{
	  /* No need to add this to saved_regs -- it's just an arg reg.  */
	  continue;
	}
      else if ((insn & 0xffff0000) == 0xe5cd0000	/* strb rx,[sp,#n] */
	       || (insn & 0xffff00f0) == 0xe1cd00b0	/* strh rx,[sp,#n] */
	       || (insn & 0xffffc000) == 0xe58d0000)	/* str rx,[sp,#n] */
	{
	  /* No need to add this to saved_regs -- it's just an arg reg.  */
	  continue;
	}
      else if ((insn & 0xfff00000) == 0xe8800000	/* stm Rn,
							   { registers } */
	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
	{
	  /* No need to add this to saved_regs -- it's just arg regs.  */
	  continue;
	}
      else if ((insn & 0xfffff000) == 0xe24cb000)	/* sub fp, ip #n */
	{
	  unsigned imm = insn & 0xff;			/* immediate value */
	  unsigned rot = (insn & 0xf00) >> 7;		/* rotate amount */
	  imm = (imm >> rot) | (imm << (32 - rot));
	  regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
	}
      else if ((insn & 0xfffff000) == 0xe24dd000)	/* sub sp, sp #n */
	{
	  unsigned imm = insn & 0xff;			/* immediate value */
	  unsigned rot = (insn & 0xf00) >> 7;		/* rotate amount */
	  imm = (imm >> rot) | (imm << (32 - rot));
	  regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
	}
      else if ((insn & 0xffff7fff) == 0xed6d0103	/* stfe f?,
							   [sp, -#c]! */
	       && gdbarch_tdep (gdbarch)->have_fpa_registers)
	{
	  if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
	    break;

	  /* FPA registers are 12 bytes each.  */
	  regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
	  regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
	  pv_area_store (stack, regs[ARM_SP_REGNUM], 12, regs[regno]);
	}
      else if ((insn & 0xffbf0fff) == 0xec2d0200	/* sfmfd f0, 4,
							   [sp!] */
	       && gdbarch_tdep (gdbarch)->have_fpa_registers)
	{
	  int n_saved_fp_regs;
	  unsigned int fp_start_reg, fp_bound_reg;

	  if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
	    break;

	  /* N0/N1 bits encode how many FPA registers are stored.  */
	  if ((insn & 0x800) == 0x800)		/* N0 is set */
	    {
	      if ((insn & 0x40000) == 0x40000)	/* N1 is set */
		n_saved_fp_regs = 3;
	      else
		n_saved_fp_regs = 1;
	    }
	  else
	    {
	      if ((insn & 0x40000) == 0x40000)	/* N1 is set */
		n_saved_fp_regs = 2;
	      else
		n_saved_fp_regs = 4;
	    }

	  fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
	  fp_bound_reg = fp_start_reg + n_saved_fp_regs;
	  for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
	    {
	      regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
	      /* NOTE(review): fp_start_reg is incremented both here and
		 by the for-loop, so only every other register value is
		 stored -- confirm against upstream whether this is
		 intentional.  */
	      pv_area_store (stack, regs[ARM_SP_REGNUM], 12,
			     regs[fp_start_reg++]);
	    }
	}
      else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
	{
	  /* Allow some special function calls when skipping the
	     prologue; GCC generates these before storing arguments to
	     the stack.  */
	  CORE_ADDR dest = BranchDest (current_pc, insn);

	  if (skip_prologue_function (gdbarch, dest, 0))
	    continue;
	  else
	    break;
	}
      else if ((insn & 0xf0000000) != 0xe0000000)
	break;			/* Condition not true, exit early.  */
      else if (arm_instruction_changes_pc (insn))
	/* Don't scan past anything that might change control flow.  */
	break;
      else if (arm_instruction_restores_sp (insn))
	{
	  /* Don't scan past the epilogue.  */
	  break;
	}
      else if ((insn & 0xfe500000) == 0xe8100000	/* ldm */
	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
	/* Ignore block loads from the stack, potentially copying
	   parameters from memory.  */
	continue;
      else if ((insn & 0xfc500000) == 0xe4100000
	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
	/* Similarly ignore single loads from the stack.  */
	continue;
      else if ((insn & 0xffff0ff0) == 0xe1a00000)
	/* MOV Rd, Rm.  Skip register copies, i.e. saves to another
	   register instead of the stack.  */
	continue;
      else
	{
	  /* The optimizer might shove anything into the prologue, if
	     we build up cache (cache != NULL) from scanning prologue,
	     we just skip what we don't recognize and scan further to
	     make cache as complete as possible.  However, if we skip
	     prologue, we'll stop immediately on unrecognized
	     instruction.  */
	  unrecognized_pc = current_pc;
	  if (cache != NULL)
	    continue;
	  else
	    break;
	}
    }

  if (unrecognized_pc == 0)
    unrecognized_pc = current_pc;

  if (cache)
    {
      int framereg, framesize;

      /* The frame size is just the distance from the frame register
	 to the original stack pointer.  */
      if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
	{
	  /* Frame pointer is fp.  */
	  framereg = ARM_FP_REGNUM;
	  framesize = -regs[ARM_FP_REGNUM].k;
	}
      else
	{
	  /* Try the stack pointer... this is a bit desperate.  */
	  framereg = ARM_SP_REGNUM;
	  framesize = -regs[ARM_SP_REGNUM].k;
	}

      cache->framereg = framereg;
      cache->framesize = framesize;

      /* Record the stack offset of every register the prologue saved.  */
      for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
	if (pv_area_find_reg (stack, gdbarch, regno, &offset))
	  cache->saved_regs[regno].addr = offset;
    }

  if (arm_debug)
    fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
			paddress (gdbarch, unrecognized_pc));

  do_cleanups (back_to);
  return unrecognized_pc;
}
/* Scan the prologue of the function containing THIS_FRAME's PC and
   fill in CACHE (frame register, frame size, saved-register offsets).
   Dispatches to thumb_scan_prologue for Thumb frames, otherwise bounds
   the prologue using symbol/line info (or, failing that, the saved PC
   found through the frame pointer) and runs arm_analyze_prologue.  */

static void
arm_scan_prologue (struct frame_info *this_frame,
		   struct arm_prologue_cache *cache)
{
  struct gdbarch *gdbarch = get_frame_arch (this_frame);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  int regno;
  CORE_ADDR prologue_start, prologue_end, current_pc;
  CORE_ADDR prev_pc = get_frame_pc (this_frame);
  CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
  pv_t regs[ARM_FPS_REGNUM];
  struct pv_area *stack;
  struct cleanup *back_to;
  CORE_ADDR offset;

  /* Assume there is no frame until proven otherwise.  */
  cache->framereg = ARM_SP_REGNUM;
  cache->framesize = 0;

  /* Check for Thumb prologue.  */
  if (arm_frame_is_thumb (this_frame))
    {
      thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
      return;
    }

  /* Find the function prologue.  If we can't find the function in
     the symbol table, peek in the stack frame to find the PC.  */
  if (find_pc_partial_function (block_addr, NULL, &prologue_start,
				&prologue_end))
    {
      /* One way to find the end of the prologue (which works well
	 for unoptimized code) is to do the following:

	    struct symtab_and_line sal = find_pc_line (prologue_start, 0);

	    if (sal.line == 0)
	      prologue_end = prev_pc;
	    else if (sal.end < prologue_end)
	      prologue_end = sal.end;

	 This mechanism is very accurate so long as the optimizer
	 doesn't move any instructions from the function body into the
	 prologue.  If this happens, sal.end will be the last
	 instruction in the first hunk of prologue code just before
	 the first instruction that the scheduler has moved from
	 the body to the prologue.

	 In order to make sure that we scan all of the prologue
	 instructions, we use a slightly less accurate mechanism which
	 may scan more than necessary.  To help compensate for this
	 lack of accuracy, the prologue scanning loop below contains
	 several clauses which'll cause the loop to terminate early if
	 an implausible prologue instruction is encountered.

	 The expression

	      prologue_start + 64

	 is a suitable endpoint since it accounts for the largest
	 possible prologue plus up to five instructions inserted by
	 the scheduler.  */

      if (prologue_end > prologue_start + 64)
	{
	  prologue_end = prologue_start + 64;	/* See above.  */
	}
    }
  else
    {
      /* We have no symbol information.  Our only option is to assume this
	 function has a standard stack frame and the normal frame register.
	 Then, we can find the value of our frame pointer on entrance to
	 the callee (or at the present moment if this is the innermost frame).
	 The value stored there should be the address of the stmfd + 8.  */
      CORE_ADDR frame_loc;
      LONGEST return_value;

      frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
      if (!safe_read_memory_integer (frame_loc, 4, byte_order, &return_value))
	return;
      else
	{
	  prologue_start = gdbarch_addr_bits_remove
			     (gdbarch, return_value) - 8;
	  prologue_end = prologue_start + 64;	/* See above.  */
	}
    }

  /* Never scan past the address we are unwinding from.  */
  if (prev_pc < prologue_end)
    prologue_end = prev_pc;

  arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
}
/* Allocate and populate an arm_prologue_cache for THIS_FRAME by
   scanning its prologue.  PREV_SP is left at zero (signalling "hit a
   wall" to the stop_reason hook) when the frame register unwinds to
   zero.  */

static struct arm_prologue_cache *
arm_make_prologue_cache (struct frame_info *this_frame)
{
  int reg;
  struct arm_prologue_cache *cache;
  CORE_ADDR unwound_fp;

  cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
  cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);

  arm_scan_prologue (this_frame, cache);

  unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
  if (unwound_fp == 0)
    return cache;

  /* The caller's SP is the frame register value plus the frame size.  */
  cache->prev_sp = unwound_fp + cache->framesize;

  /* Calculate actual addresses of saved registers using offsets
     determined by arm_scan_prologue.  */
  for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
    if (trad_frame_addr_p (cache->saved_regs, reg))
      cache->saved_regs[reg].addr += cache->prev_sp;

  return cache;
}
/* Implementation of the stop_reason hook for arm_prologue frames.  */

static enum unwind_stop_reason
arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
				 void **this_cache)
{
  struct arm_prologue_cache *cache;
  CORE_ADDR pc;

  /* Lazily build and memoize the prologue cache.  */
  if (*this_cache == NULL)
    *this_cache = arm_make_prologue_cache (this_frame);
  cache = (struct arm_prologue_cache *) *this_cache;

  /* This is meant to halt the backtrace at "_start".  */
  pc = get_frame_pc (this_frame);
  if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
    return UNWIND_OUTERMOST;

  /* If we've hit a wall, stop.  */
  if (cache->prev_sp == 0)
    return UNWIND_OUTERMOST;

  return UNWIND_NO_REASON;
}
/* Our frame ID for a normal frame is the current function's starting PC
   and the caller's SP when we were called.  */

static void
arm_prologue_this_id (struct frame_info *this_frame,
		      void **this_cache,
		      struct frame_id *this_id)
{
  struct arm_prologue_cache *cache;
  struct frame_id id;
  CORE_ADDR pc, func;

  /* Lazily build and memoize the prologue cache.  */
  if (*this_cache == NULL)
    *this_cache = arm_make_prologue_cache (this_frame);
  cache = (struct arm_prologue_cache *) *this_cache;

  /* Use function start address as part of the frame ID.  If we cannot
     identify the start address (due to missing symbol information),
     fall back to just using the current PC.  */
  pc = get_frame_pc (this_frame);
  func = get_frame_func (this_frame);
  if (!func)
    func = pc;

  id = frame_id_build (cache->prev_sp, func);
  *this_id = id;
}
/* prev_register implementation for the ARM prologue unwinder: return
   the value of PREV_REGNUM in the frame before THIS_FRAME.  PC, SP and
   PS get special reconstruction; everything else comes from the
   saved-register table built by the prologue scan.  */

static struct value *
arm_prologue_prev_register (struct frame_info *this_frame,
			    void **this_cache,
			    int prev_regnum)
{
  struct gdbarch *gdbarch = get_frame_arch (this_frame);
  struct arm_prologue_cache *cache;

  /* Lazily build and memoize the prologue cache.  */
  if (*this_cache == NULL)
    *this_cache = arm_make_prologue_cache (this_frame);
  cache = (struct arm_prologue_cache *) *this_cache;

  /* If we are asked to unwind the PC, then we need to return the LR
     instead.  The prologue may save PC, but it will point into this
     frame's prologue, not the next frame's resume location.  Also
     strip the saved T bit.  A valid LR may have the low bit set, but
     a valid PC never does.  */
  if (prev_regnum == ARM_PC_REGNUM)
    {
      CORE_ADDR lr;

      lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
      return frame_unwind_got_constant (this_frame, prev_regnum,
					arm_addr_bits_remove (gdbarch, lr));
    }

  /* SP is generally not saved to the stack, but this frame is
     identified by the next frame's stack pointer at the time of the call.
     The value was already reconstructed into PREV_SP.  */
  if (prev_regnum == ARM_SP_REGNUM)
    return frame_unwind_got_constant (this_frame, prev_regnum,
				      cache->prev_sp);

  /* The CPSR may have been changed by the call instruction and by the
     called function.  The only bit we can reconstruct is the T bit,
     by checking the low bit of LR as of the call.  This is a reliable
     indicator of Thumb-ness except for some ARM v4T pre-interworking
     Thumb code, which could get away with a clear low bit as long as
     the called function did not use bx.  Guess that all other
     bits are unchanged; the condition flags are presumably lost,
     but the processor status is likely valid.  */
  if (prev_regnum == ARM_PS_REGNUM)
    {
      CORE_ADDR lr, cpsr;
      ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);

      cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
      lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
      if (IS_THUMB_ADDR (lr))
	cpsr |= t_bit;
      else
	cpsr &= ~t_bit;
      return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
    }

  return trad_frame_get_prev_register (this_frame, cache->saved_regs,
				       prev_regnum);
}
/* The prologue-analysis unwinder for normal ARM frames.  */

struct frame_unwind arm_prologue_unwind = {
  NORMAL_FRAME,
  arm_prologue_unwind_stop_reason,	/* stop_reason */
  arm_prologue_this_id,			/* this_id */
  arm_prologue_prev_register,		/* prev_register */
  NULL,					/* unwind_data */
  default_frame_sniffer			/* sniffer */
};
/* Maintain a list of ARM exception table entries per objfile, similar to the
   list of mapping symbols.  We only cache entries for standard ARM-defined
   personality routines; the cache will contain only the frame unwinding
   instructions associated with the entry (not the descriptors).  */

static const struct objfile_data *arm_exidx_data_key;

struct arm_exidx_entry
{
  /* Section-relative start address of the function this entry covers.  */
  bfd_vma addr;
  /* Normalized unwind instructions, 0xb0-terminated; NULL for
     EXIDX_CANTUNWIND entries.  */
  gdb_byte *entry;
};
typedef struct arm_exidx_entry arm_exidx_entry_s;
DEF_VEC_O(arm_exidx_entry_s);

struct arm_exidx_data
{
  /* One entry vector per BFD section, indexed by section index.  */
  VEC(arm_exidx_entry_s) **section_maps;
};

/* Per-objfile destructor: release the per-section entry vectors.  */

static void
arm_exidx_data_free (struct objfile *objfile, void *arg)
{
  struct arm_exidx_data *data = (struct arm_exidx_data *) arg;
  unsigned int i;

  for (i = 0; i < objfile->obfd->section_count; i++)
    VEC_free (arm_exidx_entry_s, data->section_maps[i]);
}
2165 arm_compare_exidx_entries (const struct arm_exidx_entry
*lhs
,
2166 const struct arm_exidx_entry
*rhs
)
2168 return lhs
->addr
< rhs
->addr
;
/* Return the allocated obj_section of OBJFILE whose VMA range contains
   VMA, or NULL if no SEC_ALLOC section covers that address.  */

static struct obj_section *
arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
{
  struct obj_section *osect;

  ALL_OBJFILE_OSECTIONS (objfile, osect)
    if (bfd_get_section_flags (objfile->obfd,
			       osect->the_bfd_section) & SEC_ALLOC)
      {
	bfd_vma start, size;
	start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
	size = bfd_get_section_size (osect->the_bfd_section);

	if (start <= vma && vma < start + size)
	  return osect;
      }

  return NULL;
}
/* Parse contents of exception table and exception index sections
   of OBJFILE, and fill in the exception table entry cache.

   For each entry that refers to a standard ARM-defined personality
   routine, extract the frame unwinding instructions (from either
   the index or the table section).  The unwinding instructions
   are normalized by:
    - extracting them from the rest of the table data
    - converting to host endianness
    - appending the implicit 0xb0 ("Finish") code

   The extracted and normalized instructions are stored for later
   retrieval by the arm_find_exidx_entry routine.  */

static void
arm_exidx_new_objfile (struct objfile *objfile)
{
  struct cleanup *cleanups;
  struct arm_exidx_data *data;
  asection *exidx, *extab;
  bfd_vma exidx_vma = 0, extab_vma = 0;
  bfd_size_type exidx_size = 0, extab_size = 0;
  gdb_byte *exidx_data = NULL, *extab_data = NULL;
  LONGEST i;

  /* If we've already touched this file, do nothing.  */
  if (!objfile || objfile_data (objfile, arm_exidx_data_key) != NULL)
    return;
  cleanups = make_cleanup (null_cleanup, NULL);

  /* Read contents of exception table and index.  */
  exidx = bfd_get_section_by_name (objfile->obfd, ".ARM.exidx");
  if (exidx)
    {
      exidx_vma = bfd_section_vma (objfile->obfd, exidx);
      exidx_size = bfd_get_section_size (exidx);
      exidx_data = (gdb_byte *) xmalloc (exidx_size);
      make_cleanup (xfree, exidx_data);

      if (!bfd_get_section_contents (objfile->obfd, exidx,
				     exidx_data, 0, exidx_size))
	{
	  do_cleanups (cleanups);
	  return;
	}
    }

  extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
  if (extab)
    {
      extab_vma = bfd_section_vma (objfile->obfd, extab);
      extab_size = bfd_get_section_size (extab);
      extab_data = (gdb_byte *) xmalloc (extab_size);
      make_cleanup (xfree, extab_data);

      if (!bfd_get_section_contents (objfile->obfd, extab,
				     extab_data, 0, extab_size))
	{
	  do_cleanups (cleanups);
	  return;
	}
    }

  /* Allocate exception table data structure.  */
  data = OBSTACK_ZALLOC (&objfile->objfile_obstack, struct arm_exidx_data);
  set_objfile_data (objfile, arm_exidx_data_key, data);
  data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
				       objfile->obfd->section_count,
				       VEC(arm_exidx_entry_s) *);

  /* Fill in exception table.  Each index entry is a pair of 32-bit
     words: a prel31 function address and a value word.  */
  for (i = 0; i < exidx_size / 8; i++)
    {
      struct arm_exidx_entry new_exidx_entry;
      bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8);
      bfd_vma val = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8 + 4);
      bfd_vma addr = 0, word = 0;
      int n_bytes = 0, n_words = 0;
      struct obj_section *sec;
      gdb_byte *entry = NULL;

      /* Extract address of start of function.  */
      /* Sign-extend the prel31 offset, then relocate relative to the
	 entry's own position.  */
      idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
      idx += exidx_vma + i * 8;

      /* Find section containing function and compute section offset.  */
      sec = arm_obj_section_from_vma (objfile, idx);
      if (sec == NULL)
	continue;
      idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);

      /* Determine address of exception table entry.  */
      if (val == 1)
	{
	  /* EXIDX_CANTUNWIND -- no exception table entry present.  */
	}
      else if ((val & 0xff000000) == 0x80000000)
	{
	  /* Exception table entry embedded in .ARM.exidx
	     -- must be short form.  */
	  word = val;
	  n_bytes = 3;
	}
      else if (!(val & 0x80000000))
	{
	  /* Exception table entry in .ARM.extab.  */
	  addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
	  addr += exidx_vma + i * 8 + 4;

	  if (addr >= extab_vma && addr + 4 <= extab_vma + extab_size)
	    {
	      word = bfd_h_get_32 (objfile->obfd,
				   extab_data + addr - extab_vma);
	      addr += 4;

	      if ((word & 0xff000000) == 0x80000000)
		{
		  /* Short form.  */
		  n_bytes = 3;
		}
	      else if ((word & 0xff000000) == 0x81000000
		       || (word & 0xff000000) == 0x82000000)
		{
		  /* Long form.  */
		  n_bytes = 2;
		  n_words = ((word >> 16) & 0xff);
		}
	      else if (!(word & 0x80000000))
		{
		  bfd_vma pers;
		  struct obj_section *pers_sec;
		  int gnu_personality = 0;

		  /* Custom personality routine.  */
		  pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
		  pers = UNMAKE_THUMB_ADDR (pers + addr - 4);

		  /* Check whether we've got one of the variants of the
		     GNU personality routines.  */
		  pers_sec = arm_obj_section_from_vma (objfile, pers);
		  if (pers_sec)
		    {
		      static const char *personality[] =
			{
			  "__gcc_personality_v0",
			  "__gxx_personality_v0",
			  "__gcj_personality_v0",
			  "__gnu_objc_personality_v0",
			  NULL
			};

		      CORE_ADDR pc = pers + obj_section_offset (pers_sec);
		      int k;

		      for (k = 0; personality[k]; k++)
			if (lookup_minimal_symbol_by_pc_name
			      (pc, personality[k], objfile))
			  {
			    gnu_personality = 1;
			    break;
			  }
		    }

		  /* If so, the next word contains a word count in the high
		     byte, followed by the same unwind instructions as the
		     pre-defined forms.  */
		  if (gnu_personality
		      && addr + 4 <= extab_vma + extab_size)
		    {
		      word = bfd_h_get_32 (objfile->obfd,
					   extab_data + addr - extab_vma);
		      addr += 4;
		      n_bytes = 3;
		      n_words = ((word >> 24) & 0xff);
		    }
		}
	    }
	}

      /* Sanity check address.  */
      if (n_words)
	if (addr < extab_vma || addr + 4 * n_words > extab_vma + extab_size)
	  n_words = n_bytes = 0;

      /* The unwind instructions reside in WORD (only the N_BYTES least
	 significant bytes are valid), followed by N_WORDS words in the
	 extab section starting at ADDR.  */
      if (n_bytes || n_words)
	{
	  gdb_byte *p = entry
	    = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
					  n_bytes + n_words * 4 + 1);

	  /* Copy the valid low-order bytes of WORD, most significant
	     first (host-endian normalization).  */
	  while (n_bytes--)
	    *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);

	  while (n_words--)
	    {
	      word = bfd_h_get_32 (objfile->obfd,
				   extab_data + addr - extab_vma);
	      addr += 4;

	      *p++ = (gdb_byte) ((word >> 24) & 0xff);
	      *p++ = (gdb_byte) ((word >> 16) & 0xff);
	      *p++ = (gdb_byte) ((word >> 8) & 0xff);
	      *p++ = (gdb_byte) (word & 0xff);
	    }

	  /* Implied "Finish" to terminate the list.  */
	  *p++ = 0xb0;
	}

      /* Push entry onto vector.  They are guaranteed to always
	 appear in order of increasing addresses.  */
      new_exidx_entry.addr = idx;
      new_exidx_entry.entry = entry;
      VEC_safe_push (arm_exidx_entry_s,
		     data->section_maps[sec->the_bfd_section->index],
		     &new_exidx_entry);
    }

  do_cleanups (cleanups);
}
2415 /* Search for the exception table entry covering MEMADDR. If one is found,
2416 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2417 set *START to the start of the region covered by this entry. */
2420 arm_find_exidx_entry (CORE_ADDR memaddr
, CORE_ADDR
*start
)
2422 struct obj_section
*sec
;
2424 sec
= find_pc_section (memaddr
);
2427 struct arm_exidx_data
*data
;
2428 VEC(arm_exidx_entry_s
) *map
;
2429 struct arm_exidx_entry map_key
= { memaddr
- obj_section_addr (sec
), 0 };
2432 data
= ((struct arm_exidx_data
*)
2433 objfile_data (sec
->objfile
, arm_exidx_data_key
));
2436 map
= data
->section_maps
[sec
->the_bfd_section
->index
];
2437 if (!VEC_empty (arm_exidx_entry_s
, map
))
2439 struct arm_exidx_entry
*map_sym
;
2441 idx
= VEC_lower_bound (arm_exidx_entry_s
, map
, &map_key
,
2442 arm_compare_exidx_entries
);
2444 /* VEC_lower_bound finds the earliest ordered insertion
2445 point. If the following symbol starts at this exact
2446 address, we use that; otherwise, the preceding
2447 exception table entry covers this address. */
2448 if (idx
< VEC_length (arm_exidx_entry_s
, map
))
2450 map_sym
= VEC_index (arm_exidx_entry_s
, map
, idx
);
2451 if (map_sym
->addr
== map_key
.addr
)
2454 *start
= map_sym
->addr
+ obj_section_addr (sec
);
2455 return map_sym
->entry
;
2461 map_sym
= VEC_index (arm_exidx_entry_s
, map
, idx
- 1);
2463 *start
= map_sym
->addr
+ obj_section_addr (sec
);
2464 return map_sym
->entry
;
2473 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2474 instruction list from the ARM exception table entry ENTRY, allocate and
2475 return a prologue cache structure describing how to unwind this frame.
2477 Return NULL if the unwinding instruction list contains a "spare",
2478 "reserved" or "refuse to unwind" instruction as defined in section
2479 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2480 for the ARM Architecture" document. */
2482 static struct arm_prologue_cache
*
2483 arm_exidx_fill_cache (struct frame_info
*this_frame
, gdb_byte
*entry
)
2488 struct arm_prologue_cache
*cache
;
2489 cache
= FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache
);
2490 cache
->saved_regs
= trad_frame_alloc_saved_regs (this_frame
);
2496 /* Whenever we reload SP, we actually have to retrieve its
2497 actual value in the current frame. */
2500 if (trad_frame_realreg_p (cache
->saved_regs
, ARM_SP_REGNUM
))
2502 int reg
= cache
->saved_regs
[ARM_SP_REGNUM
].realreg
;
2503 vsp
= get_frame_register_unsigned (this_frame
, reg
);
2507 CORE_ADDR addr
= cache
->saved_regs
[ARM_SP_REGNUM
].addr
;
2508 vsp
= get_frame_memory_unsigned (this_frame
, addr
, 4);
2514 /* Decode next unwind instruction. */
2517 if ((insn
& 0xc0) == 0)
2519 int offset
= insn
& 0x3f;
2520 vsp
+= (offset
<< 2) + 4;
2522 else if ((insn
& 0xc0) == 0x40)
2524 int offset
= insn
& 0x3f;
2525 vsp
-= (offset
<< 2) + 4;
2527 else if ((insn
& 0xf0) == 0x80)
2529 int mask
= ((insn
& 0xf) << 8) | *entry
++;
2532 /* The special case of an all-zero mask identifies
2533 "Refuse to unwind". We return NULL to fall back
2534 to the prologue analyzer. */
2538 /* Pop registers r4..r15 under mask. */
2539 for (i
= 0; i
< 12; i
++)
2540 if (mask
& (1 << i
))
2542 cache
->saved_regs
[4 + i
].addr
= vsp
;
2546 /* Special-case popping SP -- we need to reload vsp. */
2547 if (mask
& (1 << (ARM_SP_REGNUM
- 4)))
2550 else if ((insn
& 0xf0) == 0x90)
2552 int reg
= insn
& 0xf;
2554 /* Reserved cases. */
2555 if (reg
== ARM_SP_REGNUM
|| reg
== ARM_PC_REGNUM
)
2558 /* Set SP from another register and mark VSP for reload. */
2559 cache
->saved_regs
[ARM_SP_REGNUM
] = cache
->saved_regs
[reg
];
2562 else if ((insn
& 0xf0) == 0xa0)
2564 int count
= insn
& 0x7;
2565 int pop_lr
= (insn
& 0x8) != 0;
2568 /* Pop r4..r[4+count]. */
2569 for (i
= 0; i
<= count
; i
++)
2571 cache
->saved_regs
[4 + i
].addr
= vsp
;
2575 /* If indicated by flag, pop LR as well. */
2578 cache
->saved_regs
[ARM_LR_REGNUM
].addr
= vsp
;
2582 else if (insn
== 0xb0)
2584 /* We could only have updated PC by popping into it; if so, it
2585 will show up as address. Otherwise, copy LR into PC. */
2586 if (!trad_frame_addr_p (cache
->saved_regs
, ARM_PC_REGNUM
))
2587 cache
->saved_regs
[ARM_PC_REGNUM
]
2588 = cache
->saved_regs
[ARM_LR_REGNUM
];
2593 else if (insn
== 0xb1)
2595 int mask
= *entry
++;
2598 /* All-zero mask and mask >= 16 is "spare". */
2599 if (mask
== 0 || mask
>= 16)
2602 /* Pop r0..r3 under mask. */
2603 for (i
= 0; i
< 4; i
++)
2604 if (mask
& (1 << i
))
2606 cache
->saved_regs
[i
].addr
= vsp
;
2610 else if (insn
== 0xb2)
2612 ULONGEST offset
= 0;
2617 offset
|= (*entry
& 0x7f) << shift
;
2620 while (*entry
++ & 0x80);
2622 vsp
+= 0x204 + (offset
<< 2);
2624 else if (insn
== 0xb3)
2626 int start
= *entry
>> 4;
2627 int count
= (*entry
++) & 0xf;
2630 /* Only registers D0..D15 are valid here. */
2631 if (start
+ count
>= 16)
2634 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2635 for (i
= 0; i
<= count
; i
++)
2637 cache
->saved_regs
[ARM_D0_REGNUM
+ start
+ i
].addr
= vsp
;
2641 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2644 else if ((insn
& 0xf8) == 0xb8)
2646 int count
= insn
& 0x7;
2649 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2650 for (i
= 0; i
<= count
; i
++)
2652 cache
->saved_regs
[ARM_D0_REGNUM
+ 8 + i
].addr
= vsp
;
2656 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2659 else if (insn
== 0xc6)
2661 int start
= *entry
>> 4;
2662 int count
= (*entry
++) & 0xf;
2665 /* Only registers WR0..WR15 are valid. */
2666 if (start
+ count
>= 16)
2669 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2670 for (i
= 0; i
<= count
; i
++)
2672 cache
->saved_regs
[ARM_WR0_REGNUM
+ start
+ i
].addr
= vsp
;
2676 else if (insn
== 0xc7)
2678 int mask
= *entry
++;
2681 /* All-zero mask and mask >= 16 is "spare". */
2682 if (mask
== 0 || mask
>= 16)
2685 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2686 for (i
= 0; i
< 4; i
++)
2687 if (mask
& (1 << i
))
2689 cache
->saved_regs
[ARM_WCGR0_REGNUM
+ i
].addr
= vsp
;
2693 else if ((insn
& 0xf8) == 0xc0)
2695 int count
= insn
& 0x7;
2698 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2699 for (i
= 0; i
<= count
; i
++)
2701 cache
->saved_regs
[ARM_WR0_REGNUM
+ 10 + i
].addr
= vsp
;
2705 else if (insn
== 0xc8)
2707 int start
= *entry
>> 4;
2708 int count
= (*entry
++) & 0xf;
2711 /* Only registers D0..D31 are valid. */
2712 if (start
+ count
>= 16)
2715 /* Pop VFP double-precision registers
2716 D[16+start]..D[16+start+count]. */
2717 for (i
= 0; i
<= count
; i
++)
2719 cache
->saved_regs
[ARM_D0_REGNUM
+ 16 + start
+ i
].addr
= vsp
;
2723 else if (insn
== 0xc9)
2725 int start
= *entry
>> 4;
2726 int count
= (*entry
++) & 0xf;
2729 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2730 for (i
= 0; i
<= count
; i
++)
2732 cache
->saved_regs
[ARM_D0_REGNUM
+ start
+ i
].addr
= vsp
;
2736 else if ((insn
& 0xf8) == 0xd0)
2738 int count
= insn
& 0x7;
2741 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2742 for (i
= 0; i
<= count
; i
++)
2744 cache
->saved_regs
[ARM_D0_REGNUM
+ 8 + i
].addr
= vsp
;
2750 /* Everything else is "spare". */
2755 /* If we restore SP from a register, assume this was the frame register.
2756 Otherwise just fall back to SP as frame register. */
2757 if (trad_frame_realreg_p (cache
->saved_regs
, ARM_SP_REGNUM
))
2758 cache
->framereg
= cache
->saved_regs
[ARM_SP_REGNUM
].realreg
;
2760 cache
->framereg
= ARM_SP_REGNUM
;
2762 /* Determine offset to previous frame. */
2764 = vsp
- get_frame_register_unsigned (this_frame
, cache
->framereg
);
2766 /* We already got the previous SP. */
2767 cache
->prev_sp
= vsp
;
2772 /* Unwinding via ARM exception table entries. Note that the sniffer
2773 already computes a filled-in prologue cache, which is then used
2774 with the same arm_prologue_this_id and arm_prologue_prev_register
2775 routines also used for prologue-parsing based unwinding. */
2778 arm_exidx_unwind_sniffer (const struct frame_unwind
*self
,
2779 struct frame_info
*this_frame
,
2780 void **this_prologue_cache
)
2782 struct gdbarch
*gdbarch
= get_frame_arch (this_frame
);
2783 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
2784 CORE_ADDR addr_in_block
, exidx_region
, func_start
;
2785 struct arm_prologue_cache
*cache
;
2788 /* See if we have an ARM exception table entry covering this address. */
2789 addr_in_block
= get_frame_address_in_block (this_frame
);
2790 entry
= arm_find_exidx_entry (addr_in_block
, &exidx_region
);
2794 /* The ARM exception table does not describe unwind information
2795 for arbitrary PC values, but is guaranteed to be correct only
2796 at call sites. We have to decide here whether we want to use
2797 ARM exception table information for this frame, or fall back
2798 to using prologue parsing. (Note that if we have DWARF CFI,
2799 this sniffer isn't even called -- CFI is always preferred.)
2801 Before we make this decision, however, we check whether we
2802 actually have *symbol* information for the current frame.
2803 If not, prologue parsing would not work anyway, so we might
2804 as well use the exception table and hope for the best. */
2805 if (find_pc_partial_function (addr_in_block
, NULL
, &func_start
, NULL
))
2809 /* If the next frame is "normal", we are at a call site in this
2810 frame, so exception information is guaranteed to be valid. */
2811 if (get_next_frame (this_frame
)
2812 && get_frame_type (get_next_frame (this_frame
)) == NORMAL_FRAME
)
2815 /* We also assume exception information is valid if we're currently
2816 blocked in a system call. The system library is supposed to
2817 ensure this, so that e.g. pthread cancellation works. */
2818 if (arm_frame_is_thumb (this_frame
))
2822 if (safe_read_memory_integer (get_frame_pc (this_frame
) - 2, 2,
2823 byte_order_for_code
, &insn
)
2824 && (insn
& 0xff00) == 0xdf00 /* svc */)
2831 if (safe_read_memory_integer (get_frame_pc (this_frame
) - 4, 4,
2832 byte_order_for_code
, &insn
)
2833 && (insn
& 0x0f000000) == 0x0f000000 /* svc */)
2837 /* Bail out if we don't know that exception information is valid. */
2841 /* The ARM exception index does not mark the *end* of the region
2842 covered by the entry, and some functions will not have any entry.
2843 To correctly recognize the end of the covered region, the linker
2844 should have inserted dummy records with a CANTUNWIND marker.
2846 Unfortunately, current versions of GNU ld do not reliably do
2847 this, and thus we may have found an incorrect entry above.
2848 As a (temporary) sanity check, we only use the entry if it
2849 lies *within* the bounds of the function. Note that this check
2850 might reject perfectly valid entries that just happen to cover
2851 multiple functions; therefore this check ought to be removed
2852 once the linker is fixed. */
2853 if (func_start
> exidx_region
)
2857 /* Decode the list of unwinding instructions into a prologue cache.
2858 Note that this may fail due to e.g. a "refuse to unwind" code. */
2859 cache
= arm_exidx_fill_cache (this_frame
, entry
);
2863 *this_prologue_cache
= cache
;
2867 struct frame_unwind arm_exidx_unwind
= {
2869 default_frame_unwind_stop_reason
,
2870 arm_prologue_this_id
,
2871 arm_prologue_prev_register
,
2873 arm_exidx_unwind_sniffer
2876 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2877 trampoline, return the target PC. Otherwise return 0.
2879 void call0a (char c, short s, int i, long l) {}
2883 (*pointer_to_call0a) (c, s, i, l);
2886 Instead of calling a stub library function _call_via_xx (xx is
2887 the register name), GCC may inline the trampoline in the object
2888 file as below (register r2 has the address of call0a).
2891 .type main, %function
2900 The trampoline 'bx r2' doesn't belong to main. */
2903 arm_skip_bx_reg (struct frame_info
*frame
, CORE_ADDR pc
)
2905 /* The heuristics of recognizing such trampoline is that FRAME is
2906 executing in Thumb mode and the instruction on PC is 'bx Rm'. */
2907 if (arm_frame_is_thumb (frame
))
2911 if (target_read_memory (pc
, buf
, 2) == 0)
2913 struct gdbarch
*gdbarch
= get_frame_arch (frame
);
2914 enum bfd_endian byte_order_for_code
2915 = gdbarch_byte_order_for_code (gdbarch
);
2917 = extract_unsigned_integer (buf
, 2, byte_order_for_code
);
2919 if ((insn
& 0xff80) == 0x4700) /* bx <Rm> */
2922 = get_frame_register_unsigned (frame
, bits (insn
, 3, 6));
2924 /* Clear the LSB so that gdb core sets step-resume
2925 breakpoint at the right address. */
2926 return UNMAKE_THUMB_ADDR (dest
);
2934 static struct arm_prologue_cache
*
2935 arm_make_stub_cache (struct frame_info
*this_frame
)
2937 struct arm_prologue_cache
*cache
;
2939 cache
= FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache
);
2940 cache
->saved_regs
= trad_frame_alloc_saved_regs (this_frame
);
2942 cache
->prev_sp
= get_frame_register_unsigned (this_frame
, ARM_SP_REGNUM
);
2947 /* Our frame ID for a stub frame is the current SP and LR. */
2950 arm_stub_this_id (struct frame_info
*this_frame
,
2952 struct frame_id
*this_id
)
2954 struct arm_prologue_cache
*cache
;
2956 if (*this_cache
== NULL
)
2957 *this_cache
= arm_make_stub_cache (this_frame
);
2958 cache
= (struct arm_prologue_cache
*) *this_cache
;
2960 *this_id
= frame_id_build (cache
->prev_sp
, get_frame_pc (this_frame
));
2964 arm_stub_unwind_sniffer (const struct frame_unwind
*self
,
2965 struct frame_info
*this_frame
,
2966 void **this_prologue_cache
)
2968 CORE_ADDR addr_in_block
;
2970 CORE_ADDR pc
, start_addr
;
2973 addr_in_block
= get_frame_address_in_block (this_frame
);
2974 pc
= get_frame_pc (this_frame
);
2975 if (in_plt_section (addr_in_block
)
2976 /* We also use the stub winder if the target memory is unreadable
2977 to avoid having the prologue unwinder trying to read it. */
2978 || target_read_memory (pc
, dummy
, 4) != 0)
2981 if (find_pc_partial_function (pc
, &name
, &start_addr
, NULL
) == 0
2982 && arm_skip_bx_reg (this_frame
, pc
) != 0)
2988 struct frame_unwind arm_stub_unwind
= {
2990 default_frame_unwind_stop_reason
,
2992 arm_prologue_prev_register
,
2994 arm_stub_unwind_sniffer
2997 /* Put here the code to store, into CACHE->saved_regs, the addresses
2998 of the saved registers of frame described by THIS_FRAME. CACHE is
3001 static struct arm_prologue_cache
*
3002 arm_m_exception_cache (struct frame_info
*this_frame
)
3004 struct gdbarch
*gdbarch
= get_frame_arch (this_frame
);
3005 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
3006 struct arm_prologue_cache
*cache
;
3007 CORE_ADDR unwound_sp
;
3010 cache
= FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache
);
3011 cache
->saved_regs
= trad_frame_alloc_saved_regs (this_frame
);
3013 unwound_sp
= get_frame_register_unsigned (this_frame
,
3016 /* The hardware saves eight 32-bit words, comprising xPSR,
3017 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
3018 "B1.5.6 Exception entry behavior" in
3019 "ARMv7-M Architecture Reference Manual". */
3020 cache
->saved_regs
[0].addr
= unwound_sp
;
3021 cache
->saved_regs
[1].addr
= unwound_sp
+ 4;
3022 cache
->saved_regs
[2].addr
= unwound_sp
+ 8;
3023 cache
->saved_regs
[3].addr
= unwound_sp
+ 12;
3024 cache
->saved_regs
[12].addr
= unwound_sp
+ 16;
3025 cache
->saved_regs
[14].addr
= unwound_sp
+ 20;
3026 cache
->saved_regs
[15].addr
= unwound_sp
+ 24;
3027 cache
->saved_regs
[ARM_PS_REGNUM
].addr
= unwound_sp
+ 28;
3029 /* If bit 9 of the saved xPSR is set, then there is a four-byte
3030 aligner between the top of the 32-byte stack frame and the
3031 previous context's stack pointer. */
3032 cache
->prev_sp
= unwound_sp
+ 32;
3033 if (safe_read_memory_integer (unwound_sp
+ 28, 4, byte_order
, &xpsr
)
3034 && (xpsr
& (1 << 9)) != 0)
3035 cache
->prev_sp
+= 4;
3040 /* Implementation of function hook 'this_id' in
3041 'struct frame_uwnind'. */
3044 arm_m_exception_this_id (struct frame_info
*this_frame
,
3046 struct frame_id
*this_id
)
3048 struct arm_prologue_cache
*cache
;
3050 if (*this_cache
== NULL
)
3051 *this_cache
= arm_m_exception_cache (this_frame
);
3052 cache
= (struct arm_prologue_cache
*) *this_cache
;
3054 /* Our frame ID for a stub frame is the current SP and LR. */
3055 *this_id
= frame_id_build (cache
->prev_sp
,
3056 get_frame_pc (this_frame
));
3059 /* Implementation of function hook 'prev_register' in
3060 'struct frame_uwnind'. */
3062 static struct value
*
3063 arm_m_exception_prev_register (struct frame_info
*this_frame
,
3067 struct gdbarch
*gdbarch
= get_frame_arch (this_frame
);
3068 struct arm_prologue_cache
*cache
;
3070 if (*this_cache
== NULL
)
3071 *this_cache
= arm_m_exception_cache (this_frame
);
3072 cache
= (struct arm_prologue_cache
*) *this_cache
;
3074 /* The value was already reconstructed into PREV_SP. */
3075 if (prev_regnum
== ARM_SP_REGNUM
)
3076 return frame_unwind_got_constant (this_frame
, prev_regnum
,
3079 return trad_frame_get_prev_register (this_frame
, cache
->saved_regs
,
3083 /* Implementation of function hook 'sniffer' in
3084 'struct frame_uwnind'. */
3087 arm_m_exception_unwind_sniffer (const struct frame_unwind
*self
,
3088 struct frame_info
*this_frame
,
3089 void **this_prologue_cache
)
3091 CORE_ADDR this_pc
= get_frame_pc (this_frame
);
3093 /* No need to check is_m; this sniffer is only registered for
3094 M-profile architectures. */
3096 /* Exception frames return to one of these magic PCs. Other values
3097 are not defined as of v7-M. See details in "B1.5.8 Exception
3098 return behavior" in "ARMv7-M Architecture Reference Manual". */
3099 if (this_pc
== 0xfffffff1 || this_pc
== 0xfffffff9
3100 || this_pc
== 0xfffffffd)
3106 /* Frame unwinder for M-profile exceptions. */
3108 struct frame_unwind arm_m_exception_unwind
=
3111 default_frame_unwind_stop_reason
,
3112 arm_m_exception_this_id
,
3113 arm_m_exception_prev_register
,
3115 arm_m_exception_unwind_sniffer
3119 arm_normal_frame_base (struct frame_info
*this_frame
, void **this_cache
)
3121 struct arm_prologue_cache
*cache
;
3123 if (*this_cache
== NULL
)
3124 *this_cache
= arm_make_prologue_cache (this_frame
);
3125 cache
= (struct arm_prologue_cache
*) *this_cache
;
3127 return cache
->prev_sp
- cache
->framesize
;
3130 struct frame_base arm_normal_base
= {
3131 &arm_prologue_unwind
,
3132 arm_normal_frame_base
,
3133 arm_normal_frame_base
,
3134 arm_normal_frame_base
3137 /* Assuming THIS_FRAME is a dummy, return the frame ID of that
3138 dummy frame. The frame ID's base needs to match the TOS value
3139 saved by save_dummy_frame_tos() and returned from
3140 arm_push_dummy_call, and the PC needs to match the dummy frame's
3143 static struct frame_id
3144 arm_dummy_id (struct gdbarch
*gdbarch
, struct frame_info
*this_frame
)
3146 return frame_id_build (get_frame_register_unsigned (this_frame
,
3148 get_frame_pc (this_frame
));
3151 /* Given THIS_FRAME, find the previous frame's resume PC (which will
3152 be used to construct the previous frame's ID, after looking up the
3153 containing function). */
3156 arm_unwind_pc (struct gdbarch
*gdbarch
, struct frame_info
*this_frame
)
3159 pc
= frame_unwind_register_unsigned (this_frame
, ARM_PC_REGNUM
);
3160 return arm_addr_bits_remove (gdbarch
, pc
);
3164 arm_unwind_sp (struct gdbarch
*gdbarch
, struct frame_info
*this_frame
)
3166 return frame_unwind_register_unsigned (this_frame
, ARM_SP_REGNUM
);
3169 static struct value
*
3170 arm_dwarf2_prev_register (struct frame_info
*this_frame
, void **this_cache
,
3173 struct gdbarch
* gdbarch
= get_frame_arch (this_frame
);
3175 ULONGEST t_bit
= arm_psr_thumb_bit (gdbarch
);
3180 /* The PC is normally copied from the return column, which
3181 describes saves of LR. However, that version may have an
3182 extra bit set to indicate Thumb state. The bit is not
3184 lr
= frame_unwind_register_unsigned (this_frame
, ARM_LR_REGNUM
);
3185 return frame_unwind_got_constant (this_frame
, regnum
,
3186 arm_addr_bits_remove (gdbarch
, lr
));
3189 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3190 cpsr
= get_frame_register_unsigned (this_frame
, regnum
);
3191 lr
= frame_unwind_register_unsigned (this_frame
, ARM_LR_REGNUM
);
3192 if (IS_THUMB_ADDR (lr
))
3196 return frame_unwind_got_constant (this_frame
, regnum
, cpsr
);
3199 internal_error (__FILE__
, __LINE__
,
3200 _("Unexpected register %d"), regnum
);
3205 arm_dwarf2_frame_init_reg (struct gdbarch
*gdbarch
, int regnum
,
3206 struct dwarf2_frame_state_reg
*reg
,
3207 struct frame_info
*this_frame
)
3213 reg
->how
= DWARF2_FRAME_REG_FN
;
3214 reg
->loc
.fn
= arm_dwarf2_prev_register
;
3217 reg
->how
= DWARF2_FRAME_REG_CFA
;
3222 /* Implement the stack_frame_destroyed_p gdbarch method. */
3225 thumb_stack_frame_destroyed_p (struct gdbarch
*gdbarch
, CORE_ADDR pc
)
3227 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
3228 unsigned int insn
, insn2
;
3229 int found_return
= 0, found_stack_adjust
= 0;
3230 CORE_ADDR func_start
, func_end
;
3234 if (!find_pc_partial_function (pc
, NULL
, &func_start
, &func_end
))
3237 /* The epilogue is a sequence of instructions along the following lines:
3239 - add stack frame size to SP or FP
3240 - [if frame pointer used] restore SP from FP
3241 - restore registers from SP [may include PC]
3242 - a return-type instruction [if PC wasn't already restored]
3244 In a first pass, we scan forward from the current PC and verify the
3245 instructions we find as compatible with this sequence, ending in a
3248 However, this is not sufficient to distinguish indirect function calls
3249 within a function from indirect tail calls in the epilogue in some cases.
3250 Therefore, if we didn't already find any SP-changing instruction during
3251 forward scan, we add a backward scanning heuristic to ensure we actually
3252 are in the epilogue. */
3255 while (scan_pc
< func_end
&& !found_return
)
3257 if (target_read_memory (scan_pc
, buf
, 2))
3261 insn
= extract_unsigned_integer (buf
, 2, byte_order_for_code
);
3263 if ((insn
& 0xff80) == 0x4700) /* bx <Rm> */
3265 else if (insn
== 0x46f7) /* mov pc, lr */
3267 else if (thumb_instruction_restores_sp (insn
))
3269 if ((insn
& 0xff00) == 0xbd00) /* pop <registers, PC> */
3272 else if (thumb_insn_size (insn
) == 4) /* 32-bit Thumb-2 instruction */
3274 if (target_read_memory (scan_pc
, buf
, 2))
3278 insn2
= extract_unsigned_integer (buf
, 2, byte_order_for_code
);
3280 if (insn
== 0xe8bd) /* ldm.w sp!, <registers> */
3282 if (insn2
& 0x8000) /* <registers> include PC. */
3285 else if (insn
== 0xf85d /* ldr.w <Rt>, [sp], #4 */
3286 && (insn2
& 0x0fff) == 0x0b04)
3288 if ((insn2
& 0xf000) == 0xf000) /* <Rt> is PC. */
3291 else if ((insn
& 0xffbf) == 0xecbd /* vldm sp!, <list> */
3292 && (insn2
& 0x0e00) == 0x0a00)
3304 /* Since any instruction in the epilogue sequence, with the possible
3305 exception of return itself, updates the stack pointer, we need to
3306 scan backwards for at most one instruction. Try either a 16-bit or
3307 a 32-bit instruction. This is just a heuristic, so we do not worry
3308 too much about false positives. */
3310 if (pc
- 4 < func_start
)
3312 if (target_read_memory (pc
- 4, buf
, 4))
3315 insn
= extract_unsigned_integer (buf
, 2, byte_order_for_code
);
3316 insn2
= extract_unsigned_integer (buf
+ 2, 2, byte_order_for_code
);
3318 if (thumb_instruction_restores_sp (insn2
))
3319 found_stack_adjust
= 1;
3320 else if (insn
== 0xe8bd) /* ldm.w sp!, <registers> */
3321 found_stack_adjust
= 1;
3322 else if (insn
== 0xf85d /* ldr.w <Rt>, [sp], #4 */
3323 && (insn2
& 0x0fff) == 0x0b04)
3324 found_stack_adjust
= 1;
3325 else if ((insn
& 0xffbf) == 0xecbd /* vldm sp!, <list> */
3326 && (insn2
& 0x0e00) == 0x0a00)
3327 found_stack_adjust
= 1;
3329 return found_stack_adjust
;
3332 /* Implement the stack_frame_destroyed_p gdbarch method. */
3335 arm_stack_frame_destroyed_p (struct gdbarch
*gdbarch
, CORE_ADDR pc
)
3337 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
3340 CORE_ADDR func_start
, func_end
;
3342 if (arm_pc_is_thumb (gdbarch
, pc
))
3343 return thumb_stack_frame_destroyed_p (gdbarch
, pc
);
3345 if (!find_pc_partial_function (pc
, NULL
, &func_start
, &func_end
))
3348 /* We are in the epilogue if the previous instruction was a stack
3349 adjustment and the next instruction is a possible return (bx, mov
3350 pc, or pop). We could have to scan backwards to find the stack
3351 adjustment, or forwards to find the return, but this is a decent
3352 approximation. First scan forwards. */
3355 insn
= read_memory_unsigned_integer (pc
, 4, byte_order_for_code
);
3356 if (bits (insn
, 28, 31) != INST_NV
)
3358 if ((insn
& 0x0ffffff0) == 0x012fff10)
3361 else if ((insn
& 0x0ffffff0) == 0x01a0f000)
3364 else if ((insn
& 0x0fff0000) == 0x08bd0000
3365 && (insn
& 0x0000c000) != 0)
3366 /* POP (LDMIA), including PC or LR. */
3373 /* Scan backwards. This is just a heuristic, so do not worry about
3374 false positives from mode changes. */
3376 if (pc
< func_start
+ 4)
3379 insn
= read_memory_unsigned_integer (pc
- 4, 4, byte_order_for_code
);
3380 if (arm_instruction_restores_sp (insn
))
3387 /* When arguments must be pushed onto the stack, they go on in reverse
3388 order. The code below implements a FILO (stack) to do this. */
3393 struct stack_item
*prev
;
3397 static struct stack_item
*
3398 push_stack_item (struct stack_item
*prev
, const void *contents
, int len
)
3400 struct stack_item
*si
;
3401 si
= XNEW (struct stack_item
);
3402 si
->data
= (gdb_byte
*) xmalloc (len
);
3405 memcpy (si
->data
, contents
, len
);
3409 static struct stack_item
*
3410 pop_stack_item (struct stack_item
*si
)
3412 struct stack_item
*dead
= si
;
3420 /* Return the alignment (in bytes) of the given type. */
3423 arm_type_align (struct type
*t
)
3429 t
= check_typedef (t
);
3430 switch (TYPE_CODE (t
))
3433 /* Should never happen. */
3434 internal_error (__FILE__
, __LINE__
, _("unknown type alignment"));
3438 case TYPE_CODE_ENUM
:
3442 case TYPE_CODE_RANGE
:
3444 case TYPE_CODE_CHAR
:
3445 case TYPE_CODE_BOOL
:
3446 return TYPE_LENGTH (t
);
3448 case TYPE_CODE_ARRAY
:
3449 case TYPE_CODE_COMPLEX
:
3450 /* TODO: What about vector types? */
3451 return arm_type_align (TYPE_TARGET_TYPE (t
));
3453 case TYPE_CODE_STRUCT
:
3454 case TYPE_CODE_UNION
:
3456 for (n
= 0; n
< TYPE_NFIELDS (t
); n
++)
3458 falign
= arm_type_align (TYPE_FIELD_TYPE (t
, n
));
/* Possible base types for a candidate for passing and returning in
   VFP registers.  */

enum arm_vfp_cprc_base_type
{
  VFP_CPRC_UNKNOWN,
  VFP_CPRC_SINGLE,
  VFP_CPRC_DOUBLE,
  VFP_CPRC_VEC64,
  VFP_CPRC_VEC128
};
3478 /* The length of one element of base type B. */
3481 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b
)
3485 case VFP_CPRC_SINGLE
:
3487 case VFP_CPRC_DOUBLE
:
3489 case VFP_CPRC_VEC64
:
3491 case VFP_CPRC_VEC128
:
3494 internal_error (__FILE__
, __LINE__
, _("Invalid VFP CPRC type: %d."),
3499 /* The character ('s', 'd' or 'q') for the type of VFP register used
3500 for passing base type B. */
3503 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b
)
3507 case VFP_CPRC_SINGLE
:
3509 case VFP_CPRC_DOUBLE
:
3511 case VFP_CPRC_VEC64
:
3513 case VFP_CPRC_VEC128
:
3516 internal_error (__FILE__
, __LINE__
, _("Invalid VFP CPRC type: %d."),
3521 /* Determine whether T may be part of a candidate for passing and
3522 returning in VFP registers, ignoring the limit on the total number
3523 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3524 classification of the first valid component found; if it is not
3525 VFP_CPRC_UNKNOWN, all components must have the same classification
3526 as *BASE_TYPE. If it is found that T contains a type not permitted
3527 for passing and returning in VFP registers, a type differently
3528 classified from *BASE_TYPE, or two types differently classified
3529 from each other, return -1, otherwise return the total number of
3530 base-type elements found (possibly 0 in an empty structure or
3531 array). Vector types are not currently supported, matching the
3532 generic AAPCS support. */
3535 arm_vfp_cprc_sub_candidate (struct type
*t
,
3536 enum arm_vfp_cprc_base_type
*base_type
)
3538 t
= check_typedef (t
);
3539 switch (TYPE_CODE (t
))
3542 switch (TYPE_LENGTH (t
))
3545 if (*base_type
== VFP_CPRC_UNKNOWN
)
3546 *base_type
= VFP_CPRC_SINGLE
;
3547 else if (*base_type
!= VFP_CPRC_SINGLE
)
3552 if (*base_type
== VFP_CPRC_UNKNOWN
)
3553 *base_type
= VFP_CPRC_DOUBLE
;
3554 else if (*base_type
!= VFP_CPRC_DOUBLE
)
3563 case TYPE_CODE_COMPLEX
:
3564 /* Arguments of complex T where T is one of the types float or
3565 double get treated as if they are implemented as:
3574 switch (TYPE_LENGTH (t
))
3577 if (*base_type
== VFP_CPRC_UNKNOWN
)
3578 *base_type
= VFP_CPRC_SINGLE
;
3579 else if (*base_type
!= VFP_CPRC_SINGLE
)
3584 if (*base_type
== VFP_CPRC_UNKNOWN
)
3585 *base_type
= VFP_CPRC_DOUBLE
;
3586 else if (*base_type
!= VFP_CPRC_DOUBLE
)
3595 case TYPE_CODE_ARRAY
:
3599 count
= arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t
), base_type
);
3602 if (TYPE_LENGTH (t
) == 0)
3604 gdb_assert (count
== 0);
3607 else if (count
== 0)
3609 unitlen
= arm_vfp_cprc_unit_length (*base_type
);
3610 gdb_assert ((TYPE_LENGTH (t
) % unitlen
) == 0);
3611 return TYPE_LENGTH (t
) / unitlen
;
3615 case TYPE_CODE_STRUCT
:
3620 for (i
= 0; i
< TYPE_NFIELDS (t
); i
++)
3622 int sub_count
= arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t
, i
),
3624 if (sub_count
== -1)
3628 if (TYPE_LENGTH (t
) == 0)
3630 gdb_assert (count
== 0);
3633 else if (count
== 0)
3635 unitlen
= arm_vfp_cprc_unit_length (*base_type
);
3636 if (TYPE_LENGTH (t
) != unitlen
* count
)
3641 case TYPE_CODE_UNION
:
3646 for (i
= 0; i
< TYPE_NFIELDS (t
); i
++)
3648 int sub_count
= arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t
, i
),
3650 if (sub_count
== -1)
3652 count
= (count
> sub_count
? count
: sub_count
);
3654 if (TYPE_LENGTH (t
) == 0)
3656 gdb_assert (count
== 0);
3659 else if (count
== 0)
3661 unitlen
= arm_vfp_cprc_unit_length (*base_type
);
3662 if (TYPE_LENGTH (t
) != unitlen
* count
)
3674 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3675 if passed to or returned from a non-variadic function with the VFP
3676 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3677 *BASE_TYPE to the base type for T and *COUNT to the number of
3678 elements of that base type before returning. */
3681 arm_vfp_call_candidate (struct type
*t
, enum arm_vfp_cprc_base_type
*base_type
,
3684 enum arm_vfp_cprc_base_type b
= VFP_CPRC_UNKNOWN
;
3685 int c
= arm_vfp_cprc_sub_candidate (t
, &b
);
3686 if (c
<= 0 || c
> 4)
3693 /* Return 1 if the VFP ABI should be used for passing arguments to and
3694 returning values from a function of type FUNC_TYPE, 0
3698 arm_vfp_abi_for_function (struct gdbarch
*gdbarch
, struct type
*func_type
)
3700 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
3701 /* Variadic functions always use the base ABI. Assume that functions
3702 without debug info are not variadic. */
3703 if (func_type
&& TYPE_VARARGS (check_typedef (func_type
)))
3705 /* The VFP ABI is only supported as a variant of AAPCS. */
3706 if (tdep
->arm_abi
!= ARM_ABI_AAPCS
)
3708 return gdbarch_tdep (gdbarch
)->fp_model
== ARM_FLOAT_VFP
;
3711 /* We currently only support passing parameters in integer registers, which
3712 conforms with GCC's default model, and VFP argument passing following
3713 the VFP variant of AAPCS. Several other variants exist and
3714 we should probably support some of them based on the selected ABI. */
3717 arm_push_dummy_call (struct gdbarch
*gdbarch
, struct value
*function
,
3718 struct regcache
*regcache
, CORE_ADDR bp_addr
, int nargs
,
3719 struct value
**args
, CORE_ADDR sp
, int struct_return
,
3720 CORE_ADDR struct_addr
)
3722 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
3726 struct stack_item
*si
= NULL
;
3729 unsigned vfp_regs_free
= (1 << 16) - 1;
3731 /* Determine the type of this function and whether the VFP ABI
3733 ftype
= check_typedef (value_type (function
));
3734 if (TYPE_CODE (ftype
) == TYPE_CODE_PTR
)
3735 ftype
= check_typedef (TYPE_TARGET_TYPE (ftype
));
3736 use_vfp_abi
= arm_vfp_abi_for_function (gdbarch
, ftype
);
3738 /* Set the return address. For the ARM, the return breakpoint is
3739 always at BP_ADDR. */
3740 if (arm_pc_is_thumb (gdbarch
, bp_addr
))
3742 regcache_cooked_write_unsigned (regcache
, ARM_LR_REGNUM
, bp_addr
);
3744 /* Walk through the list of args and determine how large a temporary
3745 stack is required. Need to take care here as structs may be
3746 passed on the stack, and we have to push them. */
3749 argreg
= ARM_A1_REGNUM
;
3752 /* The struct_return pointer occupies the first parameter
3753 passing register. */
3757 fprintf_unfiltered (gdb_stdlog
, "struct return in %s = %s\n",
3758 gdbarch_register_name (gdbarch
, argreg
),
3759 paddress (gdbarch
, struct_addr
));
3760 regcache_cooked_write_unsigned (regcache
, argreg
, struct_addr
);
3764 for (argnum
= 0; argnum
< nargs
; argnum
++)
3767 struct type
*arg_type
;
3768 struct type
*target_type
;
3769 enum type_code typecode
;
3770 const bfd_byte
*val
;
3772 enum arm_vfp_cprc_base_type vfp_base_type
;
3774 int may_use_core_reg
= 1;
3776 arg_type
= check_typedef (value_type (args
[argnum
]));
3777 len
= TYPE_LENGTH (arg_type
);
3778 target_type
= TYPE_TARGET_TYPE (arg_type
);
3779 typecode
= TYPE_CODE (arg_type
);
3780 val
= value_contents (args
[argnum
]);
3782 align
= arm_type_align (arg_type
);
3783 /* Round alignment up to a whole number of words. */
3784 align
= (align
+ INT_REGISTER_SIZE
- 1) & ~(INT_REGISTER_SIZE
- 1);
3785 /* Different ABIs have different maximum alignments. */
3786 if (gdbarch_tdep (gdbarch
)->arm_abi
== ARM_ABI_APCS
)
3788 /* The APCS ABI only requires word alignment. */
3789 align
= INT_REGISTER_SIZE
;
3793 /* The AAPCS requires at most doubleword alignment. */
3794 if (align
> INT_REGISTER_SIZE
* 2)
3795 align
= INT_REGISTER_SIZE
* 2;
3799 && arm_vfp_call_candidate (arg_type
, &vfp_base_type
,
3807 /* Because this is a CPRC it cannot go in a core register or
3808 cause a core register to be skipped for alignment.
3809 Either it goes in VFP registers and the rest of this loop
3810 iteration is skipped for this argument, or it goes on the
3811 stack (and the stack alignment code is correct for this
3813 may_use_core_reg
= 0;
3815 unit_length
= arm_vfp_cprc_unit_length (vfp_base_type
);
3816 shift
= unit_length
/ 4;
3817 mask
= (1 << (shift
* vfp_base_count
)) - 1;
3818 for (regno
= 0; regno
< 16; regno
+= shift
)
3819 if (((vfp_regs_free
>> regno
) & mask
) == mask
)
3828 vfp_regs_free
&= ~(mask
<< regno
);
3829 reg_scaled
= regno
/ shift
;
3830 reg_char
= arm_vfp_cprc_reg_char (vfp_base_type
);
3831 for (i
= 0; i
< vfp_base_count
; i
++)
3835 if (reg_char
== 'q')
3836 arm_neon_quad_write (gdbarch
, regcache
, reg_scaled
+ i
,
3837 val
+ i
* unit_length
);
3840 xsnprintf (name_buf
, sizeof (name_buf
), "%c%d",
3841 reg_char
, reg_scaled
+ i
);
3842 regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
3844 regcache_cooked_write (regcache
, regnum
,
3845 val
+ i
* unit_length
);
3852 /* This CPRC could not go in VFP registers, so all VFP
3853 registers are now marked as used. */
3858 /* Push stack padding for dowubleword alignment. */
3859 if (nstack
& (align
- 1))
3861 si
= push_stack_item (si
, val
, INT_REGISTER_SIZE
);
3862 nstack
+= INT_REGISTER_SIZE
;
3865 /* Doubleword aligned quantities must go in even register pairs. */
3866 if (may_use_core_reg
3867 && argreg
<= ARM_LAST_ARG_REGNUM
3868 && align
> INT_REGISTER_SIZE
3872 /* If the argument is a pointer to a function, and it is a
3873 Thumb function, create a LOCAL copy of the value and set
3874 the THUMB bit in it. */
3875 if (TYPE_CODE_PTR
== typecode
3876 && target_type
!= NULL
3877 && TYPE_CODE_FUNC
== TYPE_CODE (check_typedef (target_type
)))
3879 CORE_ADDR regval
= extract_unsigned_integer (val
, len
, byte_order
);
3880 if (arm_pc_is_thumb (gdbarch
, regval
))
3882 bfd_byte
*copy
= (bfd_byte
*) alloca (len
);
3883 store_unsigned_integer (copy
, len
, byte_order
,
3884 MAKE_THUMB_ADDR (regval
));
3889 /* Copy the argument to general registers or the stack in
3890 register-sized pieces. Large arguments are split between
3891 registers and stack. */
3894 int partial_len
= len
< INT_REGISTER_SIZE
? len
: INT_REGISTER_SIZE
;
3896 if (may_use_core_reg
&& argreg
<= ARM_LAST_ARG_REGNUM
)
3898 /* The argument is being passed in a general purpose
3901 = extract_unsigned_integer (val
, partial_len
, byte_order
);
3902 if (byte_order
== BFD_ENDIAN_BIG
)
3903 regval
<<= (INT_REGISTER_SIZE
- partial_len
) * 8;
3905 fprintf_unfiltered (gdb_stdlog
, "arg %d in %s = 0x%s\n",
3907 gdbarch_register_name
3909 phex (regval
, INT_REGISTER_SIZE
));
3910 regcache_cooked_write_unsigned (regcache
, argreg
, regval
);
3915 /* Push the arguments onto the stack. */
3917 fprintf_unfiltered (gdb_stdlog
, "arg %d @ sp + %d\n",
3919 si
= push_stack_item (si
, val
, INT_REGISTER_SIZE
);
3920 nstack
+= INT_REGISTER_SIZE
;
3927 /* If we have an odd number of words to push, then decrement the stack
3928 by one word now, so first stack argument will be dword aligned. */
3935 write_memory (sp
, si
->data
, si
->len
);
3936 si
= pop_stack_item (si
);
3939 /* Finally, update teh SP register. */
3940 regcache_cooked_write_unsigned (regcache
, ARM_SP_REGNUM
, sp
);
3946 /* Always align the frame to an 8-byte boundary. This is required on
3947 some platforms and harmless on the rest. */
3950 arm_frame_align (struct gdbarch
*gdbarch
, CORE_ADDR sp
)
3952 /* Align the stack to eight bytes. */
3953 return sp
& ~ (CORE_ADDR
) 7;
static void
print_fpu_flags (struct ui_file *file, int flags)
{
  /* FPA exception flag names, indexed by bit position in FLAGS.  */
  static const char *const flag_names[]
    = { "IVO ", "DVZ ", "OFL ", "UFL ", "INX " };
  int bit;

  for (bit = 0; bit < 5; bit++)
    if (flags & (1 << bit))
      fputs_filtered (flag_names[bit], file);
  fputc_filtered ('\n', file);
}
3972 /* Print interesting information about the floating point processor
3973 (if present) or emulator. */
3975 arm_print_float_info (struct gdbarch
*gdbarch
, struct ui_file
*file
,
3976 struct frame_info
*frame
, const char *args
)
3978 unsigned long status
= get_frame_register_unsigned (frame
, ARM_FPS_REGNUM
);
3981 type
= (status
>> 24) & 127;
3982 if (status
& (1 << 31))
3983 fprintf_filtered (file
, _("Hardware FPU type %d\n"), type
);
3985 fprintf_filtered (file
, _("Software FPU type %d\n"), type
);
3986 /* i18n: [floating point unit] mask */
3987 fputs_filtered (_("mask: "), file
);
3988 print_fpu_flags (file
, status
>> 16);
3989 /* i18n: [floating point unit] flags */
3990 fputs_filtered (_("flags: "), file
);
3991 print_fpu_flags (file
, status
);
3994 /* Construct the ARM extended floating point type. */
3995 static struct type
*
3996 arm_ext_type (struct gdbarch
*gdbarch
)
3998 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
4000 if (!tdep
->arm_ext_type
)
4002 = arch_float_type (gdbarch
, -1, "builtin_type_arm_ext",
4003 floatformats_arm_ext
);
4005 return tdep
->arm_ext_type
;
4008 static struct type
*
4009 arm_neon_double_type (struct gdbarch
*gdbarch
)
4011 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
4013 if (tdep
->neon_double_type
== NULL
)
4015 struct type
*t
, *elem
;
4017 t
= arch_composite_type (gdbarch
, "__gdb_builtin_type_neon_d",
4019 elem
= builtin_type (gdbarch
)->builtin_uint8
;
4020 append_composite_type_field (t
, "u8", init_vector_type (elem
, 8));
4021 elem
= builtin_type (gdbarch
)->builtin_uint16
;
4022 append_composite_type_field (t
, "u16", init_vector_type (elem
, 4));
4023 elem
= builtin_type (gdbarch
)->builtin_uint32
;
4024 append_composite_type_field (t
, "u32", init_vector_type (elem
, 2));
4025 elem
= builtin_type (gdbarch
)->builtin_uint64
;
4026 append_composite_type_field (t
, "u64", elem
);
4027 elem
= builtin_type (gdbarch
)->builtin_float
;
4028 append_composite_type_field (t
, "f32", init_vector_type (elem
, 2));
4029 elem
= builtin_type (gdbarch
)->builtin_double
;
4030 append_composite_type_field (t
, "f64", elem
);
4032 TYPE_VECTOR (t
) = 1;
4033 TYPE_NAME (t
) = "neon_d";
4034 tdep
->neon_double_type
= t
;
4037 return tdep
->neon_double_type
;
4040 /* FIXME: The vector types are not correctly ordered on big-endian
4041 targets. Just as s0 is the low bits of d0, d0[0] is also the low
4042 bits of d0 - regardless of what unit size is being held in d0. So
4043 the offset of the first uint8 in d0 is 7, but the offset of the
4044 first float is 4. This code works as-is for little-endian
4047 static struct type
*
4048 arm_neon_quad_type (struct gdbarch
*gdbarch
)
4050 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
4052 if (tdep
->neon_quad_type
== NULL
)
4054 struct type
*t
, *elem
;
4056 t
= arch_composite_type (gdbarch
, "__gdb_builtin_type_neon_q",
4058 elem
= builtin_type (gdbarch
)->builtin_uint8
;
4059 append_composite_type_field (t
, "u8", init_vector_type (elem
, 16));
4060 elem
= builtin_type (gdbarch
)->builtin_uint16
;
4061 append_composite_type_field (t
, "u16", init_vector_type (elem
, 8));
4062 elem
= builtin_type (gdbarch
)->builtin_uint32
;
4063 append_composite_type_field (t
, "u32", init_vector_type (elem
, 4));
4064 elem
= builtin_type (gdbarch
)->builtin_uint64
;
4065 append_composite_type_field (t
, "u64", init_vector_type (elem
, 2));
4066 elem
= builtin_type (gdbarch
)->builtin_float
;
4067 append_composite_type_field (t
, "f32", init_vector_type (elem
, 4));
4068 elem
= builtin_type (gdbarch
)->builtin_double
;
4069 append_composite_type_field (t
, "f64", init_vector_type (elem
, 2));
4071 TYPE_VECTOR (t
) = 1;
4072 TYPE_NAME (t
) = "neon_q";
4073 tdep
->neon_quad_type
= t
;
4076 return tdep
->neon_quad_type
;
4079 /* Return the GDB type object for the "standard" data type of data in
4082 static struct type
*
4083 arm_register_type (struct gdbarch
*gdbarch
, int regnum
)
4085 int num_regs
= gdbarch_num_regs (gdbarch
);
4087 if (gdbarch_tdep (gdbarch
)->have_vfp_pseudos
4088 && regnum
>= num_regs
&& regnum
< num_regs
+ 32)
4089 return builtin_type (gdbarch
)->builtin_float
;
4091 if (gdbarch_tdep (gdbarch
)->have_neon_pseudos
4092 && regnum
>= num_regs
+ 32 && regnum
< num_regs
+ 32 + 16)
4093 return arm_neon_quad_type (gdbarch
);
4095 /* If the target description has register information, we are only
4096 in this function so that we can override the types of
4097 double-precision registers for NEON. */
4098 if (tdesc_has_registers (gdbarch_target_desc (gdbarch
)))
4100 struct type
*t
= tdesc_register_type (gdbarch
, regnum
);
4102 if (regnum
>= ARM_D0_REGNUM
&& regnum
< ARM_D0_REGNUM
+ 32
4103 && TYPE_CODE (t
) == TYPE_CODE_FLT
4104 && gdbarch_tdep (gdbarch
)->have_neon
)
4105 return arm_neon_double_type (gdbarch
);
4110 if (regnum
>= ARM_F0_REGNUM
&& regnum
< ARM_F0_REGNUM
+ NUM_FREGS
)
4112 if (!gdbarch_tdep (gdbarch
)->have_fpa_registers
)
4113 return builtin_type (gdbarch
)->builtin_void
;
4115 return arm_ext_type (gdbarch
);
4117 else if (regnum
== ARM_SP_REGNUM
)
4118 return builtin_type (gdbarch
)->builtin_data_ptr
;
4119 else if (regnum
== ARM_PC_REGNUM
)
4120 return builtin_type (gdbarch
)->builtin_func_ptr
;
4121 else if (regnum
>= ARRAY_SIZE (arm_register_names
))
4122 /* These registers are only supported on targets which supply
4123 an XML description. */
4124 return builtin_type (gdbarch
)->builtin_int0
;
4126 return builtin_type (gdbarch
)->builtin_uint32
;
4129 /* Map a DWARF register REGNUM onto the appropriate GDB register
4133 arm_dwarf_reg_to_regnum (struct gdbarch
*gdbarch
, int reg
)
4135 /* Core integer regs. */
4136 if (reg
>= 0 && reg
<= 15)
4139 /* Legacy FPA encoding. These were once used in a way which
4140 overlapped with VFP register numbering, so their use is
4141 discouraged, but GDB doesn't support the ARM toolchain
4142 which used them for VFP. */
4143 if (reg
>= 16 && reg
<= 23)
4144 return ARM_F0_REGNUM
+ reg
- 16;
4146 /* New assignments for the FPA registers. */
4147 if (reg
>= 96 && reg
<= 103)
4148 return ARM_F0_REGNUM
+ reg
- 96;
4150 /* WMMX register assignments. */
4151 if (reg
>= 104 && reg
<= 111)
4152 return ARM_WCGR0_REGNUM
+ reg
- 104;
4154 if (reg
>= 112 && reg
<= 127)
4155 return ARM_WR0_REGNUM
+ reg
- 112;
4157 if (reg
>= 192 && reg
<= 199)
4158 return ARM_WC0_REGNUM
+ reg
- 192;
4160 /* VFP v2 registers. A double precision value is actually
4161 in d1 rather than s2, but the ABI only defines numbering
4162 for the single precision registers. This will "just work"
4163 in GDB for little endian targets (we'll read eight bytes,
4164 starting in s0 and then progressing to s1), but will be
4165 reversed on big endian targets with VFP. This won't
4166 be a problem for the new Neon quad registers; you're supposed
4167 to use DW_OP_piece for those. */
4168 if (reg
>= 64 && reg
<= 95)
4172 xsnprintf (name_buf
, sizeof (name_buf
), "s%d", reg
- 64);
4173 return user_reg_map_name_to_regnum (gdbarch
, name_buf
,
4177 /* VFP v3 / Neon registers. This range is also used for VFP v2
4178 registers, except that it now describes d0 instead of s0. */
4179 if (reg
>= 256 && reg
<= 287)
4183 xsnprintf (name_buf
, sizeof (name_buf
), "d%d", reg
- 256);
4184 return user_reg_map_name_to_regnum (gdbarch
, name_buf
,
4191 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4193 arm_register_sim_regno (struct gdbarch
*gdbarch
, int regnum
)
4196 gdb_assert (reg
>= 0 && reg
< gdbarch_num_regs (gdbarch
));
4198 if (regnum
>= ARM_WR0_REGNUM
&& regnum
<= ARM_WR15_REGNUM
)
4199 return regnum
- ARM_WR0_REGNUM
+ SIM_ARM_IWMMXT_COP0R0_REGNUM
;
4201 if (regnum
>= ARM_WC0_REGNUM
&& regnum
<= ARM_WC7_REGNUM
)
4202 return regnum
- ARM_WC0_REGNUM
+ SIM_ARM_IWMMXT_COP1R0_REGNUM
;
4204 if (regnum
>= ARM_WCGR0_REGNUM
&& regnum
<= ARM_WCGR7_REGNUM
)
4205 return regnum
- ARM_WCGR0_REGNUM
+ SIM_ARM_IWMMXT_COP1R8_REGNUM
;
4207 if (reg
< NUM_GREGS
)
4208 return SIM_ARM_R0_REGNUM
+ reg
;
4211 if (reg
< NUM_FREGS
)
4212 return SIM_ARM_FP0_REGNUM
+ reg
;
4215 if (reg
< NUM_SREGS
)
4216 return SIM_ARM_FPS_REGNUM
+ reg
;
4219 internal_error (__FILE__
, __LINE__
, _("Bad REGNUM %d"), regnum
);
4222 /* NOTE: cagney/2001-08-20: Both convert_from_extended() and
4223 convert_to_extended() use floatformat_arm_ext_littlebyte_bigword.
4224 It is thought that this is the floating-point register format on
4225 little-endian systems. */
4228 convert_from_extended (const struct floatformat
*fmt
, const void *ptr
,
4229 void *dbl
, int endianess
)
4233 if (endianess
== BFD_ENDIAN_BIG
)
4234 floatformat_to_doublest (&floatformat_arm_ext_big
, ptr
, &d
);
4236 floatformat_to_doublest (&floatformat_arm_ext_littlebyte_bigword
,
4238 floatformat_from_doublest (fmt
, &d
, dbl
);
4242 convert_to_extended (const struct floatformat
*fmt
, void *dbl
, const void *ptr
,
4247 floatformat_to_doublest (fmt
, ptr
, &d
);
4248 if (endianess
== BFD_ENDIAN_BIG
)
4249 floatformat_from_doublest (&floatformat_arm_ext_big
, &d
, dbl
);
4251 floatformat_from_doublest (&floatformat_arm_ext_littlebyte_bigword
,
4256 condition_true (unsigned long cond
, unsigned long status_reg
)
4258 if (cond
== INST_AL
|| cond
== INST_NV
)
4264 return ((status_reg
& FLAG_Z
) != 0);
4266 return ((status_reg
& FLAG_Z
) == 0);
4268 return ((status_reg
& FLAG_C
) != 0);
4270 return ((status_reg
& FLAG_C
) == 0);
4272 return ((status_reg
& FLAG_N
) != 0);
4274 return ((status_reg
& FLAG_N
) == 0);
4276 return ((status_reg
& FLAG_V
) != 0);
4278 return ((status_reg
& FLAG_V
) == 0);
4280 return ((status_reg
& (FLAG_C
| FLAG_Z
)) == FLAG_C
);
4282 return ((status_reg
& (FLAG_C
| FLAG_Z
)) != FLAG_C
);
4284 return (((status_reg
& FLAG_N
) == 0) == ((status_reg
& FLAG_V
) == 0));
4286 return (((status_reg
& FLAG_N
) == 0) != ((status_reg
& FLAG_V
) == 0));
4288 return (((status_reg
& FLAG_Z
) == 0)
4289 && (((status_reg
& FLAG_N
) == 0)
4290 == ((status_reg
& FLAG_V
) == 0)));
4292 return (((status_reg
& FLAG_Z
) != 0)
4293 || (((status_reg
& FLAG_N
) == 0)
4294 != ((status_reg
& FLAG_V
) == 0)));
4299 static unsigned long
4300 shifted_reg_val (struct frame_info
*frame
, unsigned long inst
, int carry
,
4301 unsigned long pc_val
, unsigned long status_reg
)
4303 unsigned long res
, shift
;
4304 int rm
= bits (inst
, 0, 3);
4305 unsigned long shifttype
= bits (inst
, 5, 6);
4309 int rs
= bits (inst
, 8, 11);
4310 shift
= (rs
== 15 ? pc_val
+ 8
4311 : get_frame_register_unsigned (frame
, rs
)) & 0xFF;
4314 shift
= bits (inst
, 7, 11);
4316 res
= (rm
== ARM_PC_REGNUM
4317 ? (pc_val
+ (bit (inst
, 4) ? 12 : 8))
4318 : get_frame_register_unsigned (frame
, rm
));
4323 res
= shift
>= 32 ? 0 : res
<< shift
;
4327 res
= shift
>= 32 ? 0 : res
>> shift
;
4333 res
= ((res
& 0x80000000L
)
4334 ? ~((~res
) >> shift
) : res
>> shift
);
4337 case 3: /* ROR/RRX */
4340 res
= (res
>> 1) | (carry
? 0x80000000L
: 0);
4342 res
= (res
>> shift
) | (res
<< (32 - shift
));
4346 return res
& 0xffffffff;
4349 /* Return number of 1-bits in VAL. */
static int
bitcount (unsigned long val)
{
  int count = 0;

  /* Kernighan's trick: each iteration clears the least-significant
     set bit, so the loop runs once per 1-bit.  */
  while (val != 0)
    {
      val &= val - 1;
      count++;
    }

  return count;
}
static unsigned int
thumb_advance_itstate (unsigned int itstate)
{
  /* Preserve IT[7:5] (the base condition) and shift the remaining
     condition/count bits IT[4:0] left by one.  */
  itstate = (itstate & 0xe0) | ((itstate << 1) & 0x1f);

  /* All four low bits clear means the IT block has finished.  */
  if ((itstate & 0x0f) == 0)
    itstate = 0;

  return itstate;
}
4374 /* Find the next PC after the current instruction executes. In some
4375 cases we can not statically determine the answer (see the IT state
4376 handling in this function); in that case, a breakpoint may be
4377 inserted in addition to the returned PC, which will be used to set
4378 another breakpoint by our caller. */
4381 thumb_get_next_pc_raw (struct frame_info
*frame
, CORE_ADDR pc
)
4383 struct gdbarch
*gdbarch
= get_frame_arch (frame
);
4384 struct address_space
*aspace
= get_frame_address_space (frame
);
4385 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
4386 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
4387 unsigned long pc_val
= ((unsigned long) pc
) + 4; /* PC after prefetch */
4388 unsigned short inst1
;
4389 CORE_ADDR nextpc
= pc
+ 2; /* Default is next instruction. */
4390 unsigned long offset
;
4391 ULONGEST status
, itstate
;
4393 nextpc
= MAKE_THUMB_ADDR (nextpc
);
4394 pc_val
= MAKE_THUMB_ADDR (pc_val
);
4396 inst1
= read_memory_unsigned_integer (pc
, 2, byte_order_for_code
);
4398 /* Thumb-2 conditional execution support. There are eight bits in
4399 the CPSR which describe conditional execution state. Once
4400 reconstructed (they're in a funny order), the low five bits
4401 describe the low bit of the condition for each instruction and
4402 how many instructions remain. The high three bits describe the
4403 base condition. One of the low four bits will be set if an IT
4404 block is active. These bits read as zero on earlier
4406 status
= get_frame_register_unsigned (frame
, ARM_PS_REGNUM
);
4407 itstate
= ((status
>> 8) & 0xfc) | ((status
>> 25) & 0x3);
4409 /* If-Then handling. On GNU/Linux, where this routine is used, we
4410 use an undefined instruction as a breakpoint. Unlike BKPT, IT
4411 can disable execution of the undefined instruction. So we might
4412 miss the breakpoint if we set it on a skipped conditional
4413 instruction. Because conditional instructions can change the
4414 flags, affecting the execution of further instructions, we may
4415 need to set two breakpoints. */
4417 if (gdbarch_tdep (gdbarch
)->thumb2_breakpoint
!= NULL
)
4419 if ((inst1
& 0xff00) == 0xbf00 && (inst1
& 0x000f) != 0)
4421 /* An IT instruction. Because this instruction does not
4422 modify the flags, we can accurately predict the next
4423 executed instruction. */
4424 itstate
= inst1
& 0x00ff;
4425 pc
+= thumb_insn_size (inst1
);
4427 while (itstate
!= 0 && ! condition_true (itstate
>> 4, status
))
4429 inst1
= read_memory_unsigned_integer (pc
, 2,
4430 byte_order_for_code
);
4431 pc
+= thumb_insn_size (inst1
);
4432 itstate
= thumb_advance_itstate (itstate
);
4435 return MAKE_THUMB_ADDR (pc
);
4437 else if (itstate
!= 0)
4439 /* We are in a conditional block. Check the condition. */
4440 if (! condition_true (itstate
>> 4, status
))
4442 /* Advance to the next executed instruction. */
4443 pc
+= thumb_insn_size (inst1
);
4444 itstate
= thumb_advance_itstate (itstate
);
4446 while (itstate
!= 0 && ! condition_true (itstate
>> 4, status
))
4448 inst1
= read_memory_unsigned_integer (pc
, 2,
4449 byte_order_for_code
);
4450 pc
+= thumb_insn_size (inst1
);
4451 itstate
= thumb_advance_itstate (itstate
);
4454 return MAKE_THUMB_ADDR (pc
);
4456 else if ((itstate
& 0x0f) == 0x08)
4458 /* This is the last instruction of the conditional
4459 block, and it is executed. We can handle it normally
4460 because the following instruction is not conditional,
4461 and we must handle it normally because it is
4462 permitted to branch. Fall through. */
4468 /* There are conditional instructions after this one.
4469 If this instruction modifies the flags, then we can
4470 not predict what the next executed instruction will
4471 be. Fortunately, this instruction is architecturally
4472 forbidden to branch; we know it will fall through.
4473 Start by skipping past it. */
4474 pc
+= thumb_insn_size (inst1
);
4475 itstate
= thumb_advance_itstate (itstate
);
4477 /* Set a breakpoint on the following instruction. */
4478 gdb_assert ((itstate
& 0x0f) != 0);
4479 arm_insert_single_step_breakpoint (gdbarch
, aspace
,
4480 MAKE_THUMB_ADDR (pc
));
4481 cond_negated
= (itstate
>> 4) & 1;
4483 /* Skip all following instructions with the same
4484 condition. If there is a later instruction in the IT
4485 block with the opposite condition, set the other
4486 breakpoint there. If not, then set a breakpoint on
4487 the instruction after the IT block. */
4490 inst1
= read_memory_unsigned_integer (pc
, 2,
4491 byte_order_for_code
);
4492 pc
+= thumb_insn_size (inst1
);
4493 itstate
= thumb_advance_itstate (itstate
);
4495 while (itstate
!= 0 && ((itstate
>> 4) & 1) == cond_negated
);
4497 return MAKE_THUMB_ADDR (pc
);
4501 else if (itstate
& 0x0f)
4503 /* We are in a conditional block. Check the condition. */
4504 int cond
= itstate
>> 4;
4506 if (! condition_true (cond
, status
))
4507 /* Advance to the next instruction. All the 32-bit
4508 instructions share a common prefix. */
4509 return MAKE_THUMB_ADDR (pc
+ thumb_insn_size (inst1
));
4511 /* Otherwise, handle the instruction normally. */
4514 if ((inst1
& 0xff00) == 0xbd00) /* pop {rlist, pc} */
4518 /* Fetch the saved PC from the stack. It's stored above
4519 all of the other registers. */
4520 offset
= bitcount (bits (inst1
, 0, 7)) * INT_REGISTER_SIZE
;
4521 sp
= get_frame_register_unsigned (frame
, ARM_SP_REGNUM
);
4522 nextpc
= read_memory_unsigned_integer (sp
+ offset
, 4, byte_order
);
4524 else if ((inst1
& 0xf000) == 0xd000) /* conditional branch */
4526 unsigned long cond
= bits (inst1
, 8, 11);
4527 if (cond
== 0x0f) /* 0x0f = SWI */
4529 struct gdbarch_tdep
*tdep
;
4530 tdep
= gdbarch_tdep (gdbarch
);
4532 if (tdep
->syscall_next_pc
!= NULL
)
4533 nextpc
= tdep
->syscall_next_pc (frame
);
4536 else if (cond
!= 0x0f && condition_true (cond
, status
))
4537 nextpc
= pc_val
+ (sbits (inst1
, 0, 7) << 1);
4539 else if ((inst1
& 0xf800) == 0xe000) /* unconditional branch */
4541 nextpc
= pc_val
+ (sbits (inst1
, 0, 10) << 1);
4543 else if (thumb_insn_size (inst1
) == 4) /* 32-bit instruction */
4545 unsigned short inst2
;
4546 inst2
= read_memory_unsigned_integer (pc
+ 2, 2, byte_order_for_code
);
4548 /* Default to the next instruction. */
4550 nextpc
= MAKE_THUMB_ADDR (nextpc
);
4552 if ((inst1
& 0xf800) == 0xf000 && (inst2
& 0x8000) == 0x8000)
4554 /* Branches and miscellaneous control instructions. */
4556 if ((inst2
& 0x1000) != 0 || (inst2
& 0xd001) == 0xc000)
4559 int j1
, j2
, imm1
, imm2
;
4561 imm1
= sbits (inst1
, 0, 10);
4562 imm2
= bits (inst2
, 0, 10);
4563 j1
= bit (inst2
, 13);
4564 j2
= bit (inst2
, 11);
4566 offset
= ((imm1
<< 12) + (imm2
<< 1));
4567 offset
^= ((!j2
) << 22) | ((!j1
) << 23);
4569 nextpc
= pc_val
+ offset
;
4570 /* For BLX make sure to clear the low bits. */
4571 if (bit (inst2
, 12) == 0)
4572 nextpc
= nextpc
& 0xfffffffc;
4574 else if (inst1
== 0xf3de && (inst2
& 0xff00) == 0x3f00)
4576 /* SUBS PC, LR, #imm8. */
4577 nextpc
= get_frame_register_unsigned (frame
, ARM_LR_REGNUM
);
4578 nextpc
-= inst2
& 0x00ff;
4580 else if ((inst2
& 0xd000) == 0x8000 && (inst1
& 0x0380) != 0x0380)
4582 /* Conditional branch. */
4583 if (condition_true (bits (inst1
, 6, 9), status
))
4585 int sign
, j1
, j2
, imm1
, imm2
;
4587 sign
= sbits (inst1
, 10, 10);
4588 imm1
= bits (inst1
, 0, 5);
4589 imm2
= bits (inst2
, 0, 10);
4590 j1
= bit (inst2
, 13);
4591 j2
= bit (inst2
, 11);
4593 offset
= (sign
<< 20) + (j2
<< 19) + (j1
<< 18);
4594 offset
+= (imm1
<< 12) + (imm2
<< 1);
4596 nextpc
= pc_val
+ offset
;
4600 else if ((inst1
& 0xfe50) == 0xe810)
4602 /* Load multiple or RFE. */
4603 int rn
, offset
, load_pc
= 1;
4605 rn
= bits (inst1
, 0, 3);
4606 if (bit (inst1
, 7) && !bit (inst1
, 8))
4609 if (!bit (inst2
, 15))
4611 offset
= bitcount (inst2
) * 4 - 4;
4613 else if (!bit (inst1
, 7) && bit (inst1
, 8))
4616 if (!bit (inst2
, 15))
4620 else if (bit (inst1
, 7) && bit (inst1
, 8))
4625 else if (!bit (inst1
, 7) && !bit (inst1
, 8))
4635 CORE_ADDR addr
= get_frame_register_unsigned (frame
, rn
);
4636 nextpc
= get_frame_memory_unsigned (frame
, addr
+ offset
, 4);
4639 else if ((inst1
& 0xffef) == 0xea4f && (inst2
& 0xfff0) == 0x0f00)
4641 /* MOV PC or MOVS PC. */
4642 nextpc
= get_frame_register_unsigned (frame
, bits (inst2
, 0, 3));
4643 nextpc
= MAKE_THUMB_ADDR (nextpc
);
4645 else if ((inst1
& 0xff70) == 0xf850 && (inst2
& 0xf000) == 0xf000)
4649 int rn
, load_pc
= 1;
4651 rn
= bits (inst1
, 0, 3);
4652 base
= get_frame_register_unsigned (frame
, rn
);
4653 if (rn
== ARM_PC_REGNUM
)
4655 base
= (base
+ 4) & ~(CORE_ADDR
) 0x3;
4657 base
+= bits (inst2
, 0, 11);
4659 base
-= bits (inst2
, 0, 11);
4661 else if (bit (inst1
, 7))
4662 base
+= bits (inst2
, 0, 11);
4663 else if (bit (inst2
, 11))
4665 if (bit (inst2
, 10))
4668 base
+= bits (inst2
, 0, 7);
4670 base
-= bits (inst2
, 0, 7);
4673 else if ((inst2
& 0x0fc0) == 0x0000)
4675 int shift
= bits (inst2
, 4, 5), rm
= bits (inst2
, 0, 3);
4676 base
+= get_frame_register_unsigned (frame
, rm
) << shift
;
4683 nextpc
= get_frame_memory_unsigned (frame
, base
, 4);
4685 else if ((inst1
& 0xfff0) == 0xe8d0 && (inst2
& 0xfff0) == 0xf000)
4688 CORE_ADDR tbl_reg
, table
, offset
, length
;
4690 tbl_reg
= bits (inst1
, 0, 3);
4691 if (tbl_reg
== 0x0f)
4692 table
= pc
+ 4; /* Regcache copy of PC isn't right yet. */
4694 table
= get_frame_register_unsigned (frame
, tbl_reg
);
4696 offset
= get_frame_register_unsigned (frame
, bits (inst2
, 0, 3));
4697 length
= 2 * get_frame_memory_unsigned (frame
, table
+ offset
, 1);
4698 nextpc
= pc_val
+ length
;
4700 else if ((inst1
& 0xfff0) == 0xe8d0 && (inst2
& 0xfff0) == 0xf010)
4703 CORE_ADDR tbl_reg
, table
, offset
, length
;
4705 tbl_reg
= bits (inst1
, 0, 3);
4706 if (tbl_reg
== 0x0f)
4707 table
= pc
+ 4; /* Regcache copy of PC isn't right yet. */
4709 table
= get_frame_register_unsigned (frame
, tbl_reg
);
4711 offset
= 2 * get_frame_register_unsigned (frame
, bits (inst2
, 0, 3));
4712 length
= 2 * get_frame_memory_unsigned (frame
, table
+ offset
, 2);
4713 nextpc
= pc_val
+ length
;
4716 else if ((inst1
& 0xff00) == 0x4700) /* bx REG, blx REG */
4718 if (bits (inst1
, 3, 6) == 0x0f)
4719 nextpc
= UNMAKE_THUMB_ADDR (pc_val
);
4721 nextpc
= get_frame_register_unsigned (frame
, bits (inst1
, 3, 6));
4723 else if ((inst1
& 0xff87) == 0x4687) /* mov pc, REG */
4725 if (bits (inst1
, 3, 6) == 0x0f)
4728 nextpc
= get_frame_register_unsigned (frame
, bits (inst1
, 3, 6));
4730 nextpc
= MAKE_THUMB_ADDR (nextpc
);
4732 else if ((inst1
& 0xf500) == 0xb100)
4735 int imm
= (bit (inst1
, 9) << 6) + (bits (inst1
, 3, 7) << 1);
4736 ULONGEST reg
= get_frame_register_unsigned (frame
, bits (inst1
, 0, 2));
4738 if (bit (inst1
, 11) && reg
!= 0)
4739 nextpc
= pc_val
+ imm
;
4740 else if (!bit (inst1
, 11) && reg
== 0)
4741 nextpc
= pc_val
+ imm
;
4746 /* Get the raw next address. PC is the current program counter, in
4747 FRAME, which is assumed to be executing in ARM mode.
4749 The value returned has the execution state of the next instruction
4750 encoded in it. Use IS_THUMB_ADDR () to see whether the instruction is
4751 in Thumb-State, and gdbarch_addr_bits_remove () to get the plain memory
4755 arm_get_next_pc_raw (struct frame_info
*frame
, CORE_ADDR pc
)
4757 struct gdbarch
*gdbarch
= get_frame_arch (frame
);
4758 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
4759 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
4760 unsigned long pc_val
;
4761 unsigned long this_instr
;
4762 unsigned long status
;
4765 pc_val
= (unsigned long) pc
;
4766 this_instr
= read_memory_unsigned_integer (pc
, 4, byte_order_for_code
);
4768 status
= get_frame_register_unsigned (frame
, ARM_PS_REGNUM
);
4769 nextpc
= (CORE_ADDR
) (pc_val
+ 4); /* Default case */
4771 if (bits (this_instr
, 28, 31) == INST_NV
)
4772 switch (bits (this_instr
, 24, 27))
4777 /* Branch with Link and change to Thumb. */
4778 nextpc
= BranchDest (pc
, this_instr
);
4779 nextpc
|= bit (this_instr
, 24) << 1;
4780 nextpc
= MAKE_THUMB_ADDR (nextpc
);
4786 /* Coprocessor register transfer. */
4787 if (bits (this_instr
, 12, 15) == 15)
4788 error (_("Invalid update to pc in instruction"));
4791 else if (condition_true (bits (this_instr
, 28, 31), status
))
4793 switch (bits (this_instr
, 24, 27))
4796 case 0x1: /* data processing */
4800 unsigned long operand1
, operand2
, result
= 0;
4804 if (bits (this_instr
, 12, 15) != 15)
4807 if (bits (this_instr
, 22, 25) == 0
4808 && bits (this_instr
, 4, 7) == 9) /* multiply */
4809 error (_("Invalid update to pc in instruction"));
4811 /* BX <reg>, BLX <reg> */
4812 if (bits (this_instr
, 4, 27) == 0x12fff1
4813 || bits (this_instr
, 4, 27) == 0x12fff3)
4815 rn
= bits (this_instr
, 0, 3);
4816 nextpc
= ((rn
== ARM_PC_REGNUM
)
4818 : get_frame_register_unsigned (frame
, rn
));
4823 /* Multiply into PC. */
4824 c
= (status
& FLAG_C
) ? 1 : 0;
4825 rn
= bits (this_instr
, 16, 19);
4826 operand1
= ((rn
== ARM_PC_REGNUM
)
4828 : get_frame_register_unsigned (frame
, rn
));
4830 if (bit (this_instr
, 25))
4832 unsigned long immval
= bits (this_instr
, 0, 7);
4833 unsigned long rotate
= 2 * bits (this_instr
, 8, 11);
4834 operand2
= ((immval
>> rotate
) | (immval
<< (32 - rotate
)))
4837 else /* operand 2 is a shifted register. */
4838 operand2
= shifted_reg_val (frame
, this_instr
, c
,
4841 switch (bits (this_instr
, 21, 24))
4844 result
= operand1
& operand2
;
4848 result
= operand1
^ operand2
;
4852 result
= operand1
- operand2
;
4856 result
= operand2
- operand1
;
4860 result
= operand1
+ operand2
;
4864 result
= operand1
+ operand2
+ c
;
4868 result
= operand1
- operand2
+ c
;
4872 result
= operand2
- operand1
+ c
;
4878 case 0xb: /* tst, teq, cmp, cmn */
4879 result
= (unsigned long) nextpc
;
4883 result
= operand1
| operand2
;
4887 /* Always step into a function. */
4892 result
= operand1
& ~operand2
;
4900 /* In 26-bit APCS the bottom two bits of the result are
4901 ignored, and we always end up in ARM state. */
4903 nextpc
= arm_addr_bits_remove (gdbarch
, result
);
4911 case 0x5: /* data transfer */
4914 if (bits (this_instr
, 25, 27) == 0x3 && bit (this_instr
, 4) == 1)
4916 /* Media instructions and architecturally undefined
4921 if (bit (this_instr
, 20))
4924 if (bits (this_instr
, 12, 15) == 15)
4930 if (bit (this_instr
, 22))
4931 error (_("Invalid update to pc in instruction"));
4933 /* byte write to PC */
4934 rn
= bits (this_instr
, 16, 19);
4935 base
= ((rn
== ARM_PC_REGNUM
)
4937 : get_frame_register_unsigned (frame
, rn
));
4939 if (bit (this_instr
, 24))
4942 int c
= (status
& FLAG_C
) ? 1 : 0;
4943 unsigned long offset
=
4944 (bit (this_instr
, 25)
4945 ? shifted_reg_val (frame
, this_instr
, c
, pc_val
, status
)
4946 : bits (this_instr
, 0, 11));
4948 if (bit (this_instr
, 23))
4954 (CORE_ADDR
) read_memory_unsigned_integer ((CORE_ADDR
) base
,
4961 case 0x9: /* block transfer */
4962 if (bit (this_instr
, 20))
4965 if (bit (this_instr
, 15))
4969 unsigned long rn_val
4970 = get_frame_register_unsigned (frame
,
4971 bits (this_instr
, 16, 19));
4973 if (bit (this_instr
, 23))
4976 unsigned long reglist
= bits (this_instr
, 0, 14);
4977 offset
= bitcount (reglist
) * 4;
4978 if (bit (this_instr
, 24)) /* pre */
4981 else if (bit (this_instr
, 24))
4985 (CORE_ADDR
) read_memory_unsigned_integer ((CORE_ADDR
)
4992 case 0xb: /* branch & link */
4993 case 0xa: /* branch */
4995 nextpc
= BranchDest (pc
, this_instr
);
5001 case 0xe: /* coproc ops */
5005 struct gdbarch_tdep
*tdep
;
5006 tdep
= gdbarch_tdep (gdbarch
);
5008 if (tdep
->syscall_next_pc
!= NULL
)
5009 nextpc
= tdep
->syscall_next_pc (frame
);
5015 fprintf_filtered (gdb_stderr
, _("Bad bit-field extraction\n"));
5023 /* Determine next PC after current instruction executes. Will call either
5024 arm_get_next_pc_raw or thumb_get_next_pc_raw. Error out if infinite
5025 loop is detected. */
5028 arm_get_next_pc (struct frame_info
*frame
, CORE_ADDR pc
)
5032 if (arm_frame_is_thumb (frame
))
5033 nextpc
= thumb_get_next_pc_raw (frame
, pc
);
5035 nextpc
= arm_get_next_pc_raw (frame
, pc
);
5040 /* Like insert_single_step_breakpoint, but make sure we use a breakpoint
5041 of the appropriate mode (as encoded in the PC value), even if this
5042 differs from what would be expected according to the symbol tables. */
5045 arm_insert_single_step_breakpoint (struct gdbarch
*gdbarch
,
5046 struct address_space
*aspace
,
5049 struct cleanup
*old_chain
5050 = make_cleanup_restore_integer (&arm_override_mode
);
5052 arm_override_mode
= IS_THUMB_ADDR (pc
);
5053 pc
= gdbarch_addr_bits_remove (gdbarch
, pc
);
5055 insert_single_step_breakpoint (gdbarch
, aspace
, pc
);
5057 do_cleanups (old_chain
);
5060 /* Checks for an atomic sequence of instructions beginning with a LDREX{,B,H,D}
5061 instruction and ending with a STREX{,B,H,D} instruction. If such a sequence
5062 is found, attempt to step through it. A breakpoint is placed at the end of
5066 thumb_deal_with_atomic_sequence_raw (struct frame_info
*frame
)
5068 struct gdbarch
*gdbarch
= get_frame_arch (frame
);
5069 struct address_space
*aspace
= get_frame_address_space (frame
);
5070 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
5071 CORE_ADDR pc
= get_frame_pc (frame
);
5072 CORE_ADDR breaks
[2] = {-1, -1};
5074 unsigned short insn1
, insn2
;
5077 int last_breakpoint
= 0; /* Defaults to 0 (no breakpoints placed). */
5078 const int atomic_sequence_length
= 16; /* Instruction sequence length. */
5079 ULONGEST status
, itstate
;
5081 /* We currently do not support atomic sequences within an IT block. */
5082 status
= get_frame_register_unsigned (frame
, ARM_PS_REGNUM
);
5083 itstate
= ((status
>> 8) & 0xfc) | ((status
>> 25) & 0x3);
5087 /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction. */
5088 insn1
= read_memory_unsigned_integer (loc
, 2, byte_order_for_code
);
5090 if (thumb_insn_size (insn1
) != 4)
5093 insn2
= read_memory_unsigned_integer (loc
, 2, byte_order_for_code
);
5095 if (!((insn1
& 0xfff0) == 0xe850
5096 || ((insn1
& 0xfff0) == 0xe8d0 && (insn2
& 0x00c0) == 0x0040)))
5099 /* Assume that no atomic sequence is longer than "atomic_sequence_length"
5101 for (insn_count
= 0; insn_count
< atomic_sequence_length
; ++insn_count
)
5103 insn1
= read_memory_unsigned_integer (loc
, 2, byte_order_for_code
);
5106 if (thumb_insn_size (insn1
) != 4)
5108 /* Assume that there is at most one conditional branch in the
5109 atomic sequence. If a conditional branch is found, put a
5110 breakpoint in its destination address. */
5111 if ((insn1
& 0xf000) == 0xd000 && bits (insn1
, 8, 11) != 0x0f)
5113 if (last_breakpoint
> 0)
5114 return 0; /* More than one conditional branch found,
5115 fallback to the standard code. */
5117 breaks
[1] = loc
+ 2 + (sbits (insn1
, 0, 7) << 1);
5121 /* We do not support atomic sequences that use any *other*
5122 instructions but conditional branches to change the PC.
5123 Fall back to standard code to avoid losing control of
5125 else if (thumb_instruction_changes_pc (insn1
))
5130 insn2
= read_memory_unsigned_integer (loc
, 2, byte_order_for_code
);
5133 /* Assume that there is at most one conditional branch in the
5134 atomic sequence. If a conditional branch is found, put a
5135 breakpoint in its destination address. */
5136 if ((insn1
& 0xf800) == 0xf000
5137 && (insn2
& 0xd000) == 0x8000
5138 && (insn1
& 0x0380) != 0x0380)
5140 int sign
, j1
, j2
, imm1
, imm2
;
5141 unsigned int offset
;
5143 sign
= sbits (insn1
, 10, 10);
5144 imm1
= bits (insn1
, 0, 5);
5145 imm2
= bits (insn2
, 0, 10);
5146 j1
= bit (insn2
, 13);
5147 j2
= bit (insn2
, 11);
5149 offset
= (sign
<< 20) + (j2
<< 19) + (j1
<< 18);
5150 offset
+= (imm1
<< 12) + (imm2
<< 1);
5152 if (last_breakpoint
> 0)
5153 return 0; /* More than one conditional branch found,
5154 fallback to the standard code. */
5156 breaks
[1] = loc
+ offset
;
5160 /* We do not support atomic sequences that use any *other*
5161 instructions but conditional branches to change the PC.
5162 Fall back to standard code to avoid losing control of
5164 else if (thumb2_instruction_changes_pc (insn1
, insn2
))
5167 /* If we find a strex{,b,h,d}, we're done. */
5168 if ((insn1
& 0xfff0) == 0xe840
5169 || ((insn1
& 0xfff0) == 0xe8c0 && (insn2
& 0x00c0) == 0x0040))
5174 /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence. */
5175 if (insn_count
== atomic_sequence_length
)
5178 /* Insert a breakpoint right after the end of the atomic sequence. */
5181 /* Check for duplicated breakpoints. Check also for a breakpoint
5182 placed (branch instruction's destination) anywhere in sequence. */
5184 && (breaks
[1] == breaks
[0]
5185 || (breaks
[1] >= pc
&& breaks
[1] < loc
)))
5186 last_breakpoint
= 0;
5188 /* Effectively inserts the breakpoints. */
5189 for (index
= 0; index
<= last_breakpoint
; index
++)
5190 arm_insert_single_step_breakpoint (gdbarch
, aspace
,
5191 MAKE_THUMB_ADDR (breaks
[index
]));
5197 arm_deal_with_atomic_sequence_raw (struct frame_info
*frame
)
5199 struct gdbarch
*gdbarch
= get_frame_arch (frame
);
5200 struct address_space
*aspace
= get_frame_address_space (frame
);
5201 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
5202 CORE_ADDR pc
= get_frame_pc (frame
);
5203 CORE_ADDR breaks
[2] = {-1, -1};
5208 int last_breakpoint
= 0; /* Defaults to 0 (no breakpoints placed). */
5209 const int atomic_sequence_length
= 16; /* Instruction sequence length. */
5211 /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction.
5212 Note that we do not currently support conditionally executed atomic
5214 insn
= read_memory_unsigned_integer (loc
, 4, byte_order_for_code
);
5216 if ((insn
& 0xff9000f0) != 0xe1900090)
5219 /* Assume that no atomic sequence is longer than "atomic_sequence_length"
5221 for (insn_count
= 0; insn_count
< atomic_sequence_length
; ++insn_count
)
5223 insn
= read_memory_unsigned_integer (loc
, 4, byte_order_for_code
);
5226 /* Assume that there is at most one conditional branch in the atomic
5227 sequence. If a conditional branch is found, put a breakpoint in
5228 its destination address. */
5229 if (bits (insn
, 24, 27) == 0xa)
5231 if (last_breakpoint
> 0)
5232 return 0; /* More than one conditional branch found, fallback
5233 to the standard single-step code. */
5235 breaks
[1] = BranchDest (loc
- 4, insn
);
5239 /* We do not support atomic sequences that use any *other* instructions
5240 but conditional branches to change the PC. Fall back to standard
5241 code to avoid losing control of execution. */
5242 else if (arm_instruction_changes_pc (insn
))
5245 /* If we find a strex{,b,h,d}, we're done. */
5246 if ((insn
& 0xff9000f0) == 0xe1800090)
5250 /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence. */
5251 if (insn_count
== atomic_sequence_length
)
5254 /* Insert a breakpoint right after the end of the atomic sequence. */
5257 /* Check for duplicated breakpoints. Check also for a breakpoint
5258 placed (branch instruction's destination) anywhere in sequence. */
5260 && (breaks
[1] == breaks
[0]
5261 || (breaks
[1] >= pc
&& breaks
[1] < loc
)))
5262 last_breakpoint
= 0;
5264 /* Effectively inserts the breakpoints. */
5265 for (index
= 0; index
<= last_breakpoint
; index
++)
5266 arm_insert_single_step_breakpoint (gdbarch
, aspace
, breaks
[index
]);
/* Dispatch atomic-sequence handling to the mode-specific worker,
   depending on whether FRAME is executing Thumb or ARM code.  */

static int
arm_deal_with_atomic_sequence (struct frame_info *frame)
{
  if (arm_frame_is_thumb (frame))
    return thumb_deal_with_atomic_sequence_raw (frame);

  return arm_deal_with_atomic_sequence_raw (frame);
}
5280 /* single_step() is called just before we want to resume the inferior,
5281 if we want to single-step it but there is no hardware or kernel
5282 single-step support. We find the target of the coming instruction
5283 and breakpoint it. */
5286 arm_software_single_step (struct frame_info
*frame
)
5288 struct gdbarch
*gdbarch
= get_frame_arch (frame
);
5289 struct address_space
*aspace
= get_frame_address_space (frame
);
5292 if (arm_deal_with_atomic_sequence (frame
))
5295 next_pc
= arm_get_next_pc (frame
, get_frame_pc (frame
));
5296 arm_insert_single_step_breakpoint (gdbarch
, aspace
, next_pc
);
5301 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
5302 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
5303 NULL if an error occurs. BUF is freed. */
5306 extend_buffer_earlier (gdb_byte
*buf
, CORE_ADDR endaddr
,
5307 int old_len
, int new_len
)
5310 int bytes_to_read
= new_len
- old_len
;
5312 new_buf
= (gdb_byte
*) xmalloc (new_len
);
5313 memcpy (new_buf
+ bytes_to_read
, buf
, old_len
);
5315 if (target_read_memory (endaddr
- new_len
, new_buf
, bytes_to_read
) != 0)
5323 /* An IT block is at most the 2-byte IT instruction followed by
5324 four 4-byte instructions. The furthest back we must search to
5325 find an IT block that affects the current instruction is thus
5326 2 + 3 * 4 == 14 bytes. */
5327 #define MAX_IT_BLOCK_PREFIX 14
5329 /* Use a quick scan if there are more than this many bytes of
5331 #define IT_SCAN_THRESHOLD 32
5333 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
5334 A breakpoint in an IT block may not be hit, depending on the
5337 arm_adjust_breakpoint_address (struct gdbarch
*gdbarch
, CORE_ADDR bpaddr
)
5341 CORE_ADDR boundary
, func_start
;
5343 enum bfd_endian order
= gdbarch_byte_order_for_code (gdbarch
);
5344 int i
, any
, last_it
, last_it_count
;
5346 /* If we are using BKPT breakpoints, none of this is necessary. */
5347 if (gdbarch_tdep (gdbarch
)->thumb2_breakpoint
== NULL
)
5350 /* ARM mode does not have this problem. */
5351 if (!arm_pc_is_thumb (gdbarch
, bpaddr
))
5354 /* We are setting a breakpoint in Thumb code that could potentially
5355 contain an IT block. The first step is to find how much Thumb
5356 code there is; we do not need to read outside of known Thumb
5358 map_type
= arm_find_mapping_symbol (bpaddr
, &boundary
);
5360 /* Thumb-2 code must have mapping symbols to have a chance. */
5363 bpaddr
= gdbarch_addr_bits_remove (gdbarch
, bpaddr
);
5365 if (find_pc_partial_function (bpaddr
, NULL
, &func_start
, NULL
)
5366 && func_start
> boundary
)
5367 boundary
= func_start
;
5369 /* Search for a candidate IT instruction. We have to do some fancy
5370 footwork to distinguish a real IT instruction from the second
5371 half of a 32-bit instruction, but there is no need for that if
5372 there's no candidate. */
5373 buf_len
= min (bpaddr
- boundary
, MAX_IT_BLOCK_PREFIX
);
5375 /* No room for an IT instruction. */
5378 buf
= (gdb_byte
*) xmalloc (buf_len
);
5379 if (target_read_memory (bpaddr
- buf_len
, buf
, buf_len
) != 0)
5382 for (i
= 0; i
< buf_len
; i
+= 2)
5384 unsigned short inst1
= extract_unsigned_integer (&buf
[i
], 2, order
);
5385 if ((inst1
& 0xff00) == 0xbf00 && (inst1
& 0x000f) != 0)
5397 /* OK, the code bytes before this instruction contain at least one
5398 halfword which resembles an IT instruction. We know that it's
5399 Thumb code, but there are still two possibilities. Either the
5400 halfword really is an IT instruction, or it is the second half of
5401 a 32-bit Thumb instruction. The only way we can tell is to
5402 scan forwards from a known instruction boundary. */
5403 if (bpaddr
- boundary
> IT_SCAN_THRESHOLD
)
5407 /* There's a lot of code before this instruction. Start with an
5408 optimistic search; it's easy to recognize halfwords that can
5409 not be the start of a 32-bit instruction, and use that to
5410 lock on to the instruction boundaries. */
5411 buf
= extend_buffer_earlier (buf
, bpaddr
, buf_len
, IT_SCAN_THRESHOLD
);
5414 buf_len
= IT_SCAN_THRESHOLD
;
5417 for (i
= 0; i
< buf_len
- sizeof (buf
) && ! definite
; i
+= 2)
5419 unsigned short inst1
= extract_unsigned_integer (&buf
[i
], 2, order
);
5420 if (thumb_insn_size (inst1
) == 2)
5427 /* At this point, if DEFINITE, BUF[I] is the first place we
5428 are sure that we know the instruction boundaries, and it is far
5429 enough from BPADDR that we could not miss an IT instruction
5430 affecting BPADDR. If ! DEFINITE, give up - start from a
5434 buf
= extend_buffer_earlier (buf
, bpaddr
, buf_len
,
5438 buf_len
= bpaddr
- boundary
;
5444 buf
= extend_buffer_earlier (buf
, bpaddr
, buf_len
, bpaddr
- boundary
);
5447 buf_len
= bpaddr
- boundary
;
5451 /* Scan forwards. Find the last IT instruction before BPADDR. */
5456 unsigned short inst1
= extract_unsigned_integer (&buf
[i
], 2, order
);
5458 if ((inst1
& 0xff00) == 0xbf00 && (inst1
& 0x000f) != 0)
5463 else if (inst1
& 0x0002)
5465 else if (inst1
& 0x0004)
5470 i
+= thumb_insn_size (inst1
);
5476 /* There wasn't really an IT instruction after all. */
5479 if (last_it_count
< 1)
5480 /* It was too far away. */
5483 /* This really is a trouble spot. Move the breakpoint to the IT
5485 return bpaddr
- buf_len
+ last_it
;
5488 /* ARM displaced stepping support.
5490 Generally ARM displaced stepping works as follows:
5492 1. When an instruction is to be single-stepped, it is first decoded by
5493 arm_process_displaced_insn (called from arm_displaced_step_copy_insn).
5494 Depending on the type of instruction, it is then copied to a scratch
5495 location, possibly in a modified form. The copy_* set of functions
5496 performs such modification, as necessary. A breakpoint is placed after
5497 the modified instruction in the scratch space to return control to GDB.
5498 Note in particular that instructions which modify the PC will no longer
5499 do so after modification.
5501 2. The instruction is single-stepped, by setting the PC to the scratch
5502 location address, and resuming. Control returns to GDB when the
5505 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
5506 function used for the current instruction. This function's job is to
5507 put the CPU/memory state back to what it would have been if the
5508 instruction had been executed unmodified in its original location. */
5510 /* NOP instruction (mov r0, r0). */
5511 #define ARM_NOP 0xe1a00000
5512 #define THUMB_NOP 0x4600
5514 /* Helper for register reads for displaced stepping. In particular, this
5515 returns the PC as it would be seen by the instruction at its original
5519 displaced_read_reg (struct regcache
*regs
, struct displaced_step_closure
*dsc
,
5523 CORE_ADDR from
= dsc
->insn_addr
;
5525 if (regno
== ARM_PC_REGNUM
)
5527 /* Compute pipeline offset:
5528 - When executing an ARM instruction, PC reads as the address of the
5529 current instruction plus 8.
5530 - When executing a Thumb instruction, PC reads as the address of the
5531 current instruction plus 4. */
5538 if (debug_displaced
)
5539 fprintf_unfiltered (gdb_stdlog
, "displaced: read pc value %.8lx\n",
5540 (unsigned long) from
);
5541 return (ULONGEST
) from
;
5545 regcache_cooked_read_unsigned (regs
, regno
, &ret
);
5546 if (debug_displaced
)
5547 fprintf_unfiltered (gdb_stdlog
, "displaced: read r%d value %.8lx\n",
5548 regno
, (unsigned long) ret
);
5554 displaced_in_arm_mode (struct regcache
*regs
)
5557 ULONGEST t_bit
= arm_psr_thumb_bit (get_regcache_arch (regs
));
5559 regcache_cooked_read_unsigned (regs
, ARM_PS_REGNUM
, &ps
);
5561 return (ps
& t_bit
) == 0;
5564 /* Write to the PC as from a branch instruction. */
5567 branch_write_pc (struct regcache
*regs
, struct displaced_step_closure
*dsc
,
5571 /* Note: If bits 0/1 are set, this branch would be unpredictable for
5572 architecture versions < 6. */
5573 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
,
5574 val
& ~(ULONGEST
) 0x3);
5576 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
,
5577 val
& ~(ULONGEST
) 0x1);
5580 /* Write to the PC as from a branch-exchange instruction. */
5583 bx_write_pc (struct regcache
*regs
, ULONGEST val
)
5586 ULONGEST t_bit
= arm_psr_thumb_bit (get_regcache_arch (regs
));
5588 regcache_cooked_read_unsigned (regs
, ARM_PS_REGNUM
, &ps
);
5592 regcache_cooked_write_unsigned (regs
, ARM_PS_REGNUM
, ps
| t_bit
);
5593 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
, val
& 0xfffffffe);
5595 else if ((val
& 2) == 0)
5597 regcache_cooked_write_unsigned (regs
, ARM_PS_REGNUM
, ps
& ~t_bit
);
5598 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
, val
);
5602 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
5603 mode, align dest to 4 bytes). */
5604 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
5605 regcache_cooked_write_unsigned (regs
, ARM_PS_REGNUM
, ps
& ~t_bit
);
5606 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
, val
& 0xfffffffc);
5610 /* Write to the PC as if from a load instruction. */
5613 load_write_pc (struct regcache
*regs
, struct displaced_step_closure
*dsc
,
5616 if (DISPLACED_STEPPING_ARCH_VERSION
>= 5)
5617 bx_write_pc (regs
, val
);
5619 branch_write_pc (regs
, dsc
, val
);
5622 /* Write to the PC as if from an ALU instruction. */
5625 alu_write_pc (struct regcache
*regs
, struct displaced_step_closure
*dsc
,
5628 if (DISPLACED_STEPPING_ARCH_VERSION
>= 7 && !dsc
->is_thumb
)
5629 bx_write_pc (regs
, val
);
5631 branch_write_pc (regs
, dsc
, val
);
5634 /* Helper for writing to registers for displaced stepping. Writing to the PC
5635 has a varying effects depending on the instruction which does the write:
5636 this is controlled by the WRITE_PC argument. */
5639 displaced_write_reg (struct regcache
*regs
, struct displaced_step_closure
*dsc
,
5640 int regno
, ULONGEST val
, enum pc_write_style write_pc
)
5642 if (regno
== ARM_PC_REGNUM
)
5644 if (debug_displaced
)
5645 fprintf_unfiltered (gdb_stdlog
, "displaced: writing pc %.8lx\n",
5646 (unsigned long) val
);
5649 case BRANCH_WRITE_PC
:
5650 branch_write_pc (regs
, dsc
, val
);
5654 bx_write_pc (regs
, val
);
5658 load_write_pc (regs
, dsc
, val
);
5662 alu_write_pc (regs
, dsc
, val
);
5665 case CANNOT_WRITE_PC
:
5666 warning (_("Instruction wrote to PC in an unexpected way when "
5667 "single-stepping"));
5671 internal_error (__FILE__
, __LINE__
,
5672 _("Invalid argument to displaced_write_reg"));
5675 dsc
->wrote_to_pc
= 1;
5679 if (debug_displaced
)
5680 fprintf_unfiltered (gdb_stdlog
, "displaced: writing r%d value %.8lx\n",
5681 regno
, (unsigned long) val
);
5682 regcache_cooked_write_unsigned (regs
, regno
, val
);
/* This function is used to concisely determine if an instruction INSN
   references PC.  Register fields of interest in INSN should have the
   corresponding fields of BITMASK set to 0b1111.  The function
   returns return 1 if any of these fields in INSN reference the PC
   (also 0b1111, r15), else it returns 0.  */

static int
insn_references_pc (uint32_t insn, uint32_t bitmask)
{
  uint32_t probe = 1;

  while (bitmask != 0)
    {
      uint32_t field_mask;

      /* Advance PROBE to the lowest bit still set in BITMASK.  */
      while (probe != 0 && (bitmask & probe) == 0)
	probe <<= 1;

      if (probe == 0)
	return 0;

      /* A register field is four bits wide, anchored at PROBE.  */
      field_mask = probe * 0xf;

      /* All four bits set in INSN means this field names r15 (PC).  */
      if ((insn & field_mask) == field_mask)
	return 1;

      bitmask &= ~field_mask;
    }

  return 0;
}
5718 /* The simplest copy function. Many instructions have the same effect no
5719 matter what address they are executed at: in those cases, use this. */
5722 arm_copy_unmodified (struct gdbarch
*gdbarch
, uint32_t insn
,
5723 const char *iname
, struct displaced_step_closure
*dsc
)
5725 if (debug_displaced
)
5726 fprintf_unfiltered (gdb_stdlog
, "displaced: copying insn %.8lx, "
5727 "opcode/class '%s' unmodified\n", (unsigned long) insn
,
5730 dsc
->modinsn
[0] = insn
;
5736 thumb_copy_unmodified_32bit (struct gdbarch
*gdbarch
, uint16_t insn1
,
5737 uint16_t insn2
, const char *iname
,
5738 struct displaced_step_closure
*dsc
)
5740 if (debug_displaced
)
5741 fprintf_unfiltered (gdb_stdlog
, "displaced: copying insn %.4x %.4x, "
5742 "opcode/class '%s' unmodified\n", insn1
, insn2
,
5745 dsc
->modinsn
[0] = insn1
;
5746 dsc
->modinsn
[1] = insn2
;
5752 /* Copy 16-bit Thumb(Thumb and 16-bit Thumb-2) instruction without any
5755 thumb_copy_unmodified_16bit (struct gdbarch
*gdbarch
, unsigned int insn
,
5757 struct displaced_step_closure
*dsc
)
5759 if (debug_displaced
)
5760 fprintf_unfiltered (gdb_stdlog
, "displaced: copying insn %.4x, "
5761 "opcode/class '%s' unmodified\n", insn
,
5764 dsc
->modinsn
[0] = insn
;
5769 /* Preload instructions with immediate offset. */
5772 cleanup_preload (struct gdbarch
*gdbarch
,
5773 struct regcache
*regs
, struct displaced_step_closure
*dsc
)
5775 displaced_write_reg (regs
, dsc
, 0, dsc
->tmp
[0], CANNOT_WRITE_PC
);
5776 if (!dsc
->u
.preload
.immed
)
5777 displaced_write_reg (regs
, dsc
, 1, dsc
->tmp
[1], CANNOT_WRITE_PC
);
5781 install_preload (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5782 struct displaced_step_closure
*dsc
, unsigned int rn
)
5785 /* Preload instructions:
5787 {pli/pld} [rn, #+/-imm]
5789 {pli/pld} [r0, #+/-imm]. */
5791 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5792 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
5793 displaced_write_reg (regs
, dsc
, 0, rn_val
, CANNOT_WRITE_PC
);
5794 dsc
->u
.preload
.immed
= 1;
5796 dsc
->cleanup
= &cleanup_preload
;
5800 arm_copy_preload (struct gdbarch
*gdbarch
, uint32_t insn
, struct regcache
*regs
,
5801 struct displaced_step_closure
*dsc
)
5803 unsigned int rn
= bits (insn
, 16, 19);
5805 if (!insn_references_pc (insn
, 0x000f0000ul
))
5806 return arm_copy_unmodified (gdbarch
, insn
, "preload", dsc
);
5808 if (debug_displaced
)
5809 fprintf_unfiltered (gdb_stdlog
, "displaced: copying preload insn %.8lx\n",
5810 (unsigned long) insn
);
5812 dsc
->modinsn
[0] = insn
& 0xfff0ffff;
5814 install_preload (gdbarch
, regs
, dsc
, rn
);
5820 thumb2_copy_preload (struct gdbarch
*gdbarch
, uint16_t insn1
, uint16_t insn2
,
5821 struct regcache
*regs
, struct displaced_step_closure
*dsc
)
5823 unsigned int rn
= bits (insn1
, 0, 3);
5824 unsigned int u_bit
= bit (insn1
, 7);
5825 int imm12
= bits (insn2
, 0, 11);
5828 if (rn
!= ARM_PC_REGNUM
)
5829 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
, "preload", dsc
);
5831 /* PC is only allowed to use in PLI (immediate,literal) Encoding T3, and
5832 PLD (literal) Encoding T1. */
5833 if (debug_displaced
)
5834 fprintf_unfiltered (gdb_stdlog
,
5835 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
5836 (unsigned int) dsc
->insn_addr
, u_bit
? '+' : '-',
5842 /* Rewrite instruction {pli/pld} PC imm12 into:
5843 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
5847 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
5849 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5850 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
5852 pc_val
= displaced_read_reg (regs
, dsc
, ARM_PC_REGNUM
);
5854 displaced_write_reg (regs
, dsc
, 0, pc_val
, CANNOT_WRITE_PC
);
5855 displaced_write_reg (regs
, dsc
, 1, imm12
, CANNOT_WRITE_PC
);
5856 dsc
->u
.preload
.immed
= 0;
5858 /* {pli/pld} [r0, r1] */
5859 dsc
->modinsn
[0] = insn1
& 0xfff0;
5860 dsc
->modinsn
[1] = 0xf001;
5863 dsc
->cleanup
= &cleanup_preload
;
5867 /* Preload instructions with register offset. */
5870 install_preload_reg(struct gdbarch
*gdbarch
, struct regcache
*regs
,
5871 struct displaced_step_closure
*dsc
, unsigned int rn
,
5874 ULONGEST rn_val
, rm_val
;
5876 /* Preload register-offset instructions:
5878 {pli/pld} [rn, rm {, shift}]
5880 {pli/pld} [r0, r1 {, shift}]. */
5882 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5883 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
5884 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
5885 rm_val
= displaced_read_reg (regs
, dsc
, rm
);
5886 displaced_write_reg (regs
, dsc
, 0, rn_val
, CANNOT_WRITE_PC
);
5887 displaced_write_reg (regs
, dsc
, 1, rm_val
, CANNOT_WRITE_PC
);
5888 dsc
->u
.preload
.immed
= 0;
5890 dsc
->cleanup
= &cleanup_preload
;
5894 arm_copy_preload_reg (struct gdbarch
*gdbarch
, uint32_t insn
,
5895 struct regcache
*regs
,
5896 struct displaced_step_closure
*dsc
)
5898 unsigned int rn
= bits (insn
, 16, 19);
5899 unsigned int rm
= bits (insn
, 0, 3);
5902 if (!insn_references_pc (insn
, 0x000f000ful
))
5903 return arm_copy_unmodified (gdbarch
, insn
, "preload reg", dsc
);
5905 if (debug_displaced
)
5906 fprintf_unfiltered (gdb_stdlog
, "displaced: copying preload insn %.8lx\n",
5907 (unsigned long) insn
);
5909 dsc
->modinsn
[0] = (insn
& 0xfff0fff0) | 0x1;
5911 install_preload_reg (gdbarch
, regs
, dsc
, rn
, rm
);
5915 /* Copy/cleanup coprocessor load and store instructions. */
5918 cleanup_copro_load_store (struct gdbarch
*gdbarch
,
5919 struct regcache
*regs
,
5920 struct displaced_step_closure
*dsc
)
5922 ULONGEST rn_val
= displaced_read_reg (regs
, dsc
, 0);
5924 displaced_write_reg (regs
, dsc
, 0, dsc
->tmp
[0], CANNOT_WRITE_PC
);
5926 if (dsc
->u
.ldst
.writeback
)
5927 displaced_write_reg (regs
, dsc
, dsc
->u
.ldst
.rn
, rn_val
, LOAD_WRITE_PC
);
5931 install_copro_load_store (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5932 struct displaced_step_closure
*dsc
,
5933 int writeback
, unsigned int rn
)
5937 /* Coprocessor load/store instructions:
5939 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
5941 {stc/stc2} [r0, #+/-imm].
5943 ldc/ldc2 are handled identically. */
5945 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5946 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
5947 /* PC should be 4-byte aligned. */
5948 rn_val
= rn_val
& 0xfffffffc;
5949 displaced_write_reg (regs
, dsc
, 0, rn_val
, CANNOT_WRITE_PC
);
5951 dsc
->u
.ldst
.writeback
= writeback
;
5952 dsc
->u
.ldst
.rn
= rn
;
5954 dsc
->cleanup
= &cleanup_copro_load_store
;
5958 arm_copy_copro_load_store (struct gdbarch
*gdbarch
, uint32_t insn
,
5959 struct regcache
*regs
,
5960 struct displaced_step_closure
*dsc
)
5962 unsigned int rn
= bits (insn
, 16, 19);
5964 if (!insn_references_pc (insn
, 0x000f0000ul
))
5965 return arm_copy_unmodified (gdbarch
, insn
, "copro load/store", dsc
);
5967 if (debug_displaced
)
5968 fprintf_unfiltered (gdb_stdlog
, "displaced: copying coprocessor "
5969 "load/store insn %.8lx\n", (unsigned long) insn
);
5971 dsc
->modinsn
[0] = insn
& 0xfff0ffff;
5973 install_copro_load_store (gdbarch
, regs
, dsc
, bit (insn
, 25), rn
);
5979 thumb2_copy_copro_load_store (struct gdbarch
*gdbarch
, uint16_t insn1
,
5980 uint16_t insn2
, struct regcache
*regs
,
5981 struct displaced_step_closure
*dsc
)
5983 unsigned int rn
= bits (insn1
, 0, 3);
5985 if (rn
!= ARM_PC_REGNUM
)
5986 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
5987 "copro load/store", dsc
);
5989 if (debug_displaced
)
5990 fprintf_unfiltered (gdb_stdlog
, "displaced: copying coprocessor "
5991 "load/store insn %.4x%.4x\n", insn1
, insn2
);
5993 dsc
->modinsn
[0] = insn1
& 0xfff0;
5994 dsc
->modinsn
[1] = insn2
;
5997 /* This function is called for copying instruction LDC/LDC2/VLDR, which
5998 doesn't support writeback, so pass 0. */
5999 install_copro_load_store (gdbarch
, regs
, dsc
, 0, rn
);
6004 /* Clean up branch instructions (actually perform the branch, by setting
6008 cleanup_branch (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6009 struct displaced_step_closure
*dsc
)
6011 uint32_t status
= displaced_read_reg (regs
, dsc
, ARM_PS_REGNUM
);
6012 int branch_taken
= condition_true (dsc
->u
.branch
.cond
, status
);
6013 enum pc_write_style write_pc
= dsc
->u
.branch
.exchange
6014 ? BX_WRITE_PC
: BRANCH_WRITE_PC
;
6019 if (dsc
->u
.branch
.link
)
6021 /* The value of LR should be the next insn of current one. In order
6022 not to confuse logic hanlding later insn `bx lr', if current insn mode
6023 is Thumb, the bit 0 of LR value should be set to 1. */
6024 ULONGEST next_insn_addr
= dsc
->insn_addr
+ dsc
->insn_size
;
6027 next_insn_addr
|= 0x1;
6029 displaced_write_reg (regs
, dsc
, ARM_LR_REGNUM
, next_insn_addr
,
6033 displaced_write_reg (regs
, dsc
, ARM_PC_REGNUM
, dsc
->u
.branch
.dest
, write_pc
);
6036 /* Copy B/BL/BLX instructions with immediate destinations. */
6039 install_b_bl_blx (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6040 struct displaced_step_closure
*dsc
,
6041 unsigned int cond
, int exchange
, int link
, long offset
)
6043 /* Implement "BL<cond> <label>" as:
6045 Preparation: cond <- instruction condition
6046 Insn: mov r0, r0 (nop)
6047 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
6049 B<cond> similar, but don't set r14 in cleanup. */
6051 dsc
->u
.branch
.cond
= cond
;
6052 dsc
->u
.branch
.link
= link
;
6053 dsc
->u
.branch
.exchange
= exchange
;
6055 dsc
->u
.branch
.dest
= dsc
->insn_addr
;
6056 if (link
&& exchange
)
6057 /* For BLX, offset is computed from the Align (PC, 4). */
6058 dsc
->u
.branch
.dest
= dsc
->u
.branch
.dest
& 0xfffffffc;
6061 dsc
->u
.branch
.dest
+= 4 + offset
;
6063 dsc
->u
.branch
.dest
+= 8 + offset
;
6065 dsc
->cleanup
= &cleanup_branch
;
/* Copy an ARM-mode B, BL or BLX (immediate) instruction for displaced
   stepping: decode the 26-bit signed offset, replace the insn with a NOP
   and let cleanup_branch perform the actual control transfer.  */

static int
arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
		   struct regcache *regs, struct displaced_step_closure *dsc)
{
  unsigned int cond = bits (insn, 28, 31);
  /* BLX (immediate) is encoded with the 0xf "condition" field.  */
  int exchange = (cond == 0xf);
  int link = exchange || bit (insn, 24);
  long offset;

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
			"%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
			(unsigned long) insn);
  if (exchange)
    /* For BLX, set bit 0 of the destination.  The cleanup_branch function will
       then arrange the switch into Thumb mode.  */
    offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
  else
    offset = bits (insn, 0, 23) << 2;

  /* Sign-extend the 26-bit offset.  */
  if (bit (offset, 25))
    offset = offset | ~0x3ffffff;

  dsc->modinsn[0] = ARM_NOP;

  install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
  return 0;
}
/* Copy a 32-bit Thumb-2 B/BL/BLX instruction (encodings T3/T4) for
   displaced stepping.  INSN1/INSN2 are the two halfwords; the J1/J2/S
   bits are folded into the sign-extended offset per the ARM ARM.  */

static int
thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
		      uint16_t insn2, struct regcache *regs,
		      struct displaced_step_closure *dsc)
{
  int link = bit (insn2, 14);
  int exchange = link && !bit (insn2, 12);
  int cond = INST_AL;
  long offset = 0;
  int j1 = bit (insn2, 13);
  int j2 = bit (insn2, 11);
  int s = sbits (insn1, 10, 10);
  /* I1/I2 reconstruct the high offset bits from J1/J2 XOR S.  */
  int i1 = !(j1 ^ bit (insn1, 10));
  int i2 = !(j2 ^ bit (insn1, 10));

  if (!link && !exchange) /* B */
    {
      offset = (bits (insn2, 0, 10) << 1);
      if (bit (insn2, 12)) /* Encoding T4 */
	{
	  offset |= (bits (insn1, 0, 9) << 12)
	    | (i2 << 22)
	    | (i1 << 23)
	    | (s << 24);
	  cond = INST_AL;
	}
      else /* Encoding T3 */
	{
	  offset |= (bits (insn1, 0, 5) << 12)
	    | (j1 << 18)
	    | (j2 << 19)
	    | (s << 20);
	  cond = bits (insn1, 6, 9);
	}
    }
  else
    {
      offset = (bits (insn1, 0, 9) << 12);
      offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
      /* BLX targets are word-aligned, so its halfword field shifts by 2.  */
      offset |= exchange ?
	(bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
    }

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
			"%.4x %.4x with offset %.8lx\n",
			link ? (exchange) ? "blx" : "bl" : "b",
			insn1, insn2, offset);

  dsc->modinsn[0] = THUMB_NOP;

  install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
  return 0;
}
/* Copy B Thumb instructions.  */

/* Copy a 16-bit Thumb B instruction (conditional encoding T1 with
   bits 12-15 == 0xd, or unconditional encoding T2 with 0xe); replace it
   with a NOP and let cleanup_branch redirect the PC.  */

static int
thumb_copy_b (struct gdbarch *gdbarch, unsigned short insn,
	      struct displaced_step_closure *dsc)
{
  unsigned int cond = 0;
  int offset = 0;
  unsigned short bit_12_15 = bits (insn, 12, 15);
  CORE_ADDR from = dsc->insn_addr;

  if (bit_12_15 == 0xd)
    {
      /* offset = SignExtend (imm8:0, 32) */
      offset = sbits ((insn << 1), 0, 8);
      cond = bits (insn, 8, 11);
    }
  else if (bit_12_15 == 0xe) /* Encoding T2 */
    {
      /* offset = SignExtend (imm11:0, 32) */
      offset = sbits ((insn << 1), 0, 11);
      cond = INST_AL;
    }

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying b immediate insn %.4x "
			"with offset %d\n", insn, offset);

  dsc->u.branch.cond = cond;
  dsc->u.branch.link = 0;
  dsc->u.branch.exchange = 0;
  /* Thumb PC reads as the insn address + 4.  */
  dsc->u.branch.dest = from + 4 + offset;

  dsc->modinsn[0] = THUMB_NOP;

  dsc->cleanup = &cleanup_branch;

  return 0;
}
/* Copy BX/BLX with register-specified destinations.  */

/* Record the branch destination read from register RM in DSC and defer the
   control transfer (and optional LR write when LINK) to cleanup_branch.
   exchange is always 1: BX/BLX may switch ARM/Thumb state.  */

static void
install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
		    struct displaced_step_closure *dsc, int link,
		    unsigned int cond, unsigned int rm)
{
  /* Implement {BX,BLX}<cond> <reg>" as:

     Preparation: cond <- instruction condition
     Insn: mov r0, r0  (nop)
     Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.

     Don't set r14 in cleanup for BX.  */

  dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);

  dsc->u.branch.cond = cond;
  dsc->u.branch.link = link;

  dsc->u.branch.exchange = 1;

  dsc->cleanup = &cleanup_branch;
}
/* Copy an ARM-mode BX/BLX (register) instruction for displaced stepping:
   substitute a NOP and let install_bx_blx_reg/cleanup_branch do the jump.  */

static int
arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
		     struct regcache *regs, struct displaced_step_closure *dsc)
{
  unsigned int cond = bits (insn, 28, 31);
  /* Bit 5 distinguishes BLX (set) from BX (clear).  */
  int link = bit (insn, 5);
  unsigned int rm = bits (insn, 0, 3);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
			(unsigned long) insn);

  dsc->modinsn[0] = ARM_NOP;

  install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);

  return 0;
}
/* Copy a 16-bit Thumb BX/BLX (register) instruction.  Thumb BX/BLX is
   unconditional, hence INST_AL.  */

static int
thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
		       struct regcache *regs,
		       struct displaced_step_closure *dsc)
{
  /* Bit 7 distinguishes BLX (set) from BX (clear).  */
  int link = bit (insn, 7);
  unsigned int rm = bits (insn, 3, 6);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
			(unsigned short) insn);

  dsc->modinsn[0] = THUMB_NOP;

  install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);

  return 0;
}
/* Copy/cleanup arithmetic/logic instruction with immediate RHS.  */

/* Cleanup: the rewritten insn left its result in r0.  Move it to the real
   destination dsc->rd, then restore the saved r0/r1.  ALU_WRITE_PC handles
   the case where rd is the PC.  */

static void
cleanup_alu_imm (struct gdbarch *gdbarch,
		 struct regcache *regs, struct displaced_step_closure *dsc)
{
  ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
  displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
}
/* Copy an ARM data-processing instruction with immediate operand for
   displaced stepping, remapping any PC operands onto r0/r1 so the copy can
   run out of line.  */

static int
arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
		  struct displaced_step_closure *dsc)
{
  unsigned int rn = bits (insn, 16, 19);
  unsigned int rd = bits (insn, 12, 15);
  unsigned int op = bits (insn, 21, 24);
  int is_mov = (op == 0xd);
  ULONGEST rd_val, rn_val;

  /* If neither rd nor rn is the PC, the insn can run unmodified.  */
  if (!insn_references_pc (insn, 0x000ff000ul))
    return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
			"%.8lx\n", is_mov ? "move" : "ALU",
			(unsigned long) insn);

  /* Instruction is of form:

     <op><cond> rd, [rn,] #imm

     Rewrite as:

     Preparation: tmp1, tmp2 <- r0, r1;
		  r0, r1 <- rd, rn
     Insn: <op><cond> r0, r1, #imm
     Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
  */

  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
  rn_val = displaced_read_reg (regs, dsc, rn);
  rd_val = displaced_read_reg (regs, dsc, rd);
  displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
  dsc->rd = rd;

  /* MOV has no rn operand; for other ops substitute r1 (0x10000) as rn.
     Both forms clear rd (bits 12-15) to use r0.  */
  if (is_mov)
    dsc->modinsn[0] = insn & 0xfff00fff;
  else
    dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;

  dsc->cleanup = &cleanup_alu_imm;

  return 0;
}
/* Copy a 32-bit Thumb-2 ALU-immediate instruction (only MOV reaches this
   routine) for displaced stepping, remapping PC operands onto r0/r1.  */

static int
thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
		     uint16_t insn2, struct regcache *regs,
		     struct displaced_step_closure *dsc)
{
  unsigned int op = bits (insn1, 5, 8);
  unsigned int rn, rm, rd;
  ULONGEST rd_val, rn_val;

  rn = bits (insn1, 0, 3); /* Rn */
  rm = bits (insn2, 0, 3); /* Rm */
  rd = bits (insn2, 8, 11); /* Rd */

  /* This routine is only called for instruction MOV.  */
  gdb_assert (op == 0x2 && rn == 0xf);

  if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
			"ALU", insn1, insn2);

  /* Instruction is of form:

     <op><cond> rd, [rn,] #imm

     Rewrite as:

     Preparation: tmp1, tmp2 <- r0, r1;
		  r0, r1 <- rd, rn
     Insn: <op><cond> r0, r1, #imm
     Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
  */

  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
  rn_val = displaced_read_reg (regs, dsc, rn);
  rd_val = displaced_read_reg (regs, dsc, rd);
  displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
  dsc->rd = rd;

  /* Rewrite rd (second halfword, bits 8-11) to r0 and rm to r1.  */
  dsc->modinsn[0] = insn1;
  dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
  dsc->numinsns = 2;

  dsc->cleanup = &cleanup_alu_imm;

  return 0;
}
/* Copy/cleanup arithmetic/logic insns with register RHS.  */

/* Cleanup: result is in r0; restore the three clobbered registers r0-r2
   from tmp[], then move the result into the true destination dsc->rd.  */

static void
cleanup_alu_reg (struct gdbarch *gdbarch,
		 struct regcache *regs, struct displaced_step_closure *dsc)
{
  ULONGEST rd_val;
  int i;

  rd_val = displaced_read_reg (regs, dsc, 0);

  for (i = 0; i < 3; i++)
    displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);

  displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
}
/* Prepare a register-operand ALU insn for displaced stepping: save r0-r2,
   load them with the values of rd/rn/rm, and arrange cleanup_alu_reg to
   shuffle the result back afterwards.  */

static void
install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
		 struct displaced_step_closure *dsc,
		 unsigned int rd, unsigned int rn, unsigned int rm)
{
  ULONGEST rd_val, rn_val, rm_val;

  /* Instruction is of form:

     <op><cond> rd, [rn,] rm [, <shift>]

     Rewrite as:

     Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
		  r0, r1, r2 <- rd, rn, rm
     Insn: <op><cond> r0, [r1,] r2 [, <shift>]
     Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
  */

  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
  dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
  rd_val = displaced_read_reg (regs, dsc, rd);
  rn_val = displaced_read_reg (regs, dsc, rn);
  rm_val = displaced_read_reg (regs, dsc, rm);
  displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
  dsc->rd = rd;

  dsc->cleanup = &cleanup_alu_reg;
}
/* Copy an ARM data-processing instruction with register operand for
   displaced stepping, remapping PC operands onto r0-r2.  */

static int
arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
		  struct displaced_step_closure *dsc)
{
  unsigned int op = bits (insn, 21, 24);
  int is_mov = (op == 0xd);

  /* If none of rd, rn, rm is the PC, the insn can run unmodified.  */
  if (!insn_references_pc (insn, 0x000ff00ful))
    return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
			is_mov ? "move" : "ALU", (unsigned long) insn);

  /* Rewrite rd -> r0, rm -> r2 (and for non-MOV, rn -> r1 via 0x10000).  */
  if (is_mov)
    dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
  else
    dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;

  install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
		   bits (insn, 0, 3));
  return 0;
}
/* Copy a 16-bit Thumb ALU (hi-register) instruction for displaced
   stepping.  Rd doubles as Rn in this encoding, hence rd passed twice.  */

static int
thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
		    struct regcache *regs,
		    struct displaced_step_closure *dsc)
{
  unsigned rm, rd;

  rm = bits (insn, 3, 6);
  /* Rd is split: bit 7 is the high bit, bits 0-2 the low bits.  */
  rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);

  if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
    return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying ALU reg insn %.4x\n",
			(unsigned short) insn);

  /* Rewrite the register fields to use r0 (rd) and r2 (rm).  */
  dsc->modinsn[0] = ((insn & 0xff00) | 0x10);

  install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);

  return 0;
}
/* Cleanup/copy arithmetic/logic insns with shifted register RHS.  */

/* Cleanup: result is in r0; restore r0-r3 from tmp[], then move the result
   into the true destination dsc->rd.  */

static void
cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
			 struct regcache *regs,
			 struct displaced_step_closure *dsc)
{
  ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
  int i;

  for (i = 0; i < 4; i++)
    displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);

  displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
}
/* Prepare a register-shifted-register ALU insn for displaced stepping:
   save r0-r3 and load them with rd/rn/rm/rs; cleanup_alu_shifted_reg
   shuffles the result back.  */

static void
install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
			 struct displaced_step_closure *dsc,
			 unsigned int rd, unsigned int rn, unsigned int rm,
			 unsigned rs)
{
  int i;
  ULONGEST rd_val, rn_val, rm_val, rs_val;

  /* Instruction is of form:

     <op><cond> rd, [rn,] rm, <shift> rs

     Rewrite as:

     Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
		  r0, r1, r2, r3 <- rd, rn, rm, rs
     Insn: <op><cond> r0, r1, r2, <shift> r3
     Cleanup: rd <- r0
	      r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
  */

  for (i = 0; i < 4; i++)
    dsc->tmp[i] = displaced_read_reg (regs, dsc, i);

  rd_val = displaced_read_reg (regs, dsc, rd);
  rn_val = displaced_read_reg (regs, dsc, rn);
  rm_val = displaced_read_reg (regs, dsc, rm);
  rs_val = displaced_read_reg (regs, dsc, rs);
  displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
  dsc->rd = rd;

  dsc->cleanup = &cleanup_alu_shifted_reg;
}
/* Copy an ARM data-processing instruction with register-shifted register
   operand for displaced stepping, remapping PC operands onto r0-r3.  */

static int
arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  struct displaced_step_closure *dsc)
{
  unsigned int op = bits (insn, 21, 24);
  int is_mov = (op == 0xd);
  unsigned int rd, rn, rm, rs;

  /* If none of rd, rn, rm, rs is the PC, the insn can run unmodified.  */
  if (!insn_references_pc (insn, 0x000fff0ful))
    return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
			"%.8lx\n", is_mov ? "move" : "ALU",
			(unsigned long) insn);

  rn = bits (insn, 16, 19);
  rm = bits (insn, 0, 3);
  rs = bits (insn, 8, 11);
  rd = bits (insn, 12, 15);

  /* Rewrite rd -> r0, rm -> r2, rs -> r3 (and rn -> r1 for non-MOV).  */
  if (is_mov)
    dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
  else
    dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;

  install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);

  return 0;
}
/* Clean up load instructions.  */

/* Cleanup after a rewritten load: the loaded value(s) landed in r0 (and r1
   for doubleword transfers); move them to the true destination(s), restore
   the clobbered scratch registers, and apply base-register writeback.  */

static void
cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
	      struct displaced_step_closure *dsc)
{
  ULONGEST rt_val, rt_val2 = 0, rn_val;

  rt_val = displaced_read_reg (regs, dsc, 0);
  if (dsc->u.ldst.xfersize == 8)
    rt_val2 = displaced_read_reg (regs, dsc, 1);
  rn_val = displaced_read_reg (regs, dsc, 2);

  displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
  if (dsc->u.ldst.xfersize > 4)
    displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
  /* r3 was only used (for rm) by the register-offset forms.  */
  if (!dsc->u.ldst.immed)
    displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);

  /* Handle register writeback.  */
  if (dsc->u.ldst.writeback)
    displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
  /* Put result in right place.  */
  displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
  if (dsc->u.ldst.xfersize == 8)
    displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
}
/* Clean up store instructions.  */

/* Cleanup after a rewritten store: restore the scratch registers r0-r4
   (r4 only when it was used for the PC-store fixup) and apply
   base-register writeback.  */

static void
cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
	       struct displaced_step_closure *dsc)
{
  ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);

  displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
  if (dsc->u.ldst.xfersize > 4)
    displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
  if (!dsc->u.ldst.immed)
    displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
  if (!dsc->u.ldst.restore_r4)
    displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);

  /* Writeback.  */
  if (dsc->u.ldst.writeback)
    displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
}
6602 /* Copy "extra" load/store instructions. These are halfword/doubleword
6603 transfers, which have a different encoding to byte/word transfers. */
6606 arm_copy_extra_ld_st (struct gdbarch
*gdbarch
, uint32_t insn
, int unpriveleged
,
6607 struct regcache
*regs
, struct displaced_step_closure
*dsc
)
6609 unsigned int op1
= bits (insn
, 20, 24);
6610 unsigned int op2
= bits (insn
, 5, 6);
6611 unsigned int rt
= bits (insn
, 12, 15);
6612 unsigned int rn
= bits (insn
, 16, 19);
6613 unsigned int rm
= bits (insn
, 0, 3);
6614 char load
[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
6615 char bytesize
[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
6616 int immed
= (op1
& 0x4) != 0;
6618 ULONGEST rt_val
, rt_val2
= 0, rn_val
, rm_val
= 0;
6620 if (!insn_references_pc (insn
, 0x000ff00ful
))
6621 return arm_copy_unmodified (gdbarch
, insn
, "extra load/store", dsc
);
6623 if (debug_displaced
)
6624 fprintf_unfiltered (gdb_stdlog
, "displaced: copying %sextra load/store "
6625 "insn %.8lx\n", unpriveleged
? "unpriveleged " : "",
6626 (unsigned long) insn
);
6628 opcode
= ((op2
<< 2) | (op1
& 0x1) | ((op1
& 0x4) >> 1)) - 4;
6631 internal_error (__FILE__
, __LINE__
,
6632 _("copy_extra_ld_st: instruction decode error"));
6634 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
6635 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
6636 dsc
->tmp
[2] = displaced_read_reg (regs
, dsc
, 2);
6638 dsc
->tmp
[3] = displaced_read_reg (regs
, dsc
, 3);
6640 rt_val
= displaced_read_reg (regs
, dsc
, rt
);
6641 if (bytesize
[opcode
] == 8)
6642 rt_val2
= displaced_read_reg (regs
, dsc
, rt
+ 1);
6643 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
6645 rm_val
= displaced_read_reg (regs
, dsc
, rm
);
6647 displaced_write_reg (regs
, dsc
, 0, rt_val
, CANNOT_WRITE_PC
);
6648 if (bytesize
[opcode
] == 8)
6649 displaced_write_reg (regs
, dsc
, 1, rt_val2
, CANNOT_WRITE_PC
);
6650 displaced_write_reg (regs
, dsc
, 2, rn_val
, CANNOT_WRITE_PC
);
6652 displaced_write_reg (regs
, dsc
, 3, rm_val
, CANNOT_WRITE_PC
);
6655 dsc
->u
.ldst
.xfersize
= bytesize
[opcode
];
6656 dsc
->u
.ldst
.rn
= rn
;
6657 dsc
->u
.ldst
.immed
= immed
;
6658 dsc
->u
.ldst
.writeback
= bit (insn
, 24) == 0 || bit (insn
, 21) != 0;
6659 dsc
->u
.ldst
.restore_r4
= 0;
6662 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
6664 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
6665 dsc
->modinsn
[0] = (insn
& 0xfff00fff) | 0x20000;
6667 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
6669 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
6670 dsc
->modinsn
[0] = (insn
& 0xfff00ff0) | 0x20003;
6672 dsc
->cleanup
= load
[opcode
] ? &cleanup_load
: &cleanup_store
;
/* Copy byte/half word/word loads and stores.  */

/* Prepare a single-register load/store whose operands mention the PC:
   save the scratch registers and map rt/rn/rm onto r0/r2/r3; the caller
   supplies the rewritten instruction(s).  Cleanup is cleanup_load or
   cleanup_store depending on LOAD.  */

static void
install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
		    struct displaced_step_closure *dsc, int load,
		    int immed, int writeback, int size, int usermode,
		    int rt, int rm, int rn)
{
  ULONGEST rt_val, rn_val, rm_val = 0;

  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
  if (!immed)
    dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
  /* r4 is only clobbered by the PC-store instruction sequence below.  */
  if (!load)
    dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);

  rt_val = displaced_read_reg (regs, dsc, rt);
  rn_val = displaced_read_reg (regs, dsc, rn);
  if (!immed)
    rm_val = displaced_read_reg (regs, dsc, rm);

  displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
  if (!immed)
    displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
  dsc->rd = rt;
  dsc->u.ldst.xfersize = size;
  dsc->u.ldst.rn = rn;
  dsc->u.ldst.immed = immed;
  dsc->u.ldst.writeback = writeback;

  /* To write PC we can do:

     Before this sequence of instructions:
     r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
     r2 is the Rn value got from displaced_read_reg.

     Insn1: push {pc} Write address of STR instruction + offset on stack
     Insn2: pop  {r4} Read it back from stack, r4 = addr(Insn1) + offset
     Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
			    = addr(Insn1) + offset - addr(Insn3) - 8
			    = offset - 16
     Insn4: add r4, r4, #8 r4 = offset - 8
     Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
			      = from + offset
     Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])

     Otherwise we don't know what value to write for PC, since the offset is
     architecture-dependent (sometimes PC+8, sometimes PC+12).  More details
     of this can be found in Section "Saving from r15" in
     http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */

  dsc->cleanup = load ? &cleanup_load : &cleanup_store;
}
/* Copy a Thumb-2 PC-relative (literal) load for displaced stepping.  The
   literal address is PC-relative, so the load is rewritten into a
   register-offset LDR using the aligned original PC (in r2) and the
   immediate (in r3).  */

static int
thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
			  uint16_t insn2, struct regcache *regs,
			  struct displaced_step_closure *dsc, int size)
{
  unsigned int u_bit = bit (insn1, 7);
  unsigned int rt = bits (insn2, 12, 15);
  int imm12 = bits (insn2, 0, 11);
  ULONGEST pc_val;

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
			(unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
			imm12);

  /* The U bit selects add/subtract of the offset.  */
  if (!u_bit)
    imm12 = -1 * imm12;

  /* Rewrite instruction LDR Rt imm12 into:

     Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12

     LDR R0, R2, R3,

     Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2].  */

  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
  dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);

  pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
  /* Literal loads use Align (PC, 4) as the base.  */
  pc_val = pc_val & 0xfffffffc;

  displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);

  dsc->rd = rt;
  dsc->u.ldst.xfersize = size;
  dsc->u.ldst.immed = 0;
  dsc->u.ldst.writeback = 0;
  dsc->u.ldst.restore_r4 = 0;

  /* LDR R0, R2, R3 */
  dsc->modinsn[0] = 0xf852;
  dsc->modinsn[1] = 0x3;
  dsc->numinsns = 2;

  dsc->cleanup = &cleanup_load;

  return 0;
}
/* Copy a Thumb-2 LDR (immediate or register offset) whose Rt or Rn is the
   PC, remapping the operands onto r0/r2/r3.  */

static int
thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
			  uint16_t insn2, struct regcache *regs,
			  struct displaced_step_closure *dsc,
			  int writeback, int immed)
{
  unsigned int rt = bits (insn2, 12, 15);
  unsigned int rn = bits (insn1, 0, 3);
  unsigned int rm = bits (insn2, 0, 3);  /* Only valid if !immed.  */
  /* In LDR (register), there is also a register Rm, which is not allowed to
     be PC, so we don't have to check it.  */

  if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
					dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
			rt, rn, insn1, insn2);

  install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
		      0, rt, rm, rn);

  dsc->u.ldst.restore_r4 = 0;

  if (immed)
    /* ldr[b]<cond> rt, [rn, #imm], etc.
       ->
       ldr[b]<cond> r0, [r2, #imm].  */
    {
      dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
      dsc->modinsn[1] = insn2 & 0x0fff;
    }
  else
    /* ldr[b]<cond> rt, [rn, rm], etc.
       ->
       ldr[b]<cond> r0, [r2, r3].  */
    {
      dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
      dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
    }

  dsc->numinsns = 2;

  return 0;
}
/* Copy an ARM LDR/STR/LDRB/STRB (and the T user-mode variants) whose
   operands mention the PC.  Stores of the PC need a 6-insn sequence to
   recover the architecture-dependent stored-PC offset; see
   install_load_store.  */

static int
arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
			    struct regcache *regs,
			    struct displaced_step_closure *dsc,
			    int load, int size, int usermode)
{
  int immed = !bit (insn, 25);
  /* Writeback happens for post-indexed (P == 0) or W == 1 forms.  */
  int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
  unsigned int rt = bits (insn, 12, 15);
  unsigned int rn = bits (insn, 16, 19);
  unsigned int rm = bits (insn, 0, 3);  /* Only valid if !immed.  */

  if (!insn_references_pc (insn, 0x000ff00ful))
    return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying %s%s r%d [r%d] insn %.8lx\n",
			load ? (size == 1 ? "ldrb" : "ldr")
			     : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
			rt, rn,
			(unsigned long) insn);

  install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
		      usermode, rt, rm, rn);

  if (load || rt != ARM_PC_REGNUM)
    {
      dsc->u.ldst.restore_r4 = 0;

      if (immed)
	/* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
	   ->
	   {ldr,str}[b]<cond> r0, [r2, #imm].  */
	dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
      else
	/* {ldr,str}[b]<cond> rt, [rn, rm], etc.
	   ->
	   {ldr,str}[b]<cond> r0, [r2, r3].  */
	dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
    }
  else
    {
      /* We need to use r4 as scratch.  Make sure it's restored afterwards.  */
      dsc->u.ldst.restore_r4 = 1;
      dsc->modinsn[0] = 0xe92d8000;  /* push {pc} */
      dsc->modinsn[1] = 0xe8bd0010;  /* pop  {r4} */
      dsc->modinsn[2] = 0xe044400f;  /* sub r4, r4, pc.  */
      dsc->modinsn[3] = 0xe2844008;  /* add r4, r4, #8.  */
      dsc->modinsn[4] = 0xe0800004;  /* add r0, r0, r4.  */

      /* As above, but the store itself comes last.  */
      if (immed)
	dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
      else
	dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;

      dsc->numinsns = 6;
    }

  dsc->cleanup = load ? &cleanup_load : &cleanup_store;

  return 0;
}
/* Cleanup LDM instructions with fully-populated register list.  This is an
   unfortunate corner case: it's impossible to implement correctly by modifying
   the instruction.  The issue is as follows: we have an instruction,

   ldm rN, {r0-r15}

   which we must rewrite to avoid loading PC.  A possible solution would be to
   do the load in two halves, something like (with suitable cleanup
   afterwards):

   mov r8, rN
   ldm[id][ab] r8!, {r0-r7}
   str r7, <temp>
   ldm[id][ab] r8, {r7-r14}
   <bkpt>

   but at present there's no suitable place for <temp>, since the scratch space
   is overwritten before the cleanup routine is called.  For now, we simply
   emulate the instruction.  */

static void
cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
			struct displaced_step_closure *dsc)
{
  int inc = dsc->u.block.increment;
  /* Address bump applied before/after each word, per addressing mode.  */
  int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
  int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
  uint32_t regmask = dsc->u.block.regmask;
  /* Walk the list lowest-register-first when incrementing, highest-first
     when decrementing, so registers pair with ascending addresses.  */
  int regno = inc ? 0 : 15;
  CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
  int exception_return = dsc->u.block.load && dsc->u.block.user
			 && (regmask & 0x8000) != 0;
  uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
  int do_transfer = condition_true (dsc->u.block.cond, status);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  if (!do_transfer)
    return;

  /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
     sensible we can do here.  Complain loudly.  */
  if (exception_return)
    error (_("Cannot single-step exception return"));

  /* We don't handle any stores here for now.  */
  gdb_assert (dsc->u.block.load != 0);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
			"%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
			dsc->u.block.increment ? "inc" : "dec",
			dsc->u.block.before ? "before" : "after");

  while (regmask)
    {
      uint32_t memword;

      /* Find the next register in transfer order.  */
      if (inc)
	while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
	  regno++;
      else
	while (regno >= 0 && (regmask & (1 << regno)) == 0)
	  regno--;

      xfer_addr += bump_before;

      memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
      displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);

      xfer_addr += bump_after;

      regmask &= ~(1 << regno);
    }

  if (dsc->u.block.writeback)
    displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
			 CANNOT_WRITE_PC);
}
/* Clean up an STM which included the PC in the register list.  */

/* The STM ran out of line, so it stored the *displaced* PC.  Locate the
   memory slot that received the PC, measure the architecture-dependent
   stored-PC offset against the scratch address, and patch the slot with
   the equivalent value for the original instruction address.  */

static void
cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
			struct displaced_step_closure *dsc)
{
  uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
  int store_executed = condition_true (dsc->u.block.cond, status);
  CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
  CORE_ADDR stm_insn_addr;
  uint32_t pc_val;
  long offset;
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  /* If condition code fails, there's nothing else to do.  */
  if (!store_executed)
    return;

  /* The PC is the highest-numbered register, so it occupies the
     highest-addressed slot of the transfer.  NOTE(review): the "before"
     adjustments below should be verified against the ARM ARM STM address
     computation for each of the IA/IB/DA/DB addressing modes.  */
  if (dsc->u.block.increment)
    {
      pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;

      if (dsc->u.block.before)
	pc_stored_at += 4;
    }
  else
    {
      pc_stored_at = dsc->u.block.xfer_addr;

      if (dsc->u.block.before)
	pc_stored_at += 4;
    }

  pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
  stm_insn_addr = dsc->scratch_base;
  offset = pc_val - stm_insn_addr;

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
			"STM instruction\n", offset);

  /* Rewrite the stored PC to the proper value for the non-displaced original
     instruction.  */
  write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
				 dsc->insn_addr + offset);
}
/* Clean up an LDM which includes the PC in the register list.  We clumped all
   the registers in the transferred list into a contiguous range r0...rX (to
   avoid loading PC directly and losing control of the debugged program), so we
   must undo that here.  */

static void
cleanup_block_load_pc (struct gdbarch *gdbarch,
		       struct regcache *regs,
		       struct displaced_step_closure *dsc)
{
  uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
  int load_executed = condition_true (dsc->u.block.cond, status);
  unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
  unsigned int regs_loaded = bitcount (mask);
  unsigned int num_to_shuffle = regs_loaded, clobbered;

  /* The method employed here will fail if the register list is fully populated
     (we need to avoid loading PC directly).  */
  gdb_assert (num_to_shuffle < 16);

  if (!load_executed)
    return;

  /* Registers r0..r(num_to_shuffle-1) currently hold the loaded values.  */
  clobbered = (1 << num_to_shuffle) - 1;

  /* Walk downwards from the PC, moving each loaded value from its temporary
     slot r(num_to_shuffle-1)..r0 into its intended register.  Going from
     high to low means a value is never overwritten before it is moved.  */
  while (num_to_shuffle > 0)
    {
      if ((mask & (1 << write_reg)) != 0)
	{
	  unsigned int read_reg = num_to_shuffle - 1;

	  if (read_reg != write_reg)
	    {
	      ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
	      displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
	      if (debug_displaced)
		fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
				    "loaded register r%d to r%d\n"), read_reg,
				    write_reg);
	    }
	  else if (debug_displaced)
	    fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
				"r%d already in the right place\n"),
				write_reg);

	  clobbered &= ~(1 << write_reg);

	  num_to_shuffle--;
	}

      write_reg--;
    }

  /* Restore any registers we scribbled over.  */
  for (write_reg = 0; clobbered != 0; write_reg++)
    {
      if ((clobbered & (1 << write_reg)) != 0)
	{
	  displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
			       CANNOT_WRITE_PC);
	  if (debug_displaced)
	    fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
				"clobbered register r%d\n"), write_reg);
	  clobbered &= ~(1 << write_reg);
	}
    }

  /* Perform register writeback manually.  */
  if (dsc->u.block.writeback)
    {
      ULONGEST new_rn_val = dsc->u.block.xfer_addr;

      if (dsc->u.block.increment)
	new_rn_val += regs_loaded * 4;
      else
	new_rn_val -= regs_loaded * 4;

      displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
			   CANNOT_WRITE_PC);
    }
}
/* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
   in user-level code (in particular exception return, ldm rn, {...pc}^).  */

static int
arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
		     struct regcache *regs,
		     struct displaced_step_closure *dsc)
{
  int load = bit (insn, 20);
  int user = bit (insn, 22);
  int increment = bit (insn, 23);
  int before = bit (insn, 24);
  int writeback = bit (insn, 21);
  int rn = bits (insn, 16, 19);

  /* Block transfers which don't mention PC can be run directly
     out-of-line.  */
  if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
    return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);

  if (rn == ARM_PC_REGNUM)
    {
      warning (_("displaced: Unpredictable LDM or STM with "
		 "base register r15"));
      return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
    }

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
			"%.8lx\n", (unsigned long) insn);

  dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
  dsc->u.block.rn = rn;

  dsc->u.block.load = load;
  dsc->u.block.user = user;
  dsc->u.block.increment = increment;
  dsc->u.block.before = before;
  dsc->u.block.writeback = writeback;
  dsc->u.block.cond = bits (insn, 28, 31);

  dsc->u.block.regmask = insn & 0xffff;

  if (load)
    {
      if ((insn & 0xffff) == 0xffff)
	{
	  /* LDM with a fully-populated register list.  This case is
	     particularly tricky.  Implement for now by fully emulating the
	     instruction (which might not behave perfectly in all cases, but
	     these instructions should be rare enough for that not to matter
	     too much).  */
	  dsc->modinsn[0] = ARM_NOP;

	  dsc->cleanup = &cleanup_block_load_all;
	}
      else
	{
	  /* LDM of a list of registers which includes PC.  Implement by
	     rewriting the list of registers to be transferred into a
	     contiguous chunk r0...rX before doing the transfer, then shuffling
	     registers into the correct places in the cleanup routine.  */
	  unsigned int regmask = insn & 0xffff;
	  unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
	  unsigned int to = 0, from = 0, i, new_rn;

	  /* Save r0..r(n-1), which the rewritten LDM will clobber.  */
	  for (i = 0; i < num_in_list; i++)
	    dsc->tmp[i] = displaced_read_reg (regs, dsc, i);

	  /* Writeback makes things complicated.  We need to avoid clobbering
	     the base register with one of the registers in our modified
	     register list, but just using a different register can't work in
	     all cases, e.g.:

	       ldm r14!, {r0-r13,pc}

	     which would need to be rewritten as:

	       ldm rN!, {r0-r14}

	     but that can't work, because there's no free register for N.

	     Solve this by turning off the writeback bit, and emulating
	     writeback manually in the cleanup routine.  */

	  if (writeback)
	    insn &= ~(1 << 21);

	  new_regmask = (1 << num_in_list) - 1;

	  if (debug_displaced)
	    fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
				"{..., pc}: original reg list %.4x, modified "
				"list %.4x\n"), rn, writeback ? "!" : "",
				(int) insn & 0xffff, new_regmask);

	  dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);

	  dsc->cleanup = &cleanup_block_load_pc;
	}
    }
  else
    {
      /* STM of a list of registers which includes PC.  Run the instruction
	 as-is, but out of line: this will store the wrong value for the PC,
	 so we must manually fix up the memory in the cleanup routine.
	 Doing things this way has the advantage that we can auto-detect
	 the offset of the PC write (which is architecture-dependent) in
	 the cleanup routine.  */
      dsc->modinsn[0] = insn;

      dsc->cleanup = &cleanup_block_store_pc;
    }

  return 0;
}
7230 thumb2_copy_block_xfer (struct gdbarch
*gdbarch
, uint16_t insn1
, uint16_t insn2
,
7231 struct regcache
*regs
,
7232 struct displaced_step_closure
*dsc
)
7234 int rn
= bits (insn1
, 0, 3);
7235 int load
= bit (insn1
, 4);
7236 int writeback
= bit (insn1
, 5);
7238 /* Block transfers which don't mention PC can be run directly
7240 if (rn
!= ARM_PC_REGNUM
&& (insn2
& 0x8000) == 0)
7241 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
, "ldm/stm", dsc
);
7243 if (rn
== ARM_PC_REGNUM
)
7245 warning (_("displaced: Unpredictable LDM or STM with "
7246 "base register r15"));
7247 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7248 "unpredictable ldm/stm", dsc
);
7251 if (debug_displaced
)
7252 fprintf_unfiltered (gdb_stdlog
, "displaced: copying block transfer insn "
7253 "%.4x%.4x\n", insn1
, insn2
);
7255 /* Clear bit 13, since it should be always zero. */
7256 dsc
->u
.block
.regmask
= (insn2
& 0xdfff);
7257 dsc
->u
.block
.rn
= rn
;
7259 dsc
->u
.block
.load
= load
;
7260 dsc
->u
.block
.user
= 0;
7261 dsc
->u
.block
.increment
= bit (insn1
, 7);
7262 dsc
->u
.block
.before
= bit (insn1
, 8);
7263 dsc
->u
.block
.writeback
= writeback
;
7264 dsc
->u
.block
.cond
= INST_AL
;
7265 dsc
->u
.block
.xfer_addr
= displaced_read_reg (regs
, dsc
, rn
);
7269 if (dsc
->u
.block
.regmask
== 0xffff)
7271 /* This branch is impossible to happen. */
7276 unsigned int regmask
= dsc
->u
.block
.regmask
;
7277 unsigned int num_in_list
= bitcount (regmask
), new_regmask
, bit
= 1;
7278 unsigned int to
= 0, from
= 0, i
, new_rn
;
7280 for (i
= 0; i
< num_in_list
; i
++)
7281 dsc
->tmp
[i
] = displaced_read_reg (regs
, dsc
, i
);
7286 new_regmask
= (1 << num_in_list
) - 1;
7288 if (debug_displaced
)
7289 fprintf_unfiltered (gdb_stdlog
, _("displaced: LDM r%d%s, "
7290 "{..., pc}: original reg list %.4x, modified "
7291 "list %.4x\n"), rn
, writeback
? "!" : "",
7292 (int) dsc
->u
.block
.regmask
, new_regmask
);
7294 dsc
->modinsn
[0] = insn1
;
7295 dsc
->modinsn
[1] = (new_regmask
& 0xffff);
7298 dsc
->cleanup
= &cleanup_block_load_pc
;
7303 dsc
->modinsn
[0] = insn1
;
7304 dsc
->modinsn
[1] = insn2
;
7306 dsc
->cleanup
= &cleanup_block_store_pc
;
7311 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
7312 for Linux, where some SVC instructions must be treated specially. */
7315 cleanup_svc (struct gdbarch
*gdbarch
, struct regcache
*regs
,
7316 struct displaced_step_closure
*dsc
)
7318 CORE_ADDR resume_addr
= dsc
->insn_addr
+ dsc
->insn_size
;
7320 if (debug_displaced
)
7321 fprintf_unfiltered (gdb_stdlog
, "displaced: cleanup for svc, resume at "
7322 "%.8lx\n", (unsigned long) resume_addr
);
7324 displaced_write_reg (regs
, dsc
, ARM_PC_REGNUM
, resume_addr
, BRANCH_WRITE_PC
);
7328 /* Common copy routine for svc instruciton. */
7331 install_svc (struct gdbarch
*gdbarch
, struct regcache
*regs
,
7332 struct displaced_step_closure
*dsc
)
7334 /* Preparation: none.
7335 Insn: unmodified svc.
7336 Cleanup: pc <- insn_addr + insn_size. */
7338 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
7340 dsc
->wrote_to_pc
= 1;
7342 /* Allow OS-specific code to override SVC handling. */
7343 if (dsc
->u
.svc
.copy_svc_os
)
7344 return dsc
->u
.svc
.copy_svc_os (gdbarch
, regs
, dsc
);
7347 dsc
->cleanup
= &cleanup_svc
;
7353 arm_copy_svc (struct gdbarch
*gdbarch
, uint32_t insn
,
7354 struct regcache
*regs
, struct displaced_step_closure
*dsc
)
7357 if (debug_displaced
)
7358 fprintf_unfiltered (gdb_stdlog
, "displaced: copying svc insn %.8lx\n",
7359 (unsigned long) insn
);
7361 dsc
->modinsn
[0] = insn
;
7363 return install_svc (gdbarch
, regs
, dsc
);
7367 thumb_copy_svc (struct gdbarch
*gdbarch
, uint16_t insn
,
7368 struct regcache
*regs
, struct displaced_step_closure
*dsc
)
7371 if (debug_displaced
)
7372 fprintf_unfiltered (gdb_stdlog
, "displaced: copying svc insn %.4x\n",
7375 dsc
->modinsn
[0] = insn
;
7377 return install_svc (gdbarch
, regs
, dsc
);
7380 /* Copy undefined instructions. */
7383 arm_copy_undef (struct gdbarch
*gdbarch
, uint32_t insn
,
7384 struct displaced_step_closure
*dsc
)
7386 if (debug_displaced
)
7387 fprintf_unfiltered (gdb_stdlog
,
7388 "displaced: copying undefined insn %.8lx\n",
7389 (unsigned long) insn
);
7391 dsc
->modinsn
[0] = insn
;
7397 thumb_32bit_copy_undef (struct gdbarch
*gdbarch
, uint16_t insn1
, uint16_t insn2
,
7398 struct displaced_step_closure
*dsc
)
7401 if (debug_displaced
)
7402 fprintf_unfiltered (gdb_stdlog
, "displaced: copying undefined insn "
7403 "%.4x %.4x\n", (unsigned short) insn1
,
7404 (unsigned short) insn2
);
7406 dsc
->modinsn
[0] = insn1
;
7407 dsc
->modinsn
[1] = insn2
;
7413 /* Copy unpredictable instructions. */
7416 arm_copy_unpred (struct gdbarch
*gdbarch
, uint32_t insn
,
7417 struct displaced_step_closure
*dsc
)
7419 if (debug_displaced
)
7420 fprintf_unfiltered (gdb_stdlog
, "displaced: copying unpredictable insn "
7421 "%.8lx\n", (unsigned long) insn
);
7423 dsc
->modinsn
[0] = insn
;
/* The decode_* functions are instruction decoding helpers.  They mostly follow
   the presentation in the ARM ARM.  */

/* Decode miscellaneous, memory-hint and Advanced SIMD instructions
   (unconditional space, bit 27 clear) and dispatch to the appropriate
   copy routine.  */

static int
arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
			      struct regcache *regs,
			      struct displaced_step_closure *dsc)
{
  unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
  unsigned int rn = bits (insn, 16, 19);

  if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0xe) == 0x0)
    return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
  else if (op1 == 0x10 && op2 == 0x0 && (rn & 0xe) == 0x1)
    return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
  else if ((op1 & 0x60) == 0x20)
    return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
  else if ((op1 & 0x71) == 0x40)
    return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
				dsc);
  else if ((op1 & 0x77) == 0x41)
    return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
  else if ((op1 & 0x77) == 0x45)
    return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pli.  */
  else if ((op1 & 0x77) == 0x51)
    {
      if (rn != 0xf)
	return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pld/pldw.  */
      else
	return arm_copy_unpred (gdbarch, insn, dsc);
    }
  else if ((op1 & 0x77) == 0x55)
    return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pld/pldw.  */
  else if (op1 == 0x57)
    switch (op2)
      {
      case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
      case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
      case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
      case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
      default: return arm_copy_unpred (gdbarch, insn, dsc);
      }
  else if ((op1 & 0x63) == 0x43)
    return arm_copy_unpred (gdbarch, insn, dsc);
  else if ((op2 & 0x1) == 0x0)
    switch (op1 & ~0x80)
      {
      case 0x61:
	return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
      case 0x65:
	return arm_copy_preload_reg (gdbarch, insn, regs, dsc);  /* pli reg.  */
      case 0x71: case 0x75:
	/* pld/pldw reg.  */
	return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
      case 0x63: case 0x67: case 0x73: case 0x77:
	return arm_copy_unpred (gdbarch, insn, dsc);
      default:
	return arm_copy_undef (gdbarch, insn, dsc);
      }
  else
    return arm_copy_undef (gdbarch, insn, dsc);  /* Probably unreachable.  */
}
/* Decode the ARM unconditional instruction space (cond field 0b1111) and
   dispatch to the appropriate copy routine.  */

static int
arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  struct displaced_step_closure *dsc)
{
  if (bit (insn, 27) == 0)
    return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
  /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx.  */
  else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
    {
    case 0x0: case 0x2:
      return arm_copy_unmodified (gdbarch, insn, "srs", dsc);

    case 0x1: case 0x3:
      return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);

    case 0x4: case 0x5: case 0x6: case 0x7:
      return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);

    case 0x8:
      switch ((insn & 0xe00000) >> 21)
	{
	case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
	  /* stc/stc2.  */
	  return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);

	case 0x2:
	  return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);

	default:
	  return arm_copy_undef (gdbarch, insn, dsc);
	}

    case 0x9:
      {
	int rn_f = (bits (insn, 16, 19) == 0xf);

	switch ((insn & 0xe00000) >> 21)
	  {
	  case 0x1: case 0x3:
	    /* ldc/ldc2 imm (undefined for rn == pc).  */
	    return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
			: arm_copy_copro_load_store (gdbarch, insn, regs, dsc);

	  case 0x2:
	    return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);

	  case 0x4: case 0x5: case 0x6: case 0x7:
	    /* ldc/ldc2 lit (undefined for rn != pc).  */
	    return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
			: arm_copy_undef (gdbarch, insn, dsc);

	  default:
	    return arm_copy_undef (gdbarch, insn, dsc);
	  }
      }

    case 0xa:
      return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);

    case 0xb:
      if (bits (insn, 16, 19) == 0xf)
	/* ldc/ldc2 lit.  */
	return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0xc:
      if (bit (insn, 4))
	return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
      else
	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);

    case 0xd:
      if (bit (insn, 4))
	return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
      else
	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);

    default:
      return arm_copy_undef (gdbarch, insn, dsc);
    }
}
/* Decode miscellaneous instructions in dp/misc encoding space.  */

static int
arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  struct displaced_step_closure *dsc)
{
  unsigned int op2 = bits (insn, 4, 6);
  unsigned int op = bits (insn, 21, 22);
  unsigned int op1 = bits (insn, 16, 19);

  switch (op2)
    {
    case 0x0:
      return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);

    case 0x1:
      if (op == 0x1)  /* bx.  */
	return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
      else if (op == 0x3)
	return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x2:
      if (op == 0x1)
	/* Not really supported.  */
	return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x3:
      if (op == 0x1)
	return arm_copy_bx_blx_reg (gdbarch, insn,
				    regs, dsc);  /* blx register.  */
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x5:
      return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);

    case 0x7:
      if (op == 0x1)
	return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
      else if (op == 0x3)
	/* Not really supported.  */
	return arm_copy_unmodified (gdbarch, insn, "smc", dsc);

    default:
      return arm_copy_undef (gdbarch, insn, dsc);
    }
}
/* Decode the ARM data-processing/miscellaneous instruction space and
   dispatch to the appropriate copy routine.  */

static int
arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
		    struct regcache *regs,
		    struct displaced_step_closure *dsc)
{
  if (bit (insn, 25))
    switch (bits (insn, 20, 24))
      {
      case 0x10:
	return arm_copy_unmodified (gdbarch, insn, "movw", dsc);

      case 0x14:
	return arm_copy_unmodified (gdbarch, insn, "movt", dsc);

      case 0x12: case 0x16:
	return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);

      default:
	return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
      }
  else
    {
      uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);

      if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
	return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
	return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
	return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
	return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
      else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
	return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
      else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
	return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
      else if (op2 == 0xb || (op2 & 0xd) == 0xd)
	/* 2nd arg means "unpriveleged".  */
	return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
				     dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
/* Decode ARM load/store word and unsigned byte instructions and dispatch
   to arm_copy_ldr_str_ldrb_strb with the appropriate load/size/user-mode
   flags.  A = bit 25 (register offset), B = bit 4.  */

static int
arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
			     struct regcache *regs,
			     struct displaced_step_closure *dsc)
{
  int a = bit (insn, 25), b = bit (insn, 4);
  uint32_t op1 = bits (insn, 20, 24);
  int rn_f = bits (insn, 16, 19) == 0xf;

  if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
      || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
  else if ((!a && (op1 & 0x17) == 0x02)
	   || (a && (op1 & 0x17) == 0x02 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
  else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
	   || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
  else if ((!a && (op1 & 0x17) == 0x03)
	   || (a && (op1 & 0x17) == 0x03 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
  else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
	   || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
  else if ((!a && (op1 & 0x17) == 0x06)
	   || (a && (op1 & 0x17) == 0x06 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
  else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
	   || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
  else if ((!a && (op1 & 0x17) == 0x07)
	   || (a && (op1 & 0x17) == 0x07 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);

  /* Should be unreachable.  */
  return 1;
}
/* Decode ARM media instructions (parallel add/sub, pack/saturate/reverse,
   usad8/usada8, bit-field ops) and dispatch to the appropriate copy
   routine.  */

static int
arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
		  struct displaced_step_closure *dsc)
{
  switch (bits (insn, 20, 24))
    {
    case 0x00: case 0x01: case 0x02: case 0x03:
      return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);

    case 0x04: case 0x05: case 0x06: case 0x07:
      return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);

    case 0x08: case 0x09: case 0x0a: case 0x0b:
    case 0x0c: case 0x0d: case 0x0e: case 0x0f:
      return arm_copy_unmodified (gdbarch, insn,
				  "decode/pack/unpack/saturate/reverse", dsc);

    case 0x18:
      if (bits (insn, 5, 7) == 0)  /* op2.  */
	{
	  if (bits (insn, 12, 15) == 0xf)
	    return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
	  else
	    return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
	}
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1a: case 0x1b:
      if (bits (insn, 5, 6) == 0x2)  /* op2[1:0].  */
	return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1c: case 0x1d:
      if (bits (insn, 5, 6) == 0x0)  /* op2[1:0].  */
	{
	  if (bits (insn, 0, 3) == 0xf)
	    return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
	  else
	    return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
	}
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1e: case 0x1f:
      if (bits (insn, 5, 6) == 0x2)  /* op2[1:0].  */
	return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
/* Decode branch (B/BL/BLX immediate, bit 25 set) versus block transfer
   (LDM/STM, bit 25 clear) and dispatch accordingly.  */

static int
arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, int32_t insn,
			struct regcache *regs,
			struct displaced_step_closure *dsc)
{
  if (bit (insn, 25))
    return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
  else
    return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
}
/* Decode ARM extension-register (VFP/Neon) load/store instructions and
   dispatch to the appropriate copy routine.  */

static int
arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  struct displaced_step_closure *dsc)
{
  unsigned int opcode = bits (insn, 20, 24);

  switch (opcode)
    {
    case 0x04: case 0x05:  /* VFP/Neon mrrc/mcrr.  */
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);

    case 0x08: case 0x0a: case 0x0c: case 0x0e:
    case 0x12: case 0x16:
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);

    case 0x09: case 0x0b: case 0x0d: case 0x0f:
    case 0x13: case 0x17:
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);

    case 0x10: case 0x14: case 0x18: case 0x1c:  /* vstr.  */
    case 0x11: case 0x15: case 0x19: case 0x1d:  /* vldr.  */
      /* Note: no writeback for these instructions.  Bit 25 will always be
	 zero though (via caller), so the following works OK.  */
      return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
/* Decode shifted register instructions.  */

static int
thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
			    uint16_t insn2, struct regcache *regs,
			    struct displaced_step_closure *dsc)
{
  /* PC is only allowed to be used in instruction MOV.  */

  unsigned int op = bits (insn1, 5, 8);
  unsigned int rn = bits (insn1, 0, 3);

  if (op == 0x2 && rn == 0xf)  /* MOV */
    return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
  else
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					"dp (shift reg)", dsc);
}
/* Decode extension register load/store.  Exactly the same as
   arm_decode_ext_reg_ld_st.  */

static int
thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
			     uint16_t insn2, struct regcache *regs,
			     struct displaced_step_closure *dsc)
{
  unsigned int opcode = bits (insn1, 4, 8);

  switch (opcode)
    {
    case 0x04: case 0x05:
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vfp/neon vmov", dsc);

    case 0x08: case 0x0c: /* 01x00 */
    case 0x0a: case 0x0e: /* 01x10 */
    case 0x12: case 0x16: /* 10x10 */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vfp/neon vstm/vpush", dsc);

    case 0x09: case 0x0d: /* 01x01 */
    case 0x0b: case 0x0f: /* 01x11 */
    case 0x13: case 0x17: /* 10x11 */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vfp/neon vldm/vpop", dsc);

    case 0x10: case 0x14: case 0x18: case 0x1c:  /* vstr.  */
      /* NOTE(review): the extraction dropped the name string here; "vstr"
	 is the natural label — confirm against the original source.  */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vstr", dsc);
    case 0x11: case 0x15: case 0x19: case 0x1d:  /* vldr.  */
      return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
7869 arm_decode_svc_copro (struct gdbarch
*gdbarch
, uint32_t insn
, CORE_ADDR to
,
7870 struct regcache
*regs
, struct displaced_step_closure
*dsc
)
7872 unsigned int op1
= bits (insn
, 20, 25);
7873 int op
= bit (insn
, 4);
7874 unsigned int coproc
= bits (insn
, 8, 11);
7875 unsigned int rn
= bits (insn
, 16, 19);
7877 if ((op1
& 0x20) == 0x00 && (op1
& 0x3a) != 0x00 && (coproc
& 0xe) == 0xa)
7878 return arm_decode_ext_reg_ld_st (gdbarch
, insn
, regs
, dsc
);
7879 else if ((op1
& 0x21) == 0x00 && (op1
& 0x3a) != 0x00
7880 && (coproc
& 0xe) != 0xa)
7882 return arm_copy_copro_load_store (gdbarch
, insn
, regs
, dsc
);
7883 else if ((op1
& 0x21) == 0x01 && (op1
& 0x3a) != 0x00
7884 && (coproc
& 0xe) != 0xa)
7885 /* ldc/ldc2 imm/lit. */
7886 return arm_copy_copro_load_store (gdbarch
, insn
, regs
, dsc
);
7887 else if ((op1
& 0x3e) == 0x00)
7888 return arm_copy_undef (gdbarch
, insn
, dsc
);
7889 else if ((op1
& 0x3e) == 0x04 && (coproc
& 0xe) == 0xa)
7890 return arm_copy_unmodified (gdbarch
, insn
, "neon 64bit xfer", dsc
);
7891 else if (op1
== 0x04 && (coproc
& 0xe) != 0xa)
7892 return arm_copy_unmodified (gdbarch
, insn
, "mcrr/mcrr2", dsc
);
7893 else if (op1
== 0x05 && (coproc
& 0xe) != 0xa)
7894 return arm_copy_unmodified (gdbarch
, insn
, "mrrc/mrrc2", dsc
);
7895 else if ((op1
& 0x30) == 0x20 && !op
)
7897 if ((coproc
& 0xe) == 0xa)
7898 return arm_copy_unmodified (gdbarch
, insn
, "vfp dataproc", dsc
);
7900 return arm_copy_unmodified (gdbarch
, insn
, "cdp/cdp2", dsc
);
7902 else if ((op1
& 0x30) == 0x20 && op
)
7903 return arm_copy_unmodified (gdbarch
, insn
, "neon 8/16/32 bit xfer", dsc
);
7904 else if ((op1
& 0x31) == 0x20 && op
&& (coproc
& 0xe) != 0xa)
7905 return arm_copy_unmodified (gdbarch
, insn
, "mcr/mcr2", dsc
);
7906 else if ((op1
& 0x31) == 0x21 && op
&& (coproc
& 0xe) != 0xa)
7907 return arm_copy_unmodified (gdbarch
, insn
, "mrc/mrc2", dsc
);
7908 else if ((op1
& 0x30) == 0x30)
7909 return arm_copy_svc (gdbarch
, insn
, regs
, dsc
);
7911 return arm_copy_undef (gdbarch
, insn
, dsc
); /* Possibly unreachable. */
/* Decode 32-bit Thumb coprocessor/SIMD/VFP instructions and dispatch to
   the appropriate copy routine.  */

static int
thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
			 uint16_t insn2, struct regcache *regs,
			 struct displaced_step_closure *dsc)
{
  unsigned int coproc = bits (insn2, 8, 11);
  unsigned int op1 = bits (insn1, 4, 9);
  unsigned int bit_5_8 = bits (insn1, 5, 8);
  unsigned int bit_9 = bit (insn1, 9);
  unsigned int bit_4 = bit (insn1, 4);
  unsigned int rn = bits (insn1, 0, 3);

  if (bit_9 == 0)
    {
      if (bit_5_8 == 2)
	return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					    "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
					    dsc);
      else if (bit_5_8 == 0)  /* UNDEFINED.  */
	return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
      else
	{
	  /*coproc is 101x.  SIMD/VFP, ext registers load/store.  */
	  if ((coproc & 0xe) == 0xa)
	    return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
						dsc);
	  else /* coproc is not 101x.  */
	    {
	      if (bit_4 == 0) /* STC/STC2.  */
		return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						    "stc/stc2", dsc);
	      else /* LDC/LDC2 {literal, immediate}.  */
		return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
						     regs, dsc);
	    }
	}
    }
  else
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
}
7958 install_pc_relative (struct gdbarch
*gdbarch
, struct regcache
*regs
,
7959 struct displaced_step_closure
*dsc
, int rd
)
7965 Preparation: Rd <- PC
7971 int val
= displaced_read_reg (regs
, dsc
, ARM_PC_REGNUM
);
7972 displaced_write_reg (regs
, dsc
, rd
, val
, CANNOT_WRITE_PC
);
7976 thumb_copy_pc_relative_16bit (struct gdbarch
*gdbarch
, struct regcache
*regs
,
7977 struct displaced_step_closure
*dsc
,
7978 int rd
, unsigned int imm
)
7981 /* Encoding T2: ADDS Rd, #imm */
7982 dsc
->modinsn
[0] = (0x3000 | (rd
<< 8) | imm
);
7984 install_pc_relative (gdbarch
, regs
, dsc
, rd
);
7990 thumb_decode_pc_relative_16bit (struct gdbarch
*gdbarch
, uint16_t insn
,
7991 struct regcache
*regs
,
7992 struct displaced_step_closure
*dsc
)
7994 unsigned int rd
= bits (insn
, 8, 10);
7995 unsigned int imm8
= bits (insn
, 0, 7);
7997 if (debug_displaced
)
7998 fprintf_unfiltered (gdb_stdlog
,
7999 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
8002 return thumb_copy_pc_relative_16bit (gdbarch
, regs
, dsc
, rd
, imm8
);
8006 thumb_copy_pc_relative_32bit (struct gdbarch
*gdbarch
, uint16_t insn1
,
8007 uint16_t insn2
, struct regcache
*regs
,
8008 struct displaced_step_closure
*dsc
)
8010 unsigned int rd
= bits (insn2
, 8, 11);
8011 /* Since immediate has the same encoding in ADR ADD and SUB, so we simply
8012 extract raw immediate encoding rather than computing immediate. When
8013 generating ADD or SUB instruction, we can simply perform OR operation to
8014 set immediate into ADD. */
8015 unsigned int imm_3_8
= insn2
& 0x70ff;
8016 unsigned int imm_i
= insn1
& 0x0400; /* Clear all bits except bit 10. */
8018 if (debug_displaced
)
8019 fprintf_unfiltered (gdb_stdlog
,
8020 "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
8021 rd
, imm_i
, imm_3_8
, insn1
, insn2
);
8023 if (bit (insn1
, 7)) /* Encoding T2 */
8025 /* Encoding T3: SUB Rd, Rd, #imm */
8026 dsc
->modinsn
[0] = (0xf1a0 | rd
| imm_i
);
8027 dsc
->modinsn
[1] = ((rd
<< 8) | imm_3_8
);
8029 else /* Encoding T3 */
8031 /* Encoding T3: ADD Rd, Rd, #imm */
8032 dsc
->modinsn
[0] = (0xf100 | rd
| imm_i
);
8033 dsc
->modinsn
[1] = ((rd
<< 8) | imm_3_8
);
8037 install_pc_relative (gdbarch
, regs
, dsc
, rd
);
8043 thumb_copy_16bit_ldr_literal (struct gdbarch
*gdbarch
, unsigned short insn1
,
8044 struct regcache
*regs
,
8045 struct displaced_step_closure
*dsc
)
8047 unsigned int rt
= bits (insn1
, 8, 10);
8049 int imm8
= (bits (insn1
, 0, 7) << 2);
8050 CORE_ADDR from
= dsc
->insn_addr
;
8056 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
8058 Insn: LDR R0, [R2, R3];
8059 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
8061 if (debug_displaced
)
8062 fprintf_unfiltered (gdb_stdlog
,
8063 "displaced: copying thumb ldr r%d [pc #%d]\n"
8066 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
8067 dsc
->tmp
[2] = displaced_read_reg (regs
, dsc
, 2);
8068 dsc
->tmp
[3] = displaced_read_reg (regs
, dsc
, 3);
8069 pc
= displaced_read_reg (regs
, dsc
, ARM_PC_REGNUM
);
8070 /* The assembler calculates the required value of the offset from the
8071 Align(PC,4) value of this instruction to the label. */
8072 pc
= pc
& 0xfffffffc;
8074 displaced_write_reg (regs
, dsc
, 2, pc
, CANNOT_WRITE_PC
);
8075 displaced_write_reg (regs
, dsc
, 3, imm8
, CANNOT_WRITE_PC
);
8078 dsc
->u
.ldst
.xfersize
= 4;
8080 dsc
->u
.ldst
.immed
= 0;
8081 dsc
->u
.ldst
.writeback
= 0;
8082 dsc
->u
.ldst
.restore_r4
= 0;
8084 dsc
->modinsn
[0] = 0x58d0; /* ldr r0, [r2, r3]*/
8086 dsc
->cleanup
= &cleanup_load
;
8091 /* Copy Thumb cbnz/cbz insruction. */
8094 thumb_copy_cbnz_cbz (struct gdbarch
*gdbarch
, uint16_t insn1
,
8095 struct regcache
*regs
,
8096 struct displaced_step_closure
*dsc
)
8098 int non_zero
= bit (insn1
, 11);
8099 unsigned int imm5
= (bit (insn1
, 9) << 6) | (bits (insn1
, 3, 7) << 1);
8100 CORE_ADDR from
= dsc
->insn_addr
;
8101 int rn
= bits (insn1
, 0, 2);
8102 int rn_val
= displaced_read_reg (regs
, dsc
, rn
);
8104 dsc
->u
.branch
.cond
= (rn_val
&& non_zero
) || (!rn_val
&& !non_zero
);
8105 /* CBNZ and CBZ do not affect the condition flags. If condition is true,
8106 set it INST_AL, so cleanup_branch will know branch is taken, otherwise,
8107 condition is false, let it be, cleanup_branch will do nothing. */
8108 if (dsc
->u
.branch
.cond
)
8110 dsc
->u
.branch
.cond
= INST_AL
;
8111 dsc
->u
.branch
.dest
= from
+ 4 + imm5
;
8114 dsc
->u
.branch
.dest
= from
+ 2;
8116 dsc
->u
.branch
.link
= 0;
8117 dsc
->u
.branch
.exchange
= 0;
8119 if (debug_displaced
)
8120 fprintf_unfiltered (gdb_stdlog
, "displaced: copying %s [r%d = 0x%x]"
8121 " insn %.4x to %.8lx\n", non_zero
? "cbnz" : "cbz",
8122 rn
, rn_val
, insn1
, dsc
->u
.branch
.dest
);
8124 dsc
->modinsn
[0] = THUMB_NOP
;
8126 dsc
->cleanup
= &cleanup_branch
;
8130 /* Copy Table Branch Byte/Halfword */
8132 thumb2_copy_table_branch (struct gdbarch
*gdbarch
, uint16_t insn1
,
8133 uint16_t insn2
, struct regcache
*regs
,
8134 struct displaced_step_closure
*dsc
)
8136 ULONGEST rn_val
, rm_val
;
8137 int is_tbh
= bit (insn2
, 4);
8138 CORE_ADDR halfwords
= 0;
8139 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
8141 rn_val
= displaced_read_reg (regs
, dsc
, bits (insn1
, 0, 3));
8142 rm_val
= displaced_read_reg (regs
, dsc
, bits (insn2
, 0, 3));
8148 target_read_memory (rn_val
+ 2 * rm_val
, buf
, 2);
8149 halfwords
= extract_unsigned_integer (buf
, 2, byte_order
);
8155 target_read_memory (rn_val
+ rm_val
, buf
, 1);
8156 halfwords
= extract_unsigned_integer (buf
, 1, byte_order
);
8159 if (debug_displaced
)
8160 fprintf_unfiltered (gdb_stdlog
, "displaced: %s base 0x%x offset 0x%x"
8161 " offset 0x%x\n", is_tbh
? "tbh" : "tbb",
8162 (unsigned int) rn_val
, (unsigned int) rm_val
,
8163 (unsigned int) halfwords
);
8165 dsc
->u
.branch
.cond
= INST_AL
;
8166 dsc
->u
.branch
.link
= 0;
8167 dsc
->u
.branch
.exchange
= 0;
8168 dsc
->u
.branch
.dest
= dsc
->insn_addr
+ 4 + 2 * halfwords
;
8170 dsc
->cleanup
= &cleanup_branch
;
8176 cleanup_pop_pc_16bit_all (struct gdbarch
*gdbarch
, struct regcache
*regs
,
8177 struct displaced_step_closure
*dsc
)
8180 int val
= displaced_read_reg (regs
, dsc
, 7);
8181 displaced_write_reg (regs
, dsc
, ARM_PC_REGNUM
, val
, BX_WRITE_PC
);
8184 val
= displaced_read_reg (regs
, dsc
, 8);
8185 displaced_write_reg (regs
, dsc
, 7, val
, CANNOT_WRITE_PC
);
8188 displaced_write_reg (regs
, dsc
, 8, dsc
->tmp
[0], CANNOT_WRITE_PC
);
8193 thumb_copy_pop_pc_16bit (struct gdbarch
*gdbarch
, unsigned short insn1
,
8194 struct regcache
*regs
,
8195 struct displaced_step_closure
*dsc
)
8197 dsc
->u
.block
.regmask
= insn1
& 0x00ff;
8199 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
8202 (1) register list is full, that is, r0-r7 are used.
8203 Prepare: tmp[0] <- r8
8205 POP {r0, r1, ...., r6, r7}; remove PC from reglist
8206 MOV r8, r7; Move value of r7 to r8;
8207 POP {r7}; Store PC value into r7.
8209 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
8211 (2) register list is not full, supposing there are N registers in
8212 register list (except PC, 0 <= N <= 7).
8213 Prepare: for each i, 0 - N, tmp[i] <- ri.
8215 POP {r0, r1, ...., rN};
8217 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
8218 from tmp[] properly.
8220 if (debug_displaced
)
8221 fprintf_unfiltered (gdb_stdlog
,
8222 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
8223 dsc
->u
.block
.regmask
, insn1
);
8225 if (dsc
->u
.block
.regmask
== 0xff)
8227 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 8);
8229 dsc
->modinsn
[0] = (insn1
& 0xfeff); /* POP {r0,r1,...,r6, r7} */
8230 dsc
->modinsn
[1] = 0x46b8; /* MOV r8, r7 */
8231 dsc
->modinsn
[2] = 0xbc80; /* POP {r7} */
8234 dsc
->cleanup
= &cleanup_pop_pc_16bit_all
;
8238 unsigned int num_in_list
= bitcount (dsc
->u
.block
.regmask
);
8239 unsigned int new_regmask
, bit
= 1;
8240 unsigned int to
= 0, from
= 0, i
, new_rn
;
8242 for (i
= 0; i
< num_in_list
+ 1; i
++)
8243 dsc
->tmp
[i
] = displaced_read_reg (regs
, dsc
, i
);
8245 new_regmask
= (1 << (num_in_list
+ 1)) - 1;
8247 if (debug_displaced
)
8248 fprintf_unfiltered (gdb_stdlog
, _("displaced: POP "
8249 "{..., pc}: original reg list %.4x,"
8250 " modified list %.4x\n"),
8251 (int) dsc
->u
.block
.regmask
, new_regmask
);
8253 dsc
->u
.block
.regmask
|= 0x8000;
8254 dsc
->u
.block
.writeback
= 0;
8255 dsc
->u
.block
.cond
= INST_AL
;
8257 dsc
->modinsn
[0] = (insn1
& ~0x1ff) | (new_regmask
& 0xff);
8259 dsc
->cleanup
= &cleanup_block_load_pc
;
/* Decode a displaced 16-bit Thumb instruction INSN1 and dispatch to the
   appropriate copy routine, which fills DSC with the (possibly modified)
   instruction to execute out of line plus any cleanup callback.  Calls
   internal_error if the instruction cannot be decoded.  */

static void
thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
				    struct regcache *regs,
				    struct displaced_step_closure *dsc)
{
  unsigned short op_bit_12_15 = bits (insn1, 12, 15);
  unsigned short op_bit_10_11 = bits (insn1, 10, 11);
  int err = 0;

  /* 16-bit thumb instructions.  Dispatch on the top opcode field; only
     PC-relative instructions need modified copies, everything else is
     copied unmodified into the scratch area.  */
  switch (op_bit_12_15)
    {
      /* Shift (imme), add, subtract, move and compare.  */
    case 0: case 1: case 2: case 3:
      err = thumb_copy_unmodified_16bit (gdbarch, insn1,
					 "shift/add/sub/mov/cmp",
					 dsc);
      break;
    case 4:
      switch (op_bit_10_11)
	{
	case 0: /* Data-processing */
	  err = thumb_copy_unmodified_16bit (gdbarch, insn1,
					     "data-processing",
					     dsc);
	  break;
	case 1: /* Special data instructions and branch and exchange.  */
	  {
	    unsigned short op = bits (insn1, 7, 9);
	    if (op == 6 || op == 7) /* BX or BLX */
	      err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
	    else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers.  */
	      err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
	    else
	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
						 dsc);
	  }
	  break;
	default: /* LDR (literal) */
	  err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
	}
      break;
    case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
      break;
    case 10:
      if (op_bit_10_11 < 2) /* Generate PC-relative address */
	err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
      else /* Generate SP-relative address */
	err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
      break;
    case 11: /* Misc 16-bit instructions */
      {
	switch (bits (insn1, 8, 11))
	  {
	  case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
	    err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
	    break;
	  case 12: case 13: /* POP */
	    if (bit (insn1, 8)) /* PC is in register list.  */
	      err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
	    else
	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
	    break;
	  case 15: /* If-Then, and hints */
	    if (bits (insn1, 0, 3))
	      /* If-Then makes up to four following instructions conditional.
		 IT instruction itself is not conditional, so handle it as a
		 common unmodified instruction.  */
	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
						 dsc);
	    else
	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
	    break;
	  default:
	    err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
	  }
      }
      break;
    case 12:
      if (op_bit_10_11 < 2) /* Store multiple registers */
	err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
      else /* Load multiple registers */
	err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
      break;
    case 13: /* Conditional branch and supervisor call */
      if (bits (insn1, 9, 11) != 7) /* conditional branch */
	err = thumb_copy_b (gdbarch, insn1, dsc);
      else
	err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
      break;
    case 14: /* Unconditional branch */
      err = thumb_copy_b (gdbarch, insn1, dsc);
      break;
    default:
      err = 1;
    }

  if (err)
    internal_error (__FILE__, __LINE__,
		    _("thumb_process_displaced_16bit_insn: Instruction decode error"));
}
/* Decode the 32-bit Thumb-2 "load and memory hints" instruction group
   (PLD/PLI, LDRB/LDRH/LDR and their literal forms) for displaced stepping.
   Returns nonzero on decode error.  Literal (PC-relative) loads need a
   modified copy; register/immediate forms are copied unmodified.  */

static int
decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
				 uint16_t insn1, uint16_t insn2,
				 struct regcache *regs,
				 struct displaced_step_closure *dsc)
{
  int rt = bits (insn2, 12, 15);
  int rn = bits (insn1, 0, 3);
  int op1 = bits (insn1, 7, 8);

  switch (bits (insn1, 5, 6))
    {
    case 0: /* Load byte and memory hints */
      if (rt == 0xf) /* PLD/PLI */
	{
	  if (rn == 0xf)
	    /* PLD literal or Encoding T3 of PLI(immediate, literal).  */
	    return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
	  else
	    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						"pli/pld", dsc);
	}
      else
	{
	  if (rn == 0xf) /* LDRB/LDRSB (literal) */
	    return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
					     1);
	  else
	    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						"ldrb{reg, immediate}/ldrbt",
						dsc);
	}

      break;
    case 1: /* Load halfword and memory hints.  */
      if (rt == 0xf) /* PLD{W} and Unalloc memory hint.  */
	return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					    "pld/unalloc memhint", dsc);
      else
	{
	  if (rn == 0xf) /* LDRH/LDRSH (literal) */
	    return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
					     2);
	  else
	    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						"ldrh/ldrht", dsc);
	}
      break;
    case 2: /* Load word */
      {
	int insn2_bit_8_11 = bits (insn2, 8, 11);

	if (rn == 0xf) /* LDR (literal) needs a PC-relative fixup.  */
	  return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
	else if (op1 == 0x1) /* Encoding T3 */
	  return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
					   0, 1);
	else /* op1 == 0x0 */
	  {
	    if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
	      /* LDR (immediate) */
	      return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
					       dsc, bit (insn2, 8), 1);
	    else if (insn2_bit_8_11 == 0xe) /* LDRT */
	      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						  "ldrt", dsc);
	    else
	      /* LDR (register) */
	      return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
					       dsc, 0, 0);
	  }
	break;
      }
    default:
      return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
      break;
    }
  return 0;
}
/* Decode a displaced 32-bit Thumb-2 instruction (halfwords INSN1/INSN2)
   and dispatch to the matching copy routine, filling DSC.  Only branches,
   PC-relative loads/address generation and table branches need modified
   copies.  Calls internal_error on decode failure.  */

static void
thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
				    uint16_t insn2, struct regcache *regs,
				    struct displaced_step_closure *dsc)
{
  int err = 0;
  unsigned short op = bit (insn2, 15);
  unsigned int op1 = bits (insn1, 11, 12);

  switch (op1)
    {
    case 1:
      {
	switch (bits (insn1, 9, 10))
	  {
	  case 0:
	    if (bit (insn1, 6))
	      {
		/* Load/store {dual, exclusive}, table branch.  */
		if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
		    && bits (insn2, 5, 7) == 0)
		  err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
						  dsc);
		else
		  /* PC is not allowed to use in load/store {dual, exclusive}
		     instructions.  */
		  err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						     "load/store dual/ex", dsc);
	      }
	    else /* load/store multiple */
	      {
		switch (bits (insn1, 7, 8))
		  {
		  case 0: case 3: /* SRS, RFE */
		    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						       "srs/rfe", dsc);
		    break;
		  case 1: case 2: /* LDM/STM/PUSH/POP */
		    err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
		    break;
		  }
	      }
	    break;

	  case 1:
	    /* Data-processing (shift register).  */
	    err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
					      dsc);
	    break;
	  default: /* Coprocessor instructions.  */
	    err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
	    break;
	  }
	break;
      }
    case 2: /* op1 = 2 */
      if (op) /* Branch and misc control.  */
	{
	  if (bit (insn2, 14)  /* BLX/BL */
	      || bit (insn2, 12) /* Unconditional branch */
	      || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
	    err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
	  else
	    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					       "misc ctrl", dsc);
	}
      else
	{
	  if (bit (insn1, 9)) /* Data processing (plain binary imm).  */
	    {
	      int op = bits (insn1, 4, 8);
	      int rn = bits (insn1, 0, 3);
	      /* ADR and ADD/SUB (SP plus/minus imm) with Rn == PC read the
		 PC and therefore need a modified copy.  */
	      if ((op == 0 || op == 0xa) && rn == 0xf)
		err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
						    regs, dsc);
	      else
		err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						   "dp/pb", dsc);
	    }
	  else /* Data processing (modified immediate) */
	    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					       "dp/mi", dsc);
	}
      break;
    case 3: /* op1 = 3 */
      switch (bits (insn1, 9, 10))
	{
	case 0:
	  if (bit (insn1, 4))
	    err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
						   regs, dsc);
	  else /* NEON Load/Store and Store single data item */
	    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					       "neon elt/struct load/store",
					       dsc);
	  break;
	case 1: /* op1 = 3, bits (9, 10) == 1 */
	  switch (bits (insn1, 7, 8))
	    {
	    case 0: case 1: /* Data processing (register) */
	      err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						 "dp(reg)", dsc);
	      break;
	    case 2: /* Multiply and absolute difference */
	      err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						 "mul/mua/diff", dsc);
	      break;
	    case 3: /* Long multiply and divide */
	      err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						 "lmul/lmua", dsc);
	      break;
	    }
	  break;
	default: /* Coprocessor instructions */
	  err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
	  break;
	}
      break;
    default:
      err = 1;
    }

  if (err)
    internal_error (__FILE__, __LINE__,
		    _("thumb_process_displaced_32bit_insn: Instruction decode error"));
}
8578 thumb_process_displaced_insn (struct gdbarch
*gdbarch
, CORE_ADDR from
,
8579 CORE_ADDR to
, struct regcache
*regs
,
8580 struct displaced_step_closure
*dsc
)
8582 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
8584 = read_memory_unsigned_integer (from
, 2, byte_order_for_code
);
8586 if (debug_displaced
)
8587 fprintf_unfiltered (gdb_stdlog
, "displaced: process thumb insn %.4x "
8588 "at %.8lx\n", insn1
, (unsigned long) from
);
8591 dsc
->insn_size
= thumb_insn_size (insn1
);
8592 if (thumb_insn_size (insn1
) == 4)
8595 = read_memory_unsigned_integer (from
+ 2, 2, byte_order_for_code
);
8596 thumb_process_displaced_32bit_insn (gdbarch
, insn1
, insn2
, regs
, dsc
);
8599 thumb_process_displaced_16bit_insn (gdbarch
, insn1
, regs
, dsc
);
/* Decode the instruction at FROM for displaced stepping and fill in DSC.
   Initializes the common closure fields, then either defers to the Thumb
   decoder (when the inferior is in Thumb state) or dispatches the 32-bit
   ARM instruction on its major opcode fields.  Calls internal_error on
   decode failure.  */

void
arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
			    CORE_ADDR to, struct regcache *regs,
			    struct displaced_step_closure *dsc)
{
  int err = 0;
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  uint32_t insn;

  /* Most displaced instructions use a 1-instruction scratch space, so set this
     here and override below if/when necessary.  */
  dsc->numinsns = 1;
  dsc->insn_addr = from;
  dsc->scratch_base = to;
  dsc->cleanup = NULL;
  dsc->wrote_to_pc = 0;

  if (!displaced_in_arm_mode (regs))
    return thumb_process_displaced_insn (gdbarch, from, to, regs, dsc);

  dsc->is_thumb = 0;
  dsc->insn_size = 4;
  insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
			"at %.8lx\n", (unsigned long) insn,
			(unsigned long) from);

  /* Condition field 0xf selects the unconditional instruction space.  */
  if ((insn & 0xf0000000) == 0xf0000000)
    err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
  /* Otherwise dispatch on bits 25-27 combined with bit 4.  */
  else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
    {
    case 0x0: case 0x1: case 0x2: case 0x3:
      err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
      break;

    case 0x4: case 0x5: case 0x6:
      err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
      break;

    case 0x7:
      err = arm_decode_media (gdbarch, insn, dsc);
      break;

    case 0x8: case 0x9: case 0xa: case 0xb:
      err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
      break;

    case 0xc: case 0xd: case 0xe: case 0xf:
      err = arm_decode_svc_copro (gdbarch, insn, to, regs, dsc);
      break;
    }

  if (err)
    internal_error (__FILE__, __LINE__,
		    _("arm_process_displaced_insn: Instruction decode error"));
}
/* Actually set up the scratch space for a displaced instruction.  Writes
   the modified instruction(s) recorded in DSC->modinsn to the scratch area
   at TO, followed by the architecture's breakpoint instruction so the
   inferior traps back to GDB after the displaced instruction executes.  */

void
arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
			    CORE_ADDR to, struct displaced_step_closure *dsc)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
  unsigned int i, len, offset;
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  /* Thumb modinsn entries are 2-byte halfwords, ARM entries 4-byte words.  */
  int size = dsc->is_thumb ? 2 : 4;
  const gdb_byte *bkp_insn;

  offset = 0;
  /* Poke modified instruction(s).  */
  for (i = 0; i < dsc->numinsns; i++)
    {
      if (debug_displaced)
	{
	  fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
	  if (size == 4)
	    fprintf_unfiltered (gdb_stdlog, "%.8lx",
				dsc->modinsn[i]);
	  else if (size == 2)
	    fprintf_unfiltered (gdb_stdlog, "%.4x",
				(unsigned short)dsc->modinsn[i]);

	  fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
			      (unsigned long) to + offset);
	}
      write_memory_unsigned_integer (to + offset, size,
				     byte_order_for_code,
				     dsc->modinsn[i]);
      offset += size;
    }

  /* Choose the correct breakpoint instruction.  */
  if (dsc->is_thumb)
    {
      bkp_insn = tdep->thumb_breakpoint;
      len = tdep->thumb_breakpoint_size;
    }
  else
    {
      bkp_insn = tdep->arm_breakpoint;
      len = tdep->arm_breakpoint_size;
    }

  /* Put breakpoint afterwards.  */
  write_memory (to + offset, bkp_insn, len);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
			paddress (gdbarch, from), paddress (gdbarch, to));
}
8716 /* Entry point for copying an instruction into scratch space for displaced
8719 struct displaced_step_closure
*
8720 arm_displaced_step_copy_insn (struct gdbarch
*gdbarch
,
8721 CORE_ADDR from
, CORE_ADDR to
,
8722 struct regcache
*regs
)
8724 struct displaced_step_closure
*dsc
= XNEW (struct displaced_step_closure
);
8726 arm_process_displaced_insn (gdbarch
, from
, to
, regs
, dsc
);
8727 arm_displaced_init_closure (gdbarch
, from
, to
, dsc
);
8732 /* Entry point for cleaning things up after a displaced instruction has been
8736 arm_displaced_step_fixup (struct gdbarch
*gdbarch
,
8737 struct displaced_step_closure
*dsc
,
8738 CORE_ADDR from
, CORE_ADDR to
,
8739 struct regcache
*regs
)
8742 dsc
->cleanup (gdbarch
, regs
, dsc
);
8744 if (!dsc
->wrote_to_pc
)
8745 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
,
8746 dsc
->insn_addr
+ dsc
->insn_size
);
8750 #include "bfd-in2.h"
8751 #include "libcoff.h"
/* Disassembler callback: print the instruction at MEMADDR using the
   opcodes library, selecting Thumb or ARM decoding based on GDB's own
   notion of the address's mode.  Returns the instruction length as the
   print_insn_* routines do.  */

static int
gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
{
  struct gdbarch *gdbarch = (struct gdbarch *) info->application_data;

  if (arm_pc_is_thumb (gdbarch, memaddr))
    {
      /* NOTE: static one-time-initialized fake objects; not reentrant,
	 relies on GDB being single-threaded here.  */
      static asymbol *asym;
      static combined_entry_type ce;
      static struct coff_symbol_struct csym;
      static struct bfd fake_bfd;
      static bfd_target fake_target;

      if (csym.native == NULL)
	{
	  /* Create a fake symbol vector containing a Thumb symbol.
	     This is solely so that the code in print_insn_little_arm()
	     and print_insn_big_arm() in opcodes/arm-dis.c will detect
	     the presence of a Thumb symbol and switch to decoding
	     Thumb instructions.  */

	  fake_target.flavour = bfd_target_coff_flavour;
	  fake_bfd.xvec = &fake_target;
	  ce.u.syment.n_sclass = C_THUMBEXTFUNC;
	  csym.native = &ce;
	  csym.symbol.the_bfd = &fake_bfd;
	  csym.symbol.name = "fake";
	  asym = (asymbol *) & csym;
	}

      /* Strip the Thumb bit before handing the address to opcodes.  */
      memaddr = UNMAKE_THUMB_ADDR (memaddr);
      info->symbols = &asym;
    }
  else
    info->symbols = NULL;

  if (info->endian == BFD_ENDIAN_BIG)
    return print_insn_big_arm (memaddr, info);
  else
    return print_insn_little_arm (memaddr, info);
}
8795 /* The following define instruction sequences that will cause ARM
8796 cpu's to take an undefined instruction trap. These are used to
8797 signal a breakpoint to GDB.
8799 The newer ARMv4T cpu's are capable of operating in ARM or Thumb
8800 modes. A different instruction is required for each mode. The ARM
8801 cpu's can also be big or little endian. Thus four different
8802 instructions are needed to support all cases.
8804 Note: ARMv4 defines several new instructions that will take the
8805 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
8806 not in fact add the new instructions. The new undefined
8807 instructions in ARMv4 are all instructions that had no defined
8808 behaviour in earlier chips. There is no guarantee that they will
8809 raise an exception, but may be treated as NOP's. In practice, it
8810 may only be safe to rely on instructions matching:
8812 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
8813 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
8814 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
8816 Even this may only be true if the condition predicate is true. The
8817 following use a condition predicate of ALWAYS so it is always TRUE.
8819 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
8820 and NetBSD all use a software interrupt rather than an undefined
8821 instruction to force a trap. This can be handled by the
8822 abi-specific code during establishment of the gdbarch vector. */
8824 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
8825 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
8826 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
8827 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
8829 static const gdb_byte arm_default_arm_le_breakpoint
[] = ARM_LE_BREAKPOINT
;
8830 static const gdb_byte arm_default_arm_be_breakpoint
[] = ARM_BE_BREAKPOINT
;
8831 static const gdb_byte arm_default_thumb_le_breakpoint
[] = THUMB_LE_BREAKPOINT
;
8832 static const gdb_byte arm_default_thumb_be_breakpoint
[] = THUMB_BE_BREAKPOINT
;
/* Determine the type and size of breakpoint to insert at PCPTR.  Uses
   the program counter value to determine whether a 16-bit or 32-bit
   breakpoint should be used.  It returns a pointer to a string of
   bytes that encode a breakpoint instruction, stores the length of
   the string to *lenptr, and adjusts the program counter (if
   necessary) to point to the actual memory location where the
   breakpoint should be inserted.  */

static const unsigned char *
arm_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr, int *lenptr)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);

  if (arm_pc_is_thumb (gdbarch, *pcptr))
    {
      /* Clear the Thumb bit so the breakpoint lands on the real address.  */
      *pcptr = UNMAKE_THUMB_ADDR (*pcptr);

      /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
	 check whether we are replacing a 32-bit instruction.  */
      if (tdep->thumb2_breakpoint != NULL)
	{
	  gdb_byte buf[2];

	  if (target_read_memory (*pcptr, buf, 2) == 0)
	    {
	      unsigned short inst1;

	      inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
	      if (thumb_insn_size (inst1) == 4)
		{
		  *lenptr = tdep->thumb2_breakpoint_size;
		  return tdep->thumb2_breakpoint;
		}
	    }
	  /* NOTE: if the read fails we fall back to the 16-bit breakpoint
	     below.  */
	}

      *lenptr = tdep->thumb_breakpoint_size;
      return tdep->thumb_breakpoint;
    }
  else
    {
      *lenptr = tdep->arm_breakpoint_size;
      return tdep->arm_breakpoint;
    }
}
/* Compute the breakpoint "kind" reported to a remote stub for the address
   at *PCPTR, reusing arm_breakpoint_from_pc (which stores the length in
   *KINDPTR) and remapping the 32-bit Thumb-2 length to the documented
   remote-protocol kind value.  */

static void
arm_remote_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr,
			       int *kindptr)
{
  arm_breakpoint_from_pc (gdbarch, pcptr, kindptr);

  if (arm_pc_is_thumb (gdbarch, *pcptr) && *kindptr == 4)
    /* The documented magic value for a 32-bit Thumb-2 breakpoint, so
       that this is not confused with a 32-bit ARM breakpoint.  */
    *kindptr = 3;
}
/* Extract from an array REGBUF containing the (raw) register state a
   function return value of type TYPE, and copy that, in virtual
   format, into VALBUF.  */

static void
arm_extract_return_value (struct type *type, struct regcache *regs,
			  gdb_byte *valbuf)
{
  struct gdbarch *gdbarch = get_regcache_arch (regs);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  if (TYPE_CODE_FLT == TYPE_CODE (type))
    {
      switch (gdbarch_tdep (gdbarch)->fp_model)
	{
	case ARM_FLOAT_FPA:
	  {
	    /* The value is in register F0 in internal format.  We need to
	       extract the raw value and then convert it to the desired
	       internal type.  */
	    bfd_byte tmpbuf[FP_REGISTER_SIZE];

	    regcache_cooked_read (regs, ARM_F0_REGNUM, tmpbuf);
	    convert_from_extended (floatformat_from_type (type), tmpbuf,
				   valbuf, gdbarch_byte_order (gdbarch));
	  }
	  break;

	case ARM_FLOAT_SOFT_FPA:
	case ARM_FLOAT_SOFT_VFP:
	  /* ARM_FLOAT_VFP can arise if this is a variadic function so
	     not using the VFP ABI code.  */
	case ARM_FLOAT_VFP:
	  /* Soft-float/VFP variadic: value comes back in r0 (and r1 for
	     doubles).  */
	  regcache_cooked_read (regs, ARM_A1_REGNUM, valbuf);
	  if (TYPE_LENGTH (type) > 4)
	    regcache_cooked_read (regs, ARM_A1_REGNUM + 1,
				  valbuf + INT_REGISTER_SIZE);
	  break;

	default:
	  internal_error (__FILE__, __LINE__,
			  _("arm_extract_return_value: "
			    "Floating point model not supported"));
	  break;
	}
    }
  else if (TYPE_CODE (type) == TYPE_CODE_INT
	   || TYPE_CODE (type) == TYPE_CODE_CHAR
	   || TYPE_CODE (type) == TYPE_CODE_BOOL
	   || TYPE_CODE (type) == TYPE_CODE_PTR
	   || TYPE_CODE (type) == TYPE_CODE_REF
	   || TYPE_CODE (type) == TYPE_CODE_ENUM)
    {
      /* If the type is a plain integer, then the access is
	 straight-forward.  Otherwise we have to play around a bit
	 more.  */
      int len = TYPE_LENGTH (type);
      int regno = ARM_A1_REGNUM;
      ULONGEST tmp;

      while (len > 0)
	{
	  /* By using store_unsigned_integer we avoid having to do
	     anything special for small big-endian values.  */
	  regcache_cooked_read_unsigned (regs, regno++, &tmp);
	  store_unsigned_integer (valbuf,
				  (len > INT_REGISTER_SIZE
				   ? INT_REGISTER_SIZE : len),
				  byte_order, tmp);
	  len -= INT_REGISTER_SIZE;
	  valbuf += INT_REGISTER_SIZE;
	}
    }
  else
    {
      /* For a structure or union the behaviour is as if the value had
         been stored to word-aligned memory and then loaded into
         registers with 32-bit load instruction(s).  */
      int len = TYPE_LENGTH (type);
      int regno = ARM_A1_REGNUM;
      bfd_byte tmpbuf[INT_REGISTER_SIZE];

      while (len > 0)
	{
	  regcache_cooked_read (regs, regno++, tmpbuf);
	  memcpy (valbuf, tmpbuf,
		  len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
	  len -= INT_REGISTER_SIZE;
	  valbuf += INT_REGISTER_SIZE;
	}
    }
}
/* Will a function return an aggregate type in memory or in a
   register?  Return 0 if an aggregate type can be returned in a
   register, 1 if it must be returned in memory.  */

static int
arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
{
  int nRc;
  enum type_code code;

  type = check_typedef (type);

  /* In the ARM ABI, "integer" like aggregate types are returned in
     registers.  For an aggregate type to be integer like, its size
     must be less than or equal to INT_REGISTER_SIZE and the
     offset of each addressable subfield must be zero.  Note that bit
     fields are not addressable, and all addressable subfields of
     unions always start at offset zero.

     This function is based on the behaviour of GCC 2.95.1.
     See: gcc/arm.c: arm_return_in_memory() for details.

     Note: All versions of GCC before GCC 2.95.2 do not set up the
     parameters correctly for a function returning the following
     structure: struct { float f;}; This should be returned in memory,
     not a register.  Richard Earnshaw sent me a patch, but I do not
     know of any way to detect if a function like the above has been
     compiled with the correct calling convention.  */

  /* All aggregate types that won't fit in a register must be returned
     in memory.  */
  if (TYPE_LENGTH (type) > INT_REGISTER_SIZE)
    {
      return 1;
    }

  /* The AAPCS says all aggregates not larger than a word are returned
     in a register.  */
  if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
    return 0;

  /* The only aggregate types that can be returned in a register are
     structs and unions.  Arrays must be returned in memory.  */
  code = TYPE_CODE (type);
  if ((TYPE_CODE_STRUCT != code) && (TYPE_CODE_UNION != code))
    {
      return 1;
    }

  /* Assume all other aggregate types can be returned in a register.
     Run a check for structures, unions and arrays.  */
  nRc = 0;

  if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
    {
      int i;
      /* Need to check if this struct/union is "integer" like.  For
         this to be true, its size must be less than or equal to
         INT_REGISTER_SIZE and the offset of each addressable
         subfield must be zero.  Note that bit fields are not
         addressable, and unions always start at offset zero.  If any
         of the subfields is a floating point type, the struct/union
         cannot be an integer type.  */

      /* For each field in the object, check:
         1) Is it FP? --> yes, nRc = 1;
         2) Is it addressable (bitpos != 0) and
         not packed (bitsize == 0)?
         --> yes, nRc = 1
       */

      for (i = 0; i < TYPE_NFIELDS (type); i++)
	{
	  enum type_code field_type_code;

	  field_type_code
	    = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type, i)));

	  /* Is it a floating point type field?  */
	  if (field_type_code == TYPE_CODE_FLT)
	    {
	      nRc = 1;
	      break;
	    }
	  else
	    {
	      /* If bitpos != 0, then we have to care about it.  */
	      if (TYPE_FIELD_BITPOS (type, i) != 0)
		{
		  /* Bitfields are not addressable.  If the field bitsize is
		     zero, then the field is not packed.  Hence it cannot be
		     a bitfield or any other packed type.  */
		  if (TYPE_FIELD_BITSIZE (type, i) == 0)
		    {
		      nRc = 1;
		      break;
		    }
		}
	    }
	}
    }

  return nRc;
}
/* Write into appropriate registers a function return value of type
   TYPE, given in virtual format.  */

static void
arm_store_return_value (struct type *type, struct regcache *regs,
			const gdb_byte *valbuf)
{
  struct gdbarch *gdbarch = get_regcache_arch (regs);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  if (TYPE_CODE (type) == TYPE_CODE_FLT)
    {
      gdb_byte buf[MAX_REGISTER_SIZE];

      switch (gdbarch_tdep (gdbarch)->fp_model)
	{
	case ARM_FLOAT_FPA:
	  /* FPA returns floats in F0, in the FPA's extended internal
	     format; convert before writing.  */
	  convert_to_extended (floatformat_from_type (type), buf, valbuf,
			       gdbarch_byte_order (gdbarch));
	  regcache_cooked_write (regs, ARM_F0_REGNUM, buf);
	  break;

	case ARM_FLOAT_SOFT_FPA:
	case ARM_FLOAT_SOFT_VFP:
	  /* ARM_FLOAT_VFP can arise if this is a variadic function so
	     not using the VFP ABI code.  */
	case ARM_FLOAT_VFP:
	  regcache_cooked_write (regs, ARM_A1_REGNUM, valbuf);
	  if (TYPE_LENGTH (type) > 4)
	    regcache_cooked_write (regs, ARM_A1_REGNUM + 1,
				   valbuf + INT_REGISTER_SIZE);
	  break;

	default:
	  internal_error (__FILE__, __LINE__,
			  _("arm_store_return_value: Floating "
			    "point model not supported"));
	  break;
	}
    }
  else if (TYPE_CODE (type) == TYPE_CODE_INT
	   || TYPE_CODE (type) == TYPE_CODE_CHAR
	   || TYPE_CODE (type) == TYPE_CODE_BOOL
	   || TYPE_CODE (type) == TYPE_CODE_PTR
	   || TYPE_CODE (type) == TYPE_CODE_REF
	   || TYPE_CODE (type) == TYPE_CODE_ENUM)
    {
      if (TYPE_LENGTH (type) <= 4)
	{
	  /* Values of one word or less are zero/sign-extended and
	     returned in r0.  */
	  bfd_byte tmpbuf[INT_REGISTER_SIZE];
	  LONGEST val = unpack_long (type, valbuf);

	  store_signed_integer (tmpbuf, INT_REGISTER_SIZE, byte_order, val);
	  regcache_cooked_write (regs, ARM_A1_REGNUM, tmpbuf);
	}
      else
	{
	  /* Integral values greater than one word are stored in consecutive
	     registers starting with r0.  This will always be a multiple of
	     the register size.  */
	  int len = TYPE_LENGTH (type);
	  int regno = ARM_A1_REGNUM;

	  while (len > 0)
	    {
	      regcache_cooked_write (regs, regno++, valbuf);
	      len -= INT_REGISTER_SIZE;
	      valbuf += INT_REGISTER_SIZE;
	    }
	}
    }
  else
    {
      /* For a structure or union the behaviour is as if the value had
         been stored to word-aligned memory and then loaded into
         registers with 32-bit load instruction(s).  */
      int len = TYPE_LENGTH (type);
      int regno = ARM_A1_REGNUM;
      bfd_byte tmpbuf[INT_REGISTER_SIZE];

      while (len > 0)
	{
	  memcpy (tmpbuf, valbuf,
		  len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
	  regcache_cooked_write (regs, regno++, tmpbuf);
	  len -= INT_REGISTER_SIZE;
	  valbuf += INT_REGISTER_SIZE;
	}
    }
}
/* Handle function return values.  Implements the gdbarch return_value
   method: decides between register and memory (struct) conventions,
   honouring the VFP "CPRC" hard-float ABI when it applies, and reads or
   writes the value via READBUF/WRITEBUF when non-NULL.  */

static enum return_value_convention
arm_return_value (struct gdbarch *gdbarch, struct value *function,
		  struct type *valtype, struct regcache *regcache,
		  gdb_byte *readbuf, const gdb_byte *writebuf)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
  struct type *func_type = function ? value_type (function) : NULL;
  enum arm_vfp_cprc_base_type vfp_base_type;
  int vfp_base_count;

  /* VFP co-processor register candidates (hard-float AAPCS) are returned
     in consecutive s/d/q registers.  */
  if (arm_vfp_abi_for_function (gdbarch, func_type)
      && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
    {
      int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
      int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
      int i;

      for (i = 0; i < vfp_base_count; i++)
	{
	  if (reg_char == 'q')
	    {
	      /* Quad registers span two raw D registers; use the NEON
		 helpers.  */
	      if (writebuf)
		arm_neon_quad_write (gdbarch, regcache, i,
				     writebuf + i * unit_length);

	      if (readbuf)
		arm_neon_quad_read (gdbarch, regcache, i,
				    readbuf + i * unit_length);
	    }
	  else
	    {
	      char name_buf[4];
	      int regnum;

	      xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
	      regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
						    strlen (name_buf));
	      if (writebuf)
		regcache_cooked_write (regcache, regnum,
				       writebuf + i * unit_length);
	      if (readbuf)
		regcache_cooked_read (regcache, regnum,
				      readbuf + i * unit_length);
	    }
	}
      return RETURN_VALUE_REGISTER_CONVENTION;
    }

  if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
      || TYPE_CODE (valtype) == TYPE_CODE_UNION
      || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
    {
      if (tdep->struct_return == pcc_struct_return
	  || arm_return_in_memory (gdbarch, valtype))
	return RETURN_VALUE_STRUCT_CONVENTION;
    }

  /* AAPCS returns complex types longer than a register in memory.  */
  if (tdep->arm_abi != ARM_ABI_APCS
      && TYPE_CODE (valtype) == TYPE_CODE_COMPLEX
      && TYPE_LENGTH (valtype) > INT_REGISTER_SIZE)
    return RETURN_VALUE_STRUCT_CONVENTION;

  if (writebuf)
    arm_store_return_value (valtype, regcache, writebuf);

  if (readbuf)
    arm_extract_return_value (valtype, regcache, readbuf);

  return RETURN_VALUE_REGISTER_CONVENTION;
}
9257 arm_get_longjmp_target (struct frame_info
*frame
, CORE_ADDR
*pc
)
9259 struct gdbarch
*gdbarch
= get_frame_arch (frame
);
9260 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
9261 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
9263 gdb_byte buf
[INT_REGISTER_SIZE
];
9265 jb_addr
= get_frame_register_unsigned (frame
, ARM_A1_REGNUM
);
9267 if (target_read_memory (jb_addr
+ tdep
->jb_pc
* tdep
->jb_elt_size
, buf
,
9271 *pc
= extract_unsigned_integer (buf
, INT_REGISTER_SIZE
, byte_order
);
/* Recognize GCC and GNU ld's trampolines.  If we are in a trampoline,
   return the target PC.  Otherwise return 0.  */

static CORE_ADDR
arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
{
  const char *name;
  int namelen;
  CORE_ADDR start_addr;

  /* Find the starting address and name of the function containing the PC.  */
  if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
    {
      /* Trampoline 'bx reg' doesn't belong to any functions.  Do the
	 check here.  */
      start_addr = arm_skip_bx_reg (frame, pc);
      if (start_addr != 0)
	return start_addr;

      return 0;
    }

  /* If PC is in a Thumb call or return stub, return the address of the
     target PC, which is in a register.  The thunk functions are called
     _call_via_xx, where x is the register name.  The possible names
     are r0-r9, sl, fp, ip, sp, and lr.  ARM RealView has similar
     functions, named __ARM_call_via_r[0-7].  */
  if (startswith (name, "_call_via_")
      || startswith (name, "__ARM_call_via_"))
    {
      /* Use the name suffix to determine which register contains the
         target PC.  */
      static char *table[15] =
      {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
       "r8", "r9", "sl", "fp", "ip", "sp", "lr"
      };
      int regno;
      /* All the suffixes above are exactly two characters long.  */
      int offset = strlen (name) - 2;

      for (regno = 0; regno <= 14; regno++)
	if (strcmp (&name[offset], table[regno]) == 0)
	  return get_frame_register_unsigned (frame, regno);
    }

  /* GNU ld generates __foo_from_arm or __foo_from_thumb for
     non-interworking calls to foo.  We could decode the stubs
     to find the target but it's easier to use the symbol table.  */
  namelen = strlen (name);
  if (name[0] == '_' && name[1] == '_'
      && ((namelen > 2 + strlen ("_from_thumb")
	   && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
	  || (namelen > 2 + strlen ("_from_arm")
	      && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
    {
      char *target_name;
      /* Strip the leading "__" and the trailing "_from_*" suffix to
	 recover the target symbol name.  */
      int target_len = namelen - 2;
      struct bound_minimal_symbol minsym;
      struct objfile *objfile;
      struct obj_section *sec;

      if (name[namelen - 1] == 'b')
	target_len -= strlen ("_from_thumb");
      else
	target_len -= strlen ("_from_arm");

      target_name = (char *) alloca (target_len + 1);
      memcpy (target_name, name + 2, target_len);
      target_name[target_len] = '\0';

      /* Prefer a symbol from the same objfile as the stub, if found.  */
      sec = find_pc_section (pc);
      objfile = (sec == NULL) ? NULL : sec->objfile;
      minsym = lookup_minimal_symbol (target_name, NULL, objfile);
      if (minsym.minsym != NULL)
	return BMSYMBOL_VALUE_ADDRESS (minsym);
      else
	return 0;
    }

  return 0;			/* not a stub */
}
9357 set_arm_command (char *args
, int from_tty
)
9359 printf_unfiltered (_("\
9360 \"set arm\" must be followed by an apporpriate subcommand.\n"));
9361 help_list (setarmcmdlist
, "set arm ", all_commands
, gdb_stdout
);
/* Handler for the bare "show arm" command: display all "show arm"
   subcommand values.  */

static void
show_arm_command (char *args, int from_tty)
{
  cmd_show_list (showarmcmdlist, from_tty, "");
}
/* Re-select the current gdbarch after one of the "set arm ..." knobs
   (ABI, FP model, ...) changed, so the new setting takes effect.  */

static void
arm_update_current_architecture (void)
{
  struct gdbarch_info info;

  /* If the current architecture is not ARM, we have nothing to do.  */
  if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
    return;

  /* Update the architecture.  An empty info causes re-selection with the
     current defaults plus the new global settings.  */
  gdbarch_info_init (&info);

  if (!gdbarch_update_p (info))
    internal_error (__FILE__, __LINE__, _("could not update architecture"));
}
/* "set arm fpu" handler: map the string chosen by the user
   (current_fp_model) onto the arm_float_model enum and re-select the
   architecture.  */

static void
set_fp_model_sfunc (char *args, int from_tty,
		    struct cmd_list_element *c)
{
  int fp_model;

  for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
    if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
      {
	arm_fp_model = (enum arm_float_model) fp_model;
	break;
      }

  /* The CLI only accepts strings from fp_model_strings, so a miss here
     indicates an internal inconsistency.  */
  if (fp_model == ARM_FLOAT_LAST)
    internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
		    current_fp_model);

  arm_update_current_architecture ();
}
/* "show arm fpu" handler: print the selected FP model, resolving "auto"
   to the model actually in use for the current ARM architecture.  */

static void
show_fp_model (struct ui_file *file, int from_tty,
	       struct cmd_list_element *c, const char *value)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());

  if (arm_fp_model == ARM_FLOAT_AUTO
      && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
    fprintf_filtered (file, _("\
The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
		      fp_model_strings[tdep->fp_model]);
  else
    fprintf_filtered (file, _("\
The current ARM floating point model is \"%s\".\n"),
		      fp_model_strings[arm_fp_model]);
}
9424 arm_set_abi (char *args
, int from_tty
,
9425 struct cmd_list_element
*c
)
9429 for (arm_abi
= ARM_ABI_AUTO
; arm_abi
!= ARM_ABI_LAST
; arm_abi
++)
9430 if (strcmp (arm_abi_string
, arm_abi_strings
[arm_abi
]) == 0)
9432 arm_abi_global
= (enum arm_abi_kind
) arm_abi
;
9436 if (arm_abi
== ARM_ABI_LAST
)
9437 internal_error (__FILE__
, __LINE__
, _("Invalid ABI accepted: %s."),
9440 arm_update_current_architecture ();
9444 arm_show_abi (struct ui_file
*file
, int from_tty
,
9445 struct cmd_list_element
*c
, const char *value
)
9447 struct gdbarch_tdep
*tdep
= gdbarch_tdep (target_gdbarch ());
9449 if (arm_abi_global
== ARM_ABI_AUTO
9450 && gdbarch_bfd_arch_info (target_gdbarch ())->arch
== bfd_arch_arm
)
9451 fprintf_filtered (file
, _("\
9452 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
9453 arm_abi_strings
[tdep
->arm_abi
]);
9455 fprintf_filtered (file
, _("The current ARM ABI is \"%s\".\n"),
9460 arm_show_fallback_mode (struct ui_file
*file
, int from_tty
,
9461 struct cmd_list_element
*c
, const char *value
)
9463 fprintf_filtered (file
,
9464 _("The current execution mode assumed "
9465 "(when symbols are unavailable) is \"%s\".\n"),
9466 arm_fallback_mode_string
);
9470 arm_show_force_mode (struct ui_file
*file
, int from_tty
,
9471 struct cmd_list_element
*c
, const char *value
)
9473 struct gdbarch_tdep
*tdep
= gdbarch_tdep (target_gdbarch ());
9475 fprintf_filtered (file
,
9476 _("The current execution mode assumed "
9477 "(even when symbols are available) is \"%s\".\n"),
9478 arm_force_mode_string
);
/* If the user changes the register disassembly style used for info
   register and other commands, we have to also switch the style used
   in opcodes for disassembly output.  This function is run in the "set
   arm disassembly" command, and does that.  */

static void
set_disassembly_style_sfunc (char *args, int from_tty,
			     struct cmd_list_element *c)
{
  set_disassembly_style ();
}
9493 /* Return the ARM register name corresponding to register I. */
9495 arm_register_name (struct gdbarch
*gdbarch
, int i
)
9497 const int num_regs
= gdbarch_num_regs (gdbarch
);
9499 if (gdbarch_tdep (gdbarch
)->have_vfp_pseudos
9500 && i
>= num_regs
&& i
< num_regs
+ 32)
9502 static const char *const vfp_pseudo_names
[] = {
9503 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
9504 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
9505 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
9506 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
9509 return vfp_pseudo_names
[i
- num_regs
];
9512 if (gdbarch_tdep (gdbarch
)->have_neon_pseudos
9513 && i
>= num_regs
+ 32 && i
< num_regs
+ 32 + 16)
9515 static const char *const neon_pseudo_names
[] = {
9516 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
9517 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
9520 return neon_pseudo_names
[i
- num_regs
- 32];
9523 if (i
>= ARRAY_SIZE (arm_register_names
))
9524 /* These registers are only supported on targets which supply
9525 an XML description. */
9528 return arm_register_names
[i
];
9532 set_disassembly_style (void)
9536 /* Find the style that the user wants. */
9537 for (current
= 0; current
< num_disassembly_options
; current
++)
9538 if (disassembly_style
== valid_disassembly_styles
[current
])
9540 gdb_assert (current
< num_disassembly_options
);
9542 /* Synchronize the disassembler. */
9543 set_arm_regname_option (current
);
9546 /* Test whether the coff symbol specific value corresponds to a Thumb
9550 coff_sym_is_thumb (int val
)
9552 return (val
== C_THUMBEXT
9553 || val
== C_THUMBSTAT
9554 || val
== C_THUMBEXTFUNC
9555 || val
== C_THUMBSTATFUNC
9556 || val
== C_THUMBLABEL
);
9559 /* arm_coff_make_msymbol_special()
9560 arm_elf_make_msymbol_special()
9562 These functions test whether the COFF or ELF symbol corresponds to
9563 an address in thumb code, and set a "special" bit in a minimal
9564 symbol to indicate that it does. */
9567 arm_elf_make_msymbol_special(asymbol
*sym
, struct minimal_symbol
*msym
)
9569 if (ARM_SYM_BRANCH_TYPE (&((elf_symbol_type
*)sym
)->internal_elf_sym
)
9570 == ST_BRANCH_TO_THUMB
)
9571 MSYMBOL_SET_SPECIAL (msym
);
/* Mark MSYM as Thumb code if the COFF storage class VAL says so.  */

static void
arm_coff_make_msymbol_special (int val, struct minimal_symbol *msym)
{
  if (coff_sym_is_thumb (val))
    MSYMBOL_SET_SPECIAL (msym);
}
9582 arm_objfile_data_free (struct objfile
*objfile
, void *arg
)
9584 struct arm_per_objfile
*data
= (struct arm_per_objfile
*) arg
;
9587 for (i
= 0; i
< objfile
->obfd
->section_count
; i
++)
9588 VEC_free (arm_mapping_symbol_s
, data
->section_maps
[i
]);
9592 arm_record_special_symbol (struct gdbarch
*gdbarch
, struct objfile
*objfile
,
9595 const char *name
= bfd_asymbol_name (sym
);
9596 struct arm_per_objfile
*data
;
9597 VEC(arm_mapping_symbol_s
) **map_p
;
9598 struct arm_mapping_symbol new_map_sym
;
9600 gdb_assert (name
[0] == '$');
9601 if (name
[1] != 'a' && name
[1] != 't' && name
[1] != 'd')
9604 data
= (struct arm_per_objfile
*) objfile_data (objfile
,
9605 arm_objfile_data_key
);
9608 data
= OBSTACK_ZALLOC (&objfile
->objfile_obstack
,
9609 struct arm_per_objfile
);
9610 set_objfile_data (objfile
, arm_objfile_data_key
, data
);
9611 data
->section_maps
= OBSTACK_CALLOC (&objfile
->objfile_obstack
,
9612 objfile
->obfd
->section_count
,
9613 VEC(arm_mapping_symbol_s
) *);
9615 map_p
= &data
->section_maps
[bfd_get_section (sym
)->index
];
9617 new_map_sym
.value
= sym
->value
;
9618 new_map_sym
.type
= name
[1];
9620 /* Assume that most mapping symbols appear in order of increasing
9621 value. If they were randomly distributed, it would be faster to
9622 always push here and then sort at first use. */
9623 if (!VEC_empty (arm_mapping_symbol_s
, *map_p
))
9625 struct arm_mapping_symbol
*prev_map_sym
;
9627 prev_map_sym
= VEC_last (arm_mapping_symbol_s
, *map_p
);
9628 if (prev_map_sym
->value
>= sym
->value
)
9631 idx
= VEC_lower_bound (arm_mapping_symbol_s
, *map_p
, &new_map_sym
,
9632 arm_compare_mapping_symbols
);
9633 VEC_safe_insert (arm_mapping_symbol_s
, *map_p
, idx
, &new_map_sym
);
9638 VEC_safe_push (arm_mapping_symbol_s
, *map_p
, &new_map_sym
);
9642 arm_write_pc (struct regcache
*regcache
, CORE_ADDR pc
)
9644 struct gdbarch
*gdbarch
= get_regcache_arch (regcache
);
9645 regcache_cooked_write_unsigned (regcache
, ARM_PC_REGNUM
, pc
);
9647 /* If necessary, set the T bit. */
9650 ULONGEST val
, t_bit
;
9651 regcache_cooked_read_unsigned (regcache
, ARM_PS_REGNUM
, &val
);
9652 t_bit
= arm_psr_thumb_bit (gdbarch
);
9653 if (arm_pc_is_thumb (gdbarch
, pc
))
9654 regcache_cooked_write_unsigned (regcache
, ARM_PS_REGNUM
,
9657 regcache_cooked_write_unsigned (regcache
, ARM_PS_REGNUM
,
9662 /* Read the contents of a NEON quad register, by reading from two
9663 double registers. This is used to implement the quad pseudo
9664 registers, and for argument passing in case the quad registers are
9665 missing; vectors are passed in quad registers when using the VFP
9666 ABI, even if a NEON unit is not present. REGNUM is the index of
9667 the quad register, in [0, 15]. */
9669 static enum register_status
9670 arm_neon_quad_read (struct gdbarch
*gdbarch
, struct regcache
*regcache
,
9671 int regnum
, gdb_byte
*buf
)
9674 gdb_byte reg_buf
[8];
9675 int offset
, double_regnum
;
9676 enum register_status status
;
9678 xsnprintf (name_buf
, sizeof (name_buf
), "d%d", regnum
<< 1);
9679 double_regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
9682 /* d0 is always the least significant half of q0. */
9683 if (gdbarch_byte_order (gdbarch
) == BFD_ENDIAN_BIG
)
9688 status
= regcache_raw_read (regcache
, double_regnum
, reg_buf
);
9689 if (status
!= REG_VALID
)
9691 memcpy (buf
+ offset
, reg_buf
, 8);
9693 offset
= 8 - offset
;
9694 status
= regcache_raw_read (regcache
, double_regnum
+ 1, reg_buf
);
9695 if (status
!= REG_VALID
)
9697 memcpy (buf
+ offset
, reg_buf
, 8);
9702 static enum register_status
9703 arm_pseudo_read (struct gdbarch
*gdbarch
, struct regcache
*regcache
,
9704 int regnum
, gdb_byte
*buf
)
9706 const int num_regs
= gdbarch_num_regs (gdbarch
);
9708 gdb_byte reg_buf
[8];
9709 int offset
, double_regnum
;
9711 gdb_assert (regnum
>= num_regs
);
9714 if (gdbarch_tdep (gdbarch
)->have_neon_pseudos
&& regnum
>= 32 && regnum
< 48)
9715 /* Quad-precision register. */
9716 return arm_neon_quad_read (gdbarch
, regcache
, regnum
- 32, buf
);
9719 enum register_status status
;
9721 /* Single-precision register. */
9722 gdb_assert (regnum
< 32);
9724 /* s0 is always the least significant half of d0. */
9725 if (gdbarch_byte_order (gdbarch
) == BFD_ENDIAN_BIG
)
9726 offset
= (regnum
& 1) ? 0 : 4;
9728 offset
= (regnum
& 1) ? 4 : 0;
9730 xsnprintf (name_buf
, sizeof (name_buf
), "d%d", regnum
>> 1);
9731 double_regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
9734 status
= regcache_raw_read (regcache
, double_regnum
, reg_buf
);
9735 if (status
== REG_VALID
)
9736 memcpy (buf
, reg_buf
+ offset
, 4);
9741 /* Store the contents of BUF to a NEON quad register, by writing to
9742 two double registers. This is used to implement the quad pseudo
9743 registers, and for argument passing in case the quad registers are
9744 missing; vectors are passed in quad registers when using the VFP
9745 ABI, even if a NEON unit is not present. REGNUM is the index
9746 of the quad register, in [0, 15]. */
9749 arm_neon_quad_write (struct gdbarch
*gdbarch
, struct regcache
*regcache
,
9750 int regnum
, const gdb_byte
*buf
)
9753 int offset
, double_regnum
;
9755 xsnprintf (name_buf
, sizeof (name_buf
), "d%d", regnum
<< 1);
9756 double_regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
9759 /* d0 is always the least significant half of q0. */
9760 if (gdbarch_byte_order (gdbarch
) == BFD_ENDIAN_BIG
)
9765 regcache_raw_write (regcache
, double_regnum
, buf
+ offset
);
9766 offset
= 8 - offset
;
9767 regcache_raw_write (regcache
, double_regnum
+ 1, buf
+ offset
);
9771 arm_pseudo_write (struct gdbarch
*gdbarch
, struct regcache
*regcache
,
9772 int regnum
, const gdb_byte
*buf
)
9774 const int num_regs
= gdbarch_num_regs (gdbarch
);
9776 gdb_byte reg_buf
[8];
9777 int offset
, double_regnum
;
9779 gdb_assert (regnum
>= num_regs
);
9782 if (gdbarch_tdep (gdbarch
)->have_neon_pseudos
&& regnum
>= 32 && regnum
< 48)
9783 /* Quad-precision register. */
9784 arm_neon_quad_write (gdbarch
, regcache
, regnum
- 32, buf
);
9787 /* Single-precision register. */
9788 gdb_assert (regnum
< 32);
9790 /* s0 is always the least significant half of d0. */
9791 if (gdbarch_byte_order (gdbarch
) == BFD_ENDIAN_BIG
)
9792 offset
= (regnum
& 1) ? 0 : 4;
9794 offset
= (regnum
& 1) ? 4 : 0;
9796 xsnprintf (name_buf
, sizeof (name_buf
), "d%d", regnum
>> 1);
9797 double_regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
9800 regcache_raw_read (regcache
, double_regnum
, reg_buf
);
9801 memcpy (reg_buf
+ offset
, buf
, 4);
9802 regcache_raw_write (regcache
, double_regnum
, reg_buf
);
/* user-reg callback: BATON points at the register number of the alias;
   return the value of that register in FRAME.  */

static struct value *
value_of_arm_user_reg (struct frame_info *frame, const void *baton)
{
  const int *reg_p = (const int *) baton;

  return value_of_register (*reg_p, frame);
}
9813 static enum gdb_osabi
9814 arm_elf_osabi_sniffer (bfd
*abfd
)
9816 unsigned int elfosabi
;
9817 enum gdb_osabi osabi
= GDB_OSABI_UNKNOWN
;
9819 elfosabi
= elf_elfheader (abfd
)->e_ident
[EI_OSABI
];
9821 if (elfosabi
== ELFOSABI_ARM
)
9822 /* GNU tools use this value. Check note sections in this case,
9824 bfd_map_over_sections (abfd
,
9825 generic_elf_osabi_sniff_abi_tag_sections
,
9828 /* Anything else will be handled by the generic ELF sniffer. */
9833 arm_register_reggroup_p (struct gdbarch
*gdbarch
, int regnum
,
9834 struct reggroup
*group
)
9836 /* FPS register's type is INT, but belongs to float_reggroup. Beside
9837 this, FPS register belongs to save_regroup, restore_reggroup, and
9838 all_reggroup, of course. */
9839 if (regnum
== ARM_FPS_REGNUM
)
9840 return (group
== float_reggroup
9841 || group
== save_reggroup
9842 || group
== restore_reggroup
9843 || group
== all_reggroup
);
9845 return default_register_reggroup_p (gdbarch
, regnum
, group
);
9849 /* For backward-compatibility we allow two 'g' packet lengths with
9850 the remote protocol depending on whether FPA registers are
9851 supplied. M-profile targets do not have FPA registers, but some
9852 stubs already exist in the wild which use a 'g' packet which
9853 supplies them albeit with dummy values. The packet format which
9854 includes FPA registers should be considered deprecated for
9855 M-profile targets. */
9858 arm_register_g_packet_guesses (struct gdbarch
*gdbarch
)
9860 if (gdbarch_tdep (gdbarch
)->is_m
)
9862 /* If we know from the executable this is an M-profile target,
9863 cater for remote targets whose register set layout is the
9864 same as the FPA layout. */
9865 register_remote_g_packet_guess (gdbarch
,
9866 /* r0-r12,sp,lr,pc; f0-f7; fps,xpsr */
9867 (16 * INT_REGISTER_SIZE
)
9868 + (8 * FP_REGISTER_SIZE
)
9869 + (2 * INT_REGISTER_SIZE
),
9870 tdesc_arm_with_m_fpa_layout
);
9872 /* The regular M-profile layout. */
9873 register_remote_g_packet_guess (gdbarch
,
9874 /* r0-r12,sp,lr,pc; xpsr */
9875 (16 * INT_REGISTER_SIZE
)
9876 + INT_REGISTER_SIZE
,
9879 /* M-profile plus M4F VFP. */
9880 register_remote_g_packet_guess (gdbarch
,
9881 /* r0-r12,sp,lr,pc; d0-d15; fpscr,xpsr */
9882 (16 * INT_REGISTER_SIZE
)
9883 + (16 * VFP_REGISTER_SIZE
)
9884 + (2 * INT_REGISTER_SIZE
),
9885 tdesc_arm_with_m_vfp_d16
);
9888 /* Otherwise we don't have a useful guess. */
9892 /* Initialize the current architecture based on INFO. If possible,
9893 re-use an architecture from ARCHES, which is a list of
9894 architectures already created during this debugging session.
9896 Called e.g. at program startup, when reading a core file, and when
9897 reading a binary file. */
9899 static struct gdbarch
*
9900 arm_gdbarch_init (struct gdbarch_info info
, struct gdbarch_list
*arches
)
9902 struct gdbarch_tdep
*tdep
;
9903 struct gdbarch
*gdbarch
;
9904 struct gdbarch_list
*best_arch
;
9905 enum arm_abi_kind arm_abi
= arm_abi_global
;
9906 enum arm_float_model fp_model
= arm_fp_model
;
9907 struct tdesc_arch_data
*tdesc_data
= NULL
;
9909 int vfp_register_count
= 0, have_vfp_pseudos
= 0, have_neon_pseudos
= 0;
9910 int have_wmmx_registers
= 0;
9912 int have_fpa_registers
= 1;
9913 const struct target_desc
*tdesc
= info
.target_desc
;
9915 /* If we have an object to base this architecture on, try to determine
9918 if (arm_abi
== ARM_ABI_AUTO
&& info
.abfd
!= NULL
)
9920 int ei_osabi
, e_flags
;
9922 switch (bfd_get_flavour (info
.abfd
))
9924 case bfd_target_aout_flavour
:
9925 /* Assume it's an old APCS-style ABI. */
9926 arm_abi
= ARM_ABI_APCS
;
9929 case bfd_target_coff_flavour
:
9930 /* Assume it's an old APCS-style ABI. */
9932 arm_abi
= ARM_ABI_APCS
;
9935 case bfd_target_elf_flavour
:
9936 ei_osabi
= elf_elfheader (info
.abfd
)->e_ident
[EI_OSABI
];
9937 e_flags
= elf_elfheader (info
.abfd
)->e_flags
;
9939 if (ei_osabi
== ELFOSABI_ARM
)
9941 /* GNU tools used to use this value, but do not for EABI
9942 objects. There's nowhere to tag an EABI version
9943 anyway, so assume APCS. */
9944 arm_abi
= ARM_ABI_APCS
;
9946 else if (ei_osabi
== ELFOSABI_NONE
|| ei_osabi
== ELFOSABI_GNU
)
9948 int eabi_ver
= EF_ARM_EABI_VERSION (e_flags
);
9949 int attr_arch
, attr_profile
;
9953 case EF_ARM_EABI_UNKNOWN
:
9954 /* Assume GNU tools. */
9955 arm_abi
= ARM_ABI_APCS
;
9958 case EF_ARM_EABI_VER4
:
9959 case EF_ARM_EABI_VER5
:
9960 arm_abi
= ARM_ABI_AAPCS
;
9961 /* EABI binaries default to VFP float ordering.
9962 They may also contain build attributes that can
9963 be used to identify if the VFP argument-passing
9965 if (fp_model
== ARM_FLOAT_AUTO
)
9968 switch (bfd_elf_get_obj_attr_int (info
.abfd
,
9972 case AEABI_VFP_args_base
:
9973 /* "The user intended FP parameter/result
9974 passing to conform to AAPCS, base
9976 fp_model
= ARM_FLOAT_SOFT_VFP
;
9978 case AEABI_VFP_args_vfp
:
9979 /* "The user intended FP parameter/result
9980 passing to conform to AAPCS, VFP
9982 fp_model
= ARM_FLOAT_VFP
;
9984 case AEABI_VFP_args_toolchain
:
9985 /* "The user intended FP parameter/result
9986 passing to conform to tool chain-specific
9987 conventions" - we don't know any such
9988 conventions, so leave it as "auto". */
9990 case AEABI_VFP_args_compatible
:
9991 /* "Code is compatible with both the base
9992 and VFP variants; the user did not permit
9993 non-variadic functions to pass FP
9994 parameters/results" - leave it as
9998 /* Attribute value not mentioned in the
9999 November 2012 ABI, so leave it as
10004 fp_model
= ARM_FLOAT_SOFT_VFP
;
10010 /* Leave it as "auto". */
10011 warning (_("unknown ARM EABI version 0x%x"), eabi_ver
);
10016 /* Detect M-profile programs. This only works if the
10017 executable file includes build attributes; GCC does
10018 copy them to the executable, but e.g. RealView does
10020 attr_arch
= bfd_elf_get_obj_attr_int (info
.abfd
, OBJ_ATTR_PROC
,
10022 attr_profile
= bfd_elf_get_obj_attr_int (info
.abfd
,
10024 Tag_CPU_arch_profile
);
10025 /* GCC specifies the profile for v6-M; RealView only
10026 specifies the profile for architectures starting with
10027 V7 (as opposed to architectures with a tag
10028 numerically greater than TAG_CPU_ARCH_V7). */
10029 if (!tdesc_has_registers (tdesc
)
10030 && (attr_arch
== TAG_CPU_ARCH_V6_M
10031 || attr_arch
== TAG_CPU_ARCH_V6S_M
10032 || attr_profile
== 'M'))
10037 if (fp_model
== ARM_FLOAT_AUTO
)
10039 int e_flags
= elf_elfheader (info
.abfd
)->e_flags
;
10041 switch (e_flags
& (EF_ARM_SOFT_FLOAT
| EF_ARM_VFP_FLOAT
))
10044 /* Leave it as "auto". Strictly speaking this case
10045 means FPA, but almost nobody uses that now, and
10046 many toolchains fail to set the appropriate bits
10047 for the floating-point model they use. */
10049 case EF_ARM_SOFT_FLOAT
:
10050 fp_model
= ARM_FLOAT_SOFT_FPA
;
10052 case EF_ARM_VFP_FLOAT
:
10053 fp_model
= ARM_FLOAT_VFP
;
10055 case EF_ARM_SOFT_FLOAT
| EF_ARM_VFP_FLOAT
:
10056 fp_model
= ARM_FLOAT_SOFT_VFP
;
10061 if (e_flags
& EF_ARM_BE8
)
10062 info
.byte_order_for_code
= BFD_ENDIAN_LITTLE
;
10067 /* Leave it as "auto". */
10072 /* Check any target description for validity. */
10073 if (tdesc_has_registers (tdesc
))
10075 /* For most registers we require GDB's default names; but also allow
10076 the numeric names for sp / lr / pc, as a convenience. */
10077 static const char *const arm_sp_names
[] = { "r13", "sp", NULL
};
10078 static const char *const arm_lr_names
[] = { "r14", "lr", NULL
};
10079 static const char *const arm_pc_names
[] = { "r15", "pc", NULL
};
10081 const struct tdesc_feature
*feature
;
10084 feature
= tdesc_find_feature (tdesc
,
10085 "org.gnu.gdb.arm.core");
10086 if (feature
== NULL
)
10088 feature
= tdesc_find_feature (tdesc
,
10089 "org.gnu.gdb.arm.m-profile");
10090 if (feature
== NULL
)
10096 tdesc_data
= tdesc_data_alloc ();
10099 for (i
= 0; i
< ARM_SP_REGNUM
; i
++)
10100 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
, i
,
10101 arm_register_names
[i
]);
10102 valid_p
&= tdesc_numbered_register_choices (feature
, tdesc_data
,
10105 valid_p
&= tdesc_numbered_register_choices (feature
, tdesc_data
,
10108 valid_p
&= tdesc_numbered_register_choices (feature
, tdesc_data
,
10112 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
,
10113 ARM_PS_REGNUM
, "xpsr");
10115 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
,
10116 ARM_PS_REGNUM
, "cpsr");
10120 tdesc_data_cleanup (tdesc_data
);
10124 feature
= tdesc_find_feature (tdesc
,
10125 "org.gnu.gdb.arm.fpa");
10126 if (feature
!= NULL
)
10129 for (i
= ARM_F0_REGNUM
; i
<= ARM_FPS_REGNUM
; i
++)
10130 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
, i
,
10131 arm_register_names
[i
]);
10134 tdesc_data_cleanup (tdesc_data
);
10139 have_fpa_registers
= 0;
10141 feature
= tdesc_find_feature (tdesc
,
10142 "org.gnu.gdb.xscale.iwmmxt");
10143 if (feature
!= NULL
)
10145 static const char *const iwmmxt_names
[] = {
10146 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
10147 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
10148 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
10149 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
10153 for (i
= ARM_WR0_REGNUM
; i
<= ARM_WR15_REGNUM
; i
++)
10155 &= tdesc_numbered_register (feature
, tdesc_data
, i
,
10156 iwmmxt_names
[i
- ARM_WR0_REGNUM
]);
10158 /* Check for the control registers, but do not fail if they
10160 for (i
= ARM_WC0_REGNUM
; i
<= ARM_WCASF_REGNUM
; i
++)
10161 tdesc_numbered_register (feature
, tdesc_data
, i
,
10162 iwmmxt_names
[i
- ARM_WR0_REGNUM
]);
10164 for (i
= ARM_WCGR0_REGNUM
; i
<= ARM_WCGR3_REGNUM
; i
++)
10166 &= tdesc_numbered_register (feature
, tdesc_data
, i
,
10167 iwmmxt_names
[i
- ARM_WR0_REGNUM
]);
10171 tdesc_data_cleanup (tdesc_data
);
10175 have_wmmx_registers
= 1;
10178 /* If we have a VFP unit, check whether the single precision registers
10179 are present. If not, then we will synthesize them as pseudo
10181 feature
= tdesc_find_feature (tdesc
,
10182 "org.gnu.gdb.arm.vfp");
10183 if (feature
!= NULL
)
10185 static const char *const vfp_double_names
[] = {
10186 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
10187 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
10188 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
10189 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
10192 /* Require the double precision registers. There must be either
10195 for (i
= 0; i
< 32; i
++)
10197 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
,
10199 vfp_double_names
[i
]);
10203 if (!valid_p
&& i
== 16)
10206 /* Also require FPSCR. */
10207 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
,
10208 ARM_FPSCR_REGNUM
, "fpscr");
10211 tdesc_data_cleanup (tdesc_data
);
10215 if (tdesc_unnumbered_register (feature
, "s0") == 0)
10216 have_vfp_pseudos
= 1;
10218 vfp_register_count
= i
;
10220 /* If we have VFP, also check for NEON. The architecture allows
10221 NEON without VFP (integer vector operations only), but GDB
10222 does not support that. */
10223 feature
= tdesc_find_feature (tdesc
,
10224 "org.gnu.gdb.arm.neon");
10225 if (feature
!= NULL
)
10227 /* NEON requires 32 double-precision registers. */
10230 tdesc_data_cleanup (tdesc_data
);
10234 /* If there are quad registers defined by the stub, use
10235 their type; otherwise (normally) provide them with
10236 the default type. */
10237 if (tdesc_unnumbered_register (feature
, "q0") == 0)
10238 have_neon_pseudos
= 1;
10245 /* If there is already a candidate, use it. */
10246 for (best_arch
= gdbarch_list_lookup_by_info (arches
, &info
);
10248 best_arch
= gdbarch_list_lookup_by_info (best_arch
->next
, &info
))
10250 if (arm_abi
!= ARM_ABI_AUTO
10251 && arm_abi
!= gdbarch_tdep (best_arch
->gdbarch
)->arm_abi
)
10254 if (fp_model
!= ARM_FLOAT_AUTO
10255 && fp_model
!= gdbarch_tdep (best_arch
->gdbarch
)->fp_model
)
10258 /* There are various other properties in tdep that we do not
10259 need to check here: those derived from a target description,
10260 since gdbarches with a different target description are
10261 automatically disqualified. */
10263 /* Do check is_m, though, since it might come from the binary. */
10264 if (is_m
!= gdbarch_tdep (best_arch
->gdbarch
)->is_m
)
10267 /* Found a match. */
10271 if (best_arch
!= NULL
)
10273 if (tdesc_data
!= NULL
)
10274 tdesc_data_cleanup (tdesc_data
);
10275 return best_arch
->gdbarch
;
10278 tdep
= XCNEW (struct gdbarch_tdep
);
10279 gdbarch
= gdbarch_alloc (&info
, tdep
);
10281 /* Record additional information about the architecture we are defining.
10282 These are gdbarch discriminators, like the OSABI. */
10283 tdep
->arm_abi
= arm_abi
;
10284 tdep
->fp_model
= fp_model
;
10286 tdep
->have_fpa_registers
= have_fpa_registers
;
10287 tdep
->have_wmmx_registers
= have_wmmx_registers
;
10288 gdb_assert (vfp_register_count
== 0
10289 || vfp_register_count
== 16
10290 || vfp_register_count
== 32);
10291 tdep
->vfp_register_count
= vfp_register_count
;
10292 tdep
->have_vfp_pseudos
= have_vfp_pseudos
;
10293 tdep
->have_neon_pseudos
= have_neon_pseudos
;
10294 tdep
->have_neon
= have_neon
;
10296 arm_register_g_packet_guesses (gdbarch
);
10299 switch (info
.byte_order_for_code
)
10301 case BFD_ENDIAN_BIG
:
10302 tdep
->arm_breakpoint
= arm_default_arm_be_breakpoint
;
10303 tdep
->arm_breakpoint_size
= sizeof (arm_default_arm_be_breakpoint
);
10304 tdep
->thumb_breakpoint
= arm_default_thumb_be_breakpoint
;
10305 tdep
->thumb_breakpoint_size
= sizeof (arm_default_thumb_be_breakpoint
);
10309 case BFD_ENDIAN_LITTLE
:
10310 tdep
->arm_breakpoint
= arm_default_arm_le_breakpoint
;
10311 tdep
->arm_breakpoint_size
= sizeof (arm_default_arm_le_breakpoint
);
10312 tdep
->thumb_breakpoint
= arm_default_thumb_le_breakpoint
;
10313 tdep
->thumb_breakpoint_size
= sizeof (arm_default_thumb_le_breakpoint
);
10318 internal_error (__FILE__
, __LINE__
,
10319 _("arm_gdbarch_init: bad byte order for float format"));
10322 /* On ARM targets char defaults to unsigned. */
10323 set_gdbarch_char_signed (gdbarch
, 0);
10325 /* Note: for displaced stepping, this includes the breakpoint, and one word
10326 of additional scratch space. This setting isn't used for anything beside
10327 displaced stepping at present. */
10328 set_gdbarch_max_insn_length (gdbarch
, 4 * DISPLACED_MODIFIED_INSNS
);
10330 /* This should be low enough for everything. */
10331 tdep
->lowest_pc
= 0x20;
10332 tdep
->jb_pc
= -1; /* Longjump support not enabled by default. */
10334 /* The default, for both APCS and AAPCS, is to return small
10335 structures in registers. */
10336 tdep
->struct_return
= reg_struct_return
;
10338 set_gdbarch_push_dummy_call (gdbarch
, arm_push_dummy_call
);
10339 set_gdbarch_frame_align (gdbarch
, arm_frame_align
);
10341 set_gdbarch_write_pc (gdbarch
, arm_write_pc
);
10343 /* Frame handling. */
10344 set_gdbarch_dummy_id (gdbarch
, arm_dummy_id
);
10345 set_gdbarch_unwind_pc (gdbarch
, arm_unwind_pc
);
10346 set_gdbarch_unwind_sp (gdbarch
, arm_unwind_sp
);
10348 frame_base_set_default (gdbarch
, &arm_normal_base
);
10350 /* Address manipulation. */
10351 set_gdbarch_addr_bits_remove (gdbarch
, arm_addr_bits_remove
);
10353 /* Advance PC across function entry code. */
10354 set_gdbarch_skip_prologue (gdbarch
, arm_skip_prologue
);
10356 /* Detect whether PC is at a point where the stack has been destroyed. */
10357 set_gdbarch_stack_frame_destroyed_p (gdbarch
, arm_stack_frame_destroyed_p
);
10359 /* Skip trampolines. */
10360 set_gdbarch_skip_trampoline_code (gdbarch
, arm_skip_stub
);
10362 /* The stack grows downward. */
10363 set_gdbarch_inner_than (gdbarch
, core_addr_lessthan
);
10365 /* Breakpoint manipulation. */
10366 set_gdbarch_breakpoint_from_pc (gdbarch
, arm_breakpoint_from_pc
);
10367 set_gdbarch_remote_breakpoint_from_pc (gdbarch
,
10368 arm_remote_breakpoint_from_pc
);
10370 /* Information about registers, etc. */
10371 set_gdbarch_sp_regnum (gdbarch
, ARM_SP_REGNUM
);
10372 set_gdbarch_pc_regnum (gdbarch
, ARM_PC_REGNUM
);
10373 set_gdbarch_num_regs (gdbarch
, ARM_NUM_REGS
);
10374 set_gdbarch_register_type (gdbarch
, arm_register_type
);
10375 set_gdbarch_register_reggroup_p (gdbarch
, arm_register_reggroup_p
);
10377 /* This "info float" is FPA-specific. Use the generic version if we
10378 do not have FPA. */
10379 if (gdbarch_tdep (gdbarch
)->have_fpa_registers
)
10380 set_gdbarch_print_float_info (gdbarch
, arm_print_float_info
);
10382 /* Internal <-> external register number maps. */
10383 set_gdbarch_dwarf2_reg_to_regnum (gdbarch
, arm_dwarf_reg_to_regnum
);
10384 set_gdbarch_register_sim_regno (gdbarch
, arm_register_sim_regno
);
10386 set_gdbarch_register_name (gdbarch
, arm_register_name
);
10388 /* Returning results. */
10389 set_gdbarch_return_value (gdbarch
, arm_return_value
);
10392 set_gdbarch_print_insn (gdbarch
, gdb_print_insn_arm
);
10394 /* Minsymbol frobbing. */
10395 set_gdbarch_elf_make_msymbol_special (gdbarch
, arm_elf_make_msymbol_special
);
10396 set_gdbarch_coff_make_msymbol_special (gdbarch
,
10397 arm_coff_make_msymbol_special
);
10398 set_gdbarch_record_special_symbol (gdbarch
, arm_record_special_symbol
);
10400 /* Thumb-2 IT block support. */
10401 set_gdbarch_adjust_breakpoint_address (gdbarch
,
10402 arm_adjust_breakpoint_address
);
10404 /* Virtual tables. */
10405 set_gdbarch_vbit_in_delta (gdbarch
, 1);
10407 /* Hook in the ABI-specific overrides, if they have been registered. */
10408 gdbarch_init_osabi (info
, gdbarch
);
10410 dwarf2_frame_set_init_reg (gdbarch
, arm_dwarf2_frame_init_reg
);
10412 /* Add some default predicates. */
10414 frame_unwind_append_unwinder (gdbarch
, &arm_m_exception_unwind
);
10415 frame_unwind_append_unwinder (gdbarch
, &arm_stub_unwind
);
10416 dwarf2_append_unwinders (gdbarch
);
10417 frame_unwind_append_unwinder (gdbarch
, &arm_exidx_unwind
);
10418 frame_unwind_append_unwinder (gdbarch
, &arm_prologue_unwind
);
10420 /* Now we have tuned the configuration, set a few final things,
10421 based on what the OS ABI has told us. */
10423 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
10424 binaries are always marked. */
10425 if (tdep
->arm_abi
== ARM_ABI_AUTO
)
10426 tdep
->arm_abi
= ARM_ABI_APCS
;
10428 /* Watchpoints are not steppable. */
10429 set_gdbarch_have_nonsteppable_watchpoint (gdbarch
, 1);
10431 /* We used to default to FPA for generic ARM, but almost nobody
10432 uses that now, and we now provide a way for the user to force
10433 the model. So default to the most useful variant. */
10434 if (tdep
->fp_model
== ARM_FLOAT_AUTO
)
10435 tdep
->fp_model
= ARM_FLOAT_SOFT_FPA
;
10437 if (tdep
->jb_pc
>= 0)
10438 set_gdbarch_get_longjmp_target (gdbarch
, arm_get_longjmp_target
);
10440 /* Floating point sizes and format. */
10441 set_gdbarch_float_format (gdbarch
, floatformats_ieee_single
);
10442 if (tdep
->fp_model
== ARM_FLOAT_SOFT_FPA
|| tdep
->fp_model
== ARM_FLOAT_FPA
)
10444 set_gdbarch_double_format
10445 (gdbarch
, floatformats_ieee_double_littlebyte_bigword
);
10446 set_gdbarch_long_double_format
10447 (gdbarch
, floatformats_ieee_double_littlebyte_bigword
);
10451 set_gdbarch_double_format (gdbarch
, floatformats_ieee_double
);
10452 set_gdbarch_long_double_format (gdbarch
, floatformats_ieee_double
);
10455 if (have_vfp_pseudos
)
10457 /* NOTE: These are the only pseudo registers used by
10458 the ARM target at the moment. If more are added, a
10459 little more care in numbering will be needed. */
10461 int num_pseudos
= 32;
10462 if (have_neon_pseudos
)
10464 set_gdbarch_num_pseudo_regs (gdbarch
, num_pseudos
);
10465 set_gdbarch_pseudo_register_read (gdbarch
, arm_pseudo_read
);
10466 set_gdbarch_pseudo_register_write (gdbarch
, arm_pseudo_write
);
10471 set_tdesc_pseudo_register_name (gdbarch
, arm_register_name
);
10473 tdesc_use_registers (gdbarch
, tdesc
, tdesc_data
);
10475 /* Override tdesc_register_type to adjust the types of VFP
10476 registers for NEON. */
10477 set_gdbarch_register_type (gdbarch
, arm_register_type
);
10480 /* Add standard register aliases. We add aliases even for those
10481 nanes which are used by the current architecture - it's simpler,
10482 and does no harm, since nothing ever lists user registers. */
10483 for (i
= 0; i
< ARRAY_SIZE (arm_register_aliases
); i
++)
10484 user_reg_add (gdbarch
, arm_register_aliases
[i
].name
,
10485 value_of_arm_user_reg
, &arm_register_aliases
[i
].regnum
);
10491 arm_dump_tdep (struct gdbarch
*gdbarch
, struct ui_file
*file
)
10493 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
10498 fprintf_unfiltered (file
, _("arm_dump_tdep: Lowest pc = 0x%lx"),
10499 (unsigned long) tdep
->lowest_pc
);
10502 extern initialize_file_ftype _initialize_arm_tdep
; /* -Wmissing-prototypes */
10505 _initialize_arm_tdep (void)
10507 struct ui_file
*stb
;
10509 struct cmd_list_element
*new_set
, *new_show
;
10510 const char *setname
;
10511 const char *setdesc
;
10512 const char *const *regnames
;
10514 static char *helptext
;
10515 char regdesc
[1024], *rdptr
= regdesc
;
10516 size_t rest
= sizeof (regdesc
);
10518 gdbarch_register (bfd_arch_arm
, arm_gdbarch_init
, arm_dump_tdep
);
10520 arm_objfile_data_key
10521 = register_objfile_data_with_cleanup (NULL
, arm_objfile_data_free
);
10523 /* Add ourselves to objfile event chain. */
10524 observer_attach_new_objfile (arm_exidx_new_objfile
);
10526 = register_objfile_data_with_cleanup (NULL
, arm_exidx_data_free
);
10528 /* Register an ELF OS ABI sniffer for ARM binaries. */
10529 gdbarch_register_osabi_sniffer (bfd_arch_arm
,
10530 bfd_target_elf_flavour
,
10531 arm_elf_osabi_sniffer
);
10533 /* Initialize the standard target descriptions. */
10534 initialize_tdesc_arm_with_m ();
10535 initialize_tdesc_arm_with_m_fpa_layout ();
10536 initialize_tdesc_arm_with_m_vfp_d16 ();
10537 initialize_tdesc_arm_with_iwmmxt ();
10538 initialize_tdesc_arm_with_vfpv2 ();
10539 initialize_tdesc_arm_with_vfpv3 ();
10540 initialize_tdesc_arm_with_neon ();
10542 /* Get the number of possible sets of register names defined in opcodes. */
10543 num_disassembly_options
= get_arm_regname_num_options ();
10545 /* Add root prefix command for all "set arm"/"show arm" commands. */
10546 add_prefix_cmd ("arm", no_class
, set_arm_command
,
10547 _("Various ARM-specific commands."),
10548 &setarmcmdlist
, "set arm ", 0, &setlist
);
10550 add_prefix_cmd ("arm", no_class
, show_arm_command
,
10551 _("Various ARM-specific commands."),
10552 &showarmcmdlist
, "show arm ", 0, &showlist
);
10554 /* Sync the opcode insn printer with our register viewer. */
10555 parse_arm_disassembler_option ("reg-names-std");
10557 /* Initialize the array that will be passed to
10558 add_setshow_enum_cmd(). */
10559 valid_disassembly_styles
= XNEWVEC (const char *,
10560 num_disassembly_options
+ 1);
10561 for (i
= 0; i
< num_disassembly_options
; i
++)
10563 numregs
= get_arm_regnames (i
, &setname
, &setdesc
, ®names
);
10564 valid_disassembly_styles
[i
] = setname
;
10565 length
= snprintf (rdptr
, rest
, "%s - %s\n", setname
, setdesc
);
10568 /* When we find the default names, tell the disassembler to use
10570 if (!strcmp (setname
, "std"))
10572 disassembly_style
= setname
;
10573 set_arm_regname_option (i
);
10576 /* Mark the end of valid options. */
10577 valid_disassembly_styles
[num_disassembly_options
] = NULL
;
10579 /* Create the help text. */
10580 stb
= mem_fileopen ();
10581 fprintf_unfiltered (stb
, "%s%s%s",
10582 _("The valid values are:\n"),
10584 _("The default is \"std\"."));
10585 helptext
= ui_file_xstrdup (stb
, NULL
);
10586 ui_file_delete (stb
);
10588 add_setshow_enum_cmd("disassembler", no_class
,
10589 valid_disassembly_styles
, &disassembly_style
,
10590 _("Set the disassembly style."),
10591 _("Show the disassembly style."),
10593 set_disassembly_style_sfunc
,
10594 NULL
, /* FIXME: i18n: The disassembly style is
10596 &setarmcmdlist
, &showarmcmdlist
);
10598 add_setshow_boolean_cmd ("apcs32", no_class
, &arm_apcs_32
,
10599 _("Set usage of ARM 32-bit mode."),
10600 _("Show usage of ARM 32-bit mode."),
10601 _("When off, a 26-bit PC will be used."),
10603 NULL
, /* FIXME: i18n: Usage of ARM 32-bit
10605 &setarmcmdlist
, &showarmcmdlist
);
10607 /* Add a command to allow the user to force the FPU model. */
10608 add_setshow_enum_cmd ("fpu", no_class
, fp_model_strings
, ¤t_fp_model
,
10609 _("Set the floating point type."),
10610 _("Show the floating point type."),
10611 _("auto - Determine the FP typefrom the OS-ABI.\n\
10612 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
10613 fpa - FPA co-processor (GCC compiled).\n\
10614 softvfp - Software FP with pure-endian doubles.\n\
10615 vfp - VFP co-processor."),
10616 set_fp_model_sfunc
, show_fp_model
,
10617 &setarmcmdlist
, &showarmcmdlist
);
10619 /* Add a command to allow the user to force the ABI. */
10620 add_setshow_enum_cmd ("abi", class_support
, arm_abi_strings
, &arm_abi_string
,
10622 _("Show the ABI."),
10623 NULL
, arm_set_abi
, arm_show_abi
,
10624 &setarmcmdlist
, &showarmcmdlist
);
10626 /* Add two commands to allow the user to force the assumed
10628 add_setshow_enum_cmd ("fallback-mode", class_support
,
10629 arm_mode_strings
, &arm_fallback_mode_string
,
10630 _("Set the mode assumed when symbols are unavailable."),
10631 _("Show the mode assumed when symbols are unavailable."),
10632 NULL
, NULL
, arm_show_fallback_mode
,
10633 &setarmcmdlist
, &showarmcmdlist
);
10634 add_setshow_enum_cmd ("force-mode", class_support
,
10635 arm_mode_strings
, &arm_force_mode_string
,
10636 _("Set the mode assumed even when symbols are available."),
10637 _("Show the mode assumed even when symbols are available."),
10638 NULL
, NULL
, arm_show_force_mode
,
10639 &setarmcmdlist
, &showarmcmdlist
);
10641 /* Debugging flag. */
10642 add_setshow_boolean_cmd ("arm", class_maintenance
, &arm_debug
,
10643 _("Set ARM debugging."),
10644 _("Show ARM debugging."),
10645 _("When on, arm-specific debugging is enabled."),
10647 NULL
, /* FIXME: i18n: "ARM debugging is %s. */
10648 &setdebuglist
, &showdebuglist
);
/* ARM-reversible process record data structures.  */

#define ARM_INSN_SIZE_BYTES 4
#define THUMB_INSN_SIZE_BYTES 2
#define THUMB2_INSN_SIZE_BYTES 4


/* Position of the bit within a 32-bit ARM instruction
   that defines whether the instruction is a load or store.  */
#define INSN_S_L_BIT_NUM 20

/* Copy LENGTH register numbers from RECORD_BUF into a freshly
   allocated uint32_t array assigned to REGS.  Does nothing when
   LENGTH is zero.  */
#define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
        do  \
          { \
            unsigned int reg_len = LENGTH; \
            if (reg_len) \
              { \
                REGS = XNEWVEC (uint32_t, reg_len); \
                memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
              } \
          } \
        while (0)

/* Copy LENGTH memory records from RECORD_BUF into a freshly
   allocated struct arm_mem_r array assigned to MEMS.  Does nothing
   when LENGTH is zero.  */
#define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
        do  \
          { \
            unsigned int mem_len = LENGTH; \
            if (mem_len) \
              { \
                MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
                memcpy(&MEMS->len, &RECORD_BUF[0], \
                       sizeof(struct arm_mem_r) * LENGTH); \
              } \
          } \
        while (0)

/* Checks whether insn is already recorded or yet to be decoded. (boolean expression).  */
#define INSN_RECORDED(ARM_RECORD) \
        (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
/* ARM memory record structure.  */
struct arm_mem_r
{
  uint32_t len;    /* Record length.  */
  uint32_t addr;   /* Memory address.  */
};
10698 /* ARM instruction record contains opcode of current insn
10699 and execution state (before entry to decode_insn()),
10700 contains list of to-be-modified registers and
10701 memory blocks (on return from decode_insn()). */
10703 typedef struct insn_decode_record_t
10705 struct gdbarch
*gdbarch
;
10706 struct regcache
*regcache
;
10707 CORE_ADDR this_addr
; /* Address of the insn being decoded. */
10708 uint32_t arm_insn
; /* Should accommodate thumb. */
10709 uint32_t cond
; /* Condition code. */
10710 uint32_t opcode
; /* Insn opcode. */
10711 uint32_t decode
; /* Insn decode bits. */
10712 uint32_t mem_rec_count
; /* No of mem records. */
10713 uint32_t reg_rec_count
; /* No of reg records. */
10714 uint32_t *arm_regs
; /* Registers to be saved for this record. */
10715 struct arm_mem_r
*arm_mems
; /* Memory to be saved for this record. */
10716 } insn_decode_record
;
/* Checks ARM SBZ and SBO mandatory fields.  Extracts LEN bits of INSN
   starting at BIT_NUM (1-based) and verifies that every bit is 1 when
   SBO is non-zero ("should be one") or 0 when SBO is zero ("should be
   zero").  Returns 1 when the field is valid, 0 otherwise.  */

static int
sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
{
  uint32_t ones = bits (insn, bit_num - 1, (bit_num -1) + (len - 1));

  /* An empty field is trivially valid.  */
  if (!len)
    return 1;

  /* For an SBZ field, complement so both cases check for all-ones.  */
  if (!sbo)
    ones = ~ones;

  while (ones)
    {
      if (!(ones & sbo))
	{
	  return 0;
	}
      ones = ones >> 1;
    }

  return 1;
}
/* Result codes returned by the instruction recording routines.  */
enum arm_record_result
{
  ARM_RECORD_SUCCESS = 0,
  ARM_RECORD_FAILURE = 1
};

/* Kind of store handled by arm_record_strx: halfword or doubleword.  */
typedef enum
{
  ARM_RECORD_STRH=1,
  ARM_RECORD_STRD
} arm_record_strx_t;
10764 arm_record_strx (insn_decode_record
*arm_insn_r
, uint32_t *record_buf
,
10765 uint32_t *record_buf_mem
, arm_record_strx_t str_type
)
10768 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
10769 ULONGEST u_regval
[2]= {0};
10771 uint32_t reg_src1
= 0, reg_src2
= 0;
10772 uint32_t immed_high
= 0, immed_low
= 0,offset_8
= 0, tgt_mem_addr
= 0;
10773 uint32_t opcode1
= 0;
10775 arm_insn_r
->opcode
= bits (arm_insn_r
->arm_insn
, 21, 24);
10776 arm_insn_r
->decode
= bits (arm_insn_r
->arm_insn
, 4, 7);
10777 opcode1
= bits (arm_insn_r
->arm_insn
, 20, 24);
10780 if (14 == arm_insn_r
->opcode
|| 10 == arm_insn_r
->opcode
)
10782 /* 1) Handle misc store, immediate offset. */
10783 immed_low
= bits (arm_insn_r
->arm_insn
, 0, 3);
10784 immed_high
= bits (arm_insn_r
->arm_insn
, 8, 11);
10785 reg_src1
= bits (arm_insn_r
->arm_insn
, 16, 19);
10786 regcache_raw_read_unsigned (reg_cache
, reg_src1
,
10788 if (ARM_PC_REGNUM
== reg_src1
)
10790 /* If R15 was used as Rn, hence current PC+8. */
10791 u_regval
[0] = u_regval
[0] + 8;
10793 offset_8
= (immed_high
<< 4) | immed_low
;
10794 /* Calculate target store address. */
10795 if (14 == arm_insn_r
->opcode
)
10797 tgt_mem_addr
= u_regval
[0] + offset_8
;
10801 tgt_mem_addr
= u_regval
[0] - offset_8
;
10803 if (ARM_RECORD_STRH
== str_type
)
10805 record_buf_mem
[0] = 2;
10806 record_buf_mem
[1] = tgt_mem_addr
;
10807 arm_insn_r
->mem_rec_count
= 1;
10809 else if (ARM_RECORD_STRD
== str_type
)
10811 record_buf_mem
[0] = 4;
10812 record_buf_mem
[1] = tgt_mem_addr
;
10813 record_buf_mem
[2] = 4;
10814 record_buf_mem
[3] = tgt_mem_addr
+ 4;
10815 arm_insn_r
->mem_rec_count
= 2;
10818 else if (12 == arm_insn_r
->opcode
|| 8 == arm_insn_r
->opcode
)
10820 /* 2) Store, register offset. */
10822 reg_src1
= bits (arm_insn_r
->arm_insn
, 0, 3);
10824 reg_src2
= bits (arm_insn_r
->arm_insn
, 16, 19);
10825 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
[0]);
10826 regcache_raw_read_unsigned (reg_cache
, reg_src2
, &u_regval
[1]);
10827 if (15 == reg_src2
)
10829 /* If R15 was used as Rn, hence current PC+8. */
10830 u_regval
[0] = u_regval
[0] + 8;
10832 /* Calculate target store address, Rn +/- Rm, register offset. */
10833 if (12 == arm_insn_r
->opcode
)
10835 tgt_mem_addr
= u_regval
[0] + u_regval
[1];
10839 tgt_mem_addr
= u_regval
[1] - u_regval
[0];
10841 if (ARM_RECORD_STRH
== str_type
)
10843 record_buf_mem
[0] = 2;
10844 record_buf_mem
[1] = tgt_mem_addr
;
10845 arm_insn_r
->mem_rec_count
= 1;
10847 else if (ARM_RECORD_STRD
== str_type
)
10849 record_buf_mem
[0] = 4;
10850 record_buf_mem
[1] = tgt_mem_addr
;
10851 record_buf_mem
[2] = 4;
10852 record_buf_mem
[3] = tgt_mem_addr
+ 4;
10853 arm_insn_r
->mem_rec_count
= 2;
10856 else if (11 == arm_insn_r
->opcode
|| 15 == arm_insn_r
->opcode
10857 || 2 == arm_insn_r
->opcode
|| 6 == arm_insn_r
->opcode
)
10859 /* 3) Store, immediate pre-indexed. */
10860 /* 5) Store, immediate post-indexed. */
10861 immed_low
= bits (arm_insn_r
->arm_insn
, 0, 3);
10862 immed_high
= bits (arm_insn_r
->arm_insn
, 8, 11);
10863 offset_8
= (immed_high
<< 4) | immed_low
;
10864 reg_src1
= bits (arm_insn_r
->arm_insn
, 16, 19);
10865 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
[0]);
10866 /* Calculate target store address, Rn +/- Rm, register offset. */
10867 if (15 == arm_insn_r
->opcode
|| 6 == arm_insn_r
->opcode
)
10869 tgt_mem_addr
= u_regval
[0] + offset_8
;
10873 tgt_mem_addr
= u_regval
[0] - offset_8
;
10875 if (ARM_RECORD_STRH
== str_type
)
10877 record_buf_mem
[0] = 2;
10878 record_buf_mem
[1] = tgt_mem_addr
;
10879 arm_insn_r
->mem_rec_count
= 1;
10881 else if (ARM_RECORD_STRD
== str_type
)
10883 record_buf_mem
[0] = 4;
10884 record_buf_mem
[1] = tgt_mem_addr
;
10885 record_buf_mem
[2] = 4;
10886 record_buf_mem
[3] = tgt_mem_addr
+ 4;
10887 arm_insn_r
->mem_rec_count
= 2;
10889 /* Record Rn also as it changes. */
10890 *(record_buf
) = bits (arm_insn_r
->arm_insn
, 16, 19);
10891 arm_insn_r
->reg_rec_count
= 1;
10893 else if (9 == arm_insn_r
->opcode
|| 13 == arm_insn_r
->opcode
10894 || 0 == arm_insn_r
->opcode
|| 4 == arm_insn_r
->opcode
)
10896 /* 4) Store, register pre-indexed. */
10897 /* 6) Store, register post -indexed. */
10898 reg_src1
= bits (arm_insn_r
->arm_insn
, 0, 3);
10899 reg_src2
= bits (arm_insn_r
->arm_insn
, 16, 19);
10900 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
[0]);
10901 regcache_raw_read_unsigned (reg_cache
, reg_src2
, &u_regval
[1]);
10902 /* Calculate target store address, Rn +/- Rm, register offset. */
10903 if (13 == arm_insn_r
->opcode
|| 4 == arm_insn_r
->opcode
)
10905 tgt_mem_addr
= u_regval
[0] + u_regval
[1];
10909 tgt_mem_addr
= u_regval
[1] - u_regval
[0];
10911 if (ARM_RECORD_STRH
== str_type
)
10913 record_buf_mem
[0] = 2;
10914 record_buf_mem
[1] = tgt_mem_addr
;
10915 arm_insn_r
->mem_rec_count
= 1;
10917 else if (ARM_RECORD_STRD
== str_type
)
10919 record_buf_mem
[0] = 4;
10920 record_buf_mem
[1] = tgt_mem_addr
;
10921 record_buf_mem
[2] = 4;
10922 record_buf_mem
[3] = tgt_mem_addr
+ 4;
10923 arm_insn_r
->mem_rec_count
= 2;
10925 /* Record Rn also as it changes. */
10926 *(record_buf
) = bits (arm_insn_r
->arm_insn
, 16, 19);
10927 arm_insn_r
->reg_rec_count
= 1;
10932 /* Handling ARM extension space insns. */
10935 arm_record_extension_space (insn_decode_record
*arm_insn_r
)
10937 uint32_t ret
= 0; /* Return value: -1:record failure ; 0:success */
10938 uint32_t opcode1
= 0, opcode2
= 0, insn_op1
= 0;
10939 uint32_t record_buf
[8], record_buf_mem
[8];
10940 uint32_t reg_src1
= 0;
10941 uint32_t immed_high
= 0, immed_low
= 0,offset_8
= 0, tgt_mem_addr
= 0;
10942 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
10943 ULONGEST u_regval
= 0;
10945 gdb_assert (!INSN_RECORDED(arm_insn_r
));
10946 /* Handle unconditional insn extension space. */
10948 opcode1
= bits (arm_insn_r
->arm_insn
, 20, 27);
10949 opcode2
= bits (arm_insn_r
->arm_insn
, 4, 7);
10950 if (arm_insn_r
->cond
)
10952 /* PLD has no affect on architectural state, it just affects
10954 if (5 == ((opcode1
& 0xE0) >> 5))
10957 record_buf
[0] = ARM_PS_REGNUM
;
10958 record_buf
[1] = ARM_LR_REGNUM
;
10959 arm_insn_r
->reg_rec_count
= 2;
10961 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
10965 opcode1
= bits (arm_insn_r
->arm_insn
, 25, 27);
10966 if (3 == opcode1
&& bit (arm_insn_r
->arm_insn
, 4))
10969 /* Undefined instruction on ARM V5; need to handle if later
10970 versions define it. */
10973 opcode1
= bits (arm_insn_r
->arm_insn
, 24, 27);
10974 opcode2
= bits (arm_insn_r
->arm_insn
, 4, 7);
10975 insn_op1
= bits (arm_insn_r
->arm_insn
, 20, 23);
10977 /* Handle arithmetic insn extension space. */
10978 if (!opcode1
&& 9 == opcode2
&& 1 != arm_insn_r
->cond
10979 && !INSN_RECORDED(arm_insn_r
))
10981 /* Handle MLA(S) and MUL(S). */
10982 if (0 <= insn_op1
&& 3 >= insn_op1
)
10984 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10985 record_buf
[1] = ARM_PS_REGNUM
;
10986 arm_insn_r
->reg_rec_count
= 2;
10988 else if (4 <= insn_op1
&& 15 >= insn_op1
)
10990 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
10991 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 16, 19);
10992 record_buf
[1] = bits (arm_insn_r
->arm_insn
, 12, 15);
10993 record_buf
[2] = ARM_PS_REGNUM
;
10994 arm_insn_r
->reg_rec_count
= 3;
10998 opcode1
= bits (arm_insn_r
->arm_insn
, 26, 27);
10999 opcode2
= bits (arm_insn_r
->arm_insn
, 23, 24);
11000 insn_op1
= bits (arm_insn_r
->arm_insn
, 21, 22);
11002 /* Handle control insn extension space. */
11004 if (!opcode1
&& 2 == opcode2
&& !bit (arm_insn_r
->arm_insn
, 20)
11005 && 1 != arm_insn_r
->cond
&& !INSN_RECORDED(arm_insn_r
))
11007 if (!bit (arm_insn_r
->arm_insn
,25))
11009 if (!bits (arm_insn_r
->arm_insn
, 4, 7))
11011 if ((0 == insn_op1
) || (2 == insn_op1
))
11014 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11015 arm_insn_r
->reg_rec_count
= 1;
11017 else if (1 == insn_op1
)
11019 /* CSPR is going to be changed. */
11020 record_buf
[0] = ARM_PS_REGNUM
;
11021 arm_insn_r
->reg_rec_count
= 1;
11023 else if (3 == insn_op1
)
11025 /* SPSR is going to be changed. */
11026 /* We need to get SPSR value, which is yet to be done. */
11027 printf_unfiltered (_("Process record does not support "
11028 "instruction 0x%0x at address %s.\n"),
11029 arm_insn_r
->arm_insn
,
11030 paddress (arm_insn_r
->gdbarch
,
11031 arm_insn_r
->this_addr
));
11035 else if (1 == bits (arm_insn_r
->arm_insn
, 4, 7))
11040 record_buf
[0] = ARM_PS_REGNUM
;
11041 arm_insn_r
->reg_rec_count
= 1;
11043 else if (3 == insn_op1
)
11046 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11047 arm_insn_r
->reg_rec_count
= 1;
11050 else if (3 == bits (arm_insn_r
->arm_insn
, 4, 7))
11053 record_buf
[0] = ARM_PS_REGNUM
;
11054 record_buf
[1] = ARM_LR_REGNUM
;
11055 arm_insn_r
->reg_rec_count
= 2;
11057 else if (5 == bits (arm_insn_r
->arm_insn
, 4, 7))
11059 /* QADD, QSUB, QDADD, QDSUB */
11060 record_buf
[0] = ARM_PS_REGNUM
;
11061 record_buf
[1] = bits (arm_insn_r
->arm_insn
, 12, 15);
11062 arm_insn_r
->reg_rec_count
= 2;
11064 else if (7 == bits (arm_insn_r
->arm_insn
, 4, 7))
11067 record_buf
[0] = ARM_PS_REGNUM
;
11068 record_buf
[1] = ARM_LR_REGNUM
;
11069 arm_insn_r
->reg_rec_count
= 2;
11071 /* Save SPSR also;how? */
11072 printf_unfiltered (_("Process record does not support "
11073 "instruction 0x%0x at address %s.\n"),
11074 arm_insn_r
->arm_insn
,
11075 paddress (arm_insn_r
->gdbarch
, arm_insn_r
->this_addr
));
11078 else if(8 == bits (arm_insn_r
->arm_insn
, 4, 7)
11079 || 10 == bits (arm_insn_r
->arm_insn
, 4, 7)
11080 || 12 == bits (arm_insn_r
->arm_insn
, 4, 7)
11081 || 14 == bits (arm_insn_r
->arm_insn
, 4, 7)
11084 if (0 == insn_op1
|| 1 == insn_op1
)
11086 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
11087 /* We dont do optimization for SMULW<y> where we
11089 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11090 record_buf
[1] = ARM_PS_REGNUM
;
11091 arm_insn_r
->reg_rec_count
= 2;
11093 else if (2 == insn_op1
)
11096 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11097 record_buf
[1] = bits (arm_insn_r
->arm_insn
, 16, 19);
11098 arm_insn_r
->reg_rec_count
= 2;
11100 else if (3 == insn_op1
)
11103 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11104 arm_insn_r
->reg_rec_count
= 1;
11110 /* MSR : immediate form. */
11113 /* CSPR is going to be changed. */
11114 record_buf
[0] = ARM_PS_REGNUM
;
11115 arm_insn_r
->reg_rec_count
= 1;
11117 else if (3 == insn_op1
)
11119 /* SPSR is going to be changed. */
11120 /* we need to get SPSR value, which is yet to be done */
11121 printf_unfiltered (_("Process record does not support "
11122 "instruction 0x%0x at address %s.\n"),
11123 arm_insn_r
->arm_insn
,
11124 paddress (arm_insn_r
->gdbarch
,
11125 arm_insn_r
->this_addr
));
11131 opcode1
= bits (arm_insn_r
->arm_insn
, 25, 27);
11132 opcode2
= bits (arm_insn_r
->arm_insn
, 20, 24);
11133 insn_op1
= bits (arm_insn_r
->arm_insn
, 5, 6);
11135 /* Handle load/store insn extension space. */
11137 if (!opcode1
&& bit (arm_insn_r
->arm_insn
, 7)
11138 && bit (arm_insn_r
->arm_insn
, 4) && 1 != arm_insn_r
->cond
11139 && !INSN_RECORDED(arm_insn_r
))
11144 /* These insn, changes register and memory as well. */
11145 /* SWP or SWPB insn. */
11146 /* Get memory address given by Rn. */
11147 reg_src1
= bits (arm_insn_r
->arm_insn
, 16, 19);
11148 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
);
11149 /* SWP insn ?, swaps word. */
11150 if (8 == arm_insn_r
->opcode
)
11152 record_buf_mem
[0] = 4;
11156 /* SWPB insn, swaps only byte. */
11157 record_buf_mem
[0] = 1;
11159 record_buf_mem
[1] = u_regval
;
11160 arm_insn_r
->mem_rec_count
= 1;
11161 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11162 arm_insn_r
->reg_rec_count
= 1;
11164 else if (1 == insn_op1
&& !bit (arm_insn_r
->arm_insn
, 20))
11167 arm_record_strx(arm_insn_r
, &record_buf
[0], &record_buf_mem
[0],
11170 else if (2 == insn_op1
&& !bit (arm_insn_r
->arm_insn
, 20))
11173 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11174 record_buf
[1] = record_buf
[0] + 1;
11175 arm_insn_r
->reg_rec_count
= 2;
11177 else if (3 == insn_op1
&& !bit (arm_insn_r
->arm_insn
, 20))
11180 arm_record_strx(arm_insn_r
, &record_buf
[0], &record_buf_mem
[0],
11183 else if (bit (arm_insn_r
->arm_insn
, 20) && insn_op1
<= 3)
11185 /* LDRH, LDRSB, LDRSH. */
11186 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11187 arm_insn_r
->reg_rec_count
= 1;
11192 opcode1
= bits (arm_insn_r
->arm_insn
, 23, 27);
11193 if (24 == opcode1
&& bit (arm_insn_r
->arm_insn
, 21)
11194 && !INSN_RECORDED(arm_insn_r
))
11197 /* Handle coprocessor insn extension space. */
11200 /* To be done for ARMv5 and later; as of now we return -1. */
11202 printf_unfiltered (_("Process record does not support instruction x%0x "
11203 "at address %s.\n"),arm_insn_r
->arm_insn
,
11204 paddress (arm_insn_r
->gdbarch
, arm_insn_r
->this_addr
));
11207 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
11208 MEM_ALLOC (arm_insn_r
->arm_mems
, arm_insn_r
->mem_rec_count
, record_buf_mem
);
11213 /* Handling opcode 000 insns. */
11216 arm_record_data_proc_misc_ld_str (insn_decode_record
*arm_insn_r
)
11218 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
11219 uint32_t record_buf
[8], record_buf_mem
[8];
11220 ULONGEST u_regval
[2] = {0};
11222 uint32_t reg_src1
= 0, reg_src2
= 0, reg_dest
= 0;
11223 uint32_t immed_high
= 0, immed_low
= 0, offset_8
= 0, tgt_mem_addr
= 0;
11224 uint32_t opcode1
= 0;
11226 arm_insn_r
->opcode
= bits (arm_insn_r
->arm_insn
, 21, 24);
11227 arm_insn_r
->decode
= bits (arm_insn_r
->arm_insn
, 4, 7);
11228 opcode1
= bits (arm_insn_r
->arm_insn
, 20, 24);
11230 /* Data processing insn /multiply insn. */
11231 if (9 == arm_insn_r
->decode
11232 && ((4 <= arm_insn_r
->opcode
&& 7 >= arm_insn_r
->opcode
)
11233 || (0 == arm_insn_r
->opcode
|| 1 == arm_insn_r
->opcode
)))
11235 /* Handle multiply instructions. */
11236 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
11237 if (0 == arm_insn_r
->opcode
|| 1 == arm_insn_r
->opcode
)
11239 /* Handle MLA and MUL. */
11240 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 16, 19);
11241 record_buf
[1] = ARM_PS_REGNUM
;
11242 arm_insn_r
->reg_rec_count
= 2;
11244 else if (4 <= arm_insn_r
->opcode
&& 7 >= arm_insn_r
->opcode
)
11246 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
11247 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 16, 19);
11248 record_buf
[1] = bits (arm_insn_r
->arm_insn
, 12, 15);
11249 record_buf
[2] = ARM_PS_REGNUM
;
11250 arm_insn_r
->reg_rec_count
= 3;
11253 else if (bit (arm_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
)
11254 && (11 == arm_insn_r
->decode
|| 13 == arm_insn_r
->decode
))
11256 /* Handle misc load insns, as 20th bit (L = 1). */
11257 /* LDR insn has a capability to do branching, if
11258 MOV LR, PC is precceded by LDR insn having Rn as R15
11259 in that case, it emulates branch and link insn, and hence we
11260 need to save CSPR and PC as well. I am not sure this is right
11261 place; as opcode = 010 LDR insn make this happen, if R15 was
11263 reg_dest
= bits (arm_insn_r
->arm_insn
, 12, 15);
11264 if (15 != reg_dest
)
11266 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11267 arm_insn_r
->reg_rec_count
= 1;
11271 record_buf
[0] = reg_dest
;
11272 record_buf
[1] = ARM_PS_REGNUM
;
11273 arm_insn_r
->reg_rec_count
= 2;
11276 else if ((9 == arm_insn_r
->opcode
|| 11 == arm_insn_r
->opcode
)
11277 && sbo_sbz (arm_insn_r
->arm_insn
, 5, 12, 0)
11278 && sbo_sbz (arm_insn_r
->arm_insn
, 13, 4, 1)
11279 && 2 == bits (arm_insn_r
->arm_insn
, 20, 21))
11281 /* Handle MSR insn. */
11282 if (9 == arm_insn_r
->opcode
)
11284 /* CSPR is going to be changed. */
11285 record_buf
[0] = ARM_PS_REGNUM
;
11286 arm_insn_r
->reg_rec_count
= 1;
11290 /* SPSR is going to be changed. */
11291 /* How to read SPSR value? */
11292 printf_unfiltered (_("Process record does not support instruction "
11293 "0x%0x at address %s.\n"),
11294 arm_insn_r
->arm_insn
,
11295 paddress (arm_insn_r
->gdbarch
, arm_insn_r
->this_addr
));
11299 else if (9 == arm_insn_r
->decode
11300 && (8 == arm_insn_r
->opcode
|| 10 == arm_insn_r
->opcode
)
11301 && !bit (arm_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
11303 /* Handling SWP, SWPB. */
11304 /* These insn, changes register and memory as well. */
11305 /* SWP or SWPB insn. */
11307 reg_src1
= bits (arm_insn_r
->arm_insn
, 16, 19);
11308 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
[0]);
11309 /* SWP insn ?, swaps word. */
11310 if (8 == arm_insn_r
->opcode
)
11312 record_buf_mem
[0] = 4;
11316 /* SWPB insn, swaps only byte. */
11317 record_buf_mem
[0] = 1;
11319 record_buf_mem
[1] = u_regval
[0];
11320 arm_insn_r
->mem_rec_count
= 1;
11321 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11322 arm_insn_r
->reg_rec_count
= 1;
11324 else if (3 == arm_insn_r
->decode
&& 0x12 == opcode1
11325 && sbo_sbz (arm_insn_r
->arm_insn
, 9, 12, 1))
11327 /* Handle BLX, branch and link/exchange. */
11328 if (9 == arm_insn_r
->opcode
)
11330 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm,
11331 and R14 stores the return address. */
11332 record_buf
[0] = ARM_PS_REGNUM
;
11333 record_buf
[1] = ARM_LR_REGNUM
;
11334 arm_insn_r
->reg_rec_count
= 2;
11337 else if (7 == arm_insn_r
->decode
&& 0x12 == opcode1
)
11339 /* Handle enhanced software breakpoint insn, BKPT. */
11340 /* CPSR is changed to be executed in ARM state, disabling normal
11341 interrupts, entering abort mode. */
11342 /* According to high vector configuration PC is set. */
11343 /* user hit breakpoint and type reverse, in
11344 that case, we need to go back with previous CPSR and
11345 Program Counter. */
11346 record_buf
[0] = ARM_PS_REGNUM
;
11347 record_buf
[1] = ARM_LR_REGNUM
;
11348 arm_insn_r
->reg_rec_count
= 2;
11350 /* Save SPSR also; how? */
11351 printf_unfiltered (_("Process record does not support instruction "
11352 "0x%0x at address %s.\n"),arm_insn_r
->arm_insn
,
11353 paddress (arm_insn_r
->gdbarch
,
11354 arm_insn_r
->this_addr
));
11357 else if (11 == arm_insn_r
->decode
11358 && !bit (arm_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
11360 /* Handle enhanced store insns and DSP insns (e.g. LDRD). */
11362 /* Handle str(x) insn */
11363 arm_record_strx(arm_insn_r
, &record_buf
[0], &record_buf_mem
[0],
11366 else if (1 == arm_insn_r
->decode
&& 0x12 == opcode1
11367 && sbo_sbz (arm_insn_r
->arm_insn
, 9, 12, 1))
11369 /* Handle BX, branch and link/exchange. */
11370 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm. */
11371 record_buf
[0] = ARM_PS_REGNUM
;
11372 arm_insn_r
->reg_rec_count
= 1;
11374 else if (1 == arm_insn_r
->decode
&& 0x16 == opcode1
11375 && sbo_sbz (arm_insn_r
->arm_insn
, 9, 4, 1)
11376 && sbo_sbz (arm_insn_r
->arm_insn
, 17, 4, 1))
11378 /* Count leading zeros: CLZ. */
11379 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11380 arm_insn_r
->reg_rec_count
= 1;
11382 else if (!bit (arm_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
)
11383 && (8 == arm_insn_r
->opcode
|| 10 == arm_insn_r
->opcode
)
11384 && sbo_sbz (arm_insn_r
->arm_insn
, 17, 4, 1)
11385 && sbo_sbz (arm_insn_r
->arm_insn
, 1, 12, 0)
11388 /* Handle MRS insn. */
11389 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11390 arm_insn_r
->reg_rec_count
= 1;
11392 else if (arm_insn_r
->opcode
<= 15)
11394 /* Normal data processing insns. */
11395 /* Out of 11 shifter operands mode, all the insn modifies destination
11396 register, which is specified by 13-16 decode. */
11397 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11398 record_buf
[1] = ARM_PS_REGNUM
;
11399 arm_insn_r
->reg_rec_count
= 2;
11406 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
11407 MEM_ALLOC (arm_insn_r
->arm_mems
, arm_insn_r
->mem_rec_count
, record_buf_mem
);
11411 /* Handling opcode 001 insns. */
11414 arm_record_data_proc_imm (insn_decode_record
*arm_insn_r
)
11416 uint32_t record_buf
[8], record_buf_mem
[8];
11418 arm_insn_r
->opcode
= bits (arm_insn_r
->arm_insn
, 21, 24);
11419 arm_insn_r
->decode
= bits (arm_insn_r
->arm_insn
, 4, 7);
11421 if ((9 == arm_insn_r
->opcode
|| 11 == arm_insn_r
->opcode
)
11422 && 2 == bits (arm_insn_r
->arm_insn
, 20, 21)
11423 && sbo_sbz (arm_insn_r
->arm_insn
, 13, 4, 1)
11426 /* Handle MSR insn. */
11427 if (9 == arm_insn_r
->opcode
)
11429 /* CSPR is going to be changed. */
11430 record_buf
[0] = ARM_PS_REGNUM
;
11431 arm_insn_r
->reg_rec_count
= 1;
11435 /* SPSR is going to be changed. */
11438 else if (arm_insn_r
->opcode
<= 15)
11440 /* Normal data processing insns. */
11441 /* Out of 11 shifter operands mode, all the insn modifies destination
11442 register, which is specified by 13-16 decode. */
11443 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11444 record_buf
[1] = ARM_PS_REGNUM
;
11445 arm_insn_r
->reg_rec_count
= 2;
11452 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
11453 MEM_ALLOC (arm_insn_r
->arm_mems
, arm_insn_r
->mem_rec_count
, record_buf_mem
);
11457 /* Handle ARM mode instructions with opcode 010. */
11460 arm_record_ld_st_imm_offset (insn_decode_record
*arm_insn_r
)
11462 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
11464 uint32_t reg_base
, reg_dest
;
11465 uint32_t offset_12
, tgt_mem_addr
;
11466 uint32_t record_buf
[8], record_buf_mem
[8];
11467 unsigned char wback
;
11470 /* Calculate wback. */
11471 wback
= (bit (arm_insn_r
->arm_insn
, 24) == 0)
11472 || (bit (arm_insn_r
->arm_insn
, 21) == 1);
11474 arm_insn_r
->reg_rec_count
= 0;
11475 reg_base
= bits (arm_insn_r
->arm_insn
, 16, 19);
11477 if (bit (arm_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
11479 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
11482 reg_dest
= bits (arm_insn_r
->arm_insn
, 12, 15);
11483 record_buf
[arm_insn_r
->reg_rec_count
++] = reg_dest
;
11485 /* The LDR instruction is capable of doing branching. If MOV LR, PC
11486 preceeds a LDR instruction having R15 as reg_base, it
11487 emulates a branch and link instruction, and hence we need to save
11488 CPSR and PC as well. */
11489 if (ARM_PC_REGNUM
== reg_dest
)
11490 record_buf
[arm_insn_r
->reg_rec_count
++] = ARM_PS_REGNUM
;
11492 /* If wback is true, also save the base register, which is going to be
11495 record_buf
[arm_insn_r
->reg_rec_count
++] = reg_base
;
11499 /* STR (immediate), STRB (immediate), STRBT and STRT. */
11501 offset_12
= bits (arm_insn_r
->arm_insn
, 0, 11);
11502 regcache_raw_read_unsigned (reg_cache
, reg_base
, &u_regval
);
11504 /* Handle bit U. */
11505 if (bit (arm_insn_r
->arm_insn
, 23))
11507 /* U == 1: Add the offset. */
11508 tgt_mem_addr
= (uint32_t) u_regval
+ offset_12
;
11512 /* U == 0: subtract the offset. */
11513 tgt_mem_addr
= (uint32_t) u_regval
- offset_12
;
11516 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
11518 if (bit (arm_insn_r
->arm_insn
, 22))
11520 /* STRB and STRBT: 1 byte. */
11521 record_buf_mem
[0] = 1;
11525 /* STR and STRT: 4 bytes. */
11526 record_buf_mem
[0] = 4;
11529 /* Handle bit P. */
11530 if (bit (arm_insn_r
->arm_insn
, 24))
11531 record_buf_mem
[1] = tgt_mem_addr
;
11533 record_buf_mem
[1] = (uint32_t) u_regval
;
11535 arm_insn_r
->mem_rec_count
= 1;
11537 /* If wback is true, also save the base register, which is going to be
11540 record_buf
[arm_insn_r
->reg_rec_count
++] = reg_base
;
11543 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
11544 MEM_ALLOC (arm_insn_r
->arm_mems
, arm_insn_r
->mem_rec_count
, record_buf_mem
);
/* NOTE(review): the text below is a line-mangled extract of GDB's
   arm-tdep.c (statements are split across physical lines and several
   original lines -- braces, case labels, `else` keywords, the `static
   int` line -- are missing).  Only comments have been added here; no
   original token was altered.  */
/* Recorder for ARM single load/store with register (possibly scaled)
   offset.  Loads record the destination register (plus CPSR when Rd is
   PC); stores record the target memory address and access size, and
   Rn when writeback applies.  */
11548 /* Handling opcode 011 insns. */
11551 arm_record_ld_st_reg_offset (insn_decode_record
*arm_insn_r
)
11553 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
11555 uint32_t shift_imm
= 0;
11556 uint32_t reg_src1
= 0, reg_src2
= 0, reg_dest
= 0;
11557 uint32_t offset_12
= 0, tgt_mem_addr
= 0;
11558 uint32_t record_buf
[8], record_buf_mem
[8];
11561 ULONGEST u_regval
[2];
11563 arm_insn_r
->opcode
= bits (arm_insn_r
->arm_insn
, 21, 24);
11564 arm_insn_r
->decode
= bits (arm_insn_r
->arm_insn
, 4, 7);
11566 /* Handle enhanced store insns and LDRD DSP insn,
11567 order begins according to addressing modes for store insns
11571 if (bit (arm_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
/* Load path: bit 20 (L) set.  */
11573 reg_dest
= bits (arm_insn_r
->arm_insn
, 12, 15);
11574 /* LDR insn has a capability to do branching, if
11575 MOV LR, PC is precedded by LDR insn having Rn as R15
11576 in that case, it emulates branch and link insn, and hence we
11577 need to save CSPR and PC as well. */
11578 if (15 != reg_dest
)
11580 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11581 arm_insn_r
->reg_rec_count
= 1;
/* Rd == PC: save CPSR too, since a load into PC acts as a branch.  */
11585 record_buf
[0] = reg_dest
;
11586 record_buf
[1] = ARM_PS_REGNUM
;
11587 arm_insn_r
->reg_rec_count
= 2;
/* Store path.  Bits 4-11 == 0 means plain register offset (no shift).  */
11592 if (! bits (arm_insn_r
->arm_insn
, 4, 11))
11594 /* Store insn, register offset and register pre-indexed,
11595 register post-indexed. */
11597 reg_src1
= bits (arm_insn_r
->arm_insn
, 0, 3);
11599 reg_src2
= bits (arm_insn_r
->arm_insn
, 16, 19);
11600 regcache_raw_read_unsigned (reg_cache
, reg_src1
11602 regcache_raw_read_unsigned (reg_cache
, reg_src2
11604 if (15 == reg_src2
)
11606 /* If R15 was used as Rn, hence current PC+8. */
11607 /* Pre-indexed mode doesnt reach here ; illegal insn. */
11608 u_regval
[0] = u_regval
[0] + 8;
11610 /* Calculate target store address, Rn +/- Rm, register offset. */
11612 if (bit (arm_insn_r
->arm_insn
, 23))
11614 tgt_mem_addr
= u_regval
[0] + u_regval
[1];
11618 tgt_mem_addr
= u_regval
[1] - u_regval
[0];
/* NOTE(review): case labels between the switch and the two assignments
   below are missing from this extract; size 4 is the STR/STRT group,
   size 1 the STRB/STRBT group.  */
11621 switch (arm_insn_r
->opcode
)
11635 record_buf_mem
[0] = 4;
11650 record_buf_mem
[0] = 1;
11654 gdb_assert_not_reached ("no decoding pattern found");
11657 record_buf_mem
[1] = tgt_mem_addr
;
11658 arm_insn_r
->mem_rec_count
= 1;
/* Opcodes with writeback (pre/post-indexed) also modify Rn.  */
11660 if (9 == arm_insn_r
->opcode
|| 11 == arm_insn_r
->opcode
11661 || 13 == arm_insn_r
->opcode
|| 15 == arm_insn_r
->opcode
11662 || 0 == arm_insn_r
->opcode
|| 2 == arm_insn_r
->opcode
11663 || 4 == arm_insn_r
->opcode
|| 6 == arm_insn_r
->opcode
11664 || 1 == arm_insn_r
->opcode
|| 3 == arm_insn_r
->opcode
11665 || 5 == arm_insn_r
->opcode
|| 7 == arm_insn_r
->opcode
11668 /* Rn is going to be changed in pre-indexed mode and
11669 post-indexed mode as well. */
11670 record_buf
[0] = reg_src2
;
11671 arm_insn_r
->reg_rec_count
= 1;
11676 /* Store insn, scaled register offset; scaled pre-indexed. */
/* offset_12 temporarily holds the 2-bit shift type (bits 5-6):
   0 = LSL, 1 = LSR, 2 = ASR, 3 = ROR/RRX.  */
11677 offset_12
= bits (arm_insn_r
->arm_insn
, 5, 6);
11679 reg_src1
= bits (arm_insn_r
->arm_insn
, 0, 3);
11681 reg_src2
= bits (arm_insn_r
->arm_insn
, 16, 19);
11682 /* Get shift_imm. */
11683 shift_imm
= bits (arm_insn_r
->arm_insn
, 7, 11);
11684 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
[0]);
11685 regcache_raw_read_signed (reg_cache
, reg_src1
, &s_word
);
11686 regcache_raw_read_unsigned (reg_cache
, reg_src2
, &u_regval
[1]);
11687 /* Offset_12 used as shift. */
/* Case 0: logical shift left.  */
11691 /* Offset_12 used as index. */
11692 offset_12
= u_regval
[0] << shift_imm
;
/* Case 1: logical shift right; shift_imm == 0 encodes LSR #32.  */
11696 offset_12
= (!shift_imm
)?0:u_regval
[0] >> shift_imm
;
/* Case 2: arithmetic shift right; shift_imm == 0 encodes ASR #32,
   which yields all-ones or zero depending on the sign bit.  */
11702 if (bit (u_regval
[0], 31))
11704 offset_12
= 0xFFFFFFFF;
11713 /* This is arithmetic shift. */
11714 offset_12
= s_word
>> shift_imm
;
/* Case 3 with shift_imm == 0: RRX -- rotate right with extend using
   the carry flag from CPSR.  */
11721 regcache_raw_read_unsigned (reg_cache
, ARM_PS_REGNUM
,
11723 /* Get C flag value and shift it by 31. */
11724 offset_12
= (((bit (u_regval
[1], 29)) << 31) \
11725 | (u_regval
[0]) >> 1);
/* Case 3 otherwise: plain rotate right.
   NOTE(review): sizeof(uint32_t) here is 4, not 32 -- the rotate
   amount looks wrong (should presumably be bit-width minus
   shift_imm); TODO confirm against upstream GDB.  */
11729 offset_12
= (u_regval
[0] >> shift_imm
) \
11731 (sizeof(uint32_t) - shift_imm
));
11736 gdb_assert_not_reached ("no decoding pattern found");
11740 regcache_raw_read_unsigned (reg_cache
, reg_src2
, &u_regval
[1]);
/* Bit 23 (U): add or subtract the computed offset from Rn.  */
11742 if (bit (arm_insn_r
->arm_insn
, 23))
11744 tgt_mem_addr
= u_regval
[1] + offset_12
;
11748 tgt_mem_addr
= u_regval
[1] - offset_12
;
11751 switch (arm_insn_r
->opcode
)
11765 record_buf_mem
[0] = 4;
11780 record_buf_mem
[0] = 1;
11784 gdb_assert_not_reached ("no decoding pattern found");
11787 record_buf_mem
[1] = tgt_mem_addr
;
11788 arm_insn_r
->mem_rec_count
= 1;
11790 if (9 == arm_insn_r
->opcode
|| 11 == arm_insn_r
->opcode
11791 || 13 == arm_insn_r
->opcode
|| 15 == arm_insn_r
->opcode
11792 || 0 == arm_insn_r
->opcode
|| 2 == arm_insn_r
->opcode
11793 || 4 == arm_insn_r
->opcode
|| 6 == arm_insn_r
->opcode
11794 || 1 == arm_insn_r
->opcode
|| 3 == arm_insn_r
->opcode
11795 || 5 == arm_insn_r
->opcode
|| 7 == arm_insn_r
->opcode
11798 /* Rn is going to be changed in register scaled pre-indexed
11799 mode,and scaled post indexed mode. */
11800 record_buf
[0] = reg_src2
;
11801 arm_insn_r
->reg_rec_count
= 1;
11806 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
11807 MEM_ALLOC (arm_insn_r
->arm_mems
, arm_insn_r
->mem_rec_count
, record_buf_mem
);
11811 /* Handle ARM mode instructions with opcode 100. */
11814 arm_record_ld_st_multiple (insn_decode_record
*arm_insn_r
)
11816 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
11817 uint32_t register_count
= 0, register_bits
;
11818 uint32_t reg_base
, addr_mode
;
11819 uint32_t record_buf
[24], record_buf_mem
[48];
11823 /* Fetch the list of registers. */
11824 register_bits
= bits (arm_insn_r
->arm_insn
, 0, 15);
11825 arm_insn_r
->reg_rec_count
= 0;
11827 /* Fetch the base register that contains the address we are loading data
11829 reg_base
= bits (arm_insn_r
->arm_insn
, 16, 19);
11831 /* Calculate wback. */
11832 wback
= (bit (arm_insn_r
->arm_insn
, 21) == 1);
11834 if (bit (arm_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
11836 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
11838 /* Find out which registers are going to be loaded from memory. */
11839 while (register_bits
)
11841 if (register_bits
& 0x00000001)
11842 record_buf
[arm_insn_r
->reg_rec_count
++] = register_count
;
11843 register_bits
= register_bits
>> 1;
11848 /* If wback is true, also save the base register, which is going to be
11851 record_buf
[arm_insn_r
->reg_rec_count
++] = reg_base
;
11853 /* Save the CPSR register. */
11854 record_buf
[arm_insn_r
->reg_rec_count
++] = ARM_PS_REGNUM
;
11858 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
11860 addr_mode
= bits (arm_insn_r
->arm_insn
, 23, 24);
11862 regcache_raw_read_unsigned (reg_cache
, reg_base
, &u_regval
);
11864 /* Find out how many registers are going to be stored to memory. */
11865 while (register_bits
)
11867 if (register_bits
& 0x00000001)
11869 register_bits
= register_bits
>> 1;
11874 /* STMDA (STMED): Decrement after. */
11876 record_buf_mem
[1] = (uint32_t) u_regval
11877 - register_count
* INT_REGISTER_SIZE
+ 4;
11879 /* STM (STMIA, STMEA): Increment after. */
11881 record_buf_mem
[1] = (uint32_t) u_regval
;
11883 /* STMDB (STMFD): Decrement before. */
11885 record_buf_mem
[1] = (uint32_t) u_regval
11886 - register_count
* INT_REGISTER_SIZE
;
11888 /* STMIB (STMFA): Increment before. */
11890 record_buf_mem
[1] = (uint32_t) u_regval
+ INT_REGISTER_SIZE
;
11893 gdb_assert_not_reached ("no decoding pattern found");
11897 record_buf_mem
[0] = register_count
* INT_REGISTER_SIZE
;
11898 arm_insn_r
->mem_rec_count
= 1;
11900 /* If wback is true, also save the base register, which is going to be
11903 record_buf
[arm_insn_r
->reg_rec_count
++] = reg_base
;
11906 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
11907 MEM_ALLOC (arm_insn_r
->arm_mems
, arm_insn_r
->mem_rec_count
, record_buf_mem
);
11911 /* Handling opcode 101 insns. */
11914 arm_record_b_bl (insn_decode_record
*arm_insn_r
)
11916 uint32_t record_buf
[8];
11918 /* Handle B, BL, BLX(1) insns. */
11919 /* B simply branches so we do nothing here. */
11920 /* Note: BLX(1) doesnt fall here but instead it falls into
11921 extension space. */
11922 if (bit (arm_insn_r
->arm_insn
, 24))
11924 record_buf
[0] = ARM_LR_REGNUM
;
11925 arm_insn_r
->reg_rec_count
= 1;
11928 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
11933 /* Handling opcode 110 insns. */
11936 arm_record_unsupported_insn (insn_decode_record
*arm_insn_r
)
11938 printf_unfiltered (_("Process record does not support instruction "
11939 "0x%0x at address %s.\n"),arm_insn_r
->arm_insn
,
11940 paddress (arm_insn_r
->gdbarch
, arm_insn_r
->this_addr
));
/* NOTE(review): line-mangled extract; statements are split across
   physical lines and some original lines (braces, `else`, `static int`)
   are missing.  Only comments added; no original token altered.  */
/* Recorder for 8/16/32-bit transfers between ARM core and VFP/NEON
   registers (VMOV, VMRS, VMSR, VDUP): records whichever core or
   extension register the instruction writes.  */
11945 /* Record handler for vector data transfer instructions. */
11948 arm_record_vdata_transfer_insn (insn_decode_record
*arm_insn_r
)
11950 uint32_t bits_a
, bit_c
, bit_l
, reg_t
, reg_v
;
11951 uint32_t record_buf
[4];
/* NOTE(review): reg_v and bits_a are both read from bits 21-23 --
   presumably intentional aliasing of the A opcode field; confirm
   against upstream.  */
11953 const int num_regs
= gdbarch_num_regs (arm_insn_r
->gdbarch
);
11954 reg_t
= bits (arm_insn_r
->arm_insn
, 12, 15);
11955 reg_v
= bits (arm_insn_r
->arm_insn
, 21, 23);
11956 bits_a
= bits (arm_insn_r
->arm_insn
, 21, 23);
11957 bit_l
= bit (arm_insn_r
->arm_insn
, 20);
11958 bit_c
= bit (arm_insn_r
->arm_insn
, 8);
11960 /* Handle VMOV instruction. */
/* L set, C set: scalar-to-core move writes core register Rt.  */
11961 if (bit_l
&& bit_c
)
11963 record_buf
[0] = reg_t
;
11964 arm_insn_r
->reg_rec_count
= 1;
11966 else if (bit_l
&& !bit_c
)
11968 /* Handle VMOV instruction. */
11969 if (bits_a
== 0x00)
11971 if (bit (arm_insn_r
->arm_insn
, 20))
11972 record_buf
[0] = reg_t
;
/* Extension-register destination: numbered past the core set.  */
11974 record_buf
[0] = num_regs
+ (bit (arm_insn_r
->arm_insn
, 7) |
11977 arm_insn_r
->reg_rec_count
= 1;
11979 /* Handle VMRS instruction. */
11980 else if (bits_a
== 0x07)
/* VMRS with Rt == PC targets the flags (APSR_nzcv).  */
11983 reg_t
= ARM_PS_REGNUM
;
11985 record_buf
[0] = reg_t
;
11986 arm_insn_r
->reg_rec_count
= 1;
11989 else if (!bit_l
&& !bit_c
)
11991 /* Handle VMOV instruction. */
11992 if (bits_a
== 0x00)
11994 if (bit (arm_insn_r
->arm_insn
, 20))
11995 record_buf
[0] = reg_t
;
11997 record_buf
[0] = num_regs
+ (bit (arm_insn_r
->arm_insn
, 7) |
12000 arm_insn_r
->reg_rec_count
= 1;
12002 /* Handle VMSR instruction. */
12003 else if (bits_a
== 0x07)
12005 record_buf
[0] = ARM_FPSCR_REGNUM
;
12006 arm_insn_r
->reg_rec_count
= 1;
12009 else if (!bit_l
&& bit_c
)
12011 /* Handle VMOV instruction. */
/* Core-to-scalar VMOV writes a D register (index widened by bit 7).  */
12012 if (!(bits_a
& 0x04))
12014 record_buf
[0] = (reg_v
| (bit (arm_insn_r
->arm_insn
, 7) << 4))
12016 arm_insn_r
->reg_rec_count
= 1;
12018 /* Handle VDUP instruction. */
/* Bit 21 (Q) set: quad VDUP writes a D-register pair.  */
12021 if (bit (arm_insn_r
->arm_insn
, 21))
12023 reg_v
= reg_v
| (bit (arm_insn_r
->arm_insn
, 7) << 4);
12024 record_buf
[0] = reg_v
+ ARM_D0_REGNUM
;
12025 record_buf
[1] = reg_v
+ ARM_D0_REGNUM
+ 1;
12026 arm_insn_r
->reg_rec_count
= 2;
12030 reg_v
= reg_v
| (bit (arm_insn_r
->arm_insn
, 7) << 4);
12031 record_buf
[0] = reg_v
+ ARM_D0_REGNUM
;
12032 arm_insn_r
->reg_rec_count
= 1;
12037 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
/* NOTE(review): line-mangled extract; statements are split across
   physical lines and some original lines (braces, `else`, conditions)
   are missing.  Only comments added; no original token altered.  */
/* Recorder for VFP/NEON extension-register load/store: VMOV (64-bit
   forms), VSTM/VPUSH, VLDM, VSTR and VLDR.  Loads record destination
   registers; stores record the memory words they overwrite.  */
12041 /* Record handler for extension register load/store instructions. */
12044 arm_record_exreg_ld_st_insn (insn_decode_record
*arm_insn_r
)
12046 uint32_t opcode
, single_reg
;
12047 uint8_t op_vldm_vstm
;
12048 uint32_t record_buf
[8], record_buf_mem
[128];
12049 ULONGEST u_regval
= 0;
12051 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
12052 const int num_regs
= gdbarch_num_regs (arm_insn_r
->gdbarch
);
12054 opcode
= bits (arm_insn_r
->arm_insn
, 20, 24);
/* single_reg: bit 8 clear selects single-precision (S) registers.  */
12055 single_reg
= bit (arm_insn_r
->arm_insn
, 8);
12056 op_vldm_vstm
= opcode
& 0x1b;
12058 /* Handle VMOV instructions. */
12059 if ((opcode
& 0x1e) == 0x04)
/* Bit 4 set: transfer towards two core registers.  */
12061 if (bit (arm_insn_r
->arm_insn
, 4))
12063 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
12064 record_buf
[1] = bits (arm_insn_r
->arm_insn
, 16, 19);
12065 arm_insn_r
->reg_rec_count
= 2;
12069 uint8_t reg_m
= (bits (arm_insn_r
->arm_insn
, 0, 3) << 1)
12070 | bit (arm_insn_r
->arm_insn
, 5);
/* Single-precision pair maps to two pseudo registers past num_regs.  */
12074 record_buf
[0] = num_regs
+ reg_m
;
12075 record_buf
[1] = num_regs
+ reg_m
+ 1;
12076 arm_insn_r
->reg_rec_count
= 2;
12080 record_buf
[0] = reg_m
+ ARM_D0_REGNUM
;
12081 arm_insn_r
->reg_rec_count
= 1;
12085 /* Handle VSTM and VPUSH instructions. */
12086 else if (op_vldm_vstm
== 0x08 || op_vldm_vstm
== 0x0a
12087 || op_vldm_vstm
== 0x12)
12089 uint32_t start_address
, reg_rn
, imm_off32
, imm_off8
, memory_count
;
12090 uint32_t memory_index
= 0;
12092 reg_rn
= bits (arm_insn_r
->arm_insn
, 16, 19);
12093 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
);
12094 imm_off8
= bits (arm_insn_r
->arm_insn
, 0, 7);
/* NOTE(review): imm_off8 << 24 looks suspicious for a byte offset
   (elsewhere the offset is imm8 * 4); TODO confirm against upstream
   GDB -- this may be a latent bug in the recorded address.  */
12095 imm_off32
= imm_off8
<< 24;
12096 memory_count
= imm_off8
;
/* Bit 23 (U): increment (store at Rn) vs decrement (store below Rn).  */
12098 if (bit (arm_insn_r
->arm_insn
, 23))
12099 start_address
= u_regval
;
12101 start_address
= u_regval
- imm_off32
;
/* Bit 21 (W): writeback also changes the base register.  */
12103 if (bit (arm_insn_r
->arm_insn
, 21))
12105 record_buf
[0] = reg_rn
;
12106 arm_insn_r
->reg_rec_count
= 1;
/* Record 4-byte (single) or 8-byte (double) slots per register.  */
12109 while (memory_count
> 0)
12113 record_buf_mem
[memory_index
] = start_address
;
12114 record_buf_mem
[memory_index
+ 1] = 4;
12115 start_address
= start_address
+ 4;
12116 memory_index
= memory_index
+ 2;
12120 record_buf_mem
[memory_index
] = start_address
;
12121 record_buf_mem
[memory_index
+ 1] = 4;
12122 record_buf_mem
[memory_index
+ 2] = start_address
+ 4;
12123 record_buf_mem
[memory_index
+ 3] = 4;
12124 start_address
= start_address
+ 8;
12125 memory_index
= memory_index
+ 4;
12129 arm_insn_r
->mem_rec_count
= (memory_index
>> 1);
12131 /* Handle VLDM instructions. */
12132 else if (op_vldm_vstm
== 0x09 || op_vldm_vstm
== 0x0b
12133 || op_vldm_vstm
== 0x13)
12135 uint32_t reg_count
, reg_vd
;
12136 uint32_t reg_index
= 0;
12138 reg_vd
= bits (arm_insn_r
->arm_insn
, 12, 15);
12139 reg_count
= bits (arm_insn_r
->arm_insn
, 0, 7);
/* Widen Vd with the D bit: high bit for doubles, low bit for singles.  */
12142 reg_vd
= reg_vd
| (bit (arm_insn_r
->arm_insn
, 22) << 4);
12144 reg_vd
= (reg_vd
<< 1) | bit (arm_insn_r
->arm_insn
, 22);
12146 if (bit (arm_insn_r
->arm_insn
, 21))
12147 record_buf
[reg_index
++] = bits (arm_insn_r
->arm_insn
, 16, 19);
12149 while (reg_count
> 0)
12152 record_buf
[reg_index
++] = num_regs
+ reg_vd
+ reg_count
- 1;
12154 record_buf
[reg_index
++] = ARM_D0_REGNUM
+ reg_vd
+ reg_count
- 1;
12158 arm_insn_r
->reg_rec_count
= reg_index
;
12160 /* VSTR Vector store register. */
12161 else if ((opcode
& 0x13) == 0x10)
12163 uint32_t start_address
, reg_rn
, imm_off32
, imm_off8
, memory_count
;
12164 uint32_t memory_index
= 0;
12166 reg_rn
= bits (arm_insn_r
->arm_insn
, 16, 19);
12167 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
);
12168 imm_off8
= bits (arm_insn_r
->arm_insn
, 0, 7);
12169 imm_off32
= imm_off8
<< 24;
12170 memory_count
= imm_off8
;
12172 if (bit (arm_insn_r
->arm_insn
, 23))
12173 start_address
= u_regval
+ imm_off32
;
12175 start_address
= u_regval
- imm_off32
;
/* Single precision: one 4-byte record; double: two 4-byte records.  */
12179 record_buf_mem
[memory_index
] = start_address
;
12180 record_buf_mem
[memory_index
+ 1] = 4;
12181 arm_insn_r
->mem_rec_count
= 1;
12185 record_buf_mem
[memory_index
] = start_address
;
12186 record_buf_mem
[memory_index
+ 1] = 4;
12187 record_buf_mem
[memory_index
+ 2] = start_address
+ 4;
12188 record_buf_mem
[memory_index
+ 3] = 4;
12189 arm_insn_r
->mem_rec_count
= 2;
12192 /* VLDR Vector load register. */
12193 else if ((opcode
& 0x13) == 0x11)
12195 uint32_t reg_vd
= bits (arm_insn_r
->arm_insn
, 12, 15);
12199 reg_vd
= reg_vd
| (bit (arm_insn_r
->arm_insn
, 22) << 4);
12200 record_buf
[0] = ARM_D0_REGNUM
+ reg_vd
;
12204 reg_vd
= (reg_vd
<< 1) | bit (arm_insn_r
->arm_insn
, 22);
12205 record_buf
[0] = num_regs
+ reg_vd
;
12207 arm_insn_r
->reg_rec_count
= 1;
12210 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
12211 MEM_ALLOC (arm_insn_r
->arm_mems
, arm_insn_r
->mem_rec_count
, record_buf_mem
);
/* NOTE(review): line-mangled extract; statements are split across
   physical lines and some original lines (braces, `else`, case labels)
   are missing.  Only comments added; no original token altered.  */
/* Recorder for VFP data-processing instructions.  The decode classifies
   each insn into a destination type: INSN_T0 = D-register pair,
   INSN_T1 = one D register, INSN_T2 = one S register, INSN_T3 = FPSCR
   (compares).  */
12215 /* Record handler for arm/thumb mode VFP data processing instructions. */
12218 arm_record_vfp_data_proc_insn (insn_decode_record
*arm_insn_r
)
12220 uint32_t opc1
, opc2
, opc3
, dp_op_sz
, bit_d
, reg_vd
;
12221 uint32_t record_buf
[4];
12222 enum insn_types
{INSN_T0
, INSN_T1
, INSN_T2
, INSN_T3
, INSN_INV
};
12223 enum insn_types curr_insn_type
= INSN_INV
;
12225 reg_vd
= bits (arm_insn_r
->arm_insn
, 12, 15);
12226 opc1
= bits (arm_insn_r
->arm_insn
, 20, 23);
12227 opc2
= bits (arm_insn_r
->arm_insn
, 16, 19);
12228 opc3
= bits (arm_insn_r
->arm_insn
, 6, 7);
12229 dp_op_sz
= bit (arm_insn_r
->arm_insn
, 8);
12230 bit_d
= bit (arm_insn_r
->arm_insn
, 22);
/* NOTE(review): after this mask opc1 can only be 0x00 or 0x04, so the
   later comparisons against 0x01, 0x02, 0x03 and 0x0b appear
   unreachable -- suspected upstream bug (perhaps the intent was
   `opc1 & ~0x04` to fold out one bit); TODO confirm against current
   GDB sources.  */
12231 opc1
= opc1
& 0x04;
12233 /* Handle VMLA, VMLS. */
12236 if (bit (arm_insn_r
->arm_insn
, 10))
12238 if (bit (arm_insn_r
->arm_insn
, 6))
12239 curr_insn_type
= INSN_T0
;
12241 curr_insn_type
= INSN_T1
;
12246 curr_insn_type
= INSN_T1
;
12248 curr_insn_type
= INSN_T2
;
12251 /* Handle VNMLA, VNMLS, VNMUL. */
12252 else if (opc1
== 0x01)
12255 curr_insn_type
= INSN_T1
;
12257 curr_insn_type
= INSN_T2
;
/* VMUL group.  */
12260 else if (opc1
== 0x02 && !(opc3
& 0x01))
12262 if (bit (arm_insn_r
->arm_insn
, 10))
12264 if (bit (arm_insn_r
->arm_insn
, 6))
12265 curr_insn_type
= INSN_T0
;
12267 curr_insn_type
= INSN_T1
;
12272 curr_insn_type
= INSN_T1
;
12274 curr_insn_type
= INSN_T2
;
12277 /* Handle VADD, VSUB. */
12278 else if (opc1
== 0x03)
12280 if (!bit (arm_insn_r
->arm_insn
, 9))
12282 if (bit (arm_insn_r
->arm_insn
, 6))
12283 curr_insn_type
= INSN_T0
;
12285 curr_insn_type
= INSN_T1
;
12290 curr_insn_type
= INSN_T1
;
12292 curr_insn_type
= INSN_T2
;
/* NOTE(review): the next two branches test the same condition
   (opc1 == 0x0b); the second -- the "all other" catch-all -- is
   therefore dead code as written.  Flagged for upstream comparison.  */
12296 else if (opc1
== 0x0b)
12299 curr_insn_type
= INSN_T1
;
12301 curr_insn_type
= INSN_T2
;
12303 /* Handle all other vfp data processing instructions. */
12304 else if (opc1
== 0x0b)
/* VMOV immediate / register forms.  */
12307 if (!(opc3
& 0x01) || (opc2
== 0x00 && opc3
== 0x01))
12309 if (bit (arm_insn_r
->arm_insn
, 4))
12311 if (bit (arm_insn_r
->arm_insn
, 6))
12312 curr_insn_type
= INSN_T0
;
12314 curr_insn_type
= INSN_T1
;
12319 curr_insn_type
= INSN_T1
;
12321 curr_insn_type
= INSN_T2
;
12324 /* Handle VNEG and VABS. */
12325 else if ((opc2
== 0x01 && opc3
== 0x01)
12326 || (opc2
== 0x00 && opc3
== 0x03))
12328 if (!bit (arm_insn_r
->arm_insn
, 11))
12330 if (bit (arm_insn_r
->arm_insn
, 6))
12331 curr_insn_type
= INSN_T0
;
12333 curr_insn_type
= INSN_T1
;
12338 curr_insn_type
= INSN_T1
;
12340 curr_insn_type
= INSN_T2
;
12343 /* Handle VSQRT. */
12344 else if (opc2
== 0x01 && opc3
== 0x03)
12347 curr_insn_type
= INSN_T1
;
12349 curr_insn_type
= INSN_T2
;
/* VCVT between double and single precision.  */
12352 else if (opc2
== 0x07 && opc3
== 0x03)
12355 curr_insn_type
= INSN_T1
;
12357 curr_insn_type
= INSN_T2
;
12359 else if (opc3
& 0x01)
/* VCVT float <-> integer / fixed-point.  */
12362 if ((opc2
== 0x08) || (opc2
& 0x0e) == 0x0c)
12364 if (!bit (arm_insn_r
->arm_insn
, 18))
12365 curr_insn_type
= INSN_T2
;
12369 curr_insn_type
= INSN_T1
;
12371 curr_insn_type
= INSN_T2
;
12375 else if ((opc2
& 0x0e) == 0x0a || (opc2
& 0x0e) == 0x0e)
12378 curr_insn_type
= INSN_T1
;
12380 curr_insn_type
= INSN_T2
;
12382 /* Handle VCVTB, VCVTT. */
12383 else if ((opc2
& 0x0e) == 0x02)
12384 curr_insn_type
= INSN_T2
;
12385 /* Handle VCMP, VCMPE. */
12386 else if ((opc2
& 0x0e) == 0x04)
12387 curr_insn_type
= INSN_T3
;
/* Emit the register record according to the classified type.  */
12391 switch (curr_insn_type
)
12394 reg_vd
= reg_vd
| (bit_d
<< 4);
12395 record_buf
[0] = reg_vd
+ ARM_D0_REGNUM
;
12396 record_buf
[1] = reg_vd
+ ARM_D0_REGNUM
+ 1;
12397 arm_insn_r
->reg_rec_count
= 2;
12401 reg_vd
= reg_vd
| (bit_d
<< 4);
12402 record_buf
[0] = reg_vd
+ ARM_D0_REGNUM
;
12403 arm_insn_r
->reg_rec_count
= 1;
12407 reg_vd
= (reg_vd
<< 1) | bit_d
;
12408 record_buf
[0] = reg_vd
+ ARM_D0_REGNUM
;
12409 arm_insn_r
->reg_rec_count
= 1;
12413 record_buf
[0] = ARM_FPSCR_REGNUM
;
12414 arm_insn_r
->reg_rec_count
= 1;
12418 gdb_assert_not_reached ("no decoding pattern found");
12422 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
/* NOTE(review): line-mangled extract; the switch/case skeleton and
   several declarations are missing from this text.  Only comments
   added; no original token altered.  */
/* Dispatcher for coprocessor/SIMD space (opcode 110): routes VFP
   extension-register ld/st to arm_record_exreg_ld_st_insn and reports
   other coprocessor transfers as unsupported.  */
12426 /* Handling opcode 110 insns. */
12429 arm_record_asimd_vfp_coproc (insn_decode_record
*arm_insn_r
)
12431 uint32_t op
, op1
, op1_sbit
, op1_ebit
, coproc
;
12433 coproc
= bits (arm_insn_r
->arm_insn
, 8, 11);
12434 op1
= bits (arm_insn_r
->arm_insn
, 20, 25);
12435 op1_ebit
= bit (arm_insn_r
->arm_insn
, 20);
/* Coprocessor numbers 10/11 are the VFP/NEON register file.  */
12437 if ((coproc
& 0x0e) == 0x0a)
12439 /* Handle extension register ld/st instructions. */
12441 return arm_record_exreg_ld_st_insn (arm_insn_r
);
12443 /* 64-bit transfers between arm core and extension registers. */
12444 if ((op1
& 0x3e) == 0x04)
12445 return arm_record_exreg_ld_st_insn (arm_insn_r
);
12449 /* Handle coprocessor ld/st instructions. */
/* Generic coprocessors: nothing can be recorded, report unsupported.  */
12454 return arm_record_unsupported_insn (arm_insn_r
);
12457 return arm_record_unsupported_insn (arm_insn_r
);
12460 /* Move to coprocessor from two arm core registers. */
12462 return arm_record_unsupported_insn (arm_insn_r
);
12464 /* Move to two arm core registers from coprocessor. */
/* MRRC writes Rt (bits 12-15) and Rt2 (bits 16-19).  */
12469 reg_t
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
12470 reg_t
[1] = bits (arm_insn_r
->arm_insn
, 16, 19);
12471 arm_insn_r
->reg_rec_count
= 2;
12473 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, reg_t
);
12477 return arm_record_unsupported_insn (arm_insn_r
);
/* NOTE(review): line-mangled extract; the SWI-detecting condition and
   several braces/`else` lines are missing from this text.  Only
   comments added; no original token altered.  */
/* Dispatcher for opcode 111: SWI/SVC system calls (delegated to the
   OS-specific syscall recorder), VFP data processing and transfers,
   and generic coprocessor moves.  */
12480 /* Handling opcode 111 insns. */
12483 arm_record_coproc_data_proc (insn_decode_record
*arm_insn_r
)
12485 uint32_t op
, op1_sbit
, op1_ebit
, coproc
;
12486 struct gdbarch_tdep
*tdep
= gdbarch_tdep (arm_insn_r
->gdbarch
);
12487 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
12488 ULONGEST u_regval
= 0;
12490 arm_insn_r
->opcode
= bits (arm_insn_r
->arm_insn
, 24, 27);
12491 coproc
= bits (arm_insn_r
->arm_insn
, 8, 11);
12492 op1_sbit
= bit (arm_insn_r
->arm_insn
, 24);
12493 op1_ebit
= bit (arm_insn_r
->arm_insn
, 20);
12494 op
= bit (arm_insn_r
->arm_insn
, 4);
12496 /* Handle arm SWI/SVC system call instructions. */
12499 if (tdep
->arm_syscall_record
!= NULL
)
12501 ULONGEST svc_operand
, svc_number
;
/* SWI comment field; non-zero indicates the legacy OABI convention
   where the syscall number is encoded in the instruction itself.  */
12503 svc_operand
= (0x00ffffff & arm_insn_r
->arm_insn
);
12505 if (svc_operand
) /* OABI. */
12506 svc_number
= svc_operand
- 0x900000;
/* EABI: syscall number lives in r7.  */
12508 regcache_raw_read_unsigned (reg_cache
, 7, &svc_number
);
12510 return tdep
->arm_syscall_record (reg_cache
, svc_number
);
12514 printf_unfiltered (_("no syscall record support\n"));
12519 if ((coproc
& 0x0e) == 0x0a)
12521 /* VFP data-processing instructions. */
12522 if (!op1_sbit
&& !op
)
12523 return arm_record_vfp_data_proc_insn (arm_insn_r
);
12525 /* Advanced SIMD, VFP instructions. */
12526 if (!op1_sbit
&& op
)
12527 return arm_record_vdata_transfer_insn (arm_insn_r
);
12531 /* Coprocessor data operations. */
12532 if (!op1_sbit
&& !op
)
12533 return arm_record_unsupported_insn (arm_insn_r
);
12535 /* Move to Coprocessor from ARM core register. */
12536 if (!op1_sbit
&& !op1_ebit
&& op
)
12537 return arm_record_unsupported_insn (arm_insn_r
);
12539 /* Move to arm core register from coprocessor. */
12540 if (!op1_sbit
&& op1_ebit
&& op
)
12542 uint32_t record_buf
[1];
/* MRC with Rt == 15 targets the flags rather than the PC.  */
12544 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
12545 if (record_buf
[0] == 15)
12546 record_buf
[0] = ARM_PS_REGNUM
;
12548 arm_insn_r
->reg_rec_count
= 1;
12549 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
,
12555 return arm_record_unsupported_insn (arm_insn_r
);
12558 /* Handling opcode 000 insns. */
12561 thumb_record_shift_add_sub (insn_decode_record
*thumb_insn_r
)
12563 uint32_t record_buf
[8];
12564 uint32_t reg_src1
= 0;
12566 reg_src1
= bits (thumb_insn_r
->arm_insn
, 0, 2);
12568 record_buf
[0] = ARM_PS_REGNUM
;
12569 record_buf
[1] = reg_src1
;
12570 thumb_insn_r
->reg_rec_count
= 2;
12572 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
12578 /* Handling opcode 001 insns. */
12581 thumb_record_add_sub_cmp_mov (insn_decode_record
*thumb_insn_r
)
12583 uint32_t record_buf
[8];
12584 uint32_t reg_src1
= 0;
12586 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
12588 record_buf
[0] = ARM_PS_REGNUM
;
12589 record_buf
[1] = reg_src1
;
12590 thumb_insn_r
->reg_rec_count
= 2;
12592 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
/* NOTE(review): line-mangled extract; statements are split across
   physical lines and some original lines (braces, `else`, conditions)
   are missing.  Only comments added; no original token altered.  */
/* Recorder for Thumb opcode 010 group: load/store with register
   offset, literal-pool loads, BX, and the hi-register data-processing
   forms.  */
12597 /* Handling opcode 010 insns. */
12600 thumb_record_ld_st_reg_offset (insn_decode_record
*thumb_insn_r
)
12602 struct regcache
*reg_cache
= thumb_insn_r
->regcache
;
12603 uint32_t record_buf
[8], record_buf_mem
[8];
12605 uint32_t reg_src1
= 0, reg_src2
= 0;
12606 uint32_t opcode1
= 0, opcode2
= 0, opcode3
= 0;
12608 ULONGEST u_regval
[2] = {0};
12610 opcode1
= bits (thumb_insn_r
->arm_insn
, 10, 12);
12612 if (bit (thumb_insn_r
->arm_insn
, 12))
12614 /* Handle load/store register offset. */
/* NOTE(review): bits (insn, 9, 10) yields only 0..3, so the
   comparisons against 12..15 and 8..10 below can never be true and
   the load branch is unreachable.  This matches a known upstream GDB
   bug (later fixed by reading a wider opB field, bits 9-11) -- flagged
   rather than changed here because the surrounding original lines are
   missing from this extract.  */
12615 opcode2
= bits (thumb_insn_r
->arm_insn
, 9, 10);
12616 if (opcode2
>= 12 && opcode2
<= 15)
12618 /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH. */
12619 reg_src1
= bits (thumb_insn_r
->arm_insn
,0, 2);
12620 record_buf
[0] = reg_src1
;
12621 thumb_insn_r
->reg_rec_count
= 1;
12623 else if (opcode2
>= 8 && opcode2
<= 10)
12625 /* STR(2), STRB(2), STRH(2) . */
12626 reg_src1
= bits (thumb_insn_r
->arm_insn
, 3, 5);
12627 reg_src2
= bits (thumb_insn_r
->arm_insn
, 6, 8);
12628 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
[0]);
12629 regcache_raw_read_unsigned (reg_cache
, reg_src2
, &u_regval
[1]);
/* Access width depends on which store variant this is.  */
12631 record_buf_mem
[0] = 4; /* STR (2). */
12632 else if (10 == opcode2
)
12633 record_buf_mem
[0] = 1; /* STRB (2). */
12634 else if (9 == opcode2
)
12635 record_buf_mem
[0] = 2; /* STRH (2). */
/* Target address is Rn + Rm.  */
12636 record_buf_mem
[1] = u_regval
[0] + u_regval
[1];
12637 thumb_insn_r
->mem_rec_count
= 1;
12640 else if (bit (thumb_insn_r
->arm_insn
, 11))
12642 /* Handle load from literal pool. */
/* LDR(3): PC-relative load writes Rd in bits 8-10.  */
12644 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
12645 record_buf
[0] = reg_src1
;
12646 thumb_insn_r
->reg_rec_count
= 1;
12650 opcode2
= bits (thumb_insn_r
->arm_insn
, 8, 9);
12651 opcode3
= bits (thumb_insn_r
->arm_insn
, 0, 2);
12652 if ((3 == opcode2
) && (!opcode3
))
12654 /* Branch with exchange. */
/* BX/BLX may flip the Thumb state bit, so CPSR is recorded.  */
12655 record_buf
[0] = ARM_PS_REGNUM
;
12656 thumb_insn_r
->reg_rec_count
= 1;
12660 /* Format 8; special data processing insns. */
12661 reg_src1
= bits (thumb_insn_r
->arm_insn
, 0, 2);
12662 record_buf
[0] = ARM_PS_REGNUM
;
12663 record_buf
[1] = reg_src1
;
12664 thumb_insn_r
->reg_rec_count
= 2;
12669 /* Format 5; data processing insns. */
12670 reg_src1
= bits (thumb_insn_r
->arm_insn
, 0, 2);
/* Bit 7 (H1) selects the high-register half (r8-r15).  */
12671 if (bit (thumb_insn_r
->arm_insn
, 7))
12673 reg_src1
= reg_src1
+ 8;
12675 record_buf
[0] = ARM_PS_REGNUM
;
12676 record_buf
[1] = reg_src1
;
12677 thumb_insn_r
->reg_rec_count
= 2;
12680 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
12681 MEM_ALLOC (thumb_insn_r
->arm_mems
, thumb_insn_r
->mem_rec_count
,
/* NOTE(review): line-mangled extract; the branch condition between the
   opcode fetch and the first branch body is missing from this text.
   Only comments added; no original token altered.  */
/* Recorder for Thumb load/store word with 5-bit immediate offset:
   loads record the destination register, stores record the 4-byte
   target address Rn + immed_5 * 4.  */
12687 /* Handling opcode 001 insns. */
12690 thumb_record_ld_st_imm_offset (insn_decode_record
*thumb_insn_r
)
12692 struct regcache
*reg_cache
= thumb_insn_r
->regcache
;
12693 uint32_t record_buf
[8], record_buf_mem
[8];
12695 uint32_t reg_src1
= 0;
12696 uint32_t opcode
= 0, immed_5
= 0;
12698 ULONGEST u_regval
= 0;
12700 opcode
= bits (thumb_insn_r
->arm_insn
, 11, 12);
/* Load branch: destination register is Rd in bits 0-2.  */
12705 reg_src1
= bits (thumb_insn_r
->arm_insn
, 0, 2);
12706 record_buf
[0] = reg_src1
;
12707 thumb_insn_r
->reg_rec_count
= 1;
/* Store branch: one 4-byte memory record at Rn + immed_5 * 4.  */
12712 reg_src1
= bits (thumb_insn_r
->arm_insn
, 3, 5);
12713 immed_5
= bits (thumb_insn_r
->arm_insn
, 6, 10);
12714 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
);
12715 record_buf_mem
[0] = 4;
12716 record_buf_mem
[1] = u_regval
+ (immed_5
* 4);
12717 thumb_insn_r
->mem_rec_count
= 1;
12720 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
12721 MEM_ALLOC (thumb_insn_r
->arm_mems
, thumb_insn_r
->mem_rec_count
,
12727 /* Handling opcode 100 insns. */
12730 thumb_record_ld_st_stack (insn_decode_record
*thumb_insn_r
)
12732 struct regcache
*reg_cache
= thumb_insn_r
->regcache
;
12733 uint32_t record_buf
[8], record_buf_mem
[8];
12735 uint32_t reg_src1
= 0;
12736 uint32_t opcode
= 0, immed_8
= 0, immed_5
= 0;
12738 ULONGEST u_regval
= 0;
12740 opcode
= bits (thumb_insn_r
->arm_insn
, 11, 12);
12745 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
12746 record_buf
[0] = reg_src1
;
12747 thumb_insn_r
->reg_rec_count
= 1;
12749 else if (1 == opcode
)
12752 reg_src1
= bits (thumb_insn_r
->arm_insn
, 0, 2);
12753 record_buf
[0] = reg_src1
;
12754 thumb_insn_r
->reg_rec_count
= 1;
12756 else if (2 == opcode
)
12759 immed_8
= bits (thumb_insn_r
->arm_insn
, 0, 7);
12760 regcache_raw_read_unsigned (reg_cache
, ARM_SP_REGNUM
, &u_regval
);
12761 record_buf_mem
[0] = 4;
12762 record_buf_mem
[1] = u_regval
+ (immed_8
* 4);
12763 thumb_insn_r
->mem_rec_count
= 1;
12765 else if (0 == opcode
)
12768 immed_5
= bits (thumb_insn_r
->arm_insn
, 6, 10);
12769 reg_src1
= bits (thumb_insn_r
->arm_insn
, 3, 5);
12770 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
);
12771 record_buf_mem
[0] = 2;
12772 record_buf_mem
[1] = u_regval
+ (immed_5
* 2);
12773 thumb_insn_r
->mem_rec_count
= 1;
12776 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
12777 MEM_ALLOC (thumb_insn_r
->arm_mems
, thumb_insn_r
->mem_rec_count
,
12783 /* Handling opcode 101 insns. */
12786 thumb_record_misc (insn_decode_record
*thumb_insn_r
)
12788 struct regcache
*reg_cache
= thumb_insn_r
->regcache
;
12790 uint32_t opcode
= 0, opcode1
= 0, opcode2
= 0;
12791 uint32_t register_bits
= 0, register_count
= 0;
12792 uint32_t register_list
[8] = {0}, index
= 0, start_address
= 0;
12793 uint32_t record_buf
[24], record_buf_mem
[48];
12796 ULONGEST u_regval
= 0;
12798 opcode
= bits (thumb_insn_r
->arm_insn
, 11, 12);
12799 opcode1
= bits (thumb_insn_r
->arm_insn
, 8, 12);
12800 opcode2
= bits (thumb_insn_r
->arm_insn
, 9, 12);
12805 register_bits
= bits (thumb_insn_r
->arm_insn
, 0, 7);
12806 while (register_bits
)
12808 if (register_bits
& 0x00000001)
12809 record_buf
[index
++] = register_count
;
12810 register_bits
= register_bits
>> 1;
12813 record_buf
[index
++] = ARM_PS_REGNUM
;
12814 record_buf
[index
++] = ARM_SP_REGNUM
;
12815 thumb_insn_r
->reg_rec_count
= index
;
12817 else if (10 == opcode2
)
12820 register_bits
= bits (thumb_insn_r
->arm_insn
, 0, 7);
12821 regcache_raw_read_unsigned (reg_cache
, ARM_SP_REGNUM
, &u_regval
);
12822 while (register_bits
)
12824 if (register_bits
& 0x00000001)
12826 register_bits
= register_bits
>> 1;
12828 start_address
= u_regval
- \
12829 (4 * (bit (thumb_insn_r
->arm_insn
, 8) + register_count
));
12830 thumb_insn_r
->mem_rec_count
= register_count
;
12831 while (register_count
)
12833 record_buf_mem
[(register_count
* 2) - 1] = start_address
;
12834 record_buf_mem
[(register_count
* 2) - 2] = 4;
12835 start_address
= start_address
+ 4;
12838 record_buf
[0] = ARM_SP_REGNUM
;
12839 thumb_insn_r
->reg_rec_count
= 1;
12841 else if (0x1E == opcode1
)
12844 /* Handle enhanced software breakpoint insn, BKPT. */
12845 /* CPSR is changed to be executed in ARM state, disabling normal
12846 interrupts, entering abort mode. */
12847 /* According to high vector configuration PC is set. */
12848 /* User hits breakpoint and type reverse, in that case, we need to go back with
12849 previous CPSR and Program Counter. */
12850 record_buf
[0] = ARM_PS_REGNUM
;
12851 record_buf
[1] = ARM_LR_REGNUM
;
12852 thumb_insn_r
->reg_rec_count
= 2;
12853 /* We need to save SPSR value, which is not yet done. */
12854 printf_unfiltered (_("Process record does not support instruction "
12855 "0x%0x at address %s.\n"),
12856 thumb_insn_r
->arm_insn
,
12857 paddress (thumb_insn_r
->gdbarch
,
12858 thumb_insn_r
->this_addr
));
12861 else if ((0 == opcode
) || (1 == opcode
))
12863 /* ADD(5), ADD(6). */
12864 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
12865 record_buf
[0] = reg_src1
;
12866 thumb_insn_r
->reg_rec_count
= 1;
12868 else if (2 == opcode
)
12870 /* ADD(7), SUB(4). */
12871 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
12872 record_buf
[0] = ARM_SP_REGNUM
;
12873 thumb_insn_r
->reg_rec_count
= 1;
12876 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
12877 MEM_ALLOC (thumb_insn_r
->arm_mems
, thumb_insn_r
->mem_rec_count
,
12883 /* Handling opcode 110 insns. */
12886 thumb_record_ldm_stm_swi (insn_decode_record
*thumb_insn_r
)
12888 struct gdbarch_tdep
*tdep
= gdbarch_tdep (thumb_insn_r
->gdbarch
);
12889 struct regcache
*reg_cache
= thumb_insn_r
->regcache
;
12891 uint32_t ret
= 0; /* function return value: -1:record failure ; 0:success */
12892 uint32_t reg_src1
= 0;
12893 uint32_t opcode1
= 0, opcode2
= 0, register_bits
= 0, register_count
= 0;
12894 uint32_t register_list
[8] = {0}, index
= 0, start_address
= 0;
12895 uint32_t record_buf
[24], record_buf_mem
[48];
12897 ULONGEST u_regval
= 0;
12899 opcode1
= bits (thumb_insn_r
->arm_insn
, 8, 12);
12900 opcode2
= bits (thumb_insn_r
->arm_insn
, 11, 12);
12906 register_bits
= bits (thumb_insn_r
->arm_insn
, 0, 7);
12908 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
12909 while (register_bits
)
12911 if (register_bits
& 0x00000001)
12912 record_buf
[index
++] = register_count
;
12913 register_bits
= register_bits
>> 1;
12916 record_buf
[index
++] = reg_src1
;
12917 thumb_insn_r
->reg_rec_count
= index
;
12919 else if (0 == opcode2
)
12921 /* It handles both STMIA. */
12922 register_bits
= bits (thumb_insn_r
->arm_insn
, 0, 7);
12924 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
12925 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
);
12926 while (register_bits
)
12928 if (register_bits
& 0x00000001)
12930 register_bits
= register_bits
>> 1;
12932 start_address
= u_regval
;
12933 thumb_insn_r
->mem_rec_count
= register_count
;
12934 while (register_count
)
12936 record_buf_mem
[(register_count
* 2) - 1] = start_address
;
12937 record_buf_mem
[(register_count
* 2) - 2] = 4;
12938 start_address
= start_address
+ 4;
12942 else if (0x1F == opcode1
)
12944 /* Handle arm syscall insn. */
12945 if (tdep
->arm_syscall_record
!= NULL
)
12947 regcache_raw_read_unsigned (reg_cache
, 7, &u_regval
);
12948 ret
= tdep
->arm_syscall_record (reg_cache
, u_regval
);
12952 printf_unfiltered (_("no syscall record support\n"));
12957 /* B (1), conditional branch is automatically taken care in process_record,
12958 as PC is saved there. */
12960 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
12961 MEM_ALLOC (thumb_insn_r
->arm_mems
, thumb_insn_r
->mem_rec_count
,
12967 /* Handling opcode 111 insns. */
12970 thumb_record_branch (insn_decode_record
*thumb_insn_r
)
12972 uint32_t record_buf
[8];
12973 uint32_t bits_h
= 0;
12975 bits_h
= bits (thumb_insn_r
->arm_insn
, 11, 12);
12977 if (2 == bits_h
|| 3 == bits_h
)
12980 record_buf
[0] = ARM_LR_REGNUM
;
12981 thumb_insn_r
->reg_rec_count
= 1;
12983 else if (1 == bits_h
)
12986 record_buf
[0] = ARM_PS_REGNUM
;
12987 record_buf
[1] = ARM_LR_REGNUM
;
12988 thumb_insn_r
->reg_rec_count
= 2;
12991 /* B(2) is automatically taken care in process_record, as PC is
12994 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
12999 /* Handler for thumb2 load/store multiple instructions. */
13002 thumb2_record_ld_st_multiple (insn_decode_record
*thumb2_insn_r
)
13004 struct regcache
*reg_cache
= thumb2_insn_r
->regcache
;
13006 uint32_t reg_rn
, op
;
13007 uint32_t register_bits
= 0, register_count
= 0;
13008 uint32_t index
= 0, start_address
= 0;
13009 uint32_t record_buf
[24], record_buf_mem
[48];
13011 ULONGEST u_regval
= 0;
13013 reg_rn
= bits (thumb2_insn_r
->arm_insn
, 16, 19);
13014 op
= bits (thumb2_insn_r
->arm_insn
, 23, 24);
13016 if (0 == op
|| 3 == op
)
13018 if (bit (thumb2_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
13020 /* Handle RFE instruction. */
13021 record_buf
[0] = ARM_PS_REGNUM
;
13022 thumb2_insn_r
->reg_rec_count
= 1;
13026 /* Handle SRS instruction after reading banked SP. */
13027 return arm_record_unsupported_insn (thumb2_insn_r
);
13030 else if (1 == op
|| 2 == op
)
13032 if (bit (thumb2_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
13034 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
13035 register_bits
= bits (thumb2_insn_r
->arm_insn
, 0, 15);
13036 while (register_bits
)
13038 if (register_bits
& 0x00000001)
13039 record_buf
[index
++] = register_count
;
13042 register_bits
= register_bits
>> 1;
13044 record_buf
[index
++] = reg_rn
;
13045 record_buf
[index
++] = ARM_PS_REGNUM
;
13046 thumb2_insn_r
->reg_rec_count
= index
;
13050 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
13051 register_bits
= bits (thumb2_insn_r
->arm_insn
, 0, 15);
13052 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
);
13053 while (register_bits
)
13055 if (register_bits
& 0x00000001)
13058 register_bits
= register_bits
>> 1;
13063 /* Start address calculation for LDMDB/LDMEA. */
13064 start_address
= u_regval
;
13068 /* Start address calculation for LDMDB/LDMEA. */
13069 start_address
= u_regval
- register_count
* 4;
13072 thumb2_insn_r
->mem_rec_count
= register_count
;
13073 while (register_count
)
13075 record_buf_mem
[register_count
* 2 - 1] = start_address
;
13076 record_buf_mem
[register_count
* 2 - 2] = 4;
13077 start_address
= start_address
+ 4;
13080 record_buf
[0] = reg_rn
;
13081 record_buf
[1] = ARM_PS_REGNUM
;
13082 thumb2_insn_r
->reg_rec_count
= 2;
13086 MEM_ALLOC (thumb2_insn_r
->arm_mems
, thumb2_insn_r
->mem_rec_count
,
13088 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
13090 return ARM_RECORD_SUCCESS
;
13093 /* Handler for thumb2 load/store (dual/exclusive) and table branch
13097 thumb2_record_ld_st_dual_ex_tbb (insn_decode_record
*thumb2_insn_r
)
13099 struct regcache
*reg_cache
= thumb2_insn_r
->regcache
;
13101 uint32_t reg_rd
, reg_rn
, offset_imm
;
13102 uint32_t reg_dest1
, reg_dest2
;
13103 uint32_t address
, offset_addr
;
13104 uint32_t record_buf
[8], record_buf_mem
[8];
13105 uint32_t op1
, op2
, op3
;
13108 ULONGEST u_regval
[2];
13110 op1
= bits (thumb2_insn_r
->arm_insn
, 23, 24);
13111 op2
= bits (thumb2_insn_r
->arm_insn
, 20, 21);
13112 op3
= bits (thumb2_insn_r
->arm_insn
, 4, 7);
13114 if (bit (thumb2_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
13116 if(!(1 == op1
&& 1 == op2
&& (0 == op3
|| 1 == op3
)))
13118 reg_dest1
= bits (thumb2_insn_r
->arm_insn
, 12, 15);
13119 record_buf
[0] = reg_dest1
;
13120 record_buf
[1] = ARM_PS_REGNUM
;
13121 thumb2_insn_r
->reg_rec_count
= 2;
13124 if (3 == op2
|| (op1
& 2) || (1 == op1
&& 1 == op2
&& 7 == op3
))
13126 reg_dest2
= bits (thumb2_insn_r
->arm_insn
, 8, 11);
13127 record_buf
[2] = reg_dest2
;
13128 thumb2_insn_r
->reg_rec_count
= 3;
13133 reg_rn
= bits (thumb2_insn_r
->arm_insn
, 16, 19);
13134 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
[0]);
13136 if (0 == op1
&& 0 == op2
)
13138 /* Handle STREX. */
13139 offset_imm
= bits (thumb2_insn_r
->arm_insn
, 0, 7);
13140 address
= u_regval
[0] + (offset_imm
* 4);
13141 record_buf_mem
[0] = 4;
13142 record_buf_mem
[1] = address
;
13143 thumb2_insn_r
->mem_rec_count
= 1;
13144 reg_rd
= bits (thumb2_insn_r
->arm_insn
, 0, 3);
13145 record_buf
[0] = reg_rd
;
13146 thumb2_insn_r
->reg_rec_count
= 1;
13148 else if (1 == op1
&& 0 == op2
)
13150 reg_rd
= bits (thumb2_insn_r
->arm_insn
, 0, 3);
13151 record_buf
[0] = reg_rd
;
13152 thumb2_insn_r
->reg_rec_count
= 1;
13153 address
= u_regval
[0];
13154 record_buf_mem
[1] = address
;
13158 /* Handle STREXB. */
13159 record_buf_mem
[0] = 1;
13160 thumb2_insn_r
->mem_rec_count
= 1;
13164 /* Handle STREXH. */
13165 record_buf_mem
[0] = 2 ;
13166 thumb2_insn_r
->mem_rec_count
= 1;
13170 /* Handle STREXD. */
13171 address
= u_regval
[0];
13172 record_buf_mem
[0] = 4;
13173 record_buf_mem
[2] = 4;
13174 record_buf_mem
[3] = address
+ 4;
13175 thumb2_insn_r
->mem_rec_count
= 2;
13180 offset_imm
= bits (thumb2_insn_r
->arm_insn
, 0, 7);
13182 if (bit (thumb2_insn_r
->arm_insn
, 24))
13184 if (bit (thumb2_insn_r
->arm_insn
, 23))
13185 offset_addr
= u_regval
[0] + (offset_imm
* 4);
13187 offset_addr
= u_regval
[0] - (offset_imm
* 4);
13189 address
= offset_addr
;
13192 address
= u_regval
[0];
13194 record_buf_mem
[0] = 4;
13195 record_buf_mem
[1] = address
;
13196 record_buf_mem
[2] = 4;
13197 record_buf_mem
[3] = address
+ 4;
13198 thumb2_insn_r
->mem_rec_count
= 2;
13199 record_buf
[0] = reg_rn
;
13200 thumb2_insn_r
->reg_rec_count
= 1;
13204 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
13206 MEM_ALLOC (thumb2_insn_r
->arm_mems
, thumb2_insn_r
->mem_rec_count
,
13208 return ARM_RECORD_SUCCESS
;
13211 /* Handler for thumb2 data processing (shift register and modified immediate)
13215 thumb2_record_data_proc_sreg_mimm (insn_decode_record
*thumb2_insn_r
)
13217 uint32_t reg_rd
, op
;
13218 uint32_t record_buf
[8];
13220 op
= bits (thumb2_insn_r
->arm_insn
, 21, 24);
13221 reg_rd
= bits (thumb2_insn_r
->arm_insn
, 8, 11);
13223 if ((0 == op
|| 4 == op
|| 8 == op
|| 13 == op
) && 15 == reg_rd
)
13225 record_buf
[0] = ARM_PS_REGNUM
;
13226 thumb2_insn_r
->reg_rec_count
= 1;
13230 record_buf
[0] = reg_rd
;
13231 record_buf
[1] = ARM_PS_REGNUM
;
13232 thumb2_insn_r
->reg_rec_count
= 2;
13235 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
13237 return ARM_RECORD_SUCCESS
;
13240 /* Generic handler for thumb2 instructions which effect destination and PS
13244 thumb2_record_ps_dest_generic (insn_decode_record
*thumb2_insn_r
)
13247 uint32_t record_buf
[8];
13249 reg_rd
= bits (thumb2_insn_r
->arm_insn
, 8, 11);
13251 record_buf
[0] = reg_rd
;
13252 record_buf
[1] = ARM_PS_REGNUM
;
13253 thumb2_insn_r
->reg_rec_count
= 2;
13255 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
13257 return ARM_RECORD_SUCCESS
;
13260 /* Handler for thumb2 branch and miscellaneous control instructions. */
13263 thumb2_record_branch_misc_cntrl (insn_decode_record
*thumb2_insn_r
)
13265 uint32_t op
, op1
, op2
;
13266 uint32_t record_buf
[8];
13268 op
= bits (thumb2_insn_r
->arm_insn
, 20, 26);
13269 op1
= bits (thumb2_insn_r
->arm_insn
, 12, 14);
13270 op2
= bits (thumb2_insn_r
->arm_insn
, 8, 11);
13272 /* Handle MSR insn. */
13273 if (!(op1
& 0x2) && 0x38 == op
)
13277 /* CPSR is going to be changed. */
13278 record_buf
[0] = ARM_PS_REGNUM
;
13279 thumb2_insn_r
->reg_rec_count
= 1;
13283 arm_record_unsupported_insn(thumb2_insn_r
);
13287 else if (4 == (op1
& 0x5) || 5 == (op1
& 0x5))
13290 record_buf
[0] = ARM_PS_REGNUM
;
13291 record_buf
[1] = ARM_LR_REGNUM
;
13292 thumb2_insn_r
->reg_rec_count
= 2;
13295 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
13297 return ARM_RECORD_SUCCESS
;
13300 /* Handler for thumb2 store single data item instructions. */
13303 thumb2_record_str_single_data (insn_decode_record
*thumb2_insn_r
)
13305 struct regcache
*reg_cache
= thumb2_insn_r
->regcache
;
13307 uint32_t reg_rn
, reg_rm
, offset_imm
, shift_imm
;
13308 uint32_t address
, offset_addr
;
13309 uint32_t record_buf
[8], record_buf_mem
[8];
13312 ULONGEST u_regval
[2];
13314 op1
= bits (thumb2_insn_r
->arm_insn
, 21, 23);
13315 op2
= bits (thumb2_insn_r
->arm_insn
, 6, 11);
13316 reg_rn
= bits (thumb2_insn_r
->arm_insn
, 16, 19);
13317 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
[0]);
13319 if (bit (thumb2_insn_r
->arm_insn
, 23))
13322 offset_imm
= bits (thumb2_insn_r
->arm_insn
, 0, 11);
13323 offset_addr
= u_regval
[0] + offset_imm
;
13324 address
= offset_addr
;
13329 if ((0 == op1
|| 1 == op1
|| 2 == op1
) && !(op2
& 0x20))
13331 /* Handle STRB (register). */
13332 reg_rm
= bits (thumb2_insn_r
->arm_insn
, 0, 3);
13333 regcache_raw_read_unsigned (reg_cache
, reg_rm
, &u_regval
[1]);
13334 shift_imm
= bits (thumb2_insn_r
->arm_insn
, 4, 5);
13335 offset_addr
= u_regval
[1] << shift_imm
;
13336 address
= u_regval
[0] + offset_addr
;
13340 offset_imm
= bits (thumb2_insn_r
->arm_insn
, 0, 7);
13341 if (bit (thumb2_insn_r
->arm_insn
, 10))
13343 if (bit (thumb2_insn_r
->arm_insn
, 9))
13344 offset_addr
= u_regval
[0] + offset_imm
;
13346 offset_addr
= u_regval
[0] - offset_imm
;
13348 address
= offset_addr
;
13351 address
= u_regval
[0];
13357 /* Store byte instructions. */
13360 record_buf_mem
[0] = 1;
13362 /* Store half word instructions. */
13365 record_buf_mem
[0] = 2;
13367 /* Store word instructions. */
13370 record_buf_mem
[0] = 4;
13374 gdb_assert_not_reached ("no decoding pattern found");
13378 record_buf_mem
[1] = address
;
13379 thumb2_insn_r
->mem_rec_count
= 1;
13380 record_buf
[0] = reg_rn
;
13381 thumb2_insn_r
->reg_rec_count
= 1;
13383 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
13385 MEM_ALLOC (thumb2_insn_r
->arm_mems
, thumb2_insn_r
->mem_rec_count
,
13387 return ARM_RECORD_SUCCESS
;
13390 /* Handler for thumb2 load memory hints instructions. */
13393 thumb2_record_ld_mem_hints (insn_decode_record
*thumb2_insn_r
)
13395 uint32_t record_buf
[8];
13396 uint32_t reg_rt
, reg_rn
;
13398 reg_rt
= bits (thumb2_insn_r
->arm_insn
, 12, 15);
13399 reg_rn
= bits (thumb2_insn_r
->arm_insn
, 16, 19);
13401 if (ARM_PC_REGNUM
!= reg_rt
)
13403 record_buf
[0] = reg_rt
;
13404 record_buf
[1] = reg_rn
;
13405 record_buf
[2] = ARM_PS_REGNUM
;
13406 thumb2_insn_r
->reg_rec_count
= 3;
13408 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
13410 return ARM_RECORD_SUCCESS
;
13413 return ARM_RECORD_FAILURE
;
13416 /* Handler for thumb2 load word instructions. */
13419 thumb2_record_ld_word (insn_decode_record
*thumb2_insn_r
)
13421 uint32_t opcode1
= 0, opcode2
= 0;
13422 uint32_t record_buf
[8];
13424 record_buf
[0] = bits (thumb2_insn_r
->arm_insn
, 12, 15);
13425 record_buf
[1] = ARM_PS_REGNUM
;
13426 thumb2_insn_r
->reg_rec_count
= 2;
13428 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
13430 return ARM_RECORD_SUCCESS
;
13433 /* Handler for thumb2 long multiply, long multiply accumulate, and
13434 divide instructions. */
13437 thumb2_record_lmul_lmla_div (insn_decode_record
*thumb2_insn_r
)
13439 uint32_t opcode1
= 0, opcode2
= 0;
13440 uint32_t record_buf
[8];
13441 uint32_t reg_src1
= 0;
13443 opcode1
= bits (thumb2_insn_r
->arm_insn
, 20, 22);
13444 opcode2
= bits (thumb2_insn_r
->arm_insn
, 4, 7);
13446 if (0 == opcode1
|| 2 == opcode1
|| (opcode1
>= 4 && opcode1
<= 6))
13448 /* Handle SMULL, UMULL, SMULAL. */
13449 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
13450 record_buf
[0] = bits (thumb2_insn_r
->arm_insn
, 16, 19);
13451 record_buf
[1] = bits (thumb2_insn_r
->arm_insn
, 12, 15);
13452 record_buf
[2] = ARM_PS_REGNUM
;
13453 thumb2_insn_r
->reg_rec_count
= 3;
13455 else if (1 == opcode1
|| 3 == opcode2
)
13457 /* Handle SDIV and UDIV. */
13458 record_buf
[0] = bits (thumb2_insn_r
->arm_insn
, 16, 19);
13459 record_buf
[1] = bits (thumb2_insn_r
->arm_insn
, 12, 15);
13460 record_buf
[2] = ARM_PS_REGNUM
;
13461 thumb2_insn_r
->reg_rec_count
= 3;
13464 return ARM_RECORD_FAILURE
;
13466 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
13468 return ARM_RECORD_SUCCESS
;
13471 /* Record handler for thumb32 coprocessor instructions. */
13474 thumb2_record_coproc_insn (insn_decode_record
*thumb2_insn_r
)
13476 if (bit (thumb2_insn_r
->arm_insn
, 25))
13477 return arm_record_coproc_data_proc (thumb2_insn_r
);
13479 return arm_record_asimd_vfp_coproc (thumb2_insn_r
);
13482 /* Record handler for advance SIMD structure load/store instructions. */
13485 thumb2_record_asimd_struct_ld_st (insn_decode_record
*thumb2_insn_r
)
13487 struct regcache
*reg_cache
= thumb2_insn_r
->regcache
;
13488 uint32_t l_bit
, a_bit
, b_bits
;
13489 uint32_t record_buf
[128], record_buf_mem
[128];
13490 uint32_t reg_rn
, reg_vd
, address
, f_esize
, f_elem
;
13491 uint32_t index_r
= 0, index_e
= 0, bf_regs
= 0, index_m
= 0, loop_t
= 0;
13494 l_bit
= bit (thumb2_insn_r
->arm_insn
, 21);
13495 a_bit
= bit (thumb2_insn_r
->arm_insn
, 23);
13496 b_bits
= bits (thumb2_insn_r
->arm_insn
, 8, 11);
13497 reg_rn
= bits (thumb2_insn_r
->arm_insn
, 16, 19);
13498 reg_vd
= bits (thumb2_insn_r
->arm_insn
, 12, 15);
13499 reg_vd
= (bit (thumb2_insn_r
->arm_insn
, 22) << 4) | reg_vd
;
13500 f_ebytes
= (1 << bits (thumb2_insn_r
->arm_insn
, 6, 7));
13501 f_esize
= 8 * f_ebytes
;
13502 f_elem
= 8 / f_ebytes
;
13506 ULONGEST u_regval
= 0;
13507 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
);
13508 address
= u_regval
;
13513 if (b_bits
== 0x02 || b_bits
== 0x0a || (b_bits
& 0x0e) == 0x06)
13515 if (b_bits
== 0x07)
13517 else if (b_bits
== 0x0a)
13519 else if (b_bits
== 0x06)
13521 else if (b_bits
== 0x02)
13526 for (index_r
= 0; index_r
< bf_regs
; index_r
++)
13528 for (index_e
= 0; index_e
< f_elem
; index_e
++)
13530 record_buf_mem
[index_m
++] = f_ebytes
;
13531 record_buf_mem
[index_m
++] = address
;
13532 address
= address
+ f_ebytes
;
13533 thumb2_insn_r
->mem_rec_count
+= 1;
13538 else if (b_bits
== 0x03 || (b_bits
& 0x0e) == 0x08)
13540 if (b_bits
== 0x09 || b_bits
== 0x08)
13542 else if (b_bits
== 0x03)
13547 for (index_r
= 0; index_r
< bf_regs
; index_r
++)
13548 for (index_e
= 0; index_e
< f_elem
; index_e
++)
13550 for (loop_t
= 0; loop_t
< 2; loop_t
++)
13552 record_buf_mem
[index_m
++] = f_ebytes
;
13553 record_buf_mem
[index_m
++] = address
+ (loop_t
* f_ebytes
);
13554 thumb2_insn_r
->mem_rec_count
+= 1;
13556 address
= address
+ (2 * f_ebytes
);
13560 else if ((b_bits
& 0x0e) == 0x04)
13562 for (index_e
= 0; index_e
< f_elem
; index_e
++)
13564 for (loop_t
= 0; loop_t
< 3; loop_t
++)
13566 record_buf_mem
[index_m
++] = f_ebytes
;
13567 record_buf_mem
[index_m
++] = address
+ (loop_t
* f_ebytes
);
13568 thumb2_insn_r
->mem_rec_count
+= 1;
13570 address
= address
+ (3 * f_ebytes
);
13574 else if (!(b_bits
& 0x0e))
13576 for (index_e
= 0; index_e
< f_elem
; index_e
++)
13578 for (loop_t
= 0; loop_t
< 4; loop_t
++)
13580 record_buf_mem
[index_m
++] = f_ebytes
;
13581 record_buf_mem
[index_m
++] = address
+ (loop_t
* f_ebytes
);
13582 thumb2_insn_r
->mem_rec_count
+= 1;
13584 address
= address
+ (4 * f_ebytes
);
13590 uint8_t bft_size
= bits (thumb2_insn_r
->arm_insn
, 10, 11);
13592 if (bft_size
== 0x00)
13594 else if (bft_size
== 0x01)
13596 else if (bft_size
== 0x02)
13602 if (!(b_bits
& 0x0b) || b_bits
== 0x08)
13603 thumb2_insn_r
->mem_rec_count
= 1;
13605 else if ((b_bits
& 0x0b) == 0x01 || b_bits
== 0x09)
13606 thumb2_insn_r
->mem_rec_count
= 2;
13608 else if ((b_bits
& 0x0b) == 0x02 || b_bits
== 0x0a)
13609 thumb2_insn_r
->mem_rec_count
= 3;
13611 else if ((b_bits
& 0x0b) == 0x03 || b_bits
== 0x0b)
13612 thumb2_insn_r
->mem_rec_count
= 4;
13614 for (index_m
= 0; index_m
< thumb2_insn_r
->mem_rec_count
; index_m
++)
13616 record_buf_mem
[index_m
] = f_ebytes
;
13617 record_buf_mem
[index_m
] = address
+ (index_m
* f_ebytes
);
13626 if (b_bits
== 0x02 || b_bits
== 0x0a || (b_bits
& 0x0e) == 0x06)
13627 thumb2_insn_r
->reg_rec_count
= 1;
13629 else if (b_bits
== 0x03 || (b_bits
& 0x0e) == 0x08)
13630 thumb2_insn_r
->reg_rec_count
= 2;
13632 else if ((b_bits
& 0x0e) == 0x04)
13633 thumb2_insn_r
->reg_rec_count
= 3;
13635 else if (!(b_bits
& 0x0e))
13636 thumb2_insn_r
->reg_rec_count
= 4;
13641 if (!(b_bits
& 0x0b) || b_bits
== 0x08 || b_bits
== 0x0c)
13642 thumb2_insn_r
->reg_rec_count
= 1;
13644 else if ((b_bits
& 0x0b) == 0x01 || b_bits
== 0x09 || b_bits
== 0x0d)
13645 thumb2_insn_r
->reg_rec_count
= 2;
13647 else if ((b_bits
& 0x0b) == 0x02 || b_bits
== 0x0a || b_bits
== 0x0e)
13648 thumb2_insn_r
->reg_rec_count
= 3;
13650 else if ((b_bits
& 0x0b) == 0x03 || b_bits
== 0x0b || b_bits
== 0x0f)
13651 thumb2_insn_r
->reg_rec_count
= 4;
13653 for (index_r
= 0; index_r
< thumb2_insn_r
->reg_rec_count
; index_r
++)
13654 record_buf
[index_r
] = reg_vd
+ ARM_D0_REGNUM
+ index_r
;
13658 if (bits (thumb2_insn_r
->arm_insn
, 0, 3) != 15)
13660 record_buf
[index_r
] = reg_rn
;
13661 thumb2_insn_r
->reg_rec_count
+= 1;
13664 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
13666 MEM_ALLOC (thumb2_insn_r
->arm_mems
, thumb2_insn_r
->mem_rec_count
,
13671 /* Decodes thumb2 instruction type and invokes its record handler. */
13673 static unsigned int
13674 thumb2_record_decode_insn_handler (insn_decode_record
*thumb2_insn_r
)
13676 uint32_t op
, op1
, op2
;
13678 op
= bit (thumb2_insn_r
->arm_insn
, 15);
13679 op1
= bits (thumb2_insn_r
->arm_insn
, 27, 28);
13680 op2
= bits (thumb2_insn_r
->arm_insn
, 20, 26);
13684 if (!(op2
& 0x64 ))
13686 /* Load/store multiple instruction. */
13687 return thumb2_record_ld_st_multiple (thumb2_insn_r
);
13689 else if (!((op2
& 0x64) ^ 0x04))
13691 /* Load/store (dual/exclusive) and table branch instruction. */
13692 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r
);
13694 else if (!((op2
& 0x20) ^ 0x20))
13696 /* Data-processing (shifted register). */
13697 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r
);
13699 else if (op2
& 0x40)
13701 /* Co-processor instructions. */
13702 return thumb2_record_coproc_insn (thumb2_insn_r
);
13705 else if (op1
== 0x02)
13709 /* Branches and miscellaneous control instructions. */
13710 return thumb2_record_branch_misc_cntrl (thumb2_insn_r
);
13712 else if (op2
& 0x20)
13714 /* Data-processing (plain binary immediate) instruction. */
13715 return thumb2_record_ps_dest_generic (thumb2_insn_r
);
13719 /* Data-processing (modified immediate). */
13720 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r
);
13723 else if (op1
== 0x03)
13725 if (!(op2
& 0x71 ))
13727 /* Store single data item. */
13728 return thumb2_record_str_single_data (thumb2_insn_r
);
13730 else if (!((op2
& 0x71) ^ 0x10))
13732 /* Advanced SIMD or structure load/store instructions. */
13733 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r
);
13735 else if (!((op2
& 0x67) ^ 0x01))
13737 /* Load byte, memory hints instruction. */
13738 return thumb2_record_ld_mem_hints (thumb2_insn_r
);
13740 else if (!((op2
& 0x67) ^ 0x03))
13742 /* Load halfword, memory hints instruction. */
13743 return thumb2_record_ld_mem_hints (thumb2_insn_r
);
13745 else if (!((op2
& 0x67) ^ 0x05))
13747 /* Load word instruction. */
13748 return thumb2_record_ld_word (thumb2_insn_r
);
13750 else if (!((op2
& 0x70) ^ 0x20))
13752 /* Data-processing (register) instruction. */
13753 return thumb2_record_ps_dest_generic (thumb2_insn_r
);
13755 else if (!((op2
& 0x78) ^ 0x30))
13757 /* Multiply, multiply accumulate, abs diff instruction. */
13758 return thumb2_record_ps_dest_generic (thumb2_insn_r
);
13760 else if (!((op2
& 0x78) ^ 0x38))
13762 /* Long multiply, long multiply accumulate, and divide. */
13763 return thumb2_record_lmul_lmla_div (thumb2_insn_r
);
13765 else if (op2
& 0x40)
13767 /* Co-processor instructions. */
13768 return thumb2_record_coproc_insn (thumb2_insn_r
);
13775 /* Extracts arm/thumb/thumb2 insn depending on the size, and returns 0 on success
13776 and positive val on fauilure. */
13779 extract_arm_insn (insn_decode_record
*insn_record
, uint32_t insn_size
)
13781 gdb_byte buf
[insn_size
];
13783 memset (&buf
[0], 0, insn_size
);
13785 if (target_read_memory (insn_record
->this_addr
, &buf
[0], insn_size
))
13787 insn_record
->arm_insn
= (uint32_t) extract_unsigned_integer (&buf
[0],
13789 gdbarch_byte_order_for_code (insn_record
->gdbarch
));
13793 typedef int (*sti_arm_hdl_fp_t
) (insn_decode_record
*);
13795 /* Decode arm/thumb insn depending on condition cods and opcodes; and
13799 decode_insn (insn_decode_record
*arm_record
, record_type_t record_type
,
13800 uint32_t insn_size
)
13803 /* (Starting from numerical 0); bits 25, 26, 27 decodes type of arm instruction. */
13804 static const sti_arm_hdl_fp_t arm_handle_insn
[8] =
13806 arm_record_data_proc_misc_ld_str
, /* 000. */
13807 arm_record_data_proc_imm
, /* 001. */
13808 arm_record_ld_st_imm_offset
, /* 010. */
13809 arm_record_ld_st_reg_offset
, /* 011. */
13810 arm_record_ld_st_multiple
, /* 100. */
13811 arm_record_b_bl
, /* 101. */
13812 arm_record_asimd_vfp_coproc
, /* 110. */
13813 arm_record_coproc_data_proc
/* 111. */
13816 /* (Starting from numerical 0); bits 13,14,15 decodes type of thumb instruction. */
13817 static const sti_arm_hdl_fp_t thumb_handle_insn
[8] =
13819 thumb_record_shift_add_sub
, /* 000. */
13820 thumb_record_add_sub_cmp_mov
, /* 001. */
13821 thumb_record_ld_st_reg_offset
, /* 010. */
13822 thumb_record_ld_st_imm_offset
, /* 011. */
13823 thumb_record_ld_st_stack
, /* 100. */
13824 thumb_record_misc
, /* 101. */
13825 thumb_record_ldm_stm_swi
, /* 110. */
13826 thumb_record_branch
/* 111. */
13829 uint32_t ret
= 0; /* return value: negative:failure 0:success. */
13830 uint32_t insn_id
= 0;
13832 if (extract_arm_insn (arm_record
, insn_size
))
13836 printf_unfiltered (_("Process record: error reading memory at "
13837 "addr %s len = %d.\n"),
13838 paddress (arm_record
->gdbarch
, arm_record
->this_addr
), insn_size
);
13842 else if (ARM_RECORD
== record_type
)
13844 arm_record
->cond
= bits (arm_record
->arm_insn
, 28, 31);
13845 insn_id
= bits (arm_record
->arm_insn
, 25, 27);
13846 ret
= arm_record_extension_space (arm_record
);
13847 /* If this insn has fallen into extension space
13848 then we need not decode it anymore. */
13849 if (ret
!= -1 && !INSN_RECORDED(arm_record
))
13851 ret
= arm_handle_insn
[insn_id
] (arm_record
);
13854 else if (THUMB_RECORD
== record_type
)
13856 /* As thumb does not have condition codes, we set negative. */
13857 arm_record
->cond
= -1;
13858 insn_id
= bits (arm_record
->arm_insn
, 13, 15);
13859 ret
= thumb_handle_insn
[insn_id
] (arm_record
);
13861 else if (THUMB2_RECORD
== record_type
)
13863 /* As thumb does not have condition codes, we set negative. */
13864 arm_record
->cond
= -1;
13866 /* Swap first half of 32bit thumb instruction with second half. */
13867 arm_record
->arm_insn
13868 = (arm_record
->arm_insn
>> 16) | (arm_record
->arm_insn
<< 16);
13870 insn_id
= thumb2_record_decode_insn_handler (arm_record
);
13872 if (insn_id
!= ARM_RECORD_SUCCESS
)
13874 arm_record_unsupported_insn (arm_record
);
13880 /* Throw assertion. */
13881 gdb_assert_not_reached ("not a valid instruction, could not decode");
13888 /* Cleans up local record registers and memory allocations. */
13891 deallocate_reg_mem (insn_decode_record
*record
)
13893 xfree (record
->arm_regs
);
13894 xfree (record
->arm_mems
);
13898 /* Parse the current instruction and record the values of the registers and
13899 memory that will be changed in current instruction to record_arch_list".
13900 Return -1 if something is wrong. */
13903 arm_process_record (struct gdbarch
*gdbarch
, struct regcache
*regcache
,
13904 CORE_ADDR insn_addr
)
13907 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
13908 uint32_t no_of_rec
= 0;
13909 uint32_t ret
= 0; /* return value: -1:record failure ; 0:success */
13910 ULONGEST t_bit
= 0, insn_id
= 0;
13912 ULONGEST u_regval
= 0;
13914 insn_decode_record arm_record
;
13916 memset (&arm_record
, 0, sizeof (insn_decode_record
));
13917 arm_record
.regcache
= regcache
;
13918 arm_record
.this_addr
= insn_addr
;
13919 arm_record
.gdbarch
= gdbarch
;
13922 if (record_debug
> 1)
13924 fprintf_unfiltered (gdb_stdlog
, "Process record: arm_process_record "
13926 paddress (gdbarch
, arm_record
.this_addr
));
13929 if (extract_arm_insn (&arm_record
, 2))
13933 printf_unfiltered (_("Process record: error reading memory at "
13934 "addr %s len = %d.\n"),
13935 paddress (arm_record
.gdbarch
,
13936 arm_record
.this_addr
), 2);
13941 /* Check the insn, whether it is thumb or arm one. */
13943 t_bit
= arm_psr_thumb_bit (arm_record
.gdbarch
);
13944 regcache_raw_read_unsigned (arm_record
.regcache
, ARM_PS_REGNUM
, &u_regval
);
13947 if (!(u_regval
& t_bit
))
13949 /* We are decoding arm insn. */
13950 ret
= decode_insn (&arm_record
, ARM_RECORD
, ARM_INSN_SIZE_BYTES
);
13954 insn_id
= bits (arm_record
.arm_insn
, 11, 15);
13955 /* is it thumb2 insn? */
13956 if ((0x1D == insn_id
) || (0x1E == insn_id
) || (0x1F == insn_id
))
13958 ret
= decode_insn (&arm_record
, THUMB2_RECORD
,
13959 THUMB2_INSN_SIZE_BYTES
);
13963 /* We are decoding thumb insn. */
13964 ret
= decode_insn (&arm_record
, THUMB_RECORD
, THUMB_INSN_SIZE_BYTES
);
13970 /* Record registers. */
13971 record_full_arch_list_add_reg (arm_record
.regcache
, ARM_PC_REGNUM
);
13972 if (arm_record
.arm_regs
)
13974 for (no_of_rec
= 0; no_of_rec
< arm_record
.reg_rec_count
; no_of_rec
++)
13976 if (record_full_arch_list_add_reg
13977 (arm_record
.regcache
, arm_record
.arm_regs
[no_of_rec
]))
13981 /* Record memories. */
13982 if (arm_record
.arm_mems
)
13984 for (no_of_rec
= 0; no_of_rec
< arm_record
.mem_rec_count
; no_of_rec
++)
13986 if (record_full_arch_list_add_mem
13987 ((CORE_ADDR
)arm_record
.arm_mems
[no_of_rec
].addr
,
13988 arm_record
.arm_mems
[no_of_rec
].len
))
13993 if (record_full_arch_list_add_end ())
13998 deallocate_reg_mem (&arm_record
);