Rename a private data member in tui_source_window
[deliverable/binutils-gdb.git] / gdb / arm-tdep.c
1 /* Common target dependent code for GDB on ARM systems.
2
3 Copyright (C) 1988-2019 Free Software Foundation, Inc.
4
5 This file is part of GDB.
6
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
11
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
19
20 #include "defs.h"
21
22 #include <ctype.h> /* XXX for isupper (). */
23
24 #include "frame.h"
25 #include "inferior.h"
26 #include "infrun.h"
27 #include "gdbcmd.h"
28 #include "gdbcore.h"
29 #include "dis-asm.h" /* For register styles. */
30 #include "disasm.h"
31 #include "regcache.h"
32 #include "reggroups.h"
33 #include "target-float.h"
34 #include "value.h"
35 #include "arch-utils.h"
36 #include "osabi.h"
37 #include "frame-unwind.h"
38 #include "frame-base.h"
39 #include "trad-frame.h"
40 #include "objfiles.h"
41 #include "dwarf2-frame.h"
42 #include "gdbtypes.h"
43 #include "prologue-value.h"
44 #include "remote.h"
45 #include "target-descriptions.h"
46 #include "user-regs.h"
47 #include "observable.h"
48
49 #include "arch/arm.h"
50 #include "arch/arm-get-next-pcs.h"
51 #include "arm-tdep.h"
52 #include "gdb/sim-arm.h"
53
54 #include "elf-bfd.h"
55 #include "coff/internal.h"
56 #include "elf/arm.h"
57
58 #include "gdbsupport/vec.h"
59
60 #include "record.h"
61 #include "record-full.h"
62 #include <algorithm>
63
64 #if GDB_SELF_TEST
65 #include "gdbsupport/selftest.h"
66 #endif
67
68 static bool arm_debug;
69
70 /* Macros for setting and testing a bit in a minimal symbol that marks
71 it as Thumb function. The MSB of the minimal symbol's "info" field
72 is used for this purpose.
73
74 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
75 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
76
77 #define MSYMBOL_SET_SPECIAL(msym) \
78 MSYMBOL_TARGET_FLAG_1 (msym) = 1
79
80 #define MSYMBOL_IS_SPECIAL(msym) \
81 MSYMBOL_TARGET_FLAG_1 (msym)
82
83 struct arm_mapping_symbol
84 {
85 bfd_vma value;
86 char type;
87
88 bool operator< (const arm_mapping_symbol &other) const
89 { return this->value < other.value; }
90 };
91
92 typedef std::vector<arm_mapping_symbol> arm_mapping_symbol_vec;
93
94 struct arm_per_objfile
95 {
96 explicit arm_per_objfile (size_t num_sections)
97 : section_maps (new arm_mapping_symbol_vec[num_sections]),
98 section_maps_sorted (new bool[num_sections] ())
99 {}
100
101 DISABLE_COPY_AND_ASSIGN (arm_per_objfile);
102
103 /* Information about mapping symbols ($a, $d, $t) in the objfile.
104
105 The format is an array of vectors of arm_mapping_symbols, there is one
106 vector for each section of the objfile (the array is index by BFD section
107 index).
108
109 For each section, the vector of arm_mapping_symbol is sorted by
110 symbol value (address). */
111 std::unique_ptr<arm_mapping_symbol_vec[]> section_maps;
112
113 /* For each corresponding element of section_maps above, is this vector
114 sorted. */
115 std::unique_ptr<bool[]> section_maps_sorted;
116 };
117
118 /* Per-objfile data used for mapping symbols. */
119 static objfile_key<arm_per_objfile> arm_objfile_data_key;
120
121 /* The list of available "set arm ..." and "show arm ..." commands. */
122 static struct cmd_list_element *setarmcmdlist = NULL;
123 static struct cmd_list_element *showarmcmdlist = NULL;
124
125 /* The type of floating-point to use. Keep this in sync with enum
126 arm_float_model, and the help string in _initialize_arm_tdep. */
127 static const char *const fp_model_strings[] =
128 {
129 "auto",
130 "softfpa",
131 "fpa",
132 "softvfp",
133 "vfp",
134 NULL
135 };
136
137 /* A variable that can be configured by the user. */
138 static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
139 static const char *current_fp_model = "auto";
140
141 /* The ABI to use. Keep this in sync with arm_abi_kind. */
142 static const char *const arm_abi_strings[] =
143 {
144 "auto",
145 "APCS",
146 "AAPCS",
147 NULL
148 };
149
150 /* A variable that can be configured by the user. */
151 static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
152 static const char *arm_abi_string = "auto";
153
154 /* The execution mode to assume. */
155 static const char *const arm_mode_strings[] =
156 {
157 "auto",
158 "arm",
159 "thumb",
160 NULL
161 };
162
163 static const char *arm_fallback_mode_string = "auto";
164 static const char *arm_force_mode_string = "auto";
165
166 /* The standard register names, and all the valid aliases for them. Note
167 that `fp', `sp' and `pc' are not added in this alias list, because they
168 have been added as builtin user registers in
169 std-regs.c:_initialize_frame_reg. */
170 static const struct
171 {
172 const char *name;
173 int regnum;
174 } arm_register_aliases[] = {
175 /* Basic register numbers. */
176 { "r0", 0 },
177 { "r1", 1 },
178 { "r2", 2 },
179 { "r3", 3 },
180 { "r4", 4 },
181 { "r5", 5 },
182 { "r6", 6 },
183 { "r7", 7 },
184 { "r8", 8 },
185 { "r9", 9 },
186 { "r10", 10 },
187 { "r11", 11 },
188 { "r12", 12 },
189 { "r13", 13 },
190 { "r14", 14 },
191 { "r15", 15 },
192 /* Synonyms (argument and variable registers). */
193 { "a1", 0 },
194 { "a2", 1 },
195 { "a3", 2 },
196 { "a4", 3 },
197 { "v1", 4 },
198 { "v2", 5 },
199 { "v3", 6 },
200 { "v4", 7 },
201 { "v5", 8 },
202 { "v6", 9 },
203 { "v7", 10 },
204 { "v8", 11 },
205 /* Other platform-specific names for r9. */
206 { "sb", 9 },
207 { "tr", 9 },
208 /* Special names. */
209 { "ip", 12 },
210 { "lr", 14 },
211 /* Names used by GCC (not listed in the ARM EABI). */
212 { "sl", 10 },
213 /* A special name from the older ATPCS. */
214 { "wr", 7 },
215 };
216
217 static const char *const arm_register_names[] =
218 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
219 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
220 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
221 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
222 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
223 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
224 "fps", "cpsr" }; /* 24 25 */
225
226 /* Holds the current set of options to be passed to the disassembler. */
227 static char *arm_disassembler_options;
228
229 /* Valid register name styles. */
230 static const char **valid_disassembly_styles;
231
232 /* Disassembly style to use. Default to "std" register names. */
233 static const char *disassembly_style;
234
235 /* All possible arm target descriptors. */
236 static struct target_desc *tdesc_arm_list[ARM_FP_TYPE_INVALID];
237 static struct target_desc *tdesc_arm_mprofile_list[ARM_M_TYPE_INVALID];
238
239 /* This is used to keep the bfd arch_info in sync with the disassembly
240 style. */
241 static void set_disassembly_style_sfunc (const char *, int,
242 struct cmd_list_element *);
243 static void show_disassembly_style_sfunc (struct ui_file *, int,
244 struct cmd_list_element *,
245 const char *);
246
247 static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
248 readable_regcache *regcache,
249 int regnum, gdb_byte *buf);
250 static void arm_neon_quad_write (struct gdbarch *gdbarch,
251 struct regcache *regcache,
252 int regnum, const gdb_byte *buf);
253
254 static CORE_ADDR
255 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self);
256
257
258 /* get_next_pcs operations. */
259 static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = {
260 arm_get_next_pcs_read_memory_unsigned_integer,
261 arm_get_next_pcs_syscall_next_pc,
262 arm_get_next_pcs_addr_bits_remove,
263 arm_get_next_pcs_is_thumb,
264 NULL,
265 };
266
267 struct arm_prologue_cache
268 {
269 /* The stack pointer at the time this frame was created; i.e. the
270 caller's stack pointer when this function was called. It is used
271 to identify this frame. */
272 CORE_ADDR prev_sp;
273
274 /* The frame base for this frame is just prev_sp - frame size.
275 FRAMESIZE is the distance from the frame pointer to the
276 initial stack pointer. */
277
278 int framesize;
279
280 /* The register used to hold the frame pointer for this frame. */
281 int framereg;
282
283 /* Saved register offsets. */
284 struct trad_frame_saved_reg *saved_regs;
285 };
286
287 static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
288 CORE_ADDR prologue_start,
289 CORE_ADDR prologue_end,
290 struct arm_prologue_cache *cache);
291
292 /* Architecture version for displaced stepping. This effects the behaviour of
293 certain instructions, and really should not be hard-wired. */
294
295 #define DISPLACED_STEPPING_ARCH_VERSION 5
296
297 /* See arm-tdep.h. */
298
299 bool arm_apcs_32 = true;
300
301 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
302
303 int
304 arm_psr_thumb_bit (struct gdbarch *gdbarch)
305 {
306 if (gdbarch_tdep (gdbarch)->is_m)
307 return XPSR_T;
308 else
309 return CPSR_T;
310 }
311
312 /* Determine if the processor is currently executing in Thumb mode. */
313
314 int
315 arm_is_thumb (struct regcache *regcache)
316 {
317 ULONGEST cpsr;
318 ULONGEST t_bit = arm_psr_thumb_bit (regcache->arch ());
319
320 cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
321
322 return (cpsr & t_bit) != 0;
323 }
324
325 /* Determine if FRAME is executing in Thumb mode. */
326
327 int
328 arm_frame_is_thumb (struct frame_info *frame)
329 {
330 CORE_ADDR cpsr;
331 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
332
333 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
334 directly (from a signal frame or dummy frame) or by interpreting
335 the saved LR (from a prologue or DWARF frame). So consult it and
336 trust the unwinders. */
337 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
338
339 return (cpsr & t_bit) != 0;
340 }
341
342 /* Search for the mapping symbol covering MEMADDR. If one is found,
343 return its type. Otherwise, return 0. If START is non-NULL,
344 set *START to the location of the mapping symbol. */
345
346 static char
347 arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
348 {
349 struct obj_section *sec;
350
351 /* If there are mapping symbols, consult them. */
352 sec = find_pc_section (memaddr);
353 if (sec != NULL)
354 {
355 arm_per_objfile *data = arm_objfile_data_key.get (sec->objfile);
356 if (data != NULL)
357 {
358 unsigned int section_idx = sec->the_bfd_section->index;
359 arm_mapping_symbol_vec &map
360 = data->section_maps[section_idx];
361
362 /* Sort the vector on first use. */
363 if (!data->section_maps_sorted[section_idx])
364 {
365 std::sort (map.begin (), map.end ());
366 data->section_maps_sorted[section_idx] = true;
367 }
368
369 struct arm_mapping_symbol map_key
370 = { memaddr - obj_section_addr (sec), 0 };
371 arm_mapping_symbol_vec::const_iterator it
372 = std::lower_bound (map.begin (), map.end (), map_key);
373
374 /* std::lower_bound finds the earliest ordered insertion
375 point. If the symbol at this position starts at this exact
376 address, we use that; otherwise, the preceding
377 mapping symbol covers this address. */
378 if (it < map.end ())
379 {
380 if (it->value == map_key.value)
381 {
382 if (start)
383 *start = it->value + obj_section_addr (sec);
384 return it->type;
385 }
386 }
387
388 if (it > map.begin ())
389 {
390 arm_mapping_symbol_vec::const_iterator prev_it
391 = it - 1;
392
393 if (start)
394 *start = prev_it->value + obj_section_addr (sec);
395 return prev_it->type;
396 }
397 }
398 }
399
400 return 0;
401 }
402
403 /* Determine if the program counter specified in MEMADDR is in a Thumb
404 function. This function should be called for addresses unrelated to
405 any executing frame; otherwise, prefer arm_frame_is_thumb. */
406
407 int
408 arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
409 {
410 struct bound_minimal_symbol sym;
411 char type;
412 arm_displaced_step_closure *dsc
413 = ((arm_displaced_step_closure * )
414 get_displaced_step_closure_by_addr (memaddr));
415
416 /* If checking the mode of displaced instruction in copy area, the mode
417 should be determined by instruction on the original address. */
418 if (dsc)
419 {
420 if (debug_displaced)
421 fprintf_unfiltered (gdb_stdlog,
422 "displaced: check mode of %.8lx instead of %.8lx\n",
423 (unsigned long) dsc->insn_addr,
424 (unsigned long) memaddr);
425 memaddr = dsc->insn_addr;
426 }
427
428 /* If bit 0 of the address is set, assume this is a Thumb address. */
429 if (IS_THUMB_ADDR (memaddr))
430 return 1;
431
432 /* If the user wants to override the symbol table, let him. */
433 if (strcmp (arm_force_mode_string, "arm") == 0)
434 return 0;
435 if (strcmp (arm_force_mode_string, "thumb") == 0)
436 return 1;
437
438 /* ARM v6-M and v7-M are always in Thumb mode. */
439 if (gdbarch_tdep (gdbarch)->is_m)
440 return 1;
441
442 /* If there are mapping symbols, consult them. */
443 type = arm_find_mapping_symbol (memaddr, NULL);
444 if (type)
445 return type == 't';
446
447 /* Thumb functions have a "special" bit set in minimal symbols. */
448 sym = lookup_minimal_symbol_by_pc (memaddr);
449 if (sym.minsym)
450 return (MSYMBOL_IS_SPECIAL (sym.minsym));
451
452 /* If the user wants to override the fallback mode, let them. */
453 if (strcmp (arm_fallback_mode_string, "arm") == 0)
454 return 0;
455 if (strcmp (arm_fallback_mode_string, "thumb") == 0)
456 return 1;
457
458 /* If we couldn't find any symbol, but we're talking to a running
459 target, then trust the current value of $cpsr. This lets
460 "display/i $pc" always show the correct mode (though if there is
461 a symbol table we will not reach here, so it still may not be
462 displayed in the mode it will be executed). */
463 if (target_has_registers)
464 return arm_frame_is_thumb (get_current_frame ());
465
466 /* Otherwise we're out of luck; we assume ARM. */
467 return 0;
468 }
469
470 /* Determine if the address specified equals any of these magic return
471 values, called EXC_RETURN, defined by the ARM v6-M and v7-M
472 architectures.
473
474 From ARMv6-M Reference Manual B1.5.8
475 Table B1-5 Exception return behavior
476
477 EXC_RETURN Return To Return Stack
478 0xFFFFFFF1 Handler mode Main
479 0xFFFFFFF9 Thread mode Main
480 0xFFFFFFFD Thread mode Process
481
482 From ARMv7-M Reference Manual B1.5.8
483 Table B1-8 EXC_RETURN definition of exception return behavior, no FP
484
485 EXC_RETURN Return To Return Stack
486 0xFFFFFFF1 Handler mode Main
487 0xFFFFFFF9 Thread mode Main
488 0xFFFFFFFD Thread mode Process
489
490 Table B1-9 EXC_RETURN definition of exception return behavior, with
491 FP
492
493 EXC_RETURN Return To Return Stack Frame Type
494 0xFFFFFFE1 Handler mode Main Extended
495 0xFFFFFFE9 Thread mode Main Extended
496 0xFFFFFFED Thread mode Process Extended
497 0xFFFFFFF1 Handler mode Main Basic
498 0xFFFFFFF9 Thread mode Main Basic
499 0xFFFFFFFD Thread mode Process Basic
500
501 For more details see "B1.5.8 Exception return behavior"
502 in both ARMv6-M and ARMv7-M Architecture Reference Manuals. */
503
504 static int
505 arm_m_addr_is_magic (CORE_ADDR addr)
506 {
507 switch (addr)
508 {
509 /* Values from Tables in B1.5.8 the EXC_RETURN definitions of
510 the exception return behavior. */
511 case 0xffffffe1:
512 case 0xffffffe9:
513 case 0xffffffed:
514 case 0xfffffff1:
515 case 0xfffffff9:
516 case 0xfffffffd:
517 /* Address is magic. */
518 return 1;
519
520 default:
521 /* Address is not magic. */
522 return 0;
523 }
524 }
525
526 /* Remove useless bits from addresses in a running program. */
527 static CORE_ADDR
528 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
529 {
530 /* On M-profile devices, do not strip the low bit from EXC_RETURN
531 (the magic exception return address). */
532 if (gdbarch_tdep (gdbarch)->is_m
533 && arm_m_addr_is_magic (val))
534 return val;
535
536 if (arm_apcs_32)
537 return UNMAKE_THUMB_ADDR (val);
538 else
539 return (val & 0x03fffffc);
540 }
541
542 /* Return 1 if PC is the start of a compiler helper function which
543 can be safely ignored during prologue skipping. IS_THUMB is true
544 if the function is known to be a Thumb function due to the way it
545 is being called. */
546 static int
547 skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
548 {
549 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
550 struct bound_minimal_symbol msym;
551
552 msym = lookup_minimal_symbol_by_pc (pc);
553 if (msym.minsym != NULL
554 && BMSYMBOL_VALUE_ADDRESS (msym) == pc
555 && MSYMBOL_LINKAGE_NAME (msym.minsym) != NULL)
556 {
557 const char *name = MSYMBOL_LINKAGE_NAME (msym.minsym);
558
559 /* The GNU linker's Thumb call stub to foo is named
560 __foo_from_thumb. */
561 if (strstr (name, "_from_thumb") != NULL)
562 name += 2;
563
564 /* On soft-float targets, __truncdfsf2 is called to convert promoted
565 arguments to their argument types in non-prototyped
566 functions. */
567 if (startswith (name, "__truncdfsf2"))
568 return 1;
569 if (startswith (name, "__aeabi_d2f"))
570 return 1;
571
572 /* Internal functions related to thread-local storage. */
573 if (startswith (name, "__tls_get_addr"))
574 return 1;
575 if (startswith (name, "__aeabi_read_tp"))
576 return 1;
577 }
578 else
579 {
580 /* If we run against a stripped glibc, we may be unable to identify
581 special functions by name. Check for one important case,
582 __aeabi_read_tp, by comparing the *code* against the default
583 implementation (this is hand-written ARM assembler in glibc). */
584
585 if (!is_thumb
586 && read_code_unsigned_integer (pc, 4, byte_order_for_code)
587 == 0xe3e00a0f /* mov r0, #0xffff0fff */
588 && read_code_unsigned_integer (pc + 4, 4, byte_order_for_code)
589 == 0xe240f01f) /* sub pc, r0, #31 */
590 return 1;
591 }
592
593 return 0;
594 }
595
596 /* Extract the immediate from instruction movw/movt of encoding T. INSN1 is
597 the first 16-bit of instruction, and INSN2 is the second 16-bit of
598 instruction. */
599 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
600 ((bits ((insn1), 0, 3) << 12) \
601 | (bits ((insn1), 10, 10) << 11) \
602 | (bits ((insn2), 12, 14) << 8) \
603 | bits ((insn2), 0, 7))
604
605 /* Extract the immediate from instruction movw/movt of encoding A. INSN is
606 the 32-bit instruction. */
607 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \
608 ((bits ((insn), 16, 19) << 12) \
609 | bits ((insn), 0, 11))
610
611 /* Decode immediate value; implements ThumbExpandImmediate pseudo-op. */
612
613 static unsigned int
614 thumb_expand_immediate (unsigned int imm)
615 {
616 unsigned int count = imm >> 7;
617
618 if (count < 8)
619 switch (count / 2)
620 {
621 case 0:
622 return imm & 0xff;
623 case 1:
624 return (imm & 0xff) | ((imm & 0xff) << 16);
625 case 2:
626 return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
627 case 3:
628 return (imm & 0xff) | ((imm & 0xff) << 8)
629 | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
630 }
631
632 return (0x80 | (imm & 0x7f)) << (32 - count);
633 }
634
635 /* Return 1 if the 16-bit Thumb instruction INSN restores SP in
636 epilogue, 0 otherwise. */
637
638 static int
639 thumb_instruction_restores_sp (unsigned short insn)
640 {
641 return (insn == 0x46bd /* mov sp, r7 */
642 || (insn & 0xff80) == 0xb000 /* add sp, imm */
643 || (insn & 0xfe00) == 0xbc00); /* pop <registers> */
644 }
645
646 /* Analyze a Thumb prologue, looking for a recognizable stack frame
647 and frame pointer. Scan until we encounter a store that could
648 clobber the stack frame unexpectedly, or an unknown instruction.
649 Return the last address which is definitely safe to skip for an
650 initial breakpoint. */
651
652 static CORE_ADDR
653 thumb_analyze_prologue (struct gdbarch *gdbarch,
654 CORE_ADDR start, CORE_ADDR limit,
655 struct arm_prologue_cache *cache)
656 {
657 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
658 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
659 int i;
660 pv_t regs[16];
661 CORE_ADDR offset;
662 CORE_ADDR unrecognized_pc = 0;
663
664 for (i = 0; i < 16; i++)
665 regs[i] = pv_register (i, 0);
666 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
667
668 while (start < limit)
669 {
670 unsigned short insn;
671
672 insn = read_code_unsigned_integer (start, 2, byte_order_for_code);
673
674 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
675 {
676 int regno;
677 int mask;
678
679 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
680 break;
681
682 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
683 whether to save LR (R14). */
684 mask = (insn & 0xff) | ((insn & 0x100) << 6);
685
686 /* Calculate offsets of saved R0-R7 and LR. */
687 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
688 if (mask & (1 << regno))
689 {
690 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
691 -4);
692 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
693 }
694 }
695 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
696 {
697 offset = (insn & 0x7f) << 2; /* get scaled offset */
698 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
699 -offset);
700 }
701 else if (thumb_instruction_restores_sp (insn))
702 {
703 /* Don't scan past the epilogue. */
704 break;
705 }
706 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
707 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
708 (insn & 0xff) << 2);
709 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
710 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
711 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
712 bits (insn, 6, 8));
713 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
714 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
715 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
716 bits (insn, 0, 7));
717 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
718 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
719 && pv_is_constant (regs[bits (insn, 3, 5)]))
720 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
721 regs[bits (insn, 6, 8)]);
722 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
723 && pv_is_constant (regs[bits (insn, 3, 6)]))
724 {
725 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
726 int rm = bits (insn, 3, 6);
727 regs[rd] = pv_add (regs[rd], regs[rm]);
728 }
729 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
730 {
731 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
732 int src_reg = (insn & 0x78) >> 3;
733 regs[dst_reg] = regs[src_reg];
734 }
735 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
736 {
737 /* Handle stores to the stack. Normally pushes are used,
738 but with GCC -mtpcs-frame, there may be other stores
739 in the prologue to create the frame. */
740 int regno = (insn >> 8) & 0x7;
741 pv_t addr;
742
743 offset = (insn & 0xff) << 2;
744 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
745
746 if (stack.store_would_trash (addr))
747 break;
748
749 stack.store (addr, 4, regs[regno]);
750 }
751 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
752 {
753 int rd = bits (insn, 0, 2);
754 int rn = bits (insn, 3, 5);
755 pv_t addr;
756
757 offset = bits (insn, 6, 10) << 2;
758 addr = pv_add_constant (regs[rn], offset);
759
760 if (stack.store_would_trash (addr))
761 break;
762
763 stack.store (addr, 4, regs[rd]);
764 }
765 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
766 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
767 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
768 /* Ignore stores of argument registers to the stack. */
769 ;
770 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
771 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
772 /* Ignore block loads from the stack, potentially copying
773 parameters from memory. */
774 ;
775 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
776 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
777 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
778 /* Similarly ignore single loads from the stack. */
779 ;
780 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
781 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
782 /* Skip register copies, i.e. saves to another register
783 instead of the stack. */
784 ;
785 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
786 /* Recognize constant loads; even with small stacks these are necessary
787 on Thumb. */
788 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
789 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
790 {
791 /* Constant pool loads, for the same reason. */
792 unsigned int constant;
793 CORE_ADDR loc;
794
795 loc = start + 4 + bits (insn, 0, 7) * 4;
796 constant = read_memory_unsigned_integer (loc, 4, byte_order);
797 regs[bits (insn, 8, 10)] = pv_constant (constant);
798 }
799 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
800 {
801 unsigned short inst2;
802
803 inst2 = read_code_unsigned_integer (start + 2, 2,
804 byte_order_for_code);
805
806 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
807 {
808 /* BL, BLX. Allow some special function calls when
809 skipping the prologue; GCC generates these before
810 storing arguments to the stack. */
811 CORE_ADDR nextpc;
812 int j1, j2, imm1, imm2;
813
814 imm1 = sbits (insn, 0, 10);
815 imm2 = bits (inst2, 0, 10);
816 j1 = bit (inst2, 13);
817 j2 = bit (inst2, 11);
818
819 offset = ((imm1 << 12) + (imm2 << 1));
820 offset ^= ((!j2) << 22) | ((!j1) << 23);
821
822 nextpc = start + 4 + offset;
823 /* For BLX make sure to clear the low bits. */
824 if (bit (inst2, 12) == 0)
825 nextpc = nextpc & 0xfffffffc;
826
827 if (!skip_prologue_function (gdbarch, nextpc,
828 bit (inst2, 12) != 0))
829 break;
830 }
831
832 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
833 { registers } */
834 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
835 {
836 pv_t addr = regs[bits (insn, 0, 3)];
837 int regno;
838
839 if (stack.store_would_trash (addr))
840 break;
841
842 /* Calculate offsets of saved registers. */
843 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
844 if (inst2 & (1 << regno))
845 {
846 addr = pv_add_constant (addr, -4);
847 stack.store (addr, 4, regs[regno]);
848 }
849
850 if (insn & 0x0020)
851 regs[bits (insn, 0, 3)] = addr;
852 }
853
854 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
855 [Rn, #+/-imm]{!} */
856 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
857 {
858 int regno1 = bits (inst2, 12, 15);
859 int regno2 = bits (inst2, 8, 11);
860 pv_t addr = regs[bits (insn, 0, 3)];
861
862 offset = inst2 & 0xff;
863 if (insn & 0x0080)
864 addr = pv_add_constant (addr, offset);
865 else
866 addr = pv_add_constant (addr, -offset);
867
868 if (stack.store_would_trash (addr))
869 break;
870
871 stack.store (addr, 4, regs[regno1]);
872 stack.store (pv_add_constant (addr, 4),
873 4, regs[regno2]);
874
875 if (insn & 0x0020)
876 regs[bits (insn, 0, 3)] = addr;
877 }
878
879 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
880 && (inst2 & 0x0c00) == 0x0c00
881 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
882 {
883 int regno = bits (inst2, 12, 15);
884 pv_t addr = regs[bits (insn, 0, 3)];
885
886 offset = inst2 & 0xff;
887 if (inst2 & 0x0200)
888 addr = pv_add_constant (addr, offset);
889 else
890 addr = pv_add_constant (addr, -offset);
891
892 if (stack.store_would_trash (addr))
893 break;
894
895 stack.store (addr, 4, regs[regno]);
896
897 if (inst2 & 0x0100)
898 regs[bits (insn, 0, 3)] = addr;
899 }
900
901 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
902 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
903 {
904 int regno = bits (inst2, 12, 15);
905 pv_t addr;
906
907 offset = inst2 & 0xfff;
908 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
909
910 if (stack.store_would_trash (addr))
911 break;
912
913 stack.store (addr, 4, regs[regno]);
914 }
915
916 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
917 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
918 /* Ignore stores of argument registers to the stack. */
919 ;
920
921 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
922 && (inst2 & 0x0d00) == 0x0c00
923 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
924 /* Ignore stores of argument registers to the stack. */
925 ;
926
927 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
928 { registers } */
929 && (inst2 & 0x8000) == 0x0000
930 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
931 /* Ignore block loads from the stack, potentially copying
932 parameters from memory. */
933 ;
934
935 else if ((insn & 0xffb0) == 0xe950 /* ldrd Rt, Rt2,
936 [Rn, #+/-imm] */
937 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
938 /* Similarly ignore dual loads from the stack. */
939 ;
940
941 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
942 && (inst2 & 0x0d00) == 0x0c00
943 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
944 /* Similarly ignore single loads from the stack. */
945 ;
946
947 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
948 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
949 /* Similarly ignore single loads from the stack. */
950 ;
951
952 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
953 && (inst2 & 0x8000) == 0x0000)
954 {
955 unsigned int imm = ((bits (insn, 10, 10) << 11)
956 | (bits (inst2, 12, 14) << 8)
957 | bits (inst2, 0, 7));
958
959 regs[bits (inst2, 8, 11)]
960 = pv_add_constant (regs[bits (insn, 0, 3)],
961 thumb_expand_immediate (imm));
962 }
963
964 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
965 && (inst2 & 0x8000) == 0x0000)
966 {
967 unsigned int imm = ((bits (insn, 10, 10) << 11)
968 | (bits (inst2, 12, 14) << 8)
969 | bits (inst2, 0, 7));
970
971 regs[bits (inst2, 8, 11)]
972 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
973 }
974
975 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
976 && (inst2 & 0x8000) == 0x0000)
977 {
978 unsigned int imm = ((bits (insn, 10, 10) << 11)
979 | (bits (inst2, 12, 14) << 8)
980 | bits (inst2, 0, 7));
981
982 regs[bits (inst2, 8, 11)]
983 = pv_add_constant (regs[bits (insn, 0, 3)],
984 - (CORE_ADDR) thumb_expand_immediate (imm));
985 }
986
987 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
988 && (inst2 & 0x8000) == 0x0000)
989 {
990 unsigned int imm = ((bits (insn, 10, 10) << 11)
991 | (bits (inst2, 12, 14) << 8)
992 | bits (inst2, 0, 7));
993
994 regs[bits (inst2, 8, 11)]
995 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
996 }
997
998 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
999 {
1000 unsigned int imm = ((bits (insn, 10, 10) << 11)
1001 | (bits (inst2, 12, 14) << 8)
1002 | bits (inst2, 0, 7));
1003
1004 regs[bits (inst2, 8, 11)]
1005 = pv_constant (thumb_expand_immediate (imm));
1006 }
1007
1008 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
1009 {
1010 unsigned int imm
1011 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
1012
1013 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1014 }
1015
1016 else if (insn == 0xea5f /* mov.w Rd,Rm */
1017 && (inst2 & 0xf0f0) == 0)
1018 {
1019 int dst_reg = (inst2 & 0x0f00) >> 8;
1020 int src_reg = inst2 & 0xf;
1021 regs[dst_reg] = regs[src_reg];
1022 }
1023
1024 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1025 {
1026 /* Constant pool loads. */
1027 unsigned int constant;
1028 CORE_ADDR loc;
1029
1030 offset = bits (inst2, 0, 11);
1031 if (insn & 0x0080)
1032 loc = start + 4 + offset;
1033 else
1034 loc = start + 4 - offset;
1035
1036 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1037 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1038 }
1039
1040 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1041 {
1042 /* Constant pool loads. */
1043 unsigned int constant;
1044 CORE_ADDR loc;
1045
1046 offset = bits (inst2, 0, 7) << 2;
1047 if (insn & 0x0080)
1048 loc = start + 4 + offset;
1049 else
1050 loc = start + 4 - offset;
1051
1052 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1053 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1054
1055 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1056 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1057 }
1058
1059 else if (thumb2_instruction_changes_pc (insn, inst2))
1060 {
1061 /* Don't scan past anything that might change control flow. */
1062 break;
1063 }
1064 else
1065 {
1066 /* The optimizer might shove anything into the prologue,
1067 so we just skip what we don't recognize. */
1068 unrecognized_pc = start;
1069 }
1070
1071 start += 2;
1072 }
1073 else if (thumb_instruction_changes_pc (insn))
1074 {
1075 /* Don't scan past anything that might change control flow. */
1076 break;
1077 }
1078 else
1079 {
1080 /* The optimizer might shove anything into the prologue,
1081 so we just skip what we don't recognize. */
1082 unrecognized_pc = start;
1083 }
1084
1085 start += 2;
1086 }
1087
1088 if (arm_debug)
1089 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1090 paddress (gdbarch, start));
1091
1092 if (unrecognized_pc == 0)
1093 unrecognized_pc = start;
1094
1095 if (cache == NULL)
1096 return unrecognized_pc;
1097
1098 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1099 {
1100 /* Frame pointer is fp. Frame size is constant. */
1101 cache->framereg = ARM_FP_REGNUM;
1102 cache->framesize = -regs[ARM_FP_REGNUM].k;
1103 }
1104 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1105 {
1106 /* Frame pointer is r7. Frame size is constant. */
1107 cache->framereg = THUMB_FP_REGNUM;
1108 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1109 }
1110 else
1111 {
1112 /* Try the stack pointer... this is a bit desperate. */
1113 cache->framereg = ARM_SP_REGNUM;
1114 cache->framesize = -regs[ARM_SP_REGNUM].k;
1115 }
1116
1117 for (i = 0; i < 16; i++)
1118 if (stack.find_reg (gdbarch, i, &offset))
1119 cache->saved_regs[i].addr = offset;
1120
1121 return unrecognized_pc;
1122 }
1123
1124
1125 /* Try to analyze the instructions starting from PC, which load symbol
1126 __stack_chk_guard. Return the address of instruction after loading this
1127 symbol, set the dest register number to *BASEREG, and set the size of
1128 instructions for loading symbol in OFFSET. Return 0 if instructions are
1129 not recognized. */
1130
1131 static CORE_ADDR
1132 arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
1133 unsigned int *destreg, int *offset)
1134 {
1135 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1136 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1137 unsigned int low, high, address;
1138
1139 address = 0;
1140 if (is_thumb)
1141 {
1142 unsigned short insn1
1143 = read_code_unsigned_integer (pc, 2, byte_order_for_code);
1144
1145 if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
1146 {
1147 *destreg = bits (insn1, 8, 10);
1148 *offset = 2;
1149 address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
1150 address = read_memory_unsigned_integer (address, 4,
1151 byte_order_for_code);
1152 }
1153 else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
1154 {
1155 unsigned short insn2
1156 = read_code_unsigned_integer (pc + 2, 2, byte_order_for_code);
1157
1158 low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1159
1160 insn1
1161 = read_code_unsigned_integer (pc + 4, 2, byte_order_for_code);
1162 insn2
1163 = read_code_unsigned_integer (pc + 6, 2, byte_order_for_code);
1164
1165 /* movt Rd, #const */
1166 if ((insn1 & 0xfbc0) == 0xf2c0)
1167 {
1168 high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1169 *destreg = bits (insn2, 8, 11);
1170 *offset = 8;
1171 address = (high << 16 | low);
1172 }
1173 }
1174 }
1175 else
1176 {
1177 unsigned int insn
1178 = read_code_unsigned_integer (pc, 4, byte_order_for_code);
1179
1180 if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
1181 {
1182 address = bits (insn, 0, 11) + pc + 8;
1183 address = read_memory_unsigned_integer (address, 4,
1184 byte_order_for_code);
1185
1186 *destreg = bits (insn, 12, 15);
1187 *offset = 4;
1188 }
1189 else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1190 {
1191 low = EXTRACT_MOVW_MOVT_IMM_A (insn);
1192
1193 insn
1194 = read_code_unsigned_integer (pc + 4, 4, byte_order_for_code);
1195
1196 if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1197 {
1198 high = EXTRACT_MOVW_MOVT_IMM_A (insn);
1199 *destreg = bits (insn, 12, 15);
1200 *offset = 8;
1201 address = (high << 16 | low);
1202 }
1203 }
1204 }
1205
1206 return address;
1207 }
1208
1209 /* Try to skip a sequence of instructions used for stack protector. If PC
1210 points to the first instruction of this sequence, return the address of
1211 first instruction after this sequence, otherwise, return original PC.
1212
1213 On arm, this sequence of instructions is composed of mainly three steps,
1214 Step 1: load symbol __stack_chk_guard,
1215 Step 2: load from address of __stack_chk_guard,
1216 Step 3: store it to somewhere else.
1217
1218 Usually, instructions on step 2 and step 3 are the same on various ARM
1219 architectures. On step 2, it is one instruction 'ldr Rx, [Rn, #0]', and
1220 on step 3, it is also one instruction 'str Rx, [r7, #immd]'. However,
1221 instructions in step 1 vary from different ARM architectures. On ARMv7,
1222 they are,
1223
1224 movw Rn, #:lower16:__stack_chk_guard
1225 movt Rn, #:upper16:__stack_chk_guard
1226
1227 On ARMv5t, it is,
1228
1229 ldr Rn, .Label
1230 ....
1231 .Lable:
1232 .word __stack_chk_guard
1233
1234 Since ldr/str is a very popular instruction, we can't use them as
1235 'fingerprint' or 'signature' of stack protector sequence. Here we choose
1236 sequence {movw/movt, ldr}/ldr/str plus symbol __stack_chk_guard, if not
1237 stripped, as the 'fingerprint' of a stack protector cdoe sequence. */
1238
1239 static CORE_ADDR
1240 arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
1241 {
1242 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1243 unsigned int basereg;
1244 struct bound_minimal_symbol stack_chk_guard;
1245 int offset;
1246 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1247 CORE_ADDR addr;
1248
1249 /* Try to parse the instructions in Step 1. */
1250 addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1251 &basereg, &offset);
1252 if (!addr)
1253 return pc;
1254
1255 stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
1256 /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
1257 Otherwise, this sequence cannot be for stack protector. */
1258 if (stack_chk_guard.minsym == NULL
1259 || !startswith (MSYMBOL_LINKAGE_NAME (stack_chk_guard.minsym), "__stack_chk_guard"))
1260 return pc;
1261
1262 if (is_thumb)
1263 {
1264 unsigned int destreg;
1265 unsigned short insn
1266 = read_code_unsigned_integer (pc + offset, 2, byte_order_for_code);
1267
1268 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1269 if ((insn & 0xf800) != 0x6800)
1270 return pc;
1271 if (bits (insn, 3, 5) != basereg)
1272 return pc;
1273 destreg = bits (insn, 0, 2);
1274
1275 insn = read_code_unsigned_integer (pc + offset + 2, 2,
1276 byte_order_for_code);
1277 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1278 if ((insn & 0xf800) != 0x6000)
1279 return pc;
1280 if (destreg != bits (insn, 0, 2))
1281 return pc;
1282 }
1283 else
1284 {
1285 unsigned int destreg;
1286 unsigned int insn
1287 = read_code_unsigned_integer (pc + offset, 4, byte_order_for_code);
1288
1289 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1290 if ((insn & 0x0e500000) != 0x04100000)
1291 return pc;
1292 if (bits (insn, 16, 19) != basereg)
1293 return pc;
1294 destreg = bits (insn, 12, 15);
1295 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1296 insn = read_code_unsigned_integer (pc + offset + 4,
1297 4, byte_order_for_code);
1298 if ((insn & 0x0e500000) != 0x04000000)
1299 return pc;
1300 if (bits (insn, 12, 15) != destreg)
1301 return pc;
1302 }
1303 /* The size of total two instructions ldr/str is 4 on Thumb-2, while 8
1304 on arm. */
1305 if (is_thumb)
1306 return pc + offset + 4;
1307 else
1308 return pc + offset + 8;
1309 }
1310
1311 /* Advance the PC across any function entry prologue instructions to
1312 reach some "real" code.
1313
1314 The APCS (ARM Procedure Call Standard) defines the following
1315 prologue:
1316
1317 mov ip, sp
1318 [stmfd sp!, {a1,a2,a3,a4}]
1319 stmfd sp!, {...,fp,ip,lr,pc}
1320 [stfe f7, [sp, #-12]!]
1321 [stfe f6, [sp, #-12]!]
1322 [stfe f5, [sp, #-12]!]
1323 [stfe f4, [sp, #-12]!]
1324 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1325
1326 static CORE_ADDR
1327 arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1328 {
1329 CORE_ADDR func_addr, limit_pc;
1330
1331 /* See if we can determine the end of the prologue via the symbol table.
1332 If so, then return either PC, or the PC after the prologue, whichever
1333 is greater. */
1334 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
1335 {
1336 CORE_ADDR post_prologue_pc
1337 = skip_prologue_using_sal (gdbarch, func_addr);
1338 struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);
1339
1340 if (post_prologue_pc)
1341 post_prologue_pc
1342 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1343
1344
1345 /* GCC always emits a line note before the prologue and another
1346 one after, even if the two are at the same address or on the
1347 same line. Take advantage of this so that we do not need to
1348 know every instruction that might appear in the prologue. We
1349 will have producer information for most binaries; if it is
1350 missing (e.g. for -gstabs), assuming the GNU tools. */
1351 if (post_prologue_pc
1352 && (cust == NULL
1353 || COMPUNIT_PRODUCER (cust) == NULL
1354 || startswith (COMPUNIT_PRODUCER (cust), "GNU ")
1355 || startswith (COMPUNIT_PRODUCER (cust), "clang ")))
1356 return post_prologue_pc;
1357
1358 if (post_prologue_pc != 0)
1359 {
1360 CORE_ADDR analyzed_limit;
1361
1362 /* For non-GCC compilers, make sure the entire line is an
1363 acceptable prologue; GDB will round this function's
1364 return value up to the end of the following line so we
1365 can not skip just part of a line (and we do not want to).
1366
1367 RealView does not treat the prologue specially, but does
1368 associate prologue code with the opening brace; so this
1369 lets us skip the first line if we think it is the opening
1370 brace. */
1371 if (arm_pc_is_thumb (gdbarch, func_addr))
1372 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1373 post_prologue_pc, NULL);
1374 else
1375 analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
1376 post_prologue_pc, NULL);
1377
1378 if (analyzed_limit != post_prologue_pc)
1379 return func_addr;
1380
1381 return post_prologue_pc;
1382 }
1383 }
1384
1385 /* Can't determine prologue from the symbol table, need to examine
1386 instructions. */
1387
1388 /* Find an upper limit on the function prologue using the debug
1389 information. If the debug information could not be used to provide
1390 that bound, then use an arbitrary large number as the upper bound. */
1391 /* Like arm_scan_prologue, stop no later than pc + 64. */
1392 limit_pc = skip_prologue_using_sal (gdbarch, pc);
1393 if (limit_pc == 0)
1394 limit_pc = pc + 64; /* Magic. */
1395
1396
1397 /* Check if this is Thumb code. */
1398 if (arm_pc_is_thumb (gdbarch, pc))
1399 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1400 else
1401 return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1402 }
1403
1404 /* *INDENT-OFF* */
1405 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1406 This function decodes a Thumb function prologue to determine:
1407 1) the size of the stack frame
1408 2) which registers are saved on it
1409 3) the offsets of saved regs
1410 4) the offset from the stack pointer to the frame pointer
1411
1412 A typical Thumb function prologue would create this stack frame
1413 (offsets relative to FP)
1414 old SP -> 24 stack parameters
1415 20 LR
1416 16 R7
1417 R7 -> 0 local variables (16 bytes)
1418 SP -> -12 additional stack space (12 bytes)
1419 The frame size would thus be 36 bytes, and the frame offset would be
1420 12 bytes. The frame register is R7.
1421
1422 The comments for thumb_skip_prolog() describe the algorithm we use
1423 to detect the end of the prolog. */
1424 /* *INDENT-ON* */
1425
1426 static void
1427 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1428 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1429 {
1430 CORE_ADDR prologue_start;
1431 CORE_ADDR prologue_end;
1432
1433 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1434 &prologue_end))
1435 {
1436 /* See comment in arm_scan_prologue for an explanation of
1437 this heuristics. */
1438 if (prologue_end > prologue_start + 64)
1439 {
1440 prologue_end = prologue_start + 64;
1441 }
1442 }
1443 else
1444 /* We're in the boondocks: we have no idea where the start of the
1445 function is. */
1446 return;
1447
1448 prologue_end = std::min (prologue_end, prev_pc);
1449
1450 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1451 }
1452
1453 /* Return 1 if the ARM instruction INSN restores SP in epilogue, 0
1454 otherwise. */
1455
1456 static int
1457 arm_instruction_restores_sp (unsigned int insn)
1458 {
1459 if (bits (insn, 28, 31) != INST_NV)
1460 {
1461 if ((insn & 0x0df0f000) == 0x0080d000
1462 /* ADD SP (register or immediate). */
1463 || (insn & 0x0df0f000) == 0x0040d000
1464 /* SUB SP (register or immediate). */
1465 || (insn & 0x0ffffff0) == 0x01a0d000
1466 /* MOV SP. */
1467 || (insn & 0x0fff0000) == 0x08bd0000
1468 /* POP (LDMIA). */
1469 || (insn & 0x0fff0000) == 0x049d0000)
1470 /* POP of a single register. */
1471 return 1;
1472 }
1473
1474 return 0;
1475 }
1476
1477 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1478 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1479 fill it in. Return the first address not recognized as a prologue
1480 instruction.
1481
1482 We recognize all the instructions typically found in ARM prologues,
1483 plus harmless instructions which can be skipped (either for analysis
1484 purposes, or a more restrictive set that can be skipped when finding
1485 the end of the prologue). */
1486
1487 static CORE_ADDR
1488 arm_analyze_prologue (struct gdbarch *gdbarch,
1489 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1490 struct arm_prologue_cache *cache)
1491 {
1492 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1493 int regno;
1494 CORE_ADDR offset, current_pc;
1495 pv_t regs[ARM_FPS_REGNUM];
1496 CORE_ADDR unrecognized_pc = 0;
1497
1498 /* Search the prologue looking for instructions that set up the
1499 frame pointer, adjust the stack pointer, and save registers.
1500
1501 Be careful, however, and if it doesn't look like a prologue,
1502 don't try to scan it. If, for instance, a frameless function
1503 begins with stmfd sp!, then we will tell ourselves there is
1504 a frame, which will confuse stack traceback, as well as "finish"
1505 and other operations that rely on a knowledge of the stack
1506 traceback. */
1507
1508 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1509 regs[regno] = pv_register (regno, 0);
1510 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1511
1512 for (current_pc = prologue_start;
1513 current_pc < prologue_end;
1514 current_pc += 4)
1515 {
1516 unsigned int insn
1517 = read_code_unsigned_integer (current_pc, 4, byte_order_for_code);
1518
1519 if (insn == 0xe1a0c00d) /* mov ip, sp */
1520 {
1521 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1522 continue;
1523 }
1524 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1525 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1526 {
1527 unsigned imm = insn & 0xff; /* immediate value */
1528 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1529 int rd = bits (insn, 12, 15);
1530 imm = (imm >> rot) | (imm << (32 - rot));
1531 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1532 continue;
1533 }
1534 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1535 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1536 {
1537 unsigned imm = insn & 0xff; /* immediate value */
1538 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1539 int rd = bits (insn, 12, 15);
1540 imm = (imm >> rot) | (imm << (32 - rot));
1541 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1542 continue;
1543 }
1544 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1545 [sp, #-4]! */
1546 {
1547 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1548 break;
1549 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1550 stack.store (regs[ARM_SP_REGNUM], 4,
1551 regs[bits (insn, 12, 15)]);
1552 continue;
1553 }
1554 else if ((insn & 0xffff0000) == 0xe92d0000)
1555 /* stmfd sp!, {..., fp, ip, lr, pc}
1556 or
1557 stmfd sp!, {a1, a2, a3, a4} */
1558 {
1559 int mask = insn & 0xffff;
1560
1561 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1562 break;
1563
1564 /* Calculate offsets of saved registers. */
1565 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1566 if (mask & (1 << regno))
1567 {
1568 regs[ARM_SP_REGNUM]
1569 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1570 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
1571 }
1572 }
1573 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1574 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1575 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1576 {
1577 /* No need to add this to saved_regs -- it's just an arg reg. */
1578 continue;
1579 }
1580 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1581 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1582 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1583 {
1584 /* No need to add this to saved_regs -- it's just an arg reg. */
1585 continue;
1586 }
1587 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1588 { registers } */
1589 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1590 {
1591 /* No need to add this to saved_regs -- it's just arg regs. */
1592 continue;
1593 }
1594 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1595 {
1596 unsigned imm = insn & 0xff; /* immediate value */
1597 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1598 imm = (imm >> rot) | (imm << (32 - rot));
1599 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1600 }
1601 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1602 {
1603 unsigned imm = insn & 0xff; /* immediate value */
1604 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1605 imm = (imm >> rot) | (imm << (32 - rot));
1606 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
1607 }
1608 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1609 [sp, -#c]! */
1610 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1611 {
1612 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1613 break;
1614
1615 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1616 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
1617 stack.store (regs[ARM_SP_REGNUM], 12, regs[regno]);
1618 }
1619 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1620 [sp!] */
1621 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1622 {
1623 int n_saved_fp_regs;
1624 unsigned int fp_start_reg, fp_bound_reg;
1625
1626 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1627 break;
1628
1629 if ((insn & 0x800) == 0x800) /* N0 is set */
1630 {
1631 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1632 n_saved_fp_regs = 3;
1633 else
1634 n_saved_fp_regs = 1;
1635 }
1636 else
1637 {
1638 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1639 n_saved_fp_regs = 2;
1640 else
1641 n_saved_fp_regs = 4;
1642 }
1643
1644 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
1645 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1646 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
1647 {
1648 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1649 stack.store (regs[ARM_SP_REGNUM], 12,
1650 regs[fp_start_reg++]);
1651 }
1652 }
1653 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1654 {
1655 /* Allow some special function calls when skipping the
1656 prologue; GCC generates these before storing arguments to
1657 the stack. */
1658 CORE_ADDR dest = BranchDest (current_pc, insn);
1659
1660 if (skip_prologue_function (gdbarch, dest, 0))
1661 continue;
1662 else
1663 break;
1664 }
1665 else if ((insn & 0xf0000000) != 0xe0000000)
1666 break; /* Condition not true, exit early. */
1667 else if (arm_instruction_changes_pc (insn))
1668 /* Don't scan past anything that might change control flow. */
1669 break;
1670 else if (arm_instruction_restores_sp (insn))
1671 {
1672 /* Don't scan past the epilogue. */
1673 break;
1674 }
1675 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1676 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1677 /* Ignore block loads from the stack, potentially copying
1678 parameters from memory. */
1679 continue;
1680 else if ((insn & 0xfc500000) == 0xe4100000
1681 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1682 /* Similarly ignore single loads from the stack. */
1683 continue;
1684 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1685 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1686 register instead of the stack. */
1687 continue;
1688 else
1689 {
1690 /* The optimizer might shove anything into the prologue, if
1691 we build up cache (cache != NULL) from scanning prologue,
1692 we just skip what we don't recognize and scan further to
1693 make cache as complete as possible. However, if we skip
1694 prologue, we'll stop immediately on unrecognized
1695 instruction. */
1696 unrecognized_pc = current_pc;
1697 if (cache != NULL)
1698 continue;
1699 else
1700 break;
1701 }
1702 }
1703
1704 if (unrecognized_pc == 0)
1705 unrecognized_pc = current_pc;
1706
1707 if (cache)
1708 {
1709 int framereg, framesize;
1710
1711 /* The frame size is just the distance from the frame register
1712 to the original stack pointer. */
1713 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1714 {
1715 /* Frame pointer is fp. */
1716 framereg = ARM_FP_REGNUM;
1717 framesize = -regs[ARM_FP_REGNUM].k;
1718 }
1719 else
1720 {
1721 /* Try the stack pointer... this is a bit desperate. */
1722 framereg = ARM_SP_REGNUM;
1723 framesize = -regs[ARM_SP_REGNUM].k;
1724 }
1725
1726 cache->framereg = framereg;
1727 cache->framesize = framesize;
1728
1729 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1730 if (stack.find_reg (gdbarch, regno, &offset))
1731 cache->saved_regs[regno].addr = offset;
1732 }
1733
1734 if (arm_debug)
1735 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1736 paddress (gdbarch, unrecognized_pc));
1737
1738 return unrecognized_pc;
1739 }
1740
1741 static void
1742 arm_scan_prologue (struct frame_info *this_frame,
1743 struct arm_prologue_cache *cache)
1744 {
1745 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1746 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1747 CORE_ADDR prologue_start, prologue_end;
1748 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1749 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
1750
1751 /* Assume there is no frame until proven otherwise. */
1752 cache->framereg = ARM_SP_REGNUM;
1753 cache->framesize = 0;
1754
1755 /* Check for Thumb prologue. */
1756 if (arm_frame_is_thumb (this_frame))
1757 {
1758 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1759 return;
1760 }
1761
1762 /* Find the function prologue. If we can't find the function in
1763 the symbol table, peek in the stack frame to find the PC. */
1764 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1765 &prologue_end))
1766 {
1767 /* One way to find the end of the prologue (which works well
1768 for unoptimized code) is to do the following:
1769
1770 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1771
1772 if (sal.line == 0)
1773 prologue_end = prev_pc;
1774 else if (sal.end < prologue_end)
1775 prologue_end = sal.end;
1776
1777 This mechanism is very accurate so long as the optimizer
1778 doesn't move any instructions from the function body into the
1779 prologue. If this happens, sal.end will be the last
1780 instruction in the first hunk of prologue code just before
1781 the first instruction that the scheduler has moved from
1782 the body to the prologue.
1783
1784 In order to make sure that we scan all of the prologue
1785 instructions, we use a slightly less accurate mechanism which
1786 may scan more than necessary. To help compensate for this
1787 lack of accuracy, the prologue scanning loop below contains
1788 several clauses which'll cause the loop to terminate early if
1789 an implausible prologue instruction is encountered.
1790
1791 The expression
1792
1793 prologue_start + 64
1794
1795 is a suitable endpoint since it accounts for the largest
1796 possible prologue plus up to five instructions inserted by
1797 the scheduler. */
1798
1799 if (prologue_end > prologue_start + 64)
1800 {
1801 prologue_end = prologue_start + 64; /* See above. */
1802 }
1803 }
1804 else
1805 {
1806 /* We have no symbol information. Our only option is to assume this
1807 function has a standard stack frame and the normal frame register.
1808 Then, we can find the value of our frame pointer on entrance to
1809 the callee (or at the present moment if this is the innermost frame).
1810 The value stored there should be the address of the stmfd + 8. */
1811 CORE_ADDR frame_loc;
1812 ULONGEST return_value;
1813
1814 /* AAPCS does not use a frame register, so we can abort here. */
1815 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_AAPCS)
1816 return;
1817
1818 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
1819 if (!safe_read_memory_unsigned_integer (frame_loc, 4, byte_order,
1820 &return_value))
1821 return;
1822 else
1823 {
1824 prologue_start = gdbarch_addr_bits_remove
1825 (gdbarch, return_value) - 8;
1826 prologue_end = prologue_start + 64; /* See above. */
1827 }
1828 }
1829
1830 if (prev_pc < prologue_end)
1831 prologue_end = prev_pc;
1832
1833 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1834 }
1835
1836 static struct arm_prologue_cache *
1837 arm_make_prologue_cache (struct frame_info *this_frame)
1838 {
1839 int reg;
1840 struct arm_prologue_cache *cache;
1841 CORE_ADDR unwound_fp;
1842
1843 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
1844 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
1845
1846 arm_scan_prologue (this_frame, cache);
1847
1848 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
1849 if (unwound_fp == 0)
1850 return cache;
1851
1852 cache->prev_sp = unwound_fp + cache->framesize;
1853
1854 /* Calculate actual addresses of saved registers using offsets
1855 determined by arm_scan_prologue. */
1856 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
1857 if (trad_frame_addr_p (cache->saved_regs, reg))
1858 cache->saved_regs[reg].addr += cache->prev_sp;
1859
1860 return cache;
1861 }
1862
1863 /* Implementation of the stop_reason hook for arm_prologue frames. */
1864
1865 static enum unwind_stop_reason
1866 arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
1867 void **this_cache)
1868 {
1869 struct arm_prologue_cache *cache;
1870 CORE_ADDR pc;
1871
1872 if (*this_cache == NULL)
1873 *this_cache = arm_make_prologue_cache (this_frame);
1874 cache = (struct arm_prologue_cache *) *this_cache;
1875
1876 /* This is meant to halt the backtrace at "_start". */
1877 pc = get_frame_pc (this_frame);
1878 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
1879 return UNWIND_OUTERMOST;
1880
1881 /* If we've hit a wall, stop. */
1882 if (cache->prev_sp == 0)
1883 return UNWIND_OUTERMOST;
1884
1885 return UNWIND_NO_REASON;
1886 }
1887
1888 /* Our frame ID for a normal frame is the current function's starting PC
1889 and the caller's SP when we were called. */
1890
1891 static void
1892 arm_prologue_this_id (struct frame_info *this_frame,
1893 void **this_cache,
1894 struct frame_id *this_id)
1895 {
1896 struct arm_prologue_cache *cache;
1897 struct frame_id id;
1898 CORE_ADDR pc, func;
1899
1900 if (*this_cache == NULL)
1901 *this_cache = arm_make_prologue_cache (this_frame);
1902 cache = (struct arm_prologue_cache *) *this_cache;
1903
1904 /* Use function start address as part of the frame ID. If we cannot
1905 identify the start address (due to missing symbol information),
1906 fall back to just using the current PC. */
1907 pc = get_frame_pc (this_frame);
1908 func = get_frame_func (this_frame);
1909 if (!func)
1910 func = pc;
1911
1912 id = frame_id_build (cache->prev_sp, func);
1913 *this_id = id;
1914 }
1915
1916 static struct value *
1917 arm_prologue_prev_register (struct frame_info *this_frame,
1918 void **this_cache,
1919 int prev_regnum)
1920 {
1921 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1922 struct arm_prologue_cache *cache;
1923
1924 if (*this_cache == NULL)
1925 *this_cache = arm_make_prologue_cache (this_frame);
1926 cache = (struct arm_prologue_cache *) *this_cache;
1927
1928 /* If we are asked to unwind the PC, then we need to return the LR
1929 instead. The prologue may save PC, but it will point into this
1930 frame's prologue, not the next frame's resume location. Also
1931 strip the saved T bit. A valid LR may have the low bit set, but
1932 a valid PC never does. */
1933 if (prev_regnum == ARM_PC_REGNUM)
1934 {
1935 CORE_ADDR lr;
1936
1937 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1938 return frame_unwind_got_constant (this_frame, prev_regnum,
1939 arm_addr_bits_remove (gdbarch, lr));
1940 }
1941
1942 /* SP is generally not saved to the stack, but this frame is
1943 identified by the next frame's stack pointer at the time of the call.
1944 The value was already reconstructed into PREV_SP. */
1945 if (prev_regnum == ARM_SP_REGNUM)
1946 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
1947
1948 /* The CPSR may have been changed by the call instruction and by the
1949 called function. The only bit we can reconstruct is the T bit,
1950 by checking the low bit of LR as of the call. This is a reliable
1951 indicator of Thumb-ness except for some ARM v4T pre-interworking
1952 Thumb code, which could get away with a clear low bit as long as
1953 the called function did not use bx. Guess that all other
1954 bits are unchanged; the condition flags are presumably lost,
1955 but the processor status is likely valid. */
1956 if (prev_regnum == ARM_PS_REGNUM)
1957 {
1958 CORE_ADDR lr, cpsr;
1959 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
1960
1961 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
1962 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1963 if (IS_THUMB_ADDR (lr))
1964 cpsr |= t_bit;
1965 else
1966 cpsr &= ~t_bit;
1967 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
1968 }
1969
1970 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
1971 prev_regnum);
1972 }
1973
1974 struct frame_unwind arm_prologue_unwind = {
1975 NORMAL_FRAME,
1976 arm_prologue_unwind_stop_reason,
1977 arm_prologue_this_id,
1978 arm_prologue_prev_register,
1979 NULL,
1980 default_frame_sniffer
1981 };
1982
1983 /* Maintain a list of ARM exception table entries per objfile, similar to the
1984 list of mapping symbols. We only cache entries for standard ARM-defined
1985 personality routines; the cache will contain only the frame unwinding
1986 instructions associated with the entry (not the descriptors). */
1987
1988 struct arm_exidx_entry
1989 {
1990 bfd_vma addr;
1991 gdb_byte *entry;
1992
1993 bool operator< (const arm_exidx_entry &other) const
1994 {
1995 return addr < other.addr;
1996 }
1997 };
1998
1999 struct arm_exidx_data
2000 {
2001 std::vector<std::vector<arm_exidx_entry>> section_maps;
2002 };
2003
2004 static const struct objfile_key<arm_exidx_data> arm_exidx_data_key;
2005
2006 static struct obj_section *
2007 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2008 {
2009 struct obj_section *osect;
2010
2011 ALL_OBJFILE_OSECTIONS (objfile, osect)
2012 if (bfd_section_flags (osect->the_bfd_section) & SEC_ALLOC)
2013 {
2014 bfd_vma start, size;
2015 start = bfd_section_vma (osect->the_bfd_section);
2016 size = bfd_section_size (osect->the_bfd_section);
2017
2018 if (start <= vma && vma < start + size)
2019 return osect;
2020 }
2021
2022 return NULL;
2023 }
2024
2025 /* Parse contents of exception table and exception index sections
2026 of OBJFILE, and fill in the exception table entry cache.
2027
2028 For each entry that refers to a standard ARM-defined personality
2029 routine, extract the frame unwinding instructions (from either
2030 the index or the table section). The unwinding instructions
2031 are normalized by:
2032 - extracting them from the rest of the table data
2033 - converting to host endianness
2034 - appending the implicit 0xb0 ("Finish") code
2035
2036 The extracted and normalized instructions are stored for later
2037 retrieval by the arm_find_exidx_entry routine. */
2038
2039 static void
2040 arm_exidx_new_objfile (struct objfile *objfile)
2041 {
2042 struct arm_exidx_data *data;
2043 asection *exidx, *extab;
2044 bfd_vma exidx_vma = 0, extab_vma = 0;
2045 LONGEST i;
2046
2047 /* If we've already touched this file, do nothing. */
2048 if (!objfile || arm_exidx_data_key.get (objfile) != NULL)
2049 return;
2050
2051 /* Read contents of exception table and index. */
2052 exidx = bfd_get_section_by_name (objfile->obfd, ELF_STRING_ARM_unwind);
2053 gdb::byte_vector exidx_data;
2054 if (exidx)
2055 {
2056 exidx_vma = bfd_section_vma (exidx);
2057 exidx_data.resize (bfd_section_size (exidx));
2058
2059 if (!bfd_get_section_contents (objfile->obfd, exidx,
2060 exidx_data.data (), 0,
2061 exidx_data.size ()))
2062 return;
2063 }
2064
2065 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2066 gdb::byte_vector extab_data;
2067 if (extab)
2068 {
2069 extab_vma = bfd_section_vma (extab);
2070 extab_data.resize (bfd_section_size (extab));
2071
2072 if (!bfd_get_section_contents (objfile->obfd, extab,
2073 extab_data.data (), 0,
2074 extab_data.size ()))
2075 return;
2076 }
2077
2078 /* Allocate exception table data structure. */
2079 data = arm_exidx_data_key.emplace (objfile);
2080 data->section_maps.resize (objfile->obfd->section_count);
2081
2082 /* Fill in exception table. */
2083 for (i = 0; i < exidx_data.size () / 8; i++)
2084 {
2085 struct arm_exidx_entry new_exidx_entry;
2086 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data.data () + i * 8);
2087 bfd_vma val = bfd_h_get_32 (objfile->obfd,
2088 exidx_data.data () + i * 8 + 4);
2089 bfd_vma addr = 0, word = 0;
2090 int n_bytes = 0, n_words = 0;
2091 struct obj_section *sec;
2092 gdb_byte *entry = NULL;
2093
2094 /* Extract address of start of function. */
2095 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2096 idx += exidx_vma + i * 8;
2097
2098 /* Find section containing function and compute section offset. */
2099 sec = arm_obj_section_from_vma (objfile, idx);
2100 if (sec == NULL)
2101 continue;
2102 idx -= bfd_section_vma (sec->the_bfd_section);
2103
2104 /* Determine address of exception table entry. */
2105 if (val == 1)
2106 {
2107 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2108 }
2109 else if ((val & 0xff000000) == 0x80000000)
2110 {
2111 /* Exception table entry embedded in .ARM.exidx
2112 -- must be short form. */
2113 word = val;
2114 n_bytes = 3;
2115 }
2116 else if (!(val & 0x80000000))
2117 {
2118 /* Exception table entry in .ARM.extab. */
2119 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2120 addr += exidx_vma + i * 8 + 4;
2121
2122 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_data.size ())
2123 {
2124 word = bfd_h_get_32 (objfile->obfd,
2125 extab_data.data () + addr - extab_vma);
2126 addr += 4;
2127
2128 if ((word & 0xff000000) == 0x80000000)
2129 {
2130 /* Short form. */
2131 n_bytes = 3;
2132 }
2133 else if ((word & 0xff000000) == 0x81000000
2134 || (word & 0xff000000) == 0x82000000)
2135 {
2136 /* Long form. */
2137 n_bytes = 2;
2138 n_words = ((word >> 16) & 0xff);
2139 }
2140 else if (!(word & 0x80000000))
2141 {
2142 bfd_vma pers;
2143 struct obj_section *pers_sec;
2144 int gnu_personality = 0;
2145
2146 /* Custom personality routine. */
2147 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2148 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2149
2150 /* Check whether we've got one of the variants of the
2151 GNU personality routines. */
2152 pers_sec = arm_obj_section_from_vma (objfile, pers);
2153 if (pers_sec)
2154 {
2155 static const char *personality[] =
2156 {
2157 "__gcc_personality_v0",
2158 "__gxx_personality_v0",
2159 "__gcj_personality_v0",
2160 "__gnu_objc_personality_v0",
2161 NULL
2162 };
2163
2164 CORE_ADDR pc = pers + obj_section_offset (pers_sec);
2165 int k;
2166
2167 for (k = 0; personality[k]; k++)
2168 if (lookup_minimal_symbol_by_pc_name
2169 (pc, personality[k], objfile))
2170 {
2171 gnu_personality = 1;
2172 break;
2173 }
2174 }
2175
2176 /* If so, the next word contains a word count in the high
2177 byte, followed by the same unwind instructions as the
2178 pre-defined forms. */
2179 if (gnu_personality
2180 && addr + 4 <= extab_vma + extab_data.size ())
2181 {
2182 word = bfd_h_get_32 (objfile->obfd,
2183 (extab_data.data ()
2184 + addr - extab_vma));
2185 addr += 4;
2186 n_bytes = 3;
2187 n_words = ((word >> 24) & 0xff);
2188 }
2189 }
2190 }
2191 }
2192
2193 /* Sanity check address. */
2194 if (n_words)
2195 if (addr < extab_vma
2196 || addr + 4 * n_words > extab_vma + extab_data.size ())
2197 n_words = n_bytes = 0;
2198
2199 /* The unwind instructions reside in WORD (only the N_BYTES least
2200 significant bytes are valid), followed by N_WORDS words in the
2201 extab section starting at ADDR. */
2202 if (n_bytes || n_words)
2203 {
2204 gdb_byte *p = entry
2205 = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
2206 n_bytes + n_words * 4 + 1);
2207
2208 while (n_bytes--)
2209 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2210
2211 while (n_words--)
2212 {
2213 word = bfd_h_get_32 (objfile->obfd,
2214 extab_data.data () + addr - extab_vma);
2215 addr += 4;
2216
2217 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2218 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2219 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2220 *p++ = (gdb_byte) (word & 0xff);
2221 }
2222
2223 /* Implied "Finish" to terminate the list. */
2224 *p++ = 0xb0;
2225 }
2226
2227 /* Push entry onto vector. They are guaranteed to always
2228 appear in order of increasing addresses. */
2229 new_exidx_entry.addr = idx;
2230 new_exidx_entry.entry = entry;
2231 data->section_maps[sec->the_bfd_section->index].push_back
2232 (new_exidx_entry);
2233 }
2234 }
2235
2236 /* Search for the exception table entry covering MEMADDR. If one is found,
2237 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2238 set *START to the start of the region covered by this entry. */
2239
2240 static gdb_byte *
2241 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2242 {
2243 struct obj_section *sec;
2244
2245 sec = find_pc_section (memaddr);
2246 if (sec != NULL)
2247 {
2248 struct arm_exidx_data *data;
2249 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2250
2251 data = arm_exidx_data_key.get (sec->objfile);
2252 if (data != NULL)
2253 {
2254 std::vector<arm_exidx_entry> &map
2255 = data->section_maps[sec->the_bfd_section->index];
2256 if (!map.empty ())
2257 {
2258 auto idx = std::lower_bound (map.begin (), map.end (), map_key);
2259
2260 /* std::lower_bound finds the earliest ordered insertion
2261 point. If the following symbol starts at this exact
2262 address, we use that; otherwise, the preceding
2263 exception table entry covers this address. */
2264 if (idx < map.end ())
2265 {
2266 if (idx->addr == map_key.addr)
2267 {
2268 if (start)
2269 *start = idx->addr + obj_section_addr (sec);
2270 return idx->entry;
2271 }
2272 }
2273
2274 if (idx > map.begin ())
2275 {
2276 idx = idx - 1;
2277 if (start)
2278 *start = idx->addr + obj_section_addr (sec);
2279 return idx->entry;
2280 }
2281 }
2282 }
2283 }
2284
2285 return NULL;
2286 }
2287
2288 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2289 instruction list from the ARM exception table entry ENTRY, allocate and
2290 return a prologue cache structure describing how to unwind this frame.
2291
2292 Return NULL if the unwinding instruction list contains a "spare",
2293 "reserved" or "refuse to unwind" instruction as defined in section
2294 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2295 for the ARM Architecture" document. */
2296
2297 static struct arm_prologue_cache *
2298 arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2299 {
2300 CORE_ADDR vsp = 0;
2301 int vsp_valid = 0;
2302
2303 struct arm_prologue_cache *cache;
2304 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2305 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2306
2307 for (;;)
2308 {
2309 gdb_byte insn;
2310
2311 /* Whenever we reload SP, we actually have to retrieve its
2312 actual value in the current frame. */
2313 if (!vsp_valid)
2314 {
2315 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2316 {
2317 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2318 vsp = get_frame_register_unsigned (this_frame, reg);
2319 }
2320 else
2321 {
2322 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
2323 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2324 }
2325
2326 vsp_valid = 1;
2327 }
2328
2329 /* Decode next unwind instruction. */
2330 insn = *entry++;
2331
2332 if ((insn & 0xc0) == 0)
2333 {
2334 int offset = insn & 0x3f;
2335 vsp += (offset << 2) + 4;
2336 }
2337 else if ((insn & 0xc0) == 0x40)
2338 {
2339 int offset = insn & 0x3f;
2340 vsp -= (offset << 2) + 4;
2341 }
2342 else if ((insn & 0xf0) == 0x80)
2343 {
2344 int mask = ((insn & 0xf) << 8) | *entry++;
2345 int i;
2346
2347 /* The special case of an all-zero mask identifies
2348 "Refuse to unwind". We return NULL to fall back
2349 to the prologue analyzer. */
2350 if (mask == 0)
2351 return NULL;
2352
2353 /* Pop registers r4..r15 under mask. */
2354 for (i = 0; i < 12; i++)
2355 if (mask & (1 << i))
2356 {
2357 cache->saved_regs[4 + i].addr = vsp;
2358 vsp += 4;
2359 }
2360
2361 /* Special-case popping SP -- we need to reload vsp. */
2362 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2363 vsp_valid = 0;
2364 }
2365 else if ((insn & 0xf0) == 0x90)
2366 {
2367 int reg = insn & 0xf;
2368
2369 /* Reserved cases. */
2370 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2371 return NULL;
2372
2373 /* Set SP from another register and mark VSP for reload. */
2374 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2375 vsp_valid = 0;
2376 }
2377 else if ((insn & 0xf0) == 0xa0)
2378 {
2379 int count = insn & 0x7;
2380 int pop_lr = (insn & 0x8) != 0;
2381 int i;
2382
2383 /* Pop r4..r[4+count]. */
2384 for (i = 0; i <= count; i++)
2385 {
2386 cache->saved_regs[4 + i].addr = vsp;
2387 vsp += 4;
2388 }
2389
2390 /* If indicated by flag, pop LR as well. */
2391 if (pop_lr)
2392 {
2393 cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
2394 vsp += 4;
2395 }
2396 }
2397 else if (insn == 0xb0)
2398 {
2399 /* We could only have updated PC by popping into it; if so, it
2400 will show up as address. Otherwise, copy LR into PC. */
2401 if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
2402 cache->saved_regs[ARM_PC_REGNUM]
2403 = cache->saved_regs[ARM_LR_REGNUM];
2404
2405 /* We're done. */
2406 break;
2407 }
2408 else if (insn == 0xb1)
2409 {
2410 int mask = *entry++;
2411 int i;
2412
2413 /* All-zero mask and mask >= 16 is "spare". */
2414 if (mask == 0 || mask >= 16)
2415 return NULL;
2416
2417 /* Pop r0..r3 under mask. */
2418 for (i = 0; i < 4; i++)
2419 if (mask & (1 << i))
2420 {
2421 cache->saved_regs[i].addr = vsp;
2422 vsp += 4;
2423 }
2424 }
2425 else if (insn == 0xb2)
2426 {
2427 ULONGEST offset = 0;
2428 unsigned shift = 0;
2429
2430 do
2431 {
2432 offset |= (*entry & 0x7f) << shift;
2433 shift += 7;
2434 }
2435 while (*entry++ & 0x80);
2436
2437 vsp += 0x204 + (offset << 2);
2438 }
2439 else if (insn == 0xb3)
2440 {
2441 int start = *entry >> 4;
2442 int count = (*entry++) & 0xf;
2443 int i;
2444
2445 /* Only registers D0..D15 are valid here. */
2446 if (start + count >= 16)
2447 return NULL;
2448
2449 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2450 for (i = 0; i <= count; i++)
2451 {
2452 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2453 vsp += 8;
2454 }
2455
2456 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2457 vsp += 4;
2458 }
2459 else if ((insn & 0xf8) == 0xb8)
2460 {
2461 int count = insn & 0x7;
2462 int i;
2463
2464 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2465 for (i = 0; i <= count; i++)
2466 {
2467 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2468 vsp += 8;
2469 }
2470
2471 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2472 vsp += 4;
2473 }
2474 else if (insn == 0xc6)
2475 {
2476 int start = *entry >> 4;
2477 int count = (*entry++) & 0xf;
2478 int i;
2479
2480 /* Only registers WR0..WR15 are valid. */
2481 if (start + count >= 16)
2482 return NULL;
2483
2484 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2485 for (i = 0; i <= count; i++)
2486 {
2487 cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
2488 vsp += 8;
2489 }
2490 }
2491 else if (insn == 0xc7)
2492 {
2493 int mask = *entry++;
2494 int i;
2495
2496 /* All-zero mask and mask >= 16 is "spare". */
2497 if (mask == 0 || mask >= 16)
2498 return NULL;
2499
2500 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2501 for (i = 0; i < 4; i++)
2502 if (mask & (1 << i))
2503 {
2504 cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
2505 vsp += 4;
2506 }
2507 }
2508 else if ((insn & 0xf8) == 0xc0)
2509 {
2510 int count = insn & 0x7;
2511 int i;
2512
2513 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2514 for (i = 0; i <= count; i++)
2515 {
2516 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
2517 vsp += 8;
2518 }
2519 }
2520 else if (insn == 0xc8)
2521 {
2522 int start = *entry >> 4;
2523 int count = (*entry++) & 0xf;
2524 int i;
2525
2526 /* Only registers D0..D31 are valid. */
2527 if (start + count >= 16)
2528 return NULL;
2529
2530 /* Pop VFP double-precision registers
2531 D[16+start]..D[16+start+count]. */
2532 for (i = 0; i <= count; i++)
2533 {
2534 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
2535 vsp += 8;
2536 }
2537 }
2538 else if (insn == 0xc9)
2539 {
2540 int start = *entry >> 4;
2541 int count = (*entry++) & 0xf;
2542 int i;
2543
2544 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2545 for (i = 0; i <= count; i++)
2546 {
2547 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2548 vsp += 8;
2549 }
2550 }
2551 else if ((insn & 0xf8) == 0xd0)
2552 {
2553 int count = insn & 0x7;
2554 int i;
2555
2556 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2557 for (i = 0; i <= count; i++)
2558 {
2559 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2560 vsp += 8;
2561 }
2562 }
2563 else
2564 {
2565 /* Everything else is "spare". */
2566 return NULL;
2567 }
2568 }
2569
2570 /* If we restore SP from a register, assume this was the frame register.
2571 Otherwise just fall back to SP as frame register. */
2572 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2573 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2574 else
2575 cache->framereg = ARM_SP_REGNUM;
2576
2577 /* Determine offset to previous frame. */
2578 cache->framesize
2579 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2580
2581 /* We already got the previous SP. */
2582 cache->prev_sp = vsp;
2583
2584 return cache;
2585 }
2586
2587 /* Unwinding via ARM exception table entries. Note that the sniffer
2588 already computes a filled-in prologue cache, which is then used
2589 with the same arm_prologue_this_id and arm_prologue_prev_register
2590 routines also used for prologue-parsing based unwinding. */
2591
2592 static int
2593 arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2594 struct frame_info *this_frame,
2595 void **this_prologue_cache)
2596 {
2597 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2598 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2599 CORE_ADDR addr_in_block, exidx_region, func_start;
2600 struct arm_prologue_cache *cache;
2601 gdb_byte *entry;
2602
2603 /* See if we have an ARM exception table entry covering this address. */
2604 addr_in_block = get_frame_address_in_block (this_frame);
2605 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2606 if (!entry)
2607 return 0;
2608
2609 /* The ARM exception table does not describe unwind information
2610 for arbitrary PC values, but is guaranteed to be correct only
2611 at call sites. We have to decide here whether we want to use
2612 ARM exception table information for this frame, or fall back
2613 to using prologue parsing. (Note that if we have DWARF CFI,
2614 this sniffer isn't even called -- CFI is always preferred.)
2615
2616 Before we make this decision, however, we check whether we
2617 actually have *symbol* information for the current frame.
2618 If not, prologue parsing would not work anyway, so we might
2619 as well use the exception table and hope for the best. */
2620 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2621 {
2622 int exc_valid = 0;
2623
2624 /* If the next frame is "normal", we are at a call site in this
2625 frame, so exception information is guaranteed to be valid. */
2626 if (get_next_frame (this_frame)
2627 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2628 exc_valid = 1;
2629
2630 /* We also assume exception information is valid if we're currently
2631 blocked in a system call. The system library is supposed to
2632 ensure this, so that e.g. pthread cancellation works. */
2633 if (arm_frame_is_thumb (this_frame))
2634 {
2635 ULONGEST insn;
2636
2637 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 2,
2638 2, byte_order_for_code, &insn)
2639 && (insn & 0xff00) == 0xdf00 /* svc */)
2640 exc_valid = 1;
2641 }
2642 else
2643 {
2644 ULONGEST insn;
2645
2646 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 4,
2647 4, byte_order_for_code, &insn)
2648 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2649 exc_valid = 1;
2650 }
2651
2652 /* Bail out if we don't know that exception information is valid. */
2653 if (!exc_valid)
2654 return 0;
2655
2656 /* The ARM exception index does not mark the *end* of the region
2657 covered by the entry, and some functions will not have any entry.
2658 To correctly recognize the end of the covered region, the linker
2659 should have inserted dummy records with a CANTUNWIND marker.
2660
2661 Unfortunately, current versions of GNU ld do not reliably do
2662 this, and thus we may have found an incorrect entry above.
2663 As a (temporary) sanity check, we only use the entry if it
2664 lies *within* the bounds of the function. Note that this check
2665 might reject perfectly valid entries that just happen to cover
2666 multiple functions; therefore this check ought to be removed
2667 once the linker is fixed. */
2668 if (func_start > exidx_region)
2669 return 0;
2670 }
2671
2672 /* Decode the list of unwinding instructions into a prologue cache.
2673 Note that this may fail due to e.g. a "refuse to unwind" code. */
2674 cache = arm_exidx_fill_cache (this_frame, entry);
2675 if (!cache)
2676 return 0;
2677
2678 *this_prologue_cache = cache;
2679 return 1;
2680 }
2681
2682 struct frame_unwind arm_exidx_unwind = {
2683 NORMAL_FRAME,
2684 default_frame_unwind_stop_reason,
2685 arm_prologue_this_id,
2686 arm_prologue_prev_register,
2687 NULL,
2688 arm_exidx_unwind_sniffer
2689 };
2690
2691 static struct arm_prologue_cache *
2692 arm_make_epilogue_frame_cache (struct frame_info *this_frame)
2693 {
2694 struct arm_prologue_cache *cache;
2695 int reg;
2696
2697 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2698 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2699
2700 /* Still rely on the offset calculated from prologue. */
2701 arm_scan_prologue (this_frame, cache);
2702
2703 /* Since we are in epilogue, the SP has been restored. */
2704 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2705
2706 /* Calculate actual addresses of saved registers using offsets
2707 determined by arm_scan_prologue. */
2708 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2709 if (trad_frame_addr_p (cache->saved_regs, reg))
2710 cache->saved_regs[reg].addr += cache->prev_sp;
2711
2712 return cache;
2713 }
2714
2715 /* Implementation of function hook 'this_id' in
2716 'struct frame_uwnind' for epilogue unwinder. */
2717
2718 static void
2719 arm_epilogue_frame_this_id (struct frame_info *this_frame,
2720 void **this_cache,
2721 struct frame_id *this_id)
2722 {
2723 struct arm_prologue_cache *cache;
2724 CORE_ADDR pc, func;
2725
2726 if (*this_cache == NULL)
2727 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2728 cache = (struct arm_prologue_cache *) *this_cache;
2729
2730 /* Use function start address as part of the frame ID. If we cannot
2731 identify the start address (due to missing symbol information),
2732 fall back to just using the current PC. */
2733 pc = get_frame_pc (this_frame);
2734 func = get_frame_func (this_frame);
2735 if (func == 0)
2736 func = pc;
2737
2738 (*this_id) = frame_id_build (cache->prev_sp, pc);
2739 }
2740
2741 /* Implementation of function hook 'prev_register' in
2742 'struct frame_uwnind' for epilogue unwinder. */
2743
2744 static struct value *
2745 arm_epilogue_frame_prev_register (struct frame_info *this_frame,
2746 void **this_cache, int regnum)
2747 {
2748 if (*this_cache == NULL)
2749 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2750
2751 return arm_prologue_prev_register (this_frame, this_cache, regnum);
2752 }
2753
2754 static int arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch,
2755 CORE_ADDR pc);
2756 static int thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch,
2757 CORE_ADDR pc);
2758
2759 /* Implementation of function hook 'sniffer' in
2760 'struct frame_uwnind' for epilogue unwinder. */
2761
2762 static int
2763 arm_epilogue_frame_sniffer (const struct frame_unwind *self,
2764 struct frame_info *this_frame,
2765 void **this_prologue_cache)
2766 {
2767 if (frame_relative_level (this_frame) == 0)
2768 {
2769 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2770 CORE_ADDR pc = get_frame_pc (this_frame);
2771
2772 if (arm_frame_is_thumb (this_frame))
2773 return thumb_stack_frame_destroyed_p (gdbarch, pc);
2774 else
2775 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
2776 }
2777 else
2778 return 0;
2779 }
2780
2781 /* Frame unwinder from epilogue. */
2782
2783 static const struct frame_unwind arm_epilogue_frame_unwind =
2784 {
2785 NORMAL_FRAME,
2786 default_frame_unwind_stop_reason,
2787 arm_epilogue_frame_this_id,
2788 arm_epilogue_frame_prev_register,
2789 NULL,
2790 arm_epilogue_frame_sniffer,
2791 };
2792
2793 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2794 trampoline, return the target PC. Otherwise return 0.
2795
2796 void call0a (char c, short s, int i, long l) {}
2797
2798 int main (void)
2799 {
2800 (*pointer_to_call0a) (c, s, i, l);
2801 }
2802
2803 Instead of calling a stub library function _call_via_xx (xx is
2804 the register name), GCC may inline the trampoline in the object
2805 file as below (register r2 has the address of call0a).
2806
2807 .global main
2808 .type main, %function
2809 ...
2810 bl .L1
2811 ...
2812 .size main, .-main
2813
2814 .L1:
2815 bx r2
2816
2817 The trampoline 'bx r2' doesn't belong to main. */
2818
2819 static CORE_ADDR
2820 arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2821 {
2822 /* The heuristics of recognizing such trampoline is that FRAME is
2823 executing in Thumb mode and the instruction on PC is 'bx Rm'. */
2824 if (arm_frame_is_thumb (frame))
2825 {
2826 gdb_byte buf[2];
2827
2828 if (target_read_memory (pc, buf, 2) == 0)
2829 {
2830 struct gdbarch *gdbarch = get_frame_arch (frame);
2831 enum bfd_endian byte_order_for_code
2832 = gdbarch_byte_order_for_code (gdbarch);
2833 uint16_t insn
2834 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2835
2836 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2837 {
2838 CORE_ADDR dest
2839 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2840
2841 /* Clear the LSB so that gdb core sets step-resume
2842 breakpoint at the right address. */
2843 return UNMAKE_THUMB_ADDR (dest);
2844 }
2845 }
2846 }
2847
2848 return 0;
2849 }
2850
2851 static struct arm_prologue_cache *
2852 arm_make_stub_cache (struct frame_info *this_frame)
2853 {
2854 struct arm_prologue_cache *cache;
2855
2856 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2857 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2858
2859 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2860
2861 return cache;
2862 }
2863
2864 /* Our frame ID for a stub frame is the current SP and LR. */
2865
2866 static void
2867 arm_stub_this_id (struct frame_info *this_frame,
2868 void **this_cache,
2869 struct frame_id *this_id)
2870 {
2871 struct arm_prologue_cache *cache;
2872
2873 if (*this_cache == NULL)
2874 *this_cache = arm_make_stub_cache (this_frame);
2875 cache = (struct arm_prologue_cache *) *this_cache;
2876
2877 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
2878 }
2879
2880 static int
2881 arm_stub_unwind_sniffer (const struct frame_unwind *self,
2882 struct frame_info *this_frame,
2883 void **this_prologue_cache)
2884 {
2885 CORE_ADDR addr_in_block;
2886 gdb_byte dummy[4];
2887 CORE_ADDR pc, start_addr;
2888 const char *name;
2889
2890 addr_in_block = get_frame_address_in_block (this_frame);
2891 pc = get_frame_pc (this_frame);
2892 if (in_plt_section (addr_in_block)
2893 /* We also use the stub winder if the target memory is unreadable
2894 to avoid having the prologue unwinder trying to read it. */
2895 || target_read_memory (pc, dummy, 4) != 0)
2896 return 1;
2897
2898 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2899 && arm_skip_bx_reg (this_frame, pc) != 0)
2900 return 1;
2901
2902 return 0;
2903 }
2904
2905 struct frame_unwind arm_stub_unwind = {
2906 NORMAL_FRAME,
2907 default_frame_unwind_stop_reason,
2908 arm_stub_this_id,
2909 arm_prologue_prev_register,
2910 NULL,
2911 arm_stub_unwind_sniffer
2912 };
2913
2914 /* Put here the code to store, into CACHE->saved_regs, the addresses
2915 of the saved registers of frame described by THIS_FRAME. CACHE is
2916 returned. */
2917
2918 static struct arm_prologue_cache *
2919 arm_m_exception_cache (struct frame_info *this_frame)
2920 {
2921 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2922 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2923 struct arm_prologue_cache *cache;
2924 CORE_ADDR unwound_sp;
2925 LONGEST xpsr;
2926
2927 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2928 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2929
2930 unwound_sp = get_frame_register_unsigned (this_frame,
2931 ARM_SP_REGNUM);
2932
2933 /* The hardware saves eight 32-bit words, comprising xPSR,
2934 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
2935 "B1.5.6 Exception entry behavior" in
2936 "ARMv7-M Architecture Reference Manual". */
2937 cache->saved_regs[0].addr = unwound_sp;
2938 cache->saved_regs[1].addr = unwound_sp + 4;
2939 cache->saved_regs[2].addr = unwound_sp + 8;
2940 cache->saved_regs[3].addr = unwound_sp + 12;
2941 cache->saved_regs[12].addr = unwound_sp + 16;
2942 cache->saved_regs[14].addr = unwound_sp + 20;
2943 cache->saved_regs[15].addr = unwound_sp + 24;
2944 cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;
2945
2946 /* If bit 9 of the saved xPSR is set, then there is a four-byte
2947 aligner between the top of the 32-byte stack frame and the
2948 previous context's stack pointer. */
2949 cache->prev_sp = unwound_sp + 32;
2950 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
2951 && (xpsr & (1 << 9)) != 0)
2952 cache->prev_sp += 4;
2953
2954 return cache;
2955 }
2956
2957 /* Implementation of function hook 'this_id' in
2958 'struct frame_uwnind'. */
2959
2960 static void
2961 arm_m_exception_this_id (struct frame_info *this_frame,
2962 void **this_cache,
2963 struct frame_id *this_id)
2964 {
2965 struct arm_prologue_cache *cache;
2966
2967 if (*this_cache == NULL)
2968 *this_cache = arm_m_exception_cache (this_frame);
2969 cache = (struct arm_prologue_cache *) *this_cache;
2970
2971 /* Our frame ID for a stub frame is the current SP and LR. */
2972 *this_id = frame_id_build (cache->prev_sp,
2973 get_frame_pc (this_frame));
2974 }
2975
2976 /* Implementation of function hook 'prev_register' in
2977 'struct frame_uwnind'. */
2978
2979 static struct value *
2980 arm_m_exception_prev_register (struct frame_info *this_frame,
2981 void **this_cache,
2982 int prev_regnum)
2983 {
2984 struct arm_prologue_cache *cache;
2985
2986 if (*this_cache == NULL)
2987 *this_cache = arm_m_exception_cache (this_frame);
2988 cache = (struct arm_prologue_cache *) *this_cache;
2989
2990 /* The value was already reconstructed into PREV_SP. */
2991 if (prev_regnum == ARM_SP_REGNUM)
2992 return frame_unwind_got_constant (this_frame, prev_regnum,
2993 cache->prev_sp);
2994
2995 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
2996 prev_regnum);
2997 }
2998
2999 /* Implementation of function hook 'sniffer' in
3000 'struct frame_uwnind'. */
3001
3002 static int
3003 arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3004 struct frame_info *this_frame,
3005 void **this_prologue_cache)
3006 {
3007 CORE_ADDR this_pc = get_frame_pc (this_frame);
3008
3009 /* No need to check is_m; this sniffer is only registered for
3010 M-profile architectures. */
3011
3012 /* Check if exception frame returns to a magic PC value. */
3013 return arm_m_addr_is_magic (this_pc);
3014 }
3015
3016 /* Frame unwinder for M-profile exceptions. */
3017
3018 struct frame_unwind arm_m_exception_unwind =
3019 {
3020 SIGTRAMP_FRAME,
3021 default_frame_unwind_stop_reason,
3022 arm_m_exception_this_id,
3023 arm_m_exception_prev_register,
3024 NULL,
3025 arm_m_exception_unwind_sniffer
3026 };
3027
3028 static CORE_ADDR
3029 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
3030 {
3031 struct arm_prologue_cache *cache;
3032
3033 if (*this_cache == NULL)
3034 *this_cache = arm_make_prologue_cache (this_frame);
3035 cache = (struct arm_prologue_cache *) *this_cache;
3036
3037 return cache->prev_sp - cache->framesize;
3038 }
3039
3040 struct frame_base arm_normal_base = {
3041 &arm_prologue_unwind,
3042 arm_normal_frame_base,
3043 arm_normal_frame_base,
3044 arm_normal_frame_base
3045 };
3046
3047 static struct value *
3048 arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
3049 int regnum)
3050 {
3051 struct gdbarch * gdbarch = get_frame_arch (this_frame);
3052 CORE_ADDR lr, cpsr;
3053 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
3054
3055 switch (regnum)
3056 {
3057 case ARM_PC_REGNUM:
3058 /* The PC is normally copied from the return column, which
3059 describes saves of LR. However, that version may have an
3060 extra bit set to indicate Thumb state. The bit is not
3061 part of the PC. */
3062 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3063 return frame_unwind_got_constant (this_frame, regnum,
3064 arm_addr_bits_remove (gdbarch, lr));
3065
3066 case ARM_PS_REGNUM:
3067 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3068 cpsr = get_frame_register_unsigned (this_frame, regnum);
3069 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3070 if (IS_THUMB_ADDR (lr))
3071 cpsr |= t_bit;
3072 else
3073 cpsr &= ~t_bit;
3074 return frame_unwind_got_constant (this_frame, regnum, cpsr);
3075
3076 default:
3077 internal_error (__FILE__, __LINE__,
3078 _("Unexpected register %d"), regnum);
3079 }
3080 }
3081
3082 static void
3083 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3084 struct dwarf2_frame_state_reg *reg,
3085 struct frame_info *this_frame)
3086 {
3087 switch (regnum)
3088 {
3089 case ARM_PC_REGNUM:
3090 case ARM_PS_REGNUM:
3091 reg->how = DWARF2_FRAME_REG_FN;
3092 reg->loc.fn = arm_dwarf2_prev_register;
3093 break;
3094 case ARM_SP_REGNUM:
3095 reg->how = DWARF2_FRAME_REG_CFA;
3096 break;
3097 }
3098 }
3099
3100 /* Implement the stack_frame_destroyed_p gdbarch method. */
3101
3102 static int
3103 thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3104 {
3105 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3106 unsigned int insn, insn2;
3107 int found_return = 0, found_stack_adjust = 0;
3108 CORE_ADDR func_start, func_end;
3109 CORE_ADDR scan_pc;
3110 gdb_byte buf[4];
3111
3112 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3113 return 0;
3114
3115 /* The epilogue is a sequence of instructions along the following lines:
3116
3117 - add stack frame size to SP or FP
3118 - [if frame pointer used] restore SP from FP
3119 - restore registers from SP [may include PC]
3120 - a return-type instruction [if PC wasn't already restored]
3121
3122 In a first pass, we scan forward from the current PC and verify the
3123 instructions we find as compatible with this sequence, ending in a
3124 return instruction.
3125
3126 However, this is not sufficient to distinguish indirect function calls
3127 within a function from indirect tail calls in the epilogue in some cases.
3128 Therefore, if we didn't already find any SP-changing instruction during
3129 forward scan, we add a backward scanning heuristic to ensure we actually
3130 are in the epilogue. */
3131
3132 scan_pc = pc;
3133 while (scan_pc < func_end && !found_return)
3134 {
3135 if (target_read_memory (scan_pc, buf, 2))
3136 break;
3137
3138 scan_pc += 2;
3139 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3140
3141 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3142 found_return = 1;
3143 else if (insn == 0x46f7) /* mov pc, lr */
3144 found_return = 1;
3145 else if (thumb_instruction_restores_sp (insn))
3146 {
3147 if ((insn & 0xff00) == 0xbd00) /* pop <registers, PC> */
3148 found_return = 1;
3149 }
3150 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
3151 {
3152 if (target_read_memory (scan_pc, buf, 2))
3153 break;
3154
3155 scan_pc += 2;
3156 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3157
3158 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3159 {
3160 if (insn2 & 0x8000) /* <registers> include PC. */
3161 found_return = 1;
3162 }
3163 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3164 && (insn2 & 0x0fff) == 0x0b04)
3165 {
3166 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3167 found_return = 1;
3168 }
3169 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3170 && (insn2 & 0x0e00) == 0x0a00)
3171 ;
3172 else
3173 break;
3174 }
3175 else
3176 break;
3177 }
3178
3179 if (!found_return)
3180 return 0;
3181
3182 /* Since any instruction in the epilogue sequence, with the possible
3183 exception of return itself, updates the stack pointer, we need to
3184 scan backwards for at most one instruction. Try either a 16-bit or
3185 a 32-bit instruction. This is just a heuristic, so we do not worry
3186 too much about false positives. */
3187
3188 if (pc - 4 < func_start)
3189 return 0;
3190 if (target_read_memory (pc - 4, buf, 4))
3191 return 0;
3192
3193 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3194 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3195
3196 if (thumb_instruction_restores_sp (insn2))
3197 found_stack_adjust = 1;
3198 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3199 found_stack_adjust = 1;
3200 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3201 && (insn2 & 0x0fff) == 0x0b04)
3202 found_stack_adjust = 1;
3203 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3204 && (insn2 & 0x0e00) == 0x0a00)
3205 found_stack_adjust = 1;
3206
3207 return found_stack_adjust;
3208 }
3209
3210 static int
3211 arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch, CORE_ADDR pc)
3212 {
3213 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3214 unsigned int insn;
3215 int found_return;
3216 CORE_ADDR func_start, func_end;
3217
3218 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3219 return 0;
3220
3221 /* We are in the epilogue if the previous instruction was a stack
3222 adjustment and the next instruction is a possible return (bx, mov
3223 pc, or pop). We could have to scan backwards to find the stack
3224 adjustment, or forwards to find the return, but this is a decent
3225 approximation. First scan forwards. */
3226
3227 found_return = 0;
3228 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3229 if (bits (insn, 28, 31) != INST_NV)
3230 {
3231 if ((insn & 0x0ffffff0) == 0x012fff10)
3232 /* BX. */
3233 found_return = 1;
3234 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3235 /* MOV PC. */
3236 found_return = 1;
3237 else if ((insn & 0x0fff0000) == 0x08bd0000
3238 && (insn & 0x0000c000) != 0)
3239 /* POP (LDMIA), including PC or LR. */
3240 found_return = 1;
3241 }
3242
3243 if (!found_return)
3244 return 0;
3245
3246 /* Scan backwards. This is just a heuristic, so do not worry about
3247 false positives from mode changes. */
3248
3249 if (pc < func_start + 4)
3250 return 0;
3251
3252 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
3253 if (arm_instruction_restores_sp (insn))
3254 return 1;
3255
3256 return 0;
3257 }
3258
3259 /* Implement the stack_frame_destroyed_p gdbarch method. */
3260
3261 static int
3262 arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3263 {
3264 if (arm_pc_is_thumb (gdbarch, pc))
3265 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3266 else
3267 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
3268 }
3269
3270 /* When arguments must be pushed onto the stack, they go on in reverse
3271 order. The code below implements a FILO (stack) to do this. */
3272
3273 struct stack_item
3274 {
3275 int len;
3276 struct stack_item *prev;
3277 gdb_byte *data;
3278 };
3279
3280 static struct stack_item *
3281 push_stack_item (struct stack_item *prev, const gdb_byte *contents, int len)
3282 {
3283 struct stack_item *si;
3284 si = XNEW (struct stack_item);
3285 si->data = (gdb_byte *) xmalloc (len);
3286 si->len = len;
3287 si->prev = prev;
3288 memcpy (si->data, contents, len);
3289 return si;
3290 }
3291
3292 static struct stack_item *
3293 pop_stack_item (struct stack_item *si)
3294 {
3295 struct stack_item *dead = si;
3296 si = si->prev;
3297 xfree (dead->data);
3298 xfree (dead);
3299 return si;
3300 }
3301
3302 /* Implement the gdbarch type alignment method, overrides the generic
3303 alignment algorithm for anything that is arm specific. */
3304
3305 static ULONGEST
3306 arm_type_align (gdbarch *gdbarch, struct type *t)
3307 {
3308 t = check_typedef (t);
3309 if (TYPE_CODE (t) == TYPE_CODE_ARRAY && TYPE_VECTOR (t))
3310 {
3311 /* Use the natural alignment for vector types (the same for
3312 scalar type), but the maximum alignment is 64-bit. */
3313 if (TYPE_LENGTH (t) > 8)
3314 return 8;
3315 else
3316 return TYPE_LENGTH (t);
3317 }
3318
3319 /* Allow the common code to calculate the alignment. */
3320 return 0;
3321 }
3322
3323 /* Possible base types for a candidate for passing and returning in
3324 VFP registers. */
3325
3326 enum arm_vfp_cprc_base_type
3327 {
3328 VFP_CPRC_UNKNOWN,
3329 VFP_CPRC_SINGLE,
3330 VFP_CPRC_DOUBLE,
3331 VFP_CPRC_VEC64,
3332 VFP_CPRC_VEC128
3333 };
3334
3335 /* The length of one element of base type B. */
3336
3337 static unsigned
3338 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3339 {
3340 switch (b)
3341 {
3342 case VFP_CPRC_SINGLE:
3343 return 4;
3344 case VFP_CPRC_DOUBLE:
3345 return 8;
3346 case VFP_CPRC_VEC64:
3347 return 8;
3348 case VFP_CPRC_VEC128:
3349 return 16;
3350 default:
3351 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3352 (int) b);
3353 }
3354 }
3355
3356 /* The character ('s', 'd' or 'q') for the type of VFP register used
3357 for passing base type B. */
3358
3359 static int
3360 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3361 {
3362 switch (b)
3363 {
3364 case VFP_CPRC_SINGLE:
3365 return 's';
3366 case VFP_CPRC_DOUBLE:
3367 return 'd';
3368 case VFP_CPRC_VEC64:
3369 return 'd';
3370 case VFP_CPRC_VEC128:
3371 return 'q';
3372 default:
3373 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3374 (int) b);
3375 }
3376 }
3377
3378 /* Determine whether T may be part of a candidate for passing and
3379 returning in VFP registers, ignoring the limit on the total number
3380 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3381 classification of the first valid component found; if it is not
3382 VFP_CPRC_UNKNOWN, all components must have the same classification
3383 as *BASE_TYPE. If it is found that T contains a type not permitted
3384 for passing and returning in VFP registers, a type differently
3385 classified from *BASE_TYPE, or two types differently classified
3386 from each other, return -1, otherwise return the total number of
3387 base-type elements found (possibly 0 in an empty structure or
3388 array). Vector types are not currently supported, matching the
3389 generic AAPCS support. */
3390
3391 static int
3392 arm_vfp_cprc_sub_candidate (struct type *t,
3393 enum arm_vfp_cprc_base_type *base_type)
3394 {
3395 t = check_typedef (t);
3396 switch (TYPE_CODE (t))
3397 {
3398 case TYPE_CODE_FLT:
3399 switch (TYPE_LENGTH (t))
3400 {
3401 case 4:
3402 if (*base_type == VFP_CPRC_UNKNOWN)
3403 *base_type = VFP_CPRC_SINGLE;
3404 else if (*base_type != VFP_CPRC_SINGLE)
3405 return -1;
3406 return 1;
3407
3408 case 8:
3409 if (*base_type == VFP_CPRC_UNKNOWN)
3410 *base_type = VFP_CPRC_DOUBLE;
3411 else if (*base_type != VFP_CPRC_DOUBLE)
3412 return -1;
3413 return 1;
3414
3415 default:
3416 return -1;
3417 }
3418 break;
3419
3420 case TYPE_CODE_COMPLEX:
3421 /* Arguments of complex T where T is one of the types float or
3422 double get treated as if they are implemented as:
3423
3424 struct complexT
3425 {
3426 T real;
3427 T imag;
3428 };
3429
3430 */
3431 switch (TYPE_LENGTH (t))
3432 {
3433 case 8:
3434 if (*base_type == VFP_CPRC_UNKNOWN)
3435 *base_type = VFP_CPRC_SINGLE;
3436 else if (*base_type != VFP_CPRC_SINGLE)
3437 return -1;
3438 return 2;
3439
3440 case 16:
3441 if (*base_type == VFP_CPRC_UNKNOWN)
3442 *base_type = VFP_CPRC_DOUBLE;
3443 else if (*base_type != VFP_CPRC_DOUBLE)
3444 return -1;
3445 return 2;
3446
3447 default:
3448 return -1;
3449 }
3450 break;
3451
3452 case TYPE_CODE_ARRAY:
3453 {
3454 if (TYPE_VECTOR (t))
3455 {
3456 /* A 64-bit or 128-bit containerized vector type are VFP
3457 CPRCs. */
3458 switch (TYPE_LENGTH (t))
3459 {
3460 case 8:
3461 if (*base_type == VFP_CPRC_UNKNOWN)
3462 *base_type = VFP_CPRC_VEC64;
3463 return 1;
3464 case 16:
3465 if (*base_type == VFP_CPRC_UNKNOWN)
3466 *base_type = VFP_CPRC_VEC128;
3467 return 1;
3468 default:
3469 return -1;
3470 }
3471 }
3472 else
3473 {
3474 int count;
3475 unsigned unitlen;
3476
3477 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t),
3478 base_type);
3479 if (count == -1)
3480 return -1;
3481 if (TYPE_LENGTH (t) == 0)
3482 {
3483 gdb_assert (count == 0);
3484 return 0;
3485 }
3486 else if (count == 0)
3487 return -1;
3488 unitlen = arm_vfp_cprc_unit_length (*base_type);
3489 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3490 return TYPE_LENGTH (t) / unitlen;
3491 }
3492 }
3493 break;
3494
3495 case TYPE_CODE_STRUCT:
3496 {
3497 int count = 0;
3498 unsigned unitlen;
3499 int i;
3500 for (i = 0; i < TYPE_NFIELDS (t); i++)
3501 {
3502 int sub_count = 0;
3503
3504 if (!field_is_static (&TYPE_FIELD (t, i)))
3505 sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3506 base_type);
3507 if (sub_count == -1)
3508 return -1;
3509 count += sub_count;
3510 }
3511 if (TYPE_LENGTH (t) == 0)
3512 {
3513 gdb_assert (count == 0);
3514 return 0;
3515 }
3516 else if (count == 0)
3517 return -1;
3518 unitlen = arm_vfp_cprc_unit_length (*base_type);
3519 if (TYPE_LENGTH (t) != unitlen * count)
3520 return -1;
3521 return count;
3522 }
3523
3524 case TYPE_CODE_UNION:
3525 {
3526 int count = 0;
3527 unsigned unitlen;
3528 int i;
3529 for (i = 0; i < TYPE_NFIELDS (t); i++)
3530 {
3531 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3532 base_type);
3533 if (sub_count == -1)
3534 return -1;
3535 count = (count > sub_count ? count : sub_count);
3536 }
3537 if (TYPE_LENGTH (t) == 0)
3538 {
3539 gdb_assert (count == 0);
3540 return 0;
3541 }
3542 else if (count == 0)
3543 return -1;
3544 unitlen = arm_vfp_cprc_unit_length (*base_type);
3545 if (TYPE_LENGTH (t) != unitlen * count)
3546 return -1;
3547 return count;
3548 }
3549
3550 default:
3551 break;
3552 }
3553
3554 return -1;
3555 }
3556
3557 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3558 if passed to or returned from a non-variadic function with the VFP
3559 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3560 *BASE_TYPE to the base type for T and *COUNT to the number of
3561 elements of that base type before returning. */
3562
3563 static int
3564 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3565 int *count)
3566 {
3567 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3568 int c = arm_vfp_cprc_sub_candidate (t, &b);
3569 if (c <= 0 || c > 4)
3570 return 0;
3571 *base_type = b;
3572 *count = c;
3573 return 1;
3574 }
3575
3576 /* Return 1 if the VFP ABI should be used for passing arguments to and
3577 returning values from a function of type FUNC_TYPE, 0
3578 otherwise. */
3579
3580 static int
3581 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3582 {
3583 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3584 /* Variadic functions always use the base ABI. Assume that functions
3585 without debug info are not variadic. */
3586 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3587 return 0;
3588 /* The VFP ABI is only supported as a variant of AAPCS. */
3589 if (tdep->arm_abi != ARM_ABI_AAPCS)
3590 return 0;
3591 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3592 }
3593
3594 /* We currently only support passing parameters in integer registers, which
3595 conforms with GCC's default model, and VFP argument passing following
3596 the VFP variant of AAPCS. Several other variants exist and
3597 we should probably support some of them based on the selected ABI. */
3598
3599 static CORE_ADDR
3600 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
3601 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3602 struct value **args, CORE_ADDR sp,
3603 function_call_return_method return_method,
3604 CORE_ADDR struct_addr)
3605 {
3606 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3607 int argnum;
3608 int argreg;
3609 int nstack;
3610 struct stack_item *si = NULL;
3611 int use_vfp_abi;
3612 struct type *ftype;
3613 unsigned vfp_regs_free = (1 << 16) - 1;
3614
3615 /* Determine the type of this function and whether the VFP ABI
3616 applies. */
3617 ftype = check_typedef (value_type (function));
3618 if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3619 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3620 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
3621
3622 /* Set the return address. For the ARM, the return breakpoint is
3623 always at BP_ADDR. */
3624 if (arm_pc_is_thumb (gdbarch, bp_addr))
3625 bp_addr |= 1;
3626 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
3627
3628 /* Walk through the list of args and determine how large a temporary
3629 stack is required. Need to take care here as structs may be
3630 passed on the stack, and we have to push them. */
3631 nstack = 0;
3632
3633 argreg = ARM_A1_REGNUM;
3634 nstack = 0;
3635
3636 /* The struct_return pointer occupies the first parameter
3637 passing register. */
3638 if (return_method == return_method_struct)
3639 {
3640 if (arm_debug)
3641 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
3642 gdbarch_register_name (gdbarch, argreg),
3643 paddress (gdbarch, struct_addr));
3644 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3645 argreg++;
3646 }
3647
3648 for (argnum = 0; argnum < nargs; argnum++)
3649 {
3650 int len;
3651 struct type *arg_type;
3652 struct type *target_type;
3653 enum type_code typecode;
3654 const bfd_byte *val;
3655 int align;
3656 enum arm_vfp_cprc_base_type vfp_base_type;
3657 int vfp_base_count;
3658 int may_use_core_reg = 1;
3659
3660 arg_type = check_typedef (value_type (args[argnum]));
3661 len = TYPE_LENGTH (arg_type);
3662 target_type = TYPE_TARGET_TYPE (arg_type);
3663 typecode = TYPE_CODE (arg_type);
3664 val = value_contents (args[argnum]);
3665
3666 align = type_align (arg_type);
3667 /* Round alignment up to a whole number of words. */
3668 align = (align + ARM_INT_REGISTER_SIZE - 1)
3669 & ~(ARM_INT_REGISTER_SIZE - 1);
3670 /* Different ABIs have different maximum alignments. */
3671 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3672 {
3673 /* The APCS ABI only requires word alignment. */
3674 align = ARM_INT_REGISTER_SIZE;
3675 }
3676 else
3677 {
3678 /* The AAPCS requires at most doubleword alignment. */
3679 if (align > ARM_INT_REGISTER_SIZE * 2)
3680 align = ARM_INT_REGISTER_SIZE * 2;
3681 }
3682
3683 if (use_vfp_abi
3684 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3685 &vfp_base_count))
3686 {
3687 int regno;
3688 int unit_length;
3689 int shift;
3690 unsigned mask;
3691
3692 /* Because this is a CPRC it cannot go in a core register or
3693 cause a core register to be skipped for alignment.
3694 Either it goes in VFP registers and the rest of this loop
3695 iteration is skipped for this argument, or it goes on the
3696 stack (and the stack alignment code is correct for this
3697 case). */
3698 may_use_core_reg = 0;
3699
3700 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3701 shift = unit_length / 4;
3702 mask = (1 << (shift * vfp_base_count)) - 1;
3703 for (regno = 0; regno < 16; regno += shift)
3704 if (((vfp_regs_free >> regno) & mask) == mask)
3705 break;
3706
3707 if (regno < 16)
3708 {
3709 int reg_char;
3710 int reg_scaled;
3711 int i;
3712
3713 vfp_regs_free &= ~(mask << regno);
3714 reg_scaled = regno / shift;
3715 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3716 for (i = 0; i < vfp_base_count; i++)
3717 {
3718 char name_buf[4];
3719 int regnum;
3720 if (reg_char == 'q')
3721 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
3722 val + i * unit_length);
3723 else
3724 {
3725 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3726 reg_char, reg_scaled + i);
3727 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3728 strlen (name_buf));
3729 regcache->cooked_write (regnum, val + i * unit_length);
3730 }
3731 }
3732 continue;
3733 }
3734 else
3735 {
3736 /* This CPRC could not go in VFP registers, so all VFP
3737 registers are now marked as used. */
3738 vfp_regs_free = 0;
3739 }
3740 }
3741
3742 /* Push stack padding for dowubleword alignment. */
3743 if (nstack & (align - 1))
3744 {
3745 si = push_stack_item (si, val, ARM_INT_REGISTER_SIZE);
3746 nstack += ARM_INT_REGISTER_SIZE;
3747 }
3748
3749 /* Doubleword aligned quantities must go in even register pairs. */
3750 if (may_use_core_reg
3751 && argreg <= ARM_LAST_ARG_REGNUM
3752 && align > ARM_INT_REGISTER_SIZE
3753 && argreg & 1)
3754 argreg++;
3755
3756 /* If the argument is a pointer to a function, and it is a
3757 Thumb function, create a LOCAL copy of the value and set
3758 the THUMB bit in it. */
3759 if (TYPE_CODE_PTR == typecode
3760 && target_type != NULL
3761 && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
3762 {
3763 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
3764 if (arm_pc_is_thumb (gdbarch, regval))
3765 {
3766 bfd_byte *copy = (bfd_byte *) alloca (len);
3767 store_unsigned_integer (copy, len, byte_order,
3768 MAKE_THUMB_ADDR (regval));
3769 val = copy;
3770 }
3771 }
3772
3773 /* Copy the argument to general registers or the stack in
3774 register-sized pieces. Large arguments are split between
3775 registers and stack. */
3776 while (len > 0)
3777 {
3778 int partial_len = len < ARM_INT_REGISTER_SIZE
3779 ? len : ARM_INT_REGISTER_SIZE;
3780 CORE_ADDR regval
3781 = extract_unsigned_integer (val, partial_len, byte_order);
3782
3783 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
3784 {
3785 /* The argument is being passed in a general purpose
3786 register. */
3787 if (byte_order == BFD_ENDIAN_BIG)
3788 regval <<= (ARM_INT_REGISTER_SIZE - partial_len) * 8;
3789 if (arm_debug)
3790 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
3791 argnum,
3792 gdbarch_register_name
3793 (gdbarch, argreg),
3794 phex (regval, ARM_INT_REGISTER_SIZE));
3795 regcache_cooked_write_unsigned (regcache, argreg, regval);
3796 argreg++;
3797 }
3798 else
3799 {
3800 gdb_byte buf[ARM_INT_REGISTER_SIZE];
3801
3802 memset (buf, 0, sizeof (buf));
3803 store_unsigned_integer (buf, partial_len, byte_order, regval);
3804
3805 /* Push the arguments onto the stack. */
3806 if (arm_debug)
3807 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3808 argnum, nstack);
3809 si = push_stack_item (si, buf, ARM_INT_REGISTER_SIZE);
3810 nstack += ARM_INT_REGISTER_SIZE;
3811 }
3812
3813 len -= partial_len;
3814 val += partial_len;
3815 }
3816 }
3817 /* If we have an odd number of words to push, then decrement the stack
3818 by one word now, so first stack argument will be dword aligned. */
3819 if (nstack & 4)
3820 sp -= 4;
3821
3822 while (si)
3823 {
3824 sp -= si->len;
3825 write_memory (sp, si->data, si->len);
3826 si = pop_stack_item (si);
3827 }
3828
3829 /* Finally, update teh SP register. */
3830 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3831
3832 return sp;
3833 }
3834
3835
3836 /* Always align the frame to an 8-byte boundary. This is required on
3837 some platforms and harmless on the rest. */
3838
3839 static CORE_ADDR
3840 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3841 {
3842 /* Align the stack to eight bytes. */
3843 return sp & ~ (CORE_ADDR) 7;
3844 }
3845
3846 static void
3847 print_fpu_flags (struct ui_file *file, int flags)
3848 {
3849 if (flags & (1 << 0))
3850 fputs_filtered ("IVO ", file);
3851 if (flags & (1 << 1))
3852 fputs_filtered ("DVZ ", file);
3853 if (flags & (1 << 2))
3854 fputs_filtered ("OFL ", file);
3855 if (flags & (1 << 3))
3856 fputs_filtered ("UFL ", file);
3857 if (flags & (1 << 4))
3858 fputs_filtered ("INX ", file);
3859 fputc_filtered ('\n', file);
3860 }
3861
3862 /* Print interesting information about the floating point processor
3863 (if present) or emulator. */
3864 static void
3865 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
3866 struct frame_info *frame, const char *args)
3867 {
3868 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
3869 int type;
3870
3871 type = (status >> 24) & 127;
3872 if (status & (1 << 31))
3873 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
3874 else
3875 fprintf_filtered (file, _("Software FPU type %d\n"), type);
3876 /* i18n: [floating point unit] mask */
3877 fputs_filtered (_("mask: "), file);
3878 print_fpu_flags (file, status >> 16);
3879 /* i18n: [floating point unit] flags */
3880 fputs_filtered (_("flags: "), file);
3881 print_fpu_flags (file, status);
3882 }
3883
3884 /* Construct the ARM extended floating point type. */
3885 static struct type *
3886 arm_ext_type (struct gdbarch *gdbarch)
3887 {
3888 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3889
3890 if (!tdep->arm_ext_type)
3891 tdep->arm_ext_type
3892 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
3893 floatformats_arm_ext);
3894
3895 return tdep->arm_ext_type;
3896 }
3897
3898 static struct type *
3899 arm_neon_double_type (struct gdbarch *gdbarch)
3900 {
3901 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3902
3903 if (tdep->neon_double_type == NULL)
3904 {
3905 struct type *t, *elem;
3906
3907 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
3908 TYPE_CODE_UNION);
3909 elem = builtin_type (gdbarch)->builtin_uint8;
3910 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
3911 elem = builtin_type (gdbarch)->builtin_uint16;
3912 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
3913 elem = builtin_type (gdbarch)->builtin_uint32;
3914 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
3915 elem = builtin_type (gdbarch)->builtin_uint64;
3916 append_composite_type_field (t, "u64", elem);
3917 elem = builtin_type (gdbarch)->builtin_float;
3918 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
3919 elem = builtin_type (gdbarch)->builtin_double;
3920 append_composite_type_field (t, "f64", elem);
3921
3922 TYPE_VECTOR (t) = 1;
3923 TYPE_NAME (t) = "neon_d";
3924 tdep->neon_double_type = t;
3925 }
3926
3927 return tdep->neon_double_type;
3928 }
3929
3930 /* FIXME: The vector types are not correctly ordered on big-endian
3931 targets. Just as s0 is the low bits of d0, d0[0] is also the low
3932 bits of d0 - regardless of what unit size is being held in d0. So
3933 the offset of the first uint8 in d0 is 7, but the offset of the
3934 first float is 4. This code works as-is for little-endian
3935 targets. */
3936
3937 static struct type *
3938 arm_neon_quad_type (struct gdbarch *gdbarch)
3939 {
3940 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3941
3942 if (tdep->neon_quad_type == NULL)
3943 {
3944 struct type *t, *elem;
3945
3946 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
3947 TYPE_CODE_UNION);
3948 elem = builtin_type (gdbarch)->builtin_uint8;
3949 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
3950 elem = builtin_type (gdbarch)->builtin_uint16;
3951 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
3952 elem = builtin_type (gdbarch)->builtin_uint32;
3953 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
3954 elem = builtin_type (gdbarch)->builtin_uint64;
3955 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
3956 elem = builtin_type (gdbarch)->builtin_float;
3957 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
3958 elem = builtin_type (gdbarch)->builtin_double;
3959 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
3960
3961 TYPE_VECTOR (t) = 1;
3962 TYPE_NAME (t) = "neon_q";
3963 tdep->neon_quad_type = t;
3964 }
3965
3966 return tdep->neon_quad_type;
3967 }
3968
3969 /* Return the GDB type object for the "standard" data type of data in
3970 register N. */
3971
3972 static struct type *
3973 arm_register_type (struct gdbarch *gdbarch, int regnum)
3974 {
3975 int num_regs = gdbarch_num_regs (gdbarch);
3976
3977 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
3978 && regnum >= num_regs && regnum < num_regs + 32)
3979 return builtin_type (gdbarch)->builtin_float;
3980
3981 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
3982 && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
3983 return arm_neon_quad_type (gdbarch);
3984
3985 /* If the target description has register information, we are only
3986 in this function so that we can override the types of
3987 double-precision registers for NEON. */
3988 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
3989 {
3990 struct type *t = tdesc_register_type (gdbarch, regnum);
3991
3992 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
3993 && TYPE_CODE (t) == TYPE_CODE_FLT
3994 && gdbarch_tdep (gdbarch)->have_neon)
3995 return arm_neon_double_type (gdbarch);
3996 else
3997 return t;
3998 }
3999
4000 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
4001 {
4002 if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
4003 return builtin_type (gdbarch)->builtin_void;
4004
4005 return arm_ext_type (gdbarch);
4006 }
4007 else if (regnum == ARM_SP_REGNUM)
4008 return builtin_type (gdbarch)->builtin_data_ptr;
4009 else if (regnum == ARM_PC_REGNUM)
4010 return builtin_type (gdbarch)->builtin_func_ptr;
4011 else if (regnum >= ARRAY_SIZE (arm_register_names))
4012 /* These registers are only supported on targets which supply
4013 an XML description. */
4014 return builtin_type (gdbarch)->builtin_int0;
4015 else
4016 return builtin_type (gdbarch)->builtin_uint32;
4017 }
4018
4019 /* Map a DWARF register REGNUM onto the appropriate GDB register
4020 number. */
4021
4022 static int
4023 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
4024 {
4025 /* Core integer regs. */
4026 if (reg >= 0 && reg <= 15)
4027 return reg;
4028
4029 /* Legacy FPA encoding. These were once used in a way which
4030 overlapped with VFP register numbering, so their use is
4031 discouraged, but GDB doesn't support the ARM toolchain
4032 which used them for VFP. */
4033 if (reg >= 16 && reg <= 23)
4034 return ARM_F0_REGNUM + reg - 16;
4035
4036 /* New assignments for the FPA registers. */
4037 if (reg >= 96 && reg <= 103)
4038 return ARM_F0_REGNUM + reg - 96;
4039
4040 /* WMMX register assignments. */
4041 if (reg >= 104 && reg <= 111)
4042 return ARM_WCGR0_REGNUM + reg - 104;
4043
4044 if (reg >= 112 && reg <= 127)
4045 return ARM_WR0_REGNUM + reg - 112;
4046
4047 if (reg >= 192 && reg <= 199)
4048 return ARM_WC0_REGNUM + reg - 192;
4049
4050 /* VFP v2 registers. A double precision value is actually
4051 in d1 rather than s2, but the ABI only defines numbering
4052 for the single precision registers. This will "just work"
4053 in GDB for little endian targets (we'll read eight bytes,
4054 starting in s0 and then progressing to s1), but will be
4055 reversed on big endian targets with VFP. This won't
4056 be a problem for the new Neon quad registers; you're supposed
4057 to use DW_OP_piece for those. */
4058 if (reg >= 64 && reg <= 95)
4059 {
4060 char name_buf[4];
4061
4062 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
4063 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4064 strlen (name_buf));
4065 }
4066
4067 /* VFP v3 / Neon registers. This range is also used for VFP v2
4068 registers, except that it now describes d0 instead of s0. */
4069 if (reg >= 256 && reg <= 287)
4070 {
4071 char name_buf[4];
4072
4073 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
4074 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4075 strlen (name_buf));
4076 }
4077
4078 return -1;
4079 }
4080
4081 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4082 static int
4083 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4084 {
4085 int reg = regnum;
4086 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
4087
4088 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4089 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4090
4091 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4092 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4093
4094 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4095 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4096
4097 if (reg < NUM_GREGS)
4098 return SIM_ARM_R0_REGNUM + reg;
4099 reg -= NUM_GREGS;
4100
4101 if (reg < NUM_FREGS)
4102 return SIM_ARM_FP0_REGNUM + reg;
4103 reg -= NUM_FREGS;
4104
4105 if (reg < NUM_SREGS)
4106 return SIM_ARM_FPS_REGNUM + reg;
4107 reg -= NUM_SREGS;
4108
4109 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4110 }
4111
4112 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
4113 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
4114 NULL if an error occurs. BUF is freed. */
4115
4116 static gdb_byte *
4117 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
4118 int old_len, int new_len)
4119 {
4120 gdb_byte *new_buf;
4121 int bytes_to_read = new_len - old_len;
4122
4123 new_buf = (gdb_byte *) xmalloc (new_len);
4124 memcpy (new_buf + bytes_to_read, buf, old_len);
4125 xfree (buf);
4126 if (target_read_code (endaddr - new_len, new_buf, bytes_to_read) != 0)
4127 {
4128 xfree (new_buf);
4129 return NULL;
4130 }
4131 return new_buf;
4132 }
4133
4134 /* An IT block is at most the 2-byte IT instruction followed by
4135 four 4-byte instructions. The furthest back we must search to
4136 find an IT block that affects the current instruction is thus
4137 2 + 3 * 4 == 14 bytes. */
4138 #define MAX_IT_BLOCK_PREFIX 14
4139
4140 /* Use a quick scan if there are more than this many bytes of
4141 code. */
4142 #define IT_SCAN_THRESHOLD 32
4143
4144 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
4145 A breakpoint in an IT block may not be hit, depending on the
4146 condition flags. */
4147 static CORE_ADDR
4148 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
4149 {
4150 gdb_byte *buf;
4151 char map_type;
4152 CORE_ADDR boundary, func_start;
4153 int buf_len;
4154 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
4155 int i, any, last_it, last_it_count;
4156
4157 /* If we are using BKPT breakpoints, none of this is necessary. */
4158 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
4159 return bpaddr;
4160
4161 /* ARM mode does not have this problem. */
4162 if (!arm_pc_is_thumb (gdbarch, bpaddr))
4163 return bpaddr;
4164
4165 /* We are setting a breakpoint in Thumb code that could potentially
4166 contain an IT block. The first step is to find how much Thumb
4167 code there is; we do not need to read outside of known Thumb
4168 sequences. */
4169 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
4170 if (map_type == 0)
4171 /* Thumb-2 code must have mapping symbols to have a chance. */
4172 return bpaddr;
4173
4174 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
4175
4176 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
4177 && func_start > boundary)
4178 boundary = func_start;
4179
4180 /* Search for a candidate IT instruction. We have to do some fancy
4181 footwork to distinguish a real IT instruction from the second
4182 half of a 32-bit instruction, but there is no need for that if
4183 there's no candidate. */
4184 buf_len = std::min (bpaddr - boundary, (CORE_ADDR) MAX_IT_BLOCK_PREFIX);
4185 if (buf_len == 0)
4186 /* No room for an IT instruction. */
4187 return bpaddr;
4188
4189 buf = (gdb_byte *) xmalloc (buf_len);
4190 if (target_read_code (bpaddr - buf_len, buf, buf_len) != 0)
4191 return bpaddr;
4192 any = 0;
4193 for (i = 0; i < buf_len; i += 2)
4194 {
4195 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4196 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4197 {
4198 any = 1;
4199 break;
4200 }
4201 }
4202
4203 if (any == 0)
4204 {
4205 xfree (buf);
4206 return bpaddr;
4207 }
4208
4209 /* OK, the code bytes before this instruction contain at least one
4210 halfword which resembles an IT instruction. We know that it's
4211 Thumb code, but there are still two possibilities. Either the
4212 halfword really is an IT instruction, or it is the second half of
4213 a 32-bit Thumb instruction. The only way we can tell is to
4214 scan forwards from a known instruction boundary. */
4215 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
4216 {
4217 int definite;
4218
4219 /* There's a lot of code before this instruction. Start with an
4220 optimistic search; it's easy to recognize halfwords that can
4221 not be the start of a 32-bit instruction, and use that to
4222 lock on to the instruction boundaries. */
4223 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
4224 if (buf == NULL)
4225 return bpaddr;
4226 buf_len = IT_SCAN_THRESHOLD;
4227
4228 definite = 0;
4229 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
4230 {
4231 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4232 if (thumb_insn_size (inst1) == 2)
4233 {
4234 definite = 1;
4235 break;
4236 }
4237 }
4238
4239 /* At this point, if DEFINITE, BUF[I] is the first place we
4240 are sure that we know the instruction boundaries, and it is far
4241 enough from BPADDR that we could not miss an IT instruction
4242 affecting BPADDR. If ! DEFINITE, give up - start from a
4243 known boundary. */
4244 if (! definite)
4245 {
4246 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
4247 bpaddr - boundary);
4248 if (buf == NULL)
4249 return bpaddr;
4250 buf_len = bpaddr - boundary;
4251 i = 0;
4252 }
4253 }
4254 else
4255 {
4256 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
4257 if (buf == NULL)
4258 return bpaddr;
4259 buf_len = bpaddr - boundary;
4260 i = 0;
4261 }
4262
4263 /* Scan forwards. Find the last IT instruction before BPADDR. */
4264 last_it = -1;
4265 last_it_count = 0;
4266 while (i < buf_len)
4267 {
4268 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4269 last_it_count--;
4270 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4271 {
4272 last_it = i;
4273 if (inst1 & 0x0001)
4274 last_it_count = 4;
4275 else if (inst1 & 0x0002)
4276 last_it_count = 3;
4277 else if (inst1 & 0x0004)
4278 last_it_count = 2;
4279 else
4280 last_it_count = 1;
4281 }
4282 i += thumb_insn_size (inst1);
4283 }
4284
4285 xfree (buf);
4286
4287 if (last_it == -1)
4288 /* There wasn't really an IT instruction after all. */
4289 return bpaddr;
4290
4291 if (last_it_count < 1)
4292 /* It was too far away. */
4293 return bpaddr;
4294
4295 /* This really is a trouble spot. Move the breakpoint to the IT
4296 instruction. */
4297 return bpaddr - buf_len + last_it;
4298 }
4299
4300 /* ARM displaced stepping support.
4301
4302 Generally ARM displaced stepping works as follows:
4303
4304 1. When an instruction is to be single-stepped, it is first decoded by
4305 arm_process_displaced_insn. Depending on the type of instruction, it is
4306 then copied to a scratch location, possibly in a modified form. The
4307 copy_* set of functions performs such modification, as necessary. A
4308 breakpoint is placed after the modified instruction in the scratch space
4309 to return control to GDB. Note in particular that instructions which
4310 modify the PC will no longer do so after modification.
4311
4312 2. The instruction is single-stepped, by setting the PC to the scratch
4313 location address, and resuming. Control returns to GDB when the
4314 breakpoint is hit.
4315
4316 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
4317 function used for the current instruction. This function's job is to
4318 put the CPU/memory state back to what it would have been if the
4319 instruction had been executed unmodified in its original location. */
4320
4321 /* NOP instruction (mov r0, r0). */
4322 #define ARM_NOP 0xe1a00000
4323 #define THUMB_NOP 0x4600
4324
4325 /* Helper for register reads for displaced stepping. In particular, this
4326 returns the PC as it would be seen by the instruction at its original
4327 location. */
4328
4329 ULONGEST
4330 displaced_read_reg (struct regcache *regs, arm_displaced_step_closure *dsc,
4331 int regno)
4332 {
4333 ULONGEST ret;
4334 CORE_ADDR from = dsc->insn_addr;
4335
4336 if (regno == ARM_PC_REGNUM)
4337 {
4338 /* Compute pipeline offset:
4339 - When executing an ARM instruction, PC reads as the address of the
4340 current instruction plus 8.
4341 - When executing a Thumb instruction, PC reads as the address of the
4342 current instruction plus 4. */
4343
4344 if (!dsc->is_thumb)
4345 from += 8;
4346 else
4347 from += 4;
4348
4349 if (debug_displaced)
4350 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
4351 (unsigned long) from);
4352 return (ULONGEST) from;
4353 }
4354 else
4355 {
4356 regcache_cooked_read_unsigned (regs, regno, &ret);
4357 if (debug_displaced)
4358 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
4359 regno, (unsigned long) ret);
4360 return ret;
4361 }
4362 }
4363
4364 static int
4365 displaced_in_arm_mode (struct regcache *regs)
4366 {
4367 ULONGEST ps;
4368 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
4369
4370 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4371
4372 return (ps & t_bit) == 0;
4373 }
4374
4375 /* Write to the PC as from a branch instruction. */
4376
4377 static void
4378 branch_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4379 ULONGEST val)
4380 {
4381 if (!dsc->is_thumb)
4382 /* Note: If bits 0/1 are set, this branch would be unpredictable for
4383 architecture versions < 6. */
4384 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4385 val & ~(ULONGEST) 0x3);
4386 else
4387 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4388 val & ~(ULONGEST) 0x1);
4389 }
4390
4391 /* Write to the PC as from a branch-exchange instruction. */
4392
4393 static void
4394 bx_write_pc (struct regcache *regs, ULONGEST val)
4395 {
4396 ULONGEST ps;
4397 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
4398
4399 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4400
4401 if ((val & 1) == 1)
4402 {
4403 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
4404 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
4405 }
4406 else if ((val & 2) == 0)
4407 {
4408 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4409 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
4410 }
4411 else
4412 {
4413 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
4414 mode, align dest to 4 bytes). */
4415 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
4416 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4417 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
4418 }
4419 }
4420
4421 /* Write to the PC as if from a load instruction. */
4422
4423 static void
4424 load_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4425 ULONGEST val)
4426 {
4427 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
4428 bx_write_pc (regs, val);
4429 else
4430 branch_write_pc (regs, dsc, val);
4431 }
4432
4433 /* Write to the PC as if from an ALU instruction. */
4434
4435 static void
4436 alu_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4437 ULONGEST val)
4438 {
4439 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
4440 bx_write_pc (regs, val);
4441 else
4442 branch_write_pc (regs, dsc, val);
4443 }
4444
4445 /* Helper for writing to registers for displaced stepping. Writing to the PC
4446 has a varying effects depending on the instruction which does the write:
4447 this is controlled by the WRITE_PC argument. */
4448
4449 void
4450 displaced_write_reg (struct regcache *regs, arm_displaced_step_closure *dsc,
4451 int regno, ULONGEST val, enum pc_write_style write_pc)
4452 {
4453 if (regno == ARM_PC_REGNUM)
4454 {
4455 if (debug_displaced)
4456 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
4457 (unsigned long) val);
4458 switch (write_pc)
4459 {
4460 case BRANCH_WRITE_PC:
4461 branch_write_pc (regs, dsc, val);
4462 break;
4463
4464 case BX_WRITE_PC:
4465 bx_write_pc (regs, val);
4466 break;
4467
4468 case LOAD_WRITE_PC:
4469 load_write_pc (regs, dsc, val);
4470 break;
4471
4472 case ALU_WRITE_PC:
4473 alu_write_pc (regs, dsc, val);
4474 break;
4475
4476 case CANNOT_WRITE_PC:
4477 warning (_("Instruction wrote to PC in an unexpected way when "
4478 "single-stepping"));
4479 break;
4480
4481 default:
4482 internal_error (__FILE__, __LINE__,
4483 _("Invalid argument to displaced_write_reg"));
4484 }
4485
4486 dsc->wrote_to_pc = 1;
4487 }
4488 else
4489 {
4490 if (debug_displaced)
4491 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
4492 regno, (unsigned long) val);
4493 regcache_cooked_write_unsigned (regs, regno, val);
4494 }
4495 }
4496
4497 /* This function is used to concisely determine if an instruction INSN
4498 references PC. Register fields of interest in INSN should have the
4499 corresponding fields of BITMASK set to 0b1111. The function
4500 returns return 1 if any of these fields in INSN reference the PC
4501 (also 0b1111, r15), else it returns 0. */
4502
4503 static int
4504 insn_references_pc (uint32_t insn, uint32_t bitmask)
4505 {
4506 uint32_t lowbit = 1;
4507
4508 while (bitmask != 0)
4509 {
4510 uint32_t mask;
4511
4512 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
4513 ;
4514
4515 if (!lowbit)
4516 break;
4517
4518 mask = lowbit * 0xf;
4519
4520 if ((insn & mask) == mask)
4521 return 1;
4522
4523 bitmask &= ~mask;
4524 }
4525
4526 return 0;
4527 }
4528
4529 /* The simplest copy function. Many instructions have the same effect no
4530 matter what address they are executed at: in those cases, use this. */
4531
4532 static int
4533 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
4534 const char *iname, arm_displaced_step_closure *dsc)
4535 {
4536 if (debug_displaced)
4537 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
4538 "opcode/class '%s' unmodified\n", (unsigned long) insn,
4539 iname);
4540
4541 dsc->modinsn[0] = insn;
4542
4543 return 0;
4544 }
4545
4546 static int
4547 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
4548 uint16_t insn2, const char *iname,
4549 arm_displaced_step_closure *dsc)
4550 {
4551 if (debug_displaced)
4552 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
4553 "opcode/class '%s' unmodified\n", insn1, insn2,
4554 iname);
4555
4556 dsc->modinsn[0] = insn1;
4557 dsc->modinsn[1] = insn2;
4558 dsc->numinsns = 2;
4559
4560 return 0;
4561 }
4562
4563 /* Copy 16-bit Thumb(Thumb and 16-bit Thumb-2) instruction without any
4564 modification. */
4565 static int
4566 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, uint16_t insn,
4567 const char *iname,
4568 arm_displaced_step_closure *dsc)
4569 {
4570 if (debug_displaced)
4571 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
4572 "opcode/class '%s' unmodified\n", insn,
4573 iname);
4574
4575 dsc->modinsn[0] = insn;
4576
4577 return 0;
4578 }
4579
4580 /* Preload instructions with immediate offset. */
4581
4582 static void
4583 cleanup_preload (struct gdbarch *gdbarch,
4584 struct regcache *regs, arm_displaced_step_closure *dsc)
4585 {
4586 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4587 if (!dsc->u.preload.immed)
4588 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
4589 }
4590
4591 static void
4592 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
4593 arm_displaced_step_closure *dsc, unsigned int rn)
4594 {
4595 ULONGEST rn_val;
4596 /* Preload instructions:
4597
4598 {pli/pld} [rn, #+/-imm]
4599 ->
4600 {pli/pld} [r0, #+/-imm]. */
4601
4602 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4603 rn_val = displaced_read_reg (regs, dsc, rn);
4604 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4605 dsc->u.preload.immed = 1;
4606
4607 dsc->cleanup = &cleanup_preload;
4608 }
4609
4610 static int
4611 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
4612 arm_displaced_step_closure *dsc)
4613 {
4614 unsigned int rn = bits (insn, 16, 19);
4615
4616 if (!insn_references_pc (insn, 0x000f0000ul))
4617 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
4618
4619 if (debug_displaced)
4620 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4621 (unsigned long) insn);
4622
4623 dsc->modinsn[0] = insn & 0xfff0ffff;
4624
4625 install_preload (gdbarch, regs, dsc, rn);
4626
4627 return 0;
4628 }
4629
4630 static int
4631 thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
4632 struct regcache *regs, arm_displaced_step_closure *dsc)
4633 {
4634 unsigned int rn = bits (insn1, 0, 3);
4635 unsigned int u_bit = bit (insn1, 7);
4636 int imm12 = bits (insn2, 0, 11);
4637 ULONGEST pc_val;
4638
4639 if (rn != ARM_PC_REGNUM)
4640 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
4641
4642 /* PC is only allowed to use in PLI (immediate,literal) Encoding T3, and
4643 PLD (literal) Encoding T1. */
4644 if (debug_displaced)
4645 fprintf_unfiltered (gdb_stdlog,
4646 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
4647 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
4648 imm12);
4649
4650 if (!u_bit)
4651 imm12 = -1 * imm12;
4652
4653 /* Rewrite instruction {pli/pld} PC imm12 into:
4654 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
4655
4656 {pli/pld} [r0, r1]
4657
4658 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
4659
4660 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4661 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4662
4663 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
4664
4665 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
4666 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
4667 dsc->u.preload.immed = 0;
4668
4669 /* {pli/pld} [r0, r1] */
4670 dsc->modinsn[0] = insn1 & 0xfff0;
4671 dsc->modinsn[1] = 0xf001;
4672 dsc->numinsns = 2;
4673
4674 dsc->cleanup = &cleanup_preload;
4675 return 0;
4676 }
4677
4678 /* Preload instructions with register offset. */
4679
4680 static void
4681 install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
4682 arm_displaced_step_closure *dsc, unsigned int rn,
4683 unsigned int rm)
4684 {
4685 ULONGEST rn_val, rm_val;
4686
4687 /* Preload register-offset instructions:
4688
4689 {pli/pld} [rn, rm {, shift}]
4690 ->
4691 {pli/pld} [r0, r1 {, shift}]. */
4692
4693 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4694 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4695 rn_val = displaced_read_reg (regs, dsc, rn);
4696 rm_val = displaced_read_reg (regs, dsc, rm);
4697 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4698 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
4699 dsc->u.preload.immed = 0;
4700
4701 dsc->cleanup = &cleanup_preload;
4702 }
4703
4704 static int
4705 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
4706 struct regcache *regs,
4707 arm_displaced_step_closure *dsc)
4708 {
4709 unsigned int rn = bits (insn, 16, 19);
4710 unsigned int rm = bits (insn, 0, 3);
4711
4712
4713 if (!insn_references_pc (insn, 0x000f000ful))
4714 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
4715
4716 if (debug_displaced)
4717 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4718 (unsigned long) insn);
4719
4720 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
4721
4722 install_preload_reg (gdbarch, regs, dsc, rn, rm);
4723 return 0;
4724 }
4725
4726 /* Copy/cleanup coprocessor load and store instructions. */
4727
4728 static void
4729 cleanup_copro_load_store (struct gdbarch *gdbarch,
4730 struct regcache *regs,
4731 arm_displaced_step_closure *dsc)
4732 {
4733 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
4734
4735 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4736
4737 if (dsc->u.ldst.writeback)
4738 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
4739 }
4740
4741 static void
4742 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
4743 arm_displaced_step_closure *dsc,
4744 int writeback, unsigned int rn)
4745 {
4746 ULONGEST rn_val;
4747
4748 /* Coprocessor load/store instructions:
4749
4750 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
4751 ->
4752 {stc/stc2} [r0, #+/-imm].
4753
4754 ldc/ldc2 are handled identically. */
4755
4756 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4757 rn_val = displaced_read_reg (regs, dsc, rn);
4758 /* PC should be 4-byte aligned. */
4759 rn_val = rn_val & 0xfffffffc;
4760 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4761
4762 dsc->u.ldst.writeback = writeback;
4763 dsc->u.ldst.rn = rn;
4764
4765 dsc->cleanup = &cleanup_copro_load_store;
4766 }
4767
4768 static int
4769 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
4770 struct regcache *regs,
4771 arm_displaced_step_closure *dsc)
4772 {
4773 unsigned int rn = bits (insn, 16, 19);
4774
4775 if (!insn_references_pc (insn, 0x000f0000ul))
4776 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
4777
4778 if (debug_displaced)
4779 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4780 "load/store insn %.8lx\n", (unsigned long) insn);
4781
4782 dsc->modinsn[0] = insn & 0xfff0ffff;
4783
4784 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
4785
4786 return 0;
4787 }
4788
4789 static int
4790 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
4791 uint16_t insn2, struct regcache *regs,
4792 arm_displaced_step_closure *dsc)
4793 {
4794 unsigned int rn = bits (insn1, 0, 3);
4795
4796 if (rn != ARM_PC_REGNUM)
4797 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
4798 "copro load/store", dsc);
4799
4800 if (debug_displaced)
4801 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4802 "load/store insn %.4x%.4x\n", insn1, insn2);
4803
4804 dsc->modinsn[0] = insn1 & 0xfff0;
4805 dsc->modinsn[1] = insn2;
4806 dsc->numinsns = 2;
4807
4808 /* This function is called for copying instruction LDC/LDC2/VLDR, which
4809 doesn't support writeback, so pass 0. */
4810 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
4811
4812 return 0;
4813 }
4814
4815 /* Clean up branch instructions (actually perform the branch, by setting
4816 PC). */
4817
4818 static void
4819 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
4820 arm_displaced_step_closure *dsc)
4821 {
4822 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
4823 int branch_taken = condition_true (dsc->u.branch.cond, status);
4824 enum pc_write_style write_pc = dsc->u.branch.exchange
4825 ? BX_WRITE_PC : BRANCH_WRITE_PC;
4826
4827 if (!branch_taken)
4828 return;
4829
4830 if (dsc->u.branch.link)
4831 {
4832 /* The value of LR should be the next insn of current one. In order
4833 not to confuse logic hanlding later insn `bx lr', if current insn mode
4834 is Thumb, the bit 0 of LR value should be set to 1. */
4835 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
4836
4837 if (dsc->is_thumb)
4838 next_insn_addr |= 0x1;
4839
4840 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
4841 CANNOT_WRITE_PC);
4842 }
4843
4844 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
4845 }
4846
4847 /* Copy B/BL/BLX instructions with immediate destinations. */
4848
4849 static void
4850 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
4851 arm_displaced_step_closure *dsc,
4852 unsigned int cond, int exchange, int link, long offset)
4853 {
4854 /* Implement "BL<cond> <label>" as:
4855
4856 Preparation: cond <- instruction condition
4857 Insn: mov r0, r0 (nop)
4858 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
4859
4860 B<cond> similar, but don't set r14 in cleanup. */
4861
4862 dsc->u.branch.cond = cond;
4863 dsc->u.branch.link = link;
4864 dsc->u.branch.exchange = exchange;
4865
4866 dsc->u.branch.dest = dsc->insn_addr;
4867 if (link && exchange)
4868 /* For BLX, offset is computed from the Align (PC, 4). */
4869 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
4870
4871 if (dsc->is_thumb)
4872 dsc->u.branch.dest += 4 + offset;
4873 else
4874 dsc->u.branch.dest += 8 + offset;
4875
4876 dsc->cleanup = &cleanup_branch;
4877 }
4878 static int
4879 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
4880 struct regcache *regs, arm_displaced_step_closure *dsc)
4881 {
4882 unsigned int cond = bits (insn, 28, 31);
4883 int exchange = (cond == 0xf);
4884 int link = exchange || bit (insn, 24);
4885 long offset;
4886
4887 if (debug_displaced)
4888 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
4889 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
4890 (unsigned long) insn);
4891 if (exchange)
4892 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
4893 then arrange the switch into Thumb mode. */
4894 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
4895 else
4896 offset = bits (insn, 0, 23) << 2;
4897
4898 if (bit (offset, 25))
4899 offset = offset | ~0x3ffffff;
4900
4901 dsc->modinsn[0] = ARM_NOP;
4902
4903 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
4904 return 0;
4905 }
4906
4907 static int
4908 thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
4909 uint16_t insn2, struct regcache *regs,
4910 arm_displaced_step_closure *dsc)
4911 {
4912 int link = bit (insn2, 14);
4913 int exchange = link && !bit (insn2, 12);
4914 int cond = INST_AL;
4915 long offset = 0;
4916 int j1 = bit (insn2, 13);
4917 int j2 = bit (insn2, 11);
4918 int s = sbits (insn1, 10, 10);
4919 int i1 = !(j1 ^ bit (insn1, 10));
4920 int i2 = !(j2 ^ bit (insn1, 10));
4921
4922 if (!link && !exchange) /* B */
4923 {
4924 offset = (bits (insn2, 0, 10) << 1);
4925 if (bit (insn2, 12)) /* Encoding T4 */
4926 {
4927 offset |= (bits (insn1, 0, 9) << 12)
4928 | (i2 << 22)
4929 | (i1 << 23)
4930 | (s << 24);
4931 cond = INST_AL;
4932 }
4933 else /* Encoding T3 */
4934 {
4935 offset |= (bits (insn1, 0, 5) << 12)
4936 | (j1 << 18)
4937 | (j2 << 19)
4938 | (s << 20);
4939 cond = bits (insn1, 6, 9);
4940 }
4941 }
4942 else
4943 {
4944 offset = (bits (insn1, 0, 9) << 12);
4945 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
4946 offset |= exchange ?
4947 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
4948 }
4949
4950 if (debug_displaced)
4951 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
4952 "%.4x %.4x with offset %.8lx\n",
4953 link ? (exchange) ? "blx" : "bl" : "b",
4954 insn1, insn2, offset);
4955
4956 dsc->modinsn[0] = THUMB_NOP;
4957
4958 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
4959 return 0;
4960 }
4961
4962 /* Copy B Thumb instructions. */
4963 static int
4964 thumb_copy_b (struct gdbarch *gdbarch, uint16_t insn,
4965 arm_displaced_step_closure *dsc)
4966 {
4967 unsigned int cond = 0;
4968 int offset = 0;
4969 unsigned short bit_12_15 = bits (insn, 12, 15);
4970 CORE_ADDR from = dsc->insn_addr;
4971
4972 if (bit_12_15 == 0xd)
4973 {
4974 /* offset = SignExtend (imm8:0, 32) */
4975 offset = sbits ((insn << 1), 0, 8);
4976 cond = bits (insn, 8, 11);
4977 }
4978 else if (bit_12_15 == 0xe) /* Encoding T2 */
4979 {
4980 offset = sbits ((insn << 1), 0, 11);
4981 cond = INST_AL;
4982 }
4983
4984 if (debug_displaced)
4985 fprintf_unfiltered (gdb_stdlog,
4986 "displaced: copying b immediate insn %.4x "
4987 "with offset %d\n", insn, offset);
4988
4989 dsc->u.branch.cond = cond;
4990 dsc->u.branch.link = 0;
4991 dsc->u.branch.exchange = 0;
4992 dsc->u.branch.dest = from + 4 + offset;
4993
4994 dsc->modinsn[0] = THUMB_NOP;
4995
4996 dsc->cleanup = &cleanup_branch;
4997
4998 return 0;
4999 }
5000
5001 /* Copy BX/BLX with register-specified destinations. */
5002
5003 static void
5004 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
5005 arm_displaced_step_closure *dsc, int link,
5006 unsigned int cond, unsigned int rm)
5007 {
5008 /* Implement {BX,BLX}<cond> <reg>" as:
5009
5010 Preparation: cond <- instruction condition
5011 Insn: mov r0, r0 (nop)
5012 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5013
5014 Don't set r14 in cleanup for BX. */
5015
5016 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
5017
5018 dsc->u.branch.cond = cond;
5019 dsc->u.branch.link = link;
5020
5021 dsc->u.branch.exchange = 1;
5022
5023 dsc->cleanup = &cleanup_branch;
5024 }
5025
5026 static int
5027 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
5028 struct regcache *regs, arm_displaced_step_closure *dsc)
5029 {
5030 unsigned int cond = bits (insn, 28, 31);
5031 /* BX: x12xxx1x
5032 BLX: x12xxx3x. */
5033 int link = bit (insn, 5);
5034 unsigned int rm = bits (insn, 0, 3);
5035
5036 if (debug_displaced)
5037 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
5038 (unsigned long) insn);
5039
5040 dsc->modinsn[0] = ARM_NOP;
5041
5042 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
5043 return 0;
5044 }
5045
5046 static int
5047 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
5048 struct regcache *regs,
5049 arm_displaced_step_closure *dsc)
5050 {
5051 int link = bit (insn, 7);
5052 unsigned int rm = bits (insn, 3, 6);
5053
5054 if (debug_displaced)
5055 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
5056 (unsigned short) insn);
5057
5058 dsc->modinsn[0] = THUMB_NOP;
5059
5060 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
5061
5062 return 0;
5063 }
5064
5065
5066 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
5067
5068 static void
5069 cleanup_alu_imm (struct gdbarch *gdbarch,
5070 struct regcache *regs, arm_displaced_step_closure *dsc)
5071 {
5072 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5073 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5074 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5075 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5076 }
5077
5078 static int
5079 arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5080 arm_displaced_step_closure *dsc)
5081 {
5082 unsigned int rn = bits (insn, 16, 19);
5083 unsigned int rd = bits (insn, 12, 15);
5084 unsigned int op = bits (insn, 21, 24);
5085 int is_mov = (op == 0xd);
5086 ULONGEST rd_val, rn_val;
5087
5088 if (!insn_references_pc (insn, 0x000ff000ul))
5089 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
5090
5091 if (debug_displaced)
5092 fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
5093 "%.8lx\n", is_mov ? "move" : "ALU",
5094 (unsigned long) insn);
5095
5096 /* Instruction is of form:
5097
5098 <op><cond> rd, [rn,] #imm
5099
5100 Rewrite as:
5101
5102 Preparation: tmp1, tmp2 <- r0, r1;
5103 r0, r1 <- rd, rn
5104 Insn: <op><cond> r0, r1, #imm
5105 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5106 */
5107
5108 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5109 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5110 rn_val = displaced_read_reg (regs, dsc, rn);
5111 rd_val = displaced_read_reg (regs, dsc, rd);
5112 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5113 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5114 dsc->rd = rd;
5115
5116 if (is_mov)
5117 dsc->modinsn[0] = insn & 0xfff00fff;
5118 else
5119 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
5120
5121 dsc->cleanup = &cleanup_alu_imm;
5122
5123 return 0;
5124 }
5125
5126 static int
5127 thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
5128 uint16_t insn2, struct regcache *regs,
5129 arm_displaced_step_closure *dsc)
5130 {
5131 unsigned int op = bits (insn1, 5, 8);
5132 unsigned int rn, rm, rd;
5133 ULONGEST rd_val, rn_val;
5134
5135 rn = bits (insn1, 0, 3); /* Rn */
5136 rm = bits (insn2, 0, 3); /* Rm */
5137 rd = bits (insn2, 8, 11); /* Rd */
5138
5139 /* This routine is only called for instruction MOV. */
5140 gdb_assert (op == 0x2 && rn == 0xf);
5141
5142 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
5143 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
5144
5145 if (debug_displaced)
5146 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
5147 "ALU", insn1, insn2);
5148
5149 /* Instruction is of form:
5150
5151 <op><cond> rd, [rn,] #imm
5152
5153 Rewrite as:
5154
5155 Preparation: tmp1, tmp2 <- r0, r1;
5156 r0, r1 <- rd, rn
5157 Insn: <op><cond> r0, r1, #imm
5158 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5159 */
5160
5161 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5162 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5163 rn_val = displaced_read_reg (regs, dsc, rn);
5164 rd_val = displaced_read_reg (regs, dsc, rd);
5165 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5166 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5167 dsc->rd = rd;
5168
5169 dsc->modinsn[0] = insn1;
5170 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
5171 dsc->numinsns = 2;
5172
5173 dsc->cleanup = &cleanup_alu_imm;
5174
5175 return 0;
5176 }
5177
5178 /* Copy/cleanup arithmetic/logic insns with register RHS. */
5179
5180 static void
5181 cleanup_alu_reg (struct gdbarch *gdbarch,
5182 struct regcache *regs, arm_displaced_step_closure *dsc)
5183 {
5184 ULONGEST rd_val;
5185 int i;
5186
5187 rd_val = displaced_read_reg (regs, dsc, 0);
5188
5189 for (i = 0; i < 3; i++)
5190 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5191
5192 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5193 }
5194
5195 static void
5196 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
5197 arm_displaced_step_closure *dsc,
5198 unsigned int rd, unsigned int rn, unsigned int rm)
5199 {
5200 ULONGEST rd_val, rn_val, rm_val;
5201
5202 /* Instruction is of form:
5203
5204 <op><cond> rd, [rn,] rm [, <shift>]
5205
5206 Rewrite as:
5207
5208 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
5209 r0, r1, r2 <- rd, rn, rm
5210 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
5211 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
5212 */
5213
5214 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5215 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5216 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5217 rd_val = displaced_read_reg (regs, dsc, rd);
5218 rn_val = displaced_read_reg (regs, dsc, rn);
5219 rm_val = displaced_read_reg (regs, dsc, rm);
5220 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5221 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5222 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5223 dsc->rd = rd;
5224
5225 dsc->cleanup = &cleanup_alu_reg;
5226 }
5227
5228 static int
5229 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5230 arm_displaced_step_closure *dsc)
5231 {
5232 unsigned int op = bits (insn, 21, 24);
5233 int is_mov = (op == 0xd);
5234
5235 if (!insn_references_pc (insn, 0x000ff00ful))
5236 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
5237
5238 if (debug_displaced)
5239 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
5240 is_mov ? "move" : "ALU", (unsigned long) insn);
5241
5242 if (is_mov)
5243 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
5244 else
5245 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
5246
5247 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
5248 bits (insn, 0, 3));
5249 return 0;
5250 }
5251
5252 static int
5253 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
5254 struct regcache *regs,
5255 arm_displaced_step_closure *dsc)
5256 {
5257 unsigned rm, rd;
5258
5259 rm = bits (insn, 3, 6);
5260 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
5261
5262 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
5263 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
5264
5265 if (debug_displaced)
5266 fprintf_unfiltered (gdb_stdlog, "displaced: copying ALU reg insn %.4x\n",
5267 (unsigned short) insn);
5268
5269 dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
5270
5271 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
5272
5273 return 0;
5274 }
5275
5276 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
5277
5278 static void
5279 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
5280 struct regcache *regs,
5281 arm_displaced_step_closure *dsc)
5282 {
5283 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5284 int i;
5285
5286 for (i = 0; i < 4; i++)
5287 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5288
5289 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5290 }
5291
5292 static void
5293 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
5294 arm_displaced_step_closure *dsc,
5295 unsigned int rd, unsigned int rn, unsigned int rm,
5296 unsigned rs)
5297 {
5298 int i;
5299 ULONGEST rd_val, rn_val, rm_val, rs_val;
5300
5301 /* Instruction is of form:
5302
5303 <op><cond> rd, [rn,] rm, <shift> rs
5304
5305 Rewrite as:
5306
5307 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
5308 r0, r1, r2, r3 <- rd, rn, rm, rs
5309 Insn: <op><cond> r0, r1, r2, <shift> r3
5310 Cleanup: tmp5 <- r0
5311 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
5312 rd <- tmp5
5313 */
5314
5315 for (i = 0; i < 4; i++)
5316 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
5317
5318 rd_val = displaced_read_reg (regs, dsc, rd);
5319 rn_val = displaced_read_reg (regs, dsc, rn);
5320 rm_val = displaced_read_reg (regs, dsc, rm);
5321 rs_val = displaced_read_reg (regs, dsc, rs);
5322 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5323 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5324 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5325 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
5326 dsc->rd = rd;
5327 dsc->cleanup = &cleanup_alu_shifted_reg;
5328 }
5329
5330 static int
5331 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
5332 struct regcache *regs,
5333 arm_displaced_step_closure *dsc)
5334 {
5335 unsigned int op = bits (insn, 21, 24);
5336 int is_mov = (op == 0xd);
5337 unsigned int rd, rn, rm, rs;
5338
5339 if (!insn_references_pc (insn, 0x000fff0ful))
5340 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
5341
5342 if (debug_displaced)
5343 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
5344 "%.8lx\n", is_mov ? "move" : "ALU",
5345 (unsigned long) insn);
5346
5347 rn = bits (insn, 16, 19);
5348 rm = bits (insn, 0, 3);
5349 rs = bits (insn, 8, 11);
5350 rd = bits (insn, 12, 15);
5351
5352 if (is_mov)
5353 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
5354 else
5355 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
5356
5357 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
5358
5359 return 0;
5360 }
5361
5362 /* Clean up load instructions. */
5363
5364 static void
5365 cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
5366 arm_displaced_step_closure *dsc)
5367 {
5368 ULONGEST rt_val, rt_val2 = 0, rn_val;
5369
5370 rt_val = displaced_read_reg (regs, dsc, 0);
5371 if (dsc->u.ldst.xfersize == 8)
5372 rt_val2 = displaced_read_reg (regs, dsc, 1);
5373 rn_val = displaced_read_reg (regs, dsc, 2);
5374
5375 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5376 if (dsc->u.ldst.xfersize > 4)
5377 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5378 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5379 if (!dsc->u.ldst.immed)
5380 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5381
5382 /* Handle register writeback. */
5383 if (dsc->u.ldst.writeback)
5384 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5385 /* Put result in right place. */
5386 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
5387 if (dsc->u.ldst.xfersize == 8)
5388 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
5389 }
5390
5391 /* Clean up store instructions. */
5392
5393 static void
5394 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
5395 arm_displaced_step_closure *dsc)
5396 {
5397 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
5398
5399 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5400 if (dsc->u.ldst.xfersize > 4)
5401 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5402 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5403 if (!dsc->u.ldst.immed)
5404 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5405 if (!dsc->u.ldst.restore_r4)
5406 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
5407
5408 /* Writeback. */
5409 if (dsc->u.ldst.writeback)
5410 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5411 }
5412
5413 /* Copy "extra" load/store instructions. These are halfword/doubleword
5414 transfers, which have a different encoding to byte/word transfers. */
5415
5416 static int
5417 arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
5418 struct regcache *regs, arm_displaced_step_closure *dsc)
5419 {
5420 unsigned int op1 = bits (insn, 20, 24);
5421 unsigned int op2 = bits (insn, 5, 6);
5422 unsigned int rt = bits (insn, 12, 15);
5423 unsigned int rn = bits (insn, 16, 19);
5424 unsigned int rm = bits (insn, 0, 3);
5425 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
5426 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
5427 int immed = (op1 & 0x4) != 0;
5428 int opcode;
5429 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
5430
5431 if (!insn_references_pc (insn, 0x000ff00ful))
5432 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
5433
5434 if (debug_displaced)
5435 fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
5436 "insn %.8lx\n", unprivileged ? "unprivileged " : "",
5437 (unsigned long) insn);
5438
5439 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
5440
5441 if (opcode < 0)
5442 internal_error (__FILE__, __LINE__,
5443 _("copy_extra_ld_st: instruction decode error"));
5444
5445 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5446 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5447 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5448 if (!immed)
5449 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5450
5451 rt_val = displaced_read_reg (regs, dsc, rt);
5452 if (bytesize[opcode] == 8)
5453 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
5454 rn_val = displaced_read_reg (regs, dsc, rn);
5455 if (!immed)
5456 rm_val = displaced_read_reg (regs, dsc, rm);
5457
5458 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5459 if (bytesize[opcode] == 8)
5460 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
5461 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5462 if (!immed)
5463 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5464
5465 dsc->rd = rt;
5466 dsc->u.ldst.xfersize = bytesize[opcode];
5467 dsc->u.ldst.rn = rn;
5468 dsc->u.ldst.immed = immed;
5469 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
5470 dsc->u.ldst.restore_r4 = 0;
5471
5472 if (immed)
5473 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
5474 ->
5475 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
5476 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5477 else
5478 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
5479 ->
5480 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
5481 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5482
5483 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
5484
5485 return 0;
5486 }
5487
5488 /* Copy byte/half word/word loads and stores. */
5489
5490 static void
5491 install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5492 arm_displaced_step_closure *dsc, int load,
5493 int immed, int writeback, int size, int usermode,
5494 int rt, int rm, int rn)
5495 {
5496 ULONGEST rt_val, rn_val, rm_val = 0;
5497
5498 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5499 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5500 if (!immed)
5501 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5502 if (!load)
5503 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
5504
5505 rt_val = displaced_read_reg (regs, dsc, rt);
5506 rn_val = displaced_read_reg (regs, dsc, rn);
5507 if (!immed)
5508 rm_val = displaced_read_reg (regs, dsc, rm);
5509
5510 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5511 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5512 if (!immed)
5513 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5514 dsc->rd = rt;
5515 dsc->u.ldst.xfersize = size;
5516 dsc->u.ldst.rn = rn;
5517 dsc->u.ldst.immed = immed;
5518 dsc->u.ldst.writeback = writeback;
5519
5520 /* To write PC we can do:
5521
5522 Before this sequence of instructions:
5523 r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
5524 r2 is the Rn value got from dispalced_read_reg.
5525
5526 Insn1: push {pc} Write address of STR instruction + offset on stack
5527 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
5528 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
5529 = addr(Insn1) + offset - addr(Insn3) - 8
5530 = offset - 16
5531 Insn4: add r4, r4, #8 r4 = offset - 8
5532 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
5533 = from + offset
5534 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
5535
5536 Otherwise we don't know what value to write for PC, since the offset is
5537 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
5538 of this can be found in Section "Saving from r15" in
5539 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
5540
5541 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5542 }
5543
5544
5545 static int
5546 thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
5547 uint16_t insn2, struct regcache *regs,
5548 arm_displaced_step_closure *dsc, int size)
5549 {
5550 unsigned int u_bit = bit (insn1, 7);
5551 unsigned int rt = bits (insn2, 12, 15);
5552 int imm12 = bits (insn2, 0, 11);
5553 ULONGEST pc_val;
5554
5555 if (debug_displaced)
5556 fprintf_unfiltered (gdb_stdlog,
5557 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
5558 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
5559 imm12);
5560
5561 if (!u_bit)
5562 imm12 = -1 * imm12;
5563
5564 /* Rewrite instruction LDR Rt imm12 into:
5565
5566 Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12
5567
5568 LDR R0, R2, R3,
5569
5570 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2]. */
5571
5572
5573 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5574 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5575 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5576
5577 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5578
5579 pc_val = pc_val & 0xfffffffc;
5580
5581 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
5582 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
5583
5584 dsc->rd = rt;
5585
5586 dsc->u.ldst.xfersize = size;
5587 dsc->u.ldst.immed = 0;
5588 dsc->u.ldst.writeback = 0;
5589 dsc->u.ldst.restore_r4 = 0;
5590
5591 /* LDR R0, R2, R3 */
5592 dsc->modinsn[0] = 0xf852;
5593 dsc->modinsn[1] = 0x3;
5594 dsc->numinsns = 2;
5595
5596 dsc->cleanup = &cleanup_load;
5597
5598 return 0;
5599 }
5600
5601 static int
5602 thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
5603 uint16_t insn2, struct regcache *regs,
5604 arm_displaced_step_closure *dsc,
5605 int writeback, int immed)
5606 {
5607 unsigned int rt = bits (insn2, 12, 15);
5608 unsigned int rn = bits (insn1, 0, 3);
5609 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
5610 /* In LDR (register), there is also a register Rm, which is not allowed to
5611 be PC, so we don't have to check it. */
5612
5613 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
5614 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
5615 dsc);
5616
5617 if (debug_displaced)
5618 fprintf_unfiltered (gdb_stdlog,
5619 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
5620 rt, rn, insn1, insn2);
5621
5622 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
5623 0, rt, rm, rn);
5624
5625 dsc->u.ldst.restore_r4 = 0;
5626
5627 if (immed)
5628 /* ldr[b]<cond> rt, [rn, #imm], etc.
5629 ->
5630 ldr[b]<cond> r0, [r2, #imm]. */
5631 {
5632 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5633 dsc->modinsn[1] = insn2 & 0x0fff;
5634 }
5635 else
5636 /* ldr[b]<cond> rt, [rn, rm], etc.
5637 ->
5638 ldr[b]<cond> r0, [r2, r3]. */
5639 {
5640 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5641 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
5642 }
5643
5644 dsc->numinsns = 2;
5645
5646 return 0;
5647 }
5648
5649
5650 static int
5651 arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
5652 struct regcache *regs,
5653 arm_displaced_step_closure *dsc,
5654 int load, int size, int usermode)
5655 {
5656 int immed = !bit (insn, 25);
5657 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
5658 unsigned int rt = bits (insn, 12, 15);
5659 unsigned int rn = bits (insn, 16, 19);
5660 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
5661
5662 if (!insn_references_pc (insn, 0x000ff00ful))
5663 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
5664
5665 if (debug_displaced)
5666 fprintf_unfiltered (gdb_stdlog,
5667 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
5668 load ? (size == 1 ? "ldrb" : "ldr")
5669 : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
5670 rt, rn,
5671 (unsigned long) insn);
5672
5673 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
5674 usermode, rt, rm, rn);
5675
5676 if (load || rt != ARM_PC_REGNUM)
5677 {
5678 dsc->u.ldst.restore_r4 = 0;
5679
5680 if (immed)
5681 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
5682 ->
5683 {ldr,str}[b]<cond> r0, [r2, #imm]. */
5684 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5685 else
5686 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
5687 ->
5688 {ldr,str}[b]<cond> r0, [r2, r3]. */
5689 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5690 }
5691 else
5692 {
5693 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
5694 dsc->u.ldst.restore_r4 = 1;
5695 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
5696 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
5697 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
5698 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
5699 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
5700
5701 /* As above. */
5702 if (immed)
5703 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
5704 else
5705 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
5706
5707 dsc->numinsns = 6;
5708 }
5709
5710 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5711
5712 return 0;
5713 }
5714
5715 /* Cleanup LDM instructions with fully-populated register list. This is an
5716 unfortunate corner case: it's impossible to implement correctly by modifying
5717 the instruction. The issue is as follows: we have an instruction,
5718
5719 ldm rN, {r0-r15}
5720
5721 which we must rewrite to avoid loading PC. A possible solution would be to
5722 do the load in two halves, something like (with suitable cleanup
5723 afterwards):
5724
5725 mov r8, rN
5726 ldm[id][ab] r8!, {r0-r7}
5727 str r7, <temp>
5728 ldm[id][ab] r8, {r7-r14}
5729 <bkpt>
5730
5731 but at present there's no suitable place for <temp>, since the scratch space
5732 is overwritten before the cleanup routine is called. For now, we simply
5733 emulate the instruction. */
5734
5735 static void
5736 cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
5737 arm_displaced_step_closure *dsc)
5738 {
5739 int inc = dsc->u.block.increment;
5740 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
5741 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
5742 uint32_t regmask = dsc->u.block.regmask;
5743 int regno = inc ? 0 : 15;
5744 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
5745 int exception_return = dsc->u.block.load && dsc->u.block.user
5746 && (regmask & 0x8000) != 0;
5747 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5748 int do_transfer = condition_true (dsc->u.block.cond, status);
5749 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5750
5751 if (!do_transfer)
5752 return;
5753
5754 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
5755 sensible we can do here. Complain loudly. */
5756 if (exception_return)
5757 error (_("Cannot single-step exception return"));
5758
5759 /* We don't handle any stores here for now. */
5760 gdb_assert (dsc->u.block.load != 0);
5761
5762 if (debug_displaced)
5763 fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
5764 "%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
5765 dsc->u.block.increment ? "inc" : "dec",
5766 dsc->u.block.before ? "before" : "after");
5767
5768 while (regmask)
5769 {
5770 uint32_t memword;
5771
5772 if (inc)
5773 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
5774 regno++;
5775 else
5776 while (regno >= 0 && (regmask & (1 << regno)) == 0)
5777 regno--;
5778
5779 xfer_addr += bump_before;
5780
5781 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
5782 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
5783
5784 xfer_addr += bump_after;
5785
5786 regmask &= ~(1 << regno);
5787 }
5788
5789 if (dsc->u.block.writeback)
5790 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
5791 CANNOT_WRITE_PC);
5792 }
5793
5794 /* Clean up an STM which included the PC in the register list. */
5795
5796 static void
5797 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
5798 arm_displaced_step_closure *dsc)
5799 {
5800 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5801 int store_executed = condition_true (dsc->u.block.cond, status);
5802 CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
5803 CORE_ADDR stm_insn_addr;
5804 uint32_t pc_val;
5805 long offset;
5806 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5807
5808 /* If condition code fails, there's nothing else to do. */
5809 if (!store_executed)
5810 return;
5811
5812 if (dsc->u.block.increment)
5813 {
5814 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
5815
5816 if (dsc->u.block.before)
5817 pc_stored_at += 4;
5818 }
5819 else
5820 {
5821 pc_stored_at = dsc->u.block.xfer_addr;
5822
5823 if (dsc->u.block.before)
5824 pc_stored_at -= 4;
5825 }
5826
5827 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
5828 stm_insn_addr = dsc->scratch_base;
5829 offset = pc_val - stm_insn_addr;
5830
5831 if (debug_displaced)
5832 fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
5833 "STM instruction\n", offset);
5834
5835 /* Rewrite the stored PC to the proper value for the non-displaced original
5836 instruction. */
5837 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
5838 dsc->insn_addr + offset);
5839 }
5840
5841 /* Clean up an LDM which includes the PC in the register list. We clumped all
5842 the registers in the transferred list into a contiguous range r0...rX (to
5843 avoid loading PC directly and losing control of the debugged program), so we
5844 must undo that here. */
5845
5846 static void
5847 cleanup_block_load_pc (struct gdbarch *gdbarch,
5848 struct regcache *regs,
5849 arm_displaced_step_closure *dsc)
5850 {
5851 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5852 int load_executed = condition_true (dsc->u.block.cond, status);
5853 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
5854 unsigned int regs_loaded = bitcount (mask);
5855 unsigned int num_to_shuffle = regs_loaded, clobbered;
5856
5857 /* The method employed here will fail if the register list is fully populated
5858 (we need to avoid loading PC directly). */
5859 gdb_assert (num_to_shuffle < 16);
5860
5861 if (!load_executed)
5862 return;
5863
5864 clobbered = (1 << num_to_shuffle) - 1;
5865
5866 while (num_to_shuffle > 0)
5867 {
5868 if ((mask & (1 << write_reg)) != 0)
5869 {
5870 unsigned int read_reg = num_to_shuffle - 1;
5871
5872 if (read_reg != write_reg)
5873 {
5874 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
5875 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
5876 if (debug_displaced)
5877 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
5878 "loaded register r%d to r%d\n"), read_reg,
5879 write_reg);
5880 }
5881 else if (debug_displaced)
5882 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
5883 "r%d already in the right place\n"),
5884 write_reg);
5885
5886 clobbered &= ~(1 << write_reg);
5887
5888 num_to_shuffle--;
5889 }
5890
5891 write_reg--;
5892 }
5893
5894 /* Restore any registers we scribbled over. */
5895 for (write_reg = 0; clobbered != 0; write_reg++)
5896 {
5897 if ((clobbered & (1 << write_reg)) != 0)
5898 {
5899 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
5900 CANNOT_WRITE_PC);
5901 if (debug_displaced)
5902 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
5903 "clobbered register r%d\n"), write_reg);
5904 clobbered &= ~(1 << write_reg);
5905 }
5906 }
5907
5908 /* Perform register writeback manually. */
5909 if (dsc->u.block.writeback)
5910 {
5911 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
5912
5913 if (dsc->u.block.increment)
5914 new_rn_val += regs_loaded * 4;
5915 else
5916 new_rn_val -= regs_loaded * 4;
5917
5918 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
5919 CANNOT_WRITE_PC);
5920 }
5921 }
5922
5923 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
5924 in user-level code (in particular exception return, ldm rn, {...pc}^). */
5925
5926 static int
5927 arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
5928 struct regcache *regs,
5929 arm_displaced_step_closure *dsc)
5930 {
5931 int load = bit (insn, 20);
5932 int user = bit (insn, 22);
5933 int increment = bit (insn, 23);
5934 int before = bit (insn, 24);
5935 int writeback = bit (insn, 21);
5936 int rn = bits (insn, 16, 19);
5937
5938 /* Block transfers which don't mention PC can be run directly
5939 out-of-line. */
5940 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
5941 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
5942
5943 if (rn == ARM_PC_REGNUM)
5944 {
5945 warning (_("displaced: Unpredictable LDM or STM with "
5946 "base register r15"));
5947 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
5948 }
5949
5950 if (debug_displaced)
5951 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
5952 "%.8lx\n", (unsigned long) insn);
5953
5954 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
5955 dsc->u.block.rn = rn;
5956
5957 dsc->u.block.load = load;
5958 dsc->u.block.user = user;
5959 dsc->u.block.increment = increment;
5960 dsc->u.block.before = before;
5961 dsc->u.block.writeback = writeback;
5962 dsc->u.block.cond = bits (insn, 28, 31);
5963
5964 dsc->u.block.regmask = insn & 0xffff;
5965
5966 if (load)
5967 {
5968 if ((insn & 0xffff) == 0xffff)
5969 {
5970 /* LDM with a fully-populated register list. This case is
5971 particularly tricky. Implement for now by fully emulating the
5972 instruction (which might not behave perfectly in all cases, but
5973 these instructions should be rare enough for that not to matter
5974 too much). */
5975 dsc->modinsn[0] = ARM_NOP;
5976
5977 dsc->cleanup = &cleanup_block_load_all;
5978 }
5979 else
5980 {
5981 /* LDM of a list of registers which includes PC. Implement by
5982 rewriting the list of registers to be transferred into a
5983 contiguous chunk r0...rX before doing the transfer, then shuffling
5984 registers into the correct places in the cleanup routine. */
5985 unsigned int regmask = insn & 0xffff;
5986 unsigned int num_in_list = bitcount (regmask), new_regmask;
5987 unsigned int i;
5988
5989 for (i = 0; i < num_in_list; i++)
5990 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
5991
5992 /* Writeback makes things complicated. We need to avoid clobbering
5993 the base register with one of the registers in our modified
5994 register list, but just using a different register can't work in
5995 all cases, e.g.:
5996
5997 ldm r14!, {r0-r13,pc}
5998
5999 which would need to be rewritten as:
6000
6001 ldm rN!, {r0-r14}
6002
6003 but that can't work, because there's no free register for N.
6004
6005 Solve this by turning off the writeback bit, and emulating
6006 writeback manually in the cleanup routine. */
6007
6008 if (writeback)
6009 insn &= ~(1 << 21);
6010
6011 new_regmask = (1 << num_in_list) - 1;
6012
6013 if (debug_displaced)
6014 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6015 "{..., pc}: original reg list %.4x, modified "
6016 "list %.4x\n"), rn, writeback ? "!" : "",
6017 (int) insn & 0xffff, new_regmask);
6018
6019 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
6020
6021 dsc->cleanup = &cleanup_block_load_pc;
6022 }
6023 }
6024 else
6025 {
6026 /* STM of a list of registers which includes PC. Run the instruction
6027 as-is, but out of line: this will store the wrong value for the PC,
6028 so we must manually fix up the memory in the cleanup routine.
6029 Doing things this way has the advantage that we can auto-detect
6030 the offset of the PC write (which is architecture-dependent) in
6031 the cleanup routine. */
6032 dsc->modinsn[0] = insn;
6033
6034 dsc->cleanup = &cleanup_block_store_pc;
6035 }
6036
6037 return 0;
6038 }
6039
6040 static int
6041 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6042 struct regcache *regs,
6043 arm_displaced_step_closure *dsc)
6044 {
6045 int rn = bits (insn1, 0, 3);
6046 int load = bit (insn1, 4);
6047 int writeback = bit (insn1, 5);
6048
6049 /* Block transfers which don't mention PC can be run directly
6050 out-of-line. */
6051 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
6052 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
6053
6054 if (rn == ARM_PC_REGNUM)
6055 {
6056 warning (_("displaced: Unpredictable LDM or STM with "
6057 "base register r15"));
6058 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6059 "unpredictable ldm/stm", dsc);
6060 }
6061
6062 if (debug_displaced)
6063 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6064 "%.4x%.4x\n", insn1, insn2);
6065
6066 /* Clear bit 13, since it should be always zero. */
6067 dsc->u.block.regmask = (insn2 & 0xdfff);
6068 dsc->u.block.rn = rn;
6069
6070 dsc->u.block.load = load;
6071 dsc->u.block.user = 0;
6072 dsc->u.block.increment = bit (insn1, 7);
6073 dsc->u.block.before = bit (insn1, 8);
6074 dsc->u.block.writeback = writeback;
6075 dsc->u.block.cond = INST_AL;
6076 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
6077
6078 if (load)
6079 {
6080 if (dsc->u.block.regmask == 0xffff)
6081 {
6082 /* This branch is impossible to happen. */
6083 gdb_assert (0);
6084 }
6085 else
6086 {
6087 unsigned int regmask = dsc->u.block.regmask;
6088 unsigned int num_in_list = bitcount (regmask), new_regmask;
6089 unsigned int i;
6090
6091 for (i = 0; i < num_in_list; i++)
6092 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6093
6094 if (writeback)
6095 insn1 &= ~(1 << 5);
6096
6097 new_regmask = (1 << num_in_list) - 1;
6098
6099 if (debug_displaced)
6100 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6101 "{..., pc}: original reg list %.4x, modified "
6102 "list %.4x\n"), rn, writeback ? "!" : "",
6103 (int) dsc->u.block.regmask, new_regmask);
6104
6105 dsc->modinsn[0] = insn1;
6106 dsc->modinsn[1] = (new_regmask & 0xffff);
6107 dsc->numinsns = 2;
6108
6109 dsc->cleanup = &cleanup_block_load_pc;
6110 }
6111 }
6112 else
6113 {
6114 dsc->modinsn[0] = insn1;
6115 dsc->modinsn[1] = insn2;
6116 dsc->numinsns = 2;
6117 dsc->cleanup = &cleanup_block_store_pc;
6118 }
6119 return 0;
6120 }
6121
6122 /* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
6123 This is used to avoid a dependency on BFD's bfd_endian enum. */
6124
6125 ULONGEST
6126 arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len,
6127 int byte_order)
6128 {
6129 return read_memory_unsigned_integer (memaddr, len,
6130 (enum bfd_endian) byte_order);
6131 }
6132
6133 /* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
6134
6135 CORE_ADDR
6136 arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self,
6137 CORE_ADDR val)
6138 {
6139 return gdbarch_addr_bits_remove (self->regcache->arch (), val);
6140 }
6141
6142 /* Wrapper over syscall_next_pc for use in get_next_pcs. */
6143
6144 static CORE_ADDR
6145 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self)
6146 {
6147 return 0;
6148 }
6149
6150 /* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
6151
6152 int
6153 arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self)
6154 {
6155 return arm_is_thumb (self->regcache);
6156 }
6157
6158 /* single_step() is called just before we want to resume the inferior,
6159 if we want to single-step it but there is no hardware or kernel
6160 single-step support. We find the target of the coming instructions
6161 and breakpoint them. */
6162
6163 std::vector<CORE_ADDR>
6164 arm_software_single_step (struct regcache *regcache)
6165 {
6166 struct gdbarch *gdbarch = regcache->arch ();
6167 struct arm_get_next_pcs next_pcs_ctx;
6168
6169 arm_get_next_pcs_ctor (&next_pcs_ctx,
6170 &arm_get_next_pcs_ops,
6171 gdbarch_byte_order (gdbarch),
6172 gdbarch_byte_order_for_code (gdbarch),
6173 0,
6174 regcache);
6175
6176 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
6177
6178 for (CORE_ADDR &pc_ref : next_pcs)
6179 pc_ref = gdbarch_addr_bits_remove (gdbarch, pc_ref);
6180
6181 return next_pcs;
6182 }
6183
6184 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6185 for Linux, where some SVC instructions must be treated specially. */
6186
6187 static void
6188 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
6189 arm_displaced_step_closure *dsc)
6190 {
6191 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
6192
6193 if (debug_displaced)
6194 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
6195 "%.8lx\n", (unsigned long) resume_addr);
6196
6197 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
6198 }
6199
6200
6201 /* Common copy routine for svc instruciton. */
6202
6203 static int
6204 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
6205 arm_displaced_step_closure *dsc)
6206 {
6207 /* Preparation: none.
6208 Insn: unmodified svc.
6209 Cleanup: pc <- insn_addr + insn_size. */
6210
6211 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
6212 instruction. */
6213 dsc->wrote_to_pc = 1;
6214
6215 /* Allow OS-specific code to override SVC handling. */
6216 if (dsc->u.svc.copy_svc_os)
6217 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
6218 else
6219 {
6220 dsc->cleanup = &cleanup_svc;
6221 return 0;
6222 }
6223 }
6224
6225 static int
6226 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
6227 struct regcache *regs, arm_displaced_step_closure *dsc)
6228 {
6229
6230 if (debug_displaced)
6231 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
6232 (unsigned long) insn);
6233
6234 dsc->modinsn[0] = insn;
6235
6236 return install_svc (gdbarch, regs, dsc);
6237 }
6238
6239 static int
6240 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
6241 struct regcache *regs, arm_displaced_step_closure *dsc)
6242 {
6243
6244 if (debug_displaced)
6245 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
6246 insn);
6247
6248 dsc->modinsn[0] = insn;
6249
6250 return install_svc (gdbarch, regs, dsc);
6251 }
6252
6253 /* Copy undefined instructions. */
6254
6255 static int
6256 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
6257 arm_displaced_step_closure *dsc)
6258 {
6259 if (debug_displaced)
6260 fprintf_unfiltered (gdb_stdlog,
6261 "displaced: copying undefined insn %.8lx\n",
6262 (unsigned long) insn);
6263
6264 dsc->modinsn[0] = insn;
6265
6266 return 0;
6267 }
6268
6269 static int
6270 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6271 arm_displaced_step_closure *dsc)
6272 {
6273
6274 if (debug_displaced)
6275 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
6276 "%.4x %.4x\n", (unsigned short) insn1,
6277 (unsigned short) insn2);
6278
6279 dsc->modinsn[0] = insn1;
6280 dsc->modinsn[1] = insn2;
6281 dsc->numinsns = 2;
6282
6283 return 0;
6284 }
6285
6286 /* Copy unpredictable instructions. */
6287
6288 static int
6289 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
6290 arm_displaced_step_closure *dsc)
6291 {
6292 if (debug_displaced)
6293 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
6294 "%.8lx\n", (unsigned long) insn);
6295
6296 dsc->modinsn[0] = insn;
6297
6298 return 0;
6299 }
6300
6301 /* The decode_* functions are instruction decoding helpers. They mostly follow
6302 the presentation in the ARM ARM. */
6303
6304 static int
6305 arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
6306 struct regcache *regs,
6307 arm_displaced_step_closure *dsc)
6308 {
6309 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
6310 unsigned int rn = bits (insn, 16, 19);
6311
6312 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0x1) == 0x0)
6313 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
6314 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0x1) == 0x1)
6315 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
6316 else if ((op1 & 0x60) == 0x20)
6317 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
6318 else if ((op1 & 0x71) == 0x40)
6319 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
6320 dsc);
6321 else if ((op1 & 0x77) == 0x41)
6322 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6323 else if ((op1 & 0x77) == 0x45)
6324 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
6325 else if ((op1 & 0x77) == 0x51)
6326 {
6327 if (rn != 0xf)
6328 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6329 else
6330 return arm_copy_unpred (gdbarch, insn, dsc);
6331 }
6332 else if ((op1 & 0x77) == 0x55)
6333 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6334 else if (op1 == 0x57)
6335 switch (op2)
6336 {
6337 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
6338 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
6339 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
6340 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
6341 default: return arm_copy_unpred (gdbarch, insn, dsc);
6342 }
6343 else if ((op1 & 0x63) == 0x43)
6344 return arm_copy_unpred (gdbarch, insn, dsc);
6345 else if ((op2 & 0x1) == 0x0)
6346 switch (op1 & ~0x80)
6347 {
6348 case 0x61:
6349 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6350 case 0x65:
6351 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
6352 case 0x71: case 0x75:
6353 /* pld/pldw reg. */
6354 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
6355 case 0x63: case 0x67: case 0x73: case 0x77:
6356 return arm_copy_unpred (gdbarch, insn, dsc);
6357 default:
6358 return arm_copy_undef (gdbarch, insn, dsc);
6359 }
6360 else
6361 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
6362 }
6363
6364 static int
6365 arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
6366 struct regcache *regs,
6367 arm_displaced_step_closure *dsc)
6368 {
6369 if (bit (insn, 27) == 0)
6370 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
6371 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
6372 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
6373 {
6374 case 0x0: case 0x2:
6375 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
6376
6377 case 0x1: case 0x3:
6378 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
6379
6380 case 0x4: case 0x5: case 0x6: case 0x7:
6381 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6382
6383 case 0x8:
6384 switch ((insn & 0xe00000) >> 21)
6385 {
6386 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
6387 /* stc/stc2. */
6388 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6389
6390 case 0x2:
6391 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6392
6393 default:
6394 return arm_copy_undef (gdbarch, insn, dsc);
6395 }
6396
6397 case 0x9:
6398 {
6399 int rn_f = (bits (insn, 16, 19) == 0xf);
6400 switch ((insn & 0xe00000) >> 21)
6401 {
6402 case 0x1: case 0x3:
6403 /* ldc/ldc2 imm (undefined for rn == pc). */
6404 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
6405 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6406
6407 case 0x2:
6408 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6409
6410 case 0x4: case 0x5: case 0x6: case 0x7:
6411 /* ldc/ldc2 lit (undefined for rn != pc). */
6412 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
6413 : arm_copy_undef (gdbarch, insn, dsc);
6414
6415 default:
6416 return arm_copy_undef (gdbarch, insn, dsc);
6417 }
6418 }
6419
6420 case 0xa:
6421 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
6422
6423 case 0xb:
6424 if (bits (insn, 16, 19) == 0xf)
6425 /* ldc/ldc2 lit. */
6426 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6427 else
6428 return arm_copy_undef (gdbarch, insn, dsc);
6429
6430 case 0xc:
6431 if (bit (insn, 4))
6432 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6433 else
6434 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6435
6436 case 0xd:
6437 if (bit (insn, 4))
6438 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6439 else
6440 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6441
6442 default:
6443 return arm_copy_undef (gdbarch, insn, dsc);
6444 }
6445 }
6446
6447 /* Decode miscellaneous instructions in dp/misc encoding space. */
6448
6449 static int
6450 arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
6451 struct regcache *regs,
6452 arm_displaced_step_closure *dsc)
6453 {
6454 unsigned int op2 = bits (insn, 4, 6);
6455 unsigned int op = bits (insn, 21, 22);
6456
6457 switch (op2)
6458 {
6459 case 0x0:
6460 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
6461
6462 case 0x1:
6463 if (op == 0x1) /* bx. */
6464 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
6465 else if (op == 0x3)
6466 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
6467 else
6468 return arm_copy_undef (gdbarch, insn, dsc);
6469
6470 case 0x2:
6471 if (op == 0x1)
6472 /* Not really supported. */
6473 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
6474 else
6475 return arm_copy_undef (gdbarch, insn, dsc);
6476
6477 case 0x3:
6478 if (op == 0x1)
6479 return arm_copy_bx_blx_reg (gdbarch, insn,
6480 regs, dsc); /* blx register. */
6481 else
6482 return arm_copy_undef (gdbarch, insn, dsc);
6483
6484 case 0x5:
6485 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
6486
6487 case 0x7:
6488 if (op == 0x1)
6489 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
6490 else if (op == 0x3)
6491 /* Not really supported. */
6492 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
6493 /* Fall through. */
6494
6495 default:
6496 return arm_copy_undef (gdbarch, insn, dsc);
6497 }
6498 }
6499
6500 static int
6501 arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
6502 struct regcache *regs,
6503 arm_displaced_step_closure *dsc)
6504 {
6505 if (bit (insn, 25))
6506 switch (bits (insn, 20, 24))
6507 {
6508 case 0x10:
6509 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
6510
6511 case 0x14:
6512 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
6513
6514 case 0x12: case 0x16:
6515 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
6516
6517 default:
6518 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
6519 }
6520 else
6521 {
6522 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
6523
6524 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
6525 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
6526 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
6527 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
6528 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
6529 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
6530 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
6531 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
6532 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
6533 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
6534 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
6535 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
6536 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
6537 /* 2nd arg means "unprivileged". */
6538 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
6539 dsc);
6540 }
6541
6542 /* Should be unreachable. */
6543 return 1;
6544 }
6545
6546 static int
6547 arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
6548 struct regcache *regs,
6549 arm_displaced_step_closure *dsc)
6550 {
6551 int a = bit (insn, 25), b = bit (insn, 4);
6552 uint32_t op1 = bits (insn, 20, 24);
6553
6554 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
6555 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
6556 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
6557 else if ((!a && (op1 & 0x17) == 0x02)
6558 || (a && (op1 & 0x17) == 0x02 && !b))
6559 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
6560 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
6561 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
6562 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
6563 else if ((!a && (op1 & 0x17) == 0x03)
6564 || (a && (op1 & 0x17) == 0x03 && !b))
6565 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
6566 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
6567 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
6568 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
6569 else if ((!a && (op1 & 0x17) == 0x06)
6570 || (a && (op1 & 0x17) == 0x06 && !b))
6571 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
6572 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
6573 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
6574 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
6575 else if ((!a && (op1 & 0x17) == 0x07)
6576 || (a && (op1 & 0x17) == 0x07 && !b))
6577 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
6578
6579 /* Should be unreachable. */
6580 return 1;
6581 }
6582
6583 static int
6584 arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
6585 arm_displaced_step_closure *dsc)
6586 {
6587 switch (bits (insn, 20, 24))
6588 {
6589 case 0x00: case 0x01: case 0x02: case 0x03:
6590 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
6591
6592 case 0x04: case 0x05: case 0x06: case 0x07:
6593 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
6594
6595 case 0x08: case 0x09: case 0x0a: case 0x0b:
6596 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
6597 return arm_copy_unmodified (gdbarch, insn,
6598 "decode/pack/unpack/saturate/reverse", dsc);
6599
6600 case 0x18:
6601 if (bits (insn, 5, 7) == 0) /* op2. */
6602 {
6603 if (bits (insn, 12, 15) == 0xf)
6604 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
6605 else
6606 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
6607 }
6608 else
6609 return arm_copy_undef (gdbarch, insn, dsc);
6610
6611 case 0x1a: case 0x1b:
6612 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6613 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
6614 else
6615 return arm_copy_undef (gdbarch, insn, dsc);
6616
6617 case 0x1c: case 0x1d:
6618 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
6619 {
6620 if (bits (insn, 0, 3) == 0xf)
6621 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
6622 else
6623 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
6624 }
6625 else
6626 return arm_copy_undef (gdbarch, insn, dsc);
6627
6628 case 0x1e: case 0x1f:
6629 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6630 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
6631 else
6632 return arm_copy_undef (gdbarch, insn, dsc);
6633 }
6634
6635 /* Should be unreachable. */
6636 return 1;
6637 }
6638
6639 static int
6640 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn,
6641 struct regcache *regs,
6642 arm_displaced_step_closure *dsc)
6643 {
6644 if (bit (insn, 25))
6645 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6646 else
6647 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
6648 }
6649
6650 static int
6651 arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
6652 struct regcache *regs,
6653 arm_displaced_step_closure *dsc)
6654 {
6655 unsigned int opcode = bits (insn, 20, 24);
6656
6657 switch (opcode)
6658 {
6659 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
6660 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
6661
6662 case 0x08: case 0x0a: case 0x0c: case 0x0e:
6663 case 0x12: case 0x16:
6664 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
6665
6666 case 0x09: case 0x0b: case 0x0d: case 0x0f:
6667 case 0x13: case 0x17:
6668 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
6669
6670 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6671 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6672 /* Note: no writeback for these instructions. Bit 25 will always be
6673 zero though (via caller), so the following works OK. */
6674 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6675 }
6676
6677 /* Should be unreachable. */
6678 return 1;
6679 }
6680
6681 /* Decode shifted register instructions. */
6682
6683 static int
6684 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
6685 uint16_t insn2, struct regcache *regs,
6686 arm_displaced_step_closure *dsc)
6687 {
6688 /* PC is only allowed to be used in instruction MOV. */
6689
6690 unsigned int op = bits (insn1, 5, 8);
6691 unsigned int rn = bits (insn1, 0, 3);
6692
6693 if (op == 0x2 && rn == 0xf) /* MOV */
6694 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
6695 else
6696 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6697 "dp (shift reg)", dsc);
6698 }
6699
6700
6701 /* Decode extension register load/store. Exactly the same as
6702 arm_decode_ext_reg_ld_st. */
6703
6704 static int
6705 thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
6706 uint16_t insn2, struct regcache *regs,
6707 arm_displaced_step_closure *dsc)
6708 {
6709 unsigned int opcode = bits (insn1, 4, 8);
6710
6711 switch (opcode)
6712 {
6713 case 0x04: case 0x05:
6714 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6715 "vfp/neon vmov", dsc);
6716
6717 case 0x08: case 0x0c: /* 01x00 */
6718 case 0x0a: case 0x0e: /* 01x10 */
6719 case 0x12: case 0x16: /* 10x10 */
6720 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6721 "vfp/neon vstm/vpush", dsc);
6722
6723 case 0x09: case 0x0d: /* 01x01 */
6724 case 0x0b: case 0x0f: /* 01x11 */
6725 case 0x13: case 0x17: /* 10x11 */
6726 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6727 "vfp/neon vldm/vpop", dsc);
6728
6729 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6730 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6731 "vstr", dsc);
6732 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6733 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
6734 }
6735
6736 /* Should be unreachable. */
6737 return 1;
6738 }
6739
6740 static int
6741 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
6742 struct regcache *regs, arm_displaced_step_closure *dsc)
6743 {
6744 unsigned int op1 = bits (insn, 20, 25);
6745 int op = bit (insn, 4);
6746 unsigned int coproc = bits (insn, 8, 11);
6747
6748 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
6749 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
6750 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
6751 && (coproc & 0xe) != 0xa)
6752 /* stc/stc2. */
6753 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6754 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
6755 && (coproc & 0xe) != 0xa)
6756 /* ldc/ldc2 imm/lit. */
6757 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6758 else if ((op1 & 0x3e) == 0x00)
6759 return arm_copy_undef (gdbarch, insn, dsc);
6760 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
6761 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
6762 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
6763 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6764 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
6765 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6766 else if ((op1 & 0x30) == 0x20 && !op)
6767 {
6768 if ((coproc & 0xe) == 0xa)
6769 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
6770 else
6771 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6772 }
6773 else if ((op1 & 0x30) == 0x20 && op)
6774 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
6775 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
6776 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6777 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
6778 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6779 else if ((op1 & 0x30) == 0x30)
6780 return arm_copy_svc (gdbarch, insn, regs, dsc);
6781 else
6782 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
6783 }
6784
6785 static int
6786 thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
6787 uint16_t insn2, struct regcache *regs,
6788 arm_displaced_step_closure *dsc)
6789 {
6790 unsigned int coproc = bits (insn2, 8, 11);
6791 unsigned int bit_5_8 = bits (insn1, 5, 8);
6792 unsigned int bit_9 = bit (insn1, 9);
6793 unsigned int bit_4 = bit (insn1, 4);
6794
6795 if (bit_9 == 0)
6796 {
6797 if (bit_5_8 == 2)
6798 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6799 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
6800 dsc);
6801 else if (bit_5_8 == 0) /* UNDEFINED. */
6802 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
6803 else
6804 {
6805 /*coproc is 101x. SIMD/VFP, ext registers load/store. */
6806 if ((coproc & 0xe) == 0xa)
6807 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
6808 dsc);
6809 else /* coproc is not 101x. */
6810 {
6811 if (bit_4 == 0) /* STC/STC2. */
6812 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6813 "stc/stc2", dsc);
6814 else /* LDC/LDC2 {literal, immeidate}. */
6815 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
6816 regs, dsc);
6817 }
6818 }
6819 }
6820 else
6821 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
6822
6823 return 0;
6824 }
6825
6826 static void
6827 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
6828 arm_displaced_step_closure *dsc, int rd)
6829 {
6830 /* ADR Rd, #imm
6831
6832 Rewrite as:
6833
6834 Preparation: Rd <- PC
6835 Insn: ADD Rd, #imm
6836 Cleanup: Null.
6837 */
6838
6839 /* Rd <- PC */
6840 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6841 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
6842 }
6843
6844 static int
6845 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
6846 arm_displaced_step_closure *dsc,
6847 int rd, unsigned int imm)
6848 {
6849
6850 /* Encoding T2: ADDS Rd, #imm */
6851 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
6852
6853 install_pc_relative (gdbarch, regs, dsc, rd);
6854
6855 return 0;
6856 }
6857
6858 static int
6859 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
6860 struct regcache *regs,
6861 arm_displaced_step_closure *dsc)
6862 {
6863 unsigned int rd = bits (insn, 8, 10);
6864 unsigned int imm8 = bits (insn, 0, 7);
6865
6866 if (debug_displaced)
6867 fprintf_unfiltered (gdb_stdlog,
6868 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
6869 rd, imm8, insn);
6870
6871 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
6872 }
6873
6874 static int
6875 thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
6876 uint16_t insn2, struct regcache *regs,
6877 arm_displaced_step_closure *dsc)
6878 {
6879 unsigned int rd = bits (insn2, 8, 11);
6880 /* Since immediate has the same encoding in ADR ADD and SUB, so we simply
6881 extract raw immediate encoding rather than computing immediate. When
6882 generating ADD or SUB instruction, we can simply perform OR operation to
6883 set immediate into ADD. */
6884 unsigned int imm_3_8 = insn2 & 0x70ff;
6885 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
6886
6887 if (debug_displaced)
6888 fprintf_unfiltered (gdb_stdlog,
6889 "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
6890 rd, imm_i, imm_3_8, insn1, insn2);
6891
6892 if (bit (insn1, 7)) /* Encoding T2 */
6893 {
6894 /* Encoding T3: SUB Rd, Rd, #imm */
6895 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
6896 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
6897 }
6898 else /* Encoding T3 */
6899 {
6900 /* Encoding T3: ADD Rd, Rd, #imm */
6901 dsc->modinsn[0] = (0xf100 | rd | imm_i);
6902 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
6903 }
6904 dsc->numinsns = 2;
6905
6906 install_pc_relative (gdbarch, regs, dsc, rd);
6907
6908 return 0;
6909 }
6910
6911 static int
6912 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, uint16_t insn1,
6913 struct regcache *regs,
6914 arm_displaced_step_closure *dsc)
6915 {
6916 unsigned int rt = bits (insn1, 8, 10);
6917 unsigned int pc;
6918 int imm8 = (bits (insn1, 0, 7) << 2);
6919
6920 /* LDR Rd, #imm8
6921
6922 Rwrite as:
6923
6924 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
6925
6926 Insn: LDR R0, [R2, R3];
6927 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
6928
6929 if (debug_displaced)
6930 fprintf_unfiltered (gdb_stdlog,
6931 "displaced: copying thumb ldr r%d [pc #%d]\n"
6932 , rt, imm8);
6933
6934 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6935 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6936 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6937 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6938 /* The assembler calculates the required value of the offset from the
6939 Align(PC,4) value of this instruction to the label. */
6940 pc = pc & 0xfffffffc;
6941
6942 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
6943 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
6944
6945 dsc->rd = rt;
6946 dsc->u.ldst.xfersize = 4;
6947 dsc->u.ldst.rn = 0;
6948 dsc->u.ldst.immed = 0;
6949 dsc->u.ldst.writeback = 0;
6950 dsc->u.ldst.restore_r4 = 0;
6951
6952 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/
6953
6954 dsc->cleanup = &cleanup_load;
6955
6956 return 0;
6957 }
6958
6959 /* Copy Thumb cbnz/cbz insruction. */
6960
6961 static int
6962 thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
6963 struct regcache *regs,
6964 arm_displaced_step_closure *dsc)
6965 {
6966 int non_zero = bit (insn1, 11);
6967 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
6968 CORE_ADDR from = dsc->insn_addr;
6969 int rn = bits (insn1, 0, 2);
6970 int rn_val = displaced_read_reg (regs, dsc, rn);
6971
6972 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
6973 /* CBNZ and CBZ do not affect the condition flags. If condition is true,
6974 set it INST_AL, so cleanup_branch will know branch is taken, otherwise,
6975 condition is false, let it be, cleanup_branch will do nothing. */
6976 if (dsc->u.branch.cond)
6977 {
6978 dsc->u.branch.cond = INST_AL;
6979 dsc->u.branch.dest = from + 4 + imm5;
6980 }
6981 else
6982 dsc->u.branch.dest = from + 2;
6983
6984 dsc->u.branch.link = 0;
6985 dsc->u.branch.exchange = 0;
6986
6987 if (debug_displaced)
6988 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
6989 " insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
6990 rn, rn_val, insn1, dsc->u.branch.dest);
6991
6992 dsc->modinsn[0] = THUMB_NOP;
6993
6994 dsc->cleanup = &cleanup_branch;
6995 return 0;
6996 }
6997
6998 /* Copy Table Branch Byte/Halfword */
6999 static int
7000 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
7001 uint16_t insn2, struct regcache *regs,
7002 arm_displaced_step_closure *dsc)
7003 {
7004 ULONGEST rn_val, rm_val;
7005 int is_tbh = bit (insn2, 4);
7006 CORE_ADDR halfwords = 0;
7007 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7008
7009 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
7010 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
7011
7012 if (is_tbh)
7013 {
7014 gdb_byte buf[2];
7015
7016 target_read_memory (rn_val + 2 * rm_val, buf, 2);
7017 halfwords = extract_unsigned_integer (buf, 2, byte_order);
7018 }
7019 else
7020 {
7021 gdb_byte buf[1];
7022
7023 target_read_memory (rn_val + rm_val, buf, 1);
7024 halfwords = extract_unsigned_integer (buf, 1, byte_order);
7025 }
7026
7027 if (debug_displaced)
7028 fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x offset 0x%x"
7029 " offset 0x%x\n", is_tbh ? "tbh" : "tbb",
7030 (unsigned int) rn_val, (unsigned int) rm_val,
7031 (unsigned int) halfwords);
7032
7033 dsc->u.branch.cond = INST_AL;
7034 dsc->u.branch.link = 0;
7035 dsc->u.branch.exchange = 0;
7036 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
7037
7038 dsc->cleanup = &cleanup_branch;
7039
7040 return 0;
7041 }
7042
7043 static void
7044 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
7045 arm_displaced_step_closure *dsc)
7046 {
7047 /* PC <- r7 */
7048 int val = displaced_read_reg (regs, dsc, 7);
7049 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
7050
7051 /* r7 <- r8 */
7052 val = displaced_read_reg (regs, dsc, 8);
7053 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
7054
7055 /* r8 <- tmp[0] */
7056 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
7057
7058 }
7059
7060 static int
7061 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, uint16_t insn1,
7062 struct regcache *regs,
7063 arm_displaced_step_closure *dsc)
7064 {
7065 dsc->u.block.regmask = insn1 & 0x00ff;
7066
7067 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
7068 to :
7069
7070 (1) register list is full, that is, r0-r7 are used.
7071 Prepare: tmp[0] <- r8
7072
7073 POP {r0, r1, ...., r6, r7}; remove PC from reglist
7074 MOV r8, r7; Move value of r7 to r8;
7075 POP {r7}; Store PC value into r7.
7076
7077 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
7078
7079 (2) register list is not full, supposing there are N registers in
7080 register list (except PC, 0 <= N <= 7).
7081 Prepare: for each i, 0 - N, tmp[i] <- ri.
7082
7083 POP {r0, r1, ...., rN};
7084
7085 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
7086 from tmp[] properly.
7087 */
7088 if (debug_displaced)
7089 fprintf_unfiltered (gdb_stdlog,
7090 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
7091 dsc->u.block.regmask, insn1);
7092
7093 if (dsc->u.block.regmask == 0xff)
7094 {
7095 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
7096
7097 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
7098 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
7099 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
7100
7101 dsc->numinsns = 3;
7102 dsc->cleanup = &cleanup_pop_pc_16bit_all;
7103 }
7104 else
7105 {
7106 unsigned int num_in_list = bitcount (dsc->u.block.regmask);
7107 unsigned int i;
7108 unsigned int new_regmask;
7109
7110 for (i = 0; i < num_in_list + 1; i++)
7111 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7112
7113 new_regmask = (1 << (num_in_list + 1)) - 1;
7114
7115 if (debug_displaced)
7116 fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
7117 "{..., pc}: original reg list %.4x,"
7118 " modified list %.4x\n"),
7119 (int) dsc->u.block.regmask, new_regmask);
7120
7121 dsc->u.block.regmask |= 0x8000;
7122 dsc->u.block.writeback = 0;
7123 dsc->u.block.cond = INST_AL;
7124
7125 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
7126
7127 dsc->cleanup = &cleanup_block_load_pc;
7128 }
7129
7130 return 0;
7131 }
7132
7133 static void
7134 thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7135 struct regcache *regs,
7136 arm_displaced_step_closure *dsc)
7137 {
7138 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
7139 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
7140 int err = 0;
7141
7142 /* 16-bit thumb instructions. */
7143 switch (op_bit_12_15)
7144 {
7145 /* Shift (imme), add, subtract, move and compare. */
7146 case 0: case 1: case 2: case 3:
7147 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7148 "shift/add/sub/mov/cmp",
7149 dsc);
7150 break;
7151 case 4:
7152 switch (op_bit_10_11)
7153 {
7154 case 0: /* Data-processing */
7155 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7156 "data-processing",
7157 dsc);
7158 break;
7159 case 1: /* Special data instructions and branch and exchange. */
7160 {
7161 unsigned short op = bits (insn1, 7, 9);
7162 if (op == 6 || op == 7) /* BX or BLX */
7163 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
7164 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
7165 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
7166 else
7167 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
7168 dsc);
7169 }
7170 break;
7171 default: /* LDR (literal) */
7172 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
7173 }
7174 break;
7175 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
7176 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
7177 break;
7178 case 10:
7179 if (op_bit_10_11 < 2) /* Generate PC-relative address */
7180 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
7181 else /* Generate SP-relative address */
7182 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
7183 break;
7184 case 11: /* Misc 16-bit instructions */
7185 {
7186 switch (bits (insn1, 8, 11))
7187 {
7188 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
7189 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
7190 break;
7191 case 12: case 13: /* POP */
7192 if (bit (insn1, 8)) /* PC is in register list. */
7193 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
7194 else
7195 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
7196 break;
7197 case 15: /* If-Then, and hints */
7198 if (bits (insn1, 0, 3))
7199 /* If-Then makes up to four following instructions conditional.
7200 IT instruction itself is not conditional, so handle it as a
7201 common unmodified instruction. */
7202 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
7203 dsc);
7204 else
7205 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
7206 break;
7207 default:
7208 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
7209 }
7210 }
7211 break;
7212 case 12:
7213 if (op_bit_10_11 < 2) /* Store multiple registers */
7214 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
7215 else /* Load multiple registers */
7216 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
7217 break;
7218 case 13: /* Conditional branch and supervisor call */
7219 if (bits (insn1, 9, 11) != 7) /* conditional branch */
7220 err = thumb_copy_b (gdbarch, insn1, dsc);
7221 else
7222 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
7223 break;
7224 case 14: /* Unconditional branch */
7225 err = thumb_copy_b (gdbarch, insn1, dsc);
7226 break;
7227 default:
7228 err = 1;
7229 }
7230
7231 if (err)
7232 internal_error (__FILE__, __LINE__,
7233 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
7234 }
7235
7236 static int
7237 decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
7238 uint16_t insn1, uint16_t insn2,
7239 struct regcache *regs,
7240 arm_displaced_step_closure *dsc)
7241 {
7242 int rt = bits (insn2, 12, 15);
7243 int rn = bits (insn1, 0, 3);
7244 int op1 = bits (insn1, 7, 8);
7245
7246 switch (bits (insn1, 5, 6))
7247 {
7248 case 0: /* Load byte and memory hints */
7249 if (rt == 0xf) /* PLD/PLI */
7250 {
7251 if (rn == 0xf)
7252 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
7253 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
7254 else
7255 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7256 "pli/pld", dsc);
7257 }
7258 else
7259 {
7260 if (rn == 0xf) /* LDRB/LDRSB (literal) */
7261 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7262 1);
7263 else
7264 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7265 "ldrb{reg, immediate}/ldrbt",
7266 dsc);
7267 }
7268
7269 break;
7270 case 1: /* Load halfword and memory hints. */
7271 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
7272 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7273 "pld/unalloc memhint", dsc);
7274 else
7275 {
7276 if (rn == 0xf)
7277 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7278 2);
7279 else
7280 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7281 "ldrh/ldrht", dsc);
7282 }
7283 break;
7284 case 2: /* Load word */
7285 {
7286 int insn2_bit_8_11 = bits (insn2, 8, 11);
7287
7288 if (rn == 0xf)
7289 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
7290 else if (op1 == 0x1) /* Encoding T3 */
7291 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
7292 0, 1);
7293 else /* op1 == 0x0 */
7294 {
7295 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
7296 /* LDR (immediate) */
7297 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7298 dsc, bit (insn2, 8), 1);
7299 else if (insn2_bit_8_11 == 0xe) /* LDRT */
7300 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7301 "ldrt", dsc);
7302 else
7303 /* LDR (register) */
7304 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7305 dsc, 0, 0);
7306 }
7307 break;
7308 }
7309 default:
7310 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7311 break;
7312 }
7313 return 0;
7314 }
7315
7316 static void
7317 thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7318 uint16_t insn2, struct regcache *regs,
7319 arm_displaced_step_closure *dsc)
7320 {
7321 int err = 0;
7322 unsigned short op = bit (insn2, 15);
7323 unsigned int op1 = bits (insn1, 11, 12);
7324
7325 switch (op1)
7326 {
7327 case 1:
7328 {
7329 switch (bits (insn1, 9, 10))
7330 {
7331 case 0:
7332 if (bit (insn1, 6))
7333 {
7334 /* Load/store {dual, execlusive}, table branch. */
7335 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
7336 && bits (insn2, 5, 7) == 0)
7337 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
7338 dsc);
7339 else
7340 /* PC is not allowed to use in load/store {dual, exclusive}
7341 instructions. */
7342 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7343 "load/store dual/ex", dsc);
7344 }
7345 else /* load/store multiple */
7346 {
7347 switch (bits (insn1, 7, 8))
7348 {
7349 case 0: case 3: /* SRS, RFE */
7350 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7351 "srs/rfe", dsc);
7352 break;
7353 case 1: case 2: /* LDM/STM/PUSH/POP */
7354 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
7355 break;
7356 }
7357 }
7358 break;
7359
7360 case 1:
7361 /* Data-processing (shift register). */
7362 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
7363 dsc);
7364 break;
7365 default: /* Coprocessor instructions. */
7366 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7367 break;
7368 }
7369 break;
7370 }
7371 case 2: /* op1 = 2 */
7372 if (op) /* Branch and misc control. */
7373 {
7374 if (bit (insn2, 14) /* BLX/BL */
7375 || bit (insn2, 12) /* Unconditional branch */
7376 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
7377 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
7378 else
7379 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7380 "misc ctrl", dsc);
7381 }
7382 else
7383 {
7384 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
7385 {
7386 int dp_op = bits (insn1, 4, 8);
7387 int rn = bits (insn1, 0, 3);
7388 if ((dp_op == 0 || dp_op == 0xa) && rn == 0xf)
7389 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
7390 regs, dsc);
7391 else
7392 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7393 "dp/pb", dsc);
7394 }
7395 else /* Data processing (modified immeidate) */
7396 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7397 "dp/mi", dsc);
7398 }
7399 break;
7400 case 3: /* op1 = 3 */
7401 switch (bits (insn1, 9, 10))
7402 {
7403 case 0:
7404 if (bit (insn1, 4))
7405 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
7406 regs, dsc);
7407 else /* NEON Load/Store and Store single data item */
7408 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7409 "neon elt/struct load/store",
7410 dsc);
7411 break;
7412 case 1: /* op1 = 3, bits (9, 10) == 1 */
7413 switch (bits (insn1, 7, 8))
7414 {
7415 case 0: case 1: /* Data processing (register) */
7416 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7417 "dp(reg)", dsc);
7418 break;
7419 case 2: /* Multiply and absolute difference */
7420 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7421 "mul/mua/diff", dsc);
7422 break;
7423 case 3: /* Long multiply and divide */
7424 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7425 "lmul/lmua", dsc);
7426 break;
7427 }
7428 break;
7429 default: /* Coprocessor instructions */
7430 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7431 break;
7432 }
7433 break;
7434 default:
7435 err = 1;
7436 }
7437
7438 if (err)
7439 internal_error (__FILE__, __LINE__,
7440 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
7441
7442 }
7443
7444 static void
7445 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7446 struct regcache *regs,
7447 arm_displaced_step_closure *dsc)
7448 {
7449 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7450 uint16_t insn1
7451 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
7452
7453 if (debug_displaced)
7454 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
7455 "at %.8lx\n", insn1, (unsigned long) from);
7456
7457 dsc->is_thumb = 1;
7458 dsc->insn_size = thumb_insn_size (insn1);
7459 if (thumb_insn_size (insn1) == 4)
7460 {
7461 uint16_t insn2
7462 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
7463 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
7464 }
7465 else
7466 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
7467 }
7468
7469 void
7470 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7471 CORE_ADDR to, struct regcache *regs,
7472 arm_displaced_step_closure *dsc)
7473 {
7474 int err = 0;
7475 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7476 uint32_t insn;
7477
7478 /* Most displaced instructions use a 1-instruction scratch space, so set this
7479 here and override below if/when necessary. */
7480 dsc->numinsns = 1;
7481 dsc->insn_addr = from;
7482 dsc->scratch_base = to;
7483 dsc->cleanup = NULL;
7484 dsc->wrote_to_pc = 0;
7485
7486 if (!displaced_in_arm_mode (regs))
7487 return thumb_process_displaced_insn (gdbarch, from, regs, dsc);
7488
7489 dsc->is_thumb = 0;
7490 dsc->insn_size = 4;
7491 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
7492 if (debug_displaced)
7493 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
7494 "at %.8lx\n", (unsigned long) insn,
7495 (unsigned long) from);
7496
7497 if ((insn & 0xf0000000) == 0xf0000000)
7498 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
7499 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
7500 {
7501 case 0x0: case 0x1: case 0x2: case 0x3:
7502 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
7503 break;
7504
7505 case 0x4: case 0x5: case 0x6:
7506 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
7507 break;
7508
7509 case 0x7:
7510 err = arm_decode_media (gdbarch, insn, dsc);
7511 break;
7512
7513 case 0x8: case 0x9: case 0xa: case 0xb:
7514 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
7515 break;
7516
7517 case 0xc: case 0xd: case 0xe: case 0xf:
7518 err = arm_decode_svc_copro (gdbarch, insn, regs, dsc);
7519 break;
7520 }
7521
7522 if (err)
7523 internal_error (__FILE__, __LINE__,
7524 _("arm_process_displaced_insn: Instruction decode error"));
7525 }
7526
7527 /* Actually set up the scratch space for a displaced instruction. */
7528
7529 void
7530 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
7531 CORE_ADDR to, arm_displaced_step_closure *dsc)
7532 {
7533 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7534 unsigned int i, len, offset;
7535 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7536 int size = dsc->is_thumb? 2 : 4;
7537 const gdb_byte *bkp_insn;
7538
7539 offset = 0;
7540 /* Poke modified instruction(s). */
7541 for (i = 0; i < dsc->numinsns; i++)
7542 {
7543 if (debug_displaced)
7544 {
7545 fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
7546 if (size == 4)
7547 fprintf_unfiltered (gdb_stdlog, "%.8lx",
7548 dsc->modinsn[i]);
7549 else if (size == 2)
7550 fprintf_unfiltered (gdb_stdlog, "%.4x",
7551 (unsigned short)dsc->modinsn[i]);
7552
7553 fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
7554 (unsigned long) to + offset);
7555
7556 }
7557 write_memory_unsigned_integer (to + offset, size,
7558 byte_order_for_code,
7559 dsc->modinsn[i]);
7560 offset += size;
7561 }
7562
7563 /* Choose the correct breakpoint instruction. */
7564 if (dsc->is_thumb)
7565 {
7566 bkp_insn = tdep->thumb_breakpoint;
7567 len = tdep->thumb_breakpoint_size;
7568 }
7569 else
7570 {
7571 bkp_insn = tdep->arm_breakpoint;
7572 len = tdep->arm_breakpoint_size;
7573 }
7574
7575 /* Put breakpoint afterwards. */
7576 write_memory (to + offset, bkp_insn, len);
7577
7578 if (debug_displaced)
7579 fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
7580 paddress (gdbarch, from), paddress (gdbarch, to));
7581 }
7582
7583 /* Entry point for cleaning things up after a displaced instruction has been
7584 single-stepped. */
7585
7586 void
7587 arm_displaced_step_fixup (struct gdbarch *gdbarch,
7588 struct displaced_step_closure *dsc_,
7589 CORE_ADDR from, CORE_ADDR to,
7590 struct regcache *regs)
7591 {
7592 arm_displaced_step_closure *dsc = (arm_displaced_step_closure *) dsc_;
7593
7594 if (dsc->cleanup)
7595 dsc->cleanup (gdbarch, regs, dsc);
7596
7597 if (!dsc->wrote_to_pc)
7598 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
7599 dsc->insn_addr + dsc->insn_size);
7600
7601 }
7602
7603 #include "bfd-in2.h"
7604 #include "libcoff.h"
7605
7606 static int
7607 gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
7608 {
7609 gdb_disassembler *di
7610 = static_cast<gdb_disassembler *>(info->application_data);
7611 struct gdbarch *gdbarch = di->arch ();
7612
7613 if (arm_pc_is_thumb (gdbarch, memaddr))
7614 {
7615 static asymbol *asym;
7616 static combined_entry_type ce;
7617 static struct coff_symbol_struct csym;
7618 static struct bfd fake_bfd;
7619 static bfd_target fake_target;
7620
7621 if (csym.native == NULL)
7622 {
7623 /* Create a fake symbol vector containing a Thumb symbol.
7624 This is solely so that the code in print_insn_little_arm()
7625 and print_insn_big_arm() in opcodes/arm-dis.c will detect
7626 the presence of a Thumb symbol and switch to decoding
7627 Thumb instructions. */
7628
7629 fake_target.flavour = bfd_target_coff_flavour;
7630 fake_bfd.xvec = &fake_target;
7631 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
7632 csym.native = &ce;
7633 csym.symbol.the_bfd = &fake_bfd;
7634 csym.symbol.name = "fake";
7635 asym = (asymbol *) & csym;
7636 }
7637
7638 memaddr = UNMAKE_THUMB_ADDR (memaddr);
7639 info->symbols = &asym;
7640 }
7641 else
7642 info->symbols = NULL;
7643
7644 /* GDB is able to get bfd_mach from the exe_bfd, info->mach is
7645 accurate, so mark USER_SPECIFIED_MACHINE_TYPE bit. Otherwise,
7646 opcodes/arm-dis.c:print_insn reset info->mach, and it will trigger
7647 the assert on the mismatch of info->mach and bfd_get_mach (exec_bfd)
7648 in default_print_insn. */
7649 if (exec_bfd != NULL)
7650 info->flags |= USER_SPECIFIED_MACHINE_TYPE;
7651
7652 return default_print_insn (memaddr, info);
7653 }
7654
7655 /* The following define instruction sequences that will cause ARM
7656 cpu's to take an undefined instruction trap. These are used to
7657 signal a breakpoint to GDB.
7658
7659 The newer ARMv4T cpu's are capable of operating in ARM or Thumb
7660 modes. A different instruction is required for each mode. The ARM
7661 cpu's can also be big or little endian. Thus four different
7662 instructions are needed to support all cases.
7663
7664 Note: ARMv4 defines several new instructions that will take the
7665 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
7666 not in fact add the new instructions. The new undefined
7667 instructions in ARMv4 are all instructions that had no defined
7668 behaviour in earlier chips. There is no guarantee that they will
7669 raise an exception, but may be treated as NOP's. In practice, it
7670 may only safe to rely on instructions matching:
7671
7672 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
7673 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
7674 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
7675
7676 Even this may only true if the condition predicate is true. The
7677 following use a condition predicate of ALWAYS so it is always TRUE.
7678
7679 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
7680 and NetBSD all use a software interrupt rather than an undefined
7681 instruction to force a trap. This can be handled by by the
7682 abi-specific code during establishment of the gdbarch vector. */
7683
7684 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
7685 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
7686 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
7687 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
7688
7689 static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
7690 static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
7691 static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
7692 static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
7693
7694 /* Implement the breakpoint_kind_from_pc gdbarch method. */
7695
7696 static int
7697 arm_breakpoint_kind_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr)
7698 {
7699 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7700 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7701
7702 if (arm_pc_is_thumb (gdbarch, *pcptr))
7703 {
7704 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
7705
7706 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
7707 check whether we are replacing a 32-bit instruction. */
7708 if (tdep->thumb2_breakpoint != NULL)
7709 {
7710 gdb_byte buf[2];
7711
7712 if (target_read_memory (*pcptr, buf, 2) == 0)
7713 {
7714 unsigned short inst1;
7715
7716 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
7717 if (thumb_insn_size (inst1) == 4)
7718 return ARM_BP_KIND_THUMB2;
7719 }
7720 }
7721
7722 return ARM_BP_KIND_THUMB;
7723 }
7724 else
7725 return ARM_BP_KIND_ARM;
7726
7727 }
7728
7729 /* Implement the sw_breakpoint_from_kind gdbarch method. */
7730
7731 static const gdb_byte *
7732 arm_sw_breakpoint_from_kind (struct gdbarch *gdbarch, int kind, int *size)
7733 {
7734 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7735
7736 switch (kind)
7737 {
7738 case ARM_BP_KIND_ARM:
7739 *size = tdep->arm_breakpoint_size;
7740 return tdep->arm_breakpoint;
7741 case ARM_BP_KIND_THUMB:
7742 *size = tdep->thumb_breakpoint_size;
7743 return tdep->thumb_breakpoint;
7744 case ARM_BP_KIND_THUMB2:
7745 *size = tdep->thumb2_breakpoint_size;
7746 return tdep->thumb2_breakpoint;
7747 default:
7748 gdb_assert_not_reached ("unexpected arm breakpoint kind");
7749 }
7750 }
7751
7752 /* Implement the breakpoint_kind_from_current_state gdbarch method. */
7753
7754 static int
7755 arm_breakpoint_kind_from_current_state (struct gdbarch *gdbarch,
7756 struct regcache *regcache,
7757 CORE_ADDR *pcptr)
7758 {
7759 gdb_byte buf[4];
7760
7761 /* Check the memory pointed by PC is readable. */
7762 if (target_read_memory (regcache_read_pc (regcache), buf, 4) == 0)
7763 {
7764 struct arm_get_next_pcs next_pcs_ctx;
7765
7766 arm_get_next_pcs_ctor (&next_pcs_ctx,
7767 &arm_get_next_pcs_ops,
7768 gdbarch_byte_order (gdbarch),
7769 gdbarch_byte_order_for_code (gdbarch),
7770 0,
7771 regcache);
7772
7773 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
7774
7775 /* If MEMADDR is the next instruction of current pc, do the
7776 software single step computation, and get the thumb mode by
7777 the destination address. */
7778 for (CORE_ADDR pc : next_pcs)
7779 {
7780 if (UNMAKE_THUMB_ADDR (pc) == *pcptr)
7781 {
7782 if (IS_THUMB_ADDR (pc))
7783 {
7784 *pcptr = MAKE_THUMB_ADDR (*pcptr);
7785 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7786 }
7787 else
7788 return ARM_BP_KIND_ARM;
7789 }
7790 }
7791 }
7792
7793 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7794 }
7795
7796 /* Extract from an array REGBUF containing the (raw) register state a
7797 function return value of type TYPE, and copy that, in virtual
7798 format, into VALBUF. */
7799
7800 static void
7801 arm_extract_return_value (struct type *type, struct regcache *regs,
7802 gdb_byte *valbuf)
7803 {
7804 struct gdbarch *gdbarch = regs->arch ();
7805 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7806
7807 if (TYPE_CODE_FLT == TYPE_CODE (type))
7808 {
7809 switch (gdbarch_tdep (gdbarch)->fp_model)
7810 {
7811 case ARM_FLOAT_FPA:
7812 {
7813 /* The value is in register F0 in internal format. We need to
7814 extract the raw value and then convert it to the desired
7815 internal type. */
7816 bfd_byte tmpbuf[ARM_FP_REGISTER_SIZE];
7817
7818 regs->cooked_read (ARM_F0_REGNUM, tmpbuf);
7819 target_float_convert (tmpbuf, arm_ext_type (gdbarch),
7820 valbuf, type);
7821 }
7822 break;
7823
7824 case ARM_FLOAT_SOFT_FPA:
7825 case ARM_FLOAT_SOFT_VFP:
7826 /* ARM_FLOAT_VFP can arise if this is a variadic function so
7827 not using the VFP ABI code. */
7828 case ARM_FLOAT_VFP:
7829 regs->cooked_read (ARM_A1_REGNUM, valbuf);
7830 if (TYPE_LENGTH (type) > 4)
7831 regs->cooked_read (ARM_A1_REGNUM + 1,
7832 valbuf + ARM_INT_REGISTER_SIZE);
7833 break;
7834
7835 default:
7836 internal_error (__FILE__, __LINE__,
7837 _("arm_extract_return_value: "
7838 "Floating point model not supported"));
7839 break;
7840 }
7841 }
7842 else if (TYPE_CODE (type) == TYPE_CODE_INT
7843 || TYPE_CODE (type) == TYPE_CODE_CHAR
7844 || TYPE_CODE (type) == TYPE_CODE_BOOL
7845 || TYPE_CODE (type) == TYPE_CODE_PTR
7846 || TYPE_IS_REFERENCE (type)
7847 || TYPE_CODE (type) == TYPE_CODE_ENUM)
7848 {
7849 /* If the type is a plain integer, then the access is
7850 straight-forward. Otherwise we have to play around a bit
7851 more. */
7852 int len = TYPE_LENGTH (type);
7853 int regno = ARM_A1_REGNUM;
7854 ULONGEST tmp;
7855
7856 while (len > 0)
7857 {
7858 /* By using store_unsigned_integer we avoid having to do
7859 anything special for small big-endian values. */
7860 regcache_cooked_read_unsigned (regs, regno++, &tmp);
7861 store_unsigned_integer (valbuf,
7862 (len > ARM_INT_REGISTER_SIZE
7863 ? ARM_INT_REGISTER_SIZE : len),
7864 byte_order, tmp);
7865 len -= ARM_INT_REGISTER_SIZE;
7866 valbuf += ARM_INT_REGISTER_SIZE;
7867 }
7868 }
7869 else
7870 {
7871 /* For a structure or union the behaviour is as if the value had
7872 been stored to word-aligned memory and then loaded into
7873 registers with 32-bit load instruction(s). */
7874 int len = TYPE_LENGTH (type);
7875 int regno = ARM_A1_REGNUM;
7876 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
7877
7878 while (len > 0)
7879 {
7880 regs->cooked_read (regno++, tmpbuf);
7881 memcpy (valbuf, tmpbuf,
7882 len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len);
7883 len -= ARM_INT_REGISTER_SIZE;
7884 valbuf += ARM_INT_REGISTER_SIZE;
7885 }
7886 }
7887 }
7888
7889
7890 /* Will a function return an aggregate type in memory or in a
7891 register? Return 0 if an aggregate type can be returned in a
7892 register, 1 if it must be returned in memory. */
7893
7894 static int
7895 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
7896 {
7897 enum type_code code;
7898
7899 type = check_typedef (type);
7900
7901 /* Simple, non-aggregate types (ie not including vectors and
7902 complex) are always returned in a register (or registers). */
7903 code = TYPE_CODE (type);
7904 if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
7905 && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
7906 return 0;
7907
7908 if (TYPE_CODE_ARRAY == code && TYPE_VECTOR (type))
7909 {
7910 /* Vector values should be returned using ARM registers if they
7911 are not over 16 bytes. */
7912 return (TYPE_LENGTH (type) > 16);
7913 }
7914
7915 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
7916 {
7917 /* The AAPCS says all aggregates not larger than a word are returned
7918 in a register. */
7919 if (TYPE_LENGTH (type) <= ARM_INT_REGISTER_SIZE)
7920 return 0;
7921
7922 return 1;
7923 }
7924 else
7925 {
7926 int nRc;
7927
7928 /* All aggregate types that won't fit in a register must be returned
7929 in memory. */
7930 if (TYPE_LENGTH (type) > ARM_INT_REGISTER_SIZE)
7931 return 1;
7932
7933 /* In the ARM ABI, "integer" like aggregate types are returned in
7934 registers. For an aggregate type to be integer like, its size
7935 must be less than or equal to ARM_INT_REGISTER_SIZE and the
7936 offset of each addressable subfield must be zero. Note that bit
7937 fields are not addressable, and all addressable subfields of
7938 unions always start at offset zero.
7939
7940 This function is based on the behaviour of GCC 2.95.1.
7941 See: gcc/arm.c: arm_return_in_memory() for details.
7942
7943 Note: All versions of GCC before GCC 2.95.2 do not set up the
7944 parameters correctly for a function returning the following
7945 structure: struct { float f;}; This should be returned in memory,
7946 not a register. Richard Earnshaw sent me a patch, but I do not
7947 know of any way to detect if a function like the above has been
7948 compiled with the correct calling convention. */
7949
7950 /* Assume all other aggregate types can be returned in a register.
7951 Run a check for structures, unions and arrays. */
7952 nRc = 0;
7953
7954 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
7955 {
7956 int i;
7957 /* Need to check if this struct/union is "integer" like. For
7958 this to be true, its size must be less than or equal to
7959 ARM_INT_REGISTER_SIZE and the offset of each addressable
7960 subfield must be zero. Note that bit fields are not
7961 addressable, and unions always start at offset zero. If any
7962 of the subfields is a floating point type, the struct/union
7963 cannot be an integer type. */
7964
7965 /* For each field in the object, check:
7966 1) Is it FP? --> yes, nRc = 1;
7967 2) Is it addressable (bitpos != 0) and
7968 not packed (bitsize == 0)?
7969 --> yes, nRc = 1
7970 */
7971
7972 for (i = 0; i < TYPE_NFIELDS (type); i++)
7973 {
7974 enum type_code field_type_code;
7975
7976 field_type_code
7977 = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
7978 i)));
7979
7980 /* Is it a floating point type field? */
7981 if (field_type_code == TYPE_CODE_FLT)
7982 {
7983 nRc = 1;
7984 break;
7985 }
7986
7987 /* If bitpos != 0, then we have to care about it. */
7988 if (TYPE_FIELD_BITPOS (type, i) != 0)
7989 {
7990 /* Bitfields are not addressable. If the field bitsize is
7991 zero, then the field is not packed. Hence it cannot be
7992 a bitfield or any other packed type. */
7993 if (TYPE_FIELD_BITSIZE (type, i) == 0)
7994 {
7995 nRc = 1;
7996 break;
7997 }
7998 }
7999 }
8000 }
8001
8002 return nRc;
8003 }
8004 }
8005
8006 /* Write into appropriate registers a function return value of type
8007 TYPE, given in virtual format. */
8008
8009 static void
8010 arm_store_return_value (struct type *type, struct regcache *regs,
8011 const gdb_byte *valbuf)
8012 {
8013 struct gdbarch *gdbarch = regs->arch ();
8014 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8015
8016 if (TYPE_CODE (type) == TYPE_CODE_FLT)
8017 {
8018 gdb_byte buf[ARM_FP_REGISTER_SIZE];
8019
8020 switch (gdbarch_tdep (gdbarch)->fp_model)
8021 {
8022 case ARM_FLOAT_FPA:
8023
8024 target_float_convert (valbuf, type, buf, arm_ext_type (gdbarch));
8025 regs->cooked_write (ARM_F0_REGNUM, buf);
8026 break;
8027
8028 case ARM_FLOAT_SOFT_FPA:
8029 case ARM_FLOAT_SOFT_VFP:
8030 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8031 not using the VFP ABI code. */
8032 case ARM_FLOAT_VFP:
8033 regs->cooked_write (ARM_A1_REGNUM, valbuf);
8034 if (TYPE_LENGTH (type) > 4)
8035 regs->cooked_write (ARM_A1_REGNUM + 1,
8036 valbuf + ARM_INT_REGISTER_SIZE);
8037 break;
8038
8039 default:
8040 internal_error (__FILE__, __LINE__,
8041 _("arm_store_return_value: Floating "
8042 "point model not supported"));
8043 break;
8044 }
8045 }
8046 else if (TYPE_CODE (type) == TYPE_CODE_INT
8047 || TYPE_CODE (type) == TYPE_CODE_CHAR
8048 || TYPE_CODE (type) == TYPE_CODE_BOOL
8049 || TYPE_CODE (type) == TYPE_CODE_PTR
8050 || TYPE_IS_REFERENCE (type)
8051 || TYPE_CODE (type) == TYPE_CODE_ENUM)
8052 {
8053 if (TYPE_LENGTH (type) <= 4)
8054 {
8055 /* Values of one word or less are zero/sign-extended and
8056 returned in r0. */
8057 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
8058 LONGEST val = unpack_long (type, valbuf);
8059
8060 store_signed_integer (tmpbuf, ARM_INT_REGISTER_SIZE, byte_order, val);
8061 regs->cooked_write (ARM_A1_REGNUM, tmpbuf);
8062 }
8063 else
8064 {
8065 /* Integral values greater than one word are stored in consecutive
8066 registers starting with r0. This will always be a multiple of
8067 the regiser size. */
8068 int len = TYPE_LENGTH (type);
8069 int regno = ARM_A1_REGNUM;
8070
8071 while (len > 0)
8072 {
8073 regs->cooked_write (regno++, valbuf);
8074 len -= ARM_INT_REGISTER_SIZE;
8075 valbuf += ARM_INT_REGISTER_SIZE;
8076 }
8077 }
8078 }
8079 else
8080 {
8081 /* For a structure or union the behaviour is as if the value had
8082 been stored to word-aligned memory and then loaded into
8083 registers with 32-bit load instruction(s). */
8084 int len = TYPE_LENGTH (type);
8085 int regno = ARM_A1_REGNUM;
8086 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
8087
8088 while (len > 0)
8089 {
8090 memcpy (tmpbuf, valbuf,
8091 len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len);
8092 regs->cooked_write (regno++, tmpbuf);
8093 len -= ARM_INT_REGISTER_SIZE;
8094 valbuf += ARM_INT_REGISTER_SIZE;
8095 }
8096 }
8097 }
8098
8099
8100 /* Handle function return values. */
8101
8102 static enum return_value_convention
8103 arm_return_value (struct gdbarch *gdbarch, struct value *function,
8104 struct type *valtype, struct regcache *regcache,
8105 gdb_byte *readbuf, const gdb_byte *writebuf)
8106 {
8107 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8108 struct type *func_type = function ? value_type (function) : NULL;
8109 enum arm_vfp_cprc_base_type vfp_base_type;
8110 int vfp_base_count;
8111
8112 if (arm_vfp_abi_for_function (gdbarch, func_type)
8113 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
8114 {
8115 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
8116 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
8117 int i;
8118 for (i = 0; i < vfp_base_count; i++)
8119 {
8120 if (reg_char == 'q')
8121 {
8122 if (writebuf)
8123 arm_neon_quad_write (gdbarch, regcache, i,
8124 writebuf + i * unit_length);
8125
8126 if (readbuf)
8127 arm_neon_quad_read (gdbarch, regcache, i,
8128 readbuf + i * unit_length);
8129 }
8130 else
8131 {
8132 char name_buf[4];
8133 int regnum;
8134
8135 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
8136 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8137 strlen (name_buf));
8138 if (writebuf)
8139 regcache->cooked_write (regnum, writebuf + i * unit_length);
8140 if (readbuf)
8141 regcache->cooked_read (regnum, readbuf + i * unit_length);
8142 }
8143 }
8144 return RETURN_VALUE_REGISTER_CONVENTION;
8145 }
8146
8147 if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
8148 || TYPE_CODE (valtype) == TYPE_CODE_UNION
8149 || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
8150 {
8151 if (tdep->struct_return == pcc_struct_return
8152 || arm_return_in_memory (gdbarch, valtype))
8153 return RETURN_VALUE_STRUCT_CONVENTION;
8154 }
8155 else if (TYPE_CODE (valtype) == TYPE_CODE_COMPLEX)
8156 {
8157 if (arm_return_in_memory (gdbarch, valtype))
8158 return RETURN_VALUE_STRUCT_CONVENTION;
8159 }
8160
8161 if (writebuf)
8162 arm_store_return_value (valtype, regcache, writebuf);
8163
8164 if (readbuf)
8165 arm_extract_return_value (valtype, regcache, readbuf);
8166
8167 return RETURN_VALUE_REGISTER_CONVENTION;
8168 }
8169
8170
8171 static int
8172 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
8173 {
8174 struct gdbarch *gdbarch = get_frame_arch (frame);
8175 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8176 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8177 CORE_ADDR jb_addr;
8178 gdb_byte buf[ARM_INT_REGISTER_SIZE];
8179
8180 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
8181
8182 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
8183 ARM_INT_REGISTER_SIZE))
8184 return 0;
8185
8186 *pc = extract_unsigned_integer (buf, ARM_INT_REGISTER_SIZE, byte_order);
8187 return 1;
8188 }
8189 /* A call to cmse secure entry function "foo" at "a" is modified by
8190 GNU ld as "b".
8191 a) bl xxxx <foo>
8192
8193 <foo>
8194 xxxx:
8195
8196 b) bl yyyy <__acle_se_foo>
8197
8198 section .gnu.sgstubs:
8199 <foo>
8200 yyyy: sg // secure gateway
8201 b.w xxxx <__acle_se_foo> // original_branch_dest
8202
8203 <__acle_se_foo>
8204 xxxx:
8205
8206 When the control at "b", the pc contains "yyyy" (sg address) which is a
8207 trampoline and does not exist in source code. This function returns the
8208 target pc "xxxx". For more details please refer to section 5.4
8209 (Entry functions) and section 3.4.4 (C level development flow of secure code)
8210 of "armv8-m-security-extensions-requirements-on-development-tools-engineering-specification"
8211 document on www.developer.arm.com. */
8212
8213 static CORE_ADDR
8214 arm_skip_cmse_entry (CORE_ADDR pc, const char *name, struct objfile *objfile)
8215 {
8216 int target_len = strlen (name) + strlen ("__acle_se_") + 1;
8217 char *target_name = (char *) alloca (target_len);
8218 xsnprintf (target_name, target_len, "%s%s", "__acle_se_", name);
8219
8220 struct bound_minimal_symbol minsym
8221 = lookup_minimal_symbol (target_name, NULL, objfile);
8222
8223 if (minsym.minsym != nullptr)
8224 return BMSYMBOL_VALUE_ADDRESS (minsym);
8225
8226 return 0;
8227 }
8228
8229 /* Return true when SEC points to ".gnu.sgstubs" section. */
8230
8231 static bool
8232 arm_is_sgstubs_section (struct obj_section *sec)
8233 {
8234 return (sec != nullptr
8235 && sec->the_bfd_section != nullptr
8236 && sec->the_bfd_section->name != nullptr
8237 && streq (sec->the_bfd_section->name, ".gnu.sgstubs"));
8238 }
8239
8240 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
8241 return the target PC. Otherwise return 0. */
8242
8243 CORE_ADDR
8244 arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
8245 {
8246 const char *name;
8247 int namelen;
8248 CORE_ADDR start_addr;
8249
8250 /* Find the starting address and name of the function containing the PC. */
8251 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
8252 {
8253 /* Trampoline 'bx reg' doesn't belong to any functions. Do the
8254 check here. */
8255 start_addr = arm_skip_bx_reg (frame, pc);
8256 if (start_addr != 0)
8257 return start_addr;
8258
8259 return 0;
8260 }
8261
8262 /* If PC is in a Thumb call or return stub, return the address of the
8263 target PC, which is in a register. The thunk functions are called
8264 _call_via_xx, where x is the register name. The possible names
8265 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
8266 functions, named __ARM_call_via_r[0-7]. */
8267 if (startswith (name, "_call_via_")
8268 || startswith (name, "__ARM_call_via_"))
8269 {
8270 /* Use the name suffix to determine which register contains the
8271 target PC. */
8272 static const char *table[15] =
8273 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
8274 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
8275 };
8276 int regno;
8277 int offset = strlen (name) - 2;
8278
8279 for (regno = 0; regno <= 14; regno++)
8280 if (strcmp (&name[offset], table[regno]) == 0)
8281 return get_frame_register_unsigned (frame, regno);
8282 }
8283
8284 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
8285 non-interworking calls to foo. We could decode the stubs
8286 to find the target but it's easier to use the symbol table. */
8287 namelen = strlen (name);
8288 if (name[0] == '_' && name[1] == '_'
8289 && ((namelen > 2 + strlen ("_from_thumb")
8290 && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
8291 || (namelen > 2 + strlen ("_from_arm")
8292 && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
8293 {
8294 char *target_name;
8295 int target_len = namelen - 2;
8296 struct bound_minimal_symbol minsym;
8297 struct objfile *objfile;
8298 struct obj_section *sec;
8299
8300 if (name[namelen - 1] == 'b')
8301 target_len -= strlen ("_from_thumb");
8302 else
8303 target_len -= strlen ("_from_arm");
8304
8305 target_name = (char *) alloca (target_len + 1);
8306 memcpy (target_name, name + 2, target_len);
8307 target_name[target_len] = '\0';
8308
8309 sec = find_pc_section (pc);
8310 objfile = (sec == NULL) ? NULL : sec->objfile;
8311 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
8312 if (minsym.minsym != NULL)
8313 return BMSYMBOL_VALUE_ADDRESS (minsym);
8314 else
8315 return 0;
8316 }
8317
8318 struct obj_section *section = find_pc_section (pc);
8319
8320 /* Check whether SECTION points to the ".gnu.sgstubs" section. */
8321 if (arm_is_sgstubs_section (section))
8322 return arm_skip_cmse_entry (pc, name, section->objfile);
8323
8324 return 0; /* not a stub */
8325 }
8326
8327 static void
8328 set_arm_command (const char *args, int from_tty)
8329 {
8330 printf_unfiltered (_("\
8331 \"set arm\" must be followed by an apporpriate subcommand.\n"));
8332 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
8333 }
8334
8335 static void
8336 show_arm_command (const char *args, int from_tty)
8337 {
8338 cmd_show_list (showarmcmdlist, from_tty, "");
8339 }
8340
8341 static void
8342 arm_update_current_architecture (void)
8343 {
8344 struct gdbarch_info info;
8345
8346 /* If the current architecture is not ARM, we have nothing to do. */
8347 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
8348 return;
8349
8350 /* Update the architecture. */
8351 gdbarch_info_init (&info);
8352
8353 if (!gdbarch_update_p (info))
8354 internal_error (__FILE__, __LINE__, _("could not update architecture"));
8355 }
8356
8357 static void
8358 set_fp_model_sfunc (const char *args, int from_tty,
8359 struct cmd_list_element *c)
8360 {
8361 int fp_model;
8362
8363 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
8364 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
8365 {
8366 arm_fp_model = (enum arm_float_model) fp_model;
8367 break;
8368 }
8369
8370 if (fp_model == ARM_FLOAT_LAST)
8371 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
8372 current_fp_model);
8373
8374 arm_update_current_architecture ();
8375 }
8376
8377 static void
8378 show_fp_model (struct ui_file *file, int from_tty,
8379 struct cmd_list_element *c, const char *value)
8380 {
8381 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8382
8383 if (arm_fp_model == ARM_FLOAT_AUTO
8384 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8385 fprintf_filtered (file, _("\
8386 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
8387 fp_model_strings[tdep->fp_model]);
8388 else
8389 fprintf_filtered (file, _("\
8390 The current ARM floating point model is \"%s\".\n"),
8391 fp_model_strings[arm_fp_model]);
8392 }
8393
8394 static void
8395 arm_set_abi (const char *args, int from_tty,
8396 struct cmd_list_element *c)
8397 {
8398 int arm_abi;
8399
8400 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
8401 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
8402 {
8403 arm_abi_global = (enum arm_abi_kind) arm_abi;
8404 break;
8405 }
8406
8407 if (arm_abi == ARM_ABI_LAST)
8408 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
8409 arm_abi_string);
8410
8411 arm_update_current_architecture ();
8412 }
8413
8414 static void
8415 arm_show_abi (struct ui_file *file, int from_tty,
8416 struct cmd_list_element *c, const char *value)
8417 {
8418 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8419
8420 if (arm_abi_global == ARM_ABI_AUTO
8421 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8422 fprintf_filtered (file, _("\
8423 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
8424 arm_abi_strings[tdep->arm_abi]);
8425 else
8426 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
8427 arm_abi_string);
8428 }
8429
8430 static void
8431 arm_show_fallback_mode (struct ui_file *file, int from_tty,
8432 struct cmd_list_element *c, const char *value)
8433 {
8434 fprintf_filtered (file,
8435 _("The current execution mode assumed "
8436 "(when symbols are unavailable) is \"%s\".\n"),
8437 arm_fallback_mode_string);
8438 }
8439
8440 static void
8441 arm_show_force_mode (struct ui_file *file, int from_tty,
8442 struct cmd_list_element *c, const char *value)
8443 {
8444 fprintf_filtered (file,
8445 _("The current execution mode assumed "
8446 "(even when symbols are available) is \"%s\".\n"),
8447 arm_force_mode_string);
8448 }
8449
8450 /* If the user changes the register disassembly style used for info
8451 register and other commands, we have to also switch the style used
8452 in opcodes for disassembly output. This function is run in the "set
8453 arm disassembly" command, and does that. */
8454
8455 static void
8456 set_disassembly_style_sfunc (const char *args, int from_tty,
8457 struct cmd_list_element *c)
8458 {
8459 /* Convert the short style name into the long style name (eg, reg-names-*)
8460 before calling the generic set_disassembler_options() function. */
8461 std::string long_name = std::string ("reg-names-") + disassembly_style;
8462 set_disassembler_options (&long_name[0]);
8463 }
8464
8465 static void
8466 show_disassembly_style_sfunc (struct ui_file *file, int from_tty,
8467 struct cmd_list_element *c, const char *value)
8468 {
8469 struct gdbarch *gdbarch = get_current_arch ();
8470 char *options = get_disassembler_options (gdbarch);
8471 const char *style = "";
8472 int len = 0;
8473 const char *opt;
8474
8475 FOR_EACH_DISASSEMBLER_OPTION (opt, options)
8476 if (CONST_STRNEQ (opt, "reg-names-"))
8477 {
8478 style = &opt[strlen ("reg-names-")];
8479 len = strcspn (style, ",");
8480 }
8481
8482 fprintf_unfiltered (file, "The disassembly style is \"%.*s\".\n", len, style);
8483 }
8484 \f
8485 /* Return the ARM register name corresponding to register I. */
8486 static const char *
8487 arm_register_name (struct gdbarch *gdbarch, int i)
8488 {
8489 const int num_regs = gdbarch_num_regs (gdbarch);
8490
8491 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
8492 && i >= num_regs && i < num_regs + 32)
8493 {
8494 static const char *const vfp_pseudo_names[] = {
8495 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
8496 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
8497 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
8498 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
8499 };
8500
8501 return vfp_pseudo_names[i - num_regs];
8502 }
8503
8504 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
8505 && i >= num_regs + 32 && i < num_regs + 32 + 16)
8506 {
8507 static const char *const neon_pseudo_names[] = {
8508 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
8509 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
8510 };
8511
8512 return neon_pseudo_names[i - num_regs - 32];
8513 }
8514
8515 if (i >= ARRAY_SIZE (arm_register_names))
8516 /* These registers are only supported on targets which supply
8517 an XML description. */
8518 return "";
8519
8520 return arm_register_names[i];
8521 }
8522
8523 /* Test whether the coff symbol specific value corresponds to a Thumb
8524 function. */
8525
8526 static int
8527 coff_sym_is_thumb (int val)
8528 {
8529 return (val == C_THUMBEXT
8530 || val == C_THUMBSTAT
8531 || val == C_THUMBEXTFUNC
8532 || val == C_THUMBSTATFUNC
8533 || val == C_THUMBLABEL);
8534 }
8535
8536 /* arm_coff_make_msymbol_special()
8537 arm_elf_make_msymbol_special()
8538
8539 These functions test whether the COFF or ELF symbol corresponds to
8540 an address in thumb code, and set a "special" bit in a minimal
8541 symbol to indicate that it does. */
8542
8543 static void
8544 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
8545 {
8546 elf_symbol_type *elfsym = (elf_symbol_type *) sym;
8547
8548 if (ARM_GET_SYM_BRANCH_TYPE (elfsym->internal_elf_sym.st_target_internal)
8549 == ST_BRANCH_TO_THUMB)
8550 MSYMBOL_SET_SPECIAL (msym);
8551 }
8552
8553 static void
8554 arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
8555 {
8556 if (coff_sym_is_thumb (val))
8557 MSYMBOL_SET_SPECIAL (msym);
8558 }
8559
8560 static void
8561 arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
8562 asymbol *sym)
8563 {
8564 const char *name = bfd_asymbol_name (sym);
8565 struct arm_per_objfile *data;
8566 struct arm_mapping_symbol new_map_sym;
8567
8568 gdb_assert (name[0] == '$');
8569 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
8570 return;
8571
8572 data = arm_objfile_data_key.get (objfile);
8573 if (data == NULL)
8574 data = arm_objfile_data_key.emplace (objfile,
8575 objfile->obfd->section_count);
8576 arm_mapping_symbol_vec &map
8577 = data->section_maps[bfd_asymbol_section (sym)->index];
8578
8579 new_map_sym.value = sym->value;
8580 new_map_sym.type = name[1];
8581
8582 /* Insert at the end, the vector will be sorted on first use. */
8583 map.push_back (new_map_sym);
8584 }
8585
8586 static void
8587 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
8588 {
8589 struct gdbarch *gdbarch = regcache->arch ();
8590 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
8591
8592 /* If necessary, set the T bit. */
8593 if (arm_apcs_32)
8594 {
8595 ULONGEST val, t_bit;
8596 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
8597 t_bit = arm_psr_thumb_bit (gdbarch);
8598 if (arm_pc_is_thumb (gdbarch, pc))
8599 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8600 val | t_bit);
8601 else
8602 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8603 val & ~t_bit);
8604 }
8605 }
8606
8607 /* Read the contents of a NEON quad register, by reading from two
8608 double registers. This is used to implement the quad pseudo
8609 registers, and for argument passing in case the quad registers are
8610 missing; vectors are passed in quad registers when using the VFP
8611 ABI, even if a NEON unit is not present. REGNUM is the index of
8612 the quad register, in [0, 15]. */
8613
8614 static enum register_status
8615 arm_neon_quad_read (struct gdbarch *gdbarch, readable_regcache *regcache,
8616 int regnum, gdb_byte *buf)
8617 {
8618 char name_buf[4];
8619 gdb_byte reg_buf[8];
8620 int offset, double_regnum;
8621 enum register_status status;
8622
8623 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8624 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8625 strlen (name_buf));
8626
8627 /* d0 is always the least significant half of q0. */
8628 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8629 offset = 8;
8630 else
8631 offset = 0;
8632
8633 status = regcache->raw_read (double_regnum, reg_buf);
8634 if (status != REG_VALID)
8635 return status;
8636 memcpy (buf + offset, reg_buf, 8);
8637
8638 offset = 8 - offset;
8639 status = regcache->raw_read (double_regnum + 1, reg_buf);
8640 if (status != REG_VALID)
8641 return status;
8642 memcpy (buf + offset, reg_buf, 8);
8643
8644 return REG_VALID;
8645 }
8646
8647 static enum register_status
8648 arm_pseudo_read (struct gdbarch *gdbarch, readable_regcache *regcache,
8649 int regnum, gdb_byte *buf)
8650 {
8651 const int num_regs = gdbarch_num_regs (gdbarch);
8652 char name_buf[4];
8653 gdb_byte reg_buf[8];
8654 int offset, double_regnum;
8655
8656 gdb_assert (regnum >= num_regs);
8657 regnum -= num_regs;
8658
8659 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8660 /* Quad-precision register. */
8661 return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
8662 else
8663 {
8664 enum register_status status;
8665
8666 /* Single-precision register. */
8667 gdb_assert (regnum < 32);
8668
8669 /* s0 is always the least significant half of d0. */
8670 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8671 offset = (regnum & 1) ? 0 : 4;
8672 else
8673 offset = (regnum & 1) ? 4 : 0;
8674
8675 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8676 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8677 strlen (name_buf));
8678
8679 status = regcache->raw_read (double_regnum, reg_buf);
8680 if (status == REG_VALID)
8681 memcpy (buf, reg_buf + offset, 4);
8682 return status;
8683 }
8684 }
8685
8686 /* Store the contents of BUF to a NEON quad register, by writing to
8687 two double registers. This is used to implement the quad pseudo
8688 registers, and for argument passing in case the quad registers are
8689 missing; vectors are passed in quad registers when using the VFP
8690 ABI, even if a NEON unit is not present. REGNUM is the index
8691 of the quad register, in [0, 15]. */
8692
8693 static void
8694 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
8695 int regnum, const gdb_byte *buf)
8696 {
8697 char name_buf[4];
8698 int offset, double_regnum;
8699
8700 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8701 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8702 strlen (name_buf));
8703
8704 /* d0 is always the least significant half of q0. */
8705 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8706 offset = 8;
8707 else
8708 offset = 0;
8709
8710 regcache->raw_write (double_regnum, buf + offset);
8711 offset = 8 - offset;
8712 regcache->raw_write (double_regnum + 1, buf + offset);
8713 }
8714
8715 static void
8716 arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
8717 int regnum, const gdb_byte *buf)
8718 {
8719 const int num_regs = gdbarch_num_regs (gdbarch);
8720 char name_buf[4];
8721 gdb_byte reg_buf[8];
8722 int offset, double_regnum;
8723
8724 gdb_assert (regnum >= num_regs);
8725 regnum -= num_regs;
8726
8727 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8728 /* Quad-precision register. */
8729 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
8730 else
8731 {
8732 /* Single-precision register. */
8733 gdb_assert (regnum < 32);
8734
8735 /* s0 is always the least significant half of d0. */
8736 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8737 offset = (regnum & 1) ? 0 : 4;
8738 else
8739 offset = (regnum & 1) ? 4 : 0;
8740
8741 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8742 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8743 strlen (name_buf));
8744
8745 regcache->raw_read (double_regnum, reg_buf);
8746 memcpy (reg_buf + offset, buf, 4);
8747 regcache->raw_write (double_regnum, reg_buf);
8748 }
8749 }
8750
8751 static struct value *
8752 value_of_arm_user_reg (struct frame_info *frame, const void *baton)
8753 {
8754 const int *reg_p = (const int *) baton;
8755 return value_of_register (*reg_p, frame);
8756 }
8757 \f
8758 static enum gdb_osabi
8759 arm_elf_osabi_sniffer (bfd *abfd)
8760 {
8761 unsigned int elfosabi;
8762 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
8763
8764 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
8765
8766 if (elfosabi == ELFOSABI_ARM)
8767 /* GNU tools use this value. Check note sections in this case,
8768 as well. */
8769 bfd_map_over_sections (abfd,
8770 generic_elf_osabi_sniff_abi_tag_sections,
8771 &osabi);
8772
8773 /* Anything else will be handled by the generic ELF sniffer. */
8774 return osabi;
8775 }
8776
8777 static int
8778 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
8779 struct reggroup *group)
8780 {
8781 /* FPS register's type is INT, but belongs to float_reggroup. Beside
8782 this, FPS register belongs to save_regroup, restore_reggroup, and
8783 all_reggroup, of course. */
8784 if (regnum == ARM_FPS_REGNUM)
8785 return (group == float_reggroup
8786 || group == save_reggroup
8787 || group == restore_reggroup
8788 || group == all_reggroup);
8789 else
8790 return default_register_reggroup_p (gdbarch, regnum, group);
8791 }
8792
8793 /* For backward-compatibility we allow two 'g' packet lengths with
8794 the remote protocol depending on whether FPA registers are
8795 supplied. M-profile targets do not have FPA registers, but some
8796 stubs already exist in the wild which use a 'g' packet which
8797 supplies them albeit with dummy values. The packet format which
8798 includes FPA registers should be considered deprecated for
8799 M-profile targets. */
8800
8801 static void
8802 arm_register_g_packet_guesses (struct gdbarch *gdbarch)
8803 {
8804 if (gdbarch_tdep (gdbarch)->is_m)
8805 {
8806 const target_desc *tdesc;
8807
8808 /* If we know from the executable this is an M-profile target,
8809 cater for remote targets whose register set layout is the
8810 same as the FPA layout. */
8811 tdesc = arm_read_mprofile_description (ARM_M_TYPE_WITH_FPA);
8812 register_remote_g_packet_guess (gdbarch,
8813 ARM_CORE_REGS_SIZE + ARM_FP_REGS_SIZE,
8814 tdesc);
8815
8816 /* The regular M-profile layout. */
8817 tdesc = arm_read_mprofile_description (ARM_M_TYPE_M_PROFILE);
8818 register_remote_g_packet_guess (gdbarch, ARM_CORE_REGS_SIZE,
8819 tdesc);
8820
8821 /* M-profile plus M4F VFP. */
8822 tdesc = arm_read_mprofile_description (ARM_M_TYPE_VFP_D16);
8823 register_remote_g_packet_guess (gdbarch,
8824 ARM_CORE_REGS_SIZE + ARM_VFP2_REGS_SIZE,
8825 tdesc);
8826 }
8827
8828 /* Otherwise we don't have a useful guess. */
8829 }
8830
8831 /* Implement the code_of_frame_writable gdbarch method. */
8832
8833 static int
8834 arm_code_of_frame_writable (struct gdbarch *gdbarch, struct frame_info *frame)
8835 {
8836 if (gdbarch_tdep (gdbarch)->is_m
8837 && get_frame_type (frame) == SIGTRAMP_FRAME)
8838 {
8839 /* M-profile exception frames return to some magic PCs, where
8840 isn't writable at all. */
8841 return 0;
8842 }
8843 else
8844 return 1;
8845 }
8846
8847 /* Implement gdbarch_gnu_triplet_regexp. If the arch name is arm then allow it
8848 to be postfixed by a version (eg armv7hl). */
8849
8850 static const char *
8851 arm_gnu_triplet_regexp (struct gdbarch *gdbarch)
8852 {
8853 if (strcmp (gdbarch_bfd_arch_info (gdbarch)->arch_name, "arm") == 0)
8854 return "arm(v[^- ]*)?";
8855 return gdbarch_bfd_arch_info (gdbarch)->arch_name;
8856 }
8857
8858 /* Initialize the current architecture based on INFO. If possible,
8859 re-use an architecture from ARCHES, which is a list of
8860 architectures already created during this debugging session.
8861
8862 Called e.g. at program startup, when reading a core file, and when
8863 reading a binary file. */
8864
8865 static struct gdbarch *
8866 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
8867 {
8868 struct gdbarch_tdep *tdep;
8869 struct gdbarch *gdbarch;
8870 struct gdbarch_list *best_arch;
8871 enum arm_abi_kind arm_abi = arm_abi_global;
8872 enum arm_float_model fp_model = arm_fp_model;
8873 struct tdesc_arch_data *tdesc_data = NULL;
8874 int i, is_m = 0;
8875 int vfp_register_count = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
8876 int have_wmmx_registers = 0;
8877 int have_neon = 0;
8878 int have_fpa_registers = 1;
8879 const struct target_desc *tdesc = info.target_desc;
8880
8881 /* If we have an object to base this architecture on, try to determine
8882 its ABI. */
8883
8884 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
8885 {
8886 int ei_osabi, e_flags;
8887
8888 switch (bfd_get_flavour (info.abfd))
8889 {
8890 case bfd_target_coff_flavour:
8891 /* Assume it's an old APCS-style ABI. */
8892 /* XXX WinCE? */
8893 arm_abi = ARM_ABI_APCS;
8894 break;
8895
8896 case bfd_target_elf_flavour:
8897 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
8898 e_flags = elf_elfheader (info.abfd)->e_flags;
8899
8900 if (ei_osabi == ELFOSABI_ARM)
8901 {
8902 /* GNU tools used to use this value, but do not for EABI
8903 objects. There's nowhere to tag an EABI version
8904 anyway, so assume APCS. */
8905 arm_abi = ARM_ABI_APCS;
8906 }
8907 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
8908 {
8909 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
8910
8911 switch (eabi_ver)
8912 {
8913 case EF_ARM_EABI_UNKNOWN:
8914 /* Assume GNU tools. */
8915 arm_abi = ARM_ABI_APCS;
8916 break;
8917
8918 case EF_ARM_EABI_VER4:
8919 case EF_ARM_EABI_VER5:
8920 arm_abi = ARM_ABI_AAPCS;
8921 /* EABI binaries default to VFP float ordering.
8922 They may also contain build attributes that can
8923 be used to identify if the VFP argument-passing
8924 ABI is in use. */
8925 if (fp_model == ARM_FLOAT_AUTO)
8926 {
8927 #ifdef HAVE_ELF
8928 switch (bfd_elf_get_obj_attr_int (info.abfd,
8929 OBJ_ATTR_PROC,
8930 Tag_ABI_VFP_args))
8931 {
8932 case AEABI_VFP_args_base:
8933 /* "The user intended FP parameter/result
8934 passing to conform to AAPCS, base
8935 variant". */
8936 fp_model = ARM_FLOAT_SOFT_VFP;
8937 break;
8938 case AEABI_VFP_args_vfp:
8939 /* "The user intended FP parameter/result
8940 passing to conform to AAPCS, VFP
8941 variant". */
8942 fp_model = ARM_FLOAT_VFP;
8943 break;
8944 case AEABI_VFP_args_toolchain:
8945 /* "The user intended FP parameter/result
8946 passing to conform to tool chain-specific
8947 conventions" - we don't know any such
8948 conventions, so leave it as "auto". */
8949 break;
8950 case AEABI_VFP_args_compatible:
8951 /* "Code is compatible with both the base
8952 and VFP variants; the user did not permit
8953 non-variadic functions to pass FP
8954 parameters/results" - leave it as
8955 "auto". */
8956 break;
8957 default:
8958 /* Attribute value not mentioned in the
8959 November 2012 ABI, so leave it as
8960 "auto". */
8961 break;
8962 }
8963 #else
8964 fp_model = ARM_FLOAT_SOFT_VFP;
8965 #endif
8966 }
8967 break;
8968
8969 default:
8970 /* Leave it as "auto". */
8971 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
8972 break;
8973 }
8974
8975 #ifdef HAVE_ELF
8976 /* Detect M-profile programs. This only works if the
8977 executable file includes build attributes; GCC does
8978 copy them to the executable, but e.g. RealView does
8979 not. */
8980 int attr_arch
8981 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
8982 Tag_CPU_arch);
8983 int attr_profile
8984 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
8985 Tag_CPU_arch_profile);
8986
8987 /* GCC specifies the profile for v6-M; RealView only
8988 specifies the profile for architectures starting with
8989 V7 (as opposed to architectures with a tag
8990 numerically greater than TAG_CPU_ARCH_V7). */
8991 if (!tdesc_has_registers (tdesc)
8992 && (attr_arch == TAG_CPU_ARCH_V6_M
8993 || attr_arch == TAG_CPU_ARCH_V6S_M
8994 || attr_profile == 'M'))
8995 is_m = 1;
8996 #endif
8997 }
8998
8999 if (fp_model == ARM_FLOAT_AUTO)
9000 {
9001 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
9002 {
9003 case 0:
9004 /* Leave it as "auto". Strictly speaking this case
9005 means FPA, but almost nobody uses that now, and
9006 many toolchains fail to set the appropriate bits
9007 for the floating-point model they use. */
9008 break;
9009 case EF_ARM_SOFT_FLOAT:
9010 fp_model = ARM_FLOAT_SOFT_FPA;
9011 break;
9012 case EF_ARM_VFP_FLOAT:
9013 fp_model = ARM_FLOAT_VFP;
9014 break;
9015 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
9016 fp_model = ARM_FLOAT_SOFT_VFP;
9017 break;
9018 }
9019 }
9020
9021 if (e_flags & EF_ARM_BE8)
9022 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
9023
9024 break;
9025
9026 default:
9027 /* Leave it as "auto". */
9028 break;
9029 }
9030 }
9031
9032 /* Check any target description for validity. */
9033 if (tdesc_has_registers (tdesc))
9034 {
9035 /* For most registers we require GDB's default names; but also allow
9036 the numeric names for sp / lr / pc, as a convenience. */
9037 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
9038 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
9039 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
9040
9041 const struct tdesc_feature *feature;
9042 int valid_p;
9043
9044 feature = tdesc_find_feature (tdesc,
9045 "org.gnu.gdb.arm.core");
9046 if (feature == NULL)
9047 {
9048 feature = tdesc_find_feature (tdesc,
9049 "org.gnu.gdb.arm.m-profile");
9050 if (feature == NULL)
9051 return NULL;
9052 else
9053 is_m = 1;
9054 }
9055
9056 tdesc_data = tdesc_data_alloc ();
9057
9058 valid_p = 1;
9059 for (i = 0; i < ARM_SP_REGNUM; i++)
9060 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9061 arm_register_names[i]);
9062 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9063 ARM_SP_REGNUM,
9064 arm_sp_names);
9065 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9066 ARM_LR_REGNUM,
9067 arm_lr_names);
9068 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9069 ARM_PC_REGNUM,
9070 arm_pc_names);
9071 if (is_m)
9072 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9073 ARM_PS_REGNUM, "xpsr");
9074 else
9075 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9076 ARM_PS_REGNUM, "cpsr");
9077
9078 if (!valid_p)
9079 {
9080 tdesc_data_cleanup (tdesc_data);
9081 return NULL;
9082 }
9083
9084 feature = tdesc_find_feature (tdesc,
9085 "org.gnu.gdb.arm.fpa");
9086 if (feature != NULL)
9087 {
9088 valid_p = 1;
9089 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
9090 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9091 arm_register_names[i]);
9092 if (!valid_p)
9093 {
9094 tdesc_data_cleanup (tdesc_data);
9095 return NULL;
9096 }
9097 }
9098 else
9099 have_fpa_registers = 0;
9100
9101 feature = tdesc_find_feature (tdesc,
9102 "org.gnu.gdb.xscale.iwmmxt");
9103 if (feature != NULL)
9104 {
9105 static const char *const iwmmxt_names[] = {
9106 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9107 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9108 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9109 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9110 };
9111
9112 valid_p = 1;
9113 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
9114 valid_p
9115 &= tdesc_numbered_register (feature, tdesc_data, i,
9116 iwmmxt_names[i - ARM_WR0_REGNUM]);
9117
9118 /* Check for the control registers, but do not fail if they
9119 are missing. */
9120 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
9121 tdesc_numbered_register (feature, tdesc_data, i,
9122 iwmmxt_names[i - ARM_WR0_REGNUM]);
9123
9124 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
9125 valid_p
9126 &= tdesc_numbered_register (feature, tdesc_data, i,
9127 iwmmxt_names[i - ARM_WR0_REGNUM]);
9128
9129 if (!valid_p)
9130 {
9131 tdesc_data_cleanup (tdesc_data);
9132 return NULL;
9133 }
9134
9135 have_wmmx_registers = 1;
9136 }
9137
9138 /* If we have a VFP unit, check whether the single precision registers
9139 are present. If not, then we will synthesize them as pseudo
9140 registers. */
9141 feature = tdesc_find_feature (tdesc,
9142 "org.gnu.gdb.arm.vfp");
9143 if (feature != NULL)
9144 {
9145 static const char *const vfp_double_names[] = {
9146 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9147 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9148 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9149 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9150 };
9151
9152 /* Require the double precision registers. There must be either
9153 16 or 32. */
9154 valid_p = 1;
9155 for (i = 0; i < 32; i++)
9156 {
9157 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9158 ARM_D0_REGNUM + i,
9159 vfp_double_names[i]);
9160 if (!valid_p)
9161 break;
9162 }
9163 if (!valid_p && i == 16)
9164 valid_p = 1;
9165
9166 /* Also require FPSCR. */
9167 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9168 ARM_FPSCR_REGNUM, "fpscr");
9169 if (!valid_p)
9170 {
9171 tdesc_data_cleanup (tdesc_data);
9172 return NULL;
9173 }
9174
9175 if (tdesc_unnumbered_register (feature, "s0") == 0)
9176 have_vfp_pseudos = 1;
9177
9178 vfp_register_count = i;
9179
9180 /* If we have VFP, also check for NEON. The architecture allows
9181 NEON without VFP (integer vector operations only), but GDB
9182 does not support that. */
9183 feature = tdesc_find_feature (tdesc,
9184 "org.gnu.gdb.arm.neon");
9185 if (feature != NULL)
9186 {
9187 /* NEON requires 32 double-precision registers. */
9188 if (i != 32)
9189 {
9190 tdesc_data_cleanup (tdesc_data);
9191 return NULL;
9192 }
9193
9194 /* If there are quad registers defined by the stub, use
9195 their type; otherwise (normally) provide them with
9196 the default type. */
9197 if (tdesc_unnumbered_register (feature, "q0") == 0)
9198 have_neon_pseudos = 1;
9199
9200 have_neon = 1;
9201 }
9202 }
9203 }
9204
9205 /* If there is already a candidate, use it. */
9206 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
9207 best_arch != NULL;
9208 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
9209 {
9210 if (arm_abi != ARM_ABI_AUTO
9211 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
9212 continue;
9213
9214 if (fp_model != ARM_FLOAT_AUTO
9215 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
9216 continue;
9217
9218 /* There are various other properties in tdep that we do not
9219 need to check here: those derived from a target description,
9220 since gdbarches with a different target description are
9221 automatically disqualified. */
9222
9223 /* Do check is_m, though, since it might come from the binary. */
9224 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
9225 continue;
9226
9227 /* Found a match. */
9228 break;
9229 }
9230
9231 if (best_arch != NULL)
9232 {
9233 if (tdesc_data != NULL)
9234 tdesc_data_cleanup (tdesc_data);
9235 return best_arch->gdbarch;
9236 }
9237
9238 tdep = XCNEW (struct gdbarch_tdep);
9239 gdbarch = gdbarch_alloc (&info, tdep);
9240
9241 /* Record additional information about the architecture we are defining.
9242 These are gdbarch discriminators, like the OSABI. */
9243 tdep->arm_abi = arm_abi;
9244 tdep->fp_model = fp_model;
9245 tdep->is_m = is_m;
9246 tdep->have_fpa_registers = have_fpa_registers;
9247 tdep->have_wmmx_registers = have_wmmx_registers;
9248 gdb_assert (vfp_register_count == 0
9249 || vfp_register_count == 16
9250 || vfp_register_count == 32);
9251 tdep->vfp_register_count = vfp_register_count;
9252 tdep->have_vfp_pseudos = have_vfp_pseudos;
9253 tdep->have_neon_pseudos = have_neon_pseudos;
9254 tdep->have_neon = have_neon;
9255
9256 arm_register_g_packet_guesses (gdbarch);
9257
9258 /* Breakpoints. */
9259 switch (info.byte_order_for_code)
9260 {
9261 case BFD_ENDIAN_BIG:
9262 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
9263 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
9264 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
9265 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
9266
9267 break;
9268
9269 case BFD_ENDIAN_LITTLE:
9270 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
9271 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
9272 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
9273 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
9274
9275 break;
9276
9277 default:
9278 internal_error (__FILE__, __LINE__,
9279 _("arm_gdbarch_init: bad byte order for float format"));
9280 }
9281
9282 /* On ARM targets char defaults to unsigned. */
9283 set_gdbarch_char_signed (gdbarch, 0);
9284
9285 /* wchar_t is unsigned under the AAPCS. */
9286 if (tdep->arm_abi == ARM_ABI_AAPCS)
9287 set_gdbarch_wchar_signed (gdbarch, 0);
9288 else
9289 set_gdbarch_wchar_signed (gdbarch, 1);
9290
9291 /* Compute type alignment. */
9292 set_gdbarch_type_align (gdbarch, arm_type_align);
9293
9294 /* Note: for displaced stepping, this includes the breakpoint, and one word
9295 of additional scratch space. This setting isn't used for anything beside
9296 displaced stepping at present. */
9297 set_gdbarch_max_insn_length (gdbarch, 4 * ARM_DISPLACED_MODIFIED_INSNS);
9298
9299 /* This should be low enough for everything. */
9300 tdep->lowest_pc = 0x20;
9301 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
9302
9303 /* The default, for both APCS and AAPCS, is to return small
9304 structures in registers. */
9305 tdep->struct_return = reg_struct_return;
9306
9307 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
9308 set_gdbarch_frame_align (gdbarch, arm_frame_align);
9309
9310 if (is_m)
9311 set_gdbarch_code_of_frame_writable (gdbarch, arm_code_of_frame_writable);
9312
9313 set_gdbarch_write_pc (gdbarch, arm_write_pc);
9314
9315 frame_base_set_default (gdbarch, &arm_normal_base);
9316
9317 /* Address manipulation. */
9318 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
9319
9320 /* Advance PC across function entry code. */
9321 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
9322
9323 /* Detect whether PC is at a point where the stack has been destroyed. */
9324 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
9325
9326 /* Skip trampolines. */
9327 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
9328
9329 /* The stack grows downward. */
9330 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
9331
9332 /* Breakpoint manipulation. */
9333 set_gdbarch_breakpoint_kind_from_pc (gdbarch, arm_breakpoint_kind_from_pc);
9334 set_gdbarch_sw_breakpoint_from_kind (gdbarch, arm_sw_breakpoint_from_kind);
9335 set_gdbarch_breakpoint_kind_from_current_state (gdbarch,
9336 arm_breakpoint_kind_from_current_state);
9337
9338 /* Information about registers, etc. */
9339 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
9340 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
9341 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
9342 set_gdbarch_register_type (gdbarch, arm_register_type);
9343 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
9344
9345 /* This "info float" is FPA-specific. Use the generic version if we
9346 do not have FPA. */
9347 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
9348 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
9349
9350 /* Internal <-> external register number maps. */
9351 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
9352 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
9353
9354 set_gdbarch_register_name (gdbarch, arm_register_name);
9355
9356 /* Returning results. */
9357 set_gdbarch_return_value (gdbarch, arm_return_value);
9358
9359 /* Disassembly. */
9360 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
9361
9362 /* Minsymbol frobbing. */
9363 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
9364 set_gdbarch_coff_make_msymbol_special (gdbarch,
9365 arm_coff_make_msymbol_special);
9366 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
9367
9368 /* Thumb-2 IT block support. */
9369 set_gdbarch_adjust_breakpoint_address (gdbarch,
9370 arm_adjust_breakpoint_address);
9371
9372 /* Virtual tables. */
9373 set_gdbarch_vbit_in_delta (gdbarch, 1);
9374
9375 /* Hook in the ABI-specific overrides, if they have been registered. */
9376 gdbarch_init_osabi (info, gdbarch);
9377
9378 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
9379
9380 /* Add some default predicates. */
9381 if (is_m)
9382 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
9383 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
9384 dwarf2_append_unwinders (gdbarch);
9385 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
9386 frame_unwind_append_unwinder (gdbarch, &arm_epilogue_frame_unwind);
9387 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
9388
9389 /* Now we have tuned the configuration, set a few final things,
9390 based on what the OS ABI has told us. */
9391
9392 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
9393 binaries are always marked. */
9394 if (tdep->arm_abi == ARM_ABI_AUTO)
9395 tdep->arm_abi = ARM_ABI_APCS;
9396
9397 /* Watchpoints are not steppable. */
9398 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
9399
9400 /* We used to default to FPA for generic ARM, but almost nobody
9401 uses that now, and we now provide a way for the user to force
9402 the model. So default to the most useful variant. */
9403 if (tdep->fp_model == ARM_FLOAT_AUTO)
9404 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
9405
9406 if (tdep->jb_pc >= 0)
9407 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
9408
9409 /* Floating point sizes and format. */
9410 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
9411 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
9412 {
9413 set_gdbarch_double_format
9414 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9415 set_gdbarch_long_double_format
9416 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9417 }
9418 else
9419 {
9420 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
9421 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
9422 }
9423
9424 if (have_vfp_pseudos)
9425 {
9426 /* NOTE: These are the only pseudo registers used by
9427 the ARM target at the moment. If more are added, a
9428 little more care in numbering will be needed. */
9429
9430 int num_pseudos = 32;
9431 if (have_neon_pseudos)
9432 num_pseudos += 16;
9433 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
9434 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
9435 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
9436 }
9437
9438 if (tdesc_data)
9439 {
9440 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
9441
9442 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
9443
9444 /* Override tdesc_register_type to adjust the types of VFP
9445 registers for NEON. */
9446 set_gdbarch_register_type (gdbarch, arm_register_type);
9447 }
9448
9449 /* Add standard register aliases. We add aliases even for those
9450 nanes which are used by the current architecture - it's simpler,
9451 and does no harm, since nothing ever lists user registers. */
9452 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
9453 user_reg_add (gdbarch, arm_register_aliases[i].name,
9454 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
9455
9456 set_gdbarch_disassembler_options (gdbarch, &arm_disassembler_options);
9457 set_gdbarch_valid_disassembler_options (gdbarch, disassembler_options_arm ());
9458
9459 set_gdbarch_gnu_triplet_regexp (gdbarch, arm_gnu_triplet_regexp);
9460
9461 return gdbarch;
9462 }
9463
9464 static void
9465 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
9466 {
9467 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9468
9469 if (tdep == NULL)
9470 return;
9471
9472 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
9473 (unsigned long) tdep->lowest_pc);
9474 }
9475
9476 #if GDB_SELF_TEST
9477 namespace selftests
9478 {
9479 static void arm_record_test (void);
9480 }
9481 #endif
9482
9483 void
9484 _initialize_arm_tdep (void)
9485 {
9486 long length;
9487 int i, j;
9488 char regdesc[1024], *rdptr = regdesc;
9489 size_t rest = sizeof (regdesc);
9490
9491 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
9492
9493 /* Add ourselves to objfile event chain. */
9494 gdb::observers::new_objfile.attach (arm_exidx_new_objfile);
9495
9496 /* Register an ELF OS ABI sniffer for ARM binaries. */
9497 gdbarch_register_osabi_sniffer (bfd_arch_arm,
9498 bfd_target_elf_flavour,
9499 arm_elf_osabi_sniffer);
9500
9501 /* Add root prefix command for all "set arm"/"show arm" commands. */
9502 add_prefix_cmd ("arm", no_class, set_arm_command,
9503 _("Various ARM-specific commands."),
9504 &setarmcmdlist, "set arm ", 0, &setlist);
9505
9506 add_prefix_cmd ("arm", no_class, show_arm_command,
9507 _("Various ARM-specific commands."),
9508 &showarmcmdlist, "show arm ", 0, &showlist);
9509
9510
9511 arm_disassembler_options = xstrdup ("reg-names-std");
9512 const disasm_options_t *disasm_options
9513 = &disassembler_options_arm ()->options;
9514 int num_disassembly_styles = 0;
9515 for (i = 0; disasm_options->name[i] != NULL; i++)
9516 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9517 num_disassembly_styles++;
9518
9519 /* Initialize the array that will be passed to add_setshow_enum_cmd(). */
9520 valid_disassembly_styles = XNEWVEC (const char *,
9521 num_disassembly_styles + 1);
9522 for (i = j = 0; disasm_options->name[i] != NULL; i++)
9523 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9524 {
9525 size_t offset = strlen ("reg-names-");
9526 const char *style = disasm_options->name[i];
9527 valid_disassembly_styles[j++] = &style[offset];
9528 length = snprintf (rdptr, rest, "%s - %s\n", &style[offset],
9529 disasm_options->description[i]);
9530 rdptr += length;
9531 rest -= length;
9532 }
9533 /* Mark the end of valid options. */
9534 valid_disassembly_styles[num_disassembly_styles] = NULL;
9535
9536 /* Create the help text. */
9537 std::string helptext = string_printf ("%s%s%s",
9538 _("The valid values are:\n"),
9539 regdesc,
9540 _("The default is \"std\"."));
9541
9542 add_setshow_enum_cmd("disassembler", no_class,
9543 valid_disassembly_styles, &disassembly_style,
9544 _("Set the disassembly style."),
9545 _("Show the disassembly style."),
9546 helptext.c_str (),
9547 set_disassembly_style_sfunc,
9548 show_disassembly_style_sfunc,
9549 &setarmcmdlist, &showarmcmdlist);
9550
9551 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
9552 _("Set usage of ARM 32-bit mode."),
9553 _("Show usage of ARM 32-bit mode."),
9554 _("When off, a 26-bit PC will be used."),
9555 NULL,
9556 NULL, /* FIXME: i18n: Usage of ARM 32-bit
9557 mode is %s. */
9558 &setarmcmdlist, &showarmcmdlist);
9559
9560 /* Add a command to allow the user to force the FPU model. */
9561 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
9562 _("Set the floating point type."),
9563 _("Show the floating point type."),
9564 _("auto - Determine the FP typefrom the OS-ABI.\n\
9565 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
9566 fpa - FPA co-processor (GCC compiled).\n\
9567 softvfp - Software FP with pure-endian doubles.\n\
9568 vfp - VFP co-processor."),
9569 set_fp_model_sfunc, show_fp_model,
9570 &setarmcmdlist, &showarmcmdlist);
9571
9572 /* Add a command to allow the user to force the ABI. */
9573 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
9574 _("Set the ABI."),
9575 _("Show the ABI."),
9576 NULL, arm_set_abi, arm_show_abi,
9577 &setarmcmdlist, &showarmcmdlist);
9578
9579 /* Add two commands to allow the user to force the assumed
9580 execution mode. */
9581 add_setshow_enum_cmd ("fallback-mode", class_support,
9582 arm_mode_strings, &arm_fallback_mode_string,
9583 _("Set the mode assumed when symbols are unavailable."),
9584 _("Show the mode assumed when symbols are unavailable."),
9585 NULL, NULL, arm_show_fallback_mode,
9586 &setarmcmdlist, &showarmcmdlist);
9587 add_setshow_enum_cmd ("force-mode", class_support,
9588 arm_mode_strings, &arm_force_mode_string,
9589 _("Set the mode assumed even when symbols are available."),
9590 _("Show the mode assumed even when symbols are available."),
9591 NULL, NULL, arm_show_force_mode,
9592 &setarmcmdlist, &showarmcmdlist);
9593
9594 /* Debugging flag. */
9595 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
9596 _("Set ARM debugging."),
9597 _("Show ARM debugging."),
9598 _("When on, arm-specific debugging is enabled."),
9599 NULL,
9600 NULL, /* FIXME: i18n: "ARM debugging is %s. */
9601 &setdebuglist, &showdebuglist);
9602
9603 #if GDB_SELF_TEST
9604 selftests::register_test ("arm-record", selftests::arm_record_test);
9605 #endif
9606
9607 }
9608
9609 /* ARM-reversible process record data structures. */
9610
9611 #define ARM_INSN_SIZE_BYTES 4
9612 #define THUMB_INSN_SIZE_BYTES 2
9613 #define THUMB2_INSN_SIZE_BYTES 4
9614
9615
9616 /* Position of the bit within a 32-bit ARM instruction
9617 that defines whether the instruction is a load or store. */
9618 #define INSN_S_L_BIT_NUM 20
9619
9620 #define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
9621 do \
9622 { \
9623 unsigned int reg_len = LENGTH; \
9624 if (reg_len) \
9625 { \
9626 REGS = XNEWVEC (uint32_t, reg_len); \
9627 memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
9628 } \
9629 } \
9630 while (0)
9631
9632 #define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
9633 do \
9634 { \
9635 unsigned int mem_len = LENGTH; \
9636 if (mem_len) \
9637 { \
9638 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
9639 memcpy(&MEMS->len, &RECORD_BUF[0], \
9640 sizeof(struct arm_mem_r) * LENGTH); \
9641 } \
9642 } \
9643 while (0)
9644
9645 /* Checks whether insn is already recorded or yet to be decoded. (boolean expression). */
9646 #define INSN_RECORDED(ARM_RECORD) \
9647 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
9648
9649 /* ARM memory record structure. */
9650 struct arm_mem_r
9651 {
9652 uint32_t len; /* Record length. */
9653 uint32_t addr; /* Memory address. */
9654 };
9655
9656 /* ARM instruction record contains opcode of current insn
9657 and execution state (before entry to decode_insn()),
9658 contains list of to-be-modified registers and
9659 memory blocks (on return from decode_insn()). */
9660
9661 typedef struct insn_decode_record_t
9662 {
9663 struct gdbarch *gdbarch;
9664 struct regcache *regcache;
9665 CORE_ADDR this_addr; /* Address of the insn being decoded. */
9666 uint32_t arm_insn; /* Should accommodate thumb. */
9667 uint32_t cond; /* Condition code. */
9668 uint32_t opcode; /* Insn opcode. */
9669 uint32_t decode; /* Insn decode bits. */
9670 uint32_t mem_rec_count; /* No of mem records. */
9671 uint32_t reg_rec_count; /* No of reg records. */
9672 uint32_t *arm_regs; /* Registers to be saved for this record. */
9673 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
9674 } insn_decode_record;
9675
9676
9677 /* Checks ARM SBZ and SBO mandatory fields. */
9678
9679 static int
9680 sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
9681 {
9682 uint32_t ones = bits (insn, bit_num - 1, (bit_num -1) + (len - 1));
9683
9684 if (!len)
9685 return 1;
9686
9687 if (!sbo)
9688 ones = ~ones;
9689
9690 while (ones)
9691 {
9692 if (!(ones & sbo))
9693 {
9694 return 0;
9695 }
9696 ones = ones >> 1;
9697 }
9698 return 1;
9699 }
9700
9701 enum arm_record_result
9702 {
9703 ARM_RECORD_SUCCESS = 0,
9704 ARM_RECORD_FAILURE = 1
9705 };
9706
9707 typedef enum
9708 {
9709 ARM_RECORD_STRH=1,
9710 ARM_RECORD_STRD
9711 } arm_record_strx_t;
9712
9713 typedef enum
9714 {
9715 ARM_RECORD=1,
9716 THUMB_RECORD,
9717 THUMB2_RECORD
9718 } record_type_t;
9719
9720
9721 static int
9722 arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
9723 uint32_t *record_buf_mem, arm_record_strx_t str_type)
9724 {
9725
9726 struct regcache *reg_cache = arm_insn_r->regcache;
9727 ULONGEST u_regval[2]= {0};
9728
9729 uint32_t reg_src1 = 0, reg_src2 = 0;
9730 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
9731
9732 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
9733 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
9734
9735 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
9736 {
9737 /* 1) Handle misc store, immediate offset. */
9738 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9739 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9740 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9741 regcache_raw_read_unsigned (reg_cache, reg_src1,
9742 &u_regval[0]);
9743 if (ARM_PC_REGNUM == reg_src1)
9744 {
9745 /* If R15 was used as Rn, hence current PC+8. */
9746 u_regval[0] = u_regval[0] + 8;
9747 }
9748 offset_8 = (immed_high << 4) | immed_low;
9749 /* Calculate target store address. */
9750 if (14 == arm_insn_r->opcode)
9751 {
9752 tgt_mem_addr = u_regval[0] + offset_8;
9753 }
9754 else
9755 {
9756 tgt_mem_addr = u_regval[0] - offset_8;
9757 }
9758 if (ARM_RECORD_STRH == str_type)
9759 {
9760 record_buf_mem[0] = 2;
9761 record_buf_mem[1] = tgt_mem_addr;
9762 arm_insn_r->mem_rec_count = 1;
9763 }
9764 else if (ARM_RECORD_STRD == str_type)
9765 {
9766 record_buf_mem[0] = 4;
9767 record_buf_mem[1] = tgt_mem_addr;
9768 record_buf_mem[2] = 4;
9769 record_buf_mem[3] = tgt_mem_addr + 4;
9770 arm_insn_r->mem_rec_count = 2;
9771 }
9772 }
9773 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
9774 {
9775 /* 2) Store, register offset. */
9776 /* Get Rm. */
9777 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9778 /* Get Rn. */
9779 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9780 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9781 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9782 if (15 == reg_src2)
9783 {
9784 /* If R15 was used as Rn, hence current PC+8. */
9785 u_regval[0] = u_regval[0] + 8;
9786 }
9787 /* Calculate target store address, Rn +/- Rm, register offset. */
9788 if (12 == arm_insn_r->opcode)
9789 {
9790 tgt_mem_addr = u_regval[0] + u_regval[1];
9791 }
9792 else
9793 {
9794 tgt_mem_addr = u_regval[1] - u_regval[0];
9795 }
9796 if (ARM_RECORD_STRH == str_type)
9797 {
9798 record_buf_mem[0] = 2;
9799 record_buf_mem[1] = tgt_mem_addr;
9800 arm_insn_r->mem_rec_count = 1;
9801 }
9802 else if (ARM_RECORD_STRD == str_type)
9803 {
9804 record_buf_mem[0] = 4;
9805 record_buf_mem[1] = tgt_mem_addr;
9806 record_buf_mem[2] = 4;
9807 record_buf_mem[3] = tgt_mem_addr + 4;
9808 arm_insn_r->mem_rec_count = 2;
9809 }
9810 }
9811 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
9812 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9813 {
9814 /* 3) Store, immediate pre-indexed. */
9815 /* 5) Store, immediate post-indexed. */
9816 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9817 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9818 offset_8 = (immed_high << 4) | immed_low;
9819 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9820 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9821 /* Calculate target store address, Rn +/- Rm, register offset. */
9822 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9823 {
9824 tgt_mem_addr = u_regval[0] + offset_8;
9825 }
9826 else
9827 {
9828 tgt_mem_addr = u_regval[0] - offset_8;
9829 }
9830 if (ARM_RECORD_STRH == str_type)
9831 {
9832 record_buf_mem[0] = 2;
9833 record_buf_mem[1] = tgt_mem_addr;
9834 arm_insn_r->mem_rec_count = 1;
9835 }
9836 else if (ARM_RECORD_STRD == str_type)
9837 {
9838 record_buf_mem[0] = 4;
9839 record_buf_mem[1] = tgt_mem_addr;
9840 record_buf_mem[2] = 4;
9841 record_buf_mem[3] = tgt_mem_addr + 4;
9842 arm_insn_r->mem_rec_count = 2;
9843 }
9844 /* Record Rn also as it changes. */
9845 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9846 arm_insn_r->reg_rec_count = 1;
9847 }
9848 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
9849 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9850 {
9851 /* 4) Store, register pre-indexed. */
9852 /* 6) Store, register post -indexed. */
9853 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9854 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9855 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9856 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9857 /* Calculate target store address, Rn +/- Rm, register offset. */
9858 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9859 {
9860 tgt_mem_addr = u_regval[0] + u_regval[1];
9861 }
9862 else
9863 {
9864 tgt_mem_addr = u_regval[1] - u_regval[0];
9865 }
9866 if (ARM_RECORD_STRH == str_type)
9867 {
9868 record_buf_mem[0] = 2;
9869 record_buf_mem[1] = tgt_mem_addr;
9870 arm_insn_r->mem_rec_count = 1;
9871 }
9872 else if (ARM_RECORD_STRD == str_type)
9873 {
9874 record_buf_mem[0] = 4;
9875 record_buf_mem[1] = tgt_mem_addr;
9876 record_buf_mem[2] = 4;
9877 record_buf_mem[3] = tgt_mem_addr + 4;
9878 arm_insn_r->mem_rec_count = 2;
9879 }
9880 /* Record Rn also as it changes. */
9881 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9882 arm_insn_r->reg_rec_count = 1;
9883 }
9884 return 0;
9885 }
9886
9887 /* Handling ARM extension space insns. */
9888
9889 static int
9890 arm_record_extension_space (insn_decode_record *arm_insn_r)
9891 {
9892 int ret = 0; /* Return value: -1:record failure ; 0:success */
9893 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
9894 uint32_t record_buf[8], record_buf_mem[8];
9895 uint32_t reg_src1 = 0;
9896 struct regcache *reg_cache = arm_insn_r->regcache;
9897 ULONGEST u_regval = 0;
9898
9899 gdb_assert (!INSN_RECORDED(arm_insn_r));
9900 /* Handle unconditional insn extension space. */
9901
9902 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
9903 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9904 if (arm_insn_r->cond)
9905 {
9906 /* PLD has no affect on architectural state, it just affects
9907 the caches. */
9908 if (5 == ((opcode1 & 0xE0) >> 5))
9909 {
9910 /* BLX(1) */
9911 record_buf[0] = ARM_PS_REGNUM;
9912 record_buf[1] = ARM_LR_REGNUM;
9913 arm_insn_r->reg_rec_count = 2;
9914 }
9915 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
9916 }
9917
9918
9919 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
9920 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
9921 {
9922 ret = -1;
9923 /* Undefined instruction on ARM V5; need to handle if later
9924 versions define it. */
9925 }
9926
9927 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
9928 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9929 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
9930
9931 /* Handle arithmetic insn extension space. */
9932 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
9933 && !INSN_RECORDED(arm_insn_r))
9934 {
9935 /* Handle MLA(S) and MUL(S). */
9936 if (in_inclusive_range (insn_op1, 0U, 3U))
9937 {
9938 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9939 record_buf[1] = ARM_PS_REGNUM;
9940 arm_insn_r->reg_rec_count = 2;
9941 }
9942 else if (in_inclusive_range (insn_op1, 4U, 15U))
9943 {
9944 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
9945 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
9946 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
9947 record_buf[2] = ARM_PS_REGNUM;
9948 arm_insn_r->reg_rec_count = 3;
9949 }
9950 }
9951
9952 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
9953 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
9954 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
9955
9956 /* Handle control insn extension space. */
9957
9958 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
9959 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
9960 {
9961 if (!bit (arm_insn_r->arm_insn,25))
9962 {
9963 if (!bits (arm_insn_r->arm_insn, 4, 7))
9964 {
9965 if ((0 == insn_op1) || (2 == insn_op1))
9966 {
9967 /* MRS. */
9968 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9969 arm_insn_r->reg_rec_count = 1;
9970 }
9971 else if (1 == insn_op1)
9972 {
9973 /* CSPR is going to be changed. */
9974 record_buf[0] = ARM_PS_REGNUM;
9975 arm_insn_r->reg_rec_count = 1;
9976 }
9977 else if (3 == insn_op1)
9978 {
9979 /* SPSR is going to be changed. */
9980 /* We need to get SPSR value, which is yet to be done. */
9981 return -1;
9982 }
9983 }
9984 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
9985 {
9986 if (1 == insn_op1)
9987 {
9988 /* BX. */
9989 record_buf[0] = ARM_PS_REGNUM;
9990 arm_insn_r->reg_rec_count = 1;
9991 }
9992 else if (3 == insn_op1)
9993 {
9994 /* CLZ. */
9995 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9996 arm_insn_r->reg_rec_count = 1;
9997 }
9998 }
9999 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
10000 {
10001 /* BLX. */
10002 record_buf[0] = ARM_PS_REGNUM;
10003 record_buf[1] = ARM_LR_REGNUM;
10004 arm_insn_r->reg_rec_count = 2;
10005 }
10006 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
10007 {
10008 /* QADD, QSUB, QDADD, QDSUB */
10009 record_buf[0] = ARM_PS_REGNUM;
10010 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10011 arm_insn_r->reg_rec_count = 2;
10012 }
10013 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
10014 {
10015 /* BKPT. */
10016 record_buf[0] = ARM_PS_REGNUM;
10017 record_buf[1] = ARM_LR_REGNUM;
10018 arm_insn_r->reg_rec_count = 2;
10019
10020 /* Save SPSR also;how? */
10021 return -1;
10022 }
10023 else if(8 == bits (arm_insn_r->arm_insn, 4, 7)
10024 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
10025 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
10026 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
10027 )
10028 {
10029 if (0 == insn_op1 || 1 == insn_op1)
10030 {
10031 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
10032 /* We dont do optimization for SMULW<y> where we
10033 need only Rd. */
10034 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10035 record_buf[1] = ARM_PS_REGNUM;
10036 arm_insn_r->reg_rec_count = 2;
10037 }
10038 else if (2 == insn_op1)
10039 {
10040 /* SMLAL<x><y>. */
10041 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10042 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
10043 arm_insn_r->reg_rec_count = 2;
10044 }
10045 else if (3 == insn_op1)
10046 {
10047 /* SMUL<x><y>. */
10048 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10049 arm_insn_r->reg_rec_count = 1;
10050 }
10051 }
10052 }
10053 else
10054 {
10055 /* MSR : immediate form. */
10056 if (1 == insn_op1)
10057 {
10058 /* CSPR is going to be changed. */
10059 record_buf[0] = ARM_PS_REGNUM;
10060 arm_insn_r->reg_rec_count = 1;
10061 }
10062 else if (3 == insn_op1)
10063 {
10064 /* SPSR is going to be changed. */
10065 /* we need to get SPSR value, which is yet to be done */
10066 return -1;
10067 }
10068 }
10069 }
10070
10071 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10072 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
10073 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
10074
10075 /* Handle load/store insn extension space. */
10076
10077 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
10078 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
10079 && !INSN_RECORDED(arm_insn_r))
10080 {
10081 /* SWP/SWPB. */
10082 if (0 == insn_op1)
10083 {
10084 /* These insn, changes register and memory as well. */
10085 /* SWP or SWPB insn. */
10086 /* Get memory address given by Rn. */
10087 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10088 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
10089 /* SWP insn ?, swaps word. */
10090 if (8 == arm_insn_r->opcode)
10091 {
10092 record_buf_mem[0] = 4;
10093 }
10094 else
10095 {
10096 /* SWPB insn, swaps only byte. */
10097 record_buf_mem[0] = 1;
10098 }
10099 record_buf_mem[1] = u_regval;
10100 arm_insn_r->mem_rec_count = 1;
10101 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10102 arm_insn_r->reg_rec_count = 1;
10103 }
10104 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10105 {
10106 /* STRH. */
10107 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10108 ARM_RECORD_STRH);
10109 }
10110 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10111 {
10112 /* LDRD. */
10113 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10114 record_buf[1] = record_buf[0] + 1;
10115 arm_insn_r->reg_rec_count = 2;
10116 }
10117 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10118 {
10119 /* STRD. */
10120 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10121 ARM_RECORD_STRD);
10122 }
10123 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
10124 {
10125 /* LDRH, LDRSB, LDRSH. */
10126 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10127 arm_insn_r->reg_rec_count = 1;
10128 }
10129
10130 }
10131
10132 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
10133 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
10134 && !INSN_RECORDED(arm_insn_r))
10135 {
10136 ret = -1;
10137 /* Handle coprocessor insn extension space. */
10138 }
10139
10140 /* To be done for ARMv5 and later; as of now we return -1. */
10141 if (-1 == ret)
10142 return ret;
10143
10144 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10145 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10146
10147 return ret;
10148 }
10149
10150 /* Handling opcode 000 insns. */
10151
10152 static int
10153 arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
10154 {
10155 struct regcache *reg_cache = arm_insn_r->regcache;
10156 uint32_t record_buf[8], record_buf_mem[8];
10157 ULONGEST u_regval[2] = {0};
10158
10159 uint32_t reg_src1 = 0;
10160 uint32_t opcode1 = 0;
10161
10162 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10163 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10164 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10165
10166 if (!((opcode1 & 0x19) == 0x10))
10167 {
10168 /* Data-processing (register) and Data-processing (register-shifted
10169 register */
10170 /* Out of 11 shifter operands mode, all the insn modifies destination
10171 register, which is specified by 13-16 decode. */
10172 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10173 record_buf[1] = ARM_PS_REGNUM;
10174 arm_insn_r->reg_rec_count = 2;
10175 }
10176 else if ((arm_insn_r->decode < 8) && ((opcode1 & 0x19) == 0x10))
10177 {
10178 /* Miscellaneous instructions */
10179
10180 if (3 == arm_insn_r->decode && 0x12 == opcode1
10181 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10182 {
10183 /* Handle BLX, branch and link/exchange. */
10184 if (9 == arm_insn_r->opcode)
10185 {
10186 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm,
10187 and R14 stores the return address. */
10188 record_buf[0] = ARM_PS_REGNUM;
10189 record_buf[1] = ARM_LR_REGNUM;
10190 arm_insn_r->reg_rec_count = 2;
10191 }
10192 }
10193 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
10194 {
10195 /* Handle enhanced software breakpoint insn, BKPT. */
10196 /* CPSR is changed to be executed in ARM state, disabling normal
10197 interrupts, entering abort mode. */
10198 /* According to high vector configuration PC is set. */
10199 /* user hit breakpoint and type reverse, in
10200 that case, we need to go back with previous CPSR and
10201 Program Counter. */
10202 record_buf[0] = ARM_PS_REGNUM;
10203 record_buf[1] = ARM_LR_REGNUM;
10204 arm_insn_r->reg_rec_count = 2;
10205
10206 /* Save SPSR also; how? */
10207 return -1;
10208 }
10209 else if (1 == arm_insn_r->decode && 0x12 == opcode1
10210 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10211 {
10212 /* Handle BX, branch and link/exchange. */
10213 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm. */
10214 record_buf[0] = ARM_PS_REGNUM;
10215 arm_insn_r->reg_rec_count = 1;
10216 }
10217 else if (1 == arm_insn_r->decode && 0x16 == opcode1
10218 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
10219 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
10220 {
10221 /* Count leading zeros: CLZ. */
10222 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10223 arm_insn_r->reg_rec_count = 1;
10224 }
10225 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10226 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10227 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
10228 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0))
10229 {
10230 /* Handle MRS insn. */
10231 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10232 arm_insn_r->reg_rec_count = 1;
10233 }
10234 }
10235 else if (9 == arm_insn_r->decode && opcode1 < 0x10)
10236 {
10237 /* Multiply and multiply-accumulate */
10238
10239 /* Handle multiply instructions. */
10240 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
10241 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
10242 {
10243 /* Handle MLA and MUL. */
10244 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10245 record_buf[1] = ARM_PS_REGNUM;
10246 arm_insn_r->reg_rec_count = 2;
10247 }
10248 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10249 {
10250 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
10251 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10252 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10253 record_buf[2] = ARM_PS_REGNUM;
10254 arm_insn_r->reg_rec_count = 3;
10255 }
10256 }
10257 else if (9 == arm_insn_r->decode && opcode1 > 0x10)
10258 {
10259 /* Synchronization primitives */
10260
10261 /* Handling SWP, SWPB. */
10262 /* These insn, changes register and memory as well. */
10263 /* SWP or SWPB insn. */
10264
10265 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10266 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10267 /* SWP insn ?, swaps word. */
10268 if (8 == arm_insn_r->opcode)
10269 {
10270 record_buf_mem[0] = 4;
10271 }
10272 else
10273 {
10274 /* SWPB insn, swaps only byte. */
10275 record_buf_mem[0] = 1;
10276 }
10277 record_buf_mem[1] = u_regval[0];
10278 arm_insn_r->mem_rec_count = 1;
10279 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10280 arm_insn_r->reg_rec_count = 1;
10281 }
10282 else if (11 == arm_insn_r->decode || 13 == arm_insn_r->decode
10283 || 15 == arm_insn_r->decode)
10284 {
10285 if ((opcode1 & 0x12) == 2)
10286 {
10287 /* Extra load/store (unprivileged) */
10288 return -1;
10289 }
10290 else
10291 {
10292 /* Extra load/store */
10293 switch (bits (arm_insn_r->arm_insn, 5, 6))
10294 {
10295 case 1:
10296 if ((opcode1 & 0x05) == 0x0 || (opcode1 & 0x05) == 0x4)
10297 {
10298 /* STRH (register), STRH (immediate) */
10299 arm_record_strx (arm_insn_r, &record_buf[0],
10300 &record_buf_mem[0], ARM_RECORD_STRH);
10301 }
10302 else if ((opcode1 & 0x05) == 0x1)
10303 {
10304 /* LDRH (register) */
10305 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10306 arm_insn_r->reg_rec_count = 1;
10307
10308 if (bit (arm_insn_r->arm_insn, 21))
10309 {
10310 /* Write back to Rn. */
10311 record_buf[arm_insn_r->reg_rec_count++]
10312 = bits (arm_insn_r->arm_insn, 16, 19);
10313 }
10314 }
10315 else if ((opcode1 & 0x05) == 0x5)
10316 {
10317 /* LDRH (immediate), LDRH (literal) */
10318 int rn = bits (arm_insn_r->arm_insn, 16, 19);
10319
10320 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10321 arm_insn_r->reg_rec_count = 1;
10322
10323 if (rn != 15)
10324 {
10325 /*LDRH (immediate) */
10326 if (bit (arm_insn_r->arm_insn, 21))
10327 {
10328 /* Write back to Rn. */
10329 record_buf[arm_insn_r->reg_rec_count++] = rn;
10330 }
10331 }
10332 }
10333 else
10334 return -1;
10335 break;
10336 case 2:
10337 if ((opcode1 & 0x05) == 0x0)
10338 {
10339 /* LDRD (register) */
10340 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10341 record_buf[1] = record_buf[0] + 1;
10342 arm_insn_r->reg_rec_count = 2;
10343
10344 if (bit (arm_insn_r->arm_insn, 21))
10345 {
10346 /* Write back to Rn. */
10347 record_buf[arm_insn_r->reg_rec_count++]
10348 = bits (arm_insn_r->arm_insn, 16, 19);
10349 }
10350 }
10351 else if ((opcode1 & 0x05) == 0x1)
10352 {
10353 /* LDRSB (register) */
10354 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10355 arm_insn_r->reg_rec_count = 1;
10356
10357 if (bit (arm_insn_r->arm_insn, 21))
10358 {
10359 /* Write back to Rn. */
10360 record_buf[arm_insn_r->reg_rec_count++]
10361 = bits (arm_insn_r->arm_insn, 16, 19);
10362 }
10363 }
10364 else if ((opcode1 & 0x05) == 0x4 || (opcode1 & 0x05) == 0x5)
10365 {
10366 /* LDRD (immediate), LDRD (literal), LDRSB (immediate),
10367 LDRSB (literal) */
10368 int rn = bits (arm_insn_r->arm_insn, 16, 19);
10369
10370 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10371 arm_insn_r->reg_rec_count = 1;
10372
10373 if (rn != 15)
10374 {
10375 /*LDRD (immediate), LDRSB (immediate) */
10376 if (bit (arm_insn_r->arm_insn, 21))
10377 {
10378 /* Write back to Rn. */
10379 record_buf[arm_insn_r->reg_rec_count++] = rn;
10380 }
10381 }
10382 }
10383 else
10384 return -1;
10385 break;
10386 case 3:
10387 if ((opcode1 & 0x05) == 0x0)
10388 {
10389 /* STRD (register) */
10390 arm_record_strx (arm_insn_r, &record_buf[0],
10391 &record_buf_mem[0], ARM_RECORD_STRD);
10392 }
10393 else if ((opcode1 & 0x05) == 0x1)
10394 {
10395 /* LDRSH (register) */
10396 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10397 arm_insn_r->reg_rec_count = 1;
10398
10399 if (bit (arm_insn_r->arm_insn, 21))
10400 {
10401 /* Write back to Rn. */
10402 record_buf[arm_insn_r->reg_rec_count++]
10403 = bits (arm_insn_r->arm_insn, 16, 19);
10404 }
10405 }
10406 else if ((opcode1 & 0x05) == 0x4)
10407 {
10408 /* STRD (immediate) */
10409 arm_record_strx (arm_insn_r, &record_buf[0],
10410 &record_buf_mem[0], ARM_RECORD_STRD);
10411 }
10412 else if ((opcode1 & 0x05) == 0x5)
10413 {
10414 /* LDRSH (immediate), LDRSH (literal) */
10415 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10416 arm_insn_r->reg_rec_count = 1;
10417
10418 if (bit (arm_insn_r->arm_insn, 21))
10419 {
10420 /* Write back to Rn. */
10421 record_buf[arm_insn_r->reg_rec_count++]
10422 = bits (arm_insn_r->arm_insn, 16, 19);
10423 }
10424 }
10425 else
10426 return -1;
10427 break;
10428 default:
10429 return -1;
10430 }
10431 }
10432 }
10433 else
10434 {
10435 return -1;
10436 }
10437
10438 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10439 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10440 return 0;
10441 }
10442
10443 /* Handling opcode 001 insns. */
10444
10445 static int
10446 arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
10447 {
10448 uint32_t record_buf[8], record_buf_mem[8];
10449
10450 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10451 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10452
10453 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10454 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
10455 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10456 )
10457 {
10458 /* Handle MSR insn. */
10459 if (9 == arm_insn_r->opcode)
10460 {
10461 /* CSPR is going to be changed. */
10462 record_buf[0] = ARM_PS_REGNUM;
10463 arm_insn_r->reg_rec_count = 1;
10464 }
10465 else
10466 {
10467 /* SPSR is going to be changed. */
10468 }
10469 }
10470 else if (arm_insn_r->opcode <= 15)
10471 {
10472 /* Normal data processing insns. */
10473 /* Out of 11 shifter operands mode, all the insn modifies destination
10474 register, which is specified by 13-16 decode. */
10475 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10476 record_buf[1] = ARM_PS_REGNUM;
10477 arm_insn_r->reg_rec_count = 2;
10478 }
10479 else
10480 {
10481 return -1;
10482 }
10483
10484 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10485 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10486 return 0;
10487 }
10488
10489 static int
10490 arm_record_media (insn_decode_record *arm_insn_r)
10491 {
10492 uint32_t record_buf[8];
10493
10494 switch (bits (arm_insn_r->arm_insn, 22, 24))
10495 {
10496 case 0:
10497 /* Parallel addition and subtraction, signed */
10498 case 1:
10499 /* Parallel addition and subtraction, unsigned */
10500 case 2:
10501 case 3:
10502 /* Packing, unpacking, saturation and reversal */
10503 {
10504 int rd = bits (arm_insn_r->arm_insn, 12, 15);
10505
10506 record_buf[arm_insn_r->reg_rec_count++] = rd;
10507 }
10508 break;
10509
10510 case 4:
10511 case 5:
10512 /* Signed multiplies */
10513 {
10514 int rd = bits (arm_insn_r->arm_insn, 16, 19);
10515 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 22);
10516
10517 record_buf[arm_insn_r->reg_rec_count++] = rd;
10518 if (op1 == 0x0)
10519 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10520 else if (op1 == 0x4)
10521 record_buf[arm_insn_r->reg_rec_count++]
10522 = bits (arm_insn_r->arm_insn, 12, 15);
10523 }
10524 break;
10525
10526 case 6:
10527 {
10528 if (bit (arm_insn_r->arm_insn, 21)
10529 && bits (arm_insn_r->arm_insn, 5, 6) == 0x2)
10530 {
10531 /* SBFX */
10532 record_buf[arm_insn_r->reg_rec_count++]
10533 = bits (arm_insn_r->arm_insn, 12, 15);
10534 }
10535 else if (bits (arm_insn_r->arm_insn, 20, 21) == 0x0
10536 && bits (arm_insn_r->arm_insn, 5, 7) == 0x0)
10537 {
10538 /* USAD8 and USADA8 */
10539 record_buf[arm_insn_r->reg_rec_count++]
10540 = bits (arm_insn_r->arm_insn, 16, 19);
10541 }
10542 }
10543 break;
10544
10545 case 7:
10546 {
10547 if (bits (arm_insn_r->arm_insn, 20, 21) == 0x3
10548 && bits (arm_insn_r->arm_insn, 5, 7) == 0x7)
10549 {
10550 /* Permanently UNDEFINED */
10551 return -1;
10552 }
10553 else
10554 {
10555 /* BFC, BFI and UBFX */
10556 record_buf[arm_insn_r->reg_rec_count++]
10557 = bits (arm_insn_r->arm_insn, 12, 15);
10558 }
10559 }
10560 break;
10561
10562 default:
10563 return -1;
10564 }
10565
10566 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10567
10568 return 0;
10569 }
10570
10571 /* Handle ARM mode instructions with opcode 010. */
10572
10573 static int
10574 arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
10575 {
10576 struct regcache *reg_cache = arm_insn_r->regcache;
10577
10578 uint32_t reg_base , reg_dest;
10579 uint32_t offset_12, tgt_mem_addr;
10580 uint32_t record_buf[8], record_buf_mem[8];
10581 unsigned char wback;
10582 ULONGEST u_regval;
10583
10584 /* Calculate wback. */
10585 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
10586 || (bit (arm_insn_r->arm_insn, 21) == 1);
10587
10588 arm_insn_r->reg_rec_count = 0;
10589 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10590
10591 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10592 {
10593 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
10594 and LDRT. */
10595
10596 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10597 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
10598
10599 /* The LDR instruction is capable of doing branching. If MOV LR, PC
10600 preceeds a LDR instruction having R15 as reg_base, it
10601 emulates a branch and link instruction, and hence we need to save
10602 CPSR and PC as well. */
10603 if (ARM_PC_REGNUM == reg_dest)
10604 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10605
10606 /* If wback is true, also save the base register, which is going to be
10607 written to. */
10608 if (wback)
10609 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10610 }
10611 else
10612 {
10613 /* STR (immediate), STRB (immediate), STRBT and STRT. */
10614
10615 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
10616 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10617
10618 /* Handle bit U. */
10619 if (bit (arm_insn_r->arm_insn, 23))
10620 {
10621 /* U == 1: Add the offset. */
10622 tgt_mem_addr = (uint32_t) u_regval + offset_12;
10623 }
10624 else
10625 {
10626 /* U == 0: subtract the offset. */
10627 tgt_mem_addr = (uint32_t) u_regval - offset_12;
10628 }
10629
10630 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
10631 bytes. */
10632 if (bit (arm_insn_r->arm_insn, 22))
10633 {
10634 /* STRB and STRBT: 1 byte. */
10635 record_buf_mem[0] = 1;
10636 }
10637 else
10638 {
10639 /* STR and STRT: 4 bytes. */
10640 record_buf_mem[0] = 4;
10641 }
10642
10643 /* Handle bit P. */
10644 if (bit (arm_insn_r->arm_insn, 24))
10645 record_buf_mem[1] = tgt_mem_addr;
10646 else
10647 record_buf_mem[1] = (uint32_t) u_regval;
10648
10649 arm_insn_r->mem_rec_count = 1;
10650
10651 /* If wback is true, also save the base register, which is going to be
10652 written to. */
10653 if (wback)
10654 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10655 }
10656
10657 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10658 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10659 return 0;
10660 }
10661
10662 /* Handling opcode 011 insns. */
10663
10664 static int
10665 arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
10666 {
10667 struct regcache *reg_cache = arm_insn_r->regcache;
10668
10669 uint32_t shift_imm = 0;
10670 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
10671 uint32_t offset_12 = 0, tgt_mem_addr = 0;
10672 uint32_t record_buf[8], record_buf_mem[8];
10673
10674 LONGEST s_word;
10675 ULONGEST u_regval[2];
10676
10677 if (bit (arm_insn_r->arm_insn, 4))
10678 return arm_record_media (arm_insn_r);
10679
10680 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10681 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10682
10683 /* Handle enhanced store insns and LDRD DSP insn,
10684 order begins according to addressing modes for store insns
10685 STRH insn. */
10686
10687 /* LDR or STR? */
10688 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10689 {
10690 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10691 /* LDR insn has a capability to do branching, if
10692 MOV LR, PC is precedded by LDR insn having Rn as R15
10693 in that case, it emulates branch and link insn, and hence we
10694 need to save CSPR and PC as well. */
10695 if (15 != reg_dest)
10696 {
10697 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10698 arm_insn_r->reg_rec_count = 1;
10699 }
10700 else
10701 {
10702 record_buf[0] = reg_dest;
10703 record_buf[1] = ARM_PS_REGNUM;
10704 arm_insn_r->reg_rec_count = 2;
10705 }
10706 }
10707 else
10708 {
10709 if (! bits (arm_insn_r->arm_insn, 4, 11))
10710 {
10711 /* Store insn, register offset and register pre-indexed,
10712 register post-indexed. */
10713 /* Get Rm. */
10714 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10715 /* Get Rn. */
10716 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10717 regcache_raw_read_unsigned (reg_cache, reg_src1
10718 , &u_regval[0]);
10719 regcache_raw_read_unsigned (reg_cache, reg_src2
10720 , &u_regval[1]);
10721 if (15 == reg_src2)
10722 {
10723 /* If R15 was used as Rn, hence current PC+8. */
10724 /* Pre-indexed mode doesnt reach here ; illegal insn. */
10725 u_regval[0] = u_regval[0] + 8;
10726 }
10727 /* Calculate target store address, Rn +/- Rm, register offset. */
10728 /* U == 1. */
10729 if (bit (arm_insn_r->arm_insn, 23))
10730 {
10731 tgt_mem_addr = u_regval[0] + u_regval[1];
10732 }
10733 else
10734 {
10735 tgt_mem_addr = u_regval[1] - u_regval[0];
10736 }
10737
10738 switch (arm_insn_r->opcode)
10739 {
10740 /* STR. */
10741 case 8:
10742 case 12:
10743 /* STR. */
10744 case 9:
10745 case 13:
10746 /* STRT. */
10747 case 1:
10748 case 5:
10749 /* STR. */
10750 case 0:
10751 case 4:
10752 record_buf_mem[0] = 4;
10753 break;
10754
10755 /* STRB. */
10756 case 10:
10757 case 14:
10758 /* STRB. */
10759 case 11:
10760 case 15:
10761 /* STRBT. */
10762 case 3:
10763 case 7:
10764 /* STRB. */
10765 case 2:
10766 case 6:
10767 record_buf_mem[0] = 1;
10768 break;
10769
10770 default:
10771 gdb_assert_not_reached ("no decoding pattern found");
10772 break;
10773 }
10774 record_buf_mem[1] = tgt_mem_addr;
10775 arm_insn_r->mem_rec_count = 1;
10776
10777 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10778 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10779 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10780 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10781 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10782 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10783 )
10784 {
10785 /* Rn is going to be changed in pre-indexed mode and
10786 post-indexed mode as well. */
10787 record_buf[0] = reg_src2;
10788 arm_insn_r->reg_rec_count = 1;
10789 }
10790 }
10791 else
10792 {
10793 /* Store insn, scaled register offset; scaled pre-indexed. */
10794 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
10795 /* Get Rm. */
10796 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10797 /* Get Rn. */
10798 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10799 /* Get shift_imm. */
10800 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
10801 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10802 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
10803 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10804 /* Offset_12 used as shift. */
10805 switch (offset_12)
10806 {
10807 case 0:
10808 /* Offset_12 used as index. */
10809 offset_12 = u_regval[0] << shift_imm;
10810 break;
10811
10812 case 1:
10813 offset_12 = (!shift_imm)?0:u_regval[0] >> shift_imm;
10814 break;
10815
10816 case 2:
10817 if (!shift_imm)
10818 {
10819 if (bit (u_regval[0], 31))
10820 {
10821 offset_12 = 0xFFFFFFFF;
10822 }
10823 else
10824 {
10825 offset_12 = 0;
10826 }
10827 }
10828 else
10829 {
10830 /* This is arithmetic shift. */
10831 offset_12 = s_word >> shift_imm;
10832 }
10833 break;
10834
10835 case 3:
10836 if (!shift_imm)
10837 {
10838 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
10839 &u_regval[1]);
10840 /* Get C flag value and shift it by 31. */
10841 offset_12 = (((bit (u_regval[1], 29)) << 31) \
10842 | (u_regval[0]) >> 1);
10843 }
10844 else
10845 {
10846 offset_12 = (u_regval[0] >> shift_imm) \
10847 | (u_regval[0] <<
10848 (sizeof(uint32_t) - shift_imm));
10849 }
10850 break;
10851
10852 default:
10853 gdb_assert_not_reached ("no decoding pattern found");
10854 break;
10855 }
10856
10857 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10858 /* bit U set. */
10859 if (bit (arm_insn_r->arm_insn, 23))
10860 {
10861 tgt_mem_addr = u_regval[1] + offset_12;
10862 }
10863 else
10864 {
10865 tgt_mem_addr = u_regval[1] - offset_12;
10866 }
10867
10868 switch (arm_insn_r->opcode)
10869 {
10870 /* STR. */
10871 case 8:
10872 case 12:
10873 /* STR. */
10874 case 9:
10875 case 13:
10876 /* STRT. */
10877 case 1:
10878 case 5:
10879 /* STR. */
10880 case 0:
10881 case 4:
10882 record_buf_mem[0] = 4;
10883 break;
10884
10885 /* STRB. */
10886 case 10:
10887 case 14:
10888 /* STRB. */
10889 case 11:
10890 case 15:
10891 /* STRBT. */
10892 case 3:
10893 case 7:
10894 /* STRB. */
10895 case 2:
10896 case 6:
10897 record_buf_mem[0] = 1;
10898 break;
10899
10900 default:
10901 gdb_assert_not_reached ("no decoding pattern found");
10902 break;
10903 }
10904 record_buf_mem[1] = tgt_mem_addr;
10905 arm_insn_r->mem_rec_count = 1;
10906
10907 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10908 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10909 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10910 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10911 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10912 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10913 )
10914 {
10915 /* Rn is going to be changed in register scaled pre-indexed
10916 mode,and scaled post indexed mode. */
10917 record_buf[0] = reg_src2;
10918 arm_insn_r->reg_rec_count = 1;
10919 }
10920 }
10921 }
10922
10923 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10924 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10925 return 0;
10926 }
10927
10928 /* Handle ARM mode instructions with opcode 100. */
10929
10930 static int
10931 arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
10932 {
10933 struct regcache *reg_cache = arm_insn_r->regcache;
10934 uint32_t register_count = 0, register_bits;
10935 uint32_t reg_base, addr_mode;
10936 uint32_t record_buf[24], record_buf_mem[48];
10937 uint32_t wback;
10938 ULONGEST u_regval;
10939
10940 /* Fetch the list of registers. */
10941 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
10942 arm_insn_r->reg_rec_count = 0;
10943
10944 /* Fetch the base register that contains the address we are loading data
10945 to. */
10946 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10947
10948 /* Calculate wback. */
10949 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
10950
10951 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10952 {
10953 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
10954
10955 /* Find out which registers are going to be loaded from memory. */
10956 while (register_bits)
10957 {
10958 if (register_bits & 0x00000001)
10959 record_buf[arm_insn_r->reg_rec_count++] = register_count;
10960 register_bits = register_bits >> 1;
10961 register_count++;
10962 }
10963
10964
10965 /* If wback is true, also save the base register, which is going to be
10966 written to. */
10967 if (wback)
10968 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10969
10970 /* Save the CPSR register. */
10971 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10972 }
10973 else
10974 {
10975 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
10976
10977 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
10978
10979 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10980
10981 /* Find out how many registers are going to be stored to memory. */
10982 while (register_bits)
10983 {
10984 if (register_bits & 0x00000001)
10985 register_count++;
10986 register_bits = register_bits >> 1;
10987 }
10988
10989 switch (addr_mode)
10990 {
10991 /* STMDA (STMED): Decrement after. */
10992 case 0:
10993 record_buf_mem[1] = (uint32_t) u_regval
10994 - register_count * ARM_INT_REGISTER_SIZE + 4;
10995 break;
10996 /* STM (STMIA, STMEA): Increment after. */
10997 case 1:
10998 record_buf_mem[1] = (uint32_t) u_regval;
10999 break;
11000 /* STMDB (STMFD): Decrement before. */
11001 case 2:
11002 record_buf_mem[1] = (uint32_t) u_regval
11003 - register_count * ARM_INT_REGISTER_SIZE;
11004 break;
11005 /* STMIB (STMFA): Increment before. */
11006 case 3:
11007 record_buf_mem[1] = (uint32_t) u_regval + ARM_INT_REGISTER_SIZE;
11008 break;
11009 default:
11010 gdb_assert_not_reached ("no decoding pattern found");
11011 break;
11012 }
11013
11014 record_buf_mem[0] = register_count * ARM_INT_REGISTER_SIZE;
11015 arm_insn_r->mem_rec_count = 1;
11016
11017 /* If wback is true, also save the base register, which is going to be
11018 written to. */
11019 if (wback)
11020 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11021 }
11022
11023 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11024 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11025 return 0;
11026 }
11027
11028 /* Handling opcode 101 insns. */
11029
11030 static int
11031 arm_record_b_bl (insn_decode_record *arm_insn_r)
11032 {
11033 uint32_t record_buf[8];
11034
11035 /* Handle B, BL, BLX(1) insns. */
11036 /* B simply branches so we do nothing here. */
11037 /* Note: BLX(1) doesnt fall here but instead it falls into
11038 extension space. */
11039 if (bit (arm_insn_r->arm_insn, 24))
11040 {
11041 record_buf[0] = ARM_LR_REGNUM;
11042 arm_insn_r->reg_rec_count = 1;
11043 }
11044
11045 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11046
11047 return 0;
11048 }
11049
11050 static int
11051 arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
11052 {
11053 printf_unfiltered (_("Process record does not support instruction "
11054 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11055 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11056
11057 return -1;
11058 }
11059
11060 /* Record handler for vector data transfer instructions. */
11061
11062 static int
11063 arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
11064 {
11065 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
11066 uint32_t record_buf[4];
11067
11068 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
11069 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
11070 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
11071 bit_l = bit (arm_insn_r->arm_insn, 20);
11072 bit_c = bit (arm_insn_r->arm_insn, 8);
11073
11074 /* Handle VMOV instruction. */
11075 if (bit_l && bit_c)
11076 {
11077 record_buf[0] = reg_t;
11078 arm_insn_r->reg_rec_count = 1;
11079 }
11080 else if (bit_l && !bit_c)
11081 {
11082 /* Handle VMOV instruction. */
11083 if (bits_a == 0x00)
11084 {
11085 record_buf[0] = reg_t;
11086 arm_insn_r->reg_rec_count = 1;
11087 }
11088 /* Handle VMRS instruction. */
11089 else if (bits_a == 0x07)
11090 {
11091 if (reg_t == 15)
11092 reg_t = ARM_PS_REGNUM;
11093
11094 record_buf[0] = reg_t;
11095 arm_insn_r->reg_rec_count = 1;
11096 }
11097 }
11098 else if (!bit_l && !bit_c)
11099 {
11100 /* Handle VMOV instruction. */
11101 if (bits_a == 0x00)
11102 {
11103 record_buf[0] = ARM_D0_REGNUM + reg_v;
11104
11105 arm_insn_r->reg_rec_count = 1;
11106 }
11107 /* Handle VMSR instruction. */
11108 else if (bits_a == 0x07)
11109 {
11110 record_buf[0] = ARM_FPSCR_REGNUM;
11111 arm_insn_r->reg_rec_count = 1;
11112 }
11113 }
11114 else if (!bit_l && bit_c)
11115 {
11116 /* Handle VMOV instruction. */
11117 if (!(bits_a & 0x04))
11118 {
11119 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
11120 + ARM_D0_REGNUM;
11121 arm_insn_r->reg_rec_count = 1;
11122 }
11123 /* Handle VDUP instruction. */
11124 else
11125 {
11126 if (bit (arm_insn_r->arm_insn, 21))
11127 {
11128 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11129 record_buf[0] = reg_v + ARM_D0_REGNUM;
11130 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
11131 arm_insn_r->reg_rec_count = 2;
11132 }
11133 else
11134 {
11135 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11136 record_buf[0] = reg_v + ARM_D0_REGNUM;
11137 arm_insn_r->reg_rec_count = 1;
11138 }
11139 }
11140 }
11141
11142 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11143 return 0;
11144 }
11145
11146 /* Record handler for extension register load/store instructions. */
11147
11148 static int
11149 arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
11150 {
11151 uint32_t opcode, single_reg;
11152 uint8_t op_vldm_vstm;
11153 uint32_t record_buf[8], record_buf_mem[128];
11154 ULONGEST u_regval = 0;
11155
11156 struct regcache *reg_cache = arm_insn_r->regcache;
11157
11158 opcode = bits (arm_insn_r->arm_insn, 20, 24);
11159 single_reg = !bit (arm_insn_r->arm_insn, 8);
11160 op_vldm_vstm = opcode & 0x1b;
11161
11162 /* Handle VMOV instructions. */
11163 if ((opcode & 0x1e) == 0x04)
11164 {
11165 if (bit (arm_insn_r->arm_insn, 20)) /* to_arm_registers bit 20? */
11166 {
11167 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11168 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11169 arm_insn_r->reg_rec_count = 2;
11170 }
11171 else
11172 {
11173 uint8_t reg_m = bits (arm_insn_r->arm_insn, 0, 3);
11174 uint8_t bit_m = bit (arm_insn_r->arm_insn, 5);
11175
11176 if (single_reg)
11177 {
11178 /* The first S register number m is REG_M:M (M is bit 5),
11179 the corresponding D register number is REG_M:M / 2, which
11180 is REG_M. */
11181 record_buf[arm_insn_r->reg_rec_count++] = ARM_D0_REGNUM + reg_m;
11182 /* The second S register number is REG_M:M + 1, the
11183 corresponding D register number is (REG_M:M + 1) / 2.
11184 IOW, if bit M is 1, the first and second S registers
11185 are mapped to different D registers, otherwise, they are
11186 in the same D register. */
11187 if (bit_m)
11188 {
11189 record_buf[arm_insn_r->reg_rec_count++]
11190 = ARM_D0_REGNUM + reg_m + 1;
11191 }
11192 }
11193 else
11194 {
11195 record_buf[0] = ((bit_m << 4) + reg_m + ARM_D0_REGNUM);
11196 arm_insn_r->reg_rec_count = 1;
11197 }
11198 }
11199 }
11200 /* Handle VSTM and VPUSH instructions. */
11201 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
11202 || op_vldm_vstm == 0x12)
11203 {
11204 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
11205 uint32_t memory_index = 0;
11206
11207 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11208 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11209 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11210 imm_off32 = imm_off8 << 2;
11211 memory_count = imm_off8;
11212
11213 if (bit (arm_insn_r->arm_insn, 23))
11214 start_address = u_regval;
11215 else
11216 start_address = u_regval - imm_off32;
11217
11218 if (bit (arm_insn_r->arm_insn, 21))
11219 {
11220 record_buf[0] = reg_rn;
11221 arm_insn_r->reg_rec_count = 1;
11222 }
11223
11224 while (memory_count > 0)
11225 {
11226 if (single_reg)
11227 {
11228 record_buf_mem[memory_index] = 4;
11229 record_buf_mem[memory_index + 1] = start_address;
11230 start_address = start_address + 4;
11231 memory_index = memory_index + 2;
11232 }
11233 else
11234 {
11235 record_buf_mem[memory_index] = 4;
11236 record_buf_mem[memory_index + 1] = start_address;
11237 record_buf_mem[memory_index + 2] = 4;
11238 record_buf_mem[memory_index + 3] = start_address + 4;
11239 start_address = start_address + 8;
11240 memory_index = memory_index + 4;
11241 }
11242 memory_count--;
11243 }
11244 arm_insn_r->mem_rec_count = (memory_index >> 1);
11245 }
11246 /* Handle VLDM instructions. */
11247 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
11248 || op_vldm_vstm == 0x13)
11249 {
11250 uint32_t reg_count, reg_vd;
11251 uint32_t reg_index = 0;
11252 uint32_t bit_d = bit (arm_insn_r->arm_insn, 22);
11253
11254 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11255 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
11256
11257 /* REG_VD is the first D register number. If the instruction
11258 loads memory to S registers (SINGLE_REG is TRUE), the register
11259 number is (REG_VD << 1 | bit D), so the corresponding D
11260 register number is (REG_VD << 1 | bit D) / 2 = REG_VD. */
11261 if (!single_reg)
11262 reg_vd = reg_vd | (bit_d << 4);
11263
11264 if (bit (arm_insn_r->arm_insn, 21) /* write back */)
11265 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
11266
11267 /* If the instruction loads memory to D register, REG_COUNT should
11268 be divided by 2, according to the ARM Architecture Reference
11269 Manual. If the instruction loads memory to S register, divide by
11270 2 as well because two S registers are mapped to D register. */
11271 reg_count = reg_count / 2;
11272 if (single_reg && bit_d)
11273 {
11274 /* Increase the register count if S register list starts from
11275 an odd number (bit d is one). */
11276 reg_count++;
11277 }
11278
11279 while (reg_count > 0)
11280 {
11281 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
11282 reg_count--;
11283 }
11284 arm_insn_r->reg_rec_count = reg_index;
11285 }
11286 /* VSTR Vector store register. */
11287 else if ((opcode & 0x13) == 0x10)
11288 {
11289 uint32_t start_address, reg_rn, imm_off32, imm_off8;
11290 uint32_t memory_index = 0;
11291
11292 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11293 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11294 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11295 imm_off32 = imm_off8 << 2;
11296
11297 if (bit (arm_insn_r->arm_insn, 23))
11298 start_address = u_regval + imm_off32;
11299 else
11300 start_address = u_regval - imm_off32;
11301
11302 if (single_reg)
11303 {
11304 record_buf_mem[memory_index] = 4;
11305 record_buf_mem[memory_index + 1] = start_address;
11306 arm_insn_r->mem_rec_count = 1;
11307 }
11308 else
11309 {
11310 record_buf_mem[memory_index] = 4;
11311 record_buf_mem[memory_index + 1] = start_address;
11312 record_buf_mem[memory_index + 2] = 4;
11313 record_buf_mem[memory_index + 3] = start_address + 4;
11314 arm_insn_r->mem_rec_count = 2;
11315 }
11316 }
11317 /* VLDR Vector load register. */
11318 else if ((opcode & 0x13) == 0x11)
11319 {
11320 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11321
11322 if (!single_reg)
11323 {
11324 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
11325 record_buf[0] = ARM_D0_REGNUM + reg_vd;
11326 }
11327 else
11328 {
11329 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
11330 /* Record register D rather than pseudo register S. */
11331 record_buf[0] = ARM_D0_REGNUM + reg_vd / 2;
11332 }
11333 arm_insn_r->reg_rec_count = 1;
11334 }
11335
11336 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11337 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11338 return 0;
11339 }
11340
11341 /* Record handler for arm/thumb mode VFP data processing instructions. */
11342
11343 static int
11344 arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
11345 {
11346 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
11347 uint32_t record_buf[4];
11348 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
11349 enum insn_types curr_insn_type = INSN_INV;
11350
11351 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11352 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
11353 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
11354 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
11355 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
11356 bit_d = bit (arm_insn_r->arm_insn, 22);
11357 /* Mask off the "D" bit. */
11358 opc1 = opc1 & ~0x04;
11359
11360 /* Handle VMLA, VMLS. */
11361 if (opc1 == 0x00)
11362 {
11363 if (bit (arm_insn_r->arm_insn, 10))
11364 {
11365 if (bit (arm_insn_r->arm_insn, 6))
11366 curr_insn_type = INSN_T0;
11367 else
11368 curr_insn_type = INSN_T1;
11369 }
11370 else
11371 {
11372 if (dp_op_sz)
11373 curr_insn_type = INSN_T1;
11374 else
11375 curr_insn_type = INSN_T2;
11376 }
11377 }
11378 /* Handle VNMLA, VNMLS, VNMUL. */
11379 else if (opc1 == 0x01)
11380 {
11381 if (dp_op_sz)
11382 curr_insn_type = INSN_T1;
11383 else
11384 curr_insn_type = INSN_T2;
11385 }
11386 /* Handle VMUL. */
11387 else if (opc1 == 0x02 && !(opc3 & 0x01))
11388 {
11389 if (bit (arm_insn_r->arm_insn, 10))
11390 {
11391 if (bit (arm_insn_r->arm_insn, 6))
11392 curr_insn_type = INSN_T0;
11393 else
11394 curr_insn_type = INSN_T1;
11395 }
11396 else
11397 {
11398 if (dp_op_sz)
11399 curr_insn_type = INSN_T1;
11400 else
11401 curr_insn_type = INSN_T2;
11402 }
11403 }
11404 /* Handle VADD, VSUB. */
11405 else if (opc1 == 0x03)
11406 {
11407 if (!bit (arm_insn_r->arm_insn, 9))
11408 {
11409 if (bit (arm_insn_r->arm_insn, 6))
11410 curr_insn_type = INSN_T0;
11411 else
11412 curr_insn_type = INSN_T1;
11413 }
11414 else
11415 {
11416 if (dp_op_sz)
11417 curr_insn_type = INSN_T1;
11418 else
11419 curr_insn_type = INSN_T2;
11420 }
11421 }
11422 /* Handle VDIV. */
11423 else if (opc1 == 0x08)
11424 {
11425 if (dp_op_sz)
11426 curr_insn_type = INSN_T1;
11427 else
11428 curr_insn_type = INSN_T2;
11429 }
11430 /* Handle all other vfp data processing instructions. */
11431 else if (opc1 == 0x0b)
11432 {
11433 /* Handle VMOV. */
11434 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
11435 {
11436 if (bit (arm_insn_r->arm_insn, 4))
11437 {
11438 if (bit (arm_insn_r->arm_insn, 6))
11439 curr_insn_type = INSN_T0;
11440 else
11441 curr_insn_type = INSN_T1;
11442 }
11443 else
11444 {
11445 if (dp_op_sz)
11446 curr_insn_type = INSN_T1;
11447 else
11448 curr_insn_type = INSN_T2;
11449 }
11450 }
11451 /* Handle VNEG and VABS. */
11452 else if ((opc2 == 0x01 && opc3 == 0x01)
11453 || (opc2 == 0x00 && opc3 == 0x03))
11454 {
11455 if (!bit (arm_insn_r->arm_insn, 11))
11456 {
11457 if (bit (arm_insn_r->arm_insn, 6))
11458 curr_insn_type = INSN_T0;
11459 else
11460 curr_insn_type = INSN_T1;
11461 }
11462 else
11463 {
11464 if (dp_op_sz)
11465 curr_insn_type = INSN_T1;
11466 else
11467 curr_insn_type = INSN_T2;
11468 }
11469 }
11470 /* Handle VSQRT. */
11471 else if (opc2 == 0x01 && opc3 == 0x03)
11472 {
11473 if (dp_op_sz)
11474 curr_insn_type = INSN_T1;
11475 else
11476 curr_insn_type = INSN_T2;
11477 }
11478 /* Handle VCVT. */
11479 else if (opc2 == 0x07 && opc3 == 0x03)
11480 {
11481 if (!dp_op_sz)
11482 curr_insn_type = INSN_T1;
11483 else
11484 curr_insn_type = INSN_T2;
11485 }
11486 else if (opc3 & 0x01)
11487 {
11488 /* Handle VCVT. */
11489 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
11490 {
11491 if (!bit (arm_insn_r->arm_insn, 18))
11492 curr_insn_type = INSN_T2;
11493 else
11494 {
11495 if (dp_op_sz)
11496 curr_insn_type = INSN_T1;
11497 else
11498 curr_insn_type = INSN_T2;
11499 }
11500 }
11501 /* Handle VCVT. */
11502 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
11503 {
11504 if (dp_op_sz)
11505 curr_insn_type = INSN_T1;
11506 else
11507 curr_insn_type = INSN_T2;
11508 }
11509 /* Handle VCVTB, VCVTT. */
11510 else if ((opc2 & 0x0e) == 0x02)
11511 curr_insn_type = INSN_T2;
11512 /* Handle VCMP, VCMPE. */
11513 else if ((opc2 & 0x0e) == 0x04)
11514 curr_insn_type = INSN_T3;
11515 }
11516 }
11517
11518 switch (curr_insn_type)
11519 {
11520 case INSN_T0:
11521 reg_vd = reg_vd | (bit_d << 4);
11522 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11523 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
11524 arm_insn_r->reg_rec_count = 2;
11525 break;
11526
11527 case INSN_T1:
11528 reg_vd = reg_vd | (bit_d << 4);
11529 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11530 arm_insn_r->reg_rec_count = 1;
11531 break;
11532
11533 case INSN_T2:
11534 reg_vd = (reg_vd << 1) | bit_d;
11535 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11536 arm_insn_r->reg_rec_count = 1;
11537 break;
11538
11539 case INSN_T3:
11540 record_buf[0] = ARM_FPSCR_REGNUM;
11541 arm_insn_r->reg_rec_count = 1;
11542 break;
11543
11544 default:
11545 gdb_assert_not_reached ("no decoding pattern found");
11546 break;
11547 }
11548
11549 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11550 return 0;
11551 }
11552
11553 /* Handling opcode 110 insns. */
11554
11555 static int
11556 arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
11557 {
11558 uint32_t op1, op1_ebit, coproc;
11559
11560 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11561 op1 = bits (arm_insn_r->arm_insn, 20, 25);
11562 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11563
11564 if ((coproc & 0x0e) == 0x0a)
11565 {
11566 /* Handle extension register ld/st instructions. */
11567 if (!(op1 & 0x20))
11568 return arm_record_exreg_ld_st_insn (arm_insn_r);
11569
11570 /* 64-bit transfers between arm core and extension registers. */
11571 if ((op1 & 0x3e) == 0x04)
11572 return arm_record_exreg_ld_st_insn (arm_insn_r);
11573 }
11574 else
11575 {
11576 /* Handle coprocessor ld/st instructions. */
11577 if (!(op1 & 0x3a))
11578 {
11579 /* Store. */
11580 if (!op1_ebit)
11581 return arm_record_unsupported_insn (arm_insn_r);
11582 else
11583 /* Load. */
11584 return arm_record_unsupported_insn (arm_insn_r);
11585 }
11586
11587 /* Move to coprocessor from two arm core registers. */
11588 if (op1 == 0x4)
11589 return arm_record_unsupported_insn (arm_insn_r);
11590
11591 /* Move to two arm core registers from coprocessor. */
11592 if (op1 == 0x5)
11593 {
11594 uint32_t reg_t[2];
11595
11596 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
11597 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
11598 arm_insn_r->reg_rec_count = 2;
11599
11600 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
11601 return 0;
11602 }
11603 }
11604 return arm_record_unsupported_insn (arm_insn_r);
11605 }
11606
11607 /* Handling opcode 111 insns. */
11608
11609 static int
11610 arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
11611 {
11612 uint32_t op, op1_ebit, coproc, bits_24_25;
11613 struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
11614 struct regcache *reg_cache = arm_insn_r->regcache;
11615
11616 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
11617 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11618 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11619 op = bit (arm_insn_r->arm_insn, 4);
11620 bits_24_25 = bits (arm_insn_r->arm_insn, 24, 25);
11621
11622 /* Handle arm SWI/SVC system call instructions. */
11623 if (bits_24_25 == 0x3)
11624 {
11625 if (tdep->arm_syscall_record != NULL)
11626 {
11627 ULONGEST svc_operand, svc_number;
11628
11629 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
11630
11631 if (svc_operand) /* OABI. */
11632 svc_number = svc_operand - 0x900000;
11633 else /* EABI. */
11634 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
11635
11636 return tdep->arm_syscall_record (reg_cache, svc_number);
11637 }
11638 else
11639 {
11640 printf_unfiltered (_("no syscall record support\n"));
11641 return -1;
11642 }
11643 }
11644 else if (bits_24_25 == 0x02)
11645 {
11646 if (op)
11647 {
11648 if ((coproc & 0x0e) == 0x0a)
11649 {
11650 /* 8, 16, and 32-bit transfer */
11651 return arm_record_vdata_transfer_insn (arm_insn_r);
11652 }
11653 else
11654 {
11655 if (op1_ebit)
11656 {
11657 /* MRC, MRC2 */
11658 uint32_t record_buf[1];
11659
11660 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11661 if (record_buf[0] == 15)
11662 record_buf[0] = ARM_PS_REGNUM;
11663
11664 arm_insn_r->reg_rec_count = 1;
11665 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
11666 record_buf);
11667 return 0;
11668 }
11669 else
11670 {
11671 /* MCR, MCR2 */
11672 return -1;
11673 }
11674 }
11675 }
11676 else
11677 {
11678 if ((coproc & 0x0e) == 0x0a)
11679 {
11680 /* VFP data-processing instructions. */
11681 return arm_record_vfp_data_proc_insn (arm_insn_r);
11682 }
11683 else
11684 {
11685 /* CDP, CDP2 */
11686 return -1;
11687 }
11688 }
11689 }
11690 else
11691 {
11692 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 25);
11693
11694 if (op1 == 5)
11695 {
11696 if ((coproc & 0x0e) != 0x0a)
11697 {
11698 /* MRRC, MRRC2 */
11699 return -1;
11700 }
11701 }
11702 else if (op1 == 4 || op1 == 5)
11703 {
11704 if ((coproc & 0x0e) == 0x0a)
11705 {
11706 /* 64-bit transfers between ARM core and extension */
11707 return -1;
11708 }
11709 else if (op1 == 4)
11710 {
11711 /* MCRR, MCRR2 */
11712 return -1;
11713 }
11714 }
11715 else if (op1 == 0 || op1 == 1)
11716 {
11717 /* UNDEFINED */
11718 return -1;
11719 }
11720 else
11721 {
11722 if ((coproc & 0x0e) == 0x0a)
11723 {
11724 /* Extension register load/store */
11725 }
11726 else
11727 {
11728 /* STC, STC2, LDC, LDC2 */
11729 }
11730 return -1;
11731 }
11732 }
11733
11734 return -1;
11735 }
11736
11737 /* Handling opcode 000 insns. */
11738
11739 static int
11740 thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
11741 {
11742 uint32_t record_buf[8];
11743 uint32_t reg_src1 = 0;
11744
11745 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11746
11747 record_buf[0] = ARM_PS_REGNUM;
11748 record_buf[1] = reg_src1;
11749 thumb_insn_r->reg_rec_count = 2;
11750
11751 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11752
11753 return 0;
11754 }
11755
11756
11757 /* Handling opcode 001 insns. */
11758
11759 static int
11760 thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
11761 {
11762 uint32_t record_buf[8];
11763 uint32_t reg_src1 = 0;
11764
11765 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11766
11767 record_buf[0] = ARM_PS_REGNUM;
11768 record_buf[1] = reg_src1;
11769 thumb_insn_r->reg_rec_count = 2;
11770
11771 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11772
11773 return 0;
11774 }
11775
11776 /* Handling opcode 010 insns. */
11777
11778 static int
11779 thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
11780 {
11781 struct regcache *reg_cache = thumb_insn_r->regcache;
11782 uint32_t record_buf[8], record_buf_mem[8];
11783
11784 uint32_t reg_src1 = 0, reg_src2 = 0;
11785 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
11786
11787 ULONGEST u_regval[2] = {0};
11788
11789 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
11790
11791 if (bit (thumb_insn_r->arm_insn, 12))
11792 {
11793 /* Handle load/store register offset. */
11794 uint32_t opB = bits (thumb_insn_r->arm_insn, 9, 11);
11795
11796 if (in_inclusive_range (opB, 4U, 7U))
11797 {
11798 /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH. */
11799 reg_src1 = bits (thumb_insn_r->arm_insn,0, 2);
11800 record_buf[0] = reg_src1;
11801 thumb_insn_r->reg_rec_count = 1;
11802 }
11803 else if (in_inclusive_range (opB, 0U, 2U))
11804 {
11805 /* STR(2), STRB(2), STRH(2) . */
11806 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11807 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
11808 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11809 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11810 if (0 == opB)
11811 record_buf_mem[0] = 4; /* STR (2). */
11812 else if (2 == opB)
11813 record_buf_mem[0] = 1; /* STRB (2). */
11814 else if (1 == opB)
11815 record_buf_mem[0] = 2; /* STRH (2). */
11816 record_buf_mem[1] = u_regval[0] + u_regval[1];
11817 thumb_insn_r->mem_rec_count = 1;
11818 }
11819 }
11820 else if (bit (thumb_insn_r->arm_insn, 11))
11821 {
11822 /* Handle load from literal pool. */
11823 /* LDR(3). */
11824 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11825 record_buf[0] = reg_src1;
11826 thumb_insn_r->reg_rec_count = 1;
11827 }
11828 else if (opcode1)
11829 {
11830 /* Special data instructions and branch and exchange */
11831 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
11832 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
11833 if ((3 == opcode2) && (!opcode3))
11834 {
11835 /* Branch with exchange. */
11836 record_buf[0] = ARM_PS_REGNUM;
11837 thumb_insn_r->reg_rec_count = 1;
11838 }
11839 else
11840 {
11841 /* Format 8; special data processing insns. */
11842 record_buf[0] = ARM_PS_REGNUM;
11843 record_buf[1] = (bit (thumb_insn_r->arm_insn, 7) << 3
11844 | bits (thumb_insn_r->arm_insn, 0, 2));
11845 thumb_insn_r->reg_rec_count = 2;
11846 }
11847 }
11848 else
11849 {
11850 /* Format 5; data processing insns. */
11851 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11852 if (bit (thumb_insn_r->arm_insn, 7))
11853 {
11854 reg_src1 = reg_src1 + 8;
11855 }
11856 record_buf[0] = ARM_PS_REGNUM;
11857 record_buf[1] = reg_src1;
11858 thumb_insn_r->reg_rec_count = 2;
11859 }
11860
11861 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11862 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11863 record_buf_mem);
11864
11865 return 0;
11866 }
11867
11868 /* Handling opcode 001 insns. */
11869
11870 static int
11871 thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
11872 {
11873 struct regcache *reg_cache = thumb_insn_r->regcache;
11874 uint32_t record_buf[8], record_buf_mem[8];
11875
11876 uint32_t reg_src1 = 0;
11877 uint32_t opcode = 0, immed_5 = 0;
11878
11879 ULONGEST u_regval = 0;
11880
11881 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11882
11883 if (opcode)
11884 {
11885 /* LDR(1). */
11886 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11887 record_buf[0] = reg_src1;
11888 thumb_insn_r->reg_rec_count = 1;
11889 }
11890 else
11891 {
11892 /* STR(1). */
11893 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11894 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11895 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11896 record_buf_mem[0] = 4;
11897 record_buf_mem[1] = u_regval + (immed_5 * 4);
11898 thumb_insn_r->mem_rec_count = 1;
11899 }
11900
11901 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11902 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11903 record_buf_mem);
11904
11905 return 0;
11906 }
11907
11908 /* Handling opcode 100 insns. */
11909
11910 static int
11911 thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
11912 {
11913 struct regcache *reg_cache = thumb_insn_r->regcache;
11914 uint32_t record_buf[8], record_buf_mem[8];
11915
11916 uint32_t reg_src1 = 0;
11917 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
11918
11919 ULONGEST u_regval = 0;
11920
11921 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11922
11923 if (3 == opcode)
11924 {
11925 /* LDR(4). */
11926 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11927 record_buf[0] = reg_src1;
11928 thumb_insn_r->reg_rec_count = 1;
11929 }
11930 else if (1 == opcode)
11931 {
11932 /* LDRH(1). */
11933 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11934 record_buf[0] = reg_src1;
11935 thumb_insn_r->reg_rec_count = 1;
11936 }
11937 else if (2 == opcode)
11938 {
11939 /* STR(3). */
11940 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
11941 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
11942 record_buf_mem[0] = 4;
11943 record_buf_mem[1] = u_regval + (immed_8 * 4);
11944 thumb_insn_r->mem_rec_count = 1;
11945 }
11946 else if (0 == opcode)
11947 {
11948 /* STRH(1). */
11949 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11950 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11951 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11952 record_buf_mem[0] = 2;
11953 record_buf_mem[1] = u_regval + (immed_5 * 2);
11954 thumb_insn_r->mem_rec_count = 1;
11955 }
11956
11957 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11958 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11959 record_buf_mem);
11960
11961 return 0;
11962 }
11963
11964 /* Handling opcode 101 insns. */
11965
11966 static int
11967 thumb_record_misc (insn_decode_record *thumb_insn_r)
11968 {
11969 struct regcache *reg_cache = thumb_insn_r->regcache;
11970
11971 uint32_t opcode = 0;
11972 uint32_t register_bits = 0, register_count = 0;
11973 uint32_t index = 0, start_address = 0;
11974 uint32_t record_buf[24], record_buf_mem[48];
11975 uint32_t reg_src1;
11976
11977 ULONGEST u_regval = 0;
11978
11979 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11980
11981 if (opcode == 0 || opcode == 1)
11982 {
11983 /* ADR and ADD (SP plus immediate) */
11984
11985 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11986 record_buf[0] = reg_src1;
11987 thumb_insn_r->reg_rec_count = 1;
11988 }
11989 else
11990 {
11991 /* Miscellaneous 16-bit instructions */
11992 uint32_t opcode2 = bits (thumb_insn_r->arm_insn, 8, 11);
11993
11994 switch (opcode2)
11995 {
11996 case 6:
11997 /* SETEND and CPS */
11998 break;
11999 case 0:
12000 /* ADD/SUB (SP plus immediate) */
12001 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12002 record_buf[0] = ARM_SP_REGNUM;
12003 thumb_insn_r->reg_rec_count = 1;
12004 break;
12005 case 1: /* fall through */
12006 case 3: /* fall through */
12007 case 9: /* fall through */
12008 case 11:
12009 /* CBNZ, CBZ */
12010 break;
12011 case 2:
12012 /* SXTH, SXTB, UXTH, UXTB */
12013 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
12014 thumb_insn_r->reg_rec_count = 1;
12015 break;
12016 case 4: /* fall through */
12017 case 5:
12018 /* PUSH. */
12019 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12020 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12021 while (register_bits)
12022 {
12023 if (register_bits & 0x00000001)
12024 register_count++;
12025 register_bits = register_bits >> 1;
12026 }
12027 start_address = u_regval - \
12028 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
12029 thumb_insn_r->mem_rec_count = register_count;
12030 while (register_count)
12031 {
12032 record_buf_mem[(register_count * 2) - 1] = start_address;
12033 record_buf_mem[(register_count * 2) - 2] = 4;
12034 start_address = start_address + 4;
12035 register_count--;
12036 }
12037 record_buf[0] = ARM_SP_REGNUM;
12038 thumb_insn_r->reg_rec_count = 1;
12039 break;
12040 case 10:
12041 /* REV, REV16, REVSH */
12042 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
12043 thumb_insn_r->reg_rec_count = 1;
12044 break;
12045 case 12: /* fall through */
12046 case 13:
12047 /* POP. */
12048 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12049 while (register_bits)
12050 {
12051 if (register_bits & 0x00000001)
12052 record_buf[index++] = register_count;
12053 register_bits = register_bits >> 1;
12054 register_count++;
12055 }
12056 record_buf[index++] = ARM_PS_REGNUM;
12057 record_buf[index++] = ARM_SP_REGNUM;
12058 thumb_insn_r->reg_rec_count = index;
12059 break;
12060 case 0xe:
12061 /* BKPT insn. */
12062 /* Handle enhanced software breakpoint insn, BKPT. */
12063 /* CPSR is changed to be executed in ARM state, disabling normal
12064 interrupts, entering abort mode. */
12065 /* According to high vector configuration PC is set. */
12066 /* User hits breakpoint and type reverse, in that case, we need to go back with
12067 previous CPSR and Program Counter. */
12068 record_buf[0] = ARM_PS_REGNUM;
12069 record_buf[1] = ARM_LR_REGNUM;
12070 thumb_insn_r->reg_rec_count = 2;
12071 /* We need to save SPSR value, which is not yet done. */
12072 printf_unfiltered (_("Process record does not support instruction "
12073 "0x%0x at address %s.\n"),
12074 thumb_insn_r->arm_insn,
12075 paddress (thumb_insn_r->gdbarch,
12076 thumb_insn_r->this_addr));
12077 return -1;
12078
12079 case 0xf:
12080 /* If-Then, and hints */
12081 break;
12082 default:
12083 return -1;
12084 };
12085 }
12086
12087 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12088 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12089 record_buf_mem);
12090
12091 return 0;
12092 }
12093
12094 /* Handling opcode 110 insns. */
12095
12096 static int
12097 thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
12098 {
12099 struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
12100 struct regcache *reg_cache = thumb_insn_r->regcache;
12101
12102 uint32_t ret = 0; /* function return value: -1:record failure ; 0:success */
12103 uint32_t reg_src1 = 0;
12104 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
12105 uint32_t index = 0, start_address = 0;
12106 uint32_t record_buf[24], record_buf_mem[48];
12107
12108 ULONGEST u_regval = 0;
12109
12110 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12111 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
12112
12113 if (1 == opcode2)
12114 {
12115
12116 /* LDMIA. */
12117 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12118 /* Get Rn. */
12119 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12120 while (register_bits)
12121 {
12122 if (register_bits & 0x00000001)
12123 record_buf[index++] = register_count;
12124 register_bits = register_bits >> 1;
12125 register_count++;
12126 }
12127 record_buf[index++] = reg_src1;
12128 thumb_insn_r->reg_rec_count = index;
12129 }
12130 else if (0 == opcode2)
12131 {
12132 /* It handles both STMIA. */
12133 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12134 /* Get Rn. */
12135 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12136 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12137 while (register_bits)
12138 {
12139 if (register_bits & 0x00000001)
12140 register_count++;
12141 register_bits = register_bits >> 1;
12142 }
12143 start_address = u_regval;
12144 thumb_insn_r->mem_rec_count = register_count;
12145 while (register_count)
12146 {
12147 record_buf_mem[(register_count * 2) - 1] = start_address;
12148 record_buf_mem[(register_count * 2) - 2] = 4;
12149 start_address = start_address + 4;
12150 register_count--;
12151 }
12152 }
12153 else if (0x1F == opcode1)
12154 {
12155 /* Handle arm syscall insn. */
12156 if (tdep->arm_syscall_record != NULL)
12157 {
12158 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
12159 ret = tdep->arm_syscall_record (reg_cache, u_regval);
12160 }
12161 else
12162 {
12163 printf_unfiltered (_("no syscall record support\n"));
12164 return -1;
12165 }
12166 }
12167
12168 /* B (1), conditional branch is automatically taken care in process_record,
12169 as PC is saved there. */
12170
12171 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12172 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12173 record_buf_mem);
12174
12175 return ret;
12176 }
12177
12178 /* Handling opcode 111 insns. */
12179
12180 static int
12181 thumb_record_branch (insn_decode_record *thumb_insn_r)
12182 {
12183 uint32_t record_buf[8];
12184 uint32_t bits_h = 0;
12185
12186 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
12187
12188 if (2 == bits_h || 3 == bits_h)
12189 {
12190 /* BL */
12191 record_buf[0] = ARM_LR_REGNUM;
12192 thumb_insn_r->reg_rec_count = 1;
12193 }
12194 else if (1 == bits_h)
12195 {
12196 /* BLX(1). */
12197 record_buf[0] = ARM_PS_REGNUM;
12198 record_buf[1] = ARM_LR_REGNUM;
12199 thumb_insn_r->reg_rec_count = 2;
12200 }
12201
12202 /* B(2) is automatically taken care in process_record, as PC is
12203 saved there. */
12204
12205 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12206
12207 return 0;
12208 }
12209
12210 /* Handler for thumb2 load/store multiple instructions. */
12211
12212 static int
12213 thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
12214 {
12215 struct regcache *reg_cache = thumb2_insn_r->regcache;
12216
12217 uint32_t reg_rn, op;
12218 uint32_t register_bits = 0, register_count = 0;
12219 uint32_t index = 0, start_address = 0;
12220 uint32_t record_buf[24], record_buf_mem[48];
12221
12222 ULONGEST u_regval = 0;
12223
12224 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12225 op = bits (thumb2_insn_r->arm_insn, 23, 24);
12226
12227 if (0 == op || 3 == op)
12228 {
12229 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12230 {
12231 /* Handle RFE instruction. */
12232 record_buf[0] = ARM_PS_REGNUM;
12233 thumb2_insn_r->reg_rec_count = 1;
12234 }
12235 else
12236 {
12237 /* Handle SRS instruction after reading banked SP. */
12238 return arm_record_unsupported_insn (thumb2_insn_r);
12239 }
12240 }
12241 else if (1 == op || 2 == op)
12242 {
12243 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12244 {
12245 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
12246 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12247 while (register_bits)
12248 {
12249 if (register_bits & 0x00000001)
12250 record_buf[index++] = register_count;
12251
12252 register_count++;
12253 register_bits = register_bits >> 1;
12254 }
12255 record_buf[index++] = reg_rn;
12256 record_buf[index++] = ARM_PS_REGNUM;
12257 thumb2_insn_r->reg_rec_count = index;
12258 }
12259 else
12260 {
12261 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
12262 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12263 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12264 while (register_bits)
12265 {
12266 if (register_bits & 0x00000001)
12267 register_count++;
12268
12269 register_bits = register_bits >> 1;
12270 }
12271
12272 if (1 == op)
12273 {
12274 /* Start address calculation for LDMDB/LDMEA. */
12275 start_address = u_regval;
12276 }
12277 else if (2 == op)
12278 {
12279 /* Start address calculation for LDMDB/LDMEA. */
12280 start_address = u_regval - register_count * 4;
12281 }
12282
12283 thumb2_insn_r->mem_rec_count = register_count;
12284 while (register_count)
12285 {
12286 record_buf_mem[register_count * 2 - 1] = start_address;
12287 record_buf_mem[register_count * 2 - 2] = 4;
12288 start_address = start_address + 4;
12289 register_count--;
12290 }
12291 record_buf[0] = reg_rn;
12292 record_buf[1] = ARM_PS_REGNUM;
12293 thumb2_insn_r->reg_rec_count = 2;
12294 }
12295 }
12296
12297 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12298 record_buf_mem);
12299 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12300 record_buf);
12301 return ARM_RECORD_SUCCESS;
12302 }
12303
12304 /* Handler for thumb2 load/store (dual/exclusive) and table branch
12305 instructions. */
12306
12307 static int
12308 thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
12309 {
12310 struct regcache *reg_cache = thumb2_insn_r->regcache;
12311
12312 uint32_t reg_rd, reg_rn, offset_imm;
12313 uint32_t reg_dest1, reg_dest2;
12314 uint32_t address, offset_addr;
12315 uint32_t record_buf[8], record_buf_mem[8];
12316 uint32_t op1, op2, op3;
12317
12318 ULONGEST u_regval[2];
12319
12320 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
12321 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
12322 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
12323
12324 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12325 {
12326 if(!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
12327 {
12328 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
12329 record_buf[0] = reg_dest1;
12330 record_buf[1] = ARM_PS_REGNUM;
12331 thumb2_insn_r->reg_rec_count = 2;
12332 }
12333
12334 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
12335 {
12336 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12337 record_buf[2] = reg_dest2;
12338 thumb2_insn_r->reg_rec_count = 3;
12339 }
12340 }
12341 else
12342 {
12343 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12344 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12345
12346 if (0 == op1 && 0 == op2)
12347 {
12348 /* Handle STREX. */
12349 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12350 address = u_regval[0] + (offset_imm * 4);
12351 record_buf_mem[0] = 4;
12352 record_buf_mem[1] = address;
12353 thumb2_insn_r->mem_rec_count = 1;
12354 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12355 record_buf[0] = reg_rd;
12356 thumb2_insn_r->reg_rec_count = 1;
12357 }
12358 else if (1 == op1 && 0 == op2)
12359 {
12360 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12361 record_buf[0] = reg_rd;
12362 thumb2_insn_r->reg_rec_count = 1;
12363 address = u_regval[0];
12364 record_buf_mem[1] = address;
12365
12366 if (4 == op3)
12367 {
12368 /* Handle STREXB. */
12369 record_buf_mem[0] = 1;
12370 thumb2_insn_r->mem_rec_count = 1;
12371 }
12372 else if (5 == op3)
12373 {
12374 /* Handle STREXH. */
12375 record_buf_mem[0] = 2 ;
12376 thumb2_insn_r->mem_rec_count = 1;
12377 }
12378 else if (7 == op3)
12379 {
12380 /* Handle STREXD. */
12381 address = u_regval[0];
12382 record_buf_mem[0] = 4;
12383 record_buf_mem[2] = 4;
12384 record_buf_mem[3] = address + 4;
12385 thumb2_insn_r->mem_rec_count = 2;
12386 }
12387 }
12388 else
12389 {
12390 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12391
12392 if (bit (thumb2_insn_r->arm_insn, 24))
12393 {
12394 if (bit (thumb2_insn_r->arm_insn, 23))
12395 offset_addr = u_regval[0] + (offset_imm * 4);
12396 else
12397 offset_addr = u_regval[0] - (offset_imm * 4);
12398
12399 address = offset_addr;
12400 }
12401 else
12402 address = u_regval[0];
12403
12404 record_buf_mem[0] = 4;
12405 record_buf_mem[1] = address;
12406 record_buf_mem[2] = 4;
12407 record_buf_mem[3] = address + 4;
12408 thumb2_insn_r->mem_rec_count = 2;
12409 record_buf[0] = reg_rn;
12410 thumb2_insn_r->reg_rec_count = 1;
12411 }
12412 }
12413
12414 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12415 record_buf);
12416 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12417 record_buf_mem);
12418 return ARM_RECORD_SUCCESS;
12419 }
12420
12421 /* Handler for thumb2 data processing (shift register and modified immediate)
12422 instructions. */
12423
12424 static int
12425 thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
12426 {
12427 uint32_t reg_rd, op;
12428 uint32_t record_buf[8];
12429
12430 op = bits (thumb2_insn_r->arm_insn, 21, 24);
12431 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12432
12433 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
12434 {
12435 record_buf[0] = ARM_PS_REGNUM;
12436 thumb2_insn_r->reg_rec_count = 1;
12437 }
12438 else
12439 {
12440 record_buf[0] = reg_rd;
12441 record_buf[1] = ARM_PS_REGNUM;
12442 thumb2_insn_r->reg_rec_count = 2;
12443 }
12444
12445 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12446 record_buf);
12447 return ARM_RECORD_SUCCESS;
12448 }
12449
12450 /* Generic handler for thumb2 instructions which effect destination and PS
12451 registers. */
12452
12453 static int
12454 thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
12455 {
12456 uint32_t reg_rd;
12457 uint32_t record_buf[8];
12458
12459 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12460
12461 record_buf[0] = reg_rd;
12462 record_buf[1] = ARM_PS_REGNUM;
12463 thumb2_insn_r->reg_rec_count = 2;
12464
12465 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12466 record_buf);
12467 return ARM_RECORD_SUCCESS;
12468 }
12469
12470 /* Handler for thumb2 branch and miscellaneous control instructions. */
12471
12472 static int
12473 thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
12474 {
12475 uint32_t op, op1, op2;
12476 uint32_t record_buf[8];
12477
12478 op = bits (thumb2_insn_r->arm_insn, 20, 26);
12479 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
12480 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12481
12482 /* Handle MSR insn. */
12483 if (!(op1 & 0x2) && 0x38 == op)
12484 {
12485 if (!(op2 & 0x3))
12486 {
12487 /* CPSR is going to be changed. */
12488 record_buf[0] = ARM_PS_REGNUM;
12489 thumb2_insn_r->reg_rec_count = 1;
12490 }
12491 else
12492 {
12493 arm_record_unsupported_insn(thumb2_insn_r);
12494 return -1;
12495 }
12496 }
12497 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
12498 {
12499 /* BLX. */
12500 record_buf[0] = ARM_PS_REGNUM;
12501 record_buf[1] = ARM_LR_REGNUM;
12502 thumb2_insn_r->reg_rec_count = 2;
12503 }
12504
12505 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12506 record_buf);
12507 return ARM_RECORD_SUCCESS;
12508 }
12509
12510 /* Handler for thumb2 store single data item instructions. */
12511
12512 static int
12513 thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
12514 {
12515 struct regcache *reg_cache = thumb2_insn_r->regcache;
12516
12517 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
12518 uint32_t address, offset_addr;
12519 uint32_t record_buf[8], record_buf_mem[8];
12520 uint32_t op1, op2;
12521
12522 ULONGEST u_regval[2];
12523
12524 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
12525 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
12526 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12527 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12528
12529 if (bit (thumb2_insn_r->arm_insn, 23))
12530 {
12531 /* T2 encoding. */
12532 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
12533 offset_addr = u_regval[0] + offset_imm;
12534 address = offset_addr;
12535 }
12536 else
12537 {
12538 /* T3 encoding. */
12539 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
12540 {
12541 /* Handle STRB (register). */
12542 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
12543 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
12544 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
12545 offset_addr = u_regval[1] << shift_imm;
12546 address = u_regval[0] + offset_addr;
12547 }
12548 else
12549 {
12550 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12551 if (bit (thumb2_insn_r->arm_insn, 10))
12552 {
12553 if (bit (thumb2_insn_r->arm_insn, 9))
12554 offset_addr = u_regval[0] + offset_imm;
12555 else
12556 offset_addr = u_regval[0] - offset_imm;
12557
12558 address = offset_addr;
12559 }
12560 else
12561 address = u_regval[0];
12562 }
12563 }
12564
12565 switch (op1)
12566 {
12567 /* Store byte instructions. */
12568 case 4:
12569 case 0:
12570 record_buf_mem[0] = 1;
12571 break;
12572 /* Store half word instructions. */
12573 case 1:
12574 case 5:
12575 record_buf_mem[0] = 2;
12576 break;
12577 /* Store word instructions. */
12578 case 2:
12579 case 6:
12580 record_buf_mem[0] = 4;
12581 break;
12582
12583 default:
12584 gdb_assert_not_reached ("no decoding pattern found");
12585 break;
12586 }
12587
12588 record_buf_mem[1] = address;
12589 thumb2_insn_r->mem_rec_count = 1;
12590 record_buf[0] = reg_rn;
12591 thumb2_insn_r->reg_rec_count = 1;
12592
12593 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12594 record_buf);
12595 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12596 record_buf_mem);
12597 return ARM_RECORD_SUCCESS;
12598 }
12599
12600 /* Handler for thumb2 load memory hints instructions. */
12601
12602 static int
12603 thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
12604 {
12605 uint32_t record_buf[8];
12606 uint32_t reg_rt, reg_rn;
12607
12608 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
12609 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12610
12611 if (ARM_PC_REGNUM != reg_rt)
12612 {
12613 record_buf[0] = reg_rt;
12614 record_buf[1] = reg_rn;
12615 record_buf[2] = ARM_PS_REGNUM;
12616 thumb2_insn_r->reg_rec_count = 3;
12617
12618 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12619 record_buf);
12620 return ARM_RECORD_SUCCESS;
12621 }
12622
12623 return ARM_RECORD_FAILURE;
12624 }
12625
12626 /* Handler for thumb2 load word instructions. */
12627
12628 static int
12629 thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
12630 {
12631 uint32_t record_buf[8];
12632
12633 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
12634 record_buf[1] = ARM_PS_REGNUM;
12635 thumb2_insn_r->reg_rec_count = 2;
12636
12637 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12638 record_buf);
12639 return ARM_RECORD_SUCCESS;
12640 }
12641
12642 /* Handler for thumb2 long multiply, long multiply accumulate, and
12643 divide instructions. */
12644
12645 static int
12646 thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
12647 {
12648 uint32_t opcode1 = 0, opcode2 = 0;
12649 uint32_t record_buf[8];
12650
12651 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
12652 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
12653
12654 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
12655 {
12656 /* Handle SMULL, UMULL, SMULAL. */
12657 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
12658 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12659 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12660 record_buf[2] = ARM_PS_REGNUM;
12661 thumb2_insn_r->reg_rec_count = 3;
12662 }
12663 else if (1 == opcode1 || 3 == opcode2)
12664 {
12665 /* Handle SDIV and UDIV. */
12666 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12667 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12668 record_buf[2] = ARM_PS_REGNUM;
12669 thumb2_insn_r->reg_rec_count = 3;
12670 }
12671 else
12672 return ARM_RECORD_FAILURE;
12673
12674 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12675 record_buf);
12676 return ARM_RECORD_SUCCESS;
12677 }
12678
12679 /* Record handler for thumb32 coprocessor instructions. */
12680
12681 static int
12682 thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
12683 {
12684 if (bit (thumb2_insn_r->arm_insn, 25))
12685 return arm_record_coproc_data_proc (thumb2_insn_r);
12686 else
12687 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
12688 }
12689
12690 /* Record handler for advance SIMD structure load/store instructions. */
12691
12692 static int
12693 thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
12694 {
12695 struct regcache *reg_cache = thumb2_insn_r->regcache;
12696 uint32_t l_bit, a_bit, b_bits;
12697 uint32_t record_buf[128], record_buf_mem[128];
12698 uint32_t reg_rn, reg_vd, address, f_elem;
12699 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
12700 uint8_t f_ebytes;
12701
12702 l_bit = bit (thumb2_insn_r->arm_insn, 21);
12703 a_bit = bit (thumb2_insn_r->arm_insn, 23);
12704 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
12705 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12706 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
12707 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
12708 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
12709 f_elem = 8 / f_ebytes;
12710
12711 if (!l_bit)
12712 {
12713 ULONGEST u_regval = 0;
12714 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12715 address = u_regval;
12716
12717 if (!a_bit)
12718 {
12719 /* Handle VST1. */
12720 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12721 {
12722 if (b_bits == 0x07)
12723 bf_regs = 1;
12724 else if (b_bits == 0x0a)
12725 bf_regs = 2;
12726 else if (b_bits == 0x06)
12727 bf_regs = 3;
12728 else if (b_bits == 0x02)
12729 bf_regs = 4;
12730 else
12731 bf_regs = 0;
12732
12733 for (index_r = 0; index_r < bf_regs; index_r++)
12734 {
12735 for (index_e = 0; index_e < f_elem; index_e++)
12736 {
12737 record_buf_mem[index_m++] = f_ebytes;
12738 record_buf_mem[index_m++] = address;
12739 address = address + f_ebytes;
12740 thumb2_insn_r->mem_rec_count += 1;
12741 }
12742 }
12743 }
12744 /* Handle VST2. */
12745 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12746 {
12747 if (b_bits == 0x09 || b_bits == 0x08)
12748 bf_regs = 1;
12749 else if (b_bits == 0x03)
12750 bf_regs = 2;
12751 else
12752 bf_regs = 0;
12753
12754 for (index_r = 0; index_r < bf_regs; index_r++)
12755 for (index_e = 0; index_e < f_elem; index_e++)
12756 {
12757 for (loop_t = 0; loop_t < 2; loop_t++)
12758 {
12759 record_buf_mem[index_m++] = f_ebytes;
12760 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12761 thumb2_insn_r->mem_rec_count += 1;
12762 }
12763 address = address + (2 * f_ebytes);
12764 }
12765 }
12766 /* Handle VST3. */
12767 else if ((b_bits & 0x0e) == 0x04)
12768 {
12769 for (index_e = 0; index_e < f_elem; index_e++)
12770 {
12771 for (loop_t = 0; loop_t < 3; loop_t++)
12772 {
12773 record_buf_mem[index_m++] = f_ebytes;
12774 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12775 thumb2_insn_r->mem_rec_count += 1;
12776 }
12777 address = address + (3 * f_ebytes);
12778 }
12779 }
12780 /* Handle VST4. */
12781 else if (!(b_bits & 0x0e))
12782 {
12783 for (index_e = 0; index_e < f_elem; index_e++)
12784 {
12785 for (loop_t = 0; loop_t < 4; loop_t++)
12786 {
12787 record_buf_mem[index_m++] = f_ebytes;
12788 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12789 thumb2_insn_r->mem_rec_count += 1;
12790 }
12791 address = address + (4 * f_ebytes);
12792 }
12793 }
12794 }
12795 else
12796 {
12797 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
12798
12799 if (bft_size == 0x00)
12800 f_ebytes = 1;
12801 else if (bft_size == 0x01)
12802 f_ebytes = 2;
12803 else if (bft_size == 0x02)
12804 f_ebytes = 4;
12805 else
12806 f_ebytes = 0;
12807
12808 /* Handle VST1. */
12809 if (!(b_bits & 0x0b) || b_bits == 0x08)
12810 thumb2_insn_r->mem_rec_count = 1;
12811 /* Handle VST2. */
12812 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
12813 thumb2_insn_r->mem_rec_count = 2;
12814 /* Handle VST3. */
12815 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
12816 thumb2_insn_r->mem_rec_count = 3;
12817 /* Handle VST4. */
12818 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
12819 thumb2_insn_r->mem_rec_count = 4;
12820
12821 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
12822 {
12823 record_buf_mem[index_m] = f_ebytes;
12824 record_buf_mem[index_m] = address + (index_m * f_ebytes);
12825 }
12826 }
12827 }
12828 else
12829 {
12830 if (!a_bit)
12831 {
12832 /* Handle VLD1. */
12833 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12834 thumb2_insn_r->reg_rec_count = 1;
12835 /* Handle VLD2. */
12836 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12837 thumb2_insn_r->reg_rec_count = 2;
12838 /* Handle VLD3. */
12839 else if ((b_bits & 0x0e) == 0x04)
12840 thumb2_insn_r->reg_rec_count = 3;
12841 /* Handle VLD4. */
12842 else if (!(b_bits & 0x0e))
12843 thumb2_insn_r->reg_rec_count = 4;
12844 }
12845 else
12846 {
12847 /* Handle VLD1. */
12848 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
12849 thumb2_insn_r->reg_rec_count = 1;
12850 /* Handle VLD2. */
12851 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
12852 thumb2_insn_r->reg_rec_count = 2;
12853 /* Handle VLD3. */
12854 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
12855 thumb2_insn_r->reg_rec_count = 3;
12856 /* Handle VLD4. */
12857 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
12858 thumb2_insn_r->reg_rec_count = 4;
12859
12860 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
12861 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
12862 }
12863 }
12864
12865 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
12866 {
12867 record_buf[index_r] = reg_rn;
12868 thumb2_insn_r->reg_rec_count += 1;
12869 }
12870
12871 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12872 record_buf);
12873 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12874 record_buf_mem);
12875 return 0;
12876 }
12877
12878 /* Decodes thumb2 instruction type and invokes its record handler. */
12879
12880 static unsigned int
12881 thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
12882 {
12883 uint32_t op, op1, op2;
12884
12885 op = bit (thumb2_insn_r->arm_insn, 15);
12886 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
12887 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
12888
12889 if (op1 == 0x01)
12890 {
12891 if (!(op2 & 0x64 ))
12892 {
12893 /* Load/store multiple instruction. */
12894 return thumb2_record_ld_st_multiple (thumb2_insn_r);
12895 }
12896 else if ((op2 & 0x64) == 0x4)
12897 {
12898 /* Load/store (dual/exclusive) and table branch instruction. */
12899 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
12900 }
12901 else if ((op2 & 0x60) == 0x20)
12902 {
12903 /* Data-processing (shifted register). */
12904 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12905 }
12906 else if (op2 & 0x40)
12907 {
12908 /* Co-processor instructions. */
12909 return thumb2_record_coproc_insn (thumb2_insn_r);
12910 }
12911 }
12912 else if (op1 == 0x02)
12913 {
12914 if (op)
12915 {
12916 /* Branches and miscellaneous control instructions. */
12917 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
12918 }
12919 else if (op2 & 0x20)
12920 {
12921 /* Data-processing (plain binary immediate) instruction. */
12922 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12923 }
12924 else
12925 {
12926 /* Data-processing (modified immediate). */
12927 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12928 }
12929 }
12930 else if (op1 == 0x03)
12931 {
12932 if (!(op2 & 0x71 ))
12933 {
12934 /* Store single data item. */
12935 return thumb2_record_str_single_data (thumb2_insn_r);
12936 }
12937 else if (!((op2 & 0x71) ^ 0x10))
12938 {
12939 /* Advanced SIMD or structure load/store instructions. */
12940 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
12941 }
12942 else if (!((op2 & 0x67) ^ 0x01))
12943 {
12944 /* Load byte, memory hints instruction. */
12945 return thumb2_record_ld_mem_hints (thumb2_insn_r);
12946 }
12947 else if (!((op2 & 0x67) ^ 0x03))
12948 {
12949 /* Load halfword, memory hints instruction. */
12950 return thumb2_record_ld_mem_hints (thumb2_insn_r);
12951 }
12952 else if (!((op2 & 0x67) ^ 0x05))
12953 {
12954 /* Load word instruction. */
12955 return thumb2_record_ld_word (thumb2_insn_r);
12956 }
12957 else if (!((op2 & 0x70) ^ 0x20))
12958 {
12959 /* Data-processing (register) instruction. */
12960 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12961 }
12962 else if (!((op2 & 0x78) ^ 0x30))
12963 {
12964 /* Multiply, multiply accumulate, abs diff instruction. */
12965 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12966 }
12967 else if (!((op2 & 0x78) ^ 0x38))
12968 {
12969 /* Long multiply, long multiply accumulate, and divide. */
12970 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
12971 }
12972 else if (op2 & 0x40)
12973 {
12974 /* Co-processor instructions. */
12975 return thumb2_record_coproc_insn (thumb2_insn_r);
12976 }
12977 }
12978
12979 return -1;
12980 }
12981
12982 namespace {
12983 /* Abstract memory reader. */
12984
12985 class abstract_memory_reader
12986 {
12987 public:
12988 /* Read LEN bytes of target memory at address MEMADDR, placing the
12989 results in GDB's memory at BUF. Return true on success. */
12990
12991 virtual bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) = 0;
12992 };
12993
12994 /* Instruction reader from real target. */
12995
12996 class instruction_reader : public abstract_memory_reader
12997 {
12998 public:
12999 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
13000 {
13001 if (target_read_memory (memaddr, buf, len))
13002 return false;
13003 else
13004 return true;
13005 }
13006 };
13007
13008 } // namespace
13009
13010 /* Extracts arm/thumb/thumb2 insn depending on the size, and returns 0 on success
13011 and positive val on fauilure. */
13012
13013 static int
13014 extract_arm_insn (abstract_memory_reader& reader,
13015 insn_decode_record *insn_record, uint32_t insn_size)
13016 {
13017 gdb_byte buf[insn_size];
13018
13019 memset (&buf[0], 0, insn_size);
13020
13021 if (!reader.read (insn_record->this_addr, buf, insn_size))
13022 return 1;
13023 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
13024 insn_size,
13025 gdbarch_byte_order_for_code (insn_record->gdbarch));
13026 return 0;
13027 }
13028
13029 typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
13030
13031 /* Decode arm/thumb insn depending on condition cods and opcodes; and
13032 dispatch it. */
13033
13034 static int
13035 decode_insn (abstract_memory_reader &reader, insn_decode_record *arm_record,
13036 record_type_t record_type, uint32_t insn_size)
13037 {
13038
13039 /* (Starting from numerical 0); bits 25, 26, 27 decodes type of arm
13040 instruction. */
13041 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
13042 {
13043 arm_record_data_proc_misc_ld_str, /* 000. */
13044 arm_record_data_proc_imm, /* 001. */
13045 arm_record_ld_st_imm_offset, /* 010. */
13046 arm_record_ld_st_reg_offset, /* 011. */
13047 arm_record_ld_st_multiple, /* 100. */
13048 arm_record_b_bl, /* 101. */
13049 arm_record_asimd_vfp_coproc, /* 110. */
13050 arm_record_coproc_data_proc /* 111. */
13051 };
13052
13053 /* (Starting from numerical 0); bits 13,14,15 decodes type of thumb
13054 instruction. */
13055 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
13056 { \
13057 thumb_record_shift_add_sub, /* 000. */
13058 thumb_record_add_sub_cmp_mov, /* 001. */
13059 thumb_record_ld_st_reg_offset, /* 010. */
13060 thumb_record_ld_st_imm_offset, /* 011. */
13061 thumb_record_ld_st_stack, /* 100. */
13062 thumb_record_misc, /* 101. */
13063 thumb_record_ldm_stm_swi, /* 110. */
13064 thumb_record_branch /* 111. */
13065 };
13066
13067 uint32_t ret = 0; /* return value: negative:failure 0:success. */
13068 uint32_t insn_id = 0;
13069
13070 if (extract_arm_insn (reader, arm_record, insn_size))
13071 {
13072 if (record_debug)
13073 {
13074 printf_unfiltered (_("Process record: error reading memory at "
13075 "addr %s len = %d.\n"),
13076 paddress (arm_record->gdbarch,
13077 arm_record->this_addr), insn_size);
13078 }
13079 return -1;
13080 }
13081 else if (ARM_RECORD == record_type)
13082 {
13083 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
13084 insn_id = bits (arm_record->arm_insn, 25, 27);
13085
13086 if (arm_record->cond == 0xf)
13087 ret = arm_record_extension_space (arm_record);
13088 else
13089 {
13090 /* If this insn has fallen into extension space
13091 then we need not decode it anymore. */
13092 ret = arm_handle_insn[insn_id] (arm_record);
13093 }
13094 if (ret != ARM_RECORD_SUCCESS)
13095 {
13096 arm_record_unsupported_insn (arm_record);
13097 ret = -1;
13098 }
13099 }
13100 else if (THUMB_RECORD == record_type)
13101 {
13102 /* As thumb does not have condition codes, we set negative. */
13103 arm_record->cond = -1;
13104 insn_id = bits (arm_record->arm_insn, 13, 15);
13105 ret = thumb_handle_insn[insn_id] (arm_record);
13106 if (ret != ARM_RECORD_SUCCESS)
13107 {
13108 arm_record_unsupported_insn (arm_record);
13109 ret = -1;
13110 }
13111 }
13112 else if (THUMB2_RECORD == record_type)
13113 {
13114 /* As thumb does not have condition codes, we set negative. */
13115 arm_record->cond = -1;
13116
13117 /* Swap first half of 32bit thumb instruction with second half. */
13118 arm_record->arm_insn
13119 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
13120
13121 ret = thumb2_record_decode_insn_handler (arm_record);
13122
13123 if (ret != ARM_RECORD_SUCCESS)
13124 {
13125 arm_record_unsupported_insn (arm_record);
13126 ret = -1;
13127 }
13128 }
13129 else
13130 {
13131 /* Throw assertion. */
13132 gdb_assert_not_reached ("not a valid instruction, could not decode");
13133 }
13134
13135 return ret;
13136 }
13137
13138 #if GDB_SELF_TEST
13139 namespace selftests {
13140
13141 /* Provide both 16-bit and 32-bit thumb instructions. */
13142
13143 class instruction_reader_thumb : public abstract_memory_reader
13144 {
13145 public:
13146 template<size_t SIZE>
13147 instruction_reader_thumb (enum bfd_endian endian,
13148 const uint16_t (&insns)[SIZE])
13149 : m_endian (endian), m_insns (insns), m_insns_size (SIZE)
13150 {}
13151
13152 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
13153 {
13154 SELF_CHECK (len == 4 || len == 2);
13155 SELF_CHECK (memaddr % 2 == 0);
13156 SELF_CHECK ((memaddr / 2) < m_insns_size);
13157
13158 store_unsigned_integer (buf, 2, m_endian, m_insns[memaddr / 2]);
13159 if (len == 4)
13160 {
13161 store_unsigned_integer (&buf[2], 2, m_endian,
13162 m_insns[memaddr / 2 + 1]);
13163 }
13164 return true;
13165 }
13166
13167 private:
13168 enum bfd_endian m_endian;
13169 const uint16_t *m_insns;
13170 size_t m_insns_size;
13171 };
13172
13173 static void
13174 arm_record_test (void)
13175 {
13176 struct gdbarch_info info;
13177 gdbarch_info_init (&info);
13178 info.bfd_arch_info = bfd_scan_arch ("arm");
13179
13180 struct gdbarch *gdbarch = gdbarch_find_by_info (info);
13181
13182 SELF_CHECK (gdbarch != NULL);
13183
13184 /* 16-bit Thumb instructions. */
13185 {
13186 insn_decode_record arm_record;
13187
13188 memset (&arm_record, 0, sizeof (insn_decode_record));
13189 arm_record.gdbarch = gdbarch;
13190
13191 static const uint16_t insns[] = {
13192 /* db b2 uxtb r3, r3 */
13193 0xb2db,
13194 /* cd 58 ldr r5, [r1, r3] */
13195 0x58cd,
13196 };
13197
13198 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
13199 instruction_reader_thumb reader (endian, insns);
13200 int ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13201 THUMB_INSN_SIZE_BYTES);
13202
13203 SELF_CHECK (ret == 0);
13204 SELF_CHECK (arm_record.mem_rec_count == 0);
13205 SELF_CHECK (arm_record.reg_rec_count == 1);
13206 SELF_CHECK (arm_record.arm_regs[0] == 3);
13207
13208 arm_record.this_addr += 2;
13209 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13210 THUMB_INSN_SIZE_BYTES);
13211
13212 SELF_CHECK (ret == 0);
13213 SELF_CHECK (arm_record.mem_rec_count == 0);
13214 SELF_CHECK (arm_record.reg_rec_count == 1);
13215 SELF_CHECK (arm_record.arm_regs[0] == 5);
13216 }
13217
13218 /* 32-bit Thumb-2 instructions. */
13219 {
13220 insn_decode_record arm_record;
13221
13222 memset (&arm_record, 0, sizeof (insn_decode_record));
13223 arm_record.gdbarch = gdbarch;
13224
13225 static const uint16_t insns[] = {
13226 /* 1d ee 70 7f mrc 15, 0, r7, cr13, cr0, {3} */
13227 0xee1d, 0x7f70,
13228 };
13229
13230 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
13231 instruction_reader_thumb reader (endian, insns);
13232 int ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
13233 THUMB2_INSN_SIZE_BYTES);
13234
13235 SELF_CHECK (ret == 0);
13236 SELF_CHECK (arm_record.mem_rec_count == 0);
13237 SELF_CHECK (arm_record.reg_rec_count == 1);
13238 SELF_CHECK (arm_record.arm_regs[0] == 7);
13239 }
13240 }
13241 } // namespace selftests
13242 #endif /* GDB_SELF_TEST */
13243
13244 /* Cleans up local record registers and memory allocations. */
13245
13246 static void
13247 deallocate_reg_mem (insn_decode_record *record)
13248 {
13249 xfree (record->arm_regs);
13250 xfree (record->arm_mems);
13251 }
13252
13253
13254 /* Parse the current instruction and record the values of the registers and
13255 memory that will be changed in current instruction to record_arch_list".
13256 Return -1 if something is wrong. */
13257
13258 int
13259 arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
13260 CORE_ADDR insn_addr)
13261 {
13262
13263 uint32_t no_of_rec = 0;
13264 uint32_t ret = 0; /* return value: -1:record failure ; 0:success */
13265 ULONGEST t_bit = 0, insn_id = 0;
13266
13267 ULONGEST u_regval = 0;
13268
13269 insn_decode_record arm_record;
13270
13271 memset (&arm_record, 0, sizeof (insn_decode_record));
13272 arm_record.regcache = regcache;
13273 arm_record.this_addr = insn_addr;
13274 arm_record.gdbarch = gdbarch;
13275
13276
13277 if (record_debug > 1)
13278 {
13279 fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
13280 "addr = %s\n",
13281 paddress (gdbarch, arm_record.this_addr));
13282 }
13283
13284 instruction_reader reader;
13285 if (extract_arm_insn (reader, &arm_record, 2))
13286 {
13287 if (record_debug)
13288 {
13289 printf_unfiltered (_("Process record: error reading memory at "
13290 "addr %s len = %d.\n"),
13291 paddress (arm_record.gdbarch,
13292 arm_record.this_addr), 2);
13293 }
13294 return -1;
13295 }
13296
13297 /* Check the insn, whether it is thumb or arm one. */
13298
13299 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
13300 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
13301
13302
13303 if (!(u_regval & t_bit))
13304 {
13305 /* We are decoding arm insn. */
13306 ret = decode_insn (reader, &arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
13307 }
13308 else
13309 {
13310 insn_id = bits (arm_record.arm_insn, 11, 15);
13311 /* is it thumb2 insn? */
13312 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
13313 {
13314 ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
13315 THUMB2_INSN_SIZE_BYTES);
13316 }
13317 else
13318 {
13319 /* We are decoding thumb insn. */
13320 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13321 THUMB_INSN_SIZE_BYTES);
13322 }
13323 }
13324
13325 if (0 == ret)
13326 {
13327 /* Record registers. */
13328 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
13329 if (arm_record.arm_regs)
13330 {
13331 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
13332 {
13333 if (record_full_arch_list_add_reg
13334 (arm_record.regcache , arm_record.arm_regs[no_of_rec]))
13335 ret = -1;
13336 }
13337 }
13338 /* Record memories. */
13339 if (arm_record.arm_mems)
13340 {
13341 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
13342 {
13343 if (record_full_arch_list_add_mem
13344 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
13345 arm_record.arm_mems[no_of_rec].len))
13346 ret = -1;
13347 }
13348 }
13349
13350 if (record_full_arch_list_add_end ())
13351 ret = -1;
13352 }
13353
13354
13355 deallocate_reg_mem (&arm_record);
13356
13357 return ret;
13358 }
13359
13360 /* See arm-tdep.h. */
13361
13362 const target_desc *
13363 arm_read_description (arm_fp_type fp_type)
13364 {
13365 struct target_desc *tdesc = tdesc_arm_list[fp_type];
13366
13367 if (tdesc == nullptr)
13368 {
13369 tdesc = arm_create_target_description (fp_type);
13370 tdesc_arm_list[fp_type] = tdesc;
13371 }
13372
13373 return tdesc;
13374 }
13375
13376 /* See arm-tdep.h. */
13377
13378 const target_desc *
13379 arm_read_mprofile_description (arm_m_profile_type m_type)
13380 {
13381 struct target_desc *tdesc = tdesc_arm_mprofile_list[m_type];
13382
13383 if (tdesc == nullptr)
13384 {
13385 tdesc = arm_create_mprofile_target_description (m_type);
13386 tdesc_arm_mprofile_list[m_type] = tdesc;
13387 }
13388
13389 return tdesc;
13390 }
This page took 0.316208 seconds and 4 git commands to generate.