1 /* Common target dependent code for GDB on ARM systems.
2
3 Copyright (C) 1988-2020 Free Software Foundation, Inc.
4
5 This file is part of GDB.
6
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
11
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
19
20 #include "defs.h"
21
22 #include <ctype.h> /* XXX for isupper (). */
23
24 #include "frame.h"
25 #include "inferior.h"
26 #include "infrun.h"
27 #include "gdbcmd.h"
28 #include "gdbcore.h"
29 #include "dis-asm.h" /* For register styles. */
30 #include "disasm.h"
31 #include "regcache.h"
32 #include "reggroups.h"
33 #include "target-float.h"
34 #include "value.h"
35 #include "arch-utils.h"
36 #include "osabi.h"
37 #include "frame-unwind.h"
38 #include "frame-base.h"
39 #include "trad-frame.h"
40 #include "objfiles.h"
41 #include "dwarf2/frame.h"
42 #include "gdbtypes.h"
43 #include "prologue-value.h"
44 #include "remote.h"
45 #include "target-descriptions.h"
46 #include "user-regs.h"
47 #include "observable.h"
48 #include "count-one-bits.h"
49
50 #include "arch/arm.h"
51 #include "arch/arm-get-next-pcs.h"
52 #include "arm-tdep.h"
53 #include "gdb/sim-arm.h"
54
55 #include "elf-bfd.h"
56 #include "coff/internal.h"
57 #include "elf/arm.h"
58
59 #include "record.h"
60 #include "record-full.h"
61 #include <algorithm>
62
63 #include "producer.h"
64
65 #if GDB_SELF_TEST
66 #include "gdbsupport/selftest.h"
67 #endif
68
69 static bool arm_debug;
70
71 /* Macros for setting and testing a bit in a minimal symbol that marks
72 it as a Thumb function. The MSB of the minimal symbol's "info" field
73 is used for this purpose.
74
75 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
76 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
77
78 #define MSYMBOL_SET_SPECIAL(msym) \
79 MSYMBOL_TARGET_FLAG_1 (msym) = 1
80
81 #define MSYMBOL_IS_SPECIAL(msym) \
82 MSYMBOL_TARGET_FLAG_1 (msym)
83
84 struct arm_mapping_symbol
85 {
86 CORE_ADDR value;
87 char type;
88
89 bool operator< (const arm_mapping_symbol &other) const
90 { return this->value < other.value; }
91 };
92
93 typedef std::vector<arm_mapping_symbol> arm_mapping_symbol_vec;
94
95 struct arm_per_bfd
96 {
97 explicit arm_per_bfd (size_t num_sections)
98 : section_maps (new arm_mapping_symbol_vec[num_sections]),
99 section_maps_sorted (new bool[num_sections] ())
100 {}
101
102 DISABLE_COPY_AND_ASSIGN (arm_per_bfd);
103
104 /* Information about mapping symbols ($a, $d, $t) in the objfile.
105
106 The format is an array of vectors of arm_mapping_symbols; there is one
107 vector for each section of the objfile (the array is indexed by BFD section
108 index).
109
110 For each section, the vector of arm_mapping_symbol is sorted by
111 symbol value (address). */
112 std::unique_ptr<arm_mapping_symbol_vec[]> section_maps;
113
114 /* For each corresponding element of section_maps above, tells whether
115 that vector has been sorted. */
116 std::unique_ptr<bool[]> section_maps_sorted;
117 };
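
/* Mapping symbols follow the ARM ELF ABI: a "$a" symbol marks the start of
   a run of ARM code, "$t" marks Thumb code, and "$d" marks literal data
   embedded in the text section.  For example, a section holding an ARM
   function, its literal pool, and then a Thumb function would carry "$a" at
   the function start, "$d" at the pool, and "$t" at the Thumb entry point;
   the per-section vectors above record exactly these (value, type) pairs.  */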
118
119 /* Per-bfd data used for mapping symbols. */
120 static bfd_key<arm_per_bfd> arm_bfd_data_key;
121
122 /* The list of available "set arm ..." and "show arm ..." commands. */
123 static struct cmd_list_element *setarmcmdlist = NULL;
124 static struct cmd_list_element *showarmcmdlist = NULL;
125
126 /* The type of floating-point to use. Keep this in sync with enum
127 arm_float_model, and the help string in _initialize_arm_tdep. */
128 static const char *const fp_model_strings[] =
129 {
130 "auto",
131 "softfpa",
132 "fpa",
133 "softvfp",
134 "vfp",
135 NULL
136 };
137
138 /* A variable that can be configured by the user. */
139 static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
140 static const char *current_fp_model = "auto";
141
142 /* The ABI to use. Keep this in sync with arm_abi_kind. */
143 static const char *const arm_abi_strings[] =
144 {
145 "auto",
146 "APCS",
147 "AAPCS",
148 NULL
149 };
150
151 /* A variable that can be configured by the user. */
152 static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
153 static const char *arm_abi_string = "auto";
154
155 /* The execution mode to assume. */
156 static const char *const arm_mode_strings[] =
157 {
158 "auto",
159 "arm",
160 "thumb",
161 NULL
162 };
163
164 static const char *arm_fallback_mode_string = "auto";
165 static const char *arm_force_mode_string = "auto";
166
167 /* The standard register names, and all the valid aliases for them. Note
168 that `fp', `sp' and `pc' are not added in this alias list, because they
169 have been added as builtin user registers in
170 std-regs.c:_initialize_frame_reg. */
171 static const struct
172 {
173 const char *name;
174 int regnum;
175 } arm_register_aliases[] = {
176 /* Basic register numbers. */
177 { "r0", 0 },
178 { "r1", 1 },
179 { "r2", 2 },
180 { "r3", 3 },
181 { "r4", 4 },
182 { "r5", 5 },
183 { "r6", 6 },
184 { "r7", 7 },
185 { "r8", 8 },
186 { "r9", 9 },
187 { "r10", 10 },
188 { "r11", 11 },
189 { "r12", 12 },
190 { "r13", 13 },
191 { "r14", 14 },
192 { "r15", 15 },
193 /* Synonyms (argument and variable registers). */
194 { "a1", 0 },
195 { "a2", 1 },
196 { "a3", 2 },
197 { "a4", 3 },
198 { "v1", 4 },
199 { "v2", 5 },
200 { "v3", 6 },
201 { "v4", 7 },
202 { "v5", 8 },
203 { "v6", 9 },
204 { "v7", 10 },
205 { "v8", 11 },
206 /* Other platform-specific names for r9. */
207 { "sb", 9 },
208 { "tr", 9 },
209 /* Special names. */
210 { "ip", 12 },
211 { "lr", 14 },
212 /* Names used by GCC (not listed in the ARM EABI). */
213 { "sl", 10 },
214 /* A special name from the older ATPCS. */
215 { "wr", 7 },
216 };
217
218 static const char *const arm_register_names[] =
219 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
220 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
221 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
222 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
223 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
224 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
225 "fps", "cpsr" }; /* 24 25 */
226
227 /* Holds the current set of options to be passed to the disassembler. */
228 static char *arm_disassembler_options;
229
230 /* Valid register name styles. */
231 static const char **valid_disassembly_styles;
232
233 /* Disassembly style to use. Default to "std" register names. */
234 static const char *disassembly_style;
235
236 /* All possible arm target descriptors. */
237 static struct target_desc *tdesc_arm_list[ARM_FP_TYPE_INVALID];
238 static struct target_desc *tdesc_arm_mprofile_list[ARM_M_TYPE_INVALID];
239
240 /* This is used to keep the bfd arch_info in sync with the disassembly
241 style. */
242 static void set_disassembly_style_sfunc (const char *, int,
243 struct cmd_list_element *);
244 static void show_disassembly_style_sfunc (struct ui_file *, int,
245 struct cmd_list_element *,
246 const char *);
247
248 static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
249 readable_regcache *regcache,
250 int regnum, gdb_byte *buf);
251 static void arm_neon_quad_write (struct gdbarch *gdbarch,
252 struct regcache *regcache,
253 int regnum, const gdb_byte *buf);
254
255 static CORE_ADDR
256 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self);
257
258
259 /* get_next_pcs operations. */
260 static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = {
261 arm_get_next_pcs_read_memory_unsigned_integer,
262 arm_get_next_pcs_syscall_next_pc,
263 arm_get_next_pcs_addr_bits_remove,
264 arm_get_next_pcs_is_thumb,
265 NULL,
266 };
267
268 struct arm_prologue_cache
269 {
270 /* The stack pointer at the time this frame was created; i.e. the
271 caller's stack pointer when this function was called. It is used
272 to identify this frame. */
273 CORE_ADDR prev_sp;
274
275 /* The frame base for this frame is just prev_sp - frame size.
276 FRAMESIZE is the distance from the frame pointer to the
277 initial stack pointer. */
278
279 int framesize;
280
281 /* The register used to hold the frame pointer for this frame. */
282 int framereg;
283
284 /* Saved register offsets. */
285 struct trad_frame_saved_reg *saved_regs;
286 };
287
288 namespace {
289
290 /* Abstract class to read ARM instructions from memory. */
291
292 class arm_instruction_reader
293 {
294 public:
295 /* Read a 4-byte instruction from memory using the BYTE_ORDER endianness. */
296 virtual uint32_t read (CORE_ADDR memaddr, bfd_endian byte_order) const = 0;
297 };
298
299 /* Read instructions from target memory. */
300
301 class target_arm_instruction_reader : public arm_instruction_reader
302 {
303 public:
304 uint32_t read (CORE_ADDR memaddr, bfd_endian byte_order) const override
305 {
306 return read_code_unsigned_integer (memaddr, 4, byte_order);
307 }
308 };
309
310 } /* namespace */
311
312 static CORE_ADDR arm_analyze_prologue
313 (struct gdbarch *gdbarch, CORE_ADDR prologue_start, CORE_ADDR prologue_end,
314 struct arm_prologue_cache *cache, const arm_instruction_reader &insn_reader);
315
316 /* Architecture version for displaced stepping. This affects the behaviour of
317 certain instructions, and really should not be hard-wired. */
318
319 #define DISPLACED_STEPPING_ARCH_VERSION 5
320
321 /* See arm-tdep.h. */
322
323 bool arm_apcs_32 = true;
324
325 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
326
327 int
328 arm_psr_thumb_bit (struct gdbarch *gdbarch)
329 {
330 if (gdbarch_tdep (gdbarch)->is_m)
331 return XPSR_T;
332 else
333 return CPSR_T;
334 }
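
/* For example, on targets that are not M-profile this returns CPSR_T
   (bit 5 of the CPSR), while on M-profile (is_m) targets it returns
   XPSR_T, the T bit held in the EPSR portion of the xPSR (bit 24).  */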
335
336 /* Determine if the processor is currently executing in Thumb mode. */
337
338 int
339 arm_is_thumb (struct regcache *regcache)
340 {
341 ULONGEST cpsr;
342 ULONGEST t_bit = arm_psr_thumb_bit (regcache->arch ());
343
344 cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
345
346 return (cpsr & t_bit) != 0;
347 }
348
349 /* Determine if FRAME is executing in Thumb mode. */
350
351 int
352 arm_frame_is_thumb (struct frame_info *frame)
353 {
354 CORE_ADDR cpsr;
355 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
356
357 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
358 directly (from a signal frame or dummy frame) or by interpreting
359 the saved LR (from a prologue or DWARF frame). So consult it and
360 trust the unwinders. */
361 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
362
363 return (cpsr & t_bit) != 0;
364 }
365
366 /* Search for the mapping symbol covering MEMADDR. If one is found,
367 return its type. Otherwise, return 0. If START is non-NULL,
368 set *START to the location of the mapping symbol. */
369
370 static char
371 arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
372 {
373 struct obj_section *sec;
374
375 /* If there are mapping symbols, consult them. */
376 sec = find_pc_section (memaddr);
377 if (sec != NULL)
378 {
379 arm_per_bfd *data = arm_bfd_data_key.get (sec->objfile->obfd);
380 if (data != NULL)
381 {
382 unsigned int section_idx = sec->the_bfd_section->index;
383 arm_mapping_symbol_vec &map
384 = data->section_maps[section_idx];
385
386 /* Sort the vector on first use. */
387 if (!data->section_maps_sorted[section_idx])
388 {
389 std::sort (map.begin (), map.end ());
390 data->section_maps_sorted[section_idx] = true;
391 }
392
393 struct arm_mapping_symbol map_key
394 = { memaddr - obj_section_addr (sec), 0 };
395 arm_mapping_symbol_vec::const_iterator it
396 = std::lower_bound (map.begin (), map.end (), map_key);
397
398 /* std::lower_bound finds the earliest ordered insertion
399 point. If the symbol at this position starts at this exact
400 address, we use that; otherwise, the preceding
401 mapping symbol covers this address. */
402 if (it < map.end ())
403 {
404 if (it->value == map_key.value)
405 {
406 if (start)
407 *start = it->value + obj_section_addr (sec);
408 return it->type;
409 }
410 }
411
412 if (it > map.begin ())
413 {
414 arm_mapping_symbol_vec::const_iterator prev_it
415 = it - 1;
416
417 if (start)
418 *start = prev_it->value + obj_section_addr (sec);
419 return prev_it->type;
420 }
421 }
422 }
423
424 return 0;
425 }
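
/* As an illustration of the lookup above: if a section's sorted vector holds
   mapping symbols at offsets 0x0 ($a), 0x40 ($d) and 0x48 ($t), then for a
   MEMADDR at section offset 0x44 std::lower_bound stops at the 0x48 entry;
   since that entry does not start exactly at 0x44, the preceding 0x40 entry
   is used, so the function reports type 'd' with *START at the section base
   plus 0x40.  */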
426
427 /* Determine if the program counter specified in MEMADDR is in a Thumb
428 function. This function should be called for addresses unrelated to
429 any executing frame; otherwise, prefer arm_frame_is_thumb. */
430
431 int
432 arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
433 {
434 struct bound_minimal_symbol sym;
435 char type;
436 arm_displaced_step_closure *dsc
437 = ((arm_displaced_step_closure * )
438 get_displaced_step_closure_by_addr (memaddr));
439
440 /* If we are checking the mode of a displaced instruction in the copy area,
441 the mode should be determined from the instruction at the original address. */
442 if (dsc)
443 {
444 displaced_debug_printf ("check mode of %.8lx instead of %.8lx",
445 (unsigned long) dsc->insn_addr,
446 (unsigned long) memaddr);
447 memaddr = dsc->insn_addr;
448 }
449
450 /* If bit 0 of the address is set, assume this is a Thumb address. */
451 if (IS_THUMB_ADDR (memaddr))
452 return 1;
453
454 /* If the user wants to override the symbol table, let them. */
455 if (strcmp (arm_force_mode_string, "arm") == 0)
456 return 0;
457 if (strcmp (arm_force_mode_string, "thumb") == 0)
458 return 1;
459
460 /* ARM v6-M and v7-M are always in Thumb mode. */
461 if (gdbarch_tdep (gdbarch)->is_m)
462 return 1;
463
464 /* If there are mapping symbols, consult them. */
465 type = arm_find_mapping_symbol (memaddr, NULL);
466 if (type)
467 return type == 't';
468
469 /* Thumb functions have a "special" bit set in minimal symbols. */
470 sym = lookup_minimal_symbol_by_pc (memaddr);
471 if (sym.minsym)
472 return (MSYMBOL_IS_SPECIAL (sym.minsym));
473
474 /* If the user wants to override the fallback mode, let them. */
475 if (strcmp (arm_fallback_mode_string, "arm") == 0)
476 return 0;
477 if (strcmp (arm_fallback_mode_string, "thumb") == 0)
478 return 1;
479
480 /* If we couldn't find any symbol, but we're talking to a running
481 target, then trust the current value of $cpsr. This lets
482 "display/i $pc" always show the correct mode (though if there is
483 a symbol table we will not reach here, so it still may not be
484 displayed in the mode in which it will be executed). */
485 if (target_has_registers ())
486 return arm_frame_is_thumb (get_current_frame ());
487
488 /* Otherwise we're out of luck; we assume ARM. */
489 return 0;
490 }
491
492 /* Determine if the address specified equals any of these magic return
493 values, called EXC_RETURN, defined by the ARM v6-M, v7-M and v8-M
494 architectures.
495
496 From ARMv6-M Reference Manual B1.5.8
497 Table B1-5 Exception return behavior
498
499 EXC_RETURN Return To Return Stack
500 0xFFFFFFF1 Handler mode Main
501 0xFFFFFFF9 Thread mode Main
502 0xFFFFFFFD Thread mode Process
503
504 From ARMv7-M Reference Manual B1.5.8
505 Table B1-8 EXC_RETURN definition of exception return behavior, no FP
506
507 EXC_RETURN Return To Return Stack
508 0xFFFFFFF1 Handler mode Main
509 0xFFFFFFF9 Thread mode Main
510 0xFFFFFFFD Thread mode Process
511
512 Table B1-9 EXC_RETURN definition of exception return behavior, with
513 FP
514
515 EXC_RETURN Return To Return Stack Frame Type
516 0xFFFFFFE1 Handler mode Main Extended
517 0xFFFFFFE9 Thread mode Main Extended
518 0xFFFFFFED Thread mode Process Extended
519 0xFFFFFFF1 Handler mode Main Basic
520 0xFFFFFFF9 Thread mode Main Basic
521 0xFFFFFFFD Thread mode Process Basic
522
523 For more details see "B1.5.8 Exception return behavior"
524 in both ARMv6-M and ARMv7-M Architecture Reference Manuals.
525
526 The ARMv8-M Architecture Technical Reference additionally defines the
527 following values for implementations without the Security Extension:
528
529 EXC_RETURN Condition
530 0xFFFFFFB0 Return to Handler mode.
531 0xFFFFFFB8 Return to Thread mode using the main stack.
532 0xFFFFFFBC Return to Thread mode using the process stack. */
533
534 static int
535 arm_m_addr_is_magic (CORE_ADDR addr)
536 {
537 switch (addr)
538 {
539 /* Values from ARMv8-M Architecture Technical Reference. */
540 case 0xffffffb0:
541 case 0xffffffb8:
542 case 0xffffffbc:
543 /* Values from Tables in B1.5.8 the EXC_RETURN definitions of
544 the exception return behavior. */
545 case 0xffffffe1:
546 case 0xffffffe9:
547 case 0xffffffed:
548 case 0xfffffff1:
549 case 0xfffffff9:
550 case 0xfffffffd:
551 /* Address is magic. */
552 return 1;
553
554 default:
555 /* Address is not magic. */
556 return 0;
557 }
558 }
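
/* For example, when a Cortex-M exception handler is entered, LR is loaded
   with one of the values above (say 0xfffffff9); arm_m_addr_is_magic returns
   1 for that value, while it returns 0 for an ordinary code address such as
   0x08000400, so callers can tell an exception return from a normal branch
   target.  */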
559
560 /* Remove useless bits from addresses in a running program. */
561 static CORE_ADDR
562 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
563 {
564 /* On M-profile devices, do not strip the low bit from EXC_RETURN
565 (the magic exception return address). */
566 if (gdbarch_tdep (gdbarch)->is_m
567 && arm_m_addr_is_magic (val))
568 return val;
569
570 if (arm_apcs_32)
571 return UNMAKE_THUMB_ADDR (val);
572 else
573 return (val & 0x03fffffc);
574 }
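
/* For example, with arm_apcs_32 set, arm_addr_bits_remove turns the Thumb
   address 0x00008001 into 0x00008000, while on M-profile targets a magic
   EXC_RETURN value such as 0xfffffffd is returned unchanged so that the
   exception-return unwinding machinery can still recognize it.  */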
575
576 /* Return 1 if PC is the start of a compiler helper function which
577 can be safely ignored during prologue skipping. IS_THUMB is true
578 if the function is known to be a Thumb function due to the way it
579 is being called. */
580 static int
581 skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
582 {
583 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
584 struct bound_minimal_symbol msym;
585
586 msym = lookup_minimal_symbol_by_pc (pc);
587 if (msym.minsym != NULL
588 && BMSYMBOL_VALUE_ADDRESS (msym) == pc
589 && msym.minsym->linkage_name () != NULL)
590 {
591 const char *name = msym.minsym->linkage_name ();
592
593 /* The GNU linker's Thumb call stub to foo is named
594 __foo_from_thumb. */
595 if (strstr (name, "_from_thumb") != NULL)
596 name += 2;
597
598 /* On soft-float targets, __truncdfsf2 is called to convert promoted
599 arguments to their argument types in non-prototyped
600 functions. */
601 if (startswith (name, "__truncdfsf2"))
602 return 1;
603 if (startswith (name, "__aeabi_d2f"))
604 return 1;
605
606 /* Internal functions related to thread-local storage. */
607 if (startswith (name, "__tls_get_addr"))
608 return 1;
609 if (startswith (name, "__aeabi_read_tp"))
610 return 1;
611 }
612 else
613 {
614 /* If we run against a stripped glibc, we may be unable to identify
615 special functions by name. Check for one important case,
616 __aeabi_read_tp, by comparing the *code* against the default
617 implementation (this is hand-written ARM assembler in glibc). */
618
619 if (!is_thumb
620 && read_code_unsigned_integer (pc, 4, byte_order_for_code)
621 == 0xe3e00a0f /* mov r0, #0xffff0fff */
622 && read_code_unsigned_integer (pc + 4, 4, byte_order_for_code)
623 == 0xe240f01f) /* sub pc, r0, #31 */
624 return 1;
625 }
626
627 return 0;
628 }
629
630 /* Extract the immediate from a movw/movt instruction of encoding T. INSN1 is
631 the first 16 bits of the instruction, and INSN2 is the second 16 bits of
632 the instruction. */
633 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
634 ((bits ((insn1), 0, 3) << 12) \
635 | (bits ((insn1), 10, 10) << 11) \
636 | (bits ((insn2), 12, 14) << 8) \
637 | bits ((insn2), 0, 7))
638
639 /* Extract the immediate from a movw/movt instruction of encoding A. INSN is
640 the 32-bit instruction. */
641 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \
642 ((bits ((insn), 16, 19) << 12) \
643 | bits ((insn), 0, 11))
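
/* For example, "movw r0, #0x1234" in Thumb state (encoding T3) is the
   halfword pair 0xf241 0x2034, where imm4=0x1, i=0, imm3=0x2 and imm8=0x34,
   so EXTRACT_MOVW_MOVT_IMM_T (0xf241, 0x2034) yields 0x1234.  The same
   constant in ARM state, "movw r0, #0x1234" (encoding A2), is 0xe3010234,
   and EXTRACT_MOVW_MOVT_IMM_A (0xe3010234) also yields 0x1234.  */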
644
645 /* Decode immediate value; implements ThumbExpandImmediate pseudo-op. */
646
647 static unsigned int
648 thumb_expand_immediate (unsigned int imm)
649 {
650 unsigned int count = imm >> 7;
651
652 if (count < 8)
653 switch (count / 2)
654 {
655 case 0:
656 return imm & 0xff;
657 case 1:
658 return (imm & 0xff) | ((imm & 0xff) << 16);
659 case 2:
660 return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
661 case 3:
662 return (imm & 0xff) | ((imm & 0xff) << 8)
663 | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
664 }
665
666 return (0x80 | (imm & 0x7f)) << (32 - count);
667 }
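
/* A few worked examples of the expansion above:
     thumb_expand_immediate (0x0ab) == 0x000000ab
     thumb_expand_immediate (0x1ab) == 0x00ab00ab
     thumb_expand_immediate (0x2ab) == 0xab00ab00
     thumb_expand_immediate (0x3ab) == 0xabababab
     thumb_expand_immediate (0x4ab) == 0x55800000 (0xab rotated right by 9).  */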
668
669 /* Return 1 if the 16-bit Thumb instruction INSN restores SP in
670 the epilogue, 0 otherwise. */
671
672 static int
673 thumb_instruction_restores_sp (unsigned short insn)
674 {
675 return (insn == 0x46bd /* mov sp, r7 */
676 || (insn & 0xff80) == 0xb000 /* add sp, imm */
677 || (insn & 0xfe00) == 0xbc00); /* pop <registers> */
678 }
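
/* For example, "pop {r4, r7, pc}" (0xbd90) and "add sp, #16" (0xb004) both
   match the patterns above, while "push {r4, lr}" (0xb510) does not.  */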
679
680 /* Analyze a Thumb prologue, looking for a recognizable stack frame
681 and frame pointer. Scan until we encounter a store that could
682 clobber the stack frame unexpectedly, or an unknown instruction.
683 Return the last address which is definitely safe to skip for an
684 initial breakpoint. */
685
686 static CORE_ADDR
687 thumb_analyze_prologue (struct gdbarch *gdbarch,
688 CORE_ADDR start, CORE_ADDR limit,
689 struct arm_prologue_cache *cache)
690 {
691 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
692 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
693 int i;
694 pv_t regs[16];
695 CORE_ADDR offset;
696 CORE_ADDR unrecognized_pc = 0;
697
698 for (i = 0; i < 16; i++)
699 regs[i] = pv_register (i, 0);
700 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
701
702 while (start < limit)
703 {
704 unsigned short insn;
705
706 insn = read_code_unsigned_integer (start, 2, byte_order_for_code);
707
708 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
709 {
710 int regno;
711 int mask;
712
713 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
714 break;
715
716 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
717 whether to save LR (R14). */
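/* For example, "push {r4, r7, lr}" is 0xb590: bits 0-7 give 0x90 (r4
   and r7) and bit 8 contributes LR, so mask becomes 0x4090 and the loop
   below records stack slots for r4, r7 and lr.  */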
718 mask = (insn & 0xff) | ((insn & 0x100) << 6);
719
720 /* Calculate offsets of saved R0-R7 and LR. */
721 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
722 if (mask & (1 << regno))
723 {
724 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
725 -4);
726 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
727 }
728 }
729 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
730 {
731 offset = (insn & 0x7f) << 2; /* get scaled offset */
732 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
733 -offset);
734 }
735 else if (thumb_instruction_restores_sp (insn))
736 {
737 /* Don't scan past the epilogue. */
738 break;
739 }
740 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
741 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
742 (insn & 0xff) << 2);
743 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
744 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
745 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
746 bits (insn, 6, 8));
747 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
748 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
749 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
750 bits (insn, 0, 7));
751 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
752 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
753 && pv_is_constant (regs[bits (insn, 3, 5)]))
754 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
755 regs[bits (insn, 6, 8)]);
756 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
757 && pv_is_constant (regs[bits (insn, 3, 6)]))
758 {
759 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
760 int rm = bits (insn, 3, 6);
761 regs[rd] = pv_add (regs[rd], regs[rm]);
762 }
763 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
764 {
765 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
766 int src_reg = (insn & 0x78) >> 3;
767 regs[dst_reg] = regs[src_reg];
768 }
769 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
770 {
771 /* Handle stores to the stack. Normally pushes are used,
772 but with GCC -mtpcs-frame, there may be other stores
773 in the prologue to create the frame. */
774 int regno = (insn >> 8) & 0x7;
775 pv_t addr;
776
777 offset = (insn & 0xff) << 2;
778 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
779
780 if (stack.store_would_trash (addr))
781 break;
782
783 stack.store (addr, 4, regs[regno]);
784 }
785 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
786 {
787 int rd = bits (insn, 0, 2);
788 int rn = bits (insn, 3, 5);
789 pv_t addr;
790
791 offset = bits (insn, 6, 10) << 2;
792 addr = pv_add_constant (regs[rn], offset);
793
794 if (stack.store_would_trash (addr))
795 break;
796
797 stack.store (addr, 4, regs[rd]);
798 }
799 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
800 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
801 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
802 /* Ignore stores of argument registers to the stack. */
803 ;
804 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
805 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
806 /* Ignore block loads from the stack, potentially copying
807 parameters from memory. */
808 ;
809 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
810 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
811 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
812 /* Similarly ignore single loads from the stack. */
813 ;
814 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
815 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
816 /* Skip register copies, i.e. saves to another register
817 instead of the stack. */
818 ;
819 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
820 /* Recognize constant loads; even with small stacks these are necessary
821 on Thumb. */
822 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
823 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
824 {
825 /* Constant pool loads, for the same reason. */
826 unsigned int constant;
827 CORE_ADDR loc;
828
829 loc = start + 4 + bits (insn, 0, 7) * 4;
830 constant = read_memory_unsigned_integer (loc, 4, byte_order);
831 regs[bits (insn, 8, 10)] = pv_constant (constant);
832 }
833 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
834 {
835 unsigned short inst2;
836
837 inst2 = read_code_unsigned_integer (start + 2, 2,
838 byte_order_for_code);
839
840 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
841 {
842 /* BL, BLX. Allow some special function calls when
843 skipping the prologue; GCC generates these before
844 storing arguments to the stack. */
845 CORE_ADDR nextpc;
846 int j1, j2, imm1, imm2;
847
848 imm1 = sbits (insn, 0, 10);
849 imm2 = bits (inst2, 0, 10);
850 j1 = bit (inst2, 13);
851 j2 = bit (inst2, 11);
852
853 offset = ((imm1 << 12) + (imm2 << 1));
854 offset ^= ((!j2) << 22) | ((!j1) << 23);
855
856 nextpc = start + 4 + offset;
857 /* For BLX make sure to clear the low bits. */
858 if (bit (inst2, 12) == 0)
859 nextpc = nextpc & 0xfffffffc;
860
861 if (!skip_prologue_function (gdbarch, nextpc,
862 bit (inst2, 12) != 0))
863 break;
864 }
865
866 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
867 { registers } */
868 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
869 {
870 pv_t addr = regs[bits (insn, 0, 3)];
871 int regno;
872
873 if (stack.store_would_trash (addr))
874 break;
875
876 /* Calculate offsets of saved registers. */
877 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
878 if (inst2 & (1 << regno))
879 {
880 addr = pv_add_constant (addr, -4);
881 stack.store (addr, 4, regs[regno]);
882 }
883
884 if (insn & 0x0020)
885 regs[bits (insn, 0, 3)] = addr;
886 }
887
888 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
889 [Rn, #+/-imm]{!} */
890 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
891 {
892 int regno1 = bits (inst2, 12, 15);
893 int regno2 = bits (inst2, 8, 11);
894 pv_t addr = regs[bits (insn, 0, 3)];
895
896 offset = inst2 & 0xff;
897 if (insn & 0x0080)
898 addr = pv_add_constant (addr, offset);
899 else
900 addr = pv_add_constant (addr, -offset);
901
902 if (stack.store_would_trash (addr))
903 break;
904
905 stack.store (addr, 4, regs[regno1]);
906 stack.store (pv_add_constant (addr, 4),
907 4, regs[regno2]);
908
909 if (insn & 0x0020)
910 regs[bits (insn, 0, 3)] = addr;
911 }
912
913 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
914 && (inst2 & 0x0c00) == 0x0c00
915 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
916 {
917 int regno = bits (inst2, 12, 15);
918 pv_t addr = regs[bits (insn, 0, 3)];
919
920 offset = inst2 & 0xff;
921 if (inst2 & 0x0200)
922 addr = pv_add_constant (addr, offset);
923 else
924 addr = pv_add_constant (addr, -offset);
925
926 if (stack.store_would_trash (addr))
927 break;
928
929 stack.store (addr, 4, regs[regno]);
930
931 if (inst2 & 0x0100)
932 regs[bits (insn, 0, 3)] = addr;
933 }
934
935 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
936 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
937 {
938 int regno = bits (inst2, 12, 15);
939 pv_t addr;
940
941 offset = inst2 & 0xfff;
942 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
943
944 if (stack.store_would_trash (addr))
945 break;
946
947 stack.store (addr, 4, regs[regno]);
948 }
949
950 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
951 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
952 /* Ignore stores of argument registers to the stack. */
953 ;
954
955 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
956 && (inst2 & 0x0d00) == 0x0c00
957 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
958 /* Ignore stores of argument registers to the stack. */
959 ;
960
961 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
962 { registers } */
963 && (inst2 & 0x8000) == 0x0000
964 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
965 /* Ignore block loads from the stack, potentially copying
966 parameters from memory. */
967 ;
968
969 else if ((insn & 0xff70) == 0xe950 /* ldrd Rt, Rt2,
970 [Rn, #+/-imm] */
971 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
972 /* Similarly ignore dual loads from the stack. */
973 ;
974
975 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
976 && (inst2 & 0x0d00) == 0x0c00
977 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
978 /* Similarly ignore single loads from the stack. */
979 ;
980
981 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
982 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
983 /* Similarly ignore single loads from the stack. */
984 ;
985
986 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
987 && (inst2 & 0x8000) == 0x0000)
988 {
989 unsigned int imm = ((bits (insn, 10, 10) << 11)
990 | (bits (inst2, 12, 14) << 8)
991 | bits (inst2, 0, 7));
992
993 regs[bits (inst2, 8, 11)]
994 = pv_add_constant (regs[bits (insn, 0, 3)],
995 thumb_expand_immediate (imm));
996 }
997
998 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
999 && (inst2 & 0x8000) == 0x0000)
1000 {
1001 unsigned int imm = ((bits (insn, 10, 10) << 11)
1002 | (bits (inst2, 12, 14) << 8)
1003 | bits (inst2, 0, 7));
1004
1005 regs[bits (inst2, 8, 11)]
1006 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
1007 }
1008
1009 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
1010 && (inst2 & 0x8000) == 0x0000)
1011 {
1012 unsigned int imm = ((bits (insn, 10, 10) << 11)
1013 | (bits (inst2, 12, 14) << 8)
1014 | bits (inst2, 0, 7));
1015
1016 regs[bits (inst2, 8, 11)]
1017 = pv_add_constant (regs[bits (insn, 0, 3)],
1018 - (CORE_ADDR) thumb_expand_immediate (imm));
1019 }
1020
1021 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
1022 && (inst2 & 0x8000) == 0x0000)
1023 {
1024 unsigned int imm = ((bits (insn, 10, 10) << 11)
1025 | (bits (inst2, 12, 14) << 8)
1026 | bits (inst2, 0, 7));
1027
1028 regs[bits (inst2, 8, 11)]
1029 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
1030 }
1031
1032 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
1033 {
1034 unsigned int imm = ((bits (insn, 10, 10) << 11)
1035 | (bits (inst2, 12, 14) << 8)
1036 | bits (inst2, 0, 7));
1037
1038 regs[bits (inst2, 8, 11)]
1039 = pv_constant (thumb_expand_immediate (imm));
1040 }
1041
1042 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
1043 {
1044 unsigned int imm
1045 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
1046
1047 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1048 }
1049
1050 else if (insn == 0xea5f /* mov.w Rd,Rm */
1051 && (inst2 & 0xf0f0) == 0)
1052 {
1053 int dst_reg = (inst2 & 0x0f00) >> 8;
1054 int src_reg = inst2 & 0xf;
1055 regs[dst_reg] = regs[src_reg];
1056 }
1057
1058 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1059 {
1060 /* Constant pool loads. */
1061 unsigned int constant;
1062 CORE_ADDR loc;
1063
1064 offset = bits (inst2, 0, 11);
1065 if (insn & 0x0080)
1066 loc = start + 4 + offset;
1067 else
1068 loc = start + 4 - offset;
1069
1070 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1071 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1072 }
1073
1074 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1075 {
1076 /* Constant pool loads. */
1077 unsigned int constant;
1078 CORE_ADDR loc;
1079
1080 offset = bits (inst2, 0, 7) << 2;
1081 if (insn & 0x0080)
1082 loc = start + 4 + offset;
1083 else
1084 loc = start + 4 - offset;
1085
1086 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1087 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1088
1089 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1090 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1091 }
1092
1093 else if (thumb2_instruction_changes_pc (insn, inst2))
1094 {
1095 /* Don't scan past anything that might change control flow. */
1096 break;
1097 }
1098 else
1099 {
1100 /* The optimizer might shove anything into the prologue,
1101 so we just skip what we don't recognize. */
1102 unrecognized_pc = start;
1103 }
1104
1105 start += 2;
1106 }
1107 else if (thumb_instruction_changes_pc (insn))
1108 {
1109 /* Don't scan past anything that might change control flow. */
1110 break;
1111 }
1112 else
1113 {
1114 /* The optimizer might shove anything into the prologue,
1115 so we just skip what we don't recognize. */
1116 unrecognized_pc = start;
1117 }
1118
1119 start += 2;
1120 }
1121
1122 if (arm_debug)
1123 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1124 paddress (gdbarch, start));
1125
1126 if (unrecognized_pc == 0)
1127 unrecognized_pc = start;
1128
1129 if (cache == NULL)
1130 return unrecognized_pc;
1131
1132 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1133 {
1134 /* Frame pointer is fp. Frame size is constant. */
1135 cache->framereg = ARM_FP_REGNUM;
1136 cache->framesize = -regs[ARM_FP_REGNUM].k;
1137 }
1138 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1139 {
1140 /* Frame pointer is r7. Frame size is constant. */
1141 cache->framereg = THUMB_FP_REGNUM;
1142 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1143 }
1144 else
1145 {
1146 /* Try the stack pointer... this is a bit desperate. */
1147 cache->framereg = ARM_SP_REGNUM;
1148 cache->framesize = -regs[ARM_SP_REGNUM].k;
1149 }
1150
1151 for (i = 0; i < 16; i++)
1152 if (stack.find_reg (gdbarch, i, &offset))
1153 cache->saved_regs[i].addr = offset;
1154
1155 return unrecognized_pc;
1156 }
1157
1158
1159 /* Try to analyze the instructions starting from PC, which load symbol
1160 __stack_chk_guard. If the instructions are recognized, return the address
1161 of __stack_chk_guard, set the destination register number in *DESTREG, and
1162 set the size in bytes of the instructions loading the symbol in *OFFSET.
1163 Return 0 if the instructions are not recognized. */
1164
1165 static CORE_ADDR
1166 arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
1167 unsigned int *destreg, int *offset)
1168 {
1169 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1170 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1171 unsigned int low, high, address;
1172
1173 address = 0;
1174 if (is_thumb)
1175 {
1176 unsigned short insn1
1177 = read_code_unsigned_integer (pc, 2, byte_order_for_code);
1178
1179 if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
1180 {
1181 *destreg = bits (insn1, 8, 10);
1182 *offset = 2;
1183 address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
1184 address = read_memory_unsigned_integer (address, 4,
1185 byte_order_for_code);
1186 }
1187 else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
1188 {
1189 unsigned short insn2
1190 = read_code_unsigned_integer (pc + 2, 2, byte_order_for_code);
1191
1192 low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1193
1194 insn1
1195 = read_code_unsigned_integer (pc + 4, 2, byte_order_for_code);
1196 insn2
1197 = read_code_unsigned_integer (pc + 6, 2, byte_order_for_code);
1198
1199 /* movt Rd, #const */
1200 if ((insn1 & 0xfbc0) == 0xf2c0)
1201 {
1202 high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1203 *destreg = bits (insn2, 8, 11);
1204 *offset = 8;
1205 address = (high << 16 | low);
1206 }
1207 }
1208 }
1209 else
1210 {
1211 unsigned int insn
1212 = read_code_unsigned_integer (pc, 4, byte_order_for_code);
1213
1214 if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
1215 {
1216 address = bits (insn, 0, 11) + pc + 8;
1217 address = read_memory_unsigned_integer (address, 4,
1218 byte_order_for_code);
1219
1220 *destreg = bits (insn, 12, 15);
1221 *offset = 4;
1222 }
1223 else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1224 {
1225 low = EXTRACT_MOVW_MOVT_IMM_A (insn);
1226
1227 insn
1228 = read_code_unsigned_integer (pc + 4, 4, byte_order_for_code);
1229
1230 if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1231 {
1232 high = EXTRACT_MOVW_MOVT_IMM_A (insn);
1233 *destreg = bits (insn, 12, 15);
1234 *offset = 8;
1235 address = (high << 16 | low);
1236 }
1237 }
1238 }
1239
1240 return address;
1241 }
1242
1243 /* Try to skip a sequence of instructions used for stack protector. If PC
1244 points to the first instruction of this sequence, return the address of the
1245 first instruction after this sequence; otherwise, return the original PC.
1246
1247 On ARM, this sequence of instructions is composed mainly of three steps:
1248 Step 1: load symbol __stack_chk_guard,
1249 Step 2: load from address of __stack_chk_guard,
1250 Step 3: store it to somewhere else.
1251
1252 Usually, the instructions in step 2 and step 3 are the same across ARM
1253 architectures. In step 2, it is a single instruction, 'ldr Rx, [Rn, #0]',
1254 and in step 3 it is also a single instruction, 'str Rx, [r7, #immd]'.
1255 However, the instructions in step 1 vary between ARM architectures. On ARMv7,
1256 they are,
1257
1258 movw Rn, #:lower16:__stack_chk_guard
1259 movt Rn, #:upper16:__stack_chk_guard
1260
1261 On ARMv5t, it is,
1262
1263 ldr Rn, .Label
1264 ....
1265 .Label:
1266 .word __stack_chk_guard
1267
1268 Since ldr/str are very common instructions, we can't use them alone as the
1269 'fingerprint' or 'signature' of a stack protector sequence. Here we choose
1270 the sequence {movw/movt, ldr}/ldr/str plus the symbol __stack_chk_guard, if
1271 not stripped, as the 'fingerprint' of a stack protector code sequence. */
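
/* A typical ARMv7 Thumb-2 instance of the sequence, as emitted by GCC
   (register numbers are only illustrative), is:

       movw r3, #:lower16:__stack_chk_guard
       movt r3, #:upper16:__stack_chk_guard
       ldr  r3, [r3, #0]
       str  r3, [r7, #12]

   and arm_skip_stack_protector below returns the address just past the
   final str.  */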
1272
1273 static CORE_ADDR
1274 arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
1275 {
1276 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1277 unsigned int basereg;
1278 struct bound_minimal_symbol stack_chk_guard;
1279 int offset;
1280 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1281 CORE_ADDR addr;
1282
1283 /* Try to parse the instructions in Step 1. */
1284 addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1285 &basereg, &offset);
1286 if (!addr)
1287 return pc;
1288
1289 stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
1290 /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
1291 Otherwise, this sequence cannot be a stack protector sequence. */
1292 if (stack_chk_guard.minsym == NULL
1293 || !startswith (stack_chk_guard.minsym->linkage_name (), "__stack_chk_guard"))
1294 return pc;
1295
1296 if (is_thumb)
1297 {
1298 unsigned int destreg;
1299 unsigned short insn
1300 = read_code_unsigned_integer (pc + offset, 2, byte_order_for_code);
1301
1302 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1303 if ((insn & 0xf800) != 0x6800)
1304 return pc;
1305 if (bits (insn, 3, 5) != basereg)
1306 return pc;
1307 destreg = bits (insn, 0, 2);
1308
1309 insn = read_code_unsigned_integer (pc + offset + 2, 2,
1310 byte_order_for_code);
1311 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1312 if ((insn & 0xf800) != 0x6000)
1313 return pc;
1314 if (destreg != bits (insn, 0, 2))
1315 return pc;
1316 }
1317 else
1318 {
1319 unsigned int destreg;
1320 unsigned int insn
1321 = read_code_unsigned_integer (pc + offset, 4, byte_order_for_code);
1322
1323 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1324 if ((insn & 0x0e500000) != 0x04100000)
1325 return pc;
1326 if (bits (insn, 16, 19) != basereg)
1327 return pc;
1328 destreg = bits (insn, 12, 15);
1329 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1330 insn = read_code_unsigned_integer (pc + offset + 4,
1331 4, byte_order_for_code);
1332 if ((insn & 0x0e500000) != 0x04000000)
1333 return pc;
1334 if (bits (insn, 12, 15) != destreg)
1335 return pc;
1336 }
1337 /* The total size of the two ldr/str instructions is 4 bytes on Thumb-2,
1338 and 8 bytes on ARM. */
1339 if (is_thumb)
1340 return pc + offset + 4;
1341 else
1342 return pc + offset + 8;
1343 }
1344
1345 /* Advance the PC across any function entry prologue instructions to
1346 reach some "real" code.
1347
1348 The APCS (ARM Procedure Call Standard) defines the following
1349 prologue:
1350
1351 mov ip, sp
1352 [stmfd sp!, {a1,a2,a3,a4}]
1353 stmfd sp!, {...,fp,ip,lr,pc}
1354 [stfe f7, [sp, #-12]!]
1355 [stfe f6, [sp, #-12]!]
1356 [stfe f5, [sp, #-12]!]
1357 [stfe f4, [sp, #-12]!]
1358 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
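
/* As a concrete instance of the store-multiple above, "stmfd sp!, {fp, ip,
   lr, pc}" assembles to 0xe92dd800, which is matched by the 0xe92d0000
   pattern recognized in arm_analyze_prologue.  */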
1359
1360 static CORE_ADDR
1361 arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1362 {
1363 CORE_ADDR func_addr, limit_pc;
1364
1365 /* See if we can determine the end of the prologue via the symbol table.
1366 If so, then return either PC, or the PC after the prologue, whichever
1367 is greater. */
1368 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
1369 {
1370 CORE_ADDR post_prologue_pc
1371 = skip_prologue_using_sal (gdbarch, func_addr);
1372 struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);
1373
1374 if (post_prologue_pc)
1375 post_prologue_pc
1376 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1377
1378
1379 /* GCC always emits a line note before the prologue and another
1380 one after, even if the two are at the same address or on the
1381 same line. Take advantage of this so that we do not need to
1382 know every instruction that might appear in the prologue. We
1383 will have producer information for most binaries; if it is
1384 missing (e.g. for -gstabs), assume the GNU tools. */
1385 if (post_prologue_pc
1386 && (cust == NULL
1387 || COMPUNIT_PRODUCER (cust) == NULL
1388 || startswith (COMPUNIT_PRODUCER (cust), "GNU ")
1389 || producer_is_llvm (COMPUNIT_PRODUCER (cust))))
1390 return post_prologue_pc;
1391
1392 if (post_prologue_pc != 0)
1393 {
1394 CORE_ADDR analyzed_limit;
1395
1396 /* For non-GCC compilers, make sure the entire line is an
1397 acceptable prologue; GDB will round this function's
1398 return value up to the end of the following line so we
1399 can not skip just part of a line (and we do not want to).
1400
1401 RealView does not treat the prologue specially, but does
1402 associate prologue code with the opening brace; so this
1403 lets us skip the first line if we think it is the opening
1404 brace. */
1405 if (arm_pc_is_thumb (gdbarch, func_addr))
1406 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1407 post_prologue_pc, NULL);
1408 else
1409 analyzed_limit
1410 = arm_analyze_prologue (gdbarch, func_addr, post_prologue_pc,
1411 NULL, target_arm_instruction_reader ());
1412
1413 if (analyzed_limit != post_prologue_pc)
1414 return func_addr;
1415
1416 return post_prologue_pc;
1417 }
1418 }
1419
1420 /* Can't determine prologue from the symbol table, need to examine
1421 instructions. */
1422
1423 /* Find an upper limit on the function prologue using the debug
1424 information. If the debug information could not be used to provide
1425 that bound, then use an arbitrary large number as the upper bound. */
1426 /* Like arm_scan_prologue, stop no later than pc + 64. */
1427 limit_pc = skip_prologue_using_sal (gdbarch, pc);
1428 if (limit_pc == 0)
1429 limit_pc = pc + 64; /* Magic. */
1430
1431
1432 /* Check if this is Thumb code. */
1433 if (arm_pc_is_thumb (gdbarch, pc))
1434 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1435 else
1436 return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL,
1437 target_arm_instruction_reader ());
1438 }
1439
1440 /* *INDENT-OFF* */
1441 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1442 This function decodes a Thumb function prologue to determine:
1443 1) the size of the stack frame
1444 2) which registers are saved on it
1445 3) the offsets of saved regs
1446 4) the offset from the stack pointer to the frame pointer
1447
1448 A typical Thumb function prologue would create this stack frame
1449 (offsets relative to FP)
1450 old SP -> 24 stack parameters
1451 20 LR
1452 16 R7
1453 R7 -> 0 local variables (16 bytes)
1454 SP -> -12 additional stack space (12 bytes)
1455 The frame size would thus be 36 bytes, and the frame offset would be
1456 12 bytes. The frame register is R7.
1457
1458 The comments for thumb_analyze_prologue() describe the algorithm we use
1459 to detect the end of the prologue. */
1460 /* *INDENT-ON* */
1461
1462 static void
1463 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1464 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1465 {
1466 CORE_ADDR prologue_start;
1467 CORE_ADDR prologue_end;
1468
1469 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1470 &prologue_end))
1471 {
1472 /* See comment in arm_scan_prologue for an explanation of
1473 this heuristic. */
1474 if (prologue_end > prologue_start + 64)
1475 {
1476 prologue_end = prologue_start + 64;
1477 }
1478 }
1479 else
1480 /* We're in the boondocks: we have no idea where the start of the
1481 function is. */
1482 return;
1483
1484 prologue_end = std::min (prologue_end, prev_pc);
1485
1486 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1487 }
1488
1489 /* Return 1 if the ARM instruction INSN restores SP in the epilogue, 0
1490 otherwise. */
1491
1492 static int
1493 arm_instruction_restores_sp (unsigned int insn)
1494 {
1495 if (bits (insn, 28, 31) != INST_NV)
1496 {
1497 if ((insn & 0x0df0f000) == 0x0080d000
1498 /* ADD SP (register or immediate). */
1499 || (insn & 0x0df0f000) == 0x0040d000
1500 /* SUB SP (register or immediate). */
1501 || (insn & 0x0ffffff0) == 0x01a0d000
1502 /* MOV SP. */
1503 || (insn & 0x0fff0000) == 0x08bd0000
1504 /* POP (LDMIA). */
1505 || (insn & 0x0fff0000) == 0x049d0000)
1506 /* POP of a single register. */
1507 return 1;
1508 }
1509
1510 return 0;
1511 }
1512
1513 /* Implement immediate value decoding, as described in section A5.2.4
1514 (Modified immediate constants in ARM instructions) of the ARM Architecture
1515 Reference Manual (ARMv7-A and ARMv7-R edition). */
1516
1517 static uint32_t
1518 arm_expand_immediate (uint32_t imm)
1519 {
1520 /* Immediate values are 12 bits long. */
1521 gdb_assert ((imm & 0xfffff000) == 0);
1522
1523 uint32_t unrotated_value = imm & 0xff;
1524 uint32_t rotate_amount = (imm & 0xf00) >> 7;
1525
1526 if (rotate_amount == 0)
1527 return unrotated_value;
1528
1529 return ((unrotated_value >> rotate_amount)
1530 | (unrotated_value << (32 - rotate_amount)));
1531 }
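
/* Worked examples of the expansion above:
     arm_expand_immediate (0x0ff) == 0x000000ff   (no rotation)
     arm_expand_immediate (0x2ff) == 0xf000000f   (0xff rotated right by 4)
     arm_expand_immediate (0x4ff) == 0xff000000   (0xff rotated right by 8).  */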
1532
1533 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1534 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1535 fill it in. Return the first address not recognized as a prologue
1536 instruction.
1537
1538 We recognize all the instructions typically found in ARM prologues,
1539 plus harmless instructions which can be skipped (either for analysis
1540 purposes, or a more restrictive set that can be skipped when finding
1541 the end of the prologue). */
1542
1543 static CORE_ADDR
1544 arm_analyze_prologue (struct gdbarch *gdbarch,
1545 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1546 struct arm_prologue_cache *cache,
1547 const arm_instruction_reader &insn_reader)
1548 {
1549 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1550 int regno;
1551 CORE_ADDR offset, current_pc;
1552 pv_t regs[ARM_FPS_REGNUM];
1553 CORE_ADDR unrecognized_pc = 0;
1554
1555 /* Search the prologue looking for instructions that set up the
1556 frame pointer, adjust the stack pointer, and save registers.
1557
1558 Be careful, however, and if it doesn't look like a prologue,
1559 don't try to scan it. If, for instance, a frameless function
1560 begins with stmfd sp!, then we will tell ourselves there is
1561 a frame, which will confuse stack traceback, as well as "finish"
1562 and other operations that rely on a knowledge of the stack
1563 traceback. */
1564
1565 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1566 regs[regno] = pv_register (regno, 0);
1567 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1568
1569 for (current_pc = prologue_start;
1570 current_pc < prologue_end;
1571 current_pc += 4)
1572 {
1573 uint32_t insn = insn_reader.read (current_pc, byte_order_for_code);
1574
1575 if (insn == 0xe1a0c00d) /* mov ip, sp */
1576 {
1577 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1578 continue;
1579 }
1580 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1581 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1582 {
1583 uint32_t imm = arm_expand_immediate (insn & 0xfff);
1584 int rd = bits (insn, 12, 15);
1585 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1586 continue;
1587 }
1588 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1589 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1590 {
1591 uint32_t imm = arm_expand_immediate (insn & 0xfff);
1592 int rd = bits (insn, 12, 15);
1593 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1594 continue;
1595 }
1596 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1597 [sp, #-4]! */
1598 {
1599 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1600 break;
1601 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1602 stack.store (regs[ARM_SP_REGNUM], 4,
1603 regs[bits (insn, 12, 15)]);
1604 continue;
1605 }
1606 else if ((insn & 0xffff0000) == 0xe92d0000)
1607 /* stmfd sp!, {..., fp, ip, lr, pc}
1608 or
1609 stmfd sp!, {a1, a2, a3, a4} */
1610 {
1611 int mask = insn & 0xffff;
1612
1613 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1614 break;
1615
1616 /* Calculate offsets of saved registers. */
1617 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1618 if (mask & (1 << regno))
1619 {
1620 regs[ARM_SP_REGNUM]
1621 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1622 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
1623 }
1624 }
1625 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1626 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1627 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1628 {
1629 /* No need to add this to saved_regs -- it's just an arg reg. */
1630 continue;
1631 }
1632 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1633 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1634 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1635 {
1636 /* No need to add this to saved_regs -- it's just an arg reg. */
1637 continue;
1638 }
1639 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1640 { registers } */
1641 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1642 {
1643 /* No need to add this to saved_regs -- it's just arg regs. */
1644 continue;
1645 }
1646 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1647 {
1648 uint32_t imm = arm_expand_immediate (insn & 0xfff);
1649 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1650 }
1651 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1652 {
1653 uint32_t imm = arm_expand_immediate(insn & 0xfff);
1654 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
1655 }
1656 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1657 [sp, -#c]! */
1658 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1659 {
1660 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1661 break;
1662
1663 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1664 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
1665 stack.store (regs[ARM_SP_REGNUM], 12, regs[regno]);
1666 }
1667 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1668 [sp!] */
1669 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1670 {
1671 int n_saved_fp_regs;
1672 unsigned int fp_start_reg, fp_bound_reg;
1673
1674 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1675 break;
1676
1677 if ((insn & 0x800) == 0x800) /* N0 is set */
1678 {
1679 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1680 n_saved_fp_regs = 3;
1681 else
1682 n_saved_fp_regs = 1;
1683 }
1684 else
1685 {
1686 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1687 n_saved_fp_regs = 2;
1688 else
1689 n_saved_fp_regs = 4;
1690 }
1691
1692 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
1693 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1694 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
1695 {
1696 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1697 stack.store (regs[ARM_SP_REGNUM], 12,
1698 regs[fp_start_reg]);
1699 }
1700 }
1701 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1702 {
1703 /* Allow some special function calls when skipping the
1704 prologue; GCC generates these before storing arguments to
1705 the stack. */
1706 CORE_ADDR dest = BranchDest (current_pc, insn);
1707
1708 if (skip_prologue_function (gdbarch, dest, 0))
1709 continue;
1710 else
1711 break;
1712 }
1713 else if ((insn & 0xf0000000) != 0xe0000000)
1714 break; /* Condition not true, exit early. */
1715 else if (arm_instruction_changes_pc (insn))
1716 /* Don't scan past anything that might change control flow. */
1717 break;
1718 else if (arm_instruction_restores_sp (insn))
1719 {
1720 /* Don't scan past the epilogue. */
1721 break;
1722 }
1723 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1724 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1725 /* Ignore block loads from the stack, potentially copying
1726 parameters from memory. */
1727 continue;
1728 else if ((insn & 0xfc500000) == 0xe4100000
1729 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1730 /* Similarly ignore single loads from the stack. */
1731 continue;
1732 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1733 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1734 register instead of the stack. */
1735 continue;
1736 else
1737 {
1738 /* The optimizer might shove anything into the prologue. If we
1739 are building up the cache (cache != NULL) from scanning the
1740 prologue, just skip what we don't recognize and scan further, to
1741 make the cache as complete as possible. However, if we are only
1742 skipping the prologue, stop immediately at the first unrecognized
1743 instruction. */
1744 unrecognized_pc = current_pc;
1745 if (cache != NULL)
1746 continue;
1747 else
1748 break;
1749 }
1750 }
1751
1752 if (unrecognized_pc == 0)
1753 unrecognized_pc = current_pc;
1754
1755 if (cache)
1756 {
1757 int framereg, framesize;
1758
1759 /* The frame size is just the distance from the frame register
1760 to the original stack pointer. */
1761 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1762 {
1763 /* Frame pointer is fp. */
1764 framereg = ARM_FP_REGNUM;
1765 framesize = -regs[ARM_FP_REGNUM].k;
1766 }
1767 else
1768 {
1769 /* Try the stack pointer... this is a bit desperate. */
1770 framereg = ARM_SP_REGNUM;
1771 framesize = -regs[ARM_SP_REGNUM].k;
1772 }
1773
1774 cache->framereg = framereg;
1775 cache->framesize = framesize;
1776
1777 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1778 if (stack.find_reg (gdbarch, regno, &offset))
1779 cache->saved_regs[regno].addr = offset;
1780 }
1781
1782 if (arm_debug)
1783 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1784 paddress (gdbarch, unrecognized_pc));
1785
1786 return unrecognized_pc;
1787 }
1788
1789 static void
1790 arm_scan_prologue (struct frame_info *this_frame,
1791 struct arm_prologue_cache *cache)
1792 {
1793 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1794 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1795 CORE_ADDR prologue_start, prologue_end;
1796 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1797 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
1798
1799 /* Assume there is no frame until proven otherwise. */
1800 cache->framereg = ARM_SP_REGNUM;
1801 cache->framesize = 0;
1802
1803 /* Check for Thumb prologue. */
1804 if (arm_frame_is_thumb (this_frame))
1805 {
1806 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1807 return;
1808 }
1809
1810 /* Find the function prologue. If we can't find the function in
1811 the symbol table, peek in the stack frame to find the PC. */
1812 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1813 &prologue_end))
1814 {
1815 /* One way to find the end of the prologue (which works well
1816 for unoptimized code) is to do the following:
1817
1818 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1819
1820 if (sal.line == 0)
1821 prologue_end = prev_pc;
1822 else if (sal.end < prologue_end)
1823 prologue_end = sal.end;
1824
1825 This mechanism is very accurate so long as the optimizer
1826 doesn't move any instructions from the function body into the
1827 prologue. If this happens, sal.end will be the last
1828 instruction in the first hunk of prologue code just before
1829 the first instruction that the scheduler has moved from
1830 the body to the prologue.
1831
1832 In order to make sure that we scan all of the prologue
1833 instructions, we use a slightly less accurate mechanism which
1834 may scan more than necessary. To help compensate for this
1835 lack of accuracy, the prologue scanning loop below contains
1836 several clauses which'll cause the loop to terminate early if
1837 an implausible prologue instruction is encountered.
1838
1839 The expression
1840
1841 prologue_start + 64
1842
1843 is a suitable endpoint since it accounts for the largest
1844 possible prologue plus up to five instructions inserted by
1845 the scheduler. */
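/* For example, even if the line table reports a prologue end well past
   this point, the scan below is still capped at prologue_start + 64,
   i.e. the first 16 ARM instructions of the function.  */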
1846
1847 if (prologue_end > prologue_start + 64)
1848 {
1849 prologue_end = prologue_start + 64; /* See above. */
1850 }
1851 }
1852 else
1853 {
1854 /* We have no symbol information. Our only option is to assume this
1855 function has a standard stack frame and the normal frame register.
1856 Then, we can find the value of our frame pointer on entrance to
1857 the callee (or at the present moment if this is the innermost frame).
1858 The value stored there should be the address of the stmfd + 8. */
1859 CORE_ADDR frame_loc;
1860 ULONGEST return_value;
1861
1862 /* AAPCS does not use a frame register, so we can abort here. */
1863 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_AAPCS)
1864 return;
1865
1866 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
1867 if (!safe_read_memory_unsigned_integer (frame_loc, 4, byte_order,
1868 &return_value))
1869 return;
1870 else
1871 {
1872 prologue_start = gdbarch_addr_bits_remove
1873 (gdbarch, return_value) - 8;
1874 prologue_end = prologue_start + 64; /* See above. */
1875 }
1876 }
1877
1878 if (prev_pc < prologue_end)
1879 prologue_end = prev_pc;
1880
1881 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache,
1882 target_arm_instruction_reader ());
1883 }
1884
1885 static struct arm_prologue_cache *
1886 arm_make_prologue_cache (struct frame_info *this_frame)
1887 {
1888 int reg;
1889 struct arm_prologue_cache *cache;
1890 CORE_ADDR unwound_fp;
1891
1892 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
1893 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
1894
1895 arm_scan_prologue (this_frame, cache);
1896
1897 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
1898 if (unwound_fp == 0)
1899 return cache;
1900
1901 cache->prev_sp = unwound_fp + cache->framesize;
1902
1903 /* Calculate actual addresses of saved registers using offsets
1904 determined by arm_scan_prologue. */
1905 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
1906 if (trad_frame_addr_p (cache->saved_regs, reg))
1907 cache->saved_regs[reg].addr += cache->prev_sp;
1908
1909 return cache;
1910 }
1911
1912 /* Implementation of the stop_reason hook for arm_prologue frames. */
1913
1914 static enum unwind_stop_reason
1915 arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
1916 void **this_cache)
1917 {
1918 struct arm_prologue_cache *cache;
1919 CORE_ADDR pc;
1920
1921 if (*this_cache == NULL)
1922 *this_cache = arm_make_prologue_cache (this_frame);
1923 cache = (struct arm_prologue_cache *) *this_cache;
1924
1925 /* This is meant to halt the backtrace at "_start". */
1926 pc = get_frame_pc (this_frame);
1927 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
1928 return UNWIND_OUTERMOST;
1929
1930 /* If we've hit a wall, stop. */
1931 if (cache->prev_sp == 0)
1932 return UNWIND_OUTERMOST;
1933
1934 return UNWIND_NO_REASON;
1935 }
1936
1937 /* Our frame ID for a normal frame is the current function's starting PC
1938 and the caller's SP when we were called. */
1939
1940 static void
1941 arm_prologue_this_id (struct frame_info *this_frame,
1942 void **this_cache,
1943 struct frame_id *this_id)
1944 {
1945 struct arm_prologue_cache *cache;
1946 struct frame_id id;
1947 CORE_ADDR pc, func;
1948
1949 if (*this_cache == NULL)
1950 *this_cache = arm_make_prologue_cache (this_frame);
1951 cache = (struct arm_prologue_cache *) *this_cache;
1952
1953 /* Use function start address as part of the frame ID. If we cannot
1954 identify the start address (due to missing symbol information),
1955 fall back to just using the current PC. */
1956 pc = get_frame_pc (this_frame);
1957 func = get_frame_func (this_frame);
1958 if (!func)
1959 func = pc;
1960
1961 id = frame_id_build (cache->prev_sp, func);
1962 *this_id = id;
1963 }
1964
1965 static struct value *
1966 arm_prologue_prev_register (struct frame_info *this_frame,
1967 void **this_cache,
1968 int prev_regnum)
1969 {
1970 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1971 struct arm_prologue_cache *cache;
1972
1973 if (*this_cache == NULL)
1974 *this_cache = arm_make_prologue_cache (this_frame);
1975 cache = (struct arm_prologue_cache *) *this_cache;
1976
1977 /* If we are asked to unwind the PC, then we need to return the LR
1978 instead. The prologue may save PC, but it will point into this
1979 frame's prologue, not the next frame's resume location. Also
1980 strip the saved T bit. A valid LR may have the low bit set, but
1981 a valid PC never does. */
1982 if (prev_regnum == ARM_PC_REGNUM)
1983 {
1984 CORE_ADDR lr;
1985
1986 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1987 return frame_unwind_got_constant (this_frame, prev_regnum,
1988 arm_addr_bits_remove (gdbarch, lr));
1989 }
1990
1991 /* SP is generally not saved to the stack, but this frame is
1992 identified by the next frame's stack pointer at the time of the call.
1993 The value was already reconstructed into PREV_SP. */
1994 if (prev_regnum == ARM_SP_REGNUM)
1995 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
1996
1997 /* The CPSR may have been changed by the call instruction and by the
1998 called function. The only bit we can reconstruct is the T bit,
1999 by checking the low bit of LR as of the call. This is a reliable
2000 indicator of Thumb-ness except for some ARM v4T pre-interworking
2001 Thumb code, which could get away with a clear low bit as long as
2002 the called function did not use bx. Guess that all other
2003 bits are unchanged; the condition flags are presumably lost,
2004 but the processor status is likely valid. */
2005 if (prev_regnum == ARM_PS_REGNUM)
2006 {
2007 CORE_ADDR lr, cpsr;
2008 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
2009
2010 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
2011 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2012 if (IS_THUMB_ADDR (lr))
2013 cpsr |= t_bit;
2014 else
2015 cpsr &= ~t_bit;
2016 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
2017 }
2018
2019 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
2020 prev_regnum);
2021 }
2022
2023 struct frame_unwind arm_prologue_unwind = {
2024 NORMAL_FRAME,
2025 arm_prologue_unwind_stop_reason,
2026 arm_prologue_this_id,
2027 arm_prologue_prev_register,
2028 NULL,
2029 default_frame_sniffer
2030 };
2031
2032 /* Maintain a list of ARM exception table entries per objfile, similar to the
2033 list of mapping symbols. We only cache entries for standard ARM-defined
2034 personality routines; the cache will contain only the frame unwinding
2035 instructions associated with the entry (not the descriptors). */
2036
2037 struct arm_exidx_entry
2038 {
2039 CORE_ADDR addr;
2040 gdb_byte *entry;
2041
2042 bool operator< (const arm_exidx_entry &other) const
2043 {
2044 return addr < other.addr;
2045 }
2046 };
2047
2048 struct arm_exidx_data
2049 {
2050 std::vector<std::vector<arm_exidx_entry>> section_maps;
2051 };
2052
2053 /* Per-BFD key to store exception handling information. */
2054 static const struct bfd_key<arm_exidx_data> arm_exidx_data_key;
2055
2056 static struct obj_section *
2057 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2058 {
2059 struct obj_section *osect;
2060
2061 ALL_OBJFILE_OSECTIONS (objfile, osect)
2062 if (bfd_section_flags (osect->the_bfd_section) & SEC_ALLOC)
2063 {
2064 bfd_vma start, size;
2065 start = bfd_section_vma (osect->the_bfd_section);
2066 size = bfd_section_size (osect->the_bfd_section);
2067
2068 if (start <= vma && vma < start + size)
2069 return osect;
2070 }
2071
2072 return NULL;
2073 }
2074
2075 /* Parse contents of exception table and exception index sections
2076 of OBJFILE, and fill in the exception table entry cache.
2077
2078 For each entry that refers to a standard ARM-defined personality
2079 routine, extract the frame unwinding instructions (from either
2080 the index or the table section). The unwinding instructions
2081 are normalized by:
2082 - extracting them from the rest of the table data
2083 - converting to host endianness
2084 - appending the implicit 0xb0 ("Finish") code
2085
2086 The extracted and normalized instructions are stored for later
2087 retrieval by the arm_find_exidx_entry routine. */
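/* For example, a (hypothetical) short-form index entry whose data word
   is 0x80a8b0b0 yields the three unwind bytes 0xa8 0xb0 0xb0; with the
   implicit 0xb0 ("Finish") appended, the cached byte sequence becomes
   0xa8 0xb0 0xb0 0xb0.  */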
2088
2089 static void
2090 arm_exidx_new_objfile (struct objfile *objfile)
2091 {
2092 struct arm_exidx_data *data;
2093 asection *exidx, *extab;
2094 bfd_vma exidx_vma = 0, extab_vma = 0;
2095 LONGEST i;
2096
2097 /* If we've already touched this file, do nothing. */
2098 if (!objfile || arm_exidx_data_key.get (objfile->obfd) != NULL)
2099 return;
2100
2101 /* Read contents of exception table and index. */
2102 exidx = bfd_get_section_by_name (objfile->obfd, ELF_STRING_ARM_unwind);
2103 gdb::byte_vector exidx_data;
2104 if (exidx)
2105 {
2106 exidx_vma = bfd_section_vma (exidx);
2107 exidx_data.resize (bfd_section_size (exidx));
2108
2109 if (!bfd_get_section_contents (objfile->obfd, exidx,
2110 exidx_data.data (), 0,
2111 exidx_data.size ()))
2112 return;
2113 }
2114
2115 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2116 gdb::byte_vector extab_data;
2117 if (extab)
2118 {
2119 extab_vma = bfd_section_vma (extab);
2120 extab_data.resize (bfd_section_size (extab));
2121
2122 if (!bfd_get_section_contents (objfile->obfd, extab,
2123 extab_data.data (), 0,
2124 extab_data.size ()))
2125 return;
2126 }
2127
2128 /* Allocate exception table data structure. */
2129 data = arm_exidx_data_key.emplace (objfile->obfd);
2130 data->section_maps.resize (objfile->obfd->section_count);
2131
2132 /* Fill in exception table. */
2133 for (i = 0; i < exidx_data.size () / 8; i++)
2134 {
2135 struct arm_exidx_entry new_exidx_entry;
2136 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data.data () + i * 8);
2137 bfd_vma val = bfd_h_get_32 (objfile->obfd,
2138 exidx_data.data () + i * 8 + 4);
2139 bfd_vma addr = 0, word = 0;
2140 int n_bytes = 0, n_words = 0;
2141 struct obj_section *sec;
2142 gdb_byte *entry = NULL;
2143
2144 /* Extract address of start of function. */
2145 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2146 idx += exidx_vma + i * 8;
2147
2148 /* Find section containing function and compute section offset. */
2149 sec = arm_obj_section_from_vma (objfile, idx);
2150 if (sec == NULL)
2151 continue;
2152 idx -= bfd_section_vma (sec->the_bfd_section);
2153
2154 /* Determine address of exception table entry. */
2155 if (val == 1)
2156 {
2157 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2158 }
2159 else if ((val & 0xff000000) == 0x80000000)
2160 {
2161 /* Exception table entry embedded in .ARM.exidx
2162 -- must be short form. */
2163 word = val;
2164 n_bytes = 3;
2165 }
2166 else if (!(val & 0x80000000))
2167 {
2168 /* Exception table entry in .ARM.extab. */
2169 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2170 addr += exidx_vma + i * 8 + 4;
2171
2172 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_data.size ())
2173 {
2174 word = bfd_h_get_32 (objfile->obfd,
2175 extab_data.data () + addr - extab_vma);
2176 addr += 4;
2177
2178 if ((word & 0xff000000) == 0x80000000)
2179 {
2180 /* Short form. */
2181 n_bytes = 3;
2182 }
2183 else if ((word & 0xff000000) == 0x81000000
2184 || (word & 0xff000000) == 0x82000000)
2185 {
2186 /* Long form. */
2187 n_bytes = 2;
2188 n_words = ((word >> 16) & 0xff);
2189 }
2190 else if (!(word & 0x80000000))
2191 {
2192 bfd_vma pers;
2193 struct obj_section *pers_sec;
2194 int gnu_personality = 0;
2195
2196 /* Custom personality routine. */
2197 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2198 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2199
2200 /* Check whether we've got one of the variants of the
2201 GNU personality routines. */
2202 pers_sec = arm_obj_section_from_vma (objfile, pers);
2203 if (pers_sec)
2204 {
2205 static const char *personality[] =
2206 {
2207 "__gcc_personality_v0",
2208 "__gxx_personality_v0",
2209 "__gcj_personality_v0",
2210 "__gnu_objc_personality_v0",
2211 NULL
2212 };
2213
2214 CORE_ADDR pc = pers + obj_section_offset (pers_sec);
2215 int k;
2216
2217 for (k = 0; personality[k]; k++)
2218 if (lookup_minimal_symbol_by_pc_name
2219 (pc, personality[k], objfile))
2220 {
2221 gnu_personality = 1;
2222 break;
2223 }
2224 }
2225
2226 /* If so, the next word contains a word count in the high
2227 byte, followed by the same unwind instructions as the
2228 pre-defined forms. */
2229 if (gnu_personality
2230 && addr + 4 <= extab_vma + extab_data.size ())
2231 {
2232 word = bfd_h_get_32 (objfile->obfd,
2233 (extab_data.data ()
2234 + addr - extab_vma));
2235 addr += 4;
2236 n_bytes = 3;
2237 n_words = ((word >> 24) & 0xff);
2238 }
2239 }
2240 }
2241 }
2242
2243 /* Sanity check address. */
2244 if (n_words)
2245 if (addr < extab_vma
2246 || addr + 4 * n_words > extab_vma + extab_data.size ())
2247 n_words = n_bytes = 0;
2248
2249 /* The unwind instructions reside in WORD (only the N_BYTES least
2250 significant bytes are valid), followed by N_WORDS words in the
2251 extab section starting at ADDR. */
2252 if (n_bytes || n_words)
2253 {
2254 gdb_byte *p = entry
2255 = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
2256 n_bytes + n_words * 4 + 1);
2257
2258 while (n_bytes--)
2259 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2260
2261 while (n_words--)
2262 {
2263 word = bfd_h_get_32 (objfile->obfd,
2264 extab_data.data () + addr - extab_vma);
2265 addr += 4;
2266
2267 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2268 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2269 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2270 *p++ = (gdb_byte) (word & 0xff);
2271 }
2272
2273 /* Implied "Finish" to terminate the list. */
2274 *p++ = 0xb0;
2275 }
2276
2277 /* Push the entry onto the vector.  Entries are guaranteed to
2278 appear in order of increasing addresses.  */
2279 new_exidx_entry.addr = idx;
2280 new_exidx_entry.entry = entry;
2281 data->section_maps[sec->the_bfd_section->index].push_back
2282 (new_exidx_entry);
2283 }
2284 }
2285
2286 /* Search for the exception table entry covering MEMADDR. If one is found,
2287 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2288 set *START to the start of the region covered by this entry. */
2289
2290 static gdb_byte *
2291 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2292 {
2293 struct obj_section *sec;
2294
2295 sec = find_pc_section (memaddr);
2296 if (sec != NULL)
2297 {
2298 struct arm_exidx_data *data;
2299 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2300
2301 data = arm_exidx_data_key.get (sec->objfile->obfd);
2302 if (data != NULL)
2303 {
2304 std::vector<arm_exidx_entry> &map
2305 = data->section_maps[sec->the_bfd_section->index];
2306 if (!map.empty ())
2307 {
2308 auto idx = std::lower_bound (map.begin (), map.end (), map_key);
2309
2310 /* std::lower_bound finds the earliest ordered insertion
2311 point. If the following symbol starts at this exact
2312 address, we use that; otherwise, the preceding
2313 exception table entry covers this address. */
2314 if (idx < map.end ())
2315 {
2316 if (idx->addr == map_key.addr)
2317 {
2318 if (start)
2319 *start = idx->addr + obj_section_addr (sec);
2320 return idx->entry;
2321 }
2322 }
2323
2324 if (idx > map.begin ())
2325 {
2326 idx = idx - 1;
2327 if (start)
2328 *start = idx->addr + obj_section_addr (sec);
2329 return idx->entry;
2330 }
2331 }
2332 }
2333 }
2334
2335 return NULL;
2336 }
2337
2338 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2339 instruction list from the ARM exception table entry ENTRY, allocate and
2340 return a prologue cache structure describing how to unwind this frame.
2341
2342 Return NULL if the unwinding instruction list contains a "spare",
2343 "reserved" or "refuse to unwind" instruction as defined in section
2344 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2345 for the ARM Architecture" document. */
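/* For example, a (hypothetical) normalized instruction list

     0xa8 0xb0

   is decoded as follows, starting with vsp equal to this frame's SP:

     0xa8 : pop {r4, lr} -> r4 saved at vsp, LR at vsp + 4, vsp += 8
     0xb0 : finish       -> PC is taken from the LR save slot

   so the resulting cache records prev_sp = SP + 8, with the r4, LR and
   PC save slots located on the caller's stack.  */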
2346
2347 static struct arm_prologue_cache *
2348 arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2349 {
2350 CORE_ADDR vsp = 0;
2351 int vsp_valid = 0;
2352
2353 struct arm_prologue_cache *cache;
2354 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2355 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2356
2357 for (;;)
2358 {
2359 gdb_byte insn;
2360
2361 /* Whenever we reload SP, we have to retrieve its actual value in
2362 the current frame.  */
2363 if (!vsp_valid)
2364 {
2365 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2366 {
2367 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2368 vsp = get_frame_register_unsigned (this_frame, reg);
2369 }
2370 else
2371 {
2372 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
2373 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2374 }
2375
2376 vsp_valid = 1;
2377 }
2378
2379 /* Decode next unwind instruction. */
2380 insn = *entry++;
2381
2382 if ((insn & 0xc0) == 0)
2383 {
2384 int offset = insn & 0x3f;
2385 vsp += (offset << 2) + 4;
2386 }
2387 else if ((insn & 0xc0) == 0x40)
2388 {
2389 int offset = insn & 0x3f;
2390 vsp -= (offset << 2) + 4;
2391 }
2392 else if ((insn & 0xf0) == 0x80)
2393 {
2394 int mask = ((insn & 0xf) << 8) | *entry++;
2395 int i;
2396
2397 /* The special case of an all-zero mask identifies
2398 "Refuse to unwind". We return NULL to fall back
2399 to the prologue analyzer. */
2400 if (mask == 0)
2401 return NULL;
2402
2403 /* Pop registers r4..r15 under mask. */
2404 for (i = 0; i < 12; i++)
2405 if (mask & (1 << i))
2406 {
2407 cache->saved_regs[4 + i].addr = vsp;
2408 vsp += 4;
2409 }
2410
2411 /* Special-case popping SP -- we need to reload vsp. */
2412 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2413 vsp_valid = 0;
2414 }
2415 else if ((insn & 0xf0) == 0x90)
2416 {
2417 int reg = insn & 0xf;
2418
2419 /* Reserved cases. */
2420 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2421 return NULL;
2422
2423 /* Set SP from another register and mark VSP for reload. */
2424 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2425 vsp_valid = 0;
2426 }
2427 else if ((insn & 0xf0) == 0xa0)
2428 {
2429 int count = insn & 0x7;
2430 int pop_lr = (insn & 0x8) != 0;
2431 int i;
2432
2433 /* Pop r4..r[4+count]. */
2434 for (i = 0; i <= count; i++)
2435 {
2436 cache->saved_regs[4 + i].addr = vsp;
2437 vsp += 4;
2438 }
2439
2440 /* If indicated by flag, pop LR as well. */
2441 if (pop_lr)
2442 {
2443 cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
2444 vsp += 4;
2445 }
2446 }
2447 else if (insn == 0xb0)
2448 {
2449 /* We could only have updated PC by popping into it; if so, it
2450 will show up as an address.  Otherwise, copy LR into PC.  */
2451 if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
2452 cache->saved_regs[ARM_PC_REGNUM]
2453 = cache->saved_regs[ARM_LR_REGNUM];
2454
2455 /* We're done. */
2456 break;
2457 }
2458 else if (insn == 0xb1)
2459 {
2460 int mask = *entry++;
2461 int i;
2462
2463 /* All-zero mask and mask >= 16 is "spare". */
2464 if (mask == 0 || mask >= 16)
2465 return NULL;
2466
2467 /* Pop r0..r3 under mask. */
2468 for (i = 0; i < 4; i++)
2469 if (mask & (1 << i))
2470 {
2471 cache->saved_regs[i].addr = vsp;
2472 vsp += 4;
2473 }
2474 }
2475 else if (insn == 0xb2)
2476 {
2477 ULONGEST offset = 0;
2478 unsigned shift = 0;
2479
2480 do
2481 {
2482 offset |= (*entry & 0x7f) << shift;
2483 shift += 7;
2484 }
2485 while (*entry++ & 0x80);
2486
2487 vsp += 0x204 + (offset << 2);
2488 }
2489 else if (insn == 0xb3)
2490 {
2491 int start = *entry >> 4;
2492 int count = (*entry++) & 0xf;
2493 int i;
2494
2495 /* Only registers D0..D15 are valid here. */
2496 if (start + count >= 16)
2497 return NULL;
2498
2499 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2500 for (i = 0; i <= count; i++)
2501 {
2502 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2503 vsp += 8;
2504 }
2505
2506 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2507 vsp += 4;
2508 }
2509 else if ((insn & 0xf8) == 0xb8)
2510 {
2511 int count = insn & 0x7;
2512 int i;
2513
2514 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2515 for (i = 0; i <= count; i++)
2516 {
2517 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2518 vsp += 8;
2519 }
2520
2521 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2522 vsp += 4;
2523 }
2524 else if (insn == 0xc6)
2525 {
2526 int start = *entry >> 4;
2527 int count = (*entry++) & 0xf;
2528 int i;
2529
2530 /* Only registers WR0..WR15 are valid. */
2531 if (start + count >= 16)
2532 return NULL;
2533
2534 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2535 for (i = 0; i <= count; i++)
2536 {
2537 cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
2538 vsp += 8;
2539 }
2540 }
2541 else if (insn == 0xc7)
2542 {
2543 int mask = *entry++;
2544 int i;
2545
2546 /* All-zero mask and mask >= 16 is "spare". */
2547 if (mask == 0 || mask >= 16)
2548 return NULL;
2549
2550 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2551 for (i = 0; i < 4; i++)
2552 if (mask & (1 << i))
2553 {
2554 cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
2555 vsp += 4;
2556 }
2557 }
2558 else if ((insn & 0xf8) == 0xc0)
2559 {
2560 int count = insn & 0x7;
2561 int i;
2562
2563 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2564 for (i = 0; i <= count; i++)
2565 {
2566 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
2567 vsp += 8;
2568 }
2569 }
2570 else if (insn == 0xc8)
2571 {
2572 int start = *entry >> 4;
2573 int count = (*entry++) & 0xf;
2574 int i;
2575
2576 /* Only registers D0..D31 are valid. */
2577 if (start + count >= 16)
2578 return NULL;
2579
2580 /* Pop VFP double-precision registers
2581 D[16+start]..D[16+start+count]. */
2582 for (i = 0; i <= count; i++)
2583 {
2584 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
2585 vsp += 8;
2586 }
2587 }
2588 else if (insn == 0xc9)
2589 {
2590 int start = *entry >> 4;
2591 int count = (*entry++) & 0xf;
2592 int i;
2593
2594 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2595 for (i = 0; i <= count; i++)
2596 {
2597 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2598 vsp += 8;
2599 }
2600 }
2601 else if ((insn & 0xf8) == 0xd0)
2602 {
2603 int count = insn & 0x7;
2604 int i;
2605
2606 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2607 for (i = 0; i <= count; i++)
2608 {
2609 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2610 vsp += 8;
2611 }
2612 }
2613 else
2614 {
2615 /* Everything else is "spare". */
2616 return NULL;
2617 }
2618 }
2619
2620 /* If we restore SP from a register, assume this was the frame register.
2621 Otherwise just fall back to SP as frame register. */
2622 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2623 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2624 else
2625 cache->framereg = ARM_SP_REGNUM;
2626
2627 /* Determine offset to previous frame. */
2628 cache->framesize
2629 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2630
2631 /* We already got the previous SP. */
2632 cache->prev_sp = vsp;
2633
2634 return cache;
2635 }
2636
2637 /* Unwinding via ARM exception table entries. Note that the sniffer
2638 already computes a filled-in prologue cache, which is then used
2639 with the same arm_prologue_this_id and arm_prologue_prev_register
2640 routines also used for prologue-parsing based unwinding. */
2641
2642 static int
2643 arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2644 struct frame_info *this_frame,
2645 void **this_prologue_cache)
2646 {
2647 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2648 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2649 CORE_ADDR addr_in_block, exidx_region, func_start;
2650 struct arm_prologue_cache *cache;
2651 gdb_byte *entry;
2652
2653 /* See if we have an ARM exception table entry covering this address. */
2654 addr_in_block = get_frame_address_in_block (this_frame);
2655 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2656 if (!entry)
2657 return 0;
2658
2659 /* The ARM exception table does not describe unwind information
2660 for arbitrary PC values, but is guaranteed to be correct only
2661 at call sites. We have to decide here whether we want to use
2662 ARM exception table information for this frame, or fall back
2663 to using prologue parsing. (Note that if we have DWARF CFI,
2664 this sniffer isn't even called -- CFI is always preferred.)
2665
2666 Before we make this decision, however, we check whether we
2667 actually have *symbol* information for the current frame.
2668 If not, prologue parsing would not work anyway, so we might
2669 as well use the exception table and hope for the best. */
2670 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2671 {
2672 int exc_valid = 0;
2673
2674 /* If the next frame is "normal", we are at a call site in this
2675 frame, so exception information is guaranteed to be valid. */
2676 if (get_next_frame (this_frame)
2677 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2678 exc_valid = 1;
2679
2680 /* We also assume exception information is valid if we're currently
2681 blocked in a system call. The system library is supposed to
2682 ensure this, so that e.g. pthread cancellation works. */
2683 if (arm_frame_is_thumb (this_frame))
2684 {
2685 ULONGEST insn;
2686
2687 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 2,
2688 2, byte_order_for_code, &insn)
2689 && (insn & 0xff00) == 0xdf00 /* svc */)
2690 exc_valid = 1;
2691 }
2692 else
2693 {
2694 ULONGEST insn;
2695
2696 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 4,
2697 4, byte_order_for_code, &insn)
2698 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2699 exc_valid = 1;
2700 }
2701
2702 /* Bail out if we don't know that exception information is valid. */
2703 if (!exc_valid)
2704 return 0;
2705
2706 /* The ARM exception index does not mark the *end* of the region
2707 covered by the entry, and some functions will not have any entry.
2708 To correctly recognize the end of the covered region, the linker
2709 should have inserted dummy records with a CANTUNWIND marker.
2710
2711 Unfortunately, current versions of GNU ld do not reliably do
2712 this, and thus we may have found an incorrect entry above.
2713 As a (temporary) sanity check, we only use the entry if it
2714 lies *within* the bounds of the function. Note that this check
2715 might reject perfectly valid entries that just happen to cover
2716 multiple functions; therefore this check ought to be removed
2717 once the linker is fixed. */
2718 if (func_start > exidx_region)
2719 return 0;
2720 }
2721
2722 /* Decode the list of unwinding instructions into a prologue cache.
2723 Note that this may fail due to e.g. a "refuse to unwind" code. */
2724 cache = arm_exidx_fill_cache (this_frame, entry);
2725 if (!cache)
2726 return 0;
2727
2728 *this_prologue_cache = cache;
2729 return 1;
2730 }
2731
2732 struct frame_unwind arm_exidx_unwind = {
2733 NORMAL_FRAME,
2734 default_frame_unwind_stop_reason,
2735 arm_prologue_this_id,
2736 arm_prologue_prev_register,
2737 NULL,
2738 arm_exidx_unwind_sniffer
2739 };
2740
2741 static struct arm_prologue_cache *
2742 arm_make_epilogue_frame_cache (struct frame_info *this_frame)
2743 {
2744 struct arm_prologue_cache *cache;
2745 int reg;
2746
2747 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2748 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2749
2750 /* Still rely on the offsets calculated from the prologue.  */
2751 arm_scan_prologue (this_frame, cache);
2752
2753 /* Since we are in epilogue, the SP has been restored. */
2754 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2755
2756 /* Calculate actual addresses of saved registers using offsets
2757 determined by arm_scan_prologue. */
2758 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2759 if (trad_frame_addr_p (cache->saved_regs, reg))
2760 cache->saved_regs[reg].addr += cache->prev_sp;
2761
2762 return cache;
2763 }
2764
2765 /* Implementation of function hook 'this_id' in
2766 'struct frame_unwind' for epilogue unwinder.  */
2767
2768 static void
2769 arm_epilogue_frame_this_id (struct frame_info *this_frame,
2770 void **this_cache,
2771 struct frame_id *this_id)
2772 {
2773 struct arm_prologue_cache *cache;
2774 CORE_ADDR pc, func;
2775
2776 if (*this_cache == NULL)
2777 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2778 cache = (struct arm_prologue_cache *) *this_cache;
2779
2780 /* Use function start address as part of the frame ID. If we cannot
2781 identify the start address (due to missing symbol information),
2782 fall back to just using the current PC. */
2783 pc = get_frame_pc (this_frame);
2784 func = get_frame_func (this_frame);
2785 if (func == 0)
2786 func = pc;
2787
2788 (*this_id) = frame_id_build (cache->prev_sp, pc);
2789 }
2790
2791 /* Implementation of function hook 'prev_register' in
2792 'struct frame_unwind' for epilogue unwinder.  */
2793
2794 static struct value *
2795 arm_epilogue_frame_prev_register (struct frame_info *this_frame,
2796 void **this_cache, int regnum)
2797 {
2798 if (*this_cache == NULL)
2799 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2800
2801 return arm_prologue_prev_register (this_frame, this_cache, regnum);
2802 }
2803
2804 static int arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch,
2805 CORE_ADDR pc);
2806 static int thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch,
2807 CORE_ADDR pc);
2808
2809 /* Implementation of function hook 'sniffer' in
2810 'struct frame_unwind' for epilogue unwinder.  */
2811
2812 static int
2813 arm_epilogue_frame_sniffer (const struct frame_unwind *self,
2814 struct frame_info *this_frame,
2815 void **this_prologue_cache)
2816 {
2817 if (frame_relative_level (this_frame) == 0)
2818 {
2819 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2820 CORE_ADDR pc = get_frame_pc (this_frame);
2821
2822 if (arm_frame_is_thumb (this_frame))
2823 return thumb_stack_frame_destroyed_p (gdbarch, pc);
2824 else
2825 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
2826 }
2827 else
2828 return 0;
2829 }
2830
2831 /* Frame unwinder from epilogue. */
2832
2833 static const struct frame_unwind arm_epilogue_frame_unwind =
2834 {
2835 NORMAL_FRAME,
2836 default_frame_unwind_stop_reason,
2837 arm_epilogue_frame_this_id,
2838 arm_epilogue_frame_prev_register,
2839 NULL,
2840 arm_epilogue_frame_sniffer,
2841 };
2842
2843 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2844 trampoline, return the target PC. Otherwise return 0.
2845
2846 void call0a (char c, short s, int i, long l) {}
2847
2848 int main (void)
2849 {
2850 (*pointer_to_call0a) (c, s, i, l);
2851 }
2852
2853 Instead of calling a stub library function _call_via_xx (xx is
2854 the register name), GCC may inline the trampoline in the object
2855 file as below (register r2 has the address of call0a).
2856
2857 .global main
2858 .type main, %function
2859 ...
2860 bl .L1
2861 ...
2862 .size main, .-main
2863
2864 .L1:
2865 bx r2
2866
2867 The trampoline 'bx r2' doesn't belong to main. */
2868
2869 static CORE_ADDR
2870 arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2871 {
2872 /* The heuristic for recognizing such a trampoline is that FRAME is
2873 executing in Thumb mode and the instruction at PC is 'bx Rm'.  */
2874 if (arm_frame_is_thumb (frame))
2875 {
2876 gdb_byte buf[2];
2877
2878 if (target_read_memory (pc, buf, 2) == 0)
2879 {
2880 struct gdbarch *gdbarch = get_frame_arch (frame);
2881 enum bfd_endian byte_order_for_code
2882 = gdbarch_byte_order_for_code (gdbarch);
2883 uint16_t insn
2884 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2885
2886 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2887 {
2888 CORE_ADDR dest
2889 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2890
2891 /* Clear the LSB so that gdb core sets step-resume
2892 breakpoint at the right address. */
2893 return UNMAKE_THUMB_ADDR (dest);
2894 }
2895 }
2896 }
2897
2898 return 0;
2899 }
2900
2901 static struct arm_prologue_cache *
2902 arm_make_stub_cache (struct frame_info *this_frame)
2903 {
2904 struct arm_prologue_cache *cache;
2905
2906 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2907 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2908
2909 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2910
2911 return cache;
2912 }
2913
2914 /* Our frame ID for a stub frame is the current SP and LR. */
2915
2916 static void
2917 arm_stub_this_id (struct frame_info *this_frame,
2918 void **this_cache,
2919 struct frame_id *this_id)
2920 {
2921 struct arm_prologue_cache *cache;
2922
2923 if (*this_cache == NULL)
2924 *this_cache = arm_make_stub_cache (this_frame);
2925 cache = (struct arm_prologue_cache *) *this_cache;
2926
2927 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
2928 }
2929
2930 static int
2931 arm_stub_unwind_sniffer (const struct frame_unwind *self,
2932 struct frame_info *this_frame,
2933 void **this_prologue_cache)
2934 {
2935 CORE_ADDR addr_in_block;
2936 gdb_byte dummy[4];
2937 CORE_ADDR pc, start_addr;
2938 const char *name;
2939
2940 addr_in_block = get_frame_address_in_block (this_frame);
2941 pc = get_frame_pc (this_frame);
2942 if (in_plt_section (addr_in_block)
2943 /* We also use the stub unwinder if the target memory is unreadable
2944 to avoid having the prologue unwinder trying to read it. */
2945 || target_read_memory (pc, dummy, 4) != 0)
2946 return 1;
2947
2948 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2949 && arm_skip_bx_reg (this_frame, pc) != 0)
2950 return 1;
2951
2952 return 0;
2953 }
2954
2955 struct frame_unwind arm_stub_unwind = {
2956 NORMAL_FRAME,
2957 default_frame_unwind_stop_reason,
2958 arm_stub_this_id,
2959 arm_prologue_prev_register,
2960 NULL,
2961 arm_stub_unwind_sniffer
2962 };
2963
2964 /* Store, into CACHE->saved_regs, the addresses of the registers saved
2965 by the exception entry for the frame described by THIS_FRAME, and
2966 return CACHE.  */
2967
2968 static struct arm_prologue_cache *
2969 arm_m_exception_cache (struct frame_info *this_frame)
2970 {
2971 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2972 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2973 struct arm_prologue_cache *cache;
2974 CORE_ADDR lr;
2975 CORE_ADDR sp;
2976 CORE_ADDR unwound_sp;
2977 LONGEST xpsr;
2978 uint32_t exc_return;
2979 uint32_t process_stack_used;
2980 uint32_t extended_frame_used;
2981 uint32_t secure_stack_used;
2982
2983 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2984 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2985
2986 /* ARMv7-M Architecture Reference "B1.5.6 Exception entry behavior"
2987 describes which bits in LR that define which stack was used prior
2988 to the exception and if FPU is used (causing extended stack frame). */
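  /* As interpreted by the code below (standard EXC_RETURN encoding):
       bits 31:28 all ones -> LR holds an EXC_RETURN value;
       bit 2 (SPSEL)       -> 1 = thread/process stack (PSP) was used,
                              0 = main stack (MSP);
       bit 4 (FTYPE)       -> 0 = extended frame with FP state stacked,
                              1 = basic eight-word frame;
       bit 6 (S, ARMv8-M)  -> 1 = secure stack was used.  */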
2989
2990 lr = get_frame_register_unsigned (this_frame, ARM_LR_REGNUM);
2991 sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2992
2993 /* Check EXC_RETURN indicator bits. */
2994 exc_return = (((lr >> 28) & 0xf) == 0xf);
2995
2996 /* Check EXC_RETURN bit SPSEL if Main or Thread (process) stack used. */
2997 process_stack_used = ((lr & (1 << 2)) != 0);
2998 if (exc_return && process_stack_used)
2999 {
3000 /* Thread (process) stack was used.
3001 The target could potentially expose this under another register name,
3002 but PSP can be considered the standard name for the "Process Stack
3003 Pointer".  To be fully aware of system registers like MSP and PSP,
3004 these could be added to a separate XML arm-m-system-profile that is
3005 valid for the ARMv6-M and ARMv7-M architectures.  Also, to be able to
3006 debug e.g. a core file off-line, these registers would have to be
3007 defined by GDB and included in the core file regsets.  */
3008
3009 int psp_regnum = user_reg_map_name_to_regnum (gdbarch, "psp", -1);
3010 if (psp_regnum == -1)
3011 {
3012 /* Thread (process) stack could not be fetched,
3013 give warning and exit. */
3014
3015 warning (_("no PSP thread stack unwinding supported."));
3016
3017 /* Terminate any further stack unwinding by referring to self.  */
3018 cache->prev_sp = sp;
3019 return cache;
3020 }
3021 else
3022 {
3023 /* Thread (process) stack used, use PSP as SP. */
3024 unwound_sp = get_frame_register_unsigned (this_frame, psp_regnum);
3025 }
3026 }
3027 else
3028 {
3029 /* Main stack used, use MSP as SP. */
3030 unwound_sp = sp;
3031 }
3032
3033 /* The hardware saves eight 32-bit words, comprising xPSR,
3034 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
3035 "B1.5.6 Exception entry behavior" in
3036 "ARMv7-M Architecture Reference Manual". */
3037 cache->saved_regs[0].addr = unwound_sp;
3038 cache->saved_regs[1].addr = unwound_sp + 4;
3039 cache->saved_regs[2].addr = unwound_sp + 8;
3040 cache->saved_regs[3].addr = unwound_sp + 12;
3041 cache->saved_regs[ARM_IP_REGNUM].addr = unwound_sp + 16;
3042 cache->saved_regs[ARM_LR_REGNUM].addr = unwound_sp + 20;
3043 cache->saved_regs[ARM_PC_REGNUM].addr = unwound_sp + 24;
3044 cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;
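  /* For example, with unwound_sp = 0x2000ffe0 (an arbitrary illustrative
     value), the basic eight-word frame is laid out as:
       0x2000ffe0 R0    0x2000ffe4 R1    0x2000ffe8 R2    0x2000ffec R3
       0x2000fff0 R12   0x2000fff4 LR    0x2000fff8 ReturnAddress
       0x2000fffc xPSR
     and, for the basic frame type, the caller's SP is 0x20010000 (plus
     a further 4 bytes if the xPSR bit-9 aligner handled below is set).  */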
3045
3046 /* Check EXC_RETURN bit FTYPE if extended stack frame (FPU regs stored)
3047 type used. */
3048 extended_frame_used = ((lr & (1 << 4)) == 0);
3049 if (exc_return && extended_frame_used)
3050 {
3051 int i;
3052 int fpu_regs_stack_offset;
3053
3054 /* This code does not take lazy stacking into account; see "Lazy
3055 context save of FP state" in B1.5.7, and also ARM AN298, as
3056 supported by the Cortex-M4F architecture.
3057 To handle this fully, the FPCCR register (Floating-point Context
3058 Control Register) would need to be read and its ASPEN and LSPEN
3059 bits checked in order to set up the lazily stacked FP registers
3060 correctly.  This register is located at address 0xE000EF34.  */
3061
3062 /* Extended stack frame type used. */
3063 fpu_regs_stack_offset = unwound_sp + 0x20;
3064 for (i = 0; i < 16; i++)
3065 {
3066 cache->saved_regs[ARM_D0_REGNUM + i].addr = fpu_regs_stack_offset;
3067 fpu_regs_stack_offset += 4;
3068 }
3069 cache->saved_regs[ARM_FPSCR_REGNUM].addr = unwound_sp + 0x60;
3070
3071 /* Offset 0x64 is reserved. */
3072 cache->prev_sp = unwound_sp + 0x68;
3073 }
3074 else
3075 {
3076 /* Standard stack frame type used. */
3077 cache->prev_sp = unwound_sp + 0x20;
3078 }
3079
3080 /* Check EXC_RETURN bit S if Secure or Non-secure stack used. */
3081 secure_stack_used = ((lr & (1 << 6)) != 0);
3082 if (exc_return && secure_stack_used)
3083 {
3084 /* ARMv8-M Exception and interrupt handling is not considered here.
3085 In the ARMv8-M architecture also EXC_RETURN bit S is controlling if
3086 the Secure or Non-secure stack was used. To separate Secure and
3087 Non-secure stacks, processors that are based on the ARMv8-M
3088 architecture support 4 stack pointers: MSP_S, PSP_S, MSP_NS, PSP_NS.
3089 In addition, a stack limit feature is provided using stack limit
3090 registers (accessible using MSR and MRS instructions) in Privileged
3091 level. */
3092 }
3093
3094 /* If bit 9 of the saved xPSR is set, then there is a four-byte
3095 aligner between the top of the 32-byte stack frame and the
3096 previous context's stack pointer. */
3097 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
3098 && (xpsr & (1 << 9)) != 0)
3099 cache->prev_sp += 4;
3100
3101 return cache;
3102 }
3103
3104 /* Implementation of function hook 'this_id' in
3105 'struct frame_unwind'.  */
3106
3107 static void
3108 arm_m_exception_this_id (struct frame_info *this_frame,
3109 void **this_cache,
3110 struct frame_id *this_id)
3111 {
3112 struct arm_prologue_cache *cache;
3113
3114 if (*this_cache == NULL)
3115 *this_cache = arm_m_exception_cache (this_frame);
3116 cache = (struct arm_prologue_cache *) *this_cache;
3117
3118 /* Our frame ID for a stub frame is the current SP and LR. */
3119 *this_id = frame_id_build (cache->prev_sp,
3120 get_frame_pc (this_frame));
3121 }
3122
3123 /* Implementation of function hook 'prev_register' in
3124 'struct frame_unwind'.  */
3125
3126 static struct value *
3127 arm_m_exception_prev_register (struct frame_info *this_frame,
3128 void **this_cache,
3129 int prev_regnum)
3130 {
3131 struct arm_prologue_cache *cache;
3132
3133 if (*this_cache == NULL)
3134 *this_cache = arm_m_exception_cache (this_frame);
3135 cache = (struct arm_prologue_cache *) *this_cache;
3136
3137 /* The value was already reconstructed into PREV_SP. */
3138 if (prev_regnum == ARM_SP_REGNUM)
3139 return frame_unwind_got_constant (this_frame, prev_regnum,
3140 cache->prev_sp);
3141
3142 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
3143 prev_regnum);
3144 }
3145
3146 /* Implementation of function hook 'sniffer' in
3147 'struct frame_unwind'.  */
3148
3149 static int
3150 arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3151 struct frame_info *this_frame,
3152 void **this_prologue_cache)
3153 {
3154 CORE_ADDR this_pc = get_frame_pc (this_frame);
3155
3156 /* No need to check is_m; this sniffer is only registered for
3157 M-profile architectures. */
3158
3159 /* Check if exception frame returns to a magic PC value. */
3160 return arm_m_addr_is_magic (this_pc);
3161 }
3162
3163 /* Frame unwinder for M-profile exceptions. */
3164
3165 struct frame_unwind arm_m_exception_unwind =
3166 {
3167 SIGTRAMP_FRAME,
3168 default_frame_unwind_stop_reason,
3169 arm_m_exception_this_id,
3170 arm_m_exception_prev_register,
3171 NULL,
3172 arm_m_exception_unwind_sniffer
3173 };
3174
3175 static CORE_ADDR
3176 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
3177 {
3178 struct arm_prologue_cache *cache;
3179
3180 if (*this_cache == NULL)
3181 *this_cache = arm_make_prologue_cache (this_frame);
3182 cache = (struct arm_prologue_cache *) *this_cache;
3183
3184 return cache->prev_sp - cache->framesize;
3185 }
3186
3187 struct frame_base arm_normal_base = {
3188 &arm_prologue_unwind,
3189 arm_normal_frame_base,
3190 arm_normal_frame_base,
3191 arm_normal_frame_base
3192 };
3193
3194 static struct value *
3195 arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
3196 int regnum)
3197 {
3198 struct gdbarch * gdbarch = get_frame_arch (this_frame);
3199 CORE_ADDR lr, cpsr;
3200 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
3201
3202 switch (regnum)
3203 {
3204 case ARM_PC_REGNUM:
3205 /* The PC is normally copied from the return column, which
3206 describes saves of LR. However, that version may have an
3207 extra bit set to indicate Thumb state. The bit is not
3208 part of the PC. */
3209 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3210 return frame_unwind_got_constant (this_frame, regnum,
3211 arm_addr_bits_remove (gdbarch, lr));
3212
3213 case ARM_PS_REGNUM:
3214 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3215 cpsr = get_frame_register_unsigned (this_frame, regnum);
3216 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3217 if (IS_THUMB_ADDR (lr))
3218 cpsr |= t_bit;
3219 else
3220 cpsr &= ~t_bit;
3221 return frame_unwind_got_constant (this_frame, regnum, cpsr);
3222
3223 default:
3224 internal_error (__FILE__, __LINE__,
3225 _("Unexpected register %d"), regnum);
3226 }
3227 }
3228
3229 static void
3230 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3231 struct dwarf2_frame_state_reg *reg,
3232 struct frame_info *this_frame)
3233 {
3234 switch (regnum)
3235 {
3236 case ARM_PC_REGNUM:
3237 case ARM_PS_REGNUM:
3238 reg->how = DWARF2_FRAME_REG_FN;
3239 reg->loc.fn = arm_dwarf2_prev_register;
3240 break;
3241 case ARM_SP_REGNUM:
3242 reg->how = DWARF2_FRAME_REG_CFA;
3243 break;
3244 }
3245 }
3246
3247 /* Implement the stack_frame_destroyed_p gdbarch method. */
3248
3249 static int
3250 thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3251 {
3252 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3253 unsigned int insn, insn2;
3254 int found_return = 0, found_stack_adjust = 0;
3255 CORE_ADDR func_start, func_end;
3256 CORE_ADDR scan_pc;
3257 gdb_byte buf[4];
3258
3259 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3260 return 0;
3261
3262 /* The epilogue is a sequence of instructions along the following lines:
3263
3264 - add stack frame size to SP or FP
3265 - [if frame pointer used] restore SP from FP
3266 - restore registers from SP [may include PC]
3267 - a return-type instruction [if PC wasn't already restored]
3268
3269 In a first pass, we scan forward from the current PC and verify the
3270 instructions we find as compatible with this sequence, ending in a
3271 return instruction.
3272
3273 However, this is not sufficient to distinguish indirect function calls
3274 within a function from indirect tail calls in the epilogue in some cases.
3275 Therefore, if we didn't already find any SP-changing instruction during
3276 forward scan, we add a backward scanning heuristic to ensure we actually
3277 are in the epilogue. */
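  /* For example, for a (hypothetical) epilogue such as

       add  sp, sp, #16
       pop  {r4, r5, pc}

     with PC at the "pop", the forward scan recognizes the pop that
     writes PC as the return, and the backward scan finds the preceding
     SP adjustment, so the frame is reported as destroyed.  */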
3278
3279 scan_pc = pc;
3280 while (scan_pc < func_end && !found_return)
3281 {
3282 if (target_read_memory (scan_pc, buf, 2))
3283 break;
3284
3285 scan_pc += 2;
3286 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3287
3288 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3289 found_return = 1;
3290 else if (insn == 0x46f7) /* mov pc, lr */
3291 found_return = 1;
3292 else if (thumb_instruction_restores_sp (insn))
3293 {
3294 if ((insn & 0xff00) == 0xbd00) /* pop <registers, PC> */
3295 found_return = 1;
3296 }
3297 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
3298 {
3299 if (target_read_memory (scan_pc, buf, 2))
3300 break;
3301
3302 scan_pc += 2;
3303 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3304
3305 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3306 {
3307 if (insn2 & 0x8000) /* <registers> include PC. */
3308 found_return = 1;
3309 }
3310 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3311 && (insn2 & 0x0fff) == 0x0b04)
3312 {
3313 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3314 found_return = 1;
3315 }
3316 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3317 && (insn2 & 0x0e00) == 0x0a00)
3318 ;
3319 else
3320 break;
3321 }
3322 else
3323 break;
3324 }
3325
3326 if (!found_return)
3327 return 0;
3328
3329 /* Since any instruction in the epilogue sequence, with the possible
3330 exception of return itself, updates the stack pointer, we need to
3331 scan backwards for at most one instruction. Try either a 16-bit or
3332 a 32-bit instruction. This is just a heuristic, so we do not worry
3333 too much about false positives. */
3334
3335 if (pc - 4 < func_start)
3336 return 0;
3337 if (target_read_memory (pc - 4, buf, 4))
3338 return 0;
3339
3340 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3341 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3342
3343 if (thumb_instruction_restores_sp (insn2))
3344 found_stack_adjust = 1;
3345 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3346 found_stack_adjust = 1;
3347 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3348 && (insn2 & 0x0fff) == 0x0b04)
3349 found_stack_adjust = 1;
3350 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3351 && (insn2 & 0x0e00) == 0x0a00)
3352 found_stack_adjust = 1;
3353
3354 return found_stack_adjust;
3355 }
3356
3357 static int
3358 arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch, CORE_ADDR pc)
3359 {
3360 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3361 unsigned int insn;
3362 int found_return;
3363 CORE_ADDR func_start, func_end;
3364
3365 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3366 return 0;
3367
3368 /* We are in the epilogue if the previous instruction was a stack
3369 adjustment and the next instruction is a possible return (bx, mov
3370 pc, or pop). We could have to scan backwards to find the stack
3371 adjustment, or forwards to find the return, but this is a decent
3372 approximation. First scan forwards. */
3373
3374 found_return = 0;
3375 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3376 if (bits (insn, 28, 31) != INST_NV)
3377 {
3378 if ((insn & 0x0ffffff0) == 0x012fff10)
3379 /* BX. */
3380 found_return = 1;
3381 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3382 /* MOV PC. */
3383 found_return = 1;
3384 else if ((insn & 0x0fff0000) == 0x08bd0000
3385 && (insn & 0x0000c000) != 0)
3386 /* POP (LDMIA), including PC or LR. */
3387 found_return = 1;
3388 }
3389
3390 if (!found_return)
3391 return 0;
3392
3393 /* Scan backwards. This is just a heuristic, so do not worry about
3394 false positives from mode changes. */
3395
3396 if (pc < func_start + 4)
3397 return 0;
3398
3399 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
3400 if (arm_instruction_restores_sp (insn))
3401 return 1;
3402
3403 return 0;
3404 }
3405
3406 /* Implement the stack_frame_destroyed_p gdbarch method. */
3407
3408 static int
3409 arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3410 {
3411 if (arm_pc_is_thumb (gdbarch, pc))
3412 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3413 else
3414 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
3415 }
3416
3417 /* When arguments must be pushed onto the stack, they go on in reverse
3418 order. The code below implements a FILO (stack) to do this. */
3419
3420 struct stack_item
3421 {
3422 int len;
3423 struct stack_item *prev;
3424 gdb_byte *data;
3425 };
3426
3427 static struct stack_item *
3428 push_stack_item (struct stack_item *prev, const gdb_byte *contents, int len)
3429 {
3430 struct stack_item *si;
3431 si = XNEW (struct stack_item);
3432 si->data = (gdb_byte *) xmalloc (len);
3433 si->len = len;
3434 si->prev = prev;
3435 memcpy (si->data, contents, len);
3436 return si;
3437 }
3438
3439 static struct stack_item *
3440 pop_stack_item (struct stack_item *si)
3441 {
3442 struct stack_item *dead = si;
3443 si = si->prev;
3444 xfree (dead->data);
3445 xfree (dead);
3446 return si;
3447 }
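/* Illustrative sketch (not a verbatim excerpt): overflow arguments are
   typically collected and flushed like this, where sp, contents and len
   are placeholders for the caller's stack pointer and argument bytes:

     struct stack_item *si = NULL;
     si = push_stack_item (si, contents, len);   /+ once per argument +/
     while (si)
       {
         sp -= si->len;
         write_memory (sp, si->data, si->len);
         si = pop_stack_item (si);
       }

   Because the list is flushed last-pushed-first while sp decreases, the
   items pushed first end up at the lowest addresses, i.e. the arguments
   appear on the stack in their original order.  */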
3448
3449 /* Implement the gdbarch type alignment method, overrides the generic
3450 alignment algorithm for anything that is arm specific. */
3451
3452 static ULONGEST
3453 arm_type_align (gdbarch *gdbarch, struct type *t)
3454 {
3455 t = check_typedef (t);
3456 if (t->code () == TYPE_CODE_ARRAY && t->is_vector ())
3457 {
3458 /* Use the natural alignment for vector types (the same as for the
3459 scalar element type), but cap the alignment at 64 bits.  */
3460 if (TYPE_LENGTH (t) > 8)
3461 return 8;
3462 else
3463 return TYPE_LENGTH (t);
3464 }
3465
3466 /* Allow the common code to calculate the alignment. */
3467 return 0;
3468 }
3469
3470 /* Possible base types for a candidate for passing and returning in
3471 VFP registers. */
3472
3473 enum arm_vfp_cprc_base_type
3474 {
3475 VFP_CPRC_UNKNOWN,
3476 VFP_CPRC_SINGLE,
3477 VFP_CPRC_DOUBLE,
3478 VFP_CPRC_VEC64,
3479 VFP_CPRC_VEC128
3480 };
3481
3482 /* The length of one element of base type B. */
3483
3484 static unsigned
3485 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3486 {
3487 switch (b)
3488 {
3489 case VFP_CPRC_SINGLE:
3490 return 4;
3491 case VFP_CPRC_DOUBLE:
3492 return 8;
3493 case VFP_CPRC_VEC64:
3494 return 8;
3495 case VFP_CPRC_VEC128:
3496 return 16;
3497 default:
3498 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3499 (int) b);
3500 }
3501 }
3502
3503 /* The character ('s', 'd' or 'q') for the type of VFP register used
3504 for passing base type B. */
3505
3506 static int
3507 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3508 {
3509 switch (b)
3510 {
3511 case VFP_CPRC_SINGLE:
3512 return 's';
3513 case VFP_CPRC_DOUBLE:
3514 return 'd';
3515 case VFP_CPRC_VEC64:
3516 return 'd';
3517 case VFP_CPRC_VEC128:
3518 return 'q';
3519 default:
3520 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3521 (int) b);
3522 }
3523 }
3524
3525 /* Determine whether T may be part of a candidate for passing and
3526 returning in VFP registers, ignoring the limit on the total number
3527 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3528 classification of the first valid component found; if it is not
3529 VFP_CPRC_UNKNOWN, all components must have the same classification
3530 as *BASE_TYPE. If it is found that T contains a type not permitted
3531 for passing and returning in VFP registers, a type differently
3532 classified from *BASE_TYPE, or two types differently classified
3533 from each other, return -1, otherwise return the total number of
3534 base-type elements found (possibly 0 in an empty structure or
3535 array). Vector types are not currently supported, matching the
3536 generic AAPCS support. */
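/* For example, under these rules:

     struct { double re; double im; }  is a candidate with base type
       VFP_CPRC_DOUBLE and 2 elements;
     float v[4]                        is a candidate with base type
       VFP_CPRC_SINGLE and 4 elements;
     struct { float f; double d; }     mixes base types and is
       rejected (-1).  */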
3537
3538 static int
3539 arm_vfp_cprc_sub_candidate (struct type *t,
3540 enum arm_vfp_cprc_base_type *base_type)
3541 {
3542 t = check_typedef (t);
3543 switch (t->code ())
3544 {
3545 case TYPE_CODE_FLT:
3546 switch (TYPE_LENGTH (t))
3547 {
3548 case 4:
3549 if (*base_type == VFP_CPRC_UNKNOWN)
3550 *base_type = VFP_CPRC_SINGLE;
3551 else if (*base_type != VFP_CPRC_SINGLE)
3552 return -1;
3553 return 1;
3554
3555 case 8:
3556 if (*base_type == VFP_CPRC_UNKNOWN)
3557 *base_type = VFP_CPRC_DOUBLE;
3558 else if (*base_type != VFP_CPRC_DOUBLE)
3559 return -1;
3560 return 1;
3561
3562 default:
3563 return -1;
3564 }
3565 break;
3566
3567 case TYPE_CODE_COMPLEX:
3568 /* Arguments of complex T where T is one of the types float or
3569 double get treated as if they are implemented as:
3570
3571 struct complexT
3572 {
3573 T real;
3574 T imag;
3575 };
3576
3577 */
3578 switch (TYPE_LENGTH (t))
3579 {
3580 case 8:
3581 if (*base_type == VFP_CPRC_UNKNOWN)
3582 *base_type = VFP_CPRC_SINGLE;
3583 else if (*base_type != VFP_CPRC_SINGLE)
3584 return -1;
3585 return 2;
3586
3587 case 16:
3588 if (*base_type == VFP_CPRC_UNKNOWN)
3589 *base_type = VFP_CPRC_DOUBLE;
3590 else if (*base_type != VFP_CPRC_DOUBLE)
3591 return -1;
3592 return 2;
3593
3594 default:
3595 return -1;
3596 }
3597 break;
3598
3599 case TYPE_CODE_ARRAY:
3600 {
3601 if (t->is_vector ())
3602 {
3603 /* 64-bit and 128-bit containerized vector types are VFP
3604 CPRCs.  */
3605 switch (TYPE_LENGTH (t))
3606 {
3607 case 8:
3608 if (*base_type == VFP_CPRC_UNKNOWN)
3609 *base_type = VFP_CPRC_VEC64;
3610 return 1;
3611 case 16:
3612 if (*base_type == VFP_CPRC_UNKNOWN)
3613 *base_type = VFP_CPRC_VEC128;
3614 return 1;
3615 default:
3616 return -1;
3617 }
3618 }
3619 else
3620 {
3621 int count;
3622 unsigned unitlen;
3623
3624 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t),
3625 base_type);
3626 if (count == -1)
3627 return -1;
3628 if (TYPE_LENGTH (t) == 0)
3629 {
3630 gdb_assert (count == 0);
3631 return 0;
3632 }
3633 else if (count == 0)
3634 return -1;
3635 unitlen = arm_vfp_cprc_unit_length (*base_type);
3636 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3637 return TYPE_LENGTH (t) / unitlen;
3638 }
3639 }
3640 break;
3641
3642 case TYPE_CODE_STRUCT:
3643 {
3644 int count = 0;
3645 unsigned unitlen;
3646 int i;
3647 for (i = 0; i < t->num_fields (); i++)
3648 {
3649 int sub_count = 0;
3650
3651 if (!field_is_static (&t->field (i)))
3652 sub_count = arm_vfp_cprc_sub_candidate (t->field (i).type (),
3653 base_type);
3654 if (sub_count == -1)
3655 return -1;
3656 count += sub_count;
3657 }
3658 if (TYPE_LENGTH (t) == 0)
3659 {
3660 gdb_assert (count == 0);
3661 return 0;
3662 }
3663 else if (count == 0)
3664 return -1;
3665 unitlen = arm_vfp_cprc_unit_length (*base_type);
3666 if (TYPE_LENGTH (t) != unitlen * count)
3667 return -1;
3668 return count;
3669 }
3670
3671 case TYPE_CODE_UNION:
3672 {
3673 int count = 0;
3674 unsigned unitlen;
3675 int i;
3676 for (i = 0; i < t->num_fields (); i++)
3677 {
3678 int sub_count = arm_vfp_cprc_sub_candidate (t->field (i).type (),
3679 base_type);
3680 if (sub_count == -1)
3681 return -1;
3682 count = (count > sub_count ? count : sub_count);
3683 }
3684 if (TYPE_LENGTH (t) == 0)
3685 {
3686 gdb_assert (count == 0);
3687 return 0;
3688 }
3689 else if (count == 0)
3690 return -1;
3691 unitlen = arm_vfp_cprc_unit_length (*base_type);
3692 if (TYPE_LENGTH (t) != unitlen * count)
3693 return -1;
3694 return count;
3695 }
3696
3697 default:
3698 break;
3699 }
3700
3701 return -1;
3702 }
3703
3704 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3705 if passed to or returned from a non-variadic function with the VFP
3706 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3707 *BASE_TYPE to the base type for T and *COUNT to the number of
3708 elements of that base type before returning. */
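/* Informal examples: struct { double re, im; } is a candidate with
base type VFP_CPRC_DOUBLE and a count of 2; a struct containing
float f[4] is a candidate with base type VFP_CPRC_SINGLE and a count
of 4; an aggregate of five or more such elements is rejected because
the count limit is 4. */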
3709
3710 static int
3711 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3712 int *count)
3713 {
3714 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3715 int c = arm_vfp_cprc_sub_candidate (t, &b);
3716 if (c <= 0 || c > 4)
3717 return 0;
3718 *base_type = b;
3719 *count = c;
3720 return 1;
3721 }
3722
3723 /* Return 1 if the VFP ABI should be used for passing arguments to and
3724 returning values from a function of type FUNC_TYPE, 0
3725 otherwise. */
3726
3727 static int
3728 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3729 {
3730 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3731 /* Variadic functions always use the base ABI. Assume that functions
3732 without debug info are not variadic. */
3733 if (func_type && check_typedef (func_type)->has_varargs ())
3734 return 0;
3735 /* The VFP ABI is only supported as a variant of AAPCS. */
3736 if (tdep->arm_abi != ARM_ABI_AAPCS)
3737 return 0;
3738 return tdep->fp_model == ARM_FLOAT_VFP;
3739 }
3740
3741 /* We currently only support passing parameters in integer registers, which
3742 conforms with GCC's default model, and VFP argument passing following
3743 the VFP variant of AAPCS. Several other variants exist and
3744 we should probably support some of them based on the selected ABI. */
3745
3746 static CORE_ADDR
3747 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
3748 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3749 struct value **args, CORE_ADDR sp,
3750 function_call_return_method return_method,
3751 CORE_ADDR struct_addr)
3752 {
3753 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3754 int argnum;
3755 int argreg;
3756 int nstack;
3757 struct stack_item *si = NULL;
3758 int use_vfp_abi;
3759 struct type *ftype;
3760 unsigned vfp_regs_free = (1 << 16) - 1;
3761
3762 /* Determine the type of this function and whether the VFP ABI
3763 applies. */
3764 ftype = check_typedef (value_type (function));
3765 if (ftype->code () == TYPE_CODE_PTR)
3766 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3767 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
3768
3769 /* Set the return address. For the ARM, the return breakpoint is
3770 always at BP_ADDR. */
3771 if (arm_pc_is_thumb (gdbarch, bp_addr))
3772 bp_addr |= 1;
3773 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
3774
3775 /* Walk through the list of args and determine how large a temporary
3776 stack is required. Need to take care here as structs may be
3777 passed on the stack, and we have to push them. */
3778 nstack = 0;
3779
3780 argreg = ARM_A1_REGNUM;
3782
3783 /* The struct_return pointer occupies the first parameter
3784 passing register. */
3785 if (return_method == return_method_struct)
3786 {
3787 if (arm_debug)
3788 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
3789 gdbarch_register_name (gdbarch, argreg),
3790 paddress (gdbarch, struct_addr));
3791 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3792 argreg++;
3793 }
3794
3795 for (argnum = 0; argnum < nargs; argnum++)
3796 {
3797 int len;
3798 struct type *arg_type;
3799 struct type *target_type;
3800 enum type_code typecode;
3801 const bfd_byte *val;
3802 int align;
3803 enum arm_vfp_cprc_base_type vfp_base_type;
3804 int vfp_base_count;
3805 int may_use_core_reg = 1;
3806
3807 arg_type = check_typedef (value_type (args[argnum]));
3808 len = TYPE_LENGTH (arg_type);
3809 target_type = TYPE_TARGET_TYPE (arg_type);
3810 typecode = arg_type->code ();
3811 val = value_contents (args[argnum]);
3812
3813 align = type_align (arg_type);
3814 /* Round alignment up to a whole number of words. */
3815 align = (align + ARM_INT_REGISTER_SIZE - 1)
3816 & ~(ARM_INT_REGISTER_SIZE - 1);
3817 /* Different ABIs have different maximum alignments. */
3818 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3819 {
3820 /* The APCS ABI only requires word alignment. */
3821 align = ARM_INT_REGISTER_SIZE;
3822 }
3823 else
3824 {
3825 /* The AAPCS requires at most doubleword alignment. */
3826 if (align > ARM_INT_REGISTER_SIZE * 2)
3827 align = ARM_INT_REGISTER_SIZE * 2;
3828 }
3829
3830 if (use_vfp_abi
3831 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3832 &vfp_base_count))
3833 {
3834 int regno;
3835 int unit_length;
3836 int shift;
3837 unsigned mask;
3838
3839 /* Because this is a CPRC it cannot go in a core register or
3840 cause a core register to be skipped for alignment.
3841 Either it goes in VFP registers and the rest of this loop
3842 iteration is skipped for this argument, or it goes on the
3843 stack (and the stack alignment code is correct for this
3844 case). */
3845 may_use_core_reg = 0;
3846
3847 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3848 shift = unit_length / 4;
3849 mask = (1 << (shift * vfp_base_count)) - 1;
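/* VFP_REGS_FREE has one bit per single-precision register s0..s15.
SHIFT is the number of S registers each element occupies (1 for 's',
2 for 'd', 4 for 'q'), and MASK covers the whole candidate, so the
scan below finds the lowest naturally aligned block of free VFP
argument registers that can hold this argument. */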
3850 for (regno = 0; regno < 16; regno += shift)
3851 if (((vfp_regs_free >> regno) & mask) == mask)
3852 break;
3853
3854 if (regno < 16)
3855 {
3856 int reg_char;
3857 int reg_scaled;
3858 int i;
3859
3860 vfp_regs_free &= ~(mask << regno);
3861 reg_scaled = regno / shift;
3862 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3863 for (i = 0; i < vfp_base_count; i++)
3864 {
3865 char name_buf[4];
3866 int regnum;
3867 if (reg_char == 'q')
3868 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
3869 val + i * unit_length);
3870 else
3871 {
3872 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3873 reg_char, reg_scaled + i);
3874 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3875 strlen (name_buf));
3876 regcache->cooked_write (regnum, val + i * unit_length);
3877 }
3878 }
3879 continue;
3880 }
3881 else
3882 {
3883 /* This CPRC could not go in VFP registers, so all VFP
3884 registers are now marked as used. */
3885 vfp_regs_free = 0;
3886 }
3887 }
3888
3889 /* Push stack padding for doubleword alignment. */
3890 if (nstack & (align - 1))
3891 {
3892 si = push_stack_item (si, val, ARM_INT_REGISTER_SIZE);
3893 nstack += ARM_INT_REGISTER_SIZE;
3894 }
3895
3896 /* Doubleword aligned quantities must go in even register pairs. */
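/* For instance, after a single 32-bit argument in r0, a following
64-bit argument starts in r2 and r1 is left unused. */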
3897 if (may_use_core_reg
3898 && argreg <= ARM_LAST_ARG_REGNUM
3899 && align > ARM_INT_REGISTER_SIZE
3900 && argreg & 1)
3901 argreg++;
3902
3903 /* If the argument is a pointer to a function, and it is a
3904 Thumb function, create a LOCAL copy of the value and set
3905 the THUMB bit in it. */
3906 if (TYPE_CODE_PTR == typecode
3907 && target_type != NULL
3908 && TYPE_CODE_FUNC == check_typedef (target_type)->code ())
3909 {
3910 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
3911 if (arm_pc_is_thumb (gdbarch, regval))
3912 {
3913 bfd_byte *copy = (bfd_byte *) alloca (len);
3914 store_unsigned_integer (copy, len, byte_order,
3915 MAKE_THUMB_ADDR (regval));
3916 val = copy;
3917 }
3918 }
3919
3920 /* Copy the argument to general registers or the stack in
3921 register-sized pieces. Large arguments are split between
3922 registers and stack. */
3923 while (len > 0)
3924 {
3925 int partial_len = len < ARM_INT_REGISTER_SIZE
3926 ? len : ARM_INT_REGISTER_SIZE;
3927 CORE_ADDR regval
3928 = extract_unsigned_integer (val, partial_len, byte_order);
3929
3930 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
3931 {
3932 /* The argument is being passed in a general purpose
3933 register. */
3934 if (byte_order == BFD_ENDIAN_BIG)
3935 regval <<= (ARM_INT_REGISTER_SIZE - partial_len) * 8;
3936 if (arm_debug)
3937 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
3938 argnum,
3939 gdbarch_register_name
3940 (gdbarch, argreg),
3941 phex (regval, ARM_INT_REGISTER_SIZE));
3942 regcache_cooked_write_unsigned (regcache, argreg, regval);
3943 argreg++;
3944 }
3945 else
3946 {
3947 gdb_byte buf[ARM_INT_REGISTER_SIZE];
3948
3949 memset (buf, 0, sizeof (buf));
3950 store_unsigned_integer (buf, partial_len, byte_order, regval);
3951
3952 /* Push the arguments onto the stack. */
3953 if (arm_debug)
3954 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3955 argnum, nstack);
3956 si = push_stack_item (si, buf, ARM_INT_REGISTER_SIZE);
3957 nstack += ARM_INT_REGISTER_SIZE;
3958 }
3959
3960 len -= partial_len;
3961 val += partial_len;
3962 }
3963 }
3964 /* If we have an odd number of words to push, then decrement the stack
3965 by one word now, so that the first stack argument will be dword aligned. */
3966 if (nstack & 4)
3967 sp -= 4;
3968
3969 while (si)
3970 {
3971 sp -= si->len;
3972 write_memory (sp, si->data, si->len);
3973 si = pop_stack_item (si);
3974 }
3975
3976 /* Finally, update the SP register. */
3977 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3978
3979 return sp;
3980 }
3981
3982
3983 /* Always align the frame to an 8-byte boundary. This is required on
3984 some platforms and harmless on the rest. */
3985
3986 static CORE_ADDR
3987 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3988 {
3989 /* Align the stack to eight bytes. */
3990 return sp & ~ (CORE_ADDR) 7;
3991 }
3992
3993 static void
3994 print_fpu_flags (struct ui_file *file, int flags)
3995 {
3996 if (flags & (1 << 0))
3997 fputs_filtered ("IVO ", file);
3998 if (flags & (1 << 1))
3999 fputs_filtered ("DVZ ", file);
4000 if (flags & (1 << 2))
4001 fputs_filtered ("OFL ", file);
4002 if (flags & (1 << 3))
4003 fputs_filtered ("UFL ", file);
4004 if (flags & (1 << 4))
4005 fputs_filtered ("INX ", file);
4006 fputc_filtered ('\n', file);
4007 }
4008
4009 /* Print interesting information about the floating point processor
4010 (if present) or emulator. */
4011 static void
4012 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
4013 struct frame_info *frame, const char *args)
4014 {
4015 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
4016 int type;
4017
4018 type = (status >> 24) & 127;
4019 if (status & (1 << 31))
4020 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
4021 else
4022 fprintf_filtered (file, _("Software FPU type %d\n"), type);
4023 /* i18n: [floating point unit] mask */
4024 fputs_filtered (_("mask: "), file);
4025 print_fpu_flags (file, status >> 16);
4026 /* i18n: [floating point unit] flags */
4027 fputs_filtered (_("flags: "), file);
4028 print_fpu_flags (file, status);
4029 }
4030
4031 /* Construct the ARM extended floating point type. */
4032 static struct type *
4033 arm_ext_type (struct gdbarch *gdbarch)
4034 {
4035 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4036
4037 if (!tdep->arm_ext_type)
4038 tdep->arm_ext_type
4039 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
4040 floatformats_arm_ext);
4041
4042 return tdep->arm_ext_type;
4043 }
4044
4045 static struct type *
4046 arm_neon_double_type (struct gdbarch *gdbarch)
4047 {
4048 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4049
4050 if (tdep->neon_double_type == NULL)
4051 {
4052 struct type *t, *elem;
4053
4054 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
4055 TYPE_CODE_UNION);
4056 elem = builtin_type (gdbarch)->builtin_uint8;
4057 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
4058 elem = builtin_type (gdbarch)->builtin_uint16;
4059 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
4060 elem = builtin_type (gdbarch)->builtin_uint32;
4061 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
4062 elem = builtin_type (gdbarch)->builtin_uint64;
4063 append_composite_type_field (t, "u64", elem);
4064 elem = builtin_type (gdbarch)->builtin_float;
4065 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
4066 elem = builtin_type (gdbarch)->builtin_double;
4067 append_composite_type_field (t, "f64", elem);
4068
4069 t->set_is_vector (true);
4070 t->set_name ("neon_d");
4071 tdep->neon_double_type = t;
4072 }
4073
4074 return tdep->neon_double_type;
4075 }
4076
4077 /* FIXME: The vector types are not correctly ordered on big-endian
4078 targets. Just as s0 is the low bits of d0, d0[0] is also the low
4079 bits of d0 - regardless of what unit size is being held in d0. So
4080 the offset of the first uint8 in d0 is 7, but the offset of the
4081 first float is 4. This code works as-is for little-endian
4082 targets. */
4083
4084 static struct type *
4085 arm_neon_quad_type (struct gdbarch *gdbarch)
4086 {
4087 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4088
4089 if (tdep->neon_quad_type == NULL)
4090 {
4091 struct type *t, *elem;
4092
4093 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
4094 TYPE_CODE_UNION);
4095 elem = builtin_type (gdbarch)->builtin_uint8;
4096 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
4097 elem = builtin_type (gdbarch)->builtin_uint16;
4098 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
4099 elem = builtin_type (gdbarch)->builtin_uint32;
4100 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
4101 elem = builtin_type (gdbarch)->builtin_uint64;
4102 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
4103 elem = builtin_type (gdbarch)->builtin_float;
4104 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
4105 elem = builtin_type (gdbarch)->builtin_double;
4106 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
4107
4108 t->set_is_vector (true);
4109 t->set_name ("neon_q");
4110 tdep->neon_quad_type = t;
4111 }
4112
4113 return tdep->neon_quad_type;
4114 }
4115
4116 /* Return the GDB type object for the "standard" data type of data in
4117 register N. */
4118
4119 static struct type *
4120 arm_register_type (struct gdbarch *gdbarch, int regnum)
4121 {
4122 int num_regs = gdbarch_num_regs (gdbarch);
4123
4124 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
4125 && regnum >= num_regs && regnum < num_regs + 32)
4126 return builtin_type (gdbarch)->builtin_float;
4127
4128 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
4129 && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
4130 return arm_neon_quad_type (gdbarch);
4131
4132 /* If the target description has register information, we are only
4133 in this function so that we can override the types of
4134 double-precision registers for NEON. */
4135 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
4136 {
4137 struct type *t = tdesc_register_type (gdbarch, regnum);
4138
4139 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
4140 && t->code () == TYPE_CODE_FLT
4141 && gdbarch_tdep (gdbarch)->have_neon)
4142 return arm_neon_double_type (gdbarch);
4143 else
4144 return t;
4145 }
4146
4147 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
4148 {
4149 if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
4150 return builtin_type (gdbarch)->builtin_void;
4151
4152 return arm_ext_type (gdbarch);
4153 }
4154 else if (regnum == ARM_SP_REGNUM)
4155 return builtin_type (gdbarch)->builtin_data_ptr;
4156 else if (regnum == ARM_PC_REGNUM)
4157 return builtin_type (gdbarch)->builtin_func_ptr;
4158 else if (regnum >= ARRAY_SIZE (arm_register_names))
4159 /* These registers are only supported on targets which supply
4160 an XML description. */
4161 return builtin_type (gdbarch)->builtin_int0;
4162 else
4163 return builtin_type (gdbarch)->builtin_uint32;
4164 }
4165
4166 /* Map a DWARF register REGNUM onto the appropriate GDB register
4167 number. */
4168
4169 static int
4170 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
4171 {
4172 /* Core integer regs. */
4173 if (reg >= 0 && reg <= 15)
4174 return reg;
4175
4176 /* Legacy FPA encoding. These were once used in a way which
4177 overlapped with VFP register numbering, so their use is
4178 discouraged, but GDB doesn't support the ARM toolchain
4179 which used them for VFP. */
4180 if (reg >= 16 && reg <= 23)
4181 return ARM_F0_REGNUM + reg - 16;
4182
4183 /* New assignments for the FPA registers. */
4184 if (reg >= 96 && reg <= 103)
4185 return ARM_F0_REGNUM + reg - 96;
4186
4187 /* WMMX register assignments. */
4188 if (reg >= 104 && reg <= 111)
4189 return ARM_WCGR0_REGNUM + reg - 104;
4190
4191 if (reg >= 112 && reg <= 127)
4192 return ARM_WR0_REGNUM + reg - 112;
4193
4194 if (reg >= 192 && reg <= 199)
4195 return ARM_WC0_REGNUM + reg - 192;
4196
4197 /* VFP v2 registers. A double precision value is actually
4198 in d1 rather than s2, but the ABI only defines numbering
4199 for the single precision registers. This will "just work"
4200 in GDB for little endian targets (we'll read eight bytes,
4201 starting in s0 and then progressing to s1), but will be
4202 reversed on big endian targets with VFP. This won't
4203 be a problem for the new Neon quad registers; you're supposed
4204 to use DW_OP_piece for those. */
4205 if (reg >= 64 && reg <= 95)
4206 {
4207 char name_buf[4];
4208
4209 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
4210 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4211 strlen (name_buf));
4212 }
4213
4214 /* VFP v3 / Neon registers. This range is also used for VFP v2
4215 registers, except that it now describes d0 instead of s0. */
4216 if (reg >= 256 && reg <= 287)
4217 {
4218 char name_buf[4];
4219
4220 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
4221 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4222 strlen (name_buf));
4223 }
4224
4225 return -1;
4226 }
4227
4228 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4229 static int
4230 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4231 {
4232 int reg = regnum;
4233 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
4234
4235 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4236 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4237
4238 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4239 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4240
4241 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4242 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4243
4244 if (reg < NUM_GREGS)
4245 return SIM_ARM_R0_REGNUM + reg;
4246 reg -= NUM_GREGS;
4247
4248 if (reg < NUM_FREGS)
4249 return SIM_ARM_FP0_REGNUM + reg;
4250 reg -= NUM_FREGS;
4251
4252 if (reg < NUM_SREGS)
4253 return SIM_ARM_FPS_REGNUM + reg;
4254 reg -= NUM_SREGS;
4255
4256 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4257 }
4258
4259 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
4260 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
4261 NULL if an error occurs. BUF is freed. */
4262
4263 static gdb_byte *
4264 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
4265 int old_len, int new_len)
4266 {
4267 gdb_byte *new_buf;
4268 int bytes_to_read = new_len - old_len;
4269
4270 new_buf = (gdb_byte *) xmalloc (new_len);
4271 memcpy (new_buf + bytes_to_read, buf, old_len);
4272 xfree (buf);
4273 if (target_read_code (endaddr - new_len, new_buf, bytes_to_read) != 0)
4274 {
4275 xfree (new_buf);
4276 return NULL;
4277 }
4278 return new_buf;
4279 }
4280
4281 /* An IT block is at most the 2-byte IT instruction followed by
4282 four 4-byte instructions. The furthest back we must search to
4283 find an IT block that affects the current instruction is thus
4284 2 + 3 * 4 == 14 bytes. */
4285 #define MAX_IT_BLOCK_PREFIX 14
4286
4287 /* Use a quick scan if there are more than this many bytes of
4288 code. */
4289 #define IT_SCAN_THRESHOLD 32
4290
4291 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
4292 A breakpoint in an IT block may not be hit, depending on the
4293 condition flags. */
4294 static CORE_ADDR
4295 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
4296 {
4297 gdb_byte *buf;
4298 char map_type;
4299 CORE_ADDR boundary, func_start;
4300 int buf_len;
4301 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
4302 int i, any, last_it, last_it_count;
4303
4304 /* If we are using BKPT breakpoints, none of this is necessary. */
4305 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
4306 return bpaddr;
4307
4308 /* ARM mode does not have this problem. */
4309 if (!arm_pc_is_thumb (gdbarch, bpaddr))
4310 return bpaddr;
4311
4312 /* We are setting a breakpoint in Thumb code that could potentially
4313 contain an IT block. The first step is to find how much Thumb
4314 code there is; we do not need to read outside of known Thumb
4315 sequences. */
4316 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
4317 if (map_type == 0)
4318 /* Thumb-2 code must have mapping symbols to have a chance. */
4319 return bpaddr;
4320
4321 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
4322
4323 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
4324 && func_start > boundary)
4325 boundary = func_start;
4326
4327 /* Search for a candidate IT instruction. We have to do some fancy
4328 footwork to distinguish a real IT instruction from the second
4329 half of a 32-bit instruction, but there is no need for that if
4330 there's no candidate. */
4331 buf_len = std::min (bpaddr - boundary, (CORE_ADDR) MAX_IT_BLOCK_PREFIX);
4332 if (buf_len == 0)
4333 /* No room for an IT instruction. */
4334 return bpaddr;
4335
4336 buf = (gdb_byte *) xmalloc (buf_len);
4337 if (target_read_code (bpaddr - buf_len, buf, buf_len) != 0)
4338 {
xfree (buf);
return bpaddr;
}
4339 any = 0;
4340 for (i = 0; i < buf_len; i += 2)
4341 {
4342 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
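/* 0xbfxy with a nonzero low nibble (the IT mask) is an IT
instruction; with a zero mask it would be a NOP-compatible hint. */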
4343 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4344 {
4345 any = 1;
4346 break;
4347 }
4348 }
4349
4350 if (any == 0)
4351 {
4352 xfree (buf);
4353 return bpaddr;
4354 }
4355
4356 /* OK, the code bytes before this instruction contain at least one
4357 halfword which resembles an IT instruction. We know that it's
4358 Thumb code, but there are still two possibilities. Either the
4359 halfword really is an IT instruction, or it is the second half of
4360 a 32-bit Thumb instruction. The only way we can tell is to
4361 scan forwards from a known instruction boundary. */
4362 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
4363 {
4364 int definite;
4365
4366 /* There's a lot of code before this instruction. Start with an
4367 optimistic search; it's easy to recognize halfwords that can
4368 not be the start of a 32-bit instruction, and use that to
4369 lock on to the instruction boundaries. */
4370 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
4371 if (buf == NULL)
4372 return bpaddr;
4373 buf_len = IT_SCAN_THRESHOLD;
4374
4375 definite = 0;
4376 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
4377 {
4378 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4379 if (thumb_insn_size (inst1) == 2)
4380 {
4381 definite = 1;
4382 break;
4383 }
4384 }
4385
4386 /* At this point, if DEFINITE, BUF[I] is the first place we
4387 are sure that we know the instruction boundaries, and it is far
4388 enough from BPADDR that we could not miss an IT instruction
4389 affecting BPADDR. If ! DEFINITE, give up - start from a
4390 known boundary. */
4391 if (! definite)
4392 {
4393 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
4394 bpaddr - boundary);
4395 if (buf == NULL)
4396 return bpaddr;
4397 buf_len = bpaddr - boundary;
4398 i = 0;
4399 }
4400 }
4401 else
4402 {
4403 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
4404 if (buf == NULL)
4405 return bpaddr;
4406 buf_len = bpaddr - boundary;
4407 i = 0;
4408 }
4409
4410 /* Scan forwards. Find the last IT instruction before BPADDR. */
4411 last_it = -1;
4412 last_it_count = 0;
4413 while (i < buf_len)
4414 {
4415 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4416 last_it_count--;
4417 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4418 {
4419 last_it = i;
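/* The least-significant set bit of the IT mask encodes the block
length: bit 0 set means four instructions, bit 1 three, bit 2 two,
otherwise one. */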
4420 if (inst1 & 0x0001)
4421 last_it_count = 4;
4422 else if (inst1 & 0x0002)
4423 last_it_count = 3;
4424 else if (inst1 & 0x0004)
4425 last_it_count = 2;
4426 else
4427 last_it_count = 1;
4428 }
4429 i += thumb_insn_size (inst1);
4430 }
4431
4432 xfree (buf);
4433
4434 if (last_it == -1)
4435 /* There wasn't really an IT instruction after all. */
4436 return bpaddr;
4437
4438 if (last_it_count < 1)
4439 /* It was too far away. */
4440 return bpaddr;
4441
4442 /* This really is a trouble spot. Move the breakpoint to the IT
4443 instruction. */
4444 return bpaddr - buf_len + last_it;
4445 }
4446
4447 /* ARM displaced stepping support.
4448
4449 Generally ARM displaced stepping works as follows:
4450
4451 1. When an instruction is to be single-stepped, it is first decoded by
4452 arm_process_displaced_insn. Depending on the type of instruction, it is
4453 then copied to a scratch location, possibly in a modified form. The
4454 copy_* set of functions performs such modification, as necessary. A
4455 breakpoint is placed after the modified instruction in the scratch space
4456 to return control to GDB. Note in particular that instructions which
4457 modify the PC will no longer do so after modification.
4458
4459 2. The instruction is single-stepped, by setting the PC to the scratch
4460 location address, and resuming. Control returns to GDB when the
4461 breakpoint is hit.
4462
4463 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
4464 function used for the current instruction. This function's job is to
4465 put the CPU/memory state back to what it would have been if the
4466 instruction had been executed unmodified in its original location. */
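/* As an informal example, a conditional "bl <label>" is copied to the
scratch space as a NOP (see install_b_bl_blx below); its cleanup
routine then, if the condition passed, writes the return address into
LR and the branch destination into the PC, just as the original
instruction would have done at its original address. */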
4467
4468 /* NOP instruction (mov r0, r0). */
4469 #define ARM_NOP 0xe1a00000
4470 #define THUMB_NOP 0x4600
4471
4472 /* Helper for register reads for displaced stepping. In particular, this
4473 returns the PC as it would be seen by the instruction at its original
4474 location. */
4475
4476 ULONGEST
4477 displaced_read_reg (struct regcache *regs, arm_displaced_step_closure *dsc,
4478 int regno)
4479 {
4480 ULONGEST ret;
4481 CORE_ADDR from = dsc->insn_addr;
4482
4483 if (regno == ARM_PC_REGNUM)
4484 {
4485 /* Compute pipeline offset:
4486 - When executing an ARM instruction, PC reads as the address of the
4487 current instruction plus 8.
4488 - When executing a Thumb instruction, PC reads as the address of the
4489 current instruction plus 4. */
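/* E.g. for an ARM-state instruction originally at 0x8000, the value
read for the PC is 0x8008, regardless of where the copy executes. */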
4490
4491 if (!dsc->is_thumb)
4492 from += 8;
4493 else
4494 from += 4;
4495
4496 displaced_debug_printf ("read pc value %.8lx",
4497 (unsigned long) from);
4498 return (ULONGEST) from;
4499 }
4500 else
4501 {
4502 regcache_cooked_read_unsigned (regs, regno, &ret);
4503
4504 displaced_debug_printf ("read r%d value %.8lx",
4505 regno, (unsigned long) ret);
4506
4507 return ret;
4508 }
4509 }
4510
4511 static int
4512 displaced_in_arm_mode (struct regcache *regs)
4513 {
4514 ULONGEST ps;
4515 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
4516
4517 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4518
4519 return (ps & t_bit) == 0;
4520 }
4521
4522 /* Write to the PC as from a branch instruction. */
4523
4524 static void
4525 branch_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4526 ULONGEST val)
4527 {
4528 if (!dsc->is_thumb)
4529 /* Note: If bits 0/1 are set, this branch would be unpredictable for
4530 architecture versions < 6. */
4531 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4532 val & ~(ULONGEST) 0x3);
4533 else
4534 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4535 val & ~(ULONGEST) 0x1);
4536 }
4537
4538 /* Write to the PC as from a branch-exchange instruction. */
4539
4540 static void
4541 bx_write_pc (struct regcache *regs, ULONGEST val)
4542 {
4543 ULONGEST ps;
4544 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
4545
4546 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4547
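/* Bit 0 of the destination selects the instruction set: 1 means
Thumb, 0 means ARM (in which case bit 1 must also be clear for a
well-defined result). */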
4548 if ((val & 1) == 1)
4549 {
4550 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
4551 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
4552 }
4553 else if ((val & 2) == 0)
4554 {
4555 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4556 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
4557 }
4558 else
4559 {
4560 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
4561 mode, align dest to 4 bytes). */
4562 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
4563 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4564 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
4565 }
4566 }
4567
4568 /* Write to the PC as if from a load instruction. */
4569
4570 static void
4571 load_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4572 ULONGEST val)
4573 {
4574 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
4575 bx_write_pc (regs, val);
4576 else
4577 branch_write_pc (regs, dsc, val);
4578 }
4579
4580 /* Write to the PC as if from an ALU instruction. */
4581
4582 static void
4583 alu_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4584 ULONGEST val)
4585 {
4586 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
4587 bx_write_pc (regs, val);
4588 else
4589 branch_write_pc (regs, dsc, val);
4590 }
4591
4592 /* Helper for writing to registers for displaced stepping. Writing to the PC
4593 has varying effects depending on the instruction which does the write:
4594 this is controlled by the WRITE_PC argument. */
4595
4596 void
4597 displaced_write_reg (struct regcache *regs, arm_displaced_step_closure *dsc,
4598 int regno, ULONGEST val, enum pc_write_style write_pc)
4599 {
4600 if (regno == ARM_PC_REGNUM)
4601 {
4602 displaced_debug_printf ("writing pc %.8lx", (unsigned long) val);
4603
4604 switch (write_pc)
4605 {
4606 case BRANCH_WRITE_PC:
4607 branch_write_pc (regs, dsc, val);
4608 break;
4609
4610 case BX_WRITE_PC:
4611 bx_write_pc (regs, val);
4612 break;
4613
4614 case LOAD_WRITE_PC:
4615 load_write_pc (regs, dsc, val);
4616 break;
4617
4618 case ALU_WRITE_PC:
4619 alu_write_pc (regs, dsc, val);
4620 break;
4621
4622 case CANNOT_WRITE_PC:
4623 warning (_("Instruction wrote to PC in an unexpected way when "
4624 "single-stepping"));
4625 break;
4626
4627 default:
4628 internal_error (__FILE__, __LINE__,
4629 _("Invalid argument to displaced_write_reg"));
4630 }
4631
4632 dsc->wrote_to_pc = 1;
4633 }
4634 else
4635 {
4636 displaced_debug_printf ("writing r%d value %.8lx",
4637 regno, (unsigned long) val);
4638 regcache_cooked_write_unsigned (regs, regno, val);
4639 }
4640 }
4641
4642 /* This function is used to concisely determine if an instruction INSN
4643 references PC. Register fields of interest in INSN should have the
4644 corresponding fields of BITMASK set to 0b1111. The function
4645 returns 1 if any of these fields in INSN reference the PC
4646 (also 0b1111, r15), else it returns 0. */
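/* For example, insn_references_pc (insn, 0x000f0000ul) is nonzero
exactly when bits 16-19 of INSN (the Rn field) are all ones, i.e.
when Rn is the PC. */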
4647
4648 static int
4649 insn_references_pc (uint32_t insn, uint32_t bitmask)
4650 {
4651 uint32_t lowbit = 1;
4652
4653 while (bitmask != 0)
4654 {
4655 uint32_t mask;
4656
4657 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
4658 ;
4659
4660 if (!lowbit)
4661 break;
4662
4663 mask = lowbit * 0xf;
4664
4665 if ((insn & mask) == mask)
4666 return 1;
4667
4668 bitmask &= ~mask;
4669 }
4670
4671 return 0;
4672 }
4673
4674 /* The simplest copy function. Many instructions have the same effect no
4675 matter what address they are executed at: in those cases, use this. */
4676
4677 static int
4678 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
4679 const char *iname, arm_displaced_step_closure *dsc)
4680 {
4681 displaced_debug_printf ("copying insn %.8lx, opcode/class '%s' unmodified",
4682 (unsigned long) insn, iname);
4683
4684 dsc->modinsn[0] = insn;
4685
4686 return 0;
4687 }
4688
4689 static int
4690 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
4691 uint16_t insn2, const char *iname,
4692 arm_displaced_step_closure *dsc)
4693 {
4694 displaced_debug_printf ("copying insn %.4x %.4x, opcode/class '%s' "
4695 "unmodified", insn1, insn2, iname);
4696
4697 dsc->modinsn[0] = insn1;
4698 dsc->modinsn[1] = insn2;
4699 dsc->numinsns = 2;
4700
4701 return 0;
4702 }
4703
4704 /* Copy a 16-bit Thumb (Thumb and 16-bit Thumb-2) instruction without
4705 any modification. */
4706 static int
4707 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, uint16_t insn,
4708 const char *iname,
4709 arm_displaced_step_closure *dsc)
4710 {
4711 displaced_debug_printf ("copying insn %.4x, opcode/class '%s' unmodified",
4712 insn, iname);
4713
4714 dsc->modinsn[0] = insn;
4715
4716 return 0;
4717 }
4718
4719 /* Preload instructions with immediate offset. */
4720
4721 static void
4722 cleanup_preload (struct gdbarch *gdbarch,
4723 struct regcache *regs, arm_displaced_step_closure *dsc)
4724 {
4725 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4726 if (!dsc->u.preload.immed)
4727 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
4728 }
4729
4730 static void
4731 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
4732 arm_displaced_step_closure *dsc, unsigned int rn)
4733 {
4734 ULONGEST rn_val;
4735 /* Preload instructions:
4736
4737 {pli/pld} [rn, #+/-imm]
4738 ->
4739 {pli/pld} [r0, #+/-imm]. */
4740
4741 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4742 rn_val = displaced_read_reg (regs, dsc, rn);
4743 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4744 dsc->u.preload.immed = 1;
4745
4746 dsc->cleanup = &cleanup_preload;
4747 }
4748
4749 static int
4750 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
4751 arm_displaced_step_closure *dsc)
4752 {
4753 unsigned int rn = bits (insn, 16, 19);
4754
4755 if (!insn_references_pc (insn, 0x000f0000ul))
4756 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
4757
4758 displaced_debug_printf ("copying preload insn %.8lx", (unsigned long) insn);
4759
4760 dsc->modinsn[0] = insn & 0xfff0ffff;
4761
4762 install_preload (gdbarch, regs, dsc, rn);
4763
4764 return 0;
4765 }
4766
4767 static int
4768 thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
4769 struct regcache *regs, arm_displaced_step_closure *dsc)
4770 {
4771 unsigned int rn = bits (insn1, 0, 3);
4772 unsigned int u_bit = bit (insn1, 7);
4773 int imm12 = bits (insn2, 0, 11);
4774 ULONGEST pc_val;
4775
4776 if (rn != ARM_PC_REGNUM)
4777 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
4778
4779 /* PC is only allowed to be used in PLI (immediate, literal) Encoding T3
4780 and PLD (literal) Encoding T1. */
4781 displaced_debug_printf ("copying pld/pli pc (0x%x) %c imm12 %.4x",
4782 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
4783 imm12);
4784
4785 if (!u_bit)
4786 imm12 = -1 * imm12;
4787
4788 /* Rewrite instruction {pli/pld} PC imm12 into:
4789 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
4790
4791 {pli/pld} [r0, r1]
4792
4793 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
4794
4795 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4796 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4797
4798 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
4799
4800 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
4801 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
4802 dsc->u.preload.immed = 0;
4803
4804 /* {pli/pld} [r0, r1] */
4805 dsc->modinsn[0] = insn1 & 0xfff0;
4806 dsc->modinsn[1] = 0xf001;
4807 dsc->numinsns = 2;
4808
4809 dsc->cleanup = &cleanup_preload;
4810 return 0;
4811 }
4812
4813 /* Preload instructions with register offset. */
4814
4815 static void
4816 install_preload_reg (struct gdbarch *gdbarch, struct regcache *regs,
4817 arm_displaced_step_closure *dsc, unsigned int rn,
4818 unsigned int rm)
4819 {
4820 ULONGEST rn_val, rm_val;
4821
4822 /* Preload register-offset instructions:
4823
4824 {pli/pld} [rn, rm {, shift}]
4825 ->
4826 {pli/pld} [r0, r1 {, shift}]. */
4827
4828 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4829 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4830 rn_val = displaced_read_reg (regs, dsc, rn);
4831 rm_val = displaced_read_reg (regs, dsc, rm);
4832 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4833 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
4834 dsc->u.preload.immed = 0;
4835
4836 dsc->cleanup = &cleanup_preload;
4837 }
4838
4839 static int
4840 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
4841 struct regcache *regs,
4842 arm_displaced_step_closure *dsc)
4843 {
4844 unsigned int rn = bits (insn, 16, 19);
4845 unsigned int rm = bits (insn, 0, 3);
4846
4847
4848 if (!insn_references_pc (insn, 0x000f000ful))
4849 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
4850
4851 displaced_debug_printf ("copying preload insn %.8lx",
4852 (unsigned long) insn);
4853
4854 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
4855
4856 install_preload_reg (gdbarch, regs, dsc, rn, rm);
4857 return 0;
4858 }
4859
4860 /* Copy/cleanup coprocessor load and store instructions. */
4861
4862 static void
4863 cleanup_copro_load_store (struct gdbarch *gdbarch,
4864 struct regcache *regs,
4865 arm_displaced_step_closure *dsc)
4866 {
4867 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
4868
4869 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4870
4871 if (dsc->u.ldst.writeback)
4872 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
4873 }
4874
4875 static void
4876 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
4877 arm_displaced_step_closure *dsc,
4878 int writeback, unsigned int rn)
4879 {
4880 ULONGEST rn_val;
4881
4882 /* Coprocessor load/store instructions:
4883
4884 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
4885 ->
4886 {stc/stc2} [r0, #+/-imm].
4887
4888 ldc/ldc2 are handled identically. */
4889
4890 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4891 rn_val = displaced_read_reg (regs, dsc, rn);
4892 /* PC should be 4-byte aligned. */
4893 rn_val = rn_val & 0xfffffffc;
4894 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4895
4896 dsc->u.ldst.writeback = writeback;
4897 dsc->u.ldst.rn = rn;
4898
4899 dsc->cleanup = &cleanup_copro_load_store;
4900 }
4901
4902 static int
4903 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
4904 struct regcache *regs,
4905 arm_displaced_step_closure *dsc)
4906 {
4907 unsigned int rn = bits (insn, 16, 19);
4908
4909 if (!insn_references_pc (insn, 0x000f0000ul))
4910 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
4911
4912 displaced_debug_printf ("copying coprocessor load/store insn %.8lx",
4913 (unsigned long) insn);
4914
4915 dsc->modinsn[0] = insn & 0xfff0ffff;
4916
4917 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
4918
4919 return 0;
4920 }
4921
4922 static int
4923 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
4924 uint16_t insn2, struct regcache *regs,
4925 arm_displaced_step_closure *dsc)
4926 {
4927 unsigned int rn = bits (insn1, 0, 3);
4928
4929 if (rn != ARM_PC_REGNUM)
4930 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
4931 "copro load/store", dsc);
4932
4933 displaced_debug_printf ("copying coprocessor load/store insn %.4x%.4x",
4934 insn1, insn2);
4935
4936 dsc->modinsn[0] = insn1 & 0xfff0;
4937 dsc->modinsn[1] = insn2;
4938 dsc->numinsns = 2;
4939
4940 /* This function is called to copy the LDC/LDC2/VLDR instructions, which
4941 do not support writeback, so pass 0. */
4942 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
4943
4944 return 0;
4945 }
4946
4947 /* Clean up branch instructions (actually perform the branch, by setting
4948 PC). */
4949
4950 static void
4951 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
4952 arm_displaced_step_closure *dsc)
4953 {
4954 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
4955 int branch_taken = condition_true (dsc->u.branch.cond, status);
4956 enum pc_write_style write_pc = dsc->u.branch.exchange
4957 ? BX_WRITE_PC : BRANCH_WRITE_PC;
4958
4959 if (!branch_taken)
4960 return;
4961
4962 if (dsc->u.branch.link)
4963 {
4964 /* The value of LR should be the address of the insn following the
4965 current one. To avoid confusing later handling of `bx lr', if the
4966 current insn is Thumb, bit 0 of the LR value should be set to 1. */
4967 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
4968
4969 if (dsc->is_thumb)
4970 next_insn_addr |= 0x1;
4971
4972 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
4973 CANNOT_WRITE_PC);
4974 }
4975
4976 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
4977 }
4978
4979 /* Copy B/BL/BLX instructions with immediate destinations. */
4980
4981 static void
4982 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
4983 arm_displaced_step_closure *dsc,
4984 unsigned int cond, int exchange, int link, long offset)
4985 {
4986 /* Implement "BL<cond> <label>" as:
4987
4988 Preparation: cond <- instruction condition
4989 Insn: mov r0, r0 (nop)
4990 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
4991
4992 B<cond> similar, but don't set r14 in cleanup. */
4993
4994 dsc->u.branch.cond = cond;
4995 dsc->u.branch.link = link;
4996 dsc->u.branch.exchange = exchange;
4997
4998 dsc->u.branch.dest = dsc->insn_addr;
4999 if (link && exchange)
5000 /* For BLX, the offset is computed from Align (PC, 4). */
5001 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
5002
5003 if (dsc->is_thumb)
5004 dsc->u.branch.dest += 4 + offset;
5005 else
5006 dsc->u.branch.dest += 8 + offset;
5007
5008 dsc->cleanup = &cleanup_branch;
5009 }
5010 static int
5011 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
5012 struct regcache *regs, arm_displaced_step_closure *dsc)
5013 {
5014 unsigned int cond = bits (insn, 28, 31);
5015 int exchange = (cond == 0xf);
5016 int link = exchange || bit (insn, 24);
5017 long offset;
5018
5019 displaced_debug_printf ("copying %s immediate insn %.8lx",
5020 (exchange) ? "blx" : (link) ? "bl" : "b",
5021 (unsigned long) insn);
5022 if (exchange)
5023 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
5024 then arrange the switch into Thumb mode. */
5025 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
5026 else
5027 offset = bits (insn, 0, 23) << 2;
5028
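/* Sign-extend the 26-bit byte offset (bit 25 is the sign bit). */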
5029 if (bit (offset, 25))
5030 offset = offset | ~0x3ffffff;
5031
5032 dsc->modinsn[0] = ARM_NOP;
5033
5034 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
5035 return 0;
5036 }
5037
5038 static int
5039 thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
5040 uint16_t insn2, struct regcache *regs,
5041 arm_displaced_step_closure *dsc)
5042 {
5043 int link = bit (insn2, 14);
5044 int exchange = link && !bit (insn2, 12);
5045 int cond = INST_AL;
5046 long offset = 0;
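/* Reassemble the branch offset from the Thumb-2 encoding: I1 and I2
are NOT(J1 EOR S) and NOT(J2 EOR S) respectively, and together with S
they supply the most significant bits of the offset. */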
5047 int j1 = bit (insn2, 13);
5048 int j2 = bit (insn2, 11);
5049 int s = sbits (insn1, 10, 10);
5050 int i1 = !(j1 ^ bit (insn1, 10));
5051 int i2 = !(j2 ^ bit (insn1, 10));
5052
5053 if (!link && !exchange) /* B */
5054 {
5055 offset = (bits (insn2, 0, 10) << 1);
5056 if (bit (insn2, 12)) /* Encoding T4 */
5057 {
5058 offset |= (bits (insn1, 0, 9) << 12)
5059 | (i2 << 22)
5060 | (i1 << 23)
5061 | (s << 24);
5062 cond = INST_AL;
5063 }
5064 else /* Encoding T3 */
5065 {
5066 offset |= (bits (insn1, 0, 5) << 12)
5067 | (j1 << 18)
5068 | (j2 << 19)
5069 | (s << 20);
5070 cond = bits (insn1, 6, 9);
5071 }
5072 }
5073 else
5074 {
5075 offset = (bits (insn1, 0, 9) << 12);
5076 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
5077 offset |= exchange ?
5078 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
5079 }
5080
5081 displaced_debug_printf ("copying %s insn %.4x %.4x with offset %.8lx",
5082 link ? (exchange) ? "blx" : "bl" : "b",
5083 insn1, insn2, offset);
5084
5085 dsc->modinsn[0] = THUMB_NOP;
5086
5087 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
5088 return 0;
5089 }
5090
5091 /* Copy B Thumb instructions. */
5092 static int
5093 thumb_copy_b (struct gdbarch *gdbarch, uint16_t insn,
5094 arm_displaced_step_closure *dsc)
5095 {
5096 unsigned int cond = 0;
5097 int offset = 0;
5098 unsigned short bit_12_15 = bits (insn, 12, 15);
5099 CORE_ADDR from = dsc->insn_addr;
5100
5101 if (bit_12_15 == 0xd)
5102 {
5103 /* offset = SignExtend (imm8:0, 32) */
5104 offset = sbits ((insn << 1), 0, 8);
5105 cond = bits (insn, 8, 11);
5106 }
5107 else if (bit_12_15 == 0xe) /* Encoding T2 */
5108 {
5109 offset = sbits ((insn << 1), 0, 11);
5110 cond = INST_AL;
5111 }
5112
5113 displaced_debug_printf ("copying b immediate insn %.4x with offset %d",
5114 insn, offset);
5115
5116 dsc->u.branch.cond = cond;
5117 dsc->u.branch.link = 0;
5118 dsc->u.branch.exchange = 0;
5119 dsc->u.branch.dest = from + 4 + offset;
5120
5121 dsc->modinsn[0] = THUMB_NOP;
5122
5123 dsc->cleanup = &cleanup_branch;
5124
5125 return 0;
5126 }
5127
5128 /* Copy BX/BLX with register-specified destinations. */
5129
5130 static void
5131 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
5132 arm_displaced_step_closure *dsc, int link,
5133 unsigned int cond, unsigned int rm)
5134 {
5135 /* Implement "{BX,BLX}<cond> <reg>" as:
5136
5137 Preparation: cond <- instruction condition
5138 Insn: mov r0, r0 (nop)
5139 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5140
5141 Don't set r14 in cleanup for BX. */
5142
5143 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
5144
5145 dsc->u.branch.cond = cond;
5146 dsc->u.branch.link = link;
5147
5148 dsc->u.branch.exchange = 1;
5149
5150 dsc->cleanup = &cleanup_branch;
5151 }
5152
5153 static int
5154 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
5155 struct regcache *regs, arm_displaced_step_closure *dsc)
5156 {
5157 unsigned int cond = bits (insn, 28, 31);
5158 /* BX: x12xxx1x
5159 BLX: x12xxx3x. */
5160 int link = bit (insn, 5);
5161 unsigned int rm = bits (insn, 0, 3);
5162
5163 displaced_debug_printf ("copying insn %.8lx", (unsigned long) insn);
5164
5165 dsc->modinsn[0] = ARM_NOP;
5166
5167 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
5168 return 0;
5169 }
5170
5171 static int
5172 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
5173 struct regcache *regs,
5174 arm_displaced_step_closure *dsc)
5175 {
5176 int link = bit (insn, 7);
5177 unsigned int rm = bits (insn, 3, 6);
5178
5179 displaced_debug_printf ("copying insn %.4x", (unsigned short) insn);
5180
5181 dsc->modinsn[0] = THUMB_NOP;
5182
5183 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
5184
5185 return 0;
5186 }
5187
5188
5189 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
5190
5191 static void
5192 cleanup_alu_imm (struct gdbarch *gdbarch,
5193 struct regcache *regs, arm_displaced_step_closure *dsc)
5194 {
5195 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5196 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5197 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5198 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5199 }
5200
5201 static int
5202 arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5203 arm_displaced_step_closure *dsc)
5204 {
5205 unsigned int rn = bits (insn, 16, 19);
5206 unsigned int rd = bits (insn, 12, 15);
5207 unsigned int op = bits (insn, 21, 24);
5208 int is_mov = (op == 0xd);
5209 ULONGEST rd_val, rn_val;
5210
5211 if (!insn_references_pc (insn, 0x000ff000ul))
5212 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
5213
5214 displaced_debug_printf ("copying immediate %s insn %.8lx",
5215 is_mov ? "move" : "ALU",
5216 (unsigned long) insn);
5217
5218 /* Instruction is of form:
5219
5220 <op><cond> rd, [rn,] #imm
5221
5222 Rewrite as:
5223
5224 Preparation: tmp1, tmp2 <- r0, r1;
5225 r0, r1 <- rd, rn
5226 Insn: <op><cond> r0, r1, #imm
5227 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5228 */
5229
5230 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5231 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5232 rn_val = displaced_read_reg (regs, dsc, rn);
5233 rd_val = displaced_read_reg (regs, dsc, rd);
5234 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5235 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5236 dsc->rd = rd;
5237
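/* Encode <op><cond> r0, [r1,] #imm: clear the Rd field (bits 12-15)
and, for anything but MOV, set the Rn field (bits 16-19) to r1. */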
5238 if (is_mov)
5239 dsc->modinsn[0] = insn & 0xfff00fff;
5240 else
5241 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
5242
5243 dsc->cleanup = &cleanup_alu_imm;
5244
5245 return 0;
5246 }
5247
5248 static int
5249 thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
5250 uint16_t insn2, struct regcache *regs,
5251 arm_displaced_step_closure *dsc)
5252 {
5253 unsigned int op = bits (insn1, 5, 8);
5254 unsigned int rn, rm, rd;
5255 ULONGEST rd_val, rn_val;
5256
5257 rn = bits (insn1, 0, 3); /* Rn */
5258 rm = bits (insn2, 0, 3); /* Rm */
5259 rd = bits (insn2, 8, 11); /* Rd */
5260
5261 /* This routine is only called for instruction MOV. */
5262 gdb_assert (op == 0x2 && rn == 0xf);
5263
5264 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
5265 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
5266
5267 displaced_debug_printf ("copying reg %s insn %.4x%.4x", "ALU", insn1, insn2);
5268
5269 /* Instruction is of form:
5270
5271 <op><cond> rd, [rn,] #imm
5272
5273 Rewrite as:
5274
5275 Preparation: tmp1, tmp2 <- r0, r1;
5276 r0, r1 <- rd, rn
5277 Insn: <op><cond> r0, r1, #imm
5278 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5279 */
5280
5281 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5282 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5283 rn_val = displaced_read_reg (regs, dsc, rn);
5284 rd_val = displaced_read_reg (regs, dsc, rd);
5285 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5286 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5287 dsc->rd = rd;
5288
5289 dsc->modinsn[0] = insn1;
5290 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
5291 dsc->numinsns = 2;
5292
5293 dsc->cleanup = &cleanup_alu_imm;
5294
5295 return 0;
5296 }
5297
5298 /* Copy/cleanup arithmetic/logic insns with register RHS. */
5299
5300 static void
5301 cleanup_alu_reg (struct gdbarch *gdbarch,
5302 struct regcache *regs, arm_displaced_step_closure *dsc)
5303 {
5304 ULONGEST rd_val;
5305 int i;
5306
5307 rd_val = displaced_read_reg (regs, dsc, 0);
5308
5309 for (i = 0; i < 3; i++)
5310 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5311
5312 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5313 }
5314
5315 static void
5316 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
5317 arm_displaced_step_closure *dsc,
5318 unsigned int rd, unsigned int rn, unsigned int rm)
5319 {
5320 ULONGEST rd_val, rn_val, rm_val;
5321
5322 /* Instruction is of form:
5323
5324 <op><cond> rd, [rn,] rm [, <shift>]
5325
5326 Rewrite as:
5327
5328 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
5329 r0, r1, r2 <- rd, rn, rm
5330 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
5331 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
5332 */
5333
5334 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5335 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5336 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5337 rd_val = displaced_read_reg (regs, dsc, rd);
5338 rn_val = displaced_read_reg (regs, dsc, rn);
5339 rm_val = displaced_read_reg (regs, dsc, rm);
5340 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5341 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5342 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5343 dsc->rd = rd;
5344
5345 dsc->cleanup = &cleanup_alu_reg;
5346 }
5347
5348 static int
5349 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5350 arm_displaced_step_closure *dsc)
5351 {
5352 unsigned int op = bits (insn, 21, 24);
5353 int is_mov = (op == 0xd);
5354
5355 if (!insn_references_pc (insn, 0x000ff00ful))
5356 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
5357
5358 displaced_debug_printf ("copying reg %s insn %.8lx",
5359 is_mov ? "move" : "ALU", (unsigned long) insn);
5360
5361 if (is_mov)
5362 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
5363 else
5364 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
5365
5366 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
5367 bits (insn, 0, 3));
5368 return 0;
5369 }
5370
5371 static int
5372 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
5373 struct regcache *regs,
5374 arm_displaced_step_closure *dsc)
5375 {
5376 unsigned rm, rd;
5377
5378 rm = bits (insn, 3, 6);
5379 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
5380
5381 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
5382 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
5383
5384 displaced_debug_printf ("copying ALU reg insn %.4x", (unsigned short) insn);
5385
5386 dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
5387
5388 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
5389
5390 return 0;
5391 }
5392
5393 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
5394
5395 static void
5396 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
5397 struct regcache *regs,
5398 arm_displaced_step_closure *dsc)
5399 {
5400 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5401 int i;
5402
5403 for (i = 0; i < 4; i++)
5404 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5405
5406 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5407 }
5408
5409 static void
5410 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
5411 arm_displaced_step_closure *dsc,
5412 unsigned int rd, unsigned int rn, unsigned int rm,
5413 unsigned rs)
5414 {
5415 int i;
5416 ULONGEST rd_val, rn_val, rm_val, rs_val;
5417
5418 /* Instruction is of form:
5419
5420 <op><cond> rd, [rn,] rm, <shift> rs
5421
5422 Rewrite as:
5423
5424 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
5425 r0, r1, r2, r3 <- rd, rn, rm, rs
5426 Insn: <op><cond> r0, r1, r2, <shift> r3
5427 Cleanup: tmp5 <- r0
5428 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
5429 rd <- tmp5
5430 */
5431
5432 for (i = 0; i < 4; i++)
5433 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
5434
5435 rd_val = displaced_read_reg (regs, dsc, rd);
5436 rn_val = displaced_read_reg (regs, dsc, rn);
5437 rm_val = displaced_read_reg (regs, dsc, rm);
5438 rs_val = displaced_read_reg (regs, dsc, rs);
5439 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5440 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5441 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5442 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
5443 dsc->rd = rd;
5444 dsc->cleanup = &cleanup_alu_shifted_reg;
5445 }
5446
5447 static int
5448 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
5449 struct regcache *regs,
5450 arm_displaced_step_closure *dsc)
5451 {
5452 unsigned int op = bits (insn, 21, 24);
5453 int is_mov = (op == 0xd);
5454 unsigned int rd, rn, rm, rs;
5455
5456 if (!insn_references_pc (insn, 0x000fff0ful))
5457 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
5458
5459 displaced_debug_printf ("copying shifted reg %s insn %.8lx",
5460 is_mov ? "move" : "ALU",
5461 (unsigned long) insn);
5462
5463 rn = bits (insn, 16, 19);
5464 rm = bits (insn, 0, 3);
5465 rs = bits (insn, 8, 11);
5466 rd = bits (insn, 12, 15);
5467
5468 if (is_mov)
5469 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
5470 else
5471 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
5472
5473 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
5474
5475 return 0;
5476 }
5477
5478 /* Clean up load instructions. */
5479
5480 static void
5481 cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
5482 arm_displaced_step_closure *dsc)
5483 {
5484 ULONGEST rt_val, rt_val2 = 0, rn_val;
5485
5486 rt_val = displaced_read_reg (regs, dsc, 0);
5487 if (dsc->u.ldst.xfersize == 8)
5488 rt_val2 = displaced_read_reg (regs, dsc, 1);
5489 rn_val = displaced_read_reg (regs, dsc, 2);
5490
5491 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5492 if (dsc->u.ldst.xfersize > 4)
5493 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5494 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5495 if (!dsc->u.ldst.immed)
5496 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5497
5498 /* Handle register writeback. */
5499 if (dsc->u.ldst.writeback)
5500 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5501 /* Put result in right place. */
5502 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
5503 if (dsc->u.ldst.xfersize == 8)
5504 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
5505 }
5506
5507 /* Clean up store instructions. */
5508
5509 static void
5510 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
5511 arm_displaced_step_closure *dsc)
5512 {
5513 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
5514
5515 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5516 if (dsc->u.ldst.xfersize > 4)
5517 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5518 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5519 if (!dsc->u.ldst.immed)
5520 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5521 if (!dsc->u.ldst.restore_r4)
5522 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
5523
5524 /* Writeback. */
5525 if (dsc->u.ldst.writeback)
5526 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5527 }
5528
5529 /* Copy "extra" load/store instructions. These are halfword/doubleword
5530 transfers, which have a different encoding to byte/word transfers. */
5531
5532 static int
5533 arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
5534 struct regcache *regs, arm_displaced_step_closure *dsc)
5535 {
5536 unsigned int op1 = bits (insn, 20, 24);
5537 unsigned int op2 = bits (insn, 5, 6);
5538 unsigned int rt = bits (insn, 12, 15);
5539 unsigned int rn = bits (insn, 16, 19);
5540 unsigned int rm = bits (insn, 0, 3);
5541 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
5542 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
5543 int immed = (op1 & 0x4) != 0;
5544 int opcode;
5545 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
5546
5547 if (!insn_references_pc (insn, 0x000ff00ful))
5548 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
5549
5550 displaced_debug_printf ("copying %sextra load/store insn %.8lx",
5551 unprivileged ? "unprivileged " : "",
5552 (unsigned long) insn);
5553
5554 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
5555
5556 if (opcode < 0)
5557 internal_error (__FILE__, __LINE__,
5558 _("copy_extra_ld_st: instruction decode error"));
5559
5560 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5561 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5562 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5563 if (!immed)
5564 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5565
5566 rt_val = displaced_read_reg (regs, dsc, rt);
5567 if (bytesize[opcode] == 8)
5568 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
5569 rn_val = displaced_read_reg (regs, dsc, rn);
5570 if (!immed)
5571 rm_val = displaced_read_reg (regs, dsc, rm);
5572
5573 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5574 if (bytesize[opcode] == 8)
5575 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
5576 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5577 if (!immed)
5578 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5579
5580 dsc->rd = rt;
5581 dsc->u.ldst.xfersize = bytesize[opcode];
5582 dsc->u.ldst.rn = rn;
5583 dsc->u.ldst.immed = immed;
5584 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
5585 dsc->u.ldst.restore_r4 = 0;
5586
5587 if (immed)
5588 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
5589 ->
5590 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
5591 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5592 else
5593 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
5594 ->
5595 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
5596 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
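  /* In both forms the mask clears the Rt and Rn fields (and Rm in the
     register form), and the ORed-in constant substitutes r0 for Rt and r2
     for Rn (and r3 for Rm), matching the register values staged above.  */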
5597
5598 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
5599
5600 return 0;
5601 }
5602
5603 /* Copy byte/halfword/word loads and stores. */
5604
5605 static void
5606 install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5607 arm_displaced_step_closure *dsc, int load,
5608 int immed, int writeback, int size, int usermode,
5609 int rt, int rm, int rn)
5610 {
5611 ULONGEST rt_val, rn_val, rm_val = 0;
5612
5613 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5614 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5615 if (!immed)
5616 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5617 if (!load)
5618 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
5619
5620 rt_val = displaced_read_reg (regs, dsc, rt);
5621 rn_val = displaced_read_reg (regs, dsc, rn);
5622 if (!immed)
5623 rm_val = displaced_read_reg (regs, dsc, rm);
5624
5625 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5626 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5627 if (!immed)
5628 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5629 dsc->rd = rt;
5630 dsc->u.ldst.xfersize = size;
5631 dsc->u.ldst.rn = rn;
5632 dsc->u.ldst.immed = immed;
5633 dsc->u.ldst.writeback = writeback;
5634
5635 /* To write PC we can do:
5636
5637 Before this sequence of instructions:
5638 r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
5639 r2 is the Rn value got from displaced_read_reg.
5640
5641 Insn1: push {pc} Write address of STR instruction + offset on stack
5642 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
5643 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
5644 = addr(Insn1) + offset - addr(Insn3) - 8
5645 = offset - 16
5646 Insn4: add r4, r4, #8 r4 = offset - 8
5647 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
5648 = from + offset
5649 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
5650
5651 Otherwise we don't know what value to write for PC, since the offset is
5652 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
5653 of this can be found in Section "Saving from r15" in
5654 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
5655
5656 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5657 }
5658
5659
5660 static int
5661 thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
5662 uint16_t insn2, struct regcache *regs,
5663 arm_displaced_step_closure *dsc, int size)
5664 {
5665 unsigned int u_bit = bit (insn1, 7);
5666 unsigned int rt = bits (insn2, 12, 15);
5667 int imm12 = bits (insn2, 0, 11);
5668 ULONGEST pc_val;
5669
5670 displaced_debug_printf ("copying ldr pc (0x%x) R%d %c imm12 %.4x",
5671 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
5672 imm12);
5673
5674 if (!u_bit)
5675 imm12 = -1 * imm12;
5676
5677 /* Rewrite instruction LDR Rt imm12 into:
5678
5679 Prepare: tmp[0] <- r0, tmp[2] <- r2, tmp[3] <- r3, r2 <- pc, r3 <- imm12
5680
5681 Insn: LDR R0, [R2, R3]
5682
5683 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[2], r3 <- tmp[3]. */
5684
5685
5686 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5687 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5688 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5689
5690 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5691
5692 pc_val = pc_val & 0xfffffffc;
5693
5694 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
5695 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
5696
5697 dsc->rd = rt;
5698
5699 dsc->u.ldst.xfersize = size;
5700 dsc->u.ldst.immed = 0;
5701 dsc->u.ldst.writeback = 0;
5702 dsc->u.ldst.restore_r4 = 0;
5703
5704 /* LDR R0, [R2, R3] */
5705 dsc->modinsn[0] = 0xf852;
5706 dsc->modinsn[1] = 0x3;
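  /* 0xf852/0x0003 is the T32 encoding of LDR.W r0, [r2, r3].  */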
5707 dsc->numinsns = 2;
5708
5709 dsc->cleanup = &cleanup_load;
5710
5711 return 0;
5712 }
5713
5714 static int
5715 thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
5716 uint16_t insn2, struct regcache *regs,
5717 arm_displaced_step_closure *dsc,
5718 int writeback, int immed)
5719 {
5720 unsigned int rt = bits (insn2, 12, 15);
5721 unsigned int rn = bits (insn1, 0, 3);
5722 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
5723 /* In LDR (register), there is also a register Rm, which is not allowed to
5724 be PC, so we don't have to check it. */
5725
5726 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
5727 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
5728 dsc);
5729
5730 displaced_debug_printf ("copying ldr r%d [r%d] insn %.4x%.4x",
5731 rt, rn, insn1, insn2);
5732
5733 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
5734 0, rt, rm, rn);
5735
5736 dsc->u.ldst.restore_r4 = 0;
5737
5738 if (immed)
5739 /* ldr[b]<cond> rt, [rn, #imm], etc.
5740 ->
5741 ldr[b]<cond> r0, [r2, #imm]. */
5742 {
5743 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5744 dsc->modinsn[1] = insn2 & 0x0fff;
5745 }
5746 else
5747 /* ldr[b]<cond> rt, [rn, rm], etc.
5748 ->
5749 ldr[b]<cond> r0, [r2, r3]. */
5750 {
5751 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5752 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
5753 }
5754
5755 dsc->numinsns = 2;
5756
5757 return 0;
5758 }
5759
5760
5761 static int
5762 arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
5763 struct regcache *regs,
5764 arm_displaced_step_closure *dsc,
5765 int load, int size, int usermode)
5766 {
5767 int immed = !bit (insn, 25);
5768 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
5769 unsigned int rt = bits (insn, 12, 15);
5770 unsigned int rn = bits (insn, 16, 19);
5771 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
5772
5773 if (!insn_references_pc (insn, 0x000ff00ful))
5774 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
5775
5776 displaced_debug_printf ("copying %s%s r%d [r%d] insn %.8lx",
5777 load ? (size == 1 ? "ldrb" : "ldr")
5778 : (size == 1 ? "strb" : "str"),
5779 usermode ? "t" : "",
5780 rt, rn,
5781 (unsigned long) insn);
5782
5783 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
5784 usermode, rt, rm, rn);
5785
5786 if (load || rt != ARM_PC_REGNUM)
5787 {
5788 dsc->u.ldst.restore_r4 = 0;
5789
5790 if (immed)
5791 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
5792 ->
5793 {ldr,str}[b]<cond> r0, [r2, #imm]. */
5794 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5795 else
5796 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
5797 ->
5798 {ldr,str}[b]<cond> r0, [r2, r3]. */
5799 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5800 }
5801 else
5802 {
5803 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
5804 dsc->u.ldst.restore_r4 = 1;
5805 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
5806 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
5807 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
5808 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
5809 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
5810
5811 /* As above. */
5812 if (immed)
5813 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
5814 else
5815 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
5816
5817 dsc->numinsns = 6;
5818 }
5819
5820 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5821
5822 return 0;
5823 }
5824
5825 /* Cleanup LDM instructions with fully-populated register list. This is an
5826 unfortunate corner case: it's impossible to implement correctly by modifying
5827 the instruction. The issue is as follows: we have an instruction,
5828
5829 ldm rN, {r0-r15}
5830
5831 which we must rewrite to avoid loading PC. A possible solution would be to
5832 do the load in two halves, something like (with suitable cleanup
5833 afterwards):
5834
5835 mov r8, rN
5836 ldm[id][ab] r8!, {r0-r7}
5837 str r7, <temp>
5838 ldm[id][ab] r8, {r7-r14}
5839 <bkpt>
5840
5841 but at present there's no suitable place for <temp>, since the scratch space
5842 is overwritten before the cleanup routine is called. For now, we simply
5843 emulate the instruction. */
5844
5845 static void
5846 cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
5847 arm_displaced_step_closure *dsc)
5848 {
5849 int inc = dsc->u.block.increment;
5850 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
5851 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
5852 uint32_t regmask = dsc->u.block.regmask;
5853 int regno = inc ? 0 : 15;
5854 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
5855 int exception_return = dsc->u.block.load && dsc->u.block.user
5856 && (regmask & 0x8000) != 0;
5857 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5858 int do_transfer = condition_true (dsc->u.block.cond, status);
5859 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5860
5861 if (!do_transfer)
5862 return;
5863
5864 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
5865 sensible we can do here. Complain loudly. */
5866 if (exception_return)
5867 error (_("Cannot single-step exception return"));
5868
5869 /* We don't handle any stores here for now. */
5870 gdb_assert (dsc->u.block.load != 0);
5871
5872 displaced_debug_printf ("emulating block transfer: %s %s %s",
5873 dsc->u.block.load ? "ldm" : "stm",
5874 dsc->u.block.increment ? "inc" : "dec",
5875 dsc->u.block.before ? "before" : "after");
5876
5877 while (regmask)
5878 {
5879 uint32_t memword;
5880
5881 if (inc)
5882 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
5883 regno++;
5884 else
5885 while (regno >= 0 && (regmask & (1 << regno)) == 0)
5886 regno--;
5887
5888 xfer_addr += bump_before;
5889
5890 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
5891 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
5892
5893 xfer_addr += bump_after;
5894
5895 regmask &= ~(1 << regno);
5896 }
5897
5898 if (dsc->u.block.writeback)
5899 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
5900 CANNOT_WRITE_PC);
5901 }
5902
5903 /* Clean up an STM which included the PC in the register list. */
5904
5905 static void
5906 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
5907 arm_displaced_step_closure *dsc)
5908 {
5909 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5910 int store_executed = condition_true (dsc->u.block.cond, status);
5911 CORE_ADDR pc_stored_at, transferred_regs
5912 = count_one_bits (dsc->u.block.regmask);
5913 CORE_ADDR stm_insn_addr;
5914 uint32_t pc_val;
5915 long offset;
5916 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5917
5918 /* If condition code fails, there's nothing else to do. */
5919 if (!store_executed)
5920 return;
5921
5922 if (dsc->u.block.increment)
5923 {
5924 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
5925
5926 if (dsc->u.block.before)
5927 pc_stored_at += 4;
5928 }
5929 else
5930 {
5931 pc_stored_at = dsc->u.block.xfer_addr;
5932
5933 if (dsc->u.block.before)
5934 pc_stored_at -= 4;
5935 }
5936
5937 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
5938 stm_insn_addr = dsc->scratch_base;
5939 offset = pc_val - stm_insn_addr;
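  /* The value the displaced STM stored for the PC is the address of the
     scratch copy plus an architecture-defined offset (8 or 12); subtracting
     scratch_base recovers that offset so it can be re-applied to the
     original instruction address below.  */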
5940
5941 displaced_debug_printf ("detected PC offset %.8lx for STM instruction",
5942 offset);
5943
5944 /* Rewrite the stored PC to the proper value for the non-displaced original
5945 instruction. */
5946 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
5947 dsc->insn_addr + offset);
5948 }
5949
5950 /* Clean up an LDM which includes the PC in the register list. We clumped all
5951 the registers in the transferred list into a contiguous range r0...rX (to
5952 avoid loading PC directly and losing control of the debugged program), so we
5953 must undo that here. */
5954
5955 static void
5956 cleanup_block_load_pc (struct gdbarch *gdbarch,
5957 struct regcache *regs,
5958 arm_displaced_step_closure *dsc)
5959 {
5960 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5961 int load_executed = condition_true (dsc->u.block.cond, status);
5962 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
5963 unsigned int regs_loaded = count_one_bits (mask);
5964 unsigned int num_to_shuffle = regs_loaded, clobbered;
5965
5966 /* The method employed here will fail if the register list is fully populated
5967 (we need to avoid loading PC directly). */
5968 gdb_assert (num_to_shuffle < 16);
5969
5970 if (!load_executed)
5971 return;
5972
5973 clobbered = (1 << num_to_shuffle) - 1;
5974
5975 while (num_to_shuffle > 0)
5976 {
5977 if ((mask & (1 << write_reg)) != 0)
5978 {
5979 unsigned int read_reg = num_to_shuffle - 1;
5980
5981 if (read_reg != write_reg)
5982 {
5983 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
5984 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
5985 displaced_debug_printf ("LDM: move loaded register r%d to r%d",
5986 read_reg, write_reg);
5987 }
5988 else
5989 displaced_debug_printf ("LDM: register r%d already in the right "
5990 "place", write_reg);
5991
5992 clobbered &= ~(1 << write_reg);
5993
5994 num_to_shuffle--;
5995 }
5996
5997 write_reg--;
5998 }
5999
6000 /* Restore any registers we scribbled over. */
6001 for (write_reg = 0; clobbered != 0; write_reg++)
6002 {
6003 if ((clobbered & (1 << write_reg)) != 0)
6004 {
6005 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
6006 CANNOT_WRITE_PC);
6007 displaced_debug_printf ("LDM: restored clobbered register r%d",
6008 write_reg);
6009 clobbered &= ~(1 << write_reg);
6010 }
6011 }
6012
6013 /* Perform register writeback manually. */
6014 if (dsc->u.block.writeback)
6015 {
6016 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
6017
6018 if (dsc->u.block.increment)
6019 new_rn_val += regs_loaded * 4;
6020 else
6021 new_rn_val -= regs_loaded * 4;
6022
6023 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
6024 CANNOT_WRITE_PC);
6025 }
6026 }
6027
6028 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
6029 in user-level code (in particular exception return, ldm rn, {...pc}^). */
6030
6031 static int
6032 arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
6033 struct regcache *regs,
6034 arm_displaced_step_closure *dsc)
6035 {
6036 int load = bit (insn, 20);
6037 int user = bit (insn, 22);
6038 int increment = bit (insn, 23);
6039 int before = bit (insn, 24);
6040 int writeback = bit (insn, 21);
6041 int rn = bits (insn, 16, 19);
6042
6043 /* Block transfers which don't mention PC can be run directly
6044 out-of-line. */
6045 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
6046 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
6047
6048 if (rn == ARM_PC_REGNUM)
6049 {
6050 warning (_("displaced: Unpredictable LDM or STM with "
6051 "base register r15"));
6052 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
6053 }
6054
6055 displaced_debug_printf ("copying block transfer insn %.8lx",
6056 (unsigned long) insn);
6057
6058 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
6059 dsc->u.block.rn = rn;
6060
6061 dsc->u.block.load = load;
6062 dsc->u.block.user = user;
6063 dsc->u.block.increment = increment;
6064 dsc->u.block.before = before;
6065 dsc->u.block.writeback = writeback;
6066 dsc->u.block.cond = bits (insn, 28, 31);
6067
6068 dsc->u.block.regmask = insn & 0xffff;
6069
6070 if (load)
6071 {
6072 if ((insn & 0xffff) == 0xffff)
6073 {
6074 /* LDM with a fully-populated register list. This case is
6075 particularly tricky. Implement for now by fully emulating the
6076 instruction (which might not behave perfectly in all cases, but
6077 these instructions should be rare enough for that not to matter
6078 too much). */
6079 dsc->modinsn[0] = ARM_NOP;
6080
6081 dsc->cleanup = &cleanup_block_load_all;
6082 }
6083 else
6084 {
6085 /* LDM of a list of registers which includes PC. Implement by
6086 rewriting the list of registers to be transferred into a
6087 contiguous chunk r0...rX before doing the transfer, then shuffling
6088 registers into the correct places in the cleanup routine. */
6089 unsigned int regmask = insn & 0xffff;
6090 unsigned int num_in_list = count_one_bits (regmask), new_regmask;
6091 unsigned int i;
6092
6093 for (i = 0; i < num_in_list; i++)
6094 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6095
6096 /* Writeback makes things complicated. We need to avoid clobbering
6097 the base register with one of the registers in our modified
6098 register list, but just using a different register can't work in
6099 all cases, e.g.:
6100
6101 ldm r14!, {r0-r13,pc}
6102
6103 which would need to be rewritten as:
6104
6105 ldm rN!, {r0-r14}
6106
6107 but that can't work, because there's no free register for N.
6108
6109 Solve this by turning off the writeback bit, and emulating
6110 writeback manually in the cleanup routine. */
6111
6112 if (writeback)
6113 insn &= ~(1 << 21);
6114
6115 new_regmask = (1 << num_in_list) - 1;
6116
6117 displaced_debug_printf ("LDM r%d%s, {..., pc}: original reg list "
6118 "%.4x, modified list %.4x",
6119 rn, writeback ? "!" : "",
6120 (int) insn & 0xffff, new_regmask);
6121
6122 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
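	  /* For example, "ldm r11, {r4, r7, pc}" (three registers) is run
	     out of line as "ldm r11, {r0, r1, r2}"; cleanup_block_load_pc
	     then moves the loaded values into r4, r7 and the PC.  */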
6123
6124 dsc->cleanup = &cleanup_block_load_pc;
6125 }
6126 }
6127 else
6128 {
6129 /* STM of a list of registers which includes PC. Run the instruction
6130 as-is, but out of line: this will store the wrong value for the PC,
6131 so we must manually fix up the memory in the cleanup routine.
6132 Doing things this way has the advantage that we can auto-detect
6133 the offset of the PC write (which is architecture-dependent) in
6134 the cleanup routine. */
6135 dsc->modinsn[0] = insn;
6136
6137 dsc->cleanup = &cleanup_block_store_pc;
6138 }
6139
6140 return 0;
6141 }
6142
6143 static int
6144 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6145 struct regcache *regs,
6146 arm_displaced_step_closure *dsc)
6147 {
6148 int rn = bits (insn1, 0, 3);
6149 int load = bit (insn1, 4);
6150 int writeback = bit (insn1, 5);
6151
6152 /* Block transfers which don't mention PC can be run directly
6153 out-of-line. */
6154 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
6155 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
6156
6157 if (rn == ARM_PC_REGNUM)
6158 {
6159 warning (_("displaced: Unpredictable LDM or STM with "
6160 "base register r15"));
6161 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6162 "unpredictable ldm/stm", dsc);
6163 }
6164
6165 displaced_debug_printf ("copying block transfer insn %.4x%.4x",
6166 insn1, insn2);
6167
6168 /* Clear bit 13 (SP), which should always be zero in the register list. */
6169 dsc->u.block.regmask = (insn2 & 0xdfff);
6170 dsc->u.block.rn = rn;
6171
6172 dsc->u.block.load = load;
6173 dsc->u.block.user = 0;
6174 dsc->u.block.increment = bit (insn1, 7);
6175 dsc->u.block.before = bit (insn1, 8);
6176 dsc->u.block.writeback = writeback;
6177 dsc->u.block.cond = INST_AL;
6178 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
6179
6180 if (load)
6181 {
6182 if (dsc->u.block.regmask == 0xffff)
6183 {
6184 /* This cannot happen: bit 13 was cleared above, so the register mask can never be 0xffff. */
6185 gdb_assert (0);
6186 }
6187 else
6188 {
6189 unsigned int regmask = dsc->u.block.regmask;
6190 unsigned int num_in_list = count_one_bits (regmask), new_regmask;
6191 unsigned int i;
6192
6193 for (i = 0; i < num_in_list; i++)
6194 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6195
6196 if (writeback)
6197 insn1 &= ~(1 << 5);
6198
6199 new_regmask = (1 << num_in_list) - 1;
6200
6201 displaced_debug_printf ("LDM r%d%s, {..., pc}: original reg list "
6202 "%.4x, modified list %.4x",
6203 rn, writeback ? "!" : "",
6204 (int) dsc->u.block.regmask, new_regmask);
6205
6206 dsc->modinsn[0] = insn1;
6207 dsc->modinsn[1] = (new_regmask & 0xffff);
6208 dsc->numinsns = 2;
6209
6210 dsc->cleanup = &cleanup_block_load_pc;
6211 }
6212 }
6213 else
6214 {
6215 dsc->modinsn[0] = insn1;
6216 dsc->modinsn[1] = insn2;
6217 dsc->numinsns = 2;
6218 dsc->cleanup = &cleanup_block_store_pc;
6219 }
6220 return 0;
6221 }
6222
6223 /* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
6224 This is used to avoid a dependency on BFD's bfd_endian enum. */
6225
6226 ULONGEST
6227 arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len,
6228 int byte_order)
6229 {
6230 return read_memory_unsigned_integer (memaddr, len,
6231 (enum bfd_endian) byte_order);
6232 }
6233
6234 /* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
6235
6236 CORE_ADDR
6237 arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self,
6238 CORE_ADDR val)
6239 {
6240 return gdbarch_addr_bits_remove (self->regcache->arch (), val);
6241 }
6242
6243 /* Wrapper over syscall_next_pc for use in get_next_pcs. */
6244
6245 static CORE_ADDR
6246 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self)
6247 {
6248 return 0;
6249 }
6250
6251 /* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
6252
6253 int
6254 arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self)
6255 {
6256 return arm_is_thumb (self->regcache);
6257 }
6258
6259 /* arm_software_single_step() is called just before we want to resume the
6260 inferior, if we want to single-step it but there is no hardware or kernel
6261 single-step support.  We find the possible targets of the coming
6262 instructions and set breakpoints on them.  */
6263
6264 std::vector<CORE_ADDR>
6265 arm_software_single_step (struct regcache *regcache)
6266 {
6267 struct gdbarch *gdbarch = regcache->arch ();
6268 struct arm_get_next_pcs next_pcs_ctx;
6269
6270 arm_get_next_pcs_ctor (&next_pcs_ctx,
6271 &arm_get_next_pcs_ops,
6272 gdbarch_byte_order (gdbarch),
6273 gdbarch_byte_order_for_code (gdbarch),
6274 0,
6275 regcache);
6276
6277 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
6278
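  /* Strip the Thumb bit (and any other non-address bits) so the breakpoints
     are placed at plain instruction addresses.  */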
6279 for (CORE_ADDR &pc_ref : next_pcs)
6280 pc_ref = gdbarch_addr_bits_remove (gdbarch, pc_ref);
6281
6282 return next_pcs;
6283 }
6284
6285 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6286 for Linux, where some SVC instructions must be treated specially. */
6287
6288 static void
6289 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
6290 arm_displaced_step_closure *dsc)
6291 {
6292 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
6293
6294 displaced_debug_printf ("cleanup for svc, resume at %.8lx",
6295 (unsigned long) resume_addr);
6296
6297 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
6298 }
6299
6300
6301 /* Common copy routine for svc instruction. */
6302
6303 static int
6304 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
6305 arm_displaced_step_closure *dsc)
6306 {
6307 /* Preparation: none.
6308 Insn: unmodified svc.
6309 Cleanup: pc <- insn_addr + insn_size. */
6310
6311 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
6312 instruction. */
6313 dsc->wrote_to_pc = 1;
6314
6315 /* Allow OS-specific code to override SVC handling. */
6316 if (dsc->u.svc.copy_svc_os)
6317 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
6318 else
6319 {
6320 dsc->cleanup = &cleanup_svc;
6321 return 0;
6322 }
6323 }
6324
6325 static int
6326 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
6327 struct regcache *regs, arm_displaced_step_closure *dsc)
6328 {
6329
6330 displaced_debug_printf ("copying svc insn %.8lx",
6331 (unsigned long) insn);
6332
6333 dsc->modinsn[0] = insn;
6334
6335 return install_svc (gdbarch, regs, dsc);
6336 }
6337
6338 static int
6339 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
6340 struct regcache *regs, arm_displaced_step_closure *dsc)
6341 {
6342
6343 displaced_debug_printf ("copying svc insn %.4x", insn);
6344
6345 dsc->modinsn[0] = insn;
6346
6347 return install_svc (gdbarch, regs, dsc);
6348 }
6349
6350 /* Copy undefined instructions. */
6351
6352 static int
6353 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
6354 arm_displaced_step_closure *dsc)
6355 {
6356 displaced_debug_printf ("copying undefined insn %.8lx",
6357 (unsigned long) insn);
6358
6359 dsc->modinsn[0] = insn;
6360
6361 return 0;
6362 }
6363
6364 static int
6365 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6366 arm_displaced_step_closure *dsc)
6367 {
6368
6369 displaced_debug_printf ("copying undefined insn %.4x %.4x",
6370 (unsigned short) insn1, (unsigned short) insn2);
6371
6372 dsc->modinsn[0] = insn1;
6373 dsc->modinsn[1] = insn2;
6374 dsc->numinsns = 2;
6375
6376 return 0;
6377 }
6378
6379 /* Copy unpredictable instructions. */
6380
6381 static int
6382 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
6383 arm_displaced_step_closure *dsc)
6384 {
6385 displaced_debug_printf ("copying unpredictable insn %.8lx",
6386 (unsigned long) insn);
6387
6388 dsc->modinsn[0] = insn;
6389
6390 return 0;
6391 }
6392
6393 /* The decode_* functions are instruction decoding helpers. They mostly follow
6394 the presentation in the ARM ARM. */
6395
6396 static int
6397 arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
6398 struct regcache *regs,
6399 arm_displaced_step_closure *dsc)
6400 {
6401 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
6402 unsigned int rn = bits (insn, 16, 19);
6403
6404 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0x1) == 0x0)
6405 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
6406 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0x1) == 0x1)
6407 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
6408 else if ((op1 & 0x60) == 0x20)
6409 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
6410 else if ((op1 & 0x71) == 0x40)
6411 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
6412 dsc);
6413 else if ((op1 & 0x77) == 0x41)
6414 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6415 else if ((op1 & 0x77) == 0x45)
6416 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
6417 else if ((op1 & 0x77) == 0x51)
6418 {
6419 if (rn != 0xf)
6420 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6421 else
6422 return arm_copy_unpred (gdbarch, insn, dsc);
6423 }
6424 else if ((op1 & 0x77) == 0x55)
6425 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6426 else if (op1 == 0x57)
6427 switch (op2)
6428 {
6429 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
6430 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
6431 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
6432 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
6433 default: return arm_copy_unpred (gdbarch, insn, dsc);
6434 }
6435 else if ((op1 & 0x63) == 0x43)
6436 return arm_copy_unpred (gdbarch, insn, dsc);
6437 else if ((op2 & 0x1) == 0x0)
6438 switch (op1 & ~0x80)
6439 {
6440 case 0x61:
6441 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6442 case 0x65:
6443 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
6444 case 0x71: case 0x75:
6445 /* pld/pldw reg. */
6446 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
6447 case 0x63: case 0x67: case 0x73: case 0x77:
6448 return arm_copy_unpred (gdbarch, insn, dsc);
6449 default:
6450 return arm_copy_undef (gdbarch, insn, dsc);
6451 }
6452 else
6453 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
6454 }
6455
6456 static int
6457 arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
6458 struct regcache *regs,
6459 arm_displaced_step_closure *dsc)
6460 {
6461 if (bit (insn, 27) == 0)
6462 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
6463 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
6464 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
6465 {
6466 case 0x0: case 0x2:
6467 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
6468
6469 case 0x1: case 0x3:
6470 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
6471
6472 case 0x4: case 0x5: case 0x6: case 0x7:
6473 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6474
6475 case 0x8:
6476 switch ((insn & 0xe00000) >> 21)
6477 {
6478 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
6479 /* stc/stc2. */
6480 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6481
6482 case 0x2:
6483 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6484
6485 default:
6486 return arm_copy_undef (gdbarch, insn, dsc);
6487 }
6488
6489 case 0x9:
6490 {
6491 int rn_f = (bits (insn, 16, 19) == 0xf);
6492 switch ((insn & 0xe00000) >> 21)
6493 {
6494 case 0x1: case 0x3:
6495 /* ldc/ldc2 imm (undefined for rn == pc). */
6496 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
6497 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6498
6499 case 0x2:
6500 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6501
6502 case 0x4: case 0x5: case 0x6: case 0x7:
6503 /* ldc/ldc2 lit (undefined for rn != pc). */
6504 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
6505 : arm_copy_undef (gdbarch, insn, dsc);
6506
6507 default:
6508 return arm_copy_undef (gdbarch, insn, dsc);
6509 }
6510 }
6511
6512 case 0xa:
6513 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
6514
6515 case 0xb:
6516 if (bits (insn, 16, 19) == 0xf)
6517 /* ldc/ldc2 lit. */
6518 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6519 else
6520 return arm_copy_undef (gdbarch, insn, dsc);
6521
6522 case 0xc:
6523 if (bit (insn, 4))
6524 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6525 else
6526 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6527
6528 case 0xd:
6529 if (bit (insn, 4))
6530 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6531 else
6532 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6533
6534 default:
6535 return arm_copy_undef (gdbarch, insn, dsc);
6536 }
6537 }
6538
6539 /* Decode miscellaneous instructions in dp/misc encoding space. */
6540
6541 static int
6542 arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
6543 struct regcache *regs,
6544 arm_displaced_step_closure *dsc)
6545 {
6546 unsigned int op2 = bits (insn, 4, 6);
6547 unsigned int op = bits (insn, 21, 22);
6548
6549 switch (op2)
6550 {
6551 case 0x0:
6552 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
6553
6554 case 0x1:
6555 if (op == 0x1) /* bx. */
6556 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
6557 else if (op == 0x3)
6558 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
6559 else
6560 return arm_copy_undef (gdbarch, insn, dsc);
6561
6562 case 0x2:
6563 if (op == 0x1)
6564 /* Not really supported. */
6565 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
6566 else
6567 return arm_copy_undef (gdbarch, insn, dsc);
6568
6569 case 0x3:
6570 if (op == 0x1)
6571 return arm_copy_bx_blx_reg (gdbarch, insn,
6572 regs, dsc); /* blx register. */
6573 else
6574 return arm_copy_undef (gdbarch, insn, dsc);
6575
6576 case 0x5:
6577 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
6578
6579 case 0x7:
6580 if (op == 0x1)
6581 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
6582 else if (op == 0x3)
6583 /* Not really supported. */
6584 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
6585 /* Fall through. */
6586
6587 default:
6588 return arm_copy_undef (gdbarch, insn, dsc);
6589 }
6590 }
6591
6592 static int
6593 arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
6594 struct regcache *regs,
6595 arm_displaced_step_closure *dsc)
6596 {
6597 if (bit (insn, 25))
6598 switch (bits (insn, 20, 24))
6599 {
6600 case 0x10:
6601 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
6602
6603 case 0x14:
6604 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
6605
6606 case 0x12: case 0x16:
6607 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
6608
6609 default:
6610 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
6611 }
6612 else
6613 {
6614 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
6615
6616 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
6617 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
6618 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
6619 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
6620 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
6621 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
6622 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
6623 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
6624 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
6625 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
6626 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
6627 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
6628 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
6629 /* 2nd arg means "unprivileged". */
6630 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
6631 dsc);
6632 }
6633
6634 /* Should be unreachable. */
6635 return 1;
6636 }
6637
6638 static int
6639 arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
6640 struct regcache *regs,
6641 arm_displaced_step_closure *dsc)
6642 {
6643 int a = bit (insn, 25), b = bit (insn, 4);
6644 uint32_t op1 = bits (insn, 20, 24);
6645
6646 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
6647 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
6648 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
6649 else if ((!a && (op1 & 0x17) == 0x02)
6650 || (a && (op1 & 0x17) == 0x02 && !b))
6651 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
6652 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
6653 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
6654 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
6655 else if ((!a && (op1 & 0x17) == 0x03)
6656 || (a && (op1 & 0x17) == 0x03 && !b))
6657 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
6658 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
6659 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
6660 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
6661 else if ((!a && (op1 & 0x17) == 0x06)
6662 || (a && (op1 & 0x17) == 0x06 && !b))
6663 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
6664 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
6665 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
6666 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
6667 else if ((!a && (op1 & 0x17) == 0x07)
6668 || (a && (op1 & 0x17) == 0x07 && !b))
6669 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
6670
6671 /* Should be unreachable. */
6672 return 1;
6673 }
6674
6675 static int
6676 arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
6677 arm_displaced_step_closure *dsc)
6678 {
6679 switch (bits (insn, 20, 24))
6680 {
6681 case 0x00: case 0x01: case 0x02: case 0x03:
6682 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
6683
6684 case 0x04: case 0x05: case 0x06: case 0x07:
6685 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
6686
6687 case 0x08: case 0x09: case 0x0a: case 0x0b:
6688 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
6689 return arm_copy_unmodified (gdbarch, insn,
6690 "decode/pack/unpack/saturate/reverse", dsc);
6691
6692 case 0x18:
6693 if (bits (insn, 5, 7) == 0) /* op2. */
6694 {
6695 if (bits (insn, 12, 15) == 0xf)
6696 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
6697 else
6698 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
6699 }
6700 else
6701 return arm_copy_undef (gdbarch, insn, dsc);
6702
6703 case 0x1a: case 0x1b:
6704 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6705 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
6706 else
6707 return arm_copy_undef (gdbarch, insn, dsc);
6708
6709 case 0x1c: case 0x1d:
6710 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
6711 {
6712 if (bits (insn, 0, 3) == 0xf)
6713 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
6714 else
6715 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
6716 }
6717 else
6718 return arm_copy_undef (gdbarch, insn, dsc);
6719
6720 case 0x1e: case 0x1f:
6721 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6722 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
6723 else
6724 return arm_copy_undef (gdbarch, insn, dsc);
6725 }
6726
6727 /* Should be unreachable. */
6728 return 1;
6729 }
6730
6731 static int
6732 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn,
6733 struct regcache *regs,
6734 arm_displaced_step_closure *dsc)
6735 {
6736 if (bit (insn, 25))
6737 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6738 else
6739 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
6740 }
6741
6742 static int
6743 arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
6744 struct regcache *regs,
6745 arm_displaced_step_closure *dsc)
6746 {
6747 unsigned int opcode = bits (insn, 20, 24);
6748
6749 switch (opcode)
6750 {
6751 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
6752 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
6753
6754 case 0x08: case 0x0a: case 0x0c: case 0x0e:
6755 case 0x12: case 0x16:
6756 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
6757
6758 case 0x09: case 0x0b: case 0x0d: case 0x0f:
6759 case 0x13: case 0x17:
6760 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
6761
6762 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6763 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6764 /* Note: no writeback for these instructions. Bit 25 will always be
6765 zero though (via caller), so the following works OK. */
6766 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6767 }
6768
6769 /* Should be unreachable. */
6770 return 1;
6771 }
6772
6773 /* Decode shifted register instructions. */
6774
6775 static int
6776 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
6777 uint16_t insn2, struct regcache *regs,
6778 arm_displaced_step_closure *dsc)
6779 {
6780 /* PC is only allowed to be used in the MOV instruction. */
6781
6782 unsigned int op = bits (insn1, 5, 8);
6783 unsigned int rn = bits (insn1, 0, 3);
6784
6785 if (op == 0x2 && rn == 0xf) /* MOV */
6786 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
6787 else
6788 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6789 "dp (shift reg)", dsc);
6790 }
6791
6792
6793 /* Decode extension register load/store. Exactly the same as
6794 arm_decode_ext_reg_ld_st. */
6795
6796 static int
6797 thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
6798 uint16_t insn2, struct regcache *regs,
6799 arm_displaced_step_closure *dsc)
6800 {
6801 unsigned int opcode = bits (insn1, 4, 8);
6802
6803 switch (opcode)
6804 {
6805 case 0x04: case 0x05:
6806 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6807 "vfp/neon vmov", dsc);
6808
6809 case 0x08: case 0x0c: /* 01x00 */
6810 case 0x0a: case 0x0e: /* 01x10 */
6811 case 0x12: case 0x16: /* 10x10 */
6812 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6813 "vfp/neon vstm/vpush", dsc);
6814
6815 case 0x09: case 0x0d: /* 01x01 */
6816 case 0x0b: case 0x0f: /* 01x11 */
6817 case 0x13: case 0x17: /* 10x11 */
6818 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6819 "vfp/neon vldm/vpop", dsc);
6820
6821 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6822 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6823 "vstr", dsc);
6824 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6825 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
6826 }
6827
6828 /* Should be unreachable. */
6829 return 1;
6830 }
6831
6832 static int
6833 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
6834 struct regcache *regs, arm_displaced_step_closure *dsc)
6835 {
6836 unsigned int op1 = bits (insn, 20, 25);
6837 int op = bit (insn, 4);
6838 unsigned int coproc = bits (insn, 8, 11);
6839
6840 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
6841 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
6842 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
6843 && (coproc & 0xe) != 0xa)
6844 /* stc/stc2. */
6845 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6846 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
6847 && (coproc & 0xe) != 0xa)
6848 /* ldc/ldc2 imm/lit. */
6849 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6850 else if ((op1 & 0x3e) == 0x00)
6851 return arm_copy_undef (gdbarch, insn, dsc);
6852 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
6853 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
6854 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
6855 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6856 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
6857 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6858 else if ((op1 & 0x30) == 0x20 && !op)
6859 {
6860 if ((coproc & 0xe) == 0xa)
6861 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
6862 else
6863 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6864 }
6865 else if ((op1 & 0x30) == 0x20 && op)
6866 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
6867 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
6868 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6869 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
6870 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6871 else if ((op1 & 0x30) == 0x30)
6872 return arm_copy_svc (gdbarch, insn, regs, dsc);
6873 else
6874 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
6875 }
6876
6877 static int
6878 thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
6879 uint16_t insn2, struct regcache *regs,
6880 arm_displaced_step_closure *dsc)
6881 {
6882 unsigned int coproc = bits (insn2, 8, 11);
6883 unsigned int bit_5_8 = bits (insn1, 5, 8);
6884 unsigned int bit_9 = bit (insn1, 9);
6885 unsigned int bit_4 = bit (insn1, 4);
6886
6887 if (bit_9 == 0)
6888 {
6889 if (bit_5_8 == 2)
6890 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6891 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
6892 dsc);
6893 else if (bit_5_8 == 0) /* UNDEFINED. */
6894 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
6895 else
6896 {
6897 /* coproc is 101x: SIMD/VFP extension registers load/store. */
6898 if ((coproc & 0xe) == 0xa)
6899 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
6900 dsc);
6901 else /* coproc is not 101x. */
6902 {
6903 if (bit_4 == 0) /* STC/STC2. */
6904 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6905 "stc/stc2", dsc);
6906 else /* LDC/LDC2 {literal, immediate}. */
6907 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
6908 regs, dsc);
6909 }
6910 }
6911 }
6912 else
6913 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
6914
6915 return 0;
6916 }
6917
6918 static void
6919 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
6920 arm_displaced_step_closure *dsc, int rd)
6921 {
6922 /* ADR Rd, #imm
6923
6924 Rewrite as:
6925
6926 Preparation: Rd <- PC
6927 Insn: ADD Rd, #imm
6928 Cleanup: Null.
6929 */
6930
6931 /* Rd <- PC */
6932 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6933 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
6934 }
6935
6936 static int
6937 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
6938 arm_displaced_step_closure *dsc,
6939 int rd, unsigned int imm)
6940 {
6941
6942 /* Encoding T2: ADDS Rd, #imm */
6943 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
6944
6945 install_pc_relative (gdbarch, regs, dsc, rd);
6946
6947 return 0;
6948 }
6949
6950 static int
6951 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
6952 struct regcache *regs,
6953 arm_displaced_step_closure *dsc)
6954 {
6955 unsigned int rd = bits (insn, 8, 10);
6956 unsigned int imm8 = bits (insn, 0, 7);
6957
6958 displaced_debug_printf ("copying thumb adr r%d, #%d insn %.4x",
6959 rd, imm8, insn);
6960
6961 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
6962 }
6963
6964 static int
6965 thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
6966 uint16_t insn2, struct regcache *regs,
6967 arm_displaced_step_closure *dsc)
6968 {
6969 unsigned int rd = bits (insn2, 8, 11);
6970 /* The immediate field has the same encoding in ADR, ADD and SUB, so we
6971 simply extract the raw immediate encoding rather than computing the
6972 immediate value.  When generating the ADD or SUB instruction, we can then
6973 simply OR the immediate into the encoding.  */
6974 unsigned int imm_3_8 = insn2 & 0x70ff;
6975 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
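  /* imm_3_8 keeps the imm3 (bits 14-12) and imm8 (bits 7-0) fields of the
     second halfword, and imm_i keeps the i bit (bit 10) of the first
     halfword; together they form the i:imm3:imm8 immediate shared by the
     ADR, ADD and SUB encodings.  */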
6976
6977 displaced_debug_printf ("copying thumb adr r%d, #%d:%d insn %.4x%.4x",
6978 rd, imm_i, imm_3_8, insn1, insn2);
6979
6980 if (bit (insn1, 7)) /* ADR encoding T2 (subtraction). */
6981 {
6982 /* Rewrite as: SUB Rd, Rd, #imm */
6983 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
6984 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
6985 }
6986 else /* ADR encoding T3 (addition). */
6987 {
6988 /* Rewrite as: ADD Rd, Rd, #imm */
6989 dsc->modinsn[0] = (0xf100 | rd | imm_i);
6990 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
6991 }
6992 dsc->numinsns = 2;
6993
6994 install_pc_relative (gdbarch, regs, dsc, rd);
6995
6996 return 0;
6997 }
6998
6999 static int
7000 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, uint16_t insn1,
7001 struct regcache *regs,
7002 arm_displaced_step_closure *dsc)
7003 {
7004 unsigned int rt = bits (insn1, 8, 10);
7005 unsigned int pc;
7006 int imm8 = (bits (insn1, 0, 7) << 2);
7007
7008 /* LDR Rd, #imm8
7009
7010 Rewrite as:
7011
7012 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
7013
7014 Insn: LDR R0, [R2, R3];
7015 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
7016
7017 displaced_debug_printf ("copying thumb ldr r%d [pc #%d]", rt, imm8);
7018
7019 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
7020 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
7021 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
7022 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7023 /* The assembler calculates the required value of the offset from the
7024 Align(PC,4) value of this instruction to the label. */
7025 pc = pc & 0xfffffffc;
7026
7027 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
7028 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
7029
7030 dsc->rd = rt;
7031 dsc->u.ldst.xfersize = 4;
7032 dsc->u.ldst.rn = 0;
7033 dsc->u.ldst.immed = 0;
7034 dsc->u.ldst.writeback = 0;
7035 dsc->u.ldst.restore_r4 = 0;
7036
7037 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3] */
7038
7039 dsc->cleanup = &cleanup_load;
7040
7041 return 0;
7042 }
7043
7044 /* Copy Thumb cbnz/cbz instruction. */
7045
7046 static int
7047 thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
7048 struct regcache *regs,
7049 arm_displaced_step_closure *dsc)
7050 {
7051 int non_zero = bit (insn1, 11);
7052 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
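  /* The branch offset is i:imm5:'0' (bit 9 is i, bits 3-7 are imm5), giving
     an even byte offset of 0-126 from the PC value (insn address + 4).  */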
7053 CORE_ADDR from = dsc->insn_addr;
7054 int rn = bits (insn1, 0, 2);
7055 int rn_val = displaced_read_reg (regs, dsc, rn);
7056
7057 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
7058 /* CBNZ and CBZ do not affect the condition flags.  If the branch is taken,
7059 set the condition to INST_AL so that cleanup_branch knows the branch was
7060 taken; otherwise leave it false and cleanup_branch will do nothing. */
7061 if (dsc->u.branch.cond)
7062 {
7063 dsc->u.branch.cond = INST_AL;
7064 dsc->u.branch.dest = from + 4 + imm5;
7065 }
7066 else
7067 dsc->u.branch.dest = from + 2;
7068
7069 dsc->u.branch.link = 0;
7070 dsc->u.branch.exchange = 0;
7071
7072 displaced_debug_printf ("copying %s [r%d = 0x%x] insn %.4x to %.8lx",
7073 non_zero ? "cbnz" : "cbz",
7074 rn, rn_val, insn1, dsc->u.branch.dest);
7075
7076 dsc->modinsn[0] = THUMB_NOP;
7077
7078 dsc->cleanup = &cleanup_branch;
7079 return 0;
7080 }
7081
7082 /* Copy Table Branch Byte/Halfword. */
7083 static int
7084 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
7085 uint16_t insn2, struct regcache *regs,
7086 arm_displaced_step_closure *dsc)
7087 {
7088 ULONGEST rn_val, rm_val;
7089 int is_tbh = bit (insn2, 4);
7090 CORE_ADDR halfwords = 0;
7091 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7092
7093 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
7094 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
7095
7096 if (is_tbh)
7097 {
7098 gdb_byte buf[2];
7099
7100 target_read_memory (rn_val + 2 * rm_val, buf, 2);
7101 halfwords = extract_unsigned_integer (buf, 2, byte_order);
7102 }
7103 else
7104 {
7105 gdb_byte buf[1];
7106
7107 target_read_memory (rn_val + rm_val, buf, 1);
7108 halfwords = extract_unsigned_integer (buf, 1, byte_order);
7109 }
7110
7111 displaced_debug_printf ("%s base 0x%x offset 0x%x offset 0x%x",
7112 is_tbh ? "tbh" : "tbb",
7113 (unsigned int) rn_val, (unsigned int) rm_val,
7114 (unsigned int) halfwords);
7115
7116 dsc->u.branch.cond = INST_AL;
7117 dsc->u.branch.link = 0;
7118 dsc->u.branch.exchange = 0;
7119 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
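  /* The table entry is a halfword count, so the branch target is the PC
     value (insn_addr + 4) plus twice the value read from the table.  */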
7120
7121 dsc->cleanup = &cleanup_branch;
7122
7123 return 0;
7124 }
7125
7126 static void
7127 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
7128 arm_displaced_step_closure *dsc)
7129 {
7130 /* PC <- r7 */
7131 int val = displaced_read_reg (regs, dsc, 7);
7132 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
7133
7134 /* r7 <- r8 */
7135 val = displaced_read_reg (regs, dsc, 8);
7136 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
7137
7138 /* r8 <- tmp[0] */
7139 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
7140
7141 }
7142
7143 static int
7144 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, uint16_t insn1,
7145 struct regcache *regs,
7146 arm_displaced_step_closure *dsc)
7147 {
7148 dsc->u.block.regmask = insn1 & 0x00ff;
7149
7150 /* Rewrite instruction: POP {rX, rY, ..., rZ, PC}
7151 to:
7152
7153 (1) register list is full, that is, r0-r7 are used.
7154 Prepare: tmp[0] <- r8
7155
7156 POP {r0, r1, ...., r6, r7}; remove PC from reglist
7157 MOV r8, r7; Move value of r7 to r8;
7158 POP {r7}; Store PC value into r7.
7159
7160 Cleanup: PC <- r7, r7 <- r8, r8 <- tmp[0]
7161
7162 (2) register list is not full, supposing there are N registers in
7163 register list (except PC, 0 <= N <= 7).
7164 Prepare: for each i, 0 - N, tmp[i] <- ri.
7165
7166 POP {r0, r1, ...., rN};
7167
7168 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
7169 from tmp[] properly.
7170 */
7171 displaced_debug_printf ("copying thumb pop {%.8x, pc} insn %.4x",
7172 dsc->u.block.regmask, insn1);
7173
7174 if (dsc->u.block.regmask == 0xff)
7175 {
7176 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
7177
7178 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
7179 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
7180 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
7181
7182 dsc->numinsns = 3;
7183 dsc->cleanup = &cleanup_pop_pc_16bit_all;
7184 }
7185 else
7186 {
7187 unsigned int num_in_list = count_one_bits (dsc->u.block.regmask);
7188 unsigned int i;
7189 unsigned int new_regmask;
7190
7191 for (i = 0; i < num_in_list + 1; i++)
7192 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7193
7194 new_regmask = (1 << (num_in_list + 1)) - 1;
7195
7196 displaced_debug_printf ("POP {..., pc}: original reg list %.4x, "
7197 "modified list %.4x",
7198 (int) dsc->u.block.regmask, new_regmask);
7199
7200 dsc->u.block.regmask |= 0x8000;
7201 dsc->u.block.writeback = 0;
7202 dsc->u.block.cond = INST_AL;
7203
7204 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
7205
7206 dsc->cleanup = &cleanup_block_load_pc;
7207 }
7208
7209 return 0;
7210 }
7211
7212 static void
7213 thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7214 struct regcache *regs,
7215 arm_displaced_step_closure *dsc)
7216 {
7217 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
7218 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
7219 int err = 0;
7220
7221 /* 16-bit thumb instructions. */
7222 switch (op_bit_12_15)
7223 {
7224 /* Shift (immediate), add, subtract, move and compare. */
7225 case 0: case 1: case 2: case 3:
7226 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7227 "shift/add/sub/mov/cmp",
7228 dsc);
7229 break;
7230 case 4:
7231 switch (op_bit_10_11)
7232 {
7233 case 0: /* Data-processing */
7234 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7235 "data-processing",
7236 dsc);
7237 break;
7238 case 1: /* Special data instructions and branch and exchange. */
7239 {
7240 unsigned short op = bits (insn1, 7, 9);
7241 if (op == 6 || op == 7) /* BX or BLX */
7242 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
7243 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
7244 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
7245 else
7246 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
7247 dsc);
7248 }
7249 break;
7250 default: /* LDR (literal) */
7251 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
7252 }
7253 break;
7254 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
7255 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
7256 break;
7257 case 10:
7258 if (op_bit_10_11 < 2) /* Generate PC-relative address */
7259 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
7260 else /* Generate SP-relative address */
7261 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
7262 break;
7263 case 11: /* Misc 16-bit instructions */
7264 {
7265 switch (bits (insn1, 8, 11))
7266 {
7267 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
7268 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
7269 break;
7270 case 12: case 13: /* POP */
7271 if (bit (insn1, 8)) /* PC is in register list. */
7272 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
7273 else
7274 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
7275 break;
7276 case 15: /* If-Then, and hints */
7277 if (bits (insn1, 0, 3))
7278 /* If-Then makes up to four following instructions conditional.
7279 The IT instruction itself is not conditional, so handle it as an
7280 ordinary unmodified instruction. */
7281 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
7282 dsc);
7283 else
7284 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
7285 break;
7286 default:
7287 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
7288 }
7289 }
7290 break;
7291 case 12:
7292 if (op_bit_10_11 < 2) /* Store multiple registers */
7293 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
7294 else /* Load multiple registers */
7295 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
7296 break;
7297 case 13: /* Conditional branch and supervisor call */
7298 if (bits (insn1, 9, 11) != 7) /* conditional branch */
7299 err = thumb_copy_b (gdbarch, insn1, dsc);
7300 else
7301 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
7302 break;
7303 case 14: /* Unconditional branch */
7304 err = thumb_copy_b (gdbarch, insn1, dsc);
7305 break;
7306 default:
7307 err = 1;
7308 }
7309
7310 if (err)
7311 internal_error (__FILE__, __LINE__,
7312 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
7313 }
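/* Dispatch example (illustrative only): for insn1 = 0xbd0a
   (POP {r1, r3, pc}), op_bit_12_15 is 11 and bits (insn1, 8, 11) is 13,
   so the "Misc 16-bit instructions" case selects the POP arm; bit 8 is
   set (PC is in the register list), so thumb_copy_pop_pc_16bit is
   used.  */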
7314
7315 static int
7316 decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
7317 uint16_t insn1, uint16_t insn2,
7318 struct regcache *regs,
7319 arm_displaced_step_closure *dsc)
7320 {
7321 int rt = bits (insn2, 12, 15);
7322 int rn = bits (insn1, 0, 3);
7323 int op1 = bits (insn1, 7, 8);
7324
7325 switch (bits (insn1, 5, 6))
7326 {
7327 case 0: /* Load byte and memory hints */
7328 if (rt == 0xf) /* PLD/PLI */
7329 {
7330 if (rn == 0xf)
7331 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
7332 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
7333 else
7334 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7335 "pli/pld", dsc);
7336 }
7337 else
7338 {
7339 if (rn == 0xf) /* LDRB/LDRSB (literal) */
7340 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7341 1);
7342 else
7343 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7344 "ldrb{reg, immediate}/ldrbt",
7345 dsc);
7346 }
7347
7348 break;
7349 case 1: /* Load halfword and memory hints. */
7350 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
7351 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7352 "pld/unalloc memhint", dsc);
7353 else
7354 {
7355 if (rn == 0xf)
7356 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7357 2);
7358 else
7359 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7360 "ldrh/ldrht", dsc);
7361 }
7362 break;
7363 case 2: /* Load word */
7364 {
7365 int insn2_bit_8_11 = bits (insn2, 8, 11);
7366
7367 if (rn == 0xf)
7368 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
7369 else if (op1 == 0x1) /* Encoding T3 */
7370 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
7371 0, 1);
7372 else /* op1 == 0x0 */
7373 {
7374 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
7375 /* LDR (immediate) */
7376 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7377 dsc, bit (insn2, 8), 1);
7378 else if (insn2_bit_8_11 == 0xe) /* LDRT */
7379 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7380 "ldrt", dsc);
7381 else
7382 /* LDR (register) */
7383 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7384 dsc, 0, 0);
7385 }
7386 break;
7387 }
7388 default:
7389 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7390 break;
7391 }
7392 return 0;
7393 }
7394
7395 static void
7396 thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7397 uint16_t insn2, struct regcache *regs,
7398 arm_displaced_step_closure *dsc)
7399 {
7400 int err = 0;
7401 unsigned short op = bit (insn2, 15);
7402 unsigned int op1 = bits (insn1, 11, 12);
7403
7404 switch (op1)
7405 {
7406 case 1:
7407 {
7408 switch (bits (insn1, 9, 10))
7409 {
7410 case 0:
7411 if (bit (insn1, 6))
7412 {
7413 /* Load/store {dual, exclusive}, table branch. */
7414 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
7415 && bits (insn2, 5, 7) == 0)
7416 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
7417 dsc);
7418 else
7419 /* PC is not allowed to be used in load/store {dual, exclusive}
7420 instructions. */
7421 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7422 "load/store dual/ex", dsc);
7423 }
7424 else /* load/store multiple */
7425 {
7426 switch (bits (insn1, 7, 8))
7427 {
7428 case 0: case 3: /* SRS, RFE */
7429 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7430 "srs/rfe", dsc);
7431 break;
7432 case 1: case 2: /* LDM/STM/PUSH/POP */
7433 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
7434 break;
7435 }
7436 }
7437 break;
7438
7439 case 1:
7440 /* Data-processing (shift register). */
7441 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
7442 dsc);
7443 break;
7444 default: /* Coprocessor instructions. */
7445 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7446 break;
7447 }
7448 break;
7449 }
7450 case 2: /* op1 = 2 */
7451 if (op) /* Branch and misc control. */
7452 {
7453 if (bit (insn2, 14) /* BLX/BL */
7454 || bit (insn2, 12) /* Unconditional branch */
7455 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
7456 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
7457 else
7458 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7459 "misc ctrl", dsc);
7460 }
7461 else
7462 {
7463 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
7464 {
7465 int dp_op = bits (insn1, 4, 8);
7466 int rn = bits (insn1, 0, 3);
7467 if ((dp_op == 0 || dp_op == 0xa) && rn == 0xf)
7468 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
7469 regs, dsc);
7470 else
7471 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7472 "dp/pb", dsc);
7473 }
7474 else /* Data processing (modified immediate) */
7475 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7476 "dp/mi", dsc);
7477 }
7478 break;
7479 case 3: /* op1 = 3 */
7480 switch (bits (insn1, 9, 10))
7481 {
7482 case 0:
7483 if (bit (insn1, 4))
7484 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
7485 regs, dsc);
7486 else /* NEON Load/Store and Store single data item */
7487 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7488 "neon elt/struct load/store",
7489 dsc);
7490 break;
7491 case 1: /* op1 = 3, bits (9, 10) == 1 */
7492 switch (bits (insn1, 7, 8))
7493 {
7494 case 0: case 1: /* Data processing (register) */
7495 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7496 "dp(reg)", dsc);
7497 break;
7498 case 2: /* Multiply and absolute difference */
7499 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7500 "mul/mua/diff", dsc);
7501 break;
7502 case 3: /* Long multiply and divide */
7503 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7504 "lmul/lmua", dsc);
7505 break;
7506 }
7507 break;
7508 default: /* Coprocessor instructions */
7509 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7510 break;
7511 }
7512 break;
7513 default:
7514 err = 1;
7515 }
7516
7517 if (err)
7518 internal_error (__FILE__, __LINE__,
7519 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
7520
7521 }
7522
7523 static void
7524 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7525 struct regcache *regs,
7526 arm_displaced_step_closure *dsc)
7527 {
7528 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7529 uint16_t insn1
7530 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
7531
7532 displaced_debug_printf ("process thumb insn %.4x at %.8lx",
7533 insn1, (unsigned long) from);
7534
7535 dsc->is_thumb = 1;
7536 dsc->insn_size = thumb_insn_size (insn1);
7537 if (thumb_insn_size (insn1) == 4)
7538 {
7539 uint16_t insn2
7540 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
7541 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
7542 }
7543 else
7544 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
7545 }
7546
7547 void
7548 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7549 CORE_ADDR to, struct regcache *regs,
7550 arm_displaced_step_closure *dsc)
7551 {
7552 int err = 0;
7553 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7554 uint32_t insn;
7555
7556 /* Most displaced instructions use a 1-instruction scratch space, so set this
7557 here and override below if/when necessary. */
7558 dsc->numinsns = 1;
7559 dsc->insn_addr = from;
7560 dsc->scratch_base = to;
7561 dsc->cleanup = NULL;
7562 dsc->wrote_to_pc = 0;
7563
7564 if (!displaced_in_arm_mode (regs))
7565 return thumb_process_displaced_insn (gdbarch, from, regs, dsc);
7566
7567 dsc->is_thumb = 0;
7568 dsc->insn_size = 4;
7569 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
7570 displaced_debug_printf ("stepping insn %.8lx at %.8lx",
7571 (unsigned long) insn, (unsigned long) from);
7572
7573 if ((insn & 0xf0000000) == 0xf0000000)
7574 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
7575 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
7576 {
7577 case 0x0: case 0x1: case 0x2: case 0x3:
7578 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
7579 break;
7580
7581 case 0x4: case 0x5: case 0x6:
7582 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
7583 break;
7584
7585 case 0x7:
7586 err = arm_decode_media (gdbarch, insn, dsc);
7587 break;
7588
7589 case 0x8: case 0x9: case 0xa: case 0xb:
7590 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
7591 break;
7592
7593 case 0xc: case 0xd: case 0xe: case 0xf:
7594 err = arm_decode_svc_copro (gdbarch, insn, regs, dsc);
7595 break;
7596 }
7597
7598 if (err)
7599 internal_error (__FILE__, __LINE__,
7600 _("arm_process_displaced_insn: Instruction decode error"));
7601 }
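/* Decode example (illustrative only): for the ARM instruction
   0xe59f1004 (LDR r1, [pc, #4]) the condition field is not 0xf, bit 4
   is clear and bits 25-27 are 0b010, so the switch key above is 0x4 and
   the instruction is handled by arm_decode_ld_st_word_ubyte.  */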
7602
7603 /* Actually set up the scratch space for a displaced instruction. */
7604
7605 void
7606 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
7607 CORE_ADDR to, arm_displaced_step_closure *dsc)
7608 {
7609 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7610 unsigned int i, len, offset;
7611 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7612 int size = dsc->is_thumb? 2 : 4;
7613 const gdb_byte *bkp_insn;
7614
7615 offset = 0;
7616 /* Poke modified instruction(s). */
7617 for (i = 0; i < dsc->numinsns; i++)
7618 {
7619 if (size == 4)
7620 displaced_debug_printf ("writing insn %.8lx at %.8lx",
7621 dsc->modinsn[i], (unsigned long) to + offset);
7622 else if (size == 2)
7623 displaced_debug_printf ("writing insn %.4x at %.8lx",
7624 (unsigned short) dsc->modinsn[i],
7625 (unsigned long) to + offset);
7626
7627 write_memory_unsigned_integer (to + offset, size,
7628 byte_order_for_code,
7629 dsc->modinsn[i]);
7630 offset += size;
7631 }
7632
7633 /* Choose the correct breakpoint instruction. */
7634 if (dsc->is_thumb)
7635 {
7636 bkp_insn = tdep->thumb_breakpoint;
7637 len = tdep->thumb_breakpoint_size;
7638 }
7639 else
7640 {
7641 bkp_insn = tdep->arm_breakpoint;
7642 len = tdep->arm_breakpoint_size;
7643 }
7644
7645 /* Put breakpoint afterwards. */
7646 write_memory (to + offset, bkp_insn, len);
7647
7648 displaced_debug_printf ("copy %s->%s", paddress (gdbarch, from),
7649 paddress (gdbarch, to));
7650 }
7651
7652 /* Entry point for cleaning things up after a displaced instruction has been
7653 single-stepped. */
7654
7655 void
7656 arm_displaced_step_fixup (struct gdbarch *gdbarch,
7657 struct displaced_step_closure *dsc_,
7658 CORE_ADDR from, CORE_ADDR to,
7659 struct regcache *regs)
7660 {
7661 arm_displaced_step_closure *dsc = (arm_displaced_step_closure *) dsc_;
7662
7663 if (dsc->cleanup)
7664 dsc->cleanup (gdbarch, regs, dsc);
7665
7666 if (!dsc->wrote_to_pc)
7667 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
7668 dsc->insn_addr + dsc->insn_size);
7669
7670 }
7671
7672 #include "bfd-in2.h"
7673 #include "libcoff.h"
7674
7675 static int
7676 gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
7677 {
7678 gdb_disassembler *di
7679 = static_cast<gdb_disassembler *>(info->application_data);
7680 struct gdbarch *gdbarch = di->arch ();
7681
7682 if (arm_pc_is_thumb (gdbarch, memaddr))
7683 {
7684 static asymbol *asym;
7685 static combined_entry_type ce;
7686 static struct coff_symbol_struct csym;
7687 static struct bfd fake_bfd;
7688 static bfd_target fake_target;
7689
7690 if (csym.native == NULL)
7691 {
7692 /* Create a fake symbol vector containing a Thumb symbol.
7693 This is solely so that the code in print_insn_little_arm()
7694 and print_insn_big_arm() in opcodes/arm-dis.c will detect
7695 the presence of a Thumb symbol and switch to decoding
7696 Thumb instructions. */
7697
7698 fake_target.flavour = bfd_target_coff_flavour;
7699 fake_bfd.xvec = &fake_target;
7700 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
7701 csym.native = &ce;
7702 csym.symbol.the_bfd = &fake_bfd;
7703 csym.symbol.name = "fake";
7704 asym = (asymbol *) & csym;
7705 }
7706
7707 memaddr = UNMAKE_THUMB_ADDR (memaddr);
7708 info->symbols = &asym;
7709 }
7710 else
7711 info->symbols = NULL;
7712
7713 /* GDB is able to get bfd_mach from the exec_bfd, so info->mach is
7714 accurate; set the USER_SPECIFIED_MACHINE_TYPE bit. Otherwise,
7715 opcodes/arm-dis.c:print_insn would reset info->mach, and that would
7716 trigger the assert on the mismatch between info->mach and
7717 bfd_get_mach (current_program_space->exec_bfd ()) in
7718 default_print_insn. */
7719 if (current_program_space->exec_bfd () != NULL)
7720 info->flags |= USER_SPECIFIED_MACHINE_TYPE;
7721
7722 return default_print_insn (memaddr, info);
7723 }
7724
7725 /* The following define instruction sequences that will cause ARM
7726 CPUs to take an undefined instruction trap. These are used to
7727 signal a breakpoint to GDB.
7728
7729 The newer ARMv4T CPUs are capable of operating in ARM or Thumb
7730 mode. A different instruction is required for each mode. The ARM
7731 CPUs can also be big- or little-endian. Thus four different
7732 instructions are needed to support all cases.
7733
7734 Note: ARMv4 defines several new instructions that will take the
7735 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
7736 not in fact add the new instructions. The new undefined
7737 instructions in ARMv4 are all instructions that had no defined
7738 behaviour in earlier chips. There is no guarantee that they will
7739 raise an exception; they may instead be treated as NOPs. In practice,
7740 it may only be safe to rely on instructions matching:
7741
7742 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
7743 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
7744 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
7745
7746 Even this may only be true if the condition predicate is true. The
7747 following use a condition predicate of ALWAYS so it is always TRUE.
7748
7749 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
7750 and NetBSD all use a software interrupt rather than an undefined
7751 instruction to force a trap. This can be handled by the
7752 ABI-specific code during establishment of the gdbarch vector. */
7753
7754 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
7755 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
7756 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
7757 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
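/* For reference (illustrative note): the little-endian ARM sequence
   above is the word 0xe7ffdefe, which has cond = AL, bits 25-27 = 0b011
   and bit 4 set, so it falls inside the architecturally undefined
   pattern shown in the comment above. The Thumb sequence 0xbebe matches
   the BKPT #0xbe encoding.  */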
7758
7759 static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
7760 static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
7761 static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
7762 static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
7763
7764 /* Implement the breakpoint_kind_from_pc gdbarch method. */
7765
7766 static int
7767 arm_breakpoint_kind_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr)
7768 {
7769 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7770 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7771
7772 if (arm_pc_is_thumb (gdbarch, *pcptr))
7773 {
7774 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
7775
7776 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
7777 check whether we are replacing a 32-bit instruction. */
7778 if (tdep->thumb2_breakpoint != NULL)
7779 {
7780 gdb_byte buf[2];
7781
7782 if (target_read_memory (*pcptr, buf, 2) == 0)
7783 {
7784 unsigned short inst1;
7785
7786 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
7787 if (thumb_insn_size (inst1) == 4)
7788 return ARM_BP_KIND_THUMB2;
7789 }
7790 }
7791
7792 return ARM_BP_KIND_THUMB;
7793 }
7794 else
7795 return ARM_BP_KIND_ARM;
7796
7797 }
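/* Example (illustrative only): if the halfword at the breakpoint
   address is 0xf7ff (the first half of a 32-bit Thumb-2 instruction,
   since its top five bits are 0b11110), thumb_insn_size returns 4 and
   the kind reported is ARM_BP_KIND_THUMB2; for a 16-bit instruction
   such as 0x4770 (BX lr) the kind is ARM_BP_KIND_THUMB.  */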
7798
7799 /* Implement the sw_breakpoint_from_kind gdbarch method. */
7800
7801 static const gdb_byte *
7802 arm_sw_breakpoint_from_kind (struct gdbarch *gdbarch, int kind, int *size)
7803 {
7804 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7805
7806 switch (kind)
7807 {
7808 case ARM_BP_KIND_ARM:
7809 *size = tdep->arm_breakpoint_size;
7810 return tdep->arm_breakpoint;
7811 case ARM_BP_KIND_THUMB:
7812 *size = tdep->thumb_breakpoint_size;
7813 return tdep->thumb_breakpoint;
7814 case ARM_BP_KIND_THUMB2:
7815 *size = tdep->thumb2_breakpoint_size;
7816 return tdep->thumb2_breakpoint;
7817 default:
7818 gdb_assert_not_reached ("unexpected arm breakpoint kind");
7819 }
7820 }
7821
7822 /* Implement the breakpoint_kind_from_current_state gdbarch method. */
7823
7824 static int
7825 arm_breakpoint_kind_from_current_state (struct gdbarch *gdbarch,
7826 struct regcache *regcache,
7827 CORE_ADDR *pcptr)
7828 {
7829 gdb_byte buf[4];
7830
7831 /* Check that the memory pointed to by PC is readable. */
7832 if (target_read_memory (regcache_read_pc (regcache), buf, 4) == 0)
7833 {
7834 struct arm_get_next_pcs next_pcs_ctx;
7835
7836 arm_get_next_pcs_ctor (&next_pcs_ctx,
7837 &arm_get_next_pcs_ops,
7838 gdbarch_byte_order (gdbarch),
7839 gdbarch_byte_order_for_code (gdbarch),
7840 0,
7841 regcache);
7842
7843 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
7844
7845 /* If *PCPTR is one of the possible next instructions of the
7846 current PC, do the software single-step computation and derive
7847 the Thumb mode from the destination address. */
7848 for (CORE_ADDR pc : next_pcs)
7849 {
7850 if (UNMAKE_THUMB_ADDR (pc) == *pcptr)
7851 {
7852 if (IS_THUMB_ADDR (pc))
7853 {
7854 *pcptr = MAKE_THUMB_ADDR (*pcptr);
7855 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7856 }
7857 else
7858 return ARM_BP_KIND_ARM;
7859 }
7860 }
7861 }
7862
7863 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7864 }
7865
7866 /* Extract from an array REGBUF containing the (raw) register state a
7867 function return value of type TYPE, and copy that, in virtual
7868 format, into VALBUF. */
7869
7870 static void
7871 arm_extract_return_value (struct type *type, struct regcache *regs,
7872 gdb_byte *valbuf)
7873 {
7874 struct gdbarch *gdbarch = regs->arch ();
7875 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7876
7877 if (TYPE_CODE_FLT == type->code ())
7878 {
7879 switch (gdbarch_tdep (gdbarch)->fp_model)
7880 {
7881 case ARM_FLOAT_FPA:
7882 {
7883 /* The value is in register F0 in internal format. We need to
7884 extract the raw value and then convert it to the desired
7885 internal type. */
7886 bfd_byte tmpbuf[ARM_FP_REGISTER_SIZE];
7887
7888 regs->cooked_read (ARM_F0_REGNUM, tmpbuf);
7889 target_float_convert (tmpbuf, arm_ext_type (gdbarch),
7890 valbuf, type);
7891 }
7892 break;
7893
7894 case ARM_FLOAT_SOFT_FPA:
7895 case ARM_FLOAT_SOFT_VFP:
7896 /* ARM_FLOAT_VFP can arise if this is a variadic function, and so
7897 is not using the VFP ABI code. */
7898 case ARM_FLOAT_VFP:
7899 regs->cooked_read (ARM_A1_REGNUM, valbuf);
7900 if (TYPE_LENGTH (type) > 4)
7901 regs->cooked_read (ARM_A1_REGNUM + 1,
7902 valbuf + ARM_INT_REGISTER_SIZE);
7903 break;
7904
7905 default:
7906 internal_error (__FILE__, __LINE__,
7907 _("arm_extract_return_value: "
7908 "Floating point model not supported"));
7909 break;
7910 }
7911 }
7912 else if (type->code () == TYPE_CODE_INT
7913 || type->code () == TYPE_CODE_CHAR
7914 || type->code () == TYPE_CODE_BOOL
7915 || type->code () == TYPE_CODE_PTR
7916 || TYPE_IS_REFERENCE (type)
7917 || type->code () == TYPE_CODE_ENUM)
7918 {
7919 /* If the type is a plain integer, then the access is
7920 straightforward. Otherwise we have to play around a bit
7921 more. */
7922 int len = TYPE_LENGTH (type);
7923 int regno = ARM_A1_REGNUM;
7924 ULONGEST tmp;
7925
7926 while (len > 0)
7927 {
7928 /* By using store_unsigned_integer we avoid having to do
7929 anything special for small big-endian values. */
7930 regcache_cooked_read_unsigned (regs, regno++, &tmp);
7931 store_unsigned_integer (valbuf,
7932 (len > ARM_INT_REGISTER_SIZE
7933 ? ARM_INT_REGISTER_SIZE : len),
7934 byte_order, tmp);
7935 len -= ARM_INT_REGISTER_SIZE;
7936 valbuf += ARM_INT_REGISTER_SIZE;
7937 }
7938 }
7939 else
7940 {
7941 /* For a structure or union the behaviour is as if the value had
7942 been stored to word-aligned memory and then loaded into
7943 registers with 32-bit load instruction(s). */
7944 int len = TYPE_LENGTH (type);
7945 int regno = ARM_A1_REGNUM;
7946 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
7947
7948 while (len > 0)
7949 {
7950 regs->cooked_read (regno++, tmpbuf);
7951 memcpy (valbuf, tmpbuf,
7952 len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len);
7953 len -= ARM_INT_REGISTER_SIZE;
7954 valbuf += ARM_INT_REGISTER_SIZE;
7955 }
7956 }
7957 }
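/* Sketch of the integer path above (assuming little-endian and a
   non-VFP return): a 64-bit integer result occupies r0 and r1, so the
   loop reads r0 into the first four bytes of VALBUF and r1 into the
   next four; a 32-bit int needs only the single read of r0.  */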
7958
7959
7960 /* Will a function return an aggregate type in memory or in a
7961 register? Return 0 if an aggregate type can be returned in a
7962 register, 1 if it must be returned in memory. */
7963
7964 static int
7965 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
7966 {
7967 enum type_code code;
7968
7969 type = check_typedef (type);
7970
7971 /* Simple, non-aggregate types (i.e. not including vectors and
7972 complex) are always returned in a register (or registers). */
7973 code = type->code ();
7974 if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
7975 && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
7976 return 0;
7977
7978 if (TYPE_CODE_ARRAY == code && type->is_vector ())
7979 {
7980 /* Vector values should be returned using ARM registers if they
7981 are not over 16 bytes. */
7982 return (TYPE_LENGTH (type) > 16);
7983 }
7984
7985 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
7986 {
7987 /* The AAPCS says all aggregates not larger than a word are returned
7988 in a register. */
7989 if (TYPE_LENGTH (type) <= ARM_INT_REGISTER_SIZE)
7990 return 0;
7991
7992 return 1;
7993 }
7994 else
7995 {
7996 int nRc;
7997
7998 /* All aggregate types that won't fit in a register must be returned
7999 in memory. */
8000 if (TYPE_LENGTH (type) > ARM_INT_REGISTER_SIZE)
8001 return 1;
8002
8003 /* In the ARM ABI, "integer" like aggregate types are returned in
8004 registers. For an aggregate type to be integer like, its size
8005 must be less than or equal to ARM_INT_REGISTER_SIZE and the
8006 offset of each addressable subfield must be zero. Note that bit
8007 fields are not addressable, and all addressable subfields of
8008 unions always start at offset zero.
8009
8010 This function is based on the behaviour of GCC 2.95.1.
8011 See: gcc/arm.c: arm_return_in_memory() for details.
8012
8013 Note: All versions of GCC before GCC 2.95.2 do not set up the
8014 parameters correctly for a function returning the following
8015 structure: struct { float f;}; This should be returned in memory,
8016 not a register. Richard Earnshaw sent me a patch, but I do not
8017 know of any way to detect if a function like the above has been
8018 compiled with the correct calling convention. */
8019
8020 /* Assume all other aggregate types can be returned in a register.
8021 Run a check for structures, unions and arrays. */
8022 nRc = 0;
8023
8024 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
8025 {
8026 int i;
8027 /* Need to check if this struct/union is "integer" like. For
8028 this to be true, its size must be less than or equal to
8029 ARM_INT_REGISTER_SIZE and the offset of each addressable
8030 subfield must be zero. Note that bit fields are not
8031 addressable, and unions always start at offset zero. If any
8032 of the subfields is a floating point type, the struct/union
8033 cannot be an integer type. */
8034
8035 /* For each field in the object, check:
8036 1) Is it FP? --> yes, nRc = 1;
8037 2) Is it addressable (bitpos != 0) and
8038 not packed (bitsize == 0)?
8039 --> yes, nRc = 1
8040 */
8041
8042 for (i = 0; i < type->num_fields (); i++)
8043 {
8044 enum type_code field_type_code;
8045
8046 field_type_code
8047 = check_typedef (type->field (i).type ())->code ();
8048
8049 /* Is it a floating point type field? */
8050 if (field_type_code == TYPE_CODE_FLT)
8051 {
8052 nRc = 1;
8053 break;
8054 }
8055
8056 /* If bitpos != 0, then we have to care about it. */
8057 if (TYPE_FIELD_BITPOS (type, i) != 0)
8058 {
8059 /* Bitfields are not addressable. If the field bitsize is
8060 zero, then the field is not packed. Hence it cannot be
8061 a bitfield or any other packed type. */
8062 if (TYPE_FIELD_BITSIZE (type, i) == 0)
8063 {
8064 nRc = 1;
8065 break;
8066 }
8067 }
8068 }
8069 }
8070
8071 return nRc;
8072 }
8073 }
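/* Illustrative cases for the logic above: under an AAPCS ABI a
   struct { short s; } (2 bytes) is returned in r0, while a
   struct { char c[6]; } (6 bytes) goes to memory. Under APCS even a
   4-byte struct { float f; } is forced to memory because it contains a
   floating-point field.  */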
8074
8075 /* Write into appropriate registers a function return value of type
8076 TYPE, given in virtual format. */
8077
8078 static void
8079 arm_store_return_value (struct type *type, struct regcache *regs,
8080 const gdb_byte *valbuf)
8081 {
8082 struct gdbarch *gdbarch = regs->arch ();
8083 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8084
8085 if (type->code () == TYPE_CODE_FLT)
8086 {
8087 gdb_byte buf[ARM_FP_REGISTER_SIZE];
8088
8089 switch (gdbarch_tdep (gdbarch)->fp_model)
8090 {
8091 case ARM_FLOAT_FPA:
8092
8093 target_float_convert (valbuf, type, buf, arm_ext_type (gdbarch));
8094 regs->cooked_write (ARM_F0_REGNUM, buf);
8095 break;
8096
8097 case ARM_FLOAT_SOFT_FPA:
8098 case ARM_FLOAT_SOFT_VFP:
8099 /* ARM_FLOAT_VFP can arise if this is a variadic function, and so
8100 is not using the VFP ABI code. */
8101 case ARM_FLOAT_VFP:
8102 regs->cooked_write (ARM_A1_REGNUM, valbuf);
8103 if (TYPE_LENGTH (type) > 4)
8104 regs->cooked_write (ARM_A1_REGNUM + 1,
8105 valbuf + ARM_INT_REGISTER_SIZE);
8106 break;
8107
8108 default:
8109 internal_error (__FILE__, __LINE__,
8110 _("arm_store_return_value: Floating "
8111 "point model not supported"));
8112 break;
8113 }
8114 }
8115 else if (type->code () == TYPE_CODE_INT
8116 || type->code () == TYPE_CODE_CHAR
8117 || type->code () == TYPE_CODE_BOOL
8118 || type->code () == TYPE_CODE_PTR
8119 || TYPE_IS_REFERENCE (type)
8120 || type->code () == TYPE_CODE_ENUM)
8121 {
8122 if (TYPE_LENGTH (type) <= 4)
8123 {
8124 /* Values of one word or less are zero/sign-extended and
8125 returned in r0. */
8126 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
8127 LONGEST val = unpack_long (type, valbuf);
8128
8129 store_signed_integer (tmpbuf, ARM_INT_REGISTER_SIZE, byte_order, val);
8130 regs->cooked_write (ARM_A1_REGNUM, tmpbuf);
8131 }
8132 else
8133 {
8134 /* Integral values greater than one word are stored in consecutive
8135 registers starting with r0. This will always be a multiple of
8136 the register size. */
8137 int len = TYPE_LENGTH (type);
8138 int regno = ARM_A1_REGNUM;
8139
8140 while (len > 0)
8141 {
8142 regs->cooked_write (regno++, valbuf);
8143 len -= ARM_INT_REGISTER_SIZE;
8144 valbuf += ARM_INT_REGISTER_SIZE;
8145 }
8146 }
8147 }
8148 else
8149 {
8150 /* For a structure or union the behaviour is as if the value had
8151 been stored to word-aligned memory and then loaded into
8152 registers with 32-bit load instruction(s). */
8153 int len = TYPE_LENGTH (type);
8154 int regno = ARM_A1_REGNUM;
8155 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
8156
8157 while (len > 0)
8158 {
8159 memcpy (tmpbuf, valbuf,
8160 len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len);
8161 regs->cooked_write (regno++, tmpbuf);
8162 len -= ARM_INT_REGISTER_SIZE;
8163 valbuf += ARM_INT_REGISTER_SIZE;
8164 }
8165 }
8166 }
8167
8168
8169 /* Handle function return values. */
8170
8171 static enum return_value_convention
8172 arm_return_value (struct gdbarch *gdbarch, struct value *function,
8173 struct type *valtype, struct regcache *regcache,
8174 gdb_byte *readbuf, const gdb_byte *writebuf)
8175 {
8176 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8177 struct type *func_type = function ? value_type (function) : NULL;
8178 enum arm_vfp_cprc_base_type vfp_base_type;
8179 int vfp_base_count;
8180
8181 if (arm_vfp_abi_for_function (gdbarch, func_type)
8182 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
8183 {
8184 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
8185 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
8186 int i;
8187 for (i = 0; i < vfp_base_count; i++)
8188 {
8189 if (reg_char == 'q')
8190 {
8191 if (writebuf)
8192 arm_neon_quad_write (gdbarch, regcache, i,
8193 writebuf + i * unit_length);
8194
8195 if (readbuf)
8196 arm_neon_quad_read (gdbarch, regcache, i,
8197 readbuf + i * unit_length);
8198 }
8199 else
8200 {
8201 char name_buf[4];
8202 int regnum;
8203
8204 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
8205 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8206 strlen (name_buf));
8207 if (writebuf)
8208 regcache->cooked_write (regnum, writebuf + i * unit_length);
8209 if (readbuf)
8210 regcache->cooked_read (regnum, readbuf + i * unit_length);
8211 }
8212 }
8213 return RETURN_VALUE_REGISTER_CONVENTION;
8214 }
8215
8216 if (valtype->code () == TYPE_CODE_STRUCT
8217 || valtype->code () == TYPE_CODE_UNION
8218 || valtype->code () == TYPE_CODE_ARRAY)
8219 {
8220 if (tdep->struct_return == pcc_struct_return
8221 || arm_return_in_memory (gdbarch, valtype))
8222 return RETURN_VALUE_STRUCT_CONVENTION;
8223 }
8224 else if (valtype->code () == TYPE_CODE_COMPLEX)
8225 {
8226 if (arm_return_in_memory (gdbarch, valtype))
8227 return RETURN_VALUE_STRUCT_CONVENTION;
8228 }
8229
8230 if (writebuf)
8231 arm_store_return_value (valtype, regcache, writebuf);
8232
8233 if (readbuf)
8234 arm_extract_return_value (valtype, regcache, readbuf);
8235
8236 return RETURN_VALUE_REGISTER_CONVENTION;
8237 }
8238
8239
8240 static int
8241 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
8242 {
8243 struct gdbarch *gdbarch = get_frame_arch (frame);
8244 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8245 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8246 CORE_ADDR jb_addr;
8247 gdb_byte buf[ARM_INT_REGISTER_SIZE];
8248
8249 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
8250
8251 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
8252 ARM_INT_REGISTER_SIZE))
8253 return 0;
8254
8255 *pc = extract_unsigned_integer (buf, ARM_INT_REGISTER_SIZE, byte_order);
8256 return 1;
8257 }
8258 /* A call to the CMSE secure entry function "foo" at "a" is rewritten by
8259 GNU ld as "b".
8260 a) bl xxxx <foo>
8261
8262 <foo>
8263 xxxx:
8264
8265 b) bl yyyy <__acle_se_foo>
8266
8267 section .gnu.sgstubs:
8268 <foo>
8269 yyyy: sg // secure gateway
8270 b.w xxxx <__acle_se_foo> // original_branch_dest
8271
8272 <__acle_se_foo>
8273 xxxx:
8274
8275 When control is at "b", the PC contains "yyyy" (the sg address), which is
8276 a trampoline and does not exist in the source code. This function returns
8277 the target PC "xxxx". For more details please refer to section 5.4
8278 (Entry functions) and section 3.4.4 (C level development flow of secure code)
8279 of "armv8-m-security-extensions-requirements-on-development-tools-engineering-specification"
8280 document on www.developer.arm.com. */
8281
8282 static CORE_ADDR
8283 arm_skip_cmse_entry (CORE_ADDR pc, const char *name, struct objfile *objfile)
8284 {
8285 int target_len = strlen (name) + strlen ("__acle_se_") + 1;
8286 char *target_name = (char *) alloca (target_len);
8287 xsnprintf (target_name, target_len, "%s%s", "__acle_se_", name);
8288
8289 struct bound_minimal_symbol minsym
8290 = lookup_minimal_symbol (target_name, NULL, objfile);
8291
8292 if (minsym.minsym != nullptr)
8293 return BMSYMBOL_VALUE_ADDRESS (minsym);
8294
8295 return 0;
8296 }
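/* Example (illustrative only): if the PC is inside the secure gateway
   stub for a function "foo", the lookup above searches for the symbol
   "__acle_se_foo" and, if found, returns its address as the real entry
   point.  */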
8297
8298 /* Return true when SEC points to ".gnu.sgstubs" section. */
8299
8300 static bool
8301 arm_is_sgstubs_section (struct obj_section *sec)
8302 {
8303 return (sec != nullptr
8304 && sec->the_bfd_section != nullptr
8305 && sec->the_bfd_section->name != nullptr
8306 && streq (sec->the_bfd_section->name, ".gnu.sgstubs"));
8307 }
8308
8309 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
8310 return the target PC. Otherwise return 0. */
8311
8312 CORE_ADDR
8313 arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
8314 {
8315 const char *name;
8316 int namelen;
8317 CORE_ADDR start_addr;
8318
8319 /* Find the starting address and name of the function containing the PC. */
8320 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
8321 {
8322 /* A trampoline 'bx reg' doesn't belong to any function. Do the
8323 check here. */
8324 start_addr = arm_skip_bx_reg (frame, pc);
8325 if (start_addr != 0)
8326 return start_addr;
8327
8328 return 0;
8329 }
8330
8331 /* If PC is in a Thumb call or return stub, return the address of the
8332 target PC, which is in a register. The thunk functions are called
8333 _call_via_xx, where xx is the register name. The possible names
8334 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
8335 functions, named __ARM_call_via_r[0-7]. */
8336 if (startswith (name, "_call_via_")
8337 || startswith (name, "__ARM_call_via_"))
8338 {
8339 /* Use the name suffix to determine which register contains the
8340 target PC. */
8341 static const char *table[15] =
8342 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
8343 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
8344 };
8345 int regno;
8346 int offset = strlen (name) - 2;
8347
8348 for (regno = 0; regno <= 14; regno++)
8349 if (strcmp (&name[offset], table[regno]) == 0)
8350 return get_frame_register_unsigned (frame, regno);
8351 }
8352
8353 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
8354 non-interworking calls to foo. We could decode the stubs
8355 to find the target but it's easier to use the symbol table. */
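/* Name-stripping example (illustrative only): for the stub
   "__bar_from_thumb", namelen is 16, the leading "__" and the
   "_from_thumb" suffix are dropped, and the code below looks up the
   minimal symbol "bar" in the stub's objfile.  */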
8356 namelen = strlen (name);
8357 if (name[0] == '_' && name[1] == '_'
8358 && ((namelen > 2 + strlen ("_from_thumb")
8359 && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
8360 || (namelen > 2 + strlen ("_from_arm")
8361 && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
8362 {
8363 char *target_name;
8364 int target_len = namelen - 2;
8365 struct bound_minimal_symbol minsym;
8366 struct objfile *objfile;
8367 struct obj_section *sec;
8368
8369 if (name[namelen - 1] == 'b')
8370 target_len -= strlen ("_from_thumb");
8371 else
8372 target_len -= strlen ("_from_arm");
8373
8374 target_name = (char *) alloca (target_len + 1);
8375 memcpy (target_name, name + 2, target_len);
8376 target_name[target_len] = '\0';
8377
8378 sec = find_pc_section (pc);
8379 objfile = (sec == NULL) ? NULL : sec->objfile;
8380 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
8381 if (minsym.minsym != NULL)
8382 return BMSYMBOL_VALUE_ADDRESS (minsym);
8383 else
8384 return 0;
8385 }
8386
8387 struct obj_section *section = find_pc_section (pc);
8388
8389 /* Check whether SECTION points to the ".gnu.sgstubs" section. */
8390 if (arm_is_sgstubs_section (section))
8391 return arm_skip_cmse_entry (pc, name, section->objfile);
8392
8393 return 0; /* not a stub */
8394 }
8395
8396 static void
8397 arm_update_current_architecture (void)
8398 {
8399 struct gdbarch_info info;
8400
8401 /* If the current architecture is not ARM, we have nothing to do. */
8402 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
8403 return;
8404
8405 /* Update the architecture. */
8406 gdbarch_info_init (&info);
8407
8408 if (!gdbarch_update_p (info))
8409 internal_error (__FILE__, __LINE__, _("could not update architecture"));
8410 }
8411
8412 static void
8413 set_fp_model_sfunc (const char *args, int from_tty,
8414 struct cmd_list_element *c)
8415 {
8416 int fp_model;
8417
8418 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
8419 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
8420 {
8421 arm_fp_model = (enum arm_float_model) fp_model;
8422 break;
8423 }
8424
8425 if (fp_model == ARM_FLOAT_LAST)
8426 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
8427 current_fp_model);
8428
8429 arm_update_current_architecture ();
8430 }
8431
8432 static void
8433 show_fp_model (struct ui_file *file, int from_tty,
8434 struct cmd_list_element *c, const char *value)
8435 {
8436 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8437
8438 if (arm_fp_model == ARM_FLOAT_AUTO
8439 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8440 fprintf_filtered (file, _("\
8441 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
8442 fp_model_strings[tdep->fp_model]);
8443 else
8444 fprintf_filtered (file, _("\
8445 The current ARM floating point model is \"%s\".\n"),
8446 fp_model_strings[arm_fp_model]);
8447 }
8448
8449 static void
8450 arm_set_abi (const char *args, int from_tty,
8451 struct cmd_list_element *c)
8452 {
8453 int arm_abi;
8454
8455 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
8456 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
8457 {
8458 arm_abi_global = (enum arm_abi_kind) arm_abi;
8459 break;
8460 }
8461
8462 if (arm_abi == ARM_ABI_LAST)
8463 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
8464 arm_abi_string);
8465
8466 arm_update_current_architecture ();
8467 }
8468
8469 static void
8470 arm_show_abi (struct ui_file *file, int from_tty,
8471 struct cmd_list_element *c, const char *value)
8472 {
8473 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8474
8475 if (arm_abi_global == ARM_ABI_AUTO
8476 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8477 fprintf_filtered (file, _("\
8478 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
8479 arm_abi_strings[tdep->arm_abi]);
8480 else
8481 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
8482 arm_abi_string);
8483 }
8484
8485 static void
8486 arm_show_fallback_mode (struct ui_file *file, int from_tty,
8487 struct cmd_list_element *c, const char *value)
8488 {
8489 fprintf_filtered (file,
8490 _("The current execution mode assumed "
8491 "(when symbols are unavailable) is \"%s\".\n"),
8492 arm_fallback_mode_string);
8493 }
8494
8495 static void
8496 arm_show_force_mode (struct ui_file *file, int from_tty,
8497 struct cmd_list_element *c, const char *value)
8498 {
8499 fprintf_filtered (file,
8500 _("The current execution mode assumed "
8501 "(even when symbols are available) is \"%s\".\n"),
8502 arm_force_mode_string);
8503 }
8504
8505 /* If the user changes the register disassembly style used for info
8506 register and other commands, we have to also switch the style used
8507 in opcodes for disassembly output. This function is run in the "set
8508 arm disassembly" command, and does that. */
8509
8510 static void
8511 set_disassembly_style_sfunc (const char *args, int from_tty,
8512 struct cmd_list_element *c)
8513 {
8514 /* Convert the short style name into the long style name (e.g. reg-names-*)
8515 before calling the generic set_disassembler_options() function. */
8516 std::string long_name = std::string ("reg-names-") + disassembly_style;
8517 set_disassembler_options (&long_name[0]);
8518 }
8519
8520 static void
8521 show_disassembly_style_sfunc (struct ui_file *file, int from_tty,
8522 struct cmd_list_element *c, const char *value)
8523 {
8524 struct gdbarch *gdbarch = get_current_arch ();
8525 char *options = get_disassembler_options (gdbarch);
8526 const char *style = "";
8527 int len = 0;
8528 const char *opt;
8529
8530 FOR_EACH_DISASSEMBLER_OPTION (opt, options)
8531 if (CONST_STRNEQ (opt, "reg-names-"))
8532 {
8533 style = &opt[strlen ("reg-names-")];
8534 len = strcspn (style, ",");
8535 }
8536
8537 fprintf_unfiltered (file, "The disassembly style is \"%.*s\".\n", len, style);
8538 }
8539 \f
8540 /* Return the ARM register name corresponding to register I. */
8541 static const char *
8542 arm_register_name (struct gdbarch *gdbarch, int i)
8543 {
8544 const int num_regs = gdbarch_num_regs (gdbarch);
8545
8546 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
8547 && i >= num_regs && i < num_regs + 32)
8548 {
8549 static const char *const vfp_pseudo_names[] = {
8550 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
8551 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
8552 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
8553 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
8554 };
8555
8556 return vfp_pseudo_names[i - num_regs];
8557 }
8558
8559 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
8560 && i >= num_regs + 32 && i < num_regs + 32 + 16)
8561 {
8562 static const char *const neon_pseudo_names[] = {
8563 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
8564 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
8565 };
8566
8567 return neon_pseudo_names[i - num_regs - 32];
8568 }
8569
8570 if (i >= ARRAY_SIZE (arm_register_names))
8571 /* These registers are only supported on targets which supply
8572 an XML description. */
8573 return "";
8574
8575 return arm_register_names[i];
8576 }
8577
8578 /* Test whether the coff symbol specific value corresponds to a Thumb
8579 function. */
8580
8581 static int
8582 coff_sym_is_thumb (int val)
8583 {
8584 return (val == C_THUMBEXT
8585 || val == C_THUMBSTAT
8586 || val == C_THUMBEXTFUNC
8587 || val == C_THUMBSTATFUNC
8588 || val == C_THUMBLABEL);
8589 }
8590
8591 /* arm_coff_make_msymbol_special()
8592 arm_elf_make_msymbol_special()
8593
8594 These functions test whether the COFF or ELF symbol corresponds to
8595 an address in thumb code, and set a "special" bit in a minimal
8596 symbol to indicate that it does. */
8597
8598 static void
8599 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
8600 {
8601 elf_symbol_type *elfsym = (elf_symbol_type *) sym;
8602
8603 if (ARM_GET_SYM_BRANCH_TYPE (elfsym->internal_elf_sym.st_target_internal)
8604 == ST_BRANCH_TO_THUMB)
8605 MSYMBOL_SET_SPECIAL (msym);
8606 }
8607
8608 static void
8609 arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
8610 {
8611 if (coff_sym_is_thumb (val))
8612 MSYMBOL_SET_SPECIAL (msym);
8613 }
8614
8615 static void
8616 arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
8617 asymbol *sym)
8618 {
8619 const char *name = bfd_asymbol_name (sym);
8620 struct arm_per_bfd *data;
8621 struct arm_mapping_symbol new_map_sym;
8622
8623 gdb_assert (name[0] == '$');
8624 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
8625 return;
8626
8627 data = arm_bfd_data_key.get (objfile->obfd);
8628 if (data == NULL)
8629 data = arm_bfd_data_key.emplace (objfile->obfd,
8630 objfile->obfd->section_count);
8631 arm_mapping_symbol_vec &map
8632 = data->section_maps[bfd_asymbol_section (sym)->index];
8633
8634 new_map_sym.value = sym->value;
8635 new_map_sym.type = name[1];
8636
8637 /* Insert at the end; the vector will be sorted on first use. */
8638 map.push_back (new_map_sym);
8639 }
8640
8641 static void
8642 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
8643 {
8644 struct gdbarch *gdbarch = regcache->arch ();
8645 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
8646
8647 /* If necessary, set the T bit. */
8648 if (arm_apcs_32)
8649 {
8650 ULONGEST val, t_bit;
8651 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
8652 t_bit = arm_psr_thumb_bit (gdbarch);
8653 if (arm_pc_is_thumb (gdbarch, pc))
8654 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8655 val | t_bit);
8656 else
8657 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8658 val & ~t_bit);
8659 }
8660 }
8661
8662 /* Read the contents of a NEON quad register, by reading from two
8663 double registers. This is used to implement the quad pseudo
8664 registers, and for argument passing in case the quad registers are
8665 missing; vectors are passed in quad registers when using the VFP
8666 ABI, even if a NEON unit is not present. REGNUM is the index of
8667 the quad register, in [0, 15]. */
8668
8669 static enum register_status
8670 arm_neon_quad_read (struct gdbarch *gdbarch, readable_regcache *regcache,
8671 int regnum, gdb_byte *buf)
8672 {
8673 char name_buf[4];
8674 gdb_byte reg_buf[8];
8675 int offset, double_regnum;
8676 enum register_status status;
8677
8678 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8679 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8680 strlen (name_buf));
8681
8682 /* d0 is always the least significant half of q0. */
8683 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8684 offset = 8;
8685 else
8686 offset = 0;
8687
8688 status = regcache->raw_read (double_regnum, reg_buf);
8689 if (status != REG_VALID)
8690 return status;
8691 memcpy (buf + offset, reg_buf, 8);
8692
8693 offset = 8 - offset;
8694 status = regcache->raw_read (double_regnum + 1, reg_buf);
8695 if (status != REG_VALID)
8696 return status;
8697 memcpy (buf + offset, reg_buf, 8);
8698
8699 return REG_VALID;
8700 }
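/* Layout example (illustrative only): quad register q3 is composed of
   d6 (least significant half) and d7. On a little-endian target d6 is
   copied to BUF at offset 0 and d7 at offset 8; on a big-endian target
   the offsets are swapped so that the most significant double comes
   first in target byte order.  */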
8701
8702 static enum register_status
8703 arm_pseudo_read (struct gdbarch *gdbarch, readable_regcache *regcache,
8704 int regnum, gdb_byte *buf)
8705 {
8706 const int num_regs = gdbarch_num_regs (gdbarch);
8707 char name_buf[4];
8708 gdb_byte reg_buf[8];
8709 int offset, double_regnum;
8710
8711 gdb_assert (regnum >= num_regs);
8712 regnum -= num_regs;
8713
8714 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8715 /* Quad-precision register. */
8716 return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
8717 else
8718 {
8719 enum register_status status;
8720
8721 /* Single-precision register. */
8722 gdb_assert (regnum < 32);
8723
8724 /* s0 is always the least significant half of d0. */
8725 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8726 offset = (regnum & 1) ? 0 : 4;
8727 else
8728 offset = (regnum & 1) ? 4 : 0;
8729
8730 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8731 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8732 strlen (name_buf));
8733
8734 status = regcache->raw_read (double_regnum, reg_buf);
8735 if (status == REG_VALID)
8736 memcpy (buf, reg_buf + offset, 4);
8737 return status;
8738 }
8739 }
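/* Pseudo-register example (illustrative only): reading s5 maps to the
   upper half of d2 (5 >> 1 == 2), so on a little-endian target the
   four bytes at offset 4 of d2 are returned; reading a quad pseudo
   such as q1 (relative pseudo number 32 + 1) goes through
   arm_neon_quad_read instead.  */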
8740
8741 /* Store the contents of BUF to a NEON quad register, by writing to
8742 two double registers. This is used to implement the quad pseudo
8743 registers, and for argument passing in case the quad registers are
8744 missing; vectors are passed in quad registers when using the VFP
8745 ABI, even if a NEON unit is not present. REGNUM is the index
8746 of the quad register, in [0, 15]. */
8747
8748 static void
8749 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
8750 int regnum, const gdb_byte *buf)
8751 {
8752 char name_buf[4];
8753 int offset, double_regnum;
8754
8755 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8756 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8757 strlen (name_buf));
8758
8759 /* d0 is always the least significant half of q0. */
8760 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8761 offset = 8;
8762 else
8763 offset = 0;
8764
8765 regcache->raw_write (double_regnum, buf + offset);
8766 offset = 8 - offset;
8767 regcache->raw_write (double_regnum + 1, buf + offset);
8768 }
8769
8770 static void
8771 arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
8772 int regnum, const gdb_byte *buf)
8773 {
8774 const int num_regs = gdbarch_num_regs (gdbarch);
8775 char name_buf[4];
8776 gdb_byte reg_buf[8];
8777 int offset, double_regnum;
8778
8779 gdb_assert (regnum >= num_regs);
8780 regnum -= num_regs;
8781
8782 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8783 /* Quad-precision register. */
8784 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
8785 else
8786 {
8787 /* Single-precision register. */
8788 gdb_assert (regnum < 32);
8789
8790 /* s0 is always the least significant half of d0. */
8791 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8792 offset = (regnum & 1) ? 0 : 4;
8793 else
8794 offset = (regnum & 1) ? 4 : 0;
8795
8796 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8797 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8798 strlen (name_buf));
8799
8800 regcache->raw_read (double_regnum, reg_buf);
8801 memcpy (reg_buf + offset, buf, 4);
8802 regcache->raw_write (double_regnum, reg_buf);
8803 }
8804 }
8805
8806 static struct value *
8807 value_of_arm_user_reg (struct frame_info *frame, const void *baton)
8808 {
8809 const int *reg_p = (const int *) baton;
8810 return value_of_register (*reg_p, frame);
8811 }
8812 \f
8813 static enum gdb_osabi
8814 arm_elf_osabi_sniffer (bfd *abfd)
8815 {
8816 unsigned int elfosabi;
8817 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
8818
8819 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
8820
8821 if (elfosabi == ELFOSABI_ARM)
8822 /* GNU tools use this value. Check note sections in this case,
8823 as well. */
8824 {
8825 for (asection *sect : gdb_bfd_sections (abfd))
8826 generic_elf_osabi_sniff_abi_tag_sections (abfd, sect, &osabi);
8827 }
8828
8829 /* Anything else will be handled by the generic ELF sniffer. */
8830 return osabi;
8831 }
8832
8833 static int
8834 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
8835 struct reggroup *group)
8836 {
8837 /* The FPS register's type is INT, but it belongs to float_reggroup.
8838 Besides this, the FPS register belongs to save_reggroup,
8839 restore_reggroup, and all_reggroup, of course. */
8840 if (regnum == ARM_FPS_REGNUM)
8841 return (group == float_reggroup
8842 || group == save_reggroup
8843 || group == restore_reggroup
8844 || group == all_reggroup);
8845 else
8846 return default_register_reggroup_p (gdbarch, regnum, group);
8847 }
8848
8849 /* For backward-compatibility we allow two 'g' packet lengths with
8850 the remote protocol depending on whether FPA registers are
8851 supplied. M-profile targets do not have FPA registers, but some
8852 stubs already exist in the wild that use a 'g' packet that
8853 supplies them, albeit with dummy values. The packet format that
8854 includes FPA registers should be considered deprecated for
8855 M-profile targets. */
8856
8857 static void
8858 arm_register_g_packet_guesses (struct gdbarch *gdbarch)
8859 {
8860 if (gdbarch_tdep (gdbarch)->is_m)
8861 {
8862 const target_desc *tdesc;
8863
8864 /* If we know from the executable this is an M-profile target,
8865 cater for remote targets whose register set layout is the
8866 same as the FPA layout. */
8867 tdesc = arm_read_mprofile_description (ARM_M_TYPE_WITH_FPA);
8868 register_remote_g_packet_guess (gdbarch,
8869 ARM_CORE_REGS_SIZE + ARM_FP_REGS_SIZE,
8870 tdesc);
8871
8872 /* The regular M-profile layout. */
8873 tdesc = arm_read_mprofile_description (ARM_M_TYPE_M_PROFILE);
8874 register_remote_g_packet_guess (gdbarch, ARM_CORE_REGS_SIZE,
8875 tdesc);
8876
8877 /* M-profile plus M4F VFP. */
8878 tdesc = arm_read_mprofile_description (ARM_M_TYPE_VFP_D16);
8879 register_remote_g_packet_guess (gdbarch,
8880 ARM_CORE_REGS_SIZE + ARM_VFP2_REGS_SIZE,
8881 tdesc);
8882 }
8883
8884 /* Otherwise we don't have a useful guess. */
8885 }
8886
8887 /* Implement the code_of_frame_writable gdbarch method. */
8888
8889 static int
8890 arm_code_of_frame_writable (struct gdbarch *gdbarch, struct frame_info *frame)
8891 {
8892 if (gdbarch_tdep (gdbarch)->is_m
8893 && get_frame_type (frame) == SIGTRAMP_FRAME)
8894 {
8895 /* M-profile exception frames return to some magic PCs, which
8896 aren't writable at all. */
8897 return 0;
8898 }
8899 else
8900 return 1;
8901 }
8902
8903 /* Implement gdbarch_gnu_triplet_regexp. If the arch name is arm then allow it
8904 to be postfixed by a version (e.g. armv7hl). */
8905
8906 static const char *
8907 arm_gnu_triplet_regexp (struct gdbarch *gdbarch)
8908 {
8909 if (strcmp (gdbarch_bfd_arch_info (gdbarch)->arch_name, "arm") == 0)
8910 return "arm(v[^- ]*)?";
8911 return gdbarch_bfd_arch_info (gdbarch)->arch_name;
8912 }
8913
8914 /* Initialize the current architecture based on INFO. If possible,
8915 re-use an architecture from ARCHES, which is a list of
8916 architectures already created during this debugging session.
8917
8918 Called e.g. at program startup, when reading a core file, and when
8919 reading a binary file. */
8920
8921 static struct gdbarch *
8922 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
8923 {
8924 struct gdbarch_tdep *tdep;
8925 struct gdbarch *gdbarch;
8926 struct gdbarch_list *best_arch;
8927 enum arm_abi_kind arm_abi = arm_abi_global;
8928 enum arm_float_model fp_model = arm_fp_model;
8929 tdesc_arch_data_up tdesc_data;
8930 int i;
8931 bool is_m = false;
8932 int vfp_register_count = 0;
8933 bool have_vfp_pseudos = false, have_neon_pseudos = false;
8934 bool have_wmmx_registers = false;
8935 bool have_neon = false;
8936 bool have_fpa_registers = true;
8937 const struct target_desc *tdesc = info.target_desc;
8938
8939 /* If we have an object to base this architecture on, try to determine
8940 its ABI. */
8941
8942 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
8943 {
8944 int ei_osabi, e_flags;
8945
8946 switch (bfd_get_flavour (info.abfd))
8947 {
8948 case bfd_target_coff_flavour:
8949 /* Assume it's an old APCS-style ABI. */
8950 /* XXX WinCE? */
8951 arm_abi = ARM_ABI_APCS;
8952 break;
8953
8954 case bfd_target_elf_flavour:
8955 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
8956 e_flags = elf_elfheader (info.abfd)->e_flags;
8957
8958 if (ei_osabi == ELFOSABI_ARM)
8959 {
8960 /* GNU tools used to use this value, but do not for EABI
8961 objects. There's nowhere to tag an EABI version
8962 anyway, so assume APCS. */
8963 arm_abi = ARM_ABI_APCS;
8964 }
8965 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
8966 {
8967 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
8968
8969 switch (eabi_ver)
8970 {
8971 case EF_ARM_EABI_UNKNOWN:
8972 /* Assume GNU tools. */
8973 arm_abi = ARM_ABI_APCS;
8974 break;
8975
8976 case EF_ARM_EABI_VER4:
8977 case EF_ARM_EABI_VER5:
8978 arm_abi = ARM_ABI_AAPCS;
8979 /* EABI binaries default to VFP float ordering.
8980 They may also contain build attributes that can
8981 be used to identify if the VFP argument-passing
8982 ABI is in use. */
8983 if (fp_model == ARM_FLOAT_AUTO)
8984 {
8985 #ifdef HAVE_ELF
8986 switch (bfd_elf_get_obj_attr_int (info.abfd,
8987 OBJ_ATTR_PROC,
8988 Tag_ABI_VFP_args))
8989 {
8990 case AEABI_VFP_args_base:
8991 /* "The user intended FP parameter/result
8992 passing to conform to AAPCS, base
8993 variant". */
8994 fp_model = ARM_FLOAT_SOFT_VFP;
8995 break;
8996 case AEABI_VFP_args_vfp:
8997 /* "The user intended FP parameter/result
8998 passing to conform to AAPCS, VFP
8999 variant". */
9000 fp_model = ARM_FLOAT_VFP;
9001 break;
9002 case AEABI_VFP_args_toolchain:
9003 /* "The user intended FP parameter/result
9004 passing to conform to tool chain-specific
9005 conventions" - we don't know any such
9006 conventions, so leave it as "auto". */
9007 break;
9008 case AEABI_VFP_args_compatible:
9009 /* "Code is compatible with both the base
9010 and VFP variants; the user did not permit
9011 non-variadic functions to pass FP
9012 parameters/results" - leave it as
9013 "auto". */
9014 break;
9015 default:
9016 /* Attribute value not mentioned in the
9017 November 2012 ABI, so leave it as
9018 "auto". */
9019 break;
9020 }
9021 #else
9022 fp_model = ARM_FLOAT_SOFT_VFP;
9023 #endif
9024 }
9025 break;
9026
9027 default:
9028 /* Leave it as "auto". */
9029 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
9030 break;
9031 }
9032
9033 #ifdef HAVE_ELF
9034 /* Detect M-profile programs. This only works if the
9035 executable file includes build attributes; GCC does
9036 copy them to the executable, but e.g. RealView does
9037 not. */
9038 int attr_arch
9039 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9040 Tag_CPU_arch);
9041 int attr_profile
9042 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9043 Tag_CPU_arch_profile);
9044
9045 /* GCC specifies the profile for v6-M; RealView only
9046 specifies the profile for architectures starting with
9047 V7 (as opposed to architectures with a tag
9048 numerically greater than TAG_CPU_ARCH_V7). */
9049 if (!tdesc_has_registers (tdesc)
9050 && (attr_arch == TAG_CPU_ARCH_V6_M
9051 || attr_arch == TAG_CPU_ARCH_V6S_M
9052 || attr_profile == 'M'))
9053 is_m = true;
9054 #endif
9055 }
9056
9057 if (fp_model == ARM_FLOAT_AUTO)
9058 {
9059 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
9060 {
9061 case 0:
9062 /* Leave it as "auto". Strictly speaking this case
9063 means FPA, but almost nobody uses that now, and
9064 many toolchains fail to set the appropriate bits
9065 for the floating-point model they use. */
9066 break;
9067 case EF_ARM_SOFT_FLOAT:
9068 fp_model = ARM_FLOAT_SOFT_FPA;
9069 break;
9070 case EF_ARM_VFP_FLOAT:
9071 fp_model = ARM_FLOAT_VFP;
9072 break;
9073 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
9074 fp_model = ARM_FLOAT_SOFT_VFP;
9075 break;
9076 }
9077 }
9078
9079 if (e_flags & EF_ARM_BE8)
9080 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
9081
9082 break;
9083
9084 default:
9085 /* Leave it as "auto". */
9086 break;
9087 }
9088 }
9089
9090 /* Check any target description for validity. */
9091 if (tdesc_has_registers (tdesc))
9092 {
9093 /* For most registers we require GDB's default names; but also allow
9094 the numeric names for sp / lr / pc, as a convenience. */
9095 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
9096 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
9097 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
9098
9099 const struct tdesc_feature *feature;
9100 int valid_p;
9101
9102 feature = tdesc_find_feature (tdesc,
9103 "org.gnu.gdb.arm.core");
9104 if (feature == NULL)
9105 {
9106 feature = tdesc_find_feature (tdesc,
9107 "org.gnu.gdb.arm.m-profile");
9108 if (feature == NULL)
9109 return NULL;
9110 else
9111 is_m = true;
9112 }
9113
9114 tdesc_data = tdesc_data_alloc ();
9115
9116 valid_p = 1;
9117 for (i = 0; i < ARM_SP_REGNUM; i++)
9118 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (), i,
9119 arm_register_names[i]);
9120 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data.get (),
9121 ARM_SP_REGNUM,
9122 arm_sp_names);
9123 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data.get (),
9124 ARM_LR_REGNUM,
9125 arm_lr_names);
9126 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data.get (),
9127 ARM_PC_REGNUM,
9128 arm_pc_names);
9129 if (is_m)
9130 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
9131 ARM_PS_REGNUM, "xpsr");
9132 else
9133 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
9134 ARM_PS_REGNUM, "cpsr");
9135
9136 if (!valid_p)
9137 return NULL;
9138
9139 feature = tdesc_find_feature (tdesc,
9140 "org.gnu.gdb.arm.fpa");
9141 if (feature != NULL)
9142 {
9143 valid_p = 1;
9144 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
9145 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (), i,
9146 arm_register_names[i]);
9147 if (!valid_p)
9148 return NULL;
9149 }
9150 else
9151 have_fpa_registers = false;
9152
9153 feature = tdesc_find_feature (tdesc,
9154 "org.gnu.gdb.xscale.iwmmxt");
9155 if (feature != NULL)
9156 {
9157 static const char *const iwmmxt_names[] = {
9158 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9159 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9160 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9161 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9162 };
9163
9164 valid_p = 1;
9165 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
9166 valid_p
9167 &= tdesc_numbered_register (feature, tdesc_data.get (), i,
9168 iwmmxt_names[i - ARM_WR0_REGNUM]);
9169
9170 /* Check for the control registers, but do not fail if they
9171 are missing. */
9172 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
9173 tdesc_numbered_register (feature, tdesc_data.get (), i,
9174 iwmmxt_names[i - ARM_WR0_REGNUM]);
9175
9176 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
9177 valid_p
9178 &= tdesc_numbered_register (feature, tdesc_data.get (), i,
9179 iwmmxt_names[i - ARM_WR0_REGNUM]);
9180
9181 if (!valid_p)
9182 return NULL;
9183
9184 have_wmmx_registers = true;
9185 }
9186
9187 /* If we have a VFP unit, check whether the single precision registers
9188 are present. If not, then we will synthesize them as pseudo
9189 registers. */
9190 feature = tdesc_find_feature (tdesc,
9191 "org.gnu.gdb.arm.vfp");
9192 if (feature != NULL)
9193 {
9194 static const char *const vfp_double_names[] = {
9195 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9196 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9197 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9198 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9199 };
9200
9201 /* Require the double precision registers. There must be either
9202 16 or 32. */
9203 valid_p = 1;
9204 for (i = 0; i < 32; i++)
9205 {
9206 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
9207 ARM_D0_REGNUM + i,
9208 vfp_double_names[i]);
9209 if (!valid_p)
9210 break;
9211 }
9212 if (!valid_p && i == 16)
9213 valid_p = 1;
9214
9215 /* Also require FPSCR. */
9216 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
9217 ARM_FPSCR_REGNUM, "fpscr");
9218 if (!valid_p)
9219 return NULL;
9220
9221 if (tdesc_unnumbered_register (feature, "s0") == 0)
9222 have_vfp_pseudos = true;
9223
9224 vfp_register_count = i;
9225
9226 /* If we have VFP, also check for NEON. The architecture allows
9227 NEON without VFP (integer vector operations only), but GDB
9228 does not support that. */
9229 feature = tdesc_find_feature (tdesc,
9230 "org.gnu.gdb.arm.neon");
9231 if (feature != NULL)
9232 {
9233 /* NEON requires 32 double-precision registers. */
9234 if (i != 32)
9235 return NULL;
9236
9237 /* If there are quad registers defined by the stub, use
9238 their type; otherwise (normally) provide them with
9239 the default type. */
9240 if (tdesc_unnumbered_register (feature, "q0") == 0)
9241 have_neon_pseudos = true;
9242
9243 have_neon = true;
9244 }
9245 }
9246 }
9247
9248 /* If there is already a candidate, use it. */
9249 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
9250 best_arch != NULL;
9251 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
9252 {
9253 if (arm_abi != ARM_ABI_AUTO
9254 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
9255 continue;
9256
9257 if (fp_model != ARM_FLOAT_AUTO
9258 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
9259 continue;
9260
9261 /* There are various other properties in tdep that we do not
9262 need to check here: those derived from a target description,
9263 since gdbarches with a different target description are
9264 automatically disqualified. */
9265
9266 /* Do check is_m, though, since it might come from the binary. */
9267 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
9268 continue;
9269
9270 /* Found a match. */
9271 break;
9272 }
9273
9274 if (best_arch != NULL)
9275 return best_arch->gdbarch;
9276
9277 tdep = XCNEW (struct gdbarch_tdep);
9278 gdbarch = gdbarch_alloc (&info, tdep);
9279
9280 /* Record additional information about the architecture we are defining.
9281 These are gdbarch discriminators, like the OSABI. */
9282 tdep->arm_abi = arm_abi;
9283 tdep->fp_model = fp_model;
9284 tdep->is_m = is_m;
9285 tdep->have_fpa_registers = have_fpa_registers;
9286 tdep->have_wmmx_registers = have_wmmx_registers;
9287 gdb_assert (vfp_register_count == 0
9288 || vfp_register_count == 16
9289 || vfp_register_count == 32);
9290 tdep->vfp_register_count = vfp_register_count;
9291 tdep->have_vfp_pseudos = have_vfp_pseudos;
9292 tdep->have_neon_pseudos = have_neon_pseudos;
9293 tdep->have_neon = have_neon;
9294
9295 arm_register_g_packet_guesses (gdbarch);
9296
9297 /* Breakpoints. */
9298 switch (info.byte_order_for_code)
9299 {
9300 case BFD_ENDIAN_BIG:
9301 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
9302 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
9303 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
9304 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
9305
9306 break;
9307
9308 case BFD_ENDIAN_LITTLE:
9309 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
9310 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
9311 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
9312 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
9313
9314 break;
9315
9316 default:
9317 internal_error (__FILE__, __LINE__,
9318 _("arm_gdbarch_init: bad byte order for float format"));
9319 }
9320
9321 /* On ARM targets char defaults to unsigned. */
9322 set_gdbarch_char_signed (gdbarch, 0);
9323
9324 /* wchar_t is unsigned under the AAPCS. */
9325 if (tdep->arm_abi == ARM_ABI_AAPCS)
9326 set_gdbarch_wchar_signed (gdbarch, 0);
9327 else
9328 set_gdbarch_wchar_signed (gdbarch, 1);
9329
9330 /* Compute type alignment. */
9331 set_gdbarch_type_align (gdbarch, arm_type_align);
9332
9333 /* Note: for displaced stepping, this includes the breakpoint, and one word
9334 of additional scratch space. This setting isn't used for anything beside
9335 displaced stepping at present. */
9336 set_gdbarch_max_insn_length (gdbarch, 4 * ARM_DISPLACED_MODIFIED_INSNS);
9337
9338 /* This should be low enough for everything. */
9339 tdep->lowest_pc = 0x20;
9340 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
9341
9342 /* The default, for both APCS and AAPCS, is to return small
9343 structures in registers. */
9344 tdep->struct_return = reg_struct_return;
9345
9346 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
9347 set_gdbarch_frame_align (gdbarch, arm_frame_align);
9348
9349 if (is_m)
9350 set_gdbarch_code_of_frame_writable (gdbarch, arm_code_of_frame_writable);
9351
9352 set_gdbarch_write_pc (gdbarch, arm_write_pc);
9353
9354 frame_base_set_default (gdbarch, &arm_normal_base);
9355
9356 /* Address manipulation. */
9357 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
9358
9359 /* Advance PC across function entry code. */
9360 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
9361
9362 /* Detect whether PC is at a point where the stack has been destroyed. */
9363 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
9364
9365 /* Skip trampolines. */
9366 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
9367
9368 /* The stack grows downward. */
9369 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
9370
9371 /* Breakpoint manipulation. */
9372 set_gdbarch_breakpoint_kind_from_pc (gdbarch, arm_breakpoint_kind_from_pc);
9373 set_gdbarch_sw_breakpoint_from_kind (gdbarch, arm_sw_breakpoint_from_kind);
9374 set_gdbarch_breakpoint_kind_from_current_state (gdbarch,
9375 arm_breakpoint_kind_from_current_state);
9376
9377 /* Information about registers, etc. */
9378 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
9379 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
9380 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
9381 set_gdbarch_register_type (gdbarch, arm_register_type);
9382 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
9383
9384 /* This "info float" is FPA-specific. Use the generic version if we
9385 do not have FPA. */
9386 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
9387 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
9388
9389 /* Internal <-> external register number maps. */
9390 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
9391 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
9392
9393 set_gdbarch_register_name (gdbarch, arm_register_name);
9394
9395 /* Returning results. */
9396 set_gdbarch_return_value (gdbarch, arm_return_value);
9397
9398 /* Disassembly. */
9399 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
9400
9401 /* Minsymbol frobbing. */
9402 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
9403 set_gdbarch_coff_make_msymbol_special (gdbarch,
9404 arm_coff_make_msymbol_special);
9405 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
9406
9407 /* Thumb-2 IT block support. */
9408 set_gdbarch_adjust_breakpoint_address (gdbarch,
9409 arm_adjust_breakpoint_address);
9410
9411 /* Virtual tables. */
9412 set_gdbarch_vbit_in_delta (gdbarch, 1);
9413
9414 /* Hook in the ABI-specific overrides, if they have been registered. */
9415 gdbarch_init_osabi (info, gdbarch);
9416
9417 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
9418
9419 /* Add some default predicates. */
9420 if (is_m)
9421 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
9422 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
9423 dwarf2_append_unwinders (gdbarch);
9424 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
9425 frame_unwind_append_unwinder (gdbarch, &arm_epilogue_frame_unwind);
9426 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
9427
9428 /* Now we have tuned the configuration, set a few final things,
9429 based on what the OS ABI has told us. */
9430
9431 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
9432 binaries are always marked. */
9433 if (tdep->arm_abi == ARM_ABI_AUTO)
9434 tdep->arm_abi = ARM_ABI_APCS;
9435
9436 /* Watchpoints are not steppable. */
9437 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
9438
9439 /* We used to default to FPA for generic ARM, but almost nobody
9440 uses that now, and we now provide a way for the user to force
9441 the model. So default to the most useful variant. */
9442 if (tdep->fp_model == ARM_FLOAT_AUTO)
9443 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
9444
9445 if (tdep->jb_pc >= 0)
9446 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
9447
9448 /* Floating point sizes and format. */
9449 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
9450 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
9451 {
9452 set_gdbarch_double_format
9453 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9454 set_gdbarch_long_double_format
9455 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9456 }
9457 else
9458 {
9459 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
9460 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
9461 }
9462
9463 if (have_vfp_pseudos)
9464 {
9465 /* NOTE: These are the only pseudo registers used by
9466 the ARM target at the moment. If more are added, a
9467 little more care in numbering will be needed. */
9468
9469 int num_pseudos = 32;
9470 if (have_neon_pseudos)
9471 num_pseudos += 16;
9472 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
9473 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
9474 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
9475 }
9476
9477 if (tdesc_data != nullptr)
9478 {
9479 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
9480
9481 tdesc_use_registers (gdbarch, tdesc, std::move (tdesc_data));
9482
9483 /* Override tdesc_register_type to adjust the types of VFP
9484 registers for NEON. */
9485 set_gdbarch_register_type (gdbarch, arm_register_type);
9486 }
9487
9488 /* Add standard register aliases. We add aliases even for those
9489 names which are used by the current architecture - it's simpler,
9490 and does no harm, since nothing ever lists user registers. */
9491 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
9492 user_reg_add (gdbarch, arm_register_aliases[i].name,
9493 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
9494
9495 set_gdbarch_disassembler_options (gdbarch, &arm_disassembler_options);
9496 set_gdbarch_valid_disassembler_options (gdbarch, disassembler_options_arm ());
9497
9498 set_gdbarch_gnu_triplet_regexp (gdbarch, arm_gnu_triplet_regexp);
9499
9500 return gdbarch;
9501 }
9502
9503 static void
9504 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
9505 {
9506 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9507
9508 if (tdep == NULL)
9509 return;
9510
9511 fprintf_unfiltered (file, _("arm_dump_tdep: fp_model = %i\n"),
9512 (int) tdep->fp_model);
9513 fprintf_unfiltered (file, _("arm_dump_tdep: have_fpa_registers = %i\n"),
9514 (int) tdep->have_fpa_registers);
9515 fprintf_unfiltered (file, _("arm_dump_tdep: have_wmmx_registers = %i\n"),
9516 (int) tdep->have_wmmx_registers);
9517 fprintf_unfiltered (file, _("arm_dump_tdep: vfp_register_count = %i\n"),
9518 (int) tdep->vfp_register_count);
9519 fprintf_unfiltered (file, _("arm_dump_tdep: have_vfp_pseudos = %i\n"),
9520 (int) tdep->have_vfp_pseudos);
9521 fprintf_unfiltered (file, _("arm_dump_tdep: have_neon_pseudos = %i\n"),
9522 (int) tdep->have_neon_pseudos);
9523 fprintf_unfiltered (file, _("arm_dump_tdep: have_neon = %i\n"),
9524 (int) tdep->have_neon);
9525 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx\n"),
9526 (unsigned long) tdep->lowest_pc);
9527 }
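
/* Usage note (added commentary, not part of the original source):
   the fields above are printed as part of the gdbarch dump, e.g. via
   the "maint print architecture" command, which is handy for checking
   which ABI, FP model and register sets a given executable ended up
   selecting.  */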
9528
9529 #if GDB_SELF_TEST
9530 namespace selftests
9531 {
9532 static void arm_record_test (void);
9533 static void arm_analyze_prologue_test ();
9534 }
9535 #endif
9536
9537 void _initialize_arm_tdep ();
9538 void
9539 _initialize_arm_tdep ()
9540 {
9541 long length;
9542 int i, j;
9543 char regdesc[1024], *rdptr = regdesc;
9544 size_t rest = sizeof (regdesc);
9545
9546 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
9547
9548 /* Add ourselves to objfile event chain. */
9549 gdb::observers::new_objfile.attach (arm_exidx_new_objfile);
9550
9551 /* Register an ELF OS ABI sniffer for ARM binaries. */
9552 gdbarch_register_osabi_sniffer (bfd_arch_arm,
9553 bfd_target_elf_flavour,
9554 arm_elf_osabi_sniffer);
9555
9556 /* Add root prefix command for all "set arm"/"show arm" commands. */
9557 add_basic_prefix_cmd ("arm", no_class,
9558 _("Various ARM-specific commands."),
9559 &setarmcmdlist, "set arm ", 0, &setlist);
9560
9561 add_show_prefix_cmd ("arm", no_class,
9562 _("Various ARM-specific commands."),
9563 &showarmcmdlist, "show arm ", 0, &showlist);
9564
9565
9566 arm_disassembler_options = xstrdup ("reg-names-std");
9567 const disasm_options_t *disasm_options
9568 = &disassembler_options_arm ()->options;
9569 int num_disassembly_styles = 0;
9570 for (i = 0; disasm_options->name[i] != NULL; i++)
9571 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9572 num_disassembly_styles++;
9573
9574 /* Initialize the array that will be passed to add_setshow_enum_cmd(). */
9575 valid_disassembly_styles = XNEWVEC (const char *,
9576 num_disassembly_styles + 1);
9577 for (i = j = 0; disasm_options->name[i] != NULL; i++)
9578 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9579 {
9580 size_t offset = strlen ("reg-names-");
9581 const char *style = disasm_options->name[i];
9582 valid_disassembly_styles[j++] = &style[offset];
9583 length = snprintf (rdptr, rest, "%s - %s\n", &style[offset],
9584 disasm_options->description[i]);
9585 rdptr += length;
9586 rest -= length;
9587 }
9588 /* Mark the end of valid options. */
9589 valid_disassembly_styles[num_disassembly_styles] = NULL;
9590
9591 /* Create the help text. */
9592 std::string helptext = string_printf ("%s%s%s",
9593 _("The valid values are:\n"),
9594 regdesc,
9595 _("The default is \"std\"."));
9596
9597 add_setshow_enum_cmd ("disassembler", no_class,
9598 valid_disassembly_styles, &disassembly_style,
9599 _("Set the disassembly style."),
9600 _("Show the disassembly style."),
9601 helptext.c_str (),
9602 set_disassembly_style_sfunc,
9603 show_disassembly_style_sfunc,
9604 &setarmcmdlist, &showarmcmdlist);
9605
9606 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
9607 _("Set usage of ARM 32-bit mode."),
9608 _("Show usage of ARM 32-bit mode."),
9609 _("When off, a 26-bit PC will be used."),
9610 NULL,
9611 NULL, /* FIXME: i18n: Usage of ARM 32-bit
9612 mode is %s. */
9613 &setarmcmdlist, &showarmcmdlist);
9614
9615 /* Add a command to allow the user to force the FPU model. */
9616 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
9617 _("Set the floating point type."),
9618 _("Show the floating point type."),
9619 _("auto - Determine the FP typefrom the OS-ABI.\n\
9620 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
9621 fpa - FPA co-processor (GCC compiled).\n\
9622 softvfp - Software FP with pure-endian doubles.\n\
9623 vfp - VFP co-processor."),
9624 set_fp_model_sfunc, show_fp_model,
9625 &setarmcmdlist, &showarmcmdlist);
9626
9627 /* Add a command to allow the user to force the ABI. */
9628 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
9629 _("Set the ABI."),
9630 _("Show the ABI."),
9631 NULL, arm_set_abi, arm_show_abi,
9632 &setarmcmdlist, &showarmcmdlist);
9633
9634 /* Add two commands to allow the user to force the assumed
9635 execution mode. */
9636 add_setshow_enum_cmd ("fallback-mode", class_support,
9637 arm_mode_strings, &arm_fallback_mode_string,
9638 _("Set the mode assumed when symbols are unavailable."),
9639 _("Show the mode assumed when symbols are unavailable."),
9640 NULL, NULL, arm_show_fallback_mode,
9641 &setarmcmdlist, &showarmcmdlist);
9642 add_setshow_enum_cmd ("force-mode", class_support,
9643 arm_mode_strings, &arm_force_mode_string,
9644 _("Set the mode assumed even when symbols are available."),
9645 _("Show the mode assumed even when symbols are available."),
9646 NULL, NULL, arm_show_force_mode,
9647 &setarmcmdlist, &showarmcmdlist);
9648
9649 /* Debugging flag. */
9650 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
9651 _("Set ARM debugging."),
9652 _("Show ARM debugging."),
9653 _("When on, arm-specific debugging is enabled."),
9654 NULL,
9655 NULL, /* FIXME: i18n: "ARM debugging is %s." */
9656 &setdebuglist, &showdebuglist);
9657
9658 #if GDB_SELF_TEST
9659 selftests::register_test ("arm-record", selftests::arm_record_test);
9660 selftests::register_test ("arm_analyze_prologue", selftests::arm_analyze_prologue_test);
9661 #endif
9662
9663 }
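
/* Usage sketch (added commentary, not part of the original source):
   the commands registered above are reached from the GDB prompt as,
   for example:

     (gdb) set arm fpu vfp
     (gdb) set arm abi AAPCS
     (gdb) set arm disassembler std
     (gdb) set debug arm on

   The accepted values for "fallback-mode"/"force-mode" come from
   arm_mode_strings, which is defined elsewhere in this file.  */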
9664
9665 /* ARM-reversible process record data structures. */
9666
9667 #define ARM_INSN_SIZE_BYTES 4
9668 #define THUMB_INSN_SIZE_BYTES 2
9669 #define THUMB2_INSN_SIZE_BYTES 4
9670
9671
9672 /* Position of the bit within a 32-bit ARM instruction
9673 that defines whether the instruction is a load or store. */
9674 #define INSN_S_L_BIT_NUM 20
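
/* Illustrative sketch, not part of the original source: bit 20 (the
   L bit) distinguishes loads from stores in the single data transfer
   encodings decoded below.  For example 0xe5912000 ("ldr r2, [r1]")
   has the bit set, while 0xe5812000 ("str r2, [r1]") has it clear.  */
#if 0
static int
insn_is_load_sketch (uint32_t insn)
{
  return (insn >> INSN_S_L_BIT_NUM) & 1;
}
#endif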
9675
9676 #define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
9677 do \
9678 { \
9679 unsigned int reg_len = LENGTH; \
9680 if (reg_len) \
9681 { \
9682 REGS = XNEWVEC (uint32_t, reg_len); \
9683 memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
9684 } \
9685 } \
9686 while (0)
9687
9688 #define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
9689 do \
9690 { \
9691 unsigned int mem_len = LENGTH; \
9692 if (mem_len) \
9693 { \
9694 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
9695 memcpy(&MEMS->len, &RECORD_BUF[0], \
9696 sizeof(struct arm_mem_r) * LENGTH); \
9697 } \
9698 } \
9699 while (0)
9700
9701 /* Checks whether the insn has already been recorded or is yet to be decoded (boolean expression). */
9702 #define INSN_RECORDED(ARM_RECORD) \
9703 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
9704
9705 /* ARM memory record structure. */
9706 struct arm_mem_r
9707 {
9708 uint32_t len; /* Record length. */
9709 uint32_t addr; /* Memory address. */
9710 };
9711
9712 /* ARM instruction record contains opcode of current insn
9713 and execution state (before entry to decode_insn()),
9714 contains list of to-be-modified registers and
9715 memory blocks (on return from decode_insn()). */
9716
9717 typedef struct insn_decode_record_t
9718 {
9719 struct gdbarch *gdbarch;
9720 struct regcache *regcache;
9721 CORE_ADDR this_addr; /* Address of the insn being decoded. */
9722 uint32_t arm_insn; /* Should accommodate thumb. */
9723 uint32_t cond; /* Condition code. */
9724 uint32_t opcode; /* Insn opcode. */
9725 uint32_t decode; /* Insn decode bits. */
9726 uint32_t mem_rec_count; /* No of mem records. */
9727 uint32_t reg_rec_count; /* No of reg records. */
9728 uint32_t *arm_regs; /* Registers to be saved for this record. */
9729 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
9730 } insn_decode_record;
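
/* Illustrative sketch, not part of the original source: the scratch
   buffers handed to REG_ALLOC/MEM_ALLOC follow a fixed layout.  For
   registers it is simply a list of register numbers; for memory it is
   (length, address) pairs matching struct arm_mem_r.  A handler that
   clobbers two consecutive words at ADDR would fill them in as below
   (ADDR and the function name are hypothetical).  */
#if 0
static void
record_two_words_sketch (insn_decode_record *r, uint32_t addr)
{
  uint32_t record_buf[1], record_buf_mem[4];

  record_buf_mem[0] = 4;		/* Length of the first access.  */
  record_buf_mem[1] = addr;		/* Address of the first access.  */
  record_buf_mem[2] = 4;		/* Length of the second access.  */
  record_buf_mem[3] = addr + 4;		/* Address of the second access.  */
  r->mem_rec_count = 2;

  record_buf[0] = ARM_PS_REGNUM;	/* Say the flags change too.  */
  r->reg_rec_count = 1;

  REG_ALLOC (r->arm_regs, r->reg_rec_count, record_buf);
  MEM_ALLOC (r->arm_mems, r->mem_rec_count, record_buf_mem);
}
#endif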
9731
9732
9733 /* Checks ARM SBZ and SBO mandatory fields. */
9734
9735 static int
9736 sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
9737 {
9738 uint32_t ones = bits (insn, bit_num - 1, (bit_num - 1) + (len - 1));
9739
9740 if (!len)
9741 return 1;
9742
9743 if (!sbo)
9744 ones = ~ones;
9745
9746 while (ones)
9747 {
9748 if (!(ones & sbo))
9749 {
9750 return 0;
9751 }
9752 ones = ones >> 1;
9753 }
9754 return 1;
9755 }
9756
9757 enum arm_record_result
9758 {
9759 ARM_RECORD_SUCCESS = 0,
9760 ARM_RECORD_FAILURE = 1
9761 };
9762
9763 typedef enum
9764 {
9765 ARM_RECORD_STRH=1,
9766 ARM_RECORD_STRD
9767 } arm_record_strx_t;
9768
9769 typedef enum
9770 {
9771 ARM_RECORD=1,
9772 THUMB_RECORD,
9773 THUMB2_RECORD
9774 } record_type_t;
9775
9776
9777 static int
9778 arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
9779 uint32_t *record_buf_mem, arm_record_strx_t str_type)
9780 {
9781
9782 struct regcache *reg_cache = arm_insn_r->regcache;
9783 ULONGEST u_regval[2]= {0};
9784
9785 uint32_t reg_src1 = 0, reg_src2 = 0;
9786 uint32_t immed_high = 0, immed_low = 0, offset_8 = 0, tgt_mem_addr = 0;
9787
9788 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
9789 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
9790
9791 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
9792 {
9793 /* 1) Handle misc store, immediate offset. */
9794 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9795 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9796 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9797 regcache_raw_read_unsigned (reg_cache, reg_src1,
9798 &u_regval[0]);
9799 if (ARM_PC_REGNUM == reg_src1)
9800 {
9801 /* If R15 was used as Rn, the value read is the current PC+8. */
9802 u_regval[0] = u_regval[0] + 8;
9803 }
9804 offset_8 = (immed_high << 4) | immed_low;
9805 /* Calculate target store address. */
9806 if (14 == arm_insn_r->opcode)
9807 {
9808 tgt_mem_addr = u_regval[0] + offset_8;
9809 }
9810 else
9811 {
9812 tgt_mem_addr = u_regval[0] - offset_8;
9813 }
9814 if (ARM_RECORD_STRH == str_type)
9815 {
9816 record_buf_mem[0] = 2;
9817 record_buf_mem[1] = tgt_mem_addr;
9818 arm_insn_r->mem_rec_count = 1;
9819 }
9820 else if (ARM_RECORD_STRD == str_type)
9821 {
9822 record_buf_mem[0] = 4;
9823 record_buf_mem[1] = tgt_mem_addr;
9824 record_buf_mem[2] = 4;
9825 record_buf_mem[3] = tgt_mem_addr + 4;
9826 arm_insn_r->mem_rec_count = 2;
9827 }
9828 }
9829 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
9830 {
9831 /* 2) Store, register offset. */
9832 /* Get Rm. */
9833 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9834 /* Get Rn. */
9835 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9836 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9837 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9838 if (15 == reg_src2)
9839 {
9840 /* If R15 was used as Rn, the value read is the current PC+8. */
9841 u_regval[0] = u_regval[0] + 8;
9842 }
9843 /* Calculate target store address, Rn +/- Rm, register offset. */
9844 if (12 == arm_insn_r->opcode)
9845 {
9846 tgt_mem_addr = u_regval[0] + u_regval[1];
9847 }
9848 else
9849 {
9850 tgt_mem_addr = u_regval[1] - u_regval[0];
9851 }
9852 if (ARM_RECORD_STRH == str_type)
9853 {
9854 record_buf_mem[0] = 2;
9855 record_buf_mem[1] = tgt_mem_addr;
9856 arm_insn_r->mem_rec_count = 1;
9857 }
9858 else if (ARM_RECORD_STRD == str_type)
9859 {
9860 record_buf_mem[0] = 4;
9861 record_buf_mem[1] = tgt_mem_addr;
9862 record_buf_mem[2] = 4;
9863 record_buf_mem[3] = tgt_mem_addr + 4;
9864 arm_insn_r->mem_rec_count = 2;
9865 }
9866 }
9867 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
9868 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9869 {
9870 /* 3) Store, immediate pre-indexed. */
9871 /* 5) Store, immediate post-indexed. */
9872 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9873 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9874 offset_8 = (immed_high << 4) | immed_low;
9875 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9876 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9877 /* Calculate target store address, Rn +/- offset_8, immediate offset. */
9878 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9879 {
9880 tgt_mem_addr = u_regval[0] + offset_8;
9881 }
9882 else
9883 {
9884 tgt_mem_addr = u_regval[0] - offset_8;
9885 }
9886 if (ARM_RECORD_STRH == str_type)
9887 {
9888 record_buf_mem[0] = 2;
9889 record_buf_mem[1] = tgt_mem_addr;
9890 arm_insn_r->mem_rec_count = 1;
9891 }
9892 else if (ARM_RECORD_STRD == str_type)
9893 {
9894 record_buf_mem[0] = 4;
9895 record_buf_mem[1] = tgt_mem_addr;
9896 record_buf_mem[2] = 4;
9897 record_buf_mem[3] = tgt_mem_addr + 4;
9898 arm_insn_r->mem_rec_count = 2;
9899 }
9900 /* Record Rn also as it changes. */
9901 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9902 arm_insn_r->reg_rec_count = 1;
9903 }
9904 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
9905 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9906 {
9907 /* 4) Store, register pre-indexed. */
9908 /* 6) Store, register post-indexed. */
9909 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9910 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9911 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9912 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9913 /* Calculate target store address, Rn +/- Rm, register offset. */
9914 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9915 {
9916 tgt_mem_addr = u_regval[0] + u_regval[1];
9917 }
9918 else
9919 {
9920 tgt_mem_addr = u_regval[1] - u_regval[0];
9921 }
9922 if (ARM_RECORD_STRH == str_type)
9923 {
9924 record_buf_mem[0] = 2;
9925 record_buf_mem[1] = tgt_mem_addr;
9926 arm_insn_r->mem_rec_count = 1;
9927 }
9928 else if (ARM_RECORD_STRD == str_type)
9929 {
9930 record_buf_mem[0] = 4;
9931 record_buf_mem[1] = tgt_mem_addr;
9932 record_buf_mem[2] = 4;
9933 record_buf_mem[3] = tgt_mem_addr + 4;
9934 arm_insn_r->mem_rec_count = 2;
9935 }
9936 /* Record Rn also as it changes. */
9937 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9938 arm_insn_r->reg_rec_count = 1;
9939 }
9940 return 0;
9941 }
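
/* Illustrative worked example, not part of the original source: for
   0xe1c320b4, which encodes "strh r2, [r3, #4]" (A1 encoding), the
   fields examined above come out as follows.  */
#if 0
static void
arm_record_strx_example (void)
{
  uint32_t insn = 0xe1c320b4;			/* strh r2, [r3, #4]  */
  uint32_t opcode = bits (insn, 21, 24);	/* P,U,I,W = 0b1110 -> 14  */
  uint32_t offset_8 = (bits (insn, 8, 11) << 4) | bits (insn, 0, 3);

  /* So arm_record_strx takes the "misc store, immediate offset" arm
     and records a 2-byte write at the value of r3 plus 4.  */
  gdb_assert (opcode == 14 && offset_8 == 4);
}
#endif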
9942
9943 /* Handling ARM extension space insns. */
9944
9945 static int
9946 arm_record_extension_space (insn_decode_record *arm_insn_r)
9947 {
9948 int ret = 0; /* Return value: -1: record failure; 0: success. */
9949 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
9950 uint32_t record_buf[8], record_buf_mem[8];
9951 uint32_t reg_src1 = 0;
9952 struct regcache *reg_cache = arm_insn_r->regcache;
9953 ULONGEST u_regval = 0;
9954
9955 gdb_assert (!INSN_RECORDED(arm_insn_r));
9956 /* Handle unconditional insn extension space. */
9957
9958 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
9959 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9960 if (arm_insn_r->cond)
9961 {
9962 /* PLD has no effect on architectural state; it just affects
9963 the caches. */
9964 if (5 == ((opcode1 & 0xE0) >> 5))
9965 {
9966 /* BLX(1) */
9967 record_buf[0] = ARM_PS_REGNUM;
9968 record_buf[1] = ARM_LR_REGNUM;
9969 arm_insn_r->reg_rec_count = 2;
9970 }
9971 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
9972 }
9973
9974
9975 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
9976 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
9977 {
9978 ret = -1;
9979 /* Undefined instruction on ARM V5; need to handle if later
9980 versions define it. */
9981 }
9982
9983 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
9984 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9985 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
9986
9987 /* Handle arithmetic insn extension space. */
9988 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
9989 && !INSN_RECORDED(arm_insn_r))
9990 {
9991 /* Handle MLA(S) and MUL(S). */
9992 if (in_inclusive_range (insn_op1, 0U, 3U))
9993 {
9994 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9995 record_buf[1] = ARM_PS_REGNUM;
9996 arm_insn_r->reg_rec_count = 2;
9997 }
9998 else if (in_inclusive_range (insn_op1, 4U, 15U))
9999 {
10000 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
10001 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10002 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10003 record_buf[2] = ARM_PS_REGNUM;
10004 arm_insn_r->reg_rec_count = 3;
10005 }
10006 }
10007
10008 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
10009 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
10010 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
10011
10012 /* Handle control insn extension space. */
10013
10014 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
10015 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
10016 {
10017 if (!bit (arm_insn_r->arm_insn, 25))
10018 {
10019 if (!bits (arm_insn_r->arm_insn, 4, 7))
10020 {
10021 if ((0 == insn_op1) || (2 == insn_op1))
10022 {
10023 /* MRS. */
10024 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10025 arm_insn_r->reg_rec_count = 1;
10026 }
10027 else if (1 == insn_op1)
10028 {
10029 /* CPSR is going to be changed. */
10030 record_buf[0] = ARM_PS_REGNUM;
10031 arm_insn_r->reg_rec_count = 1;
10032 }
10033 else if (3 == insn_op1)
10034 {
10035 /* SPSR is going to be changed. */
10036 /* We need to get SPSR value, which is yet to be done. */
10037 return -1;
10038 }
10039 }
10040 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
10041 {
10042 if (1 == insn_op1)
10043 {
10044 /* BX. */
10045 record_buf[0] = ARM_PS_REGNUM;
10046 arm_insn_r->reg_rec_count = 1;
10047 }
10048 else if (3 == insn_op1)
10049 {
10050 /* CLZ. */
10051 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10052 arm_insn_r->reg_rec_count = 1;
10053 }
10054 }
10055 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
10056 {
10057 /* BLX. */
10058 record_buf[0] = ARM_PS_REGNUM;
10059 record_buf[1] = ARM_LR_REGNUM;
10060 arm_insn_r->reg_rec_count = 2;
10061 }
10062 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
10063 {
10064 /* QADD, QSUB, QDADD, QDSUB */
10065 record_buf[0] = ARM_PS_REGNUM;
10066 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10067 arm_insn_r->reg_rec_count = 2;
10068 }
10069 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
10070 {
10071 /* BKPT. */
10072 record_buf[0] = ARM_PS_REGNUM;
10073 record_buf[1] = ARM_LR_REGNUM;
10074 arm_insn_r->reg_rec_count = 2;
10075
10076 /* Save SPSR also; how? */
10077 return -1;
10078 }
10079 else if(8 == bits (arm_insn_r->arm_insn, 4, 7)
10080 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
10081 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
10082 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
10083 )
10084 {
10085 if (0 == insn_op1 || 1 == insn_op1)
10086 {
10087 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
10088 /* We don't optimize for SMULW<y>, where only
10089 Rd is needed. */
10090 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10091 record_buf[1] = ARM_PS_REGNUM;
10092 arm_insn_r->reg_rec_count = 2;
10093 }
10094 else if (2 == insn_op1)
10095 {
10096 /* SMLAL<x><y>. */
10097 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10098 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
10099 arm_insn_r->reg_rec_count = 2;
10100 }
10101 else if (3 == insn_op1)
10102 {
10103 /* SMUL<x><y>. */
10104 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10105 arm_insn_r->reg_rec_count = 1;
10106 }
10107 }
10108 }
10109 else
10110 {
10111 /* MSR : immediate form. */
10112 if (1 == insn_op1)
10113 {
10114 /* CPSR is going to be changed. */
10115 record_buf[0] = ARM_PS_REGNUM;
10116 arm_insn_r->reg_rec_count = 1;
10117 }
10118 else if (3 == insn_op1)
10119 {
10120 /* SPSR is going to be changed. */
10121 /* We need to get the SPSR value, which is yet to be done. */
10122 return -1;
10123 }
10124 }
10125 }
10126
10127 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10128 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
10129 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
10130
10131 /* Handle load/store insn extension space. */
10132
10133 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
10134 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
10135 && !INSN_RECORDED(arm_insn_r))
10136 {
10137 /* SWP/SWPB. */
10138 if (0 == insn_op1)
10139 {
10140 /* This insn changes a register and memory as well. */
10141 /* SWP or SWPB insn. */
10142 /* Get memory address given by Rn. */
10143 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10144 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
10145 /* SWP insn: swaps a word. */
10146 if (8 == arm_insn_r->opcode)
10147 {
10148 record_buf_mem[0] = 4;
10149 }
10150 else
10151 {
10152 /* SWPB insn: swaps only a byte. */
10153 record_buf_mem[0] = 1;
10154 }
10155 record_buf_mem[1] = u_regval;
10156 arm_insn_r->mem_rec_count = 1;
10157 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10158 arm_insn_r->reg_rec_count = 1;
10159 }
10160 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10161 {
10162 /* STRH. */
10163 arm_record_strx (arm_insn_r, &record_buf[0], &record_buf_mem[0],
10164 ARM_RECORD_STRH);
10165 }
10166 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10167 {
10168 /* LDRD. */
10169 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10170 record_buf[1] = record_buf[0] + 1;
10171 arm_insn_r->reg_rec_count = 2;
10172 }
10173 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10174 {
10175 /* STRD. */
10176 arm_record_strx (arm_insn_r, &record_buf[0], &record_buf_mem[0],
10177 ARM_RECORD_STRD);
10178 }
10179 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
10180 {
10181 /* LDRH, LDRSB, LDRSH. */
10182 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10183 arm_insn_r->reg_rec_count = 1;
10184 }
10185
10186 }
10187
10188 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
10189 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
10190 && !INSN_RECORDED(arm_insn_r))
10191 {
10192 ret = -1;
10193 /* Handle coprocessor insn extension space. */
10194 }
10195
10196 /* To be done for ARMv5 and later; as of now we return -1. */
10197 if (-1 == ret)
10198 return ret;
10199
10200 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10201 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10202
10203 return ret;
10204 }
10205
10206 /* Handling opcode 000 insns. */
10207
10208 static int
10209 arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
10210 {
10211 struct regcache *reg_cache = arm_insn_r->regcache;
10212 uint32_t record_buf[8], record_buf_mem[8];
10213 ULONGEST u_regval[2] = {0};
10214
10215 uint32_t reg_src1 = 0;
10216 uint32_t opcode1 = 0;
10217
10218 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10219 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10220 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10221
10222 if (!((opcode1 & 0x19) == 0x10))
10223 {
10224 /* Data-processing (register) and data-processing (register-shifted
10225 register). */
10226 /* In all 11 shifter-operand modes, the insn modifies the destination
10227 register, which is specified by bits 12-15. */
10228 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10229 record_buf[1] = ARM_PS_REGNUM;
10230 arm_insn_r->reg_rec_count = 2;
10231 }
10232 else if ((arm_insn_r->decode < 8) && ((opcode1 & 0x19) == 0x10))
10233 {
10234 /* Miscellaneous instructions */
10235
10236 if (3 == arm_insn_r->decode && 0x12 == opcode1
10237 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10238 {
10239 /* Handle BLX, branch and link/exchange. */
10240 if (9 == arm_insn_r->opcode)
10241 {
10242 /* The branch target state is chosen by setting the T bit of CPSR
10243 from bit[0] of Rm, and R14 stores the return address. */
10244 record_buf[0] = ARM_PS_REGNUM;
10245 record_buf[1] = ARM_LR_REGNUM;
10246 arm_insn_r->reg_rec_count = 2;
10247 }
10248 }
10249 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
10250 {
10251 /* Handle enhanced software breakpoint insn, BKPT. */
10252 /* CPSR is changed so execution continues in ARM state, with normal
10253 interrupts disabled, entering abort mode. */
10254 /* The PC is set according to the high vector configuration. */
10255 /* If the user hits the breakpoint and then reverse-executes, we
10256 need to go back with the previous CPSR and
10257 program counter. */
10258 record_buf[0] = ARM_PS_REGNUM;
10259 record_buf[1] = ARM_LR_REGNUM;
10260 arm_insn_r->reg_rec_count = 2;
10261
10262 /* Save SPSR also; how? */
10263 return -1;
10264 }
10265 else if (1 == arm_insn_r->decode && 0x12 == opcode1
10266 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10267 {
10268 /* Handle BX, branch and exchange. */
10269 /* The branch target state is chosen by setting the T bit of CPSR from bit[0] of Rm. */
10270 record_buf[0] = ARM_PS_REGNUM;
10271 arm_insn_r->reg_rec_count = 1;
10272 }
10273 else if (1 == arm_insn_r->decode && 0x16 == opcode1
10274 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
10275 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
10276 {
10277 /* Count leading zeros: CLZ. */
10278 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10279 arm_insn_r->reg_rec_count = 1;
10280 }
10281 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10282 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10283 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
10284 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0))
10285 {
10286 /* Handle MRS insn. */
10287 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10288 arm_insn_r->reg_rec_count = 1;
10289 }
10290 }
10291 else if (9 == arm_insn_r->decode && opcode1 < 0x10)
10292 {
10293 /* Multiply and multiply-accumulate */
10294
10295 /* Handle multiply instructions. */
10296 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
10297 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
10298 {
10299 /* Handle MLA and MUL. */
10300 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10301 record_buf[1] = ARM_PS_REGNUM;
10302 arm_insn_r->reg_rec_count = 2;
10303 }
10304 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10305 {
10306 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
10307 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10308 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10309 record_buf[2] = ARM_PS_REGNUM;
10310 arm_insn_r->reg_rec_count = 3;
10311 }
10312 }
10313 else if (9 == arm_insn_r->decode && opcode1 > 0x10)
10314 {
10315 /* Synchronization primitives */
10316
10317 /* Handling SWP, SWPB. */
10318 /* These insn, changes register and memory as well. */
10319 /* SWP or SWPB insn. */
10320
10321 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10322 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10323 /* SWP insn ?, swaps word. */
10324 if (8 == arm_insn_r->opcode)
10325 {
10326 record_buf_mem[0] = 4;
10327 }
10328 else
10329 {
10330 /* SWPB insn, swaps only byte. */
10331 record_buf_mem[0] = 1;
10332 }
10333 record_buf_mem[1] = u_regval[0];
10334 arm_insn_r->mem_rec_count = 1;
10335 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10336 arm_insn_r->reg_rec_count = 1;
10337 }
10338 else if (11 == arm_insn_r->decode || 13 == arm_insn_r->decode
10339 || 15 == arm_insn_r->decode)
10340 {
10341 if ((opcode1 & 0x12) == 2)
10342 {
10343 /* Extra load/store (unprivileged) */
10344 return -1;
10345 }
10346 else
10347 {
10348 /* Extra load/store */
10349 switch (bits (arm_insn_r->arm_insn, 5, 6))
10350 {
10351 case 1:
10352 if ((opcode1 & 0x05) == 0x0 || (opcode1 & 0x05) == 0x4)
10353 {
10354 /* STRH (register), STRH (immediate) */
10355 arm_record_strx (arm_insn_r, &record_buf[0],
10356 &record_buf_mem[0], ARM_RECORD_STRH);
10357 }
10358 else if ((opcode1 & 0x05) == 0x1)
10359 {
10360 /* LDRH (register) */
10361 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10362 arm_insn_r->reg_rec_count = 1;
10363
10364 if (bit (arm_insn_r->arm_insn, 21))
10365 {
10366 /* Write back to Rn. */
10367 record_buf[arm_insn_r->reg_rec_count++]
10368 = bits (arm_insn_r->arm_insn, 16, 19);
10369 }
10370 }
10371 else if ((opcode1 & 0x05) == 0x5)
10372 {
10373 /* LDRH (immediate), LDRH (literal) */
10374 int rn = bits (arm_insn_r->arm_insn, 16, 19);
10375
10376 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10377 arm_insn_r->reg_rec_count = 1;
10378
10379 if (rn != 15)
10380 {
10381 /* LDRH (immediate) */
10382 if (bit (arm_insn_r->arm_insn, 21))
10383 {
10384 /* Write back to Rn. */
10385 record_buf[arm_insn_r->reg_rec_count++] = rn;
10386 }
10387 }
10388 }
10389 else
10390 return -1;
10391 break;
10392 case 2:
10393 if ((opcode1 & 0x05) == 0x0)
10394 {
10395 /* LDRD (register) */
10396 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10397 record_buf[1] = record_buf[0] + 1;
10398 arm_insn_r->reg_rec_count = 2;
10399
10400 if (bit (arm_insn_r->arm_insn, 21))
10401 {
10402 /* Write back to Rn. */
10403 record_buf[arm_insn_r->reg_rec_count++]
10404 = bits (arm_insn_r->arm_insn, 16, 19);
10405 }
10406 }
10407 else if ((opcode1 & 0x05) == 0x1)
10408 {
10409 /* LDRSB (register) */
10410 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10411 arm_insn_r->reg_rec_count = 1;
10412
10413 if (bit (arm_insn_r->arm_insn, 21))
10414 {
10415 /* Write back to Rn. */
10416 record_buf[arm_insn_r->reg_rec_count++]
10417 = bits (arm_insn_r->arm_insn, 16, 19);
10418 }
10419 }
10420 else if ((opcode1 & 0x05) == 0x4 || (opcode1 & 0x05) == 0x5)
10421 {
10422 /* LDRD (immediate), LDRD (literal), LDRSB (immediate),
10423 LDRSB (literal) */
10424 int rn = bits (arm_insn_r->arm_insn, 16, 19);
10425
10426 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10427 arm_insn_r->reg_rec_count = 1;
10428
10429 if (rn != 15)
10430 {
10431 /* LDRD (immediate), LDRSB (immediate) */
10432 if (bit (arm_insn_r->arm_insn, 21))
10433 {
10434 /* Write back to Rn. */
10435 record_buf[arm_insn_r->reg_rec_count++] = rn;
10436 }
10437 }
10438 }
10439 else
10440 return -1;
10441 break;
10442 case 3:
10443 if ((opcode1 & 0x05) == 0x0)
10444 {
10445 /* STRD (register) */
10446 arm_record_strx (arm_insn_r, &record_buf[0],
10447 &record_buf_mem[0], ARM_RECORD_STRD);
10448 }
10449 else if ((opcode1 & 0x05) == 0x1)
10450 {
10451 /* LDRSH (register) */
10452 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10453 arm_insn_r->reg_rec_count = 1;
10454
10455 if (bit (arm_insn_r->arm_insn, 21))
10456 {
10457 /* Write back to Rn. */
10458 record_buf[arm_insn_r->reg_rec_count++]
10459 = bits (arm_insn_r->arm_insn, 16, 19);
10460 }
10461 }
10462 else if ((opcode1 & 0x05) == 0x4)
10463 {
10464 /* STRD (immediate) */
10465 arm_record_strx (arm_insn_r, &record_buf[0],
10466 &record_buf_mem[0], ARM_RECORD_STRD);
10467 }
10468 else if ((opcode1 & 0x05) == 0x5)
10469 {
10470 /* LDRSH (immediate), LDRSH (literal) */
10471 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10472 arm_insn_r->reg_rec_count = 1;
10473
10474 if (bit (arm_insn_r->arm_insn, 21))
10475 {
10476 /* Write back to Rn. */
10477 record_buf[arm_insn_r->reg_rec_count++]
10478 = bits (arm_insn_r->arm_insn, 16, 19);
10479 }
10480 }
10481 else
10482 return -1;
10483 break;
10484 default:
10485 return -1;
10486 }
10487 }
10488 }
10489 else
10490 {
10491 return -1;
10492 }
10493
10494 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10495 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10496 return 0;
10497 }
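
/* Illustrative worked example, not part of the original source: for
   0xe0910002, which encodes "adds r0, r1, r2", opcode1 (bits 20-24)
   is 0x09, so the data-processing arm above records the destination
   register r0 (bits 12-15) plus the CPSR, which is exactly what the
   instruction can change.  */
#if 0
static void
arm_record_data_proc_example (void)
{
  uint32_t insn = 0xe0910002;		/* adds r0, r1, r2  */
  uint32_t opcode1 = bits (insn, 20, 24);

  /* (opcode1 & 0x19) != 0x10, so this is plain data processing and
     only Rd (bits 12-15, here r0) and the CPSR need recording.  */
  gdb_assert (opcode1 == 0x09 && bits (insn, 12, 15) == 0);
}
#endif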
10498
10499 /* Handling opcode 001 insns. */
10500
10501 static int
10502 arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
10503 {
10504 uint32_t record_buf[8], record_buf_mem[8];
10505
10506 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10507 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10508
10509 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10510 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
10511 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10512 )
10513 {
10514 /* Handle MSR insn. */
10515 if (9 == arm_insn_r->opcode)
10516 {
10517 /* CPSR is going to be changed. */
10518 record_buf[0] = ARM_PS_REGNUM;
10519 arm_insn_r->reg_rec_count = 1;
10520 }
10521 else
10522 {
10523 /* SPSR is going to be changed. */
10524 }
10525 }
10526 else if (arm_insn_r->opcode <= 15)
10527 {
10528 /* Normal data processing insns. */
10529 /* In all 11 shifter-operand modes, the insn modifies the destination
10530 register, which is specified by bits 12-15. */
10531 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10532 record_buf[1] = ARM_PS_REGNUM;
10533 arm_insn_r->reg_rec_count = 2;
10534 }
10535 else
10536 {
10537 return -1;
10538 }
10539
10540 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10541 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10542 return 0;
10543 }
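
/* Illustrative worked example, not part of the original source: for
   0xe3b00001, which encodes "movs r0, #1", the opcode field (bits
   21-24) is 13 and bits 20-21 are not 2, so the "normal data
   processing" arm above records r0 and the CPSR.  */
#if 0
static void
arm_record_data_proc_imm_example (void)
{
  uint32_t insn = 0xe3b00001;			/* movs r0, #1  */

  gdb_assert (bits (insn, 21, 24) == 13		/* MOV  */
	      && bits (insn, 12, 15) == 0);	/* Rd = r0  */
}
#endif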
10544
10545 static int
10546 arm_record_media (insn_decode_record *arm_insn_r)
10547 {
10548 uint32_t record_buf[8];
10549
10550 switch (bits (arm_insn_r->arm_insn, 22, 24))
10551 {
10552 case 0:
10553 /* Parallel addition and subtraction, signed */
10554 case 1:
10555 /* Parallel addition and subtraction, unsigned */
10556 case 2:
10557 case 3:
10558 /* Packing, unpacking, saturation and reversal */
10559 {
10560 int rd = bits (arm_insn_r->arm_insn, 12, 15);
10561
10562 record_buf[arm_insn_r->reg_rec_count++] = rd;
10563 }
10564 break;
10565
10566 case 4:
10567 case 5:
10568 /* Signed multiplies */
10569 {
10570 int rd = bits (arm_insn_r->arm_insn, 16, 19);
10571 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 22);
10572
10573 record_buf[arm_insn_r->reg_rec_count++] = rd;
10574 if (op1 == 0x0)
10575 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10576 else if (op1 == 0x4)
10577 record_buf[arm_insn_r->reg_rec_count++]
10578 = bits (arm_insn_r->arm_insn, 12, 15);
10579 }
10580 break;
10581
10582 case 6:
10583 {
10584 if (bit (arm_insn_r->arm_insn, 21)
10585 && bits (arm_insn_r->arm_insn, 5, 6) == 0x2)
10586 {
10587 /* SBFX */
10588 record_buf[arm_insn_r->reg_rec_count++]
10589 = bits (arm_insn_r->arm_insn, 12, 15);
10590 }
10591 else if (bits (arm_insn_r->arm_insn, 20, 21) == 0x0
10592 && bits (arm_insn_r->arm_insn, 5, 7) == 0x0)
10593 {
10594 /* USAD8 and USADA8 */
10595 record_buf[arm_insn_r->reg_rec_count++]
10596 = bits (arm_insn_r->arm_insn, 16, 19);
10597 }
10598 }
10599 break;
10600
10601 case 7:
10602 {
10603 if (bits (arm_insn_r->arm_insn, 20, 21) == 0x3
10604 && bits (arm_insn_r->arm_insn, 5, 7) == 0x7)
10605 {
10606 /* Permanently UNDEFINED */
10607 return -1;
10608 }
10609 else
10610 {
10611 /* BFC, BFI and UBFX */
10612 record_buf[arm_insn_r->reg_rec_count++]
10613 = bits (arm_insn_r->arm_insn, 12, 15);
10614 }
10615 }
10616 break;
10617
10618 default:
10619 return -1;
10620 }
10621
10622 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10623
10624 return 0;
10625 }
10626
10627 /* Handle ARM mode instructions with opcode 010. */
10628
10629 static int
10630 arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
10631 {
10632 struct regcache *reg_cache = arm_insn_r->regcache;
10633
10634 uint32_t reg_base, reg_dest;
10635 uint32_t offset_12, tgt_mem_addr;
10636 uint32_t record_buf[8], record_buf_mem[8];
10637 unsigned char wback;
10638 ULONGEST u_regval;
10639
10640 /* Calculate wback. */
10641 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
10642 || (bit (arm_insn_r->arm_insn, 21) == 1);
10643
10644 arm_insn_r->reg_rec_count = 0;
10645 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10646
10647 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10648 {
10649 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
10650 and LDRT. */
10651
10652 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10653 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
10654
10655 /* The LDR instruction is capable of branching. If MOV LR, PC
10656 precedes an LDR instruction having R15 as reg_base, it
10657 emulates a branch and link instruction, and hence we need to save
10658 CPSR and PC as well. */
10659 if (ARM_PC_REGNUM == reg_dest)
10660 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10661
10662 /* If wback is true, also save the base register, which is going to be
10663 written to. */
10664 if (wback)
10665 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10666 }
10667 else
10668 {
10669 /* STR (immediate), STRB (immediate), STRBT and STRT. */
10670
10671 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
10672 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10673
10674 /* Handle bit U. */
10675 if (bit (arm_insn_r->arm_insn, 23))
10676 {
10677 /* U == 1: Add the offset. */
10678 tgt_mem_addr = (uint32_t) u_regval + offset_12;
10679 }
10680 else
10681 {
10682 /* U == 0: subtract the offset. */
10683 tgt_mem_addr = (uint32_t) u_regval - offset_12;
10684 }
10685
10686 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
10687 bytes. */
10688 if (bit (arm_insn_r->arm_insn, 22))
10689 {
10690 /* STRB and STRBT: 1 byte. */
10691 record_buf_mem[0] = 1;
10692 }
10693 else
10694 {
10695 /* STR and STRT: 4 bytes. */
10696 record_buf_mem[0] = 4;
10697 }
10698
10699 /* Handle bit P. */
10700 if (bit (arm_insn_r->arm_insn, 24))
10701 record_buf_mem[1] = tgt_mem_addr;
10702 else
10703 record_buf_mem[1] = (uint32_t) u_regval;
10704
10705 arm_insn_r->mem_rec_count = 1;
10706
10707 /* If wback is true, also save the base register, which is going to be
10708 written to. */
10709 if (wback)
10710 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10711 }
10712
10713 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10714 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10715 return 0;
10716 }
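
/* Illustrative worked example, not part of the original source: for
   0xe5221008, which encodes "str r1, [r2, #-8]!", bit 20 (L) is
   clear, P is set and W is set, so wback is true: the handler above
   records a 4-byte write at r2 - 8 (U is clear, so the offset is
   subtracted) and also records r2 itself, since the base register is
   written back.  */
#if 0
static void
arm_record_str_writeback_example (void)
{
  uint32_t insn = 0xe5221008;			/* str r1, [r2, #-8]!  */

  gdb_assert (bit (insn, INSN_S_L_BIT_NUM) == 0	/* Store.  */
	      && bit (insn, 24) == 1		/* P: pre-indexed.  */
	      && bit (insn, 23) == 0		/* U: subtract offset.  */
	      && bit (insn, 21) == 1);		/* W: write-back.  */
}
#endif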
10717
10718 /* Handling opcode 011 insns. */
10719
10720 static int
10721 arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
10722 {
10723 struct regcache *reg_cache = arm_insn_r->regcache;
10724
10725 uint32_t shift_imm = 0;
10726 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
10727 uint32_t offset_12 = 0, tgt_mem_addr = 0;
10728 uint32_t record_buf[8], record_buf_mem[8];
10729
10730 LONGEST s_word;
10731 ULONGEST u_regval[2];
10732
10733 if (bit (arm_insn_r->arm_insn, 4))
10734 return arm_record_media (arm_insn_r);
10735
10736 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10737 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10738
10739 /* Handle enhanced store insns and the LDRD DSP insn;
10740 ordering follows the addressing modes of the store insns,
10741 beginning with STRH. */
10742
10743 /* LDR or STR? */
10744 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10745 {
10746 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10747 /* The LDR insn is capable of branching. If
10748 MOV LR, PC precedes an LDR insn having Rn as R15,
10749 it emulates a branch and link insn, and hence we
10750 need to save CPSR and PC as well. */
10751 if (15 != reg_dest)
10752 {
10753 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10754 arm_insn_r->reg_rec_count = 1;
10755 }
10756 else
10757 {
10758 record_buf[0] = reg_dest;
10759 record_buf[1] = ARM_PS_REGNUM;
10760 arm_insn_r->reg_rec_count = 2;
10761 }
10762 }
10763 else
10764 {
10765 if (! bits (arm_insn_r->arm_insn, 4, 11))
10766 {
10767 /* Store insn, register offset and register pre-indexed,
10768 register post-indexed. */
10769 /* Get Rm. */
10770 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10771 /* Get Rn. */
10772 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10773 regcache_raw_read_unsigned (reg_cache, reg_src1,
10774 &u_regval[0]);
10775 regcache_raw_read_unsigned (reg_cache, reg_src2,
10776 &u_regval[1]);
10777 if (15 == reg_src2)
10778 {
10779 /* If R15 was used as Rn, the value read is the current PC + 8. */
10780 /* Pre-indexed mode doesn't reach here; it is an illegal insn. */
10781 u_regval[0] = u_regval[0] + 8;
10782 }
10783 /* Calculate target store address, Rn +/- Rm, register offset. */
10784 /* U == 1. */
10785 if (bit (arm_insn_r->arm_insn, 23))
10786 {
10787 tgt_mem_addr = u_regval[0] + u_regval[1];
10788 }
10789 else
10790 {
10791 tgt_mem_addr = u_regval[1] - u_regval[0];
10792 }
10793
10794 switch (arm_insn_r->opcode)
10795 {
10796 /* STR. */
10797 case 8:
10798 case 12:
10799 /* STR. */
10800 case 9:
10801 case 13:
10802 /* STRT. */
10803 case 1:
10804 case 5:
10805 /* STR. */
10806 case 0:
10807 case 4:
10808 record_buf_mem[0] = 4;
10809 break;
10810
10811 /* STRB. */
10812 case 10:
10813 case 14:
10814 /* STRB. */
10815 case 11:
10816 case 15:
10817 /* STRBT. */
10818 case 3:
10819 case 7:
10820 /* STRB. */
10821 case 2:
10822 case 6:
10823 record_buf_mem[0] = 1;
10824 break;
10825
10826 default:
10827 gdb_assert_not_reached ("no decoding pattern found");
10828 break;
10829 }
10830 record_buf_mem[1] = tgt_mem_addr;
10831 arm_insn_r->mem_rec_count = 1;
10832
10833 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10834 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10835 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10836 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10837 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10838 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10839 )
10840 {
10841 /* Rn is going to be changed in pre-indexed mode and
10842 post-indexed mode as well. */
10843 record_buf[0] = reg_src2;
10844 arm_insn_r->reg_rec_count = 1;
10845 }
10846 }
10847 else
10848 {
10849 /* Store insn, scaled register offset; scaled pre-indexed. */
10850 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
10851 /* Get Rm. */
10852 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10853 /* Get Rn. */
10854 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10855 /* Get shift_imm. */
10856 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
10857 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10858 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
10859 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10860 /* Offset_12 used as shift. */
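/* Bits 5..6 give the shift type: 0 = LSL, 1 = LSR, 2 = ASR,
   3 = ROR (or RRX when SHIFT_IMM is zero).  A zero SHIFT_IMM
   encodes a shift of 32 for LSR and ASR.  */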
10861 switch (offset_12)
10862 {
10863 case 0:
10864 /* Offset_12 used as index. */
10865 offset_12 = u_regval[0] << shift_imm;
10866 break;
10867
10868 case 1:
10869 offset_12 = (!shift_imm) ? 0 : u_regval[0] >> shift_imm;
10870 break;
10871
10872 case 2:
10873 if (!shift_imm)
10874 {
10875 if (bit (u_regval[0], 31))
10876 {
10877 offset_12 = 0xFFFFFFFF;
10878 }
10879 else
10880 {
10881 offset_12 = 0;
10882 }
10883 }
10884 else
10885 {
10886 /* This is arithmetic shift. */
10887 offset_12 = s_word >> shift_imm;
10888 }
10889 break;
10890
10891 case 3:
10892 if (!shift_imm)
10893 {
10894 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
10895 &u_regval[1]);
10896 /* Get C flag value and shift it by 31. */
10897 offset_12 = (((bit (u_regval[1], 29)) << 31)
10898 | (u_regval[0]) >> 1);
10899 }
10900 else
10901 {
10902 /* Rotate right by SHIFT_IMM bits. */
10903 offset_12 = ((u_regval[0] >> shift_imm)
10904 | (u_regval[0] << (32 - shift_imm)));
10905 }
10906 break;
10907
10908 default:
10909 gdb_assert_not_reached ("no decoding pattern found");
10910 break;
10911 }
10912
10913 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10914 /* bit U set. */
10915 if (bit (arm_insn_r->arm_insn, 23))
10916 {
10917 tgt_mem_addr = u_regval[1] + offset_12;
10918 }
10919 else
10920 {
10921 tgt_mem_addr = u_regval[1] - offset_12;
10922 }
10923
10924 switch (arm_insn_r->opcode)
10925 {
10926 /* STR. */
10927 case 8:
10928 case 12:
10929 /* STR. */
10930 case 9:
10931 case 13:
10932 /* STRT. */
10933 case 1:
10934 case 5:
10935 /* STR. */
10936 case 0:
10937 case 4:
10938 record_buf_mem[0] = 4;
10939 break;
10940
10941 /* STRB. */
10942 case 10:
10943 case 14:
10944 /* STRB. */
10945 case 11:
10946 case 15:
10947 /* STRBT. */
10948 case 3:
10949 case 7:
10950 /* STRB. */
10951 case 2:
10952 case 6:
10953 record_buf_mem[0] = 1;
10954 break;
10955
10956 default:
10957 gdb_assert_not_reached ("no decoding pattern found");
10958 break;
10959 }
10960 record_buf_mem[1] = tgt_mem_addr;
10961 arm_insn_r->mem_rec_count = 1;
10962
10963 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10964 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10965 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10966 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10967 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10968 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10969 )
10970 {
10971 /* Rn is going to be changed in scaled register pre-indexed
10972 mode and scaled post-indexed mode. */
10973 record_buf[0] = reg_src2;
10974 arm_insn_r->reg_rec_count = 1;
10975 }
10976 }
10977 }
10978
10979 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10980 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10981 return 0;
10982 }
10983
10984 /* Handle ARM mode instructions with opcode 100. */
10985
10986 static int
10987 arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
10988 {
10989 struct regcache *reg_cache = arm_insn_r->regcache;
10990 uint32_t register_count = 0, register_bits;
10991 uint32_t reg_base, addr_mode;
10992 uint32_t record_buf[24], record_buf_mem[48];
10993 uint32_t wback;
10994 ULONGEST u_regval;
10995
10996 /* Fetch the list of registers. */
10997 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
10998 arm_insn_r->reg_rec_count = 0;
10999
11000 /* Fetch the base register that holds the memory address used by the
11001 load/store multiple. */
11002 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
11003
11004 /* Calculate wback. */
11005 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
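/* For LDM/STM only the W bit (bit 21) controls writeback of the base
   register; there is no post-indexed form as with single loads and
   stores.  */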
11006
11007 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11008 {
11009 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
11010
11011 /* Find out which registers are going to be loaded from memory. */
11012 while (register_bits)
11013 {
11014 if (register_bits & 0x00000001)
11015 record_buf[arm_insn_r->reg_rec_count++] = register_count;
11016 register_bits = register_bits >> 1;
11017 register_count++;
11018 }
11019
11020
11021 /* If wback is true, also save the base register, which is going to be
11022 written to. */
11023 if (wback)
11024 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11025
11026 /* Save the CPSR register. */
11027 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
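/* Recording the CPSR unconditionally is conservative: the
   exception-return form of LDM (with the PC in the list) also copies
   the SPSR into the CPSR.  */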
11028 }
11029 else
11030 {
11031 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
11032
11033 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
11034
11035 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
11036
11037 /* Find out how many registers are going to be stored to memory. */
11038 while (register_bits)
11039 {
11040 if (register_bits & 0x00000001)
11041 register_count++;
11042 register_bits = register_bits >> 1;
11043 }
11044
11045 switch (addr_mode)
11046 {
11047 /* STMDA (STMED): Decrement after. */
11048 case 0:
11049 record_buf_mem[1] = (uint32_t) u_regval
11050 - register_count * ARM_INT_REGISTER_SIZE + 4;
11051 break;
11052 /* STM (STMIA, STMEA): Increment after. */
11053 case 1:
11054 record_buf_mem[1] = (uint32_t) u_regval;
11055 break;
11056 /* STMDB (STMFD): Decrement before. */
11057 case 2:
11058 record_buf_mem[1] = (uint32_t) u_regval
11059 - register_count * ARM_INT_REGISTER_SIZE;
11060 break;
11061 /* STMIB (STMFA): Increment before. */
11062 case 3:
11063 record_buf_mem[1] = (uint32_t) u_regval + ARM_INT_REGISTER_SIZE;
11064 break;
11065 default:
11066 gdb_assert_not_reached ("no decoding pattern found");
11067 break;
11068 }
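/* For example, STMDB sp!, {r4-r7} stores four words; the lowest
   address written is SP - 16, and the 16-byte extent is recorded
   just below.  */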
11069
11070 record_buf_mem[0] = register_count * ARM_INT_REGISTER_SIZE;
11071 arm_insn_r->mem_rec_count = 1;
11072
11073 /* If wback is true, also save the base register, which is going to be
11074 written to. */
11075 if (wback)
11076 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11077 }
11078
11079 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11080 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11081 return 0;
11082 }
11083
11084 /* Handling opcode 101 insns. */
11085
11086 static int
11087 arm_record_b_bl (insn_decode_record *arm_insn_r)
11088 {
11089 uint32_t record_buf[8];
11090
11091 /* Handle B, BL, BLX(1) insns. */
11092 /* B simply branches, so there is nothing to record here. */
11093 /* Note: BLX(1) doesn't reach this handler; it falls into the
11094 extension space instead. */
11095 if (bit (arm_insn_r->arm_insn, 24))
11096 {
11097 record_buf[0] = ARM_LR_REGNUM;
11098 arm_insn_r->reg_rec_count = 1;
11099 }
11100
11101 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11102
11103 return 0;
11104 }
11105
11106 static int
11107 arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
11108 {
11109 printf_unfiltered (_("Process record does not support instruction "
11110 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11111 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11112
11113 return -1;
11114 }
11115
11116 /* Record handler for vector data transfer instructions. */
11117
11118 static int
11119 arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
11120 {
11121 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
11122 uint32_t record_buf[4];
11123
11124 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
11125 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
11126 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
11127 bit_l = bit (arm_insn_r->arm_insn, 20);
11128 bit_c = bit (arm_insn_r->arm_insn, 8);
11129
11130 /* Handle VMOV instruction. */
11131 if (bit_l && bit_c)
11132 {
11133 record_buf[0] = reg_t;
11134 arm_insn_r->reg_rec_count = 1;
11135 }
11136 else if (bit_l && !bit_c)
11137 {
11138 /* Handle VMOV instruction. */
11139 if (bits_a == 0x00)
11140 {
11141 record_buf[0] = reg_t;
11142 arm_insn_r->reg_rec_count = 1;
11143 }
11144 /* Handle VMRS instruction. */
11145 else if (bits_a == 0x07)
11146 {
11147 if (reg_t == 15)
11148 reg_t = ARM_PS_REGNUM;
11149
11150 record_buf[0] = reg_t;
11151 arm_insn_r->reg_rec_count = 1;
11152 }
11153 }
11154 else if (!bit_l && !bit_c)
11155 {
11156 /* Handle VMOV instruction. */
11157 if (bits_a == 0x00)
11158 {
11159 record_buf[0] = ARM_D0_REGNUM + reg_v;
11160
11161 arm_insn_r->reg_rec_count = 1;
11162 }
11163 /* Handle VMSR instruction. */
11164 else if (bits_a == 0x07)
11165 {
11166 record_buf[0] = ARM_FPSCR_REGNUM;
11167 arm_insn_r->reg_rec_count = 1;
11168 }
11169 }
11170 else if (!bit_l && bit_c)
11171 {
11172 /* Handle VMOV instruction. */
11173 if (!(bits_a & 0x04))
11174 {
11175 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
11176 + ARM_D0_REGNUM;
11177 arm_insn_r->reg_rec_count = 1;
11178 }
11179 /* Handle VDUP instruction. */
11180 else
11181 {
11182 if (bit (arm_insn_r->arm_insn, 21))
11183 {
11184 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11185 record_buf[0] = reg_v + ARM_D0_REGNUM;
11186 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
11187 arm_insn_r->reg_rec_count = 2;
11188 }
11189 else
11190 {
11191 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11192 record_buf[0] = reg_v + ARM_D0_REGNUM;
11193 arm_insn_r->reg_rec_count = 1;
11194 }
11195 }
11196 }
11197
11198 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11199 return 0;
11200 }
11201
11202 /* Record handler for extension register load/store instructions. */
11203
11204 static int
11205 arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
11206 {
11207 uint32_t opcode, single_reg;
11208 uint8_t op_vldm_vstm;
11209 uint32_t record_buf[8], record_buf_mem[128];
11210 ULONGEST u_regval = 0;
11211
11212 struct regcache *reg_cache = arm_insn_r->regcache;
11213
11214 opcode = bits (arm_insn_r->arm_insn, 20, 24);
11215 single_reg = !bit (arm_insn_r->arm_insn, 8);
11216 op_vldm_vstm = opcode & 0x1b;
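/* Masking OPCODE with 0x1b discards the D bit (instruction bit 22),
   which only extends the register number and does not change which
   VLDM/VSTM/VPUSH form this is.  */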
11217
11218 /* Handle VMOV instructions. */
11219 if ((opcode & 0x1e) == 0x04)
11220 {
11221 if (bit (arm_insn_r->arm_insn, 20)) /* to_arm_registers bit 20? */
11222 {
11223 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11224 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11225 arm_insn_r->reg_rec_count = 2;
11226 }
11227 else
11228 {
11229 uint8_t reg_m = bits (arm_insn_r->arm_insn, 0, 3);
11230 uint8_t bit_m = bit (arm_insn_r->arm_insn, 5);
11231
11232 if (single_reg)
11233 {
11234 /* The first S register number m is REG_M:M (M is bit 5),
11235 the corresponding D register number is REG_M:M / 2, which
11236 is REG_M. */
11237 record_buf[arm_insn_r->reg_rec_count++] = ARM_D0_REGNUM + reg_m;
11238 /* The second S register number is REG_M:M + 1, the
11239 corresponding D register number is (REG_M:M + 1) / 2.
11240 IOW, if bit M is 1, the first and second S registers
11241 are mapped to different D registers, otherwise, they are
11242 in the same D register. */
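/* For example, VMOV s1, s2, r0, r1 has REG_M = 0 and M = 1: S1
   lives in D0 and S2 in D1, so both D0 and D1 are recorded.  */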
11243 if (bit_m)
11244 {
11245 record_buf[arm_insn_r->reg_rec_count++]
11246 = ARM_D0_REGNUM + reg_m + 1;
11247 }
11248 }
11249 else
11250 {
11251 record_buf[0] = ((bit_m << 4) + reg_m + ARM_D0_REGNUM);
11252 arm_insn_r->reg_rec_count = 1;
11253 }
11254 }
11255 }
11256 /* Handle VSTM and VPUSH instructions. */
11257 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
11258 || op_vldm_vstm == 0x12)
11259 {
11260 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
11261 uint32_t memory_index = 0;
11262
11263 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11264 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11265 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11266 imm_off32 = imm_off8 << 2;
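/* IMM_OFF8 is the imm8 word count of the transfer (the number of S
   registers, or twice the number of D registers); IMM_OFF32 is the
   equivalent offset in bytes.  */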
11267 memory_count = imm_off8;
11268
11269 if (bit (arm_insn_r->arm_insn, 23))
11270 start_address = u_regval;
11271 else
11272 start_address = u_regval - imm_off32;
11273
11274 if (bit (arm_insn_r->arm_insn, 21))
11275 {
11276 record_buf[0] = reg_rn;
11277 arm_insn_r->reg_rec_count = 1;
11278 }
11279
11280 while (memory_count > 0)
11281 {
11282 if (single_reg)
11283 {
11284 record_buf_mem[memory_index] = 4;
11285 record_buf_mem[memory_index + 1] = start_address;
11286 start_address = start_address + 4;
11287 memory_index = memory_index + 2;
11288 }
11289 else
11290 {
11291 record_buf_mem[memory_index] = 4;
11292 record_buf_mem[memory_index + 1] = start_address;
11293 record_buf_mem[memory_index + 2] = 4;
11294 record_buf_mem[memory_index + 3] = start_address + 4;
11295 start_address = start_address + 8;
11296 memory_index = memory_index + 4;
11297 }
11298 memory_count--;
11299 }
11300 arm_insn_r->mem_rec_count = (memory_index >> 1);
11301 }
11302 /* Handle VLDM instructions. */
11303 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
11304 || op_vldm_vstm == 0x13)
11305 {
11306 uint32_t reg_count, reg_vd;
11307 uint32_t reg_index = 0;
11308 uint32_t bit_d = bit (arm_insn_r->arm_insn, 22);
11309
11310 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11311 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
11312
11313 /* REG_VD is the first D register number. If the instruction
11314 loads memory to S registers (SINGLE_REG is TRUE), the register
11315 number is (REG_VD << 1 | bit D), so the corresponding D
11316 register number is (REG_VD << 1 | bit D) / 2 = REG_VD. */
11317 if (!single_reg)
11318 reg_vd = reg_vd | (bit_d << 4);
11319
11320 if (bit (arm_insn_r->arm_insn, 21) /* write back */)
11321 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
11322
11323 /* If the instruction loads memory into D registers, REG_COUNT (the
11324 imm8 field) is twice the number of registers, so divide it by 2.
11325 If it loads into S registers, divide by 2 as well, because two
11326 S registers are mapped onto each D register. */
11327 reg_count = reg_count / 2;
11328 if (single_reg && bit_d)
11329 {
11330 /* Increase the register count if S register list starts from
11331 an odd number (bit d is one). */
11332 reg_count++;
11333 }
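/* For example, VLDM r0, {s3-s6} has imm8 = 4 and bit D = 1, giving
   REG_COUNT = 3; the loop below then records D1, D2 and D3, which
   together hold S3..S6.  */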
11334
11335 while (reg_count > 0)
11336 {
11337 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
11338 reg_count--;
11339 }
11340 arm_insn_r->reg_rec_count = reg_index;
11341 }
11342 /* VSTR Vector store register. */
11343 else if ((opcode & 0x13) == 0x10)
11344 {
11345 uint32_t start_address, reg_rn, imm_off32, imm_off8;
11346 uint32_t memory_index = 0;
11347
11348 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11349 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11350 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11351 imm_off32 = imm_off8 << 2;
11352
11353 if (bit (arm_insn_r->arm_insn, 23))
11354 start_address = u_regval + imm_off32;
11355 else
11356 start_address = u_regval - imm_off32;
11357
11358 if (single_reg)
11359 {
11360 record_buf_mem[memory_index] = 4;
11361 record_buf_mem[memory_index + 1] = start_address;
11362 arm_insn_r->mem_rec_count = 1;
11363 }
11364 else
11365 {
11366 record_buf_mem[memory_index] = 4;
11367 record_buf_mem[memory_index + 1] = start_address;
11368 record_buf_mem[memory_index + 2] = 4;
11369 record_buf_mem[memory_index + 3] = start_address + 4;
11370 arm_insn_r->mem_rec_count = 2;
11371 }
11372 }
11373 /* VLDR Vector load register. */
11374 else if ((opcode & 0x13) == 0x11)
11375 {
11376 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11377
11378 if (!single_reg)
11379 {
11380 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
11381 record_buf[0] = ARM_D0_REGNUM + reg_vd;
11382 }
11383 else
11384 {
11385 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
11386 /* Record register D rather than pseudo register S. */
11387 record_buf[0] = ARM_D0_REGNUM + reg_vd / 2;
11388 }
11389 arm_insn_r->reg_rec_count = 1;
11390 }
11391
11392 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11393 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11394 return 0;
11395 }
11396
11397 /* Record handler for arm/thumb mode VFP data processing instructions. */
11398
11399 static int
11400 arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
11401 {
11402 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
11403 uint32_t record_buf[4];
11404 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
11405 enum insn_types curr_insn_type = INSN_INV;
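/* The switch at the bottom records, per type: INSN_T0 a pair of
   consecutive D registers, INSN_T1 a single D register, INSN_T2 a
   register indexed by the single-precision destination number, and
   INSN_T3 only the FPSCR (VCMP/VCMPE).  */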
11406
11407 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11408 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
11409 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
11410 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
11411 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
11412 bit_d = bit (arm_insn_r->arm_insn, 22);
11413 /* Mask off the "D" bit. */
11414 opc1 = opc1 & ~0x04;
11415
11416 /* Handle VMLA, VMLS. */
11417 if (opc1 == 0x00)
11418 {
11419 if (bit (arm_insn_r->arm_insn, 10))
11420 {
11421 if (bit (arm_insn_r->arm_insn, 6))
11422 curr_insn_type = INSN_T0;
11423 else
11424 curr_insn_type = INSN_T1;
11425 }
11426 else
11427 {
11428 if (dp_op_sz)
11429 curr_insn_type = INSN_T1;
11430 else
11431 curr_insn_type = INSN_T2;
11432 }
11433 }
11434 /* Handle VNMLA, VNMLS, VNMUL. */
11435 else if (opc1 == 0x01)
11436 {
11437 if (dp_op_sz)
11438 curr_insn_type = INSN_T1;
11439 else
11440 curr_insn_type = INSN_T2;
11441 }
11442 /* Handle VMUL. */
11443 else if (opc1 == 0x02 && !(opc3 & 0x01))
11444 {
11445 if (bit (arm_insn_r->arm_insn, 10))
11446 {
11447 if (bit (arm_insn_r->arm_insn, 6))
11448 curr_insn_type = INSN_T0;
11449 else
11450 curr_insn_type = INSN_T1;
11451 }
11452 else
11453 {
11454 if (dp_op_sz)
11455 curr_insn_type = INSN_T1;
11456 else
11457 curr_insn_type = INSN_T2;
11458 }
11459 }
11460 /* Handle VADD, VSUB. */
11461 else if (opc1 == 0x03)
11462 {
11463 if (!bit (arm_insn_r->arm_insn, 9))
11464 {
11465 if (bit (arm_insn_r->arm_insn, 6))
11466 curr_insn_type = INSN_T0;
11467 else
11468 curr_insn_type = INSN_T1;
11469 }
11470 else
11471 {
11472 if (dp_op_sz)
11473 curr_insn_type = INSN_T1;
11474 else
11475 curr_insn_type = INSN_T2;
11476 }
11477 }
11478 /* Handle VDIV. */
11479 else if (opc1 == 0x08)
11480 {
11481 if (dp_op_sz)
11482 curr_insn_type = INSN_T1;
11483 else
11484 curr_insn_type = INSN_T2;
11485 }
11486 /* Handle all other vfp data processing instructions. */
11487 else if (opc1 == 0x0b)
11488 {
11489 /* Handle VMOV. */
11490 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
11491 {
11492 if (bit (arm_insn_r->arm_insn, 4))
11493 {
11494 if (bit (arm_insn_r->arm_insn, 6))
11495 curr_insn_type = INSN_T0;
11496 else
11497 curr_insn_type = INSN_T1;
11498 }
11499 else
11500 {
11501 if (dp_op_sz)
11502 curr_insn_type = INSN_T1;
11503 else
11504 curr_insn_type = INSN_T2;
11505 }
11506 }
11507 /* Handle VNEG and VABS. */
11508 else if ((opc2 == 0x01 && opc3 == 0x01)
11509 || (opc2 == 0x00 && opc3 == 0x03))
11510 {
11511 if (!bit (arm_insn_r->arm_insn, 11))
11512 {
11513 if (bit (arm_insn_r->arm_insn, 6))
11514 curr_insn_type = INSN_T0;
11515 else
11516 curr_insn_type = INSN_T1;
11517 }
11518 else
11519 {
11520 if (dp_op_sz)
11521 curr_insn_type = INSN_T1;
11522 else
11523 curr_insn_type = INSN_T2;
11524 }
11525 }
11526 /* Handle VSQRT. */
11527 else if (opc2 == 0x01 && opc3 == 0x03)
11528 {
11529 if (dp_op_sz)
11530 curr_insn_type = INSN_T1;
11531 else
11532 curr_insn_type = INSN_T2;
11533 }
11534 /* Handle VCVT. */
11535 else if (opc2 == 0x07 && opc3 == 0x03)
11536 {
11537 if (!dp_op_sz)
11538 curr_insn_type = INSN_T1;
11539 else
11540 curr_insn_type = INSN_T2;
11541 }
11542 else if (opc3 & 0x01)
11543 {
11544 /* Handle VCVT. */
11545 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
11546 {
11547 if (!bit (arm_insn_r->arm_insn, 18))
11548 curr_insn_type = INSN_T2;
11549 else
11550 {
11551 if (dp_op_sz)
11552 curr_insn_type = INSN_T1;
11553 else
11554 curr_insn_type = INSN_T2;
11555 }
11556 }
11557 /* Handle VCVT. */
11558 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
11559 {
11560 if (dp_op_sz)
11561 curr_insn_type = INSN_T1;
11562 else
11563 curr_insn_type = INSN_T2;
11564 }
11565 /* Handle VCVTB, VCVTT. */
11566 else if ((opc2 & 0x0e) == 0x02)
11567 curr_insn_type = INSN_T2;
11568 /* Handle VCMP, VCMPE. */
11569 else if ((opc2 & 0x0e) == 0x04)
11570 curr_insn_type = INSN_T3;
11571 }
11572 }
11573
11574 switch (curr_insn_type)
11575 {
11576 case INSN_T0:
11577 reg_vd = reg_vd | (bit_d << 4);
11578 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11579 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
11580 arm_insn_r->reg_rec_count = 2;
11581 break;
11582
11583 case INSN_T1:
11584 reg_vd = reg_vd | (bit_d << 4);
11585 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11586 arm_insn_r->reg_rec_count = 1;
11587 break;
11588
11589 case INSN_T2:
11590 reg_vd = (reg_vd << 1) | bit_d;
11591 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11592 arm_insn_r->reg_rec_count = 1;
11593 break;
11594
11595 case INSN_T3:
11596 record_buf[0] = ARM_FPSCR_REGNUM;
11597 arm_insn_r->reg_rec_count = 1;
11598 break;
11599
11600 default:
11601 gdb_assert_not_reached ("no decoding pattern found");
11602 break;
11603 }
11604
11605 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11606 return 0;
11607 }
11608
11609 /* Handling opcode 110 insns. */
11610
11611 static int
11612 arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
11613 {
11614 uint32_t op1, op1_ebit, coproc;
11615
11616 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11617 op1 = bits (arm_insn_r->arm_insn, 20, 25);
11618 op1_ebit = bit (arm_insn_r->arm_insn, 20);
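/* Coprocessor numbers 10 and 11 (0b101x) are the VFP / Advanced SIMD
   register file, hence the (coproc & 0x0e) == 0x0a tests below.  */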
11619
11620 if ((coproc & 0x0e) == 0x0a)
11621 {
11622 /* Handle extension register ld/st instructions. */
11623 if (!(op1 & 0x20))
11624 return arm_record_exreg_ld_st_insn (arm_insn_r);
11625
11626 /* 64-bit transfers between arm core and extension registers. */
11627 if ((op1 & 0x3e) == 0x04)
11628 return arm_record_exreg_ld_st_insn (arm_insn_r);
11629 }
11630 else
11631 {
11632 /* Handle coprocessor ld/st instructions. */
11633 if (!(op1 & 0x3a))
11634 {
11635 /* Store. */
11636 if (!op1_ebit)
11637 return arm_record_unsupported_insn (arm_insn_r);
11638 else
11639 /* Load. */
11640 return arm_record_unsupported_insn (arm_insn_r);
11641 }
11642
11643 /* Move to coprocessor from two arm core registers. */
11644 if (op1 == 0x4)
11645 return arm_record_unsupported_insn (arm_insn_r);
11646
11647 /* Move to two arm core registers from coprocessor. */
11648 if (op1 == 0x5)
11649 {
11650 uint32_t reg_t[2];
11651
11652 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
11653 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
11654 arm_insn_r->reg_rec_count = 2;
11655
11656 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
11657 return 0;
11658 }
11659 }
11660 return arm_record_unsupported_insn (arm_insn_r);
11661 }
11662
11663 /* Handling opcode 111 insns. */
11664
11665 static int
11666 arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
11667 {
11668 uint32_t op, op1_ebit, coproc, bits_24_25;
11669 struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
11670 struct regcache *reg_cache = arm_insn_r->regcache;
11671
11672 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
11673 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11674 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11675 op = bit (arm_insn_r->arm_insn, 4);
11676 bits_24_25 = bits (arm_insn_r->arm_insn, 24, 25);
11677
11678 /* Handle arm SWI/SVC system call instructions. */
11679 if (bits_24_25 == 0x3)
11680 {
11681 if (tdep->arm_syscall_record != NULL)
11682 {
11683 ULONGEST svc_operand, svc_number;
11684
11685 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
11686
11687 if (svc_operand) /* OABI. */
11688 svc_number = svc_operand - 0x900000;
11689 else /* EABI. */
11690 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
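/* Under OABI the syscall number is encoded in the SVC immediate
   (biased by 0x900000); under EABI the immediate is zero and the
   number is passed in r7, so it must be read from the regcache.  */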
11691
11692 return tdep->arm_syscall_record (reg_cache, svc_number);
11693 }
11694 else
11695 {
11696 printf_unfiltered (_("no syscall record support\n"));
11697 return -1;
11698 }
11699 }
11700 else if (bits_24_25 == 0x02)
11701 {
11702 if (op)
11703 {
11704 if ((coproc & 0x0e) == 0x0a)
11705 {
11706 /* 8, 16, and 32-bit transfer */
11707 return arm_record_vdata_transfer_insn (arm_insn_r);
11708 }
11709 else
11710 {
11711 if (op1_ebit)
11712 {
11713 /* MRC, MRC2 */
11714 uint32_t record_buf[1];
11715
11716 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11717 if (record_buf[0] == 15)
11718 record_buf[0] = ARM_PS_REGNUM;
11719
11720 arm_insn_r->reg_rec_count = 1;
11721 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
11722 record_buf);
11723 return 0;
11724 }
11725 else
11726 {
11727 /* MCR, MCR2 */
11728 return -1;
11729 }
11730 }
11731 }
11732 else
11733 {
11734 if ((coproc & 0x0e) == 0x0a)
11735 {
11736 /* VFP data-processing instructions. */
11737 return arm_record_vfp_data_proc_insn (arm_insn_r);
11738 }
11739 else
11740 {
11741 /* CDP, CDP2 */
11742 return -1;
11743 }
11744 }
11745 }
11746 else
11747 {
11748 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 25);
11749
11750 if (op1 == 5)
11751 {
11752 if ((coproc & 0x0e) != 0x0a)
11753 {
11754 /* MRRC, MRRC2 */
11755 return -1;
11756 }
11757 }
11758 else if (op1 == 4 || op1 == 5)
11759 {
11760 if ((coproc & 0x0e) == 0x0a)
11761 {
11762 /* 64-bit transfers between ARM core and extension */
11763 return -1;
11764 }
11765 else if (op1 == 4)
11766 {
11767 /* MCRR, MCRR2 */
11768 return -1;
11769 }
11770 }
11771 else if (op1 == 0 || op1 == 1)
11772 {
11773 /* UNDEFINED */
11774 return -1;
11775 }
11776 else
11777 {
11778 if ((coproc & 0x0e) == 0x0a)
11779 {
11780 /* Extension register load/store */
11781 }
11782 else
11783 {
11784 /* STC, STC2, LDC, LDC2 */
11785 }
11786 return -1;
11787 }
11788 }
11789
11790 return -1;
11791 }
11792
11793 /* Handling opcode 000 insns. */
11794
11795 static int
11796 thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
11797 {
11798 uint32_t record_buf[8];
11799 uint32_t reg_src1 = 0;
11800
11801 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11802
11803 record_buf[0] = ARM_PS_REGNUM;
11804 record_buf[1] = reg_src1;
11805 thumb_insn_r->reg_rec_count = 2;
11806
11807 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11808
11809 return 0;
11810 }
11811
11812
11813 /* Handling opcode 001 insns. */
11814
11815 static int
11816 thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
11817 {
11818 uint32_t record_buf[8];
11819 uint32_t reg_src1 = 0;
11820
11821 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11822
11823 record_buf[0] = ARM_PS_REGNUM;
11824 record_buf[1] = reg_src1;
11825 thumb_insn_r->reg_rec_count = 2;
11826
11827 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11828
11829 return 0;
11830 }
11831
11832 /* Handling opcode 010 insns. */
11833
11834 static int
11835 thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
11836 {
11837 struct regcache *reg_cache = thumb_insn_r->regcache;
11838 uint32_t record_buf[8], record_buf_mem[8];
11839
11840 uint32_t reg_src1 = 0, reg_src2 = 0;
11841 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
11842
11843 ULONGEST u_regval[2] = {0};
11844
11845 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
11846
11847 if (bit (thumb_insn_r->arm_insn, 12))
11848 {
11849 /* Handle load/store register offset. */
11850 uint32_t opB = bits (thumb_insn_r->arm_insn, 9, 11);
11851
11852 if (in_inclusive_range (opB, 4U, 7U))
11853 {
11854 /* LDR(2), LDRB(2), LDRH(2), LDRSB, LDRSH. */
11855 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11856 record_buf[0] = reg_src1;
11857 thumb_insn_r->reg_rec_count = 1;
11858 }
11859 else if (in_inclusive_range (opB, 0U, 2U))
11860 {
11861 /* STR(2), STRB(2), STRH(2). */
11862 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11863 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
11864 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11865 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11866 if (0 == opB)
11867 record_buf_mem[0] = 4; /* STR (2). */
11868 else if (2 == opB)
11869 record_buf_mem[0] = 1; /* STRB (2). */
11870 else if (1 == opB)
11871 record_buf_mem[0] = 2; /* STRH (2). */
11872 record_buf_mem[1] = u_regval[0] + u_regval[1];
11873 thumb_insn_r->mem_rec_count = 1;
11874 }
11875 }
11876 else if (bit (thumb_insn_r->arm_insn, 11))
11877 {
11878 /* Handle load from literal pool. */
11879 /* LDR(3). */
11880 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11881 record_buf[0] = reg_src1;
11882 thumb_insn_r->reg_rec_count = 1;
11883 }
11884 else if (opcode1)
11885 {
11886 /* Special data instructions and branch and exchange */
11887 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
11888 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
11889 if ((3 == opcode2) && (!opcode3))
11890 {
11891 /* Branch with exchange. */
11892 record_buf[0] = ARM_PS_REGNUM;
11893 thumb_insn_r->reg_rec_count = 1;
11894 }
11895 else
11896 {
11897 /* Format 8; special data processing insns. */
11898 record_buf[0] = ARM_PS_REGNUM;
11899 record_buf[1] = (bit (thumb_insn_r->arm_insn, 7) << 3
11900 | bits (thumb_insn_r->arm_insn, 0, 2));
11901 thumb_insn_r->reg_rec_count = 2;
11902 }
11903 }
11904 else
11905 {
11906 /* Format 5; data processing insns. */
11907 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11908 if (bit (thumb_insn_r->arm_insn, 7))
11909 {
11910 reg_src1 = reg_src1 + 8;
11911 }
11912 record_buf[0] = ARM_PS_REGNUM;
11913 record_buf[1] = reg_src1;
11914 thumb_insn_r->reg_rec_count = 2;
11915 }
11916
11917 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11918 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11919 record_buf_mem);
11920
11921 return 0;
11922 }
11923
11924 /* Handling opcode 001 insns. */
11925
11926 static int
11927 thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
11928 {
11929 struct regcache *reg_cache = thumb_insn_r->regcache;
11930 uint32_t record_buf[8], record_buf_mem[8];
11931
11932 uint32_t reg_src1 = 0;
11933 uint32_t opcode = 0, immed_5 = 0;
11934
11935 ULONGEST u_regval = 0;
11936
11937 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11938
11939 if (opcode)
11940 {
11941 /* LDR(1). */
11942 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11943 record_buf[0] = reg_src1;
11944 thumb_insn_r->reg_rec_count = 1;
11945 }
11946 else
11947 {
11948 /* STR(1). */
11949 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11950 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11951 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11952 record_buf_mem[0] = 4;
11953 record_buf_mem[1] = u_regval + (immed_5 * 4);
11954 thumb_insn_r->mem_rec_count = 1;
11955 }
11956
11957 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11958 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11959 record_buf_mem);
11960
11961 return 0;
11962 }
11963
11964 /* Handling opcode 100 insns. */
11965
11966 static int
11967 thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
11968 {
11969 struct regcache *reg_cache = thumb_insn_r->regcache;
11970 uint32_t record_buf[8], record_buf_mem[8];
11971
11972 uint32_t reg_src1 = 0;
11973 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
11974
11975 ULONGEST u_regval = 0;
11976
11977 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11978
11979 if (3 == opcode)
11980 {
11981 /* LDR(4). */
11982 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11983 record_buf[0] = reg_src1;
11984 thumb_insn_r->reg_rec_count = 1;
11985 }
11986 else if (1 == opcode)
11987 {
11988 /* LDRH(1). */
11989 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11990 record_buf[0] = reg_src1;
11991 thumb_insn_r->reg_rec_count = 1;
11992 }
11993 else if (2 == opcode)
11994 {
11995 /* STR(3). */
11996 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
11997 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
11998 record_buf_mem[0] = 4;
11999 record_buf_mem[1] = u_regval + (immed_8 * 4);
12000 thumb_insn_r->mem_rec_count = 1;
12001 }
12002 else if (0 == opcode)
12003 {
12004 /* STRH(1). */
12005 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
12006 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12007 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12008 record_buf_mem[0] = 2;
12009 record_buf_mem[1] = u_regval + (immed_5 * 2);
12010 thumb_insn_r->mem_rec_count = 1;
12011 }
12012
12013 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12014 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12015 record_buf_mem);
12016
12017 return 0;
12018 }
12019
12020 /* Handling opcode 101 insns. */
12021
12022 static int
12023 thumb_record_misc (insn_decode_record *thumb_insn_r)
12024 {
12025 struct regcache *reg_cache = thumb_insn_r->regcache;
12026
12027 uint32_t opcode = 0;
12028 uint32_t register_bits = 0, register_count = 0;
12029 uint32_t index = 0, start_address = 0;
12030 uint32_t record_buf[24], record_buf_mem[48];
12031 uint32_t reg_src1;
12032
12033 ULONGEST u_regval = 0;
12034
12035 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12036
12037 if (opcode == 0 || opcode == 1)
12038 {
12039 /* ADR and ADD (SP plus immediate) */
12040
12041 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12042 record_buf[0] = reg_src1;
12043 thumb_insn_r->reg_rec_count = 1;
12044 }
12045 else
12046 {
12047 /* Miscellaneous 16-bit instructions */
12048 uint32_t opcode2 = bits (thumb_insn_r->arm_insn, 8, 11);
12049
12050 switch (opcode2)
12051 {
12052 case 6:
12053 /* SETEND and CPS */
12054 break;
12055 case 0:
12056 /* ADD/SUB (SP plus immediate) */
12057 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12058 record_buf[0] = ARM_SP_REGNUM;
12059 thumb_insn_r->reg_rec_count = 1;
12060 break;
12061 case 1: /* fall through */
12062 case 3: /* fall through */
12063 case 9: /* fall through */
12064 case 11:
12065 /* CBNZ, CBZ */
12066 break;
12067 case 2:
12068 /* SXTH, SXTB, UXTH, UXTB */
12069 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
12070 thumb_insn_r->reg_rec_count = 1;
12071 break;
12072 case 4: /* fall through */
12073 case 5:
12074 /* PUSH. */
12075 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12076 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12077 while (register_bits)
12078 {
12079 if (register_bits & 0x00000001)
12080 register_count++;
12081 register_bits = register_bits >> 1;
12082 }
12083 start_address = u_regval -
12084 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
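/* For example, PUSH {r4, r5} writes two words at SP - 8 and SP - 4,
   which the loop below records from the lowest address upwards.  */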
12085 thumb_insn_r->mem_rec_count = register_count;
12086 while (register_count)
12087 {
12088 record_buf_mem[(register_count * 2) - 1] = start_address;
12089 record_buf_mem[(register_count * 2) - 2] = 4;
12090 start_address = start_address + 4;
12091 register_count--;
12092 }
12093 record_buf[0] = ARM_SP_REGNUM;
12094 thumb_insn_r->reg_rec_count = 1;
12095 break;
12096 case 10:
12097 /* REV, REV16, REVSH */
12098 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
12099 thumb_insn_r->reg_rec_count = 1;
12100 break;
12101 case 12: /* fall through */
12102 case 13:
12103 /* POP. */
12104 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12105 while (register_bits)
12106 {
12107 if (register_bits & 0x00000001)
12108 record_buf[index++] = register_count;
12109 register_bits = register_bits >> 1;
12110 register_count++;
12111 }
12112 record_buf[index++] = ARM_PS_REGNUM;
12113 record_buf[index++] = ARM_SP_REGNUM;
12114 thumb_insn_r->reg_rec_count = index;
12115 break;
12116 case 0xe:
12117 /* BKPT insn. */
12118 /* Handle enhanced software breakpoint insn, BKPT. */
12119 /* BKPT changes the CPSR so that execution continues in ARM state
12120 with normal interrupts disabled, entering abort mode. */
12121 /* The PC is then set according to the high-vector configuration. */
12122 /* If the user hits the breakpoint and then reverse-steps, we need
12123 to go back with the previous CPSR and program counter. */
12124 record_buf[0] = ARM_PS_REGNUM;
12125 record_buf[1] = ARM_LR_REGNUM;
12126 thumb_insn_r->reg_rec_count = 2;
12127 /* We need to save SPSR value, which is not yet done. */
12128 printf_unfiltered (_("Process record does not support instruction "
12129 "0x%0x at address %s.\n"),
12130 thumb_insn_r->arm_insn,
12131 paddress (thumb_insn_r->gdbarch,
12132 thumb_insn_r->this_addr));
12133 return -1;
12134
12135 case 0xf:
12136 /* If-Then, and hints */
12137 break;
12138 default:
12139 return -1;
12140 }
12141 }
12142
12143 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12144 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12145 record_buf_mem);
12146
12147 return 0;
12148 }
12149
12150 /* Handling opcode 110 insns. */
12151
12152 static int
12153 thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
12154 {
12155 struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
12156 struct regcache *reg_cache = thumb_insn_r->regcache;
12157
12158 uint32_t ret = 0; /* Function return value: -1 on record failure, 0 on success. */
12159 uint32_t reg_src1 = 0;
12160 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
12161 uint32_t index = 0, start_address = 0;
12162 uint32_t record_buf[24], record_buf_mem[48];
12163
12164 ULONGEST u_regval = 0;
12165
12166 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12167 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
12168
12169 if (1 == opcode2)
12170 {
12171
12172 /* LDMIA. */
12173 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12174 /* Get Rn. */
12175 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12176 while (register_bits)
12177 {
12178 if (register_bits & 0x00000001)
12179 record_buf[index++] = register_count;
12180 register_bits = register_bits >> 1;
12181 register_count++;
12182 }
12183 record_buf[index++] = reg_src1;
12184 thumb_insn_r->reg_rec_count = index;
12185 }
12186 else if (0 == opcode2)
12187 {
12188 /* Handle STMIA. */
12189 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12190 /* Get Rn. */
12191 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12192 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12193 while (register_bits)
12194 {
12195 if (register_bits & 0x00000001)
12196 register_count++;
12197 register_bits = register_bits >> 1;
12198 }
12199 start_address = u_regval;
12200 thumb_insn_r->mem_rec_count = register_count;
12201 while (register_count)
12202 {
12203 record_buf_mem[(register_count * 2) - 1] = start_address;
12204 record_buf_mem[(register_count * 2) - 2] = 4;
12205 start_address = start_address + 4;
12206 register_count--;
12207 }
12208 }
12209 else if (0x1F == opcode1)
12210 {
12211 /* Handle arm syscall insn. */
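/* In Thumb state the SVC immediate is only 8 bits, so the syscall
   number cannot be encoded in the instruction and is taken from r7,
   as under EABI.  */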
12212 if (tdep->arm_syscall_record != NULL)
12213 {
12214 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
12215 ret = tdep->arm_syscall_record (reg_cache, u_regval);
12216 }
12217 else
12218 {
12219 printf_unfiltered (_("no syscall record support\n"));
12220 return -1;
12221 }
12222 }
12223
12224 /* B(1), the conditional branch, is automatically taken care of in
12225 process_record, as the PC is saved there. */
12226
12227 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12228 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12229 record_buf_mem);
12230
12231 return ret;
12232 }
12233
12234 /* Handling opcode 111 insns. */
12235
12236 static int
12237 thumb_record_branch (insn_decode_record *thumb_insn_r)
12238 {
12239 uint32_t record_buf[8];
12240 uint32_t bits_h = 0;
12241
12242 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
12243
12244 if (2 == bits_h || 3 == bits_h)
12245 {
12246 /* BL */
12247 record_buf[0] = ARM_LR_REGNUM;
12248 thumb_insn_r->reg_rec_count = 1;
12249 }
12250 else if (1 == bits_h)
12251 {
12252 /* BLX(1). */
12253 record_buf[0] = ARM_PS_REGNUM;
12254 record_buf[1] = ARM_LR_REGNUM;
12255 thumb_insn_r->reg_rec_count = 2;
12256 }
12257
12258 /* B(2) is automatically taken care of in process_record, as the PC
12259 is saved there. */
12260
12261 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12262
12263 return 0;
12264 }
12265
12266 /* Handler for thumb2 load/store multiple instructions. */
12267
12268 static int
12269 thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
12270 {
12271 struct regcache *reg_cache = thumb2_insn_r->regcache;
12272
12273 uint32_t reg_rn, op;
12274 uint32_t register_bits = 0, register_count = 0;
12275 uint32_t index = 0, start_address = 0;
12276 uint32_t record_buf[24], record_buf_mem[48];
12277
12278 ULONGEST u_regval = 0;
12279
12280 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12281 op = bits (thumb2_insn_r->arm_insn, 23, 24);
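/* OP (bits 23..24) selects the form: 0 and 3 are SRS/RFE, 1 is the
   increment-after (IA) form of LDM/STM, 2 is the decrement-before
   (DB) form.  */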
12282
12283 if (0 == op || 3 == op)
12284 {
12285 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12286 {
12287 /* Handle RFE instruction. */
12288 record_buf[0] = ARM_PS_REGNUM;
12289 thumb2_insn_r->reg_rec_count = 1;
12290 }
12291 else
12292 {
12293 /* Handle SRS instruction after reading banked SP. */
12294 return arm_record_unsupported_insn (thumb2_insn_r);
12295 }
12296 }
12297 else if (1 == op || 2 == op)
12298 {
12299 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12300 {
12301 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
12302 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12303 while (register_bits)
12304 {
12305 if (register_bits & 0x00000001)
12306 record_buf[index++] = register_count;
12307
12308 register_count++;
12309 register_bits = register_bits >> 1;
12310 }
12311 record_buf[index++] = reg_rn;
12312 record_buf[index++] = ARM_PS_REGNUM;
12313 thumb2_insn_r->reg_rec_count = index;
12314 }
12315 else
12316 {
12317 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
12318 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12319 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12320 while (register_bits)
12321 {
12322 if (register_bits & 0x00000001)
12323 register_count++;
12324
12325 register_bits = register_bits >> 1;
12326 }
12327
12328 if (1 == op)
12329 {
12330 /* Start address calculation for STM/STMIA/STMEA. */
12331 start_address = u_regval;
12332 }
12333 else if (2 == op)
12334 {
12335 /* Start address calculation for STMDB/STMFD. */
12336 start_address = u_regval - register_count * 4;
12337 }
12338
12339 thumb2_insn_r->mem_rec_count = register_count;
12340 while (register_count)
12341 {
12342 record_buf_mem[register_count * 2 - 1] = start_address;
12343 record_buf_mem[register_count * 2 - 2] = 4;
12344 start_address = start_address + 4;
12345 register_count--;
12346 }
12347 record_buf[0] = reg_rn;
12348 record_buf[1] = ARM_PS_REGNUM;
12349 thumb2_insn_r->reg_rec_count = 2;
12350 }
12351 }
12352
12353 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12354 record_buf_mem);
12355 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12356 record_buf);
12357 return ARM_RECORD_SUCCESS;
12358 }
12359
12360 /* Handler for thumb2 load/store (dual/exclusive) and table branch
12361 instructions. */
12362
12363 static int
12364 thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
12365 {
12366 struct regcache *reg_cache = thumb2_insn_r->regcache;
12367
12368 uint32_t reg_rd, reg_rn, offset_imm;
12369 uint32_t reg_dest1, reg_dest2;
12370 uint32_t address, offset_addr;
12371 uint32_t record_buf[8], record_buf_mem[8];
12372 uint32_t op1, op2, op3;
12373
12374 ULONGEST u_regval[2];
12375
12376 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
12377 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
12378 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
12379
12380 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12381 {
12382 if (!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
12383 {
12384 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
12385 record_buf[0] = reg_dest1;
12386 record_buf[1] = ARM_PS_REGNUM;
12387 thumb2_insn_r->reg_rec_count = 2;
12388 }
12389
12390 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
12391 {
12392 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12393 record_buf[2] = reg_dest2;
12394 thumb2_insn_r->reg_rec_count = 3;
12395 }
12396 }
12397 else
12398 {
12399 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12400 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12401
12402 if (0 == op1 && 0 == op2)
12403 {
12404 /* Handle STREX. */
12405 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12406 address = u_regval[0] + (offset_imm * 4);
12407 record_buf_mem[0] = 4;
12408 record_buf_mem[1] = address;
12409 thumb2_insn_r->mem_rec_count = 1;
12410 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12411 record_buf[0] = reg_rd;
12412 thumb2_insn_r->reg_rec_count = 1;
12413 }
12414 else if (1 == op1 && 0 == op2)
12415 {
12416 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12417 record_buf[0] = reg_rd;
12418 thumb2_insn_r->reg_rec_count = 1;
12419 address = u_regval[0];
12420 record_buf_mem[1] = address;
12421
12422 if (4 == op3)
12423 {
12424 /* Handle STREXB. */
12425 record_buf_mem[0] = 1;
12426 thumb2_insn_r->mem_rec_count = 1;
12427 }
12428 else if (5 == op3)
12429 {
12430 /* Handle STREXH. */
12431 record_buf_mem[0] = 2;
12432 thumb2_insn_r->mem_rec_count = 1;
12433 }
12434 else if (7 == op3)
12435 {
12436 /* Handle STREXD. */
12437 address = u_regval[0];
12438 record_buf_mem[0] = 4;
12439 record_buf_mem[2] = 4;
12440 record_buf_mem[3] = address + 4;
12441 thumb2_insn_r->mem_rec_count = 2;
12442 }
12443 }
12444 else
12445 {
12446 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12447
12448 if (bit (thumb2_insn_r->arm_insn, 24))
12449 {
12450 if (bit (thumb2_insn_r->arm_insn, 23))
12451 offset_addr = u_regval[0] + (offset_imm * 4);
12452 else
12453 offset_addr = u_regval[0] - (offset_imm * 4);
12454
12455 address = offset_addr;
12456 }
12457 else
12458 address = u_regval[0];
12459
12460 record_buf_mem[0] = 4;
12461 record_buf_mem[1] = address;
12462 record_buf_mem[2] = 4;
12463 record_buf_mem[3] = address + 4;
12464 thumb2_insn_r->mem_rec_count = 2;
12465 record_buf[0] = reg_rn;
12466 thumb2_insn_r->reg_rec_count = 1;
12467 }
12468 }
12469
12470 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12471 record_buf);
12472 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12473 record_buf_mem);
12474 return ARM_RECORD_SUCCESS;
12475 }
12476
12477 /* Handler for thumb2 data processing (shift register and modified immediate)
12478 instructions. */
12479
12480 static int
12481 thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
12482 {
12483 uint32_t reg_rd, op;
12484 uint32_t record_buf[8];
12485
12486 op = bits (thumb2_insn_r->arm_insn, 21, 24);
12487 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
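/* When OP is 0, 4, 8 or 13 and Rd is 0b1111, the instruction is one
   of TST, TEQ, CMN or CMP, which only update the flags.  */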
12488
12489 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
12490 {
12491 record_buf[0] = ARM_PS_REGNUM;
12492 thumb2_insn_r->reg_rec_count = 1;
12493 }
12494 else
12495 {
12496 record_buf[0] = reg_rd;
12497 record_buf[1] = ARM_PS_REGNUM;
12498 thumb2_insn_r->reg_rec_count = 2;
12499 }
12500
12501 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12502 record_buf);
12503 return ARM_RECORD_SUCCESS;
12504 }
12505
12506 /* Generic handler for thumb2 instructions which effect destination and PS
12507 registers. */
12508
12509 static int
12510 thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
12511 {
12512 uint32_t reg_rd;
12513 uint32_t record_buf[8];
12514
12515 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12516
12517 record_buf[0] = reg_rd;
12518 record_buf[1] = ARM_PS_REGNUM;
12519 thumb2_insn_r->reg_rec_count = 2;
12520
12521 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12522 record_buf);
12523 return ARM_RECORD_SUCCESS;
12524 }
12525
12526 /* Handler for thumb2 branch and miscellaneous control instructions. */
12527
12528 static int
12529 thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
12530 {
12531 uint32_t op, op1, op2;
12532 uint32_t record_buf[8];
12533
12534 op = bits (thumb2_insn_r->arm_insn, 20, 26);
12535 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
12536 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12537
12538 /* Handle MSR insn. */
12539 if (!(op1 & 0x2) && 0x38 == op)
12540 {
12541 if (!(op2 & 0x3))
12542 {
12543 /* CPSR is going to be changed. */
12544 record_buf[0] = ARM_PS_REGNUM;
12545 thumb2_insn_r->reg_rec_count = 1;
12546 }
12547 else
12548 {
12549 arm_record_unsupported_insn (thumb2_insn_r);
12550 return -1;
12551 }
12552 }
12553 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
12554 {
12555 /* BLX. */
12556 record_buf[0] = ARM_PS_REGNUM;
12557 record_buf[1] = ARM_LR_REGNUM;
12558 thumb2_insn_r->reg_rec_count = 2;
12559 }
12560
12561 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12562 record_buf);
12563 return ARM_RECORD_SUCCESS;
12564 }
12565
12566 /* Handler for thumb2 store single data item instructions. */
12567
12568 static int
12569 thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
12570 {
12571 struct regcache *reg_cache = thumb2_insn_r->regcache;
12572
12573 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
12574 uint32_t address, offset_addr;
12575 uint32_t record_buf[8], record_buf_mem[8];
12576 uint32_t op1, op2;
12577
12578 ULONGEST u_regval[2];
12579
12580 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
12581 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
12582 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12583 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12584
12585 if (bit (thumb2_insn_r->arm_insn, 23))
12586 {
12587 /* T2 encoding. */
12588 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
12589 offset_addr = u_regval[0] + offset_imm;
12590 address = offset_addr;
12591 }
12592 else
12593 {
12594 /* T3 encoding. */
12595 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
12596 {
12597 /* Handle STRB (register). */
12598 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
12599 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
12600 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
12601 offset_addr = u_regval[1] << shift_imm;
12602 address = u_regval[0] + offset_addr;
12603 }
12604 else
12605 {
12606 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12607 if (bit (thumb2_insn_r->arm_insn, 10))
12608 {
12609 if (bit (thumb2_insn_r->arm_insn, 9))
12610 offset_addr = u_regval[0] + offset_imm;
12611 else
12612 offset_addr = u_regval[0] - offset_imm;
12613
12614 address = offset_addr;
12615 }
12616 else
12617 address = u_regval[0];
12618 }
12619 }
12620
12621 switch (op1)
12622 {
12623 /* Store byte instructions. */
12624 case 4:
12625 case 0:
12626 record_buf_mem[0] = 1;
12627 break;
12628 /* Store half word instructions. */
12629 case 1:
12630 case 5:
12631 record_buf_mem[0] = 2;
12632 break;
12633 /* Store word instructions. */
12634 case 2:
12635 case 6:
12636 record_buf_mem[0] = 4;
12637 break;
12638
12639 default:
12640 gdb_assert_not_reached ("no decoding pattern found");
12641 break;
12642 }
12643
12644 record_buf_mem[1] = address;
12645 thumb2_insn_r->mem_rec_count = 1;
12646 record_buf[0] = reg_rn;
12647 thumb2_insn_r->reg_rec_count = 1;
12648
12649 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12650 record_buf);
12651 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12652 record_buf_mem);
12653 return ARM_RECORD_SUCCESS;
12654 }
12655
12656 /* Handler for thumb2 load memory hints instructions. */
12657
12658 static int
12659 thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
12660 {
12661 uint32_t record_buf[8];
12662 uint32_t reg_rt, reg_rn;
12663
12664 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
12665 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12666
12667 if (ARM_PC_REGNUM != reg_rt)
12668 {
12669 record_buf[0] = reg_rt;
12670 record_buf[1] = reg_rn;
12671 record_buf[2] = ARM_PS_REGNUM;
12672 thumb2_insn_r->reg_rec_count = 3;
12673
12674 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12675 record_buf);
12676 return ARM_RECORD_SUCCESS;
12677 }
12678
12679 return ARM_RECORD_FAILURE;
12680 }
12681
12682 /* Handler for thumb2 load word instructions. */
12683
12684 static int
12685 thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
12686 {
12687 uint32_t record_buf[8];
12688
12689 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
12690 record_buf[1] = ARM_PS_REGNUM;
12691 thumb2_insn_r->reg_rec_count = 2;
12692
12693 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12694 record_buf);
12695 return ARM_RECORD_SUCCESS;
12696 }
12697
12698 /* Handler for thumb2 long multiply, long multiply accumulate, and
12699 divide instructions. */
12700
12701 static int
12702 thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
12703 {
12704 uint32_t opcode1 = 0, opcode2 = 0;
12705 uint32_t record_buf[8];
12706
12707 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
12708 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
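/* OPCODE1 (bits 20..22) selects the operation: 0 SMULL, 2 UMULL,
   1 and 3 the divides, 4..6 the long multiply-accumulate variants.
   OPCODE2 (bits 4..7) further distinguishes accumulate forms.  */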
12709
12710 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
12711 {
12712 /* Handle the long multiplies and multiply-accumulates:
12713 SMULL, UMULL, SMLAL, UMLAL and their variants. */
12714 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12715 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12716 record_buf[2] = ARM_PS_REGNUM;
12717 thumb2_insn_r->reg_rec_count = 3;
12718 }
12719 else if (1 == opcode1 || 3 == opcode1)
12720 {
12721 /* Handle SDIV (OPCODE1 == 1) and UDIV (OPCODE1 == 3). */
12722 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12723 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12724 record_buf[2] = ARM_PS_REGNUM;
12725 thumb2_insn_r->reg_rec_count = 3;
12726 }
12727 else
12728 return ARM_RECORD_FAILURE;
12729
12730 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12731 record_buf);
12732 return ARM_RECORD_SUCCESS;
12733 }
12734
12735 /* Record handler for thumb32 coprocessor instructions. */
12736
12737 static int
12738 thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
12739 {
12740 if (bit (thumb2_insn_r->arm_insn, 25))
12741 return arm_record_coproc_data_proc (thumb2_insn_r);
12742 else
12743 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
12744 }
12745
12746 /* Record handler for Advanced SIMD structure load/store instructions. */
12747
12748 static int
12749 thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
12750 {
12751 struct regcache *reg_cache = thumb2_insn_r->regcache;
12752 uint32_t l_bit, a_bit, b_bits;
12753 uint32_t record_buf[128], record_buf_mem[128];
12754 uint32_t reg_rn, reg_vd, address, f_elem;
12755 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
12756 uint8_t f_ebytes;
12757
12758 l_bit = bit (thumb2_insn_r->arm_insn, 21);
12759 a_bit = bit (thumb2_insn_r->arm_insn, 23);
12760 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
12761 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12762 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
12763 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
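  /* Bits 6-7 encode the element size in bytes; a 64-bit D register therefore
     holds 8 / f_ebytes elements.  */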
12764 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
12765 f_elem = 8 / f_ebytes;
12766
12767 if (!l_bit)
12768 {
12769 ULONGEST u_regval = 0;
12770 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12771 address = u_regval;
12772
12773 if (!a_bit)
12774 {
12775 /* Handle VST1. */
12776 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12777 {
12778 if (b_bits == 0x07)
12779 bf_regs = 1;
12780 else if (b_bits == 0x0a)
12781 bf_regs = 2;
12782 else if (b_bits == 0x06)
12783 bf_regs = 3;
12784 else if (b_bits == 0x02)
12785 bf_regs = 4;
12786 else
12787 bf_regs = 0;
12788
12789 for (index_r = 0; index_r < bf_regs; index_r++)
12790 {
12791 for (index_e = 0; index_e < f_elem; index_e++)
12792 {
12793 record_buf_mem[index_m++] = f_ebytes;
12794 record_buf_mem[index_m++] = address;
12795 address = address + f_ebytes;
12796 thumb2_insn_r->mem_rec_count += 1;
12797 }
12798 }
12799 }
12800 /* Handle VST2. */
12801 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12802 {
12803 if (b_bits == 0x09 || b_bits == 0x08)
12804 bf_regs = 1;
12805 else if (b_bits == 0x03)
12806 bf_regs = 2;
12807 else
12808 bf_regs = 0;
12809
12810 for (index_r = 0; index_r < bf_regs; index_r++)
12811 for (index_e = 0; index_e < f_elem; index_e++)
12812 {
12813 for (loop_t = 0; loop_t < 2; loop_t++)
12814 {
12815 record_buf_mem[index_m++] = f_ebytes;
12816 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12817 thumb2_insn_r->mem_rec_count += 1;
12818 }
12819 address = address + (2 * f_ebytes);
12820 }
12821 }
12822 /* Handle VST3. */
12823 else if ((b_bits & 0x0e) == 0x04)
12824 {
12825 for (index_e = 0; index_e < f_elem; index_e++)
12826 {
12827 for (loop_t = 0; loop_t < 3; loop_t++)
12828 {
12829 record_buf_mem[index_m++] = f_ebytes;
12830 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12831 thumb2_insn_r->mem_rec_count += 1;
12832 }
12833 address = address + (3 * f_ebytes);
12834 }
12835 }
12836 /* Handle VST4. */
12837 else if (!(b_bits & 0x0e))
12838 {
12839 for (index_e = 0; index_e < f_elem; index_e++)
12840 {
12841 for (loop_t = 0; loop_t < 4; loop_t++)
12842 {
12843 record_buf_mem[index_m++] = f_ebytes;
12844 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12845 thumb2_insn_r->mem_rec_count += 1;
12846 }
12847 address = address + (4 * f_ebytes);
12848 }
12849 }
12850 }
12851 else
12852 {
12853 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
12854
12855 if (bft_size == 0x00)
12856 f_ebytes = 1;
12857 else if (bft_size == 0x01)
12858 f_ebytes = 2;
12859 else if (bft_size == 0x02)
12860 f_ebytes = 4;
12861 else
12862 f_ebytes = 0;
12863
12864 /* Handle VST1. */
12865 if (!(b_bits & 0x0b) || b_bits == 0x08)
12866 thumb2_insn_r->mem_rec_count = 1;
12867 /* Handle VST2. */
12868 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
12869 thumb2_insn_r->mem_rec_count = 2;
12870 /* Handle VST3. */
12871 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
12872 thumb2_insn_r->mem_rec_count = 3;
12873 /* Handle VST4. */
12874 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
12875 thumb2_insn_r->mem_rec_count = 4;
12876
12877 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
12878 {
12879 record_buf_mem[index_m * 2] = f_ebytes;
12880 record_buf_mem[index_m * 2 + 1] = address + (index_m * f_ebytes);
12881 }
12882 }
12883 }
12884 else
12885 {
12886 if (!a_bit)
12887 {
12888 /* Handle VLD1. */
12889 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12890 thumb2_insn_r->reg_rec_count = 1;
12891 /* Handle VLD2. */
12892 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12893 thumb2_insn_r->reg_rec_count = 2;
12894 /* Handle VLD3. */
12895 else if ((b_bits & 0x0e) == 0x04)
12896 thumb2_insn_r->reg_rec_count = 3;
12897 /* Handle VLD4. */
12898 else if (!(b_bits & 0x0e))
12899 thumb2_insn_r->reg_rec_count = 4;
12900 }
12901 else
12902 {
12903 /* Handle VLD1. */
12904 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
12905 thumb2_insn_r->reg_rec_count = 1;
12906 /* Handle VLD2. */
12907 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
12908 thumb2_insn_r->reg_rec_count = 2;
12909 /* Handle VLD3. */
12910 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
12911 thumb2_insn_r->reg_rec_count = 3;
12912 /* Handle VLD4. */
12913 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
12914 thumb2_insn_r->reg_rec_count = 4;
12915
12916 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
12917 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
12918 }
12919 }
12920
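  /* Bits 0-3 hold Rm; any value other than PC means the base register Rn is
     written back, so record it as well.  */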
12921 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
12922 {
12923 record_buf[index_r] = reg_rn;
12924 thumb2_insn_r->reg_rec_count += 1;
12925 }
12926
12927 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12928 record_buf);
12929 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12930 record_buf_mem);
12931 return ARM_RECORD_SUCCESS;
12932 }
12933
12934 /* Decodes thumb2 instruction type and invokes its record handler. */
12935
12936 static int
12937 thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
12938 {
12939 uint32_t op, op1, op2;
12940
12941 op = bit (thumb2_insn_r->arm_insn, 15);
12942 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
12943 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
12944
12945 if (op1 == 0x01)
12946 {
12947 if (!(op2 & 0x64))
12948 {
12949 /* Load/store multiple instruction. */
12950 return thumb2_record_ld_st_multiple (thumb2_insn_r);
12951 }
12952 else if ((op2 & 0x64) == 0x4)
12953 {
12954 /* Load/store (dual/exclusive) and table branch instruction. */
12955 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
12956 }
12957 else if ((op2 & 0x60) == 0x20)
12958 {
12959 /* Data-processing (shifted register). */
12960 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12961 }
12962 else if (op2 & 0x40)
12963 {
12964 /* Co-processor instructions. */
12965 return thumb2_record_coproc_insn (thumb2_insn_r);
12966 }
12967 }
12968 else if (op1 == 0x02)
12969 {
12970 if (op)
12971 {
12972 /* Branches and miscellaneous control instructions. */
12973 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
12974 }
12975 else if (op2 & 0x20)
12976 {
12977 /* Data-processing (plain binary immediate) instruction. */
12978 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12979 }
12980 else
12981 {
12982 /* Data-processing (modified immediate). */
12983 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12984 }
12985 }
12986 else if (op1 == 0x03)
12987 {
12988 if (!(op2 & 0x71))
12989 {
12990 /* Store single data item. */
12991 return thumb2_record_str_single_data (thumb2_insn_r);
12992 }
12993 else if (!((op2 & 0x71) ^ 0x10))
12994 {
12995 /* Advanced SIMD or structure load/store instructions. */
12996 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
12997 }
12998 else if (!((op2 & 0x67) ^ 0x01))
12999 {
13000 /* Load byte, memory hints instruction. */
13001 return thumb2_record_ld_mem_hints (thumb2_insn_r);
13002 }
13003 else if (!((op2 & 0x67) ^ 0x03))
13004 {
13005 /* Load halfword, memory hints instruction. */
13006 return thumb2_record_ld_mem_hints (thumb2_insn_r);
13007 }
13008 else if (!((op2 & 0x67) ^ 0x05))
13009 {
13010 /* Load word instruction. */
13011 return thumb2_record_ld_word (thumb2_insn_r);
13012 }
13013 else if (!((op2 & 0x70) ^ 0x20))
13014 {
13015 /* Data-processing (register) instruction. */
13016 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13017 }
13018 else if (!((op2 & 0x78) ^ 0x30))
13019 {
13020 /* Multiply, multiply accumulate, abs diff instruction. */
13021 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13022 }
13023 else if (!((op2 & 0x78) ^ 0x38))
13024 {
13025 /* Long multiply, long multiply accumulate, and divide. */
13026 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
13027 }
13028 else if (op2 & 0x40)
13029 {
13030 /* Co-processor instructions. */
13031 return thumb2_record_coproc_insn (thumb2_insn_r);
13032 }
13033 }
13034
13035 return -1;
13036 }
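
/* Worked example (the insn used by the Thumb-2 self test below): the MRC
   halfwords 0xee1d 0x7f70 become 0xee1d7f70 after the halfword swap, giving
   op1 (bits 27-28) = 0x1 and op2 (bits 20-26) = 0x61.  The op1 == 0x01
   branch then takes its "op2 & 0x40" arm, so the insn is dispatched to
   thumb2_record_coproc_insn.  */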
13037
13038 namespace {
13039 /* Abstract memory reader. */
13040
13041 class abstract_memory_reader
13042 {
13043 public:
13044 /* Read LEN bytes of target memory at address MEMADDR, placing the
13045 results in GDB's memory at BUF. Return true on success. */
13046
13047 virtual bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) = 0;
13048 };
13049
13050 /* Instruction reader from real target. */
13051
13052 class instruction_reader : public abstract_memory_reader
13053 {
13054 public:
13055 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
13056 {
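    /* target_read_memory returns zero on success, so invert its result to
       get the boolean this interface expects.  */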
13057 if (target_read_memory (memaddr, buf, len))
13058 return false;
13059 else
13060 return true;
13061 }
13062 };
13063
13064 } // namespace
13065
13066 /* Extract an arm/thumb/thumb2 insn of INSN_SIZE bytes. Return 0 on success
13067    and a positive value on failure. */
13068
13069 static int
13070 extract_arm_insn (abstract_memory_reader& reader,
13071 insn_decode_record *insn_record, uint32_t insn_size)
13072 {
13073 gdb_byte buf[insn_size];
13074
13075 memset (&buf[0], 0, insn_size);
13076
13077 if (!reader.read (insn_record->this_addr, buf, insn_size))
13078 return 1;
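  /* Use the byte order for code: on ARM BE8 targets instructions are stored
     little-endian even though data is big-endian.  */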
13079 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
13080 insn_size,
13081 gdbarch_byte_order_for_code (insn_record->gdbarch));
13082 return 0;
13083 }
13084
13085 typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
13086
13087 /* Decode an arm/thumb insn depending on its condition codes and opcodes, and
13088    dispatch it to the matching record handler. */
13089
13090 static int
13091 decode_insn (abstract_memory_reader &reader, insn_decode_record *arm_record,
13092 record_type_t record_type, uint32_t insn_size)
13093 {
13094
13095 /* Bits 25, 26, 27 (counting from bit 0) decode the type of an arm
13096    instruction. */
13097 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
13098 {
13099 arm_record_data_proc_misc_ld_str, /* 000. */
13100 arm_record_data_proc_imm, /* 001. */
13101 arm_record_ld_st_imm_offset, /* 010. */
13102 arm_record_ld_st_reg_offset, /* 011. */
13103 arm_record_ld_st_multiple, /* 100. */
13104 arm_record_b_bl, /* 101. */
13105 arm_record_asimd_vfp_coproc, /* 110. */
13106 arm_record_coproc_data_proc /* 111. */
13107 };
13108
13109 /* Bits 13, 14, 15 (counting from bit 0) decode the type of a thumb
13110    instruction. */
13111 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
13112 {
13113 thumb_record_shift_add_sub, /* 000. */
13114 thumb_record_add_sub_cmp_mov, /* 001. */
13115 thumb_record_ld_st_reg_offset, /* 010. */
13116 thumb_record_ld_st_imm_offset, /* 011. */
13117 thumb_record_ld_st_stack, /* 100. */
13118 thumb_record_misc, /* 101. */
13119 thumb_record_ldm_stm_swi, /* 110. */
13120 thumb_record_branch /* 111. */
13121 };
13122
13123 int ret = 0; /* Return value: -1 on failure, 0 on success. */
13124 uint32_t insn_id = 0;
13125
13126 if (extract_arm_insn (reader, arm_record, insn_size))
13127 {
13128 if (record_debug)
13129 {
13130 printf_unfiltered (_("Process record: error reading memory at "
13131 "addr %s len = %d.\n"),
13132 paddress (arm_record->gdbarch,
13133 arm_record->this_addr), insn_size);
13134 }
13135 return -1;
13136 }
13137 else if (ARM_RECORD == record_type)
13138 {
13139 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
13140 insn_id = bits (arm_record->arm_insn, 25, 27);
13141
13142 if (arm_record->cond == 0xf)
13143 ret = arm_record_extension_space (arm_record);
13144 else
13145 {
13146 /* If this insn has fallen into extension space
13147 then we need not decode it anymore. */
13148 ret = arm_handle_insn[insn_id] (arm_record);
13149 }
13150 if (ret != ARM_RECORD_SUCCESS)
13151 {
13152 arm_record_unsupported_insn (arm_record);
13153 ret = -1;
13154 }
13155 }
13156 else if (THUMB_RECORD == record_type)
13157 {
13158 /* Thumb has no condition codes, so mark cond as invalid. */
13159 arm_record->cond = -1;
13160 insn_id = bits (arm_record->arm_insn, 13, 15);
13161 ret = thumb_handle_insn[insn_id] (arm_record);
13162 if (ret != ARM_RECORD_SUCCESS)
13163 {
13164 arm_record_unsupported_insn (arm_record);
13165 ret = -1;
13166 }
13167 }
13168 else if (THUMB2_RECORD == record_type)
13169 {
13170 /* Thumb has no condition codes, so mark cond as invalid. */
13171 arm_record->cond = -1;
13172
13173 /* Swap the first halfword of the 32-bit thumb instruction with the second. */
13174 arm_record->arm_insn
13175 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
13176
13177 ret = thumb2_record_decode_insn_handler (arm_record);
13178
13179 if (ret != ARM_RECORD_SUCCESS)
13180 {
13181 arm_record_unsupported_insn (arm_record);
13182 ret = -1;
13183 }
13184 }
13185 else
13186 {
13187 /* Throw assertion. */
13188 gdb_assert_not_reached ("not a valid instruction, could not decode");
13189 }
13190
13191 return ret;
13192 }
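
/* For example, the 16-bit Thumb insn 0xb2db (uxtb r3, r3) used by the self
   test below has bits 13-15 equal to 0b101, so the THUMB_RECORD path
   dispatches it to thumb_record_misc; the self test then checks that only
   r3 ends up recorded.  */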
13193
13194 #if GDB_SELF_TEST
13195 namespace selftests {
13196
13197 /* Provide both 16-bit and 32-bit thumb instructions. */
13198
13199 class instruction_reader_thumb : public abstract_memory_reader
13200 {
13201 public:
13202 template<size_t SIZE>
13203 instruction_reader_thumb (enum bfd_endian endian,
13204 const uint16_t (&insns)[SIZE])
13205 : m_endian (endian), m_insns (insns), m_insns_size (SIZE)
13206 {}
13207
13208 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
13209 {
13210 SELF_CHECK (len == 4 || len == 2);
13211 SELF_CHECK (memaddr % 2 == 0);
13212 SELF_CHECK ((memaddr / 2) < m_insns_size);
13213
13214 store_unsigned_integer (buf, 2, m_endian, m_insns[memaddr / 2]);
13215 if (len == 4)
13216 {
13217 store_unsigned_integer (&buf[2], 2, m_endian,
13218 m_insns[memaddr / 2 + 1]);
13219 }
13220 return true;
13221 }
13222
13223 private:
13224 enum bfd_endian m_endian;
13225 const uint16_t *m_insns;
13226 size_t m_insns_size;
13227 };
13228
13229 static void
13230 arm_record_test (void)
13231 {
13232 struct gdbarch_info info;
13233 gdbarch_info_init (&info);
13234 info.bfd_arch_info = bfd_scan_arch ("arm");
13235
13236 struct gdbarch *gdbarch = gdbarch_find_by_info (info);
13237
13238 SELF_CHECK (gdbarch != NULL);
13239
13240 /* 16-bit Thumb instructions. */
13241 {
13242 insn_decode_record arm_record;
13243
13244 memset (&arm_record, 0, sizeof (insn_decode_record));
13245 arm_record.gdbarch = gdbarch;
13246
13247 static const uint16_t insns[] = {
13248 /* db b2 uxtb r3, r3 */
13249 0xb2db,
13250 /* cd 58 ldr r5, [r1, r3] */
13251 0x58cd,
13252 };
13253
13254 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
13255 instruction_reader_thumb reader (endian, insns);
13256 int ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13257 THUMB_INSN_SIZE_BYTES);
13258
13259 SELF_CHECK (ret == 0);
13260 SELF_CHECK (arm_record.mem_rec_count == 0);
13261 SELF_CHECK (arm_record.reg_rec_count == 1);
13262 SELF_CHECK (arm_record.arm_regs[0] == 3);
13263
13264 arm_record.this_addr += 2;
13265 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13266 THUMB_INSN_SIZE_BYTES);
13267
13268 SELF_CHECK (ret == 0);
13269 SELF_CHECK (arm_record.mem_rec_count == 0);
13270 SELF_CHECK (arm_record.reg_rec_count == 1);
13271 SELF_CHECK (arm_record.arm_regs[0] == 5);
13272 }
13273
13274 /* 32-bit Thumb-2 instructions. */
13275 {
13276 insn_decode_record arm_record;
13277
13278 memset (&arm_record, 0, sizeof (insn_decode_record));
13279 arm_record.gdbarch = gdbarch;
13280
13281 static const uint16_t insns[] = {
13282 /* 1d ee 70 7f mrc 15, 0, r7, cr13, cr0, {3} */
13283 0xee1d, 0x7f70,
13284 };
13285
13286 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
13287 instruction_reader_thumb reader (endian, insns);
13288 int ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
13289 THUMB2_INSN_SIZE_BYTES);
13290
13291 SELF_CHECK (ret == 0);
13292 SELF_CHECK (arm_record.mem_rec_count == 0);
13293 SELF_CHECK (arm_record.reg_rec_count == 1);
13294 SELF_CHECK (arm_record.arm_regs[0] == 7);
13295 }
13296 }
13297
13298 /* Instruction reader from manually cooked instruction sequences. */
13299
13300 class test_arm_instruction_reader : public arm_instruction_reader
13301 {
13302 public:
13303 explicit test_arm_instruction_reader (gdb::array_view<const uint32_t> insns)
13304 : m_insns (insns)
13305 {}
13306
13307 uint32_t read (CORE_ADDR memaddr, enum bfd_endian byte_order) const override
13308 {
13309 SELF_CHECK (memaddr % 4 == 0);
13310 SELF_CHECK (memaddr / 4 < m_insns.size ());
13311
13312 return m_insns[memaddr / 4];
13313 }
13314
13315 private:
13316 const gdb::array_view<const uint32_t> m_insns;
13317 };
13318
13319 static void
13320 arm_analyze_prologue_test ()
13321 {
13322 for (bfd_endian endianness : {BFD_ENDIAN_LITTLE, BFD_ENDIAN_BIG})
13323 {
13324 struct gdbarch_info info;
13325 gdbarch_info_init (&info);
13326 info.byte_order = endianness;
13327 info.byte_order_for_code = endianness;
13328 info.bfd_arch_info = bfd_scan_arch ("arm");
13329
13330 struct gdbarch *gdbarch = gdbarch_find_by_info (info);
13331
13332 SELF_CHECK (gdbarch != NULL);
13333
13334 /* The "sub" instruction contains an immediate value rotate count of 0,
13335 which resulted in a 32-bit shift of a 32-bit value, caught by
13336 UBSan. */
13337 const uint32_t insns[] = {
13338 0xe92d4ff0, /* push {r4, r5, r6, r7, r8, r9, sl, fp, lr} */
13339 0xe1a05000, /* mov r5, r0 */
13340 0xe5903020, /* ldr r3, [r0, #32] */
13341 0xe24dd044, /* sub sp, sp, #68 ; 0x44 */
13342 };
13343
13344 test_arm_instruction_reader mem_reader (insns);
13345 arm_prologue_cache cache;
13346 cache.saved_regs = trad_frame_alloc_saved_regs (gdbarch);
13347
13348 arm_analyze_prologue (gdbarch, 0, sizeof (insns) - 1, &cache, mem_reader);
13349 }
13350 }
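
/* These tests are expected to be registered from this file's _initialize
   routine via selftests::register_test, after which they can be run with
   "maintenance selftest".  */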
13351
13352 } // namespace selftests
13353 #endif /* GDB_SELF_TEST */
13354
13355 /* Cleans up local record registers and memory allocations. */
13356
13357 static void
13358 deallocate_reg_mem (insn_decode_record *record)
13359 {
13360 xfree (record->arm_regs);
13361 xfree (record->arm_mems);
13362 }
13363
13364
13365 /* Parse the current instruction, and record the values of the registers and
13366    memory that will be changed by it into the record_arch_list.
13367    Return -1 if something goes wrong. */
13368
13369 int
13370 arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
13371 CORE_ADDR insn_addr)
13372 {
13373
13374 uint32_t no_of_rec = 0;
13375 uint32_t ret = 0; /* return value: -1:record failure ; 0:success */
13376 ULONGEST t_bit = 0, insn_id = 0;
13377
13378 ULONGEST u_regval = 0;
13379
13380 insn_decode_record arm_record;
13381
13382 memset (&arm_record, 0, sizeof (insn_decode_record));
13383 arm_record.regcache = regcache;
13384 arm_record.this_addr = insn_addr;
13385 arm_record.gdbarch = gdbarch;
13386
13387
13388 if (record_debug > 1)
13389 {
13390 fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
13391 "addr = %s\n",
13392 paddress (gdbarch, arm_record.this_addr));
13393 }
13394
13395 instruction_reader reader;
13396 if (extract_arm_insn (reader, &arm_record, 2))
13397 {
13398 if (record_debug)
13399 {
13400 printf_unfiltered (_("Process record: error reading memory at "
13401 "addr %s len = %d.\n"),
13402 paddress (arm_record.gdbarch,
13403 arm_record.this_addr), 2);
13404 }
13405 return -1;
13406 }
13407
13408 /* Check whether the insn is a thumb or an arm one. */
13409
13410 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
13411 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
13412
13413
13414 if (!(u_regval & t_bit))
13415 {
13416 /* We are decoding arm insn. */
13417 ret = decode_insn (reader, &arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
13418 }
13419 else
13420 {
13421 insn_id = bits (arm_record.arm_insn, 11, 15);
13422 /* Is it a 32-bit thumb2 insn? (first halfword starts with 0b11101, 0b11110 or 0b11111). */
13423 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
13424 {
13425 ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
13426 THUMB2_INSN_SIZE_BYTES);
13427 }
13428 else
13429 {
13430 /* We are decoding thumb insn. */
13431 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13432 THUMB_INSN_SIZE_BYTES);
13433 }
13434 }
13435
13436 if (0 == ret)
13437 {
13438 /* Record registers. */
13439 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
13440 if (arm_record.arm_regs)
13441 {
13442 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
13443 {
13444 if (record_full_arch_list_add_reg
13445 (arm_record.regcache , arm_record.arm_regs[no_of_rec]))
13446 ret = -1;
13447 }
13448 }
13449 /* Record memories. */
13450 if (arm_record.arm_mems)
13451 {
13452 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
13453 {
13454 if (record_full_arch_list_add_mem
13455 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
13456 arm_record.arm_mems[no_of_rec].len))
13457 ret = -1;
13458 }
13459 }
13460
13461 if (record_full_arch_list_add_end ())
13462 ret = -1;
13463 }
13464
13465
13466 deallocate_reg_mem (&arm_record);
13467
13468 return ret;
13469 }
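
/* arm_process_record is the gdbarch process_record hook; it is presumably
   installed with set_gdbarch_process_record () during gdbarch initialization
   so that "record full" can log each instruction's side effects before the
   instruction executes.  */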
13470
13471 /* See arm-tdep.h. */
13472
13473 const target_desc *
13474 arm_read_description (arm_fp_type fp_type)
13475 {
13476 struct target_desc *tdesc = tdesc_arm_list[fp_type];
13477
13478 if (tdesc == nullptr)
13479 {
13480 tdesc = arm_create_target_description (fp_type);
13481 tdesc_arm_list[fp_type] = tdesc;
13482 }
13483
13484 return tdesc;
13485 }
13486
13487 /* See arm-tdep.h. */
13488
13489 const target_desc *
13490 arm_read_mprofile_description (arm_m_profile_type m_type)
13491 {
13492 struct target_desc *tdesc = tdesc_arm_mprofile_list[m_type];
13493
13494 if (tdesc == nullptr)
13495 {
13496 tdesc = arm_create_mprofile_target_description (m_type);
13497 tdesc_arm_mprofile_list[m_type] = tdesc;
13498 }
13499
13500 return tdesc;
13501 }