1 /* Common target dependent code for GDB on ARM systems.
2
3 Copyright (C) 1988-2020 Free Software Foundation, Inc.
4
5 This file is part of GDB.
6
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
11
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
19
20 #include "defs.h"
21
22 #include <ctype.h> /* XXX for isupper (). */
23
24 #include "frame.h"
25 #include "inferior.h"
26 #include "infrun.h"
27 #include "gdbcmd.h"
28 #include "gdbcore.h"
29 #include "dis-asm.h" /* For register styles. */
30 #include "disasm.h"
31 #include "regcache.h"
32 #include "reggroups.h"
33 #include "target-float.h"
34 #include "value.h"
35 #include "arch-utils.h"
36 #include "osabi.h"
37 #include "frame-unwind.h"
38 #include "frame-base.h"
39 #include "trad-frame.h"
40 #include "objfiles.h"
41 #include "dwarf2/frame.h"
42 #include "gdbtypes.h"
43 #include "prologue-value.h"
44 #include "remote.h"
45 #include "target-descriptions.h"
46 #include "user-regs.h"
47 #include "observable.h"
48 #include "count-one-bits.h"
49
50 #include "arch/arm.h"
51 #include "arch/arm-get-next-pcs.h"
52 #include "arm-tdep.h"
53 #include "gdb/sim-arm.h"
54
55 #include "elf-bfd.h"
56 #include "coff/internal.h"
57 #include "elf/arm.h"
58
59 #include "record.h"
60 #include "record-full.h"
61 #include <algorithm>
62
63 #include "producer.h"
64
65 #if GDB_SELF_TEST
66 #include "gdbsupport/selftest.h"
67 #endif
68
69 static bool arm_debug;
70
71 /* Macros for setting and testing a bit in a minimal symbol that marks
72 it as a Thumb function. The MSB of the minimal symbol's "info" field
73 is used for this purpose.
74
75 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
76 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
77
78 #define MSYMBOL_SET_SPECIAL(msym) \
79 MSYMBOL_TARGET_FLAG_1 (msym) = 1
80
81 #define MSYMBOL_IS_SPECIAL(msym) \
82 MSYMBOL_TARGET_FLAG_1 (msym)
83
84 struct arm_mapping_symbol
85 {
86 CORE_ADDR value;
87 char type;
88
89 bool operator< (const arm_mapping_symbol &other) const
90 { return this->value < other.value; }
91 };
92
93 typedef std::vector<arm_mapping_symbol> arm_mapping_symbol_vec;
94
95 struct arm_per_bfd
96 {
97 explicit arm_per_bfd (size_t num_sections)
98 : section_maps (new arm_mapping_symbol_vec[num_sections]),
99 section_maps_sorted (new bool[num_sections] ())
100 {}
101
102 DISABLE_COPY_AND_ASSIGN (arm_per_bfd);
103
104 /* Information about mapping symbols ($a, $d, $t) in the objfile.
105
106 The format is an array of vectors of arm_mapping_symbols; there is one
107 vector for each section of the objfile (the array is indexed by BFD
108 section index).
109
110 For each section, the vector of arm_mapping_symbol is sorted by
111 symbol value (address). */
112 std::unique_ptr<arm_mapping_symbol_vec[]> section_maps;
113
114 /* For each corresponding element of section_maps above, indicates whether
115 that vector has been sorted. */
116 std::unique_ptr<bool[]> section_maps_sorted;
117 };
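/* As a concrete example of the layout described above: a section whose
   sorted arm_mapping_symbol_vec contains

     { value = 0x00, type = 'a' }   ARM code starting at section offset 0x00
     { value = 0x40, type = 'd' }   literal pool (data) starting at 0x40
     { value = 0x48, type = 't' }   Thumb code starting at 0x48

   means that an address at section offset 0x44 is covered by the '$d'
   mapping symbol, while an address at offset 0x50 is covered by '$t'
   and therefore contains Thumb code.  */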
118
119 /* Per-bfd data used for mapping symbols. */
120 static bfd_key<arm_per_bfd> arm_bfd_data_key;
121
122 /* The list of available "set arm ..." and "show arm ..." commands. */
123 static struct cmd_list_element *setarmcmdlist = NULL;
124 static struct cmd_list_element *showarmcmdlist = NULL;
125
126 /* The type of floating-point to use. Keep this in sync with enum
127 arm_float_model, and the help string in _initialize_arm_tdep. */
128 static const char *const fp_model_strings[] =
129 {
130 "auto",
131 "softfpa",
132 "fpa",
133 "softvfp",
134 "vfp",
135 NULL
136 };
137
138 /* A variable that can be configured by the user. */
139 static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
140 static const char *current_fp_model = "auto";
141
142 /* The ABI to use. Keep this in sync with arm_abi_kind. */
143 static const char *const arm_abi_strings[] =
144 {
145 "auto",
146 "APCS",
147 "AAPCS",
148 NULL
149 };
150
151 /* A variable that can be configured by the user. */
152 static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
153 static const char *arm_abi_string = "auto";
154
155 /* The execution mode to assume. */
156 static const char *const arm_mode_strings[] =
157 {
158 "auto",
159 "arm",
160 "thumb",
161 NULL
162 };
163
164 static const char *arm_fallback_mode_string = "auto";
165 static const char *arm_force_mode_string = "auto";
166
167 /* The standard register names, and all the valid aliases for them. Note
168 that `fp', `sp' and `pc' are not added in this alias list, because they
169 have been added as builtin user registers in
170 std-regs.c:_initialize_frame_reg. */
171 static const struct
172 {
173 const char *name;
174 int regnum;
175 } arm_register_aliases[] = {
176 /* Basic register numbers. */
177 { "r0", 0 },
178 { "r1", 1 },
179 { "r2", 2 },
180 { "r3", 3 },
181 { "r4", 4 },
182 { "r5", 5 },
183 { "r6", 6 },
184 { "r7", 7 },
185 { "r8", 8 },
186 { "r9", 9 },
187 { "r10", 10 },
188 { "r11", 11 },
189 { "r12", 12 },
190 { "r13", 13 },
191 { "r14", 14 },
192 { "r15", 15 },
193 /* Synonyms (argument and variable registers). */
194 { "a1", 0 },
195 { "a2", 1 },
196 { "a3", 2 },
197 { "a4", 3 },
198 { "v1", 4 },
199 { "v2", 5 },
200 { "v3", 6 },
201 { "v4", 7 },
202 { "v5", 8 },
203 { "v6", 9 },
204 { "v7", 10 },
205 { "v8", 11 },
206 /* Other platform-specific names for r9. */
207 { "sb", 9 },
208 { "tr", 9 },
209 /* Special names. */
210 { "ip", 12 },
211 { "lr", 14 },
212 /* Names used by GCC (not listed in the ARM EABI). */
213 { "sl", 10 },
214 /* A special name from the older ATPCS. */
215 { "wr", 7 },
216 };
217
218 static const char *const arm_register_names[] =
219 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
220 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
221 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
222 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
223 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
224 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
225 "fps", "cpsr" }; /* 24 25 */
226
227 /* Holds the current set of options to be passed to the disassembler. */
228 static char *arm_disassembler_options;
229
230 /* Valid register name styles. */
231 static const char **valid_disassembly_styles;
232
233 /* Disassembly style to use. Default to "std" register names. */
234 static const char *disassembly_style;
235
236 /* All possible arm target descriptors. */
237 static struct target_desc *tdesc_arm_list[ARM_FP_TYPE_INVALID];
238 static struct target_desc *tdesc_arm_mprofile_list[ARM_M_TYPE_INVALID];
239
240 /* This is used to keep the bfd arch_info in sync with the disassembly
241 style. */
242 static void set_disassembly_style_sfunc (const char *, int,
243 struct cmd_list_element *);
244 static void show_disassembly_style_sfunc (struct ui_file *, int,
245 struct cmd_list_element *,
246 const char *);
247
248 static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
249 readable_regcache *regcache,
250 int regnum, gdb_byte *buf);
251 static void arm_neon_quad_write (struct gdbarch *gdbarch,
252 struct regcache *regcache,
253 int regnum, const gdb_byte *buf);
254
255 static CORE_ADDR
256 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self);
257
258
259 /* get_next_pcs operations. */
260 static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = {
261 arm_get_next_pcs_read_memory_unsigned_integer,
262 arm_get_next_pcs_syscall_next_pc,
263 arm_get_next_pcs_addr_bits_remove,
264 arm_get_next_pcs_is_thumb,
265 NULL,
266 };
267
268 struct arm_prologue_cache
269 {
270 /* The stack pointer at the time this frame was created; i.e. the
271 caller's stack pointer when this function was called. It is used
272 to identify this frame. */
273 CORE_ADDR prev_sp;
274
275 /* The frame base for this frame is just prev_sp - frame size.
276 FRAMESIZE is the distance from the frame pointer to the
277 initial stack pointer. */
278
279 int framesize;
280
281 /* The register used to hold the frame pointer for this frame. */
282 int framereg;
283
284 /* Saved register offsets. */
285 struct trad_frame_saved_reg *saved_regs;
286 };
287
288 static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
289 CORE_ADDR prologue_start,
290 CORE_ADDR prologue_end,
291 struct arm_prologue_cache *cache);
292
293 /* Architecture version for displaced stepping. This affects the behaviour of
294 certain instructions, and really should not be hard-wired. */
295
296 #define DISPLACED_STEPPING_ARCH_VERSION 5
297
298 /* See arm-tdep.h. */
299
300 bool arm_apcs_32 = true;
301
302 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
303
304 int
305 arm_psr_thumb_bit (struct gdbarch *gdbarch)
306 {
307 if (gdbarch_tdep (gdbarch)->is_m)
308 return XPSR_T;
309 else
310 return CPSR_T;
311 }
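/* Concretely, on A- and R-profile cores the T bit is bit 5 of the CPSR,
   while on M-profile cores it is bit 24 of the xPSR, which is why the
   mask returned here differs between the two cases.  */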
312
313 /* Determine if the processor is currently executing in Thumb mode. */
314
315 int
316 arm_is_thumb (struct regcache *regcache)
317 {
318 ULONGEST cpsr;
319 ULONGEST t_bit = arm_psr_thumb_bit (regcache->arch ());
320
321 cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
322
323 return (cpsr & t_bit) != 0;
324 }
325
326 /* Determine if FRAME is executing in Thumb mode. */
327
328 int
329 arm_frame_is_thumb (struct frame_info *frame)
330 {
331 CORE_ADDR cpsr;
332 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
333
334 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
335 directly (from a signal frame or dummy frame) or by interpreting
336 the saved LR (from a prologue or DWARF frame). So consult it and
337 trust the unwinders. */
338 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
339
340 return (cpsr & t_bit) != 0;
341 }
342
343 /* Search for the mapping symbol covering MEMADDR. If one is found,
344 return its type. Otherwise, return 0. If START is non-NULL,
345 set *START to the location of the mapping symbol. */
346
347 static char
348 arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
349 {
350 struct obj_section *sec;
351
352 /* If there are mapping symbols, consult them. */
353 sec = find_pc_section (memaddr);
354 if (sec != NULL)
355 {
356 arm_per_bfd *data = arm_bfd_data_key.get (sec->objfile->obfd);
357 if (data != NULL)
358 {
359 unsigned int section_idx = sec->the_bfd_section->index;
360 arm_mapping_symbol_vec &map
361 = data->section_maps[section_idx];
362
363 /* Sort the vector on first use. */
364 if (!data->section_maps_sorted[section_idx])
365 {
366 std::sort (map.begin (), map.end ());
367 data->section_maps_sorted[section_idx] = true;
368 }
369
370 struct arm_mapping_symbol map_key
371 = { memaddr - obj_section_addr (sec), 0 };
372 arm_mapping_symbol_vec::const_iterator it
373 = std::lower_bound (map.begin (), map.end (), map_key);
374
375 /* std::lower_bound finds the earliest ordered insertion
376 point. If the symbol at this position starts at this exact
377 address, we use that; otherwise, the preceding
378 mapping symbol covers this address. */
379 if (it < map.end ())
380 {
381 if (it->value == map_key.value)
382 {
383 if (start)
384 *start = it->value + obj_section_addr (sec);
385 return it->type;
386 }
387 }
388
389 if (it > map.begin ())
390 {
391 arm_mapping_symbol_vec::const_iterator prev_it
392 = it - 1;
393
394 if (start)
395 *start = prev_it->value + obj_section_addr (sec);
396 return prev_it->type;
397 }
398 }
399 }
400
401 return 0;
402 }
403
404 /* Determine if the program counter specified in MEMADDR is in a Thumb
405 function. This function should be called for addresses unrelated to
406 any executing frame; otherwise, prefer arm_frame_is_thumb. */
407
408 int
409 arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
410 {
411 struct bound_minimal_symbol sym;
412 char type;
413 arm_displaced_step_closure *dsc
414 = ((arm_displaced_step_closure * )
415 get_displaced_step_closure_by_addr (memaddr));
416
417 /* If we are checking the mode of a displaced instruction in the copy area,
418 the mode should be determined by the instruction at the original address. */
419 if (dsc)
420 {
421 if (debug_displaced)
422 fprintf_unfiltered (gdb_stdlog,
423 "displaced: check mode of %.8lx instead of %.8lx\n",
424 (unsigned long) dsc->insn_addr,
425 (unsigned long) memaddr);
426 memaddr = dsc->insn_addr;
427 }
428
429 /* If bit 0 of the address is set, assume this is a Thumb address. */
430 if (IS_THUMB_ADDR (memaddr))
431 return 1;
432
433 /* If the user wants to override the symbol table, let them. */
434 if (strcmp (arm_force_mode_string, "arm") == 0)
435 return 0;
436 if (strcmp (arm_force_mode_string, "thumb") == 0)
437 return 1;
438
439 /* ARM v6-M and v7-M are always in Thumb mode. */
440 if (gdbarch_tdep (gdbarch)->is_m)
441 return 1;
442
443 /* If there are mapping symbols, consult them. */
444 type = arm_find_mapping_symbol (memaddr, NULL);
445 if (type)
446 return type == 't';
447
448 /* Thumb functions have a "special" bit set in minimal symbols. */
449 sym = lookup_minimal_symbol_by_pc (memaddr);
450 if (sym.minsym)
451 return (MSYMBOL_IS_SPECIAL (sym.minsym));
452
453 /* If the user wants to override the fallback mode, let them. */
454 if (strcmp (arm_fallback_mode_string, "arm") == 0)
455 return 0;
456 if (strcmp (arm_fallback_mode_string, "thumb") == 0)
457 return 1;
458
459 /* If we couldn't find any symbol, but we're talking to a running
460 target, then trust the current value of $cpsr. This lets
461 "display/i $pc" always show the correct mode (though if there is
462 a symbol table we will not reach here, so it still may not be
463 displayed in the mode in which it will be executed). */
464 if (target_has_registers ())
465 return arm_frame_is_thumb (get_current_frame ());
466
467 /* Otherwise we're out of luck; we assume ARM. */
468 return 0;
469 }
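/* To summarize the heuristics above: an address with its Thumb bit
   (bit 0) set is Thumb; otherwise an explicit "set arm force-mode"
   wins, then M-profile (always Thumb), then mapping symbols, then the
   per-symbol "special" bit, then "set arm fallback-mode", and finally
   the live value of $cpsr; only when all of those fail do we default
   to ARM mode.  */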
470
471 /* Determine if the address specified equals any of these magic return
472 values, called EXC_RETURN, defined by the ARM v6-M, v7-M and v8-M
473 architectures.
474
475 From ARMv6-M Reference Manual B1.5.8
476 Table B1-5 Exception return behavior
477
478 EXC_RETURN Return To Return Stack
479 0xFFFFFFF1 Handler mode Main
480 0xFFFFFFF9 Thread mode Main
481 0xFFFFFFFD Thread mode Process
482
483 From ARMv7-M Reference Manual B1.5.8
484 Table B1-8 EXC_RETURN definition of exception return behavior, no FP
485
486 EXC_RETURN Return To Return Stack
487 0xFFFFFFF1 Handler mode Main
488 0xFFFFFFF9 Thread mode Main
489 0xFFFFFFFD Thread mode Process
490
491 Table B1-9 EXC_RETURN definition of exception return behavior, with
492 FP
493
494 EXC_RETURN Return To Return Stack Frame Type
495 0xFFFFFFE1 Handler mode Main Extended
496 0xFFFFFFE9 Thread mode Main Extended
497 0xFFFFFFED Thread mode Process Extended
498 0xFFFFFFF1 Handler mode Main Basic
499 0xFFFFFFF9 Thread mode Main Basic
500 0xFFFFFFFD Thread mode Process Basic
501
502 For more details see "B1.5.8 Exception return behavior"
503 in both ARMv6-M and ARMv7-M Architecture Reference Manuals.
504
505 The ARMv8-M Architecture Technical Reference also adds the following,
506 for implementations without the Security Extension:
507
508 EXC_RETURN Condition
509 0xFFFFFFB0 Return to Handler mode.
510 0xFFFFFFB8 Return to Thread mode using the main stack.
511 0xFFFFFFBC Return to Thread mode using the process stack. */
512
513 static int
514 arm_m_addr_is_magic (CORE_ADDR addr)
515 {
516 switch (addr)
517 {
518 /* Values from ARMv8-M Architecture Technical Reference. */
519 case 0xffffffb0:
520 case 0xffffffb8:
521 case 0xffffffbc:
522 /* Values from Tables in B1.5.8 the EXC_RETURN definitions of
523 the exception return behavior. */
524 case 0xffffffe1:
525 case 0xffffffe9:
526 case 0xffffffed:
527 case 0xfffffff1:
528 case 0xfffffff9:
529 case 0xfffffffd:
530 /* Address is magic. */
531 return 1;
532
533 default:
534 /* Address is not magic. */
535 return 0;
536 }
537 }
538
539 /* Remove useless bits from addresses in a running program. */
540 static CORE_ADDR
541 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
542 {
543 /* On M-profile devices, do not strip the low bit from EXC_RETURN
544 (the magic exception return address). */
545 if (gdbarch_tdep (gdbarch)->is_m
546 && arm_m_addr_is_magic (val))
547 return val;
548
549 if (arm_apcs_32)
550 return UNMAKE_THUMB_ADDR (val);
551 else
552 return (val & 0x03fffffc);
553 }
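/* For example, with arm_apcs_32 set, a Thumb address such as 0x8001 is
   cleaned up to 0x8000; in 26-bit APCS mode the PSR flags folded into
   bits 0-1 and 26-31 are masked off instead.  On an M-profile target a
   magic EXC_RETURN value such as 0xfffffffd is returned unchanged so
   that it can still be recognized as an exception return.  */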
554
555 /* Return 1 if PC is the start of a compiler helper function which
556 can be safely ignored during prologue skipping. IS_THUMB is true
557 if the function is known to be a Thumb function due to the way it
558 is being called. */
559 static int
560 skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
561 {
562 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
563 struct bound_minimal_symbol msym;
564
565 msym = lookup_minimal_symbol_by_pc (pc);
566 if (msym.minsym != NULL
567 && BMSYMBOL_VALUE_ADDRESS (msym) == pc
568 && msym.minsym->linkage_name () != NULL)
569 {
570 const char *name = msym.minsym->linkage_name ();
571
572 /* The GNU linker's Thumb call stub to foo is named
573 __foo_from_thumb. */
574 if (strstr (name, "_from_thumb") != NULL)
575 name += 2;
576
577 /* On soft-float targets, __truncdfsf2 is called to convert promoted
578 arguments to their argument types in non-prototyped
579 functions. */
580 if (startswith (name, "__truncdfsf2"))
581 return 1;
582 if (startswith (name, "__aeabi_d2f"))
583 return 1;
584
585 /* Internal functions related to thread-local storage. */
586 if (startswith (name, "__tls_get_addr"))
587 return 1;
588 if (startswith (name, "__aeabi_read_tp"))
589 return 1;
590 }
591 else
592 {
593 /* If we run against a stripped glibc, we may be unable to identify
594 special functions by name. Check for one important case,
595 __aeabi_read_tp, by comparing the *code* against the default
596 implementation (this is hand-written ARM assembler in glibc). */
597
598 if (!is_thumb
599 && read_code_unsigned_integer (pc, 4, byte_order_for_code)
600 == 0xe3e00a0f /* mov r0, #0xffff0fff */
601 && read_code_unsigned_integer (pc + 4, 4, byte_order_for_code)
602 == 0xe240f01f) /* sub pc, r0, #31 */
603 return 1;
604 }
605
606 return 0;
607 }
608
609 /* Extract the immediate from a movw/movt instruction of encoding T. INSN1 is
610 the first 16 bits of the instruction, and INSN2 is the second 16 bits of
611 the instruction. */
612 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
613 ((bits ((insn1), 0, 3) << 12) \
614 | (bits ((insn1), 10, 10) << 11) \
615 | (bits ((insn2), 12, 14) << 8) \
616 | bits ((insn2), 0, 7))
617
618 /* Extract the immediate from a movw/movt instruction of encoding A. INSN is
619 the 32-bit instruction. */
620 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \
621 ((bits ((insn), 16, 19) << 12) \
622 | bits ((insn), 0, 11))
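/* For example, EXTRACT_MOVW_MOVT_IMM_T reassembles the 16-bit immediate
   0xabcd from insn1 bits 0-3 = 0xa, insn1 bit 10 = 1, insn2 bits 12-14 = 0x3
   and insn2 bits 0-7 = 0xcd; EXTRACT_MOVW_MOVT_IMM_A reassembles the same
   value from insn bits 16-19 = 0xa and insn bits 0-11 = 0xbcd.  */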
623
624 /* Decode immediate value; implements ThumbExpandImmediate pseudo-op. */
625
626 static unsigned int
627 thumb_expand_immediate (unsigned int imm)
628 {
629 unsigned int count = imm >> 7;
630
631 if (count < 8)
632 switch (count / 2)
633 {
634 case 0:
635 return imm & 0xff;
636 case 1:
637 return (imm & 0xff) | ((imm & 0xff) << 16);
638 case 2:
639 return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
640 case 3:
641 return (imm & 0xff) | ((imm & 0xff) << 8)
642 | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
643 }
644
645 return (0x80 | (imm & 0x7f)) << (32 - count);
646 }
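/* For example, thumb_expand_immediate (0x0ab) yields 0x000000ab (count 1,
   plain 8-bit value), thumb_expand_immediate (0x1ab) yields 0x00ab00ab
   (count 3, the byte duplicated into bytes 0 and 2), and
   thumb_expand_immediate (0x4ff) yields 0x7f800000 (count 9, the value
   0xff rotated into bits 23-30).  */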
647
648 /* Return 1 if the 16-bit Thumb instruction INSN restores SP in
649 the epilogue, 0 otherwise. */
650
651 static int
652 thumb_instruction_restores_sp (unsigned short insn)
653 {
654 return (insn == 0x46bd /* mov sp, r7 */
655 || (insn & 0xff80) == 0xb000 /* add sp, imm */
656 || (insn & 0xfe00) == 0xbc00); /* pop <registers> */
657 }
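/* For example, 0xb008 ("add sp, #32") matches the second test above and
   0xbd10 ("pop {r4, pc}") matches the third, so either one terminates the
   Thumb prologue scan below.  */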
658
659 /* Analyze a Thumb prologue, looking for a recognizable stack frame
660 and frame pointer. Scan until we encounter a store that could
661 clobber the stack frame unexpectedly, or an unknown instruction.
662 Return the last address which is definitely safe to skip for an
663 initial breakpoint. */
664
665 static CORE_ADDR
666 thumb_analyze_prologue (struct gdbarch *gdbarch,
667 CORE_ADDR start, CORE_ADDR limit,
668 struct arm_prologue_cache *cache)
669 {
670 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
671 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
672 int i;
673 pv_t regs[16];
674 CORE_ADDR offset;
675 CORE_ADDR unrecognized_pc = 0;
676
677 for (i = 0; i < 16; i++)
678 regs[i] = pv_register (i, 0);
679 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
680
681 while (start < limit)
682 {
683 unsigned short insn;
684
685 insn = read_code_unsigned_integer (start, 2, byte_order_for_code);
686
687 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
688 {
689 int regno;
690 int mask;
691
692 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
693 break;
694
695 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
696 whether to save LR (R14). */
697 mask = (insn & 0xff) | ((insn & 0x100) << 6);
698
699 /* Calculate offsets of saved R0-R7 and LR. */
700 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
701 if (mask & (1 << regno))
702 {
703 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
704 -4);
705 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
706 }
707 }
708 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
709 {
710 offset = (insn & 0x7f) << 2; /* get scaled offset */
711 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
712 -offset);
713 }
714 else if (thumb_instruction_restores_sp (insn))
715 {
716 /* Don't scan past the epilogue. */
717 break;
718 }
719 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
720 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
721 (insn & 0xff) << 2);
722 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
723 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
724 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
725 bits (insn, 6, 8));
726 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
727 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
728 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
729 bits (insn, 0, 7));
730 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
731 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
732 && pv_is_constant (regs[bits (insn, 3, 5)]))
733 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
734 regs[bits (insn, 6, 8)]);
735 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
736 && pv_is_constant (regs[bits (insn, 3, 6)]))
737 {
738 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
739 int rm = bits (insn, 3, 6);
740 regs[rd] = pv_add (regs[rd], regs[rm]);
741 }
742 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
743 {
744 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
745 int src_reg = (insn & 0x78) >> 3;
746 regs[dst_reg] = regs[src_reg];
747 }
748 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
749 {
750 /* Handle stores to the stack. Normally pushes are used,
751 but with GCC -mtpcs-frame, there may be other stores
752 in the prologue to create the frame. */
753 int regno = (insn >> 8) & 0x7;
754 pv_t addr;
755
756 offset = (insn & 0xff) << 2;
757 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
758
759 if (stack.store_would_trash (addr))
760 break;
761
762 stack.store (addr, 4, regs[regno]);
763 }
764 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
765 {
766 int rd = bits (insn, 0, 2);
767 int rn = bits (insn, 3, 5);
768 pv_t addr;
769
770 offset = bits (insn, 6, 10) << 2;
771 addr = pv_add_constant (regs[rn], offset);
772
773 if (stack.store_would_trash (addr))
774 break;
775
776 stack.store (addr, 4, regs[rd]);
777 }
778 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
779 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
780 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
781 /* Ignore stores of argument registers to the stack. */
782 ;
783 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
784 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
785 /* Ignore block loads from the stack, potentially copying
786 parameters from memory. */
787 ;
788 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
789 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
790 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
791 /* Similarly ignore single loads from the stack. */
792 ;
793 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
794 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
795 /* Skip register copies, i.e. saves to another register
796 instead of the stack. */
797 ;
798 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
799 /* Recognize constant loads; even with small stacks these are necessary
800 on Thumb. */
801 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
802 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
803 {
804 /* Constant pool loads, for the same reason. */
805 unsigned int constant;
806 CORE_ADDR loc;
807
808 loc = start + 4 + bits (insn, 0, 7) * 4;
809 constant = read_memory_unsigned_integer (loc, 4, byte_order);
810 regs[bits (insn, 8, 10)] = pv_constant (constant);
811 }
812 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
813 {
814 unsigned short inst2;
815
816 inst2 = read_code_unsigned_integer (start + 2, 2,
817 byte_order_for_code);
818
819 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
820 {
821 /* BL, BLX. Allow some special function calls when
822 skipping the prologue; GCC generates these before
823 storing arguments to the stack. */
824 CORE_ADDR nextpc;
825 int j1, j2, imm1, imm2;
826
827 imm1 = sbits (insn, 0, 10);
828 imm2 = bits (inst2, 0, 10);
829 j1 = bit (inst2, 13);
830 j2 = bit (inst2, 11);
831
832 offset = ((imm1 << 12) + (imm2 << 1));
833 offset ^= ((!j2) << 22) | ((!j1) << 23);
834
835 nextpc = start + 4 + offset;
836 /* For BLX make sure to clear the low bits. */
837 if (bit (inst2, 12) == 0)
838 nextpc = nextpc & 0xfffffffc;
839
840 if (!skip_prologue_function (gdbarch, nextpc,
841 bit (inst2, 12) != 0))
842 break;
843 }
844
845 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
846 { registers } */
847 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
848 {
849 pv_t addr = regs[bits (insn, 0, 3)];
850 int regno;
851
852 if (stack.store_would_trash (addr))
853 break;
854
855 /* Calculate offsets of saved registers. */
856 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
857 if (inst2 & (1 << regno))
858 {
859 addr = pv_add_constant (addr, -4);
860 stack.store (addr, 4, regs[regno]);
861 }
862
863 if (insn & 0x0020)
864 regs[bits (insn, 0, 3)] = addr;
865 }
866
867 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
868 [Rn, #+/-imm]{!} */
869 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
870 {
871 int regno1 = bits (inst2, 12, 15);
872 int regno2 = bits (inst2, 8, 11);
873 pv_t addr = regs[bits (insn, 0, 3)];
874
875 offset = inst2 & 0xff;
876 if (insn & 0x0080)
877 addr = pv_add_constant (addr, offset);
878 else
879 addr = pv_add_constant (addr, -offset);
880
881 if (stack.store_would_trash (addr))
882 break;
883
884 stack.store (addr, 4, regs[regno1]);
885 stack.store (pv_add_constant (addr, 4),
886 4, regs[regno2]);
887
888 if (insn & 0x0020)
889 regs[bits (insn, 0, 3)] = addr;
890 }
891
892 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
893 && (inst2 & 0x0c00) == 0x0c00
894 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
895 {
896 int regno = bits (inst2, 12, 15);
897 pv_t addr = regs[bits (insn, 0, 3)];
898
899 offset = inst2 & 0xff;
900 if (inst2 & 0x0200)
901 addr = pv_add_constant (addr, offset);
902 else
903 addr = pv_add_constant (addr, -offset);
904
905 if (stack.store_would_trash (addr))
906 break;
907
908 stack.store (addr, 4, regs[regno]);
909
910 if (inst2 & 0x0100)
911 regs[bits (insn, 0, 3)] = addr;
912 }
913
914 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
915 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
916 {
917 int regno = bits (inst2, 12, 15);
918 pv_t addr;
919
920 offset = inst2 & 0xfff;
921 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
922
923 if (stack.store_would_trash (addr))
924 break;
925
926 stack.store (addr, 4, regs[regno]);
927 }
928
929 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
930 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
931 /* Ignore stores of argument registers to the stack. */
932 ;
933
934 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
935 && (inst2 & 0x0d00) == 0x0c00
936 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
937 /* Ignore stores of argument registers to the stack. */
938 ;
939
940 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
941 { registers } */
942 && (inst2 & 0x8000) == 0x0000
943 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
944 /* Ignore block loads from the stack, potentially copying
945 parameters from memory. */
946 ;
947
948 else if ((insn & 0xff70) == 0xe950 /* ldrd Rt, Rt2,
949 [Rn, #+/-imm] */
950 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
951 /* Similarly ignore dual loads from the stack. */
952 ;
953
954 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
955 && (inst2 & 0x0d00) == 0x0c00
956 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
957 /* Similarly ignore single loads from the stack. */
958 ;
959
960 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
961 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
962 /* Similarly ignore single loads from the stack. */
963 ;
964
965 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
966 && (inst2 & 0x8000) == 0x0000)
967 {
968 unsigned int imm = ((bits (insn, 10, 10) << 11)
969 | (bits (inst2, 12, 14) << 8)
970 | bits (inst2, 0, 7));
971
972 regs[bits (inst2, 8, 11)]
973 = pv_add_constant (regs[bits (insn, 0, 3)],
974 thumb_expand_immediate (imm));
975 }
976
977 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
978 && (inst2 & 0x8000) == 0x0000)
979 {
980 unsigned int imm = ((bits (insn, 10, 10) << 11)
981 | (bits (inst2, 12, 14) << 8)
982 | bits (inst2, 0, 7));
983
984 regs[bits (inst2, 8, 11)]
985 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
986 }
987
988 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
989 && (inst2 & 0x8000) == 0x0000)
990 {
991 unsigned int imm = ((bits (insn, 10, 10) << 11)
992 | (bits (inst2, 12, 14) << 8)
993 | bits (inst2, 0, 7));
994
995 regs[bits (inst2, 8, 11)]
996 = pv_add_constant (regs[bits (insn, 0, 3)],
997 - (CORE_ADDR) thumb_expand_immediate (imm));
998 }
999
1000 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
1001 && (inst2 & 0x8000) == 0x0000)
1002 {
1003 unsigned int imm = ((bits (insn, 10, 10) << 11)
1004 | (bits (inst2, 12, 14) << 8)
1005 | bits (inst2, 0, 7));
1006
1007 regs[bits (inst2, 8, 11)]
1008 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
1009 }
1010
1011 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
1012 {
1013 unsigned int imm = ((bits (insn, 10, 10) << 11)
1014 | (bits (inst2, 12, 14) << 8)
1015 | bits (inst2, 0, 7));
1016
1017 regs[bits (inst2, 8, 11)]
1018 = pv_constant (thumb_expand_immediate (imm));
1019 }
1020
1021 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
1022 {
1023 unsigned int imm
1024 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
1025
1026 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1027 }
1028
1029 else if (insn == 0xea5f /* mov.w Rd,Rm */
1030 && (inst2 & 0xf0f0) == 0)
1031 {
1032 int dst_reg = (inst2 & 0x0f00) >> 8;
1033 int src_reg = inst2 & 0xf;
1034 regs[dst_reg] = regs[src_reg];
1035 }
1036
1037 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1038 {
1039 /* Constant pool loads. */
1040 unsigned int constant;
1041 CORE_ADDR loc;
1042
1043 offset = bits (inst2, 0, 11);
1044 if (insn & 0x0080)
1045 loc = start + 4 + offset;
1046 else
1047 loc = start + 4 - offset;
1048
1049 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1050 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1051 }
1052
1053 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1054 {
1055 /* Constant pool loads. */
1056 unsigned int constant;
1057 CORE_ADDR loc;
1058
1059 offset = bits (inst2, 0, 7) << 2;
1060 if (insn & 0x0080)
1061 loc = start + 4 + offset;
1062 else
1063 loc = start + 4 - offset;
1064
1065 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1066 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1067
1068 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1069 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1070 }
1071
1072 else if (thumb2_instruction_changes_pc (insn, inst2))
1073 {
1074 /* Don't scan past anything that might change control flow. */
1075 break;
1076 }
1077 else
1078 {
1079 /* The optimizer might shove anything into the prologue,
1080 so we just skip what we don't recognize. */
1081 unrecognized_pc = start;
1082 }
1083
1084 start += 2;
1085 }
1086 else if (thumb_instruction_changes_pc (insn))
1087 {
1088 /* Don't scan past anything that might change control flow. */
1089 break;
1090 }
1091 else
1092 {
1093 /* The optimizer might shove anything into the prologue,
1094 so we just skip what we don't recognize. */
1095 unrecognized_pc = start;
1096 }
1097
1098 start += 2;
1099 }
1100
1101 if (arm_debug)
1102 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1103 paddress (gdbarch, start));
1104
1105 if (unrecognized_pc == 0)
1106 unrecognized_pc = start;
1107
1108 if (cache == NULL)
1109 return unrecognized_pc;
1110
1111 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1112 {
1113 /* Frame pointer is fp. Frame size is constant. */
1114 cache->framereg = ARM_FP_REGNUM;
1115 cache->framesize = -regs[ARM_FP_REGNUM].k;
1116 }
1117 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1118 {
1119 /* Frame pointer is r7. Frame size is constant. */
1120 cache->framereg = THUMB_FP_REGNUM;
1121 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1122 }
1123 else
1124 {
1125 /* Try the stack pointer... this is a bit desperate. */
1126 cache->framereg = ARM_SP_REGNUM;
1127 cache->framesize = -regs[ARM_SP_REGNUM].k;
1128 }
1129
1130 for (i = 0; i < 16; i++)
1131 if (stack.find_reg (gdbarch, i, &offset))
1132 cache->saved_regs[i].addr = offset;
1133
1134 return unrecognized_pc;
1135 }
1136
1137
1138 /* Try to analyze the instructions starting from PC, which load the symbol
1139 __stack_chk_guard. Return the address of the instruction following the
1140 load, set the destination register number in *DESTREG, and set the size
1141 in bytes of the loading instructions in *OFFSET. Return 0 if the
1142 instructions are not recognized. */
1143
1144 static CORE_ADDR
1145 arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
1146 unsigned int *destreg, int *offset)
1147 {
1148 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1149 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1150 unsigned int low, high, address;
1151
1152 address = 0;
1153 if (is_thumb)
1154 {
1155 unsigned short insn1
1156 = read_code_unsigned_integer (pc, 2, byte_order_for_code);
1157
1158 if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
1159 {
1160 *destreg = bits (insn1, 8, 10);
1161 *offset = 2;
1162 address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
1163 address = read_memory_unsigned_integer (address, 4,
1164 byte_order_for_code);
1165 }
1166 else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
1167 {
1168 unsigned short insn2
1169 = read_code_unsigned_integer (pc + 2, 2, byte_order_for_code);
1170
1171 low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1172
1173 insn1
1174 = read_code_unsigned_integer (pc + 4, 2, byte_order_for_code);
1175 insn2
1176 = read_code_unsigned_integer (pc + 6, 2, byte_order_for_code);
1177
1178 /* movt Rd, #const */
1179 if ((insn1 & 0xfbc0) == 0xf2c0)
1180 {
1181 high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1182 *destreg = bits (insn2, 8, 11);
1183 *offset = 8;
1184 address = (high << 16 | low);
1185 }
1186 }
1187 }
1188 else
1189 {
1190 unsigned int insn
1191 = read_code_unsigned_integer (pc, 4, byte_order_for_code);
1192
1193 if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
1194 {
1195 address = bits (insn, 0, 11) + pc + 8;
1196 address = read_memory_unsigned_integer (address, 4,
1197 byte_order_for_code);
1198
1199 *destreg = bits (insn, 12, 15);
1200 *offset = 4;
1201 }
1202 else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1203 {
1204 low = EXTRACT_MOVW_MOVT_IMM_A (insn);
1205
1206 insn
1207 = read_code_unsigned_integer (pc + 4, 4, byte_order_for_code);
1208
1209 if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1210 {
1211 high = EXTRACT_MOVW_MOVT_IMM_A (insn);
1212 *destreg = bits (insn, 12, 15);
1213 *offset = 8;
1214 address = (high << 16 | low);
1215 }
1216 }
1217 }
1218
1219 return address;
1220 }
1221
1222 /* Try to skip a sequence of instructions used for the stack protector. If PC
1223 points to the first instruction of this sequence, return the address of the
1224 first instruction after this sequence; otherwise, return the original PC.
1225
1226 On ARM, this sequence of instructions is mainly composed of three steps:
1227 Step 1: load symbol __stack_chk_guard,
1228 Step 2: load from address of __stack_chk_guard,
1229 Step 3: store it to somewhere else.
1230
1231 Usually, the instructions in steps 2 and 3 are the same across ARM
1232 architectures. Step 2 is the single instruction 'ldr Rx, [Rn, #0]', and
1233 step 3 is the single instruction 'str Rx, [r7, #immd]'. However, the
1234 instructions in step 1 differ between ARM architectures. On ARMv7,
1235 they are:
1236
1237 movw Rn, #:lower16:__stack_chk_guard
1238 movt Rn, #:upper16:__stack_chk_guard
1239
1240 On ARMv5T, it is:
1241
1242 ldr Rn, .Label
1243 ....
1244 .Label:
1245 .word __stack_chk_guard
1246
1247 Since ldr/str are very common instructions, we can't use them alone as the
1248 'fingerprint' or 'signature' of a stack protector sequence. Instead we use
1249 the sequence {movw/movt, ldr}/ldr/str plus the symbol __stack_chk_guard, if
1250 not stripped, as the 'fingerprint' of a stack protector code sequence. */
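/* As an illustration, a typical ARMv7 GCC stack-protector preamble looks
   like

     movw r3, #:lower16:__stack_chk_guard
     movt r3, #:upper16:__stack_chk_guard
     ldr  r3, [r3]
     str  r3, [r7, #imm]

   (register choices may differ); arm_skip_stack_protector below returns
   the address just past the final str, i.e. PC + OFFSET + 8 in ARM mode
   and PC + OFFSET + 4 in Thumb-2 mode, where OFFSET is the size of the
   step-1 instructions.  */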
1251
1252 static CORE_ADDR
1253 arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
1254 {
1255 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1256 unsigned int basereg;
1257 struct bound_minimal_symbol stack_chk_guard;
1258 int offset;
1259 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1260 CORE_ADDR addr;
1261
1262 /* Try to parse the instructions in Step 1. */
1263 addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1264 &basereg, &offset);
1265 if (!addr)
1266 return pc;
1267
1268 stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
1269 /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
1270 Otherwise, this sequence cannot be for the stack protector. */
1271 if (stack_chk_guard.minsym == NULL
1272 || !startswith (stack_chk_guard.minsym->linkage_name (), "__stack_chk_guard"))
1273 return pc;
1274
1275 if (is_thumb)
1276 {
1277 unsigned int destreg;
1278 unsigned short insn
1279 = read_code_unsigned_integer (pc + offset, 2, byte_order_for_code);
1280
1281 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1282 if ((insn & 0xf800) != 0x6800)
1283 return pc;
1284 if (bits (insn, 3, 5) != basereg)
1285 return pc;
1286 destreg = bits (insn, 0, 2);
1287
1288 insn = read_code_unsigned_integer (pc + offset + 2, 2,
1289 byte_order_for_code);
1290 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1291 if ((insn & 0xf800) != 0x6000)
1292 return pc;
1293 if (destreg != bits (insn, 0, 2))
1294 return pc;
1295 }
1296 else
1297 {
1298 unsigned int destreg;
1299 unsigned int insn
1300 = read_code_unsigned_integer (pc + offset, 4, byte_order_for_code);
1301
1302 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1303 if ((insn & 0x0e500000) != 0x04100000)
1304 return pc;
1305 if (bits (insn, 16, 19) != basereg)
1306 return pc;
1307 destreg = bits (insn, 12, 15);
1308 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1309 insn = read_code_unsigned_integer (pc + offset + 4,
1310 4, byte_order_for_code);
1311 if ((insn & 0x0e500000) != 0x04000000)
1312 return pc;
1313 if (bits (insn, 12, 15) != destreg)
1314 return pc;
1315 }
1316 /* The total size of the two ldr/str instructions is 4 bytes on Thumb-2,
1317 and 8 bytes on ARM. */
1318 if (is_thumb)
1319 return pc + offset + 4;
1320 else
1321 return pc + offset + 8;
1322 }
1323
1324 /* Advance the PC across any function entry prologue instructions to
1325 reach some "real" code.
1326
1327 The APCS (ARM Procedure Call Standard) defines the following
1328 prologue:
1329
1330 mov ip, sp
1331 [stmfd sp!, {a1,a2,a3,a4}]
1332 stmfd sp!, {...,fp,ip,lr,pc}
1333 [stfe f7, [sp, #-12]!]
1334 [stfe f6, [sp, #-12]!]
1335 [stfe f5, [sp, #-12]!]
1336 [stfe f4, [sp, #-12]!]
1337 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1338
1339 static CORE_ADDR
1340 arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1341 {
1342 CORE_ADDR func_addr, limit_pc;
1343
1344 /* See if we can determine the end of the prologue via the symbol table.
1345 If so, then return either PC, or the PC after the prologue, whichever
1346 is greater. */
1347 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
1348 {
1349 CORE_ADDR post_prologue_pc
1350 = skip_prologue_using_sal (gdbarch, func_addr);
1351 struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);
1352
1353 if (post_prologue_pc)
1354 post_prologue_pc
1355 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1356
1357
1358 /* GCC always emits a line note before the prologue and another
1359 one after, even if the two are at the same address or on the
1360 same line. Take advantage of this so that we do not need to
1361 know every instruction that might appear in the prologue. We
1362 will have producer information for most binaries; if it is
1363 missing (e.g. for -gstabs), assume the GNU tools. */
1364 if (post_prologue_pc
1365 && (cust == NULL
1366 || COMPUNIT_PRODUCER (cust) == NULL
1367 || startswith (COMPUNIT_PRODUCER (cust), "GNU ")
1368 || producer_is_llvm (COMPUNIT_PRODUCER (cust))))
1369 return post_prologue_pc;
1370
1371 if (post_prologue_pc != 0)
1372 {
1373 CORE_ADDR analyzed_limit;
1374
1375 /* For non-GCC compilers, make sure the entire line is an
1376 acceptable prologue; GDB will round this function's
1377 return value up to the end of the following line so we
1378 can not skip just part of a line (and we do not want to).
1379
1380 RealView does not treat the prologue specially, but does
1381 associate prologue code with the opening brace; so this
1382 lets us skip the first line if we think it is the opening
1383 brace. */
1384 if (arm_pc_is_thumb (gdbarch, func_addr))
1385 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1386 post_prologue_pc, NULL);
1387 else
1388 analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
1389 post_prologue_pc, NULL);
1390
1391 if (analyzed_limit != post_prologue_pc)
1392 return func_addr;
1393
1394 return post_prologue_pc;
1395 }
1396 }
1397
1398 /* Can't determine prologue from the symbol table, need to examine
1399 instructions. */
1400
1401 /* Find an upper limit on the function prologue using the debug
1402 information. If the debug information could not be used to provide
1403 that bound, then use an arbitrary large number as the upper bound. */
1404 /* Like arm_scan_prologue, stop no later than pc + 64. */
1405 limit_pc = skip_prologue_using_sal (gdbarch, pc);
1406 if (limit_pc == 0)
1407 limit_pc = pc + 64; /* Magic. */
1408
1409
1410 /* Check if this is Thumb code. */
1411 if (arm_pc_is_thumb (gdbarch, pc))
1412 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1413 else
1414 return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1415 }
1416
1417 /* *INDENT-OFF* */
1418 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1419 This function decodes a Thumb function prologue to determine:
1420 1) the size of the stack frame
1421 2) which registers are saved on it
1422 3) the offsets of saved regs
1423 4) the offset from the stack pointer to the frame pointer
1424
1425 A typical Thumb function prologue would create this stack frame
1426 (offsets relative to FP)
1427 old SP -> 24 stack parameters
1428 20 LR
1429 16 R7
1430 R7 -> 0 local variables (16 bytes)
1431 SP -> -12 additional stack space (12 bytes)
1432 The frame size would thus be 36 bytes, and the frame offset would be
1433 12 bytes. The frame register is R7.
1434
1435 The comments for thumb_skip_prolog() describe the algorithm we use
1436 to detect the end of the prologue. */
1437 /* *INDENT-ON* */
1438
1439 static void
1440 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1441 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1442 {
1443 CORE_ADDR prologue_start;
1444 CORE_ADDR prologue_end;
1445
1446 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1447 &prologue_end))
1448 {
1449 /* See comment in arm_scan_prologue for an explanation of
1450 this heuristic. */
1451 if (prologue_end > prologue_start + 64)
1452 {
1453 prologue_end = prologue_start + 64;
1454 }
1455 }
1456 else
1457 /* We're in the boondocks: we have no idea where the start of the
1458 function is. */
1459 return;
1460
1461 prologue_end = std::min (prologue_end, prev_pc);
1462
1463 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1464 }
1465
1466 /* Return 1 if the ARM instruction INSN restores SP in the epilogue, 0
1467 otherwise. */
1468
1469 static int
1470 arm_instruction_restores_sp (unsigned int insn)
1471 {
1472 if (bits (insn, 28, 31) != INST_NV)
1473 {
1474 if ((insn & 0x0df0f000) == 0x0080d000
1475 /* ADD SP (register or immediate). */
1476 || (insn & 0x0df0f000) == 0x0040d000
1477 /* SUB SP (register or immediate). */
1478 || (insn & 0x0ffffff0) == 0x01a0d000
1479 /* MOV SP. */
1480 || (insn & 0x0fff0000) == 0x08bd0000
1481 /* POP (LDMIA). */
1482 || (insn & 0x0fff0000) == 0x049d0000)
1483 /* POP of a single register. */
1484 return 1;
1485 }
1486
1487 return 0;
1488 }
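/* For example, 0xe28dd010 ("add sp, sp, #16") matches the first test
   above, and 0xe8bd8010 ("pop {r4, pc}", i.e. ldmfd sp!, {r4, pc})
   matches the LDMIA test, so either one stops the ARM prologue scan.  */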
1489
1490 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1491 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1492 fill it in. Return the first address not recognized as a prologue
1493 instruction.
1494
1495 We recognize all the instructions typically found in ARM prologues,
1496 plus harmless instructions which can be skipped (either for analysis
1497 purposes, or a more restrictive set that can be skipped when finding
1498 the end of the prologue). */
1499
1500 static CORE_ADDR
1501 arm_analyze_prologue (struct gdbarch *gdbarch,
1502 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1503 struct arm_prologue_cache *cache)
1504 {
1505 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1506 int regno;
1507 CORE_ADDR offset, current_pc;
1508 pv_t regs[ARM_FPS_REGNUM];
1509 CORE_ADDR unrecognized_pc = 0;
1510
1511 /* Search the prologue looking for instructions that set up the
1512 frame pointer, adjust the stack pointer, and save registers.
1513
1514 Be careful, however, and if it doesn't look like a prologue,
1515 don't try to scan it. If, for instance, a frameless function
1516 begins with stmfd sp!, then we will tell ourselves there is
1517 a frame, which will confuse stack traceback, as well as "finish"
1518 and other operations that rely on a knowledge of the stack
1519 traceback. */
1520
1521 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1522 regs[regno] = pv_register (regno, 0);
1523 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1524
1525 for (current_pc = prologue_start;
1526 current_pc < prologue_end;
1527 current_pc += 4)
1528 {
1529 unsigned int insn
1530 = read_code_unsigned_integer (current_pc, 4, byte_order_for_code);
1531
1532 if (insn == 0xe1a0c00d) /* mov ip, sp */
1533 {
1534 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1535 continue;
1536 }
1537 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1538 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1539 {
1540 unsigned imm = insn & 0xff; /* immediate value */
1541 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1542 int rd = bits (insn, 12, 15);
1543 imm = (imm >> rot) | (imm << (32 - rot));
1544 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1545 continue;
1546 }
1547 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1548 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1549 {
1550 unsigned imm = insn & 0xff; /* immediate value */
1551 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1552 int rd = bits (insn, 12, 15);
1553 imm = (imm >> rot) | (imm << (32 - rot));
1554 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1555 continue;
1556 }
1557 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1558 [sp, #-4]! */
1559 {
1560 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1561 break;
1562 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1563 stack.store (regs[ARM_SP_REGNUM], 4,
1564 regs[bits (insn, 12, 15)]);
1565 continue;
1566 }
1567 else if ((insn & 0xffff0000) == 0xe92d0000)
1568 /* stmfd sp!, {..., fp, ip, lr, pc}
1569 or
1570 stmfd sp!, {a1, a2, a3, a4} */
1571 {
1572 int mask = insn & 0xffff;
1573
1574 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1575 break;
1576
1577 /* Calculate offsets of saved registers. */
1578 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1579 if (mask & (1 << regno))
1580 {
1581 regs[ARM_SP_REGNUM]
1582 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1583 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
1584 }
1585 }
1586 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1587 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1588 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1589 {
1590 /* No need to add this to saved_regs -- it's just an arg reg. */
1591 continue;
1592 }
1593 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1594 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1595 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1596 {
1597 /* No need to add this to saved_regs -- it's just an arg reg. */
1598 continue;
1599 }
1600 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1601 { registers } */
1602 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1603 {
1604 /* No need to add this to saved_regs -- it's just arg regs. */
1605 continue;
1606 }
1607 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1608 {
1609 unsigned imm = insn & 0xff; /* immediate value */
1610 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1611 imm = (imm >> rot) | (imm << (32 - rot));
1612 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1613 }
1614 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1615 {
1616 unsigned imm = insn & 0xff; /* immediate value */
1617 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1618 imm = (imm >> rot) | (imm << (32 - rot));
1619 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
1620 }
1621 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1622 [sp, -#c]! */
1623 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1624 {
1625 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1626 break;
1627
1628 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1629 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
1630 stack.store (regs[ARM_SP_REGNUM], 12, regs[regno]);
1631 }
1632 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1633 [sp!] */
1634 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1635 {
1636 int n_saved_fp_regs;
1637 unsigned int fp_start_reg, fp_bound_reg;
1638
1639 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1640 break;
1641
1642 if ((insn & 0x800) == 0x800) /* N0 is set */
1643 {
1644 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1645 n_saved_fp_regs = 3;
1646 else
1647 n_saved_fp_regs = 1;
1648 }
1649 else
1650 {
1651 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1652 n_saved_fp_regs = 2;
1653 else
1654 n_saved_fp_regs = 4;
1655 }
1656
1657 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
1658 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1659 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
1660 {
1661 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1662 stack.store (regs[ARM_SP_REGNUM], 12,
1663 regs[fp_start_reg]);
1664 }
1665 }
1666 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1667 {
1668 /* Allow some special function calls when skipping the
1669 prologue; GCC generates these before storing arguments to
1670 the stack. */
1671 CORE_ADDR dest = BranchDest (current_pc, insn);
1672
1673 if (skip_prologue_function (gdbarch, dest, 0))
1674 continue;
1675 else
1676 break;
1677 }
1678 else if ((insn & 0xf0000000) != 0xe0000000)
1679 break; /* Condition not true, exit early. */
1680 else if (arm_instruction_changes_pc (insn))
1681 /* Don't scan past anything that might change control flow. */
1682 break;
1683 else if (arm_instruction_restores_sp (insn))
1684 {
1685 /* Don't scan past the epilogue. */
1686 break;
1687 }
1688 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1689 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1690 /* Ignore block loads from the stack, potentially copying
1691 parameters from memory. */
1692 continue;
1693 else if ((insn & 0xfc500000) == 0xe4100000
1694 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1695 /* Similarly ignore single loads from the stack. */
1696 continue;
1697 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1698 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1699 register instead of the stack. */
1700 continue;
1701 else
1702 {
1703 /* The optimizer might shove anything into the prologue. If we
1704 are building up the cache (cache != NULL) from scanning the
1705 prologue, we just skip what we don't recognize and scan further
1706 to make the cache as complete as possible. However, if we are
1707 only skipping the prologue, we stop immediately on an
1708 unrecognized instruction. */
1709 unrecognized_pc = current_pc;
1710 if (cache != NULL)
1711 continue;
1712 else
1713 break;
1714 }
1715 }
1716
1717 if (unrecognized_pc == 0)
1718 unrecognized_pc = current_pc;
1719
1720 if (cache)
1721 {
1722 int framereg, framesize;
1723
1724 /* The frame size is just the distance from the frame register
1725 to the original stack pointer. */
1726 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1727 {
1728 /* Frame pointer is fp. */
1729 framereg = ARM_FP_REGNUM;
1730 framesize = -regs[ARM_FP_REGNUM].k;
1731 }
1732 else
1733 {
1734 /* Try the stack pointer... this is a bit desperate. */
1735 framereg = ARM_SP_REGNUM;
1736 framesize = -regs[ARM_SP_REGNUM].k;
1737 }
1738
1739 cache->framereg = framereg;
1740 cache->framesize = framesize;
1741
1742 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1743 if (stack.find_reg (gdbarch, regno, &offset))
1744 cache->saved_regs[regno].addr = offset;
1745 }
1746
1747 if (arm_debug)
1748 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1749 paddress (gdbarch, unrecognized_pc));
1750
1751 return unrecognized_pc;
1752 }
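/* As a worked example, for the canonical APCS prologue

     mov   ip, sp
     stmfd sp!, {fp, ip, lr, pc}
     sub   fp, ip, #4
     sub   sp, sp, #20

   the scan above leaves regs[ARM_FP_REGNUM] equal to the entry SP minus 4,
   so the cache records framereg == ARM_FP_REGNUM and framesize == 4, and
   the saved fp/ip/lr/pc slots are found at offsets -16, -12, -8 and -4
   from the entry SP.  */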
1753
1754 static void
1755 arm_scan_prologue (struct frame_info *this_frame,
1756 struct arm_prologue_cache *cache)
1757 {
1758 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1759 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1760 CORE_ADDR prologue_start, prologue_end;
1761 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1762 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
1763
1764 /* Assume there is no frame until proven otherwise. */
1765 cache->framereg = ARM_SP_REGNUM;
1766 cache->framesize = 0;
1767
1768 /* Check for Thumb prologue. */
1769 if (arm_frame_is_thumb (this_frame))
1770 {
1771 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1772 return;
1773 }
1774
1775 /* Find the function prologue. If we can't find the function in
1776 the symbol table, peek in the stack frame to find the PC. */
1777 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1778 &prologue_end))
1779 {
1780 /* One way to find the end of the prologue (which works well
1781 for unoptimized code) is to do the following:
1782
1783 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1784
1785 if (sal.line == 0)
1786 prologue_end = prev_pc;
1787 else if (sal.end < prologue_end)
1788 prologue_end = sal.end;
1789
1790 This mechanism is very accurate so long as the optimizer
1791 doesn't move any instructions from the function body into the
1792 prologue. If this happens, sal.end will be the last
1793 instruction in the first hunk of prologue code just before
1794 the first instruction that the scheduler has moved from
1795 the body to the prologue.
1796
1797 In order to make sure that we scan all of the prologue
1798 instructions, we use a slightly less accurate mechanism which
1799 may scan more than necessary. To help compensate for this
1800 lack of accuracy, the prologue scanning loop below contains
1801 	 several clauses that will cause the loop to terminate early if
1802 an implausible prologue instruction is encountered.
1803
1804 The expression
1805
1806 prologue_start + 64
1807
1808 is a suitable endpoint since it accounts for the largest
1809 possible prologue plus up to five instructions inserted by
1810 the scheduler. */
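      /* Illustrative sketch (not from the original source): the kind of
	 unoptimized APCS prologue this limit is meant to cover looks
	 roughly like

	     mov   ip, sp
	     stmfd sp!, {fp, ip, lr, pc}
	     sub   fp, ip, #4
	     sub   sp, sp, #<locals>

	 possibly with a handful of scheduled body instructions mixed in,
	 which comfortably fits within the 64-byte window.  */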
1811
1812 if (prologue_end > prologue_start + 64)
1813 {
1814 prologue_end = prologue_start + 64; /* See above. */
1815 }
1816 }
1817 else
1818 {
1819 /* We have no symbol information. Our only option is to assume this
1820 function has a standard stack frame and the normal frame register.
1821 Then, we can find the value of our frame pointer on entrance to
1822 the callee (or at the present moment if this is the innermost frame).
1823 The value stored there should be the address of the stmfd + 8. */
1824 CORE_ADDR frame_loc;
1825 ULONGEST return_value;
1826
1827 /* AAPCS does not use a frame register, so we can abort here. */
1828 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_AAPCS)
1829 return;
1830
1831 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
1832 if (!safe_read_memory_unsigned_integer (frame_loc, 4, byte_order,
1833 &return_value))
1834 return;
1835 else
1836 {
1837 prologue_start = gdbarch_addr_bits_remove
1838 (gdbarch, return_value) - 8;
1839 prologue_end = prologue_start + 64; /* See above. */
1840 }
1841 }
1842
1843 if (prev_pc < prologue_end)
1844 prologue_end = prev_pc;
1845
1846 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1847 }
1848
1849 static struct arm_prologue_cache *
1850 arm_make_prologue_cache (struct frame_info *this_frame)
1851 {
1852 int reg;
1853 struct arm_prologue_cache *cache;
1854 CORE_ADDR unwound_fp;
1855
1856 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
1857 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
1858
1859 arm_scan_prologue (this_frame, cache);
1860
1861 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
1862 if (unwound_fp == 0)
1863 return cache;
1864
1865 cache->prev_sp = unwound_fp + cache->framesize;
1866
1867 /* Calculate actual addresses of saved registers using offsets
1868 determined by arm_scan_prologue. */
1869 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
1870 if (trad_frame_addr_p (cache->saved_regs, reg))
1871 cache->saved_regs[reg].addr += cache->prev_sp;
1872
1873 return cache;
1874 }
1875
1876 /* Implementation of the stop_reason hook for arm_prologue frames. */
1877
1878 static enum unwind_stop_reason
1879 arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
1880 void **this_cache)
1881 {
1882 struct arm_prologue_cache *cache;
1883 CORE_ADDR pc;
1884
1885 if (*this_cache == NULL)
1886 *this_cache = arm_make_prologue_cache (this_frame);
1887 cache = (struct arm_prologue_cache *) *this_cache;
1888
1889 /* This is meant to halt the backtrace at "_start". */
1890 pc = get_frame_pc (this_frame);
1891 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
1892 return UNWIND_OUTERMOST;
1893
1894 /* If we've hit a wall, stop. */
1895 if (cache->prev_sp == 0)
1896 return UNWIND_OUTERMOST;
1897
1898 return UNWIND_NO_REASON;
1899 }
1900
1901 /* Our frame ID for a normal frame is the current function's starting PC
1902 and the caller's SP when we were called. */
1903
1904 static void
1905 arm_prologue_this_id (struct frame_info *this_frame,
1906 void **this_cache,
1907 struct frame_id *this_id)
1908 {
1909 struct arm_prologue_cache *cache;
1910 struct frame_id id;
1911 CORE_ADDR pc, func;
1912
1913 if (*this_cache == NULL)
1914 *this_cache = arm_make_prologue_cache (this_frame);
1915 cache = (struct arm_prologue_cache *) *this_cache;
1916
1917 /* Use function start address as part of the frame ID. If we cannot
1918 identify the start address (due to missing symbol information),
1919 fall back to just using the current PC. */
1920 pc = get_frame_pc (this_frame);
1921 func = get_frame_func (this_frame);
1922 if (!func)
1923 func = pc;
1924
1925 id = frame_id_build (cache->prev_sp, func);
1926 *this_id = id;
1927 }
1928
1929 static struct value *
1930 arm_prologue_prev_register (struct frame_info *this_frame,
1931 void **this_cache,
1932 int prev_regnum)
1933 {
1934 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1935 struct arm_prologue_cache *cache;
1936
1937 if (*this_cache == NULL)
1938 *this_cache = arm_make_prologue_cache (this_frame);
1939 cache = (struct arm_prologue_cache *) *this_cache;
1940
1941 /* If we are asked to unwind the PC, then we need to return the LR
1942 instead. The prologue may save PC, but it will point into this
1943 frame's prologue, not the next frame's resume location. Also
1944 strip the saved T bit. A valid LR may have the low bit set, but
1945 a valid PC never does. */
1946 if (prev_regnum == ARM_PC_REGNUM)
1947 {
1948 CORE_ADDR lr;
1949
1950 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1951 return frame_unwind_got_constant (this_frame, prev_regnum,
1952 arm_addr_bits_remove (gdbarch, lr));
1953 }
1954
1955 /* SP is generally not saved to the stack, but this frame is
1956 identified by the next frame's stack pointer at the time of the call.
1957 The value was already reconstructed into PREV_SP. */
1958 if (prev_regnum == ARM_SP_REGNUM)
1959 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
1960
1961 /* The CPSR may have been changed by the call instruction and by the
1962 called function. The only bit we can reconstruct is the T bit,
1963 by checking the low bit of LR as of the call. This is a reliable
1964 indicator of Thumb-ness except for some ARM v4T pre-interworking
1965 Thumb code, which could get away with a clear low bit as long as
1966 the called function did not use bx. Guess that all other
1967 bits are unchanged; the condition flags are presumably lost,
1968 but the processor status is likely valid. */
1969 if (prev_regnum == ARM_PS_REGNUM)
1970 {
1971 CORE_ADDR lr, cpsr;
1972 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
1973
1974 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
1975 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1976 if (IS_THUMB_ADDR (lr))
1977 cpsr |= t_bit;
1978 else
1979 cpsr &= ~t_bit;
1980 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
1981 }
1982
1983 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
1984 prev_regnum);
1985 }
1986
1987 struct frame_unwind arm_prologue_unwind = {
1988 NORMAL_FRAME,
1989 arm_prologue_unwind_stop_reason,
1990 arm_prologue_this_id,
1991 arm_prologue_prev_register,
1992 NULL,
1993 default_frame_sniffer
1994 };
1995
1996 /* Maintain a list of ARM exception table entries per objfile, similar to the
1997 list of mapping symbols. We only cache entries for standard ARM-defined
1998 personality routines; the cache will contain only the frame unwinding
1999 instructions associated with the entry (not the descriptors). */
2000
2001 struct arm_exidx_entry
2002 {
2003 CORE_ADDR addr;
2004 gdb_byte *entry;
2005
2006 bool operator< (const arm_exidx_entry &other) const
2007 {
2008 return addr < other.addr;
2009 }
2010 };
2011
2012 struct arm_exidx_data
2013 {
2014 std::vector<std::vector<arm_exidx_entry>> section_maps;
2015 };
2016
2017 /* Per-BFD key to store exception handling information. */
2018 static const struct bfd_key<arm_exidx_data> arm_exidx_data_key;
2019
2020 static struct obj_section *
2021 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2022 {
2023 struct obj_section *osect;
2024
2025 ALL_OBJFILE_OSECTIONS (objfile, osect)
2026 if (bfd_section_flags (osect->the_bfd_section) & SEC_ALLOC)
2027 {
2028 bfd_vma start, size;
2029 start = bfd_section_vma (osect->the_bfd_section);
2030 size = bfd_section_size (osect->the_bfd_section);
2031
2032 if (start <= vma && vma < start + size)
2033 return osect;
2034 }
2035
2036 return NULL;
2037 }
2038
2039 /* Parse contents of exception table and exception index sections
2040 of OBJFILE, and fill in the exception table entry cache.
2041
2042 For each entry that refers to a standard ARM-defined personality
2043 routine, extract the frame unwinding instructions (from either
2044 the index or the table section). The unwinding instructions
2045 are normalized by:
2046 - extracting them from the rest of the table data
2047 - converting to host endianness
2048 - appending the implicit 0xb0 ("Finish") code
2049
2050 The extracted and normalized instructions are stored for later
2051 retrieval by the arm_find_exidx_entry routine. */
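/* Hedged illustration (not part of the original code): each .ARM.exidx
   entry is a pair of 32-bit words.  The first word is a prel31 offset
   to the start of the function it covers.  The second word is either
   the value 1 (EXIDX_CANTUNWIND), an inline short-form entry whose top
   byte is 0x80 (with unwind opcodes packed into the remaining bytes),
   or a prel31 offset into .ARM.extab where a short-form, long-form, or
   custom-personality entry lives.  The parser below handles exactly
   these cases.  */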
2052
2053 static void
2054 arm_exidx_new_objfile (struct objfile *objfile)
2055 {
2056 struct arm_exidx_data *data;
2057 asection *exidx, *extab;
2058 bfd_vma exidx_vma = 0, extab_vma = 0;
2059 LONGEST i;
2060
2061 /* If we've already touched this file, do nothing. */
2062 if (!objfile || arm_exidx_data_key.get (objfile->obfd) != NULL)
2063 return;
2064
2065 /* Read contents of exception table and index. */
2066 exidx = bfd_get_section_by_name (objfile->obfd, ELF_STRING_ARM_unwind);
2067 gdb::byte_vector exidx_data;
2068 if (exidx)
2069 {
2070 exidx_vma = bfd_section_vma (exidx);
2071 exidx_data.resize (bfd_section_size (exidx));
2072
2073 if (!bfd_get_section_contents (objfile->obfd, exidx,
2074 exidx_data.data (), 0,
2075 exidx_data.size ()))
2076 return;
2077 }
2078
2079 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2080 gdb::byte_vector extab_data;
2081 if (extab)
2082 {
2083 extab_vma = bfd_section_vma (extab);
2084 extab_data.resize (bfd_section_size (extab));
2085
2086 if (!bfd_get_section_contents (objfile->obfd, extab,
2087 extab_data.data (), 0,
2088 extab_data.size ()))
2089 return;
2090 }
2091
2092 /* Allocate exception table data structure. */
2093 data = arm_exidx_data_key.emplace (objfile->obfd);
2094 data->section_maps.resize (objfile->obfd->section_count);
2095
2096 /* Fill in exception table. */
2097 for (i = 0; i < exidx_data.size () / 8; i++)
2098 {
2099 struct arm_exidx_entry new_exidx_entry;
2100 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data.data () + i * 8);
2101 bfd_vma val = bfd_h_get_32 (objfile->obfd,
2102 exidx_data.data () + i * 8 + 4);
2103 bfd_vma addr = 0, word = 0;
2104 int n_bytes = 0, n_words = 0;
2105 struct obj_section *sec;
2106 gdb_byte *entry = NULL;
2107
2108 /* Extract address of start of function. */
2109 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2110 idx += exidx_vma + i * 8;
2111
2112 /* Find section containing function and compute section offset. */
2113 sec = arm_obj_section_from_vma (objfile, idx);
2114 if (sec == NULL)
2115 continue;
2116 idx -= bfd_section_vma (sec->the_bfd_section);
2117
2118 /* Determine address of exception table entry. */
2119 if (val == 1)
2120 {
2121 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2122 }
2123 else if ((val & 0xff000000) == 0x80000000)
2124 {
2125 /* Exception table entry embedded in .ARM.exidx
2126 -- must be short form. */
2127 word = val;
2128 n_bytes = 3;
2129 }
2130 else if (!(val & 0x80000000))
2131 {
2132 /* Exception table entry in .ARM.extab. */
2133 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2134 addr += exidx_vma + i * 8 + 4;
2135
2136 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_data.size ())
2137 {
2138 word = bfd_h_get_32 (objfile->obfd,
2139 extab_data.data () + addr - extab_vma);
2140 addr += 4;
2141
2142 if ((word & 0xff000000) == 0x80000000)
2143 {
2144 /* Short form. */
2145 n_bytes = 3;
2146 }
2147 else if ((word & 0xff000000) == 0x81000000
2148 || (word & 0xff000000) == 0x82000000)
2149 {
2150 /* Long form. */
2151 n_bytes = 2;
2152 n_words = ((word >> 16) & 0xff);
2153 }
2154 else if (!(word & 0x80000000))
2155 {
2156 bfd_vma pers;
2157 struct obj_section *pers_sec;
2158 int gnu_personality = 0;
2159
2160 /* Custom personality routine. */
2161 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2162 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2163
2164 /* Check whether we've got one of the variants of the
2165 GNU personality routines. */
2166 pers_sec = arm_obj_section_from_vma (objfile, pers);
2167 if (pers_sec)
2168 {
2169 static const char *personality[] =
2170 {
2171 "__gcc_personality_v0",
2172 "__gxx_personality_v0",
2173 "__gcj_personality_v0",
2174 "__gnu_objc_personality_v0",
2175 NULL
2176 };
2177
2178 CORE_ADDR pc = pers + obj_section_offset (pers_sec);
2179 int k;
2180
2181 for (k = 0; personality[k]; k++)
2182 if (lookup_minimal_symbol_by_pc_name
2183 (pc, personality[k], objfile))
2184 {
2185 gnu_personality = 1;
2186 break;
2187 }
2188 }
2189
2190 /* If so, the next word contains a word count in the high
2191 byte, followed by the same unwind instructions as the
2192 pre-defined forms. */
2193 if (gnu_personality
2194 && addr + 4 <= extab_vma + extab_data.size ())
2195 {
2196 word = bfd_h_get_32 (objfile->obfd,
2197 (extab_data.data ()
2198 + addr - extab_vma));
2199 addr += 4;
2200 n_bytes = 3;
2201 n_words = ((word >> 24) & 0xff);
2202 }
2203 }
2204 }
2205 }
2206
2207 /* Sanity check address. */
2208 if (n_words)
2209 if (addr < extab_vma
2210 || addr + 4 * n_words > extab_vma + extab_data.size ())
2211 n_words = n_bytes = 0;
2212
2213 /* The unwind instructions reside in WORD (only the N_BYTES least
2214 significant bytes are valid), followed by N_WORDS words in the
2215 extab section starting at ADDR. */
2216 if (n_bytes || n_words)
2217 {
2218 gdb_byte *p = entry
2219 = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
2220 n_bytes + n_words * 4 + 1);
2221
2222 while (n_bytes--)
2223 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2224
2225 while (n_words--)
2226 {
2227 word = bfd_h_get_32 (objfile->obfd,
2228 extab_data.data () + addr - extab_vma);
2229 addr += 4;
2230
2231 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2232 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2233 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2234 *p++ = (gdb_byte) (word & 0xff);
2235 }
2236
2237 /* Implied "Finish" to terminate the list. */
2238 *p++ = 0xb0;
2239 }
2240
2241       /* Push the entry onto the vector.  Entries are guaranteed to
2242 	 appear in order of increasing address.  */
2243 new_exidx_entry.addr = idx;
2244 new_exidx_entry.entry = entry;
2245 data->section_maps[sec->the_bfd_section->index].push_back
2246 (new_exidx_entry);
2247 }
2248 }
2249
2250 /* Search for the exception table entry covering MEMADDR. If one is found,
2251 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2252 set *START to the start of the region covered by this entry. */
2253
2254 static gdb_byte *
2255 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2256 {
2257 struct obj_section *sec;
2258
2259 sec = find_pc_section (memaddr);
2260 if (sec != NULL)
2261 {
2262 struct arm_exidx_data *data;
2263 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2264
2265 data = arm_exidx_data_key.get (sec->objfile->obfd);
2266 if (data != NULL)
2267 {
2268 std::vector<arm_exidx_entry> &map
2269 = data->section_maps[sec->the_bfd_section->index];
2270 if (!map.empty ())
2271 {
2272 auto idx = std::lower_bound (map.begin (), map.end (), map_key);
2273
2274 	      /* std::lower_bound finds the earliest ordered insertion
2275 		 point.  If the entry at that point starts at this exact
2276 		 address, we use it; otherwise, the preceding exception
2277 		 table entry covers this address.  */
2278 if (idx < map.end ())
2279 {
2280 if (idx->addr == map_key.addr)
2281 {
2282 if (start)
2283 *start = idx->addr + obj_section_addr (sec);
2284 return idx->entry;
2285 }
2286 }
2287
2288 if (idx > map.begin ())
2289 {
2290 idx = idx - 1;
2291 if (start)
2292 *start = idx->addr + obj_section_addr (sec);
2293 return idx->entry;
2294 }
2295 }
2296 }
2297 }
2298
2299 return NULL;
2300 }
2301
2302 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2303 instruction list from the ARM exception table entry ENTRY, allocate and
2304 return a prologue cache structure describing how to unwind this frame.
2305
2306 Return NULL if the unwinding instruction list contains a "spare",
2307 "reserved" or "refuse to unwind" instruction as defined in section
2308 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2309 for the ARM Architecture" document. */
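/* Hedged worked example (not part of the original code): for a frame
   set up by "push {r4, lr}; sub sp, sp, #16", a typical unwind
   instruction list would be 0x03 0xa8 0xb0:

     0x03  vsp += (3 << 2) + 4 = 16    (undo the "sub sp")
     0xa8  pop {r4, lr}                (undo the "push")
     0xb0  finish; PC was never popped, so it is copied from LR.

   The decoder below implements each of these opcode classes.  */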
2310
2311 static struct arm_prologue_cache *
2312 arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2313 {
2314 CORE_ADDR vsp = 0;
2315 int vsp_valid = 0;
2316
2317 struct arm_prologue_cache *cache;
2318 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2319 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2320
2321 for (;;)
2322 {
2323 gdb_byte insn;
2324
2325       /* Whenever we reload SP, we have to retrieve its actual value
2326 	 in the current frame.  */
2327 if (!vsp_valid)
2328 {
2329 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2330 {
2331 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2332 vsp = get_frame_register_unsigned (this_frame, reg);
2333 }
2334 else
2335 {
2336 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
2337 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2338 }
2339
2340 vsp_valid = 1;
2341 }
2342
2343 /* Decode next unwind instruction. */
2344 insn = *entry++;
2345
2346 if ((insn & 0xc0) == 0)
2347 {
2348 int offset = insn & 0x3f;
2349 vsp += (offset << 2) + 4;
2350 }
2351 else if ((insn & 0xc0) == 0x40)
2352 {
2353 int offset = insn & 0x3f;
2354 vsp -= (offset << 2) + 4;
2355 }
2356 else if ((insn & 0xf0) == 0x80)
2357 {
2358 int mask = ((insn & 0xf) << 8) | *entry++;
2359 int i;
2360
2361 /* The special case of an all-zero mask identifies
2362 "Refuse to unwind". We return NULL to fall back
2363 to the prologue analyzer. */
2364 if (mask == 0)
2365 return NULL;
2366
2367 /* Pop registers r4..r15 under mask. */
2368 for (i = 0; i < 12; i++)
2369 if (mask & (1 << i))
2370 {
2371 cache->saved_regs[4 + i].addr = vsp;
2372 vsp += 4;
2373 }
2374
2375 /* Special-case popping SP -- we need to reload vsp. */
2376 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2377 vsp_valid = 0;
2378 }
2379 else if ((insn & 0xf0) == 0x90)
2380 {
2381 int reg = insn & 0xf;
2382
2383 /* Reserved cases. */
2384 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2385 return NULL;
2386
2387 /* Set SP from another register and mark VSP for reload. */
2388 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2389 vsp_valid = 0;
2390 }
2391 else if ((insn & 0xf0) == 0xa0)
2392 {
2393 int count = insn & 0x7;
2394 int pop_lr = (insn & 0x8) != 0;
2395 int i;
2396
2397 /* Pop r4..r[4+count]. */
2398 for (i = 0; i <= count; i++)
2399 {
2400 cache->saved_regs[4 + i].addr = vsp;
2401 vsp += 4;
2402 }
2403
2404 /* If indicated by flag, pop LR as well. */
2405 if (pop_lr)
2406 {
2407 cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
2408 vsp += 4;
2409 }
2410 }
2411 else if (insn == 0xb0)
2412 {
2413 /* We could only have updated PC by popping into it; if so, it
2414 	     will show up as an address.  Otherwise, copy LR into PC.  */
2415 if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
2416 cache->saved_regs[ARM_PC_REGNUM]
2417 = cache->saved_regs[ARM_LR_REGNUM];
2418
2419 /* We're done. */
2420 break;
2421 }
2422 else if (insn == 0xb1)
2423 {
2424 int mask = *entry++;
2425 int i;
2426
2427 	  /* An all-zero mask or a mask >= 16 is "spare".  */
2428 if (mask == 0 || mask >= 16)
2429 return NULL;
2430
2431 /* Pop r0..r3 under mask. */
2432 for (i = 0; i < 4; i++)
2433 if (mask & (1 << i))
2434 {
2435 cache->saved_regs[i].addr = vsp;
2436 vsp += 4;
2437 }
2438 }
2439 else if (insn == 0xb2)
2440 {
2441 ULONGEST offset = 0;
2442 unsigned shift = 0;
2443
2444 do
2445 {
2446 offset |= (*entry & 0x7f) << shift;
2447 shift += 7;
2448 }
2449 while (*entry++ & 0x80);
2450
2451 vsp += 0x204 + (offset << 2);
2452 }
2453 else if (insn == 0xb3)
2454 {
2455 int start = *entry >> 4;
2456 int count = (*entry++) & 0xf;
2457 int i;
2458
2459 /* Only registers D0..D15 are valid here. */
2460 if (start + count >= 16)
2461 return NULL;
2462
2463 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2464 for (i = 0; i <= count; i++)
2465 {
2466 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2467 vsp += 8;
2468 }
2469
2470 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2471 vsp += 4;
2472 }
2473 else if ((insn & 0xf8) == 0xb8)
2474 {
2475 int count = insn & 0x7;
2476 int i;
2477
2478 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2479 for (i = 0; i <= count; i++)
2480 {
2481 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2482 vsp += 8;
2483 }
2484
2485 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2486 vsp += 4;
2487 }
2488 else if (insn == 0xc6)
2489 {
2490 int start = *entry >> 4;
2491 int count = (*entry++) & 0xf;
2492 int i;
2493
2494 /* Only registers WR0..WR15 are valid. */
2495 if (start + count >= 16)
2496 return NULL;
2497
2498 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2499 for (i = 0; i <= count; i++)
2500 {
2501 cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
2502 vsp += 8;
2503 }
2504 }
2505 else if (insn == 0xc7)
2506 {
2507 int mask = *entry++;
2508 int i;
2509
2510 	  /* An all-zero mask or a mask >= 16 is "spare".  */
2511 if (mask == 0 || mask >= 16)
2512 return NULL;
2513
2514 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2515 for (i = 0; i < 4; i++)
2516 if (mask & (1 << i))
2517 {
2518 cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
2519 vsp += 4;
2520 }
2521 }
2522 else if ((insn & 0xf8) == 0xc0)
2523 {
2524 int count = insn & 0x7;
2525 int i;
2526
2527 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2528 for (i = 0; i <= count; i++)
2529 {
2530 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
2531 vsp += 8;
2532 }
2533 }
2534 else if (insn == 0xc8)
2535 {
2536 int start = *entry >> 4;
2537 int count = (*entry++) & 0xf;
2538 int i;
2539
2540 /* Only registers D0..D31 are valid. */
2541 if (start + count >= 16)
2542 return NULL;
2543
2544 /* Pop VFP double-precision registers
2545 D[16+start]..D[16+start+count]. */
2546 for (i = 0; i <= count; i++)
2547 {
2548 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
2549 vsp += 8;
2550 }
2551 }
2552 else if (insn == 0xc9)
2553 {
2554 int start = *entry >> 4;
2555 int count = (*entry++) & 0xf;
2556 int i;
2557
2558 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2559 for (i = 0; i <= count; i++)
2560 {
2561 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2562 vsp += 8;
2563 }
2564 }
2565 else if ((insn & 0xf8) == 0xd0)
2566 {
2567 int count = insn & 0x7;
2568 int i;
2569
2570 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2571 for (i = 0; i <= count; i++)
2572 {
2573 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2574 vsp += 8;
2575 }
2576 }
2577 else
2578 {
2579 /* Everything else is "spare". */
2580 return NULL;
2581 }
2582 }
2583
2584 /* If we restore SP from a register, assume this was the frame register.
2585 Otherwise just fall back to SP as frame register. */
2586 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2587 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2588 else
2589 cache->framereg = ARM_SP_REGNUM;
2590
2591 /* Determine offset to previous frame. */
2592 cache->framesize
2593 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2594
2595 /* We already got the previous SP. */
2596 cache->prev_sp = vsp;
2597
2598 return cache;
2599 }
2600
2601 /* Unwinding via ARM exception table entries. Note that the sniffer
2602 already computes a filled-in prologue cache, which is then used
2603 with the same arm_prologue_this_id and arm_prologue_prev_register
2604 routines also used for prologue-parsing based unwinding. */
2605
2606 static int
2607 arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2608 struct frame_info *this_frame,
2609 void **this_prologue_cache)
2610 {
2611 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2612 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2613 CORE_ADDR addr_in_block, exidx_region, func_start;
2614 struct arm_prologue_cache *cache;
2615 gdb_byte *entry;
2616
2617 /* See if we have an ARM exception table entry covering this address. */
2618 addr_in_block = get_frame_address_in_block (this_frame);
2619 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2620 if (!entry)
2621 return 0;
2622
2623 /* The ARM exception table does not describe unwind information
2624 for arbitrary PC values, but is guaranteed to be correct only
2625 at call sites. We have to decide here whether we want to use
2626 ARM exception table information for this frame, or fall back
2627 to using prologue parsing. (Note that if we have DWARF CFI,
2628 this sniffer isn't even called -- CFI is always preferred.)
2629
2630 Before we make this decision, however, we check whether we
2631 actually have *symbol* information for the current frame.
2632 If not, prologue parsing would not work anyway, so we might
2633 as well use the exception table and hope for the best. */
2634 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2635 {
2636 int exc_valid = 0;
2637
2638 /* If the next frame is "normal", we are at a call site in this
2639 frame, so exception information is guaranteed to be valid. */
2640 if (get_next_frame (this_frame)
2641 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2642 exc_valid = 1;
2643
2644 /* We also assume exception information is valid if we're currently
2645 blocked in a system call. The system library is supposed to
2646 ensure this, so that e.g. pthread cancellation works. */
2647 if (arm_frame_is_thumb (this_frame))
2648 {
2649 ULONGEST insn;
2650
2651 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 2,
2652 2, byte_order_for_code, &insn)
2653 && (insn & 0xff00) == 0xdf00 /* svc */)
2654 exc_valid = 1;
2655 }
2656 else
2657 {
2658 ULONGEST insn;
2659
2660 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 4,
2661 4, byte_order_for_code, &insn)
2662 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2663 exc_valid = 1;
2664 }
2665
2666 /* Bail out if we don't know that exception information is valid. */
2667 if (!exc_valid)
2668 return 0;
2669
2670 /* The ARM exception index does not mark the *end* of the region
2671 covered by the entry, and some functions will not have any entry.
2672 To correctly recognize the end of the covered region, the linker
2673 should have inserted dummy records with a CANTUNWIND marker.
2674
2675 Unfortunately, current versions of GNU ld do not reliably do
2676 this, and thus we may have found an incorrect entry above.
2677 As a (temporary) sanity check, we only use the entry if it
2678 lies *within* the bounds of the function. Note that this check
2679 might reject perfectly valid entries that just happen to cover
2680 multiple functions; therefore this check ought to be removed
2681 once the linker is fixed. */
2682 if (func_start > exidx_region)
2683 return 0;
2684 }
2685
2686 /* Decode the list of unwinding instructions into a prologue cache.
2687 Note that this may fail due to e.g. a "refuse to unwind" code. */
2688 cache = arm_exidx_fill_cache (this_frame, entry);
2689 if (!cache)
2690 return 0;
2691
2692 *this_prologue_cache = cache;
2693 return 1;
2694 }
2695
2696 struct frame_unwind arm_exidx_unwind = {
2697 NORMAL_FRAME,
2698 default_frame_unwind_stop_reason,
2699 arm_prologue_this_id,
2700 arm_prologue_prev_register,
2701 NULL,
2702 arm_exidx_unwind_sniffer
2703 };
2704
2705 static struct arm_prologue_cache *
2706 arm_make_epilogue_frame_cache (struct frame_info *this_frame)
2707 {
2708 struct arm_prologue_cache *cache;
2709 int reg;
2710
2711 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2712 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2713
2714   /* Still rely on the offsets calculated from the prologue.  */
2715 arm_scan_prologue (this_frame, cache);
2716
2717   /* Since we are in the epilogue, the SP has been restored.  */
2718 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2719
2720 /* Calculate actual addresses of saved registers using offsets
2721 determined by arm_scan_prologue. */
2722 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2723 if (trad_frame_addr_p (cache->saved_regs, reg))
2724 cache->saved_regs[reg].addr += cache->prev_sp;
2725
2726 return cache;
2727 }
2728
2729 /* Implementation of function hook 'this_id' in
2730    'struct frame_unwind' for epilogue unwinder.  */
2731
2732 static void
2733 arm_epilogue_frame_this_id (struct frame_info *this_frame,
2734 void **this_cache,
2735 struct frame_id *this_id)
2736 {
2737 struct arm_prologue_cache *cache;
2738 CORE_ADDR pc, func;
2739
2740 if (*this_cache == NULL)
2741 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2742 cache = (struct arm_prologue_cache *) *this_cache;
2743
2744 /* Use function start address as part of the frame ID. If we cannot
2745 identify the start address (due to missing symbol information),
2746 fall back to just using the current PC. */
2747 pc = get_frame_pc (this_frame);
2748 func = get_frame_func (this_frame);
2749 if (func == 0)
2750 func = pc;
2751
2752 (*this_id) = frame_id_build (cache->prev_sp, pc);
2753 }
2754
2755 /* Implementation of function hook 'prev_register' in
2756    'struct frame_unwind' for epilogue unwinder.  */
2757
2758 static struct value *
2759 arm_epilogue_frame_prev_register (struct frame_info *this_frame,
2760 void **this_cache, int regnum)
2761 {
2762 if (*this_cache == NULL)
2763 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2764
2765 return arm_prologue_prev_register (this_frame, this_cache, regnum);
2766 }
2767
2768 static int arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch,
2769 CORE_ADDR pc);
2770 static int thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch,
2771 CORE_ADDR pc);
2772
2773 /* Implementation of function hook 'sniffer' in
2774    'struct frame_unwind' for epilogue unwinder.  */
2775
2776 static int
2777 arm_epilogue_frame_sniffer (const struct frame_unwind *self,
2778 struct frame_info *this_frame,
2779 void **this_prologue_cache)
2780 {
2781 if (frame_relative_level (this_frame) == 0)
2782 {
2783 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2784 CORE_ADDR pc = get_frame_pc (this_frame);
2785
2786 if (arm_frame_is_thumb (this_frame))
2787 return thumb_stack_frame_destroyed_p (gdbarch, pc);
2788 else
2789 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
2790 }
2791 else
2792 return 0;
2793 }
2794
2795 /* Frame unwinder from epilogue. */
2796
2797 static const struct frame_unwind arm_epilogue_frame_unwind =
2798 {
2799 NORMAL_FRAME,
2800 default_frame_unwind_stop_reason,
2801 arm_epilogue_frame_this_id,
2802 arm_epilogue_frame_prev_register,
2803 NULL,
2804 arm_epilogue_frame_sniffer,
2805 };
2806
2807 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2808 trampoline, return the target PC. Otherwise return 0.
2809
2810 void call0a (char c, short s, int i, long l) {}
2811
2812 int main (void)
2813 {
2814 (*pointer_to_call0a) (c, s, i, l);
2815 }
2816
2817 Instead of calling a stub library function _call_via_xx (xx is
2818 the register name), GCC may inline the trampoline in the object
2819 file as below (register r2 has the address of call0a).
2820
2821 .global main
2822 .type main, %function
2823 ...
2824 bl .L1
2825 ...
2826 .size main, .-main
2827
2828 .L1:
2829 bx r2
2830
2831 The trampoline 'bx r2' doesn't belong to main. */
2832
2833 static CORE_ADDR
2834 arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2835 {
2836   /* The heuristic for recognizing such a trampoline is that FRAME is
2837      executing in Thumb mode and the instruction at PC is 'bx Rm'.  */
2838 if (arm_frame_is_thumb (frame))
2839 {
2840 gdb_byte buf[2];
2841
2842 if (target_read_memory (pc, buf, 2) == 0)
2843 {
2844 struct gdbarch *gdbarch = get_frame_arch (frame);
2845 enum bfd_endian byte_order_for_code
2846 = gdbarch_byte_order_for_code (gdbarch);
2847 uint16_t insn
2848 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2849
2850 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2851 {
2852 CORE_ADDR dest
2853 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2854
2855 	      /* Clear the LSB so that the gdb core sets the step-resume
2856 		 breakpoint at the right address.  */
2857 return UNMAKE_THUMB_ADDR (dest);
2858 }
2859 }
2860 }
2861
2862 return 0;
2863 }
2864
2865 static struct arm_prologue_cache *
2866 arm_make_stub_cache (struct frame_info *this_frame)
2867 {
2868 struct arm_prologue_cache *cache;
2869
2870 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2871 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2872
2873 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2874
2875 return cache;
2876 }
2877
2878 /* Our frame ID for a stub frame is the current SP and LR. */
2879
2880 static void
2881 arm_stub_this_id (struct frame_info *this_frame,
2882 void **this_cache,
2883 struct frame_id *this_id)
2884 {
2885 struct arm_prologue_cache *cache;
2886
2887 if (*this_cache == NULL)
2888 *this_cache = arm_make_stub_cache (this_frame);
2889 cache = (struct arm_prologue_cache *) *this_cache;
2890
2891 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
2892 }
2893
2894 static int
2895 arm_stub_unwind_sniffer (const struct frame_unwind *self,
2896 struct frame_info *this_frame,
2897 void **this_prologue_cache)
2898 {
2899 CORE_ADDR addr_in_block;
2900 gdb_byte dummy[4];
2901 CORE_ADDR pc, start_addr;
2902 const char *name;
2903
2904 addr_in_block = get_frame_address_in_block (this_frame);
2905 pc = get_frame_pc (this_frame);
2906 if (in_plt_section (addr_in_block)
2907       /* We also use the stub unwinder if the target memory is unreadable,
2908 	 to avoid having the prologue unwinder try to read it.  */
2909 || target_read_memory (pc, dummy, 4) != 0)
2910 return 1;
2911
2912 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2913 && arm_skip_bx_reg (this_frame, pc) != 0)
2914 return 1;
2915
2916 return 0;
2917 }
2918
2919 struct frame_unwind arm_stub_unwind = {
2920 NORMAL_FRAME,
2921 default_frame_unwind_stop_reason,
2922 arm_stub_this_id,
2923 arm_prologue_prev_register,
2924 NULL,
2925 arm_stub_unwind_sniffer
2926 };
2927
2928 /* Store, into CACHE->saved_regs, the addresses of the saved
2929    registers of the frame described by THIS_FRAME.  CACHE is
2930    returned.  */
2931
2932 static struct arm_prologue_cache *
2933 arm_m_exception_cache (struct frame_info *this_frame)
2934 {
2935 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2936 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2937 struct arm_prologue_cache *cache;
2938 CORE_ADDR lr;
2939 CORE_ADDR sp;
2940 CORE_ADDR unwound_sp;
2941 LONGEST xpsr;
2942 uint32_t exc_return;
2943 uint32_t process_stack_used;
2944 uint32_t extended_frame_used;
2945 uint32_t secure_stack_used;
2946
2947 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2948 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2949
2950   /* ARMv7-M Architecture Reference "B1.5.6 Exception entry behavior"
2951      describes which bits in LR define which stack was used prior to the
2952      exception and whether the FPU was in use (causing an extended stack frame).  */
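  /* Hedged illustration (not part of the original code), matching the
     bit tests below: LR == 0xFFFFFFFD has SPSEL (bit 2) and FTYPE
     (bit 4) set, i.e. return to Thread mode using PSP with a standard
     (non-FPU) frame, while LR == 0xFFFFFFE9 has both bits clear, i.e.
     return using MSP with an extended frame that also stacked the FP
     registers.  */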
2953
2954 lr = get_frame_register_unsigned (this_frame, ARM_LR_REGNUM);
2955 sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2956
2957 /* Check EXC_RETURN indicator bits. */
2958 exc_return = (((lr >> 28) & 0xf) == 0xf);
2959
2960   /* Check EXC_RETURN bit SPSEL to see if the Main or the Thread (process) stack was used.  */
2961 process_stack_used = ((lr & (1 << 2)) != 0);
2962 if (exc_return && process_stack_used)
2963 {
2964       /* Thread (process) stack used.
2965 	 Potentially this could be another register defined by the target,
2966 	 but PSP can be considered a standard name for the "Process Stack
2967 	 Pointer".  To be fully aware of system registers like MSP and PSP,
2968 	 these could be added to a separate XML arm-m-system-profile that is
2969 	 valid for ARMv6-M and ARMv7-M architectures.  Also, to be able to
2970 	 debug e.g. a corefile off-line, these registers must be defined by
2971 	 GDB and included in the corefile regsets.  */
2972
2973 int psp_regnum = user_reg_map_name_to_regnum (gdbarch, "psp", -1);
2974 if (psp_regnum == -1)
2975 {
2976 /* Thread (process) stack could not be fetched,
2977 give warning and exit. */
2978
2979 warning (_("no PSP thread stack unwinding supported."));
2980
2981 	  /* Terminate any further stack unwinding by referring to self.  */
2982 cache->prev_sp = sp;
2983 return cache;
2984 }
2985 else
2986 {
2987 /* Thread (process) stack used, use PSP as SP. */
2988 unwound_sp = get_frame_register_unsigned (this_frame, psp_regnum);
2989 }
2990 }
2991 else
2992 {
2993 /* Main stack used, use MSP as SP. */
2994 unwound_sp = sp;
2995 }
2996
2997 /* The hardware saves eight 32-bit words, comprising xPSR,
2998 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
2999 "B1.5.6 Exception entry behavior" in
3000 "ARMv7-M Architecture Reference Manual". */
3001 cache->saved_regs[0].addr = unwound_sp;
3002 cache->saved_regs[1].addr = unwound_sp + 4;
3003 cache->saved_regs[2].addr = unwound_sp + 8;
3004 cache->saved_regs[3].addr = unwound_sp + 12;
3005 cache->saved_regs[ARM_IP_REGNUM].addr = unwound_sp + 16;
3006 cache->saved_regs[ARM_LR_REGNUM].addr = unwound_sp + 20;
3007 cache->saved_regs[ARM_PC_REGNUM].addr = unwound_sp + 24;
3008 cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;
3009
3010   /* Check EXC_RETURN bit FTYPE to see if the extended stack frame type
3011      (FPU regs stored) was used.  */
3012 extended_frame_used = ((lr & (1 << 4)) == 0);
3013 if (exc_return && extended_frame_used)
3014 {
3015 int i;
3016 int fpu_regs_stack_offset;
3017
3018       /* This code does not take lazy stacking into account; see "Lazy
3019 	 context save of FP state" in B1.5.7, and also ARM AN298, supported
3020 	 by the Cortex-M4F architecture.
3021 	 To fully handle this, the FPCCR register (Floating-point Context
3022 	 Control Register) needs to be read out and its ASPEN and LSPEN bits
3023 	 checked in order to set up the correct lazily stacked FP registers.
3024 	 This register is located at address 0xE000EF34.  */
3025
3026 /* Extended stack frame type used. */
3027 fpu_regs_stack_offset = unwound_sp + 0x20;
3028 for (i = 0; i < 16; i++)
3029 {
3030 cache->saved_regs[ARM_D0_REGNUM + i].addr = fpu_regs_stack_offset;
3031 fpu_regs_stack_offset += 4;
3032 }
3033 cache->saved_regs[ARM_FPSCR_REGNUM].addr = unwound_sp + 0x60;
3034
3035 /* Offset 0x64 is reserved. */
3036 cache->prev_sp = unwound_sp + 0x68;
3037 }
3038 else
3039 {
3040 /* Standard stack frame type used. */
3041 cache->prev_sp = unwound_sp + 0x20;
3042 }
3043
3044   /* Check EXC_RETURN bit S to see if the Secure or the Non-secure stack was used.  */
3045 secure_stack_used = ((lr & (1 << 6)) != 0);
3046 if (exc_return && secure_stack_used)
3047 {
3048 /* ARMv8-M Exception and interrupt handling is not considered here.
3049 In the ARMv8-M architecture also EXC_RETURN bit S is controlling if
3050 the Secure or Non-secure stack was used. To separate Secure and
3051 Non-secure stacks, processors that are based on the ARMv8-M
3052 architecture support 4 stack pointers: MSP_S, PSP_S, MSP_NS, PSP_NS.
3053 In addition, a stack limit feature is provided using stack limit
3054 registers (accessible using MSR and MRS instructions) in Privileged
3055 level. */
3056 }
3057
3058 /* If bit 9 of the saved xPSR is set, then there is a four-byte
3059 aligner between the top of the 32-byte stack frame and the
3060 previous context's stack pointer. */
3061 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
3062 && (xpsr & (1 << 9)) != 0)
3063 cache->prev_sp += 4;
3064
3065 return cache;
3066 }
3067
3068 /* Implementation of function hook 'this_id' in
3069    'struct frame_unwind'.  */
3070
3071 static void
3072 arm_m_exception_this_id (struct frame_info *this_frame,
3073 void **this_cache,
3074 struct frame_id *this_id)
3075 {
3076 struct arm_prologue_cache *cache;
3077
3078 if (*this_cache == NULL)
3079 *this_cache = arm_m_exception_cache (this_frame);
3080 cache = (struct arm_prologue_cache *) *this_cache;
3081
3082 /* Our frame ID for a stub frame is the current SP and LR. */
3083 *this_id = frame_id_build (cache->prev_sp,
3084 get_frame_pc (this_frame));
3085 }
3086
3087 /* Implementation of function hook 'prev_register' in
3088    'struct frame_unwind'.  */
3089
3090 static struct value *
3091 arm_m_exception_prev_register (struct frame_info *this_frame,
3092 void **this_cache,
3093 int prev_regnum)
3094 {
3095 struct arm_prologue_cache *cache;
3096
3097 if (*this_cache == NULL)
3098 *this_cache = arm_m_exception_cache (this_frame);
3099 cache = (struct arm_prologue_cache *) *this_cache;
3100
3101 /* The value was already reconstructed into PREV_SP. */
3102 if (prev_regnum == ARM_SP_REGNUM)
3103 return frame_unwind_got_constant (this_frame, prev_regnum,
3104 cache->prev_sp);
3105
3106 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
3107 prev_regnum);
3108 }
3109
3110 /* Implementation of function hook 'sniffer' in
3111    'struct frame_unwind'.  */
3112
3113 static int
3114 arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3115 struct frame_info *this_frame,
3116 void **this_prologue_cache)
3117 {
3118 CORE_ADDR this_pc = get_frame_pc (this_frame);
3119
3120 /* No need to check is_m; this sniffer is only registered for
3121 M-profile architectures. */
3122
3123 /* Check if exception frame returns to a magic PC value. */
3124 return arm_m_addr_is_magic (this_pc);
3125 }
3126
3127 /* Frame unwinder for M-profile exceptions. */
3128
3129 struct frame_unwind arm_m_exception_unwind =
3130 {
3131 SIGTRAMP_FRAME,
3132 default_frame_unwind_stop_reason,
3133 arm_m_exception_this_id,
3134 arm_m_exception_prev_register,
3135 NULL,
3136 arm_m_exception_unwind_sniffer
3137 };
3138
3139 static CORE_ADDR
3140 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
3141 {
3142 struct arm_prologue_cache *cache;
3143
3144 if (*this_cache == NULL)
3145 *this_cache = arm_make_prologue_cache (this_frame);
3146 cache = (struct arm_prologue_cache *) *this_cache;
3147
3148 return cache->prev_sp - cache->framesize;
3149 }
3150
3151 struct frame_base arm_normal_base = {
3152 &arm_prologue_unwind,
3153 arm_normal_frame_base,
3154 arm_normal_frame_base,
3155 arm_normal_frame_base
3156 };
3157
3158 static struct value *
3159 arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
3160 int regnum)
3161 {
3162 struct gdbarch * gdbarch = get_frame_arch (this_frame);
3163 CORE_ADDR lr, cpsr;
3164 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
3165
3166 switch (regnum)
3167 {
3168 case ARM_PC_REGNUM:
3169 /* The PC is normally copied from the return column, which
3170 describes saves of LR. However, that version may have an
3171 extra bit set to indicate Thumb state. The bit is not
3172 part of the PC. */
3173 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3174 return frame_unwind_got_constant (this_frame, regnum,
3175 arm_addr_bits_remove (gdbarch, lr));
3176
3177 case ARM_PS_REGNUM:
3178 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3179 cpsr = get_frame_register_unsigned (this_frame, regnum);
3180 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3181 if (IS_THUMB_ADDR (lr))
3182 cpsr |= t_bit;
3183 else
3184 cpsr &= ~t_bit;
3185 return frame_unwind_got_constant (this_frame, regnum, cpsr);
3186
3187 default:
3188 internal_error (__FILE__, __LINE__,
3189 _("Unexpected register %d"), regnum);
3190 }
3191 }
3192
3193 static void
3194 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3195 struct dwarf2_frame_state_reg *reg,
3196 struct frame_info *this_frame)
3197 {
3198 switch (regnum)
3199 {
3200 case ARM_PC_REGNUM:
3201 case ARM_PS_REGNUM:
3202 reg->how = DWARF2_FRAME_REG_FN;
3203 reg->loc.fn = arm_dwarf2_prev_register;
3204 break;
3205 case ARM_SP_REGNUM:
3206 reg->how = DWARF2_FRAME_REG_CFA;
3207 break;
3208 }
3209 }
3210
3211 /* Implement the stack_frame_destroyed_p gdbarch method. */
3212
3213 static int
3214 thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3215 {
3216 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3217 unsigned int insn, insn2;
3218 int found_return = 0, found_stack_adjust = 0;
3219 CORE_ADDR func_start, func_end;
3220 CORE_ADDR scan_pc;
3221 gdb_byte buf[4];
3222
3223 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3224 return 0;
3225
3226 /* The epilogue is a sequence of instructions along the following lines:
3227
3228 - add stack frame size to SP or FP
3229 - [if frame pointer used] restore SP from FP
3230 - restore registers from SP [may include PC]
3231 - a return-type instruction [if PC wasn't already restored]
3232
3233 In a first pass, we scan forward from the current PC and verify the
3234 instructions we find as compatible with this sequence, ending in a
3235 return instruction.
3236
3237 However, this is not sufficient to distinguish indirect function calls
3238 within a function from indirect tail calls in the epilogue in some cases.
3239 Therefore, if we didn't already find any SP-changing instruction during
3240 forward scan, we add a backward scanning heuristic to ensure we actually
3241 are in the epilogue. */
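  /* Hedged worked example (not part of the original code): a minimal
     Thumb epilogue such as

	 add sp, #16        ; 0xb004, restores SP
	 pop {r4, pc}       ; 0xbd10, matches the 0xbd00 "pop ... PC" test

     passes the forward scan below, and the backward scan then confirms
     that the instruction just before PC adjusted the stack.  */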
3242
3243 scan_pc = pc;
3244 while (scan_pc < func_end && !found_return)
3245 {
3246 if (target_read_memory (scan_pc, buf, 2))
3247 break;
3248
3249 scan_pc += 2;
3250 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3251
3252 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3253 found_return = 1;
3254 else if (insn == 0x46f7) /* mov pc, lr */
3255 found_return = 1;
3256 else if (thumb_instruction_restores_sp (insn))
3257 {
3258 if ((insn & 0xff00) == 0xbd00) /* pop <registers, PC> */
3259 found_return = 1;
3260 }
3261 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
3262 {
3263 if (target_read_memory (scan_pc, buf, 2))
3264 break;
3265
3266 scan_pc += 2;
3267 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3268
3269 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3270 {
3271 if (insn2 & 0x8000) /* <registers> include PC. */
3272 found_return = 1;
3273 }
3274 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3275 && (insn2 & 0x0fff) == 0x0b04)
3276 {
3277 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3278 found_return = 1;
3279 }
3280 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3281 && (insn2 & 0x0e00) == 0x0a00)
3282 ;
3283 else
3284 break;
3285 }
3286 else
3287 break;
3288 }
3289
3290 if (!found_return)
3291 return 0;
3292
3293 /* Since any instruction in the epilogue sequence, with the possible
3294 exception of return itself, updates the stack pointer, we need to
3295 scan backwards for at most one instruction. Try either a 16-bit or
3296 a 32-bit instruction. This is just a heuristic, so we do not worry
3297 too much about false positives. */
3298
3299 if (pc - 4 < func_start)
3300 return 0;
3301 if (target_read_memory (pc - 4, buf, 4))
3302 return 0;
3303
3304 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3305 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3306
3307 if (thumb_instruction_restores_sp (insn2))
3308 found_stack_adjust = 1;
3309 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3310 found_stack_adjust = 1;
3311 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3312 && (insn2 & 0x0fff) == 0x0b04)
3313 found_stack_adjust = 1;
3314 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3315 && (insn2 & 0x0e00) == 0x0a00)
3316 found_stack_adjust = 1;
3317
3318 return found_stack_adjust;
3319 }
3320
3321 static int
3322 arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch, CORE_ADDR pc)
3323 {
3324 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3325 unsigned int insn;
3326 int found_return;
3327 CORE_ADDR func_start, func_end;
3328
3329 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3330 return 0;
3331
3332 /* We are in the epilogue if the previous instruction was a stack
3333 adjustment and the next instruction is a possible return (bx, mov
3334 pc, or pop). We could have to scan backwards to find the stack
3335 adjustment, or forwards to find the return, but this is a decent
3336 approximation. First scan forwards. */
3337
3338 found_return = 0;
3339 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3340 if (bits (insn, 28, 31) != INST_NV)
3341 {
3342 if ((insn & 0x0ffffff0) == 0x012fff10)
3343 /* BX. */
3344 found_return = 1;
3345 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3346 /* MOV PC. */
3347 found_return = 1;
3348 else if ((insn & 0x0fff0000) == 0x08bd0000
3349 && (insn & 0x0000c000) != 0)
3350 /* POP (LDMIA), including PC or LR. */
3351 found_return = 1;
3352 }
3353
3354 if (!found_return)
3355 return 0;
3356
3357 /* Scan backwards. This is just a heuristic, so do not worry about
3358 false positives from mode changes. */
3359
3360 if (pc < func_start + 4)
3361 return 0;
3362
3363 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
3364 if (arm_instruction_restores_sp (insn))
3365 return 1;
3366
3367 return 0;
3368 }
3369
3370 /* Implement the stack_frame_destroyed_p gdbarch method. */
3371
3372 static int
3373 arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3374 {
3375 if (arm_pc_is_thumb (gdbarch, pc))
3376 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3377 else
3378 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
3379 }
3380
3381 /* When arguments must be pushed onto the stack, they go on in reverse
3382 order. The code below implements a FILO (stack) to do this. */
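/* Hedged usage sketch (not part of the original code): callers build the
   list with "si = push_stack_item (si, contents, len)" once per argument,
   then drain it with "si = pop_stack_item (si)" after copying each item's
   data out, so items come back in the reverse of the order pushed.  */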
3383
3384 struct stack_item
3385 {
3386 int len;
3387 struct stack_item *prev;
3388 gdb_byte *data;
3389 };
3390
3391 static struct stack_item *
3392 push_stack_item (struct stack_item *prev, const gdb_byte *contents, int len)
3393 {
3394 struct stack_item *si;
3395 si = XNEW (struct stack_item);
3396 si->data = (gdb_byte *) xmalloc (len);
3397 si->len = len;
3398 si->prev = prev;
3399 memcpy (si->data, contents, len);
3400 return si;
3401 }
3402
3403 static struct stack_item *
3404 pop_stack_item (struct stack_item *si)
3405 {
3406 struct stack_item *dead = si;
3407 si = si->prev;
3408 xfree (dead->data);
3409 xfree (dead);
3410 return si;
3411 }
3412
3413 /* Implement the gdbarch type alignment method; it overrides the generic
3414    alignment algorithm for anything that is ARM specific.  */
3415
3416 static ULONGEST
3417 arm_type_align (gdbarch *gdbarch, struct type *t)
3418 {
3419 t = check_typedef (t);
3420 if (t->code () == TYPE_CODE_ARRAY && t->is_vector ())
3421 {
3422       /* Use the natural alignment for vector types (the same as for
3423 	 scalar types), but the maximum alignment is 64 bits.  */
3424 if (TYPE_LENGTH (t) > 8)
3425 return 8;
3426 else
3427 return TYPE_LENGTH (t);
3428 }
3429
3430 /* Allow the common code to calculate the alignment. */
3431 return 0;
3432 }
3433
3434 /* Possible base types for a candidate for passing and returning in
3435 VFP registers. */
3436
3437 enum arm_vfp_cprc_base_type
3438 {
3439 VFP_CPRC_UNKNOWN,
3440 VFP_CPRC_SINGLE,
3441 VFP_CPRC_DOUBLE,
3442 VFP_CPRC_VEC64,
3443 VFP_CPRC_VEC128
3444 };
3445
3446 /* The length of one element of base type B. */
3447
3448 static unsigned
3449 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3450 {
3451 switch (b)
3452 {
3453 case VFP_CPRC_SINGLE:
3454 return 4;
3455 case VFP_CPRC_DOUBLE:
3456 return 8;
3457 case VFP_CPRC_VEC64:
3458 return 8;
3459 case VFP_CPRC_VEC128:
3460 return 16;
3461 default:
3462 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3463 (int) b);
3464 }
3465 }
3466
3467 /* The character ('s', 'd' or 'q') for the type of VFP register used
3468 for passing base type B. */
3469
3470 static int
3471 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3472 {
3473 switch (b)
3474 {
3475 case VFP_CPRC_SINGLE:
3476 return 's';
3477 case VFP_CPRC_DOUBLE:
3478 return 'd';
3479 case VFP_CPRC_VEC64:
3480 return 'd';
3481 case VFP_CPRC_VEC128:
3482 return 'q';
3483 default:
3484 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3485 (int) b);
3486 }
3487 }
3488
3489 /* Determine whether T may be part of a candidate for passing and
3490 returning in VFP registers, ignoring the limit on the total number
3491 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3492 classification of the first valid component found; if it is not
3493 VFP_CPRC_UNKNOWN, all components must have the same classification
3494 as *BASE_TYPE. If it is found that T contains a type not permitted
3495 for passing and returning in VFP registers, a type differently
3496 classified from *BASE_TYPE, or two types differently classified
3497 from each other, return -1, otherwise return the total number of
3498 base-type elements found (possibly 0 in an empty structure or
3499 array). Vector types are not currently supported, matching the
3500 generic AAPCS support. */
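/* Hedged examples (not part of the original code) of the classification
   below: "struct { float x, y; }" yields base type VFP_CPRC_SINGLE with
   count 2; "double[3]" yields VFP_CPRC_DOUBLE with count 3; a struct
   mixing float and double members yields -1 and is therefore not a
   CPRC.  arm_vfp_call_candidate additionally rejects candidates with
   more than four base-type elements.  */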
3501
3502 static int
3503 arm_vfp_cprc_sub_candidate (struct type *t,
3504 enum arm_vfp_cprc_base_type *base_type)
3505 {
3506 t = check_typedef (t);
3507 switch (t->code ())
3508 {
3509 case TYPE_CODE_FLT:
3510 switch (TYPE_LENGTH (t))
3511 {
3512 case 4:
3513 if (*base_type == VFP_CPRC_UNKNOWN)
3514 *base_type = VFP_CPRC_SINGLE;
3515 else if (*base_type != VFP_CPRC_SINGLE)
3516 return -1;
3517 return 1;
3518
3519 case 8:
3520 if (*base_type == VFP_CPRC_UNKNOWN)
3521 *base_type = VFP_CPRC_DOUBLE;
3522 else if (*base_type != VFP_CPRC_DOUBLE)
3523 return -1;
3524 return 1;
3525
3526 default:
3527 return -1;
3528 }
3529 break;
3530
3531 case TYPE_CODE_COMPLEX:
3532 /* Arguments of complex T where T is one of the types float or
3533 double get treated as if they are implemented as:
3534
3535 struct complexT
3536 {
3537 T real;
3538 T imag;
3539 };
3540
3541 */
3542 switch (TYPE_LENGTH (t))
3543 {
3544 case 8:
3545 if (*base_type == VFP_CPRC_UNKNOWN)
3546 *base_type = VFP_CPRC_SINGLE;
3547 else if (*base_type != VFP_CPRC_SINGLE)
3548 return -1;
3549 return 2;
3550
3551 case 16:
3552 if (*base_type == VFP_CPRC_UNKNOWN)
3553 *base_type = VFP_CPRC_DOUBLE;
3554 else if (*base_type != VFP_CPRC_DOUBLE)
3555 return -1;
3556 return 2;
3557
3558 default:
3559 return -1;
3560 }
3561 break;
3562
3563 case TYPE_CODE_ARRAY:
3564 {
3565 if (t->is_vector ())
3566 {
3567 	    /* A 64-bit or 128-bit containerized vector type is a VFP
3568 	       CPRC.  */
3569 switch (TYPE_LENGTH (t))
3570 {
3571 case 8:
3572 if (*base_type == VFP_CPRC_UNKNOWN)
3573 *base_type = VFP_CPRC_VEC64;
3574 return 1;
3575 case 16:
3576 if (*base_type == VFP_CPRC_UNKNOWN)
3577 *base_type = VFP_CPRC_VEC128;
3578 return 1;
3579 default:
3580 return -1;
3581 }
3582 }
3583 else
3584 {
3585 int count;
3586 unsigned unitlen;
3587
3588 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t),
3589 base_type);
3590 if (count == -1)
3591 return -1;
3592 if (TYPE_LENGTH (t) == 0)
3593 {
3594 gdb_assert (count == 0);
3595 return 0;
3596 }
3597 else if (count == 0)
3598 return -1;
3599 unitlen = arm_vfp_cprc_unit_length (*base_type);
3600 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3601 return TYPE_LENGTH (t) / unitlen;
3602 }
3603 }
3604 break;
3605
3606 case TYPE_CODE_STRUCT:
3607 {
3608 int count = 0;
3609 unsigned unitlen;
3610 int i;
3611 for (i = 0; i < t->num_fields (); i++)
3612 {
3613 int sub_count = 0;
3614
3615 if (!field_is_static (&t->field (i)))
3616 sub_count = arm_vfp_cprc_sub_candidate (t->field (i).type (),
3617 base_type);
3618 if (sub_count == -1)
3619 return -1;
3620 count += sub_count;
3621 }
3622 if (TYPE_LENGTH (t) == 0)
3623 {
3624 gdb_assert (count == 0);
3625 return 0;
3626 }
3627 else if (count == 0)
3628 return -1;
3629 unitlen = arm_vfp_cprc_unit_length (*base_type);
3630 if (TYPE_LENGTH (t) != unitlen * count)
3631 return -1;
3632 return count;
3633 }
3634
3635 case TYPE_CODE_UNION:
3636 {
3637 int count = 0;
3638 unsigned unitlen;
3639 int i;
3640 for (i = 0; i < t->num_fields (); i++)
3641 {
3642 int sub_count = arm_vfp_cprc_sub_candidate (t->field (i).type (),
3643 base_type);
3644 if (sub_count == -1)
3645 return -1;
3646 count = (count > sub_count ? count : sub_count);
3647 }
3648 if (TYPE_LENGTH (t) == 0)
3649 {
3650 gdb_assert (count == 0);
3651 return 0;
3652 }
3653 else if (count == 0)
3654 return -1;
3655 unitlen = arm_vfp_cprc_unit_length (*base_type);
3656 if (TYPE_LENGTH (t) != unitlen * count)
3657 return -1;
3658 return count;
3659 }
3660
3661 default:
3662 break;
3663 }
3664
3665 return -1;
3666 }
3667
3668 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3669 if passed to or returned from a non-variadic function with the VFP
3670 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3671 *BASE_TYPE to the base type for T and *COUNT to the number of
3672 elements of that base type before returning. */
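/* Illustration (added commentary): "double d[4]" is a candidate with
   *BASE_TYPE == VFP_CPRC_DOUBLE and *COUNT == 4, whereas "double d[5]"
   is rejected below because more than four base-type elements never
   form a CPRC. */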
3673
3674 static int
3675 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3676 int *count)
3677 {
3678 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3679 int c = arm_vfp_cprc_sub_candidate (t, &b);
3680 if (c <= 0 || c > 4)
3681 return 0;
3682 *base_type = b;
3683 *count = c;
3684 return 1;
3685 }
3686
3687 /* Return 1 if the VFP ABI should be used for passing arguments to and
3688 returning values from a function of type FUNC_TYPE, 0
3689 otherwise. */
3690
3691 static int
3692 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3693 {
3694 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3695 /* Variadic functions always use the base ABI. Assume that functions
3696 without debug info are not variadic. */
3697 if (func_type && check_typedef (func_type)->has_varargs ())
3698 return 0;
3699 /* The VFP ABI is only supported as a variant of AAPCS. */
3700 if (tdep->arm_abi != ARM_ABI_AAPCS)
3701 return 0;
3702 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3703 }
3704
3705 /* We currently only support passing parameters in integer registers, which
3706 conforms with GCC's default model, and VFP argument passing following
3707 the VFP variant of AAPCS. Several other variants exist and
3708 we should probably support some of them based on the selected ABI. */
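/* A hedged example of the base (non-VFP) convention implemented below:
   for a call f (int a, double b), A lands in r0 and B, being doubleword
   aligned, is placed in the even register pair r2/r3, leaving r1 unused
   (see the even-register-pair adjustment in the argument loop). */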
3709
3710 static CORE_ADDR
3711 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
3712 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3713 struct value **args, CORE_ADDR sp,
3714 function_call_return_method return_method,
3715 CORE_ADDR struct_addr)
3716 {
3717 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3718 int argnum;
3719 int argreg;
3720 int nstack;
3721 struct stack_item *si = NULL;
3722 int use_vfp_abi;
3723 struct type *ftype;
3724 unsigned vfp_regs_free = (1 << 16) - 1;
3725
3726 /* Determine the type of this function and whether the VFP ABI
3727 applies. */
3728 ftype = check_typedef (value_type (function));
3729 if (ftype->code () == TYPE_CODE_PTR)
3730 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3731 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
3732
3733 /* Set the return address. For the ARM, the return breakpoint is
3734 always at BP_ADDR. */
3735 if (arm_pc_is_thumb (gdbarch, bp_addr))
3736 bp_addr |= 1;
3737 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
3738
3739 /* Walk through the list of args and determine how large a temporary
3740 stack is required. Need to take care here as structs may be
3741 passed on the stack, and we have to push them. */
3742 nstack = 0;
3743
3744 argreg = ARM_A1_REGNUM;
3745 nstack = 0;
3746
3747 /* The struct_return pointer occupies the first parameter
3748 passing register. */
3749 if (return_method == return_method_struct)
3750 {
3751 if (arm_debug)
3752 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
3753 gdbarch_register_name (gdbarch, argreg),
3754 paddress (gdbarch, struct_addr));
3755 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3756 argreg++;
3757 }
3758
3759 for (argnum = 0; argnum < nargs; argnum++)
3760 {
3761 int len;
3762 struct type *arg_type;
3763 struct type *target_type;
3764 enum type_code typecode;
3765 const bfd_byte *val;
3766 int align;
3767 enum arm_vfp_cprc_base_type vfp_base_type;
3768 int vfp_base_count;
3769 int may_use_core_reg = 1;
3770
3771 arg_type = check_typedef (value_type (args[argnum]));
3772 len = TYPE_LENGTH (arg_type);
3773 target_type = TYPE_TARGET_TYPE (arg_type);
3774 typecode = arg_type->code ();
3775 val = value_contents (args[argnum]);
3776
3777 align = type_align (arg_type);
3778 /* Round alignment up to a whole number of words. */
3779 align = (align + ARM_INT_REGISTER_SIZE - 1)
3780 & ~(ARM_INT_REGISTER_SIZE - 1);
3781 /* Different ABIs have different maximum alignments. */
3782 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3783 {
3784 /* The APCS ABI only requires word alignment. */
3785 align = ARM_INT_REGISTER_SIZE;
3786 }
3787 else
3788 {
3789 /* The AAPCS requires at most doubleword alignment. */
3790 if (align > ARM_INT_REGISTER_SIZE * 2)
3791 align = ARM_INT_REGISTER_SIZE * 2;
3792 }
3793
3794 if (use_vfp_abi
3795 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3796 &vfp_base_count))
3797 {
3798 int regno;
3799 int unit_length;
3800 int shift;
3801 unsigned mask;
3802
3803 /* Because this is a CPRC it cannot go in a core register or
3804 cause a core register to be skipped for alignment.
3805 Either it goes in VFP registers and the rest of this loop
3806 iteration is skipped for this argument, or it goes on the
3807 stack (and the stack alignment code is correct for this
3808 case). */
3809 may_use_core_reg = 0;
3810
3811 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3812 shift = unit_length / 4;
3813 mask = (1 << (shift * vfp_base_count)) - 1;
3814 for (regno = 0; regno < 16; regno += shift)
3815 if (((vfp_regs_free >> regno) & mask) == mask)
3816 break;
3817
3818 if (regno < 16)
3819 {
3820 int reg_char;
3821 int reg_scaled;
3822 int i;
3823
3824 vfp_regs_free &= ~(mask << regno);
3825 reg_scaled = regno / shift;
3826 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3827 for (i = 0; i < vfp_base_count; i++)
3828 {
3829 char name_buf[4];
3830 int regnum;
3831 if (reg_char == 'q')
3832 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
3833 val + i * unit_length);
3834 else
3835 {
3836 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3837 reg_char, reg_scaled + i);
3838 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3839 strlen (name_buf));
3840 regcache->cooked_write (regnum, val + i * unit_length);
3841 }
3842 }
3843 continue;
3844 }
3845 else
3846 {
3847 /* This CPRC could not go in VFP registers, so all VFP
3848 registers are now marked as used. */
3849 vfp_regs_free = 0;
3850 }
3851 }
3852
3853 /* Push stack padding for doubleword alignment. */
3854 if (nstack & (align - 1))
3855 {
3856 si = push_stack_item (si, val, ARM_INT_REGISTER_SIZE);
3857 nstack += ARM_INT_REGISTER_SIZE;
3858 }
3859
3860 /* Doubleword aligned quantities must go in even register pairs. */
3861 if (may_use_core_reg
3862 && argreg <= ARM_LAST_ARG_REGNUM
3863 && align > ARM_INT_REGISTER_SIZE
3864 && argreg & 1)
3865 argreg++;
3866
3867 /* If the argument is a pointer to a function, and it is a
3868 Thumb function, create a LOCAL copy of the value and set
3869 the THUMB bit in it. */
3870 if (TYPE_CODE_PTR == typecode
3871 && target_type != NULL
3872 && TYPE_CODE_FUNC == check_typedef (target_type)->code ())
3873 {
3874 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
3875 if (arm_pc_is_thumb (gdbarch, regval))
3876 {
3877 bfd_byte *copy = (bfd_byte *) alloca (len);
3878 store_unsigned_integer (copy, len, byte_order,
3879 MAKE_THUMB_ADDR (regval));
3880 val = copy;
3881 }
3882 }
3883
3884 /* Copy the argument to general registers or the stack in
3885 register-sized pieces. Large arguments are split between
3886 registers and stack. */
3887 while (len > 0)
3888 {
3889 int partial_len = len < ARM_INT_REGISTER_SIZE
3890 ? len : ARM_INT_REGISTER_SIZE;
3891 CORE_ADDR regval
3892 = extract_unsigned_integer (val, partial_len, byte_order);
3893
3894 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
3895 {
3896 /* The argument is being passed in a general purpose
3897 register. */
3898 if (byte_order == BFD_ENDIAN_BIG)
3899 regval <<= (ARM_INT_REGISTER_SIZE - partial_len) * 8;
3900 if (arm_debug)
3901 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
3902 argnum,
3903 gdbarch_register_name
3904 (gdbarch, argreg),
3905 phex (regval, ARM_INT_REGISTER_SIZE));
3906 regcache_cooked_write_unsigned (regcache, argreg, regval);
3907 argreg++;
3908 }
3909 else
3910 {
3911 gdb_byte buf[ARM_INT_REGISTER_SIZE];
3912
3913 memset (buf, 0, sizeof (buf));
3914 store_unsigned_integer (buf, partial_len, byte_order, regval);
3915
3916 /* Push the arguments onto the stack. */
3917 if (arm_debug)
3918 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3919 argnum, nstack);
3920 si = push_stack_item (si, buf, ARM_INT_REGISTER_SIZE);
3921 nstack += ARM_INT_REGISTER_SIZE;
3922 }
3923
3924 len -= partial_len;
3925 val += partial_len;
3926 }
3927 }
3928 /* If we have an odd number of words to push, then decrement the stack
3929 by one word now, so that the first stack argument will be dword aligned. */
3930 if (nstack & 4)
3931 sp -= 4;
3932
3933 while (si)
3934 {
3935 sp -= si->len;
3936 write_memory (sp, si->data, si->len);
3937 si = pop_stack_item (si);
3938 }
3939
3940 /* Finally, update the SP register. */
3941 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3942
3943 return sp;
3944 }
3945
3946
3947 /* Always align the frame to an 8-byte boundary. This is required on
3948 some platforms and harmless on the rest. */
3949
3950 static CORE_ADDR
3951 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3952 {
3953 /* Align the stack to eight bytes. */
3954 return sp & ~ (CORE_ADDR) 7;
3955 }
3956
3957 static void
3958 print_fpu_flags (struct ui_file *file, int flags)
3959 {
3960 if (flags & (1 << 0))
3961 fputs_filtered ("IVO ", file);
3962 if (flags & (1 << 1))
3963 fputs_filtered ("DVZ ", file);
3964 if (flags & (1 << 2))
3965 fputs_filtered ("OFL ", file);
3966 if (flags & (1 << 3))
3967 fputs_filtered ("UFL ", file);
3968 if (flags & (1 << 4))
3969 fputs_filtered ("INX ", file);
3970 fputc_filtered ('\n', file);
3971 }
3972
3973 /* Print interesting information about the floating point processor
3974 (if present) or emulator. */
3975 static void
3976 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
3977 struct frame_info *frame, const char *args)
3978 {
3979 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
3980 int type;
3981
3982 type = (status >> 24) & 127;
3983 if (status & (1 << 31))
3984 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
3985 else
3986 fprintf_filtered (file, _("Software FPU type %d\n"), type);
3987 /* i18n: [floating point unit] mask */
3988 fputs_filtered (_("mask: "), file);
3989 print_fpu_flags (file, status >> 16);
3990 /* i18n: [floating point unit] flags */
3991 fputs_filtered (_("flags: "), file);
3992 print_fpu_flags (file, status);
3993 }
3994
3995 /* Construct the ARM extended floating point type. */
3996 static struct type *
3997 arm_ext_type (struct gdbarch *gdbarch)
3998 {
3999 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4000
4001 if (!tdep->arm_ext_type)
4002 tdep->arm_ext_type
4003 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
4004 floatformats_arm_ext);
4005
4006 return tdep->arm_ext_type;
4007 }
4008
4009 static struct type *
4010 arm_neon_double_type (struct gdbarch *gdbarch)
4011 {
4012 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4013
4014 if (tdep->neon_double_type == NULL)
4015 {
4016 struct type *t, *elem;
4017
4018 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
4019 TYPE_CODE_UNION);
4020 elem = builtin_type (gdbarch)->builtin_uint8;
4021 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
4022 elem = builtin_type (gdbarch)->builtin_uint16;
4023 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
4024 elem = builtin_type (gdbarch)->builtin_uint32;
4025 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
4026 elem = builtin_type (gdbarch)->builtin_uint64;
4027 append_composite_type_field (t, "u64", elem);
4028 elem = builtin_type (gdbarch)->builtin_float;
4029 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
4030 elem = builtin_type (gdbarch)->builtin_double;
4031 append_composite_type_field (t, "f64", elem);
4032
4033 t->set_is_vector (true);
4034 t->set_name ("neon_d");
4035 tdep->neon_double_type = t;
4036 }
4037
4038 return tdep->neon_double_type;
4039 }
4040
4041 /* FIXME: The vector types are not correctly ordered on big-endian
4042 targets. Just as s0 is the low bits of d0, d0[0] is also the low
4043 bits of d0 - regardless of what unit size is being held in d0. So
4044 the offset of the first uint8 in d0 is 7, but the offset of the
4045 first float is 4. This code works as-is for little-endian
4046 targets. */
4047
4048 static struct type *
4049 arm_neon_quad_type (struct gdbarch *gdbarch)
4050 {
4051 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4052
4053 if (tdep->neon_quad_type == NULL)
4054 {
4055 struct type *t, *elem;
4056
4057 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
4058 TYPE_CODE_UNION);
4059 elem = builtin_type (gdbarch)->builtin_uint8;
4060 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
4061 elem = builtin_type (gdbarch)->builtin_uint16;
4062 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
4063 elem = builtin_type (gdbarch)->builtin_uint32;
4064 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
4065 elem = builtin_type (gdbarch)->builtin_uint64;
4066 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
4067 elem = builtin_type (gdbarch)->builtin_float;
4068 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
4069 elem = builtin_type (gdbarch)->builtin_double;
4070 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
4071
4072 t->set_is_vector (true);
4073 t->set_name ("neon_q");
4074 tdep->neon_quad_type = t;
4075 }
4076
4077 return tdep->neon_quad_type;
4078 }
4079
4080 /* Return the GDB type object for the "standard" data type of data in
4081 register N. */
4082
4083 static struct type *
4084 arm_register_type (struct gdbarch *gdbarch, int regnum)
4085 {
4086 int num_regs = gdbarch_num_regs (gdbarch);
4087
4088 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
4089 && regnum >= num_regs && regnum < num_regs + 32)
4090 return builtin_type (gdbarch)->builtin_float;
4091
4092 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
4093 && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
4094 return arm_neon_quad_type (gdbarch);
4095
4096 /* If the target description has register information, we are only
4097 in this function so that we can override the types of
4098 double-precision registers for NEON. */
4099 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
4100 {
4101 struct type *t = tdesc_register_type (gdbarch, regnum);
4102
4103 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
4104 && t->code () == TYPE_CODE_FLT
4105 && gdbarch_tdep (gdbarch)->have_neon)
4106 return arm_neon_double_type (gdbarch);
4107 else
4108 return t;
4109 }
4110
4111 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
4112 {
4113 if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
4114 return builtin_type (gdbarch)->builtin_void;
4115
4116 return arm_ext_type (gdbarch);
4117 }
4118 else if (regnum == ARM_SP_REGNUM)
4119 return builtin_type (gdbarch)->builtin_data_ptr;
4120 else if (regnum == ARM_PC_REGNUM)
4121 return builtin_type (gdbarch)->builtin_func_ptr;
4122 else if (regnum >= ARRAY_SIZE (arm_register_names))
4123 /* These registers are only supported on targets which supply
4124 an XML description. */
4125 return builtin_type (gdbarch)->builtin_int0;
4126 else
4127 return builtin_type (gdbarch)->builtin_uint32;
4128 }
4129
4130 /* Map a DWARF register REGNUM onto the appropriate GDB register
4131 number. */
4132
4133 static int
4134 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
4135 {
4136 /* Core integer regs. */
4137 if (reg >= 0 && reg <= 15)
4138 return reg;
4139
4140 /* Legacy FPA encoding. These were once used in a way which
4141 overlapped with VFP register numbering, so their use is
4142 discouraged, but GDB doesn't support the ARM toolchain
4143 which used them for VFP. */
4144 if (reg >= 16 && reg <= 23)
4145 return ARM_F0_REGNUM + reg - 16;
4146
4147 /* New assignments for the FPA registers. */
4148 if (reg >= 96 && reg <= 103)
4149 return ARM_F0_REGNUM + reg - 96;
4150
4151 /* WMMX register assignments. */
4152 if (reg >= 104 && reg <= 111)
4153 return ARM_WCGR0_REGNUM + reg - 104;
4154
4155 if (reg >= 112 && reg <= 127)
4156 return ARM_WR0_REGNUM + reg - 112;
4157
4158 if (reg >= 192 && reg <= 199)
4159 return ARM_WC0_REGNUM + reg - 192;
4160
4161 /* VFP v2 registers. A double precision value is actually
4162 in d1 rather than s2, but the ABI only defines numbering
4163 for the single precision registers. This will "just work"
4164 in GDB for little endian targets (we'll read eight bytes,
4165 starting in s0 and then progressing to s1), but will be
4166 reversed on big endian targets with VFP. This won't
4167 be a problem for the new Neon quad registers; you're supposed
4168 to use DW_OP_piece for those. */
4169 if (reg >= 64 && reg <= 95)
4170 {
4171 char name_buf[4];
4172
4173 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
4174 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4175 strlen (name_buf));
4176 }
4177
4178 /* VFP v3 / Neon registers. This range is also used for VFP v2
4179 registers, except that it now describes d0 instead of s0. */
4180 if (reg >= 256 && reg <= 287)
4181 {
4182 char name_buf[4];
4183
4184 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
4185 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4186 strlen (name_buf));
4187 }
4188
4189 return -1;
4190 }
4191
4192 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4193 static int
4194 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4195 {
4196 int reg = regnum;
4197 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
4198
4199 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4200 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4201
4202 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4203 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4204
4205 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4206 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4207
4208 if (reg < NUM_GREGS)
4209 return SIM_ARM_R0_REGNUM + reg;
4210 reg -= NUM_GREGS;
4211
4212 if (reg < NUM_FREGS)
4213 return SIM_ARM_FP0_REGNUM + reg;
4214 reg -= NUM_FREGS;
4215
4216 if (reg < NUM_SREGS)
4217 return SIM_ARM_FPS_REGNUM + reg;
4218 reg -= NUM_SREGS;
4219
4220 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4221 }
4222
4223 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
4224 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
4225 NULL if an error occurs. BUF is freed. */
4226
4227 static gdb_byte *
4228 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
4229 int old_len, int new_len)
4230 {
4231 gdb_byte *new_buf;
4232 int bytes_to_read = new_len - old_len;
4233
4234 new_buf = (gdb_byte *) xmalloc (new_len);
4235 memcpy (new_buf + bytes_to_read, buf, old_len);
4236 xfree (buf);
4237 if (target_read_code (endaddr - new_len, new_buf, bytes_to_read) != 0)
4238 {
4239 xfree (new_buf);
4240 return NULL;
4241 }
4242 return new_buf;
4243 }
4244
4245 /* An IT block is at most the 2-byte IT instruction followed by
4246 four 4-byte instructions. The furthest back we must search to
4247 find an IT block that affects the current instruction is thus
4248 2 + 3 * 4 == 14 bytes. */
4249 #define MAX_IT_BLOCK_PREFIX 14
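/* Worst case behind that figure (added illustration): the breakpoint sits
   on the fourth conditional instruction of an IT block,

       IT(2) | insn(4) | insn(4) | insn(4) | <breakpointed insn>

   so scanning back 3 * 4 + 2 == 14 bytes is always enough to reach the
   IT instruction. */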
4250
4251 /* Use a quick scan if there are more than this many bytes of
4252 code. */
4253 #define IT_SCAN_THRESHOLD 32
4254
4255 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
4256 A breakpoint in an IT block may not be hit, depending on the
4257 condition flags. */
4258 static CORE_ADDR
4259 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
4260 {
4261 gdb_byte *buf;
4262 char map_type;
4263 CORE_ADDR boundary, func_start;
4264 int buf_len;
4265 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
4266 int i, any, last_it, last_it_count;
4267
4268 /* If we are using BKPT breakpoints, none of this is necessary. */
4269 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
4270 return bpaddr;
4271
4272 /* ARM mode does not have this problem. */
4273 if (!arm_pc_is_thumb (gdbarch, bpaddr))
4274 return bpaddr;
4275
4276 /* We are setting a breakpoint in Thumb code that could potentially
4277 contain an IT block. The first step is to find how much Thumb
4278 code there is; we do not need to read outside of known Thumb
4279 sequences. */
4280 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
4281 if (map_type == 0)
4282 /* Thumb-2 code must have mapping symbols to have a chance. */
4283 return bpaddr;
4284
4285 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
4286
4287 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
4288 && func_start > boundary)
4289 boundary = func_start;
4290
4291 /* Search for a candidate IT instruction. We have to do some fancy
4292 footwork to distinguish a real IT instruction from the second
4293 half of a 32-bit instruction, but there is no need for that if
4294 there's no candidate. */
4295 buf_len = std::min (bpaddr - boundary, (CORE_ADDR) MAX_IT_BLOCK_PREFIX);
4296 if (buf_len == 0)
4297 /* No room for an IT instruction. */
4298 return bpaddr;
4299
4300 buf = (gdb_byte *) xmalloc (buf_len);
4301 if (target_read_code (bpaddr - buf_len, buf, buf_len) != 0)
4302 return bpaddr;
4303 any = 0;
4304 for (i = 0; i < buf_len; i += 2)
4305 {
4306 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4307 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4308 {
4309 any = 1;
4310 break;
4311 }
4312 }
4313
4314 if (any == 0)
4315 {
4316 xfree (buf);
4317 return bpaddr;
4318 }
4319
4320 /* OK, the code bytes before this instruction contain at least one
4321 halfword which resembles an IT instruction. We know that it's
4322 Thumb code, but there are still two possibilities. Either the
4323 halfword really is an IT instruction, or it is the second half of
4324 a 32-bit Thumb instruction. The only way we can tell is to
4325 scan forwards from a known instruction boundary. */
4326 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
4327 {
4328 int definite;
4329
4330 /* There's a lot of code before this instruction. Start with an
4331 optimistic search; it's easy to recognize halfwords that can
4332 not be the start of a 32-bit instruction, and use that to
4333 lock on to the instruction boundaries. */
4334 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
4335 if (buf == NULL)
4336 return bpaddr;
4337 buf_len = IT_SCAN_THRESHOLD;
4338
4339 definite = 0;
4340 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
4341 {
4342 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4343 if (thumb_insn_size (inst1) == 2)
4344 {
4345 definite = 1;
4346 break;
4347 }
4348 }
4349
4350 /* At this point, if DEFINITE, BUF[I] is the first place we
4351 are sure that we know the instruction boundaries, and it is far
4352 enough from BPADDR that we could not miss an IT instruction
4353 affecting BPADDR. If ! DEFINITE, give up - start from a
4354 known boundary. */
4355 if (! definite)
4356 {
4357 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
4358 bpaddr - boundary);
4359 if (buf == NULL)
4360 return bpaddr;
4361 buf_len = bpaddr - boundary;
4362 i = 0;
4363 }
4364 }
4365 else
4366 {
4367 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
4368 if (buf == NULL)
4369 return bpaddr;
4370 buf_len = bpaddr - boundary;
4371 i = 0;
4372 }
4373
4374 /* Scan forwards. Find the last IT instruction before BPADDR. */
4375 last_it = -1;
4376 last_it_count = 0;
4377 while (i < buf_len)
4378 {
4379 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4380 last_it_count--;
4381 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4382 {
4383 last_it = i;
4384 if (inst1 & 0x0001)
4385 last_it_count = 4;
4386 else if (inst1 & 0x0002)
4387 last_it_count = 3;
4388 else if (inst1 & 0x0004)
4389 last_it_count = 2;
4390 else
4391 last_it_count = 1;
4392 }
4393 i += thumb_insn_size (inst1);
4394 }
4395
4396 xfree (buf);
4397
4398 if (last_it == -1)
4399 /* There wasn't really an IT instruction after all. */
4400 return bpaddr;
4401
4402 if (last_it_count < 1)
4403 /* It was too far away. */
4404 return bpaddr;
4405
4406 /* This really is a trouble spot. Move the breakpoint to the IT
4407 instruction. */
4408 return bpaddr - buf_len + last_it;
4409 }
4410
4411 /* ARM displaced stepping support.
4412
4413 Generally ARM displaced stepping works as follows:
4414
4415 1. When an instruction is to be single-stepped, it is first decoded by
4416 arm_process_displaced_insn. Depending on the type of instruction, it is
4417 then copied to a scratch location, possibly in a modified form. The
4418 copy_* set of functions performs such modification, as necessary. A
4419 breakpoint is placed after the modified instruction in the scratch space
4420 to return control to GDB. Note in particular that instructions which
4421 modify the PC will no longer do so after modification.
4422
4423 2. The instruction is single-stepped, by setting the PC to the scratch
4424 location address, and resuming. Control returns to GDB when the
4425 breakpoint is hit.
4426
4427 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
4428 function used for the current instruction. This function's job is to
4429 put the CPU/memory state back to what it would have been if the
4430 instruction had been executed unmodified in its original location. */
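/* A hedged walk-through of those steps (added commentary): for a simple
   "b <label>", arm_copy_b_bl_blx below emits a NOP as the modified
   instruction, the NOP is single-stepped in the scratch space, and
   cleanup_branch then writes the branch destination to the PC when the
   condition holds, reproducing the effect the branch would have had at
   its original address. */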
4431
4432 /* NOP instruction (mov r0, r0). */
4433 #define ARM_NOP 0xe1a00000
4434 #define THUMB_NOP 0x4600
4435
4436 /* Helper for register reads for displaced stepping. In particular, this
4437 returns the PC as it would be seen by the instruction at its original
4438 location. */
4439
4440 ULONGEST
4441 displaced_read_reg (struct regcache *regs, arm_displaced_step_closure *dsc,
4442 int regno)
4443 {
4444 ULONGEST ret;
4445 CORE_ADDR from = dsc->insn_addr;
4446
4447 if (regno == ARM_PC_REGNUM)
4448 {
4449 /* Compute pipeline offset:
4450 - When executing an ARM instruction, PC reads as the address of the
4451 current instruction plus 8.
4452 - When executing a Thumb instruction, PC reads as the address of the
4453 current instruction plus 4. */
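/* For example (added illustration), an ARM insn at 0x1000 reads the
   PC as 0x1008, while a Thumb insn at the same address reads 0x1004. */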
4454
4455 if (!dsc->is_thumb)
4456 from += 8;
4457 else
4458 from += 4;
4459
4460 if (debug_displaced)
4461 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
4462 (unsigned long) from);
4463 return (ULONGEST) from;
4464 }
4465 else
4466 {
4467 regcache_cooked_read_unsigned (regs, regno, &ret);
4468 if (debug_displaced)
4469 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
4470 regno, (unsigned long) ret);
4471 return ret;
4472 }
4473 }
4474
4475 static int
4476 displaced_in_arm_mode (struct regcache *regs)
4477 {
4478 ULONGEST ps;
4479 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
4480
4481 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4482
4483 return (ps & t_bit) == 0;
4484 }
4485
4486 /* Write to the PC as from a branch instruction. */
4487
4488 static void
4489 branch_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4490 ULONGEST val)
4491 {
4492 if (!dsc->is_thumb)
4493 /* Note: If bits 0/1 are set, this branch would be unpredictable for
4494 architecture versions < 6. */
4495 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4496 val & ~(ULONGEST) 0x3);
4497 else
4498 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4499 val & ~(ULONGEST) 0x1);
4500 }
4501
4502 /* Write to the PC as from a branch-exchange instruction. */
4503
4504 static void
4505 bx_write_pc (struct regcache *regs, ULONGEST val)
4506 {
4507 ULONGEST ps;
4508 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
4509
4510 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4511
4512 if ((val & 1) == 1)
4513 {
4514 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
4515 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
4516 }
4517 else if ((val & 2) == 0)
4518 {
4519 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4520 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
4521 }
4522 else
4523 {
4524 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
4525 mode, align dest to 4 bytes). */
4526 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
4527 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4528 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
4529 }
4530 }
4531
4532 /* Write to the PC as if from a load instruction. */
4533
4534 static void
4535 load_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4536 ULONGEST val)
4537 {
4538 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
4539 bx_write_pc (regs, val);
4540 else
4541 branch_write_pc (regs, dsc, val);
4542 }
4543
4544 /* Write to the PC as if from an ALU instruction. */
4545
4546 static void
4547 alu_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4548 ULONGEST val)
4549 {
4550 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
4551 bx_write_pc (regs, val);
4552 else
4553 branch_write_pc (regs, dsc, val);
4554 }
4555
4556 /* Helper for writing to registers for displaced stepping. Writing to the PC
4557 has varying effects depending on the instruction which does the write:
4558 this is controlled by the WRITE_PC argument. */
4559
4560 void
4561 displaced_write_reg (struct regcache *regs, arm_displaced_step_closure *dsc,
4562 int regno, ULONGEST val, enum pc_write_style write_pc)
4563 {
4564 if (regno == ARM_PC_REGNUM)
4565 {
4566 if (debug_displaced)
4567 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
4568 (unsigned long) val);
4569 switch (write_pc)
4570 {
4571 case BRANCH_WRITE_PC:
4572 branch_write_pc (regs, dsc, val);
4573 break;
4574
4575 case BX_WRITE_PC:
4576 bx_write_pc (regs, val);
4577 break;
4578
4579 case LOAD_WRITE_PC:
4580 load_write_pc (regs, dsc, val);
4581 break;
4582
4583 case ALU_WRITE_PC:
4584 alu_write_pc (regs, dsc, val);
4585 break;
4586
4587 case CANNOT_WRITE_PC:
4588 warning (_("Instruction wrote to PC in an unexpected way when "
4589 "single-stepping"));
4590 break;
4591
4592 default:
4593 internal_error (__FILE__, __LINE__,
4594 _("Invalid argument to displaced_write_reg"));
4595 }
4596
4597 dsc->wrote_to_pc = 1;
4598 }
4599 else
4600 {
4601 if (debug_displaced)
4602 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
4603 regno, (unsigned long) val);
4604 regcache_cooked_write_unsigned (regs, regno, val);
4605 }
4606 }
4607
4608 /* This function is used to concisely determine if an instruction INSN
4609 references PC. Register fields of interest in INSN should have the
4610 corresponding fields of BITMASK set to 0b1111. The function
4611 returns 1 if any of these fields in INSN reference the PC
4612 (also 0b1111, r15), else it returns 0. */
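/* Example (added commentary): a BITMASK of 0x000f0000 covers the Rn field
   in bits 16-19, so insn_references_pc (insn, 0x000f0000ul) is nonzero
   exactly when Rn is the PC; arm_copy_preload below uses that mask. */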
4613
4614 static int
4615 insn_references_pc (uint32_t insn, uint32_t bitmask)
4616 {
4617 uint32_t lowbit = 1;
4618
4619 while (bitmask != 0)
4620 {
4621 uint32_t mask;
4622
4623 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
4624 ;
4625
4626 if (!lowbit)
4627 break;
4628
4629 mask = lowbit * 0xf;
4630
4631 if ((insn & mask) == mask)
4632 return 1;
4633
4634 bitmask &= ~mask;
4635 }
4636
4637 return 0;
4638 }
4639
4640 /* The simplest copy function. Many instructions have the same effect no
4641 matter what address they are executed at: in those cases, use this. */
4642
4643 static int
4644 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
4645 const char *iname, arm_displaced_step_closure *dsc)
4646 {
4647 if (debug_displaced)
4648 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
4649 "opcode/class '%s' unmodified\n", (unsigned long) insn,
4650 iname);
4651
4652 dsc->modinsn[0] = insn;
4653
4654 return 0;
4655 }
4656
4657 static int
4658 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
4659 uint16_t insn2, const char *iname,
4660 arm_displaced_step_closure *dsc)
4661 {
4662 if (debug_displaced)
4663 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
4664 "opcode/class '%s' unmodified\n", insn1, insn2,
4665 iname);
4666
4667 dsc->modinsn[0] = insn1;
4668 dsc->modinsn[1] = insn2;
4669 dsc->numinsns = 2;
4670
4671 return 0;
4672 }
4673
4674 /* Copy a 16-bit Thumb instruction (Thumb and 16-bit Thumb-2) without any
4675 modification. */
4676 static int
4677 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, uint16_t insn,
4678 const char *iname,
4679 arm_displaced_step_closure *dsc)
4680 {
4681 if (debug_displaced)
4682 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
4683 "opcode/class '%s' unmodified\n", insn,
4684 iname);
4685
4686 dsc->modinsn[0] = insn;
4687
4688 return 0;
4689 }
4690
4691 /* Preload instructions with immediate offset. */
4692
4693 static void
4694 cleanup_preload (struct gdbarch *gdbarch,
4695 struct regcache *regs, arm_displaced_step_closure *dsc)
4696 {
4697 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4698 if (!dsc->u.preload.immed)
4699 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
4700 }
4701
4702 static void
4703 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
4704 arm_displaced_step_closure *dsc, unsigned int rn)
4705 {
4706 ULONGEST rn_val;
4707 /* Preload instructions:
4708
4709 {pli/pld} [rn, #+/-imm]
4710 ->
4711 {pli/pld} [r0, #+/-imm]. */
4712
4713 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4714 rn_val = displaced_read_reg (regs, dsc, rn);
4715 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4716 dsc->u.preload.immed = 1;
4717
4718 dsc->cleanup = &cleanup_preload;
4719 }
4720
4721 static int
4722 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
4723 arm_displaced_step_closure *dsc)
4724 {
4725 unsigned int rn = bits (insn, 16, 19);
4726
4727 if (!insn_references_pc (insn, 0x000f0000ul))
4728 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
4729
4730 if (debug_displaced)
4731 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4732 (unsigned long) insn);
4733
4734 dsc->modinsn[0] = insn & 0xfff0ffff;
4735
4736 install_preload (gdbarch, regs, dsc, rn);
4737
4738 return 0;
4739 }
4740
4741 static int
4742 thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
4743 struct regcache *regs, arm_displaced_step_closure *dsc)
4744 {
4745 unsigned int rn = bits (insn1, 0, 3);
4746 unsigned int u_bit = bit (insn1, 7);
4747 int imm12 = bits (insn2, 0, 11);
4748 ULONGEST pc_val;
4749
4750 if (rn != ARM_PC_REGNUM)
4751 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
4752
4753 /* PC is only allowed to be used in PLI (immediate, literal) Encoding T3, and
4754 PLD (literal) Encoding T1. */
4755 if (debug_displaced)
4756 fprintf_unfiltered (gdb_stdlog,
4757 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
4758 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
4759 imm12);
4760
4761 if (!u_bit)
4762 imm12 = -1 * imm12;
4763
4764 /* Rewrite instruction {pli/pld} PC imm12 into:
4765 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
4766
4767 {pli/pld} [r0, r1]
4768
4769 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
4770
4771 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4772 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4773
4774 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
4775
4776 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
4777 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
4778 dsc->u.preload.immed = 0;
4779
4780 /* {pli/pld} [r0, r1] */
4781 dsc->modinsn[0] = insn1 & 0xfff0;
4782 dsc->modinsn[1] = 0xf001;
4783 dsc->numinsns = 2;
4784
4785 dsc->cleanup = &cleanup_preload;
4786 return 0;
4787 }
4788
4789 /* Preload instructions with register offset. */
4790
4791 static void
4792 install_preload_reg (struct gdbarch *gdbarch, struct regcache *regs,
4793 arm_displaced_step_closure *dsc, unsigned int rn,
4794 unsigned int rm)
4795 {
4796 ULONGEST rn_val, rm_val;
4797
4798 /* Preload register-offset instructions:
4799
4800 {pli/pld} [rn, rm {, shift}]
4801 ->
4802 {pli/pld} [r0, r1 {, shift}]. */
4803
4804 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4805 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4806 rn_val = displaced_read_reg (regs, dsc, rn);
4807 rm_val = displaced_read_reg (regs, dsc, rm);
4808 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4809 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
4810 dsc->u.preload.immed = 0;
4811
4812 dsc->cleanup = &cleanup_preload;
4813 }
4814
4815 static int
4816 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
4817 struct regcache *regs,
4818 arm_displaced_step_closure *dsc)
4819 {
4820 unsigned int rn = bits (insn, 16, 19);
4821 unsigned int rm = bits (insn, 0, 3);
4822
4823
4824 if (!insn_references_pc (insn, 0x000f000ful))
4825 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
4826
4827 if (debug_displaced)
4828 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4829 (unsigned long) insn);
4830
4831 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
4832
4833 install_preload_reg (gdbarch, regs, dsc, rn, rm);
4834 return 0;
4835 }
4836
4837 /* Copy/cleanup coprocessor load and store instructions. */
4838
4839 static void
4840 cleanup_copro_load_store (struct gdbarch *gdbarch,
4841 struct regcache *regs,
4842 arm_displaced_step_closure *dsc)
4843 {
4844 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
4845
4846 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4847
4848 if (dsc->u.ldst.writeback)
4849 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
4850 }
4851
4852 static void
4853 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
4854 arm_displaced_step_closure *dsc,
4855 int writeback, unsigned int rn)
4856 {
4857 ULONGEST rn_val;
4858
4859 /* Coprocessor load/store instructions:
4860
4861 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
4862 ->
4863 {stc/stc2} [r0, #+/-imm].
4864
4865 ldc/ldc2 are handled identically. */
4866
4867 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4868 rn_val = displaced_read_reg (regs, dsc, rn);
4869 /* PC should be 4-byte aligned. */
4870 rn_val = rn_val & 0xfffffffc;
4871 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4872
4873 dsc->u.ldst.writeback = writeback;
4874 dsc->u.ldst.rn = rn;
4875
4876 dsc->cleanup = &cleanup_copro_load_store;
4877 }
4878
4879 static int
4880 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
4881 struct regcache *regs,
4882 arm_displaced_step_closure *dsc)
4883 {
4884 unsigned int rn = bits (insn, 16, 19);
4885
4886 if (!insn_references_pc (insn, 0x000f0000ul))
4887 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
4888
4889 if (debug_displaced)
4890 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4891 "load/store insn %.8lx\n", (unsigned long) insn);
4892
4893 dsc->modinsn[0] = insn & 0xfff0ffff;
4894
4895 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
4896
4897 return 0;
4898 }
4899
4900 static int
4901 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
4902 uint16_t insn2, struct regcache *regs,
4903 arm_displaced_step_closure *dsc)
4904 {
4905 unsigned int rn = bits (insn1, 0, 3);
4906
4907 if (rn != ARM_PC_REGNUM)
4908 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
4909 "copro load/store", dsc);
4910
4911 if (debug_displaced)
4912 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4913 "load/store insn %.4x%.4x\n", insn1, insn2);
4914
4915 dsc->modinsn[0] = insn1 & 0xfff0;
4916 dsc->modinsn[1] = insn2;
4917 dsc->numinsns = 2;
4918
4919 /* This function is called to copy the LDC/LDC2/VLDR instructions, which
4920 don't support writeback, so pass 0. */
4921 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
4922
4923 return 0;
4924 }
4925
4926 /* Clean up branch instructions (actually perform the branch, by setting
4927 PC). */
4928
4929 static void
4930 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
4931 arm_displaced_step_closure *dsc)
4932 {
4933 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
4934 int branch_taken = condition_true (dsc->u.branch.cond, status);
4935 enum pc_write_style write_pc = dsc->u.branch.exchange
4936 ? BX_WRITE_PC : BRANCH_WRITE_PC;
4937
4938 if (!branch_taken)
4939 return;
4940
4941 if (dsc->u.branch.link)
4942 {
4943 /* LR should hold the address of the insn following the current one. To
4944 avoid confusing logic that later handles a `bx lr' insn, if the current
4945 insn is in Thumb mode, bit 0 of the LR value should be set to 1. */
4946 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
4947
4948 if (dsc->is_thumb)
4949 next_insn_addr |= 0x1;
4950
4951 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
4952 CANNOT_WRITE_PC);
4953 }
4954
4955 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
4956 }
4957
4958 /* Copy B/BL/BLX instructions with immediate destinations. */
4959
4960 static void
4961 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
4962 arm_displaced_step_closure *dsc,
4963 unsigned int cond, int exchange, int link, long offset)
4964 {
4965 /* Implement "BL<cond> <label>" as:
4966
4967 Preparation: cond <- instruction condition
4968 Insn: mov r0, r0 (nop)
4969 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
4970
4971 B<cond> similar, but don't set r14 in cleanup. */
4972
4973 dsc->u.branch.cond = cond;
4974 dsc->u.branch.link = link;
4975 dsc->u.branch.exchange = exchange;
4976
4977 dsc->u.branch.dest = dsc->insn_addr;
4978 if (link && exchange)
4979 /* For BLX, the offset is computed from Align (PC, 4). */
4980 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
4981
4982 if (dsc->is_thumb)
4983 dsc->u.branch.dest += 4 + offset;
4984 else
4985 dsc->u.branch.dest += 8 + offset;
4986
4987 dsc->cleanup = &cleanup_branch;
4988 }
4989 static int
4990 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
4991 struct regcache *regs, arm_displaced_step_closure *dsc)
4992 {
4993 unsigned int cond = bits (insn, 28, 31);
4994 int exchange = (cond == 0xf);
4995 int link = exchange || bit (insn, 24);
4996 long offset;
4997
4998 if (debug_displaced)
4999 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
5000 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
5001 (unsigned long) insn);
5002 if (exchange)
5003 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
5004 then arrange the switch into Thumb mode. */
5005 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
5006 else
5007 offset = bits (insn, 0, 23) << 2;
5008
5009 if (bit (offset, 25))
5010 offset = offset | ~0x3ffffff;
5011
5012 dsc->modinsn[0] = ARM_NOP;
5013
5014 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
5015 return 0;
5016 }
5017
5018 static int
5019 thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
5020 uint16_t insn2, struct regcache *regs,
5021 arm_displaced_step_closure *dsc)
5022 {
5023 int link = bit (insn2, 14);
5024 int exchange = link && !bit (insn2, 12);
5025 int cond = INST_AL;
5026 long offset = 0;
5027 int j1 = bit (insn2, 13);
5028 int j2 = bit (insn2, 11);
5029 int s = sbits (insn1, 10, 10);
5030 int i1 = !(j1 ^ bit (insn1, 10));
5031 int i2 = !(j2 ^ bit (insn1, 10));
5032
5033 if (!link && !exchange) /* B */
5034 {
5035 offset = (bits (insn2, 0, 10) << 1);
5036 if (bit (insn2, 12)) /* Encoding T4 */
5037 {
5038 offset |= (bits (insn1, 0, 9) << 12)
5039 | (i2 << 22)
5040 | (i1 << 23)
5041 | (s << 24);
5042 cond = INST_AL;
5043 }
5044 else /* Encoding T3 */
5045 {
5046 offset |= (bits (insn1, 0, 5) << 12)
5047 | (j1 << 18)
5048 | (j2 << 19)
5049 | (s << 20);
5050 cond = bits (insn1, 6, 9);
5051 }
5052 }
5053 else
5054 {
5055 offset = (bits (insn1, 0, 9) << 12);
5056 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
5057 offset |= exchange ?
5058 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
5059 }
5060
5061 if (debug_displaced)
5062 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
5063 "%.4x %.4x with offset %.8lx\n",
5064 link ? (exchange) ? "blx" : "bl" : "b",
5065 insn1, insn2, offset);
5066
5067 dsc->modinsn[0] = THUMB_NOP;
5068
5069 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
5070 return 0;
5071 }
5072
5073 /* Copy B Thumb instructions. */
5074 static int
5075 thumb_copy_b (struct gdbarch *gdbarch, uint16_t insn,
5076 arm_displaced_step_closure *dsc)
5077 {
5078 unsigned int cond = 0;
5079 int offset = 0;
5080 unsigned short bit_12_15 = bits (insn, 12, 15);
5081 CORE_ADDR from = dsc->insn_addr;
5082
5083 if (bit_12_15 == 0xd)
5084 {
5085 /* offset = SignExtend (imm8:0, 32) */
5086 offset = sbits ((insn << 1), 0, 8);
5087 cond = bits (insn, 8, 11);
5088 }
5089 else if (bit_12_15 == 0xe) /* Encoding T2 */
5090 {
5091 offset = sbits ((insn << 1), 0, 11);
5092 cond = INST_AL;
5093 }
5094
5095 if (debug_displaced)
5096 fprintf_unfiltered (gdb_stdlog,
5097 "displaced: copying b immediate insn %.4x "
5098 "with offset %d\n", insn, offset);
5099
5100 dsc->u.branch.cond = cond;
5101 dsc->u.branch.link = 0;
5102 dsc->u.branch.exchange = 0;
5103 dsc->u.branch.dest = from + 4 + offset;
5104
5105 dsc->modinsn[0] = THUMB_NOP;
5106
5107 dsc->cleanup = &cleanup_branch;
5108
5109 return 0;
5110 }
5111
5112 /* Copy BX/BLX with register-specified destinations. */
5113
5114 static void
5115 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
5116 arm_displaced_step_closure *dsc, int link,
5117 unsigned int cond, unsigned int rm)
5118 {
5119 /* Implement "{BX,BLX}<cond> <reg>" as:
5120
5121 Preparation: cond <- instruction condition
5122 Insn: mov r0, r0 (nop)
5123 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5124
5125 Don't set r14 in cleanup for BX. */
5126
5127 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
5128
5129 dsc->u.branch.cond = cond;
5130 dsc->u.branch.link = link;
5131
5132 dsc->u.branch.exchange = 1;
5133
5134 dsc->cleanup = &cleanup_branch;
5135 }
5136
5137 static int
5138 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
5139 struct regcache *regs, arm_displaced_step_closure *dsc)
5140 {
5141 unsigned int cond = bits (insn, 28, 31);
5142 /* BX: x12xxx1x
5143 BLX: x12xxx3x. */
5144 int link = bit (insn, 5);
5145 unsigned int rm = bits (insn, 0, 3);
5146
5147 if (debug_displaced)
5148 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
5149 (unsigned long) insn);
5150
5151 dsc->modinsn[0] = ARM_NOP;
5152
5153 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
5154 return 0;
5155 }
5156
5157 static int
5158 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
5159 struct regcache *regs,
5160 arm_displaced_step_closure *dsc)
5161 {
5162 int link = bit (insn, 7);
5163 unsigned int rm = bits (insn, 3, 6);
5164
5165 if (debug_displaced)
5166 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
5167 (unsigned short) insn);
5168
5169 dsc->modinsn[0] = THUMB_NOP;
5170
5171 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
5172
5173 return 0;
5174 }
5175
5176
5177 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
5178
5179 static void
5180 cleanup_alu_imm (struct gdbarch *gdbarch,
5181 struct regcache *regs, arm_displaced_step_closure *dsc)
5182 {
5183 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5184 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5185 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5186 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5187 }
5188
5189 static int
5190 arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5191 arm_displaced_step_closure *dsc)
5192 {
5193 unsigned int rn = bits (insn, 16, 19);
5194 unsigned int rd = bits (insn, 12, 15);
5195 unsigned int op = bits (insn, 21, 24);
5196 int is_mov = (op == 0xd);
5197 ULONGEST rd_val, rn_val;
5198
5199 if (!insn_references_pc (insn, 0x000ff000ul))
5200 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
5201
5202 if (debug_displaced)
5203 fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
5204 "%.8lx\n", is_mov ? "move" : "ALU",
5205 (unsigned long) insn);
5206
5207 /* Instruction is of form:
5208
5209 <op><cond> rd, [rn,] #imm
5210
5211 Rewrite as:
5212
5213 Preparation: tmp1, tmp2 <- r0, r1;
5214 r0, r1 <- rd, rn
5215 Insn: <op><cond> r0, r1, #imm
5216 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5217 */
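/* Concrete instance (added illustration): "add r2, pc, #4" is rewritten
   as "add r0, r1, #4", with r1 preloaded with the pipeline-adjusted PC;
   cleanup_alu_imm then copies r0 back into r2 and restores r0/r1. */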
5218
5219 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5220 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5221 rn_val = displaced_read_reg (regs, dsc, rn);
5222 rd_val = displaced_read_reg (regs, dsc, rd);
5223 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5224 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5225 dsc->rd = rd;
5226
5227 if (is_mov)
5228 dsc->modinsn[0] = insn & 0xfff00fff;
5229 else
5230 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
5231
5232 dsc->cleanup = &cleanup_alu_imm;
5233
5234 return 0;
5235 }
5236
5237 static int
5238 thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
5239 uint16_t insn2, struct regcache *regs,
5240 arm_displaced_step_closure *dsc)
5241 {
5242 unsigned int op = bits (insn1, 5, 8);
5243 unsigned int rn, rm, rd;
5244 ULONGEST rd_val, rn_val;
5245
5246 rn = bits (insn1, 0, 3); /* Rn */
5247 rm = bits (insn2, 0, 3); /* Rm */
5248 rd = bits (insn2, 8, 11); /* Rd */
5249
5250 /* This routine is only called for the MOV instruction. */
5251 gdb_assert (op == 0x2 && rn == 0xf);
5252
5253 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
5254 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
5255
5256 if (debug_displaced)
5257 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
5258 "ALU", insn1, insn2);
5259
5260 /* Instruction is of form:
5261
5262 <op><cond> rd, [rn,] #imm
5263
5264 Rewrite as:
5265
5266 Preparation: tmp1, tmp2 <- r0, r1;
5267 r0, r1 <- rd, rn
5268 Insn: <op><cond> r0, r1, #imm
5269 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5270 */
5271
5272 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5273 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5274 rn_val = displaced_read_reg (regs, dsc, rn);
5275 rd_val = displaced_read_reg (regs, dsc, rd);
5276 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5277 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5278 dsc->rd = rd;
5279
5280 dsc->modinsn[0] = insn1;
5281 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
5282 dsc->numinsns = 2;
5283
5284 dsc->cleanup = &cleanup_alu_imm;
5285
5286 return 0;
5287 }
5288
5289 /* Copy/cleanup arithmetic/logic insns with register RHS. */
5290
5291 static void
5292 cleanup_alu_reg (struct gdbarch *gdbarch,
5293 struct regcache *regs, arm_displaced_step_closure *dsc)
5294 {
5295 ULONGEST rd_val;
5296 int i;
5297
5298 rd_val = displaced_read_reg (regs, dsc, 0);
5299
5300 for (i = 0; i < 3; i++)
5301 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5302
5303 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5304 }
5305
5306 static void
5307 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
5308 arm_displaced_step_closure *dsc,
5309 unsigned int rd, unsigned int rn, unsigned int rm)
5310 {
5311 ULONGEST rd_val, rn_val, rm_val;
5312
5313 /* Instruction is of form:
5314
5315 <op><cond> rd, [rn,] rm [, <shift>]
5316
5317 Rewrite as:
5318
5319 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
5320 r0, r1, r2 <- rd, rn, rm
5321 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
5322 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
5323 */
5324
5325 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5326 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5327 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5328 rd_val = displaced_read_reg (regs, dsc, rd);
5329 rn_val = displaced_read_reg (regs, dsc, rn);
5330 rm_val = displaced_read_reg (regs, dsc, rm);
5331 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5332 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5333 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5334 dsc->rd = rd;
5335
5336 dsc->cleanup = &cleanup_alu_reg;
5337 }
5338
5339 static int
5340 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5341 arm_displaced_step_closure *dsc)
5342 {
5343 unsigned int op = bits (insn, 21, 24);
5344 int is_mov = (op == 0xd);
5345
5346 if (!insn_references_pc (insn, 0x000ff00ful))
5347 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
5348
5349 if (debug_displaced)
5350 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
5351 is_mov ? "move" : "ALU", (unsigned long) insn);
5352
5353 if (is_mov)
5354 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
5355 else
5356 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
5357
5358 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
5359 bits (insn, 0, 3));
5360 return 0;
5361 }
5362
5363 static int
5364 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
5365 struct regcache *regs,
5366 arm_displaced_step_closure *dsc)
5367 {
5368 unsigned rm, rd;
5369
5370 rm = bits (insn, 3, 6);
5371 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
5372
5373 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
5374 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
5375
5376 if (debug_displaced)
5377 fprintf_unfiltered (gdb_stdlog, "displaced: copying ALU reg insn %.4x\n",
5378 (unsigned short) insn);
5379
5380 dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
5381
5382 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
5383
5384 return 0;
5385 }
5386
5387 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
5388
5389 static void
5390 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
5391 struct regcache *regs,
5392 arm_displaced_step_closure *dsc)
5393 {
5394 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5395 int i;
5396
5397 for (i = 0; i < 4; i++)
5398 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5399
5400 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5401 }
5402
5403 static void
5404 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
5405 arm_displaced_step_closure *dsc,
5406 unsigned int rd, unsigned int rn, unsigned int rm,
5407 unsigned rs)
5408 {
5409 int i;
5410 ULONGEST rd_val, rn_val, rm_val, rs_val;
5411
5412 /* Instruction is of form:
5413
5414 <op><cond> rd, [rn,] rm, <shift> rs
5415
5416 Rewrite as:
5417
5418 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
5419 r0, r1, r2, r3 <- rd, rn, rm, rs
5420 Insn: <op><cond> r0, r1, r2, <shift> r3
5421 Cleanup: tmp5 <- r0
5422 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
5423 rd <- tmp5
5424 */
5425
5426 for (i = 0; i < 4; i++)
5427 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
5428
5429 rd_val = displaced_read_reg (regs, dsc, rd);
5430 rn_val = displaced_read_reg (regs, dsc, rn);
5431 rm_val = displaced_read_reg (regs, dsc, rm);
5432 rs_val = displaced_read_reg (regs, dsc, rs);
5433 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5434 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5435 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5436 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
5437 dsc->rd = rd;
5438 dsc->cleanup = &cleanup_alu_shifted_reg;
5439 }
5440
5441 static int
5442 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
5443 struct regcache *regs,
5444 arm_displaced_step_closure *dsc)
5445 {
5446 unsigned int op = bits (insn, 21, 24);
5447 int is_mov = (op == 0xd);
5448 unsigned int rd, rn, rm, rs;
5449
5450 if (!insn_references_pc (insn, 0x000fff0ful))
5451 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
5452
5453 if (debug_displaced)
5454 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
5455 "%.8lx\n", is_mov ? "move" : "ALU",
5456 (unsigned long) insn);
5457
5458 rn = bits (insn, 16, 19);
5459 rm = bits (insn, 0, 3);
5460 rs = bits (insn, 8, 11);
5461 rd = bits (insn, 12, 15);
5462
5463 if (is_mov)
5464 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
5465 else
5466 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
5467
5468 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
5469
5470 return 0;
5471 }
5472
5473 /* Clean up load instructions. */
5474
5475 static void
5476 cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
5477 arm_displaced_step_closure *dsc)
5478 {
5479 ULONGEST rt_val, rt_val2 = 0, rn_val;
5480
5481 rt_val = displaced_read_reg (regs, dsc, 0);
5482 if (dsc->u.ldst.xfersize == 8)
5483 rt_val2 = displaced_read_reg (regs, dsc, 1);
5484 rn_val = displaced_read_reg (regs, dsc, 2);
5485
5486 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5487 if (dsc->u.ldst.xfersize > 4)
5488 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5489 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5490 if (!dsc->u.ldst.immed)
5491 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5492
5493 /* Handle register writeback. */
5494 if (dsc->u.ldst.writeback)
5495 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5496 /* Put result in right place. */
5497 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
5498 if (dsc->u.ldst.xfersize == 8)
5499 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
5500 }
5501
5502 /* Clean up store instructions. */
5503
5504 static void
5505 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
5506 arm_displaced_step_closure *dsc)
5507 {
5508 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
5509
5510 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5511 if (dsc->u.ldst.xfersize > 4)
5512 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5513 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5514 if (!dsc->u.ldst.immed)
5515 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5516 if (!dsc->u.ldst.restore_r4)
5517 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
5518
5519 /* Writeback. */
5520 if (dsc->u.ldst.writeback)
5521 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5522 }
5523
5524 /* Copy "extra" load/store instructions. These are halfword/doubleword
5525 transfers, which have a different encoding to byte/word transfers. */
5526
5527 static int
5528 arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
5529 struct regcache *regs, arm_displaced_step_closure *dsc)
5530 {
5531 unsigned int op1 = bits (insn, 20, 24);
5532 unsigned int op2 = bits (insn, 5, 6);
5533 unsigned int rt = bits (insn, 12, 15);
5534 unsigned int rn = bits (insn, 16, 19);
5535 unsigned int rm = bits (insn, 0, 3);
5536 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
5537 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
5538 int immed = (op1 & 0x4) != 0;
5539 int opcode;
5540 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
5541
5542 if (!insn_references_pc (insn, 0x000ff00ful))
5543 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
5544
5545 if (debug_displaced)
5546 fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
5547 "insn %.8lx\n", unprivileged ? "unprivileged " : "",
5548 (unsigned long) insn);
5549
5550 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
5551
5552 if (opcode < 0)
5553 internal_error (__FILE__, __LINE__,
5554 _("copy_extra_ld_st: instruction decode error"));
5555
5556 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5557 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5558 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5559 if (!immed)
5560 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5561
5562 rt_val = displaced_read_reg (regs, dsc, rt);
5563 if (bytesize[opcode] == 8)
5564 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
5565 rn_val = displaced_read_reg (regs, dsc, rn);
5566 if (!immed)
5567 rm_val = displaced_read_reg (regs, dsc, rm);
5568
5569 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5570 if (bytesize[opcode] == 8)
5571 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
5572 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5573 if (!immed)
5574 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5575
5576 dsc->rd = rt;
5577 dsc->u.ldst.xfersize = bytesize[opcode];
5578 dsc->u.ldst.rn = rn;
5579 dsc->u.ldst.immed = immed;
5580 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
5581 dsc->u.ldst.restore_r4 = 0;
5582
5583 if (immed)
5584 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
5585 ->
5586 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
5587 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5588 else
5589 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
5590 ->
5591 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
5592 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5593
5594 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
5595
5596 return 0;
5597 }
5598
5599 /* Copy byte/half word/word loads and stores. */
5600
5601 static void
5602 install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5603 arm_displaced_step_closure *dsc, int load,
5604 int immed, int writeback, int size, int usermode,
5605 int rt, int rm, int rn)
5606 {
5607 ULONGEST rt_val, rn_val, rm_val = 0;
5608
5609 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5610 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5611 if (!immed)
5612 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5613 if (!load)
5614 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
5615
5616 rt_val = displaced_read_reg (regs, dsc, rt);
5617 rn_val = displaced_read_reg (regs, dsc, rn);
5618 if (!immed)
5619 rm_val = displaced_read_reg (regs, dsc, rm);
5620
5621 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5622 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5623 if (!immed)
5624 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5625 dsc->rd = rt;
5626 dsc->u.ldst.xfersize = size;
5627 dsc->u.ldst.rn = rn;
5628 dsc->u.ldst.immed = immed;
5629 dsc->u.ldst.writeback = writeback;
5630
5631 /* To write PC we can do:
5632
5633 Before this sequence of instructions:
5634 r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
5635 r2 is the Rn value got from displaced_read_reg.
5636
5637 Insn1: push {pc} Write address of STR instruction + offset on stack
5638 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
5639 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
5640 = addr(Insn1) + offset - addr(Insn3) - 8
5641 = offset - 16
5642 Insn4: add r4, r4, #8 r4 = offset - 8
5643 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
5644 = from + offset
5645 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
5646
5647 Otherwise we don't know what value to write for PC, since the offset is
5648 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
5649 of this can be found in Section "Saving from r15" in
5650 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
5651
5652 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5653 }
5654
5655
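/* Copy Thumb-2 load-literal instructions, i.e. LDR Rt, [PC, #+/-imm12].  The
   PC-relative load is rewritten to go through scratch registers so that it
   can execute out of line with the original PC value.  */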
5656 static int
5657 thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
5658 uint16_t insn2, struct regcache *regs,
5659 arm_displaced_step_closure *dsc, int size)
5660 {
5661 unsigned int u_bit = bit (insn1, 7);
5662 unsigned int rt = bits (insn2, 12, 15);
5663 int imm12 = bits (insn2, 0, 11);
5664 ULONGEST pc_val;
5665
5666 if (debug_displaced)
5667 fprintf_unfiltered (gdb_stdlog,
5668 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
5669 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
5670 imm12);
5671
5672 if (!u_bit)
5673 imm12 = -1 * imm12;
5674
5675 /* Rewrite instruction LDR Rt, [PC, #imm12] into:
5676
5677 Prepare: tmp[0] <- r0, tmp[2] <- r2, tmp[3] <- r3, r2 <- pc, r3 <- imm12
5678
5679 LDR R0, [R2, R3]
5680
5681 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[2], r3 <- tmp[3]. */
5682
5683
5684 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5685 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5686 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5687
5688 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5689
5690 pc_val = pc_val & 0xfffffffc;
5691
5692 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
5693 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
5694
5695 dsc->rd = rt;
5696
5697 dsc->u.ldst.xfersize = size;
5698 dsc->u.ldst.immed = 0;
5699 dsc->u.ldst.writeback = 0;
5700 dsc->u.ldst.restore_r4 = 0;
5701
5702 /* LDR R0, [R2, R3] */
5703 dsc->modinsn[0] = 0xf852;
5704 dsc->modinsn[1] = 0x3;
5705 dsc->numinsns = 2;
5706
5707 dsc->cleanup = &cleanup_load;
5708
5709 return 0;
5710 }
5711
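/* Copy Thumb-2 load (immediate or register) instructions which read or write
   the PC; other loads are copied unmodified.  */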
5712 static int
5713 thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
5714 uint16_t insn2, struct regcache *regs,
5715 arm_displaced_step_closure *dsc,
5716 int writeback, int immed)
5717 {
5718 unsigned int rt = bits (insn2, 12, 15);
5719 unsigned int rn = bits (insn1, 0, 3);
5720 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
5721 /* In LDR (register), there is also a register Rm, which is not allowed to
5722 be PC, so we don't have to check it. */
5723
5724 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
5725 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
5726 dsc);
5727
5728 if (debug_displaced)
5729 fprintf_unfiltered (gdb_stdlog,
5730 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
5731 rt, rn, insn1, insn2);
5732
5733 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
5734 0, rt, rm, rn);
5735
5736 dsc->u.ldst.restore_r4 = 0;
5737
5738 if (immed)
5739 /* ldr[b]<cond> rt, [rn, #imm], etc.
5740 ->
5741 ldr[b]<cond> r0, [r2, #imm]. */
5742 {
5743 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5744 dsc->modinsn[1] = insn2 & 0x0fff;
5745 }
5746 else
5747 /* ldr[b]<cond> rt, [rn, rm], etc.
5748 ->
5749 ldr[b]<cond> r0, [r2, r3]. */
5750 {
5751 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5752 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
5753 }
5754
5755 dsc->numinsns = 2;
5756
5757 return 0;
5758 }
5759
5760
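/* Copy ARM word and byte loads and stores (LDR, LDRB, STR, STRB and their
   unprivileged variants) which reference the PC, using install_load_store
   above.  Storing the PC needs the longer multi-instruction sequence
   described in that function's comment.  */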
5761 static int
5762 arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
5763 struct regcache *regs,
5764 arm_displaced_step_closure *dsc,
5765 int load, int size, int usermode)
5766 {
5767 int immed = !bit (insn, 25);
5768 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
5769 unsigned int rt = bits (insn, 12, 15);
5770 unsigned int rn = bits (insn, 16, 19);
5771 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
5772
5773 if (!insn_references_pc (insn, 0x000ff00ful))
5774 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
5775
5776 if (debug_displaced)
5777 fprintf_unfiltered (gdb_stdlog,
5778 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
5779 load ? (size == 1 ? "ldrb" : "ldr")
5780 : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
5781 rt, rn,
5782 (unsigned long) insn);
5783
5784 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
5785 usermode, rt, rm, rn);
5786
5787 if (load || rt != ARM_PC_REGNUM)
5788 {
5789 dsc->u.ldst.restore_r4 = 0;
5790
5791 if (immed)
5792 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
5793 ->
5794 {ldr,str}[b]<cond> r0, [r2, #imm]. */
5795 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5796 else
5797 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
5798 ->
5799 {ldr,str}[b]<cond> r0, [r2, r3]. */
5800 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5801 }
5802 else
5803 {
5804 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
5805 dsc->u.ldst.restore_r4 = 1;
5806 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
5807 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
5808 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
5809 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
5810 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
5811
5812 /* As above. */
5813 if (immed)
5814 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
5815 else
5816 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
5817
5818 dsc->numinsns = 6;
5819 }
5820
5821 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5822
5823 return 0;
5824 }
5825
5826 /* Cleanup LDM instructions with fully-populated register list. This is an
5827 unfortunate corner case: it's impossible to implement correctly by modifying
5828 the instruction. The issue is as follows: we have an instruction,
5829
5830 ldm rN, {r0-r15}
5831
5832 which we must rewrite to avoid loading PC. A possible solution would be to
5833 do the load in two halves, something like (with suitable cleanup
5834 afterwards):
5835
5836 mov r8, rN
5837 ldm[id][ab] r8!, {r0-r7}
5838 str r7, <temp>
5839 ldm[id][ab] r8, {r7-r14}
5840 <bkpt>
5841
5842 but at present there's no suitable place for <temp>, since the scratch space
5843 is overwritten before the cleanup routine is called. For now, we simply
5844 emulate the instruction. */
5845
5846 static void
5847 cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
5848 arm_displaced_step_closure *dsc)
5849 {
5850 int inc = dsc->u.block.increment;
5851 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
5852 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
5853 uint32_t regmask = dsc->u.block.regmask;
5854 int regno = inc ? 0 : 15;
5855 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
5856 int exception_return = dsc->u.block.load && dsc->u.block.user
5857 && (regmask & 0x8000) != 0;
5858 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5859 int do_transfer = condition_true (dsc->u.block.cond, status);
5860 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5861
5862 if (!do_transfer)
5863 return;
5864
5865 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
5866 sensible we can do here. Complain loudly. */
5867 if (exception_return)
5868 error (_("Cannot single-step exception return"));
5869
5870 /* We don't handle any stores here for now. */
5871 gdb_assert (dsc->u.block.load != 0);
5872
5873 if (debug_displaced)
5874 fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
5875 "%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
5876 dsc->u.block.increment ? "inc" : "dec",
5877 dsc->u.block.before ? "before" : "after");
5878
5879 while (regmask)
5880 {
5881 uint32_t memword;
5882
5883 if (inc)
5884 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
5885 regno++;
5886 else
5887 while (regno >= 0 && (regmask & (1 << regno)) == 0)
5888 regno--;
5889
5890 xfer_addr += bump_before;
5891
5892 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
5893 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
5894
5895 xfer_addr += bump_after;
5896
5897 regmask &= ~(1 << regno);
5898 }
5899
5900 if (dsc->u.block.writeback)
5901 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
5902 CANNOT_WRITE_PC);
5903 }
5904
5905 /* Clean up an STM which included the PC in the register list. */
5906
5907 static void
5908 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
5909 arm_displaced_step_closure *dsc)
5910 {
5911 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5912 int store_executed = condition_true (dsc->u.block.cond, status);
5913 CORE_ADDR pc_stored_at, transferred_regs
5914 = count_one_bits (dsc->u.block.regmask);
5915 CORE_ADDR stm_insn_addr;
5916 uint32_t pc_val;
5917 long offset;
5918 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5919
5920 /* If condition code fails, there's nothing else to do. */
5921 if (!store_executed)
5922 return;
5923
5924 if (dsc->u.block.increment)
5925 {
5926 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
5927
5928 if (dsc->u.block.before)
5929 pc_stored_at += 4;
5930 }
5931 else
5932 {
5933 pc_stored_at = dsc->u.block.xfer_addr;
5934
5935 if (dsc->u.block.before)
5936 pc_stored_at -= 4;
5937 }
5938
5939 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
5940 stm_insn_addr = dsc->scratch_base;
5941 offset = pc_val - stm_insn_addr;
5942
5943 if (debug_displaced)
5944 fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
5945 "STM instruction\n", offset);
5946
5947 /* Rewrite the stored PC to the proper value for the non-displaced original
5948 instruction. */
5949 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
5950 dsc->insn_addr + offset);
5951 }
5952
5953 /* Clean up an LDM which includes the PC in the register list. We clumped all
5954 the registers in the transferred list into a contiguous range r0...rX (to
5955 avoid loading PC directly and losing control of the debugged program), so we
5956 must undo that here. */
5957
5958 static void
5959 cleanup_block_load_pc (struct gdbarch *gdbarch,
5960 struct regcache *regs,
5961 arm_displaced_step_closure *dsc)
5962 {
5963 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5964 int load_executed = condition_true (dsc->u.block.cond, status);
5965 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
5966 unsigned int regs_loaded = count_one_bits (mask);
5967 unsigned int num_to_shuffle = regs_loaded, clobbered;
5968
5969 /* The method employed here will fail if the register list is fully populated
5970 (we need to avoid loading PC directly). */
5971 gdb_assert (num_to_shuffle < 16);
5972
5973 if (!load_executed)
5974 return;
5975
5976 clobbered = (1 << num_to_shuffle) - 1;
5977
5978 while (num_to_shuffle > 0)
5979 {
5980 if ((mask & (1 << write_reg)) != 0)
5981 {
5982 unsigned int read_reg = num_to_shuffle - 1;
5983
5984 if (read_reg != write_reg)
5985 {
5986 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
5987 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
5988 if (debug_displaced)
5989 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
5990 "loaded register r%d to r%d\n"), read_reg,
5991 write_reg);
5992 }
5993 else if (debug_displaced)
5994 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
5995 "r%d already in the right place\n"),
5996 write_reg);
5997
5998 clobbered &= ~(1 << write_reg);
5999
6000 num_to_shuffle--;
6001 }
6002
6003 write_reg--;
6004 }
6005
6006 /* Restore any registers we scribbled over. */
6007 for (write_reg = 0; clobbered != 0; write_reg++)
6008 {
6009 if ((clobbered & (1 << write_reg)) != 0)
6010 {
6011 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
6012 CANNOT_WRITE_PC);
6013 if (debug_displaced)
6014 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
6015 "clobbered register r%d\n"), write_reg);
6016 clobbered &= ~(1 << write_reg);
6017 }
6018 }
6019
6020 /* Perform register writeback manually. */
6021 if (dsc->u.block.writeback)
6022 {
6023 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
6024
6025 if (dsc->u.block.increment)
6026 new_rn_val += regs_loaded * 4;
6027 else
6028 new_rn_val -= regs_loaded * 4;
6029
6030 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
6031 CANNOT_WRITE_PC);
6032 }
6033 }
6034
6035 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
6036 in user-level code (in particular exception return, ldm rn, {...pc}^). */
6037
6038 static int
6039 arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
6040 struct regcache *regs,
6041 arm_displaced_step_closure *dsc)
6042 {
6043 int load = bit (insn, 20);
6044 int user = bit (insn, 22);
6045 int increment = bit (insn, 23);
6046 int before = bit (insn, 24);
6047 int writeback = bit (insn, 21);
6048 int rn = bits (insn, 16, 19);
6049
6050 /* Block transfers which don't mention PC can be run directly
6051 out-of-line. */
6052 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
6053 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
6054
6055 if (rn == ARM_PC_REGNUM)
6056 {
6057 warning (_("displaced: Unpredictable LDM or STM with "
6058 "base register r15"));
6059 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
6060 }
6061
6062 if (debug_displaced)
6063 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6064 "%.8lx\n", (unsigned long) insn);
6065
6066 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
6067 dsc->u.block.rn = rn;
6068
6069 dsc->u.block.load = load;
6070 dsc->u.block.user = user;
6071 dsc->u.block.increment = increment;
6072 dsc->u.block.before = before;
6073 dsc->u.block.writeback = writeback;
6074 dsc->u.block.cond = bits (insn, 28, 31);
6075
6076 dsc->u.block.regmask = insn & 0xffff;
6077
6078 if (load)
6079 {
6080 if ((insn & 0xffff) == 0xffff)
6081 {
6082 /* LDM with a fully-populated register list. This case is
6083 particularly tricky. Implement for now by fully emulating the
6084 instruction (which might not behave perfectly in all cases, but
6085 these instructions should be rare enough for that not to matter
6086 too much). */
6087 dsc->modinsn[0] = ARM_NOP;
6088
6089 dsc->cleanup = &cleanup_block_load_all;
6090 }
6091 else
6092 {
6093 /* LDM of a list of registers which includes PC. Implement by
6094 rewriting the list of registers to be transferred into a
6095 contiguous chunk r0...rX before doing the transfer, then shuffling
6096 registers into the correct places in the cleanup routine. */
6097 unsigned int regmask = insn & 0xffff;
6098 unsigned int num_in_list = count_one_bits (regmask), new_regmask;
6099 unsigned int i;
6100
6101 for (i = 0; i < num_in_list; i++)
6102 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6103
6104 /* Writeback makes things complicated. We need to avoid clobbering
6105 the base register with one of the registers in our modified
6106 register list, but just using a different register can't work in
6107 all cases, e.g.:
6108
6109 ldm r14!, {r0-r13,pc}
6110
6111 which would need to be rewritten as:
6112
6113 ldm rN!, {r0-r14}
6114
6115 but that can't work, because there's no free register for N.
6116
6117 Solve this by turning off the writeback bit, and emulating
6118 writeback manually in the cleanup routine. */
6119
6120 if (writeback)
6121 insn &= ~(1 << 21);
6122
6123 new_regmask = (1 << num_in_list) - 1;
6124
6125 if (debug_displaced)
6126 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6127 "{..., pc}: original reg list %.4x, modified "
6128 "list %.4x\n"), rn, writeback ? "!" : "",
6129 (int) insn & 0xffff, new_regmask);
6130
6131 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
6132
6133 dsc->cleanup = &cleanup_block_load_pc;
6134 }
6135 }
6136 else
6137 {
6138 /* STM of a list of registers which includes PC. Run the instruction
6139 as-is, but out of line: this will store the wrong value for the PC,
6140 so we must manually fix up the memory in the cleanup routine.
6141 Doing things this way has the advantage that we can auto-detect
6142 the offset of the PC write (which is architecture-dependent) in
6143 the cleanup routine. */
6144 dsc->modinsn[0] = insn;
6145
6146 dsc->cleanup = &cleanup_block_store_pc;
6147 }
6148
6149 return 0;
6150 }
6151
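/* Handle Thumb-2 ldm/stm, analogously to arm_copy_block_xfer above.  */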
6152 static int
6153 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6154 struct regcache *regs,
6155 arm_displaced_step_closure *dsc)
6156 {
6157 int rn = bits (insn1, 0, 3);
6158 int load = bit (insn1, 4);
6159 int writeback = bit (insn1, 5);
6160
6161 /* Block transfers which don't mention PC can be run directly
6162 out-of-line. */
6163 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
6164 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
6165
6166 if (rn == ARM_PC_REGNUM)
6167 {
6168 warning (_("displaced: Unpredictable LDM or STM with "
6169 "base register r15"));
6170 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6171 "unpredictable ldm/stm", dsc);
6172 }
6173
6174 if (debug_displaced)
6175 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6176 "%.4x%.4x\n", insn1, insn2);
6177
6178 /* Clear bit 13, since it should always be zero. */
6179 dsc->u.block.regmask = (insn2 & 0xdfff);
6180 dsc->u.block.rn = rn;
6181
6182 dsc->u.block.load = load;
6183 dsc->u.block.user = 0;
6184 dsc->u.block.increment = bit (insn1, 7);
6185 dsc->u.block.before = bit (insn1, 8);
6186 dsc->u.block.writeback = writeback;
6187 dsc->u.block.cond = INST_AL;
6188 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
6189
6190 if (load)
6191 {
6192 if (dsc->u.block.regmask == 0xffff)
6193 {
6194 /* This case cannot be reached: bit 13 was cleared above, so the mask is never 0xffff. */
6195 gdb_assert (0);
6196 }
6197 else
6198 {
6199 unsigned int regmask = dsc->u.block.regmask;
6200 unsigned int num_in_list = count_one_bits (regmask), new_regmask;
6201 unsigned int i;
6202
6203 for (i = 0; i < num_in_list; i++)
6204 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6205
6206 if (writeback)
6207 insn1 &= ~(1 << 5);
6208
6209 new_regmask = (1 << num_in_list) - 1;
6210
6211 if (debug_displaced)
6212 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6213 "{..., pc}: original reg list %.4x, modified "
6214 "list %.4x\n"), rn, writeback ? "!" : "",
6215 (int) dsc->u.block.regmask, new_regmask);
6216
6217 dsc->modinsn[0] = insn1;
6218 dsc->modinsn[1] = (new_regmask & 0xffff);
6219 dsc->numinsns = 2;
6220
6221 dsc->cleanup = &cleanup_block_load_pc;
6222 }
6223 }
6224 else
6225 {
6226 dsc->modinsn[0] = insn1;
6227 dsc->modinsn[1] = insn2;
6228 dsc->numinsns = 2;
6229 dsc->cleanup = &cleanup_block_store_pc;
6230 }
6231 return 0;
6232 }
6233
6234 /* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
6235 This is used to avoid a dependency on BFD's bfd_endian enum. */
6236
6237 ULONGEST
6238 arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len,
6239 int byte_order)
6240 {
6241 return read_memory_unsigned_integer (memaddr, len,
6242 (enum bfd_endian) byte_order);
6243 }
6244
6245 /* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
6246
6247 CORE_ADDR
6248 arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self,
6249 CORE_ADDR val)
6250 {
6251 return gdbarch_addr_bits_remove (self->regcache->arch (), val);
6252 }
6253
6254 /* Wrapper over syscall_next_pc for use in get_next_pcs. */
6255
6256 static CORE_ADDR
6257 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self)
6258 {
6259 return 0;
6260 }
6261
6262 /* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
6263
6264 int
6265 arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self)
6266 {
6267 return arm_is_thumb (self->regcache);
6268 }
6269
6270 /* single_step() is called just before we want to resume the inferior,
6271 if we want to single-step it but there is no hardware or kernel
6272 single-step support. We find the target of the coming instructions
6273 and breakpoint them. */
6274
6275 std::vector<CORE_ADDR>
6276 arm_software_single_step (struct regcache *regcache)
6277 {
6278 struct gdbarch *gdbarch = regcache->arch ();
6279 struct arm_get_next_pcs next_pcs_ctx;
6280
6281 arm_get_next_pcs_ctor (&next_pcs_ctx,
6282 &arm_get_next_pcs_ops,
6283 gdbarch_byte_order (gdbarch),
6284 gdbarch_byte_order_for_code (gdbarch),
6285 0,
6286 regcache);
6287
6288 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
6289
6290 for (CORE_ADDR &pc_ref : next_pcs)
6291 pc_ref = gdbarch_addr_bits_remove (gdbarch, pc_ref);
6292
6293 return next_pcs;
6294 }
6295
6296 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6297 for Linux, where some SVC instructions must be treated specially. */
6298
6299 static void
6300 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
6301 arm_displaced_step_closure *dsc)
6302 {
6303 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
6304
6305 if (debug_displaced)
6306 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
6307 "%.8lx\n", (unsigned long) resume_addr);
6308
6309 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
6310 }
6311
6312
6313 /* Common copy routine for svc instruction. */
6314
6315 static int
6316 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
6317 arm_displaced_step_closure *dsc)
6318 {
6319 /* Preparation: none.
6320 Insn: unmodified svc.
6321 Cleanup: pc <- insn_addr + insn_size. */
6322
6323 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
6324 instruction. */
6325 dsc->wrote_to_pc = 1;
6326
6327 /* Allow OS-specific code to override SVC handling. */
6328 if (dsc->u.svc.copy_svc_os)
6329 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
6330 else
6331 {
6332 dsc->cleanup = &cleanup_svc;
6333 return 0;
6334 }
6335 }
6336
6337 static int
6338 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
6339 struct regcache *regs, arm_displaced_step_closure *dsc)
6340 {
6341
6342 if (debug_displaced)
6343 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
6344 (unsigned long) insn);
6345
6346 dsc->modinsn[0] = insn;
6347
6348 return install_svc (gdbarch, regs, dsc);
6349 }
6350
6351 static int
6352 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
6353 struct regcache *regs, arm_displaced_step_closure *dsc)
6354 {
6355
6356 if (debug_displaced)
6357 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
6358 insn);
6359
6360 dsc->modinsn[0] = insn;
6361
6362 return install_svc (gdbarch, regs, dsc);
6363 }
6364
6365 /* Copy undefined instructions. */
6366
6367 static int
6368 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
6369 arm_displaced_step_closure *dsc)
6370 {
6371 if (debug_displaced)
6372 fprintf_unfiltered (gdb_stdlog,
6373 "displaced: copying undefined insn %.8lx\n",
6374 (unsigned long) insn);
6375
6376 dsc->modinsn[0] = insn;
6377
6378 return 0;
6379 }
6380
6381 static int
6382 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6383 arm_displaced_step_closure *dsc)
6384 {
6385
6386 if (debug_displaced)
6387 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
6388 "%.4x %.4x\n", (unsigned short) insn1,
6389 (unsigned short) insn2);
6390
6391 dsc->modinsn[0] = insn1;
6392 dsc->modinsn[1] = insn2;
6393 dsc->numinsns = 2;
6394
6395 return 0;
6396 }
6397
6398 /* Copy unpredictable instructions. */
6399
6400 static int
6401 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
6402 arm_displaced_step_closure *dsc)
6403 {
6404 if (debug_displaced)
6405 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
6406 "%.8lx\n", (unsigned long) insn);
6407
6408 dsc->modinsn[0] = insn;
6409
6410 return 0;
6411 }
6412
6413 /* The decode_* functions are instruction decoding helpers. They mostly follow
6414 the presentation in the ARM ARM. */
6415
6416 static int
6417 arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
6418 struct regcache *regs,
6419 arm_displaced_step_closure *dsc)
6420 {
6421 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
6422 unsigned int rn = bits (insn, 16, 19);
6423
6424 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0x1) == 0x0)
6425 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
6426 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0x1) == 0x1)
6427 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
6428 else if ((op1 & 0x60) == 0x20)
6429 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
6430 else if ((op1 & 0x71) == 0x40)
6431 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
6432 dsc);
6433 else if ((op1 & 0x77) == 0x41)
6434 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6435 else if ((op1 & 0x77) == 0x45)
6436 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
6437 else if ((op1 & 0x77) == 0x51)
6438 {
6439 if (rn != 0xf)
6440 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6441 else
6442 return arm_copy_unpred (gdbarch, insn, dsc);
6443 }
6444 else if ((op1 & 0x77) == 0x55)
6445 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6446 else if (op1 == 0x57)
6447 switch (op2)
6448 {
6449 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
6450 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
6451 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
6452 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
6453 default: return arm_copy_unpred (gdbarch, insn, dsc);
6454 }
6455 else if ((op1 & 0x63) == 0x43)
6456 return arm_copy_unpred (gdbarch, insn, dsc);
6457 else if ((op2 & 0x1) == 0x0)
6458 switch (op1 & ~0x80)
6459 {
6460 case 0x61:
6461 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6462 case 0x65:
6463 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
6464 case 0x71: case 0x75:
6465 /* pld/pldw reg. */
6466 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
6467 case 0x63: case 0x67: case 0x73: case 0x77:
6468 return arm_copy_unpred (gdbarch, insn, dsc);
6469 default:
6470 return arm_copy_undef (gdbarch, insn, dsc);
6471 }
6472 else
6473 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
6474 }
6475
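/* Decode instructions in the ARM unconditional (0xf-prefixed) encoding
   space.  */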
6476 static int
6477 arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
6478 struct regcache *regs,
6479 arm_displaced_step_closure *dsc)
6480 {
6481 if (bit (insn, 27) == 0)
6482 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
6483 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
6484 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
6485 {
6486 case 0x0: case 0x2:
6487 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
6488
6489 case 0x1: case 0x3:
6490 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
6491
6492 case 0x4: case 0x5: case 0x6: case 0x7:
6493 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6494
6495 case 0x8:
6496 switch ((insn & 0xe00000) >> 21)
6497 {
6498 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
6499 /* stc/stc2. */
6500 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6501
6502 case 0x2:
6503 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6504
6505 default:
6506 return arm_copy_undef (gdbarch, insn, dsc);
6507 }
6508
6509 case 0x9:
6510 {
6511 int rn_f = (bits (insn, 16, 19) == 0xf);
6512 switch ((insn & 0xe00000) >> 21)
6513 {
6514 case 0x1: case 0x3:
6515 /* ldc/ldc2 imm (undefined for rn == pc). */
6516 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
6517 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6518
6519 case 0x2:
6520 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6521
6522 case 0x4: case 0x5: case 0x6: case 0x7:
6523 /* ldc/ldc2 lit (undefined for rn != pc). */
6524 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
6525 : arm_copy_undef (gdbarch, insn, dsc);
6526
6527 default:
6528 return arm_copy_undef (gdbarch, insn, dsc);
6529 }
6530 }
6531
6532 case 0xa:
6533 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
6534
6535 case 0xb:
6536 if (bits (insn, 16, 19) == 0xf)
6537 /* ldc/ldc2 lit. */
6538 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6539 else
6540 return arm_copy_undef (gdbarch, insn, dsc);
6541
6542 case 0xc:
6543 if (bit (insn, 4))
6544 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6545 else
6546 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6547
6548 case 0xd:
6549 if (bit (insn, 4))
6550 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6551 else
6552 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6553
6554 default:
6555 return arm_copy_undef (gdbarch, insn, dsc);
6556 }
6557 }
6558
6559 /* Decode miscellaneous instructions in dp/misc encoding space. */
6560
6561 static int
6562 arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
6563 struct regcache *regs,
6564 arm_displaced_step_closure *dsc)
6565 {
6566 unsigned int op2 = bits (insn, 4, 6);
6567 unsigned int op = bits (insn, 21, 22);
6568
6569 switch (op2)
6570 {
6571 case 0x0:
6572 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
6573
6574 case 0x1:
6575 if (op == 0x1) /* bx. */
6576 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
6577 else if (op == 0x3)
6578 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
6579 else
6580 return arm_copy_undef (gdbarch, insn, dsc);
6581
6582 case 0x2:
6583 if (op == 0x1)
6584 /* Not really supported. */
6585 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
6586 else
6587 return arm_copy_undef (gdbarch, insn, dsc);
6588
6589 case 0x3:
6590 if (op == 0x1)
6591 return arm_copy_bx_blx_reg (gdbarch, insn,
6592 regs, dsc); /* blx register. */
6593 else
6594 return arm_copy_undef (gdbarch, insn, dsc);
6595
6596 case 0x5:
6597 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
6598
6599 case 0x7:
6600 if (op == 0x1)
6601 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
6602 else if (op == 0x3)
6603 /* Not really supported. */
6604 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
6605 /* Fall through. */
6606
6607 default:
6608 return arm_copy_undef (gdbarch, insn, dsc);
6609 }
6610 }
6611
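/* Decode ARM data-processing and miscellaneous instructions, dispatching to
   the appropriate copy routine.  */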
6612 static int
6613 arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
6614 struct regcache *regs,
6615 arm_displaced_step_closure *dsc)
6616 {
6617 if (bit (insn, 25))
6618 switch (bits (insn, 20, 24))
6619 {
6620 case 0x10:
6621 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
6622
6623 case 0x14:
6624 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
6625
6626 case 0x12: case 0x16:
6627 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
6628
6629 default:
6630 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
6631 }
6632 else
6633 {
6634 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
6635
6636 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
6637 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
6638 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
6639 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
6640 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
6641 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
6642 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
6643 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
6644 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
6645 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
6646 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
6647 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
6648 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
6649 /* 2nd arg means "unprivileged". */
6650 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
6651 dsc);
6652 }
6653
6654 /* Should be unreachable. */
6655 return 1;
6656 }
6657
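/* Decode ARM load/store word and unsigned byte instructions, dispatching to
   arm_copy_ldr_str_ldrb_strb with the appropriate load, size and usermode
   arguments.  */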
6658 static int
6659 arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
6660 struct regcache *regs,
6661 arm_displaced_step_closure *dsc)
6662 {
6663 int a = bit (insn, 25), b = bit (insn, 4);
6664 uint32_t op1 = bits (insn, 20, 24);
6665
6666 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
6667 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
6668 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
6669 else if ((!a && (op1 & 0x17) == 0x02)
6670 || (a && (op1 & 0x17) == 0x02 && !b))
6671 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
6672 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
6673 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
6674 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
6675 else if ((!a && (op1 & 0x17) == 0x03)
6676 || (a && (op1 & 0x17) == 0x03 && !b))
6677 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
6678 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
6679 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
6680 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
6681 else if ((!a && (op1 & 0x17) == 0x06)
6682 || (a && (op1 & 0x17) == 0x06 && !b))
6683 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
6684 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
6685 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
6686 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
6687 else if ((!a && (op1 & 0x17) == 0x07)
6688 || (a && (op1 & 0x17) == 0x07 && !b))
6689 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
6690
6691 /* Should be unreachable. */
6692 return 1;
6693 }
6694
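/* Decode ARM media instructions (parallel add/sub, pack/unpack, saturate,
   reverse and bit-field operations); these are all copied unmodified or
   treated as undefined.  */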
6695 static int
6696 arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
6697 arm_displaced_step_closure *dsc)
6698 {
6699 switch (bits (insn, 20, 24))
6700 {
6701 case 0x00: case 0x01: case 0x02: case 0x03:
6702 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
6703
6704 case 0x04: case 0x05: case 0x06: case 0x07:
6705 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
6706
6707 case 0x08: case 0x09: case 0x0a: case 0x0b:
6708 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
6709 return arm_copy_unmodified (gdbarch, insn,
6710 "decode/pack/unpack/saturate/reverse", dsc);
6711
6712 case 0x18:
6713 if (bits (insn, 5, 7) == 0) /* op2. */
6714 {
6715 if (bits (insn, 12, 15) == 0xf)
6716 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
6717 else
6718 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
6719 }
6720 else
6721 return arm_copy_undef (gdbarch, insn, dsc);
6722
6723 case 0x1a: case 0x1b:
6724 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6725 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
6726 else
6727 return arm_copy_undef (gdbarch, insn, dsc);
6728
6729 case 0x1c: case 0x1d:
6730 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
6731 {
6732 if (bits (insn, 0, 3) == 0xf)
6733 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
6734 else
6735 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
6736 }
6737 else
6738 return arm_copy_undef (gdbarch, insn, dsc);
6739
6740 case 0x1e: case 0x1f:
6741 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6742 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
6743 else
6744 return arm_copy_undef (gdbarch, insn, dsc);
6745 }
6746
6747 /* Should be unreachable. */
6748 return 1;
6749 }
6750
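/* Decode branch, branch-with-link and block data transfer instructions.  */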
6751 static int
6752 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn,
6753 struct regcache *regs,
6754 arm_displaced_step_closure *dsc)
6755 {
6756 if (bit (insn, 25))
6757 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6758 else
6759 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
6760 }
6761
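/* Decode VFP/Neon extension register load/store instructions.  */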
6762 static int
6763 arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
6764 struct regcache *regs,
6765 arm_displaced_step_closure *dsc)
6766 {
6767 unsigned int opcode = bits (insn, 20, 24);
6768
6769 switch (opcode)
6770 {
6771 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
6772 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
6773
6774 case 0x08: case 0x0a: case 0x0c: case 0x0e:
6775 case 0x12: case 0x16:
6776 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
6777
6778 case 0x09: case 0x0b: case 0x0d: case 0x0f:
6779 case 0x13: case 0x17:
6780 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
6781
6782 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6783 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6784 /* Note: no writeback for these instructions. Bit 25 will always be
6785 zero though (via caller), so the following works OK. */
6786 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6787 }
6788
6789 /* Should be unreachable. */
6790 return 1;
6791 }
6792
6793 /* Decode shifted register instructions. */
6794
6795 static int
6796 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
6797 uint16_t insn2, struct regcache *regs,
6798 arm_displaced_step_closure *dsc)
6799 {
6800 /* PC is only allowed to be used in instruction MOV. */
6801
6802 unsigned int op = bits (insn1, 5, 8);
6803 unsigned int rn = bits (insn1, 0, 3);
6804
6805 if (op == 0x2 && rn == 0xf) /* MOV */
6806 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
6807 else
6808 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6809 "dp (shift reg)", dsc);
6810 }
6811
6812
6813 /* Decode extension register load/store. Exactly the same as
6814 arm_decode_ext_reg_ld_st. */
6815
6816 static int
6817 thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
6818 uint16_t insn2, struct regcache *regs,
6819 arm_displaced_step_closure *dsc)
6820 {
6821 unsigned int opcode = bits (insn1, 4, 8);
6822
6823 switch (opcode)
6824 {
6825 case 0x04: case 0x05:
6826 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6827 "vfp/neon vmov", dsc);
6828
6829 case 0x08: case 0x0c: /* 01x00 */
6830 case 0x0a: case 0x0e: /* 01x10 */
6831 case 0x12: case 0x16: /* 10x10 */
6832 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6833 "vfp/neon vstm/vpush", dsc);
6834
6835 case 0x09: case 0x0d: /* 01x01 */
6836 case 0x0b: case 0x0f: /* 01x11 */
6837 case 0x13: case 0x17: /* 10x11 */
6838 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6839 "vfp/neon vldm/vpop", dsc);
6840
6841 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6842 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6843 "vstr", dsc);
6844 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6845 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
6846 }
6847
6848 /* Should be unreachable. */
6849 return 1;
6850 }
6851
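/* Decode supervisor call and coprocessor instructions, including the VFP and
   Neon instructions that live in the coprocessor encoding space.  */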
6852 static int
6853 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
6854 struct regcache *regs, arm_displaced_step_closure *dsc)
6855 {
6856 unsigned int op1 = bits (insn, 20, 25);
6857 int op = bit (insn, 4);
6858 unsigned int coproc = bits (insn, 8, 11);
6859
6860 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
6861 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
6862 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
6863 && (coproc & 0xe) != 0xa)
6864 /* stc/stc2. */
6865 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6866 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
6867 && (coproc & 0xe) != 0xa)
6868 /* ldc/ldc2 imm/lit. */
6869 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6870 else if ((op1 & 0x3e) == 0x00)
6871 return arm_copy_undef (gdbarch, insn, dsc);
6872 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
6873 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
6874 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
6875 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6876 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
6877 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6878 else if ((op1 & 0x30) == 0x20 && !op)
6879 {
6880 if ((coproc & 0xe) == 0xa)
6881 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
6882 else
6883 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6884 }
6885 else if ((op1 & 0x30) == 0x20 && op)
6886 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
6887 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
6888 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6889 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
6890 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6891 else if ((op1 & 0x30) == 0x30)
6892 return arm_copy_svc (gdbarch, insn, regs, dsc);
6893 else
6894 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
6895 }
6896
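/* Decode Thumb-2 coprocessor and VFP/Neon instructions, analogously to
   arm_decode_svc_copro above.  */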
6897 static int
6898 thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
6899 uint16_t insn2, struct regcache *regs,
6900 arm_displaced_step_closure *dsc)
6901 {
6902 unsigned int coproc = bits (insn2, 8, 11);
6903 unsigned int bit_5_8 = bits (insn1, 5, 8);
6904 unsigned int bit_9 = bit (insn1, 9);
6905 unsigned int bit_4 = bit (insn1, 4);
6906
6907 if (bit_9 == 0)
6908 {
6909 if (bit_5_8 == 2)
6910 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6911 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
6912 dsc);
6913 else if (bit_5_8 == 0) /* UNDEFINED. */
6914 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
6915 else
6916 {
6917 /* coproc is 101x: SIMD/VFP, extension registers load/store. */
6918 if ((coproc & 0xe) == 0xa)
6919 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
6920 dsc);
6921 else /* coproc is not 101x. */
6922 {
6923 if (bit_4 == 0) /* STC/STC2. */
6924 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6925 "stc/stc2", dsc);
6926 else /* LDC/LDC2 {literal, immediate}. */
6927 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
6928 regs, dsc);
6929 }
6930 }
6931 }
6932 else
6933 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
6934
6935 return 0;
6936 }
6937
6938 static void
6939 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
6940 arm_displaced_step_closure *dsc, int rd)
6941 {
6942 /* ADR Rd, #imm
6943
6944 Rewrite as:
6945
6946 Preparation: Rd <- PC
6947 Insn: ADD Rd, #imm
6948 Cleanup: Null.
6949 */
6950
6951 /* Rd <- PC */
6952 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6953 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
6954 }
6955
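/* Copy a 16-bit Thumb PC-relative address calculation (ADR encoding T1):
   Rd is seeded with the PC value and the instruction is rewritten as
   ADDS Rd, #imm.  */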
6956 static int
6957 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
6958 arm_displaced_step_closure *dsc,
6959 int rd, unsigned int imm)
6960 {
6961
6962 /* Encoding T2: ADDS Rd, #imm */
6963 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
6964
6965 install_pc_relative (gdbarch, regs, dsc, rd);
6966
6967 return 0;
6968 }
6969
6970 static int
6971 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
6972 struct regcache *regs,
6973 arm_displaced_step_closure *dsc)
6974 {
6975 unsigned int rd = bits (insn, 8, 10);
6976 unsigned int imm8 = bits (insn, 0, 7);
6977
6978 if (debug_displaced)
6979 fprintf_unfiltered (gdb_stdlog,
6980 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
6981 rd, imm8, insn);
6982
6983 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
6984 }
6985
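/* Copy a 32-bit Thumb PC-relative address calculation (ADR.W), rewriting it
   as an ADD or SUB of the immediate after seeding Rd with the PC value.  */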
6986 static int
6987 thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
6988 uint16_t insn2, struct regcache *regs,
6989 arm_displaced_step_closure *dsc)
6990 {
6991 unsigned int rd = bits (insn2, 8, 11);
6992 /* The immediate has the same encoding in ADR, ADD and SUB, so we simply
6993 extract the raw immediate bits rather than computing the immediate value.
6994 When generating the ADD or SUB instruction, the immediate can then be
6995 ORed directly into the encoding. */
6996 unsigned int imm_3_8 = insn2 & 0x70ff;
6997 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
6998
6999 if (debug_displaced)
7000 fprintf_unfiltered (gdb_stdlog,
7001 "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
7002 rd, imm_i, imm_3_8, insn1, insn2);
7003
7004 if (bit (insn1, 7)) /* ADR encoding T2 (SUB form). */
7005 {
7006 /* Rewrite as SUB (immediate) encoding T3: SUB Rd, Rd, #imm. */
7007 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
7008 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7009 }
7010 else /* ADR encoding T3 (ADD form). */
7011 {
7012 /* Rewrite as ADD (immediate) encoding T3: ADD Rd, Rd, #imm. */
7013 dsc->modinsn[0] = (0xf100 | rd | imm_i);
7014 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7015 }
7016 dsc->numinsns = 2;
7017
7018 install_pc_relative (gdbarch, regs, dsc, rd);
7019
7020 return 0;
7021 }
7022
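/* Copy 16-bit Thumb LDR (literal), i.e. LDR Rt, [PC, #imm8].  */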
7023 static int
7024 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, uint16_t insn1,
7025 struct regcache *regs,
7026 arm_displaced_step_closure *dsc)
7027 {
7028 unsigned int rt = bits (insn1, 8, 10);
7029 unsigned int pc;
7030 int imm8 = (bits (insn1, 0, 7) << 2);
7031
7032 /* LDR Rt, [PC, #imm8]
7033
7034 Rewrite as:
7035
7036 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
7037
7038 Insn: LDR R0, [R2, R3];
7039 Cleanup: R2 <- tmp2, R3 <- tmp3, Rt <- R0, R0 <- tmp0. */
7040
7041 if (debug_displaced)
7042 fprintf_unfiltered (gdb_stdlog,
7043 "displaced: copying thumb ldr r%d [pc #%d]\n"
7044 , rt, imm8);
7045
7046 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
7047 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
7048 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
7049 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7050 /* The assembler calculates the required value of the offset from the
7051 Align(PC,4) value of this instruction to the label. */
7052 pc = pc & 0xfffffffc;
7053
7054 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
7055 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
7056
7057 dsc->rd = rt;
7058 dsc->u.ldst.xfersize = 4;
7059 dsc->u.ldst.rn = 0;
7060 dsc->u.ldst.immed = 0;
7061 dsc->u.ldst.writeback = 0;
7062 dsc->u.ldst.restore_r4 = 0;
7063
7064 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/
7065
7066 dsc->cleanup = &cleanup_load;
7067
7068 return 0;
7069 }
7070
7071 /* Copy Thumb cbnz/cbz instruction. */
7072
7073 static int
7074 thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
7075 struct regcache *regs,
7076 arm_displaced_step_closure *dsc)
7077 {
7078 int non_zero = bit (insn1, 11);
7079 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
7080 CORE_ADDR from = dsc->insn_addr;
7081 int rn = bits (insn1, 0, 2);
7082 int rn_val = displaced_read_reg (regs, dsc, rn);
7083
7084 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
7085 /* CBNZ and CBZ do not affect the condition flags. If the condition is true,
7086 set it to INST_AL so that cleanup_branch knows the branch is taken;
7087 otherwise leave it false, and cleanup_branch will do nothing. */
7088 if (dsc->u.branch.cond)
7089 {
7090 dsc->u.branch.cond = INST_AL;
7091 dsc->u.branch.dest = from + 4 + imm5;
7092 }
7093 else
7094 dsc->u.branch.dest = from + 2;
7095
7096 dsc->u.branch.link = 0;
7097 dsc->u.branch.exchange = 0;
7098
7099 if (debug_displaced)
7100 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
7101 " insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
7102 rn, rn_val, insn1, dsc->u.branch.dest);
7103
7104 dsc->modinsn[0] = THUMB_NOP;
7105
7106 dsc->cleanup = &cleanup_branch;
7107 return 0;
7108 }
7109
7110 /* Copy Table Branch Byte/Halfword (TBB/TBH) instructions. */
7111 static int
7112 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
7113 uint16_t insn2, struct regcache *regs,
7114 arm_displaced_step_closure *dsc)
7115 {
7116 ULONGEST rn_val, rm_val;
7117 int is_tbh = bit (insn2, 4);
7118 CORE_ADDR halfwords = 0;
7119 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7120
7121 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
7122 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
7123
7124 if (is_tbh)
7125 {
7126 gdb_byte buf[2];
7127
7128 target_read_memory (rn_val + 2 * rm_val, buf, 2);
7129 halfwords = extract_unsigned_integer (buf, 2, byte_order);
7130 }
7131 else
7132 {
7133 gdb_byte buf[1];
7134
7135 target_read_memory (rn_val + rm_val, buf, 1);
7136 halfwords = extract_unsigned_integer (buf, 1, byte_order);
7137 }
7138
7139 if (debug_displaced)
7140 fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x offset 0x%x"
7141 " offset 0x%x\n", is_tbh ? "tbh" : "tbb",
7142 (unsigned int) rn_val, (unsigned int) rm_val,
7143 (unsigned int) halfwords);
7144
7145 dsc->u.branch.cond = INST_AL;
7146 dsc->u.branch.link = 0;
7147 dsc->u.branch.exchange = 0;
7148 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
7149
7150 dsc->cleanup = &cleanup_branch;
7151
7152 return 0;
7153 }
7154
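/* Clean up a 16-bit Thumb POP {r0-r7, pc}: the modified sequence left the
   popped PC value in r7 and the original r7 value in r8, so move them into
   place and restore r8 from the scratch copy.  */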
7155 static void
7156 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
7157 arm_displaced_step_closure *dsc)
7158 {
7159 /* PC <- r7 */
7160 int val = displaced_read_reg (regs, dsc, 7);
7161 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
7162
7163 /* r7 <- r8 */
7164 val = displaced_read_reg (regs, dsc, 8);
7165 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
7166
7167 /* r8 <- tmp[0] */
7168 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
7169
7170 }
7171
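/* Copy 16-bit Thumb POP instructions whose register list includes the PC.  */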
7172 static int
7173 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, uint16_t insn1,
7174 struct regcache *regs,
7175 arm_displaced_step_closure *dsc)
7176 {
7177 dsc->u.block.regmask = insn1 & 0x00ff;
7178
7179 /* Rewrite instruction POP {rX, rY, ..., rZ, PC}
7180 as follows:
7181
7182 (1) register list is full, that is, r0-r7 are used.
7183 Prepare: tmp[0] <- r8
7184
7185 POP {r0, r1, ...., r6, r7}; remove PC from reglist
7186 MOV r8, r7; Move value of r7 to r8;
7187 POP {r7}; Store PC value into r7.
7188
7189 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
7190
7191 (2) register list is not full, supposing there are N registers in
7192 register list (except PC, 0 <= N <= 7).
7193 Prepare: for each i, 0 - N, tmp[i] <- ri.
7194
7195 POP {r0, r1, ...., rN};
7196
7197 Cleanup: Write the popped values, now in r0 - rN, to the registers in the
7198 original reglist (including the PC), then restore r0 - rN from tmp[].
7199 */
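/* For example, POP {r0, r2, pc} has regmask 0x05 with N = 2, so r0-r2 are
   saved to tmp[], the scratch copy becomes POP {r0, r1, r2}, and
   cleanup_block_load_pc later distributes the three popped words to r0, r2
   and the PC before restoring the clobbered registers from tmp[].  */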
7200 if (debug_displaced)
7201 fprintf_unfiltered (gdb_stdlog,
7202 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
7203 dsc->u.block.regmask, insn1);
7204
7205 if (dsc->u.block.regmask == 0xff)
7206 {
7207 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
7208
7209 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
7210 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
7211 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
7212
7213 dsc->numinsns = 3;
7214 dsc->cleanup = &cleanup_pop_pc_16bit_all;
7215 }
7216 else
7217 {
7218 unsigned int num_in_list = count_one_bits (dsc->u.block.regmask);
7219 unsigned int i;
7220 unsigned int new_regmask;
7221
7222 for (i = 0; i < num_in_list + 1; i++)
7223 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7224
7225 new_regmask = (1 << (num_in_list + 1)) - 1;
7226
7227 if (debug_displaced)
7228 fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
7229 "{..., pc}: original reg list %.4x,"
7230 " modified list %.4x\n"),
7231 (int) dsc->u.block.regmask, new_regmask);
7232
7233 dsc->u.block.regmask |= 0x8000;
7234 dsc->u.block.writeback = 0;
7235 dsc->u.block.cond = INST_AL;
7236
7237 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
7238
7239 dsc->cleanup = &cleanup_block_load_pc;
7240 }
7241
7242 return 0;
7243 }
7244
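/* Decode a 16-bit Thumb instruction INSN1 for displaced stepping and fill in
   DSC accordingly.  Most 16-bit encodings cannot reference the PC and are
   simply copied unmodified; the PC-relative and branch forms are rewritten by
   the helpers called below.  */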
7245 static void
7246 thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7247 struct regcache *regs,
7248 arm_displaced_step_closure *dsc)
7249 {
7250 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
7251 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
7252 int err = 0;
7253
7254 /* 16-bit thumb instructions. */
7255 switch (op_bit_12_15)
7256 {
7257 /* Shift (immediate), add, subtract, move and compare. */
7258 case 0: case 1: case 2: case 3:
7259 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7260 "shift/add/sub/mov/cmp",
7261 dsc);
7262 break;
7263 case 4:
7264 switch (op_bit_10_11)
7265 {
7266 case 0: /* Data-processing */
7267 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7268 "data-processing",
7269 dsc);
7270 break;
7271 case 1: /* Special data instructions and branch and exchange. */
7272 {
7273 unsigned short op = bits (insn1, 7, 9);
7274 if (op == 6 || op == 7) /* BX or BLX */
7275 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
7276 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
7277 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
7278 else
7279 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
7280 dsc);
7281 }
7282 break;
7283 default: /* LDR (literal) */
7284 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
7285 }
7286 break;
7287 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
7288 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
7289 break;
7290 case 10:
7291 if (op_bit_10_11 < 2) /* Generate PC-relative address */
7292 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
7293 else /* Generate SP-relative address */
7294 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
7295 break;
7296 case 11: /* Misc 16-bit instructions */
7297 {
7298 switch (bits (insn1, 8, 11))
7299 {
7300 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
7301 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
7302 break;
7303 case 12: case 13: /* POP */
7304 if (bit (insn1, 8)) /* PC is in register list. */
7305 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
7306 else
7307 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
7308 break;
7309 case 15: /* If-Then, and hints */
7310 if (bits (insn1, 0, 3))
7311 /* If-Then makes up to four following instructions conditional.
7312 The IT instruction itself is not conditional, so handle it as
7313 an ordinary unmodified instruction. */
7314 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
7315 dsc);
7316 else
7317 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
7318 break;
7319 default:
7320 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
7321 }
7322 }
7323 break;
7324 case 12:
7325 if (op_bit_10_11 < 2) /* Store multiple registers */
7326 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
7327 else /* Load multiple registers */
7328 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
7329 break;
7330 case 13: /* Conditional branch and supervisor call */
7331 if (bits (insn1, 9, 11) != 7) /* conditional branch */
7332 err = thumb_copy_b (gdbarch, insn1, dsc);
7333 else
7334 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
7335 break;
7336 case 14: /* Unconditional branch */
7337 err = thumb_copy_b (gdbarch, insn1, dsc);
7338 break;
7339 default:
7340 err = 1;
7341 }
7342
7343 if (err)
7344 internal_error (__FILE__, __LINE__,
7345 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
7346 }
7347
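/* Copy a 32-bit Thumb-2 load instruction or memory hint (PLD/PLI), given as
   the halfwords INSN1 and INSN2, for displaced stepping.  PC-relative
   (literal) forms and word loads get PC-aware copy routines; the remaining
   forms are copied unmodified.  */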
7348 static int
7349 decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
7350 uint16_t insn1, uint16_t insn2,
7351 struct regcache *regs,
7352 arm_displaced_step_closure *dsc)
7353 {
7354 int rt = bits (insn2, 12, 15);
7355 int rn = bits (insn1, 0, 3);
7356 int op1 = bits (insn1, 7, 8);
7357
7358 switch (bits (insn1, 5, 6))
7359 {
7360 case 0: /* Load byte and memory hints */
7361 if (rt == 0xf) /* PLD/PLI */
7362 {
7363 if (rn == 0xf)
7364 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
7365 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
7366 else
7367 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7368 "pli/pld", dsc);
7369 }
7370 else
7371 {
7372 if (rn == 0xf) /* LDRB/LDRSB (literal) */
7373 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7374 1);
7375 else
7376 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7377 "ldrb{reg, immediate}/ldrbt",
7378 dsc);
7379 }
7380
7381 break;
7382 case 1: /* Load halfword and memory hints. */
7383 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
7384 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7385 "pld/unalloc memhint", dsc);
7386 else
7387 {
7388 if (rn == 0xf)
7389 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7390 2);
7391 else
7392 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7393 "ldrh/ldrht", dsc);
7394 }
7395 break;
7396 case 2: /* Load word */
7397 {
7398 int insn2_bit_8_11 = bits (insn2, 8, 11);
7399
7400 if (rn == 0xf)
7401 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
7402 else if (op1 == 0x1) /* Encoding T3 */
7403 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
7404 0, 1);
7405 else /* op1 == 0x0 */
7406 {
7407 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
7408 /* LDR (immediate) */
7409 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7410 dsc, bit (insn2, 8), 1);
7411 else if (insn2_bit_8_11 == 0xe) /* LDRT */
7412 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7413 "ldrt", dsc);
7414 else
7415 /* LDR (register) */
7416 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7417 dsc, 0, 0);
7418 }
7419 break;
7420 }
7421 default:
7422 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7423 break;
7424 }
7425 return 0;
7426 }
7427
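/* Decode the 32-bit Thumb-2 instruction formed by the halfwords INSN1 and
   INSN2 and prepare DSC for displaced stepping.  */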
7428 static void
7429 thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7430 uint16_t insn2, struct regcache *regs,
7431 arm_displaced_step_closure *dsc)
7432 {
7433 int err = 0;
7434 unsigned short op = bit (insn2, 15);
7435 unsigned int op1 = bits (insn1, 11, 12);
7436
7437 switch (op1)
7438 {
7439 case 1:
7440 {
7441 switch (bits (insn1, 9, 10))
7442 {
7443 case 0:
7444 if (bit (insn1, 6))
7445 {
7446 /* Load/store {dual, exclusive}, table branch. */
7447 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
7448 && bits (insn2, 5, 7) == 0)
7449 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
7450 dsc);
7451 else
7452 /* The PC is not allowed to be used in load/store {dual,
7453 exclusive} instructions. */
7454 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7455 "load/store dual/ex", dsc);
7456 }
7457 else /* load/store multiple */
7458 {
7459 switch (bits (insn1, 7, 8))
7460 {
7461 case 0: case 3: /* SRS, RFE */
7462 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7463 "srs/rfe", dsc);
7464 break;
7465 case 1: case 2: /* LDM/STM/PUSH/POP */
7466 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
7467 break;
7468 }
7469 }
7470 break;
7471
7472 case 1:
7473 /* Data-processing (shift register). */
7474 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
7475 dsc);
7476 break;
7477 default: /* Coprocessor instructions. */
7478 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7479 break;
7480 }
7481 break;
7482 }
7483 case 2: /* op1 = 2 */
7484 if (op) /* Branch and misc control. */
7485 {
7486 if (bit (insn2, 14) /* BLX/BL */
7487 || bit (insn2, 12) /* Unconditional branch */
7488 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
7489 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
7490 else
7491 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7492 "misc ctrl", dsc);
7493 }
7494 else
7495 {
7496 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
7497 {
7498 int dp_op = bits (insn1, 4, 8);
7499 int rn = bits (insn1, 0, 3);
7500 if ((dp_op == 0 || dp_op == 0xa) && rn == 0xf)
7501 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
7502 regs, dsc);
7503 else
7504 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7505 "dp/pb", dsc);
7506 }
7507 else /* Data processing (modified immediate) */
7508 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7509 "dp/mi", dsc);
7510 }
7511 break;
7512 case 3: /* op1 = 3 */
7513 switch (bits (insn1, 9, 10))
7514 {
7515 case 0:
7516 if (bit (insn1, 4))
7517 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
7518 regs, dsc);
7519 else /* NEON Load/Store and Store single data item */
7520 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7521 "neon elt/struct load/store",
7522 dsc);
7523 break;
7524 case 1: /* op1 = 3, bits (9, 10) == 1 */
7525 switch (bits (insn1, 7, 8))
7526 {
7527 case 0: case 1: /* Data processing (register) */
7528 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7529 "dp(reg)", dsc);
7530 break;
7531 case 2: /* Multiply and absolute difference */
7532 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7533 "mul/mua/diff", dsc);
7534 break;
7535 case 3: /* Long multiply and divide */
7536 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7537 "lmul/lmua", dsc);
7538 break;
7539 }
7540 break;
7541 default: /* Coprocessor instructions */
7542 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7543 break;
7544 }
7545 break;
7546 default:
7547 err = 1;
7548 }
7549
7550 if (err)
7551 internal_error (__FILE__, __LINE__,
7552 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
7553
7554 }
7555
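/* Prepare DSC for displaced stepping of the Thumb instruction at FROM,
   reading one or two halfwords as needed and dispatching to the 16-bit or
   32-bit handler above.  */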
7556 static void
7557 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7558 struct regcache *regs,
7559 arm_displaced_step_closure *dsc)
7560 {
7561 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7562 uint16_t insn1
7563 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
7564
7565 if (debug_displaced)
7566 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
7567 "at %.8lx\n", insn1, (unsigned long) from);
7568
7569 dsc->is_thumb = 1;
7570 dsc->insn_size = thumb_insn_size (insn1);
7571 if (thumb_insn_size (insn1) == 4)
7572 {
7573 uint16_t insn2
7574 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
7575 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
7576 }
7577 else
7578 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
7579 }
7580
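/* Fill in DSC with a displaced copy of the instruction at FROM, which will be
   executed out of line in the scratch space at TO.  Thumb instructions are
   handled by thumb_process_displaced_insn above.  */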
7581 void
7582 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7583 CORE_ADDR to, struct regcache *regs,
7584 arm_displaced_step_closure *dsc)
7585 {
7586 int err = 0;
7587 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7588 uint32_t insn;
7589
7590 /* Most displaced instructions use a 1-instruction scratch space, so set this
7591 here and override below if/when necessary. */
7592 dsc->numinsns = 1;
7593 dsc->insn_addr = from;
7594 dsc->scratch_base = to;
7595 dsc->cleanup = NULL;
7596 dsc->wrote_to_pc = 0;
7597
7598 if (!displaced_in_arm_mode (regs))
7599 return thumb_process_displaced_insn (gdbarch, from, regs, dsc);
7600
7601 dsc->is_thumb = 0;
7602 dsc->insn_size = 4;
7603 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
7604 if (debug_displaced)
7605 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
7606 "at %.8lx\n", (unsigned long) insn,
7607 (unsigned long) from);
7608
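/* The switch value below places bit 4 of the instruction in its low bit and
   bits 27:25 in bits 3:1, which together select the major ARM encoding groups
   (data processing/misc, load/store, media, branch/block transfer,
   coprocessor/supervisor call).  */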
7609 if ((insn & 0xf0000000) == 0xf0000000)
7610 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
7611 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
7612 {
7613 case 0x0: case 0x1: case 0x2: case 0x3:
7614 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
7615 break;
7616
7617 case 0x4: case 0x5: case 0x6:
7618 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
7619 break;
7620
7621 case 0x7:
7622 err = arm_decode_media (gdbarch, insn, dsc);
7623 break;
7624
7625 case 0x8: case 0x9: case 0xa: case 0xb:
7626 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
7627 break;
7628
7629 case 0xc: case 0xd: case 0xe: case 0xf:
7630 err = arm_decode_svc_copro (gdbarch, insn, regs, dsc);
7631 break;
7632 }
7633
7634 if (err)
7635 internal_error (__FILE__, __LINE__,
7636 _("arm_process_displaced_insn: Instruction decode error"));
7637 }
7638
7639 /* Actually set up the scratch space for a displaced instruction. */
7640
7641 void
7642 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
7643 CORE_ADDR to, arm_displaced_step_closure *dsc)
7644 {
7645 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7646 unsigned int i, len, offset;
7647 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7648 int size = dsc->is_thumb ? 2 : 4;
7649 const gdb_byte *bkp_insn;
7650
7651 offset = 0;
7652 /* Poke modified instruction(s). */
7653 for (i = 0; i < dsc->numinsns; i++)
7654 {
7655 if (debug_displaced)
7656 {
7657 fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
7658 if (size == 4)
7659 fprintf_unfiltered (gdb_stdlog, "%.8lx",
7660 dsc->modinsn[i]);
7661 else if (size == 2)
7662 fprintf_unfiltered (gdb_stdlog, "%.4x",
7663 (unsigned short)dsc->modinsn[i]);
7664
7665 fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
7666 (unsigned long) to + offset);
7667
7668 }
7669 write_memory_unsigned_integer (to + offset, size,
7670 byte_order_for_code,
7671 dsc->modinsn[i]);
7672 offset += size;
7673 }
7674
7675 /* Choose the correct breakpoint instruction. */
7676 if (dsc->is_thumb)
7677 {
7678 bkp_insn = tdep->thumb_breakpoint;
7679 len = tdep->thumb_breakpoint_size;
7680 }
7681 else
7682 {
7683 bkp_insn = tdep->arm_breakpoint;
7684 len = tdep->arm_breakpoint_size;
7685 }
7686
7687 /* Put breakpoint afterwards. */
7688 write_memory (to + offset, bkp_insn, len);
7689
7690 if (debug_displaced)
7691 fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
7692 paddress (gdbarch, from), paddress (gdbarch, to));
7693 }
7694
7695 /* Entry point for cleaning things up after a displaced instruction has been
7696 single-stepped. */
7697
7698 void
7699 arm_displaced_step_fixup (struct gdbarch *gdbarch,
7700 struct displaced_step_closure *dsc_,
7701 CORE_ADDR from, CORE_ADDR to,
7702 struct regcache *regs)
7703 {
7704 arm_displaced_step_closure *dsc = (arm_displaced_step_closure *) dsc_;
7705
7706 if (dsc->cleanup)
7707 dsc->cleanup (gdbarch, regs, dsc);
7708
7709 if (!dsc->wrote_to_pc)
7710 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
7711 dsc->insn_addr + dsc->insn_size);
7712
7713 }
7714
7715 #include "bfd-in2.h"
7716 #include "libcoff.h"
7717
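/* Print the instruction at MEMADDR using opcodes.  If MEMADDR is in Thumb
   code, hand opcodes a fake Thumb symbol so that it decodes Thumb
   instructions; otherwise disassemble as ARM.  */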
7718 static int
7719 gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
7720 {
7721 gdb_disassembler *di
7722 = static_cast<gdb_disassembler *>(info->application_data);
7723 struct gdbarch *gdbarch = di->arch ();
7724
7725 if (arm_pc_is_thumb (gdbarch, memaddr))
7726 {
7727 static asymbol *asym;
7728 static combined_entry_type ce;
7729 static struct coff_symbol_struct csym;
7730 static struct bfd fake_bfd;
7731 static bfd_target fake_target;
7732
7733 if (csym.native == NULL)
7734 {
7735 /* Create a fake symbol vector containing a Thumb symbol.
7736 This is solely so that the code in print_insn_little_arm()
7737 and print_insn_big_arm() in opcodes/arm-dis.c will detect
7738 the presence of a Thumb symbol and switch to decoding
7739 Thumb instructions. */
7740
7741 fake_target.flavour = bfd_target_coff_flavour;
7742 fake_bfd.xvec = &fake_target;
7743 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
7744 csym.native = &ce;
7745 csym.symbol.the_bfd = &fake_bfd;
7746 csym.symbol.name = "fake";
7747 asym = (asymbol *) & csym;
7748 }
7749
7750 memaddr = UNMAKE_THUMB_ADDR (memaddr);
7751 info->symbols = &asym;
7752 }
7753 else
7754 info->symbols = NULL;
7755
7756 /* GDB is able to get bfd_mach from exec_bfd, so info->mach is
7757 accurate; set the USER_SPECIFIED_MACHINE_TYPE bit. Otherwise,
7758 opcodes/arm-dis.c:print_insn would reset info->mach, triggering
7759 the assert on the mismatch between info->mach and
7760 bfd_get_mach (exec_bfd) in default_print_insn. */
7761 if (exec_bfd != NULL)
7762 info->flags |= USER_SPECIFIED_MACHINE_TYPE;
7763
7764 return default_print_insn (memaddr, info);
7765 }
7766
7767 /* The following define instruction sequences that will cause ARM
7768 CPUs to take an undefined instruction trap. These are used to
7769 signal a breakpoint to GDB.
7770
7771 The newer ARMv4T CPUs are capable of operating in ARM or Thumb
7772 modes. A different instruction is required for each mode. The ARM
7773 CPUs can also be big or little endian. Thus four different
7774 instructions are needed to support all cases.
7775
7776 Note: ARMv4 defines several new instructions that will take the
7777 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
7778 not in fact add the new instructions. The new undefined
7779 instructions in ARMv4 are all instructions that had no defined
7780 behaviour in earlier chips. There is no guarantee that they will
7781 raise an exception; they may be treated as NOPs. In practice, it
7782 may only be safe to rely on instructions matching:
7783
7784 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
7785 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
7786 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
7787
7788 Even this may only be true if the condition predicate is true. The
7789 following use a condition predicate of ALWAYS so it is always TRUE.
7790
7791 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
7792 and NetBSD all use a software interrupt rather than an undefined
7793 instruction to force a trap. This can be handled by the
7794 ABI-specific code during establishment of the gdbarch vector. */
7795
7796 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
7797 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
7798 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
7799 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
7800
7801 static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
7802 static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
7803 static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
7804 static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
7805
7806 /* Implement the breakpoint_kind_from_pc gdbarch method. */
7807
7808 static int
7809 arm_breakpoint_kind_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr)
7810 {
7811 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7812 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7813
7814 if (arm_pc_is_thumb (gdbarch, *pcptr))
7815 {
7816 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
7817
7818 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
7819 check whether we are replacing a 32-bit instruction. */
7820 if (tdep->thumb2_breakpoint != NULL)
7821 {
7822 gdb_byte buf[2];
7823
7824 if (target_read_memory (*pcptr, buf, 2) == 0)
7825 {
7826 unsigned short inst1;
7827
7828 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
7829 if (thumb_insn_size (inst1) == 4)
7830 return ARM_BP_KIND_THUMB2;
7831 }
7832 }
7833
7834 return ARM_BP_KIND_THUMB;
7835 }
7836 else
7837 return ARM_BP_KIND_ARM;
7838
7839 }
7840
7841 /* Implement the sw_breakpoint_from_kind gdbarch method. */
7842
7843 static const gdb_byte *
7844 arm_sw_breakpoint_from_kind (struct gdbarch *gdbarch, int kind, int *size)
7845 {
7846 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7847
7848 switch (kind)
7849 {
7850 case ARM_BP_KIND_ARM:
7851 *size = tdep->arm_breakpoint_size;
7852 return tdep->arm_breakpoint;
7853 case ARM_BP_KIND_THUMB:
7854 *size = tdep->thumb_breakpoint_size;
7855 return tdep->thumb_breakpoint;
7856 case ARM_BP_KIND_THUMB2:
7857 *size = tdep->thumb2_breakpoint_size;
7858 return tdep->thumb2_breakpoint;
7859 default:
7860 gdb_assert_not_reached ("unexpected arm breakpoint kind");
7861 }
7862 }
7863
7864 /* Implement the breakpoint_kind_from_current_state gdbarch method. */
7865
7866 static int
7867 arm_breakpoint_kind_from_current_state (struct gdbarch *gdbarch,
7868 struct regcache *regcache,
7869 CORE_ADDR *pcptr)
7870 {
7871 gdb_byte buf[4];
7872
7873 /* Check that the memory pointed to by the PC is readable. */
7874 if (target_read_memory (regcache_read_pc (regcache), buf, 4) == 0)
7875 {
7876 struct arm_get_next_pcs next_pcs_ctx;
7877
7878 arm_get_next_pcs_ctor (&next_pcs_ctx,
7879 &arm_get_next_pcs_ops,
7880 gdbarch_byte_order (gdbarch),
7881 gdbarch_byte_order_for_code (gdbarch),
7882 0,
7883 regcache);
7884
7885 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
7886
7887 /* If *PCPTR is one of the possible next PCs of the current PC,
7888 use the software single-step computation and derive the Thumb
7889 mode from the destination address. */
7890 for (CORE_ADDR pc : next_pcs)
7891 {
7892 if (UNMAKE_THUMB_ADDR (pc) == *pcptr)
7893 {
7894 if (IS_THUMB_ADDR (pc))
7895 {
7896 *pcptr = MAKE_THUMB_ADDR (*pcptr);
7897 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7898 }
7899 else
7900 return ARM_BP_KIND_ARM;
7901 }
7902 }
7903 }
7904
7905 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7906 }
7907
7908 /* Extract from an array REGBUF containing the (raw) register state a
7909 function return value of type TYPE, and copy that, in virtual
7910 format, into VALBUF. */
7911
7912 static void
7913 arm_extract_return_value (struct type *type, struct regcache *regs,
7914 gdb_byte *valbuf)
7915 {
7916 struct gdbarch *gdbarch = regs->arch ();
7917 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7918
7919 if (TYPE_CODE_FLT == type->code ())
7920 {
7921 switch (gdbarch_tdep (gdbarch)->fp_model)
7922 {
7923 case ARM_FLOAT_FPA:
7924 {
7925 /* The value is in register F0 in internal format. We need to
7926 extract the raw value and then convert it to the desired
7927 internal type. */
7928 bfd_byte tmpbuf[ARM_FP_REGISTER_SIZE];
7929
7930 regs->cooked_read (ARM_F0_REGNUM, tmpbuf);
7931 target_float_convert (tmpbuf, arm_ext_type (gdbarch),
7932 valbuf, type);
7933 }
7934 break;
7935
7936 case ARM_FLOAT_SOFT_FPA:
7937 case ARM_FLOAT_SOFT_VFP:
7938 /* ARM_FLOAT_VFP can arise if this is a variadic function and
7939 so is not using the VFP ABI code. */
7940 case ARM_FLOAT_VFP:
7941 regs->cooked_read (ARM_A1_REGNUM, valbuf);
7942 if (TYPE_LENGTH (type) > 4)
7943 regs->cooked_read (ARM_A1_REGNUM + 1,
7944 valbuf + ARM_INT_REGISTER_SIZE);
7945 break;
7946
7947 default:
7948 internal_error (__FILE__, __LINE__,
7949 _("arm_extract_return_value: "
7950 "Floating point model not supported"));
7951 break;
7952 }
7953 }
7954 else if (type->code () == TYPE_CODE_INT
7955 || type->code () == TYPE_CODE_CHAR
7956 || type->code () == TYPE_CODE_BOOL
7957 || type->code () == TYPE_CODE_PTR
7958 || TYPE_IS_REFERENCE (type)
7959 || type->code () == TYPE_CODE_ENUM)
7960 {
7961 /* If the type is a plain integer, then the access is
7962 straightforward. Otherwise we have to play around a bit
7963 more. */
7964 int len = TYPE_LENGTH (type);
7965 int regno = ARM_A1_REGNUM;
7966 ULONGEST tmp;
7967
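/* For example, an 8-byte integer comes back in r0 and r1; each iteration
   below copies up to 4 bytes from the next core register into VALBUF.  */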
7968 while (len > 0)
7969 {
7970 /* By using store_unsigned_integer we avoid having to do
7971 anything special for small big-endian values. */
7972 regcache_cooked_read_unsigned (regs, regno++, &tmp);
7973 store_unsigned_integer (valbuf,
7974 (len > ARM_INT_REGISTER_SIZE
7975 ? ARM_INT_REGISTER_SIZE : len),
7976 byte_order, tmp);
7977 len -= ARM_INT_REGISTER_SIZE;
7978 valbuf += ARM_INT_REGISTER_SIZE;
7979 }
7980 }
7981 else
7982 {
7983 /* For a structure or union the behaviour is as if the value had
7984 been stored to word-aligned memory and then loaded into
7985 registers with 32-bit load instruction(s). */
7986 int len = TYPE_LENGTH (type);
7987 int regno = ARM_A1_REGNUM;
7988 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
7989
7990 while (len > 0)
7991 {
7992 regs->cooked_read (regno++, tmpbuf);
7993 memcpy (valbuf, tmpbuf,
7994 len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len);
7995 len -= ARM_INT_REGISTER_SIZE;
7996 valbuf += ARM_INT_REGISTER_SIZE;
7997 }
7998 }
7999 }
8000
8001
8002 /* Will a function return an aggregate type in memory or in a
8003 register? Return 0 if an aggregate type can be returned in a
8004 register, 1 if it must be returned in memory. */
8005
8006 static int
8007 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
8008 {
8009 enum type_code code;
8010
8011 type = check_typedef (type);
8012
8013 /* Simple, non-aggregate types (i.e. not including vectors and
8014 complex) are always returned in a register (or registers). */
8015 code = type->code ();
8016 if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
8017 && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
8018 return 0;
8019
8020 if (TYPE_CODE_ARRAY == code && type->is_vector ())
8021 {
8022 /* Vector values should be returned using ARM registers if they
8023 are not over 16 bytes. */
8024 return (TYPE_LENGTH (type) > 16);
8025 }
8026
8027 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
8028 {
8029 /* The AAPCS says all aggregates not larger than a word are returned
8030 in a register. */
8031 if (TYPE_LENGTH (type) <= ARM_INT_REGISTER_SIZE)
8032 return 0;
8033
8034 return 1;
8035 }
8036 else
8037 {
8038 int nRc;
8039
8040 /* All aggregate types that won't fit in a register must be returned
8041 in memory. */
8042 if (TYPE_LENGTH (type) > ARM_INT_REGISTER_SIZE)
8043 return 1;
8044
8045 /* In the ARM ABI, "integer" like aggregate types are returned in
8046 registers. For an aggregate type to be integer like, its size
8047 must be less than or equal to ARM_INT_REGISTER_SIZE and the
8048 offset of each addressable subfield must be zero. Note that bit
8049 fields are not addressable, and all addressable subfields of
8050 unions always start at offset zero.
8051
8052 This function is based on the behaviour of GCC 2.95.1.
8053 See: gcc/arm.c: arm_return_in_memory() for details.
8054
8055 Note: All versions of GCC before GCC 2.95.2 do not set up the
8056 parameters correctly for a function returning the following
8057 structure: struct { float f;}; This should be returned in memory,
8058 not a register. Richard Earnshaw sent me a patch, but I do not
8059 know of any way to detect if a function like the above has been
8060 compiled with the correct calling convention. */
8061
8062 /* Assume all other aggregate types can be returned in a register.
8063 Run a check for structures, unions and arrays. */
8064 nRc = 0;
8065
8066 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
8067 {
8068 int i;
8069 /* Need to check if this struct/union is "integer" like. For
8070 this to be true, its size must be less than or equal to
8071 ARM_INT_REGISTER_SIZE and the offset of each addressable
8072 subfield must be zero. Note that bit fields are not
8073 addressable, and unions always start at offset zero. If any
8074 of the subfields is a floating point type, the struct/union
8075 cannot be an integer type. */
8076
8077 /* For each field in the object, check:
8078 1) Is it FP? --> yes, nRc = 1;
8079 2) Is it addressable (bitpos != 0) and
8080 not packed (bitsize == 0)?
8081 --> yes, nRc = 1
8082 */
8083
8084 for (i = 0; i < type->num_fields (); i++)
8085 {
8086 enum type_code field_type_code;
8087
8088 field_type_code
8089 = check_typedef (type->field (i).type ())->code ();
8090
8091 /* Is it a floating point type field? */
8092 if (field_type_code == TYPE_CODE_FLT)
8093 {
8094 nRc = 1;
8095 break;
8096 }
8097
8098 /* If bitpos != 0, then we have to care about it. */
8099 if (TYPE_FIELD_BITPOS (type, i) != 0)
8100 {
8101 /* Bitfields are not addressable. If the field bitsize is
8102 zero, then the field is not packed. Hence it cannot be
8103 a bitfield or any other packed type. */
8104 if (TYPE_FIELD_BITSIZE (type, i) == 0)
8105 {
8106 nRc = 1;
8107 break;
8108 }
8109 }
8110 }
8111 }
8112
8113 return nRc;
8114 }
8115 }
8116
8117 /* Write into appropriate registers a function return value of type
8118 TYPE, given in virtual format. */
8119
8120 static void
8121 arm_store_return_value (struct type *type, struct regcache *regs,
8122 const gdb_byte *valbuf)
8123 {
8124 struct gdbarch *gdbarch = regs->arch ();
8125 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8126
8127 if (type->code () == TYPE_CODE_FLT)
8128 {
8129 gdb_byte buf[ARM_FP_REGISTER_SIZE];
8130
8131 switch (gdbarch_tdep (gdbarch)->fp_model)
8132 {
8133 case ARM_FLOAT_FPA:
8134
8135 target_float_convert (valbuf, type, buf, arm_ext_type (gdbarch));
8136 regs->cooked_write (ARM_F0_REGNUM, buf);
8137 break;
8138
8139 case ARM_FLOAT_SOFT_FPA:
8140 case ARM_FLOAT_SOFT_VFP:
8141 /* ARM_FLOAT_VFP can arise if this is a variadic function and
8142 so is not using the VFP ABI code. */
8143 case ARM_FLOAT_VFP:
8144 regs->cooked_write (ARM_A1_REGNUM, valbuf);
8145 if (TYPE_LENGTH (type) > 4)
8146 regs->cooked_write (ARM_A1_REGNUM + 1,
8147 valbuf + ARM_INT_REGISTER_SIZE);
8148 break;
8149
8150 default:
8151 internal_error (__FILE__, __LINE__,
8152 _("arm_store_return_value: Floating "
8153 "point model not supported"));
8154 break;
8155 }
8156 }
8157 else if (type->code () == TYPE_CODE_INT
8158 || type->code () == TYPE_CODE_CHAR
8159 || type->code () == TYPE_CODE_BOOL
8160 || type->code () == TYPE_CODE_PTR
8161 || TYPE_IS_REFERENCE (type)
8162 || type->code () == TYPE_CODE_ENUM)
8163 {
8164 if (TYPE_LENGTH (type) <= 4)
8165 {
8166 /* Values of one word or less are zero/sign-extended and
8167 returned in r0. */
8168 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
8169 LONGEST val = unpack_long (type, valbuf);
8170
8171 store_signed_integer (tmpbuf, ARM_INT_REGISTER_SIZE, byte_order, val);
8172 regs->cooked_write (ARM_A1_REGNUM, tmpbuf);
8173 }
8174 else
8175 {
8176 /* Integral values greater than one word are stored in consecutive
8177 registers starting with r0. This will always be a multiple of
8178 the register size. */
8179 int len = TYPE_LENGTH (type);
8180 int regno = ARM_A1_REGNUM;
8181
8182 while (len > 0)
8183 {
8184 regs->cooked_write (regno++, valbuf);
8185 len -= ARM_INT_REGISTER_SIZE;
8186 valbuf += ARM_INT_REGISTER_SIZE;
8187 }
8188 }
8189 }
8190 else
8191 {
8192 /* For a structure or union the behaviour is as if the value had
8193 been stored to word-aligned memory and then loaded into
8194 registers with 32-bit load instruction(s). */
8195 int len = TYPE_LENGTH (type);
8196 int regno = ARM_A1_REGNUM;
8197 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
8198
8199 while (len > 0)
8200 {
8201 memcpy (tmpbuf, valbuf,
8202 len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len);
8203 regs->cooked_write (regno++, tmpbuf);
8204 len -= ARM_INT_REGISTER_SIZE;
8205 valbuf += ARM_INT_REGISTER_SIZE;
8206 }
8207 }
8208 }
8209
8210
8211 /* Handle function return values. */
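/* When the VFP ABI applies and the type is a VFP co-processor register
   candidate (a homogeneous floating-point or vector aggregate), the value is
   returned in s, d or q registers; everything else follows the core-register
   conventions implemented by arm_store_return_value and
   arm_extract_return_value above.  */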
8212
8213 static enum return_value_convention
8214 arm_return_value (struct gdbarch *gdbarch, struct value *function,
8215 struct type *valtype, struct regcache *regcache,
8216 gdb_byte *readbuf, const gdb_byte *writebuf)
8217 {
8218 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8219 struct type *func_type = function ? value_type (function) : NULL;
8220 enum arm_vfp_cprc_base_type vfp_base_type;
8221 int vfp_base_count;
8222
8223 if (arm_vfp_abi_for_function (gdbarch, func_type)
8224 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
8225 {
8226 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
8227 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
8228 int i;
8229 for (i = 0; i < vfp_base_count; i++)
8230 {
8231 if (reg_char == 'q')
8232 {
8233 if (writebuf)
8234 arm_neon_quad_write (gdbarch, regcache, i,
8235 writebuf + i * unit_length);
8236
8237 if (readbuf)
8238 arm_neon_quad_read (gdbarch, regcache, i,
8239 readbuf + i * unit_length);
8240 }
8241 else
8242 {
8243 char name_buf[4];
8244 int regnum;
8245
8246 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
8247 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8248 strlen (name_buf));
8249 if (writebuf)
8250 regcache->cooked_write (regnum, writebuf + i * unit_length);
8251 if (readbuf)
8252 regcache->cooked_read (regnum, readbuf + i * unit_length);
8253 }
8254 }
8255 return RETURN_VALUE_REGISTER_CONVENTION;
8256 }
8257
8258 if (valtype->code () == TYPE_CODE_STRUCT
8259 || valtype->code () == TYPE_CODE_UNION
8260 || valtype->code () == TYPE_CODE_ARRAY)
8261 {
8262 if (tdep->struct_return == pcc_struct_return
8263 || arm_return_in_memory (gdbarch, valtype))
8264 return RETURN_VALUE_STRUCT_CONVENTION;
8265 }
8266 else if (valtype->code () == TYPE_CODE_COMPLEX)
8267 {
8268 if (arm_return_in_memory (gdbarch, valtype))
8269 return RETURN_VALUE_STRUCT_CONVENTION;
8270 }
8271
8272 if (writebuf)
8273 arm_store_return_value (valtype, regcache, writebuf);
8274
8275 if (readbuf)
8276 arm_extract_return_value (valtype, regcache, readbuf);
8277
8278 return RETURN_VALUE_REGISTER_CONVENTION;
8279 }
8280
8281
8282 static int
8283 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
8284 {
8285 struct gdbarch *gdbarch = get_frame_arch (frame);
8286 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8287 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8288 CORE_ADDR jb_addr;
8289 gdb_byte buf[ARM_INT_REGISTER_SIZE];
8290
8291 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
8292
8293 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
8294 ARM_INT_REGISTER_SIZE))
8295 return 0;
8296
8297 *pc = extract_unsigned_integer (buf, ARM_INT_REGISTER_SIZE, byte_order);
8298 return 1;
8299 }
8300 /* A call to cmse secure entry function "foo" at "a" is modified by
8301 GNU ld as "b".
8302 a) bl xxxx <foo>
8303
8304 <foo>
8305 xxxx:
8306
8307 b) bl yyyy <__acle_se_foo>
8308
8309 section .gnu.sgstubs:
8310 <foo>
8311 yyyy: sg // secure gateway
8312 b.w xxxx <__acle_se_foo> // original_branch_dest
8313
8314 <__acle_se_foo>
8315 xxxx:
8316
8317 When control is at "b", the PC contains "yyyy" (the sg address), which is a
8318 trampoline and does not exist in the source code. This function returns the
8319 target pc "xxxx". For more details please refer to section 5.4
8320 (Entry functions) and section 3.4.4 (C level development flow of secure code)
8321 of "armv8-m-security-extensions-requirements-on-development-tools-engineering-specification"
8322 document on www.developer.arm.com. */
8323
8324 static CORE_ADDR
8325 arm_skip_cmse_entry (CORE_ADDR pc, const char *name, struct objfile *objfile)
8326 {
8327 int target_len = strlen (name) + strlen ("__acle_se_") + 1;
8328 char *target_name = (char *) alloca (target_len);
8329 xsnprintf (target_name, target_len, "%s%s", "__acle_se_", name);
8330
8331 struct bound_minimal_symbol minsym
8332 = lookup_minimal_symbol (target_name, NULL, objfile);
8333
8334 if (minsym.minsym != nullptr)
8335 return BMSYMBOL_VALUE_ADDRESS (minsym);
8336
8337 return 0;
8338 }
8339
8340 /* Return true when SEC points to ".gnu.sgstubs" section. */
8341
8342 static bool
8343 arm_is_sgstubs_section (struct obj_section *sec)
8344 {
8345 return (sec != nullptr
8346 && sec->the_bfd_section != nullptr
8347 && sec->the_bfd_section->name != nullptr
8348 && streq (sec->the_bfd_section->name, ".gnu.sgstubs"));
8349 }
8350
8351 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
8352 return the target PC. Otherwise return 0. */
8353
8354 CORE_ADDR
8355 arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
8356 {
8357 const char *name;
8358 int namelen;
8359 CORE_ADDR start_addr;
8360
8361 /* Find the starting address and name of the function containing the PC. */
8362 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
8363 {
8364 /* The trampoline 'bx reg' doesn't belong to any function. Do the
8365 check here. */
8366 start_addr = arm_skip_bx_reg (frame, pc);
8367 if (start_addr != 0)
8368 return start_addr;
8369
8370 return 0;
8371 }
8372
8373 /* If PC is in a Thumb call or return stub, return the address of the
8374 target PC, which is in a register. The thunk functions are called
8375 _call_via_xx, where xx is the register name. The possible names
8376 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
8377 functions, named __ARM_call_via_r[0-7]. */
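/* For example, a call through r3 uses the stub "_call_via_r3"; the last two
   characters of the stub name select the register that holds the real
   target address.  */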
8378 if (startswith (name, "_call_via_")
8379 || startswith (name, "__ARM_call_via_"))
8380 {
8381 /* Use the name suffix to determine which register contains the
8382 target PC. */
8383 static const char *table[15] =
8384 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
8385 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
8386 };
8387 int regno;
8388 int offset = strlen (name) - 2;
8389
8390 for (regno = 0; regno <= 14; regno++)
8391 if (strcmp (&name[offset], table[regno]) == 0)
8392 return get_frame_register_unsigned (frame, regno);
8393 }
8394
8395 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
8396 non-interworking calls to foo. We could decode the stubs
8397 to find the target but it's easier to use the symbol table. */
8398 namelen = strlen (name);
8399 if (name[0] == '_' && name[1] == '_'
8400 && ((namelen > 2 + strlen ("_from_thumb")
8401 && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
8402 || (namelen > 2 + strlen ("_from_arm")
8403 && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
8404 {
8405 char *target_name;
8406 int target_len = namelen - 2;
8407 struct bound_minimal_symbol minsym;
8408 struct objfile *objfile;
8409 struct obj_section *sec;
8410
8411 if (name[namelen - 1] == 'b')
8412 target_len -= strlen ("_from_thumb");
8413 else
8414 target_len -= strlen ("_from_arm");
8415
8416 target_name = (char *) alloca (target_len + 1);
8417 memcpy (target_name, name + 2, target_len);
8418 target_name[target_len] = '\0';
8419
8420 sec = find_pc_section (pc);
8421 objfile = (sec == NULL) ? NULL : sec->objfile;
8422 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
8423 if (minsym.minsym != NULL)
8424 return BMSYMBOL_VALUE_ADDRESS (minsym);
8425 else
8426 return 0;
8427 }
8428
8429 struct obj_section *section = find_pc_section (pc);
8430
8431 /* Check whether SECTION points to the ".gnu.sgstubs" section. */
8432 if (arm_is_sgstubs_section (section))
8433 return arm_skip_cmse_entry (pc, name, section->objfile);
8434
8435 return 0; /* not a stub */
8436 }
8437
8438 static void
8439 arm_update_current_architecture (void)
8440 {
8441 struct gdbarch_info info;
8442
8443 /* If the current architecture is not ARM, we have nothing to do. */
8444 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
8445 return;
8446
8447 /* Update the architecture. */
8448 gdbarch_info_init (&info);
8449
8450 if (!gdbarch_update_p (info))
8451 internal_error (__FILE__, __LINE__, _("could not update architecture"));
8452 }
8453
8454 static void
8455 set_fp_model_sfunc (const char *args, int from_tty,
8456 struct cmd_list_element *c)
8457 {
8458 int fp_model;
8459
8460 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
8461 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
8462 {
8463 arm_fp_model = (enum arm_float_model) fp_model;
8464 break;
8465 }
8466
8467 if (fp_model == ARM_FLOAT_LAST)
8468 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
8469 current_fp_model);
8470
8471 arm_update_current_architecture ();
8472 }
8473
8474 static void
8475 show_fp_model (struct ui_file *file, int from_tty,
8476 struct cmd_list_element *c, const char *value)
8477 {
8478 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8479
8480 if (arm_fp_model == ARM_FLOAT_AUTO
8481 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8482 fprintf_filtered (file, _("\
8483 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
8484 fp_model_strings[tdep->fp_model]);
8485 else
8486 fprintf_filtered (file, _("\
8487 The current ARM floating point model is \"%s\".\n"),
8488 fp_model_strings[arm_fp_model]);
8489 }
8490
8491 static void
8492 arm_set_abi (const char *args, int from_tty,
8493 struct cmd_list_element *c)
8494 {
8495 int arm_abi;
8496
8497 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
8498 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
8499 {
8500 arm_abi_global = (enum arm_abi_kind) arm_abi;
8501 break;
8502 }
8503
8504 if (arm_abi == ARM_ABI_LAST)
8505 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
8506 arm_abi_string);
8507
8508 arm_update_current_architecture ();
8509 }
8510
8511 static void
8512 arm_show_abi (struct ui_file *file, int from_tty,
8513 struct cmd_list_element *c, const char *value)
8514 {
8515 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8516
8517 if (arm_abi_global == ARM_ABI_AUTO
8518 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8519 fprintf_filtered (file, _("\
8520 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
8521 arm_abi_strings[tdep->arm_abi]);
8522 else
8523 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
8524 arm_abi_string);
8525 }
8526
8527 static void
8528 arm_show_fallback_mode (struct ui_file *file, int from_tty,
8529 struct cmd_list_element *c, const char *value)
8530 {
8531 fprintf_filtered (file,
8532 _("The current execution mode assumed "
8533 "(when symbols are unavailable) is \"%s\".\n"),
8534 arm_fallback_mode_string);
8535 }
8536
8537 static void
8538 arm_show_force_mode (struct ui_file *file, int from_tty,
8539 struct cmd_list_element *c, const char *value)
8540 {
8541 fprintf_filtered (file,
8542 _("The current execution mode assumed "
8543 "(even when symbols are available) is \"%s\".\n"),
8544 arm_force_mode_string);
8545 }
8546
8547 /* If the user changes the register disassembly style used for info
8548 register and other commands, we have to also switch the style used
8549 in opcodes for disassembly output. This function is run in the "set
8550 arm disassembly" command, and does that. */
8551
8552 static void
8553 set_disassembly_style_sfunc (const char *args, int from_tty,
8554 struct cmd_list_element *c)
8555 {
8556 /* Convert the short style name into the long style name (e.g., reg-names-*)
8557 before calling the generic set_disassembler_options() function. */
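/* For example, a style name of "std" is passed to opcodes as
   "reg-names-std".  */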
8558 std::string long_name = std::string ("reg-names-") + disassembly_style;
8559 set_disassembler_options (&long_name[0]);
8560 }
8561
8562 static void
8563 show_disassembly_style_sfunc (struct ui_file *file, int from_tty,
8564 struct cmd_list_element *c, const char *value)
8565 {
8566 struct gdbarch *gdbarch = get_current_arch ();
8567 char *options = get_disassembler_options (gdbarch);
8568 const char *style = "";
8569 int len = 0;
8570 const char *opt;
8571
8572 FOR_EACH_DISASSEMBLER_OPTION (opt, options)
8573 if (CONST_STRNEQ (opt, "reg-names-"))
8574 {
8575 style = &opt[strlen ("reg-names-")];
8576 len = strcspn (style, ",");
8577 }
8578
8579 fprintf_unfiltered (file, "The disassembly style is \"%.*s\".\n", len, style);
8580 }
8581 \f
8582 /* Return the ARM register name corresponding to register I. */
8583 static const char *
8584 arm_register_name (struct gdbarch *gdbarch, int i)
8585 {
8586 const int num_regs = gdbarch_num_regs (gdbarch);
8587
8588 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
8589 && i >= num_regs && i < num_regs + 32)
8590 {
8591 static const char *const vfp_pseudo_names[] = {
8592 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
8593 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
8594 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
8595 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
8596 };
8597
8598 return vfp_pseudo_names[i - num_regs];
8599 }
8600
8601 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
8602 && i >= num_regs + 32 && i < num_regs + 32 + 16)
8603 {
8604 static const char *const neon_pseudo_names[] = {
8605 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
8606 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
8607 };
8608
8609 return neon_pseudo_names[i - num_regs - 32];
8610 }
8611
8612 if (i >= ARRAY_SIZE (arm_register_names))
8613 /* These registers are only supported on targets which supply
8614 an XML description. */
8615 return "";
8616
8617 return arm_register_names[i];
8618 }
8619
8620 /* Test whether the coff symbol specific value corresponds to a Thumb
8621 function. */
8622
8623 static int
8624 coff_sym_is_thumb (int val)
8625 {
8626 return (val == C_THUMBEXT
8627 || val == C_THUMBSTAT
8628 || val == C_THUMBEXTFUNC
8629 || val == C_THUMBSTATFUNC
8630 || val == C_THUMBLABEL);
8631 }
8632
8633 /* arm_coff_make_msymbol_special()
8634 arm_elf_make_msymbol_special()
8635
8636 These functions test whether the COFF or ELF symbol corresponds to
8637 an address in thumb code, and set a "special" bit in a minimal
8638 symbol to indicate that it does. */
8639
8640 static void
8641 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
8642 {
8643 elf_symbol_type *elfsym = (elf_symbol_type *) sym;
8644
8645 if (ARM_GET_SYM_BRANCH_TYPE (elfsym->internal_elf_sym.st_target_internal)
8646 == ST_BRANCH_TO_THUMB)
8647 MSYMBOL_SET_SPECIAL (msym);
8648 }
8649
8650 static void
8651 arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
8652 {
8653 if (coff_sym_is_thumb (val))
8654 MSYMBOL_SET_SPECIAL (msym);
8655 }
8656
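/* Record the ARM mapping symbol SYM ($a, $t or $d) from OBJFILE so that the
   ARM, Thumb and data regions of each section can be told apart later.  The
   per-section vectors are sorted lazily on first use.  */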
8657 static void
8658 arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
8659 asymbol *sym)
8660 {
8661 const char *name = bfd_asymbol_name (sym);
8662 struct arm_per_bfd *data;
8663 struct arm_mapping_symbol new_map_sym;
8664
8665 gdb_assert (name[0] == '$');
8666 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
8667 return;
8668
8669 data = arm_bfd_data_key.get (objfile->obfd);
8670 if (data == NULL)
8671 data = arm_bfd_data_key.emplace (objfile->obfd,
8672 objfile->obfd->section_count);
8673 arm_mapping_symbol_vec &map
8674 = data->section_maps[bfd_asymbol_section (sym)->index];
8675
8676 new_map_sym.value = sym->value;
8677 new_map_sym.type = name[1];
8678
8679 /* Insert at the end, the vector will be sorted on first use. */
8680 map.push_back (new_map_sym);
8681 }
8682
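/* Write PC to REGCACHE, and also update the CPSR T bit so that it matches
   whether the new PC is a Thumb address.  */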
8683 static void
8684 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
8685 {
8686 struct gdbarch *gdbarch = regcache->arch ();
8687 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
8688
8689 /* If necessary, set the T bit. */
8690 if (arm_apcs_32)
8691 {
8692 ULONGEST val, t_bit;
8693 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
8694 t_bit = arm_psr_thumb_bit (gdbarch);
8695 if (arm_pc_is_thumb (gdbarch, pc))
8696 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8697 val | t_bit);
8698 else
8699 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8700 val & ~t_bit);
8701 }
8702 }
8703
8704 /* Read the contents of a NEON quad register, by reading from two
8705 double registers. This is used to implement the quad pseudo
8706 registers, and for argument passing in case the quad registers are
8707 missing; vectors are passed in quad registers when using the VFP
8708 ABI, even if a NEON unit is not present. REGNUM is the index of
8709 the quad register, in [0, 15]. */
8710
8711 static enum register_status
8712 arm_neon_quad_read (struct gdbarch *gdbarch, readable_regcache *regcache,
8713 int regnum, gdb_byte *buf)
8714 {
8715 char name_buf[4];
8716 gdb_byte reg_buf[8];
8717 int offset, double_regnum;
8718 enum register_status status;
8719
8720 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8721 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8722 strlen (name_buf));
8723
8724 /* d0 is always the least significant half of q0. */
8725 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8726 offset = 8;
8727 else
8728 offset = 0;
8729
8730 status = regcache->raw_read (double_regnum, reg_buf);
8731 if (status != REG_VALID)
8732 return status;
8733 memcpy (buf + offset, reg_buf, 8);
8734
8735 offset = 8 - offset;
8736 status = regcache->raw_read (double_regnum + 1, reg_buf);
8737 if (status != REG_VALID)
8738 return status;
8739 memcpy (buf + offset, reg_buf, 8);
8740
8741 return REG_VALID;
8742 }
8743
8744 static enum register_status
8745 arm_pseudo_read (struct gdbarch *gdbarch, readable_regcache *regcache,
8746 int regnum, gdb_byte *buf)
8747 {
8748 const int num_regs = gdbarch_num_regs (gdbarch);
8749 char name_buf[4];
8750 gdb_byte reg_buf[8];
8751 int offset, double_regnum;
8752
8753 gdb_assert (regnum >= num_regs);
8754 regnum -= num_regs;
8755
8756 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8757 /* Quad-precision register. */
8758 return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
8759 else
8760 {
8761 enum register_status status;
8762
8763 /* Single-precision register. */
8764 gdb_assert (regnum < 32);
8765
8766 /* s0 is always the least significant half of d0. */
8767 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8768 offset = (regnum & 1) ? 0 : 4;
8769 else
8770 offset = (regnum & 1) ? 4 : 0;
8771
8772 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8773 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8774 strlen (name_buf));
8775
8776 status = regcache->raw_read (double_regnum, reg_buf);
8777 if (status == REG_VALID)
8778 memcpy (buf, reg_buf + offset, 4);
8779 return status;
8780 }
8781 }
8782
8783 /* Store the contents of BUF to a NEON quad register, by writing to
8784 two double registers. This is used to implement the quad pseudo
8785 registers, and for argument passing in case the quad registers are
8786 missing; vectors are passed in quad registers when using the VFP
8787 ABI, even if a NEON unit is not present. REGNUM is the index
8788 of the quad register, in [0, 15]. */
8789
8790 static void
8791 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
8792 int regnum, const gdb_byte *buf)
8793 {
8794 char name_buf[4];
8795 int offset, double_regnum;
8796
8797 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8798 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8799 strlen (name_buf));
8800
8801 /* d0 is always the least significant half of q0. */
8802 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8803 offset = 8;
8804 else
8805 offset = 0;
8806
8807 regcache->raw_write (double_regnum, buf + offset);
8808 offset = 8 - offset;
8809 regcache->raw_write (double_regnum + 1, buf + offset);
8810 }
8811
8812 static void
8813 arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
8814 int regnum, const gdb_byte *buf)
8815 {
8816 const int num_regs = gdbarch_num_regs (gdbarch);
8817 char name_buf[4];
8818 gdb_byte reg_buf[8];
8819 int offset, double_regnum;
8820
8821 gdb_assert (regnum >= num_regs);
8822 regnum -= num_regs;
8823
8824 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8825 /* Quad-precision register. */
8826 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
8827 else
8828 {
8829 /* Single-precision register. */
8830 gdb_assert (regnum < 32);
8831
8832 /* s0 is always the least significant half of d0. */
8833 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8834 offset = (regnum & 1) ? 0 : 4;
8835 else
8836 offset = (regnum & 1) ? 4 : 0;
8837
8838 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8839 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8840 strlen (name_buf));
8841
8842 regcache->raw_read (double_regnum, reg_buf);
8843 memcpy (reg_buf + offset, buf, 4);
8844 regcache->raw_write (double_regnum, reg_buf);
8845 }
8846 }
8847
8848 static struct value *
8849 value_of_arm_user_reg (struct frame_info *frame, const void *baton)
8850 {
8851 const int *reg_p = (const int *) baton;
8852 return value_of_register (*reg_p, frame);
8853 }
8854 \f
8855 static enum gdb_osabi
8856 arm_elf_osabi_sniffer (bfd *abfd)
8857 {
8858 unsigned int elfosabi;
8859 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
8860
8861 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
8862
8863 if (elfosabi == ELFOSABI_ARM)
8864 /* GNU tools use this value. Check note sections in this case,
8865 as well. */
8866 {
8867 for (asection *sect : gdb_bfd_sections (abfd))
8868 generic_elf_osabi_sniff_abi_tag_sections (abfd, sect, &osabi);
8869 }
8870
8871 /* Anything else will be handled by the generic ELF sniffer. */
8872 return osabi;
8873 }
8874
8875 static int
8876 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
8877 struct reggroup *group)
8878 {
8879 /* The FPS register's type is INT, but it belongs to float_reggroup.
8880 Besides this, the FPS register belongs to save_reggroup,
8881 restore_reggroup, and all_reggroup, of course. */
8882 if (regnum == ARM_FPS_REGNUM)
8883 return (group == float_reggroup
8884 || group == save_reggroup
8885 || group == restore_reggroup
8886 || group == all_reggroup);
8887 else
8888 return default_register_reggroup_p (gdbarch, regnum, group);
8889 }
8890
8891 /* For backward-compatibility we allow two 'g' packet lengths with
8892 the remote protocol depending on whether FPA registers are
8893 supplied. M-profile targets do not have FPA registers, but some
8894 stubs already exist in the wild which use a 'g' packet which
8895 supplies them albeit with dummy values. The packet format which
8896 includes FPA registers should be considered deprecated for
8897 M-profile targets. */
8898
8899 static void
8900 arm_register_g_packet_guesses (struct gdbarch *gdbarch)
8901 {
8902 if (gdbarch_tdep (gdbarch)->is_m)
8903 {
8904 const target_desc *tdesc;
8905
8906 /* If we know from the executable this is an M-profile target,
8907 cater for remote targets whose register set layout is the
8908 same as the FPA layout. */
8909 tdesc = arm_read_mprofile_description (ARM_M_TYPE_WITH_FPA);
8910 register_remote_g_packet_guess (gdbarch,
8911 ARM_CORE_REGS_SIZE + ARM_FP_REGS_SIZE,
8912 tdesc);
8913
8914 /* The regular M-profile layout. */
8915 tdesc = arm_read_mprofile_description (ARM_M_TYPE_M_PROFILE);
8916 register_remote_g_packet_guess (gdbarch, ARM_CORE_REGS_SIZE,
8917 tdesc);
8918
8919 /* M-profile plus M4F VFP. */
8920 tdesc = arm_read_mprofile_description (ARM_M_TYPE_VFP_D16);
8921 register_remote_g_packet_guess (gdbarch,
8922 ARM_CORE_REGS_SIZE + ARM_VFP2_REGS_SIZE,
8923 tdesc);
8924 }
8925
8926 /* Otherwise we don't have a useful guess. */
8927 }
8928
8929 /* Implement the code_of_frame_writable gdbarch method. */
8930
8931 static int
8932 arm_code_of_frame_writable (struct gdbarch *gdbarch, struct frame_info *frame)
8933 {
8934 if (gdbarch_tdep (gdbarch)->is_m
8935 && get_frame_type (frame) == SIGTRAMP_FRAME)
8936 {
8937 /* M-profile exception frames return to some magic PCs, which
8938 aren't writable at all. */
8939 return 0;
8940 }
8941 else
8942 return 1;
8943 }
8944
8945 /* Implement gdbarch_gnu_triplet_regexp. If the arch name is arm then allow it
8946 to be postfixed by a version (e.g. armv7hl). */
8947
8948 static const char *
8949 arm_gnu_triplet_regexp (struct gdbarch *gdbarch)
8950 {
8951 if (strcmp (gdbarch_bfd_arch_info (gdbarch)->arch_name, "arm") == 0)
8952 return "arm(v[^- ]*)?";
8953 return gdbarch_bfd_arch_info (gdbarch)->arch_name;
8954 }
8955
8956 /* Initialize the current architecture based on INFO. If possible,
8957 re-use an architecture from ARCHES, which is a list of
8958 architectures already created during this debugging session.
8959
8960 Called e.g. at program startup, when reading a core file, and when
8961 reading a binary file. */
8962
8963 static struct gdbarch *
8964 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
8965 {
8966 struct gdbarch_tdep *tdep;
8967 struct gdbarch *gdbarch;
8968 struct gdbarch_list *best_arch;
8969 enum arm_abi_kind arm_abi = arm_abi_global;
8970 enum arm_float_model fp_model = arm_fp_model;
8971 tdesc_arch_data_up tdesc_data;
8972 int i;
8973 bool is_m = false;
8974 int vfp_register_count = 0;
8975 bool have_vfp_pseudos = false, have_neon_pseudos = false;
8976 bool have_wmmx_registers = false;
8977 bool have_neon = false;
8978 bool have_fpa_registers = true;
8979 const struct target_desc *tdesc = info.target_desc;
8980
8981 /* If we have an object to base this architecture on, try to determine
8982 its ABI. */
8983
8984 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
8985 {
8986 int ei_osabi, e_flags;
8987
8988 switch (bfd_get_flavour (info.abfd))
8989 {
8990 case bfd_target_coff_flavour:
8991 /* Assume it's an old APCS-style ABI. */
8992 /* XXX WinCE? */
8993 arm_abi = ARM_ABI_APCS;
8994 break;
8995
8996 case bfd_target_elf_flavour:
8997 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
8998 e_flags = elf_elfheader (info.abfd)->e_flags;
8999
9000 if (ei_osabi == ELFOSABI_ARM)
9001 {
9002 /* GNU tools used to use this value, but do not for EABI
9003 objects. There's nowhere to tag an EABI version
9004 anyway, so assume APCS. */
9005 arm_abi = ARM_ABI_APCS;
9006 }
9007 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
9008 {
9009 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
9010
9011 switch (eabi_ver)
9012 {
9013 case EF_ARM_EABI_UNKNOWN:
9014 /* Assume GNU tools. */
9015 arm_abi = ARM_ABI_APCS;
9016 break;
9017
9018 case EF_ARM_EABI_VER4:
9019 case EF_ARM_EABI_VER5:
9020 arm_abi = ARM_ABI_AAPCS;
9021 /* EABI binaries default to VFP float ordering.
9022 They may also contain build attributes that can
9023 be used to identify if the VFP argument-passing
9024 ABI is in use. */
9025 if (fp_model == ARM_FLOAT_AUTO)
9026 {
9027 #ifdef HAVE_ELF
9028 switch (bfd_elf_get_obj_attr_int (info.abfd,
9029 OBJ_ATTR_PROC,
9030 Tag_ABI_VFP_args))
9031 {
9032 case AEABI_VFP_args_base:
9033 /* "The user intended FP parameter/result
9034 passing to conform to AAPCS, base
9035 variant". */
9036 fp_model = ARM_FLOAT_SOFT_VFP;
9037 break;
9038 case AEABI_VFP_args_vfp:
9039 /* "The user intended FP parameter/result
9040 passing to conform to AAPCS, VFP
9041 variant". */
9042 fp_model = ARM_FLOAT_VFP;
9043 break;
9044 case AEABI_VFP_args_toolchain:
9045 /* "The user intended FP parameter/result
9046 passing to conform to tool chain-specific
9047 conventions" - we don't know any such
9048 conventions, so leave it as "auto". */
9049 break;
9050 case AEABI_VFP_args_compatible:
9051 /* "Code is compatible with both the base
9052 and VFP variants; the user did not permit
9053 non-variadic functions to pass FP
9054 parameters/results" - leave it as
9055 "auto". */
9056 break;
9057 default:
9058 /* Attribute value not mentioned in the
9059 November 2012 ABI, so leave it as
9060 "auto". */
9061 break;
9062 }
9063 #else
9064 fp_model = ARM_FLOAT_SOFT_VFP;
9065 #endif
9066 }
9067 break;
9068
9069 default:
9070 /* Leave it as "auto". */
9071 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
9072 break;
9073 }
9074
9075 #ifdef HAVE_ELF
9076 /* Detect M-profile programs. This only works if the
9077 executable file includes build attributes; GCC does
9078 copy them to the executable, but e.g. RealView does
9079 not. */
9080 int attr_arch
9081 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9082 Tag_CPU_arch);
9083 int attr_profile
9084 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9085 Tag_CPU_arch_profile);
9086
9087 /* GCC specifies the profile for v6-M; RealView only
9088 specifies the profile for architectures starting with
9089 V7 (as opposed to architectures with a tag
9090 numerically greater than TAG_CPU_ARCH_V7). */
9091 if (!tdesc_has_registers (tdesc)
9092 && (attr_arch == TAG_CPU_ARCH_V6_M
9093 || attr_arch == TAG_CPU_ARCH_V6S_M
9094 || attr_profile == 'M'))
9095 is_m = true;
9096 #endif
9097 }
9098
9099 if (fp_model == ARM_FLOAT_AUTO)
9100 {
9101 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
9102 {
9103 case 0:
9104 /* Leave it as "auto". Strictly speaking this case
9105 means FPA, but almost nobody uses that now, and
9106 many toolchains fail to set the appropriate bits
9107 for the floating-point model they use. */
9108 break;
9109 case EF_ARM_SOFT_FLOAT:
9110 fp_model = ARM_FLOAT_SOFT_FPA;
9111 break;
9112 case EF_ARM_VFP_FLOAT:
9113 fp_model = ARM_FLOAT_VFP;
9114 break;
9115 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
9116 fp_model = ARM_FLOAT_SOFT_VFP;
9117 break;
9118 }
9119 }
9120
9121 if (e_flags & EF_ARM_BE8)
9122 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
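/* BE8 images use big-endian data but little-endian instruction
   encodings, hence the separate byte order for code set above.  */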
9123
9124 break;
9125
9126 default:
9127 /* Leave it as "auto". */
9128 break;
9129 }
9130 }
9131
9132 /* Check any target description for validity. */
9133 if (tdesc_has_registers (tdesc))
9134 {
9135 /* For most registers we require GDB's default names; but also allow
9136 the numeric names for sp / lr / pc, as a convenience. */
9137 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
9138 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
9139 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
9140
9141 const struct tdesc_feature *feature;
9142 int valid_p;
9143
9144 feature = tdesc_find_feature (tdesc,
9145 "org.gnu.gdb.arm.core");
9146 if (feature == NULL)
9147 {
9148 feature = tdesc_find_feature (tdesc,
9149 "org.gnu.gdb.arm.m-profile");
9150 if (feature == NULL)
9151 return NULL;
9152 else
9153 is_m = true;
9154 }
9155
9156 tdesc_data = tdesc_data_alloc ();
9157
9158 valid_p = 1;
9159 for (i = 0; i < ARM_SP_REGNUM; i++)
9160 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (), i,
9161 arm_register_names[i]);
9162 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data.get (),
9163 ARM_SP_REGNUM,
9164 arm_sp_names);
9165 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data.get (),
9166 ARM_LR_REGNUM,
9167 arm_lr_names);
9168 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data.get (),
9169 ARM_PC_REGNUM,
9170 arm_pc_names);
9171 if (is_m)
9172 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
9173 ARM_PS_REGNUM, "xpsr");
9174 else
9175 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
9176 ARM_PS_REGNUM, "cpsr");
9177
9178 if (!valid_p)
9179 return NULL;
9180
9181 feature = tdesc_find_feature (tdesc,
9182 "org.gnu.gdb.arm.fpa");
9183 if (feature != NULL)
9184 {
9185 valid_p = 1;
9186 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
9187 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (), i,
9188 arm_register_names[i]);
9189 if (!valid_p)
9190 return NULL;
9191 }
9192 else
9193 have_fpa_registers = false;
9194
9195 feature = tdesc_find_feature (tdesc,
9196 "org.gnu.gdb.xscale.iwmmxt");
9197 if (feature != NULL)
9198 {
9199 static const char *const iwmmxt_names[] = {
9200 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9201 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9202 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9203 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9204 };
9205
9206 valid_p = 1;
9207 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
9208 valid_p
9209 &= tdesc_numbered_register (feature, tdesc_data.get (), i,
9210 iwmmxt_names[i - ARM_WR0_REGNUM]);
9211
9212 /* Check for the control registers, but do not fail if they
9213 are missing. */
9214 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
9215 tdesc_numbered_register (feature, tdesc_data.get (), i,
9216 iwmmxt_names[i - ARM_WR0_REGNUM]);
9217
9218 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
9219 valid_p
9220 &= tdesc_numbered_register (feature, tdesc_data.get (), i,
9221 iwmmxt_names[i - ARM_WR0_REGNUM]);
9222
9223 if (!valid_p)
9224 return NULL;
9225
9226 have_wmmx_registers = true;
9227 }
9228
9229 /* If we have a VFP unit, check whether the single precision registers
9230 are present. If not, then we will synthesize them as pseudo
9231 registers. */
9232 feature = tdesc_find_feature (tdesc,
9233 "org.gnu.gdb.arm.vfp");
9234 if (feature != NULL)
9235 {
9236 static const char *const vfp_double_names[] = {
9237 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9238 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9239 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9240 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9241 };
9242
9243 /* Require the double precision registers. There must be either
9244 16 or 32. */
9245 valid_p = 1;
9246 for (i = 0; i < 32; i++)
9247 {
9248 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
9249 ARM_D0_REGNUM + i,
9250 vfp_double_names[i]);
9251 if (!valid_p)
9252 break;
9253 }
9254 if (!valid_p && i == 16)
9255 valid_p = 1;
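/* That is: if the description stopped supplying "d" registers exactly
   at d16, accept it as a 16-register VFP unit; I then holds the number
   of double-precision registers that were actually found.  */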
9256
9257 /* Also require FPSCR. */
9258 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
9259 ARM_FPSCR_REGNUM, "fpscr");
9260 if (!valid_p)
9261 return NULL;
9262
9263 if (tdesc_unnumbered_register (feature, "s0") == 0)
9264 have_vfp_pseudos = true;
9265
9266 vfp_register_count = i;
9267
9268 /* If we have VFP, also check for NEON. The architecture allows
9269 NEON without VFP (integer vector operations only), but GDB
9270 does not support that. */
9271 feature = tdesc_find_feature (tdesc,
9272 "org.gnu.gdb.arm.neon");
9273 if (feature != NULL)
9274 {
9275 /* NEON requires 32 double-precision registers. */
9276 if (i != 32)
9277 return NULL;
9278
9279 /* If there are quad registers defined by the stub, use
9280 their type; otherwise (normally) provide them with
9281 the default type. */
9282 if (tdesc_unnumbered_register (feature, "q0") == 0)
9283 have_neon_pseudos = true;
9284
9285 have_neon = true;
9286 }
9287 }
9288 }
9289
9290 /* If there is already a candidate, use it. */
9291 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
9292 best_arch != NULL;
9293 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
9294 {
9295 if (arm_abi != ARM_ABI_AUTO
9296 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
9297 continue;
9298
9299 if (fp_model != ARM_FLOAT_AUTO
9300 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
9301 continue;
9302
9303 /* There are various other properties in tdep that we do not
9304 need to check here: those derived from a target description,
9305 since gdbarches with a different target description are
9306 automatically disqualified. */
9307
9308 /* Do check is_m, though, since it might come from the binary. */
9309 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
9310 continue;
9311
9312 /* Found a match. */
9313 break;
9314 }
9315
9316 if (best_arch != NULL)
9317 return best_arch->gdbarch;
9318
9319 tdep = XCNEW (struct gdbarch_tdep);
9320 gdbarch = gdbarch_alloc (&info, tdep);
9321
9322 /* Record additional information about the architecture we are defining.
9323 These are gdbarch discriminators, like the OSABI. */
9324 tdep->arm_abi = arm_abi;
9325 tdep->fp_model = fp_model;
9326 tdep->is_m = is_m;
9327 tdep->have_fpa_registers = have_fpa_registers;
9328 tdep->have_wmmx_registers = have_wmmx_registers;
9329 gdb_assert (vfp_register_count == 0
9330 || vfp_register_count == 16
9331 || vfp_register_count == 32);
9332 tdep->vfp_register_count = vfp_register_count;
9333 tdep->have_vfp_pseudos = have_vfp_pseudos;
9334 tdep->have_neon_pseudos = have_neon_pseudos;
9335 tdep->have_neon = have_neon;
9336
9337 arm_register_g_packet_guesses (gdbarch);
9338
9339 /* Breakpoints. */
9340 switch (info.byte_order_for_code)
9341 {
9342 case BFD_ENDIAN_BIG:
9343 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
9344 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
9345 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
9346 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
9347
9348 break;
9349
9350 case BFD_ENDIAN_LITTLE:
9351 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
9352 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
9353 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
9354 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
9355
9356 break;
9357
9358 default:
9359 internal_error (__FILE__, __LINE__,
9360 _("arm_gdbarch_init: bad byte order for float format"));
9361 }
9362
9363 /* On ARM targets char defaults to unsigned. */
9364 set_gdbarch_char_signed (gdbarch, 0);
9365
9366 /* wchar_t is unsigned under the AAPCS. */
9367 if (tdep->arm_abi == ARM_ABI_AAPCS)
9368 set_gdbarch_wchar_signed (gdbarch, 0);
9369 else
9370 set_gdbarch_wchar_signed (gdbarch, 1);
9371
9372 /* Compute type alignment. */
9373 set_gdbarch_type_align (gdbarch, arm_type_align);
9374
9375 /* Note: for displaced stepping, this includes the breakpoint, and one word
9376 of additional scratch space. This setting isn't used for anything besides
9377 displaced stepping at present. */
9378 set_gdbarch_max_insn_length (gdbarch, 4 * ARM_DISPLACED_MODIFIED_INSNS);
9379
9380 /* This should be low enough for everything. */
9381 tdep->lowest_pc = 0x20;
9382 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
9383
9384 /* The default, for both APCS and AAPCS, is to return small
9385 structures in registers. */
9386 tdep->struct_return = reg_struct_return;
9387
9388 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
9389 set_gdbarch_frame_align (gdbarch, arm_frame_align);
9390
9391 if (is_m)
9392 set_gdbarch_code_of_frame_writable (gdbarch, arm_code_of_frame_writable);
9393
9394 set_gdbarch_write_pc (gdbarch, arm_write_pc);
9395
9396 frame_base_set_default (gdbarch, &arm_normal_base);
9397
9398 /* Address manipulation. */
9399 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
9400
9401 /* Advance PC across function entry code. */
9402 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
9403
9404 /* Detect whether PC is at a point where the stack has been destroyed. */
9405 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
9406
9407 /* Skip trampolines. */
9408 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
9409
9410 /* The stack grows downward. */
9411 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
9412
9413 /* Breakpoint manipulation. */
9414 set_gdbarch_breakpoint_kind_from_pc (gdbarch, arm_breakpoint_kind_from_pc);
9415 set_gdbarch_sw_breakpoint_from_kind (gdbarch, arm_sw_breakpoint_from_kind);
9416 set_gdbarch_breakpoint_kind_from_current_state (gdbarch,
9417 arm_breakpoint_kind_from_current_state);
9418
9419 /* Information about registers, etc. */
9420 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
9421 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
9422 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
9423 set_gdbarch_register_type (gdbarch, arm_register_type);
9424 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
9425
9426 /* This "info float" is FPA-specific. Use the generic version if we
9427 do not have FPA. */
9428 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
9429 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
9430
9431 /* Internal <-> external register number maps. */
9432 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
9433 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
9434
9435 set_gdbarch_register_name (gdbarch, arm_register_name);
9436
9437 /* Returning results. */
9438 set_gdbarch_return_value (gdbarch, arm_return_value);
9439
9440 /* Disassembly. */
9441 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
9442
9443 /* Minsymbol frobbing. */
9444 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
9445 set_gdbarch_coff_make_msymbol_special (gdbarch,
9446 arm_coff_make_msymbol_special);
9447 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
9448
9449 /* Thumb-2 IT block support. */
9450 set_gdbarch_adjust_breakpoint_address (gdbarch,
9451 arm_adjust_breakpoint_address);
9452
9453 /* Virtual tables. */
9454 set_gdbarch_vbit_in_delta (gdbarch, 1);
9455
9456 /* Hook in the ABI-specific overrides, if they have been registered. */
9457 gdbarch_init_osabi (info, gdbarch);
9458
9459 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
9460
9461 /* Add some default predicates. */
9462 if (is_m)
9463 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
9464 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
9465 dwarf2_append_unwinders (gdbarch);
9466 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
9467 frame_unwind_append_unwinder (gdbarch, &arm_epilogue_frame_unwind);
9468 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
9469
9470 /* Now we have tuned the configuration, set a few final things,
9471 based on what the OS ABI has told us. */
9472
9473 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
9474 binaries are always marked. */
9475 if (tdep->arm_abi == ARM_ABI_AUTO)
9476 tdep->arm_abi = ARM_ABI_APCS;
9477
9478 /* Watchpoints are not steppable. */
9479 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
9480
9481 /* We used to default to FPA for generic ARM, but almost nobody
9482 uses that now, and we now provide a way for the user to force
9483 the model. So default to the most useful variant. */
9484 if (tdep->fp_model == ARM_FLOAT_AUTO)
9485 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
9486
9487 if (tdep->jb_pc >= 0)
9488 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
9489
9490 /* Floating point sizes and format. */
9491 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
9492 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
9493 {
9494 set_gdbarch_double_format
9495 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9496 set_gdbarch_long_double_format
9497 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9498 }
9499 else
9500 {
9501 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
9502 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
9503 }
9504
9505 if (have_vfp_pseudos)
9506 {
9507 /* NOTE: These are the only pseudo registers used by
9508 the ARM target at the moment. If more are added, a
9509 little more care in numbering will be needed. */
9510
9511 int num_pseudos = 32;
9512 if (have_neon_pseudos)
9513 num_pseudos += 16;
9514 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
9515 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
9516 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
9517 }
9518
9519 if (tdesc_data != nullptr)
9520 {
9521 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
9522
9523 tdesc_use_registers (gdbarch, tdesc, std::move (tdesc_data));
9524
9525 /* Override tdesc_register_type to adjust the types of VFP
9526 registers for NEON. */
9527 set_gdbarch_register_type (gdbarch, arm_register_type);
9528 }
9529
9530 /* Add standard register aliases. We add aliases even for those
9531 names which are used by the current architecture - it's simpler,
9532 and does no harm, since nothing ever lists user registers. */
9533 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
9534 user_reg_add (gdbarch, arm_register_aliases[i].name,
9535 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
9536
9537 set_gdbarch_disassembler_options (gdbarch, &arm_disassembler_options);
9538 set_gdbarch_valid_disassembler_options (gdbarch, disassembler_options_arm ());
9539
9540 set_gdbarch_gnu_triplet_regexp (gdbarch, arm_gnu_triplet_regexp);
9541
9542 return gdbarch;
9543 }
9544
9545 static void
9546 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
9547 {
9548 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9549
9550 if (tdep == NULL)
9551 return;
9552
9553 fprintf_unfiltered (file, _("arm_dump_tdep: fp_model = %i\n"),
9554 (int) tdep->fp_model);
9555 fprintf_unfiltered (file, _("arm_dump_tdep: have_fpa_registers = %i\n"),
9556 (int) tdep->have_fpa_registers);
9557 fprintf_unfiltered (file, _("arm_dump_tdep: have_wmmx_registers = %i\n"),
9558 (int) tdep->have_wmmx_registers);
9559 fprintf_unfiltered (file, _("arm_dump_tdep: vfp_register_count = %i\n"),
9560 (int) tdep->vfp_register_count);
9561 fprintf_unfiltered (file, _("arm_dump_tdep: have_vfp_pseudos = %i\n"),
9562 (int) tdep->have_vfp_pseudos);
9563 fprintf_unfiltered (file, _("arm_dump_tdep: have_neon_pseudos = %i\n"),
9564 (int) tdep->have_neon_pseudos);
9565 fprintf_unfiltered (file, _("arm_dump_tdep: have_neon = %i\n"),
9566 (int) tdep->have_neon);
9567 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx\n"),
9568 (unsigned long) tdep->lowest_pc);
9569 }
9570
9571 #if GDB_SELF_TEST
9572 namespace selftests
9573 {
9574 static void arm_record_test (void);
9575 }
9576 #endif
9577
9578 void _initialize_arm_tdep ();
9579 void
9580 _initialize_arm_tdep ()
9581 {
9582 long length;
9583 int i, j;
9584 char regdesc[1024], *rdptr = regdesc;
9585 size_t rest = sizeof (regdesc);
9586
9587 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
9588
9589 /* Add ourselves to objfile event chain. */
9590 gdb::observers::new_objfile.attach (arm_exidx_new_objfile);
9591
9592 /* Register an ELF OS ABI sniffer for ARM binaries. */
9593 gdbarch_register_osabi_sniffer (bfd_arch_arm,
9594 bfd_target_elf_flavour,
9595 arm_elf_osabi_sniffer);
9596
9597 /* Add root prefix command for all "set arm"/"show arm" commands. */
9598 add_basic_prefix_cmd ("arm", no_class,
9599 _("Various ARM-specific commands."),
9600 &setarmcmdlist, "set arm ", 0, &setlist);
9601
9602 add_show_prefix_cmd ("arm", no_class,
9603 _("Various ARM-specific commands."),
9604 &showarmcmdlist, "show arm ", 0, &showlist);
9605
9606
9607 arm_disassembler_options = xstrdup ("reg-names-std");
9608 const disasm_options_t *disasm_options
9609 = &disassembler_options_arm ()->options;
9610 int num_disassembly_styles = 0;
9611 for (i = 0; disasm_options->name[i] != NULL; i++)
9612 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9613 num_disassembly_styles++;
9614
9615 /* Initialize the array that will be passed to add_setshow_enum_cmd(). */
9616 valid_disassembly_styles = XNEWVEC (const char *,
9617 num_disassembly_styles + 1);
9618 for (i = j = 0; disasm_options->name[i] != NULL; i++)
9619 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9620 {
9621 size_t offset = strlen ("reg-names-");
9622 const char *style = disasm_options->name[i];
9623 valid_disassembly_styles[j++] = &style[offset];
9624 length = snprintf (rdptr, rest, "%s - %s\n", &style[offset],
9625 disasm_options->description[i]);
9626 rdptr += length;
9627 rest -= length;
9628 }
9629 /* Mark the end of valid options. */
9630 valid_disassembly_styles[num_disassembly_styles] = NULL;
9631
9632 /* Create the help text. */
9633 std::string helptext = string_printf ("%s%s%s",
9634 _("The valid values are:\n"),
9635 regdesc,
9636 _("The default is \"std\"."));
9637
9638 add_setshow_enum_cmd ("disassembler", no_class,
9639 valid_disassembly_styles, &disassembly_style,
9640 _("Set the disassembly style."),
9641 _("Show the disassembly style."),
9642 helptext.c_str (),
9643 set_disassembly_style_sfunc,
9644 show_disassembly_style_sfunc,
9645 &setarmcmdlist, &showarmcmdlist);
9646
9647 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
9648 _("Set usage of ARM 32-bit mode."),
9649 _("Show usage of ARM 32-bit mode."),
9650 _("When off, a 26-bit PC will be used."),
9651 NULL,
9652 NULL, /* FIXME: i18n: Usage of ARM 32-bit
9653 mode is %s. */
9654 &setarmcmdlist, &showarmcmdlist);
9655
9656 /* Add a command to allow the user to force the FPU model. */
9657 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
9658 _("Set the floating point type."),
9659 _("Show the floating point type."),
9660 _("auto - Determine the FP typefrom the OS-ABI.\n\
9661 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
9662 fpa - FPA co-processor (GCC compiled).\n\
9663 softvfp - Software FP with pure-endian doubles.\n\
9664 vfp - VFP co-processor."),
9665 set_fp_model_sfunc, show_fp_model,
9666 &setarmcmdlist, &showarmcmdlist);
9667
9668 /* Add a command to allow the user to force the ABI. */
9669 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
9670 _("Set the ABI."),
9671 _("Show the ABI."),
9672 NULL, arm_set_abi, arm_show_abi,
9673 &setarmcmdlist, &showarmcmdlist);
9674
9675 /* Add two commands to allow the user to force the assumed
9676 execution mode. */
9677 add_setshow_enum_cmd ("fallback-mode", class_support,
9678 arm_mode_strings, &arm_fallback_mode_string,
9679 _("Set the mode assumed when symbols are unavailable."),
9680 _("Show the mode assumed when symbols are unavailable."),
9681 NULL, NULL, arm_show_fallback_mode,
9682 &setarmcmdlist, &showarmcmdlist);
9683 add_setshow_enum_cmd ("force-mode", class_support,
9684 arm_mode_strings, &arm_force_mode_string,
9685 _("Set the mode assumed even when symbols are available."),
9686 _("Show the mode assumed even when symbols are available."),
9687 NULL, NULL, arm_show_force_mode,
9688 &setarmcmdlist, &showarmcmdlist);
9689
9690 /* Debugging flag. */
9691 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
9692 _("Set ARM debugging."),
9693 _("Show ARM debugging."),
9694 _("When on, arm-specific debugging is enabled."),
9695 NULL,
9696 NULL, /* FIXME: i18n: ARM debugging is %s. */
9697 &setdebuglist, &showdebuglist);
9698
9699 #if GDB_SELF_TEST
9700 selftests::register_test ("arm-record", selftests::arm_record_test);
9701 #endif
9702
9703 }
9704
9705 /* ARM-reversible process record data structures. */
9706
9707 #define ARM_INSN_SIZE_BYTES 4
9708 #define THUMB_INSN_SIZE_BYTES 2
9709 #define THUMB2_INSN_SIZE_BYTES 4
9710
9711
9712 /* Position of the bit within a 32-bit ARM instruction
9713 that defines whether the instruction is a load or store. */
9714 #define INSN_S_L_BIT_NUM 20
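/* For example (illustrative encodings, not used by the code): in
   "ldr r0, [r1]" (0xe5910000) bit 20 is 1, marking a load, while in
   "str r0, [r1]" (0xe5810000) it is 0, marking a store.  */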
9715
9716 #define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
9717 do \
9718 { \
9719 unsigned int reg_len = LENGTH; \
9720 if (reg_len) \
9721 { \
9722 REGS = XNEWVEC (uint32_t, reg_len); \
9723 memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
9724 } \
9725 } \
9726 while (0)
9727
9728 #define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
9729 do \
9730 { \
9731 unsigned int mem_len = LENGTH; \
9732 if (mem_len) \
9733 { \
9734 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
9735 memcpy(&MEMS->len, &RECORD_BUF[0], \
9736 sizeof(struct arm_mem_r) * LENGTH); \
9737 } \
9738 } \
9739 while (0)
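/* Typical use in the record handlers below: the decoder collects register
   numbers in record_buf[] and (length, address) pairs in record_buf_mem[],
   then hands them over with

     REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
     MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);  */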
9740
9741 /* Checks whether the insn is already recorded or yet to be decoded (boolean expression). */
9742 #define INSN_RECORDED(ARM_RECORD) \
9743 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
9744
9745 /* ARM memory record structure. */
9746 struct arm_mem_r
9747 {
9748 uint32_t len; /* Record length. */
9749 uint32_t addr; /* Memory address. */
9750 };
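/* Note that MEM_ALLOC above copies raw uint32_t pairs from
   record_buf_mem[] straight into an array of arm_mem_r, so each
   consecutive pair must be laid out as { length, address } to match
   the field order.  */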
9751
9752 /* ARM instruction record contains opcode of current insn
9753 and execution state (before entry to decode_insn()),
9754 contains list of to-be-modified registers and
9755 memory blocks (on return from decode_insn()). */
9756
9757 typedef struct insn_decode_record_t
9758 {
9759 struct gdbarch *gdbarch;
9760 struct regcache *regcache;
9761 CORE_ADDR this_addr; /* Address of the insn being decoded. */
9762 uint32_t arm_insn; /* Should accommodate thumb. */
9763 uint32_t cond; /* Condition code. */
9764 uint32_t opcode; /* Insn opcode. */
9765 uint32_t decode; /* Insn decode bits. */
9766 uint32_t mem_rec_count; /* No of mem records. */
9767 uint32_t reg_rec_count; /* No of reg records. */
9768 uint32_t *arm_regs; /* Registers to be saved for this record. */
9769 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
9770 } insn_decode_record;
9771
9772
9773 /* Checks ARM SBZ and SBO mandatory fields. */
9774
9775 static int
9776 sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
9777 {
9778 uint32_t ones = bits (insn, bit_num - 1, (bit_num - 1) + (len - 1));
9779 uint32_t mask = (len < 32) ? ((1u << len) - 1) : ~0u;
9780 
9781 if (!len)
9782 return 1;
9783 
9784 /* An SBO field must have every one of its LEN bits set; an SBZ field
9785 must have every bit clear, which is the same as every bit of its
9786 complement being set. */
9787 if (!sbo)
9788 ones = ~ones;
9789 
9790 return (ones & mask) == mask;
9795 }
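/* For example, sbo_sbz (insn, 9, 4, 1) requires bits 8..11 of INSN to
   all be one, and sbo_sbz (insn, 1, 12, 0) requires bits 0..11 to all
   be zero (BIT_NUM is 1-based).  */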
9796
9797 enum arm_record_result
9798 {
9799 ARM_RECORD_SUCCESS = 0,
9800 ARM_RECORD_FAILURE = 1
9801 };
9802
9803 typedef enum
9804 {
9805 ARM_RECORD_STRH=1,
9806 ARM_RECORD_STRD
9807 } arm_record_strx_t;
9808
9809 typedef enum
9810 {
9811 ARM_RECORD=1,
9812 THUMB_RECORD,
9813 THUMB2_RECORD
9814 } record_type_t;
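/* The three record types correspond, in order, to the
   ARM_INSN_SIZE_BYTES, THUMB_INSN_SIZE_BYTES and THUMB2_INSN_SIZE_BYTES
   instruction widths defined above.  */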
9815
9816
9817 static int
9818 arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
9819 uint32_t *record_buf_mem, arm_record_strx_t str_type)
9820 {
9821
9822 struct regcache *reg_cache = arm_insn_r->regcache;
9823 ULONGEST u_regval[2]= {0};
9824
9825 uint32_t reg_src1 = 0, reg_src2 = 0;
9826 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
9827
9828 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
9829 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
9830
9831 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
9832 {
9833 /* 1) Handle misc store, immediate offset. */
9834 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9835 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9836 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9837 regcache_raw_read_unsigned (reg_cache, reg_src1,
9838 &u_regval[0]);
9839 if (ARM_PC_REGNUM == reg_src1)
9840 {
9841 /* If R15 was used as Rn, its value is the current PC+8. */
9842 u_regval[0] = u_regval[0] + 8;
9843 }
9844 offset_8 = (immed_high << 4) | immed_low;
9845 /* Calculate target store address. */
9846 if (14 == arm_insn_r->opcode)
9847 {
9848 tgt_mem_addr = u_regval[0] + offset_8;
9849 }
9850 else
9851 {
9852 tgt_mem_addr = u_regval[0] - offset_8;
9853 }
9854 if (ARM_RECORD_STRH == str_type)
9855 {
9856 record_buf_mem[0] = 2;
9857 record_buf_mem[1] = tgt_mem_addr;
9858 arm_insn_r->mem_rec_count = 1;
9859 }
9860 else if (ARM_RECORD_STRD == str_type)
9861 {
9862 record_buf_mem[0] = 4;
9863 record_buf_mem[1] = tgt_mem_addr;
9864 record_buf_mem[2] = 4;
9865 record_buf_mem[3] = tgt_mem_addr + 4;
9866 arm_insn_r->mem_rec_count = 2;
9867 }
9868 }
9869 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
9870 {
9871 /* 2) Store, register offset. */
9872 /* Get Rm. */
9873 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9874 /* Get Rn. */
9875 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9876 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9877 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9878 if (15 == reg_src2)
9879 {
9880 /* If R15 was used as Rn, its value is the current PC+8. */
9881 u_regval[1] = u_regval[1] + 8;
9882 }
9883 /* Calculate target store address, Rn +/- Rm, register offset. */
9884 if (12 == arm_insn_r->opcode)
9885 {
9886 tgt_mem_addr = u_regval[0] + u_regval[1];
9887 }
9888 else
9889 {
9890 tgt_mem_addr = u_regval[1] - u_regval[0];
9891 }
9892 if (ARM_RECORD_STRH == str_type)
9893 {
9894 record_buf_mem[0] = 2;
9895 record_buf_mem[1] = tgt_mem_addr;
9896 arm_insn_r->mem_rec_count = 1;
9897 }
9898 else if (ARM_RECORD_STRD == str_type)
9899 {
9900 record_buf_mem[0] = 4;
9901 record_buf_mem[1] = tgt_mem_addr;
9902 record_buf_mem[2] = 4;
9903 record_buf_mem[3] = tgt_mem_addr + 4;
9904 arm_insn_r->mem_rec_count = 2;
9905 }
9906 }
9907 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
9908 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9909 {
9910 /* 3) Store, immediate pre-indexed. */
9911 /* 5) Store, immediate post-indexed. */
9912 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9913 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9914 offset_8 = (immed_high << 4) | immed_low;
9915 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9916 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9917 /* Calculate target store address, Rn +/- Rm, register offset. */
9918 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9919 {
9920 tgt_mem_addr = u_regval[0] + offset_8;
9921 }
9922 else
9923 {
9924 tgt_mem_addr = u_regval[0] - offset_8;
9925 }
9926 if (ARM_RECORD_STRH == str_type)
9927 {
9928 record_buf_mem[0] = 2;
9929 record_buf_mem[1] = tgt_mem_addr;
9930 arm_insn_r->mem_rec_count = 1;
9931 }
9932 else if (ARM_RECORD_STRD == str_type)
9933 {
9934 record_buf_mem[0] = 4;
9935 record_buf_mem[1] = tgt_mem_addr;
9936 record_buf_mem[2] = 4;
9937 record_buf_mem[3] = tgt_mem_addr + 4;
9938 arm_insn_r->mem_rec_count = 2;
9939 }
9940 /* Record Rn also as it changes. */
9941 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9942 arm_insn_r->reg_rec_count = 1;
9943 }
9944 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
9945 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9946 {
9947 /* 4) Store, register pre-indexed. */
9948 /* 6) Store, register post -indexed. */
9949 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9950 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9951 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9952 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9953 /* Calculate target store address, Rn +/- Rm, register offset. */
9954 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9955 {
9956 tgt_mem_addr = u_regval[0] + u_regval[1];
9957 }
9958 else
9959 {
9960 tgt_mem_addr = u_regval[1] - u_regval[0];
9961 }
9962 if (ARM_RECORD_STRH == str_type)
9963 {
9964 record_buf_mem[0] = 2;
9965 record_buf_mem[1] = tgt_mem_addr;
9966 arm_insn_r->mem_rec_count = 1;
9967 }
9968 else if (ARM_RECORD_STRD == str_type)
9969 {
9970 record_buf_mem[0] = 4;
9971 record_buf_mem[1] = tgt_mem_addr;
9972 record_buf_mem[2] = 4;
9973 record_buf_mem[3] = tgt_mem_addr + 4;
9974 arm_insn_r->mem_rec_count = 2;
9975 }
9976 /* Record Rn also as it changes. */
9977 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9978 arm_insn_r->reg_rec_count = 1;
9979 }
9980 return 0;
9981 }
9982
9983 /* Handling ARM extension space insns. */
9984
9985 static int
9986 arm_record_extension_space (insn_decode_record *arm_insn_r)
9987 {
9988 int ret = 0; /* Return value: -1: record failure; 0: success. */
9989 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
9990 uint32_t record_buf[8], record_buf_mem[8];
9991 uint32_t reg_src1 = 0;
9992 struct regcache *reg_cache = arm_insn_r->regcache;
9993 ULONGEST u_regval = 0;
9994
9995 gdb_assert (!INSN_RECORDED(arm_insn_r));
9996 /* Handle unconditional insn extension space. */
9997
9998 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
9999 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10000 if (arm_insn_r->cond)
10001 {
10002 /* PLD has no effect on the architectural state; it only affects
10003 the caches. */
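/* opcode1 here holds insn bits 20..27, so (opcode1 & 0xE0) >> 5 extracts
   bits 25..27; the value 5 (binary 101) selects the BLX (immediate)
   form handled below.  */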
10004 if (5 == ((opcode1 & 0xE0) >> 5))
10005 {
10006 /* BLX(1) */
10007 record_buf[0] = ARM_PS_REGNUM;
10008 record_buf[1] = ARM_LR_REGNUM;
10009 arm_insn_r->reg_rec_count = 2;
10010 }
10011 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
10012 }
10013
10014
10015 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10016 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
10017 {
10018 ret = -1;
10019 /* Undefined instruction on ARM V5; need to handle if later
10020 versions define it. */
10021 }
10022
10023 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
10024 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10025 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
10026
10027 /* Handle arithmetic insn extension space. */
10028 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
10029 && !INSN_RECORDED(arm_insn_r))
10030 {
10031 /* Handle MLA(S) and MUL(S). */
10032 if (in_inclusive_range (insn_op1, 0U, 3U))
10033 {
10034 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10035 record_buf[1] = ARM_PS_REGNUM;
10036 arm_insn_r->reg_rec_count = 2;
10037 }
10038 else if (in_inclusive_range (insn_op1, 4U, 15U))
10039 {
10040 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
10041 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10042 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10043 record_buf[2] = ARM_PS_REGNUM;
10044 arm_insn_r->reg_rec_count = 3;
10045 }
10046 }
10047
10048 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
10049 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
10050 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
10051
10052 /* Handle control insn extension space. */
10053
10054 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
10055 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
10056 {
10057 if (!bit (arm_insn_r->arm_insn,25))
10058 {
10059 if (!bits (arm_insn_r->arm_insn, 4, 7))
10060 {
10061 if ((0 == insn_op1) || (2 == insn_op1))
10062 {
10063 /* MRS. */
10064 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10065 arm_insn_r->reg_rec_count = 1;
10066 }
10067 else if (1 == insn_op1)
10068 {
10069 /* CPSR is going to be changed. */
10070 record_buf[0] = ARM_PS_REGNUM;
10071 arm_insn_r->reg_rec_count = 1;
10072 }
10073 else if (3 == insn_op1)
10074 {
10075 /* SPSR is going to be changed. */
10076 /* We need to get SPSR value, which is yet to be done. */
10077 return -1;
10078 }
10079 }
10080 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
10081 {
10082 if (1 == insn_op1)
10083 {
10084 /* BX. */
10085 record_buf[0] = ARM_PS_REGNUM;
10086 arm_insn_r->reg_rec_count = 1;
10087 }
10088 else if (3 == insn_op1)
10089 {
10090 /* CLZ. */
10091 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10092 arm_insn_r->reg_rec_count = 1;
10093 }
10094 }
10095 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
10096 {
10097 /* BLX. */
10098 record_buf[0] = ARM_PS_REGNUM;
10099 record_buf[1] = ARM_LR_REGNUM;
10100 arm_insn_r->reg_rec_count = 2;
10101 }
10102 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
10103 {
10104 /* QADD, QSUB, QDADD, QDSUB */
10105 record_buf[0] = ARM_PS_REGNUM;
10106 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10107 arm_insn_r->reg_rec_count = 2;
10108 }
10109 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
10110 {
10111 /* BKPT. */
10112 record_buf[0] = ARM_PS_REGNUM;
10113 record_buf[1] = ARM_LR_REGNUM;
10114 arm_insn_r->reg_rec_count = 2;
10115
10116 /* Save SPSR also; how? */
10117 return -1;
10118 }
10119 else if (8 == bits (arm_insn_r->arm_insn, 4, 7)
10120 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
10121 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
10122 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
10123 )
10124 {
10125 if (0 == insn_op1 || 1 == insn_op1)
10126 {
10127 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
10128 /* We don't optimize for SMULW<y>, where only
10129 Rd would be needed. */
10130 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10131 record_buf[1] = ARM_PS_REGNUM;
10132 arm_insn_r->reg_rec_count = 2;
10133 }
10134 else if (2 == insn_op1)
10135 {
10136 /* SMLAL<x><y>. */
10137 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10138 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
10139 arm_insn_r->reg_rec_count = 2;
10140 }
10141 else if (3 == insn_op1)
10142 {
10143 /* SMUL<x><y>. */
10144 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10145 arm_insn_r->reg_rec_count = 1;
10146 }
10147 }
10148 }
10149 else
10150 {
10151 /* MSR : immediate form. */
10152 if (1 == insn_op1)
10153 {
10154 /* CPSR is going to be changed. */
10155 record_buf[0] = ARM_PS_REGNUM;
10156 arm_insn_r->reg_rec_count = 1;
10157 }
10158 else if (3 == insn_op1)
10159 {
10160 /* SPSR is going to be changed. */
10161 /* We need to get the SPSR value, which is yet to be done. */
10162 return -1;
10163 }
10164 }
10165 }
10166
10167 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10168 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
10169 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
10170
10171 /* Handle load/store insn extension space. */
10172
10173 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
10174 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
10175 && !INSN_RECORDED(arm_insn_r))
10176 {
10177 /* SWP/SWPB. */
10178 if (0 == insn_op1)
10179 {
10180 /* This insn changes both a register and memory. */
10181 /* SWP or SWPB insn. */
10182 /* Get memory address given by Rn. */
10183 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10184 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
10185 /* SWP insn swaps a word. */
10186 if (8 == arm_insn_r->opcode)
10187 {
10188 record_buf_mem[0] = 4;
10189 }
10190 else
10191 {
10192 /* SWPB insn, swaps only byte. */
10193 record_buf_mem[0] = 1;
10194 }
10195 record_buf_mem[1] = u_regval;
10196 arm_insn_r->mem_rec_count = 1;
10197 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10198 arm_insn_r->reg_rec_count = 1;
10199 }
10200 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10201 {
10202 /* STRH. */
10203 arm_record_strx (arm_insn_r, &record_buf[0], &record_buf_mem[0],
10204 ARM_RECORD_STRH);
10205 }
10206 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10207 {
10208 /* LDRD. */
10209 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10210 record_buf[1] = record_buf[0] + 1;
10211 arm_insn_r->reg_rec_count = 2;
10212 }
10213 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10214 {
10215 /* STRD. */
10216 arm_record_strx (arm_insn_r, &record_buf[0], &record_buf_mem[0],
10217 ARM_RECORD_STRD);
10218 }
10219 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
10220 {
10221 /* LDRH, LDRSB, LDRSH. */
10222 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10223 arm_insn_r->reg_rec_count = 1;
10224 }
10225
10226 }
10227
10228 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
10229 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
10230 && !INSN_RECORDED(arm_insn_r))
10231 {
10232 ret = -1;
10233 /* Handle coprocessor insn extension space. */
10234 }
10235
10236 /* To be done for ARMv5 and later; as of now we return -1. */
10237 if (-1 == ret)
10238 return ret;
10239
10240 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10241 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10242
10243 return ret;
10244 }
10245
10246 /* Handling opcode 000 insns. */
10247
10248 static int
10249 arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
10250 {
10251 struct regcache *reg_cache = arm_insn_r->regcache;
10252 uint32_t record_buf[8], record_buf_mem[8];
10253 ULONGEST u_regval[2] = {0};
10254
10255 uint32_t reg_src1 = 0;
10256 uint32_t opcode1 = 0;
10257
10258 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10259 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10260 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10261
10262 if (!((opcode1 & 0x19) == 0x10))
10263 {
10264 /* Data-processing (register) and data-processing (register-shifted
10265 register). */
10266 /* In all of the shifter-operand modes the insn modifies the
10267 destination register, which is encoded in bits 12-15. */
10268 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10269 record_buf[1] = ARM_PS_REGNUM;
10270 arm_insn_r->reg_rec_count = 2;
10271 }
10272 else if ((arm_insn_r->decode < 8) && ((opcode1 & 0x19) == 0x10))
10273 {
10274 /* Miscellaneous instructions */
10275
10276 if (3 == arm_insn_r->decode && 0x12 == opcode1
10277 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10278 {
10279 /* Handle BLX, branch and link/exchange. */
10280 if (9 == arm_insn_r->opcode)
10281 {
10282 /* The T bit of CPSR is set from bit[0] of Rm, and R14
10283 stores the return address. */
10284 record_buf[0] = ARM_PS_REGNUM;
10285 record_buf[1] = ARM_LR_REGNUM;
10286 arm_insn_r->reg_rec_count = 2;
10287 }
10288 }
10289 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
10290 {
10291 /* Handle enhanced software breakpoint insn, BKPT. */
10292 /* CPSR is changed so that execution continues in ARM state with
10293 normal interrupts disabled, entering Abort mode. */
10294 /* PC is set according to the high-vector configuration. */
10295 /* If the user hits this breakpoint and then replays in reverse, we
10296 need to go back with the previous CPSR and
10297 Program Counter. */
10298 record_buf[0] = ARM_PS_REGNUM;
10299 record_buf[1] = ARM_LR_REGNUM;
10300 arm_insn_r->reg_rec_count = 2;
10301
10302 /* Save SPSR also; how? */
10303 return -1;
10304 }
10305 else if (1 == arm_insn_r->decode && 0x12 == opcode1
10306 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10307 {
10308 /* Handle BX, branch and link/exchange. */
10309 /* The T bit of CPSR is set from bit[0] of Rm. */
10310 record_buf[0] = ARM_PS_REGNUM;
10311 arm_insn_r->reg_rec_count = 1;
10312 }
10313 else if (1 == arm_insn_r->decode && 0x16 == opcode1
10314 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
10315 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
10316 {
10317 /* Count leading zeros: CLZ. */
10318 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10319 arm_insn_r->reg_rec_count = 1;
10320 }
10321 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10322 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10323 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
10324 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0))
10325 {
10326 /* Handle MRS insn. */
10327 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10328 arm_insn_r->reg_rec_count = 1;
10329 }
10330 }
10331 else if (9 == arm_insn_r->decode && opcode1 < 0x10)
10332 {
10333 /* Multiply and multiply-accumulate */
10334
10335 /* Handle multiply instructions. */
10336 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
10337 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
10338 {
10339 /* Handle MLA and MUL. */
10340 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10341 record_buf[1] = ARM_PS_REGNUM;
10342 arm_insn_r->reg_rec_count = 2;
10343 }
10344 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10345 {
10346 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
10347 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10348 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10349 record_buf[2] = ARM_PS_REGNUM;
10350 arm_insn_r->reg_rec_count = 3;
10351 }
10352 }
10353 else if (9 == arm_insn_r->decode && opcode1 > 0x10)
10354 {
10355 /* Synchronization primitives */
10356
10357 /* Handling SWP, SWPB. */
10358 /* This insn changes both a register and memory. */
10359 /* SWP or SWPB insn. */
10360
10361 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10362 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10363 /* SWP insn swaps a word. */
10364 if (8 == arm_insn_r->opcode)
10365 {
10366 record_buf_mem[0] = 4;
10367 }
10368 else
10369 {
10370 /* SWPB insn, swaps only byte. */
10371 record_buf_mem[0] = 1;
10372 }
10373 record_buf_mem[1] = u_regval[0];
10374 arm_insn_r->mem_rec_count = 1;
10375 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10376 arm_insn_r->reg_rec_count = 1;
10377 }
10378 else if (11 == arm_insn_r->decode || 13 == arm_insn_r->decode
10379 || 15 == arm_insn_r->decode)
10380 {
10381 if ((opcode1 & 0x12) == 2)
10382 {
10383 /* Extra load/store (unprivileged) */
10384 return -1;
10385 }
10386 else
10387 {
10388 /* Extra load/store */
10389 switch (bits (arm_insn_r->arm_insn, 5, 6))
10390 {
10391 case 1:
10392 if ((opcode1 & 0x05) == 0x0 || (opcode1 & 0x05) == 0x4)
10393 {
10394 /* STRH (register), STRH (immediate) */
10395 arm_record_strx (arm_insn_r, &record_buf[0],
10396 &record_buf_mem[0], ARM_RECORD_STRH);
10397 }
10398 else if ((opcode1 & 0x05) == 0x1)
10399 {
10400 /* LDRH (register) */
10401 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10402 arm_insn_r->reg_rec_count = 1;
10403
10404 if (bit (arm_insn_r->arm_insn, 21))
10405 {
10406 /* Write back to Rn. */
10407 record_buf[arm_insn_r->reg_rec_count++]
10408 = bits (arm_insn_r->arm_insn, 16, 19);
10409 }
10410 }
10411 else if ((opcode1 & 0x05) == 0x5)
10412 {
10413 /* LDRH (immediate), LDRH (literal) */
10414 int rn = bits (arm_insn_r->arm_insn, 16, 19);
10415
10416 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10417 arm_insn_r->reg_rec_count = 1;
10418
10419 if (rn != 15)
10420 {
10421 /* LDRH (immediate) */
10422 if (bit (arm_insn_r->arm_insn, 21))
10423 {
10424 /* Write back to Rn. */
10425 record_buf[arm_insn_r->reg_rec_count++] = rn;
10426 }
10427 }
10428 }
10429 else
10430 return -1;
10431 break;
10432 case 2:
10433 if ((opcode1 & 0x05) == 0x0)
10434 {
10435 /* LDRD (register) */
10436 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10437 record_buf[1] = record_buf[0] + 1;
10438 arm_insn_r->reg_rec_count = 2;
10439
10440 if (bit (arm_insn_r->arm_insn, 21))
10441 {
10442 /* Write back to Rn. */
10443 record_buf[arm_insn_r->reg_rec_count++]
10444 = bits (arm_insn_r->arm_insn, 16, 19);
10445 }
10446 }
10447 else if ((opcode1 & 0x05) == 0x1)
10448 {
10449 /* LDRSB (register) */
10450 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10451 arm_insn_r->reg_rec_count = 1;
10452
10453 if (bit (arm_insn_r->arm_insn, 21))
10454 {
10455 /* Write back to Rn. */
10456 record_buf[arm_insn_r->reg_rec_count++]
10457 = bits (arm_insn_r->arm_insn, 16, 19);
10458 }
10459 }
10460 else if ((opcode1 & 0x05) == 0x4 || (opcode1 & 0x05) == 0x5)
10461 {
10462 /* LDRD (immediate), LDRD (literal), LDRSB (immediate),
10463 LDRSB (literal) */
10464 int rn = bits (arm_insn_r->arm_insn, 16, 19);
10465
10466 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10467 arm_insn_r->reg_rec_count = 1;
10468
10469 if (rn != 15)
10470 {
10471 /* LDRD (immediate), LDRSB (immediate) */
10472 if (bit (arm_insn_r->arm_insn, 21))
10473 {
10474 /* Write back to Rn. */
10475 record_buf[arm_insn_r->reg_rec_count++] = rn;
10476 }
10477 }
10478 }
10479 else
10480 return -1;
10481 break;
10482 case 3:
10483 if ((opcode1 & 0x05) == 0x0)
10484 {
10485 /* STRD (register) */
10486 arm_record_strx (arm_insn_r, &record_buf[0],
10487 &record_buf_mem[0], ARM_RECORD_STRD);
10488 }
10489 else if ((opcode1 & 0x05) == 0x1)
10490 {
10491 /* LDRSH (register) */
10492 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10493 arm_insn_r->reg_rec_count = 1;
10494
10495 if (bit (arm_insn_r->arm_insn, 21))
10496 {
10497 /* Write back to Rn. */
10498 record_buf[arm_insn_r->reg_rec_count++]
10499 = bits (arm_insn_r->arm_insn, 16, 19);
10500 }
10501 }
10502 else if ((opcode1 & 0x05) == 0x4)
10503 {
10504 /* STRD (immediate) */
10505 arm_record_strx (arm_insn_r, &record_buf[0],
10506 &record_buf_mem[0], ARM_RECORD_STRD);
10507 }
10508 else if ((opcode1 & 0x05) == 0x5)
10509 {
10510 /* LDRSH (immediate), LDRSH (literal) */
10511 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10512 arm_insn_r->reg_rec_count = 1;
10513
10514 if (bit (arm_insn_r->arm_insn, 21))
10515 {
10516 /* Write back to Rn. */
10517 record_buf[arm_insn_r->reg_rec_count++]
10518 = bits (arm_insn_r->arm_insn, 16, 19);
10519 }
10520 }
10521 else
10522 return -1;
10523 break;
10524 default:
10525 return -1;
10526 }
10527 }
10528 }
10529 else
10530 {
10531 return -1;
10532 }
10533
10534 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10535 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10536 return 0;
10537 }
10538
10539 /* Handling opcode 001 insns. */
10540
10541 static int
10542 arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
10543 {
10544 uint32_t record_buf[8], record_buf_mem[8];
10545
10546 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10547 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10548
10549 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10550 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
10551 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10552 )
10553 {
10554 /* Handle MSR insn. */
10555 if (9 == arm_insn_r->opcode)
10556 {
10557 /* CPSR is going to be changed. */
10558 record_buf[0] = ARM_PS_REGNUM;
10559 arm_insn_r->reg_rec_count = 1;
10560 }
10561 else
10562 {
10563 /* SPSR is going to be changed. */
10564 }
10565 }
10566 else if (arm_insn_r->opcode <= 15)
10567 {
10568 /* Normal data processing insns. */
10569 /* In all of the shifter-operand modes the insn modifies the
10570 destination register, which is encoded in bits 12-15. */
10571 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10572 record_buf[1] = ARM_PS_REGNUM;
10573 arm_insn_r->reg_rec_count = 2;
10574 }
10575 else
10576 {
10577 return -1;
10578 }
10579
10580 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10581 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10582 return 0;
10583 }
10584
10585 static int
10586 arm_record_media (insn_decode_record *arm_insn_r)
10587 {
10588 uint32_t record_buf[8];
10589
10590 switch (bits (arm_insn_r->arm_insn, 22, 24))
10591 {
10592 case 0:
10593 /* Parallel addition and subtraction, signed */
10594 case 1:
10595 /* Parallel addition and subtraction, unsigned */
10596 case 2:
10597 case 3:
10598 /* Packing, unpacking, saturation and reversal */
10599 {
10600 int rd = bits (arm_insn_r->arm_insn, 12, 15);
10601
10602 record_buf[arm_insn_r->reg_rec_count++] = rd;
10603 }
10604 break;
10605
10606 case 4:
10607 case 5:
10608 /* Signed multiplies */
10609 {
10610 int rd = bits (arm_insn_r->arm_insn, 16, 19);
10611 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 22);
10612
10613 record_buf[arm_insn_r->reg_rec_count++] = rd;
10614 if (op1 == 0x0)
10615 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10616 else if (op1 == 0x4)
10617 record_buf[arm_insn_r->reg_rec_count++]
10618 = bits (arm_insn_r->arm_insn, 12, 15);
10619 }
10620 break;
10621
10622 case 6:
10623 {
10624 if (bit (arm_insn_r->arm_insn, 21)
10625 && bits (arm_insn_r->arm_insn, 5, 6) == 0x2)
10626 {
10627 /* SBFX */
10628 record_buf[arm_insn_r->reg_rec_count++]
10629 = bits (arm_insn_r->arm_insn, 12, 15);
10630 }
10631 else if (bits (arm_insn_r->arm_insn, 20, 21) == 0x0
10632 && bits (arm_insn_r->arm_insn, 5, 7) == 0x0)
10633 {
10634 /* USAD8 and USADA8 */
10635 record_buf[arm_insn_r->reg_rec_count++]
10636 = bits (arm_insn_r->arm_insn, 16, 19);
10637 }
10638 }
10639 break;
10640
10641 case 7:
10642 {
10643 if (bits (arm_insn_r->arm_insn, 20, 21) == 0x3
10644 && bits (arm_insn_r->arm_insn, 5, 7) == 0x7)
10645 {
10646 /* Permanently UNDEFINED */
10647 return -1;
10648 }
10649 else
10650 {
10651 /* BFC, BFI and UBFX */
10652 record_buf[arm_insn_r->reg_rec_count++]
10653 = bits (arm_insn_r->arm_insn, 12, 15);
10654 }
10655 }
10656 break;
10657
10658 default:
10659 return -1;
10660 }
10661
10662 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10663
10664 return 0;
10665 }
10666
10667 /* Handle ARM mode instructions with opcode 010. */
10668
10669 static int
10670 arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
10671 {
10672 struct regcache *reg_cache = arm_insn_r->regcache;
10673
10674 uint32_t reg_base , reg_dest;
10675 uint32_t offset_12, tgt_mem_addr;
10676 uint32_t record_buf[8], record_buf_mem[8];
10677 unsigned char wback;
10678 ULONGEST u_regval;
10679
10680 /* Calculate wback. */
10681 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
10682 || (bit (arm_insn_r->arm_insn, 21) == 1);
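/* Bit 24 is P (pre/post-indexing) and bit 21 is W (write-back), so the
   base register is written back for post-indexed addressing (P == 0) or
   for pre-indexed addressing with write-back (W == 1).  */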
10683
10684 arm_insn_r->reg_rec_count = 0;
10685 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10686
10687 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10688 {
10689 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
10690 and LDRT. */
10691
10692 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10693 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
10694
10695 /* The LDR instruction is capable of doing branching. If MOV LR, PC
10696 precedes a LDR instruction that loads into R15 (reg_dest), the pair
10697 emulates a branch and link instruction, and hence we need to save
10698 CPSR and PC as well. */
10699 if (ARM_PC_REGNUM == reg_dest)
10700 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10701
10702 /* If wback is true, also save the base register, which is going to be
10703 written to. */
10704 if (wback)
10705 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10706 }
10707 else
10708 {
10709 /* STR (immediate), STRB (immediate), STRBT and STRT. */
10710
10711 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
10712 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10713
10714 /* Handle bit U. */
10715 if (bit (arm_insn_r->arm_insn, 23))
10716 {
10717 /* U == 1: Add the offset. */
10718 tgt_mem_addr = (uint32_t) u_regval + offset_12;
10719 }
10720 else
10721 {
10722 /* U == 0: subtract the offset. */
10723 tgt_mem_addr = (uint32_t) u_regval - offset_12;
10724 }
10725
10726 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
10727 bytes. */
10728 if (bit (arm_insn_r->arm_insn, 22))
10729 {
10730 /* STRB and STRBT: 1 byte. */
10731 record_buf_mem[0] = 1;
10732 }
10733 else
10734 {
10735 /* STR and STRT: 4 bytes. */
10736 record_buf_mem[0] = 4;
10737 }
10738
10739 /* Handle bit P. */
10740 if (bit (arm_insn_r->arm_insn, 24))
10741 record_buf_mem[1] = tgt_mem_addr;
10742 else
10743 record_buf_mem[1] = (uint32_t) u_regval;
10744
10745 arm_insn_r->mem_rec_count = 1;
10746
10747 /* If wback is true, also save the base register, which is going to be
10748 written to. */
10749 if (wback)
10750 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10751 }
10752
10753 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10754 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10755 return 0;
10756 }
10757
10758 /* Handling opcode 011 insns. */
10759
10760 static int
10761 arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
10762 {
10763 struct regcache *reg_cache = arm_insn_r->regcache;
10764
10765 uint32_t shift_imm = 0;
10766 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
10767 uint32_t offset_12 = 0, tgt_mem_addr = 0;
10768 uint32_t record_buf[8], record_buf_mem[8];
10769
10770 LONGEST s_word;
10771 ULONGEST u_regval[2];
10772
10773 if (bit (arm_insn_r->arm_insn, 4))
10774 return arm_record_media (arm_insn_r);
10775
10776 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10777 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10778
10779 /* Handle enhanced store insns and the LDRD DSP insn; the ordering
10780 below follows the addressing modes of the store insns, starting
10781 with STRH. */
10782
10783 /* LDR or STR? */
10784 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10785 {
10786 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10787 /* The LDR insn is capable of branching. If MOV LR, PC precedes an
10788 LDR insn having R15 as the destination register, the pair
10789 emulates a branch and link insn, and hence we need to
10790 save CPSR and PC as well. */
10791 if (15 != reg_dest)
10792 {
10793 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10794 arm_insn_r->reg_rec_count = 1;
10795 }
10796 else
10797 {
10798 record_buf[0] = reg_dest;
10799 record_buf[1] = ARM_PS_REGNUM;
10800 arm_insn_r->reg_rec_count = 2;
10801 }
10802 }
10803 else
10804 {
10805 if (! bits (arm_insn_r->arm_insn, 4, 11))
10806 {
10807 /* Store insn, register offset and register pre-indexed,
10808 register post-indexed. */
10809 /* Get Rm. */
10810 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10811 /* Get Rn. */
10812 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10813 regcache_raw_read_unsigned (reg_cache, reg_src1,
10814 &u_regval[0]);
10815 regcache_raw_read_unsigned (reg_cache, reg_src2,
10816 &u_regval[1]);
10817 if (15 == reg_src2)
10818 {
10819 /* If R15 was used as Rn, its value is the current PC+8. */
10820 /* Pre-indexed mode doesn't reach here; that would be an illegal insn. */
10821 u_regval[1] = u_regval[1] + 8;
10822 }
10823 /* Calculate target store address, Rn +/- Rm, register offset. */
10824 /* U == 1. */
10825 if (bit (arm_insn_r->arm_insn, 23))
10826 {
10827 tgt_mem_addr = u_regval[0] + u_regval[1];
10828 }
10829 else
10830 {
10831 tgt_mem_addr = u_regval[1] - u_regval[0];
10832 }
10833
10834 switch (arm_insn_r->opcode)
10835 {
10836 /* STR. */
10837 case 8:
10838 case 12:
10839 /* STR. */
10840 case 9:
10841 case 13:
10842 /* STRT. */
10843 case 1:
10844 case 5:
10845 /* STR. */
10846 case 0:
10847 case 4:
10848 record_buf_mem[0] = 4;
10849 break;
10850
10851 /* STRB. */
10852 case 10:
10853 case 14:
10854 /* STRB. */
10855 case 11:
10856 case 15:
10857 /* STRBT. */
10858 case 3:
10859 case 7:
10860 /* STRB. */
10861 case 2:
10862 case 6:
10863 record_buf_mem[0] = 1;
10864 break;
10865
10866 default:
10867 gdb_assert_not_reached ("no decoding pattern found");
10868 break;
10869 }
10870 record_buf_mem[1] = tgt_mem_addr;
10871 arm_insn_r->mem_rec_count = 1;
10872
10873 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10874 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10875 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10876 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10877 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10878 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10879 )
10880 {
10881 /* Rn is going to be changed in pre-indexed mode and
10882 post-indexed mode as well. */
10883 record_buf[0] = reg_src2;
10884 arm_insn_r->reg_rec_count = 1;
10885 }
10886 }
10887 else
10888 {
10889 /* Store insn, scaled register offset; scaled pre-indexed. */
10890 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
10891 /* Get Rm. */
10892 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10893 /* Get Rn. */
10894 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10895 /* Get shift_imm. */
10896 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
10897 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10898 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
10899 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10900 /* OFFSET_12 now holds the shift type: 0 = LSL, 1 = LSR, 2 = ASR, 3 = ROR/RRX. */
10901 switch (offset_12)
10902 {
10903 case 0:
10904 /* LSL: the offset is Rm shifted left by SHIFT_IMM. */
10905 offset_12 = u_regval[0] << shift_imm;
10906 break;
10907
10908 case 1:
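/* LSR; a zero SHIFT_IMM encodes LSR #32, which yields zero. */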
10909 offset_12 = (!shift_imm) ? 0 : u_regval[0] >> shift_imm;
10910 break;
10911
10912 case 2:
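/* ASR; a zero SHIFT_IMM encodes ASR #32: all ones or all zeros, depending on the sign bit. */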
10913 if (!shift_imm)
10914 {
10915 if (bit (u_regval[0], 31))
10916 {
10917 offset_12 = 0xFFFFFFFF;
10918 }
10919 else
10920 {
10921 offset_12 = 0;
10922 }
10923 }
10924 else
10925 {
10926 /* This is arithmetic shift. */
10927 offset_12 = s_word >> shift_imm;
10928 }
10929 break;
10930
10931 case 3:
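/* ROR, or RRX when SHIFT_IMM is zero. */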
10932 if (!shift_imm)
10933 {
10934 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
10935 &u_regval[1]);
10936 /* Get C flag value and shift it by 31. */
10937 offset_12 = (((bit (u_regval[1], 29)) << 31)
10938 | (u_regval[0]) >> 1);
10939 }
10940 else
10941 {
10942 offset_12 = (u_regval[0] >> shift_imm)
10943 | (u_regval[0] <<
10944 (32 - shift_imm));
10945 }
10946 break;
10947
10948 default:
10949 gdb_assert_not_reached ("no decoding pattern found");
10950 break;
10951 }
10952
10953 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10954 /* bit U set. */
10955 if (bit (arm_insn_r->arm_insn, 23))
10956 {
10957 tgt_mem_addr = u_regval[1] + offset_12;
10958 }
10959 else
10960 {
10961 tgt_mem_addr = u_regval[1] - offset_12;
10962 }
10963
10964 switch (arm_insn_r->opcode)
10965 {
10966 /* STR. */
10967 case 8:
10968 case 12:
10969 /* STR. */
10970 case 9:
10971 case 13:
10972 /* STRT. */
10973 case 1:
10974 case 5:
10975 /* STR. */
10976 case 0:
10977 case 4:
10978 record_buf_mem[0] = 4;
10979 break;
10980
10981 /* STRB. */
10982 case 10:
10983 case 14:
10984 /* STRB. */
10985 case 11:
10986 case 15:
10987 /* STRBT. */
10988 case 3:
10989 case 7:
10990 /* STRB. */
10991 case 2:
10992 case 6:
10993 record_buf_mem[0] = 1;
10994 break;
10995
10996 default:
10997 gdb_assert_not_reached ("no decoding pattern found");
10998 break;
10999 }
11000 record_buf_mem[1] = tgt_mem_addr;
11001 arm_insn_r->mem_rec_count = 1;
11002
11003 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11004 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11005 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11006 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11007 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11008 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11009 )
11010 {
11011 /* Rn is going to be changed in register scaled pre-indexed
11012 mode, and in scaled post-indexed mode. */
11013 record_buf[0] = reg_src2;
11014 arm_insn_r->reg_rec_count = 1;
11015 }
11016 }
11017 }
11018
11019 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11020 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11021 return 0;
11022 }
11023
11024 /* Handle ARM mode instructions with opcode 100. */
11025
11026 static int
11027 arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
11028 {
11029 struct regcache *reg_cache = arm_insn_r->regcache;
11030 uint32_t register_count = 0, register_bits;
11031 uint32_t reg_base, addr_mode;
11032 uint32_t record_buf[24], record_buf_mem[48];
11033 uint32_t wback;
11034 ULONGEST u_regval;
11035
11036 /* Fetch the list of registers. */
11037 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
11038 arm_insn_r->reg_rec_count = 0;
11039
11040 /* Fetch the base register that contains the address we are loading data
11041 to. */
11042 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
11043
11044 /* Calculate wback. */
11045 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
11046
11047 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11048 {
11049 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
11050
11051 /* Find out which registers are going to be loaded from memory. */
11052 while (register_bits)
11053 {
11054 if (register_bits & 0x00000001)
11055 record_buf[arm_insn_r->reg_rec_count++] = register_count;
11056 register_bits = register_bits >> 1;
11057 register_count++;
11058 }
11059
11060
11061 /* If wback is true, also save the base register, which is going to be
11062 written to. */
11063 if (wback)
11064 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11065
11066 /* Save the CPSR register. */
11067 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
11068 }
11069 else
11070 {
11071 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
11072
11073 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
11074
11075 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
11076
11077 /* Find out how many registers are going to be stored to memory. */
11078 while (register_bits)
11079 {
11080 if (register_bits & 0x00000001)
11081 register_count++;
11082 register_bits = register_bits >> 1;
11083 }
11084
11085 switch (addr_mode)
11086 {
11087 /* STMDA (STMED): Decrement after. */
11088 case 0:
11089 record_buf_mem[1] = (uint32_t) u_regval
11090 - register_count * ARM_INT_REGISTER_SIZE + 4;
11091 break;
11092 /* STM (STMIA, STMEA): Increment after. */
11093 case 1:
11094 record_buf_mem[1] = (uint32_t) u_regval;
11095 break;
11096 /* STMDB (STMFD): Decrement before. */
11097 case 2:
11098 record_buf_mem[1] = (uint32_t) u_regval
11099 - register_count * ARM_INT_REGISTER_SIZE;
11100 break;
11101 /* STMIB (STMFA): Increment before. */
11102 case 3:
11103 record_buf_mem[1] = (uint32_t) u_regval + ARM_INT_REGISTER_SIZE;
11104 break;
11105 default:
11106 gdb_assert_not_reached ("no decoding pattern found");
11107 break;
11108 }
11109
11110 record_buf_mem[0] = register_count * ARM_INT_REGISTER_SIZE;
11111 arm_insn_r->mem_rec_count = 1;
11112
11113 /* If wback is true, also save the base register, which is going to be
11114 written to. */
11115 if (wback)
11116 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11117 }
11118
11119 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11120 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11121 return 0;
11122 }
11123
11124 /* Handling opcode 101 insns. */
11125
11126 static int
11127 arm_record_b_bl (insn_decode_record *arm_insn_r)
11128 {
11129 uint32_t record_buf[8];
11130
11131 /* Handle B, BL, BLX(1) insns. */
11132 /* B simply branches so we do nothing here. */
11133 /* Note: BLX(1) doesn't fall here; instead it falls into the
11134 extension space. */
11135 if (bit (arm_insn_r->arm_insn, 24))
11136 {
11137 record_buf[0] = ARM_LR_REGNUM;
11138 arm_insn_r->reg_rec_count = 1;
11139 }
11140
11141 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11142
11143 return 0;
11144 }
11145
11146 static int
11147 arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
11148 {
11149 printf_unfiltered (_("Process record does not support instruction "
11150 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11151 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11152
11153 return -1;
11154 }
11155
11156 /* Record handler for vector data transfer instructions. */
11157
11158 static int
11159 arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
11160 {
11161 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
11162 uint32_t record_buf[4];
11163
11164 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
11165 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
11166 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
11167 bit_l = bit (arm_insn_r->arm_insn, 20);
11168 bit_c = bit (arm_insn_r->arm_insn, 8);
11169
11170 /* Handle VMOV instruction. */
11171 if (bit_l && bit_c)
11172 {
11173 record_buf[0] = reg_t;
11174 arm_insn_r->reg_rec_count = 1;
11175 }
11176 else if (bit_l && !bit_c)
11177 {
11178 /* Handle VMOV instruction. */
11179 if (bits_a == 0x00)
11180 {
11181 record_buf[0] = reg_t;
11182 arm_insn_r->reg_rec_count = 1;
11183 }
11184 /* Handle VMRS instruction. */
11185 else if (bits_a == 0x07)
11186 {
11187 if (reg_t == 15)
11188 reg_t = ARM_PS_REGNUM;
11189
11190 record_buf[0] = reg_t;
11191 arm_insn_r->reg_rec_count = 1;
11192 }
11193 }
11194 else if (!bit_l && !bit_c)
11195 {
11196 /* Handle VMOV instruction. */
11197 if (bits_a == 0x00)
11198 {
11199 record_buf[0] = ARM_D0_REGNUM + reg_v;
11200
11201 arm_insn_r->reg_rec_count = 1;
11202 }
11203 /* Handle VMSR instruction. */
11204 else if (bits_a == 0x07)
11205 {
11206 record_buf[0] = ARM_FPSCR_REGNUM;
11207 arm_insn_r->reg_rec_count = 1;
11208 }
11209 }
11210 else if (!bit_l && bit_c)
11211 {
11212 /* Handle VMOV instruction. */
11213 if (!(bits_a & 0x04))
11214 {
11215 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
11216 + ARM_D0_REGNUM;
11217 arm_insn_r->reg_rec_count = 1;
11218 }
11219 /* Handle VDUP instruction. */
11220 else
11221 {
11222 if (bit (arm_insn_r->arm_insn, 21))
11223 {
11224 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11225 record_buf[0] = reg_v + ARM_D0_REGNUM;
11226 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
11227 arm_insn_r->reg_rec_count = 2;
11228 }
11229 else
11230 {
11231 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11232 record_buf[0] = reg_v + ARM_D0_REGNUM;
11233 arm_insn_r->reg_rec_count = 1;
11234 }
11235 }
11236 }
11237
11238 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11239 return 0;
11240 }
11241
11242 /* Record handler for extension register load/store instructions. */
11243
11244 static int
11245 arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
11246 {
11247 uint32_t opcode, single_reg;
11248 uint8_t op_vldm_vstm;
11249 uint32_t record_buf[8], record_buf_mem[128];
11250 ULONGEST u_regval = 0;
11251
11252 struct regcache *reg_cache = arm_insn_r->regcache;
11253
11254 opcode = bits (arm_insn_r->arm_insn, 20, 24);
11255 single_reg = !bit (arm_insn_r->arm_insn, 8);
11256 op_vldm_vstm = opcode & 0x1b;
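/* Masking with 0x1b clears bit 2 of OPCODE (insn bit 22, the D bit), so VLDM/VSTM variants that differ only in D compare equal below. */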
11257
11258 /* Handle VMOV instructions. */
11259 if ((opcode & 0x1e) == 0x04)
11260 {
11261 if (bit (arm_insn_r->arm_insn, 20)) /* to_arm_registers bit 20? */
11262 {
11263 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11264 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11265 arm_insn_r->reg_rec_count = 2;
11266 }
11267 else
11268 {
11269 uint8_t reg_m = bits (arm_insn_r->arm_insn, 0, 3);
11270 uint8_t bit_m = bit (arm_insn_r->arm_insn, 5);
11271
11272 if (single_reg)
11273 {
11274 /* The first S register number m is REG_M:M (M is bit 5),
11275 the corresponding D register number is REG_M:M / 2, which
11276 is REG_M. */
11277 record_buf[arm_insn_r->reg_rec_count++] = ARM_D0_REGNUM + reg_m;
11278 /* The second S register number is REG_M:M + 1, the
11279 corresponding D register number is (REG_M:M + 1) / 2.
11280 IOW, if bit M is 1, the first and second S registers
11281 are mapped to different D registers, otherwise, they are
11282 in the same D register. */
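/* Illustrative example: REG_M = 3 with M = 1 selects S7 and S8, which live in D3 and D4; with M = 0 it selects S6 and S7, both in D3. */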
11283 if (bit_m)
11284 {
11285 record_buf[arm_insn_r->reg_rec_count++]
11286 = ARM_D0_REGNUM + reg_m + 1;
11287 }
11288 }
11289 else
11290 {
11291 record_buf[0] = ((bit_m << 4) + reg_m + ARM_D0_REGNUM);
11292 arm_insn_r->reg_rec_count = 1;
11293 }
11294 }
11295 }
11296 /* Handle VSTM and VPUSH instructions. */
11297 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
11298 || op_vldm_vstm == 0x12)
11299 {
11300 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
11301 uint32_t memory_index = 0;
11302
11303 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11304 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11305 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11306 imm_off32 = imm_off8 << 2;
11307 memory_count = imm_off8;
11308
11309 if (bit (arm_insn_r->arm_insn, 23))
11310 start_address = u_regval;
11311 else
11312 start_address = u_regval - imm_off32;
11313
11314 if (bit (arm_insn_r->arm_insn, 21))
11315 {
11316 record_buf[0] = reg_rn;
11317 arm_insn_r->reg_rec_count = 1;
11318 }
11319
11320 while (memory_count > 0)
11321 {
11322 if (single_reg)
11323 {
11324 record_buf_mem[memory_index] = 4;
11325 record_buf_mem[memory_index + 1] = start_address;
11326 start_address = start_address + 4;
11327 memory_index = memory_index + 2;
11328 }
11329 else
11330 {
11331 record_buf_mem[memory_index] = 4;
11332 record_buf_mem[memory_index + 1] = start_address;
11333 record_buf_mem[memory_index + 2] = 4;
11334 record_buf_mem[memory_index + 3] = start_address + 4;
11335 start_address = start_address + 8;
11336 memory_index = memory_index + 4;
11337 }
11338 memory_count--;
11339 }
11340 arm_insn_r->mem_rec_count = (memory_index >> 1);
11341 }
11342 /* Handle VLDM instructions. */
11343 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
11344 || op_vldm_vstm == 0x13)
11345 {
11346 uint32_t reg_count, reg_vd;
11347 uint32_t reg_index = 0;
11348 uint32_t bit_d = bit (arm_insn_r->arm_insn, 22);
11349
11350 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11351 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
11352
11353 /* REG_VD is the first D register number. If the instruction
11354 loads memory to S registers (SINGLE_REG is TRUE), the register
11355 number is (REG_VD << 1 | bit D), so the corresponding D
11356 register number is (REG_VD << 1 | bit D) / 2 = REG_VD. */
11357 if (!single_reg)
11358 reg_vd = reg_vd | (bit_d << 4);
11359
11360 if (bit (arm_insn_r->arm_insn, 21) /* write back */)
11361 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
11362
11363 /* If the instruction loads memory to D register, REG_COUNT should
11364 be divided by 2, according to the ARM Architecture Reference
11365 Manual. If the instruction loads memory to S register, divide by
11366 2 as well because two S registers are mapped to D register. */
11367 reg_count = reg_count / 2;
11368 if (single_reg && bit_d)
11369 {
11370 /* Increase the register count if S register list starts from
11371 an odd number (bit d is one). */
11372 reg_count++;
11373 }
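/* Illustrative example: a VLDM of S1-S4 has REG_COUNT = 4 and bit D = 1; halving plus the odd-start adjustment gives REG_COUNT = 3, so D0-D2 are recorded, covering S1-S4. */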
11374
11375 while (reg_count > 0)
11376 {
11377 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
11378 reg_count--;
11379 }
11380 arm_insn_r->reg_rec_count = reg_index;
11381 }
11382 /* VSTR Vector store register. */
11383 else if ((opcode & 0x13) == 0x10)
11384 {
11385 uint32_t start_address, reg_rn, imm_off32, imm_off8;
11386 uint32_t memory_index = 0;
11387
11388 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11389 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11390 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11391 imm_off32 = imm_off8 << 2;
11392
11393 if (bit (arm_insn_r->arm_insn, 23))
11394 start_address = u_regval + imm_off32;
11395 else
11396 start_address = u_regval - imm_off32;
11397
11398 if (single_reg)
11399 {
11400 record_buf_mem[memory_index] = 4;
11401 record_buf_mem[memory_index + 1] = start_address;
11402 arm_insn_r->mem_rec_count = 1;
11403 }
11404 else
11405 {
11406 record_buf_mem[memory_index] = 4;
11407 record_buf_mem[memory_index + 1] = start_address;
11408 record_buf_mem[memory_index + 2] = 4;
11409 record_buf_mem[memory_index + 3] = start_address + 4;
11410 arm_insn_r->mem_rec_count = 2;
11411 }
11412 }
11413 /* VLDR Vector load register. */
11414 else if ((opcode & 0x13) == 0x11)
11415 {
11416 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11417
11418 if (!single_reg)
11419 {
11420 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
11421 record_buf[0] = ARM_D0_REGNUM + reg_vd;
11422 }
11423 else
11424 {
11425 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
11426 /* Record register D rather than pseudo register S. */
11427 record_buf[0] = ARM_D0_REGNUM + reg_vd / 2;
11428 }
11429 arm_insn_r->reg_rec_count = 1;
11430 }
11431
11432 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11433 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11434 return 0;
11435 }
11436
11437 /* Record handler for arm/thumb mode VFP data processing instructions. */
11438
11439 static int
11440 arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
11441 {
11442 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
11443 uint32_t record_buf[4];
11444 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
11445 enum insn_types curr_insn_type = INSN_INV;
11446
11447 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11448 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
11449 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
11450 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
11451 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
11452 bit_d = bit (arm_insn_r->arm_insn, 22);
11453 /* Mask off the "D" bit. */
11454 opc1 = opc1 & ~0x04;
11455
11456 /* Handle VMLA, VMLS. */
11457 if (opc1 == 0x00)
11458 {
11459 if (bit (arm_insn_r->arm_insn, 10))
11460 {
11461 if (bit (arm_insn_r->arm_insn, 6))
11462 curr_insn_type = INSN_T0;
11463 else
11464 curr_insn_type = INSN_T1;
11465 }
11466 else
11467 {
11468 if (dp_op_sz)
11469 curr_insn_type = INSN_T1;
11470 else
11471 curr_insn_type = INSN_T2;
11472 }
11473 }
11474 /* Handle VNMLA, VNMLS, VNMUL. */
11475 else if (opc1 == 0x01)
11476 {
11477 if (dp_op_sz)
11478 curr_insn_type = INSN_T1;
11479 else
11480 curr_insn_type = INSN_T2;
11481 }
11482 /* Handle VMUL. */
11483 else if (opc1 == 0x02 && !(opc3 & 0x01))
11484 {
11485 if (bit (arm_insn_r->arm_insn, 10))
11486 {
11487 if (bit (arm_insn_r->arm_insn, 6))
11488 curr_insn_type = INSN_T0;
11489 else
11490 curr_insn_type = INSN_T1;
11491 }
11492 else
11493 {
11494 if (dp_op_sz)
11495 curr_insn_type = INSN_T1;
11496 else
11497 curr_insn_type = INSN_T2;
11498 }
11499 }
11500 /* Handle VADD, VSUB. */
11501 else if (opc1 == 0x03)
11502 {
11503 if (!bit (arm_insn_r->arm_insn, 9))
11504 {
11505 if (bit (arm_insn_r->arm_insn, 6))
11506 curr_insn_type = INSN_T0;
11507 else
11508 curr_insn_type = INSN_T1;
11509 }
11510 else
11511 {
11512 if (dp_op_sz)
11513 curr_insn_type = INSN_T1;
11514 else
11515 curr_insn_type = INSN_T2;
11516 }
11517 }
11518 /* Handle VDIV. */
11519 else if (opc1 == 0x08)
11520 {
11521 if (dp_op_sz)
11522 curr_insn_type = INSN_T1;
11523 else
11524 curr_insn_type = INSN_T2;
11525 }
11526 /* Handle all other vfp data processing instructions. */
11527 else if (opc1 == 0x0b)
11528 {
11529 /* Handle VMOV. */
11530 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
11531 {
11532 if (bit (arm_insn_r->arm_insn, 4))
11533 {
11534 if (bit (arm_insn_r->arm_insn, 6))
11535 curr_insn_type = INSN_T0;
11536 else
11537 curr_insn_type = INSN_T1;
11538 }
11539 else
11540 {
11541 if (dp_op_sz)
11542 curr_insn_type = INSN_T1;
11543 else
11544 curr_insn_type = INSN_T2;
11545 }
11546 }
11547 /* Handle VNEG and VABS. */
11548 else if ((opc2 == 0x01 && opc3 == 0x01)
11549 || (opc2 == 0x00 && opc3 == 0x03))
11550 {
11551 if (!bit (arm_insn_r->arm_insn, 11))
11552 {
11553 if (bit (arm_insn_r->arm_insn, 6))
11554 curr_insn_type = INSN_T0;
11555 else
11556 curr_insn_type = INSN_T1;
11557 }
11558 else
11559 {
11560 if (dp_op_sz)
11561 curr_insn_type = INSN_T1;
11562 else
11563 curr_insn_type = INSN_T2;
11564 }
11565 }
11566 /* Handle VSQRT. */
11567 else if (opc2 == 0x01 && opc3 == 0x03)
11568 {
11569 if (dp_op_sz)
11570 curr_insn_type = INSN_T1;
11571 else
11572 curr_insn_type = INSN_T2;
11573 }
11574 /* Handle VCVT. */
11575 else if (opc2 == 0x07 && opc3 == 0x03)
11576 {
11577 if (!dp_op_sz)
11578 curr_insn_type = INSN_T1;
11579 else
11580 curr_insn_type = INSN_T2;
11581 }
11582 else if (opc3 & 0x01)
11583 {
11584 /* Handle VCVT. */
11585 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
11586 {
11587 if (!bit (arm_insn_r->arm_insn, 18))
11588 curr_insn_type = INSN_T2;
11589 else
11590 {
11591 if (dp_op_sz)
11592 curr_insn_type = INSN_T1;
11593 else
11594 curr_insn_type = INSN_T2;
11595 }
11596 }
11597 /* Handle VCVT. */
11598 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
11599 {
11600 if (dp_op_sz)
11601 curr_insn_type = INSN_T1;
11602 else
11603 curr_insn_type = INSN_T2;
11604 }
11605 /* Handle VCVTB, VCVTT. */
11606 else if ((opc2 & 0x0e) == 0x02)
11607 curr_insn_type = INSN_T2;
11608 /* Handle VCMP, VCMPE. */
11609 else if ((opc2 & 0x0e) == 0x04)
11610 curr_insn_type = INSN_T3;
11611 }
11612 }
11613
11614 switch (curr_insn_type)
11615 {
11616 case INSN_T0:
11617 reg_vd = reg_vd | (bit_d << 4);
11618 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11619 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
11620 arm_insn_r->reg_rec_count = 2;
11621 break;
11622
11623 case INSN_T1:
11624 reg_vd = reg_vd | (bit_d << 4);
11625 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11626 arm_insn_r->reg_rec_count = 1;
11627 break;
11628
11629 case INSN_T2:
11630 reg_vd = (reg_vd << 1) | bit_d;
11631 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11632 arm_insn_r->reg_rec_count = 1;
11633 break;
11634
11635 case INSN_T3:
11636 record_buf[0] = ARM_FPSCR_REGNUM;
11637 arm_insn_r->reg_rec_count = 1;
11638 break;
11639
11640 default:
11641 gdb_assert_not_reached ("no decoding pattern found");
11642 break;
11643 }
11644
11645 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11646 return 0;
11647 }
11648
11649 /* Handling opcode 110 insns. */
11650
11651 static int
11652 arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
11653 {
11654 uint32_t op1, op1_ebit, coproc;
11655
11656 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11657 op1 = bits (arm_insn_r->arm_insn, 20, 25);
11658 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11659
11660 if ((coproc & 0x0e) == 0x0a)
11661 {
11662 /* Handle extension register ld/st instructions. */
11663 if (!(op1 & 0x20))
11664 return arm_record_exreg_ld_st_insn (arm_insn_r);
11665
11666 /* 64-bit transfers between arm core and extension registers. */
11667 if ((op1 & 0x3e) == 0x04)
11668 return arm_record_exreg_ld_st_insn (arm_insn_r);
11669 }
11670 else
11671 {
11672 /* Handle coprocessor ld/st instructions. */
11673 if (!(op1 & 0x3a))
11674 {
11675 /* Store. */
11676 if (!op1_ebit)
11677 return arm_record_unsupported_insn (arm_insn_r);
11678 else
11679 /* Load. */
11680 return arm_record_unsupported_insn (arm_insn_r);
11681 }
11682
11683 /* Move to coprocessor from two arm core registers. */
11684 if (op1 == 0x4)
11685 return arm_record_unsupported_insn (arm_insn_r);
11686
11687 /* Move to two arm core registers from coprocessor. */
11688 if (op1 == 0x5)
11689 {
11690 uint32_t reg_t[2];
11691
11692 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
11693 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
11694 arm_insn_r->reg_rec_count = 2;
11695
11696 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
11697 return 0;
11698 }
11699 }
11700 return arm_record_unsupported_insn (arm_insn_r);
11701 }
11702
11703 /* Handling opcode 111 insns. */
11704
11705 static int
11706 arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
11707 {
11708 uint32_t op, op1_ebit, coproc, bits_24_25;
11709 struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
11710 struct regcache *reg_cache = arm_insn_r->regcache;
11711
11712 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
11713 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11714 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11715 op = bit (arm_insn_r->arm_insn, 4);
11716 bits_24_25 = bits (arm_insn_r->arm_insn, 24, 25);
11717
11718 /* Handle arm SWI/SVC system call instructions. */
11719 if (bits_24_25 == 0x3)
11720 {
11721 if (tdep->arm_syscall_record != NULL)
11722 {
11723 ULONGEST svc_operand, svc_number;
11724
11725 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
11726
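/* Illustrative: under OABI the SWI immediate encodes the syscall number as 0x900000 + NR (e.g. 0x900004 for write), while EABI uses SVC 0 with the number in r7. */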
11727 if (svc_operand) /* OABI. */
11728 svc_number = svc_operand - 0x900000;
11729 else /* EABI. */
11730 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
11731
11732 return tdep->arm_syscall_record (reg_cache, svc_number);
11733 }
11734 else
11735 {
11736 printf_unfiltered (_("no syscall record support\n"));
11737 return -1;
11738 }
11739 }
11740 else if (bits_24_25 == 0x02)
11741 {
11742 if (op)
11743 {
11744 if ((coproc & 0x0e) == 0x0a)
11745 {
11746 /* 8, 16, and 32-bit transfer */
11747 return arm_record_vdata_transfer_insn (arm_insn_r);
11748 }
11749 else
11750 {
11751 if (op1_ebit)
11752 {
11753 /* MRC, MRC2 */
11754 uint32_t record_buf[1];
11755
11756 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11757 if (record_buf[0] == 15)
11758 record_buf[0] = ARM_PS_REGNUM;
11759
11760 arm_insn_r->reg_rec_count = 1;
11761 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
11762 record_buf);
11763 return 0;
11764 }
11765 else
11766 {
11767 /* MCR, MCR2 */
11768 return -1;
11769 }
11770 }
11771 }
11772 else
11773 {
11774 if ((coproc & 0x0e) == 0x0a)
11775 {
11776 /* VFP data-processing instructions. */
11777 return arm_record_vfp_data_proc_insn (arm_insn_r);
11778 }
11779 else
11780 {
11781 /* CDP, CDP2 */
11782 return -1;
11783 }
11784 }
11785 }
11786 else
11787 {
11788 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 25);
11789
11790 if (op1 == 5)
11791 {
11792 if ((coproc & 0x0e) != 0x0a)
11793 {
11794 /* MRRC, MRRC2 */
11795 return -1;
11796 }
11797 }
11798 else if (op1 == 4 || op1 == 5)
11799 {
11800 if ((coproc & 0x0e) == 0x0a)
11801 {
11802 /* 64-bit transfers between ARM core and extension */
11803 return -1;
11804 }
11805 else if (op1 == 4)
11806 {
11807 /* MCRR, MCRR2 */
11808 return -1;
11809 }
11810 }
11811 else if (op1 == 0 || op1 == 1)
11812 {
11813 /* UNDEFINED */
11814 return -1;
11815 }
11816 else
11817 {
11818 if ((coproc & 0x0e) == 0x0a)
11819 {
11820 /* Extension register load/store */
11821 }
11822 else
11823 {
11824 /* STC, STC2, LDC, LDC2 */
11825 }
11826 return -1;
11827 }
11828 }
11829
11830 return -1;
11831 }
11832
11833 /* Handling opcode 000 insns. */
11834
11835 static int
11836 thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
11837 {
11838 uint32_t record_buf[8];
11839 uint32_t reg_src1 = 0;
11840
11841 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11842
11843 record_buf[0] = ARM_PS_REGNUM;
11844 record_buf[1] = reg_src1;
11845 thumb_insn_r->reg_rec_count = 2;
11846
11847 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11848
11849 return 0;
11850 }
11851
11852
11853 /* Handling opcode 001 insns. */
11854
11855 static int
11856 thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
11857 {
11858 uint32_t record_buf[8];
11859 uint32_t reg_src1 = 0;
11860
11861 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11862
11863 record_buf[0] = ARM_PS_REGNUM;
11864 record_buf[1] = reg_src1;
11865 thumb_insn_r->reg_rec_count = 2;
11866
11867 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11868
11869 return 0;
11870 }
11871
11872 /* Handling opcode 010 insns. */
11873
11874 static int
11875 thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
11876 {
11877 struct regcache *reg_cache = thumb_insn_r->regcache;
11878 uint32_t record_buf[8], record_buf_mem[8];
11879
11880 uint32_t reg_src1 = 0, reg_src2 = 0;
11881 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
11882
11883 ULONGEST u_regval[2] = {0};
11884
11885 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
11886
11887 if (bit (thumb_insn_r->arm_insn, 12))
11888 {
11889 /* Handle load/store register offset. */
11890 uint32_t opB = bits (thumb_insn_r->arm_insn, 9, 11);
11891
11892 if (in_inclusive_range (opB, 4U, 7U))
11893 {
11894 /* LDR(2), LDRB(2), LDRH(2), LDRSB, LDRSH. */
11895 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11896 record_buf[0] = reg_src1;
11897 thumb_insn_r->reg_rec_count = 1;
11898 }
11899 else if (in_inclusive_range (opB, 0U, 2U))
11900 {
11901 /* STR(2), STRB(2), STRH(2). */
11902 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11903 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
11904 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11905 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11906 if (0 == opB)
11907 record_buf_mem[0] = 4; /* STR (2). */
11908 else if (2 == opB)
11909 record_buf_mem[0] = 1; /* STRB (2). */
11910 else if (1 == opB)
11911 record_buf_mem[0] = 2; /* STRH (2). */
11912 record_buf_mem[1] = u_regval[0] + u_regval[1];
11913 thumb_insn_r->mem_rec_count = 1;
11914 }
11915 }
11916 else if (bit (thumb_insn_r->arm_insn, 11))
11917 {
11918 /* Handle load from literal pool. */
11919 /* LDR(3). */
11920 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11921 record_buf[0] = reg_src1;
11922 thumb_insn_r->reg_rec_count = 1;
11923 }
11924 else if (opcode1)
11925 {
11926 /* Special data instructions and branch and exchange */
11927 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
11928 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
11929 if ((3 == opcode2) && (!opcode3))
11930 {
11931 /* Branch with exchange. */
11932 record_buf[0] = ARM_PS_REGNUM;
11933 thumb_insn_r->reg_rec_count = 1;
11934 }
11935 else
11936 {
11937 /* Format 8; special data processing insns. */
11938 record_buf[0] = ARM_PS_REGNUM;
11939 record_buf[1] = (bit (thumb_insn_r->arm_insn, 7) << 3
11940 | bits (thumb_insn_r->arm_insn, 0, 2));
11941 thumb_insn_r->reg_rec_count = 2;
11942 }
11943 }
11944 else
11945 {
11946 /* Format 5; data processing insns. */
11947 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11948 if (bit (thumb_insn_r->arm_insn, 7))
11949 {
11950 reg_src1 = reg_src1 + 8;
11951 }
11952 record_buf[0] = ARM_PS_REGNUM;
11953 record_buf[1] = reg_src1;
11954 thumb_insn_r->reg_rec_count = 2;
11955 }
11956
11957 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11958 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11959 record_buf_mem);
11960
11961 return 0;
11962 }
11963
11964 /* Handling opcode 011 insns. */
11965
11966 static int
11967 thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
11968 {
11969 struct regcache *reg_cache = thumb_insn_r->regcache;
11970 uint32_t record_buf[8], record_buf_mem[8];
11971
11972 uint32_t reg_src1 = 0;
11973 uint32_t opcode = 0, immed_5 = 0;
11974
11975 ULONGEST u_regval = 0;
11976
11977 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11978
11979 if (opcode)
11980 {
11981 /* LDR(1). */
11982 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11983 record_buf[0] = reg_src1;
11984 thumb_insn_r->reg_rec_count = 1;
11985 }
11986 else
11987 {
11988 /* STR(1). */
11989 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11990 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11991 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11992 record_buf_mem[0] = 4;
11993 record_buf_mem[1] = u_regval + (immed_5 * 4);
11994 thumb_insn_r->mem_rec_count = 1;
11995 }
11996
11997 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11998 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11999 record_buf_mem);
12000
12001 return 0;
12002 }
12003
12004 /* Handling opcode 100 insns. */
12005
12006 static int
12007 thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
12008 {
12009 struct regcache *reg_cache = thumb_insn_r->regcache;
12010 uint32_t record_buf[8], record_buf_mem[8];
12011
12012 uint32_t reg_src1 = 0;
12013 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
12014
12015 ULONGEST u_regval = 0;
12016
12017 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12018
12019 if (3 == opcode)
12020 {
12021 /* LDR(4). */
12022 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12023 record_buf[0] = reg_src1;
12024 thumb_insn_r->reg_rec_count = 1;
12025 }
12026 else if (1 == opcode)
12027 {
12028 /* LDRH(1). */
12029 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12030 record_buf[0] = reg_src1;
12031 thumb_insn_r->reg_rec_count = 1;
12032 }
12033 else if (2 == opcode)
12034 {
12035 /* STR(3). */
12036 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
12037 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12038 record_buf_mem[0] = 4;
12039 record_buf_mem[1] = u_regval + (immed_8 * 4);
12040 thumb_insn_r->mem_rec_count = 1;
12041 }
12042 else if (0 == opcode)
12043 {
12044 /* STRH(1). */
12045 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
12046 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12047 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12048 record_buf_mem[0] = 2;
12049 record_buf_mem[1] = u_regval + (immed_5 * 2);
12050 thumb_insn_r->mem_rec_count = 1;
12051 }
12052
12053 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12054 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12055 record_buf_mem);
12056
12057 return 0;
12058 }
12059
12060 /* Handling opcode 101 insns. */
12061
12062 static int
12063 thumb_record_misc (insn_decode_record *thumb_insn_r)
12064 {
12065 struct regcache *reg_cache = thumb_insn_r->regcache;
12066
12067 uint32_t opcode = 0;
12068 uint32_t register_bits = 0, register_count = 0;
12069 uint32_t index = 0, start_address = 0;
12070 uint32_t record_buf[24], record_buf_mem[48];
12071 uint32_t reg_src1;
12072
12073 ULONGEST u_regval = 0;
12074
12075 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12076
12077 if (opcode == 0 || opcode == 1)
12078 {
12079 /* ADR and ADD (SP plus immediate) */
12080
12081 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12082 record_buf[0] = reg_src1;
12083 thumb_insn_r->reg_rec_count = 1;
12084 }
12085 else
12086 {
12087 /* Miscellaneous 16-bit instructions */
12088 uint32_t opcode2 = bits (thumb_insn_r->arm_insn, 8, 11);
12089
12090 switch (opcode2)
12091 {
12092 case 6:
12093 /* SETEND and CPS */
12094 break;
12095 case 0:
12096 /* ADD/SUB (SP plus immediate) */
12097 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12098 record_buf[0] = ARM_SP_REGNUM;
12099 thumb_insn_r->reg_rec_count = 1;
12100 break;
12101 case 1: /* fall through */
12102 case 3: /* fall through */
12103 case 9: /* fall through */
12104 case 11:
12105 /* CBNZ, CBZ */
12106 break;
12107 case 2:
12108 /* SXTH, SXTB, UXTH, UXTB */
12109 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
12110 thumb_insn_r->reg_rec_count = 1;
12111 break;
12112 case 4: /* fall through */
12113 case 5:
12114 /* PUSH. */
12115 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12116 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12117 while (register_bits)
12118 {
12119 if (register_bits & 0x00000001)
12120 register_count++;
12121 register_bits = register_bits >> 1;
12122 }
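/* Bit 8 is the M bit of PUSH: when set, LR is pushed as well, so one extra word is written below SP. */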
12123 start_address = u_regval -
12124 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
12125 thumb_insn_r->mem_rec_count = register_count;
12126 while (register_count)
12127 {
12128 record_buf_mem[(register_count * 2) - 1] = start_address;
12129 record_buf_mem[(register_count * 2) - 2] = 4;
12130 start_address = start_address + 4;
12131 register_count--;
12132 }
12133 record_buf[0] = ARM_SP_REGNUM;
12134 thumb_insn_r->reg_rec_count = 1;
12135 break;
12136 case 10:
12137 /* REV, REV16, REVSH */
12138 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
12139 thumb_insn_r->reg_rec_count = 1;
12140 break;
12141 case 12: /* fall through */
12142 case 13:
12143 /* POP. */
12144 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12145 while (register_bits)
12146 {
12147 if (register_bits & 0x00000001)
12148 record_buf[index++] = register_count;
12149 register_bits = register_bits >> 1;
12150 register_count++;
12151 }
12152 record_buf[index++] = ARM_PS_REGNUM;
12153 record_buf[index++] = ARM_SP_REGNUM;
12154 thumb_insn_r->reg_rec_count = index;
12155 break;
12156 case 0xe:
12157 /* BKPT insn. */
12158 /* Handle enhanced software breakpoint insn, BKPT. */
12159 /* The CPSR is changed so that execution resumes in ARM state with
12160 normal interrupts disabled, entering Abort mode. */
12161 /* The PC is set according to the high-vector configuration. */
12162 /* If the user hits the breakpoint and then reverse-executes, we need
12163 to restore the previous CPSR and Program Counter. */
12164 record_buf[0] = ARM_PS_REGNUM;
12165 record_buf[1] = ARM_LR_REGNUM;
12166 thumb_insn_r->reg_rec_count = 2;
12167 /* We need to save SPSR value, which is not yet done. */
12168 printf_unfiltered (_("Process record does not support instruction "
12169 "0x%0x at address %s.\n"),
12170 thumb_insn_r->arm_insn,
12171 paddress (thumb_insn_r->gdbarch,
12172 thumb_insn_r->this_addr));
12173 return -1;
12174
12175 case 0xf:
12176 /* If-Then, and hints */
12177 break;
12178 default:
12179 return -1;
12180 }
12181 }
12182
12183 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12184 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12185 record_buf_mem);
12186
12187 return 0;
12188 }
12189
12190 /* Handling opcode 110 insns. */
12191
12192 static int
12193 thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
12194 {
12195 struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
12196 struct regcache *reg_cache = thumb_insn_r->regcache;
12197
12198 uint32_t ret = 0; /* Function return value: -1: record failure; 0: success. */
12199 uint32_t reg_src1 = 0;
12200 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
12201 uint32_t index = 0, start_address = 0;
12202 uint32_t record_buf[24], record_buf_mem[48];
12203
12204 ULONGEST u_regval = 0;
12205
12206 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12207 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
12208
12209 if (1 == opcode2)
12210 {
12211
12212 /* LDMIA. */
12213 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12214 /* Get Rn. */
12215 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12216 while (register_bits)
12217 {
12218 if (register_bits & 0x00000001)
12219 record_buf[index++] = register_count;
12220 register_bits = register_bits >> 1;
12221 register_count++;
12222 }
12223 record_buf[index++] = reg_src1;
12224 thumb_insn_r->reg_rec_count = index;
12225 }
12226 else if (0 == opcode2)
12227 {
12228 /* Handle STMIA. */
12229 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12230 /* Get Rn. */
12231 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12232 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12233 while (register_bits)
12234 {
12235 if (register_bits & 0x00000001)
12236 register_count++;
12237 register_bits = register_bits >> 1;
12238 }
12239 start_address = u_regval;
12240 thumb_insn_r->mem_rec_count = register_count;
12241 while (register_count)
12242 {
12243 record_buf_mem[(register_count * 2) - 1] = start_address;
12244 record_buf_mem[(register_count * 2) - 2] = 4;
12245 start_address = start_address + 4;
12246 register_count--;
12247 }
12248 }
12249 else if (0x1F == opcode1)
12250 {
12251 /* Handle arm syscall insn. */
12252 if (tdep->arm_syscall_record != NULL)
12253 {
12254 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
12255 ret = tdep->arm_syscall_record (reg_cache, u_regval);
12256 }
12257 else
12258 {
12259 printf_unfiltered (_("no syscall record support\n"));
12260 return -1;
12261 }
12262 }
12263
12264 /* B(1), the conditional branch, is automatically taken care of in
12265 process_record, as the PC is saved there. */
12266
12267 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12268 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12269 record_buf_mem);
12270
12271 return ret;
12272 }
12273
12274 /* Handling opcode 111 insns. */
12275
12276 static int
12277 thumb_record_branch (insn_decode_record *thumb_insn_r)
12278 {
12279 uint32_t record_buf[8];
12280 uint32_t bits_h = 0;
12281
12282 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
12283
12284 if (2 == bits_h || 3 == bits_h)
12285 {
12286 /* BL */
12287 record_buf[0] = ARM_LR_REGNUM;
12288 thumb_insn_r->reg_rec_count = 1;
12289 }
12290 else if (1 == bits_h)
12291 {
12292 /* BLX(1). */
12293 record_buf[0] = ARM_PS_REGNUM;
12294 record_buf[1] = ARM_LR_REGNUM;
12295 thumb_insn_r->reg_rec_count = 2;
12296 }
12297
12298 /* B(2) is automatically taken care of in process_record, as the PC
12299 is saved there. */
12300
12301 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12302
12303 return 0;
12304 }
12305
12306 /* Handler for thumb2 load/store multiple instructions. */
12307
12308 static int
12309 thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
12310 {
12311 struct regcache *reg_cache = thumb2_insn_r->regcache;
12312
12313 uint32_t reg_rn, op;
12314 uint32_t register_bits = 0, register_count = 0;
12315 uint32_t index = 0, start_address = 0;
12316 uint32_t record_buf[24], record_buf_mem[48];
12317
12318 ULONGEST u_regval = 0;
12319
12320 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12321 op = bits (thumb2_insn_r->arm_insn, 23, 24);
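/* OP selects the form: 0 and 3 are SRS/RFE, 1 is increment-after (LDMIA/STMIA), 2 is decrement-before (LDMDB/STMDB). */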
12322
12323 if (0 == op || 3 == op)
12324 {
12325 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12326 {
12327 /* Handle RFE instruction. */
12328 record_buf[0] = ARM_PS_REGNUM;
12329 thumb2_insn_r->reg_rec_count = 1;
12330 }
12331 else
12332 {
12333 /* Handle SRS instruction after reading banked SP. */
12334 return arm_record_unsupported_insn (thumb2_insn_r);
12335 }
12336 }
12337 else if (1 == op || 2 == op)
12338 {
12339 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12340 {
12341 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
12342 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12343 while (register_bits)
12344 {
12345 if (register_bits & 0x00000001)
12346 record_buf[index++] = register_count;
12347
12348 register_count++;
12349 register_bits = register_bits >> 1;
12350 }
12351 record_buf[index++] = reg_rn;
12352 record_buf[index++] = ARM_PS_REGNUM;
12353 thumb2_insn_r->reg_rec_count = index;
12354 }
12355 else
12356 {
12357 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
12358 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12359 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12360 while (register_bits)
12361 {
12362 if (register_bits & 0x00000001)
12363 register_count++;
12364
12365 register_bits = register_bits >> 1;
12366 }
12367
12368 if (1 == op)
12369 {
12370 /* Start address calculation for STM/STMIA/STMEA. */
12371 start_address = u_regval;
12372 }
12373 else if (2 == op)
12374 {
12375 /* Start address calculation for STMDB/STMFD. */
12376 start_address = u_regval - register_count * 4;
12377 }
12378
12379 thumb2_insn_r->mem_rec_count = register_count;
12380 while (register_count)
12381 {
12382 record_buf_mem[register_count * 2 - 1] = start_address;
12383 record_buf_mem[register_count * 2 - 2] = 4;
12384 start_address = start_address + 4;
12385 register_count--;
12386 }
12387 record_buf[0] = reg_rn;
12388 record_buf[1] = ARM_PS_REGNUM;
12389 thumb2_insn_r->reg_rec_count = 2;
12390 }
12391 }
12392
12393 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12394 record_buf_mem);
12395 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12396 record_buf);
12397 return ARM_RECORD_SUCCESS;
12398 }
12399
12400 /* Handler for thumb2 load/store (dual/exclusive) and table branch
12401 instructions. */
12402
12403 static int
12404 thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
12405 {
12406 struct regcache *reg_cache = thumb2_insn_r->regcache;
12407
12408 uint32_t reg_rd, reg_rn, offset_imm;
12409 uint32_t reg_dest1, reg_dest2;
12410 uint32_t address, offset_addr;
12411 uint32_t record_buf[8], record_buf_mem[8];
12412 uint32_t op1, op2, op3;
12413
12414 ULONGEST u_regval[2];
12415
12416 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
12417 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
12418 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
12419
12420 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12421 {
12422 if (!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
12423 {
12424 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
12425 record_buf[0] = reg_dest1;
12426 record_buf[1] = ARM_PS_REGNUM;
12427 thumb2_insn_r->reg_rec_count = 2;
12428 }
12429
12430 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
12431 {
12432 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12433 record_buf[2] = reg_dest2;
12434 thumb2_insn_r->reg_rec_count = 3;
12435 }
12436 }
12437 else
12438 {
12439 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12440 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12441
12442 if (0 == op1 && 0 == op2)
12443 {
12444 /* Handle STREX. */
12445 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12446 address = u_regval[0] + (offset_imm * 4);
12447 record_buf_mem[0] = 4;
12448 record_buf_mem[1] = address;
12449 thumb2_insn_r->mem_rec_count = 1;
12450 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12451 record_buf[0] = reg_rd;
12452 thumb2_insn_r->reg_rec_count = 1;
12453 }
12454 else if (1 == op1 && 0 == op2)
12455 {
12456 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12457 record_buf[0] = reg_rd;
12458 thumb2_insn_r->reg_rec_count = 1;
12459 address = u_regval[0];
12460 record_buf_mem[1] = address;
12461
12462 if (4 == op3)
12463 {
12464 /* Handle STREXB. */
12465 record_buf_mem[0] = 1;
12466 thumb2_insn_r->mem_rec_count = 1;
12467 }
12468 else if (5 == op3)
12469 {
12470 /* Handle STREXH. */
12471 record_buf_mem[0] = 2;
12472 thumb2_insn_r->mem_rec_count = 1;
12473 }
12474 else if (7 == op3)
12475 {
12476 /* Handle STREXD. */
12477 address = u_regval[0];
12478 record_buf_mem[0] = 4;
12479 record_buf_mem[2] = 4;
12480 record_buf_mem[3] = address + 4;
12481 thumb2_insn_r->mem_rec_count = 2;
12482 }
12483 }
12484 else
12485 {
12486 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12487
12488 if (bit (thumb2_insn_r->arm_insn, 24))
12489 {
12490 if (bit (thumb2_insn_r->arm_insn, 23))
12491 offset_addr = u_regval[0] + (offset_imm * 4);
12492 else
12493 offset_addr = u_regval[0] - (offset_imm * 4);
12494
12495 address = offset_addr;
12496 }
12497 else
12498 address = u_regval[0];
12499
12500 record_buf_mem[0] = 4;
12501 record_buf_mem[1] = address;
12502 record_buf_mem[2] = 4;
12503 record_buf_mem[3] = address + 4;
12504 thumb2_insn_r->mem_rec_count = 2;
12505 record_buf[0] = reg_rn;
12506 thumb2_insn_r->reg_rec_count = 1;
12507 }
12508 }
12509
12510 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12511 record_buf);
12512 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12513 record_buf_mem);
12514 return ARM_RECORD_SUCCESS;
12515 }
12516
12517 /* Handler for thumb2 data processing (shift register and modified immediate)
12518 instructions. */
12519
12520 static int
12521 thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
12522 {
12523 uint32_t reg_rd, op;
12524 uint32_t record_buf[8];
12525
12526 op = bits (thumb2_insn_r->arm_insn, 21, 24);
12527 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12528
12529 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
12530 {
12531 record_buf[0] = ARM_PS_REGNUM;
12532 thumb2_insn_r->reg_rec_count = 1;
12533 }
12534 else
12535 {
12536 record_buf[0] = reg_rd;
12537 record_buf[1] = ARM_PS_REGNUM;
12538 thumb2_insn_r->reg_rec_count = 2;
12539 }
12540
12541 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12542 record_buf);
12543 return ARM_RECORD_SUCCESS;
12544 }
12545
12546 /* Generic handler for thumb2 instructions which affect destination and PS
12547 registers. */
12548
12549 static int
12550 thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
12551 {
12552 uint32_t reg_rd;
12553 uint32_t record_buf[8];
12554
12555 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12556
12557 record_buf[0] = reg_rd;
12558 record_buf[1] = ARM_PS_REGNUM;
12559 thumb2_insn_r->reg_rec_count = 2;
12560
12561 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12562 record_buf);
12563 return ARM_RECORD_SUCCESS;
12564 }
12565
12566 /* Handler for thumb2 branch and miscellaneous control instructions. */
12567
12568 static int
12569 thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
12570 {
12571 uint32_t op, op1, op2;
12572 uint32_t record_buf[8];
12573
12574 op = bits (thumb2_insn_r->arm_insn, 20, 26);
12575 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
12576 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12577
12578 /* Handle MSR insn. */
12579 if (!(op1 & 0x2) && 0x38 == op)
12580 {
12581 if (!(op2 & 0x3))
12582 {
12583 /* CPSR is going to be changed. */
12584 record_buf[0] = ARM_PS_REGNUM;
12585 thumb2_insn_r->reg_rec_count = 1;
12586 }
12587 else
12588 {
12589 arm_record_unsupported_insn (thumb2_insn_r);
12590 return -1;
12591 }
12592 }
12593 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
12594 {
12595 /* BLX. */
12596 record_buf[0] = ARM_PS_REGNUM;
12597 record_buf[1] = ARM_LR_REGNUM;
12598 thumb2_insn_r->reg_rec_count = 2;
12599 }
12600
12601 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12602 record_buf);
12603 return ARM_RECORD_SUCCESS;
12604 }
12605
12606 /* Handler for thumb2 store single data item instructions. */
12607
12608 static int
12609 thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
12610 {
12611 struct regcache *reg_cache = thumb2_insn_r->regcache;
12612
12613 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
12614 uint32_t address, offset_addr;
12615 uint32_t record_buf[8], record_buf_mem[8];
12616 uint32_t op1, op2;
12617
12618 ULONGEST u_regval[2];
12619
12620 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
12621 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
12622 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12623 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12624
12625 if (bit (thumb2_insn_r->arm_insn, 23))
12626 {
12627 /* T2 encoding. */
12628 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
12629 offset_addr = u_regval[0] + offset_imm;
12630 address = offset_addr;
12631 }
12632 else
12633 {
12634 /* T3 encoding. */
12635 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
12636 {
12637 /* Handle STRB (register). */
12638 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
12639 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
12640 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
12641 offset_addr = u_regval[1] << shift_imm;
12642 address = u_regval[0] + offset_addr;
12643 }
12644 else
12645 {
12646 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12647 if (bit (thumb2_insn_r->arm_insn, 10))
12648 {
12649 if (bit (thumb2_insn_r->arm_insn, 9))
12650 offset_addr = u_regval[0] + offset_imm;
12651 else
12652 offset_addr = u_regval[0] - offset_imm;
12653
12654 address = offset_addr;
12655 }
12656 else
12657 address = u_regval[0];
12658 }
12659 }
12660
12661 switch (op1)
12662 {
12663 /* Store byte instructions. */
12664 case 4:
12665 case 0:
12666 record_buf_mem[0] = 1;
12667 break;
12668 /* Store half word instructions. */
12669 case 1:
12670 case 5:
12671 record_buf_mem[0] = 2;
12672 break;
12673 /* Store word instructions. */
12674 case 2:
12675 case 6:
12676 record_buf_mem[0] = 4;
12677 break;
12678
12679 default:
12680 gdb_assert_not_reached ("no decoding pattern found");
12681 break;
12682 }
12683
12684 record_buf_mem[1] = address;
12685 thumb2_insn_r->mem_rec_count = 1;
12686 record_buf[0] = reg_rn;
12687 thumb2_insn_r->reg_rec_count = 1;
12688
12689 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12690 record_buf);
12691 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12692 record_buf_mem);
12693 return ARM_RECORD_SUCCESS;
12694 }
12695
12696 /* Handler for thumb2 load memory hints instructions. */
12697
12698 static int
12699 thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
12700 {
12701 uint32_t record_buf[8];
12702 uint32_t reg_rt, reg_rn;
12703
12704 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
12705 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12706
12707 if (ARM_PC_REGNUM != reg_rt)
12708 {
12709 record_buf[0] = reg_rt;
12710 record_buf[1] = reg_rn;
12711 record_buf[2] = ARM_PS_REGNUM;
12712 thumb2_insn_r->reg_rec_count = 3;
12713
12714 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12715 record_buf);
12716 return ARM_RECORD_SUCCESS;
12717 }
12718
12719 return ARM_RECORD_FAILURE;
12720 }
12721
12722 /* Handler for thumb2 load word instructions. */
12723
12724 static int
12725 thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
12726 {
12727 uint32_t record_buf[8];
12728
12729 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
12730 record_buf[1] = ARM_PS_REGNUM;
12731 thumb2_insn_r->reg_rec_count = 2;
12732
12733 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12734 record_buf);
12735 return ARM_RECORD_SUCCESS;
12736 }
12737
12738 /* Handler for thumb2 long multiply, long multiply accumulate, and
12739 divide instructions. */
12740
12741 static int
12742 thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
12743 {
12744 uint32_t opcode1 = 0, opcode2 = 0;
12745 uint32_t record_buf[8];
12746
12747 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
12748 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
12749
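/* opcode1 (bits 20-22) selects between the long multiplies and the divides;
   opcode2 (bits 4-7) distinguishes the variants within each group.  */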
12750 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
12751 {
12752 /* Handle SMULL, UMULL, SMLAL, UMLAL and the other long
12753    multiply-accumulate variants. */
12754 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12755 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12756 record_buf[2] = ARM_PS_REGNUM;
12757 thumb2_insn_r->reg_rec_count = 3;
12758 }
12759 else if (1 == opcode1 || 3 == opcode1)
12760 {
12761 /* Handle SDIV and UDIV. */
12762 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12763 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12764 record_buf[2] = ARM_PS_REGNUM;
12765 thumb2_insn_r->reg_rec_count = 3;
12766 }
12767 else
12768 return ARM_RECORD_FAILURE;
12769
12770 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12771 record_buf);
12772 return ARM_RECORD_SUCCESS;
12773 }
12774
12775 /* Record handler for thumb32 coprocessor instructions. */
12776
12777 static int
12778 thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
12779 {
12780 if (bit (thumb2_insn_r->arm_insn, 25))
12781 return arm_record_coproc_data_proc (thumb2_insn_r);
12782 else
12783 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
12784 }
12785
12786 /* Record handler for Advanced SIMD structure load/store instructions. */
12787
12788 static int
12789 thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
12790 {
12791 struct regcache *reg_cache = thumb2_insn_r->regcache;
12792 uint32_t l_bit, a_bit, b_bits;
12793 uint32_t record_buf[128], record_buf_mem[128];
12794 uint32_t reg_rn, reg_vd, address, f_elem;
12795 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
12796 uint8_t f_ebytes;
12797
12798 l_bit = bit (thumb2_insn_r->arm_insn, 21);
12799 a_bit = bit (thumb2_insn_r->arm_insn, 23);
12800 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
12801 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12802 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
12803 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
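/* Element size in bytes, and number of elements per 64-bit D register.  */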
12804 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
12805 f_elem = 8 / f_ebytes;
12806
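/* L bit clear: store (VSTn); L bit set: load (VLDn).  */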
12807 if (!l_bit)
12808 {
12809 ULONGEST u_regval = 0;
12810 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12811 address = u_regval;
12812
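/* A bit clear: multiple-structure forms; A bit set: single element to one
   or all lanes.  */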
12813 if (!a_bit)
12814 {
12815 /* Handle VST1. */
12816 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12817 {
12818 if (b_bits == 0x07)
12819 bf_regs = 1;
12820 else if (b_bits == 0x0a)
12821 bf_regs = 2;
12822 else if (b_bits == 0x06)
12823 bf_regs = 3;
12824 else if (b_bits == 0x02)
12825 bf_regs = 4;
12826 else
12827 bf_regs = 0;
12828
12829 for (index_r = 0; index_r < bf_regs; index_r++)
12830 {
12831 for (index_e = 0; index_e < f_elem; index_e++)
12832 {
12833 record_buf_mem[index_m++] = f_ebytes;
12834 record_buf_mem[index_m++] = address;
12835 address = address + f_ebytes;
12836 thumb2_insn_r->mem_rec_count += 1;
12837 }
12838 }
12839 }
12840 /* Handle VST2. */
12841 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12842 {
12843 if (b_bits == 0x09 || b_bits == 0x08)
12844 bf_regs = 1;
12845 else if (b_bits == 0x03)
12846 bf_regs = 2;
12847 else
12848 bf_regs = 0;
12849
12850 for (index_r = 0; index_r < bf_regs; index_r++)
12851 for (index_e = 0; index_e < f_elem; index_e++)
12852 {
12853 for (loop_t = 0; loop_t < 2; loop_t++)
12854 {
12855 record_buf_mem[index_m++] = f_ebytes;
12856 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12857 thumb2_insn_r->mem_rec_count += 1;
12858 }
12859 address = address + (2 * f_ebytes);
12860 }
12861 }
12862 /* Handle VST3. */
12863 else if ((b_bits & 0x0e) == 0x04)
12864 {
12865 for (index_e = 0; index_e < f_elem; index_e++)
12866 {
12867 for (loop_t = 0; loop_t < 3; loop_t++)
12868 {
12869 record_buf_mem[index_m++] = f_ebytes;
12870 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12871 thumb2_insn_r->mem_rec_count += 1;
12872 }
12873 address = address + (3 * f_ebytes);
12874 }
12875 }
12876 /* Handle VST4. */
12877 else if (!(b_bits & 0x0e))
12878 {
12879 for (index_e = 0; index_e < f_elem; index_e++)
12880 {
12881 for (loop_t = 0; loop_t < 4; loop_t++)
12882 {
12883 record_buf_mem[index_m++] = f_ebytes;
12884 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12885 thumb2_insn_r->mem_rec_count += 1;
12886 }
12887 address = address + (4 * f_ebytes);
12888 }
12889 }
12890 }
12891 else
12892 {
12893 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
12894
12895 if (bft_size == 0x00)
12896 f_ebytes = 1;
12897 else if (bft_size == 0x01)
12898 f_ebytes = 2;
12899 else if (bft_size == 0x02)
12900 f_ebytes = 4;
12901 else
12902 f_ebytes = 0;
12903
12904 /* Handle VST1. */
12905 if (!(b_bits & 0x0b) || b_bits == 0x08)
12906 thumb2_insn_r->mem_rec_count = 1;
12907 /* Handle VST2. */
12908 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
12909 thumb2_insn_r->mem_rec_count = 2;
12910 /* Handle VST3. */
12911 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
12912 thumb2_insn_r->mem_rec_count = 3;
12913 /* Handle VST4. */
12914 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
12915 thumb2_insn_r->mem_rec_count = 4;
12916
12917 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
12918 {
12919 record_buf_mem[index_m * 2] = f_ebytes;
12920 record_buf_mem[index_m * 2 + 1] = address + (index_m * f_ebytes);
12921 }
12922 }
12923 }
12924 else
12925 {
12926 if (!a_bit)
12927 {
12928 /* Handle VLD1. */
12929 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12930 thumb2_insn_r->reg_rec_count = 1;
12931 /* Handle VLD2. */
12932 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12933 thumb2_insn_r->reg_rec_count = 2;
12934 /* Handle VLD3. */
12935 else if ((b_bits & 0x0e) == 0x04)
12936 thumb2_insn_r->reg_rec_count = 3;
12937 /* Handle VLD4. */
12938 else if (!(b_bits & 0x0e))
12939 thumb2_insn_r->reg_rec_count = 4;
12940 }
12941 else
12942 {
12943 /* Handle VLD1. */
12944 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
12945 thumb2_insn_r->reg_rec_count = 1;
12946 /* Handle VLD2. */
12947 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
12948 thumb2_insn_r->reg_rec_count = 2;
12949 /* Handle VLD3. */
12950 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
12951 thumb2_insn_r->reg_rec_count = 3;
12952 /* Handle VLD4. */
12953 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
12954 thumb2_insn_r->reg_rec_count = 4;
12955
12956 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
12957 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
12958 }
12959 }
12960
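/* When Rm is not the PC the base register Rn is written back, so record
   it as well.  */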
12961 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
12962 {
12963 record_buf[index_r] = reg_rn;
12964 thumb2_insn_r->reg_rec_count += 1;
12965 }
12966
12967 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12968 record_buf);
12969 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12970 record_buf_mem);
12971 return ARM_RECORD_SUCCESS;
12972 }
12973
12974 /* Decodes thumb2 instruction type and invokes its record handler. */
12975
12976 static unsigned int
12977 thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
12978 {
12979 uint32_t op, op1, op2;
12980
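/* op1 (bits 27-28) selects the major Thumb2 instruction group; op and op2
   refine the decoding within that group.  */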
12981 op = bit (thumb2_insn_r->arm_insn, 15);
12982 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
12983 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
12984
12985 if (op1 == 0x01)
12986 {
12987 if (!(op2 & 0x64))
12988 {
12989 /* Load/store multiple instruction. */
12990 return thumb2_record_ld_st_multiple (thumb2_insn_r);
12991 }
12992 else if ((op2 & 0x64) == 0x4)
12993 {
12994 /* Load/store (dual/exclusive) and table branch instruction. */
12995 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
12996 }
12997 else if ((op2 & 0x60) == 0x20)
12998 {
12999 /* Data-processing (shifted register). */
13000 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
13001 }
13002 else if (op2 & 0x40)
13003 {
13004 /* Co-processor instructions. */
13005 return thumb2_record_coproc_insn (thumb2_insn_r);
13006 }
13007 }
13008 else if (op1 == 0x02)
13009 {
13010 if (op)
13011 {
13012 /* Branches and miscellaneous control instructions. */
13013 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
13014 }
13015 else if (op2 & 0x20)
13016 {
13017 /* Data-processing (plain binary immediate) instruction. */
13018 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13019 }
13020 else
13021 {
13022 /* Data-processing (modified immediate). */
13023 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
13024 }
13025 }
13026 else if (op1 == 0x03)
13027 {
13028 if (!(op2 & 0x71))
13029 {
13030 /* Store single data item. */
13031 return thumb2_record_str_single_data (thumb2_insn_r);
13032 }
13033 else if (!((op2 & 0x71) ^ 0x10))
13034 {
13035 /* Advanced SIMD or structure load/store instructions. */
13036 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
13037 }
13038 else if (!((op2 & 0x67) ^ 0x01))
13039 {
13040 /* Load byte, memory hints instruction. */
13041 return thumb2_record_ld_mem_hints (thumb2_insn_r);
13042 }
13043 else if (!((op2 & 0x67) ^ 0x03))
13044 {
13045 /* Load halfword, memory hints instruction. */
13046 return thumb2_record_ld_mem_hints (thumb2_insn_r);
13047 }
13048 else if (!((op2 & 0x67) ^ 0x05))
13049 {
13050 /* Load word instruction. */
13051 return thumb2_record_ld_word (thumb2_insn_r);
13052 }
13053 else if (!((op2 & 0x70) ^ 0x20))
13054 {
13055 /* Data-processing (register) instruction. */
13056 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13057 }
13058 else if (!((op2 & 0x78) ^ 0x30))
13059 {
13060 /* Multiply, multiply accumulate, abs diff instruction. */
13061 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13062 }
13063 else if (!((op2 & 0x78) ^ 0x38))
13064 {
13065 /* Long multiply, long multiply accumulate, and divide. */
13066 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
13067 }
13068 else if (op2 & 0x40)
13069 {
13070 /* Co-processor instructions. */
13071 return thumb2_record_coproc_insn (thumb2_insn_r);
13072 }
13073 }
13074
13075 return -1;
13076 }
13077
13078 namespace {
13079 /* Abstract memory reader. */
13080
13081 class abstract_memory_reader
13082 {
13083 public:
13084 /* Read LEN bytes of target memory at address MEMADDR, placing the
13085 results in GDB's memory at BUF. Return true on success. */
13086
13087 virtual bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) = 0;
13088 };
13089
13090 /* Instruction reader from real target. */
13091
13092 class instruction_reader : public abstract_memory_reader
13093 {
13094 public:
13095 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
13096 {
13097 if (target_read_memory (memaddr, buf, len))
13098 return false;
13099 else
13100 return true;
13101 }
13102 };
13103
13104 } // namespace
13105
13106 /* Extract an arm/thumb/thumb2 instruction of INSN_SIZE bytes.  Return 0 on
13107    success and a positive value on failure. */
13108
13109 static int
13110 extract_arm_insn (abstract_memory_reader& reader,
13111 insn_decode_record *insn_record, uint32_t insn_size)
13112 {
13113 gdb_byte buf[insn_size];
13114
13115 memset (&buf[0], 0, insn_size);
13116
13117 if (!reader.read (insn_record->this_addr, buf, insn_size))
13118 return 1;
13119 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
13120 insn_size,
13121 gdbarch_byte_order_for_code (insn_record->gdbarch));
13122 return 0;
13123 }
13124
13125 typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
13126
13127 /* Decode an arm/thumb insn depending on its condition codes and opcodes,
13128    and dispatch it to the matching record handler. */
13129
13130 static int
13131 decode_insn (abstract_memory_reader &reader, insn_decode_record *arm_record,
13132 record_type_t record_type, uint32_t insn_size)
13133 {
13134
13135 /* Bits 25, 26 and 27 (counting from 0) decode the type of an ARM
13136    instruction. */
13137 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
13138 {
13139 arm_record_data_proc_misc_ld_str, /* 000. */
13140 arm_record_data_proc_imm, /* 001. */
13141 arm_record_ld_st_imm_offset, /* 010. */
13142 arm_record_ld_st_reg_offset, /* 011. */
13143 arm_record_ld_st_multiple, /* 100. */
13144 arm_record_b_bl, /* 101. */
13145 arm_record_asimd_vfp_coproc, /* 110. */
13146 arm_record_coproc_data_proc /* 111. */
13147 };
13148
13149 /* Bits 13, 14 and 15 (counting from 0) decode the type of a Thumb
13150    instruction. */
13151 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
13152 {
13153 thumb_record_shift_add_sub, /* 000. */
13154 thumb_record_add_sub_cmp_mov, /* 001. */
13155 thumb_record_ld_st_reg_offset, /* 010. */
13156 thumb_record_ld_st_imm_offset, /* 011. */
13157 thumb_record_ld_st_stack, /* 100. */
13158 thumb_record_misc, /* 101. */
13159 thumb_record_ldm_stm_swi, /* 110. */
13160 thumb_record_branch /* 111. */
13161 };
13162
13163 int ret = 0; /* Return value: -1 on failure, 0 on success. */
13164 uint32_t insn_id = 0;
13165
13166 if (extract_arm_insn (reader, arm_record, insn_size))
13167 {
13168 if (record_debug)
13169 {
13170 printf_unfiltered (_("Process record: error reading memory at "
13171 "addr %s len = %d.\n"),
13172 paddress (arm_record->gdbarch,
13173 arm_record->this_addr), insn_size);
13174 }
13175 return -1;
13176 }
13177 else if (ARM_RECORD == record_type)
13178 {
13179 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
13180 insn_id = bits (arm_record->arm_insn, 25, 27);
13181
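/* A condition field of 0xf marks the unconditional (extension space)
   encodings.  */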
13182 if (arm_record->cond == 0xf)
13183 ret = arm_record_extension_space (arm_record);
13184 else
13185 {
13186 /* The insn has not fallen into the extension space, so decode
13187    it with the standard ARM handlers. */
13188 ret = arm_handle_insn[insn_id] (arm_record);
13189 }
13190 if (ret != ARM_RECORD_SUCCESS)
13191 {
13192 arm_record_unsupported_insn (arm_record);
13193 ret = -1;
13194 }
13195 }
13196 else if (THUMB_RECORD == record_type)
13197 {
13198 /* Thumb insns do not have condition codes, so set the condition to -1. */
13199 arm_record->cond = -1;
13200 insn_id = bits (arm_record->arm_insn, 13, 15);
13201 ret = thumb_handle_insn[insn_id] (arm_record);
13202 if (ret != ARM_RECORD_SUCCESS)
13203 {
13204 arm_record_unsupported_insn (arm_record);
13205 ret = -1;
13206 }
13207 }
13208 else if (THUMB2_RECORD == record_type)
13209 {
13210 /* Thumb insns do not have condition codes, so set the condition to -1. */
13211 arm_record->cond = -1;
13212
13213 /* Swap the first halfword of the 32-bit Thumb instruction with the second. */
13214 arm_record->arm_insn
13215 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
13216
13217 ret = thumb2_record_decode_insn_handler (arm_record);
13218
13219 if (ret != ARM_RECORD_SUCCESS)
13220 {
13221 arm_record_unsupported_insn (arm_record);
13222 ret = -1;
13223 }
13224 }
13225 else
13226 {
13227 /* An unknown record type should never reach this point. */
13228 gdb_assert_not_reached ("not a valid instruction, could not decode");
13229 }
13230
13231 return ret;
13232 }
13233
13234 #if GDB_SELF_TEST
13235 namespace selftests {
13236
13237 /* Provide both 16-bit and 32-bit thumb instructions. */
13238
13239 class instruction_reader_thumb : public abstract_memory_reader
13240 {
13241 public:
13242 template<size_t SIZE>
13243 instruction_reader_thumb (enum bfd_endian endian,
13244 const uint16_t (&insns)[SIZE])
13245 : m_endian (endian), m_insns (insns), m_insns_size (SIZE)
13246 {}
13247
13248 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
13249 {
13250 SELF_CHECK (len == 4 || len == 2);
13251 SELF_CHECK (memaddr % 2 == 0);
13252 SELF_CHECK ((memaddr / 2) < m_insns_size);
13253
13254 store_unsigned_integer (buf, 2, m_endian, m_insns[memaddr / 2]);
13255 if (len == 4)
13256 {
13257 store_unsigned_integer (&buf[2], 2, m_endian,
13258 m_insns[memaddr / 2 + 1]);
13259 }
13260 return true;
13261 }
13262
13263 private:
13264 enum bfd_endian m_endian;
13265 const uint16_t *m_insns;
13266 size_t m_insns_size;
13267 };
13268
13269 static void
13270 arm_record_test (void)
13271 {
13272 struct gdbarch_info info;
13273 gdbarch_info_init (&info);
13274 info.bfd_arch_info = bfd_scan_arch ("arm");
13275
13276 struct gdbarch *gdbarch = gdbarch_find_by_info (info);
13277
13278 SELF_CHECK (gdbarch != NULL);
13279
13280 /* 16-bit Thumb instructions. */
13281 {
13282 insn_decode_record arm_record;
13283
13284 memset (&arm_record, 0, sizeof (insn_decode_record));
13285 arm_record.gdbarch = gdbarch;
13286
13287 static const uint16_t insns[] = {
13288 /* db b2 uxtb r3, r3 */
13289 0xb2db,
13290 /* cd 58 ldr r5, [r1, r3] */
13291 0x58cd,
13292 };
13293
13294 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
13295 instruction_reader_thumb reader (endian, insns);
13296 int ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13297 THUMB_INSN_SIZE_BYTES);
13298
13299 SELF_CHECK (ret == 0);
13300 SELF_CHECK (arm_record.mem_rec_count == 0);
13301 SELF_CHECK (arm_record.reg_rec_count == 1);
13302 SELF_CHECK (arm_record.arm_regs[0] == 3);
13303
13304 arm_record.this_addr += 2;
13305 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13306 THUMB_INSN_SIZE_BYTES);
13307
13308 SELF_CHECK (ret == 0);
13309 SELF_CHECK (arm_record.mem_rec_count == 0);
13310 SELF_CHECK (arm_record.reg_rec_count == 1);
13311 SELF_CHECK (arm_record.arm_regs[0] == 5);
13312 }
13313
13314 /* 32-bit Thumb-2 instructions. */
13315 {
13316 insn_decode_record arm_record;
13317
13318 memset (&arm_record, 0, sizeof (insn_decode_record));
13319 arm_record.gdbarch = gdbarch;
13320
13321 static const uint16_t insns[] = {
13322 /* 1d ee 70 7f mrc 15, 0, r7, cr13, cr0, {3} */
13323 0xee1d, 0x7f70,
13324 };
13325
13326 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
13327 instruction_reader_thumb reader (endian, insns);
13328 int ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
13329 THUMB2_INSN_SIZE_BYTES);
13330
13331 SELF_CHECK (ret == 0);
13332 SELF_CHECK (arm_record.mem_rec_count == 0);
13333 SELF_CHECK (arm_record.reg_rec_count == 1);
13334 SELF_CHECK (arm_record.arm_regs[0] == 7);
13335 }
13336 }
13337 } // namespace selftests
13338 #endif /* GDB_SELF_TEST */
13339
13340 /* Cleans up local record registers and memory allocations. */
13341
13342 static void
13343 deallocate_reg_mem (insn_decode_record *record)
13344 {
13345 xfree (record->arm_regs);
13346 xfree (record->arm_mems);
13347 }
13348
13349
13350 /* Parse the current instruction and record the values of the registers and
13351    memory that it will change to "record_arch_list".  Return -1 if something
13352    goes wrong. */
13353
13354 int
13355 arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
13356 CORE_ADDR insn_addr)
13357 {
13358
13359 uint32_t no_of_rec = 0;
13360 uint32_t ret = 0; /* return value: -1:record failure ; 0:success */
13361 ULONGEST t_bit = 0, insn_id = 0;
13362
13363 ULONGEST u_regval = 0;
13364
13365 insn_decode_record arm_record;
13366
13367 memset (&arm_record, 0, sizeof (insn_decode_record));
13368 arm_record.regcache = regcache;
13369 arm_record.this_addr = insn_addr;
13370 arm_record.gdbarch = gdbarch;
13371
13372
13373 if (record_debug > 1)
13374 {
13375 fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
13376 "addr = %s\n",
13377 paddress (gdbarch, arm_record.this_addr));
13378 }
13379
13380 instruction_reader reader;
13381 if (extract_arm_insn (reader, &arm_record, 2))
13382 {
13383 if (record_debug)
13384 {
13385 printf_unfiltered (_("Process record: error reading memory at "
13386 "addr %s len = %d.\n"),
13387 paddress (arm_record.gdbarch,
13388 arm_record.this_addr), 2);
13389 }
13390 return -1;
13391 }
13392
13393 /* Check whether the insn is a Thumb or an ARM one. */
13394
13395 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
13396 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
13397
13398
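/* The T bit of the CPSR is set while the inferior is executing Thumb
   code.  */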
13399 if (!(u_regval & t_bit))
13400 {
13401 /* We are decoding an ARM insn. */
13402 ret = decode_insn (reader, &arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
13403 }
13404 else
13405 {
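/* 16-bit opcodes 0b11101, 0b11110 and 0b11111 introduce a 32-bit Thumb-2
   instruction.  */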
13406 insn_id = bits (arm_record.arm_insn, 11, 15);
13407 /* Is it a 32-bit Thumb-2 insn? */
13408 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
13409 {
13410 ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
13411 THUMB2_INSN_SIZE_BYTES);
13412 }
13413 else
13414 {
13415 /* We are decoding a Thumb insn. */
13416 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13417 THUMB_INSN_SIZE_BYTES);
13418 }
13419 }
13420
13421 if (0 == ret)
13422 {
13423 /* Record registers. */
13424 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
13425 if (arm_record.arm_regs)
13426 {
13427 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
13428 {
13429 if (record_full_arch_list_add_reg
13430 (arm_record.regcache , arm_record.arm_regs[no_of_rec]))
13431 ret = -1;
13432 }
13433 }
13434 /* Record memories. */
13435 if (arm_record.arm_mems)
13436 {
13437 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
13438 {
13439 if (record_full_arch_list_add_mem
13440 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
13441 arm_record.arm_mems[no_of_rec].len))
13442 ret = -1;
13443 }
13444 }
13445
13446 if (record_full_arch_list_add_end ())
13447 ret = -1;
13448 }
13449
13450
13451 deallocate_reg_mem (&arm_record);
13452
13453 return ret;
13454 }
13455
13456 /* See arm-tdep.h. */
13457
13458 const target_desc *
13459 arm_read_description (arm_fp_type fp_type)
13460 {
13461 struct target_desc *tdesc = tdesc_arm_list[fp_type];
13462
13463 if (tdesc == nullptr)
13464 {
13465 tdesc = arm_create_target_description (fp_type);
13466 tdesc_arm_list[fp_type] = tdesc;
13467 }
13468
13469 return tdesc;
13470 }
13471
13472 /* See arm-tdep.h. */
13473
13474 const target_desc *
13475 arm_read_mprofile_description (arm_m_profile_type m_type)
13476 {
13477 struct target_desc *tdesc = tdesc_arm_mprofile_list[m_type];
13478
13479 if (tdesc == nullptr)
13480 {
13481 tdesc = arm_create_mprofile_target_description (m_type);
13482 tdesc_arm_mprofile_list[m_type] = tdesc;
13483 }
13484
13485 return tdesc;
13486 }