gdb.base/share-env-with-gdbserver.exp C++ify
[deliverable/binutils-gdb.git] / gdb / arm-tdep.c
1 /* Common target dependent code for GDB on ARM systems.
2
3 Copyright (C) 1988-2020 Free Software Foundation, Inc.
4
5 This file is part of GDB.
6
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
11
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
19
20 #include "defs.h"
21
22 #include <ctype.h> /* XXX for isupper (). */
23
24 #include "frame.h"
25 #include "inferior.h"
26 #include "infrun.h"
27 #include "gdbcmd.h"
28 #include "gdbcore.h"
29 #include "dis-asm.h" /* For register styles. */
30 #include "disasm.h"
31 #include "regcache.h"
32 #include "reggroups.h"
33 #include "target-float.h"
34 #include "value.h"
35 #include "arch-utils.h"
36 #include "osabi.h"
37 #include "frame-unwind.h"
38 #include "frame-base.h"
39 #include "trad-frame.h"
40 #include "objfiles.h"
41 #include "dwarf2/frame.h"
42 #include "gdbtypes.h"
43 #include "prologue-value.h"
44 #include "remote.h"
45 #include "target-descriptions.h"
46 #include "user-regs.h"
47 #include "observable.h"
48 #include "count-one-bits.h"
49
50 #include "arch/arm.h"
51 #include "arch/arm-get-next-pcs.h"
52 #include "arm-tdep.h"
53 #include "gdb/sim-arm.h"
54
55 #include "elf-bfd.h"
56 #include "coff/internal.h"
57 #include "elf/arm.h"
58
59 #include "record.h"
60 #include "record-full.h"
61 #include <algorithm>
62
63 #include "producer.h"
64
65 #if GDB_SELF_TEST
66 #include "gdbsupport/selftest.h"
67 #endif
68
69 static bool arm_debug;
70
71 /* Macros for setting and testing a bit in a minimal symbol that marks
72 it as Thumb function. The MSB of the minimal symbol's "info" field
73 is used for this purpose.
74
75 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
76 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
77
78 #define MSYMBOL_SET_SPECIAL(msym) \
79 MSYMBOL_TARGET_FLAG_1 (msym) = 1
80
81 #define MSYMBOL_IS_SPECIAL(msym) \
82 MSYMBOL_TARGET_FLAG_1 (msym)
83
84 struct arm_mapping_symbol
85 {
86 CORE_ADDR value;
87 char type;
88
89 bool operator< (const arm_mapping_symbol &other) const
90 { return this->value < other.value; }
91 };
92
93 typedef std::vector<arm_mapping_symbol> arm_mapping_symbol_vec;
94
95 struct arm_per_bfd
96 {
97 explicit arm_per_bfd (size_t num_sections)
98 : section_maps (new arm_mapping_symbol_vec[num_sections]),
99 section_maps_sorted (new bool[num_sections] ())
100 {}
101
102 DISABLE_COPY_AND_ASSIGN (arm_per_bfd);
103
104 /* Information about mapping symbols ($a, $d, $t) in the objfile.
105
106 The format is an array of vectors of arm_mapping_symbols, there is one
107 vector for each section of the objfile (the array is index by BFD section
108 index).
109
110 For each section, the vector of arm_mapping_symbol is sorted by
111 symbol value (address). */
112 std::unique_ptr<arm_mapping_symbol_vec[]> section_maps;
113
114 /* For each corresponding element of section_maps above, is this vector
115 sorted. */
116 std::unique_ptr<bool[]> section_maps_sorted;
117 };
118
119 /* Per-bfd data used for mapping symbols. */
120 static bfd_key<arm_per_bfd> arm_bfd_data_key;
121
122 /* The list of available "set arm ..." and "show arm ..." commands. */
123 static struct cmd_list_element *setarmcmdlist = NULL;
124 static struct cmd_list_element *showarmcmdlist = NULL;
125
126 /* The type of floating-point to use. Keep this in sync with enum
127 arm_float_model, and the help string in _initialize_arm_tdep. */
128 static const char *const fp_model_strings[] =
129 {
130 "auto",
131 "softfpa",
132 "fpa",
133 "softvfp",
134 "vfp",
135 NULL
136 };
137
138 /* A variable that can be configured by the user. */
139 static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
140 static const char *current_fp_model = "auto";
141
142 /* The ABI to use. Keep this in sync with arm_abi_kind. */
143 static const char *const arm_abi_strings[] =
144 {
145 "auto",
146 "APCS",
147 "AAPCS",
148 NULL
149 };
150
151 /* A variable that can be configured by the user. */
152 static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
153 static const char *arm_abi_string = "auto";
154
155 /* The execution mode to assume. */
156 static const char *const arm_mode_strings[] =
157 {
158 "auto",
159 "arm",
160 "thumb",
161 NULL
162 };
163
164 static const char *arm_fallback_mode_string = "auto";
165 static const char *arm_force_mode_string = "auto";
166
167 /* The standard register names, and all the valid aliases for them. Note
168 that `fp', `sp' and `pc' are not added in this alias list, because they
169 have been added as builtin user registers in
170 std-regs.c:_initialize_frame_reg. */
171 static const struct
172 {
173 const char *name;
174 int regnum;
175 } arm_register_aliases[] = {
176 /* Basic register numbers. */
177 { "r0", 0 },
178 { "r1", 1 },
179 { "r2", 2 },
180 { "r3", 3 },
181 { "r4", 4 },
182 { "r5", 5 },
183 { "r6", 6 },
184 { "r7", 7 },
185 { "r8", 8 },
186 { "r9", 9 },
187 { "r10", 10 },
188 { "r11", 11 },
189 { "r12", 12 },
190 { "r13", 13 },
191 { "r14", 14 },
192 { "r15", 15 },
193 /* Synonyms (argument and variable registers). */
194 { "a1", 0 },
195 { "a2", 1 },
196 { "a3", 2 },
197 { "a4", 3 },
198 { "v1", 4 },
199 { "v2", 5 },
200 { "v3", 6 },
201 { "v4", 7 },
202 { "v5", 8 },
203 { "v6", 9 },
204 { "v7", 10 },
205 { "v8", 11 },
206 /* Other platform-specific names for r9. */
207 { "sb", 9 },
208 { "tr", 9 },
209 /* Special names. */
210 { "ip", 12 },
211 { "lr", 14 },
212 /* Names used by GCC (not listed in the ARM EABI). */
213 { "sl", 10 },
214 /* A special name from the older ATPCS. */
215 { "wr", 7 },
216 };
217
218 static const char *const arm_register_names[] =
219 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
220 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
221 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
222 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
223 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
224 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
225 "fps", "cpsr" }; /* 24 25 */
226
227 /* Holds the current set of options to be passed to the disassembler. */
228 static char *arm_disassembler_options;
229
230 /* Valid register name styles. */
231 static const char **valid_disassembly_styles;
232
233 /* Disassembly style to use. Default to "std" register names. */
234 static const char *disassembly_style;
235
236 /* All possible arm target descriptors. */
237 static struct target_desc *tdesc_arm_list[ARM_FP_TYPE_INVALID];
238 static struct target_desc *tdesc_arm_mprofile_list[ARM_M_TYPE_INVALID];
239
240 /* This is used to keep the bfd arch_info in sync with the disassembly
241 style. */
242 static void set_disassembly_style_sfunc (const char *, int,
243 struct cmd_list_element *);
244 static void show_disassembly_style_sfunc (struct ui_file *, int,
245 struct cmd_list_element *,
246 const char *);
247
248 static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
249 readable_regcache *regcache,
250 int regnum, gdb_byte *buf);
251 static void arm_neon_quad_write (struct gdbarch *gdbarch,
252 struct regcache *regcache,
253 int regnum, const gdb_byte *buf);
254
255 static CORE_ADDR
256 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self);
257
258
259 /* get_next_pcs operations. */
260 static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = {
261 arm_get_next_pcs_read_memory_unsigned_integer,
262 arm_get_next_pcs_syscall_next_pc,
263 arm_get_next_pcs_addr_bits_remove,
264 arm_get_next_pcs_is_thumb,
265 NULL,
266 };
267
268 struct arm_prologue_cache
269 {
270 /* The stack pointer at the time this frame was created; i.e. the
271 caller's stack pointer when this function was called. It is used
272 to identify this frame. */
273 CORE_ADDR prev_sp;
274
275 /* The frame base for this frame is just prev_sp - frame size.
276 FRAMESIZE is the distance from the frame pointer to the
277 initial stack pointer. */
278
279 int framesize;
280
281 /* The register used to hold the frame pointer for this frame. */
282 int framereg;
283
284 /* Saved register offsets. */
285 struct trad_frame_saved_reg *saved_regs;
286 };
287
288 static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
289 CORE_ADDR prologue_start,
290 CORE_ADDR prologue_end,
291 struct arm_prologue_cache *cache);
292
293 /* Architecture version for displaced stepping. This effects the behaviour of
294 certain instructions, and really should not be hard-wired. */
295
296 #define DISPLACED_STEPPING_ARCH_VERSION 5
297
298 /* See arm-tdep.h. */
299
300 bool arm_apcs_32 = true;
301
302 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
303
304 int
305 arm_psr_thumb_bit (struct gdbarch *gdbarch)
306 {
307 if (gdbarch_tdep (gdbarch)->is_m)
308 return XPSR_T;
309 else
310 return CPSR_T;
311 }
312
313 /* Determine if the processor is currently executing in Thumb mode. */
314
315 int
316 arm_is_thumb (struct regcache *regcache)
317 {
318 ULONGEST cpsr;
319 ULONGEST t_bit = arm_psr_thumb_bit (regcache->arch ());
320
321 cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
322
323 return (cpsr & t_bit) != 0;
324 }
325
326 /* Determine if FRAME is executing in Thumb mode. */
327
328 int
329 arm_frame_is_thumb (struct frame_info *frame)
330 {
331 CORE_ADDR cpsr;
332 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
333
334 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
335 directly (from a signal frame or dummy frame) or by interpreting
336 the saved LR (from a prologue or DWARF frame). So consult it and
337 trust the unwinders. */
338 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
339
340 return (cpsr & t_bit) != 0;
341 }
342
343 /* Search for the mapping symbol covering MEMADDR. If one is found,
344 return its type. Otherwise, return 0. If START is non-NULL,
345 set *START to the location of the mapping symbol. */
346
347 static char
348 arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
349 {
350 struct obj_section *sec;
351
352 /* If there are mapping symbols, consult them. */
353 sec = find_pc_section (memaddr);
354 if (sec != NULL)
355 {
356 arm_per_bfd *data = arm_bfd_data_key.get (sec->objfile->obfd);
357 if (data != NULL)
358 {
359 unsigned int section_idx = sec->the_bfd_section->index;
360 arm_mapping_symbol_vec &map
361 = data->section_maps[section_idx];
362
363 /* Sort the vector on first use. */
364 if (!data->section_maps_sorted[section_idx])
365 {
366 std::sort (map.begin (), map.end ());
367 data->section_maps_sorted[section_idx] = true;
368 }
369
370 struct arm_mapping_symbol map_key
371 = { memaddr - obj_section_addr (sec), 0 };
372 arm_mapping_symbol_vec::const_iterator it
373 = std::lower_bound (map.begin (), map.end (), map_key);
374
375 /* std::lower_bound finds the earliest ordered insertion
376 point. If the symbol at this position starts at this exact
377 address, we use that; otherwise, the preceding
378 mapping symbol covers this address. */
379 if (it < map.end ())
380 {
381 if (it->value == map_key.value)
382 {
383 if (start)
384 *start = it->value + obj_section_addr (sec);
385 return it->type;
386 }
387 }
388
389 if (it > map.begin ())
390 {
391 arm_mapping_symbol_vec::const_iterator prev_it
392 = it - 1;
393
394 if (start)
395 *start = prev_it->value + obj_section_addr (sec);
396 return prev_it->type;
397 }
398 }
399 }
400
401 return 0;
402 }
403
404 /* Determine if the program counter specified in MEMADDR is in a Thumb
405 function. This function should be called for addresses unrelated to
406 any executing frame; otherwise, prefer arm_frame_is_thumb. */
407
408 int
409 arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
410 {
411 struct bound_minimal_symbol sym;
412 char type;
413 arm_displaced_step_closure *dsc
414 = ((arm_displaced_step_closure * )
415 get_displaced_step_closure_by_addr (memaddr));
416
417 /* If checking the mode of displaced instruction in copy area, the mode
418 should be determined by instruction on the original address. */
419 if (dsc)
420 {
421 if (debug_displaced)
422 fprintf_unfiltered (gdb_stdlog,
423 "displaced: check mode of %.8lx instead of %.8lx\n",
424 (unsigned long) dsc->insn_addr,
425 (unsigned long) memaddr);
426 memaddr = dsc->insn_addr;
427 }
428
429 /* If bit 0 of the address is set, assume this is a Thumb address. */
430 if (IS_THUMB_ADDR (memaddr))
431 return 1;
432
433 /* If the user wants to override the symbol table, let him. */
434 if (strcmp (arm_force_mode_string, "arm") == 0)
435 return 0;
436 if (strcmp (arm_force_mode_string, "thumb") == 0)
437 return 1;
438
439 /* ARM v6-M and v7-M are always in Thumb mode. */
440 if (gdbarch_tdep (gdbarch)->is_m)
441 return 1;
442
443 /* If there are mapping symbols, consult them. */
444 type = arm_find_mapping_symbol (memaddr, NULL);
445 if (type)
446 return type == 't';
447
448 /* Thumb functions have a "special" bit set in minimal symbols. */
449 sym = lookup_minimal_symbol_by_pc (memaddr);
450 if (sym.minsym)
451 return (MSYMBOL_IS_SPECIAL (sym.minsym));
452
453 /* If the user wants to override the fallback mode, let them. */
454 if (strcmp (arm_fallback_mode_string, "arm") == 0)
455 return 0;
456 if (strcmp (arm_fallback_mode_string, "thumb") == 0)
457 return 1;
458
459 /* If we couldn't find any symbol, but we're talking to a running
460 target, then trust the current value of $cpsr. This lets
461 "display/i $pc" always show the correct mode (though if there is
462 a symbol table we will not reach here, so it still may not be
463 displayed in the mode it will be executed). */
464 if (target_has_registers)
465 return arm_frame_is_thumb (get_current_frame ());
466
467 /* Otherwise we're out of luck; we assume ARM. */
468 return 0;
469 }
470
471 /* Determine if the address specified equals any of these magic return
472 values, called EXC_RETURN, defined by the ARM v6-M, v7-M and v8-M
473 architectures.
474
475 From ARMv6-M Reference Manual B1.5.8
476 Table B1-5 Exception return behavior
477
478 EXC_RETURN Return To Return Stack
479 0xFFFFFFF1 Handler mode Main
480 0xFFFFFFF9 Thread mode Main
481 0xFFFFFFFD Thread mode Process
482
483 From ARMv7-M Reference Manual B1.5.8
484 Table B1-8 EXC_RETURN definition of exception return behavior, no FP
485
486 EXC_RETURN Return To Return Stack
487 0xFFFFFFF1 Handler mode Main
488 0xFFFFFFF9 Thread mode Main
489 0xFFFFFFFD Thread mode Process
490
491 Table B1-9 EXC_RETURN definition of exception return behavior, with
492 FP
493
494 EXC_RETURN Return To Return Stack Frame Type
495 0xFFFFFFE1 Handler mode Main Extended
496 0xFFFFFFE9 Thread mode Main Extended
497 0xFFFFFFED Thread mode Process Extended
498 0xFFFFFFF1 Handler mode Main Basic
499 0xFFFFFFF9 Thread mode Main Basic
500 0xFFFFFFFD Thread mode Process Basic
501
502 For more details see "B1.5.8 Exception return behavior"
503 in both ARMv6-M and ARMv7-M Architecture Reference Manuals.
504
505 In the ARMv8-M Architecture Technical Reference also adds
506 for implementations without the Security Extension:
507
508 EXC_RETURN Condition
509 0xFFFFFFB0 Return to Handler mode.
510 0xFFFFFFB8 Return to Thread mode using the main stack.
511 0xFFFFFFBC Return to Thread mode using the process stack. */
512
513 static int
514 arm_m_addr_is_magic (CORE_ADDR addr)
515 {
516 switch (addr)
517 {
518 /* Values from ARMv8-M Architecture Technical Reference. */
519 case 0xffffffb0:
520 case 0xffffffb8:
521 case 0xffffffbc:
522 /* Values from Tables in B1.5.8 the EXC_RETURN definitions of
523 the exception return behavior. */
524 case 0xffffffe1:
525 case 0xffffffe9:
526 case 0xffffffed:
527 case 0xfffffff1:
528 case 0xfffffff9:
529 case 0xfffffffd:
530 /* Address is magic. */
531 return 1;
532
533 default:
534 /* Address is not magic. */
535 return 0;
536 }
537 }
538
539 /* Remove useless bits from addresses in a running program. */
540 static CORE_ADDR
541 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
542 {
543 /* On M-profile devices, do not strip the low bit from EXC_RETURN
544 (the magic exception return address). */
545 if (gdbarch_tdep (gdbarch)->is_m
546 && arm_m_addr_is_magic (val))
547 return val;
548
549 if (arm_apcs_32)
550 return UNMAKE_THUMB_ADDR (val);
551 else
552 return (val & 0x03fffffc);
553 }
554
555 /* Return 1 if PC is the start of a compiler helper function which
556 can be safely ignored during prologue skipping. IS_THUMB is true
557 if the function is known to be a Thumb function due to the way it
558 is being called. */
559 static int
560 skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
561 {
562 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
563 struct bound_minimal_symbol msym;
564
565 msym = lookup_minimal_symbol_by_pc (pc);
566 if (msym.minsym != NULL
567 && BMSYMBOL_VALUE_ADDRESS (msym) == pc
568 && msym.minsym->linkage_name () != NULL)
569 {
570 const char *name = msym.minsym->linkage_name ();
571
572 /* The GNU linker's Thumb call stub to foo is named
573 __foo_from_thumb. */
574 if (strstr (name, "_from_thumb") != NULL)
575 name += 2;
576
577 /* On soft-float targets, __truncdfsf2 is called to convert promoted
578 arguments to their argument types in non-prototyped
579 functions. */
580 if (startswith (name, "__truncdfsf2"))
581 return 1;
582 if (startswith (name, "__aeabi_d2f"))
583 return 1;
584
585 /* Internal functions related to thread-local storage. */
586 if (startswith (name, "__tls_get_addr"))
587 return 1;
588 if (startswith (name, "__aeabi_read_tp"))
589 return 1;
590 }
591 else
592 {
593 /* If we run against a stripped glibc, we may be unable to identify
594 special functions by name. Check for one important case,
595 __aeabi_read_tp, by comparing the *code* against the default
596 implementation (this is hand-written ARM assembler in glibc). */
597
598 if (!is_thumb
599 && read_code_unsigned_integer (pc, 4, byte_order_for_code)
600 == 0xe3e00a0f /* mov r0, #0xffff0fff */
601 && read_code_unsigned_integer (pc + 4, 4, byte_order_for_code)
602 == 0xe240f01f) /* sub pc, r0, #31 */
603 return 1;
604 }
605
606 return 0;
607 }
608
609 /* Extract the immediate from instruction movw/movt of encoding T. INSN1 is
610 the first 16-bit of instruction, and INSN2 is the second 16-bit of
611 instruction. */
612 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
613 ((bits ((insn1), 0, 3) << 12) \
614 | (bits ((insn1), 10, 10) << 11) \
615 | (bits ((insn2), 12, 14) << 8) \
616 | bits ((insn2), 0, 7))
617
618 /* Extract the immediate from instruction movw/movt of encoding A. INSN is
619 the 32-bit instruction. */
620 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \
621 ((bits ((insn), 16, 19) << 12) \
622 | bits ((insn), 0, 11))
623
624 /* Decode immediate value; implements ThumbExpandImmediate pseudo-op. */
625
626 static unsigned int
627 thumb_expand_immediate (unsigned int imm)
628 {
629 unsigned int count = imm >> 7;
630
631 if (count < 8)
632 switch (count / 2)
633 {
634 case 0:
635 return imm & 0xff;
636 case 1:
637 return (imm & 0xff) | ((imm & 0xff) << 16);
638 case 2:
639 return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
640 case 3:
641 return (imm & 0xff) | ((imm & 0xff) << 8)
642 | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
643 }
644
645 return (0x80 | (imm & 0x7f)) << (32 - count);
646 }
647
648 /* Return 1 if the 16-bit Thumb instruction INSN restores SP in
649 epilogue, 0 otherwise. */
650
651 static int
652 thumb_instruction_restores_sp (unsigned short insn)
653 {
654 return (insn == 0x46bd /* mov sp, r7 */
655 || (insn & 0xff80) == 0xb000 /* add sp, imm */
656 || (insn & 0xfe00) == 0xbc00); /* pop <registers> */
657 }
658
659 /* Analyze a Thumb prologue, looking for a recognizable stack frame
660 and frame pointer. Scan until we encounter a store that could
661 clobber the stack frame unexpectedly, or an unknown instruction.
662 Return the last address which is definitely safe to skip for an
663 initial breakpoint. */
664
665 static CORE_ADDR
666 thumb_analyze_prologue (struct gdbarch *gdbarch,
667 CORE_ADDR start, CORE_ADDR limit,
668 struct arm_prologue_cache *cache)
669 {
670 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
671 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
672 int i;
673 pv_t regs[16];
674 CORE_ADDR offset;
675 CORE_ADDR unrecognized_pc = 0;
676
677 for (i = 0; i < 16; i++)
678 regs[i] = pv_register (i, 0);
679 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
680
681 while (start < limit)
682 {
683 unsigned short insn;
684
685 insn = read_code_unsigned_integer (start, 2, byte_order_for_code);
686
687 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
688 {
689 int regno;
690 int mask;
691
692 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
693 break;
694
695 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
696 whether to save LR (R14). */
697 mask = (insn & 0xff) | ((insn & 0x100) << 6);
698
699 /* Calculate offsets of saved R0-R7 and LR. */
700 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
701 if (mask & (1 << regno))
702 {
703 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
704 -4);
705 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
706 }
707 }
708 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
709 {
710 offset = (insn & 0x7f) << 2; /* get scaled offset */
711 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
712 -offset);
713 }
714 else if (thumb_instruction_restores_sp (insn))
715 {
716 /* Don't scan past the epilogue. */
717 break;
718 }
719 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
720 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
721 (insn & 0xff) << 2);
722 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
723 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
724 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
725 bits (insn, 6, 8));
726 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
727 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
728 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
729 bits (insn, 0, 7));
730 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
731 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
732 && pv_is_constant (regs[bits (insn, 3, 5)]))
733 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
734 regs[bits (insn, 6, 8)]);
735 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
736 && pv_is_constant (regs[bits (insn, 3, 6)]))
737 {
738 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
739 int rm = bits (insn, 3, 6);
740 regs[rd] = pv_add (regs[rd], regs[rm]);
741 }
742 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
743 {
744 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
745 int src_reg = (insn & 0x78) >> 3;
746 regs[dst_reg] = regs[src_reg];
747 }
748 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
749 {
750 /* Handle stores to the stack. Normally pushes are used,
751 but with GCC -mtpcs-frame, there may be other stores
752 in the prologue to create the frame. */
753 int regno = (insn >> 8) & 0x7;
754 pv_t addr;
755
756 offset = (insn & 0xff) << 2;
757 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
758
759 if (stack.store_would_trash (addr))
760 break;
761
762 stack.store (addr, 4, regs[regno]);
763 }
764 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
765 {
766 int rd = bits (insn, 0, 2);
767 int rn = bits (insn, 3, 5);
768 pv_t addr;
769
770 offset = bits (insn, 6, 10) << 2;
771 addr = pv_add_constant (regs[rn], offset);
772
773 if (stack.store_would_trash (addr))
774 break;
775
776 stack.store (addr, 4, regs[rd]);
777 }
778 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
779 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
780 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
781 /* Ignore stores of argument registers to the stack. */
782 ;
783 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
784 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
785 /* Ignore block loads from the stack, potentially copying
786 parameters from memory. */
787 ;
788 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
789 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
790 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
791 /* Similarly ignore single loads from the stack. */
792 ;
793 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
794 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
795 /* Skip register copies, i.e. saves to another register
796 instead of the stack. */
797 ;
798 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
799 /* Recognize constant loads; even with small stacks these are necessary
800 on Thumb. */
801 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
802 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
803 {
804 /* Constant pool loads, for the same reason. */
805 unsigned int constant;
806 CORE_ADDR loc;
807
808 loc = start + 4 + bits (insn, 0, 7) * 4;
809 constant = read_memory_unsigned_integer (loc, 4, byte_order);
810 regs[bits (insn, 8, 10)] = pv_constant (constant);
811 }
812 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
813 {
814 unsigned short inst2;
815
816 inst2 = read_code_unsigned_integer (start + 2, 2,
817 byte_order_for_code);
818
819 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
820 {
821 /* BL, BLX. Allow some special function calls when
822 skipping the prologue; GCC generates these before
823 storing arguments to the stack. */
824 CORE_ADDR nextpc;
825 int j1, j2, imm1, imm2;
826
827 imm1 = sbits (insn, 0, 10);
828 imm2 = bits (inst2, 0, 10);
829 j1 = bit (inst2, 13);
830 j2 = bit (inst2, 11);
831
832 offset = ((imm1 << 12) + (imm2 << 1));
833 offset ^= ((!j2) << 22) | ((!j1) << 23);
834
835 nextpc = start + 4 + offset;
836 /* For BLX make sure to clear the low bits. */
837 if (bit (inst2, 12) == 0)
838 nextpc = nextpc & 0xfffffffc;
839
840 if (!skip_prologue_function (gdbarch, nextpc,
841 bit (inst2, 12) != 0))
842 break;
843 }
844
845 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
846 { registers } */
847 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
848 {
849 pv_t addr = regs[bits (insn, 0, 3)];
850 int regno;
851
852 if (stack.store_would_trash (addr))
853 break;
854
855 /* Calculate offsets of saved registers. */
856 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
857 if (inst2 & (1 << regno))
858 {
859 addr = pv_add_constant (addr, -4);
860 stack.store (addr, 4, regs[regno]);
861 }
862
863 if (insn & 0x0020)
864 regs[bits (insn, 0, 3)] = addr;
865 }
866
867 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
868 [Rn, #+/-imm]{!} */
869 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
870 {
871 int regno1 = bits (inst2, 12, 15);
872 int regno2 = bits (inst2, 8, 11);
873 pv_t addr = regs[bits (insn, 0, 3)];
874
875 offset = inst2 & 0xff;
876 if (insn & 0x0080)
877 addr = pv_add_constant (addr, offset);
878 else
879 addr = pv_add_constant (addr, -offset);
880
881 if (stack.store_would_trash (addr))
882 break;
883
884 stack.store (addr, 4, regs[regno1]);
885 stack.store (pv_add_constant (addr, 4),
886 4, regs[regno2]);
887
888 if (insn & 0x0020)
889 regs[bits (insn, 0, 3)] = addr;
890 }
891
892 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
893 && (inst2 & 0x0c00) == 0x0c00
894 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
895 {
896 int regno = bits (inst2, 12, 15);
897 pv_t addr = regs[bits (insn, 0, 3)];
898
899 offset = inst2 & 0xff;
900 if (inst2 & 0x0200)
901 addr = pv_add_constant (addr, offset);
902 else
903 addr = pv_add_constant (addr, -offset);
904
905 if (stack.store_would_trash (addr))
906 break;
907
908 stack.store (addr, 4, regs[regno]);
909
910 if (inst2 & 0x0100)
911 regs[bits (insn, 0, 3)] = addr;
912 }
913
914 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
915 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
916 {
917 int regno = bits (inst2, 12, 15);
918 pv_t addr;
919
920 offset = inst2 & 0xfff;
921 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
922
923 if (stack.store_would_trash (addr))
924 break;
925
926 stack.store (addr, 4, regs[regno]);
927 }
928
929 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
930 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
931 /* Ignore stores of argument registers to the stack. */
932 ;
933
934 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
935 && (inst2 & 0x0d00) == 0x0c00
936 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
937 /* Ignore stores of argument registers to the stack. */
938 ;
939
940 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
941 { registers } */
942 && (inst2 & 0x8000) == 0x0000
943 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
944 /* Ignore block loads from the stack, potentially copying
945 parameters from memory. */
946 ;
947
948 else if ((insn & 0xff70) == 0xe950 /* ldrd Rt, Rt2,
949 [Rn, #+/-imm] */
950 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
951 /* Similarly ignore dual loads from the stack. */
952 ;
953
954 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
955 && (inst2 & 0x0d00) == 0x0c00
956 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
957 /* Similarly ignore single loads from the stack. */
958 ;
959
960 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
961 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
962 /* Similarly ignore single loads from the stack. */
963 ;
964
965 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
966 && (inst2 & 0x8000) == 0x0000)
967 {
968 unsigned int imm = ((bits (insn, 10, 10) << 11)
969 | (bits (inst2, 12, 14) << 8)
970 | bits (inst2, 0, 7));
971
972 regs[bits (inst2, 8, 11)]
973 = pv_add_constant (regs[bits (insn, 0, 3)],
974 thumb_expand_immediate (imm));
975 }
976
977 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
978 && (inst2 & 0x8000) == 0x0000)
979 {
980 unsigned int imm = ((bits (insn, 10, 10) << 11)
981 | (bits (inst2, 12, 14) << 8)
982 | bits (inst2, 0, 7));
983
984 regs[bits (inst2, 8, 11)]
985 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
986 }
987
988 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
989 && (inst2 & 0x8000) == 0x0000)
990 {
991 unsigned int imm = ((bits (insn, 10, 10) << 11)
992 | (bits (inst2, 12, 14) << 8)
993 | bits (inst2, 0, 7));
994
995 regs[bits (inst2, 8, 11)]
996 = pv_add_constant (regs[bits (insn, 0, 3)],
997 - (CORE_ADDR) thumb_expand_immediate (imm));
998 }
999
1000 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
1001 && (inst2 & 0x8000) == 0x0000)
1002 {
1003 unsigned int imm = ((bits (insn, 10, 10) << 11)
1004 | (bits (inst2, 12, 14) << 8)
1005 | bits (inst2, 0, 7));
1006
1007 regs[bits (inst2, 8, 11)]
1008 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
1009 }
1010
1011 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
1012 {
1013 unsigned int imm = ((bits (insn, 10, 10) << 11)
1014 | (bits (inst2, 12, 14) << 8)
1015 | bits (inst2, 0, 7));
1016
1017 regs[bits (inst2, 8, 11)]
1018 = pv_constant (thumb_expand_immediate (imm));
1019 }
1020
1021 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
1022 {
1023 unsigned int imm
1024 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
1025
1026 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1027 }
1028
1029 else if (insn == 0xea5f /* mov.w Rd,Rm */
1030 && (inst2 & 0xf0f0) == 0)
1031 {
1032 int dst_reg = (inst2 & 0x0f00) >> 8;
1033 int src_reg = inst2 & 0xf;
1034 regs[dst_reg] = regs[src_reg];
1035 }
1036
1037 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1038 {
1039 /* Constant pool loads. */
1040 unsigned int constant;
1041 CORE_ADDR loc;
1042
1043 offset = bits (inst2, 0, 11);
1044 if (insn & 0x0080)
1045 loc = start + 4 + offset;
1046 else
1047 loc = start + 4 - offset;
1048
1049 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1050 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1051 }
1052
1053 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1054 {
1055 /* Constant pool loads. */
1056 unsigned int constant;
1057 CORE_ADDR loc;
1058
1059 offset = bits (inst2, 0, 7) << 2;
1060 if (insn & 0x0080)
1061 loc = start + 4 + offset;
1062 else
1063 loc = start + 4 - offset;
1064
1065 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1066 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1067
1068 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1069 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1070 }
1071
1072 else if (thumb2_instruction_changes_pc (insn, inst2))
1073 {
1074 /* Don't scan past anything that might change control flow. */
1075 break;
1076 }
1077 else
1078 {
1079 /* The optimizer might shove anything into the prologue,
1080 so we just skip what we don't recognize. */
1081 unrecognized_pc = start;
1082 }
1083
1084 start += 2;
1085 }
1086 else if (thumb_instruction_changes_pc (insn))
1087 {
1088 /* Don't scan past anything that might change control flow. */
1089 break;
1090 }
1091 else
1092 {
1093 /* The optimizer might shove anything into the prologue,
1094 so we just skip what we don't recognize. */
1095 unrecognized_pc = start;
1096 }
1097
1098 start += 2;
1099 }
1100
1101 if (arm_debug)
1102 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1103 paddress (gdbarch, start));
1104
1105 if (unrecognized_pc == 0)
1106 unrecognized_pc = start;
1107
1108 if (cache == NULL)
1109 return unrecognized_pc;
1110
1111 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1112 {
1113 /* Frame pointer is fp. Frame size is constant. */
1114 cache->framereg = ARM_FP_REGNUM;
1115 cache->framesize = -regs[ARM_FP_REGNUM].k;
1116 }
1117 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1118 {
1119 /* Frame pointer is r7. Frame size is constant. */
1120 cache->framereg = THUMB_FP_REGNUM;
1121 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1122 }
1123 else
1124 {
1125 /* Try the stack pointer... this is a bit desperate. */
1126 cache->framereg = ARM_SP_REGNUM;
1127 cache->framesize = -regs[ARM_SP_REGNUM].k;
1128 }
1129
1130 for (i = 0; i < 16; i++)
1131 if (stack.find_reg (gdbarch, i, &offset))
1132 cache->saved_regs[i].addr = offset;
1133
1134 return unrecognized_pc;
1135 }
1136
1137
1138 /* Try to analyze the instructions starting from PC, which load symbol
1139 __stack_chk_guard. Return the address of instruction after loading this
1140 symbol, set the dest register number to *BASEREG, and set the size of
1141 instructions for loading symbol in OFFSET. Return 0 if instructions are
1142 not recognized. */
1143
1144 static CORE_ADDR
1145 arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
1146 unsigned int *destreg, int *offset)
1147 {
1148 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1149 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1150 unsigned int low, high, address;
1151
1152 address = 0;
1153 if (is_thumb)
1154 {
1155 unsigned short insn1
1156 = read_code_unsigned_integer (pc, 2, byte_order_for_code);
1157
1158 if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
1159 {
1160 *destreg = bits (insn1, 8, 10);
1161 *offset = 2;
1162 address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
1163 address = read_memory_unsigned_integer (address, 4,
1164 byte_order_for_code);
1165 }
1166 else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
1167 {
1168 unsigned short insn2
1169 = read_code_unsigned_integer (pc + 2, 2, byte_order_for_code);
1170
1171 low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1172
1173 insn1
1174 = read_code_unsigned_integer (pc + 4, 2, byte_order_for_code);
1175 insn2
1176 = read_code_unsigned_integer (pc + 6, 2, byte_order_for_code);
1177
1178 /* movt Rd, #const */
1179 if ((insn1 & 0xfbc0) == 0xf2c0)
1180 {
1181 high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1182 *destreg = bits (insn2, 8, 11);
1183 *offset = 8;
1184 address = (high << 16 | low);
1185 }
1186 }
1187 }
1188 else
1189 {
1190 unsigned int insn
1191 = read_code_unsigned_integer (pc, 4, byte_order_for_code);
1192
1193 if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
1194 {
1195 address = bits (insn, 0, 11) + pc + 8;
1196 address = read_memory_unsigned_integer (address, 4,
1197 byte_order_for_code);
1198
1199 *destreg = bits (insn, 12, 15);
1200 *offset = 4;
1201 }
1202 else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1203 {
1204 low = EXTRACT_MOVW_MOVT_IMM_A (insn);
1205
1206 insn
1207 = read_code_unsigned_integer (pc + 4, 4, byte_order_for_code);
1208
1209 if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1210 {
1211 high = EXTRACT_MOVW_MOVT_IMM_A (insn);
1212 *destreg = bits (insn, 12, 15);
1213 *offset = 8;
1214 address = (high << 16 | low);
1215 }
1216 }
1217 }
1218
1219 return address;
1220 }
1221
1222 /* Try to skip a sequence of instructions used for stack protector. If PC
1223 points to the first instruction of this sequence, return the address of
1224 first instruction after this sequence, otherwise, return original PC.
1225
1226 On arm, this sequence of instructions is composed of mainly three steps,
1227 Step 1: load symbol __stack_chk_guard,
1228 Step 2: load from address of __stack_chk_guard,
1229 Step 3: store it to somewhere else.
1230
1231 Usually, instructions on step 2 and step 3 are the same on various ARM
1232 architectures. On step 2, it is one instruction 'ldr Rx, [Rn, #0]', and
1233 on step 3, it is also one instruction 'str Rx, [r7, #immd]'. However,
1234 instructions in step 1 vary from different ARM architectures. On ARMv7,
1235 they are,
1236
1237 movw Rn, #:lower16:__stack_chk_guard
1238 movt Rn, #:upper16:__stack_chk_guard
1239
1240 On ARMv5t, it is,
1241
1242 ldr Rn, .Label
1243 ....
1244 .Lable:
1245 .word __stack_chk_guard
1246
1247 Since ldr/str is a very popular instruction, we can't use them as
1248 'fingerprint' or 'signature' of stack protector sequence. Here we choose
1249 sequence {movw/movt, ldr}/ldr/str plus symbol __stack_chk_guard, if not
1250 stripped, as the 'fingerprint' of a stack protector cdoe sequence. */
1251
1252 static CORE_ADDR
1253 arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
1254 {
1255 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1256 unsigned int basereg;
1257 struct bound_minimal_symbol stack_chk_guard;
1258 int offset;
1259 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1260 CORE_ADDR addr;
1261
1262 /* Try to parse the instructions in Step 1. */
1263 addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1264 &basereg, &offset);
1265 if (!addr)
1266 return pc;
1267
1268 stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
1269 /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
1270 Otherwise, this sequence cannot be for stack protector. */
1271 if (stack_chk_guard.minsym == NULL
1272 || !startswith (stack_chk_guard.minsym->linkage_name (), "__stack_chk_guard"))
1273 return pc;
1274
1275 if (is_thumb)
1276 {
1277 unsigned int destreg;
1278 unsigned short insn
1279 = read_code_unsigned_integer (pc + offset, 2, byte_order_for_code);
1280
1281 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1282 if ((insn & 0xf800) != 0x6800)
1283 return pc;
1284 if (bits (insn, 3, 5) != basereg)
1285 return pc;
1286 destreg = bits (insn, 0, 2);
1287
1288 insn = read_code_unsigned_integer (pc + offset + 2, 2,
1289 byte_order_for_code);
1290 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1291 if ((insn & 0xf800) != 0x6000)
1292 return pc;
1293 if (destreg != bits (insn, 0, 2))
1294 return pc;
1295 }
1296 else
1297 {
1298 unsigned int destreg;
1299 unsigned int insn
1300 = read_code_unsigned_integer (pc + offset, 4, byte_order_for_code);
1301
1302 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1303 if ((insn & 0x0e500000) != 0x04100000)
1304 return pc;
1305 if (bits (insn, 16, 19) != basereg)
1306 return pc;
1307 destreg = bits (insn, 12, 15);
1308 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1309 insn = read_code_unsigned_integer (pc + offset + 4,
1310 4, byte_order_for_code);
1311 if ((insn & 0x0e500000) != 0x04000000)
1312 return pc;
1313 if (bits (insn, 12, 15) != destreg)
1314 return pc;
1315 }
1316 /* The size of total two instructions ldr/str is 4 on Thumb-2, while 8
1317 on arm. */
1318 if (is_thumb)
1319 return pc + offset + 4;
1320 else
1321 return pc + offset + 8;
1322 }
1323
1324 /* Advance the PC across any function entry prologue instructions to
1325 reach some "real" code.
1326
1327 The APCS (ARM Procedure Call Standard) defines the following
1328 prologue:
1329
1330 mov ip, sp
1331 [stmfd sp!, {a1,a2,a3,a4}]
1332 stmfd sp!, {...,fp,ip,lr,pc}
1333 [stfe f7, [sp, #-12]!]
1334 [stfe f6, [sp, #-12]!]
1335 [stfe f5, [sp, #-12]!]
1336 [stfe f4, [sp, #-12]!]
1337 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1338
1339 static CORE_ADDR
1340 arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1341 {
1342 CORE_ADDR func_addr, limit_pc;
1343
1344 /* See if we can determine the end of the prologue via the symbol table.
1345 If so, then return either PC, or the PC after the prologue, whichever
1346 is greater. */
1347 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
1348 {
1349 CORE_ADDR post_prologue_pc
1350 = skip_prologue_using_sal (gdbarch, func_addr);
1351 struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);
1352
1353 if (post_prologue_pc)
1354 post_prologue_pc
1355 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1356
1357
1358 /* GCC always emits a line note before the prologue and another
1359 one after, even if the two are at the same address or on the
1360 same line. Take advantage of this so that we do not need to
1361 know every instruction that might appear in the prologue. We
1362 will have producer information for most binaries; if it is
1363 missing (e.g. for -gstabs), assuming the GNU tools. */
1364 if (post_prologue_pc
1365 && (cust == NULL
1366 || COMPUNIT_PRODUCER (cust) == NULL
1367 || startswith (COMPUNIT_PRODUCER (cust), "GNU ")
1368 || producer_is_llvm (COMPUNIT_PRODUCER (cust))))
1369 return post_prologue_pc;
1370
1371 if (post_prologue_pc != 0)
1372 {
1373 CORE_ADDR analyzed_limit;
1374
1375 /* For non-GCC compilers, make sure the entire line is an
1376 acceptable prologue; GDB will round this function's
1377 return value up to the end of the following line so we
1378 can not skip just part of a line (and we do not want to).
1379
1380 RealView does not treat the prologue specially, but does
1381 associate prologue code with the opening brace; so this
1382 lets us skip the first line if we think it is the opening
1383 brace. */
1384 if (arm_pc_is_thumb (gdbarch, func_addr))
1385 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1386 post_prologue_pc, NULL);
1387 else
1388 analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
1389 post_prologue_pc, NULL);
1390
1391 if (analyzed_limit != post_prologue_pc)
1392 return func_addr;
1393
1394 return post_prologue_pc;
1395 }
1396 }
1397
1398 /* Can't determine prologue from the symbol table, need to examine
1399 instructions. */
1400
1401 /* Find an upper limit on the function prologue using the debug
1402 information. If the debug information could not be used to provide
1403 that bound, then use an arbitrary large number as the upper bound. */
1404 /* Like arm_scan_prologue, stop no later than pc + 64. */
1405 limit_pc = skip_prologue_using_sal (gdbarch, pc);
1406 if (limit_pc == 0)
1407 limit_pc = pc + 64; /* Magic. */
1408
1409
1410 /* Check if this is Thumb code. */
1411 if (arm_pc_is_thumb (gdbarch, pc))
1412 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1413 else
1414 return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1415 }
1416
1417 /* *INDENT-OFF* */
1418 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1419 This function decodes a Thumb function prologue to determine:
1420 1) the size of the stack frame
1421 2) which registers are saved on it
1422 3) the offsets of saved regs
1423 4) the offset from the stack pointer to the frame pointer
1424
1425 A typical Thumb function prologue would create this stack frame
1426 (offsets relative to FP)
1427 old SP -> 24 stack parameters
1428 20 LR
1429 16 R7
1430 R7 -> 0 local variables (16 bytes)
1431 SP -> -12 additional stack space (12 bytes)
1432 The frame size would thus be 36 bytes, and the frame offset would be
1433 12 bytes. The frame register is R7.
1434
1435 The comments for thumb_skip_prolog() describe the algorithm we use
1436 to detect the end of the prolog. */
1437 /* *INDENT-ON* */
1438
1439 static void
1440 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1441 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1442 {
1443 CORE_ADDR prologue_start;
1444 CORE_ADDR prologue_end;
1445
1446 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1447 &prologue_end))
1448 {
1449 /* See comment in arm_scan_prologue for an explanation of
1450 this heuristics. */
1451 if (prologue_end > prologue_start + 64)
1452 {
1453 prologue_end = prologue_start + 64;
1454 }
1455 }
1456 else
1457 /* We're in the boondocks: we have no idea where the start of the
1458 function is. */
1459 return;
1460
1461 prologue_end = std::min (prologue_end, prev_pc);
1462
1463 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1464 }
1465
1466 /* Return 1 if the ARM instruction INSN restores SP in epilogue, 0
1467 otherwise. */
1468
1469 static int
1470 arm_instruction_restores_sp (unsigned int insn)
1471 {
1472 if (bits (insn, 28, 31) != INST_NV)
1473 {
1474 if ((insn & 0x0df0f000) == 0x0080d000
1475 /* ADD SP (register or immediate). */
1476 || (insn & 0x0df0f000) == 0x0040d000
1477 /* SUB SP (register or immediate). */
1478 || (insn & 0x0ffffff0) == 0x01a0d000
1479 /* MOV SP. */
1480 || (insn & 0x0fff0000) == 0x08bd0000
1481 /* POP (LDMIA). */
1482 || (insn & 0x0fff0000) == 0x049d0000)
1483 /* POP of a single register. */
1484 return 1;
1485 }
1486
1487 return 0;
1488 }
1489
1490 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1491 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1492 fill it in. Return the first address not recognized as a prologue
1493 instruction.
1494
1495 We recognize all the instructions typically found in ARM prologues,
1496 plus harmless instructions which can be skipped (either for analysis
1497 purposes, or a more restrictive set that can be skipped when finding
1498 the end of the prologue). */
1499
1500 static CORE_ADDR
1501 arm_analyze_prologue (struct gdbarch *gdbarch,
1502 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1503 struct arm_prologue_cache *cache)
1504 {
1505 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1506 int regno;
1507 CORE_ADDR offset, current_pc;
1508 pv_t regs[ARM_FPS_REGNUM];
1509 CORE_ADDR unrecognized_pc = 0;
1510
1511 /* Search the prologue looking for instructions that set up the
1512 frame pointer, adjust the stack pointer, and save registers.
1513
1514 Be careful, however, and if it doesn't look like a prologue,
1515 don't try to scan it. If, for instance, a frameless function
1516 begins with stmfd sp!, then we will tell ourselves there is
1517 a frame, which will confuse stack traceback, as well as "finish"
1518 and other operations that rely on a knowledge of the stack
1519 traceback. */
1520
1521 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1522 regs[regno] = pv_register (regno, 0);
1523 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1524
1525 for (current_pc = prologue_start;
1526 current_pc < prologue_end;
1527 current_pc += 4)
1528 {
1529 unsigned int insn
1530 = read_code_unsigned_integer (current_pc, 4, byte_order_for_code);
1531
1532 if (insn == 0xe1a0c00d) /* mov ip, sp */
1533 {
1534 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1535 continue;
1536 }
1537 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1538 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1539 {
1540 unsigned imm = insn & 0xff; /* immediate value */
1541 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1542 int rd = bits (insn, 12, 15);
1543 imm = (imm >> rot) | (imm << (32 - rot));
1544 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1545 continue;
1546 }
1547 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1548 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1549 {
1550 unsigned imm = insn & 0xff; /* immediate value */
1551 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1552 int rd = bits (insn, 12, 15);
1553 imm = (imm >> rot) | (imm << (32 - rot));
1554 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1555 continue;
1556 }
1557 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1558 [sp, #-4]! */
1559 {
1560 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1561 break;
1562 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1563 stack.store (regs[ARM_SP_REGNUM], 4,
1564 regs[bits (insn, 12, 15)]);
1565 continue;
1566 }
1567 else if ((insn & 0xffff0000) == 0xe92d0000)
1568 /* stmfd sp!, {..., fp, ip, lr, pc}
1569 or
1570 stmfd sp!, {a1, a2, a3, a4} */
1571 {
1572 int mask = insn & 0xffff;
1573
1574 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1575 break;
1576
1577 /* Calculate offsets of saved registers. */
1578 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1579 if (mask & (1 << regno))
1580 {
1581 regs[ARM_SP_REGNUM]
1582 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1583 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
1584 }
1585 }
1586 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1587 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1588 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1589 {
1590 /* No need to add this to saved_regs -- it's just an arg reg. */
1591 continue;
1592 }
1593 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1594 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1595 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1596 {
1597 /* No need to add this to saved_regs -- it's just an arg reg. */
1598 continue;
1599 }
1600 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1601 { registers } */
1602 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1603 {
1604 /* No need to add this to saved_regs -- it's just arg regs. */
1605 continue;
1606 }
1607 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1608 {
1609 unsigned imm = insn & 0xff; /* immediate value */
1610 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1611 imm = (imm >> rot) | (imm << (32 - rot));
1612 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1613 }
1614 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1615 {
1616 unsigned imm = insn & 0xff; /* immediate value */
1617 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1618 imm = (imm >> rot) | (imm << (32 - rot));
1619 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
1620 }
1621 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1622 [sp, -#c]! */
1623 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1624 {
1625 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1626 break;
1627
1628 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1629 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
1630 stack.store (regs[ARM_SP_REGNUM], 12, regs[regno]);
1631 }
1632 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1633 [sp!] */
1634 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1635 {
1636 int n_saved_fp_regs;
1637 unsigned int fp_start_reg, fp_bound_reg;
1638
1639 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1640 break;
1641
1642 if ((insn & 0x800) == 0x800) /* N0 is set */
1643 {
1644 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1645 n_saved_fp_regs = 3;
1646 else
1647 n_saved_fp_regs = 1;
1648 }
1649 else
1650 {
1651 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1652 n_saved_fp_regs = 2;
1653 else
1654 n_saved_fp_regs = 4;
1655 }
1656
1657 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
1658 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1659 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
1660 {
1661 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1662 stack.store (regs[ARM_SP_REGNUM], 12,
1663 regs[fp_start_reg++]);
1664 }
1665 }
1666 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1667 {
1668 /* Allow some special function calls when skipping the
1669 prologue; GCC generates these before storing arguments to
1670 the stack. */
1671 CORE_ADDR dest = BranchDest (current_pc, insn);
1672
1673 if (skip_prologue_function (gdbarch, dest, 0))
1674 continue;
1675 else
1676 break;
1677 }
1678 else if ((insn & 0xf0000000) != 0xe0000000)
1679 break; /* Condition not true, exit early. */
1680 else if (arm_instruction_changes_pc (insn))
1681 /* Don't scan past anything that might change control flow. */
1682 break;
1683 else if (arm_instruction_restores_sp (insn))
1684 {
1685 /* Don't scan past the epilogue. */
1686 break;
1687 }
1688 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1689 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1690 /* Ignore block loads from the stack, potentially copying
1691 parameters from memory. */
1692 continue;
1693 else if ((insn & 0xfc500000) == 0xe4100000
1694 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1695 /* Similarly ignore single loads from the stack. */
1696 continue;
1697 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1698 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1699 register instead of the stack. */
1700 continue;
1701 else
1702 {
1703 /* The optimizer might shove anything into the prologue, if
1704 we build up cache (cache != NULL) from scanning prologue,
1705 we just skip what we don't recognize and scan further to
1706 make cache as complete as possible. However, if we skip
1707 prologue, we'll stop immediately on unrecognized
1708 instruction. */
1709 unrecognized_pc = current_pc;
1710 if (cache != NULL)
1711 continue;
1712 else
1713 break;
1714 }
1715 }
1716
1717 if (unrecognized_pc == 0)
1718 unrecognized_pc = current_pc;
1719
1720 if (cache)
1721 {
1722 int framereg, framesize;
1723
1724 /* The frame size is just the distance from the frame register
1725 to the original stack pointer. */
1726 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1727 {
1728 /* Frame pointer is fp. */
1729 framereg = ARM_FP_REGNUM;
1730 framesize = -regs[ARM_FP_REGNUM].k;
1731 }
1732 else
1733 {
1734 /* Try the stack pointer... this is a bit desperate. */
1735 framereg = ARM_SP_REGNUM;
1736 framesize = -regs[ARM_SP_REGNUM].k;
1737 }
1738
1739 cache->framereg = framereg;
1740 cache->framesize = framesize;
1741
1742 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1743 if (stack.find_reg (gdbarch, regno, &offset))
1744 cache->saved_regs[regno].addr = offset;
1745 }
1746
1747 if (arm_debug)
1748 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1749 paddress (gdbarch, unrecognized_pc));
1750
1751 return unrecognized_pc;
1752 }
1753
1754 static void
1755 arm_scan_prologue (struct frame_info *this_frame,
1756 struct arm_prologue_cache *cache)
1757 {
1758 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1759 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1760 CORE_ADDR prologue_start, prologue_end;
1761 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1762 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
1763
1764 /* Assume there is no frame until proven otherwise. */
1765 cache->framereg = ARM_SP_REGNUM;
1766 cache->framesize = 0;
1767
1768 /* Check for Thumb prologue. */
1769 if (arm_frame_is_thumb (this_frame))
1770 {
1771 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1772 return;
1773 }
1774
1775 /* Find the function prologue. If we can't find the function in
1776 the symbol table, peek in the stack frame to find the PC. */
1777 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1778 &prologue_end))
1779 {
1780 /* One way to find the end of the prologue (which works well
1781 for unoptimized code) is to do the following:
1782
1783 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1784
1785 if (sal.line == 0)
1786 prologue_end = prev_pc;
1787 else if (sal.end < prologue_end)
1788 prologue_end = sal.end;
1789
1790 This mechanism is very accurate so long as the optimizer
1791 doesn't move any instructions from the function body into the
1792 prologue. If this happens, sal.end will be the last
1793 instruction in the first hunk of prologue code just before
1794 the first instruction that the scheduler has moved from
1795 the body to the prologue.
1796
1797 In order to make sure that we scan all of the prologue
1798 instructions, we use a slightly less accurate mechanism which
1799 may scan more than necessary. To help compensate for this
1800 lack of accuracy, the prologue scanning loop below contains
1801 several clauses which'll cause the loop to terminate early if
1802 an implausible prologue instruction is encountered.
1803
1804 The expression
1805
1806 prologue_start + 64
1807
1808 is a suitable endpoint since it accounts for the largest
1809 possible prologue plus up to five instructions inserted by
1810 the scheduler. */
1811
1812 if (prologue_end > prologue_start + 64)
1813 {
1814 prologue_end = prologue_start + 64; /* See above. */
1815 }
1816 }
1817 else
1818 {
1819 /* We have no symbol information. Our only option is to assume this
1820 function has a standard stack frame and the normal frame register.
1821 Then, we can find the value of our frame pointer on entrance to
1822 the callee (or at the present moment if this is the innermost frame).
1823 The value stored there should be the address of the stmfd + 8. */
1824 CORE_ADDR frame_loc;
1825 ULONGEST return_value;
1826
1827 /* AAPCS does not use a frame register, so we can abort here. */
1828 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_AAPCS)
1829 return;
1830
1831 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
1832 if (!safe_read_memory_unsigned_integer (frame_loc, 4, byte_order,
1833 &return_value))
1834 return;
1835 else
1836 {
1837 prologue_start = gdbarch_addr_bits_remove
1838 (gdbarch, return_value) - 8;
1839 prologue_end = prologue_start + 64; /* See above. */
1840 }
1841 }
1842
1843 if (prev_pc < prologue_end)
1844 prologue_end = prev_pc;
1845
1846 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1847 }
1848
1849 static struct arm_prologue_cache *
1850 arm_make_prologue_cache (struct frame_info *this_frame)
1851 {
1852 int reg;
1853 struct arm_prologue_cache *cache;
1854 CORE_ADDR unwound_fp;
1855
1856 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
1857 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
1858
1859 arm_scan_prologue (this_frame, cache);
1860
1861 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
1862 if (unwound_fp == 0)
1863 return cache;
1864
1865 cache->prev_sp = unwound_fp + cache->framesize;
1866
1867 /* Calculate actual addresses of saved registers using offsets
1868 determined by arm_scan_prologue. */
1869 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
1870 if (trad_frame_addr_p (cache->saved_regs, reg))
1871 cache->saved_regs[reg].addr += cache->prev_sp;
1872
1873 return cache;
1874 }
1875
1876 /* Implementation of the stop_reason hook for arm_prologue frames. */
1877
1878 static enum unwind_stop_reason
1879 arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
1880 void **this_cache)
1881 {
1882 struct arm_prologue_cache *cache;
1883 CORE_ADDR pc;
1884
1885 if (*this_cache == NULL)
1886 *this_cache = arm_make_prologue_cache (this_frame);
1887 cache = (struct arm_prologue_cache *) *this_cache;
1888
1889 /* This is meant to halt the backtrace at "_start". */
1890 pc = get_frame_pc (this_frame);
1891 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
1892 return UNWIND_OUTERMOST;
1893
1894 /* If we've hit a wall, stop. */
1895 if (cache->prev_sp == 0)
1896 return UNWIND_OUTERMOST;
1897
1898 return UNWIND_NO_REASON;
1899 }
1900
1901 /* Our frame ID for a normal frame is the current function's starting PC
1902 and the caller's SP when we were called. */
1903
1904 static void
1905 arm_prologue_this_id (struct frame_info *this_frame,
1906 void **this_cache,
1907 struct frame_id *this_id)
1908 {
1909 struct arm_prologue_cache *cache;
1910 struct frame_id id;
1911 CORE_ADDR pc, func;
1912
1913 if (*this_cache == NULL)
1914 *this_cache = arm_make_prologue_cache (this_frame);
1915 cache = (struct arm_prologue_cache *) *this_cache;
1916
1917 /* Use function start address as part of the frame ID. If we cannot
1918 identify the start address (due to missing symbol information),
1919 fall back to just using the current PC. */
1920 pc = get_frame_pc (this_frame);
1921 func = get_frame_func (this_frame);
1922 if (!func)
1923 func = pc;
1924
1925 id = frame_id_build (cache->prev_sp, func);
1926 *this_id = id;
1927 }
1928
1929 static struct value *
1930 arm_prologue_prev_register (struct frame_info *this_frame,
1931 void **this_cache,
1932 int prev_regnum)
1933 {
1934 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1935 struct arm_prologue_cache *cache;
1936
1937 if (*this_cache == NULL)
1938 *this_cache = arm_make_prologue_cache (this_frame);
1939 cache = (struct arm_prologue_cache *) *this_cache;
1940
1941 /* If we are asked to unwind the PC, then we need to return the LR
1942 instead. The prologue may save PC, but it will point into this
1943 frame's prologue, not the next frame's resume location. Also
1944 strip the saved T bit. A valid LR may have the low bit set, but
1945 a valid PC never does. */
1946 if (prev_regnum == ARM_PC_REGNUM)
1947 {
1948 CORE_ADDR lr;
1949
1950 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1951 return frame_unwind_got_constant (this_frame, prev_regnum,
1952 arm_addr_bits_remove (gdbarch, lr));
1953 }
1954
1955 /* SP is generally not saved to the stack, but this frame is
1956 identified by the next frame's stack pointer at the time of the call.
1957 The value was already reconstructed into PREV_SP. */
1958 if (prev_regnum == ARM_SP_REGNUM)
1959 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
1960
1961 /* The CPSR may have been changed by the call instruction and by the
1962 called function. The only bit we can reconstruct is the T bit,
1963 by checking the low bit of LR as of the call. This is a reliable
1964 indicator of Thumb-ness except for some ARM v4T pre-interworking
1965 Thumb code, which could get away with a clear low bit as long as
1966 the called function did not use bx. Guess that all other
1967 bits are unchanged; the condition flags are presumably lost,
1968 but the processor status is likely valid. */
1969 if (prev_regnum == ARM_PS_REGNUM)
1970 {
1971 CORE_ADDR lr, cpsr;
1972 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
1973
1974 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
1975 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1976 if (IS_THUMB_ADDR (lr))
1977 cpsr |= t_bit;
1978 else
1979 cpsr &= ~t_bit;
1980 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
1981 }
1982
1983 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
1984 prev_regnum);
1985 }
1986
1987 struct frame_unwind arm_prologue_unwind = {
1988 NORMAL_FRAME,
1989 arm_prologue_unwind_stop_reason,
1990 arm_prologue_this_id,
1991 arm_prologue_prev_register,
1992 NULL,
1993 default_frame_sniffer
1994 };
1995
1996 /* Maintain a list of ARM exception table entries per objfile, similar to the
1997 list of mapping symbols. We only cache entries for standard ARM-defined
1998 personality routines; the cache will contain only the frame unwinding
1999 instructions associated with the entry (not the descriptors). */
2000
2001 struct arm_exidx_entry
2002 {
2003 CORE_ADDR addr;
2004 gdb_byte *entry;
2005
2006 bool operator< (const arm_exidx_entry &other) const
2007 {
2008 return addr < other.addr;
2009 }
2010 };
2011
2012 struct arm_exidx_data
2013 {
2014 std::vector<std::vector<arm_exidx_entry>> section_maps;
2015 };
2016
2017 /* Per-BFD key to store exception handling information. */
2018 static const struct bfd_key<arm_exidx_data> arm_exidx_data_key;
2019
2020 static struct obj_section *
2021 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2022 {
2023 struct obj_section *osect;
2024
2025 ALL_OBJFILE_OSECTIONS (objfile, osect)
2026 if (bfd_section_flags (osect->the_bfd_section) & SEC_ALLOC)
2027 {
2028 bfd_vma start, size;
2029 start = bfd_section_vma (osect->the_bfd_section);
2030 size = bfd_section_size (osect->the_bfd_section);
2031
2032 if (start <= vma && vma < start + size)
2033 return osect;
2034 }
2035
2036 return NULL;
2037 }
2038
2039 /* Parse contents of exception table and exception index sections
2040 of OBJFILE, and fill in the exception table entry cache.
2041
2042 For each entry that refers to a standard ARM-defined personality
2043 routine, extract the frame unwinding instructions (from either
2044 the index or the table section). The unwinding instructions
2045 are normalized by:
2046 - extracting them from the rest of the table data
2047 - converting to host endianness
2048 - appending the implicit 0xb0 ("Finish") code
2049
2050 The extracted and normalized instructions are stored for later
2051 retrieval by the arm_find_exidx_entry routine. */
2052
2053 static void
2054 arm_exidx_new_objfile (struct objfile *objfile)
2055 {
2056 struct arm_exidx_data *data;
2057 asection *exidx, *extab;
2058 bfd_vma exidx_vma = 0, extab_vma = 0;
2059 LONGEST i;
2060
2061 /* If we've already touched this file, do nothing. */
2062 if (!objfile || arm_exidx_data_key.get (objfile->obfd) != NULL)
2063 return;
2064
2065 /* Read contents of exception table and index. */
2066 exidx = bfd_get_section_by_name (objfile->obfd, ELF_STRING_ARM_unwind);
2067 gdb::byte_vector exidx_data;
2068 if (exidx)
2069 {
2070 exidx_vma = bfd_section_vma (exidx);
2071 exidx_data.resize (bfd_section_size (exidx));
2072
2073 if (!bfd_get_section_contents (objfile->obfd, exidx,
2074 exidx_data.data (), 0,
2075 exidx_data.size ()))
2076 return;
2077 }
2078
2079 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2080 gdb::byte_vector extab_data;
2081 if (extab)
2082 {
2083 extab_vma = bfd_section_vma (extab);
2084 extab_data.resize (bfd_section_size (extab));
2085
2086 if (!bfd_get_section_contents (objfile->obfd, extab,
2087 extab_data.data (), 0,
2088 extab_data.size ()))
2089 return;
2090 }
2091
2092 /* Allocate exception table data structure. */
2093 data = arm_exidx_data_key.emplace (objfile->obfd);
2094 data->section_maps.resize (objfile->obfd->section_count);
2095
2096 /* Fill in exception table. */
2097 for (i = 0; i < exidx_data.size () / 8; i++)
2098 {
2099 struct arm_exidx_entry new_exidx_entry;
2100 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data.data () + i * 8);
2101 bfd_vma val = bfd_h_get_32 (objfile->obfd,
2102 exidx_data.data () + i * 8 + 4);
2103 bfd_vma addr = 0, word = 0;
2104 int n_bytes = 0, n_words = 0;
2105 struct obj_section *sec;
2106 gdb_byte *entry = NULL;
2107
2108 /* Extract address of start of function. */
2109 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2110 idx += exidx_vma + i * 8;
2111
2112 /* Find section containing function and compute section offset. */
2113 sec = arm_obj_section_from_vma (objfile, idx);
2114 if (sec == NULL)
2115 continue;
2116 idx -= bfd_section_vma (sec->the_bfd_section);
2117
2118 /* Determine address of exception table entry. */
2119 if (val == 1)
2120 {
2121 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2122 }
2123 else if ((val & 0xff000000) == 0x80000000)
2124 {
2125 /* Exception table entry embedded in .ARM.exidx
2126 -- must be short form. */
2127 word = val;
2128 n_bytes = 3;
2129 }
2130 else if (!(val & 0x80000000))
2131 {
2132 /* Exception table entry in .ARM.extab. */
2133 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2134 addr += exidx_vma + i * 8 + 4;
2135
2136 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_data.size ())
2137 {
2138 word = bfd_h_get_32 (objfile->obfd,
2139 extab_data.data () + addr - extab_vma);
2140 addr += 4;
2141
2142 if ((word & 0xff000000) == 0x80000000)
2143 {
2144 /* Short form. */
2145 n_bytes = 3;
2146 }
2147 else if ((word & 0xff000000) == 0x81000000
2148 || (word & 0xff000000) == 0x82000000)
2149 {
2150 /* Long form. */
2151 n_bytes = 2;
2152 n_words = ((word >> 16) & 0xff);
2153 }
2154 else if (!(word & 0x80000000))
2155 {
2156 bfd_vma pers;
2157 struct obj_section *pers_sec;
2158 int gnu_personality = 0;
2159
2160 /* Custom personality routine. */
2161 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2162 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2163
2164 /* Check whether we've got one of the variants of the
2165 GNU personality routines. */
2166 pers_sec = arm_obj_section_from_vma (objfile, pers);
2167 if (pers_sec)
2168 {
2169 static const char *personality[] =
2170 {
2171 "__gcc_personality_v0",
2172 "__gxx_personality_v0",
2173 "__gcj_personality_v0",
2174 "__gnu_objc_personality_v0",
2175 NULL
2176 };
2177
2178 CORE_ADDR pc = pers + obj_section_offset (pers_sec);
2179 int k;
2180
2181 for (k = 0; personality[k]; k++)
2182 if (lookup_minimal_symbol_by_pc_name
2183 (pc, personality[k], objfile))
2184 {
2185 gnu_personality = 1;
2186 break;
2187 }
2188 }
2189
2190 /* If so, the next word contains a word count in the high
2191 byte, followed by the same unwind instructions as the
2192 pre-defined forms. */
2193 if (gnu_personality
2194 && addr + 4 <= extab_vma + extab_data.size ())
2195 {
2196 word = bfd_h_get_32 (objfile->obfd,
2197 (extab_data.data ()
2198 + addr - extab_vma));
2199 addr += 4;
2200 n_bytes = 3;
2201 n_words = ((word >> 24) & 0xff);
2202 }
2203 }
2204 }
2205 }
2206
2207 /* Sanity check address. */
2208 if (n_words)
2209 if (addr < extab_vma
2210 || addr + 4 * n_words > extab_vma + extab_data.size ())
2211 n_words = n_bytes = 0;
2212
2213 /* The unwind instructions reside in WORD (only the N_BYTES least
2214 significant bytes are valid), followed by N_WORDS words in the
2215 extab section starting at ADDR. */
2216 if (n_bytes || n_words)
2217 {
2218 gdb_byte *p = entry
2219 = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
2220 n_bytes + n_words * 4 + 1);
2221
2222 while (n_bytes--)
2223 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2224
2225 while (n_words--)
2226 {
2227 word = bfd_h_get_32 (objfile->obfd,
2228 extab_data.data () + addr - extab_vma);
2229 addr += 4;
2230
2231 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2232 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2233 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2234 *p++ = (gdb_byte) (word & 0xff);
2235 }
2236
2237 /* Implied "Finish" to terminate the list. */
2238 *p++ = 0xb0;
2239 }
2240
2241 /* Push entry onto vector. They are guaranteed to always
2242 appear in order of increasing addresses. */
2243 new_exidx_entry.addr = idx;
2244 new_exidx_entry.entry = entry;
2245 data->section_maps[sec->the_bfd_section->index].push_back
2246 (new_exidx_entry);
2247 }
2248 }
2249
2250 /* Search for the exception table entry covering MEMADDR. If one is found,
2251 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2252 set *START to the start of the region covered by this entry. */
2253
2254 static gdb_byte *
2255 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2256 {
2257 struct obj_section *sec;
2258
2259 sec = find_pc_section (memaddr);
2260 if (sec != NULL)
2261 {
2262 struct arm_exidx_data *data;
2263 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2264
2265 data = arm_exidx_data_key.get (sec->objfile->obfd);
2266 if (data != NULL)
2267 {
2268 std::vector<arm_exidx_entry> &map
2269 = data->section_maps[sec->the_bfd_section->index];
2270 if (!map.empty ())
2271 {
2272 auto idx = std::lower_bound (map.begin (), map.end (), map_key);
2273
2274 /* std::lower_bound finds the earliest ordered insertion
2275 point. If the following symbol starts at this exact
2276 address, we use that; otherwise, the preceding
2277 exception table entry covers this address. */
2278 if (idx < map.end ())
2279 {
2280 if (idx->addr == map_key.addr)
2281 {
2282 if (start)
2283 *start = idx->addr + obj_section_addr (sec);
2284 return idx->entry;
2285 }
2286 }
2287
2288 if (idx > map.begin ())
2289 {
2290 idx = idx - 1;
2291 if (start)
2292 *start = idx->addr + obj_section_addr (sec);
2293 return idx->entry;
2294 }
2295 }
2296 }
2297 }
2298
2299 return NULL;
2300 }
2301
2302 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2303 instruction list from the ARM exception table entry ENTRY, allocate and
2304 return a prologue cache structure describing how to unwind this frame.
2305
2306 Return NULL if the unwinding instruction list contains a "spare",
2307 "reserved" or "refuse to unwind" instruction as defined in section
2308 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2309 for the ARM Architecture" document. */
2310
2311 static struct arm_prologue_cache *
2312 arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2313 {
2314 CORE_ADDR vsp = 0;
2315 int vsp_valid = 0;
2316
2317 struct arm_prologue_cache *cache;
2318 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2319 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2320
2321 for (;;)
2322 {
2323 gdb_byte insn;
2324
2325 /* Whenever we reload SP, we actually have to retrieve its
2326 actual value in the current frame. */
2327 if (!vsp_valid)
2328 {
2329 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2330 {
2331 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2332 vsp = get_frame_register_unsigned (this_frame, reg);
2333 }
2334 else
2335 {
2336 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
2337 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2338 }
2339
2340 vsp_valid = 1;
2341 }
2342
2343 /* Decode next unwind instruction. */
2344 insn = *entry++;
2345
2346 if ((insn & 0xc0) == 0)
2347 {
2348 int offset = insn & 0x3f;
2349 vsp += (offset << 2) + 4;
2350 }
2351 else if ((insn & 0xc0) == 0x40)
2352 {
2353 int offset = insn & 0x3f;
2354 vsp -= (offset << 2) + 4;
2355 }
2356 else if ((insn & 0xf0) == 0x80)
2357 {
2358 int mask = ((insn & 0xf) << 8) | *entry++;
2359 int i;
2360
2361 /* The special case of an all-zero mask identifies
2362 "Refuse to unwind". We return NULL to fall back
2363 to the prologue analyzer. */
2364 if (mask == 0)
2365 return NULL;
2366
2367 /* Pop registers r4..r15 under mask. */
2368 for (i = 0; i < 12; i++)
2369 if (mask & (1 << i))
2370 {
2371 cache->saved_regs[4 + i].addr = vsp;
2372 vsp += 4;
2373 }
2374
2375 /* Special-case popping SP -- we need to reload vsp. */
2376 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2377 vsp_valid = 0;
2378 }
2379 else if ((insn & 0xf0) == 0x90)
2380 {
2381 int reg = insn & 0xf;
2382
2383 /* Reserved cases. */
2384 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2385 return NULL;
2386
2387 /* Set SP from another register and mark VSP for reload. */
2388 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2389 vsp_valid = 0;
2390 }
2391 else if ((insn & 0xf0) == 0xa0)
2392 {
2393 int count = insn & 0x7;
2394 int pop_lr = (insn & 0x8) != 0;
2395 int i;
2396
2397 /* Pop r4..r[4+count]. */
2398 for (i = 0; i <= count; i++)
2399 {
2400 cache->saved_regs[4 + i].addr = vsp;
2401 vsp += 4;
2402 }
2403
2404 /* If indicated by flag, pop LR as well. */
2405 if (pop_lr)
2406 {
2407 cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
2408 vsp += 4;
2409 }
2410 }
2411 else if (insn == 0xb0)
2412 {
2413 /* We could only have updated PC by popping into it; if so, it
2414 will show up as address. Otherwise, copy LR into PC. */
2415 if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
2416 cache->saved_regs[ARM_PC_REGNUM]
2417 = cache->saved_regs[ARM_LR_REGNUM];
2418
2419 /* We're done. */
2420 break;
2421 }
2422 else if (insn == 0xb1)
2423 {
2424 int mask = *entry++;
2425 int i;
2426
2427 /* All-zero mask and mask >= 16 is "spare". */
2428 if (mask == 0 || mask >= 16)
2429 return NULL;
2430
2431 /* Pop r0..r3 under mask. */
2432 for (i = 0; i < 4; i++)
2433 if (mask & (1 << i))
2434 {
2435 cache->saved_regs[i].addr = vsp;
2436 vsp += 4;
2437 }
2438 }
2439 else if (insn == 0xb2)
2440 {
2441 ULONGEST offset = 0;
2442 unsigned shift = 0;
2443
2444 do
2445 {
2446 offset |= (*entry & 0x7f) << shift;
2447 shift += 7;
2448 }
2449 while (*entry++ & 0x80);
2450
2451 vsp += 0x204 + (offset << 2);
2452 }
2453 else if (insn == 0xb3)
2454 {
2455 int start = *entry >> 4;
2456 int count = (*entry++) & 0xf;
2457 int i;
2458
2459 /* Only registers D0..D15 are valid here. */
2460 if (start + count >= 16)
2461 return NULL;
2462
2463 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2464 for (i = 0; i <= count; i++)
2465 {
2466 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2467 vsp += 8;
2468 }
2469
2470 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2471 vsp += 4;
2472 }
2473 else if ((insn & 0xf8) == 0xb8)
2474 {
2475 int count = insn & 0x7;
2476 int i;
2477
2478 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2479 for (i = 0; i <= count; i++)
2480 {
2481 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2482 vsp += 8;
2483 }
2484
2485 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2486 vsp += 4;
2487 }
2488 else if (insn == 0xc6)
2489 {
2490 int start = *entry >> 4;
2491 int count = (*entry++) & 0xf;
2492 int i;
2493
2494 /* Only registers WR0..WR15 are valid. */
2495 if (start + count >= 16)
2496 return NULL;
2497
2498 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2499 for (i = 0; i <= count; i++)
2500 {
2501 cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
2502 vsp += 8;
2503 }
2504 }
2505 else if (insn == 0xc7)
2506 {
2507 int mask = *entry++;
2508 int i;
2509
2510 /* All-zero mask and mask >= 16 is "spare". */
2511 if (mask == 0 || mask >= 16)
2512 return NULL;
2513
2514 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2515 for (i = 0; i < 4; i++)
2516 if (mask & (1 << i))
2517 {
2518 cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
2519 vsp += 4;
2520 }
2521 }
2522 else if ((insn & 0xf8) == 0xc0)
2523 {
2524 int count = insn & 0x7;
2525 int i;
2526
2527 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2528 for (i = 0; i <= count; i++)
2529 {
2530 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
2531 vsp += 8;
2532 }
2533 }
2534 else if (insn == 0xc8)
2535 {
2536 int start = *entry >> 4;
2537 int count = (*entry++) & 0xf;
2538 int i;
2539
2540 /* Only registers D0..D31 are valid. */
2541 if (start + count >= 16)
2542 return NULL;
2543
2544 /* Pop VFP double-precision registers
2545 D[16+start]..D[16+start+count]. */
2546 for (i = 0; i <= count; i++)
2547 {
2548 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
2549 vsp += 8;
2550 }
2551 }
2552 else if (insn == 0xc9)
2553 {
2554 int start = *entry >> 4;
2555 int count = (*entry++) & 0xf;
2556 int i;
2557
2558 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2559 for (i = 0; i <= count; i++)
2560 {
2561 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2562 vsp += 8;
2563 }
2564 }
2565 else if ((insn & 0xf8) == 0xd0)
2566 {
2567 int count = insn & 0x7;
2568 int i;
2569
2570 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2571 for (i = 0; i <= count; i++)
2572 {
2573 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2574 vsp += 8;
2575 }
2576 }
2577 else
2578 {
2579 /* Everything else is "spare". */
2580 return NULL;
2581 }
2582 }
2583
2584 /* If we restore SP from a register, assume this was the frame register.
2585 Otherwise just fall back to SP as frame register. */
2586 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2587 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2588 else
2589 cache->framereg = ARM_SP_REGNUM;
2590
2591 /* Determine offset to previous frame. */
2592 cache->framesize
2593 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2594
2595 /* We already got the previous SP. */
2596 cache->prev_sp = vsp;
2597
2598 return cache;
2599 }
2600
2601 /* Unwinding via ARM exception table entries. Note that the sniffer
2602 already computes a filled-in prologue cache, which is then used
2603 with the same arm_prologue_this_id and arm_prologue_prev_register
2604 routines also used for prologue-parsing based unwinding. */
2605
2606 static int
2607 arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2608 struct frame_info *this_frame,
2609 void **this_prologue_cache)
2610 {
2611 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2612 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2613 CORE_ADDR addr_in_block, exidx_region, func_start;
2614 struct arm_prologue_cache *cache;
2615 gdb_byte *entry;
2616
2617 /* See if we have an ARM exception table entry covering this address. */
2618 addr_in_block = get_frame_address_in_block (this_frame);
2619 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2620 if (!entry)
2621 return 0;
2622
2623 /* The ARM exception table does not describe unwind information
2624 for arbitrary PC values, but is guaranteed to be correct only
2625 at call sites. We have to decide here whether we want to use
2626 ARM exception table information for this frame, or fall back
2627 to using prologue parsing. (Note that if we have DWARF CFI,
2628 this sniffer isn't even called -- CFI is always preferred.)
2629
2630 Before we make this decision, however, we check whether we
2631 actually have *symbol* information for the current frame.
2632 If not, prologue parsing would not work anyway, so we might
2633 as well use the exception table and hope for the best. */
2634 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2635 {
2636 int exc_valid = 0;
2637
2638 /* If the next frame is "normal", we are at a call site in this
2639 frame, so exception information is guaranteed to be valid. */
2640 if (get_next_frame (this_frame)
2641 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2642 exc_valid = 1;
2643
2644 /* We also assume exception information is valid if we're currently
2645 blocked in a system call. The system library is supposed to
2646 ensure this, so that e.g. pthread cancellation works. */
2647 if (arm_frame_is_thumb (this_frame))
2648 {
2649 ULONGEST insn;
2650
2651 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 2,
2652 2, byte_order_for_code, &insn)
2653 && (insn & 0xff00) == 0xdf00 /* svc */)
2654 exc_valid = 1;
2655 }
2656 else
2657 {
2658 ULONGEST insn;
2659
2660 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 4,
2661 4, byte_order_for_code, &insn)
2662 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2663 exc_valid = 1;
2664 }
2665
2666 /* Bail out if we don't know that exception information is valid. */
2667 if (!exc_valid)
2668 return 0;
2669
2670 /* The ARM exception index does not mark the *end* of the region
2671 covered by the entry, and some functions will not have any entry.
2672 To correctly recognize the end of the covered region, the linker
2673 should have inserted dummy records with a CANTUNWIND marker.
2674
2675 Unfortunately, current versions of GNU ld do not reliably do
2676 this, and thus we may have found an incorrect entry above.
2677 As a (temporary) sanity check, we only use the entry if it
2678 lies *within* the bounds of the function. Note that this check
2679 might reject perfectly valid entries that just happen to cover
2680 multiple functions; therefore this check ought to be removed
2681 once the linker is fixed. */
2682 if (func_start > exidx_region)
2683 return 0;
2684 }
2685
2686 /* Decode the list of unwinding instructions into a prologue cache.
2687 Note that this may fail due to e.g. a "refuse to unwind" code. */
2688 cache = arm_exidx_fill_cache (this_frame, entry);
2689 if (!cache)
2690 return 0;
2691
2692 *this_prologue_cache = cache;
2693 return 1;
2694 }
2695
2696 struct frame_unwind arm_exidx_unwind = {
2697 NORMAL_FRAME,
2698 default_frame_unwind_stop_reason,
2699 arm_prologue_this_id,
2700 arm_prologue_prev_register,
2701 NULL,
2702 arm_exidx_unwind_sniffer
2703 };
2704
2705 static struct arm_prologue_cache *
2706 arm_make_epilogue_frame_cache (struct frame_info *this_frame)
2707 {
2708 struct arm_prologue_cache *cache;
2709 int reg;
2710
2711 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2712 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2713
2714 /* Still rely on the offset calculated from prologue. */
2715 arm_scan_prologue (this_frame, cache);
2716
2717 /* Since we are in epilogue, the SP has been restored. */
2718 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2719
2720 /* Calculate actual addresses of saved registers using offsets
2721 determined by arm_scan_prologue. */
2722 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2723 if (trad_frame_addr_p (cache->saved_regs, reg))
2724 cache->saved_regs[reg].addr += cache->prev_sp;
2725
2726 return cache;
2727 }
2728
2729 /* Implementation of function hook 'this_id' in
2730 'struct frame_uwnind' for epilogue unwinder. */
2731
2732 static void
2733 arm_epilogue_frame_this_id (struct frame_info *this_frame,
2734 void **this_cache,
2735 struct frame_id *this_id)
2736 {
2737 struct arm_prologue_cache *cache;
2738 CORE_ADDR pc, func;
2739
2740 if (*this_cache == NULL)
2741 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2742 cache = (struct arm_prologue_cache *) *this_cache;
2743
2744 /* Use function start address as part of the frame ID. If we cannot
2745 identify the start address (due to missing symbol information),
2746 fall back to just using the current PC. */
2747 pc = get_frame_pc (this_frame);
2748 func = get_frame_func (this_frame);
2749 if (func == 0)
2750 func = pc;
2751
2752 (*this_id) = frame_id_build (cache->prev_sp, pc);
2753 }
2754
2755 /* Implementation of function hook 'prev_register' in
2756 'struct frame_uwnind' for epilogue unwinder. */
2757
2758 static struct value *
2759 arm_epilogue_frame_prev_register (struct frame_info *this_frame,
2760 void **this_cache, int regnum)
2761 {
2762 if (*this_cache == NULL)
2763 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2764
2765 return arm_prologue_prev_register (this_frame, this_cache, regnum);
2766 }
2767
2768 static int arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch,
2769 CORE_ADDR pc);
2770 static int thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch,
2771 CORE_ADDR pc);
2772
2773 /* Implementation of function hook 'sniffer' in
2774 'struct frame_uwnind' for epilogue unwinder. */
2775
2776 static int
2777 arm_epilogue_frame_sniffer (const struct frame_unwind *self,
2778 struct frame_info *this_frame,
2779 void **this_prologue_cache)
2780 {
2781 if (frame_relative_level (this_frame) == 0)
2782 {
2783 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2784 CORE_ADDR pc = get_frame_pc (this_frame);
2785
2786 if (arm_frame_is_thumb (this_frame))
2787 return thumb_stack_frame_destroyed_p (gdbarch, pc);
2788 else
2789 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
2790 }
2791 else
2792 return 0;
2793 }
2794
2795 /* Frame unwinder from epilogue. */
2796
2797 static const struct frame_unwind arm_epilogue_frame_unwind =
2798 {
2799 NORMAL_FRAME,
2800 default_frame_unwind_stop_reason,
2801 arm_epilogue_frame_this_id,
2802 arm_epilogue_frame_prev_register,
2803 NULL,
2804 arm_epilogue_frame_sniffer,
2805 };
2806
2807 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2808 trampoline, return the target PC. Otherwise return 0.
2809
2810 void call0a (char c, short s, int i, long l) {}
2811
2812 int main (void)
2813 {
2814 (*pointer_to_call0a) (c, s, i, l);
2815 }
2816
2817 Instead of calling a stub library function _call_via_xx (xx is
2818 the register name), GCC may inline the trampoline in the object
2819 file as below (register r2 has the address of call0a).
2820
2821 .global main
2822 .type main, %function
2823 ...
2824 bl .L1
2825 ...
2826 .size main, .-main
2827
2828 .L1:
2829 bx r2
2830
2831 The trampoline 'bx r2' doesn't belong to main. */
2832
2833 static CORE_ADDR
2834 arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2835 {
2836 /* The heuristics of recognizing such trampoline is that FRAME is
2837 executing in Thumb mode and the instruction on PC is 'bx Rm'. */
2838 if (arm_frame_is_thumb (frame))
2839 {
2840 gdb_byte buf[2];
2841
2842 if (target_read_memory (pc, buf, 2) == 0)
2843 {
2844 struct gdbarch *gdbarch = get_frame_arch (frame);
2845 enum bfd_endian byte_order_for_code
2846 = gdbarch_byte_order_for_code (gdbarch);
2847 uint16_t insn
2848 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2849
2850 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2851 {
2852 CORE_ADDR dest
2853 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2854
2855 /* Clear the LSB so that gdb core sets step-resume
2856 breakpoint at the right address. */
2857 return UNMAKE_THUMB_ADDR (dest);
2858 }
2859 }
2860 }
2861
2862 return 0;
2863 }
2864
2865 static struct arm_prologue_cache *
2866 arm_make_stub_cache (struct frame_info *this_frame)
2867 {
2868 struct arm_prologue_cache *cache;
2869
2870 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2871 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2872
2873 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2874
2875 return cache;
2876 }
2877
2878 /* Our frame ID for a stub frame is the current SP and LR. */
2879
2880 static void
2881 arm_stub_this_id (struct frame_info *this_frame,
2882 void **this_cache,
2883 struct frame_id *this_id)
2884 {
2885 struct arm_prologue_cache *cache;
2886
2887 if (*this_cache == NULL)
2888 *this_cache = arm_make_stub_cache (this_frame);
2889 cache = (struct arm_prologue_cache *) *this_cache;
2890
2891 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
2892 }
2893
2894 static int
2895 arm_stub_unwind_sniffer (const struct frame_unwind *self,
2896 struct frame_info *this_frame,
2897 void **this_prologue_cache)
2898 {
2899 CORE_ADDR addr_in_block;
2900 gdb_byte dummy[4];
2901 CORE_ADDR pc, start_addr;
2902 const char *name;
2903
2904 addr_in_block = get_frame_address_in_block (this_frame);
2905 pc = get_frame_pc (this_frame);
2906 if (in_plt_section (addr_in_block)
2907 /* We also use the stub winder if the target memory is unreadable
2908 to avoid having the prologue unwinder trying to read it. */
2909 || target_read_memory (pc, dummy, 4) != 0)
2910 return 1;
2911
2912 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2913 && arm_skip_bx_reg (this_frame, pc) != 0)
2914 return 1;
2915
2916 return 0;
2917 }
2918
2919 struct frame_unwind arm_stub_unwind = {
2920 NORMAL_FRAME,
2921 default_frame_unwind_stop_reason,
2922 arm_stub_this_id,
2923 arm_prologue_prev_register,
2924 NULL,
2925 arm_stub_unwind_sniffer
2926 };
2927
2928 /* Put here the code to store, into CACHE->saved_regs, the addresses
2929 of the saved registers of frame described by THIS_FRAME. CACHE is
2930 returned. */
2931
2932 static struct arm_prologue_cache *
2933 arm_m_exception_cache (struct frame_info *this_frame)
2934 {
2935 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2936 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2937 struct arm_prologue_cache *cache;
2938 CORE_ADDR lr;
2939 CORE_ADDR sp;
2940 CORE_ADDR unwound_sp;
2941 LONGEST xpsr;
2942 uint32_t exc_return;
2943 uint32_t process_stack_used;
2944 uint32_t extended_frame_used;
2945 uint32_t secure_stack_used;
2946
2947 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2948 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2949
2950 /* ARMv7-M Architecture Reference "B1.5.6 Exception entry behavior"
2951 describes which bits in LR that define which stack was used prior
2952 to the exception and if FPU is used (causing extended stack frame). */
2953
2954 lr = get_frame_register_unsigned (this_frame, ARM_LR_REGNUM);
2955 sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2956
2957 /* Check EXC_RETURN indicator bits. */
2958 exc_return = (((lr >> 28) & 0xf) == 0xf);
2959
2960 /* Check EXC_RETURN bit SPSEL if Main or Thread (process) stack used. */
2961 process_stack_used = ((lr & (1 << 2)) != 0);
2962 if (exc_return && process_stack_used)
2963 {
2964 /* Thread (process) stack used.
2965 Potentially this could be other register defined by target, but PSP
2966 can be considered a standard name for the "Process Stack Pointer".
2967 To be fully aware of system registers like MSP and PSP, these could
2968 be added to a separate XML arm-m-system-profile that is valid for
2969 ARMv6-M and ARMv7-M architectures. Also to be able to debug eg a
2970 corefile off-line, then these registers must be defined by GDB,
2971 and also be included in the corefile regsets. */
2972
2973 int psp_regnum = user_reg_map_name_to_regnum (gdbarch, "psp", -1);
2974 if (psp_regnum == -1)
2975 {
2976 /* Thread (process) stack could not be fetched,
2977 give warning and exit. */
2978
2979 warning (_("no PSP thread stack unwinding supported."));
2980
2981 /* Terminate any further stack unwinding by refer to self. */
2982 cache->prev_sp = sp;
2983 return cache;
2984 }
2985 else
2986 {
2987 /* Thread (process) stack used, use PSP as SP. */
2988 unwound_sp = get_frame_register_unsigned (this_frame, psp_regnum);
2989 }
2990 }
2991 else
2992 {
2993 /* Main stack used, use MSP as SP. */
2994 unwound_sp = sp;
2995 }
2996
2997 /* The hardware saves eight 32-bit words, comprising xPSR,
2998 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
2999 "B1.5.6 Exception entry behavior" in
3000 "ARMv7-M Architecture Reference Manual". */
3001 cache->saved_regs[0].addr = unwound_sp;
3002 cache->saved_regs[1].addr = unwound_sp + 4;
3003 cache->saved_regs[2].addr = unwound_sp + 8;
3004 cache->saved_regs[3].addr = unwound_sp + 12;
3005 cache->saved_regs[ARM_IP_REGNUM].addr = unwound_sp + 16;
3006 cache->saved_regs[ARM_LR_REGNUM].addr = unwound_sp + 20;
3007 cache->saved_regs[ARM_PC_REGNUM].addr = unwound_sp + 24;
3008 cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;
3009
3010 /* Check EXC_RETURN bit FTYPE if extended stack frame (FPU regs stored)
3011 type used. */
3012 extended_frame_used = ((lr & (1 << 4)) == 0);
3013 if (exc_return && extended_frame_used)
3014 {
3015 int i;
3016 int fpu_regs_stack_offset;
3017
3018 /* This code does not take into account the lazy stacking, see "Lazy
3019 context save of FP state", in B1.5.7, also ARM AN298, supported
3020 by Cortex-M4F architecture.
3021 To fully handle this the FPCCR register (Floating-point Context
3022 Control Register) needs to be read out and the bits ASPEN and LSPEN
3023 could be checked to setup correct lazy stacked FP registers.
3024 This register is located at address 0xE000EF34. */
3025
3026 /* Extended stack frame type used. */
3027 fpu_regs_stack_offset = unwound_sp + 0x20;
3028 for (i = 0; i < 16; i++)
3029 {
3030 cache->saved_regs[ARM_D0_REGNUM + i].addr = fpu_regs_stack_offset;
3031 fpu_regs_stack_offset += 4;
3032 }
3033 cache->saved_regs[ARM_FPSCR_REGNUM].addr = unwound_sp + 0x60;
3034
3035 /* Offset 0x64 is reserved. */
3036 cache->prev_sp = unwound_sp + 0x68;
3037 }
3038 else
3039 {
3040 /* Standard stack frame type used. */
3041 cache->prev_sp = unwound_sp + 0x20;
3042 }
3043
3044 /* Check EXC_RETURN bit S if Secure or Non-secure stack used. */
3045 secure_stack_used = ((lr & (1 << 6)) != 0);
3046 if (exc_return && secure_stack_used)
3047 {
3048 /* ARMv8-M Exception and interrupt handling is not considered here.
3049 In the ARMv8-M architecture also EXC_RETURN bit S is controlling if
3050 the Secure or Non-secure stack was used. To separate Secure and
3051 Non-secure stacks, processors that are based on the ARMv8-M
3052 architecture support 4 stack pointers: MSP_S, PSP_S, MSP_NS, PSP_NS.
3053 In addition, a stack limit feature is provided using stack limit
3054 registers (accessible using MSR and MRS instructions) in Privileged
3055 level. */
3056 }
3057
3058 /* If bit 9 of the saved xPSR is set, then there is a four-byte
3059 aligner between the top of the 32-byte stack frame and the
3060 previous context's stack pointer. */
3061 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
3062 && (xpsr & (1 << 9)) != 0)
3063 cache->prev_sp += 4;
3064
3065 return cache;
3066 }
3067
3068 /* Implementation of function hook 'this_id' in
3069 'struct frame_uwnind'. */
3070
3071 static void
3072 arm_m_exception_this_id (struct frame_info *this_frame,
3073 void **this_cache,
3074 struct frame_id *this_id)
3075 {
3076 struct arm_prologue_cache *cache;
3077
3078 if (*this_cache == NULL)
3079 *this_cache = arm_m_exception_cache (this_frame);
3080 cache = (struct arm_prologue_cache *) *this_cache;
3081
3082 /* Our frame ID for a stub frame is the current SP and LR. */
3083 *this_id = frame_id_build (cache->prev_sp,
3084 get_frame_pc (this_frame));
3085 }
3086
3087 /* Implementation of function hook 'prev_register' in
3088 'struct frame_uwnind'. */
3089
3090 static struct value *
3091 arm_m_exception_prev_register (struct frame_info *this_frame,
3092 void **this_cache,
3093 int prev_regnum)
3094 {
3095 struct arm_prologue_cache *cache;
3096
3097 if (*this_cache == NULL)
3098 *this_cache = arm_m_exception_cache (this_frame);
3099 cache = (struct arm_prologue_cache *) *this_cache;
3100
3101 /* The value was already reconstructed into PREV_SP. */
3102 if (prev_regnum == ARM_SP_REGNUM)
3103 return frame_unwind_got_constant (this_frame, prev_regnum,
3104 cache->prev_sp);
3105
3106 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
3107 prev_regnum);
3108 }
3109
3110 /* Implementation of function hook 'sniffer' in
3111 'struct frame_uwnind'. */
3112
3113 static int
3114 arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3115 struct frame_info *this_frame,
3116 void **this_prologue_cache)
3117 {
3118 CORE_ADDR this_pc = get_frame_pc (this_frame);
3119
3120 /* No need to check is_m; this sniffer is only registered for
3121 M-profile architectures. */
3122
3123 /* Check if exception frame returns to a magic PC value. */
3124 return arm_m_addr_is_magic (this_pc);
3125 }
3126
3127 /* Frame unwinder for M-profile exceptions. */
3128
3129 struct frame_unwind arm_m_exception_unwind =
3130 {
3131 SIGTRAMP_FRAME,
3132 default_frame_unwind_stop_reason,
3133 arm_m_exception_this_id,
3134 arm_m_exception_prev_register,
3135 NULL,
3136 arm_m_exception_unwind_sniffer
3137 };
3138
3139 static CORE_ADDR
3140 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
3141 {
3142 struct arm_prologue_cache *cache;
3143
3144 if (*this_cache == NULL)
3145 *this_cache = arm_make_prologue_cache (this_frame);
3146 cache = (struct arm_prologue_cache *) *this_cache;
3147
3148 return cache->prev_sp - cache->framesize;
3149 }
3150
3151 struct frame_base arm_normal_base = {
3152 &arm_prologue_unwind,
3153 arm_normal_frame_base,
3154 arm_normal_frame_base,
3155 arm_normal_frame_base
3156 };
3157
3158 static struct value *
3159 arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
3160 int regnum)
3161 {
3162 struct gdbarch * gdbarch = get_frame_arch (this_frame);
3163 CORE_ADDR lr, cpsr;
3164 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
3165
3166 switch (regnum)
3167 {
3168 case ARM_PC_REGNUM:
3169 /* The PC is normally copied from the return column, which
3170 describes saves of LR. However, that version may have an
3171 extra bit set to indicate Thumb state. The bit is not
3172 part of the PC. */
3173 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3174 return frame_unwind_got_constant (this_frame, regnum,
3175 arm_addr_bits_remove (gdbarch, lr));
3176
3177 case ARM_PS_REGNUM:
3178 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3179 cpsr = get_frame_register_unsigned (this_frame, regnum);
3180 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3181 if (IS_THUMB_ADDR (lr))
3182 cpsr |= t_bit;
3183 else
3184 cpsr &= ~t_bit;
3185 return frame_unwind_got_constant (this_frame, regnum, cpsr);
3186
3187 default:
3188 internal_error (__FILE__, __LINE__,
3189 _("Unexpected register %d"), regnum);
3190 }
3191 }
3192
3193 static void
3194 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3195 struct dwarf2_frame_state_reg *reg,
3196 struct frame_info *this_frame)
3197 {
3198 switch (regnum)
3199 {
3200 case ARM_PC_REGNUM:
3201 case ARM_PS_REGNUM:
3202 reg->how = DWARF2_FRAME_REG_FN;
3203 reg->loc.fn = arm_dwarf2_prev_register;
3204 break;
3205 case ARM_SP_REGNUM:
3206 reg->how = DWARF2_FRAME_REG_CFA;
3207 break;
3208 }
3209 }
3210
3211 /* Implement the stack_frame_destroyed_p gdbarch method. */
3212
3213 static int
3214 thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3215 {
3216 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3217 unsigned int insn, insn2;
3218 int found_return = 0, found_stack_adjust = 0;
3219 CORE_ADDR func_start, func_end;
3220 CORE_ADDR scan_pc;
3221 gdb_byte buf[4];
3222
3223 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3224 return 0;
3225
3226 /* The epilogue is a sequence of instructions along the following lines:
3227
3228 - add stack frame size to SP or FP
3229 - [if frame pointer used] restore SP from FP
3230 - restore registers from SP [may include PC]
3231 - a return-type instruction [if PC wasn't already restored]
3232
3233 In a first pass, we scan forward from the current PC and verify the
3234 instructions we find as compatible with this sequence, ending in a
3235 return instruction.
3236
3237 However, this is not sufficient to distinguish indirect function calls
3238 within a function from indirect tail calls in the epilogue in some cases.
3239 Therefore, if we didn't already find any SP-changing instruction during
3240 forward scan, we add a backward scanning heuristic to ensure we actually
3241 are in the epilogue. */
3242
3243 scan_pc = pc;
3244 while (scan_pc < func_end && !found_return)
3245 {
3246 if (target_read_memory (scan_pc, buf, 2))
3247 break;
3248
3249 scan_pc += 2;
3250 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3251
3252 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3253 found_return = 1;
3254 else if (insn == 0x46f7) /* mov pc, lr */
3255 found_return = 1;
3256 else if (thumb_instruction_restores_sp (insn))
3257 {
3258 if ((insn & 0xff00) == 0xbd00) /* pop <registers, PC> */
3259 found_return = 1;
3260 }
3261 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
3262 {
3263 if (target_read_memory (scan_pc, buf, 2))
3264 break;
3265
3266 scan_pc += 2;
3267 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3268
3269 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3270 {
3271 if (insn2 & 0x8000) /* <registers> include PC. */
3272 found_return = 1;
3273 }
3274 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3275 && (insn2 & 0x0fff) == 0x0b04)
3276 {
3277 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3278 found_return = 1;
3279 }
3280 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3281 && (insn2 & 0x0e00) == 0x0a00)
3282 ;
3283 else
3284 break;
3285 }
3286 else
3287 break;
3288 }
3289
3290 if (!found_return)
3291 return 0;
3292
3293 /* Since any instruction in the epilogue sequence, with the possible
3294 exception of return itself, updates the stack pointer, we need to
3295 scan backwards for at most one instruction. Try either a 16-bit or
3296 a 32-bit instruction. This is just a heuristic, so we do not worry
3297 too much about false positives. */
3298
3299 if (pc - 4 < func_start)
3300 return 0;
3301 if (target_read_memory (pc - 4, buf, 4))
3302 return 0;
3303
3304 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3305 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3306
3307 if (thumb_instruction_restores_sp (insn2))
3308 found_stack_adjust = 1;
3309 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3310 found_stack_adjust = 1;
3311 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3312 && (insn2 & 0x0fff) == 0x0b04)
3313 found_stack_adjust = 1;
3314 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3315 && (insn2 & 0x0e00) == 0x0a00)
3316 found_stack_adjust = 1;
3317
3318 return found_stack_adjust;
3319 }
3320
3321 static int
3322 arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch, CORE_ADDR pc)
3323 {
3324 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3325 unsigned int insn;
3326 int found_return;
3327 CORE_ADDR func_start, func_end;
3328
3329 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3330 return 0;
3331
3332 /* We are in the epilogue if the previous instruction was a stack
3333 adjustment and the next instruction is a possible return (bx, mov
3334 pc, or pop). We could have to scan backwards to find the stack
3335 adjustment, or forwards to find the return, but this is a decent
3336 approximation. First scan forwards. */
3337
3338 found_return = 0;
3339 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3340 if (bits (insn, 28, 31) != INST_NV)
3341 {
3342 if ((insn & 0x0ffffff0) == 0x012fff10)
3343 /* BX. */
3344 found_return = 1;
3345 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3346 /* MOV PC. */
3347 found_return = 1;
3348 else if ((insn & 0x0fff0000) == 0x08bd0000
3349 && (insn & 0x0000c000) != 0)
3350 /* POP (LDMIA), including PC or LR. */
3351 found_return = 1;
3352 }
3353
3354 if (!found_return)
3355 return 0;
3356
3357 /* Scan backwards. This is just a heuristic, so do not worry about
3358 false positives from mode changes. */
3359
3360 if (pc < func_start + 4)
3361 return 0;
3362
3363 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
3364 if (arm_instruction_restores_sp (insn))
3365 return 1;
3366
3367 return 0;
3368 }
3369
3370 /* Implement the stack_frame_destroyed_p gdbarch method. */
3371
3372 static int
3373 arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3374 {
3375 if (arm_pc_is_thumb (gdbarch, pc))
3376 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3377 else
3378 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
3379 }
3380
3381 /* When arguments must be pushed onto the stack, they go on in reverse
3382 order. The code below implements a FILO (stack) to do this. */
3383
3384 struct stack_item
3385 {
3386 int len;
3387 struct stack_item *prev;
3388 gdb_byte *data;
3389 };
3390
3391 static struct stack_item *
3392 push_stack_item (struct stack_item *prev, const gdb_byte *contents, int len)
3393 {
3394 struct stack_item *si;
3395 si = XNEW (struct stack_item);
3396 si->data = (gdb_byte *) xmalloc (len);
3397 si->len = len;
3398 si->prev = prev;
3399 memcpy (si->data, contents, len);
3400 return si;
3401 }
3402
3403 static struct stack_item *
3404 pop_stack_item (struct stack_item *si)
3405 {
3406 struct stack_item *dead = si;
3407 si = si->prev;
3408 xfree (dead->data);
3409 xfree (dead);
3410 return si;
3411 }
3412
3413 /* Implement the gdbarch type alignment method, overrides the generic
3414 alignment algorithm for anything that is arm specific. */
3415
3416 static ULONGEST
3417 arm_type_align (gdbarch *gdbarch, struct type *t)
3418 {
3419 t = check_typedef (t);
3420 if (t->code () == TYPE_CODE_ARRAY && t->is_vector ())
3421 {
3422 /* Use the natural alignment for vector types (the same for
3423 scalar type), but the maximum alignment is 64-bit. */
3424 if (TYPE_LENGTH (t) > 8)
3425 return 8;
3426 else
3427 return TYPE_LENGTH (t);
3428 }
3429
3430 /* Allow the common code to calculate the alignment. */
3431 return 0;
3432 }
3433
3434 /* Possible base types for a candidate for passing and returning in
3435 VFP registers. */
3436
3437 enum arm_vfp_cprc_base_type
3438 {
3439 VFP_CPRC_UNKNOWN,
3440 VFP_CPRC_SINGLE,
3441 VFP_CPRC_DOUBLE,
3442 VFP_CPRC_VEC64,
3443 VFP_CPRC_VEC128
3444 };
3445
3446 /* The length of one element of base type B. */
3447
3448 static unsigned
3449 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3450 {
3451 switch (b)
3452 {
3453 case VFP_CPRC_SINGLE:
3454 return 4;
3455 case VFP_CPRC_DOUBLE:
3456 return 8;
3457 case VFP_CPRC_VEC64:
3458 return 8;
3459 case VFP_CPRC_VEC128:
3460 return 16;
3461 default:
3462 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3463 (int) b);
3464 }
3465 }
3466
3467 /* The character ('s', 'd' or 'q') for the type of VFP register used
3468 for passing base type B. */
3469
3470 static int
3471 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3472 {
3473 switch (b)
3474 {
3475 case VFP_CPRC_SINGLE:
3476 return 's';
3477 case VFP_CPRC_DOUBLE:
3478 return 'd';
3479 case VFP_CPRC_VEC64:
3480 return 'd';
3481 case VFP_CPRC_VEC128:
3482 return 'q';
3483 default:
3484 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3485 (int) b);
3486 }
3487 }
3488
3489 /* Determine whether T may be part of a candidate for passing and
3490 returning in VFP registers, ignoring the limit on the total number
3491 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3492 classification of the first valid component found; if it is not
3493 VFP_CPRC_UNKNOWN, all components must have the same classification
3494 as *BASE_TYPE. If it is found that T contains a type not permitted
3495 for passing and returning in VFP registers, a type differently
3496 classified from *BASE_TYPE, or two types differently classified
3497 from each other, return -1, otherwise return the total number of
3498 base-type elements found (possibly 0 in an empty structure or
3499 array). Vector types are not currently supported, matching the
3500 generic AAPCS support. */
3501
3502 static int
3503 arm_vfp_cprc_sub_candidate (struct type *t,
3504 enum arm_vfp_cprc_base_type *base_type)
3505 {
3506 t = check_typedef (t);
3507 switch (t->code ())
3508 {
3509 case TYPE_CODE_FLT:
3510 switch (TYPE_LENGTH (t))
3511 {
3512 case 4:
3513 if (*base_type == VFP_CPRC_UNKNOWN)
3514 *base_type = VFP_CPRC_SINGLE;
3515 else if (*base_type != VFP_CPRC_SINGLE)
3516 return -1;
3517 return 1;
3518
3519 case 8:
3520 if (*base_type == VFP_CPRC_UNKNOWN)
3521 *base_type = VFP_CPRC_DOUBLE;
3522 else if (*base_type != VFP_CPRC_DOUBLE)
3523 return -1;
3524 return 1;
3525
3526 default:
3527 return -1;
3528 }
3529 break;
3530
3531 case TYPE_CODE_COMPLEX:
3532 /* Arguments of complex T where T is one of the types float or
3533 double get treated as if they are implemented as:
3534
3535 struct complexT
3536 {
3537 T real;
3538 T imag;
3539 };
3540
3541 */
3542 switch (TYPE_LENGTH (t))
3543 {
3544 case 8:
3545 if (*base_type == VFP_CPRC_UNKNOWN)
3546 *base_type = VFP_CPRC_SINGLE;
3547 else if (*base_type != VFP_CPRC_SINGLE)
3548 return -1;
3549 return 2;
3550
3551 case 16:
3552 if (*base_type == VFP_CPRC_UNKNOWN)
3553 *base_type = VFP_CPRC_DOUBLE;
3554 else if (*base_type != VFP_CPRC_DOUBLE)
3555 return -1;
3556 return 2;
3557
3558 default:
3559 return -1;
3560 }
3561 break;
3562
3563 case TYPE_CODE_ARRAY:
3564 {
3565 if (t->is_vector ())
3566 {
3567 /* A 64-bit or 128-bit containerized vector type are VFP
3568 CPRCs. */
3569 switch (TYPE_LENGTH (t))
3570 {
3571 case 8:
3572 if (*base_type == VFP_CPRC_UNKNOWN)
3573 *base_type = VFP_CPRC_VEC64;
3574 return 1;
3575 case 16:
3576 if (*base_type == VFP_CPRC_UNKNOWN)
3577 *base_type = VFP_CPRC_VEC128;
3578 return 1;
3579 default:
3580 return -1;
3581 }
3582 }
3583 else
3584 {
3585 int count;
3586 unsigned unitlen;
3587
3588 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t),
3589 base_type);
3590 if (count == -1)
3591 return -1;
3592 if (TYPE_LENGTH (t) == 0)
3593 {
3594 gdb_assert (count == 0);
3595 return 0;
3596 }
3597 else if (count == 0)
3598 return -1;
3599 unitlen = arm_vfp_cprc_unit_length (*base_type);
3600 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3601 return TYPE_LENGTH (t) / unitlen;
3602 }
3603 }
3604 break;
3605
3606 case TYPE_CODE_STRUCT:
3607 {
3608 int count = 0;
3609 unsigned unitlen;
3610 int i;
3611 for (i = 0; i < t->num_fields (); i++)
3612 {
3613 int sub_count = 0;
3614
3615 if (!field_is_static (&t->field (i)))
3616 sub_count = arm_vfp_cprc_sub_candidate (t->field (i).type (),
3617 base_type);
3618 if (sub_count == -1)
3619 return -1;
3620 count += sub_count;
3621 }
3622 if (TYPE_LENGTH (t) == 0)
3623 {
3624 gdb_assert (count == 0);
3625 return 0;
3626 }
3627 else if (count == 0)
3628 return -1;
3629 unitlen = arm_vfp_cprc_unit_length (*base_type);
3630 if (TYPE_LENGTH (t) != unitlen * count)
3631 return -1;
3632 return count;
3633 }
3634
3635 case TYPE_CODE_UNION:
3636 {
3637 int count = 0;
3638 unsigned unitlen;
3639 int i;
3640 for (i = 0; i < t->num_fields (); i++)
3641 {
3642 int sub_count = arm_vfp_cprc_sub_candidate (t->field (i).type (),
3643 base_type);
3644 if (sub_count == -1)
3645 return -1;
3646 count = (count > sub_count ? count : sub_count);
3647 }
3648 if (TYPE_LENGTH (t) == 0)
3649 {
3650 gdb_assert (count == 0);
3651 return 0;
3652 }
3653 else if (count == 0)
3654 return -1;
3655 unitlen = arm_vfp_cprc_unit_length (*base_type);
3656 if (TYPE_LENGTH (t) != unitlen * count)
3657 return -1;
3658 return count;
3659 }
3660
3661 default:
3662 break;
3663 }
3664
3665 return -1;
3666 }
3667
3668 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3669 if passed to or returned from a non-variadic function with the VFP
3670 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3671 *BASE_TYPE to the base type for T and *COUNT to the number of
3672 elements of that base type before returning. */
3673
3674 static int
3675 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3676 int *count)
3677 {
3678 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3679 int c = arm_vfp_cprc_sub_candidate (t, &b);
3680 if (c <= 0 || c > 4)
3681 return 0;
3682 *base_type = b;
3683 *count = c;
3684 return 1;
3685 }
3686
3687 /* Return 1 if the VFP ABI should be used for passing arguments to and
3688 returning values from a function of type FUNC_TYPE, 0
3689 otherwise. */
3690
3691 static int
3692 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3693 {
3694 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3695 /* Variadic functions always use the base ABI. Assume that functions
3696 without debug info are not variadic. */
3697 if (func_type && check_typedef (func_type)->has_varargs ())
3698 return 0;
3699 /* The VFP ABI is only supported as a variant of AAPCS. */
3700 if (tdep->arm_abi != ARM_ABI_AAPCS)
3701 return 0;
3702 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3703 }
3704
3705 /* We currently only support passing parameters in integer registers, which
3706 conforms with GCC's default model, and VFP argument passing following
3707 the VFP variant of AAPCS. Several other variants exist and
3708 we should probably support some of them based on the selected ABI. */
3709
3710 static CORE_ADDR
3711 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
3712 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3713 struct value **args, CORE_ADDR sp,
3714 function_call_return_method return_method,
3715 CORE_ADDR struct_addr)
3716 {
3717 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3718 int argnum;
3719 int argreg;
3720 int nstack;
3721 struct stack_item *si = NULL;
3722 int use_vfp_abi;
3723 struct type *ftype;
3724 unsigned vfp_regs_free = (1 << 16) - 1;
3725
3726 /* Determine the type of this function and whether the VFP ABI
3727 applies. */
3728 ftype = check_typedef (value_type (function));
3729 if (ftype->code () == TYPE_CODE_PTR)
3730 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3731 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
3732
3733 /* Set the return address. For the ARM, the return breakpoint is
3734 always at BP_ADDR. */
3735 if (arm_pc_is_thumb (gdbarch, bp_addr))
3736 bp_addr |= 1;
3737 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
3738
3739 /* Walk through the list of args and determine how large a temporary
3740 stack is required. Need to take care here as structs may be
3741 passed on the stack, and we have to push them. */
3742 nstack = 0;
3743
3744 argreg = ARM_A1_REGNUM;
3745 nstack = 0;
3746
3747 /* The struct_return pointer occupies the first parameter
3748 passing register. */
3749 if (return_method == return_method_struct)
3750 {
3751 if (arm_debug)
3752 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
3753 gdbarch_register_name (gdbarch, argreg),
3754 paddress (gdbarch, struct_addr));
3755 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3756 argreg++;
3757 }
3758
3759 for (argnum = 0; argnum < nargs; argnum++)
3760 {
3761 int len;
3762 struct type *arg_type;
3763 struct type *target_type;
3764 enum type_code typecode;
3765 const bfd_byte *val;
3766 int align;
3767 enum arm_vfp_cprc_base_type vfp_base_type;
3768 int vfp_base_count;
3769 int may_use_core_reg = 1;
3770
3771 arg_type = check_typedef (value_type (args[argnum]));
3772 len = TYPE_LENGTH (arg_type);
3773 target_type = TYPE_TARGET_TYPE (arg_type);
3774 typecode = arg_type->code ();
3775 val = value_contents (args[argnum]);
3776
3777 align = type_align (arg_type);
3778 /* Round alignment up to a whole number of words. */
3779 align = (align + ARM_INT_REGISTER_SIZE - 1)
3780 & ~(ARM_INT_REGISTER_SIZE - 1);
3781 /* Different ABIs have different maximum alignments. */
3782 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3783 {
3784 /* The APCS ABI only requires word alignment. */
3785 align = ARM_INT_REGISTER_SIZE;
3786 }
3787 else
3788 {
3789 /* The AAPCS requires at most doubleword alignment. */
3790 if (align > ARM_INT_REGISTER_SIZE * 2)
3791 align = ARM_INT_REGISTER_SIZE * 2;
3792 }
3793
3794 if (use_vfp_abi
3795 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3796 &vfp_base_count))
3797 {
3798 int regno;
3799 int unit_length;
3800 int shift;
3801 unsigned mask;
3802
3803 /* Because this is a CPRC it cannot go in a core register or
3804 cause a core register to be skipped for alignment.
3805 Either it goes in VFP registers and the rest of this loop
3806 iteration is skipped for this argument, or it goes on the
3807 stack (and the stack alignment code is correct for this
3808 case). */
3809 may_use_core_reg = 0;
3810
3811 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3812 shift = unit_length / 4;
3813 mask = (1 << (shift * vfp_base_count)) - 1;
3814 for (regno = 0; regno < 16; regno += shift)
3815 if (((vfp_regs_free >> regno) & mask) == mask)
3816 break;
3817
3818 if (regno < 16)
3819 {
3820 int reg_char;
3821 int reg_scaled;
3822 int i;
3823
3824 vfp_regs_free &= ~(mask << regno);
3825 reg_scaled = regno / shift;
3826 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3827 for (i = 0; i < vfp_base_count; i++)
3828 {
3829 char name_buf[4];
3830 int regnum;
3831 if (reg_char == 'q')
3832 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
3833 val + i * unit_length);
3834 else
3835 {
3836 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3837 reg_char, reg_scaled + i);
3838 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3839 strlen (name_buf));
3840 regcache->cooked_write (regnum, val + i * unit_length);
3841 }
3842 }
3843 continue;
3844 }
3845 else
3846 {
3847 /* This CPRC could not go in VFP registers, so all VFP
3848 registers are now marked as used. */
3849 vfp_regs_free = 0;
3850 }
3851 }
3852
3853 /* Push stack padding for doubleword alignment. */
3854 if (nstack & (align - 1))
3855 {
3856 si = push_stack_item (si, val, ARM_INT_REGISTER_SIZE);
3857 nstack += ARM_INT_REGISTER_SIZE;
3858 }
3859
3860 /* Doubleword aligned quantities must go in even register pairs. */
3861 if (may_use_core_reg
3862 && argreg <= ARM_LAST_ARG_REGNUM
3863 && align > ARM_INT_REGISTER_SIZE
3864 && argreg & 1)
3865 argreg++;
3866
3867 /* If the argument is a pointer to a function, and it is a
3868 Thumb function, create a LOCAL copy of the value and set
3869 the THUMB bit in it. */
3870 if (TYPE_CODE_PTR == typecode
3871 && target_type != NULL
3872 && TYPE_CODE_FUNC == check_typedef (target_type)->code ())
3873 {
3874 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
3875 if (arm_pc_is_thumb (gdbarch, regval))
3876 {
3877 bfd_byte *copy = (bfd_byte *) alloca (len);
3878 store_unsigned_integer (copy, len, byte_order,
3879 MAKE_THUMB_ADDR (regval));
3880 val = copy;
3881 }
3882 }
3883
3884 /* Copy the argument to general registers or the stack in
3885 register-sized pieces. Large arguments are split between
3886 registers and stack. */
3887 while (len > 0)
3888 {
3889 int partial_len = len < ARM_INT_REGISTER_SIZE
3890 ? len : ARM_INT_REGISTER_SIZE;
3891 CORE_ADDR regval
3892 = extract_unsigned_integer (val, partial_len, byte_order);
3893
3894 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
3895 {
3896 /* The argument is being passed in a general purpose
3897 register. */
3898 if (byte_order == BFD_ENDIAN_BIG)
3899 regval <<= (ARM_INT_REGISTER_SIZE - partial_len) * 8;
3900 if (arm_debug)
3901 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
3902 argnum,
3903 gdbarch_register_name
3904 (gdbarch, argreg),
3905 phex (regval, ARM_INT_REGISTER_SIZE));
3906 regcache_cooked_write_unsigned (regcache, argreg, regval);
3907 argreg++;
3908 }
3909 else
3910 {
3911 gdb_byte buf[ARM_INT_REGISTER_SIZE];
3912
3913 memset (buf, 0, sizeof (buf));
3914 store_unsigned_integer (buf, partial_len, byte_order, regval);
3915
3916 /* Push the arguments onto the stack. */
3917 if (arm_debug)
3918 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3919 argnum, nstack);
3920 si = push_stack_item (si, buf, ARM_INT_REGISTER_SIZE);
3921 nstack += ARM_INT_REGISTER_SIZE;
3922 }
3923
3924 len -= partial_len;
3925 val += partial_len;
3926 }
3927 }
3928 /* If we have an odd number of words to push, then decrement the stack
3929 by one word now, so first stack argument will be dword aligned. */
3930 if (nstack & 4)
3931 sp -= 4;
3932
3933 while (si)
3934 {
3935 sp -= si->len;
3936 write_memory (sp, si->data, si->len);
3937 si = pop_stack_item (si);
3938 }
3939
3940 /* Finally, update teh SP register. */
3941 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3942
3943 return sp;
3944 }
3945
3946
3947 /* Always align the frame to an 8-byte boundary. This is required on
3948 some platforms and harmless on the rest. */
3949
3950 static CORE_ADDR
3951 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3952 {
3953 /* Align the stack to eight bytes. */
3954 return sp & ~ (CORE_ADDR) 7;
3955 }
3956
3957 static void
3958 print_fpu_flags (struct ui_file *file, int flags)
3959 {
3960 if (flags & (1 << 0))
3961 fputs_filtered ("IVO ", file);
3962 if (flags & (1 << 1))
3963 fputs_filtered ("DVZ ", file);
3964 if (flags & (1 << 2))
3965 fputs_filtered ("OFL ", file);
3966 if (flags & (1 << 3))
3967 fputs_filtered ("UFL ", file);
3968 if (flags & (1 << 4))
3969 fputs_filtered ("INX ", file);
3970 fputc_filtered ('\n', file);
3971 }
3972
3973 /* Print interesting information about the floating point processor
3974 (if present) or emulator. */
3975 static void
3976 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
3977 struct frame_info *frame, const char *args)
3978 {
3979 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
3980 int type;
3981
3982 type = (status >> 24) & 127;
3983 if (status & (1 << 31))
3984 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
3985 else
3986 fprintf_filtered (file, _("Software FPU type %d\n"), type);
3987 /* i18n: [floating point unit] mask */
3988 fputs_filtered (_("mask: "), file);
3989 print_fpu_flags (file, status >> 16);
3990 /* i18n: [floating point unit] flags */
3991 fputs_filtered (_("flags: "), file);
3992 print_fpu_flags (file, status);
3993 }
3994
3995 /* Construct the ARM extended floating point type. */
3996 static struct type *
3997 arm_ext_type (struct gdbarch *gdbarch)
3998 {
3999 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4000
4001 if (!tdep->arm_ext_type)
4002 tdep->arm_ext_type
4003 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
4004 floatformats_arm_ext);
4005
4006 return tdep->arm_ext_type;
4007 }
4008
4009 static struct type *
4010 arm_neon_double_type (struct gdbarch *gdbarch)
4011 {
4012 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4013
4014 if (tdep->neon_double_type == NULL)
4015 {
4016 struct type *t, *elem;
4017
4018 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
4019 TYPE_CODE_UNION);
4020 elem = builtin_type (gdbarch)->builtin_uint8;
4021 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
4022 elem = builtin_type (gdbarch)->builtin_uint16;
4023 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
4024 elem = builtin_type (gdbarch)->builtin_uint32;
4025 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
4026 elem = builtin_type (gdbarch)->builtin_uint64;
4027 append_composite_type_field (t, "u64", elem);
4028 elem = builtin_type (gdbarch)->builtin_float;
4029 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
4030 elem = builtin_type (gdbarch)->builtin_double;
4031 append_composite_type_field (t, "f64", elem);
4032
4033 t->set_is_vector (true);
4034 t->set_name ("neon_d");
4035 tdep->neon_double_type = t;
4036 }
4037
4038 return tdep->neon_double_type;
4039 }
4040
4041 /* FIXME: The vector types are not correctly ordered on big-endian
4042 targets. Just as s0 is the low bits of d0, d0[0] is also the low
4043 bits of d0 - regardless of what unit size is being held in d0. So
4044 the offset of the first uint8 in d0 is 7, but the offset of the
4045 first float is 4. This code works as-is for little-endian
4046 targets. */
4047
4048 static struct type *
4049 arm_neon_quad_type (struct gdbarch *gdbarch)
4050 {
4051 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4052
4053 if (tdep->neon_quad_type == NULL)
4054 {
4055 struct type *t, *elem;
4056
4057 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
4058 TYPE_CODE_UNION);
4059 elem = builtin_type (gdbarch)->builtin_uint8;
4060 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
4061 elem = builtin_type (gdbarch)->builtin_uint16;
4062 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
4063 elem = builtin_type (gdbarch)->builtin_uint32;
4064 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
4065 elem = builtin_type (gdbarch)->builtin_uint64;
4066 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
4067 elem = builtin_type (gdbarch)->builtin_float;
4068 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
4069 elem = builtin_type (gdbarch)->builtin_double;
4070 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
4071
4072 t->set_is_vector (true);
4073 t->set_name ("neon_q");
4074 tdep->neon_quad_type = t;
4075 }
4076
4077 return tdep->neon_quad_type;
4078 }
4079
4080 /* Return the GDB type object for the "standard" data type of data in
4081 register N. */
4082
4083 static struct type *
4084 arm_register_type (struct gdbarch *gdbarch, int regnum)
4085 {
4086 int num_regs = gdbarch_num_regs (gdbarch);
4087
4088 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
4089 && regnum >= num_regs && regnum < num_regs + 32)
4090 return builtin_type (gdbarch)->builtin_float;
4091
4092 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
4093 && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
4094 return arm_neon_quad_type (gdbarch);
4095
4096 /* If the target description has register information, we are only
4097 in this function so that we can override the types of
4098 double-precision registers for NEON. */
4099 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
4100 {
4101 struct type *t = tdesc_register_type (gdbarch, regnum);
4102
4103 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
4104 && t->code () == TYPE_CODE_FLT
4105 && gdbarch_tdep (gdbarch)->have_neon)
4106 return arm_neon_double_type (gdbarch);
4107 else
4108 return t;
4109 }
4110
4111 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
4112 {
4113 if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
4114 return builtin_type (gdbarch)->builtin_void;
4115
4116 return arm_ext_type (gdbarch);
4117 }
4118 else if (regnum == ARM_SP_REGNUM)
4119 return builtin_type (gdbarch)->builtin_data_ptr;
4120 else if (regnum == ARM_PC_REGNUM)
4121 return builtin_type (gdbarch)->builtin_func_ptr;
4122 else if (regnum >= ARRAY_SIZE (arm_register_names))
4123 /* These registers are only supported on targets which supply
4124 an XML description. */
4125 return builtin_type (gdbarch)->builtin_int0;
4126 else
4127 return builtin_type (gdbarch)->builtin_uint32;
4128 }
4129
4130 /* Map a DWARF register REGNUM onto the appropriate GDB register
4131 number. */
4132
4133 static int
4134 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
4135 {
4136 /* Core integer regs. */
4137 if (reg >= 0 && reg <= 15)
4138 return reg;
4139
4140 /* Legacy FPA encoding. These were once used in a way which
4141 overlapped with VFP register numbering, so their use is
4142 discouraged, but GDB doesn't support the ARM toolchain
4143 which used them for VFP. */
4144 if (reg >= 16 && reg <= 23)
4145 return ARM_F0_REGNUM + reg - 16;
4146
4147 /* New assignments for the FPA registers. */
4148 if (reg >= 96 && reg <= 103)
4149 return ARM_F0_REGNUM + reg - 96;
4150
4151 /* WMMX register assignments. */
4152 if (reg >= 104 && reg <= 111)
4153 return ARM_WCGR0_REGNUM + reg - 104;
4154
4155 if (reg >= 112 && reg <= 127)
4156 return ARM_WR0_REGNUM + reg - 112;
4157
4158 if (reg >= 192 && reg <= 199)
4159 return ARM_WC0_REGNUM + reg - 192;
4160
4161 /* VFP v2 registers. A double precision value is actually
4162 in d1 rather than s2, but the ABI only defines numbering
4163 for the single precision registers. This will "just work"
4164 in GDB for little endian targets (we'll read eight bytes,
4165 starting in s0 and then progressing to s1), but will be
4166 reversed on big endian targets with VFP. This won't
4167 be a problem for the new Neon quad registers; you're supposed
4168 to use DW_OP_piece for those. */
4169 if (reg >= 64 && reg <= 95)
4170 {
4171 char name_buf[4];
4172
4173 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
4174 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4175 strlen (name_buf));
4176 }
4177
4178 /* VFP v3 / Neon registers. This range is also used for VFP v2
4179 registers, except that it now describes d0 instead of s0. */
4180 if (reg >= 256 && reg <= 287)
4181 {
4182 char name_buf[4];
4183
4184 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
4185 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4186 strlen (name_buf));
4187 }
4188
4189 return -1;
4190 }
4191
4192 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4193 static int
4194 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4195 {
4196 int reg = regnum;
4197 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
4198
4199 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4200 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4201
4202 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4203 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4204
4205 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4206 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4207
4208 if (reg < NUM_GREGS)
4209 return SIM_ARM_R0_REGNUM + reg;
4210 reg -= NUM_GREGS;
4211
4212 if (reg < NUM_FREGS)
4213 return SIM_ARM_FP0_REGNUM + reg;
4214 reg -= NUM_FREGS;
4215
4216 if (reg < NUM_SREGS)
4217 return SIM_ARM_FPS_REGNUM + reg;
4218 reg -= NUM_SREGS;
4219
4220 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4221 }
4222
4223 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
4224 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
4225 NULL if an error occurs. BUF is freed. */
4226
4227 static gdb_byte *
4228 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
4229 int old_len, int new_len)
4230 {
4231 gdb_byte *new_buf;
4232 int bytes_to_read = new_len - old_len;
4233
4234 new_buf = (gdb_byte *) xmalloc (new_len);
4235 memcpy (new_buf + bytes_to_read, buf, old_len);
4236 xfree (buf);
4237 if (target_read_code (endaddr - new_len, new_buf, bytes_to_read) != 0)
4238 {
4239 xfree (new_buf);
4240 return NULL;
4241 }
4242 return new_buf;
4243 }
4244
4245 /* An IT block is at most the 2-byte IT instruction followed by
4246 four 4-byte instructions. The furthest back we must search to
4247 find an IT block that affects the current instruction is thus
4248 2 + 3 * 4 == 14 bytes. */
4249 #define MAX_IT_BLOCK_PREFIX 14
4250
4251 /* Use a quick scan if there are more than this many bytes of
4252 code. */
4253 #define IT_SCAN_THRESHOLD 32
4254
4255 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
4256 A breakpoint in an IT block may not be hit, depending on the
4257 condition flags. */
4258 static CORE_ADDR
4259 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
4260 {
4261 gdb_byte *buf;
4262 char map_type;
4263 CORE_ADDR boundary, func_start;
4264 int buf_len;
4265 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
4266 int i, any, last_it, last_it_count;
4267
4268 /* If we are using BKPT breakpoints, none of this is necessary. */
4269 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
4270 return bpaddr;
4271
4272 /* ARM mode does not have this problem. */
4273 if (!arm_pc_is_thumb (gdbarch, bpaddr))
4274 return bpaddr;
4275
4276 /* We are setting a breakpoint in Thumb code that could potentially
4277 contain an IT block. The first step is to find how much Thumb
4278 code there is; we do not need to read outside of known Thumb
4279 sequences. */
4280 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
4281 if (map_type == 0)
4282 /* Thumb-2 code must have mapping symbols to have a chance. */
4283 return bpaddr;
4284
4285 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
4286
4287 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
4288 && func_start > boundary)
4289 boundary = func_start;
4290
4291 /* Search for a candidate IT instruction. We have to do some fancy
4292 footwork to distinguish a real IT instruction from the second
4293 half of a 32-bit instruction, but there is no need for that if
4294 there's no candidate. */
4295 buf_len = std::min (bpaddr - boundary, (CORE_ADDR) MAX_IT_BLOCK_PREFIX);
4296 if (buf_len == 0)
4297 /* No room for an IT instruction. */
4298 return bpaddr;
4299
4300 buf = (gdb_byte *) xmalloc (buf_len);
4301 if (target_read_code (bpaddr - buf_len, buf, buf_len) != 0)
4302 return bpaddr;
4303 any = 0;
4304 for (i = 0; i < buf_len; i += 2)
4305 {
4306 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4307 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4308 {
4309 any = 1;
4310 break;
4311 }
4312 }
4313
4314 if (any == 0)
4315 {
4316 xfree (buf);
4317 return bpaddr;
4318 }
4319
4320 /* OK, the code bytes before this instruction contain at least one
4321 halfword which resembles an IT instruction. We know that it's
4322 Thumb code, but there are still two possibilities. Either the
4323 halfword really is an IT instruction, or it is the second half of
4324 a 32-bit Thumb instruction. The only way we can tell is to
4325 scan forwards from a known instruction boundary. */
4326 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
4327 {
4328 int definite;
4329
4330 /* There's a lot of code before this instruction. Start with an
4331 optimistic search; it's easy to recognize halfwords that can
4332 not be the start of a 32-bit instruction, and use that to
4333 lock on to the instruction boundaries. */
4334 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
4335 if (buf == NULL)
4336 return bpaddr;
4337 buf_len = IT_SCAN_THRESHOLD;
4338
4339 definite = 0;
4340 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
4341 {
4342 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4343 if (thumb_insn_size (inst1) == 2)
4344 {
4345 definite = 1;
4346 break;
4347 }
4348 }
4349
4350 /* At this point, if DEFINITE, BUF[I] is the first place we
4351 are sure that we know the instruction boundaries, and it is far
4352 enough from BPADDR that we could not miss an IT instruction
4353 affecting BPADDR. If ! DEFINITE, give up - start from a
4354 known boundary. */
4355 if (! definite)
4356 {
4357 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
4358 bpaddr - boundary);
4359 if (buf == NULL)
4360 return bpaddr;
4361 buf_len = bpaddr - boundary;
4362 i = 0;
4363 }
4364 }
4365 else
4366 {
4367 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
4368 if (buf == NULL)
4369 return bpaddr;
4370 buf_len = bpaddr - boundary;
4371 i = 0;
4372 }
4373
4374 /* Scan forwards. Find the last IT instruction before BPADDR. */
4375 last_it = -1;
4376 last_it_count = 0;
4377 while (i < buf_len)
4378 {
4379 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4380 last_it_count--;
4381 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4382 {
4383 last_it = i;
4384 if (inst1 & 0x0001)
4385 last_it_count = 4;
4386 else if (inst1 & 0x0002)
4387 last_it_count = 3;
4388 else if (inst1 & 0x0004)
4389 last_it_count = 2;
4390 else
4391 last_it_count = 1;
4392 }
4393 i += thumb_insn_size (inst1);
4394 }
4395
4396 xfree (buf);
4397
4398 if (last_it == -1)
4399 /* There wasn't really an IT instruction after all. */
4400 return bpaddr;
4401
4402 if (last_it_count < 1)
4403 /* It was too far away. */
4404 return bpaddr;
4405
4406 /* This really is a trouble spot. Move the breakpoint to the IT
4407 instruction. */
4408 return bpaddr - buf_len + last_it;
4409 }
4410
4411 /* ARM displaced stepping support.
4412
4413 Generally ARM displaced stepping works as follows:
4414
4415 1. When an instruction is to be single-stepped, it is first decoded by
4416 arm_process_displaced_insn. Depending on the type of instruction, it is
4417 then copied to a scratch location, possibly in a modified form. The
4418 copy_* set of functions performs such modification, as necessary. A
4419 breakpoint is placed after the modified instruction in the scratch space
4420 to return control to GDB. Note in particular that instructions which
4421 modify the PC will no longer do so after modification.
4422
4423 2. The instruction is single-stepped, by setting the PC to the scratch
4424 location address, and resuming. Control returns to GDB when the
4425 breakpoint is hit.
4426
4427 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
4428 function used for the current instruction. This function's job is to
4429 put the CPU/memory state back to what it would have been if the
4430 instruction had been executed unmodified in its original location. */
4431
4432 /* NOP instruction (mov r0, r0). */
4433 #define ARM_NOP 0xe1a00000
4434 #define THUMB_NOP 0x4600
4435
4436 /* Helper for register reads for displaced stepping. In particular, this
4437 returns the PC as it would be seen by the instruction at its original
4438 location. */
4439
4440 ULONGEST
4441 displaced_read_reg (struct regcache *regs, arm_displaced_step_closure *dsc,
4442 int regno)
4443 {
4444 ULONGEST ret;
4445 CORE_ADDR from = dsc->insn_addr;
4446
4447 if (regno == ARM_PC_REGNUM)
4448 {
4449 /* Compute pipeline offset:
4450 - When executing an ARM instruction, PC reads as the address of the
4451 current instruction plus 8.
4452 - When executing a Thumb instruction, PC reads as the address of the
4453 current instruction plus 4. */
4454
4455 if (!dsc->is_thumb)
4456 from += 8;
4457 else
4458 from += 4;
4459
4460 if (debug_displaced)
4461 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
4462 (unsigned long) from);
4463 return (ULONGEST) from;
4464 }
4465 else
4466 {
4467 regcache_cooked_read_unsigned (regs, regno, &ret);
4468 if (debug_displaced)
4469 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
4470 regno, (unsigned long) ret);
4471 return ret;
4472 }
4473 }
4474
4475 static int
4476 displaced_in_arm_mode (struct regcache *regs)
4477 {
4478 ULONGEST ps;
4479 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
4480
4481 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4482
4483 return (ps & t_bit) == 0;
4484 }
4485
4486 /* Write to the PC as from a branch instruction. */
4487
4488 static void
4489 branch_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4490 ULONGEST val)
4491 {
4492 if (!dsc->is_thumb)
4493 /* Note: If bits 0/1 are set, this branch would be unpredictable for
4494 architecture versions < 6. */
4495 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4496 val & ~(ULONGEST) 0x3);
4497 else
4498 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4499 val & ~(ULONGEST) 0x1);
4500 }
4501
4502 /* Write to the PC as from a branch-exchange instruction. */
4503
4504 static void
4505 bx_write_pc (struct regcache *regs, ULONGEST val)
4506 {
4507 ULONGEST ps;
4508 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
4509
4510 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4511
4512 if ((val & 1) == 1)
4513 {
4514 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
4515 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
4516 }
4517 else if ((val & 2) == 0)
4518 {
4519 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4520 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
4521 }
4522 else
4523 {
4524 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
4525 mode, align dest to 4 bytes). */
4526 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
4527 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4528 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
4529 }
4530 }
4531
4532 /* Write to the PC as if from a load instruction. */
4533
4534 static void
4535 load_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4536 ULONGEST val)
4537 {
4538 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
4539 bx_write_pc (regs, val);
4540 else
4541 branch_write_pc (regs, dsc, val);
4542 }
4543
4544 /* Write to the PC as if from an ALU instruction. */
4545
4546 static void
4547 alu_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4548 ULONGEST val)
4549 {
4550 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
4551 bx_write_pc (regs, val);
4552 else
4553 branch_write_pc (regs, dsc, val);
4554 }
4555
4556 /* Helper for writing to registers for displaced stepping. Writing to the PC
4557 has a varying effects depending on the instruction which does the write:
4558 this is controlled by the WRITE_PC argument. */
4559
4560 void
4561 displaced_write_reg (struct regcache *regs, arm_displaced_step_closure *dsc,
4562 int regno, ULONGEST val, enum pc_write_style write_pc)
4563 {
4564 if (regno == ARM_PC_REGNUM)
4565 {
4566 if (debug_displaced)
4567 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
4568 (unsigned long) val);
4569 switch (write_pc)
4570 {
4571 case BRANCH_WRITE_PC:
4572 branch_write_pc (regs, dsc, val);
4573 break;
4574
4575 case BX_WRITE_PC:
4576 bx_write_pc (regs, val);
4577 break;
4578
4579 case LOAD_WRITE_PC:
4580 load_write_pc (regs, dsc, val);
4581 break;
4582
4583 case ALU_WRITE_PC:
4584 alu_write_pc (regs, dsc, val);
4585 break;
4586
4587 case CANNOT_WRITE_PC:
4588 warning (_("Instruction wrote to PC in an unexpected way when "
4589 "single-stepping"));
4590 break;
4591
4592 default:
4593 internal_error (__FILE__, __LINE__,
4594 _("Invalid argument to displaced_write_reg"));
4595 }
4596
4597 dsc->wrote_to_pc = 1;
4598 }
4599 else
4600 {
4601 if (debug_displaced)
4602 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
4603 regno, (unsigned long) val);
4604 regcache_cooked_write_unsigned (regs, regno, val);
4605 }
4606 }
4607
4608 /* This function is used to concisely determine if an instruction INSN
4609 references PC. Register fields of interest in INSN should have the
4610 corresponding fields of BITMASK set to 0b1111. The function
4611 returns return 1 if any of these fields in INSN reference the PC
4612 (also 0b1111, r15), else it returns 0. */
4613
4614 static int
4615 insn_references_pc (uint32_t insn, uint32_t bitmask)
4616 {
4617 uint32_t lowbit = 1;
4618
4619 while (bitmask != 0)
4620 {
4621 uint32_t mask;
4622
4623 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
4624 ;
4625
4626 if (!lowbit)
4627 break;
4628
4629 mask = lowbit * 0xf;
4630
4631 if ((insn & mask) == mask)
4632 return 1;
4633
4634 bitmask &= ~mask;
4635 }
4636
4637 return 0;
4638 }
4639
4640 /* The simplest copy function. Many instructions have the same effect no
4641 matter what address they are executed at: in those cases, use this. */
4642
4643 static int
4644 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
4645 const char *iname, arm_displaced_step_closure *dsc)
4646 {
4647 if (debug_displaced)
4648 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
4649 "opcode/class '%s' unmodified\n", (unsigned long) insn,
4650 iname);
4651
4652 dsc->modinsn[0] = insn;
4653
4654 return 0;
4655 }
4656
4657 static int
4658 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
4659 uint16_t insn2, const char *iname,
4660 arm_displaced_step_closure *dsc)
4661 {
4662 if (debug_displaced)
4663 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
4664 "opcode/class '%s' unmodified\n", insn1, insn2,
4665 iname);
4666
4667 dsc->modinsn[0] = insn1;
4668 dsc->modinsn[1] = insn2;
4669 dsc->numinsns = 2;
4670
4671 return 0;
4672 }
4673
4674 /* Copy 16-bit Thumb(Thumb and 16-bit Thumb-2) instruction without any
4675 modification. */
4676 static int
4677 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, uint16_t insn,
4678 const char *iname,
4679 arm_displaced_step_closure *dsc)
4680 {
4681 if (debug_displaced)
4682 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
4683 "opcode/class '%s' unmodified\n", insn,
4684 iname);
4685
4686 dsc->modinsn[0] = insn;
4687
4688 return 0;
4689 }
4690
4691 /* Preload instructions with immediate offset. */
4692
4693 static void
4694 cleanup_preload (struct gdbarch *gdbarch,
4695 struct regcache *regs, arm_displaced_step_closure *dsc)
4696 {
4697 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4698 if (!dsc->u.preload.immed)
4699 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
4700 }
4701
4702 static void
4703 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
4704 arm_displaced_step_closure *dsc, unsigned int rn)
4705 {
4706 ULONGEST rn_val;
4707 /* Preload instructions:
4708
4709 {pli/pld} [rn, #+/-imm]
4710 ->
4711 {pli/pld} [r0, #+/-imm]. */
4712
4713 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4714 rn_val = displaced_read_reg (regs, dsc, rn);
4715 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4716 dsc->u.preload.immed = 1;
4717
4718 dsc->cleanup = &cleanup_preload;
4719 }
4720
4721 static int
4722 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
4723 arm_displaced_step_closure *dsc)
4724 {
4725 unsigned int rn = bits (insn, 16, 19);
4726
4727 if (!insn_references_pc (insn, 0x000f0000ul))
4728 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
4729
4730 if (debug_displaced)
4731 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4732 (unsigned long) insn);
4733
4734 dsc->modinsn[0] = insn & 0xfff0ffff;
4735
4736 install_preload (gdbarch, regs, dsc, rn);
4737
4738 return 0;
4739 }
4740
4741 static int
4742 thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
4743 struct regcache *regs, arm_displaced_step_closure *dsc)
4744 {
4745 unsigned int rn = bits (insn1, 0, 3);
4746 unsigned int u_bit = bit (insn1, 7);
4747 int imm12 = bits (insn2, 0, 11);
4748 ULONGEST pc_val;
4749
4750 if (rn != ARM_PC_REGNUM)
4751 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
4752
4753 /* PC is only allowed to use in PLI (immediate,literal) Encoding T3, and
4754 PLD (literal) Encoding T1. */
4755 if (debug_displaced)
4756 fprintf_unfiltered (gdb_stdlog,
4757 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
4758 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
4759 imm12);
4760
4761 if (!u_bit)
4762 imm12 = -1 * imm12;
4763
4764 /* Rewrite instruction {pli/pld} PC imm12 into:
4765 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
4766
4767 {pli/pld} [r0, r1]
4768
4769 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
4770
4771 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4772 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4773
4774 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
4775
4776 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
4777 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
4778 dsc->u.preload.immed = 0;
4779
4780 /* {pli/pld} [r0, r1] */
4781 dsc->modinsn[0] = insn1 & 0xfff0;
4782 dsc->modinsn[1] = 0xf001;
4783 dsc->numinsns = 2;
4784
4785 dsc->cleanup = &cleanup_preload;
4786 return 0;
4787 }
4788
4789 /* Preload instructions with register offset. */
4790
4791 static void
4792 install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
4793 arm_displaced_step_closure *dsc, unsigned int rn,
4794 unsigned int rm)
4795 {
4796 ULONGEST rn_val, rm_val;
4797
4798 /* Preload register-offset instructions:
4799
4800 {pli/pld} [rn, rm {, shift}]
4801 ->
4802 {pli/pld} [r0, r1 {, shift}]. */
4803
4804 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4805 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4806 rn_val = displaced_read_reg (regs, dsc, rn);
4807 rm_val = displaced_read_reg (regs, dsc, rm);
4808 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4809 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
4810 dsc->u.preload.immed = 0;
4811
4812 dsc->cleanup = &cleanup_preload;
4813 }
4814
4815 static int
4816 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
4817 struct regcache *regs,
4818 arm_displaced_step_closure *dsc)
4819 {
4820 unsigned int rn = bits (insn, 16, 19);
4821 unsigned int rm = bits (insn, 0, 3);
4822
4823
4824 if (!insn_references_pc (insn, 0x000f000ful))
4825 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
4826
4827 if (debug_displaced)
4828 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4829 (unsigned long) insn);
4830
4831 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
4832
4833 install_preload_reg (gdbarch, regs, dsc, rn, rm);
4834 return 0;
4835 }
4836
4837 /* Copy/cleanup coprocessor load and store instructions. */
4838
4839 static void
4840 cleanup_copro_load_store (struct gdbarch *gdbarch,
4841 struct regcache *regs,
4842 arm_displaced_step_closure *dsc)
4843 {
4844 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
4845
4846 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4847
4848 if (dsc->u.ldst.writeback)
4849 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
4850 }
4851
4852 static void
4853 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
4854 arm_displaced_step_closure *dsc,
4855 int writeback, unsigned int rn)
4856 {
4857 ULONGEST rn_val;
4858
4859 /* Coprocessor load/store instructions:
4860
4861 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
4862 ->
4863 {stc/stc2} [r0, #+/-imm].
4864
4865 ldc/ldc2 are handled identically. */
4866
4867 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4868 rn_val = displaced_read_reg (regs, dsc, rn);
4869 /* PC should be 4-byte aligned. */
4870 rn_val = rn_val & 0xfffffffc;
4871 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4872
4873 dsc->u.ldst.writeback = writeback;
4874 dsc->u.ldst.rn = rn;
4875
4876 dsc->cleanup = &cleanup_copro_load_store;
4877 }
4878
4879 static int
4880 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
4881 struct regcache *regs,
4882 arm_displaced_step_closure *dsc)
4883 {
4884 unsigned int rn = bits (insn, 16, 19);
4885
4886 if (!insn_references_pc (insn, 0x000f0000ul))
4887 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
4888
4889 if (debug_displaced)
4890 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4891 "load/store insn %.8lx\n", (unsigned long) insn);
4892
4893 dsc->modinsn[0] = insn & 0xfff0ffff;
4894
4895 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
4896
4897 return 0;
4898 }
4899
4900 static int
4901 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
4902 uint16_t insn2, struct regcache *regs,
4903 arm_displaced_step_closure *dsc)
4904 {
4905 unsigned int rn = bits (insn1, 0, 3);
4906
4907 if (rn != ARM_PC_REGNUM)
4908 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
4909 "copro load/store", dsc);
4910
4911 if (debug_displaced)
4912 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4913 "load/store insn %.4x%.4x\n", insn1, insn2);
4914
4915 dsc->modinsn[0] = insn1 & 0xfff0;
4916 dsc->modinsn[1] = insn2;
4917 dsc->numinsns = 2;
4918
4919 /* This function is called for copying instruction LDC/LDC2/VLDR, which
4920 doesn't support writeback, so pass 0. */
4921 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
4922
4923 return 0;
4924 }
4925
4926 /* Clean up branch instructions (actually perform the branch, by setting
4927 PC). */
4928
4929 static void
4930 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
4931 arm_displaced_step_closure *dsc)
4932 {
4933 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
4934 int branch_taken = condition_true (dsc->u.branch.cond, status);
4935 enum pc_write_style write_pc = dsc->u.branch.exchange
4936 ? BX_WRITE_PC : BRANCH_WRITE_PC;
4937
4938 if (!branch_taken)
4939 return;
4940
4941 if (dsc->u.branch.link)
4942 {
4943 /* The value of LR should be the next insn of current one. In order
4944 not to confuse logic handling later insn `bx lr', if current insn mode
4945 is Thumb, the bit 0 of LR value should be set to 1. */
4946 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
4947
4948 if (dsc->is_thumb)
4949 next_insn_addr |= 0x1;
4950
4951 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
4952 CANNOT_WRITE_PC);
4953 }
4954
4955 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
4956 }
4957
4958 /* Copy B/BL/BLX instructions with immediate destinations. */
4959
4960 static void
4961 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
4962 arm_displaced_step_closure *dsc,
4963 unsigned int cond, int exchange, int link, long offset)
4964 {
4965 /* Implement "BL<cond> <label>" as:
4966
4967 Preparation: cond <- instruction condition
4968 Insn: mov r0, r0 (nop)
4969 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
4970
4971 B<cond> similar, but don't set r14 in cleanup. */
4972
4973 dsc->u.branch.cond = cond;
4974 dsc->u.branch.link = link;
4975 dsc->u.branch.exchange = exchange;
4976
4977 dsc->u.branch.dest = dsc->insn_addr;
4978 if (link && exchange)
4979 /* For BLX, offset is computed from the Align (PC, 4). */
4980 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
4981
4982 if (dsc->is_thumb)
4983 dsc->u.branch.dest += 4 + offset;
4984 else
4985 dsc->u.branch.dest += 8 + offset;
4986
4987 dsc->cleanup = &cleanup_branch;
4988 }
4989 static int
4990 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
4991 struct regcache *regs, arm_displaced_step_closure *dsc)
4992 {
4993 unsigned int cond = bits (insn, 28, 31);
4994 int exchange = (cond == 0xf);
4995 int link = exchange || bit (insn, 24);
4996 long offset;
4997
4998 if (debug_displaced)
4999 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
5000 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
5001 (unsigned long) insn);
5002 if (exchange)
5003 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
5004 then arrange the switch into Thumb mode. */
5005 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
5006 else
5007 offset = bits (insn, 0, 23) << 2;
5008
5009 if (bit (offset, 25))
5010 offset = offset | ~0x3ffffff;
5011
5012 dsc->modinsn[0] = ARM_NOP;
5013
5014 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
5015 return 0;
5016 }
5017
5018 static int
5019 thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
5020 uint16_t insn2, struct regcache *regs,
5021 arm_displaced_step_closure *dsc)
5022 {
5023 int link = bit (insn2, 14);
5024 int exchange = link && !bit (insn2, 12);
5025 int cond = INST_AL;
5026 long offset = 0;
5027 int j1 = bit (insn2, 13);
5028 int j2 = bit (insn2, 11);
5029 int s = sbits (insn1, 10, 10);
5030 int i1 = !(j1 ^ bit (insn1, 10));
5031 int i2 = !(j2 ^ bit (insn1, 10));
5032
5033 if (!link && !exchange) /* B */
5034 {
5035 offset = (bits (insn2, 0, 10) << 1);
5036 if (bit (insn2, 12)) /* Encoding T4 */
5037 {
5038 offset |= (bits (insn1, 0, 9) << 12)
5039 | (i2 << 22)
5040 | (i1 << 23)
5041 | (s << 24);
5042 cond = INST_AL;
5043 }
5044 else /* Encoding T3 */
5045 {
5046 offset |= (bits (insn1, 0, 5) << 12)
5047 | (j1 << 18)
5048 | (j2 << 19)
5049 | (s << 20);
5050 cond = bits (insn1, 6, 9);
5051 }
5052 }
5053 else
5054 {
5055 offset = (bits (insn1, 0, 9) << 12);
5056 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
5057 offset |= exchange ?
5058 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
5059 }
5060
5061 if (debug_displaced)
5062 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
5063 "%.4x %.4x with offset %.8lx\n",
5064 link ? (exchange) ? "blx" : "bl" : "b",
5065 insn1, insn2, offset);
5066
5067 dsc->modinsn[0] = THUMB_NOP;
5068
5069 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
5070 return 0;
5071 }
5072
5073 /* Copy B Thumb instructions. */
5074 static int
5075 thumb_copy_b (struct gdbarch *gdbarch, uint16_t insn,
5076 arm_displaced_step_closure *dsc)
5077 {
5078 unsigned int cond = 0;
5079 int offset = 0;
5080 unsigned short bit_12_15 = bits (insn, 12, 15);
5081 CORE_ADDR from = dsc->insn_addr;
5082
5083 if (bit_12_15 == 0xd)
5084 {
5085 /* offset = SignExtend (imm8:0, 32) */
5086 offset = sbits ((insn << 1), 0, 8);
5087 cond = bits (insn, 8, 11);
5088 }
5089 else if (bit_12_15 == 0xe) /* Encoding T2 */
5090 {
5091 offset = sbits ((insn << 1), 0, 11);
5092 cond = INST_AL;
5093 }
5094
5095 if (debug_displaced)
5096 fprintf_unfiltered (gdb_stdlog,
5097 "displaced: copying b immediate insn %.4x "
5098 "with offset %d\n", insn, offset);
5099
5100 dsc->u.branch.cond = cond;
5101 dsc->u.branch.link = 0;
5102 dsc->u.branch.exchange = 0;
5103 dsc->u.branch.dest = from + 4 + offset;
5104
5105 dsc->modinsn[0] = THUMB_NOP;
5106
5107 dsc->cleanup = &cleanup_branch;
5108
5109 return 0;
5110 }
5111
5112 /* Copy BX/BLX with register-specified destinations. */
5113
5114 static void
5115 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
5116 arm_displaced_step_closure *dsc, int link,
5117 unsigned int cond, unsigned int rm)
5118 {
5119 /* Implement {BX,BLX}<cond> <reg>" as:
5120
5121 Preparation: cond <- instruction condition
5122 Insn: mov r0, r0 (nop)
5123 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5124
5125 Don't set r14 in cleanup for BX. */
5126
5127 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
5128
5129 dsc->u.branch.cond = cond;
5130 dsc->u.branch.link = link;
5131
5132 dsc->u.branch.exchange = 1;
5133
5134 dsc->cleanup = &cleanup_branch;
5135 }
5136
5137 static int
5138 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
5139 struct regcache *regs, arm_displaced_step_closure *dsc)
5140 {
5141 unsigned int cond = bits (insn, 28, 31);
5142 /* BX: x12xxx1x
5143 BLX: x12xxx3x. */
5144 int link = bit (insn, 5);
5145 unsigned int rm = bits (insn, 0, 3);
5146
5147 if (debug_displaced)
5148 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
5149 (unsigned long) insn);
5150
5151 dsc->modinsn[0] = ARM_NOP;
5152
5153 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
5154 return 0;
5155 }
5156
5157 static int
5158 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
5159 struct regcache *regs,
5160 arm_displaced_step_closure *dsc)
5161 {
5162 int link = bit (insn, 7);
5163 unsigned int rm = bits (insn, 3, 6);
5164
5165 if (debug_displaced)
5166 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
5167 (unsigned short) insn);
5168
5169 dsc->modinsn[0] = THUMB_NOP;
5170
5171 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
5172
5173 return 0;
5174 }
5175
5176
5177 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
5178
5179 static void
5180 cleanup_alu_imm (struct gdbarch *gdbarch,
5181 struct regcache *regs, arm_displaced_step_closure *dsc)
5182 {
5183 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5184 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5185 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5186 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5187 }
5188
5189 static int
5190 arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5191 arm_displaced_step_closure *dsc)
5192 {
5193 unsigned int rn = bits (insn, 16, 19);
5194 unsigned int rd = bits (insn, 12, 15);
5195 unsigned int op = bits (insn, 21, 24);
5196 int is_mov = (op == 0xd);
5197 ULONGEST rd_val, rn_val;
5198
5199 if (!insn_references_pc (insn, 0x000ff000ul))
5200 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
5201
5202 if (debug_displaced)
5203 fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
5204 "%.8lx\n", is_mov ? "move" : "ALU",
5205 (unsigned long) insn);
5206
5207 /* Instruction is of form:
5208
5209 <op><cond> rd, [rn,] #imm
5210
5211 Rewrite as:
5212
5213 Preparation: tmp1, tmp2 <- r0, r1;
5214 r0, r1 <- rd, rn
5215 Insn: <op><cond> r0, r1, #imm
5216 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5217 */
5218
5219 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5220 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5221 rn_val = displaced_read_reg (regs, dsc, rn);
5222 rd_val = displaced_read_reg (regs, dsc, rd);
5223 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5224 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5225 dsc->rd = rd;
5226
5227 if (is_mov)
5228 dsc->modinsn[0] = insn & 0xfff00fff;
5229 else
5230 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
5231
5232 dsc->cleanup = &cleanup_alu_imm;
5233
5234 return 0;
5235 }
5236
5237 static int
5238 thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
5239 uint16_t insn2, struct regcache *regs,
5240 arm_displaced_step_closure *dsc)
5241 {
5242 unsigned int op = bits (insn1, 5, 8);
5243 unsigned int rn, rm, rd;
5244 ULONGEST rd_val, rn_val;
5245
5246 rn = bits (insn1, 0, 3); /* Rn */
5247 rm = bits (insn2, 0, 3); /* Rm */
5248 rd = bits (insn2, 8, 11); /* Rd */
5249
5250 /* This routine is only called for instruction MOV. */
5251 gdb_assert (op == 0x2 && rn == 0xf);
5252
5253 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
5254 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
5255
5256 if (debug_displaced)
5257 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
5258 "ALU", insn1, insn2);
5259
5260 /* Instruction is of form:
5261
5262 <op><cond> rd, [rn,] #imm
5263
5264 Rewrite as:
5265
5266 Preparation: tmp1, tmp2 <- r0, r1;
5267 r0, r1 <- rd, rn
5268 Insn: <op><cond> r0, r1, #imm
5269 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5270 */
5271
5272 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5273 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5274 rn_val = displaced_read_reg (regs, dsc, rn);
5275 rd_val = displaced_read_reg (regs, dsc, rd);
5276 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5277 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5278 dsc->rd = rd;
5279
5280 dsc->modinsn[0] = insn1;
5281 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
5282 dsc->numinsns = 2;
5283
5284 dsc->cleanup = &cleanup_alu_imm;
5285
5286 return 0;
5287 }
5288
5289 /* Copy/cleanup arithmetic/logic insns with register RHS. */
5290
5291 static void
5292 cleanup_alu_reg (struct gdbarch *gdbarch,
5293 struct regcache *regs, arm_displaced_step_closure *dsc)
5294 {
5295 ULONGEST rd_val;
5296 int i;
5297
5298 rd_val = displaced_read_reg (regs, dsc, 0);
5299
5300 for (i = 0; i < 3; i++)
5301 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5302
5303 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5304 }
5305
5306 static void
5307 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
5308 arm_displaced_step_closure *dsc,
5309 unsigned int rd, unsigned int rn, unsigned int rm)
5310 {
5311 ULONGEST rd_val, rn_val, rm_val;
5312
5313 /* Instruction is of form:
5314
5315 <op><cond> rd, [rn,] rm [, <shift>]
5316
5317 Rewrite as:
5318
5319 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
5320 r0, r1, r2 <- rd, rn, rm
5321 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
5322 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
5323 */
5324
5325 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5326 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5327 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5328 rd_val = displaced_read_reg (regs, dsc, rd);
5329 rn_val = displaced_read_reg (regs, dsc, rn);
5330 rm_val = displaced_read_reg (regs, dsc, rm);
5331 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5332 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5333 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5334 dsc->rd = rd;
5335
5336 dsc->cleanup = &cleanup_alu_reg;
5337 }
5338
5339 static int
5340 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5341 arm_displaced_step_closure *dsc)
5342 {
5343 unsigned int op = bits (insn, 21, 24);
5344 int is_mov = (op == 0xd);
5345
5346 if (!insn_references_pc (insn, 0x000ff00ful))
5347 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
5348
5349 if (debug_displaced)
5350 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
5351 is_mov ? "move" : "ALU", (unsigned long) insn);
5352
5353 if (is_mov)
5354 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
5355 else
5356 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
5357
5358 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
5359 bits (insn, 0, 3));
5360 return 0;
5361 }
5362
5363 static int
5364 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
5365 struct regcache *regs,
5366 arm_displaced_step_closure *dsc)
5367 {
5368 unsigned rm, rd;
5369
5370 rm = bits (insn, 3, 6);
5371 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
5372
5373 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
5374 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
5375
5376 if (debug_displaced)
5377 fprintf_unfiltered (gdb_stdlog, "displaced: copying ALU reg insn %.4x\n",
5378 (unsigned short) insn);
5379
5380 dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
5381
5382 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
5383
5384 return 0;
5385 }
5386
5387 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
5388
5389 static void
5390 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
5391 struct regcache *regs,
5392 arm_displaced_step_closure *dsc)
5393 {
5394 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5395 int i;
5396
5397 for (i = 0; i < 4; i++)
5398 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5399
5400 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5401 }
5402
5403 static void
5404 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
5405 arm_displaced_step_closure *dsc,
5406 unsigned int rd, unsigned int rn, unsigned int rm,
5407 unsigned rs)
5408 {
5409 int i;
5410 ULONGEST rd_val, rn_val, rm_val, rs_val;
5411
5412 /* Instruction is of form:
5413
5414 <op><cond> rd, [rn,] rm, <shift> rs
5415
5416 Rewrite as:
5417
5418 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
5419 r0, r1, r2, r3 <- rd, rn, rm, rs
5420 Insn: <op><cond> r0, r1, r2, <shift> r3
5421 Cleanup: tmp5 <- r0
5422 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
5423 rd <- tmp5
5424 */
5425
5426 for (i = 0; i < 4; i++)
5427 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
5428
5429 rd_val = displaced_read_reg (regs, dsc, rd);
5430 rn_val = displaced_read_reg (regs, dsc, rn);
5431 rm_val = displaced_read_reg (regs, dsc, rm);
5432 rs_val = displaced_read_reg (regs, dsc, rs);
5433 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5434 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5435 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5436 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
5437 dsc->rd = rd;
5438 dsc->cleanup = &cleanup_alu_shifted_reg;
5439 }
5440
5441 static int
5442 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
5443 struct regcache *regs,
5444 arm_displaced_step_closure *dsc)
5445 {
5446 unsigned int op = bits (insn, 21, 24);
5447 int is_mov = (op == 0xd);
5448 unsigned int rd, rn, rm, rs;
5449
5450 if (!insn_references_pc (insn, 0x000fff0ful))
5451 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
5452
5453 if (debug_displaced)
5454 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
5455 "%.8lx\n", is_mov ? "move" : "ALU",
5456 (unsigned long) insn);
5457
5458 rn = bits (insn, 16, 19);
5459 rm = bits (insn, 0, 3);
5460 rs = bits (insn, 8, 11);
5461 rd = bits (insn, 12, 15);
5462
5463 if (is_mov)
5464 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
5465 else
5466 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
5467
5468 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
5469
5470 return 0;
5471 }
5472
5473 /* Clean up load instructions. */
5474
5475 static void
5476 cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
5477 arm_displaced_step_closure *dsc)
5478 {
5479 ULONGEST rt_val, rt_val2 = 0, rn_val;
5480
5481 rt_val = displaced_read_reg (regs, dsc, 0);
5482 if (dsc->u.ldst.xfersize == 8)
5483 rt_val2 = displaced_read_reg (regs, dsc, 1);
5484 rn_val = displaced_read_reg (regs, dsc, 2);
5485
5486 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5487 if (dsc->u.ldst.xfersize > 4)
5488 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5489 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5490 if (!dsc->u.ldst.immed)
5491 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5492
5493 /* Handle register writeback. */
5494 if (dsc->u.ldst.writeback)
5495 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5496 /* Put result in right place. */
5497 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
5498 if (dsc->u.ldst.xfersize == 8)
5499 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
5500 }
5501
5502 /* Clean up store instructions. */
5503
5504 static void
5505 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
5506 arm_displaced_step_closure *dsc)
5507 {
5508 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
5509
5510 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5511 if (dsc->u.ldst.xfersize > 4)
5512 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5513 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5514 if (!dsc->u.ldst.immed)
5515 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5516 if (!dsc->u.ldst.restore_r4)
5517 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
5518
5519 /* Writeback. */
5520 if (dsc->u.ldst.writeback)
5521 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5522 }
5523
5524 /* Copy "extra" load/store instructions. These are halfword/doubleword
5525 transfers, which have a different encoding to byte/word transfers. */
5526
5527 static int
5528 arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
5529 struct regcache *regs, arm_displaced_step_closure *dsc)
5530 {
5531 unsigned int op1 = bits (insn, 20, 24);
5532 unsigned int op2 = bits (insn, 5, 6);
5533 unsigned int rt = bits (insn, 12, 15);
5534 unsigned int rn = bits (insn, 16, 19);
5535 unsigned int rm = bits (insn, 0, 3);
5536 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
5537 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
5538 int immed = (op1 & 0x4) != 0;
5539 int opcode;
5540 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
5541
5542 if (!insn_references_pc (insn, 0x000ff00ful))
5543 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
5544
5545 if (debug_displaced)
5546 fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
5547 "insn %.8lx\n", unprivileged ? "unprivileged " : "",
5548 (unsigned long) insn);
5549
5550 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
5551
5552 if (opcode < 0)
5553 internal_error (__FILE__, __LINE__,
5554 _("copy_extra_ld_st: instruction decode error"));
5555
5556 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5557 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5558 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5559 if (!immed)
5560 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5561
5562 rt_val = displaced_read_reg (regs, dsc, rt);
5563 if (bytesize[opcode] == 8)
5564 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
5565 rn_val = displaced_read_reg (regs, dsc, rn);
5566 if (!immed)
5567 rm_val = displaced_read_reg (regs, dsc, rm);
5568
5569 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5570 if (bytesize[opcode] == 8)
5571 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
5572 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5573 if (!immed)
5574 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5575
5576 dsc->rd = rt;
5577 dsc->u.ldst.xfersize = bytesize[opcode];
5578 dsc->u.ldst.rn = rn;
5579 dsc->u.ldst.immed = immed;
5580 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
5581 dsc->u.ldst.restore_r4 = 0;
5582
5583 if (immed)
5584 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
5585 ->
5586 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
5587 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5588 else
5589 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
5590 ->
5591 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
5592 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5593
5594 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
5595
5596 return 0;
5597 }
5598
5599 /* Copy byte/half word/word loads and stores. */
5600
5601 static void
5602 install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5603 arm_displaced_step_closure *dsc, int load,
5604 int immed, int writeback, int size, int usermode,
5605 int rt, int rm, int rn)
5606 {
5607 ULONGEST rt_val, rn_val, rm_val = 0;
5608
5609 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5610 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5611 if (!immed)
5612 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5613 if (!load)
5614 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
5615
5616 rt_val = displaced_read_reg (regs, dsc, rt);
5617 rn_val = displaced_read_reg (regs, dsc, rn);
5618 if (!immed)
5619 rm_val = displaced_read_reg (regs, dsc, rm);
5620
5621 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5622 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5623 if (!immed)
5624 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5625 dsc->rd = rt;
5626 dsc->u.ldst.xfersize = size;
5627 dsc->u.ldst.rn = rn;
5628 dsc->u.ldst.immed = immed;
5629 dsc->u.ldst.writeback = writeback;
5630
5631 /* To write PC we can do:
5632
5633 Before this sequence of instructions:
5634 r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
5635 r2 is the Rn value got from displaced_read_reg.
5636
5637 Insn1: push {pc} Write address of STR instruction + offset on stack
5638 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
5639 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
5640 = addr(Insn1) + offset - addr(Insn3) - 8
5641 = offset - 16
5642 Insn4: add r4, r4, #8 r4 = offset - 8
5643 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
5644 = from + offset
5645 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
5646
5647 Otherwise we don't know what value to write for PC, since the offset is
5648 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
5649 of this can be found in Section "Saving from r15" in
5650 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
5651
5652 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5653 }
5654
5655
5656 static int
5657 thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
5658 uint16_t insn2, struct regcache *regs,
5659 arm_displaced_step_closure *dsc, int size)
5660 {
5661 unsigned int u_bit = bit (insn1, 7);
5662 unsigned int rt = bits (insn2, 12, 15);
5663 int imm12 = bits (insn2, 0, 11);
5664 ULONGEST pc_val;
5665
5666 if (debug_displaced)
5667 fprintf_unfiltered (gdb_stdlog,
5668 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
5669 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
5670 imm12);
5671
5672 if (!u_bit)
5673 imm12 = -1 * imm12;
5674
5675 /* Rewrite instruction LDR Rt imm12 into:
5676
5677 Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12
5678
5679 LDR R0, R2, R3,
5680
5681 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2]. */
5682
5683
5684 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5685 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5686 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5687
5688 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5689
5690 pc_val = pc_val & 0xfffffffc;
5691
5692 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
5693 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
5694
5695 dsc->rd = rt;
5696
5697 dsc->u.ldst.xfersize = size;
5698 dsc->u.ldst.immed = 0;
5699 dsc->u.ldst.writeback = 0;
5700 dsc->u.ldst.restore_r4 = 0;
5701
5702 /* LDR R0, R2, R3 */
5703 dsc->modinsn[0] = 0xf852;
5704 dsc->modinsn[1] = 0x3;
5705 dsc->numinsns = 2;
5706
5707 dsc->cleanup = &cleanup_load;
5708
5709 return 0;
5710 }
5711
5712 static int
5713 thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
5714 uint16_t insn2, struct regcache *regs,
5715 arm_displaced_step_closure *dsc,
5716 int writeback, int immed)
5717 {
5718 unsigned int rt = bits (insn2, 12, 15);
5719 unsigned int rn = bits (insn1, 0, 3);
5720 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
5721 /* In LDR (register), there is also a register Rm, which is not allowed to
5722 be PC, so we don't have to check it. */
5723
5724 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
5725 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
5726 dsc);
5727
5728 if (debug_displaced)
5729 fprintf_unfiltered (gdb_stdlog,
5730 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
5731 rt, rn, insn1, insn2);
5732
5733 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
5734 0, rt, rm, rn);
5735
5736 dsc->u.ldst.restore_r4 = 0;
5737
5738 if (immed)
5739 /* ldr[b]<cond> rt, [rn, #imm], etc.
5740 ->
5741 ldr[b]<cond> r0, [r2, #imm]. */
5742 {
5743 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5744 dsc->modinsn[1] = insn2 & 0x0fff;
5745 }
5746 else
5747 /* ldr[b]<cond> rt, [rn, rm], etc.
5748 ->
5749 ldr[b]<cond> r0, [r2, r3]. */
5750 {
5751 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5752 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
5753 }
5754
5755 dsc->numinsns = 2;
5756
5757 return 0;
5758 }
5759
5760
5761 static int
5762 arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
5763 struct regcache *regs,
5764 arm_displaced_step_closure *dsc,
5765 int load, int size, int usermode)
5766 {
5767 int immed = !bit (insn, 25);
5768 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
5769 unsigned int rt = bits (insn, 12, 15);
5770 unsigned int rn = bits (insn, 16, 19);
5771 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
5772
5773 if (!insn_references_pc (insn, 0x000ff00ful))
5774 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
5775
5776 if (debug_displaced)
5777 fprintf_unfiltered (gdb_stdlog,
5778 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
5779 load ? (size == 1 ? "ldrb" : "ldr")
5780 : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
5781 rt, rn,
5782 (unsigned long) insn);
5783
5784 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
5785 usermode, rt, rm, rn);
5786
5787 if (load || rt != ARM_PC_REGNUM)
5788 {
5789 dsc->u.ldst.restore_r4 = 0;
5790
5791 if (immed)
5792 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
5793 ->
5794 {ldr,str}[b]<cond> r0, [r2, #imm]. */
5795 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5796 else
5797 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
5798 ->
5799 {ldr,str}[b]<cond> r0, [r2, r3]. */
5800 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5801 }
5802 else
5803 {
5804 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
5805 dsc->u.ldst.restore_r4 = 1;
5806 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
5807 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
5808 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
5809 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
5810 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
5811
5812 /* As above. */
5813 if (immed)
5814 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
5815 else
5816 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
5817
5818 dsc->numinsns = 6;
5819 }
5820
5821 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5822
5823 return 0;
5824 }
5825
5826 /* Cleanup LDM instructions with fully-populated register list. This is an
5827 unfortunate corner case: it's impossible to implement correctly by modifying
5828 the instruction. The issue is as follows: we have an instruction,
5829
5830 ldm rN, {r0-r15}
5831
5832 which we must rewrite to avoid loading PC. A possible solution would be to
5833 do the load in two halves, something like (with suitable cleanup
5834 afterwards):
5835
5836 mov r8, rN
5837 ldm[id][ab] r8!, {r0-r7}
5838 str r7, <temp>
5839 ldm[id][ab] r8, {r7-r14}
5840 <bkpt>
5841
5842 but at present there's no suitable place for <temp>, since the scratch space
5843 is overwritten before the cleanup routine is called. For now, we simply
5844 emulate the instruction. */
5845
5846 static void
5847 cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
5848 arm_displaced_step_closure *dsc)
5849 {
5850 int inc = dsc->u.block.increment;
5851 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
5852 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
5853 uint32_t regmask = dsc->u.block.regmask;
5854 int regno = inc ? 0 : 15;
5855 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
5856 int exception_return = dsc->u.block.load && dsc->u.block.user
5857 && (regmask & 0x8000) != 0;
5858 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5859 int do_transfer = condition_true (dsc->u.block.cond, status);
5860 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5861
5862 if (!do_transfer)
5863 return;
5864
5865 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
5866 sensible we can do here. Complain loudly. */
5867 if (exception_return)
5868 error (_("Cannot single-step exception return"));
5869
5870 /* We don't handle any stores here for now. */
5871 gdb_assert (dsc->u.block.load != 0);
5872
5873 if (debug_displaced)
5874 fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
5875 "%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
5876 dsc->u.block.increment ? "inc" : "dec",
5877 dsc->u.block.before ? "before" : "after");
5878
5879 while (regmask)
5880 {
5881 uint32_t memword;
5882
5883 if (inc)
5884 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
5885 regno++;
5886 else
5887 while (regno >= 0 && (regmask & (1 << regno)) == 0)
5888 regno--;
5889
5890 xfer_addr += bump_before;
5891
5892 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
5893 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
5894
5895 xfer_addr += bump_after;
5896
5897 regmask &= ~(1 << regno);
5898 }
5899
5900 if (dsc->u.block.writeback)
5901 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
5902 CANNOT_WRITE_PC);
5903 }
5904
5905 /* Clean up an STM which included the PC in the register list. */
5906
5907 static void
5908 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
5909 arm_displaced_step_closure *dsc)
5910 {
5911 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5912 int store_executed = condition_true (dsc->u.block.cond, status);
5913 CORE_ADDR pc_stored_at, transferred_regs
5914 = count_one_bits (dsc->u.block.regmask);
5915 CORE_ADDR stm_insn_addr;
5916 uint32_t pc_val;
5917 long offset;
5918 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5919
5920 /* If condition code fails, there's nothing else to do. */
5921 if (!store_executed)
5922 return;
5923
5924 if (dsc->u.block.increment)
5925 {
5926 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
5927
5928 if (dsc->u.block.before)
5929 pc_stored_at += 4;
5930 }
5931 else
5932 {
5933 pc_stored_at = dsc->u.block.xfer_addr;
5934
5935 if (dsc->u.block.before)
5936 pc_stored_at -= 4;
5937 }
5938
5939 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
5940 stm_insn_addr = dsc->scratch_base;
5941 offset = pc_val - stm_insn_addr;
5942
5943 if (debug_displaced)
5944 fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
5945 "STM instruction\n", offset);
5946
5947 /* Rewrite the stored PC to the proper value for the non-displaced original
5948 instruction. */
5949 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
5950 dsc->insn_addr + offset);
5951 }
5952
5953 /* Clean up an LDM which includes the PC in the register list. We clumped all
5954 the registers in the transferred list into a contiguous range r0...rX (to
5955 avoid loading PC directly and losing control of the debugged program), so we
5956 must undo that here. */
5957
5958 static void
5959 cleanup_block_load_pc (struct gdbarch *gdbarch,
5960 struct regcache *regs,
5961 arm_displaced_step_closure *dsc)
5962 {
5963 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5964 int load_executed = condition_true (dsc->u.block.cond, status);
5965 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
5966 unsigned int regs_loaded = count_one_bits (mask);
5967 unsigned int num_to_shuffle = regs_loaded, clobbered;
5968
5969 /* The method employed here will fail if the register list is fully populated
5970 (we need to avoid loading PC directly). */
5971 gdb_assert (num_to_shuffle < 16);
5972
5973 if (!load_executed)
5974 return;
5975
5976 clobbered = (1 << num_to_shuffle) - 1;
5977
5978 while (num_to_shuffle > 0)
5979 {
5980 if ((mask & (1 << write_reg)) != 0)
5981 {
5982 unsigned int read_reg = num_to_shuffle - 1;
5983
5984 if (read_reg != write_reg)
5985 {
5986 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
5987 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
5988 if (debug_displaced)
5989 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
5990 "loaded register r%d to r%d\n"), read_reg,
5991 write_reg);
5992 }
5993 else if (debug_displaced)
5994 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
5995 "r%d already in the right place\n"),
5996 write_reg);
5997
5998 clobbered &= ~(1 << write_reg);
5999
6000 num_to_shuffle--;
6001 }
6002
6003 write_reg--;
6004 }
6005
6006 /* Restore any registers we scribbled over. */
6007 for (write_reg = 0; clobbered != 0; write_reg++)
6008 {
6009 if ((clobbered & (1 << write_reg)) != 0)
6010 {
6011 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
6012 CANNOT_WRITE_PC);
6013 if (debug_displaced)
6014 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
6015 "clobbered register r%d\n"), write_reg);
6016 clobbered &= ~(1 << write_reg);
6017 }
6018 }
6019
6020 /* Perform register writeback manually. */
6021 if (dsc->u.block.writeback)
6022 {
6023 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
6024
6025 if (dsc->u.block.increment)
6026 new_rn_val += regs_loaded * 4;
6027 else
6028 new_rn_val -= regs_loaded * 4;
6029
6030 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
6031 CANNOT_WRITE_PC);
6032 }
6033 }
6034
6035 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
6036 in user-level code (in particular exception return, ldm rn, {...pc}^). */
6037
6038 static int
6039 arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
6040 struct regcache *regs,
6041 arm_displaced_step_closure *dsc)
6042 {
6043 int load = bit (insn, 20);
6044 int user = bit (insn, 22);
6045 int increment = bit (insn, 23);
6046 int before = bit (insn, 24);
6047 int writeback = bit (insn, 21);
6048 int rn = bits (insn, 16, 19);
6049
6050 /* Block transfers which don't mention PC can be run directly
6051 out-of-line. */
6052 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
6053 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
6054
6055 if (rn == ARM_PC_REGNUM)
6056 {
6057 warning (_("displaced: Unpredictable LDM or STM with "
6058 "base register r15"));
6059 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
6060 }
6061
6062 if (debug_displaced)
6063 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6064 "%.8lx\n", (unsigned long) insn);
6065
6066 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
6067 dsc->u.block.rn = rn;
6068
6069 dsc->u.block.load = load;
6070 dsc->u.block.user = user;
6071 dsc->u.block.increment = increment;
6072 dsc->u.block.before = before;
6073 dsc->u.block.writeback = writeback;
6074 dsc->u.block.cond = bits (insn, 28, 31);
6075
6076 dsc->u.block.regmask = insn & 0xffff;
6077
6078 if (load)
6079 {
6080 if ((insn & 0xffff) == 0xffff)
6081 {
6082 /* LDM with a fully-populated register list. This case is
6083 particularly tricky. Implement for now by fully emulating the
6084 instruction (which might not behave perfectly in all cases, but
6085 these instructions should be rare enough for that not to matter
6086 too much). */
6087 dsc->modinsn[0] = ARM_NOP;
6088
6089 dsc->cleanup = &cleanup_block_load_all;
6090 }
6091 else
6092 {
6093 /* LDM of a list of registers which includes PC. Implement by
6094 rewriting the list of registers to be transferred into a
6095 contiguous chunk r0...rX before doing the transfer, then shuffling
6096 registers into the correct places in the cleanup routine. */
6097 unsigned int regmask = insn & 0xffff;
6098 unsigned int num_in_list = count_one_bits (regmask), new_regmask;
6099 unsigned int i;
6100
6101 for (i = 0; i < num_in_list; i++)
6102 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6103
6104 /* Writeback makes things complicated. We need to avoid clobbering
6105 the base register with one of the registers in our modified
6106 register list, but just using a different register can't work in
6107 all cases, e.g.:
6108
6109 ldm r14!, {r0-r13,pc}
6110
6111 which would need to be rewritten as:
6112
6113 ldm rN!, {r0-r14}
6114
6115 but that can't work, because there's no free register for N.
6116
6117 Solve this by turning off the writeback bit, and emulating
6118 writeback manually in the cleanup routine. */
6119
6120 if (writeback)
6121 insn &= ~(1 << 21);
6122
6123 new_regmask = (1 << num_in_list) - 1;
6124
6125 if (debug_displaced)
6126 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6127 "{..., pc}: original reg list %.4x, modified "
6128 "list %.4x\n"), rn, writeback ? "!" : "",
6129 (int) insn & 0xffff, new_regmask);
6130
6131 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
6132
6133 dsc->cleanup = &cleanup_block_load_pc;
6134 }
6135 }
6136 else
6137 {
6138 /* STM of a list of registers which includes PC. Run the instruction
6139 as-is, but out of line: this will store the wrong value for the PC,
6140 so we must manually fix up the memory in the cleanup routine.
6141 Doing things this way has the advantage that we can auto-detect
6142 the offset of the PC write (which is architecture-dependent) in
6143 the cleanup routine. */
6144 dsc->modinsn[0] = insn;
6145
6146 dsc->cleanup = &cleanup_block_store_pc;
6147 }
6148
6149 return 0;
6150 }
6151
6152 static int
6153 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6154 struct regcache *regs,
6155 arm_displaced_step_closure *dsc)
6156 {
6157 int rn = bits (insn1, 0, 3);
6158 int load = bit (insn1, 4);
6159 int writeback = bit (insn1, 5);
6160
6161 /* Block transfers which don't mention PC can be run directly
6162 out-of-line. */
6163 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
6164 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
6165
6166 if (rn == ARM_PC_REGNUM)
6167 {
6168 warning (_("displaced: Unpredictable LDM or STM with "
6169 "base register r15"));
6170 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6171 "unpredictable ldm/stm", dsc);
6172 }
6173
6174 if (debug_displaced)
6175 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6176 "%.4x%.4x\n", insn1, insn2);
6177
6178 /* Clear bit 13, since it should be always zero. */
6179 dsc->u.block.regmask = (insn2 & 0xdfff);
6180 dsc->u.block.rn = rn;
6181
6182 dsc->u.block.load = load;
6183 dsc->u.block.user = 0;
6184 dsc->u.block.increment = bit (insn1, 7);
6185 dsc->u.block.before = bit (insn1, 8);
6186 dsc->u.block.writeback = writeback;
6187 dsc->u.block.cond = INST_AL;
6188 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
6189
6190 if (load)
6191 {
6192 if (dsc->u.block.regmask == 0xffff)
6193 {
6194 /* This branch is impossible to happen. */
6195 gdb_assert (0);
6196 }
6197 else
6198 {
6199 unsigned int regmask = dsc->u.block.regmask;
6200 unsigned int num_in_list = count_one_bits (regmask), new_regmask;
6201 unsigned int i;
6202
6203 for (i = 0; i < num_in_list; i++)
6204 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6205
6206 if (writeback)
6207 insn1 &= ~(1 << 5);
6208
6209 new_regmask = (1 << num_in_list) - 1;
6210
6211 if (debug_displaced)
6212 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6213 "{..., pc}: original reg list %.4x, modified "
6214 "list %.4x\n"), rn, writeback ? "!" : "",
6215 (int) dsc->u.block.regmask, new_regmask);
6216
6217 dsc->modinsn[0] = insn1;
6218 dsc->modinsn[1] = (new_regmask & 0xffff);
6219 dsc->numinsns = 2;
6220
6221 dsc->cleanup = &cleanup_block_load_pc;
6222 }
6223 }
6224 else
6225 {
6226 dsc->modinsn[0] = insn1;
6227 dsc->modinsn[1] = insn2;
6228 dsc->numinsns = 2;
6229 dsc->cleanup = &cleanup_block_store_pc;
6230 }
6231 return 0;
6232 }
6233
6234 /* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
6235 This is used to avoid a dependency on BFD's bfd_endian enum. */
6236
6237 ULONGEST
6238 arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len,
6239 int byte_order)
6240 {
6241 return read_memory_unsigned_integer (memaddr, len,
6242 (enum bfd_endian) byte_order);
6243 }
6244
6245 /* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
6246
6247 CORE_ADDR
6248 arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self,
6249 CORE_ADDR val)
6250 {
6251 return gdbarch_addr_bits_remove (self->regcache->arch (), val);
6252 }
6253
6254 /* Wrapper over syscall_next_pc for use in get_next_pcs. */
6255
6256 static CORE_ADDR
6257 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self)
6258 {
6259 return 0;
6260 }
6261
6262 /* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
6263
6264 int
6265 arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self)
6266 {
6267 return arm_is_thumb (self->regcache);
6268 }
6269
6270 /* single_step() is called just before we want to resume the inferior,
6271 if we want to single-step it but there is no hardware or kernel
6272 single-step support. We find the target of the coming instructions
6273 and breakpoint them. */
6274
6275 std::vector<CORE_ADDR>
6276 arm_software_single_step (struct regcache *regcache)
6277 {
6278 struct gdbarch *gdbarch = regcache->arch ();
6279 struct arm_get_next_pcs next_pcs_ctx;
6280
6281 arm_get_next_pcs_ctor (&next_pcs_ctx,
6282 &arm_get_next_pcs_ops,
6283 gdbarch_byte_order (gdbarch),
6284 gdbarch_byte_order_for_code (gdbarch),
6285 0,
6286 regcache);
6287
6288 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
6289
6290 for (CORE_ADDR &pc_ref : next_pcs)
6291 pc_ref = gdbarch_addr_bits_remove (gdbarch, pc_ref);
6292
6293 return next_pcs;
6294 }
6295
6296 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6297 for Linux, where some SVC instructions must be treated specially. */
6298
6299 static void
6300 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
6301 arm_displaced_step_closure *dsc)
6302 {
6303 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
6304
6305 if (debug_displaced)
6306 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
6307 "%.8lx\n", (unsigned long) resume_addr);
6308
6309 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
6310 }
6311
6312
6313 /* Common copy routine for svc instruction. */
6314
6315 static int
6316 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
6317 arm_displaced_step_closure *dsc)
6318 {
6319 /* Preparation: none.
6320 Insn: unmodified svc.
6321 Cleanup: pc <- insn_addr + insn_size. */
6322
6323 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
6324 instruction. */
6325 dsc->wrote_to_pc = 1;
6326
6327 /* Allow OS-specific code to override SVC handling. */
6328 if (dsc->u.svc.copy_svc_os)
6329 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
6330 else
6331 {
6332 dsc->cleanup = &cleanup_svc;
6333 return 0;
6334 }
6335 }
6336
6337 static int
6338 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
6339 struct regcache *regs, arm_displaced_step_closure *dsc)
6340 {
6341
6342 if (debug_displaced)
6343 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
6344 (unsigned long) insn);
6345
6346 dsc->modinsn[0] = insn;
6347
6348 return install_svc (gdbarch, regs, dsc);
6349 }
6350
6351 static int
6352 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
6353 struct regcache *regs, arm_displaced_step_closure *dsc)
6354 {
6355
6356 if (debug_displaced)
6357 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
6358 insn);
6359
6360 dsc->modinsn[0] = insn;
6361
6362 return install_svc (gdbarch, regs, dsc);
6363 }
6364
6365 /* Copy undefined instructions. */
6366
6367 static int
6368 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
6369 arm_displaced_step_closure *dsc)
6370 {
6371 if (debug_displaced)
6372 fprintf_unfiltered (gdb_stdlog,
6373 "displaced: copying undefined insn %.8lx\n",
6374 (unsigned long) insn);
6375
6376 dsc->modinsn[0] = insn;
6377
6378 return 0;
6379 }
6380
6381 static int
6382 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6383 arm_displaced_step_closure *dsc)
6384 {
6385
6386 if (debug_displaced)
6387 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
6388 "%.4x %.4x\n", (unsigned short) insn1,
6389 (unsigned short) insn2);
6390
6391 dsc->modinsn[0] = insn1;
6392 dsc->modinsn[1] = insn2;
6393 dsc->numinsns = 2;
6394
6395 return 0;
6396 }
6397
6398 /* Copy unpredictable instructions. */
6399
6400 static int
6401 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
6402 arm_displaced_step_closure *dsc)
6403 {
6404 if (debug_displaced)
6405 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
6406 "%.8lx\n", (unsigned long) insn);
6407
6408 dsc->modinsn[0] = insn;
6409
6410 return 0;
6411 }
6412
6413 /* The decode_* functions are instruction decoding helpers. They mostly follow
6414 the presentation in the ARM ARM. */
6415
6416 static int
6417 arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
6418 struct regcache *regs,
6419 arm_displaced_step_closure *dsc)
6420 {
6421 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
6422 unsigned int rn = bits (insn, 16, 19);
6423
6424 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0x1) == 0x0)
6425 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
6426 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0x1) == 0x1)
6427 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
6428 else if ((op1 & 0x60) == 0x20)
6429 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
6430 else if ((op1 & 0x71) == 0x40)
6431 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
6432 dsc);
6433 else if ((op1 & 0x77) == 0x41)
6434 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6435 else if ((op1 & 0x77) == 0x45)
6436 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
6437 else if ((op1 & 0x77) == 0x51)
6438 {
6439 if (rn != 0xf)
6440 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6441 else
6442 return arm_copy_unpred (gdbarch, insn, dsc);
6443 }
6444 else if ((op1 & 0x77) == 0x55)
6445 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6446 else if (op1 == 0x57)
6447 switch (op2)
6448 {
6449 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
6450 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
6451 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
6452 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
6453 default: return arm_copy_unpred (gdbarch, insn, dsc);
6454 }
6455 else if ((op1 & 0x63) == 0x43)
6456 return arm_copy_unpred (gdbarch, insn, dsc);
6457 else if ((op2 & 0x1) == 0x0)
6458 switch (op1 & ~0x80)
6459 {
6460 case 0x61:
6461 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6462 case 0x65:
6463 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
6464 case 0x71: case 0x75:
6465 /* pld/pldw reg. */
6466 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
6467 case 0x63: case 0x67: case 0x73: case 0x77:
6468 return arm_copy_unpred (gdbarch, insn, dsc);
6469 default:
6470 return arm_copy_undef (gdbarch, insn, dsc);
6471 }
6472 else
6473 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
6474 }
6475
6476 static int
6477 arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
6478 struct regcache *regs,
6479 arm_displaced_step_closure *dsc)
6480 {
6481 if (bit (insn, 27) == 0)
6482 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
6483 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
6484 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
6485 {
6486 case 0x0: case 0x2:
6487 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
6488
6489 case 0x1: case 0x3:
6490 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
6491
6492 case 0x4: case 0x5: case 0x6: case 0x7:
6493 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6494
6495 case 0x8:
6496 switch ((insn & 0xe00000) >> 21)
6497 {
6498 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
6499 /* stc/stc2. */
6500 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6501
6502 case 0x2:
6503 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6504
6505 default:
6506 return arm_copy_undef (gdbarch, insn, dsc);
6507 }
6508
6509 case 0x9:
6510 {
6511 int rn_f = (bits (insn, 16, 19) == 0xf);
6512 switch ((insn & 0xe00000) >> 21)
6513 {
6514 case 0x1: case 0x3:
6515 /* ldc/ldc2 imm (undefined for rn == pc). */
6516 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
6517 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6518
6519 case 0x2:
6520 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6521
6522 case 0x4: case 0x5: case 0x6: case 0x7:
6523 /* ldc/ldc2 lit (undefined for rn != pc). */
6524 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
6525 : arm_copy_undef (gdbarch, insn, dsc);
6526
6527 default:
6528 return arm_copy_undef (gdbarch, insn, dsc);
6529 }
6530 }
6531
6532 case 0xa:
6533 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
6534
6535 case 0xb:
6536 if (bits (insn, 16, 19) == 0xf)
6537 /* ldc/ldc2 lit. */
6538 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6539 else
6540 return arm_copy_undef (gdbarch, insn, dsc);
6541
6542 case 0xc:
6543 if (bit (insn, 4))
6544 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6545 else
6546 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6547
6548 case 0xd:
6549 if (bit (insn, 4))
6550 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6551 else
6552 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6553
6554 default:
6555 return arm_copy_undef (gdbarch, insn, dsc);
6556 }
6557 }
6558
6559 /* Decode miscellaneous instructions in dp/misc encoding space. */
6560
6561 static int
6562 arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
6563 struct regcache *regs,
6564 arm_displaced_step_closure *dsc)
6565 {
6566 unsigned int op2 = bits (insn, 4, 6);
6567 unsigned int op = bits (insn, 21, 22);
6568
6569 switch (op2)
6570 {
6571 case 0x0:
6572 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
6573
6574 case 0x1:
6575 if (op == 0x1) /* bx. */
6576 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
6577 else if (op == 0x3)
6578 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
6579 else
6580 return arm_copy_undef (gdbarch, insn, dsc);
6581
6582 case 0x2:
6583 if (op == 0x1)
6584 /* Not really supported. */
6585 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
6586 else
6587 return arm_copy_undef (gdbarch, insn, dsc);
6588
6589 case 0x3:
6590 if (op == 0x1)
6591 return arm_copy_bx_blx_reg (gdbarch, insn,
6592 regs, dsc); /* blx register. */
6593 else
6594 return arm_copy_undef (gdbarch, insn, dsc);
6595
6596 case 0x5:
6597 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
6598
6599 case 0x7:
6600 if (op == 0x1)
6601 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
6602 else if (op == 0x3)
6603 /* Not really supported. */
6604 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
6605 /* Fall through. */
6606
6607 default:
6608 return arm_copy_undef (gdbarch, insn, dsc);
6609 }
6610 }
6611
6612 static int
6613 arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
6614 struct regcache *regs,
6615 arm_displaced_step_closure *dsc)
6616 {
6617 if (bit (insn, 25))
6618 switch (bits (insn, 20, 24))
6619 {
6620 case 0x10:
6621 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
6622
6623 case 0x14:
6624 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
6625
6626 case 0x12: case 0x16:
6627 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
6628
6629 default:
6630 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
6631 }
6632 else
6633 {
6634 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
6635
6636 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
6637 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
6638 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
6639 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
6640 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
6641 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
6642 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
6643 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
6644 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
6645 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
6646 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
6647 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
6648 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
6649 /* 2nd arg means "unprivileged". */
6650 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
6651 dsc);
6652 }
6653
6654 /* Should be unreachable. */
6655 return 1;
6656 }
6657
6658 static int
6659 arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
6660 struct regcache *regs,
6661 arm_displaced_step_closure *dsc)
6662 {
6663 int a = bit (insn, 25), b = bit (insn, 4);
6664 uint32_t op1 = bits (insn, 20, 24);
6665
6666 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
6667 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
6668 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
6669 else if ((!a && (op1 & 0x17) == 0x02)
6670 || (a && (op1 & 0x17) == 0x02 && !b))
6671 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
6672 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
6673 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
6674 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
6675 else if ((!a && (op1 & 0x17) == 0x03)
6676 || (a && (op1 & 0x17) == 0x03 && !b))
6677 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
6678 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
6679 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
6680 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
6681 else if ((!a && (op1 & 0x17) == 0x06)
6682 || (a && (op1 & 0x17) == 0x06 && !b))
6683 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
6684 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
6685 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
6686 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
6687 else if ((!a && (op1 & 0x17) == 0x07)
6688 || (a && (op1 & 0x17) == 0x07 && !b))
6689 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
6690
6691 /* Should be unreachable. */
6692 return 1;
6693 }
6694
6695 static int
6696 arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
6697 arm_displaced_step_closure *dsc)
6698 {
6699 switch (bits (insn, 20, 24))
6700 {
6701 case 0x00: case 0x01: case 0x02: case 0x03:
6702 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
6703
6704 case 0x04: case 0x05: case 0x06: case 0x07:
6705 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
6706
6707 case 0x08: case 0x09: case 0x0a: case 0x0b:
6708 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
6709 return arm_copy_unmodified (gdbarch, insn,
6710 "decode/pack/unpack/saturate/reverse", dsc);
6711
6712 case 0x18:
6713 if (bits (insn, 5, 7) == 0) /* op2. */
6714 {
6715 if (bits (insn, 12, 15) == 0xf)
6716 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
6717 else
6718 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
6719 }
6720 else
6721 return arm_copy_undef (gdbarch, insn, dsc);
6722
6723 case 0x1a: case 0x1b:
6724 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6725 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
6726 else
6727 return arm_copy_undef (gdbarch, insn, dsc);
6728
6729 case 0x1c: case 0x1d:
6730 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
6731 {
6732 if (bits (insn, 0, 3) == 0xf)
6733 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
6734 else
6735 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
6736 }
6737 else
6738 return arm_copy_undef (gdbarch, insn, dsc);
6739
6740 case 0x1e: case 0x1f:
6741 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6742 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
6743 else
6744 return arm_copy_undef (gdbarch, insn, dsc);
6745 }
6746
6747 /* Should be unreachable. */
6748 return 1;
6749 }
6750
6751 static int
6752 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn,
6753 struct regcache *regs,
6754 arm_displaced_step_closure *dsc)
6755 {
6756 if (bit (insn, 25))
6757 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6758 else
6759 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
6760 }
6761
6762 static int
6763 arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
6764 struct regcache *regs,
6765 arm_displaced_step_closure *dsc)
6766 {
6767 unsigned int opcode = bits (insn, 20, 24);
6768
6769 switch (opcode)
6770 {
6771 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
6772 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
6773
6774 case 0x08: case 0x0a: case 0x0c: case 0x0e:
6775 case 0x12: case 0x16:
6776 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
6777
6778 case 0x09: case 0x0b: case 0x0d: case 0x0f:
6779 case 0x13: case 0x17:
6780 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
6781
6782 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6783 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6784 /* Note: no writeback for these instructions. Bit 25 will always be
6785 zero though (via caller), so the following works OK. */
6786 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6787 }
6788
6789 /* Should be unreachable. */
6790 return 1;
6791 }
6792
6793 /* Decode shifted register instructions. */
6794
6795 static int
6796 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
6797 uint16_t insn2, struct regcache *regs,
6798 arm_displaced_step_closure *dsc)
6799 {
6800 /* PC is only allowed to be used in instruction MOV. */
6801
6802 unsigned int op = bits (insn1, 5, 8);
6803 unsigned int rn = bits (insn1, 0, 3);
6804
6805 if (op == 0x2 && rn == 0xf) /* MOV */
6806 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
6807 else
6808 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6809 "dp (shift reg)", dsc);
6810 }
6811
6812
6813 /* Decode extension register load/store. Exactly the same as
6814 arm_decode_ext_reg_ld_st. */
6815
6816 static int
6817 thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
6818 uint16_t insn2, struct regcache *regs,
6819 arm_displaced_step_closure *dsc)
6820 {
6821 unsigned int opcode = bits (insn1, 4, 8);
6822
6823 switch (opcode)
6824 {
6825 case 0x04: case 0x05:
6826 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6827 "vfp/neon vmov", dsc);
6828
6829 case 0x08: case 0x0c: /* 01x00 */
6830 case 0x0a: case 0x0e: /* 01x10 */
6831 case 0x12: case 0x16: /* 10x10 */
6832 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6833 "vfp/neon vstm/vpush", dsc);
6834
6835 case 0x09: case 0x0d: /* 01x01 */
6836 case 0x0b: case 0x0f: /* 01x11 */
6837 case 0x13: case 0x17: /* 10x11 */
6838 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6839 "vfp/neon vldm/vpop", dsc);
6840
6841 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6842 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6843 "vstr", dsc);
6844 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6845 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
6846 }
6847
6848 /* Should be unreachable. */
6849 return 1;
6850 }
6851
6852 static int
6853 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
6854 struct regcache *regs, arm_displaced_step_closure *dsc)
6855 {
6856 unsigned int op1 = bits (insn, 20, 25);
6857 int op = bit (insn, 4);
6858 unsigned int coproc = bits (insn, 8, 11);
6859
6860 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
6861 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
6862 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
6863 && (coproc & 0xe) != 0xa)
6864 /* stc/stc2. */
6865 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6866 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
6867 && (coproc & 0xe) != 0xa)
6868 /* ldc/ldc2 imm/lit. */
6869 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6870 else if ((op1 & 0x3e) == 0x00)
6871 return arm_copy_undef (gdbarch, insn, dsc);
6872 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
6873 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
6874 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
6875 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6876 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
6877 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6878 else if ((op1 & 0x30) == 0x20 && !op)
6879 {
6880 if ((coproc & 0xe) == 0xa)
6881 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
6882 else
6883 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6884 }
6885 else if ((op1 & 0x30) == 0x20 && op)
6886 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
6887 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
6888 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6889 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
6890 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6891 else if ((op1 & 0x30) == 0x30)
6892 return arm_copy_svc (gdbarch, insn, regs, dsc);
6893 else
6894 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
6895 }
6896
6897 static int
6898 thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
6899 uint16_t insn2, struct regcache *regs,
6900 arm_displaced_step_closure *dsc)
6901 {
6902 unsigned int coproc = bits (insn2, 8, 11);
6903 unsigned int bit_5_8 = bits (insn1, 5, 8);
6904 unsigned int bit_9 = bit (insn1, 9);
6905 unsigned int bit_4 = bit (insn1, 4);
6906
6907 if (bit_9 == 0)
6908 {
6909 if (bit_5_8 == 2)
6910 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6911 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
6912 dsc);
6913 else if (bit_5_8 == 0) /* UNDEFINED. */
6914 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
6915 else
6916 {
6917 /*coproc is 101x. SIMD/VFP, ext registers load/store. */
6918 if ((coproc & 0xe) == 0xa)
6919 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
6920 dsc);
6921 else /* coproc is not 101x. */
6922 {
6923 if (bit_4 == 0) /* STC/STC2. */
6924 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6925 "stc/stc2", dsc);
6926 else /* LDC/LDC2 {literal, immediate}. */
6927 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
6928 regs, dsc);
6929 }
6930 }
6931 }
6932 else
6933 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
6934
6935 return 0;
6936 }
6937
6938 static void
6939 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
6940 arm_displaced_step_closure *dsc, int rd)
6941 {
6942 /* ADR Rd, #imm
6943
6944 Rewrite as:
6945
6946 Preparation: Rd <- PC
6947 Insn: ADD Rd, #imm
6948 Cleanup: Null.
6949 */
6950
6951 /* Rd <- PC */
6952 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6953 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
6954 }
6955
6956 static int
6957 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
6958 arm_displaced_step_closure *dsc,
6959 int rd, unsigned int imm)
6960 {
6961
6962 /* Encoding T2: ADDS Rd, #imm */
6963 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
6964
6965 install_pc_relative (gdbarch, regs, dsc, rd);
6966
6967 return 0;
6968 }
6969
6970 static int
6971 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
6972 struct regcache *regs,
6973 arm_displaced_step_closure *dsc)
6974 {
6975 unsigned int rd = bits (insn, 8, 10);
6976 unsigned int imm8 = bits (insn, 0, 7);
6977
6978 if (debug_displaced)
6979 fprintf_unfiltered (gdb_stdlog,
6980 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
6981 rd, imm8, insn);
6982
6983 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
6984 }
6985
6986 static int
6987 thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
6988 uint16_t insn2, struct regcache *regs,
6989 arm_displaced_step_closure *dsc)
6990 {
6991 unsigned int rd = bits (insn2, 8, 11);
6992 /* Since immediate has the same encoding in ADR ADD and SUB, so we simply
6993 extract raw immediate encoding rather than computing immediate. When
6994 generating ADD or SUB instruction, we can simply perform OR operation to
6995 set immediate into ADD. */
6996 unsigned int imm_3_8 = insn2 & 0x70ff;
6997 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
6998
6999 if (debug_displaced)
7000 fprintf_unfiltered (gdb_stdlog,
7001 "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
7002 rd, imm_i, imm_3_8, insn1, insn2);
7003
7004 if (bit (insn1, 7)) /* Encoding T2 */
7005 {
7006 /* Encoding T3: SUB Rd, Rd, #imm */
7007 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
7008 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7009 }
7010 else /* Encoding T3 */
7011 {
7012 /* Encoding T3: ADD Rd, Rd, #imm */
7013 dsc->modinsn[0] = (0xf100 | rd | imm_i);
7014 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7015 }
7016 dsc->numinsns = 2;
7017
7018 install_pc_relative (gdbarch, regs, dsc, rd);
7019
7020 return 0;
7021 }
7022
7023 static int
7024 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, uint16_t insn1,
7025 struct regcache *regs,
7026 arm_displaced_step_closure *dsc)
7027 {
7028 unsigned int rt = bits (insn1, 8, 10);
7029 unsigned int pc;
7030 int imm8 = (bits (insn1, 0, 7) << 2);
7031
7032 /* LDR Rd, #imm8
7033
7034 Rwrite as:
7035
7036 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
7037
7038 Insn: LDR R0, [R2, R3];
7039 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
7040
7041 if (debug_displaced)
7042 fprintf_unfiltered (gdb_stdlog,
7043 "displaced: copying thumb ldr r%d [pc #%d]\n"
7044 , rt, imm8);
7045
7046 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
7047 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
7048 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
7049 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7050 /* The assembler calculates the required value of the offset from the
7051 Align(PC,4) value of this instruction to the label. */
7052 pc = pc & 0xfffffffc;
7053
7054 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
7055 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
7056
7057 dsc->rd = rt;
7058 dsc->u.ldst.xfersize = 4;
7059 dsc->u.ldst.rn = 0;
7060 dsc->u.ldst.immed = 0;
7061 dsc->u.ldst.writeback = 0;
7062 dsc->u.ldst.restore_r4 = 0;
7063
7064 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/
7065
7066 dsc->cleanup = &cleanup_load;
7067
7068 return 0;
7069 }
7070
7071 /* Copy Thumb cbnz/cbz instruction. */
7072
7073 static int
7074 thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
7075 struct regcache *regs,
7076 arm_displaced_step_closure *dsc)
7077 {
7078 int non_zero = bit (insn1, 11);
7079 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
7080 CORE_ADDR from = dsc->insn_addr;
7081 int rn = bits (insn1, 0, 2);
7082 int rn_val = displaced_read_reg (regs, dsc, rn);
7083
7084 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
7085 /* CBNZ and CBZ do not affect the condition flags. If condition is true,
7086 set it INST_AL, so cleanup_branch will know branch is taken, otherwise,
7087 condition is false, let it be, cleanup_branch will do nothing. */
7088 if (dsc->u.branch.cond)
7089 {
7090 dsc->u.branch.cond = INST_AL;
7091 dsc->u.branch.dest = from + 4 + imm5;
7092 }
7093 else
7094 dsc->u.branch.dest = from + 2;
7095
7096 dsc->u.branch.link = 0;
7097 dsc->u.branch.exchange = 0;
7098
7099 if (debug_displaced)
7100 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
7101 " insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
7102 rn, rn_val, insn1, dsc->u.branch.dest);
7103
7104 dsc->modinsn[0] = THUMB_NOP;
7105
7106 dsc->cleanup = &cleanup_branch;
7107 return 0;
7108 }
7109
7110 /* Copy Table Branch Byte/Halfword */
7111 static int
7112 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
7113 uint16_t insn2, struct regcache *regs,
7114 arm_displaced_step_closure *dsc)
7115 {
7116 ULONGEST rn_val, rm_val;
7117 int is_tbh = bit (insn2, 4);
7118 CORE_ADDR halfwords = 0;
7119 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7120
7121 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
7122 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
7123
7124 if (is_tbh)
7125 {
7126 gdb_byte buf[2];
7127
7128 target_read_memory (rn_val + 2 * rm_val, buf, 2);
7129 halfwords = extract_unsigned_integer (buf, 2, byte_order);
7130 }
7131 else
7132 {
7133 gdb_byte buf[1];
7134
7135 target_read_memory (rn_val + rm_val, buf, 1);
7136 halfwords = extract_unsigned_integer (buf, 1, byte_order);
7137 }
7138
7139 if (debug_displaced)
7140 fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x offset 0x%x"
7141 " offset 0x%x\n", is_tbh ? "tbh" : "tbb",
7142 (unsigned int) rn_val, (unsigned int) rm_val,
7143 (unsigned int) halfwords);
7144
7145 dsc->u.branch.cond = INST_AL;
7146 dsc->u.branch.link = 0;
7147 dsc->u.branch.exchange = 0;
7148 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
7149
7150 dsc->cleanup = &cleanup_branch;
7151
7152 return 0;
7153 }
7154
7155 static void
7156 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
7157 arm_displaced_step_closure *dsc)
7158 {
7159 /* PC <- r7 */
7160 int val = displaced_read_reg (regs, dsc, 7);
7161 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
7162
7163 /* r7 <- r8 */
7164 val = displaced_read_reg (regs, dsc, 8);
7165 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
7166
7167 /* r8 <- tmp[0] */
7168 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
7169
7170 }
7171
7172 static int
7173 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, uint16_t insn1,
7174 struct regcache *regs,
7175 arm_displaced_step_closure *dsc)
7176 {
7177 dsc->u.block.regmask = insn1 & 0x00ff;
7178
7179 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
7180 to :
7181
7182 (1) register list is full, that is, r0-r7 are used.
7183 Prepare: tmp[0] <- r8
7184
7185 POP {r0, r1, ...., r6, r7}; remove PC from reglist
7186 MOV r8, r7; Move value of r7 to r8;
7187 POP {r7}; Store PC value into r7.
7188
7189 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
7190
7191 (2) register list is not full, supposing there are N registers in
7192 register list (except PC, 0 <= N <= 7).
7193 Prepare: for each i, 0 - N, tmp[i] <- ri.
7194
7195 POP {r0, r1, ...., rN};
7196
7197 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
7198 from tmp[] properly.
7199 */
7200 if (debug_displaced)
7201 fprintf_unfiltered (gdb_stdlog,
7202 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
7203 dsc->u.block.regmask, insn1);
7204
7205 if (dsc->u.block.regmask == 0xff)
7206 {
7207 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
7208
7209 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
7210 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
7211 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
7212
7213 dsc->numinsns = 3;
7214 dsc->cleanup = &cleanup_pop_pc_16bit_all;
7215 }
7216 else
7217 {
7218 unsigned int num_in_list = count_one_bits (dsc->u.block.regmask);
7219 unsigned int i;
7220 unsigned int new_regmask;
7221
7222 for (i = 0; i < num_in_list + 1; i++)
7223 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7224
7225 new_regmask = (1 << (num_in_list + 1)) - 1;
7226
7227 if (debug_displaced)
7228 fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
7229 "{..., pc}: original reg list %.4x,"
7230 " modified list %.4x\n"),
7231 (int) dsc->u.block.regmask, new_regmask);
7232
7233 dsc->u.block.regmask |= 0x8000;
7234 dsc->u.block.writeback = 0;
7235 dsc->u.block.cond = INST_AL;
7236
7237 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
7238
7239 dsc->cleanup = &cleanup_block_load_pc;
7240 }
7241
7242 return 0;
7243 }
7244
7245 static void
7246 thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7247 struct regcache *regs,
7248 arm_displaced_step_closure *dsc)
7249 {
7250 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
7251 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
7252 int err = 0;
7253
7254 /* 16-bit thumb instructions. */
7255 switch (op_bit_12_15)
7256 {
7257 /* Shift (imme), add, subtract, move and compare. */
7258 case 0: case 1: case 2: case 3:
7259 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7260 "shift/add/sub/mov/cmp",
7261 dsc);
7262 break;
7263 case 4:
7264 switch (op_bit_10_11)
7265 {
7266 case 0: /* Data-processing */
7267 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7268 "data-processing",
7269 dsc);
7270 break;
7271 case 1: /* Special data instructions and branch and exchange. */
7272 {
7273 unsigned short op = bits (insn1, 7, 9);
7274 if (op == 6 || op == 7) /* BX or BLX */
7275 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
7276 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
7277 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
7278 else
7279 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
7280 dsc);
7281 }
7282 break;
7283 default: /* LDR (literal) */
7284 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
7285 }
7286 break;
7287 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
7288 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
7289 break;
7290 case 10:
7291 if (op_bit_10_11 < 2) /* Generate PC-relative address */
7292 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
7293 else /* Generate SP-relative address */
7294 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
7295 break;
7296 case 11: /* Misc 16-bit instructions */
7297 {
7298 switch (bits (insn1, 8, 11))
7299 {
7300 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
7301 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
7302 break;
7303 case 12: case 13: /* POP */
7304 if (bit (insn1, 8)) /* PC is in register list. */
7305 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
7306 else
7307 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
7308 break;
7309 case 15: /* If-Then, and hints */
7310 if (bits (insn1, 0, 3))
7311 /* If-Then makes up to four following instructions conditional.
7312 IT instruction itself is not conditional, so handle it as a
7313 common unmodified instruction. */
7314 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
7315 dsc);
7316 else
7317 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
7318 break;
7319 default:
7320 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
7321 }
7322 }
7323 break;
7324 case 12:
7325 if (op_bit_10_11 < 2) /* Store multiple registers */
7326 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
7327 else /* Load multiple registers */
7328 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
7329 break;
7330 case 13: /* Conditional branch and supervisor call */
7331 if (bits (insn1, 9, 11) != 7) /* conditional branch */
7332 err = thumb_copy_b (gdbarch, insn1, dsc);
7333 else
7334 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
7335 break;
7336 case 14: /* Unconditional branch */
7337 err = thumb_copy_b (gdbarch, insn1, dsc);
7338 break;
7339 default:
7340 err = 1;
7341 }
7342
7343 if (err)
7344 internal_error (__FILE__, __LINE__,
7345 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
7346 }
7347
7348 static int
7349 decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
7350 uint16_t insn1, uint16_t insn2,
7351 struct regcache *regs,
7352 arm_displaced_step_closure *dsc)
7353 {
7354 int rt = bits (insn2, 12, 15);
7355 int rn = bits (insn1, 0, 3);
7356 int op1 = bits (insn1, 7, 8);
7357
7358 switch (bits (insn1, 5, 6))
7359 {
7360 case 0: /* Load byte and memory hints */
7361 if (rt == 0xf) /* PLD/PLI */
7362 {
7363 if (rn == 0xf)
7364 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
7365 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
7366 else
7367 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7368 "pli/pld", dsc);
7369 }
7370 else
7371 {
7372 if (rn == 0xf) /* LDRB/LDRSB (literal) */
7373 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7374 1);
7375 else
7376 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7377 "ldrb{reg, immediate}/ldrbt",
7378 dsc);
7379 }
7380
7381 break;
7382 case 1: /* Load halfword and memory hints. */
7383 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
7384 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7385 "pld/unalloc memhint", dsc);
7386 else
7387 {
7388 if (rn == 0xf)
7389 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7390 2);
7391 else
7392 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7393 "ldrh/ldrht", dsc);
7394 }
7395 break;
7396 case 2: /* Load word */
7397 {
7398 int insn2_bit_8_11 = bits (insn2, 8, 11);
7399
7400 if (rn == 0xf)
7401 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
7402 else if (op1 == 0x1) /* Encoding T3 */
7403 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
7404 0, 1);
7405 else /* op1 == 0x0 */
7406 {
7407 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
7408 /* LDR (immediate) */
7409 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7410 dsc, bit (insn2, 8), 1);
7411 else if (insn2_bit_8_11 == 0xe) /* LDRT */
7412 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7413 "ldrt", dsc);
7414 else
7415 /* LDR (register) */
7416 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7417 dsc, 0, 0);
7418 }
7419 break;
7420 }
7421 default:
7422 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7423 break;
7424 }
7425 return 0;
7426 }
7427
7428 static void
7429 thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7430 uint16_t insn2, struct regcache *regs,
7431 arm_displaced_step_closure *dsc)
7432 {
7433 int err = 0;
7434 unsigned short op = bit (insn2, 15);
7435 unsigned int op1 = bits (insn1, 11, 12);
7436
7437 switch (op1)
7438 {
7439 case 1:
7440 {
7441 switch (bits (insn1, 9, 10))
7442 {
7443 case 0:
7444 if (bit (insn1, 6))
7445 {
7446 /* Load/store {dual, exclusive}, table branch. */
7447 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
7448 && bits (insn2, 5, 7) == 0)
7449 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
7450 dsc);
7451 else
7452 /* PC is not allowed to use in load/store {dual, exclusive}
7453 instructions. */
7454 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7455 "load/store dual/ex", dsc);
7456 }
7457 else /* load/store multiple */
7458 {
7459 switch (bits (insn1, 7, 8))
7460 {
7461 case 0: case 3: /* SRS, RFE */
7462 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7463 "srs/rfe", dsc);
7464 break;
7465 case 1: case 2: /* LDM/STM/PUSH/POP */
7466 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
7467 break;
7468 }
7469 }
7470 break;
7471
7472 case 1:
7473 /* Data-processing (shift register). */
7474 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
7475 dsc);
7476 break;
7477 default: /* Coprocessor instructions. */
7478 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7479 break;
7480 }
7481 break;
7482 }
7483 case 2: /* op1 = 2 */
7484 if (op) /* Branch and misc control. */
7485 {
7486 if (bit (insn2, 14) /* BLX/BL */
7487 || bit (insn2, 12) /* Unconditional branch */
7488 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
7489 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
7490 else
7491 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7492 "misc ctrl", dsc);
7493 }
7494 else
7495 {
7496 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
7497 {
7498 int dp_op = bits (insn1, 4, 8);
7499 int rn = bits (insn1, 0, 3);
7500 if ((dp_op == 0 || dp_op == 0xa) && rn == 0xf)
7501 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
7502 regs, dsc);
7503 else
7504 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7505 "dp/pb", dsc);
7506 }
7507 else /* Data processing (modified immediate) */
7508 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7509 "dp/mi", dsc);
7510 }
7511 break;
7512 case 3: /* op1 = 3 */
7513 switch (bits (insn1, 9, 10))
7514 {
7515 case 0:
7516 if (bit (insn1, 4))
7517 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
7518 regs, dsc);
7519 else /* NEON Load/Store and Store single data item */
7520 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7521 "neon elt/struct load/store",
7522 dsc);
7523 break;
7524 case 1: /* op1 = 3, bits (9, 10) == 1 */
7525 switch (bits (insn1, 7, 8))
7526 {
7527 case 0: case 1: /* Data processing (register) */
7528 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7529 "dp(reg)", dsc);
7530 break;
7531 case 2: /* Multiply and absolute difference */
7532 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7533 "mul/mua/diff", dsc);
7534 break;
7535 case 3: /* Long multiply and divide */
7536 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7537 "lmul/lmua", dsc);
7538 break;
7539 }
7540 break;
7541 default: /* Coprocessor instructions */
7542 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7543 break;
7544 }
7545 break;
7546 default:
7547 err = 1;
7548 }
7549
7550 if (err)
7551 internal_error (__FILE__, __LINE__,
7552 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
7553
7554 }
7555
7556 static void
7557 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7558 struct regcache *regs,
7559 arm_displaced_step_closure *dsc)
7560 {
7561 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7562 uint16_t insn1
7563 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
7564
7565 if (debug_displaced)
7566 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
7567 "at %.8lx\n", insn1, (unsigned long) from);
7568
7569 dsc->is_thumb = 1;
7570 dsc->insn_size = thumb_insn_size (insn1);
7571 if (thumb_insn_size (insn1) == 4)
7572 {
7573 uint16_t insn2
7574 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
7575 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
7576 }
7577 else
7578 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
7579 }
7580
7581 void
7582 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7583 CORE_ADDR to, struct regcache *regs,
7584 arm_displaced_step_closure *dsc)
7585 {
7586 int err = 0;
7587 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7588 uint32_t insn;
7589
7590 /* Most displaced instructions use a 1-instruction scratch space, so set this
7591 here and override below if/when necessary. */
7592 dsc->numinsns = 1;
7593 dsc->insn_addr = from;
7594 dsc->scratch_base = to;
7595 dsc->cleanup = NULL;
7596 dsc->wrote_to_pc = 0;
7597
7598 if (!displaced_in_arm_mode (regs))
7599 return thumb_process_displaced_insn (gdbarch, from, regs, dsc);
7600
7601 dsc->is_thumb = 0;
7602 dsc->insn_size = 4;
7603 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
7604 if (debug_displaced)
7605 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
7606 "at %.8lx\n", (unsigned long) insn,
7607 (unsigned long) from);
7608
7609 if ((insn & 0xf0000000) == 0xf0000000)
7610 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
7611 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
7612 {
7613 case 0x0: case 0x1: case 0x2: case 0x3:
7614 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
7615 break;
7616
7617 case 0x4: case 0x5: case 0x6:
7618 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
7619 break;
7620
7621 case 0x7:
7622 err = arm_decode_media (gdbarch, insn, dsc);
7623 break;
7624
7625 case 0x8: case 0x9: case 0xa: case 0xb:
7626 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
7627 break;
7628
7629 case 0xc: case 0xd: case 0xe: case 0xf:
7630 err = arm_decode_svc_copro (gdbarch, insn, regs, dsc);
7631 break;
7632 }
7633
7634 if (err)
7635 internal_error (__FILE__, __LINE__,
7636 _("arm_process_displaced_insn: Instruction decode error"));
7637 }
7638
7639 /* Actually set up the scratch space for a displaced instruction. */
7640
7641 void
7642 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
7643 CORE_ADDR to, arm_displaced_step_closure *dsc)
7644 {
7645 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7646 unsigned int i, len, offset;
7647 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7648 int size = dsc->is_thumb? 2 : 4;
7649 const gdb_byte *bkp_insn;
7650
7651 offset = 0;
7652 /* Poke modified instruction(s). */
7653 for (i = 0; i < dsc->numinsns; i++)
7654 {
7655 if (debug_displaced)
7656 {
7657 fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
7658 if (size == 4)
7659 fprintf_unfiltered (gdb_stdlog, "%.8lx",
7660 dsc->modinsn[i]);
7661 else if (size == 2)
7662 fprintf_unfiltered (gdb_stdlog, "%.4x",
7663 (unsigned short)dsc->modinsn[i]);
7664
7665 fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
7666 (unsigned long) to + offset);
7667
7668 }
7669 write_memory_unsigned_integer (to + offset, size,
7670 byte_order_for_code,
7671 dsc->modinsn[i]);
7672 offset += size;
7673 }
7674
7675 /* Choose the correct breakpoint instruction. */
7676 if (dsc->is_thumb)
7677 {
7678 bkp_insn = tdep->thumb_breakpoint;
7679 len = tdep->thumb_breakpoint_size;
7680 }
7681 else
7682 {
7683 bkp_insn = tdep->arm_breakpoint;
7684 len = tdep->arm_breakpoint_size;
7685 }
7686
7687 /* Put breakpoint afterwards. */
7688 write_memory (to + offset, bkp_insn, len);
7689
7690 if (debug_displaced)
7691 fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
7692 paddress (gdbarch, from), paddress (gdbarch, to));
7693 }
7694
7695 /* Entry point for cleaning things up after a displaced instruction has been
7696 single-stepped. */
7697
7698 void
7699 arm_displaced_step_fixup (struct gdbarch *gdbarch,
7700 struct displaced_step_closure *dsc_,
7701 CORE_ADDR from, CORE_ADDR to,
7702 struct regcache *regs)
7703 {
7704 arm_displaced_step_closure *dsc = (arm_displaced_step_closure *) dsc_;
7705
7706 if (dsc->cleanup)
7707 dsc->cleanup (gdbarch, regs, dsc);
7708
7709 if (!dsc->wrote_to_pc)
7710 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
7711 dsc->insn_addr + dsc->insn_size);
7712
7713 }
7714
7715 #include "bfd-in2.h"
7716 #include "libcoff.h"
7717
7718 static int
7719 gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
7720 {
7721 gdb_disassembler *di
7722 = static_cast<gdb_disassembler *>(info->application_data);
7723 struct gdbarch *gdbarch = di->arch ();
7724
7725 if (arm_pc_is_thumb (gdbarch, memaddr))
7726 {
7727 static asymbol *asym;
7728 static combined_entry_type ce;
7729 static struct coff_symbol_struct csym;
7730 static struct bfd fake_bfd;
7731 static bfd_target fake_target;
7732
7733 if (csym.native == NULL)
7734 {
7735 /* Create a fake symbol vector containing a Thumb symbol.
7736 This is solely so that the code in print_insn_little_arm()
7737 and print_insn_big_arm() in opcodes/arm-dis.c will detect
7738 the presence of a Thumb symbol and switch to decoding
7739 Thumb instructions. */
7740
7741 fake_target.flavour = bfd_target_coff_flavour;
7742 fake_bfd.xvec = &fake_target;
7743 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
7744 csym.native = &ce;
7745 csym.symbol.the_bfd = &fake_bfd;
7746 csym.symbol.name = "fake";
7747 asym = (asymbol *) & csym;
7748 }
7749
7750 memaddr = UNMAKE_THUMB_ADDR (memaddr);
7751 info->symbols = &asym;
7752 }
7753 else
7754 info->symbols = NULL;
7755
7756 /* GDB is able to get bfd_mach from the exe_bfd, info->mach is
7757 accurate, so mark USER_SPECIFIED_MACHINE_TYPE bit. Otherwise,
7758 opcodes/arm-dis.c:print_insn reset info->mach, and it will trigger
7759 the assert on the mismatch of info->mach and bfd_get_mach (exec_bfd)
7760 in default_print_insn. */
7761 if (exec_bfd != NULL)
7762 info->flags |= USER_SPECIFIED_MACHINE_TYPE;
7763
7764 return default_print_insn (memaddr, info);
7765 }
7766
7767 /* The following define instruction sequences that will cause ARM
7768 cpu's to take an undefined instruction trap. These are used to
7769 signal a breakpoint to GDB.
7770
7771 The newer ARMv4T cpu's are capable of operating in ARM or Thumb
7772 modes. A different instruction is required for each mode. The ARM
7773 cpu's can also be big or little endian. Thus four different
7774 instructions are needed to support all cases.
7775
7776 Note: ARMv4 defines several new instructions that will take the
7777 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
7778 not in fact add the new instructions. The new undefined
7779 instructions in ARMv4 are all instructions that had no defined
7780 behaviour in earlier chips. There is no guarantee that they will
7781 raise an exception, but may be treated as NOP's. In practice, it
7782 may only safe to rely on instructions matching:
7783
7784 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
7785 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
7786 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
7787
7788 Even this may only true if the condition predicate is true. The
7789 following use a condition predicate of ALWAYS so it is always TRUE.
7790
7791 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
7792 and NetBSD all use a software interrupt rather than an undefined
7793 instruction to force a trap. This can be handled by by the
7794 abi-specific code during establishment of the gdbarch vector. */
7795
7796 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
7797 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
7798 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
7799 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
7800
7801 static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
7802 static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
7803 static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
7804 static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
7805
7806 /* Implement the breakpoint_kind_from_pc gdbarch method. */
7807
7808 static int
7809 arm_breakpoint_kind_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr)
7810 {
7811 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7812 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7813
7814 if (arm_pc_is_thumb (gdbarch, *pcptr))
7815 {
7816 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
7817
7818 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
7819 check whether we are replacing a 32-bit instruction. */
7820 if (tdep->thumb2_breakpoint != NULL)
7821 {
7822 gdb_byte buf[2];
7823
7824 if (target_read_memory (*pcptr, buf, 2) == 0)
7825 {
7826 unsigned short inst1;
7827
7828 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
7829 if (thumb_insn_size (inst1) == 4)
7830 return ARM_BP_KIND_THUMB2;
7831 }
7832 }
7833
7834 return ARM_BP_KIND_THUMB;
7835 }
7836 else
7837 return ARM_BP_KIND_ARM;
7838
7839 }
7840
7841 /* Implement the sw_breakpoint_from_kind gdbarch method. */
7842
7843 static const gdb_byte *
7844 arm_sw_breakpoint_from_kind (struct gdbarch *gdbarch, int kind, int *size)
7845 {
7846 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7847
7848 switch (kind)
7849 {
7850 case ARM_BP_KIND_ARM:
7851 *size = tdep->arm_breakpoint_size;
7852 return tdep->arm_breakpoint;
7853 case ARM_BP_KIND_THUMB:
7854 *size = tdep->thumb_breakpoint_size;
7855 return tdep->thumb_breakpoint;
7856 case ARM_BP_KIND_THUMB2:
7857 *size = tdep->thumb2_breakpoint_size;
7858 return tdep->thumb2_breakpoint;
7859 default:
7860 gdb_assert_not_reached ("unexpected arm breakpoint kind");
7861 }
7862 }
7863
7864 /* Implement the breakpoint_kind_from_current_state gdbarch method. */
7865
7866 static int
7867 arm_breakpoint_kind_from_current_state (struct gdbarch *gdbarch,
7868 struct regcache *regcache,
7869 CORE_ADDR *pcptr)
7870 {
7871 gdb_byte buf[4];
7872
7873 /* Check the memory pointed by PC is readable. */
7874 if (target_read_memory (regcache_read_pc (regcache), buf, 4) == 0)
7875 {
7876 struct arm_get_next_pcs next_pcs_ctx;
7877
7878 arm_get_next_pcs_ctor (&next_pcs_ctx,
7879 &arm_get_next_pcs_ops,
7880 gdbarch_byte_order (gdbarch),
7881 gdbarch_byte_order_for_code (gdbarch),
7882 0,
7883 regcache);
7884
7885 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
7886
7887 /* If MEMADDR is the next instruction of current pc, do the
7888 software single step computation, and get the thumb mode by
7889 the destination address. */
7890 for (CORE_ADDR pc : next_pcs)
7891 {
7892 if (UNMAKE_THUMB_ADDR (pc) == *pcptr)
7893 {
7894 if (IS_THUMB_ADDR (pc))
7895 {
7896 *pcptr = MAKE_THUMB_ADDR (*pcptr);
7897 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7898 }
7899 else
7900 return ARM_BP_KIND_ARM;
7901 }
7902 }
7903 }
7904
7905 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7906 }
7907
7908 /* Extract from an array REGBUF containing the (raw) register state a
7909 function return value of type TYPE, and copy that, in virtual
7910 format, into VALBUF. */
7911
7912 static void
7913 arm_extract_return_value (struct type *type, struct regcache *regs,
7914 gdb_byte *valbuf)
7915 {
7916 struct gdbarch *gdbarch = regs->arch ();
7917 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7918
7919 if (TYPE_CODE_FLT == type->code ())
7920 {
7921 switch (gdbarch_tdep (gdbarch)->fp_model)
7922 {
7923 case ARM_FLOAT_FPA:
7924 {
7925 /* The value is in register F0 in internal format. We need to
7926 extract the raw value and then convert it to the desired
7927 internal type. */
7928 bfd_byte tmpbuf[ARM_FP_REGISTER_SIZE];
7929
7930 regs->cooked_read (ARM_F0_REGNUM, tmpbuf);
7931 target_float_convert (tmpbuf, arm_ext_type (gdbarch),
7932 valbuf, type);
7933 }
7934 break;
7935
7936 case ARM_FLOAT_SOFT_FPA:
7937 case ARM_FLOAT_SOFT_VFP:
7938 /* ARM_FLOAT_VFP can arise if this is a variadic function so
7939 not using the VFP ABI code. */
7940 case ARM_FLOAT_VFP:
7941 regs->cooked_read (ARM_A1_REGNUM, valbuf);
7942 if (TYPE_LENGTH (type) > 4)
7943 regs->cooked_read (ARM_A1_REGNUM + 1,
7944 valbuf + ARM_INT_REGISTER_SIZE);
7945 break;
7946
7947 default:
7948 internal_error (__FILE__, __LINE__,
7949 _("arm_extract_return_value: "
7950 "Floating point model not supported"));
7951 break;
7952 }
7953 }
7954 else if (type->code () == TYPE_CODE_INT
7955 || type->code () == TYPE_CODE_CHAR
7956 || type->code () == TYPE_CODE_BOOL
7957 || type->code () == TYPE_CODE_PTR
7958 || TYPE_IS_REFERENCE (type)
7959 || type->code () == TYPE_CODE_ENUM)
7960 {
7961 /* If the type is a plain integer, then the access is
7962 straight-forward. Otherwise we have to play around a bit
7963 more. */
7964 int len = TYPE_LENGTH (type);
7965 int regno = ARM_A1_REGNUM;
7966 ULONGEST tmp;
7967
7968 while (len > 0)
7969 {
7970 /* By using store_unsigned_integer we avoid having to do
7971 anything special for small big-endian values. */
7972 regcache_cooked_read_unsigned (regs, regno++, &tmp);
7973 store_unsigned_integer (valbuf,
7974 (len > ARM_INT_REGISTER_SIZE
7975 ? ARM_INT_REGISTER_SIZE : len),
7976 byte_order, tmp);
7977 len -= ARM_INT_REGISTER_SIZE;
7978 valbuf += ARM_INT_REGISTER_SIZE;
7979 }
7980 }
7981 else
7982 {
7983 /* For a structure or union the behaviour is as if the value had
7984 been stored to word-aligned memory and then loaded into
7985 registers with 32-bit load instruction(s). */
7986 int len = TYPE_LENGTH (type);
7987 int regno = ARM_A1_REGNUM;
7988 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
7989
7990 while (len > 0)
7991 {
7992 regs->cooked_read (regno++, tmpbuf);
7993 memcpy (valbuf, tmpbuf,
7994 len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len);
7995 len -= ARM_INT_REGISTER_SIZE;
7996 valbuf += ARM_INT_REGISTER_SIZE;
7997 }
7998 }
7999 }
8000
8001
8002 /* Will a function return an aggregate type in memory or in a
8003 register? Return 0 if an aggregate type can be returned in a
8004 register, 1 if it must be returned in memory. */
8005
8006 static int
8007 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
8008 {
8009 enum type_code code;
8010
8011 type = check_typedef (type);
8012
8013 /* Simple, non-aggregate types (ie not including vectors and
8014 complex) are always returned in a register (or registers). */
8015 code = type->code ();
8016 if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
8017 && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
8018 return 0;
8019
8020 if (TYPE_CODE_ARRAY == code && type->is_vector ())
8021 {
8022 /* Vector values should be returned using ARM registers if they
8023 are not over 16 bytes. */
8024 return (TYPE_LENGTH (type) > 16);
8025 }
8026
8027 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
8028 {
8029 /* The AAPCS says all aggregates not larger than a word are returned
8030 in a register. */
8031 if (TYPE_LENGTH (type) <= ARM_INT_REGISTER_SIZE)
8032 return 0;
8033
8034 return 1;
8035 }
8036 else
8037 {
8038 int nRc;
8039
8040 /* All aggregate types that won't fit in a register must be returned
8041 in memory. */
8042 if (TYPE_LENGTH (type) > ARM_INT_REGISTER_SIZE)
8043 return 1;
8044
8045 /* In the ARM ABI, "integer" like aggregate types are returned in
8046 registers. For an aggregate type to be integer like, its size
8047 must be less than or equal to ARM_INT_REGISTER_SIZE and the
8048 offset of each addressable subfield must be zero. Note that bit
8049 fields are not addressable, and all addressable subfields of
8050 unions always start at offset zero.
8051
8052 This function is based on the behaviour of GCC 2.95.1.
8053 See: gcc/arm.c: arm_return_in_memory() for details.
8054
8055 Note: All versions of GCC before GCC 2.95.2 do not set up the
8056 parameters correctly for a function returning the following
8057 structure: struct { float f;}; This should be returned in memory,
8058 not a register. Richard Earnshaw sent me a patch, but I do not
8059 know of any way to detect if a function like the above has been
8060 compiled with the correct calling convention. */
8061
8062 /* Assume all other aggregate types can be returned in a register.
8063 Run a check for structures, unions and arrays. */
8064 nRc = 0;
8065
8066 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
8067 {
8068 int i;
8069 /* Need to check if this struct/union is "integer" like. For
8070 this to be true, its size must be less than or equal to
8071 ARM_INT_REGISTER_SIZE and the offset of each addressable
8072 subfield must be zero. Note that bit fields are not
8073 addressable, and unions always start at offset zero. If any
8074 of the subfields is a floating point type, the struct/union
8075 cannot be an integer type. */
8076
8077 /* For each field in the object, check:
8078 1) Is it FP? --> yes, nRc = 1;
8079 2) Is it addressable (bitpos != 0) and
8080 not packed (bitsize == 0)?
8081 --> yes, nRc = 1
8082 */
8083
8084 for (i = 0; i < type->num_fields (); i++)
8085 {
8086 enum type_code field_type_code;
8087
8088 field_type_code
8089 = check_typedef (type->field (i).type ())->code ();
8090
8091 /* Is it a floating point type field? */
8092 if (field_type_code == TYPE_CODE_FLT)
8093 {
8094 nRc = 1;
8095 break;
8096 }
8097
8098 /* If bitpos != 0, then we have to care about it. */
8099 if (TYPE_FIELD_BITPOS (type, i) != 0)
8100 {
8101 /* Bitfields are not addressable. If the field bitsize is
8102 zero, then the field is not packed. Hence it cannot be
8103 a bitfield or any other packed type. */
8104 if (TYPE_FIELD_BITSIZE (type, i) == 0)
8105 {
8106 nRc = 1;
8107 break;
8108 }
8109 }
8110 }
8111 }
8112
8113 return nRc;
8114 }
8115 }
8116
8117 /* Write into appropriate registers a function return value of type
8118 TYPE, given in virtual format. */
8119
8120 static void
8121 arm_store_return_value (struct type *type, struct regcache *regs,
8122 const gdb_byte *valbuf)
8123 {
8124 struct gdbarch *gdbarch = regs->arch ();
8125 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8126
8127 if (type->code () == TYPE_CODE_FLT)
8128 {
8129 gdb_byte buf[ARM_FP_REGISTER_SIZE];
8130
8131 switch (gdbarch_tdep (gdbarch)->fp_model)
8132 {
8133 case ARM_FLOAT_FPA:
8134
8135 target_float_convert (valbuf, type, buf, arm_ext_type (gdbarch));
8136 regs->cooked_write (ARM_F0_REGNUM, buf);
8137 break;
8138
8139 case ARM_FLOAT_SOFT_FPA:
8140 case ARM_FLOAT_SOFT_VFP:
8141 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8142 not using the VFP ABI code. */
8143 case ARM_FLOAT_VFP:
8144 regs->cooked_write (ARM_A1_REGNUM, valbuf);
8145 if (TYPE_LENGTH (type) > 4)
8146 regs->cooked_write (ARM_A1_REGNUM + 1,
8147 valbuf + ARM_INT_REGISTER_SIZE);
8148 break;
8149
8150 default:
8151 internal_error (__FILE__, __LINE__,
8152 _("arm_store_return_value: Floating "
8153 "point model not supported"));
8154 break;
8155 }
8156 }
8157 else if (type->code () == TYPE_CODE_INT
8158 || type->code () == TYPE_CODE_CHAR
8159 || type->code () == TYPE_CODE_BOOL
8160 || type->code () == TYPE_CODE_PTR
8161 || TYPE_IS_REFERENCE (type)
8162 || type->code () == TYPE_CODE_ENUM)
8163 {
8164 if (TYPE_LENGTH (type) <= 4)
8165 {
8166 /* Values of one word or less are zero/sign-extended and
8167 returned in r0. */
8168 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
8169 LONGEST val = unpack_long (type, valbuf);
8170
8171 store_signed_integer (tmpbuf, ARM_INT_REGISTER_SIZE, byte_order, val);
8172 regs->cooked_write (ARM_A1_REGNUM, tmpbuf);
8173 }
8174 else
8175 {
8176 /* Integral values greater than one word are stored in consecutive
8177 registers starting with r0. This will always be a multiple of
8178 the regiser size. */
8179 int len = TYPE_LENGTH (type);
8180 int regno = ARM_A1_REGNUM;
8181
8182 while (len > 0)
8183 {
8184 regs->cooked_write (regno++, valbuf);
8185 len -= ARM_INT_REGISTER_SIZE;
8186 valbuf += ARM_INT_REGISTER_SIZE;
8187 }
8188 }
8189 }
8190 else
8191 {
8192 /* For a structure or union the behaviour is as if the value had
8193 been stored to word-aligned memory and then loaded into
8194 registers with 32-bit load instruction(s). */
8195 int len = TYPE_LENGTH (type);
8196 int regno = ARM_A1_REGNUM;
8197 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
8198
8199 while (len > 0)
8200 {
8201 memcpy (tmpbuf, valbuf,
8202 len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len);
8203 regs->cooked_write (regno++, tmpbuf);
8204 len -= ARM_INT_REGISTER_SIZE;
8205 valbuf += ARM_INT_REGISTER_SIZE;
8206 }
8207 }
8208 }
8209
8210
8211 /* Handle function return values. */
8212
8213 static enum return_value_convention
8214 arm_return_value (struct gdbarch *gdbarch, struct value *function,
8215 struct type *valtype, struct regcache *regcache,
8216 gdb_byte *readbuf, const gdb_byte *writebuf)
8217 {
8218 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8219 struct type *func_type = function ? value_type (function) : NULL;
8220 enum arm_vfp_cprc_base_type vfp_base_type;
8221 int vfp_base_count;
8222
8223 if (arm_vfp_abi_for_function (gdbarch, func_type)
8224 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
8225 {
8226 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
8227 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
8228 int i;
8229 for (i = 0; i < vfp_base_count; i++)
8230 {
8231 if (reg_char == 'q')
8232 {
8233 if (writebuf)
8234 arm_neon_quad_write (gdbarch, regcache, i,
8235 writebuf + i * unit_length);
8236
8237 if (readbuf)
8238 arm_neon_quad_read (gdbarch, regcache, i,
8239 readbuf + i * unit_length);
8240 }
8241 else
8242 {
8243 char name_buf[4];
8244 int regnum;
8245
8246 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
8247 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8248 strlen (name_buf));
8249 if (writebuf)
8250 regcache->cooked_write (regnum, writebuf + i * unit_length);
8251 if (readbuf)
8252 regcache->cooked_read (regnum, readbuf + i * unit_length);
8253 }
8254 }
8255 return RETURN_VALUE_REGISTER_CONVENTION;
8256 }
8257
8258 if (valtype->code () == TYPE_CODE_STRUCT
8259 || valtype->code () == TYPE_CODE_UNION
8260 || valtype->code () == TYPE_CODE_ARRAY)
8261 {
8262 if (tdep->struct_return == pcc_struct_return
8263 || arm_return_in_memory (gdbarch, valtype))
8264 return RETURN_VALUE_STRUCT_CONVENTION;
8265 }
8266 else if (valtype->code () == TYPE_CODE_COMPLEX)
8267 {
8268 if (arm_return_in_memory (gdbarch, valtype))
8269 return RETURN_VALUE_STRUCT_CONVENTION;
8270 }
8271
8272 if (writebuf)
8273 arm_store_return_value (valtype, regcache, writebuf);
8274
8275 if (readbuf)
8276 arm_extract_return_value (valtype, regcache, readbuf);
8277
8278 return RETURN_VALUE_REGISTER_CONVENTION;
8279 }
8280
8281
8282 static int
8283 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
8284 {
8285 struct gdbarch *gdbarch = get_frame_arch (frame);
8286 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8287 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8288 CORE_ADDR jb_addr;
8289 gdb_byte buf[ARM_INT_REGISTER_SIZE];
8290
8291 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
8292
8293 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
8294 ARM_INT_REGISTER_SIZE))
8295 return 0;
8296
8297 *pc = extract_unsigned_integer (buf, ARM_INT_REGISTER_SIZE, byte_order);
8298 return 1;
8299 }
8300 /* A call to cmse secure entry function "foo" at "a" is modified by
8301 GNU ld as "b".
8302 a) bl xxxx <foo>
8303
8304 <foo>
8305 xxxx:
8306
8307 b) bl yyyy <__acle_se_foo>
8308
8309 section .gnu.sgstubs:
8310 <foo>
8311 yyyy: sg // secure gateway
8312 b.w xxxx <__acle_se_foo> // original_branch_dest
8313
8314 <__acle_se_foo>
8315 xxxx:
8316
8317 When the control at "b", the pc contains "yyyy" (sg address) which is a
8318 trampoline and does not exist in source code. This function returns the
8319 target pc "xxxx". For more details please refer to section 5.4
8320 (Entry functions) and section 3.4.4 (C level development flow of secure code)
8321 of "armv8-m-security-extensions-requirements-on-development-tools-engineering-specification"
8322 document on www.developer.arm.com. */
8323
8324 static CORE_ADDR
8325 arm_skip_cmse_entry (CORE_ADDR pc, const char *name, struct objfile *objfile)
8326 {
8327 int target_len = strlen (name) + strlen ("__acle_se_") + 1;
8328 char *target_name = (char *) alloca (target_len);
8329 xsnprintf (target_name, target_len, "%s%s", "__acle_se_", name);
8330
8331 struct bound_minimal_symbol minsym
8332 = lookup_minimal_symbol (target_name, NULL, objfile);
8333
8334 if (minsym.minsym != nullptr)
8335 return BMSYMBOL_VALUE_ADDRESS (minsym);
8336
8337 return 0;
8338 }
8339
8340 /* Return true when SEC points to ".gnu.sgstubs" section. */
8341
8342 static bool
8343 arm_is_sgstubs_section (struct obj_section *sec)
8344 {
8345 return (sec != nullptr
8346 && sec->the_bfd_section != nullptr
8347 && sec->the_bfd_section->name != nullptr
8348 && streq (sec->the_bfd_section->name, ".gnu.sgstubs"));
8349 }
8350
8351 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
8352 return the target PC. Otherwise return 0. */
8353
8354 CORE_ADDR
8355 arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
8356 {
8357 const char *name;
8358 int namelen;
8359 CORE_ADDR start_addr;
8360
8361 /* Find the starting address and name of the function containing the PC. */
8362 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
8363 {
8364 /* Trampoline 'bx reg' doesn't belong to any functions. Do the
8365 check here. */
8366 start_addr = arm_skip_bx_reg (frame, pc);
8367 if (start_addr != 0)
8368 return start_addr;
8369
8370 return 0;
8371 }
8372
8373 /* If PC is in a Thumb call or return stub, return the address of the
8374 target PC, which is in a register. The thunk functions are called
8375 _call_via_xx, where x is the register name. The possible names
8376 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
8377 functions, named __ARM_call_via_r[0-7]. */
8378 if (startswith (name, "_call_via_")
8379 || startswith (name, "__ARM_call_via_"))
8380 {
8381 /* Use the name suffix to determine which register contains the
8382 target PC. */
8383 static const char *table[15] =
8384 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
8385 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
8386 };
8387 int regno;
8388 int offset = strlen (name) - 2;
8389
8390 for (regno = 0; regno <= 14; regno++)
8391 if (strcmp (&name[offset], table[regno]) == 0)
8392 return get_frame_register_unsigned (frame, regno);
8393 }
8394
8395 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
8396 non-interworking calls to foo. We could decode the stubs
8397 to find the target but it's easier to use the symbol table. */
8398 namelen = strlen (name);
8399 if (name[0] == '_' && name[1] == '_'
8400 && ((namelen > 2 + strlen ("_from_thumb")
8401 && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
8402 || (namelen > 2 + strlen ("_from_arm")
8403 && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
8404 {
8405 char *target_name;
8406 int target_len = namelen - 2;
8407 struct bound_minimal_symbol minsym;
8408 struct objfile *objfile;
8409 struct obj_section *sec;
8410
8411 if (name[namelen - 1] == 'b')
8412 target_len -= strlen ("_from_thumb");
8413 else
8414 target_len -= strlen ("_from_arm");
8415
8416 target_name = (char *) alloca (target_len + 1);
8417 memcpy (target_name, name + 2, target_len);
8418 target_name[target_len] = '\0';
8419
8420 sec = find_pc_section (pc);
8421 objfile = (sec == NULL) ? NULL : sec->objfile;
8422 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
8423 if (minsym.minsym != NULL)
8424 return BMSYMBOL_VALUE_ADDRESS (minsym);
8425 else
8426 return 0;
8427 }
8428
8429 struct obj_section *section = find_pc_section (pc);
8430
8431 /* Check whether SECTION points to the ".gnu.sgstubs" section. */
8432 if (arm_is_sgstubs_section (section))
8433 return arm_skip_cmse_entry (pc, name, section->objfile);
8434
8435 return 0; /* not a stub */
8436 }
8437
8438 static void
8439 arm_update_current_architecture (void)
8440 {
8441 struct gdbarch_info info;
8442
8443 /* If the current architecture is not ARM, we have nothing to do. */
8444 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
8445 return;
8446
8447 /* Update the architecture. */
8448 gdbarch_info_init (&info);
8449
8450 if (!gdbarch_update_p (info))
8451 internal_error (__FILE__, __LINE__, _("could not update architecture"));
8452 }
8453
8454 static void
8455 set_fp_model_sfunc (const char *args, int from_tty,
8456 struct cmd_list_element *c)
8457 {
8458 int fp_model;
8459
8460 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
8461 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
8462 {
8463 arm_fp_model = (enum arm_float_model) fp_model;
8464 break;
8465 }
8466
8467 if (fp_model == ARM_FLOAT_LAST)
8468 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
8469 current_fp_model);
8470
8471 arm_update_current_architecture ();
8472 }
8473
8474 static void
8475 show_fp_model (struct ui_file *file, int from_tty,
8476 struct cmd_list_element *c, const char *value)
8477 {
8478 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8479
8480 if (arm_fp_model == ARM_FLOAT_AUTO
8481 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8482 fprintf_filtered (file, _("\
8483 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
8484 fp_model_strings[tdep->fp_model]);
8485 else
8486 fprintf_filtered (file, _("\
8487 The current ARM floating point model is \"%s\".\n"),
8488 fp_model_strings[arm_fp_model]);
8489 }
8490
8491 static void
8492 arm_set_abi (const char *args, int from_tty,
8493 struct cmd_list_element *c)
8494 {
8495 int arm_abi;
8496
8497 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
8498 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
8499 {
8500 arm_abi_global = (enum arm_abi_kind) arm_abi;
8501 break;
8502 }
8503
8504 if (arm_abi == ARM_ABI_LAST)
8505 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
8506 arm_abi_string);
8507
8508 arm_update_current_architecture ();
8509 }
8510
8511 static void
8512 arm_show_abi (struct ui_file *file, int from_tty,
8513 struct cmd_list_element *c, const char *value)
8514 {
8515 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8516
8517 if (arm_abi_global == ARM_ABI_AUTO
8518 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8519 fprintf_filtered (file, _("\
8520 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
8521 arm_abi_strings[tdep->arm_abi]);
8522 else
8523 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
8524 arm_abi_string);
8525 }
8526
8527 static void
8528 arm_show_fallback_mode (struct ui_file *file, int from_tty,
8529 struct cmd_list_element *c, const char *value)
8530 {
8531 fprintf_filtered (file,
8532 _("The current execution mode assumed "
8533 "(when symbols are unavailable) is \"%s\".\n"),
8534 arm_fallback_mode_string);
8535 }
8536
8537 static void
8538 arm_show_force_mode (struct ui_file *file, int from_tty,
8539 struct cmd_list_element *c, const char *value)
8540 {
8541 fprintf_filtered (file,
8542 _("The current execution mode assumed "
8543 "(even when symbols are available) is \"%s\".\n"),
8544 arm_force_mode_string);
8545 }
8546
8547 /* If the user changes the register disassembly style used for info
8548 register and other commands, we have to also switch the style used
8549 in opcodes for disassembly output. This function is run in the "set
8550 arm disassembly" command, and does that. */
8551
8552 static void
8553 set_disassembly_style_sfunc (const char *args, int from_tty,
8554 struct cmd_list_element *c)
8555 {
8556 /* Convert the short style name into the long style name (eg, reg-names-*)
8557 before calling the generic set_disassembler_options() function. */
8558 std::string long_name = std::string ("reg-names-") + disassembly_style;
8559 set_disassembler_options (&long_name[0]);
8560 }
8561
8562 static void
8563 show_disassembly_style_sfunc (struct ui_file *file, int from_tty,
8564 struct cmd_list_element *c, const char *value)
8565 {
8566 struct gdbarch *gdbarch = get_current_arch ();
8567 char *options = get_disassembler_options (gdbarch);
8568 const char *style = "";
8569 int len = 0;
8570 const char *opt;
8571
8572 FOR_EACH_DISASSEMBLER_OPTION (opt, options)
8573 if (CONST_STRNEQ (opt, "reg-names-"))
8574 {
8575 style = &opt[strlen ("reg-names-")];
8576 len = strcspn (style, ",");
8577 }
8578
8579 fprintf_unfiltered (file, "The disassembly style is \"%.*s\".\n", len, style);
8580 }
8581 \f
8582 /* Return the ARM register name corresponding to register I. */
8583 static const char *
8584 arm_register_name (struct gdbarch *gdbarch, int i)
8585 {
8586 const int num_regs = gdbarch_num_regs (gdbarch);
8587
8588 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
8589 && i >= num_regs && i < num_regs + 32)
8590 {
8591 static const char *const vfp_pseudo_names[] = {
8592 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
8593 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
8594 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
8595 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
8596 };
8597
8598 return vfp_pseudo_names[i - num_regs];
8599 }
8600
8601 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
8602 && i >= num_regs + 32 && i < num_regs + 32 + 16)
8603 {
8604 static const char *const neon_pseudo_names[] = {
8605 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
8606 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
8607 };
8608
8609 return neon_pseudo_names[i - num_regs - 32];
8610 }
8611
8612 if (i >= ARRAY_SIZE (arm_register_names))
8613 /* These registers are only supported on targets which supply
8614 an XML description. */
8615 return "";
8616
8617 return arm_register_names[i];
8618 }
8619
8620 /* Test whether the coff symbol specific value corresponds to a Thumb
8621 function. */
8622
8623 static int
8624 coff_sym_is_thumb (int val)
8625 {
8626 return (val == C_THUMBEXT
8627 || val == C_THUMBSTAT
8628 || val == C_THUMBEXTFUNC
8629 || val == C_THUMBSTATFUNC
8630 || val == C_THUMBLABEL);
8631 }
8632
8633 /* arm_coff_make_msymbol_special()
8634 arm_elf_make_msymbol_special()
8635
8636 These functions test whether the COFF or ELF symbol corresponds to
8637 an address in thumb code, and set a "special" bit in a minimal
8638 symbol to indicate that it does. */
8639
8640 static void
8641 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
8642 {
8643 elf_symbol_type *elfsym = (elf_symbol_type *) sym;
8644
8645 if (ARM_GET_SYM_BRANCH_TYPE (elfsym->internal_elf_sym.st_target_internal)
8646 == ST_BRANCH_TO_THUMB)
8647 MSYMBOL_SET_SPECIAL (msym);
8648 }
8649
8650 static void
8651 arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
8652 {
8653 if (coff_sym_is_thumb (val))
8654 MSYMBOL_SET_SPECIAL (msym);
8655 }
8656
8657 static void
8658 arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
8659 asymbol *sym)
8660 {
8661 const char *name = bfd_asymbol_name (sym);
8662 struct arm_per_bfd *data;
8663 struct arm_mapping_symbol new_map_sym;
8664
8665 gdb_assert (name[0] == '$');
8666 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
8667 return;
8668
8669 data = arm_bfd_data_key.get (objfile->obfd);
8670 if (data == NULL)
8671 data = arm_bfd_data_key.emplace (objfile->obfd,
8672 objfile->obfd->section_count);
8673 arm_mapping_symbol_vec &map
8674 = data->section_maps[bfd_asymbol_section (sym)->index];
8675
8676 new_map_sym.value = sym->value;
8677 new_map_sym.type = name[1];
8678
8679 /* Insert at the end, the vector will be sorted on first use. */
8680 map.push_back (new_map_sym);
8681 }
8682
8683 static void
8684 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
8685 {
8686 struct gdbarch *gdbarch = regcache->arch ();
8687 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
8688
8689 /* If necessary, set the T bit. */
8690 if (arm_apcs_32)
8691 {
8692 ULONGEST val, t_bit;
8693 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
8694 t_bit = arm_psr_thumb_bit (gdbarch);
8695 if (arm_pc_is_thumb (gdbarch, pc))
8696 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8697 val | t_bit);
8698 else
8699 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8700 val & ~t_bit);
8701 }
8702 }
8703
8704 /* Read the contents of a NEON quad register, by reading from two
8705 double registers. This is used to implement the quad pseudo
8706 registers, and for argument passing in case the quad registers are
8707 missing; vectors are passed in quad registers when using the VFP
8708 ABI, even if a NEON unit is not present. REGNUM is the index of
8709 the quad register, in [0, 15]. */
8710
8711 static enum register_status
8712 arm_neon_quad_read (struct gdbarch *gdbarch, readable_regcache *regcache,
8713 int regnum, gdb_byte *buf)
8714 {
8715 char name_buf[4];
8716 gdb_byte reg_buf[8];
8717 int offset, double_regnum;
8718 enum register_status status;
8719
8720 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8721 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8722 strlen (name_buf));
8723
8724 /* d0 is always the least significant half of q0. */
8725 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8726 offset = 8;
8727 else
8728 offset = 0;
8729
8730 status = regcache->raw_read (double_regnum, reg_buf);
8731 if (status != REG_VALID)
8732 return status;
8733 memcpy (buf + offset, reg_buf, 8);
8734
8735 offset = 8 - offset;
8736 status = regcache->raw_read (double_regnum + 1, reg_buf);
8737 if (status != REG_VALID)
8738 return status;
8739 memcpy (buf + offset, reg_buf, 8);
8740
8741 return REG_VALID;
8742 }
8743
8744 static enum register_status
8745 arm_pseudo_read (struct gdbarch *gdbarch, readable_regcache *regcache,
8746 int regnum, gdb_byte *buf)
8747 {
8748 const int num_regs = gdbarch_num_regs (gdbarch);
8749 char name_buf[4];
8750 gdb_byte reg_buf[8];
8751 int offset, double_regnum;
8752
8753 gdb_assert (regnum >= num_regs);
8754 regnum -= num_regs;
8755
8756 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8757 /* Quad-precision register. */
8758 return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
8759 else
8760 {
8761 enum register_status status;
8762
8763 /* Single-precision register. */
8764 gdb_assert (regnum < 32);
8765
8766 /* s0 is always the least significant half of d0. */
8767 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8768 offset = (regnum & 1) ? 0 : 4;
8769 else
8770 offset = (regnum & 1) ? 4 : 0;
8771
8772 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8773 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8774 strlen (name_buf));
8775
8776 status = regcache->raw_read (double_regnum, reg_buf);
8777 if (status == REG_VALID)
8778 memcpy (buf, reg_buf + offset, 4);
8779 return status;
8780 }
8781 }
8782
8783 /* Store the contents of BUF to a NEON quad register, by writing to
8784 two double registers. This is used to implement the quad pseudo
8785 registers, and for argument passing in case the quad registers are
8786 missing; vectors are passed in quad registers when using the VFP
8787 ABI, even if a NEON unit is not present. REGNUM is the index
8788 of the quad register, in [0, 15]. */
8789
8790 static void
8791 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
8792 int regnum, const gdb_byte *buf)
8793 {
8794 char name_buf[4];
8795 int offset, double_regnum;
8796
8797 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8798 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8799 strlen (name_buf));
8800
8801 /* d0 is always the least significant half of q0. */
8802 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8803 offset = 8;
8804 else
8805 offset = 0;
8806
8807 regcache->raw_write (double_regnum, buf + offset);
8808 offset = 8 - offset;
8809 regcache->raw_write (double_regnum + 1, buf + offset);
8810 }
8811
8812 static void
8813 arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
8814 int regnum, const gdb_byte *buf)
8815 {
8816 const int num_regs = gdbarch_num_regs (gdbarch);
8817 char name_buf[4];
8818 gdb_byte reg_buf[8];
8819 int offset, double_regnum;
8820
8821 gdb_assert (regnum >= num_regs);
8822 regnum -= num_regs;
8823
8824 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8825 /* Quad-precision register. */
8826 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
8827 else
8828 {
8829 /* Single-precision register. */
8830 gdb_assert (regnum < 32);
8831
8832 /* s0 is always the least significant half of d0. */
8833 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8834 offset = (regnum & 1) ? 0 : 4;
8835 else
8836 offset = (regnum & 1) ? 4 : 0;
8837
8838 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8839 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8840 strlen (name_buf));
8841
8842 regcache->raw_read (double_regnum, reg_buf);
8843 memcpy (reg_buf + offset, buf, 4);
8844 regcache->raw_write (double_regnum, reg_buf);
8845 }
8846 }
8847
8848 static struct value *
8849 value_of_arm_user_reg (struct frame_info *frame, const void *baton)
8850 {
8851 const int *reg_p = (const int *) baton;
8852 return value_of_register (*reg_p, frame);
8853 }
8854 \f
8855 static enum gdb_osabi
8856 arm_elf_osabi_sniffer (bfd *abfd)
8857 {
8858 unsigned int elfosabi;
8859 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
8860
8861 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
8862
8863 if (elfosabi == ELFOSABI_ARM)
8864 /* GNU tools use this value. Check note sections in this case,
8865 as well. */
8866 bfd_map_over_sections (abfd,
8867 generic_elf_osabi_sniff_abi_tag_sections,
8868 &osabi);
8869
8870 /* Anything else will be handled by the generic ELF sniffer. */
8871 return osabi;
8872 }
8873
8874 static int
8875 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
8876 struct reggroup *group)
8877 {
8878 /* FPS register's type is INT, but belongs to float_reggroup. Beside
8879 this, FPS register belongs to save_regroup, restore_reggroup, and
8880 all_reggroup, of course. */
8881 if (regnum == ARM_FPS_REGNUM)
8882 return (group == float_reggroup
8883 || group == save_reggroup
8884 || group == restore_reggroup
8885 || group == all_reggroup);
8886 else
8887 return default_register_reggroup_p (gdbarch, regnum, group);
8888 }
8889
8890 /* For backward-compatibility we allow two 'g' packet lengths with
8891 the remote protocol depending on whether FPA registers are
8892 supplied. M-profile targets do not have FPA registers, but some
8893 stubs already exist in the wild which use a 'g' packet which
8894 supplies them albeit with dummy values. The packet format which
8895 includes FPA registers should be considered deprecated for
8896 M-profile targets. */
8897
8898 static void
8899 arm_register_g_packet_guesses (struct gdbarch *gdbarch)
8900 {
8901 if (gdbarch_tdep (gdbarch)->is_m)
8902 {
8903 const target_desc *tdesc;
8904
8905 /* If we know from the executable this is an M-profile target,
8906 cater for remote targets whose register set layout is the
8907 same as the FPA layout. */
8908 tdesc = arm_read_mprofile_description (ARM_M_TYPE_WITH_FPA);
8909 register_remote_g_packet_guess (gdbarch,
8910 ARM_CORE_REGS_SIZE + ARM_FP_REGS_SIZE,
8911 tdesc);
8912
8913 /* The regular M-profile layout. */
8914 tdesc = arm_read_mprofile_description (ARM_M_TYPE_M_PROFILE);
8915 register_remote_g_packet_guess (gdbarch, ARM_CORE_REGS_SIZE,
8916 tdesc);
8917
8918 /* M-profile plus M4F VFP. */
8919 tdesc = arm_read_mprofile_description (ARM_M_TYPE_VFP_D16);
8920 register_remote_g_packet_guess (gdbarch,
8921 ARM_CORE_REGS_SIZE + ARM_VFP2_REGS_SIZE,
8922 tdesc);
8923 }
8924
8925 /* Otherwise we don't have a useful guess. */
8926 }
8927
8928 /* Implement the code_of_frame_writable gdbarch method. */
8929
8930 static int
8931 arm_code_of_frame_writable (struct gdbarch *gdbarch, struct frame_info *frame)
8932 {
8933 if (gdbarch_tdep (gdbarch)->is_m
8934 && get_frame_type (frame) == SIGTRAMP_FRAME)
8935 {
8936 /* M-profile exception frames return to some magic PCs, where
8937 isn't writable at all. */
8938 return 0;
8939 }
8940 else
8941 return 1;
8942 }
8943
8944 /* Implement gdbarch_gnu_triplet_regexp. If the arch name is arm then allow it
8945 to be postfixed by a version (eg armv7hl). */
8946
8947 static const char *
8948 arm_gnu_triplet_regexp (struct gdbarch *gdbarch)
8949 {
8950 if (strcmp (gdbarch_bfd_arch_info (gdbarch)->arch_name, "arm") == 0)
8951 return "arm(v[^- ]*)?";
8952 return gdbarch_bfd_arch_info (gdbarch)->arch_name;
8953 }
8954
8955 /* Initialize the current architecture based on INFO. If possible,
8956 re-use an architecture from ARCHES, which is a list of
8957 architectures already created during this debugging session.
8958
8959 Called e.g. at program startup, when reading a core file, and when
8960 reading a binary file. */
8961
8962 static struct gdbarch *
8963 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
8964 {
8965 struct gdbarch_tdep *tdep;
8966 struct gdbarch *gdbarch;
8967 struct gdbarch_list *best_arch;
8968 enum arm_abi_kind arm_abi = arm_abi_global;
8969 enum arm_float_model fp_model = arm_fp_model;
8970 tdesc_arch_data_up tdesc_data;
8971 int i;
8972 bool is_m = false;
8973 int vfp_register_count = 0;
8974 bool have_vfp_pseudos = false, have_neon_pseudos = false;
8975 bool have_wmmx_registers = false;
8976 bool have_neon = false;
8977 bool have_fpa_registers = true;
8978 const struct target_desc *tdesc = info.target_desc;
8979
8980 /* If we have an object to base this architecture on, try to determine
8981 its ABI. */
8982
8983 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
8984 {
8985 int ei_osabi, e_flags;
8986
8987 switch (bfd_get_flavour (info.abfd))
8988 {
8989 case bfd_target_coff_flavour:
8990 /* Assume it's an old APCS-style ABI. */
8991 /* XXX WinCE? */
8992 arm_abi = ARM_ABI_APCS;
8993 break;
8994
8995 case bfd_target_elf_flavour:
8996 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
8997 e_flags = elf_elfheader (info.abfd)->e_flags;
8998
8999 if (ei_osabi == ELFOSABI_ARM)
9000 {
9001 /* GNU tools used to use this value, but do not for EABI
9002 objects. There's nowhere to tag an EABI version
9003 anyway, so assume APCS. */
9004 arm_abi = ARM_ABI_APCS;
9005 }
9006 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
9007 {
9008 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
9009
9010 switch (eabi_ver)
9011 {
9012 case EF_ARM_EABI_UNKNOWN:
9013 /* Assume GNU tools. */
9014 arm_abi = ARM_ABI_APCS;
9015 break;
9016
9017 case EF_ARM_EABI_VER4:
9018 case EF_ARM_EABI_VER5:
9019 arm_abi = ARM_ABI_AAPCS;
9020 /* EABI binaries default to VFP float ordering.
9021 They may also contain build attributes that can
9022 be used to identify if the VFP argument-passing
9023 ABI is in use. */
9024 if (fp_model == ARM_FLOAT_AUTO)
9025 {
9026 #ifdef HAVE_ELF
9027 switch (bfd_elf_get_obj_attr_int (info.abfd,
9028 OBJ_ATTR_PROC,
9029 Tag_ABI_VFP_args))
9030 {
9031 case AEABI_VFP_args_base:
9032 /* "The user intended FP parameter/result
9033 passing to conform to AAPCS, base
9034 variant". */
9035 fp_model = ARM_FLOAT_SOFT_VFP;
9036 break;
9037 case AEABI_VFP_args_vfp:
9038 /* "The user intended FP parameter/result
9039 passing to conform to AAPCS, VFP
9040 variant". */
9041 fp_model = ARM_FLOAT_VFP;
9042 break;
9043 case AEABI_VFP_args_toolchain:
9044 /* "The user intended FP parameter/result
9045 passing to conform to tool chain-specific
9046 conventions" - we don't know any such
9047 conventions, so leave it as "auto". */
9048 break;
9049 case AEABI_VFP_args_compatible:
9050 /* "Code is compatible with both the base
9051 and VFP variants; the user did not permit
9052 non-variadic functions to pass FP
9053 parameters/results" - leave it as
9054 "auto". */
9055 break;
9056 default:
9057 /* Attribute value not mentioned in the
9058 November 2012 ABI, so leave it as
9059 "auto". */
9060 break;
9061 }
9062 #else
9063 fp_model = ARM_FLOAT_SOFT_VFP;
9064 #endif
9065 }
9066 break;
9067
9068 default:
9069 /* Leave it as "auto". */
9070 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
9071 break;
9072 }
9073
9074 #ifdef HAVE_ELF
9075 /* Detect M-profile programs. This only works if the
9076 executable file includes build attributes; GCC does
9077 copy them to the executable, but e.g. RealView does
9078 not. */
9079 int attr_arch
9080 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9081 Tag_CPU_arch);
9082 int attr_profile
9083 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9084 Tag_CPU_arch_profile);
9085
9086 /* GCC specifies the profile for v6-M; RealView only
9087 specifies the profile for architectures starting with
9088 V7 (as opposed to architectures with a tag
9089 numerically greater than TAG_CPU_ARCH_V7). */
9090 if (!tdesc_has_registers (tdesc)
9091 && (attr_arch == TAG_CPU_ARCH_V6_M
9092 || attr_arch == TAG_CPU_ARCH_V6S_M
9093 || attr_profile == 'M'))
9094 is_m = true;
9095 #endif
9096 }
9097
9098 if (fp_model == ARM_FLOAT_AUTO)
9099 {
9100 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
9101 {
9102 case 0:
9103 /* Leave it as "auto". Strictly speaking this case
9104 means FPA, but almost nobody uses that now, and
9105 many toolchains fail to set the appropriate bits
9106 for the floating-point model they use. */
9107 break;
9108 case EF_ARM_SOFT_FLOAT:
9109 fp_model = ARM_FLOAT_SOFT_FPA;
9110 break;
9111 case EF_ARM_VFP_FLOAT:
9112 fp_model = ARM_FLOAT_VFP;
9113 break;
9114 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
9115 fp_model = ARM_FLOAT_SOFT_VFP;
9116 break;
9117 }
9118 }
9119
9120 if (e_flags & EF_ARM_BE8)
9121 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
9122
9123 break;
9124
9125 default:
9126 /* Leave it as "auto". */
9127 break;
9128 }
9129 }
9130
9131 /* Check any target description for validity. */
9132 if (tdesc_has_registers (tdesc))
9133 {
9134 /* For most registers we require GDB's default names; but also allow
9135 the numeric names for sp / lr / pc, as a convenience. */
9136 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
9137 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
9138 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
9139
9140 const struct tdesc_feature *feature;
9141 int valid_p;
9142
9143 feature = tdesc_find_feature (tdesc,
9144 "org.gnu.gdb.arm.core");
9145 if (feature == NULL)
9146 {
9147 feature = tdesc_find_feature (tdesc,
9148 "org.gnu.gdb.arm.m-profile");
9149 if (feature == NULL)
9150 return NULL;
9151 else
9152 is_m = true;
9153 }
9154
9155 tdesc_data = tdesc_data_alloc ();
9156
9157 valid_p = 1;
9158 for (i = 0; i < ARM_SP_REGNUM; i++)
9159 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (), i,
9160 arm_register_names[i]);
9161 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data.get (),
9162 ARM_SP_REGNUM,
9163 arm_sp_names);
9164 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data.get (),
9165 ARM_LR_REGNUM,
9166 arm_lr_names);
9167 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data.get (),
9168 ARM_PC_REGNUM,
9169 arm_pc_names);
9170 if (is_m)
9171 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
9172 ARM_PS_REGNUM, "xpsr");
9173 else
9174 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
9175 ARM_PS_REGNUM, "cpsr");
9176
9177 if (!valid_p)
9178 return NULL;
9179
9180 feature = tdesc_find_feature (tdesc,
9181 "org.gnu.gdb.arm.fpa");
9182 if (feature != NULL)
9183 {
9184 valid_p = 1;
9185 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
9186 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (), i,
9187 arm_register_names[i]);
9188 if (!valid_p)
9189 return NULL;
9190 }
9191 else
9192 have_fpa_registers = false;
9193
9194 feature = tdesc_find_feature (tdesc,
9195 "org.gnu.gdb.xscale.iwmmxt");
9196 if (feature != NULL)
9197 {
9198 static const char *const iwmmxt_names[] = {
9199 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9200 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9201 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9202 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9203 };
9204
9205 valid_p = 1;
9206 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
9207 valid_p
9208 &= tdesc_numbered_register (feature, tdesc_data.get (), i,
9209 iwmmxt_names[i - ARM_WR0_REGNUM]);
9210
9211 /* Check for the control registers, but do not fail if they
9212 are missing. */
9213 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
9214 tdesc_numbered_register (feature, tdesc_data.get (), i,
9215 iwmmxt_names[i - ARM_WR0_REGNUM]);
9216
9217 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
9218 valid_p
9219 &= tdesc_numbered_register (feature, tdesc_data.get (), i,
9220 iwmmxt_names[i - ARM_WR0_REGNUM]);
9221
9222 if (!valid_p)
9223 return NULL;
9224
9225 have_wmmx_registers = true;
9226 }
9227
9228 /* If we have a VFP unit, check whether the single precision registers
9229 are present. If not, then we will synthesize them as pseudo
9230 registers. */
9231 feature = tdesc_find_feature (tdesc,
9232 "org.gnu.gdb.arm.vfp");
9233 if (feature != NULL)
9234 {
9235 static const char *const vfp_double_names[] = {
9236 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9237 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9238 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9239 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9240 };
9241
9242 /* Require the double precision registers. There must be either
9243 16 or 32. */
9244 valid_p = 1;
9245 for (i = 0; i < 32; i++)
9246 {
9247 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
9248 ARM_D0_REGNUM + i,
9249 vfp_double_names[i]);
9250 if (!valid_p)
9251 break;
9252 }
9253 if (!valid_p && i == 16)
9254 valid_p = 1;
9255
9256 /* Also require FPSCR. */
9257 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
9258 ARM_FPSCR_REGNUM, "fpscr");
9259 if (!valid_p)
9260 return NULL;
9261
9262 if (tdesc_unnumbered_register (feature, "s0") == 0)
9263 have_vfp_pseudos = true;
9264
9265 vfp_register_count = i;
9266
9267 /* If we have VFP, also check for NEON. The architecture allows
9268 NEON without VFP (integer vector operations only), but GDB
9269 does not support that. */
9270 feature = tdesc_find_feature (tdesc,
9271 "org.gnu.gdb.arm.neon");
9272 if (feature != NULL)
9273 {
9274 /* NEON requires 32 double-precision registers. */
9275 if (i != 32)
9276 return NULL;
9277
9278 /* If there are quad registers defined by the stub, use
9279 their type; otherwise (normally) provide them with
9280 the default type. */
9281 if (tdesc_unnumbered_register (feature, "q0") == 0)
9282 have_neon_pseudos = true;
9283
9284 have_neon = true;
9285 }
9286 }
9287 }
9288
9289 /* If there is already a candidate, use it. */
9290 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
9291 best_arch != NULL;
9292 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
9293 {
9294 if (arm_abi != ARM_ABI_AUTO
9295 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
9296 continue;
9297
9298 if (fp_model != ARM_FLOAT_AUTO
9299 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
9300 continue;
9301
9302 /* There are various other properties in tdep that we do not
9303 need to check here: those derived from a target description,
9304 since gdbarches with a different target description are
9305 automatically disqualified. */
9306
9307 /* Do check is_m, though, since it might come from the binary. */
9308 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
9309 continue;
9310
9311 /* Found a match. */
9312 break;
9313 }
9314
9315 if (best_arch != NULL)
9316 return best_arch->gdbarch;
9317
9318 tdep = XCNEW (struct gdbarch_tdep);
9319 gdbarch = gdbarch_alloc (&info, tdep);
9320
9321 /* Record additional information about the architecture we are defining.
9322 These are gdbarch discriminators, like the OSABI. */
9323 tdep->arm_abi = arm_abi;
9324 tdep->fp_model = fp_model;
9325 tdep->is_m = is_m;
9326 tdep->have_fpa_registers = have_fpa_registers;
9327 tdep->have_wmmx_registers = have_wmmx_registers;
9328 gdb_assert (vfp_register_count == 0
9329 || vfp_register_count == 16
9330 || vfp_register_count == 32);
9331 tdep->vfp_register_count = vfp_register_count;
9332 tdep->have_vfp_pseudos = have_vfp_pseudos;
9333 tdep->have_neon_pseudos = have_neon_pseudos;
9334 tdep->have_neon = have_neon;
9335
9336 arm_register_g_packet_guesses (gdbarch);
9337
9338 /* Breakpoints. */
9339 switch (info.byte_order_for_code)
9340 {
9341 case BFD_ENDIAN_BIG:
9342 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
9343 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
9344 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
9345 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
9346
9347 break;
9348
9349 case BFD_ENDIAN_LITTLE:
9350 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
9351 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
9352 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
9353 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
9354
9355 break;
9356
9357 default:
9358 internal_error (__FILE__, __LINE__,
9359 _("arm_gdbarch_init: bad byte order for float format"));
9360 }
9361
9362 /* On ARM targets char defaults to unsigned. */
9363 set_gdbarch_char_signed (gdbarch, 0);
9364
9365 /* wchar_t is unsigned under the AAPCS. */
9366 if (tdep->arm_abi == ARM_ABI_AAPCS)
9367 set_gdbarch_wchar_signed (gdbarch, 0);
9368 else
9369 set_gdbarch_wchar_signed (gdbarch, 1);
9370
9371 /* Compute type alignment. */
9372 set_gdbarch_type_align (gdbarch, arm_type_align);
9373
9374 /* Note: for displaced stepping, this includes the breakpoint, and one word
9375 of additional scratch space. This setting isn't used for anything beside
9376 displaced stepping at present. */
9377 set_gdbarch_max_insn_length (gdbarch, 4 * ARM_DISPLACED_MODIFIED_INSNS);
9378
9379 /* This should be low enough for everything. */
9380 tdep->lowest_pc = 0x20;
9381 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
9382
9383 /* The default, for both APCS and AAPCS, is to return small
9384 structures in registers. */
9385 tdep->struct_return = reg_struct_return;
9386
9387 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
9388 set_gdbarch_frame_align (gdbarch, arm_frame_align);
9389
9390 if (is_m)
9391 set_gdbarch_code_of_frame_writable (gdbarch, arm_code_of_frame_writable);
9392
9393 set_gdbarch_write_pc (gdbarch, arm_write_pc);
9394
9395 frame_base_set_default (gdbarch, &arm_normal_base);
9396
9397 /* Address manipulation. */
9398 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
9399
9400 /* Advance PC across function entry code. */
9401 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
9402
9403 /* Detect whether PC is at a point where the stack has been destroyed. */
9404 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
9405
9406 /* Skip trampolines. */
9407 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
9408
9409 /* The stack grows downward. */
9410 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
9411
9412 /* Breakpoint manipulation. */
9413 set_gdbarch_breakpoint_kind_from_pc (gdbarch, arm_breakpoint_kind_from_pc);
9414 set_gdbarch_sw_breakpoint_from_kind (gdbarch, arm_sw_breakpoint_from_kind);
9415 set_gdbarch_breakpoint_kind_from_current_state (gdbarch,
9416 arm_breakpoint_kind_from_current_state);
9417
9418 /* Information about registers, etc. */
9419 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
9420 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
9421 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
9422 set_gdbarch_register_type (gdbarch, arm_register_type);
9423 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
9424
9425 /* This "info float" is FPA-specific. Use the generic version if we
9426 do not have FPA. */
9427 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
9428 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
9429
9430 /* Internal <-> external register number maps. */
9431 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
9432 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
9433
9434 set_gdbarch_register_name (gdbarch, arm_register_name);
9435
9436 /* Returning results. */
9437 set_gdbarch_return_value (gdbarch, arm_return_value);
9438
9439 /* Disassembly. */
9440 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
9441
9442 /* Minsymbol frobbing. */
9443 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
9444 set_gdbarch_coff_make_msymbol_special (gdbarch,
9445 arm_coff_make_msymbol_special);
9446 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
9447
9448 /* Thumb-2 IT block support. */
9449 set_gdbarch_adjust_breakpoint_address (gdbarch,
9450 arm_adjust_breakpoint_address);
9451
9452 /* Virtual tables. */
9453 set_gdbarch_vbit_in_delta (gdbarch, 1);
9454
9455 /* Hook in the ABI-specific overrides, if they have been registered. */
9456 gdbarch_init_osabi (info, gdbarch);
9457
9458 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
9459
9460 /* Add some default predicates. */
9461 if (is_m)
9462 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
9463 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
9464 dwarf2_append_unwinders (gdbarch);
9465 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
9466 frame_unwind_append_unwinder (gdbarch, &arm_epilogue_frame_unwind);
9467 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
9468
9469 /* Now we have tuned the configuration, set a few final things,
9470 based on what the OS ABI has told us. */
9471
9472 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
9473 binaries are always marked. */
9474 if (tdep->arm_abi == ARM_ABI_AUTO)
9475 tdep->arm_abi = ARM_ABI_APCS;
9476
9477 /* Watchpoints are not steppable. */
9478 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
9479
9480 /* We used to default to FPA for generic ARM, but almost nobody
9481 uses that now, and we now provide a way for the user to force
9482 the model. So default to the most useful variant. */
9483 if (tdep->fp_model == ARM_FLOAT_AUTO)
9484 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
9485
9486 if (tdep->jb_pc >= 0)
9487 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
9488
9489 /* Floating point sizes and format. */
9490 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
9491 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
9492 {
9493 set_gdbarch_double_format
9494 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9495 set_gdbarch_long_double_format
9496 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9497 }
9498 else
9499 {
9500 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
9501 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
9502 }
9503
9504 if (have_vfp_pseudos)
9505 {
9506 /* NOTE: These are the only pseudo registers used by
9507 the ARM target at the moment. If more are added, a
9508 little more care in numbering will be needed. */
9509
9510 int num_pseudos = 32;
9511 if (have_neon_pseudos)
9512 num_pseudos += 16;
9513 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
9514 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
9515 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
9516 }
9517
9518 if (tdesc_data != nullptr)
9519 {
9520 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
9521
9522 tdesc_use_registers (gdbarch, tdesc, std::move (tdesc_data));
9523
9524 /* Override tdesc_register_type to adjust the types of VFP
9525 registers for NEON. */
9526 set_gdbarch_register_type (gdbarch, arm_register_type);
9527 }
9528
9529 /* Add standard register aliases. We add aliases even for those
9530 names which are used by the current architecture - it's simpler,
9531 and does no harm, since nothing ever lists user registers. */
9532 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
9533 user_reg_add (gdbarch, arm_register_aliases[i].name,
9534 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
9535
9536 set_gdbarch_disassembler_options (gdbarch, &arm_disassembler_options);
9537 set_gdbarch_valid_disassembler_options (gdbarch, disassembler_options_arm ());
9538
9539 set_gdbarch_gnu_triplet_regexp (gdbarch, arm_gnu_triplet_regexp);
9540
9541 return gdbarch;
9542 }
9543
9544 static void
9545 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
9546 {
9547 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9548
9549 if (tdep == NULL)
9550 return;
9551
9552 fprintf_unfiltered (file, _("arm_dump_tdep: fp_model = %i\n"),
9553 (int) tdep->fp_model);
9554 fprintf_unfiltered (file, _("arm_dump_tdep: have_fpa_registers = %i\n"),
9555 (int) tdep->have_fpa_registers);
9556 fprintf_unfiltered (file, _("arm_dump_tdep: have_wmmx_registers = %i\n"),
9557 (int) tdep->have_wmmx_registers);
9558 fprintf_unfiltered (file, _("arm_dump_tdep: vfp_register_count = %i\n"),
9559 (int) tdep->vfp_register_count);
9560 fprintf_unfiltered (file, _("arm_dump_tdep: have_vfp_pseudos = %i\n"),
9561 (int) tdep->have_vfp_pseudos);
9562 fprintf_unfiltered (file, _("arm_dump_tdep: have_neon_pseudos = %i\n"),
9563 (int) tdep->have_neon_pseudos);
9564 fprintf_unfiltered (file, _("arm_dump_tdep: have_neon = %i\n"),
9565 (int) tdep->have_neon);
9566 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx\n"),
9567 (unsigned long) tdep->lowest_pc);
9568 }
9569
9570 #if GDB_SELF_TEST
9571 namespace selftests
9572 {
9573 static void arm_record_test (void);
9574 }
9575 #endif
9576
9577 void _initialize_arm_tdep ();
9578 void
9579 _initialize_arm_tdep ()
9580 {
9581 long length;
9582 int i, j;
9583 char regdesc[1024], *rdptr = regdesc;
9584 size_t rest = sizeof (regdesc);
9585
9586 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
9587
9588 /* Add ourselves to objfile event chain. */
9589 gdb::observers::new_objfile.attach (arm_exidx_new_objfile);
9590
9591 /* Register an ELF OS ABI sniffer for ARM binaries. */
9592 gdbarch_register_osabi_sniffer (bfd_arch_arm,
9593 bfd_target_elf_flavour,
9594 arm_elf_osabi_sniffer);
9595
9596 /* Add root prefix command for all "set arm"/"show arm" commands. */
9597 add_basic_prefix_cmd ("arm", no_class,
9598 _("Various ARM-specific commands."),
9599 &setarmcmdlist, "set arm ", 0, &setlist);
9600
9601 add_show_prefix_cmd ("arm", no_class,
9602 _("Various ARM-specific commands."),
9603 &showarmcmdlist, "show arm ", 0, &showlist);
9604
9605
9606 arm_disassembler_options = xstrdup ("reg-names-std");
9607 const disasm_options_t *disasm_options
9608 = &disassembler_options_arm ()->options;
9609 int num_disassembly_styles = 0;
9610 for (i = 0; disasm_options->name[i] != NULL; i++)
9611 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9612 num_disassembly_styles++;
9613
9614 /* Initialize the array that will be passed to add_setshow_enum_cmd(). */
9615 valid_disassembly_styles = XNEWVEC (const char *,
9616 num_disassembly_styles + 1);
9617 for (i = j = 0; disasm_options->name[i] != NULL; i++)
9618 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9619 {
9620 size_t offset = strlen ("reg-names-");
9621 const char *style = disasm_options->name[i];
9622 valid_disassembly_styles[j++] = &style[offset];
9623 length = snprintf (rdptr, rest, "%s - %s\n", &style[offset],
9624 disasm_options->description[i]);
9625 rdptr += length;
9626 rest -= length;
9627 }
9628 /* Mark the end of valid options. */
9629 valid_disassembly_styles[num_disassembly_styles] = NULL;
9630
9631 /* Create the help text. */
9632 std::string helptext = string_printf ("%s%s%s",
9633 _("The valid values are:\n"),
9634 regdesc,
9635 _("The default is \"std\"."));
9636
9637 add_setshow_enum_cmd("disassembler", no_class,
9638 valid_disassembly_styles, &disassembly_style,
9639 _("Set the disassembly style."),
9640 _("Show the disassembly style."),
9641 helptext.c_str (),
9642 set_disassembly_style_sfunc,
9643 show_disassembly_style_sfunc,
9644 &setarmcmdlist, &showarmcmdlist);
9645
9646 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
9647 _("Set usage of ARM 32-bit mode."),
9648 _("Show usage of ARM 32-bit mode."),
9649 _("When off, a 26-bit PC will be used."),
9650 NULL,
9651 NULL, /* FIXME: i18n: Usage of ARM 32-bit
9652 mode is %s. */
9653 &setarmcmdlist, &showarmcmdlist);
9654
9655 /* Add a command to allow the user to force the FPU model. */
9656 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
9657 _("Set the floating point type."),
9658 _("Show the floating point type."),
9659 _("auto - Determine the FP typefrom the OS-ABI.\n\
9660 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
9661 fpa - FPA co-processor (GCC compiled).\n\
9662 softvfp - Software FP with pure-endian doubles.\n\
9663 vfp - VFP co-processor."),
9664 set_fp_model_sfunc, show_fp_model,
9665 &setarmcmdlist, &showarmcmdlist);
9666
9667 /* Add a command to allow the user to force the ABI. */
9668 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
9669 _("Set the ABI."),
9670 _("Show the ABI."),
9671 NULL, arm_set_abi, arm_show_abi,
9672 &setarmcmdlist, &showarmcmdlist);
9673
9674 /* Add two commands to allow the user to force the assumed
9675 execution mode. */
9676 add_setshow_enum_cmd ("fallback-mode", class_support,
9677 arm_mode_strings, &arm_fallback_mode_string,
9678 _("Set the mode assumed when symbols are unavailable."),
9679 _("Show the mode assumed when symbols are unavailable."),
9680 NULL, NULL, arm_show_fallback_mode,
9681 &setarmcmdlist, &showarmcmdlist);
9682 add_setshow_enum_cmd ("force-mode", class_support,
9683 arm_mode_strings, &arm_force_mode_string,
9684 _("Set the mode assumed even when symbols are available."),
9685 _("Show the mode assumed even when symbols are available."),
9686 NULL, NULL, arm_show_force_mode,
9687 &setarmcmdlist, &showarmcmdlist);
9688
9689 /* Debugging flag. */
9690 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
9691 _("Set ARM debugging."),
9692 _("Show ARM debugging."),
9693 _("When on, arm-specific debugging is enabled."),
9694 NULL,
9695 NULL, /* FIXME: i18n: "ARM debugging is %s. */
9696 &setdebuglist, &showdebuglist);
9697
9698 #if GDB_SELF_TEST
9699 selftests::register_test ("arm-record", selftests::arm_record_test);
9700 #endif
9701
9702 }
9703
9704 /* ARM-reversible process record data structures. */
9705
9706 #define ARM_INSN_SIZE_BYTES 4
9707 #define THUMB_INSN_SIZE_BYTES 2
9708 #define THUMB2_INSN_SIZE_BYTES 4
9709
9710
9711 /* Position of the bit within a 32-bit ARM instruction
9712 that defines whether the instruction is a load or store. */
9713 #define INSN_S_L_BIT_NUM 20
9714
9715 #define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
9716 do \
9717 { \
9718 unsigned int reg_len = LENGTH; \
9719 if (reg_len) \
9720 { \
9721 REGS = XNEWVEC (uint32_t, reg_len); \
9722 memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
9723 } \
9724 } \
9725 while (0)
9726
9727 #define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
9728 do \
9729 { \
9730 unsigned int mem_len = LENGTH; \
9731 if (mem_len) \
9732 { \
9733 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
9734 memcpy(&MEMS->len, &RECORD_BUF[0], \
9735 sizeof(struct arm_mem_r) * LENGTH); \
9736 } \
9737 } \
9738 while (0)
9739
9740 /* Checks whether insn is already recorded or yet to be decoded. (boolean expression). */
9741 #define INSN_RECORDED(ARM_RECORD) \
9742 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
9743
9744 /* ARM memory record structure. */
9745 struct arm_mem_r
9746 {
9747 uint32_t len; /* Record length. */
9748 uint32_t addr; /* Memory address. */
9749 };
9750
9751 /* ARM instruction record contains opcode of current insn
9752 and execution state (before entry to decode_insn()),
9753 contains list of to-be-modified registers and
9754 memory blocks (on return from decode_insn()). */
9755
9756 typedef struct insn_decode_record_t
9757 {
9758 struct gdbarch *gdbarch;
9759 struct regcache *regcache;
9760 CORE_ADDR this_addr; /* Address of the insn being decoded. */
9761 uint32_t arm_insn; /* Should accommodate thumb. */
9762 uint32_t cond; /* Condition code. */
9763 uint32_t opcode; /* Insn opcode. */
9764 uint32_t decode; /* Insn decode bits. */
9765 uint32_t mem_rec_count; /* No of mem records. */
9766 uint32_t reg_rec_count; /* No of reg records. */
9767 uint32_t *arm_regs; /* Registers to be saved for this record. */
9768 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
9769 } insn_decode_record;
9770
9771
9772 /* Checks ARM SBZ and SBO mandatory fields. */
9773
9774 static int
9775 sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
9776 {
9777 uint32_t ones = bits (insn, bit_num - 1, (bit_num -1) + (len - 1));
9778
9779 if (!len)
9780 return 1;
9781
9782 if (!sbo)
9783 ones = ~ones;
9784
9785 while (ones)
9786 {
9787 if (!(ones & sbo))
9788 {
9789 return 0;
9790 }
9791 ones = ones >> 1;
9792 }
9793 return 1;
9794 }
9795
9796 enum arm_record_result
9797 {
9798 ARM_RECORD_SUCCESS = 0,
9799 ARM_RECORD_FAILURE = 1
9800 };
9801
9802 typedef enum
9803 {
9804 ARM_RECORD_STRH=1,
9805 ARM_RECORD_STRD
9806 } arm_record_strx_t;
9807
9808 typedef enum
9809 {
9810 ARM_RECORD=1,
9811 THUMB_RECORD,
9812 THUMB2_RECORD
9813 } record_type_t;
9814
9815
9816 static int
9817 arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
9818 uint32_t *record_buf_mem, arm_record_strx_t str_type)
9819 {
9820
9821 struct regcache *reg_cache = arm_insn_r->regcache;
9822 ULONGEST u_regval[2]= {0};
9823
9824 uint32_t reg_src1 = 0, reg_src2 = 0;
9825 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
9826
9827 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
9828 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
9829
9830 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
9831 {
9832 /* 1) Handle misc store, immediate offset. */
9833 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9834 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9835 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9836 regcache_raw_read_unsigned (reg_cache, reg_src1,
9837 &u_regval[0]);
9838 if (ARM_PC_REGNUM == reg_src1)
9839 {
9840 /* If R15 was used as Rn, hence current PC+8. */
9841 u_regval[0] = u_regval[0] + 8;
9842 }
9843 offset_8 = (immed_high << 4) | immed_low;
9844 /* Calculate target store address. */
9845 if (14 == arm_insn_r->opcode)
9846 {
9847 tgt_mem_addr = u_regval[0] + offset_8;
9848 }
9849 else
9850 {
9851 tgt_mem_addr = u_regval[0] - offset_8;
9852 }
9853 if (ARM_RECORD_STRH == str_type)
9854 {
9855 record_buf_mem[0] = 2;
9856 record_buf_mem[1] = tgt_mem_addr;
9857 arm_insn_r->mem_rec_count = 1;
9858 }
9859 else if (ARM_RECORD_STRD == str_type)
9860 {
9861 record_buf_mem[0] = 4;
9862 record_buf_mem[1] = tgt_mem_addr;
9863 record_buf_mem[2] = 4;
9864 record_buf_mem[3] = tgt_mem_addr + 4;
9865 arm_insn_r->mem_rec_count = 2;
9866 }
9867 }
9868 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
9869 {
9870 /* 2) Store, register offset. */
9871 /* Get Rm. */
9872 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9873 /* Get Rn. */
9874 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9875 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9876 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9877 if (15 == reg_src2)
9878 {
9879 /* If R15 was used as Rn, hence current PC+8. */
9880 u_regval[0] = u_regval[0] + 8;
9881 }
9882 /* Calculate target store address, Rn +/- Rm, register offset. */
9883 if (12 == arm_insn_r->opcode)
9884 {
9885 tgt_mem_addr = u_regval[0] + u_regval[1];
9886 }
9887 else
9888 {
9889 tgt_mem_addr = u_regval[1] - u_regval[0];
9890 }
9891 if (ARM_RECORD_STRH == str_type)
9892 {
9893 record_buf_mem[0] = 2;
9894 record_buf_mem[1] = tgt_mem_addr;
9895 arm_insn_r->mem_rec_count = 1;
9896 }
9897 else if (ARM_RECORD_STRD == str_type)
9898 {
9899 record_buf_mem[0] = 4;
9900 record_buf_mem[1] = tgt_mem_addr;
9901 record_buf_mem[2] = 4;
9902 record_buf_mem[3] = tgt_mem_addr + 4;
9903 arm_insn_r->mem_rec_count = 2;
9904 }
9905 }
9906 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
9907 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9908 {
9909 /* 3) Store, immediate pre-indexed. */
9910 /* 5) Store, immediate post-indexed. */
9911 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9912 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9913 offset_8 = (immed_high << 4) | immed_low;
9914 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9915 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9916 /* Calculate target store address, Rn +/- Rm, register offset. */
9917 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9918 {
9919 tgt_mem_addr = u_regval[0] + offset_8;
9920 }
9921 else
9922 {
9923 tgt_mem_addr = u_regval[0] - offset_8;
9924 }
9925 if (ARM_RECORD_STRH == str_type)
9926 {
9927 record_buf_mem[0] = 2;
9928 record_buf_mem[1] = tgt_mem_addr;
9929 arm_insn_r->mem_rec_count = 1;
9930 }
9931 else if (ARM_RECORD_STRD == str_type)
9932 {
9933 record_buf_mem[0] = 4;
9934 record_buf_mem[1] = tgt_mem_addr;
9935 record_buf_mem[2] = 4;
9936 record_buf_mem[3] = tgt_mem_addr + 4;
9937 arm_insn_r->mem_rec_count = 2;
9938 }
9939 /* Record Rn also as it changes. */
9940 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9941 arm_insn_r->reg_rec_count = 1;
9942 }
9943 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
9944 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9945 {
9946 /* 4) Store, register pre-indexed. */
9947 /* 6) Store, register post -indexed. */
9948 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9949 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9950 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9951 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9952 /* Calculate target store address, Rn +/- Rm, register offset. */
9953 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9954 {
9955 tgt_mem_addr = u_regval[0] + u_regval[1];
9956 }
9957 else
9958 {
9959 tgt_mem_addr = u_regval[1] - u_regval[0];
9960 }
9961 if (ARM_RECORD_STRH == str_type)
9962 {
9963 record_buf_mem[0] = 2;
9964 record_buf_mem[1] = tgt_mem_addr;
9965 arm_insn_r->mem_rec_count = 1;
9966 }
9967 else if (ARM_RECORD_STRD == str_type)
9968 {
9969 record_buf_mem[0] = 4;
9970 record_buf_mem[1] = tgt_mem_addr;
9971 record_buf_mem[2] = 4;
9972 record_buf_mem[3] = tgt_mem_addr + 4;
9973 arm_insn_r->mem_rec_count = 2;
9974 }
9975 /* Record Rn also as it changes. */
9976 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9977 arm_insn_r->reg_rec_count = 1;
9978 }
9979 return 0;
9980 }
9981
9982 /* Handling ARM extension space insns. */
9983
9984 static int
9985 arm_record_extension_space (insn_decode_record *arm_insn_r)
9986 {
9987 int ret = 0; /* Return value: -1:record failure ; 0:success */
9988 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
9989 uint32_t record_buf[8], record_buf_mem[8];
9990 uint32_t reg_src1 = 0;
9991 struct regcache *reg_cache = arm_insn_r->regcache;
9992 ULONGEST u_regval = 0;
9993
9994 gdb_assert (!INSN_RECORDED(arm_insn_r));
9995 /* Handle unconditional insn extension space. */
9996
9997 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
9998 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9999 if (arm_insn_r->cond)
10000 {
10001 /* PLD has no affect on architectural state, it just affects
10002 the caches. */
10003 if (5 == ((opcode1 & 0xE0) >> 5))
10004 {
10005 /* BLX(1) */
10006 record_buf[0] = ARM_PS_REGNUM;
10007 record_buf[1] = ARM_LR_REGNUM;
10008 arm_insn_r->reg_rec_count = 2;
10009 }
10010 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
10011 }
10012
10013
10014 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10015 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
10016 {
10017 ret = -1;
10018 /* Undefined instruction on ARM V5; need to handle if later
10019 versions define it. */
10020 }
10021
10022 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
10023 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10024 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
10025
10026 /* Handle arithmetic insn extension space. */
10027 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
10028 && !INSN_RECORDED(arm_insn_r))
10029 {
10030 /* Handle MLA(S) and MUL(S). */
10031 if (in_inclusive_range (insn_op1, 0U, 3U))
10032 {
10033 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10034 record_buf[1] = ARM_PS_REGNUM;
10035 arm_insn_r->reg_rec_count = 2;
10036 }
10037 else if (in_inclusive_range (insn_op1, 4U, 15U))
10038 {
10039 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
10040 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10041 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10042 record_buf[2] = ARM_PS_REGNUM;
10043 arm_insn_r->reg_rec_count = 3;
10044 }
10045 }
10046
10047 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
10048 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
10049 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
10050
10051 /* Handle control insn extension space. */
10052
10053 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
10054 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
10055 {
10056 if (!bit (arm_insn_r->arm_insn,25))
10057 {
10058 if (!bits (arm_insn_r->arm_insn, 4, 7))
10059 {
10060 if ((0 == insn_op1) || (2 == insn_op1))
10061 {
10062 /* MRS. */
10063 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10064 arm_insn_r->reg_rec_count = 1;
10065 }
10066 else if (1 == insn_op1)
10067 {
10068 /* CSPR is going to be changed. */
10069 record_buf[0] = ARM_PS_REGNUM;
10070 arm_insn_r->reg_rec_count = 1;
10071 }
10072 else if (3 == insn_op1)
10073 {
10074 /* SPSR is going to be changed. */
10075 /* We need to get SPSR value, which is yet to be done. */
10076 return -1;
10077 }
10078 }
10079 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
10080 {
10081 if (1 == insn_op1)
10082 {
10083 /* BX. */
10084 record_buf[0] = ARM_PS_REGNUM;
10085 arm_insn_r->reg_rec_count = 1;
10086 }
10087 else if (3 == insn_op1)
10088 {
10089 /* CLZ. */
10090 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10091 arm_insn_r->reg_rec_count = 1;
10092 }
10093 }
10094 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
10095 {
10096 /* BLX. */
10097 record_buf[0] = ARM_PS_REGNUM;
10098 record_buf[1] = ARM_LR_REGNUM;
10099 arm_insn_r->reg_rec_count = 2;
10100 }
10101 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
10102 {
10103 /* QADD, QSUB, QDADD, QDSUB */
10104 record_buf[0] = ARM_PS_REGNUM;
10105 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10106 arm_insn_r->reg_rec_count = 2;
10107 }
10108 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
10109 {
10110 /* BKPT. */
10111 record_buf[0] = ARM_PS_REGNUM;
10112 record_buf[1] = ARM_LR_REGNUM;
10113 arm_insn_r->reg_rec_count = 2;
10114
10115 /* Save SPSR also;how? */
10116 return -1;
10117 }
10118 else if(8 == bits (arm_insn_r->arm_insn, 4, 7)
10119 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
10120 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
10121 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
10122 )
10123 {
10124 if (0 == insn_op1 || 1 == insn_op1)
10125 {
10126 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
10127 /* We dont do optimization for SMULW<y> where we
10128 need only Rd. */
10129 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10130 record_buf[1] = ARM_PS_REGNUM;
10131 arm_insn_r->reg_rec_count = 2;
10132 }
10133 else if (2 == insn_op1)
10134 {
10135 /* SMLAL<x><y>. */
10136 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10137 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
10138 arm_insn_r->reg_rec_count = 2;
10139 }
10140 else if (3 == insn_op1)
10141 {
10142 /* SMUL<x><y>. */
10143 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10144 arm_insn_r->reg_rec_count = 1;
10145 }
10146 }
10147 }
10148 else
10149 {
10150 /* MSR : immediate form. */
10151 if (1 == insn_op1)
10152 {
10153 /* CSPR is going to be changed. */
10154 record_buf[0] = ARM_PS_REGNUM;
10155 arm_insn_r->reg_rec_count = 1;
10156 }
10157 else if (3 == insn_op1)
10158 {
10159 /* SPSR is going to be changed. */
10160 /* we need to get SPSR value, which is yet to be done */
10161 return -1;
10162 }
10163 }
10164 }
10165
10166 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10167 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
10168 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
10169
10170 /* Handle load/store insn extension space. */
10171
10172 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
10173 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
10174 && !INSN_RECORDED(arm_insn_r))
10175 {
10176 /* SWP/SWPB. */
10177 if (0 == insn_op1)
10178 {
10179 /* These insn, changes register and memory as well. */
10180 /* SWP or SWPB insn. */
10181 /* Get memory address given by Rn. */
10182 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10183 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
10184 /* SWP insn ?, swaps word. */
10185 if (8 == arm_insn_r->opcode)
10186 {
10187 record_buf_mem[0] = 4;
10188 }
10189 else
10190 {
10191 /* SWPB insn, swaps only byte. */
10192 record_buf_mem[0] = 1;
10193 }
10194 record_buf_mem[1] = u_regval;
10195 arm_insn_r->mem_rec_count = 1;
10196 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10197 arm_insn_r->reg_rec_count = 1;
10198 }
10199 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10200 {
10201 /* STRH. */
10202 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10203 ARM_RECORD_STRH);
10204 }
10205 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10206 {
10207 /* LDRD. */
10208 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10209 record_buf[1] = record_buf[0] + 1;
10210 arm_insn_r->reg_rec_count = 2;
10211 }
10212 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10213 {
10214 /* STRD. */
10215 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10216 ARM_RECORD_STRD);
10217 }
10218 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
10219 {
10220 /* LDRH, LDRSB, LDRSH. */
10221 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10222 arm_insn_r->reg_rec_count = 1;
10223 }
10224
10225 }
10226
10227 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
10228 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
10229 && !INSN_RECORDED(arm_insn_r))
10230 {
10231 ret = -1;
10232 /* Handle coprocessor insn extension space. */
10233 }
10234
10235 /* To be done for ARMv5 and later; as of now we return -1. */
10236 if (-1 == ret)
10237 return ret;
10238
10239 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10240 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10241
10242 return ret;
10243 }
10244
10245 /* Handling opcode 000 insns. */
10246
10247 static int
10248 arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
10249 {
10250 struct regcache *reg_cache = arm_insn_r->regcache;
10251 uint32_t record_buf[8], record_buf_mem[8];
10252 ULONGEST u_regval[2] = {0};
10253
10254 uint32_t reg_src1 = 0;
10255 uint32_t opcode1 = 0;
10256
10257 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10258 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10259 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10260
10261 if (!((opcode1 & 0x19) == 0x10))
10262 {
10263 /* Data-processing (register) and Data-processing (register-shifted
10264 register */
10265 /* Out of 11 shifter operands mode, all the insn modifies destination
10266 register, which is specified by 13-16 decode. */
10267 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10268 record_buf[1] = ARM_PS_REGNUM;
10269 arm_insn_r->reg_rec_count = 2;
10270 }
10271 else if ((arm_insn_r->decode < 8) && ((opcode1 & 0x19) == 0x10))
10272 {
10273 /* Miscellaneous instructions */
10274
10275 if (3 == arm_insn_r->decode && 0x12 == opcode1
10276 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10277 {
10278 /* Handle BLX, branch and link/exchange. */
10279 if (9 == arm_insn_r->opcode)
10280 {
10281 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm,
10282 and R14 stores the return address. */
10283 record_buf[0] = ARM_PS_REGNUM;
10284 record_buf[1] = ARM_LR_REGNUM;
10285 arm_insn_r->reg_rec_count = 2;
10286 }
10287 }
10288 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
10289 {
10290 /* Handle enhanced software breakpoint insn, BKPT. */
10291 /* CPSR is changed to be executed in ARM state, disabling normal
10292 interrupts, entering abort mode. */
10293 /* According to high vector configuration PC is set. */
10294 /* user hit breakpoint and type reverse, in
10295 that case, we need to go back with previous CPSR and
10296 Program Counter. */
10297 record_buf[0] = ARM_PS_REGNUM;
10298 record_buf[1] = ARM_LR_REGNUM;
10299 arm_insn_r->reg_rec_count = 2;
10300
10301 /* Save SPSR also; how? */
10302 return -1;
10303 }
10304 else if (1 == arm_insn_r->decode && 0x12 == opcode1
10305 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10306 {
10307 /* Handle BX, branch and link/exchange. */
10308 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm. */
10309 record_buf[0] = ARM_PS_REGNUM;
10310 arm_insn_r->reg_rec_count = 1;
10311 }
10312 else if (1 == arm_insn_r->decode && 0x16 == opcode1
10313 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
10314 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
10315 {
10316 /* Count leading zeros: CLZ. */
10317 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10318 arm_insn_r->reg_rec_count = 1;
10319 }
10320 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10321 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10322 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
10323 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0))
10324 {
10325 /* Handle MRS insn. */
10326 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10327 arm_insn_r->reg_rec_count = 1;
10328 }
10329 }
10330 else if (9 == arm_insn_r->decode && opcode1 < 0x10)
10331 {
10332 /* Multiply and multiply-accumulate */
10333
10334 /* Handle multiply instructions. */
10335 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
10336 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
10337 {
10338 /* Handle MLA and MUL. */
10339 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10340 record_buf[1] = ARM_PS_REGNUM;
10341 arm_insn_r->reg_rec_count = 2;
10342 }
10343 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10344 {
10345 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
10346 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10347 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10348 record_buf[2] = ARM_PS_REGNUM;
10349 arm_insn_r->reg_rec_count = 3;
10350 }
10351 }
10352 else if (9 == arm_insn_r->decode && opcode1 > 0x10)
10353 {
10354 /* Synchronization primitives */
10355
10356 /* Handling SWP, SWPB. */
10357 /* These insn, changes register and memory as well. */
10358 /* SWP or SWPB insn. */
10359
10360 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10361 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10362 /* SWP insn ?, swaps word. */
10363 if (8 == arm_insn_r->opcode)
10364 {
10365 record_buf_mem[0] = 4;
10366 }
10367 else
10368 {
10369 /* SWPB insn, swaps only byte. */
10370 record_buf_mem[0] = 1;
10371 }
10372 record_buf_mem[1] = u_regval[0];
10373 arm_insn_r->mem_rec_count = 1;
10374 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10375 arm_insn_r->reg_rec_count = 1;
10376 }
10377 else if (11 == arm_insn_r->decode || 13 == arm_insn_r->decode
10378 || 15 == arm_insn_r->decode)
10379 {
10380 if ((opcode1 & 0x12) == 2)
10381 {
10382 /* Extra load/store (unprivileged) */
10383 return -1;
10384 }
10385 else
10386 {
10387 /* Extra load/store */
10388 switch (bits (arm_insn_r->arm_insn, 5, 6))
10389 {
10390 case 1:
10391 if ((opcode1 & 0x05) == 0x0 || (opcode1 & 0x05) == 0x4)
10392 {
10393 /* STRH (register), STRH (immediate) */
10394 arm_record_strx (arm_insn_r, &record_buf[0],
10395 &record_buf_mem[0], ARM_RECORD_STRH);
10396 }
10397 else if ((opcode1 & 0x05) == 0x1)
10398 {
10399 /* LDRH (register) */
10400 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10401 arm_insn_r->reg_rec_count = 1;
10402
10403 if (bit (arm_insn_r->arm_insn, 21))
10404 {
10405 /* Write back to Rn. */
10406 record_buf[arm_insn_r->reg_rec_count++]
10407 = bits (arm_insn_r->arm_insn, 16, 19);
10408 }
10409 }
10410 else if ((opcode1 & 0x05) == 0x5)
10411 {
10412 /* LDRH (immediate), LDRH (literal) */
10413 int rn = bits (arm_insn_r->arm_insn, 16, 19);
10414
10415 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10416 arm_insn_r->reg_rec_count = 1;
10417
10418 if (rn != 15)
10419 {
10420 /*LDRH (immediate) */
10421 if (bit (arm_insn_r->arm_insn, 21))
10422 {
10423 /* Write back to Rn. */
10424 record_buf[arm_insn_r->reg_rec_count++] = rn;
10425 }
10426 }
10427 }
10428 else
10429 return -1;
10430 break;
10431 case 2:
10432 if ((opcode1 & 0x05) == 0x0)
10433 {
10434 /* LDRD (register) */
10435 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10436 record_buf[1] = record_buf[0] + 1;
10437 arm_insn_r->reg_rec_count = 2;
10438
10439 if (bit (arm_insn_r->arm_insn, 21))
10440 {
10441 /* Write back to Rn. */
10442 record_buf[arm_insn_r->reg_rec_count++]
10443 = bits (arm_insn_r->arm_insn, 16, 19);
10444 }
10445 }
10446 else if ((opcode1 & 0x05) == 0x1)
10447 {
10448 /* LDRSB (register) */
10449 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10450 arm_insn_r->reg_rec_count = 1;
10451
10452 if (bit (arm_insn_r->arm_insn, 21))
10453 {
10454 /* Write back to Rn. */
10455 record_buf[arm_insn_r->reg_rec_count++]
10456 = bits (arm_insn_r->arm_insn, 16, 19);
10457 }
10458 }
10459 else if ((opcode1 & 0x05) == 0x4 || (opcode1 & 0x05) == 0x5)
10460 {
10461 /* LDRD (immediate), LDRD (literal), LDRSB (immediate),
10462 LDRSB (literal) */
10463 int rn = bits (arm_insn_r->arm_insn, 16, 19);
10464
10465 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10466 arm_insn_r->reg_rec_count = 1;
10467
10468 if (rn != 15)
10469 {
10470 /*LDRD (immediate), LDRSB (immediate) */
10471 if (bit (arm_insn_r->arm_insn, 21))
10472 {
10473 /* Write back to Rn. */
10474 record_buf[arm_insn_r->reg_rec_count++] = rn;
10475 }
10476 }
10477 }
10478 else
10479 return -1;
10480 break;
10481 case 3:
10482 if ((opcode1 & 0x05) == 0x0)
10483 {
10484 /* STRD (register) */
10485 arm_record_strx (arm_insn_r, &record_buf[0],
10486 &record_buf_mem[0], ARM_RECORD_STRD);
10487 }
10488 else if ((opcode1 & 0x05) == 0x1)
10489 {
10490 /* LDRSH (register) */
10491 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10492 arm_insn_r->reg_rec_count = 1;
10493
10494 if (bit (arm_insn_r->arm_insn, 21))
10495 {
10496 /* Write back to Rn. */
10497 record_buf[arm_insn_r->reg_rec_count++]
10498 = bits (arm_insn_r->arm_insn, 16, 19);
10499 }
10500 }
10501 else if ((opcode1 & 0x05) == 0x4)
10502 {
10503 /* STRD (immediate) */
10504 arm_record_strx (arm_insn_r, &record_buf[0],
10505 &record_buf_mem[0], ARM_RECORD_STRD);
10506 }
10507 else if ((opcode1 & 0x05) == 0x5)
10508 {
10509 /* LDRSH (immediate), LDRSH (literal) */
10510 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10511 arm_insn_r->reg_rec_count = 1;
10512
10513 if (bit (arm_insn_r->arm_insn, 21))
10514 {
10515 /* Write back to Rn. */
10516 record_buf[arm_insn_r->reg_rec_count++]
10517 = bits (arm_insn_r->arm_insn, 16, 19);
10518 }
10519 }
10520 else
10521 return -1;
10522 break;
10523 default:
10524 return -1;
10525 }
10526 }
10527 }
10528 else
10529 {
10530 return -1;
10531 }
10532
10533 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10534 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10535 return 0;
10536 }
10537
10538 /* Handling opcode 001 insns. */
10539
10540 static int
10541 arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
10542 {
10543 uint32_t record_buf[8], record_buf_mem[8];
10544
10545 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10546 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10547
10548 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10549 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
10550 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10551 )
10552 {
10553 /* Handle MSR insn. */
10554 if (9 == arm_insn_r->opcode)
10555 {
10556 /* CSPR is going to be changed. */
10557 record_buf[0] = ARM_PS_REGNUM;
10558 arm_insn_r->reg_rec_count = 1;
10559 }
10560 else
10561 {
10562 /* SPSR is going to be changed. */
10563 }
10564 }
10565 else if (arm_insn_r->opcode <= 15)
10566 {
10567 /* Normal data processing insns. */
10568 /* Out of 11 shifter operands mode, all the insn modifies destination
10569 register, which is specified by 13-16 decode. */
10570 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10571 record_buf[1] = ARM_PS_REGNUM;
10572 arm_insn_r->reg_rec_count = 2;
10573 }
10574 else
10575 {
10576 return -1;
10577 }
10578
10579 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10580 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10581 return 0;
10582 }
10583
10584 static int
10585 arm_record_media (insn_decode_record *arm_insn_r)
10586 {
10587 uint32_t record_buf[8];
10588
10589 switch (bits (arm_insn_r->arm_insn, 22, 24))
10590 {
10591 case 0:
10592 /* Parallel addition and subtraction, signed */
10593 case 1:
10594 /* Parallel addition and subtraction, unsigned */
10595 case 2:
10596 case 3:
10597 /* Packing, unpacking, saturation and reversal */
10598 {
10599 int rd = bits (arm_insn_r->arm_insn, 12, 15);
10600
10601 record_buf[arm_insn_r->reg_rec_count++] = rd;
10602 }
10603 break;
10604
10605 case 4:
10606 case 5:
10607 /* Signed multiplies */
10608 {
10609 int rd = bits (arm_insn_r->arm_insn, 16, 19);
10610 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 22);
10611
10612 record_buf[arm_insn_r->reg_rec_count++] = rd;
10613 if (op1 == 0x0)
10614 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10615 else if (op1 == 0x4)
10616 record_buf[arm_insn_r->reg_rec_count++]
10617 = bits (arm_insn_r->arm_insn, 12, 15);
10618 }
10619 break;
10620
10621 case 6:
10622 {
10623 if (bit (arm_insn_r->arm_insn, 21)
10624 && bits (arm_insn_r->arm_insn, 5, 6) == 0x2)
10625 {
10626 /* SBFX */
10627 record_buf[arm_insn_r->reg_rec_count++]
10628 = bits (arm_insn_r->arm_insn, 12, 15);
10629 }
10630 else if (bits (arm_insn_r->arm_insn, 20, 21) == 0x0
10631 && bits (arm_insn_r->arm_insn, 5, 7) == 0x0)
10632 {
10633 /* USAD8 and USADA8 */
10634 record_buf[arm_insn_r->reg_rec_count++]
10635 = bits (arm_insn_r->arm_insn, 16, 19);
10636 }
10637 }
10638 break;
10639
10640 case 7:
10641 {
10642 if (bits (arm_insn_r->arm_insn, 20, 21) == 0x3
10643 && bits (arm_insn_r->arm_insn, 5, 7) == 0x7)
10644 {
10645 /* Permanently UNDEFINED */
10646 return -1;
10647 }
10648 else
10649 {
10650 /* BFC, BFI and UBFX */
10651 record_buf[arm_insn_r->reg_rec_count++]
10652 = bits (arm_insn_r->arm_insn, 12, 15);
10653 }
10654 }
10655 break;
10656
10657 default:
10658 return -1;
10659 }
10660
10661 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10662
10663 return 0;
10664 }
10665
10666 /* Handle ARM mode instructions with opcode 010. */
10667
10668 static int
10669 arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
10670 {
10671 struct regcache *reg_cache = arm_insn_r->regcache;
10672
10673 uint32_t reg_base , reg_dest;
10674 uint32_t offset_12, tgt_mem_addr;
10675 uint32_t record_buf[8], record_buf_mem[8];
10676 unsigned char wback;
10677 ULONGEST u_regval;
10678
10679 /* Calculate wback. */
10680 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
10681 || (bit (arm_insn_r->arm_insn, 21) == 1);
10682
10683 arm_insn_r->reg_rec_count = 0;
10684 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10685
10686 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10687 {
10688 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
10689 and LDRT. */
10690
10691 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10692 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
10693
10694 /* The LDR instruction is capable of doing branching. If MOV LR, PC
10695 preceeds a LDR instruction having R15 as reg_base, it
10696 emulates a branch and link instruction, and hence we need to save
10697 CPSR and PC as well. */
10698 if (ARM_PC_REGNUM == reg_dest)
10699 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10700
10701 /* If wback is true, also save the base register, which is going to be
10702 written to. */
10703 if (wback)
10704 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10705 }
10706 else
10707 {
10708 /* STR (immediate), STRB (immediate), STRBT and STRT. */
10709
10710 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
10711 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10712
10713 /* Handle bit U. */
10714 if (bit (arm_insn_r->arm_insn, 23))
10715 {
10716 /* U == 1: Add the offset. */
10717 tgt_mem_addr = (uint32_t) u_regval + offset_12;
10718 }
10719 else
10720 {
10721 /* U == 0: subtract the offset. */
10722 tgt_mem_addr = (uint32_t) u_regval - offset_12;
10723 }
10724
10725 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
10726 bytes. */
10727 if (bit (arm_insn_r->arm_insn, 22))
10728 {
10729 /* STRB and STRBT: 1 byte. */
10730 record_buf_mem[0] = 1;
10731 }
10732 else
10733 {
10734 /* STR and STRT: 4 bytes. */
10735 record_buf_mem[0] = 4;
10736 }
10737
10738 /* Handle bit P. */
10739 if (bit (arm_insn_r->arm_insn, 24))
10740 record_buf_mem[1] = tgt_mem_addr;
10741 else
10742 record_buf_mem[1] = (uint32_t) u_regval;
10743
10744 arm_insn_r->mem_rec_count = 1;
10745
10746 /* If wback is true, also save the base register, which is going to be
10747 written to. */
10748 if (wback)
10749 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10750 }
10751
10752 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10753 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10754 return 0;
10755 }
10756
10757 /* Handling opcode 011 insns. */
10758
10759 static int
10760 arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
10761 {
10762 struct regcache *reg_cache = arm_insn_r->regcache;
10763
10764 uint32_t shift_imm = 0;
10765 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
10766 uint32_t offset_12 = 0, tgt_mem_addr = 0;
10767 uint32_t record_buf[8], record_buf_mem[8];
10768
10769 LONGEST s_word;
10770 ULONGEST u_regval[2];
10771
10772 if (bit (arm_insn_r->arm_insn, 4))
10773 return arm_record_media (arm_insn_r);
10774
10775 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10776 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10777
10778 /* Handle enhanced store insns and LDRD DSP insn,
10779 order begins according to addressing modes for store insns
10780 STRH insn. */
10781
10782 /* LDR or STR? */
10783 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10784 {
10785 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10786 /* LDR insn has a capability to do branching, if
10787 MOV LR, PC is preceded by LDR insn having Rn as R15
10788 in that case, it emulates branch and link insn, and hence we
10789 need to save CSPR and PC as well. */
10790 if (15 != reg_dest)
10791 {
10792 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10793 arm_insn_r->reg_rec_count = 1;
10794 }
10795 else
10796 {
10797 record_buf[0] = reg_dest;
10798 record_buf[1] = ARM_PS_REGNUM;
10799 arm_insn_r->reg_rec_count = 2;
10800 }
10801 }
10802 else
10803 {
10804 if (! bits (arm_insn_r->arm_insn, 4, 11))
10805 {
10806 /* Store insn, register offset and register pre-indexed,
10807 register post-indexed. */
10808 /* Get Rm. */
10809 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10810 /* Get Rn. */
10811 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10812 regcache_raw_read_unsigned (reg_cache, reg_src1
10813 , &u_regval[0]);
10814 regcache_raw_read_unsigned (reg_cache, reg_src2
10815 , &u_regval[1]);
10816 if (15 == reg_src2)
10817 {
10818 /* If R15 was used as Rn, hence current PC+8. */
10819 /* Pre-indexed mode doesnt reach here ; illegal insn. */
10820 u_regval[0] = u_regval[0] + 8;
10821 }
10822 /* Calculate target store address, Rn +/- Rm, register offset. */
10823 /* U == 1. */
10824 if (bit (arm_insn_r->arm_insn, 23))
10825 {
10826 tgt_mem_addr = u_regval[0] + u_regval[1];
10827 }
10828 else
10829 {
10830 tgt_mem_addr = u_regval[1] - u_regval[0];
10831 }
10832
10833 switch (arm_insn_r->opcode)
10834 {
10835 /* STR. */
10836 case 8:
10837 case 12:
10838 /* STR. */
10839 case 9:
10840 case 13:
10841 /* STRT. */
10842 case 1:
10843 case 5:
10844 /* STR. */
10845 case 0:
10846 case 4:
10847 record_buf_mem[0] = 4;
10848 break;
10849
10850 /* STRB. */
10851 case 10:
10852 case 14:
10853 /* STRB. */
10854 case 11:
10855 case 15:
10856 /* STRBT. */
10857 case 3:
10858 case 7:
10859 /* STRB. */
10860 case 2:
10861 case 6:
10862 record_buf_mem[0] = 1;
10863 break;
10864
10865 default:
10866 gdb_assert_not_reached ("no decoding pattern found");
10867 break;
10868 }
10869 record_buf_mem[1] = tgt_mem_addr;
10870 arm_insn_r->mem_rec_count = 1;
10871
10872 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10873 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10874 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10875 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10876 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10877 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10878 )
10879 {
10880 /* Rn is going to be changed in pre-indexed mode and
10881 post-indexed mode as well. */
10882 record_buf[0] = reg_src2;
10883 arm_insn_r->reg_rec_count = 1;
10884 }
10885 }
10886 else
10887 {
10888 /* Store insn, scaled register offset; scaled pre-indexed. */
10889 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
10890 /* Get Rm. */
10891 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10892 /* Get Rn. */
10893 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10894 /* Get shift_imm. */
10895 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
10896 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10897 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
10898 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10899 /* Offset_12 used as shift. */
10900 switch (offset_12)
10901 {
10902 case 0:
10903 /* Offset_12 used as index. */
10904 offset_12 = u_regval[0] << shift_imm;
10905 break;
10906
10907 case 1:
10908 offset_12 = (!shift_imm)?0:u_regval[0] >> shift_imm;
10909 break;
10910
10911 case 2:
10912 if (!shift_imm)
10913 {
10914 if (bit (u_regval[0], 31))
10915 {
10916 offset_12 = 0xFFFFFFFF;
10917 }
10918 else
10919 {
10920 offset_12 = 0;
10921 }
10922 }
10923 else
10924 {
10925 /* This is arithmetic shift. */
10926 offset_12 = s_word >> shift_imm;
10927 }
10928 break;
10929
10930 case 3:
10931 if (!shift_imm)
10932 {
10933 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
10934 &u_regval[1]);
10935 /* Get C flag value and shift it by 31. */
10936 offset_12 = (((bit (u_regval[1], 29)) << 31) \
10937 | (u_regval[0]) >> 1);
10938 }
10939 else
10940 {
10941 offset_12 = (u_regval[0] >> shift_imm) \
10942 | (u_regval[0] <<
10943 (sizeof(uint32_t) - shift_imm));
10944 }
10945 break;
10946
10947 default:
10948 gdb_assert_not_reached ("no decoding pattern found");
10949 break;
10950 }
10951
10952 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10953 /* bit U set. */
10954 if (bit (arm_insn_r->arm_insn, 23))
10955 {
10956 tgt_mem_addr = u_regval[1] + offset_12;
10957 }
10958 else
10959 {
10960 tgt_mem_addr = u_regval[1] - offset_12;
10961 }
10962
10963 switch (arm_insn_r->opcode)
10964 {
10965 /* STR. */
10966 case 8:
10967 case 12:
10968 /* STR. */
10969 case 9:
10970 case 13:
10971 /* STRT. */
10972 case 1:
10973 case 5:
10974 /* STR. */
10975 case 0:
10976 case 4:
10977 record_buf_mem[0] = 4;
10978 break;
10979
10980 /* STRB. */
10981 case 10:
10982 case 14:
10983 /* STRB. */
10984 case 11:
10985 case 15:
10986 /* STRBT. */
10987 case 3:
10988 case 7:
10989 /* STRB. */
10990 case 2:
10991 case 6:
10992 record_buf_mem[0] = 1;
10993 break;
10994
10995 default:
10996 gdb_assert_not_reached ("no decoding pattern found");
10997 break;
10998 }
10999 record_buf_mem[1] = tgt_mem_addr;
11000 arm_insn_r->mem_rec_count = 1;
11001
11002 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11003 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11004 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11005 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11006 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11007 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11008 )
11009 {
11010 /* Rn is going to be changed in register scaled pre-indexed
11011 mode,and scaled post indexed mode. */
11012 record_buf[0] = reg_src2;
11013 arm_insn_r->reg_rec_count = 1;
11014 }
11015 }
11016 }
11017
11018 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11019 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11020 return 0;
11021 }
11022
11023 /* Handle ARM mode instructions with opcode 100. */
11024
11025 static int
11026 arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
11027 {
11028 struct regcache *reg_cache = arm_insn_r->regcache;
11029 uint32_t register_count = 0, register_bits;
11030 uint32_t reg_base, addr_mode;
11031 uint32_t record_buf[24], record_buf_mem[48];
11032 uint32_t wback;
11033 ULONGEST u_regval;
11034
11035 /* Fetch the list of registers. */
11036 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
11037 arm_insn_r->reg_rec_count = 0;
11038
11039 /* Fetch the base register that contains the address we are loading data
11040 to. */
11041 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
11042
11043 /* Calculate wback. */
11044 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
11045
11046 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11047 {
11048 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
11049
11050 /* Find out which registers are going to be loaded from memory. */
11051 while (register_bits)
11052 {
11053 if (register_bits & 0x00000001)
11054 record_buf[arm_insn_r->reg_rec_count++] = register_count;
11055 register_bits = register_bits >> 1;
11056 register_count++;
11057 }
11058
11059
11060 /* If wback is true, also save the base register, which is going to be
11061 written to. */
11062 if (wback)
11063 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11064
11065 /* Save the CPSR register. */
11066 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
11067 }
11068 else
11069 {
11070 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
11071
11072 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
11073
11074 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
11075
11076 /* Find out how many registers are going to be stored to memory. */
11077 while (register_bits)
11078 {
11079 if (register_bits & 0x00000001)
11080 register_count++;
11081 register_bits = register_bits >> 1;
11082 }
11083
11084 switch (addr_mode)
11085 {
11086 /* STMDA (STMED): Decrement after. */
11087 case 0:
11088 record_buf_mem[1] = (uint32_t) u_regval
11089 - register_count * ARM_INT_REGISTER_SIZE + 4;
11090 break;
11091 /* STM (STMIA, STMEA): Increment after. */
11092 case 1:
11093 record_buf_mem[1] = (uint32_t) u_regval;
11094 break;
11095 /* STMDB (STMFD): Decrement before. */
11096 case 2:
11097 record_buf_mem[1] = (uint32_t) u_regval
11098 - register_count * ARM_INT_REGISTER_SIZE;
11099 break;
11100 /* STMIB (STMFA): Increment before. */
11101 case 3:
11102 record_buf_mem[1] = (uint32_t) u_regval + ARM_INT_REGISTER_SIZE;
11103 break;
11104 default:
11105 gdb_assert_not_reached ("no decoding pattern found");
11106 break;
11107 }
11108
11109 record_buf_mem[0] = register_count * ARM_INT_REGISTER_SIZE;
11110 arm_insn_r->mem_rec_count = 1;
11111
11112 /* If wback is true, also save the base register, which is going to be
11113 written to. */
11114 if (wback)
11115 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11116 }
11117
11118 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11119 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11120 return 0;
11121 }
11122
11123 /* Handling opcode 101 insns. */
11124
11125 static int
11126 arm_record_b_bl (insn_decode_record *arm_insn_r)
11127 {
11128 uint32_t record_buf[8];
11129
11130 /* Handle B, BL, BLX(1) insns. */
11131 /* B simply branches so we do nothing here. */
11132 /* Note: BLX(1) doesnt fall here but instead it falls into
11133 extension space. */
11134 if (bit (arm_insn_r->arm_insn, 24))
11135 {
11136 record_buf[0] = ARM_LR_REGNUM;
11137 arm_insn_r->reg_rec_count = 1;
11138 }
11139
11140 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11141
11142 return 0;
11143 }
11144
11145 static int
11146 arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
11147 {
11148 printf_unfiltered (_("Process record does not support instruction "
11149 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11150 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11151
11152 return -1;
11153 }
11154
11155 /* Record handler for vector data transfer instructions. */
11156
11157 static int
11158 arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
11159 {
11160 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
11161 uint32_t record_buf[4];
11162
11163 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
11164 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
11165 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
11166 bit_l = bit (arm_insn_r->arm_insn, 20);
11167 bit_c = bit (arm_insn_r->arm_insn, 8);
11168
11169 /* Handle VMOV instruction. */
11170 if (bit_l && bit_c)
11171 {
11172 record_buf[0] = reg_t;
11173 arm_insn_r->reg_rec_count = 1;
11174 }
11175 else if (bit_l && !bit_c)
11176 {
11177 /* Handle VMOV instruction. */
11178 if (bits_a == 0x00)
11179 {
11180 record_buf[0] = reg_t;
11181 arm_insn_r->reg_rec_count = 1;
11182 }
11183 /* Handle VMRS instruction. */
11184 else if (bits_a == 0x07)
11185 {
11186 if (reg_t == 15)
11187 reg_t = ARM_PS_REGNUM;
11188
11189 record_buf[0] = reg_t;
11190 arm_insn_r->reg_rec_count = 1;
11191 }
11192 }
11193 else if (!bit_l && !bit_c)
11194 {
11195 /* Handle VMOV instruction. */
11196 if (bits_a == 0x00)
11197 {
11198 record_buf[0] = ARM_D0_REGNUM + reg_v;
11199
11200 arm_insn_r->reg_rec_count = 1;
11201 }
11202 /* Handle VMSR instruction. */
11203 else if (bits_a == 0x07)
11204 {
11205 record_buf[0] = ARM_FPSCR_REGNUM;
11206 arm_insn_r->reg_rec_count = 1;
11207 }
11208 }
11209 else if (!bit_l && bit_c)
11210 {
11211 /* Handle VMOV instruction. */
11212 if (!(bits_a & 0x04))
11213 {
11214 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
11215 + ARM_D0_REGNUM;
11216 arm_insn_r->reg_rec_count = 1;
11217 }
11218 /* Handle VDUP instruction. */
11219 else
11220 {
11221 if (bit (arm_insn_r->arm_insn, 21))
11222 {
11223 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11224 record_buf[0] = reg_v + ARM_D0_REGNUM;
11225 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
11226 arm_insn_r->reg_rec_count = 2;
11227 }
11228 else
11229 {
11230 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11231 record_buf[0] = reg_v + ARM_D0_REGNUM;
11232 arm_insn_r->reg_rec_count = 1;
11233 }
11234 }
11235 }
11236
11237 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11238 return 0;
11239 }
11240
11241 /* Record handler for extension register load/store instructions. */
11242
11243 static int
11244 arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
11245 {
11246 uint32_t opcode, single_reg;
11247 uint8_t op_vldm_vstm;
11248 uint32_t record_buf[8], record_buf_mem[128];
11249 ULONGEST u_regval = 0;
11250
11251 struct regcache *reg_cache = arm_insn_r->regcache;
11252
11253 opcode = bits (arm_insn_r->arm_insn, 20, 24);
11254 single_reg = !bit (arm_insn_r->arm_insn, 8);
11255 op_vldm_vstm = opcode & 0x1b;
11256
11257 /* Handle VMOV instructions. */
11258 if ((opcode & 0x1e) == 0x04)
11259 {
11260 if (bit (arm_insn_r->arm_insn, 20)) /* to_arm_registers bit 20? */
11261 {
11262 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11263 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11264 arm_insn_r->reg_rec_count = 2;
11265 }
11266 else
11267 {
11268 uint8_t reg_m = bits (arm_insn_r->arm_insn, 0, 3);
11269 uint8_t bit_m = bit (arm_insn_r->arm_insn, 5);
11270
11271 if (single_reg)
11272 {
11273 /* The first S register number m is REG_M:M (M is bit 5),
11274 the corresponding D register number is REG_M:M / 2, which
11275 is REG_M. */
11276 record_buf[arm_insn_r->reg_rec_count++] = ARM_D0_REGNUM + reg_m;
11277 /* The second S register number is REG_M:M + 1, the
11278 corresponding D register number is (REG_M:M + 1) / 2.
11279 IOW, if bit M is 1, the first and second S registers
11280 are mapped to different D registers, otherwise, they are
11281 in the same D register. */
11282 if (bit_m)
11283 {
11284 record_buf[arm_insn_r->reg_rec_count++]
11285 = ARM_D0_REGNUM + reg_m + 1;
11286 }
11287 }
11288 else
11289 {
11290 record_buf[0] = ((bit_m << 4) + reg_m + ARM_D0_REGNUM);
11291 arm_insn_r->reg_rec_count = 1;
11292 }
11293 }
11294 }
11295 /* Handle VSTM and VPUSH instructions. */
11296 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
11297 || op_vldm_vstm == 0x12)
11298 {
11299 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
11300 uint32_t memory_index = 0;
11301
11302 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11303 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11304 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11305 imm_off32 = imm_off8 << 2;
11306 memory_count = imm_off8;
11307
11308 if (bit (arm_insn_r->arm_insn, 23))
11309 start_address = u_regval;
11310 else
11311 start_address = u_regval - imm_off32;
11312
11313 if (bit (arm_insn_r->arm_insn, 21))
11314 {
11315 record_buf[0] = reg_rn;
11316 arm_insn_r->reg_rec_count = 1;
11317 }
11318
11319 while (memory_count > 0)
11320 {
11321 if (single_reg)
11322 {
11323 record_buf_mem[memory_index] = 4;
11324 record_buf_mem[memory_index + 1] = start_address;
11325 start_address = start_address + 4;
11326 memory_index = memory_index + 2;
11327 }
11328 else
11329 {
11330 record_buf_mem[memory_index] = 4;
11331 record_buf_mem[memory_index + 1] = start_address;
11332 record_buf_mem[memory_index + 2] = 4;
11333 record_buf_mem[memory_index + 3] = start_address + 4;
11334 start_address = start_address + 8;
11335 memory_index = memory_index + 4;
11336 }
11337 memory_count--;
11338 }
11339 arm_insn_r->mem_rec_count = (memory_index >> 1);
11340 }
11341 /* Handle VLDM instructions. */
11342 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
11343 || op_vldm_vstm == 0x13)
11344 {
11345 uint32_t reg_count, reg_vd;
11346 uint32_t reg_index = 0;
11347 uint32_t bit_d = bit (arm_insn_r->arm_insn, 22);
11348
11349 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11350 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
11351
11352 /* REG_VD is the first D register number. If the instruction
11353 loads memory to S registers (SINGLE_REG is TRUE), the register
11354 number is (REG_VD << 1 | bit D), so the corresponding D
11355 register number is (REG_VD << 1 | bit D) / 2 = REG_VD. */
11356 if (!single_reg)
11357 reg_vd = reg_vd | (bit_d << 4);
11358
11359 if (bit (arm_insn_r->arm_insn, 21) /* write back */)
11360 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
11361
11362 /* If the instruction loads memory to D register, REG_COUNT should
11363 be divided by 2, according to the ARM Architecture Reference
11364 Manual. If the instruction loads memory to S register, divide by
11365 2 as well because two S registers are mapped to D register. */
11366 reg_count = reg_count / 2;
11367 if (single_reg && bit_d)
11368 {
11369 /* Increase the register count if S register list starts from
11370 an odd number (bit d is one). */
11371 reg_count++;
11372 }
11373
11374 while (reg_count > 0)
11375 {
11376 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
11377 reg_count--;
11378 }
11379 arm_insn_r->reg_rec_count = reg_index;
11380 }
11381 /* VSTR Vector store register. */
11382 else if ((opcode & 0x13) == 0x10)
11383 {
11384 uint32_t start_address, reg_rn, imm_off32, imm_off8;
11385 uint32_t memory_index = 0;
11386
11387 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11388 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11389 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11390 imm_off32 = imm_off8 << 2;
11391
11392 if (bit (arm_insn_r->arm_insn, 23))
11393 start_address = u_regval + imm_off32;
11394 else
11395 start_address = u_regval - imm_off32;
11396
11397 if (single_reg)
11398 {
11399 record_buf_mem[memory_index] = 4;
11400 record_buf_mem[memory_index + 1] = start_address;
11401 arm_insn_r->mem_rec_count = 1;
11402 }
11403 else
11404 {
11405 record_buf_mem[memory_index] = 4;
11406 record_buf_mem[memory_index + 1] = start_address;
11407 record_buf_mem[memory_index + 2] = 4;
11408 record_buf_mem[memory_index + 3] = start_address + 4;
11409 arm_insn_r->mem_rec_count = 2;
11410 }
11411 }
11412 /* VLDR Vector load register. */
11413 else if ((opcode & 0x13) == 0x11)
11414 {
11415 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11416
11417 if (!single_reg)
11418 {
11419 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
11420 record_buf[0] = ARM_D0_REGNUM + reg_vd;
11421 }
11422 else
11423 {
11424 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
11425 /* Record register D rather than pseudo register S. */
11426 record_buf[0] = ARM_D0_REGNUM + reg_vd / 2;
11427 }
11428 arm_insn_r->reg_rec_count = 1;
11429 }
11430
11431 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11432 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11433 return 0;
11434 }
11435
11436 /* Record handler for arm/thumb mode VFP data processing instructions. */
11437
11438 static int
11439 arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
11440 {
11441 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
11442 uint32_t record_buf[4];
11443 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
11444 enum insn_types curr_insn_type = INSN_INV;
11445
11446 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11447 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
11448 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
11449 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
11450 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
11451 bit_d = bit (arm_insn_r->arm_insn, 22);
11452 /* Mask off the "D" bit. */
11453 opc1 = opc1 & ~0x04;
11454
11455 /* Handle VMLA, VMLS. */
11456 if (opc1 == 0x00)
11457 {
11458 if (bit (arm_insn_r->arm_insn, 10))
11459 {
11460 if (bit (arm_insn_r->arm_insn, 6))
11461 curr_insn_type = INSN_T0;
11462 else
11463 curr_insn_type = INSN_T1;
11464 }
11465 else
11466 {
11467 if (dp_op_sz)
11468 curr_insn_type = INSN_T1;
11469 else
11470 curr_insn_type = INSN_T2;
11471 }
11472 }
11473 /* Handle VNMLA, VNMLS, VNMUL. */
11474 else if (opc1 == 0x01)
11475 {
11476 if (dp_op_sz)
11477 curr_insn_type = INSN_T1;
11478 else
11479 curr_insn_type = INSN_T2;
11480 }
11481 /* Handle VMUL. */
11482 else if (opc1 == 0x02 && !(opc3 & 0x01))
11483 {
11484 if (bit (arm_insn_r->arm_insn, 10))
11485 {
11486 if (bit (arm_insn_r->arm_insn, 6))
11487 curr_insn_type = INSN_T0;
11488 else
11489 curr_insn_type = INSN_T1;
11490 }
11491 else
11492 {
11493 if (dp_op_sz)
11494 curr_insn_type = INSN_T1;
11495 else
11496 curr_insn_type = INSN_T2;
11497 }
11498 }
11499 /* Handle VADD, VSUB. */
11500 else if (opc1 == 0x03)
11501 {
11502 if (!bit (arm_insn_r->arm_insn, 9))
11503 {
11504 if (bit (arm_insn_r->arm_insn, 6))
11505 curr_insn_type = INSN_T0;
11506 else
11507 curr_insn_type = INSN_T1;
11508 }
11509 else
11510 {
11511 if (dp_op_sz)
11512 curr_insn_type = INSN_T1;
11513 else
11514 curr_insn_type = INSN_T2;
11515 }
11516 }
11517 /* Handle VDIV. */
11518 else if (opc1 == 0x08)
11519 {
11520 if (dp_op_sz)
11521 curr_insn_type = INSN_T1;
11522 else
11523 curr_insn_type = INSN_T2;
11524 }
11525 /* Handle all other vfp data processing instructions. */
11526 else if (opc1 == 0x0b)
11527 {
11528 /* Handle VMOV. */
11529 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
11530 {
11531 if (bit (arm_insn_r->arm_insn, 4))
11532 {
11533 if (bit (arm_insn_r->arm_insn, 6))
11534 curr_insn_type = INSN_T0;
11535 else
11536 curr_insn_type = INSN_T1;
11537 }
11538 else
11539 {
11540 if (dp_op_sz)
11541 curr_insn_type = INSN_T1;
11542 else
11543 curr_insn_type = INSN_T2;
11544 }
11545 }
11546 /* Handle VNEG and VABS. */
11547 else if ((opc2 == 0x01 && opc3 == 0x01)
11548 || (opc2 == 0x00 && opc3 == 0x03))
11549 {
11550 if (!bit (arm_insn_r->arm_insn, 11))
11551 {
11552 if (bit (arm_insn_r->arm_insn, 6))
11553 curr_insn_type = INSN_T0;
11554 else
11555 curr_insn_type = INSN_T1;
11556 }
11557 else
11558 {
11559 if (dp_op_sz)
11560 curr_insn_type = INSN_T1;
11561 else
11562 curr_insn_type = INSN_T2;
11563 }
11564 }
11565 /* Handle VSQRT. */
11566 else if (opc2 == 0x01 && opc3 == 0x03)
11567 {
11568 if (dp_op_sz)
11569 curr_insn_type = INSN_T1;
11570 else
11571 curr_insn_type = INSN_T2;
11572 }
11573 /* Handle VCVT. */
11574 else if (opc2 == 0x07 && opc3 == 0x03)
11575 {
11576 if (!dp_op_sz)
11577 curr_insn_type = INSN_T1;
11578 else
11579 curr_insn_type = INSN_T2;
11580 }
11581 else if (opc3 & 0x01)
11582 {
11583 /* Handle VCVT. */
11584 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
11585 {
11586 if (!bit (arm_insn_r->arm_insn, 18))
11587 curr_insn_type = INSN_T2;
11588 else
11589 {
11590 if (dp_op_sz)
11591 curr_insn_type = INSN_T1;
11592 else
11593 curr_insn_type = INSN_T2;
11594 }
11595 }
11596 /* Handle VCVT. */
11597 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
11598 {
11599 if (dp_op_sz)
11600 curr_insn_type = INSN_T1;
11601 else
11602 curr_insn_type = INSN_T2;
11603 }
11604 /* Handle VCVTB, VCVTT. */
11605 else if ((opc2 & 0x0e) == 0x02)
11606 curr_insn_type = INSN_T2;
11607 /* Handle VCMP, VCMPE. */
11608 else if ((opc2 & 0x0e) == 0x04)
11609 curr_insn_type = INSN_T3;
11610 }
11611 }
11612
11613 switch (curr_insn_type)
11614 {
11615 case INSN_T0:
11616 reg_vd = reg_vd | (bit_d << 4);
11617 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11618 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
11619 arm_insn_r->reg_rec_count = 2;
11620 break;
11621
11622 case INSN_T1:
11623 reg_vd = reg_vd | (bit_d << 4);
11624 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11625 arm_insn_r->reg_rec_count = 1;
11626 break;
11627
11628 case INSN_T2:
11629 reg_vd = (reg_vd << 1) | bit_d;
11630 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11631 arm_insn_r->reg_rec_count = 1;
11632 break;
11633
11634 case INSN_T3:
11635 record_buf[0] = ARM_FPSCR_REGNUM;
11636 arm_insn_r->reg_rec_count = 1;
11637 break;
11638
11639 default:
11640 gdb_assert_not_reached ("no decoding pattern found");
11641 break;
11642 }
11643
11644 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11645 return 0;
11646 }
11647
11648 /* Handling opcode 110 insns. */
11649
11650 static int
11651 arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
11652 {
11653 uint32_t op1, op1_ebit, coproc;
11654
11655 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11656 op1 = bits (arm_insn_r->arm_insn, 20, 25);
11657 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11658
11659 if ((coproc & 0x0e) == 0x0a)
11660 {
11661 /* Handle extension register ld/st instructions. */
11662 if (!(op1 & 0x20))
11663 return arm_record_exreg_ld_st_insn (arm_insn_r);
11664
11665 /* 64-bit transfers between arm core and extension registers. */
11666 if ((op1 & 0x3e) == 0x04)
11667 return arm_record_exreg_ld_st_insn (arm_insn_r);
11668 }
11669 else
11670 {
11671 /* Handle coprocessor ld/st instructions. */
11672 if (!(op1 & 0x3a))
11673 {
11674 /* Store. */
11675 if (!op1_ebit)
11676 return arm_record_unsupported_insn (arm_insn_r);
11677 else
11678 /* Load. */
11679 return arm_record_unsupported_insn (arm_insn_r);
11680 }
11681
11682 /* Move to coprocessor from two arm core registers. */
11683 if (op1 == 0x4)
11684 return arm_record_unsupported_insn (arm_insn_r);
11685
11686 /* Move to two arm core registers from coprocessor. */
11687 if (op1 == 0x5)
11688 {
11689 uint32_t reg_t[2];
11690
11691 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
11692 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
11693 arm_insn_r->reg_rec_count = 2;
11694
11695 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
11696 return 0;
11697 }
11698 }
11699 return arm_record_unsupported_insn (arm_insn_r);
11700 }
11701
11702 /* Handling opcode 111 insns. */
11703
11704 static int
11705 arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
11706 {
11707 uint32_t op, op1_ebit, coproc, bits_24_25;
11708 struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
11709 struct regcache *reg_cache = arm_insn_r->regcache;
11710
11711 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
11712 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11713 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11714 op = bit (arm_insn_r->arm_insn, 4);
11715 bits_24_25 = bits (arm_insn_r->arm_insn, 24, 25);
11716
11717 /* Handle arm SWI/SVC system call instructions. */
11718 if (bits_24_25 == 0x3)
11719 {
11720 if (tdep->arm_syscall_record != NULL)
11721 {
11722 ULONGEST svc_operand, svc_number;
11723
11724 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
11725
11726 if (svc_operand) /* OABI. */
11727 svc_number = svc_operand - 0x900000;
11728 else /* EABI. */
11729 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
11730
11731 return tdep->arm_syscall_record (reg_cache, svc_number);
11732 }
11733 else
11734 {
11735 printf_unfiltered (_("no syscall record support\n"));
11736 return -1;
11737 }
11738 }
11739 else if (bits_24_25 == 0x02)
11740 {
11741 if (op)
11742 {
11743 if ((coproc & 0x0e) == 0x0a)
11744 {
11745 /* 8, 16, and 32-bit transfer */
11746 return arm_record_vdata_transfer_insn (arm_insn_r);
11747 }
11748 else
11749 {
11750 if (op1_ebit)
11751 {
11752 /* MRC, MRC2 */
11753 uint32_t record_buf[1];
11754
11755 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11756 if (record_buf[0] == 15)
11757 record_buf[0] = ARM_PS_REGNUM;
11758
11759 arm_insn_r->reg_rec_count = 1;
11760 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
11761 record_buf);
11762 return 0;
11763 }
11764 else
11765 {
11766 /* MCR, MCR2 */
11767 return -1;
11768 }
11769 }
11770 }
11771 else
11772 {
11773 if ((coproc & 0x0e) == 0x0a)
11774 {
11775 /* VFP data-processing instructions. */
11776 return arm_record_vfp_data_proc_insn (arm_insn_r);
11777 }
11778 else
11779 {
11780 /* CDP, CDP2 */
11781 return -1;
11782 }
11783 }
11784 }
11785 else
11786 {
11787 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 25);
11788
11789 if (op1 == 5)
11790 {
11791 if ((coproc & 0x0e) != 0x0a)
11792 {
11793 /* MRRC, MRRC2 */
11794 return -1;
11795 }
11796 }
11797 else if (op1 == 4 || op1 == 5)
11798 {
11799 if ((coproc & 0x0e) == 0x0a)
11800 {
11801 /* 64-bit transfers between ARM core and extension */
11802 return -1;
11803 }
11804 else if (op1 == 4)
11805 {
11806 /* MCRR, MCRR2 */
11807 return -1;
11808 }
11809 }
11810 else if (op1 == 0 || op1 == 1)
11811 {
11812 /* UNDEFINED */
11813 return -1;
11814 }
11815 else
11816 {
11817 if ((coproc & 0x0e) == 0x0a)
11818 {
11819 /* Extension register load/store */
11820 }
11821 else
11822 {
11823 /* STC, STC2, LDC, LDC2 */
11824 }
11825 return -1;
11826 }
11827 }
11828
11829 return -1;
11830 }
11831
11832 /* Handling opcode 000 insns. */
11833
11834 static int
11835 thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
11836 {
11837 uint32_t record_buf[8];
11838 uint32_t reg_src1 = 0;
11839
11840 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11841
11842 record_buf[0] = ARM_PS_REGNUM;
11843 record_buf[1] = reg_src1;
11844 thumb_insn_r->reg_rec_count = 2;
11845
11846 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11847
11848 return 0;
11849 }
11850
11851
11852 /* Handling opcode 001 insns. */
11853
11854 static int
11855 thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
11856 {
11857 uint32_t record_buf[8];
11858 uint32_t reg_src1 = 0;
11859
11860 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11861
11862 record_buf[0] = ARM_PS_REGNUM;
11863 record_buf[1] = reg_src1;
11864 thumb_insn_r->reg_rec_count = 2;
11865
11866 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11867
11868 return 0;
11869 }
11870
11871 /* Handling opcode 010 insns. */
11872
11873 static int
11874 thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
11875 {
11876 struct regcache *reg_cache = thumb_insn_r->regcache;
11877 uint32_t record_buf[8], record_buf_mem[8];
11878
11879 uint32_t reg_src1 = 0, reg_src2 = 0;
11880 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
11881
11882 ULONGEST u_regval[2] = {0};
11883
11884 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
11885
11886 if (bit (thumb_insn_r->arm_insn, 12))
11887 {
11888 /* Handle load/store register offset. */
11889 uint32_t opB = bits (thumb_insn_r->arm_insn, 9, 11);
11890
11891 if (in_inclusive_range (opB, 4U, 7U))
11892 {
11893 /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH. */
11894 reg_src1 = bits (thumb_insn_r->arm_insn,0, 2);
11895 record_buf[0] = reg_src1;
11896 thumb_insn_r->reg_rec_count = 1;
11897 }
11898 else if (in_inclusive_range (opB, 0U, 2U))
11899 {
11900 /* STR(2), STRB(2), STRH(2) . */
11901 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11902 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
11903 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11904 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11905 if (0 == opB)
11906 record_buf_mem[0] = 4; /* STR (2). */
11907 else if (2 == opB)
11908 record_buf_mem[0] = 1; /* STRB (2). */
11909 else if (1 == opB)
11910 record_buf_mem[0] = 2; /* STRH (2). */
11911 record_buf_mem[1] = u_regval[0] + u_regval[1];
11912 thumb_insn_r->mem_rec_count = 1;
11913 }
11914 }
11915 else if (bit (thumb_insn_r->arm_insn, 11))
11916 {
11917 /* Handle load from literal pool. */
11918 /* LDR(3). */
11919 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11920 record_buf[0] = reg_src1;
11921 thumb_insn_r->reg_rec_count = 1;
11922 }
11923 else if (opcode1)
11924 {
11925 /* Special data instructions and branch and exchange */
11926 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
11927 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
11928 if ((3 == opcode2) && (!opcode3))
11929 {
11930 /* Branch with exchange. */
11931 record_buf[0] = ARM_PS_REGNUM;
11932 thumb_insn_r->reg_rec_count = 1;
11933 }
11934 else
11935 {
11936 /* Format 8; special data processing insns. */
11937 record_buf[0] = ARM_PS_REGNUM;
11938 record_buf[1] = (bit (thumb_insn_r->arm_insn, 7) << 3
11939 | bits (thumb_insn_r->arm_insn, 0, 2));
11940 thumb_insn_r->reg_rec_count = 2;
11941 }
11942 }
11943 else
11944 {
11945 /* Format 5; data processing insns. */
11946 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11947 if (bit (thumb_insn_r->arm_insn, 7))
11948 {
11949 reg_src1 = reg_src1 + 8;
11950 }
11951 record_buf[0] = ARM_PS_REGNUM;
11952 record_buf[1] = reg_src1;
11953 thumb_insn_r->reg_rec_count = 2;
11954 }
11955
11956 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11957 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11958 record_buf_mem);
11959
11960 return 0;
11961 }
11962
11963 /* Handling opcode 001 insns. */
11964
11965 static int
11966 thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
11967 {
11968 struct regcache *reg_cache = thumb_insn_r->regcache;
11969 uint32_t record_buf[8], record_buf_mem[8];
11970
11971 uint32_t reg_src1 = 0;
11972 uint32_t opcode = 0, immed_5 = 0;
11973
11974 ULONGEST u_regval = 0;
11975
11976 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11977
11978 if (opcode)
11979 {
11980 /* LDR(1). */
11981 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11982 record_buf[0] = reg_src1;
11983 thumb_insn_r->reg_rec_count = 1;
11984 }
11985 else
11986 {
11987 /* STR(1). */
11988 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11989 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11990 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11991 record_buf_mem[0] = 4;
11992 record_buf_mem[1] = u_regval + (immed_5 * 4);
11993 thumb_insn_r->mem_rec_count = 1;
11994 }
11995
11996 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11997 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11998 record_buf_mem);
11999
12000 return 0;
12001 }
12002
12003 /* Handling opcode 100 insns. */
12004
12005 static int
12006 thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
12007 {
12008 struct regcache *reg_cache = thumb_insn_r->regcache;
12009 uint32_t record_buf[8], record_buf_mem[8];
12010
12011 uint32_t reg_src1 = 0;
12012 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
12013
12014 ULONGEST u_regval = 0;
12015
12016 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12017
12018 if (3 == opcode)
12019 {
12020 /* LDR(4). */
12021 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12022 record_buf[0] = reg_src1;
12023 thumb_insn_r->reg_rec_count = 1;
12024 }
12025 else if (1 == opcode)
12026 {
12027 /* LDRH(1). */
12028 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12029 record_buf[0] = reg_src1;
12030 thumb_insn_r->reg_rec_count = 1;
12031 }
12032 else if (2 == opcode)
12033 {
12034 /* STR(3). */
12035 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
12036 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12037 record_buf_mem[0] = 4;
12038 record_buf_mem[1] = u_regval + (immed_8 * 4);
12039 thumb_insn_r->mem_rec_count = 1;
12040 }
12041 else if (0 == opcode)
12042 {
12043 /* STRH(1). */
12044 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
12045 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12046 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12047 record_buf_mem[0] = 2;
12048 record_buf_mem[1] = u_regval + (immed_5 * 2);
12049 thumb_insn_r->mem_rec_count = 1;
12050 }
12051
12052 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12053 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12054 record_buf_mem);
12055
12056 return 0;
12057 }
12058
12059 /* Handling opcode 101 insns. */
12060
12061 static int
12062 thumb_record_misc (insn_decode_record *thumb_insn_r)
12063 {
12064 struct regcache *reg_cache = thumb_insn_r->regcache;
12065
12066 uint32_t opcode = 0;
12067 uint32_t register_bits = 0, register_count = 0;
12068 uint32_t index = 0, start_address = 0;
12069 uint32_t record_buf[24], record_buf_mem[48];
12070 uint32_t reg_src1;
12071
12072 ULONGEST u_regval = 0;
12073
12074 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12075
12076 if (opcode == 0 || opcode == 1)
12077 {
12078 /* ADR and ADD (SP plus immediate) */
12079
12080 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12081 record_buf[0] = reg_src1;
12082 thumb_insn_r->reg_rec_count = 1;
12083 }
12084 else
12085 {
12086 /* Miscellaneous 16-bit instructions */
12087 uint32_t opcode2 = bits (thumb_insn_r->arm_insn, 8, 11);
12088
12089 switch (opcode2)
12090 {
12091 case 6:
12092 /* SETEND and CPS */
12093 break;
12094 case 0:
12095 /* ADD/SUB (SP plus immediate) */
12096 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12097 record_buf[0] = ARM_SP_REGNUM;
12098 thumb_insn_r->reg_rec_count = 1;
12099 break;
12100 case 1: /* fall through */
12101 case 3: /* fall through */
12102 case 9: /* fall through */
12103 case 11:
12104 /* CBNZ, CBZ */
12105 break;
12106 case 2:
12107 /* SXTH, SXTB, UXTH, UXTB */
12108 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
12109 thumb_insn_r->reg_rec_count = 1;
12110 break;
12111 case 4: /* fall through */
12112 case 5:
12113 /* PUSH. */
12114 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12115 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12116 while (register_bits)
12117 {
12118 if (register_bits & 0x00000001)
12119 register_count++;
12120 register_bits = register_bits >> 1;
12121 }
12122 start_address = u_regval - \
12123 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
12124 thumb_insn_r->mem_rec_count = register_count;
12125 while (register_count)
12126 {
12127 record_buf_mem[(register_count * 2) - 1] = start_address;
12128 record_buf_mem[(register_count * 2) - 2] = 4;
12129 start_address = start_address + 4;
12130 register_count--;
12131 }
12132 record_buf[0] = ARM_SP_REGNUM;
12133 thumb_insn_r->reg_rec_count = 1;
12134 break;
12135 case 10:
12136 /* REV, REV16, REVSH */
12137 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
12138 thumb_insn_r->reg_rec_count = 1;
12139 break;
12140 case 12: /* fall through */
12141 case 13:
12142 /* POP. */
12143 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12144 while (register_bits)
12145 {
12146 if (register_bits & 0x00000001)
12147 record_buf[index++] = register_count;
12148 register_bits = register_bits >> 1;
12149 register_count++;
12150 }
12151 record_buf[index++] = ARM_PS_REGNUM;
12152 record_buf[index++] = ARM_SP_REGNUM;
12153 thumb_insn_r->reg_rec_count = index;
12154 break;
12155 case 0xe:
12156 /* BKPT insn. */
12157 /* Handle enhanced software breakpoint insn, BKPT. */
12158 /* CPSR is changed to be executed in ARM state, disabling normal
12159 interrupts, entering abort mode. */
12160 /* According to high vector configuration PC is set. */
12161 /* User hits breakpoint and type reverse, in that case, we need to go back with
12162 previous CPSR and Program Counter. */
12163 record_buf[0] = ARM_PS_REGNUM;
12164 record_buf[1] = ARM_LR_REGNUM;
12165 thumb_insn_r->reg_rec_count = 2;
12166 /* We need to save SPSR value, which is not yet done. */
12167 printf_unfiltered (_("Process record does not support instruction "
12168 "0x%0x at address %s.\n"),
12169 thumb_insn_r->arm_insn,
12170 paddress (thumb_insn_r->gdbarch,
12171 thumb_insn_r->this_addr));
12172 return -1;
12173
12174 case 0xf:
12175 /* If-Then, and hints */
12176 break;
12177 default:
12178 return -1;
12179 };
12180 }
12181
12182 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12183 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12184 record_buf_mem);
12185
12186 return 0;
12187 }
12188
12189 /* Handling opcode 110 insns. */
12190
12191 static int
12192 thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
12193 {
12194 struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
12195 struct regcache *reg_cache = thumb_insn_r->regcache;
12196
12197 uint32_t ret = 0; /* function return value: -1:record failure ; 0:success */
12198 uint32_t reg_src1 = 0;
12199 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
12200 uint32_t index = 0, start_address = 0;
12201 uint32_t record_buf[24], record_buf_mem[48];
12202
12203 ULONGEST u_regval = 0;
12204
12205 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12206 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
12207
12208 if (1 == opcode2)
12209 {
12210
12211 /* LDMIA. */
12212 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12213 /* Get Rn. */
12214 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12215 while (register_bits)
12216 {
12217 if (register_bits & 0x00000001)
12218 record_buf[index++] = register_count;
12219 register_bits = register_bits >> 1;
12220 register_count++;
12221 }
12222 record_buf[index++] = reg_src1;
12223 thumb_insn_r->reg_rec_count = index;
12224 }
12225 else if (0 == opcode2)
12226 {
12227 /* It handles both STMIA. */
12228 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12229 /* Get Rn. */
12230 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12231 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12232 while (register_bits)
12233 {
12234 if (register_bits & 0x00000001)
12235 register_count++;
12236 register_bits = register_bits >> 1;
12237 }
12238 start_address = u_regval;
12239 thumb_insn_r->mem_rec_count = register_count;
12240 while (register_count)
12241 {
12242 record_buf_mem[(register_count * 2) - 1] = start_address;
12243 record_buf_mem[(register_count * 2) - 2] = 4;
12244 start_address = start_address + 4;
12245 register_count--;
12246 }
12247 }
12248 else if (0x1F == opcode1)
12249 {
12250 /* Handle arm syscall insn. */
12251 if (tdep->arm_syscall_record != NULL)
12252 {
12253 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
12254 ret = tdep->arm_syscall_record (reg_cache, u_regval);
12255 }
12256 else
12257 {
12258 printf_unfiltered (_("no syscall record support\n"));
12259 return -1;
12260 }
12261 }
12262
12263 /* B (1), conditional branch is automatically taken care in process_record,
12264 as PC is saved there. */
12265
12266 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12267 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12268 record_buf_mem);
12269
12270 return ret;
12271 }
12272
12273 /* Handling opcode 111 insns. */
12274
12275 static int
12276 thumb_record_branch (insn_decode_record *thumb_insn_r)
12277 {
12278 uint32_t record_buf[8];
12279 uint32_t bits_h = 0;
12280
12281 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
12282
12283 if (2 == bits_h || 3 == bits_h)
12284 {
12285 /* BL */
12286 record_buf[0] = ARM_LR_REGNUM;
12287 thumb_insn_r->reg_rec_count = 1;
12288 }
12289 else if (1 == bits_h)
12290 {
12291 /* BLX(1). */
12292 record_buf[0] = ARM_PS_REGNUM;
12293 record_buf[1] = ARM_LR_REGNUM;
12294 thumb_insn_r->reg_rec_count = 2;
12295 }
12296
12297 /* B(2) is automatically taken care in process_record, as PC is
12298 saved there. */
12299
12300 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12301
12302 return 0;
12303 }
12304
12305 /* Handler for thumb2 load/store multiple instructions. */
12306
12307 static int
12308 thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
12309 {
12310 struct regcache *reg_cache = thumb2_insn_r->regcache;
12311
12312 uint32_t reg_rn, op;
12313 uint32_t register_bits = 0, register_count = 0;
12314 uint32_t index = 0, start_address = 0;
12315 uint32_t record_buf[24], record_buf_mem[48];
12316
12317 ULONGEST u_regval = 0;
12318
12319 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12320 op = bits (thumb2_insn_r->arm_insn, 23, 24);
12321
12322 if (0 == op || 3 == op)
12323 {
12324 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12325 {
12326 /* Handle RFE instruction. */
12327 record_buf[0] = ARM_PS_REGNUM;
12328 thumb2_insn_r->reg_rec_count = 1;
12329 }
12330 else
12331 {
12332 /* Handle SRS instruction after reading banked SP. */
12333 return arm_record_unsupported_insn (thumb2_insn_r);
12334 }
12335 }
12336 else if (1 == op || 2 == op)
12337 {
12338 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12339 {
12340 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
12341 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12342 while (register_bits)
12343 {
12344 if (register_bits & 0x00000001)
12345 record_buf[index++] = register_count;
12346
12347 register_count++;
12348 register_bits = register_bits >> 1;
12349 }
12350 record_buf[index++] = reg_rn;
12351 record_buf[index++] = ARM_PS_REGNUM;
12352 thumb2_insn_r->reg_rec_count = index;
12353 }
12354 else
12355 {
12356 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
12357 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12358 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12359 while (register_bits)
12360 {
12361 if (register_bits & 0x00000001)
12362 register_count++;
12363
12364 register_bits = register_bits >> 1;
12365 }
12366
12367 if (1 == op)
12368 {
12369 /* Start address calculation for LDMDB/LDMEA. */
12370 start_address = u_regval;
12371 }
12372 else if (2 == op)
12373 {
12374 /* Start address calculation for LDMDB/LDMEA. */
12375 start_address = u_regval - register_count * 4;
12376 }
12377
12378 thumb2_insn_r->mem_rec_count = register_count;
12379 while (register_count)
12380 {
12381 record_buf_mem[register_count * 2 - 1] = start_address;
12382 record_buf_mem[register_count * 2 - 2] = 4;
12383 start_address = start_address + 4;
12384 register_count--;
12385 }
12386 record_buf[0] = reg_rn;
12387 record_buf[1] = ARM_PS_REGNUM;
12388 thumb2_insn_r->reg_rec_count = 2;
12389 }
12390 }
12391
12392 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12393 record_buf_mem);
12394 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12395 record_buf);
12396 return ARM_RECORD_SUCCESS;
12397 }
12398
12399 /* Handler for thumb2 load/store (dual/exclusive) and table branch
12400 instructions. */
12401
12402 static int
12403 thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
12404 {
12405 struct regcache *reg_cache = thumb2_insn_r->regcache;
12406
12407 uint32_t reg_rd, reg_rn, offset_imm;
12408 uint32_t reg_dest1, reg_dest2;
12409 uint32_t address, offset_addr;
12410 uint32_t record_buf[8], record_buf_mem[8];
12411 uint32_t op1, op2, op3;
12412
12413 ULONGEST u_regval[2];
12414
12415 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
12416 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
12417 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
12418
12419 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12420 {
12421 if(!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
12422 {
12423 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
12424 record_buf[0] = reg_dest1;
12425 record_buf[1] = ARM_PS_REGNUM;
12426 thumb2_insn_r->reg_rec_count = 2;
12427 }
12428
12429 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
12430 {
12431 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12432 record_buf[2] = reg_dest2;
12433 thumb2_insn_r->reg_rec_count = 3;
12434 }
12435 }
12436 else
12437 {
12438 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12439 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12440
12441 if (0 == op1 && 0 == op2)
12442 {
12443 /* Handle STREX. */
12444 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12445 address = u_regval[0] + (offset_imm * 4);
12446 record_buf_mem[0] = 4;
12447 record_buf_mem[1] = address;
12448 thumb2_insn_r->mem_rec_count = 1;
12449 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12450 record_buf[0] = reg_rd;
12451 thumb2_insn_r->reg_rec_count = 1;
12452 }
12453 else if (1 == op1 && 0 == op2)
12454 {
12455 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12456 record_buf[0] = reg_rd;
12457 thumb2_insn_r->reg_rec_count = 1;
12458 address = u_regval[0];
12459 record_buf_mem[1] = address;
12460
12461 if (4 == op3)
12462 {
12463 /* Handle STREXB. */
12464 record_buf_mem[0] = 1;
12465 thumb2_insn_r->mem_rec_count = 1;
12466 }
12467 else if (5 == op3)
12468 {
12469 /* Handle STREXH. */
12470 record_buf_mem[0] = 2 ;
12471 thumb2_insn_r->mem_rec_count = 1;
12472 }
12473 else if (7 == op3)
12474 {
12475 /* Handle STREXD. */
12476 address = u_regval[0];
12477 record_buf_mem[0] = 4;
12478 record_buf_mem[2] = 4;
12479 record_buf_mem[3] = address + 4;
12480 thumb2_insn_r->mem_rec_count = 2;
12481 }
12482 }
12483 else
12484 {
12485 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12486
12487 if (bit (thumb2_insn_r->arm_insn, 24))
12488 {
12489 if (bit (thumb2_insn_r->arm_insn, 23))
12490 offset_addr = u_regval[0] + (offset_imm * 4);
12491 else
12492 offset_addr = u_regval[0] - (offset_imm * 4);
12493
12494 address = offset_addr;
12495 }
12496 else
12497 address = u_regval[0];
12498
12499 record_buf_mem[0] = 4;
12500 record_buf_mem[1] = address;
12501 record_buf_mem[2] = 4;
12502 record_buf_mem[3] = address + 4;
12503 thumb2_insn_r->mem_rec_count = 2;
12504 record_buf[0] = reg_rn;
12505 thumb2_insn_r->reg_rec_count = 1;
12506 }
12507 }
12508
12509 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12510 record_buf);
12511 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12512 record_buf_mem);
12513 return ARM_RECORD_SUCCESS;
12514 }
12515
12516 /* Handler for thumb2 data processing (shift register and modified immediate)
12517 instructions. */
12518
12519 static int
12520 thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
12521 {
12522 uint32_t reg_rd, op;
12523 uint32_t record_buf[8];
12524
12525 op = bits (thumb2_insn_r->arm_insn, 21, 24);
12526 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12527
12528 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
12529 {
12530 record_buf[0] = ARM_PS_REGNUM;
12531 thumb2_insn_r->reg_rec_count = 1;
12532 }
12533 else
12534 {
12535 record_buf[0] = reg_rd;
12536 record_buf[1] = ARM_PS_REGNUM;
12537 thumb2_insn_r->reg_rec_count = 2;
12538 }
12539
12540 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12541 record_buf);
12542 return ARM_RECORD_SUCCESS;
12543 }
12544
12545 /* Generic handler for thumb2 instructions which effect destination and PS
12546 registers. */
12547
12548 static int
12549 thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
12550 {
12551 uint32_t reg_rd;
12552 uint32_t record_buf[8];
12553
12554 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12555
12556 record_buf[0] = reg_rd;
12557 record_buf[1] = ARM_PS_REGNUM;
12558 thumb2_insn_r->reg_rec_count = 2;
12559
12560 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12561 record_buf);
12562 return ARM_RECORD_SUCCESS;
12563 }
12564
12565 /* Handler for thumb2 branch and miscellaneous control instructions. */
12566
12567 static int
12568 thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
12569 {
12570 uint32_t op, op1, op2;
12571 uint32_t record_buf[8];
12572
12573 op = bits (thumb2_insn_r->arm_insn, 20, 26);
12574 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
12575 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12576
12577 /* Handle MSR insn. */
12578 if (!(op1 & 0x2) && 0x38 == op)
12579 {
12580 if (!(op2 & 0x3))
12581 {
12582 /* CPSR is going to be changed. */
12583 record_buf[0] = ARM_PS_REGNUM;
12584 thumb2_insn_r->reg_rec_count = 1;
12585 }
12586 else
12587 {
12588 arm_record_unsupported_insn(thumb2_insn_r);
12589 return -1;
12590 }
12591 }
12592 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
12593 {
12594 /* BLX. */
12595 record_buf[0] = ARM_PS_REGNUM;
12596 record_buf[1] = ARM_LR_REGNUM;
12597 thumb2_insn_r->reg_rec_count = 2;
12598 }
12599
12600 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12601 record_buf);
12602 return ARM_RECORD_SUCCESS;
12603 }
12604
12605 /* Handler for thumb2 store single data item instructions. */
12606
12607 static int
12608 thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
12609 {
12610 struct regcache *reg_cache = thumb2_insn_r->regcache;
12611
12612 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
12613 uint32_t address, offset_addr;
12614 uint32_t record_buf[8], record_buf_mem[8];
12615 uint32_t op1, op2;
12616
12617 ULONGEST u_regval[2];
12618
12619 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
12620 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
12621 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12622 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12623
12624 if (bit (thumb2_insn_r->arm_insn, 23))
12625 {
12626 /* T2 encoding. */
12627 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
12628 offset_addr = u_regval[0] + offset_imm;
12629 address = offset_addr;
12630 }
12631 else
12632 {
12633 /* T3 encoding. */
12634 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
12635 {
12636 /* Handle STRB (register). */
12637 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
12638 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
12639 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
12640 offset_addr = u_regval[1] << shift_imm;
12641 address = u_regval[0] + offset_addr;
12642 }
12643 else
12644 {
12645 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12646 if (bit (thumb2_insn_r->arm_insn, 10))
12647 {
12648 if (bit (thumb2_insn_r->arm_insn, 9))
12649 offset_addr = u_regval[0] + offset_imm;
12650 else
12651 offset_addr = u_regval[0] - offset_imm;
12652
12653 address = offset_addr;
12654 }
12655 else
12656 address = u_regval[0];
12657 }
12658 }
12659
12660 switch (op1)
12661 {
12662 /* Store byte instructions. */
12663 case 4:
12664 case 0:
12665 record_buf_mem[0] = 1;
12666 break;
12667 /* Store half word instructions. */
12668 case 1:
12669 case 5:
12670 record_buf_mem[0] = 2;
12671 break;
12672 /* Store word instructions. */
12673 case 2:
12674 case 6:
12675 record_buf_mem[0] = 4;
12676 break;
12677
12678 default:
12679 gdb_assert_not_reached ("no decoding pattern found");
12680 break;
12681 }
12682
12683 record_buf_mem[1] = address;
12684 thumb2_insn_r->mem_rec_count = 1;
12685 record_buf[0] = reg_rn;
12686 thumb2_insn_r->reg_rec_count = 1;
12687
12688 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12689 record_buf);
12690 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12691 record_buf_mem);
12692 return ARM_RECORD_SUCCESS;
12693 }
12694
12695 /* Handler for thumb2 load memory hints instructions. */
12696
12697 static int
12698 thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
12699 {
12700 uint32_t record_buf[8];
12701 uint32_t reg_rt, reg_rn;
12702
12703 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
12704 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12705
12706 if (ARM_PC_REGNUM != reg_rt)
12707 {
12708 record_buf[0] = reg_rt;
12709 record_buf[1] = reg_rn;
12710 record_buf[2] = ARM_PS_REGNUM;
12711 thumb2_insn_r->reg_rec_count = 3;
12712
12713 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12714 record_buf);
12715 return ARM_RECORD_SUCCESS;
12716 }
12717
12718 return ARM_RECORD_FAILURE;
12719 }
12720
12721 /* Handler for thumb2 load word instructions. */
12722
12723 static int
12724 thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
12725 {
12726 uint32_t record_buf[8];
12727
12728 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
12729 record_buf[1] = ARM_PS_REGNUM;
12730 thumb2_insn_r->reg_rec_count = 2;
12731
12732 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12733 record_buf);
12734 return ARM_RECORD_SUCCESS;
12735 }
12736
12737 /* Handler for thumb2 long multiply, long multiply accumulate, and
12738 divide instructions. */
12739
12740 static int
12741 thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
12742 {
12743 uint32_t opcode1 = 0, opcode2 = 0;
12744 uint32_t record_buf[8];
12745
12746 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
12747 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
12748
12749 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
12750 {
12751 /* Handle SMULL, UMULL, SMULAL. */
12752 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
12753 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12754 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12755 record_buf[2] = ARM_PS_REGNUM;
12756 thumb2_insn_r->reg_rec_count = 3;
12757 }
12758 else if (1 == opcode1 || 3 == opcode2)
12759 {
12760 /* Handle SDIV and UDIV. */
12761 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12762 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12763 record_buf[2] = ARM_PS_REGNUM;
12764 thumb2_insn_r->reg_rec_count = 3;
12765 }
12766 else
12767 return ARM_RECORD_FAILURE;
12768
12769 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12770 record_buf);
12771 return ARM_RECORD_SUCCESS;
12772 }
12773
12774 /* Record handler for thumb32 coprocessor instructions. */
12775
12776 static int
12777 thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
12778 {
12779 if (bit (thumb2_insn_r->arm_insn, 25))
12780 return arm_record_coproc_data_proc (thumb2_insn_r);
12781 else
12782 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
12783 }
12784
12785 /* Record handler for advance SIMD structure load/store instructions. */
12786
12787 static int
12788 thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
12789 {
12790 struct regcache *reg_cache = thumb2_insn_r->regcache;
12791 uint32_t l_bit, a_bit, b_bits;
12792 uint32_t record_buf[128], record_buf_mem[128];
12793 uint32_t reg_rn, reg_vd, address, f_elem;
12794 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
12795 uint8_t f_ebytes;
12796
12797 l_bit = bit (thumb2_insn_r->arm_insn, 21);
12798 a_bit = bit (thumb2_insn_r->arm_insn, 23);
12799 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
12800 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12801 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
12802 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
12803 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
12804 f_elem = 8 / f_ebytes;
12805
12806 if (!l_bit)
12807 {
12808 ULONGEST u_regval = 0;
12809 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12810 address = u_regval;
12811
12812 if (!a_bit)
12813 {
12814 /* Handle VST1. */
12815 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12816 {
12817 if (b_bits == 0x07)
12818 bf_regs = 1;
12819 else if (b_bits == 0x0a)
12820 bf_regs = 2;
12821 else if (b_bits == 0x06)
12822 bf_regs = 3;
12823 else if (b_bits == 0x02)
12824 bf_regs = 4;
12825 else
12826 bf_regs = 0;
12827
12828 for (index_r = 0; index_r < bf_regs; index_r++)
12829 {
12830 for (index_e = 0; index_e < f_elem; index_e++)
12831 {
12832 record_buf_mem[index_m++] = f_ebytes;
12833 record_buf_mem[index_m++] = address;
12834 address = address + f_ebytes;
12835 thumb2_insn_r->mem_rec_count += 1;
12836 }
12837 }
12838 }
12839 /* Handle VST2. */
12840 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12841 {
12842 if (b_bits == 0x09 || b_bits == 0x08)
12843 bf_regs = 1;
12844 else if (b_bits == 0x03)
12845 bf_regs = 2;
12846 else
12847 bf_regs = 0;
12848
12849 for (index_r = 0; index_r < bf_regs; index_r++)
12850 for (index_e = 0; index_e < f_elem; index_e++)
12851 {
12852 for (loop_t = 0; loop_t < 2; loop_t++)
12853 {
12854 record_buf_mem[index_m++] = f_ebytes;
12855 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12856 thumb2_insn_r->mem_rec_count += 1;
12857 }
12858 address = address + (2 * f_ebytes);
12859 }
12860 }
12861 /* Handle VST3. */
12862 else if ((b_bits & 0x0e) == 0x04)
12863 {
12864 for (index_e = 0; index_e < f_elem; index_e++)
12865 {
12866 for (loop_t = 0; loop_t < 3; loop_t++)
12867 {
12868 record_buf_mem[index_m++] = f_ebytes;
12869 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12870 thumb2_insn_r->mem_rec_count += 1;
12871 }
12872 address = address + (3 * f_ebytes);
12873 }
12874 }
12875 /* Handle VST4. */
12876 else if (!(b_bits & 0x0e))
12877 {
12878 for (index_e = 0; index_e < f_elem; index_e++)
12879 {
12880 for (loop_t = 0; loop_t < 4; loop_t++)
12881 {
12882 record_buf_mem[index_m++] = f_ebytes;
12883 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12884 thumb2_insn_r->mem_rec_count += 1;
12885 }
12886 address = address + (4 * f_ebytes);
12887 }
12888 }
12889 }
12890 else
12891 {
12892 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
12893
12894 if (bft_size == 0x00)
12895 f_ebytes = 1;
12896 else if (bft_size == 0x01)
12897 f_ebytes = 2;
12898 else if (bft_size == 0x02)
12899 f_ebytes = 4;
12900 else
12901 f_ebytes = 0;
12902
12903 /* Handle VST1. */
12904 if (!(b_bits & 0x0b) || b_bits == 0x08)
12905 thumb2_insn_r->mem_rec_count = 1;
12906 /* Handle VST2. */
12907 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
12908 thumb2_insn_r->mem_rec_count = 2;
12909 /* Handle VST3. */
12910 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
12911 thumb2_insn_r->mem_rec_count = 3;
12912 /* Handle VST4. */
12913 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
12914 thumb2_insn_r->mem_rec_count = 4;
12915
12916 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
12917 {
12918 record_buf_mem[index_m] = f_ebytes;
12919 record_buf_mem[index_m] = address + (index_m * f_ebytes);
12920 }
12921 }
12922 }
12923 else
12924 {
12925 if (!a_bit)
12926 {
12927 /* Handle VLD1. */
12928 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12929 thumb2_insn_r->reg_rec_count = 1;
12930 /* Handle VLD2. */
12931 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12932 thumb2_insn_r->reg_rec_count = 2;
12933 /* Handle VLD3. */
12934 else if ((b_bits & 0x0e) == 0x04)
12935 thumb2_insn_r->reg_rec_count = 3;
12936 /* Handle VLD4. */
12937 else if (!(b_bits & 0x0e))
12938 thumb2_insn_r->reg_rec_count = 4;
12939 }
12940 else
12941 {
12942 /* Handle VLD1. */
12943 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
12944 thumb2_insn_r->reg_rec_count = 1;
12945 /* Handle VLD2. */
12946 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
12947 thumb2_insn_r->reg_rec_count = 2;
12948 /* Handle VLD3. */
12949 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
12950 thumb2_insn_r->reg_rec_count = 3;
12951 /* Handle VLD4. */
12952 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
12953 thumb2_insn_r->reg_rec_count = 4;
12954
12955 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
12956 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
12957 }
12958 }
12959
12960 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
12961 {
12962 record_buf[index_r] = reg_rn;
12963 thumb2_insn_r->reg_rec_count += 1;
12964 }
12965
12966 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12967 record_buf);
12968 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12969 record_buf_mem);
12970 return 0;
12971 }
12972
12973 /* Decodes thumb2 instruction type and invokes its record handler. */
12974
12975 static unsigned int
12976 thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
12977 {
12978 uint32_t op, op1, op2;
12979
12980 op = bit (thumb2_insn_r->arm_insn, 15);
12981 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
12982 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
12983
12984 if (op1 == 0x01)
12985 {
12986 if (!(op2 & 0x64 ))
12987 {
12988 /* Load/store multiple instruction. */
12989 return thumb2_record_ld_st_multiple (thumb2_insn_r);
12990 }
12991 else if ((op2 & 0x64) == 0x4)
12992 {
12993 /* Load/store (dual/exclusive) and table branch instruction. */
12994 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
12995 }
12996 else if ((op2 & 0x60) == 0x20)
12997 {
12998 /* Data-processing (shifted register). */
12999 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
13000 }
13001 else if (op2 & 0x40)
13002 {
13003 /* Co-processor instructions. */
13004 return thumb2_record_coproc_insn (thumb2_insn_r);
13005 }
13006 }
13007 else if (op1 == 0x02)
13008 {
13009 if (op)
13010 {
13011 /* Branches and miscellaneous control instructions. */
13012 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
13013 }
13014 else if (op2 & 0x20)
13015 {
13016 /* Data-processing (plain binary immediate) instruction. */
13017 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13018 }
13019 else
13020 {
13021 /* Data-processing (modified immediate). */
13022 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
13023 }
13024 }
13025 else if (op1 == 0x03)
13026 {
13027 if (!(op2 & 0x71 ))
13028 {
13029 /* Store single data item. */
13030 return thumb2_record_str_single_data (thumb2_insn_r);
13031 }
13032 else if (!((op2 & 0x71) ^ 0x10))
13033 {
13034 /* Advanced SIMD or structure load/store instructions. */
13035 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
13036 }
13037 else if (!((op2 & 0x67) ^ 0x01))
13038 {
13039 /* Load byte, memory hints instruction. */
13040 return thumb2_record_ld_mem_hints (thumb2_insn_r);
13041 }
13042 else if (!((op2 & 0x67) ^ 0x03))
13043 {
13044 /* Load halfword, memory hints instruction. */
13045 return thumb2_record_ld_mem_hints (thumb2_insn_r);
13046 }
13047 else if (!((op2 & 0x67) ^ 0x05))
13048 {
13049 /* Load word instruction. */
13050 return thumb2_record_ld_word (thumb2_insn_r);
13051 }
13052 else if (!((op2 & 0x70) ^ 0x20))
13053 {
13054 /* Data-processing (register) instruction. */
13055 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13056 }
13057 else if (!((op2 & 0x78) ^ 0x30))
13058 {
13059 /* Multiply, multiply accumulate, abs diff instruction. */
13060 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13061 }
13062 else if (!((op2 & 0x78) ^ 0x38))
13063 {
13064 /* Long multiply, long multiply accumulate, and divide. */
13065 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
13066 }
13067 else if (op2 & 0x40)
13068 {
13069 /* Co-processor instructions. */
13070 return thumb2_record_coproc_insn (thumb2_insn_r);
13071 }
13072 }
13073
13074 return -1;
13075 }
13076
13077 namespace {
13078 /* Abstract memory reader. */
13079
13080 class abstract_memory_reader
13081 {
13082 public:
13083 /* Read LEN bytes of target memory at address MEMADDR, placing the
13084 results in GDB's memory at BUF. Return true on success. */
13085
13086 virtual bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) = 0;
13087 };
13088
13089 /* Instruction reader from real target. */
13090
13091 class instruction_reader : public abstract_memory_reader
13092 {
13093 public:
13094 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
13095 {
13096 if (target_read_memory (memaddr, buf, len))
13097 return false;
13098 else
13099 return true;
13100 }
13101 };
13102
13103 } // namespace
13104
13105 /* Extracts arm/thumb/thumb2 insn depending on the size, and returns 0 on success
13106 and positive val on failure. */
13107
13108 static int
13109 extract_arm_insn (abstract_memory_reader& reader,
13110 insn_decode_record *insn_record, uint32_t insn_size)
13111 {
13112 gdb_byte buf[insn_size];
13113
13114 memset (&buf[0], 0, insn_size);
13115
13116 if (!reader.read (insn_record->this_addr, buf, insn_size))
13117 return 1;
13118 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
13119 insn_size,
13120 gdbarch_byte_order_for_code (insn_record->gdbarch));
13121 return 0;
13122 }
13123
13124 typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
13125
13126 /* Decode arm/thumb insn depending on condition cods and opcodes; and
13127 dispatch it. */
13128
13129 static int
13130 decode_insn (abstract_memory_reader &reader, insn_decode_record *arm_record,
13131 record_type_t record_type, uint32_t insn_size)
13132 {
13133
13134 /* (Starting from numerical 0); bits 25, 26, 27 decodes type of arm
13135 instruction. */
13136 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
13137 {
13138 arm_record_data_proc_misc_ld_str, /* 000. */
13139 arm_record_data_proc_imm, /* 001. */
13140 arm_record_ld_st_imm_offset, /* 010. */
13141 arm_record_ld_st_reg_offset, /* 011. */
13142 arm_record_ld_st_multiple, /* 100. */
13143 arm_record_b_bl, /* 101. */
13144 arm_record_asimd_vfp_coproc, /* 110. */
13145 arm_record_coproc_data_proc /* 111. */
13146 };
13147
13148 /* (Starting from numerical 0); bits 13,14,15 decodes type of thumb
13149 instruction. */
13150 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
13151 { \
13152 thumb_record_shift_add_sub, /* 000. */
13153 thumb_record_add_sub_cmp_mov, /* 001. */
13154 thumb_record_ld_st_reg_offset, /* 010. */
13155 thumb_record_ld_st_imm_offset, /* 011. */
13156 thumb_record_ld_st_stack, /* 100. */
13157 thumb_record_misc, /* 101. */
13158 thumb_record_ldm_stm_swi, /* 110. */
13159 thumb_record_branch /* 111. */
13160 };
13161
13162 uint32_t ret = 0; /* return value: negative:failure 0:success. */
13163 uint32_t insn_id = 0;
13164
13165 if (extract_arm_insn (reader, arm_record, insn_size))
13166 {
13167 if (record_debug)
13168 {
13169 printf_unfiltered (_("Process record: error reading memory at "
13170 "addr %s len = %d.\n"),
13171 paddress (arm_record->gdbarch,
13172 arm_record->this_addr), insn_size);
13173 }
13174 return -1;
13175 }
13176 else if (ARM_RECORD == record_type)
13177 {
13178 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
13179 insn_id = bits (arm_record->arm_insn, 25, 27);
13180
13181 if (arm_record->cond == 0xf)
13182 ret = arm_record_extension_space (arm_record);
13183 else
13184 {
13185 /* If this insn has fallen into extension space
13186 then we need not decode it anymore. */
13187 ret = arm_handle_insn[insn_id] (arm_record);
13188 }
13189 if (ret != ARM_RECORD_SUCCESS)
13190 {
13191 arm_record_unsupported_insn (arm_record);
13192 ret = -1;
13193 }
13194 }
13195 else if (THUMB_RECORD == record_type)
13196 {
13197 /* As thumb does not have condition codes, we set negative. */
13198 arm_record->cond = -1;
13199 insn_id = bits (arm_record->arm_insn, 13, 15);
13200 ret = thumb_handle_insn[insn_id] (arm_record);
13201 if (ret != ARM_RECORD_SUCCESS)
13202 {
13203 arm_record_unsupported_insn (arm_record);
13204 ret = -1;
13205 }
13206 }
13207 else if (THUMB2_RECORD == record_type)
13208 {
13209 /* As thumb does not have condition codes, we set negative. */
13210 arm_record->cond = -1;
13211
13212 /* Swap first half of 32bit thumb instruction with second half. */
13213 arm_record->arm_insn
13214 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
13215
13216 ret = thumb2_record_decode_insn_handler (arm_record);
13217
13218 if (ret != ARM_RECORD_SUCCESS)
13219 {
13220 arm_record_unsupported_insn (arm_record);
13221 ret = -1;
13222 }
13223 }
13224 else
13225 {
13226 /* Throw assertion. */
13227 gdb_assert_not_reached ("not a valid instruction, could not decode");
13228 }
13229
13230 return ret;
13231 }
13232
13233 #if GDB_SELF_TEST
13234 namespace selftests {
13235
13236 /* Provide both 16-bit and 32-bit thumb instructions. */
13237
13238 class instruction_reader_thumb : public abstract_memory_reader
13239 {
13240 public:
13241 template<size_t SIZE>
13242 instruction_reader_thumb (enum bfd_endian endian,
13243 const uint16_t (&insns)[SIZE])
13244 : m_endian (endian), m_insns (insns), m_insns_size (SIZE)
13245 {}
13246
13247 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
13248 {
13249 SELF_CHECK (len == 4 || len == 2);
13250 SELF_CHECK (memaddr % 2 == 0);
13251 SELF_CHECK ((memaddr / 2) < m_insns_size);
13252
13253 store_unsigned_integer (buf, 2, m_endian, m_insns[memaddr / 2]);
13254 if (len == 4)
13255 {
13256 store_unsigned_integer (&buf[2], 2, m_endian,
13257 m_insns[memaddr / 2 + 1]);
13258 }
13259 return true;
13260 }
13261
13262 private:
13263 enum bfd_endian m_endian;
13264 const uint16_t *m_insns;
13265 size_t m_insns_size;
13266 };
13267
13268 static void
13269 arm_record_test (void)
13270 {
13271 struct gdbarch_info info;
13272 gdbarch_info_init (&info);
13273 info.bfd_arch_info = bfd_scan_arch ("arm");
13274
13275 struct gdbarch *gdbarch = gdbarch_find_by_info (info);
13276
13277 SELF_CHECK (gdbarch != NULL);
13278
13279 /* 16-bit Thumb instructions. */
13280 {
13281 insn_decode_record arm_record;
13282
13283 memset (&arm_record, 0, sizeof (insn_decode_record));
13284 arm_record.gdbarch = gdbarch;
13285
13286 static const uint16_t insns[] = {
13287 /* db b2 uxtb r3, r3 */
13288 0xb2db,
13289 /* cd 58 ldr r5, [r1, r3] */
13290 0x58cd,
13291 };
13292
13293 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
13294 instruction_reader_thumb reader (endian, insns);
13295 int ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13296 THUMB_INSN_SIZE_BYTES);
13297
13298 SELF_CHECK (ret == 0);
13299 SELF_CHECK (arm_record.mem_rec_count == 0);
13300 SELF_CHECK (arm_record.reg_rec_count == 1);
13301 SELF_CHECK (arm_record.arm_regs[0] == 3);
13302
13303 arm_record.this_addr += 2;
13304 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13305 THUMB_INSN_SIZE_BYTES);
13306
13307 SELF_CHECK (ret == 0);
13308 SELF_CHECK (arm_record.mem_rec_count == 0);
13309 SELF_CHECK (arm_record.reg_rec_count == 1);
13310 SELF_CHECK (arm_record.arm_regs[0] == 5);
13311 }
13312
13313 /* 32-bit Thumb-2 instructions. */
13314 {
13315 insn_decode_record arm_record;
13316
13317 memset (&arm_record, 0, sizeof (insn_decode_record));
13318 arm_record.gdbarch = gdbarch;
13319
13320 static const uint16_t insns[] = {
13321 /* 1d ee 70 7f mrc 15, 0, r7, cr13, cr0, {3} */
13322 0xee1d, 0x7f70,
13323 };
13324
13325 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
13326 instruction_reader_thumb reader (endian, insns);
13327 int ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
13328 THUMB2_INSN_SIZE_BYTES);
13329
13330 SELF_CHECK (ret == 0);
13331 SELF_CHECK (arm_record.mem_rec_count == 0);
13332 SELF_CHECK (arm_record.reg_rec_count == 1);
13333 SELF_CHECK (arm_record.arm_regs[0] == 7);
13334 }
13335 }
13336 } // namespace selftests
13337 #endif /* GDB_SELF_TEST */
13338
13339 /* Cleans up local record registers and memory allocations. */
13340
13341 static void
13342 deallocate_reg_mem (insn_decode_record *record)
13343 {
13344 xfree (record->arm_regs);
13345 xfree (record->arm_mems);
13346 }
13347
13348
13349 /* Parse the current instruction and record the values of the registers and
13350 memory that will be changed in current instruction to record_arch_list".
13351 Return -1 if something is wrong. */
13352
13353 int
13354 arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
13355 CORE_ADDR insn_addr)
13356 {
13357
13358 uint32_t no_of_rec = 0;
13359 uint32_t ret = 0; /* return value: -1:record failure ; 0:success */
13360 ULONGEST t_bit = 0, insn_id = 0;
13361
13362 ULONGEST u_regval = 0;
13363
13364 insn_decode_record arm_record;
13365
13366 memset (&arm_record, 0, sizeof (insn_decode_record));
13367 arm_record.regcache = regcache;
13368 arm_record.this_addr = insn_addr;
13369 arm_record.gdbarch = gdbarch;
13370
13371
13372 if (record_debug > 1)
13373 {
13374 fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
13375 "addr = %s\n",
13376 paddress (gdbarch, arm_record.this_addr));
13377 }
13378
13379 instruction_reader reader;
13380 if (extract_arm_insn (reader, &arm_record, 2))
13381 {
13382 if (record_debug)
13383 {
13384 printf_unfiltered (_("Process record: error reading memory at "
13385 "addr %s len = %d.\n"),
13386 paddress (arm_record.gdbarch,
13387 arm_record.this_addr), 2);
13388 }
13389 return -1;
13390 }
13391
13392 /* Check the insn, whether it is thumb or arm one. */
13393
13394 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
13395 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
13396
13397
13398 if (!(u_regval & t_bit))
13399 {
13400 /* We are decoding arm insn. */
13401 ret = decode_insn (reader, &arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
13402 }
13403 else
13404 {
13405 insn_id = bits (arm_record.arm_insn, 11, 15);
13406 /* is it thumb2 insn? */
13407 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
13408 {
13409 ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
13410 THUMB2_INSN_SIZE_BYTES);
13411 }
13412 else
13413 {
13414 /* We are decoding thumb insn. */
13415 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13416 THUMB_INSN_SIZE_BYTES);
13417 }
13418 }
13419
13420 if (0 == ret)
13421 {
13422 /* Record registers. */
13423 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
13424 if (arm_record.arm_regs)
13425 {
13426 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
13427 {
13428 if (record_full_arch_list_add_reg
13429 (arm_record.regcache , arm_record.arm_regs[no_of_rec]))
13430 ret = -1;
13431 }
13432 }
13433 /* Record memories. */
13434 if (arm_record.arm_mems)
13435 {
13436 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
13437 {
13438 if (record_full_arch_list_add_mem
13439 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
13440 arm_record.arm_mems[no_of_rec].len))
13441 ret = -1;
13442 }
13443 }
13444
13445 if (record_full_arch_list_add_end ())
13446 ret = -1;
13447 }
13448
13449
13450 deallocate_reg_mem (&arm_record);
13451
13452 return ret;
13453 }
13454
13455 /* See arm-tdep.h. */
13456
13457 const target_desc *
13458 arm_read_description (arm_fp_type fp_type)
13459 {
13460 struct target_desc *tdesc = tdesc_arm_list[fp_type];
13461
13462 if (tdesc == nullptr)
13463 {
13464 tdesc = arm_create_target_description (fp_type);
13465 tdesc_arm_list[fp_type] = tdesc;
13466 }
13467
13468 return tdesc;
13469 }
13470
13471 /* See arm-tdep.h. */
13472
13473 const target_desc *
13474 arm_read_mprofile_description (arm_m_profile_type m_type)
13475 {
13476 struct target_desc *tdesc = tdesc_arm_mprofile_list[m_type];
13477
13478 if (tdesc == nullptr)
13479 {
13480 tdesc = arm_create_mprofile_target_description (m_type);
13481 tdesc_arm_mprofile_list[m_type] = tdesc;
13482 }
13483
13484 return tdesc;
13485 }
This page took 0.313974 seconds and 4 git commands to generate.