constify error_no_arg
[deliverable/binutils-gdb.git] / gdb / arm-tdep.c
1 /* Common target dependent code for GDB on ARM systems.
2
3 Copyright (C) 1988-2014 Free Software Foundation, Inc.
4
5 This file is part of GDB.
6
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
11
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
19
20 #include "defs.h"
21
22 #include <ctype.h> /* XXX for isupper (). */
23
24 #include "frame.h"
25 #include "inferior.h"
26 #include "infrun.h"
27 #include "gdbcmd.h"
28 #include "gdbcore.h"
29 #include <string.h>
30 #include "dis-asm.h" /* For register styles. */
31 #include "regcache.h"
32 #include "reggroups.h"
33 #include "doublest.h"
34 #include "value.h"
35 #include "arch-utils.h"
36 #include "osabi.h"
37 #include "frame-unwind.h"
38 #include "frame-base.h"
39 #include "trad-frame.h"
40 #include "objfiles.h"
41 #include "dwarf2-frame.h"
42 #include "gdbtypes.h"
43 #include "prologue-value.h"
44 #include "remote.h"
45 #include "target-descriptions.h"
46 #include "user-regs.h"
47 #include "observer.h"
48
49 #include "arm-tdep.h"
50 #include "gdb/sim-arm.h"
51
52 #include "elf-bfd.h"
53 #include "coff/internal.h"
54 #include "elf/arm.h"
55
56 #include "gdb_assert.h"
57 #include "vec.h"
58
59 #include "record.h"
60 #include "record-full.h"
61
62 #include "features/arm-with-m.c"
63 #include "features/arm-with-m-fpa-layout.c"
64 #include "features/arm-with-m-vfp-d16.c"
65 #include "features/arm-with-iwmmxt.c"
66 #include "features/arm-with-vfpv2.c"
67 #include "features/arm-with-vfpv3.c"
68 #include "features/arm-with-neon.c"
69
70 static int arm_debug;
71
72 /* Macros for setting and testing a bit in a minimal symbol that marks
73 it as Thumb function. The MSB of the minimal symbol's "info" field
74 is used for this purpose.
75
76 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
77 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
78
79 #define MSYMBOL_SET_SPECIAL(msym) \
80 MSYMBOL_TARGET_FLAG_1 (msym) = 1
81
82 #define MSYMBOL_IS_SPECIAL(msym) \
83 MSYMBOL_TARGET_FLAG_1 (msym)
84
85 /* Per-objfile data used for mapping symbols. */
86 static const struct objfile_data *arm_objfile_data_key;
87
/* A single ARM mapping symbol, recorded per section.  */
struct arm_mapping_symbol
{
  /* Address of the mapping symbol, stored as an offset from the
     start of its containing section (see arm_find_mapping_symbol,
     which subtracts obj_section_addr before searching).  */
  bfd_vma value;
  /* The mapping symbol's class character; arm_pc_is_thumb treats
     't' as marking Thumb code.  */
  char type;
};
typedef struct arm_mapping_symbol arm_mapping_symbol_s;
DEF_VEC_O(arm_mapping_symbol_s);

struct arm_per_objfile
{
  /* One sorted vector of mapping symbols per BFD section, indexed by
     the section's index (sec->the_bfd_section->index).  */
  VEC(arm_mapping_symbol_s) **section_maps;
};
100
101 /* The list of available "set arm ..." and "show arm ..." commands. */
102 static struct cmd_list_element *setarmcmdlist = NULL;
103 static struct cmd_list_element *showarmcmdlist = NULL;
104
105 /* The type of floating-point to use. Keep this in sync with enum
106 arm_float_model, and the help string in _initialize_arm_tdep. */
107 static const char *const fp_model_strings[] =
108 {
109 "auto",
110 "softfpa",
111 "fpa",
112 "softvfp",
113 "vfp",
114 NULL
115 };
116
117 /* A variable that can be configured by the user. */
118 static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
119 static const char *current_fp_model = "auto";
120
121 /* The ABI to use. Keep this in sync with arm_abi_kind. */
122 static const char *const arm_abi_strings[] =
123 {
124 "auto",
125 "APCS",
126 "AAPCS",
127 NULL
128 };
129
130 /* A variable that can be configured by the user. */
131 static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
132 static const char *arm_abi_string = "auto";
133
134 /* The execution mode to assume. */
135 static const char *const arm_mode_strings[] =
136 {
137 "auto",
138 "arm",
139 "thumb",
140 NULL
141 };
142
143 static const char *arm_fallback_mode_string = "auto";
144 static const char *arm_force_mode_string = "auto";
145
146 /* Internal override of the execution mode. -1 means no override,
147 0 means override to ARM mode, 1 means override to Thumb mode.
148 The effect is the same as if arm_force_mode has been set by the
149 user (except the internal override has precedence over a user's
150 arm_force_mode override). */
151 static int arm_override_mode = -1;
152
153 /* Number of different reg name sets (options). */
154 static int num_disassembly_options;
155
156 /* The standard register names, and all the valid aliases for them. Note
157 that `fp', `sp' and `pc' are not added in this alias list, because they
158 have been added as builtin user registers in
159 std-regs.c:_initialize_frame_reg. */
160 static const struct
161 {
162 const char *name;
163 int regnum;
164 } arm_register_aliases[] = {
165 /* Basic register numbers. */
166 { "r0", 0 },
167 { "r1", 1 },
168 { "r2", 2 },
169 { "r3", 3 },
170 { "r4", 4 },
171 { "r5", 5 },
172 { "r6", 6 },
173 { "r7", 7 },
174 { "r8", 8 },
175 { "r9", 9 },
176 { "r10", 10 },
177 { "r11", 11 },
178 { "r12", 12 },
179 { "r13", 13 },
180 { "r14", 14 },
181 { "r15", 15 },
182 /* Synonyms (argument and variable registers). */
183 { "a1", 0 },
184 { "a2", 1 },
185 { "a3", 2 },
186 { "a4", 3 },
187 { "v1", 4 },
188 { "v2", 5 },
189 { "v3", 6 },
190 { "v4", 7 },
191 { "v5", 8 },
192 { "v6", 9 },
193 { "v7", 10 },
194 { "v8", 11 },
195 /* Other platform-specific names for r9. */
196 { "sb", 9 },
197 { "tr", 9 },
198 /* Special names. */
199 { "ip", 12 },
200 { "lr", 14 },
201 /* Names used by GCC (not listed in the ARM EABI). */
202 { "sl", 10 },
203 /* A special name from the older ATPCS. */
204 { "wr", 7 },
205 };
206
207 static const char *const arm_register_names[] =
208 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
209 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
210 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
211 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
212 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
213 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
214 "fps", "cpsr" }; /* 24 25 */
215
216 /* Valid register name styles. */
217 static const char **valid_disassembly_styles;
218
219 /* Disassembly style to use. Default to "std" register names. */
220 static const char *disassembly_style;
221
222 /* This is used to keep the bfd arch_info in sync with the disassembly
223 style. */
224 static void set_disassembly_style_sfunc(char *, int,
225 struct cmd_list_element *);
226 static void set_disassembly_style (void);
227
228 static void convert_from_extended (const struct floatformat *, const void *,
229 void *, int);
230 static void convert_to_extended (const struct floatformat *, void *,
231 const void *, int);
232
233 static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
234 struct regcache *regcache,
235 int regnum, gdb_byte *buf);
236 static void arm_neon_quad_write (struct gdbarch *gdbarch,
237 struct regcache *regcache,
238 int regnum, const gdb_byte *buf);
239
240 static int thumb_insn_size (unsigned short inst1);
241
/* Cache of prologue-analysis results, filled in by the ARM prologue
   scanners and consumed by the prologue-based frame unwinders.  */
struct arm_prologue_cache
{
  /* The stack pointer at the time this frame was created; i.e. the
     caller's stack pointer when this function was called.  It is used
     to identify this frame.  */
  CORE_ADDR prev_sp;

  /* The frame base for this frame is just prev_sp - frame size.
     FRAMESIZE is the distance from the frame pointer to the
     initial stack pointer.  */

  int framesize;

  /* The register used to hold the frame pointer for this frame.  */
  int framereg;

  /* Saved register offsets.  */
  struct trad_frame_saved_reg *saved_regs;
};
261
262 static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
263 CORE_ADDR prologue_start,
264 CORE_ADDR prologue_end,
265 struct arm_prologue_cache *cache);
266
267 /* Architecture version for displaced stepping. This effects the behaviour of
268 certain instructions, and really should not be hard-wired. */
269
270 #define DISPLACED_STEPPING_ARCH_VERSION 5
271
272 /* Addresses for calling Thumb functions have the bit 0 set.
273 Here are some macros to test, set, or clear bit 0 of addresses. */
274 #define IS_THUMB_ADDR(addr) ((addr) & 1)
275 #define MAKE_THUMB_ADDR(addr) ((addr) | 1)
276 #define UNMAKE_THUMB_ADDR(addr) ((addr) & ~1)
277
278 /* Set to true if the 32-bit mode is in use. */
279
280 int arm_apcs_32 = 1;
281
282 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
283
284 int
285 arm_psr_thumb_bit (struct gdbarch *gdbarch)
286 {
287 if (gdbarch_tdep (gdbarch)->is_m)
288 return XPSR_T;
289 else
290 return CPSR_T;
291 }
292
293 /* Determine if FRAME is executing in Thumb mode. */
294
295 int
296 arm_frame_is_thumb (struct frame_info *frame)
297 {
298 CORE_ADDR cpsr;
299 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
300
301 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
302 directly (from a signal frame or dummy frame) or by interpreting
303 the saved LR (from a prologue or DWARF frame). So consult it and
304 trust the unwinders. */
305 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
306
307 return (cpsr & t_bit) != 0;
308 }
309
310 /* Callback for VEC_lower_bound. */
311
312 static inline int
313 arm_compare_mapping_symbols (const struct arm_mapping_symbol *lhs,
314 const struct arm_mapping_symbol *rhs)
315 {
316 return lhs->value < rhs->value;
317 }
318
319 /* Search for the mapping symbol covering MEMADDR. If one is found,
320 return its type. Otherwise, return 0. If START is non-NULL,
321 set *START to the location of the mapping symbol. */
322
static char
arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
{
  struct obj_section *sec;

  /* If there are mapping symbols, consult them.  */
  sec = find_pc_section (memaddr);
  if (sec != NULL)
    {
      struct arm_per_objfile *data;
      VEC(arm_mapping_symbol_s) *map;
      /* Mapping symbol addresses are stored section-relative, so
	 convert MEMADDR before searching.  */
      struct arm_mapping_symbol map_key = { memaddr - obj_section_addr (sec),
					    0 };
      unsigned int idx;

      data = objfile_data (sec->objfile, arm_objfile_data_key);
      if (data != NULL)
	{
	  /* One map per BFD section; see struct arm_per_objfile.  */
	  map = data->section_maps[sec->the_bfd_section->index];
	  if (!VEC_empty (arm_mapping_symbol_s, map))
	    {
	      struct arm_mapping_symbol *map_sym;

	      idx = VEC_lower_bound (arm_mapping_symbol_s, map, &map_key,
				     arm_compare_mapping_symbols);

	      /* VEC_lower_bound finds the earliest ordered insertion
		 point.  If the following symbol starts at this exact
		 address, we use that; otherwise, the preceding
		 mapping symbol covers this address.  */
	      if (idx < VEC_length (arm_mapping_symbol_s, map))
		{
		  map_sym = VEC_index (arm_mapping_symbol_s, map, idx);
		  if (map_sym->value == map_key.value)
		    {
		      if (start)
			*start = map_sym->value + obj_section_addr (sec);
		      return map_sym->type;
		    }
		}

	      if (idx > 0)
		{
		  map_sym = VEC_index (arm_mapping_symbol_s, map, idx - 1);
		  if (start)
		    *start = map_sym->value + obj_section_addr (sec);
		  return map_sym->type;
		}
	    }
	}
    }

  /* No section, no per-objfile data, or no covering mapping symbol.  */
  return 0;
}
377
378 /* Determine if the program counter specified in MEMADDR is in a Thumb
379 function. This function should be called for addresses unrelated to
380 any executing frame; otherwise, prefer arm_frame_is_thumb. */
381
int
arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
{
  struct bound_minimal_symbol sym;
  char type;
  struct displaced_step_closure* dsc
    = get_displaced_step_closure_by_addr(memaddr);

  /* If checking the mode of displaced instruction in copy area, the mode
     should be determined by instruction on the original address.  */
  if (dsc)
    {
      if (debug_displaced)
	fprintf_unfiltered (gdb_stdlog,
			    "displaced: check mode of %.8lx instead of %.8lx\n",
			    (unsigned long) dsc->insn_addr,
			    (unsigned long) memaddr);
      memaddr = dsc->insn_addr;
    }

  /* The checks below run in decreasing order of authority: explicit
     address bit, internal override, user override, architecture,
     symbol information, user fallback, live registers.  */

  /* If bit 0 of the address is set, assume this is a Thumb address.  */
  if (IS_THUMB_ADDR (memaddr))
    return 1;

  /* Respect internal mode override if active.  */
  if (arm_override_mode != -1)
    return arm_override_mode;

  /* If the user wants to override the symbol table, let him.  */
  if (strcmp (arm_force_mode_string, "arm") == 0)
    return 0;
  if (strcmp (arm_force_mode_string, "thumb") == 0)
    return 1;

  /* ARM v6-M and v7-M are always in Thumb mode.  */
  if (gdbarch_tdep (gdbarch)->is_m)
    return 1;

  /* If there are mapping symbols, consult them; 't' marks Thumb
     code, anything else is treated as ARM.  */
  type = arm_find_mapping_symbol (memaddr, NULL);
  if (type)
    return type == 't';

  /* Thumb functions have a "special" bit set in minimal symbols.  */
  sym = lookup_minimal_symbol_by_pc (memaddr);
  if (sym.minsym)
    return (MSYMBOL_IS_SPECIAL (sym.minsym));

  /* If the user wants to override the fallback mode, let them.  */
  if (strcmp (arm_fallback_mode_string, "arm") == 0)
    return 0;
  if (strcmp (arm_fallback_mode_string, "thumb") == 0)
    return 1;

  /* If we couldn't find any symbol, but we're talking to a running
     target, then trust the current value of $cpsr.  This lets
     "display/i $pc" always show the correct mode (though if there is
     a symbol table we will not reach here, so it still may not be
     displayed in the mode it will be executed).  */
  if (target_has_registers)
    return arm_frame_is_thumb (get_current_frame ());

  /* Otherwise we're out of luck; we assume ARM.  */
  return 0;
}
447
448 /* Remove useless bits from addresses in a running program. */
449 static CORE_ADDR
450 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
451 {
452 /* On M-profile devices, do not strip the low bit from EXC_RETURN
453 (the magic exception return address). */
454 if (gdbarch_tdep (gdbarch)->is_m
455 && (val & 0xfffffff0) == 0xfffffff0)
456 return val;
457
458 if (arm_apcs_32)
459 return UNMAKE_THUMB_ADDR (val);
460 else
461 return (val & 0x03fffffc);
462 }
463
464 /* Return 1 if PC is the start of a compiler helper function which
465 can be safely ignored during prologue skipping. IS_THUMB is true
466 if the function is known to be a Thumb function due to the way it
467 is being called. */
static int
skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  struct bound_minimal_symbol msym;

  /* Only a symbol that starts exactly at PC can identify a helper.  */
  msym = lookup_minimal_symbol_by_pc (pc);
  if (msym.minsym != NULL
      && BMSYMBOL_VALUE_ADDRESS (msym) == pc
      && MSYMBOL_LINKAGE_NAME (msym.minsym) != NULL)
    {
      const char *name = MSYMBOL_LINKAGE_NAME (msym.minsym);

      /* The GNU linker's Thumb call stub to foo is named
	 __foo_from_thumb.  Skip the stub's "__" prefix so that the
	 checks below see the target function's own name (e.g. the
	 stub "____truncdfsf2_from_thumb" becomes
	 "__truncdfsf2_from_thumb" and matches "__truncdfsf2").  */
      if (strstr (name, "_from_thumb") != NULL)
	name += 2;

      /* On soft-float targets, __truncdfsf2 is called to convert promoted
	 arguments to their argument types in non-prototyped
	 functions.  */
      if (strncmp (name, "__truncdfsf2", strlen ("__truncdfsf2")) == 0)
	return 1;
      if (strncmp (name, "__aeabi_d2f", strlen ("__aeabi_d2f")) == 0)
	return 1;

      /* Internal functions related to thread-local storage.  */
      if (strncmp (name, "__tls_get_addr", strlen ("__tls_get_addr")) == 0)
	return 1;
      if (strncmp (name, "__aeabi_read_tp", strlen ("__aeabi_read_tp")) == 0)
	return 1;
    }
  else
    {
      /* If we run against a stripped glibc, we may be unable to identify
	 special functions by name.  Check for one important case,
	 __aeabi_read_tp, by comparing the *code* against the default
	 implementation (this is hand-written ARM assembler in glibc).  */

      if (!is_thumb
	  && read_memory_unsigned_integer (pc, 4, byte_order_for_code)
	     == 0xe3e00a0f /* mov r0, #0xffff0fff */
	  && read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code)
	     == 0xe240f01f) /* sub pc, r0, #31 */
	return 1;
    }

  return 0;
}
517
/* Support routines for instruction parsing.  */

/* Mask of the low (X + 1) bits, i.e. bits 0..X inclusive.  */
#define submask(x) ((1L << ((x) + 1)) - 1)
/* Extract bit ST of OBJ.  */
#define bit(obj,st) (((obj) >> (st)) & 1)
/* Extract bits ST..FN (inclusive) of OBJ as an unsigned value.  */
#define bits(obj,st,fn) (((obj) >> (st)) & submask ((fn) - (st)))
/* Extract bits ST..FN of OBJ and sign-extend from bit FN.  */
#define sbits(obj,st,fn) \
  ((long) (bits(obj,st,fn) | ((long) bit(obj,fn) * ~ submask (fn - st))))
/* Destination of an ARM branch: ADDR plus 8 (the ARM-state PC reads
   as the instruction address plus 8) plus the sign-extended 24-bit
   word offset in INSTR, scaled by 4.  */
#define BranchDest(addr,instr) \
  ((CORE_ADDR) (((unsigned long) (addr)) + 8 + (sbits (instr, 0, 23) << 2)))

/* Extract the immediate from instruction movw/movt of encoding T.  INSN1 is
   the first 16-bit of instruction, and INSN2 is the second 16-bit of
   instruction.  */
#define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
  ((bits ((insn1), 0, 3) << 12)		\
   | (bits ((insn1), 10, 10) << 11)	\
   | (bits ((insn2), 12, 14) << 8)	\
   | bits ((insn2), 0, 7))

/* Extract the immediate from instruction movw/movt of encoding A.  INSN is
   the 32-bit instruction.  */
#define EXTRACT_MOVW_MOVT_IMM_A(insn) \
  ((bits ((insn), 16, 19) << 12) \
   | bits ((insn), 0, 11))
541
/* Decode immediate value; implements ThumbExpandImmediate pseudo-op.
   IMM is the 12-bit i:imm3:imm8 field; returns the expanded 32-bit
   constant.  */

static unsigned int
thumb_expand_immediate (unsigned int imm)
{
  unsigned int rot = imm >> 7;		/* imm<11:7>.  */
  unsigned int byte = imm & 0xff;	/* imm<7:0>.  */

  if (rot >= 8)
    /* Rotated constant: (1:imm<6:0>) rotated right by ROT, which for
       8 <= ROT <= 31 is a plain left shift by 32 - ROT.  */
    return (0x80 | (imm & 0x7f)) << (32 - rot);

  /* Replicated-byte forms, selected by ROT / 2 (== imm<11:8>).  */
  switch (rot / 2)
    {
    case 0:	/* 00000000 00000000 00000000 abcdefgh */
      return byte;
    case 1:	/* 00000000 abcdefgh 00000000 abcdefgh */
      return byte | (byte << 16);
    case 2:	/* abcdefgh 00000000 abcdefgh 00000000 */
      return (byte << 8) | (byte << 24);
    default:	/* abcdefgh abcdefgh abcdefgh abcdefgh */
      return byte | (byte << 8) | (byte << 16) | (byte << 24);
    }
}
565
/* Return 1 if the 16-bit Thumb instruction INST might change
   control flow, 0 otherwise.  */

static int
thumb_instruction_changes_pc (unsigned short inst)
{
  return ((inst & 0xff00) == 0xbd00	/* pop {rlist, pc} */
	  || (inst & 0xf000) == 0xd000	/* conditional branch */
	  || (inst & 0xf800) == 0xe000	/* unconditional branch */
	  || (inst & 0xff00) == 0x4700	/* bx REG, blx REG */
	  || (inst & 0xff87) == 0x4687	/* mov pc, REG */
	  || (inst & 0xf500) == 0xb100);	/* CBNZ or CBZ.  */
}
592
593 /* Return 1 if the 32-bit Thumb instruction in INST1 and INST2
594 might change control flow, 0 otherwise. */
595
static int
thumb2_instruction_changes_pc (unsigned short inst1, unsigned short inst2)
{
  if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
    {
      /* Branches and miscellaneous control instructions.  */

      if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
	{
	  /* B, BL, BLX.  */
	  return 1;
	}
      else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)
	{
	  /* SUBS PC, LR, #imm8.  */
	  return 1;
	}
      else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
	{
	  /* Conditional branch.  */
	  return 1;
	}

      /* Remaining miscellaneous control instructions do not write
	 the PC.  */
      return 0;
    }

  if ((inst1 & 0xfe50) == 0xe810)
    {
      /* Load multiple or RFE.  Bits 7 and 8 of INST1 select the
	 addressing mode; the load-multiple forms change the PC only
	 when PC (bit 15 of INST2) is in the register list.  */

      if (bit (inst1, 7) && !bit (inst1, 8))
	{
	  /* LDMIA or POP */
	  if (bit (inst2, 15))
	    return 1;
	}
      else if (!bit (inst1, 7) && bit (inst1, 8))
	{
	  /* LDMDB */
	  if (bit (inst2, 15))
	    return 1;
	}
      else if (bit (inst1, 7) && bit (inst1, 8))
	{
	  /* RFEIA */
	  return 1;
	}
      else if (!bit (inst1, 7) && !bit (inst1, 8))
	{
	  /* RFEDB */
	  return 1;
	}

      return 0;
    }

  if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
    {
      /* MOV PC or MOVS PC.  */
      return 1;
    }

  if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
    {
      /* LDR PC.  The checks below distinguish the addressing forms;
	 encodings matching none of them do not write the PC.  */
      if (bits (inst1, 0, 3) == 15)
	return 1;
      if (bit (inst1, 7))
	return 1;
      if (bit (inst2, 11))
	return 1;
      if ((inst2 & 0x0fc0) == 0x0000)
	return 1;

      return 0;
    }

  if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)
    {
      /* TBB.  */
      return 1;
    }

  if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)
    {
      /* TBH.  */
      return 1;
    }

  return 0;
}
687
688 /* Analyze a Thumb prologue, looking for a recognizable stack frame
689 and frame pointer. Scan until we encounter a store that could
690 clobber the stack frame unexpectedly, or an unknown instruction.
691 Return the last address which is definitely safe to skip for an
692 initial breakpoint. */
693
694 static CORE_ADDR
695 thumb_analyze_prologue (struct gdbarch *gdbarch,
696 CORE_ADDR start, CORE_ADDR limit,
697 struct arm_prologue_cache *cache)
698 {
699 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
700 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
701 int i;
702 pv_t regs[16];
703 struct pv_area *stack;
704 struct cleanup *back_to;
705 CORE_ADDR offset;
706 CORE_ADDR unrecognized_pc = 0;
707
708 for (i = 0; i < 16; i++)
709 regs[i] = pv_register (i, 0);
710 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
711 back_to = make_cleanup_free_pv_area (stack);
712
713 while (start < limit)
714 {
715 unsigned short insn;
716
717 insn = read_memory_unsigned_integer (start, 2, byte_order_for_code);
718
719 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
720 {
721 int regno;
722 int mask;
723
724 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
725 break;
726
727 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
728 whether to save LR (R14). */
729 mask = (insn & 0xff) | ((insn & 0x100) << 6);
730
731 /* Calculate offsets of saved R0-R7 and LR. */
732 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
733 if (mask & (1 << regno))
734 {
735 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
736 -4);
737 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
738 }
739 }
740 else if ((insn & 0xff00) == 0xb000) /* add sp, #simm OR
741 sub sp, #simm */
742 {
743 offset = (insn & 0x7f) << 2; /* get scaled offset */
744 if (insn & 0x80) /* Check for SUB. */
745 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
746 -offset);
747 else
748 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
749 offset);
750 }
751 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
752 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
753 (insn & 0xff) << 2);
754 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
755 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
756 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
757 bits (insn, 6, 8));
758 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
759 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
760 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
761 bits (insn, 0, 7));
762 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
763 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
764 && pv_is_constant (regs[bits (insn, 3, 5)]))
765 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
766 regs[bits (insn, 6, 8)]);
767 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
768 && pv_is_constant (regs[bits (insn, 3, 6)]))
769 {
770 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
771 int rm = bits (insn, 3, 6);
772 regs[rd] = pv_add (regs[rd], regs[rm]);
773 }
774 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
775 {
776 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
777 int src_reg = (insn & 0x78) >> 3;
778 regs[dst_reg] = regs[src_reg];
779 }
780 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
781 {
782 /* Handle stores to the stack. Normally pushes are used,
783 but with GCC -mtpcs-frame, there may be other stores
784 in the prologue to create the frame. */
785 int regno = (insn >> 8) & 0x7;
786 pv_t addr;
787
788 offset = (insn & 0xff) << 2;
789 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
790
791 if (pv_area_store_would_trash (stack, addr))
792 break;
793
794 pv_area_store (stack, addr, 4, regs[regno]);
795 }
796 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
797 {
798 int rd = bits (insn, 0, 2);
799 int rn = bits (insn, 3, 5);
800 pv_t addr;
801
802 offset = bits (insn, 6, 10) << 2;
803 addr = pv_add_constant (regs[rn], offset);
804
805 if (pv_area_store_would_trash (stack, addr))
806 break;
807
808 pv_area_store (stack, addr, 4, regs[rd]);
809 }
810 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
811 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
812 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
813 /* Ignore stores of argument registers to the stack. */
814 ;
815 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
816 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
817 /* Ignore block loads from the stack, potentially copying
818 parameters from memory. */
819 ;
820 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
821 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
822 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
823 /* Similarly ignore single loads from the stack. */
824 ;
825 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
826 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
827 /* Skip register copies, i.e. saves to another register
828 instead of the stack. */
829 ;
830 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
831 /* Recognize constant loads; even with small stacks these are necessary
832 on Thumb. */
833 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
834 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
835 {
836 /* Constant pool loads, for the same reason. */
837 unsigned int constant;
838 CORE_ADDR loc;
839
840 loc = start + 4 + bits (insn, 0, 7) * 4;
841 constant = read_memory_unsigned_integer (loc, 4, byte_order);
842 regs[bits (insn, 8, 10)] = pv_constant (constant);
843 }
844 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
845 {
846 unsigned short inst2;
847
848 inst2 = read_memory_unsigned_integer (start + 2, 2,
849 byte_order_for_code);
850
851 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
852 {
853 /* BL, BLX. Allow some special function calls when
854 skipping the prologue; GCC generates these before
855 storing arguments to the stack. */
856 CORE_ADDR nextpc;
857 int j1, j2, imm1, imm2;
858
859 imm1 = sbits (insn, 0, 10);
860 imm2 = bits (inst2, 0, 10);
861 j1 = bit (inst2, 13);
862 j2 = bit (inst2, 11);
863
864 offset = ((imm1 << 12) + (imm2 << 1));
865 offset ^= ((!j2) << 22) | ((!j1) << 23);
866
867 nextpc = start + 4 + offset;
868 /* For BLX make sure to clear the low bits. */
869 if (bit (inst2, 12) == 0)
870 nextpc = nextpc & 0xfffffffc;
871
872 if (!skip_prologue_function (gdbarch, nextpc,
873 bit (inst2, 12) != 0))
874 break;
875 }
876
877 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
878 { registers } */
879 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
880 {
881 pv_t addr = regs[bits (insn, 0, 3)];
882 int regno;
883
884 if (pv_area_store_would_trash (stack, addr))
885 break;
886
887 /* Calculate offsets of saved registers. */
888 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
889 if (inst2 & (1 << regno))
890 {
891 addr = pv_add_constant (addr, -4);
892 pv_area_store (stack, addr, 4, regs[regno]);
893 }
894
895 if (insn & 0x0020)
896 regs[bits (insn, 0, 3)] = addr;
897 }
898
899 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
900 [Rn, #+/-imm]{!} */
901 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
902 {
903 int regno1 = bits (inst2, 12, 15);
904 int regno2 = bits (inst2, 8, 11);
905 pv_t addr = regs[bits (insn, 0, 3)];
906
907 offset = inst2 & 0xff;
908 if (insn & 0x0080)
909 addr = pv_add_constant (addr, offset);
910 else
911 addr = pv_add_constant (addr, -offset);
912
913 if (pv_area_store_would_trash (stack, addr))
914 break;
915
916 pv_area_store (stack, addr, 4, regs[regno1]);
917 pv_area_store (stack, pv_add_constant (addr, 4),
918 4, regs[regno2]);
919
920 if (insn & 0x0020)
921 regs[bits (insn, 0, 3)] = addr;
922 }
923
924 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
925 && (inst2 & 0x0c00) == 0x0c00
926 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
927 {
928 int regno = bits (inst2, 12, 15);
929 pv_t addr = regs[bits (insn, 0, 3)];
930
931 offset = inst2 & 0xff;
932 if (inst2 & 0x0200)
933 addr = pv_add_constant (addr, offset);
934 else
935 addr = pv_add_constant (addr, -offset);
936
937 if (pv_area_store_would_trash (stack, addr))
938 break;
939
940 pv_area_store (stack, addr, 4, regs[regno]);
941
942 if (inst2 & 0x0100)
943 regs[bits (insn, 0, 3)] = addr;
944 }
945
946 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
947 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
948 {
949 int regno = bits (inst2, 12, 15);
950 pv_t addr;
951
952 offset = inst2 & 0xfff;
953 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
954
955 if (pv_area_store_would_trash (stack, addr))
956 break;
957
958 pv_area_store (stack, addr, 4, regs[regno]);
959 }
960
961 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
962 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
963 /* Ignore stores of argument registers to the stack. */
964 ;
965
966 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
967 && (inst2 & 0x0d00) == 0x0c00
968 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
969 /* Ignore stores of argument registers to the stack. */
970 ;
971
972 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
973 { registers } */
974 && (inst2 & 0x8000) == 0x0000
975 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
976 /* Ignore block loads from the stack, potentially copying
977 parameters from memory. */
978 ;
979
980 else if ((insn & 0xffb0) == 0xe950 /* ldrd Rt, Rt2,
981 [Rn, #+/-imm] */
982 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
983 /* Similarly ignore dual loads from the stack. */
984 ;
985
986 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
987 && (inst2 & 0x0d00) == 0x0c00
988 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
989 /* Similarly ignore single loads from the stack. */
990 ;
991
992 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
993 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
994 /* Similarly ignore single loads from the stack. */
995 ;
996
997 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
998 && (inst2 & 0x8000) == 0x0000)
999 {
1000 unsigned int imm = ((bits (insn, 10, 10) << 11)
1001 | (bits (inst2, 12, 14) << 8)
1002 | bits (inst2, 0, 7));
1003
1004 regs[bits (inst2, 8, 11)]
1005 = pv_add_constant (regs[bits (insn, 0, 3)],
1006 thumb_expand_immediate (imm));
1007 }
1008
1009 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
1010 && (inst2 & 0x8000) == 0x0000)
1011 {
1012 unsigned int imm = ((bits (insn, 10, 10) << 11)
1013 | (bits (inst2, 12, 14) << 8)
1014 | bits (inst2, 0, 7));
1015
1016 regs[bits (inst2, 8, 11)]
1017 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
1018 }
1019
1020 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
1021 && (inst2 & 0x8000) == 0x0000)
1022 {
1023 unsigned int imm = ((bits (insn, 10, 10) << 11)
1024 | (bits (inst2, 12, 14) << 8)
1025 | bits (inst2, 0, 7));
1026
1027 regs[bits (inst2, 8, 11)]
1028 = pv_add_constant (regs[bits (insn, 0, 3)],
1029 - (CORE_ADDR) thumb_expand_immediate (imm));
1030 }
1031
1032 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
1033 && (inst2 & 0x8000) == 0x0000)
1034 {
1035 unsigned int imm = ((bits (insn, 10, 10) << 11)
1036 | (bits (inst2, 12, 14) << 8)
1037 | bits (inst2, 0, 7));
1038
1039 regs[bits (inst2, 8, 11)]
1040 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
1041 }
1042
1043 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
1044 {
1045 unsigned int imm = ((bits (insn, 10, 10) << 11)
1046 | (bits (inst2, 12, 14) << 8)
1047 | bits (inst2, 0, 7));
1048
1049 regs[bits (inst2, 8, 11)]
1050 = pv_constant (thumb_expand_immediate (imm));
1051 }
1052
1053 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
1054 {
1055 unsigned int imm
1056 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
1057
1058 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1059 }
1060
1061 else if (insn == 0xea5f /* mov.w Rd,Rm */
1062 && (inst2 & 0xf0f0) == 0)
1063 {
1064 int dst_reg = (inst2 & 0x0f00) >> 8;
1065 int src_reg = inst2 & 0xf;
1066 regs[dst_reg] = regs[src_reg];
1067 }
1068
1069 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1070 {
1071 /* Constant pool loads. */
1072 unsigned int constant;
1073 CORE_ADDR loc;
1074
1075 offset = bits (inst2, 0, 11);
1076 if (insn & 0x0080)
1077 loc = start + 4 + offset;
1078 else
1079 loc = start + 4 - offset;
1080
1081 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1082 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1083 }
1084
1085 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1086 {
1087 /* Constant pool loads. */
1088 unsigned int constant;
1089 CORE_ADDR loc;
1090
1091 offset = bits (inst2, 0, 7) << 2;
1092 if (insn & 0x0080)
1093 loc = start + 4 + offset;
1094 else
1095 loc = start + 4 - offset;
1096
1097 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1098 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1099
1100 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1101 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1102 }
1103
1104 else if (thumb2_instruction_changes_pc (insn, inst2))
1105 {
1106 /* Don't scan past anything that might change control flow. */
1107 break;
1108 }
1109 else
1110 {
1111 /* The optimizer might shove anything into the prologue,
1112 so we just skip what we don't recognize. */
1113 unrecognized_pc = start;
1114 }
1115
1116 start += 2;
1117 }
1118 else if (thumb_instruction_changes_pc (insn))
1119 {
1120 /* Don't scan past anything that might change control flow. */
1121 break;
1122 }
1123 else
1124 {
1125 /* The optimizer might shove anything into the prologue,
1126 so we just skip what we don't recognize. */
1127 unrecognized_pc = start;
1128 }
1129
1130 start += 2;
1131 }
1132
1133 if (arm_debug)
1134 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1135 paddress (gdbarch, start));
1136
1137 if (unrecognized_pc == 0)
1138 unrecognized_pc = start;
1139
1140 if (cache == NULL)
1141 {
1142 do_cleanups (back_to);
1143 return unrecognized_pc;
1144 }
1145
1146 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1147 {
1148 /* Frame pointer is fp. Frame size is constant. */
1149 cache->framereg = ARM_FP_REGNUM;
1150 cache->framesize = -regs[ARM_FP_REGNUM].k;
1151 }
1152 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1153 {
1154 /* Frame pointer is r7. Frame size is constant. */
1155 cache->framereg = THUMB_FP_REGNUM;
1156 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1157 }
1158 else
1159 {
1160 /* Try the stack pointer... this is a bit desperate. */
1161 cache->framereg = ARM_SP_REGNUM;
1162 cache->framesize = -regs[ARM_SP_REGNUM].k;
1163 }
1164
1165 for (i = 0; i < 16; i++)
1166 if (pv_area_find_reg (stack, gdbarch, i, &offset))
1167 cache->saved_regs[i].addr = offset;
1168
1169 do_cleanups (back_to);
1170 return unrecognized_pc;
1171 }
1172
1173
1174 /* Try to analyze the instructions starting from PC, which load symbol
1175 __stack_chk_guard. Return the address of instruction after loading this
1176 symbol, set the dest register number to *BASEREG, and set the size of
1177 instructions for loading symbol in OFFSET. Return 0 if instructions are
1178 not recognized. */
1179
static CORE_ADDR
arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
				 unsigned int *destreg, int *offset)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  int is_thumb = arm_pc_is_thumb (gdbarch, pc);
  unsigned int low, high, address;

  /* ADDRESS remains zero unless a recognized load sequence is found;
     the caller treats zero as failure.  */
  address = 0;
  if (is_thumb)
    {
      unsigned short insn1
	= read_memory_unsigned_integer (pc, 2, byte_order_for_code);

      if ((insn1 & 0xf800) == 0x4800)	/* ldr Rd, #immed */
	{
	  /* PC-relative literal load, encoding T1.  */
	  *destreg = bits (insn1, 8, 10);
	  *offset = 2;
	  address = bits (insn1, 0, 7);
	  /* NOTE(review): this stores the 8-bit literal-pool word
	     offset, not an absolute address, yet the movw/movt paths
	     below store a full 32-bit address and the caller passes the
	     result to lookup_minimal_symbol_by_pc.  Confirm this path
	     is intended.  */
	}
      else if ((insn1 & 0xfbf0) == 0xf240)	/* movw Rd, #const */
	{
	  unsigned short insn2
	    = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);

	  /* Lower half of the symbol's address.  */
	  low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);

	  insn1
	    = read_memory_unsigned_integer (pc + 4, 2, byte_order_for_code);
	  insn2
	    = read_memory_unsigned_integer (pc + 6, 2, byte_order_for_code);

	  /* movt Rd, #const */
	  if ((insn1 & 0xfbc0) == 0xf2c0)
	    {
	      /* movw/movt pair: combine the two halves.  */
	      high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
	      *destreg = bits (insn2, 8, 11);
	      *offset = 8;
	      address = (high << 16 | low);
	    }
	}
    }
  else
    {
      unsigned int insn
	= read_memory_unsigned_integer (pc, 4, byte_order_for_code);

      if ((insn & 0x0e5f0000) == 0x041f0000)	/* ldr Rd, #immed */
	{
	  /* PC-relative literal load, encoding A1.  */
	  address = bits (insn, 0, 11);
	  *destreg = bits (insn, 12, 15);
	  *offset = 4;
	}
      else if ((insn & 0x0ff00000) == 0x03000000)	/* movw Rd, #const */
	{
	  /* Lower half of the symbol's address.  */
	  low = EXTRACT_MOVW_MOVT_IMM_A (insn);

	  insn
	    = read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code);

	  if ((insn & 0x0ff00000) == 0x03400000)	/* movt Rd, #const */
	    {
	      /* movw/movt pair: combine the two halves.  */
	      high = EXTRACT_MOVW_MOVT_IMM_A (insn);
	      *destreg = bits (insn, 12, 15);
	      *offset = 8;
	      address = (high << 16 | low);
	    }
	}
    }

  return address;
}
1252
1253 /* Try to skip a sequence of instructions used for stack protector. If PC
1254 points to the first instruction of this sequence, return the address of
1255 first instruction after this sequence, otherwise, return original PC.
1256
1257 On arm, this sequence of instructions is composed of mainly three steps,
1258 Step 1: load symbol __stack_chk_guard,
1259 Step 2: load from address of __stack_chk_guard,
1260 Step 3: store it to somewhere else.
1261
1262 Usually, instructions on step 2 and step 3 are the same on various ARM
1263 architectures. On step 2, it is one instruction 'ldr Rx, [Rn, #0]', and
1264 on step 3, it is also one instruction 'str Rx, [r7, #immd]'. However,
1265 instructions in step 1 vary from different ARM architectures. On ARMv7,
1266 they are,
1267
1268 movw Rn, #:lower16:__stack_chk_guard
1269 movt Rn, #:upper16:__stack_chk_guard
1270
1271 On ARMv5t, it is,
1272
1273 ldr Rn, .Label
1274 ....
   .Label:
1276 .word __stack_chk_guard
1277
1278 Since ldr/str is a very popular instruction, we can't use them as
1279 'fingerprint' or 'signature' of stack protector sequence. Here we choose
1280 sequence {movw/movt, ldr}/ldr/str plus symbol __stack_chk_guard, if not
   stripped, as the 'fingerprint' of a stack protector code sequence.  */
1282
static CORE_ADDR
arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned int basereg;
  struct bound_minimal_symbol stack_chk_guard;
  int offset;
  int is_thumb = arm_pc_is_thumb (gdbarch, pc);
  CORE_ADDR addr;

  /* Try to parse the instructions in Step 1.  On failure, the sequence
     is not a stack-protector preamble; leave PC unchanged.  */
  addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
					   &basereg, &offset);
  if (!addr)
    return pc;

  stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
  /* If name of symbol doesn't start with '__stack_chk_guard', this
     instruction sequence is not for stack protector.  If symbol is
     removed, we conservatively think this sequence is for stack protector.  */
  if (stack_chk_guard.minsym
      && strncmp (MSYMBOL_LINKAGE_NAME (stack_chk_guard.minsym),
		  "__stack_chk_guard",
		  strlen ("__stack_chk_guard")) != 0)
    return pc;

  if (is_thumb)
    {
      unsigned int destreg;
      unsigned short insn
	= read_memory_unsigned_integer (pc + offset, 2, byte_order_for_code);

      /* Step 2: ldr Rd, [Rn, #immed], encoding T1.  */
      if ((insn & 0xf800) != 0x6800)
	return pc;
      /* The load must go through the register that received the
	 guard's address in Step 1.  */
      if (bits (insn, 3, 5) != basereg)
	return pc;
      destreg = bits (insn, 0, 2);

      insn = read_memory_unsigned_integer (pc + offset + 2, 2,
					   byte_order_for_code);
      /* Step 3: str Rd, [Rn, #immed], encoding T1.  */
      if ((insn & 0xf800) != 0x6000)
	return pc;
      /* The store must write back the value just loaded.  */
      if (destreg != bits (insn, 0, 2))
	return pc;
    }
  else
    {
      unsigned int destreg;
      unsigned int insn
	= read_memory_unsigned_integer (pc + offset, 4, byte_order_for_code);

      /* Step 2: ldr Rd, [Rn, #immed], encoding A1.  */
      if ((insn & 0x0e500000) != 0x04100000)
	return pc;
      /* The load must go through the register that received the
	 guard's address in Step 1.  */
      if (bits (insn, 16, 19) != basereg)
	return pc;
      destreg = bits (insn, 12, 15);
      /* Step 3: str Rd, [Rn, #immed], encoding A1.  */
      insn = read_memory_unsigned_integer (pc + offset + 4,
					   4, byte_order_for_code);
      if ((insn & 0x0e500000) != 0x04000000)
	return pc;
      /* The store must write back the value just loaded.  */
      if (bits (insn, 12, 15) != destreg)
	return pc;
    }
  /* The size of total two instructions ldr/str is 4 on Thumb-2, while 8
     on arm.  */
  if (is_thumb)
    return pc + offset + 4;
  else
    return pc + offset + 8;
}
1357
1358 /* Advance the PC across any function entry prologue instructions to
1359 reach some "real" code.
1360
1361 The APCS (ARM Procedure Call Standard) defines the following
1362 prologue:
1363
1364 mov ip, sp
1365 [stmfd sp!, {a1,a2,a3,a4}]
1366 stmfd sp!, {...,fp,ip,lr,pc}
1367 [stfe f7, [sp, #-12]!]
1368 [stfe f6, [sp, #-12]!]
1369 [stfe f5, [sp, #-12]!]
1370 [stfe f4, [sp, #-12]!]
1371 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1372
static CORE_ADDR
arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned long inst;
  CORE_ADDR skip_pc;
  CORE_ADDR func_addr, limit_pc;

  /* See if we can determine the end of the prologue via the symbol table.
     If so, then return either PC, or the PC after the prologue, whichever
     is greater.  */
  if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
    {
      CORE_ADDR post_prologue_pc
	= skip_prologue_using_sal (gdbarch, func_addr);
      struct symtab *s = find_pc_symtab (func_addr);

      /* Also step over a stack-protector preamble, if present, so the
	 returned PC lands on the first "real" statement.  */
      if (post_prologue_pc)
	post_prologue_pc
	  = arm_skip_stack_protector (post_prologue_pc, gdbarch);


      /* GCC always emits a line note before the prologue and another
	 one after, even if the two are at the same address or on the
	 same line.  Take advantage of this so that we do not need to
	 know every instruction that might appear in the prologue.  We
	 will have producer information for most binaries; if it is
	 missing (e.g. for -gstabs), assume the GNU tools.  */
      if (post_prologue_pc
	  && (s == NULL
	      || s->producer == NULL
	      || strncmp (s->producer, "GNU ", sizeof ("GNU ") - 1) == 0 
	      || strncmp (s->producer, "clang ", sizeof ("clang ") - 1) == 0))
	return post_prologue_pc;

      if (post_prologue_pc != 0)
	{
	  CORE_ADDR analyzed_limit;

	  /* For non-GCC compilers, make sure the entire line is an
	     acceptable prologue; GDB will round this function's
	     return value up to the end of the following line so we
	     can not skip just part of a line (and we do not want to).

	     RealView does not treat the prologue specially, but does
	     associate prologue code with the opening brace; so this
	     lets us skip the first line if we think it is the opening
	     brace.  */
	  if (arm_pc_is_thumb (gdbarch, func_addr))
	    analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
						     post_prologue_pc, NULL);
	  else
	    analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
						   post_prologue_pc, NULL);

	  /* If the analyzer stopped early, the SAL-derived end is not
	     all prologue; fall back to the function start.  */
	  if (analyzed_limit != post_prologue_pc)
	    return func_addr;

	  return post_prologue_pc;
	}
    }

  /* Can't determine prologue from the symbol table, need to examine
     instructions.  */

  /* Find an upper limit on the function prologue using the debug
     information.  If the debug information could not be used to provide
     that bound, then use an arbitrary large number as the upper bound.  */
  /* Like arm_scan_prologue, stop no later than pc + 64.  */
  limit_pc = skip_prologue_using_sal (gdbarch, pc);
  if (limit_pc == 0)
    limit_pc = pc + 64;          /* Magic.  */


  /* Check if this is Thumb code.  */
  if (arm_pc_is_thumb (gdbarch, pc))
    return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);

  /* ARM mode: step over each instruction we recognize as prologue
     material, and stop at the first one we do not.  */
  for (skip_pc = pc; skip_pc < limit_pc; skip_pc += 4)
    {
      inst = read_memory_unsigned_integer (skip_pc, 4, byte_order_for_code);

      /* "mov ip, sp" is no longer a required part of the prologue.  */
      if (inst == 0xe1a0c00d)			/* mov ip, sp */
	continue;

      if ((inst & 0xfffff000) == 0xe28dc000) /* add ip, sp #n */
	continue;

      if ((inst & 0xfffff000) == 0xe24dc000) /* sub ip, sp #n */
	continue;

      /* Some prologues begin with "str lr, [sp, #-4]!".  */
      if (inst == 0xe52de004)			/* str lr, [sp, #-4]! */
	continue;

      if ((inst & 0xfffffff0) == 0xe92d0000)	/* stmfd sp!,{a1,a2,a3,a4} */
	continue;

      if ((inst & 0xfffff800) == 0xe92dd800)	/* stmfd sp!,{fp,ip,lr,pc} */
	continue;

      /* Any insns after this point may float into the code, if it makes
	 for better instruction scheduling, so we skip them only if we
	 find them, but still consider the function to be frame-ful.  */

      /* We may have either one sfmfd instruction here, or several stfe
	 insns, depending on the version of floating point code we
	 support.  */
      if ((inst & 0xffbf0fff) == 0xec2d0200)	/* sfmfd fn, <cnt>, [sp]! */
	continue;

      if ((inst & 0xffff8fff) == 0xed6d0103)	/* stfe fn, [sp, #-12]! */
	continue;

      if ((inst & 0xfffff000) == 0xe24cb000)	/* sub fp, ip, #nn */
	continue;

      if ((inst & 0xfffff000) == 0xe24dd000)	/* sub sp, sp, #nn */
	continue;

      if ((inst & 0xffffc000) == 0xe54b0000	/* strb r(0123),[r11,#-nn] */
	  || (inst & 0xffffc0f0) == 0xe14b00b0	/* strh r(0123),[r11,#-nn] */
	  || (inst & 0xffffc000) == 0xe50b0000)	/* str r(0123),[r11,#-nn] */
	continue;

      if ((inst & 0xffffc000) == 0xe5cd0000	/* strb r(0123),[sp,#nn] */
	  || (inst & 0xffffc0f0) == 0xe1cd00b0	/* strh r(0123),[sp,#nn] */
	  || (inst & 0xffffc000) == 0xe58d0000)	/* str r(0123),[sp,#nn] */
	continue;

      /* Un-recognized instruction; stop scanning.  */
      break;
    }

  return skip_pc;		/* End of prologue.  */
}
1510
1511 /* *INDENT-OFF* */
1512 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1513 This function decodes a Thumb function prologue to determine:
1514 1) the size of the stack frame
1515 2) which registers are saved on it
1516 3) the offsets of saved regs
1517 4) the offset from the stack pointer to the frame pointer
1518
1519 A typical Thumb function prologue would create this stack frame
1520 (offsets relative to FP)
1521 old SP -> 24 stack parameters
1522 20 LR
1523 16 R7
1524 R7 -> 0 local variables (16 bytes)
1525 SP -> -12 additional stack space (12 bytes)
1526 The frame size would thus be 36 bytes, and the frame offset would be
1527 12 bytes. The frame register is R7.
1528
   The comments for thumb_analyze_prologue() describe the algorithm we use
1530 to detect the end of the prolog. */
1531 /* *INDENT-ON* */
1532
1533 static void
1534 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1535 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1536 {
1537 CORE_ADDR prologue_start;
1538 CORE_ADDR prologue_end;
1539
1540 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1541 &prologue_end))
1542 {
1543 /* See comment in arm_scan_prologue for an explanation of
1544 this heuristics. */
1545 if (prologue_end > prologue_start + 64)
1546 {
1547 prologue_end = prologue_start + 64;
1548 }
1549 }
1550 else
1551 /* We're in the boondocks: we have no idea where the start of the
1552 function is. */
1553 return;
1554
1555 prologue_end = min (prologue_end, prev_pc);
1556
1557 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1558 }
1559
1560 /* Return 1 if THIS_INSTR might change control flow, 0 otherwise. */
1561
static int
arm_instruction_changes_pc (uint32_t this_instr)
{
  if (bits (this_instr, 28, 31) == INST_NV)
    /* Unconditional instructions.  */
    switch (bits (this_instr, 24, 27))
      {
      case 0xa:
      case 0xb:
	/* Branch with Link and change to Thumb.  */
	return 1;
      case 0xc:
      case 0xd:
      case 0xe:
	/* Coprocessor register transfer.  */
	if (bits (this_instr, 12, 15) == 15)
	  error (_("Invalid update to pc in instruction"));
	return 0;
      default:
	return 0;
      }
  else
    switch (bits (this_instr, 25, 27))
      {
      case 0x0:
	if (bits (this_instr, 23, 24) == 2 && bit (this_instr, 20) == 0)
	  {
	    /* Multiplies and extra load/stores.  */
	    if (bit (this_instr, 4) == 1 && bit (this_instr, 7) == 1)
	      /* Neither multiplies nor extension load/stores are allowed
		 to modify PC.  */
	      return 0;

	    /* Otherwise, miscellaneous instructions.  */

	    /* BX <reg>, BXJ <reg>, BLX <reg> */
	    if (bits (this_instr, 4, 27) == 0x12fff1
		|| bits (this_instr, 4, 27) == 0x12fff2
		|| bits (this_instr, 4, 27) == 0x12fff3)
	      return 1;

	    /* Other miscellaneous instructions are unpredictable if they
	       modify PC.  */
	    return 0;
	  }
	/* Data processing instruction.  Fall through.  */

      case 0x1:
	/* Data processing: writes the PC if Rd (bits 12-15) is r15.  */
	if (bits (this_instr, 12, 15) == 15)
	  return 1;
	else
	  return 0;

      case 0x2:
      case 0x3:
	/* Media instructions and architecturally undefined instructions.  */
	if (bits (this_instr, 25, 27) == 3 && bit (this_instr, 4) == 1)
	  return 0;

	/* Stores.  */
	if (bit (this_instr, 20) == 0)
	  return 0;

	/* Loads.  A load into the PC is a branch.  */
	if (bits (this_instr, 12, 15) == ARM_PC_REGNUM)
	  return 1;
	else
	  return 0;

      case 0x4:
	/* Load/store multiple.  Only an LDM whose register list
	   includes the PC (bit 15) branches.  */
	if (bit (this_instr, 20) == 1 && bit (this_instr, 15) == 1)
	  return 1;
	else
	  return 0;

      case 0x5:
	/* Branch and branch with link.  */
	return 1;

      case 0x6:
      case 0x7:
	/* Coprocessor transfers or SWIs can not affect PC.  */
	return 0;

      default:
	internal_error (__FILE__, __LINE__, _("bad value in switch"));
      }
}
1651
1652 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1653 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1654 fill it in. Return the first address not recognized as a prologue
1655 instruction.
1656
1657 We recognize all the instructions typically found in ARM prologues,
1658 plus harmless instructions which can be skipped (either for analysis
1659 purposes, or a more restrictive set that can be skipped when finding
1660 the end of the prologue). */
1661
1662 static CORE_ADDR
1663 arm_analyze_prologue (struct gdbarch *gdbarch,
1664 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1665 struct arm_prologue_cache *cache)
1666 {
1667 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1668 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1669 int regno;
1670 CORE_ADDR offset, current_pc;
1671 pv_t regs[ARM_FPS_REGNUM];
1672 struct pv_area *stack;
1673 struct cleanup *back_to;
1674 int framereg, framesize;
1675 CORE_ADDR unrecognized_pc = 0;
1676
1677 /* Search the prologue looking for instructions that set up the
1678 frame pointer, adjust the stack pointer, and save registers.
1679
1680 Be careful, however, and if it doesn't look like a prologue,
1681 don't try to scan it. If, for instance, a frameless function
1682 begins with stmfd sp!, then we will tell ourselves there is
1683 a frame, which will confuse stack traceback, as well as "finish"
1684 and other operations that rely on a knowledge of the stack
1685 traceback. */
1686
1687 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1688 regs[regno] = pv_register (regno, 0);
1689 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1690 back_to = make_cleanup_free_pv_area (stack);
1691
1692 for (current_pc = prologue_start;
1693 current_pc < prologue_end;
1694 current_pc += 4)
1695 {
1696 unsigned int insn
1697 = read_memory_unsigned_integer (current_pc, 4, byte_order_for_code);
1698
1699 if (insn == 0xe1a0c00d) /* mov ip, sp */
1700 {
1701 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1702 continue;
1703 }
1704 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1705 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1706 {
1707 unsigned imm = insn & 0xff; /* immediate value */
1708 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1709 int rd = bits (insn, 12, 15);
1710 imm = (imm >> rot) | (imm << (32 - rot));
1711 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1712 continue;
1713 }
1714 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1715 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1716 {
1717 unsigned imm = insn & 0xff; /* immediate value */
1718 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1719 int rd = bits (insn, 12, 15);
1720 imm = (imm >> rot) | (imm << (32 - rot));
1721 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1722 continue;
1723 }
1724 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1725 [sp, #-4]! */
1726 {
1727 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1728 break;
1729 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1730 pv_area_store (stack, regs[ARM_SP_REGNUM], 4,
1731 regs[bits (insn, 12, 15)]);
1732 continue;
1733 }
1734 else if ((insn & 0xffff0000) == 0xe92d0000)
1735 /* stmfd sp!, {..., fp, ip, lr, pc}
1736 or
1737 stmfd sp!, {a1, a2, a3, a4} */
1738 {
1739 int mask = insn & 0xffff;
1740
1741 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1742 break;
1743
1744 /* Calculate offsets of saved registers. */
1745 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1746 if (mask & (1 << regno))
1747 {
1748 regs[ARM_SP_REGNUM]
1749 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1750 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
1751 }
1752 }
1753 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1754 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1755 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1756 {
1757 /* No need to add this to saved_regs -- it's just an arg reg. */
1758 continue;
1759 }
1760 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1761 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1762 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1763 {
1764 /* No need to add this to saved_regs -- it's just an arg reg. */
1765 continue;
1766 }
1767 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1768 { registers } */
1769 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1770 {
1771 /* No need to add this to saved_regs -- it's just arg regs. */
1772 continue;
1773 }
1774 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1775 {
1776 unsigned imm = insn & 0xff; /* immediate value */
1777 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1778 imm = (imm >> rot) | (imm << (32 - rot));
1779 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1780 }
1781 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1782 {
1783 unsigned imm = insn & 0xff; /* immediate value */
1784 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1785 imm = (imm >> rot) | (imm << (32 - rot));
1786 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
1787 }
1788 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1789 [sp, -#c]! */
1790 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1791 {
1792 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1793 break;
1794
1795 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1796 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
1797 pv_area_store (stack, regs[ARM_SP_REGNUM], 12, regs[regno]);
1798 }
1799 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1800 [sp!] */
1801 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1802 {
1803 int n_saved_fp_regs;
1804 unsigned int fp_start_reg, fp_bound_reg;
1805
1806 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1807 break;
1808
1809 if ((insn & 0x800) == 0x800) /* N0 is set */
1810 {
1811 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1812 n_saved_fp_regs = 3;
1813 else
1814 n_saved_fp_regs = 1;
1815 }
1816 else
1817 {
1818 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1819 n_saved_fp_regs = 2;
1820 else
1821 n_saved_fp_regs = 4;
1822 }
1823
1824 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
1825 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1826 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
1827 {
1828 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1829 pv_area_store (stack, regs[ARM_SP_REGNUM], 12,
1830 regs[fp_start_reg++]);
1831 }
1832 }
1833 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1834 {
1835 /* Allow some special function calls when skipping the
1836 prologue; GCC generates these before storing arguments to
1837 the stack. */
1838 CORE_ADDR dest = BranchDest (current_pc, insn);
1839
1840 if (skip_prologue_function (gdbarch, dest, 0))
1841 continue;
1842 else
1843 break;
1844 }
1845 else if ((insn & 0xf0000000) != 0xe0000000)
1846 break; /* Condition not true, exit early. */
1847 else if (arm_instruction_changes_pc (insn))
1848 /* Don't scan past anything that might change control flow. */
1849 break;
1850 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1851 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1852 /* Ignore block loads from the stack, potentially copying
1853 parameters from memory. */
1854 continue;
1855 else if ((insn & 0xfc500000) == 0xe4100000
1856 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1857 /* Similarly ignore single loads from the stack. */
1858 continue;
1859 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1860 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1861 register instead of the stack. */
1862 continue;
1863 else
1864 {
1865 /* The optimizer might shove anything into the prologue,
1866 so we just skip what we don't recognize. */
1867 unrecognized_pc = current_pc;
1868 continue;
1869 }
1870 }
1871
1872 if (unrecognized_pc == 0)
1873 unrecognized_pc = current_pc;
1874
1875 /* The frame size is just the distance from the frame register
1876 to the original stack pointer. */
1877 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1878 {
1879 /* Frame pointer is fp. */
1880 framereg = ARM_FP_REGNUM;
1881 framesize = -regs[ARM_FP_REGNUM].k;
1882 }
1883 else
1884 {
1885 /* Try the stack pointer... this is a bit desperate. */
1886 framereg = ARM_SP_REGNUM;
1887 framesize = -regs[ARM_SP_REGNUM].k;
1888 }
1889
1890 if (cache)
1891 {
1892 cache->framereg = framereg;
1893 cache->framesize = framesize;
1894
1895 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1896 if (pv_area_find_reg (stack, gdbarch, regno, &offset))
1897 cache->saved_regs[regno].addr = offset;
1898 }
1899
1900 if (arm_debug)
1901 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1902 paddress (gdbarch, unrecognized_pc));
1903
1904 do_cleanups (back_to);
1905 return unrecognized_pc;
1906 }
1907
1908 static void
1909 arm_scan_prologue (struct frame_info *this_frame,
1910 struct arm_prologue_cache *cache)
1911 {
1912 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1913 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1914 int regno;
1915 CORE_ADDR prologue_start, prologue_end, current_pc;
1916 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1917 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
1918 pv_t regs[ARM_FPS_REGNUM];
1919 struct pv_area *stack;
1920 struct cleanup *back_to;
1921 CORE_ADDR offset;
1922
1923 /* Assume there is no frame until proven otherwise. */
1924 cache->framereg = ARM_SP_REGNUM;
1925 cache->framesize = 0;
1926
1927 /* Check for Thumb prologue. */
1928 if (arm_frame_is_thumb (this_frame))
1929 {
1930 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1931 return;
1932 }
1933
1934 /* Find the function prologue. If we can't find the function in
1935 the symbol table, peek in the stack frame to find the PC. */
1936 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1937 &prologue_end))
1938 {
1939 /* One way to find the end of the prologue (which works well
1940 for unoptimized code) is to do the following:
1941
1942 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1943
1944 if (sal.line == 0)
1945 prologue_end = prev_pc;
1946 else if (sal.end < prologue_end)
1947 prologue_end = sal.end;
1948
1949 This mechanism is very accurate so long as the optimizer
1950 doesn't move any instructions from the function body into the
1951 prologue. If this happens, sal.end will be the last
1952 instruction in the first hunk of prologue code just before
1953 the first instruction that the scheduler has moved from
1954 the body to the prologue.
1955
1956 In order to make sure that we scan all of the prologue
1957 instructions, we use a slightly less accurate mechanism which
1958 may scan more than necessary. To help compensate for this
1959 lack of accuracy, the prologue scanning loop below contains
1960 several clauses which'll cause the loop to terminate early if
1961 an implausible prologue instruction is encountered.
1962
1963 The expression
1964
1965 prologue_start + 64
1966
1967 is a suitable endpoint since it accounts for the largest
1968 possible prologue plus up to five instructions inserted by
1969 the scheduler. */
1970
1971 if (prologue_end > prologue_start + 64)
1972 {
1973 prologue_end = prologue_start + 64; /* See above. */
1974 }
1975 }
1976 else
1977 {
1978 /* We have no symbol information. Our only option is to assume this
1979 function has a standard stack frame and the normal frame register.
1980 Then, we can find the value of our frame pointer on entrance to
1981 the callee (or at the present moment if this is the innermost frame).
1982 The value stored there should be the address of the stmfd + 8. */
1983 CORE_ADDR frame_loc;
1984 LONGEST return_value;
1985
1986 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
1987 if (!safe_read_memory_integer (frame_loc, 4, byte_order, &return_value))
1988 return;
1989 else
1990 {
1991 prologue_start = gdbarch_addr_bits_remove
1992 (gdbarch, return_value) - 8;
1993 prologue_end = prologue_start + 64; /* See above. */
1994 }
1995 }
1996
1997 if (prev_pc < prologue_end)
1998 prologue_end = prev_pc;
1999
2000 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
2001 }
2002
2003 static struct arm_prologue_cache *
2004 arm_make_prologue_cache (struct frame_info *this_frame)
2005 {
2006 int reg;
2007 struct arm_prologue_cache *cache;
2008 CORE_ADDR unwound_fp;
2009
2010 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2011 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2012
2013 arm_scan_prologue (this_frame, cache);
2014
2015 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
2016 if (unwound_fp == 0)
2017 return cache;
2018
2019 cache->prev_sp = unwound_fp + cache->framesize;
2020
2021 /* Calculate actual addresses of saved registers using offsets
2022 determined by arm_scan_prologue. */
2023 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2024 if (trad_frame_addr_p (cache->saved_regs, reg))
2025 cache->saved_regs[reg].addr += cache->prev_sp;
2026
2027 return cache;
2028 }
2029
2030 /* Our frame ID for a normal frame is the current function's starting PC
2031 and the caller's SP when we were called. */
2032
2033 static void
2034 arm_prologue_this_id (struct frame_info *this_frame,
2035 void **this_cache,
2036 struct frame_id *this_id)
2037 {
2038 struct arm_prologue_cache *cache;
2039 struct frame_id id;
2040 CORE_ADDR pc, func;
2041
2042 if (*this_cache == NULL)
2043 *this_cache = arm_make_prologue_cache (this_frame);
2044 cache = *this_cache;
2045
2046 /* This is meant to halt the backtrace at "_start". */
2047 pc = get_frame_pc (this_frame);
2048 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
2049 return;
2050
2051 /* If we've hit a wall, stop. */
2052 if (cache->prev_sp == 0)
2053 return;
2054
2055 /* Use function start address as part of the frame ID. If we cannot
2056 identify the start address (due to missing symbol information),
2057 fall back to just using the current PC. */
2058 func = get_frame_func (this_frame);
2059 if (!func)
2060 func = pc;
2061
2062 id = frame_id_build (cache->prev_sp, func);
2063 *this_id = id;
2064 }
2065
2066 static struct value *
2067 arm_prologue_prev_register (struct frame_info *this_frame,
2068 void **this_cache,
2069 int prev_regnum)
2070 {
2071 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2072 struct arm_prologue_cache *cache;
2073
2074 if (*this_cache == NULL)
2075 *this_cache = arm_make_prologue_cache (this_frame);
2076 cache = *this_cache;
2077
2078 /* If we are asked to unwind the PC, then we need to return the LR
2079 instead. The prologue may save PC, but it will point into this
2080 frame's prologue, not the next frame's resume location. Also
2081 strip the saved T bit. A valid LR may have the low bit set, but
2082 a valid PC never does. */
2083 if (prev_regnum == ARM_PC_REGNUM)
2084 {
2085 CORE_ADDR lr;
2086
2087 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2088 return frame_unwind_got_constant (this_frame, prev_regnum,
2089 arm_addr_bits_remove (gdbarch, lr));
2090 }
2091
2092 /* SP is generally not saved to the stack, but this frame is
2093 identified by the next frame's stack pointer at the time of the call.
2094 The value was already reconstructed into PREV_SP. */
2095 if (prev_regnum == ARM_SP_REGNUM)
2096 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
2097
2098 /* The CPSR may have been changed by the call instruction and by the
2099 called function. The only bit we can reconstruct is the T bit,
2100 by checking the low bit of LR as of the call. This is a reliable
2101 indicator of Thumb-ness except for some ARM v4T pre-interworking
2102 Thumb code, which could get away with a clear low bit as long as
2103 the called function did not use bx. Guess that all other
2104 bits are unchanged; the condition flags are presumably lost,
2105 but the processor status is likely valid. */
2106 if (prev_regnum == ARM_PS_REGNUM)
2107 {
2108 CORE_ADDR lr, cpsr;
2109 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
2110
2111 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
2112 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2113 if (IS_THUMB_ADDR (lr))
2114 cpsr |= t_bit;
2115 else
2116 cpsr &= ~t_bit;
2117 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
2118 }
2119
2120 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
2121 prev_regnum);
2122 }
2123
/* Prologue-analysis based unwinder for normal ARM frames, using
   arm_prologue_this_id / arm_prologue_prev_register above.  */
struct frame_unwind arm_prologue_unwind = {
  NORMAL_FRAME,
  default_frame_unwind_stop_reason,
  arm_prologue_this_id,
  arm_prologue_prev_register,
  NULL,
  default_frame_sniffer
};
2132
2133 /* Maintain a list of ARM exception table entries per objfile, similar to the
2134 list of mapping symbols. We only cache entries for standard ARM-defined
2135 personality routines; the cache will contain only the frame unwinding
2136 instructions associated with the entry (not the descriptors). */
2137
/* Per-objfile registry key under which the parsed exception table
   cache (struct arm_exidx_data) is stored.  */
static const struct objfile_data *arm_exidx_data_key;

/* One cached exception table entry: the covered function's start
   address (as a section offset) plus the normalized unwind
   instruction bytes, or NULL if no unwind data was extracted.  */
struct arm_exidx_entry
{
  bfd_vma addr;
  gdb_byte *entry;
};
typedef struct arm_exidx_entry arm_exidx_entry_s;
DEF_VEC_O(arm_exidx_entry_s);

/* All exception table entries for one objfile: one vector per BFD
   section, indexed by the section's index.  */
struct arm_exidx_data
{
  VEC(arm_exidx_entry_s) **section_maps;
};
2152
/* Destructor for arm_exidx_data_key: free the per-section entry
   vectors.  The arm_exidx_data object itself and the section_maps
   array live on the objfile obstack and need no explicit release.  */

static void
arm_exidx_data_free (struct objfile *objfile, void *arg)
{
  struct arm_exidx_data *data = arg;
  unsigned int i;

  for (i = 0; i < objfile->obfd->section_count; i++)
    VEC_free (arm_exidx_entry_s, data->section_maps[i]);
}
2162
2163 static inline int
2164 arm_compare_exidx_entries (const struct arm_exidx_entry *lhs,
2165 const struct arm_exidx_entry *rhs)
2166 {
2167 return lhs->addr < rhs->addr;
2168 }
2169
2170 static struct obj_section *
2171 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2172 {
2173 struct obj_section *osect;
2174
2175 ALL_OBJFILE_OSECTIONS (objfile, osect)
2176 if (bfd_get_section_flags (objfile->obfd,
2177 osect->the_bfd_section) & SEC_ALLOC)
2178 {
2179 bfd_vma start, size;
2180 start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
2181 size = bfd_get_section_size (osect->the_bfd_section);
2182
2183 if (start <= vma && vma < start + size)
2184 return osect;
2185 }
2186
2187 return NULL;
2188 }
2189
/* Parse contents of exception table and exception index sections
   of OBJFILE, and fill in the exception table entry cache.

   For each entry that refers to a standard ARM-defined personality
   routine, extract the frame unwinding instructions (from either
   the index or the table section).  The unwinding instructions
   are normalized by:
   - extracting them from the rest of the table data
   - converting to host endianness
   - appending the implicit 0xb0 ("Finish") code

   The extracted and normalized instructions are stored for later
   retrieval by the arm_find_exidx_entry routine.  */

static void
arm_exidx_new_objfile (struct objfile *objfile)
{
  struct cleanup *cleanups;
  struct arm_exidx_data *data;
  asection *exidx, *extab;
  bfd_vma exidx_vma = 0, extab_vma = 0;
  bfd_size_type exidx_size = 0, extab_size = 0;
  gdb_byte *exidx_data = NULL, *extab_data = NULL;
  LONGEST i;

  /* If we've already touched this file, do nothing.  */
  if (!objfile || objfile_data (objfile, arm_exidx_data_key) != NULL)
    return;
  cleanups = make_cleanup (null_cleanup, NULL);

  /* Read contents of exception table and index.  */
  exidx = bfd_get_section_by_name (objfile->obfd, ".ARM.exidx");
  if (exidx)
    {
      exidx_vma = bfd_section_vma (objfile->obfd, exidx);
      exidx_size = bfd_get_section_size (exidx);
      exidx_data = xmalloc (exidx_size);
      make_cleanup (xfree, exidx_data);

      if (!bfd_get_section_contents (objfile->obfd, exidx,
				     exidx_data, 0, exidx_size))
	{
	  do_cleanups (cleanups);
	  return;
	}
    }

  extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
  if (extab)
    {
      extab_vma = bfd_section_vma (objfile->obfd, extab);
      extab_size = bfd_get_section_size (extab);
      extab_data = xmalloc (extab_size);
      make_cleanup (xfree, extab_data);

      if (!bfd_get_section_contents (objfile->obfd, extab,
				     extab_data, 0, extab_size))
	{
	  do_cleanups (cleanups);
	  return;
	}
    }

  /* Allocate exception table data structure.  */
  data = OBSTACK_ZALLOC (&objfile->objfile_obstack, struct arm_exidx_data);
  set_objfile_data (objfile, arm_exidx_data_key, data);
  data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
				       objfile->obfd->section_count,
				       VEC(arm_exidx_entry_s) *);

  /* Fill in exception table.  Each index entry is two 32-bit words:
     a prel31 offset to the function start, and either an inline
     unwind description or a prel31 pointer into .ARM.extab.  */
  for (i = 0; i < exidx_size / 8; i++)
    {
      struct arm_exidx_entry new_exidx_entry;
      bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8);
      bfd_vma val = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8 + 4);
      bfd_vma addr = 0, word = 0;
      int n_bytes = 0, n_words = 0;
      struct obj_section *sec;
      gdb_byte *entry = NULL;

      /* Extract address of start of function.  The prel31 field is
	 sign-extended from bit 30 and is relative to the entry's own
	 address.  */
      idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
      idx += exidx_vma + i * 8;

      /* Find section containing function and compute section offset.  */
      sec = arm_obj_section_from_vma (objfile, idx);
      if (sec == NULL)
	continue;
      idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);

      /* Determine address of exception table entry.  */
      if (val == 1)
	{
	  /* EXIDX_CANTUNWIND -- no exception table entry present.  */
	}
      else if ((val & 0xff000000) == 0x80000000)
	{
	  /* Exception table entry embedded in .ARM.exidx
	     -- must be short form.  */
	  word = val;
	  n_bytes = 3;
	}
      else if (!(val & 0x80000000))
	{
	  /* Exception table entry in .ARM.extab.  The prel31 offset
	     is relative to the second word of this index entry.  */
	  addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
	  addr += exidx_vma + i * 8 + 4;

	  if (addr >= extab_vma && addr + 4 <= extab_vma + extab_size)
	    {
	      word = bfd_h_get_32 (objfile->obfd,
				   extab_data + addr - extab_vma);
	      addr += 4;

	      if ((word & 0xff000000) == 0x80000000)
		{
		  /* Short form.  */
		  n_bytes = 3;
		}
	      else if ((word & 0xff000000) == 0x81000000
		       || (word & 0xff000000) == 0x82000000)
		{
		  /* Long form.  */
		  n_bytes = 2;
		  n_words = ((word >> 16) & 0xff);
		}
	      else if (!(word & 0x80000000))
		{
		  bfd_vma pers;
		  struct obj_section *pers_sec;
		  int gnu_personality = 0;

		  /* Custom personality routine, referenced by another
		     prel31 offset.  */
		  pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
		  pers = UNMAKE_THUMB_ADDR (pers + addr - 4);

		  /* Check whether we've got one of the variants of the
		     GNU personality routines.  */
		  pers_sec = arm_obj_section_from_vma (objfile, pers);
		  if (pers_sec)
		    {
		      static const char *personality[] =
			{
			  "__gcc_personality_v0",
			  "__gxx_personality_v0",
			  "__gcj_personality_v0",
			  "__gnu_objc_personality_v0",
			  NULL
			};

		      CORE_ADDR pc = pers + obj_section_offset (pers_sec);
		      int k;

		      for (k = 0; personality[k]; k++)
			if (lookup_minimal_symbol_by_pc_name
			      (pc, personality[k], objfile))
			  {
			    gnu_personality = 1;
			    break;
			  }
		    }

		  /* If so, the next word contains a word count in the high
		     byte, followed by the same unwind instructions as the
		     pre-defined forms.  */
		  if (gnu_personality
		      && addr + 4 <= extab_vma + extab_size)
		    {
		      word = bfd_h_get_32 (objfile->obfd,
					   extab_data + addr - extab_vma);
		      addr += 4;
		      n_bytes = 3;
		      n_words = ((word >> 24) & 0xff);
		    }
		}
	    }
	}

      /* Sanity check address.  */
      if (n_words)
	if (addr < extab_vma || addr + 4 * n_words > extab_vma + extab_size)
	  n_words = n_bytes = 0;

      /* The unwind instructions reside in WORD (only the N_BYTES least
	 significant bytes are valid), followed by N_WORDS words in the
	 extab section starting at ADDR.  */
      if (n_bytes || n_words)
	{
	  /* +1 leaves room for the implied terminating 0xb0 below.  */
	  gdb_byte *p = entry = obstack_alloc (&objfile->objfile_obstack,
					       n_bytes + n_words * 4 + 1);

	  /* Copy the in-WORD bytes, most significant first (this also
	     converts to host byte order).  */
	  while (n_bytes--)
	    *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);

	  while (n_words--)
	    {
	      word = bfd_h_get_32 (objfile->obfd,
				   extab_data + addr - extab_vma);
	      addr += 4;

	      *p++ = (gdb_byte) ((word >> 24) & 0xff);
	      *p++ = (gdb_byte) ((word >> 16) & 0xff);
	      *p++ = (gdb_byte) ((word >> 8) & 0xff);
	      *p++ = (gdb_byte) (word & 0xff);
	    }

	  /* Implied "Finish" to terminate the list.  */
	  *p++ = 0xb0;
	}

      /* Push entry onto vector.  They are guaranteed to always
	 appear in order of increasing addresses.  */
      new_exidx_entry.addr = idx;
      new_exidx_entry.entry = entry;
      VEC_safe_push (arm_exidx_entry_s,
		     data->section_maps[sec->the_bfd_section->index],
		     &new_exidx_entry);
    }

  do_cleanups (cleanups);
}
2412
/* Search for the exception table entry covering MEMADDR.  If one is found,
   return a pointer to its data.  Otherwise, return 0.  If START is non-NULL,
   set *START to the start of the region covered by this entry.  */

static gdb_byte *
arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
{
  struct obj_section *sec;

  sec = find_pc_section (memaddr);
  if (sec != NULL)
    {
      struct arm_exidx_data *data;
      VEC(arm_exidx_entry_s) *map;
      /* The cached entries are keyed by section offset, so rebase
	 MEMADDR before searching.  */
      struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
      unsigned int idx;

      data = objfile_data (sec->objfile, arm_exidx_data_key);
      if (data != NULL)
	{
	  map = data->section_maps[sec->the_bfd_section->index];
	  if (!VEC_empty (arm_exidx_entry_s, map))
	    {
	      struct arm_exidx_entry *map_sym;

	      idx = VEC_lower_bound (arm_exidx_entry_s, map, &map_key,
				     arm_compare_exidx_entries);

	      /* VEC_lower_bound finds the earliest ordered insertion
		 point.  If the following symbol starts at this exact
		 address, we use that; otherwise, the preceding
		 exception table entry covers this address.  */
	      if (idx < VEC_length (arm_exidx_entry_s, map))
		{
		  map_sym = VEC_index (arm_exidx_entry_s, map, idx);
		  if (map_sym->addr == map_key.addr)
		    {
		      if (start)
			*start = map_sym->addr + obj_section_addr (sec);
		      return map_sym->entry;
		    }
		}

	      if (idx > 0)
		{
		  map_sym = VEC_index (arm_exidx_entry_s, map, idx - 1);
		  if (start)
		    *start = map_sym->addr + obj_section_addr (sec);
		  return map_sym->entry;
		}
	    }
	}
    }

  return NULL;
}
2469
/* Given the current frame THIS_FRAME, and its associated frame unwinding
   instruction list from the ARM exception table entry ENTRY, allocate and
   return a prologue cache structure describing how to unwind this frame.

   Return NULL if the unwinding instruction list contains a "spare",
   "reserved" or "refuse to unwind" instruction as defined in section
   "9.3 Frame unwinding instructions" of the "Exception Handling ABI
   for the ARM Architecture" document.  */

static struct arm_prologue_cache *
arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
{
  /* VSP is the EHABI "virtual stack pointer" being simulated while
     decoding; VSP_VALID tracks whether it needs reloading from the
     current frame after SP was set/popped.  */
  CORE_ADDR vsp = 0;
  int vsp_valid = 0;

  struct arm_prologue_cache *cache;
  cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
  cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);

  for (;;)
    {
      gdb_byte insn;

      /* Whenever we reload SP, we actually have to retrieve its
	 actual value in the current frame.  */
      if (!vsp_valid)
	{
	  if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
	    {
	      int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
	      vsp = get_frame_register_unsigned (this_frame, reg);
	    }
	  else
	    {
	      CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
	      vsp = get_frame_memory_unsigned (this_frame, addr, 4);
	    }

	  vsp_valid = 1;
	}

      /* Decode next unwind instruction.  */
      insn = *entry++;

      if ((insn & 0xc0) == 0)
	{
	  /* 00xxxxxx: vsp = vsp + (xxxxxx << 2) + 4.  */
	  int offset = insn & 0x3f;
	  vsp += (offset << 2) + 4;
	}
      else if ((insn & 0xc0) == 0x40)
	{
	  /* 01xxxxxx: vsp = vsp - (xxxxxx << 2) - 4.  */
	  int offset = insn & 0x3f;
	  vsp -= (offset << 2) + 4;
	}
      else if ((insn & 0xf0) == 0x80)
	{
	  /* 1000iiii iiiiiiii: pop registers under 12-bit mask.  */
	  int mask = ((insn & 0xf) << 8) | *entry++;
	  int i;

	  /* The special case of an all-zero mask identifies
	     "Refuse to unwind".  We return NULL to fall back
	     to the prologue analyzer.  */
	  if (mask == 0)
	    return NULL;

	  /* Pop registers r4..r15 under mask.  */
	  for (i = 0; i < 12; i++)
	    if (mask & (1 << i))
	      {
		cache->saved_regs[4 + i].addr = vsp;
		vsp += 4;
	      }

	  /* Special-case popping SP -- we need to reload vsp.  */
	  if (mask & (1 << (ARM_SP_REGNUM - 4)))
	    vsp_valid = 0;
	}
      else if ((insn & 0xf0) == 0x90)
	{
	  /* 1001nnnn: vsp = r[nnnn].  */
	  int reg = insn & 0xf;

	  /* Reserved cases.  */
	  if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
	    return NULL;

	  /* Set SP from another register and mark VSP for reload.  */
	  cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
	  vsp_valid = 0;
	}
      else if ((insn & 0xf0) == 0xa0)
	{
	  /* 1010Wnnn: pop r4..r[4+nnn], plus r14 if W is set.  */
	  int count = insn & 0x7;
	  int pop_lr = (insn & 0x8) != 0;
	  int i;

	  /* Pop r4..r[4+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[4 + i].addr = vsp;
	      vsp += 4;
	    }

	  /* If indicated by flag, pop LR as well.  */
	  if (pop_lr)
	    {
	      cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
	      vsp += 4;
	    }
	}
      else if (insn == 0xb0)
	{
	  /* 10110000: Finish.  */
	  /* We could only have updated PC by popping into it; if so, it
	     will show up as address.  Otherwise, copy LR into PC.  */
	  if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
	    cache->saved_regs[ARM_PC_REGNUM]
	      = cache->saved_regs[ARM_LR_REGNUM];

	  /* We're done.  */
	  break;
	}
      else if (insn == 0xb1)
	{
	  /* 10110001 0000iiii: pop r0..r3 under mask.  */
	  int mask = *entry++;
	  int i;

	  /* All-zero mask and mask >= 16 is "spare".  */
	  if (mask == 0 || mask >= 16)
	    return NULL;

	  /* Pop r0..r3 under mask.  */
	  for (i = 0; i < 4; i++)
	    if (mask & (1 << i))
	      {
		cache->saved_regs[i].addr = vsp;
		vsp += 4;
	      }
	}
      else if (insn == 0xb2)
	{
	  /* 10110010 uleb128: vsp = vsp + 0x204 + (uleb128 << 2).  */
	  ULONGEST offset = 0;
	  unsigned shift = 0;

	  do
	    {
	      offset |= (*entry & 0x7f) << shift;
	      shift += 7;
	    }
	  while (*entry++ & 0x80);

	  vsp += 0x204 + (offset << 2);
	}
      else if (insn == 0xb3)
	{
	  /* 10110011 sssscccc: pop VFP regs D[ssss]..D[ssss+cccc]
	     saved by FSTMFDX.  */
	  int start = *entry >> 4;
	  int count = (*entry++) & 0xf;
	  int i;

	  /* Only registers D0..D15 are valid here.  */
	  if (start + count >= 16)
	    return NULL;

	  /* Pop VFP double-precision registers D[start]..D[start+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
	      vsp += 8;
	    }

	  /* Add an extra 4 bytes for FSTMFDX-style stack.  */
	  vsp += 4;
	}
      else if ((insn & 0xf8) == 0xb8)
	{
	  /* 10111nnn: pop VFP regs D[8]..D[8+nnn] saved by FSTMFDX.  */
	  int count = insn & 0x7;
	  int i;

	  /* Pop VFP double-precision registers D[8]..D[8+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
	      vsp += 8;
	    }

	  /* Add an extra 4 bytes for FSTMFDX-style stack.  */
	  vsp += 4;
	}
      else if (insn == 0xc6)
	{
	  /* 11000110 sssscccc: pop iWMMXt WR[ssss]..WR[ssss+cccc].  */
	  int start = *entry >> 4;
	  int count = (*entry++) & 0xf;
	  int i;

	  /* Only registers WR0..WR15 are valid.  */
	  if (start + count >= 16)
	    return NULL;

	  /* Pop iwmmx registers WR[start]..WR[start+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else if (insn == 0xc7)
	{
	  /* 11000111 0000iiii: pop iWMMXt WCGR0..WCGR3 under mask.  */
	  int mask = *entry++;
	  int i;

	  /* All-zero mask and mask >= 16 is "spare".  */
	  if (mask == 0 || mask >= 16)
	    return NULL;

	  /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask.  */
	  for (i = 0; i < 4; i++)
	    if (mask & (1 << i))
	      {
		cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
		vsp += 4;
	      }
	}
      else if ((insn & 0xf8) == 0xc0)
	{
	  /* 11000nnn: pop iWMMXt WR[10]..WR[10+nnn].  */
	  int count = insn & 0x7;
	  int i;

	  /* Pop iwmmx registers WR[10]..WR[10+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else if (insn == 0xc8)
	{
	  /* 11001000 sssscccc: pop VFP regs D[16+ssss]..D[16+ssss+cccc]
	     saved by VSTM.  */
	  int start = *entry >> 4;
	  int count = (*entry++) & 0xf;
	  int i;

	  /* Only registers D0..D31 are valid.  */
	  if (start + count >= 16)
	    return NULL;

	  /* Pop VFP double-precision registers
	     D[16+start]..D[16+start+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else if (insn == 0xc9)
	{
	  /* 11001001 sssscccc: pop VFP regs D[ssss]..D[ssss+cccc]
	     saved by VSTM.  */
	  int start = *entry >> 4;
	  int count = (*entry++) & 0xf;
	  int i;

	  /* Pop VFP double-precision registers D[start]..D[start+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else if ((insn & 0xf8) == 0xd0)
	{
	  /* 11010nnn: pop VFP regs D[8]..D[8+nnn] saved by VSTM.  */
	  int count = insn & 0x7;
	  int i;

	  /* Pop VFP double-precision registers D[8]..D[8+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else
	{
	  /* Everything else is "spare".  */
	  return NULL;
	}
    }

  /* If we restore SP from a register, assume this was the frame register.
     Otherwise just fall back to SP as frame register.  */
  if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
    cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
  else
    cache->framereg = ARM_SP_REGNUM;

  /* Determine offset to previous frame.  */
  cache->framesize
    = vsp - get_frame_register_unsigned (this_frame, cache->framereg);

  /* We already got the previous SP.  */
  cache->prev_sp = vsp;

  return cache;
}
2768
/* Unwinding via ARM exception table entries.  Note that the sniffer
   already computes a filled-in prologue cache, which is then used
   with the same arm_prologue_this_id and arm_prologue_prev_register
   routines also used for prologue-parsing based unwinding.  */

static int
arm_exidx_unwind_sniffer (const struct frame_unwind *self,
			  struct frame_info *this_frame,
			  void **this_prologue_cache)
{
  struct gdbarch *gdbarch = get_frame_arch (this_frame);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  CORE_ADDR addr_in_block, exidx_region, func_start;
  struct arm_prologue_cache *cache;
  gdb_byte *entry;

  /* See if we have an ARM exception table entry covering this address.  */
  addr_in_block = get_frame_address_in_block (this_frame);
  entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
  if (!entry)
    return 0;

  /* The ARM exception table does not describe unwind information
     for arbitrary PC values, but is guaranteed to be correct only
     at call sites.  We have to decide here whether we want to use
     ARM exception table information for this frame, or fall back
     to using prologue parsing.  (Note that if we have DWARF CFI,
     this sniffer isn't even called -- CFI is always preferred.)

     Before we make this decision, however, we check whether we
     actually have *symbol* information for the current frame.
     If not, prologue parsing would not work anyway, so we might
     as well use the exception table and hope for the best.  */
  if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
    {
      int exc_valid = 0;

      /* If the next frame is "normal", we are at a call site in this
	 frame, so exception information is guaranteed to be valid.  */
      if (get_next_frame (this_frame)
	  && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
	exc_valid = 1;

      /* We also assume exception information is valid if we're currently
	 blocked in a system call.  The system library is supposed to
	 ensure this, so that e.g. pthread cancellation works.
	 Recognize this by checking whether the instruction just before
	 the current PC is a system call instruction.  */
      if (arm_frame_is_thumb (this_frame))
	{
	  LONGEST insn;

	  if (safe_read_memory_integer (get_frame_pc (this_frame) - 2, 2,
					byte_order_for_code, &insn)
	      && (insn & 0xff00) == 0xdf00 /* svc */)
	    exc_valid = 1;
	}
      else
	{
	  LONGEST insn;

	  if (safe_read_memory_integer (get_frame_pc (this_frame) - 4, 4,
					byte_order_for_code, &insn)
	      && (insn & 0x0f000000) == 0x0f000000 /* svc */)
	    exc_valid = 1;
	}

      /* Bail out if we don't know that exception information is valid.  */
      if (!exc_valid)
	return 0;

      /* The ARM exception index does not mark the *end* of the region
	 covered by the entry, and some functions will not have any entry.
	 To correctly recognize the end of the covered region, the linker
	 should have inserted dummy records with a CANTUNWIND marker.

	 Unfortunately, current versions of GNU ld do not reliably do
	 this, and thus we may have found an incorrect entry above.
	 As a (temporary) sanity check, we only use the entry if it
	 lies *within* the bounds of the function.  Note that this check
	 might reject perfectly valid entries that just happen to cover
	 multiple functions; therefore this check ought to be removed
	 once the linker is fixed.  */
      if (func_start > exidx_region)
	return 0;
    }

  /* Decode the list of unwinding instructions into a prologue cache.
     Note that this may fail due to e.g. a "refuse to unwind" code.  */
  cache = arm_exidx_fill_cache (this_frame, entry);
  if (!cache)
    return 0;

  *this_prologue_cache = cache;
  return 1;
}
2863
/* Unwinder driven by ARM exception table (.ARM.exidx/.ARM.extab)
   entries; shares this_id/prev_register with the prologue unwinder.  */
struct frame_unwind arm_exidx_unwind = {
  NORMAL_FRAME,
  default_frame_unwind_stop_reason,
  arm_prologue_this_id,
  arm_prologue_prev_register,
  NULL,
  arm_exidx_unwind_sniffer
};
2872
2873 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2874 trampoline, return the target PC. Otherwise return 0.
2875
2876 void call0a (char c, short s, int i, long l) {}
2877
2878 int main (void)
2879 {
2880 (*pointer_to_call0a) (c, s, i, l);
2881 }
2882
2883 Instead of calling a stub library function _call_via_xx (xx is
2884 the register name), GCC may inline the trampoline in the object
2885 file as below (register r2 has the address of call0a).
2886
2887 .global main
2888 .type main, %function
2889 ...
2890 bl .L1
2891 ...
2892 .size main, .-main
2893
2894 .L1:
2895 bx r2
2896
2897 The trampoline 'bx r2' doesn't belong to main. */
2898
2899 static CORE_ADDR
2900 arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2901 {
2902 /* The heuristics of recognizing such trampoline is that FRAME is
2903 executing in Thumb mode and the instruction on PC is 'bx Rm'. */
2904 if (arm_frame_is_thumb (frame))
2905 {
2906 gdb_byte buf[2];
2907
2908 if (target_read_memory (pc, buf, 2) == 0)
2909 {
2910 struct gdbarch *gdbarch = get_frame_arch (frame);
2911 enum bfd_endian byte_order_for_code
2912 = gdbarch_byte_order_for_code (gdbarch);
2913 uint16_t insn
2914 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2915
2916 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2917 {
2918 CORE_ADDR dest
2919 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2920
2921 /* Clear the LSB so that gdb core sets step-resume
2922 breakpoint at the right address. */
2923 return UNMAKE_THUMB_ADDR (dest);
2924 }
2925 }
2926 }
2927
2928 return 0;
2929 }
2930
2931 static struct arm_prologue_cache *
2932 arm_make_stub_cache (struct frame_info *this_frame)
2933 {
2934 struct arm_prologue_cache *cache;
2935
2936 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2937 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2938
2939 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2940
2941 return cache;
2942 }
2943
2944 /* Our frame ID for a stub frame is the current SP and LR. */
2945
2946 static void
2947 arm_stub_this_id (struct frame_info *this_frame,
2948 void **this_cache,
2949 struct frame_id *this_id)
2950 {
2951 struct arm_prologue_cache *cache;
2952
2953 if (*this_cache == NULL)
2954 *this_cache = arm_make_stub_cache (this_frame);
2955 cache = *this_cache;
2956
2957 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
2958 }
2959
2960 static int
2961 arm_stub_unwind_sniffer (const struct frame_unwind *self,
2962 struct frame_info *this_frame,
2963 void **this_prologue_cache)
2964 {
2965 CORE_ADDR addr_in_block;
2966 gdb_byte dummy[4];
2967 CORE_ADDR pc, start_addr;
2968 const char *name;
2969
2970 addr_in_block = get_frame_address_in_block (this_frame);
2971 pc = get_frame_pc (this_frame);
2972 if (in_plt_section (addr_in_block)
2973 /* We also use the stub winder if the target memory is unreadable
2974 to avoid having the prologue unwinder trying to read it. */
2975 || target_read_memory (pc, dummy, 4) != 0)
2976 return 1;
2977
2978 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2979 && arm_skip_bx_reg (this_frame, pc) != 0)
2980 return 1;
2981
2982 return 0;
2983 }
2984
/* Unwinder for stub frames (PLT entries, unreadable code, inlined
   call-indirect trampolines).  */
struct frame_unwind arm_stub_unwind = {
  NORMAL_FRAME,
  default_frame_unwind_stop_reason,
  arm_stub_this_id,
  arm_prologue_prev_register,
  NULL,
  arm_stub_unwind_sniffer
};
2993
/* Build a prologue cache for an M-profile exception frame: store, into
   CACHE->saved_regs, the stack addresses of the registers pushed by
   exception entry for the frame described by THIS_FRAME, and compute
   the previous stack pointer.  The filled-in cache is returned.  */

static struct arm_prologue_cache *
arm_m_exception_cache (struct frame_info *this_frame)
{
  struct gdbarch *gdbarch = get_frame_arch (this_frame);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  struct arm_prologue_cache *cache;
  CORE_ADDR unwound_sp;
  LONGEST xpsr;

  cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
  cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);

  unwound_sp = get_frame_register_unsigned (this_frame,
					    ARM_SP_REGNUM);

  /* The hardware saves eight 32-bit words, comprising xPSR,
     ReturnAddress, LR (R14), R12, R3, R2, R1, R0.  See details in
     "B1.5.6 Exception entry behavior" in
     "ARMv7-M Architecture Reference Manual".  */
  cache->saved_regs[0].addr = unwound_sp;	/* R0 */
  cache->saved_regs[1].addr = unwound_sp + 4;	/* R1 */
  cache->saved_regs[2].addr = unwound_sp + 8;	/* R2 */
  cache->saved_regs[3].addr = unwound_sp + 12;	/* R3 */
  cache->saved_regs[12].addr = unwound_sp + 16;	/* R12 */
  cache->saved_regs[14].addr = unwound_sp + 20;	/* LR */
  cache->saved_regs[15].addr = unwound_sp + 24;	/* ReturnAddress */
  cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;	/* xPSR */

  /* If bit 9 of the saved xPSR is set, then there is a four-byte
     aligner between the top of the 32-byte stack frame and the
     previous context's stack pointer.  */
  cache->prev_sp = unwound_sp + 32;
  if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
      && (xpsr & (1 << 9)) != 0)
    cache->prev_sp += 4;

  return cache;
}
3036
/* Implementation of function hook 'this_id' in
   'struct frame_unwind' for M-profile exception frames.  */

static void
arm_m_exception_this_id (struct frame_info *this_frame,
			 void **this_cache,
			 struct frame_id *this_id)
{
  struct arm_prologue_cache *cache;

  if (*this_cache == NULL)
    *this_cache = arm_m_exception_cache (this_frame);
  cache = *this_cache;

  /* Our frame ID for an exception frame is the previous SP and
     the current PC.  */
  *this_id = frame_id_build (cache->prev_sp,
			     get_frame_pc (this_frame));
}
3055
/* Implementation of function hook 'prev_register' in
   'struct frame_unwind' for M-profile exception frames.  SP is
   reconstructed from the cache; everything else comes from the
   hardware-saved register addresses.  */

static struct value *
arm_m_exception_prev_register (struct frame_info *this_frame,
			       void **this_cache,
			       int prev_regnum)
{
  struct gdbarch *gdbarch = get_frame_arch (this_frame);
  struct arm_prologue_cache *cache;

  if (*this_cache == NULL)
    *this_cache = arm_m_exception_cache (this_frame);
  cache = *this_cache;

  /* The value was already reconstructed into PREV_SP.  */
  if (prev_regnum == ARM_SP_REGNUM)
    return frame_unwind_got_constant (this_frame, prev_regnum,
				      cache->prev_sp);

  return trad_frame_get_prev_register (this_frame, cache->saved_regs,
				       prev_regnum);
}
3079
3080 /* Implementation of function hook 'sniffer' in
3081 'struct frame_uwnind'. */
3082
3083 static int
3084 arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3085 struct frame_info *this_frame,
3086 void **this_prologue_cache)
3087 {
3088 CORE_ADDR this_pc = get_frame_pc (this_frame);
3089
3090 /* No need to check is_m; this sniffer is only registered for
3091 M-profile architectures. */
3092
3093 /* Exception frames return to one of these magic PCs. Other values
3094 are not defined as of v7-M. See details in "B1.5.8 Exception
3095 return behavior" in "ARMv7-M Architecture Reference Manual". */
3096 if (this_pc == 0xfffffff1 || this_pc == 0xfffffff9
3097 || this_pc == 0xfffffffd)
3098 return 1;
3099
3100 return 0;
3101 }
3102
/* Frame unwinder for M-profile exceptions.  The hook functions are
   defined above; the entries are positional in 'struct frame_unwind'
   order.  */

struct frame_unwind arm_m_exception_unwind =
{
  SIGTRAMP_FRAME,
  default_frame_unwind_stop_reason,
  arm_m_exception_this_id,
  arm_m_exception_prev_register,
  NULL,	/* No private unwind data needed by this unwinder.  */
  arm_m_exception_unwind_sniffer
};
3114
3115 static CORE_ADDR
3116 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
3117 {
3118 struct arm_prologue_cache *cache;
3119
3120 if (*this_cache == NULL)
3121 *this_cache = arm_make_prologue_cache (this_frame);
3122 cache = *this_cache;
3123
3124 return cache->prev_sp - cache->framesize;
3125 }
3126
/* Frame base handler paired with the prologue unwinder; the same
   address (arm_normal_frame_base) is used for the frame base, the
   locals base and the arguments base.  */

struct frame_base arm_normal_base = {
  &arm_prologue_unwind,
  arm_normal_frame_base,
  arm_normal_frame_base,
  arm_normal_frame_base
};
3133
3134 /* Assuming THIS_FRAME is a dummy, return the frame ID of that
3135 dummy frame. The frame ID's base needs to match the TOS value
3136 saved by save_dummy_frame_tos() and returned from
3137 arm_push_dummy_call, and the PC needs to match the dummy frame's
3138 breakpoint. */
3139
3140 static struct frame_id
3141 arm_dummy_id (struct gdbarch *gdbarch, struct frame_info *this_frame)
3142 {
3143 return frame_id_build (get_frame_register_unsigned (this_frame,
3144 ARM_SP_REGNUM),
3145 get_frame_pc (this_frame));
3146 }
3147
3148 /* Given THIS_FRAME, find the previous frame's resume PC (which will
3149 be used to construct the previous frame's ID, after looking up the
3150 containing function). */
3151
3152 static CORE_ADDR
3153 arm_unwind_pc (struct gdbarch *gdbarch, struct frame_info *this_frame)
3154 {
3155 CORE_ADDR pc;
3156 pc = frame_unwind_register_unsigned (this_frame, ARM_PC_REGNUM);
3157 return arm_addr_bits_remove (gdbarch, pc);
3158 }
3159
/* Return the previous frame's stack pointer by unwinding SP from
   THIS_FRAME.  */

static CORE_ADDR
arm_unwind_sp (struct gdbarch *gdbarch, struct frame_info *this_frame)
{
  return frame_unwind_register_unsigned (this_frame, ARM_SP_REGNUM);
}
3165
3166 static struct value *
3167 arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
3168 int regnum)
3169 {
3170 struct gdbarch * gdbarch = get_frame_arch (this_frame);
3171 CORE_ADDR lr, cpsr;
3172 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
3173
3174 switch (regnum)
3175 {
3176 case ARM_PC_REGNUM:
3177 /* The PC is normally copied from the return column, which
3178 describes saves of LR. However, that version may have an
3179 extra bit set to indicate Thumb state. The bit is not
3180 part of the PC. */
3181 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3182 return frame_unwind_got_constant (this_frame, regnum,
3183 arm_addr_bits_remove (gdbarch, lr));
3184
3185 case ARM_PS_REGNUM:
3186 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3187 cpsr = get_frame_register_unsigned (this_frame, regnum);
3188 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3189 if (IS_THUMB_ADDR (lr))
3190 cpsr |= t_bit;
3191 else
3192 cpsr &= ~t_bit;
3193 return frame_unwind_got_constant (this_frame, regnum, cpsr);
3194
3195 default:
3196 internal_error (__FILE__, __LINE__,
3197 _("Unexpected register %d"), regnum);
3198 }
3199 }
3200
3201 static void
3202 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3203 struct dwarf2_frame_state_reg *reg,
3204 struct frame_info *this_frame)
3205 {
3206 switch (regnum)
3207 {
3208 case ARM_PC_REGNUM:
3209 case ARM_PS_REGNUM:
3210 reg->how = DWARF2_FRAME_REG_FN;
3211 reg->loc.fn = arm_dwarf2_prev_register;
3212 break;
3213 case ARM_SP_REGNUM:
3214 reg->how = DWARF2_FRAME_REG_CFA;
3215 break;
3216 }
3217 }
3218
/* Return true if we are in the function's epilogue, i.e. after the
   instruction that destroyed the function's stack frame.  Thumb-mode
   variant; PC is assumed to point at a Thumb instruction.  */

static int
thumb_in_function_epilogue_p (struct gdbarch *gdbarch, CORE_ADDR pc)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned int insn, insn2;
  int found_return = 0, found_stack_adjust = 0;
  CORE_ADDR func_start, func_end;
  CORE_ADDR scan_pc;
  gdb_byte buf[4];	/* Scratch buffer for instruction fetches.  */

  if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
    return 0;

  /* The epilogue is a sequence of instructions along the following lines:

    - add stack frame size to SP or FP
    - [if frame pointer used] restore SP from FP
    - restore registers from SP [may include PC]
    - a return-type instruction [if PC wasn't already restored]

    In a first pass, we scan forward from the current PC and verify the
    instructions we find as compatible with this sequence, ending in a
    return instruction.

    However, this is not sufficient to distinguish indirect function calls
    within a function from indirect tail calls in the epilogue in some cases.
    Therefore, if we didn't already find any SP-changing instruction during
    forward scan, we add a backward scanning heuristic to ensure we actually
    are in the epilogue.  */

  scan_pc = pc;
  while (scan_pc < func_end && !found_return)
    {
      /* Stop scanning if the instruction cannot be read.  */
      if (target_read_memory (scan_pc, buf, 2))
	break;

      scan_pc += 2;
      insn = extract_unsigned_integer (buf, 2, byte_order_for_code);

      if ((insn & 0xff80) == 0x4700)  /* bx <Rm> */
	found_return = 1;
      else if (insn == 0x46f7)  /* mov pc, lr */
	found_return = 1;
      else if (insn == 0x46bd)  /* mov sp, r7 */
	found_stack_adjust = 1;
      else if ((insn & 0xff00) == 0xb000)  /* add sp, imm or sub sp, imm */
	found_stack_adjust = 1;
      else if ((insn & 0xfe00) == 0xbc00)  /* pop <registers> */
	{
	  found_stack_adjust = 1;
	  if (insn & 0x0100)  /* <registers> include PC.  */
	    found_return = 1;
	}
      else if (thumb_insn_size (insn) == 4)  /* 32-bit Thumb-2 instruction */
	{
	  /* Fetch the second halfword of the 32-bit instruction.  */
	  if (target_read_memory (scan_pc, buf, 2))
	    break;

	  scan_pc += 2;
	  insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);

	  if (insn == 0xe8bd)  /* ldm.w sp!, <registers> */
	    {
	      found_stack_adjust = 1;
	      if (insn2 & 0x8000)  /* <registers> include PC.  */
		found_return = 1;
	    }
	  else if (insn == 0xf85d  /* ldr.w <Rt>, [sp], #4 */
		   && (insn2 & 0x0fff) == 0x0b04)
	    {
	      found_stack_adjust = 1;
	      if ((insn2 & 0xf000) == 0xf000)  /* <Rt> is PC.  */
		found_return = 1;
	    }
	  else if ((insn & 0xffbf) == 0xecbd  /* vldm sp!, <list> */
		   && (insn2 & 0x0e00) == 0x0a00)
	    found_stack_adjust = 1;
	  else
	    break;
	}
      else
	break;
    }

  if (!found_return)
    return 0;

  /* Since any instruction in the epilogue sequence, with the possible
     exception of return itself, updates the stack pointer, we need to
     scan backwards for at most one instruction.  Try either a 16-bit or
     a 32-bit instruction.  This is just a heuristic, so we do not worry
     too much about false positives.  */

  if (!found_stack_adjust)
    {
      if (pc - 4 < func_start)
	return 0;
      if (target_read_memory (pc - 4, buf, 4))
	return 0;

      /* INSN is the first halfword (possibly the first half of a
	 32-bit instruction); INSN2 is the halfword immediately before
	 PC (a complete 16-bit instruction on its own).  */
      insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
      insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);

      if (insn2 == 0x46bd)  /* mov sp, r7 */
	found_stack_adjust = 1;
      else if ((insn2 & 0xff00) == 0xb000)  /* add sp, imm or sub sp, imm */
	found_stack_adjust = 1;
      else if ((insn2 & 0xff00) == 0xbc00)  /* pop <registers> without PC */
	found_stack_adjust = 1;
      else if (insn == 0xe8bd)  /* ldm.w sp!, <registers> */
	found_stack_adjust = 1;
      else if (insn == 0xf85d  /* ldr.w <Rt>, [sp], #4 */
	       && (insn2 & 0x0fff) == 0x0b04)
	found_stack_adjust = 1;
      else if ((insn & 0xffbf) == 0xecbd  /* vldm sp!, <list> */
	       && (insn2 & 0x0e00) == 0x0a00)
	found_stack_adjust = 1;
    }

  return found_stack_adjust;
}
3343
/* Return true if we are in the function's epilogue, i.e. after the
   instruction that destroyed the function's stack frame.  Dispatches
   to thumb_in_function_epilogue_p for Thumb code; the body below
   handles 4-byte ARM-mode instructions.  */

static int
arm_in_function_epilogue_p (struct gdbarch *gdbarch, CORE_ADDR pc)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned int insn;
  int found_return, found_stack_adjust;
  CORE_ADDR func_start, func_end;

  if (arm_pc_is_thumb (gdbarch, pc))
    return thumb_in_function_epilogue_p (gdbarch, pc);

  if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
    return 0;

  /* We are in the epilogue if the previous instruction was a stack
     adjustment and the next instruction is a possible return (bx, mov
     pc, or pop).  We could have to scan backwards to find the stack
     adjustment, or forwards to find the return, but this is a decent
     approximation.  First scan forwards.  */

  found_return = 0;
  insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
  /* Skip unconditional (NV-condition) encodings; the patterns below
     only cover conditionally-encoded instructions.  */
  if (bits (insn, 28, 31) != INST_NV)
    {
      if ((insn & 0x0ffffff0) == 0x012fff10)
	/* BX.  */
	found_return = 1;
      else if ((insn & 0x0ffffff0) == 0x01a0f000)
	/* MOV PC.  */
	found_return = 1;
      else if ((insn & 0x0fff0000) == 0x08bd0000
	       && (insn & 0x0000c000) != 0)
	/* POP (LDMIA), including PC or LR.  */
	found_return = 1;
    }

  if (!found_return)
    return 0;

  /* Scan backwards.  This is just a heuristic, so do not worry about
     false positives from mode changes.  */

  if (pc < func_start + 4)
    return 0;

  found_stack_adjust = 0;
  insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
  if (bits (insn, 28, 31) != INST_NV)
    {
      if ((insn & 0x0df0f000) == 0x0080d000)
	/* ADD SP (register or immediate).  */
	found_stack_adjust = 1;
      else if ((insn & 0x0df0f000) == 0x0040d000)
	/* SUB SP (register or immediate).  */
	found_stack_adjust = 1;
      else if ((insn & 0x0ffffff0) == 0x01a0d000)
	/* MOV SP.  */
	found_stack_adjust = 1;
      else if ((insn & 0x0fff0000) == 0x08bd0000)
	/* POP (LDMIA).  */
	found_stack_adjust = 1;
      else if ((insn & 0x0fff0000) == 0x049d0000)
	/* POP of a single register.  */
	found_stack_adjust = 1;
    }

  if (found_stack_adjust)
    return 1;

  return 0;
}
3418
3419
/* When arguments must be pushed onto the stack, they go on in reverse
   order.  The code below implements a FILO (stack) to do this.
   Items are allocated and freed by push_stack_item/pop_stack_item.  */

struct stack_item
{
  int len;			/* Length of DATA in bytes.  */
  struct stack_item *prev;	/* Item below this one on the stack.  */
  void *data;			/* xmalloc'd copy of the contents.  */
};
3429
3430 static struct stack_item *
3431 push_stack_item (struct stack_item *prev, const void *contents, int len)
3432 {
3433 struct stack_item *si;
3434 si = xmalloc (sizeof (struct stack_item));
3435 si->data = xmalloc (len);
3436 si->len = len;
3437 si->prev = prev;
3438 memcpy (si->data, contents, len);
3439 return si;
3440 }
3441
3442 static struct stack_item *
3443 pop_stack_item (struct stack_item *si)
3444 {
3445 struct stack_item *dead = si;
3446 si = si->prev;
3447 xfree (dead->data);
3448 xfree (dead);
3449 return si;
3450 }
3451
3452
/* Return the alignment (in bytes) of the given type.  */

static int
arm_type_align (struct type *t)
{
  int n;
  int align;
  int falign;

  t = check_typedef (t);
  switch (TYPE_CODE (t))
    {
    default:
      /* Should never happen.  */
      internal_error (__FILE__, __LINE__, _("unknown type alignment"));
      return 4;

    case TYPE_CODE_PTR:
    case TYPE_CODE_ENUM:
    case TYPE_CODE_INT:
    case TYPE_CODE_FLT:
    case TYPE_CODE_SET:
    case TYPE_CODE_RANGE:
    case TYPE_CODE_REF:
    case TYPE_CODE_CHAR:
    case TYPE_CODE_BOOL:
      /* Scalar types are aligned to their own size.  */
      return TYPE_LENGTH (t);

    case TYPE_CODE_ARRAY:
    case TYPE_CODE_COMPLEX:
      /* Arrays and complex types align like their element type.  */
      /* TODO: What about vector types?  */
      return arm_type_align (TYPE_TARGET_TYPE (t));

    case TYPE_CODE_STRUCT:
    case TYPE_CODE_UNION:
      /* Aggregates align to the strictest alignment of any field.  */
      align = 1;
      for (n = 0; n < TYPE_NFIELDS (t); n++)
	{
	  falign = arm_type_align (TYPE_FIELD_TYPE (t, n));
	  if (falign > align)
	    align = falign;
	}
      return align;
    }
}
3498
/* Possible base types for a candidate for passing and returning in
   VFP registers.  */

enum arm_vfp_cprc_base_type
{
  VFP_CPRC_UNKNOWN,		/* Not yet classified.  */
  VFP_CPRC_SINGLE,		/* 4-byte single-precision float ('s' regs).  */
  VFP_CPRC_DOUBLE,		/* 8-byte double-precision float ('d' regs).  */
  VFP_CPRC_VEC64,		/* 8-byte vector ('d' regs).  */
  VFP_CPRC_VEC128		/* 16-byte vector ('q' regs).  */
};
3510
3511 /* The length of one element of base type B. */
3512
3513 static unsigned
3514 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3515 {
3516 switch (b)
3517 {
3518 case VFP_CPRC_SINGLE:
3519 return 4;
3520 case VFP_CPRC_DOUBLE:
3521 return 8;
3522 case VFP_CPRC_VEC64:
3523 return 8;
3524 case VFP_CPRC_VEC128:
3525 return 16;
3526 default:
3527 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3528 (int) b);
3529 }
3530 }
3531
3532 /* The character ('s', 'd' or 'q') for the type of VFP register used
3533 for passing base type B. */
3534
3535 static int
3536 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3537 {
3538 switch (b)
3539 {
3540 case VFP_CPRC_SINGLE:
3541 return 's';
3542 case VFP_CPRC_DOUBLE:
3543 return 'd';
3544 case VFP_CPRC_VEC64:
3545 return 'd';
3546 case VFP_CPRC_VEC128:
3547 return 'q';
3548 default:
3549 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3550 (int) b);
3551 }
3552 }
3553
/* Determine whether T may be part of a candidate for passing and
   returning in VFP registers, ignoring the limit on the total number
   of components.  If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
   classification of the first valid component found; if it is not
   VFP_CPRC_UNKNOWN, all components must have the same classification
   as *BASE_TYPE.  If it is found that T contains a type not permitted
   for passing and returning in VFP registers, a type differently
   classified from *BASE_TYPE, or two types differently classified
   from each other, return -1, otherwise return the total number of
   base-type elements found (possibly 0 in an empty structure or
   array).  Vectors and complex types are not currently supported,
   matching the generic AAPCS support.  */

static int
arm_vfp_cprc_sub_candidate (struct type *t,
			    enum arm_vfp_cprc_base_type *base_type)
{
  t = check_typedef (t);
  switch (TYPE_CODE (t))
    {
    case TYPE_CODE_FLT:
      /* Floats classify by size: 4 bytes -> SINGLE, 8 -> DOUBLE.  */
      switch (TYPE_LENGTH (t))
	{
	case 4:
	  if (*base_type == VFP_CPRC_UNKNOWN)
	    *base_type = VFP_CPRC_SINGLE;
	  else if (*base_type != VFP_CPRC_SINGLE)
	    return -1;
	  return 1;

	case 8:
	  if (*base_type == VFP_CPRC_UNKNOWN)
	    *base_type = VFP_CPRC_DOUBLE;
	  else if (*base_type != VFP_CPRC_DOUBLE)
	    return -1;
	  return 1;

	default:
	  return -1;
	}
      break;

    case TYPE_CODE_ARRAY:
      {
	/* Element count is derived from the total length divided by
	   the base-type unit length.  */
	int count;
	unsigned unitlen;
	count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t), base_type);
	if (count == -1)
	  return -1;
	if (TYPE_LENGTH (t) == 0)
	  {
	    gdb_assert (count == 0);
	    return 0;
	  }
	else if (count == 0)
	  return -1;
	unitlen = arm_vfp_cprc_unit_length (*base_type);
	gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
	return TYPE_LENGTH (t) / unitlen;
      }
      break;

    case TYPE_CODE_STRUCT:
      {
	/* A struct's element count is the sum over its fields; the
	   total length must match exactly (no padding allowed).  */
	int count = 0;
	unsigned unitlen;
	int i;
	for (i = 0; i < TYPE_NFIELDS (t); i++)
	  {
	    int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
							base_type);
	    if (sub_count == -1)
	      return -1;
	    count += sub_count;
	  }
	if (TYPE_LENGTH (t) == 0)
	  {
	    gdb_assert (count == 0);
	    return 0;
	  }
	else if (count == 0)
	  return -1;
	unitlen = arm_vfp_cprc_unit_length (*base_type);
	if (TYPE_LENGTH (t) != unitlen * count)
	  return -1;
	return count;
      }

    case TYPE_CODE_UNION:
      {
	/* A union's element count is the maximum over its members.  */
	int count = 0;
	unsigned unitlen;
	int i;
	for (i = 0; i < TYPE_NFIELDS (t); i++)
	  {
	    int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
							base_type);
	    if (sub_count == -1)
	      return -1;
	    count = (count > sub_count ? count : sub_count);
	  }
	if (TYPE_LENGTH (t) == 0)
	  {
	    gdb_assert (count == 0);
	    return 0;
	  }
	else if (count == 0)
	  return -1;
	unitlen = arm_vfp_cprc_unit_length (*base_type);
	if (TYPE_LENGTH (t) != unitlen * count)
	  return -1;
	return count;
      }

    default:
      break;
    }

  return -1;
}
3674
3675 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3676 if passed to or returned from a non-variadic function with the VFP
3677 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3678 *BASE_TYPE to the base type for T and *COUNT to the number of
3679 elements of that base type before returning. */
3680
3681 static int
3682 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3683 int *count)
3684 {
3685 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3686 int c = arm_vfp_cprc_sub_candidate (t, &b);
3687 if (c <= 0 || c > 4)
3688 return 0;
3689 *base_type = b;
3690 *count = c;
3691 return 1;
3692 }
3693
3694 /* Return 1 if the VFP ABI should be used for passing arguments to and
3695 returning values from a function of type FUNC_TYPE, 0
3696 otherwise. */
3697
3698 static int
3699 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3700 {
3701 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3702 /* Variadic functions always use the base ABI. Assume that functions
3703 without debug info are not variadic. */
3704 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3705 return 0;
3706 /* The VFP ABI is only supported as a variant of AAPCS. */
3707 if (tdep->arm_abi != ARM_ABI_AAPCS)
3708 return 0;
3709 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3710 }
3711
/* We currently only support passing parameters in integer registers, which
   conforms with GCC's default model, and VFP argument passing following
   the VFP variant of AAPCS.  Several other variants exist and
   we should probably support some of them based on the selected ABI.

   Set up the inferior's registers and stack for a dummy call: store
   the return address in LR, distribute ARGS among core registers,
   VFP registers and the stack, and return the final (decremented,
   8-byte-aligned-as-needed) SP.  */

static CORE_ADDR
arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
		     struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
		     struct value **args, CORE_ADDR sp, int struct_return,
		     CORE_ADDR struct_addr)
{
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  int argnum;
  int argreg;
  int nstack;
  struct stack_item *si = NULL;
  int use_vfp_abi;
  struct type *ftype;
  /* Bitmask of VFP argument registers still free; one bit per 32-bit
     unit, cleared as units are allocated below.  */
  unsigned vfp_regs_free = (1 << 16) - 1;

  /* Determine the type of this function and whether the VFP ABI
     applies.  */
  ftype = check_typedef (value_type (function));
  if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
    ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
  use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);

  /* Set the return address.  For the ARM, the return breakpoint is
     always at BP_ADDR.  */
  if (arm_pc_is_thumb (gdbarch, bp_addr))
    bp_addr |= 1;
  regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);

  /* Walk through the list of args and determine how large a temporary
     stack is required.  Need to take care here as structs may be
     passed on the stack, and we have to push them.  */
  nstack = 0;

  argreg = ARM_A1_REGNUM;
  nstack = 0;

  /* The struct_return pointer occupies the first parameter
     passing register.  */
  if (struct_return)
    {
      if (arm_debug)
	fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
			    gdbarch_register_name (gdbarch, argreg),
			    paddress (gdbarch, struct_addr));
      regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
      argreg++;
    }

  for (argnum = 0; argnum < nargs; argnum++)
    {
      int len;
      struct type *arg_type;
      struct type *target_type;
      enum type_code typecode;
      const bfd_byte *val;
      int align;
      enum arm_vfp_cprc_base_type vfp_base_type;
      int vfp_base_count;
      int may_use_core_reg = 1;

      arg_type = check_typedef (value_type (args[argnum]));
      len = TYPE_LENGTH (arg_type);
      target_type = TYPE_TARGET_TYPE (arg_type);
      typecode = TYPE_CODE (arg_type);
      val = value_contents (args[argnum]);

      align = arm_type_align (arg_type);
      /* Round alignment up to a whole number of words.  */
      align = (align + INT_REGISTER_SIZE - 1) & ~(INT_REGISTER_SIZE - 1);
      /* Different ABIs have different maximum alignments.  */
      if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
	{
	  /* The APCS ABI only requires word alignment.  */
	  align = INT_REGISTER_SIZE;
	}
      else
	{
	  /* The AAPCS requires at most doubleword alignment.  */
	  if (align > INT_REGISTER_SIZE * 2)
	    align = INT_REGISTER_SIZE * 2;
	}

      if (use_vfp_abi
	  && arm_vfp_call_candidate (arg_type, &vfp_base_type,
				     &vfp_base_count))
	{
	  int regno;
	  int unit_length;
	  int shift;
	  unsigned mask;

	  /* Because this is a CPRC it cannot go in a core register or
	     cause a core register to be skipped for alignment.
	     Either it goes in VFP registers and the rest of this loop
	     iteration is skipped for this argument, or it goes on the
	     stack (and the stack alignment code is correct for this
	     case).  */
	  may_use_core_reg = 0;

	  /* Find the first run of free VFP registers large enough and
	     suitably aligned for this candidate.  */
	  unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
	  shift = unit_length / 4;
	  mask = (1 << (shift * vfp_base_count)) - 1;
	  for (regno = 0; regno < 16; regno += shift)
	    if (((vfp_regs_free >> regno) & mask) == mask)
	      break;

	  if (regno < 16)
	    {
	      int reg_char;
	      int reg_scaled;
	      int i;

	      vfp_regs_free &= ~(mask << regno);
	      reg_scaled = regno / shift;
	      reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
	      for (i = 0; i < vfp_base_count; i++)
		{
		  char name_buf[4];
		  int regnum;
		  if (reg_char == 'q')
		    arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
					 val + i * unit_length);
		  else
		    {
		      /* Write to the s/d register found by name.  */
		      xsnprintf (name_buf, sizeof (name_buf), "%c%d",
				 reg_char, reg_scaled + i);
		      regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
							    strlen (name_buf));
		      regcache_cooked_write (regcache, regnum,
					     val + i * unit_length);
		    }
		}
	      continue;
	    }
	  else
	    {
	      /* This CPRC could not go in VFP registers, so all VFP
		 registers are now marked as used.  */
	      vfp_regs_free = 0;
	    }
	}

      /* Push stack padding for doubleword alignment.  */
      if (nstack & (align - 1))
	{
	  si = push_stack_item (si, val, INT_REGISTER_SIZE);
	  nstack += INT_REGISTER_SIZE;
	}

      /* Doubleword aligned quantities must go in even register pairs.  */
      if (may_use_core_reg
	  && argreg <= ARM_LAST_ARG_REGNUM
	  && align > INT_REGISTER_SIZE
	  && argreg & 1)
	argreg++;

      /* If the argument is a pointer to a function, and it is a
	 Thumb function, create a LOCAL copy of the value and set
	 the THUMB bit in it.  */
      if (TYPE_CODE_PTR == typecode
	  && target_type != NULL
	  && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
	{
	  CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
	  if (arm_pc_is_thumb (gdbarch, regval))
	    {
	      bfd_byte *copy = alloca (len);
	      store_unsigned_integer (copy, len, byte_order,
				      MAKE_THUMB_ADDR (regval));
	      val = copy;
	    }
	}

      /* Copy the argument to general registers or the stack in
	 register-sized pieces.  Large arguments are split between
	 registers and stack.  */
      while (len > 0)
	{
	  int partial_len = len < INT_REGISTER_SIZE ? len : INT_REGISTER_SIZE;

	  if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
	    {
	      /* The argument is being passed in a general purpose
		 register.  */
	      CORE_ADDR regval
		= extract_unsigned_integer (val, partial_len, byte_order);
	      if (byte_order == BFD_ENDIAN_BIG)
		regval <<= (INT_REGISTER_SIZE - partial_len) * 8;
	      if (arm_debug)
		fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
				    argnum,
				    gdbarch_register_name
				      (gdbarch, argreg),
				    phex (regval, INT_REGISTER_SIZE));
	      regcache_cooked_write_unsigned (regcache, argreg, regval);
	      argreg++;
	    }
	  else
	    {
	      /* Push the arguments onto the stack.  */
	      if (arm_debug)
		fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
				    argnum, nstack);
	      si = push_stack_item (si, val, INT_REGISTER_SIZE);
	      nstack += INT_REGISTER_SIZE;
	    }

	  len -= partial_len;
	  val += partial_len;
	}
    }
  /* If we have an odd number of words to push, then decrement the stack
     by one word now, so first stack argument will be dword aligned.  */
  if (nstack & 4)
    sp -= 4;

  /* Pop the FILO to write the stacked arguments in the correct
     (original) order, highest address first.  */
  while (si)
    {
      sp -= si->len;
      write_memory (sp, si->data, si->len);
      si = pop_stack_item (si);
    }

  /* Finally, update the SP register.  */
  regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);

  return sp;
}
3945
3946
3947 /* Always align the frame to an 8-byte boundary. This is required on
3948 some platforms and harmless on the rest. */
3949
3950 static CORE_ADDR
3951 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3952 {
3953 /* Align the stack to eight bytes. */
3954 return sp & ~ (CORE_ADDR) 7;
3955 }
3956
/* Print the named FPA status flags corresponding to the low five bits
   of FLAGS, followed by a newline.  */

static void
print_fpu_flags (struct ui_file *file, int flags)
{
  static const char *const flag_names[]
    = { "IVO ", "DVZ ", "OFL ", "UFL ", "INX " };
  int bit;

  for (bit = 0; bit < 5; bit++)
    if (flags & (1 << bit))
      fputs_filtered (flag_names[bit], file);
  fputc_filtered ('\n', file);
}
3972
3973 /* Print interesting information about the floating point processor
3974 (if present) or emulator. */
3975 static void
3976 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
3977 struct frame_info *frame, const char *args)
3978 {
3979 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
3980 int type;
3981
3982 type = (status >> 24) & 127;
3983 if (status & (1 << 31))
3984 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
3985 else
3986 fprintf_filtered (file, _("Software FPU type %d\n"), type);
3987 /* i18n: [floating point unit] mask */
3988 fputs_filtered (_("mask: "), file);
3989 print_fpu_flags (file, status >> 16);
3990 /* i18n: [floating point unit] flags */
3991 fputs_filtered (_("flags: "), file);
3992 print_fpu_flags (file, status);
3993 }
3994
3995 /* Construct the ARM extended floating point type. */
3996 static struct type *
3997 arm_ext_type (struct gdbarch *gdbarch)
3998 {
3999 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4000
4001 if (!tdep->arm_ext_type)
4002 tdep->arm_ext_type
4003 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
4004 floatformats_arm_ext);
4005
4006 return tdep->arm_ext_type;
4007 }
4008
/* Return the union type used to present a NEON double (D) register:
   overlapping vector views (u8/u16/u32/f32) and scalar views
   (u64/f64) of the same 64 bits.  Built once per gdbarch and cached
   in the tdep.  */

static struct type *
arm_neon_double_type (struct gdbarch *gdbarch)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);

  if (tdep->neon_double_type == NULL)
    {
      struct type *t, *elem;

      t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
			       TYPE_CODE_UNION);
      /* Field order determines presentation order; each field covers
	 the full 64 bits.  */
      elem = builtin_type (gdbarch)->builtin_uint8;
      append_composite_type_field (t, "u8", init_vector_type (elem, 8));
      elem = builtin_type (gdbarch)->builtin_uint16;
      append_composite_type_field (t, "u16", init_vector_type (elem, 4));
      elem = builtin_type (gdbarch)->builtin_uint32;
      append_composite_type_field (t, "u32", init_vector_type (elem, 2));
      elem = builtin_type (gdbarch)->builtin_uint64;
      append_composite_type_field (t, "u64", elem);
      elem = builtin_type (gdbarch)->builtin_float;
      append_composite_type_field (t, "f32", init_vector_type (elem, 2));
      elem = builtin_type (gdbarch)->builtin_double;
      append_composite_type_field (t, "f64", elem);

      TYPE_VECTOR (t) = 1;
      TYPE_NAME (t) = "neon_d";
      tdep->neon_double_type = t;
    }

  return tdep->neon_double_type;
}
4040
/* FIXME: The vector types are not correctly ordered on big-endian
   targets.  Just as s0 is the low bits of d0, d0[0] is also the low
   bits of d0 - regardless of what unit size is being held in d0.  So
   the offset of the first uint8 in d0 is 7, but the offset of the
   first float is 4.  This code works as-is for little-endian
   targets.  */

/* Return the union type used to present a NEON quad (Q) register:
   overlapping vector views of the same 128 bits.  Built once per
   gdbarch and cached in the tdep.  */

static struct type *
arm_neon_quad_type (struct gdbarch *gdbarch)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);

  if (tdep->neon_quad_type == NULL)
    {
      struct type *t, *elem;

      t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
			       TYPE_CODE_UNION);
      /* Field order determines presentation order; each field covers
	 the full 128 bits.  */
      elem = builtin_type (gdbarch)->builtin_uint8;
      append_composite_type_field (t, "u8", init_vector_type (elem, 16));
      elem = builtin_type (gdbarch)->builtin_uint16;
      append_composite_type_field (t, "u16", init_vector_type (elem, 8));
      elem = builtin_type (gdbarch)->builtin_uint32;
      append_composite_type_field (t, "u32", init_vector_type (elem, 4));
      elem = builtin_type (gdbarch)->builtin_uint64;
      append_composite_type_field (t, "u64", init_vector_type (elem, 2));
      elem = builtin_type (gdbarch)->builtin_float;
      append_composite_type_field (t, "f32", init_vector_type (elem, 4));
      elem = builtin_type (gdbarch)->builtin_double;
      append_composite_type_field (t, "f64", init_vector_type (elem, 2));

      TYPE_VECTOR (t) = 1;
      TYPE_NAME (t) = "neon_q";
      tdep->neon_quad_type = t;
    }

  return tdep->neon_quad_type;
}
4079
/* Return the GDB type object for the "standard" data type of data in
   register N.  */

static struct type *
arm_register_type (struct gdbarch *gdbarch, int regnum)
{
  int num_regs = gdbarch_num_regs (gdbarch);

  /* VFP single-precision pseudo registers (s0-s31) come right after
     the raw registers.  */
  if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
      && regnum >= num_regs && regnum < num_regs + 32)
    return builtin_type (gdbarch)->builtin_float;

  /* NEON quad pseudo registers (q0-q15) follow the VFP pseudos.  */
  if (gdbarch_tdep (gdbarch)->have_neon_pseudos
      && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
    return arm_neon_quad_type (gdbarch);

  /* If the target description has register information, we are only
     in this function so that we can override the types of
     double-precision registers for NEON.  */
  if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
    {
      struct type *t = tdesc_register_type (gdbarch, regnum);

      if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
	  && TYPE_CODE (t) == TYPE_CODE_FLT
	  && gdbarch_tdep (gdbarch)->have_neon)
	return arm_neon_double_type (gdbarch);
      else
	return t;
    }

  if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
    {
      /* FPA registers are void when the target has no FPA, so that
	 they are not displayed.  */
      if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
	return builtin_type (gdbarch)->builtin_void;

      return arm_ext_type (gdbarch);
    }
  else if (regnum == ARM_SP_REGNUM)
    return builtin_type (gdbarch)->builtin_data_ptr;
  else if (regnum == ARM_PC_REGNUM)
    return builtin_type (gdbarch)->builtin_func_ptr;
  else if (regnum >= ARRAY_SIZE (arm_register_names))
    /* These registers are only supported on targets which supply
       an XML description.  */
    return builtin_type (gdbarch)->builtin_int0;
  else
    return builtin_type (gdbarch)->builtin_uint32;
}
4129
4130 /* Map a DWARF register REGNUM onto the appropriate GDB register
4131 number. */
4132
4133 static int
4134 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
4135 {
4136 /* Core integer regs. */
4137 if (reg >= 0 && reg <= 15)
4138 return reg;
4139
4140 /* Legacy FPA encoding. These were once used in a way which
4141 overlapped with VFP register numbering, so their use is
4142 discouraged, but GDB doesn't support the ARM toolchain
4143 which used them for VFP. */
4144 if (reg >= 16 && reg <= 23)
4145 return ARM_F0_REGNUM + reg - 16;
4146
4147 /* New assignments for the FPA registers. */
4148 if (reg >= 96 && reg <= 103)
4149 return ARM_F0_REGNUM + reg - 96;
4150
4151 /* WMMX register assignments. */
4152 if (reg >= 104 && reg <= 111)
4153 return ARM_WCGR0_REGNUM + reg - 104;
4154
4155 if (reg >= 112 && reg <= 127)
4156 return ARM_WR0_REGNUM + reg - 112;
4157
4158 if (reg >= 192 && reg <= 199)
4159 return ARM_WC0_REGNUM + reg - 192;
4160
4161 /* VFP v2 registers. A double precision value is actually
4162 in d1 rather than s2, but the ABI only defines numbering
4163 for the single precision registers. This will "just work"
4164 in GDB for little endian targets (we'll read eight bytes,
4165 starting in s0 and then progressing to s1), but will be
4166 reversed on big endian targets with VFP. This won't
4167 be a problem for the new Neon quad registers; you're supposed
4168 to use DW_OP_piece for those. */
4169 if (reg >= 64 && reg <= 95)
4170 {
4171 char name_buf[4];
4172
4173 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
4174 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4175 strlen (name_buf));
4176 }
4177
4178 /* VFP v3 / Neon registers. This range is also used for VFP v2
4179 registers, except that it now describes d0 instead of s0. */
4180 if (reg >= 256 && reg <= 287)
4181 {
4182 char name_buf[4];
4183
4184 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
4185 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4186 strlen (name_buf));
4187 }
4188
4189 return -1;
4190 }
4191
4192 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4193 static int
4194 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4195 {
4196 int reg = regnum;
4197 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
4198
4199 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4200 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4201
4202 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4203 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4204
4205 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4206 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4207
4208 if (reg < NUM_GREGS)
4209 return SIM_ARM_R0_REGNUM + reg;
4210 reg -= NUM_GREGS;
4211
4212 if (reg < NUM_FREGS)
4213 return SIM_ARM_FP0_REGNUM + reg;
4214 reg -= NUM_FREGS;
4215
4216 if (reg < NUM_SREGS)
4217 return SIM_ARM_FPS_REGNUM + reg;
4218 reg -= NUM_SREGS;
4219
4220 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4221 }
4222
4223 /* NOTE: cagney/2001-08-20: Both convert_from_extended() and
4224 convert_to_extended() use floatformat_arm_ext_littlebyte_bigword.
4225 It is thought that this is is the floating-point register format on
4226 little-endian systems. */
4227
4228 static void
4229 convert_from_extended (const struct floatformat *fmt, const void *ptr,
4230 void *dbl, int endianess)
4231 {
4232 DOUBLEST d;
4233
4234 if (endianess == BFD_ENDIAN_BIG)
4235 floatformat_to_doublest (&floatformat_arm_ext_big, ptr, &d);
4236 else
4237 floatformat_to_doublest (&floatformat_arm_ext_littlebyte_bigword,
4238 ptr, &d);
4239 floatformat_from_doublest (fmt, &d, dbl);
4240 }
4241
4242 static void
4243 convert_to_extended (const struct floatformat *fmt, void *dbl, const void *ptr,
4244 int endianess)
4245 {
4246 DOUBLEST d;
4247
4248 floatformat_to_doublest (fmt, ptr, &d);
4249 if (endianess == BFD_ENDIAN_BIG)
4250 floatformat_from_doublest (&floatformat_arm_ext_big, &d, dbl);
4251 else
4252 floatformat_from_doublest (&floatformat_arm_ext_littlebyte_bigword,
4253 &d, dbl);
4254 }
4255
4256 static int
4257 condition_true (unsigned long cond, unsigned long status_reg)
4258 {
4259 if (cond == INST_AL || cond == INST_NV)
4260 return 1;
4261
4262 switch (cond)
4263 {
4264 case INST_EQ:
4265 return ((status_reg & FLAG_Z) != 0);
4266 case INST_NE:
4267 return ((status_reg & FLAG_Z) == 0);
4268 case INST_CS:
4269 return ((status_reg & FLAG_C) != 0);
4270 case INST_CC:
4271 return ((status_reg & FLAG_C) == 0);
4272 case INST_MI:
4273 return ((status_reg & FLAG_N) != 0);
4274 case INST_PL:
4275 return ((status_reg & FLAG_N) == 0);
4276 case INST_VS:
4277 return ((status_reg & FLAG_V) != 0);
4278 case INST_VC:
4279 return ((status_reg & FLAG_V) == 0);
4280 case INST_HI:
4281 return ((status_reg & (FLAG_C | FLAG_Z)) == FLAG_C);
4282 case INST_LS:
4283 return ((status_reg & (FLAG_C | FLAG_Z)) != FLAG_C);
4284 case INST_GE:
4285 return (((status_reg & FLAG_N) == 0) == ((status_reg & FLAG_V) == 0));
4286 case INST_LT:
4287 return (((status_reg & FLAG_N) == 0) != ((status_reg & FLAG_V) == 0));
4288 case INST_GT:
4289 return (((status_reg & FLAG_Z) == 0)
4290 && (((status_reg & FLAG_N) == 0)
4291 == ((status_reg & FLAG_V) == 0)));
4292 case INST_LE:
4293 return (((status_reg & FLAG_Z) != 0)
4294 || (((status_reg & FLAG_N) == 0)
4295 != ((status_reg & FLAG_V) == 0)));
4296 }
4297 return 1;
4298 }
4299
/* Compute the value of the "shifter operand" of INST, an ARM
   data-processing or load/store instruction executing in FRAME: a
   register Rm, optionally shifted by an immediate or by another
   register.  PC_VAL is the instruction's address (used because
   reading the PC as an operand sees ahead of the instruction);
   CARRY is the current C flag, used only for RRX.  STATUS_REG is
   not referenced by the body.  Returns the 32-bit operand value.  */
static unsigned long
shifted_reg_val (struct frame_info *frame, unsigned long inst, int carry,
		 unsigned long pc_val, unsigned long status_reg)
{
  unsigned long res, shift;
  int rm = bits (inst, 0, 3);
  unsigned long shifttype = bits (inst, 5, 6);

  if (bit (inst, 4))
    {
      /* Register-specified shift: amount is the low byte of Rs
	 (or PC + 8 when Rs is the PC).  */
      int rs = bits (inst, 8, 11);
      shift = (rs == 15 ? pc_val + 8
	       : get_frame_register_unsigned (frame, rs)) & 0xFF;
    }
  else
    /* Immediate shift amount, bits [11:7].  */
    shift = bits (inst, 7, 11);

  /* Reading the PC yields the instruction address plus 12 for
     register-shifted forms, plus 8 otherwise.  */
  res = (rm == ARM_PC_REGNUM
	 ? (pc_val + (bit (inst, 4) ? 12 : 8))
	 : get_frame_register_unsigned (frame, rm));

  switch (shifttype)
    {
    case 0:			/* LSL */
      res = shift >= 32 ? 0 : res << shift;
      break;

    case 1:			/* LSR */
      res = shift >= 32 ? 0 : res >> shift;
      break;

    case 2:			/* ASR */
      /* Shifting by 32 or more fills the result with the sign bit,
	 which shifting by 31 achieves here.  */
      if (shift >= 32)
	shift = 31;
      res = ((res & 0x80000000L)
	     ? ~((~res) >> shift) : res >> shift);
      break;

    case 3:			/* ROR/RRX */
      shift &= 31;
      if (shift == 0)
	/* ROR #0 encodes RRX: rotate right one bit through carry.  */
	res = (res >> 1) | (carry ? 0x80000000L : 0);
      else
	res = (res >> shift) | (res << (32 - shift));
      break;
    }

  /* Mask in case unsigned long is wider than 32 bits.  */
  return res & 0xffffffff;
}
4349
4350 /* Return number of 1-bits in VAL. */
4351
static int
bitcount (unsigned long val)
{
  int count = 0;

  /* Kernighan's trick: clearing the lowest set bit each iteration
     means we loop exactly once per 1-bit.  */
  while (val != 0)
    {
      val &= val - 1;
      count++;
    }

  return count;
}
4360
4361 /* Return the size in bytes of the complete Thumb instruction whose
4362 first halfword is INST1. */
4363
static int
thumb_insn_size (unsigned short inst1)
{
  /* A halfword whose top five bits are 0b11101, 0b11110 or 0b11111 is
     the first half of a 32-bit Thumb-2 instruction; every other value
     is a complete 16-bit instruction.  */
  return ((inst1 & 0xf800) >= 0xe800) ? 4 : 2;
}
4372
/* Advance the IT-state value ITSTATE past one instruction, returning
   the new state (zero once the IT block has ended).  */

static int
thumb_advance_itstate (unsigned int itstate)
{
  /* Keep the base condition in IT[7:5]; shift the per-instruction
     condition/mask bits up by one.  */
  unsigned int next = (itstate & 0xe0) | ((itstate << 1) & 0x1f);

  /* An empty mask means the IT block is finished.  */
  return (next & 0x0f) == 0 ? 0 : next;
}
4386
4387 /* Find the next PC after the current instruction executes. In some
4388 cases we can not statically determine the answer (see the IT state
4389 handling in this function); in that case, a breakpoint may be
4390 inserted in addition to the returned PC, which will be used to set
4391 another breakpoint by our caller. */
4392
static CORE_ADDR
thumb_get_next_pc_raw (struct frame_info *frame, CORE_ADDR pc)
{
  struct gdbarch *gdbarch = get_frame_arch (frame);
  struct address_space *aspace = get_frame_address_space (frame);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned long pc_val = ((unsigned long) pc) + 4;	/* PC after prefetch */
  unsigned short inst1;
  CORE_ADDR nextpc = pc + 2;		/* Default is next instruction.  */
  unsigned long offset;
  ULONGEST status, itstate;

  /* Returned addresses carry the Thumb bit so callers can tell the
     execution state of the destination.  */
  nextpc = MAKE_THUMB_ADDR (nextpc);
  pc_val = MAKE_THUMB_ADDR (pc_val);

  inst1 = read_memory_unsigned_integer (pc, 2, byte_order_for_code);

  /* Thumb-2 conditional execution support.  There are eight bits in
     the CPSR which describe conditional execution state.  Once
     reconstructed (they're in a funny order), the low five bits
     describe the low bit of the condition for each instruction and
     how many instructions remain.  The high three bits describe the
     base condition.  One of the low four bits will be set if an IT
     block is active.  These bits read as zero on earlier
     processors.  */
  status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
  itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);

  /* If-Then handling.  On GNU/Linux, where this routine is used, we
     use an undefined instruction as a breakpoint.  Unlike BKPT, IT
     can disable execution of the undefined instruction.  So we might
     miss the breakpoint if we set it on a skipped conditional
     instruction.  Because conditional instructions can change the
     flags, affecting the execution of further instructions, we may
     need to set two breakpoints.  */

  if (gdbarch_tdep (gdbarch)->thumb2_breakpoint != NULL)
    {
      if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
	{
	  /* An IT instruction.  Because this instruction does not
	     modify the flags, we can accurately predict the next
	     executed instruction.  */
	  itstate = inst1 & 0x00ff;
	  pc += thumb_insn_size (inst1);

	  while (itstate != 0 && ! condition_true (itstate >> 4, status))
	    {
	      inst1 = read_memory_unsigned_integer (pc, 2,
						    byte_order_for_code);
	      pc += thumb_insn_size (inst1);
	      itstate = thumb_advance_itstate (itstate);
	    }

	  return MAKE_THUMB_ADDR (pc);
	}
      else if (itstate != 0)
	{
	  /* We are in a conditional block.  Check the condition.  */
	  if (! condition_true (itstate >> 4, status))
	    {
	      /* Advance to the next executed instruction.  */
	      pc += thumb_insn_size (inst1);
	      itstate = thumb_advance_itstate (itstate);

	      while (itstate != 0 && ! condition_true (itstate >> 4, status))
		{
		  inst1 = read_memory_unsigned_integer (pc, 2,
							byte_order_for_code);
		  pc += thumb_insn_size (inst1);
		  itstate = thumb_advance_itstate (itstate);
		}

	      return MAKE_THUMB_ADDR (pc);
	    }
	  else if ((itstate & 0x0f) == 0x08)
	    {
	      /* This is the last instruction of the conditional
		 block, and it is executed.  We can handle it normally
		 because the following instruction is not conditional,
		 and we must handle it normally because it is
		 permitted to branch.  Fall through.  */
	    }
	  else
	    {
	      int cond_negated;

	      /* There are conditional instructions after this one.
		 If this instruction modifies the flags, then we can
		 not predict what the next executed instruction will
		 be.  Fortunately, this instruction is architecturally
		 forbidden to branch; we know it will fall through.
		 Start by skipping past it.  */
	      pc += thumb_insn_size (inst1);
	      itstate = thumb_advance_itstate (itstate);

	      /* Set a breakpoint on the following instruction.  */
	      gdb_assert ((itstate & 0x0f) != 0);
	      arm_insert_single_step_breakpoint (gdbarch, aspace,
						 MAKE_THUMB_ADDR (pc));
	      cond_negated = (itstate >> 4) & 1;

	      /* Skip all following instructions with the same
		 condition.  If there is a later instruction in the IT
		 block with the opposite condition, set the other
		 breakpoint there.  If not, then set a breakpoint on
		 the instruction after the IT block.  */
	      do
		{
		  inst1 = read_memory_unsigned_integer (pc, 2,
							byte_order_for_code);
		  pc += thumb_insn_size (inst1);
		  itstate = thumb_advance_itstate (itstate);
		}
	      while (itstate != 0 && ((itstate >> 4) & 1) == cond_negated);

	      return MAKE_THUMB_ADDR (pc);
	    }
	}
    }
  else if (itstate & 0x0f)
    {
      /* We are in a conditional block.  Check the condition.  */
      int cond = itstate >> 4;

      if (! condition_true (cond, status))
	/* Advance to the next instruction.  All the 32-bit
	   instructions share a common prefix.  */
	return MAKE_THUMB_ADDR (pc + thumb_insn_size (inst1));

      /* Otherwise, handle the instruction normally.  */
    }

  if ((inst1 & 0xff00) == 0xbd00)	/* pop {rlist, pc} */
    {
      CORE_ADDR sp;

      /* Fetch the saved PC from the stack.  It's stored above
         all of the other registers.  */
      offset = bitcount (bits (inst1, 0, 7)) * INT_REGISTER_SIZE;
      sp = get_frame_register_unsigned (frame, ARM_SP_REGNUM);
      nextpc = read_memory_unsigned_integer (sp + offset, 4, byte_order);
    }
  else if ((inst1 & 0xf000) == 0xd000)	/* conditional branch */
    {
      unsigned long cond = bits (inst1, 8, 11);
      if (cond == 0x0f)  /* 0x0f = SWI */
	{
	  struct gdbarch_tdep *tdep;
	  tdep = gdbarch_tdep (gdbarch);

	  /* Let the OS ABI hook predict the PC after the system call,
	     if one is registered.  */
	  if (tdep->syscall_next_pc != NULL)
	    nextpc = tdep->syscall_next_pc (frame);

	}
      else if (cond != 0x0f && condition_true (cond, status))
	nextpc = pc_val + (sbits (inst1, 0, 7) << 1);
    }
  else if ((inst1 & 0xf800) == 0xe000)	/* unconditional branch */
    {
      nextpc = pc_val + (sbits (inst1, 0, 10) << 1);
    }
  else if (thumb_insn_size (inst1) == 4) /* 32-bit instruction */
    {
      unsigned short inst2;
      inst2 = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);

      /* Default to the next instruction.  */
      nextpc = pc + 4;
      nextpc = MAKE_THUMB_ADDR (nextpc);

      if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
	{
	  /* Branches and miscellaneous control instructions.  */

	  if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
	    {
	      /* B, BL, BLX.  */
	      int j1, j2, imm1, imm2;

	      imm1 = sbits (inst1, 0, 10);
	      imm2 = bits (inst2, 0, 10);
	      j1 = bit (inst2, 13);
	      j2 = bit (inst2, 11);

	      /* The J1/J2 bits are stored inverted relative to the
		 sign bit (I1 = NOT(J1 EOR S)); the XOR undoes that.  */
	      offset = ((imm1 << 12) + (imm2 << 1));
	      offset ^= ((!j2) << 22) | ((!j1) << 23);

	      nextpc = pc_val + offset;
	      /* For BLX make sure to clear the low bits.  */
	      if (bit (inst2, 12) == 0)
		nextpc = nextpc & 0xfffffffc;
	    }
	  else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)
	    {
	      /* SUBS PC, LR, #imm8.  */
	      nextpc = get_frame_register_unsigned (frame, ARM_LR_REGNUM);
	      nextpc -= inst2 & 0x00ff;
	    }
	  else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
	    {
	      /* Conditional branch.  */
	      if (condition_true (bits (inst1, 6, 9), status))
		{
		  int sign, j1, j2, imm1, imm2;

		  sign = sbits (inst1, 10, 10);
		  imm1 = bits (inst1, 0, 5);
		  imm2 = bits (inst2, 0, 10);
		  j1 = bit (inst2, 13);
		  j2 = bit (inst2, 11);

		  offset = (sign << 20) + (j2 << 19) + (j1 << 18);
		  offset += (imm1 << 12) + (imm2 << 1);

		  nextpc = pc_val + offset;
		}
	    }
	}
      else if ((inst1 & 0xfe50) == 0xe810)
	{
	  /* Load multiple or RFE.  */
	  int rn, offset, load_pc = 1;

	  rn = bits (inst1, 0, 3);
	  if (bit (inst1, 7) && !bit (inst1, 8))
	    {
	      /* LDMIA or POP */
	      if (!bit (inst2, 15))
		load_pc = 0;
	      offset = bitcount (inst2) * 4 - 4;
	    }
	  else if (!bit (inst1, 7) && bit (inst1, 8))
	    {
	      /* LDMDB */
	      if (!bit (inst2, 15))
		load_pc = 0;
	      offset = -4;
	    }
	  else if (bit (inst1, 7) && bit (inst1, 8))
	    {
	      /* RFEIA */
	      offset = 0;
	    }
	  else if (!bit (inst1, 7) && !bit (inst1, 8))
	    {
	      /* RFEDB */
	      offset = -8;
	    }
	  else
	    load_pc = 0;

	  if (load_pc)
	    {
	      CORE_ADDR addr = get_frame_register_unsigned (frame, rn);
	      nextpc = get_frame_memory_unsigned (frame, addr + offset, 4);
	    }
	}
      else if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
	{
	  /* MOV PC or MOVS PC.  */
	  nextpc = get_frame_register_unsigned (frame, bits (inst2, 0, 3));
	  nextpc = MAKE_THUMB_ADDR (nextpc);
	}
      else if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
	{
	  /* LDR PC.  */
	  CORE_ADDR base;
	  int rn, load_pc = 1;

	  rn = bits (inst1, 0, 3);
	  base = get_frame_register_unsigned (frame, rn);
	  if (rn == ARM_PC_REGNUM)
	    {
	      /* PC-relative: the base is the word-aligned PC.  */
	      base = (base + 4) & ~(CORE_ADDR) 0x3;
	      if (bit (inst1, 7))
		base += bits (inst2, 0, 11);
	      else
		base -= bits (inst2, 0, 11);
	    }
	  else if (bit (inst1, 7))
	    base += bits (inst2, 0, 11);
	  else if (bit (inst2, 11))
	    {
	      if (bit (inst2, 10))
		{
		  if (bit (inst2, 9))
		    base += bits (inst2, 0, 7);
		  else
		    base -= bits (inst2, 0, 7);
		}
	    }
	  else if ((inst2 & 0x0fc0) == 0x0000)
	    {
	      int shift = bits (inst2, 4, 5), rm = bits (inst2, 0, 3);
	      base += get_frame_register_unsigned (frame, rm) << shift;
	    }
	  else
	    /* Reserved.  */
	    load_pc = 0;

	  if (load_pc)
	    nextpc = get_frame_memory_unsigned (frame, base, 4);
	}
      else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)
	{
	  /* TBB.  */
	  CORE_ADDR tbl_reg, table, offset, length;

	  tbl_reg = bits (inst1, 0, 3);
	  if (tbl_reg == 0x0f)
	    table = pc + 4;  /* Regcache copy of PC isn't right yet.  */
	  else
	    table = get_frame_register_unsigned (frame, tbl_reg);

	  offset = get_frame_register_unsigned (frame, bits (inst2, 0, 3));
	  length = 2 * get_frame_memory_unsigned (frame, table + offset, 1);
	  nextpc = pc_val + length;
	}
      else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)
	{
	  /* TBH.  */
	  CORE_ADDR tbl_reg, table, offset, length;

	  tbl_reg = bits (inst1, 0, 3);
	  if (tbl_reg == 0x0f)
	    table = pc + 4;  /* Regcache copy of PC isn't right yet.  */
	  else
	    table = get_frame_register_unsigned (frame, tbl_reg);

	  offset = 2 * get_frame_register_unsigned (frame, bits (inst2, 0, 3));
	  length = 2 * get_frame_memory_unsigned (frame, table + offset, 2);
	  nextpc = pc_val + length;
	}
    }
  else if ((inst1 & 0xff00) == 0x4700)	/* bx REG, blx REG */
    {
      if (bits (inst1, 3, 6) == 0x0f)
	nextpc = UNMAKE_THUMB_ADDR (pc_val);
      else
	nextpc = get_frame_register_unsigned (frame, bits (inst1, 3, 6));
    }
  else if ((inst1 & 0xff87) == 0x4687)	/* mov pc, REG */
    {
      if (bits (inst1, 3, 6) == 0x0f)
	nextpc = pc_val;
      else
	nextpc = get_frame_register_unsigned (frame, bits (inst1, 3, 6));

      nextpc = MAKE_THUMB_ADDR (nextpc);
    }
  else if ((inst1 & 0xf500) == 0xb100)
    {
      /* CBNZ or CBZ.  */
      int imm = (bit (inst1, 9) << 6) + (bits (inst1, 3, 7) << 1);
      ULONGEST reg = get_frame_register_unsigned (frame, bits (inst1, 0, 2));

      if (bit (inst1, 11) && reg != 0)
	nextpc = pc_val + imm;
      else if (!bit (inst1, 11) && reg == 0)
	nextpc = pc_val + imm;
    }
  return nextpc;
}
4758
4759 /* Get the raw next address. PC is the current program counter, in
4760 FRAME, which is assumed to be executing in ARM mode.
4761
4762 The value returned has the execution state of the next instruction
4763 encoded in it. Use IS_THUMB_ADDR () to see whether the instruction is
4764 in Thumb-State, and gdbarch_addr_bits_remove () to get the plain memory
4765 address. */
4766
static CORE_ADDR
arm_get_next_pc_raw (struct frame_info *frame, CORE_ADDR pc)
{
  struct gdbarch *gdbarch = get_frame_arch (frame);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned long pc_val;
  unsigned long this_instr;
  unsigned long status;
  CORE_ADDR nextpc;

  pc_val = (unsigned long) pc;
  this_instr = read_memory_unsigned_integer (pc, 4, byte_order_for_code);

  status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
  nextpc = (CORE_ADDR) (pc_val + 4);	/* Default case */

  /* Condition field 0xf (NV) is used for unconditional instructions
     such as BLX; handle those separately.  */
  if (bits (this_instr, 28, 31) == INST_NV)
    switch (bits (this_instr, 24, 27))
      {
      case 0xa:
      case 0xb:
	{
	  /* Branch with Link and change to Thumb.  */
	  nextpc = BranchDest (pc, this_instr);
	  /* Bit 24 of BLX is the halfword offset H bit.  */
	  nextpc |= bit (this_instr, 24) << 1;
	  nextpc = MAKE_THUMB_ADDR (nextpc);
	  break;
	}
      case 0xc:
      case 0xd:
      case 0xe:
	/* Coprocessor register transfer.  */
	if (bits (this_instr, 12, 15) == 15)
	  error (_("Invalid update to pc in instruction"));
	break;
      }
  else if (condition_true (bits (this_instr, 28, 31), status))
    {
      switch (bits (this_instr, 24, 27))
	{
	case 0x0:
	case 0x1:			/* data processing */
	case 0x2:
	case 0x3:
	  {
	    unsigned long operand1, operand2, result = 0;
	    unsigned long rn;
	    int c;

	    /* Only instructions whose destination is the PC can
	       change control flow.  */
	    if (bits (this_instr, 12, 15) != 15)
	      break;

	    if (bits (this_instr, 22, 25) == 0
		&& bits (this_instr, 4, 7) == 9)	/* multiply */
	      error (_("Invalid update to pc in instruction"));

	    /* BX <reg>, BLX <reg> */
	    if (bits (this_instr, 4, 27) == 0x12fff1
		|| bits (this_instr, 4, 27) == 0x12fff3)
	      {
		rn = bits (this_instr, 0, 3);
		nextpc = ((rn == ARM_PC_REGNUM)
			  ? (pc_val + 8)
			  : get_frame_register_unsigned (frame, rn));

		return nextpc;
	      }

	    /* Multiply into PC.  */
	    c = (status & FLAG_C) ? 1 : 0;
	    rn = bits (this_instr, 16, 19);
	    operand1 = ((rn == ARM_PC_REGNUM)
			? (pc_val + 8)
			: get_frame_register_unsigned (frame, rn));

	    if (bit (this_instr, 25))
	      {
		/* Immediate operand: 8-bit value rotated right by
		   twice the 4-bit rotate field.  */
		unsigned long immval = bits (this_instr, 0, 7);
		unsigned long rotate = 2 * bits (this_instr, 8, 11);
		operand2 = ((immval >> rotate) | (immval << (32 - rotate)))
		  & 0xffffffff;
	      }
	    else		/* operand 2 is a shifted register.  */
	      operand2 = shifted_reg_val (frame, this_instr, c,
					  pc_val, status);

	    /* Emulate the data-processing ALU operation to get the
	       value written to the PC.  */
	    switch (bits (this_instr, 21, 24))
	      {
	      case 0x0:	/*and */
		result = operand1 & operand2;
		break;

	      case 0x1:	/*eor */
		result = operand1 ^ operand2;
		break;

	      case 0x2:	/*sub */
		result = operand1 - operand2;
		break;

	      case 0x3:	/*rsb */
		result = operand2 - operand1;
		break;

	      case 0x4:	/*add */
		result = operand1 + operand2;
		break;

	      case 0x5:	/*adc */
		result = operand1 + operand2 + c;
		break;

	      case 0x6:	/*sbc */
		result = operand1 - operand2 + c;
		break;

	      case 0x7:	/*rsc */
		result = operand2 - operand1 + c;
		break;

	      case 0x8:
	      case 0x9:
	      case 0xa:
	      case 0xb:	/* tst, teq, cmp, cmn */
		/* Compare/test instructions do not write the PC.  */
		result = (unsigned long) nextpc;
		break;

	      case 0xc:	/*orr */
		result = operand1 | operand2;
		break;

	      case 0xd:	/*mov */
		/* Always step into a function.  */
		result = operand2;
		break;

	      case 0xe:	/*bic */
		result = operand1 & ~operand2;
		break;

	      case 0xf:	/*mvn */
		result = ~operand2;
		break;
	      }

	    /* In 26-bit APCS the bottom two bits of the result are
	       ignored, and we always end up in ARM state.  */
	    if (!arm_apcs_32)
	      nextpc = arm_addr_bits_remove (gdbarch, result);
	    else
	      nextpc = result;

	    break;
	  }

	case 0x4:
	case 0x5:		/* data transfer */
	case 0x6:
	case 0x7:
	  if (bit (this_instr, 20))
	    {
	      /* load */
	      if (bits (this_instr, 12, 15) == 15)
		{
		  /* rd == pc */
		  unsigned long rn;
		  unsigned long base;

		  if (bit (this_instr, 22))
		    error (_("Invalid update to pc in instruction"));

		  /* byte write to PC */
		  rn = bits (this_instr, 16, 19);
		  base = ((rn == ARM_PC_REGNUM)
			  ? (pc_val + 8)
			  : get_frame_register_unsigned (frame, rn));

		  if (bit (this_instr, 24))
		    {
		      /* pre-indexed */
		      int c = (status & FLAG_C) ? 1 : 0;
		      unsigned long offset =
		      (bit (this_instr, 25)
		       ? shifted_reg_val (frame, this_instr, c, pc_val, status)
		       : bits (this_instr, 0, 11));

		      if (bit (this_instr, 23))
			base += offset;
		      else
			base -= offset;
		    }
		  nextpc =
		    (CORE_ADDR) read_memory_unsigned_integer ((CORE_ADDR) base,
							      4, byte_order);
		}
	    }
	  break;

	case 0x8:
	case 0x9:		/* block transfer */
	  if (bit (this_instr, 20))
	    {
	      /* LDM */
	      if (bit (this_instr, 15))
		{
		  /* loading pc */
		  int offset = 0;
		  unsigned long rn_val
		    = get_frame_register_unsigned (frame,
						   bits (this_instr, 16, 19));

		  if (bit (this_instr, 23))
		    {
		      /* up */
		      unsigned long reglist = bits (this_instr, 0, 14);
		      /* The PC is loaded last, above the other listed
			 registers.  */
		      offset = bitcount (reglist) * 4;
		      if (bit (this_instr, 24))		/* pre */
			offset += 4;
		    }
		  else if (bit (this_instr, 24))
		    offset = -4;

		  nextpc =
		    (CORE_ADDR) read_memory_unsigned_integer ((CORE_ADDR)
							      (rn_val + offset),
							      4, byte_order);
		}
	    }
	  break;

	case 0xb:		/* branch & link */
	case 0xa:		/* branch */
	  {
	    nextpc = BranchDest (pc, this_instr);
	    break;
	  }

	case 0xc:
	case 0xd:
	case 0xe:		/* coproc ops */
	  break;
	case 0xf:		/* SWI */
	  {
	    struct gdbarch_tdep *tdep;
	    tdep = gdbarch_tdep (gdbarch);

	    /* Let the OS ABI hook predict the PC after the system
	       call, if one is registered.  */
	    if (tdep->syscall_next_pc != NULL)
	      nextpc = tdep->syscall_next_pc (frame);

	  }
	  break;

	default:
	  fprintf_filtered (gdb_stderr, _("Bad bit-field extraction\n"));
	  return (pc);
	}
    }

  return nextpc;
}
5028
5029 /* Determine next PC after current instruction executes. Will call either
5030 arm_get_next_pc_raw or thumb_get_next_pc_raw. Error out if infinite
5031 loop is detected. */
5032
5033 CORE_ADDR
5034 arm_get_next_pc (struct frame_info *frame, CORE_ADDR pc)
5035 {
5036 CORE_ADDR nextpc;
5037
5038 if (arm_frame_is_thumb (frame))
5039 nextpc = thumb_get_next_pc_raw (frame, pc);
5040 else
5041 nextpc = arm_get_next_pc_raw (frame, pc);
5042
5043 return nextpc;
5044 }
5045
5046 /* Like insert_single_step_breakpoint, but make sure we use a breakpoint
5047 of the appropriate mode (as encoded in the PC value), even if this
5048 differs from what would be expected according to the symbol tables. */
5049
5050 void
5051 arm_insert_single_step_breakpoint (struct gdbarch *gdbarch,
5052 struct address_space *aspace,
5053 CORE_ADDR pc)
5054 {
5055 struct cleanup *old_chain
5056 = make_cleanup_restore_integer (&arm_override_mode);
5057
5058 arm_override_mode = IS_THUMB_ADDR (pc);
5059 pc = gdbarch_addr_bits_remove (gdbarch, pc);
5060
5061 insert_single_step_breakpoint (gdbarch, aspace, pc);
5062
5063 do_cleanups (old_chain);
5064 }
5065
5066 /* Checks for an atomic sequence of instructions beginning with a LDREX{,B,H,D}
5067 instruction and ending with a STREX{,B,H,D} instruction. If such a sequence
5068 is found, attempt to step through it. A breakpoint is placed at the end of
5069 the sequence. */
5070
/* Scan forward from the PC of FRAME for a Thumb LDREX/STREX atomic
   sequence; if one is found, insert single-step breakpoints past it
   (and at the target of at most one conditional branch inside it) and
   return 1.  Return 0 to fall back to normal single-stepping.  */
static int
thumb_deal_with_atomic_sequence_raw (struct frame_info *frame)
{
  struct gdbarch *gdbarch = get_frame_arch (frame);
  struct address_space *aspace = get_frame_address_space (frame);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  CORE_ADDR pc = get_frame_pc (frame);
  CORE_ADDR breaks[2] = {-1, -1};
  CORE_ADDR loc = pc;
  unsigned short insn1, insn2;
  int insn_count;
  int index;
  int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed).  */
  const int atomic_sequence_length = 16; /* Instruction sequence length.  */
  ULONGEST status, itstate;

  /* We currently do not support atomic sequences within an IT block.  */
  status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
  itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);
  if (itstate & 0x0f)
    return 0;

  /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction.  */
  insn1 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
  loc += 2;
  if (thumb_insn_size (insn1) != 4)
    return 0;

  insn2 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
  loc += 2;
  if (!((insn1 & 0xfff0) == 0xe850
        || ((insn1 & 0xfff0) == 0xe8d0 && (insn2 & 0x00c0) == 0x0040)))
    return 0;

  /* Assume that no atomic sequence is longer than "atomic_sequence_length"
     instructions.  */
  for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
    {
      insn1 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
      loc += 2;

      if (thumb_insn_size (insn1) != 4)
	{
	  /* Assume that there is at most one conditional branch in the
	     atomic sequence.  If a conditional branch is found, put a
	     breakpoint in its destination address.  */
	  if ((insn1 & 0xf000) == 0xd000 && bits (insn1, 8, 11) != 0x0f)
	    {
	      if (last_breakpoint > 0)
		return 0; /* More than one conditional branch found,
			     fallback to the standard code.  */

	      breaks[1] = loc + 2 + (sbits (insn1, 0, 7) << 1);
	      last_breakpoint++;
	    }

	  /* We do not support atomic sequences that use any *other*
	     instructions but conditional branches to change the PC.
	     Fall back to standard code to avoid losing control of
	     execution.  */
	  else if (thumb_instruction_changes_pc (insn1))
	    return 0;
	}
      else
	{
	  insn2 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
	  loc += 2;

	  /* Assume that there is at most one conditional branch in the
	     atomic sequence.  If a conditional branch is found, put a
	     breakpoint in its destination address.  */
	  if ((insn1 & 0xf800) == 0xf000
	      && (insn2 & 0xd000) == 0x8000
	      && (insn1 & 0x0380) != 0x0380)
	    {
	      int sign, j1, j2, imm1, imm2;
	      unsigned int offset;

	      /* Decode the 32-bit conditional branch offset.  */
	      sign = sbits (insn1, 10, 10);
	      imm1 = bits (insn1, 0, 5);
	      imm2 = bits (insn2, 0, 10);
	      j1 = bit (insn2, 13);
	      j2 = bit (insn2, 11);

	      offset = (sign << 20) + (j2 << 19) + (j1 << 18);
	      offset += (imm1 << 12) + (imm2 << 1);

	      if (last_breakpoint > 0)
		return 0; /* More than one conditional branch found,
			     fallback to the standard code.  */

	      breaks[1] = loc + offset;
	      last_breakpoint++;
	    }

	  /* We do not support atomic sequences that use any *other*
	     instructions but conditional branches to change the PC.
	     Fall back to standard code to avoid losing control of
	     execution.  */
	  else if (thumb2_instruction_changes_pc (insn1, insn2))
	    return 0;

	  /* If we find a strex{,b,h,d}, we're done.  */
	  if ((insn1 & 0xfff0) == 0xe840
	      || ((insn1 & 0xfff0) == 0xe8c0 && (insn2 & 0x00c0) == 0x0040))
	    break;
	}
    }

  /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence.  */
  if (insn_count == atomic_sequence_length)
    return 0;

  /* Insert a breakpoint right after the end of the atomic sequence.  */
  breaks[0] = loc;

  /* Check for duplicated breakpoints.  Check also for a breakpoint
     placed (branch instruction's destination) anywhere in sequence.  */
  if (last_breakpoint
      && (breaks[1] == breaks[0]
	  || (breaks[1] >= pc && breaks[1] < loc)))
    last_breakpoint = 0;

  /* Effectively inserts the breakpoints.  */
  for (index = 0; index <= last_breakpoint; index++)
    arm_insert_single_step_breakpoint (gdbarch, aspace,
				       MAKE_THUMB_ADDR (breaks[index]));

  return 1;
}
5201
/* Recognize an ARM-state exclusive load/store (ldrex..strex) sequence
   starting at FRAME's PC and step over it as a unit, since stepping
   into the middle of it would disturb the exclusive monitor.  Return 1
   if single-step breakpoints were inserted after the sequence (and at
   the destination of at most one conditional branch within it); return
   0 to fall back to the standard single-step code.  */

static int
arm_deal_with_atomic_sequence_raw (struct frame_info *frame)
{
  struct gdbarch *gdbarch = get_frame_arch (frame);
  struct address_space *aspace = get_frame_address_space (frame);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  CORE_ADDR pc = get_frame_pc (frame);
  CORE_ADDR breaks[2] = {-1, -1};
  CORE_ADDR loc = pc;
  unsigned int insn;
  int insn_count;
  int index;
  int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed).  */
  const int atomic_sequence_length = 16; /* Instruction sequence length.  */

  /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction.
     Note that we do not currently support conditionally executed atomic
     instructions.  */
  insn = read_memory_unsigned_integer (loc, 4, byte_order_for_code);
  loc += 4;
  /* Not a ldrex{,b,h,d}: no atomic sequence starts here.  */
  if ((insn & 0xff9000f0) != 0xe1900090)
    return 0;

  /* Assume that no atomic sequence is longer than "atomic_sequence_length"
     instructions.  */
  for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
    {
      insn = read_memory_unsigned_integer (loc, 4, byte_order_for_code);
      loc += 4;

      /* Assume that there is at most one conditional branch in the atomic
         sequence.  If a conditional branch is found, put a breakpoint in
         its destination address.  */
      if (bits (insn, 24, 27) == 0xa)
	{
          if (last_breakpoint > 0)
            return 0; /* More than one conditional branch found, fallback
                         to the standard single-step code.  */

	  breaks[1] = BranchDest (loc - 4, insn);
	  last_breakpoint++;
	}

      /* We do not support atomic sequences that use any *other* instructions
         but conditional branches to change the PC.  Fall back to standard
	 code to avoid losing control of execution.  */
      else if (arm_instruction_changes_pc (insn))
	return 0;

      /* If we find a strex{,b,h,d}, we're done.  */
      if ((insn & 0xff9000f0) == 0xe1800090)
	break;
    }

  /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence.  */
  if (insn_count == atomic_sequence_length)
    return 0;

  /* Insert a breakpoint right after the end of the atomic sequence.  */
  breaks[0] = loc;

  /* Check for duplicated breakpoints.  Check also for a breakpoint
     placed (branch instruction's destination) anywhere in sequence.  */
  if (last_breakpoint
      && (breaks[1] == breaks[0]
	  || (breaks[1] >= pc && breaks[1] < loc)))
    last_breakpoint = 0;

  /* Effectively inserts the breakpoints.  */
  for (index = 0; index <= last_breakpoint; index++)
    arm_insert_single_step_breakpoint (gdbarch, aspace, breaks[index]);

  return 1;
}
5276
/* Dispatch atomic-sequence handling to the decoder that matches the
   execution state (Thumb or ARM) of FRAME.  Returns nonzero when a
   ldrex..strex sequence was recognized and breakpoints were set.  */

int
arm_deal_with_atomic_sequence (struct frame_info *frame)
{
  return (arm_frame_is_thumb (frame)
	  ? thumb_deal_with_atomic_sequence_raw (frame)
	  : arm_deal_with_atomic_sequence_raw (frame));
}
5285
5286 /* single_step() is called just before we want to resume the inferior,
5287 if we want to single-step it but there is no hardware or kernel
5288 single-step support. We find the target of the coming instruction
5289 and breakpoint it. */
5290
int
arm_software_single_step (struct frame_info *frame)
{
  struct gdbarch *arch = get_frame_arch (frame);
  struct address_space *aspace = get_frame_address_space (frame);

  /* Exclusive (ldrex/strex) regions must be stepped over as a unit.  */
  if (arm_deal_with_atomic_sequence (frame))
    return 1;

  /* Otherwise place a breakpoint on the single successor
     instruction.  */
  arm_insert_single_step_breakpoint (arch, aspace,
				     arm_get_next_pc (frame,
						      get_frame_pc (frame)));

  return 1;
}
5306
5307 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
5308 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
5309 NULL if an error occurs. BUF is freed. */
5310
5311 static gdb_byte *
5312 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
5313 int old_len, int new_len)
5314 {
5315 gdb_byte *new_buf;
5316 int bytes_to_read = new_len - old_len;
5317
5318 new_buf = xmalloc (new_len);
5319 memcpy (new_buf + bytes_to_read, buf, old_len);
5320 xfree (buf);
5321 if (target_read_memory (endaddr - new_len, new_buf, bytes_to_read) != 0)
5322 {
5323 xfree (new_buf);
5324 return NULL;
5325 }
5326 return new_buf;
5327 }
5328
5329 /* An IT block is at most the 2-byte IT instruction followed by
5330 four 4-byte instructions. The furthest back we must search to
5331 find an IT block that affects the current instruction is thus
5332 2 + 3 * 4 == 14 bytes. */
5333 #define MAX_IT_BLOCK_PREFIX 14
5334
5335 /* Use a quick scan if there are more than this many bytes of
5336 code. */
5337 #define IT_SCAN_THRESHOLD 32
5338
5339 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
5340 A breakpoint in an IT block may not be hit, depending on the
5341 condition flags. */
5342 static CORE_ADDR
5343 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
5344 {
5345 gdb_byte *buf;
5346 char map_type;
5347 CORE_ADDR boundary, func_start;
5348 int buf_len;
5349 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
5350 int i, any, last_it, last_it_count;
5351
5352 /* If we are using BKPT breakpoints, none of this is necessary. */
5353 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
5354 return bpaddr;
5355
5356 /* ARM mode does not have this problem. */
5357 if (!arm_pc_is_thumb (gdbarch, bpaddr))
5358 return bpaddr;
5359
5360 /* We are setting a breakpoint in Thumb code that could potentially
5361 contain an IT block. The first step is to find how much Thumb
5362 code there is; we do not need to read outside of known Thumb
5363 sequences. */
5364 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
5365 if (map_type == 0)
5366 /* Thumb-2 code must have mapping symbols to have a chance. */
5367 return bpaddr;
5368
5369 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
5370
5371 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
5372 && func_start > boundary)
5373 boundary = func_start;
5374
5375 /* Search for a candidate IT instruction. We have to do some fancy
5376 footwork to distinguish a real IT instruction from the second
5377 half of a 32-bit instruction, but there is no need for that if
5378 there's no candidate. */
5379 buf_len = min (bpaddr - boundary, MAX_IT_BLOCK_PREFIX);
5380 if (buf_len == 0)
5381 /* No room for an IT instruction. */
5382 return bpaddr;
5383
5384 buf = xmalloc (buf_len);
5385 if (target_read_memory (bpaddr - buf_len, buf, buf_len) != 0)
5386 return bpaddr;
5387 any = 0;
5388 for (i = 0; i < buf_len; i += 2)
5389 {
5390 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5391 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
5392 {
5393 any = 1;
5394 break;
5395 }
5396 }
5397 if (any == 0)
5398 {
5399 xfree (buf);
5400 return bpaddr;
5401 }
5402
5403 /* OK, the code bytes before this instruction contain at least one
5404 halfword which resembles an IT instruction. We know that it's
5405 Thumb code, but there are still two possibilities. Either the
5406 halfword really is an IT instruction, or it is the second half of
5407 a 32-bit Thumb instruction. The only way we can tell is to
5408 scan forwards from a known instruction boundary. */
5409 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
5410 {
5411 int definite;
5412
5413 /* There's a lot of code before this instruction. Start with an
5414 optimistic search; it's easy to recognize halfwords that can
5415 not be the start of a 32-bit instruction, and use that to
5416 lock on to the instruction boundaries. */
5417 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
5418 if (buf == NULL)
5419 return bpaddr;
5420 buf_len = IT_SCAN_THRESHOLD;
5421
5422 definite = 0;
5423 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
5424 {
5425 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5426 if (thumb_insn_size (inst1) == 2)
5427 {
5428 definite = 1;
5429 break;
5430 }
5431 }
5432
5433 /* At this point, if DEFINITE, BUF[I] is the first place we
5434 are sure that we know the instruction boundaries, and it is far
5435 enough from BPADDR that we could not miss an IT instruction
5436 affecting BPADDR. If ! DEFINITE, give up - start from a
5437 known boundary. */
5438 if (! definite)
5439 {
5440 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
5441 bpaddr - boundary);
5442 if (buf == NULL)
5443 return bpaddr;
5444 buf_len = bpaddr - boundary;
5445 i = 0;
5446 }
5447 }
5448 else
5449 {
5450 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
5451 if (buf == NULL)
5452 return bpaddr;
5453 buf_len = bpaddr - boundary;
5454 i = 0;
5455 }
5456
5457 /* Scan forwards. Find the last IT instruction before BPADDR. */
5458 last_it = -1;
5459 last_it_count = 0;
5460 while (i < buf_len)
5461 {
5462 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5463 last_it_count--;
5464 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
5465 {
5466 last_it = i;
5467 if (inst1 & 0x0001)
5468 last_it_count = 4;
5469 else if (inst1 & 0x0002)
5470 last_it_count = 3;
5471 else if (inst1 & 0x0004)
5472 last_it_count = 2;
5473 else
5474 last_it_count = 1;
5475 }
5476 i += thumb_insn_size (inst1);
5477 }
5478
5479 xfree (buf);
5480
5481 if (last_it == -1)
5482 /* There wasn't really an IT instruction after all. */
5483 return bpaddr;
5484
5485 if (last_it_count < 1)
5486 /* It was too far away. */
5487 return bpaddr;
5488
5489 /* This really is a trouble spot. Move the breakpoint to the IT
5490 instruction. */
5491 return bpaddr - buf_len + last_it;
5492 }
5493
5494 /* ARM displaced stepping support.
5495
5496 Generally ARM displaced stepping works as follows:
5497
5498 1. When an instruction is to be single-stepped, it is first decoded by
5499 arm_process_displaced_insn (called from arm_displaced_step_copy_insn).
5500 Depending on the type of instruction, it is then copied to a scratch
5501 location, possibly in a modified form. The copy_* set of functions
5502 performs such modification, as necessary. A breakpoint is placed after
5503 the modified instruction in the scratch space to return control to GDB.
5504 Note in particular that instructions which modify the PC will no longer
5505 do so after modification.
5506
5507 2. The instruction is single-stepped, by setting the PC to the scratch
5508 location address, and resuming. Control returns to GDB when the
5509 breakpoint is hit.
5510
5511 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
5512 function used for the current instruction. This function's job is to
5513 put the CPU/memory state back to what it would have been if the
5514 instruction had been executed unmodified in its original location. */
5515
5516 /* NOP instruction (mov r0, r0). */
5517 #define ARM_NOP 0xe1a00000
5518 #define THUMB_NOP 0x4600
5519
5520 /* Helper for register reads for displaced stepping. In particular, this
5521 returns the PC as it would be seen by the instruction at its original
5522 location. */
5523
5524 ULONGEST
5525 displaced_read_reg (struct regcache *regs, struct displaced_step_closure *dsc,
5526 int regno)
5527 {
5528 ULONGEST ret;
5529 CORE_ADDR from = dsc->insn_addr;
5530
5531 if (regno == ARM_PC_REGNUM)
5532 {
5533 /* Compute pipeline offset:
5534 - When executing an ARM instruction, PC reads as the address of the
5535 current instruction plus 8.
5536 - When executing a Thumb instruction, PC reads as the address of the
5537 current instruction plus 4. */
5538
5539 if (!dsc->is_thumb)
5540 from += 8;
5541 else
5542 from += 4;
5543
5544 if (debug_displaced)
5545 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
5546 (unsigned long) from);
5547 return (ULONGEST) from;
5548 }
5549 else
5550 {
5551 regcache_cooked_read_unsigned (regs, regno, &ret);
5552 if (debug_displaced)
5553 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
5554 regno, (unsigned long) ret);
5555 return ret;
5556 }
5557 }
5558
5559 static int
5560 displaced_in_arm_mode (struct regcache *regs)
5561 {
5562 ULONGEST ps;
5563 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
5564
5565 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5566
5567 return (ps & t_bit) == 0;
5568 }
5569
5570 /* Write to the PC as from a branch instruction. */
5571
5572 static void
5573 branch_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5574 ULONGEST val)
5575 {
5576 if (!dsc->is_thumb)
5577 /* Note: If bits 0/1 are set, this branch would be unpredictable for
5578 architecture versions < 6. */
5579 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5580 val & ~(ULONGEST) 0x3);
5581 else
5582 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5583 val & ~(ULONGEST) 0x1);
5584 }
5585
5586 /* Write to the PC as from a branch-exchange instruction. */
5587
5588 static void
5589 bx_write_pc (struct regcache *regs, ULONGEST val)
5590 {
5591 ULONGEST ps;
5592 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
5593
5594 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5595
5596 if ((val & 1) == 1)
5597 {
5598 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
5599 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
5600 }
5601 else if ((val & 2) == 0)
5602 {
5603 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5604 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
5605 }
5606 else
5607 {
5608 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
5609 mode, align dest to 4 bytes). */
5610 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
5611 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5612 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
5613 }
5614 }
5615
5616 /* Write to the PC as if from a load instruction. */
5617
5618 static void
5619 load_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5620 ULONGEST val)
5621 {
5622 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
5623 bx_write_pc (regs, val);
5624 else
5625 branch_write_pc (regs, dsc, val);
5626 }
5627
5628 /* Write to the PC as if from an ALU instruction. */
5629
5630 static void
5631 alu_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5632 ULONGEST val)
5633 {
5634 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
5635 bx_write_pc (regs, val);
5636 else
5637 branch_write_pc (regs, dsc, val);
5638 }
5639
5640 /* Helper for writing to registers for displaced stepping. Writing to the PC
   has varying effects depending on the instruction which does the write:
5642 this is controlled by the WRITE_PC argument. */
5643
5644 void
5645 displaced_write_reg (struct regcache *regs, struct displaced_step_closure *dsc,
5646 int regno, ULONGEST val, enum pc_write_style write_pc)
5647 {
5648 if (regno == ARM_PC_REGNUM)
5649 {
5650 if (debug_displaced)
5651 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
5652 (unsigned long) val);
5653 switch (write_pc)
5654 {
5655 case BRANCH_WRITE_PC:
5656 branch_write_pc (regs, dsc, val);
5657 break;
5658
5659 case BX_WRITE_PC:
5660 bx_write_pc (regs, val);
5661 break;
5662
5663 case LOAD_WRITE_PC:
5664 load_write_pc (regs, dsc, val);
5665 break;
5666
5667 case ALU_WRITE_PC:
5668 alu_write_pc (regs, dsc, val);
5669 break;
5670
5671 case CANNOT_WRITE_PC:
5672 warning (_("Instruction wrote to PC in an unexpected way when "
5673 "single-stepping"));
5674 break;
5675
5676 default:
5677 internal_error (__FILE__, __LINE__,
5678 _("Invalid argument to displaced_write_reg"));
5679 }
5680
5681 dsc->wrote_to_pc = 1;
5682 }
5683 else
5684 {
5685 if (debug_displaced)
5686 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
5687 regno, (unsigned long) val);
5688 regcache_cooked_write_unsigned (regs, regno, val);
5689 }
5690 }
5691
5692 /* This function is used to concisely determine if an instruction INSN
5693 references PC. Register fields of interest in INSN should have the
5694 corresponding fields of BITMASK set to 0b1111. The function
   returns 1 if any of these fields in INSN reference the PC
5696 (also 0b1111, r15), else it returns 0. */
5697
static int
insn_references_pc (uint32_t insn, uint32_t bitmask)
{
  while (bitmask != 0)
    {
      /* Isolate the lowest remaining register field: the least
	 significant set bit of BITMASK times 0xf spans that four-bit
	 slot.  */
      uint32_t low = bitmask & -bitmask;
      uint32_t field = low * 0xf;

      /* A field holding all ones names r15, the PC.  */
      if ((insn & field) == field)
	return 1;

      bitmask &= ~field;
    }

  return 0;
}
5723
5724 /* The simplest copy function. Many instructions have the same effect no
5725 matter what address they are executed at: in those cases, use this. */
5726
5727 static int
5728 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
5729 const char *iname, struct displaced_step_closure *dsc)
5730 {
5731 if (debug_displaced)
5732 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
5733 "opcode/class '%s' unmodified\n", (unsigned long) insn,
5734 iname);
5735
5736 dsc->modinsn[0] = insn;
5737
5738 return 0;
5739 }
5740
5741 static int
5742 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
5743 uint16_t insn2, const char *iname,
5744 struct displaced_step_closure *dsc)
5745 {
5746 if (debug_displaced)
5747 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
5748 "opcode/class '%s' unmodified\n", insn1, insn2,
5749 iname);
5750
5751 dsc->modinsn[0] = insn1;
5752 dsc->modinsn[1] = insn2;
5753 dsc->numinsns = 2;
5754
5755 return 0;
5756 }
5757
/* Copy 16-bit Thumb (Thumb and 16-bit Thumb-2) instruction without any
5759 modification. */
5760 static int
5761 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, unsigned int insn,
5762 const char *iname,
5763 struct displaced_step_closure *dsc)
5764 {
5765 if (debug_displaced)
5766 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
5767 "opcode/class '%s' unmodified\n", insn,
5768 iname);
5769
5770 dsc->modinsn[0] = insn;
5771
5772 return 0;
5773 }
5774
5775 /* Preload instructions with immediate offset. */
5776
5777 static void
5778 cleanup_preload (struct gdbarch *gdbarch,
5779 struct regcache *regs, struct displaced_step_closure *dsc)
5780 {
5781 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5782 if (!dsc->u.preload.immed)
5783 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5784 }
5785
5786 static void
5787 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
5788 struct displaced_step_closure *dsc, unsigned int rn)
5789 {
5790 ULONGEST rn_val;
5791 /* Preload instructions:
5792
5793 {pli/pld} [rn, #+/-imm]
5794 ->
5795 {pli/pld} [r0, #+/-imm]. */
5796
5797 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5798 rn_val = displaced_read_reg (regs, dsc, rn);
5799 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5800 dsc->u.preload.immed = 1;
5801
5802 dsc->cleanup = &cleanup_preload;
5803 }
5804
5805 static int
5806 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5807 struct displaced_step_closure *dsc)
5808 {
5809 unsigned int rn = bits (insn, 16, 19);
5810
5811 if (!insn_references_pc (insn, 0x000f0000ul))
5812 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
5813
5814 if (debug_displaced)
5815 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
5816 (unsigned long) insn);
5817
5818 dsc->modinsn[0] = insn & 0xfff0ffff;
5819
5820 install_preload (gdbarch, regs, dsc, rn);
5821
5822 return 0;
5823 }
5824
/* Copy a Thumb-2 {pli/pld} with immediate offset.  Only the forms
   that use the PC as the base register need rewriting; those are
   recast to use r0 (PC value) and r1 (signed offset) instead.  */
static int
thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
		     struct regcache *regs, struct displaced_step_closure *dsc)
{
  unsigned int rn = bits (insn1, 0, 3);
  unsigned int u_bit = bit (insn1, 7);	/* Set means "add the offset".  */
  int imm12 = bits (insn2, 0, 11);
  ULONGEST pc_val;

  if (rn != ARM_PC_REGNUM)
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);

  /* PC is only allowed to use in PLI (immediate,literal) Encoding T3, and
     PLD (literal) Encoding T1.  */
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
			(unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
			imm12);

  /* Fold the U (add/subtract) bit into the sign of the r1 value.  */
  if (!u_bit)
    imm12 = -1 * imm12;

  /* Rewrite instruction {pli/pld} PC imm12 into:
     Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12

     {pli/pld} [r0, r1]

     Cleanup: r0 <- tmp[0], r1 <- tmp[1].  */

  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);

  pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);

  displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
  /* Register-offset form: cleanup_preload must restore r1 as well.  */
  dsc->u.preload.immed = 0;

  /* {pli/pld} [r0, r1] */
  dsc->modinsn[0] = insn1 & 0xfff0;
  dsc->modinsn[1] = 0xf001;
  dsc->numinsns = 2;

  dsc->cleanup = &cleanup_preload;
  return 0;
}
5872
5873 /* Preload instructions with register offset. */
5874
5875 static void
5876 install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
5877 struct displaced_step_closure *dsc, unsigned int rn,
5878 unsigned int rm)
5879 {
5880 ULONGEST rn_val, rm_val;
5881
5882 /* Preload register-offset instructions:
5883
5884 {pli/pld} [rn, rm {, shift}]
5885 ->
5886 {pli/pld} [r0, r1 {, shift}]. */
5887
5888 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5889 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5890 rn_val = displaced_read_reg (regs, dsc, rn);
5891 rm_val = displaced_read_reg (regs, dsc, rm);
5892 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5893 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
5894 dsc->u.preload.immed = 0;
5895
5896 dsc->cleanup = &cleanup_preload;
5897 }
5898
5899 static int
5900 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
5901 struct regcache *regs,
5902 struct displaced_step_closure *dsc)
5903 {
5904 unsigned int rn = bits (insn, 16, 19);
5905 unsigned int rm = bits (insn, 0, 3);
5906
5907
5908 if (!insn_references_pc (insn, 0x000f000ful))
5909 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
5910
5911 if (debug_displaced)
5912 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
5913 (unsigned long) insn);
5914
5915 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
5916
5917 install_preload_reg (gdbarch, regs, dsc, rn, rm);
5918 return 0;
5919 }
5920
5921 /* Copy/cleanup coprocessor load and store instructions. */
5922
5923 static void
5924 cleanup_copro_load_store (struct gdbarch *gdbarch,
5925 struct regcache *regs,
5926 struct displaced_step_closure *dsc)
5927 {
5928 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
5929
5930 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5931
5932 if (dsc->u.ldst.writeback)
5933 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
5934 }
5935
5936 static void
5937 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5938 struct displaced_step_closure *dsc,
5939 int writeback, unsigned int rn)
5940 {
5941 ULONGEST rn_val;
5942
5943 /* Coprocessor load/store instructions:
5944
5945 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
5946 ->
5947 {stc/stc2} [r0, #+/-imm].
5948
5949 ldc/ldc2 are handled identically. */
5950
5951 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5952 rn_val = displaced_read_reg (regs, dsc, rn);
5953 /* PC should be 4-byte aligned. */
5954 rn_val = rn_val & 0xfffffffc;
5955 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5956
5957 dsc->u.ldst.writeback = writeback;
5958 dsc->u.ldst.rn = rn;
5959
5960 dsc->cleanup = &cleanup_copro_load_store;
5961 }
5962
5963 static int
5964 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
5965 struct regcache *regs,
5966 struct displaced_step_closure *dsc)
5967 {
5968 unsigned int rn = bits (insn, 16, 19);
5969
5970 if (!insn_references_pc (insn, 0x000f0000ul))
5971 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
5972
5973 if (debug_displaced)
5974 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
5975 "load/store insn %.8lx\n", (unsigned long) insn);
5976
5977 dsc->modinsn[0] = insn & 0xfff0ffff;
5978
5979 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
5980
5981 return 0;
5982 }
5983
5984 static int
5985 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
5986 uint16_t insn2, struct regcache *regs,
5987 struct displaced_step_closure *dsc)
5988 {
5989 unsigned int rn = bits (insn1, 0, 3);
5990
5991 if (rn != ARM_PC_REGNUM)
5992 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
5993 "copro load/store", dsc);
5994
5995 if (debug_displaced)
5996 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
5997 "load/store insn %.4x%.4x\n", insn1, insn2);
5998
5999 dsc->modinsn[0] = insn1 & 0xfff0;
6000 dsc->modinsn[1] = insn2;
6001 dsc->numinsns = 2;
6002
6003 /* This function is called for copying instruction LDC/LDC2/VLDR, which
6004 doesn't support writeback, so pass 0. */
6005 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
6006
6007 return 0;
6008 }
6009
6010 /* Clean up branch instructions (actually perform the branch, by setting
6011 PC). */
6012
6013 static void
6014 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
6015 struct displaced_step_closure *dsc)
6016 {
6017 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6018 int branch_taken = condition_true (dsc->u.branch.cond, status);
6019 enum pc_write_style write_pc = dsc->u.branch.exchange
6020 ? BX_WRITE_PC : BRANCH_WRITE_PC;
6021
6022 if (!branch_taken)
6023 return;
6024
6025 if (dsc->u.branch.link)
6026 {
6027 /* The value of LR should be the next insn of current one. In order
6028 not to confuse logic hanlding later insn `bx lr', if current insn mode
6029 is Thumb, the bit 0 of LR value should be set to 1. */
6030 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
6031
6032 if (dsc->is_thumb)
6033 next_insn_addr |= 0x1;
6034
6035 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
6036 CANNOT_WRITE_PC);
6037 }
6038
6039 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
6040 }
6041
6042 /* Copy B/BL/BLX instructions with immediate destinations. */
6043
6044 static void
6045 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
6046 struct displaced_step_closure *dsc,
6047 unsigned int cond, int exchange, int link, long offset)
6048 {
6049 /* Implement "BL<cond> <label>" as:
6050
6051 Preparation: cond <- instruction condition
6052 Insn: mov r0, r0 (nop)
6053 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
6054
6055 B<cond> similar, but don't set r14 in cleanup. */
6056
6057 dsc->u.branch.cond = cond;
6058 dsc->u.branch.link = link;
6059 dsc->u.branch.exchange = exchange;
6060
6061 dsc->u.branch.dest = dsc->insn_addr;
6062 if (link && exchange)
6063 /* For BLX, offset is computed from the Align (PC, 4). */
6064 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
6065
6066 if (dsc->is_thumb)
6067 dsc->u.branch.dest += 4 + offset;
6068 else
6069 dsc->u.branch.dest += 8 + offset;
6070
6071 dsc->cleanup = &cleanup_branch;
6072 }
6073 static int
6074 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
6075 struct regcache *regs, struct displaced_step_closure *dsc)
6076 {
6077 unsigned int cond = bits (insn, 28, 31);
6078 int exchange = (cond == 0xf);
6079 int link = exchange || bit (insn, 24);
6080 long offset;
6081
6082 if (debug_displaced)
6083 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
6084 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
6085 (unsigned long) insn);
6086 if (exchange)
6087 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
6088 then arrange the switch into Thumb mode. */
6089 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
6090 else
6091 offset = bits (insn, 0, 23) << 2;
6092
6093 if (bit (offset, 25))
6094 offset = offset | ~0x3ffffff;
6095
6096 dsc->modinsn[0] = ARM_NOP;
6097
6098 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
6099 return 0;
6100 }
6101
/* Copy a Thumb-2 32-bit B/BL/BLX with immediate destination.  The
   branch offset is scattered across both halfwords (S, J1, J2 and two
   immediate fields); reassemble it, then step a NOP in the scratch
   area and let cleanup_branch perform the actual branch.  */
static int
thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
		      uint16_t insn2, struct regcache *regs,
		      struct displaced_step_closure *dsc)
{
  int link = bit (insn2, 14);
  int exchange = link && !bit (insn2, 12);
  int cond = INST_AL;
  long offset = 0;
  int j1 = bit (insn2, 13);
  int j2 = bit (insn2, 11);
  int s = sbits (insn1, 10, 10);	/* Sign bit of the offset.  */
  int i1 = !(j1 ^ bit (insn1, 10));
  int i2 = !(j2 ^ bit (insn1, 10));

  if (!link && !exchange) /* B */
    {
      offset = (bits (insn2, 0, 10) << 1);
      if (bit (insn2, 12)) /* Encoding T4 */
	{
	  /* Unconditional: wide immediate built from I1/I2/S.  */
	  offset |= (bits (insn1, 0, 9) << 12)
	    | (i2 << 22)
	    | (i1 << 23)
	    | (s << 24);
	  cond = INST_AL;
	}
      else /* Encoding T3 */
	{
	  /* Conditional: narrower immediate, condition in insn1.  */
	  offset |= (bits (insn1, 0, 5) << 12)
	    | (j1 << 18)
	    | (j2 << 19)
	    | (s << 20);
	  cond = bits (insn1, 6, 9);
	}
    }
  else
    {
      /* BL/BLX: BLX offsets are word-scaled (target is ARM state),
	 BL offsets halfword-scaled.  */
      offset = (bits (insn1, 0, 9) << 12);
      offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
      offset |= exchange ?
	(bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
    }

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
			"%.4x %.4x with offset %.8lx\n",
			link ? (exchange) ? "blx" : "bl" : "b",
			insn1, insn2, offset);

  dsc->modinsn[0] = THUMB_NOP;

  install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
  return 0;
}
6156
/* Copy B Thumb instructions.  Decodes the branch offset and condition
   from a 16-bit Thumb B instruction (conditional encoding T1, or
   unconditional encoding T2), replaces the out-of-line copy with a
   NOP, and lets cleanup_branch perform the branch after the step.
   Returns 0 on success.  */
static int
thumb_copy_b (struct gdbarch *gdbarch, unsigned short insn,
	      struct displaced_step_closure *dsc)
{
  unsigned int cond = 0;
  int offset = 0;
  unsigned short bit_12_15 = bits (insn, 12, 15);
  CORE_ADDR from = dsc->insn_addr;

  if (bit_12_15 == 0xd)		/* Encoding T1: conditional branch.  */
    {
      /* offset = SignExtend (imm8:0, 32) */
      offset = sbits ((insn << 1), 0, 8);
      cond = bits (insn, 8, 11);
    }
  else if (bit_12_15 == 0xe) /* Encoding T2 */
    {
      /* offset = SignExtend (imm11:0, 32); branch is unconditional.  */
      offset = sbits ((insn << 1), 0, 11);
      cond = INST_AL;
    }

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying b immediate insn %.4x "
			"with offset %d\n", insn, offset);

  /* Destination is the original insn address plus the Thumb pipeline
     offset (4) plus the decoded displacement.  */
  dsc->u.branch.cond = cond;
  dsc->u.branch.link = 0;
  dsc->u.branch.exchange = 0;
  dsc->u.branch.dest = from + 4 + offset;

  dsc->modinsn[0] = THUMB_NOP;

  dsc->cleanup = &cleanup_branch;

  return 0;
}
6195
6196 /* Copy BX/BLX with register-specified destinations. */
6197
6198 static void
6199 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
6200 struct displaced_step_closure *dsc, int link,
6201 unsigned int cond, unsigned int rm)
6202 {
6203 /* Implement {BX,BLX}<cond> <reg>" as:
6204
6205 Preparation: cond <- instruction condition
6206 Insn: mov r0, r0 (nop)
6207 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
6208
6209 Don't set r14 in cleanup for BX. */
6210
6211 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
6212
6213 dsc->u.branch.cond = cond;
6214 dsc->u.branch.link = link;
6215
6216 dsc->u.branch.exchange = 1;
6217
6218 dsc->cleanup = &cleanup_branch;
6219 }
6220
6221 static int
6222 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
6223 struct regcache *regs, struct displaced_step_closure *dsc)
6224 {
6225 unsigned int cond = bits (insn, 28, 31);
6226 /* BX: x12xxx1x
6227 BLX: x12xxx3x. */
6228 int link = bit (insn, 5);
6229 unsigned int rm = bits (insn, 0, 3);
6230
6231 if (debug_displaced)
6232 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
6233 (unsigned long) insn);
6234
6235 dsc->modinsn[0] = ARM_NOP;
6236
6237 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
6238 return 0;
6239 }
6240
6241 static int
6242 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
6243 struct regcache *regs,
6244 struct displaced_step_closure *dsc)
6245 {
6246 int link = bit (insn, 7);
6247 unsigned int rm = bits (insn, 3, 6);
6248
6249 if (debug_displaced)
6250 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
6251 (unsigned short) insn);
6252
6253 dsc->modinsn[0] = THUMB_NOP;
6254
6255 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
6256
6257 return 0;
6258 }
6259
6260
6261 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
6262
6263 static void
6264 cleanup_alu_imm (struct gdbarch *gdbarch,
6265 struct regcache *regs, struct displaced_step_closure *dsc)
6266 {
6267 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
6268 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6269 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6270 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6271 }
6272
/* Copy an ARM data-processing instruction with an immediate RHS when
   it references the PC.  The copied insn is rewritten to operate on
   r0/r1 instead of rd/rn; cleanup_alu_imm moves the result back into
   rd afterwards.  Returns 0 on success.  */

static int
arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
		  struct displaced_step_closure *dsc)
{
  unsigned int rn = bits (insn, 16, 19);
  unsigned int rd = bits (insn, 12, 15);
  unsigned int op = bits (insn, 21, 24);
  int is_mov = (op == 0xd);
  ULONGEST rd_val, rn_val;

  /* If neither Rd nor Rn is the PC, the insn can be stepped as-is.  */
  if (!insn_references_pc (insn, 0x000ff000ul))
    return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
			"%.8lx\n", is_mov ? "move" : "ALU",
			(unsigned long) insn);

  /* Instruction is of form:

     <op><cond> rd, [rn,] #imm

     Rewrite as:

     Preparation: tmp1, tmp2 <- r0, r1;
		  r0, r1 <- rd, rn
     Insn: <op><cond> r0, r1, #imm
     Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
  */

  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
  rn_val = displaced_read_reg (regs, dsc, rn);
  rd_val = displaced_read_reg (regs, dsc, rd);
  displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
  dsc->rd = rd;

  /* Clear the Rd field (-> r0); MOV has no Rn field, for other ops
     set Rn to r1.  */
  if (is_mov)
    dsc->modinsn[0] = insn & 0xfff00fff;
  else
    dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;

  dsc->cleanup = &cleanup_alu_imm;

  return 0;
}
6320
/* Copy a 32-bit Thumb-2 ALU-immediate instruction.  Only reached for
   MOV (asserted below: op == 0x2, rn == 0xf); when the instruction
   references the PC it is rewritten to use r0/r1 as scratch, with
   cleanup_alu_imm restoring state afterwards.  Returns 0 on
   success.  */

static int
thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
		     uint16_t insn2, struct regcache *regs,
		     struct displaced_step_closure *dsc)
{
  unsigned int op = bits (insn1, 5, 8);
  unsigned int rn, rm, rd;
  ULONGEST rd_val, rn_val;

  rn = bits (insn1, 0, 3);	/* Rn */
  rm = bits (insn2, 0, 3);	/* Rm */
  rd = bits (insn2, 8, 11);	/* Rd */

  /* This routine is only called for instruction MOV.  */
  gdb_assert (op == 0x2 && rn == 0xf);

  if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
			"ALU", insn1, insn2);

  /* Instruction is of form:

     <op><cond> rd, [rn,] #imm

     Rewrite as:

     Preparation: tmp1, tmp2 <- r0, r1;
		  r0, r1 <- rd, rn
     Insn: <op><cond> r0, r1, #imm
     Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
  */

  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
  rn_val = displaced_read_reg (regs, dsc, rn);
  rd_val = displaced_read_reg (regs, dsc, rd);
  displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
  dsc->rd = rd;

  /* Second halfword of the copy: clear bits 8-11 (Rd -> r0) and
     bits 0-3, then set the low field to 1 (-> r1).  */
  dsc->modinsn[0] = insn1;
  dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
  dsc->numinsns = 2;

  dsc->cleanup = &cleanup_alu_imm;

  return 0;
}
6372
6373 /* Copy/cleanup arithmetic/logic insns with register RHS. */
6374
6375 static void
6376 cleanup_alu_reg (struct gdbarch *gdbarch,
6377 struct regcache *regs, struct displaced_step_closure *dsc)
6378 {
6379 ULONGEST rd_val;
6380 int i;
6381
6382 rd_val = displaced_read_reg (regs, dsc, 0);
6383
6384 for (i = 0; i < 3; i++)
6385 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6386
6387 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6388 }
6389
6390 static void
6391 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
6392 struct displaced_step_closure *dsc,
6393 unsigned int rd, unsigned int rn, unsigned int rm)
6394 {
6395 ULONGEST rd_val, rn_val, rm_val;
6396
6397 /* Instruction is of form:
6398
6399 <op><cond> rd, [rn,] rm [, <shift>]
6400
6401 Rewrite as:
6402
6403 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
6404 r0, r1, r2 <- rd, rn, rm
6405 Insn: <op><cond> r0, r1, r2 [, <shift>]
6406 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
6407 */
6408
6409 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6410 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6411 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6412 rd_val = displaced_read_reg (regs, dsc, rd);
6413 rn_val = displaced_read_reg (regs, dsc, rn);
6414 rm_val = displaced_read_reg (regs, dsc, rm);
6415 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6416 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6417 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6418 dsc->rd = rd;
6419
6420 dsc->cleanup = &cleanup_alu_reg;
6421 }
6422
/* Copy an ARM data-processing instruction with a register RHS when it
   references the PC.  The operands are remapped onto r0-r2 and
   cleanup_alu_reg moves the result back into rd.  Returns 0 on
   success.  */

static int
arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
		  struct displaced_step_closure *dsc)
{
  unsigned int op = bits (insn, 21, 24);
  int is_mov = (op == 0xd);

  /* Insns that mention no PC in Rd, Rn or Rm can run unmodified.  */
  if (!insn_references_pc (insn, 0x000ff00ful))
    return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
			is_mov ? "move" : "ALU", (unsigned long) insn);

  /* In the copy: Rd -> r0, Rm -> r2, and Rn -> r1 for non-MOV ops
     (MOV has no Rn field).  */
  if (is_mov)
    dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
  else
    dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;

  install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
		   bits (insn, 0, 3));
  return 0;
}
6446
/* Copy a 16-bit Thumb hi-register ALU instruction that involves the
   PC, remapping its operands onto low scratch registers.  Returns 0
   on success.  */

static int
thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
		    struct regcache *regs,
		    struct displaced_step_closure *dsc)
{
  unsigned rn, rm, rd;

  /* NOTE(review): in this encoding bits 3-6 are the Rm field and
     bit 7 + bits 0-2 the Rdn field, so the local names "rd" and "rn"
     below look swapped relative to the ARM ARM field names -- confirm
     against the architecture manual before changing anything here.  */
  rd = bits (insn, 3, 6);
  rn = (bit (insn, 7) << 3) | bits (insn, 0, 2);
  rm = 2;

  if (rd != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
    return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x\n",
			"ALU", (unsigned short) insn);

  /* Low byte 0x08 makes the copy operate on r0 (Rdn field = 0) and r1
     (bits 3-6 = 1).  */
  dsc->modinsn[0] = ((insn & 0xff00) | 0x08);

  install_alu_reg (gdbarch, regs, dsc, rd, rn, rm);

  return 0;
}
6471
6472 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
6473
6474 static void
6475 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
6476 struct regcache *regs,
6477 struct displaced_step_closure *dsc)
6478 {
6479 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
6480 int i;
6481
6482 for (i = 0; i < 4; i++)
6483 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6484
6485 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6486 }
6487
6488 static void
6489 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
6490 struct displaced_step_closure *dsc,
6491 unsigned int rd, unsigned int rn, unsigned int rm,
6492 unsigned rs)
6493 {
6494 int i;
6495 ULONGEST rd_val, rn_val, rm_val, rs_val;
6496
6497 /* Instruction is of form:
6498
6499 <op><cond> rd, [rn,] rm, <shift> rs
6500
6501 Rewrite as:
6502
6503 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
6504 r0, r1, r2, r3 <- rd, rn, rm, rs
6505 Insn: <op><cond> r0, r1, r2, <shift> r3
6506 Cleanup: tmp5 <- r0
6507 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
6508 rd <- tmp5
6509 */
6510
6511 for (i = 0; i < 4; i++)
6512 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6513
6514 rd_val = displaced_read_reg (regs, dsc, rd);
6515 rn_val = displaced_read_reg (regs, dsc, rn);
6516 rm_val = displaced_read_reg (regs, dsc, rm);
6517 rs_val = displaced_read_reg (regs, dsc, rs);
6518 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6519 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6520 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6521 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
6522 dsc->rd = rd;
6523 dsc->cleanup = &cleanup_alu_shifted_reg;
6524 }
6525
/* Copy an ARM data-processing instruction with a register-shifted
   register RHS (e.g. "add rd, rn, rm, lsl rs") that references the
   PC.  The four operand registers are remapped onto r0-r3.  Returns 0
   on success.  */

static int
arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  struct displaced_step_closure *dsc)
{
  unsigned int op = bits (insn, 21, 24);
  int is_mov = (op == 0xd);
  unsigned int rd, rn, rm, rs;

  if (!insn_references_pc (insn, 0x000fff0ful))
    return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
			"%.8lx\n", is_mov ? "move" : "ALU",
			(unsigned long) insn);

  rn = bits (insn, 16, 19);
  rm = bits (insn, 0, 3);
  rs = bits (insn, 8, 11);
  rd = bits (insn, 12, 15);

  /* In the copy: Rd -> r0, Rs -> r3, Rm -> r2, and Rn -> r1 for
     non-MOV ops (MOV has no Rn field).  */
  if (is_mov)
    dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
  else
    dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;

  install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);

  return 0;
}
6557
/* Clean up load instructions.  Fetch the loaded value(s) from the
   scratch registers, restore the saved contents of r0-r3, emulate any
   base-register writeback, and finally store the loaded value into
   the true destination register (possibly the PC).  */

static void
cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
	      struct displaced_step_closure *dsc)
{
  ULONGEST rt_val, rt_val2 = 0, rn_val;

  /* The rewritten insn loaded into r0 (and r1 for 8-byte transfers);
     r2 holds the possibly-written-back base address.  */
  rt_val = displaced_read_reg (regs, dsc, 0);
  if (dsc->u.ldst.xfersize == 8)
    rt_val2 = displaced_read_reg (regs, dsc, 1);
  rn_val = displaced_read_reg (regs, dsc, 2);

  displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
  if (dsc->u.ldst.xfersize > 4)
    displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
  /* r3 was used as the index register for register-offset forms.  */
  if (!dsc->u.ldst.immed)
    displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);

  /* Handle register writeback.  */
  if (dsc->u.ldst.writeback)
    displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
  /* Put result in right place.  */
  displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
  if (dsc->u.ldst.xfersize == 8)
    displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
}
6586
6587 /* Clean up store instructions. */
6588
6589 static void
6590 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
6591 struct displaced_step_closure *dsc)
6592 {
6593 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
6594
6595 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6596 if (dsc->u.ldst.xfersize > 4)
6597 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6598 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
6599 if (!dsc->u.ldst.immed)
6600 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
6601 if (!dsc->u.ldst.restore_r4)
6602 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
6603
6604 /* Writeback. */
6605 if (dsc->u.ldst.writeback)
6606 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
6607 }
6608
/* Copy "extra" load/store instructions.  These are halfword/doubleword
   transfers, which have a different encoding to byte/word transfers.  */

static int
arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unpriveleged,
		      struct regcache *regs, struct displaced_step_closure *dsc)
{
  unsigned int op1 = bits (insn, 20, 24);
  unsigned int op2 = bits (insn, 5, 6);
  unsigned int rt = bits (insn, 12, 15);
  unsigned int rn = bits (insn, 16, 19);
  unsigned int rm = bits (insn, 0, 3);
  /* Tables indexed by OPCODE (computed below): whether the form is a
     load, and the transfer size in bytes (8 = doubleword, which uses
     the register pair rt, rt+1).  */
  char load[12]     = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
  char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
  int immed = (op1 & 0x4) != 0;
  int opcode;
  ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;

  /* If no operand register is the PC, the insn can be stepped as-is.  */
  if (!insn_references_pc (insn, 0x000ff00ful))
    return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
			"insn %.8lx\n", unpriveleged ? "unpriveleged " : "",
			(unsigned long) insn);

  /* Fold op2 and the relevant op1 bits into a table index.  */
  opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;

  if (opcode < 0)
    internal_error (__FILE__, __LINE__,
		    _("copy_extra_ld_st: instruction decode error"));

  /* Save scratch registers r0-r2 (and r3 for register-offset forms),
     then seed them with the instruction's operand values.  */
  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
  dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
  if (!immed)
    dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);

  rt_val = displaced_read_reg (regs, dsc, rt);
  if (bytesize[opcode] == 8)
    rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
  rn_val = displaced_read_reg (regs, dsc, rn);
  if (!immed)
    rm_val = displaced_read_reg (regs, dsc, rm);

  displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
  if (bytesize[opcode] == 8)
    displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
  if (!immed)
    displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);

  /* Record what the cleanup routine needs to undo all of this.  */
  dsc->rd = rt;
  dsc->u.ldst.xfersize = bytesize[opcode];
  dsc->u.ldst.rn = rn;
  dsc->u.ldst.immed = immed;
  dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
  dsc->u.ldst.restore_r4 = 0;

  if (immed)
    /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
       ->
       {ldr,str}<width><cond> r0, [r1,] [r2, #imm].  */
    dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
  else
    /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
       ->
       {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3].  */
    dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;

  dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;

  return 0;
}
6683
/* Copy byte/half word/word loads and stores.  Common preparation for
   rewriting a single-register transfer onto scratch registers: saves
   the scratch registers, loads them with the operand values, records
   the transfer parameters in DSC, and selects the cleanup routine.
   The caller fills in dsc->modinsn itself.  */

static void
install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
		    struct displaced_step_closure *dsc, int load,
		    int immed, int writeback, int size, int usermode,
		    int rt, int rm, int rn)
{
  ULONGEST rt_val, rn_val, rm_val = 0;

  /* Save scratch registers: r0 (data), r2 (base), r3 (index, only for
     register-offset forms), and r4 (only clobbered when a store of
     the PC uses the scratch sequence below).  NOTE(review): tmp[1] is
     not saved here although cleanup_load/cleanup_store restore r1
     when xfersize > 4 -- presumably callers never use this path for
     doubleword transfers; confirm.  */
  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
  if (!immed)
    dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
  if (!load)
    dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);

  rt_val = displaced_read_reg (regs, dsc, rt);
  rn_val = displaced_read_reg (regs, dsc, rn);
  if (!immed)
    rm_val = displaced_read_reg (regs, dsc, rm);

  /* Seed the scratch registers: r0 <- rt, r2 <- rn, r3 <- rm.  */
  displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
  if (!immed)
    displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
  dsc->rd = rt;
  dsc->u.ldst.xfersize = size;
  dsc->u.ldst.rn = rn;
  dsc->u.ldst.immed = immed;
  dsc->u.ldst.writeback = writeback;

  /* To write PC we can do:

     Before this sequence of instructions:
     r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
     r2 is the Rn value got from dispalced_read_reg.

     Insn1: push {pc} Write address of STR instruction + offset on stack
     Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
     Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
				= addr(Insn1) + offset - addr(Insn3) - 8
				= offset - 16
     Insn4: add r4, r4, #8 r4 = offset - 8
     Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
				= from + offset
     Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])

     Otherwise we don't know what value to write for PC, since the offset is
     architecture-dependent (sometimes PC+8, sometimes PC+12).  More details
     of this can be found in Section "Saving from r15" in
     http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */

  dsc->cleanup = load ? &cleanup_load : &cleanup_store;
}
6739
6740
/* Copy a Thumb-2 PC-relative (literal) load.  The PC-relative address
   is materialized in scratch registers r2 (word-aligned PC) and r3
   (signed immediate), and the copy is rewritten as "LDR r0, [r2, r3]";
   cleanup_load moves r0 into Rt afterwards.  Returns 0 on success.  */

static int
thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
			  uint16_t insn2, struct regcache *regs,
			  struct displaced_step_closure *dsc, int size)
{
  unsigned int u_bit = bit (insn1, 7);
  unsigned int rt = bits (insn2, 12, 15);
  int imm12 = bits (insn2, 0, 11);
  ULONGEST pc_val;

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
			(unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
			imm12);

  /* The U bit selects add versus subtract of the offset.  */
  if (!u_bit)
    imm12 = -1 * imm12;

  /* Rewrite instruction LDR Rt imm12 into:

     Prepare: tmp[0] <- r0, tmp[2] <- r2, tmp[3] <- r3, r2 <- pc, r3 <- imm12

     LDR R0, R2, R3,

     Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[2], r3 <- tmp[3].  */


  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
  dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);

  pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);

  /* Literal loads use Align(PC, 4) as the base.  */
  pc_val = pc_val & 0xfffffffc;

  displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);

  dsc->rd = rt;

  dsc->u.ldst.xfersize = size;
  dsc->u.ldst.immed = 0;
  dsc->u.ldst.writeback = 0;
  dsc->u.ldst.restore_r4 = 0;

  /* LDR R0, R2, R3 */
  dsc->modinsn[0] = 0xf852;
  dsc->modinsn[1] = 0x3;
  dsc->numinsns = 2;

  dsc->cleanup = &cleanup_load;

  return 0;
}
6796
/* Copy a 32-bit Thumb-2 LDR with register or immediate offset when Rt
   or Rn is the PC.  Delegates the scratch-register setup to
   install_load_store, then rewrites the copy to use r0 (data),
   r2 (base) and, for register offsets, r3 (index).  Returns 0 on
   success.  */

static int
thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
			  uint16_t insn2, struct regcache *regs,
			  struct displaced_step_closure *dsc,
			  int writeback, int immed)
{
  unsigned int rt = bits (insn2, 12, 15);
  unsigned int rn = bits (insn1, 0, 3);
  unsigned int rm = bits (insn2, 0, 3);  /* Only valid if !immed.  */
  /* In LDR (register), there is also a register Rm, which is not allowed to
     be PC, so we don't have to check it.  */

  if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
					dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
			rt, rn, insn1, insn2);

  install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
		      0, rt, rm, rn);

  dsc->u.ldst.restore_r4 = 0;

  if (immed)
    /* ldr[b]<cond> rt, [rn, #imm], etc.
       ->
       ldr[b]<cond> r0, [r2, #imm].  */
    {
      dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
      dsc->modinsn[1] = insn2 & 0x0fff;
    }
  else
    /* ldr[b]<cond> rt, [rn, rm], etc.
       ->
       ldr[b]<cond> r0, [r2, r3].  */
    {
      dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
      dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
    }

  dsc->numinsns = 2;

  return 0;
}
6844
6845
/* Copy an ARM single-register load or store (ldr/str/ldrb/strb and
   their unprivileged "t" variants) that references the PC.  Loads,
   and stores of registers other than the PC, need only an operand
   remap; storing the PC requires a five-instruction preamble (using
   r4 as scratch) to compute the architecturally correct stored value
   -- see the comment in install_load_store.  Returns 0 on success.  */

static int
arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
			    struct regcache *regs,
			    struct displaced_step_closure *dsc,
			    int load, int size, int usermode)
{
  int immed = !bit (insn, 25);
  int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
  unsigned int rt = bits (insn, 12, 15);
  unsigned int rn = bits (insn, 16, 19);
  unsigned int rm = bits (insn, 0, 3);  /* Only valid if !immed.  */

  if (!insn_references_pc (insn, 0x000ff00ful))
    return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying %s%s r%d [r%d] insn %.8lx\n",
			load ? (size == 1 ? "ldrb" : "ldr")
			     : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
			rt, rn,
			(unsigned long) insn);

  install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
		      usermode, rt, rm, rn);

  if (load || rt != ARM_PC_REGNUM)
    {
      dsc->u.ldst.restore_r4 = 0;

      if (immed)
	/* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
	   ->
	   {ldr,str}[b]<cond> r0, [r2, #imm].  */
	dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
      else
	/* {ldr,str}[b]<cond> rt, [rn, rm], etc.
	   ->
	   {ldr,str}[b]<cond> r0, [r2, r3].  */
	dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
    }
  else
    {
      /* We need to use r4 as scratch.  Make sure it's restored afterwards.  */
      dsc->u.ldst.restore_r4 = 1;
      /* Compute from + offset into r0; see install_load_store for the
	 full derivation of this sequence.  */
      dsc->modinsn[0] = 0xe92d8000;  /* push {pc} */
      dsc->modinsn[1] = 0xe8bd0010;  /* pop  {r4} */
      dsc->modinsn[2] = 0xe044400f;  /* sub  r4, r4, pc.  */
      dsc->modinsn[3] = 0xe2844008;  /* add  r4, r4, #8.  */
      dsc->modinsn[4] = 0xe0800004;  /* add  r0, r0, r4.  */

      /* As above.  */
      if (immed)
	dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
      else
	dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;

      dsc->numinsns = 6;
    }

  dsc->cleanup = load ? &cleanup_load : &cleanup_store;

  return 0;
}
6910
6911 /* Cleanup LDM instructions with fully-populated register list. This is an
6912 unfortunate corner case: it's impossible to implement correctly by modifying
6913 the instruction. The issue is as follows: we have an instruction,
6914
6915 ldm rN, {r0-r15}
6916
6917 which we must rewrite to avoid loading PC. A possible solution would be to
6918 do the load in two halves, something like (with suitable cleanup
6919 afterwards):
6920
6921 mov r8, rN
6922 ldm[id][ab] r8!, {r0-r7}
6923 str r7, <temp>
6924 ldm[id][ab] r8, {r7-r14}
6925 <bkpt>
6926
6927 but at present there's no suitable place for <temp>, since the scratch space
6928 is overwritten before the cleanup routine is called. For now, we simply
6929 emulate the instruction. */
6930
/* See the comment above: emulate an LDM whose register list is fully
   populated, since it cannot be rewritten safely for out-of-line
   stepping.  */

static void
cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
			struct displaced_step_closure *dsc)
{
  int inc = dsc->u.block.increment;
  /* Per-register address adjustments for the before/after and
     increment/decrement addressing variants.  */
  int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
  int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
  uint32_t regmask = dsc->u.block.regmask;
  /* Walk registers upwards for increment forms, downwards otherwise,
     matching the memory order of the transfer.  */
  int regno = inc ? 0 : 15;
  CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
  int exception_return = dsc->u.block.load && dsc->u.block.user
			 && (regmask & 0x8000) != 0;
  uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
  int do_transfer = condition_true (dsc->u.block.cond, status);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  if (!do_transfer)
    return;

  /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
     sensible we can do here.  Complain loudly.  */
  if (exception_return)
    error (_("Cannot single-step exception return"));

  /* We don't handle any stores here for now.  */
  gdb_assert (dsc->u.block.load != 0);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
			"%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
			dsc->u.block.increment ? "inc" : "dec",
			dsc->u.block.before ? "before" : "after");

  while (regmask)
    {
      uint32_t memword;

      /* Advance to the next register in the list, in transfer order.  */
      if (inc)
	while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
	  regno++;
      else
	while (regno >= 0 && (regmask & (1 << regno)) == 0)
	  regno--;

      xfer_addr += bump_before;

      memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
      displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);

      xfer_addr += bump_after;

      regmask &= ~(1 << regno);
    }

  /* Emulate base-register writeback.  */
  if (dsc->u.block.writeback)
    displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
			 CANNOT_WRITE_PC);
}
6989
/* Clean up an STM which included the PC in the register list.  The
   out-of-line copy stored the displaced PC; locate that word in
   memory and patch it to the value the original instruction would
   have stored.  */

static void
cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
			struct displaced_step_closure *dsc)
{
  uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
  int store_executed = condition_true (dsc->u.block.cond, status);
  CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
  CORE_ADDR stm_insn_addr;
  uint32_t pc_val;
  long offset;
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  /* If condition code fails, there's nothing else to do.  */
  if (!store_executed)
    return;

  /* PC is the highest-numbered register, so it ends up at the top of
     the transferred block for incrementing forms and at the bottom
     for decrementing ones, adjusted by the before/after variant.  */
  if (dsc->u.block.increment)
    {
      pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;

      if (dsc->u.block.before)
	pc_stored_at += 4;
    }
  else
    {
      pc_stored_at = dsc->u.block.xfer_addr;

      if (dsc->u.block.before)
	pc_stored_at -= 4;
    }

  /* The stored value equals the scratch copy's address plus an
     architecture-dependent read-ahead; compute that offset so the
     same offset can be applied to the original insn address.  */
  pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
  stm_insn_addr = dsc->scratch_base;
  offset = pc_val - stm_insn_addr;

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
			"STM instruction\n", offset);

  /* Rewrite the stored PC to the proper value for the non-displaced original
     instruction.  */
  write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
				 dsc->insn_addr + offset);
}
7036
7037 /* Clean up an LDM which includes the PC in the register list. We clumped all
7038 the registers in the transferred list into a contiguous range r0...rX (to
7039 avoid loading PC directly and losing control of the debugged program), so we
7040 must undo that here. */
7041
static void
cleanup_block_load_pc (struct gdbarch *gdbarch,
		       struct regcache *regs,
		       struct displaced_step_closure *dsc)
{
  uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
  int load_executed = condition_true (dsc->u.block.cond, status);
  unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
  unsigned int regs_loaded = bitcount (mask);
  unsigned int num_to_shuffle = regs_loaded, clobbered;

  /* The method employed here will fail if the register list is fully populated
     (we need to avoid loading PC directly).  */
  gdb_assert (num_to_shuffle < 16);

  if (!load_executed)
    return;

  /* The loaded values sit in r0..r(N-1); CLOBBERED tracks which of
     those scratch registers still need their saved values restored.  */
  clobbered = (1 << num_to_shuffle) - 1;

  /* Move each loaded value from its clumped position into the
     register the original instruction named, scanning destinations
     from the PC downwards and sources from r(N-1) downwards.  */
  while (num_to_shuffle > 0)
    {
      if ((mask & (1 << write_reg)) != 0)
	{
	  unsigned int read_reg = num_to_shuffle - 1;

	  if (read_reg != write_reg)
	    {
	      ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
	      displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
	      if (debug_displaced)
		fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
				    "loaded register r%d to r%d\n"), read_reg,
				    write_reg);
	    }
	  else if (debug_displaced)
	    fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
				"r%d already in the right place\n"),
				write_reg);

	  clobbered &= ~(1 << write_reg);

	  num_to_shuffle--;
	}

      write_reg--;
    }

  /* Restore any registers we scribbled over.  */
  for (write_reg = 0; clobbered != 0; write_reg++)
    {
      if ((clobbered & (1 << write_reg)) != 0)
	{
	  displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
			       CANNOT_WRITE_PC);
	  if (debug_displaced)
	    fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
				"clobbered register r%d\n"), write_reg);
	  clobbered &= ~(1 << write_reg);
	}
    }

  /* Perform register writeback manually.  */
  if (dsc->u.block.writeback)
    {
      ULONGEST new_rn_val = dsc->u.block.xfer_addr;

      if (dsc->u.block.increment)
	new_rn_val += regs_loaded * 4;
      else
	new_rn_val -= regs_loaded * 4;

      displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
			   CANNOT_WRITE_PC);
    }
}
7118
7119 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
7120 in user-level code (in particular exception return, ldm rn, {...pc}^). */
7121
7122 static int
7123 arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
7124 struct regcache *regs,
7125 struct displaced_step_closure *dsc)
7126 {
7127 int load = bit (insn, 20);
7128 int user = bit (insn, 22);
7129 int increment = bit (insn, 23);
7130 int before = bit (insn, 24);
7131 int writeback = bit (insn, 21);
7132 int rn = bits (insn, 16, 19);
7133
7134 /* Block transfers which don't mention PC can be run directly
7135 out-of-line. */
7136 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
7137 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
7138
7139 if (rn == ARM_PC_REGNUM)
7140 {
7141 warning (_("displaced: Unpredictable LDM or STM with "
7142 "base register r15"));
7143 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
7144 }
7145
7146 if (debug_displaced)
7147 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
7148 "%.8lx\n", (unsigned long) insn);
7149
7150 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
7151 dsc->u.block.rn = rn;
7152
7153 dsc->u.block.load = load;
7154 dsc->u.block.user = user;
7155 dsc->u.block.increment = increment;
7156 dsc->u.block.before = before;
7157 dsc->u.block.writeback = writeback;
7158 dsc->u.block.cond = bits (insn, 28, 31);
7159
7160 dsc->u.block.regmask = insn & 0xffff;
7161
7162 if (load)
7163 {
7164 if ((insn & 0xffff) == 0xffff)
7165 {
7166 /* LDM with a fully-populated register list. This case is
7167 particularly tricky. Implement for now by fully emulating the
7168 instruction (which might not behave perfectly in all cases, but
7169 these instructions should be rare enough for that not to matter
7170 too much). */
7171 dsc->modinsn[0] = ARM_NOP;
7172
7173 dsc->cleanup = &cleanup_block_load_all;
7174 }
7175 else
7176 {
7177 /* LDM of a list of registers which includes PC. Implement by
7178 rewriting the list of registers to be transferred into a
7179 contiguous chunk r0...rX before doing the transfer, then shuffling
7180 registers into the correct places in the cleanup routine. */
7181 unsigned int regmask = insn & 0xffff;
7182 unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
7183 unsigned int to = 0, from = 0, i, new_rn;
7184
7185 for (i = 0; i < num_in_list; i++)
7186 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7187
7188 /* Writeback makes things complicated. We need to avoid clobbering
7189 the base register with one of the registers in our modified
7190 register list, but just using a different register can't work in
7191 all cases, e.g.:
7192
7193 ldm r14!, {r0-r13,pc}
7194
7195 which would need to be rewritten as:
7196
7197 ldm rN!, {r0-r14}
7198
7199 but that can't work, because there's no free register for N.
7200
7201 Solve this by turning off the writeback bit, and emulating
7202 writeback manually in the cleanup routine. */
7203
7204 if (writeback)
7205 insn &= ~(1 << 21);
7206
7207 new_regmask = (1 << num_in_list) - 1;
7208
7209 if (debug_displaced)
7210 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
7211 "{..., pc}: original reg list %.4x, modified "
7212 "list %.4x\n"), rn, writeback ? "!" : "",
7213 (int) insn & 0xffff, new_regmask);
7214
7215 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
7216
7217 dsc->cleanup = &cleanup_block_load_pc;
7218 }
7219 }
7220 else
7221 {
7222 /* STM of a list of registers which includes PC. Run the instruction
7223 as-is, but out of line: this will store the wrong value for the PC,
7224 so we must manually fix up the memory in the cleanup routine.
7225 Doing things this way has the advantage that we can auto-detect
7226 the offset of the PC write (which is architecture-dependent) in
7227 the cleanup routine. */
7228 dsc->modinsn[0] = insn;
7229
7230 dsc->cleanup = &cleanup_block_store_pc;
7231 }
7232
7233 return 0;
7234 }
7235
7236 static int
7237 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
7238 struct regcache *regs,
7239 struct displaced_step_closure *dsc)
7240 {
7241 int rn = bits (insn1, 0, 3);
7242 int load = bit (insn1, 4);
7243 int writeback = bit (insn1, 5);
7244
7245 /* Block transfers which don't mention PC can be run directly
7246 out-of-line. */
7247 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
7248 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
7249
7250 if (rn == ARM_PC_REGNUM)
7251 {
7252 warning (_("displaced: Unpredictable LDM or STM with "
7253 "base register r15"));
7254 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7255 "unpredictable ldm/stm", dsc);
7256 }
7257
7258 if (debug_displaced)
7259 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
7260 "%.4x%.4x\n", insn1, insn2);
7261
7262 /* Clear bit 13, since it should be always zero. */
7263 dsc->u.block.regmask = (insn2 & 0xdfff);
7264 dsc->u.block.rn = rn;
7265
7266 dsc->u.block.load = load;
7267 dsc->u.block.user = 0;
7268 dsc->u.block.increment = bit (insn1, 7);
7269 dsc->u.block.before = bit (insn1, 8);
7270 dsc->u.block.writeback = writeback;
7271 dsc->u.block.cond = INST_AL;
7272 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
7273
7274 if (load)
7275 {
7276 if (dsc->u.block.regmask == 0xffff)
7277 {
7278 /* This branch is impossible to happen. */
7279 gdb_assert (0);
7280 }
7281 else
7282 {
7283 unsigned int regmask = dsc->u.block.regmask;
7284 unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
7285 unsigned int to = 0, from = 0, i, new_rn;
7286
7287 for (i = 0; i < num_in_list; i++)
7288 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7289
7290 if (writeback)
7291 insn1 &= ~(1 << 5);
7292
7293 new_regmask = (1 << num_in_list) - 1;
7294
7295 if (debug_displaced)
7296 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
7297 "{..., pc}: original reg list %.4x, modified "
7298 "list %.4x\n"), rn, writeback ? "!" : "",
7299 (int) dsc->u.block.regmask, new_regmask);
7300
7301 dsc->modinsn[0] = insn1;
7302 dsc->modinsn[1] = (new_regmask & 0xffff);
7303 dsc->numinsns = 2;
7304
7305 dsc->cleanup = &cleanup_block_load_pc;
7306 }
7307 }
7308 else
7309 {
7310 dsc->modinsn[0] = insn1;
7311 dsc->modinsn[1] = insn2;
7312 dsc->numinsns = 2;
7313 dsc->cleanup = &cleanup_block_store_pc;
7314 }
7315 return 0;
7316 }
7317
7318 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
7319 for Linux, where some SVC instructions must be treated specially. */
7320
7321 static void
7322 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
7323 struct displaced_step_closure *dsc)
7324 {
7325 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
7326
7327 if (debug_displaced)
7328 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
7329 "%.8lx\n", (unsigned long) resume_addr);
7330
7331 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
7332 }
7333

/* Common copy routine for svc instruction.  */
7336
7337 static int
7338 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
7339 struct displaced_step_closure *dsc)
7340 {
7341 /* Preparation: none.
7342 Insn: unmodified svc.
7343 Cleanup: pc <- insn_addr + insn_size. */
7344
7345 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
7346 instruction. */
7347 dsc->wrote_to_pc = 1;
7348
7349 /* Allow OS-specific code to override SVC handling. */
7350 if (dsc->u.svc.copy_svc_os)
7351 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
7352 else
7353 {
7354 dsc->cleanup = &cleanup_svc;
7355 return 0;
7356 }
7357 }
7358
7359 static int
7360 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
7361 struct regcache *regs, struct displaced_step_closure *dsc)
7362 {
7363
7364 if (debug_displaced)
7365 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
7366 (unsigned long) insn);
7367
7368 dsc->modinsn[0] = insn;
7369
7370 return install_svc (gdbarch, regs, dsc);
7371 }
7372
7373 static int
7374 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
7375 struct regcache *regs, struct displaced_step_closure *dsc)
7376 {
7377
7378 if (debug_displaced)
7379 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
7380 insn);
7381
7382 dsc->modinsn[0] = insn;
7383
7384 return install_svc (gdbarch, regs, dsc);
7385 }
7386
7387 /* Copy undefined instructions. */
7388
7389 static int
7390 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
7391 struct displaced_step_closure *dsc)
7392 {
7393 if (debug_displaced)
7394 fprintf_unfiltered (gdb_stdlog,
7395 "displaced: copying undefined insn %.8lx\n",
7396 (unsigned long) insn);
7397
7398 dsc->modinsn[0] = insn;
7399
7400 return 0;
7401 }
7402
7403 static int
7404 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
7405 struct displaced_step_closure *dsc)
7406 {
7407
7408 if (debug_displaced)
7409 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
7410 "%.4x %.4x\n", (unsigned short) insn1,
7411 (unsigned short) insn2);
7412
7413 dsc->modinsn[0] = insn1;
7414 dsc->modinsn[1] = insn2;
7415 dsc->numinsns = 2;
7416
7417 return 0;
7418 }
7419
7420 /* Copy unpredictable instructions. */
7421
7422 static int
7423 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
7424 struct displaced_step_closure *dsc)
7425 {
7426 if (debug_displaced)
7427 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
7428 "%.8lx\n", (unsigned long) insn);
7429
7430 dsc->modinsn[0] = insn;
7431
7432 return 0;
7433 }
7434
7435 /* The decode_* functions are instruction decoding helpers. They mostly follow
7436 the presentation in the ARM ARM. */
7437
static int
arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
			      struct regcache *regs,
			      struct displaced_step_closure *dsc)
{
  /* Decode the miscellaneous / memory-hint / Advanced SIMD space of
     unconditional ARM instructions (bit 27 clear) and dispatch each to the
     appropriate copy routine for displaced stepping.  The op1/op2/rn
     fields below index the decode table in the ARM ARM; the order of the
     tests matters, since later masks assume earlier patterns have been
     excluded.  */
  unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
  unsigned int rn = bits (insn, 16, 19);

  if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0xe) == 0x0)
    return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
  else if (op1 == 0x10 && op2 == 0x0 && (rn & 0xe) == 0x1)
    return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
  else if ((op1 & 0x60) == 0x20)
    return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
  else if ((op1 & 0x71) == 0x40)
    return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
				dsc);
  else if ((op1 & 0x77) == 0x41)
    return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
  else if ((op1 & 0x77) == 0x45)
    return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pli.  */
  else if ((op1 & 0x77) == 0x51)
    {
      /* pld/pldw with rn == pc is unpredictable.  */
      if (rn != 0xf)
	return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pld/pldw.  */
      else
	return arm_copy_unpred (gdbarch, insn, dsc);
    }
  else if ((op1 & 0x77) == 0x55)
    return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pld/pldw.  */
  else if (op1 == 0x57)
    switch (op2)
      {
      case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
      case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
      case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
      case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
      default: return arm_copy_unpred (gdbarch, insn, dsc);
      }
  else if ((op1 & 0x63) == 0x43)
    return arm_copy_unpred (gdbarch, insn, dsc);
  else if ((op2 & 0x1) == 0x0)
    switch (op1 & ~0x80)
      {
      case 0x61:
	return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
      case 0x65:
	return arm_copy_preload_reg (gdbarch, insn, regs, dsc);  /* pli reg.  */
      case 0x71: case 0x75:
        /* pld/pldw reg.  */
	return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
      case 0x63: case 0x67: case 0x73: case 0x77:
	return arm_copy_unpred (gdbarch, insn, dsc);
      default:
	return arm_copy_undef (gdbarch, insn, dsc);
      }
  else
    return arm_copy_undef (gdbarch, insn, dsc);  /* Probably unreachable.  */
}
7497
static int
arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  struct displaced_step_closure *dsc)
{
  /* Decode an ARM instruction with the unconditional (0xf) condition
     field, dispatching to the appropriate copy routine for displaced
     stepping.  */
  if (bit (insn, 27) == 0)
    return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
  /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx.  */
  else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
    {
    case 0x0: case 0x2:
      return arm_copy_unmodified (gdbarch, insn, "srs", dsc);

    case 0x1: case 0x3:
      return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);

    case 0x4: case 0x5: case 0x6: case 0x7:
      return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);

    case 0x8:
      switch ((insn & 0xe00000) >> 21)
	{
	case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
	  /* stc/stc2.  */
	  return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);

	case 0x2:
	  return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);

	default:
	  return arm_copy_undef (gdbarch, insn, dsc);
	}

    case 0x9:
      {
	/* Whether the base register field is the PC.  */
	 int rn_f = (bits (insn, 16, 19) == 0xf);
	switch ((insn & 0xe00000) >> 21)
	  {
	  case 0x1: case 0x3:
	    /* ldc/ldc2 imm (undefined for rn == pc).  */
	    return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
			: arm_copy_copro_load_store (gdbarch, insn, regs, dsc);

	  case 0x2:
	    return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);

	  case 0x4: case 0x5: case 0x6: case 0x7:
	    /* ldc/ldc2 lit (undefined for rn != pc).  */
	    return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
			: arm_copy_undef (gdbarch, insn, dsc);

	  default:
	    return arm_copy_undef (gdbarch, insn, dsc);
	  }
      }

    case 0xa:
      return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);

    case 0xb:
      if (bits (insn, 16, 19) == 0xf)
	/* ldc/ldc2 lit.  */
	return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0xc:
      if (bit (insn, 4))
	return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
      else
	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);

    case 0xd:
      if (bit (insn, 4))
	return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
      else
	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);

    default:
      return arm_copy_undef (gdbarch, insn, dsc);
    }
}
7580
7581 /* Decode miscellaneous instructions in dp/misc encoding space. */
7582
static int
arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  struct displaced_step_closure *dsc)
{
  /* Decode the miscellaneous instructions within the data-processing /
     miscellaneous encoding space (mrs/msr, bx, clz, bkpt, etc.), and
     dispatch to the appropriate displaced-stepping copy routine.  */
  unsigned int op2 = bits (insn, 4, 6);
  unsigned int op = bits (insn, 21, 22);
  unsigned int op1 = bits (insn, 16, 19);

  switch (op2)
    {
    case 0x0:
      return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);

    case 0x1:
      if (op == 0x1)  /* bx.  */
	return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
      else if (op == 0x3)
	return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x2:
      if (op == 0x1)
        /* Not really supported.  */
	return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x3:
      if (op == 0x1)
	return arm_copy_bx_blx_reg (gdbarch, insn,
				regs, dsc);  /* blx register.  */
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x5:
      return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);

    case 0x7:
      if (op == 0x1)
	return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
      else if (op == 0x3)
        /* Not really supported.  */
	return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
      /* Fall through to default (undefined) for other op values.  */

    default:
      return arm_copy_undef (gdbarch, insn, dsc);
    }
}
7633
static int
arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
		    struct regcache *regs,
		    struct displaced_step_closure *dsc)
{
  /* Decode the data-processing / miscellaneous encoding space and dispatch
     to the appropriate displaced-stepping copy routine.  Bit 25
     distinguishes the immediate forms from the register forms.  */
  if (bit (insn, 25))
    switch (bits (insn, 20, 24))
      {
      case 0x10:
	return arm_copy_unmodified (gdbarch, insn, "movw", dsc);

      case 0x14:
	return arm_copy_unmodified (gdbarch, insn, "movt", dsc);

      case 0x12: case 0x16:
	return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);

      default:
	return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
      }
  else
    {
      uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);

      if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
	return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
	return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
	return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
	return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
      else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
	return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
      else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
	return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
      else if (op2 == 0xb || (op2 & 0xd) == 0xd)
	/* 2nd arg means "unprivileged".  */
	return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
				     dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
7679
static int
arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
			     struct regcache *regs,
			     struct displaced_step_closure *dsc)
{
  /* Decode load/store word and unsigned byte instructions, dispatching to
     arm_copy_ldr_str_ldrb_strb.  The three trailing arguments vary per
     pattern; presumably (load, size, user-mode variant) — the register
     form (a set) with bit 4 set (b) is not a load/store and falls through
     to the unreachable return.  TODO(review): confirm the trailing-arg
     meaning against arm_copy_ldr_str_ldrb_strb's definition.  */
  int a = bit (insn, 25), b = bit (insn, 4);
  uint32_t op1 = bits (insn, 20, 24);
  int rn_f = bits (insn, 16, 19) == 0xf;

  if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
      || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
  else if ((!a && (op1 & 0x17) == 0x02)
	   || (a && (op1 & 0x17) == 0x02 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
  else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
	   || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
  else if ((!a && (op1 & 0x17) == 0x03)
	   || (a && (op1 & 0x17) == 0x03 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
  else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
	   || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
  else if ((!a && (op1 & 0x17) == 0x06)
	   || (a && (op1 & 0x17) == 0x06 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
  else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
	   || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
  else if ((!a && (op1 & 0x17) == 0x07)
	   || (a && (op1 & 0x17) == 0x07 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);

  /* Should be unreachable.  */
  return 1;
}
7717
static int
arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
		  struct displaced_step_closure *dsc)
{
  /* Decode media instructions (parallel add/sub, pack/unpack/saturate,
     usad8/usada8, bit-field ops).  None of these can use the PC, so all
     are copied unmodified; malformed encodings are copied as undefined.  */
  switch (bits (insn, 20, 24))
    {
    case 0x00: case 0x01: case 0x02: case 0x03:
      return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);

    case 0x04: case 0x05: case 0x06: case 0x07:
      return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);

    case 0x08: case 0x09: case 0x0a: case 0x0b:
    case 0x0c: case 0x0d: case 0x0e: case 0x0f:
      return arm_copy_unmodified (gdbarch, insn,
			      "decode/pack/unpack/saturate/reverse", dsc);

    case 0x18:
      if (bits (insn, 5, 7) == 0)  /* op2.  */
	 {
	  if (bits (insn, 12, 15) == 0xf)
	    return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
	  else
	    return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
	}
      else
	 return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1a: case 0x1b:
      if (bits (insn, 5, 6) == 0x2)  /* op2[1:0].  */
	return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1c: case 0x1d:
      if (bits (insn, 5, 6) == 0x0)  /* op2[1:0].  */
	 {
	  if (bits (insn, 0, 3) == 0xf)
	    return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
	  else
	    return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
	}
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1e: case 0x1f:
      if (bits (insn, 5, 6) == 0x2)  /* op2[1:0].  */
	return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
7773
static int
arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, int32_t insn,
			struct regcache *regs,
			struct displaced_step_closure *dsc)
{
  /* Bit 25 separates branches (b/bl/blx) from block transfers
     (ldm/stm).  */
  return (bit (insn, 25)
	  ? arm_copy_b_bl_blx (gdbarch, insn, regs, dsc)
	  : arm_copy_block_xfer (gdbarch, insn, regs, dsc));
}
7784
static int
arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  struct displaced_step_closure *dsc)
{
  /* Decode VFP/Neon extension register load/store instructions and
     dispatch to the appropriate copy routine.  */
  unsigned int opcode = bits (insn, 20, 24);

  /* VFP/Neon mrrc/mcrr.  */
  if (opcode == 0x04 || opcode == 0x05)
    return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);

  /* vstm/vpush.  */
  if (opcode == 0x08 || opcode == 0x0a || opcode == 0x0c || opcode == 0x0e
      || opcode == 0x12 || opcode == 0x16)
    return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);

  /* vldm/vpop.  */
  if (opcode == 0x09 || opcode == 0x0b || opcode == 0x0d || opcode == 0x0f
      || opcode == 0x13 || opcode == 0x17)
    return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);

  /* vstr (0x10/0x14/0x18/0x1c) and vldr (0x11/0x15/0x19/0x1d).
     Note: no writeback for these instructions.  Bit 25 will always be
     zero though (via caller), so the following works OK.  */
  if (opcode == 0x10 || opcode == 0x14 || opcode == 0x18 || opcode == 0x1c
      || opcode == 0x11 || opcode == 0x15 || opcode == 0x19 || opcode == 0x1d)
    return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);

  /* Should be unreachable.  */
  return 1;
}
7815
7816 /* Decode shifted register instructions. */
7817
static int
thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
			    uint16_t insn2, struct regcache *regs,
			    struct displaced_step_closure *dsc)
{
  /* In this encoding space, PC is only allowed to be used in instruction
     MOV, so everything else can run unmodified.  */
  unsigned int opc = bits (insn1, 5, 8);
  unsigned int src = bits (insn1, 0, 3);

  if (opc != 0x2 || src != 0xf)
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					"dp (shift reg)", dsc);

  /* MOV.  */
  return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
}
7834
7835
7836 /* Decode extension register load/store. Exactly the same as
7837 arm_decode_ext_reg_ld_st. */
7838
static int
thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
			     uint16_t insn2, struct regcache *regs,
			     struct displaced_step_closure *dsc)
{
  /* Decode Thumb-2 VFP/Neon extension register load/store instructions.
     Only vldr needs a modified copy; everything else runs unmodified.  */
  unsigned int opcode = bits (insn1, 4, 8);

  switch (opcode)
    {
    case 0x04: case 0x05:
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vfp/neon vmov", dsc);

    case 0x08: case 0x0c: /* 01x00 */
    case 0x0a: case 0x0e: /* 01x10 */
    case 0x12: case 0x16: /* 10x10 */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vfp/neon vstm/vpush", dsc);

    case 0x09: case 0x0d: /* 01x01 */
    case 0x0b: case 0x0f: /* 01x11 */
    case 0x13: case 0x17: /* 10x11 */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vfp/neon vldm/vpop", dsc);

    case 0x10: case 0x14: case 0x18: case 0x1c:  /* vstr.  */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vstr", dsc);
    case 0x11: case 0x15: case 0x19: case 0x1d:  /* vldr.  */
      return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
7874
7875 static int
7876 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn, CORE_ADDR to,
7877 struct regcache *regs, struct displaced_step_closure *dsc)
7878 {
7879 unsigned int op1 = bits (insn, 20, 25);
7880 int op = bit (insn, 4);
7881 unsigned int coproc = bits (insn, 8, 11);
7882 unsigned int rn = bits (insn, 16, 19);
7883
7884 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
7885 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
7886 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
7887 && (coproc & 0xe) != 0xa)
7888 /* stc/stc2. */
7889 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7890 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
7891 && (coproc & 0xe) != 0xa)
7892 /* ldc/ldc2 imm/lit. */
7893 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7894 else if ((op1 & 0x3e) == 0x00)
7895 return arm_copy_undef (gdbarch, insn, dsc);
7896 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
7897 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
7898 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
7899 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
7900 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
7901 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
7902 else if ((op1 & 0x30) == 0x20 && !op)
7903 {
7904 if ((coproc & 0xe) == 0xa)
7905 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
7906 else
7907 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7908 }
7909 else if ((op1 & 0x30) == 0x20 && op)
7910 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
7911 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
7912 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
7913 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
7914 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
7915 else if ((op1 & 0x30) == 0x30)
7916 return arm_copy_svc (gdbarch, insn, regs, dsc);
7917 else
7918 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
7919 }
7920
static int
thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
			 uint16_t insn2, struct regcache *regs,
			 struct displaced_step_closure *dsc)
{
  /* Decode the Thumb-2 coprocessor encoding space and dispatch to the
     appropriate displaced-stepping copy routine.  Only ldc/ldc2 and the
     VFP/Neon extension register loads need modified copies; everything
     else runs unmodified.  */
  unsigned int coproc = bits (insn2, 8, 11);
  unsigned int bit_5_8 = bits (insn1, 5, 8);
  unsigned int bit_9 = bit (insn1, 9);
  unsigned int bit_4 = bit (insn1, 4);

  if (bit_9 == 0)
    {
      if (bit_5_8 == 2)
	return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					    "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
					    dsc);
      else if (bit_5_8 == 0)  /* UNDEFINED.  */
	return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
      else
	{
	  /* coproc is 101x.  SIMD/VFP, ext registers load/store.  */
	  if ((coproc & 0xe) == 0xa)
	    return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
						dsc);
	  else /* coproc is not 101x.  */
	    {
	      if (bit_4 == 0)  /* STC/STC2.  */
		return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						    "stc/stc2", dsc);
	      else /* LDC/LDC2 {literal, immediate}.  */
		return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
						     regs, dsc);
	    }
	}
    }
  else
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);

  return 0;
}
7963
7964 static void
7965 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
7966 struct displaced_step_closure *dsc, int rd)
7967 {
7968 /* ADR Rd, #imm
7969
7970 Rewrite as:
7971
7972 Preparation: Rd <- PC
7973 Insn: ADD Rd, #imm
7974 Cleanup: Null.
7975 */
7976
7977 /* Rd <- PC */
7978 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7979 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
7980 }
7981
7982 static int
7983 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
7984 struct displaced_step_closure *dsc,
7985 int rd, unsigned int imm)
7986 {
7987
7988 /* Encoding T2: ADDS Rd, #imm */
7989 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
7990
7991 install_pc_relative (gdbarch, regs, dsc, rd);
7992
7993 return 0;
7994 }
7995
7996 static int
7997 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
7998 struct regcache *regs,
7999 struct displaced_step_closure *dsc)
8000 {
8001 unsigned int rd = bits (insn, 8, 10);
8002 unsigned int imm8 = bits (insn, 0, 7);
8003
8004 if (debug_displaced)
8005 fprintf_unfiltered (gdb_stdlog,
8006 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
8007 rd, imm8, insn);
8008
8009 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
8010 }
8011
static int
thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
			      uint16_t insn2, struct regcache *regs,
			      struct displaced_step_closure *dsc)
{
  /* Copy a 32-bit Thumb ADR (encoding T2/T3) by rewriting it as
     "SUB/ADD Rd, Rd, #imm" with Rd preloaded with the PC value by
     install_pc_relative.  */
  unsigned int rd = bits (insn2, 8, 11);
  /* Since immediate has the same encoding in ADR ADD and SUB, so we simply
     extract raw immediate encoding rather than computing immediate.  When
     generating ADD or SUB instruction, we can simply perform OR operation to
     set immediate into ADD.  */
  unsigned int imm_3_8 = insn2 & 0x70ff;
  unsigned int imm_i = insn1 & 0x0400;  /* Clear all bits except bit 10.  */

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
			rd, imm_i, imm_3_8, insn1, insn2);

  if (bit (insn1, 7))  /* Encoding T2 (the SUB-immediate form of ADR).  */
    {
      /* Encoding T2: SUB Rd, Rd, #imm.  */
      dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
      dsc->modinsn[1] = ((rd << 8) | imm_3_8);
    }
  else  /* Encoding T3 */
    {
      /* Encoding T3: ADD Rd, Rd, #imm.  */
      dsc->modinsn[0] = (0xf100 | rd | imm_i);
      dsc->modinsn[1] = ((rd << 8) | imm_3_8);
    }
  dsc->numinsns = 2;

  install_pc_relative (gdbarch, regs, dsc, rd);

  return 0;
}
8048
8049 static int
8050 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, unsigned short insn1,
8051 struct regcache *regs,
8052 struct displaced_step_closure *dsc)
8053 {
8054 unsigned int rt = bits (insn1, 8, 10);
8055 unsigned int pc;
8056 int imm8 = (bits (insn1, 0, 7) << 2);
8057 CORE_ADDR from = dsc->insn_addr;
8058
8059 /* LDR Rd, #imm8
8060
8061 Rwrite as:
8062
8063 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
8064
8065 Insn: LDR R0, [R2, R3];
8066 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
8067
8068 if (debug_displaced)
8069 fprintf_unfiltered (gdb_stdlog,
8070 "displaced: copying thumb ldr r%d [pc #%d]\n"
8071 , rt, imm8);
8072
8073 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
8074 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
8075 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
8076 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
8077 /* The assembler calculates the required value of the offset from the
8078 Align(PC,4) value of this instruction to the label. */
8079 pc = pc & 0xfffffffc;
8080
8081 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
8082 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
8083
8084 dsc->rd = rt;
8085 dsc->u.ldst.xfersize = 4;
8086 dsc->u.ldst.rn = 0;
8087 dsc->u.ldst.immed = 0;
8088 dsc->u.ldst.writeback = 0;
8089 dsc->u.ldst.restore_r4 = 0;
8090
8091 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/
8092
8093 dsc->cleanup = &cleanup_load;
8094
8095 return 0;
8096 }
8097
/* Copy Thumb cbnz/cbz instruction.  */
8099
static int
thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
		     struct regcache *regs,
		     struct displaced_step_closure *dsc)
{
  /* Copy a Thumb CBNZ/CBZ by evaluating the condition up front (the
     source register value is known here), replacing the instruction with
     a NOP, and letting cleanup_branch redirect the PC.  */
  int non_zero = bit (insn1, 11);
  unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
  CORE_ADDR from = dsc->insn_addr;
  int rn = bits (insn1, 0, 2);
  int rn_val = displaced_read_reg (regs, dsc, rn);

  dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
  /* CBNZ and CBZ do not affect the condition flags.  If condition is true,
     set it INST_AL, so cleanup_branch will know branch is taken, otherwise,
     condition is false, let it be, cleanup_branch will do nothing.  */
  if (dsc->u.branch.cond)
    {
      dsc->u.branch.cond = INST_AL;
      /* Branch taken: destination is PC-relative (PC reads as insn+4).  */
      dsc->u.branch.dest = from + 4 + imm5;
    }
  else
    /* Branch not taken: fall through to the next (16-bit) instruction.  */
    dsc->u.branch.dest = from + 2;

  dsc->u.branch.link = 0;
  dsc->u.branch.exchange = 0;

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
			" insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
			rn, rn_val, insn1, dsc->u.branch.dest);

  dsc->modinsn[0] = THUMB_NOP;

  dsc->cleanup = &cleanup_branch;
  return 0;
}
8136
8137 /* Copy Table Branch Byte/Halfword */
8138 static int
8139 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
8140 uint16_t insn2, struct regcache *regs,
8141 struct displaced_step_closure *dsc)
8142 {
8143 ULONGEST rn_val, rm_val;
8144 int is_tbh = bit (insn2, 4);
8145 CORE_ADDR halfwords = 0;
8146 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8147
8148 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
8149 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
8150
8151 if (is_tbh)
8152 {
8153 gdb_byte buf[2];
8154
8155 target_read_memory (rn_val + 2 * rm_val, buf, 2);
8156 halfwords = extract_unsigned_integer (buf, 2, byte_order);
8157 }
8158 else
8159 {
8160 gdb_byte buf[1];
8161
8162 target_read_memory (rn_val + rm_val, buf, 1);
8163 halfwords = extract_unsigned_integer (buf, 1, byte_order);
8164 }
8165
8166 if (debug_displaced)
8167 fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x offset 0x%x"
8168 " offset 0x%x\n", is_tbh ? "tbh" : "tbb",
8169 (unsigned int) rn_val, (unsigned int) rm_val,
8170 (unsigned int) halfwords);
8171
8172 dsc->u.branch.cond = INST_AL;
8173 dsc->u.branch.link = 0;
8174 dsc->u.branch.exchange = 0;
8175 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
8176
8177 dsc->cleanup = &cleanup_branch;
8178
8179 return 0;
8180 }
8181
8182 static void
8183 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
8184 struct displaced_step_closure *dsc)
8185 {
8186 /* PC <- r7 */
8187 int val = displaced_read_reg (regs, dsc, 7);
8188 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
8189
8190 /* r7 <- r8 */
8191 val = displaced_read_reg (regs, dsc, 8);
8192 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
8193
8194 /* r8 <- tmp[0] */
8195 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
8196
8197 }
8198
8199 static int
8200 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, unsigned short insn1,
8201 struct regcache *regs,
8202 struct displaced_step_closure *dsc)
8203 {
8204 dsc->u.block.regmask = insn1 & 0x00ff;
8205
8206 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
8207 to :
8208
8209 (1) register list is full, that is, r0-r7 are used.
8210 Prepare: tmp[0] <- r8
8211
8212 POP {r0, r1, ...., r6, r7}; remove PC from reglist
8213 MOV r8, r7; Move value of r7 to r8;
8214 POP {r7}; Store PC value into r7.
8215
8216 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
8217
8218 (2) register list is not full, supposing there are N registers in
8219 register list (except PC, 0 <= N <= 7).
8220 Prepare: for each i, 0 - N, tmp[i] <- ri.
8221
8222 POP {r0, r1, ...., rN};
8223
8224 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
8225 from tmp[] properly.
8226 */
8227 if (debug_displaced)
8228 fprintf_unfiltered (gdb_stdlog,
8229 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
8230 dsc->u.block.regmask, insn1);
8231
8232 if (dsc->u.block.regmask == 0xff)
8233 {
8234 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
8235
8236 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
8237 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
8238 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
8239
8240 dsc->numinsns = 3;
8241 dsc->cleanup = &cleanup_pop_pc_16bit_all;
8242 }
8243 else
8244 {
8245 unsigned int num_in_list = bitcount (dsc->u.block.regmask);
8246 unsigned int new_regmask, bit = 1;
8247 unsigned int to = 0, from = 0, i, new_rn;
8248
8249 for (i = 0; i < num_in_list + 1; i++)
8250 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
8251
8252 new_regmask = (1 << (num_in_list + 1)) - 1;
8253
8254 if (debug_displaced)
8255 fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
8256 "{..., pc}: original reg list %.4x,"
8257 " modified list %.4x\n"),
8258 (int) dsc->u.block.regmask, new_regmask);
8259
8260 dsc->u.block.regmask |= 0x8000;
8261 dsc->u.block.writeback = 0;
8262 dsc->u.block.cond = INST_AL;
8263
8264 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
8265
8266 dsc->cleanup = &cleanup_block_load_pc;
8267 }
8268
8269 return 0;
8270 }
8271
/* Decode one 16-bit Thumb instruction and set up DSC for displaced
   stepping, dispatching on the major opcode in bits 12-15.  Instructions
   that cannot reference the PC are copied unmodified; PC-relative forms
   get dedicated copy routines.  Calls internal_error on decode failure.  */

static void
thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
				    struct regcache *regs,
				    struct displaced_step_closure *dsc)
{
  unsigned short op_bit_12_15 = bits (insn1, 12, 15);
  unsigned short op_bit_10_11 = bits (insn1, 10, 11);
  int err = 0;

  /* 16-bit thumb instructions. */
  switch (op_bit_12_15)
    {
      /* Shift (immediate), add, subtract, move and compare.  */
    case 0: case 1: case 2: case 3:
      err = thumb_copy_unmodified_16bit (gdbarch, insn1,
					 "shift/add/sub/mov/cmp",
					 dsc);
      break;
    case 4:
      switch (op_bit_10_11)
	{
	case 0: /* Data-processing */
	  err = thumb_copy_unmodified_16bit (gdbarch, insn1,
					     "data-processing",
					     dsc);
	  break;
	case 1: /* Special data instructions and branch and exchange.  */
	  {
	    unsigned short op = bits (insn1, 7, 9);
	    if (op == 6 || op == 7) /* BX or BLX */
	      err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
	    else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers.  */
	      err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
	    else
	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
						 dsc);
	  }
	  break;
	default: /* LDR (literal) -- PC-relative load needs fixing up.  */
	  err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
	}
      break;
    case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
      break;
    case 10:
      if (op_bit_10_11 < 2) /* Generate PC-relative address (ADR).  */
	err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
      else /* Generate SP-relative address */
	err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
      break;
    case 11: /* Misc 16-bit instructions */
      {
	switch (bits (insn1, 8, 11))
	  {
	  case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
	    err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
	    break;
	  case 12: case 13: /* POP */
	    if (bit (insn1, 8)) /* PC is in register list.  */
	      err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
	    else
	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
	    break;
	  case 15: /* If-Then, and hints */
	    if (bits (insn1, 0, 3))
	      /* If-Then makes up to four following instructions conditional.
		 IT instruction itself is not conditional, so handle it as a
		 common unmodified instruction.  */
	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
						 dsc);
	    else
	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
	    break;
	  default:
	    err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
	  }
      }
      break;
    case 12:
      if (op_bit_10_11 < 2) /* Store multiple registers */
	err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
      else /* Load multiple registers */
	err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
      break;
    case 13: /* Conditional branch and supervisor call */
      if (bits (insn1, 9, 11) != 7) /* conditional branch */
	err = thumb_copy_b (gdbarch, insn1, dsc);
      else
	err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
      break;
    case 14: /* Unconditional branch */
      err = thumb_copy_b (gdbarch, insn1, dsc);
      break;
    default:
      err = 1;
    }

  if (err)
    internal_error (__FILE__, __LINE__,
		    _("thumb_process_displaced_16bit_insn: Instruction decode error"));
}
8374
8375 static int
8376 decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
8377 uint16_t insn1, uint16_t insn2,
8378 struct regcache *regs,
8379 struct displaced_step_closure *dsc)
8380 {
8381 int rt = bits (insn2, 12, 15);
8382 int rn = bits (insn1, 0, 3);
8383 int op1 = bits (insn1, 7, 8);
8384 int err = 0;
8385
8386 switch (bits (insn1, 5, 6))
8387 {
8388 case 0: /* Load byte and memory hints */
8389 if (rt == 0xf) /* PLD/PLI */
8390 {
8391 if (rn == 0xf)
8392 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
8393 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
8394 else
8395 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8396 "pli/pld", dsc);
8397 }
8398 else
8399 {
8400 if (rn == 0xf) /* LDRB/LDRSB (literal) */
8401 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
8402 1);
8403 else
8404 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8405 "ldrb{reg, immediate}/ldrbt",
8406 dsc);
8407 }
8408
8409 break;
8410 case 1: /* Load halfword and memory hints. */
8411 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
8412 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8413 "pld/unalloc memhint", dsc);
8414 else
8415 {
8416 if (rn == 0xf)
8417 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
8418 2);
8419 else
8420 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8421 "ldrh/ldrht", dsc);
8422 }
8423 break;
8424 case 2: /* Load word */
8425 {
8426 int insn2_bit_8_11 = bits (insn2, 8, 11);
8427
8428 if (rn == 0xf)
8429 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
8430 else if (op1 == 0x1) /* Encoding T3 */
8431 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
8432 0, 1);
8433 else /* op1 == 0x0 */
8434 {
8435 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
8436 /* LDR (immediate) */
8437 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
8438 dsc, bit (insn2, 8), 1);
8439 else if (insn2_bit_8_11 == 0xe) /* LDRT */
8440 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8441 "ldrt", dsc);
8442 else
8443 /* LDR (register) */
8444 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
8445 dsc, 0, 0);
8446 }
8447 break;
8448 }
8449 default:
8450 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
8451 break;
8452 }
8453 return 0;
8454 }
8455
/* Decode one 32-bit Thumb-2 instruction (halfwords INSN1, INSN2) and set
   up DSC for displaced stepping, dispatching on op1 (bits 11-12 of the
   first halfword) and further sub-fields.  Calls internal_error on
   decode failure.  */

static void
thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
				    uint16_t insn2, struct regcache *regs,
				    struct displaced_step_closure *dsc)
{
  int err = 0;
  unsigned short op = bit (insn2, 15);
  unsigned int op1 = bits (insn1, 11, 12);

  switch (op1)
    {
    case 1:
      {
	switch (bits (insn1, 9, 10))
	  {
	  case 0:
	    if (bit (insn1, 6))
	      {
		/* Load/store {dual, exclusive}, table branch.  */
		if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
		    && bits (insn2, 5, 7) == 0)
		  err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
						  dsc);
		else
		  /* PC is not allowed to use in load/store {dual, exclusive}
		     instructions.  */
		  err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						     "load/store dual/ex", dsc);
	      }
	    else /* load/store multiple */
	      {
		switch (bits (insn1, 7, 8))
		  {
		  case 0: case 3: /* SRS, RFE */
		    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						       "srs/rfe", dsc);
		    break;
		  case 1: case 2: /* LDM/STM/PUSH/POP */
		    err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
		    break;
		  }
	      }
	    break;

	  case 1:
	    /* Data-processing (shift register).  */
	    err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
					      dsc);
	    break;
	  default: /* Coprocessor instructions.  */
	    err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
	    break;
	  }
	break;
      }
    case 2: /* op1 = 2 */
      if (op) /* Branch and misc control.  */
	{
	  if (bit (insn2, 14)  /* BLX/BL */
	      || bit (insn2, 12) /* Unconditional branch */
	      || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
	    err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
	  else
	    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					       "misc ctrl", dsc);
	}
      else
	{
	  if (bit (insn1, 9)) /* Data processing (plain binary imm).  */
	    {
	      /* NOTE(review): this OP shadows the outer OP (bit 15 of
		 INSN2); here it is the dp opcode field.  OP 0 = ADR/ADD,
		 OP 0xa = ADR/SUB, which are PC-relative when Rn is PC.  */
	      int op = bits (insn1, 4, 8);
	      int rn = bits (insn1, 0, 3);
	      if ((op == 0 || op == 0xa) && rn == 0xf)
		err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
						    regs, dsc);
	      else
		err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						   "dp/pb", dsc);
	    }
	  else /* Data processing (modified immediate) */
	    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					       "dp/mi", dsc);
	}
      break;
    case 3: /* op1 = 3 */
      switch (bits (insn1, 9, 10))
	{
	case 0:
	  if (bit (insn1, 4))
	    err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
						   regs, dsc);
	  else /* NEON Load/Store and Store single data item */
	    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					       "neon elt/struct load/store",
					       dsc);
	  break;
	case 1: /* op1 = 3, bits (9, 10) == 1 */
	  switch (bits (insn1, 7, 8))
	    {
	    case 0: case 1: /* Data processing (register) */
	      err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						 "dp(reg)", dsc);
	      break;
	    case 2: /* Multiply and absolute difference */
	      err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						 "mul/mua/diff", dsc);
	      break;
	    case 3: /* Long multiply and divide */
	      err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						 "lmul/lmua", dsc);
	      break;
	    }
	  break;
	default: /* Coprocessor instructions */
	  err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
	  break;
	}
      break;
    default:
      err = 1;
    }

  if (err)
    internal_error (__FILE__, __LINE__,
		    _("thumb_process_displaced_32bit_insn: Instruction decode error"));

}
8583
8584 static void
8585 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8586 CORE_ADDR to, struct regcache *regs,
8587 struct displaced_step_closure *dsc)
8588 {
8589 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8590 uint16_t insn1
8591 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
8592
8593 if (debug_displaced)
8594 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
8595 "at %.8lx\n", insn1, (unsigned long) from);
8596
8597 dsc->is_thumb = 1;
8598 dsc->insn_size = thumb_insn_size (insn1);
8599 if (thumb_insn_size (insn1) == 4)
8600 {
8601 uint16_t insn2
8602 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
8603 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
8604 }
8605 else
8606 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
8607 }
8608
8609 void
8610 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8611 CORE_ADDR to, struct regcache *regs,
8612 struct displaced_step_closure *dsc)
8613 {
8614 int err = 0;
8615 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8616 uint32_t insn;
8617
8618 /* Most displaced instructions use a 1-instruction scratch space, so set this
8619 here and override below if/when necessary. */
8620 dsc->numinsns = 1;
8621 dsc->insn_addr = from;
8622 dsc->scratch_base = to;
8623 dsc->cleanup = NULL;
8624 dsc->wrote_to_pc = 0;
8625
8626 if (!displaced_in_arm_mode (regs))
8627 return thumb_process_displaced_insn (gdbarch, from, to, regs, dsc);
8628
8629 dsc->is_thumb = 0;
8630 dsc->insn_size = 4;
8631 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
8632 if (debug_displaced)
8633 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
8634 "at %.8lx\n", (unsigned long) insn,
8635 (unsigned long) from);
8636
8637 if ((insn & 0xf0000000) == 0xf0000000)
8638 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
8639 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
8640 {
8641 case 0x0: case 0x1: case 0x2: case 0x3:
8642 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
8643 break;
8644
8645 case 0x4: case 0x5: case 0x6:
8646 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
8647 break;
8648
8649 case 0x7:
8650 err = arm_decode_media (gdbarch, insn, dsc);
8651 break;
8652
8653 case 0x8: case 0x9: case 0xa: case 0xb:
8654 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
8655 break;
8656
8657 case 0xc: case 0xd: case 0xe: case 0xf:
8658 err = arm_decode_svc_copro (gdbarch, insn, to, regs, dsc);
8659 break;
8660 }
8661
8662 if (err)
8663 internal_error (__FILE__, __LINE__,
8664 _("arm_process_displaced_insn: Instruction decode error"));
8665 }
8666
/* Actually set up the scratch space for a displaced instruction.  Writes
   the modified instruction(s) recorded in DSC->modinsn to the scratch
   area at TO, followed by the architecture-appropriate breakpoint so the
   inferior stops after the single copied instruction executes.  */

void
arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
			    CORE_ADDR to, struct displaced_step_closure *dsc)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
  unsigned int i, len, offset;
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  /* Thumb scratch instructions are emitted as 16-bit units, ARM as
     32-bit.  */
  int size = dsc->is_thumb? 2 : 4;
  const gdb_byte *bkp_insn;

  offset = 0;
  /* Poke modified instruction(s).  */
  for (i = 0; i < dsc->numinsns; i++)
    {
      if (debug_displaced)
	{
	  fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
	  if (size == 4)
	    fprintf_unfiltered (gdb_stdlog, "%.8lx",
				dsc->modinsn[i]);
	  else if (size == 2)
	    fprintf_unfiltered (gdb_stdlog, "%.4x",
				(unsigned short)dsc->modinsn[i]);

	  /* The cast binds to TO only; OFFSET is then added in unsigned
	     long arithmetic.  */
	  fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
			      (unsigned long) to + offset);

	}
      write_memory_unsigned_integer (to + offset, size,
				     byte_order_for_code,
				     dsc->modinsn[i]);
      offset += size;
    }

  /* Choose the correct breakpoint instruction.  */
  if (dsc->is_thumb)
    {
      bkp_insn = tdep->thumb_breakpoint;
      len = tdep->thumb_breakpoint_size;
    }
  else
    {
      bkp_insn = tdep->arm_breakpoint;
      len = tdep->arm_breakpoint_size;
    }

  /* Put breakpoint afterwards.  */
  write_memory (to + offset, bkp_insn, len);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
			paddress (gdbarch, from), paddress (gdbarch, to));
}
8722
8723 /* Entry point for copying an instruction into scratch space for displaced
8724 stepping. */
8725
8726 struct displaced_step_closure *
8727 arm_displaced_step_copy_insn (struct gdbarch *gdbarch,
8728 CORE_ADDR from, CORE_ADDR to,
8729 struct regcache *regs)
8730 {
8731 struct displaced_step_closure *dsc
8732 = xmalloc (sizeof (struct displaced_step_closure));
8733 arm_process_displaced_insn (gdbarch, from, to, regs, dsc);
8734 arm_displaced_init_closure (gdbarch, from, to, dsc);
8735
8736 return dsc;
8737 }
8738
8739 /* Entry point for cleaning things up after a displaced instruction has been
8740 single-stepped. */
8741
8742 void
8743 arm_displaced_step_fixup (struct gdbarch *gdbarch,
8744 struct displaced_step_closure *dsc,
8745 CORE_ADDR from, CORE_ADDR to,
8746 struct regcache *regs)
8747 {
8748 if (dsc->cleanup)
8749 dsc->cleanup (gdbarch, regs, dsc);
8750
8751 if (!dsc->wrote_to_pc)
8752 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
8753 dsc->insn_addr + dsc->insn_size);
8754
8755 }
8756
8757 #include "bfd-in2.h"
8758 #include "libcoff.h"
8759
/* Disassembler callback: print the instruction at MEMADDR.  When the
   address is in Thumb code, fake up a COFF Thumb symbol in INFO->symbols
   so the opcodes-library ARM disassembler switches to Thumb decoding.  */

static int
gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
{
  struct gdbarch *gdbarch = info->application_data;

  if (arm_pc_is_thumb (gdbarch, memaddr))
    {
      /* One-time-initialized fake symbol machinery; note this is static
	 state shared by every disassembly.  */
      static asymbol *asym;
      static combined_entry_type ce;
      static struct coff_symbol_struct csym;
      static struct bfd fake_bfd;
      static bfd_target fake_target;

      if (csym.native == NULL)
	{
	  /* Create a fake symbol vector containing a Thumb symbol.
	     This is solely so that the code in print_insn_little_arm()
	     and print_insn_big_arm() in opcodes/arm-dis.c will detect
	     the presence of a Thumb symbol and switch to decoding
	     Thumb instructions.  */

	  fake_target.flavour = bfd_target_coff_flavour;
	  fake_bfd.xvec = &fake_target;
	  ce.u.syment.n_sclass = C_THUMBEXTFUNC;
	  csym.native = &ce;
	  csym.symbol.the_bfd = &fake_bfd;
	  csym.symbol.name = "fake";
	  asym = (asymbol *) & csym;
	}

      /* Strip the Thumb bit before handing the address to opcodes.  */
      memaddr = UNMAKE_THUMB_ADDR (memaddr);
      info->symbols = &asym;
    }
  else
    info->symbols = NULL;

  if (info->endian == BFD_ENDIAN_BIG)
    return print_insn_big_arm (memaddr, info);
  else
    return print_insn_little_arm (memaddr, info);
}
8801
8802 /* The following define instruction sequences that will cause ARM
8803 cpu's to take an undefined instruction trap. These are used to
8804 signal a breakpoint to GDB.
8805
8806 The newer ARMv4T cpu's are capable of operating in ARM or Thumb
8807 modes. A different instruction is required for each mode. The ARM
8808 cpu's can also be big or little endian. Thus four different
8809 instructions are needed to support all cases.
8810
8811 Note: ARMv4 defines several new instructions that will take the
8812 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
8813 not in fact add the new instructions. The new undefined
8814 instructions in ARMv4 are all instructions that had no defined
8815 behaviour in earlier chips. There is no guarantee that they will
8816 raise an exception, but may be treated as NOP's. In practice, it
8817 may only safe to rely on instructions matching:
8818
8819 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
8820 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
8821 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
8822
   Even this may only be true if the condition predicate is true.  The
8824 following use a condition predicate of ALWAYS so it is always TRUE.
8825
8826 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
8827 and NetBSD all use a software interrupt rather than an undefined
   instruction to force a trap.  This can be handled by the
8829 abi-specific code during establishment of the gdbarch vector. */
8830
8831 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
8832 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
8833 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
8834 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
8835
8836 static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
8837 static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
8838 static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
8839 static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
8840
/* Determine the type and size of breakpoint to insert at PCPTR.  Uses
   the program counter value to determine whether a 16-bit or 32-bit
   breakpoint should be used.  It returns a pointer to a string of
   bytes that encode a breakpoint instruction, stores the length of
   the string to *lenptr, and adjusts the program counter (if
   necessary) to point to the actual memory location where the
   breakpoint should be inserted.  */

static const unsigned char *
arm_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr, int *lenptr)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);

  if (arm_pc_is_thumb (gdbarch, *pcptr))
    {
      /* Strip the Thumb bit so the breakpoint lands on the real insn
	 address.  */
      *pcptr = UNMAKE_THUMB_ADDR (*pcptr);

      /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
	 check whether we are replacing a 32-bit instruction.  */
      if (tdep->thumb2_breakpoint != NULL)
	{
	  gdb_byte buf[2];
	  /* On a read failure we silently fall through to the 16-bit
	     breakpoint below.  */
	  if (target_read_memory (*pcptr, buf, 2) == 0)
	    {
	      unsigned short inst1;
	      inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
	      if (thumb_insn_size (inst1) == 4)
		{
		  *lenptr = tdep->thumb2_breakpoint_size;
		  return tdep->thumb2_breakpoint;
		}
	    }
	}

      *lenptr = tdep->thumb_breakpoint_size;
      return tdep->thumb_breakpoint;
    }
  else
    {
      *lenptr = tdep->arm_breakpoint_size;
      return tdep->arm_breakpoint;
    }
}
8885
8886 static void
8887 arm_remote_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr,
8888 int *kindptr)
8889 {
8890 arm_breakpoint_from_pc (gdbarch, pcptr, kindptr);
8891
8892 if (arm_pc_is_thumb (gdbarch, *pcptr) && *kindptr == 4)
8893 /* The documented magic value for a 32-bit Thumb-2 breakpoint, so
8894 that this is not confused with a 32-bit ARM breakpoint. */
8895 *kindptr = 3;
8896 }
8897
/* Extract from an array REGBUF containing the (raw) register state a
   function return value of type TYPE, and copy that, in virtual
   format, into VALBUF.  The register(s) used depend on the ABI's
   floating-point model and on TYPE's category.  */

static void
arm_extract_return_value (struct type *type, struct regcache *regs,
			  gdb_byte *valbuf)
{
  struct gdbarch *gdbarch = get_regcache_arch (regs);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  if (TYPE_CODE_FLT == TYPE_CODE (type))
    {
      switch (gdbarch_tdep (gdbarch)->fp_model)
	{
	case ARM_FLOAT_FPA:
	  {
	    /* The value is in register F0 in internal format.  We need to
	       extract the raw value and then convert it to the desired
	       internal type.  */
	    bfd_byte tmpbuf[FP_REGISTER_SIZE];

	    regcache_cooked_read (regs, ARM_F0_REGNUM, tmpbuf);
	    convert_from_extended (floatformat_from_type (type), tmpbuf,
				   valbuf, gdbarch_byte_order (gdbarch));
	  }
	  break;

	case ARM_FLOAT_SOFT_FPA:
	case ARM_FLOAT_SOFT_VFP:
	  /* ARM_FLOAT_VFP can arise if this is a variadic function so
	     not using the VFP ABI code.  */
	case ARM_FLOAT_VFP:
	  /* Float result lives in r0 (and r1 for doubles).  */
	  regcache_cooked_read (regs, ARM_A1_REGNUM, valbuf);
	  if (TYPE_LENGTH (type) > 4)
	    regcache_cooked_read (regs, ARM_A1_REGNUM + 1,
				  valbuf + INT_REGISTER_SIZE);
	  break;

	default:
	  internal_error (__FILE__, __LINE__,
			  _("arm_extract_return_value: "
			    "Floating point model not supported"));
	  break;
	}
    }
  else if (TYPE_CODE (type) == TYPE_CODE_INT
	   || TYPE_CODE (type) == TYPE_CODE_CHAR
	   || TYPE_CODE (type) == TYPE_CODE_BOOL
	   || TYPE_CODE (type) == TYPE_CODE_PTR
	   || TYPE_CODE (type) == TYPE_CODE_REF
	   || TYPE_CODE (type) == TYPE_CODE_ENUM)
    {
      /* If the type is a plain integer, then the access is
	 straight-forward.  Otherwise we have to play around a bit
	 more.  */
      int len = TYPE_LENGTH (type);
      int regno = ARM_A1_REGNUM;
      ULONGEST tmp;

      while (len > 0)
	{
	  /* By using store_unsigned_integer we avoid having to do
	     anything special for small big-endian values.  */
	  regcache_cooked_read_unsigned (regs, regno++, &tmp);
	  store_unsigned_integer (valbuf,
				  (len > INT_REGISTER_SIZE
				   ? INT_REGISTER_SIZE : len),
				  byte_order, tmp);
	  len -= INT_REGISTER_SIZE;
	  valbuf += INT_REGISTER_SIZE;
	}
    }
  else
    {
      /* For a structure or union the behaviour is as if the value had
	 been stored to word-aligned memory and then loaded into
	 registers with 32-bit load instruction(s).  */
      int len = TYPE_LENGTH (type);
      int regno = ARM_A1_REGNUM;
      bfd_byte tmpbuf[INT_REGISTER_SIZE];

      while (len > 0)
	{
	  regcache_cooked_read (regs, regno++, tmpbuf);
	  memcpy (valbuf, tmpbuf,
		  len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
	  len -= INT_REGISTER_SIZE;
	  valbuf += INT_REGISTER_SIZE;
	}
    }
}
8990
8991
8992 /* Will a function return an aggregate type in memory or in a
8993 register? Return 0 if an aggregate type can be returned in a
8994 register, 1 if it must be returned in memory. */
8995
8996 static int
8997 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
8998 {
8999 int nRc;
9000 enum type_code code;
9001
9002 CHECK_TYPEDEF (type);
9003
9004 /* In the ARM ABI, "integer" like aggregate types are returned in
9005 registers. For an aggregate type to be integer like, its size
9006 must be less than or equal to INT_REGISTER_SIZE and the
9007 offset of each addressable subfield must be zero. Note that bit
9008 fields are not addressable, and all addressable subfields of
9009 unions always start at offset zero.
9010
9011 This function is based on the behaviour of GCC 2.95.1.
9012 See: gcc/arm.c: arm_return_in_memory() for details.
9013
9014 Note: All versions of GCC before GCC 2.95.2 do not set up the
9015 parameters correctly for a function returning the following
9016 structure: struct { float f;}; This should be returned in memory,
9017 not a register. Richard Earnshaw sent me a patch, but I do not
9018 know of any way to detect if a function like the above has been
9019 compiled with the correct calling convention. */
9020
9021 /* All aggregate types that won't fit in a register must be returned
9022 in memory. */
9023 if (TYPE_LENGTH (type) > INT_REGISTER_SIZE)
9024 {
9025 return 1;
9026 }
9027
9028 /* The AAPCS says all aggregates not larger than a word are returned
9029 in a register. */
9030 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
9031 return 0;
9032
9033 /* The only aggregate types that can be returned in a register are
9034 structs and unions. Arrays must be returned in memory. */
9035 code = TYPE_CODE (type);
9036 if ((TYPE_CODE_STRUCT != code) && (TYPE_CODE_UNION != code))
9037 {
9038 return 1;
9039 }
9040
9041 /* Assume all other aggregate types can be returned in a register.
9042 Run a check for structures, unions and arrays. */
9043 nRc = 0;
9044
9045 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
9046 {
9047 int i;
9048 /* Need to check if this struct/union is "integer" like. For
9049 this to be true, its size must be less than or equal to
9050 INT_REGISTER_SIZE and the offset of each addressable
9051 subfield must be zero. Note that bit fields are not
9052 addressable, and unions always start at offset zero. If any
9053 of the subfields is a floating point type, the struct/union
9054 cannot be an integer type. */
9055
9056 /* For each field in the object, check:
9057 1) Is it FP? --> yes, nRc = 1;
9058 2) Is it addressable (bitpos != 0) and
9059 not packed (bitsize == 0)?
9060 --> yes, nRc = 1
9061 */
9062
9063 for (i = 0; i < TYPE_NFIELDS (type); i++)
9064 {
9065 enum type_code field_type_code;
9066 field_type_code = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
9067 i)));
9068
9069 /* Is it a floating point type field? */
9070 if (field_type_code == TYPE_CODE_FLT)
9071 {
9072 nRc = 1;
9073 break;
9074 }
9075
9076 /* If bitpos != 0, then we have to care about it. */
9077 if (TYPE_FIELD_BITPOS (type, i) != 0)
9078 {
9079 /* Bitfields are not addressable. If the field bitsize is
9080 zero, then the field is not packed. Hence it cannot be
9081 a bitfield or any other packed type. */
9082 if (TYPE_FIELD_BITSIZE (type, i) == 0)
9083 {
9084 nRc = 1;
9085 break;
9086 }
9087 }
9088 }
9089 }
9090
9091 return nRc;
9092 }
9093
/* Write into appropriate registers a function return value of type
   TYPE, given in virtual format.  Mirror image of
   arm_extract_return_value: the register(s) used depend on the ABI's
   floating-point model and on TYPE's category.  */

static void
arm_store_return_value (struct type *type, struct regcache *regs,
			const gdb_byte *valbuf)
{
  struct gdbarch *gdbarch = get_regcache_arch (regs);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  if (TYPE_CODE (type) == TYPE_CODE_FLT)
    {
      gdb_byte buf[MAX_REGISTER_SIZE];

      switch (gdbarch_tdep (gdbarch)->fp_model)
	{
	case ARM_FLOAT_FPA:

	  /* Convert to the FPA's extended internal format before
	     writing F0.  */
	  convert_to_extended (floatformat_from_type (type), buf, valbuf,
			       gdbarch_byte_order (gdbarch));
	  regcache_cooked_write (regs, ARM_F0_REGNUM, buf);
	  break;

	case ARM_FLOAT_SOFT_FPA:
	case ARM_FLOAT_SOFT_VFP:
	  /* ARM_FLOAT_VFP can arise if this is a variadic function so
	     not using the VFP ABI code.  */
	case ARM_FLOAT_VFP:
	  /* Float result goes in r0 (and r1 for doubles).  */
	  regcache_cooked_write (regs, ARM_A1_REGNUM, valbuf);
	  if (TYPE_LENGTH (type) > 4)
	    regcache_cooked_write (regs, ARM_A1_REGNUM + 1,
				   valbuf + INT_REGISTER_SIZE);
	  break;

	default:
	  internal_error (__FILE__, __LINE__,
			  _("arm_store_return_value: Floating "
			    "point model not supported"));
	  break;
	}
    }
  else if (TYPE_CODE (type) == TYPE_CODE_INT
	   || TYPE_CODE (type) == TYPE_CODE_CHAR
	   || TYPE_CODE (type) == TYPE_CODE_BOOL
	   || TYPE_CODE (type) == TYPE_CODE_PTR
	   || TYPE_CODE (type) == TYPE_CODE_REF
	   || TYPE_CODE (type) == TYPE_CODE_ENUM)
    {
      if (TYPE_LENGTH (type) <= 4)
	{
	  /* Values of one word or less are zero/sign-extended and
	     returned in r0.  */
	  bfd_byte tmpbuf[INT_REGISTER_SIZE];
	  LONGEST val = unpack_long (type, valbuf);

	  store_signed_integer (tmpbuf, INT_REGISTER_SIZE, byte_order, val);
	  regcache_cooked_write (regs, ARM_A1_REGNUM, tmpbuf);
	}
      else
	{
	  /* Integral values greater than one word are stored in consecutive
	     registers starting with r0.  This will always be a multiple of
	     the register size.  */
	  int len = TYPE_LENGTH (type);
	  int regno = ARM_A1_REGNUM;

	  while (len > 0)
	    {
	      regcache_cooked_write (regs, regno++, valbuf);
	      len -= INT_REGISTER_SIZE;
	      valbuf += INT_REGISTER_SIZE;
	    }
	}
    }
  else
    {
      /* For a structure or union the behaviour is as if the value had
	 been stored to word-aligned memory and then loaded into
	 registers with 32-bit load instruction(s).  */
      int len = TYPE_LENGTH (type);
      int regno = ARM_A1_REGNUM;
      bfd_byte tmpbuf[INT_REGISTER_SIZE];

      while (len > 0)
	{
	  memcpy (tmpbuf, valbuf,
		  len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
	  regcache_cooked_write (regs, regno++, tmpbuf);
	  len -= INT_REGISTER_SIZE;
	  valbuf += INT_REGISTER_SIZE;
	}
    }
}
9187
9188
/* Handle function return values.  Implement the gdbarch return_value
   method: decide whether VALTYPE is returned in registers or memory,
   and, when READBUF or WRITEBUF is non-NULL, transfer the value
   between the buffer and REGCACHE.  FUNCTION may be NULL when the
   function's type is unknown.  */

static enum return_value_convention
arm_return_value (struct gdbarch *gdbarch, struct value *function,
		  struct type *valtype, struct regcache *regcache,
		  gdb_byte *readbuf, const gdb_byte *writebuf)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
  struct type *func_type = function ? value_type (function) : NULL;
  enum arm_vfp_cprc_base_type vfp_base_type;
  int vfp_base_count;

  /* Under the VFP ABI, VFP call candidates are returned in a series
     of s/d/q registers, one base unit per register.  */
  if (arm_vfp_abi_for_function (gdbarch, func_type)
      && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
    {
      int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
      int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
      int i;
      for (i = 0; i < vfp_base_count; i++)
	{
	  if (reg_char == 'q')
	    {
	      /* Quad registers are accessed via their two double
		 halves.  */
	      if (writebuf)
		arm_neon_quad_write (gdbarch, regcache, i,
				     writebuf + i * unit_length);

	      if (readbuf)
		arm_neon_quad_read (gdbarch, regcache, i,
				    readbuf + i * unit_length);
	    }
	  else
	    {
	      char name_buf[4];
	      int regnum;

	      /* Resolve e.g. "s1" or "d2" to its register number.  */
	      xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
	      regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
						    strlen (name_buf));
	      if (writebuf)
		regcache_cooked_write (regcache, regnum,
				       writebuf + i * unit_length);
	      if (readbuf)
		regcache_cooked_read (regcache, regnum,
				      readbuf + i * unit_length);
	    }
	}
      return RETURN_VALUE_REGISTER_CONVENTION;
    }

  /* Aggregates may be returned in memory, depending on the ABI and
     the struct-return setting.  */
  if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
      || TYPE_CODE (valtype) == TYPE_CODE_UNION
      || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
    {
      if (tdep->struct_return == pcc_struct_return
	  || arm_return_in_memory (gdbarch, valtype))
	return RETURN_VALUE_STRUCT_CONVENTION;
    }

  /* AAPCS returns complex types longer than a register in memory.  */
  if (tdep->arm_abi != ARM_ABI_APCS
      && TYPE_CODE (valtype) == TYPE_CODE_COMPLEX
      && TYPE_LENGTH (valtype) > INT_REGISTER_SIZE)
    return RETURN_VALUE_STRUCT_CONVENTION;

  if (writebuf)
    arm_store_return_value (valtype, regcache, writebuf);

  if (readbuf)
    arm_extract_return_value (valtype, regcache, readbuf);

  return RETURN_VALUE_REGISTER_CONVENTION;
}
9261
9262
9263 static int
9264 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
9265 {
9266 struct gdbarch *gdbarch = get_frame_arch (frame);
9267 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9268 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
9269 CORE_ADDR jb_addr;
9270 gdb_byte buf[INT_REGISTER_SIZE];
9271
9272 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
9273
9274 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
9275 INT_REGISTER_SIZE))
9276 return 0;
9277
9278 *pc = extract_unsigned_integer (buf, INT_REGISTER_SIZE, byte_order);
9279 return 1;
9280 }
9281
9282 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
9283 return the target PC. Otherwise return 0. */
9284
9285 CORE_ADDR
9286 arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
9287 {
9288 const char *name;
9289 int namelen;
9290 CORE_ADDR start_addr;
9291
9292 /* Find the starting address and name of the function containing the PC. */
9293 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
9294 {
9295 /* Trampoline 'bx reg' doesn't belong to any functions. Do the
9296 check here. */
9297 start_addr = arm_skip_bx_reg (frame, pc);
9298 if (start_addr != 0)
9299 return start_addr;
9300
9301 return 0;
9302 }
9303
9304 /* If PC is in a Thumb call or return stub, return the address of the
9305 target PC, which is in a register. The thunk functions are called
9306 _call_via_xx, where x is the register name. The possible names
9307 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
9308 functions, named __ARM_call_via_r[0-7]. */
9309 if (strncmp (name, "_call_via_", 10) == 0
9310 || strncmp (name, "__ARM_call_via_", strlen ("__ARM_call_via_")) == 0)
9311 {
9312 /* Use the name suffix to determine which register contains the
9313 target PC. */
9314 static char *table[15] =
9315 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
9316 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
9317 };
9318 int regno;
9319 int offset = strlen (name) - 2;
9320
9321 for (regno = 0; regno <= 14; regno++)
9322 if (strcmp (&name[offset], table[regno]) == 0)
9323 return get_frame_register_unsigned (frame, regno);
9324 }
9325
9326 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
9327 non-interworking calls to foo. We could decode the stubs
9328 to find the target but it's easier to use the symbol table. */
9329 namelen = strlen (name);
9330 if (name[0] == '_' && name[1] == '_'
9331 && ((namelen > 2 + strlen ("_from_thumb")
9332 && strncmp (name + namelen - strlen ("_from_thumb"), "_from_thumb",
9333 strlen ("_from_thumb")) == 0)
9334 || (namelen > 2 + strlen ("_from_arm")
9335 && strncmp (name + namelen - strlen ("_from_arm"), "_from_arm",
9336 strlen ("_from_arm")) == 0)))
9337 {
9338 char *target_name;
9339 int target_len = namelen - 2;
9340 struct bound_minimal_symbol minsym;
9341 struct objfile *objfile;
9342 struct obj_section *sec;
9343
9344 if (name[namelen - 1] == 'b')
9345 target_len -= strlen ("_from_thumb");
9346 else
9347 target_len -= strlen ("_from_arm");
9348
9349 target_name = alloca (target_len + 1);
9350 memcpy (target_name, name + 2, target_len);
9351 target_name[target_len] = '\0';
9352
9353 sec = find_pc_section (pc);
9354 objfile = (sec == NULL) ? NULL : sec->objfile;
9355 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
9356 if (minsym.minsym != NULL)
9357 return BMSYMBOL_VALUE_ADDRESS (minsym);
9358 else
9359 return 0;
9360 }
9361
9362 return 0; /* not a stub */
9363 }
9364
9365 static void
9366 set_arm_command (char *args, int from_tty)
9367 {
9368 printf_unfiltered (_("\
9369 \"set arm\" must be followed by an apporpriate subcommand.\n"));
9370 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
9371 }
9372
/* "show arm" prefix command: display the values of all "show arm ..."
   subcommands.  */

static void
show_arm_command (char *args, int from_tty)
{
  cmd_show_list (showarmcmdlist, from_tty, "");
}
9378
/* Re-select the current architecture so that a changed "set arm ..."
   setting (ABI, float model, ...) takes effect immediately.  */

static void
arm_update_current_architecture (void)
{
  struct gdbarch_info info;

  /* If the current architecture is not ARM, we have nothing to do.  */
  if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
    return;

  /* Update the architecture.  */
  gdbarch_info_init (&info);

  if (!gdbarch_update_p (info))
    internal_error (__FILE__, __LINE__, _("could not update architecture"));
}
9394
9395 static void
9396 set_fp_model_sfunc (char *args, int from_tty,
9397 struct cmd_list_element *c)
9398 {
9399 enum arm_float_model fp_model;
9400
9401 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
9402 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
9403 {
9404 arm_fp_model = fp_model;
9405 break;
9406 }
9407
9408 if (fp_model == ARM_FLOAT_LAST)
9409 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
9410 current_fp_model);
9411
9412 arm_update_current_architecture ();
9413 }
9414
9415 static void
9416 show_fp_model (struct ui_file *file, int from_tty,
9417 struct cmd_list_element *c, const char *value)
9418 {
9419 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
9420
9421 if (arm_fp_model == ARM_FLOAT_AUTO
9422 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
9423 fprintf_filtered (file, _("\
9424 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
9425 fp_model_strings[tdep->fp_model]);
9426 else
9427 fprintf_filtered (file, _("\
9428 The current ARM floating point model is \"%s\".\n"),
9429 fp_model_strings[arm_fp_model]);
9430 }
9431
9432 static void
9433 arm_set_abi (char *args, int from_tty,
9434 struct cmd_list_element *c)
9435 {
9436 enum arm_abi_kind arm_abi;
9437
9438 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
9439 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
9440 {
9441 arm_abi_global = arm_abi;
9442 break;
9443 }
9444
9445 if (arm_abi == ARM_ABI_LAST)
9446 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
9447 arm_abi_string);
9448
9449 arm_update_current_architecture ();
9450 }
9451
9452 static void
9453 arm_show_abi (struct ui_file *file, int from_tty,
9454 struct cmd_list_element *c, const char *value)
9455 {
9456 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
9457
9458 if (arm_abi_global == ARM_ABI_AUTO
9459 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
9460 fprintf_filtered (file, _("\
9461 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
9462 arm_abi_strings[tdep->arm_abi]);
9463 else
9464 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
9465 arm_abi_string);
9466 }
9467
/* Show-hook for "show arm fallback-mode".  */

static void
arm_show_fallback_mode (struct ui_file *file, int from_tty,
			struct cmd_list_element *c, const char *value)
{
  fprintf_filtered (file,
		    _("The current execution mode assumed "
		      "(when symbols are unavailable) is \"%s\".\n"),
		    arm_fallback_mode_string);
}
9477
9478 static void
9479 arm_show_force_mode (struct ui_file *file, int from_tty,
9480 struct cmd_list_element *c, const char *value)
9481 {
9482 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
9483
9484 fprintf_filtered (file,
9485 _("The current execution mode assumed "
9486 "(even when symbols are available) is \"%s\".\n"),
9487 arm_force_mode_string);
9488 }
9489
/* If the user changes the register disassembly style used for info
   register and other commands, we have to also switch the style used
   in opcodes for disassembly output.  This function is run in the "set
   arm disassembly" command, and does that.  ARGS, FROM_TTY and C are
   unused.  */

static void
set_disassembly_style_sfunc (char *args, int from_tty,
			     struct cmd_list_element *c)
{
  set_disassembly_style ();
}
9501 \f
9502 /* Return the ARM register name corresponding to register I. */
9503 static const char *
9504 arm_register_name (struct gdbarch *gdbarch, int i)
9505 {
9506 const int num_regs = gdbarch_num_regs (gdbarch);
9507
9508 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
9509 && i >= num_regs && i < num_regs + 32)
9510 {
9511 static const char *const vfp_pseudo_names[] = {
9512 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
9513 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
9514 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
9515 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
9516 };
9517
9518 return vfp_pseudo_names[i - num_regs];
9519 }
9520
9521 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
9522 && i >= num_regs + 32 && i < num_regs + 32 + 16)
9523 {
9524 static const char *const neon_pseudo_names[] = {
9525 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
9526 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
9527 };
9528
9529 return neon_pseudo_names[i - num_regs - 32];
9530 }
9531
9532 if (i >= ARRAY_SIZE (arm_register_names))
9533 /* These registers are only supported on targets which supply
9534 an XML description. */
9535 return "";
9536
9537 return arm_register_names[i];
9538 }
9539
/* Propagate the user's chosen register-name style to the opcodes
   disassembler.  */

static void
set_disassembly_style (void)
{
  int current;

  /* Find the style that the user wants.  NOTE: this compares pointers,
     relying on disassembly_style being set to one of the entries of
     valid_disassembly_styles (as the enum "set" command does).  */
  for (current = 0; current < num_disassembly_options; current++)
    if (disassembly_style == valid_disassembly_styles[current])
      break;
  gdb_assert (current < num_disassembly_options);

  /* Synchronize the disassembler.  */
  set_arm_regname_option (current);
}
9554
9555 /* Test whether the coff symbol specific value corresponds to a Thumb
9556 function. */
9557
9558 static int
9559 coff_sym_is_thumb (int val)
9560 {
9561 return (val == C_THUMBEXT
9562 || val == C_THUMBSTAT
9563 || val == C_THUMBEXTFUNC
9564 || val == C_THUMBSTATFUNC
9565 || val == C_THUMBLABEL);
9566 }
9567
9568 /* arm_coff_make_msymbol_special()
9569 arm_elf_make_msymbol_special()
9570
9571 These functions test whether the COFF or ELF symbol corresponds to
9572 an address in thumb code, and set a "special" bit in a minimal
9573 symbol to indicate that it does. */
9574
/* Mark MSYM special if the ELF symbol SYM's branch type indicates
   Thumb code.  */

static void
arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
{
  if (ARM_SYM_BRANCH_TYPE (&((elf_symbol_type *)sym)->internal_elf_sym)
      == ST_BRANCH_TO_THUMB)
    MSYMBOL_SET_SPECIAL (msym);
}
9582
/* Mark MSYM special if the COFF storage class VAL denotes a Thumb
   symbol.  */

static void
arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
{
  if (coff_sym_is_thumb (val))
    MSYMBOL_SET_SPECIAL (msym);
}
9589
/* Objfile-data destructor: free the per-section mapping-symbol
   vectors.  ARG is the struct arm_per_objfile attached to OBJFILE
   (its memory itself lives on the objfile obstack).  */

static void
arm_objfile_data_free (struct objfile *objfile, void *arg)
{
  struct arm_per_objfile *data = arg;
  unsigned int i;

  for (i = 0; i < objfile->obfd->section_count; i++)
    VEC_free (arm_mapping_symbol_s, data->section_maps[i]);
}
9599
/* Record the ARM mapping symbol SYM ($a, $t or $d) in OBJFILE's
   per-section map, keeping each section's vector sorted by symbol
   value.  Other "$..." symbols are ignored.  */

static void
arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
			   asymbol *sym)
{
  const char *name = bfd_asymbol_name (sym);
  struct arm_per_objfile *data;
  VEC(arm_mapping_symbol_s) **map_p;
  struct arm_mapping_symbol new_map_sym;

  gdb_assert (name[0] == '$');
  if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
    return;

  /* Create the per-objfile data and its per-section map array on
     first use.  */
  data = objfile_data (objfile, arm_objfile_data_key);
  if (data == NULL)
    {
      data = OBSTACK_ZALLOC (&objfile->objfile_obstack,
			     struct arm_per_objfile);
      set_objfile_data (objfile, arm_objfile_data_key, data);
      data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
					   objfile->obfd->section_count,
					   VEC(arm_mapping_symbol_s) *);
    }
  map_p = &data->section_maps[bfd_get_section (sym)->index];

  new_map_sym.value = sym->value;
  new_map_sym.type = name[1];

  /* Assume that most mapping symbols appear in order of increasing
     value.  If they were randomly distributed, it would be faster to
     always push here and then sort at first use.  */
  if (!VEC_empty (arm_mapping_symbol_s, *map_p))
    {
      struct arm_mapping_symbol *prev_map_sym;

      prev_map_sym = VEC_last (arm_mapping_symbol_s, *map_p);
      if (prev_map_sym->value >= sym->value)
	{
	  /* Out-of-order symbol: insert at the position that keeps
	     the vector sorted.  */
	  unsigned int idx;
	  idx = VEC_lower_bound (arm_mapping_symbol_s, *map_p, &new_map_sym,
				 arm_compare_mapping_symbols);
	  VEC_safe_insert (arm_mapping_symbol_s, *map_p, idx, &new_map_sym);
	  return;
	}
    }

  VEC_safe_push (arm_mapping_symbol_s, *map_p, &new_map_sym);
}
9648
9649 static void
9650 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
9651 {
9652 struct gdbarch *gdbarch = get_regcache_arch (regcache);
9653 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
9654
9655 /* If necessary, set the T bit. */
9656 if (arm_apcs_32)
9657 {
9658 ULONGEST val, t_bit;
9659 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
9660 t_bit = arm_psr_thumb_bit (gdbarch);
9661 if (arm_pc_is_thumb (gdbarch, pc))
9662 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9663 val | t_bit);
9664 else
9665 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9666 val & ~t_bit);
9667 }
9668 }
9669
9670 /* Read the contents of a NEON quad register, by reading from two
9671 double registers. This is used to implement the quad pseudo
9672 registers, and for argument passing in case the quad registers are
9673 missing; vectors are passed in quad registers when using the VFP
9674 ABI, even if a NEON unit is not present. REGNUM is the index of
9675 the quad register, in [0, 15]. */
9676
9677 static enum register_status
9678 arm_neon_quad_read (struct gdbarch *gdbarch, struct regcache *regcache,
9679 int regnum, gdb_byte *buf)
9680 {
9681 char name_buf[4];
9682 gdb_byte reg_buf[8];
9683 int offset, double_regnum;
9684 enum register_status status;
9685
9686 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
9687 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9688 strlen (name_buf));
9689
9690 /* d0 is always the least significant half of q0. */
9691 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9692 offset = 8;
9693 else
9694 offset = 0;
9695
9696 status = regcache_raw_read (regcache, double_regnum, reg_buf);
9697 if (status != REG_VALID)
9698 return status;
9699 memcpy (buf + offset, reg_buf, 8);
9700
9701 offset = 8 - offset;
9702 status = regcache_raw_read (regcache, double_regnum + 1, reg_buf);
9703 if (status != REG_VALID)
9704 return status;
9705 memcpy (buf + offset, reg_buf, 8);
9706
9707 return REG_VALID;
9708 }
9709
/* Implement the gdbarch pseudo_register_read method.  Pseudo numbers
   0-31 (after the raw registers) are the single-precision views
   s0-s31, each of which is one half of a double register; with NEON,
   pseudo numbers 32-47 are the quad views q0-q15.  */

static enum register_status
arm_pseudo_read (struct gdbarch *gdbarch, struct regcache *regcache,
		 int regnum, gdb_byte *buf)
{
  const int num_regs = gdbarch_num_regs (gdbarch);
  char name_buf[4];
  gdb_byte reg_buf[8];
  int offset, double_regnum;

  gdb_assert (regnum >= num_regs);
  regnum -= num_regs;

  if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
    /* Quad-precision register.  */
    return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
  else
    {
      enum register_status status;

      /* Single-precision register.  */
      gdb_assert (regnum < 32);

      /* s0 is always the least significant half of d0.  */
      if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
	offset = (regnum & 1) ? 0 : 4;
      else
	offset = (regnum & 1) ? 4 : 0;

      /* Read the enclosing double register and copy out our half.  */
      xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
      double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
						   strlen (name_buf));

      status = regcache_raw_read (regcache, double_regnum, reg_buf);
      if (status == REG_VALID)
	memcpy (buf, reg_buf + offset, 4);
      return status;
    }
}
9748
9749 /* Store the contents of BUF to a NEON quad register, by writing to
9750 two double registers. This is used to implement the quad pseudo
9751 registers, and for argument passing in case the quad registers are
9752 missing; vectors are passed in quad registers when using the VFP
9753 ABI, even if a NEON unit is not present. REGNUM is the index
9754 of the quad register, in [0, 15]. */
9755
9756 static void
9757 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
9758 int regnum, const gdb_byte *buf)
9759 {
9760 char name_buf[4];
9761 int offset, double_regnum;
9762
9763 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
9764 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9765 strlen (name_buf));
9766
9767 /* d0 is always the least significant half of q0. */
9768 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9769 offset = 8;
9770 else
9771 offset = 0;
9772
9773 regcache_raw_write (regcache, double_regnum, buf + offset);
9774 offset = 8 - offset;
9775 regcache_raw_write (regcache, double_regnum + 1, buf + offset);
9776 }
9777
/* Implement the gdbarch pseudo_register_write method; the pseudo
   register layout is as described for arm_pseudo_read.  A single
   precision write is a read-modify-write of the enclosing double
   register.  */

static void
arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
		  int regnum, const gdb_byte *buf)
{
  const int num_regs = gdbarch_num_regs (gdbarch);
  char name_buf[4];
  gdb_byte reg_buf[8];
  int offset, double_regnum;

  gdb_assert (regnum >= num_regs);
  regnum -= num_regs;

  if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
    /* Quad-precision register.  */
    arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
  else
    {
      /* Single-precision register.  */
      gdb_assert (regnum < 32);

      /* s0 is always the least significant half of d0.  */
      if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
	offset = (regnum & 1) ? 0 : 4;
      else
	offset = (regnum & 1) ? 4 : 0;

      xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
      double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
						   strlen (name_buf));

      /* NOTE(review): the status of this read is not checked; an
	 invalid read would splice the new half into garbage.  */
      regcache_raw_read (regcache, double_regnum, reg_buf);
      memcpy (reg_buf + offset, buf, 4);
      regcache_raw_write (regcache, double_regnum, reg_buf);
    }
}
9813
/* user-reg read callback: BATON points at the number of the register
   to fetch from FRAME.  */

static struct value *
value_of_arm_user_reg (struct frame_info *frame, const void *baton)
{
  const int *reg_p = baton;
  return value_of_register (*reg_p, frame);
}
9820 \f
/* OS/ABI sniffer for ARM ELF files.  ELFOSABI_ARM by itself does not
   identify the OS, so scan the note sections in that case; anything
   else falls through to the generic ELF sniffer.  */

static enum gdb_osabi
arm_elf_osabi_sniffer (bfd *abfd)
{
  unsigned int elfosabi;
  enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;

  elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];

  if (elfosabi == ELFOSABI_ARM)
    /* GNU tools use this value.  Check note sections in this case,
       as well.  */
    bfd_map_over_sections (abfd,
			   generic_elf_osabi_sniff_abi_tag_sections,
			   &osabi);

  /* Anything else will be handled by the generic ELF sniffer.  */
  return osabi;
}
9839
9840 static int
9841 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
9842 struct reggroup *group)
9843 {
9844 /* FPS register's type is INT, but belongs to float_reggroup. Beside
9845 this, FPS register belongs to save_regroup, restore_reggroup, and
9846 all_reggroup, of course. */
9847 if (regnum == ARM_FPS_REGNUM)
9848 return (group == float_reggroup
9849 || group == save_reggroup
9850 || group == restore_reggroup
9851 || group == all_reggroup);
9852 else
9853 return default_register_reggroup_p (gdbarch, regnum, group);
9854 }
9855
9856 \f
/* For backward-compatibility we allow two 'g' packet lengths with
   the remote protocol depending on whether FPA registers are
   supplied.  M-profile targets do not have FPA registers, but some
   stubs already exist in the wild which use a 'g' packet which
   supplies them albeit with dummy values.  The packet format which
   includes FPA registers should be considered deprecated for
   M-profile targets.  */

static void
arm_register_g_packet_guesses (struct gdbarch *gdbarch)
{
  if (gdbarch_tdep (gdbarch)->is_m)
    {
      /* If we know from the executable this is an M-profile target,
	 cater for remote targets whose register set layout is the
	 same as the FPA layout.  Each guess below pairs an expected
	 'g' packet payload size with the target description to use
	 when that size is seen.  */
      register_remote_g_packet_guess (gdbarch,
				      /* r0-r12,sp,lr,pc; f0-f7; fps,xpsr */
				      (16 * INT_REGISTER_SIZE)
				      + (8 * FP_REGISTER_SIZE)
				      + (2 * INT_REGISTER_SIZE),
				      tdesc_arm_with_m_fpa_layout);

      /* The regular M-profile layout.  */
      register_remote_g_packet_guess (gdbarch,
				      /* r0-r12,sp,lr,pc; xpsr */
				      (16 * INT_REGISTER_SIZE)
				      + INT_REGISTER_SIZE,
				      tdesc_arm_with_m);

      /* M-profile plus M4F VFP.  */
      register_remote_g_packet_guess (gdbarch,
				      /* r0-r12,sp,lr,pc; d0-d15; fpscr,xpsr */
				      (16 * INT_REGISTER_SIZE)
				      + (16 * VFP_REGISTER_SIZE)
				      + (2 * INT_REGISTER_SIZE),
				      tdesc_arm_with_m_vfp_d16);
    }

  /* Otherwise we don't have a useful guess.  */
}
9898
9899 \f
9900 /* Initialize the current architecture based on INFO. If possible,
9901 re-use an architecture from ARCHES, which is a list of
9902 architectures already created during this debugging session.
9903
9904 Called e.g. at program startup, when reading a core file, and when
9905 reading a binary file. */
9906
9907 static struct gdbarch *
9908 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
9909 {
9910 struct gdbarch_tdep *tdep;
9911 struct gdbarch *gdbarch;
9912 struct gdbarch_list *best_arch;
9913 enum arm_abi_kind arm_abi = arm_abi_global;
9914 enum arm_float_model fp_model = arm_fp_model;
9915 struct tdesc_arch_data *tdesc_data = NULL;
9916 int i, is_m = 0;
9917 int have_vfp_registers = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
9918 int have_neon = 0;
9919 int have_fpa_registers = 1;
9920 const struct target_desc *tdesc = info.target_desc;
9921
9922 /* If we have an object to base this architecture on, try to determine
9923 its ABI. */
9924
9925 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
9926 {
9927 int ei_osabi, e_flags;
9928
9929 switch (bfd_get_flavour (info.abfd))
9930 {
9931 case bfd_target_aout_flavour:
9932 /* Assume it's an old APCS-style ABI. */
9933 arm_abi = ARM_ABI_APCS;
9934 break;
9935
9936 case bfd_target_coff_flavour:
9937 /* Assume it's an old APCS-style ABI. */
9938 /* XXX WinCE? */
9939 arm_abi = ARM_ABI_APCS;
9940 break;
9941
9942 case bfd_target_elf_flavour:
9943 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
9944 e_flags = elf_elfheader (info.abfd)->e_flags;
9945
9946 if (ei_osabi == ELFOSABI_ARM)
9947 {
9948 /* GNU tools used to use this value, but do not for EABI
9949 objects. There's nowhere to tag an EABI version
9950 anyway, so assume APCS. */
9951 arm_abi = ARM_ABI_APCS;
9952 }
9953 else if (ei_osabi == ELFOSABI_NONE)
9954 {
9955 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
9956 int attr_arch, attr_profile;
9957
9958 switch (eabi_ver)
9959 {
9960 case EF_ARM_EABI_UNKNOWN:
9961 /* Assume GNU tools. */
9962 arm_abi = ARM_ABI_APCS;
9963 break;
9964
9965 case EF_ARM_EABI_VER4:
9966 case EF_ARM_EABI_VER5:
9967 arm_abi = ARM_ABI_AAPCS;
9968 /* EABI binaries default to VFP float ordering.
9969 They may also contain build attributes that can
9970 be used to identify if the VFP argument-passing
9971 ABI is in use. */
9972 if (fp_model == ARM_FLOAT_AUTO)
9973 {
9974 #ifdef HAVE_ELF
9975 switch (bfd_elf_get_obj_attr_int (info.abfd,
9976 OBJ_ATTR_PROC,
9977 Tag_ABI_VFP_args))
9978 {
9979 case 0:
9980 /* "The user intended FP parameter/result
9981 passing to conform to AAPCS, base
9982 variant". */
9983 fp_model = ARM_FLOAT_SOFT_VFP;
9984 break;
9985 case 1:
9986 /* "The user intended FP parameter/result
9987 passing to conform to AAPCS, VFP
9988 variant". */
9989 fp_model = ARM_FLOAT_VFP;
9990 break;
9991 case 2:
9992 /* "The user intended FP parameter/result
9993 passing to conform to tool chain-specific
9994 conventions" - we don't know any such
9995 conventions, so leave it as "auto". */
9996 break;
9997 default:
9998 /* Attribute value not mentioned in the
9999 October 2008 ABI, so leave it as
10000 "auto". */
10001 break;
10002 }
10003 #else
10004 fp_model = ARM_FLOAT_SOFT_VFP;
10005 #endif
10006 }
10007 break;
10008
10009 default:
10010 /* Leave it as "auto". */
10011 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
10012 break;
10013 }
10014
10015 #ifdef HAVE_ELF
10016 /* Detect M-profile programs. This only works if the
10017 executable file includes build attributes; GCC does
10018 copy them to the executable, but e.g. RealView does
10019 not. */
10020 attr_arch = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
10021 Tag_CPU_arch);
10022 attr_profile = bfd_elf_get_obj_attr_int (info.abfd,
10023 OBJ_ATTR_PROC,
10024 Tag_CPU_arch_profile);
10025 /* GCC specifies the profile for v6-M; RealView only
10026 specifies the profile for architectures starting with
10027 V7 (as opposed to architectures with a tag
10028 numerically greater than TAG_CPU_ARCH_V7). */
10029 if (!tdesc_has_registers (tdesc)
10030 && (attr_arch == TAG_CPU_ARCH_V6_M
10031 || attr_arch == TAG_CPU_ARCH_V6S_M
10032 || attr_profile == 'M'))
10033 is_m = 1;
10034 #endif
10035 }
10036
10037 if (fp_model == ARM_FLOAT_AUTO)
10038 {
10039 int e_flags = elf_elfheader (info.abfd)->e_flags;
10040
10041 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
10042 {
10043 case 0:
10044 /* Leave it as "auto". Strictly speaking this case
10045 means FPA, but almost nobody uses that now, and
10046 many toolchains fail to set the appropriate bits
10047 for the floating-point model they use. */
10048 break;
10049 case EF_ARM_SOFT_FLOAT:
10050 fp_model = ARM_FLOAT_SOFT_FPA;
10051 break;
10052 case EF_ARM_VFP_FLOAT:
10053 fp_model = ARM_FLOAT_VFP;
10054 break;
10055 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
10056 fp_model = ARM_FLOAT_SOFT_VFP;
10057 break;
10058 }
10059 }
10060
10061 if (e_flags & EF_ARM_BE8)
10062 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
10063
10064 break;
10065
10066 default:
10067 /* Leave it as "auto". */
10068 break;
10069 }
10070 }
10071
10072 /* Check any target description for validity. */
10073 if (tdesc_has_registers (tdesc))
10074 {
10075 /* For most registers we require GDB's default names; but also allow
10076 the numeric names for sp / lr / pc, as a convenience. */
10077 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
10078 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
10079 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
10080
10081 const struct tdesc_feature *feature;
10082 int valid_p;
10083
10084 feature = tdesc_find_feature (tdesc,
10085 "org.gnu.gdb.arm.core");
10086 if (feature == NULL)
10087 {
10088 feature = tdesc_find_feature (tdesc,
10089 "org.gnu.gdb.arm.m-profile");
10090 if (feature == NULL)
10091 return NULL;
10092 else
10093 is_m = 1;
10094 }
10095
10096 tdesc_data = tdesc_data_alloc ();
10097
10098 valid_p = 1;
10099 for (i = 0; i < ARM_SP_REGNUM; i++)
10100 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
10101 arm_register_names[i]);
10102 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
10103 ARM_SP_REGNUM,
10104 arm_sp_names);
10105 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
10106 ARM_LR_REGNUM,
10107 arm_lr_names);
10108 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
10109 ARM_PC_REGNUM,
10110 arm_pc_names);
10111 if (is_m)
10112 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10113 ARM_PS_REGNUM, "xpsr");
10114 else
10115 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10116 ARM_PS_REGNUM, "cpsr");
10117
10118 if (!valid_p)
10119 {
10120 tdesc_data_cleanup (tdesc_data);
10121 return NULL;
10122 }
10123
10124 feature = tdesc_find_feature (tdesc,
10125 "org.gnu.gdb.arm.fpa");
10126 if (feature != NULL)
10127 {
10128 valid_p = 1;
10129 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
10130 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
10131 arm_register_names[i]);
10132 if (!valid_p)
10133 {
10134 tdesc_data_cleanup (tdesc_data);
10135 return NULL;
10136 }
10137 }
10138 else
10139 have_fpa_registers = 0;
10140
10141 feature = tdesc_find_feature (tdesc,
10142 "org.gnu.gdb.xscale.iwmmxt");
10143 if (feature != NULL)
10144 {
10145 static const char *const iwmmxt_names[] = {
10146 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
10147 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
10148 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
10149 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
10150 };
10151
10152 valid_p = 1;
10153 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
10154 valid_p
10155 &= tdesc_numbered_register (feature, tdesc_data, i,
10156 iwmmxt_names[i - ARM_WR0_REGNUM]);
10157
10158 /* Check for the control registers, but do not fail if they
10159 are missing. */
10160 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
10161 tdesc_numbered_register (feature, tdesc_data, i,
10162 iwmmxt_names[i - ARM_WR0_REGNUM]);
10163
10164 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
10165 valid_p
10166 &= tdesc_numbered_register (feature, tdesc_data, i,
10167 iwmmxt_names[i - ARM_WR0_REGNUM]);
10168
10169 if (!valid_p)
10170 {
10171 tdesc_data_cleanup (tdesc_data);
10172 return NULL;
10173 }
10174 }
10175
10176 /* If we have a VFP unit, check whether the single precision registers
10177 are present. If not, then we will synthesize them as pseudo
10178 registers. */
10179 feature = tdesc_find_feature (tdesc,
10180 "org.gnu.gdb.arm.vfp");
10181 if (feature != NULL)
10182 {
10183 static const char *const vfp_double_names[] = {
10184 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
10185 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
10186 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
10187 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
10188 };
10189
10190 /* Require the double precision registers. There must be either
10191 16 or 32. */
10192 valid_p = 1;
10193 for (i = 0; i < 32; i++)
10194 {
10195 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10196 ARM_D0_REGNUM + i,
10197 vfp_double_names[i]);
10198 if (!valid_p)
10199 break;
10200 }
10201 if (!valid_p && i == 16)
10202 valid_p = 1;
10203
10204 /* Also require FPSCR. */
10205 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10206 ARM_FPSCR_REGNUM, "fpscr");
10207 if (!valid_p)
10208 {
10209 tdesc_data_cleanup (tdesc_data);
10210 return NULL;
10211 }
10212
10213 if (tdesc_unnumbered_register (feature, "s0") == 0)
10214 have_vfp_pseudos = 1;
10215
10216 have_vfp_registers = 1;
10217
10218 /* If we have VFP, also check for NEON. The architecture allows
10219 NEON without VFP (integer vector operations only), but GDB
10220 does not support that. */
10221 feature = tdesc_find_feature (tdesc,
10222 "org.gnu.gdb.arm.neon");
10223 if (feature != NULL)
10224 {
10225 /* NEON requires 32 double-precision registers. */
10226 if (i != 32)
10227 {
10228 tdesc_data_cleanup (tdesc_data);
10229 return NULL;
10230 }
10231
10232 /* If there are quad registers defined by the stub, use
10233 their type; otherwise (normally) provide them with
10234 the default type. */
10235 if (tdesc_unnumbered_register (feature, "q0") == 0)
10236 have_neon_pseudos = 1;
10237
10238 have_neon = 1;
10239 }
10240 }
10241 }
10242
10243 /* If there is already a candidate, use it. */
10244 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
10245 best_arch != NULL;
10246 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
10247 {
10248 if (arm_abi != ARM_ABI_AUTO
10249 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
10250 continue;
10251
10252 if (fp_model != ARM_FLOAT_AUTO
10253 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
10254 continue;
10255
10256 /* There are various other properties in tdep that we do not
10257 need to check here: those derived from a target description,
10258 since gdbarches with a different target description are
10259 automatically disqualified. */
10260
10261 /* Do check is_m, though, since it might come from the binary. */
10262 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
10263 continue;
10264
10265 /* Found a match. */
10266 break;
10267 }
10268
10269 if (best_arch != NULL)
10270 {
10271 if (tdesc_data != NULL)
10272 tdesc_data_cleanup (tdesc_data);
10273 return best_arch->gdbarch;
10274 }
10275
10276 tdep = xcalloc (1, sizeof (struct gdbarch_tdep));
10277 gdbarch = gdbarch_alloc (&info, tdep);
10278
10279 /* Record additional information about the architecture we are defining.
10280 These are gdbarch discriminators, like the OSABI. */
10281 tdep->arm_abi = arm_abi;
10282 tdep->fp_model = fp_model;
10283 tdep->is_m = is_m;
10284 tdep->have_fpa_registers = have_fpa_registers;
10285 tdep->have_vfp_registers = have_vfp_registers;
10286 tdep->have_vfp_pseudos = have_vfp_pseudos;
10287 tdep->have_neon_pseudos = have_neon_pseudos;
10288 tdep->have_neon = have_neon;
10289
10290 arm_register_g_packet_guesses (gdbarch);
10291
10292 /* Breakpoints. */
10293 switch (info.byte_order_for_code)
10294 {
10295 case BFD_ENDIAN_BIG:
10296 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
10297 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
10298 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
10299 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
10300
10301 break;
10302
10303 case BFD_ENDIAN_LITTLE:
10304 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
10305 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
10306 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
10307 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
10308
10309 break;
10310
10311 default:
10312 internal_error (__FILE__, __LINE__,
10313 _("arm_gdbarch_init: bad byte order for float format"));
10314 }
10315
10316 /* On ARM targets char defaults to unsigned. */
10317 set_gdbarch_char_signed (gdbarch, 0);
10318
10319 /* Note: for displaced stepping, this includes the breakpoint, and one word
10320 of additional scratch space. This setting isn't used for anything beside
10321 displaced stepping at present. */
10322 set_gdbarch_max_insn_length (gdbarch, 4 * DISPLACED_MODIFIED_INSNS);
10323
10324 /* This should be low enough for everything. */
10325 tdep->lowest_pc = 0x20;
10326 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
10327
10328 /* The default, for both APCS and AAPCS, is to return small
10329 structures in registers. */
10330 tdep->struct_return = reg_struct_return;
10331
10332 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
10333 set_gdbarch_frame_align (gdbarch, arm_frame_align);
10334
10335 set_gdbarch_write_pc (gdbarch, arm_write_pc);
10336
10337 /* Frame handling. */
10338 set_gdbarch_dummy_id (gdbarch, arm_dummy_id);
10339 set_gdbarch_unwind_pc (gdbarch, arm_unwind_pc);
10340 set_gdbarch_unwind_sp (gdbarch, arm_unwind_sp);
10341
10342 frame_base_set_default (gdbarch, &arm_normal_base);
10343
10344 /* Address manipulation. */
10345 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
10346
10347 /* Advance PC across function entry code. */
10348 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
10349
10350 /* Detect whether PC is in function epilogue. */
10351 set_gdbarch_in_function_epilogue_p (gdbarch, arm_in_function_epilogue_p);
10352
10353 /* Skip trampolines. */
10354 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
10355
10356 /* The stack grows downward. */
10357 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
10358
10359 /* Breakpoint manipulation. */
10360 set_gdbarch_breakpoint_from_pc (gdbarch, arm_breakpoint_from_pc);
10361 set_gdbarch_remote_breakpoint_from_pc (gdbarch,
10362 arm_remote_breakpoint_from_pc);
10363
10364 /* Information about registers, etc. */
10365 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
10366 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
10367 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
10368 set_gdbarch_register_type (gdbarch, arm_register_type);
10369 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
10370
10371 /* This "info float" is FPA-specific. Use the generic version if we
10372 do not have FPA. */
10373 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
10374 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
10375
10376 /* Internal <-> external register number maps. */
10377 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
10378 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
10379
10380 set_gdbarch_register_name (gdbarch, arm_register_name);
10381
10382 /* Returning results. */
10383 set_gdbarch_return_value (gdbarch, arm_return_value);
10384
10385 /* Disassembly. */
10386 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
10387
10388 /* Minsymbol frobbing. */
10389 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
10390 set_gdbarch_coff_make_msymbol_special (gdbarch,
10391 arm_coff_make_msymbol_special);
10392 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
10393
10394 /* Thumb-2 IT block support. */
10395 set_gdbarch_adjust_breakpoint_address (gdbarch,
10396 arm_adjust_breakpoint_address);
10397
10398 /* Virtual tables. */
10399 set_gdbarch_vbit_in_delta (gdbarch, 1);
10400
10401 /* Hook in the ABI-specific overrides, if they have been registered. */
10402 gdbarch_init_osabi (info, gdbarch);
10403
10404 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
10405
10406 /* Add some default predicates. */
10407 if (is_m)
10408 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
10409 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
10410 dwarf2_append_unwinders (gdbarch);
10411 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
10412 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
10413
10414 /* Now we have tuned the configuration, set a few final things,
10415 based on what the OS ABI has told us. */
10416
10417 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
10418 binaries are always marked. */
10419 if (tdep->arm_abi == ARM_ABI_AUTO)
10420 tdep->arm_abi = ARM_ABI_APCS;
10421
10422 /* Watchpoints are not steppable. */
10423 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
10424
10425 /* We used to default to FPA for generic ARM, but almost nobody
10426 uses that now, and we now provide a way for the user to force
10427 the model. So default to the most useful variant. */
10428 if (tdep->fp_model == ARM_FLOAT_AUTO)
10429 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
10430
10431 if (tdep->jb_pc >= 0)
10432 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
10433
10434 /* Floating point sizes and format. */
10435 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
10436 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
10437 {
10438 set_gdbarch_double_format
10439 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
10440 set_gdbarch_long_double_format
10441 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
10442 }
10443 else
10444 {
10445 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
10446 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
10447 }
10448
10449 if (have_vfp_pseudos)
10450 {
10451 /* NOTE: These are the only pseudo registers used by
10452 the ARM target at the moment. If more are added, a
10453 little more care in numbering will be needed. */
10454
10455 int num_pseudos = 32;
10456 if (have_neon_pseudos)
10457 num_pseudos += 16;
10458 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
10459 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
10460 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
10461 }
10462
10463 if (tdesc_data)
10464 {
10465 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
10466
10467 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
10468
10469 /* Override tdesc_register_type to adjust the types of VFP
10470 registers for NEON. */
10471 set_gdbarch_register_type (gdbarch, arm_register_type);
10472 }
10473
  /* Add standard register aliases.  We add aliases even for those
     names which are used by the current architecture - it's simpler,
     and does no harm, since nothing ever lists user registers.  */
10477 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
10478 user_reg_add (gdbarch, arm_register_aliases[i].name,
10479 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
10480
10481 return gdbarch;
10482 }
10483
10484 static void
10485 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
10486 {
10487 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
10488
10489 if (tdep == NULL)
10490 return;
10491
10492 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
10493 (unsigned long) tdep->lowest_pc);
10494 }
10495
10496 extern initialize_file_ftype _initialize_arm_tdep; /* -Wmissing-prototypes */
10497
10498 void
10499 _initialize_arm_tdep (void)
10500 {
10501 struct ui_file *stb;
10502 long length;
10503 struct cmd_list_element *new_set, *new_show;
10504 const char *setname;
10505 const char *setdesc;
10506 const char *const *regnames;
10507 int numregs, i, j;
10508 static char *helptext;
10509 char regdesc[1024], *rdptr = regdesc;
10510 size_t rest = sizeof (regdesc);
10511
10512 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
10513
10514 arm_objfile_data_key
10515 = register_objfile_data_with_cleanup (NULL, arm_objfile_data_free);
10516
10517 /* Add ourselves to objfile event chain. */
10518 observer_attach_new_objfile (arm_exidx_new_objfile);
10519 arm_exidx_data_key
10520 = register_objfile_data_with_cleanup (NULL, arm_exidx_data_free);
10521
10522 /* Register an ELF OS ABI sniffer for ARM binaries. */
10523 gdbarch_register_osabi_sniffer (bfd_arch_arm,
10524 bfd_target_elf_flavour,
10525 arm_elf_osabi_sniffer);
10526
10527 /* Initialize the standard target descriptions. */
10528 initialize_tdesc_arm_with_m ();
10529 initialize_tdesc_arm_with_m_fpa_layout ();
10530 initialize_tdesc_arm_with_m_vfp_d16 ();
10531 initialize_tdesc_arm_with_iwmmxt ();
10532 initialize_tdesc_arm_with_vfpv2 ();
10533 initialize_tdesc_arm_with_vfpv3 ();
10534 initialize_tdesc_arm_with_neon ();
10535
10536 /* Get the number of possible sets of register names defined in opcodes. */
10537 num_disassembly_options = get_arm_regname_num_options ();
10538
10539 /* Add root prefix command for all "set arm"/"show arm" commands. */
10540 add_prefix_cmd ("arm", no_class, set_arm_command,
10541 _("Various ARM-specific commands."),
10542 &setarmcmdlist, "set arm ", 0, &setlist);
10543
10544 add_prefix_cmd ("arm", no_class, show_arm_command,
10545 _("Various ARM-specific commands."),
10546 &showarmcmdlist, "show arm ", 0, &showlist);
10547
10548 /* Sync the opcode insn printer with our register viewer. */
10549 parse_arm_disassembler_option ("reg-names-std");
10550
10551 /* Initialize the array that will be passed to
10552 add_setshow_enum_cmd(). */
10553 valid_disassembly_styles
10554 = xmalloc ((num_disassembly_options + 1) * sizeof (char *));
10555 for (i = 0; i < num_disassembly_options; i++)
10556 {
10557 numregs = get_arm_regnames (i, &setname, &setdesc, &regnames);
10558 valid_disassembly_styles[i] = setname;
10559 length = snprintf (rdptr, rest, "%s - %s\n", setname, setdesc);
10560 rdptr += length;
10561 rest -= length;
10562 /* When we find the default names, tell the disassembler to use
10563 them. */
10564 if (!strcmp (setname, "std"))
10565 {
10566 disassembly_style = setname;
10567 set_arm_regname_option (i);
10568 }
10569 }
10570 /* Mark the end of valid options. */
10571 valid_disassembly_styles[num_disassembly_options] = NULL;
10572
10573 /* Create the help text. */
10574 stb = mem_fileopen ();
10575 fprintf_unfiltered (stb, "%s%s%s",
10576 _("The valid values are:\n"),
10577 regdesc,
10578 _("The default is \"std\"."));
10579 helptext = ui_file_xstrdup (stb, NULL);
10580 ui_file_delete (stb);
10581
10582 add_setshow_enum_cmd("disassembler", no_class,
10583 valid_disassembly_styles, &disassembly_style,
10584 _("Set the disassembly style."),
10585 _("Show the disassembly style."),
10586 helptext,
10587 set_disassembly_style_sfunc,
10588 NULL, /* FIXME: i18n: The disassembly style is
10589 \"%s\". */
10590 &setarmcmdlist, &showarmcmdlist);
10591
10592 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
10593 _("Set usage of ARM 32-bit mode."),
10594 _("Show usage of ARM 32-bit mode."),
10595 _("When off, a 26-bit PC will be used."),
10596 NULL,
10597 NULL, /* FIXME: i18n: Usage of ARM 32-bit
10598 mode is %s. */
10599 &setarmcmdlist, &showarmcmdlist);
10600
10601 /* Add a command to allow the user to force the FPU model. */
10602 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
10603 _("Set the floating point type."),
10604 _("Show the floating point type."),
10605 _("auto - Determine the FP typefrom the OS-ABI.\n\
10606 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
10607 fpa - FPA co-processor (GCC compiled).\n\
10608 softvfp - Software FP with pure-endian doubles.\n\
10609 vfp - VFP co-processor."),
10610 set_fp_model_sfunc, show_fp_model,
10611 &setarmcmdlist, &showarmcmdlist);
10612
10613 /* Add a command to allow the user to force the ABI. */
10614 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
10615 _("Set the ABI."),
10616 _("Show the ABI."),
10617 NULL, arm_set_abi, arm_show_abi,
10618 &setarmcmdlist, &showarmcmdlist);
10619
10620 /* Add two commands to allow the user to force the assumed
10621 execution mode. */
10622 add_setshow_enum_cmd ("fallback-mode", class_support,
10623 arm_mode_strings, &arm_fallback_mode_string,
10624 _("Set the mode assumed when symbols are unavailable."),
10625 _("Show the mode assumed when symbols are unavailable."),
10626 NULL, NULL, arm_show_fallback_mode,
10627 &setarmcmdlist, &showarmcmdlist);
10628 add_setshow_enum_cmd ("force-mode", class_support,
10629 arm_mode_strings, &arm_force_mode_string,
10630 _("Set the mode assumed even when symbols are available."),
10631 _("Show the mode assumed even when symbols are available."),
10632 NULL, NULL, arm_show_force_mode,
10633 &setarmcmdlist, &showarmcmdlist);
10634
10635 /* Debugging flag. */
10636 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
10637 _("Set ARM debugging."),
10638 _("Show ARM debugging."),
10639 _("When on, arm-specific debugging is enabled."),
10640 NULL,
10641 NULL, /* FIXME: i18n: "ARM debugging is %s. */
10642 &setdebuglist, &showdebuglist);
10643 }
10644
/* ARM-reversible process record data structures.  */

#define ARM_INSN_SIZE_BYTES 4
#define THUMB_INSN_SIZE_BYTES 2
#define THUMB2_INSN_SIZE_BYTES 4

/* Bit position of the S/L bit in load/store instructions.  */
#define INSN_S_L_BIT_NUM 20

/* Allocate REGS to hold LENGTH register numbers copied from
   RECORD_BUF.  LENGTH is evaluated exactly once (the original
   expanded it again, unparenthesized, inside the memcpy size
   expression).  Does nothing when LENGTH is zero.  */
#define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
        do  \
          { \
            unsigned int reg_len = (LENGTH); \
            if (reg_len) \
              { \
                REGS = XNEWVEC (uint32_t, reg_len); \
                memcpy (&(REGS)[0], &(RECORD_BUF)[0], \
                        sizeof (uint32_t) * reg_len); \
              } \
          } \
        while (0)

/* Likewise for memory records: allocate MEMS to hold LENGTH
   `struct arm_mem_r' entries copied from RECORD_BUF.  The memcpy
   destination is MEMS itself rather than `&MEMS->len' (same address,
   clearer intent).  */
#define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
        do  \
          { \
            unsigned int mem_len = (LENGTH); \
            if (mem_len) \
              { \
                MEMS =  XNEWVEC (struct arm_mem_r, mem_len); \
                memcpy (MEMS, &(RECORD_BUF)[0], \
                        sizeof (struct arm_mem_r) * mem_len); \
              } \
          } \
        while (0)

/* Checks whether insn is already recorded or yet to be decoded
   (boolean expression).  */
#define INSN_RECORDED(ARM_RECORD) \
        (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
10682
/* ARM memory record structure: one memory region (byte length plus
   start address) that a recorded store instruction is about to
   overwrite.  Filled from the record_buf_mem (len, addr) pairs built
   by the record handlers below.  */
struct arm_mem_r
{
  uint32_t len;    /* Record length (number of bytes written).  */
  uint32_t addr;   /* Memory address (start of the written region).  */
};
10689
/* ARM instruction record contains opcode of current insn
   and execution state (before entry to decode_insn()),
   contains list of to-be-modified registers and
   memory blocks (on return from decode_insn()).  */

typedef struct insn_decode_record_t
{
  struct gdbarch *gdbarch;      /* Architecture the insn belongs to.  */
  struct regcache *regcache;    /* Register cache to read operand values from.  */
  CORE_ADDR this_addr; /* Address of the insn being decoded.  */
  uint32_t arm_insn; /* Should accommodate thumb.  */
  uint32_t cond; /* Condition code.  */
  uint32_t opcode; /* Insn opcode.  */
  uint32_t decode; /* Insn decode bits.  */
  uint32_t mem_rec_count; /* No of mem records.  */
  uint32_t reg_rec_count; /* No of reg records.  */
  uint32_t *arm_regs; /* Registers to be saved for this record.  */
  struct arm_mem_r *arm_mems; /* Memory to be saved for this record.  */
} insn_decode_record;
10709
10710
/* Checks ARM SBZ and SBO mandatory fields.  Extracts the LEN-bit
   field of INSN starting at 1-based bit position BIT_NUM and returns
   0 if the should-be-one (SBO non-zero) / should-be-zero (SBO zero)
   test fails, 1 otherwise.  */

static int
sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
{
  uint32_t ones;

  /* An empty field is trivially valid.  Test this before calling
     bits (): with LEN == 0 the unsigned expression (len - 1) wraps
     around and bits () would be handed a bogus range.  */
  if (!len)
    return 1;

  ones = bits (insn, bit_num - 1, (bit_num -1) + (len - 1));

  /* NOTE(review): when SBO == 0, the complement below also sets all
     the bits above the extracted field, and "ones & sbo" is then
     always zero, so this path appears to return 0 for any field
     narrower than 32 bits -- verify against the callers.  */
  if (!sbo)
    ones = ~ones;

  while (ones)
    {
      if (!(ones & sbo))
	{
	  return 0;
	}
      ones = ones >> 1;
    }
  return 1;
}
10734
/* Result codes returned by the ARM process-record routines.  */
enum arm_record_result
{
  ARM_RECORD_SUCCESS = 0,
  ARM_RECORD_FAILURE = 1
};
10740
/* Which misc store variant arm_record_strx is recording:
   STRH (halfword) or STRD (doubleword).  */
typedef enum
{
  ARM_RECORD_STRH=1,
  ARM_RECORD_STRD
} arm_record_strx_t;
10746
/* Encoding of the insn being recorded: 32-bit ARM, 16-bit Thumb,
   or 32-bit Thumb-2 (see the *_INSN_SIZE_BYTES constants above).  */
typedef enum
{
  ARM_RECORD=1,
  THUMB_RECORD,
  THUMB2_RECORD
} record_type_t;
10753
10754
10755 static int
10756 arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
10757 uint32_t *record_buf_mem, arm_record_strx_t str_type)
10758 {
10759
10760 struct regcache *reg_cache = arm_insn_r->regcache;
10761 ULONGEST u_regval[2]= {0};
10762
10763 uint32_t reg_src1 = 0, reg_src2 = 0;
10764 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
10765 uint32_t opcode1 = 0;
10766
10767 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10768 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10769 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10770
10771
10772 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10773 {
10774 /* 1) Handle misc store, immediate offset. */
10775 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
10776 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
10777 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10778 regcache_raw_read_unsigned (reg_cache, reg_src1,
10779 &u_regval[0]);
10780 if (ARM_PC_REGNUM == reg_src1)
10781 {
10782 /* If R15 was used as Rn, hence current PC+8. */
10783 u_regval[0] = u_regval[0] + 8;
10784 }
10785 offset_8 = (immed_high << 4) | immed_low;
10786 /* Calculate target store address. */
10787 if (14 == arm_insn_r->opcode)
10788 {
10789 tgt_mem_addr = u_regval[0] + offset_8;
10790 }
10791 else
10792 {
10793 tgt_mem_addr = u_regval[0] - offset_8;
10794 }
10795 if (ARM_RECORD_STRH == str_type)
10796 {
10797 record_buf_mem[0] = 2;
10798 record_buf_mem[1] = tgt_mem_addr;
10799 arm_insn_r->mem_rec_count = 1;
10800 }
10801 else if (ARM_RECORD_STRD == str_type)
10802 {
10803 record_buf_mem[0] = 4;
10804 record_buf_mem[1] = tgt_mem_addr;
10805 record_buf_mem[2] = 4;
10806 record_buf_mem[3] = tgt_mem_addr + 4;
10807 arm_insn_r->mem_rec_count = 2;
10808 }
10809 }
10810 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
10811 {
10812 /* 2) Store, register offset. */
10813 /* Get Rm. */
10814 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10815 /* Get Rn. */
10816 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10817 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10818 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10819 if (15 == reg_src2)
10820 {
10821 /* If R15 was used as Rn, hence current PC+8. */
10822 u_regval[0] = u_regval[0] + 8;
10823 }
10824 /* Calculate target store address, Rn +/- Rm, register offset. */
10825 if (12 == arm_insn_r->opcode)
10826 {
10827 tgt_mem_addr = u_regval[0] + u_regval[1];
10828 }
10829 else
10830 {
10831 tgt_mem_addr = u_regval[1] - u_regval[0];
10832 }
10833 if (ARM_RECORD_STRH == str_type)
10834 {
10835 record_buf_mem[0] = 2;
10836 record_buf_mem[1] = tgt_mem_addr;
10837 arm_insn_r->mem_rec_count = 1;
10838 }
10839 else if (ARM_RECORD_STRD == str_type)
10840 {
10841 record_buf_mem[0] = 4;
10842 record_buf_mem[1] = tgt_mem_addr;
10843 record_buf_mem[2] = 4;
10844 record_buf_mem[3] = tgt_mem_addr + 4;
10845 arm_insn_r->mem_rec_count = 2;
10846 }
10847 }
10848 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10849 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
10850 {
10851 /* 3) Store, immediate pre-indexed. */
10852 /* 5) Store, immediate post-indexed. */
10853 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
10854 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
10855 offset_8 = (immed_high << 4) | immed_low;
10856 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10857 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10858 /* Calculate target store address, Rn +/- Rm, register offset. */
10859 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
10860 {
10861 tgt_mem_addr = u_regval[0] + offset_8;
10862 }
10863 else
10864 {
10865 tgt_mem_addr = u_regval[0] - offset_8;
10866 }
10867 if (ARM_RECORD_STRH == str_type)
10868 {
10869 record_buf_mem[0] = 2;
10870 record_buf_mem[1] = tgt_mem_addr;
10871 arm_insn_r->mem_rec_count = 1;
10872 }
10873 else if (ARM_RECORD_STRD == str_type)
10874 {
10875 record_buf_mem[0] = 4;
10876 record_buf_mem[1] = tgt_mem_addr;
10877 record_buf_mem[2] = 4;
10878 record_buf_mem[3] = tgt_mem_addr + 4;
10879 arm_insn_r->mem_rec_count = 2;
10880 }
10881 /* Record Rn also as it changes. */
10882 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
10883 arm_insn_r->reg_rec_count = 1;
10884 }
10885 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
10886 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
10887 {
10888 /* 4) Store, register pre-indexed. */
10889 /* 6) Store, register post -indexed. */
10890 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10891 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10892 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10893 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10894 /* Calculate target store address, Rn +/- Rm, register offset. */
10895 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
10896 {
10897 tgt_mem_addr = u_regval[0] + u_regval[1];
10898 }
10899 else
10900 {
10901 tgt_mem_addr = u_regval[1] - u_regval[0];
10902 }
10903 if (ARM_RECORD_STRH == str_type)
10904 {
10905 record_buf_mem[0] = 2;
10906 record_buf_mem[1] = tgt_mem_addr;
10907 arm_insn_r->mem_rec_count = 1;
10908 }
10909 else if (ARM_RECORD_STRD == str_type)
10910 {
10911 record_buf_mem[0] = 4;
10912 record_buf_mem[1] = tgt_mem_addr;
10913 record_buf_mem[2] = 4;
10914 record_buf_mem[3] = tgt_mem_addr + 4;
10915 arm_insn_r->mem_rec_count = 2;
10916 }
10917 /* Record Rn also as it changes. */
10918 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
10919 arm_insn_r->reg_rec_count = 1;
10920 }
10921 return 0;
10922 }
10923
10924 /* Handling ARM extension space insns. */
10925
10926 static int
10927 arm_record_extension_space (insn_decode_record *arm_insn_r)
10928 {
10929 uint32_t ret = 0; /* Return value: -1:record failure ; 0:success */
10930 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
10931 uint32_t record_buf[8], record_buf_mem[8];
10932 uint32_t reg_src1 = 0;
10933 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
10934 struct regcache *reg_cache = arm_insn_r->regcache;
10935 ULONGEST u_regval = 0;
10936
10937 gdb_assert (!INSN_RECORDED(arm_insn_r));
10938 /* Handle unconditional insn extension space. */
10939
10940 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
10941 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10942 if (arm_insn_r->cond)
10943 {
10944 /* PLD has no affect on architectural state, it just affects
10945 the caches. */
10946 if (5 == ((opcode1 & 0xE0) >> 5))
10947 {
10948 /* BLX(1) */
10949 record_buf[0] = ARM_PS_REGNUM;
10950 record_buf[1] = ARM_LR_REGNUM;
10951 arm_insn_r->reg_rec_count = 2;
10952 }
10953 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
10954 }
10955
10956
10957 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10958 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
10959 {
10960 ret = -1;
10961 /* Undefined instruction on ARM V5; need to handle if later
10962 versions define it. */
10963 }
10964
10965 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
10966 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10967 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
10968
10969 /* Handle arithmetic insn extension space. */
10970 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
10971 && !INSN_RECORDED(arm_insn_r))
10972 {
10973 /* Handle MLA(S) and MUL(S). */
10974 if (0 <= insn_op1 && 3 >= insn_op1)
10975 {
10976 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10977 record_buf[1] = ARM_PS_REGNUM;
10978 arm_insn_r->reg_rec_count = 2;
10979 }
10980 else if (4 <= insn_op1 && 15 >= insn_op1)
10981 {
10982 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
10983 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10984 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10985 record_buf[2] = ARM_PS_REGNUM;
10986 arm_insn_r->reg_rec_count = 3;
10987 }
10988 }
10989
10990 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
10991 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
10992 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
10993
10994 /* Handle control insn extension space. */
10995
10996 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
10997 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
10998 {
10999 if (!bit (arm_insn_r->arm_insn,25))
11000 {
11001 if (!bits (arm_insn_r->arm_insn, 4, 7))
11002 {
11003 if ((0 == insn_op1) || (2 == insn_op1))
11004 {
11005 /* MRS. */
11006 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11007 arm_insn_r->reg_rec_count = 1;
11008 }
11009 else if (1 == insn_op1)
11010 {
11011 /* CSPR is going to be changed. */
11012 record_buf[0] = ARM_PS_REGNUM;
11013 arm_insn_r->reg_rec_count = 1;
11014 }
11015 else if (3 == insn_op1)
11016 {
11017 /* SPSR is going to be changed. */
11018 /* We need to get SPSR value, which is yet to be done. */
11019 printf_unfiltered (_("Process record does not support "
11020 "instruction 0x%0x at address %s.\n"),
11021 arm_insn_r->arm_insn,
11022 paddress (arm_insn_r->gdbarch,
11023 arm_insn_r->this_addr));
11024 return -1;
11025 }
11026 }
11027 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
11028 {
11029 if (1 == insn_op1)
11030 {
11031 /* BX. */
11032 record_buf[0] = ARM_PS_REGNUM;
11033 arm_insn_r->reg_rec_count = 1;
11034 }
11035 else if (3 == insn_op1)
11036 {
11037 /* CLZ. */
11038 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11039 arm_insn_r->reg_rec_count = 1;
11040 }
11041 }
11042 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
11043 {
11044 /* BLX. */
11045 record_buf[0] = ARM_PS_REGNUM;
11046 record_buf[1] = ARM_LR_REGNUM;
11047 arm_insn_r->reg_rec_count = 2;
11048 }
11049 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
11050 {
11051 /* QADD, QSUB, QDADD, QDSUB */
11052 record_buf[0] = ARM_PS_REGNUM;
11053 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11054 arm_insn_r->reg_rec_count = 2;
11055 }
11056 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
11057 {
11058 /* BKPT. */
11059 record_buf[0] = ARM_PS_REGNUM;
11060 record_buf[1] = ARM_LR_REGNUM;
11061 arm_insn_r->reg_rec_count = 2;
11062
11063 /* Save SPSR also;how? */
11064 printf_unfiltered (_("Process record does not support "
11065 "instruction 0x%0x at address %s.\n"),
11066 arm_insn_r->arm_insn,
11067 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11068 return -1;
11069 }
11070 else if(8 == bits (arm_insn_r->arm_insn, 4, 7)
11071 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
11072 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
11073 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
11074 )
11075 {
11076 if (0 == insn_op1 || 1 == insn_op1)
11077 {
11078 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
11079 /* We dont do optimization for SMULW<y> where we
11080 need only Rd. */
11081 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11082 record_buf[1] = ARM_PS_REGNUM;
11083 arm_insn_r->reg_rec_count = 2;
11084 }
11085 else if (2 == insn_op1)
11086 {
11087 /* SMLAL<x><y>. */
11088 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11089 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11090 arm_insn_r->reg_rec_count = 2;
11091 }
11092 else if (3 == insn_op1)
11093 {
11094 /* SMUL<x><y>. */
11095 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11096 arm_insn_r->reg_rec_count = 1;
11097 }
11098 }
11099 }
11100 else
11101 {
11102 /* MSR : immediate form. */
11103 if (1 == insn_op1)
11104 {
11105 /* CSPR is going to be changed. */
11106 record_buf[0] = ARM_PS_REGNUM;
11107 arm_insn_r->reg_rec_count = 1;
11108 }
11109 else if (3 == insn_op1)
11110 {
11111 /* SPSR is going to be changed. */
11112 /* we need to get SPSR value, which is yet to be done */
11113 printf_unfiltered (_("Process record does not support "
11114 "instruction 0x%0x at address %s.\n"),
11115 arm_insn_r->arm_insn,
11116 paddress (arm_insn_r->gdbarch,
11117 arm_insn_r->this_addr));
11118 return -1;
11119 }
11120 }
11121 }
11122
11123 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
11124 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
11125 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
11126
11127 /* Handle load/store insn extension space. */
11128
11129 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
11130 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
11131 && !INSN_RECORDED(arm_insn_r))
11132 {
11133 /* SWP/SWPB. */
11134 if (0 == insn_op1)
11135 {
11136 /* These insn, changes register and memory as well. */
11137 /* SWP or SWPB insn. */
11138 /* Get memory address given by Rn. */
11139 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11140 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11141 /* SWP insn ?, swaps word. */
11142 if (8 == arm_insn_r->opcode)
11143 {
11144 record_buf_mem[0] = 4;
11145 }
11146 else
11147 {
11148 /* SWPB insn, swaps only byte. */
11149 record_buf_mem[0] = 1;
11150 }
11151 record_buf_mem[1] = u_regval;
11152 arm_insn_r->mem_rec_count = 1;
11153 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11154 arm_insn_r->reg_rec_count = 1;
11155 }
11156 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11157 {
11158 /* STRH. */
11159 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11160 ARM_RECORD_STRH);
11161 }
11162 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11163 {
11164 /* LDRD. */
11165 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11166 record_buf[1] = record_buf[0] + 1;
11167 arm_insn_r->reg_rec_count = 2;
11168 }
11169 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11170 {
11171 /* STRD. */
11172 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11173 ARM_RECORD_STRD);
11174 }
11175 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
11176 {
11177 /* LDRH, LDRSB, LDRSH. */
11178 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11179 arm_insn_r->reg_rec_count = 1;
11180 }
11181
11182 }
11183
11184 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
11185 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
11186 && !INSN_RECORDED(arm_insn_r))
11187 {
11188 ret = -1;
11189 /* Handle coprocessor insn extension space. */
11190 }
11191
11192 /* To be done for ARMv5 and later; as of now we return -1. */
11193 if (-1 == ret)
11194 printf_unfiltered (_("Process record does not support instruction x%0x "
11195 "at address %s.\n"),arm_insn_r->arm_insn,
11196 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11197
11198
11199 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11200 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11201
11202 return ret;
11203 }
11204
11205 /* Handling opcode 000 insns. */
11206
11207 static int
11208 arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
11209 {
11210 struct regcache *reg_cache = arm_insn_r->regcache;
11211 uint32_t record_buf[8], record_buf_mem[8];
11212 ULONGEST u_regval[2] = {0};
11213
11214 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
11215 uint32_t immed_high = 0, immed_low = 0, offset_8 = 0, tgt_mem_addr = 0;
11216 uint32_t opcode1 = 0;
11217
11218 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11219 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11220 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
11221
11222 /* Data processing insn /multiply insn. */
11223 if (9 == arm_insn_r->decode
11224 && ((4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
11225 || (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)))
11226 {
11227 /* Handle multiply instructions. */
11228 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
11229 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
11230 {
11231 /* Handle MLA and MUL. */
11232 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11233 record_buf[1] = ARM_PS_REGNUM;
11234 arm_insn_r->reg_rec_count = 2;
11235 }
11236 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
11237 {
11238 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
11239 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11240 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11241 record_buf[2] = ARM_PS_REGNUM;
11242 arm_insn_r->reg_rec_count = 3;
11243 }
11244 }
11245 else if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
11246 && (11 == arm_insn_r->decode || 13 == arm_insn_r->decode))
11247 {
11248 /* Handle misc load insns, as 20th bit (L = 1). */
11249 /* LDR insn has a capability to do branching, if
11250 MOV LR, PC is precceded by LDR insn having Rn as R15
11251 in that case, it emulates branch and link insn, and hence we
11252 need to save CSPR and PC as well. I am not sure this is right
11253 place; as opcode = 010 LDR insn make this happen, if R15 was
11254 used. */
11255 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11256 if (15 != reg_dest)
11257 {
11258 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11259 arm_insn_r->reg_rec_count = 1;
11260 }
11261 else
11262 {
11263 record_buf[0] = reg_dest;
11264 record_buf[1] = ARM_PS_REGNUM;
11265 arm_insn_r->reg_rec_count = 2;
11266 }
11267 }
11268 else if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
11269 && sbo_sbz (arm_insn_r->arm_insn, 5, 12, 0)
11270 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
11271 && 2 == bits (arm_insn_r->arm_insn, 20, 21))
11272 {
11273 /* Handle MSR insn. */
11274 if (9 == arm_insn_r->opcode)
11275 {
11276 /* CSPR is going to be changed. */
11277 record_buf[0] = ARM_PS_REGNUM;
11278 arm_insn_r->reg_rec_count = 1;
11279 }
11280 else
11281 {
11282 /* SPSR is going to be changed. */
11283 /* How to read SPSR value? */
11284 printf_unfiltered (_("Process record does not support instruction "
11285 "0x%0x at address %s.\n"),
11286 arm_insn_r->arm_insn,
11287 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11288 return -1;
11289 }
11290 }
11291 else if (9 == arm_insn_r->decode
11292 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
11293 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11294 {
11295 /* Handling SWP, SWPB. */
11296 /* These insn, changes register and memory as well. */
11297 /* SWP or SWPB insn. */
11298
11299 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11300 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11301 /* SWP insn ?, swaps word. */
11302 if (8 == arm_insn_r->opcode)
11303 {
11304 record_buf_mem[0] = 4;
11305 }
11306 else
11307 {
11308 /* SWPB insn, swaps only byte. */
11309 record_buf_mem[0] = 1;
11310 }
11311 record_buf_mem[1] = u_regval[0];
11312 arm_insn_r->mem_rec_count = 1;
11313 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11314 arm_insn_r->reg_rec_count = 1;
11315 }
11316 else if (3 == arm_insn_r->decode && 0x12 == opcode1
11317 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
11318 {
11319 /* Handle BLX, branch and link/exchange. */
11320 if (9 == arm_insn_r->opcode)
11321 {
11322 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm,
11323 and R14 stores the return address. */
11324 record_buf[0] = ARM_PS_REGNUM;
11325 record_buf[1] = ARM_LR_REGNUM;
11326 arm_insn_r->reg_rec_count = 2;
11327 }
11328 }
11329 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
11330 {
11331 /* Handle enhanced software breakpoint insn, BKPT. */
11332 /* CPSR is changed to be executed in ARM state, disabling normal
11333 interrupts, entering abort mode. */
11334 /* According to high vector configuration PC is set. */
11335 /* user hit breakpoint and type reverse, in
11336 that case, we need to go back with previous CPSR and
11337 Program Counter. */
11338 record_buf[0] = ARM_PS_REGNUM;
11339 record_buf[1] = ARM_LR_REGNUM;
11340 arm_insn_r->reg_rec_count = 2;
11341
11342 /* Save SPSR also; how? */
11343 printf_unfiltered (_("Process record does not support instruction "
11344 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11345 paddress (arm_insn_r->gdbarch,
11346 arm_insn_r->this_addr));
11347 return -1;
11348 }
11349 else if (11 == arm_insn_r->decode
11350 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11351 {
11352 /* Handle enhanced store insns and DSP insns (e.g. LDRD). */
11353
11354 /* Handle str(x) insn */
11355 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11356 ARM_RECORD_STRH);
11357 }
11358 else if (1 == arm_insn_r->decode && 0x12 == opcode1
11359 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
11360 {
11361 /* Handle BX, branch and link/exchange. */
11362 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm. */
11363 record_buf[0] = ARM_PS_REGNUM;
11364 arm_insn_r->reg_rec_count = 1;
11365 }
11366 else if (1 == arm_insn_r->decode && 0x16 == opcode1
11367 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
11368 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
11369 {
11370 /* Count leading zeros: CLZ. */
11371 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11372 arm_insn_r->reg_rec_count = 1;
11373 }
11374 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
11375 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
11376 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
11377 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0)
11378 )
11379 {
11380 /* Handle MRS insn. */
11381 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11382 arm_insn_r->reg_rec_count = 1;
11383 }
11384 else if (arm_insn_r->opcode <= 15)
11385 {
11386 /* Normal data processing insns. */
11387 /* Out of 11 shifter operands mode, all the insn modifies destination
11388 register, which is specified by 13-16 decode. */
11389 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11390 record_buf[1] = ARM_PS_REGNUM;
11391 arm_insn_r->reg_rec_count = 2;
11392 }
11393 else
11394 {
11395 return -1;
11396 }
11397
11398 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11399 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11400 return 0;
11401 }
11402
11403 /* Handling opcode 001 insns. */
11404
11405 static int
11406 arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
11407 {
11408 uint32_t record_buf[8], record_buf_mem[8];
11409
11410 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11411 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11412
11413 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
11414 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
11415 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
11416 )
11417 {
11418 /* Handle MSR insn. */
11419 if (9 == arm_insn_r->opcode)
11420 {
11421 /* CSPR is going to be changed. */
11422 record_buf[0] = ARM_PS_REGNUM;
11423 arm_insn_r->reg_rec_count = 1;
11424 }
11425 else
11426 {
11427 /* SPSR is going to be changed. */
11428 }
11429 }
11430 else if (arm_insn_r->opcode <= 15)
11431 {
11432 /* Normal data processing insns. */
11433 /* Out of 11 shifter operands mode, all the insn modifies destination
11434 register, which is specified by 13-16 decode. */
11435 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11436 record_buf[1] = ARM_PS_REGNUM;
11437 arm_insn_r->reg_rec_count = 2;
11438 }
11439 else
11440 {
11441 return -1;
11442 }
11443
11444 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11445 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11446 return 0;
11447 }
11448
11449 /* Handling opcode 010 insns. */
11450
11451 static int
11452 arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
11453 {
11454 struct regcache *reg_cache = arm_insn_r->regcache;
11455
11456 uint32_t reg_src1 = 0 , reg_dest = 0;
11457 uint32_t offset_12 = 0, tgt_mem_addr = 0;
11458 uint32_t record_buf[8], record_buf_mem[8];
11459
11460 ULONGEST u_regval = 0;
11461
11462 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11463 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11464
11465 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11466 {
11467 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11468 /* LDR insn has a capability to do branching, if
11469 MOV LR, PC is precedded by LDR insn having Rn as R15
11470 in that case, it emulates branch and link insn, and hence we
11471 need to save CSPR and PC as well. */
11472 if (ARM_PC_REGNUM != reg_dest)
11473 {
11474 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11475 arm_insn_r->reg_rec_count = 1;
11476 }
11477 else
11478 {
11479 record_buf[0] = reg_dest;
11480 record_buf[1] = ARM_PS_REGNUM;
11481 arm_insn_r->reg_rec_count = 2;
11482 }
11483 }
11484 else
11485 {
11486 /* Store, immediate offset, immediate pre-indexed,
11487 immediate post-indexed. */
11488 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11489 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
11490 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11491 /* U == 1 */
11492 if (bit (arm_insn_r->arm_insn, 23))
11493 {
11494 tgt_mem_addr = u_regval + offset_12;
11495 }
11496 else
11497 {
11498 tgt_mem_addr = u_regval - offset_12;
11499 }
11500
11501 switch (arm_insn_r->opcode)
11502 {
11503 /* STR. */
11504 case 8:
11505 case 12:
11506 /* STR. */
11507 case 9:
11508 case 13:
11509 /* STRT. */
11510 case 1:
11511 case 5:
11512 /* STR. */
11513 case 4:
11514 case 0:
11515 record_buf_mem[0] = 4;
11516 break;
11517
11518 /* STRB. */
11519 case 10:
11520 case 14:
11521 /* STRB. */
11522 case 11:
11523 case 15:
11524 /* STRBT. */
11525 case 3:
11526 case 7:
11527 /* STRB. */
11528 case 2:
11529 case 6:
11530 record_buf_mem[0] = 1;
11531 break;
11532
11533 default:
11534 gdb_assert_not_reached ("no decoding pattern found");
11535 break;
11536 }
11537 record_buf_mem[1] = tgt_mem_addr;
11538 arm_insn_r->mem_rec_count = 1;
11539
11540 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11541 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11542 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11543 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11544 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11545 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11546 )
11547 {
11548 /* We are handling pre-indexed mode; post-indexed mode;
11549 where Rn is going to be changed. */
11550 record_buf[0] = reg_src1;
11551 arm_insn_r->reg_rec_count = 1;
11552 }
11553 }
11554
11555 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11556 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11557 return 0;
11558 }
11559
11560 /* Handling opcode 011 insns. */
11561
11562 static int
11563 arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
11564 {
11565 struct regcache *reg_cache = arm_insn_r->regcache;
11566
11567 uint32_t shift_imm = 0;
11568 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
11569 uint32_t offset_12 = 0, tgt_mem_addr = 0;
11570 uint32_t record_buf[8], record_buf_mem[8];
11571
11572 LONGEST s_word;
11573 ULONGEST u_regval[2];
11574
11575 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11576 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11577
11578 /* Handle enhanced store insns and LDRD DSP insn,
11579 order begins according to addressing modes for store insns
11580 STRH insn. */
11581
11582 /* LDR or STR? */
11583 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11584 {
11585 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11586 /* LDR insn has a capability to do branching, if
11587 MOV LR, PC is precedded by LDR insn having Rn as R15
11588 in that case, it emulates branch and link insn, and hence we
11589 need to save CSPR and PC as well. */
11590 if (15 != reg_dest)
11591 {
11592 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11593 arm_insn_r->reg_rec_count = 1;
11594 }
11595 else
11596 {
11597 record_buf[0] = reg_dest;
11598 record_buf[1] = ARM_PS_REGNUM;
11599 arm_insn_r->reg_rec_count = 2;
11600 }
11601 }
11602 else
11603 {
11604 if (! bits (arm_insn_r->arm_insn, 4, 11))
11605 {
11606 /* Store insn, register offset and register pre-indexed,
11607 register post-indexed. */
11608 /* Get Rm. */
11609 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11610 /* Get Rn. */
11611 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11612 regcache_raw_read_unsigned (reg_cache, reg_src1
11613 , &u_regval[0]);
11614 regcache_raw_read_unsigned (reg_cache, reg_src2
11615 , &u_regval[1]);
11616 if (15 == reg_src2)
11617 {
11618 /* If R15 was used as Rn, hence current PC+8. */
11619 /* Pre-indexed mode doesnt reach here ; illegal insn. */
11620 u_regval[0] = u_regval[0] + 8;
11621 }
11622 /* Calculate target store address, Rn +/- Rm, register offset. */
11623 /* U == 1. */
11624 if (bit (arm_insn_r->arm_insn, 23))
11625 {
11626 tgt_mem_addr = u_regval[0] + u_regval[1];
11627 }
11628 else
11629 {
11630 tgt_mem_addr = u_regval[1] - u_regval[0];
11631 }
11632
11633 switch (arm_insn_r->opcode)
11634 {
11635 /* STR. */
11636 case 8:
11637 case 12:
11638 /* STR. */
11639 case 9:
11640 case 13:
11641 /* STRT. */
11642 case 1:
11643 case 5:
11644 /* STR. */
11645 case 0:
11646 case 4:
11647 record_buf_mem[0] = 4;
11648 break;
11649
11650 /* STRB. */
11651 case 10:
11652 case 14:
11653 /* STRB. */
11654 case 11:
11655 case 15:
11656 /* STRBT. */
11657 case 3:
11658 case 7:
11659 /* STRB. */
11660 case 2:
11661 case 6:
11662 record_buf_mem[0] = 1;
11663 break;
11664
11665 default:
11666 gdb_assert_not_reached ("no decoding pattern found");
11667 break;
11668 }
11669 record_buf_mem[1] = tgt_mem_addr;
11670 arm_insn_r->mem_rec_count = 1;
11671
11672 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11673 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11674 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11675 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11676 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11677 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11678 )
11679 {
11680 /* Rn is going to be changed in pre-indexed mode and
11681 post-indexed mode as well. */
11682 record_buf[0] = reg_src2;
11683 arm_insn_r->reg_rec_count = 1;
11684 }
11685 }
11686 else
11687 {
11688 /* Store insn, scaled register offset; scaled pre-indexed. */
11689 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
11690 /* Get Rm. */
11691 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11692 /* Get Rn. */
11693 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11694 /* Get shift_imm. */
11695 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
11696 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11697 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
11698 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11699 /* Offset_12 used as shift. */
11700 switch (offset_12)
11701 {
11702 case 0:
11703 /* Offset_12 used as index. */
11704 offset_12 = u_regval[0] << shift_imm;
11705 break;
11706
11707 case 1:
11708 offset_12 = (!shift_imm)?0:u_regval[0] >> shift_imm;
11709 break;
11710
11711 case 2:
11712 if (!shift_imm)
11713 {
11714 if (bit (u_regval[0], 31))
11715 {
11716 offset_12 = 0xFFFFFFFF;
11717 }
11718 else
11719 {
11720 offset_12 = 0;
11721 }
11722 }
11723 else
11724 {
11725 /* This is arithmetic shift. */
11726 offset_12 = s_word >> shift_imm;
11727 }
11728 break;
11729
11730 case 3:
11731 if (!shift_imm)
11732 {
11733 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
11734 &u_regval[1]);
11735 /* Get C flag value and shift it by 31. */
11736 offset_12 = (((bit (u_regval[1], 29)) << 31) \
11737 | (u_regval[0]) >> 1);
11738 }
11739 else
11740 {
11741 offset_12 = (u_regval[0] >> shift_imm) \
11742 | (u_regval[0] <<
11743 (sizeof(uint32_t) - shift_imm));
11744 }
11745 break;
11746
11747 default:
11748 gdb_assert_not_reached ("no decoding pattern found");
11749 break;
11750 }
11751
11752 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11753 /* bit U set. */
11754 if (bit (arm_insn_r->arm_insn, 23))
11755 {
11756 tgt_mem_addr = u_regval[1] + offset_12;
11757 }
11758 else
11759 {
11760 tgt_mem_addr = u_regval[1] - offset_12;
11761 }
11762
11763 switch (arm_insn_r->opcode)
11764 {
11765 /* STR. */
11766 case 8:
11767 case 12:
11768 /* STR. */
11769 case 9:
11770 case 13:
11771 /* STRT. */
11772 case 1:
11773 case 5:
11774 /* STR. */
11775 case 0:
11776 case 4:
11777 record_buf_mem[0] = 4;
11778 break;
11779
11780 /* STRB. */
11781 case 10:
11782 case 14:
11783 /* STRB. */
11784 case 11:
11785 case 15:
11786 /* STRBT. */
11787 case 3:
11788 case 7:
11789 /* STRB. */
11790 case 2:
11791 case 6:
11792 record_buf_mem[0] = 1;
11793 break;
11794
11795 default:
11796 gdb_assert_not_reached ("no decoding pattern found");
11797 break;
11798 }
11799 record_buf_mem[1] = tgt_mem_addr;
11800 arm_insn_r->mem_rec_count = 1;
11801
11802 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11803 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11804 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11805 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11806 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11807 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11808 )
11809 {
11810 /* Rn is going to be changed in register scaled pre-indexed
11811 mode,and scaled post indexed mode. */
11812 record_buf[0] = reg_src2;
11813 arm_insn_r->reg_rec_count = 1;
11814 }
11815 }
11816 }
11817
11818 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11819 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11820 return 0;
11821 }
11822
11823 /* Handling opcode 100 insns. */
11824
11825 static int
11826 arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
11827 {
11828 struct regcache *reg_cache = arm_insn_r->regcache;
11829
11830 uint32_t register_list[16] = {0}, register_count = 0, register_bits = 0;
11831 uint32_t reg_src1 = 0, addr_mode = 0, no_of_regs = 0;
11832 uint32_t start_address = 0, index = 0;
11833 uint32_t record_buf[24], record_buf_mem[48];
11834
11835 ULONGEST u_regval[2] = {0};
11836
11837 /* This mode is exclusively for load and store multiple. */
11838 /* Handle incremenrt after/before and decrment after.before mode;
11839 Rn is changing depending on W bit, but as of now we store Rn too
11840 without optimization. */
11841
11842 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11843 {
11844 /* LDM (1,2,3) where LDM (3) changes CPSR too. */
11845
11846 if (bit (arm_insn_r->arm_insn, 20) && !bit (arm_insn_r->arm_insn, 22))
11847 {
11848 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
11849 no_of_regs = 15;
11850 }
11851 else
11852 {
11853 register_bits = bits (arm_insn_r->arm_insn, 0, 14);
11854 no_of_regs = 14;
11855 }
11856 /* Get Rn. */
11857 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11858 while (register_bits)
11859 {
11860 if (register_bits & 0x00000001)
11861 record_buf[index++] = register_count;
11862 register_bits = register_bits >> 1;
11863 register_count++;
11864 }
11865
11866 /* Extra space for Base Register and CPSR; wihtout optimization. */
11867 record_buf[index++] = reg_src1;
11868 record_buf[index++] = ARM_PS_REGNUM;
11869 arm_insn_r->reg_rec_count = index;
11870 }
11871 else
11872 {
11873 /* It handles both STM(1) and STM(2). */
11874 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
11875
11876 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
11877 /* Get Rn. */
11878 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11879 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11880 while (register_bits)
11881 {
11882 if (register_bits & 0x00000001)
11883 register_count++;
11884 register_bits = register_bits >> 1;
11885 }
11886
11887 switch (addr_mode)
11888 {
11889 /* Decrement after. */
11890 case 0:
11891 start_address = (u_regval[0]) - (register_count * 4) + 4;
11892 arm_insn_r->mem_rec_count = register_count;
11893 while (register_count)
11894 {
11895 record_buf_mem[(register_count * 2) - 1] = start_address;
11896 record_buf_mem[(register_count * 2) - 2] = 4;
11897 start_address = start_address + 4;
11898 register_count--;
11899 }
11900 break;
11901
11902 /* Increment after. */
11903 case 1:
11904 start_address = u_regval[0];
11905 arm_insn_r->mem_rec_count = register_count;
11906 while (register_count)
11907 {
11908 record_buf_mem[(register_count * 2) - 1] = start_address;
11909 record_buf_mem[(register_count * 2) - 2] = 4;
11910 start_address = start_address + 4;
11911 register_count--;
11912 }
11913 break;
11914
11915 /* Decrement before. */
11916 case 2:
11917
11918 start_address = (u_regval[0]) - (register_count * 4);
11919 arm_insn_r->mem_rec_count = register_count;
11920 while (register_count)
11921 {
11922 record_buf_mem[(register_count * 2) - 1] = start_address;
11923 record_buf_mem[(register_count * 2) - 2] = 4;
11924 start_address = start_address + 4;
11925 register_count--;
11926 }
11927 break;
11928
11929 /* Increment before. */
11930 case 3:
11931 start_address = u_regval[0] + 4;
11932 arm_insn_r->mem_rec_count = register_count;
11933 while (register_count)
11934 {
11935 record_buf_mem[(register_count * 2) - 1] = start_address;
11936 record_buf_mem[(register_count * 2) - 2] = 4;
11937 start_address = start_address + 4;
11938 register_count--;
11939 }
11940 break;
11941
11942 default:
11943 gdb_assert_not_reached ("no decoding pattern found");
11944 break;
11945 }
11946
11947 /* Base register also changes; based on condition and W bit. */
11948 /* We save it anyway without optimization. */
11949 record_buf[0] = reg_src1;
11950 arm_insn_r->reg_rec_count = 1;
11951 }
11952
11953 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11954 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11955 return 0;
11956 }
11957
11958 /* Handling opcode 101 insns. */
11959
11960 static int
11961 arm_record_b_bl (insn_decode_record *arm_insn_r)
11962 {
11963 uint32_t record_buf[8];
11964
11965 /* Handle B, BL, BLX(1) insns. */
11966 /* B simply branches so we do nothing here. */
11967 /* Note: BLX(1) doesnt fall here but instead it falls into
11968 extension space. */
11969 if (bit (arm_insn_r->arm_insn, 24))
11970 {
11971 record_buf[0] = ARM_LR_REGNUM;
11972 arm_insn_r->reg_rec_count = 1;
11973 }
11974
11975 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11976
11977 return 0;
11978 }
11979
11980 /* Handling opcode 110 insns. */
11981
11982 static int
11983 arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
11984 {
11985 printf_unfiltered (_("Process record does not support instruction "
11986 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11987 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11988
11989 return -1;
11990 }
11991
11992 /* Handling opcode 111 insns. */
11993
11994 static int
11995 arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
11996 {
11997 struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
11998 struct regcache *reg_cache = arm_insn_r->regcache;
11999 uint32_t ret = 0; /* function return value: -1:record failure ; 0:success */
12000 ULONGEST u_regval = 0;
12001
12002 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
12003
12004 /* Handle arm SWI/SVC system call instructions. */
12005 if (15 == arm_insn_r->opcode)
12006 {
12007 if (tdep->arm_syscall_record != NULL)
12008 {
12009 ULONGEST svc_operand, svc_number;
12010
12011 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
12012
12013 if (svc_operand) /* OABI. */
12014 svc_number = svc_operand - 0x900000;
12015 else /* EABI. */
12016 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
12017
12018 ret = tdep->arm_syscall_record (reg_cache, svc_number);
12019 }
12020 else
12021 {
12022 printf_unfiltered (_("no syscall record support\n"));
12023 ret = -1;
12024 }
12025 }
12026 else
12027 {
12028 arm_record_unsupported_insn (arm_insn_r);
12029 ret = -1;
12030 }
12031
12032 return ret;
12033 }
12034
12035 /* Handling opcode 000 insns. */
12036
12037 static int
12038 thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
12039 {
12040 uint32_t record_buf[8];
12041 uint32_t reg_src1 = 0;
12042
12043 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12044
12045 record_buf[0] = ARM_PS_REGNUM;
12046 record_buf[1] = reg_src1;
12047 thumb_insn_r->reg_rec_count = 2;
12048
12049 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12050
12051 return 0;
12052 }
12053
12054
12055 /* Handling opcode 001 insns. */
12056
12057 static int
12058 thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
12059 {
12060 uint32_t record_buf[8];
12061 uint32_t reg_src1 = 0;
12062
12063 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12064
12065 record_buf[0] = ARM_PS_REGNUM;
12066 record_buf[1] = reg_src1;
12067 thumb_insn_r->reg_rec_count = 2;
12068
12069 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12070
12071 return 0;
12072 }
12073
12074 /* Handling opcode 010 insns. */
12075
12076 static int
12077 thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
12078 {
12079 struct regcache *reg_cache = thumb_insn_r->regcache;
12080 uint32_t record_buf[8], record_buf_mem[8];
12081
12082 uint32_t reg_src1 = 0, reg_src2 = 0;
12083 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
12084
12085 ULONGEST u_regval[2] = {0};
12086
12087 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
12088
12089 if (bit (thumb_insn_r->arm_insn, 12))
12090 {
12091 /* Handle load/store register offset. */
12092 opcode2 = bits (thumb_insn_r->arm_insn, 9, 10);
12093 if (opcode2 >= 12 && opcode2 <= 15)
12094 {
12095 /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH. */
12096 reg_src1 = bits (thumb_insn_r->arm_insn,0, 2);
12097 record_buf[0] = reg_src1;
12098 thumb_insn_r->reg_rec_count = 1;
12099 }
12100 else if (opcode2 >= 8 && opcode2 <= 10)
12101 {
12102 /* STR(2), STRB(2), STRH(2) . */
12103 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12104 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
12105 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
12106 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
12107 if (8 == opcode2)
12108 record_buf_mem[0] = 4; /* STR (2). */
12109 else if (10 == opcode2)
12110 record_buf_mem[0] = 1; /* STRB (2). */
12111 else if (9 == opcode2)
12112 record_buf_mem[0] = 2; /* STRH (2). */
12113 record_buf_mem[1] = u_regval[0] + u_regval[1];
12114 thumb_insn_r->mem_rec_count = 1;
12115 }
12116 }
12117 else if (bit (thumb_insn_r->arm_insn, 11))
12118 {
12119 /* Handle load from literal pool. */
12120 /* LDR(3). */
12121 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12122 record_buf[0] = reg_src1;
12123 thumb_insn_r->reg_rec_count = 1;
12124 }
12125 else if (opcode1)
12126 {
12127 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
12128 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
12129 if ((3 == opcode2) && (!opcode3))
12130 {
12131 /* Branch with exchange. */
12132 record_buf[0] = ARM_PS_REGNUM;
12133 thumb_insn_r->reg_rec_count = 1;
12134 }
12135 else
12136 {
12137 /* Format 8; special data processing insns. */
12138 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12139 record_buf[0] = ARM_PS_REGNUM;
12140 record_buf[1] = reg_src1;
12141 thumb_insn_r->reg_rec_count = 2;
12142 }
12143 }
12144 else
12145 {
12146 /* Format 5; data processing insns. */
12147 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12148 if (bit (thumb_insn_r->arm_insn, 7))
12149 {
12150 reg_src1 = reg_src1 + 8;
12151 }
12152 record_buf[0] = ARM_PS_REGNUM;
12153 record_buf[1] = reg_src1;
12154 thumb_insn_r->reg_rec_count = 2;
12155 }
12156
12157 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12158 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12159 record_buf_mem);
12160
12161 return 0;
12162 }
12163
/* Handling opcode 011 insns; load/store with immediate offset.  */
12165
12166 static int
12167 thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
12168 {
12169 struct regcache *reg_cache = thumb_insn_r->regcache;
12170 uint32_t record_buf[8], record_buf_mem[8];
12171
12172 uint32_t reg_src1 = 0;
12173 uint32_t opcode = 0, immed_5 = 0;
12174
12175 ULONGEST u_regval = 0;
12176
12177 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12178
12179 if (opcode)
12180 {
12181 /* LDR(1). */
12182 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12183 record_buf[0] = reg_src1;
12184 thumb_insn_r->reg_rec_count = 1;
12185 }
12186 else
12187 {
12188 /* STR(1). */
12189 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12190 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
12191 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12192 record_buf_mem[0] = 4;
12193 record_buf_mem[1] = u_regval + (immed_5 * 4);
12194 thumb_insn_r->mem_rec_count = 1;
12195 }
12196
12197 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12198 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12199 record_buf_mem);
12200
12201 return 0;
12202 }
12203
12204 /* Handling opcode 100 insns. */
12205
12206 static int
12207 thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
12208 {
12209 struct regcache *reg_cache = thumb_insn_r->regcache;
12210 uint32_t record_buf[8], record_buf_mem[8];
12211
12212 uint32_t reg_src1 = 0;
12213 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
12214
12215 ULONGEST u_regval = 0;
12216
12217 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12218
12219 if (3 == opcode)
12220 {
12221 /* LDR(4). */
12222 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12223 record_buf[0] = reg_src1;
12224 thumb_insn_r->reg_rec_count = 1;
12225 }
12226 else if (1 == opcode)
12227 {
12228 /* LDRH(1). */
12229 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12230 record_buf[0] = reg_src1;
12231 thumb_insn_r->reg_rec_count = 1;
12232 }
12233 else if (2 == opcode)
12234 {
12235 /* STR(3). */
12236 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
12237 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12238 record_buf_mem[0] = 4;
12239 record_buf_mem[1] = u_regval + (immed_8 * 4);
12240 thumb_insn_r->mem_rec_count = 1;
12241 }
12242 else if (0 == opcode)
12243 {
12244 /* STRH(1). */
12245 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
12246 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12247 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12248 record_buf_mem[0] = 2;
12249 record_buf_mem[1] = u_regval + (immed_5 * 2);
12250 thumb_insn_r->mem_rec_count = 1;
12251 }
12252
12253 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12254 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12255 record_buf_mem);
12256
12257 return 0;
12258 }
12259
12260 /* Handling opcode 101 insns. */
12261
12262 static int
12263 thumb_record_misc (insn_decode_record *thumb_insn_r)
12264 {
12265 struct regcache *reg_cache = thumb_insn_r->regcache;
12266
12267 uint32_t opcode = 0, opcode1 = 0, opcode2 = 0;
12268 uint32_t register_bits = 0, register_count = 0;
12269 uint32_t register_list[8] = {0}, index = 0, start_address = 0;
12270 uint32_t record_buf[24], record_buf_mem[48];
12271 uint32_t reg_src1;
12272
12273 ULONGEST u_regval = 0;
12274
12275 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12276 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12277 opcode2 = bits (thumb_insn_r->arm_insn, 9, 12);
12278
12279 if (14 == opcode2)
12280 {
12281 /* POP. */
12282 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12283 while (register_bits)
12284 {
12285 if (register_bits & 0x00000001)
12286 record_buf[index++] = register_count;
12287 register_bits = register_bits >> 1;
12288 register_count++;
12289 }
12290 record_buf[index++] = ARM_PS_REGNUM;
12291 record_buf[index++] = ARM_SP_REGNUM;
12292 thumb_insn_r->reg_rec_count = index;
12293 }
12294 else if (10 == opcode2)
12295 {
12296 /* PUSH. */
12297 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12298 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12299 while (register_bits)
12300 {
12301 if (register_bits & 0x00000001)
12302 register_count++;
12303 register_bits = register_bits >> 1;
12304 }
12305 start_address = u_regval - \
12306 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
12307 thumb_insn_r->mem_rec_count = register_count;
12308 while (register_count)
12309 {
12310 record_buf_mem[(register_count * 2) - 1] = start_address;
12311 record_buf_mem[(register_count * 2) - 2] = 4;
12312 start_address = start_address + 4;
12313 register_count--;
12314 }
12315 record_buf[0] = ARM_SP_REGNUM;
12316 thumb_insn_r->reg_rec_count = 1;
12317 }
12318 else if (0x1E == opcode1)
12319 {
12320 /* BKPT insn. */
12321 /* Handle enhanced software breakpoint insn, BKPT. */
12322 /* CPSR is changed to be executed in ARM state, disabling normal
12323 interrupts, entering abort mode. */
12324 /* According to high vector configuration PC is set. */
12325 /* User hits breakpoint and type reverse, in that case, we need to go back with
12326 previous CPSR and Program Counter. */
12327 record_buf[0] = ARM_PS_REGNUM;
12328 record_buf[1] = ARM_LR_REGNUM;
12329 thumb_insn_r->reg_rec_count = 2;
12330 /* We need to save SPSR value, which is not yet done. */
12331 printf_unfiltered (_("Process record does not support instruction "
12332 "0x%0x at address %s.\n"),
12333 thumb_insn_r->arm_insn,
12334 paddress (thumb_insn_r->gdbarch,
12335 thumb_insn_r->this_addr));
12336 return -1;
12337 }
12338 else if ((0 == opcode) || (1 == opcode))
12339 {
12340 /* ADD(5), ADD(6). */
12341 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12342 record_buf[0] = reg_src1;
12343 thumb_insn_r->reg_rec_count = 1;
12344 }
12345 else if (2 == opcode)
12346 {
12347 /* ADD(7), SUB(4). */
12348 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12349 record_buf[0] = ARM_SP_REGNUM;
12350 thumb_insn_r->reg_rec_count = 1;
12351 }
12352
12353 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12354 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12355 record_buf_mem);
12356
12357 return 0;
12358 }
12359
12360 /* Handling opcode 110 insns. */
12361
12362 static int
12363 thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
12364 {
12365 struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
12366 struct regcache *reg_cache = thumb_insn_r->regcache;
12367
12368 uint32_t ret = 0; /* function return value: -1:record failure ; 0:success */
12369 uint32_t reg_src1 = 0;
12370 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
12371 uint32_t register_list[8] = {0}, index = 0, start_address = 0;
12372 uint32_t record_buf[24], record_buf_mem[48];
12373
12374 ULONGEST u_regval = 0;
12375
12376 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12377 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
12378
12379 if (1 == opcode2)
12380 {
12381
12382 /* LDMIA. */
12383 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12384 /* Get Rn. */
12385 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12386 while (register_bits)
12387 {
12388 if (register_bits & 0x00000001)
12389 record_buf[index++] = register_count;
12390 register_bits = register_bits >> 1;
12391 register_count++;
12392 }
12393 record_buf[index++] = reg_src1;
12394 thumb_insn_r->reg_rec_count = index;
12395 }
12396 else if (0 == opcode2)
12397 {
12398 /* It handles both STMIA. */
12399 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12400 /* Get Rn. */
12401 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12402 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12403 while (register_bits)
12404 {
12405 if (register_bits & 0x00000001)
12406 register_count++;
12407 register_bits = register_bits >> 1;
12408 }
12409 start_address = u_regval;
12410 thumb_insn_r->mem_rec_count = register_count;
12411 while (register_count)
12412 {
12413 record_buf_mem[(register_count * 2) - 1] = start_address;
12414 record_buf_mem[(register_count * 2) - 2] = 4;
12415 start_address = start_address + 4;
12416 register_count--;
12417 }
12418 }
12419 else if (0x1F == opcode1)
12420 {
12421 /* Handle arm syscall insn. */
12422 if (tdep->arm_syscall_record != NULL)
12423 {
12424 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
12425 ret = tdep->arm_syscall_record (reg_cache, u_regval);
12426 }
12427 else
12428 {
12429 printf_unfiltered (_("no syscall record support\n"));
12430 return -1;
12431 }
12432 }
12433
12434 /* B (1), conditional branch is automatically taken care in process_record,
12435 as PC is saved there. */
12436
12437 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12438 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12439 record_buf_mem);
12440
12441 return ret;
12442 }
12443
12444 /* Handling opcode 111 insns. */
12445
12446 static int
12447 thumb_record_branch (insn_decode_record *thumb_insn_r)
12448 {
12449 uint32_t record_buf[8];
12450 uint32_t bits_h = 0;
12451
12452 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
12453
12454 if (2 == bits_h || 3 == bits_h)
12455 {
12456 /* BL */
12457 record_buf[0] = ARM_LR_REGNUM;
12458 thumb_insn_r->reg_rec_count = 1;
12459 }
12460 else if (1 == bits_h)
12461 {
12462 /* BLX(1). */
12463 record_buf[0] = ARM_PS_REGNUM;
12464 record_buf[1] = ARM_LR_REGNUM;
12465 thumb_insn_r->reg_rec_count = 2;
12466 }
12467
12468 /* B(2) is automatically taken care in process_record, as PC is
12469 saved there. */
12470
12471 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12472
12473 return 0;
12474 }
12475
12476 /* Handler for thumb2 load/store multiple instructions. */
12477
static int
thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
{
  struct regcache *reg_cache = thumb2_insn_r->regcache;

  uint32_t reg_rn, op;
  uint32_t register_bits = 0, register_count = 0;
  uint32_t index = 0, start_address = 0;
  uint32_t record_buf[24], record_buf_mem[48];

  ULONGEST u_regval = 0;

  reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
  op = bits (thumb2_insn_r->arm_insn, 23, 24);

  if (0 == op || 3 == op)
    {
      if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
        {
          /* Handle RFE instruction: CPSR is restored from memory.  */
          record_buf[0] = ARM_PS_REGNUM;
          thumb2_insn_r->reg_rec_count = 1;
        }
      else
        {
          /* Handle SRS instruction after reading banked SP.  */
          return arm_record_unsupported_insn (thumb2_insn_r);
        }
    }
  else if (1 == op || 2 == op)
    {
      if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
        {
          /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions: every
             register in the 16-bit list, the base register and CPSR may
             change.  */
          register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
          while (register_bits)
            {
              if (register_bits & 0x00000001)
                record_buf[index++] = register_count;

              register_count++;
              register_bits = register_bits >> 1;
            }
          record_buf[index++] = reg_rn;
          record_buf[index++] = ARM_PS_REGNUM;
          thumb2_insn_r->reg_rec_count = index;
        }
      else
        {
          /* Handle STM/STMIA/STMEA and STMDB/STMFD: record the
             overwritten memory words.  */
          register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
          regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
          while (register_bits)
            {
              if (register_bits & 0x00000001)
                register_count++;

              register_bits = register_bits >> 1;
            }

          if (1 == op)
            {
              /* Start address calculation for STM/STMIA/STMEA: stores
                 go upward from Rn.  */
              start_address = u_regval;
            }
          else if (2 == op)
            {
              /* Start address calculation for STMDB/STMFD: stores go
                 downward, lowest address is Rn - 4 * register count.  */
              start_address = u_regval - register_count * 4;
            }

          thumb2_insn_r->mem_rec_count = register_count;
          while (register_count)
            {
              record_buf_mem[register_count * 2 - 1] = start_address;
              record_buf_mem[register_count * 2 - 2] = 4;
              start_address = start_address + 4;
              register_count--;
            }
          record_buf[0] = reg_rn;
          record_buf[1] = ARM_PS_REGNUM;
          thumb2_insn_r->reg_rec_count = 2;
        }
    }

  MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
             record_buf_mem);
  REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
             record_buf);
  return ARM_RECORD_SUCCESS;
}
12569
12570 /* Handler for thumb2 load/store (dual/exclusive) and table branch
12571 instructions. */
12572
12573 static int
12574 thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
12575 {
12576 struct regcache *reg_cache = thumb2_insn_r->regcache;
12577
12578 uint32_t reg_rd, reg_rn, offset_imm;
12579 uint32_t reg_dest1, reg_dest2;
12580 uint32_t address, offset_addr;
12581 uint32_t record_buf[8], record_buf_mem[8];
12582 uint32_t op1, op2, op3;
12583 LONGEST s_word;
12584
12585 ULONGEST u_regval[2];
12586
12587 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
12588 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
12589 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
12590
12591 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12592 {
12593 if(!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
12594 {
12595 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
12596 record_buf[0] = reg_dest1;
12597 record_buf[1] = ARM_PS_REGNUM;
12598 thumb2_insn_r->reg_rec_count = 2;
12599 }
12600
12601 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
12602 {
12603 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12604 record_buf[2] = reg_dest2;
12605 thumb2_insn_r->reg_rec_count = 3;
12606 }
12607 }
12608 else
12609 {
12610 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12611 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12612
12613 if (0 == op1 && 0 == op2)
12614 {
12615 /* Handle STREX. */
12616 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12617 address = u_regval[0] + (offset_imm * 4);
12618 record_buf_mem[0] = 4;
12619 record_buf_mem[1] = address;
12620 thumb2_insn_r->mem_rec_count = 1;
12621 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12622 record_buf[0] = reg_rd;
12623 thumb2_insn_r->reg_rec_count = 1;
12624 }
12625 else if (1 == op1 && 0 == op2)
12626 {
12627 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12628 record_buf[0] = reg_rd;
12629 thumb2_insn_r->reg_rec_count = 1;
12630 address = u_regval[0];
12631 record_buf_mem[1] = address;
12632
12633 if (4 == op3)
12634 {
12635 /* Handle STREXB. */
12636 record_buf_mem[0] = 1;
12637 thumb2_insn_r->mem_rec_count = 1;
12638 }
12639 else if (5 == op3)
12640 {
12641 /* Handle STREXH. */
12642 record_buf_mem[0] = 2 ;
12643 thumb2_insn_r->mem_rec_count = 1;
12644 }
12645 else if (7 == op3)
12646 {
12647 /* Handle STREXD. */
12648 address = u_regval[0];
12649 record_buf_mem[0] = 4;
12650 record_buf_mem[2] = 4;
12651 record_buf_mem[3] = address + 4;
12652 thumb2_insn_r->mem_rec_count = 2;
12653 }
12654 }
12655 else
12656 {
12657 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12658
12659 if (bit (thumb2_insn_r->arm_insn, 24))
12660 {
12661 if (bit (thumb2_insn_r->arm_insn, 23))
12662 offset_addr = u_regval[0] + (offset_imm * 4);
12663 else
12664 offset_addr = u_regval[0] - (offset_imm * 4);
12665
12666 address = offset_addr;
12667 }
12668 else
12669 address = u_regval[0];
12670
12671 record_buf_mem[0] = 4;
12672 record_buf_mem[1] = address;
12673 record_buf_mem[2] = 4;
12674 record_buf_mem[3] = address + 4;
12675 thumb2_insn_r->mem_rec_count = 2;
12676 record_buf[0] = reg_rn;
12677 thumb2_insn_r->reg_rec_count = 1;
12678 }
12679 }
12680
12681 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12682 record_buf);
12683 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12684 record_buf_mem);
12685 return ARM_RECORD_SUCCESS;
12686 }
12687
12688 /* Handler for thumb2 data processing (shift register and modified immediate)
12689 instructions. */
12690
12691 static int
12692 thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
12693 {
12694 uint32_t reg_rd, op;
12695 uint32_t record_buf[8];
12696
12697 op = bits (thumb2_insn_r->arm_insn, 21, 24);
12698 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12699
12700 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
12701 {
12702 record_buf[0] = ARM_PS_REGNUM;
12703 thumb2_insn_r->reg_rec_count = 1;
12704 }
12705 else
12706 {
12707 record_buf[0] = reg_rd;
12708 record_buf[1] = ARM_PS_REGNUM;
12709 thumb2_insn_r->reg_rec_count = 2;
12710 }
12711
12712 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12713 record_buf);
12714 return ARM_RECORD_SUCCESS;
12715 }
12716
12717 /* Generic handler for thumb2 instructions which effect destination and PS
12718 registers. */
12719
12720 static int
12721 thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
12722 {
12723 uint32_t reg_rd;
12724 uint32_t record_buf[8];
12725
12726 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12727
12728 record_buf[0] = reg_rd;
12729 record_buf[1] = ARM_PS_REGNUM;
12730 thumb2_insn_r->reg_rec_count = 2;
12731
12732 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12733 record_buf);
12734 return ARM_RECORD_SUCCESS;
12735 }
12736
12737 /* Handler for thumb2 branch and miscellaneous control instructions. */
12738
12739 static int
12740 thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
12741 {
12742 uint32_t op, op1, op2;
12743 uint32_t record_buf[8];
12744
12745 op = bits (thumb2_insn_r->arm_insn, 20, 26);
12746 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
12747 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12748
12749 /* Handle MSR insn. */
12750 if (!(op1 & 0x2) && 0x38 == op)
12751 {
12752 if (!(op2 & 0x3))
12753 {
12754 /* CPSR is going to be changed. */
12755 record_buf[0] = ARM_PS_REGNUM;
12756 thumb2_insn_r->reg_rec_count = 1;
12757 }
12758 else
12759 {
12760 arm_record_unsupported_insn(thumb2_insn_r);
12761 return -1;
12762 }
12763 }
12764 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
12765 {
12766 /* BLX. */
12767 record_buf[0] = ARM_PS_REGNUM;
12768 record_buf[1] = ARM_LR_REGNUM;
12769 thumb2_insn_r->reg_rec_count = 2;
12770 }
12771
12772 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12773 record_buf);
12774 return ARM_RECORD_SUCCESS;
12775 }
12776
12777 /* Handler for thumb2 store single data item instructions. */
12778
static int
thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
{
  struct regcache *reg_cache = thumb2_insn_r->regcache;

  uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
  uint32_t address, offset_addr;
  uint32_t record_buf[8], record_buf_mem[8];
  uint32_t op1, op2;

  ULONGEST u_regval[2];

  op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
  op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
  reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
  regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);

  /* First work out the effective store address.  */
  if (bit (thumb2_insn_r->arm_insn, 23))
    {
      /* 12-bit immediate form: the offset is always added to Rn.
	 NOTE(review): originally labelled "T2 encoding" — confirm
	 which encoding table variant this corresponds to.  */
      offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
      offset_addr = u_regval[0] + offset_imm;
      address = offset_addr;
    }
  else
    {
      /* Remaining forms: register offset, or 8-bit immediate with
	 optional pre/post indexing.  */
      if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
        {
          /* Handle STRB (register): address is Rn + (Rm << shift).  */
          reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
          regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
          shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
          offset_addr = u_regval[1] << shift_imm;
          address = u_regval[0] + offset_addr;
        }
      else
        {
          /* 8-bit immediate form.  Bit 10 selects pre-indexing (offset
	     applied before the access); bit 9 selects add vs subtract.  */
          offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
          if (bit (thumb2_insn_r->arm_insn, 10))
            {
              if (bit (thumb2_insn_r->arm_insn, 9))
                offset_addr = u_regval[0] + offset_imm;
              else
                offset_addr = u_regval[0] - offset_imm;

              address = offset_addr;
            }
          else
            address = u_regval[0];
        }
    }

  /* Then record the access width from op1.  */
  switch (op1)
    {
      /* Store byte instructions.  */
      case 4:
      case 0:
        record_buf_mem[0] = 1;
        break;
      /* Store half word instructions.  */
      case 1:
      case 5:
        record_buf_mem[0] = 2;
        break;
      /* Store word instructions.  */
      case 2:
      case 6:
        record_buf_mem[0] = 4;
        break;

      default:
        gdb_assert_not_reached ("no decoding pattern found");
        break;
    }

  /* The overwritten memory and the (possibly written-back) base
     register Rn are recorded.  */
  record_buf_mem[1] = address;
  thumb2_insn_r->mem_rec_count = 1;
  record_buf[0] = reg_rn;
  thumb2_insn_r->reg_rec_count = 1;

  REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
             record_buf);
  MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
             record_buf_mem);
  return ARM_RECORD_SUCCESS;
}
12866
12867 /* Handler for thumb2 load memory hints instructions. */
12868
12869 static int
12870 thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
12871 {
12872 uint32_t record_buf[8];
12873 uint32_t reg_rt, reg_rn;
12874
12875 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
12876 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12877
12878 if (ARM_PC_REGNUM != reg_rt)
12879 {
12880 record_buf[0] = reg_rt;
12881 record_buf[1] = reg_rn;
12882 record_buf[2] = ARM_PS_REGNUM;
12883 thumb2_insn_r->reg_rec_count = 3;
12884
12885 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12886 record_buf);
12887 return ARM_RECORD_SUCCESS;
12888 }
12889
12890 return ARM_RECORD_FAILURE;
12891 }
12892
12893 /* Handler for thumb2 load word instructions. */
12894
12895 static int
12896 thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
12897 {
12898 uint32_t opcode1 = 0, opcode2 = 0;
12899 uint32_t record_buf[8];
12900
12901 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
12902 record_buf[1] = ARM_PS_REGNUM;
12903 thumb2_insn_r->reg_rec_count = 2;
12904
12905 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12906 record_buf);
12907 return ARM_RECORD_SUCCESS;
12908 }
12909
12910 /* Handler for thumb2 long multiply, long multiply accumulate, and
12911 divide instructions. */
12912
12913 static int
12914 thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
12915 {
12916 uint32_t opcode1 = 0, opcode2 = 0;
12917 uint32_t record_buf[8];
12918 uint32_t reg_src1 = 0;
12919
12920 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
12921 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
12922
12923 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
12924 {
12925 /* Handle SMULL, UMULL, SMULAL. */
12926 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
12927 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12928 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12929 record_buf[2] = ARM_PS_REGNUM;
12930 thumb2_insn_r->reg_rec_count = 3;
12931 }
12932 else if (1 == opcode1 || 3 == opcode2)
12933 {
12934 /* Handle SDIV and UDIV. */
12935 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12936 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12937 record_buf[2] = ARM_PS_REGNUM;
12938 thumb2_insn_r->reg_rec_count = 3;
12939 }
12940 else
12941 return ARM_RECORD_FAILURE;
12942
12943 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12944 record_buf);
12945 return ARM_RECORD_SUCCESS;
12946 }
12947
12948 /* Decodes thumb2 instruction type and invokes its record handler. */
12949
static unsigned int
thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
{
  uint32_t op, op1, op2;

  /* Dispatch on the standard Thumb-2 top-level fields: op (bit 15 of
     the second halfword), op1 (bits 27-28) and op2 (bits 20-26).  */
  op = bit (thumb2_insn_r->arm_insn, 15);
  op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
  op2 = bits (thumb2_insn_r->arm_insn, 20, 26);

  if (op1 == 0x01)
    {
      if (!(op2 & 0x64 ))
        {
          /* Load/store multiple instruction.  */
          return thumb2_record_ld_st_multiple (thumb2_insn_r);
        }
      else if (!((op2 & 0x64) ^ 0x04))
        {
          /* Load/store (dual/exclusive) and table branch instruction.  */
          return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
        }
      else if (!((op2 & 0x20) ^ 0x20))
        {
          /* Data-processing (shifted register).  */
          return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
        }
      else if (op2 & 0x40)
        {
          /* Co-processor instructions.
	     NOTE(review): unlike the other branches this does not
	     return the handler's status; control falls through to the
	     final -1 below — confirm that is intended.  */
          arm_record_unsupported_insn (thumb2_insn_r);
        }
    }
  else if (op1 == 0x02)
    {
      if (op)
        {
          /* Branches and miscellaneous control instructions.  */
          return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
        }
      else if (op2 & 0x20)
        {
          /* Data-processing (plain binary immediate) instruction.  */
          return thumb2_record_ps_dest_generic (thumb2_insn_r);
        }
      else
        {
          /* Data-processing (modified immediate).  */
          return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
        }
    }
  else if (op1 == 0x03)
    {
      if (!(op2 & 0x71 ))
        {
          /* Store single data item.  */
          return thumb2_record_str_single_data (thumb2_insn_r);
        }
      else if (!((op2 & 0x71) ^ 0x10))
        {
          /* Advanced SIMD or structure load/store instructions.  */
          return arm_record_unsupported_insn (thumb2_insn_r);
        }
      else if (!((op2 & 0x67) ^ 0x01))
        {
          /* Load byte, memory hints instruction.  */
          return thumb2_record_ld_mem_hints (thumb2_insn_r);
        }
      else if (!((op2 & 0x67) ^ 0x03))
        {
          /* Load halfword, memory hints instruction.  */
          return thumb2_record_ld_mem_hints (thumb2_insn_r);
        }
      else if (!((op2 & 0x67) ^ 0x05))
        {
          /* Load word instruction.  */
          return thumb2_record_ld_word (thumb2_insn_r);
        }
      else if (!((op2 & 0x70) ^ 0x20))
        {
          /* Data-processing (register) instruction.  */
          return thumb2_record_ps_dest_generic (thumb2_insn_r);
        }
      else if (!((op2 & 0x78) ^ 0x30))
        {
          /* Multiply, multiply accumulate, abs diff instruction.  */
          return thumb2_record_ps_dest_generic (thumb2_insn_r);
        }
      else if (!((op2 & 0x78) ^ 0x38))
        {
          /* Long multiply, long multiply accumulate, and divide.  */
          return thumb2_record_lmul_lmla_div (thumb2_insn_r);
        }
      else if (op2 & 0x40)
        {
          /* Co-processor instructions.  */
          return arm_record_unsupported_insn (thumb2_insn_r);
        }
    }

  /* -1 wraps to UINT_MAX in this unsigned return type; callers only
     compare the result against ARM_RECORD_SUCCESS.  */
  return -1;
}
13051
/* Extracts an arm/thumb/thumb2 insn of INSN_SIZE bytes; returns 0 on success
   and a positive value on failure.  */
13054
13055 static int
13056 extract_arm_insn (insn_decode_record *insn_record, uint32_t insn_size)
13057 {
13058 gdb_byte buf[insn_size];
13059
13060 memset (&buf[0], 0, insn_size);
13061
13062 if (target_read_memory (insn_record->this_addr, &buf[0], insn_size))
13063 return 1;
13064 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
13065 insn_size,
13066 gdbarch_byte_order (insn_record->gdbarch));
13067 return 0;
13068 }
13069
13070 typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
13071
13072 /* Decode arm/thumb insn depending on condition cods and opcodes; and
13073 dispatch it. */
13074
13075 static int
13076 decode_insn (insn_decode_record *arm_record, record_type_t record_type,
13077 uint32_t insn_size)
13078 {
13079
13080 /* (Starting from numerical 0); bits 25, 26, 27 decodes type of arm instruction. */
13081 static const sti_arm_hdl_fp_t const arm_handle_insn[8] =
13082 {
13083 arm_record_data_proc_misc_ld_str, /* 000. */
13084 arm_record_data_proc_imm, /* 001. */
13085 arm_record_ld_st_imm_offset, /* 010. */
13086 arm_record_ld_st_reg_offset, /* 011. */
13087 arm_record_ld_st_multiple, /* 100. */
13088 arm_record_b_bl, /* 101. */
13089 arm_record_unsupported_insn, /* 110. */
13090 arm_record_coproc_data_proc /* 111. */
13091 };
13092
13093 /* (Starting from numerical 0); bits 13,14,15 decodes type of thumb instruction. */
13094 static const sti_arm_hdl_fp_t const thumb_handle_insn[8] =
13095 { \
13096 thumb_record_shift_add_sub, /* 000. */
13097 thumb_record_add_sub_cmp_mov, /* 001. */
13098 thumb_record_ld_st_reg_offset, /* 010. */
13099 thumb_record_ld_st_imm_offset, /* 011. */
13100 thumb_record_ld_st_stack, /* 100. */
13101 thumb_record_misc, /* 101. */
13102 thumb_record_ldm_stm_swi, /* 110. */
13103 thumb_record_branch /* 111. */
13104 };
13105
13106 uint32_t ret = 0; /* return value: negative:failure 0:success. */
13107 uint32_t insn_id = 0;
13108
13109 if (extract_arm_insn (arm_record, insn_size))
13110 {
13111 if (record_debug)
13112 {
13113 printf_unfiltered (_("Process record: error reading memory at "
13114 "addr %s len = %d.\n"),
13115 paddress (arm_record->gdbarch, arm_record->this_addr), insn_size);
13116 }
13117 return -1;
13118 }
13119 else if (ARM_RECORD == record_type)
13120 {
13121 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
13122 insn_id = bits (arm_record->arm_insn, 25, 27);
13123 ret = arm_record_extension_space (arm_record);
13124 /* If this insn has fallen into extension space
13125 then we need not decode it anymore. */
13126 if (ret != -1 && !INSN_RECORDED(arm_record))
13127 {
13128 ret = arm_handle_insn[insn_id] (arm_record);
13129 }
13130 }
13131 else if (THUMB_RECORD == record_type)
13132 {
13133 /* As thumb does not have condition codes, we set negative. */
13134 arm_record->cond = -1;
13135 insn_id = bits (arm_record->arm_insn, 13, 15);
13136 ret = thumb_handle_insn[insn_id] (arm_record);
13137 }
13138 else if (THUMB2_RECORD == record_type)
13139 {
13140 /* As thumb does not have condition codes, we set negative. */
13141 arm_record->cond = -1;
13142
13143 /* Swap first half of 32bit thumb instruction with second half. */
13144 arm_record->arm_insn
13145 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
13146
13147 insn_id = thumb2_record_decode_insn_handler (arm_record);
13148
13149 if (insn_id != ARM_RECORD_SUCCESS)
13150 {
13151 arm_record_unsupported_insn (arm_record);
13152 ret = -1;
13153 }
13154 }
13155 else
13156 {
13157 /* Throw assertion. */
13158 gdb_assert_not_reached ("not a valid instruction, could not decode");
13159 }
13160
13161 return ret;
13162 }
13163
13164
13165 /* Cleans up local record registers and memory allocations. */
13166
13167 static void
13168 deallocate_reg_mem (insn_decode_record *record)
13169 {
13170 xfree (record->arm_regs);
13171 xfree (record->arm_mems);
13172 }
13173
13174
13175 /* Parse the current instruction and record the values of the registers and
13176 memory that will be changed in current instruction to record_arch_list".
13177 Return -1 if something is wrong. */
13178
13179 int
13180 arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
13181 CORE_ADDR insn_addr)
13182 {
13183
13184 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
13185 uint32_t no_of_rec = 0;
13186 uint32_t ret = 0; /* return value: -1:record failure ; 0:success */
13187 ULONGEST t_bit = 0, insn_id = 0;
13188
13189 ULONGEST u_regval = 0;
13190
13191 insn_decode_record arm_record;
13192
13193 memset (&arm_record, 0, sizeof (insn_decode_record));
13194 arm_record.regcache = regcache;
13195 arm_record.this_addr = insn_addr;
13196 arm_record.gdbarch = gdbarch;
13197
13198
13199 if (record_debug > 1)
13200 {
13201 fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
13202 "addr = %s\n",
13203 paddress (gdbarch, arm_record.this_addr));
13204 }
13205
13206 if (extract_arm_insn (&arm_record, 2))
13207 {
13208 if (record_debug)
13209 {
13210 printf_unfiltered (_("Process record: error reading memory at "
13211 "addr %s len = %d.\n"),
13212 paddress (arm_record.gdbarch,
13213 arm_record.this_addr), 2);
13214 }
13215 return -1;
13216 }
13217
13218 /* Check the insn, whether it is thumb or arm one. */
13219
13220 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
13221 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
13222
13223
13224 if (!(u_regval & t_bit))
13225 {
13226 /* We are decoding arm insn. */
13227 ret = decode_insn (&arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
13228 }
13229 else
13230 {
13231 insn_id = bits (arm_record.arm_insn, 11, 15);
13232 /* is it thumb2 insn? */
13233 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
13234 {
13235 ret = decode_insn (&arm_record, THUMB2_RECORD,
13236 THUMB2_INSN_SIZE_BYTES);
13237 }
13238 else
13239 {
13240 /* We are decoding thumb insn. */
13241 ret = decode_insn (&arm_record, THUMB_RECORD, THUMB_INSN_SIZE_BYTES);
13242 }
13243 }
13244
13245 if (0 == ret)
13246 {
13247 /* Record registers. */
13248 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
13249 if (arm_record.arm_regs)
13250 {
13251 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
13252 {
13253 if (record_full_arch_list_add_reg
13254 (arm_record.regcache , arm_record.arm_regs[no_of_rec]))
13255 ret = -1;
13256 }
13257 }
13258 /* Record memories. */
13259 if (arm_record.arm_mems)
13260 {
13261 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
13262 {
13263 if (record_full_arch_list_add_mem
13264 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
13265 arm_record.arm_mems[no_of_rec].len))
13266 ret = -1;
13267 }
13268 }
13269
13270 if (record_full_arch_list_add_end ())
13271 ret = -1;
13272 }
13273
13274
13275 deallocate_reg_mem (&arm_record);
13276
13277 return ret;
13278 }
13279
This page took 0.508615 seconds and 4 git commands to generate.