Refactor disassembly code
[deliverable/binutils-gdb.git] / gdb / arm-tdep.c
CommitLineData
ed9a39eb 1/* Common target dependent code for GDB on ARM systems.
0fd88904 2
61baf725 3 Copyright (C) 1988-2017 Free Software Foundation, Inc.
c906108c 4
c5aa993b 5 This file is part of GDB.
c906108c 6
c5aa993b
JM
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
a9762ec7 9 the Free Software Foundation; either version 3 of the License, or
c5aa993b 10 (at your option) any later version.
c906108c 11
c5aa993b
JM
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
c906108c 16
c5aa993b 17 You should have received a copy of the GNU General Public License
a9762ec7 18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
c906108c 19
0baeab03
PA
20#include "defs.h"
21
0963b4bd 22#include <ctype.h> /* XXX for isupper (). */
34e8f22d 23
c906108c
SS
24#include "frame.h"
25#include "inferior.h"
45741a9c 26#include "infrun.h"
c906108c
SS
27#include "gdbcmd.h"
28#include "gdbcore.h"
0963b4bd 29#include "dis-asm.h" /* For register styles. */
e47ad6c0 30#include "disasm.h"
4e052eda 31#include "regcache.h"
54483882 32#include "reggroups.h"
d16aafd8 33#include "doublest.h"
fd0407d6 34#include "value.h"
34e8f22d 35#include "arch-utils.h"
4be87837 36#include "osabi.h"
eb5492fa
DJ
37#include "frame-unwind.h"
38#include "frame-base.h"
39#include "trad-frame.h"
842e1f1e
DJ
40#include "objfiles.h"
41#include "dwarf2-frame.h"
e4c16157 42#include "gdbtypes.h"
29d73ae4 43#include "prologue-value.h"
25f8c692 44#include "remote.h"
123dc839
DJ
45#include "target-descriptions.h"
46#include "user-regs.h"
0e9e9abd 47#include "observer.h"
34e8f22d 48
8689682c 49#include "arch/arm.h"
d9311bfa 50#include "arch/arm-get-next-pcs.h"
34e8f22d 51#include "arm-tdep.h"
26216b98 52#include "gdb/sim-arm.h"
34e8f22d 53
082fc60d
RE
54#include "elf-bfd.h"
55#include "coff/internal.h"
97e03143 56#include "elf/arm.h"
c906108c 57
60c5725c 58#include "vec.h"
26216b98 59
72508ac0 60#include "record.h"
d02ed0bb 61#include "record-full.h"
325fac50 62#include <algorithm>
72508ac0 63
0a69eedb
YQ
64#include "features/arm/arm-with-m.c"
65#include "features/arm/arm-with-m-fpa-layout.c"
66#include "features/arm/arm-with-m-vfp-d16.c"
67#include "features/arm/arm-with-iwmmxt.c"
68#include "features/arm/arm-with-vfpv2.c"
69#include "features/arm/arm-with-vfpv3.c"
70#include "features/arm/arm-with-neon.c"
9779414d 71
6529d2dd
AC
72static int arm_debug;
73
082fc60d
RE
74/* Macros for setting and testing a bit in a minimal symbol that marks
75 it as Thumb function. The MSB of the minimal symbol's "info" field
f594e5e9 76 is used for this purpose.
082fc60d
RE
77
78 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
f594e5e9 79 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
082fc60d 80
0963b4bd 81#define MSYMBOL_SET_SPECIAL(msym) \
b887350f 82 MSYMBOL_TARGET_FLAG_1 (msym) = 1
082fc60d
RE
83
84#define MSYMBOL_IS_SPECIAL(msym) \
b887350f 85 MSYMBOL_TARGET_FLAG_1 (msym)
082fc60d 86
60c5725c
DJ
87/* Per-objfile data used for mapping symbols. */
88static const struct objfile_data *arm_objfile_data_key;
89
90struct arm_mapping_symbol
91{
92 bfd_vma value;
93 char type;
94};
95typedef struct arm_mapping_symbol arm_mapping_symbol_s;
96DEF_VEC_O(arm_mapping_symbol_s);
97
98struct arm_per_objfile
99{
100 VEC(arm_mapping_symbol_s) **section_maps;
101};
102
afd7eef0
RE
103/* The list of available "set arm ..." and "show arm ..." commands. */
104static struct cmd_list_element *setarmcmdlist = NULL;
105static struct cmd_list_element *showarmcmdlist = NULL;
106
fd50bc42
RE
107/* The type of floating-point to use. Keep this in sync with enum
108 arm_float_model, and the help string in _initialize_arm_tdep. */
40478521 109static const char *const fp_model_strings[] =
fd50bc42
RE
110{
111 "auto",
112 "softfpa",
113 "fpa",
114 "softvfp",
28e97307
DJ
115 "vfp",
116 NULL
fd50bc42
RE
117};
118
119/* A variable that can be configured by the user. */
120static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
121static const char *current_fp_model = "auto";
122
28e97307 123/* The ABI to use. Keep this in sync with arm_abi_kind. */
40478521 124static const char *const arm_abi_strings[] =
28e97307
DJ
125{
126 "auto",
127 "APCS",
128 "AAPCS",
129 NULL
130};
131
132/* A variable that can be configured by the user. */
133static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
134static const char *arm_abi_string = "auto";
135
0428b8f5 136/* The execution mode to assume. */
40478521 137static const char *const arm_mode_strings[] =
0428b8f5
DJ
138 {
139 "auto",
140 "arm",
68770265
MGD
141 "thumb",
142 NULL
0428b8f5
DJ
143 };
144
145static const char *arm_fallback_mode_string = "auto";
146static const char *arm_force_mode_string = "auto";
147
94c30b78 148/* Number of different reg name sets (options). */
afd7eef0 149static int num_disassembly_options;
bc90b915 150
f32bf4a4
YQ
151/* The standard register names, and all the valid aliases for them. Note
152 that `fp', `sp' and `pc' are not added in this alias list, because they
153 have been added as builtin user registers in
154 std-regs.c:_initialize_frame_reg. */
123dc839
DJ
155static const struct
156{
157 const char *name;
158 int regnum;
159} arm_register_aliases[] = {
160 /* Basic register numbers. */
161 { "r0", 0 },
162 { "r1", 1 },
163 { "r2", 2 },
164 { "r3", 3 },
165 { "r4", 4 },
166 { "r5", 5 },
167 { "r6", 6 },
168 { "r7", 7 },
169 { "r8", 8 },
170 { "r9", 9 },
171 { "r10", 10 },
172 { "r11", 11 },
173 { "r12", 12 },
174 { "r13", 13 },
175 { "r14", 14 },
176 { "r15", 15 },
177 /* Synonyms (argument and variable registers). */
178 { "a1", 0 },
179 { "a2", 1 },
180 { "a3", 2 },
181 { "a4", 3 },
182 { "v1", 4 },
183 { "v2", 5 },
184 { "v3", 6 },
185 { "v4", 7 },
186 { "v5", 8 },
187 { "v6", 9 },
188 { "v7", 10 },
189 { "v8", 11 },
190 /* Other platform-specific names for r9. */
191 { "sb", 9 },
192 { "tr", 9 },
193 /* Special names. */
194 { "ip", 12 },
123dc839 195 { "lr", 14 },
123dc839
DJ
196 /* Names used by GCC (not listed in the ARM EABI). */
197 { "sl", 10 },
123dc839
DJ
198 /* A special name from the older ATPCS. */
199 { "wr", 7 },
200};
bc90b915 201
123dc839 202static const char *const arm_register_names[] =
da59e081
JM
203{"r0", "r1", "r2", "r3", /* 0 1 2 3 */
204 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
205 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
206 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
207 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
208 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
94c30b78 209 "fps", "cpsr" }; /* 24 25 */
ed9a39eb 210
afd7eef0
RE
211/* Valid register name styles. */
212static const char **valid_disassembly_styles;
ed9a39eb 213
afd7eef0
RE
214/* Disassembly style to use. Default to "std" register names. */
215static const char *disassembly_style;
96baa820 216
ed9a39eb 217/* This is used to keep the bfd arch_info in sync with the disassembly
afd7eef0
RE
218 style. */
219static void set_disassembly_style_sfunc(char *, int,
ed9a39eb 220 struct cmd_list_element *);
afd7eef0 221static void set_disassembly_style (void);
ed9a39eb 222
b508a996 223static void convert_from_extended (const struct floatformat *, const void *,
be8626e0 224 void *, int);
b508a996 225static void convert_to_extended (const struct floatformat *, void *,
be8626e0 226 const void *, int);
ed9a39eb 227
05d1431c
PA
228static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
229 struct regcache *regcache,
230 int regnum, gdb_byte *buf);
58d6951d
DJ
231static void arm_neon_quad_write (struct gdbarch *gdbarch,
232 struct regcache *regcache,
233 int regnum, const gdb_byte *buf);
234
e7cf25a8 235static CORE_ADDR
553cb527 236 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self);
e7cf25a8
YQ
237
238
d9311bfa
AT
239/* get_next_pcs operations. */
240static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = {
241 arm_get_next_pcs_read_memory_unsigned_integer,
242 arm_get_next_pcs_syscall_next_pc,
243 arm_get_next_pcs_addr_bits_remove,
ed443b61
YQ
244 arm_get_next_pcs_is_thumb,
245 NULL,
d9311bfa
AT
246};
247
9b8d791a 248struct arm_prologue_cache
c3b4394c 249{
eb5492fa
DJ
250 /* The stack pointer at the time this frame was created; i.e. the
251 caller's stack pointer when this function was called. It is used
252 to identify this frame. */
253 CORE_ADDR prev_sp;
254
4be43953
DJ
255 /* The frame base for this frame is just prev_sp - frame size.
256 FRAMESIZE is the distance from the frame pointer to the
257 initial stack pointer. */
eb5492fa 258
c3b4394c 259 int framesize;
eb5492fa
DJ
260
261 /* The register used to hold the frame pointer for this frame. */
c3b4394c 262 int framereg;
eb5492fa
DJ
263
264 /* Saved register offsets. */
265 struct trad_frame_saved_reg *saved_regs;
c3b4394c 266};
ed9a39eb 267
0d39a070
DJ
268static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
269 CORE_ADDR prologue_start,
270 CORE_ADDR prologue_end,
271 struct arm_prologue_cache *cache);
272
cca44b1b
JB
273/* Architecture version for displaced stepping. This effects the behaviour of
274 certain instructions, and really should not be hard-wired. */
275
276#define DISPLACED_STEPPING_ARCH_VERSION 5
277
94c30b78 278/* Set to true if the 32-bit mode is in use. */
c906108c
SS
279
280int arm_apcs_32 = 1;
281
9779414d
DJ
282/* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
283
478fd957 284int
9779414d
DJ
285arm_psr_thumb_bit (struct gdbarch *gdbarch)
286{
287 if (gdbarch_tdep (gdbarch)->is_m)
288 return XPSR_T;
289 else
290 return CPSR_T;
291}
292
d0e59a68
AT
293/* Determine if the processor is currently executing in Thumb mode. */
294
295int
296arm_is_thumb (struct regcache *regcache)
297{
298 ULONGEST cpsr;
299 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regcache));
300
301 cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
302
303 return (cpsr & t_bit) != 0;
304}
305
b39cc962
DJ
306/* Determine if FRAME is executing in Thumb mode. */
307
25b41d01 308int
b39cc962
DJ
309arm_frame_is_thumb (struct frame_info *frame)
310{
311 CORE_ADDR cpsr;
9779414d 312 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
b39cc962
DJ
313
314 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
315 directly (from a signal frame or dummy frame) or by interpreting
316 the saved LR (from a prologue or DWARF frame). So consult it and
317 trust the unwinders. */
318 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
319
9779414d 320 return (cpsr & t_bit) != 0;
b39cc962
DJ
321}
322
60c5725c
DJ
323/* Callback for VEC_lower_bound. */
324
325static inline int
326arm_compare_mapping_symbols (const struct arm_mapping_symbol *lhs,
327 const struct arm_mapping_symbol *rhs)
328{
329 return lhs->value < rhs->value;
330}
331
f9d67f43
DJ
332/* Search for the mapping symbol covering MEMADDR. If one is found,
333 return its type. Otherwise, return 0. If START is non-NULL,
334 set *START to the location of the mapping symbol. */
c906108c 335
f9d67f43
DJ
336static char
337arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
c906108c 338{
60c5725c 339 struct obj_section *sec;
0428b8f5 340
60c5725c
DJ
341 /* If there are mapping symbols, consult them. */
342 sec = find_pc_section (memaddr);
343 if (sec != NULL)
344 {
345 struct arm_per_objfile *data;
346 VEC(arm_mapping_symbol_s) *map;
aded6f54
PA
347 struct arm_mapping_symbol map_key = { memaddr - obj_section_addr (sec),
348 0 };
60c5725c
DJ
349 unsigned int idx;
350
9a3c8263
SM
351 data = (struct arm_per_objfile *) objfile_data (sec->objfile,
352 arm_objfile_data_key);
60c5725c
DJ
353 if (data != NULL)
354 {
355 map = data->section_maps[sec->the_bfd_section->index];
356 if (!VEC_empty (arm_mapping_symbol_s, map))
357 {
358 struct arm_mapping_symbol *map_sym;
359
360 idx = VEC_lower_bound (arm_mapping_symbol_s, map, &map_key,
361 arm_compare_mapping_symbols);
362
363 /* VEC_lower_bound finds the earliest ordered insertion
364 point. If the following symbol starts at this exact
365 address, we use that; otherwise, the preceding
366 mapping symbol covers this address. */
367 if (idx < VEC_length (arm_mapping_symbol_s, map))
368 {
369 map_sym = VEC_index (arm_mapping_symbol_s, map, idx);
370 if (map_sym->value == map_key.value)
f9d67f43
DJ
371 {
372 if (start)
373 *start = map_sym->value + obj_section_addr (sec);
374 return map_sym->type;
375 }
60c5725c
DJ
376 }
377
378 if (idx > 0)
379 {
380 map_sym = VEC_index (arm_mapping_symbol_s, map, idx - 1);
f9d67f43
DJ
381 if (start)
382 *start = map_sym->value + obj_section_addr (sec);
383 return map_sym->type;
60c5725c
DJ
384 }
385 }
386 }
387 }
388
f9d67f43
DJ
389 return 0;
390}
391
392/* Determine if the program counter specified in MEMADDR is in a Thumb
393 function. This function should be called for addresses unrelated to
394 any executing frame; otherwise, prefer arm_frame_is_thumb. */
395
e3039479 396int
9779414d 397arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
f9d67f43 398{
7cbd4a93 399 struct bound_minimal_symbol sym;
f9d67f43 400 char type;
a42244db
YQ
401 struct displaced_step_closure* dsc
402 = get_displaced_step_closure_by_addr(memaddr);
403
404 /* If checking the mode of displaced instruction in copy area, the mode
405 should be determined by instruction on the original address. */
406 if (dsc)
407 {
408 if (debug_displaced)
409 fprintf_unfiltered (gdb_stdlog,
410 "displaced: check mode of %.8lx instead of %.8lx\n",
411 (unsigned long) dsc->insn_addr,
412 (unsigned long) memaddr);
413 memaddr = dsc->insn_addr;
414 }
f9d67f43
DJ
415
416 /* If bit 0 of the address is set, assume this is a Thumb address. */
417 if (IS_THUMB_ADDR (memaddr))
418 return 1;
419
420 /* If the user wants to override the symbol table, let him. */
421 if (strcmp (arm_force_mode_string, "arm") == 0)
422 return 0;
423 if (strcmp (arm_force_mode_string, "thumb") == 0)
424 return 1;
425
9779414d
DJ
426 /* ARM v6-M and v7-M are always in Thumb mode. */
427 if (gdbarch_tdep (gdbarch)->is_m)
428 return 1;
429
f9d67f43
DJ
430 /* If there are mapping symbols, consult them. */
431 type = arm_find_mapping_symbol (memaddr, NULL);
432 if (type)
433 return type == 't';
434
ed9a39eb 435 /* Thumb functions have a "special" bit set in minimal symbols. */
c906108c 436 sym = lookup_minimal_symbol_by_pc (memaddr);
7cbd4a93
TT
437 if (sym.minsym)
438 return (MSYMBOL_IS_SPECIAL (sym.minsym));
0428b8f5
DJ
439
440 /* If the user wants to override the fallback mode, let them. */
441 if (strcmp (arm_fallback_mode_string, "arm") == 0)
442 return 0;
443 if (strcmp (arm_fallback_mode_string, "thumb") == 0)
444 return 1;
445
446 /* If we couldn't find any symbol, but we're talking to a running
447 target, then trust the current value of $cpsr. This lets
448 "display/i $pc" always show the correct mode (though if there is
449 a symbol table we will not reach here, so it still may not be
18819fa6 450 displayed in the mode it will be executed). */
0428b8f5 451 if (target_has_registers)
18819fa6 452 return arm_frame_is_thumb (get_current_frame ());
0428b8f5
DJ
453
454 /* Otherwise we're out of luck; we assume ARM. */
455 return 0;
c906108c
SS
456}
457
ca90e760
FH
458/* Determine if the address specified equals any of these magic return
459 values, called EXC_RETURN, defined by the ARM v6-M and v7-M
460 architectures.
461
462 From ARMv6-M Reference Manual B1.5.8
463 Table B1-5 Exception return behavior
464
465 EXC_RETURN Return To Return Stack
466 0xFFFFFFF1 Handler mode Main
467 0xFFFFFFF9 Thread mode Main
468 0xFFFFFFFD Thread mode Process
469
470 From ARMv7-M Reference Manual B1.5.8
471 Table B1-8 EXC_RETURN definition of exception return behavior, no FP
472
473 EXC_RETURN Return To Return Stack
474 0xFFFFFFF1 Handler mode Main
475 0xFFFFFFF9 Thread mode Main
476 0xFFFFFFFD Thread mode Process
477
478 Table B1-9 EXC_RETURN definition of exception return behavior, with
479 FP
480
481 EXC_RETURN Return To Return Stack Frame Type
482 0xFFFFFFE1 Handler mode Main Extended
483 0xFFFFFFE9 Thread mode Main Extended
484 0xFFFFFFED Thread mode Process Extended
485 0xFFFFFFF1 Handler mode Main Basic
486 0xFFFFFFF9 Thread mode Main Basic
487 0xFFFFFFFD Thread mode Process Basic
488
489 For more details see "B1.5.8 Exception return behavior"
490 in both ARMv6-M and ARMv7-M Architecture Reference Manuals. */
491
492static int
493arm_m_addr_is_magic (CORE_ADDR addr)
494{
495 switch (addr)
496 {
497 /* Values from Tables in B1.5.8 the EXC_RETURN definitions of
498 the exception return behavior. */
499 case 0xffffffe1:
500 case 0xffffffe9:
501 case 0xffffffed:
502 case 0xfffffff1:
503 case 0xfffffff9:
504 case 0xfffffffd:
505 /* Address is magic. */
506 return 1;
507
508 default:
509 /* Address is not magic. */
510 return 0;
511 }
512}
513
181c1381 514/* Remove useless bits from addresses in a running program. */
34e8f22d 515static CORE_ADDR
24568a2c 516arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
c906108c 517{
2ae28aa9
YQ
518 /* On M-profile devices, do not strip the low bit from EXC_RETURN
519 (the magic exception return address). */
520 if (gdbarch_tdep (gdbarch)->is_m
ca90e760 521 && arm_m_addr_is_magic (val))
2ae28aa9
YQ
522 return val;
523
a3a2ee65 524 if (arm_apcs_32)
dd6be234 525 return UNMAKE_THUMB_ADDR (val);
c906108c 526 else
a3a2ee65 527 return (val & 0x03fffffc);
c906108c
SS
528}
529
0d39a070 530/* Return 1 if PC is the start of a compiler helper function which
e0634ccf
UW
531 can be safely ignored during prologue skipping. IS_THUMB is true
532 if the function is known to be a Thumb function due to the way it
533 is being called. */
0d39a070 534static int
e0634ccf 535skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
0d39a070 536{
e0634ccf 537 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7cbd4a93 538 struct bound_minimal_symbol msym;
0d39a070
DJ
539
540 msym = lookup_minimal_symbol_by_pc (pc);
7cbd4a93 541 if (msym.minsym != NULL
77e371c0 542 && BMSYMBOL_VALUE_ADDRESS (msym) == pc
efd66ac6 543 && MSYMBOL_LINKAGE_NAME (msym.minsym) != NULL)
e0634ccf 544 {
efd66ac6 545 const char *name = MSYMBOL_LINKAGE_NAME (msym.minsym);
0d39a070 546
e0634ccf
UW
547 /* The GNU linker's Thumb call stub to foo is named
548 __foo_from_thumb. */
549 if (strstr (name, "_from_thumb") != NULL)
550 name += 2;
0d39a070 551
e0634ccf
UW
552 /* On soft-float targets, __truncdfsf2 is called to convert promoted
553 arguments to their argument types in non-prototyped
554 functions. */
61012eef 555 if (startswith (name, "__truncdfsf2"))
e0634ccf 556 return 1;
61012eef 557 if (startswith (name, "__aeabi_d2f"))
e0634ccf 558 return 1;
0d39a070 559
e0634ccf 560 /* Internal functions related to thread-local storage. */
61012eef 561 if (startswith (name, "__tls_get_addr"))
e0634ccf 562 return 1;
61012eef 563 if (startswith (name, "__aeabi_read_tp"))
e0634ccf
UW
564 return 1;
565 }
566 else
567 {
568 /* If we run against a stripped glibc, we may be unable to identify
569 special functions by name. Check for one important case,
570 __aeabi_read_tp, by comparing the *code* against the default
571 implementation (this is hand-written ARM assembler in glibc). */
572
573 if (!is_thumb
198cd59d 574 && read_code_unsigned_integer (pc, 4, byte_order_for_code)
e0634ccf 575 == 0xe3e00a0f /* mov r0, #0xffff0fff */
198cd59d 576 && read_code_unsigned_integer (pc + 4, 4, byte_order_for_code)
e0634ccf
UW
577 == 0xe240f01f) /* sub pc, r0, #31 */
578 return 1;
579 }
ec3d575a 580
0d39a070
DJ
581 return 0;
582}
583
621c6d5b
YQ
584/* Extract the immediate from instruction movw/movt of encoding T. INSN1 is
585 the first 16-bit of instruction, and INSN2 is the second 16-bit of
586 instruction. */
587#define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
588 ((bits ((insn1), 0, 3) << 12) \
589 | (bits ((insn1), 10, 10) << 11) \
590 | (bits ((insn2), 12, 14) << 8) \
591 | bits ((insn2), 0, 7))
592
593/* Extract the immediate from instruction movw/movt of encoding A. INSN is
594 the 32-bit instruction. */
595#define EXTRACT_MOVW_MOVT_IMM_A(insn) \
596 ((bits ((insn), 16, 19) << 12) \
597 | bits ((insn), 0, 11))
598
ec3d575a
UW
599/* Decode immediate value; implements ThumbExpandImmediate pseudo-op. */
600
601static unsigned int
602thumb_expand_immediate (unsigned int imm)
603{
604 unsigned int count = imm >> 7;
605
606 if (count < 8)
607 switch (count / 2)
608 {
609 case 0:
610 return imm & 0xff;
611 case 1:
612 return (imm & 0xff) | ((imm & 0xff) << 16);
613 case 2:
614 return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
615 case 3:
616 return (imm & 0xff) | ((imm & 0xff) << 8)
617 | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
618 }
619
620 return (0x80 | (imm & 0x7f)) << (32 - count);
621}
622
540314bd
YQ
623/* Return 1 if the 16-bit Thumb instruction INSN restores SP in
624 epilogue, 0 otherwise. */
625
626static int
627thumb_instruction_restores_sp (unsigned short insn)
628{
629 return (insn == 0x46bd /* mov sp, r7 */
630 || (insn & 0xff80) == 0xb000 /* add sp, imm */
631 || (insn & 0xfe00) == 0xbc00); /* pop <registers> */
632}
633
29d73ae4
DJ
634/* Analyze a Thumb prologue, looking for a recognizable stack frame
635 and frame pointer. Scan until we encounter a store that could
0d39a070
DJ
636 clobber the stack frame unexpectedly, or an unknown instruction.
637 Return the last address which is definitely safe to skip for an
638 initial breakpoint. */
c906108c
SS
639
640static CORE_ADDR
29d73ae4
DJ
641thumb_analyze_prologue (struct gdbarch *gdbarch,
642 CORE_ADDR start, CORE_ADDR limit,
643 struct arm_prologue_cache *cache)
c906108c 644{
0d39a070 645 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
e17a4113 646 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
29d73ae4
DJ
647 int i;
648 pv_t regs[16];
649 struct pv_area *stack;
650 struct cleanup *back_to;
651 CORE_ADDR offset;
ec3d575a 652 CORE_ADDR unrecognized_pc = 0;
da3c6d4a 653
29d73ae4
DJ
654 for (i = 0; i < 16; i++)
655 regs[i] = pv_register (i, 0);
55f960e1 656 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
29d73ae4
DJ
657 back_to = make_cleanup_free_pv_area (stack);
658
29d73ae4 659 while (start < limit)
c906108c 660 {
29d73ae4
DJ
661 unsigned short insn;
662
198cd59d 663 insn = read_code_unsigned_integer (start, 2, byte_order_for_code);
9d4fde75 664
94c30b78 665 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
da59e081 666 {
29d73ae4
DJ
667 int regno;
668 int mask;
4be43953
DJ
669
670 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
671 break;
29d73ae4
DJ
672
673 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
674 whether to save LR (R14). */
675 mask = (insn & 0xff) | ((insn & 0x100) << 6);
676
677 /* Calculate offsets of saved R0-R7 and LR. */
678 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
679 if (mask & (1 << regno))
680 {
29d73ae4
DJ
681 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
682 -4);
683 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
684 }
da59e081 685 }
1db01f22 686 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
da59e081 687 {
29d73ae4 688 offset = (insn & 0x7f) << 2; /* get scaled offset */
1db01f22
YQ
689 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
690 -offset);
da59e081 691 }
808f7ab1
YQ
692 else if (thumb_instruction_restores_sp (insn))
693 {
694 /* Don't scan past the epilogue. */
695 break;
696 }
0d39a070
DJ
697 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
698 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
699 (insn & 0xff) << 2);
700 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
701 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
702 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
703 bits (insn, 6, 8));
704 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
705 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
706 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
707 bits (insn, 0, 7));
708 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
709 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
710 && pv_is_constant (regs[bits (insn, 3, 5)]))
711 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
712 regs[bits (insn, 6, 8)]);
713 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
714 && pv_is_constant (regs[bits (insn, 3, 6)]))
715 {
716 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
717 int rm = bits (insn, 3, 6);
718 regs[rd] = pv_add (regs[rd], regs[rm]);
719 }
29d73ae4 720 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
da59e081 721 {
29d73ae4
DJ
722 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
723 int src_reg = (insn & 0x78) >> 3;
724 regs[dst_reg] = regs[src_reg];
da59e081 725 }
29d73ae4 726 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
da59e081 727 {
29d73ae4
DJ
728 /* Handle stores to the stack. Normally pushes are used,
729 but with GCC -mtpcs-frame, there may be other stores
730 in the prologue to create the frame. */
731 int regno = (insn >> 8) & 0x7;
732 pv_t addr;
733
734 offset = (insn & 0xff) << 2;
735 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
736
737 if (pv_area_store_would_trash (stack, addr))
738 break;
739
740 pv_area_store (stack, addr, 4, regs[regno]);
da59e081 741 }
0d39a070
DJ
742 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
743 {
744 int rd = bits (insn, 0, 2);
745 int rn = bits (insn, 3, 5);
746 pv_t addr;
747
748 offset = bits (insn, 6, 10) << 2;
749 addr = pv_add_constant (regs[rn], offset);
750
751 if (pv_area_store_would_trash (stack, addr))
752 break;
753
754 pv_area_store (stack, addr, 4, regs[rd]);
755 }
756 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
757 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
758 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
759 /* Ignore stores of argument registers to the stack. */
760 ;
761 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
762 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
763 /* Ignore block loads from the stack, potentially copying
764 parameters from memory. */
765 ;
766 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
767 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
768 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
769 /* Similarly ignore single loads from the stack. */
770 ;
771 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
772 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
773 /* Skip register copies, i.e. saves to another register
774 instead of the stack. */
775 ;
776 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
777 /* Recognize constant loads; even with small stacks these are necessary
778 on Thumb. */
779 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
780 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
781 {
782 /* Constant pool loads, for the same reason. */
783 unsigned int constant;
784 CORE_ADDR loc;
785
786 loc = start + 4 + bits (insn, 0, 7) * 4;
787 constant = read_memory_unsigned_integer (loc, 4, byte_order);
788 regs[bits (insn, 8, 10)] = pv_constant (constant);
789 }
db24da6d 790 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
0d39a070 791 {
0d39a070
DJ
792 unsigned short inst2;
793
198cd59d
YQ
794 inst2 = read_code_unsigned_integer (start + 2, 2,
795 byte_order_for_code);
0d39a070
DJ
796
797 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
798 {
799 /* BL, BLX. Allow some special function calls when
800 skipping the prologue; GCC generates these before
801 storing arguments to the stack. */
802 CORE_ADDR nextpc;
803 int j1, j2, imm1, imm2;
804
805 imm1 = sbits (insn, 0, 10);
806 imm2 = bits (inst2, 0, 10);
807 j1 = bit (inst2, 13);
808 j2 = bit (inst2, 11);
809
810 offset = ((imm1 << 12) + (imm2 << 1));
811 offset ^= ((!j2) << 22) | ((!j1) << 23);
812
813 nextpc = start + 4 + offset;
814 /* For BLX make sure to clear the low bits. */
815 if (bit (inst2, 12) == 0)
816 nextpc = nextpc & 0xfffffffc;
817
e0634ccf
UW
818 if (!skip_prologue_function (gdbarch, nextpc,
819 bit (inst2, 12) != 0))
0d39a070
DJ
820 break;
821 }
ec3d575a 822
0963b4bd
MS
823 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
824 { registers } */
ec3d575a
UW
825 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
826 {
827 pv_t addr = regs[bits (insn, 0, 3)];
828 int regno;
829
830 if (pv_area_store_would_trash (stack, addr))
831 break;
832
833 /* Calculate offsets of saved registers. */
834 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
835 if (inst2 & (1 << regno))
836 {
837 addr = pv_add_constant (addr, -4);
838 pv_area_store (stack, addr, 4, regs[regno]);
839 }
840
841 if (insn & 0x0020)
842 regs[bits (insn, 0, 3)] = addr;
843 }
844
0963b4bd
MS
845 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
846 [Rn, #+/-imm]{!} */
ec3d575a
UW
847 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
848 {
849 int regno1 = bits (inst2, 12, 15);
850 int regno2 = bits (inst2, 8, 11);
851 pv_t addr = regs[bits (insn, 0, 3)];
852
853 offset = inst2 & 0xff;
854 if (insn & 0x0080)
855 addr = pv_add_constant (addr, offset);
856 else
857 addr = pv_add_constant (addr, -offset);
858
859 if (pv_area_store_would_trash (stack, addr))
860 break;
861
862 pv_area_store (stack, addr, 4, regs[regno1]);
863 pv_area_store (stack, pv_add_constant (addr, 4),
864 4, regs[regno2]);
865
866 if (insn & 0x0020)
867 regs[bits (insn, 0, 3)] = addr;
868 }
869
870 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
871 && (inst2 & 0x0c00) == 0x0c00
872 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
873 {
874 int regno = bits (inst2, 12, 15);
875 pv_t addr = regs[bits (insn, 0, 3)];
876
877 offset = inst2 & 0xff;
878 if (inst2 & 0x0200)
879 addr = pv_add_constant (addr, offset);
880 else
881 addr = pv_add_constant (addr, -offset);
882
883 if (pv_area_store_would_trash (stack, addr))
884 break;
885
886 pv_area_store (stack, addr, 4, regs[regno]);
887
888 if (inst2 & 0x0100)
889 regs[bits (insn, 0, 3)] = addr;
890 }
891
892 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
893 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
894 {
895 int regno = bits (inst2, 12, 15);
896 pv_t addr;
897
898 offset = inst2 & 0xfff;
899 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
900
901 if (pv_area_store_would_trash (stack, addr))
902 break;
903
904 pv_area_store (stack, addr, 4, regs[regno]);
905 }
906
907 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
0d39a070 908 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
ec3d575a 909 /* Ignore stores of argument registers to the stack. */
0d39a070 910 ;
ec3d575a
UW
911
912 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
913 && (inst2 & 0x0d00) == 0x0c00
0d39a070 914 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
ec3d575a 915 /* Ignore stores of argument registers to the stack. */
0d39a070 916 ;
ec3d575a 917
0963b4bd
MS
918 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
919 { registers } */
ec3d575a
UW
920 && (inst2 & 0x8000) == 0x0000
921 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
922 /* Ignore block loads from the stack, potentially copying
923 parameters from memory. */
0d39a070 924 ;
ec3d575a 925
0963b4bd
MS
926 else if ((insn & 0xffb0) == 0xe950 /* ldrd Rt, Rt2,
927 [Rn, #+/-imm] */
0d39a070 928 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
ec3d575a 929 /* Similarly ignore dual loads from the stack. */
0d39a070 930 ;
ec3d575a
UW
931
932 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
933 && (inst2 & 0x0d00) == 0x0c00
0d39a070 934 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
ec3d575a 935 /* Similarly ignore single loads from the stack. */
0d39a070 936 ;
ec3d575a
UW
937
938 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
0d39a070 939 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
ec3d575a 940 /* Similarly ignore single loads from the stack. */
0d39a070 941 ;
ec3d575a
UW
942
943 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
944 && (inst2 & 0x8000) == 0x0000)
945 {
946 unsigned int imm = ((bits (insn, 10, 10) << 11)
947 | (bits (inst2, 12, 14) << 8)
948 | bits (inst2, 0, 7));
949
950 regs[bits (inst2, 8, 11)]
951 = pv_add_constant (regs[bits (insn, 0, 3)],
952 thumb_expand_immediate (imm));
953 }
954
955 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
956 && (inst2 & 0x8000) == 0x0000)
0d39a070 957 {
ec3d575a
UW
958 unsigned int imm = ((bits (insn, 10, 10) << 11)
959 | (bits (inst2, 12, 14) << 8)
960 | bits (inst2, 0, 7));
961
962 regs[bits (inst2, 8, 11)]
963 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
964 }
965
966 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
967 && (inst2 & 0x8000) == 0x0000)
968 {
969 unsigned int imm = ((bits (insn, 10, 10) << 11)
970 | (bits (inst2, 12, 14) << 8)
971 | bits (inst2, 0, 7));
972
973 regs[bits (inst2, 8, 11)]
974 = pv_add_constant (regs[bits (insn, 0, 3)],
975 - (CORE_ADDR) thumb_expand_immediate (imm));
976 }
977
978 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
979 && (inst2 & 0x8000) == 0x0000)
980 {
981 unsigned int imm = ((bits (insn, 10, 10) << 11)
982 | (bits (inst2, 12, 14) << 8)
983 | bits (inst2, 0, 7));
984
985 regs[bits (inst2, 8, 11)]
986 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
987 }
988
989 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
990 {
991 unsigned int imm = ((bits (insn, 10, 10) << 11)
992 | (bits (inst2, 12, 14) << 8)
993 | bits (inst2, 0, 7));
994
995 regs[bits (inst2, 8, 11)]
996 = pv_constant (thumb_expand_immediate (imm));
997 }
998
999 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
1000 {
621c6d5b
YQ
1001 unsigned int imm
1002 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
ec3d575a
UW
1003
1004 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1005 }
1006
1007 else if (insn == 0xea5f /* mov.w Rd,Rm */
1008 && (inst2 & 0xf0f0) == 0)
1009 {
1010 int dst_reg = (inst2 & 0x0f00) >> 8;
1011 int src_reg = inst2 & 0xf;
1012 regs[dst_reg] = regs[src_reg];
1013 }
1014
1015 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1016 {
1017 /* Constant pool loads. */
1018 unsigned int constant;
1019 CORE_ADDR loc;
1020
cac395ea 1021 offset = bits (inst2, 0, 11);
ec3d575a
UW
1022 if (insn & 0x0080)
1023 loc = start + 4 + offset;
1024 else
1025 loc = start + 4 - offset;
1026
1027 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1028 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1029 }
1030
1031 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1032 {
1033 /* Constant pool loads. */
1034 unsigned int constant;
1035 CORE_ADDR loc;
1036
cac395ea 1037 offset = bits (inst2, 0, 7) << 2;
ec3d575a
UW
1038 if (insn & 0x0080)
1039 loc = start + 4 + offset;
1040 else
1041 loc = start + 4 - offset;
1042
1043 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1044 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1045
1046 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1047 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1048 }
1049
1050 else if (thumb2_instruction_changes_pc (insn, inst2))
1051 {
1052 /* Don't scan past anything that might change control flow. */
0d39a070
DJ
1053 break;
1054 }
ec3d575a
UW
1055 else
1056 {
1057 /* The optimizer might shove anything into the prologue,
1058 so we just skip what we don't recognize. */
1059 unrecognized_pc = start;
1060 }
0d39a070
DJ
1061
1062 start += 2;
1063 }
ec3d575a 1064 else if (thumb_instruction_changes_pc (insn))
3d74b771 1065 {
ec3d575a 1066 /* Don't scan past anything that might change control flow. */
da3c6d4a 1067 break;
3d74b771 1068 }
ec3d575a
UW
1069 else
1070 {
1071 /* The optimizer might shove anything into the prologue,
1072 so we just skip what we don't recognize. */
1073 unrecognized_pc = start;
1074 }
29d73ae4
DJ
1075
1076 start += 2;
c906108c
SS
1077 }
1078
0d39a070
DJ
1079 if (arm_debug)
1080 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1081 paddress (gdbarch, start));
1082
ec3d575a
UW
1083 if (unrecognized_pc == 0)
1084 unrecognized_pc = start;
1085
29d73ae4
DJ
1086 if (cache == NULL)
1087 {
1088 do_cleanups (back_to);
ec3d575a 1089 return unrecognized_pc;
29d73ae4
DJ
1090 }
1091
29d73ae4
DJ
1092 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1093 {
1094 /* Frame pointer is fp. Frame size is constant. */
1095 cache->framereg = ARM_FP_REGNUM;
1096 cache->framesize = -regs[ARM_FP_REGNUM].k;
1097 }
1098 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1099 {
1100 /* Frame pointer is r7. Frame size is constant. */
1101 cache->framereg = THUMB_FP_REGNUM;
1102 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1103 }
72a2e3dc 1104 else
29d73ae4
DJ
1105 {
1106 /* Try the stack pointer... this is a bit desperate. */
1107 cache->framereg = ARM_SP_REGNUM;
1108 cache->framesize = -regs[ARM_SP_REGNUM].k;
1109 }
29d73ae4
DJ
1110
1111 for (i = 0; i < 16; i++)
1112 if (pv_area_find_reg (stack, gdbarch, i, &offset))
1113 cache->saved_regs[i].addr = offset;
1114
1115 do_cleanups (back_to);
ec3d575a 1116 return unrecognized_pc;
c906108c
SS
1117}
1118
621c6d5b
YQ
1119
1120/* Try to analyze the instructions starting from PC, which load symbol
1121 __stack_chk_guard. Return the address of instruction after loading this
1122 symbol, set the dest register number to *BASEREG, and set the size of
1123 instructions for loading symbol in OFFSET. Return 0 if instructions are
1124 not recognized. */
1125
1126static CORE_ADDR
1127arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
1128 unsigned int *destreg, int *offset)
1129{
1130 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1131 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1132 unsigned int low, high, address;
1133
1134 address = 0;
1135 if (is_thumb)
1136 {
1137 unsigned short insn1
198cd59d 1138 = read_code_unsigned_integer (pc, 2, byte_order_for_code);
621c6d5b
YQ
1139
1140 if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
1141 {
1142 *destreg = bits (insn1, 8, 10);
1143 *offset = 2;
6ae274b7
YQ
1144 address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
1145 address = read_memory_unsigned_integer (address, 4,
1146 byte_order_for_code);
621c6d5b
YQ
1147 }
1148 else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
1149 {
1150 unsigned short insn2
198cd59d 1151 = read_code_unsigned_integer (pc + 2, 2, byte_order_for_code);
621c6d5b
YQ
1152
1153 low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1154
1155 insn1
198cd59d 1156 = read_code_unsigned_integer (pc + 4, 2, byte_order_for_code);
621c6d5b 1157 insn2
198cd59d 1158 = read_code_unsigned_integer (pc + 6, 2, byte_order_for_code);
621c6d5b
YQ
1159
1160 /* movt Rd, #const */
1161 if ((insn1 & 0xfbc0) == 0xf2c0)
1162 {
1163 high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1164 *destreg = bits (insn2, 8, 11);
1165 *offset = 8;
1166 address = (high << 16 | low);
1167 }
1168 }
1169 }
1170 else
1171 {
2e9e421f 1172 unsigned int insn
198cd59d 1173 = read_code_unsigned_integer (pc, 4, byte_order_for_code);
2e9e421f 1174
6ae274b7 1175 if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
2e9e421f 1176 {
6ae274b7
YQ
1177 address = bits (insn, 0, 11) + pc + 8;
1178 address = read_memory_unsigned_integer (address, 4,
1179 byte_order_for_code);
1180
2e9e421f
UW
1181 *destreg = bits (insn, 12, 15);
1182 *offset = 4;
1183 }
1184 else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1185 {
1186 low = EXTRACT_MOVW_MOVT_IMM_A (insn);
1187
1188 insn
198cd59d 1189 = read_code_unsigned_integer (pc + 4, 4, byte_order_for_code);
2e9e421f
UW
1190
1191 if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1192 {
1193 high = EXTRACT_MOVW_MOVT_IMM_A (insn);
1194 *destreg = bits (insn, 12, 15);
1195 *offset = 8;
1196 address = (high << 16 | low);
1197 }
1198 }
621c6d5b
YQ
1199 }
1200
1201 return address;
1202}
1203
1204/* Try to skip a sequence of instructions used for stack protector. If PC
0963b4bd
MS
1205 points to the first instruction of this sequence, return the address of
1206 first instruction after this sequence, otherwise, return original PC.
621c6d5b
YQ
1207
1208 On arm, this sequence of instructions is composed of mainly three steps,
1209 Step 1: load symbol __stack_chk_guard,
1210 Step 2: load from address of __stack_chk_guard,
1211 Step 3: store it to somewhere else.
1212
1213 Usually, instructions on step 2 and step 3 are the same on various ARM
1214 architectures. On step 2, it is one instruction 'ldr Rx, [Rn, #0]', and
1215 on step 3, it is also one instruction 'str Rx, [r7, #immd]'. However,
1216 instructions in step 1 vary from different ARM architectures. On ARMv7,
1217 they are,
1218
1219 movw Rn, #:lower16:__stack_chk_guard
1220 movt Rn, #:upper16:__stack_chk_guard
1221
1222 On ARMv5t, it is,
1223
1224 ldr Rn, .Label
1225 ....
1226 .Lable:
1227 .word __stack_chk_guard
1228
1229 Since ldr/str is a very popular instruction, we can't use them as
1230 'fingerprint' or 'signature' of stack protector sequence. Here we choose
1231 sequence {movw/movt, ldr}/ldr/str plus symbol __stack_chk_guard, if not
1232 stripped, as the 'fingerprint' of a stack protector cdoe sequence. */
1233
1234static CORE_ADDR
1235arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
1236{
1237 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
22e048c9 1238 unsigned int basereg;
7cbd4a93 1239 struct bound_minimal_symbol stack_chk_guard;
621c6d5b
YQ
1240 int offset;
1241 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1242 CORE_ADDR addr;
1243
1244 /* Try to parse the instructions in Step 1. */
1245 addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1246 &basereg, &offset);
1247 if (!addr)
1248 return pc;
1249
1250 stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
6041179a
JB
1251 /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
1252 Otherwise, this sequence cannot be for stack protector. */
1253 if (stack_chk_guard.minsym == NULL
61012eef 1254 || !startswith (MSYMBOL_LINKAGE_NAME (stack_chk_guard.minsym), "__stack_chk_guard"))
621c6d5b
YQ
1255 return pc;
1256
1257 if (is_thumb)
1258 {
1259 unsigned int destreg;
1260 unsigned short insn
198cd59d 1261 = read_code_unsigned_integer (pc + offset, 2, byte_order_for_code);
621c6d5b
YQ
1262
1263 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1264 if ((insn & 0xf800) != 0x6800)
1265 return pc;
1266 if (bits (insn, 3, 5) != basereg)
1267 return pc;
1268 destreg = bits (insn, 0, 2);
1269
198cd59d
YQ
1270 insn = read_code_unsigned_integer (pc + offset + 2, 2,
1271 byte_order_for_code);
621c6d5b
YQ
1272 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1273 if ((insn & 0xf800) != 0x6000)
1274 return pc;
1275 if (destreg != bits (insn, 0, 2))
1276 return pc;
1277 }
1278 else
1279 {
1280 unsigned int destreg;
1281 unsigned int insn
198cd59d 1282 = read_code_unsigned_integer (pc + offset, 4, byte_order_for_code);
621c6d5b
YQ
1283
1284 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1285 if ((insn & 0x0e500000) != 0x04100000)
1286 return pc;
1287 if (bits (insn, 16, 19) != basereg)
1288 return pc;
1289 destreg = bits (insn, 12, 15);
1290 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
198cd59d 1291 insn = read_code_unsigned_integer (pc + offset + 4,
621c6d5b
YQ
1292 4, byte_order_for_code);
1293 if ((insn & 0x0e500000) != 0x04000000)
1294 return pc;
1295 if (bits (insn, 12, 15) != destreg)
1296 return pc;
1297 }
1298 /* The size of total two instructions ldr/str is 4 on Thumb-2, while 8
1299 on arm. */
1300 if (is_thumb)
1301 return pc + offset + 4;
1302 else
1303 return pc + offset + 8;
1304}
1305
da3c6d4a
MS
1306/* Advance the PC across any function entry prologue instructions to
1307 reach some "real" code.
34e8f22d
RE
1308
1309 The APCS (ARM Procedure Call Standard) defines the following
ed9a39eb 1310 prologue:
c906108c 1311
c5aa993b
JM
1312 mov ip, sp
1313 [stmfd sp!, {a1,a2,a3,a4}]
1314 stmfd sp!, {...,fp,ip,lr,pc}
ed9a39eb
JM
1315 [stfe f7, [sp, #-12]!]
1316 [stfe f6, [sp, #-12]!]
1317 [stfe f5, [sp, #-12]!]
1318 [stfe f4, [sp, #-12]!]
0963b4bd 1319 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
c906108c 1320
34e8f22d 1321static CORE_ADDR
6093d2eb 1322arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
c906108c 1323{
a89fea3c 1324 CORE_ADDR func_addr, limit_pc;
c906108c 1325
a89fea3c
JL
1326 /* See if we can determine the end of the prologue via the symbol table.
1327 If so, then return either PC, or the PC after the prologue, whichever
1328 is greater. */
1329 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
c906108c 1330 {
d80b854b
UW
1331 CORE_ADDR post_prologue_pc
1332 = skip_prologue_using_sal (gdbarch, func_addr);
43f3e411 1333 struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);
0d39a070 1334
621c6d5b
YQ
1335 if (post_prologue_pc)
1336 post_prologue_pc
1337 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1338
1339
0d39a070
DJ
1340 /* GCC always emits a line note before the prologue and another
1341 one after, even if the two are at the same address or on the
1342 same line. Take advantage of this so that we do not need to
1343 know every instruction that might appear in the prologue. We
1344 will have producer information for most binaries; if it is
1345 missing (e.g. for -gstabs), assuming the GNU tools. */
1346 if (post_prologue_pc
43f3e411
DE
1347 && (cust == NULL
1348 || COMPUNIT_PRODUCER (cust) == NULL
61012eef
GB
1349 || startswith (COMPUNIT_PRODUCER (cust), "GNU ")
1350 || startswith (COMPUNIT_PRODUCER (cust), "clang ")))
0d39a070
DJ
1351 return post_prologue_pc;
1352
a89fea3c 1353 if (post_prologue_pc != 0)
0d39a070
DJ
1354 {
1355 CORE_ADDR analyzed_limit;
1356
1357 /* For non-GCC compilers, make sure the entire line is an
1358 acceptable prologue; GDB will round this function's
1359 return value up to the end of the following line so we
1360 can not skip just part of a line (and we do not want to).
1361
1362 RealView does not treat the prologue specially, but does
1363 associate prologue code with the opening brace; so this
1364 lets us skip the first line if we think it is the opening
1365 brace. */
9779414d 1366 if (arm_pc_is_thumb (gdbarch, func_addr))
0d39a070
DJ
1367 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1368 post_prologue_pc, NULL);
1369 else
1370 analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
1371 post_prologue_pc, NULL);
1372
1373 if (analyzed_limit != post_prologue_pc)
1374 return func_addr;
1375
1376 return post_prologue_pc;
1377 }
c906108c
SS
1378 }
1379
a89fea3c
JL
1380 /* Can't determine prologue from the symbol table, need to examine
1381 instructions. */
c906108c 1382
a89fea3c
JL
1383 /* Find an upper limit on the function prologue using the debug
1384 information. If the debug information could not be used to provide
1385 that bound, then use an arbitrary large number as the upper bound. */
0963b4bd 1386 /* Like arm_scan_prologue, stop no later than pc + 64. */
d80b854b 1387 limit_pc = skip_prologue_using_sal (gdbarch, pc);
a89fea3c
JL
1388 if (limit_pc == 0)
1389 limit_pc = pc + 64; /* Magic. */
1390
c906108c 1391
29d73ae4 1392 /* Check if this is Thumb code. */
9779414d 1393 if (arm_pc_is_thumb (gdbarch, pc))
a89fea3c 1394 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
21daaaaf
YQ
1395 else
1396 return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL);
c906108c 1397}
94c30b78 1398
c5aa993b 1399/* *INDENT-OFF* */
c906108c
SS
1400/* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1401 This function decodes a Thumb function prologue to determine:
1402 1) the size of the stack frame
1403 2) which registers are saved on it
1404 3) the offsets of saved regs
1405 4) the offset from the stack pointer to the frame pointer
c906108c 1406
da59e081
JM
1407 A typical Thumb function prologue would create this stack frame
1408 (offsets relative to FP)
c906108c
SS
1409 old SP -> 24 stack parameters
1410 20 LR
1411 16 R7
1412 R7 -> 0 local variables (16 bytes)
1413 SP -> -12 additional stack space (12 bytes)
1414 The frame size would thus be 36 bytes, and the frame offset would be
0963b4bd 1415 12 bytes. The frame register is R7.
da59e081 1416
da3c6d4a
MS
1417 The comments for thumb_skip_prolog() describe the algorithm we use
1418 to detect the end of the prolog. */
c5aa993b
JM
1419/* *INDENT-ON* */
1420
c906108c 1421static void
be8626e0 1422thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
b39cc962 1423 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
c906108c
SS
1424{
1425 CORE_ADDR prologue_start;
1426 CORE_ADDR prologue_end;
c906108c 1427
b39cc962
DJ
1428 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1429 &prologue_end))
c906108c 1430 {
ec3d575a
UW
1431 /* See comment in arm_scan_prologue for an explanation of
1432 this heuristics. */
1433 if (prologue_end > prologue_start + 64)
1434 {
1435 prologue_end = prologue_start + 64;
1436 }
c906108c
SS
1437 }
1438 else
f7060f85
DJ
1439 /* We're in the boondocks: we have no idea where the start of the
1440 function is. */
1441 return;
c906108c 1442
325fac50 1443 prologue_end = std::min (prologue_end, prev_pc);
c906108c 1444
be8626e0 1445 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
c906108c
SS
1446}
1447
f303bc3e
YQ
1448/* Return 1 if the ARM instruction INSN restores SP in epilogue, 0
1449 otherwise. */
1450
1451static int
1452arm_instruction_restores_sp (unsigned int insn)
1453{
1454 if (bits (insn, 28, 31) != INST_NV)
1455 {
1456 if ((insn & 0x0df0f000) == 0x0080d000
1457 /* ADD SP (register or immediate). */
1458 || (insn & 0x0df0f000) == 0x0040d000
1459 /* SUB SP (register or immediate). */
1460 || (insn & 0x0ffffff0) == 0x01a0d000
1461 /* MOV SP. */
1462 || (insn & 0x0fff0000) == 0x08bd0000
1463 /* POP (LDMIA). */
1464 || (insn & 0x0fff0000) == 0x049d0000)
1465 /* POP of a single register. */
1466 return 1;
1467 }
1468
1469 return 0;
1470}
1471
0d39a070
DJ
1472/* Analyze an ARM mode prologue starting at PROLOGUE_START and
1473 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1474 fill it in. Return the first address not recognized as a prologue
1475 instruction.
eb5492fa 1476
0d39a070
DJ
1477 We recognize all the instructions typically found in ARM prologues,
1478 plus harmless instructions which can be skipped (either for analysis
1479 purposes, or a more restrictive set that can be skipped when finding
1480 the end of the prologue). */
1481
1482static CORE_ADDR
1483arm_analyze_prologue (struct gdbarch *gdbarch,
1484 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1485 struct arm_prologue_cache *cache)
1486{
0d39a070
DJ
1487 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1488 int regno;
1489 CORE_ADDR offset, current_pc;
1490 pv_t regs[ARM_FPS_REGNUM];
1491 struct pv_area *stack;
1492 struct cleanup *back_to;
0d39a070
DJ
1493 CORE_ADDR unrecognized_pc = 0;
1494
1495 /* Search the prologue looking for instructions that set up the
96baa820 1496 frame pointer, adjust the stack pointer, and save registers.
ed9a39eb 1497
96baa820
JM
1498 Be careful, however, and if it doesn't look like a prologue,
1499 don't try to scan it. If, for instance, a frameless function
1500 begins with stmfd sp!, then we will tell ourselves there is
b8d5e71d 1501 a frame, which will confuse stack traceback, as well as "finish"
96baa820 1502 and other operations that rely on a knowledge of the stack
0d39a070 1503 traceback. */
d4473757 1504
4be43953
DJ
1505 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1506 regs[regno] = pv_register (regno, 0);
55f960e1 1507 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
4be43953
DJ
1508 back_to = make_cleanup_free_pv_area (stack);
1509
94c30b78
MS
1510 for (current_pc = prologue_start;
1511 current_pc < prologue_end;
f43845b3 1512 current_pc += 4)
96baa820 1513 {
e17a4113 1514 unsigned int insn
198cd59d 1515 = read_code_unsigned_integer (current_pc, 4, byte_order_for_code);
9d4fde75 1516
94c30b78 1517 if (insn == 0xe1a0c00d) /* mov ip, sp */
f43845b3 1518 {
4be43953 1519 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
28cd8767
JG
1520 continue;
1521 }
0d39a070
DJ
1522 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1523 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
28cd8767
JG
1524 {
1525 unsigned imm = insn & 0xff; /* immediate value */
1526 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
0d39a070 1527 int rd = bits (insn, 12, 15);
28cd8767 1528 imm = (imm >> rot) | (imm << (32 - rot));
0d39a070 1529 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
28cd8767
JG
1530 continue;
1531 }
0d39a070
DJ
1532 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1533 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
28cd8767
JG
1534 {
1535 unsigned imm = insn & 0xff; /* immediate value */
1536 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
0d39a070 1537 int rd = bits (insn, 12, 15);
28cd8767 1538 imm = (imm >> rot) | (imm << (32 - rot));
0d39a070 1539 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
f43845b3
MS
1540 continue;
1541 }
0963b4bd
MS
1542 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1543 [sp, #-4]! */
f43845b3 1544 {
4be43953
DJ
1545 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1546 break;
1547 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
0d39a070
DJ
1548 pv_area_store (stack, regs[ARM_SP_REGNUM], 4,
1549 regs[bits (insn, 12, 15)]);
f43845b3
MS
1550 continue;
1551 }
1552 else if ((insn & 0xffff0000) == 0xe92d0000)
d4473757
KB
1553 /* stmfd sp!, {..., fp, ip, lr, pc}
1554 or
1555 stmfd sp!, {a1, a2, a3, a4} */
c906108c 1556 {
d4473757 1557 int mask = insn & 0xffff;
ed9a39eb 1558
4be43953
DJ
1559 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1560 break;
1561
94c30b78 1562 /* Calculate offsets of saved registers. */
34e8f22d 1563 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
d4473757
KB
1564 if (mask & (1 << regno))
1565 {
0963b4bd
MS
1566 regs[ARM_SP_REGNUM]
1567 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
4be43953 1568 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
d4473757
KB
1569 }
1570 }
0d39a070
DJ
1571 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1572 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
f8bf5763 1573 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
b8d5e71d
MS
1574 {
1575 /* No need to add this to saved_regs -- it's just an arg reg. */
1576 continue;
1577 }
0d39a070
DJ
1578 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1579 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
f8bf5763 1580 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
f43845b3
MS
1581 {
1582 /* No need to add this to saved_regs -- it's just an arg reg. */
1583 continue;
1584 }
0963b4bd
MS
1585 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1586 { registers } */
0d39a070
DJ
1587 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1588 {
1589 /* No need to add this to saved_regs -- it's just arg regs. */
1590 continue;
1591 }
d4473757
KB
1592 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1593 {
94c30b78
MS
1594 unsigned imm = insn & 0xff; /* immediate value */
1595 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
d4473757 1596 imm = (imm >> rot) | (imm << (32 - rot));
4be43953 1597 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
d4473757
KB
1598 }
1599 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1600 {
94c30b78
MS
1601 unsigned imm = insn & 0xff; /* immediate value */
1602 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
d4473757 1603 imm = (imm >> rot) | (imm << (32 - rot));
4be43953 1604 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
d4473757 1605 }
0963b4bd
MS
1606 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1607 [sp, -#c]! */
2af46ca0 1608 && gdbarch_tdep (gdbarch)->have_fpa_registers)
d4473757 1609 {
4be43953
DJ
1610 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1611 break;
1612
1613 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
34e8f22d 1614 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
4be43953 1615 pv_area_store (stack, regs[ARM_SP_REGNUM], 12, regs[regno]);
d4473757 1616 }
0963b4bd
MS
1617 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1618 [sp!] */
2af46ca0 1619 && gdbarch_tdep (gdbarch)->have_fpa_registers)
d4473757
KB
1620 {
1621 int n_saved_fp_regs;
1622 unsigned int fp_start_reg, fp_bound_reg;
1623
4be43953
DJ
1624 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1625 break;
1626
94c30b78 1627 if ((insn & 0x800) == 0x800) /* N0 is set */
96baa820 1628 {
d4473757
KB
1629 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1630 n_saved_fp_regs = 3;
1631 else
1632 n_saved_fp_regs = 1;
96baa820 1633 }
d4473757 1634 else
96baa820 1635 {
d4473757
KB
1636 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1637 n_saved_fp_regs = 2;
1638 else
1639 n_saved_fp_regs = 4;
96baa820 1640 }
d4473757 1641
34e8f22d 1642 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
d4473757
KB
1643 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1644 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
96baa820 1645 {
4be43953
DJ
1646 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1647 pv_area_store (stack, regs[ARM_SP_REGNUM], 12,
1648 regs[fp_start_reg++]);
96baa820 1649 }
c906108c 1650 }
0d39a070
DJ
1651 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1652 {
1653 /* Allow some special function calls when skipping the
1654 prologue; GCC generates these before storing arguments to
1655 the stack. */
1656 CORE_ADDR dest = BranchDest (current_pc, insn);
1657
e0634ccf 1658 if (skip_prologue_function (gdbarch, dest, 0))
0d39a070
DJ
1659 continue;
1660 else
1661 break;
1662 }
d4473757 1663 else if ((insn & 0xf0000000) != 0xe0000000)
0963b4bd 1664 break; /* Condition not true, exit early. */
0d39a070
DJ
1665 else if (arm_instruction_changes_pc (insn))
1666 /* Don't scan past anything that might change control flow. */
1667 break;
f303bc3e
YQ
1668 else if (arm_instruction_restores_sp (insn))
1669 {
1670 /* Don't scan past the epilogue. */
1671 break;
1672 }
d19f7eee
UW
1673 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1674 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1675 /* Ignore block loads from the stack, potentially copying
1676 parameters from memory. */
1677 continue;
1678 else if ((insn & 0xfc500000) == 0xe4100000
1679 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1680 /* Similarly ignore single loads from the stack. */
1681 continue;
0d39a070
DJ
1682 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1683 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1684 register instead of the stack. */
d4473757 1685 continue;
0d39a070
DJ
1686 else
1687 {
21daaaaf
YQ
1688 /* The optimizer might shove anything into the prologue, if
1689 we build up cache (cache != NULL) from scanning prologue,
1690 we just skip what we don't recognize and scan further to
1691 make cache as complete as possible. However, if we skip
1692 prologue, we'll stop immediately on unrecognized
1693 instruction. */
0d39a070 1694 unrecognized_pc = current_pc;
21daaaaf
YQ
1695 if (cache != NULL)
1696 continue;
1697 else
1698 break;
0d39a070 1699 }
c906108c
SS
1700 }
1701
0d39a070
DJ
1702 if (unrecognized_pc == 0)
1703 unrecognized_pc = current_pc;
1704
0d39a070
DJ
1705 if (cache)
1706 {
4072f920
YQ
1707 int framereg, framesize;
1708
1709 /* The frame size is just the distance from the frame register
1710 to the original stack pointer. */
1711 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1712 {
1713 /* Frame pointer is fp. */
1714 framereg = ARM_FP_REGNUM;
1715 framesize = -regs[ARM_FP_REGNUM].k;
1716 }
1717 else
1718 {
1719 /* Try the stack pointer... this is a bit desperate. */
1720 framereg = ARM_SP_REGNUM;
1721 framesize = -regs[ARM_SP_REGNUM].k;
1722 }
1723
0d39a070
DJ
1724 cache->framereg = framereg;
1725 cache->framesize = framesize;
1726
1727 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1728 if (pv_area_find_reg (stack, gdbarch, regno, &offset))
1729 cache->saved_regs[regno].addr = offset;
1730 }
1731
1732 if (arm_debug)
1733 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1734 paddress (gdbarch, unrecognized_pc));
4be43953
DJ
1735
1736 do_cleanups (back_to);
0d39a070
DJ
1737 return unrecognized_pc;
1738}
1739
1740static void
1741arm_scan_prologue (struct frame_info *this_frame,
1742 struct arm_prologue_cache *cache)
1743{
1744 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1745 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
bec2ab5a 1746 CORE_ADDR prologue_start, prologue_end;
0d39a070
DJ
1747 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1748 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
0d39a070
DJ
1749
1750 /* Assume there is no frame until proven otherwise. */
1751 cache->framereg = ARM_SP_REGNUM;
1752 cache->framesize = 0;
1753
1754 /* Check for Thumb prologue. */
1755 if (arm_frame_is_thumb (this_frame))
1756 {
1757 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1758 return;
1759 }
1760
1761 /* Find the function prologue. If we can't find the function in
1762 the symbol table, peek in the stack frame to find the PC. */
1763 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1764 &prologue_end))
1765 {
1766 /* One way to find the end of the prologue (which works well
1767 for unoptimized code) is to do the following:
1768
1769 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1770
1771 if (sal.line == 0)
1772 prologue_end = prev_pc;
1773 else if (sal.end < prologue_end)
1774 prologue_end = sal.end;
1775
1776 This mechanism is very accurate so long as the optimizer
1777 doesn't move any instructions from the function body into the
1778 prologue. If this happens, sal.end will be the last
1779 instruction in the first hunk of prologue code just before
1780 the first instruction that the scheduler has moved from
1781 the body to the prologue.
1782
1783 In order to make sure that we scan all of the prologue
1784 instructions, we use a slightly less accurate mechanism which
1785 may scan more than necessary. To help compensate for this
1786 lack of accuracy, the prologue scanning loop below contains
1787 several clauses which'll cause the loop to terminate early if
1788 an implausible prologue instruction is encountered.
1789
1790 The expression
1791
1792 prologue_start + 64
1793
1794 is a suitable endpoint since it accounts for the largest
1795 possible prologue plus up to five instructions inserted by
1796 the scheduler. */
1797
1798 if (prologue_end > prologue_start + 64)
1799 {
1800 prologue_end = prologue_start + 64; /* See above. */
1801 }
1802 }
1803 else
1804 {
1805 /* We have no symbol information. Our only option is to assume this
1806 function has a standard stack frame and the normal frame register.
1807 Then, we can find the value of our frame pointer on entrance to
1808 the callee (or at the present moment if this is the innermost frame).
1809 The value stored there should be the address of the stmfd + 8. */
1810 CORE_ADDR frame_loc;
7913a64c 1811 ULONGEST return_value;
0d39a070
DJ
1812
1813 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
7913a64c
YQ
1814 if (!safe_read_memory_unsigned_integer (frame_loc, 4, byte_order,
1815 &return_value))
0d39a070
DJ
1816 return;
1817 else
1818 {
1819 prologue_start = gdbarch_addr_bits_remove
1820 (gdbarch, return_value) - 8;
1821 prologue_end = prologue_start + 64; /* See above. */
1822 }
1823 }
1824
1825 if (prev_pc < prologue_end)
1826 prologue_end = prev_pc;
1827
1828 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
c906108c
SS
1829}
1830
eb5492fa 1831static struct arm_prologue_cache *
a262aec2 1832arm_make_prologue_cache (struct frame_info *this_frame)
c906108c 1833{
eb5492fa
DJ
1834 int reg;
1835 struct arm_prologue_cache *cache;
1836 CORE_ADDR unwound_fp;
c5aa993b 1837
35d5d4ee 1838 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
a262aec2 1839 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
c906108c 1840
a262aec2 1841 arm_scan_prologue (this_frame, cache);
848cfffb 1842
a262aec2 1843 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
eb5492fa
DJ
1844 if (unwound_fp == 0)
1845 return cache;
c906108c 1846
4be43953 1847 cache->prev_sp = unwound_fp + cache->framesize;
c906108c 1848
eb5492fa
DJ
1849 /* Calculate actual addresses of saved registers using offsets
1850 determined by arm_scan_prologue. */
a262aec2 1851 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
e28a332c 1852 if (trad_frame_addr_p (cache->saved_regs, reg))
eb5492fa
DJ
1853 cache->saved_regs[reg].addr += cache->prev_sp;
1854
1855 return cache;
c906108c
SS
1856}
1857
c1ee9414
LM
1858/* Implementation of the stop_reason hook for arm_prologue frames. */
1859
1860static enum unwind_stop_reason
1861arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
1862 void **this_cache)
1863{
1864 struct arm_prologue_cache *cache;
1865 CORE_ADDR pc;
1866
1867 if (*this_cache == NULL)
1868 *this_cache = arm_make_prologue_cache (this_frame);
9a3c8263 1869 cache = (struct arm_prologue_cache *) *this_cache;
c1ee9414
LM
1870
1871 /* This is meant to halt the backtrace at "_start". */
1872 pc = get_frame_pc (this_frame);
1873 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
1874 return UNWIND_OUTERMOST;
1875
1876 /* If we've hit a wall, stop. */
1877 if (cache->prev_sp == 0)
1878 return UNWIND_OUTERMOST;
1879
1880 return UNWIND_NO_REASON;
1881}
1882
eb5492fa
DJ
1883/* Our frame ID for a normal frame is the current function's starting PC
1884 and the caller's SP when we were called. */
c906108c 1885
148754e5 1886static void
a262aec2 1887arm_prologue_this_id (struct frame_info *this_frame,
eb5492fa
DJ
1888 void **this_cache,
1889 struct frame_id *this_id)
c906108c 1890{
eb5492fa
DJ
1891 struct arm_prologue_cache *cache;
1892 struct frame_id id;
2c404490 1893 CORE_ADDR pc, func;
f079148d 1894
eb5492fa 1895 if (*this_cache == NULL)
a262aec2 1896 *this_cache = arm_make_prologue_cache (this_frame);
9a3c8263 1897 cache = (struct arm_prologue_cache *) *this_cache;
2a451106 1898
0e9e9abd
UW
1899 /* Use function start address as part of the frame ID. If we cannot
1900 identify the start address (due to missing symbol information),
1901 fall back to just using the current PC. */
c1ee9414 1902 pc = get_frame_pc (this_frame);
2c404490 1903 func = get_frame_func (this_frame);
0e9e9abd
UW
1904 if (!func)
1905 func = pc;
1906
eb5492fa 1907 id = frame_id_build (cache->prev_sp, func);
eb5492fa 1908 *this_id = id;
c906108c
SS
1909}
1910
a262aec2
DJ
1911static struct value *
1912arm_prologue_prev_register (struct frame_info *this_frame,
eb5492fa 1913 void **this_cache,
a262aec2 1914 int prev_regnum)
24de872b 1915{
24568a2c 1916 struct gdbarch *gdbarch = get_frame_arch (this_frame);
24de872b
DJ
1917 struct arm_prologue_cache *cache;
1918
eb5492fa 1919 if (*this_cache == NULL)
a262aec2 1920 *this_cache = arm_make_prologue_cache (this_frame);
9a3c8263 1921 cache = (struct arm_prologue_cache *) *this_cache;
24de872b 1922
eb5492fa 1923 /* If we are asked to unwind the PC, then we need to return the LR
b39cc962
DJ
1924 instead. The prologue may save PC, but it will point into this
1925 frame's prologue, not the next frame's resume location. Also
1926 strip the saved T bit. A valid LR may have the low bit set, but
1927 a valid PC never does. */
eb5492fa 1928 if (prev_regnum == ARM_PC_REGNUM)
b39cc962
DJ
1929 {
1930 CORE_ADDR lr;
1931
1932 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1933 return frame_unwind_got_constant (this_frame, prev_regnum,
24568a2c 1934 arm_addr_bits_remove (gdbarch, lr));
b39cc962 1935 }
24de872b 1936
eb5492fa 1937 /* SP is generally not saved to the stack, but this frame is
a262aec2 1938 identified by the next frame's stack pointer at the time of the call.
eb5492fa
DJ
1939 The value was already reconstructed into PREV_SP. */
1940 if (prev_regnum == ARM_SP_REGNUM)
a262aec2 1941 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
eb5492fa 1942
b39cc962
DJ
1943 /* The CPSR may have been changed by the call instruction and by the
1944 called function. The only bit we can reconstruct is the T bit,
1945 by checking the low bit of LR as of the call. This is a reliable
1946 indicator of Thumb-ness except for some ARM v4T pre-interworking
1947 Thumb code, which could get away with a clear low bit as long as
1948 the called function did not use bx. Guess that all other
1949 bits are unchanged; the condition flags are presumably lost,
1950 but the processor status is likely valid. */
1951 if (prev_regnum == ARM_PS_REGNUM)
1952 {
1953 CORE_ADDR lr, cpsr;
9779414d 1954 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
b39cc962
DJ
1955
1956 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
1957 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1958 if (IS_THUMB_ADDR (lr))
9779414d 1959 cpsr |= t_bit;
b39cc962 1960 else
9779414d 1961 cpsr &= ~t_bit;
b39cc962
DJ
1962 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
1963 }
1964
a262aec2
DJ
1965 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
1966 prev_regnum);
eb5492fa
DJ
1967}
1968
1969struct frame_unwind arm_prologue_unwind = {
1970 NORMAL_FRAME,
c1ee9414 1971 arm_prologue_unwind_stop_reason,
eb5492fa 1972 arm_prologue_this_id,
a262aec2
DJ
1973 arm_prologue_prev_register,
1974 NULL,
1975 default_frame_sniffer
eb5492fa
DJ
1976};
1977
0e9e9abd
UW
1978/* Maintain a list of ARM exception table entries per objfile, similar to the
1979 list of mapping symbols. We only cache entries for standard ARM-defined
1980 personality routines; the cache will contain only the frame unwinding
1981 instructions associated with the entry (not the descriptors). */
1982
1983static const struct objfile_data *arm_exidx_data_key;
1984
1985struct arm_exidx_entry
1986{
1987 bfd_vma addr;
1988 gdb_byte *entry;
1989};
1990typedef struct arm_exidx_entry arm_exidx_entry_s;
1991DEF_VEC_O(arm_exidx_entry_s);
1992
1993struct arm_exidx_data
1994{
1995 VEC(arm_exidx_entry_s) **section_maps;
1996};
1997
1998static void
1999arm_exidx_data_free (struct objfile *objfile, void *arg)
2000{
9a3c8263 2001 struct arm_exidx_data *data = (struct arm_exidx_data *) arg;
0e9e9abd
UW
2002 unsigned int i;
2003
2004 for (i = 0; i < objfile->obfd->section_count; i++)
2005 VEC_free (arm_exidx_entry_s, data->section_maps[i]);
2006}
2007
2008static inline int
2009arm_compare_exidx_entries (const struct arm_exidx_entry *lhs,
2010 const struct arm_exidx_entry *rhs)
2011{
2012 return lhs->addr < rhs->addr;
2013}
2014
2015static struct obj_section *
2016arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2017{
2018 struct obj_section *osect;
2019
2020 ALL_OBJFILE_OSECTIONS (objfile, osect)
2021 if (bfd_get_section_flags (objfile->obfd,
2022 osect->the_bfd_section) & SEC_ALLOC)
2023 {
2024 bfd_vma start, size;
2025 start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
2026 size = bfd_get_section_size (osect->the_bfd_section);
2027
2028 if (start <= vma && vma < start + size)
2029 return osect;
2030 }
2031
2032 return NULL;
2033}
2034
2035/* Parse contents of exception table and exception index sections
2036 of OBJFILE, and fill in the exception table entry cache.
2037
2038 For each entry that refers to a standard ARM-defined personality
2039 routine, extract the frame unwinding instructions (from either
2040 the index or the table section). The unwinding instructions
2041 are normalized by:
2042 - extracting them from the rest of the table data
2043 - converting to host endianness
2044 - appending the implicit 0xb0 ("Finish") code
2045
2046 The extracted and normalized instructions are stored for later
2047 retrieval by the arm_find_exidx_entry routine. */
2048
2049static void
2050arm_exidx_new_objfile (struct objfile *objfile)
2051{
3bb47e8b 2052 struct cleanup *cleanups;
0e9e9abd
UW
2053 struct arm_exidx_data *data;
2054 asection *exidx, *extab;
2055 bfd_vma exidx_vma = 0, extab_vma = 0;
2056 bfd_size_type exidx_size = 0, extab_size = 0;
2057 gdb_byte *exidx_data = NULL, *extab_data = NULL;
2058 LONGEST i;
2059
2060 /* If we've already touched this file, do nothing. */
2061 if (!objfile || objfile_data (objfile, arm_exidx_data_key) != NULL)
2062 return;
3bb47e8b 2063 cleanups = make_cleanup (null_cleanup, NULL);
0e9e9abd
UW
2064
2065 /* Read contents of exception table and index. */
a5eda10c 2066 exidx = bfd_get_section_by_name (objfile->obfd, ELF_STRING_ARM_unwind);
0e9e9abd
UW
2067 if (exidx)
2068 {
2069 exidx_vma = bfd_section_vma (objfile->obfd, exidx);
2070 exidx_size = bfd_get_section_size (exidx);
224c3ddb 2071 exidx_data = (gdb_byte *) xmalloc (exidx_size);
0e9e9abd
UW
2072 make_cleanup (xfree, exidx_data);
2073
2074 if (!bfd_get_section_contents (objfile->obfd, exidx,
2075 exidx_data, 0, exidx_size))
2076 {
2077 do_cleanups (cleanups);
2078 return;
2079 }
2080 }
2081
2082 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2083 if (extab)
2084 {
2085 extab_vma = bfd_section_vma (objfile->obfd, extab);
2086 extab_size = bfd_get_section_size (extab);
224c3ddb 2087 extab_data = (gdb_byte *) xmalloc (extab_size);
0e9e9abd
UW
2088 make_cleanup (xfree, extab_data);
2089
2090 if (!bfd_get_section_contents (objfile->obfd, extab,
2091 extab_data, 0, extab_size))
2092 {
2093 do_cleanups (cleanups);
2094 return;
2095 }
2096 }
2097
2098 /* Allocate exception table data structure. */
2099 data = OBSTACK_ZALLOC (&objfile->objfile_obstack, struct arm_exidx_data);
2100 set_objfile_data (objfile, arm_exidx_data_key, data);
2101 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
2102 objfile->obfd->section_count,
2103 VEC(arm_exidx_entry_s) *);
2104
2105 /* Fill in exception table. */
2106 for (i = 0; i < exidx_size / 8; i++)
2107 {
2108 struct arm_exidx_entry new_exidx_entry;
2109 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8);
2110 bfd_vma val = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8 + 4);
2111 bfd_vma addr = 0, word = 0;
2112 int n_bytes = 0, n_words = 0;
2113 struct obj_section *sec;
2114 gdb_byte *entry = NULL;
2115
2116 /* Extract address of start of function. */
2117 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2118 idx += exidx_vma + i * 8;
2119
2120 /* Find section containing function and compute section offset. */
2121 sec = arm_obj_section_from_vma (objfile, idx);
2122 if (sec == NULL)
2123 continue;
2124 idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);
2125
2126 /* Determine address of exception table entry. */
2127 if (val == 1)
2128 {
2129 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2130 }
2131 else if ((val & 0xff000000) == 0x80000000)
2132 {
2133 /* Exception table entry embedded in .ARM.exidx
2134 -- must be short form. */
2135 word = val;
2136 n_bytes = 3;
2137 }
2138 else if (!(val & 0x80000000))
2139 {
2140 /* Exception table entry in .ARM.extab. */
2141 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2142 addr += exidx_vma + i * 8 + 4;
2143
2144 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_size)
2145 {
2146 word = bfd_h_get_32 (objfile->obfd,
2147 extab_data + addr - extab_vma);
2148 addr += 4;
2149
2150 if ((word & 0xff000000) == 0x80000000)
2151 {
2152 /* Short form. */
2153 n_bytes = 3;
2154 }
2155 else if ((word & 0xff000000) == 0x81000000
2156 || (word & 0xff000000) == 0x82000000)
2157 {
2158 /* Long form. */
2159 n_bytes = 2;
2160 n_words = ((word >> 16) & 0xff);
2161 }
2162 else if (!(word & 0x80000000))
2163 {
2164 bfd_vma pers;
2165 struct obj_section *pers_sec;
2166 int gnu_personality = 0;
2167
2168 /* Custom personality routine. */
2169 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2170 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2171
2172 /* Check whether we've got one of the variants of the
2173 GNU personality routines. */
2174 pers_sec = arm_obj_section_from_vma (objfile, pers);
2175 if (pers_sec)
2176 {
2177 static const char *personality[] =
2178 {
2179 "__gcc_personality_v0",
2180 "__gxx_personality_v0",
2181 "__gcj_personality_v0",
2182 "__gnu_objc_personality_v0",
2183 NULL
2184 };
2185
2186 CORE_ADDR pc = pers + obj_section_offset (pers_sec);
2187 int k;
2188
2189 for (k = 0; personality[k]; k++)
2190 if (lookup_minimal_symbol_by_pc_name
2191 (pc, personality[k], objfile))
2192 {
2193 gnu_personality = 1;
2194 break;
2195 }
2196 }
2197
2198 /* If so, the next word contains a word count in the high
2199 byte, followed by the same unwind instructions as the
2200 pre-defined forms. */
2201 if (gnu_personality
2202 && addr + 4 <= extab_vma + extab_size)
2203 {
2204 word = bfd_h_get_32 (objfile->obfd,
2205 extab_data + addr - extab_vma);
2206 addr += 4;
2207 n_bytes = 3;
2208 n_words = ((word >> 24) & 0xff);
2209 }
2210 }
2211 }
2212 }
2213
2214 /* Sanity check address. */
2215 if (n_words)
2216 if (addr < extab_vma || addr + 4 * n_words > extab_vma + extab_size)
2217 n_words = n_bytes = 0;
2218
2219 /* The unwind instructions reside in WORD (only the N_BYTES least
2220 significant bytes are valid), followed by N_WORDS words in the
2221 extab section starting at ADDR. */
2222 if (n_bytes || n_words)
2223 {
224c3ddb
SM
2224 gdb_byte *p = entry
2225 = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
2226 n_bytes + n_words * 4 + 1);
0e9e9abd
UW
2227
2228 while (n_bytes--)
2229 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2230
2231 while (n_words--)
2232 {
2233 word = bfd_h_get_32 (objfile->obfd,
2234 extab_data + addr - extab_vma);
2235 addr += 4;
2236
2237 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2238 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2239 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2240 *p++ = (gdb_byte) (word & 0xff);
2241 }
2242
2243 /* Implied "Finish" to terminate the list. */
2244 *p++ = 0xb0;
2245 }
2246
2247 /* Push entry onto vector. They are guaranteed to always
2248 appear in order of increasing addresses. */
2249 new_exidx_entry.addr = idx;
2250 new_exidx_entry.entry = entry;
2251 VEC_safe_push (arm_exidx_entry_s,
2252 data->section_maps[sec->the_bfd_section->index],
2253 &new_exidx_entry);
2254 }
2255
2256 do_cleanups (cleanups);
2257}
2258
2259/* Search for the exception table entry covering MEMADDR. If one is found,
2260 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2261 set *START to the start of the region covered by this entry. */
2262
2263static gdb_byte *
2264arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2265{
2266 struct obj_section *sec;
2267
2268 sec = find_pc_section (memaddr);
2269 if (sec != NULL)
2270 {
2271 struct arm_exidx_data *data;
2272 VEC(arm_exidx_entry_s) *map;
2273 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2274 unsigned int idx;
2275
9a3c8263
SM
2276 data = ((struct arm_exidx_data *)
2277 objfile_data (sec->objfile, arm_exidx_data_key));
0e9e9abd
UW
2278 if (data != NULL)
2279 {
2280 map = data->section_maps[sec->the_bfd_section->index];
2281 if (!VEC_empty (arm_exidx_entry_s, map))
2282 {
2283 struct arm_exidx_entry *map_sym;
2284
2285 idx = VEC_lower_bound (arm_exidx_entry_s, map, &map_key,
2286 arm_compare_exidx_entries);
2287
2288 /* VEC_lower_bound finds the earliest ordered insertion
2289 point. If the following symbol starts at this exact
2290 address, we use that; otherwise, the preceding
2291 exception table entry covers this address. */
2292 if (idx < VEC_length (arm_exidx_entry_s, map))
2293 {
2294 map_sym = VEC_index (arm_exidx_entry_s, map, idx);
2295 if (map_sym->addr == map_key.addr)
2296 {
2297 if (start)
2298 *start = map_sym->addr + obj_section_addr (sec);
2299 return map_sym->entry;
2300 }
2301 }
2302
2303 if (idx > 0)
2304 {
2305 map_sym = VEC_index (arm_exidx_entry_s, map, idx - 1);
2306 if (start)
2307 *start = map_sym->addr + obj_section_addr (sec);
2308 return map_sym->entry;
2309 }
2310 }
2311 }
2312 }
2313
2314 return NULL;
2315}
2316
2317/* Given the current frame THIS_FRAME, and its associated frame unwinding
2318 instruction list from the ARM exception table entry ENTRY, allocate and
2319 return a prologue cache structure describing how to unwind this frame.
2320
2321 Return NULL if the unwinding instruction list contains a "spare",
2322 "reserved" or "refuse to unwind" instruction as defined in section
2323 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2324 for the ARM Architecture" document. */
2325
2326static struct arm_prologue_cache *
2327arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2328{
2329 CORE_ADDR vsp = 0;
2330 int vsp_valid = 0;
2331
2332 struct arm_prologue_cache *cache;
2333 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2334 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2335
2336 for (;;)
2337 {
2338 gdb_byte insn;
2339
2340 /* Whenever we reload SP, we actually have to retrieve its
2341 actual value in the current frame. */
2342 if (!vsp_valid)
2343 {
2344 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2345 {
2346 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2347 vsp = get_frame_register_unsigned (this_frame, reg);
2348 }
2349 else
2350 {
2351 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
2352 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2353 }
2354
2355 vsp_valid = 1;
2356 }
2357
2358 /* Decode next unwind instruction. */
2359 insn = *entry++;
2360
2361 if ((insn & 0xc0) == 0)
2362 {
2363 int offset = insn & 0x3f;
2364 vsp += (offset << 2) + 4;
2365 }
2366 else if ((insn & 0xc0) == 0x40)
2367 {
2368 int offset = insn & 0x3f;
2369 vsp -= (offset << 2) + 4;
2370 }
2371 else if ((insn & 0xf0) == 0x80)
2372 {
2373 int mask = ((insn & 0xf) << 8) | *entry++;
2374 int i;
2375
2376 /* The special case of an all-zero mask identifies
2377 "Refuse to unwind". We return NULL to fall back
2378 to the prologue analyzer. */
2379 if (mask == 0)
2380 return NULL;
2381
2382 /* Pop registers r4..r15 under mask. */
2383 for (i = 0; i < 12; i++)
2384 if (mask & (1 << i))
2385 {
2386 cache->saved_regs[4 + i].addr = vsp;
2387 vsp += 4;
2388 }
2389
2390 /* Special-case popping SP -- we need to reload vsp. */
2391 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2392 vsp_valid = 0;
2393 }
2394 else if ((insn & 0xf0) == 0x90)
2395 {
2396 int reg = insn & 0xf;
2397
2398 /* Reserved cases. */
2399 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2400 return NULL;
2401
2402 /* Set SP from another register and mark VSP for reload. */
2403 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2404 vsp_valid = 0;
2405 }
2406 else if ((insn & 0xf0) == 0xa0)
2407 {
2408 int count = insn & 0x7;
2409 int pop_lr = (insn & 0x8) != 0;
2410 int i;
2411
2412 /* Pop r4..r[4+count]. */
2413 for (i = 0; i <= count; i++)
2414 {
2415 cache->saved_regs[4 + i].addr = vsp;
2416 vsp += 4;
2417 }
2418
2419 /* If indicated by flag, pop LR as well. */
2420 if (pop_lr)
2421 {
2422 cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
2423 vsp += 4;
2424 }
2425 }
2426 else if (insn == 0xb0)
2427 {
2428 /* We could only have updated PC by popping into it; if so, it
2429 will show up as address. Otherwise, copy LR into PC. */
2430 if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
2431 cache->saved_regs[ARM_PC_REGNUM]
2432 = cache->saved_regs[ARM_LR_REGNUM];
2433
2434 /* We're done. */
2435 break;
2436 }
2437 else if (insn == 0xb1)
2438 {
2439 int mask = *entry++;
2440 int i;
2441
2442 /* All-zero mask and mask >= 16 is "spare". */
2443 if (mask == 0 || mask >= 16)
2444 return NULL;
2445
2446 /* Pop r0..r3 under mask. */
2447 for (i = 0; i < 4; i++)
2448 if (mask & (1 << i))
2449 {
2450 cache->saved_regs[i].addr = vsp;
2451 vsp += 4;
2452 }
2453 }
2454 else if (insn == 0xb2)
2455 {
2456 ULONGEST offset = 0;
2457 unsigned shift = 0;
2458
2459 do
2460 {
2461 offset |= (*entry & 0x7f) << shift;
2462 shift += 7;
2463 }
2464 while (*entry++ & 0x80);
2465
2466 vsp += 0x204 + (offset << 2);
2467 }
2468 else if (insn == 0xb3)
2469 {
2470 int start = *entry >> 4;
2471 int count = (*entry++) & 0xf;
2472 int i;
2473
2474 /* Only registers D0..D15 are valid here. */
2475 if (start + count >= 16)
2476 return NULL;
2477
2478 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2479 for (i = 0; i <= count; i++)
2480 {
2481 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2482 vsp += 8;
2483 }
2484
2485 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2486 vsp += 4;
2487 }
2488 else if ((insn & 0xf8) == 0xb8)
2489 {
2490 int count = insn & 0x7;
2491 int i;
2492
2493 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2494 for (i = 0; i <= count; i++)
2495 {
2496 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2497 vsp += 8;
2498 }
2499
2500 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2501 vsp += 4;
2502 }
2503 else if (insn == 0xc6)
2504 {
2505 int start = *entry >> 4;
2506 int count = (*entry++) & 0xf;
2507 int i;
2508
2509 /* Only registers WR0..WR15 are valid. */
2510 if (start + count >= 16)
2511 return NULL;
2512
2513 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2514 for (i = 0; i <= count; i++)
2515 {
2516 cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
2517 vsp += 8;
2518 }
2519 }
2520 else if (insn == 0xc7)
2521 {
2522 int mask = *entry++;
2523 int i;
2524
2525 /* All-zero mask and mask >= 16 is "spare". */
2526 if (mask == 0 || mask >= 16)
2527 return NULL;
2528
2529 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2530 for (i = 0; i < 4; i++)
2531 if (mask & (1 << i))
2532 {
2533 cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
2534 vsp += 4;
2535 }
2536 }
2537 else if ((insn & 0xf8) == 0xc0)
2538 {
2539 int count = insn & 0x7;
2540 int i;
2541
2542 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2543 for (i = 0; i <= count; i++)
2544 {
2545 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
2546 vsp += 8;
2547 }
2548 }
2549 else if (insn == 0xc8)
2550 {
2551 int start = *entry >> 4;
2552 int count = (*entry++) & 0xf;
2553 int i;
2554
2555 /* Only registers D0..D31 are valid. */
2556 if (start + count >= 16)
2557 return NULL;
2558
2559 /* Pop VFP double-precision registers
2560 D[16+start]..D[16+start+count]. */
2561 for (i = 0; i <= count; i++)
2562 {
2563 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
2564 vsp += 8;
2565 }
2566 }
2567 else if (insn == 0xc9)
2568 {
2569 int start = *entry >> 4;
2570 int count = (*entry++) & 0xf;
2571 int i;
2572
2573 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2574 for (i = 0; i <= count; i++)
2575 {
2576 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2577 vsp += 8;
2578 }
2579 }
2580 else if ((insn & 0xf8) == 0xd0)
2581 {
2582 int count = insn & 0x7;
2583 int i;
2584
2585 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2586 for (i = 0; i <= count; i++)
2587 {
2588 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2589 vsp += 8;
2590 }
2591 }
2592 else
2593 {
2594 /* Everything else is "spare". */
2595 return NULL;
2596 }
2597 }
2598
2599 /* If we restore SP from a register, assume this was the frame register.
2600 Otherwise just fall back to SP as frame register. */
2601 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2602 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2603 else
2604 cache->framereg = ARM_SP_REGNUM;
2605
2606 /* Determine offset to previous frame. */
2607 cache->framesize
2608 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2609
2610 /* We already got the previous SP. */
2611 cache->prev_sp = vsp;
2612
2613 return cache;
2614}
2615
2616/* Unwinding via ARM exception table entries. Note that the sniffer
2617 already computes a filled-in prologue cache, which is then used
2618 with the same arm_prologue_this_id and arm_prologue_prev_register
2619 routines also used for prologue-parsing based unwinding. */
2620
2621static int
2622arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2623 struct frame_info *this_frame,
2624 void **this_prologue_cache)
2625{
2626 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2627 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2628 CORE_ADDR addr_in_block, exidx_region, func_start;
2629 struct arm_prologue_cache *cache;
2630 gdb_byte *entry;
2631
2632 /* See if we have an ARM exception table entry covering this address. */
2633 addr_in_block = get_frame_address_in_block (this_frame);
2634 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2635 if (!entry)
2636 return 0;
2637
2638 /* The ARM exception table does not describe unwind information
2639 for arbitrary PC values, but is guaranteed to be correct only
2640 at call sites. We have to decide here whether we want to use
2641 ARM exception table information for this frame, or fall back
2642 to using prologue parsing. (Note that if we have DWARF CFI,
2643 this sniffer isn't even called -- CFI is always preferred.)
2644
2645 Before we make this decision, however, we check whether we
2646 actually have *symbol* information for the current frame.
2647 If not, prologue parsing would not work anyway, so we might
2648 as well use the exception table and hope for the best. */
2649 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2650 {
2651 int exc_valid = 0;
2652
2653 /* If the next frame is "normal", we are at a call site in this
2654 frame, so exception information is guaranteed to be valid. */
2655 if (get_next_frame (this_frame)
2656 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2657 exc_valid = 1;
2658
2659 /* We also assume exception information is valid if we're currently
2660 blocked in a system call. The system library is supposed to
d9311bfa
AT
2661 ensure this, so that e.g. pthread cancellation works. */
2662 if (arm_frame_is_thumb (this_frame))
0e9e9abd 2663 {
7913a64c 2664 ULONGEST insn;
416dc9c6 2665
7913a64c
YQ
2666 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 2,
2667 2, byte_order_for_code, &insn)
d9311bfa
AT
2668 && (insn & 0xff00) == 0xdf00 /* svc */)
2669 exc_valid = 1;
0e9e9abd 2670 }
d9311bfa
AT
2671 else
2672 {
7913a64c 2673 ULONGEST insn;
416dc9c6 2674
7913a64c
YQ
2675 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 4,
2676 4, byte_order_for_code, &insn)
d9311bfa
AT
2677 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2678 exc_valid = 1;
2679 }
2680
0e9e9abd
UW
2681 /* Bail out if we don't know that exception information is valid. */
2682 if (!exc_valid)
2683 return 0;
2684
2685 /* The ARM exception index does not mark the *end* of the region
2686 covered by the entry, and some functions will not have any entry.
2687 To correctly recognize the end of the covered region, the linker
2688 should have inserted dummy records with a CANTUNWIND marker.
2689
2690 Unfortunately, current versions of GNU ld do not reliably do
2691 this, and thus we may have found an incorrect entry above.
2692 As a (temporary) sanity check, we only use the entry if it
2693 lies *within* the bounds of the function. Note that this check
2694 might reject perfectly valid entries that just happen to cover
2695 multiple functions; therefore this check ought to be removed
2696 once the linker is fixed. */
2697 if (func_start > exidx_region)
2698 return 0;
2699 }
2700
2701 /* Decode the list of unwinding instructions into a prologue cache.
2702 Note that this may fail due to e.g. a "refuse to unwind" code. */
2703 cache = arm_exidx_fill_cache (this_frame, entry);
2704 if (!cache)
2705 return 0;
2706
2707 *this_prologue_cache = cache;
2708 return 1;
2709}
2710
2711struct frame_unwind arm_exidx_unwind = {
2712 NORMAL_FRAME,
8fbca658 2713 default_frame_unwind_stop_reason,
0e9e9abd
UW
2714 arm_prologue_this_id,
2715 arm_prologue_prev_register,
2716 NULL,
2717 arm_exidx_unwind_sniffer
2718};
2719
779aa56f
YQ
2720static struct arm_prologue_cache *
2721arm_make_epilogue_frame_cache (struct frame_info *this_frame)
2722{
2723 struct arm_prologue_cache *cache;
779aa56f
YQ
2724 int reg;
2725
2726 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2727 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2728
2729 /* Still rely on the offset calculated from prologue. */
2730 arm_scan_prologue (this_frame, cache);
2731
2732 /* Since we are in epilogue, the SP has been restored. */
2733 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2734
2735 /* Calculate actual addresses of saved registers using offsets
2736 determined by arm_scan_prologue. */
2737 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2738 if (trad_frame_addr_p (cache->saved_regs, reg))
2739 cache->saved_regs[reg].addr += cache->prev_sp;
2740
2741 return cache;
2742}
2743
2744/* Implementation of function hook 'this_id' in
2745 'struct frame_uwnind' for epilogue unwinder. */
2746
2747static void
2748arm_epilogue_frame_this_id (struct frame_info *this_frame,
2749 void **this_cache,
2750 struct frame_id *this_id)
2751{
2752 struct arm_prologue_cache *cache;
2753 CORE_ADDR pc, func;
2754
2755 if (*this_cache == NULL)
2756 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2757 cache = (struct arm_prologue_cache *) *this_cache;
2758
2759 /* Use function start address as part of the frame ID. If we cannot
2760 identify the start address (due to missing symbol information),
2761 fall back to just using the current PC. */
2762 pc = get_frame_pc (this_frame);
2763 func = get_frame_func (this_frame);
fb3f3d25 2764 if (func == 0)
779aa56f
YQ
2765 func = pc;
2766
2767 (*this_id) = frame_id_build (cache->prev_sp, pc);
2768}
2769
2770/* Implementation of function hook 'prev_register' in
2771 'struct frame_uwnind' for epilogue unwinder. */
2772
2773static struct value *
2774arm_epilogue_frame_prev_register (struct frame_info *this_frame,
2775 void **this_cache, int regnum)
2776{
779aa56f
YQ
2777 if (*this_cache == NULL)
2778 *this_cache = arm_make_epilogue_frame_cache (this_frame);
779aa56f
YQ
2779
2780 return arm_prologue_prev_register (this_frame, this_cache, regnum);
2781}
2782
2783static int arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch,
2784 CORE_ADDR pc);
2785static int thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch,
2786 CORE_ADDR pc);
2787
2788/* Implementation of function hook 'sniffer' in
2789 'struct frame_uwnind' for epilogue unwinder. */
2790
2791static int
2792arm_epilogue_frame_sniffer (const struct frame_unwind *self,
2793 struct frame_info *this_frame,
2794 void **this_prologue_cache)
2795{
2796 if (frame_relative_level (this_frame) == 0)
2797 {
2798 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2799 CORE_ADDR pc = get_frame_pc (this_frame);
2800
2801 if (arm_frame_is_thumb (this_frame))
2802 return thumb_stack_frame_destroyed_p (gdbarch, pc);
2803 else
2804 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
2805 }
2806 else
2807 return 0;
2808}
2809
2810/* Frame unwinder from epilogue. */
2811
2812static const struct frame_unwind arm_epilogue_frame_unwind =
2813{
2814 NORMAL_FRAME,
2815 default_frame_unwind_stop_reason,
2816 arm_epilogue_frame_this_id,
2817 arm_epilogue_frame_prev_register,
2818 NULL,
2819 arm_epilogue_frame_sniffer,
2820};
2821
80d8d390
YQ
2822/* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2823 trampoline, return the target PC. Otherwise return 0.
2824
2825 void call0a (char c, short s, int i, long l) {}
2826
2827 int main (void)
2828 {
2829 (*pointer_to_call0a) (c, s, i, l);
2830 }
2831
2832 Instead of calling a stub library function _call_via_xx (xx is
2833 the register name), GCC may inline the trampoline in the object
2834 file as below (register r2 has the address of call0a).
2835
2836 .global main
2837 .type main, %function
2838 ...
2839 bl .L1
2840 ...
2841 .size main, .-main
2842
2843 .L1:
2844 bx r2
2845
2846 The trampoline 'bx r2' doesn't belong to main. */
2847
2848static CORE_ADDR
2849arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2850{
2851 /* The heuristics of recognizing such trampoline is that FRAME is
2852 executing in Thumb mode and the instruction on PC is 'bx Rm'. */
2853 if (arm_frame_is_thumb (frame))
2854 {
2855 gdb_byte buf[2];
2856
2857 if (target_read_memory (pc, buf, 2) == 0)
2858 {
2859 struct gdbarch *gdbarch = get_frame_arch (frame);
2860 enum bfd_endian byte_order_for_code
2861 = gdbarch_byte_order_for_code (gdbarch);
2862 uint16_t insn
2863 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2864
2865 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2866 {
2867 CORE_ADDR dest
2868 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2869
2870 /* Clear the LSB so that gdb core sets step-resume
2871 breakpoint at the right address. */
2872 return UNMAKE_THUMB_ADDR (dest);
2873 }
2874 }
2875 }
2876
2877 return 0;
2878}
2879
909cf6ea 2880static struct arm_prologue_cache *
a262aec2 2881arm_make_stub_cache (struct frame_info *this_frame)
909cf6ea 2882{
909cf6ea 2883 struct arm_prologue_cache *cache;
909cf6ea 2884
35d5d4ee 2885 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
a262aec2 2886 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
909cf6ea 2887
a262aec2 2888 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
909cf6ea
DJ
2889
2890 return cache;
2891}
2892
2893/* Our frame ID for a stub frame is the current SP and LR. */
2894
2895static void
a262aec2 2896arm_stub_this_id (struct frame_info *this_frame,
909cf6ea
DJ
2897 void **this_cache,
2898 struct frame_id *this_id)
2899{
2900 struct arm_prologue_cache *cache;
2901
2902 if (*this_cache == NULL)
a262aec2 2903 *this_cache = arm_make_stub_cache (this_frame);
9a3c8263 2904 cache = (struct arm_prologue_cache *) *this_cache;
909cf6ea 2905
a262aec2 2906 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
909cf6ea
DJ
2907}
2908
a262aec2
DJ
2909static int
2910arm_stub_unwind_sniffer (const struct frame_unwind *self,
2911 struct frame_info *this_frame,
2912 void **this_prologue_cache)
909cf6ea 2913{
93d42b30 2914 CORE_ADDR addr_in_block;
948f8e3d 2915 gdb_byte dummy[4];
18d18ac8
YQ
2916 CORE_ADDR pc, start_addr;
2917 const char *name;
909cf6ea 2918
a262aec2 2919 addr_in_block = get_frame_address_in_block (this_frame);
18d18ac8 2920 pc = get_frame_pc (this_frame);
3e5d3a5a 2921 if (in_plt_section (addr_in_block)
fc36e839
DE
2922 /* We also use the stub winder if the target memory is unreadable
2923 to avoid having the prologue unwinder trying to read it. */
18d18ac8
YQ
2924 || target_read_memory (pc, dummy, 4) != 0)
2925 return 1;
2926
2927 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2928 && arm_skip_bx_reg (this_frame, pc) != 0)
a262aec2 2929 return 1;
909cf6ea 2930
a262aec2 2931 return 0;
909cf6ea
DJ
2932}
2933
a262aec2
DJ
2934struct frame_unwind arm_stub_unwind = {
2935 NORMAL_FRAME,
8fbca658 2936 default_frame_unwind_stop_reason,
a262aec2
DJ
2937 arm_stub_this_id,
2938 arm_prologue_prev_register,
2939 NULL,
2940 arm_stub_unwind_sniffer
2941};
2942
2ae28aa9
YQ
2943/* Put here the code to store, into CACHE->saved_regs, the addresses
2944 of the saved registers of frame described by THIS_FRAME. CACHE is
2945 returned. */
2946
2947static struct arm_prologue_cache *
2948arm_m_exception_cache (struct frame_info *this_frame)
2949{
2950 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2951 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2952 struct arm_prologue_cache *cache;
2953 CORE_ADDR unwound_sp;
2954 LONGEST xpsr;
2955
2956 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2957 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2958
2959 unwound_sp = get_frame_register_unsigned (this_frame,
2960 ARM_SP_REGNUM);
2961
2962 /* The hardware saves eight 32-bit words, comprising xPSR,
2963 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
2964 "B1.5.6 Exception entry behavior" in
2965 "ARMv7-M Architecture Reference Manual". */
2966 cache->saved_regs[0].addr = unwound_sp;
2967 cache->saved_regs[1].addr = unwound_sp + 4;
2968 cache->saved_regs[2].addr = unwound_sp + 8;
2969 cache->saved_regs[3].addr = unwound_sp + 12;
2970 cache->saved_regs[12].addr = unwound_sp + 16;
2971 cache->saved_regs[14].addr = unwound_sp + 20;
2972 cache->saved_regs[15].addr = unwound_sp + 24;
2973 cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;
2974
2975 /* If bit 9 of the saved xPSR is set, then there is a four-byte
2976 aligner between the top of the 32-byte stack frame and the
2977 previous context's stack pointer. */
2978 cache->prev_sp = unwound_sp + 32;
2979 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
2980 && (xpsr & (1 << 9)) != 0)
2981 cache->prev_sp += 4;
2982
2983 return cache;
2984}
2985
2986/* Implementation of function hook 'this_id' in
2987 'struct frame_uwnind'. */
2988
2989static void
2990arm_m_exception_this_id (struct frame_info *this_frame,
2991 void **this_cache,
2992 struct frame_id *this_id)
2993{
2994 struct arm_prologue_cache *cache;
2995
2996 if (*this_cache == NULL)
2997 *this_cache = arm_m_exception_cache (this_frame);
9a3c8263 2998 cache = (struct arm_prologue_cache *) *this_cache;
2ae28aa9
YQ
2999
3000 /* Our frame ID for a stub frame is the current SP and LR. */
3001 *this_id = frame_id_build (cache->prev_sp,
3002 get_frame_pc (this_frame));
3003}
3004
3005/* Implementation of function hook 'prev_register' in
3006 'struct frame_uwnind'. */
3007
3008static struct value *
3009arm_m_exception_prev_register (struct frame_info *this_frame,
3010 void **this_cache,
3011 int prev_regnum)
3012{
2ae28aa9
YQ
3013 struct arm_prologue_cache *cache;
3014
3015 if (*this_cache == NULL)
3016 *this_cache = arm_m_exception_cache (this_frame);
9a3c8263 3017 cache = (struct arm_prologue_cache *) *this_cache;
2ae28aa9
YQ
3018
3019 /* The value was already reconstructed into PREV_SP. */
3020 if (prev_regnum == ARM_SP_REGNUM)
3021 return frame_unwind_got_constant (this_frame, prev_regnum,
3022 cache->prev_sp);
3023
3024 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
3025 prev_regnum);
3026}
3027
3028/* Implementation of function hook 'sniffer' in
3029 'struct frame_uwnind'. */
3030
3031static int
3032arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3033 struct frame_info *this_frame,
3034 void **this_prologue_cache)
3035{
3036 CORE_ADDR this_pc = get_frame_pc (this_frame);
3037
3038 /* No need to check is_m; this sniffer is only registered for
3039 M-profile architectures. */
3040
ca90e760
FH
3041 /* Check if exception frame returns to a magic PC value. */
3042 return arm_m_addr_is_magic (this_pc);
2ae28aa9
YQ
3043}
3044
3045/* Frame unwinder for M-profile exceptions. */
3046
3047struct frame_unwind arm_m_exception_unwind =
3048{
3049 SIGTRAMP_FRAME,
3050 default_frame_unwind_stop_reason,
3051 arm_m_exception_this_id,
3052 arm_m_exception_prev_register,
3053 NULL,
3054 arm_m_exception_unwind_sniffer
3055};
3056
24de872b 3057static CORE_ADDR
a262aec2 3058arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
24de872b
DJ
3059{
3060 struct arm_prologue_cache *cache;
3061
eb5492fa 3062 if (*this_cache == NULL)
a262aec2 3063 *this_cache = arm_make_prologue_cache (this_frame);
9a3c8263 3064 cache = (struct arm_prologue_cache *) *this_cache;
eb5492fa 3065
4be43953 3066 return cache->prev_sp - cache->framesize;
24de872b
DJ
3067}
3068
eb5492fa
DJ
3069struct frame_base arm_normal_base = {
3070 &arm_prologue_unwind,
3071 arm_normal_frame_base,
3072 arm_normal_frame_base,
3073 arm_normal_frame_base
3074};
3075
a262aec2 3076/* Assuming THIS_FRAME is a dummy, return the frame ID of that
eb5492fa
DJ
3077 dummy frame. The frame ID's base needs to match the TOS value
3078 saved by save_dummy_frame_tos() and returned from
3079 arm_push_dummy_call, and the PC needs to match the dummy frame's
3080 breakpoint. */
c906108c 3081
eb5492fa 3082static struct frame_id
a262aec2 3083arm_dummy_id (struct gdbarch *gdbarch, struct frame_info *this_frame)
c906108c 3084{
0963b4bd
MS
3085 return frame_id_build (get_frame_register_unsigned (this_frame,
3086 ARM_SP_REGNUM),
a262aec2 3087 get_frame_pc (this_frame));
eb5492fa 3088}
c3b4394c 3089
eb5492fa
DJ
3090/* Given THIS_FRAME, find the previous frame's resume PC (which will
3091 be used to construct the previous frame's ID, after looking up the
3092 containing function). */
c3b4394c 3093
eb5492fa
DJ
3094static CORE_ADDR
3095arm_unwind_pc (struct gdbarch *gdbarch, struct frame_info *this_frame)
3096{
3097 CORE_ADDR pc;
3098 pc = frame_unwind_register_unsigned (this_frame, ARM_PC_REGNUM);
24568a2c 3099 return arm_addr_bits_remove (gdbarch, pc);
eb5492fa
DJ
3100}
3101
3102static CORE_ADDR
3103arm_unwind_sp (struct gdbarch *gdbarch, struct frame_info *this_frame)
3104{
3105 return frame_unwind_register_unsigned (this_frame, ARM_SP_REGNUM);
c906108c
SS
3106}
3107
b39cc962
DJ
3108static struct value *
3109arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
3110 int regnum)
3111{
24568a2c 3112 struct gdbarch * gdbarch = get_frame_arch (this_frame);
b39cc962 3113 CORE_ADDR lr, cpsr;
9779414d 3114 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
b39cc962
DJ
3115
3116 switch (regnum)
3117 {
3118 case ARM_PC_REGNUM:
3119 /* The PC is normally copied from the return column, which
3120 describes saves of LR. However, that version may have an
3121 extra bit set to indicate Thumb state. The bit is not
3122 part of the PC. */
3123 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3124 return frame_unwind_got_constant (this_frame, regnum,
24568a2c 3125 arm_addr_bits_remove (gdbarch, lr));
b39cc962
DJ
3126
3127 case ARM_PS_REGNUM:
3128 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
ca38c58e 3129 cpsr = get_frame_register_unsigned (this_frame, regnum);
b39cc962
DJ
3130 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3131 if (IS_THUMB_ADDR (lr))
9779414d 3132 cpsr |= t_bit;
b39cc962 3133 else
9779414d 3134 cpsr &= ~t_bit;
ca38c58e 3135 return frame_unwind_got_constant (this_frame, regnum, cpsr);
b39cc962
DJ
3136
3137 default:
3138 internal_error (__FILE__, __LINE__,
3139 _("Unexpected register %d"), regnum);
3140 }
3141}
3142
3143static void
3144arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3145 struct dwarf2_frame_state_reg *reg,
3146 struct frame_info *this_frame)
3147{
3148 switch (regnum)
3149 {
3150 case ARM_PC_REGNUM:
3151 case ARM_PS_REGNUM:
3152 reg->how = DWARF2_FRAME_REG_FN;
3153 reg->loc.fn = arm_dwarf2_prev_register;
3154 break;
3155 case ARM_SP_REGNUM:
3156 reg->how = DWARF2_FRAME_REG_CFA;
3157 break;
3158 }
3159}
3160
c9cf6e20 3161/* Implement the stack_frame_destroyed_p gdbarch method. */
4024ca99
UW
3162
3163static int
c9cf6e20 3164thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
4024ca99
UW
3165{
3166 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3167 unsigned int insn, insn2;
3168 int found_return = 0, found_stack_adjust = 0;
3169 CORE_ADDR func_start, func_end;
3170 CORE_ADDR scan_pc;
3171 gdb_byte buf[4];
3172
3173 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3174 return 0;
3175
3176 /* The epilogue is a sequence of instructions along the following lines:
3177
3178 - add stack frame size to SP or FP
3179 - [if frame pointer used] restore SP from FP
3180 - restore registers from SP [may include PC]
3181 - a return-type instruction [if PC wasn't already restored]
3182
3183 In a first pass, we scan forward from the current PC and verify the
3184 instructions we find as compatible with this sequence, ending in a
3185 return instruction.
3186
3187 However, this is not sufficient to distinguish indirect function calls
3188 within a function from indirect tail calls in the epilogue in some cases.
3189 Therefore, if we didn't already find any SP-changing instruction during
3190 forward scan, we add a backward scanning heuristic to ensure we actually
3191 are in the epilogue. */
3192
3193 scan_pc = pc;
3194 while (scan_pc < func_end && !found_return)
3195 {
3196 if (target_read_memory (scan_pc, buf, 2))
3197 break;
3198
3199 scan_pc += 2;
3200 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3201
3202 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3203 found_return = 1;
3204 else if (insn == 0x46f7) /* mov pc, lr */
3205 found_return = 1;
540314bd 3206 else if (thumb_instruction_restores_sp (insn))
4024ca99 3207 {
b7576e5c 3208 if ((insn & 0xff00) == 0xbd00) /* pop <registers, PC> */
4024ca99
UW
3209 found_return = 1;
3210 }
db24da6d 3211 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
4024ca99
UW
3212 {
3213 if (target_read_memory (scan_pc, buf, 2))
3214 break;
3215
3216 scan_pc += 2;
3217 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3218
3219 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3220 {
4024ca99
UW
3221 if (insn2 & 0x8000) /* <registers> include PC. */
3222 found_return = 1;
3223 }
3224 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3225 && (insn2 & 0x0fff) == 0x0b04)
3226 {
4024ca99
UW
3227 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3228 found_return = 1;
3229 }
3230 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3231 && (insn2 & 0x0e00) == 0x0a00)
6b65d1b6 3232 ;
4024ca99
UW
3233 else
3234 break;
3235 }
3236 else
3237 break;
3238 }
3239
3240 if (!found_return)
3241 return 0;
3242
3243 /* Since any instruction in the epilogue sequence, with the possible
3244 exception of return itself, updates the stack pointer, we need to
3245 scan backwards for at most one instruction. Try either a 16-bit or
3246 a 32-bit instruction. This is just a heuristic, so we do not worry
0963b4bd 3247 too much about false positives. */
4024ca99 3248
6b65d1b6
YQ
3249 if (pc - 4 < func_start)
3250 return 0;
3251 if (target_read_memory (pc - 4, buf, 4))
3252 return 0;
4024ca99 3253
6b65d1b6
YQ
3254 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3255 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3256
3257 if (thumb_instruction_restores_sp (insn2))
3258 found_stack_adjust = 1;
3259 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3260 found_stack_adjust = 1;
3261 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3262 && (insn2 & 0x0fff) == 0x0b04)
3263 found_stack_adjust = 1;
3264 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3265 && (insn2 & 0x0e00) == 0x0a00)
3266 found_stack_adjust = 1;
4024ca99
UW
3267
3268 return found_stack_adjust;
3269}
3270
4024ca99 3271static int
c58b006a 3272arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch, CORE_ADDR pc)
4024ca99
UW
3273{
3274 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3275 unsigned int insn;
f303bc3e 3276 int found_return;
4024ca99
UW
3277 CORE_ADDR func_start, func_end;
3278
4024ca99
UW
3279 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3280 return 0;
3281
3282 /* We are in the epilogue if the previous instruction was a stack
3283 adjustment and the next instruction is a possible return (bx, mov
3284 pc, or pop). We could have to scan backwards to find the stack
3285 adjustment, or forwards to find the return, but this is a decent
3286 approximation. First scan forwards. */
3287
3288 found_return = 0;
3289 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3290 if (bits (insn, 28, 31) != INST_NV)
3291 {
3292 if ((insn & 0x0ffffff0) == 0x012fff10)
3293 /* BX. */
3294 found_return = 1;
3295 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3296 /* MOV PC. */
3297 found_return = 1;
3298 else if ((insn & 0x0fff0000) == 0x08bd0000
3299 && (insn & 0x0000c000) != 0)
3300 /* POP (LDMIA), including PC or LR. */
3301 found_return = 1;
3302 }
3303
3304 if (!found_return)
3305 return 0;
3306
3307 /* Scan backwards. This is just a heuristic, so do not worry about
3308 false positives from mode changes. */
3309
3310 if (pc < func_start + 4)
3311 return 0;
3312
3313 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
f303bc3e 3314 if (arm_instruction_restores_sp (insn))
4024ca99
UW
3315 return 1;
3316
3317 return 0;
3318}
3319
c58b006a
YQ
3320/* Implement the stack_frame_destroyed_p gdbarch method. */
3321
3322static int
3323arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3324{
3325 if (arm_pc_is_thumb (gdbarch, pc))
3326 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3327 else
3328 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
3329}
4024ca99 3330
2dd604e7
RE
3331/* When arguments must be pushed onto the stack, they go on in reverse
3332 order. The code below implements a FILO (stack) to do this. */
3333
3334struct stack_item
3335{
3336 int len;
3337 struct stack_item *prev;
7c543f7b 3338 gdb_byte *data;
2dd604e7
RE
3339};
3340
3341static struct stack_item *
df3b6708 3342push_stack_item (struct stack_item *prev, const gdb_byte *contents, int len)
2dd604e7
RE
3343{
3344 struct stack_item *si;
8d749320 3345 si = XNEW (struct stack_item);
7c543f7b 3346 si->data = (gdb_byte *) xmalloc (len);
2dd604e7
RE
3347 si->len = len;
3348 si->prev = prev;
3349 memcpy (si->data, contents, len);
3350 return si;
3351}
3352
3353static struct stack_item *
3354pop_stack_item (struct stack_item *si)
3355{
3356 struct stack_item *dead = si;
3357 si = si->prev;
3358 xfree (dead->data);
3359 xfree (dead);
3360 return si;
3361}
3362
2af48f68
PB
3363
3364/* Return the alignment (in bytes) of the given type. */
3365
3366static int
3367arm_type_align (struct type *t)
3368{
3369 int n;
3370 int align;
3371 int falign;
3372
3373 t = check_typedef (t);
3374 switch (TYPE_CODE (t))
3375 {
3376 default:
3377 /* Should never happen. */
3378 internal_error (__FILE__, __LINE__, _("unknown type alignment"));
3379 return 4;
3380
3381 case TYPE_CODE_PTR:
3382 case TYPE_CODE_ENUM:
3383 case TYPE_CODE_INT:
3384 case TYPE_CODE_FLT:
3385 case TYPE_CODE_SET:
3386 case TYPE_CODE_RANGE:
2af48f68
PB
3387 case TYPE_CODE_REF:
3388 case TYPE_CODE_CHAR:
3389 case TYPE_CODE_BOOL:
3390 return TYPE_LENGTH (t);
3391
3392 case TYPE_CODE_ARRAY:
c4312b19
YQ
3393 if (TYPE_VECTOR (t))
3394 {
3395 /* Use the natural alignment for vector types (the same for
3396 scalar type), but the maximum alignment is 64-bit. */
3397 if (TYPE_LENGTH (t) > 8)
3398 return 8;
3399 else
3400 return TYPE_LENGTH (t);
3401 }
3402 else
3403 return arm_type_align (TYPE_TARGET_TYPE (t));
2af48f68 3404 case TYPE_CODE_COMPLEX:
2af48f68
PB
3405 return arm_type_align (TYPE_TARGET_TYPE (t));
3406
3407 case TYPE_CODE_STRUCT:
3408 case TYPE_CODE_UNION:
3409 align = 1;
3410 for (n = 0; n < TYPE_NFIELDS (t); n++)
3411 {
3412 falign = arm_type_align (TYPE_FIELD_TYPE (t, n));
3413 if (falign > align)
3414 align = falign;
3415 }
3416 return align;
3417 }
3418}
3419
90445bd3
DJ
3420/* Possible base types for a candidate for passing and returning in
3421 VFP registers. */
3422
3423enum arm_vfp_cprc_base_type
3424{
3425 VFP_CPRC_UNKNOWN,
3426 VFP_CPRC_SINGLE,
3427 VFP_CPRC_DOUBLE,
3428 VFP_CPRC_VEC64,
3429 VFP_CPRC_VEC128
3430};
3431
3432/* The length of one element of base type B. */
3433
3434static unsigned
3435arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3436{
3437 switch (b)
3438 {
3439 case VFP_CPRC_SINGLE:
3440 return 4;
3441 case VFP_CPRC_DOUBLE:
3442 return 8;
3443 case VFP_CPRC_VEC64:
3444 return 8;
3445 case VFP_CPRC_VEC128:
3446 return 16;
3447 default:
3448 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3449 (int) b);
3450 }
3451}
3452
3453/* The character ('s', 'd' or 'q') for the type of VFP register used
3454 for passing base type B. */
3455
3456static int
3457arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3458{
3459 switch (b)
3460 {
3461 case VFP_CPRC_SINGLE:
3462 return 's';
3463 case VFP_CPRC_DOUBLE:
3464 return 'd';
3465 case VFP_CPRC_VEC64:
3466 return 'd';
3467 case VFP_CPRC_VEC128:
3468 return 'q';
3469 default:
3470 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3471 (int) b);
3472 }
3473}
3474
3475/* Determine whether T may be part of a candidate for passing and
3476 returning in VFP registers, ignoring the limit on the total number
3477 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3478 classification of the first valid component found; if it is not
3479 VFP_CPRC_UNKNOWN, all components must have the same classification
3480 as *BASE_TYPE. If it is found that T contains a type not permitted
3481 for passing and returning in VFP registers, a type differently
3482 classified from *BASE_TYPE, or two types differently classified
3483 from each other, return -1, otherwise return the total number of
3484 base-type elements found (possibly 0 in an empty structure or
817e0957
YQ
3485 array). Vector types are not currently supported, matching the
3486 generic AAPCS support. */
90445bd3
DJ
3487
3488static int
3489arm_vfp_cprc_sub_candidate (struct type *t,
3490 enum arm_vfp_cprc_base_type *base_type)
3491{
3492 t = check_typedef (t);
3493 switch (TYPE_CODE (t))
3494 {
3495 case TYPE_CODE_FLT:
3496 switch (TYPE_LENGTH (t))
3497 {
3498 case 4:
3499 if (*base_type == VFP_CPRC_UNKNOWN)
3500 *base_type = VFP_CPRC_SINGLE;
3501 else if (*base_type != VFP_CPRC_SINGLE)
3502 return -1;
3503 return 1;
3504
3505 case 8:
3506 if (*base_type == VFP_CPRC_UNKNOWN)
3507 *base_type = VFP_CPRC_DOUBLE;
3508 else if (*base_type != VFP_CPRC_DOUBLE)
3509 return -1;
3510 return 1;
3511
3512 default:
3513 return -1;
3514 }
3515 break;
3516
817e0957
YQ
3517 case TYPE_CODE_COMPLEX:
3518 /* Arguments of complex T where T is one of the types float or
3519 double get treated as if they are implemented as:
3520
3521 struct complexT
3522 {
3523 T real;
3524 T imag;
5f52445b
YQ
3525 };
3526
3527 */
817e0957
YQ
3528 switch (TYPE_LENGTH (t))
3529 {
3530 case 8:
3531 if (*base_type == VFP_CPRC_UNKNOWN)
3532 *base_type = VFP_CPRC_SINGLE;
3533 else if (*base_type != VFP_CPRC_SINGLE)
3534 return -1;
3535 return 2;
3536
3537 case 16:
3538 if (*base_type == VFP_CPRC_UNKNOWN)
3539 *base_type = VFP_CPRC_DOUBLE;
3540 else if (*base_type != VFP_CPRC_DOUBLE)
3541 return -1;
3542 return 2;
3543
3544 default:
3545 return -1;
3546 }
3547 break;
3548
90445bd3
DJ
3549 case TYPE_CODE_ARRAY:
3550 {
c4312b19 3551 if (TYPE_VECTOR (t))
90445bd3 3552 {
c4312b19
YQ
3553 /* A 64-bit or 128-bit containerized vector type are VFP
3554 CPRCs. */
3555 switch (TYPE_LENGTH (t))
3556 {
3557 case 8:
3558 if (*base_type == VFP_CPRC_UNKNOWN)
3559 *base_type = VFP_CPRC_VEC64;
3560 return 1;
3561 case 16:
3562 if (*base_type == VFP_CPRC_UNKNOWN)
3563 *base_type = VFP_CPRC_VEC128;
3564 return 1;
3565 default:
3566 return -1;
3567 }
3568 }
3569 else
3570 {
3571 int count;
3572 unsigned unitlen;
3573
3574 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t),
3575 base_type);
3576 if (count == -1)
3577 return -1;
3578 if (TYPE_LENGTH (t) == 0)
3579 {
3580 gdb_assert (count == 0);
3581 return 0;
3582 }
3583 else if (count == 0)
3584 return -1;
3585 unitlen = arm_vfp_cprc_unit_length (*base_type);
3586 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3587 return TYPE_LENGTH (t) / unitlen;
90445bd3 3588 }
90445bd3
DJ
3589 }
3590 break;
3591
3592 case TYPE_CODE_STRUCT:
3593 {
3594 int count = 0;
3595 unsigned unitlen;
3596 int i;
3597 for (i = 0; i < TYPE_NFIELDS (t); i++)
3598 {
1040b979
YQ
3599 int sub_count = 0;
3600
3601 if (!field_is_static (&TYPE_FIELD (t, i)))
3602 sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3603 base_type);
90445bd3
DJ
3604 if (sub_count == -1)
3605 return -1;
3606 count += sub_count;
3607 }
3608 if (TYPE_LENGTH (t) == 0)
3609 {
3610 gdb_assert (count == 0);
3611 return 0;
3612 }
3613 else if (count == 0)
3614 return -1;
3615 unitlen = arm_vfp_cprc_unit_length (*base_type);
3616 if (TYPE_LENGTH (t) != unitlen * count)
3617 return -1;
3618 return count;
3619 }
3620
3621 case TYPE_CODE_UNION:
3622 {
3623 int count = 0;
3624 unsigned unitlen;
3625 int i;
3626 for (i = 0; i < TYPE_NFIELDS (t); i++)
3627 {
3628 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3629 base_type);
3630 if (sub_count == -1)
3631 return -1;
3632 count = (count > sub_count ? count : sub_count);
3633 }
3634 if (TYPE_LENGTH (t) == 0)
3635 {
3636 gdb_assert (count == 0);
3637 return 0;
3638 }
3639 else if (count == 0)
3640 return -1;
3641 unitlen = arm_vfp_cprc_unit_length (*base_type);
3642 if (TYPE_LENGTH (t) != unitlen * count)
3643 return -1;
3644 return count;
3645 }
3646
3647 default:
3648 break;
3649 }
3650
3651 return -1;
3652}
3653
3654/* Determine whether T is a VFP co-processor register candidate (CPRC)
3655 if passed to or returned from a non-variadic function with the VFP
3656 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3657 *BASE_TYPE to the base type for T and *COUNT to the number of
3658 elements of that base type before returning. */
3659
3660static int
3661arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3662 int *count)
3663{
3664 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3665 int c = arm_vfp_cprc_sub_candidate (t, &b);
3666 if (c <= 0 || c > 4)
3667 return 0;
3668 *base_type = b;
3669 *count = c;
3670 return 1;
3671}
3672
3673/* Return 1 if the VFP ABI should be used for passing arguments to and
3674 returning values from a function of type FUNC_TYPE, 0
3675 otherwise. */
3676
3677static int
3678arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3679{
3680 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3681 /* Variadic functions always use the base ABI. Assume that functions
3682 without debug info are not variadic. */
3683 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3684 return 0;
3685 /* The VFP ABI is only supported as a variant of AAPCS. */
3686 if (tdep->arm_abi != ARM_ABI_AAPCS)
3687 return 0;
3688 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3689}
3690
3691/* We currently only support passing parameters in integer registers, which
3692 conforms with GCC's default model, and VFP argument passing following
3693 the VFP variant of AAPCS. Several other variants exist and
2dd604e7
RE
3694 we should probably support some of them based on the selected ABI. */
3695
3696static CORE_ADDR
7d9b040b 3697arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
6a65450a
AC
3698 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3699 struct value **args, CORE_ADDR sp, int struct_return,
3700 CORE_ADDR struct_addr)
2dd604e7 3701{
e17a4113 3702 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2dd604e7
RE
3703 int argnum;
3704 int argreg;
3705 int nstack;
3706 struct stack_item *si = NULL;
90445bd3
DJ
3707 int use_vfp_abi;
3708 struct type *ftype;
3709 unsigned vfp_regs_free = (1 << 16) - 1;
3710
3711 /* Determine the type of this function and whether the VFP ABI
3712 applies. */
3713 ftype = check_typedef (value_type (function));
3714 if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3715 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3716 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
2dd604e7 3717
6a65450a
AC
3718 /* Set the return address. For the ARM, the return breakpoint is
3719 always at BP_ADDR. */
9779414d 3720 if (arm_pc_is_thumb (gdbarch, bp_addr))
9dca5578 3721 bp_addr |= 1;
6a65450a 3722 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
2dd604e7
RE
3723
3724 /* Walk through the list of args and determine how large a temporary
3725 stack is required. Need to take care here as structs may be
7a9dd1b2 3726 passed on the stack, and we have to push them. */
2dd604e7
RE
3727 nstack = 0;
3728
3729 argreg = ARM_A1_REGNUM;
3730 nstack = 0;
3731
2dd604e7
RE
3732 /* The struct_return pointer occupies the first parameter
3733 passing register. */
3734 if (struct_return)
3735 {
3736 if (arm_debug)
5af949e3 3737 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
2af46ca0 3738 gdbarch_register_name (gdbarch, argreg),
5af949e3 3739 paddress (gdbarch, struct_addr));
2dd604e7
RE
3740 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3741 argreg++;
3742 }
3743
3744 for (argnum = 0; argnum < nargs; argnum++)
3745 {
3746 int len;
3747 struct type *arg_type;
3748 struct type *target_type;
3749 enum type_code typecode;
8c6363cf 3750 const bfd_byte *val;
2af48f68 3751 int align;
90445bd3
DJ
3752 enum arm_vfp_cprc_base_type vfp_base_type;
3753 int vfp_base_count;
3754 int may_use_core_reg = 1;
2dd604e7 3755
df407dfe 3756 arg_type = check_typedef (value_type (args[argnum]));
2dd604e7
RE
3757 len = TYPE_LENGTH (arg_type);
3758 target_type = TYPE_TARGET_TYPE (arg_type);
3759 typecode = TYPE_CODE (arg_type);
8c6363cf 3760 val = value_contents (args[argnum]);
2dd604e7 3761
2af48f68
PB
3762 align = arm_type_align (arg_type);
3763 /* Round alignment up to a whole number of words. */
3764 align = (align + INT_REGISTER_SIZE - 1) & ~(INT_REGISTER_SIZE - 1);
3765 /* Different ABIs have different maximum alignments. */
3766 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3767 {
3768 /* The APCS ABI only requires word alignment. */
3769 align = INT_REGISTER_SIZE;
3770 }
3771 else
3772 {
3773 /* The AAPCS requires at most doubleword alignment. */
3774 if (align > INT_REGISTER_SIZE * 2)
3775 align = INT_REGISTER_SIZE * 2;
3776 }
3777
90445bd3
DJ
3778 if (use_vfp_abi
3779 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3780 &vfp_base_count))
3781 {
3782 int regno;
3783 int unit_length;
3784 int shift;
3785 unsigned mask;
3786
3787 /* Because this is a CPRC it cannot go in a core register or
3788 cause a core register to be skipped for alignment.
3789 Either it goes in VFP registers and the rest of this loop
3790 iteration is skipped for this argument, or it goes on the
3791 stack (and the stack alignment code is correct for this
3792 case). */
3793 may_use_core_reg = 0;
3794
3795 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3796 shift = unit_length / 4;
3797 mask = (1 << (shift * vfp_base_count)) - 1;
3798 for (regno = 0; regno < 16; regno += shift)
3799 if (((vfp_regs_free >> regno) & mask) == mask)
3800 break;
3801
3802 if (regno < 16)
3803 {
3804 int reg_char;
3805 int reg_scaled;
3806 int i;
3807
3808 vfp_regs_free &= ~(mask << regno);
3809 reg_scaled = regno / shift;
3810 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3811 for (i = 0; i < vfp_base_count; i++)
3812 {
3813 char name_buf[4];
3814 int regnum;
58d6951d
DJ
3815 if (reg_char == 'q')
3816 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
90445bd3 3817 val + i * unit_length);
58d6951d
DJ
3818 else
3819 {
8c042590
PM
3820 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3821 reg_char, reg_scaled + i);
58d6951d
DJ
3822 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3823 strlen (name_buf));
3824 regcache_cooked_write (regcache, regnum,
3825 val + i * unit_length);
3826 }
90445bd3
DJ
3827 }
3828 continue;
3829 }
3830 else
3831 {
3832 /* This CPRC could not go in VFP registers, so all VFP
3833 registers are now marked as used. */
3834 vfp_regs_free = 0;
3835 }
3836 }
3837
2af48f68
PB
3838 /* Push stack padding for dowubleword alignment. */
3839 if (nstack & (align - 1))
3840 {
3841 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3842 nstack += INT_REGISTER_SIZE;
3843 }
3844
3845 /* Doubleword aligned quantities must go in even register pairs. */
90445bd3
DJ
3846 if (may_use_core_reg
3847 && argreg <= ARM_LAST_ARG_REGNUM
2af48f68
PB
3848 && align > INT_REGISTER_SIZE
3849 && argreg & 1)
3850 argreg++;
3851
2dd604e7
RE
3852 /* If the argument is a pointer to a function, and it is a
3853 Thumb function, create a LOCAL copy of the value and set
3854 the THUMB bit in it. */
3855 if (TYPE_CODE_PTR == typecode
3856 && target_type != NULL
f96b8fa0 3857 && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
2dd604e7 3858 {
e17a4113 3859 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
9779414d 3860 if (arm_pc_is_thumb (gdbarch, regval))
2dd604e7 3861 {
224c3ddb 3862 bfd_byte *copy = (bfd_byte *) alloca (len);
8c6363cf 3863 store_unsigned_integer (copy, len, byte_order,
e17a4113 3864 MAKE_THUMB_ADDR (regval));
8c6363cf 3865 val = copy;
2dd604e7
RE
3866 }
3867 }
3868
3869 /* Copy the argument to general registers or the stack in
3870 register-sized pieces. Large arguments are split between
3871 registers and stack. */
3872 while (len > 0)
3873 {
f0c9063c 3874 int partial_len = len < INT_REGISTER_SIZE ? len : INT_REGISTER_SIZE;
ef9bd0b8
YQ
3875 CORE_ADDR regval
3876 = extract_unsigned_integer (val, partial_len, byte_order);
2dd604e7 3877
90445bd3 3878 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
2dd604e7
RE
3879 {
3880 /* The argument is being passed in a general purpose
3881 register. */
e17a4113 3882 if (byte_order == BFD_ENDIAN_BIG)
8bf8793c 3883 regval <<= (INT_REGISTER_SIZE - partial_len) * 8;
2dd604e7
RE
3884 if (arm_debug)
3885 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
c9f4d572
UW
3886 argnum,
3887 gdbarch_register_name
2af46ca0 3888 (gdbarch, argreg),
f0c9063c 3889 phex (regval, INT_REGISTER_SIZE));
2dd604e7
RE
3890 regcache_cooked_write_unsigned (regcache, argreg, regval);
3891 argreg++;
3892 }
3893 else
3894 {
ef9bd0b8
YQ
3895 gdb_byte buf[INT_REGISTER_SIZE];
3896
3897 memset (buf, 0, sizeof (buf));
3898 store_unsigned_integer (buf, partial_len, byte_order, regval);
3899
2dd604e7
RE
3900 /* Push the arguments onto the stack. */
3901 if (arm_debug)
3902 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3903 argnum, nstack);
ef9bd0b8 3904 si = push_stack_item (si, buf, INT_REGISTER_SIZE);
f0c9063c 3905 nstack += INT_REGISTER_SIZE;
2dd604e7
RE
3906 }
3907
3908 len -= partial_len;
3909 val += partial_len;
3910 }
3911 }
3912 /* If we have an odd number of words to push, then decrement the stack
3913 by one word now, so first stack argument will be dword aligned. */
3914 if (nstack & 4)
3915 sp -= 4;
3916
3917 while (si)
3918 {
3919 sp -= si->len;
3920 write_memory (sp, si->data, si->len);
3921 si = pop_stack_item (si);
3922 }
3923
3924 /* Finally, update teh SP register. */
3925 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3926
3927 return sp;
3928}
3929
f53f0d0b
PB
3930
3931/* Always align the frame to an 8-byte boundary. This is required on
3932 some platforms and harmless on the rest. */
3933
3934static CORE_ADDR
3935arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3936{
3937 /* Align the stack to eight bytes. */
3938 return sp & ~ (CORE_ADDR) 7;
3939}
3940
c906108c 3941static void
12b27276 3942print_fpu_flags (struct ui_file *file, int flags)
c906108c 3943{
c5aa993b 3944 if (flags & (1 << 0))
12b27276 3945 fputs_filtered ("IVO ", file);
c5aa993b 3946 if (flags & (1 << 1))
12b27276 3947 fputs_filtered ("DVZ ", file);
c5aa993b 3948 if (flags & (1 << 2))
12b27276 3949 fputs_filtered ("OFL ", file);
c5aa993b 3950 if (flags & (1 << 3))
12b27276 3951 fputs_filtered ("UFL ", file);
c5aa993b 3952 if (flags & (1 << 4))
12b27276
WN
3953 fputs_filtered ("INX ", file);
3954 fputc_filtered ('\n', file);
c906108c
SS
3955}
3956
5e74b15c
RE
3957/* Print interesting information about the floating point processor
3958 (if present) or emulator. */
34e8f22d 3959static void
d855c300 3960arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
23e3a7ac 3961 struct frame_info *frame, const char *args)
c906108c 3962{
9c9acae0 3963 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
c5aa993b
JM
3964 int type;
3965
3966 type = (status >> 24) & 127;
edefbb7c 3967 if (status & (1 << 31))
12b27276 3968 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
edefbb7c 3969 else
12b27276 3970 fprintf_filtered (file, _("Software FPU type %d\n"), type);
edefbb7c 3971 /* i18n: [floating point unit] mask */
12b27276
WN
3972 fputs_filtered (_("mask: "), file);
3973 print_fpu_flags (file, status >> 16);
edefbb7c 3974 /* i18n: [floating point unit] flags */
12b27276
WN
3975 fputs_filtered (_("flags: "), file);
3976 print_fpu_flags (file, status);
c906108c
SS
3977}
3978
27067745
UW
3979/* Construct the ARM extended floating point type. */
3980static struct type *
3981arm_ext_type (struct gdbarch *gdbarch)
3982{
3983 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3984
3985 if (!tdep->arm_ext_type)
3986 tdep->arm_ext_type
e9bb382b 3987 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
27067745
UW
3988 floatformats_arm_ext);
3989
3990 return tdep->arm_ext_type;
3991}
3992
58d6951d
DJ
3993static struct type *
3994arm_neon_double_type (struct gdbarch *gdbarch)
3995{
3996 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3997
3998 if (tdep->neon_double_type == NULL)
3999 {
4000 struct type *t, *elem;
4001
4002 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
4003 TYPE_CODE_UNION);
4004 elem = builtin_type (gdbarch)->builtin_uint8;
4005 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
4006 elem = builtin_type (gdbarch)->builtin_uint16;
4007 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
4008 elem = builtin_type (gdbarch)->builtin_uint32;
4009 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
4010 elem = builtin_type (gdbarch)->builtin_uint64;
4011 append_composite_type_field (t, "u64", elem);
4012 elem = builtin_type (gdbarch)->builtin_float;
4013 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
4014 elem = builtin_type (gdbarch)->builtin_double;
4015 append_composite_type_field (t, "f64", elem);
4016
4017 TYPE_VECTOR (t) = 1;
4018 TYPE_NAME (t) = "neon_d";
4019 tdep->neon_double_type = t;
4020 }
4021
4022 return tdep->neon_double_type;
4023}
4024
4025/* FIXME: The vector types are not correctly ordered on big-endian
4026 targets. Just as s0 is the low bits of d0, d0[0] is also the low
4027 bits of d0 - regardless of what unit size is being held in d0. So
4028 the offset of the first uint8 in d0 is 7, but the offset of the
4029 first float is 4. This code works as-is for little-endian
4030 targets. */
4031
4032static struct type *
4033arm_neon_quad_type (struct gdbarch *gdbarch)
4034{
4035 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4036
4037 if (tdep->neon_quad_type == NULL)
4038 {
4039 struct type *t, *elem;
4040
4041 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
4042 TYPE_CODE_UNION);
4043 elem = builtin_type (gdbarch)->builtin_uint8;
4044 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
4045 elem = builtin_type (gdbarch)->builtin_uint16;
4046 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
4047 elem = builtin_type (gdbarch)->builtin_uint32;
4048 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
4049 elem = builtin_type (gdbarch)->builtin_uint64;
4050 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
4051 elem = builtin_type (gdbarch)->builtin_float;
4052 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
4053 elem = builtin_type (gdbarch)->builtin_double;
4054 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
4055
4056 TYPE_VECTOR (t) = 1;
4057 TYPE_NAME (t) = "neon_q";
4058 tdep->neon_quad_type = t;
4059 }
4060
4061 return tdep->neon_quad_type;
4062}
4063
34e8f22d
RE
4064/* Return the GDB type object for the "standard" data type of data in
4065 register N. */
4066
4067static struct type *
7a5ea0d4 4068arm_register_type (struct gdbarch *gdbarch, int regnum)
032758dc 4069{
58d6951d
DJ
4070 int num_regs = gdbarch_num_regs (gdbarch);
4071
4072 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
4073 && regnum >= num_regs && regnum < num_regs + 32)
4074 return builtin_type (gdbarch)->builtin_float;
4075
4076 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
4077 && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
4078 return arm_neon_quad_type (gdbarch);
4079
4080 /* If the target description has register information, we are only
4081 in this function so that we can override the types of
4082 double-precision registers for NEON. */
4083 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
4084 {
4085 struct type *t = tdesc_register_type (gdbarch, regnum);
4086
4087 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
4088 && TYPE_CODE (t) == TYPE_CODE_FLT
4089 && gdbarch_tdep (gdbarch)->have_neon)
4090 return arm_neon_double_type (gdbarch);
4091 else
4092 return t;
4093 }
4094
34e8f22d 4095 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
58d6951d
DJ
4096 {
4097 if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
4098 return builtin_type (gdbarch)->builtin_void;
4099
4100 return arm_ext_type (gdbarch);
4101 }
e4c16157 4102 else if (regnum == ARM_SP_REGNUM)
0dfff4cb 4103 return builtin_type (gdbarch)->builtin_data_ptr;
e4c16157 4104 else if (regnum == ARM_PC_REGNUM)
0dfff4cb 4105 return builtin_type (gdbarch)->builtin_func_ptr;
ff6f572f
DJ
4106 else if (regnum >= ARRAY_SIZE (arm_register_names))
4107 /* These registers are only supported on targets which supply
4108 an XML description. */
df4df182 4109 return builtin_type (gdbarch)->builtin_int0;
032758dc 4110 else
df4df182 4111 return builtin_type (gdbarch)->builtin_uint32;
032758dc
AC
4112}
4113
ff6f572f
DJ
4114/* Map a DWARF register REGNUM onto the appropriate GDB register
4115 number. */
4116
4117static int
d3f73121 4118arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
ff6f572f
DJ
4119{
4120 /* Core integer regs. */
4121 if (reg >= 0 && reg <= 15)
4122 return reg;
4123
4124 /* Legacy FPA encoding. These were once used in a way which
4125 overlapped with VFP register numbering, so their use is
4126 discouraged, but GDB doesn't support the ARM toolchain
4127 which used them for VFP. */
4128 if (reg >= 16 && reg <= 23)
4129 return ARM_F0_REGNUM + reg - 16;
4130
4131 /* New assignments for the FPA registers. */
4132 if (reg >= 96 && reg <= 103)
4133 return ARM_F0_REGNUM + reg - 96;
4134
4135 /* WMMX register assignments. */
4136 if (reg >= 104 && reg <= 111)
4137 return ARM_WCGR0_REGNUM + reg - 104;
4138
4139 if (reg >= 112 && reg <= 127)
4140 return ARM_WR0_REGNUM + reg - 112;
4141
4142 if (reg >= 192 && reg <= 199)
4143 return ARM_WC0_REGNUM + reg - 192;
4144
58d6951d
DJ
4145 /* VFP v2 registers. A double precision value is actually
4146 in d1 rather than s2, but the ABI only defines numbering
4147 for the single precision registers. This will "just work"
4148 in GDB for little endian targets (we'll read eight bytes,
4149 starting in s0 and then progressing to s1), but will be
4150 reversed on big endian targets with VFP. This won't
4151 be a problem for the new Neon quad registers; you're supposed
4152 to use DW_OP_piece for those. */
4153 if (reg >= 64 && reg <= 95)
4154 {
4155 char name_buf[4];
4156
8c042590 4157 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
58d6951d
DJ
4158 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4159 strlen (name_buf));
4160 }
4161
4162 /* VFP v3 / Neon registers. This range is also used for VFP v2
4163 registers, except that it now describes d0 instead of s0. */
4164 if (reg >= 256 && reg <= 287)
4165 {
4166 char name_buf[4];
4167
8c042590 4168 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
58d6951d
DJ
4169 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4170 strlen (name_buf));
4171 }
4172
ff6f572f
DJ
4173 return -1;
4174}
4175
26216b98
AC
4176/* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4177static int
e7faf938 4178arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
26216b98
AC
4179{
4180 int reg = regnum;
e7faf938 4181 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
26216b98 4182
ff6f572f
DJ
4183 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4184 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4185
4186 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4187 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4188
4189 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4190 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4191
26216b98
AC
4192 if (reg < NUM_GREGS)
4193 return SIM_ARM_R0_REGNUM + reg;
4194 reg -= NUM_GREGS;
4195
4196 if (reg < NUM_FREGS)
4197 return SIM_ARM_FP0_REGNUM + reg;
4198 reg -= NUM_FREGS;
4199
4200 if (reg < NUM_SREGS)
4201 return SIM_ARM_FPS_REGNUM + reg;
4202 reg -= NUM_SREGS;
4203
edefbb7c 4204 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
26216b98 4205}
34e8f22d 4206
a37b3cc0
AC
4207/* NOTE: cagney/2001-08-20: Both convert_from_extended() and
4208 convert_to_extended() use floatformat_arm_ext_littlebyte_bigword.
4209 It is thought that this is is the floating-point register format on
4210 little-endian systems. */
c906108c 4211
ed9a39eb 4212static void
b508a996 4213convert_from_extended (const struct floatformat *fmt, const void *ptr,
be8626e0 4214 void *dbl, int endianess)
c906108c 4215{
a37b3cc0 4216 DOUBLEST d;
be8626e0
MD
4217
4218 if (endianess == BFD_ENDIAN_BIG)
a37b3cc0
AC
4219 floatformat_to_doublest (&floatformat_arm_ext_big, ptr, &d);
4220 else
4221 floatformat_to_doublest (&floatformat_arm_ext_littlebyte_bigword,
4222 ptr, &d);
b508a996 4223 floatformat_from_doublest (fmt, &d, dbl);
c906108c
SS
4224}
4225
34e8f22d 4226static void
be8626e0
MD
4227convert_to_extended (const struct floatformat *fmt, void *dbl, const void *ptr,
4228 int endianess)
c906108c 4229{
a37b3cc0 4230 DOUBLEST d;
be8626e0 4231
b508a996 4232 floatformat_to_doublest (fmt, ptr, &d);
be8626e0 4233 if (endianess == BFD_ENDIAN_BIG)
a37b3cc0
AC
4234 floatformat_from_doublest (&floatformat_arm_ext_big, &d, dbl);
4235 else
4236 floatformat_from_doublest (&floatformat_arm_ext_littlebyte_bigword,
4237 &d, dbl);
c906108c 4238}
ed9a39eb 4239
d9311bfa
AT
4240/* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
4241 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
4242 NULL if an error occurs. BUF is freed. */
c906108c 4243
d9311bfa
AT
4244static gdb_byte *
4245extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
4246 int old_len, int new_len)
4247{
4248 gdb_byte *new_buf;
4249 int bytes_to_read = new_len - old_len;
c906108c 4250
d9311bfa
AT
4251 new_buf = (gdb_byte *) xmalloc (new_len);
4252 memcpy (new_buf + bytes_to_read, buf, old_len);
4253 xfree (buf);
198cd59d 4254 if (target_read_code (endaddr - new_len, new_buf, bytes_to_read) != 0)
d9311bfa
AT
4255 {
4256 xfree (new_buf);
4257 return NULL;
c906108c 4258 }
d9311bfa 4259 return new_buf;
c906108c
SS
4260}
4261
d9311bfa
AT
4262/* An IT block is at most the 2-byte IT instruction followed by
4263 four 4-byte instructions. The furthest back we must search to
4264 find an IT block that affects the current instruction is thus
4265 2 + 3 * 4 == 14 bytes. */
4266#define MAX_IT_BLOCK_PREFIX 14
177321bd 4267
d9311bfa
AT
4268/* Use a quick scan if there are more than this many bytes of
4269 code. */
4270#define IT_SCAN_THRESHOLD 32
177321bd 4271
d9311bfa
AT
4272/* Adjust a breakpoint's address to move breakpoints out of IT blocks.
4273 A breakpoint in an IT block may not be hit, depending on the
4274 condition flags. */
ad527d2e 4275static CORE_ADDR
d9311bfa 4276arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
c906108c 4277{
d9311bfa
AT
4278 gdb_byte *buf;
4279 char map_type;
4280 CORE_ADDR boundary, func_start;
4281 int buf_len;
4282 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
4283 int i, any, last_it, last_it_count;
177321bd 4284
d9311bfa
AT
4285 /* If we are using BKPT breakpoints, none of this is necessary. */
4286 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
4287 return bpaddr;
177321bd 4288
d9311bfa
AT
4289 /* ARM mode does not have this problem. */
4290 if (!arm_pc_is_thumb (gdbarch, bpaddr))
4291 return bpaddr;
177321bd 4292
d9311bfa
AT
4293 /* We are setting a breakpoint in Thumb code that could potentially
4294 contain an IT block. The first step is to find how much Thumb
4295 code there is; we do not need to read outside of known Thumb
4296 sequences. */
4297 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
4298 if (map_type == 0)
4299 /* Thumb-2 code must have mapping symbols to have a chance. */
4300 return bpaddr;
9dca5578 4301
d9311bfa 4302 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
177321bd 4303
d9311bfa
AT
4304 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
4305 && func_start > boundary)
4306 boundary = func_start;
9dca5578 4307
d9311bfa
AT
4308 /* Search for a candidate IT instruction. We have to do some fancy
4309 footwork to distinguish a real IT instruction from the second
4310 half of a 32-bit instruction, but there is no need for that if
4311 there's no candidate. */
325fac50 4312 buf_len = std::min (bpaddr - boundary, (CORE_ADDR) MAX_IT_BLOCK_PREFIX);
d9311bfa
AT
4313 if (buf_len == 0)
4314 /* No room for an IT instruction. */
4315 return bpaddr;
c906108c 4316
d9311bfa 4317 buf = (gdb_byte *) xmalloc (buf_len);
198cd59d 4318 if (target_read_code (bpaddr - buf_len, buf, buf_len) != 0)
d9311bfa
AT
4319 return bpaddr;
4320 any = 0;
4321 for (i = 0; i < buf_len; i += 2)
c906108c 4322 {
d9311bfa
AT
4323 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4324 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
25b41d01 4325 {
d9311bfa
AT
4326 any = 1;
4327 break;
25b41d01 4328 }
c906108c 4329 }
d9311bfa
AT
4330
4331 if (any == 0)
c906108c 4332 {
d9311bfa
AT
4333 xfree (buf);
4334 return bpaddr;
f9d67f43
DJ
4335 }
4336
4337 /* OK, the code bytes before this instruction contain at least one
4338 halfword which resembles an IT instruction. We know that it's
4339 Thumb code, but there are still two possibilities. Either the
4340 halfword really is an IT instruction, or it is the second half of
4341 a 32-bit Thumb instruction. The only way we can tell is to
4342 scan forwards from a known instruction boundary. */
4343 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
4344 {
4345 int definite;
4346
4347 /* There's a lot of code before this instruction. Start with an
4348 optimistic search; it's easy to recognize halfwords that can
4349 not be the start of a 32-bit instruction, and use that to
4350 lock on to the instruction boundaries. */
4351 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
4352 if (buf == NULL)
4353 return bpaddr;
4354 buf_len = IT_SCAN_THRESHOLD;
4355
4356 definite = 0;
4357 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
4358 {
4359 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4360 if (thumb_insn_size (inst1) == 2)
4361 {
4362 definite = 1;
4363 break;
4364 }
4365 }
4366
4367 /* At this point, if DEFINITE, BUF[I] is the first place we
4368 are sure that we know the instruction boundaries, and it is far
4369 enough from BPADDR that we could not miss an IT instruction
4370 affecting BPADDR. If ! DEFINITE, give up - start from a
4371 known boundary. */
4372 if (! definite)
4373 {
0963b4bd
MS
4374 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
4375 bpaddr - boundary);
f9d67f43
DJ
4376 if (buf == NULL)
4377 return bpaddr;
4378 buf_len = bpaddr - boundary;
4379 i = 0;
4380 }
4381 }
4382 else
4383 {
4384 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
4385 if (buf == NULL)
4386 return bpaddr;
4387 buf_len = bpaddr - boundary;
4388 i = 0;
4389 }
4390
4391 /* Scan forwards. Find the last IT instruction before BPADDR. */
4392 last_it = -1;
4393 last_it_count = 0;
4394 while (i < buf_len)
4395 {
4396 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4397 last_it_count--;
4398 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4399 {
4400 last_it = i;
4401 if (inst1 & 0x0001)
4402 last_it_count = 4;
4403 else if (inst1 & 0x0002)
4404 last_it_count = 3;
4405 else if (inst1 & 0x0004)
4406 last_it_count = 2;
4407 else
4408 last_it_count = 1;
4409 }
4410 i += thumb_insn_size (inst1);
4411 }
4412
4413 xfree (buf);
4414
4415 if (last_it == -1)
4416 /* There wasn't really an IT instruction after all. */
4417 return bpaddr;
4418
4419 if (last_it_count < 1)
4420 /* It was too far away. */
4421 return bpaddr;
4422
4423 /* This really is a trouble spot. Move the breakpoint to the IT
4424 instruction. */
4425 return bpaddr - buf_len + last_it;
4426}
4427
cca44b1b 4428/* ARM displaced stepping support.
c906108c 4429
cca44b1b 4430 Generally ARM displaced stepping works as follows:
c906108c 4431
cca44b1b 4432 1. When an instruction is to be single-stepped, it is first decoded by
2ba163c8
SM
4433 arm_process_displaced_insn. Depending on the type of instruction, it is
4434 then copied to a scratch location, possibly in a modified form. The
4435 copy_* set of functions performs such modification, as necessary. A
4436 breakpoint is placed after the modified instruction in the scratch space
4437 to return control to GDB. Note in particular that instructions which
4438 modify the PC will no longer do so after modification.
c5aa993b 4439
cca44b1b
JB
4440 2. The instruction is single-stepped, by setting the PC to the scratch
4441 location address, and resuming. Control returns to GDB when the
4442 breakpoint is hit.
c5aa993b 4443
cca44b1b
JB
4444 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
4445 function used for the current instruction. This function's job is to
4446 put the CPU/memory state back to what it would have been if the
4447 instruction had been executed unmodified in its original location. */
c5aa993b 4448
cca44b1b
JB
4449/* NOP instruction (mov r0, r0). */
4450#define ARM_NOP 0xe1a00000
34518530 4451#define THUMB_NOP 0x4600
cca44b1b
JB
4452
4453/* Helper for register reads for displaced stepping. In particular, this
4454 returns the PC as it would be seen by the instruction at its original
4455 location. */
4456
4457ULONGEST
36073a92
YQ
4458displaced_read_reg (struct regcache *regs, struct displaced_step_closure *dsc,
4459 int regno)
cca44b1b
JB
4460{
4461 ULONGEST ret;
36073a92 4462 CORE_ADDR from = dsc->insn_addr;
cca44b1b 4463
bf9f652a 4464 if (regno == ARM_PC_REGNUM)
cca44b1b 4465 {
4db71c0b
YQ
4466 /* Compute pipeline offset:
4467 - When executing an ARM instruction, PC reads as the address of the
4468 current instruction plus 8.
4469 - When executing a Thumb instruction, PC reads as the address of the
4470 current instruction plus 4. */
4471
36073a92 4472 if (!dsc->is_thumb)
4db71c0b
YQ
4473 from += 8;
4474 else
4475 from += 4;
4476
cca44b1b
JB
4477 if (debug_displaced)
4478 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
4db71c0b
YQ
4479 (unsigned long) from);
4480 return (ULONGEST) from;
cca44b1b 4481 }
c906108c 4482 else
cca44b1b
JB
4483 {
4484 regcache_cooked_read_unsigned (regs, regno, &ret);
4485 if (debug_displaced)
4486 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
4487 regno, (unsigned long) ret);
4488 return ret;
4489 }
c906108c
SS
4490}
4491
cca44b1b
JB
4492static int
4493displaced_in_arm_mode (struct regcache *regs)
4494{
4495 ULONGEST ps;
9779414d 4496 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
66e810cd 4497
cca44b1b 4498 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
66e810cd 4499
9779414d 4500 return (ps & t_bit) == 0;
cca44b1b 4501}
66e810cd 4502
cca44b1b 4503/* Write to the PC as from a branch instruction. */
c906108c 4504
cca44b1b 4505static void
36073a92
YQ
4506branch_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4507 ULONGEST val)
c906108c 4508{
36073a92 4509 if (!dsc->is_thumb)
cca44b1b
JB
4510 /* Note: If bits 0/1 are set, this branch would be unpredictable for
4511 architecture versions < 6. */
0963b4bd
MS
4512 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4513 val & ~(ULONGEST) 0x3);
cca44b1b 4514 else
0963b4bd
MS
4515 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4516 val & ~(ULONGEST) 0x1);
cca44b1b 4517}
66e810cd 4518
cca44b1b
JB
4519/* Write to the PC as from a branch-exchange instruction. */
4520
4521static void
4522bx_write_pc (struct regcache *regs, ULONGEST val)
4523{
4524 ULONGEST ps;
9779414d 4525 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
cca44b1b
JB
4526
4527 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4528
4529 if ((val & 1) == 1)
c906108c 4530 {
9779414d 4531 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
cca44b1b
JB
4532 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
4533 }
4534 else if ((val & 2) == 0)
4535 {
9779414d 4536 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
cca44b1b 4537 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
c906108c
SS
4538 }
4539 else
4540 {
cca44b1b
JB
4541 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
4542 mode, align dest to 4 bytes). */
4543 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
9779414d 4544 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
cca44b1b 4545 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
c906108c
SS
4546 }
4547}
ed9a39eb 4548
cca44b1b 4549/* Write to the PC as if from a load instruction. */
ed9a39eb 4550
34e8f22d 4551static void
36073a92
YQ
4552load_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4553 ULONGEST val)
ed9a39eb 4554{
cca44b1b
JB
4555 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
4556 bx_write_pc (regs, val);
4557 else
36073a92 4558 branch_write_pc (regs, dsc, val);
cca44b1b 4559}
be8626e0 4560
cca44b1b
JB
4561/* Write to the PC as if from an ALU instruction. */
4562
4563static void
36073a92
YQ
4564alu_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4565 ULONGEST val)
cca44b1b 4566{
36073a92 4567 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
cca44b1b
JB
4568 bx_write_pc (regs, val);
4569 else
36073a92 4570 branch_write_pc (regs, dsc, val);
cca44b1b
JB
4571}
4572
4573/* Helper for writing to registers for displaced stepping. Writing to the PC
4574 has a varying effects depending on the instruction which does the write:
4575 this is controlled by the WRITE_PC argument. */
4576
4577void
4578displaced_write_reg (struct regcache *regs, struct displaced_step_closure *dsc,
4579 int regno, ULONGEST val, enum pc_write_style write_pc)
4580{
bf9f652a 4581 if (regno == ARM_PC_REGNUM)
08216dd7 4582 {
cca44b1b
JB
4583 if (debug_displaced)
4584 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
4585 (unsigned long) val);
4586 switch (write_pc)
08216dd7 4587 {
cca44b1b 4588 case BRANCH_WRITE_PC:
36073a92 4589 branch_write_pc (regs, dsc, val);
08216dd7
RE
4590 break;
4591
cca44b1b
JB
4592 case BX_WRITE_PC:
4593 bx_write_pc (regs, val);
4594 break;
4595
4596 case LOAD_WRITE_PC:
36073a92 4597 load_write_pc (regs, dsc, val);
cca44b1b
JB
4598 break;
4599
4600 case ALU_WRITE_PC:
36073a92 4601 alu_write_pc (regs, dsc, val);
cca44b1b
JB
4602 break;
4603
4604 case CANNOT_WRITE_PC:
4605 warning (_("Instruction wrote to PC in an unexpected way when "
4606 "single-stepping"));
08216dd7
RE
4607 break;
4608
4609 default:
97b9747c
JB
4610 internal_error (__FILE__, __LINE__,
4611 _("Invalid argument to displaced_write_reg"));
08216dd7 4612 }
b508a996 4613
cca44b1b 4614 dsc->wrote_to_pc = 1;
b508a996 4615 }
ed9a39eb 4616 else
b508a996 4617 {
cca44b1b
JB
4618 if (debug_displaced)
4619 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
4620 regno, (unsigned long) val);
4621 regcache_cooked_write_unsigned (regs, regno, val);
b508a996 4622 }
34e8f22d
RE
4623}
4624
cca44b1b
JB
4625/* This function is used to concisely determine if an instruction INSN
4626 references PC. Register fields of interest in INSN should have the
0963b4bd
MS
4627 corresponding fields of BITMASK set to 0b1111. The function
4628 returns return 1 if any of these fields in INSN reference the PC
4629 (also 0b1111, r15), else it returns 0. */
67255d04
RE
4630
4631static int
cca44b1b 4632insn_references_pc (uint32_t insn, uint32_t bitmask)
67255d04 4633{
cca44b1b 4634 uint32_t lowbit = 1;
67255d04 4635
cca44b1b
JB
4636 while (bitmask != 0)
4637 {
4638 uint32_t mask;
44e1a9eb 4639
cca44b1b
JB
4640 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
4641 ;
67255d04 4642
cca44b1b
JB
4643 if (!lowbit)
4644 break;
67255d04 4645
cca44b1b 4646 mask = lowbit * 0xf;
67255d04 4647
cca44b1b
JB
4648 if ((insn & mask) == mask)
4649 return 1;
4650
4651 bitmask &= ~mask;
67255d04
RE
4652 }
4653
cca44b1b
JB
4654 return 0;
4655}
2af48f68 4656
cca44b1b
JB
4657/* The simplest copy function. Many instructions have the same effect no
4658 matter what address they are executed at: in those cases, use this. */
67255d04 4659
cca44b1b 4660static int
7ff120b4
YQ
4661arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
4662 const char *iname, struct displaced_step_closure *dsc)
cca44b1b
JB
4663{
4664 if (debug_displaced)
4665 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
4666 "opcode/class '%s' unmodified\n", (unsigned long) insn,
4667 iname);
67255d04 4668
cca44b1b 4669 dsc->modinsn[0] = insn;
67255d04 4670
cca44b1b
JB
4671 return 0;
4672}
4673
34518530
YQ
4674static int
4675thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
4676 uint16_t insn2, const char *iname,
4677 struct displaced_step_closure *dsc)
4678{
4679 if (debug_displaced)
4680 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
4681 "opcode/class '%s' unmodified\n", insn1, insn2,
4682 iname);
4683
4684 dsc->modinsn[0] = insn1;
4685 dsc->modinsn[1] = insn2;
4686 dsc->numinsns = 2;
4687
4688 return 0;
4689}
4690
4691/* Copy 16-bit Thumb(Thumb and 16-bit Thumb-2) instruction without any
4692 modification. */
4693static int
615234c1 4694thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, uint16_t insn,
34518530
YQ
4695 const char *iname,
4696 struct displaced_step_closure *dsc)
4697{
4698 if (debug_displaced)
4699 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
4700 "opcode/class '%s' unmodified\n", insn,
4701 iname);
4702
4703 dsc->modinsn[0] = insn;
4704
4705 return 0;
4706}
4707
cca44b1b
JB
4708/* Preload instructions with immediate offset. */
4709
4710static void
6e39997a 4711cleanup_preload (struct gdbarch *gdbarch,
cca44b1b
JB
4712 struct regcache *regs, struct displaced_step_closure *dsc)
4713{
4714 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4715 if (!dsc->u.preload.immed)
4716 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
4717}
4718
7ff120b4
YQ
4719static void
4720install_preload (struct gdbarch *gdbarch, struct regcache *regs,
4721 struct displaced_step_closure *dsc, unsigned int rn)
cca44b1b 4722{
cca44b1b 4723 ULONGEST rn_val;
cca44b1b
JB
4724 /* Preload instructions:
4725
4726 {pli/pld} [rn, #+/-imm]
4727 ->
4728 {pli/pld} [r0, #+/-imm]. */
4729
36073a92
YQ
4730 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4731 rn_val = displaced_read_reg (regs, dsc, rn);
cca44b1b 4732 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
cca44b1b
JB
4733 dsc->u.preload.immed = 1;
4734
cca44b1b 4735 dsc->cleanup = &cleanup_preload;
cca44b1b
JB
4736}
4737
cca44b1b 4738static int
7ff120b4 4739arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
cca44b1b
JB
4740 struct displaced_step_closure *dsc)
4741{
4742 unsigned int rn = bits (insn, 16, 19);
cca44b1b 4743
7ff120b4
YQ
4744 if (!insn_references_pc (insn, 0x000f0000ul))
4745 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
cca44b1b
JB
4746
4747 if (debug_displaced)
4748 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4749 (unsigned long) insn);
4750
7ff120b4
YQ
4751 dsc->modinsn[0] = insn & 0xfff0ffff;
4752
4753 install_preload (gdbarch, regs, dsc, rn);
4754
4755 return 0;
4756}
4757
34518530
YQ
4758static int
4759thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
4760 struct regcache *regs, struct displaced_step_closure *dsc)
4761{
4762 unsigned int rn = bits (insn1, 0, 3);
4763 unsigned int u_bit = bit (insn1, 7);
4764 int imm12 = bits (insn2, 0, 11);
4765 ULONGEST pc_val;
4766
4767 if (rn != ARM_PC_REGNUM)
4768 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
4769
4770 /* PC is only allowed to use in PLI (immediate,literal) Encoding T3, and
4771 PLD (literal) Encoding T1. */
4772 if (debug_displaced)
4773 fprintf_unfiltered (gdb_stdlog,
4774 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
4775 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
4776 imm12);
4777
4778 if (!u_bit)
4779 imm12 = -1 * imm12;
4780
4781 /* Rewrite instruction {pli/pld} PC imm12 into:
4782 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
4783
4784 {pli/pld} [r0, r1]
4785
4786 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
4787
4788 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4789 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4790
4791 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
4792
4793 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
4794 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
4795 dsc->u.preload.immed = 0;
4796
4797 /* {pli/pld} [r0, r1] */
4798 dsc->modinsn[0] = insn1 & 0xfff0;
4799 dsc->modinsn[1] = 0xf001;
4800 dsc->numinsns = 2;
4801
4802 dsc->cleanup = &cleanup_preload;
4803 return 0;
4804}
4805
7ff120b4
YQ
4806/* Preload instructions with register offset. */
4807
4808static void
4809install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
4810 struct displaced_step_closure *dsc, unsigned int rn,
4811 unsigned int rm)
4812{
4813 ULONGEST rn_val, rm_val;
4814
cca44b1b
JB
4815 /* Preload register-offset instructions:
4816
4817 {pli/pld} [rn, rm {, shift}]
4818 ->
4819 {pli/pld} [r0, r1 {, shift}]. */
4820
36073a92
YQ
4821 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4822 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4823 rn_val = displaced_read_reg (regs, dsc, rn);
4824 rm_val = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
4825 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4826 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
cca44b1b
JB
4827 dsc->u.preload.immed = 0;
4828
cca44b1b 4829 dsc->cleanup = &cleanup_preload;
7ff120b4
YQ
4830}
4831
4832static int
4833arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
4834 struct regcache *regs,
4835 struct displaced_step_closure *dsc)
4836{
4837 unsigned int rn = bits (insn, 16, 19);
4838 unsigned int rm = bits (insn, 0, 3);
4839
4840
4841 if (!insn_references_pc (insn, 0x000f000ful))
4842 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
4843
4844 if (debug_displaced)
4845 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4846 (unsigned long) insn);
4847
4848 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
cca44b1b 4849
7ff120b4 4850 install_preload_reg (gdbarch, regs, dsc, rn, rm);
cca44b1b
JB
4851 return 0;
4852}
4853
4854/* Copy/cleanup coprocessor load and store instructions. */
4855
4856static void
6e39997a 4857cleanup_copro_load_store (struct gdbarch *gdbarch,
cca44b1b
JB
4858 struct regcache *regs,
4859 struct displaced_step_closure *dsc)
4860{
36073a92 4861 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
cca44b1b
JB
4862
4863 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4864
4865 if (dsc->u.ldst.writeback)
4866 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
4867}
4868
7ff120b4
YQ
4869static void
4870install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
4871 struct displaced_step_closure *dsc,
4872 int writeback, unsigned int rn)
cca44b1b 4873{
cca44b1b 4874 ULONGEST rn_val;
cca44b1b 4875
cca44b1b
JB
4876 /* Coprocessor load/store instructions:
4877
4878 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
4879 ->
4880 {stc/stc2} [r0, #+/-imm].
4881
4882 ldc/ldc2 are handled identically. */
4883
36073a92
YQ
4884 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4885 rn_val = displaced_read_reg (regs, dsc, rn);
2b16b2e3
YQ
4886 /* PC should be 4-byte aligned. */
4887 rn_val = rn_val & 0xfffffffc;
cca44b1b
JB
4888 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4889
7ff120b4 4890 dsc->u.ldst.writeback = writeback;
cca44b1b
JB
4891 dsc->u.ldst.rn = rn;
4892
7ff120b4
YQ
4893 dsc->cleanup = &cleanup_copro_load_store;
4894}
4895
4896static int
4897arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
4898 struct regcache *regs,
4899 struct displaced_step_closure *dsc)
4900{
4901 unsigned int rn = bits (insn, 16, 19);
4902
4903 if (!insn_references_pc (insn, 0x000f0000ul))
4904 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
4905
4906 if (debug_displaced)
4907 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4908 "load/store insn %.8lx\n", (unsigned long) insn);
4909
cca44b1b
JB
4910 dsc->modinsn[0] = insn & 0xfff0ffff;
4911
7ff120b4 4912 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
cca44b1b
JB
4913
4914 return 0;
4915}
4916
34518530
YQ
4917static int
4918thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
4919 uint16_t insn2, struct regcache *regs,
4920 struct displaced_step_closure *dsc)
4921{
4922 unsigned int rn = bits (insn1, 0, 3);
4923
4924 if (rn != ARM_PC_REGNUM)
4925 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
4926 "copro load/store", dsc);
4927
4928 if (debug_displaced)
4929 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4930 "load/store insn %.4x%.4x\n", insn1, insn2);
4931
4932 dsc->modinsn[0] = insn1 & 0xfff0;
4933 dsc->modinsn[1] = insn2;
4934 dsc->numinsns = 2;
4935
4936 /* This function is called for copying instruction LDC/LDC2/VLDR, which
4937 doesn't support writeback, so pass 0. */
4938 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
4939
4940 return 0;
4941}
4942
cca44b1b
JB
4943/* Clean up branch instructions (actually perform the branch, by setting
4944 PC). */
4945
4946static void
6e39997a 4947cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
cca44b1b
JB
4948 struct displaced_step_closure *dsc)
4949{
36073a92 4950 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
cca44b1b
JB
4951 int branch_taken = condition_true (dsc->u.branch.cond, status);
4952 enum pc_write_style write_pc = dsc->u.branch.exchange
4953 ? BX_WRITE_PC : BRANCH_WRITE_PC;
4954
4955 if (!branch_taken)
4956 return;
4957
4958 if (dsc->u.branch.link)
4959 {
8c8dba6d
YQ
4960 /* The value of LR should be the next insn of current one. In order
4961 not to confuse logic hanlding later insn `bx lr', if current insn mode
4962 is Thumb, the bit 0 of LR value should be set to 1. */
4963 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
4964
4965 if (dsc->is_thumb)
4966 next_insn_addr |= 0x1;
4967
4968 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
4969 CANNOT_WRITE_PC);
cca44b1b
JB
4970 }
4971
bf9f652a 4972 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
cca44b1b
JB
4973}
4974
4975/* Copy B/BL/BLX instructions with immediate destinations. */
4976
7ff120b4
YQ
4977static void
4978install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
4979 struct displaced_step_closure *dsc,
4980 unsigned int cond, int exchange, int link, long offset)
4981{
4982 /* Implement "BL<cond> <label>" as:
4983
4984 Preparation: cond <- instruction condition
4985 Insn: mov r0, r0 (nop)
4986 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
4987
4988 B<cond> similar, but don't set r14 in cleanup. */
4989
4990 dsc->u.branch.cond = cond;
4991 dsc->u.branch.link = link;
4992 dsc->u.branch.exchange = exchange;
4993
2b16b2e3
YQ
4994 dsc->u.branch.dest = dsc->insn_addr;
4995 if (link && exchange)
4996 /* For BLX, offset is computed from the Align (PC, 4). */
4997 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
4998
7ff120b4 4999 if (dsc->is_thumb)
2b16b2e3 5000 dsc->u.branch.dest += 4 + offset;
7ff120b4 5001 else
2b16b2e3 5002 dsc->u.branch.dest += 8 + offset;
7ff120b4
YQ
5003
5004 dsc->cleanup = &cleanup_branch;
5005}
cca44b1b 5006static int
7ff120b4
YQ
5007arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
5008 struct regcache *regs, struct displaced_step_closure *dsc)
cca44b1b
JB
5009{
5010 unsigned int cond = bits (insn, 28, 31);
5011 int exchange = (cond == 0xf);
5012 int link = exchange || bit (insn, 24);
cca44b1b
JB
5013 long offset;
5014
5015 if (debug_displaced)
5016 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
5017 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
5018 (unsigned long) insn);
cca44b1b
JB
5019 if (exchange)
5020 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
5021 then arrange the switch into Thumb mode. */
5022 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
5023 else
5024 offset = bits (insn, 0, 23) << 2;
5025
5026 if (bit (offset, 25))
5027 offset = offset | ~0x3ffffff;
5028
cca44b1b
JB
5029 dsc->modinsn[0] = ARM_NOP;
5030
7ff120b4 5031 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
cca44b1b
JB
5032 return 0;
5033}
5034
34518530
YQ
5035static int
5036thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
5037 uint16_t insn2, struct regcache *regs,
5038 struct displaced_step_closure *dsc)
5039{
5040 int link = bit (insn2, 14);
5041 int exchange = link && !bit (insn2, 12);
5042 int cond = INST_AL;
5043 long offset = 0;
5044 int j1 = bit (insn2, 13);
5045 int j2 = bit (insn2, 11);
5046 int s = sbits (insn1, 10, 10);
5047 int i1 = !(j1 ^ bit (insn1, 10));
5048 int i2 = !(j2 ^ bit (insn1, 10));
5049
5050 if (!link && !exchange) /* B */
5051 {
5052 offset = (bits (insn2, 0, 10) << 1);
5053 if (bit (insn2, 12)) /* Encoding T4 */
5054 {
5055 offset |= (bits (insn1, 0, 9) << 12)
5056 | (i2 << 22)
5057 | (i1 << 23)
5058 | (s << 24);
5059 cond = INST_AL;
5060 }
5061 else /* Encoding T3 */
5062 {
5063 offset |= (bits (insn1, 0, 5) << 12)
5064 | (j1 << 18)
5065 | (j2 << 19)
5066 | (s << 20);
5067 cond = bits (insn1, 6, 9);
5068 }
5069 }
5070 else
5071 {
5072 offset = (bits (insn1, 0, 9) << 12);
5073 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
5074 offset |= exchange ?
5075 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
5076 }
5077
5078 if (debug_displaced)
5079 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
5080 "%.4x %.4x with offset %.8lx\n",
5081 link ? (exchange) ? "blx" : "bl" : "b",
5082 insn1, insn2, offset);
5083
5084 dsc->modinsn[0] = THUMB_NOP;
5085
5086 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
5087 return 0;
5088}
5089
5090/* Copy B Thumb instructions. */
5091static int
615234c1 5092thumb_copy_b (struct gdbarch *gdbarch, uint16_t insn,
34518530
YQ
5093 struct displaced_step_closure *dsc)
5094{
5095 unsigned int cond = 0;
5096 int offset = 0;
5097 unsigned short bit_12_15 = bits (insn, 12, 15);
5098 CORE_ADDR from = dsc->insn_addr;
5099
5100 if (bit_12_15 == 0xd)
5101 {
5102 /* offset = SignExtend (imm8:0, 32) */
5103 offset = sbits ((insn << 1), 0, 8);
5104 cond = bits (insn, 8, 11);
5105 }
5106 else if (bit_12_15 == 0xe) /* Encoding T2 */
5107 {
5108 offset = sbits ((insn << 1), 0, 11);
5109 cond = INST_AL;
5110 }
5111
5112 if (debug_displaced)
5113 fprintf_unfiltered (gdb_stdlog,
5114 "displaced: copying b immediate insn %.4x "
5115 "with offset %d\n", insn, offset);
5116
5117 dsc->u.branch.cond = cond;
5118 dsc->u.branch.link = 0;
5119 dsc->u.branch.exchange = 0;
5120 dsc->u.branch.dest = from + 4 + offset;
5121
5122 dsc->modinsn[0] = THUMB_NOP;
5123
5124 dsc->cleanup = &cleanup_branch;
5125
5126 return 0;
5127}
5128
cca44b1b
JB
5129/* Copy BX/BLX with register-specified destinations. */
5130
7ff120b4
YQ
5131static void
5132install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
5133 struct displaced_step_closure *dsc, int link,
5134 unsigned int cond, unsigned int rm)
cca44b1b 5135{
cca44b1b
JB
5136 /* Implement {BX,BLX}<cond> <reg>" as:
5137
5138 Preparation: cond <- instruction condition
5139 Insn: mov r0, r0 (nop)
5140 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5141
5142 Don't set r14 in cleanup for BX. */
5143
36073a92 5144 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
5145
5146 dsc->u.branch.cond = cond;
5147 dsc->u.branch.link = link;
cca44b1b 5148
7ff120b4 5149 dsc->u.branch.exchange = 1;
cca44b1b
JB
5150
5151 dsc->cleanup = &cleanup_branch;
7ff120b4 5152}
cca44b1b 5153
7ff120b4
YQ
5154static int
5155arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
5156 struct regcache *regs, struct displaced_step_closure *dsc)
5157{
5158 unsigned int cond = bits (insn, 28, 31);
5159 /* BX: x12xxx1x
5160 BLX: x12xxx3x. */
5161 int link = bit (insn, 5);
5162 unsigned int rm = bits (insn, 0, 3);
5163
5164 if (debug_displaced)
5165 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
5166 (unsigned long) insn);
5167
5168 dsc->modinsn[0] = ARM_NOP;
5169
5170 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
cca44b1b
JB
5171 return 0;
5172}
5173
34518530
YQ
5174static int
5175thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
5176 struct regcache *regs,
5177 struct displaced_step_closure *dsc)
5178{
5179 int link = bit (insn, 7);
5180 unsigned int rm = bits (insn, 3, 6);
5181
5182 if (debug_displaced)
5183 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
5184 (unsigned short) insn);
5185
5186 dsc->modinsn[0] = THUMB_NOP;
5187
5188 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
5189
5190 return 0;
5191}
5192
5193
0963b4bd 5194/* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
cca44b1b
JB
5195
5196static void
6e39997a 5197cleanup_alu_imm (struct gdbarch *gdbarch,
cca44b1b
JB
5198 struct regcache *regs, struct displaced_step_closure *dsc)
5199{
36073a92 5200 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
cca44b1b
JB
5201 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5202 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5203 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5204}
5205
5206static int
7ff120b4
YQ
5207arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5208 struct displaced_step_closure *dsc)
cca44b1b
JB
5209{
5210 unsigned int rn = bits (insn, 16, 19);
5211 unsigned int rd = bits (insn, 12, 15);
5212 unsigned int op = bits (insn, 21, 24);
5213 int is_mov = (op == 0xd);
5214 ULONGEST rd_val, rn_val;
cca44b1b
JB
5215
5216 if (!insn_references_pc (insn, 0x000ff000ul))
7ff120b4 5217 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
cca44b1b
JB
5218
5219 if (debug_displaced)
5220 fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
5221 "%.8lx\n", is_mov ? "move" : "ALU",
5222 (unsigned long) insn);
5223
5224 /* Instruction is of form:
5225
5226 <op><cond> rd, [rn,] #imm
5227
5228 Rewrite as:
5229
5230 Preparation: tmp1, tmp2 <- r0, r1;
5231 r0, r1 <- rd, rn
5232 Insn: <op><cond> r0, r1, #imm
5233 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5234 */
5235
36073a92
YQ
5236 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5237 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5238 rn_val = displaced_read_reg (regs, dsc, rn);
5239 rd_val = displaced_read_reg (regs, dsc, rd);
cca44b1b
JB
5240 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5241 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5242 dsc->rd = rd;
5243
5244 if (is_mov)
5245 dsc->modinsn[0] = insn & 0xfff00fff;
5246 else
5247 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
5248
5249 dsc->cleanup = &cleanup_alu_imm;
5250
5251 return 0;
5252}
5253
34518530
YQ
5254static int
5255thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
5256 uint16_t insn2, struct regcache *regs,
5257 struct displaced_step_closure *dsc)
5258{
5259 unsigned int op = bits (insn1, 5, 8);
5260 unsigned int rn, rm, rd;
5261 ULONGEST rd_val, rn_val;
5262
5263 rn = bits (insn1, 0, 3); /* Rn */
5264 rm = bits (insn2, 0, 3); /* Rm */
5265 rd = bits (insn2, 8, 11); /* Rd */
5266
5267 /* This routine is only called for instruction MOV. */
5268 gdb_assert (op == 0x2 && rn == 0xf);
5269
5270 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
5271 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
5272
5273 if (debug_displaced)
5274 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
5275 "ALU", insn1, insn2);
5276
5277 /* Instruction is of form:
5278
5279 <op><cond> rd, [rn,] #imm
5280
5281 Rewrite as:
5282
5283 Preparation: tmp1, tmp2 <- r0, r1;
5284 r0, r1 <- rd, rn
5285 Insn: <op><cond> r0, r1, #imm
5286 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5287 */
5288
5289 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5290 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5291 rn_val = displaced_read_reg (regs, dsc, rn);
5292 rd_val = displaced_read_reg (regs, dsc, rd);
5293 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5294 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5295 dsc->rd = rd;
5296
5297 dsc->modinsn[0] = insn1;
5298 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
5299 dsc->numinsns = 2;
5300
5301 dsc->cleanup = &cleanup_alu_imm;
5302
5303 return 0;
5304}
5305
cca44b1b
JB
5306/* Copy/cleanup arithmetic/logic insns with register RHS. */
5307
5308static void
6e39997a 5309cleanup_alu_reg (struct gdbarch *gdbarch,
cca44b1b
JB
5310 struct regcache *regs, struct displaced_step_closure *dsc)
5311{
5312 ULONGEST rd_val;
5313 int i;
5314
36073a92 5315 rd_val = displaced_read_reg (regs, dsc, 0);
cca44b1b
JB
5316
5317 for (i = 0; i < 3; i++)
5318 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5319
5320 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5321}
5322
7ff120b4
YQ
5323static void
5324install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
5325 struct displaced_step_closure *dsc,
5326 unsigned int rd, unsigned int rn, unsigned int rm)
cca44b1b 5327{
cca44b1b 5328 ULONGEST rd_val, rn_val, rm_val;
cca44b1b 5329
cca44b1b
JB
5330 /* Instruction is of form:
5331
5332 <op><cond> rd, [rn,] rm [, <shift>]
5333
5334 Rewrite as:
5335
5336 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
5337 r0, r1, r2 <- rd, rn, rm
ef713951 5338 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
cca44b1b
JB
5339 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
5340 */
5341
36073a92
YQ
5342 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5343 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5344 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5345 rd_val = displaced_read_reg (regs, dsc, rd);
5346 rn_val = displaced_read_reg (regs, dsc, rn);
5347 rm_val = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
5348 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5349 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5350 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5351 dsc->rd = rd;
5352
7ff120b4
YQ
5353 dsc->cleanup = &cleanup_alu_reg;
5354}
5355
5356static int
5357arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5358 struct displaced_step_closure *dsc)
5359{
5360 unsigned int op = bits (insn, 21, 24);
5361 int is_mov = (op == 0xd);
5362
5363 if (!insn_references_pc (insn, 0x000ff00ful))
5364 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
5365
5366 if (debug_displaced)
5367 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
5368 is_mov ? "move" : "ALU", (unsigned long) insn);
5369
cca44b1b
JB
5370 if (is_mov)
5371 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
5372 else
5373 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
5374
7ff120b4
YQ
5375 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
5376 bits (insn, 0, 3));
cca44b1b
JB
5377 return 0;
5378}
5379
34518530
YQ
5380static int
5381thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
5382 struct regcache *regs,
5383 struct displaced_step_closure *dsc)
5384{
ef713951 5385 unsigned rm, rd;
34518530 5386
ef713951
YQ
5387 rm = bits (insn, 3, 6);
5388 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
34518530 5389
ef713951 5390 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
34518530
YQ
5391 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
5392
5393 if (debug_displaced)
ef713951
YQ
5394 fprintf_unfiltered (gdb_stdlog, "displaced: copying ALU reg insn %.4x\n",
5395 (unsigned short) insn);
34518530 5396
ef713951 5397 dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
34518530 5398
ef713951 5399 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
34518530
YQ
5400
5401 return 0;
5402}
5403
cca44b1b
JB
5404/* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
5405
5406static void
6e39997a 5407cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
cca44b1b
JB
5408 struct regcache *regs,
5409 struct displaced_step_closure *dsc)
5410{
36073a92 5411 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
cca44b1b
JB
5412 int i;
5413
5414 for (i = 0; i < 4; i++)
5415 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5416
5417 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5418}
5419
7ff120b4
YQ
5420static void
5421install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
5422 struct displaced_step_closure *dsc,
5423 unsigned int rd, unsigned int rn, unsigned int rm,
5424 unsigned rs)
cca44b1b 5425{
7ff120b4 5426 int i;
cca44b1b 5427 ULONGEST rd_val, rn_val, rm_val, rs_val;
cca44b1b 5428
cca44b1b
JB
5429 /* Instruction is of form:
5430
5431 <op><cond> rd, [rn,] rm, <shift> rs
5432
5433 Rewrite as:
5434
5435 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
5436 r0, r1, r2, r3 <- rd, rn, rm, rs
5437 Insn: <op><cond> r0, r1, r2, <shift> r3
5438 Cleanup: tmp5 <- r0
5439 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
5440 rd <- tmp5
5441 */
5442
5443 for (i = 0; i < 4; i++)
36073a92 5444 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
cca44b1b 5445
36073a92
YQ
5446 rd_val = displaced_read_reg (regs, dsc, rd);
5447 rn_val = displaced_read_reg (regs, dsc, rn);
5448 rm_val = displaced_read_reg (regs, dsc, rm);
5449 rs_val = displaced_read_reg (regs, dsc, rs);
cca44b1b
JB
5450 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5451 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5452 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5453 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
5454 dsc->rd = rd;
7ff120b4
YQ
5455 dsc->cleanup = &cleanup_alu_shifted_reg;
5456}
5457
5458static int
5459arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
5460 struct regcache *regs,
5461 struct displaced_step_closure *dsc)
5462{
5463 unsigned int op = bits (insn, 21, 24);
5464 int is_mov = (op == 0xd);
5465 unsigned int rd, rn, rm, rs;
5466
5467 if (!insn_references_pc (insn, 0x000fff0ful))
5468 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
5469
5470 if (debug_displaced)
5471 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
5472 "%.8lx\n", is_mov ? "move" : "ALU",
5473 (unsigned long) insn);
5474
5475 rn = bits (insn, 16, 19);
5476 rm = bits (insn, 0, 3);
5477 rs = bits (insn, 8, 11);
5478 rd = bits (insn, 12, 15);
cca44b1b
JB
5479
5480 if (is_mov)
5481 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
5482 else
5483 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
5484
7ff120b4 5485 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
cca44b1b
JB
5486
5487 return 0;
5488}
5489
5490/* Clean up load instructions. */
5491
5492static void
6e39997a 5493cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
cca44b1b
JB
5494 struct displaced_step_closure *dsc)
5495{
5496 ULONGEST rt_val, rt_val2 = 0, rn_val;
cca44b1b 5497
36073a92 5498 rt_val = displaced_read_reg (regs, dsc, 0);
cca44b1b 5499 if (dsc->u.ldst.xfersize == 8)
36073a92
YQ
5500 rt_val2 = displaced_read_reg (regs, dsc, 1);
5501 rn_val = displaced_read_reg (regs, dsc, 2);
cca44b1b
JB
5502
5503 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5504 if (dsc->u.ldst.xfersize > 4)
5505 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5506 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5507 if (!dsc->u.ldst.immed)
5508 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5509
5510 /* Handle register writeback. */
5511 if (dsc->u.ldst.writeback)
5512 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5513 /* Put result in right place. */
5514 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
5515 if (dsc->u.ldst.xfersize == 8)
5516 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
5517}
5518
5519/* Clean up store instructions. */
5520
5521static void
6e39997a 5522cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
cca44b1b
JB
5523 struct displaced_step_closure *dsc)
5524{
36073a92 5525 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
cca44b1b
JB
5526
5527 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5528 if (dsc->u.ldst.xfersize > 4)
5529 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5530 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5531 if (!dsc->u.ldst.immed)
5532 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5533 if (!dsc->u.ldst.restore_r4)
5534 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
5535
5536 /* Writeback. */
5537 if (dsc->u.ldst.writeback)
5538 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5539}
5540
5541/* Copy "extra" load/store instructions. These are halfword/doubleword
5542 transfers, which have a different encoding to byte/word transfers. */
5543
5544static int
550dc4e2 5545arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
7ff120b4 5546 struct regcache *regs, struct displaced_step_closure *dsc)
cca44b1b
JB
5547{
5548 unsigned int op1 = bits (insn, 20, 24);
5549 unsigned int op2 = bits (insn, 5, 6);
5550 unsigned int rt = bits (insn, 12, 15);
5551 unsigned int rn = bits (insn, 16, 19);
5552 unsigned int rm = bits (insn, 0, 3);
5553 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
5554 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
5555 int immed = (op1 & 0x4) != 0;
5556 int opcode;
5557 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
cca44b1b
JB
5558
5559 if (!insn_references_pc (insn, 0x000ff00ful))
7ff120b4 5560 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
cca44b1b
JB
5561
5562 if (debug_displaced)
5563 fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
550dc4e2 5564 "insn %.8lx\n", unprivileged ? "unprivileged " : "",
cca44b1b
JB
5565 (unsigned long) insn);
5566
5567 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
5568
5569 if (opcode < 0)
5570 internal_error (__FILE__, __LINE__,
5571 _("copy_extra_ld_st: instruction decode error"));
5572
36073a92
YQ
5573 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5574 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5575 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
cca44b1b 5576 if (!immed)
36073a92 5577 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
cca44b1b 5578
36073a92 5579 rt_val = displaced_read_reg (regs, dsc, rt);
cca44b1b 5580 if (bytesize[opcode] == 8)
36073a92
YQ
5581 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
5582 rn_val = displaced_read_reg (regs, dsc, rn);
cca44b1b 5583 if (!immed)
36073a92 5584 rm_val = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
5585
5586 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5587 if (bytesize[opcode] == 8)
5588 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
5589 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5590 if (!immed)
5591 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5592
5593 dsc->rd = rt;
5594 dsc->u.ldst.xfersize = bytesize[opcode];
5595 dsc->u.ldst.rn = rn;
5596 dsc->u.ldst.immed = immed;
5597 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
5598 dsc->u.ldst.restore_r4 = 0;
5599
5600 if (immed)
5601 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
5602 ->
5603 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
5604 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5605 else
5606 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
5607 ->
5608 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
5609 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5610
5611 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
5612
5613 return 0;
5614}
5615
0f6f04ba 5616/* Copy byte/half word/word loads and stores. */
cca44b1b 5617
7ff120b4 5618static void
0f6f04ba
YQ
5619install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5620 struct displaced_step_closure *dsc, int load,
5621 int immed, int writeback, int size, int usermode,
5622 int rt, int rm, int rn)
cca44b1b 5623{
cca44b1b 5624 ULONGEST rt_val, rn_val, rm_val = 0;
cca44b1b 5625
36073a92
YQ
5626 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5627 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
cca44b1b 5628 if (!immed)
36073a92 5629 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
cca44b1b 5630 if (!load)
36073a92 5631 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
cca44b1b 5632
36073a92
YQ
5633 rt_val = displaced_read_reg (regs, dsc, rt);
5634 rn_val = displaced_read_reg (regs, dsc, rn);
cca44b1b 5635 if (!immed)
36073a92 5636 rm_val = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
5637
5638 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5639 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5640 if (!immed)
5641 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
cca44b1b 5642 dsc->rd = rt;
0f6f04ba 5643 dsc->u.ldst.xfersize = size;
cca44b1b
JB
5644 dsc->u.ldst.rn = rn;
5645 dsc->u.ldst.immed = immed;
7ff120b4 5646 dsc->u.ldst.writeback = writeback;
cca44b1b
JB
5647
5648 /* To write PC we can do:
5649
494e194e
YQ
5650 Before this sequence of instructions:
5651 r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
5652 r2 is the Rn value got from dispalced_read_reg.
5653
5654 Insn1: push {pc} Write address of STR instruction + offset on stack
5655 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
5656 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
5657 = addr(Insn1) + offset - addr(Insn3) - 8
5658 = offset - 16
5659 Insn4: add r4, r4, #8 r4 = offset - 8
5660 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
5661 = from + offset
5662 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
cca44b1b
JB
5663
5664 Otherwise we don't know what value to write for PC, since the offset is
494e194e
YQ
5665 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
5666 of this can be found in Section "Saving from r15" in
5667 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
cca44b1b 5668
7ff120b4
YQ
5669 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5670}
5671
34518530
YQ
5672
5673static int
5674thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
5675 uint16_t insn2, struct regcache *regs,
5676 struct displaced_step_closure *dsc, int size)
5677{
5678 unsigned int u_bit = bit (insn1, 7);
5679 unsigned int rt = bits (insn2, 12, 15);
5680 int imm12 = bits (insn2, 0, 11);
5681 ULONGEST pc_val;
5682
5683 if (debug_displaced)
5684 fprintf_unfiltered (gdb_stdlog,
5685 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
5686 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
5687 imm12);
5688
5689 if (!u_bit)
5690 imm12 = -1 * imm12;
5691
5692 /* Rewrite instruction LDR Rt imm12 into:
5693
5694 Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12
5695
5696 LDR R0, R2, R3,
5697
5698 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2]. */
5699
5700
5701 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5702 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5703 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5704
5705 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5706
5707 pc_val = pc_val & 0xfffffffc;
5708
5709 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
5710 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
5711
5712 dsc->rd = rt;
5713
5714 dsc->u.ldst.xfersize = size;
5715 dsc->u.ldst.immed = 0;
5716 dsc->u.ldst.writeback = 0;
5717 dsc->u.ldst.restore_r4 = 0;
5718
5719 /* LDR R0, R2, R3 */
5720 dsc->modinsn[0] = 0xf852;
5721 dsc->modinsn[1] = 0x3;
5722 dsc->numinsns = 2;
5723
5724 dsc->cleanup = &cleanup_load;
5725
5726 return 0;
5727}
5728
5729static int
5730thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
5731 uint16_t insn2, struct regcache *regs,
5732 struct displaced_step_closure *dsc,
5733 int writeback, int immed)
5734{
5735 unsigned int rt = bits (insn2, 12, 15);
5736 unsigned int rn = bits (insn1, 0, 3);
5737 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
5738 /* In LDR (register), there is also a register Rm, which is not allowed to
5739 be PC, so we don't have to check it. */
5740
5741 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
5742 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
5743 dsc);
5744
5745 if (debug_displaced)
5746 fprintf_unfiltered (gdb_stdlog,
5747 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
5748 rt, rn, insn1, insn2);
5749
5750 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
5751 0, rt, rm, rn);
5752
5753 dsc->u.ldst.restore_r4 = 0;
5754
5755 if (immed)
5756 /* ldr[b]<cond> rt, [rn, #imm], etc.
5757 ->
5758 ldr[b]<cond> r0, [r2, #imm]. */
5759 {
5760 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5761 dsc->modinsn[1] = insn2 & 0x0fff;
5762 }
5763 else
5764 /* ldr[b]<cond> rt, [rn, rm], etc.
5765 ->
5766 ldr[b]<cond> r0, [r2, r3]. */
5767 {
5768 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5769 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
5770 }
5771
5772 dsc->numinsns = 2;
5773
5774 return 0;
5775}
5776
5777
7ff120b4
YQ
5778static int
5779arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
5780 struct regcache *regs,
5781 struct displaced_step_closure *dsc,
0f6f04ba 5782 int load, int size, int usermode)
7ff120b4
YQ
5783{
5784 int immed = !bit (insn, 25);
5785 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
5786 unsigned int rt = bits (insn, 12, 15);
5787 unsigned int rn = bits (insn, 16, 19);
5788 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
5789
5790 if (!insn_references_pc (insn, 0x000ff00ful))
5791 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
5792
5793 if (debug_displaced)
5794 fprintf_unfiltered (gdb_stdlog,
5795 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
0f6f04ba
YQ
5796 load ? (size == 1 ? "ldrb" : "ldr")
5797 : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
7ff120b4
YQ
5798 rt, rn,
5799 (unsigned long) insn);
5800
0f6f04ba
YQ
5801 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
5802 usermode, rt, rm, rn);
7ff120b4 5803
bf9f652a 5804 if (load || rt != ARM_PC_REGNUM)
cca44b1b
JB
5805 {
5806 dsc->u.ldst.restore_r4 = 0;
5807
5808 if (immed)
5809 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
5810 ->
5811 {ldr,str}[b]<cond> r0, [r2, #imm]. */
5812 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5813 else
5814 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
5815 ->
5816 {ldr,str}[b]<cond> r0, [r2, r3]. */
5817 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5818 }
5819 else
5820 {
5821 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
5822 dsc->u.ldst.restore_r4 = 1;
494e194e
YQ
5823 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
5824 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
cca44b1b
JB
5825 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
5826 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
5827 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
5828
5829 /* As above. */
5830 if (immed)
5831 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
5832 else
5833 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
5834
cca44b1b
JB
5835 dsc->numinsns = 6;
5836 }
5837
5838 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5839
5840 return 0;
5841}
5842
5843/* Cleanup LDM instructions with fully-populated register list. This is an
5844 unfortunate corner case: it's impossible to implement correctly by modifying
5845 the instruction. The issue is as follows: we have an instruction,
5846
5847 ldm rN, {r0-r15}
5848
5849 which we must rewrite to avoid loading PC. A possible solution would be to
5850 do the load in two halves, something like (with suitable cleanup
5851 afterwards):
5852
5853 mov r8, rN
5854 ldm[id][ab] r8!, {r0-r7}
5855 str r7, <temp>
5856 ldm[id][ab] r8, {r7-r14}
5857 <bkpt>
5858
5859 but at present there's no suitable place for <temp>, since the scratch space
5860 is overwritten before the cleanup routine is called. For now, we simply
5861 emulate the instruction. */
5862
5863static void
5864cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
5865 struct displaced_step_closure *dsc)
5866{
cca44b1b
JB
5867 int inc = dsc->u.block.increment;
5868 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
5869 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
5870 uint32_t regmask = dsc->u.block.regmask;
5871 int regno = inc ? 0 : 15;
5872 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
5873 int exception_return = dsc->u.block.load && dsc->u.block.user
5874 && (regmask & 0x8000) != 0;
36073a92 5875 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
cca44b1b
JB
5876 int do_transfer = condition_true (dsc->u.block.cond, status);
5877 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5878
5879 if (!do_transfer)
5880 return;
5881
5882 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
5883 sensible we can do here. Complain loudly. */
5884 if (exception_return)
5885 error (_("Cannot single-step exception return"));
5886
5887 /* We don't handle any stores here for now. */
5888 gdb_assert (dsc->u.block.load != 0);
5889
5890 if (debug_displaced)
5891 fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
5892 "%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
5893 dsc->u.block.increment ? "inc" : "dec",
5894 dsc->u.block.before ? "before" : "after");
5895
5896 while (regmask)
5897 {
5898 uint32_t memword;
5899
5900 if (inc)
bf9f652a 5901 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
cca44b1b
JB
5902 regno++;
5903 else
5904 while (regno >= 0 && (regmask & (1 << regno)) == 0)
5905 regno--;
5906
5907 xfer_addr += bump_before;
5908
5909 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
5910 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
5911
5912 xfer_addr += bump_after;
5913
5914 regmask &= ~(1 << regno);
5915 }
5916
5917 if (dsc->u.block.writeback)
5918 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
5919 CANNOT_WRITE_PC);
5920}
5921
5922/* Clean up an STM which included the PC in the register list. */
5923
5924static void
5925cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
5926 struct displaced_step_closure *dsc)
5927{
36073a92 5928 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
cca44b1b
JB
5929 int store_executed = condition_true (dsc->u.block.cond, status);
5930 CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
5931 CORE_ADDR stm_insn_addr;
5932 uint32_t pc_val;
5933 long offset;
5934 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5935
5936 /* If condition code fails, there's nothing else to do. */
5937 if (!store_executed)
5938 return;
5939
5940 if (dsc->u.block.increment)
5941 {
5942 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
5943
5944 if (dsc->u.block.before)
5945 pc_stored_at += 4;
5946 }
5947 else
5948 {
5949 pc_stored_at = dsc->u.block.xfer_addr;
5950
5951 if (dsc->u.block.before)
5952 pc_stored_at -= 4;
5953 }
5954
5955 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
5956 stm_insn_addr = dsc->scratch_base;
5957 offset = pc_val - stm_insn_addr;
5958
5959 if (debug_displaced)
5960 fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
5961 "STM instruction\n", offset);
5962
5963 /* Rewrite the stored PC to the proper value for the non-displaced original
5964 instruction. */
5965 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
5966 dsc->insn_addr + offset);
5967}
5968
5969/* Clean up an LDM which includes the PC in the register list. We clumped all
5970 the registers in the transferred list into a contiguous range r0...rX (to
5971 avoid loading PC directly and losing control of the debugged program), so we
5972 must undo that here. */
5973
5974static void
6e39997a 5975cleanup_block_load_pc (struct gdbarch *gdbarch,
cca44b1b
JB
5976 struct regcache *regs,
5977 struct displaced_step_closure *dsc)
5978{
36073a92 5979 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
22e048c9 5980 int load_executed = condition_true (dsc->u.block.cond, status);
bf9f652a 5981 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
cca44b1b
JB
5982 unsigned int regs_loaded = bitcount (mask);
5983 unsigned int num_to_shuffle = regs_loaded, clobbered;
5984
5985 /* The method employed here will fail if the register list is fully populated
5986 (we need to avoid loading PC directly). */
5987 gdb_assert (num_to_shuffle < 16);
5988
5989 if (!load_executed)
5990 return;
5991
5992 clobbered = (1 << num_to_shuffle) - 1;
5993
5994 while (num_to_shuffle > 0)
5995 {
5996 if ((mask & (1 << write_reg)) != 0)
5997 {
5998 unsigned int read_reg = num_to_shuffle - 1;
5999
6000 if (read_reg != write_reg)
6001 {
36073a92 6002 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
cca44b1b
JB
6003 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
6004 if (debug_displaced)
6005 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
6006 "loaded register r%d to r%d\n"), read_reg,
6007 write_reg);
6008 }
6009 else if (debug_displaced)
6010 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
6011 "r%d already in the right place\n"),
6012 write_reg);
6013
6014 clobbered &= ~(1 << write_reg);
6015
6016 num_to_shuffle--;
6017 }
6018
6019 write_reg--;
6020 }
6021
6022 /* Restore any registers we scribbled over. */
6023 for (write_reg = 0; clobbered != 0; write_reg++)
6024 {
6025 if ((clobbered & (1 << write_reg)) != 0)
6026 {
6027 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
6028 CANNOT_WRITE_PC);
6029 if (debug_displaced)
6030 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
6031 "clobbered register r%d\n"), write_reg);
6032 clobbered &= ~(1 << write_reg);
6033 }
6034 }
6035
6036 /* Perform register writeback manually. */
6037 if (dsc->u.block.writeback)
6038 {
6039 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
6040
6041 if (dsc->u.block.increment)
6042 new_rn_val += regs_loaded * 4;
6043 else
6044 new_rn_val -= regs_loaded * 4;
6045
6046 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
6047 CANNOT_WRITE_PC);
6048 }
6049}
6050
6051/* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
6052 in user-level code (in particular exception return, ldm rn, {...pc}^). */
6053
6054static int
7ff120b4
YQ
6055arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
6056 struct regcache *regs,
6057 struct displaced_step_closure *dsc)
cca44b1b
JB
6058{
6059 int load = bit (insn, 20);
6060 int user = bit (insn, 22);
6061 int increment = bit (insn, 23);
6062 int before = bit (insn, 24);
6063 int writeback = bit (insn, 21);
6064 int rn = bits (insn, 16, 19);
cca44b1b 6065
0963b4bd
MS
6066 /* Block transfers which don't mention PC can be run directly
6067 out-of-line. */
bf9f652a 6068 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
7ff120b4 6069 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
cca44b1b 6070
bf9f652a 6071 if (rn == ARM_PC_REGNUM)
cca44b1b 6072 {
0963b4bd
MS
6073 warning (_("displaced: Unpredictable LDM or STM with "
6074 "base register r15"));
7ff120b4 6075 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
cca44b1b
JB
6076 }
6077
6078 if (debug_displaced)
6079 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6080 "%.8lx\n", (unsigned long) insn);
6081
36073a92 6082 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
cca44b1b
JB
6083 dsc->u.block.rn = rn;
6084
6085 dsc->u.block.load = load;
6086 dsc->u.block.user = user;
6087 dsc->u.block.increment = increment;
6088 dsc->u.block.before = before;
6089 dsc->u.block.writeback = writeback;
6090 dsc->u.block.cond = bits (insn, 28, 31);
6091
6092 dsc->u.block.regmask = insn & 0xffff;
6093
6094 if (load)
6095 {
6096 if ((insn & 0xffff) == 0xffff)
6097 {
6098 /* LDM with a fully-populated register list. This case is
6099 particularly tricky. Implement for now by fully emulating the
6100 instruction (which might not behave perfectly in all cases, but
6101 these instructions should be rare enough for that not to matter
6102 too much). */
6103 dsc->modinsn[0] = ARM_NOP;
6104
6105 dsc->cleanup = &cleanup_block_load_all;
6106 }
6107 else
6108 {
6109 /* LDM of a list of registers which includes PC. Implement by
6110 rewriting the list of registers to be transferred into a
6111 contiguous chunk r0...rX before doing the transfer, then shuffling
6112 registers into the correct places in the cleanup routine. */
6113 unsigned int regmask = insn & 0xffff;
bec2ab5a
SM
6114 unsigned int num_in_list = bitcount (regmask), new_regmask;
6115 unsigned int i;
cca44b1b
JB
6116
6117 for (i = 0; i < num_in_list; i++)
36073a92 6118 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
cca44b1b
JB
6119
6120 /* Writeback makes things complicated. We need to avoid clobbering
6121 the base register with one of the registers in our modified
6122 register list, but just using a different register can't work in
6123 all cases, e.g.:
6124
6125 ldm r14!, {r0-r13,pc}
6126
6127 which would need to be rewritten as:
6128
6129 ldm rN!, {r0-r14}
6130
6131 but that can't work, because there's no free register for N.
6132
6133 Solve this by turning off the writeback bit, and emulating
6134 writeback manually in the cleanup routine. */
6135
6136 if (writeback)
6137 insn &= ~(1 << 21);
6138
6139 new_regmask = (1 << num_in_list) - 1;
6140
6141 if (debug_displaced)
6142 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6143 "{..., pc}: original reg list %.4x, modified "
6144 "list %.4x\n"), rn, writeback ? "!" : "",
6145 (int) insn & 0xffff, new_regmask);
6146
6147 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
6148
6149 dsc->cleanup = &cleanup_block_load_pc;
6150 }
6151 }
6152 else
6153 {
6154 /* STM of a list of registers which includes PC. Run the instruction
6155 as-is, but out of line: this will store the wrong value for the PC,
6156 so we must manually fix up the memory in the cleanup routine.
6157 Doing things this way has the advantage that we can auto-detect
6158 the offset of the PC write (which is architecture-dependent) in
6159 the cleanup routine. */
6160 dsc->modinsn[0] = insn;
6161
6162 dsc->cleanup = &cleanup_block_store_pc;
6163 }
6164
6165 return 0;
6166}
6167
34518530
YQ
6168static int
6169thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6170 struct regcache *regs,
6171 struct displaced_step_closure *dsc)
cca44b1b 6172{
34518530
YQ
6173 int rn = bits (insn1, 0, 3);
6174 int load = bit (insn1, 4);
6175 int writeback = bit (insn1, 5);
cca44b1b 6176
34518530
YQ
6177 /* Block transfers which don't mention PC can be run directly
6178 out-of-line. */
6179 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
6180 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
7ff120b4 6181
34518530
YQ
6182 if (rn == ARM_PC_REGNUM)
6183 {
6184 warning (_("displaced: Unpredictable LDM or STM with "
6185 "base register r15"));
6186 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6187 "unpredictable ldm/stm", dsc);
6188 }
cca44b1b
JB
6189
6190 if (debug_displaced)
34518530
YQ
6191 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6192 "%.4x%.4x\n", insn1, insn2);
cca44b1b 6193
34518530
YQ
6194 /* Clear bit 13, since it should be always zero. */
6195 dsc->u.block.regmask = (insn2 & 0xdfff);
6196 dsc->u.block.rn = rn;
cca44b1b 6197
34518530
YQ
6198 dsc->u.block.load = load;
6199 dsc->u.block.user = 0;
6200 dsc->u.block.increment = bit (insn1, 7);
6201 dsc->u.block.before = bit (insn1, 8);
6202 dsc->u.block.writeback = writeback;
6203 dsc->u.block.cond = INST_AL;
6204 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
cca44b1b 6205
34518530
YQ
6206 if (load)
6207 {
6208 if (dsc->u.block.regmask == 0xffff)
6209 {
6210 /* This branch is impossible to happen. */
6211 gdb_assert (0);
6212 }
6213 else
6214 {
6215 unsigned int regmask = dsc->u.block.regmask;
bec2ab5a
SM
6216 unsigned int num_in_list = bitcount (regmask), new_regmask;
6217 unsigned int i;
34518530
YQ
6218
6219 for (i = 0; i < num_in_list; i++)
6220 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6221
6222 if (writeback)
6223 insn1 &= ~(1 << 5);
6224
6225 new_regmask = (1 << num_in_list) - 1;
6226
6227 if (debug_displaced)
6228 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6229 "{..., pc}: original reg list %.4x, modified "
6230 "list %.4x\n"), rn, writeback ? "!" : "",
6231 (int) dsc->u.block.regmask, new_regmask);
6232
6233 dsc->modinsn[0] = insn1;
6234 dsc->modinsn[1] = (new_regmask & 0xffff);
6235 dsc->numinsns = 2;
6236
6237 dsc->cleanup = &cleanup_block_load_pc;
6238 }
6239 }
6240 else
6241 {
6242 dsc->modinsn[0] = insn1;
6243 dsc->modinsn[1] = insn2;
6244 dsc->numinsns = 2;
6245 dsc->cleanup = &cleanup_block_store_pc;
6246 }
6247 return 0;
6248}
6249
d9311bfa
AT
6250/* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
6251 This is used to avoid a dependency on BFD's bfd_endian enum. */
6252
6253ULONGEST
6254arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len,
6255 int byte_order)
6256{
5f2dfcfd
AT
6257 return read_memory_unsigned_integer (memaddr, len,
6258 (enum bfd_endian) byte_order);
d9311bfa
AT
6259}
6260
6261/* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
6262
6263CORE_ADDR
6264arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self,
6265 CORE_ADDR val)
6266{
6267 return gdbarch_addr_bits_remove (get_regcache_arch (self->regcache), val);
6268}
6269
6270/* Wrapper over syscall_next_pc for use in get_next_pcs. */
6271
e7cf25a8 6272static CORE_ADDR
553cb527 6273arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self)
d9311bfa 6274{
d9311bfa
AT
6275 return 0;
6276}
6277
6278/* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
6279
6280int
6281arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self)
6282{
6283 return arm_is_thumb (self->regcache);
6284}
6285
6286/* single_step() is called just before we want to resume the inferior,
6287 if we want to single-step it but there is no hardware or kernel
6288 single-step support. We find the target of the coming instructions
6289 and breakpoint them. */
6290
93f9a11f 6291VEC (CORE_ADDR) *
f5ea389a 6292arm_software_single_step (struct regcache *regcache)
d9311bfa 6293{
d9311bfa 6294 struct gdbarch *gdbarch = get_regcache_arch (regcache);
d9311bfa
AT
6295 struct arm_get_next_pcs next_pcs_ctx;
6296 CORE_ADDR pc;
6297 int i;
6298 VEC (CORE_ADDR) *next_pcs = NULL;
6299 struct cleanup *old_chain = make_cleanup (VEC_cleanup (CORE_ADDR), &next_pcs);
6300
6301 arm_get_next_pcs_ctor (&next_pcs_ctx,
6302 &arm_get_next_pcs_ops,
6303 gdbarch_byte_order (gdbarch),
6304 gdbarch_byte_order_for_code (gdbarch),
1b451dda 6305 0,
d9311bfa
AT
6306 regcache);
6307
4d18591b 6308 next_pcs = arm_get_next_pcs (&next_pcs_ctx);
d9311bfa
AT
6309
6310 for (i = 0; VEC_iterate (CORE_ADDR, next_pcs, i, pc); i++)
771da62d
YQ
6311 {
6312 pc = gdbarch_addr_bits_remove (gdbarch, pc);
0bc5d801 6313 VEC_replace (CORE_ADDR, next_pcs, i, pc);
771da62d 6314 }
d9311bfa 6315
93f9a11f 6316 discard_cleanups (old_chain);
d9311bfa 6317
93f9a11f 6318 return next_pcs;
d9311bfa
AT
6319}
6320
34518530
YQ
6321/* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6322 for Linux, where some SVC instructions must be treated specially. */
6323
6324static void
6325cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
6326 struct displaced_step_closure *dsc)
6327{
6328 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
6329
6330 if (debug_displaced)
6331 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
6332 "%.8lx\n", (unsigned long) resume_addr);
6333
6334 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
6335}
6336
6337
6338/* Common copy routine for svc instruciton. */
6339
6340static int
6341install_svc (struct gdbarch *gdbarch, struct regcache *regs,
6342 struct displaced_step_closure *dsc)
6343{
6344 /* Preparation: none.
6345 Insn: unmodified svc.
6346 Cleanup: pc <- insn_addr + insn_size. */
6347
6348 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
6349 instruction. */
6350 dsc->wrote_to_pc = 1;
6351
6352 /* Allow OS-specific code to override SVC handling. */
bd18283a
YQ
6353 if (dsc->u.svc.copy_svc_os)
6354 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
6355 else
6356 {
6357 dsc->cleanup = &cleanup_svc;
6358 return 0;
6359 }
34518530
YQ
6360}
6361
6362static int
6363arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
6364 struct regcache *regs, struct displaced_step_closure *dsc)
6365{
6366
6367 if (debug_displaced)
6368 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
6369 (unsigned long) insn);
6370
6371 dsc->modinsn[0] = insn;
6372
6373 return install_svc (gdbarch, regs, dsc);
6374}
6375
6376static int
6377thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
6378 struct regcache *regs, struct displaced_step_closure *dsc)
6379{
6380
6381 if (debug_displaced)
6382 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
6383 insn);
bd18283a 6384
34518530
YQ
6385 dsc->modinsn[0] = insn;
6386
6387 return install_svc (gdbarch, regs, dsc);
cca44b1b
JB
6388}
6389
6390/* Copy undefined instructions. */
6391
6392static int
7ff120b4
YQ
6393arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
6394 struct displaced_step_closure *dsc)
cca44b1b
JB
6395{
6396 if (debug_displaced)
0963b4bd
MS
6397 fprintf_unfiltered (gdb_stdlog,
6398 "displaced: copying undefined insn %.8lx\n",
cca44b1b
JB
6399 (unsigned long) insn);
6400
6401 dsc->modinsn[0] = insn;
6402
6403 return 0;
6404}
6405
34518530
YQ
6406static int
6407thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6408 struct displaced_step_closure *dsc)
6409{
6410
6411 if (debug_displaced)
6412 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
6413 "%.4x %.4x\n", (unsigned short) insn1,
6414 (unsigned short) insn2);
6415
6416 dsc->modinsn[0] = insn1;
6417 dsc->modinsn[1] = insn2;
6418 dsc->numinsns = 2;
6419
6420 return 0;
6421}
6422
cca44b1b
JB
6423/* Copy unpredictable instructions. */
6424
6425static int
7ff120b4
YQ
6426arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
6427 struct displaced_step_closure *dsc)
cca44b1b
JB
6428{
6429 if (debug_displaced)
6430 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
6431 "%.8lx\n", (unsigned long) insn);
6432
6433 dsc->modinsn[0] = insn;
6434
6435 return 0;
6436}
6437
6438/* The decode_* functions are instruction decoding helpers. They mostly follow
6439 the presentation in the ARM ARM. */
6440
6441static int
7ff120b4
YQ
6442arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
6443 struct regcache *regs,
6444 struct displaced_step_closure *dsc)
cca44b1b
JB
6445{
6446 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
6447 unsigned int rn = bits (insn, 16, 19);
6448
6449 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0xe) == 0x0)
7ff120b4 6450 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
cca44b1b 6451 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0xe) == 0x1)
7ff120b4 6452 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
cca44b1b 6453 else if ((op1 & 0x60) == 0x20)
7ff120b4 6454 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
cca44b1b 6455 else if ((op1 & 0x71) == 0x40)
7ff120b4
YQ
6456 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
6457 dsc);
cca44b1b 6458 else if ((op1 & 0x77) == 0x41)
7ff120b4 6459 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
cca44b1b 6460 else if ((op1 & 0x77) == 0x45)
7ff120b4 6461 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
cca44b1b
JB
6462 else if ((op1 & 0x77) == 0x51)
6463 {
6464 if (rn != 0xf)
7ff120b4 6465 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
cca44b1b 6466 else
7ff120b4 6467 return arm_copy_unpred (gdbarch, insn, dsc);
cca44b1b
JB
6468 }
6469 else if ((op1 & 0x77) == 0x55)
7ff120b4 6470 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
cca44b1b
JB
6471 else if (op1 == 0x57)
6472 switch (op2)
6473 {
7ff120b4
YQ
6474 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
6475 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
6476 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
6477 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
6478 default: return arm_copy_unpred (gdbarch, insn, dsc);
cca44b1b
JB
6479 }
6480 else if ((op1 & 0x63) == 0x43)
7ff120b4 6481 return arm_copy_unpred (gdbarch, insn, dsc);
cca44b1b
JB
6482 else if ((op2 & 0x1) == 0x0)
6483 switch (op1 & ~0x80)
6484 {
6485 case 0x61:
7ff120b4 6486 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
cca44b1b 6487 case 0x65:
7ff120b4 6488 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
cca44b1b
JB
6489 case 0x71: case 0x75:
6490 /* pld/pldw reg. */
7ff120b4 6491 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
cca44b1b 6492 case 0x63: case 0x67: case 0x73: case 0x77:
7ff120b4 6493 return arm_copy_unpred (gdbarch, insn, dsc);
cca44b1b 6494 default:
7ff120b4 6495 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6496 }
6497 else
7ff120b4 6498 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
cca44b1b
JB
6499}
6500
6501static int
7ff120b4
YQ
6502arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
6503 struct regcache *regs,
6504 struct displaced_step_closure *dsc)
cca44b1b
JB
6505{
6506 if (bit (insn, 27) == 0)
7ff120b4 6507 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
cca44b1b
JB
6508 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
6509 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
6510 {
6511 case 0x0: case 0x2:
7ff120b4 6512 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
cca44b1b
JB
6513
6514 case 0x1: case 0x3:
7ff120b4 6515 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
cca44b1b
JB
6516
6517 case 0x4: case 0x5: case 0x6: case 0x7:
7ff120b4 6518 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
cca44b1b
JB
6519
6520 case 0x8:
6521 switch ((insn & 0xe00000) >> 21)
6522 {
6523 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
6524 /* stc/stc2. */
7ff120b4 6525 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b
JB
6526
6527 case 0x2:
7ff120b4 6528 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
cca44b1b
JB
6529
6530 default:
7ff120b4 6531 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6532 }
6533
6534 case 0x9:
6535 {
6536 int rn_f = (bits (insn, 16, 19) == 0xf);
6537 switch ((insn & 0xe00000) >> 21)
6538 {
6539 case 0x1: case 0x3:
6540 /* ldc/ldc2 imm (undefined for rn == pc). */
7ff120b4
YQ
6541 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
6542 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b
JB
6543
6544 case 0x2:
7ff120b4 6545 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
cca44b1b
JB
6546
6547 case 0x4: case 0x5: case 0x6: case 0x7:
6548 /* ldc/ldc2 lit (undefined for rn != pc). */
7ff120b4
YQ
6549 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
6550 : arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6551
6552 default:
7ff120b4 6553 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6554 }
6555 }
6556
6557 case 0xa:
7ff120b4 6558 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
cca44b1b
JB
6559
6560 case 0xb:
6561 if (bits (insn, 16, 19) == 0xf)
6562 /* ldc/ldc2 lit. */
7ff120b4 6563 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b 6564 else
7ff120b4 6565 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6566
6567 case 0xc:
6568 if (bit (insn, 4))
7ff120b4 6569 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
cca44b1b 6570 else
7ff120b4 6571 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
cca44b1b
JB
6572
6573 case 0xd:
6574 if (bit (insn, 4))
7ff120b4 6575 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
cca44b1b 6576 else
7ff120b4 6577 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
cca44b1b
JB
6578
6579 default:
7ff120b4 6580 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6581 }
6582}
6583
6584/* Decode miscellaneous instructions in dp/misc encoding space. */
6585
6586static int
7ff120b4
YQ
6587arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
6588 struct regcache *regs,
6589 struct displaced_step_closure *dsc)
cca44b1b
JB
6590{
6591 unsigned int op2 = bits (insn, 4, 6);
6592 unsigned int op = bits (insn, 21, 22);
cca44b1b
JB
6593
6594 switch (op2)
6595 {
6596 case 0x0:
7ff120b4 6597 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
cca44b1b
JB
6598
6599 case 0x1:
6600 if (op == 0x1) /* bx. */
7ff120b4 6601 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
cca44b1b 6602 else if (op == 0x3)
7ff120b4 6603 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
cca44b1b 6604 else
7ff120b4 6605 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6606
6607 case 0x2:
6608 if (op == 0x1)
6609 /* Not really supported. */
7ff120b4 6610 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
cca44b1b 6611 else
7ff120b4 6612 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6613
6614 case 0x3:
6615 if (op == 0x1)
7ff120b4 6616 return arm_copy_bx_blx_reg (gdbarch, insn,
0963b4bd 6617 regs, dsc); /* blx register. */
cca44b1b 6618 else
7ff120b4 6619 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6620
6621 case 0x5:
7ff120b4 6622 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
cca44b1b
JB
6623
6624 case 0x7:
6625 if (op == 0x1)
7ff120b4 6626 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
cca44b1b
JB
6627 else if (op == 0x3)
6628 /* Not really supported. */
7ff120b4 6629 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
cca44b1b
JB
6630
6631 default:
7ff120b4 6632 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6633 }
6634}
6635
6636static int
7ff120b4
YQ
6637arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
6638 struct regcache *regs,
6639 struct displaced_step_closure *dsc)
cca44b1b
JB
6640{
6641 if (bit (insn, 25))
6642 switch (bits (insn, 20, 24))
6643 {
6644 case 0x10:
7ff120b4 6645 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
cca44b1b
JB
6646
6647 case 0x14:
7ff120b4 6648 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
cca44b1b
JB
6649
6650 case 0x12: case 0x16:
7ff120b4 6651 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
cca44b1b
JB
6652
6653 default:
7ff120b4 6654 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
cca44b1b
JB
6655 }
6656 else
6657 {
6658 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
6659
6660 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
7ff120b4 6661 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
cca44b1b 6662 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
7ff120b4 6663 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
cca44b1b 6664 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
7ff120b4 6665 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
cca44b1b 6666 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
7ff120b4 6667 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
cca44b1b 6668 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
7ff120b4 6669 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
cca44b1b 6670 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
7ff120b4 6671 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
cca44b1b 6672 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
550dc4e2 6673 /* 2nd arg means "unprivileged". */
7ff120b4
YQ
6674 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
6675 dsc);
cca44b1b
JB
6676 }
6677
6678 /* Should be unreachable. */
6679 return 1;
6680}
6681
6682static int
7ff120b4
YQ
6683arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
6684 struct regcache *regs,
6685 struct displaced_step_closure *dsc)
cca44b1b
JB
6686{
6687 int a = bit (insn, 25), b = bit (insn, 4);
6688 uint32_t op1 = bits (insn, 20, 24);
cca44b1b
JB
6689
6690 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
6691 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
0f6f04ba 6692 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
cca44b1b
JB
6693 else if ((!a && (op1 & 0x17) == 0x02)
6694 || (a && (op1 & 0x17) == 0x02 && !b))
0f6f04ba 6695 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
cca44b1b
JB
6696 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
6697 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
0f6f04ba 6698 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
cca44b1b
JB
6699 else if ((!a && (op1 & 0x17) == 0x03)
6700 || (a && (op1 & 0x17) == 0x03 && !b))
0f6f04ba 6701 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
cca44b1b
JB
6702 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
6703 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
7ff120b4 6704 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
cca44b1b
JB
6705 else if ((!a && (op1 & 0x17) == 0x06)
6706 || (a && (op1 & 0x17) == 0x06 && !b))
7ff120b4 6707 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
cca44b1b
JB
6708 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
6709 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
7ff120b4 6710 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
cca44b1b
JB
6711 else if ((!a && (op1 & 0x17) == 0x07)
6712 || (a && (op1 & 0x17) == 0x07 && !b))
7ff120b4 6713 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
cca44b1b
JB
6714
6715 /* Should be unreachable. */
6716 return 1;
6717}
6718
6719static int
7ff120b4
YQ
6720arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
6721 struct displaced_step_closure *dsc)
cca44b1b
JB
6722{
6723 switch (bits (insn, 20, 24))
6724 {
6725 case 0x00: case 0x01: case 0x02: case 0x03:
7ff120b4 6726 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
cca44b1b
JB
6727
6728 case 0x04: case 0x05: case 0x06: case 0x07:
7ff120b4 6729 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
cca44b1b
JB
6730
6731 case 0x08: case 0x09: case 0x0a: case 0x0b:
6732 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
7ff120b4 6733 return arm_copy_unmodified (gdbarch, insn,
cca44b1b
JB
6734 "decode/pack/unpack/saturate/reverse", dsc);
6735
6736 case 0x18:
6737 if (bits (insn, 5, 7) == 0) /* op2. */
6738 {
6739 if (bits (insn, 12, 15) == 0xf)
7ff120b4 6740 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
cca44b1b 6741 else
7ff120b4 6742 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
cca44b1b
JB
6743 }
6744 else
7ff120b4 6745 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6746
6747 case 0x1a: case 0x1b:
6748 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7ff120b4 6749 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
cca44b1b 6750 else
7ff120b4 6751 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6752
6753 case 0x1c: case 0x1d:
6754 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
6755 {
6756 if (bits (insn, 0, 3) == 0xf)
7ff120b4 6757 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
cca44b1b 6758 else
7ff120b4 6759 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
cca44b1b
JB
6760 }
6761 else
7ff120b4 6762 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6763
6764 case 0x1e: case 0x1f:
6765 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7ff120b4 6766 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
cca44b1b 6767 else
7ff120b4 6768 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6769 }
6770
6771 /* Should be unreachable. */
6772 return 1;
6773}
6774
6775static int
615234c1 6776arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn,
7ff120b4
YQ
6777 struct regcache *regs,
6778 struct displaced_step_closure *dsc)
cca44b1b
JB
6779{
6780 if (bit (insn, 25))
7ff120b4 6781 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
cca44b1b 6782 else
7ff120b4 6783 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
cca44b1b
JB
6784}
6785
6786static int
7ff120b4
YQ
6787arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
6788 struct regcache *regs,
6789 struct displaced_step_closure *dsc)
cca44b1b
JB
6790{
6791 unsigned int opcode = bits (insn, 20, 24);
6792
6793 switch (opcode)
6794 {
6795 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
7ff120b4 6796 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
cca44b1b
JB
6797
6798 case 0x08: case 0x0a: case 0x0c: case 0x0e:
6799 case 0x12: case 0x16:
7ff120b4 6800 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
cca44b1b
JB
6801
6802 case 0x09: case 0x0b: case 0x0d: case 0x0f:
6803 case 0x13: case 0x17:
7ff120b4 6804 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
cca44b1b
JB
6805
6806 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6807 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6808 /* Note: no writeback for these instructions. Bit 25 will always be
6809 zero though (via caller), so the following works OK. */
7ff120b4 6810 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b
JB
6811 }
6812
6813 /* Should be unreachable. */
6814 return 1;
6815}
6816
34518530
YQ
6817/* Decode shifted register instructions. */
6818
6819static int
6820thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
6821 uint16_t insn2, struct regcache *regs,
6822 struct displaced_step_closure *dsc)
6823{
6824 /* PC is only allowed to be used in instruction MOV. */
6825
6826 unsigned int op = bits (insn1, 5, 8);
6827 unsigned int rn = bits (insn1, 0, 3);
6828
6829 if (op == 0x2 && rn == 0xf) /* MOV */
6830 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
6831 else
6832 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6833 "dp (shift reg)", dsc);
6834}
6835
6836
6837/* Decode extension register load/store. Exactly the same as
6838 arm_decode_ext_reg_ld_st. */
6839
6840static int
6841thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
6842 uint16_t insn2, struct regcache *regs,
6843 struct displaced_step_closure *dsc)
6844{
6845 unsigned int opcode = bits (insn1, 4, 8);
6846
6847 switch (opcode)
6848 {
6849 case 0x04: case 0x05:
6850 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6851 "vfp/neon vmov", dsc);
6852
6853 case 0x08: case 0x0c: /* 01x00 */
6854 case 0x0a: case 0x0e: /* 01x10 */
6855 case 0x12: case 0x16: /* 10x10 */
6856 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6857 "vfp/neon vstm/vpush", dsc);
6858
6859 case 0x09: case 0x0d: /* 01x01 */
6860 case 0x0b: case 0x0f: /* 01x11 */
6861 case 0x13: case 0x17: /* 10x11 */
6862 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6863 "vfp/neon vldm/vpop", dsc);
6864
6865 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6866 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6867 "vstr", dsc);
6868 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6869 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
6870 }
6871
6872 /* Should be unreachable. */
6873 return 1;
6874}
6875
cca44b1b 6876static int
12545665 6877arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
7ff120b4 6878 struct regcache *regs, struct displaced_step_closure *dsc)
cca44b1b
JB
6879{
6880 unsigned int op1 = bits (insn, 20, 25);
6881 int op = bit (insn, 4);
6882 unsigned int coproc = bits (insn, 8, 11);
cca44b1b
JB
6883
6884 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
7ff120b4 6885 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
cca44b1b
JB
6886 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
6887 && (coproc & 0xe) != 0xa)
6888 /* stc/stc2. */
7ff120b4 6889 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b
JB
6890 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
6891 && (coproc & 0xe) != 0xa)
6892 /* ldc/ldc2 imm/lit. */
7ff120b4 6893 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b 6894 else if ((op1 & 0x3e) == 0x00)
7ff120b4 6895 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b 6896 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
7ff120b4 6897 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
cca44b1b 6898 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
7ff120b4 6899 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
cca44b1b 6900 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
7ff120b4 6901 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
cca44b1b
JB
6902 else if ((op1 & 0x30) == 0x20 && !op)
6903 {
6904 if ((coproc & 0xe) == 0xa)
7ff120b4 6905 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
cca44b1b 6906 else
7ff120b4 6907 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
cca44b1b
JB
6908 }
6909 else if ((op1 & 0x30) == 0x20 && op)
7ff120b4 6910 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
cca44b1b 6911 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
7ff120b4 6912 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
cca44b1b 6913 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
7ff120b4 6914 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
cca44b1b 6915 else if ((op1 & 0x30) == 0x30)
7ff120b4 6916 return arm_copy_svc (gdbarch, insn, regs, dsc);
cca44b1b 6917 else
7ff120b4 6918 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
cca44b1b
JB
6919}
6920
34518530
YQ
6921static int
6922thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
6923 uint16_t insn2, struct regcache *regs,
6924 struct displaced_step_closure *dsc)
6925{
6926 unsigned int coproc = bits (insn2, 8, 11);
34518530
YQ
6927 unsigned int bit_5_8 = bits (insn1, 5, 8);
6928 unsigned int bit_9 = bit (insn1, 9);
6929 unsigned int bit_4 = bit (insn1, 4);
34518530
YQ
6930
6931 if (bit_9 == 0)
6932 {
6933 if (bit_5_8 == 2)
6934 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6935 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
6936 dsc);
6937 else if (bit_5_8 == 0) /* UNDEFINED. */
6938 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
6939 else
6940 {
6941 /*coproc is 101x. SIMD/VFP, ext registers load/store. */
6942 if ((coproc & 0xe) == 0xa)
6943 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
6944 dsc);
6945 else /* coproc is not 101x. */
6946 {
6947 if (bit_4 == 0) /* STC/STC2. */
6948 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6949 "stc/stc2", dsc);
6950 else /* LDC/LDC2 {literal, immeidate}. */
6951 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
6952 regs, dsc);
6953 }
6954 }
6955 }
6956 else
6957 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
6958
6959 return 0;
6960}
6961
6962static void
6963install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
6964 struct displaced_step_closure *dsc, int rd)
6965{
6966 /* ADR Rd, #imm
6967
6968 Rewrite as:
6969
6970 Preparation: Rd <- PC
6971 Insn: ADD Rd, #imm
6972 Cleanup: Null.
6973 */
6974
6975 /* Rd <- PC */
6976 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6977 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
6978}
6979
6980static int
6981thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
6982 struct displaced_step_closure *dsc,
6983 int rd, unsigned int imm)
6984{
6985
6986 /* Encoding T2: ADDS Rd, #imm */
6987 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
6988
6989 install_pc_relative (gdbarch, regs, dsc, rd);
6990
6991 return 0;
6992}
6993
6994static int
6995thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
6996 struct regcache *regs,
6997 struct displaced_step_closure *dsc)
6998{
6999 unsigned int rd = bits (insn, 8, 10);
7000 unsigned int imm8 = bits (insn, 0, 7);
7001
7002 if (debug_displaced)
7003 fprintf_unfiltered (gdb_stdlog,
7004 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
7005 rd, imm8, insn);
7006
7007 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
7008}
7009
7010static int
7011thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
7012 uint16_t insn2, struct regcache *regs,
7013 struct displaced_step_closure *dsc)
7014{
7015 unsigned int rd = bits (insn2, 8, 11);
7016 /* Since immediate has the same encoding in ADR ADD and SUB, so we simply
7017 extract raw immediate encoding rather than computing immediate. When
7018 generating ADD or SUB instruction, we can simply perform OR operation to
7019 set immediate into ADD. */
7020 unsigned int imm_3_8 = insn2 & 0x70ff;
7021 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
7022
7023 if (debug_displaced)
7024 fprintf_unfiltered (gdb_stdlog,
7025 "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
7026 rd, imm_i, imm_3_8, insn1, insn2);
7027
7028 if (bit (insn1, 7)) /* Encoding T2 */
7029 {
7030 /* Encoding T3: SUB Rd, Rd, #imm */
7031 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
7032 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7033 }
7034 else /* Encoding T3 */
7035 {
7036 /* Encoding T3: ADD Rd, Rd, #imm */
7037 dsc->modinsn[0] = (0xf100 | rd | imm_i);
7038 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7039 }
7040 dsc->numinsns = 2;
7041
7042 install_pc_relative (gdbarch, regs, dsc, rd);
7043
7044 return 0;
7045}
7046
7047static int
615234c1 7048thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, uint16_t insn1,
34518530
YQ
7049 struct regcache *regs,
7050 struct displaced_step_closure *dsc)
7051{
7052 unsigned int rt = bits (insn1, 8, 10);
7053 unsigned int pc;
7054 int imm8 = (bits (insn1, 0, 7) << 2);
34518530
YQ
7055
7056 /* LDR Rd, #imm8
7057
7058 Rwrite as:
7059
7060 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
7061
7062 Insn: LDR R0, [R2, R3];
7063 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
7064
7065 if (debug_displaced)
7066 fprintf_unfiltered (gdb_stdlog,
7067 "displaced: copying thumb ldr r%d [pc #%d]\n"
7068 , rt, imm8);
7069
7070 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
7071 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
7072 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
7073 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7074 /* The assembler calculates the required value of the offset from the
7075 Align(PC,4) value of this instruction to the label. */
7076 pc = pc & 0xfffffffc;
7077
7078 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
7079 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
7080
7081 dsc->rd = rt;
7082 dsc->u.ldst.xfersize = 4;
7083 dsc->u.ldst.rn = 0;
7084 dsc->u.ldst.immed = 0;
7085 dsc->u.ldst.writeback = 0;
7086 dsc->u.ldst.restore_r4 = 0;
7087
7088 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/
7089
7090 dsc->cleanup = &cleanup_load;
7091
7092 return 0;
7093}
7094
7095/* Copy Thumb cbnz/cbz insruction. */
7096
7097static int
7098thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
7099 struct regcache *regs,
7100 struct displaced_step_closure *dsc)
7101{
7102 int non_zero = bit (insn1, 11);
7103 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
7104 CORE_ADDR from = dsc->insn_addr;
7105 int rn = bits (insn1, 0, 2);
7106 int rn_val = displaced_read_reg (regs, dsc, rn);
7107
7108 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
7109 /* CBNZ and CBZ do not affect the condition flags. If condition is true,
7110 set it INST_AL, so cleanup_branch will know branch is taken, otherwise,
7111 condition is false, let it be, cleanup_branch will do nothing. */
7112 if (dsc->u.branch.cond)
7113 {
7114 dsc->u.branch.cond = INST_AL;
7115 dsc->u.branch.dest = from + 4 + imm5;
7116 }
7117 else
7118 dsc->u.branch.dest = from + 2;
7119
7120 dsc->u.branch.link = 0;
7121 dsc->u.branch.exchange = 0;
7122
7123 if (debug_displaced)
7124 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
7125 " insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
7126 rn, rn_val, insn1, dsc->u.branch.dest);
7127
7128 dsc->modinsn[0] = THUMB_NOP;
7129
7130 dsc->cleanup = &cleanup_branch;
7131 return 0;
7132}
7133
7134/* Copy Table Branch Byte/Halfword */
7135static int
7136thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
7137 uint16_t insn2, struct regcache *regs,
7138 struct displaced_step_closure *dsc)
7139{
7140 ULONGEST rn_val, rm_val;
7141 int is_tbh = bit (insn2, 4);
7142 CORE_ADDR halfwords = 0;
7143 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7144
7145 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
7146 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
7147
7148 if (is_tbh)
7149 {
7150 gdb_byte buf[2];
7151
7152 target_read_memory (rn_val + 2 * rm_val, buf, 2);
7153 halfwords = extract_unsigned_integer (buf, 2, byte_order);
7154 }
7155 else
7156 {
7157 gdb_byte buf[1];
7158
7159 target_read_memory (rn_val + rm_val, buf, 1);
7160 halfwords = extract_unsigned_integer (buf, 1, byte_order);
7161 }
7162
7163 if (debug_displaced)
7164 fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x offset 0x%x"
7165 " offset 0x%x\n", is_tbh ? "tbh" : "tbb",
7166 (unsigned int) rn_val, (unsigned int) rm_val,
7167 (unsigned int) halfwords);
7168
7169 dsc->u.branch.cond = INST_AL;
7170 dsc->u.branch.link = 0;
7171 dsc->u.branch.exchange = 0;
7172 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
7173
7174 dsc->cleanup = &cleanup_branch;
7175
7176 return 0;
7177}
7178
7179static void
7180cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
7181 struct displaced_step_closure *dsc)
7182{
7183 /* PC <- r7 */
7184 int val = displaced_read_reg (regs, dsc, 7);
7185 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
7186
7187 /* r7 <- r8 */
7188 val = displaced_read_reg (regs, dsc, 8);
7189 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
7190
7191 /* r8 <- tmp[0] */
7192 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
7193
7194}
7195
7196static int
615234c1 7197thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, uint16_t insn1,
34518530
YQ
7198 struct regcache *regs,
7199 struct displaced_step_closure *dsc)
7200{
7201 dsc->u.block.regmask = insn1 & 0x00ff;
7202
7203 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
7204 to :
7205
7206 (1) register list is full, that is, r0-r7 are used.
7207 Prepare: tmp[0] <- r8
7208
7209 POP {r0, r1, ...., r6, r7}; remove PC from reglist
7210 MOV r8, r7; Move value of r7 to r8;
7211 POP {r7}; Store PC value into r7.
7212
7213 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
7214
7215 (2) register list is not full, supposing there are N registers in
7216 register list (except PC, 0 <= N <= 7).
7217 Prepare: for each i, 0 - N, tmp[i] <- ri.
7218
7219 POP {r0, r1, ...., rN};
7220
7221 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
7222 from tmp[] properly.
7223 */
7224 if (debug_displaced)
7225 fprintf_unfiltered (gdb_stdlog,
7226 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
7227 dsc->u.block.regmask, insn1);
7228
7229 if (dsc->u.block.regmask == 0xff)
7230 {
7231 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
7232
7233 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
7234 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
7235 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
7236
7237 dsc->numinsns = 3;
7238 dsc->cleanup = &cleanup_pop_pc_16bit_all;
7239 }
7240 else
7241 {
7242 unsigned int num_in_list = bitcount (dsc->u.block.regmask);
bec2ab5a
SM
7243 unsigned int i;
7244 unsigned int new_regmask;
34518530
YQ
7245
7246 for (i = 0; i < num_in_list + 1; i++)
7247 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7248
7249 new_regmask = (1 << (num_in_list + 1)) - 1;
7250
7251 if (debug_displaced)
7252 fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
7253 "{..., pc}: original reg list %.4x,"
7254 " modified list %.4x\n"),
7255 (int) dsc->u.block.regmask, new_regmask);
7256
7257 dsc->u.block.regmask |= 0x8000;
7258 dsc->u.block.writeback = 0;
7259 dsc->u.block.cond = INST_AL;
7260
7261 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
7262
7263 dsc->cleanup = &cleanup_block_load_pc;
7264 }
7265
7266 return 0;
7267}
7268
7269static void
7270thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7271 struct regcache *regs,
7272 struct displaced_step_closure *dsc)
7273{
7274 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
7275 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
7276 int err = 0;
7277
7278 /* 16-bit thumb instructions. */
7279 switch (op_bit_12_15)
7280 {
7281 /* Shift (imme), add, subtract, move and compare. */
7282 case 0: case 1: case 2: case 3:
7283 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7284 "shift/add/sub/mov/cmp",
7285 dsc);
7286 break;
7287 case 4:
7288 switch (op_bit_10_11)
7289 {
7290 case 0: /* Data-processing */
7291 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7292 "data-processing",
7293 dsc);
7294 break;
7295 case 1: /* Special data instructions and branch and exchange. */
7296 {
7297 unsigned short op = bits (insn1, 7, 9);
7298 if (op == 6 || op == 7) /* BX or BLX */
7299 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
7300 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
7301 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
7302 else
7303 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
7304 dsc);
7305 }
7306 break;
7307 default: /* LDR (literal) */
7308 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
7309 }
7310 break;
7311 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
7312 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
7313 break;
7314 case 10:
7315 if (op_bit_10_11 < 2) /* Generate PC-relative address */
7316 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
7317 else /* Generate SP-relative address */
7318 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
7319 break;
7320 case 11: /* Misc 16-bit instructions */
7321 {
7322 switch (bits (insn1, 8, 11))
7323 {
7324 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
7325 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
7326 break;
7327 case 12: case 13: /* POP */
7328 if (bit (insn1, 8)) /* PC is in register list. */
7329 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
7330 else
7331 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
7332 break;
7333 case 15: /* If-Then, and hints */
7334 if (bits (insn1, 0, 3))
7335 /* If-Then makes up to four following instructions conditional.
7336 IT instruction itself is not conditional, so handle it as a
7337 common unmodified instruction. */
7338 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
7339 dsc);
7340 else
7341 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
7342 break;
7343 default:
7344 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
7345 }
7346 }
7347 break;
7348 case 12:
7349 if (op_bit_10_11 < 2) /* Store multiple registers */
7350 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
7351 else /* Load multiple registers */
7352 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
7353 break;
7354 case 13: /* Conditional branch and supervisor call */
7355 if (bits (insn1, 9, 11) != 7) /* conditional branch */
7356 err = thumb_copy_b (gdbarch, insn1, dsc);
7357 else
7358 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
7359 break;
7360 case 14: /* Unconditional branch */
7361 err = thumb_copy_b (gdbarch, insn1, dsc);
7362 break;
7363 default:
7364 err = 1;
7365 }
7366
7367 if (err)
7368 internal_error (__FILE__, __LINE__,
7369 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
7370}
7371
7372static int
7373decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
7374 uint16_t insn1, uint16_t insn2,
7375 struct regcache *regs,
7376 struct displaced_step_closure *dsc)
7377{
7378 int rt = bits (insn2, 12, 15);
7379 int rn = bits (insn1, 0, 3);
7380 int op1 = bits (insn1, 7, 8);
34518530
YQ
7381
7382 switch (bits (insn1, 5, 6))
7383 {
7384 case 0: /* Load byte and memory hints */
7385 if (rt == 0xf) /* PLD/PLI */
7386 {
7387 if (rn == 0xf)
7388 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
7389 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
7390 else
7391 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7392 "pli/pld", dsc);
7393 }
7394 else
7395 {
7396 if (rn == 0xf) /* LDRB/LDRSB (literal) */
7397 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7398 1);
7399 else
7400 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7401 "ldrb{reg, immediate}/ldrbt",
7402 dsc);
7403 }
7404
7405 break;
7406 case 1: /* Load halfword and memory hints. */
7407 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
7408 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7409 "pld/unalloc memhint", dsc);
7410 else
7411 {
7412 if (rn == 0xf)
7413 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7414 2);
7415 else
7416 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7417 "ldrh/ldrht", dsc);
7418 }
7419 break;
7420 case 2: /* Load word */
7421 {
7422 int insn2_bit_8_11 = bits (insn2, 8, 11);
7423
7424 if (rn == 0xf)
7425 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
7426 else if (op1 == 0x1) /* Encoding T3 */
7427 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
7428 0, 1);
7429 else /* op1 == 0x0 */
7430 {
7431 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
7432 /* LDR (immediate) */
7433 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7434 dsc, bit (insn2, 8), 1);
7435 else if (insn2_bit_8_11 == 0xe) /* LDRT */
7436 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7437 "ldrt", dsc);
7438 else
7439 /* LDR (register) */
7440 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7441 dsc, 0, 0);
7442 }
7443 break;
7444 }
7445 default:
7446 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7447 break;
7448 }
7449 return 0;
7450}
7451
7452static void
7453thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7454 uint16_t insn2, struct regcache *regs,
7455 struct displaced_step_closure *dsc)
7456{
7457 int err = 0;
7458 unsigned short op = bit (insn2, 15);
7459 unsigned int op1 = bits (insn1, 11, 12);
7460
7461 switch (op1)
7462 {
7463 case 1:
7464 {
7465 switch (bits (insn1, 9, 10))
7466 {
7467 case 0:
7468 if (bit (insn1, 6))
7469 {
7470 /* Load/store {dual, execlusive}, table branch. */
7471 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
7472 && bits (insn2, 5, 7) == 0)
7473 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
7474 dsc);
7475 else
7476 /* PC is not allowed to use in load/store {dual, exclusive}
7477 instructions. */
7478 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7479 "load/store dual/ex", dsc);
7480 }
7481 else /* load/store multiple */
7482 {
7483 switch (bits (insn1, 7, 8))
7484 {
7485 case 0: case 3: /* SRS, RFE */
7486 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7487 "srs/rfe", dsc);
7488 break;
7489 case 1: case 2: /* LDM/STM/PUSH/POP */
7490 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
7491 break;
7492 }
7493 }
7494 break;
7495
7496 case 1:
7497 /* Data-processing (shift register). */
7498 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
7499 dsc);
7500 break;
7501 default: /* Coprocessor instructions. */
7502 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7503 break;
7504 }
7505 break;
7506 }
7507 case 2: /* op1 = 2 */
7508 if (op) /* Branch and misc control. */
7509 {
7510 if (bit (insn2, 14) /* BLX/BL */
7511 || bit (insn2, 12) /* Unconditional branch */
7512 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
7513 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
7514 else
7515 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7516 "misc ctrl", dsc);
7517 }
7518 else
7519 {
7520 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
7521 {
7522 int op = bits (insn1, 4, 8);
7523 int rn = bits (insn1, 0, 3);
7524 if ((op == 0 || op == 0xa) && rn == 0xf)
7525 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
7526 regs, dsc);
7527 else
7528 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7529 "dp/pb", dsc);
7530 }
7531 else /* Data processing (modified immeidate) */
7532 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7533 "dp/mi", dsc);
7534 }
7535 break;
7536 case 3: /* op1 = 3 */
7537 switch (bits (insn1, 9, 10))
7538 {
7539 case 0:
7540 if (bit (insn1, 4))
7541 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
7542 regs, dsc);
7543 else /* NEON Load/Store and Store single data item */
7544 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7545 "neon elt/struct load/store",
7546 dsc);
7547 break;
7548 case 1: /* op1 = 3, bits (9, 10) == 1 */
7549 switch (bits (insn1, 7, 8))
7550 {
7551 case 0: case 1: /* Data processing (register) */
7552 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7553 "dp(reg)", dsc);
7554 break;
7555 case 2: /* Multiply and absolute difference */
7556 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7557 "mul/mua/diff", dsc);
7558 break;
7559 case 3: /* Long multiply and divide */
7560 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7561 "lmul/lmua", dsc);
7562 break;
7563 }
7564 break;
7565 default: /* Coprocessor instructions */
7566 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7567 break;
7568 }
7569 break;
7570 default:
7571 err = 1;
7572 }
7573
7574 if (err)
7575 internal_error (__FILE__, __LINE__,
7576 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
7577
7578}
7579
b434a28f
YQ
7580static void
7581thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
12545665 7582 struct regcache *regs,
b434a28f
YQ
7583 struct displaced_step_closure *dsc)
7584{
34518530
YQ
7585 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7586 uint16_t insn1
7587 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
7588
7589 if (debug_displaced)
7590 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
7591 "at %.8lx\n", insn1, (unsigned long) from);
7592
7593 dsc->is_thumb = 1;
7594 dsc->insn_size = thumb_insn_size (insn1);
7595 if (thumb_insn_size (insn1) == 4)
7596 {
7597 uint16_t insn2
7598 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
7599 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
7600 }
7601 else
7602 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
b434a28f
YQ
7603}
7604
cca44b1b 7605void
b434a28f
YQ
7606arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7607 CORE_ADDR to, struct regcache *regs,
cca44b1b
JB
7608 struct displaced_step_closure *dsc)
7609{
7610 int err = 0;
b434a28f
YQ
7611 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7612 uint32_t insn;
cca44b1b
JB
7613
7614 /* Most displaced instructions use a 1-instruction scratch space, so set this
7615 here and override below if/when necessary. */
7616 dsc->numinsns = 1;
7617 dsc->insn_addr = from;
7618 dsc->scratch_base = to;
7619 dsc->cleanup = NULL;
7620 dsc->wrote_to_pc = 0;
7621
b434a28f 7622 if (!displaced_in_arm_mode (regs))
12545665 7623 return thumb_process_displaced_insn (gdbarch, from, regs, dsc);
b434a28f 7624
4db71c0b
YQ
7625 dsc->is_thumb = 0;
7626 dsc->insn_size = 4;
b434a28f
YQ
7627 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
7628 if (debug_displaced)
7629 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
7630 "at %.8lx\n", (unsigned long) insn,
7631 (unsigned long) from);
7632
cca44b1b 7633 if ((insn & 0xf0000000) == 0xf0000000)
7ff120b4 7634 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
cca44b1b
JB
7635 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
7636 {
7637 case 0x0: case 0x1: case 0x2: case 0x3:
7ff120b4 7638 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
cca44b1b
JB
7639 break;
7640
7641 case 0x4: case 0x5: case 0x6:
7ff120b4 7642 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
cca44b1b
JB
7643 break;
7644
7645 case 0x7:
7ff120b4 7646 err = arm_decode_media (gdbarch, insn, dsc);
cca44b1b
JB
7647 break;
7648
7649 case 0x8: case 0x9: case 0xa: case 0xb:
7ff120b4 7650 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
cca44b1b
JB
7651 break;
7652
7653 case 0xc: case 0xd: case 0xe: case 0xf:
12545665 7654 err = arm_decode_svc_copro (gdbarch, insn, regs, dsc);
cca44b1b
JB
7655 break;
7656 }
7657
7658 if (err)
7659 internal_error (__FILE__, __LINE__,
7660 _("arm_process_displaced_insn: Instruction decode error"));
7661}
7662
7663/* Actually set up the scratch space for a displaced instruction. */
7664
7665void
7666arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
7667 CORE_ADDR to, struct displaced_step_closure *dsc)
7668{
7669 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4db71c0b 7670 unsigned int i, len, offset;
cca44b1b 7671 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4db71c0b 7672 int size = dsc->is_thumb? 2 : 4;
948f8e3d 7673 const gdb_byte *bkp_insn;
cca44b1b 7674
4db71c0b 7675 offset = 0;
cca44b1b
JB
7676 /* Poke modified instruction(s). */
7677 for (i = 0; i < dsc->numinsns; i++)
7678 {
7679 if (debug_displaced)
4db71c0b
YQ
7680 {
7681 fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
7682 if (size == 4)
7683 fprintf_unfiltered (gdb_stdlog, "%.8lx",
7684 dsc->modinsn[i]);
7685 else if (size == 2)
7686 fprintf_unfiltered (gdb_stdlog, "%.4x",
7687 (unsigned short)dsc->modinsn[i]);
7688
7689 fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
7690 (unsigned long) to + offset);
7691
7692 }
7693 write_memory_unsigned_integer (to + offset, size,
7694 byte_order_for_code,
cca44b1b 7695 dsc->modinsn[i]);
4db71c0b
YQ
7696 offset += size;
7697 }
7698
7699 /* Choose the correct breakpoint instruction. */
7700 if (dsc->is_thumb)
7701 {
7702 bkp_insn = tdep->thumb_breakpoint;
7703 len = tdep->thumb_breakpoint_size;
7704 }
7705 else
7706 {
7707 bkp_insn = tdep->arm_breakpoint;
7708 len = tdep->arm_breakpoint_size;
cca44b1b
JB
7709 }
7710
7711 /* Put breakpoint afterwards. */
4db71c0b 7712 write_memory (to + offset, bkp_insn, len);
cca44b1b
JB
7713
7714 if (debug_displaced)
7715 fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
7716 paddress (gdbarch, from), paddress (gdbarch, to));
7717}
7718
cca44b1b
JB
7719/* Entry point for cleaning things up after a displaced instruction has been
7720 single-stepped. */
7721
7722void
7723arm_displaced_step_fixup (struct gdbarch *gdbarch,
7724 struct displaced_step_closure *dsc,
7725 CORE_ADDR from, CORE_ADDR to,
7726 struct regcache *regs)
7727{
7728 if (dsc->cleanup)
7729 dsc->cleanup (gdbarch, regs, dsc);
7730
7731 if (!dsc->wrote_to_pc)
4db71c0b
YQ
7732 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
7733 dsc->insn_addr + dsc->insn_size);
7734
cca44b1b
JB
7735}
7736
7737#include "bfd-in2.h"
7738#include "libcoff.h"
7739
7740static int
7741gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
7742{
e47ad6c0
YQ
7743 gdb_disassembler *di
7744 = static_cast<gdb_disassembler *>(info->application_data);
7745 struct gdbarch *gdbarch = di->arch ();
9779414d
DJ
7746
7747 if (arm_pc_is_thumb (gdbarch, memaddr))
cca44b1b
JB
7748 {
7749 static asymbol *asym;
7750 static combined_entry_type ce;
7751 static struct coff_symbol_struct csym;
7752 static struct bfd fake_bfd;
7753 static bfd_target fake_target;
7754
7755 if (csym.native == NULL)
7756 {
7757 /* Create a fake symbol vector containing a Thumb symbol.
7758 This is solely so that the code in print_insn_little_arm()
7759 and print_insn_big_arm() in opcodes/arm-dis.c will detect
7760 the presence of a Thumb symbol and switch to decoding
7761 Thumb instructions. */
7762
7763 fake_target.flavour = bfd_target_coff_flavour;
7764 fake_bfd.xvec = &fake_target;
7765 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
7766 csym.native = &ce;
7767 csym.symbol.the_bfd = &fake_bfd;
7768 csym.symbol.name = "fake";
7769 asym = (asymbol *) & csym;
7770 }
7771
7772 memaddr = UNMAKE_THUMB_ADDR (memaddr);
7773 info->symbols = &asym;
7774 }
7775 else
7776 info->symbols = NULL;
7777
7778 if (info->endian == BFD_ENDIAN_BIG)
7779 return print_insn_big_arm (memaddr, info);
7780 else
7781 return print_insn_little_arm (memaddr, info);
7782}
7783
7784/* The following define instruction sequences that will cause ARM
7785 cpu's to take an undefined instruction trap. These are used to
7786 signal a breakpoint to GDB.
7787
7788 The newer ARMv4T cpu's are capable of operating in ARM or Thumb
7789 modes. A different instruction is required for each mode. The ARM
7790 cpu's can also be big or little endian. Thus four different
7791 instructions are needed to support all cases.
7792
7793 Note: ARMv4 defines several new instructions that will take the
7794 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
7795 not in fact add the new instructions. The new undefined
7796 instructions in ARMv4 are all instructions that had no defined
7797 behaviour in earlier chips. There is no guarantee that they will
7798 raise an exception, but may be treated as NOP's. In practice, it
7799 may only safe to rely on instructions matching:
7800
7801 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
7802 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
7803 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
7804
0963b4bd 7805 Even this may only true if the condition predicate is true. The
cca44b1b
JB
7806 following use a condition predicate of ALWAYS so it is always TRUE.
7807
7808 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
7809 and NetBSD all use a software interrupt rather than an undefined
7810 instruction to force a trap. This can be handled by by the
7811 abi-specific code during establishment of the gdbarch vector. */
7812
7813#define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
7814#define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
7815#define THUMB_LE_BREAKPOINT {0xbe,0xbe}
7816#define THUMB_BE_BREAKPOINT {0xbe,0xbe}
7817
948f8e3d
PA
7818static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
7819static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
7820static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
7821static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
cca44b1b 7822
cd6c3b4f
YQ
7823/* Implement the breakpoint_kind_from_pc gdbarch method. */
7824
d19280ad
YQ
7825static int
7826arm_breakpoint_kind_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr)
cca44b1b
JB
7827{
7828 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
177321bd 7829 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
cca44b1b 7830
9779414d 7831 if (arm_pc_is_thumb (gdbarch, *pcptr))
cca44b1b
JB
7832 {
7833 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
177321bd
DJ
7834
7835 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
7836 check whether we are replacing a 32-bit instruction. */
7837 if (tdep->thumb2_breakpoint != NULL)
7838 {
7839 gdb_byte buf[2];
d19280ad 7840
177321bd
DJ
7841 if (target_read_memory (*pcptr, buf, 2) == 0)
7842 {
7843 unsigned short inst1;
d19280ad 7844
177321bd 7845 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
db24da6d 7846 if (thumb_insn_size (inst1) == 4)
d19280ad 7847 return ARM_BP_KIND_THUMB2;
177321bd
DJ
7848 }
7849 }
7850
d19280ad 7851 return ARM_BP_KIND_THUMB;
cca44b1b
JB
7852 }
7853 else
d19280ad
YQ
7854 return ARM_BP_KIND_ARM;
7855
7856}
7857
cd6c3b4f
YQ
7858/* Implement the sw_breakpoint_from_kind gdbarch method. */
7859
d19280ad
YQ
7860static const gdb_byte *
7861arm_sw_breakpoint_from_kind (struct gdbarch *gdbarch, int kind, int *size)
7862{
7863 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7864
7865 switch (kind)
cca44b1b 7866 {
d19280ad
YQ
7867 case ARM_BP_KIND_ARM:
7868 *size = tdep->arm_breakpoint_size;
cca44b1b 7869 return tdep->arm_breakpoint;
d19280ad
YQ
7870 case ARM_BP_KIND_THUMB:
7871 *size = tdep->thumb_breakpoint_size;
7872 return tdep->thumb_breakpoint;
7873 case ARM_BP_KIND_THUMB2:
7874 *size = tdep->thumb2_breakpoint_size;
7875 return tdep->thumb2_breakpoint;
7876 default:
7877 gdb_assert_not_reached ("unexpected arm breakpoint kind");
cca44b1b
JB
7878 }
7879}
7880
833b7ab5
YQ
7881/* Implement the breakpoint_kind_from_current_state gdbarch method. */
7882
7883static int
7884arm_breakpoint_kind_from_current_state (struct gdbarch *gdbarch,
7885 struct regcache *regcache,
7886 CORE_ADDR *pcptr)
7887{
7888 gdb_byte buf[4];
7889
7890 /* Check the memory pointed by PC is readable. */
7891 if (target_read_memory (regcache_read_pc (regcache), buf, 4) == 0)
7892 {
7893 struct arm_get_next_pcs next_pcs_ctx;
7894 CORE_ADDR pc;
7895 int i;
7896 VEC (CORE_ADDR) *next_pcs = NULL;
7897 struct cleanup *old_chain
7898 = make_cleanup (VEC_cleanup (CORE_ADDR), &next_pcs);
7899
7900 arm_get_next_pcs_ctor (&next_pcs_ctx,
7901 &arm_get_next_pcs_ops,
7902 gdbarch_byte_order (gdbarch),
7903 gdbarch_byte_order_for_code (gdbarch),
7904 0,
7905 regcache);
7906
7907 next_pcs = arm_get_next_pcs (&next_pcs_ctx);
7908
7909 /* If MEMADDR is the next instruction of current pc, do the
7910 software single step computation, and get the thumb mode by
7911 the destination address. */
7912 for (i = 0; VEC_iterate (CORE_ADDR, next_pcs, i, pc); i++)
7913 {
7914 if (UNMAKE_THUMB_ADDR (pc) == *pcptr)
7915 {
7916 do_cleanups (old_chain);
7917
7918 if (IS_THUMB_ADDR (pc))
7919 {
7920 *pcptr = MAKE_THUMB_ADDR (*pcptr);
7921 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7922 }
7923 else
7924 return ARM_BP_KIND_ARM;
7925 }
7926 }
7927
7928 do_cleanups (old_chain);
7929 }
7930
7931 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7932}
7933
cca44b1b
JB
7934/* Extract from an array REGBUF containing the (raw) register state a
7935 function return value of type TYPE, and copy that, in virtual
7936 format, into VALBUF. */
7937
7938static void
7939arm_extract_return_value (struct type *type, struct regcache *regs,
7940 gdb_byte *valbuf)
7941{
7942 struct gdbarch *gdbarch = get_regcache_arch (regs);
7943 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7944
7945 if (TYPE_CODE_FLT == TYPE_CODE (type))
7946 {
7947 switch (gdbarch_tdep (gdbarch)->fp_model)
7948 {
7949 case ARM_FLOAT_FPA:
7950 {
7951 /* The value is in register F0 in internal format. We need to
7952 extract the raw value and then convert it to the desired
7953 internal type. */
7954 bfd_byte tmpbuf[FP_REGISTER_SIZE];
7955
7956 regcache_cooked_read (regs, ARM_F0_REGNUM, tmpbuf);
7957 convert_from_extended (floatformat_from_type (type), tmpbuf,
7958 valbuf, gdbarch_byte_order (gdbarch));
7959 }
7960 break;
7961
7962 case ARM_FLOAT_SOFT_FPA:
7963 case ARM_FLOAT_SOFT_VFP:
7964 /* ARM_FLOAT_VFP can arise if this is a variadic function so
7965 not using the VFP ABI code. */
7966 case ARM_FLOAT_VFP:
7967 regcache_cooked_read (regs, ARM_A1_REGNUM, valbuf);
7968 if (TYPE_LENGTH (type) > 4)
7969 regcache_cooked_read (regs, ARM_A1_REGNUM + 1,
7970 valbuf + INT_REGISTER_SIZE);
7971 break;
7972
7973 default:
0963b4bd
MS
7974 internal_error (__FILE__, __LINE__,
7975 _("arm_extract_return_value: "
7976 "Floating point model not supported"));
cca44b1b
JB
7977 break;
7978 }
7979 }
7980 else if (TYPE_CODE (type) == TYPE_CODE_INT
7981 || TYPE_CODE (type) == TYPE_CODE_CHAR
7982 || TYPE_CODE (type) == TYPE_CODE_BOOL
7983 || TYPE_CODE (type) == TYPE_CODE_PTR
7984 || TYPE_CODE (type) == TYPE_CODE_REF
7985 || TYPE_CODE (type) == TYPE_CODE_ENUM)
7986 {
b021a221
MS
7987 /* If the type is a plain integer, then the access is
7988 straight-forward. Otherwise we have to play around a bit
7989 more. */
cca44b1b
JB
7990 int len = TYPE_LENGTH (type);
7991 int regno = ARM_A1_REGNUM;
7992 ULONGEST tmp;
7993
7994 while (len > 0)
7995 {
7996 /* By using store_unsigned_integer we avoid having to do
7997 anything special for small big-endian values. */
7998 regcache_cooked_read_unsigned (regs, regno++, &tmp);
7999 store_unsigned_integer (valbuf,
8000 (len > INT_REGISTER_SIZE
8001 ? INT_REGISTER_SIZE : len),
8002 byte_order, tmp);
8003 len -= INT_REGISTER_SIZE;
8004 valbuf += INT_REGISTER_SIZE;
8005 }
8006 }
8007 else
8008 {
8009 /* For a structure or union the behaviour is as if the value had
8010 been stored to word-aligned memory and then loaded into
8011 registers with 32-bit load instruction(s). */
8012 int len = TYPE_LENGTH (type);
8013 int regno = ARM_A1_REGNUM;
8014 bfd_byte tmpbuf[INT_REGISTER_SIZE];
8015
8016 while (len > 0)
8017 {
8018 regcache_cooked_read (regs, regno++, tmpbuf);
8019 memcpy (valbuf, tmpbuf,
8020 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
8021 len -= INT_REGISTER_SIZE;
8022 valbuf += INT_REGISTER_SIZE;
8023 }
8024 }
8025}
8026
8027
8028/* Will a function return an aggregate type in memory or in a
8029 register? Return 0 if an aggregate type can be returned in a
8030 register, 1 if it must be returned in memory. */
8031
8032static int
8033arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
8034{
cca44b1b
JB
8035 enum type_code code;
8036
f168693b 8037 type = check_typedef (type);
cca44b1b 8038
b13c8ab2
YQ
8039 /* Simple, non-aggregate types (ie not including vectors and
8040 complex) are always returned in a register (or registers). */
8041 code = TYPE_CODE (type);
8042 if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
8043 && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
8044 return 0;
cca44b1b 8045
c4312b19
YQ
8046 if (TYPE_CODE_ARRAY == code && TYPE_VECTOR (type))
8047 {
8048 /* Vector values should be returned using ARM registers if they
8049 are not over 16 bytes. */
8050 return (TYPE_LENGTH (type) > 16);
8051 }
8052
b13c8ab2 8053 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
cca44b1b 8054 {
b13c8ab2
YQ
8055 /* The AAPCS says all aggregates not larger than a word are returned
8056 in a register. */
8057 if (TYPE_LENGTH (type) <= INT_REGISTER_SIZE)
8058 return 0;
8059
cca44b1b
JB
8060 return 1;
8061 }
b13c8ab2
YQ
8062 else
8063 {
8064 int nRc;
cca44b1b 8065
b13c8ab2
YQ
8066 /* All aggregate types that won't fit in a register must be returned
8067 in memory. */
8068 if (TYPE_LENGTH (type) > INT_REGISTER_SIZE)
8069 return 1;
cca44b1b 8070
b13c8ab2
YQ
8071 /* In the ARM ABI, "integer" like aggregate types are returned in
8072 registers. For an aggregate type to be integer like, its size
8073 must be less than or equal to INT_REGISTER_SIZE and the
8074 offset of each addressable subfield must be zero. Note that bit
8075 fields are not addressable, and all addressable subfields of
8076 unions always start at offset zero.
cca44b1b 8077
b13c8ab2
YQ
8078 This function is based on the behaviour of GCC 2.95.1.
8079 See: gcc/arm.c: arm_return_in_memory() for details.
cca44b1b 8080
b13c8ab2
YQ
8081 Note: All versions of GCC before GCC 2.95.2 do not set up the
8082 parameters correctly for a function returning the following
8083 structure: struct { float f;}; This should be returned in memory,
8084 not a register. Richard Earnshaw sent me a patch, but I do not
8085 know of any way to detect if a function like the above has been
8086 compiled with the correct calling convention. */
8087
8088 /* Assume all other aggregate types can be returned in a register.
8089 Run a check for structures, unions and arrays. */
8090 nRc = 0;
67255d04 8091
b13c8ab2
YQ
8092 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
8093 {
8094 int i;
8095 /* Need to check if this struct/union is "integer" like. For
8096 this to be true, its size must be less than or equal to
8097 INT_REGISTER_SIZE and the offset of each addressable
8098 subfield must be zero. Note that bit fields are not
8099 addressable, and unions always start at offset zero. If any
8100 of the subfields is a floating point type, the struct/union
8101 cannot be an integer type. */
8102
8103 /* For each field in the object, check:
8104 1) Is it FP? --> yes, nRc = 1;
8105 2) Is it addressable (bitpos != 0) and
8106 not packed (bitsize == 0)?
8107 --> yes, nRc = 1
8108 */
8109
8110 for (i = 0; i < TYPE_NFIELDS (type); i++)
67255d04 8111 {
b13c8ab2
YQ
8112 enum type_code field_type_code;
8113
8114 field_type_code
8115 = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
8116 i)));
8117
8118 /* Is it a floating point type field? */
8119 if (field_type_code == TYPE_CODE_FLT)
67255d04
RE
8120 {
8121 nRc = 1;
8122 break;
8123 }
b13c8ab2
YQ
8124
8125 /* If bitpos != 0, then we have to care about it. */
8126 if (TYPE_FIELD_BITPOS (type, i) != 0)
8127 {
8128 /* Bitfields are not addressable. If the field bitsize is
8129 zero, then the field is not packed. Hence it cannot be
8130 a bitfield or any other packed type. */
8131 if (TYPE_FIELD_BITSIZE (type, i) == 0)
8132 {
8133 nRc = 1;
8134 break;
8135 }
8136 }
67255d04
RE
8137 }
8138 }
67255d04 8139
b13c8ab2
YQ
8140 return nRc;
8141 }
67255d04
RE
8142}
8143
34e8f22d
RE
8144/* Write into appropriate registers a function return value of type
8145 TYPE, given in virtual format. */
8146
8147static void
b508a996 8148arm_store_return_value (struct type *type, struct regcache *regs,
5238cf52 8149 const gdb_byte *valbuf)
34e8f22d 8150{
be8626e0 8151 struct gdbarch *gdbarch = get_regcache_arch (regs);
e17a4113 8152 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
be8626e0 8153
34e8f22d
RE
8154 if (TYPE_CODE (type) == TYPE_CODE_FLT)
8155 {
e362b510 8156 gdb_byte buf[MAX_REGISTER_SIZE];
34e8f22d 8157
be8626e0 8158 switch (gdbarch_tdep (gdbarch)->fp_model)
08216dd7
RE
8159 {
8160 case ARM_FLOAT_FPA:
8161
be8626e0
MD
8162 convert_to_extended (floatformat_from_type (type), buf, valbuf,
8163 gdbarch_byte_order (gdbarch));
b508a996 8164 regcache_cooked_write (regs, ARM_F0_REGNUM, buf);
08216dd7
RE
8165 break;
8166
fd50bc42 8167 case ARM_FLOAT_SOFT_FPA:
08216dd7 8168 case ARM_FLOAT_SOFT_VFP:
90445bd3
DJ
8169 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8170 not using the VFP ABI code. */
8171 case ARM_FLOAT_VFP:
b508a996
RE
8172 regcache_cooked_write (regs, ARM_A1_REGNUM, valbuf);
8173 if (TYPE_LENGTH (type) > 4)
8174 regcache_cooked_write (regs, ARM_A1_REGNUM + 1,
7a5ea0d4 8175 valbuf + INT_REGISTER_SIZE);
08216dd7
RE
8176 break;
8177
8178 default:
9b20d036
MS
8179 internal_error (__FILE__, __LINE__,
8180 _("arm_store_return_value: Floating "
8181 "point model not supported"));
08216dd7
RE
8182 break;
8183 }
34e8f22d 8184 }
b508a996
RE
8185 else if (TYPE_CODE (type) == TYPE_CODE_INT
8186 || TYPE_CODE (type) == TYPE_CODE_CHAR
8187 || TYPE_CODE (type) == TYPE_CODE_BOOL
8188 || TYPE_CODE (type) == TYPE_CODE_PTR
8189 || TYPE_CODE (type) == TYPE_CODE_REF
8190 || TYPE_CODE (type) == TYPE_CODE_ENUM)
8191 {
8192 if (TYPE_LENGTH (type) <= 4)
8193 {
8194 /* Values of one word or less are zero/sign-extended and
8195 returned in r0. */
7a5ea0d4 8196 bfd_byte tmpbuf[INT_REGISTER_SIZE];
b508a996
RE
8197 LONGEST val = unpack_long (type, valbuf);
8198
e17a4113 8199 store_signed_integer (tmpbuf, INT_REGISTER_SIZE, byte_order, val);
b508a996
RE
8200 regcache_cooked_write (regs, ARM_A1_REGNUM, tmpbuf);
8201 }
8202 else
8203 {
8204 /* Integral values greater than one word are stored in consecutive
8205 registers starting with r0. This will always be a multiple of
8206 the regiser size. */
8207 int len = TYPE_LENGTH (type);
8208 int regno = ARM_A1_REGNUM;
8209
8210 while (len > 0)
8211 {
8212 regcache_cooked_write (regs, regno++, valbuf);
7a5ea0d4
DJ
8213 len -= INT_REGISTER_SIZE;
8214 valbuf += INT_REGISTER_SIZE;
b508a996
RE
8215 }
8216 }
8217 }
34e8f22d 8218 else
b508a996
RE
8219 {
8220 /* For a structure or union the behaviour is as if the value had
8221 been stored to word-aligned memory and then loaded into
8222 registers with 32-bit load instruction(s). */
8223 int len = TYPE_LENGTH (type);
8224 int regno = ARM_A1_REGNUM;
7a5ea0d4 8225 bfd_byte tmpbuf[INT_REGISTER_SIZE];
b508a996
RE
8226
8227 while (len > 0)
8228 {
8229 memcpy (tmpbuf, valbuf,
7a5ea0d4 8230 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
b508a996 8231 regcache_cooked_write (regs, regno++, tmpbuf);
7a5ea0d4
DJ
8232 len -= INT_REGISTER_SIZE;
8233 valbuf += INT_REGISTER_SIZE;
b508a996
RE
8234 }
8235 }
34e8f22d
RE
8236}
8237
2af48f68
PB
8238
8239/* Handle function return values. */
8240
8241static enum return_value_convention
6a3a010b 8242arm_return_value (struct gdbarch *gdbarch, struct value *function,
c055b101
CV
8243 struct type *valtype, struct regcache *regcache,
8244 gdb_byte *readbuf, const gdb_byte *writebuf)
2af48f68 8245{
7c00367c 8246 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
6a3a010b 8247 struct type *func_type = function ? value_type (function) : NULL;
90445bd3
DJ
8248 enum arm_vfp_cprc_base_type vfp_base_type;
8249 int vfp_base_count;
8250
8251 if (arm_vfp_abi_for_function (gdbarch, func_type)
8252 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
8253 {
8254 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
8255 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
8256 int i;
8257 for (i = 0; i < vfp_base_count; i++)
8258 {
58d6951d
DJ
8259 if (reg_char == 'q')
8260 {
8261 if (writebuf)
8262 arm_neon_quad_write (gdbarch, regcache, i,
8263 writebuf + i * unit_length);
8264
8265 if (readbuf)
8266 arm_neon_quad_read (gdbarch, regcache, i,
8267 readbuf + i * unit_length);
8268 }
8269 else
8270 {
8271 char name_buf[4];
8272 int regnum;
8273
8c042590 8274 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
58d6951d
DJ
8275 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8276 strlen (name_buf));
8277 if (writebuf)
8278 regcache_cooked_write (regcache, regnum,
8279 writebuf + i * unit_length);
8280 if (readbuf)
8281 regcache_cooked_read (regcache, regnum,
8282 readbuf + i * unit_length);
8283 }
90445bd3
DJ
8284 }
8285 return RETURN_VALUE_REGISTER_CONVENTION;
8286 }
7c00367c 8287
2af48f68
PB
8288 if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
8289 || TYPE_CODE (valtype) == TYPE_CODE_UNION
8290 || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
8291 {
7c00367c
MK
8292 if (tdep->struct_return == pcc_struct_return
8293 || arm_return_in_memory (gdbarch, valtype))
2af48f68
PB
8294 return RETURN_VALUE_STRUCT_CONVENTION;
8295 }
b13c8ab2
YQ
8296 else if (TYPE_CODE (valtype) == TYPE_CODE_COMPLEX)
8297 {
8298 if (arm_return_in_memory (gdbarch, valtype))
8299 return RETURN_VALUE_STRUCT_CONVENTION;
8300 }
7052e42c 8301
2af48f68
PB
8302 if (writebuf)
8303 arm_store_return_value (valtype, regcache, writebuf);
8304
8305 if (readbuf)
8306 arm_extract_return_value (valtype, regcache, readbuf);
8307
8308 return RETURN_VALUE_REGISTER_CONVENTION;
8309}
8310
8311
9df628e0 8312static int
60ade65d 8313arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
9df628e0 8314{
e17a4113
UW
8315 struct gdbarch *gdbarch = get_frame_arch (frame);
8316 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8317 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
9df628e0 8318 CORE_ADDR jb_addr;
e362b510 8319 gdb_byte buf[INT_REGISTER_SIZE];
9df628e0 8320
60ade65d 8321 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
9df628e0
RE
8322
8323 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
7a5ea0d4 8324 INT_REGISTER_SIZE))
9df628e0
RE
8325 return 0;
8326
e17a4113 8327 *pc = extract_unsigned_integer (buf, INT_REGISTER_SIZE, byte_order);
9df628e0
RE
8328 return 1;
8329}
8330
faa95490
DJ
8331/* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
8332 return the target PC. Otherwise return 0. */
c906108c
SS
8333
8334CORE_ADDR
52f729a7 8335arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
c906108c 8336{
2c02bd72 8337 const char *name;
faa95490 8338 int namelen;
c906108c
SS
8339 CORE_ADDR start_addr;
8340
8341 /* Find the starting address and name of the function containing the PC. */
8342 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
80d8d390
YQ
8343 {
8344 /* Trampoline 'bx reg' doesn't belong to any functions. Do the
8345 check here. */
8346 start_addr = arm_skip_bx_reg (frame, pc);
8347 if (start_addr != 0)
8348 return start_addr;
8349
8350 return 0;
8351 }
c906108c 8352
faa95490
DJ
8353 /* If PC is in a Thumb call or return stub, return the address of the
8354 target PC, which is in a register. The thunk functions are called
8355 _call_via_xx, where x is the register name. The possible names
3d8d5e79
DJ
8356 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
8357 functions, named __ARM_call_via_r[0-7]. */
61012eef
GB
8358 if (startswith (name, "_call_via_")
8359 || startswith (name, "__ARM_call_via_"))
c906108c 8360 {
ed9a39eb
JM
8361 /* Use the name suffix to determine which register contains the
8362 target PC. */
c5aa993b
JM
8363 static char *table[15] =
8364 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
8365 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
8366 };
c906108c 8367 int regno;
faa95490 8368 int offset = strlen (name) - 2;
c906108c
SS
8369
8370 for (regno = 0; regno <= 14; regno++)
faa95490 8371 if (strcmp (&name[offset], table[regno]) == 0)
52f729a7 8372 return get_frame_register_unsigned (frame, regno);
c906108c 8373 }
ed9a39eb 8374
faa95490
DJ
8375 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
8376 non-interworking calls to foo. We could decode the stubs
8377 to find the target but it's easier to use the symbol table. */
8378 namelen = strlen (name);
8379 if (name[0] == '_' && name[1] == '_'
8380 && ((namelen > 2 + strlen ("_from_thumb")
61012eef 8381 && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
faa95490 8382 || (namelen > 2 + strlen ("_from_arm")
61012eef 8383 && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
faa95490
DJ
8384 {
8385 char *target_name;
8386 int target_len = namelen - 2;
3b7344d5 8387 struct bound_minimal_symbol minsym;
faa95490
DJ
8388 struct objfile *objfile;
8389 struct obj_section *sec;
8390
8391 if (name[namelen - 1] == 'b')
8392 target_len -= strlen ("_from_thumb");
8393 else
8394 target_len -= strlen ("_from_arm");
8395
224c3ddb 8396 target_name = (char *) alloca (target_len + 1);
faa95490
DJ
8397 memcpy (target_name, name + 2, target_len);
8398 target_name[target_len] = '\0';
8399
8400 sec = find_pc_section (pc);
8401 objfile = (sec == NULL) ? NULL : sec->objfile;
8402 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
3b7344d5 8403 if (minsym.minsym != NULL)
77e371c0 8404 return BMSYMBOL_VALUE_ADDRESS (minsym);
faa95490
DJ
8405 else
8406 return 0;
8407 }
8408
c5aa993b 8409 return 0; /* not a stub */
c906108c
SS
8410}
8411
afd7eef0
RE
8412static void
8413set_arm_command (char *args, int from_tty)
8414{
edefbb7c
AC
8415 printf_unfiltered (_("\
8416\"set arm\" must be followed by an apporpriate subcommand.\n"));
afd7eef0
RE
8417 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
8418}
8419
8420static void
8421show_arm_command (char *args, int from_tty)
8422{
26304000 8423 cmd_show_list (showarmcmdlist, from_tty, "");
afd7eef0
RE
8424}
8425
28e97307
DJ
8426static void
8427arm_update_current_architecture (void)
fd50bc42 8428{
28e97307 8429 struct gdbarch_info info;
fd50bc42 8430
28e97307 8431 /* If the current architecture is not ARM, we have nothing to do. */
f5656ead 8432 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
28e97307 8433 return;
fd50bc42 8434
28e97307
DJ
8435 /* Update the architecture. */
8436 gdbarch_info_init (&info);
fd50bc42 8437
28e97307 8438 if (!gdbarch_update_p (info))
9b20d036 8439 internal_error (__FILE__, __LINE__, _("could not update architecture"));
fd50bc42
RE
8440}
8441
8442static void
8443set_fp_model_sfunc (char *args, int from_tty,
8444 struct cmd_list_element *c)
8445{
570dc176 8446 int fp_model;
fd50bc42
RE
8447
8448 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
8449 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
8450 {
aead7601 8451 arm_fp_model = (enum arm_float_model) fp_model;
fd50bc42
RE
8452 break;
8453 }
8454
8455 if (fp_model == ARM_FLOAT_LAST)
edefbb7c 8456 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
fd50bc42
RE
8457 current_fp_model);
8458
28e97307 8459 arm_update_current_architecture ();
fd50bc42
RE
8460}
8461
8462static void
08546159
AC
8463show_fp_model (struct ui_file *file, int from_tty,
8464 struct cmd_list_element *c, const char *value)
fd50bc42 8465{
f5656ead 8466 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
fd50bc42 8467
28e97307 8468 if (arm_fp_model == ARM_FLOAT_AUTO
f5656ead 8469 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
28e97307
DJ
8470 fprintf_filtered (file, _("\
8471The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
8472 fp_model_strings[tdep->fp_model]);
8473 else
8474 fprintf_filtered (file, _("\
8475The current ARM floating point model is \"%s\".\n"),
8476 fp_model_strings[arm_fp_model]);
8477}
8478
8479static void
8480arm_set_abi (char *args, int from_tty,
8481 struct cmd_list_element *c)
8482{
570dc176 8483 int arm_abi;
28e97307
DJ
8484
8485 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
8486 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
8487 {
aead7601 8488 arm_abi_global = (enum arm_abi_kind) arm_abi;
28e97307
DJ
8489 break;
8490 }
8491
8492 if (arm_abi == ARM_ABI_LAST)
8493 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
8494 arm_abi_string);
8495
8496 arm_update_current_architecture ();
8497}
8498
8499static void
8500arm_show_abi (struct ui_file *file, int from_tty,
8501 struct cmd_list_element *c, const char *value)
8502{
f5656ead 8503 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
28e97307
DJ
8504
8505 if (arm_abi_global == ARM_ABI_AUTO
f5656ead 8506 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
28e97307
DJ
8507 fprintf_filtered (file, _("\
8508The current ARM ABI is \"auto\" (currently \"%s\").\n"),
8509 arm_abi_strings[tdep->arm_abi]);
8510 else
8511 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
8512 arm_abi_string);
fd50bc42
RE
8513}
8514
0428b8f5
DJ
8515static void
8516arm_show_fallback_mode (struct ui_file *file, int from_tty,
8517 struct cmd_list_element *c, const char *value)
8518{
0963b4bd
MS
8519 fprintf_filtered (file,
8520 _("The current execution mode assumed "
8521 "(when symbols are unavailable) is \"%s\".\n"),
0428b8f5
DJ
8522 arm_fallback_mode_string);
8523}
8524
8525static void
8526arm_show_force_mode (struct ui_file *file, int from_tty,
8527 struct cmd_list_element *c, const char *value)
8528{
0963b4bd
MS
8529 fprintf_filtered (file,
8530 _("The current execution mode assumed "
8531 "(even when symbols are available) is \"%s\".\n"),
0428b8f5
DJ
8532 arm_force_mode_string);
8533}
8534
afd7eef0
RE
8535/* If the user changes the register disassembly style used for info
8536 register and other commands, we have to also switch the style used
8537 in opcodes for disassembly output. This function is run in the "set
8538 arm disassembly" command, and does that. */
bc90b915
FN
8539
8540static void
afd7eef0 8541set_disassembly_style_sfunc (char *args, int from_tty,
bc90b915
FN
8542 struct cmd_list_element *c)
8543{
afd7eef0 8544 set_disassembly_style ();
bc90b915
FN
8545}
8546\f
966fbf70 8547/* Return the ARM register name corresponding to register I. */
a208b0cb 8548static const char *
d93859e2 8549arm_register_name (struct gdbarch *gdbarch, int i)
966fbf70 8550{
58d6951d
DJ
8551 const int num_regs = gdbarch_num_regs (gdbarch);
8552
8553 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
8554 && i >= num_regs && i < num_regs + 32)
8555 {
8556 static const char *const vfp_pseudo_names[] = {
8557 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
8558 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
8559 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
8560 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
8561 };
8562
8563 return vfp_pseudo_names[i - num_regs];
8564 }
8565
8566 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
8567 && i >= num_regs + 32 && i < num_regs + 32 + 16)
8568 {
8569 static const char *const neon_pseudo_names[] = {
8570 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
8571 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
8572 };
8573
8574 return neon_pseudo_names[i - num_regs - 32];
8575 }
8576
ff6f572f
DJ
8577 if (i >= ARRAY_SIZE (arm_register_names))
8578 /* These registers are only supported on targets which supply
8579 an XML description. */
8580 return "";
8581
966fbf70
RE
8582 return arm_register_names[i];
8583}
8584
bc90b915 8585static void
afd7eef0 8586set_disassembly_style (void)
bc90b915 8587{
123dc839 8588 int current;
bc90b915 8589
123dc839
DJ
8590 /* Find the style that the user wants. */
8591 for (current = 0; current < num_disassembly_options; current++)
8592 if (disassembly_style == valid_disassembly_styles[current])
8593 break;
8594 gdb_assert (current < num_disassembly_options);
bc90b915 8595
94c30b78 8596 /* Synchronize the disassembler. */
bc90b915
FN
8597 set_arm_regname_option (current);
8598}
8599
082fc60d
RE
8600/* Test whether the coff symbol specific value corresponds to a Thumb
8601 function. */
8602
8603static int
8604coff_sym_is_thumb (int val)
8605{
f8bf5763
PM
8606 return (val == C_THUMBEXT
8607 || val == C_THUMBSTAT
8608 || val == C_THUMBEXTFUNC
8609 || val == C_THUMBSTATFUNC
8610 || val == C_THUMBLABEL);
082fc60d
RE
8611}
8612
8613/* arm_coff_make_msymbol_special()
8614 arm_elf_make_msymbol_special()
8615
8616 These functions test whether the COFF or ELF symbol corresponds to
8617 an address in thumb code, and set a "special" bit in a minimal
8618 symbol to indicate that it does. */
8619
34e8f22d 8620static void
082fc60d
RE
8621arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
8622{
39d911fc
TP
8623 elf_symbol_type *elfsym = (elf_symbol_type *) sym;
8624
8625 if (ARM_GET_SYM_BRANCH_TYPE (elfsym->internal_elf_sym.st_target_internal)
467d42c4 8626 == ST_BRANCH_TO_THUMB)
082fc60d
RE
8627 MSYMBOL_SET_SPECIAL (msym);
8628}
8629
34e8f22d 8630static void
082fc60d
RE
8631arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
8632{
8633 if (coff_sym_is_thumb (val))
8634 MSYMBOL_SET_SPECIAL (msym);
8635}
8636
60c5725c 8637static void
c1bd65d0 8638arm_objfile_data_free (struct objfile *objfile, void *arg)
60c5725c 8639{
9a3c8263 8640 struct arm_per_objfile *data = (struct arm_per_objfile *) arg;
60c5725c
DJ
8641 unsigned int i;
8642
8643 for (i = 0; i < objfile->obfd->section_count; i++)
8644 VEC_free (arm_mapping_symbol_s, data->section_maps[i]);
8645}
8646
8647static void
8648arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
8649 asymbol *sym)
8650{
8651 const char *name = bfd_asymbol_name (sym);
8652 struct arm_per_objfile *data;
8653 VEC(arm_mapping_symbol_s) **map_p;
8654 struct arm_mapping_symbol new_map_sym;
8655
8656 gdb_assert (name[0] == '$');
8657 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
8658 return;
8659
9a3c8263
SM
8660 data = (struct arm_per_objfile *) objfile_data (objfile,
8661 arm_objfile_data_key);
60c5725c
DJ
8662 if (data == NULL)
8663 {
8664 data = OBSTACK_ZALLOC (&objfile->objfile_obstack,
8665 struct arm_per_objfile);
8666 set_objfile_data (objfile, arm_objfile_data_key, data);
8667 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
8668 objfile->obfd->section_count,
8669 VEC(arm_mapping_symbol_s) *);
8670 }
8671 map_p = &data->section_maps[bfd_get_section (sym)->index];
8672
8673 new_map_sym.value = sym->value;
8674 new_map_sym.type = name[1];
8675
8676 /* Assume that most mapping symbols appear in order of increasing
8677 value. If they were randomly distributed, it would be faster to
8678 always push here and then sort at first use. */
8679 if (!VEC_empty (arm_mapping_symbol_s, *map_p))
8680 {
8681 struct arm_mapping_symbol *prev_map_sym;
8682
8683 prev_map_sym = VEC_last (arm_mapping_symbol_s, *map_p);
8684 if (prev_map_sym->value >= sym->value)
8685 {
8686 unsigned int idx;
8687 idx = VEC_lower_bound (arm_mapping_symbol_s, *map_p, &new_map_sym,
8688 arm_compare_mapping_symbols);
8689 VEC_safe_insert (arm_mapping_symbol_s, *map_p, idx, &new_map_sym);
8690 return;
8691 }
8692 }
8693
8694 VEC_safe_push (arm_mapping_symbol_s, *map_p, &new_map_sym);
8695}
8696
756fe439 8697static void
61a1198a 8698arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
756fe439 8699{
9779414d 8700 struct gdbarch *gdbarch = get_regcache_arch (regcache);
61a1198a 8701 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
756fe439
DJ
8702
8703 /* If necessary, set the T bit. */
8704 if (arm_apcs_32)
8705 {
9779414d 8706 ULONGEST val, t_bit;
61a1198a 8707 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
9779414d
DJ
8708 t_bit = arm_psr_thumb_bit (gdbarch);
8709 if (arm_pc_is_thumb (gdbarch, pc))
8710 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8711 val | t_bit);
756fe439 8712 else
61a1198a 8713 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9779414d 8714 val & ~t_bit);
756fe439
DJ
8715 }
8716}
123dc839 8717
58d6951d
DJ
8718/* Read the contents of a NEON quad register, by reading from two
8719 double registers. This is used to implement the quad pseudo
8720 registers, and for argument passing in case the quad registers are
8721 missing; vectors are passed in quad registers when using the VFP
8722 ABI, even if a NEON unit is not present. REGNUM is the index of
8723 the quad register, in [0, 15]. */
8724
05d1431c 8725static enum register_status
58d6951d
DJ
8726arm_neon_quad_read (struct gdbarch *gdbarch, struct regcache *regcache,
8727 int regnum, gdb_byte *buf)
8728{
8729 char name_buf[4];
8730 gdb_byte reg_buf[8];
8731 int offset, double_regnum;
05d1431c 8732 enum register_status status;
58d6951d 8733
8c042590 8734 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
58d6951d
DJ
8735 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8736 strlen (name_buf));
8737
8738 /* d0 is always the least significant half of q0. */
8739 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8740 offset = 8;
8741 else
8742 offset = 0;
8743
05d1431c
PA
8744 status = regcache_raw_read (regcache, double_regnum, reg_buf);
8745 if (status != REG_VALID)
8746 return status;
58d6951d
DJ
8747 memcpy (buf + offset, reg_buf, 8);
8748
8749 offset = 8 - offset;
05d1431c
PA
8750 status = regcache_raw_read (regcache, double_regnum + 1, reg_buf);
8751 if (status != REG_VALID)
8752 return status;
58d6951d 8753 memcpy (buf + offset, reg_buf, 8);
05d1431c
PA
8754
8755 return REG_VALID;
58d6951d
DJ
8756}
8757
05d1431c 8758static enum register_status
58d6951d
DJ
8759arm_pseudo_read (struct gdbarch *gdbarch, struct regcache *regcache,
8760 int regnum, gdb_byte *buf)
8761{
8762 const int num_regs = gdbarch_num_regs (gdbarch);
8763 char name_buf[4];
8764 gdb_byte reg_buf[8];
8765 int offset, double_regnum;
8766
8767 gdb_assert (regnum >= num_regs);
8768 regnum -= num_regs;
8769
8770 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8771 /* Quad-precision register. */
05d1431c 8772 return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
58d6951d
DJ
8773 else
8774 {
05d1431c
PA
8775 enum register_status status;
8776
58d6951d
DJ
8777 /* Single-precision register. */
8778 gdb_assert (regnum < 32);
8779
8780 /* s0 is always the least significant half of d0. */
8781 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8782 offset = (regnum & 1) ? 0 : 4;
8783 else
8784 offset = (regnum & 1) ? 4 : 0;
8785
8c042590 8786 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
58d6951d
DJ
8787 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8788 strlen (name_buf));
8789
05d1431c
PA
8790 status = regcache_raw_read (regcache, double_regnum, reg_buf);
8791 if (status == REG_VALID)
8792 memcpy (buf, reg_buf + offset, 4);
8793 return status;
58d6951d
DJ
8794 }
8795}
8796
8797/* Store the contents of BUF to a NEON quad register, by writing to
8798 two double registers. This is used to implement the quad pseudo
8799 registers, and for argument passing in case the quad registers are
8800 missing; vectors are passed in quad registers when using the VFP
8801 ABI, even if a NEON unit is not present. REGNUM is the index
8802 of the quad register, in [0, 15]. */
8803
8804static void
8805arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
8806 int regnum, const gdb_byte *buf)
8807{
8808 char name_buf[4];
58d6951d
DJ
8809 int offset, double_regnum;
8810
8c042590 8811 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
58d6951d
DJ
8812 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8813 strlen (name_buf));
8814
8815 /* d0 is always the least significant half of q0. */
8816 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8817 offset = 8;
8818 else
8819 offset = 0;
8820
8821 regcache_raw_write (regcache, double_regnum, buf + offset);
8822 offset = 8 - offset;
8823 regcache_raw_write (regcache, double_regnum + 1, buf + offset);
8824}
8825
8826static void
8827arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
8828 int regnum, const gdb_byte *buf)
8829{
8830 const int num_regs = gdbarch_num_regs (gdbarch);
8831 char name_buf[4];
8832 gdb_byte reg_buf[8];
8833 int offset, double_regnum;
8834
8835 gdb_assert (regnum >= num_regs);
8836 regnum -= num_regs;
8837
8838 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8839 /* Quad-precision register. */
8840 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
8841 else
8842 {
8843 /* Single-precision register. */
8844 gdb_assert (regnum < 32);
8845
8846 /* s0 is always the least significant half of d0. */
8847 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8848 offset = (regnum & 1) ? 0 : 4;
8849 else
8850 offset = (regnum & 1) ? 4 : 0;
8851
8c042590 8852 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
58d6951d
DJ
8853 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8854 strlen (name_buf));
8855
8856 regcache_raw_read (regcache, double_regnum, reg_buf);
8857 memcpy (reg_buf + offset, buf, 4);
8858 regcache_raw_write (regcache, double_regnum, reg_buf);
8859 }
8860}
8861
123dc839
DJ
8862static struct value *
8863value_of_arm_user_reg (struct frame_info *frame, const void *baton)
8864{
9a3c8263 8865 const int *reg_p = (const int *) baton;
123dc839
DJ
8866 return value_of_register (*reg_p, frame);
8867}
97e03143 8868\f
70f80edf
JT
8869static enum gdb_osabi
8870arm_elf_osabi_sniffer (bfd *abfd)
97e03143 8871{
2af48f68 8872 unsigned int elfosabi;
70f80edf 8873 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
97e03143 8874
70f80edf 8875 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
97e03143 8876
28e97307
DJ
8877 if (elfosabi == ELFOSABI_ARM)
8878 /* GNU tools use this value. Check note sections in this case,
8879 as well. */
8880 bfd_map_over_sections (abfd,
8881 generic_elf_osabi_sniff_abi_tag_sections,
8882 &osabi);
97e03143 8883
28e97307 8884 /* Anything else will be handled by the generic ELF sniffer. */
70f80edf 8885 return osabi;
97e03143
RE
8886}
8887
54483882
YQ
8888static int
8889arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
8890 struct reggroup *group)
8891{
2c291032
YQ
8892 /* FPS register's type is INT, but belongs to float_reggroup. Beside
8893 this, FPS register belongs to save_regroup, restore_reggroup, and
8894 all_reggroup, of course. */
54483882 8895 if (regnum == ARM_FPS_REGNUM)
2c291032
YQ
8896 return (group == float_reggroup
8897 || group == save_reggroup
8898 || group == restore_reggroup
8899 || group == all_reggroup);
54483882
YQ
8900 else
8901 return default_register_reggroup_p (gdbarch, regnum, group);
8902}
8903
25f8c692
JL
8904\f
8905/* For backward-compatibility we allow two 'g' packet lengths with
8906 the remote protocol depending on whether FPA registers are
8907 supplied. M-profile targets do not have FPA registers, but some
8908 stubs already exist in the wild which use a 'g' packet which
8909 supplies them albeit with dummy values. The packet format which
8910 includes FPA registers should be considered deprecated for
8911 M-profile targets. */
8912
8913static void
8914arm_register_g_packet_guesses (struct gdbarch *gdbarch)
8915{
8916 if (gdbarch_tdep (gdbarch)->is_m)
8917 {
8918 /* If we know from the executable this is an M-profile target,
8919 cater for remote targets whose register set layout is the
8920 same as the FPA layout. */
8921 register_remote_g_packet_guess (gdbarch,
03145bf4 8922 /* r0-r12,sp,lr,pc; f0-f7; fps,xpsr */
25f8c692
JL
8923 (16 * INT_REGISTER_SIZE)
8924 + (8 * FP_REGISTER_SIZE)
8925 + (2 * INT_REGISTER_SIZE),
8926 tdesc_arm_with_m_fpa_layout);
8927
8928 /* The regular M-profile layout. */
8929 register_remote_g_packet_guess (gdbarch,
8930 /* r0-r12,sp,lr,pc; xpsr */
8931 (16 * INT_REGISTER_SIZE)
8932 + INT_REGISTER_SIZE,
8933 tdesc_arm_with_m);
3184d3f9
JL
8934
8935 /* M-profile plus M4F VFP. */
8936 register_remote_g_packet_guess (gdbarch,
8937 /* r0-r12,sp,lr,pc; d0-d15; fpscr,xpsr */
8938 (16 * INT_REGISTER_SIZE)
8939 + (16 * VFP_REGISTER_SIZE)
8940 + (2 * INT_REGISTER_SIZE),
8941 tdesc_arm_with_m_vfp_d16);
25f8c692
JL
8942 }
8943
8944 /* Otherwise we don't have a useful guess. */
8945}
8946
7eb89530
YQ
8947/* Implement the code_of_frame_writable gdbarch method. */
8948
8949static int
8950arm_code_of_frame_writable (struct gdbarch *gdbarch, struct frame_info *frame)
8951{
8952 if (gdbarch_tdep (gdbarch)->is_m
8953 && get_frame_type (frame) == SIGTRAMP_FRAME)
8954 {
8955 /* M-profile exception frames return to some magic PCs, where
8956 isn't writable at all. */
8957 return 0;
8958 }
8959 else
8960 return 1;
8961}
8962
70f80edf 8963\f
da3c6d4a
MS
8964/* Initialize the current architecture based on INFO. If possible,
8965 re-use an architecture from ARCHES, which is a list of
8966 architectures already created during this debugging session.
97e03143 8967
da3c6d4a
MS
8968 Called e.g. at program startup, when reading a core file, and when
8969 reading a binary file. */
97e03143 8970
39bbf761
RE
8971static struct gdbarch *
8972arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
8973{
97e03143 8974 struct gdbarch_tdep *tdep;
39bbf761 8975 struct gdbarch *gdbarch;
28e97307
DJ
8976 struct gdbarch_list *best_arch;
8977 enum arm_abi_kind arm_abi = arm_abi_global;
8978 enum arm_float_model fp_model = arm_fp_model;
123dc839 8979 struct tdesc_arch_data *tdesc_data = NULL;
9779414d 8980 int i, is_m = 0;
330c6ca9 8981 int vfp_register_count = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
a56cc1ce 8982 int have_wmmx_registers = 0;
58d6951d 8983 int have_neon = 0;
ff6f572f 8984 int have_fpa_registers = 1;
9779414d
DJ
8985 const struct target_desc *tdesc = info.target_desc;
8986
8987 /* If we have an object to base this architecture on, try to determine
8988 its ABI. */
8989
8990 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
8991 {
8992 int ei_osabi, e_flags;
8993
8994 switch (bfd_get_flavour (info.abfd))
8995 {
9779414d
DJ
8996 case bfd_target_coff_flavour:
8997 /* Assume it's an old APCS-style ABI. */
8998 /* XXX WinCE? */
8999 arm_abi = ARM_ABI_APCS;
9000 break;
9001
9002 case bfd_target_elf_flavour:
9003 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
9004 e_flags = elf_elfheader (info.abfd)->e_flags;
9005
9006 if (ei_osabi == ELFOSABI_ARM)
9007 {
9008 /* GNU tools used to use this value, but do not for EABI
9009 objects. There's nowhere to tag an EABI version
9010 anyway, so assume APCS. */
9011 arm_abi = ARM_ABI_APCS;
9012 }
d403db27 9013 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
9779414d
DJ
9014 {
9015 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
9016 int attr_arch, attr_profile;
9017
9018 switch (eabi_ver)
9019 {
9020 case EF_ARM_EABI_UNKNOWN:
9021 /* Assume GNU tools. */
9022 arm_abi = ARM_ABI_APCS;
9023 break;
9024
9025 case EF_ARM_EABI_VER4:
9026 case EF_ARM_EABI_VER5:
9027 arm_abi = ARM_ABI_AAPCS;
9028 /* EABI binaries default to VFP float ordering.
9029 They may also contain build attributes that can
9030 be used to identify if the VFP argument-passing
9031 ABI is in use. */
9032 if (fp_model == ARM_FLOAT_AUTO)
9033 {
9034#ifdef HAVE_ELF
9035 switch (bfd_elf_get_obj_attr_int (info.abfd,
9036 OBJ_ATTR_PROC,
9037 Tag_ABI_VFP_args))
9038 {
b35b0298 9039 case AEABI_VFP_args_base:
9779414d
DJ
9040 /* "The user intended FP parameter/result
9041 passing to conform to AAPCS, base
9042 variant". */
9043 fp_model = ARM_FLOAT_SOFT_VFP;
9044 break;
b35b0298 9045 case AEABI_VFP_args_vfp:
9779414d
DJ
9046 /* "The user intended FP parameter/result
9047 passing to conform to AAPCS, VFP
9048 variant". */
9049 fp_model = ARM_FLOAT_VFP;
9050 break;
b35b0298 9051 case AEABI_VFP_args_toolchain:
9779414d
DJ
9052 /* "The user intended FP parameter/result
9053 passing to conform to tool chain-specific
9054 conventions" - we don't know any such
9055 conventions, so leave it as "auto". */
9056 break;
b35b0298 9057 case AEABI_VFP_args_compatible:
5c294fee
TG
9058 /* "Code is compatible with both the base
9059 and VFP variants; the user did not permit
9060 non-variadic functions to pass FP
9061 parameters/results" - leave it as
9062 "auto". */
9063 break;
9779414d
DJ
9064 default:
9065 /* Attribute value not mentioned in the
5c294fee 9066 November 2012 ABI, so leave it as
9779414d
DJ
9067 "auto". */
9068 break;
9069 }
9070#else
9071 fp_model = ARM_FLOAT_SOFT_VFP;
9072#endif
9073 }
9074 break;
9075
9076 default:
9077 /* Leave it as "auto". */
9078 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
9079 break;
9080 }
9081
9082#ifdef HAVE_ELF
9083 /* Detect M-profile programs. This only works if the
9084 executable file includes build attributes; GCC does
9085 copy them to the executable, but e.g. RealView does
9086 not. */
9087 attr_arch = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9088 Tag_CPU_arch);
0963b4bd
MS
9089 attr_profile = bfd_elf_get_obj_attr_int (info.abfd,
9090 OBJ_ATTR_PROC,
9779414d
DJ
9091 Tag_CPU_arch_profile);
9092 /* GCC specifies the profile for v6-M; RealView only
9093 specifies the profile for architectures starting with
9094 V7 (as opposed to architectures with a tag
9095 numerically greater than TAG_CPU_ARCH_V7). */
9096 if (!tdesc_has_registers (tdesc)
9097 && (attr_arch == TAG_CPU_ARCH_V6_M
9098 || attr_arch == TAG_CPU_ARCH_V6S_M
9099 || attr_profile == 'M'))
25f8c692 9100 is_m = 1;
9779414d
DJ
9101#endif
9102 }
9103
9104 if (fp_model == ARM_FLOAT_AUTO)
9105 {
9106 int e_flags = elf_elfheader (info.abfd)->e_flags;
9107
9108 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
9109 {
9110 case 0:
9111 /* Leave it as "auto". Strictly speaking this case
9112 means FPA, but almost nobody uses that now, and
9113 many toolchains fail to set the appropriate bits
9114 for the floating-point model they use. */
9115 break;
9116 case EF_ARM_SOFT_FLOAT:
9117 fp_model = ARM_FLOAT_SOFT_FPA;
9118 break;
9119 case EF_ARM_VFP_FLOAT:
9120 fp_model = ARM_FLOAT_VFP;
9121 break;
9122 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
9123 fp_model = ARM_FLOAT_SOFT_VFP;
9124 break;
9125 }
9126 }
9127
9128 if (e_flags & EF_ARM_BE8)
9129 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
9130
9131 break;
9132
9133 default:
9134 /* Leave it as "auto". */
9135 break;
9136 }
9137 }
123dc839
DJ
9138
9139 /* Check any target description for validity. */
9779414d 9140 if (tdesc_has_registers (tdesc))
123dc839
DJ
9141 {
9142 /* For most registers we require GDB's default names; but also allow
9143 the numeric names for sp / lr / pc, as a convenience. */
9144 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
9145 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
9146 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
9147
9148 const struct tdesc_feature *feature;
58d6951d 9149 int valid_p;
123dc839 9150
9779414d 9151 feature = tdesc_find_feature (tdesc,
123dc839
DJ
9152 "org.gnu.gdb.arm.core");
9153 if (feature == NULL)
9779414d
DJ
9154 {
9155 feature = tdesc_find_feature (tdesc,
9156 "org.gnu.gdb.arm.m-profile");
9157 if (feature == NULL)
9158 return NULL;
9159 else
9160 is_m = 1;
9161 }
123dc839
DJ
9162
9163 tdesc_data = tdesc_data_alloc ();
9164
9165 valid_p = 1;
9166 for (i = 0; i < ARM_SP_REGNUM; i++)
9167 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9168 arm_register_names[i]);
9169 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9170 ARM_SP_REGNUM,
9171 arm_sp_names);
9172 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9173 ARM_LR_REGNUM,
9174 arm_lr_names);
9175 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9176 ARM_PC_REGNUM,
9177 arm_pc_names);
9779414d
DJ
9178 if (is_m)
9179 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9180 ARM_PS_REGNUM, "xpsr");
9181 else
9182 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9183 ARM_PS_REGNUM, "cpsr");
123dc839
DJ
9184
9185 if (!valid_p)
9186 {
9187 tdesc_data_cleanup (tdesc_data);
9188 return NULL;
9189 }
9190
9779414d 9191 feature = tdesc_find_feature (tdesc,
123dc839
DJ
9192 "org.gnu.gdb.arm.fpa");
9193 if (feature != NULL)
9194 {
9195 valid_p = 1;
9196 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
9197 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9198 arm_register_names[i]);
9199 if (!valid_p)
9200 {
9201 tdesc_data_cleanup (tdesc_data);
9202 return NULL;
9203 }
9204 }
ff6f572f
DJ
9205 else
9206 have_fpa_registers = 0;
9207
9779414d 9208 feature = tdesc_find_feature (tdesc,
ff6f572f
DJ
9209 "org.gnu.gdb.xscale.iwmmxt");
9210 if (feature != NULL)
9211 {
9212 static const char *const iwmmxt_names[] = {
9213 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9214 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9215 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9216 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9217 };
9218
9219 valid_p = 1;
9220 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
9221 valid_p
9222 &= tdesc_numbered_register (feature, tdesc_data, i,
9223 iwmmxt_names[i - ARM_WR0_REGNUM]);
9224
9225 /* Check for the control registers, but do not fail if they
9226 are missing. */
9227 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
9228 tdesc_numbered_register (feature, tdesc_data, i,
9229 iwmmxt_names[i - ARM_WR0_REGNUM]);
9230
9231 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
9232 valid_p
9233 &= tdesc_numbered_register (feature, tdesc_data, i,
9234 iwmmxt_names[i - ARM_WR0_REGNUM]);
9235
9236 if (!valid_p)
9237 {
9238 tdesc_data_cleanup (tdesc_data);
9239 return NULL;
9240 }
a56cc1ce
YQ
9241
9242 have_wmmx_registers = 1;
ff6f572f 9243 }
58d6951d
DJ
9244
9245 /* If we have a VFP unit, check whether the single precision registers
9246 are present. If not, then we will synthesize them as pseudo
9247 registers. */
9779414d 9248 feature = tdesc_find_feature (tdesc,
58d6951d
DJ
9249 "org.gnu.gdb.arm.vfp");
9250 if (feature != NULL)
9251 {
9252 static const char *const vfp_double_names[] = {
9253 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9254 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9255 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9256 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9257 };
9258
9259 /* Require the double precision registers. There must be either
9260 16 or 32. */
9261 valid_p = 1;
9262 for (i = 0; i < 32; i++)
9263 {
9264 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9265 ARM_D0_REGNUM + i,
9266 vfp_double_names[i]);
9267 if (!valid_p)
9268 break;
9269 }
2b9e5ea6
UW
9270 if (!valid_p && i == 16)
9271 valid_p = 1;
58d6951d 9272
2b9e5ea6
UW
9273 /* Also require FPSCR. */
9274 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9275 ARM_FPSCR_REGNUM, "fpscr");
9276 if (!valid_p)
58d6951d
DJ
9277 {
9278 tdesc_data_cleanup (tdesc_data);
9279 return NULL;
9280 }
9281
9282 if (tdesc_unnumbered_register (feature, "s0") == 0)
9283 have_vfp_pseudos = 1;
9284
330c6ca9 9285 vfp_register_count = i;
58d6951d
DJ
9286
9287 /* If we have VFP, also check for NEON. The architecture allows
9288 NEON without VFP (integer vector operations only), but GDB
9289 does not support that. */
9779414d 9290 feature = tdesc_find_feature (tdesc,
58d6951d
DJ
9291 "org.gnu.gdb.arm.neon");
9292 if (feature != NULL)
9293 {
9294 /* NEON requires 32 double-precision registers. */
9295 if (i != 32)
9296 {
9297 tdesc_data_cleanup (tdesc_data);
9298 return NULL;
9299 }
9300
9301 /* If there are quad registers defined by the stub, use
9302 their type; otherwise (normally) provide them with
9303 the default type. */
9304 if (tdesc_unnumbered_register (feature, "q0") == 0)
9305 have_neon_pseudos = 1;
9306
9307 have_neon = 1;
9308 }
9309 }
123dc839 9310 }
39bbf761 9311
28e97307
DJ
9312 /* If there is already a candidate, use it. */
9313 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
9314 best_arch != NULL;
9315 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
9316 {
b8926edc
DJ
9317 if (arm_abi != ARM_ABI_AUTO
9318 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
28e97307
DJ
9319 continue;
9320
b8926edc
DJ
9321 if (fp_model != ARM_FLOAT_AUTO
9322 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
28e97307
DJ
9323 continue;
9324
58d6951d
DJ
9325 /* There are various other properties in tdep that we do not
9326 need to check here: those derived from a target description,
9327 since gdbarches with a different target description are
9328 automatically disqualified. */
9329
9779414d
DJ
9330 /* Do check is_m, though, since it might come from the binary. */
9331 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
9332 continue;
9333
28e97307
DJ
9334 /* Found a match. */
9335 break;
9336 }
97e03143 9337
28e97307 9338 if (best_arch != NULL)
123dc839
DJ
9339 {
9340 if (tdesc_data != NULL)
9341 tdesc_data_cleanup (tdesc_data);
9342 return best_arch->gdbarch;
9343 }
28e97307 9344
8d749320 9345 tdep = XCNEW (struct gdbarch_tdep);
97e03143
RE
9346 gdbarch = gdbarch_alloc (&info, tdep);
9347
28e97307
DJ
9348 /* Record additional information about the architecture we are defining.
9349 These are gdbarch discriminators, like the OSABI. */
9350 tdep->arm_abi = arm_abi;
9351 tdep->fp_model = fp_model;
9779414d 9352 tdep->is_m = is_m;
ff6f572f 9353 tdep->have_fpa_registers = have_fpa_registers;
a56cc1ce 9354 tdep->have_wmmx_registers = have_wmmx_registers;
330c6ca9
YQ
9355 gdb_assert (vfp_register_count == 0
9356 || vfp_register_count == 16
9357 || vfp_register_count == 32);
9358 tdep->vfp_register_count = vfp_register_count;
58d6951d
DJ
9359 tdep->have_vfp_pseudos = have_vfp_pseudos;
9360 tdep->have_neon_pseudos = have_neon_pseudos;
9361 tdep->have_neon = have_neon;
08216dd7 9362
25f8c692
JL
9363 arm_register_g_packet_guesses (gdbarch);
9364
08216dd7 9365 /* Breakpoints. */
9d4fde75 9366 switch (info.byte_order_for_code)
67255d04
RE
9367 {
9368 case BFD_ENDIAN_BIG:
66e810cd
RE
9369 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
9370 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
9371 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
9372 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
9373
67255d04
RE
9374 break;
9375
9376 case BFD_ENDIAN_LITTLE:
66e810cd
RE
9377 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
9378 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
9379 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
9380 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
9381
67255d04
RE
9382 break;
9383
9384 default:
9385 internal_error (__FILE__, __LINE__,
edefbb7c 9386 _("arm_gdbarch_init: bad byte order for float format"));
67255d04
RE
9387 }
9388
d7b486e7
RE
9389 /* On ARM targets char defaults to unsigned. */
9390 set_gdbarch_char_signed (gdbarch, 0);
9391
cca44b1b
JB
9392 /* Note: for displaced stepping, this includes the breakpoint, and one word
9393 of additional scratch space. This setting isn't used for anything beside
9394 displaced stepping at present. */
9395 set_gdbarch_max_insn_length (gdbarch, 4 * DISPLACED_MODIFIED_INSNS);
9396
9df628e0 9397 /* This should be low enough for everything. */
97e03143 9398 tdep->lowest_pc = 0x20;
94c30b78 9399 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
97e03143 9400
7c00367c
MK
9401 /* The default, for both APCS and AAPCS, is to return small
9402 structures in registers. */
9403 tdep->struct_return = reg_struct_return;
9404
2dd604e7 9405 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
f53f0d0b 9406 set_gdbarch_frame_align (gdbarch, arm_frame_align);
39bbf761 9407
7eb89530
YQ
9408 if (is_m)
9409 set_gdbarch_code_of_frame_writable (gdbarch, arm_code_of_frame_writable);
9410
756fe439
DJ
9411 set_gdbarch_write_pc (gdbarch, arm_write_pc);
9412
148754e5 9413 /* Frame handling. */
a262aec2 9414 set_gdbarch_dummy_id (gdbarch, arm_dummy_id);
eb5492fa
DJ
9415 set_gdbarch_unwind_pc (gdbarch, arm_unwind_pc);
9416 set_gdbarch_unwind_sp (gdbarch, arm_unwind_sp);
9417
eb5492fa 9418 frame_base_set_default (gdbarch, &arm_normal_base);
148754e5 9419
34e8f22d 9420 /* Address manipulation. */
34e8f22d
RE
9421 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
9422
34e8f22d
RE
9423 /* Advance PC across function entry code. */
9424 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
9425
c9cf6e20
MG
9426 /* Detect whether PC is at a point where the stack has been destroyed. */
9427 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
4024ca99 9428
190dce09
UW
9429 /* Skip trampolines. */
9430 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
9431
34e8f22d
RE
9432 /* The stack grows downward. */
9433 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
9434
9435 /* Breakpoint manipulation. */
04180708
YQ
9436 set_gdbarch_breakpoint_kind_from_pc (gdbarch, arm_breakpoint_kind_from_pc);
9437 set_gdbarch_sw_breakpoint_from_kind (gdbarch, arm_sw_breakpoint_from_kind);
833b7ab5
YQ
9438 set_gdbarch_breakpoint_kind_from_current_state (gdbarch,
9439 arm_breakpoint_kind_from_current_state);
34e8f22d
RE
9440
9441 /* Information about registers, etc. */
34e8f22d
RE
9442 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
9443 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
ff6f572f 9444 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
7a5ea0d4 9445 set_gdbarch_register_type (gdbarch, arm_register_type);
54483882 9446 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
34e8f22d 9447
ff6f572f
DJ
9448 /* This "info float" is FPA-specific. Use the generic version if we
9449 do not have FPA. */
9450 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
9451 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
9452
26216b98 9453 /* Internal <-> external register number maps. */
ff6f572f 9454 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
26216b98
AC
9455 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
9456
34e8f22d
RE
9457 set_gdbarch_register_name (gdbarch, arm_register_name);
9458
9459 /* Returning results. */
2af48f68 9460 set_gdbarch_return_value (gdbarch, arm_return_value);
34e8f22d 9461
03d48a7d
RE
9462 /* Disassembly. */
9463 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
9464
34e8f22d
RE
9465 /* Minsymbol frobbing. */
9466 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
9467 set_gdbarch_coff_make_msymbol_special (gdbarch,
9468 arm_coff_make_msymbol_special);
60c5725c 9469 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
34e8f22d 9470
f9d67f43
DJ
9471 /* Thumb-2 IT block support. */
9472 set_gdbarch_adjust_breakpoint_address (gdbarch,
9473 arm_adjust_breakpoint_address);
9474
0d5de010
DJ
9475 /* Virtual tables. */
9476 set_gdbarch_vbit_in_delta (gdbarch, 1);
9477
97e03143 9478 /* Hook in the ABI-specific overrides, if they have been registered. */
4be87837 9479 gdbarch_init_osabi (info, gdbarch);
97e03143 9480
b39cc962
DJ
9481 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
9482
eb5492fa 9483 /* Add some default predicates. */
2ae28aa9
YQ
9484 if (is_m)
9485 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
a262aec2
DJ
9486 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
9487 dwarf2_append_unwinders (gdbarch);
0e9e9abd 9488 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
779aa56f 9489 frame_unwind_append_unwinder (gdbarch, &arm_epilogue_frame_unwind);
a262aec2 9490 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
eb5492fa 9491
97e03143
RE
9492 /* Now we have tuned the configuration, set a few final things,
9493 based on what the OS ABI has told us. */
9494
b8926edc
DJ
9495 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
9496 binaries are always marked. */
9497 if (tdep->arm_abi == ARM_ABI_AUTO)
9498 tdep->arm_abi = ARM_ABI_APCS;
9499
e3039479
UW
9500 /* Watchpoints are not steppable. */
9501 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
9502
b8926edc
DJ
9503 /* We used to default to FPA for generic ARM, but almost nobody
9504 uses that now, and we now provide a way for the user to force
9505 the model. So default to the most useful variant. */
9506 if (tdep->fp_model == ARM_FLOAT_AUTO)
9507 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
9508
9df628e0
RE
9509 if (tdep->jb_pc >= 0)
9510 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
9511
08216dd7 9512 /* Floating point sizes and format. */
8da61cc4 9513 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
b8926edc 9514 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
08216dd7 9515 {
8da61cc4
DJ
9516 set_gdbarch_double_format
9517 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9518 set_gdbarch_long_double_format
9519 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9520 }
9521 else
9522 {
9523 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
9524 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
08216dd7
RE
9525 }
9526
58d6951d
DJ
9527 if (have_vfp_pseudos)
9528 {
9529 /* NOTE: These are the only pseudo registers used by
9530 the ARM target at the moment. If more are added, a
9531 little more care in numbering will be needed. */
9532
9533 int num_pseudos = 32;
9534 if (have_neon_pseudos)
9535 num_pseudos += 16;
9536 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
9537 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
9538 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
9539 }
9540
123dc839 9541 if (tdesc_data)
58d6951d
DJ
9542 {
9543 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
9544
9779414d 9545 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
58d6951d
DJ
9546
9547 /* Override tdesc_register_type to adjust the types of VFP
9548 registers for NEON. */
9549 set_gdbarch_register_type (gdbarch, arm_register_type);
9550 }
123dc839
DJ
9551
9552 /* Add standard register aliases. We add aliases even for those
9553 nanes which are used by the current architecture - it's simpler,
9554 and does no harm, since nothing ever lists user registers. */
9555 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
9556 user_reg_add (gdbarch, arm_register_aliases[i].name,
9557 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
9558
39bbf761
RE
9559 return gdbarch;
9560}
9561
97e03143 9562static void
2af46ca0 9563arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
97e03143 9564{
2af46ca0 9565 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
97e03143
RE
9566
9567 if (tdep == NULL)
9568 return;
9569
edefbb7c 9570 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
97e03143
RE
9571 (unsigned long) tdep->lowest_pc);
9572}
9573
a78f21af
AC
9574extern initialize_file_ftype _initialize_arm_tdep; /* -Wmissing-prototypes */
9575
c906108c 9576void
ed9a39eb 9577_initialize_arm_tdep (void)
c906108c 9578{
bc90b915
FN
9579 struct ui_file *stb;
9580 long length;
53904c9e
AC
9581 const char *setname;
9582 const char *setdesc;
4bd7b427 9583 const char *const *regnames;
bec2ab5a 9584 int i;
09b0e4b0 9585 static std::string helptext;
edefbb7c
AC
9586 char regdesc[1024], *rdptr = regdesc;
9587 size_t rest = sizeof (regdesc);
085dd6e6 9588
42cf1509 9589 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
97e03143 9590
60c5725c 9591 arm_objfile_data_key
c1bd65d0 9592 = register_objfile_data_with_cleanup (NULL, arm_objfile_data_free);
60c5725c 9593
0e9e9abd
UW
9594 /* Add ourselves to objfile event chain. */
9595 observer_attach_new_objfile (arm_exidx_new_objfile);
9596 arm_exidx_data_key
9597 = register_objfile_data_with_cleanup (NULL, arm_exidx_data_free);
9598
70f80edf
JT
9599 /* Register an ELF OS ABI sniffer for ARM binaries. */
9600 gdbarch_register_osabi_sniffer (bfd_arch_arm,
9601 bfd_target_elf_flavour,
9602 arm_elf_osabi_sniffer);
9603
9779414d
DJ
9604 /* Initialize the standard target descriptions. */
9605 initialize_tdesc_arm_with_m ();
25f8c692 9606 initialize_tdesc_arm_with_m_fpa_layout ();
3184d3f9 9607 initialize_tdesc_arm_with_m_vfp_d16 ();
ef7e8358
UW
9608 initialize_tdesc_arm_with_iwmmxt ();
9609 initialize_tdesc_arm_with_vfpv2 ();
9610 initialize_tdesc_arm_with_vfpv3 ();
9611 initialize_tdesc_arm_with_neon ();
9779414d 9612
94c30b78 9613 /* Get the number of possible sets of register names defined in opcodes. */
afd7eef0
RE
9614 num_disassembly_options = get_arm_regname_num_options ();
9615
9616 /* Add root prefix command for all "set arm"/"show arm" commands. */
9617 add_prefix_cmd ("arm", no_class, set_arm_command,
edefbb7c 9618 _("Various ARM-specific commands."),
afd7eef0
RE
9619 &setarmcmdlist, "set arm ", 0, &setlist);
9620
9621 add_prefix_cmd ("arm", no_class, show_arm_command,
edefbb7c 9622 _("Various ARM-specific commands."),
afd7eef0 9623 &showarmcmdlist, "show arm ", 0, &showlist);
bc90b915 9624
94c30b78 9625 /* Sync the opcode insn printer with our register viewer. */
bc90b915 9626 parse_arm_disassembler_option ("reg-names-std");
c5aa993b 9627
eefe576e
AC
9628 /* Initialize the array that will be passed to
9629 add_setshow_enum_cmd(). */
8d749320
SM
9630 valid_disassembly_styles = XNEWVEC (const char *,
9631 num_disassembly_options + 1);
afd7eef0 9632 for (i = 0; i < num_disassembly_options; i++)
bc90b915 9633 {
bec2ab5a 9634 get_arm_regnames (i, &setname, &setdesc, &regnames);
afd7eef0 9635 valid_disassembly_styles[i] = setname;
edefbb7c
AC
9636 length = snprintf (rdptr, rest, "%s - %s\n", setname, setdesc);
9637 rdptr += length;
9638 rest -= length;
123dc839
DJ
9639 /* When we find the default names, tell the disassembler to use
9640 them. */
bc90b915
FN
9641 if (!strcmp (setname, "std"))
9642 {
afd7eef0 9643 disassembly_style = setname;
bc90b915
FN
9644 set_arm_regname_option (i);
9645 }
9646 }
94c30b78 9647 /* Mark the end of valid options. */
afd7eef0 9648 valid_disassembly_styles[num_disassembly_options] = NULL;
c906108c 9649
edefbb7c
AC
9650 /* Create the help text. */
9651 stb = mem_fileopen ();
9652 fprintf_unfiltered (stb, "%s%s%s",
9653 _("The valid values are:\n"),
9654 regdesc,
9655 _("The default is \"std\"."));
09b0e4b0 9656 helptext = ui_file_as_string (stb);
bc90b915 9657 ui_file_delete (stb);
ed9a39eb 9658
edefbb7c
AC
9659 add_setshow_enum_cmd("disassembler", no_class,
9660 valid_disassembly_styles, &disassembly_style,
9661 _("Set the disassembly style."),
9662 _("Show the disassembly style."),
09b0e4b0 9663 helptext.c_str (),
2c5b56ce 9664 set_disassembly_style_sfunc,
0963b4bd
MS
9665 NULL, /* FIXME: i18n: The disassembly style is
9666 \"%s\". */
7376b4c2 9667 &setarmcmdlist, &showarmcmdlist);
edefbb7c
AC
9668
9669 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
9670 _("Set usage of ARM 32-bit mode."),
9671 _("Show usage of ARM 32-bit mode."),
9672 _("When off, a 26-bit PC will be used."),
2c5b56ce 9673 NULL,
0963b4bd
MS
9674 NULL, /* FIXME: i18n: Usage of ARM 32-bit
9675 mode is %s. */
26304000 9676 &setarmcmdlist, &showarmcmdlist);
c906108c 9677
fd50bc42 9678 /* Add a command to allow the user to force the FPU model. */
edefbb7c
AC
9679 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
9680 _("Set the floating point type."),
9681 _("Show the floating point type."),
9682 _("auto - Determine the FP typefrom the OS-ABI.\n\
9683softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
9684fpa - FPA co-processor (GCC compiled).\n\
9685softvfp - Software FP with pure-endian doubles.\n\
9686vfp - VFP co-processor."),
edefbb7c 9687 set_fp_model_sfunc, show_fp_model,
7376b4c2 9688 &setarmcmdlist, &showarmcmdlist);
fd50bc42 9689
28e97307
DJ
9690 /* Add a command to allow the user to force the ABI. */
9691 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
9692 _("Set the ABI."),
9693 _("Show the ABI."),
9694 NULL, arm_set_abi, arm_show_abi,
9695 &setarmcmdlist, &showarmcmdlist);
9696
0428b8f5
DJ
9697 /* Add two commands to allow the user to force the assumed
9698 execution mode. */
9699 add_setshow_enum_cmd ("fallback-mode", class_support,
9700 arm_mode_strings, &arm_fallback_mode_string,
9701 _("Set the mode assumed when symbols are unavailable."),
9702 _("Show the mode assumed when symbols are unavailable."),
9703 NULL, NULL, arm_show_fallback_mode,
9704 &setarmcmdlist, &showarmcmdlist);
9705 add_setshow_enum_cmd ("force-mode", class_support,
9706 arm_mode_strings, &arm_force_mode_string,
9707 _("Set the mode assumed even when symbols are available."),
9708 _("Show the mode assumed even when symbols are available."),
9709 NULL, NULL, arm_show_force_mode,
9710 &setarmcmdlist, &showarmcmdlist);
9711
6529d2dd 9712 /* Debugging flag. */
edefbb7c
AC
9713 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
9714 _("Set ARM debugging."),
9715 _("Show ARM debugging."),
9716 _("When on, arm-specific debugging is enabled."),
2c5b56ce 9717 NULL,
7915a72c 9718 NULL, /* FIXME: i18n: "ARM debugging is %s. */
26304000 9719 &setdebuglist, &showdebuglist);
c906108c 9720}
72508ac0
PO
9721
9722/* ARM-reversible process record data structures. */
9723
9724#define ARM_INSN_SIZE_BYTES 4
9725#define THUMB_INSN_SIZE_BYTES 2
9726#define THUMB2_INSN_SIZE_BYTES 4
9727
9728
71e396f9
LM
9729/* Position of the bit within a 32-bit ARM instruction
9730 that defines whether the instruction is a load or store. */
72508ac0
PO
9731#define INSN_S_L_BIT_NUM 20
9732
9733#define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
9734 do \
9735 { \
9736 unsigned int reg_len = LENGTH; \
9737 if (reg_len) \
9738 { \
9739 REGS = XNEWVEC (uint32_t, reg_len); \
9740 memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
9741 } \
9742 } \
9743 while (0)
9744
9745#define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
9746 do \
9747 { \
9748 unsigned int mem_len = LENGTH; \
9749 if (mem_len) \
9750 { \
9751 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
9752 memcpy(&MEMS->len, &RECORD_BUF[0], \
9753 sizeof(struct arm_mem_r) * LENGTH); \
9754 } \
9755 } \
9756 while (0)
9757
9758/* Checks whether insn is already recorded or yet to be decoded. (boolean expression). */
9759#define INSN_RECORDED(ARM_RECORD) \
9760 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
9761
9762/* ARM memory record structure. */
9763struct arm_mem_r
9764{
9765 uint32_t len; /* Record length. */
bfbbec00 9766 uint32_t addr; /* Memory address. */
72508ac0
PO
9767};
9768
9769/* ARM instruction record contains opcode of current insn
9770 and execution state (before entry to decode_insn()),
9771 contains list of to-be-modified registers and
9772 memory blocks (on return from decode_insn()). */
9773
9774typedef struct insn_decode_record_t
9775{
9776 struct gdbarch *gdbarch;
9777 struct regcache *regcache;
9778 CORE_ADDR this_addr; /* Address of the insn being decoded. */
9779 uint32_t arm_insn; /* Should accommodate thumb. */
9780 uint32_t cond; /* Condition code. */
9781 uint32_t opcode; /* Insn opcode. */
9782 uint32_t decode; /* Insn decode bits. */
9783 uint32_t mem_rec_count; /* No of mem records. */
9784 uint32_t reg_rec_count; /* No of reg records. */
9785 uint32_t *arm_regs; /* Registers to be saved for this record. */
9786 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
9787} insn_decode_record;
9788
9789
9790/* Checks ARM SBZ and SBO mandatory fields. */
9791
9792static int
9793sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
9794{
9795 uint32_t ones = bits (insn, bit_num - 1, (bit_num -1) + (len - 1));
9796
9797 if (!len)
9798 return 1;
9799
9800 if (!sbo)
9801 ones = ~ones;
9802
9803 while (ones)
9804 {
9805 if (!(ones & sbo))
9806 {
9807 return 0;
9808 }
9809 ones = ones >> 1;
9810 }
9811 return 1;
9812}
9813
c6ec2b30
OJ
9814enum arm_record_result
9815{
9816 ARM_RECORD_SUCCESS = 0,
9817 ARM_RECORD_FAILURE = 1
9818};
9819
72508ac0
PO
9820typedef enum
9821{
9822 ARM_RECORD_STRH=1,
9823 ARM_RECORD_STRD
9824} arm_record_strx_t;
9825
9826typedef enum
9827{
9828 ARM_RECORD=1,
9829 THUMB_RECORD,
9830 THUMB2_RECORD
9831} record_type_t;
9832
9833
9834static int
9835arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
9836 uint32_t *record_buf_mem, arm_record_strx_t str_type)
9837{
9838
9839 struct regcache *reg_cache = arm_insn_r->regcache;
9840 ULONGEST u_regval[2]= {0};
9841
9842 uint32_t reg_src1 = 0, reg_src2 = 0;
9843 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
72508ac0
PO
9844
9845 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
9846 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
72508ac0
PO
9847
9848 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
9849 {
9850 /* 1) Handle misc store, immediate offset. */
9851 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9852 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9853 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9854 regcache_raw_read_unsigned (reg_cache, reg_src1,
9855 &u_regval[0]);
9856 if (ARM_PC_REGNUM == reg_src1)
9857 {
9858 /* If R15 was used as Rn, hence current PC+8. */
9859 u_regval[0] = u_regval[0] + 8;
9860 }
9861 offset_8 = (immed_high << 4) | immed_low;
9862 /* Calculate target store address. */
9863 if (14 == arm_insn_r->opcode)
9864 {
9865 tgt_mem_addr = u_regval[0] + offset_8;
9866 }
9867 else
9868 {
9869 tgt_mem_addr = u_regval[0] - offset_8;
9870 }
9871 if (ARM_RECORD_STRH == str_type)
9872 {
9873 record_buf_mem[0] = 2;
9874 record_buf_mem[1] = tgt_mem_addr;
9875 arm_insn_r->mem_rec_count = 1;
9876 }
9877 else if (ARM_RECORD_STRD == str_type)
9878 {
9879 record_buf_mem[0] = 4;
9880 record_buf_mem[1] = tgt_mem_addr;
9881 record_buf_mem[2] = 4;
9882 record_buf_mem[3] = tgt_mem_addr + 4;
9883 arm_insn_r->mem_rec_count = 2;
9884 }
9885 }
9886 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
9887 {
9888 /* 2) Store, register offset. */
9889 /* Get Rm. */
9890 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9891 /* Get Rn. */
9892 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9893 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9894 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9895 if (15 == reg_src2)
9896 {
9897 /* If R15 was used as Rn, hence current PC+8. */
9898 u_regval[0] = u_regval[0] + 8;
9899 }
9900 /* Calculate target store address, Rn +/- Rm, register offset. */
9901 if (12 == arm_insn_r->opcode)
9902 {
9903 tgt_mem_addr = u_regval[0] + u_regval[1];
9904 }
9905 else
9906 {
9907 tgt_mem_addr = u_regval[1] - u_regval[0];
9908 }
9909 if (ARM_RECORD_STRH == str_type)
9910 {
9911 record_buf_mem[0] = 2;
9912 record_buf_mem[1] = tgt_mem_addr;
9913 arm_insn_r->mem_rec_count = 1;
9914 }
9915 else if (ARM_RECORD_STRD == str_type)
9916 {
9917 record_buf_mem[0] = 4;
9918 record_buf_mem[1] = tgt_mem_addr;
9919 record_buf_mem[2] = 4;
9920 record_buf_mem[3] = tgt_mem_addr + 4;
9921 arm_insn_r->mem_rec_count = 2;
9922 }
9923 }
9924 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
9925 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9926 {
9927 /* 3) Store, immediate pre-indexed. */
9928 /* 5) Store, immediate post-indexed. */
9929 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9930 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9931 offset_8 = (immed_high << 4) | immed_low;
9932 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9933 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9934 /* Calculate target store address, Rn +/- Rm, register offset. */
9935 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9936 {
9937 tgt_mem_addr = u_regval[0] + offset_8;
9938 }
9939 else
9940 {
9941 tgt_mem_addr = u_regval[0] - offset_8;
9942 }
9943 if (ARM_RECORD_STRH == str_type)
9944 {
9945 record_buf_mem[0] = 2;
9946 record_buf_mem[1] = tgt_mem_addr;
9947 arm_insn_r->mem_rec_count = 1;
9948 }
9949 else if (ARM_RECORD_STRD == str_type)
9950 {
9951 record_buf_mem[0] = 4;
9952 record_buf_mem[1] = tgt_mem_addr;
9953 record_buf_mem[2] = 4;
9954 record_buf_mem[3] = tgt_mem_addr + 4;
9955 arm_insn_r->mem_rec_count = 2;
9956 }
9957 /* Record Rn also as it changes. */
9958 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9959 arm_insn_r->reg_rec_count = 1;
9960 }
9961 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
9962 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9963 {
9964 /* 4) Store, register pre-indexed. */
9965 /* 6) Store, register post -indexed. */
9966 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9967 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9968 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9969 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9970 /* Calculate target store address, Rn +/- Rm, register offset. */
9971 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9972 {
9973 tgt_mem_addr = u_regval[0] + u_regval[1];
9974 }
9975 else
9976 {
9977 tgt_mem_addr = u_regval[1] - u_regval[0];
9978 }
9979 if (ARM_RECORD_STRH == str_type)
9980 {
9981 record_buf_mem[0] = 2;
9982 record_buf_mem[1] = tgt_mem_addr;
9983 arm_insn_r->mem_rec_count = 1;
9984 }
9985 else if (ARM_RECORD_STRD == str_type)
9986 {
9987 record_buf_mem[0] = 4;
9988 record_buf_mem[1] = tgt_mem_addr;
9989 record_buf_mem[2] = 4;
9990 record_buf_mem[3] = tgt_mem_addr + 4;
9991 arm_insn_r->mem_rec_count = 2;
9992 }
9993 /* Record Rn also as it changes. */
9994 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9995 arm_insn_r->reg_rec_count = 1;
9996 }
9997 return 0;
9998}
9999
10000/* Handling ARM extension space insns. */
10001
10002static int
10003arm_record_extension_space (insn_decode_record *arm_insn_r)
10004{
10005 uint32_t ret = 0; /* Return value: -1:record failure ; 0:success */
10006 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
10007 uint32_t record_buf[8], record_buf_mem[8];
10008 uint32_t reg_src1 = 0;
72508ac0
PO
10009 struct regcache *reg_cache = arm_insn_r->regcache;
10010 ULONGEST u_regval = 0;
10011
10012 gdb_assert (!INSN_RECORDED(arm_insn_r));
10013 /* Handle unconditional insn extension space. */
10014
10015 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
10016 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10017 if (arm_insn_r->cond)
10018 {
10019 /* PLD has no affect on architectural state, it just affects
10020 the caches. */
10021 if (5 == ((opcode1 & 0xE0) >> 5))
10022 {
10023 /* BLX(1) */
10024 record_buf[0] = ARM_PS_REGNUM;
10025 record_buf[1] = ARM_LR_REGNUM;
10026 arm_insn_r->reg_rec_count = 2;
10027 }
10028 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
10029 }
10030
10031
10032 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10033 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
10034 {
10035 ret = -1;
10036 /* Undefined instruction on ARM V5; need to handle if later
10037 versions define it. */
10038 }
10039
10040 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
10041 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10042 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
10043
10044 /* Handle arithmetic insn extension space. */
10045 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
10046 && !INSN_RECORDED(arm_insn_r))
10047 {
10048 /* Handle MLA(S) and MUL(S). */
10049 if (0 <= insn_op1 && 3 >= insn_op1)
10050 {
10051 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10052 record_buf[1] = ARM_PS_REGNUM;
10053 arm_insn_r->reg_rec_count = 2;
10054 }
10055 else if (4 <= insn_op1 && 15 >= insn_op1)
10056 {
10057 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
10058 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10059 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10060 record_buf[2] = ARM_PS_REGNUM;
10061 arm_insn_r->reg_rec_count = 3;
10062 }
10063 }
10064
10065 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
10066 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
10067 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
10068
10069 /* Handle control insn extension space. */
10070
10071 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
10072 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
10073 {
10074 if (!bit (arm_insn_r->arm_insn,25))
10075 {
10076 if (!bits (arm_insn_r->arm_insn, 4, 7))
10077 {
10078 if ((0 == insn_op1) || (2 == insn_op1))
10079 {
10080 /* MRS. */
10081 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10082 arm_insn_r->reg_rec_count = 1;
10083 }
10084 else if (1 == insn_op1)
10085 {
10086 /* CSPR is going to be changed. */
10087 record_buf[0] = ARM_PS_REGNUM;
10088 arm_insn_r->reg_rec_count = 1;
10089 }
10090 else if (3 == insn_op1)
10091 {
10092 /* SPSR is going to be changed. */
10093 /* We need to get SPSR value, which is yet to be done. */
72508ac0
PO
10094 return -1;
10095 }
10096 }
10097 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
10098 {
10099 if (1 == insn_op1)
10100 {
10101 /* BX. */
10102 record_buf[0] = ARM_PS_REGNUM;
10103 arm_insn_r->reg_rec_count = 1;
10104 }
10105 else if (3 == insn_op1)
10106 {
10107 /* CLZ. */
10108 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10109 arm_insn_r->reg_rec_count = 1;
10110 }
10111 }
10112 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
10113 {
10114 /* BLX. */
10115 record_buf[0] = ARM_PS_REGNUM;
10116 record_buf[1] = ARM_LR_REGNUM;
10117 arm_insn_r->reg_rec_count = 2;
10118 }
10119 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
10120 {
10121 /* QADD, QSUB, QDADD, QDSUB */
10122 record_buf[0] = ARM_PS_REGNUM;
10123 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10124 arm_insn_r->reg_rec_count = 2;
10125 }
10126 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
10127 {
10128 /* BKPT. */
10129 record_buf[0] = ARM_PS_REGNUM;
10130 record_buf[1] = ARM_LR_REGNUM;
10131 arm_insn_r->reg_rec_count = 2;
10132
10133 /* Save SPSR also;how? */
72508ac0
PO
10134 return -1;
10135 }
10136 else if(8 == bits (arm_insn_r->arm_insn, 4, 7)
10137 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
10138 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
10139 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
10140 )
10141 {
10142 if (0 == insn_op1 || 1 == insn_op1)
10143 {
10144 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
10145 /* We dont do optimization for SMULW<y> where we
10146 need only Rd. */
10147 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10148 record_buf[1] = ARM_PS_REGNUM;
10149 arm_insn_r->reg_rec_count = 2;
10150 }
10151 else if (2 == insn_op1)
10152 {
10153 /* SMLAL<x><y>. */
10154 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10155 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
10156 arm_insn_r->reg_rec_count = 2;
10157 }
10158 else if (3 == insn_op1)
10159 {
10160 /* SMUL<x><y>. */
10161 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10162 arm_insn_r->reg_rec_count = 1;
10163 }
10164 }
10165 }
10166 else
10167 {
10168 /* MSR : immediate form. */
10169 if (1 == insn_op1)
10170 {
10171 /* CSPR is going to be changed. */
10172 record_buf[0] = ARM_PS_REGNUM;
10173 arm_insn_r->reg_rec_count = 1;
10174 }
10175 else if (3 == insn_op1)
10176 {
10177 /* SPSR is going to be changed. */
10178 /* we need to get SPSR value, which is yet to be done */
72508ac0
PO
10179 return -1;
10180 }
10181 }
10182 }
10183
10184 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10185 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
10186 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
10187
10188 /* Handle load/store insn extension space. */
10189
10190 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
10191 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
10192 && !INSN_RECORDED(arm_insn_r))
10193 {
10194 /* SWP/SWPB. */
10195 if (0 == insn_op1)
10196 {
10197 /* These insn, changes register and memory as well. */
10198 /* SWP or SWPB insn. */
10199 /* Get memory address given by Rn. */
10200 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10201 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
10202 /* SWP insn ?, swaps word. */
10203 if (8 == arm_insn_r->opcode)
10204 {
10205 record_buf_mem[0] = 4;
10206 }
10207 else
10208 {
10209 /* SWPB insn, swaps only byte. */
10210 record_buf_mem[0] = 1;
10211 }
10212 record_buf_mem[1] = u_regval;
10213 arm_insn_r->mem_rec_count = 1;
10214 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10215 arm_insn_r->reg_rec_count = 1;
10216 }
10217 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10218 {
10219 /* STRH. */
10220 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10221 ARM_RECORD_STRH);
10222 }
10223 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10224 {
10225 /* LDRD. */
10226 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10227 record_buf[1] = record_buf[0] + 1;
10228 arm_insn_r->reg_rec_count = 2;
10229 }
10230 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10231 {
10232 /* STRD. */
10233 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10234 ARM_RECORD_STRD);
10235 }
10236 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
10237 {
10238 /* LDRH, LDRSB, LDRSH. */
10239 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10240 arm_insn_r->reg_rec_count = 1;
10241 }
10242
10243 }
10244
10245 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
10246 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
10247 && !INSN_RECORDED(arm_insn_r))
10248 {
10249 ret = -1;
10250 /* Handle coprocessor insn extension space. */
10251 }
10252
10253 /* To be done for ARMv5 and later; as of now we return -1. */
10254 if (-1 == ret)
ca92db2d 10255 return ret;
72508ac0
PO
10256
10257 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10258 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10259
10260 return ret;
10261}
10262
10263/* Handling opcode 000 insns. */
10264
10265static int
10266arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
10267{
10268 struct regcache *reg_cache = arm_insn_r->regcache;
10269 uint32_t record_buf[8], record_buf_mem[8];
10270 ULONGEST u_regval[2] = {0};
10271
bec2ab5a 10272 uint32_t reg_src1 = 0, reg_dest = 0;
72508ac0
PO
10273 uint32_t opcode1 = 0;
10274
10275 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10276 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10277 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10278
10279 /* Data processing insn /multiply insn. */
10280 if (9 == arm_insn_r->decode
10281 && ((4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10282 || (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)))
10283 {
10284 /* Handle multiply instructions. */
10285 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
10286 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
10287 {
10288 /* Handle MLA and MUL. */
10289 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10290 record_buf[1] = ARM_PS_REGNUM;
10291 arm_insn_r->reg_rec_count = 2;
10292 }
10293 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10294 {
10295 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
10296 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10297 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10298 record_buf[2] = ARM_PS_REGNUM;
10299 arm_insn_r->reg_rec_count = 3;
10300 }
10301 }
10302 else if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10303 && (11 == arm_insn_r->decode || 13 == arm_insn_r->decode))
10304 {
10305 /* Handle misc load insns, as 20th bit (L = 1). */
10306 /* LDR insn has a capability to do branching, if
10307 MOV LR, PC is precceded by LDR insn having Rn as R15
10308 in that case, it emulates branch and link insn, and hence we
10309 need to save CSPR and PC as well. I am not sure this is right
10310 place; as opcode = 010 LDR insn make this happen, if R15 was
10311 used. */
10312 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10313 if (15 != reg_dest)
10314 {
10315 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10316 arm_insn_r->reg_rec_count = 1;
10317 }
10318 else
10319 {
10320 record_buf[0] = reg_dest;
10321 record_buf[1] = ARM_PS_REGNUM;
10322 arm_insn_r->reg_rec_count = 2;
10323 }
10324 }
10325 else if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10326 && sbo_sbz (arm_insn_r->arm_insn, 5, 12, 0)
10327 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10328 && 2 == bits (arm_insn_r->arm_insn, 20, 21))
10329 {
10330 /* Handle MSR insn. */
10331 if (9 == arm_insn_r->opcode)
10332 {
10333 /* CSPR is going to be changed. */
10334 record_buf[0] = ARM_PS_REGNUM;
10335 arm_insn_r->reg_rec_count = 1;
10336 }
10337 else
10338 {
10339 /* SPSR is going to be changed. */
10340 /* How to read SPSR value? */
72508ac0
PO
10341 return -1;
10342 }
10343 }
10344 else if (9 == arm_insn_r->decode
10345 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10346 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10347 {
10348 /* Handling SWP, SWPB. */
10349 /* These insn, changes register and memory as well. */
10350 /* SWP or SWPB insn. */
10351
10352 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10353 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10354 /* SWP insn ?, swaps word. */
10355 if (8 == arm_insn_r->opcode)
10356 {
10357 record_buf_mem[0] = 4;
10358 }
10359 else
10360 {
10361 /* SWPB insn, swaps only byte. */
10362 record_buf_mem[0] = 1;
10363 }
10364 record_buf_mem[1] = u_regval[0];
10365 arm_insn_r->mem_rec_count = 1;
10366 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10367 arm_insn_r->reg_rec_count = 1;
10368 }
10369 else if (3 == arm_insn_r->decode && 0x12 == opcode1
10370 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10371 {
10372 /* Handle BLX, branch and link/exchange. */
10373 if (9 == arm_insn_r->opcode)
10374 {
10375 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm,
10376 and R14 stores the return address. */
10377 record_buf[0] = ARM_PS_REGNUM;
10378 record_buf[1] = ARM_LR_REGNUM;
10379 arm_insn_r->reg_rec_count = 2;
10380 }
10381 }
10382 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
10383 {
10384 /* Handle enhanced software breakpoint insn, BKPT. */
10385 /* CPSR is changed to be executed in ARM state, disabling normal
10386 interrupts, entering abort mode. */
10387 /* According to high vector configuration PC is set. */
10388 /* user hit breakpoint and type reverse, in
10389 that case, we need to go back with previous CPSR and
10390 Program Counter. */
10391 record_buf[0] = ARM_PS_REGNUM;
10392 record_buf[1] = ARM_LR_REGNUM;
10393 arm_insn_r->reg_rec_count = 2;
10394
10395 /* Save SPSR also; how? */
72508ac0
PO
10396 return -1;
10397 }
10398 else if (11 == arm_insn_r->decode
10399 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10400 {
10401 /* Handle enhanced store insns and DSP insns (e.g. LDRD). */
10402
10403 /* Handle str(x) insn */
10404 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10405 ARM_RECORD_STRH);
10406 }
10407 else if (1 == arm_insn_r->decode && 0x12 == opcode1
10408 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10409 {
10410 /* Handle BX, branch and link/exchange. */
10411 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm. */
10412 record_buf[0] = ARM_PS_REGNUM;
10413 arm_insn_r->reg_rec_count = 1;
10414 }
10415 else if (1 == arm_insn_r->decode && 0x16 == opcode1
10416 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
10417 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
10418 {
10419 /* Count leading zeros: CLZ. */
10420 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10421 arm_insn_r->reg_rec_count = 1;
10422 }
10423 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10424 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10425 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
10426 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0)
10427 )
10428 {
10429 /* Handle MRS insn. */
10430 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10431 arm_insn_r->reg_rec_count = 1;
10432 }
10433 else if (arm_insn_r->opcode <= 15)
10434 {
10435 /* Normal data processing insns. */
10436 /* Out of 11 shifter operands mode, all the insn modifies destination
10437 register, which is specified by 13-16 decode. */
10438 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10439 record_buf[1] = ARM_PS_REGNUM;
10440 arm_insn_r->reg_rec_count = 2;
10441 }
10442 else
10443 {
10444 return -1;
10445 }
10446
10447 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10448 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10449 return 0;
10450}
10451
10452/* Handling opcode 001 insns. */
10453
10454static int
10455arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
10456{
10457 uint32_t record_buf[8], record_buf_mem[8];
10458
10459 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10460 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10461
10462 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10463 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
10464 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10465 )
10466 {
10467 /* Handle MSR insn. */
10468 if (9 == arm_insn_r->opcode)
10469 {
10470 /* CSPR is going to be changed. */
10471 record_buf[0] = ARM_PS_REGNUM;
10472 arm_insn_r->reg_rec_count = 1;
10473 }
10474 else
10475 {
10476 /* SPSR is going to be changed. */
10477 }
10478 }
10479 else if (arm_insn_r->opcode <= 15)
10480 {
10481 /* Normal data processing insns. */
10482 /* Out of 11 shifter operands mode, all the insn modifies destination
10483 register, which is specified by 13-16 decode. */
10484 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10485 record_buf[1] = ARM_PS_REGNUM;
10486 arm_insn_r->reg_rec_count = 2;
10487 }
10488 else
10489 {
10490 return -1;
10491 }
10492
10493 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10494 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10495 return 0;
10496}
10497
c55978a6
YQ
10498static int
10499arm_record_media (insn_decode_record *arm_insn_r)
10500{
10501 uint32_t record_buf[8];
10502
10503 switch (bits (arm_insn_r->arm_insn, 22, 24))
10504 {
10505 case 0:
10506 /* Parallel addition and subtraction, signed */
10507 case 1:
10508 /* Parallel addition and subtraction, unsigned */
10509 case 2:
10510 case 3:
10511 /* Packing, unpacking, saturation and reversal */
10512 {
10513 int rd = bits (arm_insn_r->arm_insn, 12, 15);
10514
10515 record_buf[arm_insn_r->reg_rec_count++] = rd;
10516 }
10517 break;
10518
10519 case 4:
10520 case 5:
10521 /* Signed multiplies */
10522 {
10523 int rd = bits (arm_insn_r->arm_insn, 16, 19);
10524 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 22);
10525
10526 record_buf[arm_insn_r->reg_rec_count++] = rd;
10527 if (op1 == 0x0)
10528 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10529 else if (op1 == 0x4)
10530 record_buf[arm_insn_r->reg_rec_count++]
10531 = bits (arm_insn_r->arm_insn, 12, 15);
10532 }
10533 break;
10534
10535 case 6:
10536 {
10537 if (bit (arm_insn_r->arm_insn, 21)
10538 && bits (arm_insn_r->arm_insn, 5, 6) == 0x2)
10539 {
10540 /* SBFX */
10541 record_buf[arm_insn_r->reg_rec_count++]
10542 = bits (arm_insn_r->arm_insn, 12, 15);
10543 }
10544 else if (bits (arm_insn_r->arm_insn, 20, 21) == 0x0
10545 && bits (arm_insn_r->arm_insn, 5, 7) == 0x0)
10546 {
10547 /* USAD8 and USADA8 */
10548 record_buf[arm_insn_r->reg_rec_count++]
10549 = bits (arm_insn_r->arm_insn, 16, 19);
10550 }
10551 }
10552 break;
10553
10554 case 7:
10555 {
10556 if (bits (arm_insn_r->arm_insn, 20, 21) == 0x3
10557 && bits (arm_insn_r->arm_insn, 5, 7) == 0x7)
10558 {
10559 /* Permanently UNDEFINED */
10560 return -1;
10561 }
10562 else
10563 {
10564 /* BFC, BFI and UBFX */
10565 record_buf[arm_insn_r->reg_rec_count++]
10566 = bits (arm_insn_r->arm_insn, 12, 15);
10567 }
10568 }
10569 break;
10570
10571 default:
10572 return -1;
10573 }
10574
10575 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10576
10577 return 0;
10578}
10579
71e396f9 10580/* Handle ARM mode instructions with opcode 010. */
72508ac0
PO
10581
10582static int
10583arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
10584{
10585 struct regcache *reg_cache = arm_insn_r->regcache;
10586
71e396f9
LM
10587 uint32_t reg_base , reg_dest;
10588 uint32_t offset_12, tgt_mem_addr;
72508ac0 10589 uint32_t record_buf[8], record_buf_mem[8];
71e396f9
LM
10590 unsigned char wback;
10591 ULONGEST u_regval;
72508ac0 10592
71e396f9
LM
10593 /* Calculate wback. */
10594 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
10595 || (bit (arm_insn_r->arm_insn, 21) == 1);
72508ac0 10596
71e396f9
LM
10597 arm_insn_r->reg_rec_count = 0;
10598 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
72508ac0
PO
10599
10600 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10601 {
71e396f9
LM
10602 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
10603 and LDRT. */
10604
72508ac0 10605 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
71e396f9
LM
10606 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
10607
10608 /* The LDR instruction is capable of doing branching. If MOV LR, PC
10609 preceeds a LDR instruction having R15 as reg_base, it
10610 emulates a branch and link instruction, and hence we need to save
10611 CPSR and PC as well. */
10612 if (ARM_PC_REGNUM == reg_dest)
10613 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10614
10615 /* If wback is true, also save the base register, which is going to be
10616 written to. */
10617 if (wback)
10618 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
72508ac0
PO
10619 }
10620 else
10621 {
71e396f9
LM
10622 /* STR (immediate), STRB (immediate), STRBT and STRT. */
10623
72508ac0 10624 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
71e396f9
LM
10625 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10626
10627 /* Handle bit U. */
72508ac0 10628 if (bit (arm_insn_r->arm_insn, 23))
71e396f9
LM
10629 {
10630 /* U == 1: Add the offset. */
10631 tgt_mem_addr = (uint32_t) u_regval + offset_12;
10632 }
72508ac0 10633 else
71e396f9
LM
10634 {
10635 /* U == 0: subtract the offset. */
10636 tgt_mem_addr = (uint32_t) u_regval - offset_12;
10637 }
10638
10639 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
10640 bytes. */
10641 if (bit (arm_insn_r->arm_insn, 22))
10642 {
10643 /* STRB and STRBT: 1 byte. */
10644 record_buf_mem[0] = 1;
10645 }
10646 else
10647 {
10648 /* STR and STRT: 4 bytes. */
10649 record_buf_mem[0] = 4;
10650 }
10651
10652 /* Handle bit P. */
10653 if (bit (arm_insn_r->arm_insn, 24))
10654 record_buf_mem[1] = tgt_mem_addr;
10655 else
10656 record_buf_mem[1] = (uint32_t) u_regval;
72508ac0 10657
72508ac0
PO
10658 arm_insn_r->mem_rec_count = 1;
10659
71e396f9
LM
10660 /* If wback is true, also save the base register, which is going to be
10661 written to. */
10662 if (wback)
10663 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
72508ac0
PO
10664 }
10665
10666 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10667 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10668 return 0;
10669}
10670
10671/* Handling opcode 011 insns. */
10672
10673static int
10674arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
10675{
10676 struct regcache *reg_cache = arm_insn_r->regcache;
10677
10678 uint32_t shift_imm = 0;
10679 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
10680 uint32_t offset_12 = 0, tgt_mem_addr = 0;
10681 uint32_t record_buf[8], record_buf_mem[8];
10682
10683 LONGEST s_word;
10684 ULONGEST u_regval[2];
10685
c55978a6
YQ
10686 if (bit (arm_insn_r->arm_insn, 4))
10687 return arm_record_media (arm_insn_r);
10688
72508ac0
PO
10689 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10690 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10691
10692 /* Handle enhanced store insns and LDRD DSP insn,
10693 order begins according to addressing modes for store insns
10694 STRH insn. */
10695
10696 /* LDR or STR? */
10697 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10698 {
10699 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10700 /* LDR insn has a capability to do branching, if
10701 MOV LR, PC is precedded by LDR insn having Rn as R15
10702 in that case, it emulates branch and link insn, and hence we
10703 need to save CSPR and PC as well. */
10704 if (15 != reg_dest)
10705 {
10706 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10707 arm_insn_r->reg_rec_count = 1;
10708 }
10709 else
10710 {
10711 record_buf[0] = reg_dest;
10712 record_buf[1] = ARM_PS_REGNUM;
10713 arm_insn_r->reg_rec_count = 2;
10714 }
10715 }
10716 else
10717 {
10718 if (! bits (arm_insn_r->arm_insn, 4, 11))
10719 {
10720 /* Store insn, register offset and register pre-indexed,
10721 register post-indexed. */
10722 /* Get Rm. */
10723 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10724 /* Get Rn. */
10725 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10726 regcache_raw_read_unsigned (reg_cache, reg_src1
10727 , &u_regval[0]);
10728 regcache_raw_read_unsigned (reg_cache, reg_src2
10729 , &u_regval[1]);
10730 if (15 == reg_src2)
10731 {
10732 /* If R15 was used as Rn, hence current PC+8. */
10733 /* Pre-indexed mode doesnt reach here ; illegal insn. */
10734 u_regval[0] = u_regval[0] + 8;
10735 }
10736 /* Calculate target store address, Rn +/- Rm, register offset. */
10737 /* U == 1. */
10738 if (bit (arm_insn_r->arm_insn, 23))
10739 {
10740 tgt_mem_addr = u_regval[0] + u_regval[1];
10741 }
10742 else
10743 {
10744 tgt_mem_addr = u_regval[1] - u_regval[0];
10745 }
10746
10747 switch (arm_insn_r->opcode)
10748 {
10749 /* STR. */
10750 case 8:
10751 case 12:
10752 /* STR. */
10753 case 9:
10754 case 13:
10755 /* STRT. */
10756 case 1:
10757 case 5:
10758 /* STR. */
10759 case 0:
10760 case 4:
10761 record_buf_mem[0] = 4;
10762 break;
10763
10764 /* STRB. */
10765 case 10:
10766 case 14:
10767 /* STRB. */
10768 case 11:
10769 case 15:
10770 /* STRBT. */
10771 case 3:
10772 case 7:
10773 /* STRB. */
10774 case 2:
10775 case 6:
10776 record_buf_mem[0] = 1;
10777 break;
10778
10779 default:
10780 gdb_assert_not_reached ("no decoding pattern found");
10781 break;
10782 }
10783 record_buf_mem[1] = tgt_mem_addr;
10784 arm_insn_r->mem_rec_count = 1;
10785
10786 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10787 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10788 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10789 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10790 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10791 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10792 )
10793 {
10794 /* Rn is going to be changed in pre-indexed mode and
10795 post-indexed mode as well. */
10796 record_buf[0] = reg_src2;
10797 arm_insn_r->reg_rec_count = 1;
10798 }
10799 }
10800 else
10801 {
10802 /* Store insn, scaled register offset; scaled pre-indexed. */
10803 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
10804 /* Get Rm. */
10805 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10806 /* Get Rn. */
10807 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10808 /* Get shift_imm. */
10809 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
10810 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10811 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
10812 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10813 /* Offset_12 used as shift. */
10814 switch (offset_12)
10815 {
10816 case 0:
10817 /* Offset_12 used as index. */
10818 offset_12 = u_regval[0] << shift_imm;
10819 break;
10820
10821 case 1:
10822 offset_12 = (!shift_imm)?0:u_regval[0] >> shift_imm;
10823 break;
10824
10825 case 2:
10826 if (!shift_imm)
10827 {
10828 if (bit (u_regval[0], 31))
10829 {
10830 offset_12 = 0xFFFFFFFF;
10831 }
10832 else
10833 {
10834 offset_12 = 0;
10835 }
10836 }
10837 else
10838 {
10839 /* This is arithmetic shift. */
10840 offset_12 = s_word >> shift_imm;
10841 }
10842 break;
10843
10844 case 3:
10845 if (!shift_imm)
10846 {
10847 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
10848 &u_regval[1]);
10849 /* Get C flag value and shift it by 31. */
10850 offset_12 = (((bit (u_regval[1], 29)) << 31) \
10851 | (u_regval[0]) >> 1);
10852 }
10853 else
10854 {
10855 offset_12 = (u_regval[0] >> shift_imm) \
10856 | (u_regval[0] <<
10857 (sizeof(uint32_t) - shift_imm));
10858 }
10859 break;
10860
10861 default:
10862 gdb_assert_not_reached ("no decoding pattern found");
10863 break;
10864 }
10865
10866 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10867 /* bit U set. */
10868 if (bit (arm_insn_r->arm_insn, 23))
10869 {
10870 tgt_mem_addr = u_regval[1] + offset_12;
10871 }
10872 else
10873 {
10874 tgt_mem_addr = u_regval[1] - offset_12;
10875 }
10876
10877 switch (arm_insn_r->opcode)
10878 {
10879 /* STR. */
10880 case 8:
10881 case 12:
10882 /* STR. */
10883 case 9:
10884 case 13:
10885 /* STRT. */
10886 case 1:
10887 case 5:
10888 /* STR. */
10889 case 0:
10890 case 4:
10891 record_buf_mem[0] = 4;
10892 break;
10893
10894 /* STRB. */
10895 case 10:
10896 case 14:
10897 /* STRB. */
10898 case 11:
10899 case 15:
10900 /* STRBT. */
10901 case 3:
10902 case 7:
10903 /* STRB. */
10904 case 2:
10905 case 6:
10906 record_buf_mem[0] = 1;
10907 break;
10908
10909 default:
10910 gdb_assert_not_reached ("no decoding pattern found");
10911 break;
10912 }
10913 record_buf_mem[1] = tgt_mem_addr;
10914 arm_insn_r->mem_rec_count = 1;
10915
10916 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10917 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10918 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10919 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10920 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10921 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10922 )
10923 {
10924 /* Rn is going to be changed in register scaled pre-indexed
10925 mode,and scaled post indexed mode. */
10926 record_buf[0] = reg_src2;
10927 arm_insn_r->reg_rec_count = 1;
10928 }
10929 }
10930 }
10931
10932 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10933 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10934 return 0;
10935}
10936
71e396f9 10937/* Handle ARM mode instructions with opcode 100. */
72508ac0
PO
10938
10939static int
10940arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
10941{
10942 struct regcache *reg_cache = arm_insn_r->regcache;
71e396f9
LM
10943 uint32_t register_count = 0, register_bits;
10944 uint32_t reg_base, addr_mode;
72508ac0 10945 uint32_t record_buf[24], record_buf_mem[48];
71e396f9
LM
10946 uint32_t wback;
10947 ULONGEST u_regval;
72508ac0 10948
71e396f9
LM
10949 /* Fetch the list of registers. */
10950 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
10951 arm_insn_r->reg_rec_count = 0;
10952
10953 /* Fetch the base register that contains the address we are loading data
10954 to. */
10955 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
72508ac0 10956
71e396f9
LM
10957 /* Calculate wback. */
10958 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
72508ac0
PO
10959
10960 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10961 {
71e396f9 10962 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
72508ac0 10963
71e396f9 10964 /* Find out which registers are going to be loaded from memory. */
72508ac0 10965 while (register_bits)
71e396f9
LM
10966 {
10967 if (register_bits & 0x00000001)
10968 record_buf[arm_insn_r->reg_rec_count++] = register_count;
10969 register_bits = register_bits >> 1;
10970 register_count++;
10971 }
72508ac0 10972
71e396f9
LM
10973
10974 /* If wback is true, also save the base register, which is going to be
10975 written to. */
10976 if (wback)
10977 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10978
10979 /* Save the CPSR register. */
10980 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
72508ac0
PO
10981 }
10982 else
10983 {
71e396f9 10984 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
72508ac0 10985
71e396f9
LM
10986 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
10987
10988 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10989
10990 /* Find out how many registers are going to be stored to memory. */
72508ac0 10991 while (register_bits)
71e396f9
LM
10992 {
10993 if (register_bits & 0x00000001)
10994 register_count++;
10995 register_bits = register_bits >> 1;
10996 }
72508ac0
PO
10997
10998 switch (addr_mode)
71e396f9
LM
10999 {
11000 /* STMDA (STMED): Decrement after. */
11001 case 0:
11002 record_buf_mem[1] = (uint32_t) u_regval
11003 - register_count * INT_REGISTER_SIZE + 4;
11004 break;
11005 /* STM (STMIA, STMEA): Increment after. */
11006 case 1:
11007 record_buf_mem[1] = (uint32_t) u_regval;
11008 break;
11009 /* STMDB (STMFD): Decrement before. */
11010 case 2:
11011 record_buf_mem[1] = (uint32_t) u_regval
11012 - register_count * INT_REGISTER_SIZE;
11013 break;
11014 /* STMIB (STMFA): Increment before. */
11015 case 3:
11016 record_buf_mem[1] = (uint32_t) u_regval + INT_REGISTER_SIZE;
11017 break;
11018 default:
11019 gdb_assert_not_reached ("no decoding pattern found");
11020 break;
11021 }
72508ac0 11022
71e396f9
LM
11023 record_buf_mem[0] = register_count * INT_REGISTER_SIZE;
11024 arm_insn_r->mem_rec_count = 1;
11025
11026 /* If wback is true, also save the base register, which is going to be
11027 written to. */
11028 if (wback)
11029 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
72508ac0
PO
11030 }
11031
11032 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11033 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11034 return 0;
11035}
11036
11037/* Handling opcode 101 insns. */
11038
11039static int
11040arm_record_b_bl (insn_decode_record *arm_insn_r)
11041{
11042 uint32_t record_buf[8];
11043
11044 /* Handle B, BL, BLX(1) insns. */
11045 /* B simply branches so we do nothing here. */
11046 /* Note: BLX(1) doesnt fall here but instead it falls into
11047 extension space. */
11048 if (bit (arm_insn_r->arm_insn, 24))
11049 {
11050 record_buf[0] = ARM_LR_REGNUM;
11051 arm_insn_r->reg_rec_count = 1;
11052 }
11053
11054 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11055
11056 return 0;
11057}
11058
72508ac0 11059static int
c6ec2b30 11060arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
72508ac0
PO
11061{
11062 printf_unfiltered (_("Process record does not support instruction "
01e57735
YQ
11063 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11064 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
72508ac0
PO
11065
11066 return -1;
11067}
11068
5a578da5
OJ
11069/* Record handler for vector data transfer instructions. */
11070
11071static int
11072arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
11073{
11074 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
11075 uint32_t record_buf[4];
11076
5a578da5
OJ
11077 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
11078 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
11079 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
11080 bit_l = bit (arm_insn_r->arm_insn, 20);
11081 bit_c = bit (arm_insn_r->arm_insn, 8);
11082
11083 /* Handle VMOV instruction. */
11084 if (bit_l && bit_c)
11085 {
11086 record_buf[0] = reg_t;
11087 arm_insn_r->reg_rec_count = 1;
11088 }
11089 else if (bit_l && !bit_c)
11090 {
11091 /* Handle VMOV instruction. */
11092 if (bits_a == 0x00)
11093 {
f1771dce 11094 record_buf[0] = reg_t;
5a578da5
OJ
11095 arm_insn_r->reg_rec_count = 1;
11096 }
11097 /* Handle VMRS instruction. */
11098 else if (bits_a == 0x07)
11099 {
11100 if (reg_t == 15)
11101 reg_t = ARM_PS_REGNUM;
11102
11103 record_buf[0] = reg_t;
11104 arm_insn_r->reg_rec_count = 1;
11105 }
11106 }
11107 else if (!bit_l && !bit_c)
11108 {
11109 /* Handle VMOV instruction. */
11110 if (bits_a == 0x00)
11111 {
f1771dce 11112 record_buf[0] = ARM_D0_REGNUM + reg_v;
5a578da5
OJ
11113
11114 arm_insn_r->reg_rec_count = 1;
11115 }
11116 /* Handle VMSR instruction. */
11117 else if (bits_a == 0x07)
11118 {
11119 record_buf[0] = ARM_FPSCR_REGNUM;
11120 arm_insn_r->reg_rec_count = 1;
11121 }
11122 }
11123 else if (!bit_l && bit_c)
11124 {
11125 /* Handle VMOV instruction. */
11126 if (!(bits_a & 0x04))
11127 {
11128 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
11129 + ARM_D0_REGNUM;
11130 arm_insn_r->reg_rec_count = 1;
11131 }
11132 /* Handle VDUP instruction. */
11133 else
11134 {
11135 if (bit (arm_insn_r->arm_insn, 21))
11136 {
11137 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11138 record_buf[0] = reg_v + ARM_D0_REGNUM;
11139 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
11140 arm_insn_r->reg_rec_count = 2;
11141 }
11142 else
11143 {
11144 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11145 record_buf[0] = reg_v + ARM_D0_REGNUM;
11146 arm_insn_r->reg_rec_count = 1;
11147 }
11148 }
11149 }
11150
11151 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11152 return 0;
11153}
11154
f20f80dd
OJ
11155/* Record handler for extension register load/store instructions. */
11156
11157static int
11158arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
11159{
11160 uint32_t opcode, single_reg;
11161 uint8_t op_vldm_vstm;
11162 uint32_t record_buf[8], record_buf_mem[128];
11163 ULONGEST u_regval = 0;
11164
11165 struct regcache *reg_cache = arm_insn_r->regcache;
f20f80dd
OJ
11166
11167 opcode = bits (arm_insn_r->arm_insn, 20, 24);
9fde51ed 11168 single_reg = !bit (arm_insn_r->arm_insn, 8);
f20f80dd
OJ
11169 op_vldm_vstm = opcode & 0x1b;
11170
11171 /* Handle VMOV instructions. */
11172 if ((opcode & 0x1e) == 0x04)
11173 {
9fde51ed 11174 if (bit (arm_insn_r->arm_insn, 20)) /* to_arm_registers bit 20? */
01e57735
YQ
11175 {
11176 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11177 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11178 arm_insn_r->reg_rec_count = 2;
11179 }
f20f80dd 11180 else
01e57735 11181 {
9fde51ed
YQ
11182 uint8_t reg_m = bits (arm_insn_r->arm_insn, 0, 3);
11183 uint8_t bit_m = bit (arm_insn_r->arm_insn, 5);
f20f80dd 11184
9fde51ed 11185 if (single_reg)
01e57735 11186 {
9fde51ed
YQ
11187 /* The first S register number m is REG_M:M (M is bit 5),
11188 the corresponding D register number is REG_M:M / 2, which
11189 is REG_M. */
11190 record_buf[arm_insn_r->reg_rec_count++] = ARM_D0_REGNUM + reg_m;
11191 /* The second S register number is REG_M:M + 1, the
11192 corresponding D register number is (REG_M:M + 1) / 2.
11193 IOW, if bit M is 1, the first and second S registers
11194 are mapped to different D registers, otherwise, they are
11195 in the same D register. */
11196 if (bit_m)
11197 {
11198 record_buf[arm_insn_r->reg_rec_count++]
11199 = ARM_D0_REGNUM + reg_m + 1;
11200 }
01e57735
YQ
11201 }
11202 else
11203 {
9fde51ed 11204 record_buf[0] = ((bit_m << 4) + reg_m + ARM_D0_REGNUM);
01e57735
YQ
11205 arm_insn_r->reg_rec_count = 1;
11206 }
11207 }
f20f80dd
OJ
11208 }
11209 /* Handle VSTM and VPUSH instructions. */
11210 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
01e57735 11211 || op_vldm_vstm == 0x12)
f20f80dd
OJ
11212 {
11213 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
11214 uint32_t memory_index = 0;
11215
11216 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11217 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11218 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
9fde51ed 11219 imm_off32 = imm_off8 << 2;
f20f80dd
OJ
11220 memory_count = imm_off8;
11221
11222 if (bit (arm_insn_r->arm_insn, 23))
01e57735 11223 start_address = u_regval;
f20f80dd 11224 else
01e57735 11225 start_address = u_regval - imm_off32;
f20f80dd
OJ
11226
11227 if (bit (arm_insn_r->arm_insn, 21))
01e57735
YQ
11228 {
11229 record_buf[0] = reg_rn;
11230 arm_insn_r->reg_rec_count = 1;
11231 }
f20f80dd
OJ
11232
11233 while (memory_count > 0)
01e57735 11234 {
9fde51ed 11235 if (single_reg)
01e57735 11236 {
9fde51ed
YQ
11237 record_buf_mem[memory_index] = 4;
11238 record_buf_mem[memory_index + 1] = start_address;
01e57735
YQ
11239 start_address = start_address + 4;
11240 memory_index = memory_index + 2;
11241 }
11242 else
11243 {
9fde51ed
YQ
11244 record_buf_mem[memory_index] = 4;
11245 record_buf_mem[memory_index + 1] = start_address;
11246 record_buf_mem[memory_index + 2] = 4;
11247 record_buf_mem[memory_index + 3] = start_address + 4;
01e57735
YQ
11248 start_address = start_address + 8;
11249 memory_index = memory_index + 4;
11250 }
11251 memory_count--;
11252 }
f20f80dd
OJ
11253 arm_insn_r->mem_rec_count = (memory_index >> 1);
11254 }
11255 /* Handle VLDM instructions. */
11256 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
01e57735 11257 || op_vldm_vstm == 0x13)
f20f80dd
OJ
11258 {
11259 uint32_t reg_count, reg_vd;
11260 uint32_t reg_index = 0;
9fde51ed 11261 uint32_t bit_d = bit (arm_insn_r->arm_insn, 22);
f20f80dd
OJ
11262
11263 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11264 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
11265
9fde51ed
YQ
11266 /* REG_VD is the first D register number. If the instruction
11267 loads memory to S registers (SINGLE_REG is TRUE), the register
11268 number is (REG_VD << 1 | bit D), so the corresponding D
11269 register number is (REG_VD << 1 | bit D) / 2 = REG_VD. */
11270 if (!single_reg)
11271 reg_vd = reg_vd | (bit_d << 4);
f20f80dd 11272
9fde51ed 11273 if (bit (arm_insn_r->arm_insn, 21) /* write back */)
01e57735 11274 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
f20f80dd 11275
9fde51ed
YQ
11276 /* If the instruction loads memory to D register, REG_COUNT should
11277 be divided by 2, according to the ARM Architecture Reference
11278 Manual. If the instruction loads memory to S register, divide by
11279 2 as well because two S registers are mapped to D register. */
11280 reg_count = reg_count / 2;
11281 if (single_reg && bit_d)
01e57735 11282 {
9fde51ed
YQ
11283 /* Increase the register count if S register list starts from
11284 an odd number (bit d is one). */
11285 reg_count++;
11286 }
f20f80dd 11287
9fde51ed
YQ
11288 while (reg_count > 0)
11289 {
11290 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
01e57735
YQ
11291 reg_count--;
11292 }
f20f80dd
OJ
11293 arm_insn_r->reg_rec_count = reg_index;
11294 }
11295 /* VSTR Vector store register. */
11296 else if ((opcode & 0x13) == 0x10)
11297 {
bec2ab5a 11298 uint32_t start_address, reg_rn, imm_off32, imm_off8;
f20f80dd
OJ
11299 uint32_t memory_index = 0;
11300
11301 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11302 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11303 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
9fde51ed 11304 imm_off32 = imm_off8 << 2;
f20f80dd
OJ
11305
11306 if (bit (arm_insn_r->arm_insn, 23))
01e57735 11307 start_address = u_regval + imm_off32;
f20f80dd 11308 else
01e57735 11309 start_address = u_regval - imm_off32;
f20f80dd
OJ
11310
11311 if (single_reg)
01e57735 11312 {
9fde51ed
YQ
11313 record_buf_mem[memory_index] = 4;
11314 record_buf_mem[memory_index + 1] = start_address;
01e57735
YQ
11315 arm_insn_r->mem_rec_count = 1;
11316 }
f20f80dd 11317 else
01e57735 11318 {
9fde51ed
YQ
11319 record_buf_mem[memory_index] = 4;
11320 record_buf_mem[memory_index + 1] = start_address;
11321 record_buf_mem[memory_index + 2] = 4;
11322 record_buf_mem[memory_index + 3] = start_address + 4;
01e57735
YQ
11323 arm_insn_r->mem_rec_count = 2;
11324 }
f20f80dd
OJ
11325 }
11326 /* VLDR Vector load register. */
11327 else if ((opcode & 0x13) == 0x11)
11328 {
11329 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11330
11331 if (!single_reg)
01e57735
YQ
11332 {
11333 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
11334 record_buf[0] = ARM_D0_REGNUM + reg_vd;
11335 }
f20f80dd 11336 else
01e57735
YQ
11337 {
11338 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
9fde51ed
YQ
11339 /* Record register D rather than pseudo register S. */
11340 record_buf[0] = ARM_D0_REGNUM + reg_vd / 2;
01e57735 11341 }
f20f80dd
OJ
11342 arm_insn_r->reg_rec_count = 1;
11343 }
11344
11345 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11346 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11347 return 0;
11348}
11349
851f26ae
OJ
11350/* Record handler for arm/thumb mode VFP data processing instructions. */
11351
11352static int
11353arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
11354{
11355 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
11356 uint32_t record_buf[4];
11357 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
11358 enum insn_types curr_insn_type = INSN_INV;
11359
11360 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11361 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
11362 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
11363 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
11364 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
11365 bit_d = bit (arm_insn_r->arm_insn, 22);
11366 opc1 = opc1 & 0x04;
11367
11368 /* Handle VMLA, VMLS. */
11369 if (opc1 == 0x00)
11370 {
11371 if (bit (arm_insn_r->arm_insn, 10))
11372 {
11373 if (bit (arm_insn_r->arm_insn, 6))
11374 curr_insn_type = INSN_T0;
11375 else
11376 curr_insn_type = INSN_T1;
11377 }
11378 else
11379 {
11380 if (dp_op_sz)
11381 curr_insn_type = INSN_T1;
11382 else
11383 curr_insn_type = INSN_T2;
11384 }
11385 }
11386 /* Handle VNMLA, VNMLS, VNMUL. */
11387 else if (opc1 == 0x01)
11388 {
11389 if (dp_op_sz)
11390 curr_insn_type = INSN_T1;
11391 else
11392 curr_insn_type = INSN_T2;
11393 }
11394 /* Handle VMUL. */
11395 else if (opc1 == 0x02 && !(opc3 & 0x01))
11396 {
11397 if (bit (arm_insn_r->arm_insn, 10))
11398 {
11399 if (bit (arm_insn_r->arm_insn, 6))
11400 curr_insn_type = INSN_T0;
11401 else
11402 curr_insn_type = INSN_T1;
11403 }
11404 else
11405 {
11406 if (dp_op_sz)
11407 curr_insn_type = INSN_T1;
11408 else
11409 curr_insn_type = INSN_T2;
11410 }
11411 }
11412 /* Handle VADD, VSUB. */
11413 else if (opc1 == 0x03)
11414 {
11415 if (!bit (arm_insn_r->arm_insn, 9))
11416 {
11417 if (bit (arm_insn_r->arm_insn, 6))
11418 curr_insn_type = INSN_T0;
11419 else
11420 curr_insn_type = INSN_T1;
11421 }
11422 else
11423 {
11424 if (dp_op_sz)
11425 curr_insn_type = INSN_T1;
11426 else
11427 curr_insn_type = INSN_T2;
11428 }
11429 }
11430 /* Handle VDIV. */
11431 else if (opc1 == 0x0b)
11432 {
11433 if (dp_op_sz)
11434 curr_insn_type = INSN_T1;
11435 else
11436 curr_insn_type = INSN_T2;
11437 }
11438 /* Handle all other vfp data processing instructions. */
11439 else if (opc1 == 0x0b)
11440 {
11441 /* Handle VMOV. */
11442 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
11443 {
11444 if (bit (arm_insn_r->arm_insn, 4))
11445 {
11446 if (bit (arm_insn_r->arm_insn, 6))
11447 curr_insn_type = INSN_T0;
11448 else
11449 curr_insn_type = INSN_T1;
11450 }
11451 else
11452 {
11453 if (dp_op_sz)
11454 curr_insn_type = INSN_T1;
11455 else
11456 curr_insn_type = INSN_T2;
11457 }
11458 }
11459 /* Handle VNEG and VABS. */
11460 else if ((opc2 == 0x01 && opc3 == 0x01)
11461 || (opc2 == 0x00 && opc3 == 0x03))
11462 {
11463 if (!bit (arm_insn_r->arm_insn, 11))
11464 {
11465 if (bit (arm_insn_r->arm_insn, 6))
11466 curr_insn_type = INSN_T0;
11467 else
11468 curr_insn_type = INSN_T1;
11469 }
11470 else
11471 {
11472 if (dp_op_sz)
11473 curr_insn_type = INSN_T1;
11474 else
11475 curr_insn_type = INSN_T2;
11476 }
11477 }
11478 /* Handle VSQRT. */
11479 else if (opc2 == 0x01 && opc3 == 0x03)
11480 {
11481 if (dp_op_sz)
11482 curr_insn_type = INSN_T1;
11483 else
11484 curr_insn_type = INSN_T2;
11485 }
11486 /* Handle VCVT. */
11487 else if (opc2 == 0x07 && opc3 == 0x03)
11488 {
11489 if (!dp_op_sz)
11490 curr_insn_type = INSN_T1;
11491 else
11492 curr_insn_type = INSN_T2;
11493 }
11494 else if (opc3 & 0x01)
11495 {
11496 /* Handle VCVT. */
11497 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
11498 {
11499 if (!bit (arm_insn_r->arm_insn, 18))
11500 curr_insn_type = INSN_T2;
11501 else
11502 {
11503 if (dp_op_sz)
11504 curr_insn_type = INSN_T1;
11505 else
11506 curr_insn_type = INSN_T2;
11507 }
11508 }
11509 /* Handle VCVT. */
11510 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
11511 {
11512 if (dp_op_sz)
11513 curr_insn_type = INSN_T1;
11514 else
11515 curr_insn_type = INSN_T2;
11516 }
11517 /* Handle VCVTB, VCVTT. */
11518 else if ((opc2 & 0x0e) == 0x02)
11519 curr_insn_type = INSN_T2;
11520 /* Handle VCMP, VCMPE. */
11521 else if ((opc2 & 0x0e) == 0x04)
11522 curr_insn_type = INSN_T3;
11523 }
11524 }
11525
11526 switch (curr_insn_type)
11527 {
11528 case INSN_T0:
11529 reg_vd = reg_vd | (bit_d << 4);
11530 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11531 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
11532 arm_insn_r->reg_rec_count = 2;
11533 break;
11534
11535 case INSN_T1:
11536 reg_vd = reg_vd | (bit_d << 4);
11537 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11538 arm_insn_r->reg_rec_count = 1;
11539 break;
11540
11541 case INSN_T2:
11542 reg_vd = (reg_vd << 1) | bit_d;
11543 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11544 arm_insn_r->reg_rec_count = 1;
11545 break;
11546
11547 case INSN_T3:
11548 record_buf[0] = ARM_FPSCR_REGNUM;
11549 arm_insn_r->reg_rec_count = 1;
11550 break;
11551
11552 default:
11553 gdb_assert_not_reached ("no decoding pattern found");
11554 break;
11555 }
11556
11557 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11558 return 0;
11559}
11560
60cc5e93
OJ
11561/* Handling opcode 110 insns. */
11562
11563static int
11564arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
11565{
bec2ab5a 11566 uint32_t op1, op1_ebit, coproc;
60cc5e93
OJ
11567
11568 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11569 op1 = bits (arm_insn_r->arm_insn, 20, 25);
11570 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11571
11572 if ((coproc & 0x0e) == 0x0a)
11573 {
11574 /* Handle extension register ld/st instructions. */
11575 if (!(op1 & 0x20))
f20f80dd 11576 return arm_record_exreg_ld_st_insn (arm_insn_r);
60cc5e93
OJ
11577
11578 /* 64-bit transfers between arm core and extension registers. */
11579 if ((op1 & 0x3e) == 0x04)
f20f80dd 11580 return arm_record_exreg_ld_st_insn (arm_insn_r);
60cc5e93
OJ
11581 }
11582 else
11583 {
11584 /* Handle coprocessor ld/st instructions. */
11585 if (!(op1 & 0x3a))
11586 {
11587 /* Store. */
11588 if (!op1_ebit)
11589 return arm_record_unsupported_insn (arm_insn_r);
11590 else
11591 /* Load. */
11592 return arm_record_unsupported_insn (arm_insn_r);
11593 }
11594
11595 /* Move to coprocessor from two arm core registers. */
11596 if (op1 == 0x4)
11597 return arm_record_unsupported_insn (arm_insn_r);
11598
11599 /* Move to two arm core registers from coprocessor. */
11600 if (op1 == 0x5)
11601 {
11602 uint32_t reg_t[2];
11603
11604 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
11605 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
11606 arm_insn_r->reg_rec_count = 2;
11607
11608 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
11609 return 0;
11610 }
11611 }
11612 return arm_record_unsupported_insn (arm_insn_r);
11613}
11614
72508ac0
PO
11615/* Handling opcode 111 insns. */
11616
11617static int
11618arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
11619{
60cc5e93 11620 uint32_t op, op1_sbit, op1_ebit, coproc;
72508ac0
PO
11621 struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
11622 struct regcache *reg_cache = arm_insn_r->regcache;
72508ac0
PO
11623
11624 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
60cc5e93
OJ
11625 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11626 op1_sbit = bit (arm_insn_r->arm_insn, 24);
11627 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11628 op = bit (arm_insn_r->arm_insn, 4);
97dfe206
OJ
11629
11630 /* Handle arm SWI/SVC system call instructions. */
60cc5e93 11631 if (op1_sbit)
97dfe206
OJ
11632 {
11633 if (tdep->arm_syscall_record != NULL)
11634 {
11635 ULONGEST svc_operand, svc_number;
11636
11637 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
11638
11639 if (svc_operand) /* OABI. */
11640 svc_number = svc_operand - 0x900000;
11641 else /* EABI. */
11642 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
11643
60cc5e93 11644 return tdep->arm_syscall_record (reg_cache, svc_number);
97dfe206
OJ
11645 }
11646 else
11647 {
11648 printf_unfiltered (_("no syscall record support\n"));
60cc5e93 11649 return -1;
97dfe206
OJ
11650 }
11651 }
60cc5e93
OJ
11652
11653 if ((coproc & 0x0e) == 0x0a)
11654 {
11655 /* VFP data-processing instructions. */
11656 if (!op1_sbit && !op)
851f26ae 11657 return arm_record_vfp_data_proc_insn (arm_insn_r);
60cc5e93
OJ
11658
11659 /* Advanced SIMD, VFP instructions. */
11660 if (!op1_sbit && op)
5a578da5 11661 return arm_record_vdata_transfer_insn (arm_insn_r);
60cc5e93 11662 }
97dfe206
OJ
11663 else
11664 {
60cc5e93
OJ
11665 /* Coprocessor data operations. */
11666 if (!op1_sbit && !op)
11667 return arm_record_unsupported_insn (arm_insn_r);
11668
11669 /* Move to Coprocessor from ARM core register. */
11670 if (!op1_sbit && !op1_ebit && op)
11671 return arm_record_unsupported_insn (arm_insn_r);
11672
11673 /* Move to arm core register from coprocessor. */
11674 if (!op1_sbit && op1_ebit && op)
11675 {
11676 uint32_t record_buf[1];
11677
11678 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11679 if (record_buf[0] == 15)
11680 record_buf[0] = ARM_PS_REGNUM;
11681
11682 arm_insn_r->reg_rec_count = 1;
11683 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
11684 record_buf);
11685 return 0;
11686 }
97dfe206 11687 }
72508ac0 11688
60cc5e93 11689 return arm_record_unsupported_insn (arm_insn_r);
72508ac0
PO
11690}
11691
11692/* Handling opcode 000 insns. */
11693
11694static int
11695thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
11696{
11697 uint32_t record_buf[8];
11698 uint32_t reg_src1 = 0;
11699
11700 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11701
11702 record_buf[0] = ARM_PS_REGNUM;
11703 record_buf[1] = reg_src1;
11704 thumb_insn_r->reg_rec_count = 2;
11705
11706 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11707
11708 return 0;
11709}
11710
11711
11712/* Handling opcode 001 insns. */
11713
11714static int
11715thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
11716{
11717 uint32_t record_buf[8];
11718 uint32_t reg_src1 = 0;
11719
11720 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11721
11722 record_buf[0] = ARM_PS_REGNUM;
11723 record_buf[1] = reg_src1;
11724 thumb_insn_r->reg_rec_count = 2;
11725
11726 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11727
11728 return 0;
11729}
11730
11731/* Handling opcode 010 insns. */
11732
11733static int
11734thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
11735{
11736 struct regcache *reg_cache = thumb_insn_r->regcache;
11737 uint32_t record_buf[8], record_buf_mem[8];
11738
11739 uint32_t reg_src1 = 0, reg_src2 = 0;
11740 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
11741
11742 ULONGEST u_regval[2] = {0};
11743
11744 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
11745
11746 if (bit (thumb_insn_r->arm_insn, 12))
11747 {
11748 /* Handle load/store register offset. */
11749 opcode2 = bits (thumb_insn_r->arm_insn, 9, 10);
11750 if (opcode2 >= 12 && opcode2 <= 15)
11751 {
11752 /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH. */
11753 reg_src1 = bits (thumb_insn_r->arm_insn,0, 2);
11754 record_buf[0] = reg_src1;
11755 thumb_insn_r->reg_rec_count = 1;
11756 }
11757 else if (opcode2 >= 8 && opcode2 <= 10)
11758 {
11759 /* STR(2), STRB(2), STRH(2) . */
11760 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11761 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
11762 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11763 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11764 if (8 == opcode2)
11765 record_buf_mem[0] = 4; /* STR (2). */
11766 else if (10 == opcode2)
11767 record_buf_mem[0] = 1; /* STRB (2). */
11768 else if (9 == opcode2)
11769 record_buf_mem[0] = 2; /* STRH (2). */
11770 record_buf_mem[1] = u_regval[0] + u_regval[1];
11771 thumb_insn_r->mem_rec_count = 1;
11772 }
11773 }
11774 else if (bit (thumb_insn_r->arm_insn, 11))
11775 {
11776 /* Handle load from literal pool. */
11777 /* LDR(3). */
11778 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11779 record_buf[0] = reg_src1;
11780 thumb_insn_r->reg_rec_count = 1;
11781 }
11782 else if (opcode1)
11783 {
11784 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
11785 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
11786 if ((3 == opcode2) && (!opcode3))
11787 {
11788 /* Branch with exchange. */
11789 record_buf[0] = ARM_PS_REGNUM;
11790 thumb_insn_r->reg_rec_count = 1;
11791 }
11792 else
11793 {
1f33efec
YQ
11794 /* Format 8; special data processing insns. */
11795 record_buf[0] = ARM_PS_REGNUM;
11796 record_buf[1] = (bit (thumb_insn_r->arm_insn, 7) << 3
11797 | bits (thumb_insn_r->arm_insn, 0, 2));
72508ac0
PO
11798 thumb_insn_r->reg_rec_count = 2;
11799 }
11800 }
11801 else
11802 {
11803 /* Format 5; data processing insns. */
11804 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11805 if (bit (thumb_insn_r->arm_insn, 7))
11806 {
11807 reg_src1 = reg_src1 + 8;
11808 }
11809 record_buf[0] = ARM_PS_REGNUM;
11810 record_buf[1] = reg_src1;
11811 thumb_insn_r->reg_rec_count = 2;
11812 }
11813
11814 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11815 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11816 record_buf_mem);
11817
11818 return 0;
11819}
11820
11821/* Handling opcode 001 insns. */
11822
11823static int
11824thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
11825{
11826 struct regcache *reg_cache = thumb_insn_r->regcache;
11827 uint32_t record_buf[8], record_buf_mem[8];
11828
11829 uint32_t reg_src1 = 0;
11830 uint32_t opcode = 0, immed_5 = 0;
11831
11832 ULONGEST u_regval = 0;
11833
11834 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11835
11836 if (opcode)
11837 {
11838 /* LDR(1). */
11839 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11840 record_buf[0] = reg_src1;
11841 thumb_insn_r->reg_rec_count = 1;
11842 }
11843 else
11844 {
11845 /* STR(1). */
11846 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11847 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11848 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11849 record_buf_mem[0] = 4;
11850 record_buf_mem[1] = u_regval + (immed_5 * 4);
11851 thumb_insn_r->mem_rec_count = 1;
11852 }
11853
11854 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11855 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11856 record_buf_mem);
11857
11858 return 0;
11859}
11860
11861/* Handling opcode 100 insns. */
11862
11863static int
11864thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
11865{
11866 struct regcache *reg_cache = thumb_insn_r->regcache;
11867 uint32_t record_buf[8], record_buf_mem[8];
11868
11869 uint32_t reg_src1 = 0;
11870 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
11871
11872 ULONGEST u_regval = 0;
11873
11874 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11875
11876 if (3 == opcode)
11877 {
11878 /* LDR(4). */
11879 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11880 record_buf[0] = reg_src1;
11881 thumb_insn_r->reg_rec_count = 1;
11882 }
11883 else if (1 == opcode)
11884 {
11885 /* LDRH(1). */
11886 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11887 record_buf[0] = reg_src1;
11888 thumb_insn_r->reg_rec_count = 1;
11889 }
11890 else if (2 == opcode)
11891 {
11892 /* STR(3). */
11893 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
11894 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
11895 record_buf_mem[0] = 4;
11896 record_buf_mem[1] = u_regval + (immed_8 * 4);
11897 thumb_insn_r->mem_rec_count = 1;
11898 }
11899 else if (0 == opcode)
11900 {
11901 /* STRH(1). */
11902 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11903 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11904 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11905 record_buf_mem[0] = 2;
11906 record_buf_mem[1] = u_regval + (immed_5 * 2);
11907 thumb_insn_r->mem_rec_count = 1;
11908 }
11909
11910 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11911 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11912 record_buf_mem);
11913
11914 return 0;
11915}
11916
11917/* Handling opcode 101 insns. */
11918
11919static int
11920thumb_record_misc (insn_decode_record *thumb_insn_r)
11921{
11922 struct regcache *reg_cache = thumb_insn_r->regcache;
11923
11924 uint32_t opcode = 0, opcode1 = 0, opcode2 = 0;
11925 uint32_t register_bits = 0, register_count = 0;
bec2ab5a 11926 uint32_t index = 0, start_address = 0;
72508ac0
PO
11927 uint32_t record_buf[24], record_buf_mem[48];
11928 uint32_t reg_src1;
11929
11930 ULONGEST u_regval = 0;
11931
11932 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11933 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
11934 opcode2 = bits (thumb_insn_r->arm_insn, 9, 12);
11935
11936 if (14 == opcode2)
11937 {
11938 /* POP. */
11939 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
11940 while (register_bits)
f969241e
OJ
11941 {
11942 if (register_bits & 0x00000001)
11943 record_buf[index++] = register_count;
11944 register_bits = register_bits >> 1;
11945 register_count++;
11946 }
11947 record_buf[index++] = ARM_PS_REGNUM;
11948 record_buf[index++] = ARM_SP_REGNUM;
11949 thumb_insn_r->reg_rec_count = index;
72508ac0
PO
11950 }
11951 else if (10 == opcode2)
11952 {
11953 /* PUSH. */
11954 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
9904a494 11955 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
72508ac0
PO
11956 while (register_bits)
11957 {
11958 if (register_bits & 0x00000001)
11959 register_count++;
11960 register_bits = register_bits >> 1;
11961 }
11962 start_address = u_regval - \
11963 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
11964 thumb_insn_r->mem_rec_count = register_count;
11965 while (register_count)
11966 {
11967 record_buf_mem[(register_count * 2) - 1] = start_address;
11968 record_buf_mem[(register_count * 2) - 2] = 4;
11969 start_address = start_address + 4;
11970 register_count--;
11971 }
11972 record_buf[0] = ARM_SP_REGNUM;
11973 thumb_insn_r->reg_rec_count = 1;
11974 }
11975 else if (0x1E == opcode1)
11976 {
11977 /* BKPT insn. */
11978 /* Handle enhanced software breakpoint insn, BKPT. */
11979 /* CPSR is changed to be executed in ARM state, disabling normal
11980 interrupts, entering abort mode. */
11981 /* According to high vector configuration PC is set. */
11982 /* User hits breakpoint and type reverse, in that case, we need to go back with
11983 previous CPSR and Program Counter. */
11984 record_buf[0] = ARM_PS_REGNUM;
11985 record_buf[1] = ARM_LR_REGNUM;
11986 thumb_insn_r->reg_rec_count = 2;
11987 /* We need to save SPSR value, which is not yet done. */
11988 printf_unfiltered (_("Process record does not support instruction "
11989 "0x%0x at address %s.\n"),
11990 thumb_insn_r->arm_insn,
11991 paddress (thumb_insn_r->gdbarch,
11992 thumb_insn_r->this_addr));
11993 return -1;
11994 }
11995 else if ((0 == opcode) || (1 == opcode))
11996 {
11997 /* ADD(5), ADD(6). */
11998 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11999 record_buf[0] = reg_src1;
12000 thumb_insn_r->reg_rec_count = 1;
12001 }
12002 else if (2 == opcode)
12003 {
12004 /* ADD(7), SUB(4). */
12005 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12006 record_buf[0] = ARM_SP_REGNUM;
12007 thumb_insn_r->reg_rec_count = 1;
12008 }
12009
12010 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12011 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12012 record_buf_mem);
12013
12014 return 0;
12015}
12016
12017/* Handling opcode 110 insns. */
12018
12019static int
12020thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
12021{
12022 struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
12023 struct regcache *reg_cache = thumb_insn_r->regcache;
12024
12025 uint32_t ret = 0; /* function return value: -1:record failure ; 0:success */
12026 uint32_t reg_src1 = 0;
12027 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
bec2ab5a 12028 uint32_t index = 0, start_address = 0;
72508ac0
PO
12029 uint32_t record_buf[24], record_buf_mem[48];
12030
12031 ULONGEST u_regval = 0;
12032
12033 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12034 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
12035
12036 if (1 == opcode2)
12037 {
12038
12039 /* LDMIA. */
12040 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12041 /* Get Rn. */
12042 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12043 while (register_bits)
12044 {
12045 if (register_bits & 0x00000001)
f969241e 12046 record_buf[index++] = register_count;
72508ac0 12047 register_bits = register_bits >> 1;
f969241e 12048 register_count++;
72508ac0 12049 }
f969241e
OJ
12050 record_buf[index++] = reg_src1;
12051 thumb_insn_r->reg_rec_count = index;
72508ac0
PO
12052 }
12053 else if (0 == opcode2)
12054 {
12055 /* It handles both STMIA. */
12056 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12057 /* Get Rn. */
12058 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12059 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12060 while (register_bits)
12061 {
12062 if (register_bits & 0x00000001)
12063 register_count++;
12064 register_bits = register_bits >> 1;
12065 }
12066 start_address = u_regval;
12067 thumb_insn_r->mem_rec_count = register_count;
12068 while (register_count)
12069 {
12070 record_buf_mem[(register_count * 2) - 1] = start_address;
12071 record_buf_mem[(register_count * 2) - 2] = 4;
12072 start_address = start_address + 4;
12073 register_count--;
12074 }
12075 }
12076 else if (0x1F == opcode1)
12077 {
12078 /* Handle arm syscall insn. */
97dfe206 12079 if (tdep->arm_syscall_record != NULL)
72508ac0 12080 {
97dfe206
OJ
12081 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
12082 ret = tdep->arm_syscall_record (reg_cache, u_regval);
72508ac0
PO
12083 }
12084 else
12085 {
12086 printf_unfiltered (_("no syscall record support\n"));
12087 return -1;
12088 }
12089 }
12090
12091 /* B (1), conditional branch is automatically taken care in process_record,
12092 as PC is saved there. */
12093
12094 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12095 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12096 record_buf_mem);
12097
12098 return ret;
12099}
12100
12101/* Handling opcode 111 insns. */
12102
12103static int
12104thumb_record_branch (insn_decode_record *thumb_insn_r)
12105{
12106 uint32_t record_buf[8];
12107 uint32_t bits_h = 0;
12108
12109 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
12110
12111 if (2 == bits_h || 3 == bits_h)
12112 {
12113 /* BL */
12114 record_buf[0] = ARM_LR_REGNUM;
12115 thumb_insn_r->reg_rec_count = 1;
12116 }
12117 else if (1 == bits_h)
12118 {
12119 /* BLX(1). */
12120 record_buf[0] = ARM_PS_REGNUM;
12121 record_buf[1] = ARM_LR_REGNUM;
12122 thumb_insn_r->reg_rec_count = 2;
12123 }
12124
12125 /* B(2) is automatically taken care in process_record, as PC is
12126 saved there. */
12127
12128 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12129
12130 return 0;
12131}
12132
c6ec2b30
OJ
12133/* Handler for thumb2 load/store multiple instructions. */
12134
12135static int
12136thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
12137{
12138 struct regcache *reg_cache = thumb2_insn_r->regcache;
12139
12140 uint32_t reg_rn, op;
12141 uint32_t register_bits = 0, register_count = 0;
12142 uint32_t index = 0, start_address = 0;
12143 uint32_t record_buf[24], record_buf_mem[48];
12144
12145 ULONGEST u_regval = 0;
12146
12147 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12148 op = bits (thumb2_insn_r->arm_insn, 23, 24);
12149
12150 if (0 == op || 3 == op)
12151 {
12152 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12153 {
12154 /* Handle RFE instruction. */
12155 record_buf[0] = ARM_PS_REGNUM;
12156 thumb2_insn_r->reg_rec_count = 1;
12157 }
12158 else
12159 {
12160 /* Handle SRS instruction after reading banked SP. */
12161 return arm_record_unsupported_insn (thumb2_insn_r);
12162 }
12163 }
12164 else if (1 == op || 2 == op)
12165 {
12166 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12167 {
12168 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
12169 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12170 while (register_bits)
12171 {
12172 if (register_bits & 0x00000001)
12173 record_buf[index++] = register_count;
12174
12175 register_count++;
12176 register_bits = register_bits >> 1;
12177 }
12178 record_buf[index++] = reg_rn;
12179 record_buf[index++] = ARM_PS_REGNUM;
12180 thumb2_insn_r->reg_rec_count = index;
12181 }
12182 else
12183 {
12184 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
12185 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12186 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12187 while (register_bits)
12188 {
12189 if (register_bits & 0x00000001)
12190 register_count++;
12191
12192 register_bits = register_bits >> 1;
12193 }
12194
12195 if (1 == op)
12196 {
12197 /* Start address calculation for LDMDB/LDMEA. */
12198 start_address = u_regval;
12199 }
12200 else if (2 == op)
12201 {
12202 /* Start address calculation for LDMDB/LDMEA. */
12203 start_address = u_regval - register_count * 4;
12204 }
12205
12206 thumb2_insn_r->mem_rec_count = register_count;
12207 while (register_count)
12208 {
12209 record_buf_mem[register_count * 2 - 1] = start_address;
12210 record_buf_mem[register_count * 2 - 2] = 4;
12211 start_address = start_address + 4;
12212 register_count--;
12213 }
12214 record_buf[0] = reg_rn;
12215 record_buf[1] = ARM_PS_REGNUM;
12216 thumb2_insn_r->reg_rec_count = 2;
12217 }
12218 }
12219
12220 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12221 record_buf_mem);
12222 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12223 record_buf);
12224 return ARM_RECORD_SUCCESS;
12225}
12226
12227/* Handler for thumb2 load/store (dual/exclusive) and table branch
12228 instructions. */
12229
12230static int
12231thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
12232{
12233 struct regcache *reg_cache = thumb2_insn_r->regcache;
12234
12235 uint32_t reg_rd, reg_rn, offset_imm;
12236 uint32_t reg_dest1, reg_dest2;
12237 uint32_t address, offset_addr;
12238 uint32_t record_buf[8], record_buf_mem[8];
12239 uint32_t op1, op2, op3;
c6ec2b30
OJ
12240
12241 ULONGEST u_regval[2];
12242
12243 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
12244 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
12245 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
12246
12247 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12248 {
12249 if(!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
12250 {
12251 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
12252 record_buf[0] = reg_dest1;
12253 record_buf[1] = ARM_PS_REGNUM;
12254 thumb2_insn_r->reg_rec_count = 2;
12255 }
12256
12257 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
12258 {
12259 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12260 record_buf[2] = reg_dest2;
12261 thumb2_insn_r->reg_rec_count = 3;
12262 }
12263 }
12264 else
12265 {
12266 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12267 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12268
12269 if (0 == op1 && 0 == op2)
12270 {
12271 /* Handle STREX. */
12272 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12273 address = u_regval[0] + (offset_imm * 4);
12274 record_buf_mem[0] = 4;
12275 record_buf_mem[1] = address;
12276 thumb2_insn_r->mem_rec_count = 1;
12277 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12278 record_buf[0] = reg_rd;
12279 thumb2_insn_r->reg_rec_count = 1;
12280 }
12281 else if (1 == op1 && 0 == op2)
12282 {
12283 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12284 record_buf[0] = reg_rd;
12285 thumb2_insn_r->reg_rec_count = 1;
12286 address = u_regval[0];
12287 record_buf_mem[1] = address;
12288
12289 if (4 == op3)
12290 {
12291 /* Handle STREXB. */
12292 record_buf_mem[0] = 1;
12293 thumb2_insn_r->mem_rec_count = 1;
12294 }
12295 else if (5 == op3)
12296 {
12297 /* Handle STREXH. */
12298 record_buf_mem[0] = 2 ;
12299 thumb2_insn_r->mem_rec_count = 1;
12300 }
12301 else if (7 == op3)
12302 {
12303 /* Handle STREXD. */
12304 address = u_regval[0];
12305 record_buf_mem[0] = 4;
12306 record_buf_mem[2] = 4;
12307 record_buf_mem[3] = address + 4;
12308 thumb2_insn_r->mem_rec_count = 2;
12309 }
12310 }
12311 else
12312 {
12313 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12314
12315 if (bit (thumb2_insn_r->arm_insn, 24))
12316 {
12317 if (bit (thumb2_insn_r->arm_insn, 23))
12318 offset_addr = u_regval[0] + (offset_imm * 4);
12319 else
12320 offset_addr = u_regval[0] - (offset_imm * 4);
12321
12322 address = offset_addr;
12323 }
12324 else
12325 address = u_regval[0];
12326
12327 record_buf_mem[0] = 4;
12328 record_buf_mem[1] = address;
12329 record_buf_mem[2] = 4;
12330 record_buf_mem[3] = address + 4;
12331 thumb2_insn_r->mem_rec_count = 2;
12332 record_buf[0] = reg_rn;
12333 thumb2_insn_r->reg_rec_count = 1;
12334 }
12335 }
12336
12337 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12338 record_buf);
12339 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12340 record_buf_mem);
12341 return ARM_RECORD_SUCCESS;
12342}
12343
12344/* Handler for thumb2 data processing (shift register and modified immediate)
12345 instructions. */
12346
12347static int
12348thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
12349{
12350 uint32_t reg_rd, op;
12351 uint32_t record_buf[8];
12352
12353 op = bits (thumb2_insn_r->arm_insn, 21, 24);
12354 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12355
12356 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
12357 {
12358 record_buf[0] = ARM_PS_REGNUM;
12359 thumb2_insn_r->reg_rec_count = 1;
12360 }
12361 else
12362 {
12363 record_buf[0] = reg_rd;
12364 record_buf[1] = ARM_PS_REGNUM;
12365 thumb2_insn_r->reg_rec_count = 2;
12366 }
12367
12368 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12369 record_buf);
12370 return ARM_RECORD_SUCCESS;
12371}
12372
12373/* Generic handler for thumb2 instructions which effect destination and PS
12374 registers. */
12375
12376static int
12377thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
12378{
12379 uint32_t reg_rd;
12380 uint32_t record_buf[8];
12381
12382 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12383
12384 record_buf[0] = reg_rd;
12385 record_buf[1] = ARM_PS_REGNUM;
12386 thumb2_insn_r->reg_rec_count = 2;
12387
12388 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12389 record_buf);
12390 return ARM_RECORD_SUCCESS;
12391}
12392
12393/* Handler for thumb2 branch and miscellaneous control instructions. */
12394
12395static int
12396thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
12397{
12398 uint32_t op, op1, op2;
12399 uint32_t record_buf[8];
12400
12401 op = bits (thumb2_insn_r->arm_insn, 20, 26);
12402 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
12403 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12404
12405 /* Handle MSR insn. */
12406 if (!(op1 & 0x2) && 0x38 == op)
12407 {
12408 if (!(op2 & 0x3))
12409 {
12410 /* CPSR is going to be changed. */
12411 record_buf[0] = ARM_PS_REGNUM;
12412 thumb2_insn_r->reg_rec_count = 1;
12413 }
12414 else
12415 {
12416 arm_record_unsupported_insn(thumb2_insn_r);
12417 return -1;
12418 }
12419 }
12420 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
12421 {
12422 /* BLX. */
12423 record_buf[0] = ARM_PS_REGNUM;
12424 record_buf[1] = ARM_LR_REGNUM;
12425 thumb2_insn_r->reg_rec_count = 2;
12426 }
12427
12428 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12429 record_buf);
12430 return ARM_RECORD_SUCCESS;
12431}
12432
12433/* Handler for thumb2 store single data item instructions. */
12434
12435static int
12436thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
12437{
12438 struct regcache *reg_cache = thumb2_insn_r->regcache;
12439
12440 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
12441 uint32_t address, offset_addr;
12442 uint32_t record_buf[8], record_buf_mem[8];
12443 uint32_t op1, op2;
12444
12445 ULONGEST u_regval[2];
12446
12447 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
12448 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
12449 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12450 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12451
12452 if (bit (thumb2_insn_r->arm_insn, 23))
12453 {
12454 /* T2 encoding. */
12455 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
12456 offset_addr = u_regval[0] + offset_imm;
12457 address = offset_addr;
12458 }
12459 else
12460 {
12461 /* T3 encoding. */
12462 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
12463 {
12464 /* Handle STRB (register). */
12465 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
12466 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
12467 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
12468 offset_addr = u_regval[1] << shift_imm;
12469 address = u_regval[0] + offset_addr;
12470 }
12471 else
12472 {
12473 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12474 if (bit (thumb2_insn_r->arm_insn, 10))
12475 {
12476 if (bit (thumb2_insn_r->arm_insn, 9))
12477 offset_addr = u_regval[0] + offset_imm;
12478 else
12479 offset_addr = u_regval[0] - offset_imm;
12480
12481 address = offset_addr;
12482 }
12483 else
12484 address = u_regval[0];
12485 }
12486 }
12487
12488 switch (op1)
12489 {
12490 /* Store byte instructions. */
12491 case 4:
12492 case 0:
12493 record_buf_mem[0] = 1;
12494 break;
12495 /* Store half word instructions. */
12496 case 1:
12497 case 5:
12498 record_buf_mem[0] = 2;
12499 break;
12500 /* Store word instructions. */
12501 case 2:
12502 case 6:
12503 record_buf_mem[0] = 4;
12504 break;
12505
12506 default:
12507 gdb_assert_not_reached ("no decoding pattern found");
12508 break;
12509 }
12510
12511 record_buf_mem[1] = address;
12512 thumb2_insn_r->mem_rec_count = 1;
12513 record_buf[0] = reg_rn;
12514 thumb2_insn_r->reg_rec_count = 1;
12515
12516 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12517 record_buf);
12518 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12519 record_buf_mem);
12520 return ARM_RECORD_SUCCESS;
12521}
12522
12523/* Handler for thumb2 load memory hints instructions. */
12524
12525static int
12526thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
12527{
12528 uint32_t record_buf[8];
12529 uint32_t reg_rt, reg_rn;
12530
12531 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
12532 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12533
12534 if (ARM_PC_REGNUM != reg_rt)
12535 {
12536 record_buf[0] = reg_rt;
12537 record_buf[1] = reg_rn;
12538 record_buf[2] = ARM_PS_REGNUM;
12539 thumb2_insn_r->reg_rec_count = 3;
12540
12541 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12542 record_buf);
12543 return ARM_RECORD_SUCCESS;
12544 }
12545
12546 return ARM_RECORD_FAILURE;
12547}
12548
12549/* Handler for thumb2 load word instructions. */
12550
12551static int
12552thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
12553{
c6ec2b30
OJ
12554 uint32_t record_buf[8];
12555
12556 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
12557 record_buf[1] = ARM_PS_REGNUM;
12558 thumb2_insn_r->reg_rec_count = 2;
12559
12560 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12561 record_buf);
12562 return ARM_RECORD_SUCCESS;
12563}
12564
12565/* Handler for thumb2 long multiply, long multiply accumulate, and
12566 divide instructions. */
12567
12568static int
12569thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
12570{
12571 uint32_t opcode1 = 0, opcode2 = 0;
12572 uint32_t record_buf[8];
c6ec2b30
OJ
12573
12574 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
12575 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
12576
12577 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
12578 {
12579 /* Handle SMULL, UMULL, SMULAL. */
12580 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
12581 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12582 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12583 record_buf[2] = ARM_PS_REGNUM;
12584 thumb2_insn_r->reg_rec_count = 3;
12585 }
12586 else if (1 == opcode1 || 3 == opcode2)
12587 {
12588 /* Handle SDIV and UDIV. */
12589 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12590 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12591 record_buf[2] = ARM_PS_REGNUM;
12592 thumb2_insn_r->reg_rec_count = 3;
12593 }
12594 else
12595 return ARM_RECORD_FAILURE;
12596
12597 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12598 record_buf);
12599 return ARM_RECORD_SUCCESS;
12600}
12601
60cc5e93
OJ
12602/* Record handler for thumb32 coprocessor instructions. */
12603
12604static int
12605thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
12606{
12607 if (bit (thumb2_insn_r->arm_insn, 25))
12608 return arm_record_coproc_data_proc (thumb2_insn_r);
12609 else
12610 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
12611}
12612
1e1b6563
OJ
12613/* Record handler for advance SIMD structure load/store instructions. */
12614
12615static int
12616thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
12617{
12618 struct regcache *reg_cache = thumb2_insn_r->regcache;
12619 uint32_t l_bit, a_bit, b_bits;
12620 uint32_t record_buf[128], record_buf_mem[128];
bec2ab5a 12621 uint32_t reg_rn, reg_vd, address, f_elem;
1e1b6563
OJ
12622 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
12623 uint8_t f_ebytes;
12624
12625 l_bit = bit (thumb2_insn_r->arm_insn, 21);
12626 a_bit = bit (thumb2_insn_r->arm_insn, 23);
12627 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
12628 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12629 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
12630 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
12631 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
1e1b6563
OJ
12632 f_elem = 8 / f_ebytes;
12633
12634 if (!l_bit)
12635 {
12636 ULONGEST u_regval = 0;
12637 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12638 address = u_regval;
12639
12640 if (!a_bit)
12641 {
12642 /* Handle VST1. */
12643 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12644 {
12645 if (b_bits == 0x07)
12646 bf_regs = 1;
12647 else if (b_bits == 0x0a)
12648 bf_regs = 2;
12649 else if (b_bits == 0x06)
12650 bf_regs = 3;
12651 else if (b_bits == 0x02)
12652 bf_regs = 4;
12653 else
12654 bf_regs = 0;
12655
12656 for (index_r = 0; index_r < bf_regs; index_r++)
12657 {
12658 for (index_e = 0; index_e < f_elem; index_e++)
12659 {
12660 record_buf_mem[index_m++] = f_ebytes;
12661 record_buf_mem[index_m++] = address;
12662 address = address + f_ebytes;
12663 thumb2_insn_r->mem_rec_count += 1;
12664 }
12665 }
12666 }
12667 /* Handle VST2. */
12668 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12669 {
12670 if (b_bits == 0x09 || b_bits == 0x08)
12671 bf_regs = 1;
12672 else if (b_bits == 0x03)
12673 bf_regs = 2;
12674 else
12675 bf_regs = 0;
12676
12677 for (index_r = 0; index_r < bf_regs; index_r++)
12678 for (index_e = 0; index_e < f_elem; index_e++)
12679 {
12680 for (loop_t = 0; loop_t < 2; loop_t++)
12681 {
12682 record_buf_mem[index_m++] = f_ebytes;
12683 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12684 thumb2_insn_r->mem_rec_count += 1;
12685 }
12686 address = address + (2 * f_ebytes);
12687 }
12688 }
12689 /* Handle VST3. */
12690 else if ((b_bits & 0x0e) == 0x04)
12691 {
12692 for (index_e = 0; index_e < f_elem; index_e++)
12693 {
12694 for (loop_t = 0; loop_t < 3; loop_t++)
12695 {
12696 record_buf_mem[index_m++] = f_ebytes;
12697 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12698 thumb2_insn_r->mem_rec_count += 1;
12699 }
12700 address = address + (3 * f_ebytes);
12701 }
12702 }
12703 /* Handle VST4. */
12704 else if (!(b_bits & 0x0e))
12705 {
12706 for (index_e = 0; index_e < f_elem; index_e++)
12707 {
12708 for (loop_t = 0; loop_t < 4; loop_t++)
12709 {
12710 record_buf_mem[index_m++] = f_ebytes;
12711 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12712 thumb2_insn_r->mem_rec_count += 1;
12713 }
12714 address = address + (4 * f_ebytes);
12715 }
12716 }
12717 }
12718 else
12719 {
12720 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
12721
12722 if (bft_size == 0x00)
12723 f_ebytes = 1;
12724 else if (bft_size == 0x01)
12725 f_ebytes = 2;
12726 else if (bft_size == 0x02)
12727 f_ebytes = 4;
12728 else
12729 f_ebytes = 0;
12730
12731 /* Handle VST1. */
12732 if (!(b_bits & 0x0b) || b_bits == 0x08)
12733 thumb2_insn_r->mem_rec_count = 1;
12734 /* Handle VST2. */
12735 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
12736 thumb2_insn_r->mem_rec_count = 2;
12737 /* Handle VST3. */
12738 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
12739 thumb2_insn_r->mem_rec_count = 3;
12740 /* Handle VST4. */
12741 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
12742 thumb2_insn_r->mem_rec_count = 4;
12743
12744 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
12745 {
12746 record_buf_mem[index_m] = f_ebytes;
12747 record_buf_mem[index_m] = address + (index_m * f_ebytes);
12748 }
12749 }
12750 }
12751 else
12752 {
12753 if (!a_bit)
12754 {
12755 /* Handle VLD1. */
12756 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12757 thumb2_insn_r->reg_rec_count = 1;
12758 /* Handle VLD2. */
12759 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12760 thumb2_insn_r->reg_rec_count = 2;
12761 /* Handle VLD3. */
12762 else if ((b_bits & 0x0e) == 0x04)
12763 thumb2_insn_r->reg_rec_count = 3;
12764 /* Handle VLD4. */
12765 else if (!(b_bits & 0x0e))
12766 thumb2_insn_r->reg_rec_count = 4;
12767 }
12768 else
12769 {
12770 /* Handle VLD1. */
12771 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
12772 thumb2_insn_r->reg_rec_count = 1;
12773 /* Handle VLD2. */
12774 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
12775 thumb2_insn_r->reg_rec_count = 2;
12776 /* Handle VLD3. */
12777 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
12778 thumb2_insn_r->reg_rec_count = 3;
12779 /* Handle VLD4. */
12780 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
12781 thumb2_insn_r->reg_rec_count = 4;
12782
12783 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
12784 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
12785 }
12786 }
12787
12788 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
12789 {
12790 record_buf[index_r] = reg_rn;
12791 thumb2_insn_r->reg_rec_count += 1;
12792 }
12793
12794 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12795 record_buf);
12796 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12797 record_buf_mem);
12798 return 0;
12799}
12800
c6ec2b30
OJ
12801/* Decodes thumb2 instruction type and invokes its record handler. */
12802
12803static unsigned int
12804thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
12805{
12806 uint32_t op, op1, op2;
12807
12808 op = bit (thumb2_insn_r->arm_insn, 15);
12809 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
12810 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
12811
12812 if (op1 == 0x01)
12813 {
12814 if (!(op2 & 0x64 ))
12815 {
12816 /* Load/store multiple instruction. */
12817 return thumb2_record_ld_st_multiple (thumb2_insn_r);
12818 }
12819 else if (!((op2 & 0x64) ^ 0x04))
12820 {
12821 /* Load/store (dual/exclusive) and table branch instruction. */
12822 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
12823 }
12824 else if (!((op2 & 0x20) ^ 0x20))
12825 {
12826 /* Data-processing (shifted register). */
12827 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12828 }
12829 else if (op2 & 0x40)
12830 {
12831 /* Co-processor instructions. */
60cc5e93 12832 return thumb2_record_coproc_insn (thumb2_insn_r);
c6ec2b30
OJ
12833 }
12834 }
12835 else if (op1 == 0x02)
12836 {
12837 if (op)
12838 {
12839 /* Branches and miscellaneous control instructions. */
12840 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
12841 }
12842 else if (op2 & 0x20)
12843 {
12844 /* Data-processing (plain binary immediate) instruction. */
12845 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12846 }
12847 else
12848 {
12849 /* Data-processing (modified immediate). */
12850 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12851 }
12852 }
12853 else if (op1 == 0x03)
12854 {
12855 if (!(op2 & 0x71 ))
12856 {
12857 /* Store single data item. */
12858 return thumb2_record_str_single_data (thumb2_insn_r);
12859 }
12860 else if (!((op2 & 0x71) ^ 0x10))
12861 {
12862 /* Advanced SIMD or structure load/store instructions. */
1e1b6563 12863 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
c6ec2b30
OJ
12864 }
12865 else if (!((op2 & 0x67) ^ 0x01))
12866 {
12867 /* Load byte, memory hints instruction. */
12868 return thumb2_record_ld_mem_hints (thumb2_insn_r);
12869 }
12870 else if (!((op2 & 0x67) ^ 0x03))
12871 {
12872 /* Load halfword, memory hints instruction. */
12873 return thumb2_record_ld_mem_hints (thumb2_insn_r);
12874 }
12875 else if (!((op2 & 0x67) ^ 0x05))
12876 {
12877 /* Load word instruction. */
12878 return thumb2_record_ld_word (thumb2_insn_r);
12879 }
12880 else if (!((op2 & 0x70) ^ 0x20))
12881 {
12882 /* Data-processing (register) instruction. */
12883 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12884 }
12885 else if (!((op2 & 0x78) ^ 0x30))
12886 {
12887 /* Multiply, multiply accumulate, abs diff instruction. */
12888 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12889 }
12890 else if (!((op2 & 0x78) ^ 0x38))
12891 {
12892 /* Long multiply, long multiply accumulate, and divide. */
12893 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
12894 }
12895 else if (op2 & 0x40)
12896 {
12897 /* Co-processor instructions. */
60cc5e93 12898 return thumb2_record_coproc_insn (thumb2_insn_r);
c6ec2b30
OJ
12899 }
12900 }
12901
12902 return -1;
12903}
72508ac0
PO
12904
12905/* Extracts arm/thumb/thumb2 insn depending on the size, and returns 0 on success
12906and positive val on fauilure. */
12907
12908static int
12909extract_arm_insn (insn_decode_record *insn_record, uint32_t insn_size)
12910{
12911 gdb_byte buf[insn_size];
12912
12913 memset (&buf[0], 0, insn_size);
12914
12915 if (target_read_memory (insn_record->this_addr, &buf[0], insn_size))
12916 return 1;
12917 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
12918 insn_size,
2959fed9 12919 gdbarch_byte_order_for_code (insn_record->gdbarch));
72508ac0
PO
12920 return 0;
12921}
12922
12923typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
12924
12925/* Decode arm/thumb insn depending on condition cods and opcodes; and
12926 dispatch it. */
12927
12928static int
12929decode_insn (insn_decode_record *arm_record, record_type_t record_type,
01e57735 12930 uint32_t insn_size)
72508ac0
PO
12931{
12932
01e57735
YQ
12933 /* (Starting from numerical 0); bits 25, 26, 27 decodes type of arm
12934 instruction. */
0fa9c223 12935 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
72508ac0
PO
12936 {
12937 arm_record_data_proc_misc_ld_str, /* 000. */
12938 arm_record_data_proc_imm, /* 001. */
12939 arm_record_ld_st_imm_offset, /* 010. */
12940 arm_record_ld_st_reg_offset, /* 011. */
12941 arm_record_ld_st_multiple, /* 100. */
12942 arm_record_b_bl, /* 101. */
60cc5e93 12943 arm_record_asimd_vfp_coproc, /* 110. */
72508ac0
PO
12944 arm_record_coproc_data_proc /* 111. */
12945 };
12946
01e57735
YQ
12947 /* (Starting from numerical 0); bits 13,14,15 decodes type of thumb
12948 instruction. */
0fa9c223 12949 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
72508ac0
PO
12950 { \
12951 thumb_record_shift_add_sub, /* 000. */
12952 thumb_record_add_sub_cmp_mov, /* 001. */
12953 thumb_record_ld_st_reg_offset, /* 010. */
12954 thumb_record_ld_st_imm_offset, /* 011. */
12955 thumb_record_ld_st_stack, /* 100. */
12956 thumb_record_misc, /* 101. */
12957 thumb_record_ldm_stm_swi, /* 110. */
12958 thumb_record_branch /* 111. */
12959 };
12960
12961 uint32_t ret = 0; /* return value: negative:failure 0:success. */
12962 uint32_t insn_id = 0;
12963
12964 if (extract_arm_insn (arm_record, insn_size))
12965 {
12966 if (record_debug)
01e57735
YQ
12967 {
12968 printf_unfiltered (_("Process record: error reading memory at "
12969 "addr %s len = %d.\n"),
12970 paddress (arm_record->gdbarch,
12971 arm_record->this_addr), insn_size);
12972 }
72508ac0
PO
12973 return -1;
12974 }
12975 else if (ARM_RECORD == record_type)
12976 {
12977 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
12978 insn_id = bits (arm_record->arm_insn, 25, 27);
ca92db2d
YQ
12979
12980 if (arm_record->cond == 0xf)
12981 ret = arm_record_extension_space (arm_record);
12982 else
01e57735 12983 {
ca92db2d
YQ
12984 /* If this insn has fallen into extension space
12985 then we need not decode it anymore. */
01e57735
YQ
12986 ret = arm_handle_insn[insn_id] (arm_record);
12987 }
ca92db2d
YQ
12988 if (ret != ARM_RECORD_SUCCESS)
12989 {
12990 arm_record_unsupported_insn (arm_record);
12991 ret = -1;
12992 }
72508ac0
PO
12993 }
12994 else if (THUMB_RECORD == record_type)
12995 {
12996 /* As thumb does not have condition codes, we set negative. */
12997 arm_record->cond = -1;
12998 insn_id = bits (arm_record->arm_insn, 13, 15);
12999 ret = thumb_handle_insn[insn_id] (arm_record);
ca92db2d
YQ
13000 if (ret != ARM_RECORD_SUCCESS)
13001 {
13002 arm_record_unsupported_insn (arm_record);
13003 ret = -1;
13004 }
72508ac0
PO
13005 }
13006 else if (THUMB2_RECORD == record_type)
13007 {
c6ec2b30
OJ
13008 /* As thumb does not have condition codes, we set negative. */
13009 arm_record->cond = -1;
13010
13011 /* Swap first half of 32bit thumb instruction with second half. */
13012 arm_record->arm_insn
01e57735 13013 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
c6ec2b30 13014
ca92db2d 13015 ret = thumb2_record_decode_insn_handler (arm_record);
c6ec2b30 13016
ca92db2d 13017 if (ret != ARM_RECORD_SUCCESS)
01e57735
YQ
13018 {
13019 arm_record_unsupported_insn (arm_record);
13020 ret = -1;
13021 }
72508ac0
PO
13022 }
13023 else
13024 {
13025 /* Throw assertion. */
13026 gdb_assert_not_reached ("not a valid instruction, could not decode");
13027 }
13028
13029 return ret;
13030}
13031
13032
13033/* Cleans up local record registers and memory allocations. */
13034
13035static void
13036deallocate_reg_mem (insn_decode_record *record)
13037{
13038 xfree (record->arm_regs);
13039 xfree (record->arm_mems);
13040}
13041
13042
01e57735 13043/* Parse the current instruction and record the values of the registers and
72508ac0
PO
13044 memory that will be changed in current instruction to record_arch_list".
13045 Return -1 if something is wrong. */
13046
13047int
01e57735
YQ
13048arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
13049 CORE_ADDR insn_addr)
72508ac0
PO
13050{
13051
72508ac0
PO
13052 uint32_t no_of_rec = 0;
13053 uint32_t ret = 0; /* return value: -1:record failure ; 0:success */
13054 ULONGEST t_bit = 0, insn_id = 0;
13055
13056 ULONGEST u_regval = 0;
13057
13058 insn_decode_record arm_record;
13059
13060 memset (&arm_record, 0, sizeof (insn_decode_record));
13061 arm_record.regcache = regcache;
13062 arm_record.this_addr = insn_addr;
13063 arm_record.gdbarch = gdbarch;
13064
13065
13066 if (record_debug > 1)
13067 {
13068 fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
01e57735 13069 "addr = %s\n",
72508ac0
PO
13070 paddress (gdbarch, arm_record.this_addr));
13071 }
13072
13073 if (extract_arm_insn (&arm_record, 2))
13074 {
13075 if (record_debug)
01e57735
YQ
13076 {
13077 printf_unfiltered (_("Process record: error reading memory at "
13078 "addr %s len = %d.\n"),
13079 paddress (arm_record.gdbarch,
13080 arm_record.this_addr), 2);
13081 }
72508ac0
PO
13082 return -1;
13083 }
13084
13085 /* Check the insn, whether it is thumb or arm one. */
13086
13087 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
13088 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
13089
13090
13091 if (!(u_regval & t_bit))
13092 {
13093 /* We are decoding arm insn. */
13094 ret = decode_insn (&arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
13095 }
13096 else
13097 {
13098 insn_id = bits (arm_record.arm_insn, 11, 15);
13099 /* is it thumb2 insn? */
13100 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
01e57735
YQ
13101 {
13102 ret = decode_insn (&arm_record, THUMB2_RECORD,
13103 THUMB2_INSN_SIZE_BYTES);
13104 }
72508ac0 13105 else
01e57735
YQ
13106 {
13107 /* We are decoding thumb insn. */
13108 ret = decode_insn (&arm_record, THUMB_RECORD, THUMB_INSN_SIZE_BYTES);
13109 }
72508ac0
PO
13110 }
13111
13112 if (0 == ret)
13113 {
13114 /* Record registers. */
25ea693b 13115 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
72508ac0 13116 if (arm_record.arm_regs)
01e57735
YQ
13117 {
13118 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
13119 {
13120 if (record_full_arch_list_add_reg
25ea693b 13121 (arm_record.regcache , arm_record.arm_regs[no_of_rec]))
01e57735
YQ
13122 ret = -1;
13123 }
13124 }
72508ac0
PO
13125 /* Record memories. */
13126 if (arm_record.arm_mems)
01e57735
YQ
13127 {
13128 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
13129 {
13130 if (record_full_arch_list_add_mem
13131 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
25ea693b 13132 arm_record.arm_mems[no_of_rec].len))
01e57735
YQ
13133 ret = -1;
13134 }
13135 }
72508ac0 13136
25ea693b 13137 if (record_full_arch_list_add_end ())
01e57735 13138 ret = -1;
72508ac0
PO
13139 }
13140
13141
13142 deallocate_reg_mem (&arm_record);
13143
13144 return ret;
13145}
This page took 1.903673 seconds and 4 git commands to generate.