/* Common target dependent code for GDB on ARM systems.

   Copyright (C) 1988-2016 Free Software Foundation, Inc.

   This file is part of GDB.

   This program is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3 of the License, or
   (at your option) any later version.

   This program is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program.  If not, see <http://www.gnu.org/licenses/>.  */

#include "defs.h"

#include <ctype.h>		/* XXX for isupper ().  */

#include "frame.h"
#include "inferior.h"
#include "infrun.h"
#include "gdbcmd.h"
#include "gdbcore.h"
#include "dis-asm.h"		/* For register styles.  */
#include "regcache.h"
#include "reggroups.h"
#include "doublest.h"
#include "value.h"
#include "arch-utils.h"
#include "osabi.h"
#include "frame-unwind.h"
#include "frame-base.h"
#include "trad-frame.h"
#include "objfiles.h"
#include "dwarf2-frame.h"
#include "gdbtypes.h"
#include "prologue-value.h"
#include "remote.h"
#include "target-descriptions.h"
#include "user-regs.h"
#include "observer.h"

#include "arch/arm.h"
#include "arch/arm-get-next-pcs.h"
#include "arm-tdep.h"
#include "gdb/sim-arm.h"

#include "elf-bfd.h"
#include "coff/internal.h"
#include "elf/arm.h"

#include "vec.h"

#include "record.h"
#include "record-full.h"
#include <algorithm>

#include "features/arm-with-m.c"
#include "features/arm-with-m-fpa-layout.c"
#include "features/arm-with-m-vfp-d16.c"
#include "features/arm-with-iwmmxt.c"
#include "features/arm-with-vfpv2.c"
#include "features/arm-with-vfpv3.c"
#include "features/arm-with-neon.c"

static int arm_debug;

/* Macros for setting and testing a bit in a minimal symbol that marks
   it as a Thumb function.  The MSB of the minimal symbol's "info" field
   is used for this purpose.

   MSYMBOL_SET_SPECIAL	Actually sets the "special" bit.
   MSYMBOL_IS_SPECIAL	Tests the "special" bit in a minimal symbol.  */

#define MSYMBOL_SET_SPECIAL(msym)			\
  MSYMBOL_TARGET_FLAG_1 (msym) = 1

#define MSYMBOL_IS_SPECIAL(msym)			\
  MSYMBOL_TARGET_FLAG_1 (msym)

/* Per-objfile data used for mapping symbols.  */
static const struct objfile_data *arm_objfile_data_key;

struct arm_mapping_symbol
{
  bfd_vma value;
  char type;
};
typedef struct arm_mapping_symbol arm_mapping_symbol_s;
DEF_VEC_O(arm_mapping_symbol_s);

struct arm_per_objfile
{
  VEC(arm_mapping_symbol_s) **section_maps;
};

/* The list of available "set arm ..." and "show arm ..." commands.  */
static struct cmd_list_element *setarmcmdlist = NULL;
static struct cmd_list_element *showarmcmdlist = NULL;

/* The type of floating-point to use.  Keep this in sync with enum
   arm_float_model, and the help string in _initialize_arm_tdep.  */
static const char *const fp_model_strings[] =
{
  "auto",
  "softfpa",
  "fpa",
  "softvfp",
  "vfp",
  NULL
};

/* A variable that can be configured by the user.  */
static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
static const char *current_fp_model = "auto";

/* The ABI to use.  Keep this in sync with arm_abi_kind.  */
static const char *const arm_abi_strings[] =
{
  "auto",
  "APCS",
  "AAPCS",
  NULL
};

/* A variable that can be configured by the user.  */
static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
static const char *arm_abi_string = "auto";

/* The execution mode to assume.  */
static const char *const arm_mode_strings[] =
  {
    "auto",
    "arm",
    "thumb",
    NULL
  };

static const char *arm_fallback_mode_string = "auto";
static const char *arm_force_mode_string = "auto";

/* Internal override of the execution mode.  -1 means no override,
   0 means override to ARM mode, 1 means override to Thumb mode.
   The effect is the same as if arm_force_mode has been set by the
   user (except the internal override has precedence over a user's
   arm_force_mode override).  */
static int arm_override_mode = -1;

/* Number of different reg name sets (options).  */
static int num_disassembly_options;

/* The standard register names, and all the valid aliases for them.  Note
   that `fp', `sp' and `pc' are not added in this alias list, because they
   have been added as builtin user registers in
   std-regs.c:_initialize_frame_reg.  */
static const struct
{
  const char *name;
  int regnum;
} arm_register_aliases[] = {
  /* Basic register numbers.  */
  { "r0", 0 },
  { "r1", 1 },
  { "r2", 2 },
  { "r3", 3 },
  { "r4", 4 },
  { "r5", 5 },
  { "r6", 6 },
  { "r7", 7 },
  { "r8", 8 },
  { "r9", 9 },
  { "r10", 10 },
  { "r11", 11 },
  { "r12", 12 },
  { "r13", 13 },
  { "r14", 14 },
  { "r15", 15 },
  /* Synonyms (argument and variable registers).  */
  { "a1", 0 },
  { "a2", 1 },
  { "a3", 2 },
  { "a4", 3 },
  { "v1", 4 },
  { "v2", 5 },
  { "v3", 6 },
  { "v4", 7 },
  { "v5", 8 },
  { "v6", 9 },
  { "v7", 10 },
  { "v8", 11 },
  /* Other platform-specific names for r9.  */
  { "sb", 9 },
  { "tr", 9 },
  /* Special names.  */
  { "ip", 12 },
  { "lr", 14 },
  /* Names used by GCC (not listed in the ARM EABI).  */
  { "sl", 10 },
  /* A special name from the older ATPCS.  */
  { "wr", 7 },
};

static const char *const arm_register_names[] =
{"r0",  "r1",  "r2",  "r3",	/*  0  1  2  3 */
 "r4",  "r5",  "r6",  "r7",	/*  4  5  6  7 */
 "r8",  "r9",  "r10", "r11",	/*  8  9 10 11 */
 "r12", "sp",  "lr",  "pc",	/* 12 13 14 15 */
 "f0",  "f1",  "f2",  "f3",	/* 16 17 18 19 */
 "f4",  "f5",  "f6",  "f7",	/* 20 21 22 23 */
 "fps", "cpsr" };		/* 24 25       */

/* Valid register name styles.  */
static const char **valid_disassembly_styles;

/* Disassembly style to use.  Default to "std" register names.  */
static const char *disassembly_style;

/* This is used to keep the bfd arch_info in sync with the disassembly
   style.  */
static void set_disassembly_style_sfunc(char *, int,
					struct cmd_list_element *);
static void set_disassembly_style (void);

static void convert_from_extended (const struct floatformat *, const void *,
				   void *, int);
static void convert_to_extended (const struct floatformat *, void *,
				 const void *, int);

static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
						struct regcache *regcache,
						int regnum, gdb_byte *buf);
static void arm_neon_quad_write (struct gdbarch *gdbarch,
				 struct regcache *regcache,
				 int regnum, const gdb_byte *buf);

static CORE_ADDR
  arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self);


/* get_next_pcs operations.  */
static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = {
  arm_get_next_pcs_read_memory_unsigned_integer,
  arm_get_next_pcs_syscall_next_pc,
  arm_get_next_pcs_addr_bits_remove,
  arm_get_next_pcs_is_thumb,
  NULL,
};

struct arm_prologue_cache
{
  /* The stack pointer at the time this frame was created; i.e. the
     caller's stack pointer when this function was called.  It is used
     to identify this frame.  */
  CORE_ADDR prev_sp;

  /* The frame base for this frame is just prev_sp - frame size.
     FRAMESIZE is the distance from the frame pointer to the
     initial stack pointer.  */

  int framesize;

  /* The register used to hold the frame pointer for this frame.  */
  int framereg;

  /* Saved register offsets.  */
  struct trad_frame_saved_reg *saved_regs;
};

static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
				       CORE_ADDR prologue_start,
				       CORE_ADDR prologue_end,
				       struct arm_prologue_cache *cache);

/* Architecture version for displaced stepping.  This affects the behaviour
   of certain instructions, and really should not be hard-wired.  */

#define DISPLACED_STEPPING_ARCH_VERSION		5

/* Set to true if the 32-bit mode is in use.  */

int arm_apcs_32 = 1;

/* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode.  */

int
arm_psr_thumb_bit (struct gdbarch *gdbarch)
{
  if (gdbarch_tdep (gdbarch)->is_m)
    return XPSR_T;
  else
    return CPSR_T;
}

/* Determine if the processor is currently executing in Thumb mode.  */

int
arm_is_thumb (struct regcache *regcache)
{
  ULONGEST cpsr;
  ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regcache));

  cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);

  return (cpsr & t_bit) != 0;
}

/* Determine if FRAME is executing in Thumb mode.  */

int
arm_frame_is_thumb (struct frame_info *frame)
{
  CORE_ADDR cpsr;
  ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));

  /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
     directly (from a signal frame or dummy frame) or by interpreting
     the saved LR (from a prologue or DWARF frame).  So consult it and
     trust the unwinders.  */
  cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);

  return (cpsr & t_bit) != 0;
}

/* Callback for VEC_lower_bound.  */

static inline int
arm_compare_mapping_symbols (const struct arm_mapping_symbol *lhs,
			     const struct arm_mapping_symbol *rhs)
{
  return lhs->value < rhs->value;
}

/* Search for the mapping symbol covering MEMADDR.  If one is found,
   return its type.  Otherwise, return 0.  If START is non-NULL,
   set *START to the location of the mapping symbol.  */

static char
arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
{
  struct obj_section *sec;

  /* If there are mapping symbols, consult them.  */
  sec = find_pc_section (memaddr);
  if (sec != NULL)
    {
      struct arm_per_objfile *data;
      VEC(arm_mapping_symbol_s) *map;
      struct arm_mapping_symbol map_key = { memaddr - obj_section_addr (sec),
					    0 };
      unsigned int idx;

      data = (struct arm_per_objfile *) objfile_data (sec->objfile,
						      arm_objfile_data_key);
      if (data != NULL)
	{
	  map = data->section_maps[sec->the_bfd_section->index];
	  if (!VEC_empty (arm_mapping_symbol_s, map))
	    {
	      struct arm_mapping_symbol *map_sym;

	      idx = VEC_lower_bound (arm_mapping_symbol_s, map, &map_key,
				     arm_compare_mapping_symbols);

	      /* VEC_lower_bound finds the earliest ordered insertion
		 point.  If the following symbol starts at this exact
		 address, we use that; otherwise, the preceding
		 mapping symbol covers this address.  */
	      if (idx < VEC_length (arm_mapping_symbol_s, map))
		{
		  map_sym = VEC_index (arm_mapping_symbol_s, map, idx);
		  if (map_sym->value == map_key.value)
		    {
		      if (start)
			*start = map_sym->value + obj_section_addr (sec);
		      return map_sym->type;
		    }
		}

	      if (idx > 0)
		{
		  map_sym = VEC_index (arm_mapping_symbol_s, map, idx - 1);
		  if (start)
		    *start = map_sym->value + obj_section_addr (sec);
		  return map_sym->type;
		}
	    }
	}
    }

  return 0;
}

/* Determine if the program counter specified in MEMADDR is in a Thumb
   function.  This function should be called for addresses unrelated to
   any executing frame; otherwise, prefer arm_frame_is_thumb.  */

int
arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
{
  struct bound_minimal_symbol sym;
  char type;
  struct displaced_step_closure* dsc
    = get_displaced_step_closure_by_addr(memaddr);

  /* If checking the mode of displaced instruction in copy area, the mode
     should be determined by instruction on the original address.  */
  if (dsc)
    {
      if (debug_displaced)
	fprintf_unfiltered (gdb_stdlog,
			    "displaced: check mode of %.8lx instead of %.8lx\n",
			    (unsigned long) dsc->insn_addr,
			    (unsigned long) memaddr);
      memaddr = dsc->insn_addr;
    }

  /* If bit 0 of the address is set, assume this is a Thumb address.  */
  if (IS_THUMB_ADDR (memaddr))
    return 1;

  /* Respect internal mode override if active.  */
  if (arm_override_mode != -1)
    return arm_override_mode;

  /* If the user wants to override the symbol table, let him.  */
  if (strcmp (arm_force_mode_string, "arm") == 0)
    return 0;
  if (strcmp (arm_force_mode_string, "thumb") == 0)
    return 1;

  /* ARM v6-M and v7-M are always in Thumb mode.  */
  if (gdbarch_tdep (gdbarch)->is_m)
    return 1;

  /* If there are mapping symbols, consult them.  */
  type = arm_find_mapping_symbol (memaddr, NULL);
  if (type)
    return type == 't';

  /* Thumb functions have a "special" bit set in minimal symbols.  */
  sym = lookup_minimal_symbol_by_pc (memaddr);
  if (sym.minsym)
    return (MSYMBOL_IS_SPECIAL (sym.minsym));

  /* If the user wants to override the fallback mode, let them.  */
  if (strcmp (arm_fallback_mode_string, "arm") == 0)
    return 0;
  if (strcmp (arm_fallback_mode_string, "thumb") == 0)
    return 1;

  /* If we couldn't find any symbol, but we're talking to a running
     target, then trust the current value of $cpsr.  This lets
     "display/i $pc" always show the correct mode (though if there is
     a symbol table we will not reach here, so it still may not be
     displayed in the mode it will be executed).  */
  if (target_has_registers)
    return arm_frame_is_thumb (get_current_frame ());

  /* Otherwise we're out of luck; we assume ARM.  */
  return 0;
}

/* Remove useless bits from addresses in a running program.  */
static CORE_ADDR
arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
{
  /* On M-profile devices, do not strip the low bit from EXC_RETURN
     (the magic exception return address).  */
  if (gdbarch_tdep (gdbarch)->is_m
      && (val & 0xfffffff0) == 0xfffffff0)
    return val;

  if (arm_apcs_32)
    return UNMAKE_THUMB_ADDR (val);
  else
    return (val & 0x03fffffc);
}

/* Return 1 if PC is the start of a compiler helper function which
   can be safely ignored during prologue skipping.  IS_THUMB is true
   if the function is known to be a Thumb function due to the way it
   is being called.  */
static int
skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  struct bound_minimal_symbol msym;

  msym = lookup_minimal_symbol_by_pc (pc);
  if (msym.minsym != NULL
      && BMSYMBOL_VALUE_ADDRESS (msym) == pc
      && MSYMBOL_LINKAGE_NAME (msym.minsym) != NULL)
    {
      const char *name = MSYMBOL_LINKAGE_NAME (msym.minsym);

      /* The GNU linker's Thumb call stub to foo is named
	 __foo_from_thumb.  */
      if (strstr (name, "_from_thumb") != NULL)
	name += 2;

      /* On soft-float targets, __truncdfsf2 is called to convert promoted
	 arguments to their argument types in non-prototyped
	 functions.  */
      if (startswith (name, "__truncdfsf2"))
	return 1;
      if (startswith (name, "__aeabi_d2f"))
	return 1;

      /* Internal functions related to thread-local storage.  */
      if (startswith (name, "__tls_get_addr"))
	return 1;
      if (startswith (name, "__aeabi_read_tp"))
	return 1;
    }
  else
    {
      /* If we run against a stripped glibc, we may be unable to identify
	 special functions by name.  Check for one important case,
	 __aeabi_read_tp, by comparing the *code* against the default
	 implementation (this is hand-written ARM assembler in glibc).  */

      if (!is_thumb
	  && read_memory_unsigned_integer (pc, 4, byte_order_for_code)
	     == 0xe3e00a0f /* mov r0, #0xffff0fff */
	  && read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code)
	     == 0xe240f01f) /* sub pc, r0, #31 */
	return 1;
    }

  return 0;
}

/* Extract the immediate from instruction movw/movt of encoding T.  INSN1 is
   the first 16-bit of instruction, and INSN2 is the second 16-bit of
   instruction.  */
#define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
  ((bits ((insn1), 0, 3) << 12)               \
   | (bits ((insn1), 10, 10) << 11)           \
   | (bits ((insn2), 12, 14) << 8)            \
   | bits ((insn2), 0, 7))

/* Extract the immediate from instruction movw/movt of encoding A.  INSN is
   the 32-bit instruction.  */
#define EXTRACT_MOVW_MOVT_IMM_A(insn) \
  ((bits ((insn), 16, 19) << 12) \
   | bits ((insn), 0, 11))

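/* Illustrative note (not part of the original source; the register used is
   arbitrary): for a Thumb "movw r3, #0x1234" the 16-bit immediate is
   scattered across the two halfwords as imm4 (INSN1 bits 0-3), i (INSN1
   bit 10), imm3 (INSN2 bits 12-14) and imm8 (INSN2 bits 0-7), so
   EXTRACT_MOVW_MOVT_IMM_T reassembles it as
   (0x1 << 12) | (0 << 11) | (0x2 << 8) | 0x34 == 0x1234.  */
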
/* Decode immediate value; implements ThumbExpandImmediate pseudo-op.  */

static unsigned int
thumb_expand_immediate (unsigned int imm)
{
  unsigned int count = imm >> 7;

  if (count < 8)
    switch (count / 2)
      {
      case 0:
	return imm & 0xff;
      case 1:
	return (imm & 0xff) | ((imm & 0xff) << 16);
      case 2:
	return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
      case 3:
	return (imm & 0xff) | ((imm & 0xff) << 8)
	       | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
      }

  return (0x80 | (imm & 0x7f)) << (32 - count);
}

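/* Illustrative examples (not part of the original source): with the 12-bit
   modified immediate 0x155 the duplicated-byte form applies, so
   thumb_expand_immediate (0x155) == 0x00550055; with 0x4ff the rotated
   form applies, giving (0x80 | 0x7f) << (32 - 9) == 0x7f800000.  */
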
/* Return 1 if the 16-bit Thumb instruction INSN restores SP in
   epilogue, 0 otherwise.  */

static int
thumb_instruction_restores_sp (unsigned short insn)
{
  return (insn == 0x46bd  /* mov sp, r7 */
	  || (insn & 0xff80) == 0xb000  /* add sp, imm */
	  || (insn & 0xfe00) == 0xbc00);  /* pop <registers> */
}

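/* Illustrative note (not part of the original source): typical epilogue
   encodings matched above are 0xb008 ("add sp, #32", the 0xb000 pattern)
   and 0xbd80 ("pop {r7, pc}", the 0xbc00 pattern).  */
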
/* Analyze a Thumb prologue, looking for a recognizable stack frame
   and frame pointer.  Scan until we encounter a store that could
   clobber the stack frame unexpectedly, or an unknown instruction.
   Return the last address which is definitely safe to skip for an
   initial breakpoint.  */

static CORE_ADDR
thumb_analyze_prologue (struct gdbarch *gdbarch,
			CORE_ADDR start, CORE_ADDR limit,
			struct arm_prologue_cache *cache)
{
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  int i;
  pv_t regs[16];
  struct pv_area *stack;
  struct cleanup *back_to;
  CORE_ADDR offset;
  CORE_ADDR unrecognized_pc = 0;

  for (i = 0; i < 16; i++)
    regs[i] = pv_register (i, 0);
  stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
  back_to = make_cleanup_free_pv_area (stack);

  while (start < limit)
    {
      unsigned short insn;

      insn = read_memory_unsigned_integer (start, 2, byte_order_for_code);

      if ((insn & 0xfe00) == 0xb400)		/* push { rlist } */
	{
	  int regno;
	  int mask;

	  if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
	    break;

	  /* Bits 0-7 contain a mask for registers R0-R7.  Bit 8 says
	     whether to save LR (R14).  */
	  mask = (insn & 0xff) | ((insn & 0x100) << 6);

	  /* Calculate offsets of saved R0-R7 and LR.  */
	  for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
	    if (mask & (1 << regno))
	      {
		regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
						       -4);
		pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
	      }
	}
      else if ((insn & 0xff80) == 0xb080)	/* sub sp, #imm */
	{
	  offset = (insn & 0x7f) << 2;		/* get scaled offset */
	  regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
						 -offset);
	}
      else if (thumb_instruction_restores_sp (insn))
	{
	  /* Don't scan past the epilogue.  */
	  break;
	}
      else if ((insn & 0xf800) == 0xa800)	/* add Rd, sp, #imm */
	regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
						    (insn & 0xff) << 2);
      else if ((insn & 0xfe00) == 0x1c00	/* add Rd, Rn, #imm */
	       && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
	regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
						   bits (insn, 6, 8));
      else if ((insn & 0xf800) == 0x3000	/* add Rd, #imm */
	       && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
	regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
						    bits (insn, 0, 7));
      else if ((insn & 0xfe00) == 0x1800	/* add Rd, Rn, Rm */
	       && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
	       && pv_is_constant (regs[bits (insn, 3, 5)]))
	regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
					  regs[bits (insn, 6, 8)]);
      else if ((insn & 0xff00) == 0x4400	/* add Rd, Rm */
	       && pv_is_constant (regs[bits (insn, 3, 6)]))
	{
	  int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
	  int rm = bits (insn, 3, 6);
	  regs[rd] = pv_add (regs[rd], regs[rm]);
	}
      else if ((insn & 0xff00) == 0x4600)	/* mov hi, lo or mov lo, hi */
	{
	  int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
	  int src_reg = (insn & 0x78) >> 3;
	  regs[dst_reg] = regs[src_reg];
	}
      else if ((insn & 0xf800) == 0x9000)	/* str rd, [sp, #off] */
	{
	  /* Handle stores to the stack.  Normally pushes are used,
	     but with GCC -mtpcs-frame, there may be other stores
	     in the prologue to create the frame.  */
	  int regno = (insn >> 8) & 0x7;
	  pv_t addr;

	  offset = (insn & 0xff) << 2;
	  addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);

	  if (pv_area_store_would_trash (stack, addr))
	    break;

	  pv_area_store (stack, addr, 4, regs[regno]);
	}
      else if ((insn & 0xf800) == 0x6000)	/* str rd, [rn, #off] */
	{
	  int rd = bits (insn, 0, 2);
	  int rn = bits (insn, 3, 5);
	  pv_t addr;

	  offset = bits (insn, 6, 10) << 2;
	  addr = pv_add_constant (regs[rn], offset);

	  if (pv_area_store_would_trash (stack, addr))
	    break;

	  pv_area_store (stack, addr, 4, regs[rd]);
	}
      else if (((insn & 0xf800) == 0x7000	/* strb Rd, [Rn, #off] */
		|| (insn & 0xf800) == 0x8000)	/* strh Rd, [Rn, #off] */
	       && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
	/* Ignore stores of argument registers to the stack.  */
	;
      else if ((insn & 0xf800) == 0xc800	/* ldmia Rn!, { registers } */
	       && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
	/* Ignore block loads from the stack, potentially copying
	   parameters from memory.  */
	;
      else if ((insn & 0xf800) == 0x9800	/* ldr Rd, [sp, #immed] */
	       || ((insn & 0xf800) == 0x6800	/* ldr Rd, [Rn, #immed] */
		   && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
	/* Similarly ignore single loads from the stack.  */
	;
      else if ((insn & 0xffc0) == 0x0000	/* lsls Rd, Rm, #0 */
	       || (insn & 0xffc0) == 0x1c00)	/* add Rd, Rn, #0 */
	/* Skip register copies, i.e. saves to another register
	   instead of the stack.  */
	;
      else if ((insn & 0xf800) == 0x2000)	/* movs Rd, #imm */
	/* Recognize constant loads; even with small stacks these are necessary
	   on Thumb.  */
	regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
      else if ((insn & 0xf800) == 0x4800)	/* ldr Rd, [pc, #imm] */
	{
	  /* Constant pool loads, for the same reason.  */
	  unsigned int constant;
	  CORE_ADDR loc;

	  loc = start + 4 + bits (insn, 0, 7) * 4;
	  constant = read_memory_unsigned_integer (loc, 4, byte_order);
	  regs[bits (insn, 8, 10)] = pv_constant (constant);
	}
      else if (thumb_insn_size (insn) == 4)	/* 32-bit Thumb-2 instructions.  */
	{
	  unsigned short inst2;

	  inst2 = read_memory_unsigned_integer (start + 2, 2,
						byte_order_for_code);

	  if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
	    {
	      /* BL, BLX.  Allow some special function calls when
		 skipping the prologue; GCC generates these before
		 storing arguments to the stack.  */
	      CORE_ADDR nextpc;
	      int j1, j2, imm1, imm2;

	      imm1 = sbits (insn, 0, 10);
	      imm2 = bits (inst2, 0, 10);
	      j1 = bit (inst2, 13);
	      j2 = bit (inst2, 11);

	      offset = ((imm1 << 12) + (imm2 << 1));
	      offset ^= ((!j2) << 22) | ((!j1) << 23);

	      nextpc = start + 4 + offset;
	      /* For BLX make sure to clear the low bits.  */
	      if (bit (inst2, 12) == 0)
		nextpc = nextpc & 0xfffffffc;

	      if (!skip_prologue_function (gdbarch, nextpc,
					   bit (inst2, 12) != 0))
		break;
	    }

	  else if ((insn & 0xffd0) == 0xe900	/* stmdb Rn{!},
						   { registers } */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    {
	      pv_t addr = regs[bits (insn, 0, 3)];
	      int regno;

	      if (pv_area_store_would_trash (stack, addr))
		break;

	      /* Calculate offsets of saved registers.  */
	      for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
		if (inst2 & (1 << regno))
		  {
		    addr = pv_add_constant (addr, -4);
		    pv_area_store (stack, addr, 4, regs[regno]);
		  }

	      if (insn & 0x0020)
		regs[bits (insn, 0, 3)] = addr;
	    }

	  else if ((insn & 0xff50) == 0xe940	/* strd Rt, Rt2,
						   [Rn, #+/-imm]{!} */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    {
	      int regno1 = bits (inst2, 12, 15);
	      int regno2 = bits (inst2, 8, 11);
	      pv_t addr = regs[bits (insn, 0, 3)];

	      offset = inst2 & 0xff;
	      if (insn & 0x0080)
		addr = pv_add_constant (addr, offset);
	      else
		addr = pv_add_constant (addr, -offset);

	      if (pv_area_store_would_trash (stack, addr))
		break;

	      pv_area_store (stack, addr, 4, regs[regno1]);
	      pv_area_store (stack, pv_add_constant (addr, 4),
			     4, regs[regno2]);

	      if (insn & 0x0020)
		regs[bits (insn, 0, 3)] = addr;
	    }

	  else if ((insn & 0xfff0) == 0xf8c0	/* str Rt,[Rn,+/-#imm]{!} */
		   && (inst2 & 0x0c00) == 0x0c00
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    {
	      int regno = bits (inst2, 12, 15);
	      pv_t addr = regs[bits (insn, 0, 3)];

	      offset = inst2 & 0xff;
	      if (inst2 & 0x0200)
		addr = pv_add_constant (addr, offset);
	      else
		addr = pv_add_constant (addr, -offset);

	      if (pv_area_store_would_trash (stack, addr))
		break;

	      pv_area_store (stack, addr, 4, regs[regno]);

	      if (inst2 & 0x0100)
		regs[bits (insn, 0, 3)] = addr;
	    }

	  else if ((insn & 0xfff0) == 0xf8c0	/* str.w Rt,[Rn,#imm] */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    {
	      int regno = bits (inst2, 12, 15);
	      pv_t addr;

	      offset = inst2 & 0xfff;
	      addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);

	      if (pv_area_store_would_trash (stack, addr))
		break;

	      pv_area_store (stack, addr, 4, regs[regno]);
	    }

	  else if ((insn & 0xffd0) == 0xf880	/* str{bh}.w Rt,[Rn,#imm] */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Ignore stores of argument registers to the stack.  */
	    ;

	  else if ((insn & 0xffd0) == 0xf800	/* str{bh} Rt,[Rn,#+/-imm] */
		   && (inst2 & 0x0d00) == 0x0c00
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Ignore stores of argument registers to the stack.  */
	    ;

	  else if ((insn & 0xffd0) == 0xe890	/* ldmia Rn[!],
						   { registers } */
		   && (inst2 & 0x8000) == 0x0000
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Ignore block loads from the stack, potentially copying
	       parameters from memory.  */
	    ;

	  else if ((insn & 0xffb0) == 0xe950	/* ldrd Rt, Rt2,
						   [Rn, #+/-imm] */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Similarly ignore dual loads from the stack.  */
	    ;

	  else if ((insn & 0xfff0) == 0xf850	/* ldr Rt,[Rn,#+/-imm] */
		   && (inst2 & 0x0d00) == 0x0c00
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Similarly ignore single loads from the stack.  */
	    ;

	  else if ((insn & 0xfff0) == 0xf8d0	/* ldr.w Rt,[Rn,#imm] */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Similarly ignore single loads from the stack.  */
	    ;

	  else if ((insn & 0xfbf0) == 0xf100	/* add.w Rd, Rn, #imm */
		   && (inst2 & 0x8000) == 0x0000)
	    {
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_add_constant (regs[bits (insn, 0, 3)],
				   thumb_expand_immediate (imm));
	    }

	  else if ((insn & 0xfbf0) == 0xf200	/* addw Rd, Rn, #imm */
		   && (inst2 & 0x8000) == 0x0000)
	    {
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_add_constant (regs[bits (insn, 0, 3)], imm);
	    }

	  else if ((insn & 0xfbf0) == 0xf1a0	/* sub.w Rd, Rn, #imm */
		   && (inst2 & 0x8000) == 0x0000)
	    {
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_add_constant (regs[bits (insn, 0, 3)],
				   - (CORE_ADDR) thumb_expand_immediate (imm));
	    }

	  else if ((insn & 0xfbf0) == 0xf2a0	/* subw Rd, Rn, #imm */
		   && (inst2 & 0x8000) == 0x0000)
	    {
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
	    }

	  else if ((insn & 0xfbff) == 0xf04f)	/* mov.w Rd, #const */
	    {
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_constant (thumb_expand_immediate (imm));
	    }

	  else if ((insn & 0xfbf0) == 0xf240)	/* movw Rd, #const */
	    {
	      unsigned int imm
		= EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);

	      regs[bits (inst2, 8, 11)] = pv_constant (imm);
	    }

	  else if (insn == 0xea5f		/* mov.w Rd,Rm */
		   && (inst2 & 0xf0f0) == 0)
	    {
	      int dst_reg = (inst2 & 0x0f00) >> 8;
	      int src_reg = inst2 & 0xf;
	      regs[dst_reg] = regs[src_reg];
	    }

	  else if ((insn & 0xff7f) == 0xf85f)	/* ldr.w Rt,<label> */
	    {
	      /* Constant pool loads.  */
	      unsigned int constant;
	      CORE_ADDR loc;

	      offset = bits (inst2, 0, 11);
	      if (insn & 0x0080)
		loc = start + 4 + offset;
	      else
		loc = start + 4 - offset;

	      constant = read_memory_unsigned_integer (loc, 4, byte_order);
	      regs[bits (inst2, 12, 15)] = pv_constant (constant);
	    }

	  else if ((insn & 0xff7f) == 0xe95f)	/* ldrd Rt,Rt2,<label> */
	    {
	      /* Constant pool loads.  */
	      unsigned int constant;
	      CORE_ADDR loc;

	      offset = bits (inst2, 0, 7) << 2;
	      if (insn & 0x0080)
		loc = start + 4 + offset;
	      else
		loc = start + 4 - offset;

	      constant = read_memory_unsigned_integer (loc, 4, byte_order);
	      regs[bits (inst2, 12, 15)] = pv_constant (constant);

	      constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
	      regs[bits (inst2, 8, 11)] = pv_constant (constant);
	    }

	  else if (thumb2_instruction_changes_pc (insn, inst2))
	    {
	      /* Don't scan past anything that might change control flow.  */
	      break;
	    }
	  else
	    {
	      /* The optimizer might shove anything into the prologue,
		 so we just skip what we don't recognize.  */
	      unrecognized_pc = start;
	    }

	  start += 2;
	}
      else if (thumb_instruction_changes_pc (insn))
	{
	  /* Don't scan past anything that might change control flow.  */
	  break;
	}
      else
	{
	  /* The optimizer might shove anything into the prologue,
	     so we just skip what we don't recognize.  */
	  unrecognized_pc = start;
	}

      start += 2;
    }

  if (arm_debug)
    fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
			paddress (gdbarch, start));

  if (unrecognized_pc == 0)
    unrecognized_pc = start;

  if (cache == NULL)
    {
      do_cleanups (back_to);
      return unrecognized_pc;
    }

  if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
    {
      /* Frame pointer is fp.  Frame size is constant.  */
      cache->framereg = ARM_FP_REGNUM;
      cache->framesize = -regs[ARM_FP_REGNUM].k;
    }
  else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
    {
      /* Frame pointer is r7.  Frame size is constant.  */
      cache->framereg = THUMB_FP_REGNUM;
      cache->framesize = -regs[THUMB_FP_REGNUM].k;
    }
  else
    {
      /* Try the stack pointer... this is a bit desperate.  */
      cache->framereg = ARM_SP_REGNUM;
      cache->framesize = -regs[ARM_SP_REGNUM].k;
    }

  for (i = 0; i < 16; i++)
    if (pv_area_find_reg (stack, gdbarch, i, &offset))
      cache->saved_regs[i].addr = offset;

  do_cleanups (back_to);
  return unrecognized_pc;
}


/* Try to analyze the instructions starting from PC, which load the symbol
   __stack_chk_guard.  Return the address of the symbol that is loaded, set
   the destination register number in *DESTREG, and set the total size in
   bytes of the loading instructions in *OFFSET.  Return 0 if the
   instructions are not recognized.  */

static CORE_ADDR
arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
				 unsigned int *destreg, int *offset)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  int is_thumb = arm_pc_is_thumb (gdbarch, pc);
  unsigned int low, high, address;

  address = 0;
  if (is_thumb)
    {
      unsigned short insn1
	= read_memory_unsigned_integer (pc, 2, byte_order_for_code);

      if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
	{
	  *destreg = bits (insn1, 8, 10);
	  *offset = 2;
	  address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
	  address = read_memory_unsigned_integer (address, 4,
						  byte_order_for_code);
	}
      else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
	{
	  unsigned short insn2
	    = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);

	  low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);

	  insn1
	    = read_memory_unsigned_integer (pc + 4, 2, byte_order_for_code);
	  insn2
	    = read_memory_unsigned_integer (pc + 6, 2, byte_order_for_code);

	  /* movt Rd, #const */
	  if ((insn1 & 0xfbc0) == 0xf2c0)
	    {
	      high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
	      *destreg = bits (insn2, 8, 11);
	      *offset = 8;
	      address = (high << 16 | low);
	    }
	}
    }
  else
    {
      unsigned int insn
	= read_memory_unsigned_integer (pc, 4, byte_order_for_code);

      if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
	{
	  address = bits (insn, 0, 11) + pc + 8;
	  address = read_memory_unsigned_integer (address, 4,
						  byte_order_for_code);

	  *destreg = bits (insn, 12, 15);
	  *offset = 4;
	}
      else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
	{
	  low = EXTRACT_MOVW_MOVT_IMM_A (insn);

	  insn
	    = read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code);

	  if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
	    {
	      high = EXTRACT_MOVW_MOVT_IMM_A (insn);
	      *destreg = bits (insn, 12, 15);
	      *offset = 8;
	      address = (high << 16 | low);
	    }
	}
    }

  return address;
}

/* Try to skip the sequence of instructions used for the stack protector.
   If PC points to the first instruction of this sequence, return the
   address of the first instruction after the sequence; otherwise, return
   the original PC.

   On ARM, this sequence consists of three main steps:
     Step 1: load the address of symbol __stack_chk_guard,
     Step 2: load the guard value from that address,
     Step 3: store it somewhere else.

   The instructions for step 2 and step 3 are usually the same across ARM
   architectures: step 2 is a single 'ldr Rx, [Rn, #0]', and step 3 is a
   single 'str Rx, [r7, #immd]'.  However, the instructions in step 1 vary
   between architectures.  On ARMv7 they are

	movw	Rn, #:lower16:__stack_chk_guard
	movt	Rn, #:upper16:__stack_chk_guard

   while on ARMv5t it is

	ldr	Rn, .Label
	....
	.Label:
	.word	__stack_chk_guard

   Since ldr/str are very common instructions, we can't use them alone as
   the 'fingerprint' or 'signature' of the stack protector sequence.  Here
   we use the sequence {movw/movt, ldr}/ldr/str plus the symbol
   __stack_chk_guard (if not stripped) as the 'fingerprint' of a stack
   protector code sequence.  */

static CORE_ADDR
arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned int basereg;
  struct bound_minimal_symbol stack_chk_guard;
  int offset;
  int is_thumb = arm_pc_is_thumb (gdbarch, pc);
  CORE_ADDR addr;

  /* Try to parse the instructions in Step 1.  */
  addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
					   &basereg, &offset);
  if (!addr)
    return pc;

  stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
  /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
     Otherwise, this sequence cannot be for stack protector.  */
  if (stack_chk_guard.minsym == NULL
      || !startswith (MSYMBOL_LINKAGE_NAME (stack_chk_guard.minsym),
		      "__stack_chk_guard"))
    return pc;

  if (is_thumb)
    {
      unsigned int destreg;
      unsigned short insn
	= read_memory_unsigned_integer (pc + offset, 2, byte_order_for_code);

      /* Step 2: ldr Rd, [Rn, #immed], encoding T1.  */
      if ((insn & 0xf800) != 0x6800)
	return pc;
      if (bits (insn, 3, 5) != basereg)
	return pc;
      destreg = bits (insn, 0, 2);

      insn = read_memory_unsigned_integer (pc + offset + 2, 2,
					   byte_order_for_code);
      /* Step 3: str Rd, [Rn, #immed], encoding T1.  */
      if ((insn & 0xf800) != 0x6000)
	return pc;
      if (destreg != bits (insn, 0, 2))
	return pc;
    }
  else
    {
      unsigned int destreg;
      unsigned int insn
	= read_memory_unsigned_integer (pc + offset, 4, byte_order_for_code);

      /* Step 2: ldr Rd, [Rn, #immed], encoding A1.  */
      if ((insn & 0x0e500000) != 0x04100000)
	return pc;
      if (bits (insn, 16, 19) != basereg)
	return pc;
      destreg = bits (insn, 12, 15);
      /* Step 3: str Rd, [Rn, #immed], encoding A1.  */
      insn = read_memory_unsigned_integer (pc + offset + 4,
					   4, byte_order_for_code);
      if ((insn & 0x0e500000) != 0x04000000)
	return pc;
      if (bits (insn, 12, 15) != destreg)
	return pc;
    }
  /* The two ldr/str instructions together are 4 bytes long on Thumb-2,
     and 8 bytes in ARM mode.  */
  if (is_thumb)
    return pc + offset + 4;
  else
    return pc + offset + 8;
}

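/* Illustrative note (not part of the original source; register numbers and
   the frame offset are arbitrary): a typical ARMv7 Thumb-2 sequence
   accepted by arm_skip_stack_protector looks like

	movw	r3, #:lower16:__stack_chk_guard
	movt	r3, #:upper16:__stack_chk_guard
	ldr	r3, [r3, #0]
	str	r3, [r7, #8]

   in which case OFFSET is 8 and the function returns PC + 8 + 4, the
   address of the first instruction after the str.  */
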
/* Advance the PC across any function entry prologue instructions to
   reach some "real" code.

   The APCS (ARM Procedure Call Standard) defines the following
   prologue:

   mov ip, sp
   [stmfd sp!, {a1,a2,a3,a4}]
   stmfd sp!, {...,fp,ip,lr,pc}
   [stfe f7, [sp, #-12]!]
   [stfe f6, [sp, #-12]!]
   [stfe f5, [sp, #-12]!]
   [stfe f4, [sp, #-12]!]
   sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn.  */

static CORE_ADDR
arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
{
  CORE_ADDR func_addr, limit_pc;

  /* See if we can determine the end of the prologue via the symbol table.
     If so, then return either PC, or the PC after the prologue, whichever
     is greater.  */
  if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
    {
      CORE_ADDR post_prologue_pc
	= skip_prologue_using_sal (gdbarch, func_addr);
      struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);

      if (post_prologue_pc)
	post_prologue_pc
	  = arm_skip_stack_protector (post_prologue_pc, gdbarch);


      /* GCC always emits a line note before the prologue and another
	 one after, even if the two are at the same address or on the
	 same line.  Take advantage of this so that we do not need to
	 know every instruction that might appear in the prologue.  We
	 will have producer information for most binaries; if it is
	 missing (e.g. for -gstabs), assume the GNU tools.  */
      if (post_prologue_pc
	  && (cust == NULL
	      || COMPUNIT_PRODUCER (cust) == NULL
	      || startswith (COMPUNIT_PRODUCER (cust), "GNU ")
	      || startswith (COMPUNIT_PRODUCER (cust), "clang ")))
	return post_prologue_pc;

      if (post_prologue_pc != 0)
	{
	  CORE_ADDR analyzed_limit;

	  /* For non-GCC compilers, make sure the entire line is an
	     acceptable prologue; GDB will round this function's
	     return value up to the end of the following line so we
	     can not skip just part of a line (and we do not want to).

	     RealView does not treat the prologue specially, but does
	     associate prologue code with the opening brace; so this
	     lets us skip the first line if we think it is the opening
	     brace.  */
	  if (arm_pc_is_thumb (gdbarch, func_addr))
	    analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
						     post_prologue_pc, NULL);
	  else
	    analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
						   post_prologue_pc, NULL);

	  if (analyzed_limit != post_prologue_pc)
	    return func_addr;

	  return post_prologue_pc;
	}
    }

  /* Can't determine prologue from the symbol table, need to examine
     instructions.  */

  /* Find an upper limit on the function prologue using the debug
     information.  If the debug information could not be used to provide
     that bound, then use an arbitrary large number as the upper bound.  */
  /* Like arm_scan_prologue, stop no later than pc + 64.  */
  limit_pc = skip_prologue_using_sal (gdbarch, pc);
  if (limit_pc == 0)
    limit_pc = pc + 64;		/* Magic.  */


  /* Check if this is Thumb code.  */
  if (arm_pc_is_thumb (gdbarch, pc))
    return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
  else
    return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL);
}

/* *INDENT-OFF* */
/* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
   This function decodes a Thumb function prologue to determine:
     1) the size of the stack frame
     2) which registers are saved on it
     3) the offsets of saved regs
     4) the offset from the stack pointer to the frame pointer

   A typical Thumb function prologue would create this stack frame
   (offsets relative to FP)
     old SP ->	24  stack parameters
		20  LR
		16  R7
     R7 ->       0  local variables (16 bytes)
     SP ->     -12  additional stack space (12 bytes)
   The frame size would thus be 36 bytes, and the frame offset would be
   12 bytes.  The frame register is R7.

   The comments for thumb_analyze_prologue() describe the algorithm we use
   to detect the end of the prologue.  */
/* *INDENT-ON* */

static void
thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
		     CORE_ADDR block_addr, struct arm_prologue_cache *cache)
{
  CORE_ADDR prologue_start;
  CORE_ADDR prologue_end;

  if (find_pc_partial_function (block_addr, NULL, &prologue_start,
				&prologue_end))
    {
      /* See comment in arm_scan_prologue for an explanation of
	 this heuristic.  */
      if (prologue_end > prologue_start + 64)
	{
	  prologue_end = prologue_start + 64;
	}
    }
  else
    /* We're in the boondocks: we have no idea where the start of the
       function is.  */
    return;

  prologue_end = std::min (prologue_end, prev_pc);

  thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
}

/* Return 1 if the ARM instruction INSN restores SP in epilogue, 0
   otherwise.  */

static int
arm_instruction_restores_sp (unsigned int insn)
{
  if (bits (insn, 28, 31) != INST_NV)
    {
      if ((insn & 0x0df0f000) == 0x0080d000
	  /* ADD SP (register or immediate).  */
	  || (insn & 0x0df0f000) == 0x0040d000
	  /* SUB SP (register or immediate).  */
	  || (insn & 0x0ffffff0) == 0x01a0d000
	  /* MOV SP.  */
	  || (insn & 0x0fff0000) == 0x08bd0000
	  /* POP (LDMIA).  */
	  || (insn & 0x0fff0000) == 0x049d0000)
	  /* POP of a single register.  */
	return 1;
    }

  return 0;
}

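/* Illustrative note (not part of the original source): ARM encodings
   matched above include 0xe28dd010 ("add sp, sp, #16", the ADD SP pattern)
   and 0xe8bd8010 ("pop {r4, pc}", i.e. ldmia sp!, {r4, pc}, the POP (LDMIA)
   pattern); instructions in the unconditional (NV) space are rejected by
   the bits 28-31 check.  */
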
0d39a070
DJ
1426/* Analyze an ARM mode prologue starting at PROLOGUE_START and
1427 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1428 fill it in. Return the first address not recognized as a prologue
1429 instruction.
eb5492fa 1430
0d39a070
DJ
1431 We recognize all the instructions typically found in ARM prologues,
1432 plus harmless instructions which can be skipped (either for analysis
1433 purposes, or a more restrictive set that can be skipped when finding
1434 the end of the prologue). */
1435
1436static CORE_ADDR
1437arm_analyze_prologue (struct gdbarch *gdbarch,
1438 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1439 struct arm_prologue_cache *cache)
1440{
0d39a070
DJ
1441 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1442 int regno;
1443 CORE_ADDR offset, current_pc;
1444 pv_t regs[ARM_FPS_REGNUM];
1445 struct pv_area *stack;
1446 struct cleanup *back_to;
0d39a070
DJ
1447 CORE_ADDR unrecognized_pc = 0;
1448
1449 /* Search the prologue looking for instructions that set up the
96baa820 1450 frame pointer, adjust the stack pointer, and save registers.
ed9a39eb 1451
96baa820
JM
1452 Be careful, however, and if it doesn't look like a prologue,
1453 don't try to scan it. If, for instance, a frameless function
1454 begins with stmfd sp!, then we will tell ourselves there is
b8d5e71d 1455 a frame, which will confuse stack traceback, as well as "finish"
96baa820 1456 and other operations that rely on a knowledge of the stack
0d39a070 1457 traceback. */
d4473757 1458
4be43953
DJ
1459 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1460 regs[regno] = pv_register (regno, 0);
55f960e1 1461 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
4be43953
DJ
1462 back_to = make_cleanup_free_pv_area (stack);
1463
94c30b78
MS
1464 for (current_pc = prologue_start;
1465 current_pc < prologue_end;
f43845b3 1466 current_pc += 4)
96baa820 1467 {
e17a4113
UW
1468 unsigned int insn
1469 = read_memory_unsigned_integer (current_pc, 4, byte_order_for_code);
9d4fde75 1470
94c30b78 1471 if (insn == 0xe1a0c00d) /* mov ip, sp */
f43845b3 1472 {
4be43953 1473 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
28cd8767
JG
1474 continue;
1475 }
0d39a070
DJ
1476 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1477 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
28cd8767
JG
1478 {
1479 unsigned imm = insn & 0xff; /* immediate value */
1480 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
0d39a070 1481 int rd = bits (insn, 12, 15);
28cd8767 1482 imm = (imm >> rot) | (imm << (32 - rot));
0d39a070 1483 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
28cd8767
JG
1484 continue;
1485 }
0d39a070
DJ
1486 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1487 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
28cd8767
JG
1488 {
1489 unsigned imm = insn & 0xff; /* immediate value */
1490 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
0d39a070 1491 int rd = bits (insn, 12, 15);
28cd8767 1492 imm = (imm >> rot) | (imm << (32 - rot));
0d39a070 1493 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
f43845b3
MS
1494 continue;
1495 }
0963b4bd
MS
1496 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1497 [sp, #-4]! */
f43845b3 1498 {
4be43953
DJ
1499 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1500 break;
1501 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
0d39a070
DJ
1502 pv_area_store (stack, regs[ARM_SP_REGNUM], 4,
1503 regs[bits (insn, 12, 15)]);
f43845b3
MS
1504 continue;
1505 }
1506 else if ((insn & 0xffff0000) == 0xe92d0000)
d4473757
KB
1507 /* stmfd sp!, {..., fp, ip, lr, pc}
1508 or
1509 stmfd sp!, {a1, a2, a3, a4} */
c906108c 1510 {
d4473757 1511 int mask = insn & 0xffff;
ed9a39eb 1512
4be43953
DJ
1513 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1514 break;
1515
94c30b78 1516 /* Calculate offsets of saved registers. */
34e8f22d 1517 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
d4473757
KB
1518 if (mask & (1 << regno))
1519 {
0963b4bd
MS
1520 regs[ARM_SP_REGNUM]
1521 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
4be43953 1522 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
d4473757
KB
1523 }
1524 }
0d39a070
DJ
1525 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1526 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
f8bf5763 1527 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
b8d5e71d
MS
1528 {
1529 /* No need to add this to saved_regs -- it's just an arg reg. */
1530 continue;
1531 }
0d39a070
DJ
1532 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1533 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
f8bf5763 1534 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
f43845b3
MS
1535 {
1536 /* No need to add this to saved_regs -- it's just an arg reg. */
1537 continue;
1538 }
0963b4bd
MS
1539 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1540 { registers } */
0d39a070
DJ
1541 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1542 {
1543 /* No need to add this to saved_regs -- it's just arg regs. */
1544 continue;
1545 }
d4473757
KB
1546 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1547 {
94c30b78
MS
1548 unsigned imm = insn & 0xff; /* immediate value */
1549 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
d4473757 1550 imm = (imm >> rot) | (imm << (32 - rot));
4be43953 1551 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
d4473757
KB
1552 }
1553 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1554 {
94c30b78
MS
1555 unsigned imm = insn & 0xff; /* immediate value */
1556 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
d4473757 1557 imm = (imm >> rot) | (imm << (32 - rot));
4be43953 1558 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
d4473757 1559 }
0963b4bd
MS
1560 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1561 [sp, -#c]! */
2af46ca0 1562 && gdbarch_tdep (gdbarch)->have_fpa_registers)
d4473757 1563 {
4be43953
DJ
1564 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1565 break;
1566
1567 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
34e8f22d 1568 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
4be43953 1569 pv_area_store (stack, regs[ARM_SP_REGNUM], 12, regs[regno]);
d4473757 1570 }
0963b4bd
MS
1571 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1572 [sp!] */
2af46ca0 1573 && gdbarch_tdep (gdbarch)->have_fpa_registers)
d4473757
KB
1574 {
1575 int n_saved_fp_regs;
1576 unsigned int fp_start_reg, fp_bound_reg;
1577
4be43953
DJ
1578 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1579 break;
1580
94c30b78 1581 if ((insn & 0x800) == 0x800) /* N0 is set */
96baa820 1582 {
d4473757
KB
1583 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1584 n_saved_fp_regs = 3;
1585 else
1586 n_saved_fp_regs = 1;
96baa820 1587 }
d4473757 1588 else
96baa820 1589 {
d4473757
KB
1590 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1591 n_saved_fp_regs = 2;
1592 else
1593 n_saved_fp_regs = 4;
96baa820 1594 }
d4473757 1595
34e8f22d 1596 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
d4473757
KB
1597 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1598 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
96baa820 1599 {
4be43953
DJ
1600 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1601 pv_area_store (stack, regs[ARM_SP_REGNUM], 12,
1602 regs[fp_start_reg++]);
96baa820 1603 }
c906108c 1604 }
0d39a070
DJ
1605 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1606 {
1607 /* Allow some special function calls when skipping the
1608 prologue; GCC generates these before storing arguments to
1609 the stack. */
1610 CORE_ADDR dest = BranchDest (current_pc, insn);
1611
e0634ccf 1612 if (skip_prologue_function (gdbarch, dest, 0))
0d39a070
DJ
1613 continue;
1614 else
1615 break;
1616 }
d4473757 1617 else if ((insn & 0xf0000000) != 0xe0000000)
0963b4bd 1618 break; /* Condition not true, exit early. */
0d39a070
DJ
1619 else if (arm_instruction_changes_pc (insn))
1620 /* Don't scan past anything that might change control flow. */
1621 break;
f303bc3e
YQ
1622 else if (arm_instruction_restores_sp (insn))
1623 {
1624 /* Don't scan past the epilogue. */
1625 break;
1626 }
d19f7eee
UW
1627 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1628 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1629 /* Ignore block loads from the stack, potentially copying
1630 parameters from memory. */
1631 continue;
1632 else if ((insn & 0xfc500000) == 0xe4100000
1633 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1634 /* Similarly ignore single loads from the stack. */
1635 continue;
0d39a070
DJ
1636 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1637 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1638 register instead of the stack. */
d4473757 1639 continue;
0d39a070
DJ
1640 else
1641 {
21daaaaf
YQ
1642 /* The optimizer might shove anything into the prologue.  If we
1643 are building up the cache (cache != NULL) from scanning the
1644 prologue, we just skip what we don't recognize and scan further
1645 to make the cache as complete as possible.  However, if we are
1646 skipping the prologue, we stop immediately on the first
1647 unrecognized instruction. */
0d39a070 1648 unrecognized_pc = current_pc;
21daaaaf
YQ
1649 if (cache != NULL)
1650 continue;
1651 else
1652 break;
0d39a070 1653 }
c906108c
SS
1654 }
1655
0d39a070
DJ
1656 if (unrecognized_pc == 0)
1657 unrecognized_pc = current_pc;
1658
0d39a070
DJ
1659 if (cache)
1660 {
4072f920
YQ
1661 int framereg, framesize;
1662
1663 /* The frame size is just the distance from the frame register
1664 to the original stack pointer. */
1665 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1666 {
1667 /* Frame pointer is fp. */
1668 framereg = ARM_FP_REGNUM;
1669 framesize = -regs[ARM_FP_REGNUM].k;
1670 }
1671 else
1672 {
1673 /* Try the stack pointer... this is a bit desperate. */
1674 framereg = ARM_SP_REGNUM;
1675 framesize = -regs[ARM_SP_REGNUM].k;
1676 }
1677
0d39a070
DJ
1678 cache->framereg = framereg;
1679 cache->framesize = framesize;
1680
1681 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1682 if (pv_area_find_reg (stack, gdbarch, regno, &offset))
1683 cache->saved_regs[regno].addr = offset;
1684 }
1685
1686 if (arm_debug)
1687 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1688 paddress (gdbarch, unrecognized_pc));
4be43953
DJ
1689
1690 do_cleanups (back_to);
0d39a070
DJ
1691 return unrecognized_pc;
1692}
1693
1694static void
1695arm_scan_prologue (struct frame_info *this_frame,
1696 struct arm_prologue_cache *cache)
1697{
1698 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1699 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
bec2ab5a 1700 CORE_ADDR prologue_start, prologue_end;
0d39a070
DJ
1701 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1702 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
0d39a070
DJ
1703
1704 /* Assume there is no frame until proven otherwise. */
1705 cache->framereg = ARM_SP_REGNUM;
1706 cache->framesize = 0;
1707
1708 /* Check for Thumb prologue. */
1709 if (arm_frame_is_thumb (this_frame))
1710 {
1711 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1712 return;
1713 }
1714
1715 /* Find the function prologue. If we can't find the function in
1716 the symbol table, peek in the stack frame to find the PC. */
1717 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1718 &prologue_end))
1719 {
1720 /* One way to find the end of the prologue (which works well
1721 for unoptimized code) is to do the following:
1722
1723 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1724
1725 if (sal.line == 0)
1726 prologue_end = prev_pc;
1727 else if (sal.end < prologue_end)
1728 prologue_end = sal.end;
1729
1730 This mechanism is very accurate so long as the optimizer
1731 doesn't move any instructions from the function body into the
1732 prologue. If this happens, sal.end will be the last
1733 instruction in the first hunk of prologue code just before
1734 the first instruction that the scheduler has moved from
1735 the body to the prologue.
1736
1737 In order to make sure that we scan all of the prologue
1738 instructions, we use a slightly less accurate mechanism which
1739 may scan more than necessary. To help compensate for this
1740 lack of accuracy, the prologue scanning loop below contains
1741 several clauses which will cause the loop to terminate early if
1742 an implausible prologue instruction is encountered.
1743
1744 The expression
1745
1746 prologue_start + 64
1747
1748 is a suitable endpoint since it accounts for the largest
1749 possible prologue plus up to five instructions inserted by
1750 the scheduler. */
1751
1752 if (prologue_end > prologue_start + 64)
1753 {
1754 prologue_end = prologue_start + 64; /* See above. */
1755 }
1756 }
1757 else
1758 {
1759 /* We have no symbol information. Our only option is to assume this
1760 function has a standard stack frame and the normal frame register.
1761 Then, we can find the value of our frame pointer on entrance to
1762 the callee (or at the present moment if this is the innermost frame).
1763 The value stored there should be the address of the stmfd + 8. */
1764 CORE_ADDR frame_loc;
1765 LONGEST return_value;
1766
1767 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
1768 if (!safe_read_memory_integer (frame_loc, 4, byte_order, &return_value))
1769 return;
1770 else
1771 {
1772 prologue_start = gdbarch_addr_bits_remove
1773 (gdbarch, return_value) - 8;
1774 prologue_end = prologue_start + 64; /* See above. */
1775 }
1776 }
1777
1778 if (prev_pc < prologue_end)
1779 prologue_end = prev_pc;
1780
1781 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
c906108c
SS
1782}
1783
eb5492fa 1784static struct arm_prologue_cache *
a262aec2 1785arm_make_prologue_cache (struct frame_info *this_frame)
c906108c 1786{
eb5492fa
DJ
1787 int reg;
1788 struct arm_prologue_cache *cache;
1789 CORE_ADDR unwound_fp;
c5aa993b 1790
35d5d4ee 1791 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
a262aec2 1792 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
c906108c 1793
a262aec2 1794 arm_scan_prologue (this_frame, cache);
848cfffb 1795
a262aec2 1796 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
eb5492fa
DJ
1797 if (unwound_fp == 0)
1798 return cache;
c906108c 1799
4be43953 1800 cache->prev_sp = unwound_fp + cache->framesize;
c906108c 1801
eb5492fa
DJ
1802 /* Calculate actual addresses of saved registers using offsets
1803 determined by arm_scan_prologue. */
a262aec2 1804 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
e28a332c 1805 if (trad_frame_addr_p (cache->saved_regs, reg))
eb5492fa
DJ
1806 cache->saved_regs[reg].addr += cache->prev_sp;
1807
1808 return cache;
c906108c
SS
1809}
1810
c1ee9414
LM
1811/* Implementation of the stop_reason hook for arm_prologue frames. */
1812
1813static enum unwind_stop_reason
1814arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
1815 void **this_cache)
1816{
1817 struct arm_prologue_cache *cache;
1818 CORE_ADDR pc;
1819
1820 if (*this_cache == NULL)
1821 *this_cache = arm_make_prologue_cache (this_frame);
9a3c8263 1822 cache = (struct arm_prologue_cache *) *this_cache;
c1ee9414
LM
1823
1824 /* This is meant to halt the backtrace at "_start". */
1825 pc = get_frame_pc (this_frame);
1826 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
1827 return UNWIND_OUTERMOST;
1828
1829 /* If we've hit a wall, stop. */
1830 if (cache->prev_sp == 0)
1831 return UNWIND_OUTERMOST;
1832
1833 return UNWIND_NO_REASON;
1834}
1835
eb5492fa
DJ
1836/* Our frame ID for a normal frame is the current function's starting PC
1837 and the caller's SP when we were called. */
c906108c 1838
148754e5 1839static void
a262aec2 1840arm_prologue_this_id (struct frame_info *this_frame,
eb5492fa
DJ
1841 void **this_cache,
1842 struct frame_id *this_id)
c906108c 1843{
eb5492fa
DJ
1844 struct arm_prologue_cache *cache;
1845 struct frame_id id;
2c404490 1846 CORE_ADDR pc, func;
f079148d 1847
eb5492fa 1848 if (*this_cache == NULL)
a262aec2 1849 *this_cache = arm_make_prologue_cache (this_frame);
9a3c8263 1850 cache = (struct arm_prologue_cache *) *this_cache;
2a451106 1851
0e9e9abd
UW
1852 /* Use function start address as part of the frame ID. If we cannot
1853 identify the start address (due to missing symbol information),
1854 fall back to just using the current PC. */
c1ee9414 1855 pc = get_frame_pc (this_frame);
2c404490 1856 func = get_frame_func (this_frame);
0e9e9abd
UW
1857 if (!func)
1858 func = pc;
1859
eb5492fa 1860 id = frame_id_build (cache->prev_sp, func);
eb5492fa 1861 *this_id = id;
c906108c
SS
1862}
1863
a262aec2
DJ
1864static struct value *
1865arm_prologue_prev_register (struct frame_info *this_frame,
eb5492fa 1866 void **this_cache,
a262aec2 1867 int prev_regnum)
24de872b 1868{
24568a2c 1869 struct gdbarch *gdbarch = get_frame_arch (this_frame);
24de872b
DJ
1870 struct arm_prologue_cache *cache;
1871
eb5492fa 1872 if (*this_cache == NULL)
a262aec2 1873 *this_cache = arm_make_prologue_cache (this_frame);
9a3c8263 1874 cache = (struct arm_prologue_cache *) *this_cache;
24de872b 1875
eb5492fa 1876 /* If we are asked to unwind the PC, then we need to return the LR
b39cc962
DJ
1877 instead. The prologue may save PC, but it will point into this
1878 frame's prologue, not the next frame's resume location. Also
1879 strip the saved T bit. A valid LR may have the low bit set, but
1880 a valid PC never does. */
eb5492fa 1881 if (prev_regnum == ARM_PC_REGNUM)
b39cc962
DJ
1882 {
1883 CORE_ADDR lr;
1884
1885 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1886 return frame_unwind_got_constant (this_frame, prev_regnum,
24568a2c 1887 arm_addr_bits_remove (gdbarch, lr));
b39cc962 1888 }
24de872b 1889
eb5492fa 1890 /* SP is generally not saved to the stack, but this frame is
a262aec2 1891 identified by the next frame's stack pointer at the time of the call.
eb5492fa
DJ
1892 The value was already reconstructed into PREV_SP. */
1893 if (prev_regnum == ARM_SP_REGNUM)
a262aec2 1894 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
eb5492fa 1895
b39cc962
DJ
1896 /* The CPSR may have been changed by the call instruction and by the
1897 called function. The only bit we can reconstruct is the T bit,
1898 by checking the low bit of LR as of the call. This is a reliable
1899 indicator of Thumb-ness except for some ARM v4T pre-interworking
1900 Thumb code, which could get away with a clear low bit as long as
1901 the called function did not use bx. Guess that all other
1902 bits are unchanged; the condition flags are presumably lost,
1903 but the processor status is likely valid. */
1904 if (prev_regnum == ARM_PS_REGNUM)
1905 {
1906 CORE_ADDR lr, cpsr;
9779414d 1907 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
b39cc962
DJ
1908
1909 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
1910 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1911 if (IS_THUMB_ADDR (lr))
9779414d 1912 cpsr |= t_bit;
b39cc962 1913 else
9779414d 1914 cpsr &= ~t_bit;
b39cc962
DJ
1915 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
1916 }
1917
a262aec2
DJ
1918 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
1919 prev_regnum);
eb5492fa
DJ
1920}
1921
1922struct frame_unwind arm_prologue_unwind = {
1923 NORMAL_FRAME,
c1ee9414 1924 arm_prologue_unwind_stop_reason,
eb5492fa 1925 arm_prologue_this_id,
a262aec2
DJ
1926 arm_prologue_prev_register,
1927 NULL,
1928 default_frame_sniffer
eb5492fa
DJ
1929};
1930
0e9e9abd
UW
1931/* Maintain a list of ARM exception table entries per objfile, similar to the
1932 list of mapping symbols. We only cache entries for standard ARM-defined
1933 personality routines; the cache will contain only the frame unwinding
1934 instructions associated with the entry (not the descriptors). */
1935
1936static const struct objfile_data *arm_exidx_data_key;
1937
1938struct arm_exidx_entry
1939{
1940 bfd_vma addr;
1941 gdb_byte *entry;
1942};
1943typedef struct arm_exidx_entry arm_exidx_entry_s;
1944DEF_VEC_O(arm_exidx_entry_s);
1945
1946struct arm_exidx_data
1947{
1948 VEC(arm_exidx_entry_s) **section_maps;
1949};
1950
1951static void
1952arm_exidx_data_free (struct objfile *objfile, void *arg)
1953{
9a3c8263 1954 struct arm_exidx_data *data = (struct arm_exidx_data *) arg;
0e9e9abd
UW
1955 unsigned int i;
1956
1957 for (i = 0; i < objfile->obfd->section_count; i++)
1958 VEC_free (arm_exidx_entry_s, data->section_maps[i]);
1959}
1960
1961static inline int
1962arm_compare_exidx_entries (const struct arm_exidx_entry *lhs,
1963 const struct arm_exidx_entry *rhs)
1964{
1965 return lhs->addr < rhs->addr;
1966}
1967
1968static struct obj_section *
1969arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
1970{
1971 struct obj_section *osect;
1972
1973 ALL_OBJFILE_OSECTIONS (objfile, osect)
1974 if (bfd_get_section_flags (objfile->obfd,
1975 osect->the_bfd_section) & SEC_ALLOC)
1976 {
1977 bfd_vma start, size;
1978 start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
1979 size = bfd_get_section_size (osect->the_bfd_section);
1980
1981 if (start <= vma && vma < start + size)
1982 return osect;
1983 }
1984
1985 return NULL;
1986}
1987
1988/* Parse contents of exception table and exception index sections
1989 of OBJFILE, and fill in the exception table entry cache.
1990
1991 For each entry that refers to a standard ARM-defined personality
1992 routine, extract the frame unwinding instructions (from either
1993 the index or the table section). The unwinding instructions
1994 are normalized by:
1995 - extracting them from the rest of the table data
1996 - converting to host endianness
1997 - appending the implicit 0xb0 ("Finish") code
1998
1999 The extracted and normalized instructions are stored for later
2000 retrieval by the arm_find_exidx_entry routine. */
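/* Editorial illustration (summary of the layout the code below assumes,
   not part of the original comment): each .ARM.exidx entry is a pair of
   32-bit words.  The first word is a prel31 offset, relative to the word
   itself, giving the start address of the function covered.  The second
   word is one of: the value 1 (EXIDX_CANTUNWIND, no unwind information),
   an inline "short form" entry with bit 31 set whose low three bytes are
   unwind opcodes, or a prel31 offset into .ARM.extab holding the longer
   form.  These correspond to the val == 1, 0x80000000 and prel31
   branches in the loop below.  */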
2001
2002static void
2003arm_exidx_new_objfile (struct objfile *objfile)
2004{
3bb47e8b 2005 struct cleanup *cleanups;
0e9e9abd
UW
2006 struct arm_exidx_data *data;
2007 asection *exidx, *extab;
2008 bfd_vma exidx_vma = 0, extab_vma = 0;
2009 bfd_size_type exidx_size = 0, extab_size = 0;
2010 gdb_byte *exidx_data = NULL, *extab_data = NULL;
2011 LONGEST i;
2012
2013 /* If we've already touched this file, do nothing. */
2014 if (!objfile || objfile_data (objfile, arm_exidx_data_key) != NULL)
2015 return;
3bb47e8b 2016 cleanups = make_cleanup (null_cleanup, NULL);
0e9e9abd
UW
2017
2018 /* Read contents of exception table and index. */
a5eda10c 2019 exidx = bfd_get_section_by_name (objfile->obfd, ELF_STRING_ARM_unwind);
0e9e9abd
UW
2020 if (exidx)
2021 {
2022 exidx_vma = bfd_section_vma (objfile->obfd, exidx);
2023 exidx_size = bfd_get_section_size (exidx);
224c3ddb 2024 exidx_data = (gdb_byte *) xmalloc (exidx_size);
0e9e9abd
UW
2025 make_cleanup (xfree, exidx_data);
2026
2027 if (!bfd_get_section_contents (objfile->obfd, exidx,
2028 exidx_data, 0, exidx_size))
2029 {
2030 do_cleanups (cleanups);
2031 return;
2032 }
2033 }
2034
2035 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2036 if (extab)
2037 {
2038 extab_vma = bfd_section_vma (objfile->obfd, extab);
2039 extab_size = bfd_get_section_size (extab);
224c3ddb 2040 extab_data = (gdb_byte *) xmalloc (extab_size);
0e9e9abd
UW
2041 make_cleanup (xfree, extab_data);
2042
2043 if (!bfd_get_section_contents (objfile->obfd, extab,
2044 extab_data, 0, extab_size))
2045 {
2046 do_cleanups (cleanups);
2047 return;
2048 }
2049 }
2050
2051 /* Allocate exception table data structure. */
2052 data = OBSTACK_ZALLOC (&objfile->objfile_obstack, struct arm_exidx_data);
2053 set_objfile_data (objfile, arm_exidx_data_key, data);
2054 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
2055 objfile->obfd->section_count,
2056 VEC(arm_exidx_entry_s) *);
2057
2058 /* Fill in exception table. */
2059 for (i = 0; i < exidx_size / 8; i++)
2060 {
2061 struct arm_exidx_entry new_exidx_entry;
2062 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8);
2063 bfd_vma val = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8 + 4);
2064 bfd_vma addr = 0, word = 0;
2065 int n_bytes = 0, n_words = 0;
2066 struct obj_section *sec;
2067 gdb_byte *entry = NULL;
2068
2069 /* Extract address of start of function. */
2070 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2071 idx += exidx_vma + i * 8;
2072
2073 /* Find section containing function and compute section offset. */
2074 sec = arm_obj_section_from_vma (objfile, idx);
2075 if (sec == NULL)
2076 continue;
2077 idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);
2078
2079 /* Determine address of exception table entry. */
2080 if (val == 1)
2081 {
2082 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2083 }
2084 else if ((val & 0xff000000) == 0x80000000)
2085 {
2086 /* Exception table entry embedded in .ARM.exidx
2087 -- must be short form. */
2088 word = val;
2089 n_bytes = 3;
2090 }
2091 else if (!(val & 0x80000000))
2092 {
2093 /* Exception table entry in .ARM.extab. */
2094 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2095 addr += exidx_vma + i * 8 + 4;
2096
2097 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_size)
2098 {
2099 word = bfd_h_get_32 (objfile->obfd,
2100 extab_data + addr - extab_vma);
2101 addr += 4;
2102
2103 if ((word & 0xff000000) == 0x80000000)
2104 {
2105 /* Short form. */
2106 n_bytes = 3;
2107 }
2108 else if ((word & 0xff000000) == 0x81000000
2109 || (word & 0xff000000) == 0x82000000)
2110 {
2111 /* Long form. */
2112 n_bytes = 2;
2113 n_words = ((word >> 16) & 0xff);
2114 }
2115 else if (!(word & 0x80000000))
2116 {
2117 bfd_vma pers;
2118 struct obj_section *pers_sec;
2119 int gnu_personality = 0;
2120
2121 /* Custom personality routine. */
2122 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2123 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2124
2125 /* Check whether we've got one of the variants of the
2126 GNU personality routines. */
2127 pers_sec = arm_obj_section_from_vma (objfile, pers);
2128 if (pers_sec)
2129 {
2130 static const char *personality[] =
2131 {
2132 "__gcc_personality_v0",
2133 "__gxx_personality_v0",
2134 "__gcj_personality_v0",
2135 "__gnu_objc_personality_v0",
2136 NULL
2137 };
2138
2139 CORE_ADDR pc = pers + obj_section_offset (pers_sec);
2140 int k;
2141
2142 for (k = 0; personality[k]; k++)
2143 if (lookup_minimal_symbol_by_pc_name
2144 (pc, personality[k], objfile))
2145 {
2146 gnu_personality = 1;
2147 break;
2148 }
2149 }
2150
2151 /* If so, the next word contains a word count in the high
2152 byte, followed by the same unwind instructions as the
2153 pre-defined forms. */
2154 if (gnu_personality
2155 && addr + 4 <= extab_vma + extab_size)
2156 {
2157 word = bfd_h_get_32 (objfile->obfd,
2158 extab_data + addr - extab_vma);
2159 addr += 4;
2160 n_bytes = 3;
2161 n_words = ((word >> 24) & 0xff);
2162 }
2163 }
2164 }
2165 }
2166
2167 /* Sanity check address. */
2168 if (n_words)
2169 if (addr < extab_vma || addr + 4 * n_words > extab_vma + extab_size)
2170 n_words = n_bytes = 0;
2171
2172 /* The unwind instructions reside in WORD (only the N_BYTES least
2173 significant bytes are valid), followed by N_WORDS words in the
2174 extab section starting at ADDR. */
2175 if (n_bytes || n_words)
2176 {
224c3ddb
SM
2177 gdb_byte *p = entry
2178 = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
2179 n_bytes + n_words * 4 + 1);
0e9e9abd
UW
2180
2181 while (n_bytes--)
2182 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2183
2184 while (n_words--)
2185 {
2186 word = bfd_h_get_32 (objfile->obfd,
2187 extab_data + addr - extab_vma);
2188 addr += 4;
2189
2190 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2191 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2192 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2193 *p++ = (gdb_byte) (word & 0xff);
2194 }
2195
2196 /* Implied "Finish" to terminate the list. */
2197 *p++ = 0xb0;
2198 }
2199
2200 /* Push the entry onto the vector; entries are guaranteed to
2201 appear in order of increasing addresses. */
2202 new_exidx_entry.addr = idx;
2203 new_exidx_entry.entry = entry;
2204 VEC_safe_push (arm_exidx_entry_s,
2205 data->section_maps[sec->the_bfd_section->index],
2206 &new_exidx_entry);
2207 }
2208
2209 do_cleanups (cleanups);
2210}
2211
2212/* Search for the exception table entry covering MEMADDR. If one is found,
2213 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2214 set *START to the start of the region covered by this entry. */
2215
2216static gdb_byte *
2217arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2218{
2219 struct obj_section *sec;
2220
2221 sec = find_pc_section (memaddr);
2222 if (sec != NULL)
2223 {
2224 struct arm_exidx_data *data;
2225 VEC(arm_exidx_entry_s) *map;
2226 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2227 unsigned int idx;
2228
9a3c8263
SM
2229 data = ((struct arm_exidx_data *)
2230 objfile_data (sec->objfile, arm_exidx_data_key));
0e9e9abd
UW
2231 if (data != NULL)
2232 {
2233 map = data->section_maps[sec->the_bfd_section->index];
2234 if (!VEC_empty (arm_exidx_entry_s, map))
2235 {
2236 struct arm_exidx_entry *map_sym;
2237
2238 idx = VEC_lower_bound (arm_exidx_entry_s, map, &map_key,
2239 arm_compare_exidx_entries);
2240
2241 /* VEC_lower_bound finds the earliest ordered insertion
2242 point. If the following symbol starts at this exact
2243 address, we use that; otherwise, the preceding
2244 exception table entry covers this address. */
2245 if (idx < VEC_length (arm_exidx_entry_s, map))
2246 {
2247 map_sym = VEC_index (arm_exidx_entry_s, map, idx);
2248 if (map_sym->addr == map_key.addr)
2249 {
2250 if (start)
2251 *start = map_sym->addr + obj_section_addr (sec);
2252 return map_sym->entry;
2253 }
2254 }
2255
2256 if (idx > 0)
2257 {
2258 map_sym = VEC_index (arm_exidx_entry_s, map, idx - 1);
2259 if (start)
2260 *start = map_sym->addr + obj_section_addr (sec);
2261 return map_sym->entry;
2262 }
2263 }
2264 }
2265 }
2266
2267 return NULL;
2268}
2269
2270/* Given the current frame THIS_FRAME, and its associated frame unwinding
2271 instruction list from the ARM exception table entry ENTRY, allocate and
2272 return a prologue cache structure describing how to unwind this frame.
2273
2274 Return NULL if the unwinding instruction list contains a "spare",
2275 "reserved" or "refuse to unwind" instruction as defined in section
2276 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2277 for the ARM Architecture" document. */
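/* Editorial sketch (assumed example, not taken from this file): for a
   function whose prologue is "push {r4, r5, lr}; sub sp, sp, #16", a
   typical short-form unwind sequence is the bytes 0x03 0xa9 0xb0:
   0x03 adds (3 << 2) + 4 = 16 to vsp, 0xa9 pops r4, r5 and lr, and
   0xb0 ("Finish") copies LR into PC and terminates the list.  The
   decoder below walks exactly such byte sequences.  */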
2278
2279static struct arm_prologue_cache *
2280arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2281{
2282 CORE_ADDR vsp = 0;
2283 int vsp_valid = 0;
2284
2285 struct arm_prologue_cache *cache;
2286 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2287 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2288
2289 for (;;)
2290 {
2291 gdb_byte insn;
2292
2293 /* Whenever we reload SP, we actually have to retrieve its
2294 actual value in the current frame. */
2295 if (!vsp_valid)
2296 {
2297 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2298 {
2299 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2300 vsp = get_frame_register_unsigned (this_frame, reg);
2301 }
2302 else
2303 {
2304 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
2305 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2306 }
2307
2308 vsp_valid = 1;
2309 }
2310
2311 /* Decode next unwind instruction. */
2312 insn = *entry++;
2313
2314 if ((insn & 0xc0) == 0)
2315 {
2316 int offset = insn & 0x3f;
2317 vsp += (offset << 2) + 4;
2318 }
2319 else if ((insn & 0xc0) == 0x40)
2320 {
2321 int offset = insn & 0x3f;
2322 vsp -= (offset << 2) + 4;
2323 }
2324 else if ((insn & 0xf0) == 0x80)
2325 {
2326 int mask = ((insn & 0xf) << 8) | *entry++;
2327 int i;
2328
2329 /* The special case of an all-zero mask identifies
2330 "Refuse to unwind". We return NULL to fall back
2331 to the prologue analyzer. */
2332 if (mask == 0)
2333 return NULL;
2334
2335 /* Pop registers r4..r15 under mask. */
2336 for (i = 0; i < 12; i++)
2337 if (mask & (1 << i))
2338 {
2339 cache->saved_regs[4 + i].addr = vsp;
2340 vsp += 4;
2341 }
2342
2343 /* Special-case popping SP -- we need to reload vsp. */
2344 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2345 vsp_valid = 0;
2346 }
2347 else if ((insn & 0xf0) == 0x90)
2348 {
2349 int reg = insn & 0xf;
2350
2351 /* Reserved cases. */
2352 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2353 return NULL;
2354
2355 /* Set SP from another register and mark VSP for reload. */
2356 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2357 vsp_valid = 0;
2358 }
2359 else if ((insn & 0xf0) == 0xa0)
2360 {
2361 int count = insn & 0x7;
2362 int pop_lr = (insn & 0x8) != 0;
2363 int i;
2364
2365 /* Pop r4..r[4+count]. */
2366 for (i = 0; i <= count; i++)
2367 {
2368 cache->saved_regs[4 + i].addr = vsp;
2369 vsp += 4;
2370 }
2371
2372 /* If indicated by flag, pop LR as well. */
2373 if (pop_lr)
2374 {
2375 cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
2376 vsp += 4;
2377 }
2378 }
2379 else if (insn == 0xb0)
2380 {
2381 /* We could only have updated PC by popping into it; if so, it
2382 will show up as an address. Otherwise, copy LR into PC. */
2383 if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
2384 cache->saved_regs[ARM_PC_REGNUM]
2385 = cache->saved_regs[ARM_LR_REGNUM];
2386
2387 /* We're done. */
2388 break;
2389 }
2390 else if (insn == 0xb1)
2391 {
2392 int mask = *entry++;
2393 int i;
2394
2395 /* All-zero mask and mask >= 16 is "spare". */
2396 if (mask == 0 || mask >= 16)
2397 return NULL;
2398
2399 /* Pop r0..r3 under mask. */
2400 for (i = 0; i < 4; i++)
2401 if (mask & (1 << i))
2402 {
2403 cache->saved_regs[i].addr = vsp;
2404 vsp += 4;
2405 }
2406 }
2407 else if (insn == 0xb2)
2408 {
2409 ULONGEST offset = 0;
2410 unsigned shift = 0;
2411
2412 do
2413 {
2414 offset |= (*entry & 0x7f) << shift;
2415 shift += 7;
2416 }
2417 while (*entry++ & 0x80);
2418
2419 vsp += 0x204 + (offset << 2);
2420 }
2421 else if (insn == 0xb3)
2422 {
2423 int start = *entry >> 4;
2424 int count = (*entry++) & 0xf;
2425 int i;
2426
2427 /* Only registers D0..D15 are valid here. */
2428 if (start + count >= 16)
2429 return NULL;
2430
2431 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2432 for (i = 0; i <= count; i++)
2433 {
2434 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2435 vsp += 8;
2436 }
2437
2438 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2439 vsp += 4;
2440 }
2441 else if ((insn & 0xf8) == 0xb8)
2442 {
2443 int count = insn & 0x7;
2444 int i;
2445
2446 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2447 for (i = 0; i <= count; i++)
2448 {
2449 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2450 vsp += 8;
2451 }
2452
2453 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2454 vsp += 4;
2455 }
2456 else if (insn == 0xc6)
2457 {
2458 int start = *entry >> 4;
2459 int count = (*entry++) & 0xf;
2460 int i;
2461
2462 /* Only registers WR0..WR15 are valid. */
2463 if (start + count >= 16)
2464 return NULL;
2465
2466 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2467 for (i = 0; i <= count; i++)
2468 {
2469 cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
2470 vsp += 8;
2471 }
2472 }
2473 else if (insn == 0xc7)
2474 {
2475 int mask = *entry++;
2476 int i;
2477
2478 /* All-zero mask and mask >= 16 is "spare". */
2479 if (mask == 0 || mask >= 16)
2480 return NULL;
2481
2482 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2483 for (i = 0; i < 4; i++)
2484 if (mask & (1 << i))
2485 {
2486 cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
2487 vsp += 4;
2488 }
2489 }
2490 else if ((insn & 0xf8) == 0xc0)
2491 {
2492 int count = insn & 0x7;
2493 int i;
2494
2495 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2496 for (i = 0; i <= count; i++)
2497 {
2498 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
2499 vsp += 8;
2500 }
2501 }
2502 else if (insn == 0xc8)
2503 {
2504 int start = *entry >> 4;
2505 int count = (*entry++) & 0xf;
2506 int i;
2507
2508 /* Only registers D0..D31 are valid. */
2509 if (start + count >= 16)
2510 return NULL;
2511
2512 /* Pop VFP double-precision registers
2513 D[16+start]..D[16+start+count]. */
2514 for (i = 0; i <= count; i++)
2515 {
2516 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
2517 vsp += 8;
2518 }
2519 }
2520 else if (insn == 0xc9)
2521 {
2522 int start = *entry >> 4;
2523 int count = (*entry++) & 0xf;
2524 int i;
2525
2526 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2527 for (i = 0; i <= count; i++)
2528 {
2529 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2530 vsp += 8;
2531 }
2532 }
2533 else if ((insn & 0xf8) == 0xd0)
2534 {
2535 int count = insn & 0x7;
2536 int i;
2537
2538 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2539 for (i = 0; i <= count; i++)
2540 {
2541 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2542 vsp += 8;
2543 }
2544 }
2545 else
2546 {
2547 /* Everything else is "spare". */
2548 return NULL;
2549 }
2550 }
2551
2552 /* If we restore SP from a register, assume this was the frame register.
2553 Otherwise just fall back to SP as frame register. */
2554 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2555 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2556 else
2557 cache->framereg = ARM_SP_REGNUM;
2558
2559 /* Determine offset to previous frame. */
2560 cache->framesize
2561 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2562
2563 /* We already got the previous SP. */
2564 cache->prev_sp = vsp;
2565
2566 return cache;
2567}
2568
2569/* Unwinding via ARM exception table entries. Note that the sniffer
2570 already computes a filled-in prologue cache, which is then used
2571 with the same arm_prologue_this_id and arm_prologue_prev_register
2572 routines also used for prologue-parsing based unwinding. */
2573
2574static int
2575arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2576 struct frame_info *this_frame,
2577 void **this_prologue_cache)
2578{
2579 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2580 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2581 CORE_ADDR addr_in_block, exidx_region, func_start;
2582 struct arm_prologue_cache *cache;
2583 gdb_byte *entry;
2584
2585 /* See if we have an ARM exception table entry covering this address. */
2586 addr_in_block = get_frame_address_in_block (this_frame);
2587 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2588 if (!entry)
2589 return 0;
2590
2591 /* The ARM exception table does not describe unwind information
2592 for arbitrary PC values, but is guaranteed to be correct only
2593 at call sites. We have to decide here whether we want to use
2594 ARM exception table information for this frame, or fall back
2595 to using prologue parsing. (Note that if we have DWARF CFI,
2596 this sniffer isn't even called -- CFI is always preferred.)
2597
2598 Before we make this decision, however, we check whether we
2599 actually have *symbol* information for the current frame.
2600 If not, prologue parsing would not work anyway, so we might
2601 as well use the exception table and hope for the best. */
2602 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2603 {
2604 int exc_valid = 0;
2605
2606 /* If the next frame is "normal", we are at a call site in this
2607 frame, so exception information is guaranteed to be valid. */
2608 if (get_next_frame (this_frame)
2609 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2610 exc_valid = 1;
2611
2612 /* We also assume exception information is valid if we're currently
2613 blocked in a system call. The system library is supposed to
d9311bfa
AT
2614 ensure this, so that e.g. pthread cancellation works. */
2615 if (arm_frame_is_thumb (this_frame))
0e9e9abd 2616 {
d9311bfa 2617 LONGEST insn;
416dc9c6 2618
d9311bfa
AT
2619 if (safe_read_memory_integer (get_frame_pc (this_frame) - 2, 2,
2620 byte_order_for_code, &insn)
2621 && (insn & 0xff00) == 0xdf00 /* svc */)
2622 exc_valid = 1;
0e9e9abd 2623 }
d9311bfa
AT
2624 else
2625 {
2626 LONGEST insn;
416dc9c6 2627
d9311bfa
AT
2628 if (safe_read_memory_integer (get_frame_pc (this_frame) - 4, 4,
2629 byte_order_for_code, &insn)
2630 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2631 exc_valid = 1;
2632 }
2633
0e9e9abd
UW
2634 /* Bail out if we don't know that exception information is valid. */
2635 if (!exc_valid)
2636 return 0;
2637
2638 /* The ARM exception index does not mark the *end* of the region
2639 covered by the entry, and some functions will not have any entry.
2640 To correctly recognize the end of the covered region, the linker
2641 should have inserted dummy records with a CANTUNWIND marker.
2642
2643 Unfortunately, current versions of GNU ld do not reliably do
2644 this, and thus we may have found an incorrect entry above.
2645 As a (temporary) sanity check, we only use the entry if it
2646 lies *within* the bounds of the function. Note that this check
2647 might reject perfectly valid entries that just happen to cover
2648 multiple functions; therefore this check ought to be removed
2649 once the linker is fixed. */
2650 if (func_start > exidx_region)
2651 return 0;
2652 }
2653
2654 /* Decode the list of unwinding instructions into a prologue cache.
2655 Note that this may fail due to e.g. a "refuse to unwind" code. */
2656 cache = arm_exidx_fill_cache (this_frame, entry);
2657 if (!cache)
2658 return 0;
2659
2660 *this_prologue_cache = cache;
2661 return 1;
2662}
2663
2664struct frame_unwind arm_exidx_unwind = {
2665 NORMAL_FRAME,
8fbca658 2666 default_frame_unwind_stop_reason,
0e9e9abd
UW
2667 arm_prologue_this_id,
2668 arm_prologue_prev_register,
2669 NULL,
2670 arm_exidx_unwind_sniffer
2671};
2672
779aa56f
YQ
2673static struct arm_prologue_cache *
2674arm_make_epilogue_frame_cache (struct frame_info *this_frame)
2675{
2676 struct arm_prologue_cache *cache;
779aa56f
YQ
2677 int reg;
2678
2679 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2680 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2681
2682 /* Still rely on the offsets calculated by the prologue scan. */
2683 arm_scan_prologue (this_frame, cache);
2684
2685 /* Since we are in the epilogue, the SP has been restored. */
2686 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2687
2688 /* Calculate actual addresses of saved registers using offsets
2689 determined by arm_scan_prologue. */
2690 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2691 if (trad_frame_addr_p (cache->saved_regs, reg))
2692 cache->saved_regs[reg].addr += cache->prev_sp;
2693
2694 return cache;
2695}
2696
2697/* Implementation of function hook 'this_id' in
2698 'struct frame_unwind' for epilogue unwinder. */
2699
2700static void
2701arm_epilogue_frame_this_id (struct frame_info *this_frame,
2702 void **this_cache,
2703 struct frame_id *this_id)
2704{
2705 struct arm_prologue_cache *cache;
2706 CORE_ADDR pc, func;
2707
2708 if (*this_cache == NULL)
2709 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2710 cache = (struct arm_prologue_cache *) *this_cache;
2711
2712 /* Use function start address as part of the frame ID. If we cannot
2713 identify the start address (due to missing symbol information),
2714 fall back to just using the current PC. */
2715 pc = get_frame_pc (this_frame);
2716 func = get_frame_func (this_frame);
fb3f3d25 2717 if (func == 0)
779aa56f
YQ
2718 func = pc;
2719
2720 (*this_id) = frame_id_build (cache->prev_sp, pc);
2721}
2722
2723/* Implementation of function hook 'prev_register' in
2724 'struct frame_unwind' for epilogue unwinder. */
2725
2726static struct value *
2727arm_epilogue_frame_prev_register (struct frame_info *this_frame,
2728 void **this_cache, int regnum)
2729{
779aa56f
YQ
2730 if (*this_cache == NULL)
2731 *this_cache = arm_make_epilogue_frame_cache (this_frame);
779aa56f
YQ
2732
2733 return arm_prologue_prev_register (this_frame, this_cache, regnum);
2734}
2735
2736static int arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch,
2737 CORE_ADDR pc);
2738static int thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch,
2739 CORE_ADDR pc);
2740
2741/* Implementation of function hook 'sniffer' in
2742 'struct frame_unwind' for epilogue unwinder. */
2743
2744static int
2745arm_epilogue_frame_sniffer (const struct frame_unwind *self,
2746 struct frame_info *this_frame,
2747 void **this_prologue_cache)
2748{
2749 if (frame_relative_level (this_frame) == 0)
2750 {
2751 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2752 CORE_ADDR pc = get_frame_pc (this_frame);
2753
2754 if (arm_frame_is_thumb (this_frame))
2755 return thumb_stack_frame_destroyed_p (gdbarch, pc);
2756 else
2757 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
2758 }
2759 else
2760 return 0;
2761}
2762
2763/* Frame unwinder from epilogue. */
2764
2765static const struct frame_unwind arm_epilogue_frame_unwind =
2766{
2767 NORMAL_FRAME,
2768 default_frame_unwind_stop_reason,
2769 arm_epilogue_frame_this_id,
2770 arm_epilogue_frame_prev_register,
2771 NULL,
2772 arm_epilogue_frame_sniffer,
2773};
2774
80d8d390
YQ
2775/* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2776 trampoline, return the target PC. Otherwise return 0.
2777
2778 void call0a (char c, short s, int i, long l) {}
2779
2780 int main (void)
2781 {
2782 (*pointer_to_call0a) (c, s, i, l);
2783 }
2784
2785 Instead of calling a stub library function _call_via_xx (xx is
2786 the register name), GCC may inline the trampoline in the object
2787 file as below (register r2 has the address of call0a).
2788
2789 .global main
2790 .type main, %function
2791 ...
2792 bl .L1
2793 ...
2794 .size main, .-main
2795
2796 .L1:
2797 bx r2
2798
2799 The trampoline 'bx r2' doesn't belong to main. */
2800
2801static CORE_ADDR
2802arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2803{
2804 /* The heuristic for recognizing such a trampoline is that FRAME is
2805 executing in Thumb mode and the instruction at PC is 'bx Rm'. */
2806 if (arm_frame_is_thumb (frame))
2807 {
2808 gdb_byte buf[2];
2809
2810 if (target_read_memory (pc, buf, 2) == 0)
2811 {
2812 struct gdbarch *gdbarch = get_frame_arch (frame);
2813 enum bfd_endian byte_order_for_code
2814 = gdbarch_byte_order_for_code (gdbarch);
2815 uint16_t insn
2816 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2817
2818 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2819 {
2820 CORE_ADDR dest
2821 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2822
2823 /* Clear the LSB so that gdb core sets step-resume
2824 breakpoint at the right address. */
2825 return UNMAKE_THUMB_ADDR (dest);
2826 }
2827 }
2828 }
2829
2830 return 0;
2831}
2832
909cf6ea 2833static struct arm_prologue_cache *
a262aec2 2834arm_make_stub_cache (struct frame_info *this_frame)
909cf6ea 2835{
909cf6ea 2836 struct arm_prologue_cache *cache;
909cf6ea 2837
35d5d4ee 2838 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
a262aec2 2839 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
909cf6ea 2840
a262aec2 2841 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
909cf6ea
DJ
2842
2843 return cache;
2844}
2845
2846/* Our frame ID for a stub frame is the current SP and LR. */
2847
2848static void
a262aec2 2849arm_stub_this_id (struct frame_info *this_frame,
909cf6ea
DJ
2850 void **this_cache,
2851 struct frame_id *this_id)
2852{
2853 struct arm_prologue_cache *cache;
2854
2855 if (*this_cache == NULL)
a262aec2 2856 *this_cache = arm_make_stub_cache (this_frame);
9a3c8263 2857 cache = (struct arm_prologue_cache *) *this_cache;
909cf6ea 2858
a262aec2 2859 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
909cf6ea
DJ
2860}
2861
a262aec2
DJ
2862static int
2863arm_stub_unwind_sniffer (const struct frame_unwind *self,
2864 struct frame_info *this_frame,
2865 void **this_prologue_cache)
909cf6ea 2866{
93d42b30 2867 CORE_ADDR addr_in_block;
948f8e3d 2868 gdb_byte dummy[4];
18d18ac8
YQ
2869 CORE_ADDR pc, start_addr;
2870 const char *name;
909cf6ea 2871
a262aec2 2872 addr_in_block = get_frame_address_in_block (this_frame);
18d18ac8 2873 pc = get_frame_pc (this_frame);
3e5d3a5a 2874 if (in_plt_section (addr_in_block)
fc36e839
DE
2875 /* We also use the stub unwinder if the target memory is unreadable
2876 to avoid having the prologue unwinder trying to read it. */
18d18ac8
YQ
2877 || target_read_memory (pc, dummy, 4) != 0)
2878 return 1;
2879
2880 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2881 && arm_skip_bx_reg (this_frame, pc) != 0)
a262aec2 2882 return 1;
909cf6ea 2883
a262aec2 2884 return 0;
909cf6ea
DJ
2885}
2886
a262aec2
DJ
2887struct frame_unwind arm_stub_unwind = {
2888 NORMAL_FRAME,
8fbca658 2889 default_frame_unwind_stop_reason,
a262aec2
DJ
2890 arm_stub_this_id,
2891 arm_prologue_prev_register,
2892 NULL,
2893 arm_stub_unwind_sniffer
2894};
2895
2ae28aa9
YQ
2896 /* Store, into CACHE->saved_regs, the addresses of the saved
2897 registers of the frame described by THIS_FRAME, and return
2898 CACHE. */
2899
2900static struct arm_prologue_cache *
2901arm_m_exception_cache (struct frame_info *this_frame)
2902{
2903 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2904 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2905 struct arm_prologue_cache *cache;
2906 CORE_ADDR unwound_sp;
2907 LONGEST xpsr;
2908
2909 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2910 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2911
2912 unwound_sp = get_frame_register_unsigned (this_frame,
2913 ARM_SP_REGNUM);
2914
2915 /* The hardware saves eight 32-bit words, comprising xPSR,
2916 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
2917 "B1.5.6 Exception entry behavior" in
2918 "ARMv7-M Architecture Reference Manual". */
2919 cache->saved_regs[0].addr = unwound_sp;
2920 cache->saved_regs[1].addr = unwound_sp + 4;
2921 cache->saved_regs[2].addr = unwound_sp + 8;
2922 cache->saved_regs[3].addr = unwound_sp + 12;
2923 cache->saved_regs[12].addr = unwound_sp + 16;
2924 cache->saved_regs[14].addr = unwound_sp + 20;
2925 cache->saved_regs[15].addr = unwound_sp + 24;
2926 cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;
2927
2928 /* If bit 9 of the saved xPSR is set, then there is a four-byte
2929 aligner between the top of the 32-byte stack frame and the
2930 previous context's stack pointer. */
2931 cache->prev_sp = unwound_sp + 32;
2932 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
2933 && (xpsr & (1 << 9)) != 0)
2934 cache->prev_sp += 4;
2935
2936 return cache;
2937}
2938
2939/* Implementation of function hook 'this_id' in
2940 'struct frame_unwind'. */
2941
2942static void
2943arm_m_exception_this_id (struct frame_info *this_frame,
2944 void **this_cache,
2945 struct frame_id *this_id)
2946{
2947 struct arm_prologue_cache *cache;
2948
2949 if (*this_cache == NULL)
2950 *this_cache = arm_m_exception_cache (this_frame);
9a3c8263 2951 cache = (struct arm_prologue_cache *) *this_cache;
2ae28aa9
YQ
2952
2953 /* Our frame ID for an exception frame is the previous SP and the PC. */
2954 *this_id = frame_id_build (cache->prev_sp,
2955 get_frame_pc (this_frame));
2956}
2957
2958/* Implementation of function hook 'prev_register' in
2959 'struct frame_unwind'. */
2960
2961static struct value *
2962arm_m_exception_prev_register (struct frame_info *this_frame,
2963 void **this_cache,
2964 int prev_regnum)
2965{
2ae28aa9
YQ
2966 struct arm_prologue_cache *cache;
2967
2968 if (*this_cache == NULL)
2969 *this_cache = arm_m_exception_cache (this_frame);
9a3c8263 2970 cache = (struct arm_prologue_cache *) *this_cache;
2ae28aa9
YQ
2971
2972 /* The value was already reconstructed into PREV_SP. */
2973 if (prev_regnum == ARM_SP_REGNUM)
2974 return frame_unwind_got_constant (this_frame, prev_regnum,
2975 cache->prev_sp);
2976
2977 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
2978 prev_regnum);
2979}
2980
2981/* Implementation of function hook 'sniffer' in
2982 'struct frame_unwind'. */
2983
2984static int
2985arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
2986 struct frame_info *this_frame,
2987 void **this_prologue_cache)
2988{
2989 CORE_ADDR this_pc = get_frame_pc (this_frame);
2990
2991 /* No need to check is_m; this sniffer is only registered for
2992 M-profile architectures. */
2993
2994 /* Exception frames return to one of these magic PCs. Other values
2995 are not defined as of v7-M. See details in "B1.5.8 Exception
2996 return behavior" in "ARMv7-M Architecture Reference Manual". */
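  /* Editorial note: these EXC_RETURN values correspond to a return to
     Handler mode using the main stack (0xfffffff1), to Thread mode
     using the main stack (0xfffffff9), and to Thread mode using the
     process stack (0xfffffffd).  */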
2997 if (this_pc == 0xfffffff1 || this_pc == 0xfffffff9
2998 || this_pc == 0xfffffffd)
2999 return 1;
3000
3001 return 0;
3002}
3003
3004/* Frame unwinder for M-profile exceptions. */
3005
3006struct frame_unwind arm_m_exception_unwind =
3007{
3008 SIGTRAMP_FRAME,
3009 default_frame_unwind_stop_reason,
3010 arm_m_exception_this_id,
3011 arm_m_exception_prev_register,
3012 NULL,
3013 arm_m_exception_unwind_sniffer
3014};
3015
24de872b 3016static CORE_ADDR
a262aec2 3017arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
24de872b
DJ
3018{
3019 struct arm_prologue_cache *cache;
3020
eb5492fa 3021 if (*this_cache == NULL)
a262aec2 3022 *this_cache = arm_make_prologue_cache (this_frame);
9a3c8263 3023 cache = (struct arm_prologue_cache *) *this_cache;
eb5492fa 3024
4be43953 3025 return cache->prev_sp - cache->framesize;
24de872b
DJ
3026}
3027
eb5492fa
DJ
3028struct frame_base arm_normal_base = {
3029 &arm_prologue_unwind,
3030 arm_normal_frame_base,
3031 arm_normal_frame_base,
3032 arm_normal_frame_base
3033};
3034
a262aec2 3035/* Assuming THIS_FRAME is a dummy, return the frame ID of that
eb5492fa
DJ
3036 dummy frame. The frame ID's base needs to match the TOS value
3037 saved by save_dummy_frame_tos() and returned from
3038 arm_push_dummy_call, and the PC needs to match the dummy frame's
3039 breakpoint. */
c906108c 3040
eb5492fa 3041static struct frame_id
a262aec2 3042arm_dummy_id (struct gdbarch *gdbarch, struct frame_info *this_frame)
c906108c 3043{
0963b4bd
MS
3044 return frame_id_build (get_frame_register_unsigned (this_frame,
3045 ARM_SP_REGNUM),
a262aec2 3046 get_frame_pc (this_frame));
eb5492fa 3047}
c3b4394c 3048
eb5492fa
DJ
3049/* Given THIS_FRAME, find the previous frame's resume PC (which will
3050 be used to construct the previous frame's ID, after looking up the
3051 containing function). */
c3b4394c 3052
eb5492fa
DJ
3053static CORE_ADDR
3054arm_unwind_pc (struct gdbarch *gdbarch, struct frame_info *this_frame)
3055{
3056 CORE_ADDR pc;
3057 pc = frame_unwind_register_unsigned (this_frame, ARM_PC_REGNUM);
24568a2c 3058 return arm_addr_bits_remove (gdbarch, pc);
eb5492fa
DJ
3059}
3060
3061static CORE_ADDR
3062arm_unwind_sp (struct gdbarch *gdbarch, struct frame_info *this_frame)
3063{
3064 return frame_unwind_register_unsigned (this_frame, ARM_SP_REGNUM);
c906108c
SS
3065}
3066
b39cc962
DJ
3067static struct value *
3068arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
3069 int regnum)
3070{
24568a2c 3071 struct gdbarch * gdbarch = get_frame_arch (this_frame);
b39cc962 3072 CORE_ADDR lr, cpsr;
9779414d 3073 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
b39cc962
DJ
3074
3075 switch (regnum)
3076 {
3077 case ARM_PC_REGNUM:
3078 /* The PC is normally copied from the return column, which
3079 describes saves of LR. However, that version may have an
3080 extra bit set to indicate Thumb state. The bit is not
3081 part of the PC. */
3082 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3083 return frame_unwind_got_constant (this_frame, regnum,
24568a2c 3084 arm_addr_bits_remove (gdbarch, lr));
b39cc962
DJ
3085
3086 case ARM_PS_REGNUM:
3087 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
ca38c58e 3088 cpsr = get_frame_register_unsigned (this_frame, regnum);
b39cc962
DJ
3089 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3090 if (IS_THUMB_ADDR (lr))
9779414d 3091 cpsr |= t_bit;
b39cc962 3092 else
9779414d 3093 cpsr &= ~t_bit;
ca38c58e 3094 return frame_unwind_got_constant (this_frame, regnum, cpsr);
b39cc962
DJ
3095
3096 default:
3097 internal_error (__FILE__, __LINE__,
3098 _("Unexpected register %d"), regnum);
3099 }
3100}
3101
3102static void
3103arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3104 struct dwarf2_frame_state_reg *reg,
3105 struct frame_info *this_frame)
3106{
3107 switch (regnum)
3108 {
3109 case ARM_PC_REGNUM:
3110 case ARM_PS_REGNUM:
3111 reg->how = DWARF2_FRAME_REG_FN;
3112 reg->loc.fn = arm_dwarf2_prev_register;
3113 break;
3114 case ARM_SP_REGNUM:
3115 reg->how = DWARF2_FRAME_REG_CFA;
3116 break;
3117 }
3118}
3119
c9cf6e20 3120/* Implement the stack_frame_destroyed_p gdbarch method. */
4024ca99
UW
3121
3122static int
c9cf6e20 3123thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
4024ca99
UW
3124{
3125 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3126 unsigned int insn, insn2;
3127 int found_return = 0, found_stack_adjust = 0;
3128 CORE_ADDR func_start, func_end;
3129 CORE_ADDR scan_pc;
3130 gdb_byte buf[4];
3131
3132 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3133 return 0;
3134
3135 /* The epilogue is a sequence of instructions along the following lines:
3136
3137 - add stack frame size to SP or FP
3138 - [if frame pointer used] restore SP from FP
3139 - restore registers from SP [may include PC]
3140 - a return-type instruction [if PC wasn't already restored]
3141
3142 In a first pass, we scan forward from the current PC and check that
3143 the instructions we find are compatible with this sequence, ending in
3144 a return instruction.
3145
3146 However, this is not sufficient to distinguish indirect function calls
3147 within a function from indirect tail calls in the epilogue in some cases.
3148 Therefore, if we didn't already find any SP-changing instruction during
3149 forward scan, we add a backward scanning heuristic to ensure we actually
3150 are in the epilogue. */
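  /* Editorial illustration (assumed example): a common Thumb epilogue
     such as "add sp, #16; pop {r4, r5, pc}" encodes as 0xb004 0xbd30.
     The forward scan below accepts 0xb004 via
     thumb_instruction_restores_sp and treats 0xbd30 as a PC-popping
     return because (0xbd30 & 0xff00) == 0xbd00.  */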
3151
3152 scan_pc = pc;
3153 while (scan_pc < func_end && !found_return)
3154 {
3155 if (target_read_memory (scan_pc, buf, 2))
3156 break;
3157
3158 scan_pc += 2;
3159 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3160
3161 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3162 found_return = 1;
3163 else if (insn == 0x46f7) /* mov pc, lr */
3164 found_return = 1;
540314bd 3165 else if (thumb_instruction_restores_sp (insn))
4024ca99 3166 {
b7576e5c 3167 if ((insn & 0xff00) == 0xbd00) /* pop <registers, PC> */
4024ca99
UW
3168 found_return = 1;
3169 }
db24da6d 3170 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
4024ca99
UW
3171 {
3172 if (target_read_memory (scan_pc, buf, 2))
3173 break;
3174
3175 scan_pc += 2;
3176 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3177
3178 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3179 {
4024ca99
UW
3180 if (insn2 & 0x8000) /* <registers> include PC. */
3181 found_return = 1;
3182 }
3183 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3184 && (insn2 & 0x0fff) == 0x0b04)
3185 {
4024ca99
UW
3186 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3187 found_return = 1;
3188 }
3189 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3190 && (insn2 & 0x0e00) == 0x0a00)
6b65d1b6 3191 ;
4024ca99
UW
3192 else
3193 break;
3194 }
3195 else
3196 break;
3197 }
3198
3199 if (!found_return)
3200 return 0;
3201
3202 /* Since any instruction in the epilogue sequence, with the possible
3203 exception of return itself, updates the stack pointer, we need to
3204 scan backwards for at most one instruction. Try either a 16-bit or
3205 a 32-bit instruction. This is just a heuristic, so we do not worry
0963b4bd 3206 too much about false positives. */
4024ca99 3207
6b65d1b6
YQ
3208 if (pc - 4 < func_start)
3209 return 0;
3210 if (target_read_memory (pc - 4, buf, 4))
3211 return 0;
4024ca99 3212
6b65d1b6
YQ
3213 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3214 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3215
3216 if (thumb_instruction_restores_sp (insn2))
3217 found_stack_adjust = 1;
3218 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3219 found_stack_adjust = 1;
3220 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3221 && (insn2 & 0x0fff) == 0x0b04)
3222 found_stack_adjust = 1;
3223 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3224 && (insn2 & 0x0e00) == 0x0a00)
3225 found_stack_adjust = 1;
4024ca99
UW
3226
3227 return found_stack_adjust;
3228}
3229
4024ca99 3230static int
c58b006a 3231arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch, CORE_ADDR pc)
4024ca99
UW
3232{
3233 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3234 unsigned int insn;
f303bc3e 3235 int found_return;
4024ca99
UW
3236 CORE_ADDR func_start, func_end;
3237
4024ca99
UW
3238 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3239 return 0;
3240
3241 /* We are in the epilogue if the previous instruction was a stack
3242 adjustment and the next instruction is a possible return (bx, mov
3243 pc, or pop). We could have to scan backwards to find the stack
3244 adjustment, or forwards to find the return, but this is a decent
3245 approximation. First scan forwards. */
3246
3247 found_return = 0;
3248 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3249 if (bits (insn, 28, 31) != INST_NV)
3250 {
3251 if ((insn & 0x0ffffff0) == 0x012fff10)
3252 /* BX. */
3253 found_return = 1;
3254 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3255 /* MOV PC. */
3256 found_return = 1;
3257 else if ((insn & 0x0fff0000) == 0x08bd0000
3258 && (insn & 0x0000c000) != 0)
3259 /* POP (LDMIA), including PC or LR. */
3260 found_return = 1;
3261 }
3262
3263 if (!found_return)
3264 return 0;
3265
3266 /* Scan backwards. This is just a heuristic, so do not worry about
3267 false positives from mode changes. */
3268
3269 if (pc < func_start + 4)
3270 return 0;
3271
3272 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
f303bc3e 3273 if (arm_instruction_restores_sp (insn))
4024ca99
UW
3274 return 1;
3275
3276 return 0;
3277}
3278
c58b006a
YQ
3279/* Implement the stack_frame_destroyed_p gdbarch method. */
3280
3281static int
3282arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3283{
3284 if (arm_pc_is_thumb (gdbarch, pc))
3285 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3286 else
3287 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
3288}
4024ca99 3289
2dd604e7
RE
3290/* When arguments must be pushed onto the stack, they go on in reverse
3291 order. The code below implements a FILO (stack) to do this. */
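/* Editorial sketch of how a caller such as arm_push_dummy_call is
   expected to drive this list (illustrative, not a verbatim excerpt):

     struct stack_item *si = NULL;
     si = push_stack_item (si, contents, len);
     ...
     while (si)
       {
         sp -= si->len;
         write_memory (sp, si->data, si->len);
         si = pop_stack_item (si);
       }
*/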
3292
3293struct stack_item
3294{
3295 int len;
3296 struct stack_item *prev;
7c543f7b 3297 gdb_byte *data;
2dd604e7
RE
3298};
3299
3300static struct stack_item *
df3b6708 3301push_stack_item (struct stack_item *prev, const gdb_byte *contents, int len)
2dd604e7
RE
3302{
3303 struct stack_item *si;
8d749320 3304 si = XNEW (struct stack_item);
7c543f7b 3305 si->data = (gdb_byte *) xmalloc (len);
2dd604e7
RE
3306 si->len = len;
3307 si->prev = prev;
3308 memcpy (si->data, contents, len);
3309 return si;
3310}
3311
3312static struct stack_item *
3313pop_stack_item (struct stack_item *si)
3314{
3315 struct stack_item *dead = si;
3316 si = si->prev;
3317 xfree (dead->data);
3318 xfree (dead);
3319 return si;
3320}
3321
2af48f68
PB
3322
3323/* Return the alignment (in bytes) of the given type. */
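/* Editorial examples (illustrative only): under these rules a
   "struct { char c; int i; }" aligns to 4 (the largest member
   alignment), a "double" aligns to 8, and a 16-byte vector type is
   capped at 8-byte alignment by the TYPE_CODE_ARRAY case below.  */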
3324
3325static int
3326arm_type_align (struct type *t)
3327{
3328 int n;
3329 int align;
3330 int falign;
3331
3332 t = check_typedef (t);
3333 switch (TYPE_CODE (t))
3334 {
3335 default:
3336 /* Should never happen. */
3337 internal_error (__FILE__, __LINE__, _("unknown type alignment"));
3338 return 4;
3339
3340 case TYPE_CODE_PTR:
3341 case TYPE_CODE_ENUM:
3342 case TYPE_CODE_INT:
3343 case TYPE_CODE_FLT:
3344 case TYPE_CODE_SET:
3345 case TYPE_CODE_RANGE:
2af48f68
PB
3346 case TYPE_CODE_REF:
3347 case TYPE_CODE_CHAR:
3348 case TYPE_CODE_BOOL:
3349 return TYPE_LENGTH (t);
3350
3351 case TYPE_CODE_ARRAY:
c4312b19
YQ
3352 if (TYPE_VECTOR (t))
3353 {
3354 /* Use the natural alignment for vector types (the same for
3355 scalar type), but the maximum alignment is 64-bit. */
3356 if (TYPE_LENGTH (t) > 8)
3357 return 8;
3358 else
3359 return TYPE_LENGTH (t);
3360 }
3361 else
3362 return arm_type_align (TYPE_TARGET_TYPE (t));
2af48f68 3363 case TYPE_CODE_COMPLEX:
2af48f68
PB
3364 return arm_type_align (TYPE_TARGET_TYPE (t));
3365
3366 case TYPE_CODE_STRUCT:
3367 case TYPE_CODE_UNION:
3368 align = 1;
3369 for (n = 0; n < TYPE_NFIELDS (t); n++)
3370 {
3371 falign = arm_type_align (TYPE_FIELD_TYPE (t, n));
3372 if (falign > align)
3373 align = falign;
3374 }
3375 return align;
3376 }
3377}
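/* Illustrative sketch, not from the original source: what the rules
   above yield for a few cases (the example types are invented).

     arm_type_align (int)                          == 4
     arm_type_align (double)                       == 8
     arm_type_align (struct { char c; double d; }) == 8  (largest field)
     arm_type_align (16-byte vector)               == 8  (capped at 64 bits)

   A tiny restatement of the struct/union rule:  */

static int
sketch_aggregate_align (const int *field_aligns, int nfields)
{
  int align = 1, i;

  for (i = 0; i < nfields; i++)
    if (field_aligns[i] > align)
      align = field_aligns[i];
  return align;
}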
3378
90445bd3
DJ
3379/* Possible base types for a candidate for passing and returning in
3380 VFP registers. */
3381
3382enum arm_vfp_cprc_base_type
3383{
3384 VFP_CPRC_UNKNOWN,
3385 VFP_CPRC_SINGLE,
3386 VFP_CPRC_DOUBLE,
3387 VFP_CPRC_VEC64,
3388 VFP_CPRC_VEC128
3389};
3390
3391/* The length of one element of base type B. */
3392
3393static unsigned
3394arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3395{
3396 switch (b)
3397 {
3398 case VFP_CPRC_SINGLE:
3399 return 4;
3400 case VFP_CPRC_DOUBLE:
3401 return 8;
3402 case VFP_CPRC_VEC64:
3403 return 8;
3404 case VFP_CPRC_VEC128:
3405 return 16;
3406 default:
3407 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3408 (int) b);
3409 }
3410}
3411
3412/* The character ('s', 'd' or 'q') for the type of VFP register used
3413 for passing base type B. */
3414
3415static int
3416arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3417{
3418 switch (b)
3419 {
3420 case VFP_CPRC_SINGLE:
3421 return 's';
3422 case VFP_CPRC_DOUBLE:
3423 return 'd';
3424 case VFP_CPRC_VEC64:
3425 return 'd';
3426 case VFP_CPRC_VEC128:
3427 return 'q';
3428 default:
3429 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3430 (int) b);
3431 }
3432}
3433
3434/* Determine whether T may be part of a candidate for passing and
3435 returning in VFP registers, ignoring the limit on the total number
3436 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3437 classification of the first valid component found; if it is not
3438 VFP_CPRC_UNKNOWN, all components must have the same classification
3439 as *BASE_TYPE. If it is found that T contains a type not permitted
3440 for passing and returning in VFP registers, a type differently
3441 classified from *BASE_TYPE, or two types differently classified
3442 from each other, return -1, otherwise return the total number of
3443 base-type elements found (possibly 0 in an empty structure or
817e0957
YQ
3444 array). Vector types are not currently supported, matching the
3445 generic AAPCS support. */
90445bd3
DJ
3446
3447static int
3448arm_vfp_cprc_sub_candidate (struct type *t,
3449 enum arm_vfp_cprc_base_type *base_type)
3450{
3451 t = check_typedef (t);
3452 switch (TYPE_CODE (t))
3453 {
3454 case TYPE_CODE_FLT:
3455 switch (TYPE_LENGTH (t))
3456 {
3457 case 4:
3458 if (*base_type == VFP_CPRC_UNKNOWN)
3459 *base_type = VFP_CPRC_SINGLE;
3460 else if (*base_type != VFP_CPRC_SINGLE)
3461 return -1;
3462 return 1;
3463
3464 case 8:
3465 if (*base_type == VFP_CPRC_UNKNOWN)
3466 *base_type = VFP_CPRC_DOUBLE;
3467 else if (*base_type != VFP_CPRC_DOUBLE)
3468 return -1;
3469 return 1;
3470
3471 default:
3472 return -1;
3473 }
3474 break;
3475
817e0957
YQ
3476 case TYPE_CODE_COMPLEX:
3477 /* Arguments of complex T where T is one of the types float or
3478 double get treated as if they are implemented as:
3479
3480 struct complexT
3481 {
3482 T real;
3483 T imag;
5f52445b
YQ
3484 };
3485
3486 */
817e0957
YQ
3487 switch (TYPE_LENGTH (t))
3488 {
3489 case 8:
3490 if (*base_type == VFP_CPRC_UNKNOWN)
3491 *base_type = VFP_CPRC_SINGLE;
3492 else if (*base_type != VFP_CPRC_SINGLE)
3493 return -1;
3494 return 2;
3495
3496 case 16:
3497 if (*base_type == VFP_CPRC_UNKNOWN)
3498 *base_type = VFP_CPRC_DOUBLE;
3499 else if (*base_type != VFP_CPRC_DOUBLE)
3500 return -1;
3501 return 2;
3502
3503 default:
3504 return -1;
3505 }
3506 break;
3507
90445bd3
DJ
3508 case TYPE_CODE_ARRAY:
3509 {
c4312b19 3510 if (TYPE_VECTOR (t))
90445bd3 3511 {
c4312b19
YQ
3512	    /* 64-bit and 128-bit containerized vector types are VFP
3513	       CPRCs. */
3514 switch (TYPE_LENGTH (t))
3515 {
3516 case 8:
3517 if (*base_type == VFP_CPRC_UNKNOWN)
3518 *base_type = VFP_CPRC_VEC64;
3519 return 1;
3520 case 16:
3521 if (*base_type == VFP_CPRC_UNKNOWN)
3522 *base_type = VFP_CPRC_VEC128;
3523 return 1;
3524 default:
3525 return -1;
3526 }
3527 }
3528 else
3529 {
3530 int count;
3531 unsigned unitlen;
3532
3533 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t),
3534 base_type);
3535 if (count == -1)
3536 return -1;
3537 if (TYPE_LENGTH (t) == 0)
3538 {
3539 gdb_assert (count == 0);
3540 return 0;
3541 }
3542 else if (count == 0)
3543 return -1;
3544 unitlen = arm_vfp_cprc_unit_length (*base_type);
3545 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3546 return TYPE_LENGTH (t) / unitlen;
90445bd3 3547 }
90445bd3
DJ
3548 }
3549 break;
3550
3551 case TYPE_CODE_STRUCT:
3552 {
3553 int count = 0;
3554 unsigned unitlen;
3555 int i;
3556 for (i = 0; i < TYPE_NFIELDS (t); i++)
3557 {
1040b979
YQ
3558 int sub_count = 0;
3559
3560 if (!field_is_static (&TYPE_FIELD (t, i)))
3561 sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3562 base_type);
90445bd3
DJ
3563 if (sub_count == -1)
3564 return -1;
3565 count += sub_count;
3566 }
3567 if (TYPE_LENGTH (t) == 0)
3568 {
3569 gdb_assert (count == 0);
3570 return 0;
3571 }
3572 else if (count == 0)
3573 return -1;
3574 unitlen = arm_vfp_cprc_unit_length (*base_type);
3575 if (TYPE_LENGTH (t) != unitlen * count)
3576 return -1;
3577 return count;
3578 }
3579
3580 case TYPE_CODE_UNION:
3581 {
3582 int count = 0;
3583 unsigned unitlen;
3584 int i;
3585 for (i = 0; i < TYPE_NFIELDS (t); i++)
3586 {
3587 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3588 base_type);
3589 if (sub_count == -1)
3590 return -1;
3591 count = (count > sub_count ? count : sub_count);
3592 }
3593 if (TYPE_LENGTH (t) == 0)
3594 {
3595 gdb_assert (count == 0);
3596 return 0;
3597 }
3598 else if (count == 0)
3599 return -1;
3600 unitlen = arm_vfp_cprc_unit_length (*base_type);
3601 if (TYPE_LENGTH (t) != unitlen * count)
3602 return -1;
3603 return count;
3604 }
3605
3606 default:
3607 break;
3608 }
3609
3610 return -1;
3611}
3612
3613/* Determine whether T is a VFP co-processor register candidate (CPRC)
3614 if passed to or returned from a non-variadic function with the VFP
3615 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3616 *BASE_TYPE to the base type for T and *COUNT to the number of
3617 elements of that base type before returning. */
3618
3619static int
3620arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3621 int *count)
3622{
3623 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3624 int c = arm_vfp_cprc_sub_candidate (t, &b);
3625 if (c <= 0 || c > 4)
3626 return 0;
3627 *base_type = b;
3628 *count = c;
3629 return 1;
3630}
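/* Illustrative sketch, not part of the original file: how the
   classification above behaves for a few argument types (the example
   types are invented for illustration).

     struct vec3 { float x, y, z; }   -> base VFP_CPRC_SINGLE, count 3,
                                         accepted (passed in s0-s2)
     _Complex double                  -> base VFP_CPRC_DOUBLE, count 2
     struct { float f; double d; }    -> mixed base types, rejected (-1)
     struct { float f[5]; }           -> count 5 > 4, rejected

   The acceptance rule itself reduces to:  */

static int
sketch_is_vfp_cprc (int element_count)
{
  return element_count > 0 && element_count <= 4;
}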
3631
3632/* Return 1 if the VFP ABI should be used for passing arguments to and
3633 returning values from a function of type FUNC_TYPE, 0
3634 otherwise. */
3635
3636static int
3637arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3638{
3639 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3640 /* Variadic functions always use the base ABI. Assume that functions
3641 without debug info are not variadic. */
3642 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3643 return 0;
3644 /* The VFP ABI is only supported as a variant of AAPCS. */
3645 if (tdep->arm_abi != ARM_ABI_AAPCS)
3646 return 0;
3647 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3648}
3649
3650/* We currently only support passing parameters in integer registers, which
3651 conforms with GCC's default model, and VFP argument passing following
3652 the VFP variant of AAPCS. Several other variants exist and
2dd604e7
RE
3653 we should probably support some of them based on the selected ABI. */
3654
3655static CORE_ADDR
7d9b040b 3656arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
6a65450a
AC
3657 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3658 struct value **args, CORE_ADDR sp, int struct_return,
3659 CORE_ADDR struct_addr)
2dd604e7 3660{
e17a4113 3661 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2dd604e7
RE
3662 int argnum;
3663 int argreg;
3664 int nstack;
3665 struct stack_item *si = NULL;
90445bd3
DJ
3666 int use_vfp_abi;
3667 struct type *ftype;
3668 unsigned vfp_regs_free = (1 << 16) - 1;
3669
3670 /* Determine the type of this function and whether the VFP ABI
3671 applies. */
3672 ftype = check_typedef (value_type (function));
3673 if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3674 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3675 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
2dd604e7 3676
6a65450a
AC
3677 /* Set the return address. For the ARM, the return breakpoint is
3678 always at BP_ADDR. */
9779414d 3679 if (arm_pc_is_thumb (gdbarch, bp_addr))
9dca5578 3680 bp_addr |= 1;
6a65450a 3681 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
2dd604e7
RE
3682
3683 /* Walk through the list of args and determine how large a temporary
3684 stack is required. Need to take care here as structs may be
7a9dd1b2 3685 passed on the stack, and we have to push them. */
2dd604e7
RE
3686 nstack = 0;
3687
3688 argreg = ARM_A1_REGNUM;
3689 nstack = 0;
3690
2dd604e7
RE
3691 /* The struct_return pointer occupies the first parameter
3692 passing register. */
3693 if (struct_return)
3694 {
3695 if (arm_debug)
5af949e3 3696 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
2af46ca0 3697 gdbarch_register_name (gdbarch, argreg),
5af949e3 3698 paddress (gdbarch, struct_addr));
2dd604e7
RE
3699 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3700 argreg++;
3701 }
3702
3703 for (argnum = 0; argnum < nargs; argnum++)
3704 {
3705 int len;
3706 struct type *arg_type;
3707 struct type *target_type;
3708 enum type_code typecode;
8c6363cf 3709 const bfd_byte *val;
2af48f68 3710 int align;
90445bd3
DJ
3711 enum arm_vfp_cprc_base_type vfp_base_type;
3712 int vfp_base_count;
3713 int may_use_core_reg = 1;
2dd604e7 3714
df407dfe 3715 arg_type = check_typedef (value_type (args[argnum]));
2dd604e7
RE
3716 len = TYPE_LENGTH (arg_type);
3717 target_type = TYPE_TARGET_TYPE (arg_type);
3718 typecode = TYPE_CODE (arg_type);
8c6363cf 3719 val = value_contents (args[argnum]);
2dd604e7 3720
2af48f68
PB
3721 align = arm_type_align (arg_type);
3722 /* Round alignment up to a whole number of words. */
3723 align = (align + INT_REGISTER_SIZE - 1) & ~(INT_REGISTER_SIZE - 1);
3724 /* Different ABIs have different maximum alignments. */
3725 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3726 {
3727 /* The APCS ABI only requires word alignment. */
3728 align = INT_REGISTER_SIZE;
3729 }
3730 else
3731 {
3732 /* The AAPCS requires at most doubleword alignment. */
3733 if (align > INT_REGISTER_SIZE * 2)
3734 align = INT_REGISTER_SIZE * 2;
3735 }
3736
90445bd3
DJ
3737 if (use_vfp_abi
3738 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3739 &vfp_base_count))
3740 {
3741 int regno;
3742 int unit_length;
3743 int shift;
3744 unsigned mask;
3745
3746 /* Because this is a CPRC it cannot go in a core register or
3747 cause a core register to be skipped for alignment.
3748 Either it goes in VFP registers and the rest of this loop
3749 iteration is skipped for this argument, or it goes on the
3750 stack (and the stack alignment code is correct for this
3751 case). */
3752 may_use_core_reg = 0;
3753
3754 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3755 shift = unit_length / 4;
3756 mask = (1 << (shift * vfp_base_count)) - 1;
3757 for (regno = 0; regno < 16; regno += shift)
3758 if (((vfp_regs_free >> regno) & mask) == mask)
3759 break;
3760
3761 if (regno < 16)
3762 {
3763 int reg_char;
3764 int reg_scaled;
3765 int i;
3766
3767 vfp_regs_free &= ~(mask << regno);
3768 reg_scaled = regno / shift;
3769 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3770 for (i = 0; i < vfp_base_count; i++)
3771 {
3772 char name_buf[4];
3773 int regnum;
58d6951d
DJ
3774 if (reg_char == 'q')
3775 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
90445bd3 3776 val + i * unit_length);
58d6951d
DJ
3777 else
3778 {
8c042590
PM
3779 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3780 reg_char, reg_scaled + i);
58d6951d
DJ
3781 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3782 strlen (name_buf));
3783 regcache_cooked_write (regcache, regnum,
3784 val + i * unit_length);
3785 }
90445bd3
DJ
3786 }
3787 continue;
3788 }
3789 else
3790 {
3791 /* This CPRC could not go in VFP registers, so all VFP
3792 registers are now marked as used. */
3793 vfp_regs_free = 0;
3794 }
3795 }
3796
2af48f68
PB
3797	      /* Push stack padding for doubleword alignment. */
3798 if (nstack & (align - 1))
3799 {
3800 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3801 nstack += INT_REGISTER_SIZE;
3802 }
3803
3804 /* Doubleword aligned quantities must go in even register pairs. */
90445bd3
DJ
3805 if (may_use_core_reg
3806 && argreg <= ARM_LAST_ARG_REGNUM
2af48f68
PB
3807 && align > INT_REGISTER_SIZE
3808 && argreg & 1)
3809 argreg++;
3810
2dd604e7
RE
3811 /* If the argument is a pointer to a function, and it is a
3812 Thumb function, create a LOCAL copy of the value and set
3813 the THUMB bit in it. */
3814 if (TYPE_CODE_PTR == typecode
3815 && target_type != NULL
f96b8fa0 3816 && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
2dd604e7 3817 {
e17a4113 3818 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
9779414d 3819 if (arm_pc_is_thumb (gdbarch, regval))
2dd604e7 3820 {
224c3ddb 3821 bfd_byte *copy = (bfd_byte *) alloca (len);
8c6363cf 3822 store_unsigned_integer (copy, len, byte_order,
e17a4113 3823 MAKE_THUMB_ADDR (regval));
8c6363cf 3824 val = copy;
2dd604e7
RE
3825 }
3826 }
3827
3828 /* Copy the argument to general registers or the stack in
3829 register-sized pieces. Large arguments are split between
3830 registers and stack. */
3831 while (len > 0)
3832 {
f0c9063c 3833 int partial_len = len < INT_REGISTER_SIZE ? len : INT_REGISTER_SIZE;
ef9bd0b8
YQ
3834 CORE_ADDR regval
3835 = extract_unsigned_integer (val, partial_len, byte_order);
2dd604e7 3836
90445bd3 3837 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
2dd604e7
RE
3838 {
3839 /* The argument is being passed in a general purpose
3840 register. */
e17a4113 3841 if (byte_order == BFD_ENDIAN_BIG)
8bf8793c 3842 regval <<= (INT_REGISTER_SIZE - partial_len) * 8;
2dd604e7
RE
3843 if (arm_debug)
3844 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
c9f4d572
UW
3845 argnum,
3846 gdbarch_register_name
2af46ca0 3847 (gdbarch, argreg),
f0c9063c 3848 phex (regval, INT_REGISTER_SIZE));
2dd604e7
RE
3849 regcache_cooked_write_unsigned (regcache, argreg, regval);
3850 argreg++;
3851 }
3852 else
3853 {
ef9bd0b8
YQ
3854 gdb_byte buf[INT_REGISTER_SIZE];
3855
3856 memset (buf, 0, sizeof (buf));
3857 store_unsigned_integer (buf, partial_len, byte_order, regval);
3858
2dd604e7
RE
3859 /* Push the arguments onto the stack. */
3860 if (arm_debug)
3861 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3862 argnum, nstack);
ef9bd0b8 3863 si = push_stack_item (si, buf, INT_REGISTER_SIZE);
f0c9063c 3864 nstack += INT_REGISTER_SIZE;
2dd604e7
RE
3865 }
3866
3867 len -= partial_len;
3868 val += partial_len;
3869 }
3870 }
3871 /* If we have an odd number of words to push, then decrement the stack
3872 by one word now, so first stack argument will be dword aligned. */
3873 if (nstack & 4)
3874 sp -= 4;
3875
3876 while (si)
3877 {
3878 sp -= si->len;
3879 write_memory (sp, si->data, si->len);
3880 si = pop_stack_item (si);
3881 }
3882
3883	  /* Finally, update the SP register. */
3884 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3885
3886 return sp;
3887}
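/* Illustrative sketch, not taken from GDB: the free-register scan that
   arm_push_dummy_call uses for VFP CPRCs, restated stand-alone.  The
   16-bit mask tracks the argument registers s0-s15; an element of
   UNIT_LENGTH bytes occupies UNIT_LENGTH / 4 consecutive slots, and
   stepping the scan by that amount keeps doubles and quads naturally
   aligned.  The sketch_* name and return convention are invented.  */

static int
sketch_alloc_vfp_regs (unsigned int *regs_free, int unit_length, int count)
{
  int shift = unit_length / 4;		/* s-register slots per element.  */
  unsigned int mask = (1U << (shift * count)) - 1;
  int regno;

  for (regno = 0; regno < 16; regno += shift)
    if (((*regs_free >> regno) & mask) == mask)
      {
	*regs_free &= ~(mask << regno);	/* Mark the slots as used.  */
	return regno;			/* First s-register claimed.  */
      }
  return -1;				/* No room: spill to the stack.  */
}

/* Example: starting from all sixteen slots free, a CPRC of two doubles
   claims s0-s3 (d0 and d1), after which a single float argument lands
   in s4.  */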
3888
f53f0d0b
PB
3889
3890/* Always align the frame to an 8-byte boundary. This is required on
3891 some platforms and harmless on the rest. */
3892
3893static CORE_ADDR
3894arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3895{
3896 /* Align the stack to eight bytes. */
3897 return sp & ~ (CORE_ADDR) 7;
3898}
3899
c906108c 3900static void
12b27276 3901print_fpu_flags (struct ui_file *file, int flags)
c906108c 3902{
c5aa993b 3903 if (flags & (1 << 0))
12b27276 3904 fputs_filtered ("IVO ", file);
c5aa993b 3905 if (flags & (1 << 1))
12b27276 3906 fputs_filtered ("DVZ ", file);
c5aa993b 3907 if (flags & (1 << 2))
12b27276 3908 fputs_filtered ("OFL ", file);
c5aa993b 3909 if (flags & (1 << 3))
12b27276 3910 fputs_filtered ("UFL ", file);
c5aa993b 3911 if (flags & (1 << 4))
12b27276
WN
3912 fputs_filtered ("INX ", file);
3913 fputc_filtered ('\n', file);
c906108c
SS
3914}
3915
5e74b15c
RE
3916/* Print interesting information about the floating point processor
3917 (if present) or emulator. */
34e8f22d 3918static void
d855c300 3919arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
23e3a7ac 3920 struct frame_info *frame, const char *args)
c906108c 3921{
9c9acae0 3922 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
c5aa993b
JM
3923 int type;
3924
3925 type = (status >> 24) & 127;
edefbb7c 3926 if (status & (1 << 31))
12b27276 3927 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
edefbb7c 3928 else
12b27276 3929 fprintf_filtered (file, _("Software FPU type %d\n"), type);
edefbb7c 3930 /* i18n: [floating point unit] mask */
12b27276
WN
3931 fputs_filtered (_("mask: "), file);
3932 print_fpu_flags (file, status >> 16);
edefbb7c 3933 /* i18n: [floating point unit] flags */
12b27276
WN
3934 fputs_filtered (_("flags: "), file);
3935 print_fpu_flags (file, status);
c906108c
SS
3936}
3937
27067745
UW
3938/* Construct the ARM extended floating point type. */
3939static struct type *
3940arm_ext_type (struct gdbarch *gdbarch)
3941{
3942 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3943
3944 if (!tdep->arm_ext_type)
3945 tdep->arm_ext_type
e9bb382b 3946 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
27067745
UW
3947 floatformats_arm_ext);
3948
3949 return tdep->arm_ext_type;
3950}
3951
58d6951d
DJ
3952static struct type *
3953arm_neon_double_type (struct gdbarch *gdbarch)
3954{
3955 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3956
3957 if (tdep->neon_double_type == NULL)
3958 {
3959 struct type *t, *elem;
3960
3961 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
3962 TYPE_CODE_UNION);
3963 elem = builtin_type (gdbarch)->builtin_uint8;
3964 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
3965 elem = builtin_type (gdbarch)->builtin_uint16;
3966 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
3967 elem = builtin_type (gdbarch)->builtin_uint32;
3968 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
3969 elem = builtin_type (gdbarch)->builtin_uint64;
3970 append_composite_type_field (t, "u64", elem);
3971 elem = builtin_type (gdbarch)->builtin_float;
3972 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
3973 elem = builtin_type (gdbarch)->builtin_double;
3974 append_composite_type_field (t, "f64", elem);
3975
3976 TYPE_VECTOR (t) = 1;
3977 TYPE_NAME (t) = "neon_d";
3978 tdep->neon_double_type = t;
3979 }
3980
3981 return tdep->neon_double_type;
3982}
3983
3984/* FIXME: The vector types are not correctly ordered on big-endian
3985 targets. Just as s0 is the low bits of d0, d0[0] is also the low
3986 bits of d0 - regardless of what unit size is being held in d0. So
3987 the offset of the first uint8 in d0 is 7, but the offset of the
3988 first float is 4. This code works as-is for little-endian
3989 targets. */
3990
3991static struct type *
3992arm_neon_quad_type (struct gdbarch *gdbarch)
3993{
3994 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3995
3996 if (tdep->neon_quad_type == NULL)
3997 {
3998 struct type *t, *elem;
3999
4000 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
4001 TYPE_CODE_UNION);
4002 elem = builtin_type (gdbarch)->builtin_uint8;
4003 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
4004 elem = builtin_type (gdbarch)->builtin_uint16;
4005 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
4006 elem = builtin_type (gdbarch)->builtin_uint32;
4007 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
4008 elem = builtin_type (gdbarch)->builtin_uint64;
4009 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
4010 elem = builtin_type (gdbarch)->builtin_float;
4011 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
4012 elem = builtin_type (gdbarch)->builtin_double;
4013 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
4014
4015 TYPE_VECTOR (t) = 1;
4016 TYPE_NAME (t) = "neon_q";
4017 tdep->neon_quad_type = t;
4018 }
4019
4020 return tdep->neon_quad_type;
4021}
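/* Illustrative sketch, not part of the original source: a plain C
   union with the same member layout as the synthetic "neon_q" type
   built above, showing how one 128-bit Q register is exposed under
   several element widths.  As the FIXME before arm_neon_quad_type
   notes, the element offsets implied by such a union only match the
   hardware lane order on little-endian targets.  */

#include <stdint.h>

union sketch_neon_q
{
  uint8_t u8[16];
  uint16_t u16[8];
  uint32_t u32[4];
  uint64_t u64[2];
  float f32[4];
  double f64[2];
};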
4022
34e8f22d
RE
4023/* Return the GDB type object for the "standard" data type of data in
4024 register N. */
4025
4026static struct type *
7a5ea0d4 4027arm_register_type (struct gdbarch *gdbarch, int regnum)
032758dc 4028{
58d6951d
DJ
4029 int num_regs = gdbarch_num_regs (gdbarch);
4030
4031 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
4032 && regnum >= num_regs && regnum < num_regs + 32)
4033 return builtin_type (gdbarch)->builtin_float;
4034
4035 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
4036 && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
4037 return arm_neon_quad_type (gdbarch);
4038
4039 /* If the target description has register information, we are only
4040 in this function so that we can override the types of
4041 double-precision registers for NEON. */
4042 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
4043 {
4044 struct type *t = tdesc_register_type (gdbarch, regnum);
4045
4046 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
4047 && TYPE_CODE (t) == TYPE_CODE_FLT
4048 && gdbarch_tdep (gdbarch)->have_neon)
4049 return arm_neon_double_type (gdbarch);
4050 else
4051 return t;
4052 }
4053
34e8f22d 4054 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
58d6951d
DJ
4055 {
4056 if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
4057 return builtin_type (gdbarch)->builtin_void;
4058
4059 return arm_ext_type (gdbarch);
4060 }
e4c16157 4061 else if (regnum == ARM_SP_REGNUM)
0dfff4cb 4062 return builtin_type (gdbarch)->builtin_data_ptr;
e4c16157 4063 else if (regnum == ARM_PC_REGNUM)
0dfff4cb 4064 return builtin_type (gdbarch)->builtin_func_ptr;
ff6f572f
DJ
4065 else if (regnum >= ARRAY_SIZE (arm_register_names))
4066 /* These registers are only supported on targets which supply
4067 an XML description. */
df4df182 4068 return builtin_type (gdbarch)->builtin_int0;
032758dc 4069 else
df4df182 4070 return builtin_type (gdbarch)->builtin_uint32;
032758dc
AC
4071}
4072
ff6f572f
DJ
4073/* Map a DWARF register REGNUM onto the appropriate GDB register
4074 number. */
4075
4076static int
d3f73121 4077arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
ff6f572f
DJ
4078{
4079 /* Core integer regs. */
4080 if (reg >= 0 && reg <= 15)
4081 return reg;
4082
4083 /* Legacy FPA encoding. These were once used in a way which
4084 overlapped with VFP register numbering, so their use is
4085 discouraged, but GDB doesn't support the ARM toolchain
4086 which used them for VFP. */
4087 if (reg >= 16 && reg <= 23)
4088 return ARM_F0_REGNUM + reg - 16;
4089
4090 /* New assignments for the FPA registers. */
4091 if (reg >= 96 && reg <= 103)
4092 return ARM_F0_REGNUM + reg - 96;
4093
4094 /* WMMX register assignments. */
4095 if (reg >= 104 && reg <= 111)
4096 return ARM_WCGR0_REGNUM + reg - 104;
4097
4098 if (reg >= 112 && reg <= 127)
4099 return ARM_WR0_REGNUM + reg - 112;
4100
4101 if (reg >= 192 && reg <= 199)
4102 return ARM_WC0_REGNUM + reg - 192;
4103
58d6951d
DJ
4104 /* VFP v2 registers. A double precision value is actually
4105 in d1 rather than s2, but the ABI only defines numbering
4106 for the single precision registers. This will "just work"
4107 in GDB for little endian targets (we'll read eight bytes,
4108 starting in s0 and then progressing to s1), but will be
4109 reversed on big endian targets with VFP. This won't
4110 be a problem for the new Neon quad registers; you're supposed
4111 to use DW_OP_piece for those. */
4112 if (reg >= 64 && reg <= 95)
4113 {
4114 char name_buf[4];
4115
8c042590 4116 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
58d6951d
DJ
4117 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4118 strlen (name_buf));
4119 }
4120
4121 /* VFP v3 / Neon registers. This range is also used for VFP v2
4122 registers, except that it now describes d0 instead of s0. */
4123 if (reg >= 256 && reg <= 287)
4124 {
4125 char name_buf[4];
4126
8c042590 4127 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
58d6951d
DJ
4128 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4129 strlen (name_buf));
4130 }
4131
ff6f572f
DJ
4132 return -1;
4133}
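/* Illustrative sketch, not from the original file: the VFP ranges
   above are resolved by building a register name and looking it up,
   rather than by adding a fixed offset.  The helper below restates
   just that name construction (sketch_* is an invented name).  */

#include <stdio.h>

static void
sketch_vfp_dwarf_name (int reg, char name_buf[4])
{
  if (reg >= 64 && reg <= 95)
    snprintf (name_buf, 4, "s%d", reg - 64);	/* VFPv2 singles.  */
  else if (reg >= 256 && reg <= 287)
    snprintf (name_buf, 4, "d%d", reg - 256);	/* VFPv3/Neon doubles.  */
  else
    name_buf[0] = '\0';
}

/* sketch_vfp_dwarf_name (64, buf) gives "s0";
   sketch_vfp_dwarf_name (287, buf) gives "d31".  */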
4134
26216b98
AC
4135/* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4136static int
e7faf938 4137arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
26216b98
AC
4138{
4139 int reg = regnum;
e7faf938 4140 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
26216b98 4141
ff6f572f
DJ
4142 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4143 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4144
4145 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4146 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4147
4148 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4149 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4150
26216b98
AC
4151 if (reg < NUM_GREGS)
4152 return SIM_ARM_R0_REGNUM + reg;
4153 reg -= NUM_GREGS;
4154
4155 if (reg < NUM_FREGS)
4156 return SIM_ARM_FP0_REGNUM + reg;
4157 reg -= NUM_FREGS;
4158
4159 if (reg < NUM_SREGS)
4160 return SIM_ARM_FPS_REGNUM + reg;
4161 reg -= NUM_SREGS;
4162
edefbb7c 4163 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
26216b98 4164}
34e8f22d 4165
a37b3cc0
AC
4166/* NOTE: cagney/2001-08-20: Both convert_from_extended() and
4167 convert_to_extended() use floatformat_arm_ext_littlebyte_bigword.
4168	   It is thought that this is the floating-point register format on
4169 little-endian systems. */
c906108c 4170
ed9a39eb 4171static void
b508a996 4172convert_from_extended (const struct floatformat *fmt, const void *ptr,
be8626e0 4173 void *dbl, int endianess)
c906108c 4174{
a37b3cc0 4175 DOUBLEST d;
be8626e0
MD
4176
4177 if (endianess == BFD_ENDIAN_BIG)
a37b3cc0
AC
4178 floatformat_to_doublest (&floatformat_arm_ext_big, ptr, &d);
4179 else
4180 floatformat_to_doublest (&floatformat_arm_ext_littlebyte_bigword,
4181 ptr, &d);
b508a996 4182 floatformat_from_doublest (fmt, &d, dbl);
c906108c
SS
4183}
4184
34e8f22d 4185static void
be8626e0
MD
4186convert_to_extended (const struct floatformat *fmt, void *dbl, const void *ptr,
4187 int endianess)
c906108c 4188{
a37b3cc0 4189 DOUBLEST d;
be8626e0 4190
b508a996 4191 floatformat_to_doublest (fmt, ptr, &d);
be8626e0 4192 if (endianess == BFD_ENDIAN_BIG)
a37b3cc0
AC
4193 floatformat_from_doublest (&floatformat_arm_ext_big, &d, dbl);
4194 else
4195 floatformat_from_doublest (&floatformat_arm_ext_littlebyte_bigword,
4196 &d, dbl);
c906108c 4197}
ed9a39eb 4198
d9311bfa
AT
4199/* Like insert_single_step_breakpoint, but make sure we use a breakpoint
4200 of the appropriate mode (as encoded in the PC value), even if this
4201 differs from what would be expected according to the symbol tables. */
4202
4203void
4204arm_insert_single_step_breakpoint (struct gdbarch *gdbarch,
4205 struct address_space *aspace,
4206 CORE_ADDR pc)
c906108c 4207{
d9311bfa
AT
4208 struct cleanup *old_chain
4209 = make_cleanup_restore_integer (&arm_override_mode);
c5aa993b 4210
d9311bfa
AT
4211 arm_override_mode = IS_THUMB_ADDR (pc);
4212 pc = gdbarch_addr_bits_remove (gdbarch, pc);
c5aa993b 4213
d9311bfa 4214 insert_single_step_breakpoint (gdbarch, aspace, pc);
c906108c 4215
d9311bfa
AT
4216 do_cleanups (old_chain);
4217}
c5aa993b 4218
d9311bfa
AT
4219/* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
4220 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
4221 NULL if an error occurs. BUF is freed. */
c906108c 4222
d9311bfa
AT
4223static gdb_byte *
4224extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
4225 int old_len, int new_len)
4226{
4227 gdb_byte *new_buf;
4228 int bytes_to_read = new_len - old_len;
c906108c 4229
d9311bfa
AT
4230 new_buf = (gdb_byte *) xmalloc (new_len);
4231 memcpy (new_buf + bytes_to_read, buf, old_len);
4232 xfree (buf);
4233 if (target_read_memory (endaddr - new_len, new_buf, bytes_to_read) != 0)
4234 {
4235 xfree (new_buf);
4236 return NULL;
c906108c 4237 }
d9311bfa 4238 return new_buf;
c906108c
SS
4239}
4240
d9311bfa
AT
4241/* An IT block is at most the 2-byte IT instruction followed by
4242 four 4-byte instructions. The furthest back we must search to
4243 find an IT block that affects the current instruction is thus
4244 2 + 3 * 4 == 14 bytes. */
4245#define MAX_IT_BLOCK_PREFIX 14
177321bd 4246
d9311bfa
AT
4247/* Use a quick scan if there are more than this many bytes of
4248 code. */
4249#define IT_SCAN_THRESHOLD 32
177321bd 4250
d9311bfa
AT
4251/* Adjust a breakpoint's address to move breakpoints out of IT blocks.
4252 A breakpoint in an IT block may not be hit, depending on the
4253 condition flags. */
ad527d2e 4254static CORE_ADDR
d9311bfa 4255arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
c906108c 4256{
d9311bfa
AT
4257 gdb_byte *buf;
4258 char map_type;
4259 CORE_ADDR boundary, func_start;
4260 int buf_len;
4261 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
4262 int i, any, last_it, last_it_count;
177321bd 4263
d9311bfa
AT
4264 /* If we are using BKPT breakpoints, none of this is necessary. */
4265 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
4266 return bpaddr;
177321bd 4267
d9311bfa
AT
4268 /* ARM mode does not have this problem. */
4269 if (!arm_pc_is_thumb (gdbarch, bpaddr))
4270 return bpaddr;
177321bd 4271
d9311bfa
AT
4272 /* We are setting a breakpoint in Thumb code that could potentially
4273 contain an IT block. The first step is to find how much Thumb
4274 code there is; we do not need to read outside of known Thumb
4275 sequences. */
4276 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
4277 if (map_type == 0)
4278 /* Thumb-2 code must have mapping symbols to have a chance. */
4279 return bpaddr;
9dca5578 4280
d9311bfa 4281 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
177321bd 4282
d9311bfa
AT
4283 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
4284 && func_start > boundary)
4285 boundary = func_start;
9dca5578 4286
d9311bfa
AT
4287 /* Search for a candidate IT instruction. We have to do some fancy
4288 footwork to distinguish a real IT instruction from the second
4289 half of a 32-bit instruction, but there is no need for that if
4290 there's no candidate. */
325fac50 4291 buf_len = std::min (bpaddr - boundary, (CORE_ADDR) MAX_IT_BLOCK_PREFIX);
d9311bfa
AT
4292 if (buf_len == 0)
4293 /* No room for an IT instruction. */
4294 return bpaddr;
c906108c 4295
d9311bfa
AT
4296 buf = (gdb_byte *) xmalloc (buf_len);
4297 if (target_read_memory (bpaddr - buf_len, buf, buf_len) != 0)
4298 return bpaddr;
4299 any = 0;
4300 for (i = 0; i < buf_len; i += 2)
c906108c 4301 {
d9311bfa
AT
4302 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4303 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
25b41d01 4304 {
d9311bfa
AT
4305 any = 1;
4306 break;
25b41d01 4307 }
c906108c 4308 }
d9311bfa
AT
4309
4310 if (any == 0)
c906108c 4311 {
d9311bfa
AT
4312 xfree (buf);
4313 return bpaddr;
f9d67f43
DJ
4314 }
4315
4316 /* OK, the code bytes before this instruction contain at least one
4317 halfword which resembles an IT instruction. We know that it's
4318 Thumb code, but there are still two possibilities. Either the
4319 halfword really is an IT instruction, or it is the second half of
4320 a 32-bit Thumb instruction. The only way we can tell is to
4321 scan forwards from a known instruction boundary. */
4322 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
4323 {
4324 int definite;
4325
4326 /* There's a lot of code before this instruction. Start with an
4327 optimistic search; it's easy to recognize halfwords that can
4328 not be the start of a 32-bit instruction, and use that to
4329 lock on to the instruction boundaries. */
4330 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
4331 if (buf == NULL)
4332 return bpaddr;
4333 buf_len = IT_SCAN_THRESHOLD;
4334
4335 definite = 0;
4336 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
4337 {
4338 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4339 if (thumb_insn_size (inst1) == 2)
4340 {
4341 definite = 1;
4342 break;
4343 }
4344 }
4345
4346 /* At this point, if DEFINITE, BUF[I] is the first place we
4347 are sure that we know the instruction boundaries, and it is far
4348 enough from BPADDR that we could not miss an IT instruction
4349 affecting BPADDR. If ! DEFINITE, give up - start from a
4350 known boundary. */
4351 if (! definite)
4352 {
0963b4bd
MS
4353 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
4354 bpaddr - boundary);
f9d67f43
DJ
4355 if (buf == NULL)
4356 return bpaddr;
4357 buf_len = bpaddr - boundary;
4358 i = 0;
4359 }
4360 }
4361 else
4362 {
4363 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
4364 if (buf == NULL)
4365 return bpaddr;
4366 buf_len = bpaddr - boundary;
4367 i = 0;
4368 }
4369
4370 /* Scan forwards. Find the last IT instruction before BPADDR. */
4371 last_it = -1;
4372 last_it_count = 0;
4373 while (i < buf_len)
4374 {
4375 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4376 last_it_count--;
4377 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4378 {
4379 last_it = i;
4380 if (inst1 & 0x0001)
4381 last_it_count = 4;
4382 else if (inst1 & 0x0002)
4383 last_it_count = 3;
4384 else if (inst1 & 0x0004)
4385 last_it_count = 2;
4386 else
4387 last_it_count = 1;
4388 }
4389 i += thumb_insn_size (inst1);
4390 }
4391
4392 xfree (buf);
4393
4394 if (last_it == -1)
4395 /* There wasn't really an IT instruction after all. */
4396 return bpaddr;
4397
4398 if (last_it_count < 1)
4399 /* It was too far away. */
4400 return bpaddr;
4401
4402 /* This really is a trouble spot. Move the breakpoint to the IT
4403 instruction. */
4404 return bpaddr - buf_len + last_it;
4405}
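/* Illustrative sketch, not taken from GDB: the IT-recognition test
   used in the scans above, restated stand-alone.  A halfword of the
   form 0xBFxx with a non-zero low nibble is an IT instruction, and the
   position of the lowest set mask bit gives the number of instructions
   in the block (the value tracked in last_it_count).  */

#include <stdint.h>

static int
sketch_it_block_length (uint16_t inst1)
{
  if ((inst1 & 0xff00) != 0xbf00 || (inst1 & 0x000f) == 0)
    return 0;		/* Not an IT instruction; a zero mask nibble
			   means a hint such as NOP or YIELD.  */
  if (inst1 & 0x0001)
    return 4;
  if (inst1 & 0x0002)
    return 3;
  if (inst1 & 0x0004)
    return 2;
  return 1;
}

/* Examples: 0xbf08 ("IT EQ") covers 1 following instruction, 0xbf04
   covers 2, 0xbf02 covers 3, 0xbf01 covers 4, and 0xbf00 is a NOP
   hint rather than an IT instruction.  */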
4406
cca44b1b 4407/* ARM displaced stepping support.
c906108c 4408
cca44b1b 4409 Generally ARM displaced stepping works as follows:
c906108c 4410
cca44b1b 4411 1. When an instruction is to be single-stepped, it is first decoded by
2ba163c8
SM
4412 arm_process_displaced_insn. Depending on the type of instruction, it is
4413 then copied to a scratch location, possibly in a modified form. The
4414 copy_* set of functions performs such modification, as necessary. A
4415 breakpoint is placed after the modified instruction in the scratch space
4416 to return control to GDB. Note in particular that instructions which
4417 modify the PC will no longer do so after modification.
c5aa993b 4418
cca44b1b
JB
4419 2. The instruction is single-stepped, by setting the PC to the scratch
4420 location address, and resuming. Control returns to GDB when the
4421 breakpoint is hit.
c5aa993b 4422
cca44b1b
JB
4423 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
4424 function used for the current instruction. This function's job is to
4425 put the CPU/memory state back to what it would have been if the
4426 instruction had been executed unmodified in its original location. */
c5aa993b 4427
cca44b1b
JB
4428/* NOP instruction (mov r0, r0). */
4429#define ARM_NOP 0xe1a00000
34518530 4430#define THUMB_NOP 0x4600
cca44b1b
JB
4431
4432/* Helper for register reads for displaced stepping. In particular, this
4433 returns the PC as it would be seen by the instruction at its original
4434 location. */
4435
4436ULONGEST
36073a92
YQ
4437displaced_read_reg (struct regcache *regs, struct displaced_step_closure *dsc,
4438 int regno)
cca44b1b
JB
4439{
4440 ULONGEST ret;
36073a92 4441 CORE_ADDR from = dsc->insn_addr;
cca44b1b 4442
bf9f652a 4443 if (regno == ARM_PC_REGNUM)
cca44b1b 4444 {
4db71c0b
YQ
4445 /* Compute pipeline offset:
4446 - When executing an ARM instruction, PC reads as the address of the
4447 current instruction plus 8.
4448 - When executing a Thumb instruction, PC reads as the address of the
4449 current instruction plus 4. */
4450
36073a92 4451 if (!dsc->is_thumb)
4db71c0b
YQ
4452 from += 8;
4453 else
4454 from += 4;
4455
cca44b1b
JB
4456 if (debug_displaced)
4457 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
4db71c0b
YQ
4458 (unsigned long) from);
4459 return (ULONGEST) from;
cca44b1b 4460 }
c906108c 4461 else
cca44b1b
JB
4462 {
4463 regcache_cooked_read_unsigned (regs, regno, &ret);
4464 if (debug_displaced)
4465 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
4466 regno, (unsigned long) ret);
4467 return ret;
4468 }
c906108c
SS
4469}
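/* Illustrative sketch, not part of the original file: the pipeline
   offset applied above, as plain arithmetic.  The sketch_* name is
   invented for illustration.  */

static unsigned long
sketch_pc_as_seen_by_insn (unsigned long insn_addr, int is_thumb)
{
  /* An instruction that reads the PC sees its own address plus 8 in
     ARM state and plus 4 in Thumb state.  */
  return insn_addr + (is_thumb ? 4 : 8);
}

/* So an ARM "add r0, pc, #0" at 0x1000 stores 0x1008 in r0, and the
   displaced copy must reproduce that value even though the copy runs
   from the scratch area.  */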
4470
cca44b1b
JB
4471static int
4472displaced_in_arm_mode (struct regcache *regs)
4473{
4474 ULONGEST ps;
9779414d 4475 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
66e810cd 4476
cca44b1b 4477 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
66e810cd 4478
9779414d 4479 return (ps & t_bit) == 0;
cca44b1b 4480}
66e810cd 4481
cca44b1b 4482/* Write to the PC as from a branch instruction. */
c906108c 4483
cca44b1b 4484static void
36073a92
YQ
4485branch_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4486 ULONGEST val)
c906108c 4487{
36073a92 4488 if (!dsc->is_thumb)
cca44b1b
JB
4489 /* Note: If bits 0/1 are set, this branch would be unpredictable for
4490 architecture versions < 6. */
0963b4bd
MS
4491 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4492 val & ~(ULONGEST) 0x3);
cca44b1b 4493 else
0963b4bd
MS
4494 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4495 val & ~(ULONGEST) 0x1);
cca44b1b 4496}
66e810cd 4497
cca44b1b
JB
4498/* Write to the PC as from a branch-exchange instruction. */
4499
4500static void
4501bx_write_pc (struct regcache *regs, ULONGEST val)
4502{
4503 ULONGEST ps;
9779414d 4504 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
cca44b1b
JB
4505
4506 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4507
4508 if ((val & 1) == 1)
c906108c 4509 {
9779414d 4510 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
cca44b1b
JB
4511 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
4512 }
4513 else if ((val & 2) == 0)
4514 {
9779414d 4515 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
cca44b1b 4516 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
c906108c
SS
4517 }
4518 else
4519 {
cca44b1b
JB
4520 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
4521 mode, align dest to 4 bytes). */
4522 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
9779414d 4523 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
cca44b1b 4524 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
c906108c
SS
4525 }
4526}
ed9a39eb 4527
cca44b1b 4528/* Write to the PC as if from a load instruction. */
ed9a39eb 4529
34e8f22d 4530static void
36073a92
YQ
4531load_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4532 ULONGEST val)
ed9a39eb 4533{
cca44b1b
JB
4534 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
4535 bx_write_pc (regs, val);
4536 else
36073a92 4537 branch_write_pc (regs, dsc, val);
cca44b1b 4538}
be8626e0 4539
cca44b1b
JB
4540/* Write to the PC as if from an ALU instruction. */
4541
4542static void
36073a92
YQ
4543alu_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4544 ULONGEST val)
cca44b1b 4545{
36073a92 4546 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
cca44b1b
JB
4547 bx_write_pc (regs, val);
4548 else
36073a92 4549 branch_write_pc (regs, dsc, val);
cca44b1b
JB
4550}
4551
4552/* Helper for writing to registers for displaced stepping. Writing to the PC
4553	   has varying effects depending on the instruction which does the write:
4554 this is controlled by the WRITE_PC argument. */
4555
4556void
4557displaced_write_reg (struct regcache *regs, struct displaced_step_closure *dsc,
4558 int regno, ULONGEST val, enum pc_write_style write_pc)
4559{
bf9f652a 4560 if (regno == ARM_PC_REGNUM)
08216dd7 4561 {
cca44b1b
JB
4562 if (debug_displaced)
4563 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
4564 (unsigned long) val);
4565 switch (write_pc)
08216dd7 4566 {
cca44b1b 4567 case BRANCH_WRITE_PC:
36073a92 4568 branch_write_pc (regs, dsc, val);
08216dd7
RE
4569 break;
4570
cca44b1b
JB
4571 case BX_WRITE_PC:
4572 bx_write_pc (regs, val);
4573 break;
4574
4575 case LOAD_WRITE_PC:
36073a92 4576 load_write_pc (regs, dsc, val);
cca44b1b
JB
4577 break;
4578
4579 case ALU_WRITE_PC:
36073a92 4580 alu_write_pc (regs, dsc, val);
cca44b1b
JB
4581 break;
4582
4583 case CANNOT_WRITE_PC:
4584 warning (_("Instruction wrote to PC in an unexpected way when "
4585 "single-stepping"));
08216dd7
RE
4586 break;
4587
4588 default:
97b9747c
JB
4589 internal_error (__FILE__, __LINE__,
4590 _("Invalid argument to displaced_write_reg"));
08216dd7 4591 }
b508a996 4592
cca44b1b 4593 dsc->wrote_to_pc = 1;
b508a996 4594 }
ed9a39eb 4595 else
b508a996 4596 {
cca44b1b
JB
4597 if (debug_displaced)
4598 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
4599 regno, (unsigned long) val);
4600 regcache_cooked_write_unsigned (regs, regno, val);
b508a996 4601 }
34e8f22d
RE
4602}
4603
cca44b1b
JB
4604/* This function is used to concisely determine if an instruction INSN
4605 references PC. Register fields of interest in INSN should have the
0963b4bd
MS
4606 corresponding fields of BITMASK set to 0b1111. The function
4607	   returns 1 if any of these fields in INSN reference the PC
4608 (also 0b1111, r15), else it returns 0. */
67255d04
RE
4609
4610static int
cca44b1b 4611insn_references_pc (uint32_t insn, uint32_t bitmask)
67255d04 4612{
cca44b1b 4613 uint32_t lowbit = 1;
67255d04 4614
cca44b1b
JB
4615 while (bitmask != 0)
4616 {
4617 uint32_t mask;
44e1a9eb 4618
cca44b1b
JB
4619 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
4620 ;
67255d04 4621
cca44b1b
JB
4622 if (!lowbit)
4623 break;
67255d04 4624
cca44b1b 4625 mask = lowbit * 0xf;
67255d04 4626
cca44b1b
JB
4627 if ((insn & mask) == mask)
4628 return 1;
4629
4630 bitmask &= ~mask;
67255d04
RE
4631 }
4632
cca44b1b
JB
4633 return 0;
4634}
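/* Illustrative sketch, not from the original file: how the mask
   convention above is used.  To ask whether an instruction uses the PC
   in its Rn field, set that 4-bit field to 0b1111 in the mask.  The
   instruction words below are examples chosen for illustration.  */

static void
sketch_insn_references_pc_examples (void)
{
  /* ldr r1, [pc, #4]: Rn (bits 16-19) is 15, so the PC is referenced
     and insn_references_pc returns 1.  */
  int pc_relative = insn_references_pc (0xe59f1004, 0x000f0000ul);

  /* ldr r1, [r2, #4]: Rn is r2, so the tested field does not name the
     PC and the result is 0.  */
  int plain_load = insn_references_pc (0xe5921004, 0x000f0000ul);

  (void) pc_relative;
  (void) plain_load;
}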
2af48f68 4635
cca44b1b
JB
4636/* The simplest copy function. Many instructions have the same effect no
4637 matter what address they are executed at: in those cases, use this. */
67255d04 4638
cca44b1b 4639static int
7ff120b4
YQ
4640arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
4641 const char *iname, struct displaced_step_closure *dsc)
cca44b1b
JB
4642{
4643 if (debug_displaced)
4644 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
4645 "opcode/class '%s' unmodified\n", (unsigned long) insn,
4646 iname);
67255d04 4647
cca44b1b 4648 dsc->modinsn[0] = insn;
67255d04 4649
cca44b1b
JB
4650 return 0;
4651}
4652
34518530
YQ
4653static int
4654thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
4655 uint16_t insn2, const char *iname,
4656 struct displaced_step_closure *dsc)
4657{
4658 if (debug_displaced)
4659 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
4660 "opcode/class '%s' unmodified\n", insn1, insn2,
4661 iname);
4662
4663 dsc->modinsn[0] = insn1;
4664 dsc->modinsn[1] = insn2;
4665 dsc->numinsns = 2;
4666
4667 return 0;
4668}
4669
4670/* Copy a 16-bit Thumb (Thumb and 16-bit Thumb-2) instruction without any
4671   modification. */
4672static int
615234c1 4673thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, uint16_t insn,
34518530
YQ
4674 const char *iname,
4675 struct displaced_step_closure *dsc)
4676{
4677 if (debug_displaced)
4678 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
4679 "opcode/class '%s' unmodified\n", insn,
4680 iname);
4681
4682 dsc->modinsn[0] = insn;
4683
4684 return 0;
4685}
4686
cca44b1b
JB
4687/* Preload instructions with immediate offset. */
4688
4689static void
6e39997a 4690cleanup_preload (struct gdbarch *gdbarch,
cca44b1b
JB
4691 struct regcache *regs, struct displaced_step_closure *dsc)
4692{
4693 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4694 if (!dsc->u.preload.immed)
4695 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
4696}
4697
7ff120b4
YQ
4698static void
4699install_preload (struct gdbarch *gdbarch, struct regcache *regs,
4700 struct displaced_step_closure *dsc, unsigned int rn)
cca44b1b 4701{
cca44b1b 4702 ULONGEST rn_val;
cca44b1b
JB
4703 /* Preload instructions:
4704
4705 {pli/pld} [rn, #+/-imm]
4706 ->
4707 {pli/pld} [r0, #+/-imm]. */
4708
36073a92
YQ
4709 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4710 rn_val = displaced_read_reg (regs, dsc, rn);
cca44b1b 4711 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
cca44b1b
JB
4712 dsc->u.preload.immed = 1;
4713
cca44b1b 4714 dsc->cleanup = &cleanup_preload;
cca44b1b
JB
4715}
4716
cca44b1b 4717static int
7ff120b4 4718arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
cca44b1b
JB
4719 struct displaced_step_closure *dsc)
4720{
4721 unsigned int rn = bits (insn, 16, 19);
cca44b1b 4722
7ff120b4
YQ
4723 if (!insn_references_pc (insn, 0x000f0000ul))
4724 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
cca44b1b
JB
4725
4726 if (debug_displaced)
4727 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4728 (unsigned long) insn);
4729
7ff120b4
YQ
4730 dsc->modinsn[0] = insn & 0xfff0ffff;
4731
4732 install_preload (gdbarch, regs, dsc, rn);
4733
4734 return 0;
4735}
4736
34518530
YQ
4737static int
4738thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
4739 struct regcache *regs, struct displaced_step_closure *dsc)
4740{
4741 unsigned int rn = bits (insn1, 0, 3);
4742 unsigned int u_bit = bit (insn1, 7);
4743 int imm12 = bits (insn2, 0, 11);
4744 ULONGEST pc_val;
4745
4746 if (rn != ARM_PC_REGNUM)
4747 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
4748
4749	  /* PC is only allowed to be used in PLI (immediate, literal) Encoding T3, and
4750 PLD (literal) Encoding T1. */
4751 if (debug_displaced)
4752 fprintf_unfiltered (gdb_stdlog,
4753 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
4754 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
4755 imm12);
4756
4757 if (!u_bit)
4758 imm12 = -1 * imm12;
4759
4760 /* Rewrite instruction {pli/pld} PC imm12 into:
4761 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
4762
4763 {pli/pld} [r0, r1]
4764
4765 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
4766
4767 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4768 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4769
4770 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
4771
4772 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
4773 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
4774 dsc->u.preload.immed = 0;
4775
4776 /* {pli/pld} [r0, r1] */
4777 dsc->modinsn[0] = insn1 & 0xfff0;
4778 dsc->modinsn[1] = 0xf001;
4779 dsc->numinsns = 2;
4780
4781 dsc->cleanup = &cleanup_preload;
4782 return 0;
4783}
4784
7ff120b4
YQ
4785/* Preload instructions with register offset. */
4786
4787static void
4788install_preload_reg (struct gdbarch *gdbarch, struct regcache *regs,
4789 struct displaced_step_closure *dsc, unsigned int rn,
4790 unsigned int rm)
4791{
4792 ULONGEST rn_val, rm_val;
4793
cca44b1b
JB
4794 /* Preload register-offset instructions:
4795
4796 {pli/pld} [rn, rm {, shift}]
4797 ->
4798 {pli/pld} [r0, r1 {, shift}]. */
4799
36073a92
YQ
4800 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4801 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4802 rn_val = displaced_read_reg (regs, dsc, rn);
4803 rm_val = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
4804 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4805 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
cca44b1b
JB
4806 dsc->u.preload.immed = 0;
4807
cca44b1b 4808 dsc->cleanup = &cleanup_preload;
7ff120b4
YQ
4809}
4810
4811static int
4812arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
4813 struct regcache *regs,
4814 struct displaced_step_closure *dsc)
4815{
4816 unsigned int rn = bits (insn, 16, 19);
4817 unsigned int rm = bits (insn, 0, 3);
4818
4819
4820 if (!insn_references_pc (insn, 0x000f000ful))
4821 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
4822
4823 if (debug_displaced)
4824 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4825 (unsigned long) insn);
4826
4827 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
cca44b1b 4828
7ff120b4 4829 install_preload_reg (gdbarch, regs, dsc, rn, rm);
cca44b1b
JB
4830 return 0;
4831}
4832
4833/* Copy/cleanup coprocessor load and store instructions. */
4834
4835static void
6e39997a 4836cleanup_copro_load_store (struct gdbarch *gdbarch,
cca44b1b
JB
4837 struct regcache *regs,
4838 struct displaced_step_closure *dsc)
4839{
36073a92 4840 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
cca44b1b
JB
4841
4842 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4843
4844 if (dsc->u.ldst.writeback)
4845 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
4846}
4847
7ff120b4
YQ
4848static void
4849install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
4850 struct displaced_step_closure *dsc,
4851 int writeback, unsigned int rn)
cca44b1b 4852{
cca44b1b 4853 ULONGEST rn_val;
cca44b1b 4854
cca44b1b
JB
4855 /* Coprocessor load/store instructions:
4856
4857 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
4858 ->
4859 {stc/stc2} [r0, #+/-imm].
4860
4861 ldc/ldc2 are handled identically. */
4862
36073a92
YQ
4863 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4864 rn_val = displaced_read_reg (regs, dsc, rn);
2b16b2e3
YQ
4865 /* PC should be 4-byte aligned. */
4866 rn_val = rn_val & 0xfffffffc;
cca44b1b
JB
4867 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4868
7ff120b4 4869 dsc->u.ldst.writeback = writeback;
cca44b1b
JB
4870 dsc->u.ldst.rn = rn;
4871
7ff120b4
YQ
4872 dsc->cleanup = &cleanup_copro_load_store;
4873}
4874
4875static int
4876arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
4877 struct regcache *regs,
4878 struct displaced_step_closure *dsc)
4879{
4880 unsigned int rn = bits (insn, 16, 19);
4881
4882 if (!insn_references_pc (insn, 0x000f0000ul))
4883 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
4884
4885 if (debug_displaced)
4886 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4887 "load/store insn %.8lx\n", (unsigned long) insn);
4888
cca44b1b
JB
4889 dsc->modinsn[0] = insn & 0xfff0ffff;
4890
7ff120b4 4891 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
cca44b1b
JB
4892
4893 return 0;
4894}
4895
34518530
YQ
4896static int
4897thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
4898 uint16_t insn2, struct regcache *regs,
4899 struct displaced_step_closure *dsc)
4900{
4901 unsigned int rn = bits (insn1, 0, 3);
4902
4903 if (rn != ARM_PC_REGNUM)
4904 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
4905 "copro load/store", dsc);
4906
4907 if (debug_displaced)
4908 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4909 "load/store insn %.4x%.4x\n", insn1, insn2);
4910
4911 dsc->modinsn[0] = insn1 & 0xfff0;
4912 dsc->modinsn[1] = insn2;
4913 dsc->numinsns = 2;
4914
4915	  /* This function is called for copying LDC/LDC2/VLDR instructions, which
4916	     don't support writeback, so pass 0. */
4917 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
4918
4919 return 0;
4920}
4921
cca44b1b
JB
4922/* Clean up branch instructions (actually perform the branch, by setting
4923 PC). */
4924
4925static void
6e39997a 4926cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
cca44b1b
JB
4927 struct displaced_step_closure *dsc)
4928{
36073a92 4929 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
cca44b1b
JB
4930 int branch_taken = condition_true (dsc->u.branch.cond, status);
4931 enum pc_write_style write_pc = dsc->u.branch.exchange
4932 ? BX_WRITE_PC : BRANCH_WRITE_PC;
4933
4934 if (!branch_taken)
4935 return;
4936
4937 if (dsc->u.branch.link)
4938 {
8c8dba6d
YQ
4939 /* The value of LR should be the next insn of current one. In order
4940	 not to confuse the logic handling a later insn `bx lr', if current insn mode
4941 is Thumb, the bit 0 of LR value should be set to 1. */
4942 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
4943
4944 if (dsc->is_thumb)
4945 next_insn_addr |= 0x1;
4946
4947 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
4948 CANNOT_WRITE_PC);
cca44b1b
JB
4949 }
4950
bf9f652a 4951 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
cca44b1b
JB
4952}
4953
4954/* Copy B/BL/BLX instructions with immediate destinations. */
4955
7ff120b4
YQ
4956static void
4957install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
4958 struct displaced_step_closure *dsc,
4959 unsigned int cond, int exchange, int link, long offset)
4960{
4961 /* Implement "BL<cond> <label>" as:
4962
4963 Preparation: cond <- instruction condition
4964 Insn: mov r0, r0 (nop)
4965 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
4966
4967 B<cond> similar, but don't set r14 in cleanup. */
4968
4969 dsc->u.branch.cond = cond;
4970 dsc->u.branch.link = link;
4971 dsc->u.branch.exchange = exchange;
4972
2b16b2e3
YQ
4973 dsc->u.branch.dest = dsc->insn_addr;
4974 if (link && exchange)
4975 /* For BLX, offset is computed from the Align (PC, 4). */
4976 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
4977
7ff120b4 4978 if (dsc->is_thumb)
2b16b2e3 4979 dsc->u.branch.dest += 4 + offset;
7ff120b4 4980 else
2b16b2e3 4981 dsc->u.branch.dest += 8 + offset;
7ff120b4
YQ
4982
4983 dsc->cleanup = &cleanup_branch;
4984}
cca44b1b 4985static int
7ff120b4
YQ
4986arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
4987 struct regcache *regs, struct displaced_step_closure *dsc)
cca44b1b
JB
4988{
4989 unsigned int cond = bits (insn, 28, 31);
4990 int exchange = (cond == 0xf);
4991 int link = exchange || bit (insn, 24);
cca44b1b
JB
4992 long offset;
4993
4994 if (debug_displaced)
4995 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
4996 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
4997 (unsigned long) insn);
cca44b1b
JB
4998 if (exchange)
4999 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
5000 then arrange the switch into Thumb mode. */
5001 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
5002 else
5003 offset = bits (insn, 0, 23) << 2;
5004
5005 if (bit (offset, 25))
5006 offset = offset | ~0x3ffffff;
5007
cca44b1b
JB
5008 dsc->modinsn[0] = ARM_NOP;
5009
7ff120b4 5010 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
cca44b1b
JB
5011 return 0;
5012}
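/* Illustrative sketch, not taken from GDB: the branch target
   computation performed by the decode above together with
   install_b_bl_blx.  The 24-bit immediate is shifted left by two,
   sign-extended from bit 25, and added to the instruction address plus
   the ARM pipeline offset of 8.  The sketch_* name is invented.  */

#include <stdint.h>

static uint32_t
sketch_arm_b_bl_dest (uint32_t insn_addr, uint32_t insn)
{
  uint32_t offset = (insn & 0x00ffffff) << 2;

  if (offset & (1U << 25))
    offset |= ~0x03ffffffU;	/* Sign-extend the 26-bit offset.  */
  return insn_addr + 8 + offset;
}

/* Example: 0xeafffffe at 0x8000 is "b ."; the offset decodes to -8,
   and 0x8000 + 8 - 8 branches back to 0x8000.  */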
5013
34518530
YQ
5014static int
5015thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
5016 uint16_t insn2, struct regcache *regs,
5017 struct displaced_step_closure *dsc)
5018{
5019 int link = bit (insn2, 14);
5020 int exchange = link && !bit (insn2, 12);
5021 int cond = INST_AL;
5022 long offset = 0;
5023 int j1 = bit (insn2, 13);
5024 int j2 = bit (insn2, 11);
5025 int s = sbits (insn1, 10, 10);
5026 int i1 = !(j1 ^ bit (insn1, 10));
5027 int i2 = !(j2 ^ bit (insn1, 10));
5028
5029 if (!link && !exchange) /* B */
5030 {
5031 offset = (bits (insn2, 0, 10) << 1);
5032 if (bit (insn2, 12)) /* Encoding T4 */
5033 {
5034 offset |= (bits (insn1, 0, 9) << 12)
5035 | (i2 << 22)
5036 | (i1 << 23)
5037 | (s << 24);
5038 cond = INST_AL;
5039 }
5040 else /* Encoding T3 */
5041 {
5042 offset |= (bits (insn1, 0, 5) << 12)
5043 | (j1 << 18)
5044 | (j2 << 19)
5045 | (s << 20);
5046 cond = bits (insn1, 6, 9);
5047 }
5048 }
5049 else
5050 {
5051 offset = (bits (insn1, 0, 9) << 12);
5052 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
5053 offset |= exchange ?
5054 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
5055 }
5056
5057 if (debug_displaced)
5058 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
5059 "%.4x %.4x with offset %.8lx\n",
5060 link ? (exchange) ? "blx" : "bl" : "b",
5061 insn1, insn2, offset);
5062
5063 dsc->modinsn[0] = THUMB_NOP;
5064
5065 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
5066 return 0;
5067}
5068
5069/* Copy B Thumb instructions. */
5070static int
615234c1 5071thumb_copy_b (struct gdbarch *gdbarch, uint16_t insn,
34518530
YQ
5072 struct displaced_step_closure *dsc)
5073{
5074 unsigned int cond = 0;
5075 int offset = 0;
5076 unsigned short bit_12_15 = bits (insn, 12, 15);
5077 CORE_ADDR from = dsc->insn_addr;
5078
5079 if (bit_12_15 == 0xd)
5080 {
5081 /* offset = SignExtend (imm8:0, 32) */
5082 offset = sbits ((insn << 1), 0, 8);
5083 cond = bits (insn, 8, 11);
5084 }
5085 else if (bit_12_15 == 0xe) /* Encoding T2 */
5086 {
5087 offset = sbits ((insn << 1), 0, 11);
5088 cond = INST_AL;
5089 }
5090
5091 if (debug_displaced)
5092 fprintf_unfiltered (gdb_stdlog,
5093 "displaced: copying b immediate insn %.4x "
5094 "with offset %d\n", insn, offset);
5095
5096 dsc->u.branch.cond = cond;
5097 dsc->u.branch.link = 0;
5098 dsc->u.branch.exchange = 0;
5099 dsc->u.branch.dest = from + 4 + offset;
5100
5101 dsc->modinsn[0] = THUMB_NOP;
5102
5103 dsc->cleanup = &cleanup_branch;
5104
5105 return 0;
5106}
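
/* Example for thumb_copy_b above: insn = 0xd0fe is "beq ." (encoding T1,
   cond = EQ, imm8 = 0xfe); sbits (insn << 1, 0, 8) sign-extends 0x1fc to
   -4, so dest = from + 4 - 4 = from, i.e. the branch targets itself.  */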
5107
cca44b1b
JB
5108/* Copy BX/BLX with register-specified destinations. */
5109
7ff120b4
YQ
5110static void
5111install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
5112 struct displaced_step_closure *dsc, int link,
5113 unsigned int cond, unsigned int rm)
cca44b1b 5114{
cca44b1b
JB
5115 /* Implement {BX,BLX}<cond> <reg>" as:
5116
5117 Preparation: cond <- instruction condition
5118 Insn: mov r0, r0 (nop)
5119 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5120
5121 Don't set r14 in cleanup for BX. */
5122
36073a92 5123 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
5124
5125 dsc->u.branch.cond = cond;
5126 dsc->u.branch.link = link;
cca44b1b 5127
7ff120b4 5128 dsc->u.branch.exchange = 1;
cca44b1b
JB
5129
5130 dsc->cleanup = &cleanup_branch;
7ff120b4 5131}
cca44b1b 5132
7ff120b4
YQ
5133static int
5134arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
5135 struct regcache *regs, struct displaced_step_closure *dsc)
5136{
5137 unsigned int cond = bits (insn, 28, 31);
5138 /* BX: x12xxx1x
5139 BLX: x12xxx3x. */
5140 int link = bit (insn, 5);
5141 unsigned int rm = bits (insn, 0, 3);
5142
5143 if (debug_displaced)
5144 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx\n",
5145 (unsigned long) insn);
5146
5147 dsc->modinsn[0] = ARM_NOP;
5148
5149 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
cca44b1b
JB
5150 return 0;
5151}
5152
34518530
YQ
5153static int
5154thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
5155 struct regcache *regs,
5156 struct displaced_step_closure *dsc)
5157{
5158 int link = bit (insn, 7);
5159 unsigned int rm = bits (insn, 3, 6);
5160
5161 if (debug_displaced)
5162 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x\n",
5163 (unsigned short) insn);
5164
5165 dsc->modinsn[0] = THUMB_NOP;
5166
5167 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
5168
5169 return 0;
5170}
5171
5172
0963b4bd 5173/* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
cca44b1b
JB
5174
5175static void
6e39997a 5176cleanup_alu_imm (struct gdbarch *gdbarch,
cca44b1b
JB
5177 struct regcache *regs, struct displaced_step_closure *dsc)
5178{
36073a92 5179 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
cca44b1b
JB
5180 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5181 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5182 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5183}
5184
5185static int
7ff120b4
YQ
5186arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5187 struct displaced_step_closure *dsc)
cca44b1b
JB
5188{
5189 unsigned int rn = bits (insn, 16, 19);
5190 unsigned int rd = bits (insn, 12, 15);
5191 unsigned int op = bits (insn, 21, 24);
5192 int is_mov = (op == 0xd);
5193 ULONGEST rd_val, rn_val;
cca44b1b
JB
5194
5195 if (!insn_references_pc (insn, 0x000ff000ul))
7ff120b4 5196 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
cca44b1b
JB
5197
5198 if (debug_displaced)
5199 fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
5200 "%.8lx\n", is_mov ? "move" : "ALU",
5201 (unsigned long) insn);
5202
5203 /* Instruction is of form:
5204
5205 <op><cond> rd, [rn,] #imm
5206
5207 Rewrite as:
5208
5209 Preparation: tmp1, tmp2 <- r0, r1;
5210 r0, r1 <- rd, rn
5211 Insn: <op><cond> r0, r1, #imm
5212 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5213 */
5214
36073a92
YQ
5215 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5216 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5217 rn_val = displaced_read_reg (regs, dsc, rn);
5218 rd_val = displaced_read_reg (regs, dsc, rd);
cca44b1b
JB
5219 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5220 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5221 dsc->rd = rd;
5222
5223 if (is_mov)
5224 dsc->modinsn[0] = insn & 0xfff00fff;
5225 else
5226 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
5227
5228 dsc->cleanup = &cleanup_alu_imm;
5229
5230 return 0;
5231}
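
/* Example for arm_copy_alu_imm above: insn = 0xe28f2004 ("add r2, pc, #4")
   references PC as Rn, so it is rewritten to 0xe2810004 ("add r0, r1, #4");
   r1 has been preloaded with the PC value, and the cleanup copies r0 back
   into r2.  */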
5232
34518530
YQ
5233static int
5234thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
5235 uint16_t insn2, struct regcache *regs,
5236 struct displaced_step_closure *dsc)
5237{
5238 unsigned int op = bits (insn1, 5, 8);
5239 unsigned int rn, rm, rd;
5240 ULONGEST rd_val, rn_val;
5241
5242 rn = bits (insn1, 0, 3); /* Rn */
5243 rm = bits (insn2, 0, 3); /* Rm */
5244 rd = bits (insn2, 8, 11); /* Rd */
5245
5246 /* This routine is only called for the MOV instruction. */
5247 gdb_assert (op == 0x2 && rn == 0xf);
5248
5249 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
5250 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
5251
5252 if (debug_displaced)
5253 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
5254 "ALU", insn1, insn2);
5255
5256 /* Instruction is of form:
5257
5258 <op><cond> rd, [rn,] #imm
5259
5260 Rewrite as:
5261
5262 Preparation: tmp1, tmp2 <- r0, r1;
5263 r0, r1 <- rd, rn
5264 Insn: <op><cond> r0, r1, #imm
5265 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5266 */
5267
5268 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5269 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5270 rn_val = displaced_read_reg (regs, dsc, rn);
5271 rd_val = displaced_read_reg (regs, dsc, rd);
5272 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5273 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5274 dsc->rd = rd;
5275
5276 dsc->modinsn[0] = insn1;
5277 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
5278 dsc->numinsns = 2;
5279
5280 dsc->cleanup = &cleanup_alu_imm;
5281
5282 return 0;
5283}
5284
cca44b1b
JB
5285/* Copy/cleanup arithmetic/logic insns with register RHS. */
5286
5287static void
6e39997a 5288cleanup_alu_reg (struct gdbarch *gdbarch,
cca44b1b
JB
5289 struct regcache *regs, struct displaced_step_closure *dsc)
5290{
5291 ULONGEST rd_val;
5292 int i;
5293
36073a92 5294 rd_val = displaced_read_reg (regs, dsc, 0);
cca44b1b
JB
5295
5296 for (i = 0; i < 3; i++)
5297 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5298
5299 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5300}
5301
7ff120b4
YQ
5302static void
5303install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
5304 struct displaced_step_closure *dsc,
5305 unsigned int rd, unsigned int rn, unsigned int rm)
cca44b1b 5306{
cca44b1b 5307 ULONGEST rd_val, rn_val, rm_val;
cca44b1b 5308
cca44b1b
JB
5309 /* Instruction is of form:
5310
5311 <op><cond> rd, [rn,] rm [, <shift>]
5312
5313 Rewrite as:
5314
5315 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
5316 r0, r1, r2 <- rd, rn, rm
ef713951 5317 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
cca44b1b
JB
5318 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
5319 */
5320
36073a92
YQ
5321 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5322 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5323 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5324 rd_val = displaced_read_reg (regs, dsc, rd);
5325 rn_val = displaced_read_reg (regs, dsc, rn);
5326 rm_val = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
5327 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5328 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5329 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5330 dsc->rd = rd;
5331
7ff120b4
YQ
5332 dsc->cleanup = &cleanup_alu_reg;
5333}
5334
5335static int
5336arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5337 struct displaced_step_closure *dsc)
5338{
5339 unsigned int op = bits (insn, 21, 24);
5340 int is_mov = (op == 0xd);
5341
5342 if (!insn_references_pc (insn, 0x000ff00ful))
5343 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
5344
5345 if (debug_displaced)
5346 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
5347 is_mov ? "move" : "ALU", (unsigned long) insn);
5348
cca44b1b
JB
5349 if (is_mov)
5350 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
5351 else
5352 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
5353
7ff120b4
YQ
5354 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
5355 bits (insn, 0, 3));
cca44b1b
JB
5356 return 0;
5357}
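
/* Example for arm_copy_alu_reg above: insn = 0xe08f3002 ("add r3, pc, r2")
   is rewritten to 0xe0810002 ("add r0, r1, r2"), with r1 holding the PC
   value and r2 holding the original r2; the cleanup moves r0 into r3 and
   restores r0-r2.  */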
5358
34518530
YQ
5359static int
5360thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
5361 struct regcache *regs,
5362 struct displaced_step_closure *dsc)
5363{
ef713951 5364 unsigned rm, rd;
34518530 5365
ef713951
YQ
5366 rm = bits (insn, 3, 6);
5367 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
34518530 5368
ef713951 5369 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
34518530
YQ
5370 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
5371
5372 if (debug_displaced)
ef713951
YQ
5373 fprintf_unfiltered (gdb_stdlog, "displaced: copying ALU reg insn %.4x\n",
5374 (unsigned short) insn);
34518530 5375
ef713951 5376 dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
34518530 5377
ef713951 5378 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
34518530
YQ
5379
5380 return 0;
5381}
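
/* Example for thumb_copy_alu_reg above: insn = 0x469f ("mov pc, r3") has
   rd = 15 and rm = 3; it is rewritten to 0x4610 ("mov r0, r2"), r0 and r2
   are preloaded with the PC and r3 values, and the cleanup writes r0 back
   to the PC via ALU_WRITE_PC.  */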
5382
cca44b1b
JB
5383/* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
5384
5385static void
6e39997a 5386cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
cca44b1b
JB
5387 struct regcache *regs,
5388 struct displaced_step_closure *dsc)
5389{
36073a92 5390 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
cca44b1b
JB
5391 int i;
5392
5393 for (i = 0; i < 4; i++)
5394 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5395
5396 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5397}
5398
7ff120b4
YQ
5399static void
5400install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
5401 struct displaced_step_closure *dsc,
5402 unsigned int rd, unsigned int rn, unsigned int rm,
5403 unsigned rs)
cca44b1b 5404{
7ff120b4 5405 int i;
cca44b1b 5406 ULONGEST rd_val, rn_val, rm_val, rs_val;
cca44b1b 5407
cca44b1b
JB
5408 /* Instruction is of form:
5409
5410 <op><cond> rd, [rn,] rm, <shift> rs
5411
5412 Rewrite as:
5413
5414 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
5415 r0, r1, r2, r3 <- rd, rn, rm, rs
5416 Insn: <op><cond> r0, r1, r2, <shift> r3
5417 Cleanup: tmp5 <- r0
5418 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
5419 rd <- tmp5
5420 */
5421
5422 for (i = 0; i < 4; i++)
36073a92 5423 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
cca44b1b 5424
36073a92
YQ
5425 rd_val = displaced_read_reg (regs, dsc, rd);
5426 rn_val = displaced_read_reg (regs, dsc, rn);
5427 rm_val = displaced_read_reg (regs, dsc, rm);
5428 rs_val = displaced_read_reg (regs, dsc, rs);
cca44b1b
JB
5429 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5430 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5431 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5432 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
5433 dsc->rd = rd;
7ff120b4
YQ
5434 dsc->cleanup = &cleanup_alu_shifted_reg;
5435}
5436
5437static int
5438arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
5439 struct regcache *regs,
5440 struct displaced_step_closure *dsc)
5441{
5442 unsigned int op = bits (insn, 21, 24);
5443 int is_mov = (op == 0xd);
5444 unsigned int rd, rn, rm, rs;
5445
5446 if (!insn_references_pc (insn, 0x000fff0ful))
5447 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
5448
5449 if (debug_displaced)
5450 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
5451 "%.8lx\n", is_mov ? "move" : "ALU",
5452 (unsigned long) insn);
5453
5454 rn = bits (insn, 16, 19);
5455 rm = bits (insn, 0, 3);
5456 rs = bits (insn, 8, 11);
5457 rd = bits (insn, 12, 15);
cca44b1b
JB
5458
5459 if (is_mov)
5460 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
5461 else
5462 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
5463
7ff120b4 5464 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
cca44b1b
JB
5465
5466 return 0;
5467}
5468
5469/* Clean up load instructions. */
5470
5471static void
6e39997a 5472cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
cca44b1b
JB
5473 struct displaced_step_closure *dsc)
5474{
5475 ULONGEST rt_val, rt_val2 = 0, rn_val;
cca44b1b 5476
36073a92 5477 rt_val = displaced_read_reg (regs, dsc, 0);
cca44b1b 5478 if (dsc->u.ldst.xfersize == 8)
36073a92
YQ
5479 rt_val2 = displaced_read_reg (regs, dsc, 1);
5480 rn_val = displaced_read_reg (regs, dsc, 2);
cca44b1b
JB
5481
5482 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5483 if (dsc->u.ldst.xfersize > 4)
5484 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5485 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5486 if (!dsc->u.ldst.immed)
5487 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5488
5489 /* Handle register writeback. */
5490 if (dsc->u.ldst.writeback)
5491 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5492 /* Put result in right place. */
5493 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
5494 if (dsc->u.ldst.xfersize == 8)
5495 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
5496}
5497
5498/* Clean up store instructions. */
5499
5500static void
6e39997a 5501cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
cca44b1b
JB
5502 struct displaced_step_closure *dsc)
5503{
36073a92 5504 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
cca44b1b
JB
5505
5506 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5507 if (dsc->u.ldst.xfersize > 4)
5508 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5509 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5510 if (!dsc->u.ldst.immed)
5511 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5512 if (!dsc->u.ldst.restore_r4)
5513 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
5514
5515 /* Writeback. */
5516 if (dsc->u.ldst.writeback)
5517 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5518}
5519
5520/* Copy "extra" load/store instructions. These are halfword/doubleword
5521 transfers, which have a different encoding to byte/word transfers. */
5522
5523static int
550dc4e2 5524arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
7ff120b4 5525 struct regcache *regs, struct displaced_step_closure *dsc)
cca44b1b
JB
5526{
5527 unsigned int op1 = bits (insn, 20, 24);
5528 unsigned int op2 = bits (insn, 5, 6);
5529 unsigned int rt = bits (insn, 12, 15);
5530 unsigned int rn = bits (insn, 16, 19);
5531 unsigned int rm = bits (insn, 0, 3);
5532 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
5533 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
5534 int immed = (op1 & 0x4) != 0;
5535 int opcode;
5536 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
cca44b1b
JB
5537
5538 if (!insn_references_pc (insn, 0x000ff00ful))
7ff120b4 5539 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
cca44b1b
JB
5540
5541 if (debug_displaced)
5542 fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
550dc4e2 5543 "insn %.8lx\n", unprivileged ? "unprivileged " : "",
cca44b1b
JB
5544 (unsigned long) insn);
5545
5546 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
5547
5548 if (opcode < 0)
5549 internal_error (__FILE__, __LINE__,
5550 _("copy_extra_ld_st: instruction decode error"));
5551
36073a92
YQ
5552 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5553 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5554 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
cca44b1b 5555 if (!immed)
36073a92 5556 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
cca44b1b 5557
36073a92 5558 rt_val = displaced_read_reg (regs, dsc, rt);
cca44b1b 5559 if (bytesize[opcode] == 8)
36073a92
YQ
5560 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
5561 rn_val = displaced_read_reg (regs, dsc, rn);
cca44b1b 5562 if (!immed)
36073a92 5563 rm_val = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
5564
5565 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5566 if (bytesize[opcode] == 8)
5567 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
5568 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5569 if (!immed)
5570 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5571
5572 dsc->rd = rt;
5573 dsc->u.ldst.xfersize = bytesize[opcode];
5574 dsc->u.ldst.rn = rn;
5575 dsc->u.ldst.immed = immed;
5576 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
5577 dsc->u.ldst.restore_r4 = 0;
5578
5579 if (immed)
5580 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
5581 ->
5582 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
5583 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5584 else
5585 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
5586 ->
5587 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
5588 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5589
5590 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
5591
5592 return 0;
5593}
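
/* Example for arm_copy_extra_ld_st above: for LDRH (register), op2 = 1
   and op1 has the L bit set but not the immediate bit, so
   opcode = (1 << 2) + 1 - 4 = 1, selecting load[1] = 1 and
   bytesize[1] = 2, i.e. a two-byte load.  */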
5594
0f6f04ba 5595/* Copy byte/half word/word loads and stores. */
cca44b1b 5596
7ff120b4 5597static void
0f6f04ba
YQ
5598install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5599 struct displaced_step_closure *dsc, int load,
5600 int immed, int writeback, int size, int usermode,
5601 int rt, int rm, int rn)
cca44b1b 5602{
cca44b1b 5603 ULONGEST rt_val, rn_val, rm_val = 0;
cca44b1b 5604
36073a92
YQ
5605 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5606 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
cca44b1b 5607 if (!immed)
36073a92 5608 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
cca44b1b 5609 if (!load)
36073a92 5610 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
cca44b1b 5611
36073a92
YQ
5612 rt_val = displaced_read_reg (regs, dsc, rt);
5613 rn_val = displaced_read_reg (regs, dsc, rn);
cca44b1b 5614 if (!immed)
36073a92 5615 rm_val = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
5616
5617 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5618 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5619 if (!immed)
5620 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
cca44b1b 5621 dsc->rd = rt;
0f6f04ba 5622 dsc->u.ldst.xfersize = size;
cca44b1b
JB
5623 dsc->u.ldst.rn = rn;
5624 dsc->u.ldst.immed = immed;
7ff120b4 5625 dsc->u.ldst.writeback = writeback;
cca44b1b
JB
5626
5627 /* To write PC we can do:
5628
494e194e
YQ
5629 Before this sequence of instructions:
5630 r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
5631 r2 is the Rn value got from displaced_read_reg.
5632
5633 Insn1: push {pc} Write address of STR instruction + offset on stack
5634 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
5635 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
5636 = addr(Insn1) + offset - addr(Insn3) - 8
5637 = offset - 16
5638 Insn4: add r4, r4, #8 r4 = offset - 8
5639 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
5640 = from + offset
5641 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
cca44b1b
JB
5642
5643 Otherwise we don't know what value to write for PC, since the offset is
494e194e
YQ
5644 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
5645 of this can be found in Section "Saving from r15" in
5646 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
cca44b1b 5647
7ff120b4
YQ
5648 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5649}
5650
34518530
YQ
5651
5652static int
5653thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
5654 uint16_t insn2, struct regcache *regs,
5655 struct displaced_step_closure *dsc, int size)
5656{
5657 unsigned int u_bit = bit (insn1, 7);
5658 unsigned int rt = bits (insn2, 12, 15);
5659 int imm12 = bits (insn2, 0, 11);
5660 ULONGEST pc_val;
5661
5662 if (debug_displaced)
5663 fprintf_unfiltered (gdb_stdlog,
5664 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
5665 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
5666 imm12);
5667
5668 if (!u_bit)
5669 imm12 = -1 * imm12;
5670
5671 /* Rewrite the PC-relative instruction LDR Rt, [PC, #imm12] into:
5672
5673 Prepare: tmp[0] <- r0, tmp[2] <- r2, tmp[3] <- r3, r2 <- Align (PC, 4), r3 <- imm12
5674
5675 LDR R0, [R2, R3]
5676
5677 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[2], r3 <- tmp[3]. */
5678
5679
5680 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5681 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5682 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5683
5684 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5685
5686 pc_val = pc_val & 0xfffffffc;
5687
5688 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
5689 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
5690
5691 dsc->rd = rt;
5692
5693 dsc->u.ldst.xfersize = size;
5694 dsc->u.ldst.immed = 0;
5695 dsc->u.ldst.writeback = 0;
5696 dsc->u.ldst.restore_r4 = 0;
5697
5698 /* LDR.W R0, [R2, R3] */
5699 dsc->modinsn[0] = 0xf852;
5700 dsc->modinsn[1] = 0x3;
5701 dsc->numinsns = 2;
5702
5703 dsc->cleanup = &cleanup_load;
5704
5705 return 0;
5706}
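
/* Example for thumb2_copy_load_literal above: "ldr.w r1, [pc, #16]" is
   executed out of line as "ldr.w r0, [r2, r3]" (0xf852 0x0003) with
   r2 = Align (PC, 4) and r3 = 16; cleanup_load then moves r0 into r1 and
   restores r0, r2 and r3.  */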
5707
5708static int
5709thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
5710 uint16_t insn2, struct regcache *regs,
5711 struct displaced_step_closure *dsc,
5712 int writeback, int immed)
5713{
5714 unsigned int rt = bits (insn2, 12, 15);
5715 unsigned int rn = bits (insn1, 0, 3);
5716 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
5717 /* In LDR (register), there is also a register Rm, which is not allowed to
5718 be PC, so we don't have to check it. */
5719
5720 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
5721 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
5722 dsc);
5723
5724 if (debug_displaced)
5725 fprintf_unfiltered (gdb_stdlog,
5726 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
5727 rt, rn, insn1, insn2);
5728
5729 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
5730 0, rt, rm, rn);
5731
5732 dsc->u.ldst.restore_r4 = 0;
5733
5734 if (immed)
5735 /* ldr[b]<cond> rt, [rn, #imm], etc.
5736 ->
5737 ldr[b]<cond> r0, [r2, #imm]. */
5738 {
5739 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5740 dsc->modinsn[1] = insn2 & 0x0fff;
5741 }
5742 else
5743 /* ldr[b]<cond> rt, [rn, rm], etc.
5744 ->
5745 ldr[b]<cond> r0, [r2, r3]. */
5746 {
5747 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5748 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
5749 }
5750
5751 dsc->numinsns = 2;
5752
5753 return 0;
5754}
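
/* Example for thumb2_copy_load_reg_imm above: "ldr.w pc, [r4, #8]"
   (0xf8d4 0xf008) becomes "ldr.w r0, [r2, #8]" (0xf8d2 0x0008), with r2
   preloaded from r4; the cleanup then writes the loaded value to the PC
   via LOAD_WRITE_PC.  */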
5755
5756
7ff120b4
YQ
5757static int
5758arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
5759 struct regcache *regs,
5760 struct displaced_step_closure *dsc,
0f6f04ba 5761 int load, int size, int usermode)
7ff120b4
YQ
5762{
5763 int immed = !bit (insn, 25);
5764 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
5765 unsigned int rt = bits (insn, 12, 15);
5766 unsigned int rn = bits (insn, 16, 19);
5767 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
5768
5769 if (!insn_references_pc (insn, 0x000ff00ful))
5770 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
5771
5772 if (debug_displaced)
5773 fprintf_unfiltered (gdb_stdlog,
5774 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
0f6f04ba
YQ
5775 load ? (size == 1 ? "ldrb" : "ldr")
5776 : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
7ff120b4
YQ
5777 rt, rn,
5778 (unsigned long) insn);
5779
0f6f04ba
YQ
5780 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
5781 usermode, rt, rm, rn);
7ff120b4 5782
bf9f652a 5783 if (load || rt != ARM_PC_REGNUM)
cca44b1b
JB
5784 {
5785 dsc->u.ldst.restore_r4 = 0;
5786
5787 if (immed)
5788 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
5789 ->
5790 {ldr,str}[b]<cond> r0, [r2, #imm]. */
5791 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5792 else
5793 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
5794 ->
5795 {ldr,str}[b]<cond> r0, [r2, r3]. */
5796 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5797 }
5798 else
5799 {
5800 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
5801 dsc->u.ldst.restore_r4 = 1;
494e194e
YQ
5802 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
5803 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
cca44b1b
JB
5804 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
5805 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
5806 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
5807
5808 /* As above. */
5809 if (immed)
5810 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
5811 else
5812 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
5813
cca44b1b
JB
5814 dsc->numinsns = 6;
5815 }
5816
5817 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5818
5819 return 0;
5820}
5821
5822/* Cleanup LDM instructions with fully-populated register list. This is an
5823 unfortunate corner case: it's impossible to implement correctly by modifying
5824 the instruction. The issue is as follows: we have an instruction,
5825
5826 ldm rN, {r0-r15}
5827
5828 which we must rewrite to avoid loading PC. A possible solution would be to
5829 do the load in two halves, something like (with suitable cleanup
5830 afterwards):
5831
5832 mov r8, rN
5833 ldm[id][ab] r8!, {r0-r7}
5834 str r7, <temp>
5835 ldm[id][ab] r8, {r7-r14}
5836 <bkpt>
5837
5838 but at present there's no suitable place for <temp>, since the scratch space
5839 is overwritten before the cleanup routine is called. For now, we simply
5840 emulate the instruction. */
5841
5842static void
5843cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
5844 struct displaced_step_closure *dsc)
5845{
cca44b1b
JB
5846 int inc = dsc->u.block.increment;
5847 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
5848 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
5849 uint32_t regmask = dsc->u.block.regmask;
5850 int regno = inc ? 0 : 15;
5851 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
5852 int exception_return = dsc->u.block.load && dsc->u.block.user
5853 && (regmask & 0x8000) != 0;
36073a92 5854 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
cca44b1b
JB
5855 int do_transfer = condition_true (dsc->u.block.cond, status);
5856 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5857
5858 if (!do_transfer)
5859 return;
5860
5861 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
5862 sensible we can do here. Complain loudly. */
5863 if (exception_return)
5864 error (_("Cannot single-step exception return"));
5865
5866 /* We don't handle any stores here for now. */
5867 gdb_assert (dsc->u.block.load != 0);
5868
5869 if (debug_displaced)
5870 fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
5871 "%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
5872 dsc->u.block.increment ? "inc" : "dec",
5873 dsc->u.block.before ? "before" : "after");
5874
5875 while (regmask)
5876 {
5877 uint32_t memword;
5878
5879 if (inc)
bf9f652a 5880 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
cca44b1b
JB
5881 regno++;
5882 else
5883 while (regno >= 0 && (regmask & (1 << regno)) == 0)
5884 regno--;
5885
5886 xfer_addr += bump_before;
5887
5888 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
5889 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
5890
5891 xfer_addr += bump_after;
5892
5893 regmask &= ~(1 << regno);
5894 }
5895
5896 if (dsc->u.block.writeback)
5897 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
5898 CANNOT_WRITE_PC);
5899}
5900
5901/* Clean up an STM which included the PC in the register list. */
5902
5903static void
5904cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
5905 struct displaced_step_closure *dsc)
5906{
36073a92 5907 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
cca44b1b
JB
5908 int store_executed = condition_true (dsc->u.block.cond, status);
5909 CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
5910 CORE_ADDR stm_insn_addr;
5911 uint32_t pc_val;
5912 long offset;
5913 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5914
5915 /* If condition code fails, there's nothing else to do. */
5916 if (!store_executed)
5917 return;
5918
5919 if (dsc->u.block.increment)
5920 {
5921 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
5922
5923 if (dsc->u.block.before)
5924 pc_stored_at += 4;
5925 }
5926 else
5927 {
5928 pc_stored_at = dsc->u.block.xfer_addr;
5929
5930 if (dsc->u.block.before)
5931 pc_stored_at -= 4;
5932 }
5933
5934 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
5935 stm_insn_addr = dsc->scratch_base;
5936 offset = pc_val - stm_insn_addr;
5937
5938 if (debug_displaced)
5939 fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
5940 "STM instruction\n", offset);
5941
5942 /* Rewrite the stored PC to the proper value for the non-displaced original
5943 instruction. */
5944 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
5945 dsc->insn_addr + offset);
5946}
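
/* Example for cleanup_block_store_pc above: if the out-of-line STM stored
   the value scratch_base + 8, then offset = 8 and the stored word is
   rewritten to insn_addr + 8, i.e. the value the original instruction
   would have stored for the PC.  */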
5947
5948/* Clean up an LDM which includes the PC in the register list. We clumped all
5949 the registers in the transferred list into a contiguous range r0...rX (to
5950 avoid loading PC directly and losing control of the debugged program), so we
5951 must undo that here. */
5952
5953static void
6e39997a 5954cleanup_block_load_pc (struct gdbarch *gdbarch,
cca44b1b
JB
5955 struct regcache *regs,
5956 struct displaced_step_closure *dsc)
5957{
36073a92 5958 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
22e048c9 5959 int load_executed = condition_true (dsc->u.block.cond, status);
bf9f652a 5960 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
cca44b1b
JB
5961 unsigned int regs_loaded = bitcount (mask);
5962 unsigned int num_to_shuffle = regs_loaded, clobbered;
5963
5964 /* The method employed here will fail if the register list is fully populated
5965 (we need to avoid loading PC directly). */
5966 gdb_assert (num_to_shuffle < 16);
5967
5968 if (!load_executed)
5969 return;
5970
5971 clobbered = (1 << num_to_shuffle) - 1;
5972
5973 while (num_to_shuffle > 0)
5974 {
5975 if ((mask & (1 << write_reg)) != 0)
5976 {
5977 unsigned int read_reg = num_to_shuffle - 1;
5978
5979 if (read_reg != write_reg)
5980 {
36073a92 5981 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
cca44b1b
JB
5982 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
5983 if (debug_displaced)
5984 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
5985 "loaded register r%d to r%d\n"), read_reg,
5986 write_reg);
5987 }
5988 else if (debug_displaced)
5989 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
5990 "r%d already in the right place\n"),
5991 write_reg);
5992
5993 clobbered &= ~(1 << write_reg);
5994
5995 num_to_shuffle--;
5996 }
5997
5998 write_reg--;
5999 }
6000
6001 /* Restore any registers we scribbled over. */
6002 for (write_reg = 0; clobbered != 0; write_reg++)
6003 {
6004 if ((clobbered & (1 << write_reg)) != 0)
6005 {
6006 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
6007 CANNOT_WRITE_PC);
6008 if (debug_displaced)
6009 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
6010 "clobbered register r%d\n"), write_reg);
6011 clobbered &= ~(1 << write_reg);
6012 }
6013 }
6014
6015 /* Perform register writeback manually. */
6016 if (dsc->u.block.writeback)
6017 {
6018 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
6019
6020 if (dsc->u.block.increment)
6021 new_rn_val += regs_loaded * 4;
6022 else
6023 new_rn_val -= regs_loaded * 4;
6024
6025 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
6026 CANNOT_WRITE_PC);
6027 }
6028}
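
/* Example for cleanup_block_load_pc above: "ldm r0, {r1, r2, pc}" is run
   out of line as "ldm r0, {r0, r1, r2}"; this cleanup then moves r2 to
   the PC, r1 to r2 and r0 to r1, and finally restores the original r0
   from dsc->tmp[0].  */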
6029
6030/* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
6031 in user-level code (in particular exception return, ldm rn, {...pc}^). */
6032
6033static int
7ff120b4
YQ
6034arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
6035 struct regcache *regs,
6036 struct displaced_step_closure *dsc)
cca44b1b
JB
6037{
6038 int load = bit (insn, 20);
6039 int user = bit (insn, 22);
6040 int increment = bit (insn, 23);
6041 int before = bit (insn, 24);
6042 int writeback = bit (insn, 21);
6043 int rn = bits (insn, 16, 19);
cca44b1b 6044
0963b4bd
MS
6045 /* Block transfers which don't mention PC can be run directly
6046 out-of-line. */
bf9f652a 6047 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
7ff120b4 6048 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
cca44b1b 6049
bf9f652a 6050 if (rn == ARM_PC_REGNUM)
cca44b1b 6051 {
0963b4bd
MS
6052 warning (_("displaced: Unpredictable LDM or STM with "
6053 "base register r15"));
7ff120b4 6054 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
cca44b1b
JB
6055 }
6056
6057 if (debug_displaced)
6058 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6059 "%.8lx\n", (unsigned long) insn);
6060
36073a92 6061 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
cca44b1b
JB
6062 dsc->u.block.rn = rn;
6063
6064 dsc->u.block.load = load;
6065 dsc->u.block.user = user;
6066 dsc->u.block.increment = increment;
6067 dsc->u.block.before = before;
6068 dsc->u.block.writeback = writeback;
6069 dsc->u.block.cond = bits (insn, 28, 31);
6070
6071 dsc->u.block.regmask = insn & 0xffff;
6072
6073 if (load)
6074 {
6075 if ((insn & 0xffff) == 0xffff)
6076 {
6077 /* LDM with a fully-populated register list. This case is
6078 particularly tricky. Implement for now by fully emulating the
6079 instruction (which might not behave perfectly in all cases, but
6080 these instructions should be rare enough for that not to matter
6081 too much). */
6082 dsc->modinsn[0] = ARM_NOP;
6083
6084 dsc->cleanup = &cleanup_block_load_all;
6085 }
6086 else
6087 {
6088 /* LDM of a list of registers which includes PC. Implement by
6089 rewriting the list of registers to be transferred into a
6090 contiguous chunk r0...rX before doing the transfer, then shuffling
6091 registers into the correct places in the cleanup routine. */
6092 unsigned int regmask = insn & 0xffff;
bec2ab5a
SM
6093 unsigned int num_in_list = bitcount (regmask), new_regmask;
6094 unsigned int i;
cca44b1b
JB
6095
6096 for (i = 0; i < num_in_list; i++)
36073a92 6097 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
cca44b1b
JB
6098
6099 /* Writeback makes things complicated. We need to avoid clobbering
6100 the base register with one of the registers in our modified
6101 register list, but just using a different register can't work in
6102 all cases, e.g.:
6103
6104 ldm r14!, {r0-r13,pc}
6105
6106 which would need to be rewritten as:
6107
6108 ldm rN!, {r0-r14}
6109
6110 but that can't work, because there's no free register for N.
6111
6112 Solve this by turning off the writeback bit, and emulating
6113 writeback manually in the cleanup routine. */
6114
6115 if (writeback)
6116 insn &= ~(1 << 21);
6117
6118 new_regmask = (1 << num_in_list) - 1;
6119
6120 if (debug_displaced)
6121 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6122 "{..., pc}: original reg list %.4x, modified "
6123 "list %.4x\n"), rn, writeback ? "!" : "",
6124 (int) insn & 0xffff, new_regmask);
6125
6126 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
6127
6128 dsc->cleanup = &cleanup_block_load_pc;
6129 }
6130 }
6131 else
6132 {
6133 /* STM of a list of registers which includes PC. Run the instruction
6134 as-is, but out of line: this will store the wrong value for the PC,
6135 so we must manually fix up the memory in the cleanup routine.
6136 Doing things this way has the advantage that we can auto-detect
6137 the offset of the PC write (which is architecture-dependent) in
6138 the cleanup routine. */
6139 dsc->modinsn[0] = insn;
6140
6141 dsc->cleanup = &cleanup_block_store_pc;
6142 }
6143
6144 return 0;
6145}
6146
34518530
YQ
6147static int
6148thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6149 struct regcache *regs,
6150 struct displaced_step_closure *dsc)
cca44b1b 6151{
34518530
YQ
6152 int rn = bits (insn1, 0, 3);
6153 int load = bit (insn1, 4);
6154 int writeback = bit (insn1, 5);
cca44b1b 6155
34518530
YQ
6156 /* Block transfers which don't mention PC can be run directly
6157 out-of-line. */
6158 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
6159 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
7ff120b4 6160
34518530
YQ
6161 if (rn == ARM_PC_REGNUM)
6162 {
6163 warning (_("displaced: Unpredictable LDM or STM with "
6164 "base register r15"));
6165 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6166 "unpredictable ldm/stm", dsc);
6167 }
cca44b1b
JB
6168
6169 if (debug_displaced)
34518530
YQ
6170 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6171 "%.4x%.4x\n", insn1, insn2);
cca44b1b 6172
34518530
YQ
6173 /* Clear bit 13, since it should always be zero. */
6174 dsc->u.block.regmask = (insn2 & 0xdfff);
6175 dsc->u.block.rn = rn;
cca44b1b 6176
34518530
YQ
6177 dsc->u.block.load = load;
6178 dsc->u.block.user = 0;
6179 dsc->u.block.increment = bit (insn1, 7);
6180 dsc->u.block.before = bit (insn1, 8);
6181 dsc->u.block.writeback = writeback;
6182 dsc->u.block.cond = INST_AL;
6183 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
cca44b1b 6184
34518530
YQ
6185 if (load)
6186 {
6187 if (dsc->u.block.regmask == 0xffff)
6188 {
6189 /* This cannot happen: bit 13 was cleared from the register mask above, so it can never equal 0xffff. */
6190 gdb_assert (0);
6191 }
6192 else
6193 {
6194 unsigned int regmask = dsc->u.block.regmask;
bec2ab5a
SM
6195 unsigned int num_in_list = bitcount (regmask), new_regmask;
6196 unsigned int i;
34518530
YQ
6197
6198 for (i = 0; i < num_in_list; i++)
6199 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6200
6201 if (writeback)
6202 insn1 &= ~(1 << 5);
6203
6204 new_regmask = (1 << num_in_list) - 1;
6205
6206 if (debug_displaced)
6207 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6208 "{..., pc}: original reg list %.4x, modified "
6209 "list %.4x\n"), rn, writeback ? "!" : "",
6210 (int) dsc->u.block.regmask, new_regmask);
6211
6212 dsc->modinsn[0] = insn1;
6213 dsc->modinsn[1] = (new_regmask & 0xffff);
6214 dsc->numinsns = 2;
6215
6216 dsc->cleanup = &cleanup_block_load_pc;
6217 }
6218 }
6219 else
6220 {
6221 dsc->modinsn[0] = insn1;
6222 dsc->modinsn[1] = insn2;
6223 dsc->numinsns = 2;
6224 dsc->cleanup = &cleanup_block_store_pc;
6225 }
6226 return 0;
6227}
6228
d9311bfa
AT
6229/* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
6230 This is used to avoid a dependency on BFD's bfd_endian enum. */
6231
6232ULONGEST
6233arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len,
6234 int byte_order)
6235{
5f2dfcfd
AT
6236 return read_memory_unsigned_integer (memaddr, len,
6237 (enum bfd_endian) byte_order);
d9311bfa
AT
6238}
6239
6240/* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
6241
6242CORE_ADDR
6243arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self,
6244 CORE_ADDR val)
6245{
6246 return gdbarch_addr_bits_remove (get_regcache_arch (self->regcache), val);
6247}
6248
6249/* Wrapper over syscall_next_pc for use in get_next_pcs. */
6250
e7cf25a8 6251static CORE_ADDR
553cb527 6252arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self)
d9311bfa 6253{
d9311bfa
AT
6254 return 0;
6255}
6256
6257/* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
6258
6259int
6260arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self)
6261{
6262 return arm_is_thumb (self->regcache);
6263}
6264
6265/* arm_software_single_step() is called just before we want to resume
6266 the inferior, if we want to single-step it but there is no hardware
6267 or kernel single-step support. We find the possible targets of the
6268 coming instruction and set breakpoints on them. */
6269
6270int
6271arm_software_single_step (struct frame_info *frame)
6272{
6273 struct regcache *regcache = get_current_regcache ();
6274 struct gdbarch *gdbarch = get_regcache_arch (regcache);
6275 struct address_space *aspace = get_regcache_aspace (regcache);
6276 struct arm_get_next_pcs next_pcs_ctx;
6277 CORE_ADDR pc;
6278 int i;
6279 VEC (CORE_ADDR) *next_pcs = NULL;
6280 struct cleanup *old_chain = make_cleanup (VEC_cleanup (CORE_ADDR), &next_pcs);
6281
6282 arm_get_next_pcs_ctor (&next_pcs_ctx,
6283 &arm_get_next_pcs_ops,
6284 gdbarch_byte_order (gdbarch),
6285 gdbarch_byte_order_for_code (gdbarch),
1b451dda 6286 0,
d9311bfa
AT
6287 regcache);
6288
4d18591b 6289 next_pcs = arm_get_next_pcs (&next_pcs_ctx);
d9311bfa
AT
6290
6291 for (i = 0; VEC_iterate (CORE_ADDR, next_pcs, i, pc); i++)
6292 arm_insert_single_step_breakpoint (gdbarch, aspace, pc);
6293
6294 do_cleanups (old_chain);
6295
6296 return 1;
6297}
6298
34518530
YQ
6299/* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6300 for Linux, where some SVC instructions must be treated specially. */
6301
6302static void
6303cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
6304 struct displaced_step_closure *dsc)
6305{
6306 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
6307
6308 if (debug_displaced)
6309 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
6310 "%.8lx\n", (unsigned long) resume_addr);
6311
6312 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
6313}
6314
6315
6316/* Common copy routine for SVC instructions. */
6317
6318static int
6319install_svc (struct gdbarch *gdbarch, struct regcache *regs,
6320 struct displaced_step_closure *dsc)
6321{
6322 /* Preparation: none.
6323 Insn: unmodified svc.
6324 Cleanup: pc <- insn_addr + insn_size. */
6325
6326 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
6327 instruction. */
6328 dsc->wrote_to_pc = 1;
6329
6330 /* Allow OS-specific code to override SVC handling. */
bd18283a
YQ
6331 if (dsc->u.svc.copy_svc_os)
6332 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
6333 else
6334 {
6335 dsc->cleanup = &cleanup_svc;
6336 return 0;
6337 }
34518530
YQ
6338}
6339
6340static int
6341arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
6342 struct regcache *regs, struct displaced_step_closure *dsc)
6343{
6344
6345 if (debug_displaced)
6346 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
6347 (unsigned long) insn);
6348
6349 dsc->modinsn[0] = insn;
6350
6351 return install_svc (gdbarch, regs, dsc);
6352}
6353
6354static int
6355thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
6356 struct regcache *regs, struct displaced_step_closure *dsc)
6357{
6358
6359 if (debug_displaced)
6360 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
6361 insn);
bd18283a 6362
34518530
YQ
6363 dsc->modinsn[0] = insn;
6364
6365 return install_svc (gdbarch, regs, dsc);
cca44b1b
JB
6366}
6367
6368/* Copy undefined instructions. */
6369
6370static int
7ff120b4
YQ
6371arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
6372 struct displaced_step_closure *dsc)
cca44b1b
JB
6373{
6374 if (debug_displaced)
0963b4bd
MS
6375 fprintf_unfiltered (gdb_stdlog,
6376 "displaced: copying undefined insn %.8lx\n",
cca44b1b
JB
6377 (unsigned long) insn);
6378
6379 dsc->modinsn[0] = insn;
6380
6381 return 0;
6382}
6383
34518530
YQ
6384static int
6385thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6386 struct displaced_step_closure *dsc)
6387{
6388
6389 if (debug_displaced)
6390 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
6391 "%.4x %.4x\n", (unsigned short) insn1,
6392 (unsigned short) insn2);
6393
6394 dsc->modinsn[0] = insn1;
6395 dsc->modinsn[1] = insn2;
6396 dsc->numinsns = 2;
6397
6398 return 0;
6399}
6400
cca44b1b
JB
6401/* Copy unpredictable instructions. */
6402
6403static int
7ff120b4
YQ
6404arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
6405 struct displaced_step_closure *dsc)
cca44b1b
JB
6406{
6407 if (debug_displaced)
6408 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
6409 "%.8lx\n", (unsigned long) insn);
6410
6411 dsc->modinsn[0] = insn;
6412
6413 return 0;
6414}
6415
6416/* The decode_* functions are instruction decoding helpers. They mostly follow
6417 the presentation in the ARM ARM. */
6418
6419static int
7ff120b4
YQ
6420arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
6421 struct regcache *regs,
6422 struct displaced_step_closure *dsc)
cca44b1b
JB
6423{
6424 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
6425 unsigned int rn = bits (insn, 16, 19);
6426
6427 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0xe) == 0x0)
7ff120b4 6428 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
cca44b1b 6429 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0xe) == 0x1)
7ff120b4 6430 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
cca44b1b 6431 else if ((op1 & 0x60) == 0x20)
7ff120b4 6432 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
cca44b1b 6433 else if ((op1 & 0x71) == 0x40)
7ff120b4
YQ
6434 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
6435 dsc);
cca44b1b 6436 else if ((op1 & 0x77) == 0x41)
7ff120b4 6437 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
cca44b1b 6438 else if ((op1 & 0x77) == 0x45)
7ff120b4 6439 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
cca44b1b
JB
6440 else if ((op1 & 0x77) == 0x51)
6441 {
6442 if (rn != 0xf)
7ff120b4 6443 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
cca44b1b 6444 else
7ff120b4 6445 return arm_copy_unpred (gdbarch, insn, dsc);
cca44b1b
JB
6446 }
6447 else if ((op1 & 0x77) == 0x55)
7ff120b4 6448 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
cca44b1b
JB
6449 else if (op1 == 0x57)
6450 switch (op2)
6451 {
7ff120b4
YQ
6452 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
6453 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
6454 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
6455 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
6456 default: return arm_copy_unpred (gdbarch, insn, dsc);
cca44b1b
JB
6457 }
6458 else if ((op1 & 0x63) == 0x43)
7ff120b4 6459 return arm_copy_unpred (gdbarch, insn, dsc);
cca44b1b
JB
6460 else if ((op2 & 0x1) == 0x0)
6461 switch (op1 & ~0x80)
6462 {
6463 case 0x61:
7ff120b4 6464 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
cca44b1b 6465 case 0x65:
7ff120b4 6466 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
cca44b1b
JB
6467 case 0x71: case 0x75:
6468 /* pld/pldw reg. */
7ff120b4 6469 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
cca44b1b 6470 case 0x63: case 0x67: case 0x73: case 0x77:
7ff120b4 6471 return arm_copy_unpred (gdbarch, insn, dsc);
cca44b1b 6472 default:
7ff120b4 6473 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6474 }
6475 else
7ff120b4 6476 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
cca44b1b
JB
6477}
6478
6479static int
7ff120b4
YQ
6480arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
6481 struct regcache *regs,
6482 struct displaced_step_closure *dsc)
cca44b1b
JB
6483{
6484 if (bit (insn, 27) == 0)
7ff120b4 6485 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
cca44b1b
JB
6486 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
6487 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
6488 {
6489 case 0x0: case 0x2:
7ff120b4 6490 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
cca44b1b
JB
6491
6492 case 0x1: case 0x3:
7ff120b4 6493 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
cca44b1b
JB
6494
6495 case 0x4: case 0x5: case 0x6: case 0x7:
7ff120b4 6496 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
cca44b1b
JB
6497
6498 case 0x8:
6499 switch ((insn & 0xe00000) >> 21)
6500 {
6501 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
6502 /* stc/stc2. */
7ff120b4 6503 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b
JB
6504
6505 case 0x2:
7ff120b4 6506 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
cca44b1b
JB
6507
6508 default:
7ff120b4 6509 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6510 }
6511
6512 case 0x9:
6513 {
6514 int rn_f = (bits (insn, 16, 19) == 0xf);
6515 switch ((insn & 0xe00000) >> 21)
6516 {
6517 case 0x1: case 0x3:
6518 /* ldc/ldc2 imm (undefined for rn == pc). */
7ff120b4
YQ
6519 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
6520 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b
JB
6521
6522 case 0x2:
7ff120b4 6523 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
cca44b1b
JB
6524
6525 case 0x4: case 0x5: case 0x6: case 0x7:
6526 /* ldc/ldc2 lit (undefined for rn != pc). */
7ff120b4
YQ
6527 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
6528 : arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6529
6530 default:
7ff120b4 6531 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6532 }
6533 }
6534
6535 case 0xa:
7ff120b4 6536 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
cca44b1b
JB
6537
6538 case 0xb:
6539 if (bits (insn, 16, 19) == 0xf)
6540 /* ldc/ldc2 lit. */
7ff120b4 6541 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b 6542 else
7ff120b4 6543 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6544
6545 case 0xc:
6546 if (bit (insn, 4))
7ff120b4 6547 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
cca44b1b 6548 else
7ff120b4 6549 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
cca44b1b
JB
6550
6551 case 0xd:
6552 if (bit (insn, 4))
7ff120b4 6553 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
cca44b1b 6554 else
7ff120b4 6555 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
cca44b1b
JB
6556
6557 default:
7ff120b4 6558 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6559 }
6560}
6561
6562/* Decode miscellaneous instructions in dp/misc encoding space. */
6563
6564static int
7ff120b4
YQ
6565arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
6566 struct regcache *regs,
6567 struct displaced_step_closure *dsc)
cca44b1b
JB
6568{
6569 unsigned int op2 = bits (insn, 4, 6);
6570 unsigned int op = bits (insn, 21, 22);
cca44b1b
JB
6571
6572 switch (op2)
6573 {
6574 case 0x0:
7ff120b4 6575 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
cca44b1b
JB
6576
6577 case 0x1:
6578 if (op == 0x1) /* bx. */
7ff120b4 6579 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
cca44b1b 6580 else if (op == 0x3)
7ff120b4 6581 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
cca44b1b 6582 else
7ff120b4 6583 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6584
6585 case 0x2:
6586 if (op == 0x1)
6587 /* Not really supported. */
7ff120b4 6588 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
cca44b1b 6589 else
7ff120b4 6590 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6591
6592 case 0x3:
6593 if (op == 0x1)
7ff120b4 6594 return arm_copy_bx_blx_reg (gdbarch, insn,
0963b4bd 6595 regs, dsc); /* blx register. */
cca44b1b 6596 else
7ff120b4 6597 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6598
6599 case 0x5:
7ff120b4 6600 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
cca44b1b
JB
6601
6602 case 0x7:
6603 if (op == 0x1)
7ff120b4 6604 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
cca44b1b
JB
6605 else if (op == 0x3)
6606 /* Not really supported. */
7ff120b4 6607 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
cca44b1b
JB
6608
6609 default:
7ff120b4 6610 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6611 }
6612}
6613
6614static int
7ff120b4
YQ
6615arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
6616 struct regcache *regs,
6617 struct displaced_step_closure *dsc)
cca44b1b
JB
6618{
6619 if (bit (insn, 25))
6620 switch (bits (insn, 20, 24))
6621 {
6622 case 0x10:
7ff120b4 6623 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
cca44b1b
JB
6624
6625 case 0x14:
7ff120b4 6626 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
cca44b1b
JB
6627
6628 case 0x12: case 0x16:
7ff120b4 6629 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
cca44b1b
JB
6630
6631 default:
7ff120b4 6632 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
cca44b1b
JB
6633 }
6634 else
6635 {
6636 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
6637
6638 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
7ff120b4 6639 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
cca44b1b 6640 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
7ff120b4 6641 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
cca44b1b 6642 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
7ff120b4 6643 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
cca44b1b 6644 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
7ff120b4 6645 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
cca44b1b 6646 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
7ff120b4 6647 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
cca44b1b 6648 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
7ff120b4 6649 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
cca44b1b 6650 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
550dc4e2 6651 /* 2nd arg means "unprivileged". */
7ff120b4
YQ
6652 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
6653 dsc);
cca44b1b
JB
6654 }
6655
6656 /* Should be unreachable. */
6657 return 1;
6658}
6659
6660static int
7ff120b4
YQ
6661arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
6662 struct regcache *regs,
6663 struct displaced_step_closure *dsc)
cca44b1b
JB
6664{
6665 int a = bit (insn, 25), b = bit (insn, 4);
6666 uint32_t op1 = bits (insn, 20, 24);
cca44b1b
JB
6667
6668 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
6669 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
0f6f04ba 6670 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
cca44b1b
JB
6671 else if ((!a && (op1 & 0x17) == 0x02)
6672 || (a && (op1 & 0x17) == 0x02 && !b))
0f6f04ba 6673 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
cca44b1b
JB
6674 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
6675 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
0f6f04ba 6676 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
cca44b1b
JB
6677 else if ((!a && (op1 & 0x17) == 0x03)
6678 || (a && (op1 & 0x17) == 0x03 && !b))
0f6f04ba 6679 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
cca44b1b
JB
6680 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
6681 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
7ff120b4 6682 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
cca44b1b
JB
6683 else if ((!a && (op1 & 0x17) == 0x06)
6684 || (a && (op1 & 0x17) == 0x06 && !b))
7ff120b4 6685 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
cca44b1b
JB
6686 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
6687 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
7ff120b4 6688 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
cca44b1b
JB
6689 else if ((!a && (op1 & 0x17) == 0x07)
6690 || (a && (op1 & 0x17) == 0x07 && !b))
7ff120b4 6691 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
cca44b1b
JB
6692
6693 /* Should be unreachable. */
6694 return 1;
6695}
6696
6697static int
7ff120b4
YQ
6698arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
6699 struct displaced_step_closure *dsc)
cca44b1b
JB
6700{
6701 switch (bits (insn, 20, 24))
6702 {
6703 case 0x00: case 0x01: case 0x02: case 0x03:
7ff120b4 6704 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
cca44b1b
JB
6705
6706 case 0x04: case 0x05: case 0x06: case 0x07:
7ff120b4 6707 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
cca44b1b
JB
6708
6709 case 0x08: case 0x09: case 0x0a: case 0x0b:
6710 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
7ff120b4 6711 return arm_copy_unmodified (gdbarch, insn,
cca44b1b
JB
6712 "decode/pack/unpack/saturate/reverse", dsc);
6713
6714 case 0x18:
6715 if (bits (insn, 5, 7) == 0) /* op2. */
6716 {
6717 if (bits (insn, 12, 15) == 0xf)
7ff120b4 6718 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
cca44b1b 6719 else
7ff120b4 6720 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
cca44b1b
JB
6721 }
6722 else
7ff120b4 6723 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6724
6725 case 0x1a: case 0x1b:
6726 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7ff120b4 6727 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
cca44b1b 6728 else
7ff120b4 6729 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6730
6731 case 0x1c: case 0x1d:
6732 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
6733 {
6734 if (bits (insn, 0, 3) == 0xf)
7ff120b4 6735 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
cca44b1b 6736 else
7ff120b4 6737 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
cca44b1b
JB
6738 }
6739 else
7ff120b4 6740 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6741
6742 case 0x1e: case 0x1f:
6743 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7ff120b4 6744 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
cca44b1b 6745 else
7ff120b4 6746 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6747 }
6748
6749 /* Should be unreachable. */
6750 return 1;
6751}
6752
6753static int
615234c1 6754arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn,
7ff120b4
YQ
6755 struct regcache *regs,
6756 struct displaced_step_closure *dsc)
cca44b1b
JB
6757{
6758 if (bit (insn, 25))
7ff120b4 6759 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
cca44b1b 6760 else
7ff120b4 6761 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
cca44b1b
JB
6762}
6763
6764static int
7ff120b4
YQ
6765arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
6766 struct regcache *regs,
6767 struct displaced_step_closure *dsc)
cca44b1b
JB
6768{
6769 unsigned int opcode = bits (insn, 20, 24);
6770
6771 switch (opcode)
6772 {
6773 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
7ff120b4 6774 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
cca44b1b
JB
6775
6776 case 0x08: case 0x0a: case 0x0c: case 0x0e:
6777 case 0x12: case 0x16:
7ff120b4 6778 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
cca44b1b
JB
6779
6780 case 0x09: case 0x0b: case 0x0d: case 0x0f:
6781 case 0x13: case 0x17:
7ff120b4 6782 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
cca44b1b
JB
6783
6784 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6785 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6786 /* Note: no writeback for these instructions. Bit 25 will always be
6787 zero though (via caller), so the following works OK. */
7ff120b4 6788 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b
JB
6789 }
6790
6791 /* Should be unreachable. */
6792 return 1;
6793}
6794
34518530
YQ
6795/* Decode shifted register instructions. */
6796
6797static int
6798thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
6799 uint16_t insn2, struct regcache *regs,
6800 struct displaced_step_closure *dsc)
6801{
6802   /* PC is only allowed to be used in the MOV instruction.  */
6803
6804 unsigned int op = bits (insn1, 5, 8);
6805 unsigned int rn = bits (insn1, 0, 3);
6806
6807 if (op == 0x2 && rn == 0xf) /* MOV */
6808 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
6809 else
6810 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6811 "dp (shift reg)", dsc);
6812}
6813
6814
6815/* Decode extension register load/store. Exactly the same as
6816 arm_decode_ext_reg_ld_st. */
6817
6818static int
6819thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
6820 uint16_t insn2, struct regcache *regs,
6821 struct displaced_step_closure *dsc)
6822{
6823 unsigned int opcode = bits (insn1, 4, 8);
6824
6825 switch (opcode)
6826 {
6827 case 0x04: case 0x05:
6828 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6829 "vfp/neon vmov", dsc);
6830
6831 case 0x08: case 0x0c: /* 01x00 */
6832 case 0x0a: case 0x0e: /* 01x10 */
6833 case 0x12: case 0x16: /* 10x10 */
6834 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6835 "vfp/neon vstm/vpush", dsc);
6836
6837 case 0x09: case 0x0d: /* 01x01 */
6838 case 0x0b: case 0x0f: /* 01x11 */
6839 case 0x13: case 0x17: /* 10x11 */
6840 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6841 "vfp/neon vldm/vpop", dsc);
6842
6843 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6844 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6845 "vstr", dsc);
6846 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6847 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
6848 }
6849
6850 /* Should be unreachable. */
6851 return 1;
6852}
6853
cca44b1b 6854static int
12545665 6855arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
7ff120b4 6856 struct regcache *regs, struct displaced_step_closure *dsc)
cca44b1b
JB
6857{
6858 unsigned int op1 = bits (insn, 20, 25);
6859 int op = bit (insn, 4);
6860 unsigned int coproc = bits (insn, 8, 11);
cca44b1b
JB
6861
6862 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
7ff120b4 6863 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
cca44b1b
JB
6864 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
6865 && (coproc & 0xe) != 0xa)
6866 /* stc/stc2. */
7ff120b4 6867 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b
JB
6868 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
6869 && (coproc & 0xe) != 0xa)
6870 /* ldc/ldc2 imm/lit. */
7ff120b4 6871 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b 6872 else if ((op1 & 0x3e) == 0x00)
7ff120b4 6873 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b 6874 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
7ff120b4 6875 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
cca44b1b 6876 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
7ff120b4 6877 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
cca44b1b 6878 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
7ff120b4 6879 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
cca44b1b
JB
6880 else if ((op1 & 0x30) == 0x20 && !op)
6881 {
6882 if ((coproc & 0xe) == 0xa)
7ff120b4 6883 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
cca44b1b 6884 else
7ff120b4 6885 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
cca44b1b
JB
6886 }
6887 else if ((op1 & 0x30) == 0x20 && op)
7ff120b4 6888 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
cca44b1b 6889 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
7ff120b4 6890 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
cca44b1b 6891 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
7ff120b4 6892 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
cca44b1b 6893 else if ((op1 & 0x30) == 0x30)
7ff120b4 6894 return arm_copy_svc (gdbarch, insn, regs, dsc);
cca44b1b 6895 else
7ff120b4 6896 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
cca44b1b
JB
6897}
6898
34518530
YQ
6899static int
6900thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
6901 uint16_t insn2, struct regcache *regs,
6902 struct displaced_step_closure *dsc)
6903{
6904 unsigned int coproc = bits (insn2, 8, 11);
34518530
YQ
6905 unsigned int bit_5_8 = bits (insn1, 5, 8);
6906 unsigned int bit_9 = bit (insn1, 9);
6907 unsigned int bit_4 = bit (insn1, 4);
34518530
YQ
6908
6909 if (bit_9 == 0)
6910 {
6911 if (bit_5_8 == 2)
6912 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6913 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
6914 dsc);
6915 else if (bit_5_8 == 0) /* UNDEFINED. */
6916 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
6917 else
6918 {
6919 	  /* coproc is 101x.  SIMD/VFP, ext registers load/store.  */
6920 if ((coproc & 0xe) == 0xa)
6921 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
6922 dsc);
6923 else /* coproc is not 101x. */
6924 {
6925 if (bit_4 == 0) /* STC/STC2. */
6926 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6927 "stc/stc2", dsc);
6928 	      else /* LDC/LDC2 {literal, immediate}.  */
6929 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
6930 regs, dsc);
6931 }
6932 }
6933 }
6934 else
6935 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
6936
6937 return 0;
6938}
6939
6940static void
6941install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
6942 struct displaced_step_closure *dsc, int rd)
6943{
6944 /* ADR Rd, #imm
6945
6946 Rewrite as:
6947
6948 Preparation: Rd <- PC
6949 Insn: ADD Rd, #imm
6950 Cleanup: Null.
6951 */
6952
6953 /* Rd <- PC */
6954 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6955 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
6956}
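/* A rough worked example of the rewrite above: for "adr r3, <label>" the
   preparation writes the PC value the original instruction would have seen
   into r3, and the copy executed in the scratch pad is a plain add of the
   immediate to r3, so the result stays relative to the original location
   rather than to the scratch pad.  */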
6957
6958static int
6959thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
6960 struct displaced_step_closure *dsc,
6961 int rd, unsigned int imm)
6962{
6963
6964 /* Encoding T2: ADDS Rd, #imm */
6965 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
6966
6967 install_pc_relative (gdbarch, regs, dsc, rd);
6968
6969 return 0;
6970}
6971
6972static int
6973thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
6974 struct regcache *regs,
6975 struct displaced_step_closure *dsc)
6976{
6977 unsigned int rd = bits (insn, 8, 10);
6978 unsigned int imm8 = bits (insn, 0, 7);
6979
6980 if (debug_displaced)
6981 fprintf_unfiltered (gdb_stdlog,
6982 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
6983 rd, imm8, insn);
6984
6985 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
6986}
6987
6988static int
6989thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
6990 uint16_t insn2, struct regcache *regs,
6991 struct displaced_step_closure *dsc)
6992{
6993 unsigned int rd = bits (insn2, 8, 11);
6994   /* The immediate field has the same encoding in ADR, ADD and SUB, so we
6995      simply extract the raw immediate bits rather than computing the
6996      immediate value; when generating the ADD or SUB instruction we can
6997      then OR those bits straight into the encoding.  */
6998 unsigned int imm_3_8 = insn2 & 0x70ff;
6999 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
7000
7001 if (debug_displaced)
7002 fprintf_unfiltered (gdb_stdlog,
7003 "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
7004 rd, imm_i, imm_3_8, insn1, insn2);
7005
7006 if (bit (insn1, 7)) /* Encoding T2 */
7007 {
7008 /* Encoding T3: SUB Rd, Rd, #imm */
7009 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
7010 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7011 }
7012 else /* Encoding T3 */
7013 {
7014 /* Encoding T3: ADD Rd, Rd, #imm */
7015 dsc->modinsn[0] = (0xf100 | rd | imm_i);
7016 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7017 }
7018 dsc->numinsns = 2;
7019
7020 install_pc_relative (gdbarch, regs, dsc, rd);
7021
7022 return 0;
7023}
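/* Note on the encoding trick above: imm_i (the i bit) and imm_3_8 (the
   imm3:imm8 fields) are kept in their original bit positions, so the code
   can OR them directly into the 32-bit ADD/SUB templates, which place the
   immediate fields in the same positions as the ADR.W encoding.  */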
7024
7025static int
615234c1 7026thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, uint16_t insn1,
34518530
YQ
7027 struct regcache *regs,
7028 struct displaced_step_closure *dsc)
7029{
7030 unsigned int rt = bits (insn1, 8, 10);
7031 unsigned int pc;
7032 int imm8 = (bits (insn1, 0, 7) << 2);
34518530
YQ
7033
7034 /* LDR Rd, #imm8
7035
7036      Rewrite as:
7037
7038 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
7039
7040 Insn: LDR R0, [R2, R3];
7041 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
7042
7043 if (debug_displaced)
7044 fprintf_unfiltered (gdb_stdlog,
7045 			"displaced: copying thumb ldr r%d [pc #%d]\n",
7046 			rt, imm8);
7047
7048 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
7049 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
7050 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
7051 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7052 /* The assembler calculates the required value of the offset from the
7053 Align(PC,4) value of this instruction to the label. */
7054 pc = pc & 0xfffffffc;
7055
7056 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
7057 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
7058
7059 dsc->rd = rt;
7060 dsc->u.ldst.xfersize = 4;
7061 dsc->u.ldst.rn = 0;
7062 dsc->u.ldst.immed = 0;
7063 dsc->u.ldst.writeback = 0;
7064 dsc->u.ldst.restore_r4 = 0;
7065
7066 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/
7067
7068 dsc->cleanup = &cleanup_load;
7069
7070 return 0;
7071}
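/* Why the dance with r0/r2/r3 above: the copied instruction executes in the
   scratch pad, so a PC-relative load there would read from the wrong
   address.  Instead the Align(PC,4) base is materialised in r2 and the
   scaled offset in r3, the copy becomes "ldr r0, [r2, r3]", and cleanup_load
   moves r0 into the original Rt and restores r0/r2/r3 afterwards.  */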
7072
7073/* Copy Thumb cbnz/cbz instruction.  */
7074
7075static int
7076thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
7077 struct regcache *regs,
7078 struct displaced_step_closure *dsc)
7079{
7080 int non_zero = bit (insn1, 11);
7081 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
7082 CORE_ADDR from = dsc->insn_addr;
7083 int rn = bits (insn1, 0, 2);
7084 int rn_val = displaced_read_reg (regs, dsc, rn);
7085
7086 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
7087   /* CBNZ and CBZ do not affect the condition flags.  If the condition is
7088      true, set it to INST_AL so that cleanup_branch knows the branch is
7089      taken; otherwise leave it as is and cleanup_branch will do nothing.  */
7090 if (dsc->u.branch.cond)
7091 {
7092 dsc->u.branch.cond = INST_AL;
7093 dsc->u.branch.dest = from + 4 + imm5;
7094 }
7095 else
7096 dsc->u.branch.dest = from + 2;
7097
7098 dsc->u.branch.link = 0;
7099 dsc->u.branch.exchange = 0;
7100
7101 if (debug_displaced)
7102 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
7103 " insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
7104 rn, rn_val, insn1, dsc->u.branch.dest);
7105
7106 dsc->modinsn[0] = THUMB_NOP;
7107
7108 dsc->cleanup = &cleanup_branch;
7109 return 0;
7110}
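/* The copy above never executes the branch itself: the register is tested
   at copy time, the scratch slot gets a Thumb NOP, and when the branch would
   be taken cleanup_branch writes the destination from + 4 + imm5 into the
   PC, exactly as the original CB{N}Z would have done.  */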
7111
7112/* Copy Table Branch Byte/Halfword */
7113static int
7114thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
7115 uint16_t insn2, struct regcache *regs,
7116 struct displaced_step_closure *dsc)
7117{
7118 ULONGEST rn_val, rm_val;
7119 int is_tbh = bit (insn2, 4);
7120 CORE_ADDR halfwords = 0;
7121 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7122
7123 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
7124 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
7125
7126 if (is_tbh)
7127 {
7128 gdb_byte buf[2];
7129
7130 target_read_memory (rn_val + 2 * rm_val, buf, 2);
7131 halfwords = extract_unsigned_integer (buf, 2, byte_order);
7132 }
7133 else
7134 {
7135 gdb_byte buf[1];
7136
7137 target_read_memory (rn_val + rm_val, buf, 1);
7138 halfwords = extract_unsigned_integer (buf, 1, byte_order);
7139 }
7140
7141 if (debug_displaced)
7142 fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x offset 0x%x"
7143 			" halfwords 0x%x\n", is_tbh ? "tbh" : "tbb",
7144 (unsigned int) rn_val, (unsigned int) rm_val,
7145 (unsigned int) halfwords);
7146
7147 dsc->u.branch.cond = INST_AL;
7148 dsc->u.branch.link = 0;
7149 dsc->u.branch.exchange = 0;
7150 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
7151
7152 dsc->cleanup = &cleanup_branch;
7153
7154 return 0;
7155}
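/* Sketch of the TBB/TBH handling above: the table entry is fetched from
   memory while the instruction is being copied (using the current Rn and Rm
   values), and cleanup_branch then redirects the PC to
   insn_addr + 4 + 2 * entry, matching the architectural rule that the branch
   offset is twice the zero-extended table entry.  */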
7156
7157static void
7158cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
7159 struct displaced_step_closure *dsc)
7160{
7161 /* PC <- r7 */
7162 int val = displaced_read_reg (regs, dsc, 7);
7163 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
7164
7165 /* r7 <- r8 */
7166 val = displaced_read_reg (regs, dsc, 8);
7167 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
7168
7169 /* r8 <- tmp[0] */
7170 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
7171
7172}
7173
7174static int
615234c1 7175thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, uint16_t insn1,
34518530
YQ
7176 struct regcache *regs,
7177 struct displaced_step_closure *dsc)
7178{
7179 dsc->u.block.regmask = insn1 & 0x00ff;
7180
7181 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
7182      to:
7183
7184 (1) register list is full, that is, r0-r7 are used.
7185 Prepare: tmp[0] <- r8
7186
7187 POP {r0, r1, ...., r6, r7}; remove PC from reglist
7188 MOV r8, r7; Move value of r7 to r8;
7189 POP {r7}; Store PC value into r7.
7190
7191 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
7192      Cleanup: PC <- r7, r7 <- r8, r8 <- tmp[0]
7193 (2) register list is not full, supposing there are N registers in
7194 register list (except PC, 0 <= N <= 7).
7195 Prepare: for each i, 0 - N, tmp[i] <- ri.
7196
7197 POP {r0, r1, ...., rN};
7198
7199 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
7200 from tmp[] properly.
7201 */
7202 if (debug_displaced)
7203 fprintf_unfiltered (gdb_stdlog,
7204 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
7205 dsc->u.block.regmask, insn1);
7206
7207 if (dsc->u.block.regmask == 0xff)
7208 {
7209 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
7210
7211 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
7212 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
7213 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
7214
7215 dsc->numinsns = 3;
7216 dsc->cleanup = &cleanup_pop_pc_16bit_all;
7217 }
7218 else
7219 {
7220 unsigned int num_in_list = bitcount (dsc->u.block.regmask);
bec2ab5a
SM
7221 unsigned int i;
7222 unsigned int new_regmask;
34518530
YQ
7223
7224 for (i = 0; i < num_in_list + 1; i++)
7225 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7226
7227 new_regmask = (1 << (num_in_list + 1)) - 1;
7228
7229 if (debug_displaced)
7230 fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
7231 "{..., pc}: original reg list %.4x,"
7232 " modified list %.4x\n"),
7233 (int) dsc->u.block.regmask, new_regmask);
7234
7235 dsc->u.block.regmask |= 0x8000;
7236 dsc->u.block.writeback = 0;
7237 dsc->u.block.cond = INST_AL;
7238
7239 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
7240
7241 dsc->cleanup = &cleanup_block_load_pc;
7242 }
7243
7244 return 0;
7245}
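/* A hypothetical example for the partial-list case above: for
   "pop {r0, r2, pc}" (two low registers plus PC) the copy executed in the
   scratch pad is "pop {r0, r1, r2}"; the three popped words land in r0-r2,
   and cleanup_block_load_pc then scatters them back to r0, r2 and the PC
   while restoring the saved temporaries.  */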
7246
7247static void
7248thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7249 struct regcache *regs,
7250 struct displaced_step_closure *dsc)
7251{
7252 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
7253 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
7254 int err = 0;
7255
7256 /* 16-bit thumb instructions. */
7257 switch (op_bit_12_15)
7258 {
7259     /* Shift (immediate), add, subtract, move and compare.  */
7260 case 0: case 1: case 2: case 3:
7261 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7262 "shift/add/sub/mov/cmp",
7263 dsc);
7264 break;
7265 case 4:
7266 switch (op_bit_10_11)
7267 {
7268 case 0: /* Data-processing */
7269 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7270 "data-processing",
7271 dsc);
7272 break;
7273 case 1: /* Special data instructions and branch and exchange. */
7274 {
7275 unsigned short op = bits (insn1, 7, 9);
7276 if (op == 6 || op == 7) /* BX or BLX */
7277 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
7278 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
7279 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
7280 else
7281 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
7282 dsc);
7283 }
7284 break;
7285 default: /* LDR (literal) */
7286 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
7287 }
7288 break;
7289 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
7290 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
7291 break;
7292 case 10:
7293 if (op_bit_10_11 < 2) /* Generate PC-relative address */
7294 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
7295 else /* Generate SP-relative address */
7296 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
7297 break;
7298 case 11: /* Misc 16-bit instructions */
7299 {
7300 switch (bits (insn1, 8, 11))
7301 {
7302 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
7303 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
7304 break;
7305 case 12: case 13: /* POP */
7306 if (bit (insn1, 8)) /* PC is in register list. */
7307 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
7308 else
7309 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
7310 break;
7311 case 15: /* If-Then, and hints */
7312 if (bits (insn1, 0, 3))
7313 /* If-Then makes up to four following instructions conditional.
7314 	       The IT instruction itself is not conditional, so handle it as an
7315 	       ordinary unmodified instruction.  */
7316 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
7317 dsc);
7318 else
7319 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
7320 break;
7321 default:
7322 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
7323 }
7324 }
7325 break;
7326 case 12:
7327 if (op_bit_10_11 < 2) /* Store multiple registers */
7328 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
7329 else /* Load multiple registers */
7330 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
7331 break;
7332 case 13: /* Conditional branch and supervisor call */
7333 if (bits (insn1, 9, 11) != 7) /* conditional branch */
7334 err = thumb_copy_b (gdbarch, insn1, dsc);
7335 else
7336 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
7337 break;
7338 case 14: /* Unconditional branch */
7339 err = thumb_copy_b (gdbarch, insn1, dsc);
7340 break;
7341 default:
7342 err = 1;
7343 }
7344
7345 if (err)
7346 internal_error (__FILE__, __LINE__,
7347 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
7348}
7349
7350static int
7351decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
7352 uint16_t insn1, uint16_t insn2,
7353 struct regcache *regs,
7354 struct displaced_step_closure *dsc)
7355{
7356 int rt = bits (insn2, 12, 15);
7357 int rn = bits (insn1, 0, 3);
7358 int op1 = bits (insn1, 7, 8);
34518530
YQ
7359
7360 switch (bits (insn1, 5, 6))
7361 {
7362 case 0: /* Load byte and memory hints */
7363 if (rt == 0xf) /* PLD/PLI */
7364 {
7365 if (rn == 0xf)
7366 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
7367 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
7368 else
7369 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7370 "pli/pld", dsc);
7371 }
7372 else
7373 {
7374 if (rn == 0xf) /* LDRB/LDRSB (literal) */
7375 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7376 1);
7377 else
7378 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7379 "ldrb{reg, immediate}/ldrbt",
7380 dsc);
7381 }
7382
7383 break;
7384 case 1: /* Load halfword and memory hints. */
7385 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
7386 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7387 "pld/unalloc memhint", dsc);
7388 else
7389 {
7390 if (rn == 0xf)
7391 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7392 2);
7393 else
7394 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7395 "ldrh/ldrht", dsc);
7396 }
7397 break;
7398 case 2: /* Load word */
7399 {
7400 int insn2_bit_8_11 = bits (insn2, 8, 11);
7401
7402 if (rn == 0xf)
7403 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
7404 else if (op1 == 0x1) /* Encoding T3 */
7405 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
7406 0, 1);
7407 else /* op1 == 0x0 */
7408 {
7409 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
7410 /* LDR (immediate) */
7411 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7412 dsc, bit (insn2, 8), 1);
7413 else if (insn2_bit_8_11 == 0xe) /* LDRT */
7414 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7415 "ldrt", dsc);
7416 else
7417 /* LDR (register) */
7418 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7419 dsc, 0, 0);
7420 }
7421 break;
7422 }
7423 default:
7424 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7425 break;
7426 }
7427 return 0;
7428}
7429
7430static void
7431thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7432 uint16_t insn2, struct regcache *regs,
7433 struct displaced_step_closure *dsc)
7434{
7435 int err = 0;
7436 unsigned short op = bit (insn2, 15);
7437 unsigned int op1 = bits (insn1, 11, 12);
7438
7439 switch (op1)
7440 {
7441 case 1:
7442 {
7443 switch (bits (insn1, 9, 10))
7444 {
7445 case 0:
7446 if (bit (insn1, 6))
7447 {
7448 	      /* Load/store {dual, exclusive}, table branch.  */
7449 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
7450 && bits (insn2, 5, 7) == 0)
7451 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
7452 dsc);
7453 else
7454 		/* PC is not allowed to be used in load/store {dual, exclusive}
7455 instructions. */
7456 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7457 "load/store dual/ex", dsc);
7458 }
7459 else /* load/store multiple */
7460 {
7461 switch (bits (insn1, 7, 8))
7462 {
7463 case 0: case 3: /* SRS, RFE */
7464 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7465 "srs/rfe", dsc);
7466 break;
7467 case 1: case 2: /* LDM/STM/PUSH/POP */
7468 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
7469 break;
7470 }
7471 }
7472 break;
7473
7474 case 1:
7475 /* Data-processing (shift register). */
7476 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
7477 dsc);
7478 break;
7479 default: /* Coprocessor instructions. */
7480 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7481 break;
7482 }
7483 break;
7484 }
7485 case 2: /* op1 = 2 */
7486 if (op) /* Branch and misc control. */
7487 {
7488 if (bit (insn2, 14) /* BLX/BL */
7489 || bit (insn2, 12) /* Unconditional branch */
7490 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
7491 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
7492 else
7493 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7494 "misc ctrl", dsc);
7495 }
7496 else
7497 {
7498 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
7499 {
7500 int op = bits (insn1, 4, 8);
7501 int rn = bits (insn1, 0, 3);
7502 if ((op == 0 || op == 0xa) && rn == 0xf)
7503 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
7504 regs, dsc);
7505 else
7506 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7507 "dp/pb", dsc);
7508 }
7509 	  else /* Data processing (modified immediate) */
7510 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7511 "dp/mi", dsc);
7512 }
7513 break;
7514 case 3: /* op1 = 3 */
7515 switch (bits (insn1, 9, 10))
7516 {
7517 case 0:
7518 if (bit (insn1, 4))
7519 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
7520 regs, dsc);
7521 else /* NEON Load/Store and Store single data item */
7522 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7523 "neon elt/struct load/store",
7524 dsc);
7525 break;
7526 case 1: /* op1 = 3, bits (9, 10) == 1 */
7527 switch (bits (insn1, 7, 8))
7528 {
7529 case 0: case 1: /* Data processing (register) */
7530 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7531 "dp(reg)", dsc);
7532 break;
7533 case 2: /* Multiply and absolute difference */
7534 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7535 "mul/mua/diff", dsc);
7536 break;
7537 case 3: /* Long multiply and divide */
7538 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7539 "lmul/lmua", dsc);
7540 break;
7541 }
7542 break;
7543 default: /* Coprocessor instructions */
7544 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7545 break;
7546 }
7547 break;
7548 default:
7549 err = 1;
7550 }
7551
7552 if (err)
7553 internal_error (__FILE__, __LINE__,
7554 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
7555
7556}
7557
b434a28f
YQ
7558static void
7559thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
12545665 7560 struct regcache *regs,
b434a28f
YQ
7561 struct displaced_step_closure *dsc)
7562{
34518530
YQ
7563 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7564 uint16_t insn1
7565 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
7566
7567 if (debug_displaced)
7568 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
7569 "at %.8lx\n", insn1, (unsigned long) from);
7570
7571 dsc->is_thumb = 1;
7572 dsc->insn_size = thumb_insn_size (insn1);
7573 if (thumb_insn_size (insn1) == 4)
7574 {
7575 uint16_t insn2
7576 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
7577 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
7578 }
7579 else
7580 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
b434a28f
YQ
7581}
7582
cca44b1b 7583void
b434a28f
YQ
7584arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7585 CORE_ADDR to, struct regcache *regs,
cca44b1b
JB
7586 struct displaced_step_closure *dsc)
7587{
7588 int err = 0;
b434a28f
YQ
7589 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7590 uint32_t insn;
cca44b1b
JB
7591
7592 /* Most displaced instructions use a 1-instruction scratch space, so set this
7593 here and override below if/when necessary. */
7594 dsc->numinsns = 1;
7595 dsc->insn_addr = from;
7596 dsc->scratch_base = to;
7597 dsc->cleanup = NULL;
7598 dsc->wrote_to_pc = 0;
7599
b434a28f 7600 if (!displaced_in_arm_mode (regs))
12545665 7601 return thumb_process_displaced_insn (gdbarch, from, regs, dsc);
b434a28f 7602
4db71c0b
YQ
7603 dsc->is_thumb = 0;
7604 dsc->insn_size = 4;
b434a28f
YQ
7605 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
7606 if (debug_displaced)
7607 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
7608 "at %.8lx\n", (unsigned long) insn,
7609 (unsigned long) from);
7610
cca44b1b 7611 if ((insn & 0xf0000000) == 0xf0000000)
7ff120b4 7612 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
cca44b1b
JB
7613 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
7614 {
7615 case 0x0: case 0x1: case 0x2: case 0x3:
7ff120b4 7616 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
cca44b1b
JB
7617 break;
7618
7619 case 0x4: case 0x5: case 0x6:
7ff120b4 7620 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
cca44b1b
JB
7621 break;
7622
7623 case 0x7:
7ff120b4 7624 err = arm_decode_media (gdbarch, insn, dsc);
cca44b1b
JB
7625 break;
7626
7627 case 0x8: case 0x9: case 0xa: case 0xb:
7ff120b4 7628 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
cca44b1b
JB
7629 break;
7630
7631 case 0xc: case 0xd: case 0xe: case 0xf:
12545665 7632 err = arm_decode_svc_copro (gdbarch, insn, regs, dsc);
cca44b1b
JB
7633 break;
7634 }
7635
7636 if (err)
7637 internal_error (__FILE__, __LINE__,
7638 _("arm_process_displaced_insn: Instruction decode error"));
7639}
7640
7641/* Actually set up the scratch space for a displaced instruction. */
7642
7643void
7644arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
7645 CORE_ADDR to, struct displaced_step_closure *dsc)
7646{
7647 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4db71c0b 7648 unsigned int i, len, offset;
cca44b1b 7649 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4db71c0b 7650 int size = dsc->is_thumb? 2 : 4;
948f8e3d 7651 const gdb_byte *bkp_insn;
cca44b1b 7652
4db71c0b 7653 offset = 0;
cca44b1b
JB
7654 /* Poke modified instruction(s). */
7655 for (i = 0; i < dsc->numinsns; i++)
7656 {
7657 if (debug_displaced)
4db71c0b
YQ
7658 {
7659 fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
7660 if (size == 4)
7661 fprintf_unfiltered (gdb_stdlog, "%.8lx",
7662 dsc->modinsn[i]);
7663 else if (size == 2)
7664 fprintf_unfiltered (gdb_stdlog, "%.4x",
7665 (unsigned short)dsc->modinsn[i]);
7666
7667 fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
7668 (unsigned long) to + offset);
7669
7670 }
7671 write_memory_unsigned_integer (to + offset, size,
7672 byte_order_for_code,
cca44b1b 7673 dsc->modinsn[i]);
4db71c0b
YQ
7674 offset += size;
7675 }
7676
7677 /* Choose the correct breakpoint instruction. */
7678 if (dsc->is_thumb)
7679 {
7680 bkp_insn = tdep->thumb_breakpoint;
7681 len = tdep->thumb_breakpoint_size;
7682 }
7683 else
7684 {
7685 bkp_insn = tdep->arm_breakpoint;
7686 len = tdep->arm_breakpoint_size;
cca44b1b
JB
7687 }
7688
7689 /* Put breakpoint afterwards. */
4db71c0b 7690 write_memory (to + offset, bkp_insn, len);
cca44b1b
JB
7691
7692 if (debug_displaced)
7693 fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
7694 paddress (gdbarch, from), paddress (gdbarch, to));
7695}
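/* The resulting scratch-pad layout is simply the modified copy followed by a
   breakpoint of the matching flavour, e.g. for a single 32-bit ARM
   instruction:

       to + 0:  dsc->modinsn[0]       (4 bytes)
       to + 4:  tdep->arm_breakpoint  (arm_breakpoint_size bytes)

   so the inferior executes the copy and immediately traps.  */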
7696
cca44b1b
JB
7697/* Entry point for cleaning things up after a displaced instruction has been
7698 single-stepped. */
7699
7700void
7701arm_displaced_step_fixup (struct gdbarch *gdbarch,
7702 struct displaced_step_closure *dsc,
7703 CORE_ADDR from, CORE_ADDR to,
7704 struct regcache *regs)
7705{
7706 if (dsc->cleanup)
7707 dsc->cleanup (gdbarch, regs, dsc);
7708
7709 if (!dsc->wrote_to_pc)
4db71c0b
YQ
7710 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
7711 dsc->insn_addr + dsc->insn_size);
7712
cca44b1b
JB
7713}
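/* In other words: if the copied instruction already wrote the PC (a branch,
   pop {..., pc}, etc.) the cleanup routine has set the final value;
   otherwise execution simply fell through to the breakpoint and the PC is
   rewound here to the instruction following the original one.  */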
7714
7715#include "bfd-in2.h"
7716#include "libcoff.h"
7717
7718static int
7719gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
7720{
9a3c8263 7721 struct gdbarch *gdbarch = (struct gdbarch *) info->application_data;
9779414d
DJ
7722
7723 if (arm_pc_is_thumb (gdbarch, memaddr))
cca44b1b
JB
7724 {
7725 static asymbol *asym;
7726 static combined_entry_type ce;
7727 static struct coff_symbol_struct csym;
7728 static struct bfd fake_bfd;
7729 static bfd_target fake_target;
7730
7731 if (csym.native == NULL)
7732 {
7733 /* Create a fake symbol vector containing a Thumb symbol.
7734 This is solely so that the code in print_insn_little_arm()
7735 and print_insn_big_arm() in opcodes/arm-dis.c will detect
7736 the presence of a Thumb symbol and switch to decoding
7737 Thumb instructions. */
7738
7739 fake_target.flavour = bfd_target_coff_flavour;
7740 fake_bfd.xvec = &fake_target;
7741 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
7742 csym.native = &ce;
7743 csym.symbol.the_bfd = &fake_bfd;
7744 csym.symbol.name = "fake";
7745 asym = (asymbol *) & csym;
7746 }
7747
7748 memaddr = UNMAKE_THUMB_ADDR (memaddr);
7749 info->symbols = &asym;
7750 }
7751 else
7752 info->symbols = NULL;
7753
7754 if (info->endian == BFD_ENDIAN_BIG)
7755 return print_insn_big_arm (memaddr, info);
7756 else
7757 return print_insn_little_arm (memaddr, info);
7758}
7759
7760/* The following define instruction sequences that will cause ARM
7761    CPUs to take an undefined instruction trap.  These are used to
7762    signal a breakpoint to GDB.
7763
7764    The newer ARMv4T CPUs are capable of operating in ARM or Thumb
7765    modes.  A different instruction is required for each mode.  The ARM
7766    CPUs can also be big or little endian.  Thus four different
7767 instructions are needed to support all cases.
7768
7769 Note: ARMv4 defines several new instructions that will take the
7770 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
7771 not in fact add the new instructions. The new undefined
7772 instructions in ARMv4 are all instructions that had no defined
7773    behaviour in earlier chips.  There is no guarantee that they will
7774    raise an exception; they may be treated as NOPs instead.  In
7775    practice, it may only be safe to rely on instructions matching:
7776
7777 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
7778 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
7779 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
7780
0963b4bd 7781    Even this may only be true if the condition predicate is true.  The
cca44b1b
JB
7782 following use a condition predicate of ALWAYS so it is always TRUE.
7783
7784 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
7785 and NetBSD all use a software interrupt rather than an undefined
7786    instruction to force a trap.  This can be handled by the
7787 abi-specific code during establishment of the gdbarch vector. */
7788
7789#define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
7790#define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
7791#define THUMB_LE_BREAKPOINT {0xbe,0xbe}
7792#define THUMB_BE_BREAKPOINT {0xbe,0xbe}
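/* As a quick check against the pattern above: the little-endian ARM sequence
   {0xFE,0xDE,0xFF,0xE7} is the word 0xe7ffdefe, i.e. cond = 1110 (AL),
   bits 27-25 = 011 and bit 4 = 1, so it falls within the undefined
   instruction space described in the comment.  */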
7793
948f8e3d
PA
7794static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
7795static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
7796static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
7797static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
cca44b1b
JB
7798
7799/* Determine the type and size of breakpoint to insert at PCPTR. Uses
7800 the program counter value to determine whether a 16-bit or 32-bit
7801 breakpoint should be used. It returns a pointer to a string of
7802 bytes that encode a breakpoint instruction, stores the length of
7803 the string to *lenptr, and adjusts the program counter (if
7804 necessary) to point to the actual memory location where the
7805 breakpoint should be inserted. */
7806
7807static const unsigned char *
7808arm_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr, int *lenptr)
7809{
7810 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
177321bd 7811 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
cca44b1b 7812
9779414d 7813 if (arm_pc_is_thumb (gdbarch, *pcptr))
cca44b1b
JB
7814 {
7815 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
177321bd
DJ
7816
7817 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
7818 check whether we are replacing a 32-bit instruction. */
7819 if (tdep->thumb2_breakpoint != NULL)
7820 {
7821 gdb_byte buf[2];
7822 if (target_read_memory (*pcptr, buf, 2) == 0)
7823 {
7824 unsigned short inst1;
7825 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
db24da6d 7826 if (thumb_insn_size (inst1) == 4)
177321bd
DJ
7827 {
7828 *lenptr = tdep->thumb2_breakpoint_size;
7829 return tdep->thumb2_breakpoint;
7830 }
7831 }
7832 }
7833
cca44b1b
JB
7834 *lenptr = tdep->thumb_breakpoint_size;
7835 return tdep->thumb_breakpoint;
7836 }
7837 else
7838 {
7839 *lenptr = tdep->arm_breakpoint_size;
7840 return tdep->arm_breakpoint;
7841 }
7842}
7843
177321bd
DJ
7844static void
7845arm_remote_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr,
7846 int *kindptr)
7847{
177321bd
DJ
7848 arm_breakpoint_from_pc (gdbarch, pcptr, kindptr);
7849
9779414d 7850 if (arm_pc_is_thumb (gdbarch, *pcptr) && *kindptr == 4)
177321bd
DJ
7851     /* Use the documented magic value for a 32-bit Thumb-2 breakpoint, so
7852        that it is not confused with a 32-bit ARM breakpoint.  */
7853 *kindptr = 3;
7854}
7855
cca44b1b
JB
7856/* Extract from an array REGBUF containing the (raw) register state a
7857 function return value of type TYPE, and copy that, in virtual
7858 format, into VALBUF. */
7859
7860static void
7861arm_extract_return_value (struct type *type, struct regcache *regs,
7862 gdb_byte *valbuf)
7863{
7864 struct gdbarch *gdbarch = get_regcache_arch (regs);
7865 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7866
7867 if (TYPE_CODE_FLT == TYPE_CODE (type))
7868 {
7869 switch (gdbarch_tdep (gdbarch)->fp_model)
7870 {
7871 case ARM_FLOAT_FPA:
7872 {
7873 /* The value is in register F0 in internal format. We need to
7874 extract the raw value and then convert it to the desired
7875 internal type. */
7876 bfd_byte tmpbuf[FP_REGISTER_SIZE];
7877
7878 regcache_cooked_read (regs, ARM_F0_REGNUM, tmpbuf);
7879 convert_from_extended (floatformat_from_type (type), tmpbuf,
7880 valbuf, gdbarch_byte_order (gdbarch));
7881 }
7882 break;
7883
7884 case ARM_FLOAT_SOFT_FPA:
7885 case ARM_FLOAT_SOFT_VFP:
7886 	/* ARM_FLOAT_VFP can arise if this is a variadic function, in which
7887 	   case the VFP ABI code is not used.  */
7888 case ARM_FLOAT_VFP:
7889 regcache_cooked_read (regs, ARM_A1_REGNUM, valbuf);
7890 if (TYPE_LENGTH (type) > 4)
7891 regcache_cooked_read (regs, ARM_A1_REGNUM + 1,
7892 valbuf + INT_REGISTER_SIZE);
7893 break;
7894
7895 default:
0963b4bd
MS
7896 internal_error (__FILE__, __LINE__,
7897 _("arm_extract_return_value: "
7898 "Floating point model not supported"));
cca44b1b
JB
7899 break;
7900 }
7901 }
7902 else if (TYPE_CODE (type) == TYPE_CODE_INT
7903 || TYPE_CODE (type) == TYPE_CODE_CHAR
7904 || TYPE_CODE (type) == TYPE_CODE_BOOL
7905 || TYPE_CODE (type) == TYPE_CODE_PTR
7906 || TYPE_CODE (type) == TYPE_CODE_REF
7907 || TYPE_CODE (type) == TYPE_CODE_ENUM)
7908 {
b021a221
MS
7909 /* If the type is a plain integer, then the access is
7910 straight-forward. Otherwise we have to play around a bit
7911 more. */
cca44b1b
JB
7912 int len = TYPE_LENGTH (type);
7913 int regno = ARM_A1_REGNUM;
7914 ULONGEST tmp;
7915
7916 while (len > 0)
7917 {
7918 /* By using store_unsigned_integer we avoid having to do
7919 anything special for small big-endian values. */
7920 regcache_cooked_read_unsigned (regs, regno++, &tmp);
7921 store_unsigned_integer (valbuf,
7922 (len > INT_REGISTER_SIZE
7923 ? INT_REGISTER_SIZE : len),
7924 byte_order, tmp);
7925 len -= INT_REGISTER_SIZE;
7926 valbuf += INT_REGISTER_SIZE;
7927 }
7928 }
7929 else
7930 {
7931 /* For a structure or union the behaviour is as if the value had
7932 been stored to word-aligned memory and then loaded into
7933 registers with 32-bit load instruction(s). */
7934 int len = TYPE_LENGTH (type);
7935 int regno = ARM_A1_REGNUM;
7936 bfd_byte tmpbuf[INT_REGISTER_SIZE];
7937
7938 while (len > 0)
7939 {
7940 regcache_cooked_read (regs, regno++, tmpbuf);
7941 memcpy (valbuf, tmpbuf,
7942 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
7943 len -= INT_REGISTER_SIZE;
7944 valbuf += INT_REGISTER_SIZE;
7945 }
7946 }
7947}
7948
7949
7950/* Will a function return an aggregate type in memory or in a
7951 register? Return 0 if an aggregate type can be returned in a
7952 register, 1 if it must be returned in memory. */
7953
7954static int
7955arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
7956{
cca44b1b
JB
7957 enum type_code code;
7958
f168693b 7959 type = check_typedef (type);
cca44b1b 7960
b13c8ab2
YQ
7961   /* Simple, non-aggregate types (i.e. not including vectors and
7962 complex) are always returned in a register (or registers). */
7963 code = TYPE_CODE (type);
7964 if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
7965 && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
7966 return 0;
cca44b1b 7967
c4312b19
YQ
7968 if (TYPE_CODE_ARRAY == code && TYPE_VECTOR (type))
7969 {
7970 /* Vector values should be returned using ARM registers if they
7971 are not over 16 bytes. */
7972 return (TYPE_LENGTH (type) > 16);
7973 }
7974
b13c8ab2 7975 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
cca44b1b 7976 {
b13c8ab2
YQ
7977 /* The AAPCS says all aggregates not larger than a word are returned
7978 in a register. */
7979 if (TYPE_LENGTH (type) <= INT_REGISTER_SIZE)
7980 return 0;
7981
cca44b1b
JB
7982 return 1;
7983 }
b13c8ab2
YQ
7984 else
7985 {
7986 int nRc;
cca44b1b 7987
b13c8ab2
YQ
7988 /* All aggregate types that won't fit in a register must be returned
7989 in memory. */
7990 if (TYPE_LENGTH (type) > INT_REGISTER_SIZE)
7991 return 1;
cca44b1b 7992
b13c8ab2
YQ
7993 /* In the ARM ABI, "integer" like aggregate types are returned in
7994 registers. For an aggregate type to be integer like, its size
7995 must be less than or equal to INT_REGISTER_SIZE and the
7996 offset of each addressable subfield must be zero. Note that bit
7997 fields are not addressable, and all addressable subfields of
7998 unions always start at offset zero.
cca44b1b 7999
b13c8ab2
YQ
8000 This function is based on the behaviour of GCC 2.95.1.
8001 See: gcc/arm.c: arm_return_in_memory() for details.
cca44b1b 8002
b13c8ab2
YQ
8003 Note: All versions of GCC before GCC 2.95.2 do not set up the
8004 parameters correctly for a function returning the following
8005 structure: struct { float f;}; This should be returned in memory,
8006 not a register. Richard Earnshaw sent me a patch, but I do not
8007 know of any way to detect if a function like the above has been
8008 compiled with the correct calling convention. */
8009
8010 /* Assume all other aggregate types can be returned in a register.
8011 Run a check for structures, unions and arrays. */
8012 nRc = 0;
67255d04 8013
b13c8ab2
YQ
8014 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
8015 {
8016 int i;
8017 /* Need to check if this struct/union is "integer" like. For
8018 this to be true, its size must be less than or equal to
8019 INT_REGISTER_SIZE and the offset of each addressable
8020 subfield must be zero. Note that bit fields are not
8021 addressable, and unions always start at offset zero. If any
8022 of the subfields is a floating point type, the struct/union
8023 cannot be an integer type. */
8024
8025 /* For each field in the object, check:
8026 1) Is it FP? --> yes, nRc = 1;
8027 2) Is it addressable (bitpos != 0) and
8028 not packed (bitsize == 0)?
8029 --> yes, nRc = 1
8030 */
8031
8032 for (i = 0; i < TYPE_NFIELDS (type); i++)
67255d04 8033 {
b13c8ab2
YQ
8034 enum type_code field_type_code;
8035
8036 field_type_code
8037 = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
8038 i)));
8039
8040 /* Is it a floating point type field? */
8041 if (field_type_code == TYPE_CODE_FLT)
67255d04
RE
8042 {
8043 nRc = 1;
8044 break;
8045 }
b13c8ab2
YQ
8046
8047 /* If bitpos != 0, then we have to care about it. */
8048 if (TYPE_FIELD_BITPOS (type, i) != 0)
8049 {
8050 /* Bitfields are not addressable. If the field bitsize is
8051 zero, then the field is not packed. Hence it cannot be
8052 a bitfield or any other packed type. */
8053 if (TYPE_FIELD_BITSIZE (type, i) == 0)
8054 {
8055 nRc = 1;
8056 break;
8057 }
8058 }
67255d04
RE
8059 }
8060 }
67255d04 8061
b13c8ab2
YQ
8062 return nRc;
8063 }
67255d04
RE
8064}
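/* Two illustrative cases for the APCS "integer like" test above (assuming
   the usual GCC layout): "struct { int i; }" has a single non-FP field at
   offset zero and is returned in r0, while "struct { float f; }" contains a
   floating point field and is therefore returned in memory, matching the
   GCC 2.95 behaviour the comment refers to.  */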
8065
34e8f22d
RE
8066/* Write into appropriate registers a function return value of type
8067 TYPE, given in virtual format. */
8068
8069static void
b508a996 8070arm_store_return_value (struct type *type, struct regcache *regs,
5238cf52 8071 const gdb_byte *valbuf)
34e8f22d 8072{
be8626e0 8073 struct gdbarch *gdbarch = get_regcache_arch (regs);
e17a4113 8074 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
be8626e0 8075
34e8f22d
RE
8076 if (TYPE_CODE (type) == TYPE_CODE_FLT)
8077 {
e362b510 8078 gdb_byte buf[MAX_REGISTER_SIZE];
34e8f22d 8079
be8626e0 8080 switch (gdbarch_tdep (gdbarch)->fp_model)
08216dd7
RE
8081 {
8082 case ARM_FLOAT_FPA:
8083
be8626e0
MD
8084 convert_to_extended (floatformat_from_type (type), buf, valbuf,
8085 gdbarch_byte_order (gdbarch));
b508a996 8086 regcache_cooked_write (regs, ARM_F0_REGNUM, buf);
08216dd7
RE
8087 break;
8088
fd50bc42 8089 case ARM_FLOAT_SOFT_FPA:
08216dd7 8090 case ARM_FLOAT_SOFT_VFP:
90445bd3
DJ
8091 	/* ARM_FLOAT_VFP can arise if this is a variadic function, in which
8092 	   case the VFP ABI code is not used.  */
8093 case ARM_FLOAT_VFP:
b508a996
RE
8094 regcache_cooked_write (regs, ARM_A1_REGNUM, valbuf);
8095 if (TYPE_LENGTH (type) > 4)
8096 regcache_cooked_write (regs, ARM_A1_REGNUM + 1,
7a5ea0d4 8097 valbuf + INT_REGISTER_SIZE);
08216dd7
RE
8098 break;
8099
8100 default:
9b20d036
MS
8101 internal_error (__FILE__, __LINE__,
8102 _("arm_store_return_value: Floating "
8103 "point model not supported"));
08216dd7
RE
8104 break;
8105 }
34e8f22d 8106 }
b508a996
RE
8107 else if (TYPE_CODE (type) == TYPE_CODE_INT
8108 || TYPE_CODE (type) == TYPE_CODE_CHAR
8109 || TYPE_CODE (type) == TYPE_CODE_BOOL
8110 || TYPE_CODE (type) == TYPE_CODE_PTR
8111 || TYPE_CODE (type) == TYPE_CODE_REF
8112 || TYPE_CODE (type) == TYPE_CODE_ENUM)
8113 {
8114 if (TYPE_LENGTH (type) <= 4)
8115 {
8116 /* Values of one word or less are zero/sign-extended and
8117 returned in r0. */
7a5ea0d4 8118 bfd_byte tmpbuf[INT_REGISTER_SIZE];
b508a996
RE
8119 LONGEST val = unpack_long (type, valbuf);
8120
e17a4113 8121 store_signed_integer (tmpbuf, INT_REGISTER_SIZE, byte_order, val);
b508a996
RE
8122 regcache_cooked_write (regs, ARM_A1_REGNUM, tmpbuf);
8123 }
8124 else
8125 {
8126 /* Integral values greater than one word are stored in consecutive
8127 registers starting with r0. This will always be a multiple of
8128 	     the register size.  */
8129 int len = TYPE_LENGTH (type);
8130 int regno = ARM_A1_REGNUM;
8131
8132 while (len > 0)
8133 {
8134 regcache_cooked_write (regs, regno++, valbuf);
7a5ea0d4
DJ
8135 len -= INT_REGISTER_SIZE;
8136 valbuf += INT_REGISTER_SIZE;
b508a996
RE
8137 }
8138 }
8139 }
34e8f22d 8140 else
b508a996
RE
8141 {
8142 /* For a structure or union the behaviour is as if the value had
8143 been stored to word-aligned memory and then loaded into
8144 registers with 32-bit load instruction(s). */
8145 int len = TYPE_LENGTH (type);
8146 int regno = ARM_A1_REGNUM;
7a5ea0d4 8147 bfd_byte tmpbuf[INT_REGISTER_SIZE];
b508a996
RE
8148
8149 while (len > 0)
8150 {
8151 memcpy (tmpbuf, valbuf,
7a5ea0d4 8152 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
b508a996 8153 regcache_cooked_write (regs, regno++, tmpbuf);
7a5ea0d4
DJ
8154 len -= INT_REGISTER_SIZE;
8155 valbuf += INT_REGISTER_SIZE;
b508a996
RE
8156 }
8157 }
34e8f22d
RE
8158}
8159
2af48f68
PB
8160
8161/* Handle function return values. */
8162
8163static enum return_value_convention
6a3a010b 8164arm_return_value (struct gdbarch *gdbarch, struct value *function,
c055b101
CV
8165 struct type *valtype, struct regcache *regcache,
8166 gdb_byte *readbuf, const gdb_byte *writebuf)
2af48f68 8167{
7c00367c 8168 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
6a3a010b 8169 struct type *func_type = function ? value_type (function) : NULL;
90445bd3
DJ
8170 enum arm_vfp_cprc_base_type vfp_base_type;
8171 int vfp_base_count;
8172
8173 if (arm_vfp_abi_for_function (gdbarch, func_type)
8174 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
8175 {
8176 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
8177 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
8178 int i;
8179 for (i = 0; i < vfp_base_count; i++)
8180 {
58d6951d
DJ
8181 if (reg_char == 'q')
8182 {
8183 if (writebuf)
8184 arm_neon_quad_write (gdbarch, regcache, i,
8185 writebuf + i * unit_length);
8186
8187 if (readbuf)
8188 arm_neon_quad_read (gdbarch, regcache, i,
8189 readbuf + i * unit_length);
8190 }
8191 else
8192 {
8193 char name_buf[4];
8194 int regnum;
8195
8c042590 8196 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
58d6951d
DJ
8197 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8198 strlen (name_buf));
8199 if (writebuf)
8200 regcache_cooked_write (regcache, regnum,
8201 writebuf + i * unit_length);
8202 if (readbuf)
8203 regcache_cooked_read (regcache, regnum,
8204 readbuf + i * unit_length);
8205 }
90445bd3
DJ
8206 }
8207 return RETURN_VALUE_REGISTER_CONVENTION;
8208 }
7c00367c 8209
2af48f68
PB
8210 if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
8211 || TYPE_CODE (valtype) == TYPE_CODE_UNION
8212 || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
8213 {
7c00367c
MK
8214 if (tdep->struct_return == pcc_struct_return
8215 || arm_return_in_memory (gdbarch, valtype))
2af48f68
PB
8216 return RETURN_VALUE_STRUCT_CONVENTION;
8217 }
b13c8ab2
YQ
8218 else if (TYPE_CODE (valtype) == TYPE_CODE_COMPLEX)
8219 {
8220 if (arm_return_in_memory (gdbarch, valtype))
8221 return RETURN_VALUE_STRUCT_CONVENTION;
8222 }
7052e42c 8223
2af48f68
PB
8224 if (writebuf)
8225 arm_store_return_value (valtype, regcache, writebuf);
8226
8227 if (readbuf)
8228 arm_extract_return_value (valtype, regcache, readbuf);
8229
8230 return RETURN_VALUE_REGISTER_CONVENTION;
8231}
8232
8233
9df628e0 8234static int
60ade65d 8235arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
9df628e0 8236{
e17a4113
UW
8237 struct gdbarch *gdbarch = get_frame_arch (frame);
8238 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8239 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
9df628e0 8240 CORE_ADDR jb_addr;
e362b510 8241 gdb_byte buf[INT_REGISTER_SIZE];
9df628e0 8242
60ade65d 8243 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
9df628e0
RE
8244
8245 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
7a5ea0d4 8246 INT_REGISTER_SIZE))
9df628e0
RE
8247 return 0;
8248
e17a4113 8249 *pc = extract_unsigned_integer (buf, INT_REGISTER_SIZE, byte_order);
9df628e0
RE
8250 return 1;
8251}
8252
faa95490
DJ
8253/* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
8254 return the target PC. Otherwise return 0. */
c906108c
SS
8255
8256CORE_ADDR
52f729a7 8257arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
c906108c 8258{
2c02bd72 8259 const char *name;
faa95490 8260 int namelen;
c906108c
SS
8261 CORE_ADDR start_addr;
8262
8263 /* Find the starting address and name of the function containing the PC. */
8264 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
80d8d390
YQ
8265 {
8266 /* Trampoline 'bx reg' doesn't belong to any functions. Do the
8267 check here. */
8268 start_addr = arm_skip_bx_reg (frame, pc);
8269 if (start_addr != 0)
8270 return start_addr;
8271
8272 return 0;
8273 }
c906108c 8274
faa95490
DJ
8275 /* If PC is in a Thumb call or return stub, return the address of the
8276 target PC, which is in a register. The thunk functions are called
8277 _call_via_xx, where x is the register name. The possible names
3d8d5e79
DJ
8278 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
8279 functions, named __ARM_call_via_r[0-7]. */
61012eef
GB
8280 if (startswith (name, "_call_via_")
8281 || startswith (name, "__ARM_call_via_"))
c906108c 8282 {
ed9a39eb
JM
8283 /* Use the name suffix to determine which register contains the
8284 target PC. */
c5aa993b
JM
8285 static char *table[15] =
8286 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
8287 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
8288 };
c906108c 8289 int regno;
faa95490 8290 int offset = strlen (name) - 2;
c906108c
SS
8291
8292 for (regno = 0; regno <= 14; regno++)
faa95490 8293 if (strcmp (&name[offset], table[regno]) == 0)
52f729a7 8294 return get_frame_register_unsigned (frame, regno);
c906108c 8295 }
ed9a39eb 8296
faa95490
DJ
8297 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
8298 non-interworking calls to foo. We could decode the stubs
8299 to find the target but it's easier to use the symbol table. */
8300 namelen = strlen (name);
8301 if (name[0] == '_' && name[1] == '_'
8302 && ((namelen > 2 + strlen ("_from_thumb")
61012eef 8303 && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
faa95490 8304 || (namelen > 2 + strlen ("_from_arm")
61012eef 8305 && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
faa95490
DJ
8306 {
8307 char *target_name;
8308 int target_len = namelen - 2;
3b7344d5 8309 struct bound_minimal_symbol minsym;
faa95490
DJ
8310 struct objfile *objfile;
8311 struct obj_section *sec;
8312
8313 if (name[namelen - 1] == 'b')
8314 target_len -= strlen ("_from_thumb");
8315 else
8316 target_len -= strlen ("_from_arm");
8317
224c3ddb 8318 target_name = (char *) alloca (target_len + 1);
faa95490
DJ
8319 memcpy (target_name, name + 2, target_len);
8320 target_name[target_len] = '\0';
8321
8322 sec = find_pc_section (pc);
8323 objfile = (sec == NULL) ? NULL : sec->objfile;
8324 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
3b7344d5 8325 if (minsym.minsym != NULL)
77e371c0 8326 return BMSYMBOL_VALUE_ADDRESS (minsym);
faa95490
DJ
8327 else
8328 return 0;
8329 }
8330
c5aa993b 8331 return 0; /* not a stub */
c906108c
SS
8332}
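/* For instance, a stub named "__ARM_call_via_r3" (or "_call_via_r3")
   resolves to whatever is currently in r3, and a "__foo_from_thumb" stub is
   skipped by looking up "foo" itself in the minimal symbol table rather than
   decoding the stub body.  */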
8333
afd7eef0
RE
8334static void
8335set_arm_command (char *args, int from_tty)
8336{
edefbb7c
AC
8337 printf_unfiltered (_("\
8338\"set arm\" must be followed by an apporpriate subcommand.\n"));
afd7eef0
RE
8339 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
8340}
8341
8342static void
8343show_arm_command (char *args, int from_tty)
8344{
26304000 8345 cmd_show_list (showarmcmdlist, from_tty, "");
afd7eef0
RE
8346}
8347
28e97307
DJ
8348static void
8349arm_update_current_architecture (void)
fd50bc42 8350{
28e97307 8351 struct gdbarch_info info;
fd50bc42 8352
28e97307 8353 /* If the current architecture is not ARM, we have nothing to do. */
f5656ead 8354 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
28e97307 8355 return;
fd50bc42 8356
28e97307
DJ
8357 /* Update the architecture. */
8358 gdbarch_info_init (&info);
fd50bc42 8359
28e97307 8360 if (!gdbarch_update_p (info))
9b20d036 8361 internal_error (__FILE__, __LINE__, _("could not update architecture"));
fd50bc42
RE
8362}
8363
8364static void
8365set_fp_model_sfunc (char *args, int from_tty,
8366 struct cmd_list_element *c)
8367{
570dc176 8368 int fp_model;
fd50bc42
RE
8369
8370 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
8371 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
8372 {
aead7601 8373 arm_fp_model = (enum arm_float_model) fp_model;
fd50bc42
RE
8374 break;
8375 }
8376
8377 if (fp_model == ARM_FLOAT_LAST)
edefbb7c 8378 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
fd50bc42
RE
8379 current_fp_model);
8380
28e97307 8381 arm_update_current_architecture ();
fd50bc42
RE
8382}
8383
8384static void
08546159
AC
8385show_fp_model (struct ui_file *file, int from_tty,
8386 struct cmd_list_element *c, const char *value)
fd50bc42 8387{
f5656ead 8388 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
fd50bc42 8389
28e97307 8390 if (arm_fp_model == ARM_FLOAT_AUTO
f5656ead 8391 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
28e97307
DJ
8392 fprintf_filtered (file, _("\
8393The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
8394 fp_model_strings[tdep->fp_model]);
8395 else
8396 fprintf_filtered (file, _("\
8397The current ARM floating point model is \"%s\".\n"),
8398 fp_model_strings[arm_fp_model]);
8399}
8400
8401static void
8402arm_set_abi (char *args, int from_tty,
8403 struct cmd_list_element *c)
8404{
570dc176 8405 int arm_abi;
28e97307
DJ
8406
8407 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
8408 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
8409 {
aead7601 8410 arm_abi_global = (enum arm_abi_kind) arm_abi;
28e97307
DJ
8411 break;
8412 }
8413
8414 if (arm_abi == ARM_ABI_LAST)
8415 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
8416 arm_abi_string);
8417
8418 arm_update_current_architecture ();
8419}
8420
8421static void
8422arm_show_abi (struct ui_file *file, int from_tty,
8423 struct cmd_list_element *c, const char *value)
8424{
f5656ead 8425 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
28e97307
DJ
8426
8427 if (arm_abi_global == ARM_ABI_AUTO
f5656ead 8428 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
28e97307
DJ
8429 fprintf_filtered (file, _("\
8430The current ARM ABI is \"auto\" (currently \"%s\").\n"),
8431 arm_abi_strings[tdep->arm_abi]);
8432 else
8433 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
8434 arm_abi_string);
fd50bc42
RE
8435}
8436
0428b8f5
DJ
8437static void
8438arm_show_fallback_mode (struct ui_file *file, int from_tty,
8439 struct cmd_list_element *c, const char *value)
8440{
0963b4bd
MS
8441 fprintf_filtered (file,
8442 _("The current execution mode assumed "
8443 "(when symbols are unavailable) is \"%s\".\n"),
0428b8f5
DJ
8444 arm_fallback_mode_string);
8445}
8446
8447static void
8448arm_show_force_mode (struct ui_file *file, int from_tty,
8449 struct cmd_list_element *c, const char *value)
8450{
0963b4bd
MS
8451 fprintf_filtered (file,
8452 _("The current execution mode assumed "
8453 "(even when symbols are available) is \"%s\".\n"),
0428b8f5
DJ
8454 arm_force_mode_string);
8455}
8456
afd7eef0
RE
8457/* If the user changes the register disassembly style used for info
8458 register and other commands, we have to also switch the style used
8459 in opcodes for disassembly output. This function is run in the "set
 8460 arm disassembler" command, and does that. */
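/* Illustrative usage (assuming the opcodes library provides its usual
   style names, e.g. "apcs"): "set arm disassembler apcs" switches both
   GDB's register display and the disassembler output to the APCS names
   (roughly a1-a4, v1-v6, sl, fp, ip, sp, lr, pc).  */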
bc90b915
FN
8461
8462static void
afd7eef0 8463set_disassembly_style_sfunc (char *args, int from_tty,
bc90b915
FN
8464 struct cmd_list_element *c)
8465{
afd7eef0 8466 set_disassembly_style ();
bc90b915
FN
8467}
8468\f
966fbf70 8469/* Return the ARM register name corresponding to register I. */
a208b0cb 8470static const char *
d93859e2 8471arm_register_name (struct gdbarch *gdbarch, int i)
966fbf70 8472{
58d6951d
DJ
8473 const int num_regs = gdbarch_num_regs (gdbarch);
8474
8475 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
8476 && i >= num_regs && i < num_regs + 32)
8477 {
8478 static const char *const vfp_pseudo_names[] = {
8479 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
8480 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
8481 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
8482 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
8483 };
8484
8485 return vfp_pseudo_names[i - num_regs];
8486 }
8487
8488 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
8489 && i >= num_regs + 32 && i < num_regs + 32 + 16)
8490 {
8491 static const char *const neon_pseudo_names[] = {
8492 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
8493 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
8494 };
8495
8496 return neon_pseudo_names[i - num_regs - 32];
8497 }
8498
ff6f572f
DJ
8499 if (i >= ARRAY_SIZE (arm_register_names))
8500 /* These registers are only supported on targets which supply
8501 an XML description. */
8502 return "";
8503
966fbf70
RE
8504 return arm_register_names[i];
8505}
8506
bc90b915 8507static void
afd7eef0 8508set_disassembly_style (void)
bc90b915 8509{
123dc839 8510 int current;
bc90b915 8511
123dc839
DJ
8512 /* Find the style that the user wants. */
8513 for (current = 0; current < num_disassembly_options; current++)
8514 if (disassembly_style == valid_disassembly_styles[current])
8515 break;
8516 gdb_assert (current < num_disassembly_options);
bc90b915 8517
94c30b78 8518 /* Synchronize the disassembler. */
bc90b915
FN
8519 set_arm_regname_option (current);
8520}
8521
082fc60d
RE
8522/* Test whether the coff symbol specific value corresponds to a Thumb
8523 function. */
8524
8525static int
8526coff_sym_is_thumb (int val)
8527{
f8bf5763
PM
8528 return (val == C_THUMBEXT
8529 || val == C_THUMBSTAT
8530 || val == C_THUMBEXTFUNC
8531 || val == C_THUMBSTATFUNC
8532 || val == C_THUMBLABEL);
082fc60d
RE
8533}
8534
8535/* arm_coff_make_msymbol_special()
8536 arm_elf_make_msymbol_special()
8537
8538 These functions test whether the COFF or ELF symbol corresponds to
8539 an address in thumb code, and set a "special" bit in a minimal
8540 symbol to indicate that it does. */
8541
34e8f22d 8542static void
082fc60d
RE
8543arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
8544{
39d911fc
TP
8545 elf_symbol_type *elfsym = (elf_symbol_type *) sym;
8546
8547 if (ARM_GET_SYM_BRANCH_TYPE (elfsym->internal_elf_sym.st_target_internal)
467d42c4 8548 == ST_BRANCH_TO_THUMB)
082fc60d
RE
8549 MSYMBOL_SET_SPECIAL (msym);
8550}
8551
34e8f22d 8552static void
082fc60d
RE
8553arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
8554{
8555 if (coff_sym_is_thumb (val))
8556 MSYMBOL_SET_SPECIAL (msym);
8557}
8558
60c5725c 8559static void
c1bd65d0 8560arm_objfile_data_free (struct objfile *objfile, void *arg)
60c5725c 8561{
9a3c8263 8562 struct arm_per_objfile *data = (struct arm_per_objfile *) arg;
60c5725c
DJ
8563 unsigned int i;
8564
8565 for (i = 0; i < objfile->obfd->section_count; i++)
8566 VEC_free (arm_mapping_symbol_s, data->section_maps[i]);
8567}
8568
8569static void
8570arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
8571 asymbol *sym)
8572{
8573 const char *name = bfd_asymbol_name (sym);
8574 struct arm_per_objfile *data;
8575 VEC(arm_mapping_symbol_s) **map_p;
8576 struct arm_mapping_symbol new_map_sym;
8577
8578 gdb_assert (name[0] == '$');
8579 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
8580 return;
8581
9a3c8263
SM
8582 data = (struct arm_per_objfile *) objfile_data (objfile,
8583 arm_objfile_data_key);
60c5725c
DJ
8584 if (data == NULL)
8585 {
8586 data = OBSTACK_ZALLOC (&objfile->objfile_obstack,
8587 struct arm_per_objfile);
8588 set_objfile_data (objfile, arm_objfile_data_key, data);
8589 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
8590 objfile->obfd->section_count,
8591 VEC(arm_mapping_symbol_s) *);
8592 }
8593 map_p = &data->section_maps[bfd_get_section (sym)->index];
8594
8595 new_map_sym.value = sym->value;
8596 new_map_sym.type = name[1];
8597
8598 /* Assume that most mapping symbols appear in order of increasing
8599 value. If they were randomly distributed, it would be faster to
8600 always push here and then sort at first use. */
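 /* Put differently (noted for clarity): in the usual case the new
    symbol's value is not smaller than the last one recorded, so the
    plain push below keeps the vector sorted and later lookups can
    binary-search it; only out-of-order symbols pay for the
    binary-search insertion here.  */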
8601 if (!VEC_empty (arm_mapping_symbol_s, *map_p))
8602 {
8603 struct arm_mapping_symbol *prev_map_sym;
8604
8605 prev_map_sym = VEC_last (arm_mapping_symbol_s, *map_p);
8606 if (prev_map_sym->value >= sym->value)
8607 {
8608 unsigned int idx;
8609 idx = VEC_lower_bound (arm_mapping_symbol_s, *map_p, &new_map_sym,
8610 arm_compare_mapping_symbols);
8611 VEC_safe_insert (arm_mapping_symbol_s, *map_p, idx, &new_map_sym);
8612 return;
8613 }
8614 }
8615
8616 VEC_safe_push (arm_mapping_symbol_s, *map_p, &new_map_sym);
8617}
8618
756fe439 8619static void
61a1198a 8620arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
756fe439 8621{
9779414d 8622 struct gdbarch *gdbarch = get_regcache_arch (regcache);
61a1198a 8623 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
756fe439
DJ
8624
8625 /* If necessary, set the T bit. */
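      /* For reference (a note about the PSR layouts, not taken from this
         file): the Thumb state bit is CPSR bit 5 on A/R-profile cores and
         xPSR bit 24 on M-profile; arm_psr_thumb_bit returns whichever mask
         applies to the current gdbarch.  */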
8626 if (arm_apcs_32)
8627 {
9779414d 8628 ULONGEST val, t_bit;
61a1198a 8629 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
9779414d
DJ
8630 t_bit = arm_psr_thumb_bit (gdbarch);
8631 if (arm_pc_is_thumb (gdbarch, pc))
8632 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8633 val | t_bit);
756fe439 8634 else
61a1198a 8635 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9779414d 8636 val & ~t_bit);
756fe439
DJ
8637 }
8638}
123dc839 8639
58d6951d
DJ
8640/* Read the contents of a NEON quad register, by reading from two
8641 double registers. This is used to implement the quad pseudo
8642 registers, and for argument passing in case the quad registers are
8643 missing; vectors are passed in quad registers when using the VFP
8644 ABI, even if a NEON unit is not present. REGNUM is the index of
8645 the quad register, in [0, 15]. */
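/* For example (illustrative): q1 is composed of d2 and d3; on a
   little-endian target d2 supplies bytes 0-7 of the 16-byte quad value
   and d3 supplies bytes 8-15, while on a big-endian target the two
   halves are swapped, which is exactly what the offset arithmetic below
   implements.  */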
8646
05d1431c 8647static enum register_status
58d6951d
DJ
8648arm_neon_quad_read (struct gdbarch *gdbarch, struct regcache *regcache,
8649 int regnum, gdb_byte *buf)
8650{
8651 char name_buf[4];
8652 gdb_byte reg_buf[8];
8653 int offset, double_regnum;
05d1431c 8654 enum register_status status;
58d6951d 8655
8c042590 8656 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
58d6951d
DJ
8657 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8658 strlen (name_buf));
8659
8660 /* d0 is always the least significant half of q0. */
8661 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8662 offset = 8;
8663 else
8664 offset = 0;
8665
05d1431c
PA
8666 status = regcache_raw_read (regcache, double_regnum, reg_buf);
8667 if (status != REG_VALID)
8668 return status;
58d6951d
DJ
8669 memcpy (buf + offset, reg_buf, 8);
8670
8671 offset = 8 - offset;
05d1431c
PA
8672 status = regcache_raw_read (regcache, double_regnum + 1, reg_buf);
8673 if (status != REG_VALID)
8674 return status;
58d6951d 8675 memcpy (buf + offset, reg_buf, 8);
05d1431c
PA
8676
8677 return REG_VALID;
58d6951d
DJ
8678}
8679
05d1431c 8680static enum register_status
58d6951d
DJ
8681arm_pseudo_read (struct gdbarch *gdbarch, struct regcache *regcache,
8682 int regnum, gdb_byte *buf)
8683{
8684 const int num_regs = gdbarch_num_regs (gdbarch);
8685 char name_buf[4];
8686 gdb_byte reg_buf[8];
8687 int offset, double_regnum;
8688
8689 gdb_assert (regnum >= num_regs);
8690 regnum -= num_regs;
8691
8692 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8693 /* Quad-precision register. */
05d1431c 8694 return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
58d6951d
DJ
8695 else
8696 {
05d1431c
PA
8697 enum register_status status;
8698
58d6951d
DJ
8699 /* Single-precision register. */
8700 gdb_assert (regnum < 32);
8701
8702 /* s0 is always the least significant half of d0. */
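      /* Illustrative: s5 occupies the upper 32 bits of d2, which sit at
         byte offset 4 of the raw buffer on a little-endian target and at
         offset 0 on a big-endian one, matching the offset and register
         number computed just below.  */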
8703 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8704 offset = (regnum & 1) ? 0 : 4;
8705 else
8706 offset = (regnum & 1) ? 4 : 0;
8707
8c042590 8708 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
58d6951d
DJ
8709 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8710 strlen (name_buf));
8711
05d1431c
PA
8712 status = regcache_raw_read (regcache, double_regnum, reg_buf);
8713 if (status == REG_VALID)
8714 memcpy (buf, reg_buf + offset, 4);
8715 return status;
58d6951d
DJ
8716 }
8717}
8718
8719/* Store the contents of BUF to a NEON quad register, by writing to
8720 two double registers. This is used to implement the quad pseudo
8721 registers, and for argument passing in case the quad registers are
8722 missing; vectors are passed in quad registers when using the VFP
8723 ABI, even if a NEON unit is not present. REGNUM is the index
8724 of the quad register, in [0, 15]. */
8725
8726static void
8727arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
8728 int regnum, const gdb_byte *buf)
8729{
8730 char name_buf[4];
58d6951d
DJ
8731 int offset, double_regnum;
8732
8c042590 8733 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
58d6951d
DJ
8734 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8735 strlen (name_buf));
8736
8737 /* d0 is always the least significant half of q0. */
8738 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8739 offset = 8;
8740 else
8741 offset = 0;
8742
8743 regcache_raw_write (regcache, double_regnum, buf + offset);
8744 offset = 8 - offset;
8745 regcache_raw_write (regcache, double_regnum + 1, buf + offset);
8746}
8747
8748static void
8749arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
8750 int regnum, const gdb_byte *buf)
8751{
8752 const int num_regs = gdbarch_num_regs (gdbarch);
8753 char name_buf[4];
8754 gdb_byte reg_buf[8];
8755 int offset, double_regnum;
8756
8757 gdb_assert (regnum >= num_regs);
8758 regnum -= num_regs;
8759
8760 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8761 /* Quad-precision register. */
8762 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
8763 else
8764 {
8765 /* Single-precision register. */
8766 gdb_assert (regnum < 32);
8767
8768 /* s0 is always the least significant half of d0. */
8769 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8770 offset = (regnum & 1) ? 0 : 4;
8771 else
8772 offset = (regnum & 1) ? 4 : 0;
8773
8c042590 8774 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
58d6951d
DJ
8775 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8776 strlen (name_buf));
8777
8778 regcache_raw_read (regcache, double_regnum, reg_buf);
8779 memcpy (reg_buf + offset, buf, 4);
8780 regcache_raw_write (regcache, double_regnum, reg_buf);
8781 }
8782}
8783
123dc839
DJ
8784static struct value *
8785value_of_arm_user_reg (struct frame_info *frame, const void *baton)
8786{
9a3c8263 8787 const int *reg_p = (const int *) baton;
123dc839
DJ
8788 return value_of_register (*reg_p, frame);
8789}
97e03143 8790\f
70f80edf
JT
8791static enum gdb_osabi
8792arm_elf_osabi_sniffer (bfd *abfd)
97e03143 8793{
2af48f68 8794 unsigned int elfosabi;
70f80edf 8795 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
97e03143 8796
70f80edf 8797 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
97e03143 8798
28e97307
DJ
8799 if (elfosabi == ELFOSABI_ARM)
8800 /* GNU tools use this value. Check note sections in this case,
8801 as well. */
8802 bfd_map_over_sections (abfd,
8803 generic_elf_osabi_sniff_abi_tag_sections,
8804 &osabi);
97e03143 8805
28e97307 8806 /* Anything else will be handled by the generic ELF sniffer. */
70f80edf 8807 return osabi;
97e03143
RE
8808}
8809
54483882
YQ
8810static int
8811arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
8812 struct reggroup *group)
8813{
2c291032
YQ
8814 /* FPS register's type is INT, but belongs to float_reggroup. Beside
8815 this, FPS register belongs to save_regroup, restore_reggroup, and
8816 all_reggroup, of course. */
54483882 8817 if (regnum == ARM_FPS_REGNUM)
2c291032
YQ
8818 return (group == float_reggroup
8819 || group == save_reggroup
8820 || group == restore_reggroup
8821 || group == all_reggroup);
54483882
YQ
8822 else
8823 return default_register_reggroup_p (gdbarch, regnum, group);
8824}
8825
25f8c692
JL
8826\f
8827/* For backward-compatibility we allow two 'g' packet lengths with
8828 the remote protocol depending on whether FPA registers are
8829 supplied. M-profile targets do not have FPA registers, but some
8830 stubs already exist in the wild which use a 'g' packet which
8831 supplies them albeit with dummy values. The packet format which
8832 includes FPA registers should be considered deprecated for
8833 M-profile targets. */
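/* For reference (assuming the usual sizes INT_REGISTER_SIZE == 4,
   FP_REGISTER_SIZE == 12 and VFP_REGISTER_SIZE == 8), the three guesses
   registered below work out to 16*4 + 8*12 + 2*4 = 168 bytes for the
   FPA-style layout, 16*4 + 4 = 68 bytes for the plain M-profile layout,
   and 16*4 + 16*8 + 2*4 = 200 bytes for the M-profile-plus-VFP layout.  */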
8834
8835static void
8836arm_register_g_packet_guesses (struct gdbarch *gdbarch)
8837{
8838 if (gdbarch_tdep (gdbarch)->is_m)
8839 {
8840 /* If we know from the executable this is an M-profile target,
8841 cater for remote targets whose register set layout is the
8842 same as the FPA layout. */
8843 register_remote_g_packet_guess (gdbarch,
03145bf4 8844 /* r0-r12,sp,lr,pc; f0-f7; fps,xpsr */
25f8c692
JL
8845 (16 * INT_REGISTER_SIZE)
8846 + (8 * FP_REGISTER_SIZE)
8847 + (2 * INT_REGISTER_SIZE),
8848 tdesc_arm_with_m_fpa_layout);
8849
8850 /* The regular M-profile layout. */
8851 register_remote_g_packet_guess (gdbarch,
8852 /* r0-r12,sp,lr,pc; xpsr */
8853 (16 * INT_REGISTER_SIZE)
8854 + INT_REGISTER_SIZE,
8855 tdesc_arm_with_m);
3184d3f9
JL
8856
8857 /* M-profile plus M4F VFP. */
8858 register_remote_g_packet_guess (gdbarch,
8859 /* r0-r12,sp,lr,pc; d0-d15; fpscr,xpsr */
8860 (16 * INT_REGISTER_SIZE)
8861 + (16 * VFP_REGISTER_SIZE)
8862 + (2 * INT_REGISTER_SIZE),
8863 tdesc_arm_with_m_vfp_d16);
25f8c692
JL
8864 }
8865
8866 /* Otherwise we don't have a useful guess. */
8867}
8868
7eb89530
YQ
8869/* Implement the code_of_frame_writable gdbarch method. */
8870
8871static int
8872arm_code_of_frame_writable (struct gdbarch *gdbarch, struct frame_info *frame)
8873{
8874 if (gdbarch_tdep (gdbarch)->is_m
8875 && get_frame_type (frame) == SIGTRAMP_FRAME)
8876 {
 8877 /* M-profile exception frames return to some magic PCs, which
 8878 aren't writable at all. */
8879 return 0;
8880 }
8881 else
8882 return 1;
8883}
8884
70f80edf 8885\f
da3c6d4a
MS
8886/* Initialize the current architecture based on INFO. If possible,
8887 re-use an architecture from ARCHES, which is a list of
8888 architectures already created during this debugging session.
97e03143 8889
da3c6d4a
MS
8890 Called e.g. at program startup, when reading a core file, and when
8891 reading a binary file. */
97e03143 8892
39bbf761
RE
8893static struct gdbarch *
8894arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
8895{
97e03143 8896 struct gdbarch_tdep *tdep;
39bbf761 8897 struct gdbarch *gdbarch;
28e97307
DJ
8898 struct gdbarch_list *best_arch;
8899 enum arm_abi_kind arm_abi = arm_abi_global;
8900 enum arm_float_model fp_model = arm_fp_model;
123dc839 8901 struct tdesc_arch_data *tdesc_data = NULL;
9779414d 8902 int i, is_m = 0;
330c6ca9 8903 int vfp_register_count = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
a56cc1ce 8904 int have_wmmx_registers = 0;
58d6951d 8905 int have_neon = 0;
ff6f572f 8906 int have_fpa_registers = 1;
9779414d
DJ
8907 const struct target_desc *tdesc = info.target_desc;
8908
8909 /* If we have an object to base this architecture on, try to determine
8910 its ABI. */
8911
8912 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
8913 {
8914 int ei_osabi, e_flags;
8915
8916 switch (bfd_get_flavour (info.abfd))
8917 {
8918 case bfd_target_aout_flavour:
8919 /* Assume it's an old APCS-style ABI. */
8920 arm_abi = ARM_ABI_APCS;
8921 break;
8922
8923 case bfd_target_coff_flavour:
8924 /* Assume it's an old APCS-style ABI. */
8925 /* XXX WinCE? */
8926 arm_abi = ARM_ABI_APCS;
8927 break;
8928
8929 case bfd_target_elf_flavour:
8930 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
8931 e_flags = elf_elfheader (info.abfd)->e_flags;
8932
8933 if (ei_osabi == ELFOSABI_ARM)
8934 {
8935 /* GNU tools used to use this value, but do not for EABI
8936 objects. There's nowhere to tag an EABI version
8937 anyway, so assume APCS. */
8938 arm_abi = ARM_ABI_APCS;
8939 }
d403db27 8940 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
9779414d
DJ
8941 {
8942 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
8943 int attr_arch, attr_profile;
8944
8945 switch (eabi_ver)
8946 {
8947 case EF_ARM_EABI_UNKNOWN:
8948 /* Assume GNU tools. */
8949 arm_abi = ARM_ABI_APCS;
8950 break;
8951
8952 case EF_ARM_EABI_VER4:
8953 case EF_ARM_EABI_VER5:
8954 arm_abi = ARM_ABI_AAPCS;
8955 /* EABI binaries default to VFP float ordering.
8956 They may also contain build attributes that can
8957 be used to identify if the VFP argument-passing
8958 ABI is in use. */
8959 if (fp_model == ARM_FLOAT_AUTO)
8960 {
8961#ifdef HAVE_ELF
8962 switch (bfd_elf_get_obj_attr_int (info.abfd,
8963 OBJ_ATTR_PROC,
8964 Tag_ABI_VFP_args))
8965 {
b35b0298 8966 case AEABI_VFP_args_base:
9779414d
DJ
8967 /* "The user intended FP parameter/result
8968 passing to conform to AAPCS, base
8969 variant". */
8970 fp_model = ARM_FLOAT_SOFT_VFP;
8971 break;
b35b0298 8972 case AEABI_VFP_args_vfp:
9779414d
DJ
8973 /* "The user intended FP parameter/result
8974 passing to conform to AAPCS, VFP
8975 variant". */
8976 fp_model = ARM_FLOAT_VFP;
8977 break;
b35b0298 8978 case AEABI_VFP_args_toolchain:
9779414d
DJ
8979 /* "The user intended FP parameter/result
8980 passing to conform to tool chain-specific
8981 conventions" - we don't know any such
8982 conventions, so leave it as "auto". */
8983 break;
b35b0298 8984 case AEABI_VFP_args_compatible:
5c294fee
TG
8985 /* "Code is compatible with both the base
8986 and VFP variants; the user did not permit
8987 non-variadic functions to pass FP
8988 parameters/results" - leave it as
8989 "auto". */
8990 break;
9779414d
DJ
8991 default:
8992 /* Attribute value not mentioned in the
5c294fee 8993 November 2012 ABI, so leave it as
9779414d
DJ
8994 "auto". */
8995 break;
8996 }
8997#else
8998 fp_model = ARM_FLOAT_SOFT_VFP;
8999#endif
9000 }
9001 break;
9002
9003 default:
9004 /* Leave it as "auto". */
9005 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
9006 break;
9007 }
9008
9009#ifdef HAVE_ELF
9010 /* Detect M-profile programs. This only works if the
9011 executable file includes build attributes; GCC does
9012 copy them to the executable, but e.g. RealView does
9013 not. */
9014 attr_arch = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9015 Tag_CPU_arch);
0963b4bd
MS
9016 attr_profile = bfd_elf_get_obj_attr_int (info.abfd,
9017 OBJ_ATTR_PROC,
9779414d
DJ
9018 Tag_CPU_arch_profile);
9019 /* GCC specifies the profile for v6-M; RealView only
9020 specifies the profile for architectures starting with
9021 V7 (as opposed to architectures with a tag
9022 numerically greater than TAG_CPU_ARCH_V7). */
9023 if (!tdesc_has_registers (tdesc)
9024 && (attr_arch == TAG_CPU_ARCH_V6_M
9025 || attr_arch == TAG_CPU_ARCH_V6S_M
9026 || attr_profile == 'M'))
25f8c692 9027 is_m = 1;
9779414d
DJ
9028#endif
9029 }
9030
9031 if (fp_model == ARM_FLOAT_AUTO)
9032 {
9033 int e_flags = elf_elfheader (info.abfd)->e_flags;
9034
9035 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
9036 {
9037 case 0:
9038 /* Leave it as "auto". Strictly speaking this case
9039 means FPA, but almost nobody uses that now, and
9040 many toolchains fail to set the appropriate bits
9041 for the floating-point model they use. */
9042 break;
9043 case EF_ARM_SOFT_FLOAT:
9044 fp_model = ARM_FLOAT_SOFT_FPA;
9045 break;
9046 case EF_ARM_VFP_FLOAT:
9047 fp_model = ARM_FLOAT_VFP;
9048 break;
9049 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
9050 fp_model = ARM_FLOAT_SOFT_VFP;
9051 break;
9052 }
9053 }
9054
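 /* BE8 objects hold big-endian data but little-endian instructions, so
    instruction fetches must use the opposite byte order from data
    accesses (noted here for clarity).  */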
9055 if (e_flags & EF_ARM_BE8)
9056 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
9057
9058 break;
9059
9060 default:
9061 /* Leave it as "auto". */
9062 break;
9063 }
9064 }
123dc839
DJ
9065
9066 /* Check any target description for validity. */
9779414d 9067 if (tdesc_has_registers (tdesc))
123dc839
DJ
9068 {
9069 /* For most registers we require GDB's default names; but also allow
9070 the numeric names for sp / lr / pc, as a convenience. */
9071 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
9072 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
9073 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
9074
9075 const struct tdesc_feature *feature;
58d6951d 9076 int valid_p;
123dc839 9077
9779414d 9078 feature = tdesc_find_feature (tdesc,
123dc839
DJ
9079 "org.gnu.gdb.arm.core");
9080 if (feature == NULL)
9779414d
DJ
9081 {
9082 feature = tdesc_find_feature (tdesc,
9083 "org.gnu.gdb.arm.m-profile");
9084 if (feature == NULL)
9085 return NULL;
9086 else
9087 is_m = 1;
9088 }
123dc839
DJ
9089
9090 tdesc_data = tdesc_data_alloc ();
9091
9092 valid_p = 1;
9093 for (i = 0; i < ARM_SP_REGNUM; i++)
9094 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9095 arm_register_names[i]);
9096 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9097 ARM_SP_REGNUM,
9098 arm_sp_names);
9099 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9100 ARM_LR_REGNUM,
9101 arm_lr_names);
9102 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9103 ARM_PC_REGNUM,
9104 arm_pc_names);
9779414d
DJ
9105 if (is_m)
9106 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9107 ARM_PS_REGNUM, "xpsr");
9108 else
9109 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9110 ARM_PS_REGNUM, "cpsr");
123dc839
DJ
9111
9112 if (!valid_p)
9113 {
9114 tdesc_data_cleanup (tdesc_data);
9115 return NULL;
9116 }
9117
9779414d 9118 feature = tdesc_find_feature (tdesc,
123dc839
DJ
9119 "org.gnu.gdb.arm.fpa");
9120 if (feature != NULL)
9121 {
9122 valid_p = 1;
9123 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
9124 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9125 arm_register_names[i]);
9126 if (!valid_p)
9127 {
9128 tdesc_data_cleanup (tdesc_data);
9129 return NULL;
9130 }
9131 }
ff6f572f
DJ
9132 else
9133 have_fpa_registers = 0;
9134
9779414d 9135 feature = tdesc_find_feature (tdesc,
ff6f572f
DJ
9136 "org.gnu.gdb.xscale.iwmmxt");
9137 if (feature != NULL)
9138 {
9139 static const char *const iwmmxt_names[] = {
9140 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9141 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9142 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9143 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9144 };
9145
9146 valid_p = 1;
9147 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
9148 valid_p
9149 &= tdesc_numbered_register (feature, tdesc_data, i,
9150 iwmmxt_names[i - ARM_WR0_REGNUM]);
9151
9152 /* Check for the control registers, but do not fail if they
9153 are missing. */
9154 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
9155 tdesc_numbered_register (feature, tdesc_data, i,
9156 iwmmxt_names[i - ARM_WR0_REGNUM]);
9157
9158 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
9159 valid_p
9160 &= tdesc_numbered_register (feature, tdesc_data, i,
9161 iwmmxt_names[i - ARM_WR0_REGNUM]);
9162
9163 if (!valid_p)
9164 {
9165 tdesc_data_cleanup (tdesc_data);
9166 return NULL;
9167 }
a56cc1ce
YQ
9168
9169 have_wmmx_registers = 1;
ff6f572f 9170 }
58d6951d
DJ
9171
9172 /* If we have a VFP unit, check whether the single precision registers
9173 are present. If not, then we will synthesize them as pseudo
9174 registers. */
9779414d 9175 feature = tdesc_find_feature (tdesc,
58d6951d
DJ
9176 "org.gnu.gdb.arm.vfp");
9177 if (feature != NULL)
9178 {
9179 static const char *const vfp_double_names[] = {
9180 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9181 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9182 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9183 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9184 };
9185
9186 /* Require the double precision registers. There must be either
9187 16 or 32. */
9188 valid_p = 1;
9189 for (i = 0; i < 32; i++)
9190 {
9191 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9192 ARM_D0_REGNUM + i,
9193 vfp_double_names[i]);
9194 if (!valid_p)
9195 break;
9196 }
2b9e5ea6
UW
9197 if (!valid_p && i == 16)
9198 valid_p = 1;
58d6951d 9199
2b9e5ea6
UW
9200 /* Also require FPSCR. */
9201 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9202 ARM_FPSCR_REGNUM, "fpscr");
9203 if (!valid_p)
58d6951d
DJ
9204 {
9205 tdesc_data_cleanup (tdesc_data);
9206 return NULL;
9207 }
9208
9209 if (tdesc_unnumbered_register (feature, "s0") == 0)
9210 have_vfp_pseudos = 1;
9211
330c6ca9 9212 vfp_register_count = i;
58d6951d
DJ
9213
9214 /* If we have VFP, also check for NEON. The architecture allows
9215 NEON without VFP (integer vector operations only), but GDB
9216 does not support that. */
9779414d 9217 feature = tdesc_find_feature (tdesc,
58d6951d
DJ
9218 "org.gnu.gdb.arm.neon");
9219 if (feature != NULL)
9220 {
9221 /* NEON requires 32 double-precision registers. */
9222 if (i != 32)
9223 {
9224 tdesc_data_cleanup (tdesc_data);
9225 return NULL;
9226 }
9227
9228 /* If there are quad registers defined by the stub, use
9229 their type; otherwise (normally) provide them with
9230 the default type. */
9231 if (tdesc_unnumbered_register (feature, "q0") == 0)
9232 have_neon_pseudos = 1;
9233
9234 have_neon = 1;
9235 }
9236 }
123dc839 9237 }
39bbf761 9238
28e97307
DJ
9239 /* If there is already a candidate, use it. */
9240 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
9241 best_arch != NULL;
9242 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
9243 {
b8926edc
DJ
9244 if (arm_abi != ARM_ABI_AUTO
9245 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
28e97307
DJ
9246 continue;
9247
b8926edc
DJ
9248 if (fp_model != ARM_FLOAT_AUTO
9249 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
28e97307
DJ
9250 continue;
9251
58d6951d
DJ
9252 /* There are various other properties in tdep that we do not
9253 need to check here: those derived from a target description,
9254 since gdbarches with a different target description are
9255 automatically disqualified. */
9256
9779414d
DJ
9257 /* Do check is_m, though, since it might come from the binary. */
9258 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
9259 continue;
9260
28e97307
DJ
9261 /* Found a match. */
9262 break;
9263 }
97e03143 9264
28e97307 9265 if (best_arch != NULL)
123dc839
DJ
9266 {
9267 if (tdesc_data != NULL)
9268 tdesc_data_cleanup (tdesc_data);
9269 return best_arch->gdbarch;
9270 }
28e97307 9271
8d749320 9272 tdep = XCNEW (struct gdbarch_tdep);
97e03143
RE
9273 gdbarch = gdbarch_alloc (&info, tdep);
9274
28e97307
DJ
9275 /* Record additional information about the architecture we are defining.
9276 These are gdbarch discriminators, like the OSABI. */
9277 tdep->arm_abi = arm_abi;
9278 tdep->fp_model = fp_model;
9779414d 9279 tdep->is_m = is_m;
ff6f572f 9280 tdep->have_fpa_registers = have_fpa_registers;
a56cc1ce 9281 tdep->have_wmmx_registers = have_wmmx_registers;
330c6ca9
YQ
9282 gdb_assert (vfp_register_count == 0
9283 || vfp_register_count == 16
9284 || vfp_register_count == 32);
9285 tdep->vfp_register_count = vfp_register_count;
58d6951d
DJ
9286 tdep->have_vfp_pseudos = have_vfp_pseudos;
9287 tdep->have_neon_pseudos = have_neon_pseudos;
9288 tdep->have_neon = have_neon;
08216dd7 9289
25f8c692
JL
9290 arm_register_g_packet_guesses (gdbarch);
9291
08216dd7 9292 /* Breakpoints. */
9d4fde75 9293 switch (info.byte_order_for_code)
67255d04
RE
9294 {
9295 case BFD_ENDIAN_BIG:
66e810cd
RE
9296 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
9297 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
9298 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
9299 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
9300
67255d04
RE
9301 break;
9302
9303 case BFD_ENDIAN_LITTLE:
66e810cd
RE
9304 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
9305 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
9306 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
9307 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
9308
67255d04
RE
9309 break;
9310
9311 default:
9312 internal_error (__FILE__, __LINE__,
edefbb7c 9313 _("arm_gdbarch_init: bad byte order for float format"));
67255d04
RE
9314 }
9315
d7b486e7
RE
9316 /* On ARM targets char defaults to unsigned. */
9317 set_gdbarch_char_signed (gdbarch, 0);
9318
cca44b1b
JB
9319 /* Note: for displaced stepping, this includes the breakpoint, and one word
9320 of additional scratch space. This setting isn't used for anything beside
9321 displaced stepping at present. */
9322 set_gdbarch_max_insn_length (gdbarch, 4 * DISPLACED_MODIFIED_INSNS);
9323
9df628e0 9324 /* This should be low enough for everything. */
97e03143 9325 tdep->lowest_pc = 0x20;
94c30b78 9326 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
97e03143 9327
7c00367c
MK
9328 /* The default, for both APCS and AAPCS, is to return small
9329 structures in registers. */
9330 tdep->struct_return = reg_struct_return;
9331
2dd604e7 9332 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
f53f0d0b 9333 set_gdbarch_frame_align (gdbarch, arm_frame_align);
39bbf761 9334
7eb89530
YQ
9335 if (is_m)
9336 set_gdbarch_code_of_frame_writable (gdbarch, arm_code_of_frame_writable);
9337
756fe439
DJ
9338 set_gdbarch_write_pc (gdbarch, arm_write_pc);
9339
148754e5 9340 /* Frame handling. */
a262aec2 9341 set_gdbarch_dummy_id (gdbarch, arm_dummy_id);
eb5492fa
DJ
9342 set_gdbarch_unwind_pc (gdbarch, arm_unwind_pc);
9343 set_gdbarch_unwind_sp (gdbarch, arm_unwind_sp);
9344
eb5492fa 9345 frame_base_set_default (gdbarch, &arm_normal_base);
148754e5 9346
34e8f22d 9347 /* Address manipulation. */
34e8f22d
RE
9348 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
9349
34e8f22d
RE
9350 /* Advance PC across function entry code. */
9351 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
9352
c9cf6e20
MG
9353 /* Detect whether PC is at a point where the stack has been destroyed. */
9354 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
4024ca99 9355
190dce09
UW
9356 /* Skip trampolines. */
9357 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
9358
34e8f22d
RE
9359 /* The stack grows downward. */
9360 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
9361
9362 /* Breakpoint manipulation. */
9363 set_gdbarch_breakpoint_from_pc (gdbarch, arm_breakpoint_from_pc);
177321bd
DJ
9364 set_gdbarch_remote_breakpoint_from_pc (gdbarch,
9365 arm_remote_breakpoint_from_pc);
34e8f22d
RE
9366
9367 /* Information about registers, etc. */
34e8f22d
RE
9368 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
9369 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
ff6f572f 9370 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
7a5ea0d4 9371 set_gdbarch_register_type (gdbarch, arm_register_type);
54483882 9372 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
34e8f22d 9373
ff6f572f
DJ
9374 /* This "info float" is FPA-specific. Use the generic version if we
9375 do not have FPA. */
9376 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
9377 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
9378
26216b98 9379 /* Internal <-> external register number maps. */
ff6f572f 9380 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
26216b98
AC
9381 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
9382
34e8f22d
RE
9383 set_gdbarch_register_name (gdbarch, arm_register_name);
9384
9385 /* Returning results. */
2af48f68 9386 set_gdbarch_return_value (gdbarch, arm_return_value);
34e8f22d 9387
03d48a7d
RE
9388 /* Disassembly. */
9389 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
9390
34e8f22d
RE
9391 /* Minsymbol frobbing. */
9392 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
9393 set_gdbarch_coff_make_msymbol_special (gdbarch,
9394 arm_coff_make_msymbol_special);
60c5725c 9395 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
34e8f22d 9396
f9d67f43
DJ
9397 /* Thumb-2 IT block support. */
9398 set_gdbarch_adjust_breakpoint_address (gdbarch,
9399 arm_adjust_breakpoint_address);
9400
0d5de010
DJ
9401 /* Virtual tables. */
9402 set_gdbarch_vbit_in_delta (gdbarch, 1);
9403
97e03143 9404 /* Hook in the ABI-specific overrides, if they have been registered. */
4be87837 9405 gdbarch_init_osabi (info, gdbarch);
97e03143 9406
b39cc962
DJ
9407 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
9408
eb5492fa 9409 /* Add some default predicates. */
2ae28aa9
YQ
9410 if (is_m)
9411 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
a262aec2
DJ
9412 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
9413 dwarf2_append_unwinders (gdbarch);
0e9e9abd 9414 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
779aa56f 9415 frame_unwind_append_unwinder (gdbarch, &arm_epilogue_frame_unwind);
a262aec2 9416 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
eb5492fa 9417
97e03143
RE
9418 /* Now we have tuned the configuration, set a few final things,
9419 based on what the OS ABI has told us. */
9420
b8926edc
DJ
9421 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
9422 binaries are always marked. */
9423 if (tdep->arm_abi == ARM_ABI_AUTO)
9424 tdep->arm_abi = ARM_ABI_APCS;
9425
e3039479
UW
9426 /* Watchpoints are not steppable. */
9427 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
9428
b8926edc
DJ
9429 /* We used to default to FPA for generic ARM, but almost nobody
9430 uses that now, and we now provide a way for the user to force
9431 the model. So default to the most useful variant. */
9432 if (tdep->fp_model == ARM_FLOAT_AUTO)
9433 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
9434
9df628e0
RE
9435 if (tdep->jb_pc >= 0)
9436 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
9437
08216dd7 9438 /* Floating point sizes and format. */
8da61cc4 9439 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
b8926edc 9440 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
08216dd7 9441 {
8da61cc4
DJ
9442 set_gdbarch_double_format
9443 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9444 set_gdbarch_long_double_format
9445 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9446 }
9447 else
9448 {
9449 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
9450 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
08216dd7
RE
9451 }
9452
58d6951d
DJ
9453 if (have_vfp_pseudos)
9454 {
9455 /* NOTE: These are the only pseudo registers used by
9456 the ARM target at the moment. If more are added, a
9457 little more care in numbering will be needed. */
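      /* Concretely (as implied by the code above): s0..s31 occupy pseudo
         register numbers num_regs .. num_regs + 31 and, when present,
         q0..q15 occupy num_regs + 32 .. num_regs + 47, matching
         arm_register_name, arm_pseudo_read and arm_pseudo_write.  */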
9458
9459 int num_pseudos = 32;
9460 if (have_neon_pseudos)
9461 num_pseudos += 16;
9462 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
9463 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
9464 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
9465 }
9466
123dc839 9467 if (tdesc_data)
58d6951d
DJ
9468 {
9469 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
9470
9779414d 9471 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
58d6951d
DJ
9472
9473 /* Override tdesc_register_type to adjust the types of VFP
9474 registers for NEON. */
9475 set_gdbarch_register_type (gdbarch, arm_register_type);
9476 }
123dc839
DJ
9477
9478 /* Add standard register aliases. We add aliases even for those
 9479 names which are used by the current architecture - it's simpler,
9480 and does no harm, since nothing ever lists user registers. */
9481 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
9482 user_reg_add (gdbarch, arm_register_aliases[i].name,
9483 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
9484
39bbf761
RE
9485 return gdbarch;
9486}
9487
97e03143 9488static void
2af46ca0 9489arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
97e03143 9490{
2af46ca0 9491 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
97e03143
RE
9492
9493 if (tdep == NULL)
9494 return;
9495
edefbb7c 9496 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
97e03143
RE
9497 (unsigned long) tdep->lowest_pc);
9498}
9499
a78f21af
AC
9500extern initialize_file_ftype _initialize_arm_tdep; /* -Wmissing-prototypes */
9501
c906108c 9502void
ed9a39eb 9503_initialize_arm_tdep (void)
c906108c 9504{
bc90b915
FN
9505 struct ui_file *stb;
9506 long length;
53904c9e
AC
9507 const char *setname;
9508 const char *setdesc;
4bd7b427 9509 const char *const *regnames;
bec2ab5a 9510 int i;
bc90b915 9511 static char *helptext;
edefbb7c
AC
9512 char regdesc[1024], *rdptr = regdesc;
9513 size_t rest = sizeof (regdesc);
085dd6e6 9514
42cf1509 9515 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
97e03143 9516
60c5725c 9517 arm_objfile_data_key
c1bd65d0 9518 = register_objfile_data_with_cleanup (NULL, arm_objfile_data_free);
60c5725c 9519
0e9e9abd
UW
9520 /* Add ourselves to objfile event chain. */
9521 observer_attach_new_objfile (arm_exidx_new_objfile);
9522 arm_exidx_data_key
9523 = register_objfile_data_with_cleanup (NULL, arm_exidx_data_free);
9524
70f80edf
JT
9525 /* Register an ELF OS ABI sniffer for ARM binaries. */
9526 gdbarch_register_osabi_sniffer (bfd_arch_arm,
9527 bfd_target_elf_flavour,
9528 arm_elf_osabi_sniffer);
9529
9779414d
DJ
9530 /* Initialize the standard target descriptions. */
9531 initialize_tdesc_arm_with_m ();
25f8c692 9532 initialize_tdesc_arm_with_m_fpa_layout ();
3184d3f9 9533 initialize_tdesc_arm_with_m_vfp_d16 ();
ef7e8358
UW
9534 initialize_tdesc_arm_with_iwmmxt ();
9535 initialize_tdesc_arm_with_vfpv2 ();
9536 initialize_tdesc_arm_with_vfpv3 ();
9537 initialize_tdesc_arm_with_neon ();
9779414d 9538
94c30b78 9539 /* Get the number of possible sets of register names defined in opcodes. */
afd7eef0
RE
9540 num_disassembly_options = get_arm_regname_num_options ();
9541
9542 /* Add root prefix command for all "set arm"/"show arm" commands. */
9543 add_prefix_cmd ("arm", no_class, set_arm_command,
edefbb7c 9544 _("Various ARM-specific commands."),
afd7eef0
RE
9545 &setarmcmdlist, "set arm ", 0, &setlist);
9546
9547 add_prefix_cmd ("arm", no_class, show_arm_command,
edefbb7c 9548 _("Various ARM-specific commands."),
afd7eef0 9549 &showarmcmdlist, "show arm ", 0, &showlist);
bc90b915 9550
94c30b78 9551 /* Sync the opcode insn printer with our register viewer. */
bc90b915 9552 parse_arm_disassembler_option ("reg-names-std");
c5aa993b 9553
eefe576e
AC
9554 /* Initialize the array that will be passed to
9555 add_setshow_enum_cmd(). */
8d749320
SM
9556 valid_disassembly_styles = XNEWVEC (const char *,
9557 num_disassembly_options + 1);
afd7eef0 9558 for (i = 0; i < num_disassembly_options; i++)
bc90b915 9559 {
bec2ab5a 9560 get_arm_regnames (i, &setname, &setdesc, &regnames);
afd7eef0 9561 valid_disassembly_styles[i] = setname;
edefbb7c
AC
9562 length = snprintf (rdptr, rest, "%s - %s\n", setname, setdesc);
9563 rdptr += length;
9564 rest -= length;
123dc839
DJ
9565 /* When we find the default names, tell the disassembler to use
9566 them. */
bc90b915
FN
9567 if (!strcmp (setname, "std"))
9568 {
afd7eef0 9569 disassembly_style = setname;
bc90b915
FN
9570 set_arm_regname_option (i);
9571 }
9572 }
94c30b78 9573 /* Mark the end of valid options. */
afd7eef0 9574 valid_disassembly_styles[num_disassembly_options] = NULL;
c906108c 9575
edefbb7c
AC
9576 /* Create the help text. */
9577 stb = mem_fileopen ();
9578 fprintf_unfiltered (stb, "%s%s%s",
9579 _("The valid values are:\n"),
9580 regdesc,
9581 _("The default is \"std\"."));
759ef836 9582 helptext = ui_file_xstrdup (stb, NULL);
bc90b915 9583 ui_file_delete (stb);
ed9a39eb 9584
edefbb7c
AC
9585 add_setshow_enum_cmd("disassembler", no_class,
9586 valid_disassembly_styles, &disassembly_style,
9587 _("Set the disassembly style."),
9588 _("Show the disassembly style."),
9589 helptext,
2c5b56ce 9590 set_disassembly_style_sfunc,
0963b4bd
MS
9591 NULL, /* FIXME: i18n: The disassembly style is
9592 \"%s\". */
7376b4c2 9593 &setarmcmdlist, &showarmcmdlist);
edefbb7c
AC
9594
9595 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
9596 _("Set usage of ARM 32-bit mode."),
9597 _("Show usage of ARM 32-bit mode."),
9598 _("When off, a 26-bit PC will be used."),
2c5b56ce 9599 NULL,
0963b4bd
MS
9600 NULL, /* FIXME: i18n: Usage of ARM 32-bit
9601 mode is %s. */
26304000 9602 &setarmcmdlist, &showarmcmdlist);
c906108c 9603
fd50bc42 9604 /* Add a command to allow the user to force the FPU model. */
edefbb7c
AC
9605 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
9606 _("Set the floating point type."),
9607 _("Show the floating point type."),
 9608 _("auto - Determine the FP type from the OS-ABI.\n\
9609softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
9610fpa - FPA co-processor (GCC compiled).\n\
9611softvfp - Software FP with pure-endian doubles.\n\
9612vfp - VFP co-processor."),
edefbb7c 9613 set_fp_model_sfunc, show_fp_model,
7376b4c2 9614 &setarmcmdlist, &showarmcmdlist);
fd50bc42 9615
28e97307
DJ
9616 /* Add a command to allow the user to force the ABI. */
9617 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
9618 _("Set the ABI."),
9619 _("Show the ABI."),
9620 NULL, arm_set_abi, arm_show_abi,
9621 &setarmcmdlist, &showarmcmdlist);
9622
0428b8f5
DJ
9623 /* Add two commands to allow the user to force the assumed
9624 execution mode. */
9625 add_setshow_enum_cmd ("fallback-mode", class_support,
9626 arm_mode_strings, &arm_fallback_mode_string,
9627 _("Set the mode assumed when symbols are unavailable."),
9628 _("Show the mode assumed when symbols are unavailable."),
9629 NULL, NULL, arm_show_fallback_mode,
9630 &setarmcmdlist, &showarmcmdlist);
9631 add_setshow_enum_cmd ("force-mode", class_support,
9632 arm_mode_strings, &arm_force_mode_string,
9633 _("Set the mode assumed even when symbols are available."),
9634 _("Show the mode assumed even when symbols are available."),
9635 NULL, NULL, arm_show_force_mode,
9636 &setarmcmdlist, &showarmcmdlist);
9637
6529d2dd 9638 /* Debugging flag. */
edefbb7c
AC
9639 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
9640 _("Set ARM debugging."),
9641 _("Show ARM debugging."),
9642 _("When on, arm-specific debugging is enabled."),
2c5b56ce 9643 NULL,
 7915a72c 9644 NULL, /* FIXME: i18n: "ARM debugging is %s."  */
26304000 9645 &setdebuglist, &showdebuglist);
c906108c 9646}
72508ac0
PO
9647
9648/* ARM-reversible process record data structures. */
9649
9650#define ARM_INSN_SIZE_BYTES 4
9651#define THUMB_INSN_SIZE_BYTES 2
9652#define THUMB2_INSN_SIZE_BYTES 4
9653
9654
71e396f9
LM
9655/* Position of the bit within a 32-bit ARM instruction
9656 that defines whether the instruction is a load or store. */
72508ac0
PO
9657#define INSN_S_L_BIT_NUM 20
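/* Illustrative: bit (insn, INSN_S_L_BIT_NUM) is 1 for the load form of a
   load/store encoding (e.g. LDR) and 0 for the corresponding store
   (e.g. STR).  */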
9658
9659#define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
9660 do \
9661 { \
9662 unsigned int reg_len = LENGTH; \
9663 if (reg_len) \
9664 { \
9665 REGS = XNEWVEC (uint32_t, reg_len); \
9666 memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
9667 } \
9668 } \
9669 while (0)
9670
9671#define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
9672 do \
9673 { \
9674 unsigned int mem_len = LENGTH; \
9675 if (mem_len) \
9676 { \
9677 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
9678 memcpy(&MEMS->len, &RECORD_BUF[0], \
9679 sizeof(struct arm_mem_r) * LENGTH); \
9680 } \
9681 } \
9682 while (0)
9683
 9684/* Checks whether insn is already recorded or yet to be decoded (boolean expression). */
9685#define INSN_RECORDED(ARM_RECORD) \
9686 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
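
/* Typical use of the macros above in the record handlers that follow
   (an illustrative sketch, not a quotation from this file):

     uint32_t record_buf[8];
     record_buf[0] = ARM_PS_REGNUM;
     arm_insn_r->reg_rec_count = 1;
     REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
                record_buf);  */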
9687
9688/* ARM memory record structure. */
9689struct arm_mem_r
9690{
9691 uint32_t len; /* Record length. */
bfbbec00 9692 uint32_t addr; /* Memory address. */
72508ac0
PO
9693};
9694
9695/* ARM instruction record contains opcode of current insn
9696 and execution state (before entry to decode_insn()),
9697 contains list of to-be-modified registers and
9698 memory blocks (on return from decode_insn()). */
9699
9700typedef struct insn_decode_record_t
9701{
9702 struct gdbarch *gdbarch;
9703 struct regcache *regcache;
9704 CORE_ADDR this_addr; /* Address of the insn being decoded. */
9705 uint32_t arm_insn; /* Should accommodate thumb. */
9706 uint32_t cond; /* Condition code. */
9707 uint32_t opcode; /* Insn opcode. */
9708 uint32_t decode; /* Insn decode bits. */
9709 uint32_t mem_rec_count; /* No of mem records. */
9710 uint32_t reg_rec_count; /* No of reg records. */
9711 uint32_t *arm_regs; /* Registers to be saved for this record. */
9712 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
9713} insn_decode_record;
9714
9715
9716/* Checks ARM SBZ and SBO mandatory fields. */
9717
9718static int
9719sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
9720{
9721 uint32_t ones = bits (insn, bit_num - 1, (bit_num -1) + (len - 1));
9722
9723 if (!len)
9724 return 1;
9725
9726 if (!sbo)
9727 ones = ~ones;
9728
9729 while (ones)
9730 {
9731 if (!(ones & sbo))
9732 {
9733 return 0;
9734 }
9735 ones = ones >> 1;
9736 }
9737 return 1;
9738}
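
/* Illustrative usage: a call such as sbo_sbz (insn, 13, 4, 1) is meant
   to verify that the four-bit field starting at bit 12 of INSN is a
   should-be-one field, while passing 0 as the final argument verifies a
   should-be-zero field.  */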
9739
c6ec2b30
OJ
9740enum arm_record_result
9741{
9742 ARM_RECORD_SUCCESS = 0,
9743 ARM_RECORD_FAILURE = 1
9744};
9745
72508ac0
PO
9746typedef enum
9747{
9748 ARM_RECORD_STRH=1,
9749 ARM_RECORD_STRD
9750} arm_record_strx_t;
9751
9752typedef enum
9753{
9754 ARM_RECORD=1,
9755 THUMB_RECORD,
9756 THUMB2_RECORD
9757} record_type_t;
9758
9759
9760static int
9761arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
9762 uint32_t *record_buf_mem, arm_record_strx_t str_type)
9763{
9764
9765 struct regcache *reg_cache = arm_insn_r->regcache;
9766 ULONGEST u_regval[2]= {0};
9767
9768 uint32_t reg_src1 = 0, reg_src2 = 0;
 9769 uint32_t immed_high = 0, immed_low = 0, offset_8 = 0, tgt_mem_addr = 0;
72508ac0
PO
9770
9771 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
9772 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
72508ac0
PO
9773
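  /* Interpretive note (not from the original sources): for these
     miscellaneous load/store encodings, bits 24..21 carry the P, U, I
     and W addressing-mode flags, which is why the opcode values tested
     below (14, 10, 12, 8, ...) select immediate vs. register offset and
     add vs. subtract.  */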
9774 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
9775 {
9776 /* 1) Handle misc store, immediate offset. */
9777 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9778 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9779 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9780 regcache_raw_read_unsigned (reg_cache, reg_src1,
9781 &u_regval[0]);
9782 if (ARM_PC_REGNUM == reg_src1)
9783 {
 9784 /* If R15 was used as Rn, the value read is the current PC+8. */
9785 u_regval[0] = u_regval[0] + 8;
9786 }
9787 offset_8 = (immed_high << 4) | immed_low;
9788 /* Calculate target store address. */
9789 if (14 == arm_insn_r->opcode)
9790 {
9791 tgt_mem_addr = u_regval[0] + offset_8;
9792 }
9793 else
9794 {
9795 tgt_mem_addr = u_regval[0] - offset_8;
9796 }
9797 if (ARM_RECORD_STRH == str_type)
9798 {
9799 record_buf_mem[0] = 2;
9800 record_buf_mem[1] = tgt_mem_addr;
9801 arm_insn_r->mem_rec_count = 1;
9802 }
9803 else if (ARM_RECORD_STRD == str_type)
9804 {
9805 record_buf_mem[0] = 4;
9806 record_buf_mem[1] = tgt_mem_addr;
9807 record_buf_mem[2] = 4;
9808 record_buf_mem[3] = tgt_mem_addr + 4;
9809 arm_insn_r->mem_rec_count = 2;
9810 }
9811 }
9812 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
9813 {
9814 /* 2) Store, register offset. */
9815 /* Get Rm. */
9816 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9817 /* Get Rn. */
9818 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9819 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9820 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9821 if (15 == reg_src2)
9822 {
 9823 /* If R15 was used as Rn, the value read is the current PC+8. */
9824 u_regval[0] = u_regval[0] + 8;
9825 }
9826 /* Calculate target store address, Rn +/- Rm, register offset. */
9827 if (12 == arm_insn_r->opcode)
9828 {
9829 tgt_mem_addr = u_regval[0] + u_regval[1];
9830 }
9831 else
9832 {
9833 tgt_mem_addr = u_regval[1] - u_regval[0];
9834 }
9835 if (ARM_RECORD_STRH == str_type)
9836 {
9837 record_buf_mem[0] = 2;
9838 record_buf_mem[1] = tgt_mem_addr;
9839 arm_insn_r->mem_rec_count = 1;
9840 }
9841 else if (ARM_RECORD_STRD == str_type)
9842 {
9843 record_buf_mem[0] = 4;
9844 record_buf_mem[1] = tgt_mem_addr;
9845 record_buf_mem[2] = 4;
9846 record_buf_mem[3] = tgt_mem_addr + 4;
9847 arm_insn_r->mem_rec_count = 2;
9848 }
9849 }
9850 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
9851 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9852 {
9853 /* 3) Store, immediate pre-indexed. */
9854 /* 5) Store, immediate post-indexed. */
9855 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9856 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9857 offset_8 = (immed_high << 4) | immed_low;
9858 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9859 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
 9860 /* Calculate target store address, Rn +/- offset_8, immediate offset. */
9861 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9862 {
9863 tgt_mem_addr = u_regval[0] + offset_8;
9864 }
9865 else
9866 {
9867 tgt_mem_addr = u_regval[0] - offset_8;
9868 }
9869 if (ARM_RECORD_STRH == str_type)
9870 {
9871 record_buf_mem[0] = 2;
9872 record_buf_mem[1] = tgt_mem_addr;
9873 arm_insn_r->mem_rec_count = 1;
9874 }
9875 else if (ARM_RECORD_STRD == str_type)
9876 {
9877 record_buf_mem[0] = 4;
9878 record_buf_mem[1] = tgt_mem_addr;
9879 record_buf_mem[2] = 4;
9880 record_buf_mem[3] = tgt_mem_addr + 4;
9881 arm_insn_r->mem_rec_count = 2;
9882 }
9883 /* Record Rn also as it changes. */
9884 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9885 arm_insn_r->reg_rec_count = 1;
9886 }
9887 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
9888 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9889 {
9890 /* 4) Store, register pre-indexed. */
 9891 /* 6) Store, register post-indexed. */
9892 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9893 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9894 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9895 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9896 /* Calculate target store address, Rn +/- Rm, register offset. */
9897 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9898 {
9899 tgt_mem_addr = u_regval[0] + u_regval[1];
9900 }
9901 else
9902 {
9903 tgt_mem_addr = u_regval[1] - u_regval[0];
9904 }
9905 if (ARM_RECORD_STRH == str_type)
9906 {
9907 record_buf_mem[0] = 2;
9908 record_buf_mem[1] = tgt_mem_addr;
9909 arm_insn_r->mem_rec_count = 1;
9910 }
9911 else if (ARM_RECORD_STRD == str_type)
9912 {
9913 record_buf_mem[0] = 4;
9914 record_buf_mem[1] = tgt_mem_addr;
9915 record_buf_mem[2] = 4;
9916 record_buf_mem[3] = tgt_mem_addr + 4;
9917 arm_insn_r->mem_rec_count = 2;
9918 }
9919 /* Record Rn also as it changes. */
9920 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9921 arm_insn_r->reg_rec_count = 1;
9922 }
9923 return 0;
9924}
9925
9926/* Handling ARM extension space insns. */
9927
9928static int
9929arm_record_extension_space (insn_decode_record *arm_insn_r)
9930{
9931 uint32_t ret = 0; /* Return value: -1:record failure ; 0:success */
9932 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
9933 uint32_t record_buf[8], record_buf_mem[8];
9934 uint32_t reg_src1 = 0;
72508ac0
PO
9935 struct regcache *reg_cache = arm_insn_r->regcache;
9936 ULONGEST u_regval = 0;
9937
9938 gdb_assert (!INSN_RECORDED(arm_insn_r));
9939 /* Handle unconditional insn extension space. */
9940
9941 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
9942 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9943 if (arm_insn_r->cond)
9944 {
 9945 /* PLD has no effect on architectural state, it just affects
9946 the caches. */
9947 if (5 == ((opcode1 & 0xE0) >> 5))
9948 {
9949 /* BLX(1) */
9950 record_buf[0] = ARM_PS_REGNUM;
9951 record_buf[1] = ARM_LR_REGNUM;
9952 arm_insn_r->reg_rec_count = 2;
9953 }
9954 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
9955 }
9956
9957
9958 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
9959 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
9960 {
9961 ret = -1;
9962 /* Undefined instruction on ARM V5; need to handle if later
9963 versions define it. */
9964 }
9965
9966 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
9967 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9968 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
9969
9970 /* Handle arithmetic insn extension space. */
9971 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
9972 && !INSN_RECORDED(arm_insn_r))
9973 {
9974 /* Handle MLA(S) and MUL(S). */
9975 if (0 <= insn_op1 && 3 >= insn_op1)
9976 {
9977 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9978 record_buf[1] = ARM_PS_REGNUM;
9979 arm_insn_r->reg_rec_count = 2;
9980 }
9981 else if (4 <= insn_op1 && 15 >= insn_op1)
9982 {
9983 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
9984 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
9985 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
9986 record_buf[2] = ARM_PS_REGNUM;
9987 arm_insn_r->reg_rec_count = 3;
9988 }
9989 }
9990
9991 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
9992 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
9993 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
9994
9995 /* Handle control insn extension space. */
9996
9997 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
9998 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
9999 {
10000 if (!bit (arm_insn_r->arm_insn,25))
10001 {
10002 if (!bits (arm_insn_r->arm_insn, 4, 7))
10003 {
10004 if ((0 == insn_op1) || (2 == insn_op1))
10005 {
10006 /* MRS. */
10007 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10008 arm_insn_r->reg_rec_count = 1;
10009 }
10010 else if (1 == insn_op1)
10011 {
10012 /* CPSR is going to be changed. */
10013 record_buf[0] = ARM_PS_REGNUM;
10014 arm_insn_r->reg_rec_count = 1;
10015 }
10016 else if (3 == insn_op1)
10017 {
10018 /* SPSR is going to be changed. */
10019 /* We need to get SPSR value, which is yet to be done. */
10020 return -1;
10021 }
10022 }
10023 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
10024 {
10025 if (1 == insn_op1)
10026 {
10027 /* BX. */
10028 record_buf[0] = ARM_PS_REGNUM;
10029 arm_insn_r->reg_rec_count = 1;
10030 }
10031 else if (3 == insn_op1)
10032 {
10033 /* CLZ. */
10034 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10035 arm_insn_r->reg_rec_count = 1;
10036 }
10037 }
10038 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
10039 {
10040 /* BLX. */
10041 record_buf[0] = ARM_PS_REGNUM;
10042 record_buf[1] = ARM_LR_REGNUM;
10043 arm_insn_r->reg_rec_count = 2;
10044 }
10045 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
10046 {
10047 /* QADD, QSUB, QDADD, QDSUB */
10048 record_buf[0] = ARM_PS_REGNUM;
10049 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10050 arm_insn_r->reg_rec_count = 2;
10051 }
10052 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
10053 {
10054 /* BKPT. */
10055 record_buf[0] = ARM_PS_REGNUM;
10056 record_buf[1] = ARM_LR_REGNUM;
10057 arm_insn_r->reg_rec_count = 2;
10058
10059 /* Save SPSR also; how? */
10060 return -1;
10061 }
10062 else if(8 == bits (arm_insn_r->arm_insn, 4, 7)
10063 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
10064 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
10065 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
10066 )
10067 {
10068 if (0 == insn_op1 || 1 == insn_op1)
10069 {
10070 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
10071 /* We don't optimize for SMULW<y>, where we
10072 need only Rd. */
10073 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10074 record_buf[1] = ARM_PS_REGNUM;
10075 arm_insn_r->reg_rec_count = 2;
10076 }
10077 else if (2 == insn_op1)
10078 {
10079 /* SMLAL<x><y>. */
10080 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10081 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
10082 arm_insn_r->reg_rec_count = 2;
10083 }
10084 else if (3 == insn_op1)
10085 {
10086 /* SMUL<x><y>. */
10087 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10088 arm_insn_r->reg_rec_count = 1;
10089 }
10090 }
10091 }
10092 else
10093 {
10094 /* MSR : immediate form. */
10095 if (1 == insn_op1)
10096 {
10097 /* CPSR is going to be changed. */
10098 record_buf[0] = ARM_PS_REGNUM;
10099 arm_insn_r->reg_rec_count = 1;
10100 }
10101 else if (3 == insn_op1)
10102 {
10103 /* SPSR is going to be changed. */
10104 /* We need to get the SPSR value, which is yet to be done. */
10105 return -1;
10106 }
10107 }
10108 }
10109
10110 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10111 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
10112 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
10113
10114 /* Handle load/store insn extension space. */
10115
10116 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
10117 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
10118 && !INSN_RECORDED(arm_insn_r))
10119 {
10120 /* SWP/SWPB. */
10121 if (0 == insn_op1)
10122 {
10123 /* These insns change both registers and memory. */
10124 /* SWP or SWPB insn. */
10125 /* Get memory address given by Rn. */
10126 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10127 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
10128 /* SWP insn? It swaps a word. */
10129 if (8 == arm_insn_r->opcode)
10130 {
10131 record_buf_mem[0] = 4;
10132 }
10133 else
10134 {
10135 /* SWPB insn, swaps only byte. */
10136 record_buf_mem[0] = 1;
10137 }
10138 record_buf_mem[1] = u_regval;
10139 arm_insn_r->mem_rec_count = 1;
10140 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10141 arm_insn_r->reg_rec_count = 1;
10142 }
10143 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10144 {
10145 /* STRH. */
10146 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10147 ARM_RECORD_STRH);
10148 }
10149 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10150 {
10151 /* LDRD. */
10152 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10153 record_buf[1] = record_buf[0] + 1;
10154 arm_insn_r->reg_rec_count = 2;
10155 }
10156 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10157 {
10158 /* STRD. */
10159 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10160 ARM_RECORD_STRD);
10161 }
10162 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
10163 {
10164 /* LDRH, LDRSB, LDRSH. */
10165 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10166 arm_insn_r->reg_rec_count = 1;
10167 }
10168
10169 }
10170
10171 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
10172 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
10173 && !INSN_RECORDED(arm_insn_r))
10174 {
10175 ret = -1;
10176 /* Handle coprocessor insn extension space. */
10177 }
10178
10179 /* To be done for ARMv5 and later; as of now we return -1. */
10180 if (-1 == ret)
ca92db2d 10181 return ret;
10182
10183 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10184 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10185
10186 return ret;
10187}
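
/* A minimal illustrative sketch (not the actual gdb helpers) of what
   bit () and bits () are assumed to do throughout this recording code:
   extract a single bit, or the inclusive bit range [START, END], of an
   instruction word.  The demo_* names are hypothetical; the block is
   compiled out.  */
#if 0
static uint32_t
demo_bit (uint32_t insn, int n)
{
  return (insn >> n) & 1u;
}

static uint32_t
demo_bits (uint32_t insn, int start, int end)   /* END - START < 31.  */
{
  return (insn >> start) & ((1u << (end - start + 1)) - 1u);
}

/* For example, demo_bits (insn, 12, 15) yields the 4-bit Rd field and
   demo_bits (insn, 16, 19) yields Rn of an ARM data-processing insn.  */
#endif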
10188
10189/* Handling opcode 000 insns. */
10190
10191static int
10192arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
10193{
10194 struct regcache *reg_cache = arm_insn_r->regcache;
10195 uint32_t record_buf[8], record_buf_mem[8];
10196 ULONGEST u_regval[2] = {0};
10197
bec2ab5a 10198 uint32_t reg_src1 = 0, reg_dest = 0;
10199 uint32_t opcode1 = 0;
10200
10201 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10202 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10203 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10204
10205 /* Data processing insn /multiply insn. */
10206 if (9 == arm_insn_r->decode
10207 && ((4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10208 || (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)))
10209 {
10210 /* Handle multiply instructions. */
10211 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
10212 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
10213 {
10214 /* Handle MLA and MUL. */
10215 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10216 record_buf[1] = ARM_PS_REGNUM;
10217 arm_insn_r->reg_rec_count = 2;
10218 }
10219 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10220 {
10221 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
10222 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10223 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10224 record_buf[2] = ARM_PS_REGNUM;
10225 arm_insn_r->reg_rec_count = 3;
10226 }
10227 }
10228 else if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10229 && (11 == arm_insn_r->decode || 13 == arm_insn_r->decode))
10230 {
10231 /* Handle misc load insns, as the 20th bit (L = 1) is set. */
10232 /* The LDR insn is capable of branching: if MOV LR, PC is
10233 preceded by an LDR insn having R15 as Rn, in that case it
10234 emulates a branch and link insn, and hence we need to save
10235 CPSR and PC as well. I am not sure this is the right place;
10236 an opcode = 010 LDR insn makes this happen if R15 was
10237 used. */
10238 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10239 if (15 != reg_dest)
10240 {
10241 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10242 arm_insn_r->reg_rec_count = 1;
10243 }
10244 else
10245 {
10246 record_buf[0] = reg_dest;
10247 record_buf[1] = ARM_PS_REGNUM;
10248 arm_insn_r->reg_rec_count = 2;
10249 }
10250 }
10251 else if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10252 && sbo_sbz (arm_insn_r->arm_insn, 5, 12, 0)
10253 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10254 && 2 == bits (arm_insn_r->arm_insn, 20, 21))
10255 {
10256 /* Handle MSR insn. */
10257 if (9 == arm_insn_r->opcode)
10258 {
10259 /* CPSR is going to be changed. */
10260 record_buf[0] = ARM_PS_REGNUM;
10261 arm_insn_r->reg_rec_count = 1;
10262 }
10263 else
10264 {
10265 /* SPSR is going to be changed. */
10266 /* How to read SPSR value? */
10267 return -1;
10268 }
10269 }
10270 else if (9 == arm_insn_r->decode
10271 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10272 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10273 {
10274 /* Handling SWP, SWPB. */
10275 /* These insns change both registers and memory. */
10276 /* SWP or SWPB insn. */
10277
10278 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10279 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10280 /* SWP insn? It swaps a word. */
10281 if (8 == arm_insn_r->opcode)
10282 {
10283 record_buf_mem[0] = 4;
10284 }
10285 else
10286 {
10287 /* SWPB insn, swaps only byte. */
10288 record_buf_mem[0] = 1;
10289 }
10290 record_buf_mem[1] = u_regval[0];
10291 arm_insn_r->mem_rec_count = 1;
10292 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10293 arm_insn_r->reg_rec_count = 1;
10294 }
10295 else if (3 == arm_insn_r->decode && 0x12 == opcode1
10296 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10297 {
10298 /* Handle BLX, branch and link/exchange. */
10299 if (9 == arm_insn_r->opcode)
10300 {
10301 /* Branch mode is chosen by setting the T bit of CPSR to bit[0] of Rm,
10302 and R14 stores the return address. */
10303 record_buf[0] = ARM_PS_REGNUM;
10304 record_buf[1] = ARM_LR_REGNUM;
10305 arm_insn_r->reg_rec_count = 2;
10306 }
10307 }
10308 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
10309 {
10310 /* Handle enhanced software breakpoint insn, BKPT. */
10311 /* CPSR is changed so that execution continues in ARM state, with
10312 normal interrupts disabled, entering abort mode. */
10313 /* PC is set according to the high vector configuration. */
10314 /* If the user hits a breakpoint and types reverse, we need to
10315 go back with the previous CPSR and
10316 Program Counter. */
10317 record_buf[0] = ARM_PS_REGNUM;
10318 record_buf[1] = ARM_LR_REGNUM;
10319 arm_insn_r->reg_rec_count = 2;
10320
10321 /* Save SPSR also; how? */
10322 return -1;
10323 }
10324 else if (11 == arm_insn_r->decode
10325 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10326 {
10327 /* Handle enhanced store insns and DSP insns (e.g. LDRD). */
10328
10329 /* Handle str(x) insn */
10330 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10331 ARM_RECORD_STRH);
10332 }
10333 else if (1 == arm_insn_r->decode && 0x12 == opcode1
10334 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10335 {
10336 /* Handle BX, branch and link/exchange. */
10337 /* Branch mode is chosen by setting the T bit of CPSR to bit[0] of Rm. */
10338 record_buf[0] = ARM_PS_REGNUM;
10339 arm_insn_r->reg_rec_count = 1;
10340 }
10341 else if (1 == arm_insn_r->decode && 0x16 == opcode1
10342 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
10343 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
10344 {
10345 /* Count leading zeros: CLZ. */
10346 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10347 arm_insn_r->reg_rec_count = 1;
10348 }
10349 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10350 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10351 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
10352 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0)
10353 )
10354 {
10355 /* Handle MRS insn. */
10356 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10357 arm_insn_r->reg_rec_count = 1;
10358 }
10359 else if (arm_insn_r->opcode <= 15)
10360 {
10361 /* Normal data processing insns. */
10362 /* In all 11 shifter operand modes, the insn modifies the destination
10363 register, which is specified by bits 12-15. */
10364 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10365 record_buf[1] = ARM_PS_REGNUM;
10366 arm_insn_r->reg_rec_count = 2;
10367 }
10368 else
10369 {
10370 return -1;
10371 }
10372
10373 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10374 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10375 return 0;
10376}
10377
10378/* Handling opcode 001 insns. */
10379
10380static int
10381arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
10382{
10383 uint32_t record_buf[8], record_buf_mem[8];
10384
10385 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10386 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10387
10388 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10389 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
10390 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10391 )
10392 {
10393 /* Handle MSR insn. */
10394 if (9 == arm_insn_r->opcode)
10395 {
10396 /* CPSR is going to be changed. */
10397 record_buf[0] = ARM_PS_REGNUM;
10398 arm_insn_r->reg_rec_count = 1;
10399 }
10400 else
10401 {
10402 /* SPSR is going to be changed. */
10403 }
10404 }
10405 else if (arm_insn_r->opcode <= 15)
10406 {
10407 /* Normal data processing insns. */
10408 /* In all 11 shifter operand modes, the insn modifies the destination
10409 register, which is specified by bits 12-15. */
10410 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10411 record_buf[1] = ARM_PS_REGNUM;
10412 arm_insn_r->reg_rec_count = 2;
10413 }
10414 else
10415 {
10416 return -1;
10417 }
10418
10419 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10420 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10421 return 0;
10422}
10423
10424static int
10425arm_record_media (insn_decode_record *arm_insn_r)
10426{
10427 uint32_t record_buf[8];
10428
10429 switch (bits (arm_insn_r->arm_insn, 22, 24))
10430 {
10431 case 0:
10432 /* Parallel addition and subtraction, signed */
10433 case 1:
10434 /* Parallel addition and subtraction, unsigned */
10435 case 2:
10436 case 3:
10437 /* Packing, unpacking, saturation and reversal */
10438 {
10439 int rd = bits (arm_insn_r->arm_insn, 12, 15);
10440
10441 record_buf[arm_insn_r->reg_rec_count++] = rd;
10442 }
10443 break;
10444
10445 case 4:
10446 case 5:
10447 /* Signed multiplies */
10448 {
10449 int rd = bits (arm_insn_r->arm_insn, 16, 19);
10450 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 22);
10451
10452 record_buf[arm_insn_r->reg_rec_count++] = rd;
10453 if (op1 == 0x0)
10454 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10455 else if (op1 == 0x4)
10456 record_buf[arm_insn_r->reg_rec_count++]
10457 = bits (arm_insn_r->arm_insn, 12, 15);
10458 }
10459 break;
10460
10461 case 6:
10462 {
10463 if (bit (arm_insn_r->arm_insn, 21)
10464 && bits (arm_insn_r->arm_insn, 5, 6) == 0x2)
10465 {
10466 /* SBFX */
10467 record_buf[arm_insn_r->reg_rec_count++]
10468 = bits (arm_insn_r->arm_insn, 12, 15);
10469 }
10470 else if (bits (arm_insn_r->arm_insn, 20, 21) == 0x0
10471 && bits (arm_insn_r->arm_insn, 5, 7) == 0x0)
10472 {
10473 /* USAD8 and USADA8 */
10474 record_buf[arm_insn_r->reg_rec_count++]
10475 = bits (arm_insn_r->arm_insn, 16, 19);
10476 }
10477 }
10478 break;
10479
10480 case 7:
10481 {
10482 if (bits (arm_insn_r->arm_insn, 20, 21) == 0x3
10483 && bits (arm_insn_r->arm_insn, 5, 7) == 0x7)
10484 {
10485 /* Permanently UNDEFINED */
10486 return -1;
10487 }
10488 else
10489 {
10490 /* BFC, BFI and UBFX */
10491 record_buf[arm_insn_r->reg_rec_count++]
10492 = bits (arm_insn_r->arm_insn, 12, 15);
10493 }
10494 }
10495 break;
10496
10497 default:
10498 return -1;
10499 }
10500
10501 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10502
10503 return 0;
10504}
10505
71e396f9 10506/* Handle ARM mode instructions with opcode 010. */
10507
10508static int
10509arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
10510{
10511 struct regcache *reg_cache = arm_insn_r->regcache;
10512
10513 uint32_t reg_base , reg_dest;
10514 uint32_t offset_12, tgt_mem_addr;
72508ac0 10515 uint32_t record_buf[8], record_buf_mem[8];
10516 unsigned char wback;
10517 ULONGEST u_regval;
72508ac0 10518
10519 /* Calculate wback. */
10520 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
10521 || (bit (arm_insn_r->arm_insn, 21) == 1);
72508ac0 10522
10523 arm_insn_r->reg_rec_count = 0;
10524 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10525
10526 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10527 {
10528 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
10529 and LDRT. */
10530
72508ac0 10531 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10532 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
10533
10534 /* The LDR instruction is capable of branching. If MOV LR, PC
10535 precedes an LDR instruction having R15 as reg_dest, it
10536 emulates a branch and link instruction, and hence we need to save
10537 CPSR and PC as well. */
10538 if (ARM_PC_REGNUM == reg_dest)
10539 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10540
10541 /* If wback is true, also save the base register, which is going to be
10542 written to. */
10543 if (wback)
10544 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10545 }
10546 else
10547 {
10548 /* STR (immediate), STRB (immediate), STRBT and STRT. */
10549
72508ac0 10550 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
10551 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10552
10553 /* Handle bit U. */
72508ac0 10554 if (bit (arm_insn_r->arm_insn, 23))
10555 {
10556 /* U == 1: Add the offset. */
10557 tgt_mem_addr = (uint32_t) u_regval + offset_12;
10558 }
72508ac0 10559 else
10560 {
10561 /* U == 0: subtract the offset. */
10562 tgt_mem_addr = (uint32_t) u_regval - offset_12;
10563 }
10564
10565 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
10566 bytes. */
10567 if (bit (arm_insn_r->arm_insn, 22))
10568 {
10569 /* STRB and STRBT: 1 byte. */
10570 record_buf_mem[0] = 1;
10571 }
10572 else
10573 {
10574 /* STR and STRT: 4 bytes. */
10575 record_buf_mem[0] = 4;
10576 }
10577
10578 /* Handle bit P. */
10579 if (bit (arm_insn_r->arm_insn, 24))
10580 record_buf_mem[1] = tgt_mem_addr;
10581 else
10582 record_buf_mem[1] = (uint32_t) u_regval;
72508ac0 10583
10584 arm_insn_r->mem_rec_count = 1;
10585
10586 /* If wback is true, also save the base register, which is going to be
10587 written to. */
10588 if (wback)
10589 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10590 }
10591
10592 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10593 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10594 return 0;
10595}
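
/* A minimal illustrative sketch of the effective-address rule recorded
   above for STR/STRB (immediate): bit U (23) selects adding or
   subtracting the 12-bit offset, and bit P (24) selects whether the
   offset address or the unmodified base is accessed.  The demo_* name
   is hypothetical; the block is compiled out.  */
#if 0
static uint32_t
demo_str_imm_address (uint32_t rn_val, uint32_t offset_12,
                      int bit_u, int bit_p)
{
  uint32_t offset_addr = bit_u ? rn_val + offset_12 : rn_val - offset_12;

  /* P == 1: offset or pre-indexed addressing accesses OFFSET_ADDR;
     P == 0: post-indexed addressing accesses the unmodified base.  */
  return bit_p ? offset_addr : rn_val;
}
#endif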
10596
10597/* Handling opcode 011 insns. */
10598
10599static int
10600arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
10601{
10602 struct regcache *reg_cache = arm_insn_r->regcache;
10603
10604 uint32_t shift_imm = 0;
10605 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
10606 uint32_t offset_12 = 0, tgt_mem_addr = 0;
10607 uint32_t record_buf[8], record_buf_mem[8];
10608
10609 LONGEST s_word;
10610 ULONGEST u_regval[2];
10611
10612 if (bit (arm_insn_r->arm_insn, 4))
10613 return arm_record_media (arm_insn_r);
10614
10615 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10616 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10617
10618 /* Handle enhanced store insns and the LDRD DSP insn; the ordering
10619 follows the addressing modes used by the store insns, beginning
10620 with the STRH insn. */
10621
10622 /* LDR or STR? */
10623 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10624 {
10625 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10626 /* The LDR insn is capable of branching: if MOV LR, PC is
10627 preceded by an LDR insn having R15 as Rn, in that case it
10628 emulates a branch and link insn, and hence we
10629 need to save CPSR and PC as well. */
10630 if (15 != reg_dest)
10631 {
10632 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10633 arm_insn_r->reg_rec_count = 1;
10634 }
10635 else
10636 {
10637 record_buf[0] = reg_dest;
10638 record_buf[1] = ARM_PS_REGNUM;
10639 arm_insn_r->reg_rec_count = 2;
10640 }
10641 }
10642 else
10643 {
10644 if (! bits (arm_insn_r->arm_insn, 4, 11))
10645 {
10646 /* Store insn: register offset, register pre-indexed, and
10647 register post-indexed addressing. */
10648 /* Get Rm. */
10649 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10650 /* Get Rn. */
10651 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10652 regcache_raw_read_unsigned (reg_cache, reg_src1
10653 , &u_regval[0]);
10654 regcache_raw_read_unsigned (reg_cache, reg_src2
10655 , &u_regval[1]);
10656 if (15 == reg_src2)
10657 {
10658 /* If R15 was used as Rn, the value is the current PC + 8. */
10659 /* Pre-indexed mode doesn't reach here; illegal insn. */
10660 u_regval[0] = u_regval[0] + 8;
10661 }
10662 /* Calculate target store address, Rn +/- Rm, register offset. */
10663 /* U == 1. */
10664 if (bit (arm_insn_r->arm_insn, 23))
10665 {
10666 tgt_mem_addr = u_regval[0] + u_regval[1];
10667 }
10668 else
10669 {
10670 tgt_mem_addr = u_regval[1] - u_regval[0];
10671 }
10672
10673 switch (arm_insn_r->opcode)
10674 {
10675 /* STR. */
10676 case 8:
10677 case 12:
10678 /* STR. */
10679 case 9:
10680 case 13:
10681 /* STRT. */
10682 case 1:
10683 case 5:
10684 /* STR. */
10685 case 0:
10686 case 4:
10687 record_buf_mem[0] = 4;
10688 break;
10689
10690 /* STRB. */
10691 case 10:
10692 case 14:
10693 /* STRB. */
10694 case 11:
10695 case 15:
10696 /* STRBT. */
10697 case 3:
10698 case 7:
10699 /* STRB. */
10700 case 2:
10701 case 6:
10702 record_buf_mem[0] = 1;
10703 break;
10704
10705 default:
10706 gdb_assert_not_reached ("no decoding pattern found");
10707 break;
10708 }
10709 record_buf_mem[1] = tgt_mem_addr;
10710 arm_insn_r->mem_rec_count = 1;
10711
10712 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10713 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10714 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10715 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10716 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10717 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10718 )
10719 {
10720 /* Rn is going to be changed in pre-indexed mode and
10721 post-indexed mode as well. */
10722 record_buf[0] = reg_src2;
10723 arm_insn_r->reg_rec_count = 1;
10724 }
10725 }
10726 else
10727 {
10728 /* Store insn, scaled register offset; scaled pre-indexed. */
10729 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
10730 /* Get Rm. */
10731 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10732 /* Get Rn. */
10733 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10734 /* Get shift_imm. */
10735 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
10736 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10737 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
10738 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10739 /* Offset_12 is reused here to hold the shift type (bits 5-6). */
10740 switch (offset_12)
10741 {
10742 case 0:
10743 /* Offset_12 used as index. */
10744 offset_12 = u_regval[0] << shift_imm;
10745 break;
10746
10747 case 1:
10748 offset_12 = (!shift_imm)?0:u_regval[0] >> shift_imm;
10749 break;
10750
10751 case 2:
10752 if (!shift_imm)
10753 {
10754 if (bit (u_regval[0], 31))
10755 {
10756 offset_12 = 0xFFFFFFFF;
10757 }
10758 else
10759 {
10760 offset_12 = 0;
10761 }
10762 }
10763 else
10764 {
10765 /* This is arithmetic shift. */
10766 offset_12 = s_word >> shift_imm;
10767 }
10768 break;
10769
10770 case 3:
10771 if (!shift_imm)
10772 {
10773 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
10774 &u_regval[1]);
10775 /* Get C flag value and shift it by 31. */
10776 offset_12 = (((bit (u_regval[1], 29)) << 31) \
10777 | (u_regval[0]) >> 1);
10778 }
10779 else
10780 {
10781 offset_12 = (u_regval[0] >> shift_imm) \
10782 | (u_regval[0] <<
10783 (32 - shift_imm));
10784 }
10785 break;
10786
10787 default:
10788 gdb_assert_not_reached ("no decoding pattern found");
10789 break;
10790 }
10791
10792 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10793 /* bit U set. */
10794 if (bit (arm_insn_r->arm_insn, 23))
10795 {
10796 tgt_mem_addr = u_regval[1] + offset_12;
10797 }
10798 else
10799 {
10800 tgt_mem_addr = u_regval[1] - offset_12;
10801 }
10802
10803 switch (arm_insn_r->opcode)
10804 {
10805 /* STR. */
10806 case 8:
10807 case 12:
10808 /* STR. */
10809 case 9:
10810 case 13:
10811 /* STRT. */
10812 case 1:
10813 case 5:
10814 /* STR. */
10815 case 0:
10816 case 4:
10817 record_buf_mem[0] = 4;
10818 break;
10819
10820 /* STRB. */
10821 case 10:
10822 case 14:
10823 /* STRB. */
10824 case 11:
10825 case 15:
10826 /* STRBT. */
10827 case 3:
10828 case 7:
10829 /* STRB. */
10830 case 2:
10831 case 6:
10832 record_buf_mem[0] = 1;
10833 break;
10834
10835 default:
10836 gdb_assert_not_reached ("no decoding pattern found");
10837 break;
10838 }
10839 record_buf_mem[1] = tgt_mem_addr;
10840 arm_insn_r->mem_rec_count = 1;
10841
10842 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10843 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10844 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10845 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10846 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10847 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10848 )
10849 {
10850 /* Rn is going to be changed in register scaled pre-indexed
10851 mode,and scaled post indexed mode. */
10852 record_buf[0] = reg_src2;
10853 arm_insn_r->reg_rec_count = 1;
10854 }
10855 }
10856 }
10857
10858 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10859 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10860 return 0;
10861}
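
/* A minimal illustrative sketch of the scaled-offset computation above:
   the shift type in bits 5-6 selects LSL, LSR, ASR or ROR/RRX of Rm by
   SHIFT_IMM (bits 7-11).  The demo_* name is hypothetical and C_FLAG
   stands for the CPSR C bit read from the regcache above; the block is
   compiled out.  */
#if 0
static uint32_t
demo_scaled_offset (uint32_t rm_val, uint32_t shift_type,
                    uint32_t shift_imm, uint32_t c_flag)
{
  switch (shift_type)
    {
    case 0:  /* LSL.  */
      return rm_val << shift_imm;
    case 1:  /* LSR; a zero immediate encodes LSR #32.  */
      return shift_imm ? rm_val >> shift_imm : 0;
    case 2:  /* ASR; a zero immediate encodes ASR #32.  */
      if (!shift_imm)
        return (rm_val & 0x80000000u) ? 0xffffffffu : 0;
      return (uint32_t) ((int32_t) rm_val >> shift_imm);
    default: /* ROR, or RRX when the immediate is zero.  */
      if (!shift_imm)
        return (c_flag << 31) | (rm_val >> 1);
      return (rm_val >> shift_imm) | (rm_val << (32 - shift_imm));
    }
}
#endif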
10862
71e396f9 10863/* Handle ARM mode instructions with opcode 100. */
10864
10865static int
10866arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
10867{
10868 struct regcache *reg_cache = arm_insn_r->regcache;
10869 uint32_t register_count = 0, register_bits;
10870 uint32_t reg_base, addr_mode;
72508ac0 10871 uint32_t record_buf[24], record_buf_mem[48];
10872 uint32_t wback;
10873 ULONGEST u_regval;
72508ac0 10874
10875 /* Fetch the list of registers. */
10876 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
10877 arm_insn_r->reg_rec_count = 0;
10878
10879 /* Fetch the base register that contains the address we are loading data
10880 to. */
10881 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
72508ac0 10882
10883 /* Calculate wback. */
10884 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
10885
10886 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10887 {
71e396f9 10888 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
72508ac0 10889
71e396f9 10890 /* Find out which registers are going to be loaded from memory. */
72508ac0 10891 while (register_bits)
10892 {
10893 if (register_bits & 0x00000001)
10894 record_buf[arm_insn_r->reg_rec_count++] = register_count;
10895 register_bits = register_bits >> 1;
10896 register_count++;
10897 }
72508ac0 10898
10899
10900 /* If wback is true, also save the base register, which is going to be
10901 written to. */
10902 if (wback)
10903 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10904
10905 /* Save the CPSR register. */
10906 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10907 }
10908 else
10909 {
71e396f9 10910 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
72508ac0 10911
10912 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
10913
10914 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10915
10916 /* Find out how many registers are going to be stored to memory. */
72508ac0 10917 while (register_bits)
10918 {
10919 if (register_bits & 0x00000001)
10920 register_count++;
10921 register_bits = register_bits >> 1;
10922 }
10923
10924 switch (addr_mode)
10925 {
10926 /* STMDA (STMED): Decrement after. */
10927 case 0:
10928 record_buf_mem[1] = (uint32_t) u_regval
10929 - register_count * INT_REGISTER_SIZE + 4;
10930 break;
10931 /* STM (STMIA, STMEA): Increment after. */
10932 case 1:
10933 record_buf_mem[1] = (uint32_t) u_regval;
10934 break;
10935 /* STMDB (STMFD): Decrement before. */
10936 case 2:
10937 record_buf_mem[1] = (uint32_t) u_regval
10938 - register_count * INT_REGISTER_SIZE;
10939 break;
10940 /* STMIB (STMFA): Increment before. */
10941 case 3:
10942 record_buf_mem[1] = (uint32_t) u_regval + INT_REGISTER_SIZE;
10943 break;
10944 default:
10945 gdb_assert_not_reached ("no decoding pattern found");
10946 break;
10947 }
72508ac0 10948
10949 record_buf_mem[0] = register_count * INT_REGISTER_SIZE;
10950 arm_insn_r->mem_rec_count = 1;
10951
10952 /* If wback is true, also save the base register, which is going to be
10953 written to. */
10954 if (wback)
10955 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10956 }
10957
10958 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10959 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10960 return 0;
10961}
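
/* A minimal illustrative sketch of the STM start-address rule used
   above, with INT_REGISTER_SIZE == 4: ADDR_MODE is bits 23-24 of the
   insn (U and P) and selects where the block of REGISTER_COUNT words
   starts relative to the base register value.  The demo_* name is
   hypothetical; the block is compiled out.  */
#if 0
static uint32_t
demo_stm_start_address (uint32_t base_val, uint32_t register_count,
                        uint32_t addr_mode)
{
  switch (addr_mode)
    {
    case 0:  /* STMDA (STMED): decrement after.  */
      return base_val - register_count * 4 + 4;
    case 1:  /* STM (STMIA, STMEA): increment after.  */
      return base_val;
    case 2:  /* STMDB (STMFD): decrement before.  */
      return base_val - register_count * 4;
    default: /* STMIB (STMFA): increment before.  */
      return base_val + 4;
    }
}
#endif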
10962
10963/* Handling opcode 101 insns. */
10964
10965static int
10966arm_record_b_bl (insn_decode_record *arm_insn_r)
10967{
10968 uint32_t record_buf[8];
10969
10970 /* Handle B, BL, BLX(1) insns. */
10971 /* B simply branches so we do nothing here. */
10972 /* Note: BLX(1) doesn't fall here but instead falls into the
10973 extension space. */
10974 if (bit (arm_insn_r->arm_insn, 24))
10975 {
10976 record_buf[0] = ARM_LR_REGNUM;
10977 arm_insn_r->reg_rec_count = 1;
10978 }
10979
10980 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10981
10982 return 0;
10983}
10984
72508ac0 10985static int
c6ec2b30 10986arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
10987{
10988 printf_unfiltered (_("Process record does not support instruction "
10989 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
10990 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
10991
10992 return -1;
10993}
10994
10995/* Record handler for vector data transfer instructions. */
10996
10997static int
10998arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
10999{
11000 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
11001 uint32_t record_buf[4];
11002
11003 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
11004 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
11005 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
11006 bit_l = bit (arm_insn_r->arm_insn, 20);
11007 bit_c = bit (arm_insn_r->arm_insn, 8);
11008
11009 /* Handle VMOV instruction. */
11010 if (bit_l && bit_c)
11011 {
11012 record_buf[0] = reg_t;
11013 arm_insn_r->reg_rec_count = 1;
11014 }
11015 else if (bit_l && !bit_c)
11016 {
11017 /* Handle VMOV instruction. */
11018 if (bits_a == 0x00)
11019 {
f1771dce 11020 record_buf[0] = reg_t;
11021 arm_insn_r->reg_rec_count = 1;
11022 }
11023 /* Handle VMRS instruction. */
11024 else if (bits_a == 0x07)
11025 {
11026 if (reg_t == 15)
11027 reg_t = ARM_PS_REGNUM;
11028
11029 record_buf[0] = reg_t;
11030 arm_insn_r->reg_rec_count = 1;
11031 }
11032 }
11033 else if (!bit_l && !bit_c)
11034 {
11035 /* Handle VMOV instruction. */
11036 if (bits_a == 0x00)
11037 {
f1771dce 11038 record_buf[0] = ARM_D0_REGNUM + reg_v;
11039
11040 arm_insn_r->reg_rec_count = 1;
11041 }
11042 /* Handle VMSR instruction. */
11043 else if (bits_a == 0x07)
11044 {
11045 record_buf[0] = ARM_FPSCR_REGNUM;
11046 arm_insn_r->reg_rec_count = 1;
11047 }
11048 }
11049 else if (!bit_l && bit_c)
11050 {
11051 /* Handle VMOV instruction. */
11052 if (!(bits_a & 0x04))
11053 {
11054 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
11055 + ARM_D0_REGNUM;
11056 arm_insn_r->reg_rec_count = 1;
11057 }
11058 /* Handle VDUP instruction. */
11059 else
11060 {
11061 if (bit (arm_insn_r->arm_insn, 21))
11062 {
11063 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11064 record_buf[0] = reg_v + ARM_D0_REGNUM;
11065 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
11066 arm_insn_r->reg_rec_count = 2;
11067 }
11068 else
11069 {
11070 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11071 record_buf[0] = reg_v + ARM_D0_REGNUM;
11072 arm_insn_r->reg_rec_count = 1;
11073 }
11074 }
11075 }
11076
11077 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11078 return 0;
11079}
11080
11081/* Record handler for extension register load/store instructions. */
11082
11083static int
11084arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
11085{
11086 uint32_t opcode, single_reg;
11087 uint8_t op_vldm_vstm;
11088 uint32_t record_buf[8], record_buf_mem[128];
11089 ULONGEST u_regval = 0;
11090
11091 struct regcache *reg_cache = arm_insn_r->regcache;
11092
11093 opcode = bits (arm_insn_r->arm_insn, 20, 24);
9fde51ed 11094 single_reg = !bit (arm_insn_r->arm_insn, 8);
11095 op_vldm_vstm = opcode & 0x1b;
11096
11097 /* Handle VMOV instructions. */
11098 if ((opcode & 0x1e) == 0x04)
11099 {
9fde51ed 11100 if (bit (arm_insn_r->arm_insn, 20)) /* to_arm_registers bit 20? */
11101 {
11102 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11103 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11104 arm_insn_r->reg_rec_count = 2;
11105 }
f20f80dd 11106 else
01e57735 11107 {
11108 uint8_t reg_m = bits (arm_insn_r->arm_insn, 0, 3);
11109 uint8_t bit_m = bit (arm_insn_r->arm_insn, 5);
f20f80dd 11110
9fde51ed 11111 if (single_reg)
01e57735 11112 {
11113 /* The first S register number m is REG_M:M (M is bit 5),
11114 the corresponding D register number is REG_M:M / 2, which
11115 is REG_M. */
11116 record_buf[arm_insn_r->reg_rec_count++] = ARM_D0_REGNUM + reg_m;
11117 /* The second S register number is REG_M:M + 1, the
11118 corresponding D register number is (REG_M:M + 1) / 2.
11119 IOW, if bit M is 1, the first and second S registers
11120 are mapped to different D registers, otherwise, they are
11121 in the same D register. */
11122 if (bit_m)
11123 {
11124 record_buf[arm_insn_r->reg_rec_count++]
11125 = ARM_D0_REGNUM + reg_m + 1;
11126 }
11127 }
11128 else
11129 {
9fde51ed 11130 record_buf[0] = ((bit_m << 4) + reg_m + ARM_D0_REGNUM);
11131 arm_insn_r->reg_rec_count = 1;
11132 }
11133 }
11134 }
11135 /* Handle VSTM and VPUSH instructions. */
11136 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
01e57735 11137 || op_vldm_vstm == 0x12)
11138 {
11139 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
11140 uint32_t memory_index = 0;
11141
11142 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11143 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11144 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
9fde51ed 11145 imm_off32 = imm_off8 << 2;
11146 memory_count = imm_off8;
11147
11148 if (bit (arm_insn_r->arm_insn, 23))
01e57735 11149 start_address = u_regval;
f20f80dd 11150 else
01e57735 11151 start_address = u_regval - imm_off32;
11152
11153 if (bit (arm_insn_r->arm_insn, 21))
11154 {
11155 record_buf[0] = reg_rn;
11156 arm_insn_r->reg_rec_count = 1;
11157 }
11158
11159 while (memory_count > 0)
01e57735 11160 {
9fde51ed 11161 if (single_reg)
01e57735 11162 {
11163 record_buf_mem[memory_index] = 4;
11164 record_buf_mem[memory_index + 1] = start_address;
11165 start_address = start_address + 4;
11166 memory_index = memory_index + 2;
11167 }
11168 else
11169 {
11170 record_buf_mem[memory_index] = 4;
11171 record_buf_mem[memory_index + 1] = start_address;
11172 record_buf_mem[memory_index + 2] = 4;
11173 record_buf_mem[memory_index + 3] = start_address + 4;
11174 start_address = start_address + 8;
11175 memory_index = memory_index + 4;
11176 }
11177 memory_count--;
11178 }
11179 arm_insn_r->mem_rec_count = (memory_index >> 1);
11180 }
11181 /* Handle VLDM instructions. */
11182 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
01e57735 11183 || op_vldm_vstm == 0x13)
11184 {
11185 uint32_t reg_count, reg_vd;
11186 uint32_t reg_index = 0;
9fde51ed 11187 uint32_t bit_d = bit (arm_insn_r->arm_insn, 22);
11188
11189 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11190 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
11191
11192 /* REG_VD is the first D register number. If the instruction
11193 loads memory to S registers (SINGLE_REG is TRUE), the register
11194 number is (REG_VD << 1 | bit D), so the corresponding D
11195 register number is (REG_VD << 1 | bit D) / 2 = REG_VD. */
11196 if (!single_reg)
11197 reg_vd = reg_vd | (bit_d << 4);
f20f80dd 11198
9fde51ed 11199 if (bit (arm_insn_r->arm_insn, 21) /* write back */)
01e57735 11200 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
f20f80dd 11201
11202 /* If the instruction loads memory to D register, REG_COUNT should
11203 be divided by 2, according to the ARM Architecture Reference
11204 Manual. If the instruction loads memory to S register, divide by
11205 2 as well because two S registers are mapped to D register. */
11206 reg_count = reg_count / 2;
11207 if (single_reg && bit_d)
01e57735 11208 {
11209 /* Increase the register count if S register list starts from
11210 an odd number (bit d is one). */
11211 reg_count++;
11212 }
f20f80dd 11213
11214 while (reg_count > 0)
11215 {
11216 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
11217 reg_count--;
11218 }
11219 arm_insn_r->reg_rec_count = reg_index;
11220 }
11221 /* VSTR Vector store register. */
11222 else if ((opcode & 0x13) == 0x10)
11223 {
bec2ab5a 11224 uint32_t start_address, reg_rn, imm_off32, imm_off8;
11225 uint32_t memory_index = 0;
11226
11227 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11228 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11229 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
9fde51ed 11230 imm_off32 = imm_off8 << 2;
11231
11232 if (bit (arm_insn_r->arm_insn, 23))
01e57735 11233 start_address = u_regval + imm_off32;
f20f80dd 11234 else
01e57735 11235 start_address = u_regval - imm_off32;
11236
11237 if (single_reg)
01e57735 11238 {
11239 record_buf_mem[memory_index] = 4;
11240 record_buf_mem[memory_index + 1] = start_address;
11241 arm_insn_r->mem_rec_count = 1;
11242 }
f20f80dd 11243 else
01e57735 11244 {
11245 record_buf_mem[memory_index] = 4;
11246 record_buf_mem[memory_index + 1] = start_address;
11247 record_buf_mem[memory_index + 2] = 4;
11248 record_buf_mem[memory_index + 3] = start_address + 4;
11249 arm_insn_r->mem_rec_count = 2;
11250 }
11251 }
11252 /* VLDR Vector load register. */
11253 else if ((opcode & 0x13) == 0x11)
11254 {
11255 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11256
11257 if (!single_reg)
11258 {
11259 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
11260 record_buf[0] = ARM_D0_REGNUM + reg_vd;
11261 }
f20f80dd 11262 else
11263 {
11264 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
11265 /* Record register D rather than pseudo register S. */
11266 record_buf[0] = ARM_D0_REGNUM + reg_vd / 2;
01e57735 11267 }
11268 arm_insn_r->reg_rec_count = 1;
11269 }
11270
11271 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11272 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11273 return 0;
11274}
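
/* A minimal illustrative sketch of the S-to-D register mapping relied
   on above: single-precision register Sn overlays double-precision
   register D(n/2), so recording that D register covers Sn.  The demo_*
   name is hypothetical; ARM_D0_REGNUM is the same base register number
   used above.  The block is compiled out.  */
#if 0
static int
demo_s_reg_to_d_regnum (unsigned int vm, unsigned int bit_m)
{
  unsigned int s = (vm << 1) | bit_m;   /* S register number Vm:M.  */

  return ARM_D0_REGNUM + s / 2;         /* Enclosing D register.  */
}
#endif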
11275
11276/* Record handler for arm/thumb mode VFP data processing instructions. */
11277
11278static int
11279arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
11280{
11281 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
11282 uint32_t record_buf[4];
11283 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
11284 enum insn_types curr_insn_type = INSN_INV;
11285
11286 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11287 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
11288 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
11289 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
11290 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
11291 bit_d = bit (arm_insn_r->arm_insn, 22);
11292 opc1 = opc1 & 0x04;
11293
11294 /* Handle VMLA, VMLS. */
11295 if (opc1 == 0x00)
11296 {
11297 if (bit (arm_insn_r->arm_insn, 10))
11298 {
11299 if (bit (arm_insn_r->arm_insn, 6))
11300 curr_insn_type = INSN_T0;
11301 else
11302 curr_insn_type = INSN_T1;
11303 }
11304 else
11305 {
11306 if (dp_op_sz)
11307 curr_insn_type = INSN_T1;
11308 else
11309 curr_insn_type = INSN_T2;
11310 }
11311 }
11312 /* Handle VNMLA, VNMLS, VNMUL. */
11313 else if (opc1 == 0x01)
11314 {
11315 if (dp_op_sz)
11316 curr_insn_type = INSN_T1;
11317 else
11318 curr_insn_type = INSN_T2;
11319 }
11320 /* Handle VMUL. */
11321 else if (opc1 == 0x02 && !(opc3 & 0x01))
11322 {
11323 if (bit (arm_insn_r->arm_insn, 10))
11324 {
11325 if (bit (arm_insn_r->arm_insn, 6))
11326 curr_insn_type = INSN_T0;
11327 else
11328 curr_insn_type = INSN_T1;
11329 }
11330 else
11331 {
11332 if (dp_op_sz)
11333 curr_insn_type = INSN_T1;
11334 else
11335 curr_insn_type = INSN_T2;
11336 }
11337 }
11338 /* Handle VADD, VSUB. */
11339 else if (opc1 == 0x03)
11340 {
11341 if (!bit (arm_insn_r->arm_insn, 9))
11342 {
11343 if (bit (arm_insn_r->arm_insn, 6))
11344 curr_insn_type = INSN_T0;
11345 else
11346 curr_insn_type = INSN_T1;
11347 }
11348 else
11349 {
11350 if (dp_op_sz)
11351 curr_insn_type = INSN_T1;
11352 else
11353 curr_insn_type = INSN_T2;
11354 }
11355 }
11356 /* Handle VDIV. */
11357 else if (opc1 == 0x0b)
11358 {
11359 if (dp_op_sz)
11360 curr_insn_type = INSN_T1;
11361 else
11362 curr_insn_type = INSN_T2;
11363 }
11364 /* Handle all other vfp data processing instructions. */
11365 else if (opc1 == 0x0b)
11366 {
11367 /* Handle VMOV. */
11368 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
11369 {
11370 if (bit (arm_insn_r->arm_insn, 4))
11371 {
11372 if (bit (arm_insn_r->arm_insn, 6))
11373 curr_insn_type = INSN_T0;
11374 else
11375 curr_insn_type = INSN_T1;
11376 }
11377 else
11378 {
11379 if (dp_op_sz)
11380 curr_insn_type = INSN_T1;
11381 else
11382 curr_insn_type = INSN_T2;
11383 }
11384 }
11385 /* Handle VNEG and VABS. */
11386 else if ((opc2 == 0x01 && opc3 == 0x01)
11387 || (opc2 == 0x00 && opc3 == 0x03))
11388 {
11389 if (!bit (arm_insn_r->arm_insn, 11))
11390 {
11391 if (bit (arm_insn_r->arm_insn, 6))
11392 curr_insn_type = INSN_T0;
11393 else
11394 curr_insn_type = INSN_T1;
11395 }
11396 else
11397 {
11398 if (dp_op_sz)
11399 curr_insn_type = INSN_T1;
11400 else
11401 curr_insn_type = INSN_T2;
11402 }
11403 }
11404 /* Handle VSQRT. */
11405 else if (opc2 == 0x01 && opc3 == 0x03)
11406 {
11407 if (dp_op_sz)
11408 curr_insn_type = INSN_T1;
11409 else
11410 curr_insn_type = INSN_T2;
11411 }
11412 /* Handle VCVT. */
11413 else if (opc2 == 0x07 && opc3 == 0x03)
11414 {
11415 if (!dp_op_sz)
11416 curr_insn_type = INSN_T1;
11417 else
11418 curr_insn_type = INSN_T2;
11419 }
11420 else if (opc3 & 0x01)
11421 {
11422 /* Handle VCVT. */
11423 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
11424 {
11425 if (!bit (arm_insn_r->arm_insn, 18))
11426 curr_insn_type = INSN_T2;
11427 else
11428 {
11429 if (dp_op_sz)
11430 curr_insn_type = INSN_T1;
11431 else
11432 curr_insn_type = INSN_T2;
11433 }
11434 }
11435 /* Handle VCVT. */
11436 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
11437 {
11438 if (dp_op_sz)
11439 curr_insn_type = INSN_T1;
11440 else
11441 curr_insn_type = INSN_T2;
11442 }
11443 /* Handle VCVTB, VCVTT. */
11444 else if ((opc2 & 0x0e) == 0x02)
11445 curr_insn_type = INSN_T2;
11446 /* Handle VCMP, VCMPE. */
11447 else if ((opc2 & 0x0e) == 0x04)
11448 curr_insn_type = INSN_T3;
11449 }
11450 }
11451
11452 switch (curr_insn_type)
11453 {
11454 case INSN_T0:
11455 reg_vd = reg_vd | (bit_d << 4);
11456 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11457 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
11458 arm_insn_r->reg_rec_count = 2;
11459 break;
11460
11461 case INSN_T1:
11462 reg_vd = reg_vd | (bit_d << 4);
11463 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11464 arm_insn_r->reg_rec_count = 1;
11465 break;
11466
11467 case INSN_T2:
11468 reg_vd = (reg_vd << 1) | bit_d;
11469 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11470 arm_insn_r->reg_rec_count = 1;
11471 break;
11472
11473 case INSN_T3:
11474 record_buf[0] = ARM_FPSCR_REGNUM;
11475 arm_insn_r->reg_rec_count = 1;
11476 break;
11477
11478 default:
11479 gdb_assert_not_reached ("no decoding pattern found");
11480 break;
11481 }
11482
11483 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11484 return 0;
11485}
11486
11487/* Handling opcode 110 insns. */
11488
11489static int
11490arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
11491{
bec2ab5a 11492 uint32_t op1, op1_ebit, coproc;
60cc5e93
OJ
11493
11494 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11495 op1 = bits (arm_insn_r->arm_insn, 20, 25);
11496 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11497
11498 if ((coproc & 0x0e) == 0x0a)
11499 {
11500 /* Handle extension register ld/st instructions. */
11501 if (!(op1 & 0x20))
f20f80dd 11502 return arm_record_exreg_ld_st_insn (arm_insn_r);
11503
11504 /* 64-bit transfers between arm core and extension registers. */
11505 if ((op1 & 0x3e) == 0x04)
f20f80dd 11506 return arm_record_exreg_ld_st_insn (arm_insn_r);
11507 }
11508 else
11509 {
11510 /* Handle coprocessor ld/st instructions. */
11511 if (!(op1 & 0x3a))
11512 {
11513 /* Store. */
11514 if (!op1_ebit)
11515 return arm_record_unsupported_insn (arm_insn_r);
11516 else
11517 /* Load. */
11518 return arm_record_unsupported_insn (arm_insn_r);
11519 }
11520
11521 /* Move to coprocessor from two arm core registers. */
11522 if (op1 == 0x4)
11523 return arm_record_unsupported_insn (arm_insn_r);
11524
11525 /* Move to two arm core registers from coprocessor. */
11526 if (op1 == 0x5)
11527 {
11528 uint32_t reg_t[2];
11529
11530 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
11531 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
11532 arm_insn_r->reg_rec_count = 2;
11533
11534 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
11535 return 0;
11536 }
11537 }
11538 return arm_record_unsupported_insn (arm_insn_r);
11539}
11540
11541/* Handling opcode 111 insns. */
11542
11543static int
11544arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
11545{
60cc5e93 11546 uint32_t op, op1_sbit, op1_ebit, coproc;
11547 struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
11548 struct regcache *reg_cache = arm_insn_r->regcache;
11549
11550 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
11551 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11552 op1_sbit = bit (arm_insn_r->arm_insn, 24);
11553 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11554 op = bit (arm_insn_r->arm_insn, 4);
11555
11556 /* Handle arm SWI/SVC system call instructions. */
60cc5e93 11557 if (op1_sbit)
11558 {
11559 if (tdep->arm_syscall_record != NULL)
11560 {
11561 ULONGEST svc_operand, svc_number;
11562
11563 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
11564
11565 if (svc_operand) /* OABI. */
11566 svc_number = svc_operand - 0x900000;
11567 else /* EABI. */
11568 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
11569
60cc5e93 11570 return tdep->arm_syscall_record (reg_cache, svc_number);
11571 }
11572 else
11573 {
11574 printf_unfiltered (_("no syscall record support\n"));
60cc5e93 11575 return -1;
11576 }
11577 }
11578
11579 if ((coproc & 0x0e) == 0x0a)
11580 {
11581 /* VFP data-processing instructions. */
11582 if (!op1_sbit && !op)
851f26ae 11583 return arm_record_vfp_data_proc_insn (arm_insn_r);
11584
11585 /* Advanced SIMD, VFP instructions. */
11586 if (!op1_sbit && op)
5a578da5 11587 return arm_record_vdata_transfer_insn (arm_insn_r);
60cc5e93 11588 }
11589 else
11590 {
11591 /* Coprocessor data operations. */
11592 if (!op1_sbit && !op)
11593 return arm_record_unsupported_insn (arm_insn_r);
11594
11595 /* Move to Coprocessor from ARM core register. */
11596 if (!op1_sbit && !op1_ebit && op)
11597 return arm_record_unsupported_insn (arm_insn_r);
11598
11599 /* Move to arm core register from coprocessor. */
11600 if (!op1_sbit && op1_ebit && op)
11601 {
11602 uint32_t record_buf[1];
11603
11604 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11605 if (record_buf[0] == 15)
11606 record_buf[0] = ARM_PS_REGNUM;
11607
11608 arm_insn_r->reg_rec_count = 1;
11609 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
11610 record_buf);
11611 return 0;
11612 }
97dfe206 11613 }
72508ac0 11614
60cc5e93 11615 return arm_record_unsupported_insn (arm_insn_r);
11616}
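
/* A minimal illustrative sketch of the syscall-number rule above: an
   OABI SWI encodes the number in the insn's 24-bit immediate, biased
   by 0x900000, while an EABI SVC uses a zero immediate and passes the
   number in r7.  The demo_* name is hypothetical and R7_VAL stands for
   the value read from the regcache above; the block is compiled out.  */
#if 0
static ULONGEST
demo_arm_svc_number (uint32_t insn, ULONGEST r7_val)
{
  uint32_t svc_operand = insn & 0x00ffffff;

  return svc_operand ? svc_operand - 0x900000 : r7_val;
}
#endif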
11617
11618/* Handling opcode 000 insns. */
11619
11620static int
11621thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
11622{
11623 uint32_t record_buf[8];
11624 uint32_t reg_src1 = 0;
11625
11626 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11627
11628 record_buf[0] = ARM_PS_REGNUM;
11629 record_buf[1] = reg_src1;
11630 thumb_insn_r->reg_rec_count = 2;
11631
11632 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11633
11634 return 0;
11635}
11636
11637
11638/* Handling opcode 001 insns. */
11639
11640static int
11641thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
11642{
11643 uint32_t record_buf[8];
11644 uint32_t reg_src1 = 0;
11645
11646 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11647
11648 record_buf[0] = ARM_PS_REGNUM;
11649 record_buf[1] = reg_src1;
11650 thumb_insn_r->reg_rec_count = 2;
11651
11652 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11653
11654 return 0;
11655}
11656
11657/* Handling opcode 010 insns. */
11658
11659static int
11660thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
11661{
11662 struct regcache *reg_cache = thumb_insn_r->regcache;
11663 uint32_t record_buf[8], record_buf_mem[8];
11664
11665 uint32_t reg_src1 = 0, reg_src2 = 0;
11666 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
11667
11668 ULONGEST u_regval[2] = {0};
11669
11670 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
11671
11672 if (bit (thumb_insn_r->arm_insn, 12))
11673 {
11674 /* Handle load/store register offset. */
11675 opcode2 = bits (thumb_insn_r->arm_insn, 9, 12);
11676 if (opcode2 >= 12 && opcode2 <= 15)
11677 {
11678 /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH. */
11679 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11680 record_buf[0] = reg_src1;
11681 thumb_insn_r->reg_rec_count = 1;
11682 }
11683 else if (opcode2 >= 8 && opcode2 <= 10)
11684 {
11685 /* STR(2), STRB(2), STRH(2) . */
11686 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11687 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
11688 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11689 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11690 if (8 == opcode2)
11691 record_buf_mem[0] = 4; /* STR (2). */
11692 else if (10 == opcode2)
11693 record_buf_mem[0] = 1; /* STRB (2). */
11694 else if (9 == opcode2)
11695 record_buf_mem[0] = 2; /* STRH (2). */
11696 record_buf_mem[1] = u_regval[0] + u_regval[1];
11697 thumb_insn_r->mem_rec_count = 1;
11698 }
11699 }
11700 else if (bit (thumb_insn_r->arm_insn, 11))
11701 {
11702 /* Handle load from literal pool. */
11703 /* LDR(3). */
11704 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11705 record_buf[0] = reg_src1;
11706 thumb_insn_r->reg_rec_count = 1;
11707 }
11708 else if (opcode1)
11709 {
11710 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
11711 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
11712 if ((3 == opcode2) && (!opcode3))
11713 {
11714 /* Branch with exchange. */
11715 record_buf[0] = ARM_PS_REGNUM;
11716 thumb_insn_r->reg_rec_count = 1;
11717 }
11718 else
11719 {
11720 /* Format 8; special data processing insns. */
11721 record_buf[0] = ARM_PS_REGNUM;
11722 record_buf[1] = (bit (thumb_insn_r->arm_insn, 7) << 3
11723 | bits (thumb_insn_r->arm_insn, 0, 2));
11724 thumb_insn_r->reg_rec_count = 2;
11725 }
11726 }
11727 else
11728 {
11729 /* Format 5; data processing insns. */
11730 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11731 if (bit (thumb_insn_r->arm_insn, 7))
11732 {
11733 reg_src1 = reg_src1 + 8;
11734 }
11735 record_buf[0] = ARM_PS_REGNUM;
11736 record_buf[1] = reg_src1;
11737 thumb_insn_r->reg_rec_count = 2;
11738 }
11739
11740 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11741 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11742 record_buf_mem);
11743
11744 return 0;
11745}
11746
11747/* Handling opcode 001 insns. */
11748
11749static int
11750thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
11751{
11752 struct regcache *reg_cache = thumb_insn_r->regcache;
11753 uint32_t record_buf[8], record_buf_mem[8];
11754
11755 uint32_t reg_src1 = 0;
11756 uint32_t opcode = 0, immed_5 = 0;
11757
11758 ULONGEST u_regval = 0;
11759
11760 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11761
11762 if (opcode)
11763 {
11764 /* LDR(1). */
11765 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11766 record_buf[0] = reg_src1;
11767 thumb_insn_r->reg_rec_count = 1;
11768 }
11769 else
11770 {
11771 /* STR(1). */
11772 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11773 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11774 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11775 record_buf_mem[0] = 4;
11776 record_buf_mem[1] = u_regval + (immed_5 * 4);
11777 thumb_insn_r->mem_rec_count = 1;
11778 }
11779
11780 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11781 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11782 record_buf_mem);
11783
11784 return 0;
11785}
11786
11787/* Handling opcode 100 insns. */
11788
11789static int
11790thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
11791{
11792 struct regcache *reg_cache = thumb_insn_r->regcache;
11793 uint32_t record_buf[8], record_buf_mem[8];
11794
11795 uint32_t reg_src1 = 0;
11796 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
11797
11798 ULONGEST u_regval = 0;
11799
11800 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11801
11802 if (3 == opcode)
11803 {
11804 /* LDR(4). */
11805 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11806 record_buf[0] = reg_src1;
11807 thumb_insn_r->reg_rec_count = 1;
11808 }
11809 else if (1 == opcode)
11810 {
11811 /* LDRH(1). */
11812 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11813 record_buf[0] = reg_src1;
11814 thumb_insn_r->reg_rec_count = 1;
11815 }
11816 else if (2 == opcode)
11817 {
11818 /* STR(3). */
11819 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
11820 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
11821 record_buf_mem[0] = 4;
11822 record_buf_mem[1] = u_regval + (immed_8 * 4);
11823 thumb_insn_r->mem_rec_count = 1;
11824 }
11825 else if (0 == opcode)
11826 {
11827 /* STRH(1). */
11828 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11829 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11830 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11831 record_buf_mem[0] = 2;
11832 record_buf_mem[1] = u_regval + (immed_5 * 2);
11833 thumb_insn_r->mem_rec_count = 1;
11834 }
11835
11836 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11837 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11838 record_buf_mem);
11839
11840 return 0;
11841}
11842
11843/* Handling opcode 101 insns. */
11844
11845static int
11846thumb_record_misc (insn_decode_record *thumb_insn_r)
11847{
11848 struct regcache *reg_cache = thumb_insn_r->regcache;
11849
11850 uint32_t opcode = 0, opcode1 = 0, opcode2 = 0;
11851 uint32_t register_bits = 0, register_count = 0;
bec2ab5a 11852 uint32_t index = 0, start_address = 0;
11853 uint32_t record_buf[24], record_buf_mem[48];
11854 uint32_t reg_src1;
11855
11856 ULONGEST u_regval = 0;
11857
11858 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11859 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
11860 opcode2 = bits (thumb_insn_r->arm_insn, 9, 12);
11861
11862 if (14 == opcode2)
11863 {
11864 /* POP. */
11865 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
11866 while (register_bits)
11867 {
11868 if (register_bits & 0x00000001)
11869 record_buf[index++] = register_count;
11870 register_bits = register_bits >> 1;
11871 register_count++;
11872 }
11873 record_buf[index++] = ARM_PS_REGNUM;
11874 record_buf[index++] = ARM_SP_REGNUM;
11875 thumb_insn_r->reg_rec_count = index;
11876 }
11877 else if (10 == opcode2)
11878 {
11879 /* PUSH. */
11880 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
9904a494 11881 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
11882 while (register_bits)
11883 {
11884 if (register_bits & 0x00000001)
11885 register_count++;
11886 register_bits = register_bits >> 1;
11887 }
11888 start_address = u_regval - \
11889 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
11890 thumb_insn_r->mem_rec_count = register_count;
11891 while (register_count)
11892 {
11893 record_buf_mem[(register_count * 2) - 1] = start_address;
11894 record_buf_mem[(register_count * 2) - 2] = 4;
11895 start_address = start_address + 4;
11896 register_count--;
11897 }
11898 record_buf[0] = ARM_SP_REGNUM;
11899 thumb_insn_r->reg_rec_count = 1;
11900 }
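      /* For example (illustrative values only), PUSH {r4-r7, lr} has
         register_bits = 0xF0 (register_count = 4) and bit 8 set for LR,
         so start_address = SP - 4 * (1 + 4) = SP - 20, and the loop above
         records four 4-byte slots at SP - 20 .. SP - 8.  */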
11901 else if (0x1E == opcode1)
11902 {
11903 /* BKPT insn. */
11904 /* Handle enhanced software breakpoint insn, BKPT. */
 11905      /* CPSR is changed so that execution continues in ARM state with normal
 11906         interrupts disabled, entering abort mode.  */
 11907      /* PC is set according to the high vector configuration.  */
 11908      /* If the user hits a breakpoint and then types reverse, we need to go back
 11909         with the previous CPSR and Program Counter.  */
11910 record_buf[0] = ARM_PS_REGNUM;
11911 record_buf[1] = ARM_LR_REGNUM;
11912 thumb_insn_r->reg_rec_count = 2;
11913 /* We need to save SPSR value, which is not yet done. */
11914 printf_unfiltered (_("Process record does not support instruction "
11915 "0x%0x at address %s.\n"),
11916 thumb_insn_r->arm_insn,
11917 paddress (thumb_insn_r->gdbarch,
11918 thumb_insn_r->this_addr));
11919 return -1;
11920 }
11921 else if ((0 == opcode) || (1 == opcode))
11922 {
11923 /* ADD(5), ADD(6). */
11924 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11925 record_buf[0] = reg_src1;
11926 thumb_insn_r->reg_rec_count = 1;
11927 }
11928 else if (2 == opcode)
11929 {
11930 /* ADD(7), SUB(4). */
11931 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11932 record_buf[0] = ARM_SP_REGNUM;
11933 thumb_insn_r->reg_rec_count = 1;
11934 }
11935
11936 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11937 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11938 record_buf_mem);
11939
11940 return 0;
11941}
11942
11943/* Handling opcode 110 insns. */
11944
11945static int
11946thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
11947{
11948 struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
11949 struct regcache *reg_cache = thumb_insn_r->regcache;
11950
11951 uint32_t ret = 0; /* function return value: -1:record failure ; 0:success */
11952 uint32_t reg_src1 = 0;
11953 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
bec2ab5a 11954 uint32_t index = 0, start_address = 0;
11955 uint32_t record_buf[24], record_buf_mem[48];
11956
11957 ULONGEST u_regval = 0;
11958
11959 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
11960 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
11961
11962 if (1 == opcode2)
11963 {
11964
11965 /* LDMIA. */
11966 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
11967 /* Get Rn. */
11968 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11969 while (register_bits)
11970 {
11971 if (register_bits & 0x00000001)
f969241e 11972 record_buf[index++] = register_count;
72508ac0 11973 register_bits = register_bits >> 1;
f969241e 11974 register_count++;
72508ac0 11975 }
11976 record_buf[index++] = reg_src1;
11977 thumb_insn_r->reg_rec_count = index;
11978 }
11979 else if (0 == opcode2)
11980 {
 11981      /* Handle STMIA.  */
11982 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
11983 /* Get Rn. */
11984 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11985 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11986 while (register_bits)
11987 {
11988 if (register_bits & 0x00000001)
11989 register_count++;
11990 register_bits = register_bits >> 1;
11991 }
11992 start_address = u_regval;
11993 thumb_insn_r->mem_rec_count = register_count;
11994 while (register_count)
11995 {
11996 record_buf_mem[(register_count * 2) - 1] = start_address;
11997 record_buf_mem[(register_count * 2) - 2] = 4;
11998 start_address = start_address + 4;
11999 register_count--;
12000 }
12001 }
12002 else if (0x1F == opcode1)
12003 {
12004 /* Handle arm syscall insn. */
97dfe206 12005 if (tdep->arm_syscall_record != NULL)
72508ac0 12006 {
12007 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
12008 ret = tdep->arm_syscall_record (reg_cache, u_regval);
12009 }
12010 else
12011 {
12012 printf_unfiltered (_("no syscall record support\n"));
12013 return -1;
12014 }
12015 }
12016
 12017  /* B(1), the conditional branch, is automatically taken care of in
 12018     process_record, as PC is saved there.  */
12019
12020 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12021 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12022 record_buf_mem);
12023
12024 return ret;
12025}
12026
12027/* Handling opcode 111 insns. */
12028
12029static int
12030thumb_record_branch (insn_decode_record *thumb_insn_r)
12031{
12032 uint32_t record_buf[8];
12033 uint32_t bits_h = 0;
12034
12035 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
12036
12037 if (2 == bits_h || 3 == bits_h)
12038 {
12039 /* BL */
12040 record_buf[0] = ARM_LR_REGNUM;
12041 thumb_insn_r->reg_rec_count = 1;
12042 }
12043 else if (1 == bits_h)
12044 {
12045 /* BLX(1). */
12046 record_buf[0] = ARM_PS_REGNUM;
12047 record_buf[1] = ARM_LR_REGNUM;
12048 thumb_insn_r->reg_rec_count = 2;
12049 }
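  /* For example, a BL/BLX prefix halfword (top five bits 11110) gives
     bits_h = 2, the BL suffix (11111) gives 3, and the BLX suffix
     (11101) gives 1 in the decode above.  */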
12050
 12051  /* B(2) is automatically taken care of in process_record, as PC is
 12052     saved there.  */
12053
12054 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12055
12056 return 0;
12057}
12058
12059/* Handler for thumb2 load/store multiple instructions. */
12060
12061static int
12062thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
12063{
12064 struct regcache *reg_cache = thumb2_insn_r->regcache;
12065
12066 uint32_t reg_rn, op;
12067 uint32_t register_bits = 0, register_count = 0;
12068 uint32_t index = 0, start_address = 0;
12069 uint32_t record_buf[24], record_buf_mem[48];
12070
12071 ULONGEST u_regval = 0;
12072
12073 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12074 op = bits (thumb2_insn_r->arm_insn, 23, 24);
12075
12076 if (0 == op || 3 == op)
12077 {
12078 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12079 {
12080 /* Handle RFE instruction. */
12081 record_buf[0] = ARM_PS_REGNUM;
12082 thumb2_insn_r->reg_rec_count = 1;
12083 }
12084 else
12085 {
12086 /* Handle SRS instruction after reading banked SP. */
12087 return arm_record_unsupported_insn (thumb2_insn_r);
12088 }
12089 }
12090 else if (1 == op || 2 == op)
12091 {
12092 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12093 {
12094 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
12095 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12096 while (register_bits)
12097 {
12098 if (register_bits & 0x00000001)
12099 record_buf[index++] = register_count;
12100
12101 register_count++;
12102 register_bits = register_bits >> 1;
12103 }
12104 record_buf[index++] = reg_rn;
12105 record_buf[index++] = ARM_PS_REGNUM;
12106 thumb2_insn_r->reg_rec_count = index;
12107 }
12108 else
12109 {
12110 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
12111 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12112 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12113 while (register_bits)
12114 {
12115 if (register_bits & 0x00000001)
12116 register_count++;
12117
12118 register_bits = register_bits >> 1;
12119 }
12120
12121 if (1 == op)
12122 {
 12123              /* Start address for STM/STMIA/STMEA (increment after).  */
12124 start_address = u_regval;
12125 }
12126 else if (2 == op)
12127 {
 12128              /* Start address for STMDB/STMFD (decrement before).  */
12129 start_address = u_regval - register_count * 4;
12130 }
12131
12132 thumb2_insn_r->mem_rec_count = register_count;
12133 while (register_count)
12134 {
12135 record_buf_mem[register_count * 2 - 1] = start_address;
12136 record_buf_mem[register_count * 2 - 2] = 4;
12137 start_address = start_address + 4;
12138 register_count--;
12139 }
12140 record_buf[0] = reg_rn;
12141 record_buf[1] = ARM_PS_REGNUM;
12142 thumb2_insn_r->reg_rec_count = 2;
12143 }
12144 }
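      /* For example (illustrative values only), STMDB with a
         five-register list and Rn = 0x1000 gives
         start_address = 0x1000 - 5 * 4 = 0xFEC, so five 4-byte slots at
         0xFEC .. 0xFFC are recorded above.  */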
12145
12146 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12147 record_buf_mem);
12148 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12149 record_buf);
12150 return ARM_RECORD_SUCCESS;
12151}
12152
12153/* Handler for thumb2 load/store (dual/exclusive) and table branch
12154 instructions. */
12155
12156static int
12157thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
12158{
12159 struct regcache *reg_cache = thumb2_insn_r->regcache;
12160
12161 uint32_t reg_rd, reg_rn, offset_imm;
12162 uint32_t reg_dest1, reg_dest2;
12163 uint32_t address, offset_addr;
12164 uint32_t record_buf[8], record_buf_mem[8];
12165 uint32_t op1, op2, op3;
12166
12167 ULONGEST u_regval[2];
12168
12169 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
12170 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
12171 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
12172
12173 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12174 {
12175 if(!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
12176 {
12177 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
12178 record_buf[0] = reg_dest1;
12179 record_buf[1] = ARM_PS_REGNUM;
12180 thumb2_insn_r->reg_rec_count = 2;
12181 }
12182
12183 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
12184 {
12185 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12186 record_buf[2] = reg_dest2;
12187 thumb2_insn_r->reg_rec_count = 3;
12188 }
12189 }
12190 else
12191 {
12192 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12193 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12194
12195 if (0 == op1 && 0 == op2)
12196 {
12197 /* Handle STREX. */
12198 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12199 address = u_regval[0] + (offset_imm * 4);
12200 record_buf_mem[0] = 4;
12201 record_buf_mem[1] = address;
12202 thumb2_insn_r->mem_rec_count = 1;
12203 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12204 record_buf[0] = reg_rd;
12205 thumb2_insn_r->reg_rec_count = 1;
12206 }
12207 else if (1 == op1 && 0 == op2)
12208 {
12209 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12210 record_buf[0] = reg_rd;
12211 thumb2_insn_r->reg_rec_count = 1;
12212 address = u_regval[0];
12213 record_buf_mem[1] = address;
12214
12215 if (4 == op3)
12216 {
12217 /* Handle STREXB. */
12218 record_buf_mem[0] = 1;
12219 thumb2_insn_r->mem_rec_count = 1;
12220 }
12221 else if (5 == op3)
12222 {
12223 /* Handle STREXH. */
 12224              record_buf_mem[0] = 2;
12225 thumb2_insn_r->mem_rec_count = 1;
12226 }
12227 else if (7 == op3)
12228 {
12229 /* Handle STREXD. */
12230 address = u_regval[0];
12231 record_buf_mem[0] = 4;
12232 record_buf_mem[2] = 4;
12233 record_buf_mem[3] = address + 4;
12234 thumb2_insn_r->mem_rec_count = 2;
12235 }
12236 }
12237 else
12238 {
12239 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12240
12241 if (bit (thumb2_insn_r->arm_insn, 24))
12242 {
12243 if (bit (thumb2_insn_r->arm_insn, 23))
12244 offset_addr = u_regval[0] + (offset_imm * 4);
12245 else
12246 offset_addr = u_regval[0] - (offset_imm * 4);
12247
12248 address = offset_addr;
12249 }
12250 else
12251 address = u_regval[0];
12252
12253 record_buf_mem[0] = 4;
12254 record_buf_mem[1] = address;
12255 record_buf_mem[2] = 4;
12256 record_buf_mem[3] = address + 4;
12257 thumb2_insn_r->mem_rec_count = 2;
12258 record_buf[0] = reg_rn;
12259 thumb2_insn_r->reg_rec_count = 1;
12260 }
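      /* For example (illustrative values only), a store-dual encoding
         with P = 1, U = 1, imm8 = 2 and Rn = 0x2000 yields
         address = 0x2008, so two 4-byte slots at 0x2008 and 0x200C are
         recorded above.  */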
12261 }
12262
12263 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12264 record_buf);
12265 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12266 record_buf_mem);
12267 return ARM_RECORD_SUCCESS;
12268}
12269
 12270/* Handler for thumb2 data processing (shifted register and modified immediate)
12271 instructions. */
12272
12273static int
12274thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
12275{
12276 uint32_t reg_rd, op;
12277 uint32_t record_buf[8];
12278
12279 op = bits (thumb2_insn_r->arm_insn, 21, 24);
12280 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12281
12282 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
12283 {
12284 record_buf[0] = ARM_PS_REGNUM;
12285 thumb2_insn_r->reg_rec_count = 1;
12286 }
12287 else
12288 {
12289 record_buf[0] = reg_rd;
12290 record_buf[1] = ARM_PS_REGNUM;
12291 thumb2_insn_r->reg_rec_count = 2;
12292 }
12293
12294 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12295 record_buf);
12296 return ARM_RECORD_SUCCESS;
12297}
12298
 12299/* Generic handler for thumb2 instructions which affect the destination and PS
12300 registers. */
12301
12302static int
12303thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
12304{
12305 uint32_t reg_rd;
12306 uint32_t record_buf[8];
12307
12308 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12309
12310 record_buf[0] = reg_rd;
12311 record_buf[1] = ARM_PS_REGNUM;
12312 thumb2_insn_r->reg_rec_count = 2;
12313
12314 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12315 record_buf);
12316 return ARM_RECORD_SUCCESS;
12317}
12318
12319/* Handler for thumb2 branch and miscellaneous control instructions. */
12320
12321static int
12322thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
12323{
12324 uint32_t op, op1, op2;
12325 uint32_t record_buf[8];
12326
12327 op = bits (thumb2_insn_r->arm_insn, 20, 26);
12328 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
12329 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12330
12331 /* Handle MSR insn. */
12332 if (!(op1 & 0x2) && 0x38 == op)
12333 {
12334 if (!(op2 & 0x3))
12335 {
12336 /* CPSR is going to be changed. */
12337 record_buf[0] = ARM_PS_REGNUM;
12338 thumb2_insn_r->reg_rec_count = 1;
12339 }
12340 else
12341 {
12342 arm_record_unsupported_insn(thumb2_insn_r);
12343 return -1;
12344 }
12345 }
12346 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
12347 {
12348 /* BLX. */
12349 record_buf[0] = ARM_PS_REGNUM;
12350 record_buf[1] = ARM_LR_REGNUM;
12351 thumb2_insn_r->reg_rec_count = 2;
12352 }
12353
12354 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12355 record_buf);
12356 return ARM_RECORD_SUCCESS;
12357}
12358
12359/* Handler for thumb2 store single data item instructions. */
12360
12361static int
12362thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
12363{
12364 struct regcache *reg_cache = thumb2_insn_r->regcache;
12365
12366 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
12367 uint32_t address, offset_addr;
12368 uint32_t record_buf[8], record_buf_mem[8];
12369 uint32_t op1, op2;
12370
12371 ULONGEST u_regval[2];
12372
12373 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
12374 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
12375 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12376 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12377
12378 if (bit (thumb2_insn_r->arm_insn, 23))
12379 {
12380 /* T2 encoding. */
12381 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
12382 offset_addr = u_regval[0] + offset_imm;
12383 address = offset_addr;
12384 }
12385 else
12386 {
12387 /* T3 encoding. */
12388 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
12389 {
12390 /* Handle STRB (register). */
12391 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
12392 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
12393 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
12394 offset_addr = u_regval[1] << shift_imm;
12395 address = u_regval[0] + offset_addr;
12396 }
12397 else
12398 {
12399 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12400 if (bit (thumb2_insn_r->arm_insn, 10))
12401 {
12402 if (bit (thumb2_insn_r->arm_insn, 9))
12403 offset_addr = u_regval[0] + offset_imm;
12404 else
12405 offset_addr = u_regval[0] - offset_imm;
12406
12407 address = offset_addr;
12408 }
12409 else
12410 address = u_regval[0];
12411 }
12412 }
12413
12414 switch (op1)
12415 {
12416 /* Store byte instructions. */
12417 case 4:
12418 case 0:
12419 record_buf_mem[0] = 1;
12420 break;
12421 /* Store half word instructions. */
12422 case 1:
12423 case 5:
12424 record_buf_mem[0] = 2;
12425 break;
12426 /* Store word instructions. */
12427 case 2:
12428 case 6:
12429 record_buf_mem[0] = 4;
12430 break;
12431
12432 default:
12433 gdb_assert_not_reached ("no decoding pattern found");
12434 break;
12435 }
12436
12437 record_buf_mem[1] = address;
12438 thumb2_insn_r->mem_rec_count = 1;
12439 record_buf[0] = reg_rn;
12440 thumb2_insn_r->reg_rec_count = 1;
12441
12442 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12443 record_buf);
12444 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12445 record_buf_mem);
12446 return ARM_RECORD_SUCCESS;
12447}
12448
 12449/* Handler for thumb2 load byte/halfword and memory hint instructions. */
12450
12451static int
12452thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
12453{
12454 uint32_t record_buf[8];
12455 uint32_t reg_rt, reg_rn;
12456
12457 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
12458 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12459
12460 if (ARM_PC_REGNUM != reg_rt)
12461 {
12462 record_buf[0] = reg_rt;
12463 record_buf[1] = reg_rn;
12464 record_buf[2] = ARM_PS_REGNUM;
12465 thumb2_insn_r->reg_rec_count = 3;
12466
12467 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12468 record_buf);
12469 return ARM_RECORD_SUCCESS;
12470 }
12471
12472 return ARM_RECORD_FAILURE;
12473}
12474
12475/* Handler for thumb2 load word instructions. */
12476
12477static int
12478thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
12479{
12480 uint32_t record_buf[8];
12481
12482 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
12483 record_buf[1] = ARM_PS_REGNUM;
12484 thumb2_insn_r->reg_rec_count = 2;
12485
12486 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12487 record_buf);
12488 return ARM_RECORD_SUCCESS;
12489}
12490
12491/* Handler for thumb2 long multiply, long multiply accumulate, and
12492 divide instructions. */
12493
12494static int
12495thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
12496{
12497 uint32_t opcode1 = 0, opcode2 = 0;
12498 uint32_t record_buf[8];
c6ec2b30
OJ
12499
12500 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
12501 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
12502
12503 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
12504 {
 12505      /* Handle the long multiply family: SMULL(S), UMULL(S),
 12506         SMLAL(S), UMLAL(S).  */
12507 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12508 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12509 record_buf[2] = ARM_PS_REGNUM;
12510 thumb2_insn_r->reg_rec_count = 3;
12511 }
12512 else if (1 == opcode1 || 3 == opcode2)
12513 {
12514 /* Handle SDIV and UDIV. */
12515 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12516 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12517 record_buf[2] = ARM_PS_REGNUM;
12518 thumb2_insn_r->reg_rec_count = 3;
12519 }
12520 else
12521 return ARM_RECORD_FAILURE;
12522
12523 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12524 record_buf);
12525 return ARM_RECORD_SUCCESS;
12526}
12527
12528/* Record handler for thumb32 coprocessor instructions. */
12529
12530static int
12531thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
12532{
12533 if (bit (thumb2_insn_r->arm_insn, 25))
12534 return arm_record_coproc_data_proc (thumb2_insn_r);
12535 else
12536 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
12537}
12538
 12539/* Record handler for advanced SIMD structure load/store instructions. */
12540
12541static int
12542thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
12543{
12544 struct regcache *reg_cache = thumb2_insn_r->regcache;
12545 uint32_t l_bit, a_bit, b_bits;
12546 uint32_t record_buf[128], record_buf_mem[128];
bec2ab5a 12547 uint32_t reg_rn, reg_vd, address, f_elem;
12548 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
12549 uint8_t f_ebytes;
12550
12551 l_bit = bit (thumb2_insn_r->arm_insn, 21);
12552 a_bit = bit (thumb2_insn_r->arm_insn, 23);
12553 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
12554 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12555 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
12556 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
12557 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
12558 f_elem = 8 / f_ebytes;
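  /* For example, a size field (bits 7:6) of 0b01 gives f_ebytes = 2 and
     f_elem = 4: four 16-bit elements per 64-bit D register.  */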
12559
12560 if (!l_bit)
12561 {
12562 ULONGEST u_regval = 0;
12563 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12564 address = u_regval;
12565
12566 if (!a_bit)
12567 {
12568 /* Handle VST1. */
12569 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12570 {
12571 if (b_bits == 0x07)
12572 bf_regs = 1;
12573 else if (b_bits == 0x0a)
12574 bf_regs = 2;
12575 else if (b_bits == 0x06)
12576 bf_regs = 3;
12577 else if (b_bits == 0x02)
12578 bf_regs = 4;
12579 else
12580 bf_regs = 0;
12581
12582 for (index_r = 0; index_r < bf_regs; index_r++)
12583 {
12584 for (index_e = 0; index_e < f_elem; index_e++)
12585 {
12586 record_buf_mem[index_m++] = f_ebytes;
12587 record_buf_mem[index_m++] = address;
12588 address = address + f_ebytes;
12589 thumb2_insn_r->mem_rec_count += 1;
12590 }
12591 }
12592 }
12593 /* Handle VST2. */
12594 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12595 {
12596 if (b_bits == 0x09 || b_bits == 0x08)
12597 bf_regs = 1;
12598 else if (b_bits == 0x03)
12599 bf_regs = 2;
12600 else
12601 bf_regs = 0;
12602
12603 for (index_r = 0; index_r < bf_regs; index_r++)
12604 for (index_e = 0; index_e < f_elem; index_e++)
12605 {
12606 for (loop_t = 0; loop_t < 2; loop_t++)
12607 {
12608 record_buf_mem[index_m++] = f_ebytes;
12609 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12610 thumb2_insn_r->mem_rec_count += 1;
12611 }
12612 address = address + (2 * f_ebytes);
12613 }
12614 }
12615 /* Handle VST3. */
12616 else if ((b_bits & 0x0e) == 0x04)
12617 {
12618 for (index_e = 0; index_e < f_elem; index_e++)
12619 {
12620 for (loop_t = 0; loop_t < 3; loop_t++)
12621 {
12622 record_buf_mem[index_m++] = f_ebytes;
12623 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12624 thumb2_insn_r->mem_rec_count += 1;
12625 }
12626 address = address + (3 * f_ebytes);
12627 }
12628 }
12629 /* Handle VST4. */
12630 else if (!(b_bits & 0x0e))
12631 {
12632 for (index_e = 0; index_e < f_elem; index_e++)
12633 {
12634 for (loop_t = 0; loop_t < 4; loop_t++)
12635 {
12636 record_buf_mem[index_m++] = f_ebytes;
12637 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12638 thumb2_insn_r->mem_rec_count += 1;
12639 }
12640 address = address + (4 * f_ebytes);
12641 }
12642 }
12643 }
12644 else
12645 {
12646 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
12647
12648 if (bft_size == 0x00)
12649 f_ebytes = 1;
12650 else if (bft_size == 0x01)
12651 f_ebytes = 2;
12652 else if (bft_size == 0x02)
12653 f_ebytes = 4;
12654 else
12655 f_ebytes = 0;
12656
12657 /* Handle VST1. */
12658 if (!(b_bits & 0x0b) || b_bits == 0x08)
12659 thumb2_insn_r->mem_rec_count = 1;
12660 /* Handle VST2. */
12661 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
12662 thumb2_insn_r->mem_rec_count = 2;
12663 /* Handle VST3. */
12664 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
12665 thumb2_insn_r->mem_rec_count = 3;
12666 /* Handle VST4. */
12667 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
12668 thumb2_insn_r->mem_rec_count = 4;
12669
12670 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
12671 {
 12672          record_buf_mem[index_m * 2] = f_ebytes;
 12673          record_buf_mem[index_m * 2 + 1] = address + (index_m * f_ebytes);
12674 }
12675 }
12676 }
12677 else
12678 {
12679 if (!a_bit)
12680 {
12681 /* Handle VLD1. */
12682 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12683 thumb2_insn_r->reg_rec_count = 1;
12684 /* Handle VLD2. */
12685 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12686 thumb2_insn_r->reg_rec_count = 2;
12687 /* Handle VLD3. */
12688 else if ((b_bits & 0x0e) == 0x04)
12689 thumb2_insn_r->reg_rec_count = 3;
12690 /* Handle VLD4. */
12691 else if (!(b_bits & 0x0e))
12692 thumb2_insn_r->reg_rec_count = 4;
12693 }
12694 else
12695 {
12696 /* Handle VLD1. */
12697 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
12698 thumb2_insn_r->reg_rec_count = 1;
12699 /* Handle VLD2. */
12700 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
12701 thumb2_insn_r->reg_rec_count = 2;
12702 /* Handle VLD3. */
12703 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
12704 thumb2_insn_r->reg_rec_count = 3;
12705 /* Handle VLD4. */
12706 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
12707 thumb2_insn_r->reg_rec_count = 4;
12708
12709 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
12710 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
12711 }
12712 }
12713
12714 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
12715 {
12716 record_buf[index_r] = reg_rn;
12717 thumb2_insn_r->reg_rec_count += 1;
12718 }
12719
12720 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12721 record_buf);
12722 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12723 record_buf_mem);
12724 return 0;
12725}
12726
12727/* Decodes thumb2 instruction type and invokes its record handler. */
12728
12729static unsigned int
12730thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
12731{
12732 uint32_t op, op1, op2;
12733
12734 op = bit (thumb2_insn_r->arm_insn, 15);
12735 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
12736 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
12737
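  /* For example (illustrative), POP.W {r4-r11, pc} (halfwords 0xE8BD
     0x8FF0) becomes 0xE8BD8FF0 after the halfword swap in decode_insn:
     op1 = 0x01 and op2 = 0x0B, so (op2 & 0x64) == 0 and the insn is
     routed to thumb2_record_ld_st_multiple below.  */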
12738 if (op1 == 0x01)
12739 {
 12740      if (!(op2 & 0x64))
12741 {
12742 /* Load/store multiple instruction. */
12743 return thumb2_record_ld_st_multiple (thumb2_insn_r);
12744 }
12745 else if (!((op2 & 0x64) ^ 0x04))
12746 {
12747 /* Load/store (dual/exclusive) and table branch instruction. */
12748 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
12749 }
12750 else if (!((op2 & 0x20) ^ 0x20))
12751 {
12752 /* Data-processing (shifted register). */
12753 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12754 }
12755 else if (op2 & 0x40)
12756 {
12757 /* Co-processor instructions. */
60cc5e93 12758 return thumb2_record_coproc_insn (thumb2_insn_r);
12759 }
12760 }
12761 else if (op1 == 0x02)
12762 {
12763 if (op)
12764 {
12765 /* Branches and miscellaneous control instructions. */
12766 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
12767 }
12768 else if (op2 & 0x20)
12769 {
12770 /* Data-processing (plain binary immediate) instruction. */
12771 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12772 }
12773 else
12774 {
12775 /* Data-processing (modified immediate). */
12776 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12777 }
12778 }
12779 else if (op1 == 0x03)
12780 {
 12781      if (!(op2 & 0x71))
12782 {
12783 /* Store single data item. */
12784 return thumb2_record_str_single_data (thumb2_insn_r);
12785 }
12786 else if (!((op2 & 0x71) ^ 0x10))
12787 {
12788 /* Advanced SIMD or structure load/store instructions. */
1e1b6563 12789 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
12790 }
12791 else if (!((op2 & 0x67) ^ 0x01))
12792 {
12793 /* Load byte, memory hints instruction. */
12794 return thumb2_record_ld_mem_hints (thumb2_insn_r);
12795 }
12796 else if (!((op2 & 0x67) ^ 0x03))
12797 {
12798 /* Load halfword, memory hints instruction. */
12799 return thumb2_record_ld_mem_hints (thumb2_insn_r);
12800 }
12801 else if (!((op2 & 0x67) ^ 0x05))
12802 {
12803 /* Load word instruction. */
12804 return thumb2_record_ld_word (thumb2_insn_r);
12805 }
12806 else if (!((op2 & 0x70) ^ 0x20))
12807 {
12808 /* Data-processing (register) instruction. */
12809 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12810 }
12811 else if (!((op2 & 0x78) ^ 0x30))
12812 {
12813 /* Multiply, multiply accumulate, abs diff instruction. */
12814 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12815 }
12816 else if (!((op2 & 0x78) ^ 0x38))
12817 {
12818 /* Long multiply, long multiply accumulate, and divide. */
12819 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
12820 }
12821 else if (op2 & 0x40)
12822 {
12823 /* Co-processor instructions. */
60cc5e93 12824 return thumb2_record_coproc_insn (thumb2_insn_r);
12825 }
12826 }
12827
12828 return -1;
12829}
12830
 12831/* Extracts arm/thumb/thumb2 insn depending on the size; returns 0 on success
 12832   and a positive value on failure. */
12833
12834static int
12835extract_arm_insn (insn_decode_record *insn_record, uint32_t insn_size)
12836{
12837 gdb_byte buf[insn_size];
12838
12839 memset (&buf[0], 0, insn_size);
12840
12841 if (target_read_memory (insn_record->this_addr, &buf[0], insn_size))
12842 return 1;
12843 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
12844 insn_size,
2959fed9 12845 gdbarch_byte_order_for_code (insn_record->gdbarch));
12846 return 0;
12847}
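/* Note: arm_process_record below first fetches only two bytes to inspect
   the leading halfword, then calls decode_insn with the full
   ARM/Thumb/Thumb2 size, so this helper normally runs twice per
   recorded instruction.  */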
12848
12849typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
12850
 12851/* Decode arm/thumb insn depending on condition codes and opcodes, and
12852 dispatch it. */
12853
12854static int
12855decode_insn (insn_decode_record *arm_record, record_type_t record_type,
01e57735 12856 uint32_t insn_size)
12857{
12858
 12859  /* Bits 25, 26, 27 (numbered from 0) decode the type of arm
 12860     instruction.  */
0fa9c223 12861 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
12862 {
12863 arm_record_data_proc_misc_ld_str, /* 000. */
12864 arm_record_data_proc_imm, /* 001. */
12865 arm_record_ld_st_imm_offset, /* 010. */
12866 arm_record_ld_st_reg_offset, /* 011. */
12867 arm_record_ld_st_multiple, /* 100. */
12868 arm_record_b_bl, /* 101. */
60cc5e93 12869 arm_record_asimd_vfp_coproc, /* 110. */
72508ac0
PO
12870 arm_record_coproc_data_proc /* 111. */
12871 };
12872
 12873  /* Bits 13, 14, 15 (numbered from 0) decode the type of thumb
 12874     instruction.  */
0fa9c223 12875 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
 12876  {
12877 thumb_record_shift_add_sub, /* 000. */
12878 thumb_record_add_sub_cmp_mov, /* 001. */
12879 thumb_record_ld_st_reg_offset, /* 010. */
12880 thumb_record_ld_st_imm_offset, /* 011. */
12881 thumb_record_ld_st_stack, /* 100. */
12882 thumb_record_misc, /* 101. */
12883 thumb_record_ldm_stm_swi, /* 110. */
12884 thumb_record_branch /* 111. */
12885 };
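  /* For example (illustrative), the thumb insn PUSH {r4, lr} (0xB510)
     has bits 15:13 = 0b101 and is dispatched to thumb_record_misc,
     while the arm insn STR r0, [r1] (0xE5810000) has bits 27:25 = 0b010
     and is dispatched to arm_record_ld_st_imm_offset.  */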
12886
12887 uint32_t ret = 0; /* return value: negative:failure 0:success. */
12888 uint32_t insn_id = 0;
12889
12890 if (extract_arm_insn (arm_record, insn_size))
12891 {
12892 if (record_debug)
12893 {
12894 printf_unfiltered (_("Process record: error reading memory at "
12895 "addr %s len = %d.\n"),
12896 paddress (arm_record->gdbarch,
12897 arm_record->this_addr), insn_size);
12898 }
12899 return -1;
12900 }
12901 else if (ARM_RECORD == record_type)
12902 {
12903 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
12904 insn_id = bits (arm_record->arm_insn, 25, 27);
12905
12906 if (arm_record->cond == 0xf)
12907 ret = arm_record_extension_space (arm_record);
12908 else
01e57735 12909 {
12910 /* If this insn has fallen into extension space
12911 then we need not decode it anymore. */
12912 ret = arm_handle_insn[insn_id] (arm_record);
12913 }
12914 if (ret != ARM_RECORD_SUCCESS)
12915 {
12916 arm_record_unsupported_insn (arm_record);
12917 ret = -1;
12918 }
12919 }
12920 else if (THUMB_RECORD == record_type)
12921 {
 12922      /* As thumb does not have condition codes, set cond to -1.  */
12923 arm_record->cond = -1;
12924 insn_id = bits (arm_record->arm_insn, 13, 15);
12925 ret = thumb_handle_insn[insn_id] (arm_record);
12926 if (ret != ARM_RECORD_SUCCESS)
12927 {
12928 arm_record_unsupported_insn (arm_record);
12929 ret = -1;
12930 }
12931 }
12932 else if (THUMB2_RECORD == record_type)
12933 {
 12934      /* As thumb does not have condition codes, set cond to -1.  */
12935 arm_record->cond = -1;
12936
12937 /* Swap first half of 32bit thumb instruction with second half. */
12938 arm_record->arm_insn
01e57735 12939 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
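      /* For example, on a little-endian target the byte sequence
         BD E8 F0 8F (POP.W {r4-r11, pc}) is read as 0x8FF0E8BD; the swap
         above yields 0xE8BD8FF0, placing the first halfword in bits
         31:16 as the thumb2 record handlers expect.  */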
c6ec2b30 12940
ca92db2d 12941 ret = thumb2_record_decode_insn_handler (arm_record);
c6ec2b30 12942
ca92db2d 12943 if (ret != ARM_RECORD_SUCCESS)
12944 {
12945 arm_record_unsupported_insn (arm_record);
12946 ret = -1;
12947 }
12948 }
12949 else
12950 {
12951 /* Throw assertion. */
12952 gdb_assert_not_reached ("not a valid instruction, could not decode");
12953 }
12954
12955 return ret;
12956}
12957
12958
12959/* Cleans up local record registers and memory allocations. */
12960
12961static void
12962deallocate_reg_mem (insn_decode_record *record)
12963{
12964 xfree (record->arm_regs);
12965 xfree (record->arm_mems);
12966}
12967
12968
01e57735 12969/* Parse the current instruction and record the values of the registers and
 12970   memory that will be changed by the current instruction to "record_arch_list".
12971 Return -1 if something is wrong. */
12972
12973int
12974arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
12975 CORE_ADDR insn_addr)
12976{
12977
12978 uint32_t no_of_rec = 0;
12979 uint32_t ret = 0; /* return value: -1:record failure ; 0:success */
12980 ULONGEST t_bit = 0, insn_id = 0;
12981
12982 ULONGEST u_regval = 0;
12983
12984 insn_decode_record arm_record;
12985
12986 memset (&arm_record, 0, sizeof (insn_decode_record));
12987 arm_record.regcache = regcache;
12988 arm_record.this_addr = insn_addr;
12989 arm_record.gdbarch = gdbarch;
12990
12991
12992 if (record_debug > 1)
12993 {
12994 fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
01e57735 12995 "addr = %s\n",
12996 paddress (gdbarch, arm_record.this_addr));
12997 }
12998
12999 if (extract_arm_insn (&arm_record, 2))
13000 {
13001 if (record_debug)
13002 {
13003 printf_unfiltered (_("Process record: error reading memory at "
13004 "addr %s len = %d.\n"),
13005 paddress (arm_record.gdbarch,
13006 arm_record.this_addr), 2);
13007 }
72508ac0
PO
13008 return -1;
13009 }
13010
 13011  /* Check whether the insn is a thumb or arm one.  */
13012
13013 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
13014 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
13015
13016
13017 if (!(u_regval & t_bit))
13018 {
13019 /* We are decoding arm insn. */
13020 ret = decode_insn (&arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
13021 }
13022 else
13023 {
13024 insn_id = bits (arm_record.arm_insn, 11, 15);
 13025      /* Is it a 32-bit thumb2 insn?  (Bits 15:11 of the first halfword are 0x1D, 0x1E or 0x1F for 32-bit Thumb encodings.)  */
13026 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
13027 {
13028 ret = decode_insn (&arm_record, THUMB2_RECORD,
13029 THUMB2_INSN_SIZE_BYTES);
13030 }
72508ac0 13031 else
13032 {
13033 /* We are decoding thumb insn. */
13034 ret = decode_insn (&arm_record, THUMB_RECORD, THUMB_INSN_SIZE_BYTES);
13035 }
13036 }
13037
13038 if (0 == ret)
13039 {
13040 /* Record registers. */
25ea693b 13041 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
72508ac0 13042 if (arm_record.arm_regs)
13043 {
13044 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
13045 {
13046 if (record_full_arch_list_add_reg
25ea693b 13047 (arm_record.regcache , arm_record.arm_regs[no_of_rec]))
13048 ret = -1;
13049 }
13050 }
13051 /* Record memories. */
13052 if (arm_record.arm_mems)
13053 {
13054 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
13055 {
13056 if (record_full_arch_list_add_mem
13057 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
25ea693b 13058 arm_record.arm_mems[no_of_rec].len))
13059 ret = -1;
13060 }
13061 }
72508ac0 13062
25ea693b 13063 if (record_full_arch_list_add_end ())
01e57735 13064 ret = -1;
13065 }
13066
13067
13068 deallocate_reg_mem (&arm_record);
13069
13070 return ret;
13071}