/* Common target dependent code for GDB on ARM systems.

   Copyright (C) 1988-2019 Free Software Foundation, Inc.

   This file is part of GDB.

   This program is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3 of the License, or
   (at your option) any later version.

   This program is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program.  If not, see <http://www.gnu.org/licenses/>.  */
22 #include <ctype.h> /* XXX for isupper (). */
29 #include "dis-asm.h" /* For register styles. */
32 #include "reggroups.h"
33 #include "target-float.h"
35 #include "arch-utils.h"
37 #include "frame-unwind.h"
38 #include "frame-base.h"
39 #include "trad-frame.h"
41 #include "dwarf2-frame.h"
43 #include "prologue-value.h"
45 #include "target-descriptions.h"
46 #include "user-regs.h"
47 #include "observable.h"
50 #include "arch/arm-get-next-pcs.h"
52 #include "gdb/sim-arm.h"
55 #include "coff/internal.h"
58 #include "common/vec.h"
61 #include "record-full.h"
64 #include "features/arm/arm-with-m.c"
65 #include "features/arm/arm-with-m-fpa-layout.c"
66 #include "features/arm/arm-with-m-vfp-d16.c"
67 #include "features/arm/arm-with-iwmmxt.c"
68 #include "features/arm/arm-with-vfpv2.c"
69 #include "features/arm/arm-with-vfpv3.c"
70 #include "features/arm/arm-with-neon.c"
73 #include "common/selftest.h"
/* Macros for setting and testing a bit in a minimal symbol that marks
   it as Thumb function.  The MSB of the minimal symbol's "info" field
   is used for this purpose.

   MSYMBOL_SET_SPECIAL	Actually sets the "special" bit.
   MSYMBOL_IS_SPECIAL	Tests the "special" bit in a minimal symbol.  */

#define MSYMBOL_SET_SPECIAL(msym)			\
	MSYMBOL_TARGET_FLAG_1 (msym) = 1

#define MSYMBOL_IS_SPECIAL(msym)			\
	MSYMBOL_TARGET_FLAG_1 (msym)
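/* Illustrative sketch (not part of the original file): how these macros
   would typically be used when a symbol reader encounters a Thumb symbol.
   The function below and its name are hypothetical; only the two macros
   above are real.  */
#if 0
static void
example_note_thumb_symbol (struct minimal_symbol *msym, int is_thumb)
{
  if (is_thumb)
    MSYMBOL_SET_SPECIAL (msym);		/* Remember: this is Thumb code.  */

  if (MSYMBOL_IS_SPECIAL (msym))
    {
      /* Treat addresses in this function as Thumb, e.g. when choosing
	 which breakpoint instruction to use.  */
    }
}
#endif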
/* Per-objfile data used for mapping symbols.  */
static const struct objfile_data *arm_objfile_data_key;

struct arm_mapping_symbol
{
  bfd_vma value;
  char type;
};
typedef struct arm_mapping_symbol arm_mapping_symbol_s;
DEF_VEC_O(arm_mapping_symbol_s);

struct arm_per_objfile
{
  VEC(arm_mapping_symbol_s) **section_maps;
};
107 /* The list of available "set arm ..." and "show arm ..." commands. */
108 static struct cmd_list_element *setarmcmdlist = NULL;
109 static struct cmd_list_element *showarmcmdlist = NULL;
111 /* The type of floating-point to use. Keep this in sync with enum
112 arm_float_model, and the help string in _initialize_arm_tdep. */
113 static const char *const fp_model_strings[] =
123 /* A variable that can be configured by the user. */
124 static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
125 static const char *current_fp_model = "auto";
127 /* The ABI to use. Keep this in sync with arm_abi_kind. */
128 static const char *const arm_abi_strings[] =
136 /* A variable that can be configured by the user. */
137 static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
138 static const char *arm_abi_string = "auto";
140 /* The execution mode to assume. */
141 static const char *const arm_mode_strings[] =
149 static const char *arm_fallback_mode_string = "auto";
150 static const char *arm_force_mode_string = "auto";
152 /* The standard register names, and all the valid aliases for them. Note
153 that `fp', `sp' and `pc' are not added in this alias list, because they
154 have been added as builtin user registers in
155 std-regs.c:_initialize_frame_reg. */
160 } arm_register_aliases[] = {
161 /* Basic register numbers. */
178 /* Synonyms (argument and variable registers). */
191 /* Other platform-specific names for r9. */
197 /* Names used by GCC (not listed in the ARM EABI). */
199 /* A special name from the older ATPCS. */
203 static const char *const arm_register_names[] =
204 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
205 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
206 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
207 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
208 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
209 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
210 "fps", "cpsr" }; /* 24 25 */
212 /* Holds the current set of options to be passed to the disassembler. */
213 static char *arm_disassembler_options;
215 /* Valid register name styles. */
216 static const char **valid_disassembly_styles;
218 /* Disassembly style to use. Default to "std" register names. */
219 static const char *disassembly_style;
221 /* This is used to keep the bfd arch_info in sync with the disassembly
223 static void set_disassembly_style_sfunc (const char *, int,
224 struct cmd_list_element *);
225 static void show_disassembly_style_sfunc (struct ui_file *, int,
226 struct cmd_list_element *,
229 static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
230 readable_regcache *regcache,
231 int regnum, gdb_byte *buf);
232 static void arm_neon_quad_write (struct gdbarch *gdbarch,
233 struct regcache *regcache,
234 int regnum, const gdb_byte *buf);
237 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self);
240 /* get_next_pcs operations. */
241 static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = {
242 arm_get_next_pcs_read_memory_unsigned_integer,
243 arm_get_next_pcs_syscall_next_pc,
244 arm_get_next_pcs_addr_bits_remove,
245 arm_get_next_pcs_is_thumb,
249 struct arm_prologue_cache
251 /* The stack pointer at the time this frame was created; i.e. the
252 caller's stack pointer when this function was called. It is used
253 to identify this frame. */
256 /* The frame base for this frame is just prev_sp - frame size.
257 FRAMESIZE is the distance from the frame pointer to the
258 initial stack pointer. */
262 /* The register used to hold the frame pointer for this frame. */
265 /* Saved register offsets. */
266 struct trad_frame_saved_reg *saved_regs;
269 static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
270 CORE_ADDR prologue_start,
271 CORE_ADDR prologue_end,
272 struct arm_prologue_cache *cache);
/* Architecture version for displaced stepping.  This affects the behaviour
   of certain instructions, and really should not be hard-wired.  */

#define DISPLACED_STEPPING_ARCH_VERSION		5

/* Set to true if the 32-bit mode is in use.  */

int arm_apcs_32 = 1;
/* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode.  */

static ULONGEST
arm_psr_thumb_bit (struct gdbarch *gdbarch)
{
  if (gdbarch_tdep (gdbarch)->is_m)
    return XPSR_T;
  else
    return CPSR_T;
}

/* Determine if the processor is currently executing in Thumb mode.  */

int
arm_is_thumb (struct regcache *regcache)
{
  ULONGEST cpsr;
  ULONGEST t_bit = arm_psr_thumb_bit (regcache->arch ());

  cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);

  return (cpsr & t_bit) != 0;
}
307 /* Determine if FRAME is executing in Thumb mode. */
310 arm_frame_is_thumb (struct frame_info *frame)
313 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
315 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
316 directly (from a signal frame or dummy frame) or by interpreting
317 the saved LR (from a prologue or DWARF frame). So consult it and
318 trust the unwinders. */
319 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
321 return (cpsr & t_bit) != 0;
324 /* Callback for VEC_lower_bound. */
327 arm_compare_mapping_symbols (const struct arm_mapping_symbol *lhs,
328 const struct arm_mapping_symbol *rhs)
330 return lhs->value < rhs->value;
333 /* Search for the mapping symbol covering MEMADDR. If one is found,
334 return its type. Otherwise, return 0. If START is non-NULL,
335 set *START to the location of the mapping symbol. */
338 arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
340 struct obj_section *sec;
342 /* If there are mapping symbols, consult them. */
343 sec = find_pc_section (memaddr);
346 struct arm_per_objfile *data;
347 VEC(arm_mapping_symbol_s) *map;
348 struct arm_mapping_symbol map_key = { memaddr - obj_section_addr (sec),
352 data = (struct arm_per_objfile *) objfile_data (sec->objfile,
353 arm_objfile_data_key);
356 map = data->section_maps[sec->the_bfd_section->index];
357 if (!VEC_empty (arm_mapping_symbol_s, map))
359 struct arm_mapping_symbol *map_sym;
361 idx = VEC_lower_bound (arm_mapping_symbol_s, map, &map_key,
362 arm_compare_mapping_symbols);
364 /* VEC_lower_bound finds the earliest ordered insertion
365 point. If the following symbol starts at this exact
366 address, we use that; otherwise, the preceding
367 mapping symbol covers this address. */
368 if (idx < VEC_length (arm_mapping_symbol_s, map))
370 map_sym = VEC_index (arm_mapping_symbol_s, map, idx);
371 if (map_sym->value == map_key.value)
374 *start = map_sym->value + obj_section_addr (sec);
375 return map_sym->type;
381 map_sym = VEC_index (arm_mapping_symbol_s, map, idx - 1);
383 *start = map_sym->value + obj_section_addr (sec);
384 return map_sym->type;
393 /* Determine if the program counter specified in MEMADDR is in a Thumb
394 function. This function should be called for addresses unrelated to
395 any executing frame; otherwise, prefer arm_frame_is_thumb. */
398 arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
400 struct bound_minimal_symbol sym;
402 arm_displaced_step_closure *dsc
403 = ((arm_displaced_step_closure * )
404 get_displaced_step_closure_by_addr (memaddr));
406 /* If checking the mode of displaced instruction in copy area, the mode
407 should be determined by instruction on the original address. */
411 fprintf_unfiltered (gdb_stdlog,
412 "displaced: check mode of %.8lx instead of %.8lx\n",
413 (unsigned long) dsc->insn_addr,
414 (unsigned long) memaddr);
415 memaddr = dsc->insn_addr;
418 /* If bit 0 of the address is set, assume this is a Thumb address. */
419 if (IS_THUMB_ADDR (memaddr))
  /* If the user wants to override the symbol table, let them.  */
423 if (strcmp (arm_force_mode_string, "arm") == 0)
425 if (strcmp (arm_force_mode_string, "thumb") == 0)
428 /* ARM v6-M and v7-M are always in Thumb mode. */
429 if (gdbarch_tdep (gdbarch)->is_m)
432 /* If there are mapping symbols, consult them. */
433 type = arm_find_mapping_symbol (memaddr, NULL);
437 /* Thumb functions have a "special" bit set in minimal symbols. */
438 sym = lookup_minimal_symbol_by_pc (memaddr);
440 return (MSYMBOL_IS_SPECIAL (sym.minsym));
442 /* If the user wants to override the fallback mode, let them. */
443 if (strcmp (arm_fallback_mode_string, "arm") == 0)
445 if (strcmp (arm_fallback_mode_string, "thumb") == 0)
448 /* If we couldn't find any symbol, but we're talking to a running
449 target, then trust the current value of $cpsr. This lets
450 "display/i $pc" always show the correct mode (though if there is
451 a symbol table we will not reach here, so it still may not be
452 displayed in the mode it will be executed). */
453 if (target_has_registers)
454 return arm_frame_is_thumb (get_current_frame ());
456 /* Otherwise we're out of luck; we assume ARM. */
460 /* Determine if the address specified equals any of these magic return
461 values, called EXC_RETURN, defined by the ARM v6-M and v7-M
464 From ARMv6-M Reference Manual B1.5.8
465 Table B1-5 Exception return behavior
467 EXC_RETURN Return To Return Stack
468 0xFFFFFFF1 Handler mode Main
469 0xFFFFFFF9 Thread mode Main
470 0xFFFFFFFD Thread mode Process
472 From ARMv7-M Reference Manual B1.5.8
473 Table B1-8 EXC_RETURN definition of exception return behavior, no FP
475 EXC_RETURN Return To Return Stack
476 0xFFFFFFF1 Handler mode Main
477 0xFFFFFFF9 Thread mode Main
478 0xFFFFFFFD Thread mode Process
480 Table B1-9 EXC_RETURN definition of exception return behavior, with
483 EXC_RETURN Return To Return Stack Frame Type
484 0xFFFFFFE1 Handler mode Main Extended
485 0xFFFFFFE9 Thread mode Main Extended
486 0xFFFFFFED Thread mode Process Extended
487 0xFFFFFFF1 Handler mode Main Basic
488 0xFFFFFFF9 Thread mode Main Basic
489 0xFFFFFFFD Thread mode Process Basic
491 For more details see "B1.5.8 Exception return behavior"
492 in both ARMv6-M and ARMv7-M Architecture Reference Manuals. */
static int
arm_m_addr_is_magic (CORE_ADDR addr)
{
  switch (addr)
    {
      /* Values from Tables in B1.5.8 the EXC_RETURN definitions of
	 the exception return behavior.  */
      case 0xffffffe1:
      case 0xffffffe9:
      case 0xffffffed:
      case 0xfffffff1:
      case 0xfffffff9:
      case 0xfffffffd:
	/* Address is magic.  */
	return 1;

      default:
	/* Address is not magic.  */
	return 0;
    }
}
/* Remove useless bits from addresses in a running program.  */

static CORE_ADDR
arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
{
  /* On M-profile devices, do not strip the low bit from EXC_RETURN
     (the magic exception return address).  */
  if (gdbarch_tdep (gdbarch)->is_m
      && arm_m_addr_is_magic (val))
    return val;

  if (arm_apcs_32)
    return UNMAKE_THUMB_ADDR (val);
  else
    return (val & 0x03fffffc);
}
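/* For example (illustrative values, not from the original sources): a Thumb
   return address such as 0x00008001 is cleaned up to 0x00008000 by
   UNMAKE_THUMB_ADDR, while on an M-profile target the magic EXC_RETURN
   value 0xfffffffd is returned unchanged so the exception-return unwinder
   can still recognize it.  */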
532 /* Return 1 if PC is the start of a compiler helper function which
533 can be safely ignored during prologue skipping. IS_THUMB is true
534 if the function is known to be a Thumb function due to the way it
537 skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
539 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
540 struct bound_minimal_symbol msym;
542 msym = lookup_minimal_symbol_by_pc (pc);
543 if (msym.minsym != NULL
544 && BMSYMBOL_VALUE_ADDRESS (msym) == pc
545 && MSYMBOL_LINKAGE_NAME (msym.minsym) != NULL)
547 const char *name = MSYMBOL_LINKAGE_NAME (msym.minsym);
549 /* The GNU linker's Thumb call stub to foo is named
551 if (strstr (name, "_from_thumb") != NULL)
554 /* On soft-float targets, __truncdfsf2 is called to convert promoted
555 arguments to their argument types in non-prototyped
557 if (startswith (name, "__truncdfsf2"))
559 if (startswith (name, "__aeabi_d2f"))
562 /* Internal functions related to thread-local storage. */
563 if (startswith (name, "__tls_get_addr"))
565 if (startswith (name, "__aeabi_read_tp"))
570 /* If we run against a stripped glibc, we may be unable to identify
571 special functions by name. Check for one important case,
572 __aeabi_read_tp, by comparing the *code* against the default
573 implementation (this is hand-written ARM assembler in glibc). */
576 && read_code_unsigned_integer (pc, 4, byte_order_for_code)
577 == 0xe3e00a0f /* mov r0, #0xffff0fff */
578 && read_code_unsigned_integer (pc + 4, 4, byte_order_for_code)
579 == 0xe240f01f) /* sub pc, r0, #31 */
586 /* Extract the immediate from instruction movw/movt of encoding T. INSN1 is
587 the first 16-bit of instruction, and INSN2 is the second 16-bit of
589 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
590 ((bits ((insn1), 0, 3) << 12) \
591 | (bits ((insn1), 10, 10) << 11) \
592 | (bits ((insn2), 12, 14) << 8) \
593 | bits ((insn2), 0, 7))
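/* Worked example (illustrative, not from the original sources): the Thumb-2
   encoding of "movw r0, #0x1234" is insn1 = 0xf241 (i = 0, imm4 = 0x1) and
   insn2 = 0x2034 (imm3 = 0x2, Rd = r0, imm8 = 0x34), so
   EXTRACT_MOVW_MOVT_IMM_T (0xf241, 0x2034)
     = (0x1 << 12) | (0 << 11) | (0x2 << 8) | 0x34 = 0x1234.  */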
595 /* Extract the immediate from instruction movw/movt of encoding A. INSN is
596 the 32-bit instruction. */
597 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \
598 ((bits ((insn), 16, 19) << 12) \
599 | bits ((insn), 0, 11))
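/* Worked example (illustrative): the ARM encoding of "movw r0, #0x1234" is
   0xe3010234 (imm4 = 0x1 in bits 16-19, imm12 = 0x234 in bits 0-11), so
   EXTRACT_MOVW_MOVT_IMM_A (0xe3010234) = (0x1 << 12) | 0x234 = 0x1234.  */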
/* Decode immediate value; implements ThumbExpandImmediate pseudo-op.  */

static unsigned int
thumb_expand_immediate (unsigned int imm)
{
  unsigned int count = imm >> 7;

  if (count < 8)
    switch (count / 2)
      {
      case 0:
	return imm & 0xff;
      case 1:
	return (imm & 0xff) | ((imm & 0xff) << 16);
      case 2:
	return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
      case 3:
	return (imm & 0xff) | ((imm & 0xff) << 8)
		| ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
      }

  return (0x80 | (imm & 0x7f)) << (32 - count);
}
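/* Worked examples (illustrative): for imm = 0x255 the "duplicate into bytes
   1 and 3" form is selected, giving 0x55005500; for imm = 0x4ff the rotation
   form applies, and (0x80 | 0x7f) rotated right by 9 gives 0x7f800000.  */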
/* Return 1 if the 16-bit Thumb instruction INSN restores SP in
   epilogue, 0 otherwise.  */

static int
thumb_instruction_restores_sp (unsigned short insn)
{
  return (insn == 0x46bd		/* mov sp, r7 */
	  || (insn & 0xff80) == 0xb000	/* add sp, imm */
	  || (insn & 0xfe00) == 0xbc00); /* pop <registers> */
}
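/* For example (illustrative): 0xbd10 ("pop {r4, pc}") matches the 0xbc00
   pattern above and 0xb008 ("add sp, #32") matches 0xb000, so the prologue
   scanner below treats both as the start of an epilogue.  */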
636 /* Analyze a Thumb prologue, looking for a recognizable stack frame
637 and frame pointer. Scan until we encounter a store that could
638 clobber the stack frame unexpectedly, or an unknown instruction.
639 Return the last address which is definitely safe to skip for an
640 initial breakpoint. */
643 thumb_analyze_prologue (struct gdbarch *gdbarch,
644 CORE_ADDR start, CORE_ADDR limit,
645 struct arm_prologue_cache *cache)
647 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
648 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
652 CORE_ADDR unrecognized_pc = 0;
654 for (i = 0; i < 16; i++)
655 regs[i] = pv_register (i, 0);
656 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
658 while (start < limit)
662 insn = read_code_unsigned_integer (start, 2, byte_order_for_code);
664 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
669 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
672 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
673 whether to save LR (R14). */
674 mask = (insn & 0xff) | ((insn & 0x100) << 6);
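	  /* For example (illustrative): "push {r4, r7, lr}" is encoded as
	     0xb590, so mask becomes 0x90 | (0x100 << 6) = 0x4090, i.e.
	     bits 4, 7 and 14 (LR) are set.  */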
676 /* Calculate offsets of saved R0-R7 and LR. */
677 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
678 if (mask & (1 << regno))
680 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
682 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
685 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
687 offset = (insn & 0x7f) << 2; /* get scaled offset */
688 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
691 else if (thumb_instruction_restores_sp (insn))
693 /* Don't scan past the epilogue. */
696 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
697 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
699 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
700 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
701 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
703 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
704 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
705 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
707 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
708 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
709 && pv_is_constant (regs[bits (insn, 3, 5)]))
710 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
711 regs[bits (insn, 6, 8)]);
712 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
713 && pv_is_constant (regs[bits (insn, 3, 6)]))
715 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
716 int rm = bits (insn, 3, 6);
717 regs[rd] = pv_add (regs[rd], regs[rm]);
719 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
721 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
722 int src_reg = (insn & 0x78) >> 3;
723 regs[dst_reg] = regs[src_reg];
725 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
727 /* Handle stores to the stack. Normally pushes are used,
728 but with GCC -mtpcs-frame, there may be other stores
729 in the prologue to create the frame. */
730 int regno = (insn >> 8) & 0x7;
733 offset = (insn & 0xff) << 2;
734 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
736 if (stack.store_would_trash (addr))
739 stack.store (addr, 4, regs[regno]);
741 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
743 int rd = bits (insn, 0, 2);
744 int rn = bits (insn, 3, 5);
747 offset = bits (insn, 6, 10) << 2;
748 addr = pv_add_constant (regs[rn], offset);
750 if (stack.store_would_trash (addr))
753 stack.store (addr, 4, regs[rd]);
755 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
756 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
757 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
758 /* Ignore stores of argument registers to the stack. */
760 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
761 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
762 /* Ignore block loads from the stack, potentially copying
763 parameters from memory. */
765 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
766 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
767 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
768 /* Similarly ignore single loads from the stack. */
770 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
771 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
772 /* Skip register copies, i.e. saves to another register
773 instead of the stack. */
775 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
776 /* Recognize constant loads; even with small stacks these are necessary
778 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
779 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
781 /* Constant pool loads, for the same reason. */
782 unsigned int constant;
785 loc = start + 4 + bits (insn, 0, 7) * 4;
786 constant = read_memory_unsigned_integer (loc, 4, byte_order);
787 regs[bits (insn, 8, 10)] = pv_constant (constant);
789 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
791 unsigned short inst2;
793 inst2 = read_code_unsigned_integer (start + 2, 2,
794 byte_order_for_code);
796 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
798 /* BL, BLX. Allow some special function calls when
799 skipping the prologue; GCC generates these before
800 storing arguments to the stack. */
802 int j1, j2, imm1, imm2;
804 imm1 = sbits (insn, 0, 10);
805 imm2 = bits (inst2, 0, 10);
806 j1 = bit (inst2, 13);
807 j2 = bit (inst2, 11);
809 offset = ((imm1 << 12) + (imm2 << 1));
810 offset ^= ((!j2) << 22) | ((!j1) << 23);
812 nextpc = start + 4 + offset;
813 /* For BLX make sure to clear the low bits. */
814 if (bit (inst2, 12) == 0)
815 nextpc = nextpc & 0xfffffffc;
817 if (!skip_prologue_function (gdbarch, nextpc,
818 bit (inst2, 12) != 0))
822 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
824 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
826 pv_t addr = regs[bits (insn, 0, 3)];
829 if (stack.store_would_trash (addr))
832 /* Calculate offsets of saved registers. */
833 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
834 if (inst2 & (1 << regno))
836 addr = pv_add_constant (addr, -4);
837 stack.store (addr, 4, regs[regno]);
841 regs[bits (insn, 0, 3)] = addr;
844 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
846 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
848 int regno1 = bits (inst2, 12, 15);
849 int regno2 = bits (inst2, 8, 11);
850 pv_t addr = regs[bits (insn, 0, 3)];
852 offset = inst2 & 0xff;
854 addr = pv_add_constant (addr, offset);
856 addr = pv_add_constant (addr, -offset);
858 if (stack.store_would_trash (addr))
861 stack.store (addr, 4, regs[regno1]);
862 stack.store (pv_add_constant (addr, 4),
866 regs[bits (insn, 0, 3)] = addr;
869 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
870 && (inst2 & 0x0c00) == 0x0c00
871 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
873 int regno = bits (inst2, 12, 15);
874 pv_t addr = regs[bits (insn, 0, 3)];
876 offset = inst2 & 0xff;
878 addr = pv_add_constant (addr, offset);
880 addr = pv_add_constant (addr, -offset);
882 if (stack.store_would_trash (addr))
885 stack.store (addr, 4, regs[regno]);
888 regs[bits (insn, 0, 3)] = addr;
891 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
892 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
894 int regno = bits (inst2, 12, 15);
897 offset = inst2 & 0xfff;
898 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
900 if (stack.store_would_trash (addr))
903 stack.store (addr, 4, regs[regno]);
906 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
907 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
908 /* Ignore stores of argument registers to the stack. */
911 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
912 && (inst2 & 0x0d00) == 0x0c00
913 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
914 /* Ignore stores of argument registers to the stack. */
917 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
919 && (inst2 & 0x8000) == 0x0000
920 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
921 /* Ignore block loads from the stack, potentially copying
922 parameters from memory. */
925 else if ((insn & 0xffb0) == 0xe950 /* ldrd Rt, Rt2,
927 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
928 /* Similarly ignore dual loads from the stack. */
931 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
932 && (inst2 & 0x0d00) == 0x0c00
933 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
934 /* Similarly ignore single loads from the stack. */
937 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
938 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
939 /* Similarly ignore single loads from the stack. */
942 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
943 && (inst2 & 0x8000) == 0x0000)
945 unsigned int imm = ((bits (insn, 10, 10) << 11)
946 | (bits (inst2, 12, 14) << 8)
947 | bits (inst2, 0, 7));
949 regs[bits (inst2, 8, 11)]
950 = pv_add_constant (regs[bits (insn, 0, 3)],
951 thumb_expand_immediate (imm));
954 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
955 && (inst2 & 0x8000) == 0x0000)
957 unsigned int imm = ((bits (insn, 10, 10) << 11)
958 | (bits (inst2, 12, 14) << 8)
959 | bits (inst2, 0, 7));
961 regs[bits (inst2, 8, 11)]
962 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
965 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
966 && (inst2 & 0x8000) == 0x0000)
968 unsigned int imm = ((bits (insn, 10, 10) << 11)
969 | (bits (inst2, 12, 14) << 8)
970 | bits (inst2, 0, 7));
972 regs[bits (inst2, 8, 11)]
973 = pv_add_constant (regs[bits (insn, 0, 3)],
974 - (CORE_ADDR) thumb_expand_immediate (imm));
977 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
978 && (inst2 & 0x8000) == 0x0000)
980 unsigned int imm = ((bits (insn, 10, 10) << 11)
981 | (bits (inst2, 12, 14) << 8)
982 | bits (inst2, 0, 7));
984 regs[bits (inst2, 8, 11)]
985 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
988 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
990 unsigned int imm = ((bits (insn, 10, 10) << 11)
991 | (bits (inst2, 12, 14) << 8)
992 | bits (inst2, 0, 7));
994 regs[bits (inst2, 8, 11)]
995 = pv_constant (thumb_expand_immediate (imm));
998 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
1001 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
1003 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1006 else if (insn == 0xea5f /* mov.w Rd,Rm */
1007 && (inst2 & 0xf0f0) == 0)
1009 int dst_reg = (inst2 & 0x0f00) >> 8;
1010 int src_reg = inst2 & 0xf;
1011 regs[dst_reg] = regs[src_reg];
1014 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1016 /* Constant pool loads. */
1017 unsigned int constant;
1020 offset = bits (inst2, 0, 11);
1022 loc = start + 4 + offset;
1024 loc = start + 4 - offset;
1026 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1027 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1030 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1032 /* Constant pool loads. */
1033 unsigned int constant;
1036 offset = bits (inst2, 0, 7) << 2;
1038 loc = start + 4 + offset;
1040 loc = start + 4 - offset;
1042 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1043 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1045 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1046 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1049 else if (thumb2_instruction_changes_pc (insn, inst2))
1051 /* Don't scan past anything that might change control flow. */
1056 /* The optimizer might shove anything into the prologue,
1057 so we just skip what we don't recognize. */
1058 unrecognized_pc = start;
1063 else if (thumb_instruction_changes_pc (insn))
1065 /* Don't scan past anything that might change control flow. */
1070 /* The optimizer might shove anything into the prologue,
1071 so we just skip what we don't recognize. */
1072 unrecognized_pc = start;
1079 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1080 paddress (gdbarch, start));
1082 if (unrecognized_pc == 0)
1083 unrecognized_pc = start;
1086 return unrecognized_pc;
1088 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1090 /* Frame pointer is fp. Frame size is constant. */
1091 cache->framereg = ARM_FP_REGNUM;
1092 cache->framesize = -regs[ARM_FP_REGNUM].k;
1094 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1096 /* Frame pointer is r7. Frame size is constant. */
1097 cache->framereg = THUMB_FP_REGNUM;
1098 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1102 /* Try the stack pointer... this is a bit desperate. */
1103 cache->framereg = ARM_SP_REGNUM;
1104 cache->framesize = -regs[ARM_SP_REGNUM].k;
1107 for (i = 0; i < 16; i++)
1108 if (stack.find_reg (gdbarch, i, &offset))
1109 cache->saved_regs[i].addr = offset;
1111 return unrecognized_pc;
1115 /* Try to analyze the instructions starting from PC, which load symbol
1116 __stack_chk_guard. Return the address of instruction after loading this
1117 symbol, set the dest register number to *BASEREG, and set the size of
   instructions for loading symbol in OFFSET.  Return 0 if instructions
   are not recognized.  */

static CORE_ADDR
1122 arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
1123 unsigned int *destreg, int *offset)
1125 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1126 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1127 unsigned int low, high, address;
1132 unsigned short insn1
1133 = read_code_unsigned_integer (pc, 2, byte_order_for_code);
1135 if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
1137 *destreg = bits (insn1, 8, 10);
1139 address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
1140 address = read_memory_unsigned_integer (address, 4,
1141 byte_order_for_code);
1143 else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
1145 unsigned short insn2
1146 = read_code_unsigned_integer (pc + 2, 2, byte_order_for_code);
1148 low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1151 = read_code_unsigned_integer (pc + 4, 2, byte_order_for_code);
1153 = read_code_unsigned_integer (pc + 6, 2, byte_order_for_code);
1155 /* movt Rd, #const */
1156 if ((insn1 & 0xfbc0) == 0xf2c0)
1158 high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1159 *destreg = bits (insn2, 8, 11);
1161 address = (high << 16 | low);
1168 = read_code_unsigned_integer (pc, 4, byte_order_for_code);
1170 if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
1172 address = bits (insn, 0, 11) + pc + 8;
1173 address = read_memory_unsigned_integer (address, 4,
1174 byte_order_for_code);
1176 *destreg = bits (insn, 12, 15);
1179 else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1181 low = EXTRACT_MOVW_MOVT_IMM_A (insn);
1184 = read_code_unsigned_integer (pc + 4, 4, byte_order_for_code);
1186 if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1188 high = EXTRACT_MOVW_MOVT_IMM_A (insn);
1189 *destreg = bits (insn, 12, 15);
1191 address = (high << 16 | low);
/* Try to skip a sequence of instructions used for stack protector.  If PC
   points to the first instruction of this sequence, return the address of
   the first instruction after this sequence; otherwise, return the original PC.
1203 On arm, this sequence of instructions is composed of mainly three steps,
1204 Step 1: load symbol __stack_chk_guard,
1205 Step 2: load from address of __stack_chk_guard,
1206 Step 3: store it to somewhere else.
1208 Usually, instructions on step 2 and step 3 are the same on various ARM
1209 architectures. On step 2, it is one instruction 'ldr Rx, [Rn, #0]', and
1210 on step 3, it is also one instruction 'str Rx, [r7, #immd]'. However,
1211 instructions in step 1 vary from different ARM architectures. On ARMv7,
1214 movw Rn, #:lower16:__stack_chk_guard
1215 movt Rn, #:upper16:__stack_chk_guard
1222 .word __stack_chk_guard
1224 Since ldr/str is a very popular instruction, we can't use them as
1225 'fingerprint' or 'signature' of stack protector sequence. Here we choose
1226 sequence {movw/movt, ldr}/ldr/str plus symbol __stack_chk_guard, if not
   stripped, as the 'fingerprint' of a stack protector code sequence.  */
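/* For example (illustrative, not taken from any particular compiler's
   output), on ARMv7 with -fstack-protector GCC may emit:

	movw	r3, #:lower16:__stack_chk_guard
	movt	r3, #:upper16:__stack_chk_guard	@ Step 1
	ldr	r3, [r3, #0]			@ Step 2
	str	r3, [r7, #12]			@ Step 3

   in which case arm_skip_stack_protector returns the address just past the
   str.  The register numbers and the store offset vary.  */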
1230 arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
1232 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1233 unsigned int basereg;
1234 struct bound_minimal_symbol stack_chk_guard;
1236 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1239 /* Try to parse the instructions in Step 1. */
1240 addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1245 stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
1246 /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
1247 Otherwise, this sequence cannot be for stack protector. */
1248 if (stack_chk_guard.minsym == NULL
1249 || !startswith (MSYMBOL_LINKAGE_NAME (stack_chk_guard.minsym), "__stack_chk_guard"))
1254 unsigned int destreg;
1256 = read_code_unsigned_integer (pc + offset, 2, byte_order_for_code);
1258 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1259 if ((insn & 0xf800) != 0x6800)
1261 if (bits (insn, 3, 5) != basereg)
1263 destreg = bits (insn, 0, 2);
1265 insn = read_code_unsigned_integer (pc + offset + 2, 2,
1266 byte_order_for_code);
1267 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1268 if ((insn & 0xf800) != 0x6000)
1270 if (destreg != bits (insn, 0, 2))
1275 unsigned int destreg;
1277 = read_code_unsigned_integer (pc + offset, 4, byte_order_for_code);
1279 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1280 if ((insn & 0x0e500000) != 0x04100000)
1282 if (bits (insn, 16, 19) != basereg)
1284 destreg = bits (insn, 12, 15);
1285 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1286 insn = read_code_unsigned_integer (pc + offset + 4,
1287 4, byte_order_for_code);
1288 if ((insn & 0x0e500000) != 0x04000000)
1290 if (bits (insn, 12, 15) != destreg)
1293 /* The size of total two instructions ldr/str is 4 on Thumb-2, while 8
1296 return pc + offset + 4;
1298 return pc + offset + 8;
1301 /* Advance the PC across any function entry prologue instructions to
1302 reach some "real" code.
1304 The APCS (ARM Procedure Call Standard) defines the following
1308 [stmfd sp!, {a1,a2,a3,a4}]
1309 stmfd sp!, {...,fp,ip,lr,pc}
1310 [stfe f7, [sp, #-12]!]
1311 [stfe f6, [sp, #-12]!]
1312 [stfe f5, [sp, #-12]!]
1313 [stfe f4, [sp, #-12]!]
1314 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1317 arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1319 CORE_ADDR func_addr, limit_pc;
1321 /* See if we can determine the end of the prologue via the symbol table.
1322 If so, then return either PC, or the PC after the prologue, whichever
1324 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
1326 CORE_ADDR post_prologue_pc
1327 = skip_prologue_using_sal (gdbarch, func_addr);
1328 struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);
1330 if (post_prologue_pc)
1332 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
      /* GCC always emits a line note before the prologue and another
	 one after, even if the two are at the same address or on the
	 same line.  Take advantage of this so that we do not need to
	 know every instruction that might appear in the prologue.  We
	 will have producer information for most binaries; if it is
	 missing (e.g. for -gstabs), assume the GNU tools.  */
1341 if (post_prologue_pc
1343 || COMPUNIT_PRODUCER (cust) == NULL
1344 || startswith (COMPUNIT_PRODUCER (cust), "GNU ")
1345 || startswith (COMPUNIT_PRODUCER (cust), "clang ")))
1346 return post_prologue_pc;
1348 if (post_prologue_pc != 0)
1350 CORE_ADDR analyzed_limit;
1352 /* For non-GCC compilers, make sure the entire line is an
1353 acceptable prologue; GDB will round this function's
1354 return value up to the end of the following line so we
1355 can not skip just part of a line (and we do not want to).
1357 RealView does not treat the prologue specially, but does
1358 associate prologue code with the opening brace; so this
1359 lets us skip the first line if we think it is the opening
1361 if (arm_pc_is_thumb (gdbarch, func_addr))
1362 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1363 post_prologue_pc, NULL);
1365 analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
1366 post_prologue_pc, NULL);
1368 if (analyzed_limit != post_prologue_pc)
1371 return post_prologue_pc;
1375 /* Can't determine prologue from the symbol table, need to examine
1378 /* Find an upper limit on the function prologue using the debug
1379 information. If the debug information could not be used to provide
1380 that bound, then use an arbitrary large number as the upper bound. */
1381 /* Like arm_scan_prologue, stop no later than pc + 64. */
1382 limit_pc = skip_prologue_using_sal (gdbarch, pc);
1384 limit_pc = pc + 64; /* Magic. */
1387 /* Check if this is Thumb code. */
1388 if (arm_pc_is_thumb (gdbarch, pc))
1389 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1391 return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1395 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1396 This function decodes a Thumb function prologue to determine:
1397 1) the size of the stack frame
1398 2) which registers are saved on it
1399 3) the offsets of saved regs
1400 4) the offset from the stack pointer to the frame pointer
1402 A typical Thumb function prologue would create this stack frame
1403 (offsets relative to FP)
1404 old SP -> 24 stack parameters
1407 R7 -> 0 local variables (16 bytes)
1408 SP -> -12 additional stack space (12 bytes)
1409 The frame size would thus be 36 bytes, and the frame offset would be
1410 12 bytes. The frame register is R7.
1412 The comments for thumb_skip_prolog() describe the algorithm we use
   to detect the end of the prologue.  */
1417 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1418 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1420 CORE_ADDR prologue_start;
1421 CORE_ADDR prologue_end;
  if (find_pc_partial_function (block_addr, NULL, &prologue_start,
				&prologue_end))
    {
      /* See comment in arm_scan_prologue for an explanation of
	 this heuristic.  */
      if (prologue_end > prologue_start + 64)
	{
	  prologue_end = prologue_start + 64;
	}
    }
  else
    /* We're in the boondocks: we have no idea where the start of the
       function is.  */
    return;

  prologue_end = std::min (prologue_end, prev_pc);

  thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
}
/* Return 1 if the ARM instruction INSN restores SP in epilogue, 0
   otherwise.  */

static int
arm_instruction_restores_sp (unsigned int insn)
{
  if (bits (insn, 28, 31) != INST_NV)
    {
      if ((insn & 0x0df0f000) == 0x0080d000
	  /* ADD SP (register or immediate).  */
	  || (insn & 0x0df0f000) == 0x0040d000
	  /* SUB SP (register or immediate).  */
	  || (insn & 0x0ffffff0) == 0x01a0d000
	  /* MOV SP.  */
	  || (insn & 0x0fff0000) == 0x08bd0000
	  /* POP (LDMIA).  */
	  || (insn & 0x0fff0000) == 0x049d0000)
	  /* POP of a single register.  */
	return 1;
    }

  return 0;
}
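/* For example (illustrative): 0xe28dd018 ("add sp, sp, #24") matches the
   ADD SP pattern and 0xe8bd8010 ("ldmfd sp!, {r4, pc}") matches the POP
   pattern, so both are treated as epilogue instructions by the prologue
   scanner.  */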
1467 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1468 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1469 fill it in. Return the first address not recognized as a prologue
1472 We recognize all the instructions typically found in ARM prologues,
1473 plus harmless instructions which can be skipped (either for analysis
1474 purposes, or a more restrictive set that can be skipped when finding
1475 the end of the prologue). */
1478 arm_analyze_prologue (struct gdbarch *gdbarch,
1479 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1480 struct arm_prologue_cache *cache)
1482 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1484 CORE_ADDR offset, current_pc;
1485 pv_t regs[ARM_FPS_REGNUM];
1486 CORE_ADDR unrecognized_pc = 0;
1488 /* Search the prologue looking for instructions that set up the
1489 frame pointer, adjust the stack pointer, and save registers.
1491 Be careful, however, and if it doesn't look like a prologue,
1492 don't try to scan it. If, for instance, a frameless function
1493 begins with stmfd sp!, then we will tell ourselves there is
1494 a frame, which will confuse stack traceback, as well as "finish"
1495 and other operations that rely on a knowledge of the stack
1498 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1499 regs[regno] = pv_register (regno, 0);
1500 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1502 for (current_pc = prologue_start;
1503 current_pc < prologue_end;
1507 = read_code_unsigned_integer (current_pc, 4, byte_order_for_code);
1509 if (insn == 0xe1a0c00d) /* mov ip, sp */
1511 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1514 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1515 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1517 unsigned imm = insn & 0xff; /* immediate value */
1518 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1519 int rd = bits (insn, 12, 15);
1520 imm = (imm >> rot) | (imm << (32 - rot));
1521 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1524 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1525 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1527 unsigned imm = insn & 0xff; /* immediate value */
1528 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1529 int rd = bits (insn, 12, 15);
1530 imm = (imm >> rot) | (imm << (32 - rot));
1531 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1534 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1537 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1539 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1540 stack.store (regs[ARM_SP_REGNUM], 4,
1541 regs[bits (insn, 12, 15)]);
1544 else if ((insn & 0xffff0000) == 0xe92d0000)
1545 /* stmfd sp!, {..., fp, ip, lr, pc}
1547 stmfd sp!, {a1, a2, a3, a4} */
1549 int mask = insn & 0xffff;
1551 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1554 /* Calculate offsets of saved registers. */
1555 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1556 if (mask & (1 << regno))
1559 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1560 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
1563 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1564 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1565 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1567 /* No need to add this to saved_regs -- it's just an arg reg. */
1570 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1571 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1572 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1574 /* No need to add this to saved_regs -- it's just an arg reg. */
1577 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1579 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1581 /* No need to add this to saved_regs -- it's just arg regs. */
1584 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1586 unsigned imm = insn & 0xff; /* immediate value */
1587 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1588 imm = (imm >> rot) | (imm << (32 - rot));
1589 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1591 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1593 unsigned imm = insn & 0xff; /* immediate value */
1594 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1595 imm = (imm >> rot) | (imm << (32 - rot));
1596 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
1598 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1600 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1602 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1605 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1606 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
1607 stack.store (regs[ARM_SP_REGNUM], 12, regs[regno]);
1609 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1611 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1613 int n_saved_fp_regs;
1614 unsigned int fp_start_reg, fp_bound_reg;
1616 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1619 if ((insn & 0x800) == 0x800) /* N0 is set */
1621 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1622 n_saved_fp_regs = 3;
1624 n_saved_fp_regs = 1;
1628 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1629 n_saved_fp_regs = 2;
1631 n_saved_fp_regs = 4;
1634 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
1635 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1636 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
1638 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1639 stack.store (regs[ARM_SP_REGNUM], 12,
1640 regs[fp_start_reg++]);
1643 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1645 /* Allow some special function calls when skipping the
1646 prologue; GCC generates these before storing arguments to
1648 CORE_ADDR dest = BranchDest (current_pc, insn);
1650 if (skip_prologue_function (gdbarch, dest, 0))
1655 else if ((insn & 0xf0000000) != 0xe0000000)
1656 break; /* Condition not true, exit early. */
1657 else if (arm_instruction_changes_pc (insn))
1658 /* Don't scan past anything that might change control flow. */
1660 else if (arm_instruction_restores_sp (insn))
1662 /* Don't scan past the epilogue. */
1665 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1666 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1667 /* Ignore block loads from the stack, potentially copying
1668 parameters from memory. */
1670 else if ((insn & 0xfc500000) == 0xe4100000
1671 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1672 /* Similarly ignore single loads from the stack. */
1674 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1675 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1676 register instead of the stack. */
1680 /* The optimizer might shove anything into the prologue, if
1681 we build up cache (cache != NULL) from scanning prologue,
1682 we just skip what we don't recognize and scan further to
1683 make cache as complete as possible. However, if we skip
1684 prologue, we'll stop immediately on unrecognized
1686 unrecognized_pc = current_pc;
1694 if (unrecognized_pc == 0)
1695 unrecognized_pc = current_pc;
1699 int framereg, framesize;
1701 /* The frame size is just the distance from the frame register
1702 to the original stack pointer. */
1703 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1705 /* Frame pointer is fp. */
1706 framereg = ARM_FP_REGNUM;
1707 framesize = -regs[ARM_FP_REGNUM].k;
1711 /* Try the stack pointer... this is a bit desperate. */
1712 framereg = ARM_SP_REGNUM;
1713 framesize = -regs[ARM_SP_REGNUM].k;
1716 cache->framereg = framereg;
1717 cache->framesize = framesize;
1719 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1720 if (stack.find_reg (gdbarch, regno, &offset))
1721 cache->saved_regs[regno].addr = offset;
1725 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1726 paddress (gdbarch, unrecognized_pc));
1728 return unrecognized_pc;
1732 arm_scan_prologue (struct frame_info *this_frame,
1733 struct arm_prologue_cache *cache)
1735 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1736 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1737 CORE_ADDR prologue_start, prologue_end;
1738 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1739 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
1741 /* Assume there is no frame until proven otherwise. */
1742 cache->framereg = ARM_SP_REGNUM;
1743 cache->framesize = 0;
1745 /* Check for Thumb prologue. */
1746 if (arm_frame_is_thumb (this_frame))
1748 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1752 /* Find the function prologue. If we can't find the function in
1753 the symbol table, peek in the stack frame to find the PC. */
1754 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1757 /* One way to find the end of the prologue (which works well
1758 for unoptimized code) is to do the following:
1760 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1763 prologue_end = prev_pc;
1764 else if (sal.end < prologue_end)
1765 prologue_end = sal.end;
1767 This mechanism is very accurate so long as the optimizer
1768 doesn't move any instructions from the function body into the
1769 prologue. If this happens, sal.end will be the last
1770 instruction in the first hunk of prologue code just before
1771 the first instruction that the scheduler has moved from
1772 the body to the prologue.
1774 In order to make sure that we scan all of the prologue
1775 instructions, we use a slightly less accurate mechanism which
1776 may scan more than necessary. To help compensate for this
1777 lack of accuracy, the prologue scanning loop below contains
1778 several clauses which'll cause the loop to terminate early if
1779 an implausible prologue instruction is encountered.
1785 is a suitable endpoint since it accounts for the largest
1786 possible prologue plus up to five instructions inserted by
1789 if (prologue_end > prologue_start + 64)
1791 prologue_end = prologue_start + 64; /* See above. */
1796 /* We have no symbol information. Our only option is to assume this
1797 function has a standard stack frame and the normal frame register.
1798 Then, we can find the value of our frame pointer on entrance to
1799 the callee (or at the present moment if this is the innermost frame).
1800 The value stored there should be the address of the stmfd + 8. */
1801 CORE_ADDR frame_loc;
1802 ULONGEST return_value;
1804 /* AAPCS does not use a frame register, so we can abort here. */
1805 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_AAPCS)
1808 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
1809 if (!safe_read_memory_unsigned_integer (frame_loc, 4, byte_order,
1814 prologue_start = gdbarch_addr_bits_remove
1815 (gdbarch, return_value) - 8;
1816 prologue_end = prologue_start + 64; /* See above. */
1820 if (prev_pc < prologue_end)
1821 prologue_end = prev_pc;
1823 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1826 static struct arm_prologue_cache *
1827 arm_make_prologue_cache (struct frame_info *this_frame)
1830 struct arm_prologue_cache *cache;
1831 CORE_ADDR unwound_fp;
1833 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
1834 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
1836 arm_scan_prologue (this_frame, cache);
1838 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
1839 if (unwound_fp == 0)
1842 cache->prev_sp = unwound_fp + cache->framesize;
1844 /* Calculate actual addresses of saved registers using offsets
1845 determined by arm_scan_prologue. */
1846 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
1847 if (trad_frame_addr_p (cache->saved_regs, reg))
1848 cache->saved_regs[reg].addr += cache->prev_sp;
1853 /* Implementation of the stop_reason hook for arm_prologue frames. */
1855 static enum unwind_stop_reason
1856 arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
1859 struct arm_prologue_cache *cache;
1862 if (*this_cache == NULL)
1863 *this_cache = arm_make_prologue_cache (this_frame);
1864 cache = (struct arm_prologue_cache *) *this_cache;
1866 /* This is meant to halt the backtrace at "_start". */
1867 pc = get_frame_pc (this_frame);
1868 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
1869 return UNWIND_OUTERMOST;
1871 /* If we've hit a wall, stop. */
1872 if (cache->prev_sp == 0)
1873 return UNWIND_OUTERMOST;
1875 return UNWIND_NO_REASON;
1878 /* Our frame ID for a normal frame is the current function's starting PC
1879 and the caller's SP when we were called. */
1882 arm_prologue_this_id (struct frame_info *this_frame,
1884 struct frame_id *this_id)
1886 struct arm_prologue_cache *cache;
1890 if (*this_cache == NULL)
1891 *this_cache = arm_make_prologue_cache (this_frame);
1892 cache = (struct arm_prologue_cache *) *this_cache;
1894 /* Use function start address as part of the frame ID. If we cannot
1895 identify the start address (due to missing symbol information),
1896 fall back to just using the current PC. */
1897 pc = get_frame_pc (this_frame);
1898 func = get_frame_func (this_frame);
1902 id = frame_id_build (cache->prev_sp, func);
1906 static struct value *
1907 arm_prologue_prev_register (struct frame_info *this_frame,
1911 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1912 struct arm_prologue_cache *cache;
1914 if (*this_cache == NULL)
1915 *this_cache = arm_make_prologue_cache (this_frame);
1916 cache = (struct arm_prologue_cache *) *this_cache;
1918 /* If we are asked to unwind the PC, then we need to return the LR
1919 instead. The prologue may save PC, but it will point into this
1920 frame's prologue, not the next frame's resume location. Also
1921 strip the saved T bit. A valid LR may have the low bit set, but
1922 a valid PC never does. */
1923 if (prev_regnum == ARM_PC_REGNUM)
1927 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1928 return frame_unwind_got_constant (this_frame, prev_regnum,
1929 arm_addr_bits_remove (gdbarch, lr));
1932 /* SP is generally not saved to the stack, but this frame is
1933 identified by the next frame's stack pointer at the time of the call.
1934 The value was already reconstructed into PREV_SP. */
1935 if (prev_regnum == ARM_SP_REGNUM)
1936 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
1938 /* The CPSR may have been changed by the call instruction and by the
1939 called function. The only bit we can reconstruct is the T bit,
1940 by checking the low bit of LR as of the call. This is a reliable
1941 indicator of Thumb-ness except for some ARM v4T pre-interworking
1942 Thumb code, which could get away with a clear low bit as long as
1943 the called function did not use bx. Guess that all other
1944 bits are unchanged; the condition flags are presumably lost,
1945 but the processor status is likely valid. */
1946 if (prev_regnum == ARM_PS_REGNUM)
1949 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
1951 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
1952 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1953 if (IS_THUMB_ADDR (lr))
1957 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
1960 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
1964 struct frame_unwind arm_prologue_unwind = {
1966 arm_prologue_unwind_stop_reason,
1967 arm_prologue_this_id,
1968 arm_prologue_prev_register,
1970 default_frame_sniffer
1973 /* Maintain a list of ARM exception table entries per objfile, similar to the
1974 list of mapping symbols. We only cache entries for standard ARM-defined
1975 personality routines; the cache will contain only the frame unwinding
1976 instructions associated with the entry (not the descriptors). */
1978 static const struct objfile_data *arm_exidx_data_key;
1980 struct arm_exidx_entry
1985 typedef struct arm_exidx_entry arm_exidx_entry_s;
1986 DEF_VEC_O(arm_exidx_entry_s);
1988 struct arm_exidx_data
1990 VEC(arm_exidx_entry_s) **section_maps;
1994 arm_exidx_data_free (struct objfile *objfile, void *arg)
1996 struct arm_exidx_data *data = (struct arm_exidx_data *) arg;
1999 for (i = 0; i < objfile->obfd->section_count; i++)
2000 VEC_free (arm_exidx_entry_s, data->section_maps[i]);
2004 arm_compare_exidx_entries (const struct arm_exidx_entry *lhs,
2005 const struct arm_exidx_entry *rhs)
2007 return lhs->addr < rhs->addr;
2010 static struct obj_section *
2011 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2013 struct obj_section *osect;
2015 ALL_OBJFILE_OSECTIONS (objfile, osect)
2016 if (bfd_get_section_flags (objfile->obfd,
2017 osect->the_bfd_section) & SEC_ALLOC)
2019 bfd_vma start, size;
2020 start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
2021 size = bfd_get_section_size (osect->the_bfd_section);
2023 if (start <= vma && vma < start + size)
2030 /* Parse contents of exception table and exception index sections
2031 of OBJFILE, and fill in the exception table entry cache.
2033 For each entry that refers to a standard ARM-defined personality
2034 routine, extract the frame unwinding instructions (from either
2035 the index or the table section). The unwinding instructions
2037 - extracting them from the rest of the table data
2038 - converting to host endianness
2039 - appending the implicit 0xb0 ("Finish") code
2041 The extracted and normalized instructions are stored for later
2042 retrieval by the arm_find_exidx_entry routine. */
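/* Illustrative summary of the data parsed below (per the ARM EHABI, not a
   quote from it): each 8-byte .ARM.exidx entry holds a prel31 offset to the
   function it covers, followed by either the value EXIDX_CANTUNWIND (1), an
   inline "short form" unwind description (bit 31 set), or a prel31 offset
   into .ARM.extab where the full description lives.  */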
2045 arm_exidx_new_objfile (struct objfile *objfile)
2047 struct arm_exidx_data *data;
2048 asection *exidx, *extab;
2049 bfd_vma exidx_vma = 0, extab_vma = 0;
2052 /* If we've already touched this file, do nothing. */
2053 if (!objfile || objfile_data (objfile, arm_exidx_data_key) != NULL)
2056 /* Read contents of exception table and index. */
2057 exidx = bfd_get_section_by_name (objfile->obfd, ELF_STRING_ARM_unwind);
2058 gdb::byte_vector exidx_data;
2061 exidx_vma = bfd_section_vma (objfile->obfd, exidx);
2062 exidx_data.resize (bfd_get_section_size (exidx));
2064 if (!bfd_get_section_contents (objfile->obfd, exidx,
2065 exidx_data.data (), 0,
2066 exidx_data.size ()))
2070 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2071 gdb::byte_vector extab_data;
2074 extab_vma = bfd_section_vma (objfile->obfd, extab);
2075 extab_data.resize (bfd_get_section_size (extab));
2077 if (!bfd_get_section_contents (objfile->obfd, extab,
2078 extab_data.data (), 0,
2079 extab_data.size ()))
2083 /* Allocate exception table data structure. */
2084 data = OBSTACK_ZALLOC (&objfile->objfile_obstack, struct arm_exidx_data);
2085 set_objfile_data (objfile, arm_exidx_data_key, data);
2086 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
2087 objfile->obfd->section_count,
2088 VEC(arm_exidx_entry_s) *);
2090 /* Fill in exception table. */
2091 for (i = 0; i < exidx_data.size () / 8; i++)
2093 struct arm_exidx_entry new_exidx_entry;
2094 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data.data () + i * 8);
2095 bfd_vma val = bfd_h_get_32 (objfile->obfd,
2096 exidx_data.data () + i * 8 + 4);
2097 bfd_vma addr = 0, word = 0;
2098 int n_bytes = 0, n_words = 0;
2099 struct obj_section *sec;
2100 gdb_byte *entry = NULL;
2102 /* Extract address of start of function. */
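	  /* Note (added for illustration): the expression below
	     sign-extends a prel31 offset, so e.g. 0x00000001 decodes
	     to +1 and 0x7fffffff decodes to -1.  */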
2103 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2104 idx += exidx_vma + i * 8;
2106 /* Find section containing function and compute section offset. */
2107 sec = arm_obj_section_from_vma (objfile, idx);
2110 idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);
2112 /* Determine address of exception table entry. */
2115 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2117 else if ((val & 0xff000000) == 0x80000000)
2119 /* Exception table entry embedded in .ARM.exidx
2120 -- must be short form. */
2124 else if (!(val & 0x80000000))
2126 /* Exception table entry in .ARM.extab. */
2127 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2128 addr += exidx_vma + i * 8 + 4;
2130 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_data.size ())
2132 word = bfd_h_get_32 (objfile->obfd,
2133 extab_data.data () + addr - extab_vma);
2136 if ((word & 0xff000000) == 0x80000000)
2141 else if ((word & 0xff000000) == 0x81000000
2142 || (word & 0xff000000) == 0x82000000)
2146 n_words = ((word >> 16) & 0xff);
2148 else if (!(word & 0x80000000))
2151 struct obj_section *pers_sec;
2152 int gnu_personality = 0;
2154 /* Custom personality routine. */
2155 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2156 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2158 /* Check whether we've got one of the variants of the
2159 GNU personality routines. */
2160 pers_sec = arm_obj_section_from_vma (objfile, pers);
2163 static const char *personality[] =
2165 "__gcc_personality_v0",
2166 "__gxx_personality_v0",
2167 "__gcj_personality_v0",
2168 "__gnu_objc_personality_v0",
2172 CORE_ADDR pc = pers + obj_section_offset (pers_sec);
2175 for (k = 0; personality[k]; k++)
2176 if (lookup_minimal_symbol_by_pc_name
2177 (pc, personality[k], objfile))
2179 gnu_personality = 1;
2184 /* If so, the next word contains a word count in the high
2185 byte, followed by the same unwind instructions as the
2186 pre-defined forms. */
2188 && addr + 4 <= extab_vma + extab_data.size ())
2190 word = bfd_h_get_32 (objfile->obfd,
2192 + addr - extab_vma));
2195 n_words = ((word >> 24) & 0xff);
2201 /* Sanity check address. */
2203 if (addr < extab_vma
2204 || addr + 4 * n_words > extab_vma + extab_data.size ())
2205 n_words = n_bytes = 0;
2207 /* The unwind instructions reside in WORD (only the N_BYTES least
2208 significant bytes are valid), followed by N_WORDS words in the
2209 extab section starting at ADDR. */
2210 if (n_bytes || n_words)
2213 = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
2214 n_bytes + n_words * 4 + 1);
2217 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2221 word = bfd_h_get_32 (objfile->obfd,
2222 extab_data.data () + addr - extab_vma);
2225 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2226 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2227 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2228 *p++ = (gdb_byte) (word & 0xff);
2231 /* Implied "Finish" to terminate the list. */
2235 /* Push the entry onto the vector. Entries are guaranteed to
2236 appear in order of increasing addresses. */
2237 new_exidx_entry.addr = idx;
2238 new_exidx_entry.entry = entry;
2239 VEC_safe_push (arm_exidx_entry_s,
2240 data->section_maps[sec->the_bfd_section->index],
2245 /* Search for the exception table entry covering MEMADDR. If one is found,
2246 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2247 set *START to the start of the region covered by this entry. */
2250 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2252 struct obj_section *sec;
2254 sec = find_pc_section (memaddr);
2257 struct arm_exidx_data *data;
2258 VEC(arm_exidx_entry_s) *map;
2259 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2262 data = ((struct arm_exidx_data *)
2263 objfile_data (sec->objfile, arm_exidx_data_key));
2266 map = data->section_maps[sec->the_bfd_section->index];
2267 if (!VEC_empty (arm_exidx_entry_s, map))
2269 struct arm_exidx_entry *map_sym;
2271 idx = VEC_lower_bound (arm_exidx_entry_s, map, &map_key,
2272 arm_compare_exidx_entries);
2274 /* VEC_lower_bound finds the earliest ordered insertion
2275 point. If the following symbol starts at this exact
2276 address, we use that; otherwise, the preceding
2277 exception table entry covers this address. */
2278 if (idx < VEC_length (arm_exidx_entry_s, map))
2280 map_sym = VEC_index (arm_exidx_entry_s, map, idx);
2281 if (map_sym->addr == map_key.addr)
2284 *start = map_sym->addr + obj_section_addr (sec);
2285 return map_sym->entry;
2291 map_sym = VEC_index (arm_exidx_entry_s, map, idx - 1);
2293 *start = map_sym->addr + obj_section_addr (sec);
2294 return map_sym->entry;
2303 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2304 instruction list from the ARM exception table entry ENTRY, allocate and
2305 return a prologue cache structure describing how to unwind this frame.
2307 Return NULL if the unwinding instruction list contains a "spare",
2308 "reserved" or "refuse to unwind" instruction as defined in section
2309 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2310 for the ARM Architecture" document. */
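/* An illustrative walk-through (added example, not from the original
   comment): for an entry whose unwind instructions are

     0x97	vsp = r7
     0xab	pop {r4-r7, r14}
     0xb0	finish

   the decoder below records that the previous SP is found via r7 (and
   reloads VSP from it), assigns stack-slot addresses to r4..r7 and LR
   while stepping VSP by four bytes per register, and finally copies
   the saved LR slot into PC, since PC itself was never popped.  */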
2312 static struct arm_prologue_cache *
2313 arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2318 struct arm_prologue_cache *cache;
2319 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2320 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2326 /* Whenever we reload SP, we actually have to retrieve its
2327 actual value in the current frame. */
2330 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2332 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2333 vsp = get_frame_register_unsigned (this_frame, reg);
2337 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
2338 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2344 /* Decode next unwind instruction. */
2347 if ((insn & 0xc0) == 0)
2349 int offset = insn & 0x3f;
2350 vsp += (offset << 2) + 4;
2352 else if ((insn & 0xc0) == 0x40)
2354 int offset = insn & 0x3f;
2355 vsp -= (offset << 2) + 4;
2357 else if ((insn & 0xf0) == 0x80)
2359 int mask = ((insn & 0xf) << 8) | *entry++;
2362 /* The special case of an all-zero mask identifies
2363 "Refuse to unwind". We return NULL to fall back
2364 to the prologue analyzer. */
2368 /* Pop registers r4..r15 under mask. */
2369 for (i = 0; i < 12; i++)
2370 if (mask & (1 << i))
2372 cache->saved_regs[4 + i].addr = vsp;
2376 /* Special-case popping SP -- we need to reload vsp. */
2377 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2380 else if ((insn & 0xf0) == 0x90)
2382 int reg = insn & 0xf;
2384 /* Reserved cases. */
2385 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2388 /* Set SP from another register and mark VSP for reload. */
2389 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2392 else if ((insn & 0xf0) == 0xa0)
2394 int count = insn & 0x7;
2395 int pop_lr = (insn & 0x8) != 0;
2398 /* Pop r4..r[4+count]. */
2399 for (i = 0; i <= count; i++)
2401 cache->saved_regs[4 + i].addr = vsp;
2405 /* If indicated by flag, pop LR as well. */
2408 cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
2412 else if (insn == 0xb0)
2414 /* We could only have updated PC by popping into it; if so, it
2415 will show up as an address. Otherwise, copy LR into PC. */
2416 if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
2417 cache->saved_regs[ARM_PC_REGNUM]
2418 = cache->saved_regs[ARM_LR_REGNUM];
2423 else if (insn == 0xb1)
2425 int mask = *entry++;
2428 /* An all-zero mask or a mask >= 16 is "spare". */
2429 if (mask == 0 || mask >= 16)
2432 /* Pop r0..r3 under mask. */
2433 for (i = 0; i < 4; i++)
2434 if (mask & (1 << i))
2436 cache->saved_regs[i].addr = vsp;
2440 else if (insn == 0xb2)
2442 ULONGEST offset = 0;
2447 offset |= (*entry & 0x7f) << shift;
2450 while (*entry++ & 0x80);
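	  /* OFFSET is a ULEB128 quantity; as an added illustration, a
	     single byte 0x04 yields vsp += 0x204 + (4 << 2) = 0x214.  */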
2452 vsp += 0x204 + (offset << 2);
2454 else if (insn == 0xb3)
2456 int start = *entry >> 4;
2457 int count = (*entry++) & 0xf;
2460 /* Only registers D0..D15 are valid here. */
2461 if (start + count >= 16)
2464 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2465 for (i = 0; i <= count; i++)
2467 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2471 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2474 else if ((insn & 0xf8) == 0xb8)
2476 int count = insn & 0x7;
2479 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2480 for (i = 0; i <= count; i++)
2482 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2486 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2489 else if (insn == 0xc6)
2491 int start = *entry >> 4;
2492 int count = (*entry++) & 0xf;
2495 /* Only registers WR0..WR15 are valid. */
2496 if (start + count >= 16)
2499 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2500 for (i = 0; i <= count; i++)
2502 cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
2506 else if (insn == 0xc7)
2508 int mask = *entry++;
2511 /* An all-zero mask or a mask >= 16 is "spare". */
2512 if (mask == 0 || mask >= 16)
2515 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2516 for (i = 0; i < 4; i++)
2517 if (mask & (1 << i))
2519 cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
2523 else if ((insn & 0xf8) == 0xc0)
2525 int count = insn & 0x7;
2528 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2529 for (i = 0; i <= count; i++)
2531 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
2535 else if (insn == 0xc8)
2537 int start = *entry >> 4;
2538 int count = (*entry++) & 0xf;
2541 /* Only registers D0..D31 are valid. */
2542 if (start + count >= 16)
2545 /* Pop VFP double-precision registers
2546 D[16+start]..D[16+start+count]. */
2547 for (i = 0; i <= count; i++)
2549 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
2553 else if (insn == 0xc9)
2555 int start = *entry >> 4;
2556 int count = (*entry++) & 0xf;
2559 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2560 for (i = 0; i <= count; i++)
2562 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2566 else if ((insn & 0xf8) == 0xd0)
2568 int count = insn & 0x7;
2571 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2572 for (i = 0; i <= count; i++)
2574 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2580 /* Everything else is "spare". */
2585 /* If we restore SP from a register, assume this was the frame register.
2586 Otherwise just fall back to SP as frame register. */
2587 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2588 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2590 cache->framereg = ARM_SP_REGNUM;
2592 /* Determine offset to previous frame. */
2594 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2596 /* We already got the previous SP. */
2597 cache->prev_sp = vsp;
2602 /* Unwinding via ARM exception table entries. Note that the sniffer
2603 already computes a filled-in prologue cache, which is then used
2604 with the same arm_prologue_this_id and arm_prologue_prev_register
2605 routines also used for prologue-parsing based unwinding. */
2608 arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2609 struct frame_info *this_frame,
2610 void **this_prologue_cache)
2612 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2613 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2614 CORE_ADDR addr_in_block, exidx_region, func_start;
2615 struct arm_prologue_cache *cache;
2618 /* See if we have an ARM exception table entry covering this address. */
2619 addr_in_block = get_frame_address_in_block (this_frame);
2620 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2624 /* The ARM exception table does not describe unwind information
2625 for arbitrary PC values, but is guaranteed to be correct only
2626 at call sites. We have to decide here whether we want to use
2627 ARM exception table information for this frame, or fall back
2628 to using prologue parsing. (Note that if we have DWARF CFI,
2629 this sniffer isn't even called -- CFI is always preferred.)
2631 Before we make this decision, however, we check whether we
2632 actually have *symbol* information for the current frame.
2633 If not, prologue parsing would not work anyway, so we might
2634 as well use the exception table and hope for the best. */
2635 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2639 /* If the next frame is "normal", we are at a call site in this
2640 frame, so exception information is guaranteed to be valid. */
2641 if (get_next_frame (this_frame)
2642 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2645 /* We also assume exception information is valid if we're currently
2646 blocked in a system call. The system library is supposed to
2647 ensure this, so that e.g. pthread cancellation works. */
2648 if (arm_frame_is_thumb (this_frame))
2652 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 2,
2653 2, byte_order_for_code, &insn)
2654 && (insn & 0xff00) == 0xdf00 /* svc */)
2661 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 4,
2662 4, byte_order_for_code, &insn)
2663 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2667 /* Bail out if we don't know that exception information is valid. */
2671 /* The ARM exception index does not mark the *end* of the region
2672 covered by the entry, and some functions will not have any entry.
2673 To correctly recognize the end of the covered region, the linker
2674 should have inserted dummy records with a CANTUNWIND marker.
2676 Unfortunately, current versions of GNU ld do not reliably do
2677 this, and thus we may have found an incorrect entry above.
2678 As a (temporary) sanity check, we only use the entry if it
2679 lies *within* the bounds of the function. Note that this check
2680 might reject perfectly valid entries that just happen to cover
2681 multiple functions; therefore this check ought to be removed
2682 once the linker is fixed. */
2683 if (func_start > exidx_region)
2687 /* Decode the list of unwinding instructions into a prologue cache.
2688 Note that this may fail due to e.g. a "refuse to unwind" code. */
2689 cache = arm_exidx_fill_cache (this_frame, entry);
2693 *this_prologue_cache = cache;
2697 struct frame_unwind arm_exidx_unwind = {
2699 default_frame_unwind_stop_reason,
2700 arm_prologue_this_id,
2701 arm_prologue_prev_register,
2703 arm_exidx_unwind_sniffer
2706 static struct arm_prologue_cache *
2707 arm_make_epilogue_frame_cache (struct frame_info *this_frame)
2709 struct arm_prologue_cache *cache;
2712 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2713 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2715 /* Still rely on the offset calculated from the prologue. */
2716 arm_scan_prologue (this_frame, cache);
2718 /* Since we are in epilogue, the SP has been restored. */
2719 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2721 /* Calculate actual addresses of saved registers using offsets
2722 determined by arm_scan_prologue. */
2723 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2724 if (trad_frame_addr_p (cache->saved_regs, reg))
2725 cache->saved_regs[reg].addr += cache->prev_sp;
2730 /* Implementation of function hook 'this_id' in
2731 'struct frame_unwind' for the epilogue unwinder. */
2734 arm_epilogue_frame_this_id (struct frame_info *this_frame,
2736 struct frame_id *this_id)
2738 struct arm_prologue_cache *cache;
2741 if (*this_cache == NULL)
2742 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2743 cache = (struct arm_prologue_cache *) *this_cache;
2745 /* Use function start address as part of the frame ID. If we cannot
2746 identify the start address (due to missing symbol information),
2747 fall back to just using the current PC. */
2748 pc = get_frame_pc (this_frame);
2749 func = get_frame_func (this_frame);
2753 (*this_id) = frame_id_build (cache->prev_sp, pc);
2756 /* Implementation of function hook 'prev_register' in
2757 'struct frame_unwind' for the epilogue unwinder. */
2759 static struct value *
2760 arm_epilogue_frame_prev_register (struct frame_info *this_frame,
2761 void **this_cache, int regnum)
2763 if (*this_cache == NULL)
2764 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2766 return arm_prologue_prev_register (this_frame, this_cache, regnum);
2769 static int arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch,
2771 static int thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch,
2774 /* Implementation of function hook 'sniffer' in
2775 'struct frame_unwind' for the epilogue unwinder. */
2778 arm_epilogue_frame_sniffer (const struct frame_unwind *self,
2779 struct frame_info *this_frame,
2780 void **this_prologue_cache)
2782 if (frame_relative_level (this_frame) == 0)
2784 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2785 CORE_ADDR pc = get_frame_pc (this_frame);
2787 if (arm_frame_is_thumb (this_frame))
2788 return thumb_stack_frame_destroyed_p (gdbarch, pc);
2790 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
2796 /* Frame unwinder for epilogues. */
2798 static const struct frame_unwind arm_epilogue_frame_unwind =
2801 default_frame_unwind_stop_reason,
2802 arm_epilogue_frame_this_id,
2803 arm_epilogue_frame_prev_register,
2805 arm_epilogue_frame_sniffer,
2808 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2809 trampoline, return the target PC. Otherwise return 0.
2811 void call0a (char c, short s, int i, long l) {}
2815 (*pointer_to_call0a) (c, s, i, l);
2818 Instead of calling a stub library function _call_via_xx (xx is
2819 the register name), GCC may inline the trampoline in the object
2820 file as below (register r2 has the address of call0a).
2823 .type main, %function
2832 The trampoline 'bx r2' doesn't belong to main. */
2835 arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2837 /* The heuristic for recognizing such a trampoline is that FRAME is
2838 executing in Thumb mode and the instruction at PC is 'bx Rm'. */
2839 if (arm_frame_is_thumb (frame))
2843 if (target_read_memory (pc, buf, 2) == 0)
2845 struct gdbarch *gdbarch = get_frame_arch (frame);
2846 enum bfd_endian byte_order_for_code
2847 = gdbarch_byte_order_for_code (gdbarch);
2849 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2851 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2854 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2856 /* Clear the LSB so that gdb core sets step-resume
2857 breakpoint at the right address. */
2858 return UNMAKE_THUMB_ADDR (dest);
2866 static struct arm_prologue_cache *
2867 arm_make_stub_cache (struct frame_info *this_frame)
2869 struct arm_prologue_cache *cache;
2871 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2872 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2874 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2879 /* Our frame ID for a stub frame is the current SP and LR. */
2882 arm_stub_this_id (struct frame_info *this_frame,
2884 struct frame_id *this_id)
2886 struct arm_prologue_cache *cache;
2888 if (*this_cache == NULL)
2889 *this_cache = arm_make_stub_cache (this_frame);
2890 cache = (struct arm_prologue_cache *) *this_cache;
2892 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
2896 arm_stub_unwind_sniffer (const struct frame_unwind *self,
2897 struct frame_info *this_frame,
2898 void **this_prologue_cache)
2900 CORE_ADDR addr_in_block;
2902 CORE_ADDR pc, start_addr;
2905 addr_in_block = get_frame_address_in_block (this_frame);
2906 pc = get_frame_pc (this_frame);
2907 if (in_plt_section (addr_in_block)
2908 /* We also use the stub unwinder if the target memory is unreadable
2909 to avoid having the prologue unwinder trying to read it. */
2910 || target_read_memory (pc, dummy, 4) != 0)
2913 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2914 && arm_skip_bx_reg (this_frame, pc) != 0)
2920 struct frame_unwind arm_stub_unwind = {
2922 default_frame_unwind_stop_reason,
2924 arm_prologue_prev_register,
2926 arm_stub_unwind_sniffer
2929 /* Store, into CACHE->saved_regs, the addresses of the saved registers
2930 of the frame described by THIS_FRAME. CACHE is
2933 static struct arm_prologue_cache *
2934 arm_m_exception_cache (struct frame_info *this_frame)
2936 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2937 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2938 struct arm_prologue_cache *cache;
2939 CORE_ADDR unwound_sp;
2942 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2943 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2945 unwound_sp = get_frame_register_unsigned (this_frame,
2948 /* The hardware saves eight 32-bit words, comprising xPSR,
2949 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
2950 "B1.5.6 Exception entry behavior" in
2951 "ARMv7-M Architecture Reference Manual". */
2952 cache->saved_regs[0].addr = unwound_sp;
2953 cache->saved_regs[1].addr = unwound_sp + 4;
2954 cache->saved_regs[2].addr = unwound_sp + 8;
2955 cache->saved_regs[3].addr = unwound_sp + 12;
2956 cache->saved_regs[12].addr = unwound_sp + 16;
2957 cache->saved_regs[14].addr = unwound_sp + 20;
2958 cache->saved_regs[15].addr = unwound_sp + 24;
2959 cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;
2961 /* If bit 9 of the saved xPSR is set, then there is a four-byte
2962 aligner between the top of the 32-byte stack frame and the
2963 previous context's stack pointer. */
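  /* Worked example (added for illustration): if the exception frame
     starts at 0x20001000 and bit 9 of the saved xPSR is set, the
     previous SP is 0x20001000 + 32 + 4 = 0x20001024.  */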
2964 cache->prev_sp = unwound_sp + 32;
2965 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
2966 && (xpsr & (1 << 9)) != 0)
2967 cache->prev_sp += 4;
2972 /* Implementation of function hook 'this_id' in
2973 'struct frame_unwind'. */
2976 arm_m_exception_this_id (struct frame_info *this_frame,
2978 struct frame_id *this_id)
2980 struct arm_prologue_cache *cache;
2982 if (*this_cache == NULL)
2983 *this_cache = arm_m_exception_cache (this_frame);
2984 cache = (struct arm_prologue_cache *) *this_cache;
2986 /* Our frame ID for an M-profile exception frame is built from the unwound SP and the current PC. */
2987 *this_id = frame_id_build (cache->prev_sp,
2988 get_frame_pc (this_frame));
2991 /* Implementation of function hook 'prev_register' in
2992 'struct frame_unwind'. */
2994 static struct value *
2995 arm_m_exception_prev_register (struct frame_info *this_frame,
2999 struct arm_prologue_cache *cache;
3001 if (*this_cache == NULL)
3002 *this_cache = arm_m_exception_cache (this_frame);
3003 cache = (struct arm_prologue_cache *) *this_cache;
3005 /* The value was already reconstructed into PREV_SP. */
3006 if (prev_regnum == ARM_SP_REGNUM)
3007 return frame_unwind_got_constant (this_frame, prev_regnum,
3010 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
3014 /* Implementation of function hook 'sniffer' in
3015 'struct frame_unwind'. */
3018 arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3019 struct frame_info *this_frame,
3020 void **this_prologue_cache)
3022 CORE_ADDR this_pc = get_frame_pc (this_frame);
3024 /* No need to check is_m; this sniffer is only registered for
3025 M-profile architectures. */
3027 /* Check if exception frame returns to a magic PC value. */
3028 return arm_m_addr_is_magic (this_pc);
3031 /* Frame unwinder for M-profile exceptions. */
3033 struct frame_unwind arm_m_exception_unwind =
3036 default_frame_unwind_stop_reason,
3037 arm_m_exception_this_id,
3038 arm_m_exception_prev_register,
3040 arm_m_exception_unwind_sniffer
3044 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
3046 struct arm_prologue_cache *cache;
3048 if (*this_cache == NULL)
3049 *this_cache = arm_make_prologue_cache (this_frame);
3050 cache = (struct arm_prologue_cache *) *this_cache;
3052 return cache->prev_sp - cache->framesize;
3055 struct frame_base arm_normal_base = {
3056 &arm_prologue_unwind,
3057 arm_normal_frame_base,
3058 arm_normal_frame_base,
3059 arm_normal_frame_base
3062 static struct value *
3063 arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
3066 struct gdbarch * gdbarch = get_frame_arch (this_frame);
3068 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
3073 /* The PC is normally copied from the return column, which
3074 describes saves of LR. However, that version may have an
3075 extra bit set to indicate Thumb state. The bit is not part of the PC. */
3077 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3078 return frame_unwind_got_constant (this_frame, regnum,
3079 arm_addr_bits_remove (gdbarch, lr));
3082 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3083 cpsr = get_frame_register_unsigned (this_frame, regnum);
3084 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3085 if (IS_THUMB_ADDR (lr))
3089 return frame_unwind_got_constant (this_frame, regnum, cpsr);
3092 internal_error (__FILE__, __LINE__,
3093 _("Unexpected register %d"), regnum);
3098 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3099 struct dwarf2_frame_state_reg *reg,
3100 struct frame_info *this_frame)
3106 reg->how = DWARF2_FRAME_REG_FN;
3107 reg->loc.fn = arm_dwarf2_prev_register;
3110 reg->how = DWARF2_FRAME_REG_CFA;
3115 /* Implement the stack_frame_destroyed_p gdbarch method. */
3118 thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3120 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3121 unsigned int insn, insn2;
3122 int found_return = 0, found_stack_adjust = 0;
3123 CORE_ADDR func_start, func_end;
3127 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3130 /* The epilogue is a sequence of instructions along the following lines:
3132 - add stack frame size to SP or FP
3133 - [if frame pointer used] restore SP from FP
3134 - restore registers from SP [may include PC]
3135 - a return-type instruction [if PC wasn't already restored]
3137 In a first pass, we scan forward from the current PC and verify the
3138 instructions we find as compatible with this sequence, ending in a return instruction.
3141 However, this is not sufficient to distinguish indirect function calls
3142 within a function from indirect tail calls in the epilogue in some cases.
3143 Therefore, if we didn't already find any SP-changing instruction during
3144 forward scan, we add a backward scanning heuristic to ensure we actually
3145 are in the epilogue. */
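  /* An illustrative Thumb epilogue accepted by this scan (added
     example, not exhaustive):

	add	sp, #16		@ thumb_instruction_restores_sp
	pop	{r4-r7, pc}	@ return, with PC in the register list  */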
3148 while (scan_pc < func_end && !found_return)
3150 if (target_read_memory (scan_pc, buf, 2))
3154 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3156 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3158 else if (insn == 0x46f7) /* mov pc, lr */
3160 else if (thumb_instruction_restores_sp (insn))
3162 if ((insn & 0xff00) == 0xbd00) /* pop <registers, PC> */
3165 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
3167 if (target_read_memory (scan_pc, buf, 2))
3171 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3173 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3175 if (insn2 & 0x8000) /* <registers> include PC. */
3178 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3179 && (insn2 & 0x0fff) == 0x0b04)
3181 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3184 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3185 && (insn2 & 0x0e00) == 0x0a00)
3197 /* Since any instruction in the epilogue sequence, with the possible
3198 exception of return itself, updates the stack pointer, we need to
3199 scan backwards for at most one instruction. Try either a 16-bit or
3200 a 32-bit instruction. This is just a heuristic, so we do not worry
3201 too much about false positives. */
3203 if (pc - 4 < func_start)
3205 if (target_read_memory (pc - 4, buf, 4))
3208 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3209 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3211 if (thumb_instruction_restores_sp (insn2))
3212 found_stack_adjust = 1;
3213 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3214 found_stack_adjust = 1;
3215 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3216 && (insn2 & 0x0fff) == 0x0b04)
3217 found_stack_adjust = 1;
3218 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3219 && (insn2 & 0x0e00) == 0x0a00)
3220 found_stack_adjust = 1;
3222 return found_stack_adjust;
3226 arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch, CORE_ADDR pc)
3228 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3231 CORE_ADDR func_start, func_end;
3233 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3236 /* We are in the epilogue if the previous instruction was a stack
3237 adjustment and the next instruction is a possible return (bx, mov
3238 pc, or pop). We could have to scan backwards to find the stack
3239 adjustment, or forwards to find the return, but this is a decent
3240 approximation. First scan forwards. */
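  /* An illustrative ARM epilogue accepted by this heuristic (added
     example, not exhaustive):

	mov	sp, fp			@ arm_instruction_restores_sp
	ldmia	sp!, {fp, pc}		@ pop including PC  */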
3243 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3244 if (bits (insn, 28, 31) != INST_NV)
3246 if ((insn & 0x0ffffff0) == 0x012fff10)
3249 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3252 else if ((insn & 0x0fff0000) == 0x08bd0000
3253 && (insn & 0x0000c000) != 0)
3254 /* POP (LDMIA), including PC or LR. */
3261 /* Scan backwards. This is just a heuristic, so do not worry about
3262 false positives from mode changes. */
3264 if (pc < func_start + 4)
3267 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
3268 if (arm_instruction_restores_sp (insn))
3274 /* Implement the stack_frame_destroyed_p gdbarch method. */
3277 arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3279 if (arm_pc_is_thumb (gdbarch, pc))
3280 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3282 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
3285 /* When arguments must be pushed onto the stack, they go on in reverse
3286 order. The code below implements a FILO (stack) to do this. */
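/* A rough usage sketch (added illustration, mirroring the code further
   below):

     struct stack_item *si = NULL;
     si = push_stack_item (si, buf, INT_REGISTER_SIZE);
     ...
     write_memory (sp, si->data, si->len);
     si = pop_stack_item (si);		(frees the popped item)

   arm_push_dummy_call uses this to buffer stack arguments until the
   final value of SP is known.  */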
3291 struct stack_item *prev;
3295 static struct stack_item *
3296 push_stack_item (struct stack_item *prev, const gdb_byte *contents, int len)
3298 struct stack_item *si;
3299 si = XNEW (struct stack_item);
3300 si->data = (gdb_byte *) xmalloc (len);
3303 memcpy (si->data, contents, len);
3307 static struct stack_item *
3308 pop_stack_item (struct stack_item *si)
3310 struct stack_item *dead = si;
3317 /* Implement the gdbarch type alignment method, overriding the generic
3318 alignment algorithm for anything that is ARM-specific. */
3321 arm_type_align (gdbarch *gdbarch, struct type *t)
3323 t = check_typedef (t);
3324 if (TYPE_CODE (t) == TYPE_CODE_ARRAY && TYPE_VECTOR (t))
3326 /* Use the natural alignment for vector types (the same as for
3327 scalar types), but the maximum alignment is 64 bits. */
3328 if (TYPE_LENGTH (t) > 8)
3331 return TYPE_LENGTH (t);
3334 /* Allow the common code to calculate the alignment. */
3338 /* Possible base types for a candidate for passing and returning in
3341 enum arm_vfp_cprc_base_type
3350 /* The length of one element of base type B. */
3353 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3357 case VFP_CPRC_SINGLE:
3359 case VFP_CPRC_DOUBLE:
3361 case VFP_CPRC_VEC64:
3363 case VFP_CPRC_VEC128:
3366 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3371 /* The character ('s', 'd' or 'q') for the type of VFP register used
3372 for passing base type B. */
3375 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3379 case VFP_CPRC_SINGLE:
3381 case VFP_CPRC_DOUBLE:
3383 case VFP_CPRC_VEC64:
3385 case VFP_CPRC_VEC128:
3388 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3393 /* Determine whether T may be part of a candidate for passing and
3394 returning in VFP registers, ignoring the limit on the total number
3395 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3396 classification of the first valid component found; if it is not
3397 VFP_CPRC_UNKNOWN, all components must have the same classification
3398 as *BASE_TYPE. If it is found that T contains a type not permitted
3399 for passing and returning in VFP registers, a type differently
3400 classified from *BASE_TYPE, or two types differently classified
3401 from each other, return -1, otherwise return the total number of
3402 base-type elements found (possibly 0 in an empty structure or
3403 array). Vector types are not currently supported, matching the
3404 generic AAPCS support. */
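/* Illustrative classifications (added examples, assuming a 4-byte
   float and an 8-byte double):

     float				-> 1 x VFP_CPRC_SINGLE
     struct { double x, y; }		-> 2 x VFP_CPRC_DOUBLE
     double complex			-> 2 x VFP_CPRC_DOUBLE
     struct { float f; int i; }		-> -1 (not a candidate)  */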
3407 arm_vfp_cprc_sub_candidate (struct type *t,
3408 enum arm_vfp_cprc_base_type *base_type)
3410 t = check_typedef (t);
3411 switch (TYPE_CODE (t))
3414 switch (TYPE_LENGTH (t))
3417 if (*base_type == VFP_CPRC_UNKNOWN)
3418 *base_type = VFP_CPRC_SINGLE;
3419 else if (*base_type != VFP_CPRC_SINGLE)
3424 if (*base_type == VFP_CPRC_UNKNOWN)
3425 *base_type = VFP_CPRC_DOUBLE;
3426 else if (*base_type != VFP_CPRC_DOUBLE)
3435 case TYPE_CODE_COMPLEX:
3436 /* Arguments of complex T where T is one of the types float or
3437 double get treated as if they are implemented as:
3446 switch (TYPE_LENGTH (t))
3449 if (*base_type == VFP_CPRC_UNKNOWN)
3450 *base_type = VFP_CPRC_SINGLE;
3451 else if (*base_type != VFP_CPRC_SINGLE)
3456 if (*base_type == VFP_CPRC_UNKNOWN)
3457 *base_type = VFP_CPRC_DOUBLE;
3458 else if (*base_type != VFP_CPRC_DOUBLE)
3467 case TYPE_CODE_ARRAY:
3469 if (TYPE_VECTOR (t))
3471 /* 64-bit and 128-bit containerized vector types are VFP
3473 switch (TYPE_LENGTH (t))
3476 if (*base_type == VFP_CPRC_UNKNOWN)
3477 *base_type = VFP_CPRC_VEC64;
3480 if (*base_type == VFP_CPRC_UNKNOWN)
3481 *base_type = VFP_CPRC_VEC128;
3492 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t),
3496 if (TYPE_LENGTH (t) == 0)
3498 gdb_assert (count == 0);
3501 else if (count == 0)
3503 unitlen = arm_vfp_cprc_unit_length (*base_type);
3504 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3505 return TYPE_LENGTH (t) / unitlen;
3510 case TYPE_CODE_STRUCT:
3515 for (i = 0; i < TYPE_NFIELDS (t); i++)
3519 if (!field_is_static (&TYPE_FIELD (t, i)))
3520 sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3522 if (sub_count == -1)
3526 if (TYPE_LENGTH (t) == 0)
3528 gdb_assert (count == 0);
3531 else if (count == 0)
3533 unitlen = arm_vfp_cprc_unit_length (*base_type);
3534 if (TYPE_LENGTH (t) != unitlen * count)
3539 case TYPE_CODE_UNION:
3544 for (i = 0; i < TYPE_NFIELDS (t); i++)
3546 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3548 if (sub_count == -1)
3550 count = (count > sub_count ? count : sub_count);
3552 if (TYPE_LENGTH (t) == 0)
3554 gdb_assert (count == 0);
3557 else if (count == 0)
3559 unitlen = arm_vfp_cprc_unit_length (*base_type);
3560 if (TYPE_LENGTH (t) != unitlen * count)
3572 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3573 if passed to or returned from a non-variadic function with the VFP
3574 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3575 *BASE_TYPE to the base type for T and *COUNT to the number of
3576 elements of that base type before returning. */
3579 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3582 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3583 int c = arm_vfp_cprc_sub_candidate (t, &b);
3584 if (c <= 0 || c > 4)
3591 /* Return 1 if the VFP ABI should be used for passing arguments to and
3592 returning values from a function of type FUNC_TYPE, 0 otherwise. */
3596 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3598 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3599 /* Variadic functions always use the base ABI. Assume that functions
3600 without debug info are not variadic. */
3601 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3603 /* The VFP ABI is only supported as a variant of AAPCS. */
3604 if (tdep->arm_abi != ARM_ABI_AAPCS)
3606 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3609 /* We currently only support passing parameters in integer registers, which
3610 conforms with GCC's default model, and VFP argument passing following
3611 the VFP variant of AAPCS. Several other variants exist and
3612 we should probably support some of them based on the selected ABI. */
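/* For illustration (an added example following the AAPCS rules noted
   above): for a call to f (double d, int i), the VFP variant places D
   in d0 and I in r0, while the base AAPCS places D in the even/odd
   core register pair r0/r1 and I in r2.  */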
3615 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
3616 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3617 struct value **args, CORE_ADDR sp,
3618 function_call_return_method return_method,
3619 CORE_ADDR struct_addr)
3621 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3625 struct stack_item *si = NULL;
3628 unsigned vfp_regs_free = (1 << 16) - 1;
3630 /* Determine the type of this function and whether the VFP ABI
3632 ftype = check_typedef (value_type (function));
3633 if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3634 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3635 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
3637 /* Set the return address. For the ARM, the return breakpoint is
3638 always at BP_ADDR. */
3639 if (arm_pc_is_thumb (gdbarch, bp_addr))
3641 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
3643 /* Walk through the list of args and determine how large a temporary
3644 stack is required. Need to take care here as structs may be
3645 passed on the stack, and we have to push them. */
3648 argreg = ARM_A1_REGNUM;
3651 /* The struct_return pointer occupies the first parameter
3652 passing register. */
3653 if (return_method == return_method_struct)
3656 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
3657 gdbarch_register_name (gdbarch, argreg),
3658 paddress (gdbarch, struct_addr));
3659 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3663 for (argnum = 0; argnum < nargs; argnum++)
3666 struct type *arg_type;
3667 struct type *target_type;
3668 enum type_code typecode;
3669 const bfd_byte *val;
3671 enum arm_vfp_cprc_base_type vfp_base_type;
3673 int may_use_core_reg = 1;
3675 arg_type = check_typedef (value_type (args[argnum]));
3676 len = TYPE_LENGTH (arg_type);
3677 target_type = TYPE_TARGET_TYPE (arg_type);
3678 typecode = TYPE_CODE (arg_type);
3679 val = value_contents (args[argnum]);
3681 align = type_align (arg_type);
3682 /* Round alignment up to a whole number of words. */
3683 align = (align + INT_REGISTER_SIZE - 1) & ~(INT_REGISTER_SIZE - 1);
3684 /* Different ABIs have different maximum alignments. */
3685 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3687 /* The APCS ABI only requires word alignment. */
3688 align = INT_REGISTER_SIZE;
3692 /* The AAPCS requires at most doubleword alignment. */
3693 if (align > INT_REGISTER_SIZE * 2)
3694 align = INT_REGISTER_SIZE * 2;
3698 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3706 /* Because this is a CPRC it cannot go in a core register or
3707 cause a core register to be skipped for alignment.
3708 Either it goes in VFP registers and the rest of this loop
3709 iteration is skipped for this argument, or it goes on the
3710 stack (and the stack alignment code is correct for this case). */
3712 may_use_core_reg = 0;
3714 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3715 shift = unit_length / 4;
3716 mask = (1 << (shift * vfp_base_count)) - 1;
3717 for (regno = 0; regno < 16; regno += shift)
3718 if (((vfp_regs_free >> regno) & mask) == mask)
3727 vfp_regs_free &= ~(mask << regno);
3728 reg_scaled = regno / shift;
3729 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3730 for (i = 0; i < vfp_base_count; i++)
3734 if (reg_char == 'q')
3735 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
3736 val + i * unit_length);
3739 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3740 reg_char, reg_scaled + i);
3741 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3743 regcache->cooked_write (regnum, val + i * unit_length);
3750 /* This CPRC could not go in VFP registers, so all VFP
3751 registers are now marked as used. */
3756 /* Push stack padding for doubleword alignment. */
3757 if (nstack & (align - 1))
3759 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3760 nstack += INT_REGISTER_SIZE;
3763 /* Doubleword aligned quantities must go in even register pairs. */
3764 if (may_use_core_reg
3765 && argreg <= ARM_LAST_ARG_REGNUM
3766 && align > INT_REGISTER_SIZE
3770 /* If the argument is a pointer to a function, and it is a
3771 Thumb function, create a LOCAL copy of the value and set
3772 the THUMB bit in it. */
3773 if (TYPE_CODE_PTR == typecode
3774 && target_type != NULL
3775 && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
3777 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
3778 if (arm_pc_is_thumb (gdbarch, regval))
3780 bfd_byte *copy = (bfd_byte *) alloca (len);
3781 store_unsigned_integer (copy, len, byte_order,
3782 MAKE_THUMB_ADDR (regval));
3787 /* Copy the argument to general registers or the stack in
3788 register-sized pieces. Large arguments are split between
3789 registers and stack. */
3792 int partial_len = len < INT_REGISTER_SIZE ? len : INT_REGISTER_SIZE;
3794 = extract_unsigned_integer (val, partial_len, byte_order);
3796 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
3798 /* The argument is being passed in a general purpose
3800 if (byte_order == BFD_ENDIAN_BIG)
3801 regval <<= (INT_REGISTER_SIZE - partial_len) * 8;
3803 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
3805 gdbarch_register_name
3807 phex (regval, INT_REGISTER_SIZE));
3808 regcache_cooked_write_unsigned (regcache, argreg, regval);
3813 gdb_byte buf[INT_REGISTER_SIZE];
3815 memset (buf, 0, sizeof (buf));
3816 store_unsigned_integer (buf, partial_len, byte_order, regval);
3818 /* Push the arguments onto the stack. */
3820 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3822 si = push_stack_item (si, buf, INT_REGISTER_SIZE);
3823 nstack += INT_REGISTER_SIZE;
3830 /* If we have an odd number of words to push, then decrement the stack
3831 by one word now, so the first stack argument will be dword aligned. */
3838 write_memory (sp, si->data, si->len);
3839 si = pop_stack_item (si);
3842 /* Finally, update the SP register. */
3843 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3849 /* Always align the frame to an 8-byte boundary. This is required on
3850 some platforms and harmless on the rest. */
3853 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3855 /* Align the stack to eight bytes. */
3856 return sp & ~ (CORE_ADDR) 7;
3860 print_fpu_flags (struct ui_file *file, int flags)
3862 if (flags & (1 << 0))
3863 fputs_filtered ("IVO ", file);
3864 if (flags & (1 << 1))
3865 fputs_filtered ("DVZ ", file);
3866 if (flags & (1 << 2))
3867 fputs_filtered ("OFL ", file);
3868 if (flags & (1 << 3))
3869 fputs_filtered ("UFL ", file);
3870 if (flags & (1 << 4))
3871 fputs_filtered ("INX ", file);
3872 fputc_filtered ('\n', file);
3875 /* Print interesting information about the floating point processor
3876 (if present) or emulator. */
3878 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
3879 struct frame_info *frame, const char *args)
3881 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
3884 type = (status >> 24) & 127;
3885 if (status & (1 << 31))
3886 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
3888 fprintf_filtered (file, _("Software FPU type %d\n"), type);
3889 /* i18n: [floating point unit] mask */
3890 fputs_filtered (_("mask: "), file);
3891 print_fpu_flags (file, status >> 16);
3892 /* i18n: [floating point unit] flags */
3893 fputs_filtered (_("flags: "), file);
3894 print_fpu_flags (file, status);
3897 /* Construct the ARM extended floating point type. */
3898 static struct type *
3899 arm_ext_type (struct gdbarch *gdbarch)
3901 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3903 if (!tdep->arm_ext_type)
3905 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
3906 floatformats_arm_ext);
3908 return tdep->arm_ext_type;
3911 static struct type *
3912 arm_neon_double_type (struct gdbarch *gdbarch)
3914 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3916 if (tdep->neon_double_type == NULL)
3918 struct type *t, *elem;
3920 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
3922 elem = builtin_type (gdbarch)->builtin_uint8;
3923 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
3924 elem = builtin_type (gdbarch)->builtin_uint16;
3925 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
3926 elem = builtin_type (gdbarch)->builtin_uint32;
3927 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
3928 elem = builtin_type (gdbarch)->builtin_uint64;
3929 append_composite_type_field (t, "u64", elem);
3930 elem = builtin_type (gdbarch)->builtin_float;
3931 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
3932 elem = builtin_type (gdbarch)->builtin_double;
3933 append_composite_type_field (t, "f64", elem);
3935 TYPE_VECTOR (t) = 1;
3936 TYPE_NAME (t) = "neon_d";
3937 tdep->neon_double_type = t;
3940 return tdep->neon_double_type;
3943 /* FIXME: The vector types are not correctly ordered on big-endian
3944 targets. Just as s0 is the low bits of d0, d0[0] is also the low
3945 bits of d0 - regardless of what unit size is being held in d0. So
3946 the offset of the first uint8 in d0 is 7, but the offset of the
3947 first float is 4. This code works as-is for little-endian
3950 static struct type *
3951 arm_neon_quad_type (struct gdbarch *gdbarch)
3953 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3955 if (tdep->neon_quad_type == NULL)
3957 struct type *t, *elem;
3959 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
3961 elem = builtin_type (gdbarch)->builtin_uint8;
3962 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
3963 elem = builtin_type (gdbarch)->builtin_uint16;
3964 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
3965 elem = builtin_type (gdbarch)->builtin_uint32;
3966 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
3967 elem = builtin_type (gdbarch)->builtin_uint64;
3968 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
3969 elem = builtin_type (gdbarch)->builtin_float;
3970 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
3971 elem = builtin_type (gdbarch)->builtin_double;
3972 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
3974 TYPE_VECTOR (t) = 1;
3975 TYPE_NAME (t) = "neon_q";
3976 tdep->neon_quad_type = t;
3979 return tdep->neon_quad_type;
3982 /* Return the GDB type object for the "standard" data type of data in
3985 static struct type *
3986 arm_register_type (struct gdbarch *gdbarch, int regnum)
3988 int num_regs = gdbarch_num_regs (gdbarch);
3990 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
3991 && regnum >= num_regs && regnum < num_regs + 32)
3992 return builtin_type (gdbarch)->builtin_float;
3994 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
3995 && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
3996 return arm_neon_quad_type (gdbarch);
3998 /* If the target description has register information, we are only
3999 in this function so that we can override the types of
4000 double-precision registers for NEON. */
4001 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
4003 struct type *t = tdesc_register_type (gdbarch, regnum);
4005 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
4006 && TYPE_CODE (t) == TYPE_CODE_FLT
4007 && gdbarch_tdep (gdbarch)->have_neon)
4008 return arm_neon_double_type (gdbarch);
4013 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
4015 if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
4016 return builtin_type (gdbarch)->builtin_void;
4018 return arm_ext_type (gdbarch);
4020 else if (regnum == ARM_SP_REGNUM)
4021 return builtin_type (gdbarch)->builtin_data_ptr;
4022 else if (regnum == ARM_PC_REGNUM)
4023 return builtin_type (gdbarch)->builtin_func_ptr;
4024 else if (regnum >= ARRAY_SIZE (arm_register_names))
4025 /* These registers are only supported on targets which supply
4026 an XML description. */
4027 return builtin_type (gdbarch)->builtin_int0;
4029 return builtin_type (gdbarch)->builtin_uint32;
4032 /* Map a DWARF register REGNUM onto the appropriate GDB register
4036 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
4038 /* Core integer regs. */
4039 if (reg >= 0 && reg <= 15)
4042 /* Legacy FPA encoding. These were once used in a way which
4043 overlapped with VFP register numbering, so their use is
4044 discouraged, but GDB doesn't support the ARM toolchain
4045 which used them for VFP. */
4046 if (reg >= 16 && reg <= 23)
4047 return ARM_F0_REGNUM + reg - 16;
4049 /* New assignments for the FPA registers. */
4050 if (reg >= 96 && reg <= 103)
4051 return ARM_F0_REGNUM + reg - 96;
4053 /* WMMX register assignments. */
4054 if (reg >= 104 && reg <= 111)
4055 return ARM_WCGR0_REGNUM + reg - 104;
4057 if (reg >= 112 && reg <= 127)
4058 return ARM_WR0_REGNUM + reg - 112;
4060 if (reg >= 192 && reg <= 199)
4061 return ARM_WC0_REGNUM + reg - 192;
4063 /* VFP v2 registers. A double precision value is actually
4064 in d1 rather than s2, but the ABI only defines numbering
4065 for the single precision registers. This will "just work"
4066 in GDB for little endian targets (we'll read eight bytes,
4067 starting in s0 and then progressing to s1), but will be
4068 reversed on big endian targets with VFP. This won't
4069 be a problem for the new Neon quad registers; you're supposed
4070 to use DW_OP_piece for those. */
4071 if (reg >= 64 && reg <= 95)
4075 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
4076 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4080 /* VFP v3 / Neon registers. This range is also used for VFP v2
4081 registers, except that it now describes d0 instead of s0. */
4082 if (reg >= 256 && reg <= 287)
4086 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
4087 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4094 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4096 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4099 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
4101 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4102 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4104 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4105 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4107 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4108 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4110 if (reg < NUM_GREGS)
4111 return SIM_ARM_R0_REGNUM + reg;
4114 if (reg < NUM_FREGS)
4115 return SIM_ARM_FP0_REGNUM + reg;
4118 if (reg < NUM_SREGS)
4119 return SIM_ARM_FPS_REGNUM + reg;
4122 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4125 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
4126 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
4127 NULL if an error occurs. BUF is freed. */
4130 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
4131 int old_len, int new_len)
4134 int bytes_to_read = new_len - old_len;
4136 new_buf = (gdb_byte *) xmalloc (new_len);
4137 memcpy (new_buf + bytes_to_read, buf, old_len);
4139 if (target_read_code (endaddr - new_len, new_buf, bytes_to_read) != 0)
4147 /* An IT block is at most the 2-byte IT instruction followed by
4148 four 4-byte instructions. The furthest back we must search to
4149 find an IT block that affects the current instruction is thus
4150 2 + 3 * 4 == 14 bytes. */
4151 #define MAX_IT_BLOCK_PREFIX 14
4153 /* Use a quick scan if there are more than this many bytes of
4155 #define IT_SCAN_THRESHOLD 32
4157 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
4158 A breakpoint in an IT block may not be hit, depending on the
4161 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
4165 CORE_ADDR boundary, func_start;
4167 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
4168 int i, any, last_it, last_it_count;
4170 /* If we are using BKPT breakpoints, none of this is necessary. */
4171 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
4174 /* ARM mode does not have this problem. */
4175 if (!arm_pc_is_thumb (gdbarch, bpaddr))
4178 /* We are setting a breakpoint in Thumb code that could potentially
4179 contain an IT block. The first step is to find how much Thumb
4180 code there is; we do not need to read outside of known Thumb
4182 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
4184 /* Thumb-2 code must have mapping symbols to have a chance. */
4187 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
4189 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
4190 && func_start > boundary)
4191 boundary = func_start;
4193 /* Search for a candidate IT instruction. We have to do some fancy
4194 footwork to distinguish a real IT instruction from the second
4195 half of a 32-bit instruction, but there is no need for that if
4196 there's no candidate. */
4197 buf_len = std::min (bpaddr - boundary, (CORE_ADDR) MAX_IT_BLOCK_PREFIX);
4199 /* No room for an IT instruction. */
4202 buf = (gdb_byte *) xmalloc (buf_len);
4203 if (target_read_code (bpaddr - buf_len, buf, buf_len) != 0)
4206 for (i = 0; i < buf_len; i += 2)
4208 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4209 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4222 /* OK, the code bytes before this instruction contain at least one
4223 halfword which resembles an IT instruction. We know that it's
4224 Thumb code, but there are still two possibilities. Either the
4225 halfword really is an IT instruction, or it is the second half of
4226 a 32-bit Thumb instruction. The only way we can tell is to
4227 scan forwards from a known instruction boundary. */
4228 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
4232 /* There's a lot of code before this instruction. Start with an
4233 optimistic search; it's easy to recognize halfwords that can
4234 not be the start of a 32-bit instruction, and use that to
4235 lock on to the instruction boundaries. */
4236 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
4239 buf_len = IT_SCAN_THRESHOLD;
4242 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
4244 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4245 if (thumb_insn_size (inst1) == 2)
4252 /* At this point, if DEFINITE, BUF[I] is the first place we
4253 are sure that we know the instruction boundaries, and it is far
4254 enough from BPADDR that we could not miss an IT instruction
4255 affecting BPADDR. If ! DEFINITE, give up - start from a
4259 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
4263 buf_len = bpaddr - boundary;
4269 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
4272 buf_len = bpaddr - boundary;
4276 /* Scan forwards. Find the last IT instruction before BPADDR. */
4281 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4283 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4288 else if (inst1 & 0x0002)
4290 else if (inst1 & 0x0004)
4295 i += thumb_insn_size (inst1);
4301 /* There wasn't really an IT instruction after all. */
4304 if (last_it_count < 1)
4305 /* It was too far away. */
4308 /* This really is a trouble spot. Move the breakpoint to the IT
4310 return bpaddr - buf_len + last_it;
4313 /* ARM displaced stepping support.
4315 Generally ARM displaced stepping works as follows:
4317 1. When an instruction is to be single-stepped, it is first decoded by
4318 arm_process_displaced_insn. Depending on the type of instruction, it is
4319 then copied to a scratch location, possibly in a modified form. The
4320 copy_* set of functions performs such modification, as necessary. A
4321 breakpoint is placed after the modified instruction in the scratch space
4322 to return control to GDB. Note in particular that instructions which
4323 modify the PC will no longer do so after modification.
4325 2. The instruction is single-stepped, by setting the PC to the scratch
4326 location address, and resuming. Control returns to GDB when the
4329 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
4330 function used for the current instruction. This function's job is to
4331 put the CPU/memory state back to what it would have been if the
4332 instruction had been executed unmodified in its original location. */
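/* As an illustrative sketch (not part of the original commentary), consider
   single-stepping the ARM instruction

       0x8000:  add r1, pc, #4

   Executed in place it would set r1 to 0x800c, since the PC reads as
   0x8008.  Step 1 copies it to the scratch area in a form that uses only
   scratch registers, with the register standing in for the PC preloaded
   with 0x8008.  Step 2 executes the copy and stops at the trailing
   breakpoint.  Step 3's cleanup routine moves the computed result into the
   real r1 and restores the scratch registers, leaving the same machine
   state a native execution at 0x8000 would have produced.  */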
4334 /* NOP instruction (mov r0, r0). */
4335 #define ARM_NOP 0xe1a00000
4336 #define THUMB_NOP 0x4600
4338 /* Helper for register reads for displaced stepping. In particular, this
4339 returns the PC as it would be seen by the instruction at its original
4343 displaced_read_reg (struct regcache *regs, arm_displaced_step_closure *dsc,
4347 CORE_ADDR from = dsc->insn_addr;
4349 if (regno == ARM_PC_REGNUM)
4351 /* Compute pipeline offset:
4352 - When executing an ARM instruction, PC reads as the address of the
4353 current instruction plus 8.
4354 - When executing a Thumb instruction, PC reads as the address of the
4355 current instruction plus 4. */
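/* Illustrative example (not part of the original commentary): for an ARM
   instruction originally at 0x1000 this returns 0x1008, and for a Thumb
   instruction at 0x1000 it returns 0x1004, regardless of where the copied
   instruction actually executes.  */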
4362 if (debug_displaced)
4363 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
4364 (unsigned long) from);
4365 return (ULONGEST) from;
4369 regcache_cooked_read_unsigned (regs, regno, &ret);
4370 if (debug_displaced)
4371 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
4372 regno, (unsigned long) ret);
4378 displaced_in_arm_mode (struct regcache *regs)
4381 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
4383 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4385 return (ps & t_bit) == 0;
4388 /* Write to the PC as from a branch instruction. */
4391 branch_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4395 /* Note: If bits 0/1 are set, this branch would be unpredictable for
4396 architecture versions < 6. */
4397 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4398 val & ~(ULONGEST) 0x3);
4400 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4401 val & ~(ULONGEST) 0x1);
4404 /* Write to the PC as from a branch-exchange instruction. */
4407 bx_write_pc (struct regcache *regs, ULONGEST val)
4410 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
4412 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4416 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
4417 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
4419 else if ((val & 2) == 0)
4421 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4422 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
4426 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
4427 mode, align dest to 4 bytes). */
4428 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
4429 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4430 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
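/* Illustrative example (not part of the original commentary):
   bx_write_pc (regs, 0x8001) sets the Thumb bit in CPSR and writes 0x8000
   to the PC, whereas bx_write_pc (regs, 0x8000) clears the Thumb bit and
   writes 0x8000 unchanged.  */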
4434 /* Write to the PC as if from a load instruction. */
4437 load_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4440 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
4441 bx_write_pc (regs, val);
4443 branch_write_pc (regs, dsc, val);
4446 /* Write to the PC as if from an ALU instruction. */
4449 alu_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4452 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
4453 bx_write_pc (regs, val);
4455 branch_write_pc (regs, dsc, val);
4458 /* Helper for writing to registers for displaced stepping. Writing to the PC
4459 has varying effects depending on the instruction which does the write:
4460 this is controlled by the WRITE_PC argument. */
4463 displaced_write_reg (struct regcache *regs, arm_displaced_step_closure *dsc,
4464 int regno, ULONGEST val, enum pc_write_style write_pc)
4466 if (regno == ARM_PC_REGNUM)
4468 if (debug_displaced)
4469 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
4470 (unsigned long) val);
4473 case BRANCH_WRITE_PC:
4474 branch_write_pc (regs, dsc, val);
4478 bx_write_pc (regs, val);
4482 load_write_pc (regs, dsc, val);
4486 alu_write_pc (regs, dsc, val);
4489 case CANNOT_WRITE_PC:
4490 warning (_("Instruction wrote to PC in an unexpected way when "
4491 "single-stepping"));
4495 internal_error (__FILE__, __LINE__,
4496 _("Invalid argument to displaced_write_reg"));
4499 dsc->wrote_to_pc = 1;
4503 if (debug_displaced)
4504 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
4505 regno, (unsigned long) val);
4506 regcache_cooked_write_unsigned (regs, regno, val);
4510 /* This function is used to concisely determine if an instruction INSN
4511 references PC. Register fields of interest in INSN should have the
4512 corresponding fields of BITMASK set to 0b1111. The function
4513 returns 1 if any of these fields in INSN references the PC
4514 (also 0b1111, r15), else it returns 0. */
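/* Illustrative example (not part of the original commentary): ARM data
   processing instructions keep Rn in bits 16-19 and Rd in bits 12-15, so a
   caller passes BITMASK 0x000ff000; for the encoding 0xe28ff004
   (ADD pc, pc, #4) both fields are 0b1111 and the function returns 1.  */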
4517 insn_references_pc (uint32_t insn, uint32_t bitmask)
4519 uint32_t lowbit = 1;
4521 while (bitmask != 0)
4525 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
4531 mask = lowbit * 0xf;
4533 if ((insn & mask) == mask)
4542 /* The simplest copy function. Many instructions have the same effect no
4543 matter what address they are executed at: in those cases, use this. */
4546 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
4547 const char *iname, arm_displaced_step_closure *dsc)
4549 if (debug_displaced)
4550 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
4551 "opcode/class '%s' unmodified\n", (unsigned long) insn,
4554 dsc->modinsn[0] = insn;
4560 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
4561 uint16_t insn2, const char *iname,
4562 arm_displaced_step_closure *dsc)
4564 if (debug_displaced)
4565 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
4566 "opcode/class '%s' unmodified\n", insn1, insn2,
4569 dsc->modinsn[0] = insn1;
4570 dsc->modinsn[1] = insn2;
4576 /* Copy 16-bit Thumb (Thumb and 16-bit Thumb-2) instruction without any
4579 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, uint16_t insn,
4581 arm_displaced_step_closure *dsc)
4583 if (debug_displaced)
4584 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
4585 "opcode/class '%s' unmodified\n", insn,
4588 dsc->modinsn[0] = insn;
4593 /* Preload instructions with immediate offset. */
4596 cleanup_preload (struct gdbarch *gdbarch,
4597 struct regcache *regs, arm_displaced_step_closure *dsc)
4599 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4600 if (!dsc->u.preload.immed)
4601 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
4605 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
4606 arm_displaced_step_closure *dsc, unsigned int rn)
4609 /* Preload instructions:
4611 {pli/pld} [rn, #+/-imm]
4613 {pli/pld} [r0, #+/-imm]. */
4615 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4616 rn_val = displaced_read_reg (regs, dsc, rn);
4617 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4618 dsc->u.preload.immed = 1;
4620 dsc->cleanup = &cleanup_preload;
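/* Illustrative example (not part of the original commentary): for
   "pld [pc, #16]" at 0x1000, r0 is temporarily loaded with the PC value
   0x1008 and the copied instruction becomes "pld [r0, #16]", so it
   prefetches from 0x1018 just as the original would have; cleanup_preload
   then restores r0.  */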
4624 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
4625 arm_displaced_step_closure *dsc)
4627 unsigned int rn = bits (insn, 16, 19);
4629 if (!insn_references_pc (insn, 0x000f0000ul))
4630 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
4632 if (debug_displaced)
4633 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4634 (unsigned long) insn);
4636 dsc->modinsn[0] = insn & 0xfff0ffff;
4638 install_preload (gdbarch, regs, dsc, rn);
4644 thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
4645 struct regcache *regs, arm_displaced_step_closure *dsc)
4647 unsigned int rn = bits (insn1, 0, 3);
4648 unsigned int u_bit = bit (insn1, 7);
4649 int imm12 = bits (insn2, 0, 11);
4652 if (rn != ARM_PC_REGNUM)
4653 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
4655 /* The PC is only allowed to be used in PLI (immediate, literal) Encoding T3,
4656 and PLD (literal) Encoding T1. */
4657 if (debug_displaced)
4658 fprintf_unfiltered (gdb_stdlog,
4659 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
4660 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
4666 /* Rewrite instruction {pli/pld} PC imm12 into:
4667 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
4671 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
4673 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4674 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4676 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
4678 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
4679 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
4680 dsc->u.preload.immed = 0;
4682 /* {pli/pld} [r0, r1] */
4683 dsc->modinsn[0] = insn1 & 0xfff0;
4684 dsc->modinsn[1] = 0xf001;
4687 dsc->cleanup = &cleanup_preload;
4691 /* Preload instructions with register offset. */
4694 install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
4695 arm_displaced_step_closure *dsc, unsigned int rn,
4698 ULONGEST rn_val, rm_val;
4700 /* Preload register-offset instructions:
4702 {pli/pld} [rn, rm {, shift}]
4704 {pli/pld} [r0, r1 {, shift}]. */
4706 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4707 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4708 rn_val = displaced_read_reg (regs, dsc, rn);
4709 rm_val = displaced_read_reg (regs, dsc, rm);
4710 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4711 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
4712 dsc->u.preload.immed = 0;
4714 dsc->cleanup = &cleanup_preload;
4718 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
4719 struct regcache *regs,
4720 arm_displaced_step_closure *dsc)
4722 unsigned int rn = bits (insn, 16, 19);
4723 unsigned int rm = bits (insn, 0, 3);
4726 if (!insn_references_pc (insn, 0x000f000ful))
4727 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
4729 if (debug_displaced)
4730 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4731 (unsigned long) insn);
4733 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
4735 install_preload_reg (gdbarch, regs, dsc, rn, rm);
4739 /* Copy/cleanup coprocessor load and store instructions. */
4742 cleanup_copro_load_store (struct gdbarch *gdbarch,
4743 struct regcache *regs,
4744 arm_displaced_step_closure *dsc)
4746 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
4748 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4750 if (dsc->u.ldst.writeback)
4751 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
4755 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
4756 arm_displaced_step_closure *dsc,
4757 int writeback, unsigned int rn)
4761 /* Coprocessor load/store instructions:
4763 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
4765 {stc/stc2} [r0, #+/-imm].
4767 ldc/ldc2 are handled identically. */
4769 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4770 rn_val = displaced_read_reg (regs, dsc, rn);
4771 /* PC should be 4-byte aligned. */
4772 rn_val = rn_val & 0xfffffffc;
4773 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4775 dsc->u.ldst.writeback = writeback;
4776 dsc->u.ldst.rn = rn;
4778 dsc->cleanup = &cleanup_copro_load_store;
4782 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
4783 struct regcache *regs,
4784 arm_displaced_step_closure *dsc)
4786 unsigned int rn = bits (insn, 16, 19);
4788 if (!insn_references_pc (insn, 0x000f0000ul))
4789 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
4791 if (debug_displaced)
4792 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4793 "load/store insn %.8lx\n", (unsigned long) insn);
4795 dsc->modinsn[0] = insn & 0xfff0ffff;
4797 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
4803 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
4804 uint16_t insn2, struct regcache *regs,
4805 arm_displaced_step_closure *dsc)
4807 unsigned int rn = bits (insn1, 0, 3);
4809 if (rn != ARM_PC_REGNUM)
4810 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
4811 "copro load/store", dsc);
4813 if (debug_displaced)
4814 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4815 "load/store insn %.4x%.4x\n", insn1, insn2);
4817 dsc->modinsn[0] = insn1 & 0xfff0;
4818 dsc->modinsn[1] = insn2;
4821 /* This function is called to copy the LDC/LDC2/VLDR instructions, which
4822 don't support writeback, so pass 0. */
4823 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
4828 /* Clean up branch instructions (actually perform the branch, by setting
4832 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
4833 arm_displaced_step_closure *dsc)
4835 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
4836 int branch_taken = condition_true (dsc->u.branch.cond, status);
4837 enum pc_write_style write_pc = dsc->u.branch.exchange
4838 ? BX_WRITE_PC : BRANCH_WRITE_PC;
4843 if (dsc->u.branch.link)
4845 /* The value of LR should be the address of the insn following the current
4846 one. In order not to confuse logic handling the later insn `bx lr', if the
4847 current insn mode is Thumb, bit 0 of the LR value should be set to 1. */
4848 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
4851 next_insn_addr |= 0x1;
4853 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
4857 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
4860 /* Copy B/BL/BLX instructions with immediate destinations. */
4863 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
4864 arm_displaced_step_closure *dsc,
4865 unsigned int cond, int exchange, int link, long offset)
4867 /* Implement "BL<cond> <label>" as:
4869 Preparation: cond <- instruction condition
4870 Insn: mov r0, r0 (nop)
4871 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
4873 B<cond> similar, but don't set r14 in cleanup. */
4875 dsc->u.branch.cond = cond;
4876 dsc->u.branch.link = link;
4877 dsc->u.branch.exchange = exchange;
4879 dsc->u.branch.dest = dsc->insn_addr;
4880 if (link && exchange)
4881 /* For BLX, offset is computed from the Align (PC, 4). */
4882 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
4885 dsc->u.branch.dest += 4 + offset;
4887 dsc->u.branch.dest += 8 + offset;
4889 dsc->cleanup = &cleanup_branch;
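/* Illustrative example (not part of the original commentary): an ARM
   "bl <label>" at 0x1000 whose 24-bit immediate is 0x000004 gives
   offset = 0x10, so cleanup_branch sets LR to 0x1004 (the following insn)
   and branches to 0x1000 + 8 + 0x10 = 0x1018, exactly what the unmodified
   instruction would have done in place.  */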
4892 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
4893 struct regcache *regs, arm_displaced_step_closure *dsc)
4895 unsigned int cond = bits (insn, 28, 31);
4896 int exchange = (cond == 0xf);
4897 int link = exchange || bit (insn, 24);
4900 if (debug_displaced)
4901 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
4902 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
4903 (unsigned long) insn);
4905 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
4906 then arrange the switch into Thumb mode. */
4907 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
4909 offset = bits (insn, 0, 23) << 2;
4911 if (bit (offset, 25))
4912 offset = offset | ~0x3ffffff;
4914 dsc->modinsn[0] = ARM_NOP;
4916 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
4921 thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
4922 uint16_t insn2, struct regcache *regs,
4923 arm_displaced_step_closure *dsc)
4925 int link = bit (insn2, 14);
4926 int exchange = link && !bit (insn2, 12);
4929 int j1 = bit (insn2, 13);
4930 int j2 = bit (insn2, 11);
4931 int s = sbits (insn1, 10, 10);
4932 int i1 = !(j1 ^ bit (insn1, 10));
4933 int i2 = !(j2 ^ bit (insn1, 10));
4935 if (!link && !exchange) /* B */
4937 offset = (bits (insn2, 0, 10) << 1);
4938 if (bit (insn2, 12)) /* Encoding T4 */
4940 offset |= (bits (insn1, 0, 9) << 12)
4946 else /* Encoding T3 */
4948 offset |= (bits (insn1, 0, 5) << 12)
4952 cond = bits (insn1, 6, 9);
4957 offset = (bits (insn1, 0, 9) << 12);
4958 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
4959 offset |= exchange ?
4960 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
4963 if (debug_displaced)
4964 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
4965 "%.4x %.4x with offset %.8lx\n",
4966 link ? (exchange) ? "blx" : "bl" : "b",
4967 insn1, insn2, offset);
4969 dsc->modinsn[0] = THUMB_NOP;
4971 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
4975 /* Copy B Thumb instructions. */
4977 thumb_copy_b (struct gdbarch *gdbarch, uint16_t insn,
4978 arm_displaced_step_closure *dsc)
4980 unsigned int cond = 0;
4982 unsigned short bit_12_15 = bits (insn, 12, 15);
4983 CORE_ADDR from = dsc->insn_addr;
4985 if (bit_12_15 == 0xd)
4987 /* offset = SignExtend (imm8:0, 32) */
4988 offset = sbits ((insn << 1), 0, 8);
4989 cond = bits (insn, 8, 11);
4991 else if (bit_12_15 == 0xe) /* Encoding T2 */
4993 offset = sbits ((insn << 1), 0, 11);
4997 if (debug_displaced)
4998 fprintf_unfiltered (gdb_stdlog,
4999 "displaced: copying b immediate insn %.4x "
5000 "with offset %d\n", insn, offset);
5002 dsc->u.branch.cond = cond;
5003 dsc->u.branch.link = 0;
5004 dsc->u.branch.exchange = 0;
5005 dsc->u.branch.dest = from + 4 + offset;
5007 dsc->modinsn[0] = THUMB_NOP;
5009 dsc->cleanup = &cleanup_branch;
5014 /* Copy BX/BLX with register-specified destinations. */
5017 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
5018 arm_displaced_step_closure *dsc, int link,
5019 unsigned int cond, unsigned int rm)
5021 /* Implement "{BX,BLX}<cond> <reg>" as:
5023 Preparation: cond <- instruction condition
5024 Insn: mov r0, r0 (nop)
5025 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5027 Don't set r14 in cleanup for BX. */
5029 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
5031 dsc->u.branch.cond = cond;
5032 dsc->u.branch.link = link;
5034 dsc->u.branch.exchange = 1;
5036 dsc->cleanup = &cleanup_branch;
5040 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
5041 struct regcache *regs, arm_displaced_step_closure *dsc)
5043 unsigned int cond = bits (insn, 28, 31);
5046 int link = bit (insn, 5);
5047 unsigned int rm = bits (insn, 0, 3);
5049 if (debug_displaced)
5050 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
5051 (unsigned long) insn);
5053 dsc->modinsn[0] = ARM_NOP;
5055 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
5060 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
5061 struct regcache *regs,
5062 arm_displaced_step_closure *dsc)
5064 int link = bit (insn, 7);
5065 unsigned int rm = bits (insn, 3, 6);
5067 if (debug_displaced)
5068 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
5069 (unsigned short) insn);
5071 dsc->modinsn[0] = THUMB_NOP;
5073 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
5079 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
5082 cleanup_alu_imm (struct gdbarch *gdbarch,
5083 struct regcache *regs, arm_displaced_step_closure *dsc)
5085 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5086 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5087 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5088 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5092 arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5093 arm_displaced_step_closure *dsc)
5095 unsigned int rn = bits (insn, 16, 19);
5096 unsigned int rd = bits (insn, 12, 15);
5097 unsigned int op = bits (insn, 21, 24);
5098 int is_mov = (op == 0xd);
5099 ULONGEST rd_val, rn_val;
5101 if (!insn_references_pc (insn, 0x000ff000ul))
5102 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
5104 if (debug_displaced)
5105 fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
5106 "%.8lx\n", is_mov ? "move" : "ALU",
5107 (unsigned long) insn);
5109 /* Instruction is of form:
5111 <op><cond> rd, [rn,] #imm
5115 Preparation: tmp1, tmp2 <- r0, r1;
5117 Insn: <op><cond> r0, r1, #imm
5118 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5121 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5122 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5123 rn_val = displaced_read_reg (regs, dsc, rn);
5124 rd_val = displaced_read_reg (regs, dsc, rd);
5125 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5126 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5130 dsc->modinsn[0] = insn & 0xfff00fff;
5132 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
5134 dsc->cleanup = &cleanup_alu_imm;
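/* Illustrative example (not part of the original commentary):
   "add pc, pc, #4" at 0x1000 is rewritten to "add r0, r1, #4" with r1
   preloaded to 0x1008, so r0 ends up as 0x100c; cleanup_alu_imm then
   transfers that value to the PC via ALU_WRITE_PC and restores r0/r1.  */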
5140 thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
5141 uint16_t insn2, struct regcache *regs,
5142 arm_displaced_step_closure *dsc)
5144 unsigned int op = bits (insn1, 5, 8);
5145 unsigned int rn, rm, rd;
5146 ULONGEST rd_val, rn_val;
5148 rn = bits (insn1, 0, 3); /* Rn */
5149 rm = bits (insn2, 0, 3); /* Rm */
5150 rd = bits (insn2, 8, 11); /* Rd */
5152 /* This routine is only called for the MOV instruction. */
5153 gdb_assert (op == 0x2 && rn == 0xf);
5155 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
5156 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
5158 if (debug_displaced)
5159 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
5160 "ALU", insn1, insn2);
5162 /* Instruction is of form:
5164 <op><cond> rd, [rn,] #imm
5168 Preparation: tmp1, tmp2 <- r0, r1;
5170 Insn: <op><cond> r0, r1, #imm
5171 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5174 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5175 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5176 rn_val = displaced_read_reg (regs, dsc, rn);
5177 rd_val = displaced_read_reg (regs, dsc, rd);
5178 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5179 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5182 dsc->modinsn[0] = insn1;
5183 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
5186 dsc->cleanup = &cleanup_alu_imm;
5191 /* Copy/cleanup arithmetic/logic insns with register RHS. */
5194 cleanup_alu_reg (struct gdbarch *gdbarch,
5195 struct regcache *regs, arm_displaced_step_closure *dsc)
5200 rd_val = displaced_read_reg (regs, dsc, 0);
5202 for (i = 0; i < 3; i++)
5203 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5205 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5209 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
5210 arm_displaced_step_closure *dsc,
5211 unsigned int rd, unsigned int rn, unsigned int rm)
5213 ULONGEST rd_val, rn_val, rm_val;
5215 /* Instruction is of form:
5217 <op><cond> rd, [rn,] rm [, <shift>]
5221 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
5222 r0, r1, r2 <- rd, rn, rm
5223 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
5224 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
5227 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5228 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5229 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5230 rd_val = displaced_read_reg (regs, dsc, rd);
5231 rn_val = displaced_read_reg (regs, dsc, rn);
5232 rm_val = displaced_read_reg (regs, dsc, rm);
5233 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5234 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5235 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5238 dsc->cleanup = &cleanup_alu_reg;
5242 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5243 arm_displaced_step_closure *dsc)
5245 unsigned int op = bits (insn, 21, 24);
5246 int is_mov = (op == 0xd);
5248 if (!insn_references_pc (insn, 0x000ff00ful))
5249 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
5251 if (debug_displaced)
5252 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
5253 is_mov ? "move" : "ALU", (unsigned long) insn);
5256 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
5258 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
5260 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
5266 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
5267 struct regcache *regs,
5268 arm_displaced_step_closure *dsc)
5272 rm = bits (insn, 3, 6);
5273 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
5275 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
5276 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
5278 if (debug_displaced)
5279 fprintf_unfiltered (gdb_stdlog, "displaced: copying ALU reg insn %.4x\n",
5280 (unsigned short) insn);
5282 dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
5284 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
5289 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
5292 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
5293 struct regcache *regs,
5294 arm_displaced_step_closure *dsc)
5296 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5299 for (i = 0; i < 4; i++)
5300 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5302 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5306 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
5307 arm_displaced_step_closure *dsc,
5308 unsigned int rd, unsigned int rn, unsigned int rm,
5312 ULONGEST rd_val, rn_val, rm_val, rs_val;
5314 /* Instruction is of form:
5316 <op><cond> rd, [rn,] rm, <shift> rs
5320 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
5321 r0, r1, r2, r3 <- rd, rn, rm, rs
5322 Insn: <op><cond> r0, r1, r2, <shift> r3
5324 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
5328 for (i = 0; i < 4; i++)
5329 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
5331 rd_val = displaced_read_reg (regs, dsc, rd);
5332 rn_val = displaced_read_reg (regs, dsc, rn);
5333 rm_val = displaced_read_reg (regs, dsc, rm);
5334 rs_val = displaced_read_reg (regs, dsc, rs);
5335 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5336 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5337 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5338 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
5340 dsc->cleanup = &cleanup_alu_shifted_reg;
5344 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
5345 struct regcache *regs,
5346 arm_displaced_step_closure *dsc)
5348 unsigned int op = bits (insn, 21, 24);
5349 int is_mov = (op == 0xd);
5350 unsigned int rd, rn, rm, rs;
5352 if (!insn_references_pc (insn, 0x000fff0ful))
5353 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
5355 if (debug_displaced)
5356 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
5357 "%.8lx\n", is_mov ? "move" : "ALU",
5358 (unsigned long) insn);
5360 rn = bits (insn, 16, 19);
5361 rm = bits (insn, 0, 3);
5362 rs = bits (insn, 8, 11);
5363 rd = bits (insn, 12, 15);
5366 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
5368 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
5370 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
5375 /* Clean up load instructions. */
5378 cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
5379 arm_displaced_step_closure *dsc)
5381 ULONGEST rt_val, rt_val2 = 0, rn_val;
5383 rt_val = displaced_read_reg (regs, dsc, 0);
5384 if (dsc->u.ldst.xfersize == 8)
5385 rt_val2 = displaced_read_reg (regs, dsc, 1);
5386 rn_val = displaced_read_reg (regs, dsc, 2);
5388 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5389 if (dsc->u.ldst.xfersize > 4)
5390 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5391 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5392 if (!dsc->u.ldst.immed)
5393 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5395 /* Handle register writeback. */
5396 if (dsc->u.ldst.writeback)
5397 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5398 /* Put result in right place. */
5399 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
5400 if (dsc->u.ldst.xfersize == 8)
5401 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
5404 /* Clean up store instructions. */
5407 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
5408 arm_displaced_step_closure *dsc)
5410 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
5412 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5413 if (dsc->u.ldst.xfersize > 4)
5414 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5415 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5416 if (!dsc->u.ldst.immed)
5417 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5418 if (!dsc->u.ldst.restore_r4)
5419 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
5422 if (dsc->u.ldst.writeback)
5423 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5426 /* Copy "extra" load/store instructions. These are halfword/doubleword
5427 transfers, which have a different encoding to byte/word transfers. */
5430 arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
5431 struct regcache *regs, arm_displaced_step_closure *dsc)
5433 unsigned int op1 = bits (insn, 20, 24);
5434 unsigned int op2 = bits (insn, 5, 6);
5435 unsigned int rt = bits (insn, 12, 15);
5436 unsigned int rn = bits (insn, 16, 19);
5437 unsigned int rm = bits (insn, 0, 3);
5438 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
5439 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
5440 int immed = (op1 & 0x4) != 0;
5442 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
5444 if (!insn_references_pc (insn, 0x000ff00ful))
5445 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
5447 if (debug_displaced)
5448 fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
5449 "insn %.8lx\n", unprivileged ? "unprivileged " : "",
5450 (unsigned long) insn);
5452 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
5455 internal_error (__FILE__, __LINE__,
5456 _("copy_extra_ld_st: instruction decode error"));
5458 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5459 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5460 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5462 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5464 rt_val = displaced_read_reg (regs, dsc, rt);
5465 if (bytesize[opcode] == 8)
5466 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
5467 rn_val = displaced_read_reg (regs, dsc, rn);
5469 rm_val = displaced_read_reg (regs, dsc, rm);
5471 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5472 if (bytesize[opcode] == 8)
5473 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
5474 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5476 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5479 dsc->u.ldst.xfersize = bytesize[opcode];
5480 dsc->u.ldst.rn = rn;
5481 dsc->u.ldst.immed = immed;
5482 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
5483 dsc->u.ldst.restore_r4 = 0;
5486 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
5488 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
5489 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5491 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
5493 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
5494 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5496 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
5501 /* Copy byte/half word/word loads and stores. */
5504 install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5505 arm_displaced_step_closure *dsc, int load,
5506 int immed, int writeback, int size, int usermode,
5507 int rt, int rm, int rn)
5509 ULONGEST rt_val, rn_val, rm_val = 0;
5511 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5512 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5514 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5516 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
5518 rt_val = displaced_read_reg (regs, dsc, rt);
5519 rn_val = displaced_read_reg (regs, dsc, rn);
5521 rm_val = displaced_read_reg (regs, dsc, rm);
5523 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5524 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5526 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5528 dsc->u.ldst.xfersize = size;
5529 dsc->u.ldst.rn = rn;
5530 dsc->u.ldst.immed = immed;
5531 dsc->u.ldst.writeback = writeback;
5533 /* To write PC we can do:
5535 Before this sequence of instructions:
5536 r0 is the PC value obtained from displaced_read_reg, so r0 = from + 8;
5537 r2 is the Rn value obtained from displaced_read_reg.
5539 Insn1: push {pc} Write address of STR instruction + offset on stack
5540 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
5541 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
5542 = addr(Insn1) + offset - addr(Insn3) - 8
5544 Insn4: add r4, r4, #8 r4 = offset - 8
5545 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
5547 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
5549 Otherwise we don't know what value to write for PC, since the offset is
5550 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
5551 of this can be found in Section "Saving from r15" in
5552 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
5554 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
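/* Illustrative example (not part of the original commentary): for an
   original "str pc, [r2]" at FROM = 0x1000, the sequence described above
   leaves r0 = FROM + offset at Insn6, where offset is whatever the hardware
   uses when storing the PC (8 or 12); the value stored is therefore
   identical to what the unmodified instruction would have stored at its
   original address.  */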
5559 thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
5560 uint16_t insn2, struct regcache *regs,
5561 arm_displaced_step_closure *dsc, int size)
5563 unsigned int u_bit = bit (insn1, 7);
5564 unsigned int rt = bits (insn2, 12, 15);
5565 int imm12 = bits (insn2, 0, 11);
5568 if (debug_displaced)
5569 fprintf_unfiltered (gdb_stdlog,
5570 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
5571 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
5577 /* Rewrite instruction LDR Rt imm12 into:
5579 Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12
5583 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2]. */
5586 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5587 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5588 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5590 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5592 pc_val = pc_val & 0xfffffffc;
5594 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
5595 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
5599 dsc->u.ldst.xfersize = size;
5600 dsc->u.ldst.immed = 0;
5601 dsc->u.ldst.writeback = 0;
5602 dsc->u.ldst.restore_r4 = 0;
5604 /* LDR R0, R2, R3 */
5605 dsc->modinsn[0] = 0xf852;
5606 dsc->modinsn[1] = 0x3;
5609 dsc->cleanup = &cleanup_load;
5615 thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
5616 uint16_t insn2, struct regcache *regs,
5617 arm_displaced_step_closure *dsc,
5618 int writeback, int immed)
5620 unsigned int rt = bits (insn2, 12, 15);
5621 unsigned int rn = bits (insn1, 0, 3);
5622 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
5623 /* In LDR (register), there is also a register Rm, which is not allowed to
5624 be PC, so we don't have to check it. */
5626 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
5627 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
5630 if (debug_displaced)
5631 fprintf_unfiltered (gdb_stdlog,
5632 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
5633 rt, rn, insn1, insn2);
5635 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
5638 dsc->u.ldst.restore_r4 = 0;
5641 /* ldr[b]<cond> rt, [rn, #imm], etc.
5643 ldr[b]<cond> r0, [r2, #imm]. */
5645 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5646 dsc->modinsn[1] = insn2 & 0x0fff;
5649 /* ldr[b]<cond> rt, [rn, rm], etc.
5651 ldr[b]<cond> r0, [r2, r3]. */
5653 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5654 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
5664 arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
5665 struct regcache *regs,
5666 arm_displaced_step_closure *dsc,
5667 int load, int size, int usermode)
5669 int immed = !bit (insn, 25);
5670 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
5671 unsigned int rt = bits (insn, 12, 15);
5672 unsigned int rn = bits (insn, 16, 19);
5673 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
5675 if (!insn_references_pc (insn, 0x000ff00ful))
5676 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
5678 if (debug_displaced)
5679 fprintf_unfiltered (gdb_stdlog,
5680 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
5681 load ? (size == 1 ? "ldrb" : "ldr")
5682 : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
5684 (unsigned long) insn);
5686 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
5687 usermode, rt, rm, rn);
5689 if (load || rt != ARM_PC_REGNUM)
5691 dsc->u.ldst.restore_r4 = 0;
5694 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
5696 {ldr,str}[b]<cond> r0, [r2, #imm]. */
5697 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5699 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
5701 {ldr,str}[b]<cond> r0, [r2, r3]. */
5702 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5706 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
5707 dsc->u.ldst.restore_r4 = 1;
5708 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
5709 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
5710 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
5711 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
5712 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
5716 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
5718 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
5723 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5728 /* Cleanup LDM instructions with fully-populated register list. This is an
5729 unfortunate corner case: it's impossible to implement correctly by modifying
5730 the instruction. The issue is as follows: we have an instruction,
5734 which we must rewrite to avoid loading PC. A possible solution would be to
5735 do the load in two halves, something like (with suitable cleanup
5739 ldm[id][ab] r8!, {r0-r7}
5741 ldm[id][ab] r8, {r7-r14}
5744 but at present there's no suitable place for <temp>, since the scratch space
5745 is overwritten before the cleanup routine is called. For now, we simply
5746 emulate the instruction. */
5749 cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
5750 arm_displaced_step_closure *dsc)
5752 int inc = dsc->u.block.increment;
5753 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
5754 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
5755 uint32_t regmask = dsc->u.block.regmask;
5756 int regno = inc ? 0 : 15;
5757 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
5758 int exception_return = dsc->u.block.load && dsc->u.block.user
5759 && (regmask & 0x8000) != 0;
5760 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5761 int do_transfer = condition_true (dsc->u.block.cond, status);
5762 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5767 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
5768 sensible we can do here. Complain loudly. */
5769 if (exception_return)
5770 error (_("Cannot single-step exception return"));
5772 /* We don't handle any stores here for now. */
5773 gdb_assert (dsc->u.block.load != 0);
5775 if (debug_displaced)
5776 fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
5777 "%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
5778 dsc->u.block.increment ? "inc" : "dec",
5779 dsc->u.block.before ? "before" : "after");
5786 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
5789 while (regno >= 0 && (regmask & (1 << regno)) == 0)
5792 xfer_addr += bump_before;
5794 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
5795 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
5797 xfer_addr += bump_after;
5799 regmask &= ~(1 << regno);
5802 if (dsc->u.block.writeback)
5803 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
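/* Illustrative example (not part of the original commentary): for
   "ldmia r8, {r0-r15}" with r8 = 0x2000, the emulation loop above loads r0
   from 0x2000, r1 from 0x2004, and so on, finishing with the PC from
   0x203c written via LOAD_WRITE_PC; no writeback is performed because the
   instruction requested none.  */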
5807 /* Clean up an STM which included the PC in the register list. */
5810 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
5811 arm_displaced_step_closure *dsc)
5813 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5814 int store_executed = condition_true (dsc->u.block.cond, status);
5815 CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
5816 CORE_ADDR stm_insn_addr;
5819 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5821 /* If condition code fails, there's nothing else to do. */
5822 if (!store_executed)
5825 if (dsc->u.block.increment)
5827 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
5829 if (dsc->u.block.before)
5834 pc_stored_at = dsc->u.block.xfer_addr;
5836 if (dsc->u.block.before)
5840 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
5841 stm_insn_addr = dsc->scratch_base;
5842 offset = pc_val - stm_insn_addr;
5844 if (debug_displaced)
5845 fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
5846 "STM instruction\n", offset);
5848 /* Rewrite the stored PC to the proper value for the non-displaced original
5850 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
5851 dsc->insn_addr + offset);
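/* Illustrative example (not part of the original commentary): if the
   scratch copy of the STM was executed at 0x7000 and the word found at
   PC_STORED_AT is 0x7008, the hardware evidently stores PC + 8, so the
   stored word is rewritten to dsc->insn_addr + 8, the value the original
   STM would have stored at its real address.  */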
5854 /* Clean up an LDM which includes the PC in the register list. We clumped all
5855 the registers in the transferred list into a contiguous range r0...rX (to
5856 avoid loading PC directly and losing control of the debugged program), so we
5857 must undo that here. */
5860 cleanup_block_load_pc (struct gdbarch *gdbarch,
5861 struct regcache *regs,
5862 arm_displaced_step_closure *dsc)
5864 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5865 int load_executed = condition_true (dsc->u.block.cond, status);
5866 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
5867 unsigned int regs_loaded = bitcount (mask);
5868 unsigned int num_to_shuffle = regs_loaded, clobbered;
5870 /* The method employed here will fail if the register list is fully populated
5871 (we need to avoid loading PC directly). */
5872 gdb_assert (num_to_shuffle < 16);
5877 clobbered = (1 << num_to_shuffle) - 1;
5879 while (num_to_shuffle > 0)
5881 if ((mask & (1 << write_reg)) != 0)
5883 unsigned int read_reg = num_to_shuffle - 1;
5885 if (read_reg != write_reg)
5887 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
5888 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
5889 if (debug_displaced)
5890 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
5891 "loaded register r%d to r%d\n"), read_reg,
5894 else if (debug_displaced)
5895 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
5896 "r%d already in the right place\n"),
5899 clobbered &= ~(1 << write_reg);
5907 /* Restore any registers we scribbled over. */
5908 for (write_reg = 0; clobbered != 0; write_reg++)
5910 if ((clobbered & (1 << write_reg)) != 0)
5912 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
5914 if (debug_displaced)
5915 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
5916 "clobbered register r%d\n"), write_reg);
5917 clobbered &= ~(1 << write_reg);
5921 /* Perform register writeback manually. */
5922 if (dsc->u.block.writeback)
5924 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
5926 if (dsc->u.block.increment)
5927 new_rn_val += regs_loaded * 4;
5929 new_rn_val -= regs_loaded * 4;
5931 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
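/* Illustrative example (not part of the original commentary): if the
   original instruction was "ldmia r0, {r4, r7, pc}", arm_copy_block_xfer
   rewrites it to load the contiguous set {r0, r1, r2}.  The shuffle above
   then moves the value loaded into r2 to the PC (via LOAD_WRITE_PC), r1's
   value to r7 and r0's value to r4, after which r0-r2 themselves are
   restored from dsc->tmp[].  */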
5936 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
5937 in user-level code (in particular exception return, ldm rn, {...pc}^). */
5940 arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
5941 struct regcache *regs,
5942 arm_displaced_step_closure *dsc)
5944 int load = bit (insn, 20);
5945 int user = bit (insn, 22);
5946 int increment = bit (insn, 23);
5947 int before = bit (insn, 24);
5948 int writeback = bit (insn, 21);
5949 int rn = bits (insn, 16, 19);
5951 /* Block transfers which don't mention PC can be run directly
5953 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
5954 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
5956 if (rn == ARM_PC_REGNUM)
5958 warning (_("displaced: Unpredictable LDM or STM with "
5959 "base register r15"));
5960 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
5963 if (debug_displaced)
5964 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
5965 "%.8lx\n", (unsigned long) insn);
5967 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
5968 dsc->u.block.rn = rn;
5970 dsc->u.block.load = load;
5971 dsc->u.block.user = user;
5972 dsc->u.block.increment = increment;
5973 dsc->u.block.before = before;
5974 dsc->u.block.writeback = writeback;
5975 dsc->u.block.cond = bits (insn, 28, 31);
5977 dsc->u.block.regmask = insn & 0xffff;
5981 if ((insn & 0xffff) == 0xffff)
5983 /* LDM with a fully-populated register list. This case is
5984 particularly tricky. Implement for now by fully emulating the
5985 instruction (which might not behave perfectly in all cases, but
5986 these instructions should be rare enough for that not to matter
5988 dsc->modinsn[0] = ARM_NOP;
5990 dsc->cleanup = &cleanup_block_load_all;
5994 /* LDM of a list of registers which includes PC. Implement by
5995 rewriting the list of registers to be transferred into a
5996 contiguous chunk r0...rX before doing the transfer, then shuffling
5997 registers into the correct places in the cleanup routine. */
5998 unsigned int regmask = insn & 0xffff;
5999 unsigned int num_in_list = bitcount (regmask), new_regmask;
6002 for (i = 0; i < num_in_list; i++)
6003 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6005 /* Writeback makes things complicated. We need to avoid clobbering
6006 the base register with one of the registers in our modified
6007 register list, but just using a different register can't work in
6010 ldm r14!, {r0-r13,pc}
6012 which would need to be rewritten as:
6016 but that can't work, because there's no free register for N.
6018 Solve this by turning off the writeback bit, and emulating
6019 writeback manually in the cleanup routine. */
6024 new_regmask = (1 << num_in_list) - 1;
6026 if (debug_displaced)
6027 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6028 "{..., pc}: original reg list %.4x, modified "
6029 "list %.4x\n"), rn, writeback ? "!" : "",
6030 (int) insn & 0xffff, new_regmask);
6032 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
6034 dsc->cleanup = &cleanup_block_load_pc;
6039 /* STM of a list of registers which includes PC. Run the instruction
6040 as-is, but out of line: this will store the wrong value for the PC,
6041 so we must manually fix up the memory in the cleanup routine.
6042 Doing things this way has the advantage that we can auto-detect
6043 the offset of the PC write (which is architecture-dependent) in
6044 the cleanup routine. */
6045 dsc->modinsn[0] = insn;
6047 dsc->cleanup = &cleanup_block_store_pc;
6054 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6055 struct regcache *regs,
6056 arm_displaced_step_closure *dsc)
6058 int rn = bits (insn1, 0, 3);
6059 int load = bit (insn1, 4);
6060 int writeback = bit (insn1, 5);
6062 /* Block transfers which don't mention PC can be run directly
6064 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
6065 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
6067 if (rn == ARM_PC_REGNUM)
6069 warning (_("displaced: Unpredictable LDM or STM with "
6070 "base register r15"));
6071 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6072 "unpredictable ldm/stm", dsc);
6075 if (debug_displaced)
6076 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6077 "%.4x%.4x\n", insn1, insn2);
6079 /* Clear bit 13, since it should always be zero. */
6080 dsc->u.block.regmask = (insn2 & 0xdfff);
6081 dsc->u.block.rn = rn;
6083 dsc->u.block.load = load;
6084 dsc->u.block.user = 0;
6085 dsc->u.block.increment = bit (insn1, 7);
6086 dsc->u.block.before = bit (insn1, 8);
6087 dsc->u.block.writeback = writeback;
6088 dsc->u.block.cond = INST_AL;
6089 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
6093 if (dsc->u.block.regmask == 0xffff)
6095 /* This cannot happen: bit 13 was cleared above, so the mask can never be 0xffff. */
6100 unsigned int regmask = dsc->u.block.regmask;
6101 unsigned int num_in_list = bitcount (regmask), new_regmask;
6104 for (i = 0; i < num_in_list; i++)
6105 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6110 new_regmask = (1 << num_in_list) - 1;
6112 if (debug_displaced)
6113 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6114 "{..., pc}: original reg list %.4x, modified "
6115 "list %.4x\n"), rn, writeback ? "!" : "",
6116 (int) dsc->u.block.regmask, new_regmask);
6118 dsc->modinsn[0] = insn1;
6119 dsc->modinsn[1] = (new_regmask & 0xffff);
6122 dsc->cleanup = &cleanup_block_load_pc;
6127 dsc->modinsn[0] = insn1;
6128 dsc->modinsn[1] = insn2;
6130 dsc->cleanup = &cleanup_block_store_pc;
6135 /* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
6136 This is used to avoid a dependency on BFD's bfd_endian enum. */
6139 arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len,
6142 return read_memory_unsigned_integer (memaddr, len,
6143 (enum bfd_endian) byte_order);
6146 /* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
6149 arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self,
6152 return gdbarch_addr_bits_remove (self->regcache->arch (), val);
6155 /* Wrapper over syscall_next_pc for use in get_next_pcs. */
6158 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self)
6163 /* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
6166 arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self)
6168 return arm_is_thumb (self->regcache);
6171 /* single_step() is called just before we want to resume the inferior,
6172 if we want to single-step it but there is no hardware or kernel
6173 single-step support. We find the target of the coming instructions
6174 and breakpoint them. */
6176 std::vector<CORE_ADDR>
6177 arm_software_single_step (struct regcache *regcache)
6179 struct gdbarch *gdbarch = regcache->arch ();
6180 struct arm_get_next_pcs next_pcs_ctx;
6182 arm_get_next_pcs_ctor (&next_pcs_ctx,
6183 &arm_get_next_pcs_ops,
6184 gdbarch_byte_order (gdbarch),
6185 gdbarch_byte_order_for_code (gdbarch),
6189 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
6191 for (CORE_ADDR &pc_ref : next_pcs)
6192 pc_ref = gdbarch_addr_bits_remove (gdbarch, pc_ref);
6197 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6198 for Linux, where some SVC instructions must be treated specially. */
6201 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
6202 arm_displaced_step_closure *dsc)
6204 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
6206 if (debug_displaced)
6207 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
6208 "%.8lx\n", (unsigned long) resume_addr);
6210 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
6214 /* Common copy routine for the svc instruction. */
6217 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
6218 arm_displaced_step_closure *dsc)
6220 /* Preparation: none.
6221 Insn: unmodified svc.
6222 Cleanup: pc <- insn_addr + insn_size. */
6224 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
6226 dsc->wrote_to_pc = 1;
6228 /* Allow OS-specific code to override SVC handling. */
6229 if (dsc->u.svc.copy_svc_os)
6230 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
6233 dsc->cleanup = &cleanup_svc;
6239 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
6240 struct regcache *regs, arm_displaced_step_closure *dsc)
6243 if (debug_displaced)
6244 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
6245 (unsigned long) insn);
6247 dsc->modinsn[0] = insn;
6249 return install_svc (gdbarch, regs, dsc);
6253 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
6254 struct regcache *regs, arm_displaced_step_closure *dsc)
6257 if (debug_displaced)
6258 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
6261 dsc->modinsn[0] = insn;
6263 return install_svc (gdbarch, regs, dsc);
6266 /* Copy undefined instructions. */
6269 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
6270 arm_displaced_step_closure *dsc)
6272 if (debug_displaced)
6273 fprintf_unfiltered (gdb_stdlog,
6274 "displaced: copying undefined insn %.8lx\n",
6275 (unsigned long) insn);
6277 dsc->modinsn[0] = insn;
6283 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6284 arm_displaced_step_closure *dsc)
6287 if (debug_displaced)
6288 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
6289 "%.4x %.4x\n", (unsigned short) insn1,
6290 (unsigned short) insn2);
6292 dsc->modinsn[0] = insn1;
6293 dsc->modinsn[1] = insn2;
6299 /* Copy unpredictable instructions. */
6302 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
6303 arm_displaced_step_closure *dsc)
6305 if (debug_displaced)
6306 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
6307 "%.8lx\n", (unsigned long) insn);
6309 dsc->modinsn[0] = insn;
6314 /* The decode_* functions are instruction decoding helpers. They mostly follow
6315 the presentation in the ARM ARM. */
6318 arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
6319 struct regcache *regs,
6320 arm_displaced_step_closure *dsc)
6322 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
6323 unsigned int rn = bits (insn, 16, 19);
6325 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0x1) == 0x0)
6326 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
6327 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0x1) == 0x1)
6328 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
6329 else if ((op1 & 0x60) == 0x20)
6330 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
6331 else if ((op1 & 0x71) == 0x40)
6332 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
6334 else if ((op1 & 0x77) == 0x41)
6335 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6336 else if ((op1 & 0x77) == 0x45)
6337 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
6338 else if ((op1 & 0x77) == 0x51)
6341 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6343 return arm_copy_unpred (gdbarch, insn, dsc);
6345 else if ((op1 & 0x77) == 0x55)
6346 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6347 else if (op1 == 0x57)
6350 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
6351 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
6352 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
6353 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
6354 default: return arm_copy_unpred (gdbarch, insn, dsc);
6356 else if ((op1 & 0x63) == 0x43)
6357 return arm_copy_unpred (gdbarch, insn, dsc);
6358 else if ((op2 & 0x1) == 0x0)
6359 switch (op1 & ~0x80)
6362 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6364 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
6365 case 0x71: case 0x75:
6367 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
6368 case 0x63: case 0x67: case 0x73: case 0x77:
6369 return arm_copy_unpred (gdbarch, insn, dsc);
6371 return arm_copy_undef (gdbarch, insn, dsc);
6374 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
6378 arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
6379 struct regcache *regs,
6380 arm_displaced_step_closure *dsc)
6382 if (bit (insn, 27) == 0)
6383 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
6384 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
6385 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
6388 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
6391 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
6393 case 0x4: case 0x5: case 0x6: case 0x7:
6394 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6397 switch ((insn & 0xe00000) >> 21)
6399 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
6401 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6404 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6407 return arm_copy_undef (gdbarch, insn, dsc);
6412 int rn_f = (bits (insn, 16, 19) == 0xf);
6413 switch ((insn & 0xe00000) >> 21)
6416 /* ldc/ldc2 imm (undefined for rn == pc). */
6417 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
6418 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6421 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6423 case 0x4: case 0x5: case 0x6: case 0x7:
6424 /* ldc/ldc2 lit (undefined for rn != pc). */
6425 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
6426 : arm_copy_undef (gdbarch, insn, dsc);
6429 return arm_copy_undef (gdbarch, insn, dsc);
6434 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
6437 if (bits (insn, 16, 19) == 0xf)
6439 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6441 return arm_copy_undef (gdbarch, insn, dsc);
6445 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6447 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6451 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6453 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6456 return arm_copy_undef (gdbarch, insn, dsc);
6460 /* Decode miscellaneous instructions in dp/misc encoding space. */
6463 arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
6464 struct regcache *regs,
6465 arm_displaced_step_closure *dsc)
6467 unsigned int op2 = bits (insn, 4, 6);
6468 unsigned int op = bits (insn, 21, 22);
6473 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
6476 if (op == 0x1) /* bx. */
6477 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
6479 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
6481 return arm_copy_undef (gdbarch, insn, dsc);
6485 /* Not really supported. */
6486 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
6488 return arm_copy_undef (gdbarch, insn, dsc);
6492 return arm_copy_bx_blx_reg (gdbarch, insn,
6493 regs, dsc); /* blx register. */
6495 return arm_copy_undef (gdbarch, insn, dsc);
6498 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
6502 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
6504 /* Not really supported. */
6505 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
6509 return arm_copy_undef (gdbarch, insn, dsc);
6514 arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
6515 struct regcache *regs,
6516 arm_displaced_step_closure *dsc)
6519 switch (bits (insn, 20, 24))
6522 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
6525 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
6527 case 0x12: case 0x16:
6528 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
6531 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
6535 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
6537 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
6538 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
6539 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
6540 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
6541 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
6542 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
6543 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
6544 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
6545 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
6546 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
6547 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
6548 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
6549 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
6550 /* 2nd arg means "unprivileged". */
6551 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
6555 /* Should be unreachable. */
6560 arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
6561 struct regcache *regs,
6562 arm_displaced_step_closure *dsc)
6564 int a = bit (insn, 25), b = bit (insn, 4);
6565 uint32_t op1 = bits (insn, 20, 24);
6567 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
6568 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
6569 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
6570 else if ((!a && (op1 & 0x17) == 0x02)
6571 || (a && (op1 & 0x17) == 0x02 && !b))
6572 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
6573 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
6574 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
6575 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
6576 else if ((!a && (op1 & 0x17) == 0x03)
6577 || (a && (op1 & 0x17) == 0x03 && !b))
6578 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
6579 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
6580 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
6581 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
6582 else if ((!a && (op1 & 0x17) == 0x06)
6583 || (a && (op1 & 0x17) == 0x06 && !b))
6584 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
6585 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
6586 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
6587 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
6588 else if ((!a && (op1 & 0x17) == 0x07)
6589 || (a && (op1 & 0x17) == 0x07 && !b))
6590 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
6592 /* Should be unreachable. */
6597 arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
6598 arm_displaced_step_closure *dsc)
6600 switch (bits (insn, 20, 24))
6602 case 0x00: case 0x01: case 0x02: case 0x03:
6603 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
6605 case 0x04: case 0x05: case 0x06: case 0x07:
6606 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
6608 case 0x08: case 0x09: case 0x0a: case 0x0b:
6609 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
6610 return arm_copy_unmodified (gdbarch, insn,
6611 "decode/pack/unpack/saturate/reverse", dsc);
6614 if (bits (insn, 5, 7) == 0) /* op2. */
6616 if (bits (insn, 12, 15) == 0xf)
6617 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
6619 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
6622 return arm_copy_undef (gdbarch, insn, dsc);
6624 case 0x1a: case 0x1b:
6625 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6626 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
6628 return arm_copy_undef (gdbarch, insn, dsc);
6630 case 0x1c: case 0x1d:
6631 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
6633 if (bits (insn, 0, 3) == 0xf)
6634 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
6636 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
6639 return arm_copy_undef (gdbarch, insn, dsc);
6641 case 0x1e: case 0x1f:
6642 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6643 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
6645 return arm_copy_undef (gdbarch, insn, dsc);
6648 /* Should be unreachable. */
6653 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn,
6654 struct regcache *regs,
6655 arm_displaced_step_closure *dsc)
6658 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6660 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
6664 arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
6665 struct regcache *regs,
6666 arm_displaced_step_closure *dsc)
6668 unsigned int opcode = bits (insn, 20, 24);
6672 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
6673 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
6675 case 0x08: case 0x0a: case 0x0c: case 0x0e:
6676 case 0x12: case 0x16:
6677 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
6679 case 0x09: case 0x0b: case 0x0d: case 0x0f:
6680 case 0x13: case 0x17:
6681 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
6683 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6684 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6685 /* Note: no writeback for these instructions. Bit 25 will always be
6686 zero though (via caller), so the following works OK. */
6687 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6690 /* Should be unreachable. */
6694 /* Decode shifted register instructions. */
6697 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
6698 uint16_t insn2, struct regcache *regs,
6699 arm_displaced_step_closure *dsc)
6701 /* PC is only allowed to be used in the MOV instruction. */
6703 unsigned int op = bits (insn1, 5, 8);
6704 unsigned int rn = bits (insn1, 0, 3);
6706 if (op == 0x2 && rn == 0xf) /* MOV */
6707 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
6709 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6710 "dp (shift reg)", dsc);
6714 /* Decode extension register load/store. Exactly the same as
6715 arm_decode_ext_reg_ld_st. */
6718 thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
6719 uint16_t insn2, struct regcache *regs,
6720 arm_displaced_step_closure *dsc)
6722 unsigned int opcode = bits (insn1, 4, 8);
6726 case 0x04: case 0x05:
6727 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6728 "vfp/neon vmov", dsc);
6730 case 0x08: case 0x0c: /* 01x00 */
6731 case 0x0a: case 0x0e: /* 01x10 */
6732 case 0x12: case 0x16: /* 10x10 */
6733 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6734 "vfp/neon vstm/vpush", dsc);
6736 case 0x09: case 0x0d: /* 01x01 */
6737 case 0x0b: case 0x0f: /* 01x11 */
6738 case 0x13: case 0x17: /* 10x11 */
6739 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6740 "vfp/neon vldm/vpop", dsc);
6742 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6743 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6745 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6746 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
6749 /* Should be unreachable. */
6754 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
6755 struct regcache *regs, arm_displaced_step_closure *dsc)
6757 unsigned int op1 = bits (insn, 20, 25);
6758 int op = bit (insn, 4);
6759 unsigned int coproc = bits (insn, 8, 11);
6761 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
6762 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
6763 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
6764 && (coproc & 0xe) != 0xa)
6766 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6767 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
6768 && (coproc & 0xe) != 0xa)
6769 /* ldc/ldc2 imm/lit. */
6770 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6771 else if ((op1 & 0x3e) == 0x00)
6772 return arm_copy_undef (gdbarch, insn, dsc);
6773 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
6774 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
6775 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
6776 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6777 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
6778 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6779 else if ((op1 & 0x30) == 0x20 && !op)
6781 if ((coproc & 0xe) == 0xa)
6782 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
6784 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6786 else if ((op1 & 0x30) == 0x20 && op)
6787 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
6788 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
6789 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6790 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
6791 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6792 else if ((op1 & 0x30) == 0x30)
6793 return arm_copy_svc (gdbarch, insn, regs, dsc);
6795 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
6799 thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
6800 uint16_t insn2, struct regcache *regs,
6801 arm_displaced_step_closure *dsc)
6803 unsigned int coproc = bits (insn2, 8, 11);
6804 unsigned int bit_5_8 = bits (insn1, 5, 8);
6805 unsigned int bit_9 = bit (insn1, 9);
6806 unsigned int bit_4 = bit (insn1, 4);
6811 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6812 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
6814 else if (bit_5_8 == 0) /* UNDEFINED. */
6815 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
6818 /* coproc is 101x. SIMD/VFP, ext registers load/store. */
6819 if ((coproc & 0xe) == 0xa)
6820 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
6822 else /* coproc is not 101x. */
6824 if (bit_4 == 0) /* STC/STC2. */
6825 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6827 else /* LDC/LDC2 {literal, immediate}. */
6828 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
6834 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
6840 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
6841 arm_displaced_step_closure *dsc, int rd)
6847 Preparation: Rd <- PC
6853 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6854 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
6858 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
6859 arm_displaced_step_closure *dsc,
6860 int rd, unsigned int imm)
6863 /* Encoding T2: ADDS Rd, #imm */
6864 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
6866 install_pc_relative (gdbarch, regs, dsc, rd);
6872 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
6873 struct regcache *regs,
6874 arm_displaced_step_closure *dsc)
6876 unsigned int rd = bits (insn, 8, 10);
6877 unsigned int imm8 = bits (insn, 0, 7);
6879 if (debug_displaced)
6880 fprintf_unfiltered (gdb_stdlog,
6881 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
6884 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
6888 thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
6889 uint16_t insn2, struct regcache *regs,
6890 arm_displaced_step_closure *dsc)
6892 unsigned int rd = bits (insn2, 8, 11);
6893 /* Since the immediate has the same encoding in ADR, ADD and SUB, we simply
6894 extract the raw immediate encoding rather than computing the immediate value.
6895 When generating the ADD or SUB instruction, the immediate can then be ORed
6896 straight into the encoding. */
6897 unsigned int imm_3_8 = insn2 & 0x70ff;
6898 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
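/* Illustration: in the 32-bit ADR/ADD/SUB encodings the 12-bit immediate
   is i:imm3:imm8, where i is bit 10 of the first halfword and imm3:imm8
   are bits 14-12 and 7-0 of the second halfword; imm_i and imm_3_8 above
   keep exactly those bits in place. */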
6900 if (debug_displaced)
6901 fprintf_unfiltered (gdb_stdlog,
6902 "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
6903 rd, imm_i, imm_3_8, insn1, insn2);
6905 if (bit (insn1, 7)) /* ADR encoding T2 (SUB form). */
6907 /* Emit SUB (immediate) encoding T3: SUB Rd, Rd, #imm */
6908 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
6909 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
6911 else /* ADR encoding T3 (ADD form). */
6913 /* Emit ADD (immediate) encoding T3: ADD Rd, Rd, #imm */
6914 dsc->modinsn[0] = (0xf100 | rd | imm_i);
6915 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
6919 install_pc_relative (gdbarch, regs, dsc, rd);
6925 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, uint16_t insn1,
6926 struct regcache *regs,
6927 arm_displaced_step_closure *dsc)
6929 unsigned int rt = bits (insn1, 8, 10);
6931 int imm8 = (bits (insn1, 0, 7) << 2);
6937 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
6939 Insn: LDR R0, [R2, R3];
6940 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
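For instance (illustrative), "ldr r1, [pc, #8]" is executed as
"ldr r0, [r2, r3]" with R2 holding Align(PC,4) and R3 holding 8; the
cleanup then moves R0 into R1 and restores R0, R2 and R3.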
6942 if (debug_displaced)
6943 fprintf_unfiltered (gdb_stdlog,
6944 "displaced: copying thumb ldr r%d [pc #%d]\n"
6947 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6948 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6949 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6950 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6951 /* The assembler calculates the required value of the offset from the
6952 Align(PC,4) value of this instruction to the label. */
6953 pc = pc & 0xfffffffc;
6955 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
6956 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
6959 dsc->u.ldst.xfersize = 4;
6961 dsc->u.ldst.immed = 0;
6962 dsc->u.ldst.writeback = 0;
6963 dsc->u.ldst.restore_r4 = 0;
6965 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/
6967 dsc->cleanup = &cleanup_load;
6972 /* Copy Thumb cbnz/cbz instruction. */
6975 thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
6976 struct regcache *regs,
6977 arm_displaced_step_closure *dsc)
6979 int non_zero = bit (insn1, 11);
6980 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
6981 CORE_ADDR from = dsc->insn_addr;
6982 int rn = bits (insn1, 0, 2);
6983 int rn_val = displaced_read_reg (regs, dsc, rn);
6985 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
6986 /* CBNZ and CBZ do not affect the condition flags. If the condition is true,
6987 set it to INST_AL so that cleanup_branch knows the branch is taken;
6988 otherwise leave it alone and cleanup_branch will do nothing. */
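/* When the branch is taken, the target is the CB{N}Z's PC value (its
   address plus 4) plus the zero-extended i:imm5:'0' offset decoded
   above; otherwise execution continues at the next instruction
   (insn address + 2). */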
6989 if (dsc->u.branch.cond)
6991 dsc->u.branch.cond = INST_AL;
6992 dsc->u.branch.dest = from + 4 + imm5;
6995 dsc->u.branch.dest = from + 2;
6997 dsc->u.branch.link = 0;
6998 dsc->u.branch.exchange = 0;
7000 if (debug_displaced)
7001 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
7002 " insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
7003 rn, rn_val, insn1, dsc->u.branch.dest);
7005 dsc->modinsn[0] = THUMB_NOP;
7007 dsc->cleanup = &cleanup_branch;
7011 /* Copy Table Branch Byte/Halfword */
7013 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
7014 uint16_t insn2, struct regcache *regs,
7015 arm_displaced_step_closure *dsc)
7017 ULONGEST rn_val, rm_val;
7018 int is_tbh = bit (insn2, 4);
7019 CORE_ADDR halfwords = 0;
7020 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7022 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
7023 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
7029 target_read_memory (rn_val + 2 * rm_val, buf, 2);
7030 halfwords = extract_unsigned_integer (buf, 2, byte_order);
7036 target_read_memory (rn_val + rm_val, buf, 1);
7037 halfwords = extract_unsigned_integer (buf, 1, byte_order);
7040 if (debug_displaced)
7041 fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x index 0x%x"
7042 " table entry 0x%x\n", is_tbh ? "tbh" : "tbb",
7043 (unsigned int) rn_val, (unsigned int) rm_val,
7044 (unsigned int) halfwords);
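/* The branch target is this instruction's PC value (insn_addr + 4) plus
   twice the table entry, which counts halfwords. */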
7046 dsc->u.branch.cond = INST_AL;
7047 dsc->u.branch.link = 0;
7048 dsc->u.branch.exchange = 0;
7049 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
7051 dsc->cleanup = &cleanup_branch;
7057 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
7058 arm_displaced_step_closure *dsc)
7061 int val = displaced_read_reg (regs, dsc, 7);
7062 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
7065 val = displaced_read_reg (regs, dsc, 8);
7066 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
7069 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
7074 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, uint16_t insn1,
7075 struct regcache *regs,
7076 arm_displaced_step_closure *dsc)
7078 dsc->u.block.regmask = insn1 & 0x00ff;
7080 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
7083 (1) register list is full, that is, r0-r7 are used.
7084 Prepare: tmp[0] <- r8
7086 POP {r0, r1, ...., r6, r7}; remove PC from reglist
7087 MOV r8, r7; Move value of r7 to r8;
7088 POP {r7}; Store PC value into r7.
7090 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
7092 (2) register list is not full, supposing there are N registers in
7093 register list (except PC, 0 <= N <= 7).
7094 Prepare: for each i, 0 - N, tmp[i] <- ri.
7096 POP {r0, r1, ...., rN};
7098 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
7099 from tmp[] properly.
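For instance (illustrative), POP {r0, r2, pc} falls under case (2): the
modified instruction becomes POP {r0, r1, r2}, and the cleanup moves the
first two loaded values into r0 and r2, the last one into PC, and
restores the scratched registers from tmp[].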
7101 if (debug_displaced)
7102 fprintf_unfiltered (gdb_stdlog,
7103 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
7104 dsc->u.block.regmask, insn1);
7106 if (dsc->u.block.regmask == 0xff)
7108 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
7110 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
7111 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
7112 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
7115 dsc->cleanup = &cleanup_pop_pc_16bit_all;
7119 unsigned int num_in_list = bitcount (dsc->u.block.regmask);
7121 unsigned int new_regmask;
7123 for (i = 0; i < num_in_list + 1; i++)
7124 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7126 new_regmask = (1 << (num_in_list + 1)) - 1;
7128 if (debug_displaced)
7129 fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
7130 "{..., pc}: original reg list %.4x,"
7131 " modified list %.4x\n"),
7132 (int) dsc->u.block.regmask, new_regmask);
7134 dsc->u.block.regmask |= 0x8000;
7135 dsc->u.block.writeback = 0;
7136 dsc->u.block.cond = INST_AL;
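/* Keep the POP opcode bits, drop the PC bit and the original register
   list, and substitute the contiguous r0..rN list computed above. */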
7138 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
7140 dsc->cleanup = &cleanup_block_load_pc;
7147 thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7148 struct regcache *regs,
7149 arm_displaced_step_closure *dsc)
7151 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
7152 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
7155 /* 16-bit thumb instructions. */
7156 switch (op_bit_12_15)
7158 /* Shift (immediate), add, subtract, move and compare. */
7159 case 0: case 1: case 2: case 3:
7160 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7161 "shift/add/sub/mov/cmp",
7165 switch (op_bit_10_11)
7167 case 0: /* Data-processing */
7168 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7172 case 1: /* Special data instructions and branch and exchange. */
7174 unsigned short op = bits (insn1, 7, 9);
7175 if (op == 6 || op == 7) /* BX or BLX */
7176 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
7177 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
7178 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
7180 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
7184 default: /* LDR (literal) */
7185 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
7188 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
7189 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
7192 if (op_bit_10_11 < 2) /* Generate PC-relative address */
7193 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
7194 else /* Generate SP-relative address */
7195 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
7197 case 11: /* Misc 16-bit instructions */
7199 switch (bits (insn1, 8, 11))
7201 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
7202 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
7204 case 12: case 13: /* POP */
7205 if (bit (insn1, 8)) /* PC is in register list. */
7206 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
7208 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
7210 case 15: /* If-Then, and hints */
7211 if (bits (insn1, 0, 3))
7212 /* If-Then makes up to four following instructions conditional.
7213 The IT instruction itself is not conditional, so handle it as
7214 an ordinary unmodified instruction. */
7215 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
7218 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
7221 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
7226 if (op_bit_10_11 < 2) /* Store multiple registers */
7227 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
7228 else /* Load multiple registers */
7229 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
7231 case 13: /* Conditional branch and supervisor call */
7232 if (bits (insn1, 9, 11) != 7) /* conditional branch */
7233 err = thumb_copy_b (gdbarch, insn1, dsc);
7235 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
7237 case 14: /* Unconditional branch */
7238 err = thumb_copy_b (gdbarch, insn1, dsc);
7245 internal_error (__FILE__, __LINE__,
7246 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
7250 decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
7251 uint16_t insn1, uint16_t insn2,
7252 struct regcache *regs,
7253 arm_displaced_step_closure *dsc)
7255 int rt = bits (insn2, 12, 15);
7256 int rn = bits (insn1, 0, 3);
7257 int op1 = bits (insn1, 7, 8);
7259 switch (bits (insn1, 5, 6))
7261 case 0: /* Load byte and memory hints */
7262 if (rt == 0xf) /* PLD/PLI */
7265 /* PLD literal or Encoding T3 of PLI (immediate, literal). */
7266 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
7268 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7273 if (rn == 0xf) /* LDRB/LDRSB (literal) */
7274 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7277 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7278 "ldrb{reg, immediate}/ldrbt",
7283 case 1: /* Load halfword and memory hints. */
7284 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
7285 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7286 "pld/unalloc memhint", dsc);
7290 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7293 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7297 case 2: /* Load word */
7299 int insn2_bit_8_11 = bits (insn2, 8, 11);
7302 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
7303 else if (op1 == 0x1) /* Encoding T3 */
7304 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
7306 else /* op1 == 0x0 */
7308 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
7309 /* LDR (immediate) */
7310 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7311 dsc, bit (insn2, 8), 1);
7312 else if (insn2_bit_8_11 == 0xe) /* LDRT */
7313 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7316 /* LDR (register) */
7317 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7323 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7330 thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7331 uint16_t insn2, struct regcache *regs,
7332 arm_displaced_step_closure *dsc)
7335 unsigned short op = bit (insn2, 15);
7336 unsigned int op1 = bits (insn1, 11, 12);
7342 switch (bits (insn1, 9, 10))
7347 /* Load/store {dual, exclusive}, table branch. */
7348 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
7349 && bits (insn2, 5, 7) == 0)
7350 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
7354 /* PC is not allowed to be used in load/store {dual, exclusive}
7355 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7356 "load/store dual/ex", dsc);
7358 else /* load/store multiple */
7360 switch (bits (insn1, 7, 8))
7362 case 0: case 3: /* SRS, RFE */
7363 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7366 case 1: case 2: /* LDM/STM/PUSH/POP */
7367 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
7374 /* Data-processing (shift register). */
7375 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
7378 default: /* Coprocessor instructions. */
7379 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7384 case 2: /* op1 = 2 */
7385 if (op) /* Branch and misc control. */
7387 if (bit (insn2, 14) /* BLX/BL */
7388 || bit (insn2, 12) /* Unconditional branch */
7389 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
7390 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
7392 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7397 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
7399 int dp_op = bits (insn1, 4, 8);
7400 int rn = bits (insn1, 0, 3);
7401 if ((dp_op == 0 || dp_op == 0xa) && rn == 0xf)
7402 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
7405 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7408 else /* Data processing (modified immediate) */
7409 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7413 case 3: /* op1 = 3 */
7414 switch (bits (insn1, 9, 10))
7418 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
7420 else /* NEON Load/Store and Store single data item */
7421 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7422 "neon elt/struct load/store",
7425 case 1: /* op1 = 3, bits (9, 10) == 1 */
7426 switch (bits (insn1, 7, 8))
7428 case 0: case 1: /* Data processing (register) */
7429 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7432 case 2: /* Multiply and absolute difference */
7433 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7434 "mul/mua/diff", dsc);
7436 case 3: /* Long multiply and divide */
7437 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7442 default: /* Coprocessor instructions */
7443 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7452 internal_error (__FILE__, __LINE__,
7453 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
7458 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7459 struct regcache *regs,
7460 arm_displaced_step_closure *dsc)
7462 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7464 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
7466 if (debug_displaced)
7467 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
7468 "at %.8lx\n", insn1, (unsigned long) from);
7471 dsc->insn_size = thumb_insn_size (insn1);
7472 if (thumb_insn_size (insn1) == 4)
7475 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
7476 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
7479 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
7483 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7484 CORE_ADDR to, struct regcache *regs,
7485 arm_displaced_step_closure *dsc)
7488 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7491 /* Most displaced instructions use a 1-instruction scratch space, so set this
7492 here and override below if/when necessary. */
7494 dsc->insn_addr = from;
7495 dsc->scratch_base = to;
7496 dsc->cleanup = NULL;
7497 dsc->wrote_to_pc = 0;
7499 if (!displaced_in_arm_mode (regs))
7500 return thumb_process_displaced_insn (gdbarch, from, regs, dsc);
7504 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
7505 if (debug_displaced)
7506 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
7507 "at %.8lx\n", (unsigned long) insn,
7508 (unsigned long) from);
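/* Instructions with the condition field 0b1111 go to the 'unconditional'
   decoder; everything else is dispatched on bits <27:25> together with
   bit <4>, mirroring the top-level ARM encoding table. */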
7510 if ((insn & 0xf0000000) == 0xf0000000)
7511 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
7512 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
7514 case 0x0: case 0x1: case 0x2: case 0x3:
7515 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
7518 case 0x4: case 0x5: case 0x6:
7519 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
7523 err = arm_decode_media (gdbarch, insn, dsc);
7526 case 0x8: case 0x9: case 0xa: case 0xb:
7527 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
7530 case 0xc: case 0xd: case 0xe: case 0xf:
7531 err = arm_decode_svc_copro (gdbarch, insn, regs, dsc);
7536 internal_error (__FILE__, __LINE__,
7537 _("arm_process_displaced_insn: Instruction decode error"));
7540 /* Actually set up the scratch space for a displaced instruction. */
7543 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
7544 CORE_ADDR to, arm_displaced_step_closure *dsc)
7546 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7547 unsigned int i, len, offset;
7548 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7549 int size = dsc->is_thumb? 2 : 4;
7550 const gdb_byte *bkp_insn;
7553 /* Poke modified instruction(s). */
7554 for (i = 0; i < dsc->numinsns; i++)
7556 if (debug_displaced)
7558 fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
7560 fprintf_unfiltered (gdb_stdlog, "%.8lx",
7563 fprintf_unfiltered (gdb_stdlog, "%.4x",
7564 (unsigned short)dsc->modinsn[i]);
7566 fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
7567 (unsigned long) to + offset);
7570 write_memory_unsigned_integer (to + offset, size,
7571 byte_order_for_code,
7576 /* Choose the correct breakpoint instruction. */
7579 bkp_insn = tdep->thumb_breakpoint;
7580 len = tdep->thumb_breakpoint_size;
7584 bkp_insn = tdep->arm_breakpoint;
7585 len = tdep->arm_breakpoint_size;
7588 /* Put breakpoint afterwards. */
7589 write_memory (to + offset, bkp_insn, len);
7591 if (debug_displaced)
7592 fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
7593 paddress (gdbarch, from), paddress (gdbarch, to));
7596 /* Entry point for cleaning things up after a displaced instruction has been executed. */
7600 arm_displaced_step_fixup (struct gdbarch *gdbarch,
7601 struct displaced_step_closure *dsc_,
7602 CORE_ADDR from, CORE_ADDR to,
7603 struct regcache *regs)
7605 arm_displaced_step_closure *dsc = (arm_displaced_step_closure *) dsc_;
7608 dsc->cleanup (gdbarch, regs, dsc);
7610 if (!dsc->wrote_to_pc)
7611 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
7612 dsc->insn_addr + dsc->insn_size);
7616 #include "bfd-in2.h"
7617 #include "libcoff.h"
7620 gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
7622 gdb_disassembler *di
7623 = static_cast<gdb_disassembler *>(info->application_data);
7624 struct gdbarch *gdbarch = di->arch ();
7626 if (arm_pc_is_thumb (gdbarch, memaddr))
7628 static asymbol *asym;
7629 static combined_entry_type ce;
7630 static struct coff_symbol_struct csym;
7631 static struct bfd fake_bfd;
7632 static bfd_target fake_target;
7634 if (csym.native == NULL)
7636 /* Create a fake symbol vector containing a Thumb symbol.
7637 This is solely so that the code in print_insn_little_arm()
7638 and print_insn_big_arm() in opcodes/arm-dis.c will detect
7639 the presence of a Thumb symbol and switch to decoding
7640 Thumb instructions. */
7642 fake_target.flavour = bfd_target_coff_flavour;
7643 fake_bfd.xvec = &fake_target;
7644 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
7646 csym.symbol.the_bfd = &fake_bfd;
7647 csym.symbol.name = "fake";
7648 asym = (asymbol *) & csym;
7651 memaddr = UNMAKE_THUMB_ADDR (memaddr);
7652 info->symbols = &asym;
7655 info->symbols = NULL;
7657 /* GDB is able to get bfd_mach from exec_bfd, so info->mach is
7658 accurate; mark the USER_SPECIFIED_MACHINE_TYPE bit. Otherwise
7659 opcodes/arm-dis.c:print_insn would reset info->mach, which would
7660 trigger the assertion on the mismatch between info->mach and
7661 bfd_get_mach (exec_bfd) in default_print_insn. */
7662 if (exec_bfd != NULL)
7663 info->flags |= USER_SPECIFIED_MACHINE_TYPE;
7665 return default_print_insn (memaddr, info);
7668 /* The following define instruction sequences that will cause ARM
7669 cpu's to take an undefined instruction trap. These are used to
7670 signal a breakpoint to GDB.
7672 The newer ARMv4T cpu's are capable of operating in ARM or Thumb
7673 modes. A different instruction is required for each mode. The ARM
7674 cpu's can also be big or little endian. Thus four different
7675 instructions are needed to support all cases.
7677 Note: ARMv4 defines several new instructions that will take the
7678 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
7679 not in fact add the new instructions. The new undefined
7680 instructions in ARMv4 are all instructions that had no defined
7681 behaviour in earlier chips. There is no guarantee that they will
7682 raise an exception; they may instead be treated as NOPs. In practice, it
7683 may only be safe to rely on instructions matching:
7685 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
7686 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
7687 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
7689 Even this may only be true if the condition predicate is true. The
7690 following use a condition predicate of ALWAYS so it is always TRUE.
7692 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
7693 and NetBSD all use a software interrupt rather than an undefined
7694 instruction to force a trap. This can be handled by the
7695 abi-specific code during establishment of the gdbarch vector. */
7697 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
7698 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
7699 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
7700 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
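/* Note (illustrative): the ARM byte patterns above decode to 0xE7FFDEFE,
   which matches the undefined-instruction pattern described in the
   comment above, while the Thumb pattern decodes to 0xBEBE, a BKPT
   instruction. */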
7702 static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
7703 static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
7704 static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
7705 static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
7707 /* Implement the breakpoint_kind_from_pc gdbarch method. */
7710 arm_breakpoint_kind_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr)
7712 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7713 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7715 if (arm_pc_is_thumb (gdbarch, *pcptr))
7717 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
7719 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
7720 check whether we are replacing a 32-bit instruction. */
7721 if (tdep->thumb2_breakpoint != NULL)
7725 if (target_read_memory (*pcptr, buf, 2) == 0)
7727 unsigned short inst1;
7729 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
7730 if (thumb_insn_size (inst1) == 4)
7731 return ARM_BP_KIND_THUMB2;
7735 return ARM_BP_KIND_THUMB;
7738 return ARM_BP_KIND_ARM;
7742 /* Implement the sw_breakpoint_from_kind gdbarch method. */
7744 static const gdb_byte *
7745 arm_sw_breakpoint_from_kind (struct gdbarch *gdbarch, int kind, int *size)
7747 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7751 case ARM_BP_KIND_ARM:
7752 *size = tdep->arm_breakpoint_size;
7753 return tdep->arm_breakpoint;
7754 case ARM_BP_KIND_THUMB:
7755 *size = tdep->thumb_breakpoint_size;
7756 return tdep->thumb_breakpoint;
7757 case ARM_BP_KIND_THUMB2:
7758 *size = tdep->thumb2_breakpoint_size;
7759 return tdep->thumb2_breakpoint;
7761 gdb_assert_not_reached ("unexpected arm breakpoint kind");
7765 /* Implement the breakpoint_kind_from_current_state gdbarch method. */
7768 arm_breakpoint_kind_from_current_state (struct gdbarch *gdbarch,
7769 struct regcache *regcache,
7774 /* Check that the memory pointed to by PC is readable. */
7775 if (target_read_memory (regcache_read_pc (regcache), buf, 4) == 0)
7777 struct arm_get_next_pcs next_pcs_ctx;
7779 arm_get_next_pcs_ctor (&next_pcs_ctx,
7780 &arm_get_next_pcs_ops,
7781 gdbarch_byte_order (gdbarch),
7782 gdbarch_byte_order_for_code (gdbarch),
7786 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
7788 /* If *PCPTR matches one of the software single-step destination
7789 addresses computed above, determine the Thumb mode from that
7790 destination address. */
7791 for (CORE_ADDR pc : next_pcs)
7793 if (UNMAKE_THUMB_ADDR (pc) == *pcptr)
7795 if (IS_THUMB_ADDR (pc))
7797 *pcptr = MAKE_THUMB_ADDR (*pcptr);
7798 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7801 return ARM_BP_KIND_ARM;
7806 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7809 /* Extract from REGS, which holds the (raw) register state, a
7810 function return value of type TYPE, and copy that, in virtual
7811 format, into VALBUF. */
7814 arm_extract_return_value (struct type *type, struct regcache *regs,
7817 struct gdbarch *gdbarch = regs->arch ();
7818 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7820 if (TYPE_CODE_FLT == TYPE_CODE (type))
7822 switch (gdbarch_tdep (gdbarch)->fp_model)
7826 /* The value is in register F0 in internal format. We need to
7827 extract the raw value and then convert it to the desired virtual format. */
7829 bfd_byte tmpbuf[FP_REGISTER_SIZE];
7831 regs->cooked_read (ARM_F0_REGNUM, tmpbuf);
7832 target_float_convert (tmpbuf, arm_ext_type (gdbarch),
7837 case ARM_FLOAT_SOFT_FPA:
7838 case ARM_FLOAT_SOFT_VFP:
7839 /* ARM_FLOAT_VFP can arise if this is a variadic function so
7840 not using the VFP ABI code. */
7842 regs->cooked_read (ARM_A1_REGNUM, valbuf);
7843 if (TYPE_LENGTH (type) > 4)
7844 regs->cooked_read (ARM_A1_REGNUM + 1, valbuf + INT_REGISTER_SIZE);
7848 internal_error (__FILE__, __LINE__,
7849 _("arm_extract_return_value: "
7850 "Floating point model not supported"));
7854 else if (TYPE_CODE (type) == TYPE_CODE_INT
7855 || TYPE_CODE (type) == TYPE_CODE_CHAR
7856 || TYPE_CODE (type) == TYPE_CODE_BOOL
7857 || TYPE_CODE (type) == TYPE_CODE_PTR
7858 || TYPE_IS_REFERENCE (type)
7859 || TYPE_CODE (type) == TYPE_CODE_ENUM)
7861 /* If the type is a plain integer, then the access is
7862 straightforward. Otherwise we have to play around a bit more. */
7864 int len = TYPE_LENGTH (type);
7865 int regno = ARM_A1_REGNUM;
7870 /* By using store_unsigned_integer we avoid having to do
7871 anything special for small big-endian values. */
7872 regcache_cooked_read_unsigned (regs, regno++, &tmp);
7873 store_unsigned_integer (valbuf,
7874 (len > INT_REGISTER_SIZE
7875 ? INT_REGISTER_SIZE : len),
7877 len -= INT_REGISTER_SIZE;
7878 valbuf += INT_REGISTER_SIZE;
7883 /* For a structure or union the behaviour is as if the value had
7884 been stored to word-aligned memory and then loaded into
7885 registers with 32-bit load instruction(s). */
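/* For example, a 6-byte structure comes back with its first four bytes
   taken from r0 and the remaining two from r1, which is what the copy
   loop below does. */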
7886 int len = TYPE_LENGTH (type);
7887 int regno = ARM_A1_REGNUM;
7888 bfd_byte tmpbuf[INT_REGISTER_SIZE];
7892 regs->cooked_read (regno++, tmpbuf);
7893 memcpy (valbuf, tmpbuf,
7894 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
7895 len -= INT_REGISTER_SIZE;
7896 valbuf += INT_REGISTER_SIZE;
7902 /* Will a function return an aggregate type in memory or in a
7903 register? Return 0 if an aggregate type can be returned in a
7904 register, 1 if it must be returned in memory. */
7907 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
7909 enum type_code code;
7911 type = check_typedef (type);
7913 /* Simple, non-aggregate types (ie not including vectors and
7914 complex) are always returned in a register (or registers). */
7915 code = TYPE_CODE (type);
7916 if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
7917 && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
7920 if (TYPE_CODE_ARRAY == code && TYPE_VECTOR (type))
7922 /* Vector values should be returned using ARM registers if they
7923 are not over 16 bytes. */
7924 return (TYPE_LENGTH (type) > 16);
7927 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
7929 /* The AAPCS says all aggregates not larger than a word are returned in a register. */
7931 if (TYPE_LENGTH (type) <= INT_REGISTER_SIZE)
7940 /* All aggregate types that won't fit in a register must be returned in memory. */
7942 if (TYPE_LENGTH (type) > INT_REGISTER_SIZE)
7945 /* In the ARM ABI, "integer" like aggregate types are returned in
7946 registers. For an aggregate type to be integer like, its size
7947 must be less than or equal to INT_REGISTER_SIZE and the
7948 offset of each addressable subfield must be zero. Note that bit
7949 fields are not addressable, and all addressable subfields of
7950 unions always start at offset zero.
7952 This function is based on the behaviour of GCC 2.95.1.
7953 See: gcc/arm.c: arm_return_in_memory() for details.
7955 Note: versions of GCC before GCC 2.95.2 do not set up the
7956 parameters correctly for a function returning the following
7957 structure: struct { float f; }. This should be returned in memory,
7958 not a register. Richard Earnshaw sent me a patch, but I do not
7959 know of any way to detect if a function like the above has been
7960 compiled with the correct calling convention. */
7962 /* Assume all other aggregate types can be returned in a register.
7963 Run a check for structures, unions and arrays. */
7966 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
7969 /* Need to check if this struct/union is "integer" like. For
7970 this to be true, its size must be less than or equal to
7971 INT_REGISTER_SIZE and the offset of each addressable
7972 subfield must be zero. Note that bit fields are not
7973 addressable, and unions always start at offset zero. If any
7974 of the subfields is a floating point type, the struct/union
7975 cannot be an integer type. */
7977 /* For each field in the object, check:
7978 1) Is it FP? --> yes, nRc = 1;
7979 2) Is it addressable (bitpos != 0) and
7980 not packed (bitsize == 0)?
7984 for (i = 0; i < TYPE_NFIELDS (type); i++)
7986 enum type_code field_type_code;
7989 = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
7992 /* Is it a floating point type field? */
7993 if (field_type_code == TYPE_CODE_FLT)
7999 /* If bitpos != 0, then we have to care about it. */
8000 if (TYPE_FIELD_BITPOS (type, i) != 0)
8002 /* Bitfields are not addressable. If the field bitsize is
8003 zero, then the field is not packed. Hence it cannot be
8004 a bitfield or any other packed type. */
8005 if (TYPE_FIELD_BITSIZE (type, i) == 0)
8018 /* Write into appropriate registers a function return value of type
8019 TYPE, given in virtual format. */
8022 arm_store_return_value (struct type *type, struct regcache *regs,
8023 const gdb_byte *valbuf)
8025 struct gdbarch *gdbarch = regs->arch ();
8026 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8028 if (TYPE_CODE (type) == TYPE_CODE_FLT)
8030 gdb_byte buf[FP_REGISTER_SIZE];
8032 switch (gdbarch_tdep (gdbarch)->fp_model)
8036 target_float_convert (valbuf, type, buf, arm_ext_type (gdbarch));
8037 regs->cooked_write (ARM_F0_REGNUM, buf);
8040 case ARM_FLOAT_SOFT_FPA:
8041 case ARM_FLOAT_SOFT_VFP:
8042 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8043 not using the VFP ABI code. */
8045 regs->cooked_write (ARM_A1_REGNUM, valbuf);
8046 if (TYPE_LENGTH (type) > 4)
8047 regs->cooked_write (ARM_A1_REGNUM + 1, valbuf + INT_REGISTER_SIZE);
8051 internal_error (__FILE__, __LINE__,
8052 _("arm_store_return_value: Floating "
8053 "point model not supported"));
8057 else if (TYPE_CODE (type) == TYPE_CODE_INT
8058 || TYPE_CODE (type) == TYPE_CODE_CHAR
8059 || TYPE_CODE (type) == TYPE_CODE_BOOL
8060 || TYPE_CODE (type) == TYPE_CODE_PTR
8061 || TYPE_IS_REFERENCE (type)
8062 || TYPE_CODE (type) == TYPE_CODE_ENUM)
8064 if (TYPE_LENGTH (type) <= 4)
8066 /* Values of one word or less are zero/sign-extended and returned in a register. */
8068 bfd_byte tmpbuf[INT_REGISTER_SIZE];
8069 LONGEST val = unpack_long (type, valbuf);
8071 store_signed_integer (tmpbuf, INT_REGISTER_SIZE, byte_order, val);
8072 regs->cooked_write (ARM_A1_REGNUM, tmpbuf);
8076 /* Integral values greater than one word are stored in consecutive
8077 registers starting with r0. This will always be a multiple of
8078 the register size. */
8079 int len = TYPE_LENGTH (type);
8080 int regno = ARM_A1_REGNUM;
8084 regs->cooked_write (regno++, valbuf);
8085 len -= INT_REGISTER_SIZE;
8086 valbuf += INT_REGISTER_SIZE;
8092 /* For a structure or union the behaviour is as if the value had
8093 been stored to word-aligned memory and then loaded into
8094 registers with 32-bit load instruction(s). */
8095 int len = TYPE_LENGTH (type);
8096 int regno = ARM_A1_REGNUM;
8097 bfd_byte tmpbuf[INT_REGISTER_SIZE];
8101 memcpy (tmpbuf, valbuf,
8102 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
8103 regs->cooked_write (regno++, tmpbuf);
8104 len -= INT_REGISTER_SIZE;
8105 valbuf += INT_REGISTER_SIZE;
8111 /* Handle function return values. */
8113 static enum return_value_convention
8114 arm_return_value (struct gdbarch *gdbarch, struct value *function,
8115 struct type *valtype, struct regcache *regcache,
8116 gdb_byte *readbuf, const gdb_byte *writebuf)
8118 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8119 struct type *func_type = function ? value_type (function) : NULL;
8120 enum arm_vfp_cprc_base_type vfp_base_type;
8123 if (arm_vfp_abi_for_function (gdbarch, func_type)
8124 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
8126 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
8127 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
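/* Each element of the VFP candidate is returned in its own consecutive
   register (s, d or q according to the base type); quad registers need
   the dedicated read/write helpers, the others go through the
   user-register name mapping below. */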
8129 for (i = 0; i < vfp_base_count; i++)
8131 if (reg_char == 'q')
8134 arm_neon_quad_write (gdbarch, regcache, i,
8135 writebuf + i * unit_length);
8138 arm_neon_quad_read (gdbarch, regcache, i,
8139 readbuf + i * unit_length);
8146 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
8147 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8150 regcache->cooked_write (regnum, writebuf + i * unit_length);
8152 regcache->cooked_read (regnum, readbuf + i * unit_length);
8155 return RETURN_VALUE_REGISTER_CONVENTION;
8158 if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
8159 || TYPE_CODE (valtype) == TYPE_CODE_UNION
8160 || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
8162 if (tdep->struct_return == pcc_struct_return
8163 || arm_return_in_memory (gdbarch, valtype))
8164 return RETURN_VALUE_STRUCT_CONVENTION;
8166 else if (TYPE_CODE (valtype) == TYPE_CODE_COMPLEX)
8168 if (arm_return_in_memory (gdbarch, valtype))
8169 return RETURN_VALUE_STRUCT_CONVENTION;
8173 arm_store_return_value (valtype, regcache, writebuf);
8176 arm_extract_return_value (valtype, regcache, readbuf);
8178 return RETURN_VALUE_REGISTER_CONVENTION;
8183 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
8185 struct gdbarch *gdbarch = get_frame_arch (frame);
8186 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8187 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8189 gdb_byte buf[INT_REGISTER_SIZE];
8191 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
8193 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
8197 *pc = extract_unsigned_integer (buf, INT_REGISTER_SIZE, byte_order);
8201 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
8202 return the target PC. Otherwise return 0. */
8205 arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
8209 CORE_ADDR start_addr;
8211 /* Find the starting address and name of the function containing the PC. */
8212 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
8214 /* Trampoline 'bx reg' doesn't belong to any functions. Do the
8216 start_addr = arm_skip_bx_reg (frame, pc);
8217 if (start_addr != 0)
8223 /* If PC is in a Thumb call or return stub, return the address of the
8224 target PC, which is in a register. The thunk functions are called
8225 _call_via_xx, where x is the register name. The possible names
8226 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
8227 functions, named __ARM_call_via_r[0-7]. */
8228 if (startswith (name, "_call_via_")
8229 || startswith (name, "__ARM_call_via_"))
8231 /* Use the name suffix to determine which register contains the target PC. */
8233 static const char *table[15] =
8234 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
8235 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
8238 int offset = strlen (name) - 2;
8240 for (regno = 0; regno <= 14; regno++)
8241 if (strcmp (&name[offset], table[regno]) == 0)
8242 return get_frame_register_unsigned (frame, regno);
8245 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
8246 non-interworking calls to foo. We could decode the stubs
8247 to find the target but it's easier to use the symbol table. */
8248 namelen = strlen (name);
8249 if (name[0] == '_' && name[1] == '_'
8250 && ((namelen > 2 + strlen ("_from_thumb")
8251 && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
8252 || (namelen > 2 + strlen ("_from_arm")
8253 && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
8256 int target_len = namelen - 2;
8257 struct bound_minimal_symbol minsym;
8258 struct objfile *objfile;
8259 struct obj_section *sec;
8261 if (name[namelen - 1] == 'b')
8262 target_len -= strlen ("_from_thumb");
8264 target_len -= strlen ("_from_arm");
8266 target_name = (char *) alloca (target_len + 1);
8267 memcpy (target_name, name + 2, target_len);
8268 target_name[target_len] = '\0';
8270 sec = find_pc_section (pc);
8271 objfile = (sec == NULL) ? NULL : sec->objfile;
8272 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
8273 if (minsym.minsym != NULL)
8274 return BMSYMBOL_VALUE_ADDRESS (minsym);
8279 return 0; /* not a stub */
8283 set_arm_command (const char *args, int from_tty)
8285 printf_unfiltered (_("\
8286 \"set arm\" must be followed by an apporpriate subcommand.\n"));
8287 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
8291 show_arm_command (const char *args, int from_tty)
8293 cmd_show_list (showarmcmdlist, from_tty, "");
8297 arm_update_current_architecture (void)
8299 struct gdbarch_info info;
8301 /* If the current architecture is not ARM, we have nothing to do. */
8302 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
8305 /* Update the architecture. */
8306 gdbarch_info_init (&info);
8308 if (!gdbarch_update_p (info))
8309 internal_error (__FILE__, __LINE__, _("could not update architecture"));
8313 set_fp_model_sfunc (const char *args, int from_tty,
8314 struct cmd_list_element *c)
8318 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
8319 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
8321 arm_fp_model = (enum arm_float_model) fp_model;
8325 if (fp_model == ARM_FLOAT_LAST)
8326 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
8329 arm_update_current_architecture ();
8333 show_fp_model (struct ui_file *file, int from_tty,
8334 struct cmd_list_element *c, const char *value)
8336 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8338 if (arm_fp_model == ARM_FLOAT_AUTO
8339 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8340 fprintf_filtered (file, _("\
8341 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
8342 fp_model_strings[tdep->fp_model]);
8344 fprintf_filtered (file, _("\
8345 The current ARM floating point model is \"%s\".\n"),
8346 fp_model_strings[arm_fp_model]);
8350 arm_set_abi (const char *args, int from_tty,
8351 struct cmd_list_element *c)
8355 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
8356 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
8358 arm_abi_global = (enum arm_abi_kind) arm_abi;
8362 if (arm_abi == ARM_ABI_LAST)
8363 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
8366 arm_update_current_architecture ();
8370 arm_show_abi (struct ui_file *file, int from_tty,
8371 struct cmd_list_element *c, const char *value)
8373 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8375 if (arm_abi_global == ARM_ABI_AUTO
8376 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8377 fprintf_filtered (file, _("\
8378 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
8379 arm_abi_strings[tdep->arm_abi]);
8381 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
8386 arm_show_fallback_mode (struct ui_file *file, int from_tty,
8387 struct cmd_list_element *c, const char *value)
8389 fprintf_filtered (file,
8390 _("The current execution mode assumed "
8391 "(when symbols are unavailable) is \"%s\".\n"),
8392 arm_fallback_mode_string);
8396 arm_show_force_mode (struct ui_file *file, int from_tty,
8397 struct cmd_list_element *c, const char *value)
8399 fprintf_filtered (file,
8400 _("The current execution mode assumed "
8401 "(even when symbols are available) is \"%s\".\n"),
8402 arm_force_mode_string);
8405 /* If the user changes the register disassembly style used for info
8406 register and other commands, we also have to switch the style used
8407 in opcodes for disassembly output. This function is run by the "set
8408 arm disassembly" command, and does that. */
8411 set_disassembly_style_sfunc (const char *args, int from_tty,
8412 struct cmd_list_element *c)
8414 /* Convert the short style name into the long style name (e.g., reg-names-*)
8415 before calling the generic set_disassembler_options() function. */
8416 std::string long_name = std::string ("reg-names-") + disassembly_style;
8417 set_disassembler_options (&long_name[0]);
8421 show_disassembly_style_sfunc (struct ui_file *file, int from_tty,
8422 struct cmd_list_element *c, const char *value)
8424 struct gdbarch *gdbarch = get_current_arch ();
8425 char *options = get_disassembler_options (gdbarch);
8426 const char *style = "";
8430 FOR_EACH_DISASSEMBLER_OPTION (opt, options)
8431 if (CONST_STRNEQ (opt, "reg-names-"))
8433 style = &opt[strlen ("reg-names-")];
8434 len = strcspn (style, ",");
8437 fprintf_unfiltered (file, "The disassembly style is \"%.*s\".\n", len, style);
8440 /* Return the ARM register name corresponding to register I. */
8442 arm_register_name (struct gdbarch *gdbarch, int i)
8444 const int num_regs = gdbarch_num_regs (gdbarch);
8446 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
8447 && i >= num_regs && i < num_regs + 32)
8449 static const char *const vfp_pseudo_names[] = {
8450 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
8451 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
8452 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
8453 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
8456 return vfp_pseudo_names[i - num_regs];
8459 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
8460 && i >= num_regs + 32 && i < num_regs + 32 + 16)
8462 static const char *const neon_pseudo_names[] = {
8463 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
8464 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
8467 return neon_pseudo_names[i - num_regs - 32];
8470 if (i >= ARRAY_SIZE (arm_register_names))
8471 /* These registers are only supported on targets which supply
8472 an XML description. */
8475 return arm_register_names[i];
8478 /* Test whether the coff symbol specific value corresponds to a Thumb function. */
8482 coff_sym_is_thumb (int val)
8484 return (val == C_THUMBEXT
8485 || val == C_THUMBSTAT
8486 || val == C_THUMBEXTFUNC
8487 || val == C_THUMBSTATFUNC
8488 || val == C_THUMBLABEL);
8491 /* arm_coff_make_msymbol_special()
8492 arm_elf_make_msymbol_special()
8494 These functions test whether the COFF or ELF symbol corresponds to
8495 an address in thumb code, and set a "special" bit in a minimal
8496 symbol to indicate that it does. */
8499 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
8501 elf_symbol_type *elfsym = (elf_symbol_type *) sym;
8503 if (ARM_GET_SYM_BRANCH_TYPE (elfsym->internal_elf_sym.st_target_internal)
8504 == ST_BRANCH_TO_THUMB)
8505 MSYMBOL_SET_SPECIAL (msym);
8509 arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
8511 if (coff_sym_is_thumb (val))
8512 MSYMBOL_SET_SPECIAL (msym);
8516 arm_objfile_data_free (struct objfile *objfile, void *arg)
8518 struct arm_per_objfile *data = (struct arm_per_objfile *) arg;
8521 for (i = 0; i < objfile->obfd->section_count; i++)
8522 VEC_free (arm_mapping_symbol_s, data->section_maps[i]);
8526 arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
8529 const char *name = bfd_asymbol_name (sym);
8530 struct arm_per_objfile *data;
8531 VEC(arm_mapping_symbol_s) **map_p;
8532 struct arm_mapping_symbol new_map_sym;
8534 gdb_assert (name[0] == '$');
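/* ELF mapping symbols are named "$a" (start of ARM code), "$t" (start of
   Thumb code) and "$d" (start of data); only the character after the '$'
   is examined here. */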
8535 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
8538 data = (struct arm_per_objfile *) objfile_data (objfile,
8539 arm_objfile_data_key);
8542 data = OBSTACK_ZALLOC (&objfile->objfile_obstack,
8543 struct arm_per_objfile);
8544 set_objfile_data (objfile, arm_objfile_data_key, data);
8545 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
8546 objfile->obfd->section_count,
8547 VEC(arm_mapping_symbol_s) *);
8549 map_p = &data->section_maps[bfd_get_section (sym)->index];
8551 new_map_sym.value = sym->value;
8552 new_map_sym.type = name[1];
8554 /* Assume that most mapping symbols appear in order of increasing
8555 value. If they were randomly distributed, it would be faster to
8556 always push here and then sort at first use. */
8557 if (!VEC_empty (arm_mapping_symbol_s, *map_p))
8559 struct arm_mapping_symbol *prev_map_sym;
8561 prev_map_sym = VEC_last (arm_mapping_symbol_s, *map_p);
8562 if (prev_map_sym->value >= sym->value)
8565 idx = VEC_lower_bound (arm_mapping_symbol_s, *map_p, &new_map_sym,
8566 arm_compare_mapping_symbols);
8567 VEC_safe_insert (arm_mapping_symbol_s, *map_p, idx, &new_map_sym);
8572 VEC_safe_push (arm_mapping_symbol_s, *map_p, &new_map_sym);
8576 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
8578 struct gdbarch *gdbarch = regcache->arch ();
8579 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
8581 /* If necessary, set the T bit. */
8584 ULONGEST val, t_bit;
8585 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
8586 t_bit = arm_psr_thumb_bit (gdbarch);
8587 if (arm_pc_is_thumb (gdbarch, pc))
8588 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8591 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8596 /* Read the contents of a NEON quad register, by reading from two
8597 double registers. This is used to implement the quad pseudo
8598 registers, and for argument passing in case the quad registers are
8599 missing; vectors are passed in quad registers when using the VFP
8600 ABI, even if a NEON unit is not present. REGNUM is the index of
8601 the quad register, in [0, 15]. */
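/* For example, on a little-endian target q1 is read as d2 (least
   significant half) followed by d3 (most significant half). */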
8603 static enum register_status
8604 arm_neon_quad_read (struct gdbarch *gdbarch, readable_regcache *regcache,
8605 int regnum, gdb_byte *buf)
8608 gdb_byte reg_buf[8];
8609 int offset, double_regnum;
8610 enum register_status status;
8612 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8613 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8616 /* d0 is always the least significant half of q0. */
8617 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8622 status = regcache->raw_read (double_regnum, reg_buf);
8623 if (status != REG_VALID)
8625 memcpy (buf + offset, reg_buf, 8);
8627 offset = 8 - offset;
8628 status = regcache->raw_read (double_regnum + 1, reg_buf);
8629 if (status != REG_VALID)
8631 memcpy (buf + offset, reg_buf, 8);
8636 static enum register_status
8637 arm_pseudo_read (struct gdbarch *gdbarch, readable_regcache *regcache,
8638 int regnum, gdb_byte *buf)
8640 const int num_regs = gdbarch_num_regs (gdbarch);
8642 gdb_byte reg_buf[8];
8643 int offset, double_regnum;
8645 gdb_assert (regnum >= num_regs);
8648 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8649 /* Quad-precision register. */
8650 return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
8653 enum register_status status;
8655 /* Single-precision register. */
8656 gdb_assert (regnum < 32);
8658 /* s0 is always the least significant half of d0. */
8659 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8660 offset = (regnum & 1) ? 0 : 4;
8662 offset = (regnum & 1) ? 4 : 0;
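/* For example, on a little-endian target s4 is the low half (offset 0)
   of d2, while s5 is the high half (offset 4) of the same d2. */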
8664 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8665 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8668 status = regcache->raw_read (double_regnum, reg_buf);
8669 if (status == REG_VALID)
8670 memcpy (buf, reg_buf + offset, 4);
8675 /* Store the contents of BUF to a NEON quad register, by writing to
8676 two double registers. This is used to implement the quad pseudo
8677 registers, and for argument passing in case the quad registers are
8678 missing; vectors are passed in quad registers when using the VFP
8679 ABI, even if a NEON unit is not present. REGNUM is the index
8680 of the quad register, in [0, 15]. */
8683 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
8684 int regnum, const gdb_byte *buf)
8687 int offset, double_regnum;
8689 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8690 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8693 /* d0 is always the least significant half of q0. */
8694 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8699 regcache->raw_write (double_regnum, buf + offset);
8700 offset = 8 - offset;
8701 regcache->raw_write (double_regnum + 1, buf + offset);
8705 arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
8706 int regnum, const gdb_byte *buf)
8708 const int num_regs = gdbarch_num_regs (gdbarch);
8710 gdb_byte reg_buf[8];
8711 int offset, double_regnum;
8713 gdb_assert (regnum >= num_regs);
8716 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8717 /* Quad-precision register. */
8718 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
8721 /* Single-precision register. */
8722 gdb_assert (regnum < 32);
8724 /* s0 is always the least significant half of d0. */
8725 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8726 offset = (regnum & 1) ? 0 : 4;
8728 offset = (regnum & 1) ? 4 : 0;
8730 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8731 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8734 regcache->raw_read (double_regnum, reg_buf);
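/* Only the 4 bytes belonging to this single-precision register are
   patched; the other half of the underlying double register, read above,
   is written back unchanged. */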
8735 memcpy (reg_buf + offset, buf, 4);
8736 regcache->raw_write (double_regnum, reg_buf);
8740 static struct value *
8741 value_of_arm_user_reg (struct frame_info *frame, const void *baton)
8743 const int *reg_p = (const int *) baton;
8744 return value_of_register (*reg_p, frame);
8747 static enum gdb_osabi
8748 arm_elf_osabi_sniffer (bfd *abfd)
8750 unsigned int elfosabi;
8751 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
8753 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
8755 if (elfosabi == ELFOSABI_ARM)
8756 /* GNU tools use this value. Check note sections in this case, as well. */
8758 bfd_map_over_sections (abfd,
8759 generic_elf_osabi_sniff_abi_tag_sections,
8762 /* Anything else will be handled by the generic ELF sniffer. */
8767 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
8768 struct reggroup *group)
8770 /* The FPS register's type is INT, but it belongs to float_reggroup. Besides
8771 this, the FPS register belongs to save_reggroup, restore_reggroup, and
8772 all_reggroup, of course. */
8773 if (regnum == ARM_FPS_REGNUM)
8774 return (group == float_reggroup
8775 || group == save_reggroup
8776 || group == restore_reggroup
8777 || group == all_reggroup);
8779 return default_register_reggroup_p (gdbarch, regnum, group);
8783 /* For backward-compatibility we allow two 'g' packet lengths with
8784 the remote protocol depending on whether FPA registers are
8785 supplied. M-profile targets do not have FPA registers, but some
8786 stubs already exist in the wild which use a 'g' packet which
8787 supplies them albeit with dummy values. The packet format which
8788 includes FPA registers should be considered deprecated for
8789 M-profile targets. */
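/* As a rough illustration (assuming the usual 4-byte integer, 12-byte FPA
   and 8-byte VFP double register sizes), the guesses registered below are
   168 bytes for the FPA-style layout, 68 bytes for the plain M-profile
   layout and 200 bytes for the M-profile-plus-VFP layout. */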
8792 arm_register_g_packet_guesses (struct gdbarch *gdbarch)
8794 if (gdbarch_tdep (gdbarch)->is_m)
8796 /* If we know from the executable this is an M-profile target,
8797 cater for remote targets whose register set layout is the
8798 same as the FPA layout. */
8799 register_remote_g_packet_guess (gdbarch,
8800 /* r0-r12,sp,lr,pc; f0-f7; fps,xpsr */
8801 (16 * INT_REGISTER_SIZE)
8802 + (8 * FP_REGISTER_SIZE)
8803 + (2 * INT_REGISTER_SIZE),
8804 tdesc_arm_with_m_fpa_layout);
8806 /* The regular M-profile layout. */
8807 register_remote_g_packet_guess (gdbarch,
8808 /* r0-r12,sp,lr,pc; xpsr */
8809 (16 * INT_REGISTER_SIZE)
8810 + INT_REGISTER_SIZE,
8813 /* M-profile plus M4F VFP. */
8814 register_remote_g_packet_guess (gdbarch,
8815 /* r0-r12,sp,lr,pc; d0-d15; fpscr,xpsr */
8816 (16 * INT_REGISTER_SIZE)
8817 + (16 * VFP_REGISTER_SIZE)
8818 + (2 * INT_REGISTER_SIZE),
8819 tdesc_arm_with_m_vfp_d16);
8822 /* Otherwise we don't have a useful guess. */
8825 /* Implement the code_of_frame_writable gdbarch method. */
8828 arm_code_of_frame_writable (struct gdbarch *gdbarch, struct frame_info *frame)
8830 if (gdbarch_tdep (gdbarch)->is_m
8831 && get_frame_type (frame) == SIGTRAMP_FRAME)
8833 /* M-profile exception frames return to some magic PCs, which
8834 aren't writable at all. */
8842 /* Initialize the current architecture based on INFO. If possible,
8843 re-use an architecture from ARCHES, which is a list of
8844 architectures already created during this debugging session.
8846 Called e.g. at program startup, when reading a core file, and when
8847 reading a binary file. */
8849 static struct gdbarch *
8850 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
8852 struct gdbarch_tdep *tdep;
8853 struct gdbarch *gdbarch;
8854 struct gdbarch_list *best_arch;
8855 enum arm_abi_kind arm_abi = arm_abi_global;
8856 enum arm_float_model fp_model = arm_fp_model;
8857 struct tdesc_arch_data *tdesc_data = NULL;
8859 int vfp_register_count = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
8860 int have_wmmx_registers = 0;
8862 int have_fpa_registers = 1;
8863 const struct target_desc *tdesc = info.target_desc;
8865 /* If we have an object to base this architecture on, try to determine its ABI. */
8868 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
8870 int ei_osabi, e_flags;
8872 switch (bfd_get_flavour (info.abfd))
8874 case bfd_target_coff_flavour:
8875 /* Assume it's an old APCS-style ABI. */
8877 arm_abi = ARM_ABI_APCS;
8880 case bfd_target_elf_flavour:
8881 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
8882 e_flags = elf_elfheader (info.abfd)->e_flags;
8884 if (ei_osabi == ELFOSABI_ARM)
8886 /* GNU tools used to use this value, but do not for EABI
8887 objects. There's nowhere to tag an EABI version
8888 anyway, so assume APCS. */
8889 arm_abi = ARM_ABI_APCS;
8891 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
8893 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
8897 case EF_ARM_EABI_UNKNOWN:
8898 /* Assume GNU tools. */
8899 arm_abi = ARM_ABI_APCS;
8902 case EF_ARM_EABI_VER4:
8903 case EF_ARM_EABI_VER5:
8904 arm_abi = ARM_ABI_AAPCS;
8905 /* EABI binaries default to VFP float ordering.
8906 They may also contain build attributes that can
8907 be used to identify if the VFP argument-passing ABI is in use. */
8909 if (fp_model == ARM_FLOAT_AUTO)
8912 switch (bfd_elf_get_obj_attr_int (info.abfd,
8916 case AEABI_VFP_args_base:
8917 /* "The user intended FP parameter/result
8918 passing to conform to AAPCS, base variant". */
8920 fp_model = ARM_FLOAT_SOFT_VFP;
8922 case AEABI_VFP_args_vfp:
8923 /* "The user intended FP parameter/result
8924 passing to conform to AAPCS, VFP variant". */
8926 fp_model = ARM_FLOAT_VFP;
8928 case AEABI_VFP_args_toolchain:
8929 /* "The user intended FP parameter/result
8930 passing to conform to tool chain-specific
8931 conventions" - we don't know any such
8932 conventions, so leave it as "auto". */
8934 case AEABI_VFP_args_compatible:
8935 /* "Code is compatible with both the base
8936 and VFP variants; the user did not permit
8937 non-variadic functions to pass FP
8938 parameters/results" - leave it as "auto". */
8942 /* Attribute value not mentioned in the
8943 November 2012 ABI, so leave it as "auto". */
8948 fp_model = ARM_FLOAT_SOFT_VFP;
8954 /* Leave it as "auto". */
8955 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
8960 /* Detect M-profile programs. This only works if the
8961 executable file includes build attributes; GCC does
8962 copy them to the executable, but e.g. RealView does not. */
8965 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
8968 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
8969 Tag_CPU_arch_profile);
8971 /* GCC specifies the profile for v6-M; RealView only
8972 specifies the profile for architectures starting with
8973 V7 (as opposed to architectures with a tag
8974 numerically greater than TAG_CPU_ARCH_V7). */
8975 if (!tdesc_has_registers (tdesc)
8976 && (attr_arch == TAG_CPU_ARCH_V6_M
8977 || attr_arch == TAG_CPU_ARCH_V6S_M
8978 || attr_profile == 'M'))
8983 if (fp_model == ARM_FLOAT_AUTO)
8985 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
8988 /* Leave it as "auto". Strictly speaking this case
8989 means FPA, but almost nobody uses that now, and
8990 many toolchains fail to set the appropriate bits
8991 for the floating-point model they use. */
8993 case EF_ARM_SOFT_FLOAT:
8994 fp_model = ARM_FLOAT_SOFT_FPA;
8996 case EF_ARM_VFP_FLOAT:
8997 fp_model = ARM_FLOAT_VFP;
8999 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
9000 fp_model = ARM_FLOAT_SOFT_VFP;
9005 if (e_flags & EF_ARM_BE8)
9006 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
9011 /* Leave it as "auto". */
9016 /* Check any target description for validity. */
9017 if (tdesc_has_registers (tdesc))
9019 /* For most registers we require GDB's default names; but also allow
9020 the numeric names for sp / lr / pc, as a convenience. */
9021 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
9022 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
9023 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
9025 const struct tdesc_feature *feature;
9028 feature = tdesc_find_feature (tdesc,
9029 "org.gnu.gdb.arm.core");
9030 if (feature == NULL)
9032 feature = tdesc_find_feature (tdesc,
9033 "org.gnu.gdb.arm.m-profile");
9034 if (feature == NULL)
9040 tdesc_data = tdesc_data_alloc ();
9043 for (i = 0; i < ARM_SP_REGNUM; i++)
9044 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9045 arm_register_names[i]);
9046 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9049 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9052 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9056 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9057 ARM_PS_REGNUM, "xpsr");
9059 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9060 ARM_PS_REGNUM, "cpsr");
9064 tdesc_data_cleanup (tdesc_data);
9068 feature = tdesc_find_feature (tdesc,
9069 "org.gnu.gdb.arm.fpa");
9070 if (feature != NULL)
9073 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
9074 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9075 arm_register_names[i]);
9078 tdesc_data_cleanup (tdesc_data);
9083 have_fpa_registers = 0;
9085 feature = tdesc_find_feature (tdesc,
9086 "org.gnu.gdb.xscale.iwmmxt");
9087 if (feature != NULL)
9089 static const char *const iwmmxt_names[] = {
9090 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9091 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9092 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9093 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9097 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
9099 &= tdesc_numbered_register (feature, tdesc_data, i,
9100 iwmmxt_names[i - ARM_WR0_REGNUM]);
9102 /* Check for the control registers, but do not fail if they are missing. */
9104 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
9105 tdesc_numbered_register (feature, tdesc_data, i,
9106 iwmmxt_names[i - ARM_WR0_REGNUM]);
9108 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
9110 &= tdesc_numbered_register (feature, tdesc_data, i,
9111 iwmmxt_names[i - ARM_WR0_REGNUM]);
9115 tdesc_data_cleanup (tdesc_data);
9119 have_wmmx_registers = 1;
9122 /* If we have a VFP unit, check whether the single precision registers
9123 are present. If not, then we will synthesize them as pseudo registers. */
9125 feature = tdesc_find_feature (tdesc,
9126 "org.gnu.gdb.arm.vfp");
9127 if (feature != NULL)
9129 static const char *const vfp_double_names[] = {
9130 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9131 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9132 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9133 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9136 /* Require the double precision registers. There must be either 16 or 32. */
9139 for (i = 0; i < 32; i++)
9141 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9143 vfp_double_names[i]);
9147 if (!valid_p && i == 16)
9150 /* Also require FPSCR. */
9151 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9152 ARM_FPSCR_REGNUM, "fpscr");
9155 tdesc_data_cleanup (tdesc_data);
9159 if (tdesc_unnumbered_register (feature, "s0") == 0)
9160 have_vfp_pseudos = 1;
9162 vfp_register_count = i;
9164 /* If we have VFP, also check for NEON. The architecture allows
9165 NEON without VFP (integer vector operations only), but GDB
9166 does not support that. */
9167 feature = tdesc_find_feature (tdesc,
9168 "org.gnu.gdb.arm.neon");
9169 if (feature != NULL)
9171 /* NEON requires 32 double-precision registers. */
9174 tdesc_data_cleanup (tdesc_data);
9178 /* If there are quad registers defined by the stub, use
9179 their type; otherwise (normally) provide them with
9180 the default type. */
9181 if (tdesc_unnumbered_register (feature, "q0") == 0)
9182 have_neon_pseudos = 1;
9189 /* If there is already a candidate, use it. */
9190 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
9192 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
9194 if (arm_abi != ARM_ABI_AUTO
9195 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
9198 if (fp_model != ARM_FLOAT_AUTO
9199 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
9202 /* There are various other properties in tdep that we do not
9203 need to check here: those derived from a target description,
9204 since gdbarches with a different target description are
9205 automatically disqualified. */
9207 /* Do check is_m, though, since it might come from the binary. */
9208 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
9211 /* Found a match. */
9215 if (best_arch != NULL)
9217 if (tdesc_data != NULL)
9218 tdesc_data_cleanup (tdesc_data);
9219 return best_arch->gdbarch;
9222 tdep = XCNEW (struct gdbarch_tdep);
9223 gdbarch = gdbarch_alloc (&info, tdep);
9225 /* Record additional information about the architecture we are defining.
9226 These are gdbarch discriminators, like the OSABI. */
9227 tdep->arm_abi = arm_abi;
9228 tdep->fp_model = fp_model;
9230 tdep->have_fpa_registers = have_fpa_registers;
9231 tdep->have_wmmx_registers = have_wmmx_registers;
9232 gdb_assert (vfp_register_count == 0
9233 || vfp_register_count == 16
9234 || vfp_register_count == 32);
9235 tdep->vfp_register_count = vfp_register_count;
9236 tdep->have_vfp_pseudos = have_vfp_pseudos;
9237 tdep->have_neon_pseudos = have_neon_pseudos;
9238 tdep->have_neon = have_neon;
9240 arm_register_g_packet_guesses (gdbarch);
9243 switch (info.byte_order_for_code)
9245 case BFD_ENDIAN_BIG:
9246 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
9247 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
9248 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
9249 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
9253 case BFD_ENDIAN_LITTLE:
9254 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
9255 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
9256 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
9257 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
9262 internal_error (__FILE__, __LINE__,
9263 _("arm_gdbarch_init: bad byte order for float format"));
9266 /* On ARM targets char defaults to unsigned. */
9267 set_gdbarch_char_signed (gdbarch, 0);
9269 /* wchar_t is unsigned under the AAPCS. */
9270 if (tdep->arm_abi == ARM_ABI_AAPCS)
9271 set_gdbarch_wchar_signed (gdbarch, 0);
9273 set_gdbarch_wchar_signed (gdbarch, 1);
9275 /* Compute type alignment. */
9276 set_gdbarch_type_align (gdbarch, arm_type_align);
9278 /* Note: for displaced stepping, this includes the breakpoint, and one word
9279 of additional scratch space. This setting isn't used for anything besides
9280 displaced stepping at present. */
9281 set_gdbarch_max_insn_length (gdbarch, 4 * DISPLACED_MODIFIED_INSNS);
9283 /* This should be low enough for everything. */
9284 tdep->lowest_pc = 0x20;
9285 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
9287 /* The default, for both APCS and AAPCS, is to return small
9288 structures in registers. */
9289 tdep->struct_return = reg_struct_return;
9291 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
9292 set_gdbarch_frame_align (gdbarch, arm_frame_align);
9295 set_gdbarch_code_of_frame_writable (gdbarch, arm_code_of_frame_writable);
9297 set_gdbarch_write_pc (gdbarch, arm_write_pc);
9299 frame_base_set_default (gdbarch, &arm_normal_base);
9301 /* Address manipulation. */
9302 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
9304 /* Advance PC across function entry code. */
9305 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
9307 /* Detect whether PC is at a point where the stack has been destroyed. */
9308 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
9310 /* Skip trampolines. */
9311 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
9313 /* The stack grows downward. */
9314 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
9316 /* Breakpoint manipulation. */
9317 set_gdbarch_breakpoint_kind_from_pc (gdbarch, arm_breakpoint_kind_from_pc);
9318 set_gdbarch_sw_breakpoint_from_kind (gdbarch, arm_sw_breakpoint_from_kind);
9319 set_gdbarch_breakpoint_kind_from_current_state (gdbarch,
9320 arm_breakpoint_kind_from_current_state);
9322 /* Information about registers, etc. */
9323 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
9324 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
9325 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
9326 set_gdbarch_register_type (gdbarch, arm_register_type);
9327 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
9329 /* This "info float" is FPA-specific. Use the generic version if we do not have FPA registers. */
9331 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
9332 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
9334 /* Internal <-> external register number maps. */
9335 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
9336 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
9338 set_gdbarch_register_name (gdbarch, arm_register_name);
9340 /* Returning results. */
9341 set_gdbarch_return_value (gdbarch, arm_return_value);
9344 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
9346 /* Minsymbol frobbing. */
9347 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
9348 set_gdbarch_coff_make_msymbol_special (gdbarch,
9349 arm_coff_make_msymbol_special);
9350 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
9352 /* Thumb-2 IT block support. */
9353 set_gdbarch_adjust_breakpoint_address (gdbarch,
9354 arm_adjust_breakpoint_address);
9356 /* Virtual tables. */
9357 set_gdbarch_vbit_in_delta (gdbarch, 1);
9359 /* Hook in the ABI-specific overrides, if they have been registered. */
9360 gdbarch_init_osabi (info, gdbarch);
9362 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
9364 /* Add some default predicates. */
9366 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
9367 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
9368 dwarf2_append_unwinders (gdbarch);
9369 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
9370 frame_unwind_append_unwinder (gdbarch, &arm_epilogue_frame_unwind);
9371 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
9373 /* Now we have tuned the configuration, set a few final things,
9374 based on what the OS ABI has told us. */
9376 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
9377 binaries are always marked. */
9378 if (tdep->arm_abi == ARM_ABI_AUTO)
9379 tdep->arm_abi = ARM_ABI_APCS;
9381 /* Watchpoints are not steppable. */
9382 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
9384 /* We used to default to FPA for generic ARM, but almost nobody
9385 uses that now, and we now provide a way for the user to force
9386 the model. So default to the most useful variant. */
9387 if (tdep->fp_model == ARM_FLOAT_AUTO)
9388 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
9390 if (tdep->jb_pc >= 0)
9391 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
9393 /* Floating point sizes and format. */
9394 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
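/* With the FPA-based models, doubles on little-endian targets use the
   mixed-endian "littlebyte, bigword" layout: bytes within each 4-byte
   word are little-endian, but the most significant word comes first. */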
9395 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
9397 set_gdbarch_double_format
9398 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9399 set_gdbarch_long_double_format
9400 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9404 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
9405 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
9408 if (have_vfp_pseudos)
9410 /* NOTE: These are the only pseudo registers used by
9411 the ARM target at the moment. If more are added, a
9412 little more care in numbering will be needed. */
9414 int num_pseudos = 32;
9415 if (have_neon_pseudos)
9417 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
9418 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
9419 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
9424 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
9426 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
9428 /* Override tdesc_register_type to adjust the types of VFP
9429 registers for NEON. */
9430 set_gdbarch_register_type (gdbarch, arm_register_type);
9433 /* Add standard register aliases. We add aliases even for those
9434 names which are used by the current architecture - it's simpler,
9435 and does no harm, since nothing ever lists user registers. */
9436 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
9437 user_reg_add (gdbarch, arm_register_aliases[i].name,
9438 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
9440 set_gdbarch_disassembler_options (gdbarch, &arm_disassembler_options);
9441 set_gdbarch_valid_disassembler_options (gdbarch, disassembler_options_arm ());
9447 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
9449 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9454 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
9455 (unsigned long) tdep->lowest_pc);
9461 static void arm_record_test (void);
9466 _initialize_arm_tdep (void)
9470 char regdesc[1024], *rdptr = regdesc;
9471 size_t rest = sizeof (regdesc);
9473 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
9475 arm_objfile_data_key
9476 = register_objfile_data_with_cleanup (NULL, arm_objfile_data_free);
9478 /* Add ourselves to objfile event chain. */
9479 gdb::observers::new_objfile.attach (arm_exidx_new_objfile);
9481 = register_objfile_data_with_cleanup (NULL, arm_exidx_data_free);
9483 /* Register an ELF OS ABI sniffer for ARM binaries. */
9484 gdbarch_register_osabi_sniffer (bfd_arch_arm,
9485 bfd_target_elf_flavour,
9486 arm_elf_osabi_sniffer);
9488 /* Initialize the standard target descriptions. */
9489 initialize_tdesc_arm_with_m ();
9490 initialize_tdesc_arm_with_m_fpa_layout ();
9491 initialize_tdesc_arm_with_m_vfp_d16 ();
9492 initialize_tdesc_arm_with_iwmmxt ();
9493 initialize_tdesc_arm_with_vfpv2 ();
9494 initialize_tdesc_arm_with_vfpv3 ();
9495 initialize_tdesc_arm_with_neon ();
9497 /* Add root prefix command for all "set arm"/"show arm" commands. */
9498 add_prefix_cmd ("arm", no_class, set_arm_command,
9499 _("Various ARM-specific commands."),
9500 &setarmcmdlist, "set arm ", 0, &setlist);
9502 add_prefix_cmd ("arm", no_class, show_arm_command,
9503 _("Various ARM-specific commands."),
9504 &showarmcmdlist, "show arm ", 0, &showlist);
9507 arm_disassembler_options = xstrdup ("reg-names-std");
9508 const disasm_options_t *disasm_options
9509 = &disassembler_options_arm ()->options;
9510 int num_disassembly_styles = 0;
9511 for (i = 0; disasm_options->name[i] != NULL; i++)
9512 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9513 num_disassembly_styles++;
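/* For example, an opcodes option named "reg-names-apcs" contributes the
   style "apcs" (illustrative; the actual list depends on the opcodes
   library). */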
9515 /* Initialize the array that will be passed to add_setshow_enum_cmd(). */
9516 valid_disassembly_styles = XNEWVEC (const char *,
9517 num_disassembly_styles + 1);
9518 for (i = j = 0; disasm_options->name[i] != NULL; i++)
9519 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9521 size_t offset = strlen ("reg-names-");
9522 const char *style = disasm_options->name[i];
9523 valid_disassembly_styles[j++] = &style[offset];
9524 length = snprintf (rdptr, rest, "%s - %s\n", &style[offset],
9525 disasm_options->description[i]);
9529 /* Mark the end of valid options. */
9530 valid_disassembly_styles[num_disassembly_styles] = NULL;
9532 /* Create the help text. */
9533 std::string helptext = string_printf ("%s%s%s",
9534 _("The valid values are:\n"),
9536 _("The default is \"std\"."));
9538 add_setshow_enum_cmd("disassembler", no_class,
9539 valid_disassembly_styles, &disassembly_style,
9540 _("Set the disassembly style."),
9541 _("Show the disassembly style."),
9543 set_disassembly_style_sfunc,
9544 show_disassembly_style_sfunc,
9545 &setarmcmdlist, &showarmcmdlist);
9547 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
9548 _("Set usage of ARM 32-bit mode."),
9549 _("Show usage of ARM 32-bit mode."),
9550 _("When off, a 26-bit PC will be used."),
9552 NULL, /* FIXME: i18n: Usage of ARM 32-bit mode is %s. */
9554 &setarmcmdlist, &showarmcmdlist);
9556 /* Add a command to allow the user to force the FPU model. */
9557 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
9558 _("Set the floating point type."),
9559 _("Show the floating point type."),
9560 _("auto - Determine the FP type from the OS-ABI.\n\
9561 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
9562 fpa - FPA co-processor (GCC compiled).\n\
9563 softvfp - Software FP with pure-endian doubles.\n\
9564 vfp - VFP co-processor."),
9565 set_fp_model_sfunc, show_fp_model,
9566 &setarmcmdlist, &showarmcmdlist);
9568 /* Add a command to allow the user to force the ABI. */
9569 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
9572 NULL, arm_set_abi, arm_show_abi,
9573 &setarmcmdlist, &showarmcmdlist);
9575 /* Add two commands to allow the user to force the assumed
9577 add_setshow_enum_cmd ("fallback-mode", class_support,
9578 arm_mode_strings, &arm_fallback_mode_string,
9579 _("Set the mode assumed when symbols are unavailable."),
9580 _("Show the mode assumed when symbols are unavailable."),
9581 NULL, NULL, arm_show_fallback_mode,
9582 &setarmcmdlist, &showarmcmdlist);
9583 add_setshow_enum_cmd ("force-mode", class_support,
9584 arm_mode_strings, &arm_force_mode_string,
9585 _("Set the mode assumed even when symbols are available."),
9586 _("Show the mode assumed even when symbols are available."),
9587 NULL, NULL, arm_show_force_mode,
9588 &setarmcmdlist, &showarmcmdlist);
9590 /* Debugging flag. */
9591 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
9592 _("Set ARM debugging."),
9593 _("Show ARM debugging."),
9594 _("When on, arm-specific debugging is enabled."),
9596 NULL, /* FIXME: i18n: "ARM debugging is %s." */
9597 &setdebuglist, &showdebuglist);
9600 selftests::register_test ("arm-record", selftests::arm_record_test);
9605 /* ARM-reversible process record data structures. */
9607 #define ARM_INSN_SIZE_BYTES 4
9608 #define THUMB_INSN_SIZE_BYTES 2
9609 #define THUMB2_INSN_SIZE_BYTES 4
9612 /* Position of the bit within a 32-bit ARM instruction
9613 that defines whether the instruction is a load or store. */
9614 #define INSN_S_L_BIT_NUM 20
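/* For example, in a word load/store encoding, bit 20 (the L bit) is 1 for
   LDR and 0 for STR. */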
9616 #define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
9619 unsigned int reg_len = LENGTH; \
9622 REGS = XNEWVEC (uint32_t, reg_len); \
9623 memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
9628 #define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
9631 unsigned int mem_len = LENGTH; \
9634 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
9635 memcpy(&MEMS->len, &RECORD_BUF[0], \
9636 sizeof(struct arm_mem_r) * LENGTH); \
9641 /* Boolean expression: true if the insn has already been recorded, i.e. it has register or memory records. */
9642 #define INSN_RECORDED(ARM_RECORD) \
9643 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
9645 /* ARM memory record structure. */
9648 uint32_t len; /* Record length. */
9649 uint32_t addr; /* Memory address. */
9652 /* An ARM instruction record contains the opcode and execution state of the
9653 current insn (set before entry to decode_insn ()), and the list of
9654 to-be-modified registers and memory blocks (set on return from
9655 decode_insn ()). */
9657 typedef struct insn_decode_record_t
9659 struct gdbarch *gdbarch;
9660 struct regcache *regcache;
9661 CORE_ADDR this_addr; /* Address of the insn being decoded. */
9662 uint32_t arm_insn; /* Should accommodate thumb. */
9663 uint32_t cond; /* Condition code. */
9664 uint32_t opcode; /* Insn opcode. */
9665 uint32_t decode; /* Insn decode bits. */
9666 uint32_t mem_rec_count; /* Number of memory records. */
9667 uint32_t reg_rec_count; /* Number of register records. */
9668 uint32_t *arm_regs; /* Registers to be saved for this record. */
9669 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
9670 } insn_decode_record;
9673 /* Checks ARM SBZ and SBO mandatory fields. */
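/* SBO stands for "should be one" and SBZ for "should be zero"; the SBO
   argument selects whether the LEN bits starting at BIT_NUM must all be
   ones or all be zeros. */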
9676 sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
9678 uint32_t ones = bits (insn, bit_num - 1, (bit_num - 1) + (len - 1));
9697 enum arm_record_result
9699 ARM_RECORD_SUCCESS = 0,
9700 ARM_RECORD_FAILURE = 1
9707 } arm_record_strx_t;
9718 arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
9719 uint32_t *record_buf_mem, arm_record_strx_t str_type)
9722 struct regcache *reg_cache = arm_insn_r->regcache;
9723 ULONGEST u_regval[2]= {0};
9725 uint32_t reg_src1 = 0, reg_src2 = 0;
9726 uint32_t immed_high = 0, immed_low = 0, offset_8 = 0, tgt_mem_addr = 0;
9728 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
9729 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
9731 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
9733 /* 1) Handle misc store, immediate offset. */
9734 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9735 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9736 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9737 regcache_raw_read_unsigned (reg_cache, reg_src1,
9739 if (ARM_PC_REGNUM == reg_src1)
9741 /* If R15 was used as Rn, the value read is the current PC + 8. */
9742 u_regval[0] = u_regval[0] + 8;
9744 offset_8 = (immed_high << 4) | immed_low;
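/* For example, immed_high == 0x1 and immed_low == 0x4 give an 8-bit
   offset of 0x14. */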
9745 /* Calculate target store address. */
9746 if (14 == arm_insn_r->opcode)
9748 tgt_mem_addr = u_regval[0] + offset_8;
9752 tgt_mem_addr = u_regval[0] - offset_8;
9754 if (ARM_RECORD_STRH == str_type)
9756 record_buf_mem[0] = 2;
9757 record_buf_mem[1] = tgt_mem_addr;
9758 arm_insn_r->mem_rec_count = 1;
9760 else if (ARM_RECORD_STRD == str_type)
9762 record_buf_mem[0] = 4;
9763 record_buf_mem[1] = tgt_mem_addr;
9764 record_buf_mem[2] = 4;
9765 record_buf_mem[3] = tgt_mem_addr + 4;
9766 arm_insn_r->mem_rec_count = 2;
9769 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
9771 /* 2) Store, register offset. */
9773 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9775 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9776 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9777 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9780 /* If R15 was used as Rn, the value read is the current PC + 8. */
9781 u_regval[0] = u_regval[0] + 8;
9783 /* Calculate target store address, Rn +/- Rm, register offset. */
9784 if (12 == arm_insn_r->opcode)
9786 tgt_mem_addr = u_regval[0] + u_regval[1];
9790 tgt_mem_addr = u_regval[1] - u_regval[0];
9792 if (ARM_RECORD_STRH == str_type)
9794 record_buf_mem[0] = 2;
9795 record_buf_mem[1] = tgt_mem_addr;
9796 arm_insn_r->mem_rec_count = 1;
9798 else if (ARM_RECORD_STRD == str_type)
9800 record_buf_mem[0] = 4;
9801 record_buf_mem[1] = tgt_mem_addr;
9802 record_buf_mem[2] = 4;
9803 record_buf_mem[3] = tgt_mem_addr + 4;
9804 arm_insn_r->mem_rec_count = 2;
9807 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
9808 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9810 /* 3) Store, immediate pre-indexed. */
9811 /* 5) Store, immediate post-indexed. */
9812 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9813 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9814 offset_8 = (immed_high << 4) | immed_low;
9815 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9816 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9817 /* Calculate target store address, Rn +/- offset_8, immediate offset. */
9818 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9820 tgt_mem_addr = u_regval[0] + offset_8;
9824 tgt_mem_addr = u_regval[0] - offset_8;
9826 if (ARM_RECORD_STRH == str_type)
9828 record_buf_mem[0] = 2;
9829 record_buf_mem[1] = tgt_mem_addr;
9830 arm_insn_r->mem_rec_count = 1;
9832 else if (ARM_RECORD_STRD == str_type)
9834 record_buf_mem[0] = 4;
9835 record_buf_mem[1] = tgt_mem_addr;
9836 record_buf_mem[2] = 4;
9837 record_buf_mem[3] = tgt_mem_addr + 4;
9838 arm_insn_r->mem_rec_count = 2;
9840 /* Record Rn also as it changes. */
9841 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9842 arm_insn_r->reg_rec_count = 1;
9844 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
9845 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9847 /* 4) Store, register pre-indexed. */
9848 /* 6) Store, register post-indexed. */
9849 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9850 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9851 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9852 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9853 /* Calculate target store address, Rn +/- Rm, register offset. */
9854 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9856 tgt_mem_addr = u_regval[0] + u_regval[1];
9860 tgt_mem_addr = u_regval[1] - u_regval[0];
9862 if (ARM_RECORD_STRH == str_type)
9864 record_buf_mem[0] = 2;
9865 record_buf_mem[1] = tgt_mem_addr;
9866 arm_insn_r->mem_rec_count = 1;
9868 else if (ARM_RECORD_STRD == str_type)
9870 record_buf_mem[0] = 4;
9871 record_buf_mem[1] = tgt_mem_addr;
9872 record_buf_mem[2] = 4;
9873 record_buf_mem[3] = tgt_mem_addr + 4;
9874 arm_insn_r->mem_rec_count = 2;
9876 /* Record Rn also as it changes. */
9877 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9878 arm_insn_r->reg_rec_count = 1;
9883 /* Handling ARM extension space insns. */
9886 arm_record_extension_space (insn_decode_record *arm_insn_r)
9888 int ret = 0; /* Return value: -1: record failure; 0: success. */
9889 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
9890 uint32_t record_buf[8], record_buf_mem[8];
9891 uint32_t reg_src1 = 0;
9892 struct regcache *reg_cache = arm_insn_r->regcache;
9893 ULONGEST u_regval = 0;
9895 gdb_assert (!INSN_RECORDED(arm_insn_r));
9896 /* Handle unconditional insn extension space. */
9898 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
9899 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9900 if (arm_insn_r->cond)
9902 /* PLD has no effect on architectural state, it just affects the caches. */
9904 if (5 == ((opcode1 & 0xE0) >> 5))
9907 record_buf[0] = ARM_PS_REGNUM;
9908 record_buf[1] = ARM_LR_REGNUM;
9909 arm_insn_r->reg_rec_count = 2;
9911 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
9915 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
9916 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
9919 /* Undefined instruction on ARM V5; need to handle if later
9920 versions define it. */
9923 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
9924 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9925 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
9927 /* Handle arithmetic insn extension space. */
9928 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
9929 && !INSN_RECORDED(arm_insn_r))
9931 /* Handle MLA(S) and MUL(S). */
9932 if (in_inclusive_range (insn_op1, 0U, 3U))
9934 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9935 record_buf[1] = ARM_PS_REGNUM;
9936 arm_insn_r->reg_rec_count = 2;
9938 else if (in_inclusive_range (insn_op1, 4U, 15U))
9940 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
9941 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
9942 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
9943 record_buf[2] = ARM_PS_REGNUM;
9944 arm_insn_r->reg_rec_count = 3;
9948 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
9949 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
9950 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
9952 /* Handle control insn extension space. */
9954 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
9955 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
9957 if (!bit (arm_insn_r->arm_insn,25))
9959 if (!bits (arm_insn_r->arm_insn, 4, 7))
9961 if ((0 == insn_op1) || (2 == insn_op1))
9964 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9965 arm_insn_r->reg_rec_count = 1;
9967 else if (1 == insn_op1)
9969 /* CPSR is going to be changed. */
9970 record_buf[0] = ARM_PS_REGNUM;
9971 arm_insn_r->reg_rec_count = 1;
9973 else if (3 == insn_op1)
9975 /* SPSR is going to be changed. */
9976 /* We need to get SPSR value, which is yet to be done. */
9980 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
9985 record_buf[0] = ARM_PS_REGNUM;
9986 arm_insn_r->reg_rec_count = 1;
9988 else if (3 == insn_op1)
9991 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9992 arm_insn_r->reg_rec_count = 1;
9995 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
9998 record_buf[0] = ARM_PS_REGNUM;
9999 record_buf[1] = ARM_LR_REGNUM;
10000 arm_insn_r->reg_rec_count = 2;
10002 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
10004 /* QADD, QSUB, QDADD, QDSUB */
10005 record_buf[0] = ARM_PS_REGNUM;
10006 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10007 arm_insn_r->reg_rec_count = 2;
10009 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
10012 record_buf[0] = ARM_PS_REGNUM;
10013 record_buf[1] = ARM_LR_REGNUM;
10014 arm_insn_r->reg_rec_count = 2;
10016 /* Save SPSR also; how? */
10019 else if(8 == bits (arm_insn_r->arm_insn, 4, 7)
10020 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
10021 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
10022 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
10025 if (0 == insn_op1 || 1 == insn_op1)
10027 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
10028 /* We don't do optimization for SMULW<y>, where we need only Rd. */
10030 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10031 record_buf[1] = ARM_PS_REGNUM;
10032 arm_insn_r->reg_rec_count = 2;
10034 else if (2 == insn_op1)
10037 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10038 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
10039 arm_insn_r->reg_rec_count = 2;
10041 else if (3 == insn_op1)
10044 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10045 arm_insn_r->reg_rec_count = 1;
10051 /* MSR : immediate form. */
10054 /* CPSR is going to be changed. */
10055 record_buf[0] = ARM_PS_REGNUM;
10056 arm_insn_r->reg_rec_count = 1;
10058 else if (3 == insn_op1)
10060 /* SPSR is going to be changed. */
10061 /* We need to get the SPSR value, which is yet to be done. */
10067 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10068 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
10069 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
10071 /* Handle load/store insn extension space. */
10073 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
10074 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
10075 && !INSN_RECORDED(arm_insn_r))
10080 /* These insns change registers and memory as well. */
10081 /* SWP or SWPB insn. */
10082 /* Get memory address given by Rn. */
10083 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10084 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
10085 /* SWP insn? Swaps a word. */
10086 if (8 == arm_insn_r->opcode)
10088 record_buf_mem[0] = 4;
10092 /* SWPB insn, swaps only a byte. */
10093 record_buf_mem[0] = 1;
10095 record_buf_mem[1] = u_regval;
10096 arm_insn_r->mem_rec_count = 1;
10097 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10098 arm_insn_r->reg_rec_count = 1;
10100 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10103 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10106 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10109 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10110 record_buf[1] = record_buf[0] + 1;
10111 arm_insn_r->reg_rec_count = 2;
10113 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10116 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10119 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
10121 /* LDRH, LDRSB, LDRSH. */
10122 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10123 arm_insn_r->reg_rec_count = 1;
10128 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
10129 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
10130 && !INSN_RECORDED(arm_insn_r))
10133 /* Handle coprocessor insn extension space. */
10136 /* To be done for ARMv5 and later; as of now we return -1. */
10140 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10141 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10146 /* Handling opcode 000 insns. */
10149 arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
10151 struct regcache *reg_cache = arm_insn_r->regcache;
10152 uint32_t record_buf[8], record_buf_mem[8];
10153 ULONGEST u_regval[2] = {0};
10155 uint32_t reg_src1 = 0;
10156 uint32_t opcode1 = 0;
10158 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10159 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10160 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10162 if (!((opcode1 & 0x19) == 0x10))
10164 /* Data-processing (register) and Data-processing (register-shifted register). */
10166 /* In each of the 11 shifter operand modes, the insn modifies the destination
10167 register, which is specified by bits 12-15. */
10168 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10169 record_buf[1] = ARM_PS_REGNUM;
10170 arm_insn_r->reg_rec_count = 2;
10172 else if ((arm_insn_r->decode < 8) && ((opcode1 & 0x19) == 0x10))
10174 /* Miscellaneous instructions */
10176 if (3 == arm_insn_r->decode && 0x12 == opcode1
10177 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10179 /* Handle BLX, branch and link/exchange. */
10180 if (9 == arm_insn_r->opcode)
10182 /* The branch state is chosen by setting the T bit of CPSR from bit[0]
10183 of Rm, and R14 stores the return address. */
10184 record_buf[0] = ARM_PS_REGNUM;
10185 record_buf[1] = ARM_LR_REGNUM;
10186 arm_insn_r->reg_rec_count = 2;
10189 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
10191 /* Handle enhanced software breakpoint insn, BKPT. */
10192 /* CPSR is changed so that execution continues in ARM state, with normal
10193 interrupts disabled, entering abort mode. */
10194 /* The PC is set according to the high vector configuration. */
10195 /* If the user hits the breakpoint and then types reverse, we need to
10196 go back with the previous CPSR and Program Counter. */
10198 record_buf[0] = ARM_PS_REGNUM;
10199 record_buf[1] = ARM_LR_REGNUM;
10200 arm_insn_r->reg_rec_count = 2;
10202 /* Save SPSR also; how? */
10205 else if (1 == arm_insn_r->decode && 0x12 == opcode1
10206 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10208 /* Handle BX, branch and exchange. */
10209 /* The branch state is chosen by setting the T bit of CPSR from bit[0] of Rm. */
10210 record_buf[0] = ARM_PS_REGNUM;
10211 arm_insn_r->reg_rec_count = 1;
10213 else if (1 == arm_insn_r->decode && 0x16 == opcode1
10214 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
10215 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
10217 /* Count leading zeros: CLZ. */
10218 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10219 arm_insn_r->reg_rec_count = 1;
10221 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10222 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10223 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
10224 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0))
10226 /* Handle MRS insn. */
10227 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10228 arm_insn_r->reg_rec_count = 1;
10231 else if (9 == arm_insn_r->decode && opcode1 < 0x10)
10233 /* Multiply and multiply-accumulate */
10235 /* Handle multiply instructions. */
10236 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
10237 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
10239 /* Handle MLA and MUL. */
10240 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10241 record_buf[1] = ARM_PS_REGNUM;
10242 arm_insn_r->reg_rec_count = 2;
10244 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10246 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
10247 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10248 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10249 record_buf[2] = ARM_PS_REGNUM;
10250 arm_insn_r->reg_rec_count = 3;
10253 else if (9 == arm_insn_r->decode && opcode1 > 0x10)
10255 /* Synchronization primitives */
10257 /* Handling SWP, SWPB. */
10258 /* These insns change registers and memory as well. */
10259 /* SWP or SWPB insn. */
10261 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10262 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10263 /* SWP insn? Swaps a word. */
10264 if (8 == arm_insn_r->opcode)
10266 record_buf_mem[0] = 4;
10270 /* SWPB insn, swaps only a byte. */
10271 record_buf_mem[0] = 1;
10273 record_buf_mem[1] = u_regval[0];
10274 arm_insn_r->mem_rec_count = 1;
10275 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10276 arm_insn_r->reg_rec_count = 1;
10278 else if (11 == arm_insn_r->decode || 13 == arm_insn_r->decode
10279 || 15 == arm_insn_r->decode)
10281 if ((opcode1 & 0x12) == 2)
10283 /* Extra load/store (unprivileged) */
10288 /* Extra load/store */
10289 switch (bits (arm_insn_r->arm_insn, 5, 6))
10292 if ((opcode1 & 0x05) == 0x0 || (opcode1 & 0x05) == 0x4)
10294 /* STRH (register), STRH (immediate) */
10295 arm_record_strx (arm_insn_r, &record_buf[0],
10296 &record_buf_mem[0], ARM_RECORD_STRH);
10298 else if ((opcode1 & 0x05) == 0x1)
10300 /* LDRH (register) */
10301 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10302 arm_insn_r->reg_rec_count = 1;
10304 if (bit (arm_insn_r->arm_insn, 21))
10306 /* Write back to Rn. */
10307 record_buf[arm_insn_r->reg_rec_count++]
10308 = bits (arm_insn_r->arm_insn, 16, 19);
10311 else if ((opcode1 & 0x05) == 0x5)
10313 /* LDRH (immediate), LDRH (literal) */
10314 int rn = bits (arm_insn_r->arm_insn, 16, 19);
10316 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10317 arm_insn_r->reg_rec_count = 1;
10321 /* LDRH (immediate) */
10322 if (bit (arm_insn_r->arm_insn, 21))
10324 /* Write back to Rn. */
10325 record_buf[arm_insn_r->reg_rec_count++] = rn;
10333 if ((opcode1 & 0x05) == 0x0)
10335 /* LDRD (register) */
10336 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10337 record_buf[1] = record_buf[0] + 1;
10338 arm_insn_r->reg_rec_count = 2;
10340 if (bit (arm_insn_r->arm_insn, 21))
10342 /* Write back to Rn. */
10343 record_buf[arm_insn_r->reg_rec_count++]
10344 = bits (arm_insn_r->arm_insn, 16, 19);
10347 else if ((opcode1 & 0x05) == 0x1)
10349 /* LDRSB (register) */
10350 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10351 arm_insn_r->reg_rec_count = 1;
10353 if (bit (arm_insn_r->arm_insn, 21))
10355 /* Write back to Rn. */
10356 record_buf[arm_insn_r->reg_rec_count++]
10357 = bits (arm_insn_r->arm_insn, 16, 19);
10360 else if ((opcode1 & 0x05) == 0x4 || (opcode1 & 0x05) == 0x5)
10362 /* LDRD (immediate), LDRD (literal), LDRSB (immediate), LDRSB (literal). */
10364 int rn = bits (arm_insn_r->arm_insn, 16, 19);
10366 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10367 arm_insn_r->reg_rec_count = 1;
10371 /* LDRD (immediate), LDRSB (immediate) */
10372 if (bit (arm_insn_r->arm_insn, 21))
10374 /* Write back to Rn. */
10375 record_buf[arm_insn_r->reg_rec_count++] = rn;
10383 if ((opcode1 & 0x05) == 0x0)
10385 /* STRD (register) */
10386 arm_record_strx (arm_insn_r, &record_buf[0],
10387 &record_buf_mem[0], ARM_RECORD_STRD);
10389 else if ((opcode1 & 0x05) == 0x1)
10391 /* LDRSH (register) */
10392 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10393 arm_insn_r->reg_rec_count = 1;
10395 if (bit (arm_insn_r->arm_insn, 21))
10397 /* Write back to Rn. */
10398 record_buf[arm_insn_r->reg_rec_count++]
10399 = bits (arm_insn_r->arm_insn, 16, 19);
10402 else if ((opcode1 & 0x05) == 0x4)
10404 /* STRD (immediate) */
10405 arm_record_strx (arm_insn_r, &record_buf[0],
10406 &record_buf_mem[0], ARM_RECORD_STRD);
10408 else if ((opcode1 & 0x05) == 0x5)
10410 /* LDRSH (immediate), LDRSH (literal) */
10411 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10412 arm_insn_r->reg_rec_count = 1;
10414 if (bit (arm_insn_r->arm_insn, 21))
10416 /* Write back to Rn. */
10417 record_buf[arm_insn_r->reg_rec_count++]
10418 = bits (arm_insn_r->arm_insn, 16, 19);
10434 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10435 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10439 /* Handling opcode 001 insns. */
10442 arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
10444 uint32_t record_buf[8], record_buf_mem[8];
10446 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10447 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10449 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10450 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
10451 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10454 /* Handle MSR insn. */
10455 if (9 == arm_insn_r->opcode)
10457 /* CPSR is going to be changed. */
10458 record_buf[0] = ARM_PS_REGNUM;
10459 arm_insn_r->reg_rec_count = 1;
10463 /* SPSR is going to be changed. */
10466 else if (arm_insn_r->opcode <= 15)
10468 /* Normal data processing insns. */
10469 /* In each of the 11 shifter operand modes, the insn modifies the destination
10470 register, which is specified by bits 12-15. */
10471 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10472 record_buf[1] = ARM_PS_REGNUM;
10473 arm_insn_r->reg_rec_count = 2;
10480 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10481 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10486 arm_record_media (insn_decode_record *arm_insn_r)
10488 uint32_t record_buf[8];
10490 switch (bits (arm_insn_r->arm_insn, 22, 24))
10493 /* Parallel addition and subtraction, signed */
10495 /* Parallel addition and subtraction, unsigned */
10498 /* Packing, unpacking, saturation and reversal */
10500 int rd = bits (arm_insn_r->arm_insn, 12, 15);
10502 record_buf[arm_insn_r->reg_rec_count++] = rd;
10508 /* Signed multiplies */
10510 int rd = bits (arm_insn_r->arm_insn, 16, 19);
10511 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 22);
10513 record_buf[arm_insn_r->reg_rec_count++] = rd;
10515 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10516 else if (op1 == 0x4)
10517 record_buf[arm_insn_r->reg_rec_count++]
10518 = bits (arm_insn_r->arm_insn, 12, 15);
10524 if (bit (arm_insn_r->arm_insn, 21)
10525 && bits (arm_insn_r->arm_insn, 5, 6) == 0x2)
10528 record_buf[arm_insn_r->reg_rec_count++]
10529 = bits (arm_insn_r->arm_insn, 12, 15);
10531 else if (bits (arm_insn_r->arm_insn, 20, 21) == 0x0
10532 && bits (arm_insn_r->arm_insn, 5, 7) == 0x0)
10534 /* USAD8 and USADA8 */
10535 record_buf[arm_insn_r->reg_rec_count++]
10536 = bits (arm_insn_r->arm_insn, 16, 19);
10543 if (bits (arm_insn_r->arm_insn, 20, 21) == 0x3
10544 && bits (arm_insn_r->arm_insn, 5, 7) == 0x7)
10546 /* Permanently UNDEFINED */
10551 /* BFC, BFI and UBFX */
10552 record_buf[arm_insn_r->reg_rec_count++]
10553 = bits (arm_insn_r->arm_insn, 12, 15);
10562 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10567 /* Handle ARM mode instructions with opcode 010. */
10570 arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
10572 struct regcache *reg_cache = arm_insn_r->regcache;
10574 uint32_t reg_base, reg_dest;
10575 uint32_t offset_12, tgt_mem_addr;
10576 uint32_t record_buf[8], record_buf_mem[8];
10577 unsigned char wback;
10580 /* Calculate wback. */
10581 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
10582 || (bit (arm_insn_r->arm_insn, 21) == 1);
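/* wback is true when P (bit 24) is 0 (post-indexed) or W (bit 21) is 1
   (write-back requested); in both cases the base register Rn is updated. */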
10584 arm_insn_r->reg_rec_count = 0;
10585 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10587 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10589 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
10592 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10593 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
10595 /* The LDR instruction is capable of branching. If MOV LR, PC
10596 precedes an LDR instruction that has R15 (the PC) as its destination,
10597 the pair emulates a branch-and-link instruction, and hence we need to
10598 save CPSR and PC as well. */
10599 if (ARM_PC_REGNUM == reg_dest)
10600 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10602 /* If wback is true, also save the base register, which is going to be
10605 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10609 /* STR (immediate), STRB (immediate), STRBT and STRT. */
10611 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
10612 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10614 /* Handle bit U. */
10615 if (bit (arm_insn_r->arm_insn, 23))
10617 /* U == 1: Add the offset. */
10618 tgt_mem_addr = (uint32_t) u_regval + offset_12;
10622 /* U == 0: subtract the offset. */
10623 tgt_mem_addr = (uint32_t) u_regval - offset_12;
10626 /* Bit 22 tells us whether the store instruction writes 1 byte or 4 bytes. */
10628 if (bit (arm_insn_r->arm_insn, 22))
10630 /* STRB and STRBT: 1 byte. */
10631 record_buf_mem[0] = 1;
10635 /* STR and STRT: 4 bytes. */
10636 record_buf_mem[0] = 4;
10639 /* Handle bit P. */
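/* P == 1 (offset/pre-indexed): the store writes at base +/- offset;
   P == 0 (post-indexed): the store writes at the original base address. */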
10640 if (bit (arm_insn_r->arm_insn, 24))
10641 record_buf_mem[1] = tgt_mem_addr;
10643 record_buf_mem[1] = (uint32_t) u_regval;
10645 arm_insn_r->mem_rec_count = 1;
10647 /* If wback is true, also save the base register, which is going to be
10650 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10653 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10654 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10658 /* Handling opcode 011 insns. */
10661 arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
10663 struct regcache *reg_cache = arm_insn_r->regcache;
10665 uint32_t shift_imm = 0;
10666 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
10667 uint32_t offset_12 = 0, tgt_mem_addr = 0;
10668 uint32_t record_buf[8], record_buf_mem[8];
10671 ULONGEST u_regval[2];
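/* In this opcode space, bit 4 set selects the media instructions rather
   than a register-offset load/store, so hand those off to the media
   recorder. */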
10673 if (bit (arm_insn_r->arm_insn, 4))
10674 return arm_record_media (arm_insn_r);
10676 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10677 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10679 /* Handle enhanced store insns and the LDRD DSP insn;
10680 the ordering below follows the addressing modes of the store insns
10684 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10686 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10687 /* The LDR insn is capable of branching: if
10688 MOV LR, PC precedes an LDR insn that loads R15 (the PC),
10689 the pair emulates a branch and link insn, and hence we
10690 need to save CPSR and PC as well. */
10691 if (15 != reg_dest)
10693 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10694 arm_insn_r->reg_rec_count = 1;
10698 record_buf[0] = reg_dest;
10699 record_buf[1] = ARM_PS_REGNUM;
10700 arm_insn_r->reg_rec_count = 2;
10705 if (! bits (arm_insn_r->arm_insn, 4, 11))
10707 /* Store insn, register offset and register pre-indexed,
10708 register post-indexed. */
10710 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10712 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10713 regcache_raw_read_unsigned (reg_cache, reg_src1
10715 regcache_raw_read_unsigned (reg_cache, reg_src2
10717 if (15 == reg_src2)
10719 /* If R15 was used as Rn, the value read is the current PC + 8. */
10720 /* Pre-indexed mode doesn't reach here; that would be an illegal insn. */
10721 u_regval[0] = u_regval[0] + 8;
10723 /* Calculate target store address, Rn +/- Rm, register offset. */
10725 if (bit (arm_insn_r->arm_insn, 23))
10727 tgt_mem_addr = u_regval[0] + u_regval[1];
10731 tgt_mem_addr = u_regval[1] - u_regval[0];
10734 switch (arm_insn_r->opcode)
10748 record_buf_mem[0] = 4;
10763 record_buf_mem[0] = 1;
10767 gdb_assert_not_reached ("no decoding pattern found");
10770 record_buf_mem[1] = tgt_mem_addr;
10771 arm_insn_r->mem_rec_count = 1;
10773 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10774 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10775 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10776 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10777 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10778 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10781 /* Rn is going to be changed in pre-indexed mode and
10782 post-indexed mode as well. */
10783 record_buf[0] = reg_src2;
10784 arm_insn_r->reg_rec_count = 1;
10789 /* Store insn, scaled register offset; scaled pre-indexed. */
10790 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
10792 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10794 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10795 /* Get shift_imm. */
10796 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
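/* Bits 5-6 select the shift type (LSL, LSR, ASR, ROR/RRX) applied to Rm,
   and bits 7-11 give the shift amount. */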
10797 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10798 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
10799 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10800 /* Offset_12 used as shift. */
10804 /* Offset_12 used as index. */
10805 offset_12 = u_regval[0] << shift_imm;
10809 offset_12 = (!shift_imm) ? 0 : u_regval[0] >> shift_imm;
10815 if (bit (u_regval[0], 31))
10817 offset_12 = 0xFFFFFFFF;
10826 /* This is arithmetic shift. */
10827 offset_12 = s_word >> shift_imm;
10834 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
10836 /* Get C flag value and shift it by 31. */
10837 offset_12 = (((bit (u_regval[1], 29)) << 31) \
10838 | (u_regval[0]) >> 1);
10842 offset_12 = ((u_regval[0] >> shift_imm)
10844 | (u_regval[0] << (32 - shift_imm)));
10849 gdb_assert_not_reached ("no decoding pattern found");
10853 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10855 if (bit (arm_insn_r->arm_insn, 23))
10857 tgt_mem_addr = u_regval[1] + offset_12;
10861 tgt_mem_addr = u_regval[1] - offset_12;
10864 switch (arm_insn_r->opcode)
10878 record_buf_mem[0] = 4;
10893 record_buf_mem[0] = 1;
10897 gdb_assert_not_reached ("no decoding pattern found");
10900 record_buf_mem[1] = tgt_mem_addr;
10901 arm_insn_r->mem_rec_count = 1;
10903 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10904 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10905 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10906 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10907 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10908 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10911 /* Rn is going to be changed in scaled register pre-indexed
10912 mode and in scaled post-indexed mode. */
10913 record_buf[0] = reg_src2;
10914 arm_insn_r->reg_rec_count = 1;
10919 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10920 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10924 /* Handle ARM mode instructions with opcode 100. */
10927 arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
10929 struct regcache *reg_cache = arm_insn_r->regcache;
10930 uint32_t register_count = 0, register_bits;
10931 uint32_t reg_base, addr_mode;
10932 uint32_t record_buf[24], record_buf_mem[48];
10936 /* Fetch the list of registers. */
10937 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
10938 arm_insn_r->reg_rec_count = 0;
10940 /* Fetch the base register that contains the address we are loading data from or storing data to. */
10942 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10944 /* Calculate wback. */
10945 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
10947 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10949 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
10951 /* Find out which registers are going to be loaded from memory. */
10952 while (register_bits)
10954 if (register_bits & 0x00000001)
10955 record_buf[arm_insn_r->reg_rec_count++] = register_count;
10956 register_bits = register_bits >> 1;
10961 /* If wback is true, also save the base register, which is going to be
10964 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10966 /* Save the CPSR register. */
10967 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10971 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
10973 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
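/* addr_mode (bits 23-24, the U and P bits) selects the addressing mode:
   0 = decrement after, 1 = increment after, 2 = decrement before,
   3 = increment before. */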
10975 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10977 /* Find out how many registers are going to be stored to memory. */
10978 while (register_bits)
10980 if (register_bits & 0x00000001)
10982 register_bits = register_bits >> 1;
10987 /* STMDA (STMED): Decrement after. */
10989 record_buf_mem[1] = (uint32_t) u_regval
10990 - register_count * INT_REGISTER_SIZE + 4;
10992 /* STM (STMIA, STMEA): Increment after. */
10994 record_buf_mem[1] = (uint32_t) u_regval;
10996 /* STMDB (STMFD): Decrement before. */
10998 record_buf_mem[1] = (uint32_t) u_regval
10999 - register_count * INT_REGISTER_SIZE;
11001 /* STMIB (STMFA): Increment before. */
11003 record_buf_mem[1] = (uint32_t) u_regval + INT_REGISTER_SIZE;
11006 gdb_assert_not_reached ("no decoding pattern found");
11010 record_buf_mem[0] = register_count * INT_REGISTER_SIZE;
11011 arm_insn_r->mem_rec_count = 1;
11013 /* If wback is true, also save the base register, which is going to be
11016 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11019 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11020 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11024 /* Handling opcode 101 insns. */
11027 arm_record_b_bl (insn_decode_record *arm_insn_r)
11029 uint32_t record_buf[8];
11031 /* Handle B, BL, BLX(1) insns. */
11032 /* B simply branches so we do nothing here. */
11033 /* Note: BLX(1) doesn't fall here; it falls into the
11034 extension space instead. */
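/* Bit 24 set means BL: the link register is clobbered and must be
   recorded; plain B changes only the PC. */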
11035 if (bit (arm_insn_r->arm_insn, 24))
11037 record_buf[0] = ARM_LR_REGNUM;
11038 arm_insn_r->reg_rec_count = 1;
11041 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11047 arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
11049 printf_unfiltered (_("Process record does not support instruction "
11050 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11051 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11056 /* Record handler for vector data transfer instructions. */
11059 arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
11061 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
11062 uint32_t record_buf[4];
11064 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
11065 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
11066 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
11067 bit_l = bit (arm_insn_r->arm_insn, 20);
11068 bit_c = bit (arm_insn_r->arm_insn, 8);
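/* Bit L (20) gives the transfer direction (set means the destination is an
   ARM core register); bit 8 distinguishes the cp10 forms (VMOV Sn, VMRS,
   VMSR) from the cp11 Advanced SIMD scalar forms (VMOV scalar, VDUP). */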
11070 /* Handle VMOV instruction. */
11071 if (bit_l && bit_c)
11073 record_buf[0] = reg_t;
11074 arm_insn_r->reg_rec_count = 1;
11076 else if (bit_l && !bit_c)
11078 /* Handle VMOV instruction. */
11079 if (bits_a == 0x00)
11081 record_buf[0] = reg_t;
11082 arm_insn_r->reg_rec_count = 1;
11084 /* Handle VMRS instruction. */
11085 else if (bits_a == 0x07)
11088 reg_t = ARM_PS_REGNUM;
11090 record_buf[0] = reg_t;
11091 arm_insn_r->reg_rec_count = 1;
11094 else if (!bit_l && !bit_c)
11096 /* Handle VMOV instruction. */
11097 if (bits_a == 0x00)
11099 record_buf[0] = ARM_D0_REGNUM + reg_v;
11101 arm_insn_r->reg_rec_count = 1;
11103 /* Handle VMSR instruction. */
11104 else if (bits_a == 0x07)
11106 record_buf[0] = ARM_FPSCR_REGNUM;
11107 arm_insn_r->reg_rec_count = 1;
11110 else if (!bit_l && bit_c)
11112 /* Handle VMOV instruction. */
11113 if (!(bits_a & 0x04))
11115 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
11117 arm_insn_r->reg_rec_count = 1;
11119 /* Handle VDUP instruction. */
11122 if (bit (arm_insn_r->arm_insn, 21))
11124 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11125 record_buf[0] = reg_v + ARM_D0_REGNUM;
11126 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
11127 arm_insn_r->reg_rec_count = 2;
11131 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11132 record_buf[0] = reg_v + ARM_D0_REGNUM;
11133 arm_insn_r->reg_rec_count = 1;
11138 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11142 /* Record handler for extension register load/store instructions. */
11145 arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
11147 uint32_t opcode, single_reg;
11148 uint8_t op_vldm_vstm;
11149 uint32_t record_buf[8], record_buf_mem[128];
11150 ULONGEST u_regval = 0;
11152 struct regcache *reg_cache = arm_insn_r->regcache;
11154 opcode = bits (arm_insn_r->arm_insn, 20, 24);
11155 single_reg = !bit (arm_insn_r->arm_insn, 8);
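/* single_reg is set when bit 8 is clear, i.e. the insn operates on
   single-precision S registers rather than double-precision D registers. */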
11156 op_vldm_vstm = opcode & 0x1b;
11158 /* Handle VMOV instructions. */
11159 if ((opcode & 0x1e) == 0x04)
11161 if (bit (arm_insn_r->arm_insn, 20)) /* to_arm_registers bit 20? */
11163 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11164 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11165 arm_insn_r->reg_rec_count = 2;
11169 uint8_t reg_m = bits (arm_insn_r->arm_insn, 0, 3);
11170 uint8_t bit_m = bit (arm_insn_r->arm_insn, 5);
11174 /* The first S register number m is REG_M:M (M is bit 5),
11175 the corresponding D register number is REG_M:M / 2, which is REG_M. */
11177 record_buf[arm_insn_r->reg_rec_count++] = ARM_D0_REGNUM + reg_m;
11178 /* The second S register number is REG_M:M + 1, the
11179 corresponding D register number is (REG_M:M + 1) / 2.
11180 IOW, if bit M is 1, the first and second S registers
11181 are mapped to different D registers, otherwise, they are
11182 in the same D register. */
11185 record_buf[arm_insn_r->reg_rec_count++]
11186 = ARM_D0_REGNUM + reg_m + 1;
11191 record_buf[0] = ((bit_m << 4) + reg_m + ARM_D0_REGNUM);
11192 arm_insn_r->reg_rec_count = 1;
11196 /* Handle VSTM and VPUSH instructions. */
11197 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
11198 || op_vldm_vstm == 0x12)
11200 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
11201 uint32_t memory_index = 0;
11203 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11204 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11205 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11206 imm_off32 = imm_off8 << 2;
11207 memory_count = imm_off8;
11209 if (bit (arm_insn_r->arm_insn, 23))
11210 start_address = u_regval;
11212 start_address = u_regval - imm_off32;
11214 if (bit (arm_insn_r->arm_insn, 21))
11216 record_buf[0] = reg_rn;
11217 arm_insn_r->reg_rec_count = 1;
11220 while (memory_count > 0)
11224 record_buf_mem[memory_index] = 4;
11225 record_buf_mem[memory_index + 1] = start_address;
11226 start_address = start_address + 4;
11227 memory_index = memory_index + 2;
11231 record_buf_mem[memory_index] = 4;
11232 record_buf_mem[memory_index + 1] = start_address;
11233 record_buf_mem[memory_index + 2] = 4;
11234 record_buf_mem[memory_index + 3] = start_address + 4;
11235 start_address = start_address + 8;
11236 memory_index = memory_index + 4;
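/* Each memory record occupies two slots (length, address), so the number
   of records is half of memory_index. */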
11240 arm_insn_r->mem_rec_count = (memory_index >> 1);
11242 /* Handle VLDM instructions. */
11243 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
11244 || op_vldm_vstm == 0x13)
11246 uint32_t reg_count, reg_vd;
11247 uint32_t reg_index = 0;
11248 uint32_t bit_d = bit (arm_insn_r->arm_insn, 22);
11250 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11251 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
11253 /* REG_VD is the first D register number. If the instruction
11254 loads memory to S registers (SINGLE_REG is TRUE), the register
11255 number is (REG_VD << 1 | bit D), so the corresponding D
11256 register number is (REG_VD << 1 | bit D) / 2 = REG_VD. */
11258 reg_vd = reg_vd | (bit_d << 4);
11260 if (bit (arm_insn_r->arm_insn, 21) /* write back */)
11261 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
11263 /* If the instruction loads memory into D registers, REG_COUNT should
11264 be divided by 2, according to the ARM Architecture Reference
11265 Manual. If the instruction loads memory into S registers, divide by
11266 2 as well, because two S registers map onto each D register. */
11267 reg_count = reg_count / 2;
11268 if (single_reg && bit_d)
11270 /* Increase the register count if the S register list starts at
11271 an odd-numbered register (bit D is one). */
11275 while (reg_count > 0)
11277 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
11280 arm_insn_r->reg_rec_count = reg_index;
11282 /* VSTR Vector store register. */
11283 else if ((opcode & 0x13) == 0x10)
11285 uint32_t start_address, reg_rn, imm_off32, imm_off8;
11286 uint32_t memory_index = 0;
11288 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11289 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11290 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11291 imm_off32 = imm_off8 << 2;
11293 if (bit (arm_insn_r->arm_insn, 23))
11294 start_address = u_regval + imm_off32;
11296 start_address = u_regval - imm_off32;
11300 record_buf_mem[memory_index] = 4;
11301 record_buf_mem[memory_index + 1] = start_address;
11302 arm_insn_r->mem_rec_count = 1;
11306 record_buf_mem[memory_index] = 4;
11307 record_buf_mem[memory_index + 1] = start_address;
11308 record_buf_mem[memory_index + 2] = 4;
11309 record_buf_mem[memory_index + 3] = start_address + 4;
11310 arm_insn_r->mem_rec_count = 2;
11313 /* VLDR Vector load register. */
11314 else if ((opcode & 0x13) == 0x11)
11316 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11320 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
11321 record_buf[0] = ARM_D0_REGNUM + reg_vd;
11325 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
11326 /* Record register D rather than pseudo register S. */
11327 record_buf[0] = ARM_D0_REGNUM + reg_vd / 2;
11329 arm_insn_r->reg_rec_count = 1;
11332 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11333 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11337 /* Record handler for arm/thumb mode VFP data processing instructions. */
11340 arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
11342 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
11343 uint32_t record_buf[4];
11344 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
11345 enum insn_types curr_insn_type = INSN_INV;
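/* The switch at the end uses this classification: INSN_T0 records a pair
   of D registers, INSN_T1 a single D register, INSN_T2 a single-precision
   destination, and INSN_T3 only FPSCR (the compare instructions). */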
11347 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11348 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
11349 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
11350 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
11351 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
11352 bit_d = bit (arm_insn_r->arm_insn, 22);
11353 /* Mask off the "D" bit. */
11354 opc1 = opc1 & ~0x04;
11356 /* Handle VMLA, VMLS. */
11359 if (bit (arm_insn_r->arm_insn, 10))
11361 if (bit (arm_insn_r->arm_insn, 6))
11362 curr_insn_type = INSN_T0;
11364 curr_insn_type = INSN_T1;
11369 curr_insn_type = INSN_T1;
11371 curr_insn_type = INSN_T2;
11374 /* Handle VNMLA, VNMLS, VNMUL. */
11375 else if (opc1 == 0x01)
11378 curr_insn_type = INSN_T1;
11380 curr_insn_type = INSN_T2;
11383 else if (opc1 == 0x02 && !(opc3 & 0x01))
11385 if (bit (arm_insn_r->arm_insn, 10))
11387 if (bit (arm_insn_r->arm_insn, 6))
11388 curr_insn_type = INSN_T0;
11390 curr_insn_type = INSN_T1;
11395 curr_insn_type = INSN_T1;
11397 curr_insn_type = INSN_T2;
11400 /* Handle VADD, VSUB. */
11401 else if (opc1 == 0x03)
11403 if (!bit (arm_insn_r->arm_insn, 9))
11405 if (bit (arm_insn_r->arm_insn, 6))
11406 curr_insn_type = INSN_T0;
11408 curr_insn_type = INSN_T1;
11413 curr_insn_type = INSN_T1;
11415 curr_insn_type = INSN_T2;
11419 else if (opc1 == 0x08)
11422 curr_insn_type = INSN_T1;
11424 curr_insn_type = INSN_T2;
11426 /* Handle all other vfp data processing instructions. */
11427 else if (opc1 == 0x0b)
11430 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
11432 if (bit (arm_insn_r->arm_insn, 4))
11434 if (bit (arm_insn_r->arm_insn, 6))
11435 curr_insn_type = INSN_T0;
11437 curr_insn_type = INSN_T1;
11442 curr_insn_type = INSN_T1;
11444 curr_insn_type = INSN_T2;
11447 /* Handle VNEG and VABS. */
11448 else if ((opc2 == 0x01 && opc3 == 0x01)
11449 || (opc2 == 0x00 && opc3 == 0x03))
11451 if (!bit (arm_insn_r->arm_insn, 11))
11453 if (bit (arm_insn_r->arm_insn, 6))
11454 curr_insn_type = INSN_T0;
11456 curr_insn_type = INSN_T1;
11461 curr_insn_type = INSN_T1;
11463 curr_insn_type = INSN_T2;
11466 /* Handle VSQRT. */
11467 else if (opc2 == 0x01 && opc3 == 0x03)
11470 curr_insn_type = INSN_T1;
11472 curr_insn_type = INSN_T2;
11475 else if (opc2 == 0x07 && opc3 == 0x03)
11478 curr_insn_type = INSN_T1;
11480 curr_insn_type = INSN_T2;
11482 else if (opc3 & 0x01)
11485 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
11487 if (!bit (arm_insn_r->arm_insn, 18))
11488 curr_insn_type = INSN_T2;
11492 curr_insn_type = INSN_T1;
11494 curr_insn_type = INSN_T2;
11498 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
11501 curr_insn_type = INSN_T1;
11503 curr_insn_type = INSN_T2;
11505 /* Handle VCVTB, VCVTT. */
11506 else if ((opc2 & 0x0e) == 0x02)
11507 curr_insn_type = INSN_T2;
11508 /* Handle VCMP, VCMPE. */
11509 else if ((opc2 & 0x0e) == 0x04)
11510 curr_insn_type = INSN_T3;
11514 switch (curr_insn_type)
11517 reg_vd = reg_vd | (bit_d << 4);
11518 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11519 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
11520 arm_insn_r->reg_rec_count = 2;
11524 reg_vd = reg_vd | (bit_d << 4);
11525 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11526 arm_insn_r->reg_rec_count = 1;
11530 reg_vd = (reg_vd << 1) | bit_d;
11531 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11532 arm_insn_r->reg_rec_count = 1;
11536 record_buf[0] = ARM_FPSCR_REGNUM;
11537 arm_insn_r->reg_rec_count = 1;
11541 gdb_assert_not_reached ("no decoding pattern found");
11545 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11549 /* Handling opcode 110 insns. */
11552 arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
11554 uint32_t op1, op1_ebit, coproc;
11556 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11557 op1 = bits (arm_insn_r->arm_insn, 20, 25);
11558 op1_ebit = bit (arm_insn_r->arm_insn, 20);
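/* Coprocessors 10 and 11 ((coproc & 0x0e) == 0x0a) are the VFP/Advanced
   SIMD register banks; anything else is treated as a generic coprocessor. */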
11560 if ((coproc & 0x0e) == 0x0a)
11562 /* Handle extension register ld/st instructions. */
11564 return arm_record_exreg_ld_st_insn (arm_insn_r);
11566 /* 64-bit transfers between arm core and extension registers. */
11567 if ((op1 & 0x3e) == 0x04)
11568 return arm_record_exreg_ld_st_insn (arm_insn_r);
11572 /* Handle coprocessor ld/st instructions. */
11577 return arm_record_unsupported_insn (arm_insn_r);
11580 return arm_record_unsupported_insn (arm_insn_r);
11583 /* Move to coprocessor from two arm core registers. */
11585 return arm_record_unsupported_insn (arm_insn_r);
11587 /* Move to two arm core registers from coprocessor. */
11592 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
11593 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
11594 arm_insn_r->reg_rec_count = 2;
11596 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
11600 return arm_record_unsupported_insn (arm_insn_r);
11603 /* Handling opcode 111 insns. */
11606 arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
11608 uint32_t op, op1_ebit, coproc, bits_24_25;
11609 struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
11610 struct regcache *reg_cache = arm_insn_r->regcache;
11612 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
11613 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11614 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11615 op = bit (arm_insn_r->arm_insn, 4);
11616 bits_24_25 = bits (arm_insn_r->arm_insn, 24, 25);
11618 /* Handle arm SWI/SVC system call instructions. */
11619 if (bits_24_25 == 0x3)
11621 if (tdep->arm_syscall_record != NULL)
11623 ULONGEST svc_operand, svc_number;
11625 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
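/* Under OABI the syscall number is encoded in the SVC immediate (biased by
   0x900000); under EABI the immediate is zero and the number is passed in
   r7. */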
11627 if (svc_operand) /* OABI. */
11628 svc_number = svc_operand - 0x900000;
11630 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
11632 return tdep->arm_syscall_record (reg_cache, svc_number);
11636 printf_unfiltered (_("no syscall record support\n"));
11640 else if (bits_24_25 == 0x02)
11644 if ((coproc & 0x0e) == 0x0a)
11646 /* 8, 16, and 32-bit transfer */
11647 return arm_record_vdata_transfer_insn (arm_insn_r);
11654 uint32_t record_buf[1];
11656 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11657 if (record_buf[0] == 15)
11658 record_buf[0] = ARM_PS_REGNUM;
11660 arm_insn_r->reg_rec_count = 1;
11661 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
11674 if ((coproc & 0x0e) == 0x0a)
11676 /* VFP data-processing instructions. */
11677 return arm_record_vfp_data_proc_insn (arm_insn_r);
11688 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 25);
11692 if ((coproc & 0x0e) != 0x0a)
11698 else if (op1 == 4 || op1 == 5)
11700 if ((coproc & 0x0e) == 0x0a)
11702 /* 64-bit transfers between ARM core and extension registers. */
11711 else if (op1 == 0 || op1 == 1)
11718 if ((coproc & 0x0e) == 0x0a)
11720 /* Extension register load/store */
11724 /* STC, STC2, LDC, LDC2 */
11733 /* Handling opcode 000 insns. */
11736 thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
11738 uint32_t record_buf[8];
11739 uint32_t reg_src1 = 0;
11741 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11743 record_buf[0] = ARM_PS_REGNUM;
11744 record_buf[1] = reg_src1;
11745 thumb_insn_r->reg_rec_count = 2;
11747 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11753 /* Handling opcode 001 insns. */
11756 thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
11758 uint32_t record_buf[8];
11759 uint32_t reg_src1 = 0;
11761 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11763 record_buf[0] = ARM_PS_REGNUM;
11764 record_buf[1] = reg_src1;
11765 thumb_insn_r->reg_rec_count = 2;
11767 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11772 /* Handling opcode 010 insns. */
11775 thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
11777 struct regcache *reg_cache = thumb_insn_r->regcache;
11778 uint32_t record_buf[8], record_buf_mem[8];
11780 uint32_t reg_src1 = 0, reg_src2 = 0;
11781 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
11783 ULONGEST u_regval[2] = {0};
11785 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
11787 if (bit (thumb_insn_r->arm_insn, 12))
11789 /* Handle load/store register offset. */
11790 uint32_t opB = bits (thumb_insn_r->arm_insn, 9, 11);
11792 if (in_inclusive_range (opB, 4U, 7U))
11794 /* LDR(2), LDRB(2), LDRH(2), LDRSB, LDRSH. */
11795 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11796 record_buf[0] = reg_src1;
11797 thumb_insn_r->reg_rec_count = 1;
11799 else if (in_inclusive_range (opB, 0U, 2U))
11801 /* STR(2), STRB(2), STRH(2). */
11802 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11803 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
11804 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11805 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11807 record_buf_mem[0] = 4; /* STR (2). */
11809 record_buf_mem[0] = 1; /* STRB (2). */
11811 record_buf_mem[0] = 2; /* STRH (2). */
11812 record_buf_mem[1] = u_regval[0] + u_regval[1];
11813 thumb_insn_r->mem_rec_count = 1;
11816 else if (bit (thumb_insn_r->arm_insn, 11))
11818 /* Handle load from literal pool. */
11820 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11821 record_buf[0] = reg_src1;
11822 thumb_insn_r->reg_rec_count = 1;
11826 /* Special data instructions and branch and exchange */
11827 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
11828 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
11829 if ((3 == opcode2) && (!opcode3))
11831 /* Branch with exchange. */
11832 record_buf[0] = ARM_PS_REGNUM;
11833 thumb_insn_r->reg_rec_count = 1;
11837 /* Format 8; special data processing insns. */
11838 record_buf[0] = ARM_PS_REGNUM;
11839 record_buf[1] = (bit (thumb_insn_r->arm_insn, 7) << 3
11840 | bits (thumb_insn_r->arm_insn, 0, 2));
11841 thumb_insn_r->reg_rec_count = 2;
11846 /* Format 5; data processing insns. */
11847 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11848 if (bit (thumb_insn_r->arm_insn, 7))
11850 reg_src1 = reg_src1 + 8;
11852 record_buf[0] = ARM_PS_REGNUM;
11853 record_buf[1] = reg_src1;
11854 thumb_insn_r->reg_rec_count = 2;
11857 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11858 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11864 /* Handling opcode 001 insns. */
11867 thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
11869 struct regcache *reg_cache = thumb_insn_r->regcache;
11870 uint32_t record_buf[8], record_buf_mem[8];
11872 uint32_t reg_src1 = 0;
11873 uint32_t opcode = 0, immed_5 = 0;
11875 ULONGEST u_regval = 0;
11877 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11882 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11883 record_buf[0] = reg_src1;
11884 thumb_insn_r->reg_rec_count = 1;
11889 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11890 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11891 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11892 record_buf_mem[0] = 4;
11893 record_buf_mem[1] = u_regval + (immed_5 * 4);
11894 thumb_insn_r->mem_rec_count = 1;
11897 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11898 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11904 /* Handling opcode 100 insns. */
11907 thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
11909 struct regcache *reg_cache = thumb_insn_r->regcache;
11910 uint32_t record_buf[8], record_buf_mem[8];
11912 uint32_t reg_src1 = 0;
11913 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
11915 ULONGEST u_regval = 0;
11917 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11922 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11923 record_buf[0] = reg_src1;
11924 thumb_insn_r->reg_rec_count = 1;
11926 else if (1 == opcode)
11929 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11930 record_buf[0] = reg_src1;
11931 thumb_insn_r->reg_rec_count = 1;
11933 else if (2 == opcode)
11936 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
11937 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
11938 record_buf_mem[0] = 4;
11939 record_buf_mem[1] = u_regval + (immed_8 * 4);
11940 thumb_insn_r->mem_rec_count = 1;
11942 else if (0 == opcode)
11945 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11946 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11947 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11948 record_buf_mem[0] = 2;
11949 record_buf_mem[1] = u_regval + (immed_5 * 2);
11950 thumb_insn_r->mem_rec_count = 1;
11953 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11954 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11960 /* Handling opcode 101 insns. */
11963 thumb_record_misc (insn_decode_record *thumb_insn_r)
11965 struct regcache *reg_cache = thumb_insn_r->regcache;
11967 uint32_t opcode = 0;
11968 uint32_t register_bits = 0, register_count = 0;
11969 uint32_t index = 0, start_address = 0;
11970 uint32_t record_buf[24], record_buf_mem[48];
11973 ULONGEST u_regval = 0;
11975 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11977 if (opcode == 0 || opcode == 1)
11979 /* ADR and ADD (SP plus immediate) */
11981 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11982 record_buf[0] = reg_src1;
11983 thumb_insn_r->reg_rec_count = 1;
11987 /* Miscellaneous 16-bit instructions */
11988 uint32_t opcode2 = bits (thumb_insn_r->arm_insn, 8, 11);
11993 /* SETEND and CPS */
11996 /* ADD/SUB (SP plus immediate) */
11997 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11998 record_buf[0] = ARM_SP_REGNUM;
11999 thumb_insn_r->reg_rec_count = 1;
12001 case 1: /* fall through */
12002 case 3: /* fall through */
12003 case 9: /* fall through */
12008 /* SXTH, SXTB, UXTH, UXTB */
12009 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
12010 thumb_insn_r->reg_rec_count = 1;
12012 case 4: /* fall through */
12015 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12016 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12017 while (register_bits)
12019 if (register_bits & 0x00000001)
12021 register_bits = register_bits >> 1;
12023 start_address = u_regval - \
12024 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
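/* PUSH stores the listed registers below the current SP; if bit 8 is set,
   LR is pushed as well, hence the extra slot in the size calculation. */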
12025 thumb_insn_r->mem_rec_count = register_count;
12026 while (register_count)
12028 record_buf_mem[(register_count * 2) - 1] = start_address;
12029 record_buf_mem[(register_count * 2) - 2] = 4;
12030 start_address = start_address + 4;
12033 record_buf[0] = ARM_SP_REGNUM;
12034 thumb_insn_r->reg_rec_count = 1;
12037 /* REV, REV16, REVSH */
12038 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
12039 thumb_insn_r->reg_rec_count = 1;
12041 case 12: /* fall through */
12044 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12045 while (register_bits)
12047 if (register_bits & 0x00000001)
12048 record_buf[index++] = register_count;
12049 register_bits = register_bits >> 1;
12052 record_buf[index++] = ARM_PS_REGNUM;
12053 record_buf[index++] = ARM_SP_REGNUM;
12054 thumb_insn_r->reg_rec_count = index;
12058 /* Handle enhanced software breakpoint insn, BKPT. */
12059 /* CPSR is changed so that execution continues in ARM state with normal
12060 interrupts disabled, entering abort mode. */
12061 /* The PC is set according to the high-vector configuration. */
12062 /* If the user hits the breakpoint and then reverses, we need to go back to the
12063 previous CPSR and program counter. */
12064 record_buf[0] = ARM_PS_REGNUM;
12065 record_buf[1] = ARM_LR_REGNUM;
12066 thumb_insn_r->reg_rec_count = 2;
12067 /* We need to save SPSR value, which is not yet done. */
12068 printf_unfiltered (_("Process record does not support instruction "
12069 "0x%0x at address %s.\n"),
12070 thumb_insn_r->arm_insn,
12071 paddress (thumb_insn_r->gdbarch,
12072 thumb_insn_r->this_addr));
12076 /* If-Then, and hints */
12083 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12084 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12090 /* Handling opcode 110 insns. */
12093 thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
12095 struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
12096 struct regcache *reg_cache = thumb_insn_r->regcache;
12098 uint32_t ret = 0; /* Function return value: -1: record failure; 0: success. */
12099 uint32_t reg_src1 = 0;
12100 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
12101 uint32_t index = 0, start_address = 0;
12102 uint32_t record_buf[24], record_buf_mem[48];
12104 ULONGEST u_regval = 0;
12106 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12107 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
12113 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12115 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12116 while (register_bits)
12118 if (register_bits & 0x00000001)
12119 record_buf[index++] = register_count;
12120 register_bits = register_bits >> 1;
12123 record_buf[index++] = reg_src1;
12124 thumb_insn_r->reg_rec_count = index;
12126 else if (0 == opcode2)
12128 /* Handle STMIA. */
12129 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12131 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12132 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12133 while (register_bits)
12135 if (register_bits & 0x00000001)
12137 register_bits = register_bits >> 1;
12139 start_address = u_regval;
12140 thumb_insn_r->mem_rec_count = register_count;
12141 while (register_count)
12143 record_buf_mem[(register_count * 2) - 1] = start_address;
12144 record_buf_mem[(register_count * 2) - 2] = 4;
12145 start_address = start_address + 4;
12149 else if (0x1F == opcode1)
12151 /* Handle arm syscall insn. */
12152 if (tdep->arm_syscall_record != NULL)
12154 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
12155 ret = tdep->arm_syscall_record (reg_cache, u_regval);
12159 printf_unfiltered (_("no syscall record support\n"));
12164 /* B (1), conditional branch is automatically taken care of in process_record,
12165 as PC is saved there. */
12167 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12168 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12174 /* Handling opcode 111 insns. */
12177 thumb_record_branch (insn_decode_record *thumb_insn_r)
12179 uint32_t record_buf[8];
12180 uint32_t bits_h = 0;
12182 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
12184 if (2 == bits_h || 3 == bits_h)
12187 record_buf[0] = ARM_LR_REGNUM;
12188 thumb_insn_r->reg_rec_count = 1;
12190 else if (1 == bits_h)
12193 record_buf[0] = ARM_PS_REGNUM;
12194 record_buf[1] = ARM_LR_REGNUM;
12195 thumb_insn_r->reg_rec_count = 2;
12198 /* B(2) is automatically taken care of in process_record, as PC is saved there. */
12201 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12206 /* Handler for thumb2 load/store multiple instructions. */
12209 thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
12211 struct regcache *reg_cache = thumb2_insn_r->regcache;
12213 uint32_t reg_rn, op;
12214 uint32_t register_bits = 0, register_count = 0;
12215 uint32_t index = 0, start_address = 0;
12216 uint32_t record_buf[24], record_buf_mem[48];
12218 ULONGEST u_regval = 0;
12220 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12221 op = bits (thumb2_insn_r->arm_insn, 23, 24);
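/* op (bits 23-24): values 0 and 3 are the SRS/RFE encodings, values 1 and
   2 the LDM/STM variants. */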
12223 if (0 == op || 3 == op)
12225 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12227 /* Handle RFE instruction. */
12228 record_buf[0] = ARM_PS_REGNUM;
12229 thumb2_insn_r->reg_rec_count = 1;
12233 /* Handle SRS instruction after reading banked SP. */
12234 return arm_record_unsupported_insn (thumb2_insn_r);
12237 else if (1 == op || 2 == op)
12239 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12241 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
12242 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12243 while (register_bits)
12245 if (register_bits & 0x00000001)
12246 record_buf[index++] = register_count;
12249 register_bits = register_bits >> 1;
12251 record_buf[index++] = reg_rn;
12252 record_buf[index++] = ARM_PS_REGNUM;
12253 thumb2_insn_r->reg_rec_count = index;
12257 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
12258 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12259 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12260 while (register_bits)
12262 if (register_bits & 0x00000001)
12265 register_bits = register_bits >> 1;
12270 /* Start address calculation for STM/STMIA/STMEA (increment after). */
12271 start_address = u_regval;
12275 /* Start address calculation for STMDB/STMFD (decrement before). */
12276 start_address = u_regval - register_count * 4;
12279 thumb2_insn_r->mem_rec_count = register_count;
12280 while (register_count)
12282 record_buf_mem[register_count * 2 - 1] = start_address;
12283 record_buf_mem[register_count * 2 - 2] = 4;
12284 start_address = start_address + 4;
12287 record_buf[0] = reg_rn;
12288 record_buf[1] = ARM_PS_REGNUM;
12289 thumb2_insn_r->reg_rec_count = 2;
12293 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12295 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12297 return ARM_RECORD_SUCCESS;
12300 /* Handler for thumb2 load/store (dual/exclusive) and table branch
12304 thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
12306 struct regcache *reg_cache = thumb2_insn_r->regcache;
12308 uint32_t reg_rd, reg_rn, offset_imm;
12309 uint32_t reg_dest1, reg_dest2;
12310 uint32_t address, offset_addr;
12311 uint32_t record_buf[8], record_buf_mem[8];
12312 uint32_t op1, op2, op3;
12314 ULONGEST u_regval[2];
12316 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
12317 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
12318 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
12320 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12322 if (!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
12324 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
12325 record_buf[0] = reg_dest1;
12326 record_buf[1] = ARM_PS_REGNUM;
12327 thumb2_insn_r->reg_rec_count = 2;
12330 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
12332 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12333 record_buf[2] = reg_dest2;
12334 thumb2_insn_r->reg_rec_count = 3;
12339 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12340 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12342 if (0 == op1 && 0 == op2)
12344 /* Handle STREX. */
12345 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12346 address = u_regval[0] + (offset_imm * 4);
12347 record_buf_mem[0] = 4;
12348 record_buf_mem[1] = address;
12349 thumb2_insn_r->mem_rec_count = 1;
12350 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12351 record_buf[0] = reg_rd;
12352 thumb2_insn_r->reg_rec_count = 1;
12354 else if (1 == op1 && 0 == op2)
12356 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12357 record_buf[0] = reg_rd;
12358 thumb2_insn_r->reg_rec_count = 1;
12359 address = u_regval[0];
12360 record_buf_mem[1] = address;
12364 /* Handle STREXB. */
12365 record_buf_mem[0] = 1;
12366 thumb2_insn_r->mem_rec_count = 1;
12370 /* Handle STREXH. */
12371 record_buf_mem[0] = 2;
12372 thumb2_insn_r->mem_rec_count = 1;
12376 /* Handle STREXD. */
12377 address = u_regval[0];
12378 record_buf_mem[0] = 4;
12379 record_buf_mem[2] = 4;
12380 record_buf_mem[3] = address + 4;
12381 thumb2_insn_r->mem_rec_count = 2;
12386 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12388 if (bit (thumb2_insn_r->arm_insn, 24))
12390 if (bit (thumb2_insn_r->arm_insn, 23))
12391 offset_addr = u_regval[0] + (offset_imm * 4);
12393 offset_addr = u_regval[0] - (offset_imm * 4);
12395 address = offset_addr;
12398 address = u_regval[0];
12400 record_buf_mem[0] = 4;
12401 record_buf_mem[1] = address;
12402 record_buf_mem[2] = 4;
12403 record_buf_mem[3] = address + 4;
12404 thumb2_insn_r->mem_rec_count = 2;
12405 record_buf[0] = reg_rn;
12406 thumb2_insn_r->reg_rec_count = 1;
12410 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12412 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12414 return ARM_RECORD_SUCCESS;
12417 /* Handler for thumb2 data processing (shift register and modified immediate)
12421 thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
12423 uint32_t reg_rd, op;
12424 uint32_t record_buf[8];
12426 op = bits (thumb2_insn_r->arm_insn, 21, 24);
12427 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12429 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
12431 record_buf[0] = ARM_PS_REGNUM;
12432 thumb2_insn_r->reg_rec_count = 1;
12436 record_buf[0] = reg_rd;
12437 record_buf[1] = ARM_PS_REGNUM;
12438 thumb2_insn_r->reg_rec_count = 2;
12441 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12443 return ARM_RECORD_SUCCESS;
12446 /* Generic handler for thumb2 instructions which effect destination and PS
12450 thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
12453 uint32_t record_buf[8];
12455 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12457 record_buf[0] = reg_rd;
12458 record_buf[1] = ARM_PS_REGNUM;
12459 thumb2_insn_r->reg_rec_count = 2;
12461 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12463 return ARM_RECORD_SUCCESS;
12466 /* Handler for thumb2 branch and miscellaneous control instructions. */
12469 thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
12471 uint32_t op, op1, op2;
12472 uint32_t record_buf[8];
12474 op = bits (thumb2_insn_r->arm_insn, 20, 26);
12475 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
12476 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12478 /* Handle MSR insn. */
12479 if (!(op1 & 0x2) && 0x38 == op)
12483 /* CPSR is going to be changed. */
12484 record_buf[0] = ARM_PS_REGNUM;
12485 thumb2_insn_r->reg_rec_count = 1;
12489 arm_record_unsupported_insn (thumb2_insn_r);
12493 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
12496 record_buf[0] = ARM_PS_REGNUM;
12497 record_buf[1] = ARM_LR_REGNUM;
12498 thumb2_insn_r->reg_rec_count = 2;
12501 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12503 return ARM_RECORD_SUCCESS;
12506 /* Handler for thumb2 store single data item instructions. */
12509 thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
12511 struct regcache *reg_cache = thumb2_insn_r->regcache;
12513 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
12514 uint32_t address, offset_addr;
12515 uint32_t record_buf[8], record_buf_mem[8];
12518 ULONGEST u_regval[2];
12520 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
12521 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
12522 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12523 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12525 if (bit (thumb2_insn_r->arm_insn, 23))
12528 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
12529 offset_addr = u_regval[0] + offset_imm;
12530 address = offset_addr;
12535 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
12537 /* Handle STRB (register). */
12538 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
12539 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
12540 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
12541 offset_addr = u_regval[1] << shift_imm;
12542 address = u_regval[0] + offset_addr;
12546 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12547 if (bit (thumb2_insn_r->arm_insn, 10))
12549 if (bit (thumb2_insn_r->arm_insn, 9))
12550 offset_addr = u_regval[0] + offset_imm;
12552 offset_addr = u_regval[0] - offset_imm;
12554 address = offset_addr;
12557 address = u_regval[0];
12563 /* Store byte instructions. */
12566 record_buf_mem[0] = 1;
12568 /* Store half word instructions. */
12571 record_buf_mem[0] = 2;
12573 /* Store word instructions. */
12576 record_buf_mem[0] = 4;
12580 gdb_assert_not_reached ("no decoding pattern found");
12584 record_buf_mem[1] = address;
12585 thumb2_insn_r->mem_rec_count = 1;
12586 record_buf[0] = reg_rn;
12587 thumb2_insn_r->reg_rec_count = 1;
12589 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12591 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12593 return ARM_RECORD_SUCCESS;
12596 /* Handler for thumb2 load memory hints instructions. */
12599 thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
12601 uint32_t record_buf[8];
12602 uint32_t reg_rt, reg_rn;
12604 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
12605 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12607 if (ARM_PC_REGNUM != reg_rt)
12609 record_buf[0] = reg_rt;
12610 record_buf[1] = reg_rn;
12611 record_buf[2] = ARM_PS_REGNUM;
12612 thumb2_insn_r->reg_rec_count = 3;
12614 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12616 return ARM_RECORD_SUCCESS;
12619 return ARM_RECORD_FAILURE;
12622 /* Handler for thumb2 load word instructions. */
12625 thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
12627 uint32_t record_buf[8];
12629 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
12630 record_buf[1] = ARM_PS_REGNUM;
12631 thumb2_insn_r->reg_rec_count = 2;
12633 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12635 return ARM_RECORD_SUCCESS;
12638 /* Handler for thumb2 long multiply, long multiply accumulate, and
12639 divide instructions. */
12642 thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
12644 uint32_t opcode1 = 0, opcode2 = 0;
12645 uint32_t record_buf[8];
12647 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
12648 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
12650 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
12652 /* Handle long multiply and multiply accumulate: */
12653 /* SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
12654 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12655 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12656 record_buf[2] = ARM_PS_REGNUM;
12657 thumb2_insn_r->reg_rec_count = 3;
12659 else if (1 == opcode1 || 3 == opcode2)
12661 /* Handle SDIV and UDIV. */
12662 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12663 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12664 record_buf[2] = ARM_PS_REGNUM;
12665 thumb2_insn_r->reg_rec_count = 3;
12668 return ARM_RECORD_FAILURE;
12670 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12672 return ARM_RECORD_SUCCESS;
12675 /* Record handler for thumb32 coprocessor instructions. */
12678 thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
12680 if (bit (thumb2_insn_r->arm_insn, 25))
12681 return arm_record_coproc_data_proc (thumb2_insn_r);
12683 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
12686 /* Record handler for Advanced SIMD structure load/store instructions. */
12689 thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
12691 struct regcache *reg_cache = thumb2_insn_r->regcache;
12692 uint32_t l_bit, a_bit, b_bits;
12693 uint32_t record_buf[128], record_buf_mem[128];
12694 uint32_t reg_rn, reg_vd, address, f_elem;
12695 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
12698 l_bit = bit (thumb2_insn_r->arm_insn, 21);
12699 a_bit = bit (thumb2_insn_r->arm_insn, 23);
12700 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
12701 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12702 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
12703 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
12704 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
12705 f_elem = 8 / f_ebytes;
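/* f_ebytes is the element size in bytes, taken from the size field
   (bits 6-7), and f_elem is the number of such elements in one 64-bit
   D register. */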
12709 ULONGEST u_regval = 0;
12710 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12711 address = u_regval;
12716 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12718 if (b_bits == 0x07)
12720 else if (b_bits == 0x0a)
12722 else if (b_bits == 0x06)
12724 else if (b_bits == 0x02)
12729 for (index_r = 0; index_r < bf_regs; index_r++)
12731 for (index_e = 0; index_e < f_elem; index_e++)
12733 record_buf_mem[index_m++] = f_ebytes;
12734 record_buf_mem[index_m++] = address;
12735 address = address + f_ebytes;
12736 thumb2_insn_r->mem_rec_count += 1;
12741 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12743 if (b_bits == 0x09 || b_bits == 0x08)
12745 else if (b_bits == 0x03)
12750 for (index_r = 0; index_r < bf_regs; index_r++)
12751 for (index_e = 0; index_e < f_elem; index_e++)
12753 for (loop_t = 0; loop_t < 2; loop_t++)
12755 record_buf_mem[index_m++] = f_ebytes;
12756 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12757 thumb2_insn_r->mem_rec_count += 1;
12759 address = address + (2 * f_ebytes);
12763 else if ((b_bits & 0x0e) == 0x04)
12765 for (index_e = 0; index_e < f_elem; index_e++)
12767 for (loop_t = 0; loop_t < 3; loop_t++)
12769 record_buf_mem[index_m++] = f_ebytes;
12770 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12771 thumb2_insn_r->mem_rec_count += 1;
12773 address = address + (3 * f_ebytes);
12777 else if (!(b_bits & 0x0e))
12779 for (index_e = 0; index_e < f_elem; index_e++)
12781 for (loop_t = 0; loop_t < 4; loop_t++)
12783 record_buf_mem[index_m++] = f_ebytes;
12784 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12785 thumb2_insn_r->mem_rec_count += 1;
12787 address = address + (4 * f_ebytes);
12793 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
12795 if (bft_size == 0x00)
12797 else if (bft_size == 0x01)
12799 else if (bft_size == 0x02)
12805 if (!(b_bits & 0x0b) || b_bits == 0x08)
12806 thumb2_insn_r->mem_rec_count = 1;
12808 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
12809 thumb2_insn_r->mem_rec_count = 2;
12811 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
12812 thumb2_insn_r->mem_rec_count = 3;
12814 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
12815 thumb2_insn_r->mem_rec_count = 4;
12817 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
12819 record_buf_mem[index_m] = f_ebytes;
12820 record_buf_mem[index_m] = address + (index_m * f_ebytes);
12829 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12830 thumb2_insn_r->reg_rec_count = 1;
12832 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12833 thumb2_insn_r->reg_rec_count = 2;
12835 else if ((b_bits & 0x0e) == 0x04)
12836 thumb2_insn_r->reg_rec_count = 3;
12838 else if (!(b_bits & 0x0e))
12839 thumb2_insn_r->reg_rec_count = 4;
12844 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
12845 thumb2_insn_r->reg_rec_count = 1;
12847 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
12848 thumb2_insn_r->reg_rec_count = 2;
12850 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
12851 thumb2_insn_r->reg_rec_count = 3;
12853 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
12854 thumb2_insn_r->reg_rec_count = 4;
12856 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
12857 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
12861 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
12863 record_buf[index_r] = reg_rn;
12864 thumb2_insn_r->reg_rec_count += 1;
12867 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12869 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12874 /* Decodes thumb2 instruction type and invokes its record handler. */
12876 static unsigned int
12877 thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
12879 uint32_t op, op1, op2;
12881 op = bit (thumb2_insn_r->arm_insn, 15);
12882 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
12883 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
12887 if (!(op2 & 0x64))
12889 /* Load/store multiple instruction. */
12890 return thumb2_record_ld_st_multiple (thumb2_insn_r);
12892 else if ((op2 & 0x64) == 0x4)
12894 /* Load/store (dual/exclusive) and table branch instruction. */
12895 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
12897 else if ((op2 & 0x60) == 0x20)
12899 /* Data-processing (shifted register). */
12900 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12902 else if (op2 & 0x40)
12904 /* Co-processor instructions. */
12905 return thumb2_record_coproc_insn (thumb2_insn_r);
12908 else if (op1 == 0x02)
12912 /* Branches and miscellaneous control instructions. */
12913 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
12915 else if (op2 & 0x20)
12917 /* Data-processing (plain binary immediate) instruction. */
12918 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12922 /* Data-processing (modified immediate). */
12923 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12926 else if (op1 == 0x03)
12928 if (!(op2 & 0x71))
12930 /* Store single data item. */
12931 return thumb2_record_str_single_data (thumb2_insn_r);
12933 else if (!((op2 & 0x71) ^ 0x10))
12935 /* Advanced SIMD or structure load/store instructions. */
12936 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
12938 else if (!((op2 & 0x67) ^ 0x01))
12940 /* Load byte, memory hints instruction. */
12941 return thumb2_record_ld_mem_hints (thumb2_insn_r);
12943 else if (!((op2 & 0x67) ^ 0x03))
12945 /* Load halfword, memory hints instruction. */
12946 return thumb2_record_ld_mem_hints (thumb2_insn_r);
12948 else if (!((op2 & 0x67) ^ 0x05))
12950 /* Load word instruction. */
12951 return thumb2_record_ld_word (thumb2_insn_r);
12953 else if (!((op2 & 0x70) ^ 0x20))
12955 /* Data-processing (register) instruction. */
12956 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12958 else if (!((op2 & 0x78) ^ 0x30))
12960 /* Multiply, multiply accumulate, abs diff instruction. */
12961 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12963 else if (!((op2 & 0x78) ^ 0x38))
12965 /* Long multiply, long multiply accumulate, and divide. */
12966 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
12968 else if (op2 & 0x40)
12970 /* Co-processor instructions. */
12971 return thumb2_record_coproc_insn (thumb2_insn_r);
12979 /* Abstract memory reader. */
12981 class abstract_memory_reader
12984 /* Read LEN bytes of target memory at address MEMADDR, placing the
12985 results in GDB's memory at BUF. Return true on success. */
12987 virtual bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) = 0;
12990 /* Instruction reader from real target. */
12992 class instruction_reader : public abstract_memory_reader
12995 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
12997 if (target_read_memory (memaddr, buf, len))
12998 return false;
12999 else
13000 return true;
13006 /* Extract an arm/thumb/thumb2 insn depending on the given size.  Return 0 on success
13007 and a positive value on failure. */
13009 static int
13010 extract_arm_insn (abstract_memory_reader& reader,
13011 insn_decode_record *insn_record, uint32_t insn_size)
13013 gdb_byte buf[insn_size];
13015 memset (&buf[0], 0, insn_size);
13017 if (!reader.read (insn_record->this_addr, buf, insn_size))
13018 return 1;
13019 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
13020 insn_size,
13021 gdbarch_byte_order_for_code (insn_record->gdbarch));
13022 return 0;
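/* For illustration, assuming a little-endian target as in the selftests
   below: the two bytes "db b2" at this_addr are extracted as
   insn_record->arm_insn == 0xb2db, i.e. "uxtb r3, r3".  A 32-bit Thumb-2
   encoding is read here as one 4-byte integer, so its two halfwords still
   need the swap that decode_insn performs for THUMB2_RECORD. */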
13025 typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
13027 /* Decode arm/thumb insn depending on condition codes and opcodes, and
13028 dispatch it to the matching record handler. */
13030 static int
13031 decode_insn (abstract_memory_reader &reader, insn_decode_record *arm_record,
13032 record_type_t record_type, uint32_t insn_size)
13035 /* (Starting from numerical 0); bits 25, 26, 27 decode the type of arm
13036 instruction. */
13037 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
13039 arm_record_data_proc_misc_ld_str, /* 000. */
13040 arm_record_data_proc_imm, /* 001. */
13041 arm_record_ld_st_imm_offset, /* 010. */
13042 arm_record_ld_st_reg_offset, /* 011. */
13043 arm_record_ld_st_multiple, /* 100. */
13044 arm_record_b_bl, /* 101. */
13045 arm_record_asimd_vfp_coproc, /* 110. */
13046 arm_record_coproc_data_proc /* 111. */
13049 /* (Starting from numerical 0); bits 13, 14, 15 decode the type of thumb
13050 instruction. */
13051 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
13053 thumb_record_shift_add_sub, /* 000. */
13054 thumb_record_add_sub_cmp_mov, /* 001. */
13055 thumb_record_ld_st_reg_offset, /* 010. */
13056 thumb_record_ld_st_imm_offset, /* 011. */
13057 thumb_record_ld_st_stack, /* 100. */
13058 thumb_record_misc, /* 101. */
13059 thumb_record_ldm_stm_swi, /* 110. */
13060 thumb_record_branch /* 111. */
13063 uint32_t ret = 0; /* Return value: 0 on success, nonzero on failure. */
13064 uint32_t insn_id = 0;
13066 if (extract_arm_insn (reader, arm_record, insn_size))
13070 printf_unfiltered (_("Process record: error reading memory at "
13071 "addr %s len = %d.\n"),
13072 paddress (arm_record->gdbarch,
13073 arm_record->this_addr), insn_size);
13077 else if (ARM_RECORD == record_type)
13079 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
13080 insn_id = bits (arm_record->arm_insn, 25, 27);
13082 if (arm_record->cond == 0xf)
13083 ret = arm_record_extension_space (arm_record);
13084 else
13086 /* The insn did not fall into the extension space, so decode it
13087 with the ordinary ARM handler. */
13088 ret = arm_handle_insn[insn_id] (arm_record);
13090 if (ret != ARM_RECORD_SUCCESS)
13092 arm_record_unsupported_insn (arm_record);
13096 else if (THUMB_RECORD == record_type)
13098 /* Thumb does not have condition codes, so set cond to -1. */
13099 arm_record->cond = -1;
13100 insn_id = bits (arm_record->arm_insn, 13, 15);
13101 ret = thumb_handle_insn[insn_id] (arm_record);
13102 if (ret != ARM_RECORD_SUCCESS)
13104 arm_record_unsupported_insn (arm_record);
13108 else if (THUMB2_RECORD == record_type)
13110 /* Thumb does not have condition codes, so set cond to -1. */
13111 arm_record->cond = -1;
13113 /* Swap the first halfword of the 32-bit Thumb-2 instruction with the second halfword. */
13114 arm_record->arm_insn
13115 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
13117 ret = thumb2_record_decode_insn_handler (arm_record);
13119 if (ret != ARM_RECORD_SUCCESS)
13121 arm_record_unsupported_insn (arm_record);
13127 /* Throw assertion. */
13128 gdb_assert_not_reached ("not a valid instruction, could not decode");
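/* For illustration, how the 16-bit Thumb dispatch above plays out for the
   second selftest insn below: for "ldr r5, [r1, r3]" (0x58cd),
   bits (0x58cd, 13, 15) == 0x2, so thumb_handle_insn[2], i.e.
   thumb_record_ld_st_reg_offset, is invoked and records the destination
   register r5. */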
13134 #if GDB_SELF_TEST
13135 namespace selftests {
13137 /* Provide both 16-bit and 32-bit thumb instructions. */
13139 class instruction_reader_thumb : public abstract_memory_reader
13142 template<size_t SIZE>
13143 instruction_reader_thumb (enum bfd_endian endian,
13144 const uint16_t (&insns)[SIZE])
13145 : m_endian (endian), m_insns (insns), m_insns_size (SIZE)
13148 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
13150 SELF_CHECK (len == 4 || len == 2);
13151 SELF_CHECK (memaddr % 2 == 0);
13152 SELF_CHECK ((memaddr / 2) < m_insns_size);
13154 store_unsigned_integer (buf, 2, m_endian, m_insns[memaddr / 2]);
13156 if (len == 4)
13157 store_unsigned_integer (&buf[2], 2, m_endian,
13158 m_insns[memaddr / 2 + 1]);
13164 enum bfd_endian m_endian;
13165 const uint16_t *m_insns;
13166 size_t m_insns_size;
13169 static void
13170 arm_record_test (void)
13172 struct gdbarch_info info;
13173 gdbarch_info_init (&info);
13174 info.bfd_arch_info = bfd_scan_arch ("arm");
13176 struct gdbarch *gdbarch = gdbarch_find_by_info (info);
13178 SELF_CHECK (gdbarch != NULL);
13180 /* 16-bit Thumb instructions. */
13182 insn_decode_record arm_record;
13184 memset (&arm_record, 0, sizeof (insn_decode_record));
13185 arm_record.gdbarch = gdbarch;
13187 static const uint16_t insns[] = {
13188 /* db b2 uxtb r3, r3 */
13189 0xb2db,
13190 /* cd 58 ldr r5, [r1, r3] */
13191 0x58cd,
13194 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
13195 instruction_reader_thumb reader (endian, insns);
13196 int ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13197 THUMB_INSN_SIZE_BYTES);
13199 SELF_CHECK (ret == 0);
13200 SELF_CHECK (arm_record.mem_rec_count == 0);
13201 SELF_CHECK (arm_record.reg_rec_count == 1);
13202 SELF_CHECK (arm_record.arm_regs[0] == 3);
13204 arm_record.this_addr += 2;
13205 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13206 THUMB_INSN_SIZE_BYTES);
13208 SELF_CHECK (ret == 0);
13209 SELF_CHECK (arm_record.mem_rec_count == 0);
13210 SELF_CHECK (arm_record.reg_rec_count == 1);
13211 SELF_CHECK (arm_record.arm_regs[0] == 5);
13214 /* 32-bit Thumb-2 instructions. */
13216 insn_decode_record arm_record;
13218 memset (&arm_record, 0, sizeof (insn_decode_record));
13219 arm_record.gdbarch = gdbarch;
13221 static const uint16_t insns[] = {
13222 /* 1d ee 70 7f mrc 15, 0, r7, cr13, cr0, {3} */
13223 0xee1d, 0x7f70,
13226 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
13227 instruction_reader_thumb reader (endian, insns);
13228 int ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
13229 THUMB2_INSN_SIZE_BYTES);
13231 SELF_CHECK (ret == 0);
13232 SELF_CHECK (arm_record.mem_rec_count == 0);
13233 SELF_CHECK (arm_record.reg_rec_count == 1);
13234 SELF_CHECK (arm_record.arm_regs[0] == 7);
13237 } // namespace selftests
13238 #endif /* GDB_SELF_TEST */
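/* A test such as arm_record_test above only takes effect once it has been
   registered with GDB's self-test framework.  The registration is expected
   to look roughly like the following sketch (illustrative; the actual call
   site is not shown in this excerpt), guarded by GDB_SELF_TEST just like
   the test itself:

     selftests::register_test ("arm-record", selftests::arm_record_test);
*/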
13240 /* Cleans up local record registers and memory allocations. */
13242 static void
13243 deallocate_reg_mem (insn_decode_record *record)
13245 xfree (record->arm_regs);
13246 xfree (record->arm_mems);
13250 /* Parse the current instruction and record the values of the registers and
13251 memory that will be changed by the current instruction to
13252 "record_arch_list".  Return -1 if something goes wrong. */
13254 int
13255 arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
13256 CORE_ADDR insn_addr)
13259 uint32_t no_of_rec = 0;
13260 uint32_t ret = 0; /* Return value: -1: record failure; 0: success. */
13261 ULONGEST t_bit = 0, insn_id = 0;
13263 ULONGEST u_regval = 0;
13265 insn_decode_record arm_record;
13267 memset (&arm_record, 0, sizeof (insn_decode_record));
13268 arm_record.regcache = regcache;
13269 arm_record.this_addr = insn_addr;
13270 arm_record.gdbarch = gdbarch;
13273 if (record_debug > 1)
13275 fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
13276 "addr = %s\n",
13277 paddress (gdbarch, arm_record.this_addr));
13280 instruction_reader reader;
13281 if (extract_arm_insn (reader, &arm_record, 2))
13285 printf_unfiltered (_("Process record: error reading memory at "
13286 "addr %s len = %d.\n"),
13287 paddress (arm_record.gdbarch,
13288 arm_record.this_addr), 2);
13293 /* Check the CPSR T bit to see whether this is a Thumb or an ARM insn. */
13295 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
13296 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
13299 if (!(u_regval & t_bit))
13301 /* We are decoding arm insn. */
13302 ret = decode_insn (reader, &arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
13306 insn_id = bits (arm_record.arm_insn, 11, 15);
13307 /* Is it a 32-bit Thumb-2 insn?  Only 32-bit encodings have 0x1d, 0x1e or 0x1f in bits 11-15 of the first halfword. */
13308 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
13310 ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
13311 THUMB2_INSN_SIZE_BYTES);
13315 /* We are decoding thumb insn. */
13316 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13317 THUMB_INSN_SIZE_BYTES);
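/* For illustration, a worked example of the mode selection above: with the
   CPSR T bit set and the leading halfword at insn_addr being 0xee1d,
   bits (0xee1d, 11, 15) == 0x1d, so the insn is treated as a 32-bit Thumb-2
   encoding and decode_insn re-reads it with THUMB2_INSN_SIZE_BYTES (4); a
   halfword such as 0x58cd (bits 11-15 == 0x0b) stays on the 16-bit
   THUMB_RECORD path. */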
13323 /* Record registers. */
13324 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
13325 if (arm_record.arm_regs)
13327 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
13329 if (record_full_arch_list_add_reg
13330 (arm_record.regcache , arm_record.arm_regs[no_of_rec]))
13334 /* Record memories. */
13335 if (arm_record.arm_mems)
13337 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
13339 if (record_full_arch_list_add_mem
13340 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
13341 arm_record.arm_mems[no_of_rec].len))
13346 if (record_full_arch_list_add_end ())
13347 ret = -1;
13351 deallocate_reg_mem (&arm_record);