/* Common target dependent code for GDB on ARM systems.

   Copyright (C) 1988-2019 Free Software Foundation, Inc.

   This file is part of GDB.

   This program is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3 of the License, or
   (at your option) any later version.

   This program is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program.  If not, see <http://www.gnu.org/licenses/>.  */
#include <ctype.h>		/* XXX for isupper ().  */

#include "dis-asm.h"		/* For register styles.  */
#include "reggroups.h"
#include "target-float.h"
#include "arch-utils.h"
#include "frame-unwind.h"
#include "frame-base.h"
#include "trad-frame.h"
#include "dwarf2-frame.h"
#include "prologue-value.h"
#include "target-descriptions.h"
#include "user-regs.h"
#include "observable.h"

#include "arch/arm-get-next-pcs.h"
#include "gdb/sim-arm.h"

#include "coff/internal.h"

#include "common/vec.h"

#include "record-full.h"

#include "features/arm/arm-with-m.c"
#include "features/arm/arm-with-m-fpa-layout.c"
#include "features/arm/arm-with-m-vfp-d16.c"
#include "features/arm/arm-with-iwmmxt.c"
#include "features/arm/arm-with-vfpv2.c"
#include "features/arm/arm-with-vfpv3.c"
#include "features/arm/arm-with-neon.c"

#include "common/selftest.h"
/* Macros for setting and testing a bit in a minimal symbol that marks
   it as a Thumb function.  The MSB of the minimal symbol's "info" field
   is used for this purpose.

   MSYMBOL_SET_SPECIAL	Actually sets the "special" bit.
   MSYMBOL_IS_SPECIAL	Tests the "special" bit in a minimal symbol.  */

#define MSYMBOL_SET_SPECIAL(msym) \
  MSYMBOL_TARGET_FLAG_1 (msym) = 1

#define MSYMBOL_IS_SPECIAL(msym) \
  MSYMBOL_TARGET_FLAG_1 (msym)
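
/* Illustrative usage sketch (not a call site in this file; the hook name
   is an assumption): an ARM-specific make_msymbol_special hook would call
   MSYMBOL_SET_SPECIAL (msym) while minimal symbols for Thumb code are
   created, and address classification later tests the bit, e.g.

     if (sym.minsym != NULL && MSYMBOL_IS_SPECIAL (sym.minsym))
       ... treat the address as Thumb code ...

   as done in arm_pc_is_thumb below.  */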
/* Per-objfile data used for mapping symbols.  */
static const struct objfile_data *arm_objfile_data_key;

struct arm_mapping_symbol
{
  bfd_vma value;	/* Section-relative address of the mapping symbol.  */
  char type;		/* Mapping symbol type, from the ELF symbol name
			   ($a, $t, $d); consumed by arm_find_mapping_symbol
			   below.  */
};
typedef struct arm_mapping_symbol arm_mapping_symbol_s;
DEF_VEC_O(arm_mapping_symbol_s);

struct arm_per_objfile
{
  VEC(arm_mapping_symbol_s) **section_maps;
};

/* The list of available "set arm ..." and "show arm ..." commands.  */
static struct cmd_list_element *setarmcmdlist = NULL;
static struct cmd_list_element *showarmcmdlist = NULL;
/* The type of floating-point to use.  Keep this in sync with enum
   arm_float_model, and the help string in _initialize_arm_tdep.  */
static const char *const fp_model_strings[] =

/* A variable that can be configured by the user.  */
static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
static const char *current_fp_model = "auto";

/* The ABI to use.  Keep this in sync with arm_abi_kind.  */
static const char *const arm_abi_strings[] =

/* A variable that can be configured by the user.  */
static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
static const char *arm_abi_string = "auto";

/* The execution mode to assume.  */
static const char *const arm_mode_strings[] =

static const char *arm_fallback_mode_string = "auto";
static const char *arm_force_mode_string = "auto";
/* The standard register names, and all the valid aliases for them.  Note
   that `fp', `sp' and `pc' are not added in this alias list, because they
   have been added as builtin user registers in
   std-regs.c:_initialize_frame_reg.  */
} arm_register_aliases[] = {
  /* Basic register numbers.  */
  /* Synonyms (argument and variable registers).  */
  /* Other platform-specific names for r9.  */
  /* Names used by GCC (not listed in the ARM EABI).  */
  /* A special name from the older ATPCS.  */

static const char *const arm_register_names[] =
{"r0",  "r1",  "r2",  "r3",	/*  0  1  2  3 */
 "r4",  "r5",  "r6",  "r7",	/*  4  5  6  7 */
 "r8",  "r9",  "r10", "r11",	/*  8  9 10 11 */
 "r12", "sp",  "lr",  "pc",	/* 12 13 14 15 */
 "f0",  "f1",  "f2",  "f3",	/* 16 17 18 19 */
 "f4",  "f5",  "f6",  "f7",	/* 20 21 22 23 */
 "fps", "cpsr" };		/* 24 25       */
/* Holds the current set of options to be passed to the disassembler.  */
static char *arm_disassembler_options;

/* Valid register name styles.  */
static const char **valid_disassembly_styles;

/* Disassembly style to use.  Defaults to "std" register names.  */
static const char *disassembly_style;

/* This is used to keep the bfd arch_info in sync with the disassembly
   style.  */
static void set_disassembly_style_sfunc (const char *, int,
					 struct cmd_list_element *);
static void show_disassembly_style_sfunc (struct ui_file *, int,
					  struct cmd_list_element *,

static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
						readable_regcache *regcache,
						int regnum, gdb_byte *buf);
static void arm_neon_quad_write (struct gdbarch *gdbarch,
				 struct regcache *regcache,
				 int regnum, const gdb_byte *buf);

arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self);

/* get_next_pcs operations.  */
static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = {
  arm_get_next_pcs_read_memory_unsigned_integer,
  arm_get_next_pcs_syscall_next_pc,
  arm_get_next_pcs_addr_bits_remove,
  arm_get_next_pcs_is_thumb,
struct arm_prologue_cache
{
  /* The stack pointer at the time this frame was created; i.e. the
     caller's stack pointer when this function was called.  It is used
     to identify this frame.  */
  CORE_ADDR prev_sp;

  /* The frame base for this frame is just prev_sp - frame size.
     FRAMESIZE is the distance from the frame pointer to the
     initial stack pointer.  */
  int framesize;

  /* The register used to hold the frame pointer for this frame.  */
  int framereg;

  /* Saved register offsets.  */
  struct trad_frame_saved_reg *saved_regs;
};

static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
				       CORE_ADDR prologue_start,
				       CORE_ADDR prologue_end,
				       struct arm_prologue_cache *cache);

/* Architecture version for displaced stepping.  This affects the behaviour
   of certain instructions, and really should not be hard-wired.  */

#define DISPLACED_STEPPING_ARCH_VERSION		5
/* Set to true if the 32-bit mode is in use.  */

/* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode.  */

arm_psr_thumb_bit (struct gdbarch *gdbarch)
  if (gdbarch_tdep (gdbarch)->is_m)

/* Determine if the processor is currently executing in Thumb mode.  */

arm_is_thumb (struct regcache *regcache)
  ULONGEST t_bit = arm_psr_thumb_bit (regcache->arch ());

  cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);

  return (cpsr & t_bit) != 0;

/* Determine if FRAME is executing in Thumb mode.  */

arm_frame_is_thumb (struct frame_info *frame)
  ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));

  /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
     directly (from a signal frame or dummy frame) or by interpreting
     the saved LR (from a prologue or DWARF frame).  So consult it and
     trust the unwinders.  */
  cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);

  return (cpsr & t_bit) != 0;
/* Callback for VEC_lower_bound.  */

arm_compare_mapping_symbols (const struct arm_mapping_symbol *lhs,
			     const struct arm_mapping_symbol *rhs)
  return lhs->value < rhs->value;

/* Search for the mapping symbol covering MEMADDR.  If one is found,
   return its type.  Otherwise, return 0.  If START is non-NULL,
   set *START to the location of the mapping symbol.  */

arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
  struct obj_section *sec;

  /* If there are mapping symbols, consult them.  */
  sec = find_pc_section (memaddr);
      struct arm_per_objfile *data;
      VEC(arm_mapping_symbol_s) *map;
      struct arm_mapping_symbol map_key = { memaddr - obj_section_addr (sec),

      data = (struct arm_per_objfile *) objfile_data (sec->objfile,
						      arm_objfile_data_key);

	  map = data->section_maps[sec->the_bfd_section->index];
	  if (!VEC_empty (arm_mapping_symbol_s, map))
	      struct arm_mapping_symbol *map_sym;

	      idx = VEC_lower_bound (arm_mapping_symbol_s, map, &map_key,
				     arm_compare_mapping_symbols);

	      /* VEC_lower_bound finds the earliest ordered insertion
		 point.  If the following symbol starts at this exact
		 address, we use that; otherwise, the preceding
		 mapping symbol covers this address.  */
	      if (idx < VEC_length (arm_mapping_symbol_s, map))
		  map_sym = VEC_index (arm_mapping_symbol_s, map, idx);
		  if (map_sym->value == map_key.value)
		      *start = map_sym->value + obj_section_addr (sec);
		      return map_sym->type;

	      map_sym = VEC_index (arm_mapping_symbol_s, map, idx - 1);
	      *start = map_sym->value + obj_section_addr (sec);
	      return map_sym->type;
/* Determine if the program counter specified in MEMADDR is in a Thumb
   function.  This function should be called for addresses unrelated to
   any executing frame; otherwise, prefer arm_frame_is_thumb.  */

arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
  struct bound_minimal_symbol sym;

  arm_displaced_step_closure *dsc
    = ((arm_displaced_step_closure *)
       get_displaced_step_closure_by_addr (memaddr));

  /* If we are checking the mode of a displaced instruction in the copy
     area, the mode should be determined by the instruction at the
     original address.  */
      fprintf_unfiltered (gdb_stdlog,
			  "displaced: check mode of %.8lx instead of %.8lx\n",
			  (unsigned long) dsc->insn_addr,
			  (unsigned long) memaddr);
      memaddr = dsc->insn_addr;

  /* If bit 0 of the address is set, assume this is a Thumb address.  */
  if (IS_THUMB_ADDR (memaddr))

  /* If the user wants to override the symbol table, let them.  */
  if (strcmp (arm_force_mode_string, "arm") == 0)
  if (strcmp (arm_force_mode_string, "thumb") == 0)

  /* ARM v6-M and v7-M are always in Thumb mode.  */
  if (gdbarch_tdep (gdbarch)->is_m)

  /* If there are mapping symbols, consult them.  */
  type = arm_find_mapping_symbol (memaddr, NULL);

  /* Thumb functions have a "special" bit set in minimal symbols.  */
  sym = lookup_minimal_symbol_by_pc (memaddr);
    return (MSYMBOL_IS_SPECIAL (sym.minsym));

  /* If the user wants to override the fallback mode, let them.  */
  if (strcmp (arm_fallback_mode_string, "arm") == 0)
  if (strcmp (arm_fallback_mode_string, "thumb") == 0)

  /* If we couldn't find any symbol, but we're talking to a running
     target, then trust the current value of $cpsr.  This lets
     "display/i $pc" always show the correct mode (though if there is
     a symbol table we will not reach here, so it still may not be
     displayed in the mode it will be executed).  */
  if (target_has_registers)
    return arm_frame_is_thumb (get_current_frame ());

  /* Otherwise we're out of luck; we assume ARM.  */
/* Determine if the address specified equals any of these magic return
   values, called EXC_RETURN, defined by the ARM v6-M and v7-M

   From ARMv6-M Reference Manual B1.5.8
   Table B1-5 Exception return behavior

   EXC_RETURN    Return To        Return Stack
   0xFFFFFFF1    Handler mode     Main
   0xFFFFFFF9    Thread mode      Main
   0xFFFFFFFD    Thread mode      Process

   From ARMv7-M Reference Manual B1.5.8
   Table B1-8 EXC_RETURN definition of exception return behavior, no FP

   EXC_RETURN    Return To        Return Stack
   0xFFFFFFF1    Handler mode     Main
   0xFFFFFFF9    Thread mode      Main
   0xFFFFFFFD    Thread mode      Process

   Table B1-9 EXC_RETURN definition of exception return behavior, with

   EXC_RETURN    Return To        Return Stack    Frame Type
   0xFFFFFFE1    Handler mode     Main            Extended
   0xFFFFFFE9    Thread mode      Main            Extended
   0xFFFFFFED    Thread mode      Process         Extended
   0xFFFFFFF1    Handler mode     Main            Basic
   0xFFFFFFF9    Thread mode      Main            Basic
   0xFFFFFFFD    Thread mode      Process         Basic

   For more details see "B1.5.8 Exception return behavior"
   in both ARMv6-M and ARMv7-M Architecture Reference Manuals.  */

arm_m_addr_is_magic (CORE_ADDR addr)
  /* Values from the tables in B1.5.8, the EXC_RETURN definitions of
     the exception return behavior.  */

      /* Address is magic.  */

      /* Address is not magic.  */
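
/* Summarizing the tables above (illustrative restatement): the magic
   values recognized here are 0xfffffff1, 0xfffffff9 and 0xfffffffd
   (Basic frames, also valid on ARMv6-M), plus 0xffffffe1, 0xffffffe9
   and 0xffffffed (Extended frames, ARMv7-M with FP).  Any other value,
   e.g. an ordinary code address, is not magic.  */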
/* Remove useless bits from addresses in a running program.  */

arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
  /* On M-profile devices, do not strip the low bit from EXC_RETURN
     (the magic exception return address).  */
  if (gdbarch_tdep (gdbarch)->is_m
      && arm_m_addr_is_magic (val))

    return UNMAKE_THUMB_ADDR (val);

    return (val & 0x03fffffc);
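
/* For example (illustrative, assuming the usual 32-bit mode path): a Thumb
   code address read from LR as 0x00008001 comes back as 0x00008000 here,
   while an M-profile EXC_RETURN value such as 0xfffffffd is returned
   unchanged by the early-out above.  */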
/* Return 1 if PC is the start of a compiler helper function which
   can be safely ignored during prologue skipping.  IS_THUMB is true
   if the function is known to be a Thumb function due to the way it

skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  struct bound_minimal_symbol msym;

  msym = lookup_minimal_symbol_by_pc (pc);
  if (msym.minsym != NULL
      && BMSYMBOL_VALUE_ADDRESS (msym) == pc
      && MSYMBOL_LINKAGE_NAME (msym.minsym) != NULL)
      const char *name = MSYMBOL_LINKAGE_NAME (msym.minsym);

      /* The GNU linker's Thumb call stub to foo is named
      if (strstr (name, "_from_thumb") != NULL)

      /* On soft-float targets, __truncdfsf2 is called to convert promoted
	 arguments to their argument types in non-prototyped
      if (startswith (name, "__truncdfsf2"))
      if (startswith (name, "__aeabi_d2f"))

      /* Internal functions related to thread-local storage.  */
      if (startswith (name, "__tls_get_addr"))
      if (startswith (name, "__aeabi_read_tp"))

  /* If we run against a stripped glibc, we may be unable to identify
     special functions by name.  Check for one important case,
     __aeabi_read_tp, by comparing the *code* against the default
     implementation (this is hand-written ARM assembler in glibc).  */

      && read_code_unsigned_integer (pc, 4, byte_order_for_code)
	 == 0xe3e00a0f /* mov r0, #0xffff0fff */
      && read_code_unsigned_integer (pc + 4, 4, byte_order_for_code)
	 == 0xe240f01f) /* sub pc, r0, #31 */
/* Extract the immediate from instruction movw/movt of encoding T.  INSN1 is
   the first 16 bits of the instruction, and INSN2 is the second 16 bits.  */
#define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
  ((bits ((insn1), 0, 3) << 12)               \
   | (bits ((insn1), 10, 10) << 11)           \
   | (bits ((insn2), 12, 14) << 8)            \
   | bits ((insn2), 0, 7))

/* Extract the immediate from instruction movw/movt of encoding A.  INSN is
   the 32-bit instruction.  */
#define EXTRACT_MOVW_MOVT_IMM_A(insn) \
  ((bits ((insn), 16, 19) << 12) \
   | bits ((insn), 0, 11))
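
/* Worked example (illustrative only): the Thumb-2 pair
     movw r12, #0x1234   ->  insn1 = 0xf241, insn2 = 0x2c34
   gives EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2)
     == (0x1 << 12) | (0x0 << 11) | (0x2 << 8) | 0x34 == 0x1234,
   and the ARM encoding
     movw r0, #0x1234    ->  insn = 0xe3010234
   gives EXTRACT_MOVW_MOVT_IMM_A (insn) == (0x1 << 12) | 0x234 == 0x1234.  */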
/* Decode immediate value; implements ThumbExpandImmediate pseudo-op.  */

thumb_expand_immediate (unsigned int imm)
  unsigned int count = imm >> 7;

	return (imm & 0xff) | ((imm & 0xff) << 16);
	return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
	return (imm & 0xff) | ((imm & 0xff) << 8)
	       | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);

  return (0x80 | (imm & 0x7f)) << (32 - count);
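
/* Worked examples (illustrative): IMM = 0x155 has count == 2, so the
   repeated-byte form applies and the result is 0x00550055; IMM = 0x4ff
   has count == 9, so the rotated-constant form applies and the result is
   (0x80 | 0x7f) << (32 - 9) == 0x7f800000.  */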
/* Return 1 if the 16-bit Thumb instruction INSN restores SP in
   epilogue, 0 otherwise.  */

thumb_instruction_restores_sp (unsigned short insn)
  return (insn == 0x46bd		  /* mov sp, r7 */
	  || (insn & 0xff80) == 0xb000	  /* add sp, imm */
	  || (insn & 0xfe00) == 0xbc00);  /* pop <registers> */
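
/* For example, 0x46bd ("mov sp, r7"), 0xb008 ("add sp, #32") and
   0xbd00 ("pop {pc}") all match the patterns above, while 0xb088
   ("sub sp, #32") does not.  */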
/* Analyze a Thumb prologue, looking for a recognizable stack frame
   and frame pointer.  Scan until we encounter a store that could
   clobber the stack frame unexpectedly, or an unknown instruction.
   Return the last address which is definitely safe to skip for an
   initial breakpoint.  */

thumb_analyze_prologue (struct gdbarch *gdbarch,
			CORE_ADDR start, CORE_ADDR limit,
			struct arm_prologue_cache *cache)
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  CORE_ADDR unrecognized_pc = 0;

  for (i = 0; i < 16; i++)
    regs[i] = pv_register (i, 0);
  pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
  while (start < limit)
      insn = read_code_unsigned_integer (start, 2, byte_order_for_code);

      if ((insn & 0xfe00) == 0xb400)		/* push { rlist } */
	  if (stack.store_would_trash (regs[ARM_SP_REGNUM]))

	  /* Bits 0-7 contain a mask for registers R0-R7.  Bit 8 says
	     whether to save LR (R14).  */
	  mask = (insn & 0xff) | ((insn & 0x100) << 6);

	  /* Calculate offsets of saved R0-R7 and LR.  */
	  for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
	    if (mask & (1 << regno))
		regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
		stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
      else if ((insn & 0xff80) == 0xb080)	/* sub sp, #imm */
	  offset = (insn & 0x7f) << 2;		/* get scaled offset */
	  regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
      else if (thumb_instruction_restores_sp (insn))
	  /* Don't scan past the epilogue.  */
      else if ((insn & 0xf800) == 0xa800)	/* add Rd, sp, #imm */
	regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
      else if ((insn & 0xfe00) == 0x1c00	/* add Rd, Rn, #imm */
	       && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
	regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
      else if ((insn & 0xf800) == 0x3000	/* add Rd, #imm */
	       && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
	regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
      else if ((insn & 0xfe00) == 0x1800	/* add Rd, Rn, Rm */
	       && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
	       && pv_is_constant (regs[bits (insn, 3, 5)]))
	regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
					  regs[bits (insn, 6, 8)]);
      else if ((insn & 0xff00) == 0x4400	/* add Rd, Rm */
	       && pv_is_constant (regs[bits (insn, 3, 6)]))
	  int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
	  int rm = bits (insn, 3, 6);
	  regs[rd] = pv_add (regs[rd], regs[rm]);
      else if ((insn & 0xff00) == 0x4600)	/* mov hi, lo or mov lo, hi */
	  int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
	  int src_reg = (insn & 0x78) >> 3;
	  regs[dst_reg] = regs[src_reg];
      else if ((insn & 0xf800) == 0x9000)	/* str rd, [sp, #off] */
	  /* Handle stores to the stack.  Normally pushes are used,
	     but with GCC -mtpcs-frame, there may be other stores
	     in the prologue to create the frame.  */
	  int regno = (insn >> 8) & 0x7;

	  offset = (insn & 0xff) << 2;
	  addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);

	  if (stack.store_would_trash (addr))

	  stack.store (addr, 4, regs[regno]);
      else if ((insn & 0xf800) == 0x6000)	/* str rd, [rn, #off] */
	  int rd = bits (insn, 0, 2);
	  int rn = bits (insn, 3, 5);

	  offset = bits (insn, 6, 10) << 2;
	  addr = pv_add_constant (regs[rn], offset);

	  if (stack.store_would_trash (addr))

	  stack.store (addr, 4, regs[rd]);
      else if (((insn & 0xf800) == 0x7000	/* strb Rd, [Rn, #off] */
		|| (insn & 0xf800) == 0x8000)	/* strh Rd, [Rn, #off] */
	       && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
	/* Ignore stores of argument registers to the stack.  */
      else if ((insn & 0xf800) == 0xc800	/* ldmia Rn!, { registers } */
	       && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
	/* Ignore block loads from the stack, potentially copying
	   parameters from memory.  */
      else if ((insn & 0xf800) == 0x9800	/* ldr Rd, [Rn, #immed] */
	       || ((insn & 0xf800) == 0x6800	/* ldr Rd, [sp, #immed] */
		   && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
	/* Similarly ignore single loads from the stack.  */
      else if ((insn & 0xffc0) == 0x0000	/* lsls Rd, Rm, #0 */
	       || (insn & 0xffc0) == 0x1c00)	/* add Rd, Rn, #0 */
	/* Skip register copies, i.e. saves to another register
	   instead of the stack.  */
      else if ((insn & 0xf800) == 0x2000)	/* movs Rd, #imm */
	/* Recognize constant loads; even with small stacks these are necessary
	regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
      else if ((insn & 0xf800) == 0x4800)	/* ldr Rd, [pc, #imm] */
	  /* Constant pool loads, for the same reason.  */
	  unsigned int constant;

	  loc = start + 4 + bits (insn, 0, 7) * 4;
	  constant = read_memory_unsigned_integer (loc, 4, byte_order);
	  regs[bits (insn, 8, 10)] = pv_constant (constant);
      else if (thumb_insn_size (insn) == 4)  /* 32-bit Thumb-2 instructions.  */
	  unsigned short inst2;

	  inst2 = read_code_unsigned_integer (start + 2, 2,
					      byte_order_for_code);

	  if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
	      /* BL, BLX.  Allow some special function calls when
		 skipping the prologue; GCC generates these before
		 storing arguments to the stack.  */
	      int j1, j2, imm1, imm2;

	      imm1 = sbits (insn, 0, 10);
	      imm2 = bits (inst2, 0, 10);
	      j1 = bit (inst2, 13);
	      j2 = bit (inst2, 11);

	      offset = ((imm1 << 12) + (imm2 << 1));
	      offset ^= ((!j2) << 22) | ((!j1) << 23);

	      nextpc = start + 4 + offset;
	      /* For BLX make sure to clear the low bits.  */
	      if (bit (inst2, 12) == 0)
		nextpc = nextpc & 0xfffffffc;

	      if (!skip_prologue_function (gdbarch, nextpc,
					   bit (inst2, 12) != 0))
	  else if ((insn & 0xffd0) == 0xe900	/* stmdb Rn{!},
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	      pv_t addr = regs[bits (insn, 0, 3)];

	      if (stack.store_would_trash (addr))

	      /* Calculate offsets of saved registers.  */
	      for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
		if (inst2 & (1 << regno))
		    addr = pv_add_constant (addr, -4);
		    stack.store (addr, 4, regs[regno]);

		regs[bits (insn, 0, 3)] = addr;
	  else if ((insn & 0xff50) == 0xe940	/* strd Rt, Rt2,
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	      int regno1 = bits (inst2, 12, 15);
	      int regno2 = bits (inst2, 8, 11);
	      pv_t addr = regs[bits (insn, 0, 3)];

	      offset = inst2 & 0xff;
		addr = pv_add_constant (addr, offset);
		addr = pv_add_constant (addr, -offset);

	      if (stack.store_would_trash (addr))

	      stack.store (addr, 4, regs[regno1]);
	      stack.store (pv_add_constant (addr, 4),

		regs[bits (insn, 0, 3)] = addr;
	  else if ((insn & 0xfff0) == 0xf8c0	/* str Rt,[Rn,+/-#imm]{!} */
		   && (inst2 & 0x0c00) == 0x0c00
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	      int regno = bits (inst2, 12, 15);
	      pv_t addr = regs[bits (insn, 0, 3)];

	      offset = inst2 & 0xff;
		addr = pv_add_constant (addr, offset);
		addr = pv_add_constant (addr, -offset);

	      if (stack.store_would_trash (addr))

	      stack.store (addr, 4, regs[regno]);

		regs[bits (insn, 0, 3)] = addr;
	  else if ((insn & 0xfff0) == 0xf8c0	/* str.w Rt,[Rn,#imm] */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	      int regno = bits (inst2, 12, 15);

	      offset = inst2 & 0xfff;
	      addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);

	      if (stack.store_would_trash (addr))

	      stack.store (addr, 4, regs[regno]);
	  else if ((insn & 0xffd0) == 0xf880	/* str{bh}.w Rt,[Rn,#imm] */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Ignore stores of argument registers to the stack.  */
	  else if ((insn & 0xffd0) == 0xf800	/* str{bh} Rt,[Rn,#+/-imm] */
		   && (inst2 & 0x0d00) == 0x0c00
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Ignore stores of argument registers to the stack.  */
	  else if ((insn & 0xffd0) == 0xe890	/* ldmia Rn[!],
		   && (inst2 & 0x8000) == 0x0000
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Ignore block loads from the stack, potentially copying
	       parameters from memory.  */
	  else if ((insn & 0xffb0) == 0xe950	/* ldrd Rt, Rt2,
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Similarly ignore dual loads from the stack.  */
	  else if ((insn & 0xfff0) == 0xf850	/* ldr Rt,[Rn,#+/-imm] */
		   && (inst2 & 0x0d00) == 0x0c00
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Similarly ignore single loads from the stack.  */
	  else if ((insn & 0xfff0) == 0xf8d0	/* ldr.w Rt,[Rn,#imm] */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Similarly ignore single loads from the stack.  */
	  else if ((insn & 0xfbf0) == 0xf100	/* add.w Rd, Rn, #imm */
		   && (inst2 & 0x8000) == 0x0000)
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_add_constant (regs[bits (insn, 0, 3)],
				   thumb_expand_immediate (imm));
	  else if ((insn & 0xfbf0) == 0xf200	/* addw Rd, Rn, #imm */
		   && (inst2 & 0x8000) == 0x0000)
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_add_constant (regs[bits (insn, 0, 3)], imm);
	  else if ((insn & 0xfbf0) == 0xf1a0	/* sub.w Rd, Rn, #imm */
		   && (inst2 & 0x8000) == 0x0000)
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_add_constant (regs[bits (insn, 0, 3)],
				   - (CORE_ADDR) thumb_expand_immediate (imm));
	  else if ((insn & 0xfbf0) == 0xf2a0	/* subw Rd, Rn, #imm */
		   && (inst2 & 0x8000) == 0x0000)
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
	  else if ((insn & 0xfbff) == 0xf04f)	/* mov.w Rd, #const */
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_constant (thumb_expand_immediate (imm));
	  else if ((insn & 0xfbf0) == 0xf240)	/* movw Rd, #const */
		= EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);

	      regs[bits (inst2, 8, 11)] = pv_constant (imm);
	  else if (insn == 0xea5f		/* mov.w Rd,Rm */
		   && (inst2 & 0xf0f0) == 0)
	      int dst_reg = (inst2 & 0x0f00) >> 8;
	      int src_reg = inst2 & 0xf;
	      regs[dst_reg] = regs[src_reg];
	  else if ((insn & 0xff7f) == 0xf85f)	/* ldr.w Rt,<label> */
	      /* Constant pool loads.  */
	      unsigned int constant;

	      offset = bits (inst2, 0, 11);
		loc = start + 4 + offset;
		loc = start + 4 - offset;

	      constant = read_memory_unsigned_integer (loc, 4, byte_order);
	      regs[bits (inst2, 12, 15)] = pv_constant (constant);
	  else if ((insn & 0xff7f) == 0xe95f)	/* ldrd Rt,Rt2,<label> */
	      /* Constant pool loads.  */
	      unsigned int constant;

	      offset = bits (inst2, 0, 7) << 2;
		loc = start + 4 + offset;
		loc = start + 4 - offset;

	      constant = read_memory_unsigned_integer (loc, 4, byte_order);
	      regs[bits (inst2, 12, 15)] = pv_constant (constant);

	      constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
	      regs[bits (inst2, 8, 11)] = pv_constant (constant);
	  else if (thumb2_instruction_changes_pc (insn, inst2))
	      /* Don't scan past anything that might change control flow.  */
	      /* The optimizer might shove anything into the prologue,
		 so we just skip what we don't recognize.  */
	      unrecognized_pc = start;
      else if (thumb_instruction_changes_pc (insn))
	  /* Don't scan past anything that might change control flow.  */
	  /* The optimizer might shove anything into the prologue,
	     so we just skip what we don't recognize.  */
	  unrecognized_pc = start;
    fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
			paddress (gdbarch, start));

  if (unrecognized_pc == 0)
    unrecognized_pc = start;

    return unrecognized_pc;

  if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
      /* Frame pointer is fp.  Frame size is constant.  */
      cache->framereg = ARM_FP_REGNUM;
      cache->framesize = -regs[ARM_FP_REGNUM].k;
  else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
      /* Frame pointer is r7.  Frame size is constant.  */
      cache->framereg = THUMB_FP_REGNUM;
      cache->framesize = -regs[THUMB_FP_REGNUM].k;
      /* Try the stack pointer... this is a bit desperate.  */
      cache->framereg = ARM_SP_REGNUM;
      cache->framesize = -regs[ARM_SP_REGNUM].k;

  for (i = 0; i < 16; i++)
    if (stack.find_reg (gdbarch, i, &offset))
      cache->saved_regs[i].addr = offset;

  return unrecognized_pc;
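
/* An illustrative Thumb prologue the scan above recognizes in full:

     push  {r4, r7, lr}		-- 12 bytes of saved registers
     sub   sp, #16		-- 16 bytes of locals
     add   r7, sp, #0		-- r7 becomes the frame pointer

   With CACHE non-NULL this leaves cache->framereg == THUMB_FP_REGNUM and
   cache->framesize == 28, with r4, r7 and lr recorded in saved_regs.  */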
/* Try to analyze the instructions starting from PC, which load the symbol
   __stack_chk_guard.  Return the address of the instruction after loading
   this symbol, set the destination register number in *DESTREG, and set
   the size in bytes of the loading instructions in *OFFSET.  Return 0 if
   the instructions are not recognized.  */

arm_analyze_load_stack_chk_guard (CORE_ADDR pc, struct gdbarch *gdbarch,
				  unsigned int *destreg, int *offset)
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  int is_thumb = arm_pc_is_thumb (gdbarch, pc);
  unsigned int low, high, address;

      unsigned short insn1
	= read_code_unsigned_integer (pc, 2, byte_order_for_code);

      if ((insn1 & 0xf800) == 0x4800)	/* ldr Rd, #immed */
	  *destreg = bits (insn1, 8, 10);

	  address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
	  address = read_memory_unsigned_integer (address, 4,
						  byte_order_for_code);
      else if ((insn1 & 0xfbf0) == 0xf240)	/* movw Rd, #const */
	  unsigned short insn2
	    = read_code_unsigned_integer (pc + 2, 2, byte_order_for_code);

	  low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);

	    = read_code_unsigned_integer (pc + 4, 2, byte_order_for_code);
	    = read_code_unsigned_integer (pc + 6, 2, byte_order_for_code);

	  /* movt Rd, #const */
	  if ((insn1 & 0xfbc0) == 0xf2c0)
	      high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
	      *destreg = bits (insn2, 8, 11);

	  address = (high << 16 | low);

	= read_code_unsigned_integer (pc, 4, byte_order_for_code);

      if ((insn & 0x0e5f0000) == 0x041f0000)	/* ldr Rd, [PC, #immed] */
	  address = bits (insn, 0, 11) + pc + 8;
	  address = read_memory_unsigned_integer (address, 4,
						  byte_order_for_code);

	  *destreg = bits (insn, 12, 15);
      else if ((insn & 0x0ff00000) == 0x03000000)	/* movw Rd, #const */
	  low = EXTRACT_MOVW_MOVT_IMM_A (insn);

	    = read_code_unsigned_integer (pc + 4, 4, byte_order_for_code);

	  if ((insn & 0x0ff00000) == 0x03400000)	/* movt Rd, #const */
	      high = EXTRACT_MOVW_MOVT_IMM_A (insn);
	      *destreg = bits (insn, 12, 15);

	  address = (high << 16 | low);
/* Try to skip a sequence of instructions used for stack protector.  If PC
   points to the first instruction of this sequence, return the address of
   the first instruction after this sequence; otherwise, return the
   original PC.

   On ARM, this sequence of instructions is composed of three main steps:
    Step 1: load the symbol __stack_chk_guard,
    Step 2: load from the address of __stack_chk_guard,
    Step 3: store it to somewhere else.

   Usually, the instructions in steps 2 and 3 are the same across ARM
   architectures.  In step 2 it is one instruction, 'ldr Rx, [Rn, #0]', and
   in step 3 it is also one instruction, 'str Rx, [r7, #immd]'.  However,
   the instructions in step 1 vary across ARM architectures.  On ARMv7,

     movw Rn, #:lower16:__stack_chk_guard
     movt Rn, #:upper16:__stack_chk_guard

     .word __stack_chk_guard

   Since ldr/str are very common instructions, we can't use them alone as
   the 'fingerprint' or 'signature' of a stack protector sequence.  Here we
   choose the sequence {movw/movt, ldr}/ldr/str plus the symbol
   __stack_chk_guard, if not stripped, as the 'fingerprint' of a stack
   protector code sequence.  */

arm_skip_stack_protector (CORE_ADDR pc, struct gdbarch *gdbarch)
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned int basereg;
  struct bound_minimal_symbol stack_chk_guard;
  int is_thumb = arm_pc_is_thumb (gdbarch, pc);

  /* Try to parse the instructions in Step 1.  */
  addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,

  stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
  /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
     Otherwise, this sequence cannot be for a stack protector.  */
  if (stack_chk_guard.minsym == NULL
      || !startswith (MSYMBOL_LINKAGE_NAME (stack_chk_guard.minsym), "__stack_chk_guard"))

      unsigned int destreg;
	= read_code_unsigned_integer (pc + offset, 2, byte_order_for_code);

      /* Step 2: ldr Rd, [Rn, #immed], encoding T1.  */
      if ((insn & 0xf800) != 0x6800)
      if (bits (insn, 3, 5) != basereg)
      destreg = bits (insn, 0, 2);

      insn = read_code_unsigned_integer (pc + offset + 2, 2,
					 byte_order_for_code);
      /* Step 3: str Rd, [Rn, #immed], encoding T1.  */
      if ((insn & 0xf800) != 0x6000)
      if (destreg != bits (insn, 0, 2))

      unsigned int destreg;
	= read_code_unsigned_integer (pc + offset, 4, byte_order_for_code);

      /* Step 2: ldr Rd, [Rn, #immed], encoding A1.  */
      if ((insn & 0x0e500000) != 0x04100000)
      if (bits (insn, 16, 19) != basereg)
      destreg = bits (insn, 12, 15);

      /* Step 3: str Rd, [Rn, #immed], encoding A1.  */
      insn = read_code_unsigned_integer (pc + offset + 4,
					 4, byte_order_for_code);
      if ((insn & 0x0e500000) != 0x04000000)
      if (bits (insn, 12, 15) != destreg)

  /* The two instructions ldr/str together take 4 bytes on Thumb-2, while 8
    return pc + offset + 4;

    return pc + offset + 8;
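
/* An illustrative ARMv7-A instance of the sequence recognized above
   (register choices are arbitrary):

     movw  r3, #:lower16:__stack_chk_guard
     movt  r3, #:upper16:__stack_chk_guard
     ldr   r3, [r3]
     str   r3, [r11, #-8]

   If the loaded address resolves to the __stack_chk_guard minimal symbol,
   the returned PC points just past the final str; otherwise the original
   PC is returned.  */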
/* Advance the PC across any function entry prologue instructions to
   reach some "real" code.

   The APCS (ARM Procedure Call Standard) defines the following

	[stmfd sp!, {a1,a2,a3,a4}]
	stmfd sp!, {...,fp,ip,lr,pc}
	[stfe f7, [sp, #-12]!]
	[stfe f6, [sp, #-12]!]
	[stfe f5, [sp, #-12]!]
	[stfe f4, [sp, #-12]!]
	sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn.  */

arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
  CORE_ADDR func_addr, limit_pc;

  /* See if we can determine the end of the prologue via the symbol table.
     If so, then return either PC, or the PC after the prologue, whichever
  if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
      CORE_ADDR post_prologue_pc
	= skip_prologue_using_sal (gdbarch, func_addr);
      struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);

      if (post_prologue_pc)
	  = arm_skip_stack_protector (post_prologue_pc, gdbarch);

      /* GCC always emits a line note before the prologue and another
	 one after, even if the two are at the same address or on the
	 same line.  Take advantage of this so that we do not need to
	 know every instruction that might appear in the prologue.  We
	 will have producer information for most binaries; if it is
	 missing (e.g. for -gstabs), assume the GNU tools.  */
      if (post_prologue_pc
	      || COMPUNIT_PRODUCER (cust) == NULL
	      || startswith (COMPUNIT_PRODUCER (cust), "GNU ")
	      || startswith (COMPUNIT_PRODUCER (cust), "clang ")))
	return post_prologue_pc;

      if (post_prologue_pc != 0)
	  CORE_ADDR analyzed_limit;

	  /* For non-GCC compilers, make sure the entire line is an
	     acceptable prologue; GDB will round this function's
	     return value up to the end of the following line so we
	     can not skip just part of a line (and we do not want to).

	     RealView does not treat the prologue specially, but does
	     associate prologue code with the opening brace; so this
	     lets us skip the first line if we think it is the opening
	  if (arm_pc_is_thumb (gdbarch, func_addr))
	    analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
						     post_prologue_pc, NULL);
	    analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
						   post_prologue_pc, NULL);

	  if (analyzed_limit != post_prologue_pc)

	  return post_prologue_pc;

  /* Can't determine prologue from the symbol table, need to examine

  /* Find an upper limit on the function prologue using the debug
     information.  If the debug information could not be used to provide
     that bound, then use an arbitrary large number as the upper bound.  */
  /* Like arm_scan_prologue, stop no later than pc + 64.  */
  limit_pc = skip_prologue_using_sal (gdbarch, pc);
    limit_pc = pc + 64;		/* Magic.  */

  /* Check if this is Thumb code.  */
  if (arm_pc_is_thumb (gdbarch, pc))
    return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
    return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL);
/* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
   This function decodes a Thumb function prologue to determine:
     1) the size of the stack frame
     2) which registers are saved on it
     3) the offsets of saved regs
     4) the offset from the stack pointer to the frame pointer

   A typical Thumb function prologue would create this stack frame
   (offsets relative to FP)
     old SP ->	24  stack parameters
     R7 ->	 0  local variables (16 bytes)
     SP ->	-12  additional stack space (12 bytes)
   The frame size would thus be 36 bytes, and the frame offset would be
   12 bytes.  The frame register is R7.

   The comments for thumb_skip_prolog() describe the algorithm we use
   to detect the end of the prolog.  */

thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
		     CORE_ADDR block_addr, struct arm_prologue_cache *cache)
  CORE_ADDR prologue_start;
  CORE_ADDR prologue_end;

  if (find_pc_partial_function (block_addr, NULL, &prologue_start,
      /* See comment in arm_scan_prologue for an explanation of
      if (prologue_end > prologue_start + 64)
	  prologue_end = prologue_start + 64;

      /* We're in the boondocks: we have no idea where the start of the

  prologue_end = std::min (prologue_end, prev_pc);

  thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
/* Return 1 if the ARM instruction INSN restores SP in epilogue, 0
   otherwise.  */

arm_instruction_restores_sp (unsigned int insn)
  if (bits (insn, 28, 31) != INST_NV)
      if ((insn & 0x0df0f000) == 0x0080d000
	  /* ADD SP (register or immediate).  */
	  || (insn & 0x0df0f000) == 0x0040d000
	  /* SUB SP (register or immediate).  */
	  || (insn & 0x0ffffff0) == 0x01a0d000
	  || (insn & 0x0fff0000) == 0x08bd0000
	  || (insn & 0x0fff0000) == 0x049d0000)
	/* POP of a single register.  */
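
/* For example, 0xe8bd8ff0 ("ldmfd sp!, {r4-r11, pc}") and 0xe28dd018
   ("add sp, sp, #24") match the patterns above, while 0xe92d4800
   ("stmfd sp!, {fp, lr}") does not.  */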
/* Analyze an ARM mode prologue starting at PROLOGUE_START and
   continuing no further than PROLOGUE_END.  If CACHE is non-NULL,
   fill it in.  Return the first address not recognized as a prologue

   We recognize all the instructions typically found in ARM prologues,
   plus harmless instructions which can be skipped (either for analysis
   purposes, or a more restrictive set that can be skipped when finding
   the end of the prologue).  */

arm_analyze_prologue (struct gdbarch *gdbarch,
		      CORE_ADDR prologue_start, CORE_ADDR prologue_end,
		      struct arm_prologue_cache *cache)
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  CORE_ADDR offset, current_pc;
  pv_t regs[ARM_FPS_REGNUM];
  CORE_ADDR unrecognized_pc = 0;

  /* Search the prologue looking for instructions that set up the
     frame pointer, adjust the stack pointer, and save registers.

     Be careful, however, and if it doesn't look like a prologue,
     don't try to scan it.  If, for instance, a frameless function
     begins with stmfd sp!, then we will tell ourselves there is
     a frame, which will confuse stack traceback, as well as "finish"
     and other operations that rely on a knowledge of the stack

  for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
    regs[regno] = pv_register (regno, 0);
  pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
  for (current_pc = prologue_start;
       current_pc < prologue_end;
	= read_code_unsigned_integer (current_pc, 4, byte_order_for_code);

      if (insn == 0xe1a0c00d)			/* mov ip, sp */
	  regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
      else if ((insn & 0xfff00000) == 0xe2800000	/* add Rd, Rn, #n */
	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
	  unsigned imm = insn & 0xff;			/* immediate value */
	  unsigned rot = (insn & 0xf00) >> 7;		/* rotate amount */
	  int rd = bits (insn, 12, 15);
	  imm = (imm >> rot) | (imm << (32 - rot));
	  regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
      else if ((insn & 0xfff00000) == 0xe2400000	/* sub Rd, Rn, #n */
	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
	  unsigned imm = insn & 0xff;			/* immediate value */
	  unsigned rot = (insn & 0xf00) >> 7;		/* rotate amount */
	  int rd = bits (insn, 12, 15);
	  imm = (imm >> rot) | (imm << (32 - rot));
	  regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
      else if ((insn & 0xffff0fff) == 0xe52d0004)	/* str Rd,
	  if (stack.store_would_trash (regs[ARM_SP_REGNUM]))

	  regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
	  stack.store (regs[ARM_SP_REGNUM], 4,
		       regs[bits (insn, 12, 15)]);
      else if ((insn & 0xffff0000) == 0xe92d0000)
	/* stmfd sp!, {..., fp, ip, lr, pc}
	   stmfd sp!, {a1, a2, a3, a4}  */
	  int mask = insn & 0xffff;

	  if (stack.store_would_trash (regs[ARM_SP_REGNUM]))

	  /* Calculate offsets of saved registers.  */
	  for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
	    if (mask & (1 << regno))
		= pv_add_constant (regs[ARM_SP_REGNUM], -4);
		stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
      else if ((insn & 0xffff0000) == 0xe54b0000	/* strb rx,[r11,#-n] */
	       || (insn & 0xffff00f0) == 0xe14b00b0	/* strh rx,[r11,#-n] */
	       || (insn & 0xffffc000) == 0xe50b0000)	/* str rx,[r11,#-n] */
	  /* No need to add this to saved_regs -- it's just an arg reg.  */
      else if ((insn & 0xffff0000) == 0xe5cd0000	/* strb rx,[sp,#n] */
	       || (insn & 0xffff00f0) == 0xe1cd00b0	/* strh rx,[sp,#n] */
	       || (insn & 0xffffc000) == 0xe58d0000)	/* str rx,[sp,#n] */
	  /* No need to add this to saved_regs -- it's just an arg reg.  */
      else if ((insn & 0xfff00000) == 0xe8800000	/* stm Rn,
	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
	  /* No need to add this to saved_regs -- it's just arg regs.  */
      else if ((insn & 0xfffff000) == 0xe24cb000)	/* sub fp, ip, #n */
	  unsigned imm = insn & 0xff;			/* immediate value */
	  unsigned rot = (insn & 0xf00) >> 7;		/* rotate amount */
	  imm = (imm >> rot) | (imm << (32 - rot));
	  regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
      else if ((insn & 0xfffff000) == 0xe24dd000)	/* sub sp, sp, #n */
	  unsigned imm = insn & 0xff;			/* immediate value */
	  unsigned rot = (insn & 0xf00) >> 7;		/* rotate amount */
	  imm = (imm >> rot) | (imm << (32 - rot));
	  regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
      else if ((insn & 0xffff7fff) == 0xed6d0103	/* stfe f?,
	       && gdbarch_tdep (gdbarch)->have_fpa_registers)
	  if (stack.store_would_trash (regs[ARM_SP_REGNUM]))

	  regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
	  regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
	  stack.store (regs[ARM_SP_REGNUM], 12, regs[regno]);
      else if ((insn & 0xffbf0fff) == 0xec2d0200	/* sfmfd f0, 4,
	       && gdbarch_tdep (gdbarch)->have_fpa_registers)
	  int n_saved_fp_regs;
	  unsigned int fp_start_reg, fp_bound_reg;

	  if (stack.store_would_trash (regs[ARM_SP_REGNUM]))

	  if ((insn & 0x800) == 0x800)		/* N0 is set */
	      if ((insn & 0x40000) == 0x40000)	/* N1 is set */
		n_saved_fp_regs = 3;
		n_saved_fp_regs = 1;
	      if ((insn & 0x40000) == 0x40000)	/* N1 is set */
		n_saved_fp_regs = 2;
		n_saved_fp_regs = 4;

	  fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
	  fp_bound_reg = fp_start_reg + n_saved_fp_regs;
	  for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
	      regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
	      stack.store (regs[ARM_SP_REGNUM], 12,
			   regs[fp_start_reg++]);
      else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL)  /* bl */
	  /* Allow some special function calls when skipping the
	     prologue; GCC generates these before storing arguments to
	  CORE_ADDR dest = BranchDest (current_pc, insn);

	  if (skip_prologue_function (gdbarch, dest, 0))
      else if ((insn & 0xf0000000) != 0xe0000000)
	break;			/* Condition not true, exit early.  */
      else if (arm_instruction_changes_pc (insn))
	/* Don't scan past anything that might change control flow.  */
      else if (arm_instruction_restores_sp (insn))
	  /* Don't scan past the epilogue.  */
      else if ((insn & 0xfe500000) == 0xe8100000	/* ldm */
	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
	/* Ignore block loads from the stack, potentially copying
	   parameters from memory.  */
      else if ((insn & 0xfc500000) == 0xe4100000
	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
	/* Similarly ignore single loads from the stack.  */
      else if ((insn & 0xffff0ff0) == 0xe1a00000)
	/* MOV Rd, Rm.  Skip register copies, i.e. saves to another
	   register instead of the stack.  */
	  /* The optimizer might shove anything into the prologue; if
	     we build up the cache (cache != NULL) from scanning the
	     prologue, we just skip what we don't recognize and scan
	     further to make the cache as complete as possible.  However,
	     if we skip the prologue, we'll stop immediately on an
	     unrecognized
	  unrecognized_pc = current_pc;

  if (unrecognized_pc == 0)
    unrecognized_pc = current_pc;

      int framereg, framesize;

      /* The frame size is just the distance from the frame register
	 to the original stack pointer.  */
      if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
	  /* Frame pointer is fp.  */
	  framereg = ARM_FP_REGNUM;
	  framesize = -regs[ARM_FP_REGNUM].k;
	  /* Try the stack pointer... this is a bit desperate.  */
	  framereg = ARM_SP_REGNUM;
	  framesize = -regs[ARM_SP_REGNUM].k;

      cache->framereg = framereg;
      cache->framesize = framesize;

      for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
	if (stack.find_reg (gdbarch, regno, &offset))
	  cache->saved_regs[regno].addr = offset;

    fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
			paddress (gdbarch, unrecognized_pc));

  return unrecognized_pc;
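
/* An illustrative APCS-style prologue the scan above recognizes in full:

     mov   ip, sp
     stmfd sp!, {fp, ip, lr, pc}
     sub   fp, ip, #4
     sub   sp, sp, #16

   With CACHE non-NULL this leaves cache->framereg == ARM_FP_REGNUM and
   cache->framesize == 4 (fp ends up 4 bytes below the caller's SP, at the
   saved pc slot), with fp, ip, lr and pc recorded in saved_regs.  */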
arm_scan_prologue (struct frame_info *this_frame,
		   struct arm_prologue_cache *cache)
  struct gdbarch *gdbarch = get_frame_arch (this_frame);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  CORE_ADDR prologue_start, prologue_end;
  CORE_ADDR prev_pc = get_frame_pc (this_frame);
  CORE_ADDR block_addr = get_frame_address_in_block (this_frame);

  /* Assume there is no frame until proven otherwise.  */
  cache->framereg = ARM_SP_REGNUM;
  cache->framesize = 0;

  /* Check for Thumb prologue.  */
  if (arm_frame_is_thumb (this_frame))
      thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);

  /* Find the function prologue.  If we can't find the function in
     the symbol table, peek in the stack frame to find the PC.  */
  if (find_pc_partial_function (block_addr, NULL, &prologue_start,
      /* One way to find the end of the prologue (which works well
	 for unoptimized code) is to do the following:

	    struct symtab_and_line sal = find_pc_line (prologue_start, 0);
	      prologue_end = prev_pc;
	    else if (sal.end < prologue_end)
	      prologue_end = sal.end;

	 This mechanism is very accurate so long as the optimizer
	 doesn't move any instructions from the function body into the
	 prologue.  If this happens, sal.end will be the last
	 instruction in the first hunk of prologue code just before
	 the first instruction that the scheduler has moved from
	 the body to the prologue.

	 In order to make sure that we scan all of the prologue
	 instructions, we use a slightly less accurate mechanism which
	 may scan more than necessary.  To help compensate for this
	 lack of accuracy, the prologue scanning loop below contains
	 several clauses which'll cause the loop to terminate early if
	 an implausible prologue instruction is encountered.

	 is a suitable endpoint since it accounts for the largest
	 possible prologue plus up to five instructions inserted by
      if (prologue_end > prologue_start + 64)
	  prologue_end = prologue_start + 64;	/* See above.  */

      /* We have no symbol information.  Our only option is to assume this
	 function has a standard stack frame and the normal frame register.
	 Then, we can find the value of our frame pointer on entrance to
	 the callee (or at the present moment if this is the innermost frame).
	 The value stored there should be the address of the stmfd + 8.  */
      CORE_ADDR frame_loc;
      ULONGEST return_value;

      /* AAPCS does not use a frame register, so we can abort here.  */
      if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_AAPCS)

      frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
      if (!safe_read_memory_unsigned_integer (frame_loc, 4, byte_order,
	  prologue_start = gdbarch_addr_bits_remove
			     (gdbarch, return_value) - 8;
	  prologue_end = prologue_start + 64;	/* See above.  */

  if (prev_pc < prologue_end)
    prologue_end = prev_pc;

  arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
static struct arm_prologue_cache *
arm_make_prologue_cache (struct frame_info *this_frame)
  struct arm_prologue_cache *cache;
  CORE_ADDR unwound_fp;

  cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
  cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);

  arm_scan_prologue (this_frame, cache);

  unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
  if (unwound_fp == 0)

  cache->prev_sp = unwound_fp + cache->framesize;

  /* Calculate actual addresses of saved registers using offsets
     determined by arm_scan_prologue.  */
  for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
    if (trad_frame_addr_p (cache->saved_regs, reg))
      cache->saved_regs[reg].addr += cache->prev_sp;
/* Implementation of the stop_reason hook for arm_prologue frames.  */

static enum unwind_stop_reason
arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
  struct arm_prologue_cache *cache;

  if (*this_cache == NULL)
    *this_cache = arm_make_prologue_cache (this_frame);
  cache = (struct arm_prologue_cache *) *this_cache;

  /* This is meant to halt the backtrace at "_start".  */
  pc = get_frame_pc (this_frame);
  if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
    return UNWIND_OUTERMOST;

  /* If we've hit a wall, stop.  */
  if (cache->prev_sp == 0)
    return UNWIND_OUTERMOST;

  return UNWIND_NO_REASON;
/* Our frame ID for a normal frame is the current function's starting PC
   and the caller's SP when we were called.  */

arm_prologue_this_id (struct frame_info *this_frame,
		      struct frame_id *this_id)
  struct arm_prologue_cache *cache;

  if (*this_cache == NULL)
    *this_cache = arm_make_prologue_cache (this_frame);
  cache = (struct arm_prologue_cache *) *this_cache;

  /* Use function start address as part of the frame ID.  If we cannot
     identify the start address (due to missing symbol information),
     fall back to just using the current PC.  */
  pc = get_frame_pc (this_frame);
  func = get_frame_func (this_frame);

  id = frame_id_build (cache->prev_sp, func);
static struct value *
arm_prologue_prev_register (struct frame_info *this_frame,
  struct gdbarch *gdbarch = get_frame_arch (this_frame);
  struct arm_prologue_cache *cache;

  if (*this_cache == NULL)
    *this_cache = arm_make_prologue_cache (this_frame);
  cache = (struct arm_prologue_cache *) *this_cache;

  /* If we are asked to unwind the PC, then we need to return the LR
     instead.  The prologue may save PC, but it will point into this
     frame's prologue, not the next frame's resume location.  Also
     strip the saved T bit.  A valid LR may have the low bit set, but
     a valid PC never does.  */
  if (prev_regnum == ARM_PC_REGNUM)
      lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
      return frame_unwind_got_constant (this_frame, prev_regnum,
					arm_addr_bits_remove (gdbarch, lr));

  /* SP is generally not saved to the stack, but this frame is
     identified by the next frame's stack pointer at the time of the call.
     The value was already reconstructed into PREV_SP.  */
  if (prev_regnum == ARM_SP_REGNUM)
    return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);

  /* The CPSR may have been changed by the call instruction and by the
     called function.  The only bit we can reconstruct is the T bit,
     by checking the low bit of LR as of the call.  This is a reliable
     indicator of Thumb-ness except for some ARM v4T pre-interworking
     Thumb code, which could get away with a clear low bit as long as
     the called function did not use bx.  Guess that all other
     bits are unchanged; the condition flags are presumably lost,
     but the processor status is likely valid.  */
  if (prev_regnum == ARM_PS_REGNUM)
      ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);

      cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
      lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
      if (IS_THUMB_ADDR (lr))
      return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);

  return trad_frame_get_prev_register (this_frame, cache->saved_regs,
struct frame_unwind arm_prologue_unwind = {
  arm_prologue_unwind_stop_reason,
  arm_prologue_this_id,
  arm_prologue_prev_register,
  default_frame_sniffer
/* Maintain a list of ARM exception table entries per objfile, similar to the
   list of mapping symbols.  We only cache entries for standard ARM-defined
   personality routines; the cache will contain only the frame unwinding
   instructions associated with the entry (not the descriptors).  */

static const struct objfile_data *arm_exidx_data_key;

struct arm_exidx_entry

typedef struct arm_exidx_entry arm_exidx_entry_s;
DEF_VEC_O(arm_exidx_entry_s);

struct arm_exidx_data
  VEC(arm_exidx_entry_s) **section_maps;

arm_exidx_data_free (struct objfile *objfile, void *arg)
  struct arm_exidx_data *data = (struct arm_exidx_data *) arg;

  for (i = 0; i < objfile->obfd->section_count; i++)
    VEC_free (arm_exidx_entry_s, data->section_maps[i]);

arm_compare_exidx_entries (const struct arm_exidx_entry *lhs,
			   const struct arm_exidx_entry *rhs)
  return lhs->addr < rhs->addr;

static struct obj_section *
arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
  struct obj_section *osect;

  ALL_OBJFILE_OSECTIONS (objfile, osect)
    if (bfd_get_section_flags (objfile->obfd,
			       osect->the_bfd_section) & SEC_ALLOC)
	bfd_vma start, size;
	start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
	size = bfd_get_section_size (osect->the_bfd_section);

	if (start <= vma && vma < start + size)
/* Parse contents of exception table and exception index sections
   of OBJFILE, and fill in the exception table entry cache.

   For each entry that refers to a standard ARM-defined personality
   routine, extract the frame unwinding instructions (from either
   the index or the table section).  The unwinding instructions
   - extracting them from the rest of the table data
   - converting to host endianness
   - appending the implicit 0xb0 ("Finish") code

   The extracted and normalized instructions are stored for later
   retrieval by the arm_find_exidx_entry routine.  */

arm_exidx_new_objfile (struct objfile *objfile)
  struct arm_exidx_data *data;
  asection *exidx, *extab;
  bfd_vma exidx_vma = 0, extab_vma = 0;

  /* If we've already touched this file, do nothing.  */
  if (!objfile || objfile_data (objfile, arm_exidx_data_key) != NULL)

  /* Read contents of exception table and index.  */
  exidx = bfd_get_section_by_name (objfile->obfd, ELF_STRING_ARM_unwind);
  gdb::byte_vector exidx_data;
      exidx_vma = bfd_section_vma (objfile->obfd, exidx);
      exidx_data.resize (bfd_get_section_size (exidx));

      if (!bfd_get_section_contents (objfile->obfd, exidx,
				     exidx_data.data (), 0,
				     exidx_data.size ()))

  extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
  gdb::byte_vector extab_data;
      extab_vma = bfd_section_vma (objfile->obfd, extab);
      extab_data.resize (bfd_get_section_size (extab));

      if (!bfd_get_section_contents (objfile->obfd, extab,
				     extab_data.data (), 0,
				     extab_data.size ()))

  /* Allocate exception table data structure.  */
  data = OBSTACK_ZALLOC (&objfile->objfile_obstack, struct arm_exidx_data);
  set_objfile_data (objfile, arm_exidx_data_key, data);
  data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
				       objfile->obfd->section_count,
				       VEC(arm_exidx_entry_s) *);

  /* Fill in exception table.  */
  for (i = 0; i < exidx_data.size () / 8; i++)
      struct arm_exidx_entry new_exidx_entry;
      bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data.data () + i * 8);
      bfd_vma val = bfd_h_get_32 (objfile->obfd,
				  exidx_data.data () + i * 8 + 4);
      bfd_vma addr = 0, word = 0;
      int n_bytes = 0, n_words = 0;
      struct obj_section *sec;
      gdb_byte *entry = NULL;

      /* Extract address of start of function.  */
      idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
      idx += exidx_vma + i * 8;

      /* Find section containing function and compute section offset.  */
      sec = arm_obj_section_from_vma (objfile, idx);
      idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);

      /* Determine address of exception table entry.  */
	  /* EXIDX_CANTUNWIND -- no exception table entry present.  */
      else if ((val & 0xff000000) == 0x80000000)
	  /* Exception table entry embedded in .ARM.exidx
	     -- must be short form.  */
      else if (!(val & 0x80000000))
	  /* Exception table entry in .ARM.extab.  */
	  addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
	  addr += exidx_vma + i * 8 + 4;

	  if (addr >= extab_vma && addr + 4 <= extab_vma + extab_data.size ())
	      word = bfd_h_get_32 (objfile->obfd,
				   extab_data.data () + addr - extab_vma);

	      if ((word & 0xff000000) == 0x80000000)
	      else if ((word & 0xff000000) == 0x81000000
		       || (word & 0xff000000) == 0x82000000)
		  n_words = ((word >> 16) & 0xff);
	      else if (!(word & 0x80000000))
		  struct obj_section *pers_sec;
		  int gnu_personality = 0;

		  /* Custom personality routine.  */
		  pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
		  pers = UNMAKE_THUMB_ADDR (pers + addr - 4);

		  /* Check whether we've got one of the variants of the
		     GNU personality routines.  */
		  pers_sec = arm_obj_section_from_vma (objfile, pers);
		      static const char *personality[] =
			  "__gcc_personality_v0",
			  "__gxx_personality_v0",
			  "__gcj_personality_v0",
			  "__gnu_objc_personality_v0",

		      CORE_ADDR pc = pers + obj_section_offset (pers_sec);

		      for (k = 0; personality[k]; k++)
			if (lookup_minimal_symbol_by_pc_name
			      (pc, personality[k], objfile))
			    gnu_personality = 1;

		  /* If so, the next word contains a word count in the high
		     byte, followed by the same unwind instructions as the
		     pre-defined forms.  */
		      && addr + 4 <= extab_vma + extab_data.size ())
		      word = bfd_h_get_32 (objfile->obfd,
					     + addr - extab_vma));
			n_words = ((word >> 24) & 0xff);

      /* Sanity check address.  */
	if (addr < extab_vma
	    || addr + 4 * n_words > extab_vma + extab_data.size ())
	  n_words = n_bytes = 0;

      /* The unwind instructions reside in WORD (only the N_BYTES least
	 significant bytes are valid), followed by N_WORDS words in the
	 extab section starting at ADDR.  */
2210 if (n_bytes || n_words)
2213 = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
2214 n_bytes + n_words * 4 + 1);
2217 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2221 word = bfd_h_get_32 (objfile->obfd,
2222 extab_data.data () + addr - extab_vma);
2225 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2226 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2227 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2228 *p++ = (gdb_byte) (word & 0xff);
2231 /* Implied "Finish" to terminate the list. */
2235 /* Push the entry onto the vector. Entries are guaranteed to
2236 appear in order of increasing addresses. */
2237 new_exidx_entry.addr = idx;
2238 new_exidx_entry.entry = entry;
2239 VEC_safe_push (arm_exidx_entry_s,
2240 data->section_maps[sec->the_bfd_section->index],
2245 /* Search for the exception table entry covering MEMADDR. If one is found,
2246 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2247 set *START to the start of the region covered by this entry. */
2250 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2252 struct obj_section *sec;
2254 sec = find_pc_section (memaddr);
2257 struct arm_exidx_data *data;
2258 VEC(arm_exidx_entry_s) *map;
2259 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2262 data = ((struct arm_exidx_data *)
2263 objfile_data (sec->objfile, arm_exidx_data_key));
2266 map = data->section_maps[sec->the_bfd_section->index];
2267 if (!VEC_empty (arm_exidx_entry_s, map))
2269 struct arm_exidx_entry *map_sym;
2271 idx = VEC_lower_bound (arm_exidx_entry_s, map, &map_key,
2272 arm_compare_exidx_entries);
2274 /* VEC_lower_bound finds the earliest ordered insertion
2275 point. If the following symbol starts at this exact
2276 address, we use that; otherwise, the preceding
2277 exception table entry covers this address. */
2278 if (idx < VEC_length (arm_exidx_entry_s, map))
2280 map_sym = VEC_index (arm_exidx_entry_s, map, idx);
2281 if (map_sym->addr == map_key.addr)
2284 *start = map_sym->addr + obj_section_addr (sec);
2285 return map_sym->entry;
2291 map_sym = VEC_index (arm_exidx_entry_s, map, idx - 1);
2293 *start = map_sym->addr + obj_section_addr (sec);
2294 return map_sym->entry;
2303 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2304 instruction list from the ARM exception table entry ENTRY, allocate and
2305 return a prologue cache structure describing how to unwind this frame.
2307 Return NULL if the unwinding instruction list contains a "spare",
2308 "reserved" or "refuse to unwind" instruction as defined in section
2309 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2310 for the ARM Architecture" document. */
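/* Illustrative sketch, not part of the original code: for a function whose
   prologue is "push {r4, r5, lr}; sub sp, sp, #16", a typical normalized
   instruction list as cached by arm_exidx_new_objfile would be the bytes
   below.  0x03 adds 16 to vsp, 0x84 0x03 pops r4, r5 and r14 under mask,
   and the trailing 0xb0 is the implicit "Finish" code.  The array name is
   hypothetical and exists only for illustration.  */

static const gdb_byte arm_exidx_example_entry[] = { 0x03, 0x84, 0x03, 0xb0 };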
2312 static struct arm_prologue_cache *
2313 arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2318 struct arm_prologue_cache *cache;
2319 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2320 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2326 /* Whenever we reload SP, we have to retrieve its actual
2327 value in the current frame. */
2330 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2332 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2333 vsp = get_frame_register_unsigned (this_frame, reg);
2337 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
2338 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2344 /* Decode next unwind instruction. */
2347 if ((insn & 0xc0) == 0)
2349 int offset = insn & 0x3f;
2350 vsp += (offset << 2) + 4;
2352 else if ((insn & 0xc0) == 0x40)
2354 int offset = insn & 0x3f;
2355 vsp -= (offset << 2) + 4;
2357 else if ((insn & 0xf0) == 0x80)
2359 int mask = ((insn & 0xf) << 8) | *entry++;
2362 /* The special case of an all-zero mask identifies
2363 "Refuse to unwind". We return NULL to fall back
2364 to the prologue analyzer. */
2368 /* Pop registers r4..r15 under mask. */
2369 for (i = 0; i < 12; i++)
2370 if (mask & (1 << i))
2372 cache->saved_regs[4 + i].addr = vsp;
2376 /* Special-case popping SP -- we need to reload vsp. */
2377 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2380 else if ((insn & 0xf0) == 0x90)
2382 int reg = insn & 0xf;
2384 /* Reserved cases. */
2385 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2388 /* Set SP from another register and mark VSP for reload. */
2389 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2392 else if ((insn & 0xf0) == 0xa0)
2394 int count = insn & 0x7;
2395 int pop_lr = (insn & 0x8) != 0;
2398 /* Pop r4..r[4+count]. */
2399 for (i = 0; i <= count; i++)
2401 cache->saved_regs[4 + i].addr = vsp;
2405 /* If indicated by flag, pop LR as well. */
2408 cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
2412 else if (insn == 0xb0)
2414 /* We could only have updated PC by popping into it; if so, it
2415 will show up as an address. Otherwise, copy LR into PC. */
2416 if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
2417 cache->saved_regs[ARM_PC_REGNUM]
2418 = cache->saved_regs[ARM_LR_REGNUM];
2423 else if (insn == 0xb1)
2425 int mask = *entry++;
2428 /* An all-zero mask, or a mask >= 16, is "spare". */
2429 if (mask == 0 || mask >= 16)
2432 /* Pop r0..r3 under mask. */
2433 for (i = 0; i < 4; i++)
2434 if (mask & (1 << i))
2436 cache->saved_regs[i].addr = vsp;
2440 else if (insn == 0xb2)
2442 ULONGEST offset = 0;
2447 offset |= (*entry & 0x7f) << shift;
2450 while (*entry++ & 0x80);
2452 vsp += 0x204 + (offset << 2);
2454 else if (insn == 0xb3)
2456 int start = *entry >> 4;
2457 int count = (*entry++) & 0xf;
2460 /* Only registers D0..D15 are valid here. */
2461 if (start + count >= 16)
2464 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2465 for (i = 0; i <= count; i++)
2467 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2471 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2474 else if ((insn & 0xf8) == 0xb8)
2476 int count = insn & 0x7;
2479 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2480 for (i = 0; i <= count; i++)
2482 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2486 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2489 else if (insn == 0xc6)
2491 int start = *entry >> 4;
2492 int count = (*entry++) & 0xf;
2495 /* Only registers WR0..WR15 are valid. */
2496 if (start + count >= 16)
2499 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2500 for (i = 0; i <= count; i++)
2502 cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
2506 else if (insn == 0xc7)
2508 int mask = *entry++;
2511 /* An all-zero mask, or a mask >= 16, is "spare". */
2512 if (mask == 0 || mask >= 16)
2515 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2516 for (i = 0; i < 4; i++)
2517 if (mask & (1 << i))
2519 cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
2523 else if ((insn & 0xf8) == 0xc0)
2525 int count = insn & 0x7;
2528 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2529 for (i = 0; i <= count; i++)
2531 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
2535 else if (insn == 0xc8)
2537 int start = *entry >> 4;
2538 int count = (*entry++) & 0xf;
2541 /* Only registers D0..D31 are valid. */
2542 if (start + count >= 16)
2545 /* Pop VFP double-precision registers
2546 D[16+start]..D[16+start+count]. */
2547 for (i = 0; i <= count; i++)
2549 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
2553 else if (insn == 0xc9)
2555 int start = *entry >> 4;
2556 int count = (*entry++) & 0xf;
2559 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2560 for (i = 0; i <= count; i++)
2562 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2566 else if ((insn & 0xf8) == 0xd0)
2568 int count = insn & 0x7;
2571 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2572 for (i = 0; i <= count; i++)
2574 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2580 /* Everything else is "spare". */
2585 /* If we restore SP from a register, assume this was the frame register.
2586 Otherwise just fall back to SP as frame register. */
2587 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2588 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2590 cache->framereg = ARM_SP_REGNUM;
2592 /* Determine offset to previous frame. */
2594 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2596 /* We already got the previous SP. */
2597 cache->prev_sp = vsp;
2602 /* Unwinding via ARM exception table entries. Note that the sniffer
2603 already computes a filled-in prologue cache, which is then used
2604 with the same arm_prologue_this_id and arm_prologue_prev_register
2605 routines also used for prologue-parsing based unwinding. */
2608 arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2609 struct frame_info *this_frame,
2610 void **this_prologue_cache)
2612 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2613 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2614 CORE_ADDR addr_in_block, exidx_region, func_start;
2615 struct arm_prologue_cache *cache;
2618 /* See if we have an ARM exception table entry covering this address. */
2619 addr_in_block = get_frame_address_in_block (this_frame);
2620 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2624 /* The ARM exception table does not describe unwind information
2625 for arbitrary PC values, but is guaranteed to be correct only
2626 at call sites. We have to decide here whether we want to use
2627 ARM exception table information for this frame, or fall back
2628 to using prologue parsing. (Note that if we have DWARF CFI,
2629 this sniffer isn't even called -- CFI is always preferred.)
2631 Before we make this decision, however, we check whether we
2632 actually have *symbol* information for the current frame.
2633 If not, prologue parsing would not work anyway, so we might
2634 as well use the exception table and hope for the best. */
2635 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2639 /* If the next frame is "normal", we are at a call site in this
2640 frame, so exception information is guaranteed to be valid. */
2641 if (get_next_frame (this_frame)
2642 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2645 /* We also assume exception information is valid if we're currently
2646 blocked in a system call. The system library is supposed to
2647 ensure this, so that e.g. pthread cancellation works. */
2648 if (arm_frame_is_thumb (this_frame))
2652 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 2,
2653 2, byte_order_for_code, &insn)
2654 && (insn & 0xff00) == 0xdf00 /* svc */)
2661 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 4,
2662 4, byte_order_for_code, &insn)
2663 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2667 /* Bail out if we don't know that exception information is valid. */
2671 /* The ARM exception index does not mark the *end* of the region
2672 covered by the entry, and some functions will not have any entry.
2673 To correctly recognize the end of the covered region, the linker
2674 should have inserted dummy records with a CANTUNWIND marker.
2676 Unfortunately, current versions of GNU ld do not reliably do
2677 this, and thus we may have found an incorrect entry above.
2678 As a (temporary) sanity check, we only use the entry if it
2679 lies *within* the bounds of the function. Note that this check
2680 might reject perfectly valid entries that just happen to cover
2681 multiple functions; therefore this check ought to be removed
2682 once the linker is fixed. */
2683 if (func_start > exidx_region)
2687 /* Decode the list of unwinding instructions into a prologue cache.
2688 Note that this may fail due to e.g. a "refuse to unwind" code. */
2689 cache = arm_exidx_fill_cache (this_frame, entry);
2693 *this_prologue_cache = cache;
2697 struct frame_unwind arm_exidx_unwind = {
2699 default_frame_unwind_stop_reason,
2700 arm_prologue_this_id,
2701 arm_prologue_prev_register,
2703 arm_exidx_unwind_sniffer
2706 static struct arm_prologue_cache *
2707 arm_make_epilogue_frame_cache (struct frame_info *this_frame)
2709 struct arm_prologue_cache *cache;
2712 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2713 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2715 /* Still rely on the offset calculated from prologue. */
2716 arm_scan_prologue (this_frame, cache);
2718 /* Since we are in epilogue, the SP has been restored. */
2719 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2721 /* Calculate actual addresses of saved registers using offsets
2722 determined by arm_scan_prologue. */
2723 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2724 if (trad_frame_addr_p (cache->saved_regs, reg))
2725 cache->saved_regs[reg].addr += cache->prev_sp;
2730 /* Implementation of function hook 'this_id' in
2731 'struct frame_unwind' for epilogue unwinder. */
2734 arm_epilogue_frame_this_id (struct frame_info *this_frame,
2736 struct frame_id *this_id)
2738 struct arm_prologue_cache *cache;
2741 if (*this_cache == NULL)
2742 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2743 cache = (struct arm_prologue_cache *) *this_cache;
2745 /* Use function start address as part of the frame ID. If we cannot
2746 identify the start address (due to missing symbol information),
2747 fall back to just using the current PC. */
2748 pc = get_frame_pc (this_frame);
2749 func = get_frame_func (this_frame);
2753 (*this_id) = frame_id_build (cache->prev_sp, pc);
2756 /* Implementation of function hook 'prev_register' in
2757 'struct frame_unwind' for epilogue unwinder. */
2759 static struct value *
2760 arm_epilogue_frame_prev_register (struct frame_info *this_frame,
2761 void **this_cache, int regnum)
2763 if (*this_cache == NULL)
2764 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2766 return arm_prologue_prev_register (this_frame, this_cache, regnum);
2769 static int arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch,
2771 static int thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch,
2774 /* Implementation of function hook 'sniffer' in
2775 'struct frame_unwind' for epilogue unwinder. */
2778 arm_epilogue_frame_sniffer (const struct frame_unwind *self,
2779 struct frame_info *this_frame,
2780 void **this_prologue_cache)
2782 if (frame_relative_level (this_frame) == 0)
2784 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2785 CORE_ADDR pc = get_frame_pc (this_frame);
2787 if (arm_frame_is_thumb (this_frame))
2788 return thumb_stack_frame_destroyed_p (gdbarch, pc);
2790 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
2796 /* Frame unwinder from epilogue. */
2798 static const struct frame_unwind arm_epilogue_frame_unwind =
2801 default_frame_unwind_stop_reason,
2802 arm_epilogue_frame_this_id,
2803 arm_epilogue_frame_prev_register,
2805 arm_epilogue_frame_sniffer,
2808 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2809 trampoline, return the target PC. Otherwise return 0.
2811 void call0a (char c, short s, int i, long l) {}
2815 (*pointer_to_call0a) (c, s, i, l);
2818 Instead of calling a stub library function _call_via_xx (xx is
2819 the register name), GCC may inline the trampoline in the object
2820 file as below (register r2 has the address of call0a).
2823 .type main, %function
2832 The trampoline 'bx r2' doesn't belong to main. */
2835 arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2837 /* The heuristic for recognizing such a trampoline is that FRAME is
2838 executing in Thumb mode and the instruction at PC is 'bx Rm'. */
2839 if (arm_frame_is_thumb (frame))
2843 if (target_read_memory (pc, buf, 2) == 0)
2845 struct gdbarch *gdbarch = get_frame_arch (frame);
2846 enum bfd_endian byte_order_for_code
2847 = gdbarch_byte_order_for_code (gdbarch);
2849 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2851 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2854 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2856 /* Clear the LSB so that gdb core sets step-resume
2857 breakpoint at the right address. */
2858 return UNMAKE_THUMB_ADDR (dest);
2866 static struct arm_prologue_cache *
2867 arm_make_stub_cache (struct frame_info *this_frame)
2869 struct arm_prologue_cache *cache;
2871 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2872 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2874 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2879 /* Our frame ID for a stub frame is the current SP and PC. */
2882 arm_stub_this_id (struct frame_info *this_frame,
2884 struct frame_id *this_id)
2886 struct arm_prologue_cache *cache;
2888 if (*this_cache == NULL)
2889 *this_cache = arm_make_stub_cache (this_frame);
2890 cache = (struct arm_prologue_cache *) *this_cache;
2892 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
2896 arm_stub_unwind_sniffer (const struct frame_unwind *self,
2897 struct frame_info *this_frame,
2898 void **this_prologue_cache)
2900 CORE_ADDR addr_in_block;
2902 CORE_ADDR pc, start_addr;
2905 addr_in_block = get_frame_address_in_block (this_frame);
2906 pc = get_frame_pc (this_frame);
2907 if (in_plt_section (addr_in_block)
2908 /* We also use the stub unwinder if the target memory is unreadable
2909 to avoid having the prologue unwinder trying to read it. */
2910 || target_read_memory (pc, dummy, 4) != 0)
2913 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2914 && arm_skip_bx_reg (this_frame, pc) != 0)
2920 struct frame_unwind arm_stub_unwind = {
2922 default_frame_unwind_stop_reason,
2924 arm_prologue_prev_register,
2926 arm_stub_unwind_sniffer
2929 /* Store, into CACHE->saved_regs, the addresses of the saved
2930 registers of the frame described by THIS_FRAME. CACHE is
2933 static struct arm_prologue_cache *
2934 arm_m_exception_cache (struct frame_info *this_frame)
2936 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2937 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2938 struct arm_prologue_cache *cache;
2939 CORE_ADDR unwound_sp;
2942 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2943 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2945 unwound_sp = get_frame_register_unsigned (this_frame,
2948 /* The hardware saves eight 32-bit words, comprising xPSR,
2949 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
2950 "B1.5.6 Exception entry behavior" in
2951 "ARMv7-M Architecture Reference Manual". */
2952 cache->saved_regs[0].addr = unwound_sp;
2953 cache->saved_regs[1].addr = unwound_sp + 4;
2954 cache->saved_regs[2].addr = unwound_sp + 8;
2955 cache->saved_regs[3].addr = unwound_sp + 12;
2956 cache->saved_regs[12].addr = unwound_sp + 16;
2957 cache->saved_regs[14].addr = unwound_sp + 20;
2958 cache->saved_regs[15].addr = unwound_sp + 24;
2959 cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;
2961 /* If bit 9 of the saved xPSR is set, then there is a four-byte
2962 aligner between the top of the 32-byte stack frame and the
2963 previous context's stack pointer. */
2964 cache->prev_sp = unwound_sp + 32;
2965 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
2966 && (xpsr & (1 << 9)) != 0)
2967 cache->prev_sp += 4;
2972 /* Implementation of function hook 'this_id' in
2973 'struct frame_unwind'. */
2976 arm_m_exception_this_id (struct frame_info *this_frame,
2978 struct frame_id *this_id)
2980 struct arm_prologue_cache *cache;
2982 if (*this_cache == NULL)
2983 *this_cache = arm_m_exception_cache (this_frame);
2984 cache = (struct arm_prologue_cache *) *this_cache;
2986 /* Our frame ID for an exception frame is the unwound SP and the current PC. */
2987 *this_id = frame_id_build (cache->prev_sp,
2988 get_frame_pc (this_frame));
2991 /* Implementation of function hook 'prev_register' in
2992 'struct frame_unwind'. */
2994 static struct value *
2995 arm_m_exception_prev_register (struct frame_info *this_frame,
2999 struct arm_prologue_cache *cache;
3001 if (*this_cache == NULL)
3002 *this_cache = arm_m_exception_cache (this_frame);
3003 cache = (struct arm_prologue_cache *) *this_cache;
3005 /* The value was already reconstructed into PREV_SP. */
3006 if (prev_regnum == ARM_SP_REGNUM)
3007 return frame_unwind_got_constant (this_frame, prev_regnum,
3010 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
3014 /* Implementation of function hook 'sniffer' in
3015 'struct frame_unwind'. */
3018 arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3019 struct frame_info *this_frame,
3020 void **this_prologue_cache)
3022 CORE_ADDR this_pc = get_frame_pc (this_frame);
3024 /* No need to check is_m; this sniffer is only registered for
3025 M-profile architectures. */
3027 /* Check if exception frame returns to a magic PC value. */
3028 return arm_m_addr_is_magic (this_pc);
3031 /* Frame unwinder for M-profile exceptions. */
3033 struct frame_unwind arm_m_exception_unwind =
3036 default_frame_unwind_stop_reason,
3037 arm_m_exception_this_id,
3038 arm_m_exception_prev_register,
3040 arm_m_exception_unwind_sniffer
3044 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
3046 struct arm_prologue_cache *cache;
3048 if (*this_cache == NULL)
3049 *this_cache = arm_make_prologue_cache (this_frame);
3050 cache = (struct arm_prologue_cache *) *this_cache;
3052 return cache->prev_sp - cache->framesize;
3055 struct frame_base arm_normal_base = {
3056 &arm_prologue_unwind,
3057 arm_normal_frame_base,
3058 arm_normal_frame_base,
3059 arm_normal_frame_base
3062 /* Assuming THIS_FRAME is a dummy, return the frame ID of that
3063 dummy frame. The frame ID's base needs to match the TOS value
3064 saved by save_dummy_frame_tos() and returned from
3065 arm_push_dummy_call, and the PC needs to match the dummy frame's
3068 static struct frame_id
3069 arm_dummy_id (struct gdbarch *gdbarch, struct frame_info *this_frame)
3071 return frame_id_build (get_frame_register_unsigned (this_frame,
3073 get_frame_pc (this_frame));
3076 /* Given THIS_FRAME, find the previous frame's resume PC (which will
3077 be used to construct the previous frame's ID, after looking up the
3078 containing function). */
3081 arm_unwind_pc (struct gdbarch *gdbarch, struct frame_info *this_frame)
3084 pc = frame_unwind_register_unsigned (this_frame, ARM_PC_REGNUM);
3085 return arm_addr_bits_remove (gdbarch, pc);
3089 arm_unwind_sp (struct gdbarch *gdbarch, struct frame_info *this_frame)
3091 return frame_unwind_register_unsigned (this_frame, ARM_SP_REGNUM);
3094 static struct value *
3095 arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
3098 struct gdbarch * gdbarch = get_frame_arch (this_frame);
3100 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
3105 /* The PC is normally copied from the return column, which
3106 describes saves of LR. However, that version may have an
3107 extra bit set to indicate Thumb state. The bit is not
3109 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3110 return frame_unwind_got_constant (this_frame, regnum,
3111 arm_addr_bits_remove (gdbarch, lr));
3114 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3115 cpsr = get_frame_register_unsigned (this_frame, regnum);
3116 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3117 if (IS_THUMB_ADDR (lr))
3121 return frame_unwind_got_constant (this_frame, regnum, cpsr);
3124 internal_error (__FILE__, __LINE__,
3125 _("Unexpected register %d"), regnum);
3130 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3131 struct dwarf2_frame_state_reg *reg,
3132 struct frame_info *this_frame)
3138 reg->how = DWARF2_FRAME_REG_FN;
3139 reg->loc.fn = arm_dwarf2_prev_register;
3142 reg->how = DWARF2_FRAME_REG_CFA;
3147 /* Implement the stack_frame_destroyed_p gdbarch method. */
3150 thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3152 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3153 unsigned int insn, insn2;
3154 int found_return = 0, found_stack_adjust = 0;
3155 CORE_ADDR func_start, func_end;
3159 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3162 /* The epilogue is a sequence of instructions along the following lines:
3164 - add stack frame size to SP or FP
3165 - [if frame pointer used] restore SP from FP
3166 - restore registers from SP [may include PC]
3167 - a return-type instruction [if PC wasn't already restored]
3169 In a first pass, we scan forward from the current PC and verify the
3170 instructions we find as compatible with this sequence, ending in a
3173 However, this is not sufficient to distinguish indirect function calls
3174 within a function from indirect tail calls in the epilogue in some cases.
3175 Therefore, if we didn't already find any SP-changing instruction during
3176 forward scan, we add a backward scanning heuristic to ensure we actually
3177 are in the epilogue. */
3180 while (scan_pc < func_end && !found_return)
3182 if (target_read_memory (scan_pc, buf, 2))
3186 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3188 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3190 else if (insn == 0x46f7) /* mov pc, lr */
3192 else if (thumb_instruction_restores_sp (insn))
3194 if ((insn & 0xff00) == 0xbd00) /* pop <registers, PC> */
3197 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
3199 if (target_read_memory (scan_pc, buf, 2))
3203 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3205 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3207 if (insn2 & 0x8000) /* <registers> include PC. */
3210 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3211 && (insn2 & 0x0fff) == 0x0b04)
3213 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3216 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3217 && (insn2 & 0x0e00) == 0x0a00)
3229 /* Since any instruction in the epilogue sequence, with the possible
3230 exception of return itself, updates the stack pointer, we need to
3231 scan backwards for at most one instruction. Try either a 16-bit or
3232 a 32-bit instruction. This is just a heuristic, so we do not worry
3233 too much about false positives. */
3235 if (pc - 4 < func_start)
3237 if (target_read_memory (pc - 4, buf, 4))
3240 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3241 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3243 if (thumb_instruction_restores_sp (insn2))
3244 found_stack_adjust = 1;
3245 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3246 found_stack_adjust = 1;
3247 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3248 && (insn2 & 0x0fff) == 0x0b04)
3249 found_stack_adjust = 1;
3250 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3251 && (insn2 & 0x0e00) == 0x0a00)
3252 found_stack_adjust = 1;
3254 return found_stack_adjust;
3258 arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch, CORE_ADDR pc)
3260 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3263 CORE_ADDR func_start, func_end;
3265 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3268 /* We are in the epilogue if the previous instruction was a stack
3269 adjustment and the next instruction is a possible return (bx, mov
3270 pc, or pop). We could have to scan backwards to find the stack
3271 adjustment, or forwards to find the return, but this is a decent
3272 approximation. First scan forwards. */
3275 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3276 if (bits (insn, 28, 31) != INST_NV)
3278 if ((insn & 0x0ffffff0) == 0x012fff10)
3281 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3284 else if ((insn & 0x0fff0000) == 0x08bd0000
3285 && (insn & 0x0000c000) != 0)
3286 /* POP (LDMIA), including PC or LR. */
3293 /* Scan backwards. This is just a heuristic, so do not worry about
3294 false positives from mode changes. */
3296 if (pc < func_start + 4)
3299 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
3300 if (arm_instruction_restores_sp (insn))
3306 /* Implement the stack_frame_destroyed_p gdbarch method. */
3309 arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3311 if (arm_pc_is_thumb (gdbarch, pc))
3312 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3314 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
3317 /* When arguments must be pushed onto the stack, they go on in reverse
3318 order. The code below implements a FILO (stack) to do this. */
3323 struct stack_item *prev;
3327 static struct stack_item *
3328 push_stack_item (struct stack_item *prev, const gdb_byte *contents, int len)
3330 struct stack_item *si;
3331 si = XNEW (struct stack_item);
3332 si->data = (gdb_byte *) xmalloc (len);
3335 memcpy (si->data, contents, len);
3339 static struct stack_item *
3340 pop_stack_item (struct stack_item *si)
3342 struct stack_item *dead = si;
3350 /* Return the alignment (in bytes) of the given type. */
3353 arm_type_align (struct type *t)
3359 t = check_typedef (t);
3360 switch (TYPE_CODE (t))
3363 /* Should never happen. */
3364 internal_error (__FILE__, __LINE__, _("unknown type alignment"));
3368 case TYPE_CODE_ENUM:
3372 case TYPE_CODE_RANGE:
3374 case TYPE_CODE_RVALUE_REF:
3375 case TYPE_CODE_CHAR:
3376 case TYPE_CODE_BOOL:
3377 return TYPE_LENGTH (t);
3379 case TYPE_CODE_ARRAY:
3380 if (TYPE_VECTOR (t))
3382 /* Use the natural alignment for vector types (the same as for
3383 scalar types), but the maximum alignment is 64 bits. */
3384 if (TYPE_LENGTH (t) > 8)
3387 return TYPE_LENGTH (t);
3390 return arm_type_align (TYPE_TARGET_TYPE (t));
3391 case TYPE_CODE_COMPLEX:
3392 return arm_type_align (TYPE_TARGET_TYPE (t));
3394 case TYPE_CODE_STRUCT:
3395 case TYPE_CODE_UNION:
3397 for (n = 0; n < TYPE_NFIELDS (t); n++)
3399 falign = arm_type_align (TYPE_FIELD_TYPE (t, n));
3407 /* Possible base types for a candidate for passing and returning in
3410 enum arm_vfp_cprc_base_type
3419 /* The length of one element of base type B. */
3422 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3426 case VFP_CPRC_SINGLE:
3428 case VFP_CPRC_DOUBLE:
3430 case VFP_CPRC_VEC64:
3432 case VFP_CPRC_VEC128:
3435 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3440 /* The character ('s', 'd' or 'q') for the type of VFP register used
3441 for passing base type B. */
3444 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3448 case VFP_CPRC_SINGLE:
3450 case VFP_CPRC_DOUBLE:
3452 case VFP_CPRC_VEC64:
3454 case VFP_CPRC_VEC128:
3457 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3462 /* Determine whether T may be part of a candidate for passing and
3463 returning in VFP registers, ignoring the limit on the total number
3464 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3465 classification of the first valid component found; if it is not
3466 VFP_CPRC_UNKNOWN, all components must have the same classification
3467 as *BASE_TYPE. If it is found that T contains a type not permitted
3468 for passing and returning in VFP registers, a type differently
3469 classified from *BASE_TYPE, or two types differently classified
3470 from each other, return -1, otherwise return the total number of
3471 base-type elements found (possibly 0 in an empty structure or
3472 array). Vector types are not currently supported, matching the
3473 generic AAPCS support. */
3476 arm_vfp_cprc_sub_candidate (struct type *t,
3477 enum arm_vfp_cprc_base_type *base_type)
3479 t = check_typedef (t);
3480 switch (TYPE_CODE (t))
3483 switch (TYPE_LENGTH (t))
3486 if (*base_type == VFP_CPRC_UNKNOWN)
3487 *base_type = VFP_CPRC_SINGLE;
3488 else if (*base_type != VFP_CPRC_SINGLE)
3493 if (*base_type == VFP_CPRC_UNKNOWN)
3494 *base_type = VFP_CPRC_DOUBLE;
3495 else if (*base_type != VFP_CPRC_DOUBLE)
3504 case TYPE_CODE_COMPLEX:
3505 /* Arguments of complex T where T is one of the types float or
3506 double get treated as if they are implemented as:
3515 switch (TYPE_LENGTH (t))
3518 if (*base_type == VFP_CPRC_UNKNOWN)
3519 *base_type = VFP_CPRC_SINGLE;
3520 else if (*base_type != VFP_CPRC_SINGLE)
3525 if (*base_type == VFP_CPRC_UNKNOWN)
3526 *base_type = VFP_CPRC_DOUBLE;
3527 else if (*base_type != VFP_CPRC_DOUBLE)
3536 case TYPE_CODE_ARRAY:
3538 if (TYPE_VECTOR (t))
3540 /* 64-bit and 128-bit containerized vector types are VFP
3542 switch (TYPE_LENGTH (t))
3545 if (*base_type == VFP_CPRC_UNKNOWN)
3546 *base_type = VFP_CPRC_VEC64;
3549 if (*base_type == VFP_CPRC_UNKNOWN)
3550 *base_type = VFP_CPRC_VEC128;
3561 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t),
3565 if (TYPE_LENGTH (t) == 0)
3567 gdb_assert (count == 0);
3570 else if (count == 0)
3572 unitlen = arm_vfp_cprc_unit_length (*base_type);
3573 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3574 return TYPE_LENGTH (t) / unitlen;
3579 case TYPE_CODE_STRUCT:
3584 for (i = 0; i < TYPE_NFIELDS (t); i++)
3588 if (!field_is_static (&TYPE_FIELD (t, i)))
3589 sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3591 if (sub_count == -1)
3595 if (TYPE_LENGTH (t) == 0)
3597 gdb_assert (count == 0);
3600 else if (count == 0)
3602 unitlen = arm_vfp_cprc_unit_length (*base_type);
3603 if (TYPE_LENGTH (t) != unitlen * count)
3608 case TYPE_CODE_UNION:
3613 for (i = 0; i < TYPE_NFIELDS (t); i++)
3615 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3617 if (sub_count == -1)
3619 count = (count > sub_count ? count : sub_count);
3621 if (TYPE_LENGTH (t) == 0)
3623 gdb_assert (count == 0);
3626 else if (count == 0)
3628 unitlen = arm_vfp_cprc_unit_length (*base_type);
3629 if (TYPE_LENGTH (t) != unitlen * count)
3641 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3642 if passed to or returned from a non-variadic function with the VFP
3643 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3644 *BASE_TYPE to the base type for T and *COUNT to the number of
3645 elements of that base type before returning. */
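/* Illustrative sketch, not part of the original code: for a homogeneous
   aggregate such as "struct { float x, y, z; }" the function below reports
   VFP_CPRC_SINGLE with a count of 3, so the value occupies s0-s2; for
   "struct { double d[4]; }" it reports VFP_CPRC_DOUBLE with a count of 4,
   occupying d0-d3.  The helper below merely restates the resulting size
   computation and is hypothetical.  */

static int
arm_vfp_cprc_example_size (enum arm_vfp_cprc_base_type base, int count)
{
  /* Total number of bytes occupied in the VFP register file.  */
  return count * arm_vfp_cprc_unit_length (base);
}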
3648 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3651 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3652 int c = arm_vfp_cprc_sub_candidate (t, &b);
3653 if (c <= 0 || c > 4)
3660 /* Return 1 if the VFP ABI should be used for passing arguments to and
3661 returning values from a function of type FUNC_TYPE, 0
3665 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3667 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3668 /* Variadic functions always use the base ABI. Assume that functions
3669 without debug info are not variadic. */
3670 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3672 /* The VFP ABI is only supported as a variant of AAPCS. */
3673 if (tdep->arm_abi != ARM_ABI_AAPCS)
3675 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3678 /* We currently only support passing parameters in integer registers, which
3679 conforms with GCC's default model, and VFP argument passing following
3680 the VFP variant of AAPCS. Several other variants exist and
3681 we should probably support some of them based on the selected ABI. */
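/* Illustrative sketch, not part of the original code: under AAPCS a
   doubleword-aligned argument must start in an even-numbered core register,
   so for a call such as f (int a, long long b) the value A goes in r0, r1
   is skipped, and B occupies the r2/r3 pair.  The helper below restates
   that register-skipping rule from the argument loop in isolation; it is
   hypothetical and assumes the first argument register has an even internal
   number, as it does in GDB's ARM register numbering.  */

static int
arm_aapcs_even_pair_example (int argreg, int align)
{
  /* Skip one register so a doubleword-aligned argument starts in an
     even register pair.  */
  if (align > INT_REGISTER_SIZE && (argreg & 1) != 0)
    argreg++;

  return argreg;
}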
3684 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
3685 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3686 struct value **args, CORE_ADDR sp,
3687 function_call_return_method return_method,
3688 CORE_ADDR struct_addr)
3690 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3694 struct stack_item *si = NULL;
3697 unsigned vfp_regs_free = (1 << 16) - 1;
3699 /* Determine the type of this function and whether the VFP ABI
3701 ftype = check_typedef (value_type (function));
3702 if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3703 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3704 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
3706 /* Set the return address. For the ARM, the return breakpoint is
3707 always at BP_ADDR. */
3708 if (arm_pc_is_thumb (gdbarch, bp_addr))
3710 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
3712 /* Walk through the list of args and determine how large a temporary
3713 stack is required. We need to take care here, as structs may be
3714 passed on the stack, and we have to push them. */
3717 argreg = ARM_A1_REGNUM;
3720 /* The struct_return pointer occupies the first parameter
3721 passing register. */
3722 if (return_method == return_method_struct)
3725 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
3726 gdbarch_register_name (gdbarch, argreg),
3727 paddress (gdbarch, struct_addr));
3728 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3732 for (argnum = 0; argnum < nargs; argnum++)
3735 struct type *arg_type;
3736 struct type *target_type;
3737 enum type_code typecode;
3738 const bfd_byte *val;
3740 enum arm_vfp_cprc_base_type vfp_base_type;
3742 int may_use_core_reg = 1;
3744 arg_type = check_typedef (value_type (args[argnum]));
3745 len = TYPE_LENGTH (arg_type);
3746 target_type = TYPE_TARGET_TYPE (arg_type);
3747 typecode = TYPE_CODE (arg_type);
3748 val = value_contents (args[argnum]);
3750 align = arm_type_align (arg_type);
3751 /* Round alignment up to a whole number of words. */
3752 align = (align + INT_REGISTER_SIZE - 1) & ~(INT_REGISTER_SIZE - 1);
3753 /* Different ABIs have different maximum alignments. */
3754 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3756 /* The APCS ABI only requires word alignment. */
3757 align = INT_REGISTER_SIZE;
3761 /* The AAPCS requires at most doubleword alignment. */
3762 if (align > INT_REGISTER_SIZE * 2)
3763 align = INT_REGISTER_SIZE * 2;
3767 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3775 /* Because this is a CPRC it cannot go in a core register or
3776 cause a core register to be skipped for alignment.
3777 Either it goes in VFP registers and the rest of this loop
3778 iteration is skipped for this argument, or it goes on the
3779 stack (and the stack alignment code is correct for this
3781 may_use_core_reg = 0;
3783 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3784 shift = unit_length / 4;
3785 mask = (1 << (shift * vfp_base_count)) - 1;
3786 for (regno = 0; regno < 16; regno += shift)
3787 if (((vfp_regs_free >> regno) & mask) == mask)
3796 vfp_regs_free &= ~(mask << regno);
3797 reg_scaled = regno / shift;
3798 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3799 for (i = 0; i < vfp_base_count; i++)
3803 if (reg_char == 'q')
3804 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
3805 val + i * unit_length);
3808 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3809 reg_char, reg_scaled + i);
3810 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3812 regcache->cooked_write (regnum, val + i * unit_length);
3819 /* This CPRC could not go in VFP registers, so all VFP
3820 registers are now marked as used. */
3825 /* Push stack padding for doubleword alignment. */
3826 if (nstack & (align - 1))
3828 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3829 nstack += INT_REGISTER_SIZE;
3832 /* Doubleword aligned quantities must go in even register pairs. */
3833 if (may_use_core_reg
3834 && argreg <= ARM_LAST_ARG_REGNUM
3835 && align > INT_REGISTER_SIZE
3839 /* If the argument is a pointer to a function, and it is a
3840 Thumb function, create a LOCAL copy of the value and set
3841 the THUMB bit in it. */
3842 if (TYPE_CODE_PTR == typecode
3843 && target_type != NULL
3844 && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
3846 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
3847 if (arm_pc_is_thumb (gdbarch, regval))
3849 bfd_byte *copy = (bfd_byte *) alloca (len);
3850 store_unsigned_integer (copy, len, byte_order,
3851 MAKE_THUMB_ADDR (regval));
3856 /* Copy the argument to general registers or the stack in
3857 register-sized pieces. Large arguments are split between
3858 registers and stack. */
3861 int partial_len = len < INT_REGISTER_SIZE ? len : INT_REGISTER_SIZE;
3863 = extract_unsigned_integer (val, partial_len, byte_order);
3865 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
3867 /* The argument is being passed in a general purpose
3869 if (byte_order == BFD_ENDIAN_BIG)
3870 regval <<= (INT_REGISTER_SIZE - partial_len) * 8;
3872 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
3874 gdbarch_register_name
3876 phex (regval, INT_REGISTER_SIZE));
3877 regcache_cooked_write_unsigned (regcache, argreg, regval);
3882 gdb_byte buf[INT_REGISTER_SIZE];
3884 memset (buf, 0, sizeof (buf));
3885 store_unsigned_integer (buf, partial_len, byte_order, regval);
3887 /* Push the arguments onto the stack. */
3889 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3891 si = push_stack_item (si, buf, INT_REGISTER_SIZE);
3892 nstack += INT_REGISTER_SIZE;
3899 /* If we have an odd number of words to push, then decrement the stack
3900 by one word now, so the first stack argument will be dword aligned. */
3907 write_memory (sp, si->data, si->len);
3908 si = pop_stack_item (si);
3911 /* Finally, update the SP register. */
3912 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3918 /* Always align the frame to an 8-byte boundary. This is required on
3919 some platforms and harmless on the rest. */
3922 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3924 /* Align the stack to eight bytes. */
3925 return sp & ~ (CORE_ADDR) 7;
3929 print_fpu_flags (struct ui_file *file, int flags)
3931 if (flags & (1 << 0))
3932 fputs_filtered ("IVO ", file);
3933 if (flags & (1 << 1))
3934 fputs_filtered ("DVZ ", file);
3935 if (flags & (1 << 2))
3936 fputs_filtered ("OFL ", file);
3937 if (flags & (1 << 3))
3938 fputs_filtered ("UFL ", file);
3939 if (flags & (1 << 4))
3940 fputs_filtered ("INX ", file);
3941 fputc_filtered ('\n', file);
3944 /* Print interesting information about the floating point processor
3945 (if present) or emulator. */
3947 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
3948 struct frame_info *frame, const char *args)
3950 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
3953 type = (status >> 24) & 127;
3954 if (status & (1 << 31))
3955 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
3957 fprintf_filtered (file, _("Software FPU type %d\n"), type);
3958 /* i18n: [floating point unit] mask */
3959 fputs_filtered (_("mask: "), file);
3960 print_fpu_flags (file, status >> 16);
3961 /* i18n: [floating point unit] flags */
3962 fputs_filtered (_("flags: "), file);
3963 print_fpu_flags (file, status);
3966 /* Construct the ARM extended floating point type. */
3967 static struct type *
3968 arm_ext_type (struct gdbarch *gdbarch)
3970 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3972 if (!tdep->arm_ext_type)
3974 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
3975 floatformats_arm_ext);
3977 return tdep->arm_ext_type;
3980 static struct type *
3981 arm_neon_double_type (struct gdbarch *gdbarch)
3983 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3985 if (tdep->neon_double_type == NULL)
3987 struct type *t, *elem;
3989 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
3991 elem = builtin_type (gdbarch)->builtin_uint8;
3992 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
3993 elem = builtin_type (gdbarch)->builtin_uint16;
3994 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
3995 elem = builtin_type (gdbarch)->builtin_uint32;
3996 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
3997 elem = builtin_type (gdbarch)->builtin_uint64;
3998 append_composite_type_field (t, "u64", elem);
3999 elem = builtin_type (gdbarch)->builtin_float;
4000 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
4001 elem = builtin_type (gdbarch)->builtin_double;
4002 append_composite_type_field (t, "f64", elem);
4004 TYPE_VECTOR (t) = 1;
4005 TYPE_NAME (t) = "neon_d";
4006 tdep->neon_double_type = t;
4009 return tdep->neon_double_type;
4012 /* FIXME: The vector types are not correctly ordered on big-endian
4013 targets. Just as s0 is the low bits of d0, d0[0] is also the low
4014 bits of d0 - regardless of what unit size is being held in d0. So
4015 the offset of the first uint8 in d0 is 7, but the offset of the
4016 first float is 4. This code works as-is for little-endian
4019 static struct type *
4020 arm_neon_quad_type (struct gdbarch *gdbarch)
4022 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4024 if (tdep->neon_quad_type == NULL)
4026 struct type *t, *elem;
4028 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
4030 elem = builtin_type (gdbarch)->builtin_uint8;
4031 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
4032 elem = builtin_type (gdbarch)->builtin_uint16;
4033 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
4034 elem = builtin_type (gdbarch)->builtin_uint32;
4035 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
4036 elem = builtin_type (gdbarch)->builtin_uint64;
4037 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
4038 elem = builtin_type (gdbarch)->builtin_float;
4039 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
4040 elem = builtin_type (gdbarch)->builtin_double;
4041 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
4043 TYPE_VECTOR (t) = 1;
4044 TYPE_NAME (t) = "neon_q";
4045 tdep->neon_quad_type = t;
4048 return tdep->neon_quad_type;
4051 /* Return the GDB type object for the "standard" data type of data in
4054 static struct type *
4055 arm_register_type (struct gdbarch *gdbarch, int regnum)
4057 int num_regs = gdbarch_num_regs (gdbarch);
4059 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
4060 && regnum >= num_regs && regnum < num_regs + 32)
4061 return builtin_type (gdbarch)->builtin_float;
4063 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
4064 && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
4065 return arm_neon_quad_type (gdbarch);
4067 /* If the target description has register information, we are only
4068 in this function so that we can override the types of
4069 double-precision registers for NEON. */
4070 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
4072 struct type *t = tdesc_register_type (gdbarch, regnum);
4074 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
4075 && TYPE_CODE (t) == TYPE_CODE_FLT
4076 && gdbarch_tdep (gdbarch)->have_neon)
4077 return arm_neon_double_type (gdbarch);
4082 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
4084 if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
4085 return builtin_type (gdbarch)->builtin_void;
4087 return arm_ext_type (gdbarch);
4089 else if (regnum == ARM_SP_REGNUM)
4090 return builtin_type (gdbarch)->builtin_data_ptr;
4091 else if (regnum == ARM_PC_REGNUM)
4092 return builtin_type (gdbarch)->builtin_func_ptr;
4093 else if (regnum >= ARRAY_SIZE (arm_register_names))
4094 /* These registers are only supported on targets which supply
4095 an XML description. */
4096 return builtin_type (gdbarch)->builtin_int0;
4098 return builtin_type (gdbarch)->builtin_uint32;
4101 /* Map a DWARF register REGNUM onto the appropriate GDB register
4105 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
4107 /* Core integer regs. */
4108 if (reg >= 0 && reg <= 15)
4111 /* Legacy FPA encoding. These were once used in a way which
4112 overlapped with VFP register numbering, so their use is
4113 discouraged, but GDB doesn't support the ARM toolchain
4114 which used them for VFP. */
4115 if (reg >= 16 && reg <= 23)
4116 return ARM_F0_REGNUM + reg - 16;
4118 /* New assignments for the FPA registers. */
4119 if (reg >= 96 && reg <= 103)
4120 return ARM_F0_REGNUM + reg - 96;
4122 /* WMMX register assignments. */
4123 if (reg >= 104 && reg <= 111)
4124 return ARM_WCGR0_REGNUM + reg - 104;
4126 if (reg >= 112 && reg <= 127)
4127 return ARM_WR0_REGNUM + reg - 112;
4129 if (reg >= 192 && reg <= 199)
4130 return ARM_WC0_REGNUM + reg - 192;
4132 /* VFP v2 registers. A double precision value is actually
4133 in d1 rather than s2, but the ABI only defines numbering
4134 for the single precision registers. This will "just work"
4135 in GDB for little endian targets (we'll read eight bytes,
4136 starting in s0 and then progressing to s1), but will be
4137 reversed on big endian targets with VFP. This won't
4138 be a problem for the new Neon quad registers; you're supposed
4139 to use DW_OP_piece for those. */
4140 if (reg >= 64 && reg <= 95)
4144 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
4145 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4149 /* VFP v3 / Neon registers. This range is also used for VFP v2
4150 registers, except that it now describes d0 instead of s0. */
4151 if (reg >= 256 && reg <= 287)
4155 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
4156 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4163 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4165 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4168 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
4170 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4171 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4173 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4174 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4176 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4177 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4179 if (reg < NUM_GREGS)
4180 return SIM_ARM_R0_REGNUM + reg;
4183 if (reg < NUM_FREGS)
4184 return SIM_ARM_FP0_REGNUM + reg;
4187 if (reg < NUM_SREGS)
4188 return SIM_ARM_FPS_REGNUM + reg;
4191 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4194 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
4195 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
4196 NULL if an error occurs. BUF is freed. */
4199 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
4200 int old_len, int new_len)
4203 int bytes_to_read = new_len - old_len;
4205 new_buf = (gdb_byte *) xmalloc (new_len);
4206 memcpy (new_buf + bytes_to_read, buf, old_len);
4208 if (target_read_code (endaddr - new_len, new_buf, bytes_to_read) != 0)
4216 /* An IT block is at most the 2-byte IT instruction followed by
4217 four 4-byte instructions. The furthest back we must search to
4218 find an IT block that affects the current instruction is thus
4219 2 + 3 * 4 == 14 bytes. */
4220 #define MAX_IT_BLOCK_PREFIX 14
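/* Minimal compile-time check, not part of the original code, assuming the
   file is built as C++11 as modern GDB is: one 2-byte IT instruction plus
   three complete 4-byte instructions can precede the instruction that the
   IT block affects.  */
static_assert (2 + 3 * 4 == MAX_IT_BLOCK_PREFIX,
	       "IT block prefix is IT plus three 32-bit instructions");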
4222 /* Use a quick scan if there are more than this many bytes of
4224 #define IT_SCAN_THRESHOLD 32
4226 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
4227 A breakpoint in an IT block may not be hit, depending on the
4230 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
4234 CORE_ADDR boundary, func_start;
4236 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
4237 int i, any, last_it, last_it_count;
4239 /* If we are using BKPT breakpoints, none of this is necessary. */
4240 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
4243 /* ARM mode does not have this problem. */
4244 if (!arm_pc_is_thumb (gdbarch, bpaddr))
4247 /* We are setting a breakpoint in Thumb code that could potentially
4248 contain an IT block. The first step is to find how much Thumb
4249 code there is; we do not need to read outside of known Thumb
4251 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
4253 /* Thumb-2 code must have mapping symbols to have a chance. */
4256 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
4258 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
4259 && func_start > boundary)
4260 boundary = func_start;
4262 /* Search for a candidate IT instruction. We have to do some fancy
4263 footwork to distinguish a real IT instruction from the second
4264 half of a 32-bit instruction, but there is no need for that if
4265 there's no candidate. */
4266 buf_len = std::min (bpaddr - boundary, (CORE_ADDR) MAX_IT_BLOCK_PREFIX);
4268 /* No room for an IT instruction. */
4271 buf = (gdb_byte *) xmalloc (buf_len);
4272 if (target_read_code (bpaddr - buf_len, buf, buf_len) != 0)
4275 for (i = 0; i < buf_len; i += 2)
4277 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4278 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4291 /* OK, the code bytes before this instruction contain at least one
4292 halfword which resembles an IT instruction. We know that it's
4293 Thumb code, but there are still two possibilities. Either the
4294 halfword really is an IT instruction, or it is the second half of
4295 a 32-bit Thumb instruction. The only way we can tell is to
4296 scan forwards from a known instruction boundary. */
4297 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
4301 /* There's a lot of code before this instruction. Start with an
4302 optimistic search; it's easy to recognize halfwords that can
4303 not be the start of a 32-bit instruction, and use that to
4304 lock on to the instruction boundaries. */
4305 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
4308 buf_len = IT_SCAN_THRESHOLD;
4311 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
4313 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4314 if (thumb_insn_size (inst1) == 2)
4321 /* At this point, if DEFINITE, BUF[I] is the first place we
4322 are sure that we know the instruction boundaries, and it is far
4323 enough from BPADDR that we could not miss an IT instruction
4324 affecting BPADDR.  If ! DEFINITE, give up - start from a known boundary.  */
4328 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
4332 buf_len = bpaddr - boundary;
4338 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
4341 buf_len = bpaddr - boundary;
4345 /* Scan forwards. Find the last IT instruction before BPADDR. */
4350 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4352 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4357 else if (inst1 & 0x0002)
4359 else if (inst1 & 0x0004)
4364 i += thumb_insn_size (inst1);
4370 /* There wasn't really an IT instruction after all. */
4373 if (last_it_count < 1)
4374 /* It was too far away. */
4377 /* This really is a trouble spot.  Move the breakpoint to the IT instruction.  */
4379 return bpaddr - buf_len + last_it;
4382 /* ARM displaced stepping support.
4384 Generally ARM displaced stepping works as follows:
4386 1. When an instruction is to be single-stepped, it is first decoded by
4387 arm_process_displaced_insn. Depending on the type of instruction, it is
4388 then copied to a scratch location, possibly in a modified form. The
4389 copy_* set of functions performs such modification, as necessary. A
4390 breakpoint is placed after the modified instruction in the scratch space
4391 to return control to GDB. Note in particular that instructions which
4392 modify the PC will no longer do so after modification.
4394 2. The instruction is single-stepped, by setting the PC to the scratch
4395 location address, and resuming.  Control returns to GDB when the breakpoint placed after the copied instruction is hit.
4398 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
4399 function used for the current instruction. This function's job is to
4400 put the CPU/memory state back to what it would have been if the
4401 instruction had been executed unmodified in its original location. */
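/* Worked example (illustration only, not part of the original code): the
   note above that "instructions which modify the PC will no longer do so
   after modification" can be seen on a concrete load.  The PC-modifying
   instruction "ldr pc, [r2, #4]" is copied with its destination rewritten
   to r0, exactly as arm_copy_ldr_str_ldrb_strb does further down, and the
   matching cleanup_load then writes the value left in r0 into the PC.  */

static uint32_t
sketch_rewrite_ldr_pc (void)
{
  uint32_t insn = 0xe592f004;	/* ldr pc, [r2, #4]  */

  /* Clear the Rn and Rt fields, then select r2 as the base and r0 as the
     destination: yields 0xe5920004, i.e. "ldr r0, [r2, #4]".  */
  return (insn & 0xfff00fff) | 0x20000;
}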
4403 /* NOP instruction (mov r0, r0). */
4404 #define ARM_NOP 0xe1a00000
4405 #define THUMB_NOP 0x4600
4407 /* Helper for register reads for displaced stepping. In particular, this
4408 returns the PC as it would be seen by the instruction at its original location.  */
4412 displaced_read_reg (struct regcache *regs, arm_displaced_step_closure *dsc,
4416 CORE_ADDR from = dsc->insn_addr;
4418 if (regno == ARM_PC_REGNUM)
4420 /* Compute pipeline offset:
4421 - When executing an ARM instruction, PC reads as the address of the
4422 current instruction plus 8.
4423 - When executing a Thumb instruction, PC reads as the address of the
4424 current instruction plus 4. */
4431 if (debug_displaced)
4432 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
4433 (unsigned long) from);
4434 return (ULONGEST) from;
4438 regcache_cooked_read_unsigned (regs, regno, &ret);
4439 if (debug_displaced)
4440 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
4441 regno, (unsigned long) ret);
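/* Standalone illustration (hypothetical name, not used by GDB) of the
   value returned above for the PC: the address of the original
   instruction plus the pipeline offset, 8 bytes in ARM state and 4 bytes
   in Thumb state.  */

static unsigned long
sketch_pc_as_seen (unsigned long insn_addr, int is_thumb)
{
  return insn_addr + (is_thumb ? 4 : 8);
}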
4447 displaced_in_arm_mode (struct regcache *regs)
4450 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
4452 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4454 return (ps & t_bit) == 0;
4457 /* Write to the PC as from a branch instruction. */
4460 branch_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4464 /* Note: If bits 0/1 are set, this branch would be unpredictable for
4465 architecture versions < 6. */
4466 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4467 val & ~(ULONGEST) 0x3);
4469 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4470 val & ~(ULONGEST) 0x1);
4473 /* Write to the PC as from a branch-exchange instruction. */
4476 bx_write_pc (struct regcache *regs, ULONGEST val)
4479 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
4481 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4485 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
4486 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
4488 else if ((val & 2) == 0)
4490 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4491 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
4495 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
4496 mode, align dest to 4 bytes). */
4497 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
4498 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4499 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
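/* The interworking rule applied by bx_write_pc above, as a standalone
   sketch (illustration only; hypothetical name, not used by GDB).  Bit 0
   of the destination selects Thumb state; if it is clear, bit 1 must also
   be clear for a valid ARM destination, otherwise the result is
   unpredictable and bx_write_pc falls back to ARM mode.  */

static int
sketch_bx_target_is_thumb (unsigned long dest, int *unpredictable)
{
  *unpredictable = 0;
  if (dest & 1)
    return 1;			/* Thumb state, PC <- dest & ~1.  */
  if ((dest & 2) == 0)
    return 0;			/* ARM state, PC <- dest.  */
  *unpredictable = 1;		/* dest & 3 == 2: unpredictable.  */
  return 0;
}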
4503 /* Write to the PC as if from a load instruction. */
4506 load_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4509 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
4510 bx_write_pc (regs, val);
4512 branch_write_pc (regs, dsc, val);
4515 /* Write to the PC as if from an ALU instruction. */
4518 alu_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4521 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
4522 bx_write_pc (regs, val);
4524 branch_write_pc (regs, dsc, val);
4527 /* Helper for writing to registers for displaced stepping. Writing to the PC
4528 has varying effects depending on the instruction which does the write:
4529 this is controlled by the WRITE_PC argument. */
4532 displaced_write_reg (struct regcache *regs, arm_displaced_step_closure *dsc,
4533 int regno, ULONGEST val, enum pc_write_style write_pc)
4535 if (regno == ARM_PC_REGNUM)
4537 if (debug_displaced)
4538 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
4539 (unsigned long) val);
4542 case BRANCH_WRITE_PC:
4543 branch_write_pc (regs, dsc, val);
4547 bx_write_pc (regs, val);
4551 load_write_pc (regs, dsc, val);
4555 alu_write_pc (regs, dsc, val);
4558 case CANNOT_WRITE_PC:
4559 warning (_("Instruction wrote to PC in an unexpected way when "
4560 "single-stepping"));
4564 internal_error (__FILE__, __LINE__,
4565 _("Invalid argument to displaced_write_reg"));
4568 dsc->wrote_to_pc = 1;
4572 if (debug_displaced)
4573 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
4574 regno, (unsigned long) val);
4575 regcache_cooked_write_unsigned (regs, regno, val);
4579 /* This function is used to concisely determine if an instruction INSN
4580 references PC. Register fields of interest in INSN should have the
4581 corresponding fields of BITMASK set to 0b1111. The function
4582 returns 1 if any of these fields in INSN reference the PC
4583 (also 0b1111, r15), else it returns 0. */
4586 insn_references_pc (uint32_t insn, uint32_t bitmask)
4588 uint32_t lowbit = 1;
4590 while (bitmask != 0)
4594 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
4600 mask = lowbit * 0xf;
4602 if ((insn & mask) == mask)
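/* Usage illustration (not part of the original code; the helper name is
   hypothetical): for ARM data-processing instructions the Rn, Rd and Rm
   fields sit at bits 16-19, 12-15 and 0-3, so a BITMASK of 0x000ff00f
   checks all three at once.  */

static int
sketch_alu_reg_mentions_pc (uint32_t insn)
{
  /* E.g. 0xe1a0f001 ("mov pc, r1") yields 1 because Rd is 15, while
     0xe1a01002 ("mov r1, r2") yields 0.  */
  return insn_references_pc (insn, 0x000ff00ful);
}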
4611 /* The simplest copy function. Many instructions have the same effect no
4612 matter what address they are executed at: in those cases, use this. */
4615 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
4616 const char *iname, arm_displaced_step_closure *dsc)
4618 if (debug_displaced)
4619 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
4620 "opcode/class '%s' unmodified\n", (unsigned long) insn,
4623 dsc->modinsn[0] = insn;
4629 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
4630 uint16_t insn2, const char *iname,
4631 arm_displaced_step_closure *dsc)
4633 if (debug_displaced)
4634 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
4635 "opcode/class '%s' unmodified\n", insn1, insn2,
4638 dsc->modinsn[0] = insn1;
4639 dsc->modinsn[1] = insn2;
4645 /* Copy a 16-bit Thumb (Thumb and 16-bit Thumb-2) instruction without any modification.  */
4648 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, uint16_t insn,
4650 arm_displaced_step_closure *dsc)
4652 if (debug_displaced)
4653 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
4654 "opcode/class '%s' unmodified\n", insn,
4657 dsc->modinsn[0] = insn;
4662 /* Preload instructions with immediate offset. */
4665 cleanup_preload (struct gdbarch *gdbarch,
4666 struct regcache *regs, arm_displaced_step_closure *dsc)
4668 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4669 if (!dsc->u.preload.immed)
4670 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
4674 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
4675 arm_displaced_step_closure *dsc, unsigned int rn)
4678 /* Preload instructions:
4680 {pli/pld} [rn, #+/-imm]
4682 {pli/pld} [r0, #+/-imm]. */
4684 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4685 rn_val = displaced_read_reg (regs, dsc, rn);
4686 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4687 dsc->u.preload.immed = 1;
4689 dsc->cleanup = &cleanup_preload;
4693 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
4694 arm_displaced_step_closure *dsc)
4696 unsigned int rn = bits (insn, 16, 19);
4698 if (!insn_references_pc (insn, 0x000f0000ul))
4699 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
4701 if (debug_displaced)
4702 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4703 (unsigned long) insn);
4705 dsc->modinsn[0] = insn & 0xfff0ffff;
4707 install_preload (gdbarch, regs, dsc, rn);
4713 thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
4714 struct regcache *regs, arm_displaced_step_closure *dsc)
4716 unsigned int rn = bits (insn1, 0, 3);
4717 unsigned int u_bit = bit (insn1, 7);
4718 int imm12 = bits (insn2, 0, 11);
4721 if (rn != ARM_PC_REGNUM)
4722 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
4724 /* PC is only allowed as the base register in PLI (immediate, literal)
4725 encoding T3 and PLD (literal) encoding T1.  */
4726 if (debug_displaced)
4727 fprintf_unfiltered (gdb_stdlog,
4728 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
4729 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
4735 /* Rewrite instruction {pli/pld} PC imm12 into:
4736 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
4740 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
4742 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4743 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4745 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
4747 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
4748 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
4749 dsc->u.preload.immed = 0;
4751 /* {pli/pld} [r0, r1] */
4752 dsc->modinsn[0] = insn1 & 0xfff0;
4753 dsc->modinsn[1] = 0xf001;
4756 dsc->cleanup = &cleanup_preload;
4760 /* Preload instructions with register offset. */
4763 install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
4764 arm_displaced_step_closure *dsc, unsigned int rn,
4767 ULONGEST rn_val, rm_val;
4769 /* Preload register-offset instructions:
4771 {pli/pld} [rn, rm {, shift}]
4773 {pli/pld} [r0, r1 {, shift}]. */
4775 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4776 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4777 rn_val = displaced_read_reg (regs, dsc, rn);
4778 rm_val = displaced_read_reg (regs, dsc, rm);
4779 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4780 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
4781 dsc->u.preload.immed = 0;
4783 dsc->cleanup = &cleanup_preload;
4787 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
4788 struct regcache *regs,
4789 arm_displaced_step_closure *dsc)
4791 unsigned int rn = bits (insn, 16, 19);
4792 unsigned int rm = bits (insn, 0, 3);
4795 if (!insn_references_pc (insn, 0x000f000ful))
4796 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
4798 if (debug_displaced)
4799 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4800 (unsigned long) insn);
4802 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
4804 install_preload_reg (gdbarch, regs, dsc, rn, rm);
4808 /* Copy/cleanup coprocessor load and store instructions. */
4811 cleanup_copro_load_store (struct gdbarch *gdbarch,
4812 struct regcache *regs,
4813 arm_displaced_step_closure *dsc)
4815 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
4817 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4819 if (dsc->u.ldst.writeback)
4820 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
4824 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
4825 arm_displaced_step_closure *dsc,
4826 int writeback, unsigned int rn)
4830 /* Coprocessor load/store instructions:
4832 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
4834 {stc/stc2} [r0, #+/-imm].
4836 ldc/ldc2 are handled identically. */
4838 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4839 rn_val = displaced_read_reg (regs, dsc, rn);
4840 /* PC should be 4-byte aligned. */
4841 rn_val = rn_val & 0xfffffffc;
4842 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4844 dsc->u.ldst.writeback = writeback;
4845 dsc->u.ldst.rn = rn;
4847 dsc->cleanup = &cleanup_copro_load_store;
4851 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
4852 struct regcache *regs,
4853 arm_displaced_step_closure *dsc)
4855 unsigned int rn = bits (insn, 16, 19);
4857 if (!insn_references_pc (insn, 0x000f0000ul))
4858 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
4860 if (debug_displaced)
4861 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4862 "load/store insn %.8lx\n", (unsigned long) insn);
4864 dsc->modinsn[0] = insn & 0xfff0ffff;
4866 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
4872 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
4873 uint16_t insn2, struct regcache *regs,
4874 arm_displaced_step_closure *dsc)
4876 unsigned int rn = bits (insn1, 0, 3);
4878 if (rn != ARM_PC_REGNUM)
4879 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
4880 "copro load/store", dsc);
4882 if (debug_displaced)
4883 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4884 "load/store insn %.4x%.4x\n", insn1, insn2);
4886 dsc->modinsn[0] = insn1 & 0xfff0;
4887 dsc->modinsn[1] = insn2;
4890 /* This function is called for copying the LDC/LDC2/VLDR instructions,
4891 which do not support writeback, so pass 0.  */
4892 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
4897 /* Clean up branch instructions (actually perform the branch, by setting PC).  */
4901 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
4902 arm_displaced_step_closure *dsc)
4904 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
4905 int branch_taken = condition_true (dsc->u.branch.cond, status);
4906 enum pc_write_style write_pc = dsc->u.branch.exchange
4907 ? BX_WRITE_PC : BRANCH_WRITE_PC;
4912 if (dsc->u.branch.link)
4914 /* The value of LR should be the address of the next instruction.  In
4915 order not to confuse logic handling a later `bx lr' insn, bit 0 of the
4916 LR value should be set to 1 if the current instruction is Thumb.  */
4917 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
4920 next_insn_addr |= 0x1;
4922 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
4926 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
4929 /* Copy B/BL/BLX instructions with immediate destinations. */
4932 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
4933 arm_displaced_step_closure *dsc,
4934 unsigned int cond, int exchange, int link, long offset)
4936 /* Implement "BL<cond> <label>" as:
4938 Preparation: cond <- instruction condition
4939 Insn: mov r0, r0 (nop)
4940 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
4942 B<cond> similar, but don't set r14 in cleanup. */
4944 dsc->u.branch.cond = cond;
4945 dsc->u.branch.link = link;
4946 dsc->u.branch.exchange = exchange;
4948 dsc->u.branch.dest = dsc->insn_addr;
4949 if (link && exchange)
4950 /* For BLX, offset is computed from the Align (PC, 4). */
4951 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
4954 dsc->u.branch.dest += 4 + offset;
4956 dsc->u.branch.dest += 8 + offset;
4958 dsc->cleanup = &cleanup_branch;
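/* Standalone sketch (illustration only; hypothetical name) of the branch
   target arithmetic used above for an ARM-state B/BL: the 24-bit
   immediate is shifted left by two, sign-extended to 26 bits, and added
   to the instruction address plus the 8-byte pipeline offset, matching
   the offset decoding in arm_copy_b_bl_blx below.  */

static unsigned long
sketch_arm_b_bl_target (unsigned long insn_addr, uint32_t insn)
{
  long offset = (insn & 0x00ffffff) << 2;	/* imm24 << 2.  */

  if (offset & 0x02000000)	/* Sign bit of the 26-bit offset.  */
    offset |= ~0x03ffffffl;	/* Sign-extend.  */

  return (insn_addr + 8 + offset) & 0xffffffffUL;
}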
4961 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
4962 struct regcache *regs, arm_displaced_step_closure *dsc)
4964 unsigned int cond = bits (insn, 28, 31);
4965 int exchange = (cond == 0xf);
4966 int link = exchange || bit (insn, 24);
4969 if (debug_displaced)
4970 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
4971 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
4972 (unsigned long) insn);
4974 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
4975 then arrange the switch into Thumb mode. */
4976 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
4978 offset = bits (insn, 0, 23) << 2;
4980 if (bit (offset, 25))
4981 offset = offset | ~0x3ffffff;
4983 dsc->modinsn[0] = ARM_NOP;
4985 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
4990 thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
4991 uint16_t insn2, struct regcache *regs,
4992 arm_displaced_step_closure *dsc)
4994 int link = bit (insn2, 14);
4995 int exchange = link && !bit (insn2, 12);
4998 int j1 = bit (insn2, 13);
4999 int j2 = bit (insn2, 11);
5000 int s = sbits (insn1, 10, 10);
5001 int i1 = !(j1 ^ bit (insn1, 10));
5002 int i2 = !(j2 ^ bit (insn1, 10));
5004 if (!link && !exchange) /* B */
5006 offset = (bits (insn2, 0, 10) << 1);
5007 if (bit (insn2, 12)) /* Encoding T4 */
5009 offset |= (bits (insn1, 0, 9) << 12)
5015 else /* Encoding T3 */
5017 offset |= (bits (insn1, 0, 5) << 12)
5021 cond = bits (insn1, 6, 9);
5026 offset = (bits (insn1, 0, 9) << 12);
5027 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
5028 offset |= exchange ?
5029 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
5032 if (debug_displaced)
5033 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
5034 "%.4x %.4x with offset %.8lx\n",
5035 link ? (exchange) ? "blx" : "bl" : "b",
5036 insn1, insn2, offset);
5038 dsc->modinsn[0] = THUMB_NOP;
5040 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
5044 /* Copy B Thumb instructions. */
5046 thumb_copy_b (struct gdbarch *gdbarch, uint16_t insn,
5047 arm_displaced_step_closure *dsc)
5049 unsigned int cond = 0;
5051 unsigned short bit_12_15 = bits (insn, 12, 15);
5052 CORE_ADDR from = dsc->insn_addr;
5054 if (bit_12_15 == 0xd)
5056 /* offset = SignExtend (imm8:0, 32) */
5057 offset = sbits ((insn << 1), 0, 8);
5058 cond = bits (insn, 8, 11);
5060 else if (bit_12_15 == 0xe) /* Encoding T2 */
5062 offset = sbits ((insn << 1), 0, 11);
5066 if (debug_displaced)
5067 fprintf_unfiltered (gdb_stdlog,
5068 "displaced: copying b immediate insn %.4x "
5069 "with offset %d\n", insn, offset);
5071 dsc->u.branch.cond = cond;
5072 dsc->u.branch.link = 0;
5073 dsc->u.branch.exchange = 0;
5074 dsc->u.branch.dest = from + 4 + offset;
5076 dsc->modinsn[0] = THUMB_NOP;
5078 dsc->cleanup = &cleanup_branch;
5083 /* Copy BX/BLX with register-specified destinations. */
5086 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
5087 arm_displaced_step_closure *dsc, int link,
5088 unsigned int cond, unsigned int rm)
5090 /* Implement {BX,BLX}<cond> <reg> as:
5092 Preparation: cond <- instruction condition
5093 Insn: mov r0, r0 (nop)
5094 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5096 Don't set r14 in cleanup for BX. */
5098 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
5100 dsc->u.branch.cond = cond;
5101 dsc->u.branch.link = link;
5103 dsc->u.branch.exchange = 1;
5105 dsc->cleanup = &cleanup_branch;
5109 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
5110 struct regcache *regs, arm_displaced_step_closure *dsc)
5112 unsigned int cond = bits (insn, 28, 31);
5115 int link = bit (insn, 5);
5116 unsigned int rm = bits (insn, 0, 3);
5118 if (debug_displaced)
5119 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
5120 (unsigned long) insn);
5122 dsc->modinsn[0] = ARM_NOP;
5124 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
5129 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
5130 struct regcache *regs,
5131 arm_displaced_step_closure *dsc)
5133 int link = bit (insn, 7);
5134 unsigned int rm = bits (insn, 3, 6);
5136 if (debug_displaced)
5137 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
5138 (unsigned short) insn);
5140 dsc->modinsn[0] = THUMB_NOP;
5142 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
5148 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
5151 cleanup_alu_imm (struct gdbarch *gdbarch,
5152 struct regcache *regs, arm_displaced_step_closure *dsc)
5154 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5155 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5156 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5157 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5161 arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5162 arm_displaced_step_closure *dsc)
5164 unsigned int rn = bits (insn, 16, 19);
5165 unsigned int rd = bits (insn, 12, 15);
5166 unsigned int op = bits (insn, 21, 24);
5167 int is_mov = (op == 0xd);
5168 ULONGEST rd_val, rn_val;
5170 if (!insn_references_pc (insn, 0x000ff000ul))
5171 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
5173 if (debug_displaced)
5174 fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
5175 "%.8lx\n", is_mov ? "move" : "ALU",
5176 (unsigned long) insn);
5178 /* Instruction is of form:
5180 <op><cond> rd, [rn,] #imm
5184 Preparation: tmp1, tmp2 <- r0, r1;
5186 Insn: <op><cond> r0, r1, #imm
5187 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5190 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5191 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5192 rn_val = displaced_read_reg (regs, dsc, rn);
5193 rd_val = displaced_read_reg (regs, dsc, rd);
5194 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5195 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5199 dsc->modinsn[0] = insn & 0xfff00fff;
5201 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
5203 dsc->cleanup = &cleanup_alu_imm;
5209 thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
5210 uint16_t insn2, struct regcache *regs,
5211 arm_displaced_step_closure *dsc)
5213 unsigned int op = bits (insn1, 5, 8);
5214 unsigned int rn, rm, rd;
5215 ULONGEST rd_val, rn_val;
5217 rn = bits (insn1, 0, 3); /* Rn */
5218 rm = bits (insn2, 0, 3); /* Rm */
5219 rd = bits (insn2, 8, 11); /* Rd */
5221 /* This routine is only called for instruction MOV. */
5222 gdb_assert (op == 0x2 && rn == 0xf);
5224 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
5225 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
5227 if (debug_displaced)
5228 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
5229 "ALU", insn1, insn2);
5231 /* Instruction is of form:
5233 <op><cond> rd, [rn,] #imm
5237 Preparation: tmp1, tmp2 <- r0, r1;
5239 Insn: <op><cond> r0, r1, #imm
5240 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5243 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5244 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5245 rn_val = displaced_read_reg (regs, dsc, rn);
5246 rd_val = displaced_read_reg (regs, dsc, rd);
5247 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5248 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5251 dsc->modinsn[0] = insn1;
5252 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
5255 dsc->cleanup = &cleanup_alu_imm;
5260 /* Copy/cleanup arithmetic/logic insns with register RHS. */
5263 cleanup_alu_reg (struct gdbarch *gdbarch,
5264 struct regcache *regs, arm_displaced_step_closure *dsc)
5269 rd_val = displaced_read_reg (regs, dsc, 0);
5271 for (i = 0; i < 3; i++)
5272 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5274 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5278 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
5279 arm_displaced_step_closure *dsc,
5280 unsigned int rd, unsigned int rn, unsigned int rm)
5282 ULONGEST rd_val, rn_val, rm_val;
5284 /* Instruction is of form:
5286 <op><cond> rd, [rn,] rm [, <shift>]
5290 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
5291 r0, r1, r2 <- rd, rn, rm
5292 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
5293 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
5296 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5297 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5298 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5299 rd_val = displaced_read_reg (regs, dsc, rd);
5300 rn_val = displaced_read_reg (regs, dsc, rn);
5301 rm_val = displaced_read_reg (regs, dsc, rm);
5302 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5303 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5304 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5307 dsc->cleanup = &cleanup_alu_reg;
5311 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5312 arm_displaced_step_closure *dsc)
5314 unsigned int op = bits (insn, 21, 24);
5315 int is_mov = (op == 0xd);
5317 if (!insn_references_pc (insn, 0x000ff00ful))
5318 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
5320 if (debug_displaced)
5321 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
5322 is_mov ? "move" : "ALU", (unsigned long) insn);
5325 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
5327 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
5329 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
5335 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
5336 struct regcache *regs,
5337 arm_displaced_step_closure *dsc)
5341 rm = bits (insn, 3, 6);
5342 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
5344 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
5345 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
5347 if (debug_displaced)
5348 fprintf_unfiltered (gdb_stdlog, "displaced: copying ALU reg insn %.4x\n",
5349 (unsigned short) insn);
5351 dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
5353 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
5358 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
5361 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
5362 struct regcache *regs,
5363 arm_displaced_step_closure *dsc)
5365 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5368 for (i = 0; i < 4; i++)
5369 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5371 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5375 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
5376 arm_displaced_step_closure *dsc,
5377 unsigned int rd, unsigned int rn, unsigned int rm,
5381 ULONGEST rd_val, rn_val, rm_val, rs_val;
5383 /* Instruction is of form:
5385 <op><cond> rd, [rn,] rm, <shift> rs
5389 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
5390 r0, r1, r2, r3 <- rd, rn, rm, rs
5391 Insn: <op><cond> r0, r1, r2, <shift> r3
5393 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
5397 for (i = 0; i < 4; i++)
5398 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
5400 rd_val = displaced_read_reg (regs, dsc, rd);
5401 rn_val = displaced_read_reg (regs, dsc, rn);
5402 rm_val = displaced_read_reg (regs, dsc, rm);
5403 rs_val = displaced_read_reg (regs, dsc, rs);
5404 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5405 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5406 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5407 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
5409 dsc->cleanup = &cleanup_alu_shifted_reg;
5413 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
5414 struct regcache *regs,
5415 arm_displaced_step_closure *dsc)
5417 unsigned int op = bits (insn, 21, 24);
5418 int is_mov = (op == 0xd);
5419 unsigned int rd, rn, rm, rs;
5421 if (!insn_references_pc (insn, 0x000fff0ful))
5422 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
5424 if (debug_displaced)
5425 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
5426 "%.8lx\n", is_mov ? "move" : "ALU",
5427 (unsigned long) insn);
5429 rn = bits (insn, 16, 19);
5430 rm = bits (insn, 0, 3);
5431 rs = bits (insn, 8, 11);
5432 rd = bits (insn, 12, 15);
5435 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
5437 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
5439 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
5444 /* Clean up load instructions. */
5447 cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
5448 arm_displaced_step_closure *dsc)
5450 ULONGEST rt_val, rt_val2 = 0, rn_val;
5452 rt_val = displaced_read_reg (regs, dsc, 0);
5453 if (dsc->u.ldst.xfersize == 8)
5454 rt_val2 = displaced_read_reg (regs, dsc, 1);
5455 rn_val = displaced_read_reg (regs, dsc, 2);
5457 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5458 if (dsc->u.ldst.xfersize > 4)
5459 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5460 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5461 if (!dsc->u.ldst.immed)
5462 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5464 /* Handle register writeback. */
5465 if (dsc->u.ldst.writeback)
5466 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5467 /* Put result in right place. */
5468 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
5469 if (dsc->u.ldst.xfersize == 8)
5470 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
5473 /* Clean up store instructions. */
5476 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
5477 arm_displaced_step_closure *dsc)
5479 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
5481 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5482 if (dsc->u.ldst.xfersize > 4)
5483 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5484 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5485 if (!dsc->u.ldst.immed)
5486 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5487 if (!dsc->u.ldst.restore_r4)
5488 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
5491 if (dsc->u.ldst.writeback)
5492 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5495 /* Copy "extra" load/store instructions. These are halfword/doubleword
5496 transfers, which have a different encoding to byte/word transfers. */
5499 arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
5500 struct regcache *regs, arm_displaced_step_closure *dsc)
5502 unsigned int op1 = bits (insn, 20, 24);
5503 unsigned int op2 = bits (insn, 5, 6);
5504 unsigned int rt = bits (insn, 12, 15);
5505 unsigned int rn = bits (insn, 16, 19);
5506 unsigned int rm = bits (insn, 0, 3);
5507 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
5508 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
5509 int immed = (op1 & 0x4) != 0;
5511 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
5513 if (!insn_references_pc (insn, 0x000ff00ful))
5514 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
5516 if (debug_displaced)
5517 fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
5518 "insn %.8lx\n", unprivileged ? "unprivileged " : "",
5519 (unsigned long) insn);
5521 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
5524 internal_error (__FILE__, __LINE__,
5525 _("copy_extra_ld_st: instruction decode error"));
5527 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5528 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5529 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5531 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5533 rt_val = displaced_read_reg (regs, dsc, rt);
5534 if (bytesize[opcode] == 8)
5535 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
5536 rn_val = displaced_read_reg (regs, dsc, rn);
5538 rm_val = displaced_read_reg (regs, dsc, rm);
5540 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5541 if (bytesize[opcode] == 8)
5542 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
5543 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5545 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5548 dsc->u.ldst.xfersize = bytesize[opcode];
5549 dsc->u.ldst.rn = rn;
5550 dsc->u.ldst.immed = immed;
5551 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
5552 dsc->u.ldst.restore_r4 = 0;
5555 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
5557 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
5558 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5560 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
5562 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
5563 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5565 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
5570 /* Copy byte/half word/word loads and stores. */
5573 install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5574 arm_displaced_step_closure *dsc, int load,
5575 int immed, int writeback, int size, int usermode,
5576 int rt, int rm, int rn)
5578 ULONGEST rt_val, rn_val, rm_val = 0;
5580 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5581 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5583 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5585 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
5587 rt_val = displaced_read_reg (regs, dsc, rt);
5588 rn_val = displaced_read_reg (regs, dsc, rn);
5590 rm_val = displaced_read_reg (regs, dsc, rm);
5592 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5593 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5595 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5597 dsc->u.ldst.xfersize = size;
5598 dsc->u.ldst.rn = rn;
5599 dsc->u.ldst.immed = immed;
5600 dsc->u.ldst.writeback = writeback;
5602 /* To write PC we can do:
5604 Before this sequence of instructions:
5605 r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
5606 r2 is the Rn value got from displaced_read_reg.
5608 Insn1: push {pc} Write address of STR instruction + offset on stack
5609 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
5610 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
5611 = addr(Insn1) + offset - addr(Insn3) - 8
5613 Insn4: add r4, r4, #8 r4 = offset - 8
5614 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
5616 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
5618 Otherwise we don't know what value to write for PC, since the offset is
5619 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
5620 of this can be found in Section "Saving from r15" in
5621 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
5623 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
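/* Standalone sketch (illustration only; hypothetical name) of the
   arithmetic behind the push {pc} / pop {r4} / sub / add sequence
   described above: whatever store offset the hardware uses for the PC
   (8 or 12), the sequence leaves r0 holding FROM plus that same offset,
   which is exactly what the original STR at FROM would have stored.  */

static unsigned long
sketch_stored_pc_value (unsigned long from, unsigned long scratch,
			int pc_store_offset /* 8 or 12 */)
{
  unsigned long r0 = from + 8;			/* PC from displaced_read_reg.  */
  unsigned long r4 = scratch + pc_store_offset;	/* push {pc}; pop {r4}.  */

  r4 -= scratch + 2 * 4 + 8;	/* sub r4, r4, pc: pc reads as scratch + 16.  */
  r4 += 8;			/* add r4, r4, #8: r4 = pc_store_offset - 8.  */
  return r0 + r4;		/* add r0, r0, r4: from + pc_store_offset.  */
}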
5628 thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
5629 uint16_t insn2, struct regcache *regs,
5630 arm_displaced_step_closure *dsc, int size)
5632 unsigned int u_bit = bit (insn1, 7);
5633 unsigned int rt = bits (insn2, 12, 15);
5634 int imm12 = bits (insn2, 0, 11);
5637 if (debug_displaced)
5638 fprintf_unfiltered (gdb_stdlog,
5639 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
5640 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
5646 /* Rewrite instruction LDR Rt imm12 into:
5648 Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12
5652 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2]. */
5655 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5656 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5657 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5659 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5661 pc_val = pc_val & 0xfffffffc;
5663 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
5664 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
5668 dsc->u.ldst.xfersize = size;
5669 dsc->u.ldst.immed = 0;
5670 dsc->u.ldst.writeback = 0;
5671 dsc->u.ldst.restore_r4 = 0;
5673 /* LDR R0, R2, R3 */
5674 dsc->modinsn[0] = 0xf852;
5675 dsc->modinsn[1] = 0x3;
5678 dsc->cleanup = &cleanup_load;
5684 thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
5685 uint16_t insn2, struct regcache *regs,
5686 arm_displaced_step_closure *dsc,
5687 int writeback, int immed)
5689 unsigned int rt = bits (insn2, 12, 15);
5690 unsigned int rn = bits (insn1, 0, 3);
5691 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
5692 /* In LDR (register), there is also a register Rm, which is not allowed to
5693 be PC, so we don't have to check it. */
5695 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
5696 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
5699 if (debug_displaced)
5700 fprintf_unfiltered (gdb_stdlog,
5701 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
5702 rt, rn, insn1, insn2);
5704 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
5707 dsc->u.ldst.restore_r4 = 0;
5710 /* ldr[b]<cond> rt, [rn, #imm], etc.
5712 ldr[b]<cond> r0, [r2, #imm]. */
5714 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5715 dsc->modinsn[1] = insn2 & 0x0fff;
5718 /* ldr[b]<cond> rt, [rn, rm], etc.
5720 ldr[b]<cond> r0, [r2, r3]. */
5722 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5723 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
5733 arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
5734 struct regcache *regs,
5735 arm_displaced_step_closure *dsc,
5736 int load, int size, int usermode)
5738 int immed = !bit (insn, 25);
5739 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
5740 unsigned int rt = bits (insn, 12, 15);
5741 unsigned int rn = bits (insn, 16, 19);
5742 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
5744 if (!insn_references_pc (insn, 0x000ff00ful))
5745 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
5747 if (debug_displaced)
5748 fprintf_unfiltered (gdb_stdlog,
5749 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
5750 load ? (size == 1 ? "ldrb" : "ldr")
5751 : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
5753 (unsigned long) insn);
5755 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
5756 usermode, rt, rm, rn);
5758 if (load || rt != ARM_PC_REGNUM)
5760 dsc->u.ldst.restore_r4 = 0;
5763 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
5765 {ldr,str}[b]<cond> r0, [r2, #imm]. */
5766 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5768 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
5770 {ldr,str}[b]<cond> r0, [r2, r3]. */
5771 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5775 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
5776 dsc->u.ldst.restore_r4 = 1;
5777 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
5778 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
5779 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
5780 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
5781 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
5785 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
5787 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
5792 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5797 /* Cleanup LDM instructions with fully-populated register list. This is an
5798 unfortunate corner case: it's impossible to implement correctly by modifying
5799 the instruction. The issue is as follows: we have an instruction,
5803 which we must rewrite to avoid loading PC. A possible solution would be to
5804 do the load in two halves, something like (with suitable cleanup
5808 ldm[id][ab] r8!, {r0-r7}
5810 ldm[id][ab] r8, {r7-r14}
5813 but at present there's no suitable place for <temp>, since the scratch space
5814 is overwritten before the cleanup routine is called. For now, we simply
5815 emulate the instruction. */
5818 cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
5819 arm_displaced_step_closure *dsc)
5821 int inc = dsc->u.block.increment;
5822 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
5823 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
5824 uint32_t regmask = dsc->u.block.regmask;
5825 int regno = inc ? 0 : 15;
5826 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
5827 int exception_return = dsc->u.block.load && dsc->u.block.user
5828 && (regmask & 0x8000) != 0;
5829 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5830 int do_transfer = condition_true (dsc->u.block.cond, status);
5831 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5836 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
5837 sensible we can do here. Complain loudly. */
5838 if (exception_return)
5839 error (_("Cannot single-step exception return"));
5841 /* We don't handle any stores here for now. */
5842 gdb_assert (dsc->u.block.load != 0);
5844 if (debug_displaced)
5845 fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
5846 "%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
5847 dsc->u.block.increment ? "inc" : "dec",
5848 dsc->u.block.before ? "before" : "after");
5855 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
5858 while (regno >= 0 && (regmask & (1 << regno)) == 0)
5861 xfer_addr += bump_before;
5863 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
5864 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
5866 xfer_addr += bump_after;
5868 regmask &= ~(1 << regno);
5871 if (dsc->u.block.writeback)
5872 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
5876 /* Clean up an STM which included the PC in the register list. */
5879 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
5880 arm_displaced_step_closure *dsc)
5882 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5883 int store_executed = condition_true (dsc->u.block.cond, status);
5884 CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
5885 CORE_ADDR stm_insn_addr;
5888 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5890 /* If condition code fails, there's nothing else to do. */
5891 if (!store_executed)
5894 if (dsc->u.block.increment)
5896 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
5898 if (dsc->u.block.before)
5903 pc_stored_at = dsc->u.block.xfer_addr;
5905 if (dsc->u.block.before)
5909 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
5910 stm_insn_addr = dsc->scratch_base;
5911 offset = pc_val - stm_insn_addr;
5913 if (debug_displaced)
5914 fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
5915 "STM instruction\n", offset);
5917 /* Rewrite the stored PC to the proper value for the non-displaced original instruction.  */
5919 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
5920 dsc->insn_addr + offset);
5923 /* Clean up an LDM which includes the PC in the register list. We clumped all
5924 the registers in the transferred list into a contiguous range r0...rX (to
5925 avoid loading PC directly and losing control of the debugged program), so we
5926 must undo that here. */
5929 cleanup_block_load_pc (struct gdbarch *gdbarch,
5930 struct regcache *regs,
5931 arm_displaced_step_closure *dsc)
5933 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5934 int load_executed = condition_true (dsc->u.block.cond, status);
5935 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
5936 unsigned int regs_loaded = bitcount (mask);
5937 unsigned int num_to_shuffle = regs_loaded, clobbered;
5939 /* The method employed here will fail if the register list is fully populated
5940 (we need to avoid loading PC directly). */
5941 gdb_assert (num_to_shuffle < 16);
5946 clobbered = (1 << num_to_shuffle) - 1;
5948 while (num_to_shuffle > 0)
5950 if ((mask & (1 << write_reg)) != 0)
5952 unsigned int read_reg = num_to_shuffle - 1;
5954 if (read_reg != write_reg)
5956 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
5957 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
5958 if (debug_displaced)
5959 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
5960 "loaded register r%d to r%d\n"), read_reg,
5963 else if (debug_displaced)
5964 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
5965 "r%d already in the right place\n"),
5968 clobbered &= ~(1 << write_reg);
5976 /* Restore any registers we scribbled over. */
5977 for (write_reg = 0; clobbered != 0; write_reg++)
5979 if ((clobbered & (1 << write_reg)) != 0)
5981 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
5983 if (debug_displaced)
5984 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
5985 "clobbered register r%d\n"), write_reg);
5986 clobbered &= ~(1 << write_reg);
5990 /* Perform register writeback manually. */
5991 if (dsc->u.block.writeback)
5993 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
5995 if (dsc->u.block.increment)
5996 new_rn_val += regs_loaded * 4;
5998 new_rn_val -= regs_loaded * 4;
6000 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
6005 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
6006 in user-level code (in particular exception return, ldm rn, {...pc}^). */
6009 arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
6010 struct regcache *regs,
6011 arm_displaced_step_closure *dsc)
6013 int load = bit (insn, 20);
6014 int user = bit (insn, 22);
6015 int increment = bit (insn, 23);
6016 int before = bit (insn, 24);
6017 int writeback = bit (insn, 21);
6018 int rn = bits (insn, 16, 19);
6020 /* Block transfers which don't mention PC can be run directly out-of-line.  */
6022 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
6023 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
6025 if (rn == ARM_PC_REGNUM)
6027 warning (_("displaced: Unpredictable LDM or STM with "
6028 "base register r15"));
6029 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
6032 if (debug_displaced)
6033 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6034 "%.8lx\n", (unsigned long) insn);
6036 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
6037 dsc->u.block.rn = rn;
6039 dsc->u.block.load = load;
6040 dsc->u.block.user = user;
6041 dsc->u.block.increment = increment;
6042 dsc->u.block.before = before;
6043 dsc->u.block.writeback = writeback;
6044 dsc->u.block.cond = bits (insn, 28, 31);
6046 dsc->u.block.regmask = insn & 0xffff;
6050 if ((insn & 0xffff) == 0xffff)
6052 /* LDM with a fully-populated register list. This case is
6053 particularly tricky. Implement for now by fully emulating the
6054 instruction (which might not behave perfectly in all cases, but
6055 these instructions should be rare enough for that not to matter in practice).  */
6057 dsc->modinsn[0] = ARM_NOP;
6059 dsc->cleanup = &cleanup_block_load_all;
6063 /* LDM of a list of registers which includes PC. Implement by
6064 rewriting the list of registers to be transferred into a
6065 contiguous chunk r0...rX before doing the transfer, then shuffling
6066 registers into the correct places in the cleanup routine. */
6067 unsigned int regmask = insn & 0xffff;
6068 unsigned int num_in_list = bitcount (regmask), new_regmask;
6071 for (i = 0; i < num_in_list; i++)
6072 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6074 /* Writeback makes things complicated. We need to avoid clobbering
6075 the base register with one of the registers in our modified
6076 register list, but just using a different register can't work in
6079 ldm r14!, {r0-r13,pc}
6081 which would need to be rewritten as:
6085 but that can't work, because there's no free register for N.
6087 Solve this by turning off the writeback bit, and emulating
6088 writeback manually in the cleanup routine. */
6093 new_regmask = (1 << num_in_list) - 1;
6095 if (debug_displaced)
6096 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6097 "{..., pc}: original reg list %.4x, modified "
6098 "list %.4x\n"), rn, writeback ? "!" : "",
6099 (int) insn & 0xffff, new_regmask);
6101 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
6103 dsc->cleanup = &cleanup_block_load_pc;
6108 /* STM of a list of registers which includes PC. Run the instruction
6109 as-is, but out of line: this will store the wrong value for the PC,
6110 so we must manually fix up the memory in the cleanup routine.
6111 Doing things this way has the advantage that we can auto-detect
6112 the offset of the PC write (which is architecture-dependent) in
6113 the cleanup routine. */
6114 dsc->modinsn[0] = insn;
6116 dsc->cleanup = &cleanup_block_store_pc;
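/* Standalone sketch (illustration only; hypothetical name) of the
   register-list rewriting used above for an LDM that includes the PC:
   the transferred registers are clumped into r0..r(N-1) for the
   out-of-line copy, and cleanup_block_load_pc later moves each value to
   its real destination.  E.g. a mask of 0x8101 (r0, r8, pc) has three
   bits set and becomes 0x0007 (r0-r2).  */

static unsigned int
sketch_clumped_regmask (unsigned int regmask)
{
  unsigned int num_in_list = 0;

  for (unsigned int m = regmask & 0xffff; m != 0; m &= m - 1)
    num_in_list++;		/* Same role as bitcount () above.  */

  return (1u << num_in_list) - 1;
}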
6123 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6124 struct regcache *regs,
6125 arm_displaced_step_closure *dsc)
6127 int rn = bits (insn1, 0, 3);
6128 int load = bit (insn1, 4);
6129 int writeback = bit (insn1, 5);
6131 /* Block transfers which don't mention PC can be run directly out-of-line.  */
6133 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
6134 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
6136 if (rn == ARM_PC_REGNUM)
6138 warning (_("displaced: Unpredictable LDM or STM with "
6139 "base register r15"));
6140 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6141 "unpredictable ldm/stm", dsc);
6144 if (debug_displaced)
6145 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6146 "%.4x%.4x\n", insn1, insn2);
6148 /* Clear bit 13, since it should always be zero.  */
6149 dsc->u.block.regmask = (insn2 & 0xdfff);
6150 dsc->u.block.rn = rn;
6152 dsc->u.block.load = load;
6153 dsc->u.block.user = 0;
6154 dsc->u.block.increment = bit (insn1, 7);
6155 dsc->u.block.before = bit (insn1, 8);
6156 dsc->u.block.writeback = writeback;
6157 dsc->u.block.cond = INST_AL;
6158 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
6162 if (dsc->u.block.regmask == 0xffff)
6164 /* This branch is unreachable: bit 13 was cleared above, so the register mask can never be 0xffff.  */
6169 unsigned int regmask = dsc->u.block.regmask;
6170 unsigned int num_in_list = bitcount (regmask), new_regmask;
6173 for (i = 0; i < num_in_list; i++)
6174 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6179 new_regmask = (1 << num_in_list) - 1;
6181 if (debug_displaced)
6182 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6183 "{..., pc}: original reg list %.4x, modified "
6184 "list %.4x\n"), rn, writeback ? "!" : "",
6185 (int) dsc->u.block.regmask, new_regmask);
6187 dsc->modinsn[0] = insn1;
6188 dsc->modinsn[1] = (new_regmask & 0xffff);
6191 dsc->cleanup = &cleanup_block_load_pc;
6196 dsc->modinsn[0] = insn1;
6197 dsc->modinsn[1] = insn2;
6199 dsc->cleanup = &cleanup_block_store_pc;
6204 /* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
6205 This is used to avoid a dependency on BFD's bfd_endian enum. */
6208 arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len,
6211 return read_memory_unsigned_integer (memaddr, len,
6212 (enum bfd_endian) byte_order);
6215 /* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
6218 arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self,
6221 return gdbarch_addr_bits_remove (self->regcache->arch (), val);
6224 /* Wrapper over syscall_next_pc for use in get_next_pcs. */
6227 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self)
6232 /* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
6235 arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self)
6237 return arm_is_thumb (self->regcache);
6240 /* single_step() is called just before we want to resume the inferior,
6241 if we want to single-step it but there is no hardware or kernel
6242 single-step support. We find the target of the coming instructions
6243 and breakpoint them. */
6245 std::vector<CORE_ADDR>
6246 arm_software_single_step (struct regcache *regcache)
6248 struct gdbarch *gdbarch = regcache->arch ();
6249 struct arm_get_next_pcs next_pcs_ctx;
6251 arm_get_next_pcs_ctor (&next_pcs_ctx,
6252 &arm_get_next_pcs_ops,
6253 gdbarch_byte_order (gdbarch),
6254 gdbarch_byte_order_for_code (gdbarch),
6258 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
6260 for (CORE_ADDR &pc_ref : next_pcs)
6261 pc_ref = gdbarch_addr_bits_remove (gdbarch, pc_ref);
6266 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6267 for Linux, where some SVC instructions must be treated specially. */
6270 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
6271 arm_displaced_step_closure *dsc)
6273 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
6275 if (debug_displaced)
6276 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
6277 "%.8lx\n", (unsigned long) resume_addr);
6279 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
6283 /* Common copy routine for SVC instructions.  */
6286 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
6287 arm_displaced_step_closure *dsc)
6289 /* Preparation: none.
6290 Insn: unmodified svc.
6291 Cleanup: pc <- insn_addr + insn_size. */
6293 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next instruction.  */
6295 dsc->wrote_to_pc = 1;
6297 /* Allow OS-specific code to override SVC handling. */
6298 if (dsc->u.svc.copy_svc_os)
6299 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
6302 dsc->cleanup = &cleanup_svc;
6308 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
6309 struct regcache *regs, arm_displaced_step_closure *dsc)
6312 if (debug_displaced)
6313 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
6314 (unsigned long) insn);
6316 dsc->modinsn[0] = insn;
6318 return install_svc (gdbarch, regs, dsc);
6322 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
6323 struct regcache *regs, arm_displaced_step_closure *dsc)
6326 if (debug_displaced)
6327 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
6330 dsc->modinsn[0] = insn;
6332 return install_svc (gdbarch, regs, dsc);
6335 /* Copy undefined instructions. */
6338 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
6339 arm_displaced_step_closure *dsc)
6341 if (debug_displaced)
6342 fprintf_unfiltered (gdb_stdlog,
6343 "displaced: copying undefined insn %.8lx\n",
6344 (unsigned long) insn);
6346 dsc->modinsn[0] = insn;
6352 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6353 arm_displaced_step_closure *dsc)
6356 if (debug_displaced)
6357 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
6358 "%.4x %.4x\n", (unsigned short) insn1,
6359 (unsigned short) insn2);
6361 dsc->modinsn[0] = insn1;
6362 dsc->modinsn[1] = insn2;
6368 /* Copy unpredictable instructions. */
6371 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
6372 arm_displaced_step_closure *dsc)
6374 if (debug_displaced)
6375 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
6376 "%.8lx\n", (unsigned long) insn);
6378 dsc->modinsn[0] = insn;
6383 /* The decode_* functions are instruction decoding helpers. They mostly follow
6384 the presentation in the ARM ARM. */
6387 arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
6388 struct regcache *regs,
6389 arm_displaced_step_closure *dsc)
6391 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
6392 unsigned int rn = bits (insn, 16, 19);
6394 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0x1) == 0x0)
6395 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
6396 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0x1) == 0x1)
6397 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
6398 else if ((op1 & 0x60) == 0x20)
6399 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
6400 else if ((op1 & 0x71) == 0x40)
6401 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
6403 else if ((op1 & 0x77) == 0x41)
6404 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6405 else if ((op1 & 0x77) == 0x45)
6406 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
6407 else if ((op1 & 0x77) == 0x51)
6410 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6412 return arm_copy_unpred (gdbarch, insn, dsc);
6414 else if ((op1 & 0x77) == 0x55)
6415 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6416 else if (op1 == 0x57)
6419 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
6420 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
6421 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
6422 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
6423 default: return arm_copy_unpred (gdbarch, insn, dsc);
6425 else if ((op1 & 0x63) == 0x43)
6426 return arm_copy_unpred (gdbarch, insn, dsc);
6427 else if ((op2 & 0x1) == 0x0)
6428 switch (op1 & ~0x80)
6431 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6433 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
6434 case 0x71: case 0x75:
6436 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
6437 case 0x63: case 0x67: case 0x73: case 0x77:
6438 return arm_copy_unpred (gdbarch, insn, dsc);
6440 return arm_copy_undef (gdbarch, insn, dsc);
6443 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
6447 arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
6448 struct regcache *regs,
6449 arm_displaced_step_closure *dsc)
6451 if (bit (insn, 27) == 0)
6452 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
6453 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
6454 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
6457 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
6460 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
6462 case 0x4: case 0x5: case 0x6: case 0x7:
6463 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6466 switch ((insn & 0xe00000) >> 21)
6468 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
6470 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6473 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6476 return arm_copy_undef (gdbarch, insn, dsc);
6481 int rn_f = (bits (insn, 16, 19) == 0xf);
6482 switch ((insn & 0xe00000) >> 21)
6485 /* ldc/ldc2 imm (undefined for rn == pc). */
6486 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
6487 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6490 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6492 case 0x4: case 0x5: case 0x6: case 0x7:
6493 /* ldc/ldc2 lit (undefined for rn != pc). */
6494 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
6495 : arm_copy_undef (gdbarch, insn, dsc);
6498 return arm_copy_undef (gdbarch, insn, dsc);
6503 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
6506 if (bits (insn, 16, 19) == 0xf)
6508 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6510 return arm_copy_undef (gdbarch, insn, dsc);
6514 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6516 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6520 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6522 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6525 return arm_copy_undef (gdbarch, insn, dsc);
6529 /* Decode miscellaneous instructions in dp/misc encoding space. */
6532 arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
6533 struct regcache *regs,
6534 arm_displaced_step_closure *dsc)
6536 unsigned int op2 = bits (insn, 4, 6);
6537 unsigned int op = bits (insn, 21, 22);
6542 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
6545 if (op == 0x1) /* bx. */
6546 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
6548 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
6550 return arm_copy_undef (gdbarch, insn, dsc);
6554 /* Not really supported. */
6555 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
6557 return arm_copy_undef (gdbarch, insn, dsc);
6561 return arm_copy_bx_blx_reg (gdbarch, insn,
6562 regs, dsc); /* blx register. */
6564 return arm_copy_undef (gdbarch, insn, dsc);
6567 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
6571 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
6573 /* Not really supported. */
6574 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
6578 return arm_copy_undef (gdbarch, insn, dsc);
6583 arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
6584 struct regcache *regs,
6585 arm_displaced_step_closure *dsc)
6588 switch (bits (insn, 20, 24))
6591 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
6594 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
6596 case 0x12: case 0x16:
6597 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
6600 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
6604 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
6606 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
6607 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
6608 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
6609 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
6610 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
6611 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
6612 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
6613 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
6614 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
6615 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
6616 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
6617 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
6618 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
6619 /* 2nd arg means "unprivileged". */
6620 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
6624 /* Should be unreachable. */
6629 arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
6630 struct regcache *regs,
6631 arm_displaced_step_closure *dsc)
6633 int a = bit (insn, 25), b = bit (insn, 4);
6634 uint32_t op1 = bits (insn, 20, 24);
6636 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
6637 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
6638 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
6639 else if ((!a && (op1 & 0x17) == 0x02)
6640 || (a && (op1 & 0x17) == 0x02 && !b))
6641 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
6642 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
6643 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
6644 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
6645 else if ((!a && (op1 & 0x17) == 0x03)
6646 || (a && (op1 & 0x17) == 0x03 && !b))
6647 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
6648 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
6649 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
6650 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
6651 else if ((!a && (op1 & 0x17) == 0x06)
6652 || (a && (op1 & 0x17) == 0x06 && !b))
6653 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
6654 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
6655 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
6656 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
6657 else if ((!a && (op1 & 0x17) == 0x07)
6658 || (a && (op1 & 0x17) == 0x07 && !b))
6659 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
6661 /* Should be unreachable. */
6666 arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
6667 arm_displaced_step_closure *dsc)
6669 switch (bits (insn, 20, 24))
6671 case 0x00: case 0x01: case 0x02: case 0x03:
6672 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
6674 case 0x04: case 0x05: case 0x06: case 0x07:
6675 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
6677 case 0x08: case 0x09: case 0x0a: case 0x0b:
6678 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
6679 return arm_copy_unmodified (gdbarch, insn,
6680 "decode/pack/unpack/saturate/reverse", dsc);
6683 if (bits (insn, 5, 7) == 0) /* op2. */
6685 if (bits (insn, 12, 15) == 0xf)
6686 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
6688 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
6691 return arm_copy_undef (gdbarch, insn, dsc);
6693 case 0x1a: case 0x1b:
6694 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6695 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
6697 return arm_copy_undef (gdbarch, insn, dsc);
6699 case 0x1c: case 0x1d:
6700 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
6702 if (bits (insn, 0, 3) == 0xf)
6703 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
6705 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
6708 return arm_copy_undef (gdbarch, insn, dsc);
6710 case 0x1e: case 0x1f:
6711 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6712 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
6714 return arm_copy_undef (gdbarch, insn, dsc);
6717 /* Should be unreachable. */
6722 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn,
6723 struct regcache *regs,
6724 arm_displaced_step_closure *dsc)
6727 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6729 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
6733 arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
6734 struct regcache *regs,
6735 arm_displaced_step_closure *dsc)
6737 unsigned int opcode = bits (insn, 20, 24);
6741 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
6742 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
6744 case 0x08: case 0x0a: case 0x0c: case 0x0e:
6745 case 0x12: case 0x16:
6746 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
6748 case 0x09: case 0x0b: case 0x0d: case 0x0f:
6749 case 0x13: case 0x17:
6750 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
6752 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6753 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6754 /* Note: no writeback for these instructions. Bit 25 will always be
6755 zero though (via caller), so the following works OK. */
6756 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6759 /* Should be unreachable. */
6763 /* Decode shifted register instructions. */
6766 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
6767 uint16_t insn2, struct regcache *regs,
6768 arm_displaced_step_closure *dsc)
6770 /* PC is only allowed to be used in instruction MOV. */
6772 unsigned int op = bits (insn1, 5, 8);
6773 unsigned int rn = bits (insn1, 0, 3);
6775 if (op == 0x2 && rn == 0xf) /* MOV */
6776 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
6778 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6779 "dp (shift reg)", dsc);
6783 /* Decode extension register load/store. Exactly the same as
6784 arm_decode_ext_reg_ld_st. */
6787 thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
6788 uint16_t insn2, struct regcache *regs,
6789 arm_displaced_step_closure *dsc)
6791 unsigned int opcode = bits (insn1, 4, 8);
6795 case 0x04: case 0x05:
6796 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6797 "vfp/neon vmov", dsc);
6799 case 0x08: case 0x0c: /* 01x00 */
6800 case 0x0a: case 0x0e: /* 01x10 */
6801 case 0x12: case 0x16: /* 10x10 */
6802 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6803 "vfp/neon vstm/vpush", dsc);
6805 case 0x09: case 0x0d: /* 01x01 */
6806 case 0x0b: case 0x0f: /* 01x11 */
6807 case 0x13: case 0x17: /* 10x11 */
6808 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6809 "vfp/neon vldm/vpop", dsc);
6811 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6812 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6814 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6815 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
6818 /* Should be unreachable. */
6823 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
6824 struct regcache *regs, arm_displaced_step_closure *dsc)
6826 unsigned int op1 = bits (insn, 20, 25);
6827 int op = bit (insn, 4);
6828 unsigned int coproc = bits (insn, 8, 11);
6830 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
6831 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
6832 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
6833 && (coproc & 0xe) != 0xa)
6835 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6836 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
6837 && (coproc & 0xe) != 0xa)
6838 /* ldc/ldc2 imm/lit. */
6839 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6840 else if ((op1 & 0x3e) == 0x00)
6841 return arm_copy_undef (gdbarch, insn, dsc);
6842 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
6843 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
6844 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
6845 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6846 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
6847 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6848 else if ((op1 & 0x30) == 0x20 && !op)
6850 if ((coproc & 0xe) == 0xa)
6851 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
6853 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6855 else if ((op1 & 0x30) == 0x20 && op)
6856 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
6857 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
6858 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6859 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
6860 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6861 else if ((op1 & 0x30) == 0x30)
6862 return arm_copy_svc (gdbarch, insn, regs, dsc);
6864 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
6868 thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
6869 uint16_t insn2, struct regcache *regs,
6870 arm_displaced_step_closure *dsc)
6872 unsigned int coproc = bits (insn2, 8, 11);
6873 unsigned int bit_5_8 = bits (insn1, 5, 8);
6874 unsigned int bit_9 = bit (insn1, 9);
6875 unsigned int bit_4 = bit (insn1, 4);
6880 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6881 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
6883 else if (bit_5_8 == 0) /* UNDEFINED. */
6884 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
6887 /* coproc is 101x: SIMD/VFP, ext registers load/store. */
6888 if ((coproc & 0xe) == 0xa)
6889 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
6891 else /* coproc is not 101x. */
6893 if (bit_4 == 0) /* STC/STC2. */
6894 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6896 else /* LDC/LDC2 {literal, immediate}. */
6897 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
6903 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
6909 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
6910 arm_displaced_step_closure *dsc, int rd)
6916 Preparation: Rd <- PC
6922 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6923 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
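/* Note that displaced_read_reg special-cases the PC: it returns the
   address of the original instruction plus the pipeline offset (8 bytes
   in ARM state, 4 in Thumb state), so Rd receives the same PC value the
   original PC-relative instruction would have used.  */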
6927 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
6928 arm_displaced_step_closure *dsc,
6929 int rd, unsigned int imm)
6932 /* Encoding T2: ADDS Rd, #imm */
6933 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
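/* For example (illustrative values): with rd == 1 and imm == 4 this
   yields 0x3104, i.e. "adds r1, #4".  */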
6935 install_pc_relative (gdbarch, regs, dsc, rd);
6941 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
6942 struct regcache *regs,
6943 arm_displaced_step_closure *dsc)
6945 unsigned int rd = bits (insn, 8, 10);
6946 unsigned int imm8 = bits (insn, 0, 7);
6948 if (debug_displaced)
6949 fprintf_unfiltered (gdb_stdlog,
6950 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
6953 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
6957 thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
6958 uint16_t insn2, struct regcache *regs,
6959 arm_displaced_step_closure *dsc)
6961 unsigned int rd = bits (insn2, 8, 11);
6962 /* The immediate has the same encoding in ADR, ADD and SUB, so we simply
6963 extract the raw immediate encoding rather than computing the immediate
6964 value. When generating the ADD or SUB instruction, we can then simply OR
6965 the immediate into the encoding. */
6966 unsigned int imm_3_8 = insn2 & 0x70ff;
6967 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
6969 if (debug_displaced)
6970 fprintf_unfiltered (gdb_stdlog,
6971 "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
6972 rd, imm_i, imm_3_8, insn1, insn2);
6974 if (bit (insn1, 7)) /* Encoding T2 */
6976 /* Encoding T3: SUB Rd, Rd, #imm */
6977 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
6978 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
6980 else /* Encoding T3 */
6982 /* Encoding T3: ADD Rd, Rd, #imm */
6983 dsc->modinsn[0] = (0xf100 | rd | imm_i);
6984 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
6988 install_pc_relative (gdbarch, regs, dsc, rd);
6994 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, uint16_t insn1,
6995 struct regcache *regs,
6996 arm_displaced_step_closure *dsc)
6998 unsigned int rt = bits (insn1, 8, 10);
7000 int imm8 = (bits (insn1, 0, 7) << 2);
7006 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
7008 Insn: LDR R0, [R2, R3];
7009 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
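/* The 16-bit LDR (literal) encoding has no base register field -- it is
   implicitly PC-relative -- so the copy builds the address out of scratch
   registers instead: r2 holds the aligned PC, r3 the byte offset, and
   cleanup_load later moves the loaded value from r0 into the original Rt
   and restores r0, r2 and r3 from tmp[].  */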
7011 if (debug_displaced)
7012 fprintf_unfiltered (gdb_stdlog,
7013 "displaced: copying thumb ldr r%d [pc #%d]\n"
7016 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
7017 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
7018 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
7019 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7020 /* The assembler calculates the required value of the offset from the
7021 Align(PC,4) value of this instruction to the label. */
7022 pc = pc & 0xfffffffc;
7024 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
7025 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
7028 dsc->u.ldst.xfersize = 4;
7030 dsc->u.ldst.immed = 0;
7031 dsc->u.ldst.writeback = 0;
7032 dsc->u.ldst.restore_r4 = 0;
7034 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/
7036 dsc->cleanup = &cleanup_load;
7041 /* Copy Thumb cbnz/cbz instruction. */
7044 thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
7045 struct regcache *regs,
7046 arm_displaced_step_closure *dsc)
7048 int non_zero = bit (insn1, 11);
7049 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
7050 CORE_ADDR from = dsc->insn_addr;
7051 int rn = bits (insn1, 0, 2);
7052 int rn_val = displaced_read_reg (regs, dsc, rn);
7054 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
7055 /* CBNZ and CBZ do not affect the condition flags. If the condition is
7056 true, set it to INST_AL so that cleanup_branch knows the branch is taken;
7057 otherwise leave it as false and cleanup_branch will do nothing. */
7058 if (dsc->u.branch.cond)
7060 dsc->u.branch.cond = INST_AL;
7061 dsc->u.branch.dest = from + 4 + imm5;
7064 dsc->u.branch.dest = from + 2;
7066 dsc->u.branch.link = 0;
7067 dsc->u.branch.exchange = 0;
7069 if (debug_displaced)
7070 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
7071 " insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
7072 rn, rn_val, insn1, dsc->u.branch.dest);
7074 dsc->modinsn[0] = THUMB_NOP;
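/* The copied instruction is just a NOP: the branch decision was already
   made above from the current value of Rn, and cleanup_branch performs
   the PC update (or does nothing if the branch is not taken).  */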
7076 dsc->cleanup = &cleanup_branch;
7080 /* Copy Table Branch Byte/Halfword */
7082 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
7083 uint16_t insn2, struct regcache *regs,
7084 arm_displaced_step_closure *dsc)
7086 ULONGEST rn_val, rm_val;
7087 int is_tbh = bit (insn2, 4);
7088 CORE_ADDR halfwords = 0;
7089 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7091 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
7092 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
7098 target_read_memory (rn_val + 2 * rm_val, buf, 2);
7099 halfwords = extract_unsigned_integer (buf, 2, byte_order);
7105 target_read_memory (rn_val + rm_val, buf, 1);
7106 halfwords = extract_unsigned_integer (buf, 1, byte_order);
7109 if (debug_displaced)
7110 fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x offset 0x%x"
7111 " offset 0x%x\n", is_tbh ? "tbh" : "tbb",
7112 (unsigned int) rn_val, (unsigned int) rm_val,
7113 (unsigned int) halfwords);
7115 dsc->u.branch.cond = INST_AL;
7116 dsc->u.branch.link = 0;
7117 dsc->u.branch.exchange = 0;
7118 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
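/* The table entry is a halfword count: the branch target is the Thumb PC
   value of the TBB/TBH (its address plus 4) plus twice the byte or
   halfword read from the table, hence insn_addr + 4 + 2 * halfwords.  */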
7120 dsc->cleanup = &cleanup_branch;
7126 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
7127 arm_displaced_step_closure *dsc)
7130 int val = displaced_read_reg (regs, dsc, 7);
7131 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
7134 val = displaced_read_reg (regs, dsc, 8);
7135 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
7138 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
7143 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, uint16_t insn1,
7144 struct regcache *regs,
7145 arm_displaced_step_closure *dsc)
7147 dsc->u.block.regmask = insn1 & 0x00ff;
7149 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
7152 (1) register list is full, that is, r0-r7 are used.
7153 Prepare: tmp[0] <- r8
7155 POP {r0, r1, ...., r6, r7}; remove PC from reglist
7156 MOV r8, r7; Move value of r7 to r8;
7157 POP {r7}; Store PC value into r7.
7159 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
7161 (2) register list is not full, supposing there are N registers in
7162 register list (except PC, 0 <= N <= 7).
7163 Prepare: for each i, 0 - N, tmp[i] <- ri.
7165 POP {r0, r1, ...., rN};
7167 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
7168 from tmp[] properly.
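For example (illustrative): POP {r1, r2, pc} has regmask 0x06 and falls
under case (2); num_in_list is 2, so the copied instruction becomes
POP {r0, r1, r2} (new_regmask 0x7) and cleanup_block_load_pc distributes
the three loaded words to r1, r2 and the PC, restoring the clobbered low
registers from tmp[].  */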
7170 if (debug_displaced)
7171 fprintf_unfiltered (gdb_stdlog,
7172 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
7173 dsc->u.block.regmask, insn1);
7175 if (dsc->u.block.regmask == 0xff)
7177 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
7179 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
7180 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
7181 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
7184 dsc->cleanup = &cleanup_pop_pc_16bit_all;
7188 unsigned int num_in_list = bitcount (dsc->u.block.regmask);
7190 unsigned int new_regmask;
7192 for (i = 0; i < num_in_list + 1; i++)
7193 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7195 new_regmask = (1 << (num_in_list + 1)) - 1;
7197 if (debug_displaced)
7198 fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
7199 "{..., pc}: original reg list %.4x,"
7200 " modified list %.4x\n"),
7201 (int) dsc->u.block.regmask, new_regmask);
7203 dsc->u.block.regmask |= 0x8000;
7204 dsc->u.block.writeback = 0;
7205 dsc->u.block.cond = INST_AL;
7207 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
7209 dsc->cleanup = &cleanup_block_load_pc;
7216 thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7217 struct regcache *regs,
7218 arm_displaced_step_closure *dsc)
7220 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
7221 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
7224 /* 16-bit thumb instructions. */
7225 switch (op_bit_12_15)
7227 /* Shift (immediate), add, subtract, move and compare. */
7228 case 0: case 1: case 2: case 3:
7229 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7230 "shift/add/sub/mov/cmp",
7234 switch (op_bit_10_11)
7236 case 0: /* Data-processing */
7237 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7241 case 1: /* Special data instructions and branch and exchange. */
7243 unsigned short op = bits (insn1, 7, 9);
7244 if (op == 6 || op == 7) /* BX or BLX */
7245 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
7246 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
7247 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
7249 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
7253 default: /* LDR (literal) */
7254 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
7257 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
7258 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
7261 if (op_bit_10_11 < 2) /* Generate PC-relative address */
7262 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
7263 else /* Generate SP-relative address */
7264 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
7266 case 11: /* Misc 16-bit instructions */
7268 switch (bits (insn1, 8, 11))
7270 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
7271 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
7273 case 12: case 13: /* POP */
7274 if (bit (insn1, 8)) /* PC is in register list. */
7275 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
7277 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
7279 case 15: /* If-Then, and hints */
7280 if (bits (insn1, 0, 3))
7281 /* If-Then makes up to four following instructions conditional. The
7282 IT instruction itself is not conditional, so handle it as a
7283 common unmodified instruction. */
7284 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
7287 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
7290 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
7295 if (op_bit_10_11 < 2) /* Store multiple registers */
7296 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
7297 else /* Load multiple registers */
7298 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
7300 case 13: /* Conditional branch and supervisor call */
7301 if (bits (insn1, 9, 11) != 7) /* conditional branch */
7302 err = thumb_copy_b (gdbarch, insn1, dsc);
7304 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
7306 case 14: /* Unconditional branch */
7307 err = thumb_copy_b (gdbarch, insn1, dsc);
7314 internal_error (__FILE__, __LINE__,
7315 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
7319 decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
7320 uint16_t insn1, uint16_t insn2,
7321 struct regcache *regs,
7322 arm_displaced_step_closure *dsc)
7324 int rt = bits (insn2, 12, 15);
7325 int rn = bits (insn1, 0, 3);
7326 int op1 = bits (insn1, 7, 8);
7328 switch (bits (insn1, 5, 6))
7330 case 0: /* Load byte and memory hints */
7331 if (rt == 0xf) /* PLD/PLI */
7334 /* PLD literal or Encoding T3 of PLI (immediate, literal). */
7335 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
7337 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7342 if (rn == 0xf) /* LDRB/LDRSB (literal) */
7343 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7346 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7347 "ldrb{reg, immediate}/ldrbt",
7352 case 1: /* Load halfword and memory hints. */
7353 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
7354 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7355 "pld/unalloc memhint", dsc);
7359 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7362 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7366 case 2: /* Load word */
7368 int insn2_bit_8_11 = bits (insn2, 8, 11);
7371 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
7372 else if (op1 == 0x1) /* Encoding T3 */
7373 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
7375 else /* op1 == 0x0 */
7377 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
7378 /* LDR (immediate) */
7379 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7380 dsc, bit (insn2, 8), 1);
7381 else if (insn2_bit_8_11 == 0xe) /* LDRT */
7382 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7385 /* LDR (register) */
7386 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7392 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7399 thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7400 uint16_t insn2, struct regcache *regs,
7401 arm_displaced_step_closure *dsc)
7404 unsigned short op = bit (insn2, 15);
7405 unsigned int op1 = bits (insn1, 11, 12);
7411 switch (bits (insn1, 9, 10))
7416 /* Load/store {dual, exclusive}, table branch. */
7417 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
7418 && bits (insn2, 5, 7) == 0)
7419 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
7422 /* PC is not allowed to be used in load/store {dual, exclusive}
7424 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7425 "load/store dual/ex", dsc);
7427 else /* load/store multiple */
7429 switch (bits (insn1, 7, 8))
7431 case 0: case 3: /* SRS, RFE */
7432 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7435 case 1: case 2: /* LDM/STM/PUSH/POP */
7436 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
7443 /* Data-processing (shift register). */
7444 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
7447 default: /* Coprocessor instructions. */
7448 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7453 case 2: /* op1 = 2 */
7454 if (op) /* Branch and misc control. */
7456 if (bit (insn2, 14) /* BLX/BL */
7457 || bit (insn2, 12) /* Unconditional branch */
7458 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
7459 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
7461 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7466 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
7468 int dp_op = bits (insn1, 4, 8);
7469 int rn = bits (insn1, 0, 3);
7470 if ((dp_op == 0 || dp_op == 0xa) && rn == 0xf)
7471 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
7474 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7477 else /* Data processing (modified immediate) */
7478 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7482 case 3: /* op1 = 3 */
7483 switch (bits (insn1, 9, 10))
7487 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
7489 else /* NEON Load/Store and Store single data item */
7490 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7491 "neon elt/struct load/store",
7494 case 1: /* op1 = 3, bits (9, 10) == 1 */
7495 switch (bits (insn1, 7, 8))
7497 case 0: case 1: /* Data processing (register) */
7498 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7501 case 2: /* Multiply and absolute difference */
7502 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7503 "mul/mua/diff", dsc);
7505 case 3: /* Long multiply and divide */
7506 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7511 default: /* Coprocessor instructions */
7512 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7521 internal_error (__FILE__, __LINE__,
7522 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
7527 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7528 struct regcache *regs,
7529 arm_displaced_step_closure *dsc)
7531 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7533 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
7535 if (debug_displaced)
7536 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
7537 "at %.8lx\n", insn1, (unsigned long) from);
7540 dsc->insn_size = thumb_insn_size (insn1);
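/* thumb_insn_size reports 4 when the first halfword has one of the
   Thumb-2 prefixes 0b11101, 0b11110 or 0b11111 in its top five bits,
   and 2 otherwise.  */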
7541 if (thumb_insn_size (insn1) == 4)
7544 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
7545 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
7548 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
7552 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7553 CORE_ADDR to, struct regcache *regs,
7554 arm_displaced_step_closure *dsc)
7557 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7560 /* Most displaced instructions use a 1-instruction scratch space, so set this
7561 here and override below if/when necessary. */
7563 dsc->insn_addr = from;
7564 dsc->scratch_base = to;
7565 dsc->cleanup = NULL;
7566 dsc->wrote_to_pc = 0;
7568 if (!displaced_in_arm_mode (regs))
7569 return thumb_process_displaced_insn (gdbarch, from, regs, dsc);
7573 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
7574 if (debug_displaced)
7575 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
7576 "at %.8lx\n", (unsigned long) insn,
7577 (unsigned long) from);
7579 if ((insn & 0xf0000000) == 0xf0000000)
7580 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
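/* Switch on bits: 0bxxxx321xxxxxxxxxxxxxxxxxxxx0xxxx -- a 4-bit key
   formed from bits 27:25 and bit 4 of the instruction, selecting the
   major ARM encoding group.  */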
7581 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
7583 case 0x0: case 0x1: case 0x2: case 0x3:
7584 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
7587 case 0x4: case 0x5: case 0x6:
7588 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
7592 err = arm_decode_media (gdbarch, insn, dsc);
7595 case 0x8: case 0x9: case 0xa: case 0xb:
7596 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
7599 case 0xc: case 0xd: case 0xe: case 0xf:
7600 err = arm_decode_svc_copro (gdbarch, insn, regs, dsc);
7605 internal_error (__FILE__, __LINE__,
7606 _("arm_process_displaced_insn: Instruction decode error"));
7609 /* Actually set up the scratch space for a displaced instruction. */
7612 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
7613 CORE_ADDR to, arm_displaced_step_closure *dsc)
7615 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7616 unsigned int i, len, offset;
7617 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7618 int size = dsc->is_thumb ? 2 : 4;
7619 const gdb_byte *bkp_insn;
7622 /* Poke modified instruction(s). */
7623 for (i = 0; i < dsc->numinsns; i++)
7625 if (debug_displaced)
7627 fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
7629 fprintf_unfiltered (gdb_stdlog, "%.8lx",
7632 fprintf_unfiltered (gdb_stdlog, "%.4x",
7633 (unsigned short)dsc->modinsn[i]);
7635 fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
7636 (unsigned long) to + offset);
7639 write_memory_unsigned_integer (to + offset, size,
7640 byte_order_for_code,
7645 /* Choose the correct breakpoint instruction. */
7648 bkp_insn = tdep->thumb_breakpoint;
7649 len = tdep->thumb_breakpoint_size;
7653 bkp_insn = tdep->arm_breakpoint;
7654 len = tdep->arm_breakpoint_size;
7657 /* Put breakpoint afterwards. */
7658 write_memory (to + offset, bkp_insn, len);
7660 if (debug_displaced)
7661 fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
7662 paddress (gdbarch, from), paddress (gdbarch, to));
7665 /* Entry point for cleaning things up after a displaced instruction has been
7669 arm_displaced_step_fixup (struct gdbarch *gdbarch,
7670 struct displaced_step_closure *dsc_,
7671 CORE_ADDR from, CORE_ADDR to,
7672 struct regcache *regs)
7674 arm_displaced_step_closure *dsc = (arm_displaced_step_closure *) dsc_;
7677 dsc->cleanup (gdbarch, regs, dsc);
7679 if (!dsc->wrote_to_pc)
7680 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
7681 dsc->insn_addr + dsc->insn_size);
7685 #include "bfd-in2.h"
7686 #include "libcoff.h"
7689 gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
7691 gdb_disassembler *di
7692 = static_cast<gdb_disassembler *>(info->application_data);
7693 struct gdbarch *gdbarch = di->arch ();
7695 if (arm_pc_is_thumb (gdbarch, memaddr))
7697 static asymbol *asym;
7698 static combined_entry_type ce;
7699 static struct coff_symbol_struct csym;
7700 static struct bfd fake_bfd;
7701 static bfd_target fake_target;
7703 if (csym.native == NULL)
7705 /* Create a fake symbol vector containing a Thumb symbol.
7706 This is solely so that the code in print_insn_little_arm()
7707 and print_insn_big_arm() in opcodes/arm-dis.c will detect
7708 the presence of a Thumb symbol and switch to decoding
7709 Thumb instructions. */
7711 fake_target.flavour = bfd_target_coff_flavour;
7712 fake_bfd.xvec = &fake_target;
7713 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
7715 csym.symbol.the_bfd = &fake_bfd;
7716 csym.symbol.name = "fake";
7717 asym = (asymbol *) & csym;
7720 memaddr = UNMAKE_THUMB_ADDR (memaddr);
7721 info->symbols = &asym;
7724 info->symbols = NULL;
7726 /* GDB is able to get bfd_mach from the exe_bfd, so info->mach is
7727 accurate; mark the USER_SPECIFIED_MACHINE_TYPE bit. Otherwise,
7728 opcodes/arm-dis.c:print_insn resets info->mach, and that triggers
7729 the assert on the mismatch of info->mach and bfd_get_mach (exec_bfd)
7730 in default_print_insn. */
7731 if (exec_bfd != NULL)
7732 info->flags |= USER_SPECIFIED_MACHINE_TYPE;
7734 return default_print_insn (memaddr, info);
7737 /* The following define instruction sequences that will cause ARM
7738 CPUs to take an undefined instruction trap. These are used to
7739 signal a breakpoint to GDB.
7741 The newer ARMv4T CPUs are capable of operating in ARM or Thumb
7742 modes. A different instruction is required for each mode. The ARM
7743 CPUs can also be big or little endian. Thus four different
7744 instructions are needed to support all cases.
7746 Note: ARMv4 defines several new instructions that will take the
7747 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
7748 not in fact add the new instructions. The new undefined
7749 instructions in ARMv4 are all instructions that had no defined
7750 behaviour in earlier chips. There is no guarantee that they will
7751 raise an exception, but may be treated as NOPs. In practice, it
7752 may only be safe to rely on instructions matching:
7754 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
7755 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
7756 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
7758 Even this may only be true if the condition predicate is true. The
7759 following use a condition predicate of ALWAYS so it is always TRUE.
7761 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
7762 and NetBSD all use a software interrupt rather than an undefined
7763 instruction to force a trap. This can be handled by the
7764 abi-specific code during establishment of the gdbarch vector. */
7766 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
7767 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
7768 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
7769 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
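/* As a sanity check on the above pattern: ARM_LE_BREAKPOINT is the
   little-endian byte sequence of the word 0xE7FFDEFE, which has cond ==
   0xE (ALWAYS), bits 27:25 == 011 and bit 4 == 1, matching the template
   shown in the comment.  The Thumb sequence 0xBEBE appears to be a
   BKPT-class encoding (0xBExx).  */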
7771 static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
7772 static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
7773 static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
7774 static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
7776 /* Implement the breakpoint_kind_from_pc gdbarch method. */
7779 arm_breakpoint_kind_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr)
7781 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7782 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7784 if (arm_pc_is_thumb (gdbarch, *pcptr))
7786 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
7788 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
7789 check whether we are replacing a 32-bit instruction. */
7790 if (tdep->thumb2_breakpoint != NULL)
7794 if (target_read_memory (*pcptr, buf, 2) == 0)
7796 unsigned short inst1;
7798 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
7799 if (thumb_insn_size (inst1) == 4)
7800 return ARM_BP_KIND_THUMB2;
7804 return ARM_BP_KIND_THUMB;
7807 return ARM_BP_KIND_ARM;
7811 /* Implement the sw_breakpoint_from_kind gdbarch method. */
7813 static const gdb_byte *
7814 arm_sw_breakpoint_from_kind (struct gdbarch *gdbarch, int kind, int *size)
7816 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7820 case ARM_BP_KIND_ARM:
7821 *size = tdep->arm_breakpoint_size;
7822 return tdep->arm_breakpoint;
7823 case ARM_BP_KIND_THUMB:
7824 *size = tdep->thumb_breakpoint_size;
7825 return tdep->thumb_breakpoint;
7826 case ARM_BP_KIND_THUMB2:
7827 *size = tdep->thumb2_breakpoint_size;
7828 return tdep->thumb2_breakpoint;
7830 gdb_assert_not_reached ("unexpected arm breakpoint kind");
7834 /* Implement the breakpoint_kind_from_current_state gdbarch method. */
7837 arm_breakpoint_kind_from_current_state (struct gdbarch *gdbarch,
7838 struct regcache *regcache,
7843 /* Check the memory pointed by PC is readable. */
7844 if (target_read_memory (regcache_read_pc (regcache), buf, 4) == 0)
7846 struct arm_get_next_pcs next_pcs_ctx;
7848 arm_get_next_pcs_ctor (&next_pcs_ctx,
7849 &arm_get_next_pcs_ops,
7850 gdbarch_byte_order (gdbarch),
7851 gdbarch_byte_order_for_code (gdbarch),
7855 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
7857 /* If *PCPTR matches one of the next instruction addresses computed
7858 by the software single-step logic, determine the Thumb mode from
7859 that destination address. */
7860 for (CORE_ADDR pc : next_pcs)
7862 if (UNMAKE_THUMB_ADDR (pc) == *pcptr)
7864 if (IS_THUMB_ADDR (pc))
7866 *pcptr = MAKE_THUMB_ADDR (*pcptr);
7867 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7870 return ARM_BP_KIND_ARM;
7875 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7878 /* Extract from an array REGBUF containing the (raw) register state a
7879 function return value of type TYPE, and copy that, in virtual
7880 format, into VALBUF. */
7883 arm_extract_return_value (struct type *type, struct regcache *regs,
7886 struct gdbarch *gdbarch = regs->arch ();
7887 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7889 if (TYPE_CODE_FLT == TYPE_CODE (type))
7891 switch (gdbarch_tdep (gdbarch)->fp_model)
7895 /* The value is in register F0 in internal format. We need to
7896 extract the raw value and then convert it to the desired
7898 bfd_byte tmpbuf[FP_REGISTER_SIZE];
7900 regs->cooked_read (ARM_F0_REGNUM, tmpbuf);
7901 target_float_convert (tmpbuf, arm_ext_type (gdbarch),
7906 case ARM_FLOAT_SOFT_FPA:
7907 case ARM_FLOAT_SOFT_VFP:
7908 /* ARM_FLOAT_VFP can arise if this is a variadic function so
7909 not using the VFP ABI code. */
7911 regs->cooked_read (ARM_A1_REGNUM, valbuf);
7912 if (TYPE_LENGTH (type) > 4)
7913 regs->cooked_read (ARM_A1_REGNUM + 1, valbuf + INT_REGISTER_SIZE);
7917 internal_error (__FILE__, __LINE__,
7918 _("arm_extract_return_value: "
7919 "Floating point model not supported"));
7923 else if (TYPE_CODE (type) == TYPE_CODE_INT
7924 || TYPE_CODE (type) == TYPE_CODE_CHAR
7925 || TYPE_CODE (type) == TYPE_CODE_BOOL
7926 || TYPE_CODE (type) == TYPE_CODE_PTR
7927 || TYPE_IS_REFERENCE (type)
7928 || TYPE_CODE (type) == TYPE_CODE_ENUM)
7930 /* If the type is a plain integer, then the access is
7931 straight-forward. Otherwise we have to play around a bit
7933 int len = TYPE_LENGTH (type);
7934 int regno = ARM_A1_REGNUM;
7939 /* By using store_unsigned_integer we avoid having to do
7940 anything special for small big-endian values. */
7941 regcache_cooked_read_unsigned (regs, regno++, &tmp);
7942 store_unsigned_integer (valbuf,
7943 (len > INT_REGISTER_SIZE
7944 ? INT_REGISTER_SIZE : len),
7946 len -= INT_REGISTER_SIZE;
7947 valbuf += INT_REGISTER_SIZE;
7952 /* For a structure or union the behaviour is as if the value had
7953 been stored to word-aligned memory and then loaded into
7954 registers with 32-bit load instruction(s). */
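/* For example, a 4-byte structure returned in registers occupies r0 with
   exactly the byte layout it would have in memory; an 8-byte one
   occupies r0 and r1.  */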
7955 int len = TYPE_LENGTH (type);
7956 int regno = ARM_A1_REGNUM;
7957 bfd_byte tmpbuf[INT_REGISTER_SIZE];
7961 regs->cooked_read (regno++, tmpbuf);
7962 memcpy (valbuf, tmpbuf,
7963 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
7964 len -= INT_REGISTER_SIZE;
7965 valbuf += INT_REGISTER_SIZE;
7971 /* Will a function return an aggregate type in memory or in a
7972 register? Return 0 if an aggregate type can be returned in a
7973 register, 1 if it must be returned in memory. */
7976 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
7978 enum type_code code;
7980 type = check_typedef (type);
7982 /* Simple, non-aggregate types (ie not including vectors and
7983 complex) are always returned in a register (or registers). */
7984 code = TYPE_CODE (type);
7985 if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
7986 && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
7989 if (TYPE_CODE_ARRAY == code && TYPE_VECTOR (type))
7991 /* Vector values should be returned using ARM registers if they
7992 are not over 16 bytes. */
7993 return (TYPE_LENGTH (type) > 16);
7996 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
7998 /* The AAPCS says all aggregates not larger than a word are returned
8000 if (TYPE_LENGTH (type) <= INT_REGISTER_SIZE)
8009 /* All aggregate types that won't fit in a register must be returned
8011 if (TYPE_LENGTH (type) > INT_REGISTER_SIZE)
8014 /* In the ARM ABI, "integer" like aggregate types are returned in
8015 registers. For an aggregate type to be integer like, its size
8016 must be less than or equal to INT_REGISTER_SIZE and the
8017 offset of each addressable subfield must be zero. Note that bit
8018 fields are not addressable, and all addressable subfields of
8019 unions always start at offset zero.
8021 This function is based on the behaviour of GCC 2.95.1.
8022 See: gcc/arm.c: arm_return_in_memory() for details.
8024 Note: All versions of GCC before GCC 2.95.2 do not set up the
8025 parameters correctly for a function returning the following
8026 structure: struct { float f;}; This should be returned in memory,
8027 not a register. Richard Earnshaw sent me a patch, but I do not
8028 know of any way to detect if a function like the above has been
8029 compiled with the correct calling convention. */
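/* For instance (illustrative): struct { int i; } is integer-like and is
   returned in r0, while struct { float f; } contains a floating-point
   field and so, as described above, must be returned in memory.  */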
8031 /* Assume all other aggregate types can be returned in a register.
8032 Run a check for structures, unions and arrays. */
8035 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
8038 /* Need to check if this struct/union is "integer" like. For
8039 this to be true, its size must be less than or equal to
8040 INT_REGISTER_SIZE and the offset of each addressable
8041 subfield must be zero. Note that bit fields are not
8042 addressable, and unions always start at offset zero. If any
8043 of the subfields is a floating point type, the struct/union
8044 cannot be an integer type. */
8046 /* For each field in the object, check:
8047 1) Is it FP? --> yes, nRc = 1;
8048 2) Is it addressable (bitpos != 0) and
8049 not packed (bitsize == 0)?
8053 for (i = 0; i < TYPE_NFIELDS (type); i++)
8055 enum type_code field_type_code;
8058 = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
8061 /* Is it a floating point type field? */
8062 if (field_type_code == TYPE_CODE_FLT)
8068 /* If bitpos != 0, then we have to care about it. */
8069 if (TYPE_FIELD_BITPOS (type, i) != 0)
8071 /* Bitfields are not addressable. If the field bitsize is
8072 zero, then the field is not packed. Hence it cannot be
8073 a bitfield or any other packed type. */
8074 if (TYPE_FIELD_BITSIZE (type, i) == 0)
8087 /* Write into appropriate registers a function return value of type
8088 TYPE, given in virtual format. */
8091 arm_store_return_value (struct type *type, struct regcache *regs,
8092 const gdb_byte *valbuf)
8094 struct gdbarch *gdbarch = regs->arch ();
8095 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8097 if (TYPE_CODE (type) == TYPE_CODE_FLT)
8099 gdb_byte buf[FP_REGISTER_SIZE];
8101 switch (gdbarch_tdep (gdbarch)->fp_model)
8105 target_float_convert (valbuf, type, buf, arm_ext_type (gdbarch));
8106 regs->cooked_write (ARM_F0_REGNUM, buf);
8109 case ARM_FLOAT_SOFT_FPA:
8110 case ARM_FLOAT_SOFT_VFP:
8111 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8112 not using the VFP ABI code. */
8114 regs->cooked_write (ARM_A1_REGNUM, valbuf);
8115 if (TYPE_LENGTH (type) > 4)
8116 regs->cooked_write (ARM_A1_REGNUM + 1, valbuf + INT_REGISTER_SIZE);
8120 internal_error (__FILE__, __LINE__,
8121 _("arm_store_return_value: Floating "
8122 "point model not supported"));
8126 else if (TYPE_CODE (type) == TYPE_CODE_INT
8127 || TYPE_CODE (type) == TYPE_CODE_CHAR
8128 || TYPE_CODE (type) == TYPE_CODE_BOOL
8129 || TYPE_CODE (type) == TYPE_CODE_PTR
8130 || TYPE_IS_REFERENCE (type)
8131 || TYPE_CODE (type) == TYPE_CODE_ENUM)
8133 if (TYPE_LENGTH (type) <= 4)
8135 /* Values of one word or less are zero/sign-extended and
8137 bfd_byte tmpbuf[INT_REGISTER_SIZE];
8138 LONGEST val = unpack_long (type, valbuf);
8140 store_signed_integer (tmpbuf, INT_REGISTER_SIZE, byte_order, val);
8141 regs->cooked_write (ARM_A1_REGNUM, tmpbuf);
8145 /* Integral values greater than one word are stored in consecutive
8146 registers starting with r0. This will always be a multiple of
8147 the register size. */
8148 int len = TYPE_LENGTH (type);
8149 int regno = ARM_A1_REGNUM;
8153 regs->cooked_write (regno++, valbuf);
8154 len -= INT_REGISTER_SIZE;
8155 valbuf += INT_REGISTER_SIZE;
8161 /* For a structure or union the behaviour is as if the value had
8162 been stored to word-aligned memory and then loaded into
8163 registers with 32-bit load instruction(s). */
8164 int len = TYPE_LENGTH (type);
8165 int regno = ARM_A1_REGNUM;
8166 bfd_byte tmpbuf[INT_REGISTER_SIZE];
8170 memcpy (tmpbuf, valbuf,
8171 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
8172 regs->cooked_write (regno++, tmpbuf);
8173 len -= INT_REGISTER_SIZE;
8174 valbuf += INT_REGISTER_SIZE;
8180 /* Handle function return values. */
8182 static enum return_value_convention
8183 arm_return_value (struct gdbarch *gdbarch, struct value *function,
8184 struct type *valtype, struct regcache *regcache,
8185 gdb_byte *readbuf, const gdb_byte *writebuf)
8187 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8188 struct type *func_type = function ? value_type (function) : NULL;
8189 enum arm_vfp_cprc_base_type vfp_base_type;
8192 if (arm_vfp_abi_for_function (gdbarch, func_type)
8193 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
8195 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
8196 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
8198 for (i = 0; i < vfp_base_count; i++)
8200 if (reg_char == 'q')
8203 arm_neon_quad_write (gdbarch, regcache, i,
8204 writebuf + i * unit_length);
8207 arm_neon_quad_read (gdbarch, regcache, i,
8208 readbuf + i * unit_length);
8215 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
8216 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8219 regcache->cooked_write (regnum, writebuf + i * unit_length);
8221 regcache->cooked_read (regnum, readbuf + i * unit_length);
8224 return RETURN_VALUE_REGISTER_CONVENTION;
8227 if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
8228 || TYPE_CODE (valtype) == TYPE_CODE_UNION
8229 || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
8231 if (tdep->struct_return == pcc_struct_return
8232 || arm_return_in_memory (gdbarch, valtype))
8233 return RETURN_VALUE_STRUCT_CONVENTION;
8235 else if (TYPE_CODE (valtype) == TYPE_CODE_COMPLEX)
8237 if (arm_return_in_memory (gdbarch, valtype))
8238 return RETURN_VALUE_STRUCT_CONVENTION;
8242 arm_store_return_value (valtype, regcache, writebuf);
8245 arm_extract_return_value (valtype, regcache, readbuf);
8247 return RETURN_VALUE_REGISTER_CONVENTION;
8252 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
8254 struct gdbarch *gdbarch = get_frame_arch (frame);
8255 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8256 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8258 gdb_byte buf[INT_REGISTER_SIZE];
8260 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
8262 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
8266 *pc = extract_unsigned_integer (buf, INT_REGISTER_SIZE, byte_order);
8270 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
8271 return the target PC. Otherwise return 0. */
8274 arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
8278 CORE_ADDR start_addr;
8280 /* Find the starting address and name of the function containing the PC. */
8281 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
8283 /* Trampoline 'bx reg' doesn't belong to any functions. Do the
8285 start_addr = arm_skip_bx_reg (frame, pc);
8286 if (start_addr != 0)
8292 /* If PC is in a Thumb call or return stub, return the address of the
8293 target PC, which is in a register. The thunk functions are called
8294 _call_via_xx, where x is the register name. The possible names
8295 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
8296 functions, named __ARM_call_via_r[0-7]. */
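/* For example (illustrative), a PC inside "_call_via_r3" means the real
   call target is whatever is currently in r3; the last two characters of
   the stub name index the register table below.  */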
8297 if (startswith (name, "_call_via_")
8298 || startswith (name, "__ARM_call_via_"))
8300 /* Use the name suffix to determine which register contains the
8302 static const char *table[15] =
8303 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
8304 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
8307 int offset = strlen (name) - 2;
8309 for (regno = 0; regno <= 14; regno++)
8310 if (strcmp (&name[offset], table[regno]) == 0)
8311 return get_frame_register_unsigned (frame, regno);
8314 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
8315 non-interworking calls to foo. We could decode the stubs
8316 to find the target but it's easier to use the symbol table. */
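/* For example (illustrative), a stub named "__memcpy_from_thumb" is
   resolved by stripping the leading "__" and the trailing "_from_thumb"
   and looking up the remaining name, "memcpy", as a minimal symbol.  */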
8317 namelen = strlen (name);
8318 if (name[0] == '_' && name[1] == '_'
8319 && ((namelen > 2 + strlen ("_from_thumb")
8320 && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
8321 || (namelen > 2 + strlen ("_from_arm")
8322 && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
8325 int target_len = namelen - 2;
8326 struct bound_minimal_symbol minsym;
8327 struct objfile *objfile;
8328 struct obj_section *sec;
8330 if (name[namelen - 1] == 'b')
8331 target_len -= strlen ("_from_thumb");
8333 target_len -= strlen ("_from_arm");
8335 target_name = (char *) alloca (target_len + 1);
8336 memcpy (target_name, name + 2, target_len);
8337 target_name[target_len] = '\0';
8339 sec = find_pc_section (pc);
8340 objfile = (sec == NULL) ? NULL : sec->objfile;
8341 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
8342 if (minsym.minsym != NULL)
8343 return BMSYMBOL_VALUE_ADDRESS (minsym);
8348 return 0; /* not a stub */
8352 set_arm_command (const char *args, int from_tty)
8354 printf_unfiltered (_("\
8355 "set arm" must be followed by an appropriate subcommand.\n"));
8356 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
8360 show_arm_command (const char *args, int from_tty)
8362 cmd_show_list (showarmcmdlist, from_tty, "");
8366 arm_update_current_architecture (void)
8368 struct gdbarch_info info;
8370 /* If the current architecture is not ARM, we have nothing to do. */
8371 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
8374 /* Update the architecture. */
8375 gdbarch_info_init (&info);
8377 if (!gdbarch_update_p (info))
8378 internal_error (__FILE__, __LINE__, _("could not update architecture"));
8382 set_fp_model_sfunc (const char *args, int from_tty,
8383 struct cmd_list_element *c)
8387 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
8388 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
8390 arm_fp_model = (enum arm_float_model) fp_model;
8394 if (fp_model == ARM_FLOAT_LAST)
8395 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
8398 arm_update_current_architecture ();
8402 show_fp_model (struct ui_file *file, int from_tty,
8403 struct cmd_list_element *c, const char *value)
8405 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8407 if (arm_fp_model == ARM_FLOAT_AUTO
8408 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8409 fprintf_filtered (file, _("\
8410 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
8411 fp_model_strings[tdep->fp_model]);
8413 fprintf_filtered (file, _("\
8414 The current ARM floating point model is \"%s\".\n"),
8415 fp_model_strings[arm_fp_model]);
8419 arm_set_abi (const char *args, int from_tty,
8420 struct cmd_list_element *c)
8424 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
8425 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
8427 arm_abi_global = (enum arm_abi_kind) arm_abi;
8431 if (arm_abi == ARM_ABI_LAST)
8432 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
8435 arm_update_current_architecture ();
8439 arm_show_abi (struct ui_file *file, int from_tty,
8440 struct cmd_list_element *c, const char *value)
8442 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8444 if (arm_abi_global == ARM_ABI_AUTO
8445 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8446 fprintf_filtered (file, _("\
8447 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
8448 arm_abi_strings[tdep->arm_abi]);
8450 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
8455 arm_show_fallback_mode (struct ui_file *file, int from_tty,
8456 struct cmd_list_element *c, const char *value)
8458 fprintf_filtered (file,
8459 _("The current execution mode assumed "
8460 "(when symbols are unavailable) is \"%s\".\n"),
8461 arm_fallback_mode_string);
8465 arm_show_force_mode (struct ui_file *file, int from_tty,
8466 struct cmd_list_element *c, const char *value)
8468 fprintf_filtered (file,
8469 _("The current execution mode assumed "
8470 "(even when symbols are available) is \"%s\".\n"),
8471 arm_force_mode_string);
8474 /* If the user changes the register disassembly style used for info
8475 register and other commands, we have to also switch the style used
8476 in opcodes for disassembly output. This function is run in the "set
8477 arm disassembly" command, and does that. */
8480 set_disassembly_style_sfunc (const char *args, int from_tty,
8481 struct cmd_list_element *c)
8483 /* Convert the short style name into the long style name (eg, reg-names-*)
8484 before calling the generic set_disassembler_options() function. */
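/* For example, a style such as "apcs" (assuming it is one of the
   supported style names) becomes the disassembler option
   "reg-names-apcs".  */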
8485 std::string long_name = std::string ("reg-names-") + disassembly_style;
8486 set_disassembler_options (&long_name[0]);
8490 show_disassembly_style_sfunc (struct ui_file *file, int from_tty,
8491 struct cmd_list_element *c, const char *value)
8493 struct gdbarch *gdbarch = get_current_arch ();
8494 char *options = get_disassembler_options (gdbarch);
8495 const char *style = "";
8499 FOR_EACH_DISASSEMBLER_OPTION (opt, options)
8500 if (CONST_STRNEQ (opt, "reg-names-"))
8502 style = &opt[strlen ("reg-names-")];
8503 len = strcspn (style, ",");
8506 fprintf_unfiltered (file, "The disassembly style is \"%.*s\".\n", len, style);
8509 /* Return the ARM register name corresponding to register I. */
8511 arm_register_name (struct gdbarch *gdbarch, int i)
8513 const int num_regs = gdbarch_num_regs (gdbarch);
8515 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
8516 && i >= num_regs && i < num_regs + 32)
8518 static const char *const vfp_pseudo_names[] = {
8519 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
8520 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
8521 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
8522 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
8525 return vfp_pseudo_names[i - num_regs];
8528 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
8529 && i >= num_regs + 32 && i < num_regs + 32 + 16)
8531 static const char *const neon_pseudo_names[] = {
8532 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
8533 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
8536 return neon_pseudo_names[i - num_regs - 32];
8539 if (i >= ARRAY_SIZE (arm_register_names))
8540 /* These registers are only supported on targets which supply
8541 an XML description. */
8544 return arm_register_names[i];
8547 /* Test whether the coff symbol specific value corresponds to a Thumb
8551 coff_sym_is_thumb (int val)
8553 return (val == C_THUMBEXT
8554 || val == C_THUMBSTAT
8555 || val == C_THUMBEXTFUNC
8556 || val == C_THUMBSTATFUNC
8557 || val == C_THUMBLABEL);
8560 /* arm_coff_make_msymbol_special()
8561 arm_elf_make_msymbol_special()
8563 These functions test whether the COFF or ELF symbol corresponds to
8564 an address in thumb code, and set a "special" bit in a minimal
8565 symbol to indicate that it does. */
8568 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
8570 elf_symbol_type *elfsym = (elf_symbol_type *) sym;
8572 if (ARM_GET_SYM_BRANCH_TYPE (elfsym->internal_elf_sym.st_target_internal)
8573 == ST_BRANCH_TO_THUMB)
8574 MSYMBOL_SET_SPECIAL (msym);
8578 arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
8580 if (coff_sym_is_thumb (val))
8581 MSYMBOL_SET_SPECIAL (msym);
8585 arm_objfile_data_free (struct objfile *objfile, void *arg)
8587 struct arm_per_objfile *data = (struct arm_per_objfile *) arg;
8590 for (i = 0; i < objfile->obfd->section_count; i++)
8591 VEC_free (arm_mapping_symbol_s, data->section_maps[i]);
8595 arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
8598 const char *name = bfd_asymbol_name (sym);
8599 struct arm_per_objfile *data;
8600 VEC(arm_mapping_symbol_s) **map_p;
8601 struct arm_mapping_symbol new_map_sym;
8603 gdb_assert (name[0] == '$');
8604 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
8607 data = (struct arm_per_objfile *) objfile_data (objfile,
8608 arm_objfile_data_key);
8611 data = OBSTACK_ZALLOC (&objfile->objfile_obstack,
8612 struct arm_per_objfile);
8613 set_objfile_data (objfile, arm_objfile_data_key, data);
8614 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
8615 objfile->obfd->section_count,
8616 VEC(arm_mapping_symbol_s) *);
8618 map_p = &data->section_maps[bfd_get_section (sym)->index];
8620 new_map_sym.value = sym->value;
8621 new_map_sym.type = name[1];
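/* Per the ARM ELF mapping-symbol convention, 'a' marks the start of ARM
   code, 't' the start of Thumb code and 'd' the start of data.  */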
8623 /* Assume that most mapping symbols appear in order of increasing
8624 value. If they were randomly distributed, it would be faster to
8625 always push here and then sort at first use. */
8626 if (!VEC_empty (arm_mapping_symbol_s, *map_p))
8628 struct arm_mapping_symbol *prev_map_sym;
8630 prev_map_sym = VEC_last (arm_mapping_symbol_s, *map_p);
8631 if (prev_map_sym->value >= sym->value)
8634 idx = VEC_lower_bound (arm_mapping_symbol_s, *map_p, &new_map_sym,
8635 arm_compare_mapping_symbols);
8636 VEC_safe_insert (arm_mapping_symbol_s, *map_p, idx, &new_map_sym);
8641 VEC_safe_push (arm_mapping_symbol_s, *map_p, &new_map_sym);
8645 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
8647 struct gdbarch *gdbarch = regcache->arch ();
8648 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
8650 /* If necessary, set the T bit. */
8653 ULONGEST val, t_bit;
8654 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
8655 t_bit = arm_psr_thumb_bit (gdbarch);
8656 if (arm_pc_is_thumb (gdbarch, pc))
8657 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8660 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8665 /* Read the contents of a NEON quad register, by reading from two
8666 double registers. This is used to implement the quad pseudo
8667 registers, and for argument passing in case the quad registers are
8668 missing; vectors are passed in quad registers when using the VFP
8669 ABI, even if a NEON unit is not present. REGNUM is the index of
8670 the quad register, in [0, 15]. */
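/* For example, q1 is backed by d2 and d3: on a little-endian target d2
   supplies bytes 0-7 of BUF and d3 supplies bytes 8-15, while on a
   big-endian target the two 8-byte halves are swapped, which is what the
   offset computation below implements.  */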
8672 static enum register_status
8673 arm_neon_quad_read (struct gdbarch *gdbarch, readable_regcache *regcache,
8674 int regnum, gdb_byte *buf)
8677 gdb_byte reg_buf[8];
8678 int offset, double_regnum;
8679 enum register_status status;
8681 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8682 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8685 /* d0 is always the least significant half of q0. */
8686 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8691 status = regcache->raw_read (double_regnum, reg_buf);
8692 if (status != REG_VALID)
8694 memcpy (buf + offset, reg_buf, 8);
8696 offset = 8 - offset;
8697 status = regcache->raw_read (double_regnum + 1, reg_buf);
8698 if (status != REG_VALID)
8700 memcpy (buf + offset, reg_buf, 8);
8705 static enum register_status
8706 arm_pseudo_read (struct gdbarch *gdbarch, readable_regcache *regcache,
8707 int regnum, gdb_byte *buf)
8709 const int num_regs = gdbarch_num_regs (gdbarch);
8711 gdb_byte reg_buf[8];
8712 int offset, double_regnum;
8714 gdb_assert (regnum >= num_regs);
8717 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8718 /* Quad-precision register. */
8719 return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
8722 enum register_status status;
8724 /* Single-precision register. */
8725 gdb_assert (regnum < 32);
8727 /* s0 is always the least significant half of d0. */
8728 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8729 offset = (regnum & 1) ? 0 : 4;
8731 offset = (regnum & 1) ? 4 : 0;
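/* For example, reading s5: regnum >> 1 == 2, so the value lives in d2;
   on a little-endian target the odd-numbered single occupies the high
   four bytes of the double, hence offset == 4.  */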
8733 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8734 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8737 status = regcache->raw_read (double_regnum, reg_buf);
8738 if (status == REG_VALID)
8739 memcpy (buf, reg_buf + offset, 4);
8744 /* Store the contents of BUF to a NEON quad register, by writing to
8745 two double registers. This is used to implement the quad pseudo
8746 registers, and for argument passing in case the quad registers are
8747 missing; vectors are passed in quad registers when using the VFP
8748 ABI, even if a NEON unit is not present. REGNUM is the index
8749 of the quad register, in [0, 15]. */
8752 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
8753 int regnum, const gdb_byte *buf)
8756 int offset, double_regnum;
8758 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8759 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8762 /* d0 is always the least significant half of q0. */
8763 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8768 regcache->raw_write (double_regnum, buf + offset);
8769 offset = 8 - offset;
8770 regcache->raw_write (double_regnum + 1, buf + offset);
8774 arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
8775 int regnum, const gdb_byte *buf)
8777 const int num_regs = gdbarch_num_regs (gdbarch);
8779 gdb_byte reg_buf[8];
8780 int offset, double_regnum;
8782 gdb_assert (regnum >= num_regs);
8785 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8786 /* Quad-precision register. */
8787 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
8790 /* Single-precision register. */
8791 gdb_assert (regnum < 32);
8793 /* s0 is always the least significant half of d0. */
8794 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8795 offset = (regnum & 1) ? 0 : 4;
8797 offset = (regnum & 1) ? 4 : 0;
8799 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8800 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8803 regcache->raw_read (double_regnum, reg_buf);
8804 memcpy (reg_buf + offset, buf, 4);
8805 regcache->raw_write (double_regnum, reg_buf);
8809 static struct value *
8810 value_of_arm_user_reg (struct frame_info *frame, const void *baton)
8812 const int *reg_p = (const int *) baton;
8813 return value_of_register (*reg_p, frame);
8816 static enum gdb_osabi
8817 arm_elf_osabi_sniffer (bfd *abfd)
8819 unsigned int elfosabi;
8820 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
8822 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
8824 if (elfosabi == ELFOSABI_ARM)
8825 /* GNU tools use this value. Check note sections in this case,
8827 bfd_map_over_sections (abfd,
8828 generic_elf_osabi_sniff_abi_tag_sections,
8831 /* Anything else will be handled by the generic ELF sniffer. */
8836 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
8837 struct reggroup *group)
8839 /* FPS register's type is INT, but it belongs to float_reggroup.  Besides
8840 this, the FPS register belongs to save_reggroup, restore_reggroup, and
8841 all_reggroup, of course. */
8842 if (regnum == ARM_FPS_REGNUM)
8843 return (group == float_reggroup
8844 || group == save_reggroup
8845 || group == restore_reggroup
8846 || group == all_reggroup);
8848 return default_register_reggroup_p (gdbarch, regnum, group);
8852 /* For backward-compatibility we allow two 'g' packet lengths with
8853 the remote protocol depending on whether FPA registers are
8854 supplied. M-profile targets do not have FPA registers, but some
8855 stubs already exist in the wild which use a 'g' packet which
8856 supplies them albeit with dummy values. The packet format which
8857 includes FPA registers should be considered deprecated for
8858 M-profile targets. */
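/* For concreteness, assuming the usual sizes from arm-tdep.h
   (INT_REGISTER_SIZE == 4, FP_REGISTER_SIZE == 12, VFP_REGISTER_SIZE == 8),
   the guesses below work out to 16*4 + 8*12 + 2*4 = 168 bytes for the
   FPA-style layout, 16*4 + 4 = 68 bytes for the plain M-profile layout,
   and 16*4 + 16*8 + 2*4 = 200 bytes for the M-profile-plus-VFP layout.  */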
8861 arm_register_g_packet_guesses (struct gdbarch *gdbarch)
8863 if (gdbarch_tdep (gdbarch)->is_m)
8865 /* If we know from the executable this is an M-profile target,
8866 cater for remote targets whose register set layout is the
8867 same as the FPA layout. */
8868 register_remote_g_packet_guess (gdbarch,
8869 /* r0-r12,sp,lr,pc; f0-f7; fps,xpsr */
8870 (16 * INT_REGISTER_SIZE)
8871 + (8 * FP_REGISTER_SIZE)
8872 + (2 * INT_REGISTER_SIZE),
8873 tdesc_arm_with_m_fpa_layout);
8875 /* The regular M-profile layout. */
8876 register_remote_g_packet_guess (gdbarch,
8877 /* r0-r12,sp,lr,pc; xpsr */
8878 (16 * INT_REGISTER_SIZE)
8879 + INT_REGISTER_SIZE,
8882 /* M-profile plus M4F VFP. */
8883 register_remote_g_packet_guess (gdbarch,
8884 /* r0-r12,sp,lr,pc; d0-d15; fpscr,xpsr */
8885 (16 * INT_REGISTER_SIZE)
8886 + (16 * VFP_REGISTER_SIZE)
8887 + (2 * INT_REGISTER_SIZE),
8888 tdesc_arm_with_m_vfp_d16);
8891 /* Otherwise we don't have a useful guess. */
8894 /* Implement the code_of_frame_writable gdbarch method. */
8897 arm_code_of_frame_writable (struct gdbarch *gdbarch, struct frame_info *frame)
8899 if (gdbarch_tdep (gdbarch)->is_m
8900 && get_frame_type (frame) == SIGTRAMP_FRAME)
8902 /* M-profile exception frames return to some magic PCs, which
8903 aren't writable at all. */
8911 /* Initialize the current architecture based on INFO. If possible,
8912 re-use an architecture from ARCHES, which is a list of
8913 architectures already created during this debugging session.
8915 Called e.g. at program startup, when reading a core file, and when
8916 reading a binary file. */
8918 static struct gdbarch *
8919 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
8921 struct gdbarch_tdep *tdep;
8922 struct gdbarch *gdbarch;
8923 struct gdbarch_list *best_arch;
8924 enum arm_abi_kind arm_abi = arm_abi_global;
8925 enum arm_float_model fp_model = arm_fp_model;
8926 struct tdesc_arch_data *tdesc_data = NULL;
8928 int vfp_register_count = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
8929 int have_wmmx_registers = 0;
8931 int have_fpa_registers = 1;
8932 const struct target_desc *tdesc = info.target_desc;
8934 /* If we have an object to base this architecture on, try to determine
8937 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
8939 int ei_osabi, e_flags;
8941 switch (bfd_get_flavour (info.abfd))
8943 case bfd_target_coff_flavour:
8944 /* Assume it's an old APCS-style ABI. */
8946 arm_abi = ARM_ABI_APCS;
8949 case bfd_target_elf_flavour:
8950 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
8951 e_flags = elf_elfheader (info.abfd)->e_flags;
8953 if (ei_osabi == ELFOSABI_ARM)
8955 /* GNU tools used to use this value, but do not for EABI
8956 objects. There's nowhere to tag an EABI version
8957 anyway, so assume APCS. */
8958 arm_abi = ARM_ABI_APCS;
8960 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
8962 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
8966 case EF_ARM_EABI_UNKNOWN:
8967 /* Assume GNU tools. */
8968 arm_abi = ARM_ABI_APCS;
8971 case EF_ARM_EABI_VER4:
8972 case EF_ARM_EABI_VER5:
8973 arm_abi = ARM_ABI_AAPCS;
8974 /* EABI binaries default to VFP float ordering.
8975 They may also contain build attributes that can
8976 be used to identify if the VFP argument-passing
8978 if (fp_model == ARM_FLOAT_AUTO)
8981 switch (bfd_elf_get_obj_attr_int (info.abfd,
8985 case AEABI_VFP_args_base:
8986 /* "The user intended FP parameter/result
8987 passing to conform to AAPCS, base
8989 fp_model = ARM_FLOAT_SOFT_VFP;
8991 case AEABI_VFP_args_vfp:
8992 /* "The user intended FP parameter/result
8993 passing to conform to AAPCS, VFP
8995 fp_model = ARM_FLOAT_VFP;
8997 case AEABI_VFP_args_toolchain:
8998 /* "The user intended FP parameter/result
8999 passing to conform to tool chain-specific
9000 conventions" - we don't know any such
9001 conventions, so leave it as "auto". */
9003 case AEABI_VFP_args_compatible:
9004 /* "Code is compatible with both the base
9005 and VFP variants; the user did not permit
9006 non-variadic functions to pass FP
9007 parameters/results" - leave it as
9011 /* Attribute value not mentioned in the
9012 November 2012 ABI, so leave it as
9017 fp_model = ARM_FLOAT_SOFT_VFP;
9023 /* Leave it as "auto". */
9024 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
9029 /* Detect M-profile programs. This only works if the
9030 executable file includes build attributes; GCC does
9031 copy them to the executable, but e.g. RealView does
9034 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9037 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9038 Tag_CPU_arch_profile);
9040 /* GCC specifies the profile for v6-M; RealView only
9041 specifies the profile for architectures starting with
9042 V7 (as opposed to architectures with a tag
9043 numerically greater than TAG_CPU_ARCH_V7). */
9044 if (!tdesc_has_registers (tdesc)
9045 && (attr_arch == TAG_CPU_ARCH_V6_M
9046 || attr_arch == TAG_CPU_ARCH_V6S_M
9047 || attr_profile == 'M'))
9052 if (fp_model == ARM_FLOAT_AUTO)
9054 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
9057 /* Leave it as "auto". Strictly speaking this case
9058 means FPA, but almost nobody uses that now, and
9059 many toolchains fail to set the appropriate bits
9060 for the floating-point model they use. */
9062 case EF_ARM_SOFT_FLOAT:
9063 fp_model = ARM_FLOAT_SOFT_FPA;
9065 case EF_ARM_VFP_FLOAT:
9066 fp_model = ARM_FLOAT_VFP;
9068 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
9069 fp_model = ARM_FLOAT_SOFT_VFP;
9074 if (e_flags & EF_ARM_BE8)
9075 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
9080 /* Leave it as "auto". */
9085 /* Check any target description for validity. */
9086 if (tdesc_has_registers (tdesc))
9088 /* For most registers we require GDB's default names; but also allow
9089 the numeric names for sp / lr / pc, as a convenience. */
9090 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
9091 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
9092 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
9094 const struct tdesc_feature *feature;
9097 feature = tdesc_find_feature (tdesc,
9098 "org.gnu.gdb.arm.core");
9099 if (feature == NULL)
9101 feature = tdesc_find_feature (tdesc,
9102 "org.gnu.gdb.arm.m-profile");
9103 if (feature == NULL)
9109 tdesc_data = tdesc_data_alloc ();
9112 for (i = 0; i < ARM_SP_REGNUM; i++)
9113 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9114 arm_register_names[i]);
9115 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9118 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9121 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9125 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9126 ARM_PS_REGNUM, "xpsr");
9128 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9129 ARM_PS_REGNUM, "cpsr");
9133 tdesc_data_cleanup (tdesc_data);
9137 feature = tdesc_find_feature (tdesc,
9138 "org.gnu.gdb.arm.fpa");
9139 if (feature != NULL)
9142 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
9143 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9144 arm_register_names[i]);
9147 tdesc_data_cleanup (tdesc_data);
9152 have_fpa_registers = 0;
9154 feature = tdesc_find_feature (tdesc,
9155 "org.gnu.gdb.xscale.iwmmxt");
9156 if (feature != NULL)
9158 static const char *const iwmmxt_names[] = {
9159 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9160 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9161 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9162 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9166 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
9168 &= tdesc_numbered_register (feature, tdesc_data, i,
9169 iwmmxt_names[i - ARM_WR0_REGNUM]);
9171 /* Check for the control registers, but do not fail if they
9173 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
9174 tdesc_numbered_register (feature, tdesc_data, i,
9175 iwmmxt_names[i - ARM_WR0_REGNUM]);
9177 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
9179 &= tdesc_numbered_register (feature, tdesc_data, i,
9180 iwmmxt_names[i - ARM_WR0_REGNUM]);
9184 tdesc_data_cleanup (tdesc_data);
9188 have_wmmx_registers = 1;
9191 /* If we have a VFP unit, check whether the single precision registers
9192 are present. If not, then we will synthesize them as pseudo
9194 feature = tdesc_find_feature (tdesc,
9195 "org.gnu.gdb.arm.vfp");
9196 if (feature != NULL)
9198 static const char *const vfp_double_names[] = {
9199 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9200 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9201 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9202 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9205 /* Require the double precision registers. There must be either
9208 for (i = 0; i < 32; i++)
9210 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9212 vfp_double_names[i]);
9216 if (!valid_p && i == 16)
9219 /* Also require FPSCR. */
9220 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9221 ARM_FPSCR_REGNUM, "fpscr");
9224 tdesc_data_cleanup (tdesc_data);
9228 if (tdesc_unnumbered_register (feature, "s0") == 0)
9229 have_vfp_pseudos = 1;
9231 vfp_register_count = i;
9233 /* If we have VFP, also check for NEON. The architecture allows
9234 NEON without VFP (integer vector operations only), but GDB
9235 does not support that. */
9236 feature = tdesc_find_feature (tdesc,
9237 "org.gnu.gdb.arm.neon");
9238 if (feature != NULL)
9240 /* NEON requires 32 double-precision registers. */
9243 tdesc_data_cleanup (tdesc_data);
9247 /* If there are quad registers defined by the stub, use
9248 their type; otherwise (normally) provide them with
9249 the default type. */
9250 if (tdesc_unnumbered_register (feature, "q0") == 0)
9251 have_neon_pseudos = 1;
9258 /* If there is already a candidate, use it. */
9259 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
9261 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
9263 if (arm_abi != ARM_ABI_AUTO
9264 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
9267 if (fp_model != ARM_FLOAT_AUTO
9268 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
9271 /* There are various other properties in tdep that we do not
9272 need to check here: those derived from a target description,
9273 since gdbarches with a different target description are
9274 automatically disqualified. */
9276 /* Do check is_m, though, since it might come from the binary. */
9277 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
9280 /* Found a match. */
9284 if (best_arch != NULL)
9286 if (tdesc_data != NULL)
9287 tdesc_data_cleanup (tdesc_data);
9288 return best_arch->gdbarch;
9291 tdep = XCNEW (struct gdbarch_tdep);
9292 gdbarch = gdbarch_alloc (&info, tdep);
9294 /* Record additional information about the architecture we are defining.
9295 These are gdbarch discriminators, like the OSABI. */
9296 tdep->arm_abi = arm_abi;
9297 tdep->fp_model = fp_model;
9299 tdep->have_fpa_registers = have_fpa_registers;
9300 tdep->have_wmmx_registers = have_wmmx_registers;
9301 gdb_assert (vfp_register_count == 0
9302 || vfp_register_count == 16
9303 || vfp_register_count == 32);
9304 tdep->vfp_register_count = vfp_register_count;
9305 tdep->have_vfp_pseudos = have_vfp_pseudos;
9306 tdep->have_neon_pseudos = have_neon_pseudos;
9307 tdep->have_neon = have_neon;
9309 arm_register_g_packet_guesses (gdbarch);
9312 switch (info.byte_order_for_code)
9314 case BFD_ENDIAN_BIG:
9315 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
9316 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
9317 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
9318 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
9322 case BFD_ENDIAN_LITTLE:
9323 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
9324 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
9325 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
9326 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
9331 internal_error (__FILE__, __LINE__,
9332 _("arm_gdbarch_init: bad byte order for float format"));
9335 /* On ARM targets char defaults to unsigned. */
9336 set_gdbarch_char_signed (gdbarch, 0);
9338 /* wchar_t is unsigned under the AAPCS. */
9339 if (tdep->arm_abi == ARM_ABI_AAPCS)
9340 set_gdbarch_wchar_signed (gdbarch, 0);
9342 set_gdbarch_wchar_signed (gdbarch, 1);
9344 /* Note: for displaced stepping, this includes the breakpoint, and one word
8345 of additional scratch space. This setting isn't used for anything besides
9346 displaced stepping at present. */
9347 set_gdbarch_max_insn_length (gdbarch, 4 * DISPLACED_MODIFIED_INSNS);
9349 /* This should be low enough for everything. */
9350 tdep->lowest_pc = 0x20;
9351 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
9353 /* The default, for both APCS and AAPCS, is to return small
9354 structures in registers. */
9355 tdep->struct_return = reg_struct_return;
9357 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
9358 set_gdbarch_frame_align (gdbarch, arm_frame_align);
9361 set_gdbarch_code_of_frame_writable (gdbarch, arm_code_of_frame_writable);
9363 set_gdbarch_write_pc (gdbarch, arm_write_pc);
9365 /* Frame handling. */
9366 set_gdbarch_dummy_id (gdbarch, arm_dummy_id);
9367 set_gdbarch_unwind_pc (gdbarch, arm_unwind_pc);
9368 set_gdbarch_unwind_sp (gdbarch, arm_unwind_sp);
9370 frame_base_set_default (gdbarch, &arm_normal_base);
9372 /* Address manipulation. */
9373 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
9375 /* Advance PC across function entry code. */
9376 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
9378 /* Detect whether PC is at a point where the stack has been destroyed. */
9379 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
9381 /* Skip trampolines. */
9382 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
9384 /* The stack grows downward. */
9385 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
9387 /* Breakpoint manipulation. */
9388 set_gdbarch_breakpoint_kind_from_pc (gdbarch, arm_breakpoint_kind_from_pc);
9389 set_gdbarch_sw_breakpoint_from_kind (gdbarch, arm_sw_breakpoint_from_kind);
9390 set_gdbarch_breakpoint_kind_from_current_state (gdbarch,
9391 arm_breakpoint_kind_from_current_state);
9393 /* Information about registers, etc. */
9394 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
9395 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
9396 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
9397 set_gdbarch_register_type (gdbarch, arm_register_type);
9398 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
9400 /* This "info float" is FPA-specific. Use the generic version if we
9402 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
9403 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
9405 /* Internal <-> external register number maps. */
9406 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
9407 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
9409 set_gdbarch_register_name (gdbarch, arm_register_name);
9411 /* Returning results. */
9412 set_gdbarch_return_value (gdbarch, arm_return_value);
9415 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
9417 /* Minsymbol frobbing. */
9418 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
9419 set_gdbarch_coff_make_msymbol_special (gdbarch,
9420 arm_coff_make_msymbol_special);
9421 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
9423 /* Thumb-2 IT block support. */
9424 set_gdbarch_adjust_breakpoint_address (gdbarch,
9425 arm_adjust_breakpoint_address);
9427 /* Virtual tables. */
9428 set_gdbarch_vbit_in_delta (gdbarch, 1);
9430 /* Hook in the ABI-specific overrides, if they have been registered. */
9431 gdbarch_init_osabi (info, gdbarch);
9433 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
9435 /* Add some default predicates. */
9437 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
9438 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
9439 dwarf2_append_unwinders (gdbarch);
9440 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
9441 frame_unwind_append_unwinder (gdbarch, &arm_epilogue_frame_unwind);
9442 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
9444 /* Now we have tuned the configuration, set a few final things,
9445 based on what the OS ABI has told us. */
9447 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
9448 binaries are always marked. */
9449 if (tdep->arm_abi == ARM_ABI_AUTO)
9450 tdep->arm_abi = ARM_ABI_APCS;
9452 /* Watchpoints are not steppable. */
9453 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
9455 /* We used to default to FPA for generic ARM, but almost nobody
9456 uses that now, and we now provide a way for the user to force
9457 the model. So default to the most useful variant. */
9458 if (tdep->fp_model == ARM_FLOAT_AUTO)
9459 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
9461 if (tdep->jb_pc >= 0)
9462 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
9464 /* Floating point sizes and format. */
9465 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
9466 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
9468 set_gdbarch_double_format
9469 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9470 set_gdbarch_long_double_format
9471 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9475 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
9476 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
9479 if (have_vfp_pseudos)
9481 /* NOTE: These are the only pseudo registers used by
9482 the ARM target at the moment. If more are added, a
9483 little more care in numbering will be needed. */
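/* The resulting numbering mirrors arm_register_name above: pseudo
   registers [num_regs, num_regs + 32) are s0-s31 and, when NEON pseudos
   are present, [num_regs + 32, num_regs + 48) are q0-q15.  */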
9485 int num_pseudos = 32;
9486 if (have_neon_pseudos)
9488 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
9489 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
9490 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
9495 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
9497 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
9499 /* Override tdesc_register_type to adjust the types of VFP
9500 registers for NEON. */
9501 set_gdbarch_register_type (gdbarch, arm_register_type);
9504 /* Add standard register aliases. We add aliases even for those
9505 names which are used by the current architecture - it's simpler,
9506 and does no harm, since nothing ever lists user registers. */
9507 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
9508 user_reg_add (gdbarch, arm_register_aliases[i].name,
9509 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
9511 set_gdbarch_disassembler_options (gdbarch, &arm_disassembler_options);
9512 set_gdbarch_valid_disassembler_options (gdbarch, disassembler_options_arm ());
9518 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
9520 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9525 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
9526 (unsigned long) tdep->lowest_pc);
9532 static void arm_record_test (void);
9537 _initialize_arm_tdep (void)
9541 char regdesc[1024], *rdptr = regdesc;
9542 size_t rest = sizeof (regdesc);
9544 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
9546 arm_objfile_data_key
9547 = register_objfile_data_with_cleanup (NULL, arm_objfile_data_free);
9549 /* Add ourselves to objfile event chain. */
9550 gdb::observers::new_objfile.attach (arm_exidx_new_objfile);
9552 = register_objfile_data_with_cleanup (NULL, arm_exidx_data_free);
9554 /* Register an ELF OS ABI sniffer for ARM binaries. */
9555 gdbarch_register_osabi_sniffer (bfd_arch_arm,
9556 bfd_target_elf_flavour,
9557 arm_elf_osabi_sniffer);
9559 /* Initialize the standard target descriptions. */
9560 initialize_tdesc_arm_with_m ();
9561 initialize_tdesc_arm_with_m_fpa_layout ();
9562 initialize_tdesc_arm_with_m_vfp_d16 ();
9563 initialize_tdesc_arm_with_iwmmxt ();
9564 initialize_tdesc_arm_with_vfpv2 ();
9565 initialize_tdesc_arm_with_vfpv3 ();
9566 initialize_tdesc_arm_with_neon ();
9568 /* Add root prefix command for all "set arm"/"show arm" commands. */
9569 add_prefix_cmd ("arm", no_class, set_arm_command,
9570 _("Various ARM-specific commands."),
9571 &setarmcmdlist, "set arm ", 0, &setlist);
9573 add_prefix_cmd ("arm", no_class, show_arm_command,
9574 _("Various ARM-specific commands."),
9575 &showarmcmdlist, "show arm ", 0, &showlist);
9578 arm_disassembler_options = xstrdup ("reg-names-std");
9579 const disasm_options_t *disasm_options
9580 = &disassembler_options_arm ()->options;
9581 int num_disassembly_styles = 0;
9582 for (i = 0; disasm_options->name[i] != NULL; i++)
9583 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9584 num_disassembly_styles++;
9586 /* Initialize the array that will be passed to add_setshow_enum_cmd(). */
9587 valid_disassembly_styles = XNEWVEC (const char *,
9588 num_disassembly_styles + 1);
9589 for (i = j = 0; disasm_options->name[i] != NULL; i++)
9590 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9592 size_t offset = strlen ("reg-names-");
9593 const char *style = disasm_options->name[i];
9594 valid_disassembly_styles[j++] = &style[offset];
9595 length = snprintf (rdptr, rest, "%s - %s\n", &style[offset],
9596 disasm_options->description[i]);
9600 /* Mark the end of valid options. */
9601 valid_disassembly_styles[num_disassembly_styles] = NULL;
9603 /* Create the help text. */
9604 std::string helptext = string_printf ("%s%s%s",
9605 _("The valid values are:\n"),
9607 _("The default is \"std\"."));
9609 add_setshow_enum_cmd("disassembler", no_class,
9610 valid_disassembly_styles, &disassembly_style,
9611 _("Set the disassembly style."),
9612 _("Show the disassembly style."),
9614 set_disassembly_style_sfunc,
9615 show_disassembly_style_sfunc,
9616 &setarmcmdlist, &showarmcmdlist);
9618 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
9619 _("Set usage of ARM 32-bit mode."),
9620 _("Show usage of ARM 32-bit mode."),
9621 _("When off, a 26-bit PC will be used."),
9623 NULL, /* FIXME: i18n: Usage of ARM 32-bit
9625 &setarmcmdlist, &showarmcmdlist);
9627 /* Add a command to allow the user to force the FPU model. */
9628 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
9629 _("Set the floating point type."),
9630 _("Show the floating point type."),
9631 _("auto - Determine the FP typefrom the OS-ABI.\n\
9632 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
9633 fpa - FPA co-processor (GCC compiled).\n\
9634 softvfp - Software FP with pure-endian doubles.\n\
9635 vfp - VFP co-processor."),
9636 set_fp_model_sfunc, show_fp_model,
9637 &setarmcmdlist, &showarmcmdlist);
9639 /* Add a command to allow the user to force the ABI. */
9640 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
9643 NULL, arm_set_abi, arm_show_abi,
9644 &setarmcmdlist, &showarmcmdlist);
9646 /* Add two commands to allow the user to force the assumed
9648 add_setshow_enum_cmd ("fallback-mode", class_support,
9649 arm_mode_strings, &arm_fallback_mode_string,
9650 _("Set the mode assumed when symbols are unavailable."),
9651 _("Show the mode assumed when symbols are unavailable."),
9652 NULL, NULL, arm_show_fallback_mode,
9653 &setarmcmdlist, &showarmcmdlist);
9654 add_setshow_enum_cmd ("force-mode", class_support,
9655 arm_mode_strings, &arm_force_mode_string,
9656 _("Set the mode assumed even when symbols are available."),
9657 _("Show the mode assumed even when symbols are available."),
9658 NULL, NULL, arm_show_force_mode,
9659 &setarmcmdlist, &showarmcmdlist);
9661 /* Debugging flag. */
9662 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
9663 _("Set ARM debugging."),
9664 _("Show ARM debugging."),
9665 _("When on, arm-specific debugging is enabled."),
9667 NULL, /* FIXME: i18n: "ARM debugging is %s. */
9668 &setdebuglist, &showdebuglist);
9671 selftests::register_test ("arm-record", selftests::arm_record_test);
9676 /* ARM-reversible process record data structures. */
9678 #define ARM_INSN_SIZE_BYTES 4
9679 #define THUMB_INSN_SIZE_BYTES 2
9680 #define THUMB2_INSN_SIZE_BYTES 4
9683 /* Position of the bit within a 32-bit ARM instruction
9684 that defines whether the instruction is a load or store. */
9685 #define INSN_S_L_BIT_NUM 20
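/* For example, in the single data transfer encodings bit 20 (the L bit)
   is 1 for LDR and 0 for STR.  */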
9687 #define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
9690 unsigned int reg_len = LENGTH; \
9693 REGS = XNEWVEC (uint32_t, reg_len); \
9694 memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
9699 #define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
9702 unsigned int mem_len = LENGTH; \
9705 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
9706 memcpy(&MEMS->len, &RECORD_BUF[0], \
9707 sizeof(struct arm_mem_r) * LENGTH); \
9712 /* Checks whether the insn has already been recorded (boolean expression). */
9713 #define INSN_RECORDED(ARM_RECORD) \
9714 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
9716 /* ARM memory record structure. */
9719 uint32_t len; /* Record length. */
9720 uint32_t addr; /* Memory address. */
9723 /* An ARM instruction record contains the opcode and execution state
9724 of the current insn (before entry to decode_insn()), and the
9725 lists of to-be-modified registers and
9726 memory blocks (on return from decode_insn()). */
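/* Roughly: decode_insn() fills ARM_REGS/REG_REC_COUNT and
   ARM_MEMS/MEM_REC_COUNT through the REG_ALLOC/MEM_ALLOC macros above,
   and the caller then hands those lists to the record-full machinery.  */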
9728 typedef struct insn_decode_record_t
9730 struct gdbarch *gdbarch;
9731 struct regcache *regcache;
9732 CORE_ADDR this_addr; /* Address of the insn being decoded. */
9733 uint32_t arm_insn; /* Should accommodate thumb. */
9734 uint32_t cond; /* Condition code. */
9735 uint32_t opcode; /* Insn opcode. */
9736 uint32_t decode; /* Insn decode bits. */
9737 uint32_t mem_rec_count; /* No of mem records. */
9738 uint32_t reg_rec_count; /* No of reg records. */
9739 uint32_t *arm_regs; /* Registers to be saved for this record. */
9740 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
9741 } insn_decode_record;
9744 /* Checks ARM SBZ and SBO mandatory fields. */
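/* Roughly: BIT_NUM is 1-based; the LEN bits starting there must be all
   ones when SBO is non-zero ("should be one") and all zeros otherwise
   ("should be zero") for the encoding to be well formed.  */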
9747 sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
9749 uint32_t ones = bits (insn, bit_num - 1, (bit_num - 1) + (len - 1));
9768 enum arm_record_result
9770 ARM_RECORD_SUCCESS = 0,
9771 ARM_RECORD_FAILURE = 1
9778 } arm_record_strx_t;
9789 arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
9790 uint32_t *record_buf_mem, arm_record_strx_t str_type)
9793 struct regcache *reg_cache = arm_insn_r->regcache;
9794 ULONGEST u_regval[2]= {0};
9796 uint32_t reg_src1 = 0, reg_src2 = 0;
9797 uint32_t immed_high = 0, immed_low = 0, offset_8 = 0, tgt_mem_addr = 0;
9799 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
9800 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
9802 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
9804 /* 1) Handle misc store, immediate offset. */
9805 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9806 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9807 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9808 regcache_raw_read_unsigned (reg_cache, reg_src1,
9810 if (ARM_PC_REGNUM == reg_src1)
9812 /* If R15 was used as Rn, its value is the current PC+8. */
9813 u_regval[0] = u_regval[0] + 8;
9815 offset_8 = (immed_high << 4) | immed_low;
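/* For example, a hypothetical STRH r1, [r2, #0x24] encodes the offset as
   immedH == 0x2 (bits 8-11) and immedL == 0x4 (bits 0-3), so
   offset_8 == 0x24.  */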
9816 /* Calculate target store address. */
9817 if (14 == arm_insn_r->opcode)
9819 tgt_mem_addr = u_regval[0] + offset_8;
9823 tgt_mem_addr = u_regval[0] - offset_8;
9825 if (ARM_RECORD_STRH == str_type)
9827 record_buf_mem[0] = 2;
9828 record_buf_mem[1] = tgt_mem_addr;
9829 arm_insn_r->mem_rec_count = 1;
9831 else if (ARM_RECORD_STRD == str_type)
9833 record_buf_mem[0] = 4;
9834 record_buf_mem[1] = tgt_mem_addr;
9835 record_buf_mem[2] = 4;
9836 record_buf_mem[3] = tgt_mem_addr + 4;
9837 arm_insn_r->mem_rec_count = 2;
9840 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
9842 /* 2) Store, register offset. */
9844 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9846 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9847 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9848 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9851 /* If R15 was used as Rn, its value is the current PC+8. */
9852 u_regval[0] = u_regval[0] + 8;
9854 /* Calculate target store address, Rn +/- Rm, register offset. */
9855 if (12 == arm_insn_r->opcode)
9857 tgt_mem_addr = u_regval[0] + u_regval[1];
9861 tgt_mem_addr = u_regval[1] - u_regval[0];
9863 if (ARM_RECORD_STRH == str_type)
9865 record_buf_mem[0] = 2;
9866 record_buf_mem[1] = tgt_mem_addr;
9867 arm_insn_r->mem_rec_count = 1;
9869 else if (ARM_RECORD_STRD == str_type)
9871 record_buf_mem[0] = 4;
9872 record_buf_mem[1] = tgt_mem_addr;
9873 record_buf_mem[2] = 4;
9874 record_buf_mem[3] = tgt_mem_addr + 4;
9875 arm_insn_r->mem_rec_count = 2;
9878 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
9879 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9881 /* 3) Store, immediate pre-indexed. */
9882 /* 5) Store, immediate post-indexed. */
9883 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9884 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9885 offset_8 = (immed_high << 4) | immed_low;
9886 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9887 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9888 /* Calculate target store address, Rn +/- imm8, immediate offset. */
9889 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9891 tgt_mem_addr = u_regval[0] + offset_8;
9895 tgt_mem_addr = u_regval[0] - offset_8;
9897 if (ARM_RECORD_STRH == str_type)
9899 record_buf_mem[0] = 2;
9900 record_buf_mem[1] = tgt_mem_addr;
9901 arm_insn_r->mem_rec_count = 1;
9903 else if (ARM_RECORD_STRD == str_type)
9905 record_buf_mem[0] = 4;
9906 record_buf_mem[1] = tgt_mem_addr;
9907 record_buf_mem[2] = 4;
9908 record_buf_mem[3] = tgt_mem_addr + 4;
9909 arm_insn_r->mem_rec_count = 2;
9911 /* Record Rn also as it changes. */
9912 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9913 arm_insn_r->reg_rec_count = 1;
9915 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
9916 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9918 /* 4) Store, register pre-indexed. */
9919 /* 6) Store, register post-indexed. */
9920 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9921 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9922 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9923 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9924 /* Calculate target store address, Rn +/- Rm, register offset. */
9925 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9927 tgt_mem_addr = u_regval[0] + u_regval[1];
9931 tgt_mem_addr = u_regval[1] - u_regval[0];
9933 if (ARM_RECORD_STRH == str_type)
9935 record_buf_mem[0] = 2;
9936 record_buf_mem[1] = tgt_mem_addr;
9937 arm_insn_r->mem_rec_count = 1;
9939 else if (ARM_RECORD_STRD == str_type)
9941 record_buf_mem[0] = 4;
9942 record_buf_mem[1] = tgt_mem_addr;
9943 record_buf_mem[2] = 4;
9944 record_buf_mem[3] = tgt_mem_addr + 4;
9945 arm_insn_r->mem_rec_count = 2;
9947 /* Record Rn also as it changes. */
9948 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9949 arm_insn_r->reg_rec_count = 1;
9954 /* Handling ARM extension space insns. */
9957 arm_record_extension_space (insn_decode_record *arm_insn_r)
9959 int ret = 0; /* Return value: -1: record failure; 0: success. */
9960 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
9961 uint32_t record_buf[8], record_buf_mem[8];
9962 uint32_t reg_src1 = 0;
9963 struct regcache *reg_cache = arm_insn_r->regcache;
9964 ULONGEST u_regval = 0;
9966 gdb_assert (!INSN_RECORDED(arm_insn_r));
9967 /* Handle unconditional insn extension space. */
9969 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
9970 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9971 if (arm_insn_r->cond)
9973 /* PLD has no effect on architectural state; it just affects
9975 if (5 == ((opcode1 & 0xE0) >> 5))
9978 record_buf[0] = ARM_PS_REGNUM;
9979 record_buf[1] = ARM_LR_REGNUM;
9980 arm_insn_r->reg_rec_count = 2;
9982 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
9986 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
9987 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
9990 /* Undefined instruction on ARM V5; need to handle if later
9991 versions define it. */
9994 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
9995 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9996 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
9998 /* Handle arithmetic insn extension space. */
9999 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
10000 && !INSN_RECORDED(arm_insn_r))
10002 /* Handle MLA(S) and MUL(S). */
10003 if (in_inclusive_range (insn_op1, 0U, 3U))
10005 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10006 record_buf[1] = ARM_PS_REGNUM;
10007 arm_insn_r->reg_rec_count = 2;
10009 else if (in_inclusive_range (insn_op1, 4U, 15U))
10011 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
10012 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10013 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10014 record_buf[2] = ARM_PS_REGNUM;
10015 arm_insn_r->reg_rec_count = 3;
10019 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
10020 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
10021 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
10023 /* Handle control insn extension space. */
10025 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
10026 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
10028 if (!bit (arm_insn_r->arm_insn,25))
10030 if (!bits (arm_insn_r->arm_insn, 4, 7))
10032 if ((0 == insn_op1) || (2 == insn_op1))
10035 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10036 arm_insn_r->reg_rec_count = 1;
10038 else if (1 == insn_op1)
10040 /* CPSR is going to be changed. */
10041 record_buf[0] = ARM_PS_REGNUM;
10042 arm_insn_r->reg_rec_count = 1;
10044 else if (3 == insn_op1)
10046 /* SPSR is going to be changed. */
10047 /* We need to get SPSR value, which is yet to be done. */
10051 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
10056 record_buf[0] = ARM_PS_REGNUM;
10057 arm_insn_r->reg_rec_count = 1;
10059 else if (3 == insn_op1)
10062 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10063 arm_insn_r->reg_rec_count = 1;
10066 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
10069 record_buf[0] = ARM_PS_REGNUM;
10070 record_buf[1] = ARM_LR_REGNUM;
10071 arm_insn_r->reg_rec_count = 2;
10073 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
10075 /* QADD, QSUB, QDADD, QDSUB */
10076 record_buf[0] = ARM_PS_REGNUM;
10077 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10078 arm_insn_r->reg_rec_count = 2;
10080 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
10083 record_buf[0] = ARM_PS_REGNUM;
10084 record_buf[1] = ARM_LR_REGNUM;
10085 arm_insn_r->reg_rec_count = 2;
10087 /* Save SPSR also; how? */
10090 else if(8 == bits (arm_insn_r->arm_insn, 4, 7)
10091 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
10092 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
10093 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
10096 if (0 == insn_op1 || 1 == insn_op1)
10098 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
10099 /* We don't do optimization for SMULW<y> where we
10101 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10102 record_buf[1] = ARM_PS_REGNUM;
10103 arm_insn_r->reg_rec_count = 2;
10105 else if (2 == insn_op1)
10108 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10109 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
10110 arm_insn_r->reg_rec_count = 2;
10112 else if (3 == insn_op1)
10115 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10116 arm_insn_r->reg_rec_count = 1;
10122 /* MSR: immediate form. */
10125 /* CPSR is going to be changed. */
10126 record_buf[0] = ARM_PS_REGNUM;
10127 arm_insn_r->reg_rec_count = 1;
10129 else if (3 == insn_op1)
10131 /* SPSR is going to be changed. */
10132 /* We need to get the SPSR value, which is yet to be done. */
10138 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10139 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
10140 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
10142 /* Handle load/store insn extension space. */
10144 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
10145 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
10146 && !INSN_RECORDED(arm_insn_r))
10151 /* These insns change both registers and memory. */
10152 /* SWP or SWPB insn. */
10153 /* Get memory address given by Rn. */
10154 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10155 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
10156 /* SWP insn swaps a word. */
10157 if (8 == arm_insn_r->opcode)
10159 record_buf_mem[0] = 4;
10163 /* SWPB insn swaps only a byte. */
10164 record_buf_mem[0] = 1;
10166 record_buf_mem[1] = u_regval;
10167 arm_insn_r->mem_rec_count = 1;
10168 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10169 arm_insn_r->reg_rec_count = 1;
10171 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10174 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10177 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10180 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10181 record_buf[1] = record_buf[0] + 1;
10182 arm_insn_r->reg_rec_count = 2;
10184 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10187 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10190 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
10192 /* LDRH, LDRSB, LDRSH. */
10193 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10194 arm_insn_r->reg_rec_count = 1;
10199 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
10200 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
10201 && !INSN_RECORDED(arm_insn_r))
10204 /* Handle coprocessor insn extension space. */
10207 /* To be done for ARMv5 and later; as of now we return -1. */
10211 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10212 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10217 /* Handling opcode 000 insns. */
10220 arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
10222 struct regcache *reg_cache = arm_insn_r->regcache;
10223 uint32_t record_buf[8], record_buf_mem[8];
10224 ULONGEST u_regval[2] = {0};
10226 uint32_t reg_src1 = 0;
10227 uint32_t opcode1 = 0;
10229 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10230 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10231 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10233 if (!((opcode1 & 0x19) == 0x10))
10235 /* Data-processing (register) and Data-processing (register-shifted
10237 /* In all of the 11 shifter-operand modes, the insn modifies the
10238 destination register, which is given by bits 12-15. */
10239 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10240 record_buf[1] = ARM_PS_REGNUM;
10241 arm_insn_r->reg_rec_count = 2;
10243 else if ((arm_insn_r->decode < 8) && ((opcode1 & 0x19) == 0x10))
10245 /* Miscellaneous instructions */
10247 if (3 == arm_insn_r->decode && 0x12 == opcode1
10248 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10250 /* Handle BLX, branch and link/exchange. */
10251 if (9 == arm_insn_r->opcode)
10253 /* The branch state is chosen by setting the T bit of CPSR from bit[0] of Rm,
10254 and R14 stores the return address. */
10255 record_buf[0] = ARM_PS_REGNUM;
10256 record_buf[1] = ARM_LR_REGNUM;
10257 arm_insn_r->reg_rec_count = 2;
10260 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
10262 /* Handle enhanced software breakpoint insn, BKPT. */
10263 /* CPSR is changed so that execution continues in ARM state, normal
10264 interrupts are disabled, and Abort mode is entered. */
10265 /* The PC is set according to the high-vector configuration. */
10266 /* If the user hits the breakpoint and then reverse-executes, we
10267 need to go back with the previous CPSR and
10268 Program Counter. */
10269 record_buf[0] = ARM_PS_REGNUM;
10270 record_buf[1] = ARM_LR_REGNUM;
10271 arm_insn_r->reg_rec_count = 2;
10273 /* Save SPSR also; how? */
10276 else if (1 == arm_insn_r->decode && 0x12 == opcode1
10277 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10279 /* Handle BX, branch and exchange. */
10280 /* The branch state is chosen by setting the T bit of CPSR from bit[0] of Rm. */
10281 record_buf[0] = ARM_PS_REGNUM;
10282 arm_insn_r->reg_rec_count = 1;
10284 else if (1 == arm_insn_r->decode && 0x16 == opcode1
10285 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
10286 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
10288 /* Count leading zeros: CLZ. */
10289 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10290 arm_insn_r->reg_rec_count = 1;
10292 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10293 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10294 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
10295 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0))
10297 /* Handle MRS insn. */
10298 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10299 arm_insn_r->reg_rec_count = 1;
10302 else if (9 == arm_insn_r->decode && opcode1 < 0x10)
10304 /* Multiply and multiply-accumulate */
10306 /* Handle multiply instructions. */
10307 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
10308 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
10310 /* Handle MLA and MUL. */
10311 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10312 record_buf[1] = ARM_PS_REGNUM;
10313 arm_insn_r->reg_rec_count = 2;
10315 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10317 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
10318 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10319 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10320 record_buf[2] = ARM_PS_REGNUM;
10321 arm_insn_r->reg_rec_count = 3;
10324 else if (9 == arm_insn_r->decode && opcode1 > 0x10)
10326 /* Synchronization primitives */
10328 /* Handling SWP, SWPB. */
10329 /* These insns change both registers and memory. */
10330 /* SWP or SWPB insn. */
10332 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10333 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10334 /* SWP insn swaps a word. */
10335 if (8 == arm_insn_r->opcode)
10337 record_buf_mem[0] = 4;
10341 /* SWPB insn swaps only a byte. */
10342 record_buf_mem[0] = 1;
10344 record_buf_mem[1] = u_regval[0];
10345 arm_insn_r->mem_rec_count = 1;
10346 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10347 arm_insn_r->reg_rec_count = 1;
10349 else if (11 == arm_insn_r->decode || 13 == arm_insn_r->decode
10350 || 15 == arm_insn_r->decode)
10352 if ((opcode1 & 0x12) == 2)
10354 /* Extra load/store (unprivileged) */
10359 /* Extra load/store */
10360 switch (bits (arm_insn_r->arm_insn, 5, 6))
10363 if ((opcode1 & 0x05) == 0x0 || (opcode1 & 0x05) == 0x4)
10365 /* STRH (register), STRH (immediate) */
10366 arm_record_strx (arm_insn_r, &record_buf[0],
10367 &record_buf_mem[0], ARM_RECORD_STRH);
10369 else if ((opcode1 & 0x05) == 0x1)
10371 /* LDRH (register) */
10372 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10373 arm_insn_r->reg_rec_count = 1;
10375 if (bit (arm_insn_r->arm_insn, 21))
10377 /* Write back to Rn. */
10378 record_buf[arm_insn_r->reg_rec_count++]
10379 = bits (arm_insn_r->arm_insn, 16, 19);
10382 else if ((opcode1 & 0x05) == 0x5)
10384 /* LDRH (immediate), LDRH (literal) */
10385 int rn = bits (arm_insn_r->arm_insn, 16, 19);
10387 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10388 arm_insn_r->reg_rec_count = 1;
10392 /* LDRH (immediate) */
10393 if (bit (arm_insn_r->arm_insn, 21))
10395 /* Write back to Rn. */
10396 record_buf[arm_insn_r->reg_rec_count++] = rn;
10404 if ((opcode1 & 0x05) == 0x0)
10406 /* LDRD (register) */
10407 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10408 record_buf[1] = record_buf[0] + 1;
10409 arm_insn_r->reg_rec_count = 2;
10411 if (bit (arm_insn_r->arm_insn, 21))
10413 /* Write back to Rn. */
10414 record_buf[arm_insn_r->reg_rec_count++]
10415 = bits (arm_insn_r->arm_insn, 16, 19);
10418 else if ((opcode1 & 0x05) == 0x1)
10420 /* LDRSB (register) */
10421 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10422 arm_insn_r->reg_rec_count = 1;
10424 if (bit (arm_insn_r->arm_insn, 21))
10426 /* Write back to Rn. */
10427 record_buf[arm_insn_r->reg_rec_count++]
10428 = bits (arm_insn_r->arm_insn, 16, 19);
10431 else if ((opcode1 & 0x05) == 0x4 || (opcode1 & 0x05) == 0x5)
10433 /* LDRD (immediate), LDRD (literal), LDRSB (immediate),
10435 int rn = bits (arm_insn_r->arm_insn, 16, 19);
10437 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10438 arm_insn_r->reg_rec_count = 1;
10442 /* LDRD (immediate), LDRSB (immediate) */
10443 if (bit (arm_insn_r->arm_insn, 21))
10445 /* Write back to Rn. */
10446 record_buf[arm_insn_r->reg_rec_count++] = rn;
10454 if ((opcode1 & 0x05) == 0x0)
10456 /* STRD (register) */
10457 arm_record_strx (arm_insn_r, &record_buf[0],
10458 &record_buf_mem[0], ARM_RECORD_STRD);
10460 else if ((opcode1 & 0x05) == 0x1)
10462 /* LDRSH (register) */
10463 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10464 arm_insn_r->reg_rec_count = 1;
10466 if (bit (arm_insn_r->arm_insn, 21))
10468 /* Write back to Rn. */
10469 record_buf[arm_insn_r->reg_rec_count++]
10470 = bits (arm_insn_r->arm_insn, 16, 19);
10473 else if ((opcode1 & 0x05) == 0x4)
10475 /* STRD (immediate) */
10476 arm_record_strx (arm_insn_r, &record_buf[0],
10477 &record_buf_mem[0], ARM_RECORD_STRD);
10479 else if ((opcode1 & 0x05) == 0x5)
10481 /* LDRSH (immediate), LDRSH (literal) */
10482 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10483 arm_insn_r->reg_rec_count = 1;
10485 if (bit (arm_insn_r->arm_insn, 21))
10487 /* Write back to Rn. */
10488 record_buf[arm_insn_r->reg_rec_count++]
10489 = bits (arm_insn_r->arm_insn, 16, 19);
10505 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10506 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10510 /* Handling opcode 001 insns. */
10513 arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
10515 uint32_t record_buf[8], record_buf_mem[8];
10517 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10518 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10520 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10521 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
10522 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10525 /* Handle MSR insn. */
10526 if (9 == arm_insn_r->opcode)
10528 /* CPSR is going to be changed. */
10529 record_buf[0] = ARM_PS_REGNUM;
10530 arm_insn_r->reg_rec_count = 1;
10534 /* SPSR is going to be changed. */
10537 else if (arm_insn_r->opcode <= 15)
10539 /* Normal data processing insns. */
10540 /* Across all 11 shifter operand modes, these insns modify the destination
10541 register, which is specified by bits 12-15. */
10542 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10543 record_buf[1] = ARM_PS_REGNUM;
10544 arm_insn_r->reg_rec_count = 2;
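/* For example, ADDS r2, r3, #1 has opcode 0b0100 with the S bit set; bits
   12-15 give Rd = r2, so r2 and the CPSR are the registers recorded here.  */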
10551 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10552 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10557 arm_record_media (insn_decode_record *arm_insn_r)
10559 uint32_t record_buf[8];
10561 switch (bits (arm_insn_r->arm_insn, 22, 24))
10564 /* Parallel addition and subtraction, signed */
10566 /* Parallel addition and subtraction, unsigned */
10569 /* Packing, unpacking, saturation and reversal */
10571 int rd = bits (arm_insn_r->arm_insn, 12, 15);
10573 record_buf[arm_insn_r->reg_rec_count++] = rd;
10579 /* Signed multiplies */
10581 int rd = bits (arm_insn_r->arm_insn, 16, 19);
10582 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 22);
10584 record_buf[arm_insn_r->reg_rec_count++] = rd;
10586 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10587 else if (op1 == 0x4)
10588 record_buf[arm_insn_r->reg_rec_count++]
10589 = bits (arm_insn_r->arm_insn, 12, 15);
10595 if (bit (arm_insn_r->arm_insn, 21)
10596 && bits (arm_insn_r->arm_insn, 5, 6) == 0x2)
10599 record_buf[arm_insn_r->reg_rec_count++]
10600 = bits (arm_insn_r->arm_insn, 12, 15);
10602 else if (bits (arm_insn_r->arm_insn, 20, 21) == 0x0
10603 && bits (arm_insn_r->arm_insn, 5, 7) == 0x0)
10605 /* USAD8 and USADA8 */
10606 record_buf[arm_insn_r->reg_rec_count++]
10607 = bits (arm_insn_r->arm_insn, 16, 19);
10614 if (bits (arm_insn_r->arm_insn, 20, 21) == 0x3
10615 && bits (arm_insn_r->arm_insn, 5, 7) == 0x7)
10617 /* Permanently UNDEFINED */
10622 /* BFC, BFI and UBFX */
10623 record_buf[arm_insn_r->reg_rec_count++]
10624 = bits (arm_insn_r->arm_insn, 12, 15);
10633 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10638 /* Handle ARM mode instructions with opcode 010. */
10641 arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
10643 struct regcache *reg_cache = arm_insn_r->regcache;
10645 uint32_t reg_base, reg_dest;
10646 uint32_t offset_12, tgt_mem_addr;
10647 uint32_t record_buf[8], record_buf_mem[8];
10648 unsigned char wback;
10651 /* Calculate wback. */
10652 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
10653 || (bit (arm_insn_r->arm_insn, 21) == 1);
10655 arm_insn_r->reg_rec_count = 0;
10656 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10658 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10660 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT and LDRT. */
10663 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10664 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
10666 /* The LDR instruction is capable of doing branching. If MOV LR, PC
10667 precedes an LDR instruction that loads R15 (the PC), the pair
10668 emulates a branch and link instruction, and hence we need to save
10669 CPSR and PC as well. */
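/* For example, LDR pc, [sp], #4 (a common return sequence) loads R15, so the
   PC and CPSR are recorded, and the post-indexed write-back below also
   records SP as the base register.  */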
10670 if (ARM_PC_REGNUM == reg_dest)
10671 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10673 /* If wback is true, also save the base register, which is going to be
10676 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10680 /* STR (immediate), STRB (immediate), STRBT and STRT. */
10682 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
10683 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10685 /* Handle bit U. */
10686 if (bit (arm_insn_r->arm_insn, 23))
10688 /* U == 1: Add the offset. */
10689 tgt_mem_addr = (uint32_t) u_regval + offset_12;
10693 /* U == 0: subtract the offset. */
10694 tgt_mem_addr = (uint32_t) u_regval - offset_12;
10697 /* Bit 22 tells us whether the store instruction writes 1 byte or 4 bytes. */
10699 if (bit (arm_insn_r->arm_insn, 22))
10701 /* STRB and STRBT: 1 byte. */
10702 record_buf_mem[0] = 1;
10706 /* STR and STRT: 4 bytes. */
10707 record_buf_mem[0] = 4;
10710 /* Handle bit P. */
10711 if (bit (arm_insn_r->arm_insn, 24))
10712 record_buf_mem[1] = tgt_mem_addr;
10714 record_buf_mem[1] = (uint32_t) u_regval;
10716 arm_insn_r->mem_rec_count = 1;
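/* For example, STR r1, [r0, #-8]! with r0 = 0x2000: U is clear, so the target
   address is 0x1ff8; P is set, so the 4 bytes at 0x1ff8 are recorded, and the
   write-back below also records r0.  */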
10718 /* If wback is true, also save the base register, which is going to be
10721 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10724 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10725 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10729 /* Handling opcode 011 insns. */
10732 arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
10734 struct regcache *reg_cache = arm_insn_r->regcache;
10736 uint32_t shift_imm = 0;
10737 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
10738 uint32_t offset_12 = 0, tgt_mem_addr = 0;
10739 uint32_t record_buf[8], record_buf_mem[8];
10742 ULONGEST u_regval[2];
10744 if (bit (arm_insn_r->arm_insn, 4))
10745 return arm_record_media (arm_insn_r);
10747 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10748 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10750 /* Handle enhanced store insns and LDRD DSP insn,
10751 order begins according to addressing modes for store insns
10755 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10757 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10758 /* The LDR insn is capable of doing branching. If MOV LR, PC
10759 precedes an LDR insn that loads R15 (the PC), the pair
10760 emulates a branch and link insn, and hence we
10761 need to save CPSR and PC as well. */
10762 if (15 != reg_dest)
10764 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10765 arm_insn_r->reg_rec_count = 1;
10769 record_buf[0] = reg_dest;
10770 record_buf[1] = ARM_PS_REGNUM;
10771 arm_insn_r->reg_rec_count = 2;
10776 if (! bits (arm_insn_r->arm_insn, 4, 11))
10778 /* Store insn, register offset and register pre-indexed,
10779 register post-indexed. */
10781 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10783 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10784 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10786 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10788 if (15 == reg_src2)
10790 /* If R15 was used as Rn, the value is the current PC + 8. */
10791 /* Pre-indexed mode doesn't reach here; illegal insn. */
10792 u_regval[0] = u_regval[0] + 8;
10794 /* Calculate target store address, Rn +/- Rm, register offset. */
10796 if (bit (arm_insn_r->arm_insn, 23))
10798 tgt_mem_addr = u_regval[0] + u_regval[1];
10802 tgt_mem_addr = u_regval[1] - u_regval[0];
10805 switch (arm_insn_r->opcode)
10819 record_buf_mem[0] = 4;
10834 record_buf_mem[0] = 1;
10838 gdb_assert_not_reached ("no decoding pattern found");
10841 record_buf_mem[1] = tgt_mem_addr;
10842 arm_insn_r->mem_rec_count = 1;
10844 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10845 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10846 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10847 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10848 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10849 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10852 /* Rn is going to be changed in pre-indexed mode and
10853 post-indexed mode as well. */
10854 record_buf[0] = reg_src2;
10855 arm_insn_r->reg_rec_count = 1;
10860 /* Store insn, scaled register offset; scaled pre-indexed. */
10861 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
10863 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10865 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10866 /* Get shift_imm. */
10867 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
10868 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10869 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
10870 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10871 /* Offset_12 used as shift. */
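/* The two bits read into offset_12 above select the shift type (00 = LSL,
   01 = LSR, 10 = ASR, 11 = ROR/RRX).  For example, STR r1, [r0, r2, LSL #3]
   has shift_imm = 3, so the index below becomes Rm << 3.  */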
10875 /* Offset_12 used as index. */
10876 offset_12 = u_regval[0] << shift_imm;
10880 offset_12 = (!shift_imm) ? 0 : u_regval[0] >> shift_imm;
10886 if (bit (u_regval[0], 31))
10888 offset_12 = 0xFFFFFFFF;
10897 /* This is arithmetic shift. */
10898 offset_12 = s_word >> shift_imm;
10905 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
10907 /* Get C flag value and shift it by 31. */
10908 offset_12 = (((bit (u_regval[1], 29)) << 31) \
10909 | (u_regval[0]) >> 1);
10913 offset_12 = (u_regval[0] >> shift_imm)
10915 | (u_regval[0] << (32 - shift_imm));
10920 gdb_assert_not_reached ("no decoding pattern found");
10924 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10926 if (bit (arm_insn_r->arm_insn, 23))
10928 tgt_mem_addr = u_regval[1] + offset_12;
10932 tgt_mem_addr = u_regval[1] - offset_12;
10935 switch (arm_insn_r->opcode)
10949 record_buf_mem[0] = 4;
10964 record_buf_mem[0] = 1;
10968 gdb_assert_not_reached ("no decoding pattern found");
10971 record_buf_mem[1] = tgt_mem_addr;
10972 arm_insn_r->mem_rec_count = 1;
10974 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10975 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10976 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10977 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10978 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10979 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10982 /* Rn is going to be changed in register scaled pre-indexed
10983 mode, and scaled post-indexed mode. */
10984 record_buf[0] = reg_src2;
10985 arm_insn_r->reg_rec_count = 1;
10990 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10991 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10995 /* Handle ARM mode instructions with opcode 100. */
10998 arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
11000 struct regcache *reg_cache = arm_insn_r->regcache;
11001 uint32_t register_count = 0, register_bits;
11002 uint32_t reg_base, addr_mode;
11003 uint32_t record_buf[24], record_buf_mem[48];
11007 /* Fetch the list of registers. */
11008 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
11009 arm_insn_r->reg_rec_count = 0;
11011 /* Fetch the base register that contains the address we are loading data
11013 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
11015 /* Calculate wback. */
11016 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
11018 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11020 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
11022 /* Find out which registers are going to be loaded from memory. */
11023 while (register_bits)
11025 if (register_bits & 0x00000001)
11026 record_buf[arm_insn_r->reg_rec_count++] = register_count;
11027 register_bits = register_bits >> 1;
11032 /* If wback is true, also save the base register, which is going to be
11035 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11037 /* Save the CPSR register. */
11038 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
11042 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
11044 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
11046 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
11048 /* Find out how many registers are going to be stored to memory. */
11049 while (register_bits)
11051 if (register_bits & 0x00000001)
11053 register_bits = register_bits >> 1;
11058 /* STMDA (STMED): Decrement after. */
11060 record_buf_mem[1] = (uint32_t) u_regval
11061 - register_count * INT_REGISTER_SIZE + 4;
11063 /* STM (STMIA, STMEA): Increment after. */
11065 record_buf_mem[1] = (uint32_t) u_regval;
11067 /* STMDB (STMFD): Decrement before. */
11069 record_buf_mem[1] = (uint32_t) u_regval
11070 - register_count * INT_REGISTER_SIZE;
11072 /* STMIB (STMFA): Increment before. */
11074 record_buf_mem[1] = (uint32_t) u_regval + INT_REGISTER_SIZE;
11077 gdb_assert_not_reached ("no decoding pattern found");
11081 record_buf_mem[0] = register_count * INT_REGISTER_SIZE;
11082 arm_insn_r->mem_rec_count = 1;
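/* For example, STMDB sp!, {r4-r7, lr} with sp = 0x1000 stores five registers:
   decrement-before gives a start address of 0x1000 - 20 = 0xfec, 20 bytes are
   recorded from there, and the write-back below also records sp.  */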
11084 /* If wback is true, also save the base register, which is going to be
11087 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11090 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11091 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11095 /* Handling opcode 101 insns. */
11098 arm_record_b_bl (insn_decode_record *arm_insn_r)
11100 uint32_t record_buf[8];
11102 /* Handle B, BL, BLX(1) insns. */
11103 /* B simply branches so we do nothing here. */
11104 /* Note: BLX(1) doesn't fall here; it falls into the
11105 extension space instead. */
11106 if (bit (arm_insn_r->arm_insn, 24))
11108 record_buf[0] = ARM_LR_REGNUM;
11109 arm_insn_r->reg_rec_count = 1;
11112 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11118 arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
11120 printf_unfiltered (_("Process record does not support instruction "
11121 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11122 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11127 /* Record handler for vector data transfer instructions. */
11130 arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
11132 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
11133 uint32_t record_buf[4];
11135 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
11136 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
11137 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
11138 bit_l = bit (arm_insn_r->arm_insn, 20);
11139 bit_c = bit (arm_insn_r->arm_insn, 8);
11141 /* Handle VMOV instruction. */
11142 if (bit_l && bit_c)
11144 record_buf[0] = reg_t;
11145 arm_insn_r->reg_rec_count = 1;
11147 else if (bit_l && !bit_c)
11149 /* Handle VMOV instruction. */
11150 if (bits_a == 0x00)
11152 record_buf[0] = reg_t;
11153 arm_insn_r->reg_rec_count = 1;
11155 /* Handle VMRS instruction. */
11156 else if (bits_a == 0x07)
11159 reg_t = ARM_PS_REGNUM;
11161 record_buf[0] = reg_t;
11162 arm_insn_r->reg_rec_count = 1;
11165 else if (!bit_l && !bit_c)
11167 /* Handle VMOV instruction. */
11168 if (bits_a == 0x00)
11170 record_buf[0] = ARM_D0_REGNUM + reg_v;
11172 arm_insn_r->reg_rec_count = 1;
11174 /* Handle VMSR instruction. */
11175 else if (bits_a == 0x07)
11177 record_buf[0] = ARM_FPSCR_REGNUM;
11178 arm_insn_r->reg_rec_count = 1;
11181 else if (!bit_l && bit_c)
11183 /* Handle VMOV instruction. */
11184 if (!(bits_a & 0x04))
11186 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
11188 arm_insn_r->reg_rec_count = 1;
11190 /* Handle VDUP instruction. */
11193 if (bit (arm_insn_r->arm_insn, 21))
11195 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11196 record_buf[0] = reg_v + ARM_D0_REGNUM;
11197 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
11198 arm_insn_r->reg_rec_count = 2;
11202 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11203 record_buf[0] = reg_v + ARM_D0_REGNUM;
11204 arm_insn_r->reg_rec_count = 1;
11209 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11213 /* Record handler for extension register load/store instructions. */
11216 arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
11218 uint32_t opcode, single_reg;
11219 uint8_t op_vldm_vstm;
11220 uint32_t record_buf[8], record_buf_mem[128];
11221 ULONGEST u_regval = 0;
11223 struct regcache *reg_cache = arm_insn_r->regcache;
11225 opcode = bits (arm_insn_r->arm_insn, 20, 24);
11226 single_reg = !bit (arm_insn_r->arm_insn, 8);
11227 op_vldm_vstm = opcode & 0x1b;
11229 /* Handle VMOV instructions. */
11230 if ((opcode & 0x1e) == 0x04)
11232 if (bit (arm_insn_r->arm_insn, 20)) /* to_arm_registers bit 20? */
11234 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11235 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11236 arm_insn_r->reg_rec_count = 2;
11240 uint8_t reg_m = bits (arm_insn_r->arm_insn, 0, 3);
11241 uint8_t bit_m = bit (arm_insn_r->arm_insn, 5);
11245 /* The first S register number m is REG_M:M (M is bit 5),
11246 the corresponding D register number is REG_M:M / 2, which
11248 record_buf[arm_insn_r->reg_rec_count++] = ARM_D0_REGNUM + reg_m;
11249 /* The second S register number is REG_M:M + 1, the
11250 corresponding D register number is (REG_M:M + 1) / 2.
11251 IOW, if bit M is 1, the first and second S registers
11252 are mapped to different D registers, otherwise, they are
11253 in the same D register. */
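/* For example, VMOV s5, s6, r2, r3 has REG_M:M = 5 (REG_M = 2, M = 1), so s5
   lives in d2 and s6 in d3 and both D registers are recorded; with
   VMOV s4, s5, r2, r3 (M = 0) both S registers live in d2 and only d2 is
   recorded.  */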
11256 record_buf[arm_insn_r->reg_rec_count++]
11257 = ARM_D0_REGNUM + reg_m + 1;
11262 record_buf[0] = ((bit_m << 4) + reg_m + ARM_D0_REGNUM);
11263 arm_insn_r->reg_rec_count = 1;
11267 /* Handle VSTM and VPUSH instructions. */
11268 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
11269 || op_vldm_vstm == 0x12)
11271 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
11272 uint32_t memory_index = 0;
11274 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11275 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11276 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11277 imm_off32 = imm_off8 << 2;
11278 memory_count = imm_off8;
11280 if (bit (arm_insn_r->arm_insn, 23))
11281 start_address = u_regval;
11283 start_address = u_regval - imm_off32;
11285 if (bit (arm_insn_r->arm_insn, 21))
11287 record_buf[0] = reg_rn;
11288 arm_insn_r->reg_rec_count = 1;
11291 while (memory_count > 0)
11295 record_buf_mem[memory_index] = 4;
11296 record_buf_mem[memory_index + 1] = start_address;
11297 start_address = start_address + 4;
11298 memory_index = memory_index + 2;
11302 record_buf_mem[memory_index] = 4;
11303 record_buf_mem[memory_index + 1] = start_address;
11304 record_buf_mem[memory_index + 2] = 4;
11305 record_buf_mem[memory_index + 3] = start_address + 4;
11306 start_address = start_address + 8;
11307 memory_index = memory_index + 4;
11311 arm_insn_r->mem_rec_count = (memory_index >> 1);
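/* For example, VPUSH {d8-d9} with sp = 0x3000 has imm8 = 4; bit 23 is clear,
   so start_address = 0x3000 - 16 = 0x2ff0, the pushed area starting there is
   recorded, and sp itself is recorded because of the write-back.  */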
11313 /* Handle VLDM instructions. */
11314 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
11315 || op_vldm_vstm == 0x13)
11317 uint32_t reg_count, reg_vd;
11318 uint32_t reg_index = 0;
11319 uint32_t bit_d = bit (arm_insn_r->arm_insn, 22);
11321 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11322 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
11324 /* REG_VD is the first D register number. If the instruction
11325 loads memory to S registers (SINGLE_REG is TRUE), the register
11326 number is (REG_VD << 1 | bit D), so the corresponding D
11327 register number is (REG_VD << 1 | bit D) / 2 = REG_VD. */
11329 reg_vd = reg_vd | (bit_d << 4);
11331 if (bit (arm_insn_r->arm_insn, 21) /* write back */)
11332 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
11334 /* If the instruction loads memory to D register, REG_COUNT should
11335 be divided by 2, according to the ARM Architecture Reference
11336 Manual. If the instruction loads memory to S register, divide by
11337 2 as well because two S registers are mapped to D register. */
11338 reg_count = reg_count / 2;
11339 if (single_reg && bit_d)
11341 /* Increase the register count if S register list starts from
11342 an odd number (bit d is one). */
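/* For example, VLDMIA r1, {s3-s6} has Vd:D = 3 (reg_vd = 1, D = 1) and
   imm8 = 4, so reg_count becomes 4 / 2 + 1 = 3 and d1, d2 and d3 are
   recorded, which together cover s2-s7 and therefore s3-s6.  */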
11346 while (reg_count > 0)
11348 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
11351 arm_insn_r->reg_rec_count = reg_index;
11353 /* VSTR Vector store register. */
11354 else if ((opcode & 0x13) == 0x10)
11356 uint32_t start_address, reg_rn, imm_off32, imm_off8;
11357 uint32_t memory_index = 0;
11359 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11360 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11361 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11362 imm_off32 = imm_off8 << 2;
11364 if (bit (arm_insn_r->arm_insn, 23))
11365 start_address = u_regval + imm_off32;
11367 start_address = u_regval - imm_off32;
11371 record_buf_mem[memory_index] = 4;
11372 record_buf_mem[memory_index + 1] = start_address;
11373 arm_insn_r->mem_rec_count = 1;
11377 record_buf_mem[memory_index] = 4;
11378 record_buf_mem[memory_index + 1] = start_address;
11379 record_buf_mem[memory_index + 2] = 4;
11380 record_buf_mem[memory_index + 3] = start_address + 4;
11381 arm_insn_r->mem_rec_count = 2;
11384 /* VLDR Vector load register. */
11385 else if ((opcode & 0x13) == 0x11)
11387 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11391 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
11392 record_buf[0] = ARM_D0_REGNUM + reg_vd;
11396 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
11397 /* Record register D rather than pseudo register S. */
11398 record_buf[0] = ARM_D0_REGNUM + reg_vd / 2;
11400 arm_insn_r->reg_rec_count = 1;
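/* For example, VLDR s5, [r0, #8] has Vd:D = 5, so d2 (which holds s4 and s5)
   is the register recorded.  */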
11403 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11404 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11408 /* Record handler for arm/thumb mode VFP data processing instructions. */
11411 arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
11413 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
11414 uint32_t record_buf[4];
11415 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
11416 enum insn_types curr_insn_type = INSN_INV;
11418 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11419 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
11420 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
11421 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
11422 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
11423 bit_d = bit (arm_insn_r->arm_insn, 22);
11424 /* Mask off the "D" bit. */
11425 opc1 = opc1 & ~0x04;
11427 /* Handle VMLA, VMLS. */
11430 if (bit (arm_insn_r->arm_insn, 10))
11432 if (bit (arm_insn_r->arm_insn, 6))
11433 curr_insn_type = INSN_T0;
11435 curr_insn_type = INSN_T1;
11440 curr_insn_type = INSN_T1;
11442 curr_insn_type = INSN_T2;
11445 /* Handle VNMLA, VNMLS, VNMUL. */
11446 else if (opc1 == 0x01)
11449 curr_insn_type = INSN_T1;
11451 curr_insn_type = INSN_T2;
11454 else if (opc1 == 0x02 && !(opc3 & 0x01))
11456 if (bit (arm_insn_r->arm_insn, 10))
11458 if (bit (arm_insn_r->arm_insn, 6))
11459 curr_insn_type = INSN_T0;
11461 curr_insn_type = INSN_T1;
11466 curr_insn_type = INSN_T1;
11468 curr_insn_type = INSN_T2;
11471 /* Handle VADD, VSUB. */
11472 else if (opc1 == 0x03)
11474 if (!bit (arm_insn_r->arm_insn, 9))
11476 if (bit (arm_insn_r->arm_insn, 6))
11477 curr_insn_type = INSN_T0;
11479 curr_insn_type = INSN_T1;
11484 curr_insn_type = INSN_T1;
11486 curr_insn_type = INSN_T2;
11490 else if (opc1 == 0x08)
11493 curr_insn_type = INSN_T1;
11495 curr_insn_type = INSN_T2;
11497 /* Handle all other vfp data processing instructions. */
11498 else if (opc1 == 0x0b)
11501 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
11503 if (bit (arm_insn_r->arm_insn, 4))
11505 if (bit (arm_insn_r->arm_insn, 6))
11506 curr_insn_type = INSN_T0;
11508 curr_insn_type = INSN_T1;
11513 curr_insn_type = INSN_T1;
11515 curr_insn_type = INSN_T2;
11518 /* Handle VNEG and VABS. */
11519 else if ((opc2 == 0x01 && opc3 == 0x01)
11520 || (opc2 == 0x00 && opc3 == 0x03))
11522 if (!bit (arm_insn_r->arm_insn, 11))
11524 if (bit (arm_insn_r->arm_insn, 6))
11525 curr_insn_type = INSN_T0;
11527 curr_insn_type = INSN_T1;
11532 curr_insn_type = INSN_T1;
11534 curr_insn_type = INSN_T2;
11537 /* Handle VSQRT. */
11538 else if (opc2 == 0x01 && opc3 == 0x03)
11541 curr_insn_type = INSN_T1;
11543 curr_insn_type = INSN_T2;
11546 else if (opc2 == 0x07 && opc3 == 0x03)
11549 curr_insn_type = INSN_T1;
11551 curr_insn_type = INSN_T2;
11553 else if (opc3 & 0x01)
11556 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
11558 if (!bit (arm_insn_r->arm_insn, 18))
11559 curr_insn_type = INSN_T2;
11563 curr_insn_type = INSN_T1;
11565 curr_insn_type = INSN_T2;
11569 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
11572 curr_insn_type = INSN_T1;
11574 curr_insn_type = INSN_T2;
11576 /* Handle VCVTB, VCVTT. */
11577 else if ((opc2 & 0x0e) == 0x02)
11578 curr_insn_type = INSN_T2;
11579 /* Handle VCMP, VCMPE. */
11580 else if ((opc2 & 0x0e) == 0x04)
11581 curr_insn_type = INSN_T3;
11585 switch (curr_insn_type)
11588 reg_vd = reg_vd | (bit_d << 4);
11589 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11590 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
11591 arm_insn_r->reg_rec_count = 2;
11595 reg_vd = reg_vd | (bit_d << 4);
11596 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11597 arm_insn_r->reg_rec_count = 1;
11601 reg_vd = (reg_vd << 1) | bit_d;
11602 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11603 arm_insn_r->reg_rec_count = 1;
11607 record_buf[0] = ARM_FPSCR_REGNUM;
11608 arm_insn_r->reg_rec_count = 1;
11612 gdb_assert_not_reached ("no decoding pattern found");
11616 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11620 /* Handling opcode 110 insns. */
11623 arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
11625 uint32_t op1, op1_ebit, coproc;
11627 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11628 op1 = bits (arm_insn_r->arm_insn, 20, 25);
11629 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11631 if ((coproc & 0x0e) == 0x0a)
11633 /* Handle extension register ld/st instructions. */
11635 return arm_record_exreg_ld_st_insn (arm_insn_r);
11637 /* 64-bit transfers between arm core and extension registers. */
11638 if ((op1 & 0x3e) == 0x04)
11639 return arm_record_exreg_ld_st_insn (arm_insn_r);
11643 /* Handle coprocessor ld/st instructions. */
11648 return arm_record_unsupported_insn (arm_insn_r);
11651 return arm_record_unsupported_insn (arm_insn_r);
11654 /* Move to coprocessor from two arm core registers. */
11656 return arm_record_unsupported_insn (arm_insn_r);
11658 /* Move to two arm core registers from coprocessor. */
11663 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
11664 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
11665 arm_insn_r->reg_rec_count = 2;
11667 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
11671 return arm_record_unsupported_insn (arm_insn_r);
11674 /* Handling opcode 111 insns. */
11677 arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
11679 uint32_t op, op1_ebit, coproc, bits_24_25;
11680 struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
11681 struct regcache *reg_cache = arm_insn_r->regcache;
11683 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
11684 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11685 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11686 op = bit (arm_insn_r->arm_insn, 4);
11687 bits_24_25 = bits (arm_insn_r->arm_insn, 24, 25);
11689 /* Handle arm SWI/SVC system call instructions. */
11690 if (bits_24_25 == 0x3)
11692 if (tdep->arm_syscall_record != NULL)
11694 ULONGEST svc_operand, svc_number;
11696 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
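/* For example, the OABI encoding "swi 0x900004" yields svc_number 4 (write),
   while an EABI "svc 0" has a zero operand and the syscall number is taken
   from r7 instead.  */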
11698 if (svc_operand) /* OABI. */
11699 svc_number = svc_operand - 0x900000;
11701 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
11703 return tdep->arm_syscall_record (reg_cache, svc_number);
11707 printf_unfiltered (_("no syscall record support\n"));
11711 else if (bits_24_25 == 0x02)
11715 if ((coproc & 0x0e) == 0x0a)
11717 /* 8, 16, and 32-bit transfer */
11718 return arm_record_vdata_transfer_insn (arm_insn_r);
11725 uint32_t record_buf[1];
11727 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11728 if (record_buf[0] == 15)
11729 record_buf[0] = ARM_PS_REGNUM;
11731 arm_insn_r->reg_rec_count = 1;
11732 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
11745 if ((coproc & 0x0e) == 0x0a)
11747 /* VFP data-processing instructions. */
11748 return arm_record_vfp_data_proc_insn (arm_insn_r);
11759 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 25);
11763 if ((coproc & 0x0e) != 0x0a)
11769 else if (op1 == 4 || op1 == 5)
11771 if ((coproc & 0x0e) == 0x0a)
11773 /* 64-bit transfers between ARM core and extension */
11782 else if (op1 == 0 || op1 == 1)
11789 if ((coproc & 0x0e) == 0x0a)
11791 /* Extension register load/store */
11795 /* STC, STC2, LDC, LDC2 */
11804 /* Handling opcode 000 insns. */
11807 thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
11809 uint32_t record_buf[8];
11810 uint32_t reg_src1 = 0;
11812 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11814 record_buf[0] = ARM_PS_REGNUM;
11815 record_buf[1] = reg_src1;
11816 thumb_insn_r->reg_rec_count = 2;
11818 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11824 /* Handling opcode 001 insns. */
11827 thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
11829 uint32_t record_buf[8];
11830 uint32_t reg_src1 = 0;
11832 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11834 record_buf[0] = ARM_PS_REGNUM;
11835 record_buf[1] = reg_src1;
11836 thumb_insn_r->reg_rec_count = 2;
11838 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11843 /* Handling opcode 010 insns. */
11846 thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
11848 struct regcache *reg_cache = thumb_insn_r->regcache;
11849 uint32_t record_buf[8], record_buf_mem[8];
11851 uint32_t reg_src1 = 0, reg_src2 = 0;
11852 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
11854 ULONGEST u_regval[2] = {0};
11856 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
11858 if (bit (thumb_insn_r->arm_insn, 12))
11860 /* Handle load/store register offset. */
11861 uint32_t opB = bits (thumb_insn_r->arm_insn, 9, 11);
11863 if (in_inclusive_range (opB, 4U, 7U))
11865 /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH. */
11866 reg_src1 = bits (thumb_insn_r->arm_insn,0, 2);
11867 record_buf[0] = reg_src1;
11868 thumb_insn_r->reg_rec_count = 1;
11870 else if (in_inclusive_range (opB, 0U, 2U))
11872 /* STR(2), STRB(2), STRH(2) . */
11873 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11874 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
11875 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11876 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11878 record_buf_mem[0] = 4; /* STR (2). */
11880 record_buf_mem[0] = 1; /* STRB (2). */
11882 record_buf_mem[0] = 2; /* STRH (2). */
11883 record_buf_mem[1] = u_regval[0] + u_regval[1];
11884 thumb_insn_r->mem_rec_count = 1;
11887 else if (bit (thumb_insn_r->arm_insn, 11))
11889 /* Handle load from literal pool. */
11891 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11892 record_buf[0] = reg_src1;
11893 thumb_insn_r->reg_rec_count = 1;
11897 /* Special data instructions and branch and exchange */
11898 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
11899 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
11900 if ((3 == opcode2) && (!opcode3))
11902 /* Branch with exchange. */
11903 record_buf[0] = ARM_PS_REGNUM;
11904 thumb_insn_r->reg_rec_count = 1;
11908 /* Format 8; special data processing insns. */
11909 record_buf[0] = ARM_PS_REGNUM;
11910 record_buf[1] = (bit (thumb_insn_r->arm_insn, 7) << 3
11911 | bits (thumb_insn_r->arm_insn, 0, 2));
11912 thumb_insn_r->reg_rec_count = 2;
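/* For example, ADD r10, r3 (a high-register form) has bit 7 (H1) set and
   bits 0-2 = 2, so register r10 is recorded along with the CPSR.  */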
11917 /* Format 5; data processing insns. */
11918 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11919 if (bit (thumb_insn_r->arm_insn, 7))
11921 reg_src1 = reg_src1 + 8;
11923 record_buf[0] = ARM_PS_REGNUM;
11924 record_buf[1] = reg_src1;
11925 thumb_insn_r->reg_rec_count = 2;
11928 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11929 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11935 /* Handling opcode 001 insns. */
11938 thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
11940 struct regcache *reg_cache = thumb_insn_r->regcache;
11941 uint32_t record_buf[8], record_buf_mem[8];
11943 uint32_t reg_src1 = 0;
11944 uint32_t opcode = 0, immed_5 = 0;
11946 ULONGEST u_regval = 0;
11948 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11953 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11954 record_buf[0] = reg_src1;
11955 thumb_insn_r->reg_rec_count = 1;
11960 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11961 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11962 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11963 record_buf_mem[0] = 4;
11964 record_buf_mem[1] = u_regval + (immed_5 * 4);
11965 thumb_insn_r->mem_rec_count = 1;
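/* For example, STR r1, [r2, #12] has immed_5 = 3, so the 4 bytes at R2 + 12
   are the memory recorded.  */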
11968 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11969 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11975 /* Handling opcode 100 insns. */
11978 thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
11980 struct regcache *reg_cache = thumb_insn_r->regcache;
11981 uint32_t record_buf[8], record_buf_mem[8];
11983 uint32_t reg_src1 = 0;
11984 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
11986 ULONGEST u_regval = 0;
11988 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11993 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11994 record_buf[0] = reg_src1;
11995 thumb_insn_r->reg_rec_count = 1;
11997 else if (1 == opcode)
12000 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12001 record_buf[0] = reg_src1;
12002 thumb_insn_r->reg_rec_count = 1;
12004 else if (2 == opcode)
12007 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
12008 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12009 record_buf_mem[0] = 4;
12010 record_buf_mem[1] = u_regval + (immed_8 * 4);
12011 thumb_insn_r->mem_rec_count = 1;
12013 else if (0 == opcode)
12016 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
12017 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12018 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12019 record_buf_mem[0] = 2;
12020 record_buf_mem[1] = u_regval + (immed_5 * 2);
12021 thumb_insn_r->mem_rec_count = 1;
12024 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12025 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12031 /* Handling opcode 101 insns. */
12034 thumb_record_misc (insn_decode_record *thumb_insn_r)
12036 struct regcache *reg_cache = thumb_insn_r->regcache;
12038 uint32_t opcode = 0;
12039 uint32_t register_bits = 0, register_count = 0;
12040 uint32_t index = 0, start_address = 0;
12041 uint32_t record_buf[24], record_buf_mem[48];
12044 ULONGEST u_regval = 0;
12046 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12048 if (opcode == 0 || opcode == 1)
12050 /* ADR and ADD (SP plus immediate) */
12052 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12053 record_buf[0] = reg_src1;
12054 thumb_insn_r->reg_rec_count = 1;
12058 /* Miscellaneous 16-bit instructions */
12059 uint32_t opcode2 = bits (thumb_insn_r->arm_insn, 8, 11);
12064 /* SETEND and CPS */
12067 /* ADD/SUB (SP plus immediate) */
12068 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12069 record_buf[0] = ARM_SP_REGNUM;
12070 thumb_insn_r->reg_rec_count = 1;
12072 case 1: /* fall through */
12073 case 3: /* fall through */
12074 case 9: /* fall through */
12079 /* SXTH, SXTB, UXTH, UXTB */
12080 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
12081 thumb_insn_r->reg_rec_count = 1;
12083 case 4: /* fall through */
12086 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12087 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12088 while (register_bits)
12090 if (register_bits & 0x00000001)
12092 register_bits = register_bits >> 1;
12094 start_address = u_regval - \
12095 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
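/* For example, PUSH {r4-r6, lr} with sp = 0x1000 has register_count = 3 and
   bit 8 (the LR bit) set, so start_address = 0x1000 - 4 * (1 + 3) = 0xff0.  */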
12096 thumb_insn_r->mem_rec_count = register_count;
12097 while (register_count)
12099 record_buf_mem[(register_count * 2) - 1] = start_address;
12100 record_buf_mem[(register_count * 2) - 2] = 4;
12101 start_address = start_address + 4;
12104 record_buf[0] = ARM_SP_REGNUM;
12105 thumb_insn_r->reg_rec_count = 1;
12108 /* REV, REV16, REVSH */
12109 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
12110 thumb_insn_r->reg_rec_count = 1;
12112 case 12: /* fall through */
12115 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12116 while (register_bits)
12118 if (register_bits & 0x00000001)
12119 record_buf[index++] = register_count;
12120 register_bits = register_bits >> 1;
12123 record_buf[index++] = ARM_PS_REGNUM;
12124 record_buf[index++] = ARM_SP_REGNUM;
12125 thumb_insn_r->reg_rec_count = index;
12129 /* Handle enhanced software breakpoint insn, BKPT. */
12130 /* The CPSR is changed so that execution continues in ARM state, with
12131 normal interrupts disabled, entering Abort mode. */
12132 /* The PC is set according to the high vector configuration. */
12133 /* If the user hits the breakpoint and then reverse-executes, we need to go back
12134 to the previous CPSR and Program Counter. */
12135 record_buf[0] = ARM_PS_REGNUM;
12136 record_buf[1] = ARM_LR_REGNUM;
12137 thumb_insn_r->reg_rec_count = 2;
12138 /* We need to save SPSR value, which is not yet done. */
12139 printf_unfiltered (_("Process record does not support instruction "
12140 "0x%0x at address %s.\n"),
12141 thumb_insn_r->arm_insn,
12142 paddress (thumb_insn_r->gdbarch,
12143 thumb_insn_r->this_addr));
12147 /* If-Then, and hints */
12154 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12155 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12161 /* Handling opcode 110 insns. */
12164 thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
12166 struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
12167 struct regcache *reg_cache = thumb_insn_r->regcache;
12169 uint32_t ret = 0; /* Function return value: -1: record failure; 0: success. */
12170 uint32_t reg_src1 = 0;
12171 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
12172 uint32_t index = 0, start_address = 0;
12173 uint32_t record_buf[24], record_buf_mem[48];
12175 ULONGEST u_regval = 0;
12177 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12178 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
12184 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12186 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12187 while (register_bits)
12189 if (register_bits & 0x00000001)
12190 record_buf[index++] = register_count;
12191 register_bits = register_bits >> 1;
12194 record_buf[index++] = reg_src1;
12195 thumb_insn_r->reg_rec_count = index;
12197 else if (0 == opcode2)
12199 /* Handle STMIA. */
12200 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12202 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12203 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12204 while (register_bits)
12206 if (register_bits & 0x00000001)
12208 register_bits = register_bits >> 1;
12210 start_address = u_regval;
12211 thumb_insn_r->mem_rec_count = register_count;
12212 while (register_count)
12214 record_buf_mem[(register_count * 2) - 1] = start_address;
12215 record_buf_mem[(register_count * 2) - 2] = 4;
12216 start_address = start_address + 4;
12220 else if (0x1F == opcode1)
12222 /* Handle arm syscall insn. */
12223 if (tdep->arm_syscall_record != NULL)
12225 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
12226 ret = tdep->arm_syscall_record (reg_cache, u_regval);
12230 printf_unfiltered (_("no syscall record support\n"));
12235 /* B (1), the conditional branch, is automatically taken care of in
12236 process_record, as PC is saved there. */
12238 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12239 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12245 /* Handling opcode 111 insns. */
12248 thumb_record_branch (insn_decode_record *thumb_insn_r)
12250 uint32_t record_buf[8];
12251 uint32_t bits_h = 0;
12253 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
12255 if (2 == bits_h || 3 == bits_h)
12258 record_buf[0] = ARM_LR_REGNUM;
12259 thumb_insn_r->reg_rec_count = 1;
12261 else if (1 == bits_h)
12264 record_buf[0] = ARM_PS_REGNUM;
12265 record_buf[1] = ARM_LR_REGNUM;
12266 thumb_insn_r->reg_rec_count = 2;
12269 /* B(2) is automatically taken care of in process_record, as PC is saved there. */
12272 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12277 /* Handler for thumb2 load/store multiple instructions. */
12280 thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
12282 struct regcache *reg_cache = thumb2_insn_r->regcache;
12284 uint32_t reg_rn, op;
12285 uint32_t register_bits = 0, register_count = 0;
12286 uint32_t index = 0, start_address = 0;
12287 uint32_t record_buf[24], record_buf_mem[48];
12289 ULONGEST u_regval = 0;
12291 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12292 op = bits (thumb2_insn_r->arm_insn, 23, 24);
12294 if (0 == op || 3 == op)
12296 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12298 /* Handle RFE instruction. */
12299 record_buf[0] = ARM_PS_REGNUM;
12300 thumb2_insn_r->reg_rec_count = 1;
12304 /* Handle SRS instruction after reading banked SP. */
12305 return arm_record_unsupported_insn (thumb2_insn_r);
12308 else if (1 == op || 2 == op)
12310 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12312 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
12313 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12314 while (register_bits)
12316 if (register_bits & 0x00000001)
12317 record_buf[index++] = register_count;
12320 register_bits = register_bits >> 1;
12322 record_buf[index++] = reg_rn;
12323 record_buf[index++] = ARM_PS_REGNUM;
12324 thumb2_insn_r->reg_rec_count = index;
12328 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
12329 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12330 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12331 while (register_bits)
12333 if (register_bits & 0x00000001)
12336 register_bits = register_bits >> 1;
12341 /* Start address calculation for STM/STMIA/STMEA. */
12342 start_address = u_regval;
12346 /* Start address calculation for STMDB/STMFD. */
12347 start_address = u_regval - register_count * 4;
12350 thumb2_insn_r->mem_rec_count = register_count;
12351 while (register_count)
12353 record_buf_mem[register_count * 2 - 1] = start_address;
12354 record_buf_mem[register_count * 2 - 2] = 4;
12355 start_address = start_address + 4;
12358 record_buf[0] = reg_rn;
12359 record_buf[1] = ARM_PS_REGNUM;
12360 thumb2_insn_r->reg_rec_count = 2;
12364 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12366 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12368 return ARM_RECORD_SUCCESS;
12371 /* Handler for thumb2 load/store (dual/exclusive) and table branch instructions. */
12375 thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
12377 struct regcache *reg_cache = thumb2_insn_r->regcache;
12379 uint32_t reg_rd, reg_rn, offset_imm;
12380 uint32_t reg_dest1, reg_dest2;
12381 uint32_t address, offset_addr;
12382 uint32_t record_buf[8], record_buf_mem[8];
12383 uint32_t op1, op2, op3;
12385 ULONGEST u_regval[2];
12387 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
12388 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
12389 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
12391 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12393 if (!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
12395 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
12396 record_buf[0] = reg_dest1;
12397 record_buf[1] = ARM_PS_REGNUM;
12398 thumb2_insn_r->reg_rec_count = 2;
12401 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
12403 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12404 record_buf[2] = reg_dest2;
12405 thumb2_insn_r->reg_rec_count = 3;
12410 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12411 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12413 if (0 == op1 && 0 == op2)
12415 /* Handle STREX. */
12416 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12417 address = u_regval[0] + (offset_imm * 4);
12418 record_buf_mem[0] = 4;
12419 record_buf_mem[1] = address;
12420 thumb2_insn_r->mem_rec_count = 1;
12421 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12422 record_buf[0] = reg_rd;
12423 thumb2_insn_r->reg_rec_count = 1;
12425 else if (1 == op1 && 0 == op2)
12427 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12428 record_buf[0] = reg_rd;
12429 thumb2_insn_r->reg_rec_count = 1;
12430 address = u_regval[0];
12431 record_buf_mem[1] = address;
12435 /* Handle STREXB. */
12436 record_buf_mem[0] = 1;
12437 thumb2_insn_r->mem_rec_count = 1;
12441 /* Handle STREXH. */
12442 record_buf_mem[0] = 2;
12443 thumb2_insn_r->mem_rec_count = 1;
12447 /* Handle STREXD. */
12448 address = u_regval[0];
12449 record_buf_mem[0] = 4;
12450 record_buf_mem[2] = 4;
12451 record_buf_mem[3] = address + 4;
12452 thumb2_insn_r->mem_rec_count = 2;
12457 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12459 if (bit (thumb2_insn_r->arm_insn, 24))
12461 if (bit (thumb2_insn_r->arm_insn, 23))
12462 offset_addr = u_regval[0] + (offset_imm * 4);
12464 offset_addr = u_regval[0] - (offset_imm * 4);
12466 address = offset_addr;
12469 address = u_regval[0];
12471 record_buf_mem[0] = 4;
12472 record_buf_mem[1] = address;
12473 record_buf_mem[2] = 4;
12474 record_buf_mem[3] = address + 4;
12475 thumb2_insn_r->mem_rec_count = 2;
12476 record_buf[0] = reg_rn;
12477 thumb2_insn_r->reg_rec_count = 1;
12481 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12483 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12485 return ARM_RECORD_SUCCESS;
12488 /* Handler for thumb2 data processing (shift register and modified immediate) instructions. */
12492 thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
12494 uint32_t reg_rd, op;
12495 uint32_t record_buf[8];
12497 op = bits (thumb2_insn_r->arm_insn, 21, 24);
12498 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12500 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
12502 record_buf[0] = ARM_PS_REGNUM;
12503 thumb2_insn_r->reg_rec_count = 1;
12507 record_buf[0] = reg_rd;
12508 record_buf[1] = ARM_PS_REGNUM;
12509 thumb2_insn_r->reg_rec_count = 2;
12512 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12514 return ARM_RECORD_SUCCESS;
12517 /* Generic handler for thumb2 instructions which affect the destination and PS registers. */
12521 thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
12524 uint32_t record_buf[8];
12526 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12528 record_buf[0] = reg_rd;
12529 record_buf[1] = ARM_PS_REGNUM;
12530 thumb2_insn_r->reg_rec_count = 2;
12532 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12534 return ARM_RECORD_SUCCESS;
12537 /* Handler for thumb2 branch and miscellaneous control instructions. */
12540 thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
12542 uint32_t op, op1, op2;
12543 uint32_t record_buf[8];
12545 op = bits (thumb2_insn_r->arm_insn, 20, 26);
12546 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
12547 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12549 /* Handle MSR insn. */
12550 if (!(op1 & 0x2) && 0x38 == op)
12554 /* CPSR is going to be changed. */
12555 record_buf[0] = ARM_PS_REGNUM;
12556 thumb2_insn_r->reg_rec_count = 1;
12560 arm_record_unsupported_insn (thumb2_insn_r);
12564 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
12567 record_buf[0] = ARM_PS_REGNUM;
12568 record_buf[1] = ARM_LR_REGNUM;
12569 thumb2_insn_r->reg_rec_count = 2;
12572 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12574 return ARM_RECORD_SUCCESS;
12577 /* Handler for thumb2 store single data item instructions. */
12580 thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
12582 struct regcache *reg_cache = thumb2_insn_r->regcache;
12584 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
12585 uint32_t address, offset_addr;
12586 uint32_t record_buf[8], record_buf_mem[8];
12589 ULONGEST u_regval[2];
12591 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
12592 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
12593 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12594 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12596 if (bit (thumb2_insn_r->arm_insn, 23))
12599 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
12600 offset_addr = u_regval[0] + offset_imm;
12601 address = offset_addr;
12606 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
12608 /* Handle STRB (register). */
12609 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
12610 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
12611 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
12612 offset_addr = u_regval[1] << shift_imm;
12613 address = u_regval[0] + offset_addr;
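/* For example, STRB r1, [r2, r3, LSL #2] has shift_imm = 2, so the recorded
   byte lives at R2 + (R3 << 2).  */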
12617 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12618 if (bit (thumb2_insn_r->arm_insn, 10))
12620 if (bit (thumb2_insn_r->arm_insn, 9))
12621 offset_addr = u_regval[0] + offset_imm;
12623 offset_addr = u_regval[0] - offset_imm;
12625 address = offset_addr;
12628 address = u_regval[0];
12634 /* Store byte instructions. */
12637 record_buf_mem[0] = 1;
12639 /* Store half word instructions. */
12642 record_buf_mem[0] = 2;
12644 /* Store word instructions. */
12647 record_buf_mem[0] = 4;
12651 gdb_assert_not_reached ("no decoding pattern found");
12655 record_buf_mem[1] = address;
12656 thumb2_insn_r->mem_rec_count = 1;
12657 record_buf[0] = reg_rn;
12658 thumb2_insn_r->reg_rec_count = 1;
12660 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12662 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12664 return ARM_RECORD_SUCCESS;
12667 /* Handler for thumb2 load memory hints instructions. */
12670 thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
12672 uint32_t record_buf[8];
12673 uint32_t reg_rt, reg_rn;
12675 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
12676 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12678 if (ARM_PC_REGNUM != reg_rt)
12680 record_buf[0] = reg_rt;
12681 record_buf[1] = reg_rn;
12682 record_buf[2] = ARM_PS_REGNUM;
12683 thumb2_insn_r->reg_rec_count = 3;
12685 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12687 return ARM_RECORD_SUCCESS;
12690 return ARM_RECORD_FAILURE;
12693 /* Handler for thumb2 load word instructions. */
12696 thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
12698 uint32_t record_buf[8];
12700 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
12701 record_buf[1] = ARM_PS_REGNUM;
12702 thumb2_insn_r->reg_rec_count = 2;
12704 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12706 return ARM_RECORD_SUCCESS;
12709 /* Handler for thumb2 long multiply, long multiply accumulate, and
12710 divide instructions. */
12713 thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
12715 uint32_t opcode1 = 0, opcode2 = 0;
12716 uint32_t record_buf[8];
12718 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
12719 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
12721 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
12724 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
12725 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12726 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12727 record_buf[2] = ARM_PS_REGNUM;
12728 thumb2_insn_r->reg_rec_count = 3;
12730 else if (1 == opcode1 || 3 == opcode1)
12732 /* Handle SDIV and UDIV. */
12733 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12734 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12735 record_buf[2] = ARM_PS_REGNUM;
12736 thumb2_insn_r->reg_rec_count = 3;
12739 return ARM_RECORD_FAILURE;
12741 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12743 return ARM_RECORD_SUCCESS;
12746 /* Record handler for thumb32 coprocessor instructions. */
12749 thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
12751 if (bit (thumb2_insn_r->arm_insn, 25))
12752 return arm_record_coproc_data_proc (thumb2_insn_r);
12754 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
12757 /* Record handler for advanced SIMD structure load/store instructions. */
12760 thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
12762 struct regcache *reg_cache = thumb2_insn_r->regcache;
12763 uint32_t l_bit, a_bit, b_bits;
12764 uint32_t record_buf[128], record_buf_mem[128];
12765 uint32_t reg_rn, reg_vd, address, f_elem;
12766 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
12769 l_bit = bit (thumb2_insn_r->arm_insn, 21);
12770 a_bit = bit (thumb2_insn_r->arm_insn, 23);
12771 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
12772 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12773 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
12774 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
12775 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
12776 f_elem = 8 / f_ebytes;
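/* For example, a size field of 0b10 gives f_ebytes = 4 and f_elem = 2, i.e.
   two 32-bit elements per D register.  */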
12780 ULONGEST u_regval = 0;
12781 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12782 address = u_regval;
12787 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12789 if (b_bits == 0x07)
12791 else if (b_bits == 0x0a)
12793 else if (b_bits == 0x06)
12795 else if (b_bits == 0x02)
12800 for (index_r = 0; index_r < bf_regs; index_r++)
12802 for (index_e = 0; index_e < f_elem; index_e++)
12804 record_buf_mem[index_m++] = f_ebytes;
12805 record_buf_mem[index_m++] = address;
12806 address = address + f_ebytes;
12807 thumb2_insn_r->mem_rec_count += 1;
12812 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12814 if (b_bits == 0x09 || b_bits == 0x08)
12816 else if (b_bits == 0x03)
12821 for (index_r = 0; index_r < bf_regs; index_r++)
12822 for (index_e = 0; index_e < f_elem; index_e++)
12824 for (loop_t = 0; loop_t < 2; loop_t++)
12826 record_buf_mem[index_m++] = f_ebytes;
12827 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12828 thumb2_insn_r->mem_rec_count += 1;
12830 address = address + (2 * f_ebytes);
12834 else if ((b_bits & 0x0e) == 0x04)
12836 for (index_e = 0; index_e < f_elem; index_e++)
12838 for (loop_t = 0; loop_t < 3; loop_t++)
12840 record_buf_mem[index_m++] = f_ebytes;
12841 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12842 thumb2_insn_r->mem_rec_count += 1;
12844 address = address + (3 * f_ebytes);
12848 else if (!(b_bits & 0x0e))
12850 for (index_e = 0; index_e < f_elem; index_e++)
12852 for (loop_t = 0; loop_t < 4; loop_t++)
12854 record_buf_mem[index_m++] = f_ebytes;
12855 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12856 thumb2_insn_r->mem_rec_count += 1;
12858 address = address + (4 * f_ebytes);
12864 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
12866 if (bft_size == 0x00)
12868 else if (bft_size == 0x01)
12870 else if (bft_size == 0x02)
12876 if (!(b_bits & 0x0b) || b_bits == 0x08)
12877 thumb2_insn_r->mem_rec_count = 1;
12879 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
12880 thumb2_insn_r->mem_rec_count = 2;
12882 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
12883 thumb2_insn_r->mem_rec_count = 3;
12885 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
12886 thumb2_insn_r->mem_rec_count = 4;
12888 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
12890 record_buf_mem[index_m * 2] = f_ebytes;
12891 record_buf_mem[index_m * 2 + 1] = address + (index_m * f_ebytes);
12900 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12901 thumb2_insn_r->reg_rec_count = 1;
12903 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12904 thumb2_insn_r->reg_rec_count = 2;
12906 else if ((b_bits & 0x0e) == 0x04)
12907 thumb2_insn_r->reg_rec_count = 3;
12909 else if (!(b_bits & 0x0e))
12910 thumb2_insn_r->reg_rec_count = 4;
12915 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
12916 thumb2_insn_r->reg_rec_count = 1;
12918 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
12919 thumb2_insn_r->reg_rec_count = 2;
12921 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
12922 thumb2_insn_r->reg_rec_count = 3;
12924 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
12925 thumb2_insn_r->reg_rec_count = 4;
12927 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
12928 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
12932 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
12934 record_buf[index_r] = reg_rn;
12935 thumb2_insn_r->reg_rec_count += 1;
12938 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12940 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12945 /* Decodes thumb2 instruction type and invokes its record handler. */
12947 static unsigned int
12948 thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
12950 uint32_t op, op1, op2;
12952 op = bit (thumb2_insn_r->arm_insn, 15);
12953 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
12954 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
12958 if (!(op2 & 0x64))
12960 /* Load/store multiple instruction. */
12961 return thumb2_record_ld_st_multiple (thumb2_insn_r);
12963 else if ((op2 & 0x64) == 0x4)
12965 /* Load/store (dual/exclusive) and table branch instruction. */
12966 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
12968 else if ((op2 & 0x60) == 0x20)
12970 /* Data-processing (shifted register). */
12971 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12973 else if (op2 & 0x40)
12975 /* Co-processor instructions. */
12976 return thumb2_record_coproc_insn (thumb2_insn_r);
12979 else if (op1 == 0x02)
12983 /* Branches and miscellaneous control instructions. */
12984 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
12986 else if (op2 & 0x20)
12988 /* Data-processing (plain binary immediate) instruction. */
12989 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12993 /* Data-processing (modified immediate). */
12994 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12997 else if (op1 == 0x03)
12999 if (!(op2 & 0x71))
13001 /* Store single data item. */
13002 return thumb2_record_str_single_data (thumb2_insn_r);
13004 else if (!((op2 & 0x71) ^ 0x10))
13006 /* Advanced SIMD or structure load/store instructions. */
13007 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
13009 else if (!((op2 & 0x67) ^ 0x01))
13011 /* Load byte, memory hints instruction. */
13012 return thumb2_record_ld_mem_hints (thumb2_insn_r);
13014 else if (!((op2 & 0x67) ^ 0x03))
13016 /* Load halfword, memory hints instruction. */
13017 return thumb2_record_ld_mem_hints (thumb2_insn_r);
13019 else if (!((op2 & 0x67) ^ 0x05))
13021 /* Load word instruction. */
13022 return thumb2_record_ld_word (thumb2_insn_r);
13024 else if (!((op2 & 0x70) ^ 0x20))
13026 /* Data-processing (register) instruction. */
13027 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13029 else if (!((op2 & 0x78) ^ 0x30))
13031 /* Multiply, multiply accumulate, abs diff instruction. */
13032 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13034 else if (!((op2 & 0x78) ^ 0x38))
13036 /* Long multiply, long multiply accumulate, and divide. */
13037 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
13039 else if (op2 & 0x40)
13041 /* Co-processor instructions. */
13042 return thumb2_record_coproc_insn (thumb2_insn_r);
13050 /* Abstract memory reader. */
13052 class abstract_memory_reader
13055 /* Read LEN bytes of target memory at address MEMADDR, placing the
13056 results in GDB's memory at BUF. Return true on success. */
13058 virtual bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) = 0;
13061 /* Instruction reader from real target. */
13063 class instruction_reader : public abstract_memory_reader
13066 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
13068 if (target_read_memory (memaddr, buf, len))
13077 /* Extract an arm/thumb/thumb2 instruction depending on INSN_SIZE. Return 0
13078 on success and a positive value on failure. */
13081 extract_arm_insn (abstract_memory_reader& reader,
13082 insn_decode_record *insn_record, uint32_t insn_size)
13084 gdb_byte buf[insn_size];
13086 memset (&buf[0], 0, insn_size);
13088 if (!reader.read (insn_record->this_addr, buf, insn_size))
13090 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
13092 gdbarch_byte_order_for_code (insn_record->gdbarch));
13096 typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
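/* Illustrative sketch only (not used by the decoder): a reader that serves
   bytes from a caller-owned buffer, treating MEMADDR as an offset into that
   buffer.  The class name and members are invented here; the self tests
   below use the same idea specialised to 16-bit Thumb halfwords.  */

class example_buffer_reader : public abstract_memory_reader
{
public:
  example_buffer_reader (const gdb_byte *bytes, size_t size)
    : m_bytes (bytes), m_size (size)
  {}

  bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
  {
    /* Reject reads that run past the end of the buffer.  */
    if (memaddr + len > m_size)
      return false;

    memcpy (buf, m_bytes + memaddr, len);
    return true;
  }

private:
  const gdb_byte *m_bytes;
  size_t m_size;
};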
13098 /* Decode arm/thumb insn depending on condition codes and opcodes; and
13102 decode_insn (abstract_memory_reader &reader, insn_decode_record *arm_record,
13103 record_type_t record_type, uint32_t insn_size)
13106 /* (Counting bits from 0) bits 25, 26 and 27 decode the type of arm
13108 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
13110 arm_record_data_proc_misc_ld_str, /* 000. */
13111 arm_record_data_proc_imm, /* 001. */
13112 arm_record_ld_st_imm_offset, /* 010. */
13113 arm_record_ld_st_reg_offset, /* 011. */
13114 arm_record_ld_st_multiple, /* 100. */
13115 arm_record_b_bl, /* 101. */
13116 arm_record_asimd_vfp_coproc, /* 110. */
13117 arm_record_coproc_data_proc /* 111. */
13120 /* (Counting bits from 0) bits 13, 14 and 15 decode the type of thumb
13122 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
13124 thumb_record_shift_add_sub, /* 000. */
13125 thumb_record_add_sub_cmp_mov, /* 001. */
13126 thumb_record_ld_st_reg_offset, /* 010. */
13127 thumb_record_ld_st_imm_offset, /* 011. */
13128 thumb_record_ld_st_stack, /* 100. */
13129 thumb_record_misc, /* 101. */
13130 thumb_record_ldm_stm_swi, /* 110. */
13131 thumb_record_branch /* 111. */
13134 uint32_t ret = 0; /* Return value: 0 on success, non-zero on failure. */
13135 uint32_t insn_id = 0;
13137 if (extract_arm_insn (reader, arm_record, insn_size))
13141 printf_unfiltered (_("Process record: error reading memory at "
13142 "addr %s len = %d.\n"),
13143 paddress (arm_record->gdbarch,
13144 arm_record->this_addr), insn_size);
13148 else if (ARM_RECORD == record_type)
13150 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
13151 insn_id = bits (arm_record->arm_insn, 25, 27);
13153 if (arm_record->cond == 0xf)
13154 ret = arm_record_extension_space (arm_record);
13157 /* The insn did not fall into the extension space, so decode it
13158 with the ordinary handler selected by bits 25-27. */
13159 ret = arm_handle_insn[insn_id] (arm_record);
13161 if (ret != ARM_RECORD_SUCCESS)
13163 arm_record_unsupported_insn (arm_record);
13167 else if (THUMB_RECORD == record_type)
13169 /* Thumb instructions do not have condition codes, so set cond to -1. */
13170 arm_record->cond = -1;
13171 insn_id = bits (arm_record->arm_insn, 13, 15);
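/* For example, the Thumb encoding 0xb2db ("uxtb r3, r3", used in the
   self tests below) has bits 13-15 equal to 0x5, so it is handled by
   thumb_record_misc.  */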
13172 ret = thumb_handle_insn[insn_id] (arm_record);
13173 if (ret != ARM_RECORD_SUCCESS)
13175 arm_record_unsupported_insn (arm_record);
13179 else if (THUMB2_RECORD == record_type)
13182 /* Thumb instructions do not have condition codes, so set cond to -1. */
13182 arm_record->cond = -1;
13184 /* Swap the first half of the 32-bit thumb instruction with the second half. */
13185 arm_record->arm_insn
13186 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
13188 ret = thumb2_record_decode_insn_handler (arm_record);
13190 if (ret != ARM_RECORD_SUCCESS)
13192 arm_record_unsupported_insn (arm_record);
13198 /* Unreachable: record_type must be one of the cases handled above. */
13199 gdb_assert_not_reached ("not a valid instruction, could not decode");
13206 namespace selftests {
13208 /* Provide both 16-bit and 32-bit thumb instructions. */
13210 class instruction_reader_thumb : public abstract_memory_reader
13213 template<size_t SIZE>
13214 instruction_reader_thumb (enum bfd_endian endian,
13215 const uint16_t (&insns)[SIZE])
13216 : m_endian (endian), m_insns (insns), m_insns_size (SIZE)
13219 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
13221 SELF_CHECK (len == 4 || len == 2);
13222 SELF_CHECK (memaddr % 2 == 0);
13223 SELF_CHECK ((memaddr / 2) < m_insns_size);
13225 store_unsigned_integer (buf, 2, m_endian, m_insns[memaddr / 2]);
13228 store_unsigned_integer (&buf[2], 2, m_endian,
13229 m_insns[memaddr / 2 + 1]);
13235 enum bfd_endian m_endian;
13236 const uint16_t *m_insns;
13237 size_t m_insns_size;
13241 arm_record_test (void)
13243 struct gdbarch_info info;
13244 gdbarch_info_init (&info);
13245 info.bfd_arch_info = bfd_scan_arch ("arm");
13247 struct gdbarch *gdbarch = gdbarch_find_by_info (info);
13249 SELF_CHECK (gdbarch != NULL);
13251 /* 16-bit Thumb instructions. */
13253 insn_decode_record arm_record;
13255 memset (&arm_record, 0, sizeof (insn_decode_record));
13256 arm_record.gdbarch = gdbarch;
13258 static const uint16_t insns[] = {
13259 /* db b2 uxtb r3, r3 */
13261 /* cd 58 ldr r5, [r1, r3] */
13265 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
13266 instruction_reader_thumb reader (endian, insns);
13267 int ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13268 THUMB_INSN_SIZE_BYTES);
13270 SELF_CHECK (ret == 0);
13271 SELF_CHECK (arm_record.mem_rec_count == 0);
13272 SELF_CHECK (arm_record.reg_rec_count == 1);
13273 SELF_CHECK (arm_record.arm_regs[0] == 3);
13275 arm_record.this_addr += 2;
13276 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13277 THUMB_INSN_SIZE_BYTES);
13279 SELF_CHECK (ret == 0);
13280 SELF_CHECK (arm_record.mem_rec_count == 0);
13281 SELF_CHECK (arm_record.reg_rec_count == 1);
13282 SELF_CHECK (arm_record.arm_regs[0] == 5);
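/* Both 16-bit test insns modify exactly one core register (r3 for uxtb,
   r5 for ldr) and no memory, which is what the checks above assert.  */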
13285 /* 32-bit Thumb-2 instructions. */
13287 insn_decode_record arm_record;
13289 memset (&arm_record, 0, sizeof (insn_decode_record));
13290 arm_record.gdbarch = gdbarch;
13292 static const uint16_t insns[] = {
13293 /* 1d ee 70 7f mrc 15, 0, r7, cr13, cr0, {3} */
13297 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
13298 instruction_reader_thumb reader (endian, insns);
13299 int ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
13300 THUMB2_INSN_SIZE_BYTES);
13302 SELF_CHECK (ret == 0);
13303 SELF_CHECK (arm_record.mem_rec_count == 0);
13304 SELF_CHECK (arm_record.reg_rec_count == 1);
13305 SELF_CHECK (arm_record.arm_regs[0] == 7);
13308 } // namespace selftests
13309 #endif /* GDB_SELF_TEST */
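/* Illustrative sketch only: a minimal driver for decode_insn, assuming a
   16-bit Thumb instruction at PC on the live target.  Nothing in GDB calls
   this function; the real entry point is arm_process_record below.  */

static int
example_decode_thumb_at (struct gdbarch *gdbarch, struct regcache *regcache,
                         CORE_ADDR pc)
{
  insn_decode_record rec;

  memset (&rec, 0, sizeof (insn_decode_record));
  rec.gdbarch = gdbarch;
  rec.regcache = regcache;
  rec.this_addr = pc;

  instruction_reader reader;
  int ret = decode_insn (reader, &rec, THUMB_RECORD, THUMB_INSN_SIZE_BYTES);

  /* rec.arm_regs / rec.arm_mems now describe what the insn would clobber;
     free them the same way deallocate_reg_mem (below) does.  */
  xfree (rec.arm_regs);
  xfree (rec.arm_mems);
  return ret;
}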
13311 /* Free the register and memory record buffers allocated while decoding an instruction. */
13314 deallocate_reg_mem (insn_decode_record *record)
13316 xfree (record->arm_regs);
13317 xfree (record->arm_mems);
13321 /* Parse the current instruction and record the values of the registers and
13322 memory that will be changed by the current instruction to "record_arch_list".
13323 Return -1 if something is wrong. */
13326 arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
13327 CORE_ADDR insn_addr)
13330 uint32_t no_of_rec = 0;
13331 uint32_t ret = 0; /* Return value: 0 on success, non-zero on record failure. */
13332 ULONGEST t_bit = 0, insn_id = 0;
13334 ULONGEST u_regval = 0;
13336 insn_decode_record arm_record;
13338 memset (&arm_record, 0, sizeof (insn_decode_record));
13339 arm_record.regcache = regcache;
13340 arm_record.this_addr = insn_addr;
13341 arm_record.gdbarch = gdbarch;
13344 if (record_debug > 1)
13346 fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
13348 paddress (gdbarch, arm_record.this_addr));
13351 instruction_reader reader;
13352 if (extract_arm_insn (reader, &arm_record, 2))
13356 printf_unfiltered (_("Process record: error reading memory at "
13357 "addr %s len = %d.\n"),
13358 paddress (arm_record.gdbarch,
13359 arm_record.this_addr), 2);
13364 /* Check the CPSR T bit to see whether the target is executing thumb or arm code. */
13366 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
13367 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
13370 if (!(u_regval & t_bit))
13372 /* We are decoding arm insn. */
13373 ret = decode_insn (reader, &arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
13377 insn_id = bits (arm_record.arm_insn, 11, 15);
13378 /* Is it a thumb2 instruction? */
13379 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
13381 ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
13382 THUMB2_INSN_SIZE_BYTES);
13386 /* We are decoding thumb insn. */
13387 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13388 THUMB_INSN_SIZE_BYTES);
13394 /* Record registers. */
13395 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
13396 if (arm_record.arm_regs)
13398 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
13400 if (record_full_arch_list_add_reg
13401 (arm_record.regcache, arm_record.arm_regs[no_of_rec]))
13405 /* Record memories. */
13406 if (arm_record.arm_mems)
13408 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
13410 if (record_full_arch_list_add_mem
13411 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
13412 arm_record.arm_mems[no_of_rec].len))
13417 if (record_full_arch_list_add_end ())
13422 deallocate_reg_mem (&arm_record);
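/* For example, for the Thumb insn "uxtb r3, r3" the decoder reports a
   single register (r3); together with the unconditional ARM_PC_REGNUM
   entry added above, the full record gets two register entries and no
   memory entries.  */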