1 /* Common target dependent code for GDB on ARM systems.
3 Copyright (C) 1988-2018 Free Software Foundation, Inc.
5 This file is part of GDB.
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
22 #include <ctype.h> /* XXX for isupper (). */
29 #include "dis-asm.h" /* For register styles. */
32 #include "reggroups.h"
33 #include "target-float.h"
35 #include "arch-utils.h"
37 #include "frame-unwind.h"
38 #include "frame-base.h"
39 #include "trad-frame.h"
41 #include "dwarf2-frame.h"
43 #include "prologue-value.h"
45 #include "target-descriptions.h"
46 #include "user-regs.h"
47 #include "observable.h"
50 #include "arch/arm-get-next-pcs.h"
52 #include "gdb/sim-arm.h"
55 #include "coff/internal.h"
61 #include "record-full.h"
64 #include "features/arm/arm-with-m.c"
65 #include "features/arm/arm-with-m-fpa-layout.c"
66 #include "features/arm/arm-with-m-vfp-d16.c"
67 #include "features/arm/arm-with-iwmmxt.c"
68 #include "features/arm/arm-with-vfpv2.c"
69 #include "features/arm/arm-with-vfpv3.c"
70 #include "features/arm/arm-with-neon.c"
78 /* Macros for setting and testing a bit in a minimal symbol that marks
it as a Thumb function.  The MSB of the minimal symbol's "info" field
80 is used for this purpose.
82 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
83 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
85 #define MSYMBOL_SET_SPECIAL(msym) \
86 MSYMBOL_TARGET_FLAG_1 (msym) = 1
88 #define MSYMBOL_IS_SPECIAL(msym) \
89 MSYMBOL_TARGET_FLAG_1 (msym)
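/* These flags are set when symbols are read in (the ELF and COFF readers
   mark Thumb functions as "special") and are consulted by arm_pc_is_thumb
   below when the symbol table is our only source of mode information.  */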
91 /* Per-objfile data used for mapping symbols. */
92 static const struct objfile_data *arm_objfile_data_key;
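/* ARM ELF objects use mapping symbols ($a, $t and $d) to mark the start of
   ARM code, Thumb code and data within a section.  Each arm_mapping_symbol
   records the section-relative address of one such symbol together with its
   type character.  */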
94 struct arm_mapping_symbol
99 typedef struct arm_mapping_symbol arm_mapping_symbol_s;
100 DEF_VEC_O(arm_mapping_symbol_s);
102 struct arm_per_objfile
104 VEC(arm_mapping_symbol_s) **section_maps;
107 /* The list of available "set arm ..." and "show arm ..." commands. */
108 static struct cmd_list_element *setarmcmdlist = NULL;
109 static struct cmd_list_element *showarmcmdlist = NULL;
111 /* The type of floating-point to use. Keep this in sync with enum
112 arm_float_model, and the help string in _initialize_arm_tdep. */
113 static const char *const fp_model_strings[] =
123 /* A variable that can be configured by the user. */
124 static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
125 static const char *current_fp_model = "auto";
127 /* The ABI to use. Keep this in sync with arm_abi_kind. */
128 static const char *const arm_abi_strings[] =
136 /* A variable that can be configured by the user. */
137 static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
138 static const char *arm_abi_string = "auto";
140 /* The execution mode to assume. */
141 static const char *const arm_mode_strings[] =
149 static const char *arm_fallback_mode_string = "auto";
150 static const char *arm_force_mode_string = "auto";
152 /* The standard register names, and all the valid aliases for them. Note
153 that `fp', `sp' and `pc' are not added in this alias list, because they
154 have been added as builtin user registers in
155 std-regs.c:_initialize_frame_reg. */
160 } arm_register_aliases[] = {
161 /* Basic register numbers. */
178 /* Synonyms (argument and variable registers). */
191 /* Other platform-specific names for r9. */
197 /* Names used by GCC (not listed in the ARM EABI). */
199 /* A special name from the older ATPCS. */
203 static const char *const arm_register_names[] =
204 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
205 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
206 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
207 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
208 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
209 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
210 "fps", "cpsr" }; /* 24 25 */
212 /* Holds the current set of options to be passed to the disassembler. */
213 static char *arm_disassembler_options;
215 /* Valid register name styles. */
216 static const char **valid_disassembly_styles;
218 /* Disassembly style to use. Default to "std" register names. */
219 static const char *disassembly_style;
/* This is used to keep the bfd arch_info in sync with the disassembly style.  */
223 static void set_disassembly_style_sfunc (const char *, int,
224 struct cmd_list_element *);
225 static void show_disassembly_style_sfunc (struct ui_file *, int,
226 struct cmd_list_element *,
229 static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
230 readable_regcache *regcache,
231 int regnum, gdb_byte *buf);
232 static void arm_neon_quad_write (struct gdbarch *gdbarch,
233 struct regcache *regcache,
234 int regnum, const gdb_byte *buf);
237 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self);
240 /* get_next_pcs operations. */
241 static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = {
242 arm_get_next_pcs_read_memory_unsigned_integer,
243 arm_get_next_pcs_syscall_next_pc,
244 arm_get_next_pcs_addr_bits_remove,
245 arm_get_next_pcs_is_thumb,
249 struct arm_prologue_cache
251 /* The stack pointer at the time this frame was created; i.e. the
252 caller's stack pointer when this function was called. It is used
253 to identify this frame. */
256 /* The frame base for this frame is just prev_sp - frame size.
257 FRAMESIZE is the distance from the frame pointer to the
258 initial stack pointer. */
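/* For example (illustrative only): if the prologue pushes {r4-r7, lr} and
   then subtracts 8 from SP, FRAMESIZE is 28 and PREV_SP is the value of the
   frame register plus 28.  */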
262 /* The register used to hold the frame pointer for this frame. */
265 /* Saved register offsets. */
266 struct trad_frame_saved_reg *saved_regs;
269 static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
270 CORE_ADDR prologue_start,
271 CORE_ADDR prologue_end,
272 struct arm_prologue_cache *cache);
/* Architecture version for displaced stepping.  This affects the behaviour
of certain instructions, and really should not be hard-wired.  */
277 #define DISPLACED_STEPPING_ARCH_VERSION 5
279 /* Set to true if the 32-bit mode is in use. */
283 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
286 arm_psr_thumb_bit (struct gdbarch *gdbarch)
288 if (gdbarch_tdep (gdbarch)->is_m)
294 /* Determine if the processor is currently executing in Thumb mode. */
297 arm_is_thumb (struct regcache *regcache)
300 ULONGEST t_bit = arm_psr_thumb_bit (regcache->arch ());
302 cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
304 return (cpsr & t_bit) != 0;
307 /* Determine if FRAME is executing in Thumb mode. */
310 arm_frame_is_thumb (struct frame_info *frame)
313 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
315 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
316 directly (from a signal frame or dummy frame) or by interpreting
317 the saved LR (from a prologue or DWARF frame). So consult it and
318 trust the unwinders. */
319 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
321 return (cpsr & t_bit) != 0;
324 /* Callback for VEC_lower_bound. */
327 arm_compare_mapping_symbols (const struct arm_mapping_symbol *lhs,
328 const struct arm_mapping_symbol *rhs)
330 return lhs->value < rhs->value;
333 /* Search for the mapping symbol covering MEMADDR. If one is found,
334 return its type. Otherwise, return 0. If START is non-NULL,
335 set *START to the location of the mapping symbol. */
338 arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
340 struct obj_section *sec;
342 /* If there are mapping symbols, consult them. */
343 sec = find_pc_section (memaddr);
346 struct arm_per_objfile *data;
347 VEC(arm_mapping_symbol_s) *map;
348 struct arm_mapping_symbol map_key = { memaddr - obj_section_addr (sec),
352 data = (struct arm_per_objfile *) objfile_data (sec->objfile,
353 arm_objfile_data_key);
356 map = data->section_maps[sec->the_bfd_section->index];
357 if (!VEC_empty (arm_mapping_symbol_s, map))
359 struct arm_mapping_symbol *map_sym;
361 idx = VEC_lower_bound (arm_mapping_symbol_s, map, &map_key,
362 arm_compare_mapping_symbols);
364 /* VEC_lower_bound finds the earliest ordered insertion
365 point. If the following symbol starts at this exact
366 address, we use that; otherwise, the preceding
367 mapping symbol covers this address. */
368 if (idx < VEC_length (arm_mapping_symbol_s, map))
370 map_sym = VEC_index (arm_mapping_symbol_s, map, idx);
371 if (map_sym->value == map_key.value)
374 *start = map_sym->value + obj_section_addr (sec);
375 return map_sym->type;
381 map_sym = VEC_index (arm_mapping_symbol_s, map, idx - 1);
383 *start = map_sym->value + obj_section_addr (sec);
384 return map_sym->type;
393 /* Determine if the program counter specified in MEMADDR is in a Thumb
394 function. This function should be called for addresses unrelated to
395 any executing frame; otherwise, prefer arm_frame_is_thumb. */
398 arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
400 struct bound_minimal_symbol sym;
402 arm_displaced_step_closure *dsc
403 = ((arm_displaced_step_closure * )
404 get_displaced_step_closure_by_addr (memaddr));
/* If we are checking the mode of a displaced instruction in the copy area,
the mode should be determined by the instruction at the original address.  */
411 fprintf_unfiltered (gdb_stdlog,
412 "displaced: check mode of %.8lx instead of %.8lx\n",
413 (unsigned long) dsc->insn_addr,
414 (unsigned long) memaddr);
415 memaddr = dsc->insn_addr;
418 /* If bit 0 of the address is set, assume this is a Thumb address. */
419 if (IS_THUMB_ADDR (memaddr))
/* If the user wants to override the symbol table, let them.  */
423 if (strcmp (arm_force_mode_string, "arm") == 0)
425 if (strcmp (arm_force_mode_string, "thumb") == 0)
428 /* ARM v6-M and v7-M are always in Thumb mode. */
429 if (gdbarch_tdep (gdbarch)->is_m)
432 /* If there are mapping symbols, consult them. */
433 type = arm_find_mapping_symbol (memaddr, NULL);
437 /* Thumb functions have a "special" bit set in minimal symbols. */
438 sym = lookup_minimal_symbol_by_pc (memaddr);
440 return (MSYMBOL_IS_SPECIAL (sym.minsym));
442 /* If the user wants to override the fallback mode, let them. */
443 if (strcmp (arm_fallback_mode_string, "arm") == 0)
445 if (strcmp (arm_fallback_mode_string, "thumb") == 0)
448 /* If we couldn't find any symbol, but we're talking to a running
449 target, then trust the current value of $cpsr. This lets
450 "display/i $pc" always show the correct mode (though if there is
451 a symbol table we will not reach here, so it still may not be
displayed in the mode in which it will be executed).  */
453 if (target_has_registers)
454 return arm_frame_is_thumb (get_current_frame ());
456 /* Otherwise we're out of luck; we assume ARM. */
460 /* Determine if the address specified equals any of these magic return
values, called EXC_RETURN, defined by the ARM v6-M and v7-M architectures.
464 From ARMv6-M Reference Manual B1.5.8
465 Table B1-5 Exception return behavior
467 EXC_RETURN Return To Return Stack
468 0xFFFFFFF1 Handler mode Main
469 0xFFFFFFF9 Thread mode Main
470 0xFFFFFFFD Thread mode Process
472 From ARMv7-M Reference Manual B1.5.8
473 Table B1-8 EXC_RETURN definition of exception return behavior, no FP
475 EXC_RETURN Return To Return Stack
476 0xFFFFFFF1 Handler mode Main
477 0xFFFFFFF9 Thread mode Main
478 0xFFFFFFFD Thread mode Process
Table B1-9 EXC_RETURN definition of exception return behavior, with FP
483 EXC_RETURN Return To Return Stack Frame Type
484 0xFFFFFFE1 Handler mode Main Extended
485 0xFFFFFFE9 Thread mode Main Extended
486 0xFFFFFFED Thread mode Process Extended
487 0xFFFFFFF1 Handler mode Main Basic
488 0xFFFFFFF9 Thread mode Main Basic
489 0xFFFFFFFD Thread mode Process Basic
491 For more details see "B1.5.8 Exception return behavior"
492 in both ARMv6-M and ARMv7-M Architecture Reference Manuals. */
495 arm_m_addr_is_magic (CORE_ADDR addr)
499 /* Values from Tables in B1.5.8 the EXC_RETURN definitions of
500 the exception return behavior. */
507 /* Address is magic. */
511 /* Address is not magic. */
516 /* Remove useless bits from addresses in a running program. */
518 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
520 /* On M-profile devices, do not strip the low bit from EXC_RETURN
521 (the magic exception return address). */
522 if (gdbarch_tdep (gdbarch)->is_m
523 && arm_m_addr_is_magic (val))
527 return UNMAKE_THUMB_ADDR (val);
529 return (val & 0x03fffffc);
532 /* Return 1 if PC is the start of a compiler helper function which
533 can be safely ignored during prologue skipping. IS_THUMB is true
if the function is known to be a Thumb function due to the way it was called.  */
537 skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
539 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
540 struct bound_minimal_symbol msym;
542 msym = lookup_minimal_symbol_by_pc (pc);
543 if (msym.minsym != NULL
544 && BMSYMBOL_VALUE_ADDRESS (msym) == pc
545 && MSYMBOL_LINKAGE_NAME (msym.minsym) != NULL)
547 const char *name = MSYMBOL_LINKAGE_NAME (msym.minsym);
/* The GNU linker's Thumb call stub to foo is named __foo_from_thumb.  */
551 if (strstr (name, "_from_thumb") != NULL)
554 /* On soft-float targets, __truncdfsf2 is called to convert promoted
arguments to their argument types in non-prototyped functions.  */
557 if (startswith (name, "__truncdfsf2"))
559 if (startswith (name, "__aeabi_d2f"))
562 /* Internal functions related to thread-local storage. */
563 if (startswith (name, "__tls_get_addr"))
565 if (startswith (name, "__aeabi_read_tp"))
570 /* If we run against a stripped glibc, we may be unable to identify
571 special functions by name. Check for one important case,
572 __aeabi_read_tp, by comparing the *code* against the default
573 implementation (this is hand-written ARM assembler in glibc). */
576 && read_code_unsigned_integer (pc, 4, byte_order_for_code)
577 == 0xe3e00a0f /* mov r0, #0xffff0fff */
578 && read_code_unsigned_integer (pc + 4, 4, byte_order_for_code)
579 == 0xe240f01f) /* sub pc, r0, #31 */
/* Extract the immediate from a movw/movt instruction of encoding T.  INSN1
is the first 16 bits of the instruction, and INSN2 is the second 16 bits of
the instruction.  */
589 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
590 ((bits ((insn1), 0, 3) << 12) \
591 | (bits ((insn1), 10, 10) << 11) \
592 | (bits ((insn2), 12, 14) << 8) \
593 | bits ((insn2), 0, 7))
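/* This matches the Thumb-2 encoding, where the 16-bit immediate is split
   into imm4:i:imm3:imm8 -- imm4 in bits 0-3 of the first halfword, i in bit
   10 of the first halfword, imm3 in bits 12-14 of the second halfword, and
   imm8 in bits 0-7 of the second halfword.  */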
/* Extract the immediate from a movw/movt instruction of encoding A.  INSN
is the 32-bit instruction.  */
597 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \
598 ((bits ((insn), 16, 19) << 12) \
599 | bits ((insn), 0, 11))
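/* Worked example (illustrative): "movw r0, #0x1234" assembles to 0xe3010234
   in ARM mode; bits 16-19 are 0x1 and bits 0-11 are 0x234, so the macro
   yields 0x1234.  */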
601 /* Decode immediate value; implements ThumbExpandImmediate pseudo-op. */
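/* For example (illustrative): the 12-bit value 0x1ab has count 3, so the
   low byte 0xab is replicated into bytes 0 and 2, giving 0x00ab00ab; the
   value 0x4ff has count 9, so 0xff is rotated right by 9 bits, giving
   0x7f800000.  */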
604 thumb_expand_immediate (unsigned int imm)
606 unsigned int count = imm >> 7;
614 return (imm & 0xff) | ((imm & 0xff) << 16);
616 return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
618 return (imm & 0xff) | ((imm & 0xff) << 8)
619 | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
622 return (0x80 | (imm & 0x7f)) << (32 - count);
625 /* Return 1 if the 16-bit Thumb instruction INSN restores SP in
626 epilogue, 0 otherwise. */
629 thumb_instruction_restores_sp (unsigned short insn)
631 return (insn == 0x46bd /* mov sp, r7 */
632 || (insn & 0xff80) == 0xb000 /* add sp, imm */
633 || (insn & 0xfe00) == 0xbc00); /* pop <registers> */
636 /* Analyze a Thumb prologue, looking for a recognizable stack frame
637 and frame pointer. Scan until we encounter a store that could
638 clobber the stack frame unexpectedly, or an unknown instruction.
639 Return the last address which is definitely safe to skip for an
640 initial breakpoint. */
643 thumb_analyze_prologue (struct gdbarch *gdbarch,
644 CORE_ADDR start, CORE_ADDR limit,
645 struct arm_prologue_cache *cache)
647 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
648 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
652 CORE_ADDR unrecognized_pc = 0;
654 for (i = 0; i < 16; i++)
655 regs[i] = pv_register (i, 0);
656 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
658 while (start < limit)
662 insn = read_code_unsigned_integer (start, 2, byte_order_for_code);
664 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
669 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
672 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
673 whether to save LR (R14). */
674 mask = (insn & 0xff) | ((insn & 0x100) << 6);
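/* For example (illustrative), "push {r4, lr}" encodes as 0xb510, giving a
   MASK of 0x4010: bit 4 for r4 and bit 14 for LR.  */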
676 /* Calculate offsets of saved R0-R7 and LR. */
677 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
678 if (mask & (1 << regno))
680 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
682 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
685 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
687 offset = (insn & 0x7f) << 2; /* get scaled offset */
688 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
691 else if (thumb_instruction_restores_sp (insn))
693 /* Don't scan past the epilogue. */
696 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
697 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
699 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
700 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
701 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
703 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
704 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
705 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
707 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
708 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
709 && pv_is_constant (regs[bits (insn, 3, 5)]))
710 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
711 regs[bits (insn, 6, 8)]);
712 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
713 && pv_is_constant (regs[bits (insn, 3, 6)]))
715 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
716 int rm = bits (insn, 3, 6);
717 regs[rd] = pv_add (regs[rd], regs[rm]);
719 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
721 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
722 int src_reg = (insn & 0x78) >> 3;
723 regs[dst_reg] = regs[src_reg];
725 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
727 /* Handle stores to the stack. Normally pushes are used,
728 but with GCC -mtpcs-frame, there may be other stores
729 in the prologue to create the frame. */
730 int regno = (insn >> 8) & 0x7;
733 offset = (insn & 0xff) << 2;
734 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
736 if (stack.store_would_trash (addr))
739 stack.store (addr, 4, regs[regno]);
741 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
743 int rd = bits (insn, 0, 2);
744 int rn = bits (insn, 3, 5);
747 offset = bits (insn, 6, 10) << 2;
748 addr = pv_add_constant (regs[rn], offset);
750 if (stack.store_would_trash (addr))
753 stack.store (addr, 4, regs[rd]);
755 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
756 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
757 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
758 /* Ignore stores of argument registers to the stack. */
760 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
761 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
762 /* Ignore block loads from the stack, potentially copying
763 parameters from memory. */
765 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
766 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
767 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
768 /* Similarly ignore single loads from the stack. */
770 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
771 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
772 /* Skip register copies, i.e. saves to another register
773 instead of the stack. */
775 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
/* Recognize constant loads; even with small stacks these are necessary on Thumb.  */
778 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
779 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
781 /* Constant pool loads, for the same reason. */
782 unsigned int constant;
785 loc = start + 4 + bits (insn, 0, 7) * 4;
786 constant = read_memory_unsigned_integer (loc, 4, byte_order);
787 regs[bits (insn, 8, 10)] = pv_constant (constant);
789 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
791 unsigned short inst2;
793 inst2 = read_code_unsigned_integer (start + 2, 2,
794 byte_order_for_code);
796 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
798 /* BL, BLX. Allow some special function calls when
799 skipping the prologue; GCC generates these before
800 storing arguments to the stack. */
802 int j1, j2, imm1, imm2;
804 imm1 = sbits (insn, 0, 10);
805 imm2 = bits (inst2, 0, 10);
806 j1 = bit (inst2, 13);
807 j2 = bit (inst2, 11);
809 offset = ((imm1 << 12) + (imm2 << 1));
810 offset ^= ((!j2) << 22) | ((!j1) << 23);
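/* The XOR above folds in I1 = NOT(J1 EOR S) and I2 = NOT(J2 EOR S) from the
   Thumb-2 BL/BLX encoding: IMM1 has already been sign-extended through S, so
   flipping bits 23 and 22 when J1/J2 are clear produces the same result.  */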
812 nextpc = start + 4 + offset;
813 /* For BLX make sure to clear the low bits. */
814 if (bit (inst2, 12) == 0)
815 nextpc = nextpc & 0xfffffffc;
817 if (!skip_prologue_function (gdbarch, nextpc,
818 bit (inst2, 12) != 0))
822 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
824 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
826 pv_t addr = regs[bits (insn, 0, 3)];
829 if (stack.store_would_trash (addr))
832 /* Calculate offsets of saved registers. */
833 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
834 if (inst2 & (1 << regno))
836 addr = pv_add_constant (addr, -4);
837 stack.store (addr, 4, regs[regno]);
841 regs[bits (insn, 0, 3)] = addr;
844 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
846 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
848 int regno1 = bits (inst2, 12, 15);
849 int regno2 = bits (inst2, 8, 11);
850 pv_t addr = regs[bits (insn, 0, 3)];
852 offset = inst2 & 0xff;
854 addr = pv_add_constant (addr, offset);
856 addr = pv_add_constant (addr, -offset);
858 if (stack.store_would_trash (addr))
861 stack.store (addr, 4, regs[regno1]);
862 stack.store (pv_add_constant (addr, 4),
866 regs[bits (insn, 0, 3)] = addr;
869 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
870 && (inst2 & 0x0c00) == 0x0c00
871 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
873 int regno = bits (inst2, 12, 15);
874 pv_t addr = regs[bits (insn, 0, 3)];
876 offset = inst2 & 0xff;
878 addr = pv_add_constant (addr, offset);
880 addr = pv_add_constant (addr, -offset);
882 if (stack.store_would_trash (addr))
885 stack.store (addr, 4, regs[regno]);
888 regs[bits (insn, 0, 3)] = addr;
891 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
892 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
894 int regno = bits (inst2, 12, 15);
897 offset = inst2 & 0xfff;
898 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
900 if (stack.store_would_trash (addr))
903 stack.store (addr, 4, regs[regno]);
906 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
907 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
908 /* Ignore stores of argument registers to the stack. */
911 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
912 && (inst2 & 0x0d00) == 0x0c00
913 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
914 /* Ignore stores of argument registers to the stack. */
917 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
919 && (inst2 & 0x8000) == 0x0000
920 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
921 /* Ignore block loads from the stack, potentially copying
922 parameters from memory. */
925 else if ((insn & 0xffb0) == 0xe950 /* ldrd Rt, Rt2,
927 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
928 /* Similarly ignore dual loads from the stack. */
931 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
932 && (inst2 & 0x0d00) == 0x0c00
933 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
934 /* Similarly ignore single loads from the stack. */
937 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
938 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
939 /* Similarly ignore single loads from the stack. */
942 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
943 && (inst2 & 0x8000) == 0x0000)
945 unsigned int imm = ((bits (insn, 10, 10) << 11)
946 | (bits (inst2, 12, 14) << 8)
947 | bits (inst2, 0, 7));
949 regs[bits (inst2, 8, 11)]
950 = pv_add_constant (regs[bits (insn, 0, 3)],
951 thumb_expand_immediate (imm));
954 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
955 && (inst2 & 0x8000) == 0x0000)
957 unsigned int imm = ((bits (insn, 10, 10) << 11)
958 | (bits (inst2, 12, 14) << 8)
959 | bits (inst2, 0, 7));
961 regs[bits (inst2, 8, 11)]
962 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
965 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
966 && (inst2 & 0x8000) == 0x0000)
968 unsigned int imm = ((bits (insn, 10, 10) << 11)
969 | (bits (inst2, 12, 14) << 8)
970 | bits (inst2, 0, 7));
972 regs[bits (inst2, 8, 11)]
973 = pv_add_constant (regs[bits (insn, 0, 3)],
974 - (CORE_ADDR) thumb_expand_immediate (imm));
977 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
978 && (inst2 & 0x8000) == 0x0000)
980 unsigned int imm = ((bits (insn, 10, 10) << 11)
981 | (bits (inst2, 12, 14) << 8)
982 | bits (inst2, 0, 7));
984 regs[bits (inst2, 8, 11)]
985 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
988 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
990 unsigned int imm = ((bits (insn, 10, 10) << 11)
991 | (bits (inst2, 12, 14) << 8)
992 | bits (inst2, 0, 7));
994 regs[bits (inst2, 8, 11)]
995 = pv_constant (thumb_expand_immediate (imm));
998 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
1001 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
1003 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1006 else if (insn == 0xea5f /* mov.w Rd,Rm */
1007 && (inst2 & 0xf0f0) == 0)
1009 int dst_reg = (inst2 & 0x0f00) >> 8;
1010 int src_reg = inst2 & 0xf;
1011 regs[dst_reg] = regs[src_reg];
1014 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1016 /* Constant pool loads. */
1017 unsigned int constant;
1020 offset = bits (inst2, 0, 11);
1022 loc = start + 4 + offset;
1024 loc = start + 4 - offset;
1026 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1027 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1030 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1032 /* Constant pool loads. */
1033 unsigned int constant;
1036 offset = bits (inst2, 0, 7) << 2;
1038 loc = start + 4 + offset;
1040 loc = start + 4 - offset;
1042 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1043 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1045 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1046 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1049 else if (thumb2_instruction_changes_pc (insn, inst2))
1051 /* Don't scan past anything that might change control flow. */
1056 /* The optimizer might shove anything into the prologue,
1057 so we just skip what we don't recognize. */
1058 unrecognized_pc = start;
1063 else if (thumb_instruction_changes_pc (insn))
1065 /* Don't scan past anything that might change control flow. */
1070 /* The optimizer might shove anything into the prologue,
1071 so we just skip what we don't recognize. */
1072 unrecognized_pc = start;
1079 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1080 paddress (gdbarch, start));
1082 if (unrecognized_pc == 0)
1083 unrecognized_pc = start;
1086 return unrecognized_pc;
1088 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1090 /* Frame pointer is fp. Frame size is constant. */
1091 cache->framereg = ARM_FP_REGNUM;
1092 cache->framesize = -regs[ARM_FP_REGNUM].k;
1094 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1096 /* Frame pointer is r7. Frame size is constant. */
1097 cache->framereg = THUMB_FP_REGNUM;
1098 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1102 /* Try the stack pointer... this is a bit desperate. */
1103 cache->framereg = ARM_SP_REGNUM;
1104 cache->framesize = -regs[ARM_SP_REGNUM].k;
1107 for (i = 0; i < 16; i++)
1108 if (stack.find_reg (gdbarch, i, &offset))
1109 cache->saved_regs[i].addr = offset;
1111 return unrecognized_pc;
/* Try to analyze the instructions starting from PC, which load symbol
__stack_chk_guard.  Return the address of the instruction after loading this
symbol, set the destination register number in *DESTREG, and set the size of
the instructions used to load the symbol in *OFFSET.  Return 0 if the
instructions are not recognized.  */
1122 arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
1123 unsigned int *destreg, int *offset)
1125 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1126 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1127 unsigned int low, high, address;
1132 unsigned short insn1
1133 = read_code_unsigned_integer (pc, 2, byte_order_for_code);
1135 if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
1137 *destreg = bits (insn1, 8, 10);
1139 address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
1140 address = read_memory_unsigned_integer (address, 4,
1141 byte_order_for_code);
1143 else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
1145 unsigned short insn2
1146 = read_code_unsigned_integer (pc + 2, 2, byte_order_for_code);
1148 low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1151 = read_code_unsigned_integer (pc + 4, 2, byte_order_for_code);
1153 = read_code_unsigned_integer (pc + 6, 2, byte_order_for_code);
1155 /* movt Rd, #const */
1156 if ((insn1 & 0xfbc0) == 0xf2c0)
1158 high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1159 *destreg = bits (insn2, 8, 11);
1161 address = (high << 16 | low);
1168 = read_code_unsigned_integer (pc, 4, byte_order_for_code);
1170 if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
1172 address = bits (insn, 0, 11) + pc + 8;
1173 address = read_memory_unsigned_integer (address, 4,
1174 byte_order_for_code);
1176 *destreg = bits (insn, 12, 15);
1179 else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1181 low = EXTRACT_MOVW_MOVT_IMM_A (insn);
1184 = read_code_unsigned_integer (pc + 4, 4, byte_order_for_code);
1186 if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1188 high = EXTRACT_MOVW_MOVT_IMM_A (insn);
1189 *destreg = bits (insn, 12, 15);
1191 address = (high << 16 | low);
1199 /* Try to skip a sequence of instructions used for stack protector. If PC
1200 points to the first instruction of this sequence, return the address of
1201 first instruction after this sequence, otherwise, return original PC.
On ARM, this sequence of instructions is composed mainly of three steps:
1204 Step 1: load symbol __stack_chk_guard,
1205 Step 2: load from address of __stack_chk_guard,
1206 Step 3: store it to somewhere else.
The instructions in steps 2 and 3 are usually the same across ARM
architectures: step 2 is a single 'ldr Rx, [Rn, #0]', and step 3 is a
single 'str Rx, [r7, #immd]'.  However,
instructions in step 1 vary across different ARM architectures.  On ARMv7, they are:
1214 movw Rn, #:lower16:__stack_chk_guard
1215 movt Rn, #:upper16:__stack_chk_guard
On older architectures (e.g. ARMv5t) the address is instead loaded with a
PC-relative ldr from a literal pool entry:

.word __stack_chk_guard
Since ldr/str are very common instructions, we can't use them alone as the
'fingerprint' or 'signature' of the stack protector sequence.  Instead we
use the sequence {movw/movt, ldr}/ldr/str together with the symbol
__stack_chk_guard (when it is not stripped) as the 'fingerprint' of a stack
protector code sequence.  */
1230 arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
1232 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1233 unsigned int basereg;
1234 struct bound_minimal_symbol stack_chk_guard;
1236 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1239 /* Try to parse the instructions in Step 1. */
1240 addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1245 stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
1246 /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
Otherwise, this sequence cannot be for the stack protector.  */
1248 if (stack_chk_guard.minsym == NULL
1249 || !startswith (MSYMBOL_LINKAGE_NAME (stack_chk_guard.minsym), "__stack_chk_guard"))
1254 unsigned int destreg;
1256 = read_code_unsigned_integer (pc + offset, 2, byte_order_for_code);
1258 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1259 if ((insn & 0xf800) != 0x6800)
1261 if (bits (insn, 3, 5) != basereg)
1263 destreg = bits (insn, 0, 2);
1265 insn = read_code_unsigned_integer (pc + offset + 2, 2,
1266 byte_order_for_code);
1267 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1268 if ((insn & 0xf800) != 0x6000)
1270 if (destreg != bits (insn, 0, 2))
1275 unsigned int destreg;
1277 = read_code_unsigned_integer (pc + offset, 4, byte_order_for_code);
1279 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1280 if ((insn & 0x0e500000) != 0x04100000)
1282 if (bits (insn, 16, 19) != basereg)
1284 destreg = bits (insn, 12, 15);
1285 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1286 insn = read_code_unsigned_integer (pc + offset + 4,
1287 4, byte_order_for_code);
1288 if ((insn & 0x0e500000) != 0x04000000)
1290 if (bits (insn, 12, 15) != destreg)
/* The total size of the two ldr/str instructions is 4 bytes on Thumb-2,
while it is 8 bytes on ARM.  */
if (is_thumb)
return pc + offset + 4;
else
return pc + offset + 8;
1301 /* Advance the PC across any function entry prologue instructions to
1302 reach some "real" code.
The APCS (ARM Procedure Call Standard) defines the following prologue:

mov ip, sp
[stmfd sp!, {a1,a2,a3,a4}]
1309 stmfd sp!, {...,fp,ip,lr,pc}
1310 [stfe f7, [sp, #-12]!]
1311 [stfe f6, [sp, #-12]!]
1312 [stfe f5, [sp, #-12]!]
1313 [stfe f4, [sp, #-12]!]
1314 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1317 arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1319 CORE_ADDR func_addr, limit_pc;
1321 /* See if we can determine the end of the prologue via the symbol table.
If so, then return either PC, or the PC after the prologue, whichever is greater.
1324 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
1326 CORE_ADDR post_prologue_pc
1327 = skip_prologue_using_sal (gdbarch, func_addr);
1328 struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);
if (post_prologue_pc)
post_prologue_pc
= arm_skip_stack_protector (post_prologue_pc, gdbarch);
1335 /* GCC always emits a line note before the prologue and another
1336 one after, even if the two are at the same address or on the
1337 same line. Take advantage of this so that we do not need to
1338 know every instruction that might appear in the prologue. We
1339 will have producer information for most binaries; if it is
missing (e.g. for -gstabs), assume the GNU tools.  */
if (post_prologue_pc
&& (cust == NULL
|| COMPUNIT_PRODUCER (cust) == NULL
1344 || startswith (COMPUNIT_PRODUCER (cust), "GNU ")
1345 || startswith (COMPUNIT_PRODUCER (cust), "clang ")))
1346 return post_prologue_pc;
1348 if (post_prologue_pc != 0)
1350 CORE_ADDR analyzed_limit;
1352 /* For non-GCC compilers, make sure the entire line is an
1353 acceptable prologue; GDB will round this function's
1354 return value up to the end of the following line so we
1355 can not skip just part of a line (and we do not want to).
1357 RealView does not treat the prologue specially, but does
1358 associate prologue code with the opening brace; so this
1359 lets us skip the first line if we think it is the opening
1361 if (arm_pc_is_thumb (gdbarch, func_addr))
1362 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1363 post_prologue_pc, NULL);
1365 analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
1366 post_prologue_pc, NULL);
1368 if (analyzed_limit != post_prologue_pc)
1371 return post_prologue_pc;
1375 /* Can't determine prologue from the symbol table, need to examine
1378 /* Find an upper limit on the function prologue using the debug
1379 information. If the debug information could not be used to provide
1380 that bound, then use an arbitrary large number as the upper bound. */
1381 /* Like arm_scan_prologue, stop no later than pc + 64. */
1382 limit_pc = skip_prologue_using_sal (gdbarch, pc);
1384 limit_pc = pc + 64; /* Magic. */
1387 /* Check if this is Thumb code. */
1388 if (arm_pc_is_thumb (gdbarch, pc))
1389 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1391 return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1395 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1396 This function decodes a Thumb function prologue to determine:
1397 1) the size of the stack frame
1398 2) which registers are saved on it
1399 3) the offsets of saved regs
1400 4) the offset from the stack pointer to the frame pointer
1402 A typical Thumb function prologue would create this stack frame
1403 (offsets relative to FP)
1404 old SP -> 24 stack parameters
1407 R7 -> 0 local variables (16 bytes)
1408 SP -> -12 additional stack space (12 bytes)
1409 The frame size would thus be 36 bytes, and the frame offset would be
1410 12 bytes. The frame register is R7.
1412 The comments for thumb_skip_prolog() describe the algorithm we use
to detect the end of the prologue.  */
1417 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1418 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1420 CORE_ADDR prologue_start;
1421 CORE_ADDR prologue_end;
1423 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
/* See comment in arm_scan_prologue for an explanation of this heuristic.  */
1428 if (prologue_end > prologue_start + 64)
1430 prologue_end = prologue_start + 64;
/* We're in the boondocks: we have no idea where the start of the
function is.  */
1438 prologue_end = std::min (prologue_end, prev_pc);
1440 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
/* Return 1 if the ARM instruction INSN restores SP in epilogue, 0 otherwise.  */
1447 arm_instruction_restores_sp (unsigned int insn)
1449 if (bits (insn, 28, 31) != INST_NV)
1451 if ((insn & 0x0df0f000) == 0x0080d000
1452 /* ADD SP (register or immediate). */
1453 || (insn & 0x0df0f000) == 0x0040d000
1454 /* SUB SP (register or immediate). */
1455 || (insn & 0x0ffffff0) == 0x01a0d000
1457 || (insn & 0x0fff0000) == 0x08bd0000
1459 || (insn & 0x0fff0000) == 0x049d0000)
1460 /* POP of a single register. */
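/* For example, the common epilogue "ldmfd sp!, {r4-r11, pc}" (0xe8bd8ff0)
   matches the POP (LDMIA) pattern above.  */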
1467 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1468 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1469 fill it in. Return the first address not recognized as a prologue
1472 We recognize all the instructions typically found in ARM prologues,
1473 plus harmless instructions which can be skipped (either for analysis
1474 purposes, or a more restrictive set that can be skipped when finding
1475 the end of the prologue). */
1478 arm_analyze_prologue (struct gdbarch *gdbarch,
1479 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1480 struct arm_prologue_cache *cache)
1482 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1484 CORE_ADDR offset, current_pc;
1485 pv_t regs[ARM_FPS_REGNUM];
1486 CORE_ADDR unrecognized_pc = 0;
1488 /* Search the prologue looking for instructions that set up the
1489 frame pointer, adjust the stack pointer, and save registers.
1491 Be careful, however, and if it doesn't look like a prologue,
1492 don't try to scan it. If, for instance, a frameless function
1493 begins with stmfd sp!, then we will tell ourselves there is
1494 a frame, which will confuse stack traceback, as well as "finish"
and other operations that rely on a knowledge of the stack traceback.  */
1498 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1499 regs[regno] = pv_register (regno, 0);
1500 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1502 for (current_pc = prologue_start;
1503 current_pc < prologue_end;
1507 = read_code_unsigned_integer (current_pc, 4, byte_order_for_code);
1509 if (insn == 0xe1a0c00d) /* mov ip, sp */
1511 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1514 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1515 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1517 unsigned imm = insn & 0xff; /* immediate value */
1518 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1519 int rd = bits (insn, 12, 15);
1520 imm = (imm >> rot) | (imm << (32 - rot));
1521 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1524 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1525 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1527 unsigned imm = insn & 0xff; /* immediate value */
1528 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1529 int rd = bits (insn, 12, 15);
1530 imm = (imm >> rot) | (imm << (32 - rot));
1531 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1534 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1537 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1539 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1540 stack.store (regs[ARM_SP_REGNUM], 4,
1541 regs[bits (insn, 12, 15)]);
1544 else if ((insn & 0xffff0000) == 0xe92d0000)
1545 /* stmfd sp!, {..., fp, ip, lr, pc}
1547 stmfd sp!, {a1, a2, a3, a4} */
1549 int mask = insn & 0xffff;
1551 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1554 /* Calculate offsets of saved registers. */
1555 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1556 if (mask & (1 << regno))
1559 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1560 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
1563 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1564 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1565 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1567 /* No need to add this to saved_regs -- it's just an arg reg. */
1570 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1571 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1572 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1574 /* No need to add this to saved_regs -- it's just an arg reg. */
1577 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1579 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1581 /* No need to add this to saved_regs -- it's just arg regs. */
1584 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1586 unsigned imm = insn & 0xff; /* immediate value */
1587 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1588 imm = (imm >> rot) | (imm << (32 - rot));
1589 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1591 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1593 unsigned imm = insn & 0xff; /* immediate value */
1594 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1595 imm = (imm >> rot) | (imm << (32 - rot));
1596 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
1598 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1600 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1602 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1605 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1606 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
1607 stack.store (regs[ARM_SP_REGNUM], 12, regs[regno]);
1609 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1611 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1613 int n_saved_fp_regs;
1614 unsigned int fp_start_reg, fp_bound_reg;
1616 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1619 if ((insn & 0x800) == 0x800) /* N0 is set */
1621 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1622 n_saved_fp_regs = 3;
1624 n_saved_fp_regs = 1;
1628 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1629 n_saved_fp_regs = 2;
1631 n_saved_fp_regs = 4;
1634 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
1635 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1636 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
1638 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1639 stack.store (regs[ARM_SP_REGNUM], 12,
1640 regs[fp_start_reg++]);
1643 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1645 /* Allow some special function calls when skipping the
prologue; GCC generates these before storing arguments to the stack.  */
1648 CORE_ADDR dest = BranchDest (current_pc, insn);
1650 if (skip_prologue_function (gdbarch, dest, 0))
1655 else if ((insn & 0xf0000000) != 0xe0000000)
1656 break; /* Condition not true, exit early. */
1657 else if (arm_instruction_changes_pc (insn))
1658 /* Don't scan past anything that might change control flow. */
1660 else if (arm_instruction_restores_sp (insn))
1662 /* Don't scan past the epilogue. */
1665 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1666 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1667 /* Ignore block loads from the stack, potentially copying
1668 parameters from memory. */
1670 else if ((insn & 0xfc500000) == 0xe4100000
1671 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1672 /* Similarly ignore single loads from the stack. */
1674 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1675 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1676 register instead of the stack. */
/* The optimizer might shove anything into the prologue.  If we are
building up the cache (CACHE != NULL) from scanning the prologue, we
just skip what we don't recognize and scan further to make the cache
as complete as possible.  However, if we are skipping the prologue,
we stop immediately on the first unrecognized instruction.  */
unrecognized_pc = current_pc;
1694 if (unrecognized_pc == 0)
1695 unrecognized_pc = current_pc;
1699 int framereg, framesize;
1701 /* The frame size is just the distance from the frame register
1702 to the original stack pointer. */
1703 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1705 /* Frame pointer is fp. */
1706 framereg = ARM_FP_REGNUM;
1707 framesize = -regs[ARM_FP_REGNUM].k;
1711 /* Try the stack pointer... this is a bit desperate. */
1712 framereg = ARM_SP_REGNUM;
1713 framesize = -regs[ARM_SP_REGNUM].k;
1716 cache->framereg = framereg;
1717 cache->framesize = framesize;
1719 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1720 if (stack.find_reg (gdbarch, regno, &offset))
1721 cache->saved_regs[regno].addr = offset;
1725 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1726 paddress (gdbarch, unrecognized_pc));
1728 return unrecognized_pc;
1732 arm_scan_prologue (struct frame_info *this_frame,
1733 struct arm_prologue_cache *cache)
1735 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1736 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1737 CORE_ADDR prologue_start, prologue_end;
1738 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1739 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
1741 /* Assume there is no frame until proven otherwise. */
1742 cache->framereg = ARM_SP_REGNUM;
1743 cache->framesize = 0;
1745 /* Check for Thumb prologue. */
1746 if (arm_frame_is_thumb (this_frame))
1748 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1752 /* Find the function prologue. If we can't find the function in
1753 the symbol table, peek in the stack frame to find the PC. */
1754 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1757 /* One way to find the end of the prologue (which works well
1758 for unoptimized code) is to do the following:
struct symtab_and_line sal = find_pc_line (prologue_start, 0);
if (sal.line == 0)
prologue_end = prev_pc;
1764 else if (sal.end < prologue_end)
1765 prologue_end = sal.end;
1767 This mechanism is very accurate so long as the optimizer
1768 doesn't move any instructions from the function body into the
1769 prologue. If this happens, sal.end will be the last
1770 instruction in the first hunk of prologue code just before
1771 the first instruction that the scheduler has moved from
1772 the body to the prologue.
1774 In order to make sure that we scan all of the prologue
1775 instructions, we use a slightly less accurate mechanism which
1776 may scan more than necessary. To help compensate for this
1777 lack of accuracy, the prologue scanning loop below contains
1778 several clauses which'll cause the loop to terminate early if
1779 an implausible prologue instruction is encountered.
The expression prologue_start + 64 is a suitable endpoint since it
accounts for the largest possible prologue plus up to five instructions
inserted by the scheduler.  */
1789 if (prologue_end > prologue_start + 64)
1791 prologue_end = prologue_start + 64; /* See above. */
1796 /* We have no symbol information. Our only option is to assume this
1797 function has a standard stack frame and the normal frame register.
1798 Then, we can find the value of our frame pointer on entrance to
1799 the callee (or at the present moment if this is the innermost frame).
1800 The value stored there should be the address of the stmfd + 8. */
1801 CORE_ADDR frame_loc;
1802 ULONGEST return_value;
1804 /* AAPCS does not use a frame register, so we can abort here. */
1805 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_AAPCS)
1808 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
1809 if (!safe_read_memory_unsigned_integer (frame_loc, 4, byte_order,
1814 prologue_start = gdbarch_addr_bits_remove
1815 (gdbarch, return_value) - 8;
1816 prologue_end = prologue_start + 64; /* See above. */
1820 if (prev_pc < prologue_end)
1821 prologue_end = prev_pc;
1823 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1826 static struct arm_prologue_cache *
1827 arm_make_prologue_cache (struct frame_info *this_frame)
1830 struct arm_prologue_cache *cache;
1831 CORE_ADDR unwound_fp;
1833 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
1834 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
1836 arm_scan_prologue (this_frame, cache);
1838 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
1839 if (unwound_fp == 0)
1842 cache->prev_sp = unwound_fp + cache->framesize;
1844 /* Calculate actual addresses of saved registers using offsets
1845 determined by arm_scan_prologue. */
1846 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
1847 if (trad_frame_addr_p (cache->saved_regs, reg))
1848 cache->saved_regs[reg].addr += cache->prev_sp;
1853 /* Implementation of the stop_reason hook for arm_prologue frames. */
1855 static enum unwind_stop_reason
1856 arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
1859 struct arm_prologue_cache *cache;
1862 if (*this_cache == NULL)
1863 *this_cache = arm_make_prologue_cache (this_frame);
1864 cache = (struct arm_prologue_cache *) *this_cache;
1866 /* This is meant to halt the backtrace at "_start". */
1867 pc = get_frame_pc (this_frame);
1868 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
1869 return UNWIND_OUTERMOST;
1871 /* If we've hit a wall, stop. */
1872 if (cache->prev_sp == 0)
1873 return UNWIND_OUTERMOST;
1875 return UNWIND_NO_REASON;
1878 /* Our frame ID for a normal frame is the current function's starting PC
1879 and the caller's SP when we were called. */
1882 arm_prologue_this_id (struct frame_info *this_frame,
1884 struct frame_id *this_id)
1886 struct arm_prologue_cache *cache;
1890 if (*this_cache == NULL)
1891 *this_cache = arm_make_prologue_cache (this_frame);
1892 cache = (struct arm_prologue_cache *) *this_cache;
1894 /* Use function start address as part of the frame ID. If we cannot
1895 identify the start address (due to missing symbol information),
1896 fall back to just using the current PC. */
1897 pc = get_frame_pc (this_frame);
1898 func = get_frame_func (this_frame);
1902 id = frame_id_build (cache->prev_sp, func);
1906 static struct value *
1907 arm_prologue_prev_register (struct frame_info *this_frame,
1911 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1912 struct arm_prologue_cache *cache;
1914 if (*this_cache == NULL)
1915 *this_cache = arm_make_prologue_cache (this_frame);
1916 cache = (struct arm_prologue_cache *) *this_cache;
1918 /* If we are asked to unwind the PC, then we need to return the LR
1919 instead. The prologue may save PC, but it will point into this
1920 frame's prologue, not the next frame's resume location. Also
1921 strip the saved T bit. A valid LR may have the low bit set, but
1922 a valid PC never does. */
1923 if (prev_regnum == ARM_PC_REGNUM)
1927 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1928 return frame_unwind_got_constant (this_frame, prev_regnum,
1929 arm_addr_bits_remove (gdbarch, lr));
1932 /* SP is generally not saved to the stack, but this frame is
1933 identified by the next frame's stack pointer at the time of the call.
1934 The value was already reconstructed into PREV_SP. */
1935 if (prev_regnum == ARM_SP_REGNUM)
1936 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
1938 /* The CPSR may have been changed by the call instruction and by the
1939 called function. The only bit we can reconstruct is the T bit,
1940 by checking the low bit of LR as of the call. This is a reliable
1941 indicator of Thumb-ness except for some ARM v4T pre-interworking
1942 Thumb code, which could get away with a clear low bit as long as
1943 the called function did not use bx. Guess that all other
1944 bits are unchanged; the condition flags are presumably lost,
1945 but the processor status is likely valid. */
1946 if (prev_regnum == ARM_PS_REGNUM)
1949 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
1951 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
1952 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1953 if (IS_THUMB_ADDR (lr))
1957 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
1960 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
1964 struct frame_unwind arm_prologue_unwind = {
1966 arm_prologue_unwind_stop_reason,
1967 arm_prologue_this_id,
1968 arm_prologue_prev_register,
1970 default_frame_sniffer
1973 /* Maintain a list of ARM exception table entries per objfile, similar to the
1974 list of mapping symbols. We only cache entries for standard ARM-defined
1975 personality routines; the cache will contain only the frame unwinding
1976 instructions associated with the entry (not the descriptors). */
1978 static const struct objfile_data *arm_exidx_data_key;
1980 struct arm_exidx_entry
1985 typedef struct arm_exidx_entry arm_exidx_entry_s;
1986 DEF_VEC_O(arm_exidx_entry_s);
1988 struct arm_exidx_data
1990 VEC(arm_exidx_entry_s) **section_maps;
1994 arm_exidx_data_free (struct objfile *objfile, void *arg)
1996 struct arm_exidx_data *data = (struct arm_exidx_data *) arg;
1999 for (i = 0; i < objfile->obfd->section_count; i++)
2000 VEC_free (arm_exidx_entry_s, data->section_maps[i]);
2004 arm_compare_exidx_entries (const struct arm_exidx_entry *lhs,
2005 const struct arm_exidx_entry *rhs)
2007 return lhs->addr < rhs->addr;
2010 static struct obj_section *
2011 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2013 struct obj_section *osect;
2015 ALL_OBJFILE_OSECTIONS (objfile, osect)
2016 if (bfd_get_section_flags (objfile->obfd,
2017 osect->the_bfd_section) & SEC_ALLOC)
2019 bfd_vma start, size;
2020 start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
2021 size = bfd_get_section_size (osect->the_bfd_section);
2023 if (start <= vma && vma < start + size)
2030 /* Parse contents of exception table and exception index sections
2031 of OBJFILE, and fill in the exception table entry cache.
2033 For each entry that refers to a standard ARM-defined personality
2034 routine, extract the frame unwinding instructions (from either
the index or the table section).  The unwinding instructions are normalized by:
2037 - extracting them from the rest of the table data
2038 - converting to host endianness
2039 - appending the implicit 0xb0 ("Finish") code
2041 The extracted and normalized instructions are stored for later
2042 retrieval by the arm_find_exidx_entry routine. */
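/* For reference, each .ARM.exidx entry is a pair of 32-bit words: the first
   is a prel31 offset to the function start, and the second is either the
   value 1 (EXIDX_CANTUNWIND), an inline entry with bit 31 set, or a prel31
   offset to an entry in .ARM.extab.  */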
2045 arm_exidx_new_objfile (struct objfile *objfile)
2047 struct arm_exidx_data *data;
2048 asection *exidx, *extab;
2049 bfd_vma exidx_vma = 0, extab_vma = 0;
2052 /* If we've already touched this file, do nothing. */
2053 if (!objfile || objfile_data (objfile, arm_exidx_data_key) != NULL)
2056 /* Read contents of exception table and index. */
2057 exidx = bfd_get_section_by_name (objfile->obfd, ELF_STRING_ARM_unwind);
2058 gdb::byte_vector exidx_data;
2061 exidx_vma = bfd_section_vma (objfile->obfd, exidx);
2062 exidx_data.resize (bfd_get_section_size (exidx));
2064 if (!bfd_get_section_contents (objfile->obfd, exidx,
2065 exidx_data.data (), 0,
2066 exidx_data.size ()))
2070 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2071 gdb::byte_vector extab_data;
2074 extab_vma = bfd_section_vma (objfile->obfd, extab);
2075 extab_data.resize (bfd_get_section_size (extab));
2077 if (!bfd_get_section_contents (objfile->obfd, extab,
2078 extab_data.data (), 0,
2079 extab_data.size ()))
2083 /* Allocate exception table data structure. */
2084 data = OBSTACK_ZALLOC (&objfile->objfile_obstack, struct arm_exidx_data);
2085 set_objfile_data (objfile, arm_exidx_data_key, data);
2086 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
2087 objfile->obfd->section_count,
2088 VEC(arm_exidx_entry_s) *);
2090 /* Fill in exception table. */
2091 for (i = 0; i < exidx_data.size () / 8; i++)
2093 struct arm_exidx_entry new_exidx_entry;
2094 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data.data () + i * 8);
2095 bfd_vma val = bfd_h_get_32 (objfile->obfd,
2096 exidx_data.data () + i * 8 + 4);
2097 bfd_vma addr = 0, word = 0;
2098 int n_bytes = 0, n_words = 0;
2099 struct obj_section *sec;
2100 gdb_byte *entry = NULL;
2102 /* Extract address of start of function. */
2103 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2104 idx += exidx_vma + i * 8;
2106 /* Find section containing function and compute section offset. */
2107 sec = arm_obj_section_from_vma (objfile, idx);
2110 idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);
2112 /* Determine address of exception table entry. */
2115 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2117 else if ((val & 0xff000000) == 0x80000000)
2119 /* Exception table entry embedded in .ARM.exidx
2120 -- must be short form. */
2124 else if (!(val & 0x80000000))
2126 /* Exception table entry in .ARM.extab. */
2127 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2128 addr += exidx_vma + i * 8 + 4;
2130 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_data.size ())
2132 word = bfd_h_get_32 (objfile->obfd,
2133 extab_data.data () + addr - extab_vma);
2136 if ((word & 0xff000000) == 0x80000000)
2141 else if ((word & 0xff000000) == 0x81000000
2142 || (word & 0xff000000) == 0x82000000)
2146 n_words = ((word >> 16) & 0xff);
2148 else if (!(word & 0x80000000))
2151 struct obj_section *pers_sec;
2152 int gnu_personality = 0;
2154 /* Custom personality routine. */
2155 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2156 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2158 /* Check whether we've got one of the variants of the
2159 GNU personality routines. */
2160 pers_sec = arm_obj_section_from_vma (objfile, pers);
2163 static const char *personality[] =
2165 "__gcc_personality_v0",
2166 "__gxx_personality_v0",
2167 "__gcj_personality_v0",
2168 "__gnu_objc_personality_v0",
2172 CORE_ADDR pc = pers + obj_section_offset (pers_sec);
2175 for (k = 0; personality[k]; k++)
2176 if (lookup_minimal_symbol_by_pc_name
2177 (pc, personality[k], objfile))
2179 gnu_personality = 1;
2184 /* If so, the next word contains a word count in the high
2185 byte, followed by the same unwind instructions as the
2186 pre-defined forms. */
2188 && addr + 4 <= extab_vma + extab_data.size ())
2190 word = bfd_h_get_32 (objfile->obfd,
2192 + addr - extab_vma));
2195 n_words = ((word >> 24) & 0xff);
2201 /* Sanity check address. */
2203 if (addr < extab_vma
2204 || addr + 4 * n_words > extab_vma + extab_data.size ())
2205 n_words = n_bytes = 0;
2207 /* The unwind instructions reside in WORD (only the N_BYTES least
2208 significant bytes are valid), followed by N_WORDS words in the
2209 extab section starting at ADDR. */
2210 if (n_bytes || n_words)
2213 = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
2214 n_bytes + n_words * 4 + 1);
2217 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2221 word = bfd_h_get_32 (objfile->obfd,
2222 extab_data.data () + addr - extab_vma);
2225 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2226 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2227 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2228 *p++ = (gdb_byte) (word & 0xff);
2231 /* Implied "Finish" to terminate the list. */
2235 /* Push entry onto vector. They are guaranteed to always
2236 appear in order of increasing addresses. */
2237 new_exidx_entry.addr = idx;
2238 new_exidx_entry.entry = entry;
2239 VEC_safe_push (arm_exidx_entry_s,
2240 data->section_maps[sec->the_bfd_section->index],
2245 /* Search for the exception table entry covering MEMADDR. If one is found,
2246 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2247 set *START to the start of the region covered by this entry. */
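/* Illustrative use only (this mirrors arm_exidx_unwind_sniffer further
   below; it is not an additional caller):

     CORE_ADDR region;
     gdb_byte *entry
       = arm_find_exidx_entry (get_frame_address_in_block (this_frame),
                               &region);
     if (entry != NULL)
       ... decode the unwind instructions starting at ENTRY ...  */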
2250 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2252 struct obj_section *sec;
2254 sec = find_pc_section (memaddr);
2257 struct arm_exidx_data *data;
2258 VEC(arm_exidx_entry_s) *map;
2259 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2262 data = ((struct arm_exidx_data *)
2263 objfile_data (sec->objfile, arm_exidx_data_key));
2266 map = data->section_maps[sec->the_bfd_section->index];
2267 if (!VEC_empty (arm_exidx_entry_s, map))
2269 struct arm_exidx_entry *map_sym;
2271 idx = VEC_lower_bound (arm_exidx_entry_s, map, &map_key,
2272 arm_compare_exidx_entries);
2274 /* VEC_lower_bound finds the earliest ordered insertion
2275 point. If the following symbol starts at this exact
2276 address, we use that; otherwise, the preceding
2277 exception table entry covers this address. */
2278 if (idx < VEC_length (arm_exidx_entry_s, map))
2280 map_sym = VEC_index (arm_exidx_entry_s, map, idx);
2281 if (map_sym->addr == map_key.addr)
2284 *start = map_sym->addr + obj_section_addr (sec);
2285 return map_sym->entry;
2291 map_sym = VEC_index (arm_exidx_entry_s, map, idx - 1);
2293 *start = map_sym->addr + obj_section_addr (sec);
2294 return map_sym->entry;
2303 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2304 instruction list from the ARM exception table entry ENTRY, allocate and
2305 return a prologue cache structure describing how to unwind this frame.
2307 Return NULL if the unwinding instruction list contains a "spare",
2308 "reserved" or "refuse to unwind" instruction as defined in section
2309 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2310 for the ARM Architecture" document. */
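/* Worked example (illustrative only, derived from the cases handled
   below): the two-byte sequence 0xa8 0xb0 decodes as

     0xa8  pop { r4, lr }  -- 0xa0 form, count 0, L bit set; r4 and LR
                              are recorded at VSP and VSP+4, and VSP
                              advances by 8
     0xb0  finish          -- if PC was not popped explicitly, the saved
                              LR is used as the saved PC.  */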
2312 static struct arm_prologue_cache *
2313 arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2318 struct arm_prologue_cache *cache;
2319 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2320 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2326 /* Whenever we reload SP, we have to retrieve its actual
2327 value in the current frame. */
2330 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2332 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2333 vsp = get_frame_register_unsigned (this_frame, reg);
2337 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
2338 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2344 /* Decode next unwind instruction. */
2347 if ((insn & 0xc0) == 0)
2349 int offset = insn & 0x3f;
2350 vsp += (offset << 2) + 4;
2352 else if ((insn & 0xc0) == 0x40)
2354 int offset = insn & 0x3f;
2355 vsp -= (offset << 2) + 4;
2357 else if ((insn & 0xf0) == 0x80)
2359 int mask = ((insn & 0xf) << 8) | *entry++;
2362 /* The special case of an all-zero mask identifies
2363 "Refuse to unwind". We return NULL to fall back
2364 to the prologue analyzer. */
2368 /* Pop registers r4..r15 under mask. */
2369 for (i = 0; i < 12; i++)
2370 if (mask & (1 << i))
2372 cache->saved_regs[4 + i].addr = vsp;
2376 /* Special-case popping SP -- we need to reload vsp. */
2377 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2380 else if ((insn & 0xf0) == 0x90)
2382 int reg = insn & 0xf;
2384 /* Reserved cases. */
2385 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2388 /* Set SP from another register and mark VSP for reload. */
2389 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2392 else if ((insn & 0xf0) == 0xa0)
2394 int count = insn & 0x7;
2395 int pop_lr = (insn & 0x8) != 0;
2398 /* Pop r4..r[4+count]. */
2399 for (i = 0; i <= count; i++)
2401 cache->saved_regs[4 + i].addr = vsp;
2405 /* If indicated by flag, pop LR as well. */
2408 cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
2412 else if (insn == 0xb0)
2414 /* We could only have updated PC by popping into it; if so, it
2415 will show up as an address. Otherwise, copy LR into PC. */
2416 if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
2417 cache->saved_regs[ARM_PC_REGNUM]
2418 = cache->saved_regs[ARM_LR_REGNUM];
2423 else if (insn == 0xb1)
2425 int mask = *entry++;
2428 /* An all-zero mask or a mask >= 16 is "spare". */
2429 if (mask == 0 || mask >= 16)
2432 /* Pop r0..r3 under mask. */
2433 for (i = 0; i < 4; i++)
2434 if (mask & (1 << i))
2436 cache->saved_regs[i].addr = vsp;
2440 else if (insn == 0xb2)
2442 ULONGEST offset = 0;
2447 offset |= (*entry & 0x7f) << shift;
2450 while (*entry++ & 0x80);
2452 vsp += 0x204 + (offset << 2);
2454 else if (insn == 0xb3)
2456 int start = *entry >> 4;
2457 int count = (*entry++) & 0xf;
2460 /* Only registers D0..D15 are valid here. */
2461 if (start + count >= 16)
2464 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2465 for (i = 0; i <= count; i++)
2467 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2471 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2474 else if ((insn & 0xf8) == 0xb8)
2476 int count = insn & 0x7;
2479 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2480 for (i = 0; i <= count; i++)
2482 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2486 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2489 else if (insn == 0xc6)
2491 int start = *entry >> 4;
2492 int count = (*entry++) & 0xf;
2495 /* Only registers WR0..WR15 are valid. */
2496 if (start + count >= 16)
2499 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2500 for (i = 0; i <= count; i++)
2502 cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
2506 else if (insn == 0xc7)
2508 int mask = *entry++;
2511 /* An all-zero mask or a mask >= 16 is "spare". */
2512 if (mask == 0 || mask >= 16)
2515 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2516 for (i = 0; i < 4; i++)
2517 if (mask & (1 << i))
2519 cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
2523 else if ((insn & 0xf8) == 0xc0)
2525 int count = insn & 0x7;
2528 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2529 for (i = 0; i <= count; i++)
2531 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
2535 else if (insn == 0xc8)
2537 int start = *entry >> 4;
2538 int count = (*entry++) & 0xf;
2541 /* Only registers D0..D31 are valid. */
2542 if (start + count >= 16)
2545 /* Pop VFP double-precision registers
2546 D[16+start]..D[16+start+count]. */
2547 for (i = 0; i <= count; i++)
2549 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
2553 else if (insn == 0xc9)
2555 int start = *entry >> 4;
2556 int count = (*entry++) & 0xf;
2559 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2560 for (i = 0; i <= count; i++)
2562 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2566 else if ((insn & 0xf8) == 0xd0)
2568 int count = insn & 0x7;
2571 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2572 for (i = 0; i <= count; i++)
2574 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2580 /* Everything else is "spare". */
2585 /* If we restore SP from a register, assume this was the frame register.
2586 Otherwise just fall back to SP as frame register. */
2587 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2588 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2590 cache->framereg = ARM_SP_REGNUM;
2592 /* Determine offset to previous frame. */
2594 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2596 /* We already got the previous SP. */
2597 cache->prev_sp = vsp;
2602 /* Unwinding via ARM exception table entries. Note that the sniffer
2603 already computes a filled-in prologue cache, which is then used
2604 with the same arm_prologue_this_id and arm_prologue_prev_register
2605 routines also used for prologue-parsing based unwinding. */
2608 arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2609 struct frame_info *this_frame,
2610 void **this_prologue_cache)
2612 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2613 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2614 CORE_ADDR addr_in_block, exidx_region, func_start;
2615 struct arm_prologue_cache *cache;
2618 /* See if we have an ARM exception table entry covering this address. */
2619 addr_in_block = get_frame_address_in_block (this_frame);
2620 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2624 /* The ARM exception table does not describe unwind information
2625 for arbitrary PC values, but is guaranteed to be correct only
2626 at call sites. We have to decide here whether we want to use
2627 ARM exception table information for this frame, or fall back
2628 to using prologue parsing. (Note that if we have DWARF CFI,
2629 this sniffer isn't even called -- CFI is always preferred.)
2631 Before we make this decision, however, we check whether we
2632 actually have *symbol* information for the current frame.
2633 If not, prologue parsing would not work anyway, so we might
2634 as well use the exception table and hope for the best. */
2635 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2639 /* If the next frame is "normal", we are at a call site in this
2640 frame, so exception information is guaranteed to be valid. */
2641 if (get_next_frame (this_frame)
2642 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2645 /* We also assume exception information is valid if we're currently
2646 blocked in a system call. The system library is supposed to
2647 ensure this, so that e.g. pthread cancellation works. */
2648 if (arm_frame_is_thumb (this_frame))
2652 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 2,
2653 2, byte_order_for_code, &insn)
2654 && (insn & 0xff00) == 0xdf00 /* svc */)
2661 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 4,
2662 4, byte_order_for_code, &insn)
2663 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2667 /* Bail out if we don't know that exception information is valid. */
2671 /* The ARM exception index does not mark the *end* of the region
2672 covered by the entry, and some functions will not have any entry.
2673 To correctly recognize the end of the covered region, the linker
2674 should have inserted dummy records with a CANTUNWIND marker.
2676 Unfortunately, current versions of GNU ld do not reliably do
2677 this, and thus we may have found an incorrect entry above.
2678 As a (temporary) sanity check, we only use the entry if it
2679 lies *within* the bounds of the function. Note that this check
2680 might reject perfectly valid entries that just happen to cover
2681 multiple functions; therefore this check ought to be removed
2682 once the linker is fixed. */
2683 if (func_start > exidx_region)
2687 /* Decode the list of unwinding instructions into a prologue cache.
2688 Note that this may fail due to e.g. a "refuse to unwind" code. */
2689 cache = arm_exidx_fill_cache (this_frame, entry);
2693 *this_prologue_cache = cache;
2697 struct frame_unwind arm_exidx_unwind = {
2699 default_frame_unwind_stop_reason,
2700 arm_prologue_this_id,
2701 arm_prologue_prev_register,
2703 arm_exidx_unwind_sniffer
2706 static struct arm_prologue_cache *
2707 arm_make_epilogue_frame_cache (struct frame_info *this_frame)
2709 struct arm_prologue_cache *cache;
2712 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2713 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2715 /* Still rely on the offset calculated from prologue. */
2716 arm_scan_prologue (this_frame, cache);
2718 /* Since we are in epilogue, the SP has been restored. */
2719 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2721 /* Calculate actual addresses of saved registers using offsets
2722 determined by arm_scan_prologue. */
2723 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2724 if (trad_frame_addr_p (cache->saved_regs, reg))
2725 cache->saved_regs[reg].addr += cache->prev_sp;
2730 /* Implementation of function hook 'this_id' in
2731 'struct frame_unwind' for epilogue unwinder. */
2734 arm_epilogue_frame_this_id (struct frame_info *this_frame,
2736 struct frame_id *this_id)
2738 struct arm_prologue_cache *cache;
2741 if (*this_cache == NULL)
2742 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2743 cache = (struct arm_prologue_cache *) *this_cache;
2745 /* Use function start address as part of the frame ID. If we cannot
2746 identify the start address (due to missing symbol information),
2747 fall back to just using the current PC. */
2748 pc = get_frame_pc (this_frame);
2749 func = get_frame_func (this_frame);
2753 (*this_id) = frame_id_build (cache->prev_sp, pc);
2756 /* Implementation of function hook 'prev_register' in
2757 'struct frame_unwind' for epilogue unwinder. */
2759 static struct value *
2760 arm_epilogue_frame_prev_register (struct frame_info *this_frame,
2761 void **this_cache, int regnum)
2763 if (*this_cache == NULL)
2764 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2766 return arm_prologue_prev_register (this_frame, this_cache, regnum);
2769 static int arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch,
2771 static int thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch,
2774 /* Implementation of function hook 'sniffer' in
2775 'struct frame_unwind' for epilogue unwinder. */
2778 arm_epilogue_frame_sniffer (const struct frame_unwind *self,
2779 struct frame_info *this_frame,
2780 void **this_prologue_cache)
2782 if (frame_relative_level (this_frame) == 0)
2784 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2785 CORE_ADDR pc = get_frame_pc (this_frame);
2787 if (arm_frame_is_thumb (this_frame))
2788 return thumb_stack_frame_destroyed_p (gdbarch, pc);
2790 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
2796 /* Frame unwinder from epilogue. */
2798 static const struct frame_unwind arm_epilogue_frame_unwind =
2801 default_frame_unwind_stop_reason,
2802 arm_epilogue_frame_this_id,
2803 arm_epilogue_frame_prev_register,
2805 arm_epilogue_frame_sniffer,
2808 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2809 trampoline, return the target PC. Otherwise return 0.
2811 void call0a (char c, short s, int i, long l) {}
2815 (*pointer_to_call0a) (c, s, i, l);
2818 Instead of calling a stub library function _call_via_xx (xx is
2819 the register name), GCC may inline the trampoline in the object
2820 file as below (register r2 has the address of call0a).
2823 .type main, %function
2832 The trampoline 'bx r2' doesn't belong to main. */
2835 arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2837 /* The heuristic for recognizing such a trampoline is that FRAME is
2838 executing in Thumb mode and the instruction at PC is 'bx Rm'. */
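/* For instance (illustrative only), 'bx r2' encodes as 0x4710: it
   matches the (insn & 0xff80) == 0x4700 test below, and bits 3..6
   give the register number 2.  */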
2839 if (arm_frame_is_thumb (frame))
2843 if (target_read_memory (pc, buf, 2) == 0)
2845 struct gdbarch *gdbarch = get_frame_arch (frame);
2846 enum bfd_endian byte_order_for_code
2847 = gdbarch_byte_order_for_code (gdbarch);
2849 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2851 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2854 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2856 /* Clear the LSB so that gdb core sets step-resume
2857 breakpoint at the right address. */
2858 return UNMAKE_THUMB_ADDR (dest);
2866 static struct arm_prologue_cache *
2867 arm_make_stub_cache (struct frame_info *this_frame)
2869 struct arm_prologue_cache *cache;
2871 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2872 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2874 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2879 /* Our frame ID for a stub frame is the current SP and PC. */
2882 arm_stub_this_id (struct frame_info *this_frame,
2884 struct frame_id *this_id)
2886 struct arm_prologue_cache *cache;
2888 if (*this_cache == NULL)
2889 *this_cache = arm_make_stub_cache (this_frame);
2890 cache = (struct arm_prologue_cache *) *this_cache;
2892 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
2896 arm_stub_unwind_sniffer (const struct frame_unwind *self,
2897 struct frame_info *this_frame,
2898 void **this_prologue_cache)
2900 CORE_ADDR addr_in_block;
2902 CORE_ADDR pc, start_addr;
2905 addr_in_block = get_frame_address_in_block (this_frame);
2906 pc = get_frame_pc (this_frame);
2907 if (in_plt_section (addr_in_block)
2908 /* We also use the stub unwinder if the target memory is unreadable
2909 to avoid having the prologue unwinder try to read it. */
2910 || target_read_memory (pc, dummy, 4) != 0)
2913 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2914 && arm_skip_bx_reg (this_frame, pc) != 0)
2920 struct frame_unwind arm_stub_unwind = {
2922 default_frame_unwind_stop_reason,
2924 arm_prologue_prev_register,
2926 arm_stub_unwind_sniffer
2929 /* Put here the code to store, into CACHE->saved_regs, the addresses
2930 of the saved registers of the frame described by THIS_FRAME. CACHE is returned. */
2933 static struct arm_prologue_cache *
2934 arm_m_exception_cache (struct frame_info *this_frame)
2936 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2937 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2938 struct arm_prologue_cache *cache;
2939 CORE_ADDR unwound_sp;
2942 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2943 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2945 unwound_sp = get_frame_register_unsigned (this_frame,
2948 /* The hardware saves eight 32-bit words, comprising xPSR,
2949 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
2950 "B1.5.6 Exception entry behavior" in
2951 "ARMv7-M Architecture Reference Manual". */
2952 cache->saved_regs[0].addr = unwound_sp;
2953 cache->saved_regs[1].addr = unwound_sp + 4;
2954 cache->saved_regs[2].addr = unwound_sp + 8;
2955 cache->saved_regs[3].addr = unwound_sp + 12;
2956 cache->saved_regs[12].addr = unwound_sp + 16;
2957 cache->saved_regs[14].addr = unwound_sp + 20;
2958 cache->saved_regs[15].addr = unwound_sp + 24;
2959 cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;
2961 /* If bit 9 of the saved xPSR is set, then there is a four-byte
2962 aligner between the top of the 32-byte stack frame and the
2963 previous context's stack pointer. */
2964 cache->prev_sp = unwound_sp + 32;
2965 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
2966 && (xpsr & (1 << 9)) != 0)
2967 cache->prev_sp += 4;
2972 /* Implementation of function hook 'this_id' in
2973 'struct frame_unwind'. */
2976 arm_m_exception_this_id (struct frame_info *this_frame,
2978 struct frame_id *this_id)
2980 struct arm_prologue_cache *cache;
2982 if (*this_cache == NULL)
2983 *this_cache = arm_m_exception_cache (this_frame);
2984 cache = (struct arm_prologue_cache *) *this_cache;
2986 /* Our frame ID for an M-profile exception frame is the unwound SP and the current PC. */
2987 *this_id = frame_id_build (cache->prev_sp,
2988 get_frame_pc (this_frame));
2991 /* Implementation of function hook 'prev_register' in
2992 'struct frame_unwind'. */
2994 static struct value *
2995 arm_m_exception_prev_register (struct frame_info *this_frame,
2999 struct arm_prologue_cache *cache;
3001 if (*this_cache == NULL)
3002 *this_cache = arm_m_exception_cache (this_frame);
3003 cache = (struct arm_prologue_cache *) *this_cache;
3005 /* The value was already reconstructed into PREV_SP. */
3006 if (prev_regnum == ARM_SP_REGNUM)
3007 return frame_unwind_got_constant (this_frame, prev_regnum,
3010 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
3014 /* Implementation of function hook 'sniffer' in
3015 'struct frame_unwind'. */
3018 arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3019 struct frame_info *this_frame,
3020 void **this_prologue_cache)
3022 CORE_ADDR this_pc = get_frame_pc (this_frame);
3024 /* No need to check is_m; this sniffer is only registered for
3025 M-profile architectures. */
3027 /* Check if exception frame returns to a magic PC value. */
3028 return arm_m_addr_is_magic (this_pc);
3031 /* Frame unwinder for M-profile exceptions. */
3033 struct frame_unwind arm_m_exception_unwind =
3036 default_frame_unwind_stop_reason,
3037 arm_m_exception_this_id,
3038 arm_m_exception_prev_register,
3040 arm_m_exception_unwind_sniffer
3044 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
3046 struct arm_prologue_cache *cache;
3048 if (*this_cache == NULL)
3049 *this_cache = arm_make_prologue_cache (this_frame);
3050 cache = (struct arm_prologue_cache *) *this_cache;
3052 return cache->prev_sp - cache->framesize;
3055 struct frame_base arm_normal_base = {
3056 &arm_prologue_unwind,
3057 arm_normal_frame_base,
3058 arm_normal_frame_base,
3059 arm_normal_frame_base
3062 /* Assuming THIS_FRAME is a dummy, return the frame ID of that
3063 dummy frame. The frame ID's base needs to match the TOS value
3064 saved by save_dummy_frame_tos() and returned from
3065 arm_push_dummy_call, and the PC needs to match the dummy frame's breakpoint. */
3068 static struct frame_id
3069 arm_dummy_id (struct gdbarch *gdbarch, struct frame_info *this_frame)
3071 return frame_id_build (get_frame_register_unsigned (this_frame,
3073 get_frame_pc (this_frame));
3076 /* Given THIS_FRAME, find the previous frame's resume PC (which will
3077 be used to construct the previous frame's ID, after looking up the
3078 containing function). */
3081 arm_unwind_pc (struct gdbarch *gdbarch, struct frame_info *this_frame)
3084 pc = frame_unwind_register_unsigned (this_frame, ARM_PC_REGNUM);
3085 return arm_addr_bits_remove (gdbarch, pc);
3089 arm_unwind_sp (struct gdbarch *gdbarch, struct frame_info *this_frame)
3091 return frame_unwind_register_unsigned (this_frame, ARM_SP_REGNUM);
3094 static struct value *
3095 arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
3098 struct gdbarch * gdbarch = get_frame_arch (this_frame);
3100 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
3105 /* The PC is normally copied from the return column, which
3106 describes saves of LR. However, that version may have an
3107 extra bit set to indicate Thumb state. The bit is not part of the PC. */
3109 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3110 return frame_unwind_got_constant (this_frame, regnum,
3111 arm_addr_bits_remove (gdbarch, lr));
3114 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3115 cpsr = get_frame_register_unsigned (this_frame, regnum);
3116 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3117 if (IS_THUMB_ADDR (lr))
3121 return frame_unwind_got_constant (this_frame, regnum, cpsr);
3124 internal_error (__FILE__, __LINE__,
3125 _("Unexpected register %d"), regnum);
3130 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3131 struct dwarf2_frame_state_reg *reg,
3132 struct frame_info *this_frame)
3138 reg->how = DWARF2_FRAME_REG_FN;
3139 reg->loc.fn = arm_dwarf2_prev_register;
3142 reg->how = DWARF2_FRAME_REG_CFA;
3147 /* Implement the stack_frame_destroyed_p gdbarch method. */
3150 thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3152 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3153 unsigned int insn, insn2;
3154 int found_return = 0, found_stack_adjust = 0;
3155 CORE_ADDR func_start, func_end;
3159 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3162 /* The epilogue is a sequence of instructions along the following lines:
3164 - add stack frame size to SP or FP
3165 - [if frame pointer used] restore SP from FP
3166 - restore registers from SP [may include PC]
3167 - a return-type instruction [if PC wasn't already restored]
3169 In a first pass, we scan forward from the current PC and verify the
3170 instructions we find as compatible with this sequence, ending in a
3173 However, this is not sufficient to distinguish indirect function calls
3174 within a function from indirect tail calls in the epilogue in some cases.
3175 Therefore, if we didn't already find any SP-changing instruction during
3176 forward scan, we add a backward scanning heuristic to ensure we actually
3177 are in the epilogue. */
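/* For example (illustrative only), the Thumb epilogue instruction
   pop {r4-r7, pc} encodes as 0xbdf0 and is caught by the
   (insn & 0xff00) == 0xbd00 test below.  */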
3180 while (scan_pc < func_end && !found_return)
3182 if (target_read_memory (scan_pc, buf, 2))
3186 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3188 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3190 else if (insn == 0x46f7) /* mov pc, lr */
3192 else if (thumb_instruction_restores_sp (insn))
3194 if ((insn & 0xff00) == 0xbd00) /* pop <registers, PC> */
3197 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
3199 if (target_read_memory (scan_pc, buf, 2))
3203 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3205 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3207 if (insn2 & 0x8000) /* <registers> include PC. */
3210 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3211 && (insn2 & 0x0fff) == 0x0b04)
3213 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3216 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3217 && (insn2 & 0x0e00) == 0x0a00)
3229 /* Since any instruction in the epilogue sequence, with the possible
3230 exception of return itself, updates the stack pointer, we need to
3231 scan backwards for at most one instruction. Try either a 16-bit or
3232 a 32-bit instruction. This is just a heuristic, so we do not worry
3233 too much about false positives. */
3235 if (pc - 4 < func_start)
3237 if (target_read_memory (pc - 4, buf, 4))
3240 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3241 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3243 if (thumb_instruction_restores_sp (insn2))
3244 found_stack_adjust = 1;
3245 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3246 found_stack_adjust = 1;
3247 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3248 && (insn2 & 0x0fff) == 0x0b04)
3249 found_stack_adjust = 1;
3250 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3251 && (insn2 & 0x0e00) == 0x0a00)
3252 found_stack_adjust = 1;
3254 return found_stack_adjust;
3258 arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch, CORE_ADDR pc)
3260 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3263 CORE_ADDR func_start, func_end;
3265 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3268 /* We are in the epilogue if the previous instruction was a stack
3269 adjustment and the next instruction is a possible return (bx, mov
3270 pc, or pop). We could have to scan backwards to find the stack
3271 adjustment, or forwards to find the return, but this is a decent
3272 approximation. First scan forwards. */
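/* For example (illustrative only), 'bx lr' encodes as 0xe12fff1e, which
   matches the (insn & 0x0ffffff0) == 0x012fff10 test below once the
   condition field is masked off.  */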
3275 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3276 if (bits (insn, 28, 31) != INST_NV)
3278 if ((insn & 0x0ffffff0) == 0x012fff10)
3281 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3284 else if ((insn & 0x0fff0000) == 0x08bd0000
3285 && (insn & 0x0000c000) != 0)
3286 /* POP (LDMIA), including PC or LR. */
3293 /* Scan backwards. This is just a heuristic, so do not worry about
3294 false positives from mode changes. */
3296 if (pc < func_start + 4)
3299 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
3300 if (arm_instruction_restores_sp (insn))
3306 /* Implement the stack_frame_destroyed_p gdbarch method. */
3309 arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3311 if (arm_pc_is_thumb (gdbarch, pc))
3312 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3314 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
3317 /* When arguments must be pushed onto the stack, they go on in reverse
3318 order. The code below implements a FILO (stack) to do this. */
3323 struct stack_item *prev;
3327 static struct stack_item *
3328 push_stack_item (struct stack_item *prev, const gdb_byte *contents, int len)
3330 struct stack_item *si;
3331 si = XNEW (struct stack_item);
3332 si->data = (gdb_byte *) xmalloc (len);
3335 memcpy (si->data, contents, len);
3339 static struct stack_item *
3340 pop_stack_item (struct stack_item *si)
3342 struct stack_item *dead = si;
3350 /* Return the alignment (in bytes) of the given type. */
3353 arm_type_align (struct type *t)
3359 t = check_typedef (t);
3360 switch (TYPE_CODE (t))
3363 /* Should never happen. */
3364 internal_error (__FILE__, __LINE__, _("unknown type alignment"));
3368 case TYPE_CODE_ENUM:
3372 case TYPE_CODE_RANGE:
3374 case TYPE_CODE_RVALUE_REF:
3375 case TYPE_CODE_CHAR:
3376 case TYPE_CODE_BOOL:
3377 return TYPE_LENGTH (t);
3379 case TYPE_CODE_ARRAY:
3380 if (TYPE_VECTOR (t))
3382 /* Use the natural alignment for vector types (the same as for
3383 scalar types), but cap the alignment at 64 bits. */
3384 if (TYPE_LENGTH (t) > 8)
3387 return TYPE_LENGTH (t);
3390 return arm_type_align (TYPE_TARGET_TYPE (t));
3391 case TYPE_CODE_COMPLEX:
3392 return arm_type_align (TYPE_TARGET_TYPE (t));
3394 case TYPE_CODE_STRUCT:
3395 case TYPE_CODE_UNION:
3397 for (n = 0; n < TYPE_NFIELDS (t); n++)
3399 falign = arm_type_align (TYPE_FIELD_TYPE (t, n));
3407 /* Possible base types for a candidate for passing and returning in
3410 enum arm_vfp_cprc_base_type
3419 /* The length of one element of base type B. */
3422 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3426 case VFP_CPRC_SINGLE:
3428 case VFP_CPRC_DOUBLE:
3430 case VFP_CPRC_VEC64:
3432 case VFP_CPRC_VEC128:
3435 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3440 /* The character ('s', 'd' or 'q') for the type of VFP register used
3441 for passing base type B. */
3444 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3448 case VFP_CPRC_SINGLE:
3450 case VFP_CPRC_DOUBLE:
3452 case VFP_CPRC_VEC64:
3454 case VFP_CPRC_VEC128:
3457 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3462 /* Determine whether T may be part of a candidate for passing and
3463 returning in VFP registers, ignoring the limit on the total number
3464 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3465 classification of the first valid component found; if it is not
3466 VFP_CPRC_UNKNOWN, all components must have the same classification
3467 as *BASE_TYPE. If it is found that T contains a type not permitted
3468 for passing and returning in VFP registers, a type differently
3469 classified from *BASE_TYPE, or two types differently classified
3470 from each other, return -1, otherwise return the total number of
3471 base-type elements found (possibly 0 in an empty structure or
3472 array). Vector types are not currently supported, matching the
3473 generic AAPCS support. */
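/* Illustrative examples (not part of the original code):

     struct { double re; double im; };  -- two VFP_CPRC_DOUBLE elements,
                                           so the result is 2
     struct { float f; double d; };     -- mixed classifications,
                                           so the result is -1  */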
3476 arm_vfp_cprc_sub_candidate (struct type *t,
3477 enum arm_vfp_cprc_base_type *base_type)
3479 t = check_typedef (t);
3480 switch (TYPE_CODE (t))
3483 switch (TYPE_LENGTH (t))
3486 if (*base_type == VFP_CPRC_UNKNOWN)
3487 *base_type = VFP_CPRC_SINGLE;
3488 else if (*base_type != VFP_CPRC_SINGLE)
3493 if (*base_type == VFP_CPRC_UNKNOWN)
3494 *base_type = VFP_CPRC_DOUBLE;
3495 else if (*base_type != VFP_CPRC_DOUBLE)
3504 case TYPE_CODE_COMPLEX:
3505 /* Arguments of complex T where T is one of the types float or
3506 double get treated as if they are implemented as:
3515 switch (TYPE_LENGTH (t))
3518 if (*base_type == VFP_CPRC_UNKNOWN)
3519 *base_type = VFP_CPRC_SINGLE;
3520 else if (*base_type != VFP_CPRC_SINGLE)
3525 if (*base_type == VFP_CPRC_UNKNOWN)
3526 *base_type = VFP_CPRC_DOUBLE;
3527 else if (*base_type != VFP_CPRC_DOUBLE)
3536 case TYPE_CODE_ARRAY:
3538 if (TYPE_VECTOR (t))
3540 /* A 64-bit or 128-bit containerized vector type is a VFP CPRC. */
3542 switch (TYPE_LENGTH (t))
3545 if (*base_type == VFP_CPRC_UNKNOWN)
3546 *base_type = VFP_CPRC_VEC64;
3549 if (*base_type == VFP_CPRC_UNKNOWN)
3550 *base_type = VFP_CPRC_VEC128;
3561 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t),
3565 if (TYPE_LENGTH (t) == 0)
3567 gdb_assert (count == 0);
3570 else if (count == 0)
3572 unitlen = arm_vfp_cprc_unit_length (*base_type);
3573 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3574 return TYPE_LENGTH (t) / unitlen;
3579 case TYPE_CODE_STRUCT:
3584 for (i = 0; i < TYPE_NFIELDS (t); i++)
3588 if (!field_is_static (&TYPE_FIELD (t, i)))
3589 sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3591 if (sub_count == -1)
3595 if (TYPE_LENGTH (t) == 0)
3597 gdb_assert (count == 0);
3600 else if (count == 0)
3602 unitlen = arm_vfp_cprc_unit_length (*base_type);
3603 if (TYPE_LENGTH (t) != unitlen * count)
3608 case TYPE_CODE_UNION:
3613 for (i = 0; i < TYPE_NFIELDS (t); i++)
3615 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3617 if (sub_count == -1)
3619 count = (count > sub_count ? count : sub_count);
3621 if (TYPE_LENGTH (t) == 0)
3623 gdb_assert (count == 0);
3626 else if (count == 0)
3628 unitlen = arm_vfp_cprc_unit_length (*base_type);
3629 if (TYPE_LENGTH (t) != unitlen * count)
3641 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3642 if passed to or returned from a non-variadic function with the VFP
3643 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3644 *BASE_TYPE to the base type for T and *COUNT to the number of
3645 elements of that base type before returning. */
3648 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3651 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3652 int c = arm_vfp_cprc_sub_candidate (t, &b);
3653 if (c <= 0 || c > 4)
3660 /* Return 1 if the VFP ABI should be used for passing arguments to and
3661 returning values from a function of type FUNC_TYPE, 0 otherwise. */
3665 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3667 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3668 /* Variadic functions always use the base ABI. Assume that functions
3669 without debug info are not variadic. */
3670 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3672 /* The VFP ABI is only supported as a variant of AAPCS. */
3673 if (tdep->arm_abi != ARM_ABI_AAPCS)
3675 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3678 /* We currently only support passing parameters in integer registers, which
3679 conforms with GCC's default model, and VFP argument passing following
3680 the VFP variant of AAPCS. Several other variants exist and
3681 we should probably support some of them based on the selected ABI. */
3684 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
3685 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3686 struct value **args, CORE_ADDR sp, int struct_return,
3687 CORE_ADDR struct_addr)
3689 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3693 struct stack_item *si = NULL;
3696 unsigned vfp_regs_free = (1 << 16) - 1;
3698 /* Determine the type of this function and whether the VFP ABI
3700 ftype = check_typedef (value_type (function));
3701 if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3702 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3703 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
3705 /* Set the return address. For the ARM, the return breakpoint is
3706 always at BP_ADDR. */
3707 if (arm_pc_is_thumb (gdbarch, bp_addr))
3709 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
3711 /* Walk through the list of args and determine how large a temporary
3712 stack is required. Need to take care here as structs may be
3713 passed on the stack, and we have to push them. */
3716 argreg = ARM_A1_REGNUM;
3719 /* The struct_return pointer occupies the first parameter
3720 passing register. */
3724 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
3725 gdbarch_register_name (gdbarch, argreg),
3726 paddress (gdbarch, struct_addr));
3727 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3731 for (argnum = 0; argnum < nargs; argnum++)
3734 struct type *arg_type;
3735 struct type *target_type;
3736 enum type_code typecode;
3737 const bfd_byte *val;
3739 enum arm_vfp_cprc_base_type vfp_base_type;
3741 int may_use_core_reg = 1;
3743 arg_type = check_typedef (value_type (args[argnum]));
3744 len = TYPE_LENGTH (arg_type);
3745 target_type = TYPE_TARGET_TYPE (arg_type);
3746 typecode = TYPE_CODE (arg_type);
3747 val = value_contents (args[argnum]);
3749 align = arm_type_align (arg_type);
3750 /* Round alignment up to a whole number of words. */
3751 align = (align + INT_REGISTER_SIZE - 1) & ~(INT_REGISTER_SIZE - 1);
3752 /* Different ABIs have different maximum alignments. */
3753 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3755 /* The APCS ABI only requires word alignment. */
3756 align = INT_REGISTER_SIZE;
3760 /* The AAPCS requires at most doubleword alignment. */
3761 if (align > INT_REGISTER_SIZE * 2)
3762 align = INT_REGISTER_SIZE * 2;
3766 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3774 /* Because this is a CPRC it cannot go in a core register or
3775 cause a core register to be skipped for alignment.
3776 Either it goes in VFP registers and the rest of this loop
3777 iteration is skipped for this argument, or it goes on the
3778 stack (and the stack alignment code is correct for this case). */
3780 may_use_core_reg = 0;
3782 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3783 shift = unit_length / 4;
3784 mask = (1 << (shift * vfp_base_count)) - 1;
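/* Illustrative example (not part of the original code): for a CPRC made
   of two doubles, unit_length is 8, so shift is 2 and mask is 0xf; the
   loop below then looks for four consecutive free single-precision
   slots starting at an even S register, i.e. a free D register pair.  */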
3785 for (regno = 0; regno < 16; regno += shift)
3786 if (((vfp_regs_free >> regno) & mask) == mask)
3795 vfp_regs_free &= ~(mask << regno);
3796 reg_scaled = regno / shift;
3797 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3798 for (i = 0; i < vfp_base_count; i++)
3802 if (reg_char == 'q')
3803 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
3804 val + i * unit_length);
3807 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3808 reg_char, reg_scaled + i);
3809 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3811 regcache->cooked_write (regnum, val + i * unit_length);
3818 /* This CPRC could not go in VFP registers, so all VFP
3819 registers are now marked as used. */
3824 /* Push stack padding for doubleword alignment. */
3825 if (nstack & (align - 1))
3827 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3828 nstack += INT_REGISTER_SIZE;
3831 /* Doubleword aligned quantities must go in even register pairs. */
3832 if (may_use_core_reg
3833 && argreg <= ARM_LAST_ARG_REGNUM
3834 && align > INT_REGISTER_SIZE
3838 /* If the argument is a pointer to a function, and it is a
3839 Thumb function, create a LOCAL copy of the value and set
3840 the THUMB bit in it. */
3841 if (TYPE_CODE_PTR == typecode
3842 && target_type != NULL
3843 && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
3845 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
3846 if (arm_pc_is_thumb (gdbarch, regval))
3848 bfd_byte *copy = (bfd_byte *) alloca (len);
3849 store_unsigned_integer (copy, len, byte_order,
3850 MAKE_THUMB_ADDR (regval));
3855 /* Copy the argument to general registers or the stack in
3856 register-sized pieces. Large arguments are split between
3857 registers and stack. */
3860 int partial_len = len < INT_REGISTER_SIZE ? len : INT_REGISTER_SIZE;
3862 = extract_unsigned_integer (val, partial_len, byte_order);
3864 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
3866 /* The argument is being passed in a general purpose
3868 if (byte_order == BFD_ENDIAN_BIG)
3869 regval <<= (INT_REGISTER_SIZE - partial_len) * 8;
3871 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
3873 gdbarch_register_name
3875 phex (regval, INT_REGISTER_SIZE));
3876 regcache_cooked_write_unsigned (regcache, argreg, regval);
3881 gdb_byte buf[INT_REGISTER_SIZE];
3883 memset (buf, 0, sizeof (buf));
3884 store_unsigned_integer (buf, partial_len, byte_order, regval);
3886 /* Push the arguments onto the stack. */
3888 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3890 si = push_stack_item (si, buf, INT_REGISTER_SIZE);
3891 nstack += INT_REGISTER_SIZE;
3898 /* If we have an odd number of words to push, then decrement the stack
3899 by one word now, so the first stack argument will be dword aligned. */
3906 write_memory (sp, si->data, si->len);
3907 si = pop_stack_item (si);
3910 /* Finally, update the SP register. */
3911 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3917 /* Always align the frame to an 8-byte boundary. This is required on
3918 some platforms and harmless on the rest. */
3921 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3923 /* Align the stack to eight bytes. */
3924 return sp & ~ (CORE_ADDR) 7;
3928 print_fpu_flags (struct ui_file *file, int flags)
3930 if (flags & (1 << 0))
3931 fputs_filtered ("IVO ", file);
3932 if (flags & (1 << 1))
3933 fputs_filtered ("DVZ ", file);
3934 if (flags & (1 << 2))
3935 fputs_filtered ("OFL ", file);
3936 if (flags & (1 << 3))
3937 fputs_filtered ("UFL ", file);
3938 if (flags & (1 << 4))
3939 fputs_filtered ("INX ", file);
3940 fputc_filtered ('\n', file);
3943 /* Print interesting information about the floating point processor
3944 (if present) or emulator. */
3946 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
3947 struct frame_info *frame, const char *args)
3949 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
3952 type = (status >> 24) & 127;
3953 if (status & (1 << 31))
3954 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
3956 fprintf_filtered (file, _("Software FPU type %d\n"), type);
3957 /* i18n: [floating point unit] mask */
3958 fputs_filtered (_("mask: "), file);
3959 print_fpu_flags (file, status >> 16);
3960 /* i18n: [floating point unit] flags */
3961 fputs_filtered (_("flags: "), file);
3962 print_fpu_flags (file, status);
3965 /* Construct the ARM extended floating point type. */
3966 static struct type *
3967 arm_ext_type (struct gdbarch *gdbarch)
3969 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3971 if (!tdep->arm_ext_type)
3973 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
3974 floatformats_arm_ext);
3976 return tdep->arm_ext_type;
3979 static struct type *
3980 arm_neon_double_type (struct gdbarch *gdbarch)
3982 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3984 if (tdep->neon_double_type == NULL)
3986 struct type *t, *elem;
3988 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
3990 elem = builtin_type (gdbarch)->builtin_uint8;
3991 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
3992 elem = builtin_type (gdbarch)->builtin_uint16;
3993 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
3994 elem = builtin_type (gdbarch)->builtin_uint32;
3995 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
3996 elem = builtin_type (gdbarch)->builtin_uint64;
3997 append_composite_type_field (t, "u64", elem);
3998 elem = builtin_type (gdbarch)->builtin_float;
3999 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
4000 elem = builtin_type (gdbarch)->builtin_double;
4001 append_composite_type_field (t, "f64", elem);
4003 TYPE_VECTOR (t) = 1;
4004 TYPE_NAME (t) = "neon_d";
4005 tdep->neon_double_type = t;
4008 return tdep->neon_double_type;
4011 /* FIXME: The vector types are not correctly ordered on big-endian
4012 targets. Just as s0 is the low bits of d0, d0[0] is also the low
4013 bits of d0 - regardless of what unit size is being held in d0. So
4014 the offset of the first uint8 in d0 is 7, but the offset of the
4015 first float is 4. This code works as-is for little-endian targets. */
4018 static struct type *
4019 arm_neon_quad_type (struct gdbarch *gdbarch)
4021 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4023 if (tdep->neon_quad_type == NULL)
4025 struct type *t, *elem;
4027 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
4029 elem = builtin_type (gdbarch)->builtin_uint8;
4030 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
4031 elem = builtin_type (gdbarch)->builtin_uint16;
4032 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
4033 elem = builtin_type (gdbarch)->builtin_uint32;
4034 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
4035 elem = builtin_type (gdbarch)->builtin_uint64;
4036 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
4037 elem = builtin_type (gdbarch)->builtin_float;
4038 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
4039 elem = builtin_type (gdbarch)->builtin_double;
4040 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
4042 TYPE_VECTOR (t) = 1;
4043 TYPE_NAME (t) = "neon_q";
4044 tdep->neon_quad_type = t;
4047 return tdep->neon_quad_type;
4050 /* Return the GDB type object for the "standard" data type of data in register N. */
4053 static struct type *
4054 arm_register_type (struct gdbarch *gdbarch, int regnum)
4056 int num_regs = gdbarch_num_regs (gdbarch);
4058 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
4059 && regnum >= num_regs && regnum < num_regs + 32)
4060 return builtin_type (gdbarch)->builtin_float;
4062 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
4063 && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
4064 return arm_neon_quad_type (gdbarch);
4066 /* If the target description has register information, we are only
4067 in this function so that we can override the types of
4068 double-precision registers for NEON. */
4069 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
4071 struct type *t = tdesc_register_type (gdbarch, regnum);
4073 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
4074 && TYPE_CODE (t) == TYPE_CODE_FLT
4075 && gdbarch_tdep (gdbarch)->have_neon)
4076 return arm_neon_double_type (gdbarch);
4081 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
4083 if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
4084 return builtin_type (gdbarch)->builtin_void;
4086 return arm_ext_type (gdbarch);
4088 else if (regnum == ARM_SP_REGNUM)
4089 return builtin_type (gdbarch)->builtin_data_ptr;
4090 else if (regnum == ARM_PC_REGNUM)
4091 return builtin_type (gdbarch)->builtin_func_ptr;
4092 else if (regnum >= ARRAY_SIZE (arm_register_names))
4093 /* These registers are only supported on targets which supply
4094 an XML description. */
4095 return builtin_type (gdbarch)->builtin_int0;
4097 return builtin_type (gdbarch)->builtin_uint32;
4100 /* Map a DWARF register REGNUM onto the appropriate GDB register number. */
4104 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
4106 /* Core integer regs. */
4107 if (reg >= 0 && reg <= 15)
4110 /* Legacy FPA encoding. These were once used in a way which
4111 overlapped with VFP register numbering, so their use is
4112 discouraged, but GDB doesn't support the ARM toolchain
4113 which used them for VFP. */
4114 if (reg >= 16 && reg <= 23)
4115 return ARM_F0_REGNUM + reg - 16;
4117 /* New assignments for the FPA registers. */
4118 if (reg >= 96 && reg <= 103)
4119 return ARM_F0_REGNUM + reg - 96;
4121 /* WMMX register assignments. */
4122 if (reg >= 104 && reg <= 111)
4123 return ARM_WCGR0_REGNUM + reg - 104;
4125 if (reg >= 112 && reg <= 127)
4126 return ARM_WR0_REGNUM + reg - 112;
4128 if (reg >= 192 && reg <= 199)
4129 return ARM_WC0_REGNUM + reg - 192;
4131 /* VFP v2 registers. A double precision value is actually
4132 in d1 rather than s2, but the ABI only defines numbering
4133 for the single precision registers. This will "just work"
4134 in GDB for little endian targets (we'll read eight bytes,
4135 starting in s0 and then progressing to s1), but will be
4136 reversed on big endian targets with VFP. This won't
4137 be a problem for the new Neon quad registers; you're supposed
4138 to use DW_OP_piece for those. */
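/* For example (illustrative only), DWARF register 65 maps to GDB's s1
   here, while DWARF register 257 maps to d1 via the VFPv3/Neon range
   handled further below.  */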
4139 if (reg >= 64 && reg <= 95)
4143 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
4144 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4148 /* VFP v3 / Neon registers. This range is also used for VFP v2
4149 registers, except that it now describes d0 instead of s0. */
4150 if (reg >= 256 && reg <= 287)
4154 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
4155 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4162 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4164 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4167 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
4169 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4170 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4172 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4173 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4175 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4176 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4178 if (reg < NUM_GREGS)
4179 return SIM_ARM_R0_REGNUM + reg;
4182 if (reg < NUM_FREGS)
4183 return SIM_ARM_FP0_REGNUM + reg;
4186 if (reg < NUM_SREGS)
4187 return SIM_ARM_FPS_REGNUM + reg;
4190 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4193 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
4194 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
4195 NULL if an error occurs. BUF is freed. */
4198 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
4199 int old_len, int new_len)
4202 int bytes_to_read = new_len - old_len;
4204 new_buf = (gdb_byte *) xmalloc (new_len);
4205 memcpy (new_buf + bytes_to_read, buf, old_len);
4207 if (target_read_code (endaddr - new_len, new_buf, bytes_to_read) != 0)
4215 /* An IT block is at most the 2-byte IT instruction followed by
4216 four 4-byte instructions. The furthest back we must search to
4217 find an IT block that affects the current instruction is thus
4218 2 + 3 * 4 == 14 bytes. */
4219 #define MAX_IT_BLOCK_PREFIX 14
4221 /* Use a quick scan if there are more than this many bytes of instructions. */
4223 #define IT_SCAN_THRESHOLD 32
4225 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
4226 A breakpoint in an IT block may not be hit, depending on the condition. */
4229 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
4233 CORE_ADDR boundary, func_start;
4235 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
4236 int i, any, last_it, last_it_count;
4238 /* If we are using BKPT breakpoints, none of this is necessary. */
4239 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
4242 /* ARM mode does not have this problem. */
4243 if (!arm_pc_is_thumb (gdbarch, bpaddr))
4246 /* We are setting a breakpoint in Thumb code that could potentially
4247 contain an IT block. The first step is to find how much Thumb
4248 code there is; we do not need to read outside of known Thumb
4250 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
4252 /* Thumb-2 code must have mapping symbols to have a chance. */
4255 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
4257 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
4258 && func_start > boundary)
4259 boundary = func_start;
4261 /* Search for a candidate IT instruction. We have to do some fancy
4262 footwork to distinguish a real IT instruction from the second
4263 half of a 32-bit instruction, but there is no need for that if
4264 there's no candidate. */
4265 buf_len = std::min (bpaddr - boundary, (CORE_ADDR) MAX_IT_BLOCK_PREFIX);
4267 /* No room for an IT instruction. */
4270 buf = (gdb_byte *) xmalloc (buf_len);
4271 if (target_read_code (bpaddr - buf_len, buf, buf_len) != 0)
4274 for (i = 0; i < buf_len; i += 2)
4276 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4277 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4290 /* OK, the code bytes before this instruction contain at least one
4291 halfword which resembles an IT instruction. We know that it's
4292 Thumb code, but there are still two possibilities. Either the
4293 halfword really is an IT instruction, or it is the second half of
4294 a 32-bit Thumb instruction. The only way we can tell is to
4295 scan forwards from a known instruction boundary. */
4296 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
4300 /* There's a lot of code before this instruction. Start with an
4301 optimistic search; it's easy to recognize halfwords that can
4302 not be the start of a 32-bit instruction, and use that to
4303 lock on to the instruction boundaries. */
4304 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
4307 buf_len = IT_SCAN_THRESHOLD;
4310 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
4312 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4313 if (thumb_insn_size (inst1) == 2)
4320 /* At this point, if DEFINITE, BUF[I] is the first place we
4321 are sure that we know the instruction boundaries, and it is far
4322 enough from BPADDR that we could not miss an IT instruction
4323 affecting BPADDR. If ! DEFINITE, give up - start from a
4327 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
4331 buf_len = bpaddr - boundary;
4337 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
4340 buf_len = bpaddr - boundary;
4344 /* Scan forwards. Find the last IT instruction before BPADDR. */
4349 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4351 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4356 else if (inst1 & 0x0002)
4358 else if (inst1 & 0x0004)
4363 i += thumb_insn_size (inst1);
4369 /* There wasn't really an IT instruction after all. */
4372 if (last_it_count < 1)
4373 /* It was too far away. */
4376 /* This really is a trouble spot.  Move the breakpoint to the IT
4377 instruction.  */
4378 return bpaddr - buf_len + last_it;
4381 /* ARM displaced stepping support.
4383 Generally ARM displaced stepping works as follows:
4385 1. When an instruction is to be single-stepped, it is first decoded by
4386 arm_process_displaced_insn. Depending on the type of instruction, it is
4387 then copied to a scratch location, possibly in a modified form. The
4388 copy_* set of functions performs such modification, as necessary. A
4389 breakpoint is placed after the modified instruction in the scratch space
4390 to return control to GDB. Note in particular that instructions which
4391 modify the PC will no longer do so after modification.
4393 2. The instruction is single-stepped, by setting the PC to the scratch
4394 location address, and resuming. Control returns to GDB when the
4397 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
4398 function used for the current instruction. This function's job is to
4399 put the CPU/memory state back to what it would have been if the
4400 instruction had been executed unmodified in its original location. */
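/* Editorial example (not part of the original source): for a PC-modifying
   instruction such as "bl <label>", arm_copy_b_bl_blx below copies a plain
   NOP into the scratch space and records the branch parameters in the
   closure; once the single-step completes, cleanup_branch performs the
   branch itself, updating LR if required and writing the real destination
   to the PC, so the net effect matches executing the BL in place.  */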
4402 /* NOP instruction (mov r0, r0). */
4403 #define ARM_NOP 0xe1a00000
4404 #define THUMB_NOP 0x4600
4406 /* Helper for register reads for displaced stepping. In particular, this
4407 returns the PC as it would be seen by the instruction at its original
4408 location.  */
4411 displaced_read_reg (struct regcache *regs, arm_displaced_step_closure *dsc,
4415 CORE_ADDR from = dsc->insn_addr;
4417 if (regno == ARM_PC_REGNUM)
4419 /* Compute pipeline offset:
4420 - When executing an ARM instruction, PC reads as the address of the
4421 current instruction plus 8.
4422 - When executing a Thumb instruction, PC reads as the address of the
4423 current instruction plus 4. */
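/* Editorial worked example (not part of the original source): if an ARM
   instruction at 0x8000 is being stepped, displaced_read_reg reports the
   PC as 0x8008; for a Thumb instruction at the same address it reports
   0x8004.  This is the value the instruction would have observed had it
   executed at its original location.  */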
4430 if (debug_displaced)
4431 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
4432 (unsigned long) from);
4433 return (ULONGEST) from;
4437 regcache_cooked_read_unsigned (regs, regno, &ret);
4438 if (debug_displaced)
4439 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
4440 regno, (unsigned long) ret);
4446 displaced_in_arm_mode (struct regcache *regs)
4449 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
4451 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4453 return (ps & t_bit) == 0;
4456 /* Write to the PC as from a branch instruction. */
4459 branch_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4463 /* Note: If bits 0/1 are set, this branch would be unpredictable for
4464 architecture versions < 6. */
4465 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4466 val & ~(ULONGEST) 0x3);
4468 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4469 val & ~(ULONGEST) 0x1);
4472 /* Write to the PC as from a branch-exchange instruction. */
4475 bx_write_pc (struct regcache *regs, ULONGEST val)
4478 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
4480 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4484 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
4485 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
4487 else if ((val & 2) == 0)
4489 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4490 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
4494 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
4495 mode, align dest to 4 bytes). */
4496 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
4497 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4498 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
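/* Editorial worked example (not part of the original source): for "bx r3"
   with r3 == 0x8001, bit 0 is set, so bx_write_pc above sets the T flag in
   CPSR and writes 0x8000 to the PC (execution continues in Thumb state);
   with r3 == 0x8000 it clears the T flag and writes 0x8000 unchanged
   (execution continues in ARM state).  */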
4502 /* Write to the PC as if from a load instruction. */
4505 load_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4508 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
4509 bx_write_pc (regs, val);
4511 branch_write_pc (regs, dsc, val);
4514 /* Write to the PC as if from an ALU instruction. */
4517 alu_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4520 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
4521 bx_write_pc (regs, val);
4523 branch_write_pc (regs, dsc, val);
4526 /* Helper for writing to registers for displaced stepping. Writing to the PC
4527 has varying effects depending on the instruction which does the write:
4528 this is controlled by the WRITE_PC argument. */
4531 displaced_write_reg (struct regcache *regs, arm_displaced_step_closure *dsc,
4532 int regno, ULONGEST val, enum pc_write_style write_pc)
4534 if (regno == ARM_PC_REGNUM)
4536 if (debug_displaced)
4537 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
4538 (unsigned long) val);
4541 case BRANCH_WRITE_PC:
4542 branch_write_pc (regs, dsc, val);
4546 bx_write_pc (regs, val);
4550 load_write_pc (regs, dsc, val);
4554 alu_write_pc (regs, dsc, val);
4557 case CANNOT_WRITE_PC:
4558 warning (_("Instruction wrote to PC in an unexpected way when "
4559 "single-stepping"));
4563 internal_error (__FILE__, __LINE__,
4564 _("Invalid argument to displaced_write_reg"));
4567 dsc->wrote_to_pc = 1;
4571 if (debug_displaced)
4572 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
4573 regno, (unsigned long) val);
4574 regcache_cooked_write_unsigned (regs, regno, val);
4578 /* This function is used to concisely determine if an instruction INSN
4579 references PC. Register fields of interest in INSN should have the
4580 corresponding fields of BITMASK set to 0b1111. The function
4581 returns 1 if any of these fields in INSN reference the PC
4582 (also 0b1111, r15), else it returns 0. */
4585 insn_references_pc (uint32_t insn, uint32_t bitmask)
4587 uint32_t lowbit = 1;
4589 while (bitmask != 0)
4593 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
4599 mask = lowbit * 0xf;
4601 if ((insn & mask) == mask)
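/* Editorial sketch (not part of the original source): a standalone
   simplification of the check above, assuming the register fields of
   interest are nibble-aligned, as Rn/Rd/Rs/Rm are in ARM encodings.
   For example, "ldr r0, [pc, #8]" encodes as 0xe59f0008 with Rn in bits
   16-19, so testing it against the bitmask 0x000f0000 reports a PC
   reference, while 0xe5910008 ("ldr r0, [r1, #8]") does not.  */

static int
example_insn_references_pc (unsigned int insn, unsigned int bitmask)
{
  int shift;

  for (shift = 0; shift < 32; shift += 4)
    {
      unsigned int field = 0xfu << shift;

      /* A register field selected by BITMASK references the PC when all
	 four of its bits are set (r15 == 0b1111).  */
      if ((bitmask & field) == field && (insn & field) == field)
	return 1;
    }

  return 0;
}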
4610 /* The simplest copy function. Many instructions have the same effect no
4611 matter what address they are executed at: in those cases, use this. */
4614 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
4615 const char *iname, arm_displaced_step_closure *dsc)
4617 if (debug_displaced)
4618 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
4619 "opcode/class '%s' unmodified\n", (unsigned long) insn,
4622 dsc->modinsn[0] = insn;
4628 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
4629 uint16_t insn2, const char *iname,
4630 arm_displaced_step_closure *dsc)
4632 if (debug_displaced)
4633 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
4634 "opcode/class '%s' unmodified\n", insn1, insn2,
4637 dsc->modinsn[0] = insn1;
4638 dsc->modinsn[1] = insn2;
4644 /* Copy 16-bit Thumb (Thumb and 16-bit Thumb-2) instructions without any
4645 modification.  */
4647 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, uint16_t insn,
4649 arm_displaced_step_closure *dsc)
4651 if (debug_displaced)
4652 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
4653 "opcode/class '%s' unmodified\n", insn,
4656 dsc->modinsn[0] = insn;
4661 /* Preload instructions with immediate offset. */
4664 cleanup_preload (struct gdbarch *gdbarch,
4665 struct regcache *regs, arm_displaced_step_closure *dsc)
4667 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4668 if (!dsc->u.preload.immed)
4669 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
4673 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
4674 arm_displaced_step_closure *dsc, unsigned int rn)
4677 /* Preload instructions:
4679 {pli/pld} [rn, #+/-imm]
4680 ->
4681 {pli/pld} [r0, #+/-imm]. */
4683 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4684 rn_val = displaced_read_reg (regs, dsc, rn);
4685 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4686 dsc->u.preload.immed = 1;
4688 dsc->cleanup = &cleanup_preload;
4692 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
4693 arm_displaced_step_closure *dsc)
4695 unsigned int rn = bits (insn, 16, 19);
4697 if (!insn_references_pc (insn, 0x000f0000ul))
4698 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
4700 if (debug_displaced)
4701 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4702 (unsigned long) insn);
4704 dsc->modinsn[0] = insn & 0xfff0ffff;
4706 install_preload (gdbarch, regs, dsc, rn);
4712 thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
4713 struct regcache *regs, arm_displaced_step_closure *dsc)
4715 unsigned int rn = bits (insn1, 0, 3);
4716 unsigned int u_bit = bit (insn1, 7);
4717 int imm12 = bits (insn2, 0, 11);
4720 if (rn != ARM_PC_REGNUM)
4721 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
4723 /* PC is only allowed to be used in PLI (immediate, literal) Encoding T3, and
4724 PLD (literal) Encoding T1. */
4725 if (debug_displaced)
4726 fprintf_unfiltered (gdb_stdlog,
4727 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
4728 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
4734 /* Rewrite instruction {pli/pld} PC imm12 into:
4735 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
4737 {pli/pld} [r0, r1]
4739 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
4741 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4742 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4744 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
4746 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
4747 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
4748 dsc->u.preload.immed = 0;
4750 /* {pli/pld} [r0, r1] */
4751 dsc->modinsn[0] = insn1 & 0xfff0;
4752 dsc->modinsn[1] = 0xf001;
4755 dsc->cleanup = &cleanup_preload;
4759 /* Preload instructions with register offset. */
4762 install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
4763 arm_displaced_step_closure *dsc, unsigned int rn,
4766 ULONGEST rn_val, rm_val;
4768 /* Preload register-offset instructions:
4770 {pli/pld} [rn, rm {, shift}]
4771 ->
4772 {pli/pld} [r0, r1 {, shift}]. */
4774 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4775 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4776 rn_val = displaced_read_reg (regs, dsc, rn);
4777 rm_val = displaced_read_reg (regs, dsc, rm);
4778 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4779 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
4780 dsc->u.preload.immed = 0;
4782 dsc->cleanup = &cleanup_preload;
4786 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
4787 struct regcache *regs,
4788 arm_displaced_step_closure *dsc)
4790 unsigned int rn = bits (insn, 16, 19);
4791 unsigned int rm = bits (insn, 0, 3);
4794 if (!insn_references_pc (insn, 0x000f000ful))
4795 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
4797 if (debug_displaced)
4798 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4799 (unsigned long) insn);
4801 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
4803 install_preload_reg (gdbarch, regs, dsc, rn, rm);
4807 /* Copy/cleanup coprocessor load and store instructions. */
4810 cleanup_copro_load_store (struct gdbarch *gdbarch,
4811 struct regcache *regs,
4812 arm_displaced_step_closure *dsc)
4814 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
4816 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4818 if (dsc->u.ldst.writeback)
4819 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
4823 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
4824 arm_displaced_step_closure *dsc,
4825 int writeback, unsigned int rn)
4829 /* Coprocessor load/store instructions:
4831 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
4832 ->
4833 {stc/stc2} [r0, #+/-imm].
4835 ldc/ldc2 are handled identically. */
4837 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4838 rn_val = displaced_read_reg (regs, dsc, rn);
4839 /* PC should be 4-byte aligned. */
4840 rn_val = rn_val & 0xfffffffc;
4841 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4843 dsc->u.ldst.writeback = writeback;
4844 dsc->u.ldst.rn = rn;
4846 dsc->cleanup = &cleanup_copro_load_store;
4850 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
4851 struct regcache *regs,
4852 arm_displaced_step_closure *dsc)
4854 unsigned int rn = bits (insn, 16, 19);
4856 if (!insn_references_pc (insn, 0x000f0000ul))
4857 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
4859 if (debug_displaced)
4860 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4861 "load/store insn %.8lx\n", (unsigned long) insn);
4863 dsc->modinsn[0] = insn & 0xfff0ffff;
4865 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
4871 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
4872 uint16_t insn2, struct regcache *regs,
4873 arm_displaced_step_closure *dsc)
4875 unsigned int rn = bits (insn1, 0, 3);
4877 if (rn != ARM_PC_REGNUM)
4878 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
4879 "copro load/store", dsc);
4881 if (debug_displaced)
4882 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4883 "load/store insn %.4x%.4x\n", insn1, insn2);
4885 dsc->modinsn[0] = insn1 & 0xfff0;
4886 dsc->modinsn[1] = insn2;
4889 /* This function is called to copy the LDC/LDC2/VLDR instructions, which
4890 don't support writeback, so pass 0.  */
4891 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
4896 /* Clean up branch instructions (actually perform the branch, by setting
4897 PC).  */
4900 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
4901 arm_displaced_step_closure *dsc)
4903 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
4904 int branch_taken = condition_true (dsc->u.branch.cond, status);
4905 enum pc_write_style write_pc = dsc->u.branch.exchange
4906 ? BX_WRITE_PC : BRANCH_WRITE_PC;
4911 if (dsc->u.branch.link)
4913 /* The value of LR should be the address of the next insn after the
4914 current one.  In order not to confuse logic handling a later `bx lr'
4915 insn, if the current insn is Thumb, bit 0 of the LR value should be set to 1.
4916 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
4919 next_insn_addr |= 0x1;
4921 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
4925 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
4928 /* Copy B/BL/BLX instructions with immediate destinations. */
4931 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
4932 arm_displaced_step_closure *dsc,
4933 unsigned int cond, int exchange, int link, long offset)
4935 /* Implement "BL<cond> <label>" as:
4937 Preparation: cond <- instruction condition
4938 Insn: mov r0, r0 (nop)
4939 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
4941 B<cond> similar, but don't set r14 in cleanup. */
4943 dsc->u.branch.cond = cond;
4944 dsc->u.branch.link = link;
4945 dsc->u.branch.exchange = exchange;
4947 dsc->u.branch.dest = dsc->insn_addr;
4948 if (link && exchange)
4949 /* For BLX, the offset is computed from Align (PC, 4).  */
4950 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
4953 dsc->u.branch.dest += 4 + offset;
4955 dsc->u.branch.dest += 8 + offset;
4957 dsc->cleanup = &cleanup_branch;
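/* Editorial worked example (not part of the original source): an ARM "bl"
   at 0x8000 with a decoded offset of 0x100 is stepped as a NOP in the
   scratch space; if the condition held, cleanup_branch then sets LR to
   0x8004 and writes 0x8000 + 8 + 0x100 == 0x8108 to the PC.  */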
4960 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
4961 struct regcache *regs, arm_displaced_step_closure *dsc)
4963 unsigned int cond = bits (insn, 28, 31);
4964 int exchange = (cond == 0xf);
4965 int link = exchange || bit (insn, 24);
4968 if (debug_displaced)
4969 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
4970 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
4971 (unsigned long) insn);
4973 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
4974 then arrange the switch into Thumb mode. */
4975 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
4977 offset = bits (insn, 0, 23) << 2;
4979 if (bit (offset, 25))
4980 offset = offset | ~0x3ffffff;
4982 dsc->modinsn[0] = ARM_NOP;
4984 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
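/* Editorial sketch (not part of the original source): a standalone version
   of the offset decoding above for a plain B/BL; the BLX case additionally
   folds in bit 24 and sets bit 0 before sign extension.  For the encoding
   0xebfffffe ("bl" with imm24 == 0xfffffe) this yields -8, so the
   destination is insn_addr + 8 + (-8), i.e. the BL instruction itself.  */

static int
example_b_bl_offset (unsigned int insn)
{
  int offset = (insn & 0x00ffffff) << 2;	/* imm24 scaled by 4.  */

  if (offset & 0x02000000)			/* Bit 25 is the sign bit.  */
    offset |= ~0x03ffffff;			/* Sign-extend to 32 bits.  */

  return offset;
}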
4989 thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
4990 uint16_t insn2, struct regcache *regs,
4991 arm_displaced_step_closure *dsc)
4993 int link = bit (insn2, 14);
4994 int exchange = link && !bit (insn2, 12);
4997 int j1 = bit (insn2, 13);
4998 int j2 = bit (insn2, 11);
4999 int s = sbits (insn1, 10, 10);
5000 int i1 = !(j1 ^ bit (insn1, 10));
5001 int i2 = !(j2 ^ bit (insn1, 10));
5003 if (!link && !exchange) /* B */
5005 offset = (bits (insn2, 0, 10) << 1);
5006 if (bit (insn2, 12)) /* Encoding T4 */
5008 offset |= (bits (insn1, 0, 9) << 12)
5014 else /* Encoding T3 */
5016 offset |= (bits (insn1, 0, 5) << 12)
5020 cond = bits (insn1, 6, 9);
5025 offset = (bits (insn1, 0, 9) << 12);
5026 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
5027 offset |= exchange ?
5028 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
5031 if (debug_displaced)
5032 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
5033 "%.4x %.4x with offset %.8lx\n",
5034 link ? (exchange) ? "blx" : "bl" : "b",
5035 insn1, insn2, offset);
5037 dsc->modinsn[0] = THUMB_NOP;
5039 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
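/* Editorial note (not part of the original source): the bit shuffling above
   follows the ARM ARM rule for 32-bit Thumb branches, where the immediate
   is SignExtend(S:I1:I2:imm10:imm11:'0') with I1 = NOT(J1 EOR S) and
   I2 = NOT(J2 EOR S).  For instance, the encoding f000 f800 has S == 0,
   J1 == J2 == 1 and all immediate bits clear, giving I1 == I2 == 0 and an
   offset of 0, i.e. a BL to the instruction following the 4-byte BL.  */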
5043 /* Copy B Thumb instructions. */
5045 thumb_copy_b (struct gdbarch *gdbarch, uint16_t insn,
5046 arm_displaced_step_closure *dsc)
5048 unsigned int cond = 0;
5050 unsigned short bit_12_15 = bits (insn, 12, 15);
5051 CORE_ADDR from = dsc->insn_addr;
5053 if (bit_12_15 == 0xd)
5055 /* offset = SignExtend (imm8:0, 32) */
5056 offset = sbits ((insn << 1), 0, 8);
5057 cond = bits (insn, 8, 11);
5059 else if (bit_12_15 == 0xe) /* Encoding T2 */
5061 offset = sbits ((insn << 1), 0, 11);
5065 if (debug_displaced)
5066 fprintf_unfiltered (gdb_stdlog,
5067 "displaced: copying b immediate insn %.4x "
5068 "with offset %d\n", insn, offset);
5070 dsc->u.branch.cond = cond;
5071 dsc->u.branch.link = 0;
5072 dsc->u.branch.exchange = 0;
5073 dsc->u.branch.dest = from + 4 + offset;
5075 dsc->modinsn[0] = THUMB_NOP;
5077 dsc->cleanup = &cleanup_branch;
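/* Editorial worked example (not part of the original source): the 16-bit
   encoding 0xd0fe is "beq" with imm8 == 0xfe; the code above decodes
   cond == EQ and offset == -4, so the destination is from + 4 - 4, i.e.
   the branch instruction itself.  */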
5082 /* Copy BX/BLX with register-specified destinations. */
5085 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
5086 arm_displaced_step_closure *dsc, int link,
5087 unsigned int cond, unsigned int rm)
5089 /* Implement "{BX,BLX}<cond> <reg>" as:
5091 Preparation: cond <- instruction condition
5092 Insn: mov r0, r0 (nop)
5093 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5095 Don't set r14 in cleanup for BX. */
5097 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
5099 dsc->u.branch.cond = cond;
5100 dsc->u.branch.link = link;
5102 dsc->u.branch.exchange = 1;
5104 dsc->cleanup = &cleanup_branch;
5108 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
5109 struct regcache *regs, arm_displaced_step_closure *dsc)
5111 unsigned int cond = bits (insn, 28, 31);
5114 int link = bit (insn, 5);
5115 unsigned int rm = bits (insn, 0, 3);
5117 if (debug_displaced)
5118 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
5119 (unsigned long) insn);
5121 dsc->modinsn[0] = ARM_NOP;
5123 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
5128 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
5129 struct regcache *regs,
5130 arm_displaced_step_closure *dsc)
5132 int link = bit (insn, 7);
5133 unsigned int rm = bits (insn, 3, 6);
5135 if (debug_displaced)
5136 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
5137 (unsigned short) insn);
5139 dsc->modinsn[0] = THUMB_NOP;
5141 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
5147 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
5150 cleanup_alu_imm (struct gdbarch *gdbarch,
5151 struct regcache *regs, arm_displaced_step_closure *dsc)
5153 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5154 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5155 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5156 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5160 arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5161 arm_displaced_step_closure *dsc)
5163 unsigned int rn = bits (insn, 16, 19);
5164 unsigned int rd = bits (insn, 12, 15);
5165 unsigned int op = bits (insn, 21, 24);
5166 int is_mov = (op == 0xd);
5167 ULONGEST rd_val, rn_val;
5169 if (!insn_references_pc (insn, 0x000ff000ul))
5170 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
5172 if (debug_displaced)
5173 fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
5174 "%.8lx\n", is_mov ? "move" : "ALU",
5175 (unsigned long) insn);
5177 /* Instruction is of form:
5179 <op><cond> rd, [rn,] #imm
5183 Preparation: tmp1, tmp2 <- r0, r1;
5185 Insn: <op><cond> r0, r1, #imm
5186 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5189 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5190 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5191 rn_val = displaced_read_reg (regs, dsc, rn);
5192 rd_val = displaced_read_reg (regs, dsc, rd);
5193 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5194 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5198 dsc->modinsn[0] = insn & 0xfff00fff;
5200 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
5202 dsc->cleanup = &cleanup_alu_imm;
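/* Editorial worked example (not part of the original source): for
   "add r2, pc, #4" (0xe28f2004) the masking above yields
   (insn & 0xfff00fff) | 0x10000 == 0xe2810004, i.e. "add r0, r1, #4".
   r1 has been preloaded with the PC value the original instruction would
   have seen (from + 8) and r0 with the old value of r2; after the
   single-step, cleanup_alu_imm copies the result from r0 into r2 and
   restores r0 and r1.  */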
5208 thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
5209 uint16_t insn2, struct regcache *regs,
5210 arm_displaced_step_closure *dsc)
5212 unsigned int op = bits (insn1, 5, 8);
5213 unsigned int rn, rm, rd;
5214 ULONGEST rd_val, rn_val;
5216 rn = bits (insn1, 0, 3); /* Rn */
5217 rm = bits (insn2, 0, 3); /* Rm */
5218 rd = bits (insn2, 8, 11); /* Rd */
5220 /* This routine is only called for the MOV instruction.  */
5221 gdb_assert (op == 0x2 && rn == 0xf);
5223 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
5224 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
5226 if (debug_displaced)
5227 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
5228 "ALU", insn1, insn2);
5230 /* Instruction is of form:
5232 <op><cond> rd, [rn,] #imm
5236 Preparation: tmp1, tmp2 <- r0, r1;
5238 Insn: <op><cond> r0, r1, #imm
5239 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5242 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5243 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5244 rn_val = displaced_read_reg (regs, dsc, rn);
5245 rd_val = displaced_read_reg (regs, dsc, rd);
5246 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5247 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5250 dsc->modinsn[0] = insn1;
5251 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
5254 dsc->cleanup = &cleanup_alu_imm;
5259 /* Copy/cleanup arithmetic/logic insns with register RHS. */
5262 cleanup_alu_reg (struct gdbarch *gdbarch,
5263 struct regcache *regs, arm_displaced_step_closure *dsc)
5268 rd_val = displaced_read_reg (regs, dsc, 0);
5270 for (i = 0; i < 3; i++)
5271 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5273 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5277 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
5278 arm_displaced_step_closure *dsc,
5279 unsigned int rd, unsigned int rn, unsigned int rm)
5281 ULONGEST rd_val, rn_val, rm_val;
5283 /* Instruction is of form:
5285 <op><cond> rd, [rn,] rm [, <shift>]
5289 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
5290 r0, r1, r2 <- rd, rn, rm
5291 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
5292 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
5295 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5296 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5297 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5298 rd_val = displaced_read_reg (regs, dsc, rd);
5299 rn_val = displaced_read_reg (regs, dsc, rn);
5300 rm_val = displaced_read_reg (regs, dsc, rm);
5301 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5302 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5303 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5306 dsc->cleanup = &cleanup_alu_reg;
5310 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5311 arm_displaced_step_closure *dsc)
5313 unsigned int op = bits (insn, 21, 24);
5314 int is_mov = (op == 0xd);
5316 if (!insn_references_pc (insn, 0x000ff00ful))
5317 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
5319 if (debug_displaced)
5320 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
5321 is_mov ? "move" : "ALU", (unsigned long) insn);
5324 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
5326 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
5328 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
5334 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
5335 struct regcache *regs,
5336 arm_displaced_step_closure *dsc)
5340 rm = bits (insn, 3, 6);
5341 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
5343 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
5344 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
5346 if (debug_displaced)
5347 fprintf_unfiltered (gdb_stdlog, "displaced: copying ALU reg insn %.4x\n",
5348 (unsigned short) insn);
5350 dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
5352 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
5357 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
5360 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
5361 struct regcache *regs,
5362 arm_displaced_step_closure *dsc)
5364 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5367 for (i = 0; i < 4; i++)
5368 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5370 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5374 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
5375 arm_displaced_step_closure *dsc,
5376 unsigned int rd, unsigned int rn, unsigned int rm,
5380 ULONGEST rd_val, rn_val, rm_val, rs_val;
5382 /* Instruction is of form:
5384 <op><cond> rd, [rn,] rm, <shift> rs
5388 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
5389 r0, r1, r2, r3 <- rd, rn, rm, rs
5390 Insn: <op><cond> r0, r1, r2, <shift> r3
5392 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
5396 for (i = 0; i < 4; i++)
5397 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
5399 rd_val = displaced_read_reg (regs, dsc, rd);
5400 rn_val = displaced_read_reg (regs, dsc, rn);
5401 rm_val = displaced_read_reg (regs, dsc, rm);
5402 rs_val = displaced_read_reg (regs, dsc, rs);
5403 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5404 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5405 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5406 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
5408 dsc->cleanup = &cleanup_alu_shifted_reg;
5412 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
5413 struct regcache *regs,
5414 arm_displaced_step_closure *dsc)
5416 unsigned int op = bits (insn, 21, 24);
5417 int is_mov = (op == 0xd);
5418 unsigned int rd, rn, rm, rs;
5420 if (!insn_references_pc (insn, 0x000fff0ful))
5421 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
5423 if (debug_displaced)
5424 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
5425 "%.8lx\n", is_mov ? "move" : "ALU",
5426 (unsigned long) insn);
5428 rn = bits (insn, 16, 19);
5429 rm = bits (insn, 0, 3);
5430 rs = bits (insn, 8, 11);
5431 rd = bits (insn, 12, 15);
5434 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
5436 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
5438 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
5443 /* Clean up load instructions. */
5446 cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
5447 arm_displaced_step_closure *dsc)
5449 ULONGEST rt_val, rt_val2 = 0, rn_val;
5451 rt_val = displaced_read_reg (regs, dsc, 0);
5452 if (dsc->u.ldst.xfersize == 8)
5453 rt_val2 = displaced_read_reg (regs, dsc, 1);
5454 rn_val = displaced_read_reg (regs, dsc, 2);
5456 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5457 if (dsc->u.ldst.xfersize > 4)
5458 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5459 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5460 if (!dsc->u.ldst.immed)
5461 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5463 /* Handle register writeback. */
5464 if (dsc->u.ldst.writeback)
5465 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5466 /* Put result in right place. */
5467 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
5468 if (dsc->u.ldst.xfersize == 8)
5469 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
5472 /* Clean up store instructions. */
5475 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
5476 arm_displaced_step_closure *dsc)
5478 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
5480 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5481 if (dsc->u.ldst.xfersize > 4)
5482 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5483 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5484 if (!dsc->u.ldst.immed)
5485 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5486 if (!dsc->u.ldst.restore_r4)
5487 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
5490 if (dsc->u.ldst.writeback)
5491 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5494 /* Copy "extra" load/store instructions. These are halfword/doubleword
5495 transfers, which have a different encoding to byte/word transfers. */
5498 arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
5499 struct regcache *regs, arm_displaced_step_closure *dsc)
5501 unsigned int op1 = bits (insn, 20, 24);
5502 unsigned int op2 = bits (insn, 5, 6);
5503 unsigned int rt = bits (insn, 12, 15);
5504 unsigned int rn = bits (insn, 16, 19);
5505 unsigned int rm = bits (insn, 0, 3);
5506 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
5507 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
5508 int immed = (op1 & 0x4) != 0;
5510 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
5512 if (!insn_references_pc (insn, 0x000ff00ful))
5513 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
5515 if (debug_displaced)
5516 fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
5517 "insn %.8lx\n", unprivileged ? "unprivileged " : "",
5518 (unsigned long) insn);
5520 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
5523 internal_error (__FILE__, __LINE__,
5524 _("copy_extra_ld_st: instruction decode error"));
5526 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5527 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5528 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5530 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5532 rt_val = displaced_read_reg (regs, dsc, rt);
5533 if (bytesize[opcode] == 8)
5534 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
5535 rn_val = displaced_read_reg (regs, dsc, rn);
5537 rm_val = displaced_read_reg (regs, dsc, rm);
5539 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5540 if (bytesize[opcode] == 8)
5541 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
5542 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5544 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5547 dsc->u.ldst.xfersize = bytesize[opcode];
5548 dsc->u.ldst.rn = rn;
5549 dsc->u.ldst.immed = immed;
5550 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
5551 dsc->u.ldst.restore_r4 = 0;
5554 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
5556 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
5557 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5559 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
5561 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
5562 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5564 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
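/* Editorial worked example (not part of the original source): for
   "ldrh r3, [pc, #2]" (0xe1df30b2), op1 == 0x1d and op2 == 0x1, so the
   formula above gives opcode == ((0x1 << 2) | 0x1 | 0x2) - 4 == 3;
   load[3] == 1 and bytesize[3] == 2, i.e. a halfword load.  The modified
   instruction is the immediate form "ldrh r0, [r2, #2]", with r2
   preloaded with the PC value and cleanup_load copying the loaded value
   back into r3 afterwards.  */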
5569 /* Copy byte/half word/word loads and stores. */
5572 install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5573 arm_displaced_step_closure *dsc, int load,
5574 int immed, int writeback, int size, int usermode,
5575 int rt, int rm, int rn)
5577 ULONGEST rt_val, rn_val, rm_val = 0;
5579 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5580 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5582 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5584 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
5586 rt_val = displaced_read_reg (regs, dsc, rt);
5587 rn_val = displaced_read_reg (regs, dsc, rn);
5589 rm_val = displaced_read_reg (regs, dsc, rm);
5591 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5592 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5594 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5596 dsc->u.ldst.xfersize = size;
5597 dsc->u.ldst.rn = rn;
5598 dsc->u.ldst.immed = immed;
5599 dsc->u.ldst.writeback = writeback;
5601 /* To write PC we can do:
5603 Before this sequence of instructions:
5604 r0 is the PC value obtained from displaced_read_reg, so r0 = from + 8;
5605 r2 is the Rn value obtained from displaced_read_reg.
5607 Insn1: push {pc} Write address of STR instruction + offset on stack
5608 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
5609 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
5610 = addr(Insn1) + offset - addr(Insn3) - 8
5612 Insn4: add r4, r4, #8 r4 = offset - 8
5613 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
5615 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
5617 Otherwise we don't know what value to write for PC, since the offset is
5618 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
5619 of this can be found in Section "Saving from r15" in
5620 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
5622 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
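/* Editorial worked example (not part of the original source): suppose the
   original "str pc, [...]" is at from == 0x8000 and this core stores
   PC + 8.  In the scratch sequence above, Insn2 leaves
   r4 == addr(Insn1) + 8, Insn3 subtracts the PC it reads
   (addr(Insn1) + 16) giving r4 == -8, Insn4 brings that to 0, and Insn5
   leaves r0 == 0x8008 -- exactly the value the unmodified store would
   have written at its original location.  */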
5627 thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
5628 uint16_t insn2, struct regcache *regs,
5629 arm_displaced_step_closure *dsc, int size)
5631 unsigned int u_bit = bit (insn1, 7);
5632 unsigned int rt = bits (insn2, 12, 15);
5633 int imm12 = bits (insn2, 0, 11);
5636 if (debug_displaced)
5637 fprintf_unfiltered (gdb_stdlog,
5638 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
5639 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
5645 /* Rewrite instruction LDR Rt imm12 into:
5647 Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12
5649 LDR R0, R2, R3
5651 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2]. */
5654 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5655 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5656 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5658 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5660 pc_val = pc_val & 0xfffffffc;
5662 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
5663 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
5667 dsc->u.ldst.xfersize = size;
5668 dsc->u.ldst.immed = 0;
5669 dsc->u.ldst.writeback = 0;
5670 dsc->u.ldst.restore_r4 = 0;
5672 /* LDR R0, R2, R3 */
5673 dsc->modinsn[0] = 0xf852;
5674 dsc->modinsn[1] = 0x3;
5677 dsc->cleanup = &cleanup_load;
5683 thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
5684 uint16_t insn2, struct regcache *regs,
5685 arm_displaced_step_closure *dsc,
5686 int writeback, int immed)
5688 unsigned int rt = bits (insn2, 12, 15);
5689 unsigned int rn = bits (insn1, 0, 3);
5690 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
5691 /* In LDR (register), there is also a register Rm, which is not allowed to
5692 be PC, so we don't have to check it. */
5694 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
5695 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
5698 if (debug_displaced)
5699 fprintf_unfiltered (gdb_stdlog,
5700 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
5701 rt, rn, insn1, insn2);
5703 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
5706 dsc->u.ldst.restore_r4 = 0;
5709 /* ldr[b]<cond> rt, [rn, #imm], etc.
5711 ldr[b]<cond> r0, [r2, #imm]. */
5713 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5714 dsc->modinsn[1] = insn2 & 0x0fff;
5717 /* ldr[b]<cond> rt, [rn, rm], etc.
5719 ldr[b]<cond> r0, [r2, r3]. */
5721 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5722 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
5732 arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
5733 struct regcache *regs,
5734 arm_displaced_step_closure *dsc,
5735 int load, int size, int usermode)
5737 int immed = !bit (insn, 25);
5738 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
5739 unsigned int rt = bits (insn, 12, 15);
5740 unsigned int rn = bits (insn, 16, 19);
5741 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
5743 if (!insn_references_pc (insn, 0x000ff00ful))
5744 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
5746 if (debug_displaced)
5747 fprintf_unfiltered (gdb_stdlog,
5748 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
5749 load ? (size == 1 ? "ldrb" : "ldr")
5750 : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
5752 (unsigned long) insn);
5754 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
5755 usermode, rt, rm, rn);
5757 if (load || rt != ARM_PC_REGNUM)
5759 dsc->u.ldst.restore_r4 = 0;
5762 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
5764 {ldr,str}[b]<cond> r0, [r2, #imm]. */
5765 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5767 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
5769 {ldr,str}[b]<cond> r0, [r2, r3]. */
5770 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5774 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
5775 dsc->u.ldst.restore_r4 = 1;
5776 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
5777 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
5778 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
5779 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
5780 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
5784 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
5786 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
5791 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5796 /* Cleanup LDM instructions with fully-populated register list. This is an
5797 unfortunate corner case: it's impossible to implement correctly by modifying
5798 the instruction. The issue is as follows: we have an instruction,
5802 which we must rewrite to avoid loading PC. A possible solution would be to
5803 do the load in two halves, something like (with suitable cleanup
5807 ldm[id][ab] r8!, {r0-r7}
5809 ldm[id][ab] r8, {r7-r14}
5812 but at present there's no suitable place for <temp>, since the scratch space
5813 is overwritten before the cleanup routine is called. For now, we simply
5814 emulate the instruction. */
5817 cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
5818 arm_displaced_step_closure *dsc)
5820 int inc = dsc->u.block.increment;
5821 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
5822 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
5823 uint32_t regmask = dsc->u.block.regmask;
5824 int regno = inc ? 0 : 15;
5825 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
5826 int exception_return = dsc->u.block.load && dsc->u.block.user
5827 && (regmask & 0x8000) != 0;
5828 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5829 int do_transfer = condition_true (dsc->u.block.cond, status);
5830 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5835 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
5836 sensible we can do here. Complain loudly. */
5837 if (exception_return)
5838 error (_("Cannot single-step exception return"));
5840 /* We don't handle any stores here for now. */
5841 gdb_assert (dsc->u.block.load != 0);
5843 if (debug_displaced)
5844 fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
5845 "%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
5846 dsc->u.block.increment ? "inc" : "dec",
5847 dsc->u.block.before ? "before" : "after");
5854 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
5857 while (regno >= 0 && (regmask & (1 << regno)) == 0)
5860 xfer_addr += bump_before;
5862 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
5863 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
5865 xfer_addr += bump_after;
5867 regmask &= ~(1 << regno);
5870 if (dsc->u.block.writeback)
5871 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
5875 /* Clean up an STM which included the PC in the register list. */
5878 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
5879 arm_displaced_step_closure *dsc)
5881 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5882 int store_executed = condition_true (dsc->u.block.cond, status);
5883 CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
5884 CORE_ADDR stm_insn_addr;
5887 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5889 /* If condition code fails, there's nothing else to do. */
5890 if (!store_executed)
5893 if (dsc->u.block.increment)
5895 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
5897 if (dsc->u.block.before)
5902 pc_stored_at = dsc->u.block.xfer_addr;
5904 if (dsc->u.block.before)
5908 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
5909 stm_insn_addr = dsc->scratch_base;
5910 offset = pc_val - stm_insn_addr;
5912 if (debug_displaced)
5913 fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
5914 "STM instruction\n", offset);
5916 /* Rewrite the stored PC to the proper value for the non-displaced original
5917 instruction.  */
5918 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
5919 dsc->insn_addr + offset);
5922 /* Clean up an LDM which includes the PC in the register list. We clumped all
5923 the registers in the transferred list into a contiguous range r0...rX (to
5924 avoid loading PC directly and losing control of the debugged program), so we
5925 must undo that here. */
5928 cleanup_block_load_pc (struct gdbarch *gdbarch,
5929 struct regcache *regs,
5930 arm_displaced_step_closure *dsc)
5932 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5933 int load_executed = condition_true (dsc->u.block.cond, status);
5934 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
5935 unsigned int regs_loaded = bitcount (mask);
5936 unsigned int num_to_shuffle = regs_loaded, clobbered;
5938 /* The method employed here will fail if the register list is fully populated
5939 (we need to avoid loading PC directly). */
5940 gdb_assert (num_to_shuffle < 16);
5945 clobbered = (1 << num_to_shuffle) - 1;
5947 while (num_to_shuffle > 0)
5949 if ((mask & (1 << write_reg)) != 0)
5951 unsigned int read_reg = num_to_shuffle - 1;
5953 if (read_reg != write_reg)
5955 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
5956 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
5957 if (debug_displaced)
5958 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
5959 "loaded register r%d to r%d\n"), read_reg,
5962 else if (debug_displaced)
5963 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
5964 "r%d already in the right place\n"),
5967 clobbered &= ~(1 << write_reg);
5975 /* Restore any registers we scribbled over. */
5976 for (write_reg = 0; clobbered != 0; write_reg++)
5978 if ((clobbered & (1 << write_reg)) != 0)
5980 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
5982 if (debug_displaced)
5983 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
5984 "clobbered register r%d\n"), write_reg);
5985 clobbered &= ~(1 << write_reg);
5989 /* Perform register writeback manually. */
5990 if (dsc->u.block.writeback)
5992 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
5994 if (dsc->u.block.increment)
5995 new_rn_val += regs_loaded * 4;
5997 new_rn_val -= regs_loaded * 4;
5999 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
6004 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
6005 in user-level code (in particular exception return, ldm rn, {...pc}^). */
6008 arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
6009 struct regcache *regs,
6010 arm_displaced_step_closure *dsc)
6012 int load = bit (insn, 20);
6013 int user = bit (insn, 22);
6014 int increment = bit (insn, 23);
6015 int before = bit (insn, 24);
6016 int writeback = bit (insn, 21);
6017 int rn = bits (insn, 16, 19);
6019 /* Block transfers which don't mention PC can be run directly
6020 out-of-line.  */
6021 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
6022 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
6024 if (rn == ARM_PC_REGNUM)
6026 warning (_("displaced: Unpredictable LDM or STM with "
6027 "base register r15"));
6028 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
6031 if (debug_displaced)
6032 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6033 "%.8lx\n", (unsigned long) insn);
6035 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
6036 dsc->u.block.rn = rn;
6038 dsc->u.block.load = load;
6039 dsc->u.block.user = user;
6040 dsc->u.block.increment = increment;
6041 dsc->u.block.before = before;
6042 dsc->u.block.writeback = writeback;
6043 dsc->u.block.cond = bits (insn, 28, 31);
6045 dsc->u.block.regmask = insn & 0xffff;
6049 if ((insn & 0xffff) == 0xffff)
6051 /* LDM with a fully-populated register list. This case is
6052 particularly tricky. Implement for now by fully emulating the
6053 instruction (which might not behave perfectly in all cases, but
6054 these instructions should be rare enough for that not to matter
6055 in practice).  */
6056 dsc->modinsn[0] = ARM_NOP;
6058 dsc->cleanup = &cleanup_block_load_all;
6062 /* LDM of a list of registers which includes PC. Implement by
6063 rewriting the list of registers to be transferred into a
6064 contiguous chunk r0...rX before doing the transfer, then shuffling
6065 registers into the correct places in the cleanup routine. */
6066 unsigned int regmask = insn & 0xffff;
6067 unsigned int num_in_list = bitcount (regmask), new_regmask;
6070 for (i = 0; i < num_in_list; i++)
6071 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6073 /* Writeback makes things complicated. We need to avoid clobbering
6074 the base register with one of the registers in our modified
6075 register list, but just using a different register can't work in
6076 all cases, e.g.:
6078 ldm r14!, {r0-r13,pc}
6080 which would need to be rewritten as:
6082 ldm rN!, {r0-r14}
6084 but that can't work, because there's no free register for N.
6086 Solve this by turning off the writeback bit, and emulating
6087 writeback manually in the cleanup routine. */
6092 new_regmask = (1 << num_in_list) - 1;
6094 if (debug_displaced)
6095 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6096 "{..., pc}: original reg list %.4x, modified "
6097 "list %.4x\n"), rn, writeback ? "!" : "",
6098 (int) insn & 0xffff, new_regmask);
6100 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
6102 dsc->cleanup = &cleanup_block_load_pc;
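/* Editorial worked example (not part of the original source): the original
   instruction "ldm r0, {r1, r2, pc}" has three registers in its list, so
   new_regmask == 0x7 and the instruction is stepped as
   "ldm r0, {r0, r1, r2}".  cleanup_block_load_pc then moves r2 into the
   PC, r1 into r2 and r0 into r1, and finally restores the clobbered r0
   from dsc->tmp[0].  */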
6107 /* STM of a list of registers which includes PC. Run the instruction
6108 as-is, but out of line: this will store the wrong value for the PC,
6109 so we must manually fix up the memory in the cleanup routine.
6110 Doing things this way has the advantage that we can auto-detect
6111 the offset of the PC write (which is architecture-dependent) in
6112 the cleanup routine. */
6113 dsc->modinsn[0] = insn;
6115 dsc->cleanup = &cleanup_block_store_pc;
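/* Editorial worked example (not part of the original source): if the
   scratch copy of "stmia r0, {r1, pc}" runs at scratch_base == 0x20000
   and the word stored for the PC turns out to be 0x20008, the detected
   offset is 8, and cleanup_block_store_pc patches that memory word to
   insn_addr + 8, the value the instruction would have stored at its
   original location.  */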
6122 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6123 struct regcache *regs,
6124 arm_displaced_step_closure *dsc)
6126 int rn = bits (insn1, 0, 3);
6127 int load = bit (insn1, 4);
6128 int writeback = bit (insn1, 5);
6130 /* Block transfers which don't mention PC can be run directly
6131 out-of-line.  */
6132 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
6133 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
6135 if (rn == ARM_PC_REGNUM)
6137 warning (_("displaced: Unpredictable LDM or STM with "
6138 "base register r15"));
6139 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6140 "unpredictable ldm/stm", dsc);
6143 if (debug_displaced)
6144 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6145 "%.4x%.4x\n", insn1, insn2);
6147 /* Clear bit 13, since it should always be zero.  */
6148 dsc->u.block.regmask = (insn2 & 0xdfff);
6149 dsc->u.block.rn = rn;
6151 dsc->u.block.load = load;
6152 dsc->u.block.user = 0;
6153 dsc->u.block.increment = bit (insn1, 7);
6154 dsc->u.block.before = bit (insn1, 8);
6155 dsc->u.block.writeback = writeback;
6156 dsc->u.block.cond = INST_AL;
6157 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
6161 if (dsc->u.block.regmask == 0xffff)
6163 /* This case cannot happen: bit 13 was cleared above, so the register list is never fully populated.  */
6168 unsigned int regmask = dsc->u.block.regmask;
6169 unsigned int num_in_list = bitcount (regmask), new_regmask;
6172 for (i = 0; i < num_in_list; i++)
6173 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6178 new_regmask = (1 << num_in_list) - 1;
6180 if (debug_displaced)
6181 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6182 "{..., pc}: original reg list %.4x, modified "
6183 "list %.4x\n"), rn, writeback ? "!" : "",
6184 (int) dsc->u.block.regmask, new_regmask);
6186 dsc->modinsn[0] = insn1;
6187 dsc->modinsn[1] = (new_regmask & 0xffff);
6190 dsc->cleanup = &cleanup_block_load_pc;
6195 dsc->modinsn[0] = insn1;
6196 dsc->modinsn[1] = insn2;
6198 dsc->cleanup = &cleanup_block_store_pc;
6203 /* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
6204 This is used to avoid a dependency on BFD's bfd_endian enum. */
6207 arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len,
6210 return read_memory_unsigned_integer (memaddr, len,
6211 (enum bfd_endian) byte_order);
6214 /* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
6217 arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self,
6220 return gdbarch_addr_bits_remove (self->regcache->arch (), val);
6223 /* Wrapper over syscall_next_pc for use in get_next_pcs. */
6226 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self)
6231 /* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
6234 arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self)
6236 return arm_is_thumb (self->regcache);
6239 /* single_step() is called just before we want to resume the inferior,
6240 if we want to single-step it but there is no hardware or kernel
6241 single-step support.  We find the possible targets of the coming
6242 instruction and set breakpoints on them.
6244 std::vector<CORE_ADDR>
6245 arm_software_single_step (struct regcache *regcache)
6247 struct gdbarch *gdbarch = regcache->arch ();
6248 struct arm_get_next_pcs next_pcs_ctx;
6250 arm_get_next_pcs_ctor (&next_pcs_ctx,
6251 &arm_get_next_pcs_ops,
6252 gdbarch_byte_order (gdbarch),
6253 gdbarch_byte_order_for_code (gdbarch),
6257 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
6259 for (CORE_ADDR &pc_ref : next_pcs)
6260 pc_ref = gdbarch_addr_bits_remove (gdbarch, pc_ref);
6265 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6266 for Linux, where some SVC instructions must be treated specially. */
6269 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
6270 arm_displaced_step_closure *dsc)
6272 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
6274 if (debug_displaced)
6275 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
6276 "%.8lx\n", (unsigned long) resume_addr);
6278 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
6282 /* Common copy routine for svc instructions.  */
6285 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
6286 arm_displaced_step_closure *dsc)
6288 /* Preparation: none.
6289 Insn: unmodified svc.
6290 Cleanup: pc <- insn_addr + insn_size. */
6292 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
6293 insn.  */
6294 dsc->wrote_to_pc = 1;
6296 /* Allow OS-specific code to override SVC handling. */
6297 if (dsc->u.svc.copy_svc_os)
6298 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
6301 dsc->cleanup = &cleanup_svc;
6307 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
6308 struct regcache *regs, arm_displaced_step_closure *dsc)
6311 if (debug_displaced)
6312 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
6313 (unsigned long) insn);
6315 dsc->modinsn[0] = insn;
6317 return install_svc (gdbarch, regs, dsc);
6321 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
6322 struct regcache *regs, arm_displaced_step_closure *dsc)
6325 if (debug_displaced)
6326 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
6329 dsc->modinsn[0] = insn;
6331 return install_svc (gdbarch, regs, dsc);
6334 /* Copy undefined instructions. */
6337 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
6338 arm_displaced_step_closure *dsc)
6340 if (debug_displaced)
6341 fprintf_unfiltered (gdb_stdlog,
6342 "displaced: copying undefined insn %.8lx\n",
6343 (unsigned long) insn);
6345 dsc->modinsn[0] = insn;
6351 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6352 arm_displaced_step_closure *dsc)
6355 if (debug_displaced)
6356 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
6357 "%.4x %.4x\n", (unsigned short) insn1,
6358 (unsigned short) insn2);
6360 dsc->modinsn[0] = insn1;
6361 dsc->modinsn[1] = insn2;
6367 /* Copy unpredictable instructions. */
6370 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
6371 arm_displaced_step_closure *dsc)
6373 if (debug_displaced)
6374 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
6375 "%.8lx\n", (unsigned long) insn);
6377 dsc->modinsn[0] = insn;
6382 /* The decode_* functions are instruction decoding helpers. They mostly follow
6383 the presentation in the ARM ARM. */
6386 arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
6387 struct regcache *regs,
6388 arm_displaced_step_closure *dsc)
6390 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
6391 unsigned int rn = bits (insn, 16, 19);
6393 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0x1) == 0x0)
6394 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
6395 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0x1) == 0x1)
6396 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
6397 else if ((op1 & 0x60) == 0x20)
6398 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
6399 else if ((op1 & 0x71) == 0x40)
6400 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
6402 else if ((op1 & 0x77) == 0x41)
6403 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6404 else if ((op1 & 0x77) == 0x45)
6405 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
6406 else if ((op1 & 0x77) == 0x51)
6409 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6411 return arm_copy_unpred (gdbarch, insn, dsc);
6413 else if ((op1 & 0x77) == 0x55)
6414 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6415 else if (op1 == 0x57)
6418 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
6419 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
6420 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
6421 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
6422 default: return arm_copy_unpred (gdbarch, insn, dsc);
6424 else if ((op1 & 0x63) == 0x43)
6425 return arm_copy_unpred (gdbarch, insn, dsc);
6426 else if ((op2 & 0x1) == 0x0)
6427 switch (op1 & ~0x80)
6430 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6432 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
6433 case 0x71: case 0x75:
6435 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
6436 case 0x63: case 0x67: case 0x73: case 0x77:
6437 return arm_copy_unpred (gdbarch, insn, dsc);
6439 return arm_copy_undef (gdbarch, insn, dsc);
6442 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
6446 arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
6447 struct regcache *regs,
6448 arm_displaced_step_closure *dsc)
6450 if (bit (insn, 27) == 0)
6451 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
6452 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
6453 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
6456 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
6459 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
6461 case 0x4: case 0x5: case 0x6: case 0x7:
6462 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6465 switch ((insn & 0xe00000) >> 21)
6467 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
6469 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6472 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6475 return arm_copy_undef (gdbarch, insn, dsc);
6480 int rn_f = (bits (insn, 16, 19) == 0xf);
6481 switch ((insn & 0xe00000) >> 21)
6484 /* ldc/ldc2 imm (undefined for rn == pc). */
6485 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
6486 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6489 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6491 case 0x4: case 0x5: case 0x6: case 0x7:
6492 /* ldc/ldc2 lit (undefined for rn != pc). */
6493 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
6494 : arm_copy_undef (gdbarch, insn, dsc);
6497 return arm_copy_undef (gdbarch, insn, dsc);
6502 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
6505 if (bits (insn, 16, 19) == 0xf)
6507 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6509 return arm_copy_undef (gdbarch, insn, dsc);
6513 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6515 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6519 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6521 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6524 return arm_copy_undef (gdbarch, insn, dsc);
6528 /* Decode miscellaneous instructions in dp/misc encoding space. */
6531 arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
6532 struct regcache *regs,
6533 arm_displaced_step_closure *dsc)
6535 unsigned int op2 = bits (insn, 4, 6);
6536 unsigned int op = bits (insn, 21, 22);
6541 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
6544 if (op == 0x1) /* bx. */
6545 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
6547 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
6549 return arm_copy_undef (gdbarch, insn, dsc);
6553 /* Not really supported. */
6554 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
6556 return arm_copy_undef (gdbarch, insn, dsc);
6560 return arm_copy_bx_blx_reg (gdbarch, insn,
6561 regs, dsc); /* blx register. */
6563 return arm_copy_undef (gdbarch, insn, dsc);
6566 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
6570 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
6572 /* Not really supported. */
6573 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
6577 return arm_copy_undef (gdbarch, insn, dsc);
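/* Decode the data-processing and miscellaneous instruction space
   (op bits [27:26] == 0b00).  */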
6582 arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
6583 struct regcache *regs,
6584 arm_displaced_step_closure *dsc)
6587 switch (bits (insn, 20, 24))
6590 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
6593 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
6595 case 0x12: case 0x16:
6596 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
6599 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
6603 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
6605 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
6606 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
6607 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
6608 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
6609 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
6610 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
6611 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
6612 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
6613 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
6614 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
6615 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
6616 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
6617 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
6618 /* 2nd arg means "unprivileged". */
6619 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
6623 /* Should be unreachable. */
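/* Decode load/store word and unsigned byte instructions.  A (bit 25)
   selects between immediate and register offset forms; B is bit 4.  */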
6628 arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
6629 struct regcache *regs,
6630 arm_displaced_step_closure *dsc)
6632 int a = bit (insn, 25), b = bit (insn, 4);
6633 uint32_t op1 = bits (insn, 20, 24);
6635 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
6636 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
6637 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
6638 else if ((!a && (op1 & 0x17) == 0x02)
6639 || (a && (op1 & 0x17) == 0x02 && !b))
6640 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
6641 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
6642 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
6643 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
6644 else if ((!a && (op1 & 0x17) == 0x03)
6645 || (a && (op1 & 0x17) == 0x03 && !b))
6646 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
6647 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
6648 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
6649 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
6650 else if ((!a && (op1 & 0x17) == 0x06)
6651 || (a && (op1 & 0x17) == 0x06 && !b))
6652 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
6653 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
6654 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
6655 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
6656 else if ((!a && (op1 & 0x17) == 0x07)
6657 || (a && (op1 & 0x17) == 0x07 && !b))
6658 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
6660 /* Should be unreachable. */
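/* Decode media instructions (bits [27:25] == 0b011 and bit 4 == 1).  */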
6665 arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
6666 arm_displaced_step_closure *dsc)
6668 switch (bits (insn, 20, 24))
6670 case 0x00: case 0x01: case 0x02: case 0x03:
6671 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
6673 case 0x04: case 0x05: case 0x06: case 0x07:
6674 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
6676 case 0x08: case 0x09: case 0x0a: case 0x0b:
6677 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
6678 return arm_copy_unmodified (gdbarch, insn,
6679 "decode/pack/unpack/saturate/reverse", dsc);
6682 if (bits (insn, 5, 7) == 0) /* op2. */
6684 if (bits (insn, 12, 15) == 0xf)
6685 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
6687 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
6690 return arm_copy_undef (gdbarch, insn, dsc);
6692 case 0x1a: case 0x1b:
6693 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6694 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
6696 return arm_copy_undef (gdbarch, insn, dsc);
6698 case 0x1c: case 0x1d:
6699 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
6701 if (bits (insn, 0, 3) == 0xf)
6702 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
6704 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
6707 return arm_copy_undef (gdbarch, insn, dsc);
6709 case 0x1e: case 0x1f:
6710 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6711 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
6713 return arm_copy_undef (gdbarch, insn, dsc);
6716 /* Should be unreachable. */
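/* Decode branch, branch-with-link, and block data transfer (LDM/STM)
   instructions.  */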
6721 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn,
6722 struct regcache *regs,
6723 arm_displaced_step_closure *dsc)
6726 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6728 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
6732 arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
6733 struct regcache *regs,
6734 arm_displaced_step_closure *dsc)
6736 unsigned int opcode = bits (insn, 20, 24);
6740 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
6741 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
6743 case 0x08: case 0x0a: case 0x0c: case 0x0e:
6744 case 0x12: case 0x16:
6745 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
6747 case 0x09: case 0x0b: case 0x0d: case 0x0f:
6748 case 0x13: case 0x17:
6749 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
6751 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6752 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6753 /* Note: no writeback for these instructions. Bit 25 will always be
6754 zero though (via caller), so the following works OK. */
6755 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6758 /* Should be unreachable. */
6762 /* Decode shifted register instructions. */
6765 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
6766 uint16_t insn2, struct regcache *regs,
6767 arm_displaced_step_closure *dsc)
6769 /* PC is only allowed to be used in the MOV instruction. */
6771 unsigned int op = bits (insn1, 5, 8);
6772 unsigned int rn = bits (insn1, 0, 3);
6774 if (op == 0x2 && rn == 0xf) /* MOV */
6775 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
6777 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6778 "dp (shift reg)", dsc);
6782 /* Decode extension register load/store. Exactly the same as
6783 arm_decode_ext_reg_ld_st. */
6786 thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
6787 uint16_t insn2, struct regcache *regs,
6788 arm_displaced_step_closure *dsc)
6790 unsigned int opcode = bits (insn1, 4, 8);
6794 case 0x04: case 0x05:
6795 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6796 "vfp/neon vmov", dsc);
6798 case 0x08: case 0x0c: /* 01x00 */
6799 case 0x0a: case 0x0e: /* 01x10 */
6800 case 0x12: case 0x16: /* 10x10 */
6801 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6802 "vfp/neon vstm/vpush", dsc);
6804 case 0x09: case 0x0d: /* 01x01 */
6805 case 0x0b: case 0x0f: /* 01x11 */
6806 case 0x13: case 0x17: /* 10x11 */
6807 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6808 "vfp/neon vldm/vpop", dsc);
6810 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6811 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6813 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6814 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
6817 /* Should be unreachable. */
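/* Decode supervisor call and coprocessor instructions
   (bits [27:26] == 0b11).  */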
6822 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
6823 struct regcache *regs, arm_displaced_step_closure *dsc)
6825 unsigned int op1 = bits (insn, 20, 25);
6826 int op = bit (insn, 4);
6827 unsigned int coproc = bits (insn, 8, 11);
6829 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
6830 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
6831 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
6832 && (coproc & 0xe) != 0xa)
6834 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6835 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
6836 && (coproc & 0xe) != 0xa)
6837 /* ldc/ldc2 imm/lit. */
6838 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6839 else if ((op1 & 0x3e) == 0x00)
6840 return arm_copy_undef (gdbarch, insn, dsc);
6841 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
6842 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
6843 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
6844 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6845 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
6846 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6847 else if ((op1 & 0x30) == 0x20 && !op)
6849 if ((coproc & 0xe) == 0xa)
6850 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
6852 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6854 else if ((op1 & 0x30) == 0x20 && op)
6855 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
6856 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
6857 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6858 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
6859 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6860 else if ((op1 & 0x30) == 0x30)
6861 return arm_copy_svc (gdbarch, insn, regs, dsc);
6863 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
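/* Decode the 32-bit Thumb coprocessor and Advanced SIMD instruction
   space.  */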
6867 thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
6868 uint16_t insn2, struct regcache *regs,
6869 arm_displaced_step_closure *dsc)
6871 unsigned int coproc = bits (insn2, 8, 11);
6872 unsigned int bit_5_8 = bits (insn1, 5, 8);
6873 unsigned int bit_9 = bit (insn1, 9);
6874 unsigned int bit_4 = bit (insn1, 4);
6879 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6880 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
6882 else if (bit_5_8 == 0) /* UNDEFINED. */
6883 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
6886 /* coproc is 101x: SIMD/VFP, extension registers load/store. */
6887 if ((coproc & 0xe) == 0xa)
6888 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
6890 else /* coproc is not 101x. */
6892 if (bit_4 == 0) /* STC/STC2. */
6893 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6895 else /* LDC/LDC2 {literal, immediate}. */
6896 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
6902 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
6908 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
6909 arm_displaced_step_closure *dsc, int rd)
6915 Preparation: Rd <- PC
6921 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6922 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
6926 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
6927 arm_displaced_step_closure *dsc,
6928 int rd, unsigned int imm)
6931 /* Encoding T2: ADDS Rd, #imm */
6932 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
6934 install_pc_relative (gdbarch, regs, dsc, rd);
6940 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
6941 struct regcache *regs,
6942 arm_displaced_step_closure *dsc)
6944 unsigned int rd = bits (insn, 8, 10);
6945 unsigned int imm8 = bits (insn, 0, 7);
6947 if (debug_displaced)
6948 fprintf_unfiltered (gdb_stdlog,
6949 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
6952 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
6956 thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
6957 uint16_t insn2, struct regcache *regs,
6958 arm_displaced_step_closure *dsc)
6960 unsigned int rd = bits (insn2, 8, 11);
6961 /* The immediate has the same encoding in ADR, ADD and SUB, so we simply
6962 extract the raw immediate encoding rather than computing the immediate
6963 value. When generating the ADD or SUB instruction, we can simply OR the
6964 immediate into the encoding. */
6965 unsigned int imm_3_8 = insn2 & 0x70ff;
6966 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
6968 if (debug_displaced)
6969 fprintf_unfiltered (gdb_stdlog,
6970 "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
6971 rd, imm_i, imm_3_8, insn1, insn2);
6973 if (bit (insn1, 7)) /* Encoding T2 */
6975 /* Encoding T3: SUB Rd, Rd, #imm */
6976 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
6977 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
6979 else /* Encoding T3 */
6981 /* Encoding T3: ADD Rd, Rd, #imm */
6982 dsc->modinsn[0] = (0xf100 | rd | imm_i);
6983 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
6987 install_pc_relative (gdbarch, regs, dsc, rd);
6993 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, uint16_t insn1,
6994 struct regcache *regs,
6995 arm_displaced_step_closure *dsc)
6997 unsigned int rt = bits (insn1, 8, 10);
6999 int imm8 = (bits (insn1, 0, 7) << 2);
7005 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
7007 Insn: LDR R0, [R2, R3];
7008 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
7010 if (debug_displaced)
7011 fprintf_unfiltered (gdb_stdlog,
7012 "displaced: copying thumb ldr r%d [pc #%d]\n"
7015 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
7016 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
7017 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
7018 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7019 /* The assembler calculates the required value of the offset from the
7020 Align(PC,4) value of this instruction to the label. */
7021 pc = pc & 0xfffffffc;
7023 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
7024 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
7027 dsc->u.ldst.xfersize = 4;
7029 dsc->u.ldst.immed = 0;
7030 dsc->u.ldst.writeback = 0;
7031 dsc->u.ldst.restore_r4 = 0;
7033 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/
7035 dsc->cleanup = &cleanup_load;
7040 /* Copy Thumb cbnz/cbz instruction. */
7043 thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
7044 struct regcache *regs,
7045 arm_displaced_step_closure *dsc)
7047 int non_zero = bit (insn1, 11);
7048 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
7049 CORE_ADDR from = dsc->insn_addr;
7050 int rn = bits (insn1, 0, 2);
7051 int rn_val = displaced_read_reg (regs, dsc, rn);
7053 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
7054 /* CBNZ and CBZ do not affect the condition flags. If the condition is
7055 true, set it to INST_AL so that cleanup_branch knows the branch is taken;
7056 otherwise leave it as-is and cleanup_branch will do nothing. */
7057 if (dsc->u.branch.cond)
7059 dsc->u.branch.cond = INST_AL;
7060 dsc->u.branch.dest = from + 4 + imm5;
7063 dsc->u.branch.dest = from + 2;
7065 dsc->u.branch.link = 0;
7066 dsc->u.branch.exchange = 0;
7068 if (debug_displaced)
7069 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
7070 " insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
7071 rn, rn_val, insn1, dsc->u.branch.dest);
7073 dsc->modinsn[0] = THUMB_NOP;
7075 dsc->cleanup = &cleanup_branch;
7079 /* Copy Table Branch Byte/Halfword (TBB/TBH) instructions. */
7081 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
7082 uint16_t insn2, struct regcache *regs,
7083 arm_displaced_step_closure *dsc)
7085 ULONGEST rn_val, rm_val;
7086 int is_tbh = bit (insn2, 4);
7087 CORE_ADDR halfwords = 0;
7088 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7090 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
7091 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
7097 target_read_memory (rn_val + 2 * rm_val, buf, 2);
7098 halfwords = extract_unsigned_integer (buf, 2, byte_order);
7104 target_read_memory (rn_val + rm_val, buf, 1);
7105 halfwords = extract_unsigned_integer (buf, 1, byte_order);
7108 if (debug_displaced)
7109 fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x offset 0x%x"
7110 " result 0x%x\n", is_tbh ? "tbh" : "tbb",
7111 (unsigned int) rn_val, (unsigned int) rm_val,
7112 (unsigned int) halfwords);
7114 dsc->u.branch.cond = INST_AL;
7115 dsc->u.branch.link = 0;
7116 dsc->u.branch.exchange = 0;
7117 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
7119 dsc->cleanup = &cleanup_branch;
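/* Cleanup for a 16-bit Thumb POP with a full register list: the popped PC
   value was left in r7, so write it to the PC, restore r7 from r8, and
   restore r8 from the saved temporary.  */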
7125 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
7126 arm_displaced_step_closure *dsc)
7129 int val = displaced_read_reg (regs, dsc, 7);
7130 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
7133 val = displaced_read_reg (regs, dsc, 8);
7134 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
7137 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
7142 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, uint16_t insn1,
7143 struct regcache *regs,
7144 arm_displaced_step_closure *dsc)
7146 dsc->u.block.regmask = insn1 & 0x00ff;
7148 /* Rewrite the instruction POP {rX, rY, ..., rZ, PC} as follows:
7151 (1) The register list is full, that is, r0-r7 are all used.
7152 Prepare: tmp[0] <- r8
7154 POP {r0, r1, ..., r6, r7}; remove PC from the reglist
7155 MOV r8, r7; move the value of r7 to r8
7156 POP {r7}; store the PC value into r7
7158 Cleanup: PC <- r7, r7 <- r8, r8 <- tmp[0]
7160 (2) The register list is not full; suppose there are N registers in the
7161 register list (excluding PC, 0 <= N <= 7).
7162 Prepare: for each i in 0..N, tmp[i] <- ri.
7164 POP {r0, r1, ..., rN};
7166 Cleanup: Set the registers in the original reglist from r0-rN, then
7167 restore r0-rN from tmp[].
7169 if (debug_displaced)
7170 fprintf_unfiltered (gdb_stdlog,
7171 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
7172 dsc->u.block.regmask, insn1);
7174 if (dsc->u.block.regmask == 0xff)
7176 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
7178 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
7179 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
7180 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
7183 dsc->cleanup = &cleanup_pop_pc_16bit_all;
7187 unsigned int num_in_list = bitcount (dsc->u.block.regmask);
7189 unsigned int new_regmask;
7191 for (i = 0; i < num_in_list + 1; i++)
7192 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7194 new_regmask = (1 << (num_in_list + 1)) - 1;
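/* For example, POP {r4, r7, pc} has two registers plus the PC, so the
   rewritten instruction is POP {r0, r1, r2}; the cleanup then distributes
   the popped values back to r4, r7 and the PC, and restores r0-r2.  */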
7196 if (debug_displaced)
7197 fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
7198 "{..., pc}: original reg list %.4x,"
7199 " modified list %.4x\n"),
7200 (int) dsc->u.block.regmask, new_regmask);
7202 dsc->u.block.regmask |= 0x8000;
7203 dsc->u.block.writeback = 0;
7204 dsc->u.block.cond = INST_AL;
7206 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
7208 dsc->cleanup = &cleanup_block_load_pc;
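/* Dispatch a 16-bit Thumb instruction to the appropriate displaced-stepping
   copy routine, keyed on opcode bits [15:12] (and [11:10] where needed).  */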
7215 thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7216 struct regcache *regs,
7217 arm_displaced_step_closure *dsc)
7219 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
7220 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
7223 /* 16-bit thumb instructions. */
7224 switch (op_bit_12_15)
7226 /* Shift (immediate), add, subtract, move and compare. */
7227 case 0: case 1: case 2: case 3:
7228 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7229 "shift/add/sub/mov/cmp",
7233 switch (op_bit_10_11)
7235 case 0: /* Data-processing */
7236 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7240 case 1: /* Special data instructions and branch and exchange. */
7242 unsigned short op = bits (insn1, 7, 9);
7243 if (op == 6 || op == 7) /* BX or BLX */
7244 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
7245 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
7246 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
7248 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
7252 default: /* LDR (literal) */
7253 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
7256 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
7257 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
7260 if (op_bit_10_11 < 2) /* Generate PC-relative address */
7261 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
7262 else /* Generate SP-relative address */
7263 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
7265 case 11: /* Misc 16-bit instructions */
7267 switch (bits (insn1, 8, 11))
7269 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
7270 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
7272 case 12: case 13: /* POP */
7273 if (bit (insn1, 8)) /* PC is in register list. */
7274 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
7276 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
7278 case 15: /* If-Then, and hints */
7279 if (bits (insn1, 0, 3))
7280 /* If-Then makes up to four following instructions conditional. The
7281 IT instruction itself is not conditional, so handle it as an
7282 ordinary unmodified instruction. */
7283 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
7286 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
7289 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
7294 if (op_bit_10_11 < 2) /* Store multiple registers */
7295 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
7296 else /* Load multiple registers */
7297 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
7299 case 13: /* Conditional branch and supervisor call */
7300 if (bits (insn1, 9, 11) != 7) /* conditional branch */
7301 err = thumb_copy_b (gdbarch, insn1, dsc);
7303 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
7305 case 14: /* Unconditional branch */
7306 err = thumb_copy_b (gdbarch, insn1, dsc);
7313 internal_error (__FILE__, __LINE__,
7314 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
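/* Decode 32-bit Thumb load instructions and memory hints (PLD/PLI),
   keyed on bits [6:5] of the first halfword together with Rt and Rn.  */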
7318 decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
7319 uint16_t insn1, uint16_t insn2,
7320 struct regcache *regs,
7321 arm_displaced_step_closure *dsc)
7323 int rt = bits (insn2, 12, 15);
7324 int rn = bits (insn1, 0, 3);
7325 int op1 = bits (insn1, 7, 8);
7327 switch (bits (insn1, 5, 6))
7329 case 0: /* Load byte and memory hints */
7330 if (rt == 0xf) /* PLD/PLI */
7333 /* PLD literal or Encoding T3 of PLI (immediate, literal). */
7334 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
7336 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7341 if (rn == 0xf) /* LDRB/LDRSB (literal) */
7342 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7345 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7346 "ldrb{reg, immediate}/ldrbt",
7351 case 1: /* Load halfword and memory hints. */
7352 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
7353 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7354 "pld/unalloc memhint", dsc);
7358 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7361 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7365 case 2: /* Load word */
7367 int insn2_bit_8_11 = bits (insn2, 8, 11);
7370 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
7371 else if (op1 == 0x1) /* Encoding T3 */
7372 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
7374 else /* op1 == 0x0 */
7376 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
7377 /* LDR (immediate) */
7378 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7379 dsc, bit (insn2, 8), 1);
7380 else if (insn2_bit_8_11 == 0xe) /* LDRT */
7381 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7384 /* LDR (register) */
7385 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7391 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
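/* Dispatch a 32-bit Thumb instruction to the appropriate displaced-stepping
   copy routine.  */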
7398 thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7399 uint16_t insn2, struct regcache *regs,
7400 arm_displaced_step_closure *dsc)
7403 unsigned short op = bit (insn2, 15);
7404 unsigned int op1 = bits (insn1, 11, 12);
7410 switch (bits (insn1, 9, 10))
7415 /* Load/store {dual, exclusive}, table branch. */
7416 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
7417 && bits (insn2, 5, 7) == 0)
7418 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
7421 /* PC is not allowed to be used in load/store {dual, exclusive} instructions. */
7423 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7424 "load/store dual/ex", dsc);
7426 else /* load/store multiple */
7428 switch (bits (insn1, 7, 8))
7430 case 0: case 3: /* SRS, RFE */
7431 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7434 case 1: case 2: /* LDM/STM/PUSH/POP */
7435 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
7442 /* Data-processing (shift register). */
7443 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
7446 default: /* Coprocessor instructions. */
7447 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7452 case 2: /* op1 = 2 */
7453 if (op) /* Branch and misc control. */
7455 if (bit (insn2, 14) /* BLX/BL */
7456 || bit (insn2, 12) /* Unconditional branch */
7457 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
7458 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
7460 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7465 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
7467 int dp_op = bits (insn1, 4, 8);
7468 int rn = bits (insn1, 0, 3);
7469 if ((dp_op == 0 || dp_op == 0xa) && rn == 0xf)
7470 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
7473 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7476 else /* Data processing (modified immediate) */
7477 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7481 case 3: /* op1 = 3 */
7482 switch (bits (insn1, 9, 10))
7486 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
7488 else /* NEON Load/Store and Store single data item */
7489 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7490 "neon elt/struct load/store",
7493 case 1: /* op1 = 3, bits (9, 10) == 1 */
7494 switch (bits (insn1, 7, 8))
7496 case 0: case 1: /* Data processing (register) */
7497 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7500 case 2: /* Multiply and absolute difference */
7501 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7502 "mul/mla/diff", dsc);
7504 case 3: /* Long multiply and divide */
7505 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7510 default: /* Coprocessor instructions */
7511 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7520 internal_error (__FILE__, __LINE__,
7521 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
7526 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7527 struct regcache *regs,
7528 arm_displaced_step_closure *dsc)
7530 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7532 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
7534 if (debug_displaced)
7535 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
7536 "at %.8lx\n", insn1, (unsigned long) from);
7539 dsc->insn_size = thumb_insn_size (insn1);
7540 if (thumb_insn_size (insn1) == 4)
7543 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
7544 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
7547 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
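/* Decode the instruction at FROM and fill in DSC with the information
   needed to single-step it out of line, handling ARM and Thumb modes.  */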
7551 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7552 CORE_ADDR to, struct regcache *regs,
7553 arm_displaced_step_closure *dsc)
7556 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7559 /* Most displaced instructions use a 1-instruction scratch space, so set this
7560 here and override below if/when necessary. */
7562 dsc->insn_addr = from;
7563 dsc->scratch_base = to;
7564 dsc->cleanup = NULL;
7565 dsc->wrote_to_pc = 0;
7567 if (!displaced_in_arm_mode (regs))
7568 return thumb_process_displaced_insn (gdbarch, from, regs, dsc);
7572 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
7573 if (debug_displaced)
7574 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
7575 "at %.8lx\n", (unsigned long) insn,
7576 (unsigned long) from);
7578 if ((insn & 0xf0000000) == 0xf0000000)
7579 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
7580 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
7582 case 0x0: case 0x1: case 0x2: case 0x3:
7583 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
7586 case 0x4: case 0x5: case 0x6:
7587 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
7591 err = arm_decode_media (gdbarch, insn, dsc);
7594 case 0x8: case 0x9: case 0xa: case 0xb:
7595 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
7598 case 0xc: case 0xd: case 0xe: case 0xf:
7599 err = arm_decode_svc_copro (gdbarch, insn, regs, dsc);
7604 internal_error (__FILE__, __LINE__,
7605 _("arm_process_displaced_insn: Instruction decode error"));
7608 /* Actually set up the scratch space for a displaced instruction. */
7611 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
7612 CORE_ADDR to, arm_displaced_step_closure *dsc)
7614 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7615 unsigned int i, len, offset;
7616 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7617 int size = dsc->is_thumb? 2 : 4;
7618 const gdb_byte *bkp_insn;
7621 /* Poke modified instruction(s). */
7622 for (i = 0; i < dsc->numinsns; i++)
7624 if (debug_displaced)
7626 fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
7628 fprintf_unfiltered (gdb_stdlog, "%.8lx",
7631 fprintf_unfiltered (gdb_stdlog, "%.4x",
7632 (unsigned short)dsc->modinsn[i]);
7634 fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
7635 (unsigned long) to + offset);
7638 write_memory_unsigned_integer (to + offset, size,
7639 byte_order_for_code,
7644 /* Choose the correct breakpoint instruction. */
7647 bkp_insn = tdep->thumb_breakpoint;
7648 len = tdep->thumb_breakpoint_size;
7652 bkp_insn = tdep->arm_breakpoint;
7653 len = tdep->arm_breakpoint_size;
7656 /* Put breakpoint afterwards. */
7657 write_memory (to + offset, bkp_insn, len);
7659 if (debug_displaced)
7660 fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
7661 paddress (gdbarch, from), paddress (gdbarch, to));
7664 /* Entry point for cleaning things up after a displaced instruction has been
7668 arm_displaced_step_fixup (struct gdbarch *gdbarch,
7669 struct displaced_step_closure *dsc_,
7670 CORE_ADDR from, CORE_ADDR to,
7671 struct regcache *regs)
7673 arm_displaced_step_closure *dsc = (arm_displaced_step_closure *) dsc_;
7676 dsc->cleanup (gdbarch, regs, dsc);
7678 if (!dsc->wrote_to_pc)
7679 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
7680 dsc->insn_addr + dsc->insn_size);
7684 #include "bfd-in2.h"
7685 #include "libcoff.h"
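/* Print the instruction at MEMADDR; when the address is recognized as
   Thumb code, present a fake Thumb symbol so that the opcodes disassembler
   switches to Thumb decoding.  */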
7688 gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
7690 gdb_disassembler *di
7691 = static_cast<gdb_disassembler *>(info->application_data);
7692 struct gdbarch *gdbarch = di->arch ();
7694 if (arm_pc_is_thumb (gdbarch, memaddr))
7696 static asymbol *asym;
7697 static combined_entry_type ce;
7698 static struct coff_symbol_struct csym;
7699 static struct bfd fake_bfd;
7700 static bfd_target fake_target;
7702 if (csym.native == NULL)
7704 /* Create a fake symbol vector containing a Thumb symbol.
7705 This is solely so that the code in print_insn_little_arm()
7706 and print_insn_big_arm() in opcodes/arm-dis.c will detect
7707 the presence of a Thumb symbol and switch to decoding
7708 Thumb instructions. */
7710 fake_target.flavour = bfd_target_coff_flavour;
7711 fake_bfd.xvec = &fake_target;
7712 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
7714 csym.symbol.the_bfd = &fake_bfd;
7715 csym.symbol.name = "fake";
7716 asym = (asymbol *) & csym;
7719 memaddr = UNMAKE_THUMB_ADDR (memaddr);
7720 info->symbols = &asym;
7723 info->symbols = NULL;
7725 /* GDB is able to get bfd_mach from exec_bfd, so info->mach is
7726 accurate; therefore mark the USER_SPECIFIED_MACHINE_TYPE bit. Otherwise,
7727 opcodes/arm-dis.c:print_insn would reset info->mach, and that would trigger
7728 the assert on the mismatch between info->mach and bfd_get_mach (exec_bfd)
7729 in default_print_insn. */
7730 if (exec_bfd != NULL)
7731 info->flags |= USER_SPECIFIED_MACHINE_TYPE;
7733 return default_print_insn (memaddr, info);
7736 /* The following define instruction sequences that will cause ARM
7737 CPUs to take an undefined instruction trap. These are used to
7738 signal a breakpoint to GDB.
7740 The newer ARMv4T CPUs are capable of operating in ARM or Thumb
7741 modes. A different instruction is required for each mode. The ARM
7742 CPUs can also be big or little endian. Thus four different
7743 instructions are needed to support all cases.
7745 Note: ARMv4 defines several new instructions that will take the
7746 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
7747 not in fact add the new instructions. The new undefined
7748 instructions in ARMv4 are all instructions that had no defined
7749 behaviour in earlier chips. There is no guarantee that they will
7750 raise an exception; they may instead be treated as NOPs. In practice, it
7751 may only be safe to rely on instructions matching:
7753 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
7754 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
7755 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
7757 Even this may only be true if the condition predicate is true. The
7758 following use a condition predicate of ALWAYS so it is always TRUE.
7760 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
7761 and NetBSD all use a software interrupt rather than an undefined
7762 instruction to force a trap. This can be handled by the
7763 ABI-specific code during establishment of the gdbarch vector. */
7765 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
7766 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
7767 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
7768 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
7770 static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
7771 static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
7772 static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
7773 static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
7775 /* Implement the breakpoint_kind_from_pc gdbarch method. */
7778 arm_breakpoint_kind_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr)
7780 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7781 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7783 if (arm_pc_is_thumb (gdbarch, *pcptr))
7785 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
7787 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
7788 check whether we are replacing a 32-bit instruction. */
7789 if (tdep->thumb2_breakpoint != NULL)
7793 if (target_read_memory (*pcptr, buf, 2) == 0)
7795 unsigned short inst1;
7797 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
7798 if (thumb_insn_size (inst1) == 4)
7799 return ARM_BP_KIND_THUMB2;
7803 return ARM_BP_KIND_THUMB;
7806 return ARM_BP_KIND_ARM;
7810 /* Implement the sw_breakpoint_from_kind gdbarch method. */
7812 static const gdb_byte *
7813 arm_sw_breakpoint_from_kind (struct gdbarch *gdbarch, int kind, int *size)
7815 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7819 case ARM_BP_KIND_ARM:
7820 *size = tdep->arm_breakpoint_size;
7821 return tdep->arm_breakpoint;
7822 case ARM_BP_KIND_THUMB:
7823 *size = tdep->thumb_breakpoint_size;
7824 return tdep->thumb_breakpoint;
7825 case ARM_BP_KIND_THUMB2:
7826 *size = tdep->thumb2_breakpoint_size;
7827 return tdep->thumb2_breakpoint;
7829 gdb_assert_not_reached ("unexpected arm breakpoint kind");
7833 /* Implement the breakpoint_kind_from_current_state gdbarch method. */
7836 arm_breakpoint_kind_from_current_state (struct gdbarch *gdbarch,
7837 struct regcache *regcache,
7842 /* Check the memory pointed by PC is readable. */
7843 if (target_read_memory (regcache_read_pc (regcache), buf, 4) == 0)
7845 struct arm_get_next_pcs next_pcs_ctx;
7847 arm_get_next_pcs_ctor (&next_pcs_ctx,
7848 &arm_get_next_pcs_ops,
7849 gdbarch_byte_order (gdbarch),
7850 gdbarch_byte_order_for_code (gdbarch),
7854 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
7856 /* If one of the next PCs computed by software single-step matches
7857 *pcptr, use that destination address to determine whether the
7858 target location is Thumb. */
7859 for (CORE_ADDR pc : next_pcs)
7861 if (UNMAKE_THUMB_ADDR (pc) == *pcptr)
7863 if (IS_THUMB_ADDR (pc))
7865 *pcptr = MAKE_THUMB_ADDR (*pcptr);
7866 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7869 return ARM_BP_KIND_ARM;
7874 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7877 /* Extract from an array REGBUF containing the (raw) register state a
7878 function return value of type TYPE, and copy that, in virtual
7879 format, into VALBUF. */
7882 arm_extract_return_value (struct type *type, struct regcache *regs,
7885 struct gdbarch *gdbarch = regs->arch ();
7886 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7888 if (TYPE_CODE_FLT == TYPE_CODE (type))
7890 switch (gdbarch_tdep (gdbarch)->fp_model)
7894 /* The value is in register F0 in internal format. We need to
7895 extract the raw value and then convert it to the desired
7897 bfd_byte tmpbuf[FP_REGISTER_SIZE];
7899 regs->cooked_read (ARM_F0_REGNUM, tmpbuf);
7900 target_float_convert (tmpbuf, arm_ext_type (gdbarch),
7905 case ARM_FLOAT_SOFT_FPA:
7906 case ARM_FLOAT_SOFT_VFP:
7907 /* ARM_FLOAT_VFP can arise if this is a variadic function so
7908 not using the VFP ABI code. */
7910 regs->cooked_read (ARM_A1_REGNUM, valbuf);
7911 if (TYPE_LENGTH (type) > 4)
7912 regs->cooked_read (ARM_A1_REGNUM + 1, valbuf + INT_REGISTER_SIZE);
7916 internal_error (__FILE__, __LINE__,
7917 _("arm_extract_return_value: "
7918 "Floating point model not supported"));
7922 else if (TYPE_CODE (type) == TYPE_CODE_INT
7923 || TYPE_CODE (type) == TYPE_CODE_CHAR
7924 || TYPE_CODE (type) == TYPE_CODE_BOOL
7925 || TYPE_CODE (type) == TYPE_CODE_PTR
7926 || TYPE_IS_REFERENCE (type)
7927 || TYPE_CODE (type) == TYPE_CODE_ENUM)
7929 /* If the type is a plain integer, then the access is
7930 straightforward. Otherwise we have to play around a bit more. */
7932 int len = TYPE_LENGTH (type);
7933 int regno = ARM_A1_REGNUM;
7938 /* By using store_unsigned_integer we avoid having to do
7939 anything special for small big-endian values. */
7940 regcache_cooked_read_unsigned (regs, regno++, &tmp);
7941 store_unsigned_integer (valbuf,
7942 (len > INT_REGISTER_SIZE
7943 ? INT_REGISTER_SIZE : len),
7945 len -= INT_REGISTER_SIZE;
7946 valbuf += INT_REGISTER_SIZE;
7951 /* For a structure or union the behaviour is as if the value had
7952 been stored to word-aligned memory and then loaded into
7953 registers with 32-bit load instruction(s). */
7954 int len = TYPE_LENGTH (type);
7955 int regno = ARM_A1_REGNUM;
7956 bfd_byte tmpbuf[INT_REGISTER_SIZE];
7960 regs->cooked_read (regno++, tmpbuf);
7961 memcpy (valbuf, tmpbuf,
7962 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
7963 len -= INT_REGISTER_SIZE;
7964 valbuf += INT_REGISTER_SIZE;
7970 /* Will a function return an aggregate type in memory or in a
7971 register? Return 0 if an aggregate type can be returned in a
7972 register, 1 if it must be returned in memory. */
7975 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
7977 enum type_code code;
7979 type = check_typedef (type);
7981 /* Simple, non-aggregate types (i.e. not including vectors and
7982 complex) are always returned in a register (or registers). */
7983 code = TYPE_CODE (type);
7984 if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
7985 && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
7988 if (TYPE_CODE_ARRAY == code && TYPE_VECTOR (type))
7990 /* Vector values should be returned using ARM registers if they
7991 are not over 16 bytes. */
7992 return (TYPE_LENGTH (type) > 16);
7995 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
7997 /* The AAPCS says all aggregates not larger than a word are returned in registers. */
7999 if (TYPE_LENGTH (type) <= INT_REGISTER_SIZE)
8008 /* All aggregate types that won't fit in a register must be returned
8010 if (TYPE_LENGTH (type) > INT_REGISTER_SIZE)
8013 /* In the ARM ABI, "integer" like aggregate types are returned in
8014 registers. For an aggregate type to be integer like, its size
8015 must be less than or equal to INT_REGISTER_SIZE and the
8016 offset of each addressable subfield must be zero. Note that bit
8017 fields are not addressable, and all addressable subfields of
8018 unions always start at offset zero.
8020 This function is based on the behaviour of GCC 2.95.1.
8021 See: gcc/arm.c: arm_return_in_memory() for details.
8023 Note: All versions of GCC before GCC 2.95.2 do not set up the
8024 parameters correctly for a function returning the following
8025 structure: struct { float f;}; This should be returned in memory,
8026 not a register. Richard Earnshaw sent me a patch, but I do not
8027 know of any way to detect if a function like the above has been
8028 compiled with the correct calling convention. */
8030 /* Assume all other aggregate types can be returned in a register.
8031 Run a check for structures, unions and arrays. */
8034 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
8037 /* Need to check if this struct/union is "integer" like. For
8038 this to be true, its size must be less than or equal to
8039 INT_REGISTER_SIZE and the offset of each addressable
8040 subfield must be zero. Note that bit fields are not
8041 addressable, and unions always start at offset zero. If any
8042 of the subfields is a floating point type, the struct/union
8043 cannot be an integer type. */
8045 /* For each field in the object, check:
8046 1) Is it FP? --> yes, nRc = 1;
8047 2) Is it addressable (bitpos != 0) and
8048 not packed (bitsize == 0)?
8052 for (i = 0; i < TYPE_NFIELDS (type); i++)
8054 enum type_code field_type_code;
8057 = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
8060 /* Is it a floating point type field? */
8061 if (field_type_code == TYPE_CODE_FLT)
8067 /* If bitpos != 0, then we have to care about it. */
8068 if (TYPE_FIELD_BITPOS (type, i) != 0)
8070 /* Bitfields are not addressable. If the field bitsize is
8071 zero, then the field is not packed. Hence it cannot be
8072 a bitfield or any other packed type. */
8073 if (TYPE_FIELD_BITSIZE (type, i) == 0)
8086 /* Write into appropriate registers a function return value of type
8087 TYPE, given in virtual format. */
8090 arm_store_return_value (struct type *type, struct regcache *regs,
8091 const gdb_byte *valbuf)
8093 struct gdbarch *gdbarch = regs->arch ();
8094 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8096 if (TYPE_CODE (type) == TYPE_CODE_FLT)
8098 gdb_byte buf[FP_REGISTER_SIZE];
8100 switch (gdbarch_tdep (gdbarch)->fp_model)
8104 target_float_convert (valbuf, type, buf, arm_ext_type (gdbarch));
8105 regs->cooked_write (ARM_F0_REGNUM, buf);
8108 case ARM_FLOAT_SOFT_FPA:
8109 case ARM_FLOAT_SOFT_VFP:
8110 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8111 not using the VFP ABI code. */
8113 regs->cooked_write (ARM_A1_REGNUM, valbuf);
8114 if (TYPE_LENGTH (type) > 4)
8115 regs->cooked_write (ARM_A1_REGNUM + 1, valbuf + INT_REGISTER_SIZE);
8119 internal_error (__FILE__, __LINE__,
8120 _("arm_store_return_value: Floating "
8121 "point model not supported"));
8125 else if (TYPE_CODE (type) == TYPE_CODE_INT
8126 || TYPE_CODE (type) == TYPE_CODE_CHAR
8127 || TYPE_CODE (type) == TYPE_CODE_BOOL
8128 || TYPE_CODE (type) == TYPE_CODE_PTR
8129 || TYPE_IS_REFERENCE (type)
8130 || TYPE_CODE (type) == TYPE_CODE_ENUM)
8132 if (TYPE_LENGTH (type) <= 4)
8134 /* Values of one word or less are zero/sign-extended and returned in r0. */
8136 bfd_byte tmpbuf[INT_REGISTER_SIZE];
8137 LONGEST val = unpack_long (type, valbuf);
8139 store_signed_integer (tmpbuf, INT_REGISTER_SIZE, byte_order, val);
8140 regs->cooked_write (ARM_A1_REGNUM, tmpbuf);
8144 /* Integral values greater than one word are stored in consecutive
8145 registers starting with r0. This will always be a multiple of
8146 the register size. */
8147 int len = TYPE_LENGTH (type);
8148 int regno = ARM_A1_REGNUM;
8152 regs->cooked_write (regno++, valbuf);
8153 len -= INT_REGISTER_SIZE;
8154 valbuf += INT_REGISTER_SIZE;
8160 /* For a structure or union the behaviour is as if the value had
8161 been stored to word-aligned memory and then loaded into
8162 registers with 32-bit load instruction(s). */
8163 int len = TYPE_LENGTH (type);
8164 int regno = ARM_A1_REGNUM;
8165 bfd_byte tmpbuf[INT_REGISTER_SIZE];
8169 memcpy (tmpbuf, valbuf,
8170 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
8171 regs->cooked_write (regno++, tmpbuf);
8172 len -= INT_REGISTER_SIZE;
8173 valbuf += INT_REGISTER_SIZE;
8179 /* Handle function return values. */
8181 static enum return_value_convention
8182 arm_return_value (struct gdbarch *gdbarch, struct value *function,
8183 struct type *valtype, struct regcache *regcache,
8184 gdb_byte *readbuf, const gdb_byte *writebuf)
8186 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8187 struct type *func_type = function ? value_type (function) : NULL;
8188 enum arm_vfp_cprc_base_type vfp_base_type;
8191 if (arm_vfp_abi_for_function (gdbarch, func_type)
8192 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
8194 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
8195 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
8197 for (i = 0; i < vfp_base_count; i++)
8199 if (reg_char == 'q')
8202 arm_neon_quad_write (gdbarch, regcache, i,
8203 writebuf + i * unit_length);
8206 arm_neon_quad_read (gdbarch, regcache, i,
8207 readbuf + i * unit_length);
8214 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
8215 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8218 regcache->cooked_write (regnum, writebuf + i * unit_length);
8220 regcache->cooked_read (regnum, readbuf + i * unit_length);
8223 return RETURN_VALUE_REGISTER_CONVENTION;
8226 if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
8227 || TYPE_CODE (valtype) == TYPE_CODE_UNION
8228 || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
8230 if (tdep->struct_return == pcc_struct_return
8231 || arm_return_in_memory (gdbarch, valtype))
8232 return RETURN_VALUE_STRUCT_CONVENTION;
8234 else if (TYPE_CODE (valtype) == TYPE_CODE_COMPLEX)
8236 if (arm_return_in_memory (gdbarch, valtype))
8237 return RETURN_VALUE_STRUCT_CONVENTION;
8241 arm_store_return_value (valtype, regcache, writebuf);
8244 arm_extract_return_value (valtype, regcache, readbuf);
8246 return RETURN_VALUE_REGISTER_CONVENTION;
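/* Extract the longjmp target PC from the jmp_buf whose address is in r0.  */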
8251 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
8253 struct gdbarch *gdbarch = get_frame_arch (frame);
8254 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8255 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8257 gdb_byte buf[INT_REGISTER_SIZE];
8259 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
8261 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
8265 *pc = extract_unsigned_integer (buf, INT_REGISTER_SIZE, byte_order);
8269 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
8270 return the target PC. Otherwise return 0. */
8273 arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
8277 CORE_ADDR start_addr;
8279 /* Find the starting address and name of the function containing the PC. */
8280 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
8282 /* Trampoline 'bx reg' doesn't belong to any function. Do the check here. */
8284 start_addr = arm_skip_bx_reg (frame, pc);
8285 if (start_addr != 0)
8291 /* If PC is in a Thumb call or return stub, return the address of the
8292 target PC, which is in a register. The thunk functions are called
8293 _call_via_xx, where xx is the register name. The possible names
8294 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
8295 functions, named __ARM_call_via_r[0-7]. */
8296 if (startswith (name, "_call_via_")
8297 || startswith (name, "__ARM_call_via_"))
8299 /* Use the name suffix to determine which register contains the target PC. */
8301 static const char *table[15] =
8302 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
8303 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
8306 int offset = strlen (name) - 2;
8308 for (regno = 0; regno <= 14; regno++)
8309 if (strcmp (&name[offset], table[regno]) == 0)
8310 return get_frame_register_unsigned (frame, regno);
8313 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
8314 non-interworking calls to foo. We could decode the stubs
8315 to find the target but it's easier to use the symbol table. */
8316 namelen = strlen (name);
8317 if (name[0] == '_' && name[1] == '_'
8318 && ((namelen > 2 + strlen ("_from_thumb")
8319 && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
8320 || (namelen > 2 + strlen ("_from_arm")
8321 && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
8324 int target_len = namelen - 2;
8325 struct bound_minimal_symbol minsym;
8326 struct objfile *objfile;
8327 struct obj_section *sec;
8329 if (name[namelen - 1] == 'b')
8330 target_len -= strlen ("_from_thumb");
8332 target_len -= strlen ("_from_arm");
8334 target_name = (char *) alloca (target_len + 1);
8335 memcpy (target_name, name + 2, target_len);
8336 target_name[target_len] = '\0';
8338 sec = find_pc_section (pc);
8339 objfile = (sec == NULL) ? NULL : sec->objfile;
8340 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
8341 if (minsym.minsym != NULL)
8342 return BMSYMBOL_VALUE_ADDRESS (minsym);
8347 return 0; /* not a stub */
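/* Handle the "set arm" prefix command: print a message and list the
   available subcommands.  */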
8351 set_arm_command (const char *args, int from_tty)
8353 printf_unfiltered (_("\
8354 \"set arm\" must be followed by an appropriate subcommand.\n"));
8355 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
8359 show_arm_command (const char *args, int from_tty)
8361 cmd_show_list (showarmcmdlist, from_tty, "");
8365 arm_update_current_architecture (void)
8367 struct gdbarch_info info;
8369 /* If the current architecture is not ARM, we have nothing to do. */
8370 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
8373 /* Update the architecture. */
8374 gdbarch_info_init (&info);
8376 if (!gdbarch_update_p (info))
8377 internal_error (__FILE__, __LINE__, _("could not update architecture"));
8381 set_fp_model_sfunc (const char *args, int from_tty,
8382 struct cmd_list_element *c)
8386 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
8387 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
8389 arm_fp_model = (enum arm_float_model) fp_model;
8393 if (fp_model == ARM_FLOAT_LAST)
8394 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
8397 arm_update_current_architecture ();
8401 show_fp_model (struct ui_file *file, int from_tty,
8402 struct cmd_list_element *c, const char *value)
8404 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8406 if (arm_fp_model == ARM_FLOAT_AUTO
8407 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8408 fprintf_filtered (file, _("\
8409 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
8410 fp_model_strings[tdep->fp_model]);
8412 fprintf_filtered (file, _("\
8413 The current ARM floating point model is \"%s\".\n"),
8414 fp_model_strings[arm_fp_model]);
8418 arm_set_abi (const char *args, int from_tty,
8419 struct cmd_list_element *c)
8423 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
8424 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
8426 arm_abi_global = (enum arm_abi_kind) arm_abi;
8430 if (arm_abi == ARM_ABI_LAST)
8431 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
8434 arm_update_current_architecture ();
8438 arm_show_abi (struct ui_file *file, int from_tty,
8439 struct cmd_list_element *c, const char *value)
8441 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8443 if (arm_abi_global == ARM_ABI_AUTO
8444 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8445 fprintf_filtered (file, _("\
8446 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
8447 arm_abi_strings[tdep->arm_abi]);
8449 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
8454 arm_show_fallback_mode (struct ui_file *file, int from_tty,
8455 struct cmd_list_element *c, const char *value)
8457 fprintf_filtered (file,
8458 _("The current execution mode assumed "
8459 "(when symbols are unavailable) is \"%s\".\n"),
8460 arm_fallback_mode_string);
8464 arm_show_force_mode (struct ui_file *file, int from_tty,
8465 struct cmd_list_element *c, const char *value)
8467 fprintf_filtered (file,
8468 _("The current execution mode assumed "
8469 "(even when symbols are available) is \"%s\".\n"),
8470 arm_force_mode_string);
8473 /* If the user changes the register disassembly style used for info
8474 register and other commands, we have to also switch the style used
8475 in opcodes for disassembly output. This function is run in the "set
8476 arm disassembly" command, and does that. */
8479 set_disassembly_style_sfunc (const char *args, int from_tty,
8480 struct cmd_list_element *c)
8482 /* Convert the short style name into the long style name (e.g. reg-names-*)
8483 before calling the generic set_disassembler_options() function. */
8484 std::string long_name = std::string ("reg-names-") + disassembly_style;
8485 set_disassembler_options (&long_name[0]);
8489 show_disassembly_style_sfunc (struct ui_file *file, int from_tty,
8490 struct cmd_list_element *c, const char *value)
8492 struct gdbarch *gdbarch = get_current_arch ();
8493 char *options = get_disassembler_options (gdbarch);
8494 const char *style = "";
8498 FOR_EACH_DISASSEMBLER_OPTION (opt, options)
8499 if (CONST_STRNEQ (opt, "reg-names-"))
8501 style = &opt[strlen ("reg-names-")];
8502 len = strcspn (style, ",");
8505 fprintf_unfiltered (file, "The disassembly style is \"%.*s\".\n", len, style);
8508 /* Return the ARM register name corresponding to register I. */
8510 arm_register_name (struct gdbarch *gdbarch, int i)
8512 const int num_regs = gdbarch_num_regs (gdbarch);
8514 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
8515 && i >= num_regs && i < num_regs + 32)
8517 static const char *const vfp_pseudo_names[] = {
8518 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
8519 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
8520 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
8521 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
8524 return vfp_pseudo_names[i - num_regs];
8527 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
8528 && i >= num_regs + 32 && i < num_regs + 32 + 16)
8530 static const char *const neon_pseudo_names[] = {
8531 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
8532 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
8535 return neon_pseudo_names[i - num_regs - 32];
8538 if (i >= ARRAY_SIZE (arm_register_names))
8539 /* These registers are only supported on targets which supply
8540 an XML description. */
8543 return arm_register_names[i];
8546 /* Test whether the coff symbol specific value corresponds to a Thumb function.  */
8550 coff_sym_is_thumb (int val)
8552 return (val == C_THUMBEXT
8553 || val == C_THUMBSTAT
8554 || val == C_THUMBEXTFUNC
8555 || val == C_THUMBSTATFUNC
8556 || val == C_THUMBLABEL);
8559 /* arm_coff_make_msymbol_special()
8560 arm_elf_make_msymbol_special()
8562 These functions test whether the COFF or ELF symbol corresponds to
8563 an address in thumb code, and set a "special" bit in a minimal
8564 symbol to indicate that it does. */
8567 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
8569 elf_symbol_type *elfsym = (elf_symbol_type *) sym;
8571 if (ARM_GET_SYM_BRANCH_TYPE (elfsym->internal_elf_sym.st_target_internal)
8572 == ST_BRANCH_TO_THUMB)
8573 MSYMBOL_SET_SPECIAL (msym);
8577 arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
8579 if (coff_sym_is_thumb (val))
8580 MSYMBOL_SET_SPECIAL (msym);
8584 arm_objfile_data_free (struct objfile *objfile, void *arg)
8586 struct arm_per_objfile *data = (struct arm_per_objfile *) arg;
8589 for (i = 0; i < objfile->obfd->section_count; i++)
8590 VEC_free (arm_mapping_symbol_s, data->section_maps[i]);
8594 arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
8597 const char *name = bfd_asymbol_name (sym);
8598 struct arm_per_objfile *data;
8599 VEC(arm_mapping_symbol_s) **map_p;
8600 struct arm_mapping_symbol new_map_sym;
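/* ELF mapping symbols (defined by the ARM ELF ABI) mark what a stretch of
   a section contains: "$a" marks ARM code, "$t" Thumb code and "$d"
   literal data.  Only those three kinds are recorded here; any other
   "$..." symbol is ignored below.  */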
8602 gdb_assert (name[0] == '$');
8603 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
8606 data = (struct arm_per_objfile *) objfile_data (objfile,
8607 arm_objfile_data_key);
8610 data = OBSTACK_ZALLOC (&objfile->objfile_obstack,
8611 struct arm_per_objfile);
8612 set_objfile_data (objfile, arm_objfile_data_key, data);
8613 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
8614 objfile->obfd->section_count,
8615 VEC(arm_mapping_symbol_s) *);
8617 map_p = &data->section_maps[bfd_get_section (sym)->index];
8619 new_map_sym.value = sym->value;
8620 new_map_sym.type = name[1];
8622 /* Assume that most mapping symbols appear in order of increasing
8623 value. If they were randomly distributed, it would be faster to
8624 always push here and then sort at first use. */
8625 if (!VEC_empty (arm_mapping_symbol_s, *map_p))
8627 struct arm_mapping_symbol *prev_map_sym;
8629 prev_map_sym = VEC_last (arm_mapping_symbol_s, *map_p);
8630 if (prev_map_sym->value >= sym->value)
8633 idx = VEC_lower_bound (arm_mapping_symbol_s, *map_p, &new_map_sym,
8634 arm_compare_mapping_symbols);
8635 VEC_safe_insert (arm_mapping_symbol_s, *map_p, idx, &new_map_sym);
8640 VEC_safe_push (arm_mapping_symbol_s, *map_p, &new_map_sym);
8644 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
8646 struct gdbarch *gdbarch = regcache->arch ();
8647 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
8649 /* If necessary, set the T bit. */
8652 ULONGEST val, t_bit;
8653 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
8654 t_bit = arm_psr_thumb_bit (gdbarch);
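/* On A/R-profile targets the Thumb bit is CPSR bit 5 (0x20); on
   M-profile it is the EPSR T bit, bit 24 of xPSR (0x01000000).
   arm_psr_thumb_bit returns whichever mask applies to this gdbarch.  */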
8655 if (arm_pc_is_thumb (gdbarch, pc))
8656 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8659 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8664 /* Read the contents of a NEON quad register, by reading from two
8665 double registers. This is used to implement the quad pseudo
8666 registers, and for argument passing in case the quad registers are
8667 missing; vectors are passed in quad registers when using the VFP
8668 ABI, even if a NEON unit is not present. REGNUM is the index of
8669 the quad register, in [0, 15]. */
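/* For example, q1 is composed of d2 (least significant half) and d3
   (most significant half); in general q<N> aliases d<2N> and d<2N+1>.
   The endianness check below decides which half is copied into the
   first eight bytes of BUF.  */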
8671 static enum register_status
8672 arm_neon_quad_read (struct gdbarch *gdbarch, readable_regcache *regcache,
8673 int regnum, gdb_byte *buf)
8676 gdb_byte reg_buf[8];
8677 int offset, double_regnum;
8678 enum register_status status;
8680 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8681 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8684 /* d0 is always the least significant half of q0. */
8685 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8690 status = regcache->raw_read (double_regnum, reg_buf);
8691 if (status != REG_VALID)
8693 memcpy (buf + offset, reg_buf, 8);
8695 offset = 8 - offset;
8696 status = regcache->raw_read (double_regnum + 1, reg_buf);
8697 if (status != REG_VALID)
8699 memcpy (buf + offset, reg_buf, 8);
8704 static enum register_status
8705 arm_pseudo_read (struct gdbarch *gdbarch, readable_regcache *regcache,
8706 int regnum, gdb_byte *buf)
8708 const int num_regs = gdbarch_num_regs (gdbarch);
8710 gdb_byte reg_buf[8];
8711 int offset, double_regnum;
8713 gdb_assert (regnum >= num_regs);
8716 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8717 /* Quad-precision register. */
8718 return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
8721 enum register_status status;
8723 /* Single-precision register. */
8724 gdb_assert (regnum < 32);
8726 /* s0 is always the least significant half of d0. */
8727 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8728 offset = (regnum & 1) ? 0 : 4;
8730 offset = (regnum & 1) ? 4 : 0;
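/* E.g. pseudo register s5 is the most significant half of d2:
   regnum >> 1 yields 2, and on a little-endian target OFFSET is 4.  */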
8732 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8733 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8736 status = regcache->raw_read (double_regnum, reg_buf);
8737 if (status == REG_VALID)
8738 memcpy (buf, reg_buf + offset, 4);
8743 /* Store the contents of BUF to a NEON quad register, by writing to
8744 two double registers. This is used to implement the quad pseudo
8745 registers, and for argument passing in case the quad registers are
8746 missing; vectors are passed in quad registers when using the VFP
8747 ABI, even if a NEON unit is not present. REGNUM is the index
8748 of the quad register, in [0, 15]. */
8751 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
8752 int regnum, const gdb_byte *buf)
8755 int offset, double_regnum;
8757 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8758 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8761 /* d0 is always the least significant half of q0. */
8762 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8767 regcache->raw_write (double_regnum, buf + offset);
8768 offset = 8 - offset;
8769 regcache->raw_write (double_regnum + 1, buf + offset);
8773 arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
8774 int regnum, const gdb_byte *buf)
8776 const int num_regs = gdbarch_num_regs (gdbarch);
8778 gdb_byte reg_buf[8];
8779 int offset, double_regnum;
8781 gdb_assert (regnum >= num_regs);
8784 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8785 /* Quad-precision register. */
8786 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
8789 /* Single-precision register. */
8790 gdb_assert (regnum < 32);
8792 /* s0 is always the least significant half of d0. */
8793 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8794 offset = (regnum & 1) ? 0 : 4;
8796 offset = (regnum & 1) ? 4 : 0;
8798 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8799 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8802 regcache->raw_read (double_regnum, reg_buf);
8803 memcpy (reg_buf + offset, buf, 4);
8804 regcache->raw_write (double_regnum, reg_buf);
8808 static struct value *
8809 value_of_arm_user_reg (struct frame_info *frame, const void *baton)
8811 const int *reg_p = (const int *) baton;
8812 return value_of_register (*reg_p, frame);
8815 static enum gdb_osabi
8816 arm_elf_osabi_sniffer (bfd *abfd)
8818 unsigned int elfosabi;
8819 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
8821 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
8823 if (elfosabi == ELFOSABI_ARM)
8824 /* GNU tools use this value.  Check note sections in this case, as well.  */
8826 bfd_map_over_sections (abfd,
8827 generic_elf_osabi_sniff_abi_tag_sections,
8830 /* Anything else will be handled by the generic ELF sniffer. */
8835 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
8836 struct reggroup *group)
8838 /* FPS register's type is INT, but belongs to float_reggroup.  Besides
8839 this, FPS register belongs to save_reggroup, restore_reggroup, and
8840 all_reggroup, of course. */
8841 if (regnum == ARM_FPS_REGNUM)
8842 return (group == float_reggroup
8843 || group == save_reggroup
8844 || group == restore_reggroup
8845 || group == all_reggroup);
8847 return default_register_reggroup_p (gdbarch, regnum, group);
8851 /* For backward-compatibility we allow two 'g' packet lengths with
8852 the remote protocol depending on whether FPA registers are
8853 supplied. M-profile targets do not have FPA registers, but some
8854 stubs already exist in the wild which use a 'g' packet which
8855 supplies them albeit with dummy values. The packet format which
8856 includes FPA registers should be considered deprecated for
8857 M-profile targets. */
8860 arm_register_g_packet_guesses (struct gdbarch *gdbarch)
8862 if (gdbarch_tdep (gdbarch)->is_m)
8864 /* If we know from the executable this is an M-profile target,
8865 cater for remote targets whose register set layout is the
8866 same as the FPA layout. */
8867 register_remote_g_packet_guess (gdbarch,
8868 /* r0-r12,sp,lr,pc; f0-f7; fps,xpsr */
8869 (16 * INT_REGISTER_SIZE)
8870 + (8 * FP_REGISTER_SIZE)
8871 + (2 * INT_REGISTER_SIZE),
8872 tdesc_arm_with_m_fpa_layout);
8874 /* The regular M-profile layout. */
8875 register_remote_g_packet_guess (gdbarch,
8876 /* r0-r12,sp,lr,pc; xpsr */
8877 (16 * INT_REGISTER_SIZE)
8878 + INT_REGISTER_SIZE,
8881 /* M-profile plus M4F VFP. */
8882 register_remote_g_packet_guess (gdbarch,
8883 /* r0-r12,sp,lr,pc; d0-d15; fpscr,xpsr */
8884 (16 * INT_REGISTER_SIZE)
8885 + (16 * VFP_REGISTER_SIZE)
8886 + (2 * INT_REGISTER_SIZE),
8887 tdesc_arm_with_m_vfp_d16);
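/* With the usual sizes (4-byte integer registers, 12-byte FPA registers,
   8-byte VFP double registers) the three guesses above correspond to 'g'
   packets of 16*4 + 8*12 + 2*4 = 168 bytes, 16*4 + 4 = 68 bytes and
   16*4 + 16*8 + 2*4 = 200 bytes respectively.  */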
8890 /* Otherwise we don't have a useful guess. */
8893 /* Implement the code_of_frame_writable gdbarch method. */
8896 arm_code_of_frame_writable (struct gdbarch *gdbarch, struct frame_info *frame)
8898 if (gdbarch_tdep (gdbarch)->is_m
8899 && get_frame_type (frame) == SIGTRAMP_FRAME)
8901 /* M-profile exception frames return to some magic PCs, which
8902 aren't writable at all. */
8910 /* Initialize the current architecture based on INFO. If possible,
8911 re-use an architecture from ARCHES, which is a list of
8912 architectures already created during this debugging session.
8914 Called e.g. at program startup, when reading a core file, and when
8915 reading a binary file. */
8917 static struct gdbarch *
8918 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
8920 struct gdbarch_tdep *tdep;
8921 struct gdbarch *gdbarch;
8922 struct gdbarch_list *best_arch;
8923 enum arm_abi_kind arm_abi = arm_abi_global;
8924 enum arm_float_model fp_model = arm_fp_model;
8925 struct tdesc_arch_data *tdesc_data = NULL;
8927 int vfp_register_count = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
8928 int have_wmmx_registers = 0;
8930 int have_fpa_registers = 1;
8931 const struct target_desc *tdesc = info.target_desc;
8933 /* If we have an object to base this architecture on, try to determine
8936 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
8938 int ei_osabi, e_flags;
8940 switch (bfd_get_flavour (info.abfd))
8942 case bfd_target_coff_flavour:
8943 /* Assume it's an old APCS-style ABI. */
8945 arm_abi = ARM_ABI_APCS;
8948 case bfd_target_elf_flavour:
8949 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
8950 e_flags = elf_elfheader (info.abfd)->e_flags;
8952 if (ei_osabi == ELFOSABI_ARM)
8954 /* GNU tools used to use this value, but do not for EABI
8955 objects. There's nowhere to tag an EABI version
8956 anyway, so assume APCS. */
8957 arm_abi = ARM_ABI_APCS;
8959 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
8961 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
8965 case EF_ARM_EABI_UNKNOWN:
8966 /* Assume GNU tools. */
8967 arm_abi = ARM_ABI_APCS;
8970 case EF_ARM_EABI_VER4:
8971 case EF_ARM_EABI_VER5:
8972 arm_abi = ARM_ABI_AAPCS;
8973 /* EABI binaries default to VFP float ordering.
8974 They may also contain build attributes that can
8975 be used to identify if the VFP argument-passing
8977 if (fp_model == ARM_FLOAT_AUTO)
8980 switch (bfd_elf_get_obj_attr_int (info.abfd,
8984 case AEABI_VFP_args_base:
8985 /* "The user intended FP parameter/result
8986 passing to conform to AAPCS, base
8988 fp_model = ARM_FLOAT_SOFT_VFP;
8990 case AEABI_VFP_args_vfp:
8991 /* "The user intended FP parameter/result
8992 passing to conform to AAPCS, VFP
8994 fp_model = ARM_FLOAT_VFP;
8996 case AEABI_VFP_args_toolchain:
8997 /* "The user intended FP parameter/result
8998 passing to conform to tool chain-specific
8999 conventions" - we don't know any such
9000 conventions, so leave it as "auto". */
9002 case AEABI_VFP_args_compatible:
9003 /* "Code is compatible with both the base
9004 and VFP variants; the user did not permit
9005 non-variadic functions to pass FP
9006 parameters/results" - leave it as
9010 /* Attribute value not mentioned in the
9011 November 2012 ABI, so leave it as
9016 fp_model = ARM_FLOAT_SOFT_VFP;
9022 /* Leave it as "auto". */
9023 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
9028 /* Detect M-profile programs. This only works if the
9029 executable file includes build attributes; GCC does
9030 copy them to the executable, but e.g. RealView does not.  */
9033 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9036 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9037 Tag_CPU_arch_profile);
9039 /* GCC specifies the profile for v6-M; RealView only
9040 specifies the profile for architectures starting with
9041 V7 (as opposed to architectures with a tag
9042 numerically greater than TAG_CPU_ARCH_V7). */
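/* Tag_CPU_arch_profile is encoded as an ASCII letter -- e.g. 'M' for the
   microcontroller profile -- which is why the check below compares
   attr_profile against the character 'M'.  */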
9043 if (!tdesc_has_registers (tdesc)
9044 && (attr_arch == TAG_CPU_ARCH_V6_M
9045 || attr_arch == TAG_CPU_ARCH_V6S_M
9046 || attr_profile == 'M'))
9051 if (fp_model == ARM_FLOAT_AUTO)
9053 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
9056 /* Leave it as "auto". Strictly speaking this case
9057 means FPA, but almost nobody uses that now, and
9058 many toolchains fail to set the appropriate bits
9059 for the floating-point model they use. */
9061 case EF_ARM_SOFT_FLOAT:
9062 fp_model = ARM_FLOAT_SOFT_FPA;
9064 case EF_ARM_VFP_FLOAT:
9065 fp_model = ARM_FLOAT_VFP;
9067 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
9068 fp_model = ARM_FLOAT_SOFT_VFP;
9073 if (e_flags & EF_ARM_BE8)
9074 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
9079 /* Leave it as "auto". */
9084 /* Check any target description for validity. */
9085 if (tdesc_has_registers (tdesc))
9087 /* For most registers we require GDB's default names; but also allow
9088 the numeric names for sp / lr / pc, as a convenience. */
9089 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
9090 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
9091 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
9093 const struct tdesc_feature *feature;
9096 feature = tdesc_find_feature (tdesc,
9097 "org.gnu.gdb.arm.core");
9098 if (feature == NULL)
9100 feature = tdesc_find_feature (tdesc,
9101 "org.gnu.gdb.arm.m-profile");
9102 if (feature == NULL)
9108 tdesc_data = tdesc_data_alloc ();
9111 for (i = 0; i < ARM_SP_REGNUM; i++)
9112 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9113 arm_register_names[i]);
9114 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9117 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9120 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9124 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9125 ARM_PS_REGNUM, "xpsr");
9127 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9128 ARM_PS_REGNUM, "cpsr");
9132 tdesc_data_cleanup (tdesc_data);
9136 feature = tdesc_find_feature (tdesc,
9137 "org.gnu.gdb.arm.fpa");
9138 if (feature != NULL)
9141 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
9142 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9143 arm_register_names[i]);
9146 tdesc_data_cleanup (tdesc_data);
9151 have_fpa_registers = 0;
9153 feature = tdesc_find_feature (tdesc,
9154 "org.gnu.gdb.xscale.iwmmxt");
9155 if (feature != NULL)
9157 static const char *const iwmmxt_names[] = {
9158 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9159 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9160 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9161 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9165 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
9167 &= tdesc_numbered_register (feature, tdesc_data, i,
9168 iwmmxt_names[i - ARM_WR0_REGNUM]);
9170 /* Check for the control registers, but do not fail if they are missing.  */
9172 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
9173 tdesc_numbered_register (feature, tdesc_data, i,
9174 iwmmxt_names[i - ARM_WR0_REGNUM]);
9176 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
9178 &= tdesc_numbered_register (feature, tdesc_data, i,
9179 iwmmxt_names[i - ARM_WR0_REGNUM]);
9183 tdesc_data_cleanup (tdesc_data);
9187 have_wmmx_registers = 1;
9190 /* If we have a VFP unit, check whether the single precision registers
9191 are present.  If not, then we will synthesize them as pseudo registers.  */
9193 feature = tdesc_find_feature (tdesc,
9194 "org.gnu.gdb.arm.vfp");
9195 if (feature != NULL)
9197 static const char *const vfp_double_names[] = {
9198 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9199 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9200 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9201 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9204 /* Require the double precision registers.  There must be either 16 or 32.  */
9207 for (i = 0; i < 32; i++)
9209 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9211 vfp_double_names[i]);
9215 if (!valid_p && i == 16)
9218 /* Also require FPSCR. */
9219 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9220 ARM_FPSCR_REGNUM, "fpscr");
9223 tdesc_data_cleanup (tdesc_data);
9227 if (tdesc_unnumbered_register (feature, "s0") == 0)
9228 have_vfp_pseudos = 1;
9230 vfp_register_count = i;
9232 /* If we have VFP, also check for NEON. The architecture allows
9233 NEON without VFP (integer vector operations only), but GDB
9234 does not support that. */
9235 feature = tdesc_find_feature (tdesc,
9236 "org.gnu.gdb.arm.neon");
9237 if (feature != NULL)
9239 /* NEON requires 32 double-precision registers. */
9242 tdesc_data_cleanup (tdesc_data);
9246 /* If there are quad registers defined by the stub, use
9247 their type; otherwise (normally) provide them with
9248 the default type. */
9249 if (tdesc_unnumbered_register (feature, "q0") == 0)
9250 have_neon_pseudos = 1;
9257 /* If there is already a candidate, use it. */
9258 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
9260 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
9262 if (arm_abi != ARM_ABI_AUTO
9263 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
9266 if (fp_model != ARM_FLOAT_AUTO
9267 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
9270 /* There are various other properties in tdep that we do not
9271 need to check here: those derived from a target description,
9272 since gdbarches with a different target description are
9273 automatically disqualified. */
9275 /* Do check is_m, though, since it might come from the binary. */
9276 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
9279 /* Found a match. */
9283 if (best_arch != NULL)
9285 if (tdesc_data != NULL)
9286 tdesc_data_cleanup (tdesc_data);
9287 return best_arch->gdbarch;
9290 tdep = XCNEW (struct gdbarch_tdep);
9291 gdbarch = gdbarch_alloc (&info, tdep);
9293 /* Record additional information about the architecture we are defining.
9294 These are gdbarch discriminators, like the OSABI. */
9295 tdep->arm_abi = arm_abi;
9296 tdep->fp_model = fp_model;
9298 tdep->have_fpa_registers = have_fpa_registers;
9299 tdep->have_wmmx_registers = have_wmmx_registers;
9300 gdb_assert (vfp_register_count == 0
9301 || vfp_register_count == 16
9302 || vfp_register_count == 32);
9303 tdep->vfp_register_count = vfp_register_count;
9304 tdep->have_vfp_pseudos = have_vfp_pseudos;
9305 tdep->have_neon_pseudos = have_neon_pseudos;
9306 tdep->have_neon = have_neon;
9308 arm_register_g_packet_guesses (gdbarch);
9311 switch (info.byte_order_for_code)
9313 case BFD_ENDIAN_BIG:
9314 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
9315 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
9316 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
9317 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
9321 case BFD_ENDIAN_LITTLE:
9322 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
9323 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
9324 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
9325 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
9330 internal_error (__FILE__, __LINE__,
9331 _("arm_gdbarch_init: bad byte order for float format"));
9334 /* On ARM targets char defaults to unsigned. */
9335 set_gdbarch_char_signed (gdbarch, 0);
9337 /* wchar_t is unsigned under the AAPCS. */
9338 if (tdep->arm_abi == ARM_ABI_AAPCS)
9339 set_gdbarch_wchar_signed (gdbarch, 0);
9341 set_gdbarch_wchar_signed (gdbarch, 1);
9343 /* Note: for displaced stepping, this includes the breakpoint, and one word
9344 of additional scratch space. This setting isn't used for anything beside
9345 displaced stepping at present. */
9346 set_gdbarch_max_insn_length (gdbarch, 4 * DISPLACED_MODIFIED_INSNS);
9348 /* This should be low enough for everything. */
9349 tdep->lowest_pc = 0x20;
9350 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
9352 /* The default, for both APCS and AAPCS, is to return small
9353 structures in registers. */
9354 tdep->struct_return = reg_struct_return;
9356 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
9357 set_gdbarch_frame_align (gdbarch, arm_frame_align);
9360 set_gdbarch_code_of_frame_writable (gdbarch, arm_code_of_frame_writable);
9362 set_gdbarch_write_pc (gdbarch, arm_write_pc);
9364 /* Frame handling. */
9365 set_gdbarch_dummy_id (gdbarch, arm_dummy_id);
9366 set_gdbarch_unwind_pc (gdbarch, arm_unwind_pc);
9367 set_gdbarch_unwind_sp (gdbarch, arm_unwind_sp);
9369 frame_base_set_default (gdbarch, &arm_normal_base);
9371 /* Address manipulation. */
9372 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
9374 /* Advance PC across function entry code. */
9375 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
9377 /* Detect whether PC is at a point where the stack has been destroyed. */
9378 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
9380 /* Skip trampolines. */
9381 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
9383 /* The stack grows downward. */
9384 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
9386 /* Breakpoint manipulation. */
9387 set_gdbarch_breakpoint_kind_from_pc (gdbarch, arm_breakpoint_kind_from_pc);
9388 set_gdbarch_sw_breakpoint_from_kind (gdbarch, arm_sw_breakpoint_from_kind);
9389 set_gdbarch_breakpoint_kind_from_current_state (gdbarch,
9390 arm_breakpoint_kind_from_current_state);
9392 /* Information about registers, etc. */
9393 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
9394 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
9395 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
9396 set_gdbarch_register_type (gdbarch, arm_register_type);
9397 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
9399 /* This "info float" is FPA-specific.  Use the generic version if we do not have FPA registers.  */
9401 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
9402 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
9404 /* Internal <-> external register number maps. */
9405 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
9406 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
9408 set_gdbarch_register_name (gdbarch, arm_register_name);
9410 /* Returning results. */
9411 set_gdbarch_return_value (gdbarch, arm_return_value);
9414 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
9416 /* Minsymbol frobbing. */
9417 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
9418 set_gdbarch_coff_make_msymbol_special (gdbarch,
9419 arm_coff_make_msymbol_special);
9420 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
9422 /* Thumb-2 IT block support. */
9423 set_gdbarch_adjust_breakpoint_address (gdbarch,
9424 arm_adjust_breakpoint_address);
9426 /* Virtual tables. */
9427 set_gdbarch_vbit_in_delta (gdbarch, 1);
9429 /* Hook in the ABI-specific overrides, if they have been registered. */
9430 gdbarch_init_osabi (info, gdbarch);
9432 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
9434 /* Add some default predicates. */
9436 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
9437 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
9438 dwarf2_append_unwinders (gdbarch);
9439 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
9440 frame_unwind_append_unwinder (gdbarch, &arm_epilogue_frame_unwind);
9441 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
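/* The unwinders registered above are tried in the order they were
   appended: the M-profile exception and stub unwinders first, then the
   DWARF CFI unwinders, the exception-index (exidx) unwinder, the
   epilogue analyzer, and finally the prologue analyzer as the fallback
   of last resort.  */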
9443 /* Now we have tuned the configuration, set a few final things,
9444 based on what the OS ABI has told us. */
9446 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
9447 binaries are always marked. */
9448 if (tdep->arm_abi == ARM_ABI_AUTO)
9449 tdep->arm_abi = ARM_ABI_APCS;
9451 /* Watchpoints are not steppable. */
9452 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
9454 /* We used to default to FPA for generic ARM, but almost nobody
9455 uses that now, and we now provide a way for the user to force
9456 the model. So default to the most useful variant. */
9457 if (tdep->fp_model == ARM_FLOAT_AUTO)
9458 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
9460 if (tdep->jb_pc >= 0)
9461 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
9463 /* Floating point sizes and format. */
9464 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
9465 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
9467 set_gdbarch_double_format
9468 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9469 set_gdbarch_long_double_format
9470 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9474 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
9475 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
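/* The "littlebyte_bigword" format chosen for the FPA models above
   reflects the FPA's mixed-endian layout on little-endian targets: each
   double is stored as two little-endian 32-bit words with the most
   significant word first.  */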
9478 if (have_vfp_pseudos)
9480 /* NOTE: These are the only pseudo registers used by
9481 the ARM target at the moment. If more are added, a
9482 little more care in numbering will be needed. */
9484 int num_pseudos = 32;
9485 if (have_neon_pseudos)
9487 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
9488 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
9489 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
9494 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
9496 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
9498 /* Override tdesc_register_type to adjust the types of VFP
9499 registers for NEON. */
9500 set_gdbarch_register_type (gdbarch, arm_register_type);
9503 /* Add standard register aliases. We add aliases even for those
9504 names which are used by the current architecture - it's simpler,
9505 and does no harm, since nothing ever lists user registers. */
9506 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
9507 user_reg_add (gdbarch, arm_register_aliases[i].name,
9508 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
9510 set_gdbarch_disassembler_options (gdbarch, &arm_disassembler_options);
9511 set_gdbarch_valid_disassembler_options (gdbarch, disassembler_options_arm ());
9517 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
9519 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9524 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
9525 (unsigned long) tdep->lowest_pc);
9531 static void arm_record_test (void);
9536 _initialize_arm_tdep (void)
9540 char regdesc[1024], *rdptr = regdesc;
9541 size_t rest = sizeof (regdesc);
9543 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
9545 arm_objfile_data_key
9546 = register_objfile_data_with_cleanup (NULL, arm_objfile_data_free);
9548 /* Add ourselves to objfile event chain. */
9549 gdb::observers::new_objfile.attach (arm_exidx_new_objfile);
9551 = register_objfile_data_with_cleanup (NULL, arm_exidx_data_free);
9553 /* Register an ELF OS ABI sniffer for ARM binaries. */
9554 gdbarch_register_osabi_sniffer (bfd_arch_arm,
9555 bfd_target_elf_flavour,
9556 arm_elf_osabi_sniffer);
9558 /* Initialize the standard target descriptions. */
9559 initialize_tdesc_arm_with_m ();
9560 initialize_tdesc_arm_with_m_fpa_layout ();
9561 initialize_tdesc_arm_with_m_vfp_d16 ();
9562 initialize_tdesc_arm_with_iwmmxt ();
9563 initialize_tdesc_arm_with_vfpv2 ();
9564 initialize_tdesc_arm_with_vfpv3 ();
9565 initialize_tdesc_arm_with_neon ();
9567 /* Add root prefix command for all "set arm"/"show arm" commands. */
9568 add_prefix_cmd ("arm", no_class, set_arm_command,
9569 _("Various ARM-specific commands."),
9570 &setarmcmdlist, "set arm ", 0, &setlist);
9572 add_prefix_cmd ("arm", no_class, show_arm_command,
9573 _("Various ARM-specific commands."),
9574 &showarmcmdlist, "show arm ", 0, &showlist);
9577 arm_disassembler_options = xstrdup ("reg-names-std");
9578 const disasm_options_t *disasm_options
9579 = &disassembler_options_arm ()->options;
9580 int num_disassembly_styles = 0;
9581 for (i = 0; disasm_options->name[i] != NULL; i++)
9582 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9583 num_disassembly_styles++;
9585 /* Initialize the array that will be passed to add_setshow_enum_cmd(). */
9586 valid_disassembly_styles = XNEWVEC (const char *,
9587 num_disassembly_styles + 1);
9588 for (i = j = 0; disasm_options->name[i] != NULL; i++)
9589 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9591 size_t offset = strlen ("reg-names-");
9592 const char *style = disasm_options->name[i];
9593 valid_disassembly_styles[j++] = &style[offset];
9594 length = snprintf (rdptr, rest, "%s - %s\n", &style[offset],
9595 disasm_options->description[i]);
9599 /* Mark the end of valid options. */
9600 valid_disassembly_styles[num_disassembly_styles] = NULL;
9602 /* Create the help text. */
9603 std::string helptext = string_printf ("%s%s%s",
9604 _("The valid values are:\n"),
9606 _("The default is \"std\"."));
9608 add_setshow_enum_cmd("disassembler", no_class,
9609 valid_disassembly_styles, &disassembly_style,
9610 _("Set the disassembly style."),
9611 _("Show the disassembly style."),
9613 set_disassembly_style_sfunc,
9614 show_disassembly_style_sfunc,
9615 &setarmcmdlist, &showarmcmdlist);
9617 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
9618 _("Set usage of ARM 32-bit mode."),
9619 _("Show usage of ARM 32-bit mode."),
9620 _("When off, a 26-bit PC will be used."),
9622 NULL, /* FIXME: i18n: Usage of ARM 32-bit
9624 &setarmcmdlist, &showarmcmdlist);
9626 /* Add a command to allow the user to force the FPU model. */
9627 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
9628 _("Set the floating point type."),
9629 _("Show the floating point type."),
9630 _("auto - Determine the FP type from the OS-ABI.\n\
9631 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
9632 fpa - FPA co-processor (GCC compiled).\n\
9633 softvfp - Software FP with pure-endian doubles.\n\
9634 vfp - VFP co-processor."),
9635 set_fp_model_sfunc, show_fp_model,
9636 &setarmcmdlist, &showarmcmdlist);
9638 /* Add a command to allow the user to force the ABI. */
9639 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
9642 NULL, arm_set_abi, arm_show_abi,
9643 &setarmcmdlist, &showarmcmdlist);
9645 /* Add two commands to allow the user to force the assumed execution mode.  */
9647 add_setshow_enum_cmd ("fallback-mode", class_support,
9648 arm_mode_strings, &arm_fallback_mode_string,
9649 _("Set the mode assumed when symbols are unavailable."),
9650 _("Show the mode assumed when symbols are unavailable."),
9651 NULL, NULL, arm_show_fallback_mode,
9652 &setarmcmdlist, &showarmcmdlist);
9653 add_setshow_enum_cmd ("force-mode", class_support,
9654 arm_mode_strings, &arm_force_mode_string,
9655 _("Set the mode assumed even when symbols are available."),
9656 _("Show the mode assumed even when symbols are available."),
9657 NULL, NULL, arm_show_force_mode,
9658 &setarmcmdlist, &showarmcmdlist);
9660 /* Debugging flag. */
9661 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
9662 _("Set ARM debugging."),
9663 _("Show ARM debugging."),
9664 _("When on, arm-specific debugging is enabled."),
9666 NULL, /* FIXME: i18n: "ARM debugging is %s."  */
9667 &setdebuglist, &showdebuglist);
9670 selftests::register_test ("arm-record", selftests::arm_record_test);
9675 /* ARM-reversible process record data structures. */
9677 #define ARM_INSN_SIZE_BYTES 4
9678 #define THUMB_INSN_SIZE_BYTES 2
9679 #define THUMB2_INSN_SIZE_BYTES 4
9682 /* Position of the bit within a 32-bit ARM instruction
9683 that defines whether the instruction is a load or store. */
9684 #define INSN_S_L_BIT_NUM 20
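/* For example, in a word load/store encoding, bit 20 set means a load
   (LDR) and bit 20 clear means a store (STR).  */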
9686 #define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
9689 unsigned int reg_len = LENGTH; \
9692 REGS = XNEWVEC (uint32_t, reg_len); \
9693 memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
9698 #define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
9701 unsigned int mem_len = LENGTH; \
9704 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
9705 memcpy(&MEMS->len, &RECORD_BUF[0], \
9706 sizeof(struct arm_mem_r) * LENGTH); \
9711 /* Checks whether the insn has already been recorded, i.e. whether any register or memory records exist for it (boolean expression). */
9712 #define INSN_RECORDED(ARM_RECORD) \
9713 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
9715 /* ARM memory record structure. */
9718 uint32_t len; /* Record length. */
9719 uint32_t addr; /* Memory address. */
9722 /* An ARM instruction record contains the opcode and execution state
9723 of the current insn (filled in before entry to decode_insn()), and
9724 the list of to-be-modified registers and
9725 memory blocks (filled in on return from decode_insn()). */
9727 typedef struct insn_decode_record_t
9729 struct gdbarch *gdbarch;
9730 struct regcache *regcache;
9731 CORE_ADDR this_addr; /* Address of the insn being decoded. */
9732 uint32_t arm_insn; /* Should accommodate thumb. */
9733 uint32_t cond; /* Condition code. */
9734 uint32_t opcode; /* Insn opcode. */
9735 uint32_t decode; /* Insn decode bits. */
9736 uint32_t mem_rec_count; /* No of mem records. */
9737 uint32_t reg_rec_count; /* No of reg records. */
9738 uint32_t *arm_regs; /* Registers to be saved for this record. */
9739 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
9740 } insn_decode_record;
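/* For instance, after decoding "strh r1, [r0, #4]" the record typically
   holds one memory entry (2 bytes at the address r0 + 4) and no register
   entries, whereas a load that writes back its base register ends up
   with register entries for both the destination and the base.  */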
9743 /* Checks ARM SBZ and SBO mandatory fields. */
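/* For example, sbo_sbz (insn, 9, 12, 1) looks at the 12 bits of INSN
   starting at bit 8 (BIT_NUM is one-based) and, because SBO is 1,
   verifies that they are the mandatory should-be-one bits; passing 0 as
   the last argument checks should-be-zero bits instead.  */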
9746 sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
9748 uint32_t ones = bits (insn, bit_num - 1, (bit_num -1) + (len - 1));
9767 enum arm_record_result
9769 ARM_RECORD_SUCCESS = 0,
9770 ARM_RECORD_FAILURE = 1
};

/* Types of store handled by arm_record_strx below.  */
typedef enum
{
  ARM_RECORD_STRH = 1,
  ARM_RECORD_STRD
} arm_record_strx_t;
9788 arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
9789 uint32_t *record_buf_mem, arm_record_strx_t str_type)
9792 struct regcache *reg_cache = arm_insn_r->regcache;
9793 ULONGEST u_regval[2]= {0};
9795 uint32_t reg_src1 = 0, reg_src2 = 0;
9796 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
9798 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
9799 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
9801 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
9803 /* 1) Handle misc store, immediate offset. */
9804 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9805 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9806 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9807 regcache_raw_read_unsigned (reg_cache, reg_src1,
9809 if (ARM_PC_REGNUM == reg_src1)
9811 /* If R15 was used as Rn, the value read is the current PC+8. */
9812 u_regval[0] = u_regval[0] + 8;
9814 offset_8 = (immed_high << 4) | immed_low;
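/* For example, immed_high == 0x3 and immed_low == 0x4 give an 8-bit
   offset of 0x34 (52 bytes).  */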
9815 /* Calculate target store address. */
9816 if (14 == arm_insn_r->opcode)
9818 tgt_mem_addr = u_regval[0] + offset_8;
9822 tgt_mem_addr = u_regval[0] - offset_8;
9824 if (ARM_RECORD_STRH == str_type)
9826 record_buf_mem[0] = 2;
9827 record_buf_mem[1] = tgt_mem_addr;
9828 arm_insn_r->mem_rec_count = 1;
9830 else if (ARM_RECORD_STRD == str_type)
9832 record_buf_mem[0] = 4;
9833 record_buf_mem[1] = tgt_mem_addr;
9834 record_buf_mem[2] = 4;
9835 record_buf_mem[3] = tgt_mem_addr + 4;
9836 arm_insn_r->mem_rec_count = 2;
9839 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
9841 /* 2) Store, register offset. */
9843 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9845 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9846 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9847 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9850 /* If R15 was used as Rn, the value read is the current PC+8. */
9851 u_regval[0] = u_regval[0] + 8;
9853 /* Calculate target store address, Rn +/- Rm, register offset. */
9854 if (12 == arm_insn_r->opcode)
9856 tgt_mem_addr = u_regval[0] + u_regval[1];
9860 tgt_mem_addr = u_regval[1] - u_regval[0];
9862 if (ARM_RECORD_STRH == str_type)
9864 record_buf_mem[0] = 2;
9865 record_buf_mem[1] = tgt_mem_addr;
9866 arm_insn_r->mem_rec_count = 1;
9868 else if (ARM_RECORD_STRD == str_type)
9870 record_buf_mem[0] = 4;
9871 record_buf_mem[1] = tgt_mem_addr;
9872 record_buf_mem[2] = 4;
9873 record_buf_mem[3] = tgt_mem_addr + 4;
9874 arm_insn_r->mem_rec_count = 2;
9877 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
9878 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9880 /* 3) Store, immediate pre-indexed. */
9881 /* 5) Store, immediate post-indexed. */
9882 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9883 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9884 offset_8 = (immed_high << 4) | immed_low;
9885 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9886 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9887 /* Calculate target store address, Rn +/- offset_8, immediate offset. */
9888 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9890 tgt_mem_addr = u_regval[0] + offset_8;
9894 tgt_mem_addr = u_regval[0] - offset_8;
9896 if (ARM_RECORD_STRH == str_type)
9898 record_buf_mem[0] = 2;
9899 record_buf_mem[1] = tgt_mem_addr;
9900 arm_insn_r->mem_rec_count = 1;
9902 else if (ARM_RECORD_STRD == str_type)
9904 record_buf_mem[0] = 4;
9905 record_buf_mem[1] = tgt_mem_addr;
9906 record_buf_mem[2] = 4;
9907 record_buf_mem[3] = tgt_mem_addr + 4;
9908 arm_insn_r->mem_rec_count = 2;
9910 /* Record Rn also as it changes. */
9911 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9912 arm_insn_r->reg_rec_count = 1;
9914 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
9915 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9917 /* 4) Store, register pre-indexed. */
9918 /* 6) Store, register post-indexed. */
9919 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9920 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9921 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9922 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9923 /* Calculate target store address, Rn +/- Rm, register offset. */
9924 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9926 tgt_mem_addr = u_regval[0] + u_regval[1];
9930 tgt_mem_addr = u_regval[1] - u_regval[0];
9932 if (ARM_RECORD_STRH == str_type)
9934 record_buf_mem[0] = 2;
9935 record_buf_mem[1] = tgt_mem_addr;
9936 arm_insn_r->mem_rec_count = 1;
9938 else if (ARM_RECORD_STRD == str_type)
9940 record_buf_mem[0] = 4;
9941 record_buf_mem[1] = tgt_mem_addr;
9942 record_buf_mem[2] = 4;
9943 record_buf_mem[3] = tgt_mem_addr + 4;
9944 arm_insn_r->mem_rec_count = 2;
9946 /* Record Rn also as it changes. */
9947 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9948 arm_insn_r->reg_rec_count = 1;
9953 /* Handling ARM extension space insns. */
9956 arm_record_extension_space (insn_decode_record *arm_insn_r)
9958 int ret = 0; /* Return value: -1: record failure; 0: success. */
9959 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
9960 uint32_t record_buf[8], record_buf_mem[8];
9961 uint32_t reg_src1 = 0;
9962 struct regcache *reg_cache = arm_insn_r->regcache;
9963 ULONGEST u_regval = 0;
9965 gdb_assert (!INSN_RECORDED(arm_insn_r));
9966 /* Handle unconditional insn extension space. */
9968 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
9969 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9970 if (arm_insn_r->cond)
9972 /* PLD has no effect on architectural state, it just affects the caches.  */
9974 if (5 == ((opcode1 & 0xE0) >> 5))
9977 record_buf[0] = ARM_PS_REGNUM;
9978 record_buf[1] = ARM_LR_REGNUM;
9979 arm_insn_r->reg_rec_count = 2;
9981 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
9985 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
9986 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
9989 /* Undefined instruction on ARM V5; need to handle if later
9990 versions define it. */
9993 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
9994 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9995 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
9997 /* Handle arithmetic insn extension space. */
9998 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
9999 && !INSN_RECORDED(arm_insn_r))
10001 /* Handle MLA(S) and MUL(S). */
10002 if (in_inclusive_range (insn_op1, 0U, 3U))
10004 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10005 record_buf[1] = ARM_PS_REGNUM;
10006 arm_insn_r->reg_rec_count = 2;
10008 else if (in_inclusive_range (insn_op1, 4U, 15U))
10010 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
10011 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10012 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10013 record_buf[2] = ARM_PS_REGNUM;
10014 arm_insn_r->reg_rec_count = 3;
10018 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
10019 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
10020 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
10022 /* Handle control insn extension space. */
10024 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
10025 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
10027 if (!bit (arm_insn_r->arm_insn,25))
10029 if (!bits (arm_insn_r->arm_insn, 4, 7))
10031 if ((0 == insn_op1) || (2 == insn_op1))
10034 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10035 arm_insn_r->reg_rec_count = 1;
10037 else if (1 == insn_op1)
10039 /* CPSR is going to be changed. */
10040 record_buf[0] = ARM_PS_REGNUM;
10041 arm_insn_r->reg_rec_count = 1;
10043 else if (3 == insn_op1)
10045 /* SPSR is going to be changed. */
10046 /* We need to get SPSR value, which is yet to be done. */
10050 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
10055 record_buf[0] = ARM_PS_REGNUM;
10056 arm_insn_r->reg_rec_count = 1;
10058 else if (3 == insn_op1)
10061 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10062 arm_insn_r->reg_rec_count = 1;
10065 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
10068 record_buf[0] = ARM_PS_REGNUM;
10069 record_buf[1] = ARM_LR_REGNUM;
10070 arm_insn_r->reg_rec_count = 2;
10072 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
10074 /* QADD, QSUB, QDADD, QDSUB */
10075 record_buf[0] = ARM_PS_REGNUM;
10076 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10077 arm_insn_r->reg_rec_count = 2;
10079 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
10082 record_buf[0] = ARM_PS_REGNUM;
10083 record_buf[1] = ARM_LR_REGNUM;
10084 arm_insn_r->reg_rec_count = 2;
10086 /* Save SPSR also; how? */
10089 else if(8 == bits (arm_insn_r->arm_insn, 4, 7)
10090 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
10091 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
10092 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
10095 if (0 == insn_op1 || 1 == insn_op1)
10097 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
10098 /* We don't do optimization for SMULW<y> where we
10100 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10101 record_buf[1] = ARM_PS_REGNUM;
10102 arm_insn_r->reg_rec_count = 2;
10104 else if (2 == insn_op1)
10107 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10108 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
10109 arm_insn_r->reg_rec_count = 2;
10111 else if (3 == insn_op1)
10114 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10115 arm_insn_r->reg_rec_count = 1;
10121 /* MSR : immediate form. */
10124 /* CPSR is going to be changed. */
10125 record_buf[0] = ARM_PS_REGNUM;
10126 arm_insn_r->reg_rec_count = 1;
10128 else if (3 == insn_op1)
10130 /* SPSR is going to be changed. */
10131 /* We need to get SPSR value, which is yet to be done. */
10137 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10138 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
10139 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
10141 /* Handle load/store insn extension space. */
10143 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
10144 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
10145 && !INSN_RECORDED(arm_insn_r))
10150 /* These insns change both registers and memory. */
10151 /* SWP or SWPB insn. */
10152 /* Get memory address given by Rn. */
10153 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10154 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
10155 /* SWP insn swaps a word. */
10156 if (8 == arm_insn_r->opcode)
10158 record_buf_mem[0] = 4;
10162 /* SWPB insn swaps only a byte. */
10163 record_buf_mem[0] = 1;
10165 record_buf_mem[1] = u_regval;
10166 arm_insn_r->mem_rec_count = 1;
10167 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10168 arm_insn_r->reg_rec_count = 1;
10170 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10173 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10176 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10179 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10180 record_buf[1] = record_buf[0] + 1;
10181 arm_insn_r->reg_rec_count = 2;
10183 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10186 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10189 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
10191 /* LDRH, LDRSB, LDRSH. */
10192 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10193 arm_insn_r->reg_rec_count = 1;
10198 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
10199 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
10200 && !INSN_RECORDED(arm_insn_r))
10203 /* Handle coprocessor insn extension space. */
10206 /* To be done for ARMv5 and later; as of now we return -1. */
10210 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10211 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10216 /* Handling opcode 000 insns. */
10219 arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
10221 struct regcache *reg_cache = arm_insn_r->regcache;
10222 uint32_t record_buf[8], record_buf_mem[8];
10223 ULONGEST u_regval[2] = {0};
10225 uint32_t reg_src1 = 0;
10226 uint32_t opcode1 = 0;
10228 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10229 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10230 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10232 if (!((opcode1 & 0x19) == 0x10))
10234 /* Data-processing (register) and Data-processing (register-shifted
10236 /* In all 11 shifter-operand modes the insn modifies the destination
10237 register, which is specified by bits 12-15. */
10238 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10239 record_buf[1] = ARM_PS_REGNUM;
10240 arm_insn_r->reg_rec_count = 2;
10242 else if ((arm_insn_r->decode < 8) && ((opcode1 & 0x19) == 0x10))
10244 /* Miscellaneous instructions */
10246 if (3 == arm_insn_r->decode && 0x12 == opcode1
10247 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10249 /* Handle BLX, branch and link/exchange. */
10250 if (9 == arm_insn_r->opcode)
10252 /* The branch's instruction set is chosen by setting the T bit of CPSR from bit[0] of Rm,
10253 and R14 stores the return address. */
10254 record_buf[0] = ARM_PS_REGNUM;
10255 record_buf[1] = ARM_LR_REGNUM;
10256 arm_insn_r->reg_rec_count = 2;
10259 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
10261 /* Handle enhanced software breakpoint insn, BKPT. */
10262 /* CPSR is changed so that execution continues in ARM state with normal
10263 interrupts disabled, and the processor enters Abort mode. */
10264 /* The PC is set according to the high-vector configuration. */
10265 /* If the user hits the breakpoint and then runs in reverse, we
10266 need to go back with the previous CPSR and
10267 Program Counter. */
10268 record_buf[0] = ARM_PS_REGNUM;
10269 record_buf[1] = ARM_LR_REGNUM;
10270 arm_insn_r->reg_rec_count = 2;
10272 /* Save SPSR also; how? */
10275 else if (1 == arm_insn_r->decode && 0x12 == opcode1
10276 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10278 /* Handle BX, branch and link/exchange. */
10279 /* The branch's instruction set is chosen by setting the T bit of CPSR from bit[0] of Rm. */
10280 record_buf[0] = ARM_PS_REGNUM;
10281 arm_insn_r->reg_rec_count = 1;
10283 else if (1 == arm_insn_r->decode && 0x16 == opcode1
10284 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
10285 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
10287 /* Count leading zeros: CLZ. */
10288 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10289 arm_insn_r->reg_rec_count = 1;
10291 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10292 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10293 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
10294 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0))
10296 /* Handle MRS insn. */
10297 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10298 arm_insn_r->reg_rec_count = 1;
10301 else if (9 == arm_insn_r->decode && opcode1 < 0x10)
10303 /* Multiply and multiply-accumulate */
10305 /* Handle multiply instructions. */
10306 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
10307 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
10309 /* Handle MLA and MUL. */
10310 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10311 record_buf[1] = ARM_PS_REGNUM;
10312 arm_insn_r->reg_rec_count = 2;
10314 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10316 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
10317 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10318 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10319 record_buf[2] = ARM_PS_REGNUM;
10320 arm_insn_r->reg_rec_count = 3;
10323 else if (9 == arm_insn_r->decode && opcode1 > 0x10)
10325 /* Synchronization primitives */
10327 /* Handling SWP, SWPB. */
10328 /* These insns change both registers and memory. */
10329 /* SWP or SWPB insn. */
10331 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10332 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10333 /* SWP insn swaps a word. */
10334 if (8 == arm_insn_r->opcode)
10336 record_buf_mem[0] = 4;
10340 /* SWPB insn swaps only a byte. */
10341 record_buf_mem[0] = 1;
10343 record_buf_mem[1] = u_regval[0];
10344 arm_insn_r->mem_rec_count = 1;
10345 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10346 arm_insn_r->reg_rec_count = 1;
10348 else if (11 == arm_insn_r->decode || 13 == arm_insn_r->decode
10349 || 15 == arm_insn_r->decode)
10351 if ((opcode1 & 0x12) == 2)
10353 /* Extra load/store (unprivileged) */
10358 /* Extra load/store */
10359 switch (bits (arm_insn_r->arm_insn, 5, 6))
10362 if ((opcode1 & 0x05) == 0x0 || (opcode1 & 0x05) == 0x4)
10364 /* STRH (register), STRH (immediate) */
10365 arm_record_strx (arm_insn_r, &record_buf[0],
10366 &record_buf_mem[0], ARM_RECORD_STRH);
10368 else if ((opcode1 & 0x05) == 0x1)
10370 /* LDRH (register) */
10371 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10372 arm_insn_r->reg_rec_count = 1;
10374 if (bit (arm_insn_r->arm_insn, 21))
10376 /* Write back to Rn. */
10377 record_buf[arm_insn_r->reg_rec_count++]
10378 = bits (arm_insn_r->arm_insn, 16, 19);
10381 else if ((opcode1 & 0x05) == 0x5)
10383 /* LDRH (immediate), LDRH (literal) */
10384 int rn = bits (arm_insn_r->arm_insn, 16, 19);
10386 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10387 arm_insn_r->reg_rec_count = 1;
10391 /* LDRH (immediate) */
10392 if (bit (arm_insn_r->arm_insn, 21))
10394 /* Write back to Rn. */
10395 record_buf[arm_insn_r->reg_rec_count++] = rn;
10403 if ((opcode1 & 0x05) == 0x0)
10405 /* LDRD (register) */
10406 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10407 record_buf[1] = record_buf[0] + 1;
10408 arm_insn_r->reg_rec_count = 2;
10410 if (bit (arm_insn_r->arm_insn, 21))
10412 /* Write back to Rn. */
10413 record_buf[arm_insn_r->reg_rec_count++]
10414 = bits (arm_insn_r->arm_insn, 16, 19);
10417 else if ((opcode1 & 0x05) == 0x1)
10419 /* LDRSB (register) */
10420 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10421 arm_insn_r->reg_rec_count = 1;
10423 if (bit (arm_insn_r->arm_insn, 21))
10425 /* Write back to Rn. */
10426 record_buf[arm_insn_r->reg_rec_count++]
10427 = bits (arm_insn_r->arm_insn, 16, 19);
10430 else if ((opcode1 & 0x05) == 0x4 || (opcode1 & 0x05) == 0x5)
10432 /* LDRD (immediate), LDRD (literal), LDRSB (immediate),
10434 int rn = bits (arm_insn_r->arm_insn, 16, 19);
10436 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10437 arm_insn_r->reg_rec_count = 1;
10441 /* LDRD (immediate), LDRSB (immediate) */
10442 if (bit (arm_insn_r->arm_insn, 21))
10444 /* Write back to Rn. */
10445 record_buf[arm_insn_r->reg_rec_count++] = rn;
10453 if ((opcode1 & 0x05) == 0x0)
10455 /* STRD (register) */
10456 arm_record_strx (arm_insn_r, &record_buf[0],
10457 &record_buf_mem[0], ARM_RECORD_STRD);
10459 else if ((opcode1 & 0x05) == 0x1)
10461 /* LDRSH (register) */
10462 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10463 arm_insn_r->reg_rec_count = 1;
10465 if (bit (arm_insn_r->arm_insn, 21))
10467 /* Write back to Rn. */
10468 record_buf[arm_insn_r->reg_rec_count++]
10469 = bits (arm_insn_r->arm_insn, 16, 19);
10472 else if ((opcode1 & 0x05) == 0x4)
10474 /* STRD (immediate) */
10475 arm_record_strx (arm_insn_r, &record_buf[0],
10476 &record_buf_mem[0], ARM_RECORD_STRD);
10478 else if ((opcode1 & 0x05) == 0x5)
10480 /* LDRSH (immediate), LDRSH (literal) */
10481 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10482 arm_insn_r->reg_rec_count = 1;
10484 if (bit (arm_insn_r->arm_insn, 21))
10486 /* Write back to Rn. */
10487 record_buf[arm_insn_r->reg_rec_count++]
10488 = bits (arm_insn_r->arm_insn, 16, 19);
10504 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10505 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10509 /* Handling opcode 001 insns. */
10512 arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
10514 uint32_t record_buf[8], record_buf_mem[8];
10516 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10517 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10519 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10520 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
10521 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10524 /* Handle MSR insn. */
10525 if (9 == arm_insn_r->opcode)
10527 /* CPSR is going to be changed. */
10528 record_buf[0] = ARM_PS_REGNUM;
10529 arm_insn_r->reg_rec_count = 1;
10533 /* SPSR is going to be changed. */
10536 else if (arm_insn_r->opcode <= 15)
10538 /* Normal data processing insns. */
10539 /* In all 11 shifter-operand modes the insn modifies the destination
10540 register, which is specified by bits 12-15. */
10541 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10542 record_buf[1] = ARM_PS_REGNUM;
10543 arm_insn_r->reg_rec_count = 2;
10550 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10551 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
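/* An illustrative sketch rather than an exhaustive decode: for an insn
   such as ADDS r2, r3, #1 the handler above lands in the "normal data
   processing" branch, so the saved state is the destination register and
   the flags:

       record_buf[0] = 2;               // Rd from bits 12-15
       record_buf[1] = ARM_PS_REGNUM;   // CPSR, because of the S bit
       arm_insn_r->reg_rec_count = 2;

   MSR (immediate) is special-cased first because it only alters CPSR or
   SPSR.  */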
10556 arm_record_media (insn_decode_record *arm_insn_r)
10558 uint32_t record_buf[8];
10560 switch (bits (arm_insn_r->arm_insn, 22, 24))
10563 /* Parallel addition and subtraction, signed */
10565 /* Parallel addition and subtraction, unsigned */
10568 /* Packing, unpacking, saturation and reversal */
10570 int rd = bits (arm_insn_r->arm_insn, 12, 15);
10572 record_buf[arm_insn_r->reg_rec_count++] = rd;
10578 /* Signed multiplies */
10580 int rd = bits (arm_insn_r->arm_insn, 16, 19);
10581 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 22);
10583 record_buf[arm_insn_r->reg_rec_count++] = rd;
10585 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10586 else if (op1 == 0x4)
10587 record_buf[arm_insn_r->reg_rec_count++]
10588 = bits (arm_insn_r->arm_insn, 12, 15);
10594 if (bit (arm_insn_r->arm_insn, 21)
10595 && bits (arm_insn_r->arm_insn, 5, 6) == 0x2)
10598 record_buf[arm_insn_r->reg_rec_count++]
10599 = bits (arm_insn_r->arm_insn, 12, 15);
10601 else if (bits (arm_insn_r->arm_insn, 20, 21) == 0x0
10602 && bits (arm_insn_r->arm_insn, 5, 7) == 0x0)
10604 /* USAD8 and USADA8 */
10605 record_buf[arm_insn_r->reg_rec_count++]
10606 = bits (arm_insn_r->arm_insn, 16, 19);
10613 if (bits (arm_insn_r->arm_insn, 20, 21) == 0x3
10614 && bits (arm_insn_r->arm_insn, 5, 7) == 0x7)
10616 /* Permanently UNDEFINED */
10621 /* BFC, BFI and UBFX */
10622 record_buf[arm_insn_r->reg_rec_count++]
10623 = bits (arm_insn_r->arm_insn, 12, 15);
10632 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
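/* Rough guide to the media groups decoded above (bits 22-24 select the
   group): the parallel add/subtract and pack/saturate/reverse insns write
   only Rd in bits 12-15; the signed-multiply group writes Rd in bits
   16-19 and sometimes the flags; USAD8/USADA8 write bits 16-19; and
   BFC/BFI/UBFX write bits 12-15.  For example, USADA8 r0, r1, r2, r3
   would be recorded simply as record_buf[0] = 0.  This summarises the
   switch above, not the full ARM ARM tables.  */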
10637 /* Handle ARM mode instructions with opcode 010. */
10640 arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
10642 struct regcache *reg_cache = arm_insn_r->regcache;
10644 uint32_t reg_base , reg_dest;
10645 uint32_t offset_12, tgt_mem_addr;
10646 uint32_t record_buf[8], record_buf_mem[8];
10647 unsigned char wback;
10650 /* Calculate wback. */
10651 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
10652 || (bit (arm_insn_r->arm_insn, 21) == 1);
10654 arm_insn_r->reg_rec_count = 0;
10655 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10657 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10659 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
10662 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10663 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
10665 /* The LDR instruction is capable of doing branching.  If MOV LR, PC
10666 precedes an LDR instruction that loads into R15 (the PC), the pair
10667 emulates a branch and link instruction, and hence we need to save
10668 CPSR and PC as well.  */
10669 if (ARM_PC_REGNUM == reg_dest)
10670 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10672 /* If wback is true, also save the base register, which is going to be
10675 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10679 /* STR (immediate), STRB (immediate), STRBT and STRT. */
10681 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
10682 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10684 /* Handle bit U. */
10685 if (bit (arm_insn_r->arm_insn, 23))
10687 /* U == 1: Add the offset. */
10688 tgt_mem_addr = (uint32_t) u_regval + offset_12;
10692 /* U == 0: subtract the offset. */
10693 tgt_mem_addr = (uint32_t) u_regval - offset_12;
10696 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
10698 if (bit (arm_insn_r->arm_insn, 22))
10700 /* STRB and STRBT: 1 byte. */
10701 record_buf_mem[0] = 1;
10705 /* STR and STRT: 4 bytes. */
10706 record_buf_mem[0] = 4;
10709 /* Handle bit P. */
10710 if (bit (arm_insn_r->arm_insn, 24))
10711 record_buf_mem[1] = tgt_mem_addr;
10713 record_buf_mem[1] = (uint32_t) u_regval;
10715 arm_insn_r->mem_rec_count = 1;
10717 /* If wback is true, also save the base register, which is going to be
10720 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10723 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10724 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
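/* Worked example for the store path above (assuming the usual A32
   placement of P, U and W in bits 24, 23 and 21): for STR r1, [r2, #-8]!
   we have P=1, U=0, W=1, so wback is true, tgt_mem_addr is R2 - 8, and
   the handler records

       record_buf_mem[0] = 4;                // word store
       record_buf_mem[1] = tgt_mem_addr;     // pre-indexed address
       record_buf[reg_rec_count++] = 2;      // R2 is written back

   A post-indexed form (P=0) records the unmodified base address instead,
   which matches the "Handle bit P" test above.  */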
10728 /* Handling opcode 011 insns. */
10731 arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
10733 struct regcache *reg_cache = arm_insn_r->regcache;
10735 uint32_t shift_imm = 0;
10736 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
10737 uint32_t offset_12 = 0, tgt_mem_addr = 0;
10738 uint32_t record_buf[8], record_buf_mem[8];
10741 ULONGEST u_regval[2];
10743 if (bit (arm_insn_r->arm_insn, 4))
10744 return arm_record_media (arm_insn_r);
10746 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10747 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10749 /* Handle enhanced store insns and LDRD DSP insn,
10750 order begins according to addressing modes for store insns
10754 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10756 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10757 /* The LDR insn is capable of doing branching: if
10758 MOV LR, PC precedes an LDR insn that loads into R15,
10759 the pair emulates a branch and link insn, and hence we
10760 need to save CPSR and PC as well.  */
10761 if (15 != reg_dest)
10763 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10764 arm_insn_r->reg_rec_count = 1;
10768 record_buf[0] = reg_dest;
10769 record_buf[1] = ARM_PS_REGNUM;
10770 arm_insn_r->reg_rec_count = 2;
10775 if (! bits (arm_insn_r->arm_insn, 4, 11))
10777 /* Store insn, register offset and register pre-indexed,
10778 register post-indexed. */
10780 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10782 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10783 regcache_raw_read_unsigned (reg_cache, reg_src1
10785 regcache_raw_read_unsigned (reg_cache, reg_src2
10787 if (15 == reg_src2)
10789 /* If R15 was used as Rn, the value read is the current PC + 8.  */
10790 /* Pre-indexed mode doesn't reach here; that would be an illegal insn.  */
10791 u_regval[0] = u_regval[0] + 8;
10793 /* Calculate target store address, Rn +/- Rm, register offset. */
10795 if (bit (arm_insn_r->arm_insn, 23))
10797 tgt_mem_addr = u_regval[0] + u_regval[1];
10801 tgt_mem_addr = u_regval[1] - u_regval[0];
10804 switch (arm_insn_r->opcode)
10818 record_buf_mem[0] = 4;
10833 record_buf_mem[0] = 1;
10837 gdb_assert_not_reached ("no decoding pattern found");
10840 record_buf_mem[1] = tgt_mem_addr;
10841 arm_insn_r->mem_rec_count = 1;
10843 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10844 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10845 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10846 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10847 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10848 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10851 /* Rn is going to be changed in pre-indexed mode and
10852 post-indexed mode as well. */
10853 record_buf[0] = reg_src2;
10854 arm_insn_r->reg_rec_count = 1;
10859 /* Store insn, scaled register offset; scaled pre-indexed. */
10860 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
10862 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10864 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10865 /* Get shift_imm. */
10866 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
10867 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10868 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
10869 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10870 /* Offset_12 used as shift. */
10874 /* Offset_12 used as index. */
10875 offset_12 = u_regval[0] << shift_imm;
10879 offset_12 = (!shift_imm) ? 0 : u_regval[0] >> shift_imm;
10885 if (bit (u_regval[0], 31))
10887 offset_12 = 0xFFFFFFFF;
10896 /* This is arithmetic shift. */
10897 offset_12 = s_word >> shift_imm;
10904 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
10906 /* Get C flag value and shift it by 31. */
10907 offset_12 = (((bit (u_regval[1], 29)) << 31)
10908 | (u_regval[0]) >> 1);
10912 offset_12 = ((u_regval[0] >> shift_imm)
10914 | (u_regval[0] << (8 * sizeof (uint32_t) - shift_imm)));
10919 gdb_assert_not_reached ("no decoding pattern found");
10923 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10925 if (bit (arm_insn_r->arm_insn, 23))
10927 tgt_mem_addr = u_regval[1] + offset_12;
10931 tgt_mem_addr = u_regval[1] - offset_12;
10934 switch (arm_insn_r->opcode)
10948 record_buf_mem[0] = 4;
10963 record_buf_mem[0] = 1;
10967 gdb_assert_not_reached ("no decoding pattern found");
10970 record_buf_mem[1] = tgt_mem_addr;
10971 arm_insn_r->mem_rec_count = 1;
10973 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10974 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10975 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10976 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10977 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10978 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10981 /* Rn is going to be changed in register scaled pre-indexed
10982 mode, and scaled post-indexed mode.  */
10983 record_buf[0] = reg_src2;
10984 arm_insn_r->reg_rec_count = 1;
10989 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10990 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
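/* The scaled-register-offset path above re-implements the A32 shifter:
   LSL, LSR, ASR, ROR and RRX selected by bits 5-6, with the immediate
   shift amount in bits 7-11.  A minimal, behaviour-equivalent sketch of
   the ROR case (shift_imm != 0) is:

       static inline uint32_t
       ror32 (uint32_t x, unsigned s)
       {
         return (x >> s) | (x << (32 - s));
       }

   so the recorded store address is Rn +/- ror32 (Rm, shift_imm),
   depending on the U bit.  The helper is only an illustration; the
   handler computes the value inline.  */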
10994 /* Handle ARM mode instructions with opcode 100. */
10997 arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
10999 struct regcache *reg_cache = arm_insn_r->regcache;
11000 uint32_t register_count = 0, register_bits;
11001 uint32_t reg_base, addr_mode;
11002 uint32_t record_buf[24], record_buf_mem[48];
11006 /* Fetch the list of registers. */
11007 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
11008 arm_insn_r->reg_rec_count = 0;
11010 /* Fetch the base register that contains the address we are loading data from.  */
11012 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
11014 /* Calculate wback. */
11015 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
11017 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11019 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
11021 /* Find out which registers are going to be loaded from memory. */
11022 while (register_bits)
11024 if (register_bits & 0x00000001)
11025 record_buf[arm_insn_r->reg_rec_count++] = register_count;
11026 register_bits = register_bits >> 1;
11031 /* If wback is true, also save the base register, which is going to be
11034 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11036 /* Save the CPSR register. */
11037 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
11041 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
11043 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
11045 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
11047 /* Find out how many registers are going to be stored to memory. */
11048 while (register_bits)
11050 if (register_bits & 0x00000001)
11052 register_bits = register_bits >> 1;
11057 /* STMDA (STMED): Decrement after. */
11059 record_buf_mem[1] = (uint32_t) u_regval
11060 - register_count * INT_REGISTER_SIZE + 4;
11062 /* STM (STMIA, STMEA): Increment after. */
11064 record_buf_mem[1] = (uint32_t) u_regval;
11066 /* STMDB (STMFD): Decrement before. */
11068 record_buf_mem[1] = (uint32_t) u_regval
11069 - register_count * INT_REGISTER_SIZE;
11071 /* STMIB (STMFA): Increment before. */
11073 record_buf_mem[1] = (uint32_t) u_regval + INT_REGISTER_SIZE;
11076 gdb_assert_not_reached ("no decoding pattern found");
11080 record_buf_mem[0] = register_count * INT_REGISTER_SIZE;
11081 arm_insn_r->mem_rec_count = 1;
11083 /* If wback is true, also save the base register, which is going to be
11086 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11089 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11090 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
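/* Worked example for the store-multiple path: STMDB sp!, {r4, r5, lr}
   has three registers set in bits 0-15 and addressing mode DB (bits
   23-24 == 2), so the handler records register_count * 4 == 12 bytes
   starting at SP - 12, plus SP itself because wback is set.  The load
   path instead records every register named in the list, optionally the
   base register, and CPSR.  (The example values assume INT_REGISTER_SIZE
   is 4, as it is for 32-bit ARM.)  */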
11094 /* Handling opcode 101 insns. */
11097 arm_record_b_bl (insn_decode_record *arm_insn_r)
11099 uint32_t record_buf[8];
11101 /* Handle B, BL, BLX(1) insns. */
11102 /* B simply branches, so we do nothing here.  */
11103 /* Note: BLX(1) doesn't fall here; instead it falls into the
11104 extension space.  */
11105 if (bit (arm_insn_r->arm_insn, 24))
11107 record_buf[0] = ARM_LR_REGNUM;
11108 arm_insn_r->reg_rec_count = 1;
11111 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11117 arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
11119 printf_unfiltered (_("Process record does not support instruction "
11120 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11121 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11126 /* Record handler for vector data transfer instructions. */
11129 arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
11131 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
11132 uint32_t record_buf[4];
11134 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
11135 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
11136 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
11137 bit_l = bit (arm_insn_r->arm_insn, 20);
11138 bit_c = bit (arm_insn_r->arm_insn, 8);
11140 /* Handle VMOV instruction. */
11141 if (bit_l && bit_c)
11143 record_buf[0] = reg_t;
11144 arm_insn_r->reg_rec_count = 1;
11146 else if (bit_l && !bit_c)
11148 /* Handle VMOV instruction. */
11149 if (bits_a == 0x00)
11151 record_buf[0] = reg_t;
11152 arm_insn_r->reg_rec_count = 1;
11154 /* Handle VMRS instruction. */
11155 else if (bits_a == 0x07)
11158 reg_t = ARM_PS_REGNUM;
11160 record_buf[0] = reg_t;
11161 arm_insn_r->reg_rec_count = 1;
11164 else if (!bit_l && !bit_c)
11166 /* Handle VMOV instruction. */
11167 if (bits_a == 0x00)
11169 record_buf[0] = ARM_D0_REGNUM + reg_v;
11171 arm_insn_r->reg_rec_count = 1;
11173 /* Handle VMSR instruction. */
11174 else if (bits_a == 0x07)
11176 record_buf[0] = ARM_FPSCR_REGNUM;
11177 arm_insn_r->reg_rec_count = 1;
11180 else if (!bit_l && bit_c)
11182 /* Handle VMOV instruction. */
11183 if (!(bits_a & 0x04))
11185 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
11187 arm_insn_r->reg_rec_count = 1;
11189 /* Handle VDUP instruction. */
11192 if (bit (arm_insn_r->arm_insn, 21))
11194 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11195 record_buf[0] = reg_v + ARM_D0_REGNUM;
11196 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
11197 arm_insn_r->reg_rec_count = 2;
11201 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11202 record_buf[0] = reg_v + ARM_D0_REGNUM;
11203 arm_insn_r->reg_rec_count = 1;
11208 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
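/* The VFP transfer handlers record D registers rather than single
   S registers, because GDB exposes the S bank as pseudo registers built
   on top of the D registers.  As a sketch of the mapping assumed here,
   S<n> occupies half of D<n/2>:

       d_regnum = ARM_D0_REGNUM + (s_regnum >> 1);

   so e.g. a VMOV s5, r1 ends up recorded as a write to D2.  */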
11212 /* Record handler for extension register load/store instructions. */
11215 arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
11217 uint32_t opcode, single_reg;
11218 uint8_t op_vldm_vstm;
11219 uint32_t record_buf[8], record_buf_mem[128];
11220 ULONGEST u_regval = 0;
11222 struct regcache *reg_cache = arm_insn_r->regcache;
11224 opcode = bits (arm_insn_r->arm_insn, 20, 24);
11225 single_reg = !bit (arm_insn_r->arm_insn, 8);
11226 op_vldm_vstm = opcode & 0x1b;
11228 /* Handle VMOV instructions. */
11229 if ((opcode & 0x1e) == 0x04)
11231 if (bit (arm_insn_r->arm_insn, 20)) /* to_arm_registers bit 20? */
11233 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11234 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11235 arm_insn_r->reg_rec_count = 2;
11239 uint8_t reg_m = bits (arm_insn_r->arm_insn, 0, 3);
11240 uint8_t bit_m = bit (arm_insn_r->arm_insn, 5);
11244 /* The first S register number m is REG_M:M (M is bit 5),
11245 the corresponding D register number is REG_M:M / 2, which
11247 record_buf[arm_insn_r->reg_rec_count++] = ARM_D0_REGNUM + reg_m;
11248 /* The second S register number is REG_M:M + 1, the
11249 corresponding D register number is (REG_M:M + 1) / 2.
11250 IOW, if bit M is 1, the first and second S registers
11251 are mapped to different D registers, otherwise, they are
11252 in the same D register. */
11255 record_buf[arm_insn_r->reg_rec_count++]
11256 = ARM_D0_REGNUM + reg_m + 1;
11261 record_buf[0] = ((bit_m << 4) + reg_m + ARM_D0_REGNUM);
11262 arm_insn_r->reg_rec_count = 1;
11266 /* Handle VSTM and VPUSH instructions. */
11267 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
11268 || op_vldm_vstm == 0x12)
11270 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
11271 uint32_t memory_index = 0;
11273 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11274 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11275 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11276 imm_off32 = imm_off8 << 2;
11277 memory_count = imm_off8;
11279 if (bit (arm_insn_r->arm_insn, 23))
11280 start_address = u_regval;
11282 start_address = u_regval - imm_off32;
11284 if (bit (arm_insn_r->arm_insn, 21))
11286 record_buf[0] = reg_rn;
11287 arm_insn_r->reg_rec_count = 1;
11290 while (memory_count > 0)
11294 record_buf_mem[memory_index] = 4;
11295 record_buf_mem[memory_index + 1] = start_address;
11296 start_address = start_address + 4;
11297 memory_index = memory_index + 2;
11301 record_buf_mem[memory_index] = 4;
11302 record_buf_mem[memory_index + 1] = start_address;
11303 record_buf_mem[memory_index + 2] = 4;
11304 record_buf_mem[memory_index + 3] = start_address + 4;
11305 start_address = start_address + 8;
11306 memory_index = memory_index + 4;
11310 arm_insn_r->mem_rec_count = (memory_index >> 1);
11312 /* Handle VLDM instructions. */
11313 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
11314 || op_vldm_vstm == 0x13)
11316 uint32_t reg_count, reg_vd;
11317 uint32_t reg_index = 0;
11318 uint32_t bit_d = bit (arm_insn_r->arm_insn, 22);
11320 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11321 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
11323 /* REG_VD is the first D register number. If the instruction
11324 loads memory to S registers (SINGLE_REG is TRUE), the register
11325 number is (REG_VD << 1 | bit D), so the corresponding D
11326 register number is (REG_VD << 1 | bit D) / 2 = REG_VD. */
11328 reg_vd = reg_vd | (bit_d << 4);
11330 if (bit (arm_insn_r->arm_insn, 21) /* write back */)
11331 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
11333 /* If the instruction loads memory into D registers, REG_COUNT should
11334 be divided by 2, according to the ARM Architecture Reference
11335 Manual.  If the instruction loads memory into S registers, divide by
11336 2 as well, because two S registers are mapped onto each D register. */
11337 reg_count = reg_count / 2;
11338 if (single_reg && bit_d)
11340 /* Increase the register count if S register list starts from
11341 an odd number (bit d is one). */
11345 while (reg_count > 0)
11347 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
11350 arm_insn_r->reg_rec_count = reg_index;
11352 /* VSTR Vector store register. */
11353 else if ((opcode & 0x13) == 0x10)
11355 uint32_t start_address, reg_rn, imm_off32, imm_off8;
11356 uint32_t memory_index = 0;
11358 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11359 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11360 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11361 imm_off32 = imm_off8 << 2;
11363 if (bit (arm_insn_r->arm_insn, 23))
11364 start_address = u_regval + imm_off32;
11366 start_address = u_regval - imm_off32;
11370 record_buf_mem[memory_index] = 4;
11371 record_buf_mem[memory_index + 1] = start_address;
11372 arm_insn_r->mem_rec_count = 1;
11376 record_buf_mem[memory_index] = 4;
11377 record_buf_mem[memory_index + 1] = start_address;
11378 record_buf_mem[memory_index + 2] = 4;
11379 record_buf_mem[memory_index + 3] = start_address + 4;
11380 arm_insn_r->mem_rec_count = 2;
11383 /* VLDR Vector load register. */
11384 else if ((opcode & 0x13) == 0x11)
11386 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11390 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
11391 record_buf[0] = ARM_D0_REGNUM + reg_vd;
11395 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
11396 /* Record register D rather than pseudo register S. */
11397 record_buf[0] = ARM_D0_REGNUM + reg_vd / 2;
11399 arm_insn_r->reg_rec_count = 1;
11402 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11403 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
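/* Worked example for the VPUSH/VSTM path above: VPUSH {s8, s9}
   (single-precision, imm8 == 2) gives start_address == SP - 8, so the
   loop records two (4, address) pairs, and SP itself lands in record_buf
   because write-back is set.  The VSTR path further down records one or
   two 4-byte chunks depending on the precision.  */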
11407 /* Record handler for arm/thumb mode VFP data processing instructions. */
11410 arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
11412 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
11413 uint32_t record_buf[4];
11414 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
11415 enum insn_types curr_insn_type = INSN_INV;
11417 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11418 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
11419 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
11420 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
11421 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
11422 bit_d = bit (arm_insn_r->arm_insn, 22);
11423 /* Mask off the "D" bit. */
11424 opc1 = opc1 & ~0x04;
11426 /* Handle VMLA, VMLS. */
11429 if (bit (arm_insn_r->arm_insn, 10))
11431 if (bit (arm_insn_r->arm_insn, 6))
11432 curr_insn_type = INSN_T0;
11434 curr_insn_type = INSN_T1;
11439 curr_insn_type = INSN_T1;
11441 curr_insn_type = INSN_T2;
11444 /* Handle VNMLA, VNMLS, VNMUL. */
11445 else if (opc1 == 0x01)
11448 curr_insn_type = INSN_T1;
11450 curr_insn_type = INSN_T2;
11453 else if (opc1 == 0x02 && !(opc3 & 0x01))
11455 if (bit (arm_insn_r->arm_insn, 10))
11457 if (bit (arm_insn_r->arm_insn, 6))
11458 curr_insn_type = INSN_T0;
11460 curr_insn_type = INSN_T1;
11465 curr_insn_type = INSN_T1;
11467 curr_insn_type = INSN_T2;
11470 /* Handle VADD, VSUB. */
11471 else if (opc1 == 0x03)
11473 if (!bit (arm_insn_r->arm_insn, 9))
11475 if (bit (arm_insn_r->arm_insn, 6))
11476 curr_insn_type = INSN_T0;
11478 curr_insn_type = INSN_T1;
11483 curr_insn_type = INSN_T1;
11485 curr_insn_type = INSN_T2;
11489 else if (opc1 == 0x08)
11492 curr_insn_type = INSN_T1;
11494 curr_insn_type = INSN_T2;
11496 /* Handle all other vfp data processing instructions. */
11497 else if (opc1 == 0x0b)
11500 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
11502 if (bit (arm_insn_r->arm_insn, 4))
11504 if (bit (arm_insn_r->arm_insn, 6))
11505 curr_insn_type = INSN_T0;
11507 curr_insn_type = INSN_T1;
11512 curr_insn_type = INSN_T1;
11514 curr_insn_type = INSN_T2;
11517 /* Handle VNEG and VABS. */
11518 else if ((opc2 == 0x01 && opc3 == 0x01)
11519 || (opc2 == 0x00 && opc3 == 0x03))
11521 if (!bit (arm_insn_r->arm_insn, 11))
11523 if (bit (arm_insn_r->arm_insn, 6))
11524 curr_insn_type = INSN_T0;
11526 curr_insn_type = INSN_T1;
11531 curr_insn_type = INSN_T1;
11533 curr_insn_type = INSN_T2;
11536 /* Handle VSQRT. */
11537 else if (opc2 == 0x01 && opc3 == 0x03)
11540 curr_insn_type = INSN_T1;
11542 curr_insn_type = INSN_T2;
11545 else if (opc2 == 0x07 && opc3 == 0x03)
11548 curr_insn_type = INSN_T1;
11550 curr_insn_type = INSN_T2;
11552 else if (opc3 & 0x01)
11555 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
11557 if (!bit (arm_insn_r->arm_insn, 18))
11558 curr_insn_type = INSN_T2;
11562 curr_insn_type = INSN_T1;
11564 curr_insn_type = INSN_T2;
11568 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
11571 curr_insn_type = INSN_T1;
11573 curr_insn_type = INSN_T2;
11575 /* Handle VCVTB, VCVTT. */
11576 else if ((opc2 & 0x0e) == 0x02)
11577 curr_insn_type = INSN_T2;
11578 /* Handle VCMP, VCMPE. */
11579 else if ((opc2 & 0x0e) == 0x04)
11580 curr_insn_type = INSN_T3;
11584 switch (curr_insn_type)
11587 reg_vd = reg_vd | (bit_d << 4);
11588 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11589 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
11590 arm_insn_r->reg_rec_count = 2;
11594 reg_vd = reg_vd | (bit_d << 4);
11595 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11596 arm_insn_r->reg_rec_count = 1;
11600 reg_vd = (reg_vd << 1) | bit_d;
11601 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11602 arm_insn_r->reg_rec_count = 1;
11606 record_buf[0] = ARM_FPSCR_REGNUM;
11607 arm_insn_r->reg_rec_count = 1;
11611 gdb_assert_not_reached ("no decoding pattern found");
11615 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
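/* The INSN_T0..INSN_T3 buckets above are shorthand for "what the insn
   writes": T0 records a pair of consecutive D registers, T1 a single
   D register, T2 a single-precision destination, and T3 only FPSCR (the
   VCMP/VCMPE case).  This is a reading of the switch above rather than
   of the architecture manual.  */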
11619 /* Handling opcode 110 insns. */
11622 arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
11624 uint32_t op1, op1_ebit, coproc;
11626 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11627 op1 = bits (arm_insn_r->arm_insn, 20, 25);
11628 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11630 if ((coproc & 0x0e) == 0x0a)
11632 /* Handle extension register ld/st instructions. */
11634 return arm_record_exreg_ld_st_insn (arm_insn_r);
11636 /* 64-bit transfers between arm core and extension registers. */
11637 if ((op1 & 0x3e) == 0x04)
11638 return arm_record_exreg_ld_st_insn (arm_insn_r);
11642 /* Handle coprocessor ld/st instructions. */
11647 return arm_record_unsupported_insn (arm_insn_r);
11650 return arm_record_unsupported_insn (arm_insn_r);
11653 /* Move to coprocessor from two arm core registers. */
11655 return arm_record_unsupported_insn (arm_insn_r);
11657 /* Move to two arm core registers from coprocessor. */
11662 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
11663 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
11664 arm_insn_r->reg_rec_count = 2;
11666 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
11670 return arm_record_unsupported_insn (arm_insn_r);
11673 /* Handling opcode 111 insns. */
11676 arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
11678 uint32_t op, op1_ebit, coproc, bits_24_25;
11679 struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
11680 struct regcache *reg_cache = arm_insn_r->regcache;
11682 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
11683 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11684 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11685 op = bit (arm_insn_r->arm_insn, 4);
11686 bits_24_25 = bits (arm_insn_r->arm_insn, 24, 25);
11688 /* Handle arm SWI/SVC system call instructions. */
11689 if (bits_24_25 == 0x3)
11691 if (tdep->arm_syscall_record != NULL)
11693 ULONGEST svc_operand, svc_number;
11695 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
11697 if (svc_operand) /* OABI. */
11698 svc_number = svc_operand - 0x900000;
11700 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
11702 return tdep->arm_syscall_record (reg_cache, svc_number);
11706 printf_unfiltered (_("no syscall record support\n"));
11710 else if (bits_24_25 == 0x02)
11714 if ((coproc & 0x0e) == 0x0a)
11716 /* 8, 16, and 32-bit transfer */
11717 return arm_record_vdata_transfer_insn (arm_insn_r);
11724 uint32_t record_buf[1];
11726 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11727 if (record_buf[0] == 15)
11728 record_buf[0] = ARM_PS_REGNUM;
11730 arm_insn_r->reg_rec_count = 1;
11731 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
11744 if ((coproc & 0x0e) == 0x0a)
11746 /* VFP data-processing instructions. */
11747 return arm_record_vfp_data_proc_insn (arm_insn_r);
11758 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 25);
11762 if ((coproc & 0x0e) != 0x0a)
11768 else if (op1 == 4 || op1 == 5)
11770 if ((coproc & 0x0e) == 0x0a)
11772 /* 64-bit transfers between ARM core and extension */
11781 else if (op1 == 0 || op1 == 1)
11788 if ((coproc & 0x0e) == 0x0a)
11790 /* Extension register load/store */
11794 /* STC, STC2, LDC, LDC2 */
11803 /* Handling opcode 000 insns. */
11806 thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
11808 uint32_t record_buf[8];
11809 uint32_t reg_src1 = 0;
11811 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11813 record_buf[0] = ARM_PS_REGNUM;
11814 record_buf[1] = reg_src1;
11815 thumb_insn_r->reg_rec_count = 2;
11817 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11823 /* Handling opcode 001 insns. */
11826 thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
11828 uint32_t record_buf[8];
11829 uint32_t reg_src1 = 0;
11831 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11833 record_buf[0] = ARM_PS_REGNUM;
11834 record_buf[1] = reg_src1;
11835 thumb_insn_r->reg_rec_count = 2;
11837 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11842 /* Handling opcode 010 insns. */
11845 thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
11847 struct regcache *reg_cache = thumb_insn_r->regcache;
11848 uint32_t record_buf[8], record_buf_mem[8];
11850 uint32_t reg_src1 = 0, reg_src2 = 0;
11851 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
11853 ULONGEST u_regval[2] = {0};
11855 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
11857 if (bit (thumb_insn_r->arm_insn, 12))
11859 /* Handle load/store register offset. */
11860 uint32_t opB = bits (thumb_insn_r->arm_insn, 9, 11);
11862 if (in_inclusive_range (opB, 4U, 7U))
11864 /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH. */
11865 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11866 record_buf[0] = reg_src1;
11867 thumb_insn_r->reg_rec_count = 1;
11869 else if (in_inclusive_range (opB, 0U, 2U))
11871 /* STR(2), STRB(2), STRH(2).  */
11872 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11873 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
11874 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11875 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11877 record_buf_mem[0] = 4; /* STR (2). */
11879 record_buf_mem[0] = 1; /* STRB (2). */
11881 record_buf_mem[0] = 2; /* STRH (2). */
11882 record_buf_mem[1] = u_regval[0] + u_regval[1];
11883 thumb_insn_r->mem_rec_count = 1;
11886 else if (bit (thumb_insn_r->arm_insn, 11))
11888 /* Handle load from literal pool. */
11890 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11891 record_buf[0] = reg_src1;
11892 thumb_insn_r->reg_rec_count = 1;
11896 /* Special data instructions and branch and exchange */
11897 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
11898 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
11899 if ((3 == opcode2) && (!opcode3))
11901 /* Branch with exchange. */
11902 record_buf[0] = ARM_PS_REGNUM;
11903 thumb_insn_r->reg_rec_count = 1;
11907 /* Format 8; special data processing insns. */
11908 record_buf[0] = ARM_PS_REGNUM;
11909 record_buf[1] = (bit (thumb_insn_r->arm_insn, 7) << 3
11910 | bits (thumb_insn_r->arm_insn, 0, 2));
11911 thumb_insn_r->reg_rec_count = 2;
11916 /* Format 5; data processing insns. */
11917 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11918 if (bit (thumb_insn_r->arm_insn, 7))
11920 reg_src1 = reg_src1 + 8;
11922 record_buf[0] = ARM_PS_REGNUM;
11923 record_buf[1] = reg_src1;
11924 thumb_insn_r->reg_rec_count = 2;
11927 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11928 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
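/* Worked example for the Thumb register-offset store path: for
   STR r1, [r2, r3] (opB == 0) the recorded memory is 4 bytes at R2 + R3,
   read from the register cache at decode time; loads instead record only
   the destination register in bits 0-2.  */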
11934 /* Handling opcode 001 insns. */
11937 thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
11939 struct regcache *reg_cache = thumb_insn_r->regcache;
11940 uint32_t record_buf[8], record_buf_mem[8];
11942 uint32_t reg_src1 = 0;
11943 uint32_t opcode = 0, immed_5 = 0;
11945 ULONGEST u_regval = 0;
11947 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11952 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11953 record_buf[0] = reg_src1;
11954 thumb_insn_r->reg_rec_count = 1;
11959 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11960 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11961 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11962 record_buf_mem[0] = 4;
11963 record_buf_mem[1] = u_regval + (immed_5 * 4);
11964 thumb_insn_r->mem_rec_count = 1;
11967 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11968 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11974 /* Handling opcode 100 insns. */
11977 thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
11979 struct regcache *reg_cache = thumb_insn_r->regcache;
11980 uint32_t record_buf[8], record_buf_mem[8];
11982 uint32_t reg_src1 = 0;
11983 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
11985 ULONGEST u_regval = 0;
11987 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11992 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11993 record_buf[0] = reg_src1;
11994 thumb_insn_r->reg_rec_count = 1;
11996 else if (1 == opcode)
11999 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12000 record_buf[0] = reg_src1;
12001 thumb_insn_r->reg_rec_count = 1;
12003 else if (2 == opcode)
12006 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
12007 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12008 record_buf_mem[0] = 4;
12009 record_buf_mem[1] = u_regval + (immed_8 * 4);
12010 thumb_insn_r->mem_rec_count = 1;
12012 else if (0 == opcode)
12015 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
12016 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12017 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12018 record_buf_mem[0] = 2;
12019 record_buf_mem[1] = u_regval + (immed_5 * 2);
12020 thumb_insn_r->mem_rec_count = 1;
12023 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12024 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
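/* Worked example for the SP-relative store above (opcode == 2, i.e.
   STR Rd, [SP, #imm8*4]): with imm8 == 3 the recorded memory is 4 bytes
   at SP + 12; the halfword case (opcode == 0) records 2 bytes at
   Rn + imm5*2 instead.  */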
12030 /* Handling opcode 101 insns. */
12033 thumb_record_misc (insn_decode_record *thumb_insn_r)
12035 struct regcache *reg_cache = thumb_insn_r->regcache;
12037 uint32_t opcode = 0;
12038 uint32_t register_bits = 0, register_count = 0;
12039 uint32_t index = 0, start_address = 0;
12040 uint32_t record_buf[24], record_buf_mem[48];
12043 ULONGEST u_regval = 0;
12045 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12047 if (opcode == 0 || opcode == 1)
12049 /* ADR and ADD (SP plus immediate) */
12051 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12052 record_buf[0] = reg_src1;
12053 thumb_insn_r->reg_rec_count = 1;
12057 /* Miscellaneous 16-bit instructions */
12058 uint32_t opcode2 = bits (thumb_insn_r->arm_insn, 8, 11);
12063 /* SETEND and CPS */
12066 /* ADD/SUB (SP plus immediate) */
12067 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12068 record_buf[0] = ARM_SP_REGNUM;
12069 thumb_insn_r->reg_rec_count = 1;
12071 case 1: /* fall through */
12072 case 3: /* fall through */
12073 case 9: /* fall through */
12078 /* SXTH, SXTB, UXTH, UXTB */
12079 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
12080 thumb_insn_r->reg_rec_count = 1;
12082 case 4: /* fall through */
12085 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12086 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12087 while (register_bits)
12089 if (register_bits & 0x00000001)
12091 register_bits = register_bits >> 1;
12093 start_address = u_regval -
12094 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
12095 thumb_insn_r->mem_rec_count = register_count;
12096 while (register_count)
12098 record_buf_mem[(register_count * 2) - 1] = start_address;
12099 record_buf_mem[(register_count * 2) - 2] = 4;
12100 start_address = start_address + 4;
12103 record_buf[0] = ARM_SP_REGNUM;
12104 thumb_insn_r->reg_rec_count = 1;
12107 /* REV, REV16, REVSH */
12108 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
12109 thumb_insn_r->reg_rec_count = 1;
12111 case 12: /* fall through */
12114 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12115 while (register_bits)
12117 if (register_bits & 0x00000001)
12118 record_buf[index++] = register_count;
12119 register_bits = register_bits >> 1;
12122 record_buf[index++] = ARM_PS_REGNUM;
12123 record_buf[index++] = ARM_SP_REGNUM;
12124 thumb_insn_r->reg_rec_count = index;
12128 /* Handle enhanced software breakpoint insn, BKPT.  */
12129 /* CPSR is changed so that execution continues in ARM state with normal
12130 interrupts disabled, entering abort mode.  */
12131 /* The PC is set according to the high vector configuration.  */
12132 /* When the user hits the breakpoint and then runs in reverse, we need to
12133 go back with the previous CPSR and program counter.  */
12134 record_buf[0] = ARM_PS_REGNUM;
12135 record_buf[1] = ARM_LR_REGNUM;
12136 thumb_insn_r->reg_rec_count = 2;
12137 /* We need to save SPSR value, which is not yet done. */
12138 printf_unfiltered (_("Process record does not support instruction "
12139 "0x%0x at address %s.\n"),
12140 thumb_insn_r->arm_insn,
12141 paddress (thumb_insn_r->gdbarch,
12142 thumb_insn_r->this_addr));
12146 /* If-Then, and hints */
12153 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12154 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
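/* Worked example for the PUSH case above (opcode2 4/5): PUSH {r4, r5}
   sets two bits in the 8-bit register list, so start_address is SP - 8
   and two (4, address) pairs are recorded, along with SP itself in
   record_buf.  POP (opcode2 12/13) instead records the listed registers,
   CPSR and SP.  */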
12160 /* Handling opcode 110 insns. */
12163 thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
12165 struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
12166 struct regcache *reg_cache = thumb_insn_r->regcache;
12168 uint32_t ret = 0; /* Function return value: -1: record failure; 0: success.  */
12169 uint32_t reg_src1 = 0;
12170 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
12171 uint32_t index = 0, start_address = 0;
12172 uint32_t record_buf[24], record_buf_mem[48];
12174 ULONGEST u_regval = 0;
12176 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12177 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
12183 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12185 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12186 while (register_bits)
12188 if (register_bits & 0x00000001)
12189 record_buf[index++] = register_count;
12190 register_bits = register_bits >> 1;
12193 record_buf[index++] = reg_src1;
12194 thumb_insn_r->reg_rec_count = index;
12196 else if (0 == opcode2)
12198 /* Handle STMIA (store multiple, increment after).  */
12199 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12201 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12202 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12203 while (register_bits)
12205 if (register_bits & 0x00000001)
12207 register_bits = register_bits >> 1;
12209 start_address = u_regval;
12210 thumb_insn_r->mem_rec_count = register_count;
12211 while (register_count)
12213 record_buf_mem[(register_count * 2) - 1] = start_address;
12214 record_buf_mem[(register_count * 2) - 2] = 4;
12215 start_address = start_address + 4;
12219 else if (0x1F == opcode1)
12221 /* Handle arm syscall insn. */
12222 if (tdep->arm_syscall_record != NULL)
12224 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
12225 ret = tdep->arm_syscall_record (reg_cache, u_regval);
12229 printf_unfiltered (_("no syscall record support\n"));
12234 /* B (1), the conditional branch, is automatically taken care of in
12235 process_record, as the PC is saved there.  */
12237 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12238 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12244 /* Handling opcode 111 insns. */
12247 thumb_record_branch (insn_decode_record *thumb_insn_r)
12249 uint32_t record_buf[8];
12250 uint32_t bits_h = 0;
12252 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
12254 if (2 == bits_h || 3 == bits_h)
12257 record_buf[0] = ARM_LR_REGNUM;
12258 thumb_insn_r->reg_rec_count = 1;
12260 else if (1 == bits_h)
12263 record_buf[0] = ARM_PS_REGNUM;
12264 record_buf[1] = ARM_LR_REGNUM;
12265 thumb_insn_r->reg_rec_count = 2;
12268 /* B(2) is automatically taken care of in process_record, as the PC is
12271 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12276 /* Handler for thumb2 load/store multiple instructions. */
12279 thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
12281 struct regcache *reg_cache = thumb2_insn_r->regcache;
12283 uint32_t reg_rn, op;
12284 uint32_t register_bits = 0, register_count = 0;
12285 uint32_t index = 0, start_address = 0;
12286 uint32_t record_buf[24], record_buf_mem[48];
12288 ULONGEST u_regval = 0;
12290 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12291 op = bits (thumb2_insn_r->arm_insn, 23, 24);
12293 if (0 == op || 3 == op)
12295 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12297 /* Handle RFE instruction. */
12298 record_buf[0] = ARM_PS_REGNUM;
12299 thumb2_insn_r->reg_rec_count = 1;
12303 /* Handle SRS instruction after reading banked SP. */
12304 return arm_record_unsupported_insn (thumb2_insn_r);
12307 else if (1 == op || 2 == op)
12309 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12311 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
12312 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12313 while (register_bits)
12315 if (register_bits & 0x00000001)
12316 record_buf[index++] = register_count;
12319 register_bits = register_bits >> 1;
12321 record_buf[index++] = reg_rn;
12322 record_buf[index++] = ARM_PS_REGNUM;
12323 thumb2_insn_r->reg_rec_count = index;
12327 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
12328 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12329 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12330 while (register_bits)
12332 if (register_bits & 0x00000001)
12335 register_bits = register_bits >> 1;
12340 /* Start address calculation for STM/STMIA/STMEA (increment after).  */
12341 start_address = u_regval;
12345 /* Start address calculation for STMDB/STMFD (decrement before).  */
12346 start_address = u_regval - register_count * 4;
12349 thumb2_insn_r->mem_rec_count = register_count;
12350 while (register_count)
12352 record_buf_mem[register_count * 2 - 1] = start_address;
12353 record_buf_mem[register_count * 2 - 2] = 4;
12354 start_address = start_address + 4;
12357 record_buf[0] = reg_rn;
12358 record_buf[1] = ARM_PS_REGNUM;
12359 thumb2_insn_r->reg_rec_count = 2;
12363 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12365 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12367 return ARM_RECORD_SUCCESS;
12370 /* Handler for thumb2 load/store (dual/exclusive) and table branch
12374 thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
12376 struct regcache *reg_cache = thumb2_insn_r->regcache;
12378 uint32_t reg_rd, reg_rn, offset_imm;
12379 uint32_t reg_dest1, reg_dest2;
12380 uint32_t address, offset_addr;
12381 uint32_t record_buf[8], record_buf_mem[8];
12382 uint32_t op1, op2, op3;
12384 ULONGEST u_regval[2];
12386 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
12387 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
12388 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
12390 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12392 if (!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
12394 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
12395 record_buf[0] = reg_dest1;
12396 record_buf[1] = ARM_PS_REGNUM;
12397 thumb2_insn_r->reg_rec_count = 2;
12400 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
12402 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12403 record_buf[2] = reg_dest2;
12404 thumb2_insn_r->reg_rec_count = 3;
12409 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12410 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12412 if (0 == op1 && 0 == op2)
12414 /* Handle STREX. */
12415 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12416 address = u_regval[0] + (offset_imm * 4);
12417 record_buf_mem[0] = 4;
12418 record_buf_mem[1] = address;
12419 thumb2_insn_r->mem_rec_count = 1;
12420 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12421 record_buf[0] = reg_rd;
12422 thumb2_insn_r->reg_rec_count = 1;
12424 else if (1 == op1 && 0 == op2)
12426 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12427 record_buf[0] = reg_rd;
12428 thumb2_insn_r->reg_rec_count = 1;
12429 address = u_regval[0];
12430 record_buf_mem[1] = address;
12434 /* Handle STREXB. */
12435 record_buf_mem[0] = 1;
12436 thumb2_insn_r->mem_rec_count = 1;
12440 /* Handle STREXH. */
12441 record_buf_mem[0] = 2;
12442 thumb2_insn_r->mem_rec_count = 1;
12446 /* Handle STREXD. */
12447 address = u_regval[0];
12448 record_buf_mem[0] = 4;
12449 record_buf_mem[2] = 4;
12450 record_buf_mem[3] = address + 4;
12451 thumb2_insn_r->mem_rec_count = 2;
12456 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12458 if (bit (thumb2_insn_r->arm_insn, 24))
12460 if (bit (thumb2_insn_r->arm_insn, 23))
12461 offset_addr = u_regval[0] + (offset_imm * 4);
12463 offset_addr = u_regval[0] - (offset_imm * 4);
12465 address = offset_addr;
12468 address = u_regval[0];
12470 record_buf_mem[0] = 4;
12471 record_buf_mem[1] = address;
12472 record_buf_mem[2] = 4;
12473 record_buf_mem[3] = address + 4;
12474 thumb2_insn_r->mem_rec_count = 2;
12475 record_buf[0] = reg_rn;
12476 thumb2_insn_r->reg_rec_count = 1;
12480 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12482 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12484 return ARM_RECORD_SUCCESS;
12487 /* Handler for thumb2 data processing (shift register and modified immediate)
12491 thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
12493 uint32_t reg_rd, op;
12494 uint32_t record_buf[8];
12496 op = bits (thumb2_insn_r->arm_insn, 21, 24);
12497 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12499 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
12501 record_buf[0] = ARM_PS_REGNUM;
12502 thumb2_insn_r->reg_rec_count = 1;
12506 record_buf[0] = reg_rd;
12507 record_buf[1] = ARM_PS_REGNUM;
12508 thumb2_insn_r->reg_rec_count = 2;
12511 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12513 return ARM_RECORD_SUCCESS;
12516 /* Generic handler for thumb2 instructions which affect the destination and PS
12520 thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
12523 uint32_t record_buf[8];
12525 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12527 record_buf[0] = reg_rd;
12528 record_buf[1] = ARM_PS_REGNUM;
12529 thumb2_insn_r->reg_rec_count = 2;
12531 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12533 return ARM_RECORD_SUCCESS;
12536 /* Handler for thumb2 branch and miscellaneous control instructions. */
12539 thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
12541 uint32_t op, op1, op2;
12542 uint32_t record_buf[8];
12544 op = bits (thumb2_insn_r->arm_insn, 20, 26);
12545 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
12546 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12548 /* Handle MSR insn. */
12549 if (!(op1 & 0x2) && 0x38 == op)
12553 /* CPSR is going to be changed. */
12554 record_buf[0] = ARM_PS_REGNUM;
12555 thumb2_insn_r->reg_rec_count = 1;
12559 arm_record_unsupported_insn (thumb2_insn_r);
12563 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
12566 record_buf[0] = ARM_PS_REGNUM;
12567 record_buf[1] = ARM_LR_REGNUM;
12568 thumb2_insn_r->reg_rec_count = 2;
12571 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12573 return ARM_RECORD_SUCCESS;
12576 /* Handler for thumb2 store single data item instructions. */
12579 thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
12581 struct regcache *reg_cache = thumb2_insn_r->regcache;
12583 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
12584 uint32_t address, offset_addr;
12585 uint32_t record_buf[8], record_buf_mem[8];
12588 ULONGEST u_regval[2];
12590 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
12591 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
12592 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12593 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12595 if (bit (thumb2_insn_r->arm_insn, 23))
12598 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
12599 offset_addr = u_regval[0] + offset_imm;
12600 address = offset_addr;
12605 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
12607 /* Handle STRB (register). */
12608 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
12609 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
12610 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
12611 offset_addr = u_regval[1] << shift_imm;
12612 address = u_regval[0] + offset_addr;
12616 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12617 if (bit (thumb2_insn_r->arm_insn, 10))
12619 if (bit (thumb2_insn_r->arm_insn, 9))
12620 offset_addr = u_regval[0] + offset_imm;
12622 offset_addr = u_regval[0] - offset_imm;
12624 address = offset_addr;
12627 address = u_regval[0];
12633 /* Store byte instructions. */
12636 record_buf_mem[0] = 1;
12638 /* Store half word instructions. */
12641 record_buf_mem[0] = 2;
12643 /* Store word instructions. */
12646 record_buf_mem[0] = 4;
12650 gdb_assert_not_reached ("no decoding pattern found");
12654 record_buf_mem[1] = address;
12655 thumb2_insn_r->mem_rec_count = 1;
12656 record_buf[0] = reg_rn;
12657 thumb2_insn_r->reg_rec_count = 1;
12659 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12661 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12663 return ARM_RECORD_SUCCESS;
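/* Worked example for the T32 store above: STR.W r1, [r2, #8] takes the
   12-bit immediate path (bit 23 set), so address is R2 + 8 and a single
   (4, address) pair is recorded together with the base register R2,
   which may be written back in the indexed forms.  */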
12666 /* Handler for thumb2 load memory hints instructions. */
12669 thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
12671 uint32_t record_buf[8];
12672 uint32_t reg_rt, reg_rn;
12674 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
12675 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12677 if (ARM_PC_REGNUM != reg_rt)
12679 record_buf[0] = reg_rt;
12680 record_buf[1] = reg_rn;
12681 record_buf[2] = ARM_PS_REGNUM;
12682 thumb2_insn_r->reg_rec_count = 3;
12684 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12686 return ARM_RECORD_SUCCESS;
12689 return ARM_RECORD_FAILURE;
12692 /* Handler for thumb2 load word instructions. */
12695 thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
12697 uint32_t record_buf[8];
12699 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
12700 record_buf[1] = ARM_PS_REGNUM;
12701 thumb2_insn_r->reg_rec_count = 2;
12703 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12705 return ARM_RECORD_SUCCESS;
12708 /* Handler for thumb2 long multiply, long multiply accumulate, and
12709 divide instructions. */
12712 thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
12714 uint32_t opcode1 = 0, opcode2 = 0;
12715 uint32_t record_buf[8];
12717 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
12718 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
12720 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
12722 /* Handle the long multiply (accumulate) instructions:
12723 SMULL(S), UMULL(S), SMLAL(S) and UMLAL(S).  */
12724 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12725 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12726 record_buf[2] = ARM_PS_REGNUM;
12727 thumb2_insn_r->reg_rec_count = 3;
12729 else if (1 == opcode1 || 3 == opcode1)
12731 /* Handle SDIV and UDIV. */
12732 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12733 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12734 record_buf[2] = ARM_PS_REGNUM;
12735 thumb2_insn_r->reg_rec_count = 3;
12738 return ARM_RECORD_FAILURE;
12740 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12742 return ARM_RECORD_SUCCESS;
12745 /* Record handler for thumb32 coprocessor instructions. */
12748 thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
12750 if (bit (thumb2_insn_r->arm_insn, 25))
12751 return arm_record_coproc_data_proc (thumb2_insn_r);
12753 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
12756 /* Record handler for advanced SIMD structure load/store instructions. */
12759 thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
12761 struct regcache *reg_cache = thumb2_insn_r->regcache;
12762 uint32_t l_bit, a_bit, b_bits;
12763 uint32_t record_buf[128], record_buf_mem[128];
12764 uint32_t reg_rn, reg_vd, address, f_elem;
12765 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
12768 l_bit = bit (thumb2_insn_r->arm_insn, 21);
12769 a_bit = bit (thumb2_insn_r->arm_insn, 23);
12770 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
12771 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12772 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
12773 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
12774 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
12775 f_elem = 8 / f_ebytes;
12779 ULONGEST u_regval = 0;
12780 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12781 address = u_regval;
12786 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12788 if (b_bits == 0x07)
12790 else if (b_bits == 0x0a)
12792 else if (b_bits == 0x06)
12794 else if (b_bits == 0x02)
12799 for (index_r = 0; index_r < bf_regs; index_r++)
12801 for (index_e = 0; index_e < f_elem; index_e++)
12803 record_buf_mem[index_m++] = f_ebytes;
12804 record_buf_mem[index_m++] = address;
12805 address = address + f_ebytes;
12806 thumb2_insn_r->mem_rec_count += 1;
12811 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12813 if (b_bits == 0x09 || b_bits == 0x08)
12815 else if (b_bits == 0x03)
12820 for (index_r = 0; index_r < bf_regs; index_r++)
12821 for (index_e = 0; index_e < f_elem; index_e++)
12823 for (loop_t = 0; loop_t < 2; loop_t++)
12825 record_buf_mem[index_m++] = f_ebytes;
12826 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12827 thumb2_insn_r->mem_rec_count += 1;
12829 address = address + (2 * f_ebytes);
12833 else if ((b_bits & 0x0e) == 0x04)
12835 for (index_e = 0; index_e < f_elem; index_e++)
12837 for (loop_t = 0; loop_t < 3; loop_t++)
12839 record_buf_mem[index_m++] = f_ebytes;
12840 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12841 thumb2_insn_r->mem_rec_count += 1;
12843 address = address + (3 * f_ebytes);
12847 else if (!(b_bits & 0x0e))
12849 for (index_e = 0; index_e < f_elem; index_e++)
12851 for (loop_t = 0; loop_t < 4; loop_t++)
12853 record_buf_mem[index_m++] = f_ebytes;
12854 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12855 thumb2_insn_r->mem_rec_count += 1;
12857 address = address + (4 * f_ebytes);
12863 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
12865 if (bft_size == 0x00)
12867 else if (bft_size == 0x01)
12869 else if (bft_size == 0x02)
12875 if (!(b_bits & 0x0b) || b_bits == 0x08)
12876 thumb2_insn_r->mem_rec_count = 1;
12878 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
12879 thumb2_insn_r->mem_rec_count = 2;
12881 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
12882 thumb2_insn_r->mem_rec_count = 3;
12884 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
12885 thumb2_insn_r->mem_rec_count = 4;
12887 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
12889 record_buf_mem[index_m * 2] = f_ebytes;
12890 record_buf_mem[index_m * 2 + 1] = address + (index_m * f_ebytes);
12899 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12900 thumb2_insn_r->reg_rec_count = 1;
12902 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12903 thumb2_insn_r->reg_rec_count = 2;
12905 else if ((b_bits & 0x0e) == 0x04)
12906 thumb2_insn_r->reg_rec_count = 3;
12908 else if (!(b_bits & 0x0e))
12909 thumb2_insn_r->reg_rec_count = 4;
12914 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
12915 thumb2_insn_r->reg_rec_count = 1;
12917 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
12918 thumb2_insn_r->reg_rec_count = 2;
12920 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
12921 thumb2_insn_r->reg_rec_count = 3;
12923 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
12924 thumb2_insn_r->reg_rec_count = 4;
12926 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
12927 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
12931 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
12933 record_buf[index_r] = reg_rn;
12934 thumb2_insn_r->reg_rec_count += 1;
12937 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12939 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12944 /* Decodes thumb2 instruction type and invokes its record handler. */
12946 static unsigned int
12947 thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
12949 uint32_t op, op1, op2;
12951 op = bit (thumb2_insn_r->arm_insn, 15);
12952 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
12953 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
12957 if (!(op2 & 0x64))
12959 /* Load/store multiple instruction. */
12960 return thumb2_record_ld_st_multiple (thumb2_insn_r);
12962 else if ((op2 & 0x64) == 0x4)
12964 /* Load/store (dual/exclusive) and table branch instruction. */
12965 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
12967 else if ((op2 & 0x60) == 0x20)
12969 /* Data-processing (shifted register). */
12970 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12972 else if (op2 & 0x40)
12974 /* Co-processor instructions. */
12975 return thumb2_record_coproc_insn (thumb2_insn_r);
12978 else if (op1 == 0x02)
12982 /* Branches and miscellaneous control instructions. */
12983 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
12985 else if (op2 & 0x20)
12987 /* Data-processing (plain binary immediate) instruction. */
12988 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12992 /* Data-processing (modified immediate). */
12993 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12996 else if (op1 == 0x03)
12998 if (!(op2 & 0x71))
13000 /* Store single data item. */
13001 return thumb2_record_str_single_data (thumb2_insn_r);
13003 else if (!((op2 & 0x71) ^ 0x10))
13005 /* Advanced SIMD or structure load/store instructions. */
13006 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
13008 else if (!((op2 & 0x67) ^ 0x01))
13010 /* Load byte, memory hints instruction. */
13011 return thumb2_record_ld_mem_hints (thumb2_insn_r);
13013 else if (!((op2 & 0x67) ^ 0x03))
13015 /* Load halfword, memory hints instruction. */
13016 return thumb2_record_ld_mem_hints (thumb2_insn_r);
13018 else if (!((op2 & 0x67) ^ 0x05))
13020 /* Load word instruction. */
13021 return thumb2_record_ld_word (thumb2_insn_r);
13023 else if (!((op2 & 0x70) ^ 0x20))
13025 /* Data-processing (register) instruction. */
13026 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13028 else if (!((op2 & 0x78) ^ 0x30))
13030 /* Multiply, multiply accumulate, abs diff instruction. */
13031 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13033 else if (!((op2 & 0x78) ^ 0x38))
13035 /* Long multiply, long multiply accumulate, and divide. */
13036 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
13038 else if (op2 & 0x40)
13040 /* Co-processor instructions. */
13041 return thumb2_record_coproc_insn (thumb2_insn_r);
13049 /* Abstract memory reader. */
13051 class abstract_memory_reader
13054 /* Read LEN bytes of target memory at address MEMADDR, placing the
13055 results in GDB's memory at BUF. Return true on success. */
13057 virtual bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) = 0;
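/* Illustrative sketch only (reader, addr, insn and byte_order are
   placeholder names): a caller typically pulls one opcode through this
   interface like

     gdb_byte buf[4];
     if (reader.read (addr, buf, 4))
       insn = extract_unsigned_integer (buf, 4, byte_order);

   which is essentially what extract_arm_insn below does.  */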
13060 /* Instruction reader from real target. */
13062 class instruction_reader : public abstract_memory_reader
13065 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
13067 if (target_read_memory (memaddr, buf, len))
13076 /* Extracts arm/thumb/thumb2 insn depending on the size, and returns 0 on success
13077 and a positive value on failure. */
13080 extract_arm_insn (abstract_memory_reader& reader,
13081 insn_decode_record *insn_record, uint32_t insn_size)
13083 gdb_byte buf[insn_size];
13085 memset (&buf[0], 0, insn_size);
13087 if (!reader.read (insn_record->this_addr, buf, insn_size))
13089 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
13091 gdbarch_byte_order_for_code (insn_record->gdbarch));
13095 typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
13097 /* Decode arm/thumb insn depending on condition codes and opcodes; and
13101 decode_insn (abstract_memory_reader &reader, insn_decode_record *arm_record,
13102 record_type_t record_type, uint32_t insn_size)
13105 /* (Starting from numerical 0); bits 25, 26, 27 decode the type of arm
13107 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
13109 arm_record_data_proc_misc_ld_str, /* 000. */
13110 arm_record_data_proc_imm, /* 001. */
13111 arm_record_ld_st_imm_offset, /* 010. */
13112 arm_record_ld_st_reg_offset, /* 011. */
13113 arm_record_ld_st_multiple, /* 100. */
13114 arm_record_b_bl, /* 101. */
13115 arm_record_asimd_vfp_coproc, /* 110. */
13116 arm_record_coproc_data_proc /* 111. */
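/* Hand-worked example (illustrative): ldr r0, [r1] encodes as 0xe5910000,
   with cond = 0xe and bits 25..27 = 0b010, so this table dispatches it to
   arm_record_ld_st_imm_offset.  */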
13119 /* (Starting from numerical 0); bits 13, 14, 15 decode the type of thumb
13121 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
13123 thumb_record_shift_add_sub, /* 000. */
13124 thumb_record_add_sub_cmp_mov, /* 001. */
13125 thumb_record_ld_st_reg_offset, /* 010. */
13126 thumb_record_ld_st_imm_offset, /* 011. */
13127 thumb_record_ld_st_stack, /* 100. */
13128 thumb_record_misc, /* 101. */
13129 thumb_record_ldm_stm_swi, /* 110. */
13130 thumb_record_branch /* 111. */
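/* E.g. the 16-bit opcodes exercised by the selftests below: uxtb r3, r3
   (0xb2db) has bits 13..15 = 0b101 and is handled by thumb_record_misc,
   while ldr r5, [r1, r3] (0x58cd) has bits 13..15 = 0b010 and is handled
   by thumb_record_ld_st_reg_offset.  */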
13133 uint32_t ret = 0; /* Return value: nonzero means failure, 0 means success. */
13134 uint32_t insn_id = 0;
13136 if (extract_arm_insn (reader, arm_record, insn_size))
13140 printf_unfiltered (_("Process record: error reading memory at "
13141 "addr %s len = %d.\n"),
13142 paddress (arm_record->gdbarch,
13143 arm_record->this_addr), insn_size);
13147 else if (ARM_RECORD == record_type)
13149 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
13150 insn_id = bits (arm_record->arm_insn, 25, 27);
13152 if (arm_record->cond == 0xf)
13153 ret = arm_record_extension_space (arm_record);
13156 /* If this insn has fallen into extension space
13157 then we need not decode it anymore. */
13158 ret = arm_handle_insn[insn_id] (arm_record);
13160 if (ret != ARM_RECORD_SUCCESS)
13162 arm_record_unsupported_insn (arm_record);
13166 else if (THUMB_RECORD == record_type)
13168 /* Thumb does not have condition codes, so set cond to -1. */
13169 arm_record->cond = -1;
13170 insn_id = bits (arm_record->arm_insn, 13, 15);
13171 ret = thumb_handle_insn[insn_id] (arm_record);
13172 if (ret != ARM_RECORD_SUCCESS)
13174 arm_record_unsupported_insn (arm_record);
13178 else if (THUMB2_RECORD == record_type)
13180 /* Thumb does not have condition codes, so set cond to -1. */
13181 arm_record->cond = -1;
13183 /* Swap the first halfword of the 32-bit thumb instruction with the second. */
13184 arm_record->arm_insn
13185 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
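/* Worked example (assuming little-endian code, with the selftest bytes
   1d ee 70 7f): extract_arm_insn yields 0x7f70ee1d, and the swap produces
   0xee1d7f70, so the first halfword of the encoding lands in bits 16..31
   where the field extractions in thumb2_record_decode_insn_handler expect
   it.  */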
13187 ret = thumb2_record_decode_insn_handler (arm_record);
13189 if (ret != ARM_RECORD_SUCCESS)
13191 arm_record_unsupported_insn (arm_record);
13197 /* Unknown record type; this should be unreachable. */
13198 gdb_assert_not_reached ("not a valid instruction, could not decode");
13205 namespace selftests {
13207 /* Provide both 16-bit and 32-bit thumb instructions. */
13209 class instruction_reader_thumb : public abstract_memory_reader
13212 template<size_t SIZE>
13213 instruction_reader_thumb (enum bfd_endian endian,
13214 const uint16_t (&insns)[SIZE])
13215 : m_endian (endian), m_insns (insns), m_insns_size (SIZE)
13218 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
13220 SELF_CHECK (len == 4 || len == 2);
13221 SELF_CHECK (memaddr % 2 == 0);
13222 SELF_CHECK ((memaddr / 2) < m_insns_size);
13224 store_unsigned_integer (buf, 2, m_endian, m_insns[memaddr / 2]);
13227 store_unsigned_integer (&buf[2], 2, m_endian,
13228 m_insns[memaddr / 2 + 1]);
13234 enum bfd_endian m_endian;
13235 const uint16_t *m_insns;
13236 size_t m_insns_size;
13240 arm_record_test (void)
13242 struct gdbarch_info info;
13243 gdbarch_info_init (&info);
13244 info.bfd_arch_info = bfd_scan_arch ("arm");
13246 struct gdbarch *gdbarch = gdbarch_find_by_info (info);
13248 SELF_CHECK (gdbarch != NULL);
13250 /* 16-bit Thumb instructions. */
13252 insn_decode_record arm_record;
13254 memset (&arm_record, 0, sizeof (insn_decode_record));
13255 arm_record.gdbarch = gdbarch;
13257 static const uint16_t insns[] = {
13258 /* db b2 uxtb r3, r3 */
13260 /* cd 58 ldr r5, [r1, r3] */
13264 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
13265 instruction_reader_thumb reader (endian, insns);
13266 int ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13267 THUMB_INSN_SIZE_BYTES);
13269 SELF_CHECK (ret == 0);
13270 SELF_CHECK (arm_record.mem_rec_count == 0);
13271 SELF_CHECK (arm_record.reg_rec_count == 1);
13272 SELF_CHECK (arm_record.arm_regs[0] == 3);
13274 arm_record.this_addr += 2;
13275 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13276 THUMB_INSN_SIZE_BYTES);
13278 SELF_CHECK (ret == 0);
13279 SELF_CHECK (arm_record.mem_rec_count == 0);
13280 SELF_CHECK (arm_record.reg_rec_count == 1);
13281 SELF_CHECK (arm_record.arm_regs[0] == 5);
13284 /* 32-bit Thumb-2 instructions. */
13286 insn_decode_record arm_record;
13288 memset (&arm_record, 0, sizeof (insn_decode_record));
13289 arm_record.gdbarch = gdbarch;
13291 static const uint16_t insns[] = {
13292 /* 1d ee 70 7f mrc 15, 0, r7, cr13, cr0, {3} */
13296 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
13297 instruction_reader_thumb reader (endian, insns);
13298 int ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
13299 THUMB2_INSN_SIZE_BYTES);
13301 SELF_CHECK (ret == 0);
13302 SELF_CHECK (arm_record.mem_rec_count == 0);
13303 SELF_CHECK (arm_record.reg_rec_count == 1);
13304 SELF_CHECK (arm_record.arm_regs[0] == 7);
13307 } // namespace selftests
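/* arm_record_test is presumably registered with GDB's unit-test framework
   from _initialize_arm_tdep (e.g. via selftests::register_test); the
   registration itself lives elsewhere in this file.  */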
13308 #endif /* GDB_SELF_TEST */
13310 /* Cleans up local record registers and memory allocations. */
13313 deallocate_reg_mem (insn_decode_record *record)
13315 xfree (record->arm_regs);
13316 xfree (record->arm_mems);
13320 /* Parse the current instruction and record the values of the registers and
13321 memory that will be changed by the current instruction to "record_arch_list".
13322 Return -1 if something goes wrong. */
13325 arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
13326 CORE_ADDR insn_addr)
13329 uint32_t no_of_rec = 0;
13330 uint32_t ret = 0; /* Return value: nonzero means record failure, 0 means success. */
13331 ULONGEST t_bit = 0, insn_id = 0;
13333 ULONGEST u_regval = 0;
13335 insn_decode_record arm_record;
13337 memset (&arm_record, 0, sizeof (insn_decode_record));
13338 arm_record.regcache = regcache;
13339 arm_record.this_addr = insn_addr;
13340 arm_record.gdbarch = gdbarch;
13343 if (record_debug > 1)
13345 fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
13347 paddress (gdbarch, arm_record.this_addr));
13350 instruction_reader reader;
13351 if (extract_arm_insn (reader, &arm_record, 2))
13355 printf_unfiltered (_("Process record: error reading memory at "
13356 "addr %s len = %d.\n"),
13357 paddress (arm_record.gdbarch,
13358 arm_record.this_addr), 2);
13363 /* Check whether the instruction is a Thumb or an ARM one. */
13365 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
13366 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
13369 if (!(u_regval & t_bit))
13371 /* We are decoding arm insn. */
13372 ret = decode_insn (reader, &arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
13376 insn_id = bits (arm_record.arm_insn, 11, 15);
13377 /* Is it a Thumb-2 instruction?  (See the worked example below.) */
13378 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
13380 ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
13381 THUMB2_INSN_SIZE_BYTES);
13385 /* We are decoding thumb insn. */
13386 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13387 THUMB_INSN_SIZE_BYTES);
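/* Worked example (values taken from the selftests above): a first halfword
   of 0xee1d has bits 11..15 = 0x1D and is treated as the start of a 32-bit
   Thumb-2 instruction, whereas 0xb2db (uxtb) has bits 11..15 = 0x16 and is
   decoded as a plain 16-bit Thumb instruction.  */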
13393 /* Record registers. */
13394 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
13395 if (arm_record.arm_regs)
13397 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
13399 if (record_full_arch_list_add_reg
13400 (arm_record.regcache, arm_record.arm_regs[no_of_rec]))
13404 /* Record memories. */
13405 if (arm_record.arm_mems)
13407 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
13409 if (record_full_arch_list_add_mem
13410 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
13411 arm_record.arm_mems[no_of_rec].len))
13416 if (record_full_arch_list_add_end ())
13421 deallocate_reg_mem (&arm_record);