/* Common target dependent code for GDB on ARM systems.

   Copyright (C) 1988-2017 Free Software Foundation, Inc.

   This file is part of GDB.

   This program is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3 of the License, or
   (at your option) any later version.

   This program is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program.  If not, see <http://www.gnu.org/licenses/>.  */
#include <ctype.h>		/* XXX for isupper ().  */

#include "dis-asm.h"		/* For register styles.  */
#include "reggroups.h"
#include "target-float.h"
#include "arch-utils.h"
#include "frame-unwind.h"
#include "frame-base.h"
#include "trad-frame.h"
#include "dwarf2-frame.h"
#include "prologue-value.h"
#include "target-descriptions.h"
#include "user-regs.h"
#include "arch/arm-get-next-pcs.h"
#include "gdb/sim-arm.h"
#include "coff/internal.h"
#include "record-full.h"

#include "features/arm/arm-with-m.c"
#include "features/arm/arm-with-m-fpa-layout.c"
#include "features/arm/arm-with-m-vfp-d16.c"
#include "features/arm/arm-with-iwmmxt.c"
#include "features/arm/arm-with-vfpv2.c"
#include "features/arm/arm-with-vfpv3.c"
#include "features/arm/arm-with-neon.c"
/* Macros for setting and testing a bit in a minimal symbol that marks
   it as a Thumb function.  The MSB of the minimal symbol's "info" field
   is used for this purpose.

   MSYMBOL_SET_SPECIAL	Actually sets the "special" bit.
   MSYMBOL_IS_SPECIAL	Tests the "special" bit in a minimal symbol.  */

#define MSYMBOL_SET_SPECIAL(msym)	\
	MSYMBOL_TARGET_FLAG_1 (msym) = 1

#define MSYMBOL_IS_SPECIAL(msym)	\
	MSYMBOL_TARGET_FLAG_1 (msym)
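/* For illustration only: a symbol-reading hook that decides a symbol is a
   Thumb function (e.g. from the low bit of an ELF symbol value or a $t
   mapping symbol) would tag the minimal symbol roughly like this (the
   sym_is_thumb condition below is a placeholder, not a real variable):

     if (sym_is_thumb)
       MSYMBOL_SET_SPECIAL (msym);

   arm_pc_is_thumb below then consults MSYMBOL_IS_SPECIAL when no mapping
   symbols are available.  */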
91 /* Per-objfile data used for mapping symbols. */
92 static const struct objfile_data *arm_objfile_data_key;
94 struct arm_mapping_symbol
99 typedef struct arm_mapping_symbol arm_mapping_symbol_s;
100 DEF_VEC_O(arm_mapping_symbol_s);
102 struct arm_per_objfile
104 VEC(arm_mapping_symbol_s) **section_maps;
107 /* The list of available "set arm ..." and "show arm ..." commands. */
108 static struct cmd_list_element *setarmcmdlist = NULL;
109 static struct cmd_list_element *showarmcmdlist = NULL;
111 /* The type of floating-point to use. Keep this in sync with enum
112 arm_float_model, and the help string in _initialize_arm_tdep. */
113 static const char *const fp_model_strings[] =
123 /* A variable that can be configured by the user. */
124 static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
125 static const char *current_fp_model = "auto";
127 /* The ABI to use. Keep this in sync with arm_abi_kind. */
128 static const char *const arm_abi_strings[] =
136 /* A variable that can be configured by the user. */
137 static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
138 static const char *arm_abi_string = "auto";
140 /* The execution mode to assume. */
141 static const char *const arm_mode_strings[] =
149 static const char *arm_fallback_mode_string = "auto";
150 static const char *arm_force_mode_string = "auto";
152 /* The standard register names, and all the valid aliases for them. Note
153 that `fp', `sp' and `pc' are not added in this alias list, because they
154 have been added as builtin user registers in
155 std-regs.c:_initialize_frame_reg. */
160 } arm_register_aliases[] = {
161 /* Basic register numbers. */
178 /* Synonyms (argument and variable registers). */
191 /* Other platform-specific names for r9. */
197 /* Names used by GCC (not listed in the ARM EABI). */
199 /* A special name from the older ATPCS. */
203 static const char *const arm_register_names[] =
204 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
205 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
206 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
207 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
208 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
209 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
210 "fps", "cpsr" }; /* 24 25 */
212 /* Holds the current set of options to be passed to the disassembler. */
213 static char *arm_disassembler_options;
215 /* Valid register name styles. */
216 static const char **valid_disassembly_styles;
218 /* Disassembly style to use. Default to "std" register names. */
219 static const char *disassembly_style;
/* This is used to keep the bfd arch_info in sync with the disassembly
   style.  */
223 static void set_disassembly_style_sfunc (const char *, int,
224 struct cmd_list_element *);
225 static void show_disassembly_style_sfunc (struct ui_file *, int,
226 struct cmd_list_element *,
229 static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
230 struct regcache *regcache,
231 int regnum, gdb_byte *buf);
232 static void arm_neon_quad_write (struct gdbarch *gdbarch,
233 struct regcache *regcache,
234 int regnum, const gdb_byte *buf);
237 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self);
240 /* get_next_pcs operations. */
241 static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = {
242 arm_get_next_pcs_read_memory_unsigned_integer,
243 arm_get_next_pcs_syscall_next_pc,
244 arm_get_next_pcs_addr_bits_remove,
245 arm_get_next_pcs_is_thumb,
249 struct arm_prologue_cache
251 /* The stack pointer at the time this frame was created; i.e. the
252 caller's stack pointer when this function was called. It is used
253 to identify this frame. */
256 /* The frame base for this frame is just prev_sp - frame size.
257 FRAMESIZE is the distance from the frame pointer to the
258 initial stack pointer. */
262 /* The register used to hold the frame pointer for this frame. */
265 /* Saved register offsets. */
266 struct trad_frame_saved_reg *saved_regs;
269 static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
270 CORE_ADDR prologue_start,
271 CORE_ADDR prologue_end,
272 struct arm_prologue_cache *cache);
/* Architecture version for displaced stepping.  This affects the behaviour of
   certain instructions, and really should not be hard-wired.  */

#define DISPLACED_STEPPING_ARCH_VERSION		5
279 /* Set to true if the 32-bit mode is in use. */
283 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
286 arm_psr_thumb_bit (struct gdbarch *gdbarch)
288 if (gdbarch_tdep (gdbarch)->is_m)
294 /* Determine if the processor is currently executing in Thumb mode. */
297 arm_is_thumb (struct regcache *regcache)
300 ULONGEST t_bit = arm_psr_thumb_bit (regcache->arch ());
302 cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
304 return (cpsr & t_bit) != 0;
307 /* Determine if FRAME is executing in Thumb mode. */
310 arm_frame_is_thumb (struct frame_info *frame)
313 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
315 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
316 directly (from a signal frame or dummy frame) or by interpreting
317 the saved LR (from a prologue or DWARF frame). So consult it and
318 trust the unwinders. */
319 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
321 return (cpsr & t_bit) != 0;
324 /* Callback for VEC_lower_bound. */
327 arm_compare_mapping_symbols (const struct arm_mapping_symbol *lhs,
328 const struct arm_mapping_symbol *rhs)
330 return lhs->value < rhs->value;
333 /* Search for the mapping symbol covering MEMADDR. If one is found,
334 return its type. Otherwise, return 0. If START is non-NULL,
335 set *START to the location of the mapping symbol. */
338 arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
340 struct obj_section *sec;
342 /* If there are mapping symbols, consult them. */
343 sec = find_pc_section (memaddr);
346 struct arm_per_objfile *data;
347 VEC(arm_mapping_symbol_s) *map;
348 struct arm_mapping_symbol map_key = { memaddr - obj_section_addr (sec),
352 data = (struct arm_per_objfile *) objfile_data (sec->objfile,
353 arm_objfile_data_key);
356 map = data->section_maps[sec->the_bfd_section->index];
357 if (!VEC_empty (arm_mapping_symbol_s, map))
359 struct arm_mapping_symbol *map_sym;
361 idx = VEC_lower_bound (arm_mapping_symbol_s, map, &map_key,
362 arm_compare_mapping_symbols);
364 /* VEC_lower_bound finds the earliest ordered insertion
365 point. If the following symbol starts at this exact
366 address, we use that; otherwise, the preceding
367 mapping symbol covers this address. */
368 if (idx < VEC_length (arm_mapping_symbol_s, map))
370 map_sym = VEC_index (arm_mapping_symbol_s, map, idx);
371 if (map_sym->value == map_key.value)
374 *start = map_sym->value + obj_section_addr (sec);
375 return map_sym->type;
381 map_sym = VEC_index (arm_mapping_symbol_s, map, idx - 1);
383 *start = map_sym->value + obj_section_addr (sec);
384 return map_sym->type;
393 /* Determine if the program counter specified in MEMADDR is in a Thumb
394 function. This function should be called for addresses unrelated to
395 any executing frame; otherwise, prefer arm_frame_is_thumb. */
398 arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
400 struct bound_minimal_symbol sym;
402 arm_displaced_step_closure *dsc
403 = ((arm_displaced_step_closure * )
404 get_displaced_step_closure_by_addr (memaddr));
  /* If checking the mode of a displaced instruction in the copy area, the
     mode should be determined by the instruction at the original address.  */
411 fprintf_unfiltered (gdb_stdlog,
412 "displaced: check mode of %.8lx instead of %.8lx\n",
413 (unsigned long) dsc->insn_addr,
414 (unsigned long) memaddr);
415 memaddr = dsc->insn_addr;
418 /* If bit 0 of the address is set, assume this is a Thumb address. */
419 if (IS_THUMB_ADDR (memaddr))
  /* If the user wants to override the symbol table, let them.  */
423 if (strcmp (arm_force_mode_string, "arm") == 0)
425 if (strcmp (arm_force_mode_string, "thumb") == 0)
428 /* ARM v6-M and v7-M are always in Thumb mode. */
429 if (gdbarch_tdep (gdbarch)->is_m)
432 /* If there are mapping symbols, consult them. */
433 type = arm_find_mapping_symbol (memaddr, NULL);
437 /* Thumb functions have a "special" bit set in minimal symbols. */
438 sym = lookup_minimal_symbol_by_pc (memaddr);
440 return (MSYMBOL_IS_SPECIAL (sym.minsym));
442 /* If the user wants to override the fallback mode, let them. */
443 if (strcmp (arm_fallback_mode_string, "arm") == 0)
445 if (strcmp (arm_fallback_mode_string, "thumb") == 0)
448 /* If we couldn't find any symbol, but we're talking to a running
449 target, then trust the current value of $cpsr. This lets
450 "display/i $pc" always show the correct mode (though if there is
451 a symbol table we will not reach here, so it still may not be
452 displayed in the mode it will be executed). */
453 if (target_has_registers)
454 return arm_frame_is_thumb (get_current_frame ());
456 /* Otherwise we're out of luck; we assume ARM. */
/* Determine if the address specified equals any of these magic return
   values, called EXC_RETURN, defined by the ARM v6-M and v7-M
   architectures.

   From ARMv6-M Reference Manual B1.5.8
   Table B1-5 Exception return behavior

   EXC_RETURN    Return To        Return Stack
   0xFFFFFFF1    Handler mode     Main
   0xFFFFFFF9    Thread mode      Main
   0xFFFFFFFD    Thread mode      Process

   From ARMv7-M Reference Manual B1.5.8
   Table B1-8 EXC_RETURN definition of exception return behavior, no FP

   EXC_RETURN    Return To        Return Stack
   0xFFFFFFF1    Handler mode     Main
   0xFFFFFFF9    Thread mode      Main
   0xFFFFFFFD    Thread mode      Process

   Table B1-9 EXC_RETURN definition of exception return behavior, with FP

   EXC_RETURN    Return To        Return Stack    Frame Type
   0xFFFFFFE1    Handler mode     Main            Extended
   0xFFFFFFE9    Thread mode      Main            Extended
   0xFFFFFFED    Thread mode      Process         Extended
   0xFFFFFFF1    Handler mode     Main            Basic
   0xFFFFFFF9    Thread mode      Main            Basic
   0xFFFFFFFD    Thread mode      Process         Basic

   For more details see "B1.5.8 Exception return behavior"
   in both ARMv6-M and ARMv7-M Architecture Reference Manuals.  */
static int
arm_m_addr_is_magic (CORE_ADDR addr)
{
  /* Values from the Tables in B1.5.8, the EXC_RETURN definitions of
     the exception return behavior.  */
  if (addr == 0xffffffe1 || addr == 0xffffffe9 || addr == 0xffffffed
      || addr == 0xfffffff1 || addr == 0xfffffff9 || addr == 0xfffffffd)
    /* Address is magic.  */
    return 1;

  /* Address is not magic.  */
  return 0;
}
516 /* Remove useless bits from addresses in a running program. */
518 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
520 /* On M-profile devices, do not strip the low bit from EXC_RETURN
521 (the magic exception return address). */
522 if (gdbarch_tdep (gdbarch)->is_m
523 && arm_m_addr_is_magic (val))
527 return UNMAKE_THUMB_ADDR (val);
529 return (val & 0x03fffffc);
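/* For example (illustrative values): a Thumb code address such as 0x80001235
   comes back as 0x80001234 with the Thumb bit stripped, while an M-profile
   EXC_RETURN value such as 0xfffffffd is returned unchanged so that the
   exception-return unwinder can still recognize it.  */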
/* Return 1 if PC is the start of a compiler helper function which
   can be safely ignored during prologue skipping.  IS_THUMB is true
   if the function is known to be a Thumb function due to the way it
   was called.  */
537 skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
539 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
540 struct bound_minimal_symbol msym;
542 msym = lookup_minimal_symbol_by_pc (pc);
543 if (msym.minsym != NULL
544 && BMSYMBOL_VALUE_ADDRESS (msym) == pc
545 && MSYMBOL_LINKAGE_NAME (msym.minsym) != NULL)
547 const char *name = MSYMBOL_LINKAGE_NAME (msym.minsym);
      /* The GNU linker's Thumb call stub to foo is named
	 __foo_from_thumb.  */
551 if (strstr (name, "_from_thumb") != NULL)
      /* On soft-float targets, __truncdfsf2 is called to convert promoted
	 arguments to their argument types in non-prototyped
	 functions.  */
557 if (startswith (name, "__truncdfsf2"))
559 if (startswith (name, "__aeabi_d2f"))
562 /* Internal functions related to thread-local storage. */
563 if (startswith (name, "__tls_get_addr"))
565 if (startswith (name, "__aeabi_read_tp"))
570 /* If we run against a stripped glibc, we may be unable to identify
571 special functions by name. Check for one important case,
572 __aeabi_read_tp, by comparing the *code* against the default
573 implementation (this is hand-written ARM assembler in glibc). */
576 && read_code_unsigned_integer (pc, 4, byte_order_for_code)
577 == 0xe3e00a0f /* mov r0, #0xffff0fff */
578 && read_code_unsigned_integer (pc + 4, 4, byte_order_for_code)
579 == 0xe240f01f) /* sub pc, r0, #31 */
/* Extract the immediate from instruction movw/movt of encoding T.  INSN1 is
   the first 16 bits of the instruction, and INSN2 is the second 16 bits of
   the instruction.  */
589 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
590 ((bits ((insn1), 0, 3) << 12) \
591 | (bits ((insn1), 10, 10) << 11) \
592 | (bits ((insn2), 12, 14) << 8) \
593 | bits ((insn2), 0, 7))
595 /* Extract the immediate from instruction movw/movt of encoding A. INSN is
596 the 32-bit instruction. */
597 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \
598 ((bits ((insn), 16, 19) << 12) \
599 | bits ((insn), 0, 11))
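/* Worked example (illustrative): the A32 pair

     movw r0, #0x1234		@ insn 0xe3010234
     movt r0, #0x8000		@ insn 0xe3480000

   yields EXTRACT_MOVW_MOVT_IMM_A values of 0x1234 and 0x8000 respectively,
   and the reconstructed constant is (0x8000 << 16) | 0x1234 == 0x80001234,
   which is how arm_analyze_load_stack_chk_guard below rebuilds the address
   of __stack_chk_guard.  */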
/* Decode immediate value; implements ThumbExpandImmediate pseudo-op.  */

static unsigned int
thumb_expand_immediate (unsigned int imm)
{
  unsigned int count = imm >> 7;

  if (count < 8)
    switch (count / 2)
      {
      case 0:
	return imm & 0xff;
      case 1:
	return (imm & 0xff) | ((imm & 0xff) << 16);
      case 2:
	return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
      case 3:
	return (imm & 0xff) | ((imm & 0xff) << 8)
	       | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
      }

  return (0x80 | (imm & 0x7f)) << (32 - count);
}
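/* Worked examples (illustrative): for imm == 0x1ff the count field is 3, so
   the value expands to the "00XY00XY" form 0x00ff00ff; for imm == 0x458 the
   count field is 8, so the result is the rotated constant
   (0x80 | 0x58) << (32 - 8) == 0xd8000000.  */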
/* Return 1 if the 16-bit Thumb instruction INSN restores SP in
   epilogue, 0 otherwise.  */

static int
thumb_instruction_restores_sp (unsigned short insn)
{
  return (insn == 0x46bd		 /* mov sp, r7 */
	  || (insn & 0xff80) == 0xb000	 /* add sp, imm */
	  || (insn & 0xfe00) == 0xbc00); /* pop <registers> */
}
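/* For example, 0x46bd (mov sp, r7), 0xb008 (add sp, #32) and 0xbd10
   (pop {r4, pc}) all match; the prologue scanners below use this test to
   avoid reading past an epilogue.  */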
636 /* Analyze a Thumb prologue, looking for a recognizable stack frame
637 and frame pointer. Scan until we encounter a store that could
638 clobber the stack frame unexpectedly, or an unknown instruction.
639 Return the last address which is definitely safe to skip for an
640 initial breakpoint. */
643 thumb_analyze_prologue (struct gdbarch *gdbarch,
644 CORE_ADDR start, CORE_ADDR limit,
645 struct arm_prologue_cache *cache)
647 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
648 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
652 CORE_ADDR unrecognized_pc = 0;
654 for (i = 0; i < 16; i++)
655 regs[i] = pv_register (i, 0);
656 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
658 while (start < limit)
662 insn = read_code_unsigned_integer (start, 2, byte_order_for_code);
664 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
669 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
672 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
673 whether to save LR (R14). */
674 mask = (insn & 0xff) | ((insn & 0x100) << 6);
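	  /* For example, "push {r4-r7, lr}" is encoded as 0xb5f0, giving
	     mask == 0x00f0 | 0x4000 == 0x40f0 (bit 14 stands for LR).  */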
676 /* Calculate offsets of saved R0-R7 and LR. */
677 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
678 if (mask & (1 << regno))
680 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
682 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
685 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
687 offset = (insn & 0x7f) << 2; /* get scaled offset */
688 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
691 else if (thumb_instruction_restores_sp (insn))
693 /* Don't scan past the epilogue. */
696 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
697 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
699 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
700 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
701 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
703 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
704 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
705 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
707 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
708 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
709 && pv_is_constant (regs[bits (insn, 3, 5)]))
710 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
711 regs[bits (insn, 6, 8)]);
712 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
713 && pv_is_constant (regs[bits (insn, 3, 6)]))
715 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
716 int rm = bits (insn, 3, 6);
717 regs[rd] = pv_add (regs[rd], regs[rm]);
719 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
721 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
722 int src_reg = (insn & 0x78) >> 3;
723 regs[dst_reg] = regs[src_reg];
725 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
727 /* Handle stores to the stack. Normally pushes are used,
728 but with GCC -mtpcs-frame, there may be other stores
729 in the prologue to create the frame. */
730 int regno = (insn >> 8) & 0x7;
733 offset = (insn & 0xff) << 2;
734 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
736 if (stack.store_would_trash (addr))
739 stack.store (addr, 4, regs[regno]);
741 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
743 int rd = bits (insn, 0, 2);
744 int rn = bits (insn, 3, 5);
747 offset = bits (insn, 6, 10) << 2;
748 addr = pv_add_constant (regs[rn], offset);
750 if (stack.store_would_trash (addr))
753 stack.store (addr, 4, regs[rd]);
755 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
756 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
757 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
758 /* Ignore stores of argument registers to the stack. */
760 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
761 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
762 /* Ignore block loads from the stack, potentially copying
763 parameters from memory. */
765 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
766 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
767 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
768 /* Similarly ignore single loads from the stack. */
770 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
771 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
772 /* Skip register copies, i.e. saves to another register
773 instead of the stack. */
775 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
776 /* Recognize constant loads; even with small stacks these are necessary
778 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
779 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
781 /* Constant pool loads, for the same reason. */
782 unsigned int constant;
785 loc = start + 4 + bits (insn, 0, 7) * 4;
786 constant = read_memory_unsigned_integer (loc, 4, byte_order);
787 regs[bits (insn, 8, 10)] = pv_constant (constant);
789 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
791 unsigned short inst2;
793 inst2 = read_code_unsigned_integer (start + 2, 2,
794 byte_order_for_code);
796 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
798 /* BL, BLX. Allow some special function calls when
799 skipping the prologue; GCC generates these before
800 storing arguments to the stack. */
802 int j1, j2, imm1, imm2;
804 imm1 = sbits (insn, 0, 10);
805 imm2 = bits (inst2, 0, 10);
806 j1 = bit (inst2, 13);
807 j2 = bit (inst2, 11);
809 offset = ((imm1 << 12) + (imm2 << 1));
810 offset ^= ((!j2) << 22) | ((!j1) << 23);
812 nextpc = start + 4 + offset;
813 /* For BLX make sure to clear the low bits. */
814 if (bit (inst2, 12) == 0)
815 nextpc = nextpc & 0xfffffffc;
817 if (!skip_prologue_function (gdbarch, nextpc,
818 bit (inst2, 12) != 0))
822 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
824 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
826 pv_t addr = regs[bits (insn, 0, 3)];
829 if (stack.store_would_trash (addr))
832 /* Calculate offsets of saved registers. */
833 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
834 if (inst2 & (1 << regno))
836 addr = pv_add_constant (addr, -4);
837 stack.store (addr, 4, regs[regno]);
841 regs[bits (insn, 0, 3)] = addr;
844 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
846 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
848 int regno1 = bits (inst2, 12, 15);
849 int regno2 = bits (inst2, 8, 11);
850 pv_t addr = regs[bits (insn, 0, 3)];
852 offset = inst2 & 0xff;
854 addr = pv_add_constant (addr, offset);
856 addr = pv_add_constant (addr, -offset);
858 if (stack.store_would_trash (addr))
861 stack.store (addr, 4, regs[regno1]);
862 stack.store (pv_add_constant (addr, 4),
866 regs[bits (insn, 0, 3)] = addr;
869 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
870 && (inst2 & 0x0c00) == 0x0c00
871 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
873 int regno = bits (inst2, 12, 15);
874 pv_t addr = regs[bits (insn, 0, 3)];
876 offset = inst2 & 0xff;
878 addr = pv_add_constant (addr, offset);
880 addr = pv_add_constant (addr, -offset);
882 if (stack.store_would_trash (addr))
885 stack.store (addr, 4, regs[regno]);
888 regs[bits (insn, 0, 3)] = addr;
891 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
892 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
894 int regno = bits (inst2, 12, 15);
897 offset = inst2 & 0xfff;
898 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
900 if (stack.store_would_trash (addr))
903 stack.store (addr, 4, regs[regno]);
906 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
907 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
908 /* Ignore stores of argument registers to the stack. */
911 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
912 && (inst2 & 0x0d00) == 0x0c00
913 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
914 /* Ignore stores of argument registers to the stack. */
917 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
919 && (inst2 & 0x8000) == 0x0000
920 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
921 /* Ignore block loads from the stack, potentially copying
922 parameters from memory. */
925 else if ((insn & 0xffb0) == 0xe950 /* ldrd Rt, Rt2,
927 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
928 /* Similarly ignore dual loads from the stack. */
931 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
932 && (inst2 & 0x0d00) == 0x0c00
933 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
934 /* Similarly ignore single loads from the stack. */
937 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
938 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
939 /* Similarly ignore single loads from the stack. */
942 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
943 && (inst2 & 0x8000) == 0x0000)
945 unsigned int imm = ((bits (insn, 10, 10) << 11)
946 | (bits (inst2, 12, 14) << 8)
947 | bits (inst2, 0, 7));
949 regs[bits (inst2, 8, 11)]
950 = pv_add_constant (regs[bits (insn, 0, 3)],
951 thumb_expand_immediate (imm));
954 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
955 && (inst2 & 0x8000) == 0x0000)
957 unsigned int imm = ((bits (insn, 10, 10) << 11)
958 | (bits (inst2, 12, 14) << 8)
959 | bits (inst2, 0, 7));
961 regs[bits (inst2, 8, 11)]
962 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
965 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
966 && (inst2 & 0x8000) == 0x0000)
968 unsigned int imm = ((bits (insn, 10, 10) << 11)
969 | (bits (inst2, 12, 14) << 8)
970 | bits (inst2, 0, 7));
972 regs[bits (inst2, 8, 11)]
973 = pv_add_constant (regs[bits (insn, 0, 3)],
974 - (CORE_ADDR) thumb_expand_immediate (imm));
977 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
978 && (inst2 & 0x8000) == 0x0000)
980 unsigned int imm = ((bits (insn, 10, 10) << 11)
981 | (bits (inst2, 12, 14) << 8)
982 | bits (inst2, 0, 7));
984 regs[bits (inst2, 8, 11)]
985 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
988 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
990 unsigned int imm = ((bits (insn, 10, 10) << 11)
991 | (bits (inst2, 12, 14) << 8)
992 | bits (inst2, 0, 7));
994 regs[bits (inst2, 8, 11)]
995 = pv_constant (thumb_expand_immediate (imm));
998 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
1001 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
1003 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1006 else if (insn == 0xea5f /* mov.w Rd,Rm */
1007 && (inst2 & 0xf0f0) == 0)
1009 int dst_reg = (inst2 & 0x0f00) >> 8;
1010 int src_reg = inst2 & 0xf;
1011 regs[dst_reg] = regs[src_reg];
1014 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1016 /* Constant pool loads. */
1017 unsigned int constant;
1020 offset = bits (inst2, 0, 11);
1022 loc = start + 4 + offset;
1024 loc = start + 4 - offset;
1026 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1027 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1030 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1032 /* Constant pool loads. */
1033 unsigned int constant;
1036 offset = bits (inst2, 0, 7) << 2;
1038 loc = start + 4 + offset;
1040 loc = start + 4 - offset;
1042 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1043 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1045 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1046 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1049 else if (thumb2_instruction_changes_pc (insn, inst2))
1051 /* Don't scan past anything that might change control flow. */
1056 /* The optimizer might shove anything into the prologue,
1057 so we just skip what we don't recognize. */
1058 unrecognized_pc = start;
1063 else if (thumb_instruction_changes_pc (insn))
1065 /* Don't scan past anything that might change control flow. */
1070 /* The optimizer might shove anything into the prologue,
1071 so we just skip what we don't recognize. */
1072 unrecognized_pc = start;
1079 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1080 paddress (gdbarch, start));
1082 if (unrecognized_pc == 0)
1083 unrecognized_pc = start;
1086 return unrecognized_pc;
1088 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1090 /* Frame pointer is fp. Frame size is constant. */
1091 cache->framereg = ARM_FP_REGNUM;
1092 cache->framesize = -regs[ARM_FP_REGNUM].k;
1094 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1096 /* Frame pointer is r7. Frame size is constant. */
1097 cache->framereg = THUMB_FP_REGNUM;
1098 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1102 /* Try the stack pointer... this is a bit desperate. */
1103 cache->framereg = ARM_SP_REGNUM;
1104 cache->framesize = -regs[ARM_SP_REGNUM].k;
1107 for (i = 0; i < 16; i++)
1108 if (stack.find_reg (gdbarch, i, &offset))
1109 cache->saved_regs[i].addr = offset;
1111 return unrecognized_pc;
/* Try to analyze the instructions starting from PC, which load symbol
   __stack_chk_guard.  Return the address of the instruction after loading
   this symbol, set the destination register number to *BASEREG, and set the
   size of the instructions for loading the symbol in OFFSET.  Return 0 if
   the instructions are not recognized.  */
1122 arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
1123 unsigned int *destreg, int *offset)
1125 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1126 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1127 unsigned int low, high, address;
1132 unsigned short insn1
1133 = read_code_unsigned_integer (pc, 2, byte_order_for_code);
1135 if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
1137 *destreg = bits (insn1, 8, 10);
1139 address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
1140 address = read_memory_unsigned_integer (address, 4,
1141 byte_order_for_code);
1143 else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
1145 unsigned short insn2
1146 = read_code_unsigned_integer (pc + 2, 2, byte_order_for_code);
1148 low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1151 = read_code_unsigned_integer (pc + 4, 2, byte_order_for_code);
1153 = read_code_unsigned_integer (pc + 6, 2, byte_order_for_code);
1155 /* movt Rd, #const */
1156 if ((insn1 & 0xfbc0) == 0xf2c0)
1158 high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1159 *destreg = bits (insn2, 8, 11);
1161 address = (high << 16 | low);
1168 = read_code_unsigned_integer (pc, 4, byte_order_for_code);
1170 if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
1172 address = bits (insn, 0, 11) + pc + 8;
1173 address = read_memory_unsigned_integer (address, 4,
1174 byte_order_for_code);
1176 *destreg = bits (insn, 12, 15);
1179 else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1181 low = EXTRACT_MOVW_MOVT_IMM_A (insn);
1184 = read_code_unsigned_integer (pc + 4, 4, byte_order_for_code);
1186 if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1188 high = EXTRACT_MOVW_MOVT_IMM_A (insn);
1189 *destreg = bits (insn, 12, 15);
1191 address = (high << 16 | low);
/* Try to skip a sequence of instructions used for stack protector.  If PC
   points to the first instruction of this sequence, return the address of
   the first instruction after this sequence; otherwise, return the original
   PC.

   On arm, this sequence of instructions is mainly composed of three steps,
     Step 1: load symbol __stack_chk_guard,
     Step 2: load from the address of __stack_chk_guard,
     Step 3: store it to somewhere else.

   Usually, the instructions in step 2 and step 3 are the same on various ARM
   architectures.  In step 2, it is one instruction 'ldr Rx, [Rn, #0]', and
   in step 3, it is also one instruction 'str Rx, [r7, #immd]'.  However,
   the instructions in step 1 vary across ARM architectures.  On ARMv7,
   they are:
1214 movw Rn, #:lower16:__stack_chk_guard
1215 movt Rn, #:upper16:__stack_chk_guard
1222 .word __stack_chk_guard
   Since ldr/str is a very popular instruction, we can't use it as the
   'fingerprint' or 'signature' of a stack protector sequence.  Here we choose
   the sequence {movw/movt, ldr}/ldr/str plus the symbol __stack_chk_guard, if
   not stripped, as the 'fingerprint' of a stack protector code sequence.  */
1230 arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
1232 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1233 unsigned int basereg;
1234 struct bound_minimal_symbol stack_chk_guard;
1236 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1239 /* Try to parse the instructions in Step 1. */
1240 addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1245 stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
1246 /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
1247 Otherwise, this sequence cannot be for stack protector. */
1248 if (stack_chk_guard.minsym == NULL
1249 || !startswith (MSYMBOL_LINKAGE_NAME (stack_chk_guard.minsym), "__stack_chk_guard"))
1254 unsigned int destreg;
1256 = read_code_unsigned_integer (pc + offset, 2, byte_order_for_code);
1258 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1259 if ((insn & 0xf800) != 0x6800)
1261 if (bits (insn, 3, 5) != basereg)
1263 destreg = bits (insn, 0, 2);
1265 insn = read_code_unsigned_integer (pc + offset + 2, 2,
1266 byte_order_for_code);
1267 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1268 if ((insn & 0xf800) != 0x6000)
1270 if (destreg != bits (insn, 0, 2))
1275 unsigned int destreg;
1277 = read_code_unsigned_integer (pc + offset, 4, byte_order_for_code);
1279 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1280 if ((insn & 0x0e500000) != 0x04100000)
1282 if (bits (insn, 16, 19) != basereg)
1284 destreg = bits (insn, 12, 15);
1285 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1286 insn = read_code_unsigned_integer (pc + offset + 4,
1287 4, byte_order_for_code);
1288 if ((insn & 0x0e500000) != 0x04000000)
1290 if (bits (insn, 12, 15) != destreg)
  /* The total size of the two instructions ldr/str is 4 on Thumb-2,
     while it is 8 in ARM mode.  */
  if (is_thumb)
    return pc + offset + 4;

  return pc + offset + 8;
}
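/* For illustration, an ARMv7-A prologue built with -fstack-protector
   typically contains a sequence along these lines (register choice is
   arbitrary):

     movw r3, #:lower16:__stack_chk_guard
     movt r3, #:upper16:__stack_chk_guard
     ldr  r3, [r3]
     str  r3, [fp, #-8]

   in which case arm_skip_stack_protector returns the address just past
   the str.  */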
1301 /* Advance the PC across any function entry prologue instructions to
1302 reach some "real" code.
   The APCS (ARM Procedure Call Standard) defines the following
   prologue:
1308 [stmfd sp!, {a1,a2,a3,a4}]
1309 stmfd sp!, {...,fp,ip,lr,pc}
1310 [stfe f7, [sp, #-12]!]
1311 [stfe f6, [sp, #-12]!]
1312 [stfe f5, [sp, #-12]!]
1313 [stfe f4, [sp, #-12]!]
1314 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1317 arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1319 CORE_ADDR func_addr, limit_pc;
1321 /* See if we can determine the end of the prologue via the symbol table.
1322 If so, then return either PC, or the PC after the prologue, whichever
1324 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
1326 CORE_ADDR post_prologue_pc
1327 = skip_prologue_using_sal (gdbarch, func_addr);
1328 struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);
1330 if (post_prologue_pc)
1332 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
      /* GCC always emits a line note before the prologue and another
	 one after, even if the two are at the same address or on the
	 same line.  Take advantage of this so that we do not need to
	 know every instruction that might appear in the prologue.  We
	 will have producer information for most binaries; if it is
	 missing (e.g. for -gstabs), assume the GNU tools.  */
1341 if (post_prologue_pc
1343 || COMPUNIT_PRODUCER (cust) == NULL
1344 || startswith (COMPUNIT_PRODUCER (cust), "GNU ")
1345 || startswith (COMPUNIT_PRODUCER (cust), "clang ")))
1346 return post_prologue_pc;
1348 if (post_prologue_pc != 0)
1350 CORE_ADDR analyzed_limit;
1352 /* For non-GCC compilers, make sure the entire line is an
1353 acceptable prologue; GDB will round this function's
1354 return value up to the end of the following line so we
1355 can not skip just part of a line (and we do not want to).
1357 RealView does not treat the prologue specially, but does
1358 associate prologue code with the opening brace; so this
	     lets us skip the first line if we think it is the opening
	     brace.  */
1361 if (arm_pc_is_thumb (gdbarch, func_addr))
1362 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1363 post_prologue_pc, NULL);
1365 analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
1366 post_prologue_pc, NULL);
1368 if (analyzed_limit != post_prologue_pc)
1371 return post_prologue_pc;
  /* Can't determine prologue from the symbol table, need to examine
     instructions.  */
1378 /* Find an upper limit on the function prologue using the debug
1379 information. If the debug information could not be used to provide
1380 that bound, then use an arbitrary large number as the upper bound. */
1381 /* Like arm_scan_prologue, stop no later than pc + 64. */
1382 limit_pc = skip_prologue_using_sal (gdbarch, pc);
1384 limit_pc = pc + 64; /* Magic. */
1387 /* Check if this is Thumb code. */
1388 if (arm_pc_is_thumb (gdbarch, pc))
1389 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1391 return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1395 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1396 This function decodes a Thumb function prologue to determine:
1397 1) the size of the stack frame
1398 2) which registers are saved on it
1399 3) the offsets of saved regs
1400 4) the offset from the stack pointer to the frame pointer
1402 A typical Thumb function prologue would create this stack frame
1403 (offsets relative to FP)
1404 old SP -> 24 stack parameters
1407 R7 -> 0 local variables (16 bytes)
1408 SP -> -12 additional stack space (12 bytes)
1409 The frame size would thus be 36 bytes, and the frame offset would be
1410 12 bytes. The frame register is R7.
1412 The comments for thumb_skip_prolog() describe the algorithm we use
1413 to detect the end of the prolog. */
1417 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1418 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1420 CORE_ADDR prologue_start;
1421 CORE_ADDR prologue_end;
1423 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1426 /* See comment in arm_scan_prologue for an explanation of
1428 if (prologue_end > prologue_start + 64)
1430 prologue_end = prologue_start + 64;
      /* We're in the boondocks: we have no idea where the start of the
	 function is.  */
1438 prologue_end = std::min (prologue_end, prev_pc);
1440 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
/* Return 1 if the ARM instruction INSN restores SP in epilogue, 0
   otherwise.  */
1447 arm_instruction_restores_sp (unsigned int insn)
1449 if (bits (insn, 28, 31) != INST_NV)
1451 if ((insn & 0x0df0f000) == 0x0080d000
1452 /* ADD SP (register or immediate). */
1453 || (insn & 0x0df0f000) == 0x0040d000
1454 /* SUB SP (register or immediate). */
1455 || (insn & 0x0ffffff0) == 0x01a0d000
1457 || (insn & 0x0fff0000) == 0x08bd0000
1459 || (insn & 0x0fff0000) == 0x049d0000)
1460 /* POP of a single register. */
1467 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1468 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
   fill it in.  Return the first address not recognized as a prologue
   instruction.
1472 We recognize all the instructions typically found in ARM prologues,
1473 plus harmless instructions which can be skipped (either for analysis
1474 purposes, or a more restrictive set that can be skipped when finding
1475 the end of the prologue). */
1478 arm_analyze_prologue (struct gdbarch *gdbarch,
1479 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1480 struct arm_prologue_cache *cache)
1482 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1484 CORE_ADDR offset, current_pc;
1485 pv_t regs[ARM_FPS_REGNUM];
1486 CORE_ADDR unrecognized_pc = 0;
1488 /* Search the prologue looking for instructions that set up the
1489 frame pointer, adjust the stack pointer, and save registers.
1491 Be careful, however, and if it doesn't look like a prologue,
1492 don't try to scan it. If, for instance, a frameless function
1493 begins with stmfd sp!, then we will tell ourselves there is
1494 a frame, which will confuse stack traceback, as well as "finish"
     and other operations that rely on a knowledge of the stack
     frame.  */
1498 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1499 regs[regno] = pv_register (regno, 0);
1500 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1502 for (current_pc = prologue_start;
1503 current_pc < prologue_end;
1507 = read_code_unsigned_integer (current_pc, 4, byte_order_for_code);
1509 if (insn == 0xe1a0c00d) /* mov ip, sp */
1511 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1514 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1515 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1517 unsigned imm = insn & 0xff; /* immediate value */
1518 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1519 int rd = bits (insn, 12, 15);
1520 imm = (imm >> rot) | (imm << (32 - rot));
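	  /* For example, "add r0, r0, #0x1000" is 0xe2800a01: imm == 0x01,
	     rot == 20, so the expanded constant is (1 >> 20) | (1 << 12)
	     == 0x1000.  */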
1521 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1524 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1525 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1527 unsigned imm = insn & 0xff; /* immediate value */
1528 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1529 int rd = bits (insn, 12, 15);
1530 imm = (imm >> rot) | (imm << (32 - rot));
1531 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1534 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1537 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1539 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1540 stack.store (regs[ARM_SP_REGNUM], 4,
1541 regs[bits (insn, 12, 15)]);
1544 else if ((insn & 0xffff0000) == 0xe92d0000)
1545 /* stmfd sp!, {..., fp, ip, lr, pc}
1547 stmfd sp!, {a1, a2, a3, a4} */
1549 int mask = insn & 0xffff;
1551 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1554 /* Calculate offsets of saved registers. */
1555 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1556 if (mask & (1 << regno))
1559 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1560 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
1563 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1564 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1565 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1567 /* No need to add this to saved_regs -- it's just an arg reg. */
1570 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1571 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1572 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1574 /* No need to add this to saved_regs -- it's just an arg reg. */
1577 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1579 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1581 /* No need to add this to saved_regs -- it's just arg regs. */
1584 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1586 unsigned imm = insn & 0xff; /* immediate value */
1587 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1588 imm = (imm >> rot) | (imm << (32 - rot));
1589 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1591 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1593 unsigned imm = insn & 0xff; /* immediate value */
1594 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1595 imm = (imm >> rot) | (imm << (32 - rot));
1596 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
1598 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1600 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1602 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1605 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1606 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
1607 stack.store (regs[ARM_SP_REGNUM], 12, regs[regno]);
1609 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1611 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1613 int n_saved_fp_regs;
1614 unsigned int fp_start_reg, fp_bound_reg;
1616 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1619 if ((insn & 0x800) == 0x800) /* N0 is set */
1621 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1622 n_saved_fp_regs = 3;
1624 n_saved_fp_regs = 1;
1628 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1629 n_saved_fp_regs = 2;
1631 n_saved_fp_regs = 4;
1634 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
1635 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1636 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
1638 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1639 stack.store (regs[ARM_SP_REGNUM], 12,
1640 regs[fp_start_reg++]);
1643 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1645 /* Allow some special function calls when skipping the
1646 prologue; GCC generates these before storing arguments to
1648 CORE_ADDR dest = BranchDest (current_pc, insn);
1650 if (skip_prologue_function (gdbarch, dest, 0))
1655 else if ((insn & 0xf0000000) != 0xe0000000)
1656 break; /* Condition not true, exit early. */
1657 else if (arm_instruction_changes_pc (insn))
1658 /* Don't scan past anything that might change control flow. */
1660 else if (arm_instruction_restores_sp (insn))
1662 /* Don't scan past the epilogue. */
1665 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1666 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1667 /* Ignore block loads from the stack, potentially copying
1668 parameters from memory. */
1670 else if ((insn & 0xfc500000) == 0xe4100000
1671 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1672 /* Similarly ignore single loads from the stack. */
1674 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1675 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1676 register instead of the stack. */
	  /* The optimizer might shove anything into the prologue.  If
	     we build up the cache (cache != NULL) from scanning the
	     prologue, we just skip what we don't recognize and scan
	     further to make the cache as complete as possible.  However,
	     if we skip the prologue, we'll stop immediately on an
	     unrecognized instruction.  */
1686 unrecognized_pc = current_pc;
1694 if (unrecognized_pc == 0)
1695 unrecognized_pc = current_pc;
1699 int framereg, framesize;
1701 /* The frame size is just the distance from the frame register
1702 to the original stack pointer. */
1703 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1705 /* Frame pointer is fp. */
1706 framereg = ARM_FP_REGNUM;
1707 framesize = -regs[ARM_FP_REGNUM].k;
1711 /* Try the stack pointer... this is a bit desperate. */
1712 framereg = ARM_SP_REGNUM;
1713 framesize = -regs[ARM_SP_REGNUM].k;
1716 cache->framereg = framereg;
1717 cache->framesize = framesize;
1719 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1720 if (stack.find_reg (gdbarch, regno, &offset))
1721 cache->saved_regs[regno].addr = offset;
1725 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1726 paddress (gdbarch, unrecognized_pc));
1728 return unrecognized_pc;
1732 arm_scan_prologue (struct frame_info *this_frame,
1733 struct arm_prologue_cache *cache)
1735 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1736 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1737 CORE_ADDR prologue_start, prologue_end;
1738 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1739 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
1741 /* Assume there is no frame until proven otherwise. */
1742 cache->framereg = ARM_SP_REGNUM;
1743 cache->framesize = 0;
1745 /* Check for Thumb prologue. */
1746 if (arm_frame_is_thumb (this_frame))
1748 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1752 /* Find the function prologue. If we can't find the function in
1753 the symbol table, peek in the stack frame to find the PC. */
1754 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1757 /* One way to find the end of the prologue (which works well
1758 for unoptimized code) is to do the following:
1760 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1763 prologue_end = prev_pc;
1764 else if (sal.end < prologue_end)
1765 prologue_end = sal.end;
1767 This mechanism is very accurate so long as the optimizer
1768 doesn't move any instructions from the function body into the
1769 prologue. If this happens, sal.end will be the last
1770 instruction in the first hunk of prologue code just before
1771 the first instruction that the scheduler has moved from
1772 the body to the prologue.
1774 In order to make sure that we scan all of the prologue
1775 instructions, we use a slightly less accurate mechanism which
1776 may scan more than necessary. To help compensate for this
1777 lack of accuracy, the prologue scanning loop below contains
1778 several clauses which'll cause the loop to terminate early if
1779 an implausible prologue instruction is encountered.
1785 is a suitable endpoint since it accounts for the largest
	 possible prologue plus up to five instructions inserted by
	 the scheduler.  */
1789 if (prologue_end > prologue_start + 64)
1791 prologue_end = prologue_start + 64; /* See above. */
1796 /* We have no symbol information. Our only option is to assume this
1797 function has a standard stack frame and the normal frame register.
1798 Then, we can find the value of our frame pointer on entrance to
1799 the callee (or at the present moment if this is the innermost frame).
1800 The value stored there should be the address of the stmfd + 8. */
1801 CORE_ADDR frame_loc;
1802 ULONGEST return_value;
1804 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
1805 if (!safe_read_memory_unsigned_integer (frame_loc, 4, byte_order,
1810 prologue_start = gdbarch_addr_bits_remove
1811 (gdbarch, return_value) - 8;
1812 prologue_end = prologue_start + 64; /* See above. */
1816 if (prev_pc < prologue_end)
1817 prologue_end = prev_pc;
1819 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1822 static struct arm_prologue_cache *
1823 arm_make_prologue_cache (struct frame_info *this_frame)
1826 struct arm_prologue_cache *cache;
1827 CORE_ADDR unwound_fp;
1829 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
1830 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
1832 arm_scan_prologue (this_frame, cache);
1834 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
1835 if (unwound_fp == 0)
1838 cache->prev_sp = unwound_fp + cache->framesize;
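  /* For example, if the prologue left the frame register holding
     (entry SP - framesize), reading it back here and adding framesize
     reconstructs the caller's SP, which is what identifies this frame.  */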
1840 /* Calculate actual addresses of saved registers using offsets
1841 determined by arm_scan_prologue. */
1842 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
1843 if (trad_frame_addr_p (cache->saved_regs, reg))
1844 cache->saved_regs[reg].addr += cache->prev_sp;
1849 /* Implementation of the stop_reason hook for arm_prologue frames. */
1851 static enum unwind_stop_reason
1852 arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
1855 struct arm_prologue_cache *cache;
1858 if (*this_cache == NULL)
1859 *this_cache = arm_make_prologue_cache (this_frame);
1860 cache = (struct arm_prologue_cache *) *this_cache;
1862 /* This is meant to halt the backtrace at "_start". */
1863 pc = get_frame_pc (this_frame);
1864 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
1865 return UNWIND_OUTERMOST;
1867 /* If we've hit a wall, stop. */
1868 if (cache->prev_sp == 0)
1869 return UNWIND_OUTERMOST;
1871 return UNWIND_NO_REASON;
1874 /* Our frame ID for a normal frame is the current function's starting PC
1875 and the caller's SP when we were called. */
1878 arm_prologue_this_id (struct frame_info *this_frame,
1880 struct frame_id *this_id)
1882 struct arm_prologue_cache *cache;
1886 if (*this_cache == NULL)
1887 *this_cache = arm_make_prologue_cache (this_frame);
1888 cache = (struct arm_prologue_cache *) *this_cache;
1890 /* Use function start address as part of the frame ID. If we cannot
1891 identify the start address (due to missing symbol information),
1892 fall back to just using the current PC. */
1893 pc = get_frame_pc (this_frame);
1894 func = get_frame_func (this_frame);
1898 id = frame_id_build (cache->prev_sp, func);
1902 static struct value *
1903 arm_prologue_prev_register (struct frame_info *this_frame,
1907 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1908 struct arm_prologue_cache *cache;
1910 if (*this_cache == NULL)
1911 *this_cache = arm_make_prologue_cache (this_frame);
1912 cache = (struct arm_prologue_cache *) *this_cache;
1914 /* If we are asked to unwind the PC, then we need to return the LR
1915 instead. The prologue may save PC, but it will point into this
1916 frame's prologue, not the next frame's resume location. Also
1917 strip the saved T bit. A valid LR may have the low bit set, but
1918 a valid PC never does. */
1919 if (prev_regnum == ARM_PC_REGNUM)
1923 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1924 return frame_unwind_got_constant (this_frame, prev_regnum,
1925 arm_addr_bits_remove (gdbarch, lr));
1928 /* SP is generally not saved to the stack, but this frame is
1929 identified by the next frame's stack pointer at the time of the call.
1930 The value was already reconstructed into PREV_SP. */
1931 if (prev_regnum == ARM_SP_REGNUM)
1932 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
1934 /* The CPSR may have been changed by the call instruction and by the
1935 called function. The only bit we can reconstruct is the T bit,
1936 by checking the low bit of LR as of the call. This is a reliable
1937 indicator of Thumb-ness except for some ARM v4T pre-interworking
1938 Thumb code, which could get away with a clear low bit as long as
1939 the called function did not use bx. Guess that all other
1940 bits are unchanged; the condition flags are presumably lost,
1941 but the processor status is likely valid. */
1942 if (prev_regnum == ARM_PS_REGNUM)
1945 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
1947 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
1948 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1949 if (IS_THUMB_ADDR (lr))
1953 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
1956 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
1960 struct frame_unwind arm_prologue_unwind = {
1962 arm_prologue_unwind_stop_reason,
1963 arm_prologue_this_id,
1964 arm_prologue_prev_register,
1966 default_frame_sniffer
1969 /* Maintain a list of ARM exception table entries per objfile, similar to the
1970 list of mapping symbols. We only cache entries for standard ARM-defined
1971 personality routines; the cache will contain only the frame unwinding
1972 instructions associated with the entry (not the descriptors). */
1974 static const struct objfile_data *arm_exidx_data_key;
1976 struct arm_exidx_entry
1981 typedef struct arm_exidx_entry arm_exidx_entry_s;
1982 DEF_VEC_O(arm_exidx_entry_s);
1984 struct arm_exidx_data
1986 VEC(arm_exidx_entry_s) **section_maps;
1990 arm_exidx_data_free (struct objfile *objfile, void *arg)
1992 struct arm_exidx_data *data = (struct arm_exidx_data *) arg;
1995 for (i = 0; i < objfile->obfd->section_count; i++)
1996 VEC_free (arm_exidx_entry_s, data->section_maps[i]);
2000 arm_compare_exidx_entries (const struct arm_exidx_entry *lhs,
2001 const struct arm_exidx_entry *rhs)
2003 return lhs->addr < rhs->addr;
2006 static struct obj_section *
2007 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2009 struct obj_section *osect;
2011 ALL_OBJFILE_OSECTIONS (objfile, osect)
2012 if (bfd_get_section_flags (objfile->obfd,
2013 osect->the_bfd_section) & SEC_ALLOC)
2015 bfd_vma start, size;
2016 start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
2017 size = bfd_get_section_size (osect->the_bfd_section);
2019 if (start <= vma && vma < start + size)
2026 /* Parse contents of exception table and exception index sections
2027 of OBJFILE, and fill in the exception table entry cache.
2029 For each entry that refers to a standard ARM-defined personality
2030 routine, extract the frame unwinding instructions (from either
   the index or the table section).  The unwinding instructions
   are normalized by:
2033 - extracting them from the rest of the table data
2034 - converting to host endianness
2035 - appending the implicit 0xb0 ("Finish") code
2037 The extracted and normalized instructions are stored for later
2038 retrieval by the arm_find_exidx_entry routine. */
2041 arm_exidx_new_objfile (struct objfile *objfile)
2043 struct cleanup *cleanups;
2044 struct arm_exidx_data *data;
2045 asection *exidx, *extab;
2046 bfd_vma exidx_vma = 0, extab_vma = 0;
2047 bfd_size_type exidx_size = 0, extab_size = 0;
2048 gdb_byte *exidx_data = NULL, *extab_data = NULL;
2051 /* If we've already touched this file, do nothing. */
2052 if (!objfile || objfile_data (objfile, arm_exidx_data_key) != NULL)
2054 cleanups = make_cleanup (null_cleanup, NULL);
2056 /* Read contents of exception table and index. */
2057 exidx = bfd_get_section_by_name (objfile->obfd, ELF_STRING_ARM_unwind);
2060 exidx_vma = bfd_section_vma (objfile->obfd, exidx);
2061 exidx_size = bfd_get_section_size (exidx);
2062 exidx_data = (gdb_byte *) xmalloc (exidx_size);
2063 make_cleanup (xfree, exidx_data);
2065 if (!bfd_get_section_contents (objfile->obfd, exidx,
2066 exidx_data, 0, exidx_size))
2068 do_cleanups (cleanups);
2073 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2076 extab_vma = bfd_section_vma (objfile->obfd, extab);
2077 extab_size = bfd_get_section_size (extab);
2078 extab_data = (gdb_byte *) xmalloc (extab_size);
2079 make_cleanup (xfree, extab_data);
2081 if (!bfd_get_section_contents (objfile->obfd, extab,
2082 extab_data, 0, extab_size))
2084 do_cleanups (cleanups);
2089 /* Allocate exception table data structure. */
2090 data = OBSTACK_ZALLOC (&objfile->objfile_obstack, struct arm_exidx_data);
2091 set_objfile_data (objfile, arm_exidx_data_key, data);
2092 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
2093 objfile->obfd->section_count,
2094 VEC(arm_exidx_entry_s) *);
2096 /* Fill in exception table. */
2097 for (i = 0; i < exidx_size / 8; i++)
2099 struct arm_exidx_entry new_exidx_entry;
2100 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8);
2101 bfd_vma val = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8 + 4);
2102 bfd_vma addr = 0, word = 0;
2103 int n_bytes = 0, n_words = 0;
2104 struct obj_section *sec;
2105 gdb_byte *entry = NULL;
2107 /* Extract address of start of function. */
2108 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2109 idx += exidx_vma + i * 8;
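/* The two statements above sign-extend the 31-bit prel31 field and turn
   it into an absolute address.  For example (illustrative values), a
   field of 0x7ffffffc is -4, so the function starts 4 bytes before this
   index entry, while a field of 0x00000010 resolves to 16 bytes after
   it.  */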
2111 /* Find section containing function and compute section offset. */
2112 sec = arm_obj_section_from_vma (objfile, idx);
2115 idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);
2117 /* Determine address of exception table entry. */
2120 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2122 else if ((val & 0xff000000) == 0x80000000)
2124 /* Exception table entry embedded in .ARM.exidx
2125 -- must be short form. */
2129 else if (!(val & 0x80000000))
2131 /* Exception table entry in .ARM.extab. */
2132 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2133 addr += exidx_vma + i * 8 + 4;
2135 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_size)
2137 word = bfd_h_get_32 (objfile->obfd,
2138 extab_data + addr - extab_vma);
2141 if ((word & 0xff000000) == 0x80000000)
2146 else if ((word & 0xff000000) == 0x81000000
2147 || (word & 0xff000000) == 0x82000000)
2151 n_words = ((word >> 16) & 0xff);
2153 else if (!(word & 0x80000000))
2156 struct obj_section *pers_sec;
2157 int gnu_personality = 0;
2159 /* Custom personality routine. */
2160 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2161 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2163 /* Check whether we've got one of the variants of the
2164 GNU personality routines. */
2165 pers_sec = arm_obj_section_from_vma (objfile, pers);
2168 static const char *personality[] =
2170 "__gcc_personality_v0",
2171 "__gxx_personality_v0",
2172 "__gcj_personality_v0",
2173 "__gnu_objc_personality_v0",
2177 CORE_ADDR pc = pers + obj_section_offset (pers_sec);
2180 for (k = 0; personality[k]; k++)
2181 if (lookup_minimal_symbol_by_pc_name
2182 (pc, personality[k], objfile))
2184 gnu_personality = 1;
2189 /* If so, the next word contains a word count in the high
2190 byte, followed by the same unwind instructions as the
2191 pre-defined forms. */
2193 && addr + 4 <= extab_vma + extab_size)
2195 word = bfd_h_get_32 (objfile->obfd,
2196 extab_data + addr - extab_vma);
2199 n_words = ((word >> 24) & 0xff);
2205 /* Sanity check address. */
2207 if (addr < extab_vma || addr + 4 * n_words > extab_vma + extab_size)
2208 n_words = n_bytes = 0;
2210 /* The unwind instructions reside in WORD (only the N_BYTES least
2211 significant bytes are valid), followed by N_WORDS words in the
2212 extab section starting at ADDR. */
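/* For example (illustrative values): a short-form entry with WORD equal
   to 0x80a8b0b0 yields N_BYTES == 3 and no extab words, so the opcode
   string extracted below is a8 b0 b0, followed by the implicit b0
   terminator appended at the end.  */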
2213 if (n_bytes || n_words)
2216 = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
2217 n_bytes + n_words * 4 + 1);
2220 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2224 word = bfd_h_get_32 (objfile->obfd,
2225 extab_data + addr - extab_vma);
2228 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2229 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2230 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2231 *p++ = (gdb_byte) (word & 0xff);
2234 /* Implied "Finish" to terminate the list. */
2238 /* Push entry onto vector. They are guaranteed to always
2239 appear in order of increasing addresses. */
2240 new_exidx_entry.addr = idx;
2241 new_exidx_entry.entry = entry;
2242 VEC_safe_push (arm_exidx_entry_s,
2243 data->section_maps[sec->the_bfd_section->index],
2247 do_cleanups (cleanups);
2250 /* Search for the exception table entry covering MEMADDR. If one is found,
2251 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2252 set *START to the start of the region covered by this entry. */
2255 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2257 struct obj_section *sec;
2259 sec = find_pc_section (memaddr);
2262 struct arm_exidx_data *data;
2263 VEC(arm_exidx_entry_s) *map;
2264 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2267 data = ((struct arm_exidx_data *)
2268 objfile_data (sec->objfile, arm_exidx_data_key));
2271 map = data->section_maps[sec->the_bfd_section->index];
2272 if (!VEC_empty (arm_exidx_entry_s, map))
2274 struct arm_exidx_entry *map_sym;
2276 idx = VEC_lower_bound (arm_exidx_entry_s, map, &map_key,
2277 arm_compare_exidx_entries);
2279 /* VEC_lower_bound finds the earliest ordered insertion
2280 point. If the following symbol starts at this exact
2281 address, we use that; otherwise, the preceding
2282 exception table entry covers this address. */
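/* Illustrative example: with entries at section offsets 0x0, 0x40 and
   0x100 and a lookup offset of 0x44, VEC_lower_bound returns the index
   of the 0x100 entry; since its address does not match exactly, the
   preceding 0x40 entry is the one that covers the address.  */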
2283 if (idx < VEC_length (arm_exidx_entry_s, map))
2285 map_sym = VEC_index (arm_exidx_entry_s, map, idx);
2286 if (map_sym->addr == map_key.addr)
2289 *start = map_sym->addr + obj_section_addr (sec);
2290 return map_sym->entry;
2296 map_sym = VEC_index (arm_exidx_entry_s, map, idx - 1);
2298 *start = map_sym->addr + obj_section_addr (sec);
2299 return map_sym->entry;
2308 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2309 instruction list from the ARM exception table entry ENTRY, allocate and
2310 return a prologue cache structure describing how to unwind this frame.
2312 Return NULL if the unwinding instruction list contains a "spare",
2313 "reserved" or "refuse to unwind" instruction as defined in section
2314 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2315 for the ARM Architecture" document. */
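/* Sketch of a typical case (illustrative): for a function whose
   prologue was "push {r4, lr}", the opcode string is a8 b0.  The
   decoder below handles 0xa8 by recording r4 at VSP and LR at VSP + 4
   and advancing VSP by 8, and 0xb0 ("Finish") by treating the saved LR
   slot as the saved PC, which is exactly what is needed to unwind past
   that prologue.  */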
2317 static struct arm_prologue_cache *
2318 arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2323 struct arm_prologue_cache *cache;
2324 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2325 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2331 /* Whenever we reload SP, we actually have to retrieve its
2332 actual value in the current frame. */
2335 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2337 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2338 vsp = get_frame_register_unsigned (this_frame, reg);
2342 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
2343 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2349 /* Decode next unwind instruction. */
2352 if ((insn & 0xc0) == 0)
2354 int offset = insn & 0x3f;
2355 vsp += (offset << 2) + 4;
2357 else if ((insn & 0xc0) == 0x40)
2359 int offset = insn & 0x3f;
2360 vsp -= (offset << 2) + 4;
2362 else if ((insn & 0xf0) == 0x80)
2364 int mask = ((insn & 0xf) << 8) | *entry++;
2367 /* The special case of an all-zero mask identifies
2368 "Refuse to unwind". We return NULL to fall back
2369 to the prologue analyzer. */
2373 /* Pop registers r4..r15 under mask. */
2374 for (i = 0; i < 12; i++)
2375 if (mask & (1 << i))
2377 cache->saved_regs[4 + i].addr = vsp;
2381 /* Special-case popping SP -- we need to reload vsp. */
2382 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2385 else if ((insn & 0xf0) == 0x90)
2387 int reg = insn & 0xf;
2389 /* Reserved cases. */
2390 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2393 /* Set SP from another register and mark VSP for reload. */
2394 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2397 else if ((insn & 0xf0) == 0xa0)
2399 int count = insn & 0x7;
2400 int pop_lr = (insn & 0x8) != 0;
2403 /* Pop r4..r[4+count]. */
2404 for (i = 0; i <= count; i++)
2406 cache->saved_regs[4 + i].addr = vsp;
2410 /* If indicated by flag, pop LR as well. */
2413 cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
2417 else if (insn == 0xb0)
2419 /* We could only have updated PC by popping into it; if so, it
2420 will show up as an address. Otherwise, copy LR into PC. */
2421 if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
2422 cache->saved_regs[ARM_PC_REGNUM]
2423 = cache->saved_regs[ARM_LR_REGNUM];
2428 else if (insn == 0xb1)
2430 int mask = *entry++;
2433 /* An all-zero mask or a mask >= 16 is "spare". */
2434 if (mask == 0 || mask >= 16)
2437 /* Pop r0..r3 under mask. */
2438 for (i = 0; i < 4; i++)
2439 if (mask & (1 << i))
2441 cache->saved_regs[i].addr = vsp;
2445 else if (insn == 0xb2)
2447 ULONGEST offset = 0;
2452 offset |= (*entry & 0x7f) << shift;
2455 while (*entry++ & 0x80);
2457 vsp += 0x204 + (offset << 2);
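/* The operand is a ULEB128 value.  For example (illustrative), a single
   operand byte of 0x04 gives OFFSET == 4 and advances VSP by
   0x204 + 0x10 == 0x214 bytes; a two-byte operand 0x81 0x01 gives
   OFFSET == 0x81 and advances VSP by 0x408 bytes.  */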
2459 else if (insn == 0xb3)
2461 int start = *entry >> 4;
2462 int count = (*entry++) & 0xf;
2465 /* Only registers D0..D15 are valid here. */
2466 if (start + count >= 16)
2469 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2470 for (i = 0; i <= count; i++)
2472 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2476 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2479 else if ((insn & 0xf8) == 0xb8)
2481 int count = insn & 0x7;
2484 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2485 for (i = 0; i <= count; i++)
2487 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2491 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2494 else if (insn == 0xc6)
2496 int start = *entry >> 4;
2497 int count = (*entry++) & 0xf;
2500 /* Only registers WR0..WR15 are valid. */
2501 if (start + count >= 16)
2504 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2505 for (i = 0; i <= count; i++)
2507 cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
2511 else if (insn == 0xc7)
2513 int mask = *entry++;
2516 /* An all-zero mask or a mask >= 16 is "spare". */
2517 if (mask == 0 || mask >= 16)
2520 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2521 for (i = 0; i < 4; i++)
2522 if (mask & (1 << i))
2524 cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
2528 else if ((insn & 0xf8) == 0xc0)
2530 int count = insn & 0x7;
2533 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2534 for (i = 0; i <= count; i++)
2536 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
2540 else if (insn == 0xc8)
2542 int start = *entry >> 4;
2543 int count = (*entry++) & 0xf;
2546 /* Only registers D0..D31 are valid. */
2547 if (start + count >= 16)
2550 /* Pop VFP double-precision registers
2551 D[16+start]..D[16+start+count]. */
2552 for (i = 0; i <= count; i++)
2554 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
2558 else if (insn == 0xc9)
2560 int start = *entry >> 4;
2561 int count = (*entry++) & 0xf;
2564 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2565 for (i = 0; i <= count; i++)
2567 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2571 else if ((insn & 0xf8) == 0xd0)
2573 int count = insn & 0x7;
2576 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2577 for (i = 0; i <= count; i++)
2579 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2585 /* Everything else is "spare". */
2590 /* If we restore SP from a register, assume this was the frame register.
2591 Otherwise just fall back to SP as frame register. */
2592 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2593 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2595 cache->framereg = ARM_SP_REGNUM;
2597 /* Determine offset to previous frame. */
2599 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2601 /* We already got the previous SP. */
2602 cache->prev_sp = vsp;
2607 /* Unwinding via ARM exception table entries. Note that the sniffer
2608 already computes a filled-in prologue cache, which is then used
2609 with the same arm_prologue_this_id and arm_prologue_prev_register
2610 routines also used for prologue-parsing based unwinding. */
2613 arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2614 struct frame_info *this_frame,
2615 void **this_prologue_cache)
2617 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2618 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2619 CORE_ADDR addr_in_block, exidx_region, func_start;
2620 struct arm_prologue_cache *cache;
2623 /* See if we have an ARM exception table entry covering this address. */
2624 addr_in_block = get_frame_address_in_block (this_frame);
2625 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2629 /* The ARM exception table does not describe unwind information
2630 for arbitrary PC values, but is guaranteed to be correct only
2631 at call sites. We have to decide here whether we want to use
2632 ARM exception table information for this frame, or fall back
2633 to using prologue parsing. (Note that if we have DWARF CFI,
2634 this sniffer isn't even called -- CFI is always preferred.)
2636 Before we make this decision, however, we check whether we
2637 actually have *symbol* information for the current frame.
2638 If not, prologue parsing would not work anyway, so we might
2639 as well use the exception table and hope for the best. */
2640 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2644 /* If the next frame is "normal", we are at a call site in this
2645 frame, so exception information is guaranteed to be valid. */
2646 if (get_next_frame (this_frame)
2647 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2650 /* We also assume exception information is valid if we're currently
2651 blocked in a system call. The system library is supposed to
2652 ensure this, so that e.g. pthread cancellation works. */
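/* For reference (encodings of the simplest forms): "svc #0" assembles
   to 0xdf00 in Thumb state and, with the AL condition, to 0xef000000 in
   ARM state, which is what the 0xff00 and 0x0f000000 masks in the
   checks below are matching.  */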
2653 if (arm_frame_is_thumb (this_frame))
2657 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 2,
2658 2, byte_order_for_code, &insn)
2659 && (insn & 0xff00) == 0xdf00 /* svc */)
2666 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 4,
2667 4, byte_order_for_code, &insn)
2668 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2672 /* Bail out if we don't know that exception information is valid. */
2676 /* The ARM exception index does not mark the *end* of the region
2677 covered by the entry, and some functions will not have any entry.
2678 To correctly recognize the end of the covered region, the linker
2679 should have inserted dummy records with a CANTUNWIND marker.
2681 Unfortunately, current versions of GNU ld do not reliably do
2682 this, and thus we may have found an incorrect entry above.
2683 As a (temporary) sanity check, we only use the entry if it
2684 lies *within* the bounds of the function. Note that this check
2685 might reject perfectly valid entries that just happen to cover
2686 multiple functions; therefore this check ought to be removed
2687 once the linker is fixed. */
2688 if (func_start > exidx_region)
2692 /* Decode the list of unwinding instructions into a prologue cache.
2693 Note that this may fail due to e.g. a "refuse to unwind" code. */
2694 cache = arm_exidx_fill_cache (this_frame, entry);
2698 *this_prologue_cache = cache;
2702 struct frame_unwind arm_exidx_unwind = {
2704 default_frame_unwind_stop_reason,
2705 arm_prologue_this_id,
2706 arm_prologue_prev_register,
2708 arm_exidx_unwind_sniffer
2711 static struct arm_prologue_cache *
2712 arm_make_epilogue_frame_cache (struct frame_info *this_frame)
2714 struct arm_prologue_cache *cache;
2717 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2718 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2720 /* Still rely on the offset calculated from the prologue. */
2721 arm_scan_prologue (this_frame, cache);
2723 /* Since we are in epilogue, the SP has been restored. */
2724 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2726 /* Calculate actual addresses of saved registers using offsets
2727 determined by arm_scan_prologue. */
2728 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2729 if (trad_frame_addr_p (cache->saved_regs, reg))
2730 cache->saved_regs[reg].addr += cache->prev_sp;
2735 /* Implementation of function hook 'this_id' in
2736 'struct frame_unwind' for epilogue unwinder. */
2739 arm_epilogue_frame_this_id (struct frame_info *this_frame,
2741 struct frame_id *this_id)
2743 struct arm_prologue_cache *cache;
2746 if (*this_cache == NULL)
2747 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2748 cache = (struct arm_prologue_cache *) *this_cache;
2750 /* Use function start address as part of the frame ID. If we cannot
2751 identify the start address (due to missing symbol information),
2752 fall back to just using the current PC. */
2753 pc = get_frame_pc (this_frame);
2754 func = get_frame_func (this_frame);
2758 (*this_id) = frame_id_build (cache->prev_sp, pc);
2761 /* Implementation of function hook 'prev_register' in
2762 'struct frame_unwind' for epilogue unwinder. */
2764 static struct value *
2765 arm_epilogue_frame_prev_register (struct frame_info *this_frame,
2766 void **this_cache, int regnum)
2768 if (*this_cache == NULL)
2769 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2771 return arm_prologue_prev_register (this_frame, this_cache, regnum);
2774 static int arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch,
2776 static int thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch,
2779 /* Implementation of function hook 'sniffer' in
2780 'struct frame_unwind' for epilogue unwinder. */
2783 arm_epilogue_frame_sniffer (const struct frame_unwind *self,
2784 struct frame_info *this_frame,
2785 void **this_prologue_cache)
2787 if (frame_relative_level (this_frame) == 0)
2789 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2790 CORE_ADDR pc = get_frame_pc (this_frame);
2792 if (arm_frame_is_thumb (this_frame))
2793 return thumb_stack_frame_destroyed_p (gdbarch, pc);
2795 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
2801 /* Frame unwinder from epilogue. */
2803 static const struct frame_unwind arm_epilogue_frame_unwind =
2806 default_frame_unwind_stop_reason,
2807 arm_epilogue_frame_this_id,
2808 arm_epilogue_frame_prev_register,
2810 arm_epilogue_frame_sniffer,
2813 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2814 trampoline, return the target PC. Otherwise return 0.
2816 void call0a (char c, short s, int i, long l) {}
2820 (*pointer_to_call0a) (c, s, i, l);
2823 Instead of calling a stub library function _call_via_xx (xx is
2824 the register name), GCC may inline the trampoline in the object
2825 file as below (register r2 has the address of call0a).
2828 .type main, %function
2837 The trampoline 'bx r2' doesn't belong to main. */
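/* For example (illustrative encoding): "bx r2" assembles to 0x4710, so
   it matches the (insn & 0xff80) == 0x4700 test below, and
   bits (insn, 3, 6) extracts register number 2, whose value gives the
   real call target.  */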
2840 arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2842 /* The heuristic for recognizing such a trampoline is that FRAME is
2843 executing in Thumb mode and the instruction at PC is 'bx Rm'. */
2844 if (arm_frame_is_thumb (frame))
2848 if (target_read_memory (pc, buf, 2) == 0)
2850 struct gdbarch *gdbarch = get_frame_arch (frame);
2851 enum bfd_endian byte_order_for_code
2852 = gdbarch_byte_order_for_code (gdbarch);
2854 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2856 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2859 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2861 /* Clear the LSB so that gdb core sets step-resume
2862 breakpoint at the right address. */
2863 return UNMAKE_THUMB_ADDR (dest);
2871 static struct arm_prologue_cache *
2872 arm_make_stub_cache (struct frame_info *this_frame)
2874 struct arm_prologue_cache *cache;
2876 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2877 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2879 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2884 /* Our frame ID for a stub frame is the current SP and LR. */
2887 arm_stub_this_id (struct frame_info *this_frame,
2889 struct frame_id *this_id)
2891 struct arm_prologue_cache *cache;
2893 if (*this_cache == NULL)
2894 *this_cache = arm_make_stub_cache (this_frame);
2895 cache = (struct arm_prologue_cache *) *this_cache;
2897 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
2901 arm_stub_unwind_sniffer (const struct frame_unwind *self,
2902 struct frame_info *this_frame,
2903 void **this_prologue_cache)
2905 CORE_ADDR addr_in_block;
2907 CORE_ADDR pc, start_addr;
2910 addr_in_block = get_frame_address_in_block (this_frame);
2911 pc = get_frame_pc (this_frame);
2912 if (in_plt_section (addr_in_block)
2913 /* We also use the stub unwinder if the target memory is unreadable
2914 to avoid having the prologue unwinder try to read it. */
2915 || target_read_memory (pc, dummy, 4) != 0)
2918 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2919 && arm_skip_bx_reg (this_frame, pc) != 0)
2925 struct frame_unwind arm_stub_unwind = {
2927 default_frame_unwind_stop_reason,
2929 arm_prologue_prev_register,
2931 arm_stub_unwind_sniffer
2934 /* Put here the code to store, into CACHE->saved_regs, the addresses
2935 of the saved registers of the frame described by THIS_FRAME. CACHE is
2938 static struct arm_prologue_cache *
2939 arm_m_exception_cache (struct frame_info *this_frame)
2941 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2942 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2943 struct arm_prologue_cache *cache;
2944 CORE_ADDR unwound_sp;
2947 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2948 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2950 unwound_sp = get_frame_register_unsigned (this_frame,
2953 /* The hardware saves eight 32-bit words, comprising xPSR,
2954 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
2955 "B1.5.6 Exception entry behavior" in
2956 "ARMv7-M Architecture Reference Manual". */
2957 cache->saved_regs[0].addr = unwound_sp;
2958 cache->saved_regs[1].addr = unwound_sp + 4;
2959 cache->saved_regs[2].addr = unwound_sp + 8;
2960 cache->saved_regs[3].addr = unwound_sp + 12;
2961 cache->saved_regs[12].addr = unwound_sp + 16;
2962 cache->saved_regs[14].addr = unwound_sp + 20;
2963 cache->saved_regs[15].addr = unwound_sp + 24;
2964 cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;
2966 /* If bit 9 of the saved xPSR is set, then there is a four-byte
2967 aligner between the top of the 32-byte stack frame and the
2968 previous context's stack pointer. */
2969 cache->prev_sp = unwound_sp + 32;
2970 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
2971 && (xpsr & (1 << 9)) != 0)
2972 cache->prev_sp += 4;
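/* Illustrative example (addresses are made up): if UNWOUND_SP is
   0x20001000, the eight saved words occupy 0x20001000..0x2000101f, so
   PREV_SP is 0x20001020; if bit 9 of the saved xPSR indicates a padding
   word, PREV_SP is 0x20001024 instead.  */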
2977 /* Implementation of function hook 'this_id' in
2978 'struct frame_unwind'. */
2981 arm_m_exception_this_id (struct frame_info *this_frame,
2983 struct frame_id *this_id)
2985 struct arm_prologue_cache *cache;
2987 if (*this_cache == NULL)
2988 *this_cache = arm_m_exception_cache (this_frame);
2989 cache = (struct arm_prologue_cache *) *this_cache;
2991 /* Our frame ID for an exception frame is the current SP and PC. */
2992 *this_id = frame_id_build (cache->prev_sp,
2993 get_frame_pc (this_frame));
2996 /* Implementation of function hook 'prev_register' in
2997 'struct frame_unwind'. */
2999 static struct value *
3000 arm_m_exception_prev_register (struct frame_info *this_frame,
3004 struct arm_prologue_cache *cache;
3006 if (*this_cache == NULL)
3007 *this_cache = arm_m_exception_cache (this_frame);
3008 cache = (struct arm_prologue_cache *) *this_cache;
3010 /* The value was already reconstructed into PREV_SP. */
3011 if (prev_regnum == ARM_SP_REGNUM)
3012 return frame_unwind_got_constant (this_frame, prev_regnum,
3015 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
3019 /* Implementation of function hook 'sniffer' in
3020 'struct frame_unwind'. */
3023 arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3024 struct frame_info *this_frame,
3025 void **this_prologue_cache)
3027 CORE_ADDR this_pc = get_frame_pc (this_frame);
3029 /* No need to check is_m; this sniffer is only registered for
3030 M-profile architectures. */
3032 /* Check if exception frame returns to a magic PC value. */
3033 return arm_m_addr_is_magic (this_pc);
3036 /* Frame unwinder for M-profile exceptions. */
3038 struct frame_unwind arm_m_exception_unwind =
3041 default_frame_unwind_stop_reason,
3042 arm_m_exception_this_id,
3043 arm_m_exception_prev_register,
3045 arm_m_exception_unwind_sniffer
3049 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
3051 struct arm_prologue_cache *cache;
3053 if (*this_cache == NULL)
3054 *this_cache = arm_make_prologue_cache (this_frame);
3055 cache = (struct arm_prologue_cache *) *this_cache;
3057 return cache->prev_sp - cache->framesize;
3060 struct frame_base arm_normal_base = {
3061 &arm_prologue_unwind,
3062 arm_normal_frame_base,
3063 arm_normal_frame_base,
3064 arm_normal_frame_base
3067 /* Assuming THIS_FRAME is a dummy, return the frame ID of that
3068 dummy frame. The frame ID's base needs to match the TOS value
3069 saved by save_dummy_frame_tos() and returned from
3070 arm_push_dummy_call, and the PC needs to match the dummy frame's
3073 static struct frame_id
3074 arm_dummy_id (struct gdbarch *gdbarch, struct frame_info *this_frame)
3076 return frame_id_build (get_frame_register_unsigned (this_frame,
3078 get_frame_pc (this_frame));
3081 /* Given THIS_FRAME, find the previous frame's resume PC (which will
3082 be used to construct the previous frame's ID, after looking up the
3083 containing function). */
3086 arm_unwind_pc (struct gdbarch *gdbarch, struct frame_info *this_frame)
3089 pc = frame_unwind_register_unsigned (this_frame, ARM_PC_REGNUM);
3090 return arm_addr_bits_remove (gdbarch, pc);
3094 arm_unwind_sp (struct gdbarch *gdbarch, struct frame_info *this_frame)
3096 return frame_unwind_register_unsigned (this_frame, ARM_SP_REGNUM);
3099 static struct value *
3100 arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
3103 struct gdbarch * gdbarch = get_frame_arch (this_frame);
3105 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
3110 /* The PC is normally copied from the return column, which
3111 describes saves of LR. However, that version may have an
3112 extra bit set to indicate Thumb state. The bit is not
3114 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3115 return frame_unwind_got_constant (this_frame, regnum,
3116 arm_addr_bits_remove (gdbarch, lr));
3119 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3120 cpsr = get_frame_register_unsigned (this_frame, regnum);
3121 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3122 if (IS_THUMB_ADDR (lr))
3126 return frame_unwind_got_constant (this_frame, regnum, cpsr);
3129 internal_error (__FILE__, __LINE__,
3130 _("Unexpected register %d"), regnum);
3135 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3136 struct dwarf2_frame_state_reg *reg,
3137 struct frame_info *this_frame)
3143 reg->how = DWARF2_FRAME_REG_FN;
3144 reg->loc.fn = arm_dwarf2_prev_register;
3147 reg->how = DWARF2_FRAME_REG_CFA;
3152 /* Implement the stack_frame_destroyed_p gdbarch method. */
3155 thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3157 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3158 unsigned int insn, insn2;
3159 int found_return = 0, found_stack_adjust = 0;
3160 CORE_ADDR func_start, func_end;
3164 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3167 /* The epilogue is a sequence of instructions along the following lines:
3169 - add stack frame size to SP or FP
3170 - [if frame pointer used] restore SP from FP
3171 - restore registers from SP [may include PC]
3172 - a return-type instruction [if PC wasn't already restored]
3174 In a first pass, we scan forward from the current PC and verify the
3175 instructions we find as compatible with this sequence, ending in a
3178 However, this is not sufficient to distinguish indirect function calls
3179 within a function from indirect tail calls in the epilogue in some cases.
3180 Therefore, if we didn't already find any SP-changing instruction during
3181 forward scan, we add a backward scanning heuristic to ensure we actually
3182 are in the epilogue. */
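/* Illustrative example of the forward scan: the epilogue
   "add sp, #8; pop {r4, r7, pc}" appears in memory as the halfwords
   0xb002 0xbd90; the first satisfies thumb_instruction_restores_sp and
   the second matches the pop-with-PC pattern, so FOUND_RETURN is set.
   (The encodings are standard Thumb, but the sequence itself is a
   made-up example.)  */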
3185 while (scan_pc < func_end && !found_return)
3187 if (target_read_memory (scan_pc, buf, 2))
3191 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3193 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3195 else if (insn == 0x46f7) /* mov pc, lr */
3197 else if (thumb_instruction_restores_sp (insn))
3199 if ((insn & 0xff00) == 0xbd00) /* pop <registers, PC> */
3202 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
3204 if (target_read_memory (scan_pc, buf, 2))
3208 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3210 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3212 if (insn2 & 0x8000) /* <registers> include PC. */
3215 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3216 && (insn2 & 0x0fff) == 0x0b04)
3218 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3221 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3222 && (insn2 & 0x0e00) == 0x0a00)
3234 /* Since any instruction in the epilogue sequence, with the possible
3235 exception of return itself, updates the stack pointer, we need to
3236 scan backwards for at most one instruction. Try either a 16-bit or
3237 a 32-bit instruction. This is just a heuristic, so we do not worry
3238 too much about false positives. */
3240 if (pc - 4 < func_start)
3242 if (target_read_memory (pc - 4, buf, 4))
3245 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3246 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3248 if (thumb_instruction_restores_sp (insn2))
3249 found_stack_adjust = 1;
3250 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3251 found_stack_adjust = 1;
3252 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3253 && (insn2 & 0x0fff) == 0x0b04)
3254 found_stack_adjust = 1;
3255 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3256 && (insn2 & 0x0e00) == 0x0a00)
3257 found_stack_adjust = 1;
3259 return found_stack_adjust;
3263 arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch, CORE_ADDR pc)
3265 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3268 CORE_ADDR func_start, func_end;
3270 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3273 /* We are in the epilogue if the previous instruction was a stack
3274 adjustment and the next instruction is a possible return (bx, mov
3275 pc, or pop). We could have to scan backwards to find the stack
3276 adjustment, or forwards to find the return, but this is a decent
3277 approximation. First scan forwards. */
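/* For example (illustrative): "pop {r4-r11, pc}" assembles to
   0xe8bd8ff0, which matches the LDMIA-sp-writeback pattern tested below
   and has the PC bit (bit 15) set, so it counts as a return.  */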
3280 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3281 if (bits (insn, 28, 31) != INST_NV)
3283 if ((insn & 0x0ffffff0) == 0x012fff10)
3286 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3289 else if ((insn & 0x0fff0000) == 0x08bd0000
3290 && (insn & 0x0000c000) != 0)
3291 /* POP (LDMIA), including PC or LR. */
3298 /* Scan backwards. This is just a heuristic, so do not worry about
3299 false positives from mode changes. */
3301 if (pc < func_start + 4)
3304 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
3305 if (arm_instruction_restores_sp (insn))
3311 /* Implement the stack_frame_destroyed_p gdbarch method. */
3314 arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3316 if (arm_pc_is_thumb (gdbarch, pc))
3317 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3319 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
3322 /* When arguments must be pushed onto the stack, they go on in reverse
3323 order. The code below implements a FILO (stack) to do this. */
3328 struct stack_item *prev;
3332 static struct stack_item *
3333 push_stack_item (struct stack_item *prev, const gdb_byte *contents, int len)
3335 struct stack_item *si;
3336 si = XNEW (struct stack_item);
3337 si->data = (gdb_byte *) xmalloc (len);
3340 memcpy (si->data, contents, len);
3344 static struct stack_item *
3345 pop_stack_item (struct stack_item *si)
3347 struct stack_item *dead = si;
3355 /* Return the alignment (in bytes) of the given type. */
3358 arm_type_align (struct type *t)
3364 t = check_typedef (t);
3365 switch (TYPE_CODE (t))
3368 /* Should never happen. */
3369 internal_error (__FILE__, __LINE__, _("unknown type alignment"));
3373 case TYPE_CODE_ENUM:
3377 case TYPE_CODE_RANGE:
3379 case TYPE_CODE_RVALUE_REF:
3380 case TYPE_CODE_CHAR:
3381 case TYPE_CODE_BOOL:
3382 return TYPE_LENGTH (t);
3384 case TYPE_CODE_ARRAY:
3385 if (TYPE_VECTOR (t))
3387 /* Use the natural alignment for vector types (the same as for
3388 scalar types), but cap the alignment at 64 bits. */
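/* For example, an 8-byte or a 16-byte Neon vector both end up with
   8-byte alignment here, while a 4-byte vector keeps its natural 4-byte
   alignment.  */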
3389 if (TYPE_LENGTH (t) > 8)
3392 return TYPE_LENGTH (t);
3395 return arm_type_align (TYPE_TARGET_TYPE (t));
3396 case TYPE_CODE_COMPLEX:
3397 return arm_type_align (TYPE_TARGET_TYPE (t));
3399 case TYPE_CODE_STRUCT:
3400 case TYPE_CODE_UNION:
3402 for (n = 0; n < TYPE_NFIELDS (t); n++)
3404 falign = arm_type_align (TYPE_FIELD_TYPE (t, n));
3412 /* Possible base types for a candidate for passing and returning in
3415 enum arm_vfp_cprc_base_type
3424 /* The length of one element of base type B. */
3427 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3431 case VFP_CPRC_SINGLE:
3433 case VFP_CPRC_DOUBLE:
3435 case VFP_CPRC_VEC64:
3437 case VFP_CPRC_VEC128:
3440 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3445 /* The character ('s', 'd' or 'q') for the type of VFP register used
3446 for passing base type B. */
3449 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3453 case VFP_CPRC_SINGLE:
3455 case VFP_CPRC_DOUBLE:
3457 case VFP_CPRC_VEC64:
3459 case VFP_CPRC_VEC128:
3462 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3467 /* Determine whether T may be part of a candidate for passing and
3468 returning in VFP registers, ignoring the limit on the total number
3469 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3470 classification of the first valid component found; if it is not
3471 VFP_CPRC_UNKNOWN, all components must have the same classification
3472 as *BASE_TYPE. If it is found that T contains a type not permitted
3473 for passing and returning in VFP registers, a type differently
3474 classified from *BASE_TYPE, or two types differently classified
3475 from each other, return -1, otherwise return the total number of
3476 base-type elements found (possibly 0 in an empty structure or
3477 array). Vector types are not currently supported, matching the
3478 generic AAPCS support. */
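/* Illustrative classifications under these rules (the example types are
   made up): "struct { double x; double y; }" yields VFP_CPRC_DOUBLE
   with a count of 2; "float _Complex" yields VFP_CPRC_SINGLE with a
   count of 2; "struct { float f; double d; }" mixes base types and is
   rejected with -1.  */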
3481 arm_vfp_cprc_sub_candidate (struct type *t,
3482 enum arm_vfp_cprc_base_type *base_type)
3484 t = check_typedef (t);
3485 switch (TYPE_CODE (t))
3488 switch (TYPE_LENGTH (t))
3491 if (*base_type == VFP_CPRC_UNKNOWN)
3492 *base_type = VFP_CPRC_SINGLE;
3493 else if (*base_type != VFP_CPRC_SINGLE)
3498 if (*base_type == VFP_CPRC_UNKNOWN)
3499 *base_type = VFP_CPRC_DOUBLE;
3500 else if (*base_type != VFP_CPRC_DOUBLE)
3509 case TYPE_CODE_COMPLEX:
3510 /* Arguments of complex T where T is one of the types float or
3511 double get treated as if they are implemented as:
3520 switch (TYPE_LENGTH (t))
3523 if (*base_type == VFP_CPRC_UNKNOWN)
3524 *base_type = VFP_CPRC_SINGLE;
3525 else if (*base_type != VFP_CPRC_SINGLE)
3530 if (*base_type == VFP_CPRC_UNKNOWN)
3531 *base_type = VFP_CPRC_DOUBLE;
3532 else if (*base_type != VFP_CPRC_DOUBLE)
3541 case TYPE_CODE_ARRAY:
3543 if (TYPE_VECTOR (t))
3545 /* A 64-bit or 128-bit containerized vector type is a VFP CPRC. */
3547 switch (TYPE_LENGTH (t))
3550 if (*base_type == VFP_CPRC_UNKNOWN)
3551 *base_type = VFP_CPRC_VEC64;
3554 if (*base_type == VFP_CPRC_UNKNOWN)
3555 *base_type = VFP_CPRC_VEC128;
3566 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t),
3570 if (TYPE_LENGTH (t) == 0)
3572 gdb_assert (count == 0);
3575 else if (count == 0)
3577 unitlen = arm_vfp_cprc_unit_length (*base_type);
3578 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3579 return TYPE_LENGTH (t) / unitlen;
3584 case TYPE_CODE_STRUCT:
3589 for (i = 0; i < TYPE_NFIELDS (t); i++)
3593 if (!field_is_static (&TYPE_FIELD (t, i)))
3594 sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3596 if (sub_count == -1)
3600 if (TYPE_LENGTH (t) == 0)
3602 gdb_assert (count == 0);
3605 else if (count == 0)
3607 unitlen = arm_vfp_cprc_unit_length (*base_type);
3608 if (TYPE_LENGTH (t) != unitlen * count)
3613 case TYPE_CODE_UNION:
3618 for (i = 0; i < TYPE_NFIELDS (t); i++)
3620 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3622 if (sub_count == -1)
3624 count = (count > sub_count ? count : sub_count);
3626 if (TYPE_LENGTH (t) == 0)
3628 gdb_assert (count == 0);
3631 else if (count == 0)
3633 unitlen = arm_vfp_cprc_unit_length (*base_type);
3634 if (TYPE_LENGTH (t) != unitlen * count)
3646 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3647 if passed to or returned from a non-variadic function with the VFP
3648 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3649 *BASE_TYPE to the base type for T and *COUNT to the number of
3650 elements of that base type before returning. */
3653 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3656 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3657 int c = arm_vfp_cprc_sub_candidate (t, &b);
3658 if (c <= 0 || c > 4)
3665 /* Return 1 if the VFP ABI should be used for passing arguments to and
3666 returning values from a function of type FUNC_TYPE, 0
3670 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3672 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3673 /* Variadic functions always use the base ABI. Assume that functions
3674 without debug info are not variadic. */
3675 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3677 /* The VFP ABI is only supported as a variant of AAPCS. */
3678 if (tdep->arm_abi != ARM_ABI_AAPCS)
3680 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3683 /* We currently only support passing parameters in integer registers, which
3684 conforms with GCC's default model, and VFP argument passing following
3685 the VFP variant of AAPCS. Several other variants exist and
3686 we should probably support some of them based on the selected ABI. */
3689 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
3690 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3691 struct value **args, CORE_ADDR sp, int struct_return,
3692 CORE_ADDR struct_addr)
3694 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3698 struct stack_item *si = NULL;
3701 unsigned vfp_regs_free = (1 << 16) - 1;
3703 /* Determine the type of this function and whether the VFP ABI
3705 ftype = check_typedef (value_type (function));
3706 if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3707 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3708 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
3710 /* Set the return address. For the ARM, the return breakpoint is
3711 always at BP_ADDR. */
3712 if (arm_pc_is_thumb (gdbarch, bp_addr))
3714 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
3716 /* Walk through the list of args and determine how large a temporary
3717 stack is required. Need to take care here as structs may be
3718 passed on the stack, and we have to push them. */
3721 argreg = ARM_A1_REGNUM;
3724 /* The struct_return pointer occupies the first parameter
3725 passing register. */
3729 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
3730 gdbarch_register_name (gdbarch, argreg),
3731 paddress (gdbarch, struct_addr));
3732 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3736 for (argnum = 0; argnum < nargs; argnum++)
3739 struct type *arg_type;
3740 struct type *target_type;
3741 enum type_code typecode;
3742 const bfd_byte *val;
3744 enum arm_vfp_cprc_base_type vfp_base_type;
3746 int may_use_core_reg = 1;
3748 arg_type = check_typedef (value_type (args[argnum]));
3749 len = TYPE_LENGTH (arg_type);
3750 target_type = TYPE_TARGET_TYPE (arg_type);
3751 typecode = TYPE_CODE (arg_type);
3752 val = value_contents (args[argnum]);
3754 align = arm_type_align (arg_type);
3755 /* Round alignment up to a whole number of words. */
3756 align = (align + INT_REGISTER_SIZE - 1) & ~(INT_REGISTER_SIZE - 1);
3757 /* Different ABIs have different maximum alignments. */
3758 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3760 /* The APCS ABI only requires word alignment. */
3761 align = INT_REGISTER_SIZE;
3765 /* The AAPCS requires at most doubleword alignment. */
3766 if (align > INT_REGISTER_SIZE * 2)
3767 align = INT_REGISTER_SIZE * 2;
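/* For example, under AAPCS a "double" or "long long" argument keeps its
   natural 8-byte alignment, anything more strictly aligned is capped at
   8 bytes, and under APCS everything is aligned to a single 4-byte
   word.  */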
3771 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3779 /* Because this is a CPRC it cannot go in a core register or
3780 cause a core register to be skipped for alignment.
3781 Either it goes in VFP registers and the rest of this loop
3782 iteration is skipped for this argument, or it goes on the
3783 stack (and the stack alignment code is correct for this
3785 may_use_core_reg = 0;
3787 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3788 shift = unit_length / 4;
3789 mask = (1 << (shift * vfp_base_count)) - 1;
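/* Illustrative example: for a CPRC of two doubles, UNIT_LENGTH is 8,
   SHIFT is 2 and MASK is 0xf, so the loop below looks for four
   consecutive free single-precision slots; a match at s0 allocates
   d0/d1, a match at s2 allocates d1/d2, and so on.  */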
3790 for (regno = 0; regno < 16; regno += shift)
3791 if (((vfp_regs_free >> regno) & mask) == mask)
3800 vfp_regs_free &= ~(mask << regno);
3801 reg_scaled = regno / shift;
3802 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3803 for (i = 0; i < vfp_base_count; i++)
3807 if (reg_char == 'q')
3808 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
3809 val + i * unit_length);
3812 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3813 reg_char, reg_scaled + i);
3814 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3816 regcache_cooked_write (regcache, regnum,
3817 val + i * unit_length);
3824 /* This CPRC could not go in VFP registers, so all VFP
3825 registers are now marked as used. */
3830 /* Push stack padding for doubleword alignment. */
3831 if (nstack & (align - 1))
3833 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3834 nstack += INT_REGISTER_SIZE;
3837 /* Doubleword aligned quantities must go in even register pairs. */
3838 if (may_use_core_reg
3839 && argreg <= ARM_LAST_ARG_REGNUM
3840 && align > INT_REGISTER_SIZE
3844 /* If the argument is a pointer to a function, and it is a
3845 Thumb function, create a LOCAL copy of the value and set
3846 the THUMB bit in it. */
3847 if (TYPE_CODE_PTR == typecode
3848 && target_type != NULL
3849 && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
3851 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
3852 if (arm_pc_is_thumb (gdbarch, regval))
3854 bfd_byte *copy = (bfd_byte *) alloca (len);
3855 store_unsigned_integer (copy, len, byte_order,
3856 MAKE_THUMB_ADDR (regval));
3861 /* Copy the argument to general registers or the stack in
3862 register-sized pieces. Large arguments are split between
3863 registers and stack. */
3866 int partial_len = len < INT_REGISTER_SIZE ? len : INT_REGISTER_SIZE;
3868 = extract_unsigned_integer (val, partial_len, byte_order);
3870 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
3872 /* The argument is being passed in a general purpose
3874 if (byte_order == BFD_ENDIAN_BIG)
3875 regval <<= (INT_REGISTER_SIZE - partial_len) * 8;
3877 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
3879 gdbarch_register_name
3881 phex (regval, INT_REGISTER_SIZE));
3882 regcache_cooked_write_unsigned (regcache, argreg, regval);
3887 gdb_byte buf[INT_REGISTER_SIZE];
3889 memset (buf, 0, sizeof (buf));
3890 store_unsigned_integer (buf, partial_len, byte_order, regval);
3892 /* Push the arguments onto the stack. */
3894 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3896 si = push_stack_item (si, buf, INT_REGISTER_SIZE);
3897 nstack += INT_REGISTER_SIZE;
3904 /* If we have an odd number of words to push, then decrement the stack
3905 by one word now, so the first stack argument will be doubleword aligned. */
3912 write_memory (sp, si->data, si->len);
3913 si = pop_stack_item (si);
3916 /* Finally, update the SP register. */
3917 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3923 /* Always align the frame to an 8-byte boundary. This is required on
3924 some platforms and harmless on the rest. */
3927 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3929 /* Align the stack to eight bytes. */
3930 return sp & ~ (CORE_ADDR) 7;
3934 print_fpu_flags (struct ui_file *file, int flags)
3936 if (flags & (1 << 0))
3937 fputs_filtered ("IVO ", file);
3938 if (flags & (1 << 1))
3939 fputs_filtered ("DVZ ", file);
3940 if (flags & (1 << 2))
3941 fputs_filtered ("OFL ", file);
3942 if (flags & (1 << 3))
3943 fputs_filtered ("UFL ", file);
3944 if (flags & (1 << 4))
3945 fputs_filtered ("INX ", file);
3946 fputc_filtered ('\n', file);
3949 /* Print interesting information about the floating point processor
3950 (if present) or emulator. */
3952 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
3953 struct frame_info *frame, const char *args)
3955 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
3958 type = (status >> 24) & 127;
3959 if (status & (1 << 31))
3960 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
3962 fprintf_filtered (file, _("Software FPU type %d\n"), type);
3963 /* i18n: [floating point unit] mask */
3964 fputs_filtered (_("mask: "), file);
3965 print_fpu_flags (file, status >> 16);
3966 /* i18n: [floating point unit] flags */
3967 fputs_filtered (_("flags: "), file);
3968 print_fpu_flags (file, status);
3971 /* Construct the ARM extended floating point type. */
3972 static struct type *
3973 arm_ext_type (struct gdbarch *gdbarch)
3975 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3977 if (!tdep->arm_ext_type)
3979 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
3980 floatformats_arm_ext);
3982 return tdep->arm_ext_type;
3985 static struct type *
3986 arm_neon_double_type (struct gdbarch *gdbarch)
3988 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3990 if (tdep->neon_double_type == NULL)
3992 struct type *t, *elem;
3994 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
3996 elem = builtin_type (gdbarch)->builtin_uint8;
3997 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
3998 elem = builtin_type (gdbarch)->builtin_uint16;
3999 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
4000 elem = builtin_type (gdbarch)->builtin_uint32;
4001 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
4002 elem = builtin_type (gdbarch)->builtin_uint64;
4003 append_composite_type_field (t, "u64", elem);
4004 elem = builtin_type (gdbarch)->builtin_float;
4005 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
4006 elem = builtin_type (gdbarch)->builtin_double;
4007 append_composite_type_field (t, "f64", elem);
4009 TYPE_VECTOR (t) = 1;
4010 TYPE_NAME (t) = "neon_d";
4011 tdep->neon_double_type = t;
4014 return tdep->neon_double_type;
4017 /* FIXME: The vector types are not correctly ordered on big-endian
4018 targets. Just as s0 is the low bits of d0, d0[0] is also the low
4019 bits of d0 - regardless of what unit size is being held in d0. So
4020 the offset of the first uint8 in d0 is 7, but the offset of the
4021 first float is 4. This code works as-is for little-endian
4024 static struct type *
4025 arm_neon_quad_type (struct gdbarch *gdbarch)
4027 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4029 if (tdep->neon_quad_type == NULL)
4031 struct type *t, *elem;
4033 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
4035 elem = builtin_type (gdbarch)->builtin_uint8;
4036 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
4037 elem = builtin_type (gdbarch)->builtin_uint16;
4038 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
4039 elem = builtin_type (gdbarch)->builtin_uint32;
4040 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
4041 elem = builtin_type (gdbarch)->builtin_uint64;
4042 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
4043 elem = builtin_type (gdbarch)->builtin_float;
4044 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
4045 elem = builtin_type (gdbarch)->builtin_double;
4046 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
4048 TYPE_VECTOR (t) = 1;
4049 TYPE_NAME (t) = "neon_q";
4050 tdep->neon_quad_type = t;
4053 return tdep->neon_quad_type;
4056 /* Return the GDB type object for the "standard" data type of data in
4059 static struct type *
4060 arm_register_type (struct gdbarch *gdbarch, int regnum)
4062 int num_regs = gdbarch_num_regs (gdbarch);
4064 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
4065 && regnum >= num_regs && regnum < num_regs + 32)
4066 return builtin_type (gdbarch)->builtin_float;
4068 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
4069 && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
4070 return arm_neon_quad_type (gdbarch);
4072 /* If the target description has register information, we are only
4073 in this function so that we can override the types of
4074 double-precision registers for NEON. */
4075 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
4077 struct type *t = tdesc_register_type (gdbarch, regnum);
4079 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
4080 && TYPE_CODE (t) == TYPE_CODE_FLT
4081 && gdbarch_tdep (gdbarch)->have_neon)
4082 return arm_neon_double_type (gdbarch);
4087 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
4089 if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
4090 return builtin_type (gdbarch)->builtin_void;
4092 return arm_ext_type (gdbarch);
4094 else if (regnum == ARM_SP_REGNUM)
4095 return builtin_type (gdbarch)->builtin_data_ptr;
4096 else if (regnum == ARM_PC_REGNUM)
4097 return builtin_type (gdbarch)->builtin_func_ptr;
4098 else if (regnum >= ARRAY_SIZE (arm_register_names))
4099 /* These registers are only supported on targets which supply
4100 an XML description. */
4101 return builtin_type (gdbarch)->builtin_int0;
4103 return builtin_type (gdbarch)->builtin_uint32;
4106 /* Map a DWARF register REGNUM onto the appropriate GDB register
4110 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
4112 /* Core integer regs. */
4113 if (reg >= 0 && reg <= 15)
4116 /* Legacy FPA encoding. These were once used in a way which
4117 overlapped with VFP register numbering, so their use is
4118 discouraged, but GDB doesn't support the ARM toolchain
4119 which used them for VFP. */
4120 if (reg >= 16 && reg <= 23)
4121 return ARM_F0_REGNUM + reg - 16;
4123 /* New assignments for the FPA registers. */
4124 if (reg >= 96 && reg <= 103)
4125 return ARM_F0_REGNUM + reg - 96;
4127 /* WMMX register assignments. */
4128 if (reg >= 104 && reg <= 111)
4129 return ARM_WCGR0_REGNUM + reg - 104;
4131 if (reg >= 112 && reg <= 127)
4132 return ARM_WR0_REGNUM + reg - 112;
4134 if (reg >= 192 && reg <= 199)
4135 return ARM_WC0_REGNUM + reg - 192;
4137 /* VFP v2 registers. A double precision value is actually
4138 in d1 rather than s2, but the ABI only defines numbering
4139 for the single precision registers. This will "just work"
4140 in GDB for little endian targets (we'll read eight bytes,
4141 starting in s0 and then progressing to s1), but will be
4142 reversed on big endian targets with VFP. This won't
4143 be a problem for the new Neon quad registers; you're supposed
4144 to use DW_OP_piece for those. */
4145 if (reg >= 64 && reg <= 95)
4149 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
4150 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4154 /* VFP v3 / Neon registers. This range is also used for VFP v2
4155 registers, except that it now describes d0 instead of s0. */
4156 if (reg >= 256 && reg <= 287)
4160 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
4161 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4168 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4170 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4173 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
4175 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4176 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4178 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4179 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4181 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4182 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4184 if (reg < NUM_GREGS)
4185 return SIM_ARM_R0_REGNUM + reg;
4188 if (reg < NUM_FREGS)
4189 return SIM_ARM_FP0_REGNUM + reg;
4192 if (reg < NUM_SREGS)
4193 return SIM_ARM_FPS_REGNUM + reg;
4196 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4199 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
4200 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
4201 NULL if an error occurs. BUF is freed. */
4204 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
4205 int old_len, int new_len)
4208 int bytes_to_read = new_len - old_len;
4210 new_buf = (gdb_byte *) xmalloc (new_len);
4211 memcpy (new_buf + bytes_to_read, buf, old_len);
4213 if (target_read_code (endaddr - new_len, new_buf, bytes_to_read) != 0)
4221 /* An IT block is at most the 2-byte IT instruction followed by
4222 four 4-byte instructions. The furthest back we must search to
4223 find an IT block that affects the current instruction is thus
4224 2 + 3 * 4 == 14 bytes. */
4225 #define MAX_IT_BLOCK_PREFIX 14
4227 /* Use a quick scan if there are more than this many bytes of
4229 #define IT_SCAN_THRESHOLD 32
4231 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
4232 A breakpoint in an IT block may not be hit, depending on the
4235 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
4239 CORE_ADDR boundary, func_start;
4241 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
4242 int i, any, last_it, last_it_count;
4244 /* If we are using BKPT breakpoints, none of this is necessary. */
4245 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
4248 /* ARM mode does not have this problem. */
4249 if (!arm_pc_is_thumb (gdbarch, bpaddr))
4252 /* We are setting a breakpoint in Thumb code that could potentially
4253 contain an IT block. The first step is to find how much Thumb
4254 code there is; we do not need to read outside of known Thumb
4256 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
4258 /* Thumb-2 code must have mapping symbols to have a chance. */
4261 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
4263 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
4264 && func_start > boundary)
4265 boundary = func_start;
4267 /* Search for a candidate IT instruction. We have to do some fancy
4268 footwork to distinguish a real IT instruction from the second
4269 half of a 32-bit instruction, but there is no need for that if
4270 there's no candidate. */
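/* For reference: an IT instruction is encoded as 0xbf00 with a non-zero
   low nibble (the condition mask); for example "it eq" is 0xbf08.  A
   low nibble of zero would instead be a hint such as NOP or WFI, which
   is why the candidate test below also requires
   (inst1 & 0x000f) != 0.  */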
4271 buf_len = std::min (bpaddr - boundary, (CORE_ADDR) MAX_IT_BLOCK_PREFIX);
4273 /* No room for an IT instruction. */
4276 buf = (gdb_byte *) xmalloc (buf_len);
4277 if (target_read_code (bpaddr - buf_len, buf, buf_len) != 0)
4280 for (i = 0; i < buf_len; i += 2)
4282 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4283 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4296 /* OK, the code bytes before this instruction contain at least one
4297 halfword which resembles an IT instruction. We know that it's
4298 Thumb code, but there are still two possibilities. Either the
4299 halfword really is an IT instruction, or it is the second half of
4300 a 32-bit Thumb instruction. The only way we can tell is to
4301 scan forwards from a known instruction boundary. */
4302 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
4306 /* There's a lot of code before this instruction. Start with an
4307 optimistic search; it's easy to recognize halfwords that cannot
4308 be the start of a 32-bit instruction, and use that to
4309 lock on to the instruction boundaries. */
4310 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
4313 buf_len = IT_SCAN_THRESHOLD;
4316 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
4318 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4319 if (thumb_insn_size (inst1) == 2)
4326 /* At this point, if DEFINITE, BUF[I] is the first place we
4327 are sure that we know the instruction boundaries, and it is far
4328 enough from BPADDR that we could not miss an IT instruction
4329 affecting BPADDR. If ! DEFINITE, give up - start from a known boundary.  */
4333 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
4337 buf_len = bpaddr - boundary;
4343 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
4346 buf_len = bpaddr - boundary;
4350 /* Scan forwards. Find the last IT instruction before BPADDR. */
4355 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4357 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4362 else if (inst1 & 0x0002)
4364 else if (inst1 & 0x0004)
4369 i += thumb_insn_size (inst1);
4375 /* There wasn't really an IT instruction after all. */
4378 if (last_it_count < 1)
4379 /* It was too far away. */
4382 /* This really is a trouble spot. Move the breakpoint to the IT instruction.  */
4384 return bpaddr - buf_len + last_it;
4387 /* ARM displaced stepping support.
4389 Generally ARM displaced stepping works as follows:
4391 1. When an instruction is to be single-stepped, it is first decoded by
4392 arm_process_displaced_insn. Depending on the type of instruction, it is
4393 then copied to a scratch location, possibly in a modified form. The
4394 copy_* set of functions performs such modification, as necessary. A
4395 breakpoint is placed after the modified instruction in the scratch space
4396 to return control to GDB. Note in particular that instructions which
4397 modify the PC will no longer do so after modification.
4399 2. The instruction is single-stepped, by setting the PC to the scratch
4400 location address, and resuming. Control returns to GDB when the breakpoint is hit.
4403 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
4404 function used for the current instruction. This function's job is to
4405 put the CPU/memory state back to what it would have been if the
4406 instruction had been executed unmodified in its original location. */
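/* For instance, to step over an instruction such as "add pc, pc, r0", the
   copy_* routine substitutes scratch registers for the PC operands, the
   modified instruction executes in the scratch space, and the cleanup_*
   routine then computes the value the PC would have received and writes it
   back through displaced_write_reg.  */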
4408 /* NOP instruction (mov r0, r0). */
4409 #define ARM_NOP 0xe1a00000
4410 #define THUMB_NOP 0x4600
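/* 0x4600 is the Thumb hi-register-form encoding of "mov r0, r0".  */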
4412 /* Helper for register reads for displaced stepping. In particular, this
4413 returns the PC as it would be seen by the instruction at its original location.  */
4417 displaced_read_reg (struct regcache *regs, arm_displaced_step_closure *dsc,
4421 CORE_ADDR from = dsc->insn_addr;
4423 if (regno == ARM_PC_REGNUM)
4425 /* Compute pipeline offset:
4426 - When executing an ARM instruction, PC reads as the address of the
4427 current instruction plus 8.
4428 - When executing a Thumb instruction, PC reads as the address of the
4429 current instruction plus 4. */
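/* For example, an ARM "mov r0, pc" at 0x8000 yields r0 == 0x8008, while
   the equivalent Thumb instruction at the same address yields 0x8004.  */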
4436 if (debug_displaced)
4437 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
4438 (unsigned long) from);
4439 return (ULONGEST) from;
4443 regcache_cooked_read_unsigned (regs, regno, &ret);
4444 if (debug_displaced)
4445 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
4446 regno, (unsigned long) ret);
4452 displaced_in_arm_mode (struct regcache *regs)
4455 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
4457 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4459 return (ps & t_bit) == 0;
4462 /* Write to the PC as from a branch instruction. */
4465 branch_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4469 /* Note: If bits 0/1 are set, this branch would be unpredictable for
4470 architecture versions < 6. */
4471 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4472 val & ~(ULONGEST) 0x3);
4474 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4475 val & ~(ULONGEST) 0x1);
4478 /* Write to the PC as from a branch-exchange instruction. */
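/* Bit 0 of the destination selects Thumb state; a destination with bits 0
   and 1 both clear selects ARM state.  Other combinations are
   unpredictable, as handled below.  */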
4481 bx_write_pc (struct regcache *regs, ULONGEST val)
4484 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
4486 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4490 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
4491 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
4493 else if ((val & 2) == 0)
4495 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4496 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
4500 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
4501 mode, align dest to 4 bytes). */
4502 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
4503 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4504 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
4508 /* Write to the PC as if from a load instruction. */
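/* From ARMv5T onwards, a load to the PC behaves like BX (bit 0 of the
   loaded value selects Thumb state), which is why the version check below
   routes v5 and later through bx_write_pc.  */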
4511 load_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4514 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
4515 bx_write_pc (regs, val);
4517 branch_write_pc (regs, dsc, val);
4520 /* Write to the PC as if from an ALU instruction. */
4523 alu_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4526 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
4527 bx_write_pc (regs, val);
4529 branch_write_pc (regs, dsc, val);
4532 /* Helper for writing to registers for displaced stepping. Writing to the PC
4533 has varying effects depending on the instruction which does the write:
4534 this is controlled by the WRITE_PC argument. */
4537 displaced_write_reg (struct regcache *regs, arm_displaced_step_closure *dsc,
4538 int regno, ULONGEST val, enum pc_write_style write_pc)
4540 if (regno == ARM_PC_REGNUM)
4542 if (debug_displaced)
4543 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
4544 (unsigned long) val);
4547 case BRANCH_WRITE_PC:
4548 branch_write_pc (regs, dsc, val);
4552 bx_write_pc (regs, val);
4556 load_write_pc (regs, dsc, val);
4560 alu_write_pc (regs, dsc, val);
4563 case CANNOT_WRITE_PC:
4564 warning (_("Instruction wrote to PC in an unexpected way when "
4565 "single-stepping"));
4569 internal_error (__FILE__, __LINE__,
4570 _("Invalid argument to displaced_write_reg"));
4573 dsc->wrote_to_pc = 1;
4577 if (debug_displaced)
4578 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
4579 regno, (unsigned long) val);
4580 regcache_cooked_write_unsigned (regs, regno, val);
4584 /* This function is used to concisely determine if an instruction INSN
4585 references PC. Register fields of interest in INSN should have the
4586 corresponding fields of BITMASK set to 0b1111. The function
4587 returns 1 if any of these fields in INSN reference the PC
4588 (also 0b1111, r15), else it returns 0. */
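/* For example, insn_references_pc (insn, 0x000f0000ul) tests whether the
   Rn field in bits 16-19 of INSN is r15; callers build BITMASK from the
   register fields of the particular encoding they handle.  */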
4591 insn_references_pc (uint32_t insn, uint32_t bitmask)
4593 uint32_t lowbit = 1;
4595 while (bitmask != 0)
4599 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
4605 mask = lowbit * 0xf;
4607 if ((insn & mask) == mask)
4616 /* The simplest copy function. Many instructions have the same effect no
4617 matter what address they are executed at: in those cases, use this. */
4620 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
4621 const char *iname, arm_displaced_step_closure *dsc)
4623 if (debug_displaced)
4624 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
4625 "opcode/class '%s' unmodified\n", (unsigned long) insn,
4628 dsc->modinsn[0] = insn;
4634 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
4635 uint16_t insn2, const char *iname,
4636 arm_displaced_step_closure *dsc)
4638 if (debug_displaced)
4639 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
4640 "opcode/class '%s' unmodified\n", insn1, insn2,
4643 dsc->modinsn[0] = insn1;
4644 dsc->modinsn[1] = insn2;
4650 /* Copy a 16-bit Thumb instruction (Thumb-1 or 16-bit Thumb-2) without any modification.  */
4653 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, uint16_t insn,
4655 arm_displaced_step_closure *dsc)
4657 if (debug_displaced)
4658 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
4659 "opcode/class '%s' unmodified\n", insn,
4662 dsc->modinsn[0] = insn;
4667 /* Preload instructions with immediate offset. */
4670 cleanup_preload (struct gdbarch *gdbarch,
4671 struct regcache *regs, arm_displaced_step_closure *dsc)
4673 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4674 if (!dsc->u.preload.immed)
4675 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
4679 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
4680 arm_displaced_step_closure *dsc, unsigned int rn)
4683 /* Preload instructions:
4685 {pli/pld} [rn, #+/-imm]
4687 {pli/pld} [r0, #+/-imm]. */
4689 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4690 rn_val = displaced_read_reg (regs, dsc, rn);
4691 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4692 dsc->u.preload.immed = 1;
4694 dsc->cleanup = &cleanup_preload;
4698 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
4699 arm_displaced_step_closure *dsc)
4701 unsigned int rn = bits (insn, 16, 19);
4703 if (!insn_references_pc (insn, 0x000f0000ul))
4704 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
4706 if (debug_displaced)
4707 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4708 (unsigned long) insn);
4710 dsc->modinsn[0] = insn & 0xfff0ffff;
4712 install_preload (gdbarch, regs, dsc, rn);
4718 thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
4719 struct regcache *regs, arm_displaced_step_closure *dsc)
4721 unsigned int rn = bits (insn1, 0, 3);
4722 unsigned int u_bit = bit (insn1, 7);
4723 int imm12 = bits (insn2, 0, 11);
4726 if (rn != ARM_PC_REGNUM)
4727 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
4729 /* The PC is only allowed to be used in PLI (immediate, literal) Encoding T3, and
4730 PLD (literal) Encoding T1. */
4731 if (debug_displaced)
4732 fprintf_unfiltered (gdb_stdlog,
4733 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
4734 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
4740 /* Rewrite instruction {pli/pld} PC imm12 into:
4741 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
4745 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
4747 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4748 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4750 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
4752 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
4753 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
4754 dsc->u.preload.immed = 0;
4756 /* {pli/pld} [r0, r1] */
4757 dsc->modinsn[0] = insn1 & 0xfff0;
4758 dsc->modinsn[1] = 0xf001;
4761 dsc->cleanup = &cleanup_preload;
4765 /* Preload instructions with register offset. */
4768 install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
4769 arm_displaced_step_closure *dsc, unsigned int rn,
4772 ULONGEST rn_val, rm_val;
4774 /* Preload register-offset instructions:
4776 {pli/pld} [rn, rm {, shift}]
4778 {pli/pld} [r0, r1 {, shift}]. */
4780 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4781 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4782 rn_val = displaced_read_reg (regs, dsc, rn);
4783 rm_val = displaced_read_reg (regs, dsc, rm);
4784 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4785 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
4786 dsc->u.preload.immed = 0;
4788 dsc->cleanup = &cleanup_preload;
4792 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
4793 struct regcache *regs,
4794 arm_displaced_step_closure *dsc)
4796 unsigned int rn = bits (insn, 16, 19);
4797 unsigned int rm = bits (insn, 0, 3);
4800 if (!insn_references_pc (insn, 0x000f000ful))
4801 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
4803 if (debug_displaced)
4804 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4805 (unsigned long) insn);
4807 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
4809 install_preload_reg (gdbarch, regs, dsc, rn, rm);
4813 /* Copy/cleanup coprocessor load and store instructions. */
4816 cleanup_copro_load_store (struct gdbarch *gdbarch,
4817 struct regcache *regs,
4818 arm_displaced_step_closure *dsc)
4820 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
4822 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4824 if (dsc->u.ldst.writeback)
4825 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
4829 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
4830 arm_displaced_step_closure *dsc,
4831 int writeback, unsigned int rn)
4835 /* Coprocessor load/store instructions:
4837 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
4839 {stc/stc2} [r0, #+/-imm].
4841 ldc/ldc2 are handled identically. */
4843 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4844 rn_val = displaced_read_reg (regs, dsc, rn);
4845 /* PC should be 4-byte aligned. */
4846 rn_val = rn_val & 0xfffffffc;
4847 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4849 dsc->u.ldst.writeback = writeback;
4850 dsc->u.ldst.rn = rn;
4852 dsc->cleanup = &cleanup_copro_load_store;
4856 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
4857 struct regcache *regs,
4858 arm_displaced_step_closure *dsc)
4860 unsigned int rn = bits (insn, 16, 19);
4862 if (!insn_references_pc (insn, 0x000f0000ul))
4863 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
4865 if (debug_displaced)
4866 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4867 "load/store insn %.8lx\n", (unsigned long) insn);
4869 dsc->modinsn[0] = insn & 0xfff0ffff;
4871 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
4877 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
4878 uint16_t insn2, struct regcache *regs,
4879 arm_displaced_step_closure *dsc)
4881 unsigned int rn = bits (insn1, 0, 3);
4883 if (rn != ARM_PC_REGNUM)
4884 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
4885 "copro load/store", dsc);
4887 if (debug_displaced)
4888 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4889 "load/store insn %.4x%.4x\n", insn1, insn2);
4891 dsc->modinsn[0] = insn1 & 0xfff0;
4892 dsc->modinsn[1] = insn2;
4895 /* This function is called to copy the LDC/LDC2/VLDR instructions, which
4896 do not support writeback, so pass 0 for it. */
4897 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
4902 /* Clean up branch instructions (actually perform the branch, by setting the PC).  */
4906 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
4907 arm_displaced_step_closure *dsc)
4909 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
4910 int branch_taken = condition_true (dsc->u.branch.cond, status);
4911 enum pc_write_style write_pc = dsc->u.branch.exchange
4912 ? BX_WRITE_PC : BRANCH_WRITE_PC;
4917 if (dsc->u.branch.link)
4919 /* The value of LR should be the address of the next instruction. In order
4920 not to confuse logic handling a later `bx lr', if the current instruction
4921 is Thumb, bit 0 of the LR value should be set to 1. */
4922 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
4925 next_insn_addr |= 0x1;
4927 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
4931 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
4934 /* Copy B/BL/BLX instructions with immediate destinations. */
4937 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
4938 arm_displaced_step_closure *dsc,
4939 unsigned int cond, int exchange, int link, long offset)
4941 /* Implement "BL<cond> <label>" as:
4943 Preparation: cond <- instruction condition
4944 Insn: mov r0, r0 (nop)
4945 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
4947 B<cond> similar, but don't set r14 in cleanup. */
4949 dsc->u.branch.cond = cond;
4950 dsc->u.branch.link = link;
4951 dsc->u.branch.exchange = exchange;
4953 dsc->u.branch.dest = dsc->insn_addr;
4954 if (link && exchange)
4955 /* For BLX, the offset is computed from Align (PC, 4). */
4956 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
4959 dsc->u.branch.dest += 4 + offset;
4961 dsc->u.branch.dest += 8 + offset;
4963 dsc->cleanup = &cleanup_branch;
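/* As a worked example, an ARM-state BL at 0x1000 whose sign-extended
   encoded offset is 0x100 resolves to 0x1000 + 8 + 0x100 == 0x1108,
   reflecting the +8 pipeline offset applied above.  */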
4966 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
4967 struct regcache *regs, arm_displaced_step_closure *dsc)
4969 unsigned int cond = bits (insn, 28, 31);
4970 int exchange = (cond == 0xf);
4971 int link = exchange || bit (insn, 24);
4974 if (debug_displaced)
4975 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
4976 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
4977 (unsigned long) insn);
4979 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
4980 then arrange the switch into Thumb mode. */
4981 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
4983 offset = bits (insn, 0, 23) << 2;
4985 if (bit (offset, 25))
4986 offset = offset | ~0x3ffffff;
4988 dsc->modinsn[0] = ARM_NOP;
4990 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
4995 thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
4996 uint16_t insn2, struct regcache *regs,
4997 arm_displaced_step_closure *dsc)
4999 int link = bit (insn2, 14);
5000 int exchange = link && !bit (insn2, 12);
5003 int j1 = bit (insn2, 13);
5004 int j2 = bit (insn2, 11);
5005 int s = sbits (insn1, 10, 10);
5006 int i1 = !(j1 ^ bit (insn1, 10));
5007 int i2 = !(j2 ^ bit (insn1, 10));
5009 if (!link && !exchange) /* B */
5011 offset = (bits (insn2, 0, 10) << 1);
5012 if (bit (insn2, 12)) /* Encoding T4 */
5014 offset |= (bits (insn1, 0, 9) << 12)
5020 else /* Encoding T3 */
5022 offset |= (bits (insn1, 0, 5) << 12)
5026 cond = bits (insn1, 6, 9);
5031 offset = (bits (insn1, 0, 9) << 12);
5032 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
5033 offset |= exchange ?
5034 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
5037 if (debug_displaced)
5038 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
5039 "%.4x %.4x with offset %.8lx\n",
5040 link ? (exchange) ? "blx" : "bl" : "b",
5041 insn1, insn2, offset);
5043 dsc->modinsn[0] = THUMB_NOP;
5045 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
5049 /* Copy B Thumb instructions. */
5051 thumb_copy_b (struct gdbarch *gdbarch, uint16_t insn,
5052 arm_displaced_step_closure *dsc)
5054 unsigned int cond = 0;
5056 unsigned short bit_12_15 = bits (insn, 12, 15);
5057 CORE_ADDR from = dsc->insn_addr;
5059 if (bit_12_15 == 0xd)
5061 /* offset = SignExtend (imm8:0, 32) */
5062 offset = sbits ((insn << 1), 0, 8);
5063 cond = bits (insn, 8, 11);
5065 else if (bit_12_15 == 0xe) /* Encoding T2 */
5067 offset = sbits ((insn << 1), 0, 11);
5071 if (debug_displaced)
5072 fprintf_unfiltered (gdb_stdlog,
5073 "displaced: copying b immediate insn %.4x "
5074 "with offset %d\n", insn, offset);
5076 dsc->u.branch.cond = cond;
5077 dsc->u.branch.link = 0;
5078 dsc->u.branch.exchange = 0;
5079 dsc->u.branch.dest = from + 4 + offset;
5081 dsc->modinsn[0] = THUMB_NOP;
5083 dsc->cleanup = &cleanup_branch;
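/* For example, a Thumb "beq" at 0x2000 whose sign-extended offset is 0x10
   resolves to 0x2000 + 4 + 0x10 == 0x2014, reflecting the Thumb +4
   pipeline offset used above.  */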
5088 /* Copy BX/BLX with register-specified destinations. */
5091 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
5092 arm_displaced_step_closure *dsc, int link,
5093 unsigned int cond, unsigned int rm)
5095 /* Implement "{BX,BLX}<cond> <reg>" as:
5097 Preparation: cond <- instruction condition
5098 Insn: mov r0, r0 (nop)
5099 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5101 Don't set r14 in cleanup for BX. */
5103 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
5105 dsc->u.branch.cond = cond;
5106 dsc->u.branch.link = link;
5108 dsc->u.branch.exchange = 1;
5110 dsc->cleanup = &cleanup_branch;
5114 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
5115 struct regcache *regs, arm_displaced_step_closure *dsc)
5117 unsigned int cond = bits (insn, 28, 31);
5120 int link = bit (insn, 5);
5121 unsigned int rm = bits (insn, 0, 3);
5123 if (debug_displaced)
5124 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
5125 (unsigned long) insn);
5127 dsc->modinsn[0] = ARM_NOP;
5129 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
5134 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
5135 struct regcache *regs,
5136 arm_displaced_step_closure *dsc)
5138 int link = bit (insn, 7);
5139 unsigned int rm = bits (insn, 3, 6);
5141 if (debug_displaced)
5142 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
5143 (unsigned short) insn);
5145 dsc->modinsn[0] = THUMB_NOP;
5147 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
5153 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
5156 cleanup_alu_imm (struct gdbarch *gdbarch,
5157 struct regcache *regs, arm_displaced_step_closure *dsc)
5159 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5160 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5161 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5162 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5166 arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5167 arm_displaced_step_closure *dsc)
5169 unsigned int rn = bits (insn, 16, 19);
5170 unsigned int rd = bits (insn, 12, 15);
5171 unsigned int op = bits (insn, 21, 24);
5172 int is_mov = (op == 0xd);
5173 ULONGEST rd_val, rn_val;
5175 if (!insn_references_pc (insn, 0x000ff000ul))
5176 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
5178 if (debug_displaced)
5179 fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
5180 "%.8lx\n", is_mov ? "move" : "ALU",
5181 (unsigned long) insn);
5183 /* Instruction is of form:
5185 <op><cond> rd, [rn,] #imm
5189 Preparation: tmp1, tmp2 <- r0, r1;
5191 Insn: <op><cond> r0, r1, #imm
5192 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5195 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5196 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5197 rn_val = displaced_read_reg (regs, dsc, rn);
5198 rd_val = displaced_read_reg (regs, dsc, rd);
5199 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5200 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5204 dsc->modinsn[0] = insn & 0xfff00fff;
5206 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
5208 dsc->cleanup = &cleanup_alu_imm;
5214 thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
5215 uint16_t insn2, struct regcache *regs,
5216 arm_displaced_step_closure *dsc)
5218 unsigned int op = bits (insn1, 5, 8);
5219 unsigned int rn, rm, rd;
5220 ULONGEST rd_val, rn_val;
5222 rn = bits (insn1, 0, 3); /* Rn */
5223 rm = bits (insn2, 0, 3); /* Rm */
5224 rd = bits (insn2, 8, 11); /* Rd */
5226 /* This routine is only called for the MOV instruction. */
5227 gdb_assert (op == 0x2 && rn == 0xf);
5229 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
5230 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
5232 if (debug_displaced)
5233 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
5234 "ALU", insn1, insn2);
5236 /* Instruction is of form:
5238 <op><cond> rd, [rn,] #imm
5242 Preparation: tmp1, tmp2 <- r0, r1;
5244 Insn: <op><cond> r0, r1, #imm
5245 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5248 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5249 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5250 rn_val = displaced_read_reg (regs, dsc, rn);
5251 rd_val = displaced_read_reg (regs, dsc, rd);
5252 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5253 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5256 dsc->modinsn[0] = insn1;
5257 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
5260 dsc->cleanup = &cleanup_alu_imm;
5265 /* Copy/cleanup arithmetic/logic insns with register RHS. */
5268 cleanup_alu_reg (struct gdbarch *gdbarch,
5269 struct regcache *regs, arm_displaced_step_closure *dsc)
5274 rd_val = displaced_read_reg (regs, dsc, 0);
5276 for (i = 0; i < 3; i++)
5277 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5279 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5283 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
5284 arm_displaced_step_closure *dsc,
5285 unsigned int rd, unsigned int rn, unsigned int rm)
5287 ULONGEST rd_val, rn_val, rm_val;
5289 /* Instruction is of form:
5291 <op><cond> rd, [rn,] rm [, <shift>]
5295 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
5296 r0, r1, r2 <- rd, rn, rm
5297 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
5298 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
5301 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5302 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5303 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5304 rd_val = displaced_read_reg (regs, dsc, rd);
5305 rn_val = displaced_read_reg (regs, dsc, rn);
5306 rm_val = displaced_read_reg (regs, dsc, rm);
5307 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5308 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5309 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5312 dsc->cleanup = &cleanup_alu_reg;
5316 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5317 arm_displaced_step_closure *dsc)
5319 unsigned int op = bits (insn, 21, 24);
5320 int is_mov = (op == 0xd);
5322 if (!insn_references_pc (insn, 0x000ff00ful))
5323 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
5325 if (debug_displaced)
5326 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
5327 is_mov ? "move" : "ALU", (unsigned long) insn);
5330 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
5332 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
5334 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
5340 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
5341 struct regcache *regs,
5342 arm_displaced_step_closure *dsc)
5346 rm = bits (insn, 3, 6);
5347 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
5349 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
5350 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
5352 if (debug_displaced)
5353 fprintf_unfiltered (gdb_stdlog, "displaced: copying ALU reg insn %.4x\n",
5354 (unsigned short) insn);
5356 dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
5358 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
5363 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
5366 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
5367 struct regcache *regs,
5368 arm_displaced_step_closure *dsc)
5370 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5373 for (i = 0; i < 4; i++)
5374 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5376 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5380 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
5381 arm_displaced_step_closure *dsc,
5382 unsigned int rd, unsigned int rn, unsigned int rm,
5386 ULONGEST rd_val, rn_val, rm_val, rs_val;
5388 /* Instruction is of form:
5390 <op><cond> rd, [rn,] rm, <shift> rs
5394 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
5395 r0, r1, r2, r3 <- rd, rn, rm, rs
5396 Insn: <op><cond> r0, r1, r2, <shift> r3
5398 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
5402 for (i = 0; i < 4; i++)
5403 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
5405 rd_val = displaced_read_reg (regs, dsc, rd);
5406 rn_val = displaced_read_reg (regs, dsc, rn);
5407 rm_val = displaced_read_reg (regs, dsc, rm);
5408 rs_val = displaced_read_reg (regs, dsc, rs);
5409 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5410 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5411 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5412 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
5414 dsc->cleanup = &cleanup_alu_shifted_reg;
5418 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
5419 struct regcache *regs,
5420 arm_displaced_step_closure *dsc)
5422 unsigned int op = bits (insn, 21, 24);
5423 int is_mov = (op == 0xd);
5424 unsigned int rd, rn, rm, rs;
5426 if (!insn_references_pc (insn, 0x000fff0ful))
5427 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
5429 if (debug_displaced)
5430 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
5431 "%.8lx\n", is_mov ? "move" : "ALU",
5432 (unsigned long) insn);
5434 rn = bits (insn, 16, 19);
5435 rm = bits (insn, 0, 3);
5436 rs = bits (insn, 8, 11);
5437 rd = bits (insn, 12, 15);
5440 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
5442 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
5444 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
5449 /* Clean up load instructions. */
5452 cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
5453 arm_displaced_step_closure *dsc)
5455 ULONGEST rt_val, rt_val2 = 0, rn_val;
5457 rt_val = displaced_read_reg (regs, dsc, 0);
5458 if (dsc->u.ldst.xfersize == 8)
5459 rt_val2 = displaced_read_reg (regs, dsc, 1);
5460 rn_val = displaced_read_reg (regs, dsc, 2);
5462 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5463 if (dsc->u.ldst.xfersize > 4)
5464 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5465 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5466 if (!dsc->u.ldst.immed)
5467 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5469 /* Handle register writeback. */
5470 if (dsc->u.ldst.writeback)
5471 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5472 /* Put result in right place. */
5473 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
5474 if (dsc->u.ldst.xfersize == 8)
5475 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
5478 /* Clean up store instructions. */
5481 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
5482 arm_displaced_step_closure *dsc)
5484 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
5486 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5487 if (dsc->u.ldst.xfersize > 4)
5488 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5489 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5490 if (!dsc->u.ldst.immed)
5491 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5492 if (!dsc->u.ldst.restore_r4)
5493 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
5496 if (dsc->u.ldst.writeback)
5497 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5500 /* Copy "extra" load/store instructions. These are halfword/doubleword
5501 transfers, which have a different encoding to byte/word transfers. */
5504 arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
5505 struct regcache *regs, arm_displaced_step_closure *dsc)
5507 unsigned int op1 = bits (insn, 20, 24);
5508 unsigned int op2 = bits (insn, 5, 6);
5509 unsigned int rt = bits (insn, 12, 15);
5510 unsigned int rn = bits (insn, 16, 19);
5511 unsigned int rm = bits (insn, 0, 3);
5512 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
5513 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
5514 int immed = (op1 & 0x4) != 0;
5516 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
5518 if (!insn_references_pc (insn, 0x000ff00ful))
5519 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
5521 if (debug_displaced)
5522 fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
5523 "insn %.8lx\n", unprivileged ? "unprivileged " : "",
5524 (unsigned long) insn);
5526 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
5529 internal_error (__FILE__, __LINE__,
5530 _("copy_extra_ld_st: instruction decode error"));
5532 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5533 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5534 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5536 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5538 rt_val = displaced_read_reg (regs, dsc, rt);
5539 if (bytesize[opcode] == 8)
5540 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
5541 rn_val = displaced_read_reg (regs, dsc, rn);
5543 rm_val = displaced_read_reg (regs, dsc, rm);
5545 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5546 if (bytesize[opcode] == 8)
5547 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
5548 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5550 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5553 dsc->u.ldst.xfersize = bytesize[opcode];
5554 dsc->u.ldst.rn = rn;
5555 dsc->u.ldst.immed = immed;
5556 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
5557 dsc->u.ldst.restore_r4 = 0;
5560 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
5562 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
5563 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5565 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
5567 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
5568 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5570 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
5575 /* Copy byte/halfword/word loads and stores. */
5578 install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5579 arm_displaced_step_closure *dsc, int load,
5580 int immed, int writeback, int size, int usermode,
5581 int rt, int rm, int rn)
5583 ULONGEST rt_val, rn_val, rm_val = 0;
5585 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5586 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5588 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5590 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
5592 rt_val = displaced_read_reg (regs, dsc, rt);
5593 rn_val = displaced_read_reg (regs, dsc, rn);
5595 rm_val = displaced_read_reg (regs, dsc, rm);
5597 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5598 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5600 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5602 dsc->u.ldst.xfersize = size;
5603 dsc->u.ldst.rn = rn;
5604 dsc->u.ldst.immed = immed;
5605 dsc->u.ldst.writeback = writeback;
5607 /* To write PC we can do:
5609 Before this sequence of instructions:
5610 r0 is the PC value obtained from displaced_read_reg, so r0 = from + 8;
5611 r2 is the Rn value obtained from displaced_read_reg.
5613 Insn1: push {pc} Write address of STR instruction + offset on stack
5614 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
5615 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
5616 = addr(Insn1) + offset - addr(Insn3) - 8
5618 Insn4: add r4, r4, #8 r4 = offset - 8
5619 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
5621 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
5623 Otherwise we don't know what value to write for PC, since the offset is
5624 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
5625 of this can be found in Section "Saving from r15" in
5626 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
5628 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5633 thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
5634 uint16_t insn2, struct regcache *regs,
5635 arm_displaced_step_closure *dsc, int size)
5637 unsigned int u_bit = bit (insn1, 7);
5638 unsigned int rt = bits (insn2, 12, 15);
5639 int imm12 = bits (insn2, 0, 11);
5642 if (debug_displaced)
5643 fprintf_unfiltered (gdb_stdlog,
5644 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
5645 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
5651 /* Rewrite instruction LDR Rt imm12 into:
5653 Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12
5657 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2]. */
5660 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5661 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5662 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5664 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5666 pc_val = pc_val & 0xfffffffc;
5668 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
5669 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
5673 dsc->u.ldst.xfersize = size;
5674 dsc->u.ldst.immed = 0;
5675 dsc->u.ldst.writeback = 0;
5676 dsc->u.ldst.restore_r4 = 0;
5678 /* LDR R0, R2, R3 */
5679 dsc->modinsn[0] = 0xf852;
5680 dsc->modinsn[1] = 0x3;
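/* 0xf852/0x0003 is the 32-bit Thumb-2 encoding of "ldr.w r0, [r2, r3]".  */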
5683 dsc->cleanup = &cleanup_load;
5689 thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
5690 uint16_t insn2, struct regcache *regs,
5691 arm_displaced_step_closure *dsc,
5692 int writeback, int immed)
5694 unsigned int rt = bits (insn2, 12, 15);
5695 unsigned int rn = bits (insn1, 0, 3);
5696 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
5697 /* In LDR (register), there is also a register Rm, which is not allowed to
5698 be PC, so we don't have to check it. */
5700 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
5701 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
5704 if (debug_displaced)
5705 fprintf_unfiltered (gdb_stdlog,
5706 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
5707 rt, rn, insn1, insn2);
5709 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
5712 dsc->u.ldst.restore_r4 = 0;
5715 /* ldr[b]<cond> rt, [rn, #imm], etc.
5717 ldr[b]<cond> r0, [r2, #imm]. */
5719 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5720 dsc->modinsn[1] = insn2 & 0x0fff;
5723 /* ldr[b]<cond> rt, [rn, rm], etc.
5725 ldr[b]<cond> r0, [r2, r3]. */
5727 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5728 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
5738 arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
5739 struct regcache *regs,
5740 arm_displaced_step_closure *dsc,
5741 int load, int size, int usermode)
5743 int immed = !bit (insn, 25);
5744 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
5745 unsigned int rt = bits (insn, 12, 15);
5746 unsigned int rn = bits (insn, 16, 19);
5747 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
5749 if (!insn_references_pc (insn, 0x000ff00ful))
5750 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
5752 if (debug_displaced)
5753 fprintf_unfiltered (gdb_stdlog,
5754 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
5755 load ? (size == 1 ? "ldrb" : "ldr")
5756 : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
5758 (unsigned long) insn);
5760 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
5761 usermode, rt, rm, rn);
5763 if (load || rt != ARM_PC_REGNUM)
5765 dsc->u.ldst.restore_r4 = 0;
5768 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
5770 {ldr,str}[b]<cond> r0, [r2, #imm]. */
5771 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5773 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
5775 {ldr,str}[b]<cond> r0, [r2, r3]. */
5776 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5780 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
5781 dsc->u.ldst.restore_r4 = 1;
5782 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
5783 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
5784 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
5785 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
5786 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
5790 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
5792 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
5797 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5802 /* Cleanup LDM instructions with fully-populated register list. This is an
5803 unfortunate corner case: it's impossible to implement correctly by modifying
5804 the instruction. The issue is as follows: we have an instruction,
5808 which we must rewrite to avoid loading PC. A possible solution would be to
5809 do the load in two halves, something like (with suitable cleanup
5813 ldm[id][ab] r8!, {r0-r7}
5815 ldm[id][ab] r8, {r7-r14}
5818 but at present there's no suitable place for <temp>, since the scratch space
5819 is overwritten before the cleanup routine is called. For now, we simply
5820 emulate the instruction. */
5823 cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
5824 arm_displaced_step_closure *dsc)
5826 int inc = dsc->u.block.increment;
5827 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
5828 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
5829 uint32_t regmask = dsc->u.block.regmask;
5830 int regno = inc ? 0 : 15;
5831 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
5832 int exception_return = dsc->u.block.load && dsc->u.block.user
5833 && (regmask & 0x8000) != 0;
5834 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5835 int do_transfer = condition_true (dsc->u.block.cond, status);
5836 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5841 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
5842 sensible we can do here. Complain loudly. */
5843 if (exception_return)
5844 error (_("Cannot single-step exception return"));
5846 /* We don't handle any stores here for now. */
5847 gdb_assert (dsc->u.block.load != 0);
5849 if (debug_displaced)
5850 fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
5851 "%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
5852 dsc->u.block.increment ? "inc" : "dec",
5853 dsc->u.block.before ? "before" : "after");
5860 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
5863 while (regno >= 0 && (regmask & (1 << regno)) == 0)
5866 xfer_addr += bump_before;
5868 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
5869 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
5871 xfer_addr += bump_after;
5873 regmask &= ~(1 << regno);
5876 if (dsc->u.block.writeback)
5877 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
5881 /* Clean up an STM which included the PC in the register list. */
5884 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
5885 arm_displaced_step_closure *dsc)
5887 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5888 int store_executed = condition_true (dsc->u.block.cond, status);
5889 CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
5890 CORE_ADDR stm_insn_addr;
5893 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5895 /* If condition code fails, there's nothing else to do. */
5896 if (!store_executed)
5899 if (dsc->u.block.increment)
5901 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
5903 if (dsc->u.block.before)
5908 pc_stored_at = dsc->u.block.xfer_addr;
5910 if (dsc->u.block.before)
5914 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
5915 stm_insn_addr = dsc->scratch_base;
5916 offset = pc_val - stm_insn_addr;
5918 if (debug_displaced)
5919 fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
5920 "STM instruction\n", offset);
5922 /* Rewrite the stored PC to the proper value for the non-displaced original
5924 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
5925 dsc->insn_addr + offset);
5928 /* Clean up an LDM which includes the PC in the register list. We clumped all
5929 the registers in the transferred list into a contiguous range r0...rX (to
5930 avoid loading PC directly and losing control of the debugged program), so we
5931 must undo that here. */
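/* For example, if the original instruction was "ldm r0, {r4, r9, pc}", the
   out-of-line copy loaded into r0-r2; this cleanup moves r2 to the PC, r1
   to r9 and r0 to r4, then restores the clobbered low registers from the
   saved temporaries.  */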
5934 cleanup_block_load_pc (struct gdbarch *gdbarch,
5935 struct regcache *regs,
5936 arm_displaced_step_closure *dsc)
5938 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5939 int load_executed = condition_true (dsc->u.block.cond, status);
5940 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
5941 unsigned int regs_loaded = bitcount (mask);
5942 unsigned int num_to_shuffle = regs_loaded, clobbered;
5944 /* The method employed here will fail if the register list is fully populated
5945 (we need to avoid loading PC directly). */
5946 gdb_assert (num_to_shuffle < 16);
5951 clobbered = (1 << num_to_shuffle) - 1;
5953 while (num_to_shuffle > 0)
5955 if ((mask & (1 << write_reg)) != 0)
5957 unsigned int read_reg = num_to_shuffle - 1;
5959 if (read_reg != write_reg)
5961 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
5962 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
5963 if (debug_displaced)
5964 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
5965 "loaded register r%d to r%d\n"), read_reg,
5968 else if (debug_displaced)
5969 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
5970 "r%d already in the right place\n"),
5973 clobbered &= ~(1 << write_reg);
5981 /* Restore any registers we scribbled over. */
5982 for (write_reg = 0; clobbered != 0; write_reg++)
5984 if ((clobbered & (1 << write_reg)) != 0)
5986 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
5988 if (debug_displaced)
5989 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
5990 "clobbered register r%d\n"), write_reg);
5991 clobbered &= ~(1 << write_reg);
5995 /* Perform register writeback manually. */
5996 if (dsc->u.block.writeback)
5998 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
6000 if (dsc->u.block.increment)
6001 new_rn_val += regs_loaded * 4;
6003 new_rn_val -= regs_loaded * 4;
6005 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
6010 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
6011 in user-level code (in particular exception return, ldm rn, {...pc}^). */
6014 arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
6015 struct regcache *regs,
6016 arm_displaced_step_closure *dsc)
6018 int load = bit (insn, 20);
6019 int user = bit (insn, 22);
6020 int increment = bit (insn, 23);
6021 int before = bit (insn, 24);
6022 int writeback = bit (insn, 21);
6023 int rn = bits (insn, 16, 19);
6025 /* Block transfers which don't mention PC can be run directly out-of-line.  */
6027 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
6028 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
6030 if (rn == ARM_PC_REGNUM)
6032 warning (_("displaced: Unpredictable LDM or STM with "
6033 "base register r15"));
6034 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
6037 if (debug_displaced)
6038 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6039 "%.8lx\n", (unsigned long) insn);
6041 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
6042 dsc->u.block.rn = rn;
6044 dsc->u.block.load = load;
6045 dsc->u.block.user = user;
6046 dsc->u.block.increment = increment;
6047 dsc->u.block.before = before;
6048 dsc->u.block.writeback = writeback;
6049 dsc->u.block.cond = bits (insn, 28, 31);
6051 dsc->u.block.regmask = insn & 0xffff;
6055 if ((insn & 0xffff) == 0xffff)
6057 /* LDM with a fully-populated register list. This case is
6058 particularly tricky. Implement for now by fully emulating the
6059 instruction (which might not behave perfectly in all cases, but
6060 these instructions should be rare enough for that not to matter in practice).  */
6062 dsc->modinsn[0] = ARM_NOP;
6064 dsc->cleanup = &cleanup_block_load_all;
6068 /* LDM of a list of registers which includes PC. Implement by
6069 rewriting the list of registers to be transferred into a
6070 contiguous chunk r0...rX before doing the transfer, then shuffling
6071 registers into the correct places in the cleanup routine. */
6072 unsigned int regmask = insn & 0xffff;
6073 unsigned int num_in_list = bitcount (regmask), new_regmask;
6076 for (i = 0; i < num_in_list; i++)
6077 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6079 /* Writeback makes things complicated. We need to avoid clobbering
6080 the base register with one of the registers in our modified
6081 register list, but just using a different register can't work in all cases, e.g.:
6084 ldm r14!, {r0-r13,pc}
6086 which would need to be rewritten as:
6090 but that can't work, because there's no free register for N.
6092 Solve this by turning off the writeback bit, and emulating
6093 writeback manually in the cleanup routine. */
6098 new_regmask = (1 << num_in_list) - 1;
6100 if (debug_displaced)
6101 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6102 "{..., pc}: original reg list %.4x, modified "
6103 "list %.4x\n"), rn, writeback ? "!" : "",
6104 (int) insn & 0xffff, new_regmask);
6106 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
6108 dsc->cleanup = &cleanup_block_load_pc;
6113 /* STM of a list of registers which includes PC. Run the instruction
6114 as-is, but out of line: this will store the wrong value for the PC,
6115 so we must manually fix up the memory in the cleanup routine.
6116 Doing things this way has the advantage that we can auto-detect
6117 the offset of the PC write (which is architecture-dependent) in
6118 the cleanup routine. */
6119 dsc->modinsn[0] = insn;
6121 dsc->cleanup = &cleanup_block_store_pc;
6128 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6129 struct regcache *regs,
6130 arm_displaced_step_closure *dsc)
6132 int rn = bits (insn1, 0, 3);
6133 int load = bit (insn1, 4);
6134 int writeback = bit (insn1, 5);
6136 /* Block transfers which don't mention PC can be run directly out-of-line.  */
6138 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
6139 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
6141 if (rn == ARM_PC_REGNUM)
6143 warning (_("displaced: Unpredictable LDM or STM with "
6144 "base register r15"));
6145 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6146 "unpredictable ldm/stm", dsc);
6149 if (debug_displaced)
6150 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6151 "%.4x%.4x\n", insn1, insn2);
6153 /* Clear bit 13, since it should always be zero. */
6154 dsc->u.block.regmask = (insn2 & 0xdfff);
6155 dsc->u.block.rn = rn;
6157 dsc->u.block.load = load;
6158 dsc->u.block.user = 0;
6159 dsc->u.block.increment = bit (insn1, 7);
6160 dsc->u.block.before = bit (insn1, 8);
6161 dsc->u.block.writeback = writeback;
6162 dsc->u.block.cond = INST_AL;
6163 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
6167 if (dsc->u.block.regmask == 0xffff)
6169 /* This case cannot happen, since bit 13 was cleared above. */
6174 unsigned int regmask = dsc->u.block.regmask;
6175 unsigned int num_in_list = bitcount (regmask), new_regmask;
6178 for (i = 0; i < num_in_list; i++)
6179 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6184 new_regmask = (1 << num_in_list) - 1;
6186 if (debug_displaced)
6187 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6188 "{..., pc}: original reg list %.4x, modified "
6189 "list %.4x\n"), rn, writeback ? "!" : "",
6190 (int) dsc->u.block.regmask, new_regmask);
6192 dsc->modinsn[0] = insn1;
6193 dsc->modinsn[1] = (new_regmask & 0xffff);
6196 dsc->cleanup = &cleanup_block_load_pc;
6201 dsc->modinsn[0] = insn1;
6202 dsc->modinsn[1] = insn2;
6204 dsc->cleanup = &cleanup_block_store_pc;
6209 /* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
6210 This is used to avoid a dependency on BFD's bfd_endian enum. */
6213 arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len,
6216 return read_memory_unsigned_integer (memaddr, len,
6217 (enum bfd_endian) byte_order);
6220 /* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
6223 arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self,
6226 return gdbarch_addr_bits_remove (self->regcache->arch (), val);
6229 /* Wrapper over syscall_next_pc for use in get_next_pcs. */
6232 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self)
6237 /* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
6240 arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self)
6242 return arm_is_thumb (self->regcache);
6245 /* single_step() is called just before we want to resume the inferior,
6246 if we want to single-step it but there is no hardware or kernel
6247 single-step support. We find the targets of the coming instructions
6248 and set breakpoints on them. */
6250 std::vector<CORE_ADDR>
6251 arm_software_single_step (struct regcache *regcache)
6253 struct gdbarch *gdbarch = regcache->arch ();
6254 struct arm_get_next_pcs next_pcs_ctx;
6256 arm_get_next_pcs_ctor (&next_pcs_ctx,
6257 &arm_get_next_pcs_ops,
6258 gdbarch_byte_order (gdbarch),
6259 gdbarch_byte_order_for_code (gdbarch),
6263 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
6265 for (CORE_ADDR &pc_ref : next_pcs)
6266 pc_ref = gdbarch_addr_bits_remove (gdbarch, pc_ref);
6271 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6272 for Linux, where some SVC instructions must be treated specially. */
6275 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
6276 arm_displaced_step_closure *dsc)
6278 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
6280 if (debug_displaced)
6281 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
6282 "%.8lx\n", (unsigned long) resume_addr);
6284 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
6288 /* Common copy routine for the svc instruction. */
6291 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
6292 arm_displaced_step_closure *dsc)
6294 /* Preparation: none.
6295 Insn: unmodified svc.
6296 Cleanup: pc <- insn_addr + insn_size. */
6298 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next instruction.  */
6300 dsc->wrote_to_pc = 1;
6302 /* Allow OS-specific code to override SVC handling. */
6303 if (dsc->u.svc.copy_svc_os)
6304 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
6307 dsc->cleanup = &cleanup_svc;
6313 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
6314 struct regcache *regs, arm_displaced_step_closure *dsc)
6317 if (debug_displaced)
6318 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
6319 (unsigned long) insn);
6321 dsc->modinsn[0] = insn;
6323 return install_svc (gdbarch, regs, dsc);
6327 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
6328 struct regcache *regs, arm_displaced_step_closure *dsc)
6331 if (debug_displaced)
6332 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
6335 dsc->modinsn[0] = insn;
6337 return install_svc (gdbarch, regs, dsc);
6340 /* Copy undefined instructions. */
6343 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
6344 arm_displaced_step_closure *dsc)
6346 if (debug_displaced)
6347 fprintf_unfiltered (gdb_stdlog,
6348 "displaced: copying undefined insn %.8lx\n",
6349 (unsigned long) insn);
6351 dsc->modinsn[0] = insn;
6357 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6358 arm_displaced_step_closure *dsc)
6361 if (debug_displaced)
6362 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
6363 "%.4x %.4x\n", (unsigned short) insn1,
6364 (unsigned short) insn2);
6366 dsc->modinsn[0] = insn1;
6367 dsc->modinsn[1] = insn2;
6373 /* Copy unpredictable instructions. */
6376 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
6377 arm_displaced_step_closure *dsc)
6379 if (debug_displaced)
6380 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
6381 "%.8lx\n", (unsigned long) insn);
6383 dsc->modinsn[0] = insn;
6388 /* The decode_* functions are instruction decoding helpers. They mostly follow
6389 the presentation in the ARM ARM. */
6392 arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
6393 struct regcache *regs,
6394 arm_displaced_step_closure *dsc)
6396 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
6397 unsigned int rn = bits (insn, 16, 19);
6399 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0x1) == 0x0)
6400 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
6401 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0x1) == 0x1)
6402 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
6403 else if ((op1 & 0x60) == 0x20)
6404 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
6405 else if ((op1 & 0x71) == 0x40)
6406 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
6408 else if ((op1 & 0x77) == 0x41)
6409 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6410 else if ((op1 & 0x77) == 0x45)
6411 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
6412 else if ((op1 & 0x77) == 0x51)
6415 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6417 return arm_copy_unpred (gdbarch, insn, dsc);
6419 else if ((op1 & 0x77) == 0x55)
6420 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6421 else if (op1 == 0x57)
6424 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
6425 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
6426 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
6427 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
6428 default: return arm_copy_unpred (gdbarch, insn, dsc);
6430 else if ((op1 & 0x63) == 0x43)
6431 return arm_copy_unpred (gdbarch, insn, dsc);
6432 else if ((op2 & 0x1) == 0x0)
6433 switch (op1 & ~0x80)
6436 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6438 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
6439 case 0x71: case 0x75:
6441 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
6442 case 0x63: case 0x67: case 0x73: case 0x77:
6443 return arm_copy_unpred (gdbarch, insn, dsc);
6445 return arm_copy_undef (gdbarch, insn, dsc);
6448 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
6452 arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
6453 struct regcache *regs,
6454 arm_displaced_step_closure *dsc)
6456 if (bit (insn, 27) == 0)
6457 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
6458 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
6459 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
6462 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
6465 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
6467 case 0x4: case 0x5: case 0x6: case 0x7:
6468 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6471 switch ((insn & 0xe00000) >> 21)
6473 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
6475 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6478 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6481 return arm_copy_undef (gdbarch, insn, dsc);
6486 int rn_f = (bits (insn, 16, 19) == 0xf);
6487 switch ((insn & 0xe00000) >> 21)
6490 /* ldc/ldc2 imm (undefined for rn == pc). */
6491 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
6492 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6495 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6497 case 0x4: case 0x5: case 0x6: case 0x7:
6498 /* ldc/ldc2 lit (undefined for rn != pc). */
6499 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
6500 : arm_copy_undef (gdbarch, insn, dsc);
6503 return arm_copy_undef (gdbarch, insn, dsc);
6508 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
6511 if (bits (insn, 16, 19) == 0xf)
6513 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6515 return arm_copy_undef (gdbarch, insn, dsc);
6519 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6521 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6525 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6527 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6530 return arm_copy_undef (gdbarch, insn, dsc);
6534 /* Decode miscellaneous instructions in dp/misc encoding space. */
6537 arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
6538 struct regcache *regs,
6539 arm_displaced_step_closure *dsc)
6541 unsigned int op2 = bits (insn, 4, 6);
6542 unsigned int op = bits (insn, 21, 22);
6547 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
6550 if (op == 0x1) /* bx. */
6551 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
6553 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
6555 return arm_copy_undef (gdbarch, insn, dsc);
6559 /* Not really supported. */
6560 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
6562 return arm_copy_undef (gdbarch, insn, dsc);
6566 return arm_copy_bx_blx_reg (gdbarch, insn,
6567 regs, dsc); /* blx register. */
6569 return arm_copy_undef (gdbarch, insn, dsc);
6572 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
6576 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
6578 /* Not really supported. */
6579 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
6582 return arm_copy_undef (gdbarch, insn, dsc);
6587 arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
6588 struct regcache *regs,
6589 arm_displaced_step_closure *dsc)
6592 switch (bits (insn, 20, 24))
6595 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
6598 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
6600 case 0x12: case 0x16:
6601 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
6604 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
6608 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
6610 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
6611 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
6612 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
6613 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
6614 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
6615 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
6616 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
6617 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
6618 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
6619 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
6620 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
6621 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
6622 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
6623 /* 2nd arg means "unprivileged". */
6624 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
6628 /* Should be unreachable. */
6633 arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
6634 struct regcache *regs,
6635 arm_displaced_step_closure *dsc)
6637 int a = bit (insn, 25), b = bit (insn, 4);
6638 uint32_t op1 = bits (insn, 20, 24);
6640 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
6641 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
6642 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
6643 else if ((!a && (op1 & 0x17) == 0x02)
6644 || (a && (op1 & 0x17) == 0x02 && !b))
6645 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
6646 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
6647 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
6648 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
6649 else if ((!a && (op1 & 0x17) == 0x03)
6650 || (a && (op1 & 0x17) == 0x03 && !b))
6651 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
6652 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
6653 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
6654 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
6655 else if ((!a && (op1 & 0x17) == 0x06)
6656 || (a && (op1 & 0x17) == 0x06 && !b))
6657 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
6658 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
6659 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
6660 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
6661 else if ((!a && (op1 & 0x17) == 0x07)
6662 || (a && (op1 & 0x17) == 0x07 && !b))
6663 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
6665 /* Should be unreachable. */
6670 arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
6671 arm_displaced_step_closure *dsc)
6673 switch (bits (insn, 20, 24))
6675 case 0x00: case 0x01: case 0x02: case 0x03:
6676 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
6678 case 0x04: case 0x05: case 0x06: case 0x07:
6679 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
6681 case 0x08: case 0x09: case 0x0a: case 0x0b:
6682 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
6683 return arm_copy_unmodified (gdbarch, insn,
6684 "decode/pack/unpack/saturate/reverse", dsc);
6687 if (bits (insn, 5, 7) == 0) /* op2. */
6689 if (bits (insn, 12, 15) == 0xf)
6690 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
6692 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
6695 return arm_copy_undef (gdbarch, insn, dsc);
6697 case 0x1a: case 0x1b:
6698 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6699 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
6701 return arm_copy_undef (gdbarch, insn, dsc);
6703 case 0x1c: case 0x1d:
6704 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
6706 if (bits (insn, 0, 3) == 0xf)
6707 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
6709 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
6712 return arm_copy_undef (gdbarch, insn, dsc);
6714 case 0x1e: case 0x1f:
6715 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6716 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
6718 return arm_copy_undef (gdbarch, insn, dsc);
6721 /* Should be unreachable. */
6726 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn,
6727 struct regcache *regs,
6728 arm_displaced_step_closure *dsc)
6731 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6733 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
6737 arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
6738 struct regcache *regs,
6739 arm_displaced_step_closure *dsc)
6741 unsigned int opcode = bits (insn, 20, 24);
6745 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
6746 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
6748 case 0x08: case 0x0a: case 0x0c: case 0x0e:
6749 case 0x12: case 0x16:
6750 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
6752 case 0x09: case 0x0b: case 0x0d: case 0x0f:
6753 case 0x13: case 0x17:
6754 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
6756 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6757 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6758 /* Note: no writeback for these instructions. Bit 25 will always be
6759 zero though (via caller), so the following works OK. */
6760 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6763 /* Should be unreachable. */
6767 /* Decode shifted register instructions. */
6770 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
6771 uint16_t insn2, struct regcache *regs,
6772 arm_displaced_step_closure *dsc)
6774 /* PC is only allowed to be used in the MOV instruction. */
6776 unsigned int op = bits (insn1, 5, 8);
6777 unsigned int rn = bits (insn1, 0, 3);
6779 if (op == 0x2 && rn == 0xf) /* MOV */
6780 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
6782 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6783 "dp (shift reg)", dsc);
6787 /* Decode extension register load/store. Exactly the same as
6788 arm_decode_ext_reg_ld_st. */
6791 thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
6792 uint16_t insn2, struct regcache *regs,
6793 arm_displaced_step_closure *dsc)
6795 unsigned int opcode = bits (insn1, 4, 8);
6799 case 0x04: case 0x05:
6800 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6801 "vfp/neon vmov", dsc);
6803 case 0x08: case 0x0c: /* 01x00 */
6804 case 0x0a: case 0x0e: /* 01x10 */
6805 case 0x12: case 0x16: /* 10x10 */
6806 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6807 "vfp/neon vstm/vpush", dsc);
6809 case 0x09: case 0x0d: /* 01x01 */
6810 case 0x0b: case 0x0f: /* 01x11 */
6811 case 0x13: case 0x17: /* 10x11 */
6812 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6813 "vfp/neon vldm/vpop", dsc);
6815 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6816 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6818 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6819 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
6822 /* Should be unreachable. */
6827 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
6828 struct regcache *regs, arm_displaced_step_closure *dsc)
6830 unsigned int op1 = bits (insn, 20, 25);
6831 int op = bit (insn, 4);
6832 unsigned int coproc = bits (insn, 8, 11);
6834 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
6835 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
6836 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
6837 && (coproc & 0xe) != 0xa)
6839 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6840 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
6841 && (coproc & 0xe) != 0xa)
6842 /* ldc/ldc2 imm/lit. */
6843 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6844 else if ((op1 & 0x3e) == 0x00)
6845 return arm_copy_undef (gdbarch, insn, dsc);
6846 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
6847 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
6848 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
6849 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6850 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
6851 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6852 else if ((op1 & 0x30) == 0x20 && !op)
6854 if ((coproc & 0xe) == 0xa)
6855 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
6857 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6859 else if ((op1 & 0x30) == 0x20 && op)
6860 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
6861 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
6862 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6863 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
6864 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6865 else if ((op1 & 0x30) == 0x30)
6866 return arm_copy_svc (gdbarch, insn, regs, dsc);
6868 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
6872 thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
6873 uint16_t insn2, struct regcache *regs,
6874 arm_displaced_step_closure *dsc)
6876 unsigned int coproc = bits (insn2, 8, 11);
6877 unsigned int bit_5_8 = bits (insn1, 5, 8);
6878 unsigned int bit_9 = bit (insn1, 9);
6879 unsigned int bit_4 = bit (insn1, 4);
6884 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6885 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
6887 else if (bit_5_8 == 0) /* UNDEFINED. */
6888 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
6891 /* coproc is 101x: SIMD/VFP, ext registers load/store. */
6892 if ((coproc & 0xe) == 0xa)
6893 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
6895 else /* coproc is not 101x. */
6897 if (bit_4 == 0) /* STC/STC2. */
6898 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6900 else /* LDC/LDC2 {literal, immediate}. */
6901 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
6907 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
6913 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
6914 arm_displaced_step_closure *dsc, int rd)
6920 /* Rewrite as -- Preparation: Rd <- PC; Insn: ADD Rd, #imm; Cleanup: none. */
6926 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6927 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
6931 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
6932 arm_displaced_step_closure *dsc,
6933 int rd, unsigned int imm)
6936 /* Encoding T2: ADDS Rd, #imm */
6937 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
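/* For example (hypothetical values): with rd = 3 and imm = 8 the scratch
   instruction is 0x3308, i.e. "adds r3, #8", which runs after r3 has been
   preloaded with the original PC value by install_pc_relative below.  */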
6939 install_pc_relative (gdbarch, regs, dsc, rd);
6945 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
6946 struct regcache *regs,
6947 arm_displaced_step_closure *dsc)
6949 unsigned int rd = bits (insn, 8, 10);
6950 unsigned int imm8 = bits (insn, 0, 7);
6952 if (debug_displaced)
6953 fprintf_unfiltered (gdb_stdlog,
6954 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
6957 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
6961 thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
6962 uint16_t insn2, struct regcache *regs,
6963 arm_displaced_step_closure *dsc)
6965 unsigned int rd = bits (insn2, 8, 11);
6966 /* The immediate is encoded the same way in ADR, ADD and SUB, so we simply
6967 extract the raw immediate encoding rather than computing the immediate
6968 value. When generating the ADD or SUB instruction, we can then simply OR
6969 the immediate fields into the encoding. */
6970 unsigned int imm_3_8 = insn2 & 0x70ff;
6971 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
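/* Sketch of the field layout assumed here: the i bit sits at bit 10 of the
   first halfword, while imm3 (bits 12-14) and imm8 (bits 0-7) sit in the
   second halfword.  The masks above keep exactly those fields, so OR-ing
   imm_i and imm_3_8 into the ADD/SUB templates below reproduces the
   original i:imm3:imm8 immediate unchanged.  */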
6973 if (debug_displaced)
6974 fprintf_unfiltered (gdb_stdlog,
6975 "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
6976 rd, imm_i, imm_3_8, insn1, insn2);
6978 if (bit (insn1, 7)) /* Encoding T2 */
6980 /* Encoding T3: SUB Rd, Rd, #imm */
6981 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
6982 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
6984 else /* Encoding T3 */
6986 /* Encoding T3: ADD Rd, Rd, #imm */
6987 dsc->modinsn[0] = (0xf100 | rd | imm_i);
6988 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
6992 install_pc_relative (gdbarch, regs, dsc, rd);
6998 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, uint16_t insn1,
6999 struct regcache *regs,
7000 arm_displaced_step_closure *dsc)
7002 unsigned int rt = bits (insn1, 8, 10);
7004 int imm8 = (bits (insn1, 0, 7) << 2);
7010 /* Rewrite as -- Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
7012 Insn: LDR R0, [R2, R3];
7013 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
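/* Illustrative case (hypothetical operands): for an original
   "ldr r1, [pc, #16]", rt is 1 and imm8 is 16; r2 is preloaded with
   Align (original PC, 4) and r3 with 16, so the substituted
   "ldr r0, [r2, r3]" reads the same literal-pool word, and the cleanup
   then moves the loaded value from r0 into r1.  */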
7015 if (debug_displaced)
7016 fprintf_unfiltered (gdb_stdlog,
7017 "displaced: copying thumb ldr r%d [pc #%d]\n"
7020 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
7021 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
7022 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
7023 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7024 /* The assembler calculates the required value of the offset from the
7025 Align(PC,4) value of this instruction to the label. */
7026 pc = pc & 0xfffffffc;
7028 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
7029 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
7032 dsc->u.ldst.xfersize = 4;
7034 dsc->u.ldst.immed = 0;
7035 dsc->u.ldst.writeback = 0;
7036 dsc->u.ldst.restore_r4 = 0;
7038 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/
7040 dsc->cleanup = &cleanup_load;
7045 /* Copy Thumb cbnz/cbz instruction. */
7048 thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
7049 struct regcache *regs,
7050 arm_displaced_step_closure *dsc)
7052 int non_zero = bit (insn1, 11);
7053 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
7054 CORE_ADDR from = dsc->insn_addr;
7055 int rn = bits (insn1, 0, 2);
7056 int rn_val = displaced_read_reg (regs, dsc, rn);
7058 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
7059 /* CBNZ and CBZ do not affect the condition flags. If the condition is
7060 true, set it to INST_AL so that cleanup_branch knows the branch is taken;
7061 otherwise leave it as is and cleanup_branch will do nothing. */
7062 if (dsc->u.branch.cond)
7064 dsc->u.branch.cond = INST_AL;
7065 dsc->u.branch.dest = from + 4 + imm5;
7068 dsc->u.branch.dest = from + 2;
7070 dsc->u.branch.link = 0;
7071 dsc->u.branch.exchange = 0;
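/* Example (hypothetical encoding): insn1 = 0xb10a is "cbz r2, <from + 6>";
   non_zero is 0, rn is r2 and imm5 decodes to a byte offset of 2, so when
   r2 reads as zero the branch is taken and dest becomes from + 4 + 2,
   otherwise execution falls through to from + 2.  */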
7073 if (debug_displaced)
7074 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
7075 " insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
7076 rn, rn_val, insn1, dsc->u.branch.dest);
7078 dsc->modinsn[0] = THUMB_NOP;
7080 dsc->cleanup = &cleanup_branch;
7084 /* Copy Table Branch Byte/Halfword */
7086 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
7087 uint16_t insn2, struct regcache *regs,
7088 arm_displaced_step_closure *dsc)
7090 ULONGEST rn_val, rm_val;
7091 int is_tbh = bit (insn2, 4);
7092 CORE_ADDR halfwords = 0;
7093 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7095 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
7096 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
7102 target_read_memory (rn_val + 2 * rm_val, buf, 2);
7103 halfwords = extract_unsigned_integer (buf, 2, byte_order);
7109 target_read_memory (rn_val + rm_val, buf, 1);
7110 halfwords = extract_unsigned_integer (buf, 1, byte_order);
7113 if (debug_displaced)
7114 fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x offset 0x%x"
7115 " entry 0x%x\n", is_tbh ? "tbh" : "tbb",
7116 (unsigned int) rn_val, (unsigned int) rm_val,
7117 (unsigned int) halfwords);
7119 dsc->u.branch.cond = INST_AL;
7120 dsc->u.branch.link = 0;
7121 dsc->u.branch.exchange = 0;
7122 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
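/* Example (hypothetical table entry): if the value read above is 5, the
   branch destination becomes insn_addr + 4 + 10; the entries in a TBB/TBH
   table are halfword offsets relative to the table-branch instruction's
   PC value (its address plus 4).  */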
7124 dsc->cleanup = &cleanup_branch;
7130 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
7131 arm_displaced_step_closure *dsc)
7134 int val = displaced_read_reg (regs, dsc, 7);
7135 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
7138 val = displaced_read_reg (regs, dsc, 8);
7139 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
7142 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
7147 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, uint16_t insn1,
7148 struct regcache *regs,
7149 arm_displaced_step_closure *dsc)
7151 dsc->u.block.regmask = insn1 & 0x00ff;
7153 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
7156 (1) register list is full, that is, r0-r7 are used.
7157 Prepare: tmp[0] <- r8
7159 POP {r0, r1, ...., r6, r7}; remove PC from reglist
7160 MOV r8, r7; Move value of r7 to r8;
7161 POP {r7}; Store PC value into r7.
7163 Cleanup: PC <- r7, r7 <- r8, r8 <- tmp[0]
7165 (2) register list is not full, supposing there are N registers in
7166 register list (except PC, 0 <= N <= 7).
7167 Prepare: for each i, 0 - N, tmp[i] <- ri.
7169 POP {r0, r1, ...., rN};
7171 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
7172 from tmp[] properly. */
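/* Illustration (hypothetical instruction): for "pop {r0, r2, pc}" the
   incoming register mask is 0x05 with PC set, so case (2) applies:
   num_in_list is 2, the scratch instruction becomes "pop {r0, r1, r2}"
   (new_regmask 0x07), and cleanup_block_load_pc then distributes the
   three loaded words back to r0, r2 and the PC, in that order.  */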
7174 if (debug_displaced)
7175 fprintf_unfiltered (gdb_stdlog,
7176 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
7177 dsc->u.block.regmask, insn1);
7179 if (dsc->u.block.regmask == 0xff)
7181 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
7183 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
7184 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
7185 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
7188 dsc->cleanup = &cleanup_pop_pc_16bit_all;
7192 unsigned int num_in_list = bitcount (dsc->u.block.regmask);
7194 unsigned int new_regmask;
7196 for (i = 0; i < num_in_list + 1; i++)
7197 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7199 new_regmask = (1 << (num_in_list + 1)) - 1;
7201 if (debug_displaced)
7202 fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
7203 "{..., pc}: original reg list %.4x,"
7204 " modified list %.4x\n"),
7205 (int) dsc->u.block.regmask, new_regmask);
7207 dsc->u.block.regmask |= 0x8000;
7208 dsc->u.block.writeback = 0;
7209 dsc->u.block.cond = INST_AL;
7211 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
7213 dsc->cleanup = &cleanup_block_load_pc;
7220 thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7221 struct regcache *regs,
7222 arm_displaced_step_closure *dsc)
7224 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
7225 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
7228 /* 16-bit thumb instructions. */
7229 switch (op_bit_12_15)
7231 /* Shift (immediate), add, subtract, move and compare. */
7232 case 0: case 1: case 2: case 3:
7233 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7234 "shift/add/sub/mov/cmp",
7238 switch (op_bit_10_11)
7240 case 0: /* Data-processing */
7241 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7245 case 1: /* Special data instructions and branch and exchange. */
7247 unsigned short op = bits (insn1, 7, 9);
7248 if (op == 6 || op == 7) /* BX or BLX */
7249 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
7250 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
7251 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
7253 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
7257 default: /* LDR (literal) */
7258 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
7261 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
7262 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
7265 if (op_bit_10_11 < 2) /* Generate PC-relative address */
7266 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
7267 else /* Generate SP-relative address */
7268 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
7270 case 11: /* Misc 16-bit instructions */
7272 switch (bits (insn1, 8, 11))
7274 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
7275 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
7277 case 12: case 13: /* POP */
7278 if (bit (insn1, 8)) /* PC is in register list. */
7279 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
7281 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
7283 case 15: /* If-Then, and hints */
7284 if (bits (insn1, 0, 3))
7285 /* If-Then makes up to four following instructions conditional.
7286 The IT instruction itself is not conditional, so handle it as an
7287 ordinary unmodified instruction. */
7288 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
7291 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
7294 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
7299 if (op_bit_10_11 < 2) /* Store multiple registers */
7300 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
7301 else /* Load multiple registers */
7302 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
7304 case 13: /* Conditional branch and supervisor call */
7305 if (bits (insn1, 9, 11) != 7) /* conditional branch */
7306 err = thumb_copy_b (gdbarch, insn1, dsc);
7308 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
7310 case 14: /* Unconditional branch */
7311 err = thumb_copy_b (gdbarch, insn1, dsc);
7318 internal_error (__FILE__, __LINE__,
7319 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
7323 decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
7324 uint16_t insn1, uint16_t insn2,
7325 struct regcache *regs,
7326 arm_displaced_step_closure *dsc)
7328 int rt = bits (insn2, 12, 15);
7329 int rn = bits (insn1, 0, 3);
7330 int op1 = bits (insn1, 7, 8);
7332 switch (bits (insn1, 5, 6))
7334 case 0: /* Load byte and memory hints */
7335 if (rt == 0xf) /* PLD/PLI */
7338 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
7339 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
7341 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7346 if (rn == 0xf) /* LDRB/LDRSB (literal) */
7347 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7350 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7351 "ldrb{reg, immediate}/ldrbt",
7356 case 1: /* Load halfword and memory hints. */
7357 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
7358 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7359 "pld/unalloc memhint", dsc);
7363 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7366 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7370 case 2: /* Load word */
7372 int insn2_bit_8_11 = bits (insn2, 8, 11);
7375 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
7376 else if (op1 == 0x1) /* Encoding T3 */
7377 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
7379 else /* op1 == 0x0 */
7381 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
7382 /* LDR (immediate) */
7383 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7384 dsc, bit (insn2, 8), 1);
7385 else if (insn2_bit_8_11 == 0xe) /* LDRT */
7386 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7389 /* LDR (register) */
7390 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7396 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7403 thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7404 uint16_t insn2, struct regcache *regs,
7405 arm_displaced_step_closure *dsc)
7408 unsigned short op = bit (insn2, 15);
7409 unsigned int op1 = bits (insn1, 11, 12);
7415 switch (bits (insn1, 9, 10))
7420 /* Load/store {dual, exclusive}, table branch. */
7421 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
7422 && bits (insn2, 5, 7) == 0)
7423 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
7426 /* PC is not allowed to be used in load/store {dual, exclusive} instructions. */
7428 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7429 "load/store dual/ex", dsc);
7431 else /* load/store multiple */
7433 switch (bits (insn1, 7, 8))
7435 case 0: case 3: /* SRS, RFE */
7436 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7439 case 1: case 2: /* LDM/STM/PUSH/POP */
7440 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
7447 /* Data-processing (shift register). */
7448 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
7451 default: /* Coprocessor instructions. */
7452 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7457 case 2: /* op1 = 2 */
7458 if (op) /* Branch and misc control. */
7460 if (bit (insn2, 14) /* BLX/BL */
7461 || bit (insn2, 12) /* Unconditional branch */
7462 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
7463 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
7465 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7470 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
7472 int op = bits (insn1, 4, 8);
7473 int rn = bits (insn1, 0, 3);
7474 if ((op == 0 || op == 0xa) && rn == 0xf)
7475 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
7478 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7481 else /* Data processing (modified immediate) */
7482 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7486 case 3: /* op1 = 3 */
7487 switch (bits (insn1, 9, 10))
7491 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
7493 else /* NEON Load/Store and Store single data item */
7494 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7495 "neon elt/struct load/store",
7498 case 1: /* op1 = 3, bits (9, 10) == 1 */
7499 switch (bits (insn1, 7, 8))
7501 case 0: case 1: /* Data processing (register) */
7502 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7505 case 2: /* Multiply and absolute difference */
7506 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7507 "mul/mua/diff", dsc);
7509 case 3: /* Long multiply and divide */
7510 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7515 default: /* Coprocessor instructions */
7516 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7525 internal_error (__FILE__, __LINE__,
7526 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
7531 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7532 struct regcache *regs,
7533 arm_displaced_step_closure *dsc)
7535 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7537 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
7539 if (debug_displaced)
7540 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
7541 "at %.8lx\n", insn1, (unsigned long) from);
7544 dsc->insn_size = thumb_insn_size (insn1);
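/* As assumed here, thumb_insn_size treats a halfword whose five most
   significant bits are 0b11101, 0b11110 or 0b11111 as the first half of a
   32-bit Thumb-2 instruction; anything else is a 16-bit instruction, which
   is what the test below relies on.  */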
7545 if (thumb_insn_size (insn1) == 4)
7548 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
7549 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
7552 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
7556 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7557 CORE_ADDR to, struct regcache *regs,
7558 arm_displaced_step_closure *dsc)
7561 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7564 /* Most displaced instructions use a 1-instruction scratch space, so set this
7565 here and override below if/when necessary. */
7567 dsc->insn_addr = from;
7568 dsc->scratch_base = to;
7569 dsc->cleanup = NULL;
7570 dsc->wrote_to_pc = 0;
7572 if (!displaced_in_arm_mode (regs))
7573 return thumb_process_displaced_insn (gdbarch, from, regs, dsc);
7577 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
7578 if (debug_displaced)
7579 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
7580 "at %.8lx\n", (unsigned long) insn,
7581 (unsigned long) from);
7583 if ((insn & 0xf0000000) == 0xf0000000)
7584 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
7585 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
7587 case 0x0: case 0x1: case 0x2: case 0x3:
7588 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
7591 case 0x4: case 0x5: case 0x6:
7592 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
7596 err = arm_decode_media (gdbarch, insn, dsc);
7599 case 0x8: case 0x9: case 0xa: case 0xb:
7600 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
7603 case 0xc: case 0xd: case 0xe: case 0xf:
7604 err = arm_decode_svc_copro (gdbarch, insn, regs, dsc);
7609 internal_error (__FILE__, __LINE__,
7610 _("arm_process_displaced_insn: Instruction decode error"));
7613 /* Actually set up the scratch space for a displaced instruction. */
7616 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
7617 CORE_ADDR to, arm_displaced_step_closure *dsc)
7619 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7620 unsigned int i, len, offset;
7621 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7622 int size = dsc->is_thumb ? 2 : 4;
7623 const gdb_byte *bkp_insn;
7626 /* Poke modified instruction(s). */
7627 for (i = 0; i < dsc->numinsns; i++)
7629 if (debug_displaced)
7631 fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
7633 fprintf_unfiltered (gdb_stdlog, "%.8lx",
7636 fprintf_unfiltered (gdb_stdlog, "%.4x",
7637 (unsigned short)dsc->modinsn[i]);
7639 fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
7640 (unsigned long) to + offset);
7643 write_memory_unsigned_integer (to + offset, size,
7644 byte_order_for_code,
7649 /* Choose the correct breakpoint instruction. */
7652 bkp_insn = tdep->thumb_breakpoint;
7653 len = tdep->thumb_breakpoint_size;
7657 bkp_insn = tdep->arm_breakpoint;
7658 len = tdep->arm_breakpoint_size;
7661 /* Put breakpoint afterwards. */
7662 write_memory (to + offset, bkp_insn, len);
7664 if (debug_displaced)
7665 fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
7666 paddress (gdbarch, from), paddress (gdbarch, to));
7669 /* Entry point for cleaning things up after a displaced instruction has been single-stepped. */
7673 arm_displaced_step_fixup (struct gdbarch *gdbarch,
7674 struct displaced_step_closure *dsc_,
7675 CORE_ADDR from, CORE_ADDR to,
7676 struct regcache *regs)
7678 arm_displaced_step_closure *dsc = (arm_displaced_step_closure *) dsc_;
7681 dsc->cleanup (gdbarch, regs, dsc);
7683 if (!dsc->wrote_to_pc)
7684 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
7685 dsc->insn_addr + dsc->insn_size);
7689 #include "bfd-in2.h"
7690 #include "libcoff.h"
7693 gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
7695 gdb_disassembler *di
7696 = static_cast<gdb_disassembler *>(info->application_data);
7697 struct gdbarch *gdbarch = di->arch ();
7699 if (arm_pc_is_thumb (gdbarch, memaddr))
7701 static asymbol *asym;
7702 static combined_entry_type ce;
7703 static struct coff_symbol_struct csym;
7704 static struct bfd fake_bfd;
7705 static bfd_target fake_target;
7707 if (csym.native == NULL)
7709 /* Create a fake symbol vector containing a Thumb symbol.
7710 This is solely so that the code in print_insn_little_arm()
7711 and print_insn_big_arm() in opcodes/arm-dis.c will detect
7712 the presence of a Thumb symbol and switch to decoding
7713 Thumb instructions. */
7715 fake_target.flavour = bfd_target_coff_flavour;
7716 fake_bfd.xvec = &fake_target;
7717 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
7719 csym.symbol.the_bfd = &fake_bfd;
7720 csym.symbol.name = "fake";
7721 asym = (asymbol *) & csym;
7724 memaddr = UNMAKE_THUMB_ADDR (memaddr);
7725 info->symbols = &asym;
7728 info->symbols = NULL;
7730 /* GDB is able to get bfd_mach from exec_bfd, so info->mach is
7731 accurate; mark the USER_SPECIFIED_MACHINE_TYPE bit. Otherwise
7732 opcodes/arm-dis.c:print_insn would reset info->mach, and that would
7733 trigger the assert on the mismatch of info->mach and bfd_get_mach (exec_bfd)
7734 in default_print_insn. */
7735 if (exec_bfd != NULL)
7736 info->flags |= USER_SPECIFIED_MACHINE_TYPE;
7738 return default_print_insn (memaddr, info);
7741 /* The following define instruction sequences that will cause ARM
7742 CPUs to take an undefined instruction trap. These are used to
7743 signal a breakpoint to GDB.
7745 The newer ARMv4T CPUs are capable of operating in ARM or Thumb
7746 modes. A different instruction is required for each mode. The ARM
7747 CPUs can also be big or little endian. Thus four different
7748 instructions are needed to support all cases.
7750 Note: ARMv4 defines several new instructions that will take the
7751 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
7752 not in fact add the new instructions. The new undefined
7753 instructions in ARMv4 are all instructions that had no defined
7754 behaviour in earlier chips. There is no guarantee that they will
7755 raise an exception; they may simply be treated as NOPs. In practice, it
7756 may only be safe to rely on instructions matching:
7758 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
7759 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
7760 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
7762 Even this may only be true if the condition predicate is true. The
7763 following use a condition predicate of ALWAYS so it is always TRUE.
7765 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
7766 and NetBSD all use a software interrupt rather than an undefined
7767 instruction to force a trap. This can be handled by the
7768 abi-specific code during establishment of the gdbarch vector. */
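/* For instance, the little-endian ARM breakpoint bytes below,
   {0xFE, 0xDE, 0xFF, 0xE7}, form the word 0xe7ffdefe: cond is 0xe
   (always), bits 27-25 are 011 and bit 4 is 1, so it matches the
   "reliably undefined" pattern shown above.  */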
7770 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
7771 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
7772 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
7773 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
7775 static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
7776 static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
7777 static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
7778 static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
7780 /* Implement the breakpoint_kind_from_pc gdbarch method. */
7783 arm_breakpoint_kind_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr)
7785 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7786 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7788 if (arm_pc_is_thumb (gdbarch, *pcptr))
7790 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
7792 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
7793 check whether we are replacing a 32-bit instruction. */
7794 if (tdep->thumb2_breakpoint != NULL)
7798 if (target_read_memory (*pcptr, buf, 2) == 0)
7800 unsigned short inst1;
7802 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
7803 if (thumb_insn_size (inst1) == 4)
7804 return ARM_BP_KIND_THUMB2;
7808 return ARM_BP_KIND_THUMB;
7811 return ARM_BP_KIND_ARM;
7815 /* Implement the sw_breakpoint_from_kind gdbarch method. */
7817 static const gdb_byte *
7818 arm_sw_breakpoint_from_kind (struct gdbarch *gdbarch, int kind, int *size)
7820 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7824 case ARM_BP_KIND_ARM:
7825 *size = tdep->arm_breakpoint_size;
7826 return tdep->arm_breakpoint;
7827 case ARM_BP_KIND_THUMB:
7828 *size = tdep->thumb_breakpoint_size;
7829 return tdep->thumb_breakpoint;
7830 case ARM_BP_KIND_THUMB2:
7831 *size = tdep->thumb2_breakpoint_size;
7832 return tdep->thumb2_breakpoint;
7834 gdb_assert_not_reached ("unexpected arm breakpoint kind");
7838 /* Implement the breakpoint_kind_from_current_state gdbarch method. */
7841 arm_breakpoint_kind_from_current_state (struct gdbarch *gdbarch,
7842 struct regcache *regcache,
7847 /* Check that the memory pointed to by PC is readable. */
7848 if (target_read_memory (regcache_read_pc (regcache), buf, 4) == 0)
7850 struct arm_get_next_pcs next_pcs_ctx;
7852 arm_get_next_pcs_ctor (&next_pcs_ctx,
7853 &arm_get_next_pcs_ops,
7854 gdbarch_byte_order (gdbarch),
7855 gdbarch_byte_order_for_code (gdbarch),
7859 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
7861 /* If *PCPTR matches one of the next PCs computed by the software
7862 single-step machinery, determine the Thumb mode from that
7863 destination address. */
7864 for (CORE_ADDR pc : next_pcs)
7866 if (UNMAKE_THUMB_ADDR (pc) == *pcptr)
7868 if (IS_THUMB_ADDR (pc))
7870 *pcptr = MAKE_THUMB_ADDR (*pcptr);
7871 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7874 return ARM_BP_KIND_ARM;
7879 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7882 /* Extract from REGS, a regcache holding the (raw) register state, a
7883 function return value of type TYPE, and copy that, in virtual
7884 format, into VALBUF. */
7887 arm_extract_return_value (struct type *type, struct regcache *regs,
7890 struct gdbarch *gdbarch = regs->arch ();
7891 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7893 if (TYPE_CODE_FLT == TYPE_CODE (type))
7895 switch (gdbarch_tdep (gdbarch)->fp_model)
7899 /* The value is in register F0 in internal format. We need to
7900 extract the raw value and then convert it to the desired type. */
7902 bfd_byte tmpbuf[FP_REGISTER_SIZE];
7904 regcache_cooked_read (regs, ARM_F0_REGNUM, tmpbuf);
7905 target_float_convert (tmpbuf, arm_ext_type (gdbarch),
7910 case ARM_FLOAT_SOFT_FPA:
7911 case ARM_FLOAT_SOFT_VFP:
7912 /* ARM_FLOAT_VFP can arise if this is a variadic function, so it is
7913 not using the VFP ABI code. */
7915 regcache_cooked_read (regs, ARM_A1_REGNUM, valbuf);
7916 if (TYPE_LENGTH (type) > 4)
7917 regcache_cooked_read (regs, ARM_A1_REGNUM + 1,
7918 valbuf + INT_REGISTER_SIZE);
7922 internal_error (__FILE__, __LINE__,
7923 _("arm_extract_return_value: "
7924 "Floating point model not supported"));
7928 else if (TYPE_CODE (type) == TYPE_CODE_INT
7929 || TYPE_CODE (type) == TYPE_CODE_CHAR
7930 || TYPE_CODE (type) == TYPE_CODE_BOOL
7931 || TYPE_CODE (type) == TYPE_CODE_PTR
7932 || TYPE_IS_REFERENCE (type)
7933 || TYPE_CODE (type) == TYPE_CODE_ENUM)
7935 /* If the type is a plain integer, then the access is
7936 straightforward. Otherwise we have to play around a bit more. */
7938 int len = TYPE_LENGTH (type);
7939 int regno = ARM_A1_REGNUM;
7944 /* By using store_unsigned_integer we avoid having to do
7945 anything special for small big-endian values. */
7946 regcache_cooked_read_unsigned (regs, regno++, &tmp);
7947 store_unsigned_integer (valbuf,
7948 (len > INT_REGISTER_SIZE
7949 ? INT_REGISTER_SIZE : len),
7951 len -= INT_REGISTER_SIZE;
7952 valbuf += INT_REGISTER_SIZE;
7957 /* For a structure or union the behaviour is as if the value had
7958 been stored to word-aligned memory and then loaded into
7959 registers with 32-bit load instruction(s). */
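/* For example, a 6-byte structure is fetched from r0 and r1: the loop
   below copies four bytes from r0 and then the remaining two bytes from
   the start of r1's raw contents, decrementing LEN by INT_REGISTER_SIZE
   on each pass.  */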
7960 int len = TYPE_LENGTH (type);
7961 int regno = ARM_A1_REGNUM;
7962 bfd_byte tmpbuf[INT_REGISTER_SIZE];
7966 regcache_cooked_read (regs, regno++, tmpbuf);
7967 memcpy (valbuf, tmpbuf,
7968 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
7969 len -= INT_REGISTER_SIZE;
7970 valbuf += INT_REGISTER_SIZE;
7976 /* Will a function return an aggregate type in memory or in a
7977 register? Return 0 if an aggregate type can be returned in a
7978 register, 1 if it must be returned in memory. */
7981 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
7983 enum type_code code;
7985 type = check_typedef (type);
7987 /* Simple, non-aggregate types (i.e. not including vectors and
7988 complex) are always returned in a register (or registers). */
7989 code = TYPE_CODE (type);
7990 if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
7991 && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
7994 if (TYPE_CODE_ARRAY == code && TYPE_VECTOR (type))
7996 /* Vector values should be returned using ARM registers if they
7997 are not over 16 bytes. */
7998 return (TYPE_LENGTH (type) > 16);
8001 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
8003 /* The AAPCS says all aggregates not larger than a word are returned in a register. */
8005 if (TYPE_LENGTH (type) <= INT_REGISTER_SIZE)
8014 /* All aggregate types that won't fit in a register must be returned in memory. */
8016 if (TYPE_LENGTH (type) > INT_REGISTER_SIZE)
8019 /* In the ARM ABI, "integer" like aggregate types are returned in
8020 registers. For an aggregate type to be integer like, its size
8021 must be less than or equal to INT_REGISTER_SIZE and the
8022 offset of each addressable subfield must be zero. Note that bit
8023 fields are not addressable, and all addressable subfields of
8024 unions always start at offset zero.
8026 This function is based on the behaviour of GCC 2.95.1.
8027 See: gcc/arm.c: arm_return_in_memory() for details.
8029 Note: All versions of GCC before GCC 2.95.2 do not set up the
8030 parameters correctly for a function returning the following
8031 structure: struct { float f;}; This should be returned in memory,
8032 not a register. Richard Earnshaw sent me a patch, but I do not
8033 know of any way to detect if a function like the above has been
8034 compiled with the correct calling convention. */
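/* Hypothetical examples under the rules above: struct { int x; } is
   "integer like" (one word, single field at offset zero) and is returned
   in a register; struct { float f; } contains a floating point field and
   so must go to memory; struct { short a; short b; } has an addressable
   field at a non-zero offset and likewise goes to memory.  */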
8036 /* Assume all other aggregate types can be returned in a register.
8037 Run a check for structures, unions and arrays. */
8040 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
8043 /* Need to check if this struct/union is "integer" like. For
8044 this to be true, its size must be less than or equal to
8045 INT_REGISTER_SIZE and the offset of each addressable
8046 subfield must be zero. Note that bit fields are not
8047 addressable, and unions always start at offset zero. If any
8048 of the subfields is a floating point type, the struct/union
8049 cannot be an integer type. */
8051 /* For each field in the object, check:
8052 1) Is it FP? --> yes, nRc = 1;
8053 2) Is it addressable (bitpos != 0) and
8054 not packed (bitsize == 0)? --> yes, nRc = 1. */
8058 for (i = 0; i < TYPE_NFIELDS (type); i++)
8060 enum type_code field_type_code;
8063 = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
8066 /* Is it a floating point type field? */
8067 if (field_type_code == TYPE_CODE_FLT)
8073 /* If bitpos != 0, then we have to care about it. */
8074 if (TYPE_FIELD_BITPOS (type, i) != 0)
8076 /* Bitfields are not addressable. If the field bitsize is
8077 zero, then the field is not packed. Hence it cannot be
8078 a bitfield or any other packed type. */
8079 if (TYPE_FIELD_BITSIZE (type, i) == 0)
8092 /* Write into appropriate registers a function return value of type
8093 TYPE, given in virtual format. */
8096 arm_store_return_value (struct type *type, struct regcache *regs,
8097 const gdb_byte *valbuf)
8099 struct gdbarch *gdbarch = regs->arch ();
8100 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8102 if (TYPE_CODE (type) == TYPE_CODE_FLT)
8104 gdb_byte buf[FP_REGISTER_SIZE];
8106 switch (gdbarch_tdep (gdbarch)->fp_model)
8110 target_float_convert (valbuf, type, buf, arm_ext_type (gdbarch));
8111 regcache_cooked_write (regs, ARM_F0_REGNUM, buf);
8114 case ARM_FLOAT_SOFT_FPA:
8115 case ARM_FLOAT_SOFT_VFP:
8116 /* ARM_FLOAT_VFP can arise if this is a variadic function, so it is
8117 not using the VFP ABI code. */
8119 regcache_cooked_write (regs, ARM_A1_REGNUM, valbuf);
8120 if (TYPE_LENGTH (type) > 4)
8121 regcache_cooked_write (regs, ARM_A1_REGNUM + 1,
8122 valbuf + INT_REGISTER_SIZE);
8126 internal_error (__FILE__, __LINE__,
8127 _("arm_store_return_value: Floating "
8128 "point model not supported"));
8132 else if (TYPE_CODE (type) == TYPE_CODE_INT
8133 || TYPE_CODE (type) == TYPE_CODE_CHAR
8134 || TYPE_CODE (type) == TYPE_CODE_BOOL
8135 || TYPE_CODE (type) == TYPE_CODE_PTR
8136 || TYPE_IS_REFERENCE (type)
8137 || TYPE_CODE (type) == TYPE_CODE_ENUM)
8139 if (TYPE_LENGTH (type) <= 4)
8141 /* Values of one word or less are zero/sign-extended and returned in r0. */
8143 bfd_byte tmpbuf[INT_REGISTER_SIZE];
8144 LONGEST val = unpack_long (type, valbuf);
8146 store_signed_integer (tmpbuf, INT_REGISTER_SIZE, byte_order, val);
8147 regcache_cooked_write (regs, ARM_A1_REGNUM, tmpbuf);
8151 /* Integral values greater than one word are stored in consecutive
8152 registers starting with r0. This will always be a multiple of
8153 the register size. */
8154 int len = TYPE_LENGTH (type);
8155 int regno = ARM_A1_REGNUM;
8159 regcache_cooked_write (regs, regno++, valbuf);
8160 len -= INT_REGISTER_SIZE;
8161 valbuf += INT_REGISTER_SIZE;
8167 /* For a structure or union the behaviour is as if the value had
8168 been stored to word-aligned memory and then loaded into
8169 registers with 32-bit load instruction(s). */
8170 int len = TYPE_LENGTH (type);
8171 int regno = ARM_A1_REGNUM;
8172 bfd_byte tmpbuf[INT_REGISTER_SIZE];
8176 memcpy (tmpbuf, valbuf,
8177 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
8178 regcache_cooked_write (regs, regno++, tmpbuf);
8179 len -= INT_REGISTER_SIZE;
8180 valbuf += INT_REGISTER_SIZE;
8186 /* Handle function return values. */
8188 static enum return_value_convention
8189 arm_return_value (struct gdbarch *gdbarch, struct value *function,
8190 struct type *valtype, struct regcache *regcache,
8191 gdb_byte *readbuf, const gdb_byte *writebuf)
8193 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8194 struct type *func_type = function ? value_type (function) : NULL;
8195 enum arm_vfp_cprc_base_type vfp_base_type;
8198 if (arm_vfp_abi_for_function (gdbarch, func_type)
8199 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
8201 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
8202 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
8204 for (i = 0; i < vfp_base_count; i++)
8206 if (reg_char == 'q')
8209 arm_neon_quad_write (gdbarch, regcache, i,
8210 writebuf + i * unit_length);
8213 arm_neon_quad_read (gdbarch, regcache, i,
8214 readbuf + i * unit_length);
8221 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
8222 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8225 regcache_cooked_write (regcache, regnum,
8226 writebuf + i * unit_length);
8228 regcache_cooked_read (regcache, regnum,
8229 readbuf + i * unit_length);
8232 return RETURN_VALUE_REGISTER_CONVENTION;
8235 if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
8236 || TYPE_CODE (valtype) == TYPE_CODE_UNION
8237 || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
8239 if (tdep->struct_return == pcc_struct_return
8240 || arm_return_in_memory (gdbarch, valtype))
8241 return RETURN_VALUE_STRUCT_CONVENTION;
8243 else if (TYPE_CODE (valtype) == TYPE_CODE_COMPLEX)
8245 if (arm_return_in_memory (gdbarch, valtype))
8246 return RETURN_VALUE_STRUCT_CONVENTION;
8250 arm_store_return_value (valtype, regcache, writebuf);
8253 arm_extract_return_value (valtype, regcache, readbuf);
8255 return RETURN_VALUE_REGISTER_CONVENTION;
8260 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
8262 struct gdbarch *gdbarch = get_frame_arch (frame);
8263 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8264 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8266 gdb_byte buf[INT_REGISTER_SIZE];
8268 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
8270 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
8274 *pc = extract_unsigned_integer (buf, INT_REGISTER_SIZE, byte_order);
8278 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
8279 return the target PC. Otherwise return 0. */
8282 arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
8286 CORE_ADDR start_addr;
8288 /* Find the starting address and name of the function containing the PC. */
8289 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
8291 /* Trampoline 'bx reg' doesn't belong to any function. Do the check here. */
8293 start_addr = arm_skip_bx_reg (frame, pc);
8294 if (start_addr != 0)
8300 /* If PC is in a Thumb call or return stub, return the address of the
8301 target PC, which is in a register. The thunk functions are called
8302 _call_via_xx, where xx is the register name. The possible names
8303 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
8304 functions, named __ARM_call_via_r[0-7]. */
8305 if (startswith (name, "_call_via_")
8306 || startswith (name, "__ARM_call_via_"))
8308 /* Use the name suffix to determine which register contains the target PC. */
8310 static const char *table[15] =
8311 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
8312 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
8315 int offset = strlen (name) - 2;
8317 for (regno = 0; regno <= 14; regno++)
8318 if (strcmp (&name[offset], table[regno]) == 0)
8319 return get_frame_register_unsigned (frame, regno);
8322 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
8323 non-interworking calls to foo. We could decode the stubs
8324 to find the target but it's easier to use the symbol table. */
8325 namelen = strlen (name);
8326 if (name[0] == '_' && name[1] == '_'
8327 && ((namelen > 2 + strlen ("_from_thumb")
8328 && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
8329 || (namelen > 2 + strlen ("_from_arm")
8330 && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
8333 int target_len = namelen - 2;
8334 struct bound_minimal_symbol minsym;
8335 struct objfile *objfile;
8336 struct obj_section *sec;
8338 if (name[namelen - 1] == 'b')
8339 target_len -= strlen ("_from_thumb");
8341 target_len -= strlen ("_from_arm");
8343 target_name = (char *) alloca (target_len + 1);
8344 memcpy (target_name, name + 2, target_len);
8345 target_name[target_len] = '\0';
8347 sec = find_pc_section (pc);
8348 objfile = (sec == NULL) ? NULL : sec->objfile;
8349 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
8350 if (minsym.minsym != NULL)
8351 return BMSYMBOL_VALUE_ADDRESS (minsym);
8356 return 0; /* not a stub */
8360 set_arm_command (const char *args, int from_tty)
8362 printf_unfiltered (_("\
8363 \"set arm\" must be followed by an apporpriate subcommand.\n"));
8364 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
8368 show_arm_command (const char *args, int from_tty)
8370 cmd_show_list (showarmcmdlist, from_tty, "");
8374 arm_update_current_architecture (void)
8376 struct gdbarch_info info;
8378 /* If the current architecture is not ARM, we have nothing to do. */
8379 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
8382 /* Update the architecture. */
8383 gdbarch_info_init (&info);
8385 if (!gdbarch_update_p (info))
8386 internal_error (__FILE__, __LINE__, _("could not update architecture"));
8390 set_fp_model_sfunc (const char *args, int from_tty,
8391 struct cmd_list_element *c)
8395 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
8396 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
8398 arm_fp_model = (enum arm_float_model) fp_model;
8402 if (fp_model == ARM_FLOAT_LAST)
8403 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
8406 arm_update_current_architecture ();
8410 show_fp_model (struct ui_file *file, int from_tty,
8411 struct cmd_list_element *c, const char *value)
8413 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8415 if (arm_fp_model == ARM_FLOAT_AUTO
8416 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8417 fprintf_filtered (file, _("\
8418 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
8419 fp_model_strings[tdep->fp_model]);
8421 fprintf_filtered (file, _("\
8422 The current ARM floating point model is \"%s\".\n"),
8423 fp_model_strings[arm_fp_model]);
8427 arm_set_abi (const char *args, int from_tty,
8428 struct cmd_list_element *c)
8432 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
8433 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
8435 arm_abi_global = (enum arm_abi_kind) arm_abi;
8439 if (arm_abi == ARM_ABI_LAST)
8440 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
8443 arm_update_current_architecture ();
8447 arm_show_abi (struct ui_file *file, int from_tty,
8448 struct cmd_list_element *c, const char *value)
8450 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8452 if (arm_abi_global == ARM_ABI_AUTO
8453 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8454 fprintf_filtered (file, _("\
8455 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
8456 arm_abi_strings[tdep->arm_abi]);
8458 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
8463 arm_show_fallback_mode (struct ui_file *file, int from_tty,
8464 struct cmd_list_element *c, const char *value)
8466 fprintf_filtered (file,
8467 _("The current execution mode assumed "
8468 "(when symbols are unavailable) is \"%s\".\n"),
8469 arm_fallback_mode_string);
8473 arm_show_force_mode (struct ui_file *file, int from_tty,
8474 struct cmd_list_element *c, const char *value)
8476 fprintf_filtered (file,
8477 _("The current execution mode assumed "
8478 "(even when symbols are available) is \"%s\".\n"),
8479 arm_force_mode_string);
8482 /* If the user changes the register disassembly style used for info
8483 register and other commands, we have to also switch the style used
8484 in opcodes for disassembly output. This function is run in the "set
8485 arm disassembly" command, and does that. */
8488 set_disassembly_style_sfunc (const char *args, int from_tty,
8489 struct cmd_list_element *c)
8491 /* Convert the short style name into the long style name (e.g., reg-names-*)
8492 before calling the generic set_disassembler_options() function. */
8493 std::string long_name = std::string ("reg-names-") + disassembly_style;
8494 set_disassembler_options (&long_name[0]);
8498 show_disassembly_style_sfunc (struct ui_file *file, int from_tty,
8499 struct cmd_list_element *c, const char *value)
8501 struct gdbarch *gdbarch = get_current_arch ();
8502 char *options = get_disassembler_options (gdbarch);
8503 const char *style = "";
8507 FOR_EACH_DISASSEMBLER_OPTION (opt, options)
8508 if (CONST_STRNEQ (opt, "reg-names-"))
8510 style = &opt[strlen ("reg-names-")];
8511 len = strcspn (style, ",");
8514 fprintf_unfiltered (file, "The disassembly style is \"%.*s\".\n", len, style);
8517 /* Return the ARM register name corresponding to register I. */
8519 arm_register_name (struct gdbarch *gdbarch, int i)
8521 const int num_regs = gdbarch_num_regs (gdbarch);
8523 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
8524 && i >= num_regs && i < num_regs + 32)
8526 static const char *const vfp_pseudo_names[] = {
8527 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
8528 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
8529 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
8530 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
8533 return vfp_pseudo_names[i - num_regs];
8536 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
8537 && i >= num_regs + 32 && i < num_regs + 32 + 16)
8539 static const char *const neon_pseudo_names[] = {
8540 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
8541 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
8544 return neon_pseudo_names[i - num_regs - 32];
8547 if (i >= ARRAY_SIZE (arm_register_names))
8548 /* These registers are only supported on targets which supply
8549 an XML description. */
8552 return arm_register_names[i];
8555 /* Test whether the coff symbol specific value corresponds to a Thumb
8559 coff_sym_is_thumb (int val)
8561 return (val == C_THUMBEXT
8562 || val == C_THUMBSTAT
8563 || val == C_THUMBEXTFUNC
8564 || val == C_THUMBSTATFUNC
8565 || val == C_THUMBLABEL);
8568 /* arm_coff_make_msymbol_special()
8569 arm_elf_make_msymbol_special()
8571 These functions test whether the COFF or ELF symbol corresponds to
8572 an address in thumb code, and set a "special" bit in a minimal
8573 symbol to indicate that it does. */
8576 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
8578 elf_symbol_type *elfsym = (elf_symbol_type *) sym;
8580 if (ARM_GET_SYM_BRANCH_TYPE (elfsym->internal_elf_sym.st_target_internal)
8581 == ST_BRANCH_TO_THUMB)
8582 MSYMBOL_SET_SPECIAL (msym);
8586 arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
8588 if (coff_sym_is_thumb (val))
8589 MSYMBOL_SET_SPECIAL (msym);
8593 arm_objfile_data_free (struct objfile *objfile, void *arg)
8595 struct arm_per_objfile *data = (struct arm_per_objfile *) arg;
8598 for (i = 0; i < objfile->obfd->section_count; i++)
8599 VEC_free (arm_mapping_symbol_s, data->section_maps[i]);
8603 arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
8606 const char *name = bfd_asymbol_name (sym);
8607 struct arm_per_objfile *data;
8608 VEC(arm_mapping_symbol_s) **map_p;
8609 struct arm_mapping_symbol new_map_sym;
8611 gdb_assert (name[0] == '$');
8612 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
8615 data = (struct arm_per_objfile *) objfile_data (objfile,
8616 arm_objfile_data_key);
8619 data = OBSTACK_ZALLOC (&objfile->objfile_obstack,
8620 struct arm_per_objfile);
8621 set_objfile_data (objfile, arm_objfile_data_key, data);
8622 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
8623 objfile->obfd->section_count,
8624 VEC(arm_mapping_symbol_s) *);
8626 map_p = &data->section_maps[bfd_get_section (sym)->index];
8628 new_map_sym.value = sym->value;
8629 new_map_sym.type = name[1];
8631 /* Assume that most mapping symbols appear in order of increasing
8632 value. If they were randomly distributed, it would be faster to
8633 always push here and then sort at first use. */
8634 if (!VEC_empty (arm_mapping_symbol_s, *map_p))
8636 struct arm_mapping_symbol *prev_map_sym;
8638 prev_map_sym = VEC_last (arm_mapping_symbol_s, *map_p);
8639 if (prev_map_sym->value >= sym->value)
8642 idx = VEC_lower_bound (arm_mapping_symbol_s, *map_p, &new_map_sym,
8643 arm_compare_mapping_symbols);
8644 VEC_safe_insert (arm_mapping_symbol_s, *map_p, idx, &new_map_sym);
8649 VEC_safe_push (arm_mapping_symbol_s, *map_p, &new_map_sym);
8653 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
8655 struct gdbarch *gdbarch = regcache->arch ();
8656 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
8658 /* If necessary, set the T bit. */
8661 ULONGEST val, t_bit;
8662 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
8663 t_bit = arm_psr_thumb_bit (gdbarch);
8664 if (arm_pc_is_thumb (gdbarch, pc))
8665 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8668 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8673 /* Read the contents of a NEON quad register, by reading from two
8674 double registers. This is used to implement the quad pseudo
8675 registers, and for argument passing in case the quad registers are
8676 missing; vectors are passed in quad registers when using the VFP
8677 ABI, even if a NEON unit is not present. REGNUM is the index of
8678 the quad register, in [0, 15]. */
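/* Worked example (illustrative only, assuming the usual d/q register
   overlap): for quad register q1, name_buf below becomes "d2", so the
   16-byte quad value is assembled from d2 (least significant half) and
   d3 (most significant half); on a big-endian target the d2 contents
   are expected to land in the upper half of BUF (offset 8) instead of
   offset 0.  */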
8680 static enum register_status
8681 arm_neon_quad_read (struct gdbarch *gdbarch, struct regcache *regcache,
8682 int regnum, gdb_byte *buf)
8685 gdb_byte reg_buf[8];
8686 int offset, double_regnum;
8687 enum register_status status;
8689 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8690 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8693 /* d0 is always the least significant half of q0. */
8694 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8699 status = regcache_raw_read (regcache, double_regnum, reg_buf);
8700 if (status != REG_VALID)
8702 memcpy (buf + offset, reg_buf, 8);
8704 offset = 8 - offset;
8705 status = regcache_raw_read (regcache, double_regnum + 1, reg_buf);
8706 if (status != REG_VALID)
8708 memcpy (buf + offset, reg_buf, 8);
8713 static enum register_status
8714 arm_pseudo_read (struct gdbarch *gdbarch, struct regcache *regcache,
8715 int regnum, gdb_byte *buf)
8717 const int num_regs = gdbarch_num_regs (gdbarch);
8719 gdb_byte reg_buf[8];
8720 int offset, double_regnum;
8722 gdb_assert (regnum >= num_regs);
8725 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8726 /* Quad-precision register. */
8727 return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
8730 enum register_status status;
8732 /* Single-precision register. */
8733 gdb_assert (regnum < 32);
8735 /* s0 is always the least significant half of d0. */
8736 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8737 offset = (regnum & 1) ? 0 : 4;
8739 offset = (regnum & 1) ? 4 : 0;
8741 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8742 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8745 status = regcache_raw_read (regcache, double_regnum, reg_buf);
8746 if (status == REG_VALID)
8747 memcpy (buf, reg_buf + offset, 4);
8752 /* Store the contents of BUF to a NEON quad register, by writing to
8753 two double registers. This is used to implement the quad pseudo
8754 registers, and for argument passing in case the quad registers are
8755 missing; vectors are passed in quad registers when using the VFP
8756 ABI, even if a NEON unit is not present. REGNUM is the index
8757 of the quad register, in [0, 15]. */
8760 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
8761 int regnum, const gdb_byte *buf)
8764 int offset, double_regnum;
8766 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8767 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8770 /* d0 is always the least significant half of q0. */
8771 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8776 regcache_raw_write (regcache, double_regnum, buf + offset);
8777 offset = 8 - offset;
8778 regcache_raw_write (regcache, double_regnum + 1, buf + offset);
8782 arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
8783 int regnum, const gdb_byte *buf)
8785 const int num_regs = gdbarch_num_regs (gdbarch);
8787 gdb_byte reg_buf[8];
8788 int offset, double_regnum;
8790 gdb_assert (regnum >= num_regs);
8793 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8794 /* Quad-precision register. */
8795 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
8798 /* Single-precision register. */
8799 gdb_assert (regnum < 32);
8801 /* s0 is always the least significant half of d0. */
8802 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8803 offset = (regnum & 1) ? 0 : 4;
8805 offset = (regnum & 1) ? 4 : 0;
8807 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8808 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8811 regcache_raw_read (regcache, double_regnum, reg_buf);
8812 memcpy (reg_buf + offset, buf, 4);
8813 regcache_raw_write (regcache, double_regnum, reg_buf);
8817 static struct value *
8818 value_of_arm_user_reg (struct frame_info *frame, const void *baton)
8820 const int *reg_p = (const int *) baton;
8821 return value_of_register (*reg_p, frame);
8824 static enum gdb_osabi
8825 arm_elf_osabi_sniffer (bfd *abfd)
8827 unsigned int elfosabi;
8828 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
8830 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
8832 if (elfosabi == ELFOSABI_ARM)
8833 /* GNU tools use this value. Check note sections in this case,
8835 bfd_map_over_sections (abfd,
8836 generic_elf_osabi_sniff_abi_tag_sections,
8839 /* Anything else will be handled by the generic ELF sniffer. */
8844 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
8845 struct reggroup *group)
8847 /* FPS register's type is INT, but belongs to float_reggroup. Besides
8848 this, the FPS register belongs to save_reggroup, restore_reggroup, and
8849 all_reggroup, of course. */
8850 if (regnum == ARM_FPS_REGNUM)
8851 return (group == float_reggroup
8852 || group == save_reggroup
8853 || group == restore_reggroup
8854 || group == all_reggroup);
8856 return default_register_reggroup_p (gdbarch, regnum, group);
8860 /* For backward-compatibility we allow two 'g' packet lengths with
8861 the remote protocol depending on whether FPA registers are
8862 supplied. M-profile targets do not have FPA registers, but some
8863 stubs already exist in the wild which use a 'g' packet which
8864 supplies them albeit with dummy values. The packet format which
8865 includes FPA registers should be considered deprecated for
8866 M-profile targets. */
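/* Worked sizes for the guesses below, assuming the usual values
   INT_REGISTER_SIZE == 4, FP_REGISTER_SIZE == 12 and VFP_REGISTER_SIZE == 8:
     FPA-style layout   : 16*4 + 8*12 + 2*4 = 168 bytes
     plain M-profile    : 16*4 + 4          =  68 bytes
     M-profile plus M4F : 16*4 + 16*8 + 2*4 = 200 bytes  */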
8869 arm_register_g_packet_guesses (struct gdbarch *gdbarch)
8871 if (gdbarch_tdep (gdbarch)->is_m)
8873 /* If we know from the executable this is an M-profile target,
8874 cater for remote targets whose register set layout is the
8875 same as the FPA layout. */
8876 register_remote_g_packet_guess (gdbarch,
8877 /* r0-r12,sp,lr,pc; f0-f7; fps,xpsr */
8878 (16 * INT_REGISTER_SIZE)
8879 + (8 * FP_REGISTER_SIZE)
8880 + (2 * INT_REGISTER_SIZE),
8881 tdesc_arm_with_m_fpa_layout);
8883 /* The regular M-profile layout. */
8884 register_remote_g_packet_guess (gdbarch,
8885 /* r0-r12,sp,lr,pc; xpsr */
8886 (16 * INT_REGISTER_SIZE)
8887 + INT_REGISTER_SIZE,
8890 /* M-profile plus M4F VFP. */
8891 register_remote_g_packet_guess (gdbarch,
8892 /* r0-r12,sp,lr,pc; d0-d15; fpscr,xpsr */
8893 (16 * INT_REGISTER_SIZE)
8894 + (16 * VFP_REGISTER_SIZE)
8895 + (2 * INT_REGISTER_SIZE),
8896 tdesc_arm_with_m_vfp_d16);
8899 /* Otherwise we don't have a useful guess. */
8902 /* Implement the code_of_frame_writable gdbarch method. */
8905 arm_code_of_frame_writable (struct gdbarch *gdbarch, struct frame_info *frame)
8907 if (gdbarch_tdep (gdbarch)->is_m
8908 && get_frame_type (frame) == SIGTRAMP_FRAME)
8910 /* M-profile exception frames return to some magic PCs, which
8911 aren't writable at all.
8919 /* Initialize the current architecture based on INFO. If possible,
8920 re-use an architecture from ARCHES, which is a list of
8921 architectures already created during this debugging session.
8923 Called e.g. at program startup, when reading a core file, and when
8924 reading a binary file. */
8926 static struct gdbarch *
8927 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
8929 struct gdbarch_tdep *tdep;
8930 struct gdbarch *gdbarch;
8931 struct gdbarch_list *best_arch;
8932 enum arm_abi_kind arm_abi = arm_abi_global;
8933 enum arm_float_model fp_model = arm_fp_model;
8934 struct tdesc_arch_data *tdesc_data = NULL;
8936 int vfp_register_count = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
8937 int have_wmmx_registers = 0;
8939 int have_fpa_registers = 1;
8940 const struct target_desc *tdesc = info.target_desc;
8942 /* If we have an object to base this architecture on, try to determine
8945 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
8947 int ei_osabi, e_flags;
8949 switch (bfd_get_flavour (info.abfd))
8951 case bfd_target_coff_flavour:
8952 /* Assume it's an old APCS-style ABI. */
8954 arm_abi = ARM_ABI_APCS;
8957 case bfd_target_elf_flavour:
8958 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
8959 e_flags = elf_elfheader (info.abfd)->e_flags;
8961 if (ei_osabi == ELFOSABI_ARM)
8963 /* GNU tools used to use this value, but do not for EABI
8964 objects. There's nowhere to tag an EABI version
8965 anyway, so assume APCS. */
8966 arm_abi = ARM_ABI_APCS;
8968 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
8970 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
8971 int attr_arch, attr_profile;
8975 case EF_ARM_EABI_UNKNOWN:
8976 /* Assume GNU tools. */
8977 arm_abi = ARM_ABI_APCS;
8980 case EF_ARM_EABI_VER4:
8981 case EF_ARM_EABI_VER5:
8982 arm_abi = ARM_ABI_AAPCS;
8983 /* EABI binaries default to VFP float ordering.
8984 They may also contain build attributes that can
8985 be used to identify if the VFP argument-passing
8987 if (fp_model == ARM_FLOAT_AUTO)
8990 switch (bfd_elf_get_obj_attr_int (info.abfd,
8994 case AEABI_VFP_args_base:
8995 /* "The user intended FP parameter/result
8996 passing to conform to AAPCS, base
8998 fp_model = ARM_FLOAT_SOFT_VFP;
9000 case AEABI_VFP_args_vfp:
9001 /* "The user intended FP parameter/result
9002 passing to conform to AAPCS, VFP
9004 fp_model = ARM_FLOAT_VFP;
9006 case AEABI_VFP_args_toolchain:
9007 /* "The user intended FP parameter/result
9008 passing to conform to tool chain-specific
9009 conventions" - we don't know any such
9010 conventions, so leave it as "auto". */
9012 case AEABI_VFP_args_compatible:
9013 /* "Code is compatible with both the base
9014 and VFP variants; the user did not permit
9015 non-variadic functions to pass FP
9016 parameters/results" - leave it as
9020 /* Attribute value not mentioned in the
9021 November 2012 ABI, so leave it as
9026 fp_model = ARM_FLOAT_SOFT_VFP;
9032 /* Leave it as "auto". */
9033 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
9038 /* Detect M-profile programs. This only works if the
9039 executable file includes build attributes; GCC does
9040 copy them to the executable, but e.g. RealView does
9042 attr_arch = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9044 attr_profile = bfd_elf_get_obj_attr_int (info.abfd,
9046 Tag_CPU_arch_profile);
9047 /* GCC specifies the profile for v6-M; RealView only
9048 specifies the profile for architectures starting with
9049 V7 (as opposed to architectures with a tag
9050 numerically greater than TAG_CPU_ARCH_V7). */
9051 if (!tdesc_has_registers (tdesc)
9052 && (attr_arch == TAG_CPU_ARCH_V6_M
9053 || attr_arch == TAG_CPU_ARCH_V6S_M
9054 || attr_profile == 'M'))
9059 if (fp_model == ARM_FLOAT_AUTO)
9061 int e_flags = elf_elfheader (info.abfd)->e_flags;
9063 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
9066 /* Leave it as "auto". Strictly speaking this case
9067 means FPA, but almost nobody uses that now, and
9068 many toolchains fail to set the appropriate bits
9069 for the floating-point model they use. */
9071 case EF_ARM_SOFT_FLOAT:
9072 fp_model = ARM_FLOAT_SOFT_FPA;
9074 case EF_ARM_VFP_FLOAT:
9075 fp_model = ARM_FLOAT_VFP;
9077 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
9078 fp_model = ARM_FLOAT_SOFT_VFP;
9083 if (e_flags & EF_ARM_BE8)
9084 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
9089 /* Leave it as "auto". */
9094 /* Check any target description for validity. */
9095 if (tdesc_has_registers (tdesc))
9097 /* For most registers we require GDB's default names; but also allow
9098 the numeric names for sp / lr / pc, as a convenience. */
9099 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
9100 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
9101 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
9103 const struct tdesc_feature *feature;
9106 feature = tdesc_find_feature (tdesc,
9107 "org.gnu.gdb.arm.core");
9108 if (feature == NULL)
9110 feature = tdesc_find_feature (tdesc,
9111 "org.gnu.gdb.arm.m-profile");
9112 if (feature == NULL)
9118 tdesc_data = tdesc_data_alloc ();
9121 for (i = 0; i < ARM_SP_REGNUM; i++)
9122 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9123 arm_register_names[i]);
9124 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9127 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9130 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9134 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9135 ARM_PS_REGNUM, "xpsr");
9137 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9138 ARM_PS_REGNUM, "cpsr");
9142 tdesc_data_cleanup (tdesc_data);
9146 feature = tdesc_find_feature (tdesc,
9147 "org.gnu.gdb.arm.fpa");
9148 if (feature != NULL)
9151 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
9152 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9153 arm_register_names[i]);
9156 tdesc_data_cleanup (tdesc_data);
9161 have_fpa_registers = 0;
9163 feature = tdesc_find_feature (tdesc,
9164 "org.gnu.gdb.xscale.iwmmxt");
9165 if (feature != NULL)
9167 static const char *const iwmmxt_names[] = {
9168 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9169 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9170 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9171 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9175 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
9177 &= tdesc_numbered_register (feature, tdesc_data, i,
9178 iwmmxt_names[i - ARM_WR0_REGNUM]);
9180 /* Check for the control registers, but do not fail if they
9182 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
9183 tdesc_numbered_register (feature, tdesc_data, i,
9184 iwmmxt_names[i - ARM_WR0_REGNUM]);
9186 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
9188 &= tdesc_numbered_register (feature, tdesc_data, i,
9189 iwmmxt_names[i - ARM_WR0_REGNUM]);
9193 tdesc_data_cleanup (tdesc_data);
9197 have_wmmx_registers = 1;
9200 /* If we have a VFP unit, check whether the single precision registers
9201 are present. If not, then we will synthesize them as pseudo
9203 feature = tdesc_find_feature (tdesc,
9204 "org.gnu.gdb.arm.vfp");
9205 if (feature != NULL)
9207 static const char *const vfp_double_names[] = {
9208 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9209 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9210 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9211 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9214 /* Require the double precision registers. There must be either
9217 for (i = 0; i < 32; i++)
9219 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9221 vfp_double_names[i]);
9225 if (!valid_p && i == 16)
9228 /* Also require FPSCR. */
9229 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9230 ARM_FPSCR_REGNUM, "fpscr");
9233 tdesc_data_cleanup (tdesc_data);
9237 if (tdesc_unnumbered_register (feature, "s0") == 0)
9238 have_vfp_pseudos = 1;
9240 vfp_register_count = i;
9242 /* If we have VFP, also check for NEON. The architecture allows
9243 NEON without VFP (integer vector operations only), but GDB
9244 does not support that. */
9245 feature = tdesc_find_feature (tdesc,
9246 "org.gnu.gdb.arm.neon");
9247 if (feature != NULL)
9249 /* NEON requires 32 double-precision registers. */
9252 tdesc_data_cleanup (tdesc_data);
9256 /* If there are quad registers defined by the stub, use
9257 their type; otherwise (normally) provide them with
9258 the default type. */
9259 if (tdesc_unnumbered_register (feature, "q0") == 0)
9260 have_neon_pseudos = 1;
9267 /* If there is already a candidate, use it. */
9268 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
9270 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
9272 if (arm_abi != ARM_ABI_AUTO
9273 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
9276 if (fp_model != ARM_FLOAT_AUTO
9277 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
9280 /* There are various other properties in tdep that we do not
9281 need to check here: those derived from a target description,
9282 since gdbarches with a different target description are
9283 automatically disqualified. */
9285 /* Do check is_m, though, since it might come from the binary. */
9286 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
9289 /* Found a match. */
9293 if (best_arch != NULL)
9295 if (tdesc_data != NULL)
9296 tdesc_data_cleanup (tdesc_data);
9297 return best_arch->gdbarch;
9300 tdep = XCNEW (struct gdbarch_tdep);
9301 gdbarch = gdbarch_alloc (&info, tdep);
9303 /* Record additional information about the architecture we are defining.
9304 These are gdbarch discriminators, like the OSABI. */
9305 tdep->arm_abi = arm_abi;
9306 tdep->fp_model = fp_model;
9308 tdep->have_fpa_registers = have_fpa_registers;
9309 tdep->have_wmmx_registers = have_wmmx_registers;
9310 gdb_assert (vfp_register_count == 0
9311 || vfp_register_count == 16
9312 || vfp_register_count == 32);
9313 tdep->vfp_register_count = vfp_register_count;
9314 tdep->have_vfp_pseudos = have_vfp_pseudos;
9315 tdep->have_neon_pseudos = have_neon_pseudos;
9316 tdep->have_neon = have_neon;
9318 arm_register_g_packet_guesses (gdbarch);
9321 switch (info.byte_order_for_code)
9323 case BFD_ENDIAN_BIG:
9324 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
9325 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
9326 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
9327 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
9331 case BFD_ENDIAN_LITTLE:
9332 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
9333 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
9334 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
9335 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
9340 internal_error (__FILE__, __LINE__,
9341 _("arm_gdbarch_init: bad byte order for float format"));
9344 /* On ARM targets char defaults to unsigned. */
9345 set_gdbarch_char_signed (gdbarch, 0);
9347 /* wchar_t is unsigned under the AAPCS. */
9348 if (tdep->arm_abi == ARM_ABI_AAPCS)
9349 set_gdbarch_wchar_signed (gdbarch, 0);
9351 set_gdbarch_wchar_signed (gdbarch, 1);
9353 /* Note: for displaced stepping, this includes the breakpoint, and one word
9354 of additional scratch space. This setting isn't used for anything beside
9355 displaced stepping at present. */
9356 set_gdbarch_max_insn_length (gdbarch, 4 * DISPLACED_MODIFIED_INSNS);
9358 /* This should be low enough for everything. */
9359 tdep->lowest_pc = 0x20;
9360 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
9362 /* The default, for both APCS and AAPCS, is to return small
9363 structures in registers. */
9364 tdep->struct_return = reg_struct_return;
9366 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
9367 set_gdbarch_frame_align (gdbarch, arm_frame_align);
9370 set_gdbarch_code_of_frame_writable (gdbarch, arm_code_of_frame_writable);
9372 set_gdbarch_write_pc (gdbarch, arm_write_pc);
9374 /* Frame handling. */
9375 set_gdbarch_dummy_id (gdbarch, arm_dummy_id);
9376 set_gdbarch_unwind_pc (gdbarch, arm_unwind_pc);
9377 set_gdbarch_unwind_sp (gdbarch, arm_unwind_sp);
9379 frame_base_set_default (gdbarch, &arm_normal_base);
9381 /* Address manipulation. */
9382 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
9384 /* Advance PC across function entry code. */
9385 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
9387 /* Detect whether PC is at a point where the stack has been destroyed. */
9388 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
9390 /* Skip trampolines. */
9391 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
9393 /* The stack grows downward. */
9394 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
9396 /* Breakpoint manipulation. */
9397 set_gdbarch_breakpoint_kind_from_pc (gdbarch, arm_breakpoint_kind_from_pc);
9398 set_gdbarch_sw_breakpoint_from_kind (gdbarch, arm_sw_breakpoint_from_kind);
9399 set_gdbarch_breakpoint_kind_from_current_state (gdbarch,
9400 arm_breakpoint_kind_from_current_state);
9402 /* Information about registers, etc. */
9403 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
9404 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
9405 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
9406 set_gdbarch_register_type (gdbarch, arm_register_type);
9407 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
9409 /* This "info float" is FPA-specific. Use the generic version if we
9411 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
9412 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
9414 /* Internal <-> external register number maps. */
9415 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
9416 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
9418 set_gdbarch_register_name (gdbarch, arm_register_name);
9420 /* Returning results. */
9421 set_gdbarch_return_value (gdbarch, arm_return_value);
9424 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
9426 /* Minsymbol frobbing. */
9427 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
9428 set_gdbarch_coff_make_msymbol_special (gdbarch,
9429 arm_coff_make_msymbol_special);
9430 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
9432 /* Thumb-2 IT block support. */
9433 set_gdbarch_adjust_breakpoint_address (gdbarch,
9434 arm_adjust_breakpoint_address);
9436 /* Virtual tables. */
9437 set_gdbarch_vbit_in_delta (gdbarch, 1);
9439 /* Hook in the ABI-specific overrides, if they have been registered. */
9440 gdbarch_init_osabi (info, gdbarch);
9442 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
9444 /* Add some default predicates. */
9446 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
9447 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
9448 dwarf2_append_unwinders (gdbarch);
9449 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
9450 frame_unwind_append_unwinder (gdbarch, &arm_epilogue_frame_unwind);
9451 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
9453 /* Now we have tuned the configuration, set a few final things,
9454 based on what the OS ABI has told us. */
9456 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
9457 binaries are always marked. */
9458 if (tdep->arm_abi == ARM_ABI_AUTO)
9459 tdep->arm_abi = ARM_ABI_APCS;
9461 /* Watchpoints are not steppable. */
9462 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
9464 /* We used to default to FPA for generic ARM, but almost nobody
9465 uses that now, and we now provide a way for the user to force
9466 the model. So default to the most useful variant. */
9467 if (tdep->fp_model == ARM_FLOAT_AUTO)
9468 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
9470 if (tdep->jb_pc >= 0)
9471 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
9473 /* Floating point sizes and format. */
9474 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
9475 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
9477 set_gdbarch_double_format
9478 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9479 set_gdbarch_long_double_format
9480 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9484 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
9485 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
9488 if (have_vfp_pseudos)
9490 /* NOTE: These are the only pseudo registers used by
9491 the ARM target at the moment. If more are added, a
9492 little more care in numbering will be needed. */
9494 int num_pseudos = 32;
9495 if (have_neon_pseudos)
9497 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
9498 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
9499 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
9504 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
9506 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
9508 /* Override tdesc_register_type to adjust the types of VFP
9509 registers for NEON. */
9510 set_gdbarch_register_type (gdbarch, arm_register_type);
9513 /* Add standard register aliases. We add aliases even for those
9514 names which are used by the current architecture - it's simpler,
9515 and does no harm, since nothing ever lists user registers. */
9516 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
9517 user_reg_add (gdbarch, arm_register_aliases[i].name,
9518 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
9520 set_gdbarch_disassembler_options (gdbarch, &arm_disassembler_options);
9521 set_gdbarch_valid_disassembler_options (gdbarch, disassembler_options_arm ());
9527 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
9529 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9534 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
9535 (unsigned long) tdep->lowest_pc);
9541 static void arm_record_test (void);
9546 _initialize_arm_tdep (void)
9549 const char *setname;
9550 const char *setdesc;
9552 char regdesc[1024], *rdptr = regdesc;
9553 size_t rest = sizeof (regdesc);
9555 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
9557 arm_objfile_data_key
9558 = register_objfile_data_with_cleanup (NULL, arm_objfile_data_free);
9560 /* Add ourselves to objfile event chain. */
9561 observer_attach_new_objfile (arm_exidx_new_objfile);
9563 = register_objfile_data_with_cleanup (NULL, arm_exidx_data_free);
9565 /* Register an ELF OS ABI sniffer for ARM binaries. */
9566 gdbarch_register_osabi_sniffer (bfd_arch_arm,
9567 bfd_target_elf_flavour,
9568 arm_elf_osabi_sniffer);
9570 /* Initialize the standard target descriptions. */
9571 initialize_tdesc_arm_with_m ();
9572 initialize_tdesc_arm_with_m_fpa_layout ();
9573 initialize_tdesc_arm_with_m_vfp_d16 ();
9574 initialize_tdesc_arm_with_iwmmxt ();
9575 initialize_tdesc_arm_with_vfpv2 ();
9576 initialize_tdesc_arm_with_vfpv3 ();
9577 initialize_tdesc_arm_with_neon ();
9579 /* Add root prefix command for all "set arm"/"show arm" commands. */
9580 add_prefix_cmd ("arm", no_class, set_arm_command,
9581 _("Various ARM-specific commands."),
9582 &setarmcmdlist, "set arm ", 0, &setlist);
9584 add_prefix_cmd ("arm", no_class, show_arm_command,
9585 _("Various ARM-specific commands."),
9586 &showarmcmdlist, "show arm ", 0, &showlist);
9589 arm_disassembler_options = xstrdup ("reg-names-std");
9590 const disasm_options_t *disasm_options = disassembler_options_arm ();
9591 int num_disassembly_styles = 0;
9592 for (i = 0; disasm_options->name[i] != NULL; i++)
9593 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9594 num_disassembly_styles++;
9596 /* Initialize the array that will be passed to add_setshow_enum_cmd(). */
9597 valid_disassembly_styles = XNEWVEC (const char *,
9598 num_disassembly_styles + 1);
9599 for (i = j = 0; disasm_options->name[i] != NULL; i++)
9600 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9602 size_t offset = strlen ("reg-names-");
9603 const char *style = disasm_options->name[i];
9604 valid_disassembly_styles[j++] = &style[offset];
9605 length = snprintf (rdptr, rest, "%s - %s\n", &style[offset],
9606 disasm_options->description[i]);
9610 /* Mark the end of valid options. */
9611 valid_disassembly_styles[num_disassembly_styles] = NULL;
9613 /* Create the help text. */
9614 std::string helptext = string_printf ("%s%s%s",
9615 _("The valid values are:\n"),
9617 _("The default is \"std\"."));
9619 add_setshow_enum_cmd("disassembler", no_class,
9620 valid_disassembly_styles, &disassembly_style,
9621 _("Set the disassembly style."),
9622 _("Show the disassembly style."),
9624 set_disassembly_style_sfunc,
9625 show_disassembly_style_sfunc,
9626 &setarmcmdlist, &showarmcmdlist);
9628 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
9629 _("Set usage of ARM 32-bit mode."),
9630 _("Show usage of ARM 32-bit mode."),
9631 _("When off, a 26-bit PC will be used."),
9633 NULL, /* FIXME: i18n: Usage of ARM 32-bit
9635 &setarmcmdlist, &showarmcmdlist);
9637 /* Add a command to allow the user to force the FPU model. */
9638 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
9639 _("Set the floating point type."),
9640 _("Show the floating point type."),
9641 _("auto - Determine the FP type from the OS-ABI.\n\
9642 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
9643 fpa - FPA co-processor (GCC compiled).\n\
9644 softvfp - Software FP with pure-endian doubles.\n\
9645 vfp - VFP co-processor."),
9646 set_fp_model_sfunc, show_fp_model,
9647 &setarmcmdlist, &showarmcmdlist);
9649 /* Add a command to allow the user to force the ABI. */
9650 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
9653 NULL, arm_set_abi, arm_show_abi,
9654 &setarmcmdlist, &showarmcmdlist);
9656 /* Add two commands to allow the user to force the assumed
9658 add_setshow_enum_cmd ("fallback-mode", class_support,
9659 arm_mode_strings, &arm_fallback_mode_string,
9660 _("Set the mode assumed when symbols are unavailable."),
9661 _("Show the mode assumed when symbols are unavailable."),
9662 NULL, NULL, arm_show_fallback_mode,
9663 &setarmcmdlist, &showarmcmdlist);
9664 add_setshow_enum_cmd ("force-mode", class_support,
9665 arm_mode_strings, &arm_force_mode_string,
9666 _("Set the mode assumed even when symbols are available."),
9667 _("Show the mode assumed even when symbols are available."),
9668 NULL, NULL, arm_show_force_mode,
9669 &setarmcmdlist, &showarmcmdlist);
9671 /* Debugging flag. */
9672 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
9673 _("Set ARM debugging."),
9674 _("Show ARM debugging."),
9675 _("When on, arm-specific debugging is enabled."),
9677 NULL, /* FIXME: i18n: "ARM debugging is %s." */
9678 &setdebuglist, &showdebuglist);
9681 selftests::register_test ("arm-record", selftests::arm_record_test);
9686 /* ARM-reversible process record data structures. */
9688 #define ARM_INSN_SIZE_BYTES 4
9689 #define THUMB_INSN_SIZE_BYTES 2
9690 #define THUMB2_INSN_SIZE_BYTES 4
9693 /* Position of the bit within a 32-bit ARM instruction
9694 that defines whether the instruction is a load or store. */
9695 #define INSN_S_L_BIT_NUM 20
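/* For example, in the load/store encodings handled below,
   bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM) is expected to read 1 for a
   load (LDR/LDRB/LDRH) and 0 for a store (STR/STRB/STRH).  */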
9697 #define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
9700 unsigned int reg_len = LENGTH; \
9703 REGS = XNEWVEC (uint32_t, reg_len); \
9704 memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
9709 #define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
9712 unsigned int mem_len = LENGTH; \
9715 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
9716 memcpy(&MEMS->len, &RECORD_BUF[0], \
9717 sizeof(struct arm_mem_r) * LENGTH); \
9722 /* Checks whether the insn is already recorded or yet to be decoded (boolean expression). */
9723 #define INSN_RECORDED(ARM_RECORD) \
9724 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
9726 /* ARM memory record structure. */
9729 uint32_t len; /* Record length. */
9730 uint32_t addr; /* Memory address. */
9733 /* ARM instruction record contains opcode of current insn
9734 and execution state (before entry to decode_insn()),
9735 contains list of to-be-modified registers and
9736 memory blocks (on return from decode_insn()). */
9738 typedef struct insn_decode_record_t
9740 struct gdbarch *gdbarch;
9741 struct regcache *regcache;
9742 CORE_ADDR this_addr; /* Address of the insn being decoded. */
9743 uint32_t arm_insn; /* Should accommodate thumb. */
9744 uint32_t cond; /* Condition code. */
9745 uint32_t opcode; /* Insn opcode. */
9746 uint32_t decode; /* Insn decode bits. */
9747 uint32_t mem_rec_count; /* No of mem records. */
9748 uint32_t reg_rec_count; /* No of reg records. */
9749 uint32_t *arm_regs; /* Registers to be saved for this record. */
9750 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
9751 } insn_decode_record;
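/* A minimal usage sketch (illustrative, not from the original sources): a
   caller fills gdbarch, regcache, this_addr and arm_insn, invokes the
   per-opcode handler, and then consumes arm_regs[0 .. reg_rec_count - 1]
   and arm_mems[0 .. mem_rec_count - 1].  REG_ALLOC / MEM_ALLOC above
   allocate those arrays with XNEWVEC, which the caller is then expected
   to free.  */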
9754 /* Checks ARM SBZ and SBO mandatory fields. */
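/* Illustrative usage, inferred from the callers below: BIT_NUM is 1-based,
   so sbo_sbz (insn, 13, 4, 1) checks that the four bits starting at bit 12
   are all ones (should-be-one), while a final argument of 0 checks that
   they are all zeros (should-be-zero).  */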
9757 sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
9759 uint32_t ones = bits (insn, bit_num - 1, (bit_num - 1) + (len - 1));
9778 enum arm_record_result
9780 ARM_RECORD_SUCCESS = 0,
9781 ARM_RECORD_FAILURE = 1
9788 } arm_record_strx_t;
9799 arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
9800 uint32_t *record_buf_mem, arm_record_strx_t str_type)
9803 struct regcache *reg_cache = arm_insn_r->regcache;
9804 ULONGEST u_regval[2]= {0};
9806 uint32_t reg_src1 = 0, reg_src2 = 0;
9807 uint32_t immed_high = 0, immed_low = 0, offset_8 = 0, tgt_mem_addr = 0;
9809 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
9810 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
9812 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
9814 /* 1) Handle misc store, immediate offset. */
9815 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9816 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9817 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9818 regcache_raw_read_unsigned (reg_cache, reg_src1,
9820 if (ARM_PC_REGNUM == reg_src1)
9822 /* If R15 was used as Rn, it reads as the current PC+8. */
9823 u_regval[0] = u_regval[0] + 8;
9825 offset_8 = (immed_high << 4) | immed_low;
9826 /* Calculate target store address. */
9827 if (14 == arm_insn_r->opcode)
9829 tgt_mem_addr = u_regval[0] + offset_8;
9833 tgt_mem_addr = u_regval[0] - offset_8;
9835 if (ARM_RECORD_STRH == str_type)
9837 record_buf_mem[0] = 2;
9838 record_buf_mem[1] = tgt_mem_addr;
9839 arm_insn_r->mem_rec_count = 1;
9841 else if (ARM_RECORD_STRD == str_type)
9843 record_buf_mem[0] = 4;
9844 record_buf_mem[1] = tgt_mem_addr;
9845 record_buf_mem[2] = 4;
9846 record_buf_mem[3] = tgt_mem_addr + 4;
9847 arm_insn_r->mem_rec_count = 2;
9850 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
9852 /* 2) Store, register offset. */
9854 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9856 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9857 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9858 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9861 /* If R15 was used as Rn, it reads as the current PC+8. */
9862 u_regval[0] = u_regval[0] + 8;
9864 /* Calculate target store address, Rn +/- Rm, register offset. */
9865 if (12 == arm_insn_r->opcode)
9867 tgt_mem_addr = u_regval[0] + u_regval[1];
9871 tgt_mem_addr = u_regval[1] - u_regval[0];
9873 if (ARM_RECORD_STRH == str_type)
9875 record_buf_mem[0] = 2;
9876 record_buf_mem[1] = tgt_mem_addr;
9877 arm_insn_r->mem_rec_count = 1;
9879 else if (ARM_RECORD_STRD == str_type)
9881 record_buf_mem[0] = 4;
9882 record_buf_mem[1] = tgt_mem_addr;
9883 record_buf_mem[2] = 4;
9884 record_buf_mem[3] = tgt_mem_addr + 4;
9885 arm_insn_r->mem_rec_count = 2;
9888 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
9889 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9891 /* 3) Store, immediate pre-indexed. */
9892 /* 5) Store, immediate post-indexed. */
9893 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9894 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9895 offset_8 = (immed_high << 4) | immed_low;
9896 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9897 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9898 /* Calculate target store address, Rn +/- offset_8, immediate offset. */
9899 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9901 tgt_mem_addr = u_regval[0] + offset_8;
9905 tgt_mem_addr = u_regval[0] - offset_8;
9907 if (ARM_RECORD_STRH == str_type)
9909 record_buf_mem[0] = 2;
9910 record_buf_mem[1] = tgt_mem_addr;
9911 arm_insn_r->mem_rec_count = 1;
9913 else if (ARM_RECORD_STRD == str_type)
9915 record_buf_mem[0] = 4;
9916 record_buf_mem[1] = tgt_mem_addr;
9917 record_buf_mem[2] = 4;
9918 record_buf_mem[3] = tgt_mem_addr + 4;
9919 arm_insn_r->mem_rec_count = 2;
9921 /* Record Rn also as it changes. */
9922 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9923 arm_insn_r->reg_rec_count = 1;
9925 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
9926 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9928 /* 4) Store, register pre-indexed. */
9929 /* 6) Store, register post-indexed. */
9930 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9931 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9932 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9933 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9934 /* Calculate target store address, Rn +/- Rm, register offset. */
9935 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9937 tgt_mem_addr = u_regval[0] + u_regval[1];
9941 tgt_mem_addr = u_regval[1] - u_regval[0];
9943 if (ARM_RECORD_STRH == str_type)
9945 record_buf_mem[0] = 2;
9946 record_buf_mem[1] = tgt_mem_addr;
9947 arm_insn_r->mem_rec_count = 1;
9949 else if (ARM_RECORD_STRD == str_type)
9951 record_buf_mem[0] = 4;
9952 record_buf_mem[1] = tgt_mem_addr;
9953 record_buf_mem[2] = 4;
9954 record_buf_mem[3] = tgt_mem_addr + 4;
9955 arm_insn_r->mem_rec_count = 2;
9957 /* Record Rn also as it changes. */
9958 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9959 arm_insn_r->reg_rec_count = 1;
9964 /* Handling ARM extension space insns. */
9967 arm_record_extension_space (insn_decode_record *arm_insn_r)
9969 uint32_t ret = 0; /* Return value: -1: record failure; 0: success. */
9970 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
9971 uint32_t record_buf[8], record_buf_mem[8];
9972 uint32_t reg_src1 = 0;
9973 struct regcache *reg_cache = arm_insn_r->regcache;
9974 ULONGEST u_regval = 0;
9976 gdb_assert (!INSN_RECORDED(arm_insn_r));
9977 /* Handle unconditional insn extension space. */
9979 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
9980 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9981 if (arm_insn_r->cond)
9983 /* PLD has no effect on architectural state, it just affects
9985 if (5 == ((opcode1 & 0xE0) >> 5))
9988 record_buf[0] = ARM_PS_REGNUM;
9989 record_buf[1] = ARM_LR_REGNUM;
9990 arm_insn_r->reg_rec_count = 2;
9992 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
9996 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
9997 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
10000 /* Undefined instruction on ARM V5; need to handle if later
10001 versions define it. */
10004 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
10005 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10006 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
10008 /* Handle arithmetic insn extension space. */
10009 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
10010 && !INSN_RECORDED(arm_insn_r))
10012 /* Handle MLA(S) and MUL(S). */
10013 if (in_inclusive_range (insn_op1, 0U, 3U))
10015 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10016 record_buf[1] = ARM_PS_REGNUM;
10017 arm_insn_r->reg_rec_count = 2;
10019 else if (in_inclusive_range (insn_op1, 4U, 15U))
10021 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
10022 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10023 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10024 record_buf[2] = ARM_PS_REGNUM;
10025 arm_insn_r->reg_rec_count = 3;
10029 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
10030 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
10031 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
10033 /* Handle control insn extension space. */
10035 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
10036 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
10038 if (!bit (arm_insn_r->arm_insn,25))
10040 if (!bits (arm_insn_r->arm_insn, 4, 7))
10042 if ((0 == insn_op1) || (2 == insn_op1))
10045 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10046 arm_insn_r->reg_rec_count = 1;
10048 else if (1 == insn_op1)
10050 /* CPSR is going to be changed. */
10051 record_buf[0] = ARM_PS_REGNUM;
10052 arm_insn_r->reg_rec_count = 1;
10054 else if (3 == insn_op1)
10056 /* SPSR is going to be changed. */
10057 /* We need to get SPSR value, which is yet to be done. */
10061 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
10066 record_buf[0] = ARM_PS_REGNUM;
10067 arm_insn_r->reg_rec_count = 1;
10069 else if (3 == insn_op1)
10072 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10073 arm_insn_r->reg_rec_count = 1;
10076 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
10079 record_buf[0] = ARM_PS_REGNUM;
10080 record_buf[1] = ARM_LR_REGNUM;
10081 arm_insn_r->reg_rec_count = 2;
10083 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
10085 /* QADD, QSUB, QDADD, QDSUB */
10086 record_buf[0] = ARM_PS_REGNUM;
10087 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10088 arm_insn_r->reg_rec_count = 2;
10090 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
10093 record_buf[0] = ARM_PS_REGNUM;
10094 record_buf[1] = ARM_LR_REGNUM;
10095 arm_insn_r->reg_rec_count = 2;
10097 /* Save SPSR also; how? */
10100 else if (8 == bits (arm_insn_r->arm_insn, 4, 7)
10101 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
10102 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
10103 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
10106 if (0 == insn_op1 || 1 == insn_op1)
10108 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
10109 /* We don't do optimization for SMULW<y> where we
10111 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10112 record_buf[1] = ARM_PS_REGNUM;
10113 arm_insn_r->reg_rec_count = 2;
10115 else if (2 == insn_op1)
10118 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10119 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
10120 arm_insn_r->reg_rec_count = 2;
10122 else if (3 == insn_op1)
10125 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10126 arm_insn_r->reg_rec_count = 1;
10132 /* MSR : immediate form. */
10135 /* CPSR is going to be changed. */
10136 record_buf[0] = ARM_PS_REGNUM;
10137 arm_insn_r->reg_rec_count = 1;
10139 else if (3 == insn_op1)
10141 /* SPSR is going to be changed. */
10142 /* We need to get the SPSR value, which is yet to be done. */
10148 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10149 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
10150 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
10152 /* Handle load/store insn extension space. */
10154 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
10155 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
10156 && !INSN_RECORDED(arm_insn_r))
10161 /* These insns change both registers and memory. */
10162 /* SWP or SWPB insn. */
10163 /* Get memory address given by Rn. */
10164 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10165 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
10166 /* SWP insn?  It swaps a word. */
10167 if (8 == arm_insn_r->opcode)
10169 record_buf_mem[0] = 4;
10173 /* SWPB insn swaps only a byte. */
10174 record_buf_mem[0] = 1;
10176 record_buf_mem[1] = u_regval;
10177 arm_insn_r->mem_rec_count = 1;
10178 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10179 arm_insn_r->reg_rec_count = 1;
10181 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10184 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10187 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10190 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10191 record_buf[1] = record_buf[0] + 1;
10192 arm_insn_r->reg_rec_count = 2;
10194 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10197 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10200 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
10202 /* LDRH, LDRSB, LDRSH. */
10203 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10204 arm_insn_r->reg_rec_count = 1;
10209 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
10210 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
10211 && !INSN_RECORDED(arm_insn_r))
10214 /* Handle coprocessor insn extension space. */
10217 /* To be done for ARMv5 and later; as of now we return -1. */
10221 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10222 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10227 /* Handling opcode 000 insns. */
10230 arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
10232 struct regcache *reg_cache = arm_insn_r->regcache;
10233 uint32_t record_buf[8], record_buf_mem[8];
10234 ULONGEST u_regval[2] = {0};
10236 uint32_t reg_src1 = 0, reg_dest = 0;
10237 uint32_t opcode1 = 0;
10239 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10240 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10241 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10243 /* Data processing insn /multiply insn. */
10244 if (9 == arm_insn_r->decode
10245 && ((4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10246 || (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)))
10248 /* Handle multiply instructions. */
10249 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
10250 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
10252 /* Handle MLA and MUL. */
10253 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10254 record_buf[1] = ARM_PS_REGNUM;
10255 arm_insn_r->reg_rec_count = 2;
10257 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10259 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
10260 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10261 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10262 record_buf[2] = ARM_PS_REGNUM;
10263 arm_insn_r->reg_rec_count = 3;
10266 else if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10267 && (11 == arm_insn_r->decode || 13 == arm_insn_r->decode))
10269 /* Handle misc load insns, as 20th bit (L = 1). */
10270 /* LDR insn has a capability to do branching, if
10271 MOV LR, PC is preceded by an LDR insn having Rn as R15;
10272 in that case, it emulates a branch and link insn, and hence we
10273 need to save CPSR and PC as well. I am not sure this is the right
10274 place; the opcode = 010 LDR insn makes this happen if R15 was
10276 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10277 if (15 != reg_dest)
10279 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10280 arm_insn_r->reg_rec_count = 1;
10284 record_buf[0] = reg_dest;
10285 record_buf[1] = ARM_PS_REGNUM;
10286 arm_insn_r->reg_rec_count = 2;
10289 else if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10290 && sbo_sbz (arm_insn_r->arm_insn, 5, 12, 0)
10291 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10292 && 2 == bits (arm_insn_r->arm_insn, 20, 21))
10294 /* Handle MSR insn. */
10295 if (9 == arm_insn_r->opcode)
10297 /* CPSR is going to be changed. */
10298 record_buf[0] = ARM_PS_REGNUM;
10299 arm_insn_r->reg_rec_count = 1;
10303 /* SPSR is going to be changed. */
10304 /* How to read SPSR value? */
10308 else if (9 == arm_insn_r->decode
10309 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10310 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10312 /* Handling SWP, SWPB. */
10313 /* These insns change both registers and memory. */
10314 /* SWP or SWPB insn. */
10316 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10317 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10318 /* SWP insn?  It swaps a word. */
10319 if (8 == arm_insn_r->opcode)
10321 record_buf_mem[0] = 4;
10325 /* SWPB insn swaps only a byte. */
10326 record_buf_mem[0] = 1;
10328 record_buf_mem[1] = u_regval[0];
10329 arm_insn_r->mem_rec_count = 1;
10330 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10331 arm_insn_r->reg_rec_count = 1;
10333 else if (3 == arm_insn_r->decode && 0x12 == opcode1
10334 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10336 /* Handle BLX, branch and link/exchange. */
10337 if (9 == arm_insn_r->opcode)
10339 /* Branch is chosen by setting the T bit of CPSR from bit[0] of Rm,
10340 and R14 stores the return address. */
10341 record_buf[0] = ARM_PS_REGNUM;
10342 record_buf[1] = ARM_LR_REGNUM;
10343 arm_insn_r->reg_rec_count = 2;
10346 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
10348 /* Handle enhanced software breakpoint insn, BKPT. */
10349 /* CPSR is changed so that execution continues in ARM state, with
10350 normal interrupts disabled, entering abort mode. */
10351 /* The PC is set according to the high-vector configuration. */
10352 /* If the user hits the breakpoint and then reverse-executes, we
10353 need to go back with the previous CPSR and
10354 program counter. */
10355 record_buf[0] = ARM_PS_REGNUM;
10356 record_buf[1] = ARM_LR_REGNUM;
10357 arm_insn_r->reg_rec_count = 2;
10359 /* Save SPSR also; how? */
10362 else if (11 == arm_insn_r->decode
10363 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10365 /* Handle enhanced store insns and DSP insns (e.g. LDRD). */
10367 /* Handle str(x) insn */
10368 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10371 else if (1 == arm_insn_r->decode && 0x12 == opcode1
10372 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10374 /* Handle BX, branch and exchange. */
10375 /* Branch is chosen by setting the T bit of CPSR from bit[0] of Rm. */
10376 record_buf[0] = ARM_PS_REGNUM;
10377 arm_insn_r->reg_rec_count = 1;
10379 else if (1 == arm_insn_r->decode && 0x16 == opcode1
10380 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
10381 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
10383 /* Count leading zeros: CLZ. */
10384 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10385 arm_insn_r->reg_rec_count = 1;
10387 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10388 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10389 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
10390 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0)
10393 /* Handle MRS insn. */
10394 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10395 arm_insn_r->reg_rec_count = 1;
10397 else if (arm_insn_r->opcode <= 15)
10399 /* Normal data processing insns. */
10400 /* In all 11 shifter-operand modes, the insn modifies the destination
10401 register, which is specified by bits 12-15. */
10402 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10403 record_buf[1] = ARM_PS_REGNUM;
10404 arm_insn_r->reg_rec_count = 2;
10411 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10412 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10416 /* Handling opcode 001 insns. */
10419 arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
10421 uint32_t record_buf[8], record_buf_mem[8];
10423 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10424 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10426 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10427 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
10428 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10431 /* Handle MSR insn. */
10432 if (9 == arm_insn_r->opcode)
10434 /* CPSR is going to be changed. */
10435 record_buf[0] = ARM_PS_REGNUM;
10436 arm_insn_r->reg_rec_count = 1;
10440 /* SPSR is going to be changed. */
10443 else if (arm_insn_r->opcode <= 15)
10445 /* Normal data processing insns. */
10446 /* In all 11 shifter-operand modes, the insn modifies the destination
10447 register, which is specified by bits 12-15. */
10448 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10449 record_buf[1] = ARM_PS_REGNUM;
10450 arm_insn_r->reg_rec_count = 2;
10457 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10458 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10463 arm_record_media (insn_decode_record *arm_insn_r)
10465 uint32_t record_buf[8];
10467 switch (bits (arm_insn_r->arm_insn, 22, 24))
10470 /* Parallel addition and subtraction, signed */
10472 /* Parallel addition and subtraction, unsigned */
10475 /* Packing, unpacking, saturation and reversal */
10477 int rd = bits (arm_insn_r->arm_insn, 12, 15);
10479 record_buf[arm_insn_r->reg_rec_count++] = rd;
10485 /* Signed multiplies */
10487 int rd = bits (arm_insn_r->arm_insn, 16, 19);
10488 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 22);
10490 record_buf[arm_insn_r->reg_rec_count++] = rd;
10492 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10493 else if (op1 == 0x4)
10494 record_buf[arm_insn_r->reg_rec_count++]
10495 = bits (arm_insn_r->arm_insn, 12, 15);
10501 if (bit (arm_insn_r->arm_insn, 21)
10502 && bits (arm_insn_r->arm_insn, 5, 6) == 0x2)
10505 record_buf[arm_insn_r->reg_rec_count++]
10506 = bits (arm_insn_r->arm_insn, 12, 15);
10508 else if (bits (arm_insn_r->arm_insn, 20, 21) == 0x0
10509 && bits (arm_insn_r->arm_insn, 5, 7) == 0x0)
10511 /* USAD8 and USADA8 */
10512 record_buf[arm_insn_r->reg_rec_count++]
10513 = bits (arm_insn_r->arm_insn, 16, 19);
10520 if (bits (arm_insn_r->arm_insn, 20, 21) == 0x3
10521 && bits (arm_insn_r->arm_insn, 5, 7) == 0x7)
10523 /* Permanently UNDEFINED */
10528 /* BFC, BFI and UBFX */
10529 record_buf[arm_insn_r->reg_rec_count++]
10530 = bits (arm_insn_r->arm_insn, 12, 15);
10539 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10544 /* Handle ARM mode instructions with opcode 010. */
10547 arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
10549 struct regcache *reg_cache = arm_insn_r->regcache;
10551 uint32_t reg_base, reg_dest;
10552 uint32_t offset_12, tgt_mem_addr;
10553 uint32_t record_buf[8], record_buf_mem[8];
10554 unsigned char wback;
10557 /* Calculate wback. */
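/* wback is set for post-indexed addressing (P, bit 24, clear) and for
   pre-indexed addressing with writeback (W, bit 21, set). */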
10558 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
10559 || (bit (arm_insn_r->arm_insn, 21) == 1);
10561 arm_insn_r->reg_rec_count = 0;
10562 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10564 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10566 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
10569 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10570 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
10572 /* The LDR instruction is capable of doing branching. If MOV LR, PC
10573 precedes an LDR instruction having R15 as reg_base, it
10574 emulates a branch and link instruction, and hence we need to save
10575 CPSR and PC as well. */
10576 if (ARM_PC_REGNUM == reg_dest)
10577 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10579 /* If wback is true, also save the base register, which is going to be
10582 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10586 /* STR (immediate), STRB (immediate), STRBT and STRT. */
10588 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
10589 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10591 /* Handle bit U. */
10592 if (bit (arm_insn_r->arm_insn, 23))
10594 /* U == 1: Add the offset. */
10595 tgt_mem_addr = (uint32_t) u_regval + offset_12;
10599 /* U == 0: subtract the offset. */
10600 tgt_mem_addr = (uint32_t) u_regval - offset_12;
10603 /* Bit 22 tells us whether the store instruction writes 1 byte or 4 bytes. */
10605 if (bit (arm_insn_r->arm_insn, 22))
10607 /* STRB and STRBT: 1 byte. */
10608 record_buf_mem[0] = 1;
10612 /* STR and STRT: 4 bytes. */
10613 record_buf_mem[0] = 4;
10616 /* Handle bit P. */
10617 if (bit (arm_insn_r->arm_insn, 24))
10618 record_buf_mem[1] = tgt_mem_addr;
10620 record_buf_mem[1] = (uint32_t) u_regval;
10622 arm_insn_r->mem_rec_count = 1;
10624 /* If wback is true, also save the base register, which is going to be
10627 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10630 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10631 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10635 /* Handling opcode 011 insns. */
10638 arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
10640 struct regcache *reg_cache = arm_insn_r->regcache;
10642 uint32_t shift_imm = 0;
10643 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
10644 uint32_t offset_12 = 0, tgt_mem_addr = 0;
10645 uint32_t record_buf[8], record_buf_mem[8];
10648 ULONGEST u_regval[2];
10650 if (bit (arm_insn_r->arm_insn, 4))
10651 return arm_record_media (arm_insn_r);
10653 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10654 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10656 /* Handle enhanced store insns and LDRD DSP insn,
10657 order begins according to addressing modes for store insns
10661 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10663 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10664 /* The LDR insn is capable of doing branching. If
10665 MOV LR, PC precedes an LDR insn having Rn as R15,
10666 it emulates a branch and link insn, and hence we
10667 need to save CPSR and PC as well. */
10668 if (15 != reg_dest)
10670 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10671 arm_insn_r->reg_rec_count = 1;
10675 record_buf[0] = reg_dest;
10676 record_buf[1] = ARM_PS_REGNUM;
10677 arm_insn_r->reg_rec_count = 2;
10682 if (! bits (arm_insn_r->arm_insn, 4, 11))
10684 /* Store insn, register offset and register pre-indexed,
10685 register post-indexed. */
10687 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10689 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10690 regcache_raw_read_unsigned (reg_cache, reg_src1
10692 regcache_raw_read_unsigned (reg_cache, reg_src2
10694 if (15 == reg_src2)
10696 /* If R15 was used as Rn, its value is the current PC+8. */
10697 /* Pre-indexed mode doesn't reach here; illegal insn. */
10698 u_regval[0] = u_regval[0] + 8;
10700 /* Calculate target store address, Rn +/- Rm, register offset. */
10702 if (bit (arm_insn_r->arm_insn, 23))
10704 tgt_mem_addr = u_regval[0] + u_regval[1];
10708 tgt_mem_addr = u_regval[1] - u_regval[0];
10711 switch (arm_insn_r->opcode)
10725 record_buf_mem[0] = 4;
10740 record_buf_mem[0] = 1;
10744 gdb_assert_not_reached ("no decoding pattern found");
10747 record_buf_mem[1] = tgt_mem_addr;
10748 arm_insn_r->mem_rec_count = 1;
10750 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10751 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10752 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10753 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10754 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10755 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10758 /* Rn is going to be changed in pre-indexed mode and
10759 post-indexed mode as well. */
10760 record_buf[0] = reg_src2;
10761 arm_insn_r->reg_rec_count = 1;
10766 /* Store insn, scaled register offset; scaled pre-indexed. */
10767 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
10769 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10771 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10772 /* Get shift_imm. */
10773 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
10774 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10775 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
10776 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10777 /* Offset_12 used as shift. */
10781 /* Offset_12 used as index. */
10782 offset_12 = u_regval[0] << shift_imm;
10786 offset_12 = (!shift_imm) ? 0 : u_regval[0] >> shift_imm;
10792 if (bit (u_regval[0], 31))
10794 offset_12 = 0xFFFFFFFF;
10803 /* This is arithmetic shift. */
10804 offset_12 = s_word >> shift_imm;
10811 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
10813 /* Get C flag value and shift it by 31. */
10814 offset_12 = (((bit (u_regval[1], 29)) << 31) \
10815 | (u_regval[0]) >> 1);
10819 offset_12 = (u_regval[0] >> shift_imm) \
10821 (sizeof(uint32_t) - shift_imm));
10826 gdb_assert_not_reached ("no decoding pattern found");
10830 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10832 if (bit (arm_insn_r->arm_insn, 23))
10834 tgt_mem_addr = u_regval[1] + offset_12;
10838 tgt_mem_addr = u_regval[1] - offset_12;
10841 switch (arm_insn_r->opcode)
10855 record_buf_mem[0] = 4;
10870 record_buf_mem[0] = 1;
10874 gdb_assert_not_reached ("no decoding pattern found");
10877 record_buf_mem[1] = tgt_mem_addr;
10878 arm_insn_r->mem_rec_count = 1;
10880 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10881 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10882 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10883 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10884 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10885 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10888 /* Rn is going to be changed in register scaled pre-indexed
10889 mode, and scaled post-indexed mode. */
10890 record_buf[0] = reg_src2;
10891 arm_insn_r->reg_rec_count = 1;
10896 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10897 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10901 /* Handle ARM mode instructions with opcode 100. */
10904 arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
10906 struct regcache *reg_cache = arm_insn_r->regcache;
10907 uint32_t register_count = 0, register_bits;
10908 uint32_t reg_base, addr_mode;
10909 uint32_t record_buf[24], record_buf_mem[48];
10913 /* Fetch the list of registers. */
10914 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
10915 arm_insn_r->reg_rec_count = 0;
10917 /* Fetch the base register that contains the address we are loading data
10919 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10921 /* Calculate wback. */
10922 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
10924 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10926 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
10928 /* Find out which registers are going to be loaded from memory. */
10929 while (register_bits)
10931 if (register_bits & 0x00000001)
10932 record_buf[arm_insn_r->reg_rec_count++] = register_count;
10933 register_bits = register_bits >> 1;
10938 /* If wback is true, also save the base register, which is going to be
10941 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10943 /* Save the CPSR register. */
10944 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10948 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
10950 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
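/* addr_mode holds the P:U bits; the switch below maps 0..3 to the
   DA, IA, DB and IB addressing modes. */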
10952 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10954 /* Find out how many registers are going to be stored to memory. */
10955 while (register_bits)
10957 if (register_bits & 0x00000001)
10959 register_bits = register_bits >> 1;
10964 /* STMDA (STMED): Decrement after. */
10966 record_buf_mem[1] = (uint32_t) u_regval
10967 - register_count * INT_REGISTER_SIZE + 4;
10969 /* STM (STMIA, STMEA): Increment after. */
10971 record_buf_mem[1] = (uint32_t) u_regval;
10973 /* STMDB (STMFD): Decrement before. */
10975 record_buf_mem[1] = (uint32_t) u_regval
10976 - register_count * INT_REGISTER_SIZE;
10978 /* STMIB (STMFA): Increment before. */
10980 record_buf_mem[1] = (uint32_t) u_regval + INT_REGISTER_SIZE;
10983 gdb_assert_not_reached ("no decoding pattern found");
10987 record_buf_mem[0] = register_count * INT_REGISTER_SIZE;
10988 arm_insn_r->mem_rec_count = 1;
10990 /* If wback is true, also save the base register, which is going to be
10993 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10996 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10997 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11001 /* Handling opcode 101 insns. */
11004 arm_record_b_bl (insn_decode_record *arm_insn_r)
11006 uint32_t record_buf[8];
11008 /* Handle B, BL, BLX(1) insns. */
11009 /* B simply branches so we do nothing here. */
11010 /* Note: BLX(1) doesn't fall here but instead it falls into
11011 extension space. */
11012 if (bit (arm_insn_r->arm_insn, 24))
11014 record_buf[0] = ARM_LR_REGNUM;
11015 arm_insn_r->reg_rec_count = 1;
11018 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11024 arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
11026 printf_unfiltered (_("Process record does not support instruction "
11027 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11028 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11033 /* Record handler for vector data transfer instructions. */
11036 arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
11038 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
11039 uint32_t record_buf[4];
11041 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
11042 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
11043 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
11044 bit_l = bit (arm_insn_r->arm_insn, 20);
11045 bit_c = bit (arm_insn_r->arm_insn, 8);
11047 /* Handle VMOV instruction. */
11048 if (bit_l && bit_c)
11050 record_buf[0] = reg_t;
11051 arm_insn_r->reg_rec_count = 1;
11053 else if (bit_l && !bit_c)
11055 /* Handle VMOV instruction. */
11056 if (bits_a == 0x00)
11058 record_buf[0] = reg_t;
11059 arm_insn_r->reg_rec_count = 1;
11061 /* Handle VMRS instruction. */
11062 else if (bits_a == 0x07)
11065 reg_t = ARM_PS_REGNUM;
11067 record_buf[0] = reg_t;
11068 arm_insn_r->reg_rec_count = 1;
11071 else if (!bit_l && !bit_c)
11073 /* Handle VMOV instruction. */
11074 if (bits_a == 0x00)
11076 record_buf[0] = ARM_D0_REGNUM + reg_v;
11078 arm_insn_r->reg_rec_count = 1;
11080 /* Handle VMSR instruction. */
11081 else if (bits_a == 0x07)
11083 record_buf[0] = ARM_FPSCR_REGNUM;
11084 arm_insn_r->reg_rec_count = 1;
11087 else if (!bit_l && bit_c)
11089 /* Handle VMOV instruction. */
11090 if (!(bits_a & 0x04))
11092 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
11094 arm_insn_r->reg_rec_count = 1;
11096 /* Handle VDUP instruction. */
11099 if (bit (arm_insn_r->arm_insn, 21))
11101 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11102 record_buf[0] = reg_v + ARM_D0_REGNUM;
11103 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
11104 arm_insn_r->reg_rec_count = 2;
11108 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11109 record_buf[0] = reg_v + ARM_D0_REGNUM;
11110 arm_insn_r->reg_rec_count = 1;
11115 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11119 /* Record handler for extension register load/store instructions. */
11122 arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
11124 uint32_t opcode, single_reg;
11125 uint8_t op_vldm_vstm;
11126 uint32_t record_buf[8], record_buf_mem[128];
11127 ULONGEST u_regval = 0;
11129 struct regcache *reg_cache = arm_insn_r->regcache;
11131 opcode = bits (arm_insn_r->arm_insn, 20, 24);
11132 single_reg = !bit (arm_insn_r->arm_insn, 8);
11133 op_vldm_vstm = opcode & 0x1b;
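/* Bit 8 distinguishes double precision (D registers) from single precision
   (S registers). Masking the opcode with 0x1b drops the D bit (bit 22), so
   VLDM/VSTM variants that differ only in D are handled together. */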
11135 /* Handle VMOV instructions. */
11136 if ((opcode & 0x1e) == 0x04)
11138 if (bit (arm_insn_r->arm_insn, 20)) /* to_arm_registers bit 20? */
11140 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11141 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11142 arm_insn_r->reg_rec_count = 2;
11146 uint8_t reg_m = bits (arm_insn_r->arm_insn, 0, 3);
11147 uint8_t bit_m = bit (arm_insn_r->arm_insn, 5);
11151 /* The first S register number m is REG_M:M (M is bit 5),
11152 the corresponding D register number is REG_M:M / 2, which
11154 record_buf[arm_insn_r->reg_rec_count++] = ARM_D0_REGNUM + reg_m;
11155 /* The second S register number is REG_M:M + 1, the
11156 corresponding D register number is (REG_M:M + 1) / 2.
11157 IOW, if bit M is 1, the first and second S registers
11158 are mapped to different D registers, otherwise, they are
11159 in the same D register. */
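/* For example, REG_M = 2 with M = 1 names S5 and S6, which live in D2 and D3,
   so two D registers are recorded; with M = 0 the pair S4 and S5 both live in D2. */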
11162 record_buf[arm_insn_r->reg_rec_count++]
11163 = ARM_D0_REGNUM + reg_m + 1;
11168 record_buf[0] = ((bit_m << 4) + reg_m + ARM_D0_REGNUM);
11169 arm_insn_r->reg_rec_count = 1;
11173 /* Handle VSTM and VPUSH instructions. */
11174 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
11175 || op_vldm_vstm == 0x12)
11177 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
11178 uint32_t memory_index = 0;
11180 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11181 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11182 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11183 imm_off32 = imm_off8 << 2;
11184 memory_count = imm_off8;
11186 if (bit (arm_insn_r->arm_insn, 23))
11187 start_address = u_regval;
11189 start_address = u_regval - imm_off32;
11191 if (bit (arm_insn_r->arm_insn, 21))
11193 record_buf[0] = reg_rn;
11194 arm_insn_r->reg_rec_count = 1;
11197 while (memory_count > 0)
11201 record_buf_mem[memory_index] = 4;
11202 record_buf_mem[memory_index + 1] = start_address;
11203 start_address = start_address + 4;
11204 memory_index = memory_index + 2;
11208 record_buf_mem[memory_index] = 4;
11209 record_buf_mem[memory_index + 1] = start_address;
11210 record_buf_mem[memory_index + 2] = 4;
11211 record_buf_mem[memory_index + 3] = start_address + 4;
11212 start_address = start_address + 8;
11213 memory_index = memory_index + 4;
11217 arm_insn_r->mem_rec_count = (memory_index >> 1);
11219 /* Handle VLDM instructions. */
11220 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
11221 || op_vldm_vstm == 0x13)
11223 uint32_t reg_count, reg_vd;
11224 uint32_t reg_index = 0;
11225 uint32_t bit_d = bit (arm_insn_r->arm_insn, 22);
11227 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11228 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
11230 /* REG_VD is the first D register number. If the instruction
11231 loads memory to S registers (SINGLE_REG is TRUE), the register
11232 number is (REG_VD << 1 | bit D), so the corresponding D
11233 register number is (REG_VD << 1 | bit D) / 2 = REG_VD. */
11235 reg_vd = reg_vd | (bit_d << 4);
11237 if (bit (arm_insn_r->arm_insn, 21) /* write back */)
11238 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
11240 /* If the instruction loads memory to D register, REG_COUNT should
11241 be divided by 2, according to the ARM Architecture Reference
11242 Manual. If the instruction loads memory to S register, divide by
11243 2 as well because two S registers are mapped to D register. */
11244 reg_count = reg_count / 2;
11245 if (single_reg && bit_d)
11247 /* Increase the register count if S register list starts from
11248 an odd number (bit d is one). */
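/* For example, an imm8 of 5 with bit D set names S1..S5, which live in
   D0..D2, so three D registers are recorded. */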
11252 while (reg_count > 0)
11254 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
11257 arm_insn_r->reg_rec_count = reg_index;
11259 /* VSTR Vector store register. */
11260 else if ((opcode & 0x13) == 0x10)
11262 uint32_t start_address, reg_rn, imm_off32, imm_off8;
11263 uint32_t memory_index = 0;
11265 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11266 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11267 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11268 imm_off32 = imm_off8 << 2;
11270 if (bit (arm_insn_r->arm_insn, 23))
11271 start_address = u_regval + imm_off32;
11273 start_address = u_regval - imm_off32;
11277 record_buf_mem[memory_index] = 4;
11278 record_buf_mem[memory_index + 1] = start_address;
11279 arm_insn_r->mem_rec_count = 1;
11283 record_buf_mem[memory_index] = 4;
11284 record_buf_mem[memory_index + 1] = start_address;
11285 record_buf_mem[memory_index + 2] = 4;
11286 record_buf_mem[memory_index + 3] = start_address + 4;
11287 arm_insn_r->mem_rec_count = 2;
11290 /* VLDR Vector load register. */
11291 else if ((opcode & 0x13) == 0x11)
11293 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11297 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
11298 record_buf[0] = ARM_D0_REGNUM + reg_vd;
11302 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
11303 /* Record register D rather than pseudo register S. */
11304 record_buf[0] = ARM_D0_REGNUM + reg_vd / 2;
11306 arm_insn_r->reg_rec_count = 1;
11309 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11310 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11314 /* Record handler for arm/thumb mode VFP data processing instructions. */
11317 arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
11319 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
11320 uint32_t record_buf[4];
11321 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
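/* curr_insn_type selects, in the switch at the end of this function, which
   destination registers are recorded. */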
11322 enum insn_types curr_insn_type = INSN_INV;
11324 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11325 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
11326 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
11327 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
11328 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
11329 bit_d = bit (arm_insn_r->arm_insn, 22);
11330 opc1 = opc1 & ~0x04;
11332 /* Handle VMLA, VMLS. */
11335 if (bit (arm_insn_r->arm_insn, 10))
11337 if (bit (arm_insn_r->arm_insn, 6))
11338 curr_insn_type = INSN_T0;
11340 curr_insn_type = INSN_T1;
11345 curr_insn_type = INSN_T1;
11347 curr_insn_type = INSN_T2;
11350 /* Handle VNMLA, VNMLS, VNMUL. */
11351 else if (opc1 == 0x01)
11354 curr_insn_type = INSN_T1;
11356 curr_insn_type = INSN_T2;
11359 else if (opc1 == 0x02 && !(opc3 & 0x01))
11361 if (bit (arm_insn_r->arm_insn, 10))
11363 if (bit (arm_insn_r->arm_insn, 6))
11364 curr_insn_type = INSN_T0;
11366 curr_insn_type = INSN_T1;
11371 curr_insn_type = INSN_T1;
11373 curr_insn_type = INSN_T2;
11376 /* Handle VADD, VSUB. */
11377 else if (opc1 == 0x03)
11379 if (!bit (arm_insn_r->arm_insn, 9))
11381 if (bit (arm_insn_r->arm_insn, 6))
11382 curr_insn_type = INSN_T0;
11384 curr_insn_type = INSN_T1;
11389 curr_insn_type = INSN_T1;
11391 curr_insn_type = INSN_T2;
11395 else if (opc1 == 0x08)
11398 curr_insn_type = INSN_T1;
11400 curr_insn_type = INSN_T2;
11402 /* Handle all other vfp data processing instructions. */
11403 else if (opc1 == 0x0b)
11406 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
11408 if (bit (arm_insn_r->arm_insn, 4))
11410 if (bit (arm_insn_r->arm_insn, 6))
11411 curr_insn_type = INSN_T0;
11413 curr_insn_type = INSN_T1;
11418 curr_insn_type = INSN_T1;
11420 curr_insn_type = INSN_T2;
11423 /* Handle VNEG and VABS. */
11424 else if ((opc2 == 0x01 && opc3 == 0x01)
11425 || (opc2 == 0x00 && opc3 == 0x03))
11427 if (!bit (arm_insn_r->arm_insn, 11))
11429 if (bit (arm_insn_r->arm_insn, 6))
11430 curr_insn_type = INSN_T0;
11432 curr_insn_type = INSN_T1;
11437 curr_insn_type = INSN_T1;
11439 curr_insn_type = INSN_T2;
11442 /* Handle VSQRT. */
11443 else if (opc2 == 0x01 && opc3 == 0x03)
11446 curr_insn_type = INSN_T1;
11448 curr_insn_type = INSN_T2;
11451 else if (opc2 == 0x07 && opc3 == 0x03)
11454 curr_insn_type = INSN_T1;
11456 curr_insn_type = INSN_T2;
11458 else if (opc3 & 0x01)
11461 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
11463 if (!bit (arm_insn_r->arm_insn, 18))
11464 curr_insn_type = INSN_T2;
11468 curr_insn_type = INSN_T1;
11470 curr_insn_type = INSN_T2;
11474 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
11477 curr_insn_type = INSN_T1;
11479 curr_insn_type = INSN_T2;
11481 /* Handle VCVTB, VCVTT. */
11482 else if ((opc2 & 0x0e) == 0x02)
11483 curr_insn_type = INSN_T2;
11484 /* Handle VCMP, VCMPE. */
11485 else if ((opc2 & 0x0e) == 0x04)
11486 curr_insn_type = INSN_T3;
11490 switch (curr_insn_type)
11493 reg_vd = reg_vd | (bit_d << 4);
11494 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11495 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
11496 arm_insn_r->reg_rec_count = 2;
11500 reg_vd = reg_vd | (bit_d << 4);
11501 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11502 arm_insn_r->reg_rec_count = 1;
11506 reg_vd = (reg_vd << 1) | bit_d;
11507 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11508 arm_insn_r->reg_rec_count = 1;
11512 record_buf[0] = ARM_FPSCR_REGNUM;
11513 arm_insn_r->reg_rec_count = 1;
11517 gdb_assert_not_reached ("no decoding pattern found");
11521 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11525 /* Handling opcode 110 insns. */
11528 arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
11530 uint32_t op1, op1_ebit, coproc;
11532 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11533 op1 = bits (arm_insn_r->arm_insn, 20, 25);
11534 op1_ebit = bit (arm_insn_r->arm_insn, 20);
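/* Coprocessor numbers 10 and 11 are VFP/Advanced SIMD; anything else is
   treated as a generic coprocessor below. */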
11536 if ((coproc & 0x0e) == 0x0a)
11538 /* Handle extension register ld/st instructions. */
11540 return arm_record_exreg_ld_st_insn (arm_insn_r);
11542 /* 64-bit transfers between arm core and extension registers. */
11543 if ((op1 & 0x3e) == 0x04)
11544 return arm_record_exreg_ld_st_insn (arm_insn_r);
11548 /* Handle coprocessor ld/st instructions. */
11553 return arm_record_unsupported_insn (arm_insn_r);
11556 return arm_record_unsupported_insn (arm_insn_r);
11559 /* Move to coprocessor from two arm core registers. */
11561 return arm_record_unsupported_insn (arm_insn_r);
11563 /* Move to two arm core registers from coprocessor. */
11568 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
11569 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
11570 arm_insn_r->reg_rec_count = 2;
11572 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
11576 return arm_record_unsupported_insn (arm_insn_r);
11579 /* Handling opcode 111 insns. */
11582 arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
11584 uint32_t op, op1_sbit, op1_ebit, coproc;
11585 struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
11586 struct regcache *reg_cache = arm_insn_r->regcache;
11588 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
11589 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11590 op1_sbit = bit (arm_insn_r->arm_insn, 24);
11591 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11592 op = bit (arm_insn_r->arm_insn, 4);
11594 /* Handle arm SWI/SVC system call instructions. */
11597 if (tdep->arm_syscall_record != NULL)
11599 ULONGEST svc_operand, svc_number;
11601 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
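/* With the OABI the syscall number is encoded in the SVC immediate, biased
   by 0x900000; with the EABI the immediate is zero and the number is passed
   in r7. */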
11603 if (svc_operand) /* OABI. */
11604 svc_number = svc_operand - 0x900000;
11606 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
11608 return tdep->arm_syscall_record (reg_cache, svc_number);
11612 printf_unfiltered (_("no syscall record support\n"));
11617 if ((coproc & 0x0e) == 0x0a)
11619 /* VFP data-processing instructions. */
11620 if (!op1_sbit && !op)
11621 return arm_record_vfp_data_proc_insn (arm_insn_r);
11623 /* Advanced SIMD, VFP instructions. */
11624 if (!op1_sbit && op)
11625 return arm_record_vdata_transfer_insn (arm_insn_r);
11629 /* Coprocessor data operations. */
11630 if (!op1_sbit && !op)
11631 return arm_record_unsupported_insn (arm_insn_r);
11633 /* Move to Coprocessor from ARM core register. */
11634 if (!op1_sbit && !op1_ebit && op)
11635 return arm_record_unsupported_insn (arm_insn_r);
11637 /* Move to arm core register from coprocessor. */
11638 if (!op1_sbit && op1_ebit && op)
11640 uint32_t record_buf[1];
11642 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11643 if (record_buf[0] == 15)
11644 record_buf[0] = ARM_PS_REGNUM;
11646 arm_insn_r->reg_rec_count = 1;
11647 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
11653 return arm_record_unsupported_insn (arm_insn_r);
11656 /* Handling opcode 000 insns. */
11659 thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
11661 uint32_t record_buf[8];
11662 uint32_t reg_src1 = 0;
11664 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11666 record_buf[0] = ARM_PS_REGNUM;
11667 record_buf[1] = reg_src1;
11668 thumb_insn_r->reg_rec_count = 2;
11670 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11676 /* Handling opcode 001 insns. */
11679 thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
11681 uint32_t record_buf[8];
11682 uint32_t reg_src1 = 0;
11684 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11686 record_buf[0] = ARM_PS_REGNUM;
11687 record_buf[1] = reg_src1;
11688 thumb_insn_r->reg_rec_count = 2;
11690 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11695 /* Handling opcode 010 insns. */
11698 thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
11700 struct regcache *reg_cache = thumb_insn_r->regcache;
11701 uint32_t record_buf[8], record_buf_mem[8];
11703 uint32_t reg_src1 = 0, reg_src2 = 0;
11704 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
11706 ULONGEST u_regval[2] = {0};
11708 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
11710 if (bit (thumb_insn_r->arm_insn, 12))
11712 /* Handle load/store register offset. */
11713 uint32_t opB = bits (thumb_insn_r->arm_insn, 9, 11);
11715 if (in_inclusive_range (opB, 4U, 7U))
11717 /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH. */
11718 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11719 record_buf[0] = reg_src1;
11720 thumb_insn_r->reg_rec_count = 1;
11722 else if (in_inclusive_range (opB, 0U, 2U))
11724 /* STR(2), STRB(2), STRH(2). */
11725 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11726 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
11727 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11728 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11730 record_buf_mem[0] = 4; /* STR (2). */
11732 record_buf_mem[0] = 1; /* STRB (2). */
11734 record_buf_mem[0] = 2; /* STRH (2). */
11735 record_buf_mem[1] = u_regval[0] + u_regval[1];
11736 thumb_insn_r->mem_rec_count = 1;
11739 else if (bit (thumb_insn_r->arm_insn, 11))
11741 /* Handle load from literal pool. */
11743 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11744 record_buf[0] = reg_src1;
11745 thumb_insn_r->reg_rec_count = 1;
11749 /* Special data instructions and branch and exchange */
11750 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
11751 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
11752 if ((3 == opcode2) && (!opcode3))
11754 /* Branch with exchange. */
11755 record_buf[0] = ARM_PS_REGNUM;
11756 thumb_insn_r->reg_rec_count = 1;
11760 /* Format 8; special data processing insns. */
11761 record_buf[0] = ARM_PS_REGNUM;
11762 record_buf[1] = (bit (thumb_insn_r->arm_insn, 7) << 3
11763 | bits (thumb_insn_r->arm_insn, 0, 2));
11764 thumb_insn_r->reg_rec_count = 2;
11769 /* Format 5; data processing insns. */
11770 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11771 if (bit (thumb_insn_r->arm_insn, 7))
11773 reg_src1 = reg_src1 + 8;
11775 record_buf[0] = ARM_PS_REGNUM;
11776 record_buf[1] = reg_src1;
11777 thumb_insn_r->reg_rec_count = 2;
11780 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11781 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11787 /* Handling opcode 011 insns. */
11790 thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
11792 struct regcache *reg_cache = thumb_insn_r->regcache;
11793 uint32_t record_buf[8], record_buf_mem[8];
11795 uint32_t reg_src1 = 0;
11796 uint32_t opcode = 0, immed_5 = 0;
11798 ULONGEST u_regval = 0;
11800 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11805 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11806 record_buf[0] = reg_src1;
11807 thumb_insn_r->reg_rec_count = 1;
11812 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11813 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11814 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11815 record_buf_mem[0] = 4;
11816 record_buf_mem[1] = u_regval + (immed_5 * 4);
11817 thumb_insn_r->mem_rec_count = 1;
11820 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11821 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11827 /* Handling opcode 100 insns. */
11830 thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
11832 struct regcache *reg_cache = thumb_insn_r->regcache;
11833 uint32_t record_buf[8], record_buf_mem[8];
11835 uint32_t reg_src1 = 0;
11836 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
11838 ULONGEST u_regval = 0;
11840 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11845 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11846 record_buf[0] = reg_src1;
11847 thumb_insn_r->reg_rec_count = 1;
11849 else if (1 == opcode)
11852 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11853 record_buf[0] = reg_src1;
11854 thumb_insn_r->reg_rec_count = 1;
11856 else if (2 == opcode)
11859 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
11860 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
11861 record_buf_mem[0] = 4;
11862 record_buf_mem[1] = u_regval + (immed_8 * 4);
11863 thumb_insn_r->mem_rec_count = 1;
11865 else if (0 == opcode)
11868 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11869 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11870 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11871 record_buf_mem[0] = 2;
11872 record_buf_mem[1] = u_regval + (immed_5 * 2);
11873 thumb_insn_r->mem_rec_count = 1;
11876 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11877 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11883 /* Handling opcode 101 insns. */
11886 thumb_record_misc (insn_decode_record *thumb_insn_r)
11888 struct regcache *reg_cache = thumb_insn_r->regcache;
11890 uint32_t opcode = 0;
11891 uint32_t register_bits = 0, register_count = 0;
11892 uint32_t index = 0, start_address = 0;
11893 uint32_t record_buf[24], record_buf_mem[48];
11896 ULONGEST u_regval = 0;
11898 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11900 if (opcode == 0 || opcode == 1)
11902 /* ADR and ADD (SP plus immediate) */
11904 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11905 record_buf[0] = reg_src1;
11906 thumb_insn_r->reg_rec_count = 1;
11910 /* Miscellaneous 16-bit instructions */
11911 uint32_t opcode2 = bits (thumb_insn_r->arm_insn, 8, 11);
11916 /* SETEND and CPS */
11919 /* ADD/SUB (SP plus immediate) */
11920 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11921 record_buf[0] = ARM_SP_REGNUM;
11922 thumb_insn_r->reg_rec_count = 1;
11924 case 1: /* fall through */
11925 case 3: /* fall through */
11926 case 9: /* fall through */
11931 /* SXTH, SXTB, UXTH, UXTB */
11932 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
11933 thumb_insn_r->reg_rec_count = 1;
11935 case 4: /* fall through */
11938 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
11939 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
11940 while (register_bits)
11942 if (register_bits & 0x00000001)
11944 register_bits = register_bits >> 1;
11946 start_address = u_regval - \
11947 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
11948 thumb_insn_r->mem_rec_count = register_count;
11949 while (register_count)
11951 record_buf_mem[(register_count * 2) - 1] = start_address;
11952 record_buf_mem[(register_count * 2) - 2] = 4;
11953 start_address = start_address + 4;
11956 record_buf[0] = ARM_SP_REGNUM;
11957 thumb_insn_r->reg_rec_count = 1;
11960 /* REV, REV16, REVSH */
11961 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
11962 thumb_insn_r->reg_rec_count = 1;
11964 case 12: /* fall through */
11967 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
11968 while (register_bits)
11970 if (register_bits & 0x00000001)
11971 record_buf[index++] = register_count;
11972 register_bits = register_bits >> 1;
11975 record_buf[index++] = ARM_PS_REGNUM;
11976 record_buf[index++] = ARM_SP_REGNUM;
11977 thumb_insn_r->reg_rec_count = index;
11981 /* Handle enhanced software breakpoint insn, BKPT. */
11982 /* CPSR is changed so that execution continues in ARM state, with normal
11983 interrupts disabled, entering abort mode. */
11984 /* PC is set according to the high vector configuration. */
11985 /* If the user hits the breakpoint and then reverse-executes, we need to go back to
11986 the previous CPSR and Program Counter. */
11987 record_buf[0] = ARM_PS_REGNUM;
11988 record_buf[1] = ARM_LR_REGNUM;
11989 thumb_insn_r->reg_rec_count = 2;
11990 /* We need to save SPSR value, which is not yet done. */
11991 printf_unfiltered (_("Process record does not support instruction "
11992 "0x%0x at address %s.\n"),
11993 thumb_insn_r->arm_insn,
11994 paddress (thumb_insn_r->gdbarch,
11995 thumb_insn_r->this_addr));
11999 /* If-Then, and hints */
12006 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12007 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12013 /* Handling opcode 110 insns. */
12016 thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
12018 struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
12019 struct regcache *reg_cache = thumb_insn_r->regcache;
12021 uint32_t ret = 0; /* Function return value: -1: record failure, 0: success. */
12022 uint32_t reg_src1 = 0;
12023 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
12024 uint32_t index = 0, start_address = 0;
12025 uint32_t record_buf[24], record_buf_mem[48];
12027 ULONGEST u_regval = 0;
12029 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12030 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
12036 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12038 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12039 while (register_bits)
12041 if (register_bits & 0x00000001)
12042 record_buf[index++] = register_count;
12043 register_bits = register_bits >> 1;
12046 record_buf[index++] = reg_src1;
12047 thumb_insn_r->reg_rec_count = index;
12049 else if (0 == opcode2)
12051 /* Handle STMIA. */
12052 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12054 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12055 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12056 while (register_bits)
12058 if (register_bits & 0x00000001)
12060 register_bits = register_bits >> 1;
12062 start_address = u_regval;
12063 thumb_insn_r->mem_rec_count = register_count;
12064 while (register_count)
12066 record_buf_mem[(register_count * 2) - 1] = start_address;
12067 record_buf_mem[(register_count * 2) - 2] = 4;
12068 start_address = start_address + 4;
12072 else if (0x1F == opcode1)
12074 /* Handle arm syscall insn. */
12075 if (tdep->arm_syscall_record != NULL)
12077 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
12078 ret = tdep->arm_syscall_record (reg_cache, u_regval);
12082 printf_unfiltered (_("no syscall record support\n"));
12087 /* B (1), conditional branch is automatically taken care of in process_record,
12088 as PC is saved there. */
12090 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12091 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12097 /* Handling opcode 111 insns. */
12100 thumb_record_branch (insn_decode_record *thumb_insn_r)
12102 uint32_t record_buf[8];
12103 uint32_t bits_h = 0;
12105 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
12107 if (2 == bits_h || 3 == bits_h)
12110 record_buf[0] = ARM_LR_REGNUM;
12111 thumb_insn_r->reg_rec_count = 1;
12113 else if (1 == bits_h)
12116 record_buf[0] = ARM_PS_REGNUM;
12117 record_buf[1] = ARM_LR_REGNUM;
12118 thumb_insn_r->reg_rec_count = 2;
12121 /* B(2) is automatically taken care of in process_record, as PC is
12124 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12129 /* Handler for thumb2 load/store multiple instructions. */
12132 thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
12134 struct regcache *reg_cache = thumb2_insn_r->regcache;
12136 uint32_t reg_rn, op;
12137 uint32_t register_bits = 0, register_count = 0;
12138 uint32_t index = 0, start_address = 0;
12139 uint32_t record_buf[24], record_buf_mem[48];
12141 ULONGEST u_regval = 0;
12143 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12144 op = bits (thumb2_insn_r->arm_insn, 23, 24);
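/* op holds bits 24:23: values 0 and 3 are SRS/RFE, 1 is the increment-after
   LDM/STM form and 2 the decrement-before form. */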
12146 if (0 == op || 3 == op)
12148 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12150 /* Handle RFE instruction. */
12151 record_buf[0] = ARM_PS_REGNUM;
12152 thumb2_insn_r->reg_rec_count = 1;
12156 /* Handle SRS instruction after reading banked SP. */
12157 return arm_record_unsupported_insn (thumb2_insn_r);
12160 else if (1 == op || 2 == op)
12162 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12164 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
12165 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12166 while (register_bits)
12168 if (register_bits & 0x00000001)
12169 record_buf[index++] = register_count;
12172 register_bits = register_bits >> 1;
12174 record_buf[index++] = reg_rn;
12175 record_buf[index++] = ARM_PS_REGNUM;
12176 thumb2_insn_r->reg_rec_count = index;
12180 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
12181 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12182 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12183 while (register_bits)
12185 if (register_bits & 0x00000001)
12188 register_bits = register_bits >> 1;
12193 /* Start address calculation for STM/STMIA/STMEA. */
12194 start_address = u_regval;
12198 /* Start address calculation for STMDB/STMFD. */
12199 start_address = u_regval - register_count * 4;
12202 thumb2_insn_r->mem_rec_count = register_count;
12203 while (register_count)
12205 record_buf_mem[register_count * 2 - 1] = start_address;
12206 record_buf_mem[register_count * 2 - 2] = 4;
12207 start_address = start_address + 4;
12210 record_buf[0] = reg_rn;
12211 record_buf[1] = ARM_PS_REGNUM;
12212 thumb2_insn_r->reg_rec_count = 2;
12216 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12218 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12220 return ARM_RECORD_SUCCESS;
12223 /* Handler for thumb2 load/store (dual/exclusive) and table branch
12227 thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
12229 struct regcache *reg_cache = thumb2_insn_r->regcache;
12231 uint32_t reg_rd, reg_rn, offset_imm;
12232 uint32_t reg_dest1, reg_dest2;
12233 uint32_t address, offset_addr;
12234 uint32_t record_buf[8], record_buf_mem[8];
12235 uint32_t op1, op2, op3;
12237 ULONGEST u_regval[2];
12239 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
12240 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
12241 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
12243 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12245 if (!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
12247 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
12248 record_buf[0] = reg_dest1;
12249 record_buf[1] = ARM_PS_REGNUM;
12250 thumb2_insn_r->reg_rec_count = 2;
12253 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
12255 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12256 record_buf[2] = reg_dest2;
12257 thumb2_insn_r->reg_rec_count = 3;
12262 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12263 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12265 if (0 == op1 && 0 == op2)
12267 /* Handle STREX. */
12268 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12269 address = u_regval[0] + (offset_imm * 4);
12270 record_buf_mem[0] = 4;
12271 record_buf_mem[1] = address;
12272 thumb2_insn_r->mem_rec_count = 1;
12273 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12274 record_buf[0] = reg_rd;
12275 thumb2_insn_r->reg_rec_count = 1;
12277 else if (1 == op1 && 0 == op2)
12279 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12280 record_buf[0] = reg_rd;
12281 thumb2_insn_r->reg_rec_count = 1;
12282 address = u_regval[0];
12283 record_buf_mem[1] = address;
12287 /* Handle STREXB. */
12288 record_buf_mem[0] = 1;
12289 thumb2_insn_r->mem_rec_count = 1;
12293 /* Handle STREXH. */
12294 record_buf_mem[0] = 2;
12295 thumb2_insn_r->mem_rec_count = 1;
12299 /* Handle STREXD. */
12300 address = u_regval[0];
12301 record_buf_mem[0] = 4;
12302 record_buf_mem[2] = 4;
12303 record_buf_mem[3] = address + 4;
12304 thumb2_insn_r->mem_rec_count = 2;
12309 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12311 if (bit (thumb2_insn_r->arm_insn, 24))
12313 if (bit (thumb2_insn_r->arm_insn, 23))
12314 offset_addr = u_regval[0] + (offset_imm * 4);
12316 offset_addr = u_regval[0] - (offset_imm * 4);
12318 address = offset_addr;
12321 address = u_regval[0];
12323 record_buf_mem[0] = 4;
12324 record_buf_mem[1] = address;
12325 record_buf_mem[2] = 4;
12326 record_buf_mem[3] = address + 4;
12327 thumb2_insn_r->mem_rec_count = 2;
12328 record_buf[0] = reg_rn;
12329 thumb2_insn_r->reg_rec_count = 1;
12333 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12335 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12337 return ARM_RECORD_SUCCESS;
12340 /* Handler for thumb2 data processing (shift register and modified immediate)
12344 thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
12346 uint32_t reg_rd, op;
12347 uint32_t record_buf[8];
12349 op = bits (thumb2_insn_r->arm_insn, 21, 24);
12350 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12352 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
12354 record_buf[0] = ARM_PS_REGNUM;
12355 thumb2_insn_r->reg_rec_count = 1;
12359 record_buf[0] = reg_rd;
12360 record_buf[1] = ARM_PS_REGNUM;
12361 thumb2_insn_r->reg_rec_count = 2;
12364 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12366 return ARM_RECORD_SUCCESS;
12369 /* Generic handler for thumb2 instructions which affect destination and PS
12373 thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
12376 uint32_t record_buf[8];
12378 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12380 record_buf[0] = reg_rd;
12381 record_buf[1] = ARM_PS_REGNUM;
12382 thumb2_insn_r->reg_rec_count = 2;
12384 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12386 return ARM_RECORD_SUCCESS;
12389 /* Handler for thumb2 branch and miscellaneous control instructions. */
12392 thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
12394 uint32_t op, op1, op2;
12395 uint32_t record_buf[8];
12397 op = bits (thumb2_insn_r->arm_insn, 20, 26);
12398 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
12399 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12401 /* Handle MSR insn. */
12402 if (!(op1 & 0x2) && 0x38 == op)
12406 /* CPSR is going to be changed. */
12407 record_buf[0] = ARM_PS_REGNUM;
12408 thumb2_insn_r->reg_rec_count = 1;
12412 arm_record_unsupported_insn (thumb2_insn_r);
12416 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
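/* Handle BL and BLX (immediate): LR is written, and BLX can change the
   instruction set state, so CPSR and LR are recorded. */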
12419 record_buf[0] = ARM_PS_REGNUM;
12420 record_buf[1] = ARM_LR_REGNUM;
12421 thumb2_insn_r->reg_rec_count = 2;
12424 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12426 return ARM_RECORD_SUCCESS;
12429 /* Handler for thumb2 store single data item instructions. */
12432 thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
12434 struct regcache *reg_cache = thumb2_insn_r->regcache;
12436 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
12437 uint32_t address, offset_addr;
12438 uint32_t record_buf[8], record_buf_mem[8];
12441 ULONGEST u_regval[2];
12443 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
12444 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
12445 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12446 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12448 if (bit (thumb2_insn_r->arm_insn, 23))
12451 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
12452 offset_addr = u_regval[0] + offset_imm;
12453 address = offset_addr;
12458 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
12460 /* Handle STRB (register). */
12461 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
12462 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
12463 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
12464 offset_addr = u_regval[1] << shift_imm;
12465 address = u_regval[0] + offset_addr;
12469 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12470 if (bit (thumb2_insn_r->arm_insn, 10))
12472 if (bit (thumb2_insn_r->arm_insn, 9))
12473 offset_addr = u_regval[0] + offset_imm;
12475 offset_addr = u_regval[0] - offset_imm;
12477 address = offset_addr;
12480 address = u_regval[0];
12486 /* Store byte instructions. */
12489 record_buf_mem[0] = 1;
12491 /* Store half word instructions. */
12494 record_buf_mem[0] = 2;
12496 /* Store word instructions. */
12499 record_buf_mem[0] = 4;
12503 gdb_assert_not_reached ("no decoding pattern found");
12507 record_buf_mem[1] = address;
12508 thumb2_insn_r->mem_rec_count = 1;
12509 record_buf[0] = reg_rn;
12510 thumb2_insn_r->reg_rec_count = 1;
12512 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12514 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12516 return ARM_RECORD_SUCCESS;
12519 /* Handler for thumb2 load memory hints instructions. */
12522 thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
12524 uint32_t record_buf[8];
12525 uint32_t reg_rt, reg_rn;
12527 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
12528 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12530 if (ARM_PC_REGNUM != reg_rt)
12532 record_buf[0] = reg_rt;
12533 record_buf[1] = reg_rn;
12534 record_buf[2] = ARM_PS_REGNUM;
12535 thumb2_insn_r->reg_rec_count = 3;
12537 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12539 return ARM_RECORD_SUCCESS;
12542 return ARM_RECORD_FAILURE;
12545 /* Handler for thumb2 load word instructions. */
12548 thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
12550 uint32_t record_buf[8];
12552 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
12553 record_buf[1] = ARM_PS_REGNUM;
12554 thumb2_insn_r->reg_rec_count = 2;
12556 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12558 return ARM_RECORD_SUCCESS;
12561 /* Handler for thumb2 long multiply, long multiply accumulate, and
12562 divide instructions. */
12565 thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
12567 uint32_t opcode1 = 0, opcode2 = 0;
12568 uint32_t record_buf[8];
12570 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
12571 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
12573 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
12575 /* Handle SMULL, UMULL, SMLAL. */
12576 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
12577 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12578 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12579 record_buf[2] = ARM_PS_REGNUM;
12580 thumb2_insn_r->reg_rec_count = 3;
12582 else if (1 == opcode1 || 3 == opcode1)
12584 /* Handle SDIV and UDIV. */
12585 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12586 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12587 record_buf[2] = ARM_PS_REGNUM;
12588 thumb2_insn_r->reg_rec_count = 3;
12591 return ARM_RECORD_FAILURE;
12593 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12595 return ARM_RECORD_SUCCESS;
12598 /* Record handler for thumb32 coprocessor instructions. */
12601 thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
12603 if (bit (thumb2_insn_r->arm_insn, 25))
12604 return arm_record_coproc_data_proc (thumb2_insn_r);
12606 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
12609 /* Record handler for advanced SIMD structure load/store instructions. */
12612 thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
12614 struct regcache *reg_cache = thumb2_insn_r->regcache;
12615 uint32_t l_bit, a_bit, b_bits;
12616 uint32_t record_buf[128], record_buf_mem[128];
12617 uint32_t reg_rn, reg_vd, address, f_elem;
12618 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
12621 l_bit = bit (thumb2_insn_r->arm_insn, 21);
12622 a_bit = bit (thumb2_insn_r->arm_insn, 23);
12623 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
12624 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12625 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
12626 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
12627 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
12628 f_elem = 8 / f_ebytes;
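/* f_ebytes is the element size in bytes (1, 2, 4 or 8); f_elem is the number
   of such elements per 64-bit D register. */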
12632 ULONGEST u_regval = 0;
12633 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12634 address = u_regval;
12639 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12641 if (b_bits == 0x07)
12643 else if (b_bits == 0x0a)
12645 else if (b_bits == 0x06)
12647 else if (b_bits == 0x02)
12652 for (index_r = 0; index_r < bf_regs; index_r++)
12654 for (index_e = 0; index_e < f_elem; index_e++)
12656 record_buf_mem[index_m++] = f_ebytes;
12657 record_buf_mem[index_m++] = address;
12658 address = address + f_ebytes;
12659 thumb2_insn_r->mem_rec_count += 1;
12664 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12666 if (b_bits == 0x09 || b_bits == 0x08)
12668 else if (b_bits == 0x03)
12673 for (index_r = 0; index_r < bf_regs; index_r++)
12674 for (index_e = 0; index_e < f_elem; index_e++)
12676 for (loop_t = 0; loop_t < 2; loop_t++)
12678 record_buf_mem[index_m++] = f_ebytes;
12679 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12680 thumb2_insn_r->mem_rec_count += 1;
12682 address = address + (2 * f_ebytes);
12686 else if ((b_bits & 0x0e) == 0x04)
12688 for (index_e = 0; index_e < f_elem; index_e++)
12690 for (loop_t = 0; loop_t < 3; loop_t++)
12692 record_buf_mem[index_m++] = f_ebytes;
12693 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12694 thumb2_insn_r->mem_rec_count += 1;
12696 address = address + (3 * f_ebytes);
12700 else if (!(b_bits & 0x0e))
12702 for (index_e = 0; index_e < f_elem; index_e++)
12704 for (loop_t = 0; loop_t < 4; loop_t++)
12706 record_buf_mem[index_m++] = f_ebytes;
12707 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12708 thumb2_insn_r->mem_rec_count += 1;
12710 address = address + (4 * f_ebytes);
12716 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
12718 if (bft_size == 0x00)
12720 else if (bft_size == 0x01)
12722 else if (bft_size == 0x02)
12728 if (!(b_bits & 0x0b) || b_bits == 0x08)
12729 thumb2_insn_r->mem_rec_count = 1;
12731 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
12732 thumb2_insn_r->mem_rec_count = 2;
12734 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
12735 thumb2_insn_r->mem_rec_count = 3;
12737 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
12738 thumb2_insn_r->mem_rec_count = 4;
12740 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
12742 record_buf_mem[index_m * 2] = f_ebytes;
12743 record_buf_mem[index_m * 2 + 1] = address + (index_m * f_ebytes);
12752 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12753 thumb2_insn_r->reg_rec_count = 1;
12755 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12756 thumb2_insn_r->reg_rec_count = 2;
12758 else if ((b_bits & 0x0e) == 0x04)
12759 thumb2_insn_r->reg_rec_count = 3;
12761 else if (!(b_bits & 0x0e))
12762 thumb2_insn_r->reg_rec_count = 4;
12767 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
12768 thumb2_insn_r->reg_rec_count = 1;
12770 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
12771 thumb2_insn_r->reg_rec_count = 2;
12773 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
12774 thumb2_insn_r->reg_rec_count = 3;
12776 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
12777 thumb2_insn_r->reg_rec_count = 4;
12779 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
12780 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
12784 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
12786 record_buf[index_r] = reg_rn;
12787 thumb2_insn_r->reg_rec_count += 1;
12790 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12792 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12797 /* Decodes thumb2 instruction type and invokes its record handler. */
12799 static unsigned int
12800 thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
12802 uint32_t op, op1, op2;
12804 op = bit (thumb2_insn_r->arm_insn, 15);
12805 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
12806 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
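/* op1 (bits 27-28 of the swapped insn) selects the major encoding group:
   1 covers load/store multiple, load/store dual/exclusive/table branch,
   data-processing (shifted register) and coprocessor insns; 2 covers
   data-processing (immediate) and branches/miscellaneous control; 3 covers
   single loads/stores, register data-processing, multiplies and coprocessor
   insns. */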
12810 if (!(op2 & 0x64))
12812 /* Load/store multiple instruction. */
12813 return thumb2_record_ld_st_multiple (thumb2_insn_r);
12815 else if ((op2 & 0x64) == 0x4)
12817 /* Load/store (dual/exclusive) and table branch instruction. */
12818 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
12820 else if ((op2 & 0x60) == 0x20)
12822 /* Data-processing (shifted register). */
12823 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12825 else if (op2 & 0x40)
12827 /* Co-processor instructions. */
12828 return thumb2_record_coproc_insn (thumb2_insn_r);
12831 else if (op1 == 0x02)
12835 /* Branches and miscellaneous control instructions. */
12836 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
12838 else if (op2 & 0x20)
12840 /* Data-processing (plain binary immediate) instruction. */
12841 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12845 /* Data-processing (modified immediate). */
12846 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12849 else if (op1 == 0x03)
12851 if (!(op2 & 0x71))
12853 /* Store single data item. */
12854 return thumb2_record_str_single_data (thumb2_insn_r);
12856 else if (!((op2 & 0x71) ^ 0x10))
12858 /* Advanced SIMD or structure load/store instructions. */
12859 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
12861 else if (!((op2 & 0x67) ^ 0x01))
12863 /* Load byte, memory hints instruction. */
12864 return thumb2_record_ld_mem_hints (thumb2_insn_r);
12866 else if (!((op2 & 0x67) ^ 0x03))
12868 /* Load halfword, memory hints instruction. */
12869 return thumb2_record_ld_mem_hints (thumb2_insn_r);
12871 else if (!((op2 & 0x67) ^ 0x05))
12873 /* Load word instruction. */
12874 return thumb2_record_ld_word (thumb2_insn_r);
12876 else if (!((op2 & 0x70) ^ 0x20))
12878 /* Data-processing (register) instruction. */
12879 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12881 else if (!((op2 & 0x78) ^ 0x30))
12883 /* Multiply, multiply accumulate, abs diff instruction. */
12884 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12886 else if (!((op2 & 0x78) ^ 0x38))
12888 /* Long multiply, long multiply accumulate, and divide. */
12889 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
12891 else if (op2 & 0x40)
12893 /* Co-processor instructions. */
12894 return thumb2_record_coproc_insn (thumb2_insn_r);
12902 /* Abstract memory reader. */
12904 class abstract_memory_reader
12907 /* Read LEN bytes of target memory at address MEMADDR, placing the
12908 results in GDB's memory at BUF. Return true on success. */
12910 virtual bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) = 0;
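/* Two concrete readers implement this interface below: instruction_reader,
   which reads from the live target, and (under GDB_SELF_TEST)
   instruction_reader_thumb, which feeds canned halfwords to the decoder.  */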
12913 /* Instruction reader from real target. */
12915 class instruction_reader : public abstract_memory_reader
12918 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len)
12920 if (target_read_memory (memaddr, buf, len))
12929 /* Extract an arm/thumb/thumb2 instruction depending on the size, and return 0
12930    on success and a positive value on failure.  */
12932 static int
12933 extract_arm_insn (abstract_memory_reader& reader,
12934 insn_decode_record *insn_record, uint32_t insn_size)
12936 gdb_byte buf[insn_size];
12938 memset (&buf[0], 0, insn_size);
12940 if (!reader.read (insn_record->this_addr, buf, insn_size))
12941 return 1;
12942 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
12943 insn_size,
12944 gdbarch_byte_order_for_code (insn_record->gdbarch));
12945 return 0;
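/* The instruction is read using the code byte order, which may differ from
   the data byte order (for instance, BE8 ARM systems keep data big-endian
   while instructions remain little-endian).  */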
12948 typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
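/* Each record handler inspects one encoding group, records the registers
   and memory locations the instruction may modify in arm_regs/arm_mems,
   and returns ARM_RECORD_SUCCESS, or a failure code for encodings it
   cannot record.  */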
12950 /* Decode an arm/thumb instruction depending on its condition codes and opcodes,
12951    and dispatch it to the matching record handler.  */
12953 static int
12954 decode_insn (abstract_memory_reader &reader, insn_decode_record *arm_record,
12955 record_type_t record_type, uint32_t insn_size)
12958 /* (Starting from numerical 0); bits 25, 26, 27 decode the type of arm
12959    instruction.  */
12960 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
12962 arm_record_data_proc_misc_ld_str, /* 000. */
12963 arm_record_data_proc_imm, /* 001. */
12964 arm_record_ld_st_imm_offset, /* 010. */
12965 arm_record_ld_st_reg_offset, /* 011. */
12966 arm_record_ld_st_multiple, /* 100. */
12967 arm_record_b_bl, /* 101. */
12968 arm_record_asimd_vfp_coproc, /* 110. */
12969 arm_record_coproc_data_proc /* 111. */
12972 /* (Starting from numerical 0); bits 13, 14, 15 decode the type of thumb
12973    instruction.  */
12974 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
12976 thumb_record_shift_add_sub, /* 000. */
12977 thumb_record_add_sub_cmp_mov, /* 001. */
12978 thumb_record_ld_st_reg_offset, /* 010. */
12979 thumb_record_ld_st_imm_offset, /* 011. */
12980 thumb_record_ld_st_stack, /* 100. */
12981 thumb_record_misc, /* 101. */
12982 thumb_record_ldm_stm_swi, /* 110. */
12983 thumb_record_branch /* 111. */
12986 uint32_t ret = 0; /* Return value: 0 on success, non-zero on failure.  */
12987 uint32_t insn_id = 0;
12989 if (extract_arm_insn (reader, arm_record, insn_size))
12993 printf_unfiltered (_("Process record: error reading memory at "
12994 "addr %s len = %d.\n"),
12995 paddress (arm_record->gdbarch,
12996 arm_record->this_addr), insn_size);
13000 else if (ARM_RECORD == record_type)
13002 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
13003 insn_id = bits (arm_record->arm_insn, 25, 27);
13005 if (arm_record->cond == 0xf)
13006 ret = arm_record_extension_space (arm_record);
13007 else
13009 /* The instruction did not fall into the extension space, so decode it
13010    through the standard handler table.  */
13011 ret = arm_handle_insn[insn_id] (arm_record);
13013 if (ret != ARM_RECORD_SUCCESS)
13015 arm_record_unsupported_insn (arm_record);
13019 else if (THUMB_RECORD == record_type)
13021 /* As thumb does not have condition codes, we set negative. */
13022 arm_record->cond = -1;
13023 insn_id = bits (arm_record->arm_insn, 13, 15);
13024 ret = thumb_handle_insn[insn_id] (arm_record);
13025 if (ret != ARM_RECORD_SUCCESS)
13027 arm_record_unsupported_insn (arm_record);
13031 else if (THUMB2_RECORD == record_type)
13033 /* As thumb does not have condition codes, we set negative. */
13034 arm_record->cond = -1;
13036 /* Swap the first halfword of the 32-bit thumb instruction with the second halfword.  */
13037 arm_record->arm_insn
13038 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
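/* For example, the mrc instruction used in the selftest below is stored
   as the halfwords 0xee1d, 0x7f70; reading them in order yields
   0x7f70ee1d on a little-endian target, and the swap above turns that
   into 0xee1d7f70, with the leading halfword in bits 16-31 as the
   thumb2_record_* handlers expect.  */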
13040 ret = thumb2_record_decode_insn_handler (arm_record);
13042 if (ret != ARM_RECORD_SUCCESS)
13044 arm_record_unsupported_insn (arm_record);
13050 /* Unknown record type; this should be unreachable.  */
13051 gdb_assert_not_reached ("not a valid instruction, could not decode");
13054 return ret;
13057 #if GDB_SELF_TEST
13058 namespace selftests {
13060 /* A memory reader that supplies canned 16-bit and 32-bit thumb instructions from an array, for the selftests below.  */
13062 class instruction_reader_thumb : public abstract_memory_reader
13065 template<size_t SIZE>
13066 instruction_reader_thumb (enum bfd_endian endian,
13067 const uint16_t (&insns)[SIZE])
13068 : m_endian (endian), m_insns (insns), m_insns_size (SIZE)
13071 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len)
13073 SELF_CHECK (len == 4 || len == 2);
13074 SELF_CHECK (memaddr % 2 == 0);
13075 SELF_CHECK ((memaddr / 2) < m_insns_size);
13077 store_unsigned_integer (buf, 2, m_endian, m_insns[memaddr / 2]);
13078 if (len == 4)
13080 store_unsigned_integer (&buf[2], 2, m_endian,
13081 m_insns[memaddr / 2 + 1]);
13083 return true;
13087 enum bfd_endian m_endian;
13088 const uint16_t *m_insns;
13089 size_t m_insns_size;
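/* Exercise the process-record instruction decoder on a few canned 16-bit
   and 32-bit Thumb instructions, without requiring a live target.  */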
13092 static void
13093 arm_record_test (void)
13095 struct gdbarch_info info;
13096 gdbarch_info_init (&info);
13097 info.bfd_arch_info = bfd_scan_arch ("arm");
13099 struct gdbarch *gdbarch = gdbarch_find_by_info (info);
13101 SELF_CHECK (gdbarch != NULL);
13103 /* 16-bit Thumb instructions. */
13105 insn_decode_record arm_record;
13107 memset (&arm_record, 0, sizeof (insn_decode_record));
13108 arm_record.gdbarch = gdbarch;
13110 static const uint16_t insns[] = {
13111 /* db b2 uxtb r3, r3 */
13112 0xb2db,
13113 /* cd 58 ldr r5, [r1, r3] */
13114 0x58cd,
13115 };
13117 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
13118 instruction_reader_thumb reader (endian, insns);
13119 int ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13120 THUMB_INSN_SIZE_BYTES);
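/* uxtb r3, r3 modifies only r3, so expect a single register record and no
   memory records.  */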
13122 SELF_CHECK (ret == 0);
13123 SELF_CHECK (arm_record.mem_rec_count == 0);
13124 SELF_CHECK (arm_record.reg_rec_count == 1);
13125 SELF_CHECK (arm_record.arm_regs[0] == 3);
13127 arm_record.this_addr += 2;
13128 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13129 THUMB_INSN_SIZE_BYTES);
13131 SELF_CHECK (ret == 0);
13132 SELF_CHECK (arm_record.mem_rec_count == 0);
13133 SELF_CHECK (arm_record.reg_rec_count == 1);
13134 SELF_CHECK (arm_record.arm_regs[0] == 5);
13137 /* 32-bit Thumb-2 instructions. */
13139 insn_decode_record arm_record;
13141 memset (&arm_record, 0, sizeof (insn_decode_record));
13142 arm_record.gdbarch = gdbarch;
13144 static const uint16_t insns[] = {
13145 /* 1d ee 70 7f mrc 15, 0, r7, cr13, cr0, {3} */
13146 0xee1d, 0x7f70,
13147 };
13149 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
13150 instruction_reader_thumb reader (endian, insns);
13151 int ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
13152 THUMB2_INSN_SIZE_BYTES);
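/* The mrc transfers a coprocessor register into r7, so only r7 should be
   recorded.  */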
13154 SELF_CHECK (ret == 0);
13155 SELF_CHECK (arm_record.mem_rec_count == 0);
13156 SELF_CHECK (arm_record.reg_rec_count == 1);
13157 SELF_CHECK (arm_record.arm_regs[0] == 7);
13160 } // namespace selftests
13161 #endif /* GDB_SELF_TEST */
13163 /* Clean up the register and memory lists allocated while recording.  */
13165 static void
13166 deallocate_reg_mem (insn_decode_record *record)
13168 xfree (record->arm_regs);
13169 xfree (record->arm_mems);
13173 /* Parse the current instruction and record the values of the registers and
13174    memory that it will change into the record_arch_list.  Return -1 if
13175    something goes wrong, 0 otherwise.  */
13177 int
13178 arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
13179 CORE_ADDR insn_addr)
13182 uint32_t no_of_rec = 0;
13183 uint32_t ret = 0; /* Return value: -1 on record failure, 0 on success.  */
13184 ULONGEST t_bit = 0, insn_id = 0;
13186 ULONGEST u_regval = 0;
13188 insn_decode_record arm_record;
13190 memset (&arm_record, 0, sizeof (insn_decode_record));
13191 arm_record.regcache = regcache;
13192 arm_record.this_addr = insn_addr;
13193 arm_record.gdbarch = gdbarch;
13196 if (record_debug > 1)
13198 fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
13200 paddress (gdbarch, arm_record.this_addr));
13203 instruction_reader reader;
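/* Read the first halfword up front: this verifies that the address is
   readable and, in Thumb state, supplies the bits used below to tell
   16-bit from 32-bit encodings.  */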
13204 if (extract_arm_insn (reader, &arm_record, 2))
13208 printf_unfiltered (_("Process record: error reading memory at "
13209 "addr %s len = %d.\n"),
13210 paddress (arm_record.gdbarch,
13211 arm_record.this_addr), 2);
13216 /* Check whether it is an arm or thumb instruction, via the CPSR's T bit.  */
13218 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
13219 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
13222 if (!(u_regval & t_bit))
13224 /* We are decoding arm insn. */
13225 ret = decode_insn (reader, &arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
13229 insn_id = bits (arm_record.arm_insn, 11, 15);
13230 /* Is it a 32-bit thumb (thumb-2) instruction?  Bits 11-15 of the first halfword are 0x1D, 0x1E or 0x1F only for 32-bit encodings.  */
13231 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
13233 ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
13234 THUMB2_INSN_SIZE_BYTES);
13238 /* We are decoding thumb insn. */
13239 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13240 THUMB_INSN_SIZE_BYTES);
13246 /* Record registers. */
13247 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
13248 if (arm_record.arm_regs)
13250 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
13252 if (record_full_arch_list_add_reg
13253 (arm_record.regcache , arm_record.arm_regs[no_of_rec]))
13257 /* Record memories. */
13258 if (arm_record.arm_mems)
13260 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
13262 if (record_full_arch_list_add_mem
13263 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
13264 arm_record.arm_mems[no_of_rec].len))
13269 if (record_full_arch_list_add_end ())
13274 deallocate_reg_mem (&arm_record);