1 /* Common target dependent code for GDB on ARM systems.
3 Copyright (C) 1988, 1989, 1991, 1992, 1993, 1995, 1996, 1998, 1999, 2000,
4 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
5 Free Software Foundation, Inc.
7 This file is part of GDB.
9 This program is free software; you can redistribute it and/or modify
10 it under the terms of the GNU General Public License as published by
11 the Free Software Foundation; either version 3 of the License, or
12 (at your option) any later version.
14 This program is distributed in the hope that it will be useful,
15 but WITHOUT ANY WARRANTY; without even the implied warranty of
16 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 GNU General Public License for more details.
19 You should have received a copy of the GNU General Public License
20 along with this program. If not, see <http://www.gnu.org/licenses/>. */
22 #include <ctype.h> /* XXX for isupper (). */
29 #include "gdb_string.h"
30 #include "dis-asm.h" /* For register styles. */
32 #include "reggroups.h"
35 #include "arch-utils.h"
37 #include "frame-unwind.h"
38 #include "frame-base.h"
39 #include "trad-frame.h"
41 #include "dwarf2-frame.h"
43 #include "prologue-value.h"
44 #include "target-descriptions.h"
45 #include "user-regs.h"
49 #include "gdb/sim-arm.h"
52 #include "coff/internal.h"
55 #include "gdb_assert.h"
58 #include "features/arm-with-m.c"
/* Macros for setting and testing a bit in a minimal symbol that marks
   it as Thumb function.  The MSB of the minimal symbol's "info" field
   is used for this purpose.

   MSYMBOL_SET_SPECIAL	Actually sets the "special" bit.
   MSYMBOL_IS_SPECIAL	Tests the "special" bit in a minimal symbol.  */

#define MSYMBOL_SET_SPECIAL(msym)				\
	MSYMBOL_TARGET_FLAG_1 (msym) = 1

#define MSYMBOL_IS_SPECIAL(msym)				\
	MSYMBOL_TARGET_FLAG_1 (msym)

/* Per-objfile data used for mapping symbols.  Registered via the
   objfile_data mechanism; looked up in arm_find_mapping_symbol.  */
static const struct objfile_data *arm_objfile_data_key;

/* One ELF mapping symbol ($a/$t/$d) entry.
   NOTE(review): the struct body is not visible in this excerpt — the
   fields (presumably a section-relative value and a type character,
   judging from the uses of ->value and ->type below) were elided.  */
struct arm_mapping_symbol
typedef struct arm_mapping_symbol arm_mapping_symbol_s;
DEF_VEC_O(arm_mapping_symbol_s);

/* Per-objfile storage: one sorted vector of mapping symbols per BFD
   section.  NOTE(review): struct body partially elided in this copy.  */
struct arm_per_objfile
  VEC(arm_mapping_symbol_s) **section_maps;
/* The list of available "set arm ..." and "show arm ..." commands.  */
static struct cmd_list_element *setarmcmdlist = NULL;
static struct cmd_list_element *showarmcmdlist = NULL;

/* The type of floating-point to use.  Keep this in sync with enum
   arm_float_model, and the help string in _initialize_arm_tdep.
   NOTE(review): the initializer list was elided in this excerpt.  */
static const char *fp_model_strings[] =

/* A variable that can be configured by the user.  */
static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
static const char *current_fp_model = "auto";

/* The ABI to use.  Keep this in sync with arm_abi_kind.
   NOTE(review): the initializer list was elided in this excerpt.  */
static const char *arm_abi_strings[] =

/* A variable that can be configured by the user.  */
static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
static const char *arm_abi_string = "auto";

/* The execution mode to assume (ARM vs. Thumb) when no better
   information is available.  NOTE(review): initializer elided.  */
static const char *arm_mode_strings[] =

static const char *arm_fallback_mode_string = "auto";
static const char *arm_force_mode_string = "auto";

/* Number of different reg name sets (options).  */
static int num_disassembly_options;

/* The standard register names, and all the valid aliases for them.  Note
   that `fp', `sp' and `pc' are not added in this alias list, because they
   have been added as builtin user registers in
   std-regs.c:_initialize_frame_reg.
   NOTE(review): the struct declarator and the table entries themselves
   were elided in this excerpt; only the category comments remain.  */
} arm_register_aliases[] = {
  /* Basic register numbers.  */
  /* Synonyms (argument and variable registers).  */
  /* Other platform-specific names for r9.  */
  /* Names used by GCC (not listed in the ARM EABI).  */
  /* A special name from the older ATPCS.  */
/* Canonical names for the ARM core registers, indexed by GDB register
   number: r0-r12, sp, lr, pc (0-15), the FPA registers f0-f7 and fps
   (16-24), and cpsr (25).  Aliases such as `fp', `sp' and `pc' live in
   std-regs.c, not here.  */
static const char *const arm_register_names[] =
{
  "r0",  "r1",  "r2",  "r3",   /*  0  1  2  3 */
  "r4",  "r5",  "r6",  "r7",   /*  4  5  6  7 */
  "r8",  "r9",  "r10", "r11",  /*  8  9 10 11 */
  "r12", "sp",  "lr",  "pc",   /* 12 13 14 15 */
  "f0",  "f1",  "f2",  "f3",   /* 16 17 18 19 */
  "f4",  "f5",  "f6",  "f7",   /* 20 21 22 23 */
  "fps", "cpsr"                /* 24 25       */
};
/* Valid register name styles.  */
static const char **valid_disassembly_styles;

/* Disassembly style to use.  Default to "std" register names.  */
static const char *disassembly_style;

/* Forward declarations.  */

/* This is used to keep the bfd arch_info in sync with the disassembly
   style (comment truncated in this excerpt).  */
static void set_disassembly_style_sfunc(char *, int,
					struct cmd_list_element *);
static void set_disassembly_style (void);

/* FPA extended-precision <-> double conversions.
   NOTE(review): the trailing parameter lists of these two prototypes
   were elided in this excerpt.  */
static void convert_from_extended (const struct floatformat *, const void *,
static void convert_to_extended (const struct floatformat *, void *,

/* Pseudo-register access for NEON quad (Q) registers, composed from
   pairs of D registers.  */
static void arm_neon_quad_read (struct gdbarch *gdbarch,
				struct regcache *regcache,
				int regnum, gdb_byte *buf);
static void arm_neon_quad_write (struct gdbarch *gdbarch,
				 struct regcache *regcache,
				 int regnum, const gdb_byte *buf);
/* Cached results of prologue analysis, shared by the ARM frame
   unwinders.  NOTE(review): the field declarations (other than
   saved_regs) were elided in this excerpt; only their comments
   survive.  */
struct arm_prologue_cache
  /* The stack pointer at the time this frame was created; i.e. the
     caller's stack pointer when this function was called.  It is used
     to identify this frame.  */

  /* The frame base for this frame is just prev_sp - frame size.
     FRAMESIZE is the distance from the frame pointer to the
     initial stack pointer.  */

  /* The register used to hold the frame pointer for this frame.  */

  /* Saved register offsets.  */
  struct trad_frame_saved_reg *saved_regs;

static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
				       CORE_ADDR prologue_start,
				       CORE_ADDR prologue_end,
				       struct arm_prologue_cache *cache);

/* Architecture version for displaced stepping.  This affects the behaviour of
   certain instructions, and really should not be hard-wired.  */

#define DISPLACED_STEPPING_ARCH_VERSION		5
/* Addresses for calling Thumb functions have the bit 0 set.
   Here are some macros to test, set, or clear bit 0 of addresses.
   (Bit 0 is never part of a real instruction address on ARM, since
   instructions are at least 2-byte aligned.)  */
#define IS_THUMB_ADDR(addr)	((addr) & 1)
#define MAKE_THUMB_ADDR(addr)	((addr) | 1)
#define UNMAKE_THUMB_ADDR(addr) ((addr) & ~1)

/* Set to true if the 32-bit mode is in use.
   NOTE(review): the variable this comment describes was elided in
   this excerpt (presumably the arm_apcs_32 flag — confirm against
   the full source).  */
/* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode.
   NOTE(review): return type and return statements elided in this
   excerpt; the M-profile branch presumably selects the XPSR T bit
   rather than the CPSR T bit — confirm against the full source.  */
arm_psr_thumb_bit (struct gdbarch *gdbarch)
  if (gdbarch_tdep (gdbarch)->is_m)
/* Determine if FRAME is executing in Thumb mode.  Returns nonzero iff
   the unwound status register has the T bit set.
   NOTE(review): the declaration of `cpsr' was elided in this excerpt.  */
arm_frame_is_thumb (struct frame_info *frame)
  ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));

  /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
     directly (from a signal frame or dummy frame) or by interpreting
     the saved LR (from a prologue or DWARF frame).  So consult it and
     trust the unwinders.  */
  cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);

  return (cpsr & t_bit) != 0;
/* Callback for VEC_lower_bound.  Orders mapping symbols by their
   section-relative value, ascending.  */
arm_compare_mapping_symbols (const struct arm_mapping_symbol *lhs,
			     const struct arm_mapping_symbol *rhs)
  return lhs->value < rhs->value;
/* Search for the mapping symbol covering MEMADDR.  If one is found,
   return its type.  Otherwise, return 0.  If START is non-NULL,
   set *START to the location of the mapping symbol.
   NOTE(review): several lines (braces, NULL checks, the tail of the
   map_key initializer) were elided in this excerpt.  */
arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
  struct obj_section *sec;

  /* If there are mapping symbols, consult them.  */
  sec = find_pc_section (memaddr);
      struct arm_per_objfile *data;
      VEC(arm_mapping_symbol_s) *map;
      /* Key is section-relative, since the stored values are too.  */
      struct arm_mapping_symbol map_key = { memaddr - obj_section_addr (sec),
      data = objfile_data (sec->objfile, arm_objfile_data_key);
	  map = data->section_maps[sec->the_bfd_section->index];
	  if (!VEC_empty (arm_mapping_symbol_s, map))
	      struct arm_mapping_symbol *map_sym;

	      idx = VEC_lower_bound (arm_mapping_symbol_s, map, &map_key,
				     arm_compare_mapping_symbols);

	      /* VEC_lower_bound finds the earliest ordered insertion
		 point.  If the following symbol starts at this exact
		 address, we use that; otherwise, the preceding
		 mapping symbol covers this address.  */
	      if (idx < VEC_length (arm_mapping_symbol_s, map))
		  map_sym = VEC_index (arm_mapping_symbol_s, map, idx);
		  if (map_sym->value == map_key.value)
		      *start = map_sym->value + obj_section_addr (sec);
		      return map_sym->type;

	      /* Fall back to the preceding mapping symbol, if any.  */
		  map_sym = VEC_index (arm_mapping_symbol_s, map, idx - 1);
		  *start = map_sym->value + obj_section_addr (sec);
		  return map_sym->type;
/* Forward declaration; defined later in this file.  */
static CORE_ADDR arm_get_next_pc_raw (struct frame_info *frame,
				      CORE_ADDR pc, int insert_bkpt);

/* Determine if the program counter specified in MEMADDR is in a Thumb
   function.  This function should be called for addresses unrelated to
   any executing frame; otherwise, prefer arm_frame_is_thumb.
   The heuristics below are tried in decreasing order of reliability;
   NOTE(review): the `return' statements after most of the checks were
   elided in this excerpt.  */
arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
  struct obj_section *sec;
  struct minimal_symbol *sym;

  /* If bit 0 of the address is set, assume this is a Thumb address.  */
  if (IS_THUMB_ADDR (memaddr))

  /* If the user wants to override the symbol table, let him.  */
  if (strcmp (arm_force_mode_string, "arm") == 0)
  if (strcmp (arm_force_mode_string, "thumb") == 0)

  /* ARM v6-M and v7-M are always in Thumb mode.  */
  if (gdbarch_tdep (gdbarch)->is_m)

  /* If there are mapping symbols, consult them.  */
  type = arm_find_mapping_symbol (memaddr, NULL);

  /* Thumb functions have a "special" bit set in minimal symbols.  */
  sym = lookup_minimal_symbol_by_pc (memaddr);
    return (MSYMBOL_IS_SPECIAL (sym));

  /* If the user wants to override the fallback mode, let them.  */
  if (strcmp (arm_fallback_mode_string, "arm") == 0)
  if (strcmp (arm_fallback_mode_string, "thumb") == 0)

  /* If we couldn't find any symbol, but we're talking to a running
     target, then trust the current value of $cpsr.  This lets
     "display/i $pc" always show the correct mode (though if there is
     a symbol table we will not reach here, so it still may not be
     displayed in the mode it will be executed).

     As a further heuristic if we detect that we are doing a single-step we
     see what state executing the current instruction ends up with us being
     in (comment truncated in this excerpt).  */
  if (target_has_registers)
      struct frame_info *current_frame = get_current_frame ();
      CORE_ADDR current_pc = get_frame_pc (current_frame);
      int is_thumb = arm_frame_is_thumb (current_frame);

      if (memaddr == current_pc)
	  struct gdbarch *gdbarch = get_frame_arch (current_frame);

	  next_pc = arm_get_next_pc_raw (current_frame, current_pc, FALSE);
	  if (memaddr == gdbarch_addr_bits_remove (gdbarch, next_pc))
	    return IS_THUMB_ADDR (next_pc);

  /* Otherwise we're out of luck; we assume ARM.  */
/* Remove useless bits from addresses in a running program: the Thumb
   bit for Thumb addresses, and (on the 26-bit-PC branch, elided here)
   the PSR bits folded into the PC.  NOTE(review): the signature line,
   the Thumb-mode condition, and the else keyword were elided in this
   excerpt.  */
arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
    return UNMAKE_THUMB_ADDR (val);
    return (val & 0x03fffffc);
/* When reading symbols, we need to zap the low bit of the address,
   which may be set to 1 for Thumb functions.
   NOTE(review): the body (presumably `return val & ~(CORE_ADDR) 1;')
   was elided in this excerpt — confirm against the full source.  */
arm_smash_text_address (struct gdbarch *gdbarch, CORE_ADDR val)
/* Return 1 if PC is the start of a compiler helper function which
   can be safely ignored during prologue skipping.  Recognizes linker
   stubs, soft-float argument-conversion helpers and TLS helpers by
   the name of the minimal symbol at PC.
   NOTE(review): the `return' statements and braces were elided in
   this excerpt.  */
skip_prologue_function (CORE_ADDR pc)
  struct minimal_symbol *msym;

  msym = lookup_minimal_symbol_by_pc (pc);
  /* Only a symbol that starts exactly at PC can identify a helper.  */
  if (msym == NULL || SYMBOL_VALUE_ADDRESS (msym) != pc)

  name = SYMBOL_LINKAGE_NAME (msym);

  /* The GNU linker's Thumb call stub to foo is named
     __foo_from_thumb (comment truncated in this excerpt).  */
  if (strstr (name, "_from_thumb") != NULL)

  /* On soft-float targets, __truncdfsf2 is called to convert promoted
     arguments to their argument types in non-prototyped
     functions (comment truncated in this excerpt).  */
  if (strncmp (name, "__truncdfsf2", strlen ("__truncdfsf2")) == 0)
  if (strncmp (name, "__aeabi_d2f", strlen ("__aeabi_d2f")) == 0)

  /* Internal functions related to thread-local storage.  */
  if (strncmp (name, "__tls_get_addr", strlen ("__tls_get_addr")) == 0)
  if (strncmp (name, "__aeabi_read_tp", strlen ("__aeabi_read_tp")) == 0)
/* Support routines for instruction parsing.  */

/* submask(x): mask with bits 0..x set, inclusive.
   NOTE(review): shifts by the full width of long (x == 31 on LP32)
   would be undefined behaviour; callers appear to pass smaller field
   widths — confirm if reusing elsewhere.  */
#define submask(x) ((1L << ((x) + 1)) - 1)
/* bit(obj,st): bit ST of OBJ (0 or 1).  */
#define bit(obj,st) (((obj) >> (st)) & 1)
/* bits(obj,st,fn): unsigned extraction of the bit field ST..FN of OBJ.  */
#define bits(obj,st,fn) (((obj) >> (st)) & submask ((fn) - (st)))
/* sbits(obj,st,fn): same field, sign-extended from bit FN.  */
#define sbits(obj,st,fn) \
  ((long) (bits(obj,st,fn) | ((long) bit(obj,fn) * ~ submask (fn - st))))
/* Branch target of an ARM B/BL at ADDR: PC reads as ADDR + 8, plus the
   24-bit signed immediate scaled by 4.  */
#define BranchDest(addr,instr) \
  ((CORE_ADDR) (((long) (addr)) + 8 + (sbits (instr, 0, 23) << 2)))

/* Extract the immediate from instruction movw/movt of encoding T.  INSN1 is
   the first 16-bit of instruction, and INSN2 is the second 16-bit of
   instruction (comment truncated in this excerpt).  */
#define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
  ((bits ((insn1), 0, 3) << 12)               \
   | (bits ((insn1), 10, 10) << 11)           \
   | (bits ((insn2), 12, 14) << 8)            \
   | bits ((insn2), 0, 7))

/* Extract the immediate from instruction movw/movt of encoding A.  INSN is
   the 32-bit instruction.  */
#define EXTRACT_MOVW_MOVT_IMM_A(insn) \
  ((bits ((insn), 16, 19) << 12) \
   | bits ((insn), 0, 11))
/* Decode immediate value; implements ThumbExpandImmediate pseudo-op.
   NOTE(review): the switch/case (or if chain) selecting between the
   four replication patterns, and the final rotated-constant condition,
   were elided in this excerpt; only the return expressions remain.  */
thumb_expand_immediate (unsigned int imm)
  /* Top five bits of the 12-bit modified immediate select the pattern;
     for the rotated form, COUNT is the rotation amount.  */
  unsigned int count = imm >> 7;

      /* 0x00XY00XY replication.  */
      return (imm & 0xff) | ((imm & 0xff) << 16);
      /* 0xXY00XY00 replication.  */
      return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
      /* 0xXYXYXYXY replication.  */
      return (imm & 0xff) | ((imm & 0xff) << 8)
	     | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);

  /* Rotated 8-bit constant: 1bcdefg shifted into position.  */
  return (0x80 | (imm & 0x7f)) << (32 - count);
/* Return 1 if the 16-bit Thumb instruction INST might change
   control flow, 0 otherwise.
   NOTE(review): the `return 1;' bodies and the final `return 0;'
   were elided in this excerpt.  */
thumb_instruction_changes_pc (unsigned short inst)
  if ((inst & 0xff00) == 0xbd00)	/* pop {rlist, pc} */

  if ((inst & 0xf000) == 0xd000)	/* conditional branch */

  if ((inst & 0xf800) == 0xe000)	/* unconditional branch */

  if ((inst & 0xff00) == 0x4700)	/* bx REG, blx REG */

  if ((inst & 0xff87) == 0x4687)	/* mov pc, REG */

  if ((inst & 0xf500) == 0xb100)	/* CBNZ or CBZ.  */
/* Return 1 if the 32-bit Thumb instruction in INST1 and INST2
   might change control flow, 0 otherwise.
   NOTE(review): the `return' statements and several braces were
   elided in this excerpt; only the encoding tests remain.  */
thumb2_instruction_changes_pc (unsigned short inst1, unsigned short inst2)
  if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
      /* Branches and miscellaneous control instructions.  */

      if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
	  /* B, BL, BLX.  */
      else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)
	  /* SUBS PC, LR, #imm8.  */
      else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
	  /* Conditional branch.  */

  if ((inst1 & 0xfe50) == 0xe810)
      /* Load multiple or RFE.  The four bit7/bit8 combinations below
	 distinguish LDM/LDMDB variants from RFE/RFEDB.  */

      if (bit (inst1, 7) && !bit (inst1, 8))
      else if (!bit (inst1, 7) && bit (inst1, 8))
      else if (bit (inst1, 7) && bit (inst1, 8))
      else if (!bit (inst1, 7) && !bit (inst1, 8))

  if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
      /* MOV PC or MOVS PC.  */

  if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
      /* LDR with PC destination; literal (Rn == PC) and
	 register-offset forms checked below.  */
      if (bits (inst1, 0, 3) == 15)
      if ((inst2 & 0x0fc0) == 0x0000)

  if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)
      /* TBB (table branch, byte offsets).  */
  if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)
      /* TBH (table branch, halfword offsets).  */
/* Analyze a Thumb prologue, looking for a recognizable stack frame
   and frame pointer.  Scan until we encounter a store that could
   clobber the stack frame unexpectedly, or an unknown instruction.
   Return the last address which is definitely safe to skip for an
   initial breakpoint.

   The scan interprets instructions symbolically (prologue-value
   machinery from prologue-value.h): REGS tracks the abstract contents
   of the 16 core registers, STACK the abstract stack frame.  If CACHE
   is non-NULL, the frame register, frame size and saved-register
   offsets discovered here are recorded in it.

   NOTE(review): this excerpt has many interior lines elided (braces,
   some declarations, `break' statements, continuation lines); code
   tokens below are kept exactly as extracted.  */
thumb_analyze_prologue (struct gdbarch *gdbarch,
			CORE_ADDR start, CORE_ADDR limit,
			struct arm_prologue_cache *cache)
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  struct pv_area *stack;
  struct cleanup *back_to;
  CORE_ADDR unrecognized_pc = 0;

  /* Start with every register holding its own entry value.  */
  for (i = 0; i < 16; i++)
    regs[i] = pv_register (i, 0);
  stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
  back_to = make_cleanup_free_pv_area (stack);

  while (start < limit)
      insn = read_memory_unsigned_integer (start, 2, byte_order_for_code);

      if ((insn & 0xfe00) == 0xb400)		/* push { rlist } */
	  if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))

	  /* Bits 0-7 contain a mask for registers R0-R7.  Bit 8 says
	     whether to save LR (R14).  */
	  mask = (insn & 0xff) | ((insn & 0x100) << 6);

	  /* Calculate offsets of saved R0-R7 and LR.  */
	  for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
	    if (mask & (1 << regno))
		regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
		pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
      else if ((insn & 0xff00) == 0xb000)	/* add sp, #simm  OR
	  offset = (insn & 0x7f) << 2;		/* get scaled offset */
	  if (insn & 0x80)			/* Check for SUB.  */
	    regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
	    regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
      else if ((insn & 0xf800) == 0xa800)	/* add Rd, sp, #imm */
	regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
      else if ((insn & 0xfe00) == 0x1c00	/* add Rd, Rn, #imm */
	       && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
	regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
      else if ((insn & 0xf800) == 0x3000	/* add Rd, #imm */
	       && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
	regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
      else if ((insn & 0xfe00) == 0x1800	/* add Rd, Rn, Rm */
	       && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
	       && pv_is_constant (regs[bits (insn, 3, 5)]))
	regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
					  regs[bits (insn, 6, 8)]);
      else if ((insn & 0xff00) == 0x4400	/* add Rd, Rm */
	       && pv_is_constant (regs[bits (insn, 3, 6)]))
	  int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
	  int rm = bits (insn, 3, 6);
	  regs[rd] = pv_add (regs[rd], regs[rm]);
      else if ((insn & 0xff00) == 0x4600)	/* mov hi, lo or mov lo, hi */
	  int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
	  int src_reg = (insn & 0x78) >> 3;
	  regs[dst_reg] = regs[src_reg];
      else if ((insn & 0xf800) == 0x9000)	/* str rd, [sp, #off] */
	  /* Handle stores to the stack.  Normally pushes are used,
	     but with GCC -mtpcs-frame, there may be other stores
	     in the prologue to create the frame.  */
	  int regno = (insn >> 8) & 0x7;

	  offset = (insn & 0xff) << 2;
	  addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);

	  if (pv_area_store_would_trash (stack, addr))

	  pv_area_store (stack, addr, 4, regs[regno]);
      else if ((insn & 0xf800) == 0x6000)	/* str rd, [rn, #off] */
	  int rd = bits (insn, 0, 2);
	  int rn = bits (insn, 3, 5);

	  offset = bits (insn, 6, 10) << 2;
	  addr = pv_add_constant (regs[rn], offset);

	  if (pv_area_store_would_trash (stack, addr))

	  pv_area_store (stack, addr, 4, regs[rd]);
      else if (((insn & 0xf800) == 0x7000	/* strb Rd, [Rn, #off] */
		|| (insn & 0xf800) == 0x8000)	/* strh Rd, [Rn, #off] */
	       && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
	/* Ignore stores of argument registers to the stack.  */
      else if ((insn & 0xf800) == 0xc800	/* ldmia Rn!, { registers } */
	       && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
	/* Ignore block loads from the stack, potentially copying
	   parameters from memory.  */
      else if ((insn & 0xf800) == 0x9800	/* ldr Rd, [Rn, #immed] */
	       || ((insn & 0xf800) == 0x6800	/* ldr Rd, [sp, #immed] */
		   && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
	/* Similarly ignore single loads from the stack.  */
      else if ((insn & 0xffc0) == 0x0000	/* lsls Rd, Rm, #0 */
	       || (insn & 0xffc0) == 0x1c00)	/* add Rd, Rn, #0 */
	/* Skip register copies, i.e. saves to another register
	   instead of the stack.  */
      else if ((insn & 0xf800) == 0x2000)	/* movs Rd, #imm */
	/* Recognize constant loads; even with small stacks these are necessary
	   (comment truncated in this excerpt).  */
	regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
      else if ((insn & 0xf800) == 0x4800)	/* ldr Rd, [pc, #imm] */
	  /* Constant pool loads, for the same reason.  */
	  unsigned int constant;

	  /* PC-relative load: PC reads as (start + 4) aligned.  */
	  loc = start + 4 + bits (insn, 0, 7) * 4;
	  constant = read_memory_unsigned_integer (loc, 4, byte_order);
	  regs[bits (insn, 8, 10)] = pv_constant (constant);
      else if ((insn & 0xe000) == 0xe000)
	  /* 32-bit Thumb-2 instruction: fetch the second halfword.  */
	  unsigned short inst2;

	  inst2 = read_memory_unsigned_integer (start + 2, 2,
						byte_order_for_code);

	  if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
	      /* BL, BLX.  Allow some special function calls when
		 skipping the prologue; GCC generates these before
		 storing arguments to the stack.  */
	      int j1, j2, imm1, imm2;

	      imm1 = sbits (insn, 0, 10);
	      imm2 = bits (inst2, 0, 10);
	      j1 = bit (inst2, 13);
	      j2 = bit (inst2, 11);

	      offset = ((imm1 << 12) + (imm2 << 1));
	      offset ^= ((!j2) << 22) | ((!j1) << 23);

	      nextpc = start + 4 + offset;
	      /* For BLX make sure to clear the low bits.  */
	      if (bit (inst2, 12) == 0)
		nextpc = nextpc & 0xfffffffc;

	      if (!skip_prologue_function (nextpc))
	  else if ((insn & 0xffd0) == 0xe900	/* stmdb Rn{!},
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	      pv_t addr = regs[bits (insn, 0, 3)];

	      if (pv_area_store_would_trash (stack, addr))

	      /* Calculate offsets of saved registers.  */
	      for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
		if (inst2 & (1 << regno))
		    addr = pv_add_constant (addr, -4);
		    pv_area_store (stack, addr, 4, regs[regno]);

		regs[bits (insn, 0, 3)] = addr;
	  else if ((insn & 0xff50) == 0xe940	/* strd Rt, Rt2,
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	      int regno1 = bits (inst2, 12, 15);
	      int regno2 = bits (inst2, 8, 11);
	      pv_t addr = regs[bits (insn, 0, 3)];

	      offset = inst2 & 0xff;
		addr = pv_add_constant (addr, offset);
		addr = pv_add_constant (addr, -offset);

	      if (pv_area_store_would_trash (stack, addr))

	      pv_area_store (stack, addr, 4, regs[regno1]);
	      pv_area_store (stack, pv_add_constant (addr, 4),

		regs[bits (insn, 0, 3)] = addr;
	  else if ((insn & 0xfff0) == 0xf8c0	/* str Rt,[Rn,+/-#imm]{!} */
		   && (inst2 & 0x0c00) == 0x0c00
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	      int regno = bits (inst2, 12, 15);
	      pv_t addr = regs[bits (insn, 0, 3)];

	      offset = inst2 & 0xff;
		addr = pv_add_constant (addr, offset);
		addr = pv_add_constant (addr, -offset);

	      if (pv_area_store_would_trash (stack, addr))

	      pv_area_store (stack, addr, 4, regs[regno]);

		regs[bits (insn, 0, 3)] = addr;
	  else if ((insn & 0xfff0) == 0xf8c0	/* str.w Rt,[Rn,#imm] */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	      int regno = bits (inst2, 12, 15);

	      offset = inst2 & 0xfff;
	      addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);

	      if (pv_area_store_would_trash (stack, addr))

	      pv_area_store (stack, addr, 4, regs[regno]);
	  else if ((insn & 0xffd0) == 0xf880	/* str{bh}.w Rt,[Rn,#imm] */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Ignore stores of argument registers to the stack.  */
	  else if ((insn & 0xffd0) == 0xf800	/* str{bh} Rt,[Rn,#+/-imm] */
		   && (inst2 & 0x0d00) == 0x0c00
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Ignore stores of argument registers to the stack.  */
	  else if ((insn & 0xffd0) == 0xe890	/* ldmia Rn[!],
		   && (inst2 & 0x8000) == 0x0000
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Ignore block loads from the stack, potentially copying
	       parameters from memory.  */
	  else if ((insn & 0xffb0) == 0xe950	/* ldrd Rt, Rt2,
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Similarly ignore dual loads from the stack.  */
	  else if ((insn & 0xfff0) == 0xf850	/* ldr Rt,[Rn,#+/-imm] */
		   && (inst2 & 0x0d00) == 0x0c00
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Similarly ignore single loads from the stack.  */
	  else if ((insn & 0xfff0) == 0xf8d0	/* ldr.w Rt,[Rn,#imm] */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Similarly ignore single loads from the stack.  */
	  else if ((insn & 0xfbf0) == 0xf100	/* add.w Rd, Rn, #imm */
		   && (inst2 & 0x8000) == 0x0000)
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_add_constant (regs[bits (insn, 0, 3)],
				   thumb_expand_immediate (imm));
	  else if ((insn & 0xfbf0) == 0xf200	/* addw Rd, Rn, #imm */
		   && (inst2 & 0x8000) == 0x0000)
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_add_constant (regs[bits (insn, 0, 3)], imm);
	  else if ((insn & 0xfbf0) == 0xf1a0	/* sub.w Rd, Rn, #imm */
		   && (inst2 & 0x8000) == 0x0000)
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_add_constant (regs[bits (insn, 0, 3)],
				   - (CORE_ADDR) thumb_expand_immediate (imm));
	  else if ((insn & 0xfbf0) == 0xf2a0	/* subw Rd, Rn, #imm */
		   && (inst2 & 0x8000) == 0x0000)
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
	  else if ((insn & 0xfbff) == 0xf04f)	/* mov.w Rd, #const */
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_constant (thumb_expand_immediate (imm));
	  else if ((insn & 0xfbf0) == 0xf240)	/* movw Rd, #const */
		= EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);

	      regs[bits (inst2, 8, 11)] = pv_constant (imm);
	  else if (insn == 0xea5f		/* mov.w Rd,Rm */
		   && (inst2 & 0xf0f0) == 0)
	      int dst_reg = (inst2 & 0x0f00) >> 8;
	      int src_reg = inst2 & 0xf;
	      regs[dst_reg] = regs[src_reg];
	  else if ((insn & 0xff7f) == 0xf85f)	/* ldr.w Rt,<label> */
	      /* Constant pool loads.  */
	      unsigned int constant;

	      offset = bits (insn, 0, 11);
		loc = start + 4 + offset;
		loc = start + 4 - offset;

	      constant = read_memory_unsigned_integer (loc, 4, byte_order);
	      regs[bits (inst2, 12, 15)] = pv_constant (constant);
	  else if ((insn & 0xff7f) == 0xe95f)	/* ldrd Rt,Rt2,<label> */
	      /* Constant pool loads.  */
	      unsigned int constant;

	      offset = bits (insn, 0, 7) << 2;
		loc = start + 4 + offset;
		loc = start + 4 - offset;

	      constant = read_memory_unsigned_integer (loc, 4, byte_order);
	      regs[bits (inst2, 12, 15)] = pv_constant (constant);

	      constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
	      regs[bits (inst2, 8, 11)] = pv_constant (constant);
	  else if (thumb2_instruction_changes_pc (insn, inst2))
	      /* Don't scan past anything that might change control flow.  */
	      /* The optimizer might shove anything into the prologue,
		 so we just skip what we don't recognize.  */
	      unrecognized_pc = start;
      else if (thumb_instruction_changes_pc (insn))
	  /* Don't scan past anything that might change control flow.  */
	  /* The optimizer might shove anything into the prologue,
	     so we just skip what we don't recognize.  */
	  unrecognized_pc = start;

    fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
			paddress (gdbarch, start));

  if (unrecognized_pc == 0)
    unrecognized_pc = start;

  /* Early exit when the caller did not ask for cache results.  */
      do_cleanups (back_to);
      return unrecognized_pc;

  if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
      /* Frame pointer is fp.  Frame size is constant.  */
      cache->framereg = ARM_FP_REGNUM;
      cache->framesize = -regs[ARM_FP_REGNUM].k;
  else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
      /* Frame pointer is r7.  Frame size is constant.  */
      cache->framereg = THUMB_FP_REGNUM;
      cache->framesize = -regs[THUMB_FP_REGNUM].k;
  else if (pv_is_register (regs[ARM_SP_REGNUM], ARM_SP_REGNUM))
      /* Try the stack pointer... this is a bit desperate.  */
      cache->framereg = ARM_SP_REGNUM;
      cache->framesize = -regs[ARM_SP_REGNUM].k;
      /* We're just out of luck.  We don't know where the frame is.  */
      cache->framereg = -1;
      cache->framesize = 0;

  /* Record where each core register was saved on the stack.  */
  for (i = 0; i < 16; i++)
    if (pv_area_find_reg (stack, gdbarch, i, &offset))
      cache->saved_regs[i].addr = offset;

  do_cleanups (back_to);
  return unrecognized_pc;
/* Try to analyze the instructions starting from PC, which load symbol
   __stack_chk_guard.  Return the address of instruction after loading this
   symbol, set the dest register number to *BASEREG, and set the size of
   instructions for loading symbol in OFFSET.  Return 0 if instructions are
   not recognized (comment truncated in this excerpt).
   NOTE(review): several lines (the Thumb/ARM if-else structure, *offset
   assignments, return statements, some declarations) were elided.  */
arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
				 unsigned int *destreg, int *offset)
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  int is_thumb = arm_pc_is_thumb (gdbarch, pc);
  unsigned int low, high, address;

      /* Thumb encodings.  */
      unsigned short insn1
	= read_memory_unsigned_integer (pc, 2, byte_order_for_code);

      if ((insn1 & 0xf800) == 0x4800)	/* ldr Rd, #immed */
	  *destreg = bits (insn1, 8, 10);
	  address = bits (insn1, 0, 7);
      else if ((insn1 & 0xfbf0) == 0xf240)	/* movw Rd, #const */
	  unsigned short insn2
	    = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);

	  low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);

	  /* Re-fetch the following instruction pair.  */
	    = read_memory_unsigned_integer (pc + 4, 2, byte_order_for_code);
	    = read_memory_unsigned_integer (pc + 6, 2, byte_order_for_code);

	  /* movt Rd, #const */
	  if ((insn1 & 0xfbc0) == 0xf2c0)
	      high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
	      *destreg = bits (insn2, 8, 11);

	  address = (high << 16 | low);

      /* ARM encodings.  */
	= read_memory_unsigned_integer (pc, 4, byte_order_for_code);

      if ((insn & 0x0e5f0000) == 0x041f0000)	/* ldr Rd, #immed */
	  address = bits (insn, 0, 11);
	  *destreg = bits (insn, 12, 15);
      else if ((insn & 0x0ff00000) == 0x03000000)	/* movw Rd, #const */
	  low = EXTRACT_MOVW_MOVT_IMM_A (insn);

	    = read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code);

	  if ((insn & 0x0ff00000) == 0x03400000)	/* movt Rd, #const */
	      high = EXTRACT_MOVW_MOVT_IMM_A (insn);
	      *destreg = bits (insn, 12, 15);

	  address = (high << 16 | low);
/* Try to skip a sequence of instructions used for stack protector.  If PC
   points to the first instruction of this sequence, return the address of
   first instruction after this sequence, otherwise, return original PC.

   On arm, this sequence of instructions is composed of mainly three steps,
     Step 1: load symbol __stack_chk_guard,
     Step 2: load from address of __stack_chk_guard,
     Step 3: store it to somewhere else.

   Usually, instructions on step 2 and step 3 are the same on various ARM
   architectures.  On step 2, it is one instruction 'ldr Rx, [Rn, #0]', and
   on step 3, it is also one instruction 'str Rx, [r7, #immd]'.  However,
   instructions in step 1 vary from different ARM architectures.  On ARMv7,
   they are:

	movw	Rn, #:lower16:__stack_chk_guard
	movt	Rn, #:upper16:__stack_chk_guard

   (elided in this excerpt: the older ldr-from-literal-pool variant)

	.word	__stack_chk_guard

   Since ldr/str is a very popular instruction, we can't use them as
   'fingerprint' or 'signature' of stack protector sequence.  Here we choose
   sequence {movw/movt, ldr}/ldr/str plus symbol __stack_chk_guard, if not
   stripped, as the 'fingerprint' of a stack protector code sequence.  */
arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned int address, basereg;
  struct minimal_symbol *stack_chk_guard;
  int is_thumb = arm_pc_is_thumb (gdbarch, pc);

  /* Try to parse the instructions in Step 1.  */
  addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,

  stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
  /* If name of symbol doesn't start with '__stack_chk_guard', this
     instruction sequence is not for stack protector.  If symbol is
     removed, we conservatively think this sequence is for stack protector.  */
      && strcmp (SYMBOL_LINKAGE_NAME(stack_chk_guard), "__stack_chk_guard"))

      /* Thumb-mode verification of Steps 2 and 3.  */
      unsigned int destreg;
	= read_memory_unsigned_integer (pc + offset, 2, byte_order_for_code);

      /* Step 2: ldr Rd, [Rn, #immed], encoding T1.  */
      if ((insn & 0xf800) != 0x6800)
      if (bits (insn, 3, 5) != basereg)
      destreg = bits (insn, 0, 2);

      insn = read_memory_unsigned_integer (pc + offset + 2, 2,
					   byte_order_for_code);
      /* Step 3: str Rd, [Rn, #immed], encoding T1.  */
      if ((insn & 0xf800) != 0x6000)
      if (destreg != bits (insn, 0, 2))

      /* ARM-mode verification of Steps 2 and 3.  */
      unsigned int destreg;
	= read_memory_unsigned_integer (pc + offset, 4, byte_order_for_code);

      /* Step 2: ldr Rd, [Rn, #immed], encoding A1.  */
      if ((insn & 0x0e500000) != 0x04100000)
      if (bits (insn, 16, 19) != basereg)
      destreg = bits (insn, 12, 15);
      /* Step 3: str Rd, [Rn, #immed], encoding A1.  */
      insn = read_memory_unsigned_integer (pc + offset + 4,
					   4, byte_order_for_code);
      if ((insn & 0x0e500000) != 0x04000000)
      if (bits (insn, 12, 15) != destreg)

  /* The size of total two instructions ldr/str is 4 on Thumb-2, while 8
     in ARM mode (comment truncated in this excerpt).  */
    return pc + offset + 4;
    return pc + offset + 8;
1334 /* Advance the PC across any function entry prologue instructions to
1335 reach some "real" code.
1337 The APCS (ARM Procedure Call Standard) defines the following
1341 [stmfd sp!, {a1,a2,a3,a4}]
1342 stmfd sp!, {...,fp,ip,lr,pc}
1343 [stfe f7, [sp, #-12]!]
1344 [stfe f6, [sp, #-12]!]
1345 [stfe f5, [sp, #-12]!]
1346 [stfe f4, [sp, #-12]!]
1347 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1350 arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1352 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1355 CORE_ADDR func_addr, limit_pc;
1356 struct symtab_and_line sal;
1358 /* See if we can determine the end of the prologue via the symbol table.
1359 If so, then return either PC, or the PC after the prologue, whichever
1361 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
1363 CORE_ADDR post_prologue_pc
1364 = skip_prologue_using_sal (gdbarch, func_addr);
1365 struct symtab *s = find_pc_symtab (func_addr);
/* Also skip over any stack-protector setup emitted right after the
   normal prologue.  */
1367 if (post_prologue_pc)
1369 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1372 /* GCC always emits a line note before the prologue and another
1373 one after, even if the two are at the same address or on the
1374 same line. Take advantage of this so that we do not need to
1375 know every instruction that might appear in the prologue. We
1376 will have producer information for most binaries; if it is
1377 missing (e.g. for -gstabs), assume the GNU tools. */
1378 if (post_prologue_pc
1380 || s->producer == NULL
1381 || strncmp (s->producer, "GNU ", sizeof ("GNU ") - 1) == 0))
1382 return post_prologue_pc;
1384 if (post_prologue_pc != 0)
1386 CORE_ADDR analyzed_limit;
1388 /* For non-GCC compilers, make sure the entire line is an
1389 acceptable prologue; GDB will round this function's
1390 return value up to the end of the following line so we
1391 can not skip just part of a line (and we do not want to).
1393 RealView does not treat the prologue specially, but does
1394 associate prologue code with the opening brace; so this
1395 lets us skip the first line if we think it is the opening
1397 if (arm_pc_is_thumb (gdbarch, func_addr))
1398 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1399 post_prologue_pc, NULL);
1401 analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
1402 post_prologue_pc, NULL);
/* Only trust the line-table endpoint when the instruction-level
   analyzer agrees with it.  */
1404 if (analyzed_limit != post_prologue_pc)
1407 return post_prologue_pc;
1411 /* Can't determine prologue from the symbol table, need to examine
1414 /* Find an upper limit on the function prologue using the debug
1415 information. If the debug information could not be used to provide
1416 that bound, then use an arbitrary large number as the upper bound. */
1417 /* Like arm_scan_prologue, stop no later than pc + 64. */
1418 limit_pc = skip_prologue_using_sal (gdbarch, pc);
1420 limit_pc = pc + 64; /* Magic. */
1423 /* Check if this is Thumb code. */
1424 if (arm_pc_is_thumb (gdbarch, pc))
1425 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
/* ARM mode: step over instructions recognized as prologue components,
   stopping at the first one that is not.  */
1427 for (skip_pc = pc; skip_pc < limit_pc; skip_pc += 4)
1429 inst = read_memory_unsigned_integer (skip_pc, 4, byte_order_for_code);
1431 /* "mov ip, sp" is no longer a required part of the prologue. */
1432 if (inst == 0xe1a0c00d) /* mov ip, sp */
1435 if ((inst & 0xfffff000) == 0xe28dc000) /* add ip, sp #n */
1438 if ((inst & 0xfffff000) == 0xe24dc000) /* sub ip, sp #n */
1441 /* Some prologues begin with "str lr, [sp, #-4]!". */
1442 if (inst == 0xe52de004) /* str lr, [sp, #-4]! */
1445 if ((inst & 0xfffffff0) == 0xe92d0000) /* stmfd sp!,{a1,a2,a3,a4} */
1448 if ((inst & 0xfffff800) == 0xe92dd800) /* stmfd sp!,{fp,ip,lr,pc} */
1451 /* Any insns after this point may float into the code, if it makes
1452 for better instruction scheduling, so we skip them only if we
1453 find them, but still consider the function to be frame-ful. */
1455 /* We may have either one sfmfd instruction here, or several stfe
1456 insns, depending on the version of floating point code we
1458 if ((inst & 0xffbf0fff) == 0xec2d0200) /* sfmfd fn, <cnt>, [sp]! */
1461 if ((inst & 0xffff8fff) == 0xed6d0103) /* stfe fn, [sp, #-12]! */
1464 if ((inst & 0xfffff000) == 0xe24cb000) /* sub fp, ip, #nn */
1467 if ((inst & 0xfffff000) == 0xe24dd000) /* sub sp, sp, #nn */
1470 if ((inst & 0xffffc000) == 0xe54b0000 /* strb r(0123),[r11,#-nn] */
1471 || (inst & 0xffffc0f0) == 0xe14b00b0 /* strh r(0123),[r11,#-nn] */
1472 || (inst & 0xffffc000) == 0xe50b0000) /* str r(0123),[r11,#-nn] */
1475 if ((inst & 0xffffc000) == 0xe5cd0000 /* strb r(0123),[sp,#nn] */
1476 || (inst & 0xffffc0f0) == 0xe1cd00b0 /* strh r(0123),[sp,#nn] */
1477 || (inst & 0xffffc000) == 0xe58d0000) /* str r(0123),[sp,#nn] */
1480 /* Un-recognized instruction; stop scanning. */
1484 return skip_pc; /* End of prologue. */
1488 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1489 This function decodes a Thumb function prologue to determine:
1490 1) the size of the stack frame
1491 2) which registers are saved on it
1492 3) the offsets of saved regs
1493 4) the offset from the stack pointer to the frame pointer
1495 A typical Thumb function prologue would create this stack frame
1496 (offsets relative to FP)
1497 old SP -> 24 stack parameters
1500 R7 -> 0 local variables (16 bytes)
1501 SP -> -12 additional stack space (12 bytes)
1502 The frame size would thus be 36 bytes, and the frame offset would be
1503 12 bytes. The frame register is R7.
1505 The comments for thumb_skip_prolog() describe the algorithm we use
1506 to detect the end of the prolog. */
1510 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1511 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1513 CORE_ADDR prologue_start;
1514 CORE_ADDR prologue_end;
1515 CORE_ADDR current_pc;
1517 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1520 /* See comment in arm_scan_prologue for an explanation of
1522 if (prologue_end > prologue_start + 64)
1524 prologue_end = prologue_start + 64;
1528 /* We're in the boondocks: we have no idea where the start of the
/* Never scan past the frame's current PC -- execution may still be
   part-way through the prologue.  */
1532 prologue_end = min (prologue_end, prev_pc);
1534 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1537 /* Return 1 if THIS_INSTR might change control flow, 0 otherwise. */
1540 arm_instruction_changes_pc (uint32_t this_instr)
/* Condition field 0xF selects the unconditional instruction space.  */
1542 if (bits (this_instr, 28, 31) == INST_NV)
1543 /* Unconditional instructions. */
1544 switch (bits (this_instr, 24, 27))
1548 /* Branch with Link and change to Thumb. */
1553 /* Coprocessor register transfer. */
1554 if (bits (this_instr, 12, 15) == 15)
1555 error (_("Invalid update to pc in instruction"));
/* Conditional instructions: dispatch on the major opcode field
   (bits 25-27).  */
1561 switch (bits (this_instr, 25, 27))
1564 if (bits (this_instr, 23, 24) == 2 && bit (this_instr, 20) == 0)
1566 /* Multiplies and extra load/stores. */
1567 if (bit (this_instr, 4) == 1 && bit (this_instr, 7) == 1)
1568 /* Neither multiplies nor extension load/stores are allowed
1572 /* Otherwise, miscellaneous instructions. */
1574 /* BX <reg>, BXJ <reg>, BLX <reg> */
1575 if (bits (this_instr, 4, 27) == 0x12fff1
1576 || bits (this_instr, 4, 27) == 0x12fff2
1577 || bits (this_instr, 4, 27) == 0x12fff3)
1580 /* Other miscellaneous instructions are unpredictable if they
1584 /* Data processing instruction. Fall through. */
/* A data-processing instruction writes the PC only when its Rd field
   (bits 12-15) names the PC.  */
1587 if (bits (this_instr, 12, 15) == 15)
1594 /* Media instructions and architecturally undefined instructions. */
1595 if (bits (this_instr, 25, 27) == 3 && bit (this_instr, 4) == 1)
1599 if (bit (this_instr, 20) == 0)
/* A load changes the PC only when the destination register is the PC.  */
1603 if (bits (this_instr, 12, 15) == ARM_PC_REGNUM)
1609 /* Load/store multiple. */
1610 if (bit (this_instr, 20) == 1 && bit (this_instr, 15) == 1)
1616 /* Branch and branch with link. */
1621 /* Coprocessor transfers or SWIs can not affect PC. */
1625 internal_error (__FILE__, __LINE__, _("bad value in switch"));
1629 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1630 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1631 fill it in. Return the first address not recognized as a prologue
1634 We recognize all the instructions typically found in ARM prologues,
1635 plus harmless instructions which can be skipped (either for analysis
1636 purposes, or a more restrictive set that can be skipped when finding
1637 the end of the prologue). */
1640 arm_analyze_prologue (struct gdbarch *gdbarch,
1641 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1642 struct arm_prologue_cache *cache)
1644 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1645 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1647 CORE_ADDR offset, current_pc;
1648 pv_t regs[ARM_FPS_REGNUM];
1649 struct pv_area *stack;
1650 struct cleanup *back_to;
1651 int framereg, framesize;
1652 CORE_ADDR unrecognized_pc = 0;
1654 /* Search the prologue looking for instructions that set up the
1655 frame pointer, adjust the stack pointer, and save registers.
1657 Be careful, however, and if it doesn't look like a prologue,
1658 don't try to scan it. If, for instance, a frameless function
1659 begins with stmfd sp!, then we will tell ourselves there is
1660 a frame, which will confuse stack traceback, as well as "finish"
1661 and other operations that rely on a knowledge of the stack
/* Initially each register symbolically holds its own entry value.  */
1664 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1665 regs[regno] = pv_register (regno, 0);
1666 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1667 back_to = make_cleanup_free_pv_area (stack);
1669 for (current_pc = prologue_start;
1670 current_pc < prologue_end;
1674 = read_memory_unsigned_integer (current_pc, 4, byte_order_for_code);
1676 if (insn == 0xe1a0c00d) /* mov ip, sp */
1678 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1681 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1682 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1684 unsigned imm = insn & 0xff; /* immediate value */
1685 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1686 int rd = bits (insn, 12, 15);
1687 imm = (imm >> rot) | (imm << (32 - rot));
1688 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1691 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1692 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1694 unsigned imm = insn & 0xff; /* immediate value */
1695 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1696 int rd = bits (insn, 12, 15);
1697 imm = (imm >> rot) | (imm << (32 - rot));
1698 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1701 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1704 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1706 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1707 pv_area_store (stack, regs[ARM_SP_REGNUM], 4,
1708 regs[bits (insn, 12, 15)]);
1711 else if ((insn & 0xffff0000) == 0xe92d0000)
1712 /* stmfd sp!, {..., fp, ip, lr, pc}
1714 stmfd sp!, {a1, a2, a3, a4} */
1716 int mask = insn & 0xffff;
1718 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1721 /* Calculate offsets of saved registers. */
1722 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1723 if (mask & (1 << regno))
1726 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1727 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
1730 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1731 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1732 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1734 /* No need to add this to saved_regs -- it's just an arg reg. */
1737 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1738 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1739 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1741 /* No need to add this to saved_regs -- it's just an arg reg. */
1744 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1746 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1748 /* No need to add this to saved_regs -- it's just arg regs. */
1751 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1753 unsigned imm = insn & 0xff; /* immediate value */
1754 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1755 imm = (imm >> rot) | (imm << (32 - rot));
1756 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1758 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1760 unsigned imm = insn & 0xff; /* immediate value */
1761 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1762 imm = (imm >> rot) | (imm << (32 - rot));
1763 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
1765 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1767 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1769 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1772 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1773 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
1774 pv_area_store (stack, regs[ARM_SP_REGNUM], 12, regs[regno]);
1776 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1778 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1780 int n_saved_fp_regs;
1781 unsigned int fp_start_reg, fp_bound_reg;
1783 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
/* The N0/N1 bits encode how many FPA registers the sfmfd saves.  */
1786 if ((insn & 0x800) == 0x800) /* N0 is set */
1788 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1789 n_saved_fp_regs = 3;
1791 n_saved_fp_regs = 1;
1795 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1796 n_saved_fp_regs = 2;
1798 n_saved_fp_regs = 4;
1801 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
1802 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1803 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
1805 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1806 pv_area_store (stack, regs[ARM_SP_REGNUM], 12,
1807 regs[fp_start_reg++]);
1810 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1812 /* Allow some special function calls when skipping the
1813 prologue; GCC generates these before storing arguments to
1815 CORE_ADDR dest = BranchDest (current_pc, insn);
1817 if (skip_prologue_function (dest))
1822 else if ((insn & 0xf0000000) != 0xe0000000)
1823 break; /* Condition not true, exit early. */
1824 else if (arm_instruction_changes_pc (insn))
1825 /* Don't scan past anything that might change control flow. */
1827 else if ((insn & 0xfe500000) == 0xe8100000) /* ldm */
1829 /* Ignore block loads from the stack, potentially copying
1830 parameters from memory. */
1831 if (pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1836 else if ((insn & 0xfc500000) == 0xe4100000)
1838 /* Similarly ignore single loads from the stack. */
1839 if (pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1844 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1845 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1846 register instead of the stack. */
1850 /* The optimizer might shove anything into the prologue,
1851 so we just skip what we don't recognize. */
1852 unrecognized_pc = current_pc;
/* If everything was recognized, report the scan's stopping point.  */
1857 if (unrecognized_pc == 0)
1858 unrecognized_pc = current_pc;
1860 /* The frame size is just the distance from the frame register
1861 to the original stack pointer. */
1862 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1864 /* Frame pointer is fp. */
1865 framereg = ARM_FP_REGNUM;
1866 framesize = -regs[ARM_FP_REGNUM].k;
1868 else if (pv_is_register (regs[ARM_SP_REGNUM], ARM_SP_REGNUM))
1870 /* Try the stack pointer... this is a bit desperate. */
1871 framereg = ARM_SP_REGNUM;
1872 framesize = -regs[ARM_SP_REGNUM].k;
1876 /* We're just out of luck. We don't know where the frame is. */
1883 cache->framereg = framereg;
1884 cache->framesize = framesize;
/* Record the stack offsets of any registers the prologue saved.  */
1886 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1887 if (pv_area_find_reg (stack, gdbarch, regno, &offset))
1888 cache->saved_regs[regno].addr = offset;
1892 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1893 paddress (gdbarch, unrecognized_pc));
1895 do_cleanups (back_to);
1896 return unrecognized_pc;
/* Scan the prologue of the function containing THIS_FRAME's PC and
   fill CACHE with the frame register, frame size, and saved-register
   locations found.  */
1900 arm_scan_prologue (struct frame_info *this_frame,
1901 struct arm_prologue_cache *cache)
1903 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1904 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1906 CORE_ADDR prologue_start, prologue_end, current_pc;
1907 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1908 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
1909 pv_t regs[ARM_FPS_REGNUM];
1910 struct pv_area *stack;
1911 struct cleanup *back_to;
1914 /* Assume there is no frame until proven otherwise. */
1915 cache->framereg = ARM_SP_REGNUM;
1916 cache->framesize = 0;
1918 /* Check for Thumb prologue. */
1919 if (arm_frame_is_thumb (this_frame))
1921 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1925 /* Find the function prologue. If we can't find the function in
1926 the symbol table, peek in the stack frame to find the PC. */
1927 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1930 /* One way to find the end of the prologue (which works well
1931 for unoptimized code) is to do the following:
1933 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1936 prologue_end = prev_pc;
1937 else if (sal.end < prologue_end)
1938 prologue_end = sal.end;
1940 This mechanism is very accurate so long as the optimizer
1941 doesn't move any instructions from the function body into the
1942 prologue. If this happens, sal.end will be the last
1943 instruction in the first hunk of prologue code just before
1944 the first instruction that the scheduler has moved from
1945 the body to the prologue.
1947 In order to make sure that we scan all of the prologue
1948 instructions, we use a slightly less accurate mechanism which
1949 may scan more than necessary. To help compensate for this
1950 lack of accuracy, the prologue scanning loop below contains
1951 several clauses which'll cause the loop to terminate early if
1952 an implausible prologue instruction is encountered.
1958 is a suitable endpoint since it accounts for the largest
1959 possible prologue plus up to five instructions inserted by
1962 if (prologue_end > prologue_start + 64)
1964 prologue_end = prologue_start + 64; /* See above. */
1969 /* We have no symbol information. Our only option is to assume this
1970 function has a standard stack frame and the normal frame register.
1971 Then, we can find the value of our frame pointer on entrance to
1972 the callee (or at the present moment if this is the innermost frame).
1973 The value stored there should be the address of the stmfd + 8. */
1974 CORE_ADDR frame_loc;
1975 LONGEST return_value;
1977 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
1978 if (!safe_read_memory_integer (frame_loc, 4, byte_order, &return_value))
/* Derive the prologue start from the saved PC, which points at
   the stmfd instruction + 8 (see the comment above).  */
1982 prologue_start = gdbarch_addr_bits_remove
1983 (gdbarch, return_value) - 8;
1984 prologue_end = prologue_start + 64; /* See above. */
/* Never scan past the point the frame has actually executed to.  */
1988 if (prev_pc < prologue_end)
1989 prologue_end = prev_pc;
1991 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
/* Allocate and populate a prologue cache for THIS_FRAME by scanning
   the function's prologue.  */
1994 static struct arm_prologue_cache *
1995 arm_make_prologue_cache (struct frame_info *this_frame)
1998 struct arm_prologue_cache *cache;
1999 CORE_ADDR unwound_fp;
2001 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2002 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2004 arm_scan_prologue (this_frame, cache);
/* Reconstruct the caller's SP from the frame register selected by the
   prologue scan plus the computed frame size.  */
2006 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
2007 if (unwound_fp == 0)
2010 cache->prev_sp = unwound_fp + cache->framesize;
2012 /* Calculate actual addresses of saved registers using offsets
2013 determined by arm_scan_prologue. */
2014 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2015 if (trad_frame_addr_p (cache->saved_regs, reg))
2016 cache->saved_regs[reg].addr += cache->prev_sp;
2021 /* Our frame ID for a normal frame is the current function's starting PC
2022 and the caller's SP when we were called. */
2025 arm_prologue_this_id (struct frame_info *this_frame,
2027 struct frame_id *this_id)
2029 struct arm_prologue_cache *cache;
/* Build (and memoize) the prologue cache on first use.  */
2033 if (*this_cache == NULL)
2034 *this_cache = arm_make_prologue_cache (this_frame);
2035 cache = *this_cache;
2037 /* This is meant to halt the backtrace at "_start". */
2038 pc = get_frame_pc (this_frame);
2039 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
2042 /* If we've hit a wall, stop. */
2043 if (cache->prev_sp == 0)
2046 /* Use function start address as part of the frame ID. If we cannot
2047 identify the start address (due to missing symbol information),
2048 fall back to just using the current PC. */
2049 func = get_frame_func (this_frame)
2053 id = frame_id_build (cache->prev_sp, func);
/* Unwind the value of register PREV_REGNUM in the frame previous to
   THIS_FRAME, using the prologue-scan cache.  */
2057 static struct value *
2058 arm_prologue_prev_register (struct frame_info *this_frame,
2062 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2063 struct arm_prologue_cache *cache;
2065 if (*this_cache == NULL)
2066 *this_cache = arm_make_prologue_cache (this_frame);
2067 cache = *this_cache;
2069 /* If we are asked to unwind the PC, then we need to return the LR
2070 instead. The prologue may save PC, but it will point into this
2071 frame's prologue, not the next frame's resume location. Also
2072 strip the saved T bit. A valid LR may have the low bit set, but
2073 a valid PC never does. */
2074 if (prev_regnum == ARM_PC_REGNUM)
2078 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2079 return frame_unwind_got_constant (this_frame, prev_regnum,
2080 arm_addr_bits_remove (gdbarch, lr));
2083 /* SP is generally not saved to the stack, but this frame is
2084 identified by the next frame's stack pointer at the time of the call.
2085 The value was already reconstructed into PREV_SP. */
2086 if (prev_regnum == ARM_SP_REGNUM)
2087 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
2089 /* The CPSR may have been changed by the call instruction and by the
2090 called function. The only bit we can reconstruct is the T bit,
2091 by checking the low bit of LR as of the call. This is a reliable
2092 indicator of Thumb-ness except for some ARM v4T pre-interworking
2093 Thumb code, which could get away with a clear low bit as long as
2094 the called function did not use bx. Guess that all other
2095 bits are unchanged; the condition flags are presumably lost,
2096 but the processor status is likely valid. */
2097 if (prev_regnum == ARM_PS_REGNUM)
2100 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
2102 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
2103 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2104 if (IS_THUMB_ADDR (lr))
2108 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
/* Anything else: use the locations recorded by the prologue scan.  */
2111 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
/* Frame unwinder that recovers the previous frame via prologue
   analysis (see arm_prologue_this_id / arm_prologue_prev_register).  */
2115 struct frame_unwind arm_prologue_unwind = {
2117 arm_prologue_this_id,
2118 arm_prologue_prev_register,
2120 default_frame_sniffer
2123 /* Maintain a list of ARM exception table entries per objfile, similar to the
2124 list of mapping symbols. We only cache entries for standard ARM-defined
2125 personality routines; the cache will contain only the frame unwinding
2126 instructions associated with the entry (not the descriptors). */
/* Per-objfile key under which the exception-table cache is stored.  */
2128 static const struct objfile_data *arm_exidx_data_key;
/* One cached exception-table entry; ADDR and ENTRY fields are filled
   in by arm_exidx_new_objfile below.  */
2130 struct arm_exidx_entry
2135 typedef struct arm_exidx_entry arm_exidx_entry_s;
2136 DEF_VEC_O(arm_exidx_entry_s);
/* The whole cache: one vector of entries per BFD section, indexed by
   the section's index.  */
2138 struct arm_exidx_data
2140 VEC(arm_exidx_entry_s) **section_maps;
/* Per-objfile data destructor: free the cached exception-table
   vectors for OBJFILE.  */
2144 arm_exidx_data_free (struct objfile *objfile, void *arg)
2146 struct arm_exidx_data *data = arg;
/* Release each per-section vector of cached entries.  */
2149 for (i = 0; i < objfile->obfd->section_count; i++)
2150 VEC_free (arm_exidx_entry_s, data->section_maps[i]);
/* Ordering predicate (ascending start address) used with
   VEC_lower_bound over the per-section entry vectors.  */
2154 arm_compare_exidx_entries (const struct arm_exidx_entry *lhs,
2155 const struct arm_exidx_entry *rhs)
2157 return lhs->addr < rhs->addr;
/* Find the allocated section of OBJFILE whose VMA range contains VMA.  */
2160 static struct obj_section *
2161 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2163 struct obj_section *osect;
/* Walk every section, considering only those with SEC_ALLOC set.  */
2165 ALL_OBJFILE_OSECTIONS (objfile, osect)
2166 if (bfd_get_section_flags (objfile->obfd,
2167 osect->the_bfd_section) & SEC_ALLOC)
2169 bfd_vma start, size;
2170 start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
2171 size = bfd_get_section_size (osect->the_bfd_section);
2173 if (start <= vma && vma < start + size)
2180 /* Parse contents of exception table and exception index sections
2181 of OBJFILE, and fill in the exception table entry cache.
2183 For each entry that refers to a standard ARM-defined personality
2184 routine, extract the frame unwinding instructions (from either
2185 the index or the table section). The unwinding instructions
2187 - extracting them from the rest of the table data
2188 - converting to host endianness
2189 - appending the implicit 0xb0 ("Finish") code
2191 The extracted and normalized instructions are stored for later
2192 retrieval by the arm_find_exidx_entry routine. */
2195 arm_exidx_new_objfile (struct objfile *objfile)
2197 struct cleanup *cleanups = make_cleanup (null_cleanup, NULL);
2198 struct arm_exidx_data *data;
2199 asection *exidx, *extab;
2200 bfd_vma exidx_vma = 0, extab_vma = 0;
2201 bfd_size_type exidx_size = 0, extab_size = 0;
2202 gdb_byte *exidx_data = NULL, *extab_data = NULL;
2205 /* If we've already touched this file, do nothing. */
2206 if (!objfile || objfile_data (objfile, arm_exidx_data_key) != NULL)
2209 /* Read contents of exception table and index. */
2210 exidx = bfd_get_section_by_name (objfile->obfd, ".ARM.exidx");
2213 exidx_vma = bfd_section_vma (objfile->obfd, exidx);
2214 exidx_size = bfd_get_section_size (exidx);
2215 exidx_data = xmalloc (exidx_size);
2216 make_cleanup (xfree, exidx_data);
2218 if (!bfd_get_section_contents (objfile->obfd, exidx,
2219 exidx_data, 0, exidx_size))
2221 do_cleanups (cleanups);
2226 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2229 extab_vma = bfd_section_vma (objfile->obfd, extab);
2230 extab_size = bfd_get_section_size (extab);
2231 extab_data = xmalloc (extab_size);
2232 make_cleanup (xfree, extab_data);
2234 if (!bfd_get_section_contents (objfile->obfd, extab,
2235 extab_data, 0, extab_size))
2237 do_cleanups (cleanups);
2242 /* Allocate exception table data structure. */
2243 data = OBSTACK_ZALLOC (&objfile->objfile_obstack, struct arm_exidx_data);
2244 set_objfile_data (objfile, arm_exidx_data_key, data);
2245 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
2246 objfile->obfd->section_count,
2247 VEC(arm_exidx_entry_s) *);
2249 /* Fill in exception table. */
/* Each .ARM.exidx entry is two 32-bit words: a prel31 offset to the
   function start, then either unwind data or an offset into .ARM.extab.  */
2250 for (i = 0; i < exidx_size / 8; i++)
2252 struct arm_exidx_entry new_exidx_entry;
2253 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8);
2254 bfd_vma val = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8 + 4);
2255 bfd_vma addr = 0, word = 0;
2256 int n_bytes = 0, n_words = 0;
2257 struct obj_section *sec;
2258 gdb_byte *entry = NULL;
2260 /* Extract address of start of function. */
/* Sign-extend the 31-bit offset, then make it absolute.  */
2261 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2262 idx += exidx_vma + i * 8;
2264 /* Find section containing function and compute section offset. */
2265 sec = arm_obj_section_from_vma (objfile, idx);
2268 idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);
2270 /* Determine address of exception table entry. */
2273 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2275 else if ((val & 0xff000000) == 0x80000000)
2277 /* Exception table entry embedded in .ARM.exidx
2278 -- must be short form. */
2282 else if (!(val & 0x80000000))
2284 /* Exception table entry in .ARM.extab. */
2285 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2286 addr += exidx_vma + i * 8 + 4;
2288 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_size)
2290 word = bfd_h_get_32 (objfile->obfd,
2291 extab_data + addr - extab_vma);
/* Compact-model entry, personality routine 0: all unwind data held
   in this single word.  */
2294 if ((word & 0xff000000) == 0x80000000)
/* Compact-model entries for personality routines 1 and 2 carry a
   word count in bits 16-23, followed by that many extra words.  */
2299 else if ((word & 0xff000000) == 0x81000000
2300 || (word & 0xff000000) == 0x82000000)
2304 n_words = ((word >> 16) & 0xff);
2306 else if (!(word & 0x80000000))
2309 struct obj_section *pers_sec;
2310 int gnu_personality = 0;
2312 /* Custom personality routine. */
2313 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2314 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2316 /* Check whether we've got one of the variants of the
2317 GNU personality routines. */
2318 pers_sec = arm_obj_section_from_vma (objfile, pers);
2321 static const char *personality[] =
2323 "__gcc_personality_v0",
2324 "__gxx_personality_v0",
2325 "__gcj_personality_v0",
2326 "__gnu_objc_personality_v0",
2330 CORE_ADDR pc = pers + obj_section_offset (pers_sec);
2333 for (k = 0; personality[k]; k++)
2334 if (lookup_minimal_symbol_by_pc_name
2335 (pc, personality[k], objfile))
2337 gnu_personality = 1;
2342 /* If so, the next word contains a word count in the high
2343 byte, followed by the same unwind instructions as the
2344 pre-defined forms. */
2346 && addr + 4 <= extab_vma + extab_size)
2348 word = bfd_h_get_32 (objfile->obfd,
2349 extab_data + addr - extab_vma);
2352 n_words = ((word >> 24) & 0xff);
2358 /* Sanity check address. */
2360 if (addr < extab_vma || addr + 4 * n_words > extab_vma + extab_size)
2361 n_words = n_bytes = 0;
2363 /* The unwind instructions reside in WORD (only the N_BYTES least
2364 significant bytes are valid), followed by N_WORDS words in the
2365 extab section starting at ADDR. */
2366 if (n_bytes || n_words)
2368 gdb_byte *p = entry = obstack_alloc (&objfile->objfile_obstack,
2369 n_bytes + n_words * 4 + 1);
2372 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
/* Copy each extab word out big-endian-wise, one byte at a time.  */
2376 word = bfd_h_get_32 (objfile->obfd,
2377 extab_data + addr - extab_vma);
2380 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2381 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2382 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2383 *p++ = (gdb_byte) (word & 0xff);
2386 /* Implied "Finish" to terminate the list. */
2390 /* Push entry onto vector. They are guaranteed to always
2391 appear in order of increasing addresses. */
2392 new_exidx_entry.addr = idx;
2393 new_exidx_entry.entry = entry;
2394 VEC_safe_push (arm_exidx_entry_s,
2395 data->section_maps[sec->the_bfd_section->index],
2399 do_cleanups (cleanups);
2402 /* Search for the exception table entry covering MEMADDR. If one is found,
2403 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2404 set *START to the start of the region covered by this entry. */
2407 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2409 struct obj_section *sec;
2411 sec = find_pc_section (memaddr);
2414 struct arm_exidx_data *data;
2415 VEC(arm_exidx_entry_s) *map;
/* Search keys are section-relative, matching how entries were stored.  */
2416 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2419 data = objfile_data (sec->objfile, arm_exidx_data_key);
2422 map = data->section_maps[sec->the_bfd_section->index];
2423 if (!VEC_empty (arm_exidx_entry_s, map))
2425 struct arm_exidx_entry *map_sym;
/* Binary-search the sorted per-section vector for MEMADDR.  */
2427 idx = VEC_lower_bound (arm_exidx_entry_s, map, &map_key,
2428 arm_compare_exidx_entries);
2430 /* VEC_lower_bound finds the earliest ordered insertion
2431 point. If the following symbol starts at this exact
2432 address, we use that; otherwise, the preceding
2433 exception table entry covers this address. */
2434 if (idx < VEC_length (arm_exidx_entry_s, map))
2436 map_sym = VEC_index (arm_exidx_entry_s, map, idx);
2437 if (map_sym->addr == map_key.addr)
2440 *start = map_sym->addr + obj_section_addr (sec);
2441 return map_sym->entry;
2447 map_sym = VEC_index (arm_exidx_entry_s, map, idx - 1);
2449 *start = map_sym->addr + obj_section_addr (sec);
2450 return map_sym->entry;
2459 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2460 instruction list from the ARM exception table entry ENTRY, allocate and
2461 return a prologue cache structure describing how to unwind this frame.
2463 Return NULL if the unwinding instruction list contains a "spare",
2464 "reserved" or "refuse to unwind" instruction as defined in section
2465 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2466 for the ARM Architecture" document. */
2468 static struct arm_prologue_cache *
2469 arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2474 struct arm_prologue_cache *cache;
2475 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2476 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2482 /* Whenever we reload SP, we actually have to retrieve its
2483 actual value in the current frame. */
2486 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2488 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2489 vsp = get_frame_register_unsigned (this_frame, reg);
2493 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
2494 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
/* Decode the EHABI unwind opcode stream: ENTRY is consumed one byte
   at a time, and VSP tracks the "virtual stack pointer" as registers
   are popped.  Each arm of the if/else ladder below handles one
   opcode group from EHABI section 9.3.  */
2500 /* Decode next unwind instruction. */
2503 if ((insn & 0xc0) == 0)
/* 00xxxxxx: vsp = vsp + (xxxxxx << 2) + 4.  */
2505 int offset = insn & 0x3f;
2506 vsp += (offset << 2) + 4;
2508 else if ((insn & 0xc0) == 0x40)
/* 01xxxxxx: vsp = vsp - (xxxxxx << 2) - 4.  */
2510 int offset = insn & 0x3f;
2511 vsp -= (offset << 2) + 4;
2513 else if ((insn & 0xf0) == 0x80)
/* 1000iiii iiiiiiii: pop r4-r15 under a 12-bit mask.  */
2515 int mask = ((insn & 0xf) << 8) | *entry++;
2518 /* The special case of an all-zero mask identifies
2519 "Refuse to unwind". We return NULL to fall back
2520 to the prologue analyzer. */
2524 /* Pop registers r4..r15 under mask. */
2525 for (i = 0; i < 12; i++)
2526 if (mask & (1 << i))
2528 cache->saved_regs[4 + i].addr = vsp;
2532 /* Special-case popping SP -- we need to reload vsp. */
2533 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2536 else if ((insn & 0xf0) == 0x90)
/* 1001nnnn: set vsp = r[nnnn].  */
2538 int reg = insn & 0xf;
2540 /* Reserved cases. */
2541 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2544 /* Set SP from another register and mark VSP for reload. */
2545 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2548 else if ((insn & 0xf0) == 0xa0)
/* 10100nnn / 10101nnn: pop r4-r[4+nnn], optionally also LR.  */
2550 int count = insn & 0x7;
2551 int pop_lr = (insn & 0x8) != 0;
2554 /* Pop r4..r[4+count]. */
2555 for (i = 0; i <= count; i++)
2557 cache->saved_regs[4 + i].addr = vsp;
2561 /* If indicated by flag, pop LR as well. */
2564 cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
2568 else if (insn == 0xb0)
/* 10110000: "Finish" -- end of the unwind sequence.  */
2570 /* We could only have updated PC by popping into it; if so, it
2571 will show up as address. Otherwise, copy LR into PC. */
2572 if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
2573 cache->saved_regs[ARM_PC_REGNUM]
2574 = cache->saved_regs[ARM_LR_REGNUM];
2579 else if (insn == 0xb1)
/* 10110001 0000iiii: pop r0-r3 under mask.  */
2581 int mask = *entry++;
2584 /* All-zero mask and mask >= 16 is "spare". */
2585 if (mask == 0 || mask >= 16)
2588 /* Pop r0..r3 under mask. */
2589 for (i = 0; i < 4; i++)
2590 if (mask & (1 << i))
2592 cache->saved_regs[i].addr = vsp;
2596 else if (insn == 0xb2)
/* 10110010 uleb128: vsp = vsp + 0x204 + (uleb128 << 2).  */
2598 ULONGEST offset = 0;
2603 offset |= (*entry & 0x7f) << shift;
2606 while (*entry++ & 0x80);
2608 vsp += 0x204 + (offset << 2);
2610 else if (insn == 0xb3)
/* 10110011 sssscccc: pop VFP D[ssss]-D[ssss+cccc] (FSTMFDX form).  */
2612 int start = *entry >> 4;
2613 int count = (*entry++) & 0xf;
2616 /* Only registers D0..D15 are valid here. */
2617 if (start + count >= 16)
2620 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2621 for (i = 0; i <= count; i++)
2623 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2627 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2630 else if ((insn & 0xf8) == 0xb8)
/* 10111nnn: pop VFP D[8]-D[8+nnn] (FSTMFDX form).  */
2632 int count = insn & 0x7;
2635 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2636 for (i = 0; i <= count; i++)
2638 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2642 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2645 else if (insn == 0xc6)
/* 11000110 sssscccc: pop iWMMXt WR[ssss]-WR[ssss+cccc].  */
2647 int start = *entry >> 4;
2648 int count = (*entry++) & 0xf;
2651 /* Only registers WR0..WR15 are valid. */
2652 if (start + count >= 16)
2655 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2656 for (i = 0; i <= count; i++)
2658 cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
2662 else if (insn == 0xc7)
/* 11000111 0000iiii: pop iWMMXt WCGR0-WCGR3 under mask.  */
2664 int mask = *entry++;
2667 /* All-zero mask and mask >= 16 is "spare". */
2668 if (mask == 0 || mask >= 16)
2671 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2672 for (i = 0; i < 4; i++)
2673 if (mask & (1 << i))
2675 cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
2679 else if ((insn & 0xf8) == 0xc0)
/* 11000nnn: pop iWMMXt WR[10]-WR[10+nnn].  */
2681 int count = insn & 0x7;
2684 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2685 for (i = 0; i <= count; i++)
2687 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
2691 else if (insn == 0xc8)
/* 11001000 sssscccc: pop VFP D[16+ssss]-D[16+ssss+cccc] (VPUSH form).  */
2693 int start = *entry >> 4;
2694 int count = (*entry++) & 0xf;
2697 /* Only registers D0..D31 are valid. */
2698 if (start + count >= 16)
2701 /* Pop VFP double-precision registers
2702 D[16+start]..D[16+start+count]. */
2703 for (i = 0; i <= count; i++)
2705 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
2709 else if (insn == 0xc9)
/* 11001001 sssscccc: pop VFP D[ssss]-D[ssss+cccc] (VPUSH form).  */
2711 int start = *entry >> 4;
2712 int count = (*entry++) & 0xf;
2715 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2716 for (i = 0; i <= count; i++)
2718 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2722 else if ((insn & 0xf8) == 0xd0)
/* 11010nnn: pop VFP D[8]-D[8+nnn] (VPUSH form).  */
2724 int count = insn & 0x7;
2727 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2728 for (i = 0; i <= count; i++)
2730 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2736 /* Everything else is "spare". */
2741 /* If we restore SP from a register, assume this was the frame register.
2742 Otherwise just fall back to SP as frame register. */
2743 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2744 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2746 cache->framereg = ARM_SP_REGNUM;
2748 /* Determine offset to previous frame. */
2750 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2752 /* We already got the previous SP. */
2753 cache->prev_sp = vsp;
2758 /* Unwinding via ARM exception table entries. Note that the sniffer
2759 already computes a filled-in prologue cache, which is then used
2760 with the same arm_prologue_this_id and arm_prologue_prev_register
2761 routines also used for prologue-parsing based unwinding. */
2764 arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2765 struct frame_info *this_frame,
2766 void **this_prologue_cache)
2768 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2769 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2770 CORE_ADDR addr_in_block, exidx_region, func_start;
2771 struct arm_prologue_cache *cache;
2774 /* See if we have an ARM exception table entry covering this address. */
2775 addr_in_block = get_frame_address_in_block (this_frame);
2776 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2780 /* The ARM exception table does not describe unwind information
2781 for arbitrary PC values, but is guaranteed to be correct only
2782 at call sites. We have to decide here whether we want to use
2783 ARM exception table information for this frame, or fall back
2784 to using prologue parsing. (Note that if we have DWARF CFI,
2785 this sniffer isn't even called -- CFI is always preferred.)
2787 Before we make this decision, however, we check whether we
2788 actually have *symbol* information for the current frame.
2789 If not, prologue parsing would not work anyway, so we might
2790 as well use the exception table and hope for the best. */
2791 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2795 /* If the next frame is "normal", we are at a call site in this
2796 frame, so exception information is guaranteed to be valid. */
2797 if (get_next_frame (this_frame)
2798 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2801 /* We also assume exception information is valid if we're currently
2802 blocked in a system call. The system library is supposed to
2803 ensure this, so that e.g. pthread cancellation works. */
2804 if (arm_frame_is_thumb (this_frame))
/* Thumb mode: look for a 16-bit SVC instruction just before PC.  */
2808 if (safe_read_memory_integer (get_frame_pc (this_frame) - 2, 2,
2809 byte_order_for_code, &insn)
2810 && (insn & 0xff00) == 0xdf00 /* svc */)
/* ARM mode: look for a 32-bit SVC instruction just before PC.  */
2817 if (safe_read_memory_integer (get_frame_pc (this_frame) - 4, 4,
2818 byte_order_for_code, &insn)
2819 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2823 /* Bail out if we don't know that exception information is valid. */
2827 /* The ARM exception index does not mark the *end* of the region
2828 covered by the entry, and some functions will not have any entry.
2829 To correctly recognize the end of the covered region, the linker
2830 should have inserted dummy records with a CANTUNWIND marker.
2832 Unfortunately, current versions of GNU ld do not reliably do
2833 this, and thus we may have found an incorrect entry above.
2834 As a (temporary) sanity check, we only use the entry if it
2835 lies *within* the bounds of the function. Note that this check
2836 might reject perfectly valid entries that just happen to cover
2837 multiple functions; therefore this check ought to be removed
2838 once the linker is fixed. */
2839 if (func_start > exidx_region)
2843 /* Decode the list of unwinding instructions into a prologue cache.
2844 Note that this may fail due to e.g. a "refuse to unwind" code. */
2845 cache = arm_exidx_fill_cache (this_frame, entry)
2849 *this_prologue_cache = cache;
/* Unwinder driven by ARM exception table (exidx) entries; shares the
   this_id / prev_register callbacks with the prologue-based unwinder.  */
2853 struct frame_unwind arm_exidx_unwind = {
2855 arm_prologue_this_id,
2856 arm_prologue_prev_register,
2858 arm_exidx_unwind_sniffer
/* Allocate a minimal prologue cache for a stub frame (e.g. a PLT entry):
   no registers are saved; the previous SP is simply the current SP.  */
2861 static struct arm_prologue_cache *
2862 arm_make_stub_cache (struct frame_info *this_frame)
2864 struct arm_prologue_cache *cache;
2866 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2867 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2869 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2874 /* Our frame ID for a stub frame is the current SP and LR. */
2877 arm_stub_this_id (struct frame_info *this_frame,
2879 struct frame_id *this_id)
2881 struct arm_prologue_cache *cache;
/* Lazily build the stub cache on first use.  */
2883 if (*this_cache == NULL)
2884 *this_cache = arm_make_stub_cache (this_frame);
2885 cache = *this_cache;
2887 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
/* Sniffer for the stub unwinder: accept PLT entries, and also any PC
   whose code memory cannot be read (so the prologue analyzer does not
   try and fail).  */
2891 arm_stub_unwind_sniffer (const struct frame_unwind *self,
2892 struct frame_info *this_frame,
2893 void **this_prologue_cache)
2895 CORE_ADDR addr_in_block;
2898 addr_in_block = get_frame_address_in_block (this_frame);
2899 if (in_plt_section (addr_in_block, NULL)
2900 /* We also use the stub winder if the target memory is unreadable
2901 to avoid having the prologue unwinder trying to read it. */
2902 || target_read_memory (get_frame_pc (this_frame), dummy, 4) != 0)
/* Unwinder used for stub frames (PLT entries / unreadable code).  */
2908 struct frame_unwind arm_stub_unwind = {
2911 arm_prologue_prev_register,
2913 arm_stub_unwind_sniffer
/* Frame base callback for normal frames: the base is the previous SP
   minus the frame size recorded in the prologue cache.  */
2917 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
2919 struct arm_prologue_cache *cache;
2921 if (*this_cache == NULL)
2922 *this_cache = arm_make_prologue_cache (this_frame);
2923 cache = *this_cache;
2925 return cache->prev_sp - cache->framesize;
/* Frame base table: the same address is used for the frame base,
   locals base and args base.  */
2928 struct frame_base arm_normal_base = {
2929 &arm_prologue_unwind,
2930 arm_normal_frame_base,
2931 arm_normal_frame_base,
2932 arm_normal_frame_base
2935 /* Assuming THIS_FRAME is a dummy, return the frame ID of that
2936 dummy frame. The frame ID's base needs to match the TOS value
2937 saved by save_dummy_frame_tos() and returned from
2938 arm_push_dummy_call, and the PC needs to match the dummy frame's
2941 static struct frame_id
2942 arm_dummy_id (struct gdbarch *gdbarch, struct frame_info *this_frame)
/* Build the ID from the current SP register value and the frame's PC.  */
2944 return frame_id_build (get_frame_register_unsigned (this_frame,
2946 get_frame_pc (this_frame));
2949 /* Given THIS_FRAME, find the previous frame's resume PC (which will
2950 be used to construct the previous frame's ID, after looking up the
2951 containing function). */
2954 arm_unwind_pc (struct gdbarch *gdbarch, struct frame_info *this_frame)
2957 pc = frame_unwind_register_unsigned (this_frame, ARM_PC_REGNUM);
/* Strip the Thumb bit / mode bits from the unwound PC value.  */
2958 return arm_addr_bits_remove (gdbarch, pc);
/* Return the previous frame's stack pointer by unwinding SP.  */
2962 arm_unwind_sp (struct gdbarch *gdbarch, struct frame_info *this_frame)
2964 return frame_unwind_register_unsigned (this_frame, ARM_SP_REGNUM);
/* DWARF CFI helper for registers that need fixing up after the generic
   unwind: the PC (derived from LR with mode bits stripped) and the CPSR
   (whose T bit is reconstructed from the Thumb bit of LR).  */
2967 static struct value *
2968 arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
2971 struct gdbarch * gdbarch = get_frame_arch (this_frame);
2973 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
2978 /* The PC is normally copied from the return column, which
2979 describes saves of LR. However, that version may have an
2980 extra bit set to indicate Thumb state. The bit is not
2982 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2983 return frame_unwind_got_constant (this_frame, regnum,
2984 arm_addr_bits_remove (gdbarch, lr));
2987 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
2988 cpsr = get_frame_register_unsigned (this_frame, regnum);
2989 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2990 if (IS_THUMB_ADDR (lr))
2994 return frame_unwind_got_constant (this_frame, regnum, cpsr);
/* Any other register reaching this function is a bug in the caller.  */
2997 internal_error (__FILE__, __LINE__,
2998 _("Unexpected register %d"), regnum);
/* Initialize the DWARF2 frame state for REGNUM: route the special
   registers through arm_dwarf2_prev_register, and define SP via the CFA.  */
3003 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3004 struct dwarf2_frame_state_reg *reg,
3005 struct frame_info *this_frame)
3011 reg->how = DWARF2_FRAME_REG_FN;
3012 reg->loc.fn = arm_dwarf2_prev_register;
3015 reg->how = DWARF2_FRAME_REG_CFA;
3020 /* Return true if we are in the function's epilogue, i.e. after the
3021 instruction that destroyed the function's stack frame. */
3024 thumb_in_function_epilogue_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3026 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3027 unsigned int insn, insn2;
3028 int found_return = 0, found_stack_adjust = 0;
3029 CORE_ADDR func_start, func_end;
/* Without function bounds we cannot scan; assume not in epilogue.  */
3033 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3036 /* The epilogue is a sequence of instructions along the following lines:
3038 - add stack frame size to SP or FP
3039 - [if frame pointer used] restore SP from FP
3040 - restore registers from SP [may include PC]
3041 - a return-type instruction [if PC wasn't already restored]
3043 In a first pass, we scan forward from the current PC and verify the
3044 instructions we find as compatible with this sequence, ending in a
3047 However, this is not sufficient to distinguish indirect function calls
3048 within a function from indirect tail calls in the epilogue in some cases.
3049 Therefore, if we didn't already find any SP-changing instruction during
3050 forward scan, we add a backward scanning heuristic to ensure we actually
3051 are in the epilogue. */
3054 while (scan_pc < func_end && !found_return)
3056 if (target_read_memory (scan_pc, buf, 2))
3060 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3062 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3064 else if (insn == 0x46f7) /* mov pc, lr */
3066 else if (insn == 0x46bd) /* mov sp, r7 */
3067 found_stack_adjust = 1;
3068 else if ((insn & 0xff00) == 0xb000) /* add sp, imm or sub sp, imm */
3069 found_stack_adjust = 1;
3070 else if ((insn & 0xfe00) == 0xbc00) /* pop <registers> */
3072 found_stack_adjust = 1;
3073 if (insn & 0x0100) /* <registers> include PC. */
3076 else if ((insn & 0xe000) == 0xe000) /* 32-bit Thumb-2 instruction */
/* Fetch the second halfword of the 32-bit encoding.  */
3078 if (target_read_memory (scan_pc, buf, 2))
3082 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3084 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3086 found_stack_adjust = 1;
3087 if (insn2 & 0x8000) /* <registers> include PC. */
3090 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3091 && (insn2 & 0x0fff) == 0x0b04)
3093 found_stack_adjust = 1;
3094 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3097 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3098 && (insn2 & 0x0e00) == 0x0a00)
3099 found_stack_adjust = 1;
3110 /* Since any instruction in the epilogue sequence, with the possible
3111 exception of return itself, updates the stack pointer, we need to
3112 scan backwards for at most one instruction. Try either a 16-bit or
3113 a 32-bit instruction. This is just a heuristic, so we do not worry
3114 too much about false positives. */
3116 if (!found_stack_adjust)
3118 if (pc - 4 < func_start)
3120 if (target_read_memory (pc - 4, buf, 4))
/* Interpret the 4 bytes before PC either as one 32-bit Thumb-2
   instruction (insn:insn2) or as two 16-bit instructions, where
   only the second (insn2) immediately precedes PC.  */
3123 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3124 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3126 if (insn2 == 0x46bd) /* mov sp, r7 */
3127 found_stack_adjust = 1;
3128 else if ((insn2 & 0xff00) == 0xb000) /* add sp, imm or sub sp, imm */
3129 found_stack_adjust = 1;
3130 else if ((insn2 & 0xff00) == 0xbc00) /* pop <registers> without PC */
3131 found_stack_adjust = 1;
3132 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3133 found_stack_adjust = 1;
3134 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3135 && (insn2 & 0x0fff) == 0x0b04)
3136 found_stack_adjust = 1;
3137 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3138 && (insn2 & 0x0e00) == 0x0a00)
3139 found_stack_adjust = 1;
3142 return found_stack_adjust;
3145 /* Return true if we are in the function's epilogue, i.e. after the
3146 instruction that destroyed the function's stack frame. */
3149 arm_in_function_epilogue_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3151 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3153 int found_return, found_stack_adjust;
3154 CORE_ADDR func_start, func_end;
/* Thumb code has its own scanner with different encodings.  */
3156 if (arm_pc_is_thumb (gdbarch, pc))
3157 return thumb_in_function_epilogue_p (gdbarch, pc);
3159 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3162 /* We are in the epilogue if the previous instruction was a stack
3163 adjustment and the next instruction is a possible return (bx, mov
3164 pc, or pop). We could have to scan backwards to find the stack
3165 adjustment, or forwards to find the return, but this is a decent
3166 approximation. First scan forwards. */
3169 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
/* Skip unconditional (NV-space) encodings; only test predicated forms.  */
3170 if (bits (insn, 28, 31) != INST_NV)
3172 if ((insn & 0x0ffffff0) == 0x012fff10)
/* BX <Rm>.  */
3175 else if ((insn & 0x0ffffff0) == 0x01a0f000)
/* MOV PC, <Rm>.  */
3178 else if ((insn & 0x0fff0000) == 0x08bd0000
3179 && (insn & 0x0000c000) != 0)
3180 /* POP (LDMIA), including PC or LR. */
3187 /* Scan backwards. This is just a heuristic, so do not worry about
3188 false positives from mode changes. */
3190 if (pc < func_start + 4)
3193 found_stack_adjust = 0;
3194 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
3195 if (bits (insn, 28, 31) != INST_NV)
3197 if ((insn & 0x0df0f000) == 0x0080d000)
3198 /* ADD SP (register or immediate). */
3199 found_stack_adjust = 1;
3200 else if ((insn & 0x0df0f000) == 0x0040d000)
3201 /* SUB SP (register or immediate). */
3202 found_stack_adjust = 1;
3203 else if ((insn & 0x0ffffff0) == 0x01a0d000)
/* MOV SP, <Rm>.  */
3205 found_stack_adjust = 1;
3206 else if ((insn & 0x0fff0000) == 0x08bd0000)
/* POP (LDMIA sp!).  */
3208 found_stack_adjust = 1;
3211 if (found_stack_adjust)
3218 /* When arguments must be pushed onto the stack, they go on in reverse
3219 order. The code below implements a FILO (stack) to do this. */
/* Singly-linked node holding one stack-argument chunk (see push/pop below).  */
3224 struct stack_item *prev;
/* Push a copy of CONTENTS (LEN bytes) onto the stack-item list headed
   by PREV; returns the new list head.  The data is heap-allocated and
   released by pop_stack_item.  */
3228 static struct stack_item *
3229 push_stack_item (struct stack_item *prev, const void *contents, int len)
3231 struct stack_item *si;
3232 si = xmalloc (sizeof (struct stack_item));
3233 si->data = xmalloc (len);
3236 memcpy (si->data, contents, len);
/* Pop the head of the stack-item list SI, freeing it; returns the new head.  */
3240 static struct stack_item *
3241 pop_stack_item (struct stack_item *si)
3243 struct stack_item *dead = si;
3251 /* Return the alignment (in bytes) of the given type. */
3254 arm_type_align (struct type *t)
3260 t = check_typedef (t);
3261 switch (TYPE_CODE (t))
3264 /* Should never happen. */
3265 internal_error (__FILE__, __LINE__, _("unknown type alignment"));
/* Scalar types: alignment equals size.  */
3269 case TYPE_CODE_ENUM:
3273 case TYPE_CODE_RANGE:
3274 case TYPE_CODE_BITSTRING:
3276 case TYPE_CODE_CHAR:
3277 case TYPE_CODE_BOOL:
3278 return TYPE_LENGTH (t);
3280 case TYPE_CODE_ARRAY:
3281 case TYPE_CODE_COMPLEX:
3282 /* TODO: What about vector types? */
/* Alignment of an array/complex is that of its element type.  */
3283 return arm_type_align (TYPE_TARGET_TYPE (t));
3285 case TYPE_CODE_STRUCT:
3286 case TYPE_CODE_UNION:
/* Aggregates: alignment derived from the field alignments.  */
3288 for (n = 0; n < TYPE_NFIELDS (t); n++)
3290 falign = arm_type_align (TYPE_FIELD_TYPE (t, n));
3298 /* Possible base types for a candidate for passing and returning in
/* Classification used by the VFP co-processor register candidate
   (CPRC) logic below.  */
3301 enum arm_vfp_cprc_base_type
3310 /* The length of one element of base type B. */
3313 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3317 case VFP_CPRC_SINGLE:
3319 case VFP_CPRC_DOUBLE:
3321 case VFP_CPRC_VEC64:
3323 case VFP_CPRC_VEC128:
/* Any other classification is a caller bug.  */
3326 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3331 /* The character ('s', 'd' or 'q') for the type of VFP register used
3332 for passing base type B. */
3335 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3339 case VFP_CPRC_SINGLE:
3341 case VFP_CPRC_DOUBLE:
3343 case VFP_CPRC_VEC64:
3345 case VFP_CPRC_VEC128:
/* Any other classification is a caller bug.  */
3348 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3353 /* Determine whether T may be part of a candidate for passing and
3354 returning in VFP registers, ignoring the limit on the total number
3355 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3356 classification of the first valid component found; if it is not
3357 VFP_CPRC_UNKNOWN, all components must have the same classification
3358 as *BASE_TYPE. If it is found that T contains a type not permitted
3359 for passing and returning in VFP registers, a type differently
3360 classified from *BASE_TYPE, or two types differently classified
3361 from each other, return -1, otherwise return the total number of
3362 base-type elements found (possibly 0 in an empty structure or
3363 array). Vectors and complex types are not currently supported,
3364 matching the generic AAPCS support. */
3367 arm_vfp_cprc_sub_candidate (struct type *t,
3368 enum arm_vfp_cprc_base_type *base_type)
3370 t = check_typedef (t);
3371 switch (TYPE_CODE (t))
/* Floating-point scalars classify by their size: 4 bytes ->
   single, 8 bytes -> double.  */
3374 switch (TYPE_LENGTH (t))
3377 if (*base_type == VFP_CPRC_UNKNOWN)
3378 *base_type = VFP_CPRC_SINGLE;
3379 else if (*base_type != VFP_CPRC_SINGLE)
3384 if (*base_type == VFP_CPRC_UNKNOWN)
3385 *base_type = VFP_CPRC_DOUBLE;
3386 else if (*base_type != VFP_CPRC_DOUBLE)
3395 case TYPE_CODE_ARRAY:
/* Arrays: classify the element type, then count how many base
   elements fit in the array's total size.  */
3399 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t), base_type);
3402 if (TYPE_LENGTH (t) == 0)
3404 gdb_assert (count == 0);
3407 else if (count == 0)
3409 unitlen = arm_vfp_cprc_unit_length (*base_type);
3410 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3411 return TYPE_LENGTH (t) / unitlen;
3415 case TYPE_CODE_STRUCT:
/* Structs: sum the element counts of all fields; every field must
   classify to the same base type.  */
3420 for (i = 0; i < TYPE_NFIELDS (t); i++)
3422 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3424 if (sub_count == -1)
3428 if (TYPE_LENGTH (t) == 0)
3430 gdb_assert (count == 0);
3433 else if (count == 0)
/* Reject structs with padding or extra non-CPRC content: the
   total size must match the element count exactly.  */
3435 unitlen = arm_vfp_cprc_unit_length (*base_type);
3436 if (TYPE_LENGTH (t) != unitlen * count)
3441 case TYPE_CODE_UNION:
/* Unions: the element count is the maximum over all members.  */
3446 for (i = 0; i < TYPE_NFIELDS (t); i++)
3448 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3450 if (sub_count == -1)
3452 count = (count > sub_count ? count : sub_count);
3454 if (TYPE_LENGTH (t) == 0)
3456 gdb_assert (count == 0);
3459 else if (count == 0)
3461 unitlen = arm_vfp_cprc_unit_length (*base_type);
3462 if (TYPE_LENGTH (t) != unitlen * count)
3474 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3475 if passed to or returned from a non-variadic function with the VFP
3476 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3477 *BASE_TYPE to the base type for T and *COUNT to the number of
3478 elements of that base type before returning. */
3481 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3484 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3485 int c = arm_vfp_cprc_sub_candidate (t, &b);
/* A CPRC must have between 1 and 4 base-type elements.  */
3486 if (c <= 0 || c > 4)
3493 /* Return 1 if the VFP ABI should be used for passing arguments to and
3494 returning values from a function of type FUNC_TYPE, 0
3498 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3500 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3501 /* Variadic functions always use the base ABI. Assume that functions
3502 without debug info are not variadic. */
3503 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3505 /* The VFP ABI is only supported as a variant of AAPCS. */
3506 if (tdep->arm_abi != ARM_ABI_AAPCS)
/* Finally, the float model itself must be VFP.  */
3508 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3511 /* We currently only support passing parameters in integer registers, which
3512 conforms with GCC's default model, and VFP argument passing following
3513 the VFP variant of AAPCS. Several other variants exist and
3514 we should probably support some of them based on the selected ABI. */
3517 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
3518 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3519 struct value **args, CORE_ADDR sp, int struct_return,
3520 CORE_ADDR struct_addr)
3522 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3526 struct stack_item *si = NULL;
/* Bitmask of free VFP single-precision registers s0-s15.  */
3529 unsigned vfp_regs_free = (1 << 16) - 1;
3531 /* Determine the type of this function and whether the VFP ABI
3533 ftype = check_typedef (value_type (function));
3534 if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3535 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3536 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
3538 /* Set the return address. For the ARM, the return breakpoint is
3539 always at BP_ADDR. */
3540 if (arm_pc_is_thumb (gdbarch, bp_addr))
3542 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
3544 /* Walk through the list of args and determine how large a temporary
3545 stack is required. Need to take care here as structs may be
3546 passed on the stack, and we have to push them. */
3549 argreg = ARM_A1_REGNUM;
3552 /* The struct_return pointer occupies the first parameter
3553 passing register. */
3557 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
3558 gdbarch_register_name (gdbarch, argreg),
3559 paddress (gdbarch, struct_addr));
3560 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3564 for (argnum = 0; argnum < nargs; argnum++)
3567 struct type *arg_type;
3568 struct type *target_type;
3569 enum type_code typecode;
3570 const bfd_byte *val;
3572 enum arm_vfp_cprc_base_type vfp_base_type;
3574 int may_use_core_reg = 1;
3576 arg_type = check_typedef (value_type (args[argnum]));
3577 len = TYPE_LENGTH (arg_type);
3578 target_type = TYPE_TARGET_TYPE (arg_type);
3579 typecode = TYPE_CODE (arg_type);
3580 val = value_contents (args[argnum]);
3582 align = arm_type_align (arg_type);
3583 /* Round alignment up to a whole number of words. */
3584 align = (align + INT_REGISTER_SIZE - 1) & ~(INT_REGISTER_SIZE - 1);
3585 /* Different ABIs have different maximum alignments. */
3586 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3588 /* The APCS ABI only requires word alignment. */
3589 align = INT_REGISTER_SIZE;
3593 /* The AAPCS requires at most doubleword alignment. */
3594 if (align > INT_REGISTER_SIZE * 2)
3595 align = INT_REGISTER_SIZE * 2;
/* Try VFP register allocation for CPRC arguments first.  */
3599 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3607 /* Because this is a CPRC it cannot go in a core register or
3608 cause a core register to be skipped for alignment.
3609 Either it goes in VFP registers and the rest of this loop
3610 iteration is skipped for this argument, or it goes on the
3611 stack (and the stack alignment code is correct for this
3613 may_use_core_reg = 0;
/* Find a contiguous run of free VFP registers large enough for
   all the base-type elements of this argument.  */
3615 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3616 shift = unit_length / 4;
3617 mask = (1 << (shift * vfp_base_count)) - 1;
3618 for (regno = 0; regno < 16; regno += shift)
3619 if (((vfp_regs_free >> regno) & mask) == mask)
3628 vfp_regs_free &= ~(mask << regno);
3629 reg_scaled = regno / shift;
3630 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3631 for (i = 0; i < vfp_base_count; i++)
/* Quad registers need the special NEON write path; the rest
   go through the user-register name lookup.  */
3635 if (reg_char == 'q')
3636 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
3637 val + i * unit_length);
3640 sprintf (name_buf, "%c%d", reg_char, reg_scaled + i);
3641 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3643 regcache_cooked_write (regcache, regnum,
3644 val + i * unit_length);
3651 /* This CPRC could not go in VFP registers, so all VFP
3652 registers are now marked as used. */
3657 /* Push stack padding for doubleword alignment. */
3658 if (nstack & (align - 1))
3660 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3661 nstack += INT_REGISTER_SIZE;
3664 /* Doubleword aligned quantities must go in even register pairs. */
3665 if (may_use_core_reg
3666 && argreg <= ARM_LAST_ARG_REGNUM
3667 && align > INT_REGISTER_SIZE
3671 /* If the argument is a pointer to a function, and it is a
3672 Thumb function, create a LOCAL copy of the value and set
3673 the THUMB bit in it. */
3674 if (TYPE_CODE_PTR == typecode
3675 && target_type != NULL
3676 && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
3678 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
3679 if (arm_pc_is_thumb (gdbarch, regval))
3681 bfd_byte *copy = alloca (len);
3682 store_unsigned_integer (copy, len, byte_order,
3683 MAKE_THUMB_ADDR (regval));
3688 /* Copy the argument to general registers or the stack in
3689 register-sized pieces. Large arguments are split between
3690 registers and stack. */
3693 int partial_len = len < INT_REGISTER_SIZE ? len : INT_REGISTER_SIZE;
3695 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
3697 /* The argument is being passed in a general purpose
3700 = extract_unsigned_integer (val, partial_len, byte_order);
/* On big-endian targets a partial word is left-justified
   within the register.  */
3701 if (byte_order == BFD_ENDIAN_BIG)
3702 regval <<= (INT_REGISTER_SIZE - partial_len) * 8;
3704 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
3706 gdbarch_register_name
3708 phex (regval, INT_REGISTER_SIZE));
3709 regcache_cooked_write_unsigned (regcache, argreg, regval);
3714 /* Push the arguments onto the stack. */
3716 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3718 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3719 nstack += INT_REGISTER_SIZE;
3726 /* If we have an odd number of words to push, then decrement the stack
3727 by one word now, so first stack argument will be dword aligned. */
/* Drain the FILO item list: this writes arguments in the correct
   (increasing address) order.  */
3734 write_memory (sp, si->data, si->len);
3735 si = pop_stack_item (si);
3738 /* Finally, update the SP register. */
3739 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3745 /* Always align the frame to an 8-byte boundary. This is required on
3746 some platforms and harmless on the rest. */
3749 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3751 /* Align the stack to eight bytes. */
3752 return sp & ~ (CORE_ADDR) 7;
/* Print the FPA exception flag names corresponding to the low five
   bits of FLAGS (invalid op, divide by zero, overflow, underflow,
   inexact) to stdout.  */
3756 print_fpu_flags (int flags)
3758 if (flags & (1 << 0))
3759 fputs ("IVO ", stdout);
3760 if (flags & (1 << 1))
3761 fputs ("DVZ ", stdout);
3762 if (flags & (1 << 2))
3763 fputs ("OFL ", stdout);
3764 if (flags & (1 << 3))
3765 fputs ("UFL ", stdout);
3766 if (flags & (1 << 4))
3767 fputs ("INX ", stdout);
/* Print interesting information about the floating point processor
   (if present) or emulator.  Decodes the FPS status register: bit 31
   distinguishes hardware from software FPU, bits 24-30 give the FPU
   type, and the mask/flags fields are decoded by print_fpu_flags.  */
arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
		      struct frame_info *frame, const char *args)
  unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);

  /* FPU type identifier lives in bits 24-30.  */
  type = (status >> 24) & 127;
  if (status & (1 << 31))
    printf (_("Hardware FPU type %d\n"), type);
    printf (_("Software FPU type %d\n"), type);
  /* i18n: [floating point unit] mask */
  fputs (_("mask: "), stdout);
  /* The mask field starts at bit 16.  */
  print_fpu_flags (status >> 16);
  /* i18n: [floating point unit] flags */
  fputs (_("flags: "), stdout);
  /* The cumulative flags occupy the low bits.  */
  print_fpu_flags (status);
/* Construct the ARM extended floating point type.  The type object is
   created lazily on first use and cached in the per-gdbarch tdep.  */
static struct type *
arm_ext_type (struct gdbarch *gdbarch)
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);

  if (!tdep->arm_ext_type)
    /* Size -1 lets the float format determine the type's size.  */
    = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
		       floatformats_arm_ext);

  return tdep->arm_ext_type;
/* Build (lazily, cached in tdep) the composite type used to display a
   64-bit NEON D register: every field is an alternative view of the
   same 8 bytes — vectors of u8/u16/u32, a u64 scalar, a pair of
   floats, and a double.  */
static struct type *
arm_neon_double_type (struct gdbarch *gdbarch)
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);

  if (tdep->neon_double_type == NULL)
      struct type *t, *elem;

      t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
      elem = builtin_type (gdbarch)->builtin_uint8;
      append_composite_type_field (t, "u8", init_vector_type (elem, 8));
      elem = builtin_type (gdbarch)->builtin_uint16;
      append_composite_type_field (t, "u16", init_vector_type (elem, 4));
      elem = builtin_type (gdbarch)->builtin_uint32;
      append_composite_type_field (t, "u32", init_vector_type (elem, 2));
      elem = builtin_type (gdbarch)->builtin_uint64;
      append_composite_type_field (t, "u64", elem);
      elem = builtin_type (gdbarch)->builtin_float;
      append_composite_type_field (t, "f32", init_vector_type (elem, 2));
      elem = builtin_type (gdbarch)->builtin_double;
      append_composite_type_field (t, "f64", elem);

      /* Flag the composite itself as a vector type and name it.  */
      TYPE_VECTOR (t) = 1;
      TYPE_NAME (t) = "neon_d";
      tdep->neon_double_type = t;

  return tdep->neon_double_type;
/* FIXME: The vector types are not correctly ordered on big-endian
   targets.  Just as s0 is the low bits of d0, d0[0] is also the low
   bits of d0 - regardless of what unit size is being held in d0.  So
   the offset of the first uint8 in d0 is 7, but the offset of the
   first float is 4.  This code works as-is for little-endian
   targets.  */

/* Build (lazily, cached in tdep) the composite type used to display a
   128-bit NEON Q register; parallel to arm_neon_double_type but with
   twice as many elements per field.  */
static struct type *
arm_neon_quad_type (struct gdbarch *gdbarch)
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);

  if (tdep->neon_quad_type == NULL)
      struct type *t, *elem;

      t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
      elem = builtin_type (gdbarch)->builtin_uint8;
      append_composite_type_field (t, "u8", init_vector_type (elem, 16));
      elem = builtin_type (gdbarch)->builtin_uint16;
      append_composite_type_field (t, "u16", init_vector_type (elem, 8));
      elem = builtin_type (gdbarch)->builtin_uint32;
      append_composite_type_field (t, "u32", init_vector_type (elem, 4));
      elem = builtin_type (gdbarch)->builtin_uint64;
      append_composite_type_field (t, "u64", init_vector_type (elem, 2));
      elem = builtin_type (gdbarch)->builtin_float;
      append_composite_type_field (t, "f32", init_vector_type (elem, 4));
      elem = builtin_type (gdbarch)->builtin_double;
      append_composite_type_field (t, "f64", init_vector_type (elem, 2));

      /* Flag the composite itself as a vector type and name it.  */
      TYPE_VECTOR (t) = 1;
      TYPE_NAME (t) = "neon_q";
      tdep->neon_quad_type = t;

  return tdep->neon_quad_type;
/* Return the GDB type object for the "standard" data type of data in
   register REGNUM.  */
static struct type *
arm_register_type (struct gdbarch *gdbarch, int regnum)
  int num_regs = gdbarch_num_regs (gdbarch);

  /* Pseudo registers: the 32 VFP single pseudos come first, then the
     16 NEON quad (Q) pseudos.  */
  if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
      && regnum >= num_regs && regnum < num_regs + 32)
    return builtin_type (gdbarch)->builtin_float;

  if (gdbarch_tdep (gdbarch)->have_neon_pseudos
      && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
    return arm_neon_quad_type (gdbarch);

  /* If the target description has register information, we are only
     in this function so that we can override the types of
     double-precision registers for NEON.  */
  if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
      struct type *t = tdesc_register_type (gdbarch, regnum);

      if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
	  && TYPE_CODE (t) == TYPE_CODE_FLT
	  && gdbarch_tdep (gdbarch)->have_neon)
	return arm_neon_double_type (gdbarch);

  if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
      /* FPA registers display as void when the target has no FPA.  */
      if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
	return builtin_type (gdbarch)->builtin_void;

      return arm_ext_type (gdbarch);
  else if (regnum == ARM_SP_REGNUM)
    return builtin_type (gdbarch)->builtin_data_ptr;
  else if (regnum == ARM_PC_REGNUM)
    return builtin_type (gdbarch)->builtin_func_ptr;
  else if (regnum >= ARRAY_SIZE (arm_register_names))
    /* These registers are only supported on targets which supply
       an XML description.  */
    return builtin_type (gdbarch)->builtin_int0;

  return builtin_type (gdbarch)->builtin_uint32;
/* Map a DWARF register REGNUM onto the appropriate GDB register
   number.  */
arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
  /* Core integer regs.  */
  if (reg >= 0 && reg <= 15)

  /* Legacy FPA encoding.  These were once used in a way which
     overlapped with VFP register numbering, so their use is
     discouraged, but GDB doesn't support the ARM toolchain
     which used them for VFP.  */
  if (reg >= 16 && reg <= 23)
    return ARM_F0_REGNUM + reg - 16;

  /* New assignments for the FPA registers.  */
  if (reg >= 96 && reg <= 103)
    return ARM_F0_REGNUM + reg - 96;

  /* WMMX register assignments.  */
  if (reg >= 104 && reg <= 111)
    return ARM_WCGR0_REGNUM + reg - 104;

  if (reg >= 112 && reg <= 127)
    return ARM_WR0_REGNUM + reg - 112;

  if (reg >= 192 && reg <= 199)
    return ARM_WC0_REGNUM + reg - 192;

  /* VFP v2 registers.  A double precision value is actually
     in d1 rather than s2, but the ABI only defines numbering
     for the single precision registers.  This will "just work"
     in GDB for little endian targets (we'll read eight bytes,
     starting in s0 and then progressing to s1), but will be
     reversed on big endian targets with VFP.  This won't
     be a problem for the new Neon quad registers; you're supposed
     to use DW_OP_piece for those.  */
  if (reg >= 64 && reg <= 95)
      /* Resolve by user-visible name "s0".."s31".  */
      sprintf (name_buf, "s%d", reg - 64);
      return user_reg_map_name_to_regnum (gdbarch, name_buf,

  /* VFP v3 / Neon registers.  This range is also used for VFP v2
     registers, except that it now describes d0 instead of s0.  */
  if (reg >= 256 && reg <= 287)
      /* Resolve by user-visible name "d0".."d31".  */
      sprintf (name_buf, "d%d", reg - 256);
      return user_reg_map_name_to_regnum (gdbarch, name_buf,
/* Map GDB internal REGNUM onto the Arm simulator register numbers.  */
arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
  /* NOTE(review): `reg' is declared in elided code — presumably a
     working copy of REGNUM that is reduced as each bank of registers
     is skipped; confirm against the full source.  */
  gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));

  /* iWMMXt data (WRn), control (WCn) and scratch (WCGRn) registers
     map onto the simulator's coprocessor banks.  */
  if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
    return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;

  if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
    return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;

  if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
    return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;

  /* Core registers, then FPA data registers, then status registers.  */
  if (reg < NUM_GREGS)
    return SIM_ARM_R0_REGNUM + reg;

  if (reg < NUM_FREGS)
    return SIM_ARM_FP0_REGNUM + reg;

  if (reg < NUM_SREGS)
    return SIM_ARM_FPS_REGNUM + reg;

  internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
/* NOTE: cagney/2001-08-20: Both convert_from_extended() and
   convert_to_extended() use floatformat_arm_ext_littlebyte_bigword.
   It is thought that this is the floating-point register format on
   little-endian systems.  */

/* Convert the ARM extended-precision value at PTR into format FMT,
   storing the result at DBL.  ENDIANESS selects which extended
   layout PTR uses; the value passes through a DOUBLEST (d).  */
convert_from_extended (const struct floatformat *fmt, const void *ptr,
		       void *dbl, int endianess)
  if (endianess == BFD_ENDIAN_BIG)
    floatformat_to_doublest (&floatformat_arm_ext_big, ptr, &d);
    floatformat_to_doublest (&floatformat_arm_ext_littlebyte_bigword,

  floatformat_from_doublest (fmt, &d, dbl);
/* Inverse of convert_from_extended: convert the value at PTR (in
   format FMT) into the ARM extended-precision layout selected by
   ENDIANESS, storing the result at DBL.  */
convert_to_extended (const struct floatformat *fmt, void *dbl, const void *ptr,
  floatformat_to_doublest (fmt, ptr, &d);
  if (endianess == BFD_ENDIAN_BIG)
    floatformat_from_doublest (&floatformat_arm_ext_big, &d, dbl);
    floatformat_from_doublest (&floatformat_arm_ext_littlebyte_bigword,
/* Return non-zero iff ARM condition code COND is satisfied by the
   CPSR flag bits in STATUS_REG.  AL/NV are handled up front; the
   remaining returns correspond to the standard condition codes, in
   architectural order.  */
condition_true (unsigned long cond, unsigned long status_reg)
  if (cond == INST_AL || cond == INST_NV)

      /* EQ: Z set.  */
      return ((status_reg & FLAG_Z) != 0);
      /* NE: Z clear.  */
      return ((status_reg & FLAG_Z) == 0);
      /* CS/HS: C set.  */
      return ((status_reg & FLAG_C) != 0);
      /* CC/LO: C clear.  */
      return ((status_reg & FLAG_C) == 0);
      /* MI: N set.  */
      return ((status_reg & FLAG_N) != 0);
      /* PL: N clear.  */
      return ((status_reg & FLAG_N) == 0);
      /* VS: V set.  */
      return ((status_reg & FLAG_V) != 0);
      /* VC: V clear.  */
      return ((status_reg & FLAG_V) == 0);
      /* HI: C set and Z clear.  */
      return ((status_reg & (FLAG_C | FLAG_Z)) == FLAG_C);
      /* LS: C clear or Z set.  */
      return ((status_reg & (FLAG_C | FLAG_Z)) != FLAG_C);
      /* GE: N equals V.  */
      return (((status_reg & FLAG_N) == 0) == ((status_reg & FLAG_V) == 0));
      /* LT: N differs from V.  */
      return (((status_reg & FLAG_N) == 0) != ((status_reg & FLAG_V) == 0));
      /* GT: Z clear and N equals V.  */
      return (((status_reg & FLAG_Z) == 0)
	      && (((status_reg & FLAG_N) == 0)
		  == ((status_reg & FLAG_V) == 0)));
      /* LE: Z set or N differs from V.  */
      return (((status_reg & FLAG_Z) != 0)
	      || (((status_reg & FLAG_N) == 0)
		  != ((status_reg & FLAG_V) == 0)));
/* Evaluate the shifter operand of an ARM data-processing instruction:
   decode register Rm and the shift type/amount encoded in INST and
   return the resulting 32-bit value.  CARRY supplies the C flag (for
   RRX); PC_VAL is the prefetch-adjusted PC used when r15 is read.  */
static unsigned long
shifted_reg_val (struct frame_info *frame, unsigned long inst, int carry,
		 unsigned long pc_val, unsigned long status_reg)
  unsigned long res, shift;
  int rm = bits (inst, 0, 3);
  unsigned long shifttype = bits (inst, 5, 6);

      /* Register-specified shift: amount is the low byte of Rs;
	 reading the PC here yields pc + 8.  */
      int rs = bits (inst, 8, 11);
      shift = (rs == 15 ? pc_val + 8
	       : get_frame_register_unsigned (frame, rs)) & 0xFF;
      /* Immediate shift amount.  */
      shift = bits (inst, 7, 11);

  /* Reading the PC as Rm yields pc + 12 for register-specified shifts
     (bit 4 set), pc + 8 otherwise.  */
     ? (pc_val + (bit (inst, 4) ? 12 : 8))
     : get_frame_register_unsigned (frame, rm));

      /* LSL.  */
      res = shift >= 32 ? 0 : res << shift;
      /* LSR.  */
      res = shift >= 32 ? 0 : res >> shift;
      /* ASR: replicate the sign bit into the vacated positions.  */
      res = ((res & 0x80000000L)
	     ? ~((~res) >> shift) : res >> shift);

    case 3:			/* ROR/RRX */
      /* RRX (shift == 0): rotate right one bit through carry.  */
      res = (res >> 1) | (carry ? 0x80000000L : 0);
      res = (res >> shift) | (res << (32 - shift));

  return res & 0xffffffff;
/* Return number of 1-bits in VAL.  */

static int
bitcount (unsigned long val)
{
  int count = 0;

  /* Kernighan's trick: each step clears the lowest set bit, so the
     loop runs once per 1-bit.  */
  while (val != 0)
    {
      val &= val - 1;
      count++;
    }

  return count;
}
/* Return the size in bytes of the complete Thumb instruction whose
   first halfword is INST1.  */

static int
thumb_insn_size (unsigned short inst1)
{
  /* Halfwords whose top five bits are 0b11101, 0b11110 or 0b11111
     introduce a 32-bit Thumb-2 instruction; anything else is a
     complete 16-bit instruction.  */
  return ((inst1 & 0xe000) == 0xe000 && (inst1 & 0x1800) != 0) ? 4 : 2;
}
/* Advance the Thumb-2 ITSTATE value past one instruction and return
   the new state.  */
thumb_advance_itstate (unsigned int itstate)
{
  /* Keep IT[7:5] (the base condition) and shift the per-instruction
     condition/count bits IT[4:0] left by one.  */
  unsigned int next_state = (itstate & 0xe0) | ((itstate << 1) & 0x1f);

  /* A zero low nibble means the IT block has finished, in which case
     the whole state resets to zero.  */
  return (next_state & 0x0f) == 0 ? 0 : next_state;
}
/* Find the next PC after the current instruction executes.  In some
   cases we can not statically determine the answer (see the IT state
   handling in this function); in that case, a breakpoint may be
   inserted in addition to the returned PC, which will be used to set
   another breakpoint by our caller.  */
thumb_get_next_pc_raw (struct frame_info *frame, CORE_ADDR pc, int insert_bkpt)
  struct gdbarch *gdbarch = get_frame_arch (frame);
  struct address_space *aspace = get_frame_address_space (frame);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned long pc_val = ((unsigned long) pc) + 4;	/* PC after prefetch */
  unsigned short inst1;
  CORE_ADDR nextpc = pc + 2;		/* Default is next instruction.  */
  unsigned long offset;
  ULONGEST status, itstate;

  /* Returned addresses carry the Thumb bit so the caller knows the
     execution state of the destination.  */
  nextpc = MAKE_THUMB_ADDR (nextpc);
  pc_val = MAKE_THUMB_ADDR (pc_val);

  inst1 = read_memory_unsigned_integer (pc, 2, byte_order_for_code);

  /* Thumb-2 conditional execution support.  There are eight bits in
     the CPSR which describe conditional execution state.  Once
     reconstructed (they're in a funny order), the low five bits
     describe the low bit of the condition for each instruction and
     how many instructions remain.  The high three bits describe the
     base condition.  One of the low four bits will be set if an IT
     block is active.  These bits read as zero on earlier
     processors.  */
  status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
  itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);

  /* If-Then handling.  On GNU/Linux, where this routine is used, we
     use an undefined instruction as a breakpoint.  Unlike BKPT, IT
     can disable execution of the undefined instruction.  So we might
     miss the breakpoint if we set it on a skipped conditional
     instruction.  Because conditional instructions can change the
     flags, affecting the execution of further instructions, we may
     need to set two breakpoints.  */
  if (gdbarch_tdep (gdbarch)->thumb2_breakpoint != NULL)
      if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
	  /* An IT instruction.  Because this instruction does not
	     modify the flags, we can accurately predict the next
	     executed instruction.  */
	  itstate = inst1 & 0x00ff;
	  pc += thumb_insn_size (inst1);

	  /* Skip instructions whose condition fails.  */
	  while (itstate != 0 && ! condition_true (itstate >> 4, status))
	      inst1 = read_memory_unsigned_integer (pc, 2,
						    byte_order_for_code);
	      pc += thumb_insn_size (inst1);
	      itstate = thumb_advance_itstate (itstate);

	  return MAKE_THUMB_ADDR (pc);
      else if (itstate != 0)
	  /* We are in a conditional block.  Check the condition.  */
	  if (! condition_true (itstate >> 4, status))
	      /* Advance to the next executed instruction.  */
	      pc += thumb_insn_size (inst1);
	      itstate = thumb_advance_itstate (itstate);

	      while (itstate != 0 && ! condition_true (itstate >> 4, status))
		  inst1 = read_memory_unsigned_integer (pc, 2,
							byte_order_for_code);
		  pc += thumb_insn_size (inst1);
		  itstate = thumb_advance_itstate (itstate);

	      return MAKE_THUMB_ADDR (pc);
	  else if ((itstate & 0x0f) == 0x08)
	      /* This is the last instruction of the conditional
		 block, and it is executed.  We can handle it normally
		 because the following instruction is not conditional,
		 and we must handle it normally because it is
		 permitted to branch.  Fall through.  */

	      /* There are conditional instructions after this one.
		 If this instruction modifies the flags, then we can
		 not predict what the next executed instruction will
		 be.  Fortunately, this instruction is architecturally
		 forbidden to branch; we know it will fall through.
		 Start by skipping past it.  */
	      pc += thumb_insn_size (inst1);
	      itstate = thumb_advance_itstate (itstate);

	      /* Set a breakpoint on the following instruction.  */
	      gdb_assert ((itstate & 0x0f) != 0);
	      insert_single_step_breakpoint (gdbarch, aspace, pc);
	      cond_negated = (itstate >> 4) & 1;

	      /* Skip all following instructions with the same
		 condition.  If there is a later instruction in the IT
		 block with the opposite condition, set the other
		 breakpoint there.  If not, then set a breakpoint on
		 the instruction after the IT block.  */
		  inst1 = read_memory_unsigned_integer (pc, 2,
							byte_order_for_code);
		  pc += thumb_insn_size (inst1);
		  itstate = thumb_advance_itstate (itstate);
	      while (itstate != 0 && ((itstate >> 4) & 1) == cond_negated);

	      return MAKE_THUMB_ADDR (pc);
  else if (itstate & 0x0f)
      /* We are in a conditional block.  Check the condition.  */
      int cond = itstate >> 4;

      if (! condition_true (cond, status))
	  /* Advance to the next instruction.  All the 32-bit
	     instructions share a common prefix.  */
	  if ((inst1 & 0xe000) == 0xe000 && (inst1 & 0x1800) != 0)
	    return MAKE_THUMB_ADDR (pc + 4);
	  return MAKE_THUMB_ADDR (pc + 2);

      /* Otherwise, handle the instruction normally.  */

  if ((inst1 & 0xff00) == 0xbd00)	/* pop {rlist, pc} */
      /* Fetch the saved PC from the stack.  It's stored above
	 all of the other registers.  */
      offset = bitcount (bits (inst1, 0, 7)) * INT_REGISTER_SIZE;
      sp = get_frame_register_unsigned (frame, ARM_SP_REGNUM);
      nextpc = read_memory_unsigned_integer (sp + offset, 4, byte_order);
  else if ((inst1 & 0xf000) == 0xd000)	/* conditional branch */
      unsigned long cond = bits (inst1, 8, 11);

      if (cond == 0x0f)		/* 0x0f = SWI */
	  struct gdbarch_tdep *tdep;
	  tdep = gdbarch_tdep (gdbarch);

	  /* The OS ABI decides where a system call returns.  */
	  if (tdep->syscall_next_pc != NULL)
	    nextpc = tdep->syscall_next_pc (frame);
      else if (cond != 0x0f && condition_true (cond, status))
	nextpc = pc_val + (sbits (inst1, 0, 7) << 1);
  else if ((inst1 & 0xf800) == 0xe000)	/* unconditional branch */
      nextpc = pc_val + (sbits (inst1, 0, 10) << 1);
  else if ((inst1 & 0xe000) == 0xe000)	/* 32-bit instruction */
      unsigned short inst2;
      inst2 = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);

      /* Default to the next instruction.  */
      nextpc = MAKE_THUMB_ADDR (nextpc);

      if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
	  /* Branches and miscellaneous control instructions.  */
	  if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
	      /* B, BL, BLX.  */
	      int j1, j2, imm1, imm2;

	      imm1 = sbits (inst1, 0, 10);
	      imm2 = bits (inst2, 0, 10);
	      j1 = bit (inst2, 13);
	      j2 = bit (inst2, 11);

	      offset = ((imm1 << 12) + (imm2 << 1));
	      offset ^= ((!j2) << 22) | ((!j1) << 23);

	      nextpc = pc_val + offset;
	      /* For BLX make sure to clear the low bits.  */
	      if (bit (inst2, 12) == 0)
		nextpc = nextpc & 0xfffffffc;
	  else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)
	      /* SUBS PC, LR, #imm8.  */
	      nextpc = get_frame_register_unsigned (frame, ARM_LR_REGNUM);
	      nextpc -= inst2 & 0x00ff;
	  else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
	      /* Conditional branch.  */
	      if (condition_true (bits (inst1, 6, 9), status))
		  int sign, j1, j2, imm1, imm2;

		  sign = sbits (inst1, 10, 10);
		  imm1 = bits (inst1, 0, 5);
		  imm2 = bits (inst2, 0, 10);
		  j1 = bit (inst2, 13);
		  j2 = bit (inst2, 11);

		  offset = (sign << 20) + (j2 << 19) + (j1 << 18);
		  offset += (imm1 << 12) + (imm2 << 1);

		  nextpc = pc_val + offset;
      else if ((inst1 & 0xfe50) == 0xe810)
	  /* Load multiple or RFE.  */
	  int rn, offset, load_pc = 1;

	  rn = bits (inst1, 0, 3);
	  /* Bits 7 and 8 select LDMIA / LDMDB / RFEIA / RFEDB; OFFSET
	     locates the word holding the new PC relative to Rn.  */
	  if (bit (inst1, 7) && !bit (inst1, 8))
	      if (!bit (inst2, 15))
	      offset = bitcount (inst2) * 4 - 4;
	  else if (!bit (inst1, 7) && bit (inst1, 8))
	      if (!bit (inst2, 15))
	  else if (bit (inst1, 7) && bit (inst1, 8))
	  else if (!bit (inst1, 7) && !bit (inst1, 8))

	      CORE_ADDR addr = get_frame_register_unsigned (frame, rn);
	      nextpc = get_frame_memory_unsigned (frame, addr + offset, 4);
      else if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
	  /* MOV PC or MOVS PC.  */
	  nextpc = get_frame_register_unsigned (frame, bits (inst2, 0, 3));
	  nextpc = MAKE_THUMB_ADDR (nextpc);
      else if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
	  /* LDR PC: compute the effective address in BASE, then load
	     the new PC from it.  */
	  int rn, load_pc = 1;

	  rn = bits (inst1, 0, 3);
	  base = get_frame_register_unsigned (frame, rn);
	      /* Literal load: address is the word-aligned PC + 4.  */
	      base = (base + 4) & ~(CORE_ADDR) 0x3;
	      base += bits (inst2, 0, 11);
	      base -= bits (inst2, 0, 11);
	  else if (bit (inst1, 7))
	    base += bits (inst2, 0, 11);
	  else if (bit (inst2, 11))
	      /* 8-bit immediate offset, add or subtract.  */
	      if (bit (inst2, 10))
		  base += bits (inst2, 0, 7);
		  base -= bits (inst2, 0, 7);
	  else if ((inst2 & 0x0fc0) == 0x0000)
	      /* Register offset, optionally shifted.  */
	      int shift = bits (inst2, 4, 5), rm = bits (inst2, 0, 3);
	      base += get_frame_register_unsigned (frame, rm) << shift;

	    nextpc = get_frame_memory_unsigned (frame, base, 4);
      else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)
	  /* TBB: table of byte offsets, doubled to halfword units.  */
	  CORE_ADDR tbl_reg, table, offset, length;

	  tbl_reg = bits (inst1, 0, 3);
	  if (tbl_reg == 0x0f)
	    table = pc + 4;  /* Regcache copy of PC isn't right yet.  */
	    table = get_frame_register_unsigned (frame, tbl_reg);

	  offset = get_frame_register_unsigned (frame, bits (inst2, 0, 3));
	  length = 2 * get_frame_memory_unsigned (frame, table + offset, 1);
	  nextpc = pc_val + length;
      else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)
	  /* TBH: table of halfword offsets, doubled to byte units.  */
	  CORE_ADDR tbl_reg, table, offset, length;

	  tbl_reg = bits (inst1, 0, 3);
	  if (tbl_reg == 0x0f)
	    table = pc + 4;  /* Regcache copy of PC isn't right yet.  */
	    table = get_frame_register_unsigned (frame, tbl_reg);

	  offset = 2 * get_frame_register_unsigned (frame, bits (inst2, 0, 3));
	  length = 2 * get_frame_memory_unsigned (frame, table + offset, 2);
	  nextpc = pc_val + length;
  else if ((inst1 & 0xff00) == 0x4700)	/* bx REG, blx REG */
      if (bits (inst1, 3, 6) == 0x0f)
      nextpc = get_frame_register_unsigned (frame, bits (inst1, 3, 6));
  else if ((inst1 & 0xff87) == 0x4687)	/* mov pc, REG */
      if (bits (inst1, 3, 6) == 0x0f)
      nextpc = get_frame_register_unsigned (frame, bits (inst1, 3, 6));
      nextpc = MAKE_THUMB_ADDR (nextpc);
  else if ((inst1 & 0xf500) == 0xb100)
      /* CBNZ or CBZ.  */
      int imm = (bit (inst1, 9) << 6) + (bits (inst1, 3, 7) << 1);
      ULONGEST reg = get_frame_register_unsigned (frame, bits (inst1, 0, 2));

      if (bit (inst1, 11) && reg != 0)
	nextpc = pc_val + imm;		/* CBNZ taken.  */
      else if (!bit (inst1, 11) && reg == 0)
	nextpc = pc_val + imm;		/* CBZ taken.  */
/* Get the raw next address.  PC is the current program counter, in
   FRAME.  INSERT_BKPT should be TRUE if we want a breakpoint set on
   the alternative next instruction if there are two options.

   The value returned has the execution state of the next instruction
   encoded in it.  Use IS_THUMB_ADDR () to see whether the instruction is
   in Thumb-State, and gdbarch_addr_bits_remove () to get the plain memory
   address.  */
arm_get_next_pc_raw (struct frame_info *frame, CORE_ADDR pc, int insert_bkpt)
  struct gdbarch *gdbarch = get_frame_arch (frame);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned long pc_val;
  unsigned long this_instr;
  unsigned long status;

  /* Thumb mode has its own decoder.  */
  if (arm_frame_is_thumb (frame))
    return thumb_get_next_pc_raw (frame, pc, insert_bkpt);

  pc_val = (unsigned long) pc;
  this_instr = read_memory_unsigned_integer (pc, 4, byte_order_for_code);

  status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
  nextpc = (CORE_ADDR) (pc_val + 4);	/* Default case */

  /* The NV condition space holds unconditional instructions (BLX
     immediate, coprocessor transfers).  */
  if (bits (this_instr, 28, 31) == INST_NV)
    switch (bits (this_instr, 24, 27))
	/* Branch with Link and change to Thumb.  */
	nextpc = BranchDest (pc, this_instr);
	nextpc |= bit (this_instr, 24) << 1;
	nextpc = MAKE_THUMB_ADDR (nextpc);

	/* Coprocessor register transfer.  */
	if (bits (this_instr, 12, 15) == 15)
	  error (_("Invalid update to pc in instruction"));
  else if (condition_true (bits (this_instr, 28, 31), status))
      switch (bits (this_instr, 24, 27))
	case 0x1:		/* data processing */
	    unsigned long operand1, operand2, result = 0;

	    /* Only instructions writing Rd == PC are interesting.  */
	    if (bits (this_instr, 12, 15) != 15)

	    if (bits (this_instr, 22, 25) == 0
		&& bits (this_instr, 4, 7) == 9)	/* multiply */
	      error (_("Invalid update to pc in instruction"));

	    /* BX <reg>, BLX <reg> */
	    if (bits (this_instr, 4, 27) == 0x12fff1
		|| bits (this_instr, 4, 27) == 0x12fff3)
		rn = bits (this_instr, 0, 3);
		nextpc = (rn == 15) ? pc_val + 8
				    : get_frame_register_unsigned (frame, rn);

	    /* Multiply into PC.  */
	    c = (status & FLAG_C) ? 1 : 0;
	    rn = bits (this_instr, 16, 19);
	    operand1 = (rn == 15) ? pc_val + 8
				  : get_frame_register_unsigned (frame, rn);

	    if (bit (this_instr, 25))
		/* Rotated 8-bit immediate operand.  */
		unsigned long immval = bits (this_instr, 0, 7);
		unsigned long rotate = 2 * bits (this_instr, 8, 11);
		operand2 = ((immval >> rotate) | (immval << (32 - rotate)))
	    else		/* operand 2 is a shifted register.  */
	      operand2 = shifted_reg_val (frame, this_instr, c,

	    /* Emulate the ALU operation to obtain the new PC.  */
	    switch (bits (this_instr, 21, 24))
		result = operand1 & operand2;		/* AND */
		result = operand1 ^ operand2;		/* EOR */
		result = operand1 - operand2;		/* SUB */
		result = operand2 - operand1;		/* RSB */
		result = operand1 + operand2;		/* ADD */
		result = operand1 + operand2 + c;	/* ADC */
		result = operand1 - operand2 + c;	/* SBC */
		result = operand2 - operand1 + c;	/* RSC */
	      case 0xb:		/* tst, teq, cmp, cmn */
		result = (unsigned long) nextpc;
		result = operand1 | operand2;		/* ORR */
		/* Always step into a function.  */
		result = operand1 & ~operand2;		/* BIC */

	    /* In 26-bit APCS the bottom two bits of the result are
	       ignored, and we always end up in ARM state.  */
	    nextpc = arm_addr_bits_remove (gdbarch, result);

	case 0x5:		/* data transfer */
	    if (bit (this_instr, 20))
		/* Load into PC.  */
		if (bits (this_instr, 12, 15) == 15)
		    if (bit (this_instr, 22))
		      error (_("Invalid update to pc in instruction"));

		    /* byte write to PC */
		    rn = bits (this_instr, 16, 19);
		    base = (rn == 15) ? pc_val + 8
				      : get_frame_register_unsigned (frame, rn);
		    if (bit (this_instr, 24))
			/* Pre-indexed: apply the offset before load.  */
			int c = (status & FLAG_C) ? 1 : 0;
			unsigned long offset =
			  (bit (this_instr, 25)
			   ? shifted_reg_val (frame, this_instr, c, pc_val, status)
			   : bits (this_instr, 0, 11));

			if (bit (this_instr, 23))
		    nextpc = (CORE_ADDR) read_memory_integer ((CORE_ADDR) base,

	case 0x9:		/* block transfer */
	  if (bit (this_instr, 20))
	      /* LDM: only matters if the PC is in the register list.  */
	      if (bit (this_instr, 15))
		  if (bit (this_instr, 23))
		      /* Ascending: PC is loaded from the highest slot.  */
		      unsigned long reglist = bits (this_instr, 0, 14);
		      offset = bitcount (reglist) * 4;
		      if (bit (this_instr, 24))		/* pre */
		  else if (bit (this_instr, 24))
		      unsigned long rn_val =
			get_frame_register_unsigned (frame,
						     bits (this_instr, 16, 19));
		    (CORE_ADDR) read_memory_integer ((CORE_ADDR) (rn_val

	case 0xb:		/* branch & link */
	case 0xa:		/* branch */
	    nextpc = BranchDest (pc, this_instr);

	case 0xe:		/* coproc ops */
	    /* System call: the OS ABI decides where it returns.  */
	    struct gdbarch_tdep *tdep;
	    tdep = gdbarch_tdep (gdbarch);

	    if (tdep->syscall_next_pc != NULL)
	      nextpc = tdep->syscall_next_pc (frame);

      /* Unrecognized condition/opcode combination.  */
      fprintf_filtered (gdb_stderr, _("Bad bit-field extraction\n"));
/* Return the memory address of the instruction that executes after
   the one at PC in FRAME, with the Thumb execution-state bit
   stripped via gdbarch_addr_bits_remove.  */
arm_get_next_pc (struct frame_info *frame, CORE_ADDR pc)
  struct gdbarch *gdbarch = get_frame_arch (frame);

    gdbarch_addr_bits_remove (gdbarch,
			      arm_get_next_pc_raw (frame, pc, TRUE));
    /* Reached when single-stepping would make no progress (the guard
       comparison precedes this call — presumably nextpc == pc).  */
    error (_("Infinite loop detected"));
/* single_step() is called just before we want to resume the inferior,
   if we want to single-step it but there is no hardware or kernel
   single-step support.  We find the target of the coming instruction
   and breakpoint it.  */
arm_software_single_step (struct frame_info *frame)
  struct gdbarch *gdbarch = get_frame_arch (frame);
  struct address_space *aspace = get_frame_address_space (frame);

  /* NOTE: This may insert the wrong breakpoint instruction when
     single-stepping over a mode-changing instruction, if the
     CPSR heuristics are used.  */

  CORE_ADDR next_pc = arm_get_next_pc (frame, get_frame_pc (frame));
  insert_single_step_breakpoint (gdbarch, aspace, next_pc);
/* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
   the buffer to be NEW_LEN bytes ending at ENDADDR.  Return
   NULL if an error occurs.  BUF is freed.  */
extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
		       int old_len, int new_len)
  gdb_byte *new_buf, *middle;
  int bytes_to_read = new_len - old_len;

  /* The old contents keep their position relative to ENDADDR, so they
     move to the tail of the new buffer; the extra earlier bytes are
     then read from target memory in front of them.  */
  new_buf = xmalloc (new_len);
  memcpy (new_buf + bytes_to_read, buf, old_len);
  if (target_read_memory (endaddr - new_len, new_buf, bytes_to_read) != 0)
/* An IT block is at most the 2-byte IT instruction followed by
   four 4-byte instructions.  The furthest back we must search to
   find an IT block that affects the current instruction is thus
   2 + 3 * 4 == 14 bytes.  */
#define MAX_IT_BLOCK_PREFIX 14

/* Use a quick scan if there are more than this many bytes of
   code to examine.  */
#define IT_SCAN_THRESHOLD 32
/* Adjust a breakpoint's address to move breakpoints out of IT blocks.
   A breakpoint in an IT block may not be hit, depending on the
   condition in force when execution reaches it.  */
arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
  CORE_ADDR boundary, func_start;
  int buf_len, buf2_len;
  enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
  int i, any, last_it, last_it_count;

  /* If we are using BKPT breakpoints, none of this is necessary.  */
  if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)

  /* ARM mode does not have this problem.  */
  if (!arm_pc_is_thumb (gdbarch, bpaddr))

  /* We are setting a breakpoint in Thumb code that could potentially
     contain an IT block.  The first step is to find how much Thumb
     code there is; we do not need to read outside of known Thumb
     code.  */
  map_type = arm_find_mapping_symbol (bpaddr, &boundary);

  /* Thumb-2 code must have mapping symbols to have a chance.  */

  bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);

  if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
      && func_start > boundary)
    boundary = func_start;

  /* Search for a candidate IT instruction.  We have to do some fancy
     footwork to distinguish a real IT instruction from the second
     half of a 32-bit instruction, but there is no need for that if
     there's no candidate.  */
  buf_len = min (bpaddr - boundary, MAX_IT_BLOCK_PREFIX);

  /* No room for an IT instruction.  */

  buf = xmalloc (buf_len);
  if (target_read_memory (bpaddr - buf_len, buf, buf_len) != 0)

  /* 0xbfXY with a non-zero low nibble is the IT encoding.  */
  for (i = 0; i < buf_len; i += 2)
      unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
      if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)

  /* OK, the code bytes before this instruction contain at least one
     halfword which resembles an IT instruction.  We know that it's
     Thumb code, but there are still two possibilities.  Either the
     halfword really is an IT instruction, or it is the second half of
     a 32-bit Thumb instruction.  The only way we can tell is to
     scan forwards from a known instruction boundary.  */
  if (bpaddr - boundary > IT_SCAN_THRESHOLD)
      /* There's a lot of code before this instruction.  Start with an
	 optimistic search; it's easy to recognize halfwords that can
	 not be the start of a 32-bit instruction, and use that to
	 lock on to the instruction boundaries.  */
      buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);

      buf_len = IT_SCAN_THRESHOLD;

      /* NOTE(review): `sizeof (buf)' is the size of a *pointer* here,
	 not of the buffer; the bound was presumably meant to stop one
	 instruction short of the end (e.g. sizeof (inst1)) — verify
	 against upstream before relying on this loop's range.  */
      for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
	  unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
	  if (thumb_insn_size (inst1) == 2)

      /* At this point, if DEFINITE, BUF[I] is the first place we
	 are sure that we know the instruction boundaries, and it is far
	 enough from BPADDR that we could not miss an IT instruction
	 affecting BPADDR.  If ! DEFINITE, give up - start from a
	 known boundary.  */
	  buf = extend_buffer_earlier (buf, bpaddr, buf_len,
	    buf_len = bpaddr - boundary;
      buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);

      buf_len = bpaddr - boundary;

  /* Scan forwards.  Find the last IT instruction before BPADDR.  */
      unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);

      if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
      /* The position of the lowest set mask bit encodes how many
	 instructions the IT block covers.  */
      else if (inst1 & 0x0002)
      else if (inst1 & 0x0004)

      i += thumb_insn_size (inst1);

  /* There wasn't really an IT instruction after all.  */

  if (last_it_count < 1)
    /* It was too far away.  */

  /* This really is a trouble spot.  Move the breakpoint to the IT
     instruction.  */
  return bpaddr - buf_len + last_it;
5052 /* ARM displaced stepping support.
5054 Generally ARM displaced stepping works as follows:
5056 1. When an instruction is to be single-stepped, it is first decoded by
5057 arm_process_displaced_insn (called from arm_displaced_step_copy_insn).
5058 Depending on the type of instruction, it is then copied to a scratch
5059 location, possibly in a modified form. The copy_* set of functions
5060 performs such modification, as necessary. A breakpoint is placed after
5061 the modified instruction in the scratch space to return control to GDB.
5062 Note in particular that instructions which modify the PC will no longer
5063 do so after modification.
5065 2. The instruction is single-stepped, by setting the PC to the scratch
5066 location address, and resuming. Control returns to GDB when the
5069 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
5070 function used for the current instruction. This function's job is to
5071 put the CPU/memory state back to what it would have been if the
5072 instruction had been executed unmodified in its original location. */
5074 /* NOP instruction (mov r0, r0). */
5075 #define ARM_NOP 0xe1a00000
5077 /* Helper for register reads for displaced stepping. In particular, this
5078 returns the PC as it would be seen by the instruction at its original
/* location: in ARM state the visible PC is the instruction address plus
   the 8-byte pipeline offset, not the raw register contents.
   NOTE(review): the listing is elided here; the surrounding control flow
   (the PC-vs-ordinary-register branch and the final return) is not
   visible in this fragment.  */
5082 displaced_read_reg (struct regcache *regs, CORE_ADDR from, int regno)
5088 if (debug_displaced)
5089 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
5090 (unsigned long) from + 8);
/* PC reads yield the original insn address + 8 (ARM pipeline offset),
   not the scratch-pad address the insn actually executes at.  */
5091 return (ULONGEST) from + 8; /* Pipeline offset. */
/* Any other register: just read its cooked value from the regcache.  */
5095 regcache_cooked_read_unsigned (regs, regno, &ret);
5096 if (debug_displaced)
5097 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
5098 regno, (unsigned long) ret);
/* Return non-zero if the inferior is currently executing in ARM (as
   opposed to Thumb) state, judged by the CPSR Thumb bit being clear.  */
5104 displaced_in_arm_mode (struct regcache *regs)
5107 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
5109 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
/* ARM state <=> Thumb bit clear in the status register.  */
5111 return (ps & t_bit) == 0;
5114 /* Write to the PC as from a branch instruction. */
5117 branch_write_pc (struct regcache *regs, ULONGEST val)
5119 if (displaced_in_arm_mode (regs))
5120 /* Note: If bits 0/1 are set, this branch would be unpredictable for
5121 architecture versions < 6. */
/* ARM state: force 4-byte alignment of the destination.  */
5122 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5123 val & ~(ULONGEST) 0x3);
/* Thumb state: only bit 0 must be cleared; halfword alignment is OK.  */
5125 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5126 val & ~(ULONGEST) 0x1);
5129 /* Write to the PC as from a branch-exchange instruction. */
5132 bx_write_pc (struct regcache *regs, ULONGEST val)
5135 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
5137 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
/* Bit 0 of the destination set: switch to Thumb state and clear the
   low bit of the address (NOTE(review): the guarding `if (val & 1)` is
   elided from this listing).  */
5141 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
5142 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
/* Bits 1:0 == 00: word-aligned ARM destination; switch to ARM state.  */
5144 else if ((val & 2) == 0)
5146 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5147 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
5151 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
5152 mode, align dest to 4 bytes). */
5153 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
5154 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5155 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
5159 /* Write to the PC as if from a load instruction. */
5162 load_write_pc (struct regcache *regs, ULONGEST val)
/* From ARMv5 on, a load to PC behaves like BX (may interwork to Thumb);
   earlier architectures treat it as a plain branch.  */
5164 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
5165 bx_write_pc (regs, val);
5167 branch_write_pc (regs, val);
5170 /* Write to the PC as if from an ALU instruction. */
5173 alu_write_pc (struct regcache *regs, ULONGEST val)
/* In ARM state on ARMv7+, ALU writes to PC interwork like BX; in Thumb
   state, or on older architectures, they behave as a simple branch.  */
5175 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && displaced_in_arm_mode (regs))
5176 bx_write_pc (regs, val);
5178 branch_write_pc (regs, val);
5181 /* Helper for writing to registers for displaced stepping. Writing to the PC
5182 has varying effects depending on the instruction which does the write:
5183 this is controlled by the WRITE_PC argument. */
5186 displaced_write_reg (struct regcache *regs, struct displaced_step_closure *dsc,
5187 int regno, ULONGEST val, enum pc_write_style write_pc)
5191 if (debug_displaced)
5192 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
5193 (unsigned long) val);
/* Dispatch on the style of PC write so the interworking / alignment
   semantics of the original instruction are reproduced.  */
5196 case BRANCH_WRITE_PC:
5197 branch_write_pc (regs, val);
5201 bx_write_pc (regs, val);
5205 load_write_pc (regs, val);
5209 alu_write_pc (regs, val);
/* CANNOT_WRITE_PC marks internal scratch-register shuffles that should
   never target the PC; reaching here indicates a decode bug.  */
5212 case CANNOT_WRITE_PC:
5213 warning (_("Instruction wrote to PC in an unexpected way when "
5214 "single-stepping"));
5218 internal_error (__FILE__, __LINE__,
5219 _("Invalid argument to displaced_write_reg"));
/* Record that the PC was modified so the fixup phase does not also
   advance it past the original instruction.  */
5222 dsc->wrote_to_pc = 1;
/* Non-PC registers are written straight through to the regcache.  */
5226 if (debug_displaced)
5227 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
5228 regno, (unsigned long) val);
5229 regcache_cooked_write_unsigned (regs, regno, val);
5233 /* This function is used to concisely determine if an instruction INSN
5234 references PC. Register fields of interest in INSN should have the
5235 corresponding fields of BITMASK set to 0b1111. The function
5236 returns return 1 if any of these fields in INSN reference the PC
5237 (also 0b1111, r15), else it returns 0. */
5240 insn_references_pc (uint32_t insn, uint32_t bitmask)
5242 uint32_t lowbit = 1;
/* Walk each nibble-wide register field selected by BITMASK.  */
5244 while (bitmask != 0)
/* Skip ahead to the lowest set bit of the remaining mask.  */
5248 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
/* MASK covers the 4-bit register field starting at LOWBIT.  */
5254 mask = lowbit * 0xf;
/* A field equal to its own mask is 0b1111, i.e. r15/PC.  */
5256 if ((insn & mask) == mask)
5265 /* The simplest copy function. Many instructions have the same effect no
5266 matter what address they are executed at: in those cases, use this. */
5269 copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
5270 const char *iname, struct displaced_step_closure *dsc)
5272 if (debug_displaced)
5273 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
5274 "opcode/class '%s' unmodified\n", (unsigned long) insn,
/* Position-independent instruction: execute it verbatim in the scratch
   area, no cleanup needed.  */
5277 dsc->modinsn[0] = insn;
5282 /* Preload instructions with immediate offset. */
/* Cleanup for preloads: restore the scratch registers (r0, and r1 for
   the register-offset form) that the modified insn borrowed.  */
5285 cleanup_preload (struct gdbarch *gdbarch,
5286 struct regcache *regs, struct displaced_step_closure *dsc)
5288 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5289 if (!dsc->u.preload.immed)
5290 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5294 copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5295 struct displaced_step_closure *dsc)
5297 unsigned int rn = bits (insn, 16, 19);
5299 CORE_ADDR from = dsc->insn_addr;
/* Only the Rn field (bits 16-19) can reference PC here; if it does not,
   the insn is position-independent and can run unmodified.  */
5301 if (!insn_references_pc (insn, 0x000f0000ul))
5302 return copy_unmodified (gdbarch, insn, "preload", dsc);
5304 if (debug_displaced)
5305 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
5306 (unsigned long) insn);
5308 /* Preload instructions:
5310 {pli/pld} [rn, #+/-imm]
5312 {pli/pld} [r0, #+/-imm]. */
/* Save r0, substitute Rn's (PC-adjusted) value into it, and rewrite the
   insn to use r0 as its base register.  */
5314 dsc->tmp[0] = displaced_read_reg (regs, from, 0);
5315 rn_val = displaced_read_reg (regs, from, rn);
5316 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5318 dsc->u.preload.immed = 1;
/* Clear the Rn field; r0 becomes the base.  */
5320 dsc->modinsn[0] = insn & 0xfff0ffff;
5322 dsc->cleanup = &cleanup_preload;
5327 /* Preload instructions with register offset. */
5330 copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
5331 struct regcache *regs,
5332 struct displaced_step_closure *dsc)
5334 unsigned int rn = bits (insn, 16, 19);
5335 unsigned int rm = bits (insn, 0, 3);
5336 ULONGEST rn_val, rm_val;
5337 CORE_ADDR from = dsc->insn_addr;
/* Both Rn (bits 16-19) and Rm (bits 0-3) may reference PC; if neither
   does, the insn can run unmodified.  */
5339 if (!insn_references_pc (insn, 0x000f000ful))
5340 return copy_unmodified (gdbarch, insn, "preload reg", dsc);
5342 if (debug_displaced)
5343 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
5344 (unsigned long) insn);
5346 /* Preload register-offset instructions:
5348 {pli/pld} [rn, rm {, shift}]
5350 {pli/pld} [r0, r1 {, shift}]. */
/* Save r0/r1, load them with Rn/Rm's (PC-adjusted) values, and rewrite
   the insn's register fields to r0 (base) and r1 (offset).  */
5352 dsc->tmp[0] = displaced_read_reg (regs, from, 0);
5353 dsc->tmp[1] = displaced_read_reg (regs, from, 1);
5354 rn_val = displaced_read_reg (regs, from, rn);
5355 rm_val = displaced_read_reg (regs, from, rm);
5356 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5357 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
5359 dsc->u.preload.immed = 0;
/* Clear Rn and Rm fields; use r0 as base and r1 as offset.  */
5361 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
5363 dsc->cleanup = &cleanup_preload;
5368 /* Copy/cleanup coprocessor load and store instructions. */
/* Cleanup for ldc/stc: restore r0, and if the original insn used
   writeback, propagate the updated base address back into Rn.  */
5371 cleanup_copro_load_store (struct gdbarch *gdbarch,
5372 struct regcache *regs,
5373 struct displaced_step_closure *dsc)
5375 ULONGEST rn_val = displaced_read_reg (regs, dsc->insn_addr, 0);
5377 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5379 if (dsc->u.ldst.writeback)
5380 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
5384 copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
5385 struct regcache *regs,
5386 struct displaced_step_closure *dsc)
5388 unsigned int rn = bits (insn, 16, 19);
5390 CORE_ADDR from = dsc->insn_addr;
/* Only Rn (bits 16-19) can reference PC; otherwise run unmodified.  */
5392 if (!insn_references_pc (insn, 0x000f0000ul))
5393 return copy_unmodified (gdbarch, insn, "copro load/store", dsc);
5395 if (debug_displaced)
5396 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
5397 "load/store insn %.8lx\n", (unsigned long) insn);
5399 /* Coprocessor load/store instructions:
5401 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
5403 {stc/stc2} [r0, #+/-imm].
5405 ldc/ldc2 are handled identically. */
/* Save r0 and substitute Rn's (PC-adjusted) value as the base.  */
5407 dsc->tmp[0] = displaced_read_reg (regs, from, 0);
5408 rn_val = displaced_read_reg (regs, from, rn);
5409 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
/* Bit 25 distinguishes the writeback form (W bit in this encoding).  */
5411 dsc->u.ldst.writeback = bit (insn, 25);
5412 dsc->u.ldst.rn = rn;
/* Clear the Rn field; r0 becomes the base register.  */
5414 dsc->modinsn[0] = insn & 0xfff0ffff;
5416 dsc->cleanup = &cleanup_copro_load_store;
5421 /* Clean up branch instructions (actually perform the branch, by setting
/* PC).  The scratch-area copy was a NOP; the real branch is emulated
   here, honoring the original insn's condition code.  */
5425 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
5426 struct displaced_step_closure *dsc)
5428 ULONGEST from = dsc->insn_addr;
5429 uint32_t status = displaced_read_reg (regs, from, ARM_PS_REGNUM);
5430 int branch_taken = condition_true (dsc->u.branch.cond, status);
/* BX/BLX-style branches may interwork; plain B/BL do not.  */
5431 enum pc_write_style write_pc = dsc->u.branch.exchange
5432 ? BX_WRITE_PC : BRANCH_WRITE_PC;
5437 if (dsc->u.branch.link)
/* Link register gets the address of the insn after the branch:
   the pipeline PC (insn + 8) minus 4.  */
5439 ULONGEST pc = displaced_read_reg (regs, from, 15);
5440 displaced_write_reg (regs, dsc, 14, pc - 4, CANNOT_WRITE_PC);
5443 displaced_write_reg (regs, dsc, 15, dsc->u.branch.dest, write_pc);
5446 /* Copy B/BL/BLX instructions with immediate destinations. */
5449 copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
5450 struct regcache *regs, struct displaced_step_closure *dsc)
5452 unsigned int cond = bits (insn, 28, 31);
/* Condition field 0xf is the unconditional BLX (immediate) encoding.  */
5453 int exchange = (cond == 0xf);
5454 int link = exchange || bit (insn, 24);
5455 CORE_ADDR from = dsc->insn_addr;
5458 if (debug_displaced)
5459 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
5460 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
5461 (unsigned long) insn);
5463 /* Implement "BL<cond> <label>" as:
5465 Preparation: cond <- instruction condition
5466 Insn: mov r0, r0 (nop)
5467 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
5469 B<cond> similar, but don't set r14 in cleanup. */
5472 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
5473 then arrange the switch into Thumb mode. */
/* BLX: 26-bit offset built from the 24-bit immediate plus the H bit
   (bit 24) selecting the extra halfword.  */
5474 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
5476 offset = bits (insn, 0, 23) << 2;
/* Sign-extend the 26-bit offset.  */
5478 if (bit (offset, 25))
5479 offset = offset | ~0x3ffffff;
5481 dsc->u.branch.cond = cond;
5482 dsc->u.branch.link = link;
5483 dsc->u.branch.exchange = exchange;
/* Branch target is relative to the pipeline PC (insn address + 8).  */
5484 dsc->u.branch.dest = from + 8 + offset;
/* Execute a NOP in the scratch area; cleanup_branch does the work.  */
5486 dsc->modinsn[0] = ARM_NOP;
5488 dsc->cleanup = &cleanup_branch;
5493 /* Copy BX/BLX with register-specified destinations. */
5496 copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
5497 struct regcache *regs, struct displaced_step_closure *dsc)
5499 unsigned int cond = bits (insn, 28, 31);
/* Bit 5 distinguishes BLX (link) from BX in this encoding.  */
5502 int link = bit (insn, 5);
5503 unsigned int rm = bits (insn, 0, 3);
5504 CORE_ADDR from = dsc->insn_addr;
5506 if (debug_displaced)
5507 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s register insn "
5508 "%.8lx\n", (link) ? "blx" : "bx",
5509 (unsigned long) insn);
5511 /* Implement {BX,BLX}<cond> <reg>" as:
5513 Preparation: cond <- instruction condition
5514 Insn: mov r0, r0 (nop)
5515 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5517 Don't set r14 in cleanup for BX. */
/* Destination comes from Rm; read it with PC substitution applied.  */
5519 dsc->u.branch.dest = displaced_read_reg (regs, from, rm);
5521 dsc->u.branch.cond = cond;
5522 dsc->u.branch.link = link;
/* BX/BLX always interwork: cleanup uses BX-style PC write.  */
5523 dsc->u.branch.exchange = 1;
5525 dsc->modinsn[0] = ARM_NOP;
5527 dsc->cleanup = &cleanup_branch;
5532 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
/* Cleanup: the modified insn wrote its result into r0; move that into
   the real Rd (honoring ALU-style PC-write semantics), then restore the
   borrowed r0/r1.  */
5535 cleanup_alu_imm (struct gdbarch *gdbarch,
5536 struct regcache *regs, struct displaced_step_closure *dsc)
5538 ULONGEST rd_val = displaced_read_reg (regs, dsc->insn_addr, 0);
5539 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5540 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5541 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5545 copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5546 struct displaced_step_closure *dsc)
5548 unsigned int rn = bits (insn, 16, 19);
5549 unsigned int rd = bits (insn, 12, 15);
5550 unsigned int op = bits (insn, 21, 24);
/* MOV (opcode 0xd) has no Rn operand.  */
5551 int is_mov = (op == 0xd);
5552 ULONGEST rd_val, rn_val;
5553 CORE_ADDR from = dsc->insn_addr;
/* Rn (bits 16-19) and Rd (bits 12-15) may reference PC.  */
5555 if (!insn_references_pc (insn, 0x000ff000ul))
5556 return copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
5558 if (debug_displaced)
5559 fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
5560 "%.8lx\n", is_mov ? "move" : "ALU",
5561 (unsigned long) insn);
5563 /* Instruction is of form:
5565 <op><cond> rd, [rn,] #imm
5569 Preparation: tmp1, tmp2 <- r0, r1;
5571 Insn: <op><cond> r0, r1, #imm
5572 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5575 dsc->tmp[0] = displaced_read_reg (regs, from, 0);
5576 dsc->tmp[1] = displaced_read_reg (regs, from, 1);
5577 rn_val = displaced_read_reg (regs, from, rn);
5578 rd_val = displaced_read_reg (regs, from, rd);
5579 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5580 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
/* Rewrite Rd to r0 (and Rn to r1 for non-MOV ops).  */
5584 dsc->modinsn[0] = insn & 0xfff00fff;
5586 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
5588 dsc->cleanup = &cleanup_alu_imm;
5593 /* Copy/cleanup arithmetic/logic insns with register RHS. */
/* Cleanup: fetch the result from r0, restore r0-r2, then write the
   result into the real Rd with ALU PC-write semantics.  */
5596 cleanup_alu_reg (struct gdbarch *gdbarch,
5597 struct regcache *regs, struct displaced_step_closure *dsc)
5602 rd_val = displaced_read_reg (regs, dsc->insn_addr, 0);
5604 for (i = 0; i < 3; i++)
5605 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5607 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5611 copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5612 struct displaced_step_closure *dsc)
5614 unsigned int rn = bits (insn, 16, 19);
5615 unsigned int rm = bits (insn, 0, 3);
5616 unsigned int rd = bits (insn, 12, 15);
5617 unsigned int op = bits (insn, 21, 24);
/* MOV (opcode 0xd) has no Rn operand.  */
5618 int is_mov = (op == 0xd);
5619 ULONGEST rd_val, rn_val, rm_val;
5620 CORE_ADDR from = dsc->insn_addr;
/* Rn, Rd and Rm fields may reference PC.  */
5622 if (!insn_references_pc (insn, 0x000ff00ful))
5623 return copy_unmodified (gdbarch, insn, "ALU reg", dsc);
5625 if (debug_displaced)
5626 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
5627 is_mov ? "move" : "ALU", (unsigned long) insn);
5629 /* Instruction is of form:
5631 <op><cond> rd, [rn,] rm [, <shift>]
5635 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
5636 r0, r1, r2 <- rd, rn, rm
5637 Insn: <op><cond> r0, r1, r2 [, <shift>]
5638 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
5641 dsc->tmp[0] = displaced_read_reg (regs, from, 0);
5642 dsc->tmp[1] = displaced_read_reg (regs, from, 1);
5643 dsc->tmp[2] = displaced_read_reg (regs, from, 2);
5644 rd_val = displaced_read_reg (regs, from, rd);
5645 rn_val = displaced_read_reg (regs, from, rn);
5646 rm_val = displaced_read_reg (regs, from, rm);
5647 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5648 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5649 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
/* Rewrite Rd->r0, Rm->r2 (and Rn->r1 for non-MOV ops).  */
5653 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
5655 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
5657 dsc->cleanup = &cleanup_alu_reg;
5662 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
/* Cleanup: fetch the result from r0, restore r0-r3, then write the
   result into the real Rd with ALU PC-write semantics.  */
5665 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
5666 struct regcache *regs,
5667 struct displaced_step_closure *dsc)
5669 ULONGEST rd_val = displaced_read_reg (regs, dsc->insn_addr, 0);
5672 for (i = 0; i < 4; i++)
5673 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5675 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5679 copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
5680 struct regcache *regs,
5681 struct displaced_step_closure *dsc)
5683 unsigned int rn = bits (insn, 16, 19);
5684 unsigned int rm = bits (insn, 0, 3);
5685 unsigned int rd = bits (insn, 12, 15);
/* Rs (bits 8-11) holds the shift amount for this form.  */
5686 unsigned int rs = bits (insn, 8, 11);
5687 unsigned int op = bits (insn, 21, 24);
5688 int is_mov = (op == 0xd), i;
5689 ULONGEST rd_val, rn_val, rm_val, rs_val;
5690 CORE_ADDR from = dsc->insn_addr;
/* Rn, Rd, Rs and Rm fields may reference PC.  */
5692 if (!insn_references_pc (insn, 0x000fff0ful))
5693 return copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
5695 if (debug_displaced)
5696 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
5697 "%.8lx\n", is_mov ? "move" : "ALU",
5698 (unsigned long) insn);
5700 /* Instruction is of form:
5702 <op><cond> rd, [rn,] rm, <shift> rs
5706 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
5707 r0, r1, r2, r3 <- rd, rn, rm, rs
5708 Insn: <op><cond> r0, r1, r2, <shift> r3
5710 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
5714 for (i = 0; i < 4; i++)
5715 dsc->tmp[i] = displaced_read_reg (regs, from, i);
5717 rd_val = displaced_read_reg (regs, from, rd);
5718 rn_val = displaced_read_reg (regs, from, rn);
5719 rm_val = displaced_read_reg (regs, from, rm);
5720 rs_val = displaced_read_reg (regs, from, rs);
5721 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5722 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5723 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5724 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
/* Rewrite Rd->r0, Rs->r3, Rm->r2 (and Rn->r1 for non-MOV ops).  */
5728 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
5730 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
5732 dsc->cleanup = &cleanup_alu_shifted_reg;
5737 /* Clean up load instructions. */
/* The modified insn used r0/r1 for the loaded value(s), r2 for the base
   and r3 for the register offset; undo the register shuffle and place
   the loaded value(s) in the real destination(s).  */
5740 cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
5741 struct displaced_step_closure *dsc)
5743 ULONGEST rt_val, rt_val2 = 0, rn_val;
5744 CORE_ADDR from = dsc->insn_addr;
5746 rt_val = displaced_read_reg (regs, from, 0);
/* Doubleword loads produced a second word in r1.  */
5747 if (dsc->u.ldst.xfersize == 8)
5748 rt_val2 = displaced_read_reg (regs, from, 1);
5749 rn_val = displaced_read_reg (regs, from, 2);
5751 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5752 if (dsc->u.ldst.xfersize > 4)
5753 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5754 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
/* r3 was only borrowed for the register-offset addressing form.  */
5755 if (!dsc->u.ldst.immed)
5756 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5758 /* Handle register writeback. */
5759 if (dsc->u.ldst.writeback)
5760 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5761 /* Put result in right place. */
5762 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
5763 if (dsc->u.ldst.xfersize == 8)
5764 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
5767 /* Clean up store instructions. */
/* Restore the borrowed scratch registers (r0-r4 as applicable) and
   propagate base-register writeback.  */
5770 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
5771 struct displaced_step_closure *dsc)
5773 CORE_ADDR from = dsc->insn_addr;
5774 ULONGEST rn_val = displaced_read_reg (regs, from, 2);
5776 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5777 if (dsc->u.ldst.xfersize > 4)
5778 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5779 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5780 if (!dsc->u.ldst.immed)
5781 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
/* r4 is the extra scratch register used when storing PC.
   NOTE(review): condition reads `!restore_r4` here although r4 is then
   written back — the inverted sense suggests elided context; confirm
   against the full source.  */
5782 if (!dsc->u.ldst.restore_r4)
5783 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
5786 if (dsc->u.ldst.writeback)
5787 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5790 /* Copy "extra" load/store instructions. These are halfword/doubleword
5791 transfers, which have a different encoding to byte/word transfers. */
5794 copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unpriveleged,
5795 struct regcache *regs, struct displaced_step_closure *dsc)
5797 unsigned int op1 = bits (insn, 20, 24);
5798 unsigned int op2 = bits (insn, 5, 6);
5799 unsigned int rt = bits (insn, 12, 15);
5800 unsigned int rn = bits (insn, 16, 19);
5801 unsigned int rm = bits (insn, 0, 3);
/* Per-opcode tables: whether the op is a load, and its transfer size in
   bytes (8 = doubleword, i.e. a register pair).  */
5802 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
5803 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
5804 int immed = (op1 & 0x4) != 0;
5806 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
5807 CORE_ADDR from = dsc->insn_addr;
/* Rn, Rt and Rm fields may reference PC.  */
5809 if (!insn_references_pc (insn, 0x000ff00ful))
5810 return copy_unmodified (gdbarch, insn, "extra load/store", dsc);
5812 if (debug_displaced)
5813 fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
5814 "insn %.8lx\n", unpriveleged ? "unpriveleged " : "",
5815 (unsigned long) insn);
/* Fold op2 and the relevant op1 bits into an index for the tables.  */
5817 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
5820 internal_error (__FILE__, __LINE__,
5821 _("copy_extra_ld_st: instruction decode error"));
/* Save r0-r2 (and r3 for the register-offset form).  */
5823 dsc->tmp[0] = displaced_read_reg (regs, from, 0);
5824 dsc->tmp[1] = displaced_read_reg (regs, from, 1);
5825 dsc->tmp[2] = displaced_read_reg (regs, from, 2);
5827 dsc->tmp[3] = displaced_read_reg (regs, from, 3);
5829 rt_val = displaced_read_reg (regs, from, rt);
/* Doubleword transfers use the register pair Rt, Rt+1.  */
5830 if (bytesize[opcode] == 8)
5831 rt_val2 = displaced_read_reg (regs, from, rt + 1);
5832 rn_val = displaced_read_reg (regs, from, rn);
5834 rm_val = displaced_read_reg (regs, from, rm);
/* Map Rt(,Rt+1)/Rn/Rm onto r0(,r1)/r2/r3 for the scratch copy.  */
5836 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5837 if (bytesize[opcode] == 8)
5838 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
5839 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5841 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5844 dsc->u.ldst.xfersize = bytesize[opcode];
5845 dsc->u.ldst.rn = rn;
5846 dsc->u.ldst.immed = immed;
/* Writeback happens for post-indexed (P==0) or W-bit forms.  */
5847 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
5848 dsc->u.ldst.restore_r4 = 0;
5851 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
5853 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
5854 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5856 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
5858 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
5859 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5861 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
5866 /* Copy byte/word loads and stores. */
5869 copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
5870 struct regcache *regs,
5871 struct displaced_step_closure *dsc, int load, int byte,
/* Bit 25 clear means immediate-offset addressing.  */
5874 int immed = !bit (insn, 25);
5875 unsigned int rt = bits (insn, 12, 15);
5876 unsigned int rn = bits (insn, 16, 19);
5877 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
5878 ULONGEST rt_val, rn_val, rm_val = 0;
5879 CORE_ADDR from = dsc->insn_addr;
/* Rn, Rt and Rm fields may reference PC.  */
5881 if (!insn_references_pc (insn, 0x000ff00ful))
5882 return copy_unmodified (gdbarch, insn, "load/store", dsc);
5884 if (debug_displaced)
5885 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s%s insn %.8lx\n",
5886 load ? (byte ? "ldrb" : "ldr")
5887 : (byte ? "strb" : "str"), usermode ? "t" : "",
5888 (unsigned long) insn);
/* Save the scratch registers: r0 (value), r2 (base), r3 (offset when
   register-addressed), r4 (extra scratch for the PC-store sequence).  */
5890 dsc->tmp[0] = displaced_read_reg (regs, from, 0);
5891 dsc->tmp[2] = displaced_read_reg (regs, from, 2);
5893 dsc->tmp[3] = displaced_read_reg (regs, from, 3);
5895 dsc->tmp[4] = displaced_read_reg (regs, from, 4);
5897 rt_val = displaced_read_reg (regs, from, rt);
5898 rn_val = displaced_read_reg (regs, from, rn);
5900 rm_val = displaced_read_reg (regs, from, rm);
/* Map Rt/Rn/Rm onto r0/r2/r3 for the scratch copy.  */
5902 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5903 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5905 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5908 dsc->u.ldst.xfersize = byte ? 1 : 4;
5909 dsc->u.ldst.rn = rn;
5910 dsc->u.ldst.immed = immed;
/* Writeback for post-indexed (P==0) or W-bit forms.  */
5911 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
5913 /* To write PC we can do:
5915 scratch+0: str pc, temp (*temp = scratch + 8 + offset)
5916 scratch+4: ldr r4, temp
5917 scratch+8: sub r4, r4, pc (r4 = scratch + 8 + offset - scratch - 8 - 8)
5918 scratch+12: add r4, r4, #8 (r4 = offset)
5919 scratch+16: add r0, r0, r4
5920 scratch+20: str r0, [r2, #imm] (or str r0, [r2, r3])
5923 Otherwise we don't know what value to write for PC, since the offset is
5924 architecture-dependent (sometimes PC+8, sometimes PC+12). */
/* Common case: single-instruction rewrite suffices unless we are
   storing the PC itself.  */
5926 if (load || rt != 15)
5928 dsc->u.ldst.restore_r4 = 0;
5931 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
5933 {ldr,str}[b]<cond> r0, [r2, #imm]. */
5934 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5936 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
5938 {ldr,str}[b]<cond> r0, [r2, r3]. */
5939 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5943 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
5944 dsc->u.ldst.restore_r4 = 1;
/* Emit the 6-insn sequence sketched above to compute the value the
   original STR would have stored for PC at its real location.  */
5946 dsc->modinsn[0] = 0xe58ff014; /* str pc, [pc, #20]. */
5947 dsc->modinsn[1] = 0xe59f4010; /* ldr r4, [pc, #16]. */
5948 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
5949 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
5950 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
5954 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
5956 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
5958 dsc->modinsn[6] = 0x0; /* breakpoint location. */
5959 dsc->modinsn[7] = 0x0; /* scratch space. */
5964 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5969 /* Cleanup LDM instructions with fully-populated register list. This is an
5970 unfortunate corner case: it's impossible to implement correctly by modifying
5971 the instruction. The issue is as follows: we have an instruction,
5975 which we must rewrite to avoid loading PC. A possible solution would be to
5976 do the load in two halves, something like (with suitable cleanup
5980 ldm[id][ab] r8!, {r0-r7}
5982 ldm[id][ab] r8, {r7-r14}
5985 but at present there's no suitable place for <temp>, since the scratch space
5986 is overwritten before the cleanup routine is called. For now, we simply
5987 emulate the instruction. */
5990 cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
5991 struct displaced_step_closure *dsc)
5993 ULONGEST from = dsc->insn_addr;
5994 int inc = dsc->u.block.increment;
/* Pre/post-increment determines whether the address bumps before or
   after each word transfer; decrement forms bump by -4.  */
5995 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
5996 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
5997 uint32_t regmask = dsc->u.block.regmask;
/* Increment forms transfer lowest register first; decrement forms
   iterate from r15 downwards.  */
5998 int regno = inc ? 0 : 15;
5999 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
6000 int exception_return = dsc->u.block.load && dsc->u.block.user
6001 && (regmask & 0x8000) != 0;
6002 uint32_t status = displaced_read_reg (regs, from, ARM_PS_REGNUM);
6003 int do_transfer = condition_true (dsc->u.block.cond, status);
6004 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
6009 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
6010 sensible we can do here. Complain loudly. */
6011 if (exception_return)
6012 error (_("Cannot single-step exception return"));
6014 /* We don't handle any stores here for now. */
6015 gdb_assert (dsc->u.block.load != 0);
6017 if (debug_displaced)
6018 fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
6019 "%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
6020 dsc->u.block.increment ? "inc" : "dec",
6021 dsc->u.block.before ? "before" : "after");
/* Find the next register in the mask, in transfer order.  */
6028 while (regno <= 15 && (regmask & (1 << regno)) == 0)
6031 while (regno >= 0 && (regmask & (1 << regno)) == 0)
6034 xfer_addr += bump_before;
/* Emulate the word load from inferior memory into REGNO.  */
6036 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
6037 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
6039 xfer_addr += bump_after;
6041 regmask &= ~(1 << regno);
/* Emulate base-register writeback with the final transfer address.  */
6044 if (dsc->u.block.writeback)
6045 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
6049 /* Clean up an STM which included the PC in the register list. */
/* The scratch-area STM stored the scratch PC; patch the stored memory
   word to the value the original insn would have stored.  */
6052 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
6053 struct displaced_step_closure *dsc)
6055 ULONGEST from = dsc->insn_addr;
6056 uint32_t status = displaced_read_reg (regs, from, ARM_PS_REGNUM);
6057 int store_executed = condition_true (dsc->u.block.cond, status);
6058 CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
6059 CORE_ADDR stm_insn_addr;
6062 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
6064 /* If condition code fails, there's nothing else to do. */
6065 if (!store_executed)
/* PC is the highest-numbered register, so it lands at the top of the
   transferred block; where that is depends on the addressing mode.  */
6068 if (dsc->u.block.increment)
6070 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
6072 if (dsc->u.block.before)
6077 pc_stored_at = dsc->u.block.xfer_addr;
6079 if (dsc->u.block.before)
/* The stored value reflects the scratch copy's address; compute the
   architecture-dependent pipeline offset it implies...  */
6083 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
6084 stm_insn_addr = dsc->scratch_base;
6085 offset = pc_val - stm_insn_addr;
6087 if (debug_displaced)
6088 fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
6089 "STM instruction\n", offset);
6091 /* Rewrite the stored PC to the proper value for the non-displaced original
/* ...and apply the same offset to the original insn address.  */
6093 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
6094 dsc->insn_addr + offset);
6097 /* Clean up an LDM which includes the PC in the register list. We clumped all
6098 the registers in the transferred list into a contiguous range r0...rX (to
6099 avoid loading PC directly and losing control of the debugged program), so we
6100 must undo that here. */
6103 cleanup_block_load_pc (struct gdbarch *gdbarch,
6104 struct regcache *regs,
6105 struct displaced_step_closure *dsc)
6107 ULONGEST from = dsc->insn_addr;
6108 uint32_t status = displaced_read_reg (regs, from, ARM_PS_REGNUM);
6109 int load_executed = condition_true (dsc->u.block.cond, status), i;
6110 unsigned int mask = dsc->u.block.regmask, write_reg = 15;
6111 unsigned int regs_loaded = bitcount (mask);
6112 unsigned int num_to_shuffle = regs_loaded, clobbered;
6114 /* The method employed here will fail if the register list is fully populated
6115 (we need to avoid loading PC directly). */
6116 gdb_assert (num_to_shuffle < 16);
/* CLOBBERED tracks the low registers (r0..rX) the scratch LDM wrote;
   bits are cleared as each value reaches its final home.  */
6121 clobbered = (1 << num_to_shuffle) - 1;
/* Walk the original register list from r15 downward, moving each value
   from its contiguous slot rX..r0 to its real destination.  */
6123 while (num_to_shuffle > 0)
6125 if ((mask & (1 << write_reg)) != 0)
6127 unsigned int read_reg = num_to_shuffle - 1;
6129 if (read_reg != write_reg)
6131 ULONGEST rval = displaced_read_reg (regs, from, read_reg);
6132 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
6133 if (debug_displaced)
6134 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
6135 "loaded register r%d to r%d\n"), read_reg,
6138 else if (debug_displaced)
6139 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
6140 "r%d already in the right place\n"),
/* The destination register now holds valid data; it no longer counts
   as clobbered scratch.  */
6143 clobbered &= ~(1 << write_reg);
6151 /* Restore any registers we scribbled over. */
6152 for (write_reg = 0; clobbered != 0; write_reg++)
6154 if ((clobbered & (1 << write_reg)) != 0)
6156 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
6158 if (debug_displaced)
6159 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
6160 "clobbered register r%d\n"), write_reg);
6161 clobbered &= ~(1 << write_reg);
6165 /* Perform register writeback manually. */
6166 if (dsc->u.block.writeback)
6168 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
6170 if (dsc->u.block.increment)
6171 new_rn_val += regs_loaded * 4;
6173 new_rn_val -= regs_loaded * 4;
6175 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
6180 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
6181 in user-level code (in particular exception return, ldm rn, {...pc}^). */
/* NOTE(review): lines are elided in this extraction (gaps in the embedded
   numbering — return type, braces, some guard conditions); code kept
   byte-identical.  Chooses one of three strategies: run unmodified (no PC
   involvement), emulate entirely (fully-populated LDM list), or rewrite
   the register list to r0..rX and fix up in a cleanup routine.  */
6184 copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
6185 struct displaced_step_closure *dsc)
/* Decode the L/S/U/P/W fields and base register of the A32 block-transfer
   encoding.  */
6187 int load = bit (insn, 20);
6188 int user = bit (insn, 22);
6189 int increment = bit (insn, 23);
6190 int before = bit (insn, 24);
6191 int writeback = bit (insn, 21);
6192 int rn = bits (insn, 16, 19);
6193 CORE_ADDR from = dsc->insn_addr;
6195 /* Block transfers which don't mention PC can be run directly
6197 if (rn != 15 && (insn & 0x8000) == 0)
6198 return copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
/* rn == 15 here: architecturally unpredictable, warn and punt.  */
6202 warning (_("displaced: Unpredictable LDM or STM with "
6203 "base register r15"));
6204 return copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
6207 if (debug_displaced)
6208 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6209 "%.8lx\n", (unsigned long) insn);
/* Record everything the cleanup routines need to undo our rewriting.  */
6211 dsc->u.block.xfer_addr = displaced_read_reg (regs, from, rn);
6212 dsc->u.block.rn = rn;
6214 dsc->u.block.load = load;
6215 dsc->u.block.user = user;
6216 dsc->u.block.increment = increment;
6217 dsc->u.block.before = before;
6218 dsc->u.block.writeback = writeback;
6219 dsc->u.block.cond = bits (insn, 28, 31);
6221 dsc->u.block.regmask = insn & 0xffff;
6225 if ((insn & 0xffff) == 0xffff)
6227 /* LDM with a fully-populated register list. This case is
6228 particularly tricky. Implement for now by fully emulating the
6229 instruction (which might not behave perfectly in all cases, but
6230 these instructions should be rare enough for that not to matter
6232 dsc->modinsn[0] = ARM_NOP;
6234 dsc->cleanup = &cleanup_block_load_all;
6238 /* LDM of a list of registers which includes PC. Implement by
6239 rewriting the list of registers to be transferred into a
6240 contiguous chunk r0...rX before doing the transfer, then shuffling
6241 registers into the correct places in the cleanup routine. */
6242 unsigned int regmask = insn & 0xffff;
6243 unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
6244 unsigned int to = 0, from = 0, i, new_rn;
/* Save r0..r(N-1) so the cleanup routine can restore whatever our
   rewritten list clobbers.  */
6246 for (i = 0; i < num_in_list; i++)
6247 dsc->tmp[i] = displaced_read_reg (regs, from, i);
6249 /* Writeback makes things complicated. We need to avoid clobbering
6250 the base register with one of the registers in our modified
6251 register list, but just using a different register can't work in
6254 ldm r14!, {r0-r13,pc}
6256 which would need to be rewritten as:
6260 but that can't work, because there's no free register for N.
6262 Solve this by turning off the writeback bit, and emulating
6263 writeback manually in the cleanup routine. */
6268 new_regmask = (1 << num_in_list) - 1;
6270 if (debug_displaced)
6271 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6272 "{..., pc}: original reg list %.4x, modified "
6273 "list %.4x\n"), rn, writeback ? "!" : "",
6274 (int) insn & 0xffff, new_regmask);
6276 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
6278 dsc->cleanup = &cleanup_block_load_pc;
6283 /* STM of a list of registers which includes PC. Run the instruction
6284 as-is, but out of line: this will store the wrong value for the PC,
6285 so we must manually fix up the memory in the cleanup routine.
6286 Doing things this way has the advantage that we can auto-detect
6287 the offset of the PC write (which is architecture-dependent) in
6288 the cleanup routine. */
6289 dsc->modinsn[0] = insn;
6291 dsc->cleanup = &cleanup_block_store_pc;
6297 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6298 for Linux, where some SVC instructions must be treated specially. */
/* After the SVC executed out of line, resume at the instruction following
   the original SVC (insn_addr + 4, A32 instruction size).  */
6301 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
6302 struct displaced_step_closure *dsc)
6304 CORE_ADDR from = dsc->insn_addr;
6305 CORE_ADDR resume_addr = from + 4;
6307 if (debug_displaced)
6308 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
6309 "%.8lx\n", (unsigned long) resume_addr);
6311 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
/* Copy an SVC (system call) instruction for displaced stepping.  An
   OS-specific hook (u.svc.copy_svc_os) takes precedence when set.  */
6315 copy_svc (struct gdbarch *gdbarch, uint32_t insn, CORE_ADDR to,
6316 struct regcache *regs, struct displaced_step_closure *dsc)
6318 CORE_ADDR from = dsc->insn_addr;
6320 /* Allow OS-specific code to override SVC handling. */
6321 if (dsc->u.svc.copy_svc_os)
6322 return dsc->u.svc.copy_svc_os (gdbarch, insn, to, regs, dsc);
6324 if (debug_displaced)
6325 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
6326 (unsigned long) insn);
6328 /* Preparation: none.
6329 Insn: unmodified svc.
6330 Cleanup: pc <- insn_addr + 4. */
6332 dsc->modinsn[0] = insn;
6334 dsc->cleanup = &cleanup_svc;
6335 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
   instruction in the scratch area; cleanup_svc sets the real resume
   address itself.  */
6337 dsc->wrote_to_pc = 1;
6342 /* Copy undefined instructions. */
/* Executed as-is out of line so the inferior takes its undefined
   instruction trap at the displaced location.  */
6345 copy_undef (struct gdbarch *gdbarch, uint32_t insn,
6346 struct displaced_step_closure *dsc)
6348 if (debug_displaced)
6349 fprintf_unfiltered (gdb_stdlog,
6350 "displaced: copying undefined insn %.8lx\n",
6351 (unsigned long) insn);
6353 dsc->modinsn[0] = insn;
6358 /* Copy unpredictable instructions. */
/* Architecturally-unpredictable encodings are copied unmodified; whatever
   the hardware does with them happens out of line.  */
6361 copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
6362 struct displaced_step_closure *dsc)
6364 if (debug_displaced)
6365 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
6366 "%.8lx\n", (unsigned long) insn);
6368 dsc->modinsn[0] = insn;
6373 /* The decode_* functions are instruction decoding helpers. They mostly follow
6374 the presentation in the ARM ARM. */
/* Decode the miscellaneous / memory-hint / Advanced-SIMD space of the
   unconditional (cond == 0b1111) encodings: cps, setend, pld/pldw/pli,
   clrex, barriers, and Neon data-processing / element load-store.  */
6377 decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
6378 struct regcache *regs,
6379 struct displaced_step_closure *dsc)
6381 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
6382 unsigned int rn = bits (insn, 16, 19);
6384 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0xe) == 0x0)
6385 return copy_unmodified (gdbarch, insn, "cps", dsc);
6386 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0xe) == 0x1)
6387 return copy_unmodified (gdbarch, insn, "setend", dsc);
6388 else if ((op1 & 0x60) == 0x20)
6389 return copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
6390 else if ((op1 & 0x71) == 0x40)
6391 return copy_unmodified (gdbarch, insn, "neon elt/struct load/store", dsc);
6392 else if ((op1 & 0x77) == 0x41)
6393 return copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6394 else if ((op1 & 0x77) == 0x45)
6395 return copy_preload (gdbarch, insn, regs, dsc); /* pli. */
6396 else if ((op1 & 0x77) == 0x51)
/* NOTE(review): the condition distinguishing the pld case from the
   unpredictable case below appears elided here — confirm against the
   full source.  */
6399 return copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6401 return copy_unpred (gdbarch, insn, dsc);
6403 else if ((op1 & 0x77) == 0x55)
6404 return copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6405 else if (op1 == 0x57)
6408 case 0x1: return copy_unmodified (gdbarch, insn, "clrex", dsc);
6409 case 0x4: return copy_unmodified (gdbarch, insn, "dsb", dsc);
6410 case 0x5: return copy_unmodified (gdbarch, insn, "dmb", dsc);
6411 case 0x6: return copy_unmodified (gdbarch, insn, "isb", dsc);
6412 default: return copy_unpred (gdbarch, insn, dsc);
6414 else if ((op1 & 0x63) == 0x43)
6415 return copy_unpred (gdbarch, insn, dsc);
6416 else if ((op2 & 0x1) == 0x0)
/* Bit 7 of op1 is ignored below (register-offset hint forms).  */
6417 switch (op1 & ~0x80)
6420 return copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6422 return copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
6423 case 0x71: case 0x75:
6425 return copy_preload_reg (gdbarch, insn, regs, dsc);
6426 case 0x63: case 0x67: case 0x73: case 0x77:
6427 return copy_unpred (gdbarch, insn, dsc);
6429 return copy_undef (gdbarch, insn, dsc);
6432 return copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
/* Decode the unconditional (cond == 0b1111) instruction space: memory
   hints/Neon (bit 27 clear), srs/rfe, blx-immediate, and coprocessor
   load/store and register-transfer forms.  */
6436 decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
6437 struct regcache *regs,
6438 struct displaced_step_closure *dsc)
6440 if (bit (insn, 27) == 0)
6441 return decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
6442 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
6443 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
6446 return copy_unmodified (gdbarch, insn, "srs", dsc);
6449 return copy_unmodified (gdbarch, insn, "rfe", dsc);
6451 case 0x4: case 0x5: case 0x6: case 0x7:
/* blx with immediate offset (unconditional branch-and-exchange).  */
6452 return copy_b_bl_blx (gdbarch, insn, regs, dsc);
6455 switch ((insn & 0xe00000) >> 21)
6457 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
6459 return copy_copro_load_store (gdbarch, insn, regs, dsc);
6462 return copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6465 return copy_undef (gdbarch, insn, dsc);
6470 int rn_f = (bits (insn, 16, 19) == 0xf);
6471 switch ((insn & 0xe00000) >> 21)
6474 /* ldc/ldc2 imm (undefined for rn == pc). */
6475 return rn_f ? copy_undef (gdbarch, insn, dsc)
6476 : copy_copro_load_store (gdbarch, insn, regs, dsc);
6479 return copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6481 case 0x4: case 0x5: case 0x6: case 0x7:
6482 /* ldc/ldc2 lit (undefined for rn != pc). */
6483 return rn_f ? copy_copro_load_store (gdbarch, insn, regs, dsc)
6484 : copy_undef (gdbarch, insn, dsc);
6487 return copy_undef (gdbarch, insn, dsc);
6492 return copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
6495 if (bits (insn, 16, 19) == 0xf)
6497 return copy_copro_load_store (gdbarch, insn, regs, dsc);
6499 return copy_undef (gdbarch, insn, dsc);
6503 return copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6505 return copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6509 return copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6511 return copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6514 return copy_undef (gdbarch, insn, dsc);
6518 /* Decode miscellaneous instructions in dp/misc encoding space. */
/* Covers mrs/msr, bx, clz, bxj, blx-register, saturating add/subtract,
   bkpt and smc.  NOTE(review): the outer switch statements selecting on
   op2/op1 appear elided in this extraction; code kept byte-identical.  */
6521 decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
6522 struct regcache *regs,
6523 struct displaced_step_closure *dsc)
6525 unsigned int op2 = bits (insn, 4, 6);
6526 unsigned int op = bits (insn, 21, 22);
6527 unsigned int op1 = bits (insn, 16, 19);
6532 return copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
6535 if (op == 0x1) /* bx. */
6536 return copy_bx_blx_reg (gdbarch, insn, regs, dsc);
6538 return copy_unmodified (gdbarch, insn, "clz", dsc);
6540 return copy_undef (gdbarch, insn, dsc);
6544 /* Not really supported. */
6545 return copy_unmodified (gdbarch, insn, "bxj", dsc);
6547 return copy_undef (gdbarch, insn, dsc);
6551 return copy_bx_blx_reg (gdbarch, insn,
6552 regs, dsc); /* blx register. */
6554 return copy_undef (gdbarch, insn, dsc);
6557 return copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
6561 return copy_unmodified (gdbarch, insn, "bkpt", dsc);
6563 /* Not really supported. */
6564 return copy_unmodified (gdbarch, insn, "smc", dsc);
6567 return copy_undef (gdbarch, insn, dsc);
/* Decode the data-processing / miscellaneous space (A32 op bits 25..27 ==
   0b00x): immediate and register-shifted ALU forms, movw/movt, msr-imm,
   multiplies, synchronization primitives, and extra load/stores.  */
6572 decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
6573 struct displaced_step_closure *dsc)
6576 switch (bits (insn, 20, 24))
6579 return copy_unmodified (gdbarch, insn, "movw", dsc);
6582 return copy_unmodified (gdbarch, insn, "movt", dsc);
6584 case 0x12: case 0x16:
6585 return copy_unmodified (gdbarch, insn, "msr imm", dsc);
6588 return copy_alu_imm (gdbarch, insn, regs, dsc);
6592 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
6594 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
6595 return copy_alu_reg (gdbarch, insn, regs, dsc);
6596 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
6597 return copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
6598 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
6599 return decode_miscellaneous (gdbarch, insn, regs, dsc);
6600 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
6601 return copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
6602 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
6603 return copy_unmodified (gdbarch, insn, "mul/mla", dsc);
6604 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
6605 return copy_unmodified (gdbarch, insn, "synch", dsc);
6606 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
6607 /* 2nd arg means "unprivileged". */
6608 return copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
6612 /* Should be unreachable. */
/* Decode single word / unsigned-byte load and store forms (ldr/str,
   ldrb/strb and their unprivileged variants).  The three trailing int
   arguments to copy_ldr_str_ldrb_strb select, in order: load (vs store),
   byte-size (vs word), and user/unprivileged access — inferred from the
   op1 bit patterns tested; confirm against the callee's signature.  */
6617 decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
6618 struct regcache *regs,
6619 struct displaced_step_closure *dsc)
6621 int a = bit (insn, 25), b = bit (insn, 4);
6622 uint32_t op1 = bits (insn, 20, 24);
6623 int rn_f = bits (insn, 16, 19) == 0xf;
6625 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
6626 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
6627 return copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 0, 0);
6628 else if ((!a && (op1 & 0x17) == 0x02)
6629 || (a && (op1 & 0x17) == 0x02 && !b))
6630 return copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 0, 1);
6631 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
6632 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
6633 return copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 0, 0);
6634 else if ((!a && (op1 & 0x17) == 0x03)
6635 || (a && (op1 & 0x17) == 0x03 && !b))
6636 return copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 0, 1);
6637 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
6638 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
6639 return copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
6640 else if ((!a && (op1 & 0x17) == 0x06)
6641 || (a && (op1 & 0x17) == 0x06 && !b))
6642 return copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
6643 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
6644 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
6645 return copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
6646 else if ((!a && (op1 & 0x17) == 0x07)
6647 || (a && (op1 & 0x17) == 0x07 && !b))
6648 return copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
6650 /* Should be unreachable. */
/* Decode the media instruction space: parallel add/sub, pack/saturate/
   reverse, usad8/usada8 and the bit-field instructions (sbfx/bfc/bfi/
   ubfx).  None of these can write the PC, so all are copied unmodified
   or flagged undefined.  */
6655 decode_media (struct gdbarch *gdbarch, uint32_t insn,
6656 struct displaced_step_closure *dsc)
6658 switch (bits (insn, 20, 24))
6660 case 0x00: case 0x01: case 0x02: case 0x03:
6661 return copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
6663 case 0x04: case 0x05: case 0x06: case 0x07:
6664 return copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
6666 case 0x08: case 0x09: case 0x0a: case 0x0b:
6667 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
6668 return copy_unmodified (gdbarch, insn,
6669 "decode/pack/unpack/saturate/reverse", dsc);
6672 if (bits (insn, 5, 7) == 0) /* op2. */
/* Rd == 0xf distinguishes usad8 from usada8 (no accumulator).  */
6674 if (bits (insn, 12, 15) == 0xf)
6675 return copy_unmodified (gdbarch, insn, "usad8", dsc);
6677 return copy_unmodified (gdbarch, insn, "usada8", dsc);
6680 return copy_undef (gdbarch, insn, dsc);
6682 case 0x1a: case 0x1b:
6683 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6684 return copy_unmodified (gdbarch, insn, "sbfx", dsc);
6686 return copy_undef (gdbarch, insn, dsc);
6688 case 0x1c: case 0x1d:
6689 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
/* Rn == 0xf distinguishes bfc from bfi.  */
6691 if (bits (insn, 0, 3) == 0xf)
6692 return copy_unmodified (gdbarch, insn, "bfc", dsc);
6694 return copy_unmodified (gdbarch, insn, "bfi", dsc);
6697 return copy_undef (gdbarch, insn, dsc);
6699 case 0x1e: case 0x1f:
6700 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6701 return copy_unmodified (gdbarch, insn, "ubfx", dsc);
6703 return copy_undef (gdbarch, insn, dsc);
6706 /* Should be unreachable. */
/* Dispatch between branch (b/bl/blx) and block-transfer (ldm/stm)
   handlers.  NOTE(review): the selecting condition (presumably on bit 25)
   appears elided in this extraction — confirm against the full source.  */
6711 decode_b_bl_ldmstm (struct gdbarch *gdbarch, int32_t insn,
6712 struct regcache *regs, struct displaced_step_closure *dsc)
6715 return copy_b_bl_blx (gdbarch, insn, regs, dsc);
6717 return copy_block_xfer (gdbarch, insn, regs, dsc);
/* Decode VFP/Neon extension-register load/store forms (vstm/vpush,
   vldm/vpop, vstr/vldr) and the 64-bit register transfers mrrc/mcrr.  */
6721 decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
6722 struct regcache *regs,
6723 struct displaced_step_closure *dsc)
6725 unsigned int opcode = bits (insn, 20, 24);
6729 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
6730 return copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
6732 case 0x08: case 0x0a: case 0x0c: case 0x0e:
6733 case 0x12: case 0x16:
6734 return copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
6736 case 0x09: case 0x0b: case 0x0d: case 0x0f:
6737 case 0x13: case 0x17:
6738 return copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
6740 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6741 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6742 /* Note: no writeback for these instructions. Bit 25 will always be
6743 zero though (via caller), so the following works OK. */
6744 return copy_copro_load_store (gdbarch, insn, regs, dsc);
6747 /* Should be unreachable. */
/* Decode the coprocessor / supervisor-call space (A32 op bits 26..27 ==
   0b11): VFP/Neon register load/store, ldc/stc, mcr(2)/mrc(2),
   mcrr(2)/mrrc(2), cdp(2) and svc.  Coprocessors 10/11 ((coproc & 0xe)
   == 0xa) are the VFP/Neon register banks and get distinct handling.  */
6752 decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn, CORE_ADDR to,
6753 struct regcache *regs, struct displaced_step_closure *dsc)
6755 unsigned int op1 = bits (insn, 20, 25);
6756 int op = bit (insn, 4);
6757 unsigned int coproc = bits (insn, 8, 11);
6758 unsigned int rn = bits (insn, 16, 19);
6760 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
6761 return decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
6762 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
6763 && (coproc & 0xe) != 0xa)
6765 return copy_copro_load_store (gdbarch, insn, regs, dsc);
6766 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
6767 && (coproc & 0xe) != 0xa)
6768 /* ldc/ldc2 imm/lit. */
6769 return copy_copro_load_store (gdbarch, insn, regs, dsc);
6770 else if ((op1 & 0x3e) == 0x00)
6771 return copy_undef (gdbarch, insn, dsc);
6772 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
6773 return copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
6774 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
6775 return copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6776 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
6777 return copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6778 else if ((op1 & 0x30) == 0x20 && !op)
6780 if ((coproc & 0xe) == 0xa)
6781 return copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
6783 return copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6785 else if ((op1 & 0x30) == 0x20 && op)
6786 return copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
6787 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
6788 return copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6789 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
6790 return copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6791 else if ((op1 & 0x30) == 0x30)
6792 return copy_svc (gdbarch, insn, to, regs, dsc);
6794 return copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
/* Top-level decoder for displaced stepping: initialize DSC defaults, then
   dispatch INSN to the decode_* family based on its major opcode bits.
   Errors if the inferior is currently executing Thumb code (only ARM
   mode is supported here).  */
6798 arm_process_displaced_insn (struct gdbarch *gdbarch, uint32_t insn,
6799 CORE_ADDR from, CORE_ADDR to,
6800 struct regcache *regs,
6801 struct displaced_step_closure *dsc)
6805 if (!displaced_in_arm_mode (regs))
6806 error (_("Displaced stepping is only supported in ARM mode"));
6808 /* Most displaced instructions use a 1-instruction scratch space, so set this
6809 here and override below if/when necessary. */
6811 dsc->insn_addr = from;
6812 dsc->scratch_base = to;
6813 dsc->cleanup = NULL;
6814 dsc->wrote_to_pc = 0;
/* cond == 0b1111 selects the unconditional instruction space.  */
6816 if ((insn & 0xf0000000) == 0xf0000000)
6817 err = decode_unconditional (gdbarch, insn, regs, dsc);
/* Otherwise dispatch on op bits 25..27 plus bit 4.  */
6818 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
6820 case 0x0: case 0x1: case 0x2: case 0x3:
6821 err = decode_dp_misc (gdbarch, insn, regs, dsc);
6824 case 0x4: case 0x5: case 0x6:
6825 err = decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
6829 err = decode_media (gdbarch, insn, dsc);
6832 case 0x8: case 0x9: case 0xa: case 0xb:
6833 err = decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
6836 case 0xc: case 0xd: case 0xe: case 0xf:
6837 err = decode_svc_copro (gdbarch, insn, to, regs, dsc);
/* NOTE(review): the guard testing ERR before this internal_error appears
   elided in this extraction.  */
6842 internal_error (__FILE__, __LINE__,
6843 _("arm_process_displaced_insn: Instruction decode error"));
6846 /* Actually set up the scratch space for a displaced instruction. */
/* Writes dsc->modinsn[0..numinsns-1] into the scratch area at TO and
   plants the ARM breakpoint immediately after, so the inferior traps
   back to GDB once the displaced instruction(s) have executed.  */
6849 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
6850 CORE_ADDR to, struct displaced_step_closure *dsc)
6852 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
6854 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
6856 /* Poke modified instruction(s). */
6857 for (i = 0; i < dsc->numinsns; i++)
6859 if (debug_displaced)
6860 fprintf_unfiltered (gdb_stdlog, "displaced: writing insn %.8lx at "
6861 "%.8lx\n", (unsigned long) dsc->modinsn[i],
6862 (unsigned long) to + i * 4);
6863 write_memory_unsigned_integer (to + i * 4, 4, byte_order_for_code,
6867 /* Put breakpoint afterwards. */
6868 write_memory (to + dsc->numinsns * 4, tdep->arm_breakpoint,
6869 tdep->arm_breakpoint_size);
6871 if (debug_displaced)
6872 fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
6873 paddress (gdbarch, from), paddress (gdbarch, to));
6876 /* Entry point for copying an instruction into scratch space for displaced
   stepping.  Allocates the step closure (ownership transfers to the
   caller/displaced-stepping machinery), reads the original instruction
   at FROM, decodes it, and populates the scratch area at TO.  */
6879 struct displaced_step_closure *
6880 arm_displaced_step_copy_insn (struct gdbarch *gdbarch,
6881 CORE_ADDR from, CORE_ADDR to,
6882 struct regcache *regs)
6884 struct displaced_step_closure *dsc
6885 = xmalloc (sizeof (struct displaced_step_closure));
6886 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
6887 uint32_t insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
6889 if (debug_displaced)
6890 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
6891 "at %.8lx\n", (unsigned long) insn,
6892 (unsigned long) from);
6894 arm_process_displaced_insn (gdbarch, insn, from, to, regs, dsc);
6895 arm_displaced_init_closure (gdbarch, from, to, dsc);
6900 /* Entry point for cleaning things up after a displaced instruction has been
   executed: run the decoder-installed cleanup hook (if any), then advance
   the PC past the original instruction unless the step already wrote it.  */
6904 arm_displaced_step_fixup (struct gdbarch *gdbarch,
6905 struct displaced_step_closure *dsc,
6906 CORE_ADDR from, CORE_ADDR to,
6907 struct regcache *regs)
6910 dsc->cleanup (gdbarch, regs, dsc);
6912 if (!dsc->wrote_to_pc)
6913 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, dsc->insn_addr + 4);
6916 #include "bfd-in2.h"
6917 #include "libcoff.h"
/* Disassembler entry point.  When MEMADDR is a Thumb address, install a
   one-off fake COFF Thumb symbol into INFO so opcodes' ARM disassembler
   switches to Thumb decoding, then dispatch to the big- or little-endian
   printer.  The statics are built lazily and reused across calls (not
   thread-safe, like the rest of this path).  */
6920 gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
6922 struct gdbarch *gdbarch = info->application_data;
6924 if (arm_pc_is_thumb (gdbarch, memaddr))
6926 static asymbol *asym;
6927 static combined_entry_type ce;
6928 static struct coff_symbol_struct csym;
6929 static struct bfd fake_bfd;
6930 static bfd_target fake_target;
6932 if (csym.native == NULL)
6934 /* Create a fake symbol vector containing a Thumb symbol.
6935 This is solely so that the code in print_insn_little_arm()
6936 and print_insn_big_arm() in opcodes/arm-dis.c will detect
6937 the presence of a Thumb symbol and switch to decoding
6938 Thumb instructions. */
6940 fake_target.flavour = bfd_target_coff_flavour;
6941 fake_bfd.xvec = &fake_target;
6942 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
6944 csym.symbol.the_bfd = &fake_bfd;
6945 csym.symbol.name = "fake";
6946 asym = (asymbol *) & csym;
/* Strip the Thumb bit before handing the address to the printer.  */
6949 memaddr = UNMAKE_THUMB_ADDR (memaddr);
6950 info->symbols = &asym;
6953 info->symbols = NULL;
6955 if (info->endian == BFD_ENDIAN_BIG)
6956 return print_insn_big_arm (memaddr, info);
6958 return print_insn_little_arm (memaddr, info);
6961 /* The following define instruction sequences that will cause ARM
6962 cpu's to take an undefined instruction trap. These are used to
6963 signal a breakpoint to GDB.
6965 The newer ARMv4T cpu's are capable of operating in ARM or Thumb
6966 modes. A different instruction is required for each mode. The ARM
6967 cpu's can also be big or little endian. Thus four different
6968 instructions are needed to support all cases.
6970 Note: ARMv4 defines several new instructions that will take the
6971 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
6972 not in fact add the new instructions. The new undefined
6973 instructions in ARMv4 are all instructions that had no defined
6974 behaviour in earlier chips. There is no guarantee that they will
6975 raise an exception, but may be treated as NOP's. In practice, it
6976 may only be safe to rely on instructions matching:
6978 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
6979 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
6980 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
6982 Even this may only be true if the condition predicate is true. The
6983 following use a condition predicate of ALWAYS so it is always TRUE.
6985 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
6986 and NetBSD all use a software interrupt rather than an undefined
6987 instruction to force a trap. This can be handled by the
6988 abi-specific code during establishment of the gdbarch vector. */
6990 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
6991 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
/* The Thumb pattern is byte-symmetric (0xbe,0xbe), so the little- and
   big-endian sequences are identical.  */
6992 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
6993 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
6995 static const char arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
6996 static const char arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
6997 static const char arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
6998 static const char arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
7000 /* Determine the type and size of breakpoint to insert at PCPTR. Uses
7001 the program counter value to determine whether a 16-bit or 32-bit
7002 breakpoint should be used. It returns a pointer to a string of
7003 bytes that encode a breakpoint instruction, stores the length of
7004 the string to *lenptr, and adjusts the program counter (if
7005 necessary) to point to the actual memory location where the
7006 breakpoint should be inserted. */
7008 static const unsigned char *
7009 arm_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr, int *lenptr)
7011 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7012 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7014 if (arm_pc_is_thumb (gdbarch, *pcptr))
/* Clear the Thumb bit so the breakpoint lands on the real address.  */
7016 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
7018 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
7019 check whether we are replacing a 32-bit instruction. */
7020 if (tdep->thumb2_breakpoint != NULL)
7023 if (target_read_memory (*pcptr, buf, 2) == 0)
7025 unsigned short inst1;
7026 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
/* First halfword 0b111xx with xx != 00 marks a 32-bit Thumb-2
   instruction, which needs the 32-bit breakpoint.  */
7027 if ((inst1 & 0xe000) == 0xe000 && (inst1 & 0x1800) != 0)
7029 *lenptr = tdep->thumb2_breakpoint_size;
7030 return tdep->thumb2_breakpoint;
7035 *lenptr = tdep->thumb_breakpoint_size;
7036 return tdep->thumb_breakpoint;
7040 *lenptr = tdep->arm_breakpoint_size;
7041 return tdep->arm_breakpoint;
/* Compute the breakpoint "kind" to report to a remote stub: delegate to
   arm_breakpoint_from_pc, then remap the 32-bit Thumb-2 case.
   NOTE(review): the assignment replacing *kindptr with the remote magic
   value appears elided at the end of this extraction — confirm against
   the full source.  */
7046 arm_remote_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr,
7049 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7051 arm_breakpoint_from_pc (gdbarch, pcptr, kindptr);
7053 if (arm_pc_is_thumb (gdbarch, *pcptr) && *kindptr == 4)
7054 /* The documented magic value for a 32-bit Thumb-2 breakpoint, so
7055 that this is not confused with a 32-bit ARM breakpoint. */
7059 /* Extract from an array REGBUF containing the (raw) register state a
7060 function return value of type TYPE, and copy that, in virtual
7061 format, into VALBUF. */
7064 arm_extract_return_value (struct type *type, struct regcache *regs,
7067 struct gdbarch *gdbarch = get_regcache_arch (regs);
7068 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7070 if (TYPE_CODE_FLT == TYPE_CODE (type))
7072 switch (gdbarch_tdep (gdbarch)->fp_model)
7076 /* The value is in register F0 in internal format. We need to
7077 extract the raw value and then convert it to the desired
7079 bfd_byte tmpbuf[FP_REGISTER_SIZE];
7081 regcache_cooked_read (regs, ARM_F0_REGNUM, tmpbuf);
7082 convert_from_extended (floatformat_from_type (type), tmpbuf,
7083 valbuf, gdbarch_byte_order (gdbarch));
7087 case ARM_FLOAT_SOFT_FPA:
7088 case ARM_FLOAT_SOFT_VFP:
7089 /* ARM_FLOAT_VFP can arise if this is a variadic function so
7090 not using the VFP ABI code. */
7092 regcache_cooked_read (regs, ARM_A1_REGNUM, valbuf);
/* Doubles straddle r0/r1; fetch the second word.  */
7093 if (TYPE_LENGTH (type) > 4)
7094 regcache_cooked_read (regs, ARM_A1_REGNUM + 1,
7095 valbuf + INT_REGISTER_SIZE);
7099 internal_error (__FILE__, __LINE__,
7100 _("arm_extract_return_value: "
7101 "Floating point model not supported"));
7105 else if (TYPE_CODE (type) == TYPE_CODE_INT
7106 || TYPE_CODE (type) == TYPE_CODE_CHAR
7107 || TYPE_CODE (type) == TYPE_CODE_BOOL
7108 || TYPE_CODE (type) == TYPE_CODE_PTR
7109 || TYPE_CODE (type) == TYPE_CODE_REF
7110 || TYPE_CODE (type) == TYPE_CODE_ENUM)
7112 /* If the type is a plain integer, then the access is
7113 straight-forward. Otherwise we have to play around a bit more. */
7114 int len = TYPE_LENGTH (type);
7115 int regno = ARM_A1_REGNUM;
7120 /* By using store_unsigned_integer we avoid having to do
7121 anything special for small big-endian values. */
7122 regcache_cooked_read_unsigned (regs, regno++, &tmp)
7123 store_unsigned_integer (valbuf,
7124 (len > INT_REGISTER_SIZE
7125 ? INT_REGISTER_SIZE : len),
7127 len -= INT_REGISTER_SIZE;
7128 valbuf += INT_REGISTER_SIZE;
7133 /* For a structure or union the behaviour is as if the value had
7134 been stored to word-aligned memory and then loaded into
7135 registers with 32-bit load instruction(s). */
7136 int len = TYPE_LENGTH (type);
7137 int regno = ARM_A1_REGNUM;
7138 bfd_byte tmpbuf[INT_REGISTER_SIZE];
7142 regcache_cooked_read (regs, regno++, tmpbuf);
7143 memcpy (valbuf, tmpbuf,
7144 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
7145 len -= INT_REGISTER_SIZE;
7146 valbuf += INT_REGISTER_SIZE;
7152 /* Will a function return an aggregate type in memory or in a
7153 register? Return 0 if an aggregate type can be returned in a
7154 register, 1 if it must be returned in memory. */
7157 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
7160 enum type_code code;
7162 CHECK_TYPEDEF (type);
7164 /* In the ARM ABI, "integer" like aggregate types are returned in
7165 registers. For an aggregate type to be integer like, its size
7166 must be less than or equal to INT_REGISTER_SIZE and the
7167 offset of each addressable subfield must be zero. Note that bit
7168 fields are not addressable, and all addressable subfields of
7169 unions always start at offset zero.
7171 This function is based on the behaviour of GCC 2.95.1.
7172 See: gcc/arm.c: arm_return_in_memory() for details.
7174 Note: All versions of GCC before GCC 2.95.2 do not set up the
7175 parameters correctly for a function returning the following
7176 structure: struct { float f;}; This should be returned in memory,
7177 not a register. Richard Earnshaw sent me a patch, but I do not
7178 know of any way to detect if a function like the above has been
7179 compiled with the correct calling convention. */
7181 /* All aggregate types that won't fit in a register must be returned
7183 if (TYPE_LENGTH (type) > INT_REGISTER_SIZE)
7188 /* The AAPCS says all aggregates not larger than a word are returned
7190 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
7193 /* The only aggregate types that can be returned in a register are
7194 structs and unions. Arrays must be returned in memory. */
7195 code = TYPE_CODE (type);
7196 if ((TYPE_CODE_STRUCT != code) && (TYPE_CODE_UNION != code))
7201 /* Assume all other aggregate types can be returned in a register.
7202 Run a check for structures, unions and arrays. */
7205 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
7208 /* Need to check if this struct/union is "integer" like. For
7209 this to be true, its size must be less than or equal to
7210 INT_REGISTER_SIZE and the offset of each addressable
7211 subfield must be zero. Note that bit fields are not
7212 addressable, and unions always start at offset zero. If any
7213 of the subfields is a floating point type, the struct/union
7214 cannot be an integer type. */
7216 /* For each field in the object, check:
7217 1) Is it FP? --> yes, nRc = 1;
7218 2) Is it addressable (bitpos != 0) and
7219 not packed (bitsize == 0)?
7223 for (i = 0; i < TYPE_NFIELDS (type); i++)
7225 enum type_code field_type_code;
7226 field_type_code = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
7229 /* Is it a floating point type field? */
7230 if (field_type_code == TYPE_CODE_FLT)
7236 /* If bitpos != 0, then we have to care about it. */
7237 if (TYPE_FIELD_BITPOS (type, i) != 0)
7239 /* Bitfields are not addressable. If the field bitsize is
7240 zero, then the field is not packed. Hence it cannot be
7241 a bitfield or any other packed type. */
7242 if (TYPE_FIELD_BITSIZE (type, i) == 0)
7254 /* Write into appropriate registers a function return value of type
7255 TYPE, given in virtual format. */
/* NOTE(review): truncated listing -- the embedded numbering skips, so
   braces, `case ARM_FLOAT_FPA:`, `break;` statements and the loop
   headers around the multi-register copies are missing from this
   extract.  */
7258 arm_store_return_value (struct type *type, struct regcache *regs,
7259 const gdb_byte *valbuf)
7261 struct gdbarch *gdbarch = get_regcache_arch (regs);
7262 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
/* Floating-point results: placement depends on the FP model in use.  */
7264 if (TYPE_CODE (type) == TYPE_CODE_FLT)
7266 char buf[MAX_REGISTER_SIZE];
7268 switch (gdbarch_tdep (gdbarch)->fp_model)
/* FPA: convert to the 12-byte extended format and place in F0.  */
7272 convert_to_extended (floatformat_from_type (type), buf, valbuf,
7273 gdbarch_byte_order (gdbarch));
7274 regcache_cooked_write (regs, ARM_F0_REGNUM, buf);
7277 case ARM_FLOAT_SOFT_FPA:
7278 case ARM_FLOAT_SOFT_VFP:
7279 /* ARM_FLOAT_VFP can arise if this is a variadic function so
7280 not using the VFP ABI code. */
7282 regcache_cooked_write (regs, ARM_A1_REGNUM, valbuf);
7283 if (TYPE_LENGTH (type) > 4)
7284 regcache_cooked_write (regs, ARM_A1_REGNUM + 1,
7285 valbuf + INT_REGISTER_SIZE)
7289 internal_error (__FILE__, __LINE__,
7290 _("arm_store_return_value: Floating "
7291 "point model not supported"));
7295 else if (TYPE_CODE (type) == TYPE_CODE_INT
7296 || TYPE_CODE (type) == TYPE_CODE_CHAR
7297 || TYPE_CODE (type) == TYPE_CODE_BOOL
7298 || TYPE_CODE (type) == TYPE_CODE_PTR
7299 || TYPE_CODE (type) == TYPE_CODE_REF
7300 || TYPE_CODE (type) == TYPE_CODE_ENUM)
7302 if (TYPE_LENGTH (type) <= 4)
7304 /* Values of one word or less are zero/sign-extended and
7306 bfd_byte tmpbuf[INT_REGISTER_SIZE];
7307 LONGEST val = unpack_long (type, valbuf);
7309 store_signed_integer (tmpbuf, INT_REGISTER_SIZE, byte_order, val);
7310 regcache_cooked_write (regs, ARM_A1_REGNUM, tmpbuf);
7314 /* Integral values greater than one word are stored in consecutive
7315 registers starting with r0. This will always be a multiple of
7316 the register size. */
7317 int len = TYPE_LENGTH (type);
7318 int regno = ARM_A1_REGNUM;
7322 regcache_cooked_write (regs, regno++, valbuf);
7323 len -= INT_REGISTER_SIZE;
7324 valbuf += INT_REGISTER_SIZE;
7330 /* For a structure or union the behaviour is as if the value had
7331 been stored to word-aligned memory and then loaded into
7332 registers with 32-bit load instruction(s). */
7333 int len = TYPE_LENGTH (type);
7334 int regno = ARM_A1_REGNUM;
7335 bfd_byte tmpbuf[INT_REGISTER_SIZE];
/* Copy through a word-sized staging buffer so a short tail (len <
   INT_REGISTER_SIZE) never reads past the end of VALBUF.  */
7339 memcpy (tmpbuf, valbuf,
7340 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
7341 regcache_cooked_write (regs, regno++, tmpbuf);
7342 len -= INT_REGISTER_SIZE;
7343 valbuf += INT_REGISTER_SIZE;
7349 /* Handle function return values.  Implements the gdbarch
   return_value hook: decides whether VALTYPE is returned in registers
   or in memory, and (when READBUF/WRITEBUF are non-NULL) fetches or
   stores the value via arm_extract_return_value /
   arm_store_return_value.  NOTE(review): truncated listing -- braces
   and some declarations (e.g. `i`, `vfp_base_count`, `name_buf`,
   `regnum`) are missing from this extract.  */
7351 static enum return_value_convention
7352 arm_return_value (struct gdbarch *gdbarch, struct type *func_type,
7353 struct type *valtype, struct regcache *regcache,
7354 gdb_byte *readbuf, const gdb_byte *writebuf)
7356 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7357 enum arm_vfp_cprc_base_type vfp_base_type;
/* VFP "co-processor register candidate" values (AAPCS VFP variant) are
   returned in s/d/q registers rather than the core registers.  */
7360 if (arm_vfp_abi_for_function (gdbarch, func_type)
7361 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
7363 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
7364 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
7366 for (i = 0; i < vfp_base_count; i++)
/* Quad registers have no single raw register; go through the
   two-double-register helpers.  */
7368 if (reg_char == 'q')
7371 arm_neon_quad_write (gdbarch, regcache, i,
7372 writebuf + i * unit_length);
7375 arm_neon_quad_read (gdbarch, regcache, i,
7376 readbuf + i * unit_length);
/* s/d registers: look the register up by its user-visible name.  */
7383 sprintf (name_buf, "%c%d", reg_char, i);
7384 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
7387 regcache_cooked_write (regcache, regnum,
7388 writebuf + i * unit_length);
7390 regcache_cooked_read (regcache, regnum,
7391 readbuf + i * unit_length);
7394 return RETURN_VALUE_REGISTER_CONVENTION;
/* Aggregates may need the struct-return (in-memory) convention,
   depending on the ABI and on arm_return_in_memory's analysis.  */
7397 if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
7398 || TYPE_CODE (valtype) == TYPE_CODE_UNION
7399 || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
7401 if (tdep->struct_return == pcc_struct_return
7402 || arm_return_in_memory (gdbarch, valtype))
7403 return RETURN_VALUE_STRUCT_CONVENTION;
7407 arm_store_return_value (valtype, regcache, writebuf);
7410 arm_extract_return_value (valtype, regcache, readbuf);
7412 return RETURN_VALUE_REGISTER_CONVENTION;
/* Determine the longjmp target PC: read the jmp_buf whose address is
   in r0 (ARM_A1_REGNUM) and extract the saved PC at the tdep-specified
   slot (jb_pc * jb_elt_size).  Stores the result in *PC.
   NOTE(review): truncated listing -- the return-type line, the
   `jb_addr` declaration, and the success/failure `return` statements
   are missing from this extract.  */
7417 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
7419 struct gdbarch *gdbarch = get_frame_arch (frame);
7420 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7421 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7423 char buf[INT_REGISTER_SIZE];
/* By the time this is called, r0 still holds the jmp_buf argument.  */
7425 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
7427 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
7431 *pc = extract_unsigned_integer (buf, INT_REGISTER_SIZE, byte_order);
7435 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
7436 return the target PC. Otherwise return 0. */
/* NOTE(review): truncated listing -- the return-type line and the
   declarations of `name`, `namelen`, `regno` and `target_name` are
   missing from this extract.  */
7439 arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
7443 CORE_ADDR start_addr;
7445 /* Find the starting address and name of the function containing the PC. */
7446 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
7449 /* If PC is in a Thumb call or return stub, return the address of the
7450 target PC, which is in a register. The thunk functions are called
7451 _call_via_xx, where x is the register name. The possible names
7452 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
7453 functions, named __ARM_call_via_r[0-7]. */
7454 if (strncmp (name, "_call_via_", 10) == 0
7455 || strncmp (name, "__ARM_call_via_", strlen ("__ARM_call_via_")) == 0)
7457 /* Use the name suffix to determine which register contains the
7459 static char *table[15] =
7460 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
7461 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
/* All register-name suffixes are two characters, so the suffix starts
   two characters before the end of the stub name.  */
7464 int offset = strlen (name) - 2;
7466 for (regno = 0; regno <= 14; regno++)
7467 if (strcmp (&name[offset], table[regno]) == 0)
7468 return get_frame_register_unsigned (frame, regno);
7471 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
7472 non-interworking calls to foo. We could decode the stubs
7473 to find the target but it's easier to use the symbol table. */
7474 namelen = strlen (name);
7475 if (name[0] == '_' && name[1] == '_'
7476 && ((namelen > 2 + strlen ("_from_thumb")
7477 && strncmp (name + namelen - strlen ("_from_thumb"), "_from_thumb",
7478 strlen ("_from_thumb")) == 0)
7479 || (namelen > 2 + strlen ("_from_arm")
7480 && strncmp (name + namelen - strlen ("_from_arm"), "_from_arm",
7481 strlen ("_from_arm")) == 0)))
/* Strip the leading "__" and the trailing "_from_arm"/"_from_thumb" to
   recover the real target symbol name.  */
7484 int target_len = namelen - 2;
7485 struct minimal_symbol *minsym;
7486 struct objfile *objfile;
7487 struct obj_section *sec;
/* Final 'b' distinguishes "..._from_thumb" from "..._from_arm".  */
7489 if (name[namelen - 1] == 'b')
7490 target_len -= strlen ("_from_thumb")
7492 target_len -= strlen ("_from_arm");
7494 target_name = alloca (target_len + 1);
7495 memcpy (target_name, name + 2, target_len);
7496 target_name[target_len] = '\0';
/* Prefer a symbol from the same objfile as PC, if it has one.  */
7498 sec = find_pc_section (pc);
7499 objfile = (sec == NULL) ? NULL : sec->objfile;
7500 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
7502 return SYMBOL_VALUE_ADDRESS (minsym);
7507 return 0; /* not a stub */
7511 set_arm_command (char *args, int from_tty)
7513 printf_unfiltered (_("\
7514 \"set arm\" must be followed by an apporpriate subcommand.\n"));
7515 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
/* Top-level "show arm" command: display the values of all "show arm
   ..." subcommands.  NOTE(review): truncated listing -- the return
   type and braces are missing from this extract.  */
7519 show_arm_command (char *args, int from_tty)
7521 cmd_show_list (showarmcmdlist, from_tty, "");
/* Re-select the current gdbarch after an ARM-specific user setting
   ("set arm abi", "set arm fpu", ...) changed.  A no-op when the
   current architecture is not ARM.  NOTE(review): truncated listing --
   the return type, braces and the early `return` after the arch check
   are missing from this extract.  */
7525 arm_update_current_architecture (void)
7527 struct gdbarch_info info;
7529 /* If the current architecture is not ARM, we have nothing to do. */
7530 if (gdbarch_bfd_arch_info (target_gdbarch)->arch != bfd_arch_arm)
7533 /* Update the architecture. */
/* An empty (default-initialized) info re-runs arm_gdbarch_init, which
   consults the new global settings.  */
7534 gdbarch_info_init (&info);
7536 if (!gdbarch_update_p (info))
7537 internal_error (__FILE__, __LINE__, _("could not update architecture"));
/* Sfunc for "set arm fpu": translate the user's string
   (current_fp_model) into an enum arm_float_model, store it in
   arm_fp_model, and rebuild the architecture.  NOTE(review):
   truncated listing -- return type, braces and the loop's `break` are
   missing from this extract.  */
7541 set_fp_model_sfunc (char *args, int from_tty,
7542 struct cmd_list_element *c)
7544 enum arm_float_model fp_model;
/* Linear search of the valid model-name table.  */
7546 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
7547 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
7549 arm_fp_model = fp_model;
/* Reaching ARM_FLOAT_LAST means the add_setshow machinery accepted a
   string we do not know -- an internal inconsistency.  */
7553 if (fp_model == ARM_FLOAT_LAST)
7554 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
7557 arm_update_current_architecture ();
/* Show function for "show arm fpu".  When the setting is "auto" and
   the current architecture is ARM, also report the model actually in
   effect (from the tdep).  NOTE(review): truncated listing -- return
   type, braces and the `else` keyword are missing from this
   extract.  */
7561 show_fp_model (struct ui_file *file, int from_tty,
7562 struct cmd_list_element *c, const char *value)
7564 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch);
7566 if (arm_fp_model == ARM_FLOAT_AUTO
7567 && gdbarch_bfd_arch_info (target_gdbarch)->arch == bfd_arch_arm)
7568 fprintf_filtered (file, _("\
7569 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
7570 fp_model_strings[tdep->fp_model]);
7572 fprintf_filtered (file, _("\
7573 The current ARM floating point model is \"%s\".\n"),
7574 fp_model_strings[arm_fp_model]);
/* Sfunc for "set arm abi": translate the user's string
   (arm_abi_string) into an enum arm_abi_kind, store it in
   arm_abi_global, and rebuild the architecture.  Mirrors
   set_fp_model_sfunc.  NOTE(review): truncated listing -- return
   type, braces and the loop's `break` are missing from this
   extract.  */
7578 arm_set_abi (char *args, int from_tty,
7579 struct cmd_list_element *c)
7581 enum arm_abi_kind arm_abi;
7583 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
7584 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
7586 arm_abi_global = arm_abi;
/* ARM_ABI_LAST here means the accepted string matched no known ABI --
   an internal inconsistency.  */
7590 if (arm_abi == ARM_ABI_LAST)
7591 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
7594 arm_update_current_architecture ();
/* Show function for "show arm abi".  When the setting is "auto" and
   the current architecture is ARM, also report the ABI actually in
   effect (from the tdep).  NOTE(review): truncated listing -- return
   type, braces and the `else` keyword are missing from this
   extract.  */
7598 arm_show_abi (struct ui_file *file, int from_tty,
7599 struct cmd_list_element *c, const char *value)
7601 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch);
7603 if (arm_abi_global == ARM_ABI_AUTO
7604 && gdbarch_bfd_arch_info (target_gdbarch)->arch == bfd_arch_arm)
7605 fprintf_filtered (file, _("\
7606 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
7607 arm_abi_strings[tdep->arm_abi]);
7609 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
7614 arm_show_fallback_mode (struct ui_file *file, int from_tty,
7615 struct cmd_list_element *c, const char *value)
7617 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch);
7619 fprintf_filtered (file,
7620 _("The current execution mode assumed "
7621 "(when symbols are unavailable) is \"%s\".\n"),
7622 arm_fallback_mode_string);
7626 arm_show_force_mode (struct ui_file *file, int from_tty,
7627 struct cmd_list_element *c, const char *value)
7629 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch);
7631 fprintf_filtered (file,
7632 _("The current execution mode assumed "
7633 "(even when symbols are available) is \"%s\".\n"),
7634 arm_force_mode_string);
7637 /* If the user changes the register disassembly style used for info
7638 register and other commands, we have to also switch the style used
7639 in opcodes for disassembly output. This function is run in the "set
7640 arm disassembly" command, and does that. */
/* NOTE(review): truncated listing -- return type and braces are
   missing from this extract.  */
7643 set_disassembly_style_sfunc (char *args, int from_tty,
7644 struct cmd_list_element *c)
7646 set_disassembly_style ();
7649 /* Return the ARM register name corresponding to register I.
   Pseudo registers above the raw set are named here: 32 single-
   precision VFP pseudos (s0-s31) directly after the raw registers,
   then 16 NEON quad pseudos (q0-q15).  Raw registers beyond the static
   name table exist only for XML-described targets and get no name
   here.  NOTE(review): truncated listing -- the return type and
   braces, and the `return ""` for the XML-only case, are missing from
   this extract.  */
7651 arm_register_name (struct gdbarch *gdbarch, int i)
7653 const int num_regs = gdbarch_num_regs (gdbarch);
7655 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
7656 && i >= num_regs && i < num_regs + 32)
7658 static const char *const vfp_pseudo_names[] = {
7659 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
7660 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
7661 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
7662 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
7665 return vfp_pseudo_names[i - num_regs];
7668 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
7669 && i >= num_regs + 32 && i < num_regs + 32 + 16)
7671 static const char *const neon_pseudo_names[] = {
7672 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
7673 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
7676 return neon_pseudo_names[i - num_regs - 32];
7679 if (i >= ARRAY_SIZE (arm_register_names))
7680 /* These registers are only supported on targets which supply
7681 an XML description. */
7684 return arm_register_names[i];
/* Push the user's chosen register-name style (the `disassembly_style`
   global) down into the opcodes disassembler.  NOTE(review):
   truncated listing -- return type, braces, the `current` declaration
   and the loop's `break` are missing from this extract.  */
7688 set_disassembly_style (void)
7692 /* Find the style that the user wants. */
7693 for (current = 0; current < num_disassembly_options; current++)
7694 if (disassembly_style == valid_disassembly_styles[current])
/* The style string came from the same table, so it must match.  */
7696 gdb_assert (current < num_disassembly_options);
7698 /* Synchronize the disassembler. */
7699 set_arm_regname_option (current);
7702 /* Test whether the coff symbol specific value corresponds to a Thumb
7706 coff_sym_is_thumb (int val)
7708 return (val == C_THUMBEXT
7709 || val == C_THUMBSTAT
7710 || val == C_THUMBEXTFUNC
7711 || val == C_THUMBSTATFUNC
7712 || val == C_THUMBLABEL);
7715 /* arm_coff_make_msymbol_special()
7716 arm_elf_make_msymbol_special()
7718 These functions test whether the COFF or ELF symbol corresponds to
7719 an address in thumb code, and set a "special" bit in a minimal
7720 symbol to indicate that it does. */
/* NOTE(review): truncated listing -- the return type, braces, and the
   comparison target of the ELF_ST_TYPE test (upstream: STT_ARM_TFUNC)
   are missing from this extract.  */
7723 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
7725 /* Thumb symbols are of type STT_LOPROC, (synonymous with
7727 if (ELF_ST_TYPE (((elf_symbol_type *)sym)->internal_elf_sym.st_info)
7729 MSYMBOL_SET_SPECIAL (msym);
/* Mark MSYM as a Thumb symbol when the COFF symbol value VAL carries a
   Thumb storage class (see coff_sym_is_thumb).  NOTE(review):
   truncated listing -- return type and braces are missing from this
   extract.  */
7733 arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
7735 if (coff_sym_is_thumb (val))
7736 MSYMBOL_SET_SPECIAL (msym);
/* Per-objfile data destructor: free each section's mapping-symbol
   vector.  The struct itself and the section_maps array are
   obstack-allocated (see arm_record_special_symbol) and need no
   explicit free.  NOTE(review): truncated listing -- return type,
   braces and the `i` declaration are missing from this extract.  */
7740 arm_objfile_data_free (struct objfile *objfile, void *arg)
7742 struct arm_per_objfile *data = arg;
7745 for (i = 0; i < objfile->obfd->section_count; i++)
7746 VEC_free (arm_mapping_symbol_s, data->section_maps[i]);
/* Record an ARM ELF mapping symbol ($a = ARM code, $t = Thumb code,
   $d = data) for SYM's section, keeping each per-section vector sorted
   by symbol value so later lookups can binary-search it.
   NOTE(review): truncated listing -- return type, braces, the SYM
   parameter line, the `idx` declaration and an early `return` after
   the name filter are missing from this extract.  */
7750 arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
7753 const char *name = bfd_asymbol_name (sym);
7754 struct arm_per_objfile *data;
7755 VEC(arm_mapping_symbol_s) **map_p;
7756 struct arm_mapping_symbol new_map_sym;
/* Only $a, $t and $d mapping symbols are of interest.  */
7758 gdb_assert (name[0] == '$');
7759 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
/* Lazily create the per-objfile data and its per-section map array on
   the objfile obstack the first time a mapping symbol is seen.  */
7762 data = objfile_data (objfile, arm_objfile_data_key);
7765 data = OBSTACK_ZALLOC (&objfile->objfile_obstack,
7766 struct arm_per_objfile);
7767 set_objfile_data (objfile, arm_objfile_data_key, data);
7768 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
7769 objfile->obfd->section_count,
7770 VEC(arm_mapping_symbol_s) *);
7772 map_p = &data->section_maps[bfd_get_section (sym)->index];
7774 new_map_sym.value = sym->value;
7775 new_map_sym.type = name[1];
7777 /* Assume that most mapping symbols appear in order of increasing
7778 value. If they were randomly distributed, it would be faster to
7779 always push here and then sort at first use. */
7780 if (!VEC_empty (arm_mapping_symbol_s, *map_p))
7782 struct arm_mapping_symbol *prev_map_sym;
7784 prev_map_sym = VEC_last (arm_mapping_symbol_s, *map_p)
7785 if (prev_map_sym->value >= sym->value)
/* Out-of-order symbol: find its sorted slot and insert there.  */
7788 idx = VEC_lower_bound (arm_mapping_symbol_s, *map_p, &new_map_sym,
7789 arm_compare_mapping_symbols);
7790 VEC_safe_insert (arm_mapping_symbol_s, *map_p, idx, &new_map_sym);
/* Common case: value is non-decreasing, append at the end.  */
7795 VEC_safe_push (arm_mapping_symbol_s, *map_p, &new_map_sym);
/* gdbarch write_pc hook: write PC into the PC register and keep the
   CPSR Thumb (T) bit consistent with the destination address -- set
   it when PC is Thumb code, clear it otherwise.  NOTE(review):
   truncated listing -- return type, braces, and the val/t_bit
   combining expressions in the two CPSR writes are missing from this
   extract.  */
7799 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
7801 struct gdbarch *gdbarch = get_regcache_arch (regcache);
7802 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
7804 /* If necessary, set the T bit. */
7807 ULONGEST val, t_bit;
7808 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
7809 t_bit = arm_psr_thumb_bit (gdbarch);
7810 if (arm_pc_is_thumb (gdbarch, pc))
7811 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
7814 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
7819 /* Read the contents of a NEON quad register, by reading from two
7820 double registers. This is used to implement the quad pseudo
7821 registers, and for argument passing in case the quad registers are
7822 missing; vectors are passed in quad registers when using the VFP
7823 ABI, even if a NEON unit is not present. REGNUM is the index of
7824 the quad register, in [0, 15]. */
/* NOTE(review): truncated listing -- the return type, braces, the
   `name_buf` declaration, and the big-endian/little-endian `offset`
   assignments are missing from this extract.  */
7827 arm_neon_quad_read (struct gdbarch *gdbarch, struct regcache *regcache,
7828 int regnum, gdb_byte *buf)
7831 gdb_byte reg_buf[8];
7832 int offset, double_regnum;
/* Quad q<N> aliases the double pair d<2N>, d<2N+1>; resolve the low
   double by name since its raw number depends on the target.  */
7834 sprintf (name_buf, "d%d", regnum << 1);
7835 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
7838 /* d0 is always the least significant half of q0. */
7839 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
7844 regcache_raw_read (regcache, double_regnum, reg_buf);
7845 memcpy (buf + offset, reg_buf, 8);
/* The second double goes into the other 8-byte half of BUF.  */
7847 offset = 8 - offset;
7848 regcache_raw_read (regcache, double_regnum + 1, reg_buf);
7849 memcpy (buf + offset, reg_buf, 8);
/* gdbarch pseudo_register_read hook: fetch pseudo register REGNUM
   into BUF.  After rebasing to a pseudo index, [0,31] are the VFP
   single-precision pseudos (s0-s31, each one half of a double
   register) and [32,47] are the NEON quad pseudos (q0-q15).
   NOTE(review): truncated listing -- the return type, braces, the
   `name_buf` declaration and the `regnum -= num_regs` rebase implied
   by the following comparisons are missing from this extract.  */
7853 arm_pseudo_read (struct gdbarch *gdbarch, struct regcache *regcache,
7854 int regnum, gdb_byte *buf)
7856 const int num_regs = gdbarch_num_regs (gdbarch);
7858 gdb_byte reg_buf[8];
7859 int offset, double_regnum;
7861 gdb_assert (regnum >= num_regs);
7864 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
7865 /* Quad-precision register. */
7866 arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
7869 /* Single-precision register. */
7870 gdb_assert (regnum < 32);
7872 /* s0 is always the least significant half of d0. */
7873 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
7874 offset = (regnum & 1) ? 0 : 4;
7876 offset = (regnum & 1) ? 4 : 0;
/* s<N> lives in the half of d<N/2> selected by OFFSET.  */
7878 sprintf (name_buf, "d%d", regnum >> 1);
7879 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
7882 regcache_raw_read (regcache, double_regnum, reg_buf);
7883 memcpy (buf, reg_buf + offset, 4);
7887 /* Store the contents of BUF to a NEON quad register, by writing to
7888 two double registers. This is used to implement the quad pseudo
7889 registers, and for argument passing in case the quad registers are
7890 missing; vectors are passed in quad registers when using the VFP
7891 ABI, even if a NEON unit is not present. REGNUM is the index
7892 of the quad register, in [0, 15]. */
/* NOTE(review): truncated listing -- the return type, braces, the
   `name_buf` declaration, and the endianness-dependent `offset`
   assignments are missing from this extract.  Mirrors
   arm_neon_quad_read.  */
7895 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
7896 int regnum, const gdb_byte *buf)
7899 gdb_byte reg_buf[8];
7900 int offset, double_regnum;
/* Resolve the low double d<2N> of quad q<N> by name.  */
7902 sprintf (name_buf, "d%d", regnum << 1);
7903 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
7906 /* d0 is always the least significant half of q0. */
7907 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
7912 regcache_raw_write (regcache, double_regnum, buf + offset);
7913 offset = 8 - offset;
7914 regcache_raw_write (regcache, double_regnum + 1, buf + offset);
/* gdbarch pseudo_register_write hook: store BUF into pseudo register
   REGNUM.  Quad pseudos go through arm_neon_quad_write; a single-
   precision pseudo is implemented read-modify-write on its containing
   double register so the other half is preserved.  NOTE(review):
   truncated listing -- the return type, braces, the `name_buf`
   declaration and the `regnum -= num_regs` rebase implied by the
   following comparisons are missing from this extract.  */
7918 arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
7919 int regnum, const gdb_byte *buf)
7921 const int num_regs = gdbarch_num_regs (gdbarch);
7923 gdb_byte reg_buf[8];
7924 int offset, double_regnum;
7926 gdb_assert (regnum >= num_regs);
7929 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
7930 /* Quad-precision register. */
7931 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
7934 /* Single-precision register. */
7935 gdb_assert (regnum < 32);
7937 /* s0 is always the least significant half of d0. */
7938 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
7939 offset = (regnum & 1) ? 0 : 4;
7941 offset = (regnum & 1) ? 4 : 0;
7943 sprintf (name_buf, "d%d", regnum >> 1);
7944 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
/* Read-modify-write: only 4 bytes of the 8-byte double change.  */
7947 regcache_raw_read (regcache, double_regnum, reg_buf);
7948 memcpy (reg_buf + offset, buf, 4);
7949 regcache_raw_write (regcache, double_regnum, reg_buf);
/* user-reg "read" callback: BATON points at an int holding a raw
   register number; return that register's value in FRAME.  */

static struct value *
value_of_arm_user_reg (struct frame_info *frame, const void *baton)
{
  return value_of_register (*(const int *) baton, frame);
}
/* OS/ABI sniffer for ARM ELF binaries.  For EI_OSABI == ELFOSABI_ARM
   (used by old GNU tools) the header field alone is not conclusive,
   so scan the note sections for an ABI tag; anything else is left to
   the generic ELF sniffer.  NOTE(review): truncated listing -- braces,
   the &osabi argument to bfd_map_over_sections, and the final
   `return osabi;` are missing from this extract.  */
7960 static enum gdb_osabi
7961 arm_elf_osabi_sniffer (bfd *abfd)
7963 unsigned int elfosabi;
7964 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
7966 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
7968 if (elfosabi == ELFOSABI_ARM)
7969 /* GNU tools use this value. Check note sections in this case,
7971 bfd_map_over_sections (abfd,
7972 generic_elf_osabi_sniff_abi_tag_sections,
7975 /* Anything else will be handled by the generic ELF sniffer. */
7980 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
7981 struct reggroup *group)
7983 /* FPS register's type is INT, but belongs to float_reggroup. Beside
7984 this, FPS register belongs to save_regroup, restore_reggroup, and
7985 all_reggroup, of course. */
7986 if (regnum == ARM_FPS_REGNUM)
7987 return (group == float_reggroup
7988 || group == save_reggroup
7989 || group == restore_reggroup
7990 || group == all_reggroup);
7992 return default_register_reggroup_p (gdbarch, regnum, group);
7996 /* Initialize the current architecture based on INFO. If possible,
7997 re-use an architecture from ARCHES, which is a list of
7998 architectures already created during this debugging session.
8000 Called e.g. at program startup, when reading a core file, and when
8001 reading a binary file. */
8003 static struct gdbarch *
8004 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
8006 struct gdbarch_tdep *tdep;
8007 struct gdbarch *gdbarch;
8008 struct gdbarch_list *best_arch;
8009 enum arm_abi_kind arm_abi = arm_abi_global;
8010 enum arm_float_model fp_model = arm_fp_model;
8011 struct tdesc_arch_data *tdesc_data = NULL;
8013 int have_vfp_registers = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
8015 int have_fpa_registers = 1;
8016 const struct target_desc *tdesc = info.target_desc;
8018 /* If we have an object to base this architecture on, try to determine
8021 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
8023 int ei_osabi, e_flags;
8025 switch (bfd_get_flavour (info.abfd))
8027 case bfd_target_aout_flavour:
8028 /* Assume it's an old APCS-style ABI. */
8029 arm_abi = ARM_ABI_APCS;
8032 case bfd_target_coff_flavour:
8033 /* Assume it's an old APCS-style ABI. */
8035 arm_abi = ARM_ABI_APCS;
8038 case bfd_target_elf_flavour:
8039 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
8040 e_flags = elf_elfheader (info.abfd)->e_flags;
8042 if (ei_osabi == ELFOSABI_ARM)
8044 /* GNU tools used to use this value, but do not for EABI
8045 objects. There's nowhere to tag an EABI version
8046 anyway, so assume APCS. */
8047 arm_abi = ARM_ABI_APCS;
8049 else if (ei_osabi == ELFOSABI_NONE)
8051 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
8052 int attr_arch, attr_profile;
8056 case EF_ARM_EABI_UNKNOWN:
8057 /* Assume GNU tools. */
8058 arm_abi = ARM_ABI_APCS;
8061 case EF_ARM_EABI_VER4:
8062 case EF_ARM_EABI_VER5:
8063 arm_abi = ARM_ABI_AAPCS;
8064 /* EABI binaries default to VFP float ordering.
8065 They may also contain build attributes that can
8066 be used to identify if the VFP argument-passing
8068 if (fp_model == ARM_FLOAT_AUTO)
8071 switch (bfd_elf_get_obj_attr_int (info.abfd,
8076 /* "The user intended FP parameter/result
8077 passing to conform to AAPCS, base
8079 fp_model = ARM_FLOAT_SOFT_VFP;
8082 /* "The user intended FP parameter/result
8083 passing to conform to AAPCS, VFP
8085 fp_model = ARM_FLOAT_VFP;
8088 /* "The user intended FP parameter/result
8089 passing to conform to tool chain-specific
8090 conventions" - we don't know any such
8091 conventions, so leave it as "auto". */
8094 /* Attribute value not mentioned in the
8095 October 2008 ABI, so leave it as
8100 fp_model = ARM_FLOAT_SOFT_VFP;
8106 /* Leave it as "auto". */
8107 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
8112 /* Detect M-profile programs. This only works if the
8113 executable file includes build attributes; GCC does
8114 copy them to the executable, but e.g. RealView does
8116 attr_arch = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
8118 attr_profile = bfd_elf_get_obj_attr_int (info.abfd,
8120 Tag_CPU_arch_profile);
8121 /* GCC specifies the profile for v6-M; RealView only
8122 specifies the profile for architectures starting with
8123 V7 (as opposed to architectures with a tag
8124 numerically greater than TAG_CPU_ARCH_V7). */
8125 if (!tdesc_has_registers (tdesc)
8126 && (attr_arch == TAG_CPU_ARCH_V6_M
8127 || attr_arch == TAG_CPU_ARCH_V6S_M
8128 || attr_profile == 'M'))
8129 tdesc = tdesc_arm_with_m;
8133 if (fp_model == ARM_FLOAT_AUTO)
8135 int e_flags = elf_elfheader (info.abfd)->e_flags;
8137 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
8140 /* Leave it as "auto". Strictly speaking this case
8141 means FPA, but almost nobody uses that now, and
8142 many toolchains fail to set the appropriate bits
8143 for the floating-point model they use. */
8145 case EF_ARM_SOFT_FLOAT:
8146 fp_model = ARM_FLOAT_SOFT_FPA;
8148 case EF_ARM_VFP_FLOAT:
8149 fp_model = ARM_FLOAT_VFP;
8151 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
8152 fp_model = ARM_FLOAT_SOFT_VFP;
8157 if (e_flags & EF_ARM_BE8)
8158 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
8163 /* Leave it as "auto". */
8168 /* Check any target description for validity. */
8169 if (tdesc_has_registers (tdesc))
8171 /* For most registers we require GDB's default names; but also allow
8172 the numeric names for sp / lr / pc, as a convenience. */
8173 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
8174 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
8175 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
8177 const struct tdesc_feature *feature;
8180 feature = tdesc_find_feature (tdesc,
8181 "org.gnu.gdb.arm.core");
8182 if (feature == NULL)
8184 feature = tdesc_find_feature (tdesc,
8185 "org.gnu.gdb.arm.m-profile");
8186 if (feature == NULL)
8192 tdesc_data = tdesc_data_alloc ();
8195 for (i = 0; i < ARM_SP_REGNUM; i++)
8196 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
8197 arm_register_names[i]);
8198 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
8201 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
8204 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
8208 valid_p &= tdesc_numbered_register (feature, tdesc_data,
8209 ARM_PS_REGNUM, "xpsr");
8211 valid_p &= tdesc_numbered_register (feature, tdesc_data,
8212 ARM_PS_REGNUM, "cpsr");
8216 tdesc_data_cleanup (tdesc_data);
8220 feature = tdesc_find_feature (tdesc,
8221 "org.gnu.gdb.arm.fpa");
8222 if (feature != NULL)
8225 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
8226 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
8227 arm_register_names[i]);
8230 tdesc_data_cleanup (tdesc_data);
8235 have_fpa_registers = 0;
8237 feature = tdesc_find_feature (tdesc,
8238 "org.gnu.gdb.xscale.iwmmxt");
8239 if (feature != NULL)
8241 static const char *const iwmmxt_names[] = {
8242 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
8243 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
8244 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
8245 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
8249 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
8251 &= tdesc_numbered_register (feature, tdesc_data, i,
8252 iwmmxt_names[i - ARM_WR0_REGNUM]);
8254 /* Check for the control registers, but do not fail if they
8256 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
8257 tdesc_numbered_register (feature, tdesc_data, i,
8258 iwmmxt_names[i - ARM_WR0_REGNUM]);
8260 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
8262 &= tdesc_numbered_register (feature, tdesc_data, i,
8263 iwmmxt_names[i - ARM_WR0_REGNUM]);
8267 tdesc_data_cleanup (tdesc_data);
8272 /* If we have a VFP unit, check whether the single precision registers
8273 are present. If not, then we will synthesize them as pseudo
8275 feature = tdesc_find_feature (tdesc,
8276 "org.gnu.gdb.arm.vfp");
8277 if (feature != NULL)
8279 static const char *const vfp_double_names[] = {
8280 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
8281 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
8282 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
8283 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
8286 /* Require the double precision registers. There must be either
8289 for (i = 0; i < 32; i++)
8291 valid_p &= tdesc_numbered_register (feature, tdesc_data,
8293 vfp_double_names[i]);
8298 if (!valid_p && i != 16)
8300 tdesc_data_cleanup (tdesc_data);
8304 if (tdesc_unnumbered_register (feature, "s0") == 0)
8305 have_vfp_pseudos = 1;
8307 have_vfp_registers = 1;
8309 /* If we have VFP, also check for NEON. The architecture allows
8310 NEON without VFP (integer vector operations only), but GDB
8311 does not support that. */
8312 feature = tdesc_find_feature (tdesc,
8313 "org.gnu.gdb.arm.neon");
8314 if (feature != NULL)
8316 /* NEON requires 32 double-precision registers. */
8319 tdesc_data_cleanup (tdesc_data);
8323 /* If there are quad registers defined by the stub, use
8324 their type; otherwise (normally) provide them with
8325 the default type. */
8326 if (tdesc_unnumbered_register (feature, "q0") == 0)
8327 have_neon_pseudos = 1;
8334 /* If there is already a candidate, use it. */
8335 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
8337 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
8339 if (arm_abi != ARM_ABI_AUTO
8340 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
8343 if (fp_model != ARM_FLOAT_AUTO
8344 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
8347 /* There are various other properties in tdep that we do not
8348 need to check here: those derived from a target description,
8349 since gdbarches with a different target description are
8350 automatically disqualified. */
8352 /* Do check is_m, though, since it might come from the binary. */
8353 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
8356 /* Found a match. */
8360 if (best_arch != NULL)
8362 if (tdesc_data != NULL)
8363 tdesc_data_cleanup (tdesc_data);
8364 return best_arch->gdbarch;
8367 tdep = xcalloc (1, sizeof (struct gdbarch_tdep));
8368 gdbarch = gdbarch_alloc (&info, tdep);
8370 /* Record additional information about the architecture we are defining.
8371 These are gdbarch discriminators, like the OSABI. */
8372 tdep->arm_abi = arm_abi;
8373 tdep->fp_model = fp_model;
8375 tdep->have_fpa_registers = have_fpa_registers;
8376 tdep->have_vfp_registers = have_vfp_registers;
8377 tdep->have_vfp_pseudos = have_vfp_pseudos;
8378 tdep->have_neon_pseudos = have_neon_pseudos;
8379 tdep->have_neon = have_neon;
8382 switch (info.byte_order_for_code)
8384 case BFD_ENDIAN_BIG:
8385 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
8386 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
8387 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
8388 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
8392 case BFD_ENDIAN_LITTLE:
8393 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
8394 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
8395 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
8396 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
8401 internal_error (__FILE__, __LINE__,
8402 _("arm_gdbarch_init: bad byte order for float format"));
8405 /* On ARM targets char defaults to unsigned. */
8406 set_gdbarch_char_signed (gdbarch, 0);
8408 /* Note: for displaced stepping, this includes the breakpoint, and one word
8409 of additional scratch space. This setting isn't used for anything beside
8410 displaced stepping at present. */
8411 set_gdbarch_max_insn_length (gdbarch, 4 * DISPLACED_MODIFIED_INSNS);
8413 /* This should be low enough for everything. */
8414 tdep->lowest_pc = 0x20;
8415 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
8417 /* The default, for both APCS and AAPCS, is to return small
8418 structures in registers. */
8419 tdep->struct_return = reg_struct_return;
8421 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
8422 set_gdbarch_frame_align (gdbarch, arm_frame_align);
8424 set_gdbarch_write_pc (gdbarch, arm_write_pc);
8426 /* Frame handling. */
8427 set_gdbarch_dummy_id (gdbarch, arm_dummy_id);
8428 set_gdbarch_unwind_pc (gdbarch, arm_unwind_pc);
8429 set_gdbarch_unwind_sp (gdbarch, arm_unwind_sp);
8431 frame_base_set_default (gdbarch, &arm_normal_base);
8433 /* Address manipulation. */
8434 set_gdbarch_smash_text_address (gdbarch, arm_smash_text_address);
8435 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
8437 /* Advance PC across function entry code. */
8438 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
8440 /* Detect whether PC is in function epilogue. */
8441 set_gdbarch_in_function_epilogue_p (gdbarch, arm_in_function_epilogue_p);
8443 /* Skip trampolines. */
8444 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
8446 /* The stack grows downward. */
8447 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
8449 /* Breakpoint manipulation. */
8450 set_gdbarch_breakpoint_from_pc (gdbarch, arm_breakpoint_from_pc);
8451 set_gdbarch_remote_breakpoint_from_pc (gdbarch,
8452 arm_remote_breakpoint_from_pc);
8454 /* Information about registers, etc. */
8455 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
8456 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
8457 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
8458 set_gdbarch_register_type (gdbarch, arm_register_type);
8459 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
8461 /* This "info float" is FPA-specific. Use the generic version if we
8463 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
8464 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
8466 /* Internal <-> external register number maps. */
8467 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
8468 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
8470 set_gdbarch_register_name (gdbarch, arm_register_name);
8472 /* Returning results. */
8473 set_gdbarch_return_value (gdbarch, arm_return_value);
8476 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
8478 /* Minsymbol frobbing. */
8479 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
8480 set_gdbarch_coff_make_msymbol_special (gdbarch,
8481 arm_coff_make_msymbol_special);
8482 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
8484 /* Thumb-2 IT block support. */
8485 set_gdbarch_adjust_breakpoint_address (gdbarch,
8486 arm_adjust_breakpoint_address);
8488 /* Virtual tables. */
8489 set_gdbarch_vbit_in_delta (gdbarch, 1);
8491 /* Hook in the ABI-specific overrides, if they have been registered. */
8492 gdbarch_init_osabi (info, gdbarch);
8494 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
8496 /* Add some default predicates. */
8497 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
8498 dwarf2_append_unwinders (gdbarch);
8499 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
8500 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
8502 /* Now we have tuned the configuration, set a few final things,
8503 based on what the OS ABI has told us. */
8505 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
8506 binaries are always marked. */
8507 if (tdep->arm_abi == ARM_ABI_AUTO)
8508 tdep->arm_abi = ARM_ABI_APCS;
8510 /* We used to default to FPA for generic ARM, but almost nobody
8511 uses that now, and we now provide a way for the user to force
8512 the model. So default to the most useful variant. */
8513 if (tdep->fp_model == ARM_FLOAT_AUTO)
8514 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
8516 if (tdep->jb_pc >= 0)
8517 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
8519 /* Floating point sizes and format. */
8520 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
8521 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
8523 set_gdbarch_double_format
8524 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
8525 set_gdbarch_long_double_format
8526 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
8530 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
8531 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
8534 if (have_vfp_pseudos)
8536 /* NOTE: These are the only pseudo registers used by
8537 the ARM target at the moment. If more are added, a
8538 little more care in numbering will be needed. */
8540 int num_pseudos = 32;
8541 if (have_neon_pseudos)
8543 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
8544 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
8545 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
8550 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
8552 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
8554 /* Override tdesc_register_type to adjust the types of VFP
8555 registers for NEON. */
8556 set_gdbarch_register_type (gdbarch, arm_register_type);
8559 /* Add standard register aliases. We add aliases even for those
8560 nanes which are used by the current architecture - it's simpler,
8561 and does no harm, since nothing ever lists user registers. */
8562 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
8563 user_reg_add (gdbarch, arm_register_aliases[i].name,
8564 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
/* Dump the ARM-specific fields of GDBARCH's tdep to FILE.
   Registered with gdbarch_register () as the dump_tdep callback, so it
   runs for "maintenance print architecture".

   NOTE(review): this span is a damaged extraction — the numeric gaps
   (8570 -> 8572 -> 8577) show that the "static void" return-type line,
   both braces, and some interior lines were dropped, so the code below
   is not compilable as shown.  Presumably the missing lines match the
   upstream arm-tdep.c definition — verify against the original file
   before editing.  Only lowest_pc is printed here; the (unsigned long)
   cast matches the %lx conversion in the format string.  */
8570 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
8572 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8577 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
8578 (unsigned long) tdep->lowest_pc);
8581 extern initialize_file_ftype _initialize_arm_tdep; /* -Wmissing-prototypes */
8584 _initialize_arm_tdep (void)
8586 struct ui_file *stb;
8588 struct cmd_list_element *new_set, *new_show;
8589 const char *setname;
8590 const char *setdesc;
8591 const char *const *regnames;
8593 static char *helptext;
8594 char regdesc[1024], *rdptr = regdesc;
8595 size_t rest = sizeof (regdesc);
8597 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
8599 arm_objfile_data_key
8600 = register_objfile_data_with_cleanup (NULL, arm_objfile_data_free);
8602 /* Add ourselves to objfile event chain. */
8603 observer_attach_new_objfile (arm_exidx_new_objfile);
8605 = register_objfile_data_with_cleanup (NULL, arm_exidx_data_free);
8607 /* Register an ELF OS ABI sniffer for ARM binaries. */
8608 gdbarch_register_osabi_sniffer (bfd_arch_arm,
8609 bfd_target_elf_flavour,
8610 arm_elf_osabi_sniffer);
8612 /* Initialize the standard target descriptions. */
8613 initialize_tdesc_arm_with_m ();
8615 /* Get the number of possible sets of register names defined in opcodes. */
8616 num_disassembly_options = get_arm_regname_num_options ();
8618 /* Add root prefix command for all "set arm"/"show arm" commands. */
8619 add_prefix_cmd ("arm", no_class, set_arm_command,
8620 _("Various ARM-specific commands."),
8621 &setarmcmdlist, "set arm ", 0, &setlist);
8623 add_prefix_cmd ("arm", no_class, show_arm_command,
8624 _("Various ARM-specific commands."),
8625 &showarmcmdlist, "show arm ", 0, &showlist);
8627 /* Sync the opcode insn printer with our register viewer. */
8628 parse_arm_disassembler_option ("reg-names-std");
8630 /* Initialize the array that will be passed to
8631 add_setshow_enum_cmd(). */
8632 valid_disassembly_styles
8633 = xmalloc ((num_disassembly_options + 1) * sizeof (char *));
8634 for (i = 0; i < num_disassembly_options; i++)
8636 numregs = get_arm_regnames (i, &setname, &setdesc, ®names);
8637 valid_disassembly_styles[i] = setname;
8638 length = snprintf (rdptr, rest, "%s - %s\n", setname, setdesc);
8641 /* When we find the default names, tell the disassembler to use
8643 if (!strcmp (setname, "std"))
8645 disassembly_style = setname;
8646 set_arm_regname_option (i);
8649 /* Mark the end of valid options. */
8650 valid_disassembly_styles[num_disassembly_options] = NULL;
8652 /* Create the help text. */
8653 stb = mem_fileopen ();
8654 fprintf_unfiltered (stb, "%s%s%s",
8655 _("The valid values are:\n"),
8657 _("The default is \"std\"."));
8658 helptext = ui_file_xstrdup (stb, NULL);
8659 ui_file_delete (stb);
8661 add_setshow_enum_cmd("disassembler", no_class,
8662 valid_disassembly_styles, &disassembly_style,
8663 _("Set the disassembly style."),
8664 _("Show the disassembly style."),
8666 set_disassembly_style_sfunc,
8667 NULL, /* FIXME: i18n: The disassembly style is
8669 &setarmcmdlist, &showarmcmdlist);
8671 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
8672 _("Set usage of ARM 32-bit mode."),
8673 _("Show usage of ARM 32-bit mode."),
8674 _("When off, a 26-bit PC will be used."),
8676 NULL, /* FIXME: i18n: Usage of ARM 32-bit
8678 &setarmcmdlist, &showarmcmdlist);
8680 /* Add a command to allow the user to force the FPU model. */
8681 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, ¤t_fp_model,
8682 _("Set the floating point type."),
8683 _("Show the floating point type."),
8684 _("auto - Determine the FP typefrom the OS-ABI.\n\
8685 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
8686 fpa - FPA co-processor (GCC compiled).\n\
8687 softvfp - Software FP with pure-endian doubles.\n\
8688 vfp - VFP co-processor."),
8689 set_fp_model_sfunc, show_fp_model,
8690 &setarmcmdlist, &showarmcmdlist);
8692 /* Add a command to allow the user to force the ABI. */
8693 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
8696 NULL, arm_set_abi, arm_show_abi,
8697 &setarmcmdlist, &showarmcmdlist);
8699 /* Add two commands to allow the user to force the assumed
8701 add_setshow_enum_cmd ("fallback-mode", class_support,
8702 arm_mode_strings, &arm_fallback_mode_string,
8703 _("Set the mode assumed when symbols are unavailable."),
8704 _("Show the mode assumed when symbols are unavailable."),
8705 NULL, NULL, arm_show_fallback_mode,
8706 &setarmcmdlist, &showarmcmdlist);
8707 add_setshow_enum_cmd ("force-mode", class_support,
8708 arm_mode_strings, &arm_force_mode_string,
8709 _("Set the mode assumed even when symbols are available."),
8710 _("Show the mode assumed even when symbols are available."),
8711 NULL, NULL, arm_show_force_mode,
8712 &setarmcmdlist, &showarmcmdlist);
8714 /* Debugging flag. */
8715 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
8716 _("Set ARM debugging."),
8717 _("Show ARM debugging."),
8718 _("When on, arm-specific debugging is enabled."),
8720 NULL, /* FIXME: i18n: "ARM debugging is %s. */
8721 &setdebuglist, &showdebuglist);