1 /* Common target dependent code for GDB on ARM systems.
3 Copyright (C) 1988, 1989, 1991, 1992, 1993, 1995, 1996, 1998, 1999, 2000,
4 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
5 Free Software Foundation, Inc.
7 This file is part of GDB.
9 This program is free software; you can redistribute it and/or modify
10 it under the terms of the GNU General Public License as published by
11 the Free Software Foundation; either version 3 of the License, or
12 (at your option) any later version.
14 This program is distributed in the hope that it will be useful,
15 but WITHOUT ANY WARRANTY; without even the implied warranty of
16 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 GNU General Public License for more details.
19 You should have received a copy of the GNU General Public License
20 along with this program. If not, see <http://www.gnu.org/licenses/>. */
22 #include <ctype.h> /* XXX for isupper (). */
29 #include "gdb_string.h"
30 #include "dis-asm.h" /* For register styles. */
32 #include "reggroups.h"
35 #include "arch-utils.h"
37 #include "frame-unwind.h"
38 #include "frame-base.h"
39 #include "trad-frame.h"
41 #include "dwarf2-frame.h"
43 #include "prologue-value.h"
44 #include "target-descriptions.h"
45 #include "user-regs.h"
49 #include "gdb/sim-arm.h"
52 #include "coff/internal.h"
55 #include "gdb_assert.h"
58 #include "features/arm-with-m.c"
/* Macros for setting and testing a bit in a minimal symbol that marks
   it as a Thumb function.  The MSB of the minimal symbol's "info" field
   is used for this purpose.

   MSYMBOL_SET_SPECIAL	Actually sets the "special" bit.
   MSYMBOL_IS_SPECIAL	Tests the "special" bit in a minimal symbol.  */

/* Mark the minimal symbol MSYM as describing a Thumb function.  */
#define MSYMBOL_SET_SPECIAL(msym)				\
	MSYMBOL_TARGET_FLAG_1 (msym) = 1

/* Return non-zero if minimal symbol MSYM describes a Thumb function.  */
#define MSYMBOL_IS_SPECIAL(msym)				\
	MSYMBOL_TARGET_FLAG_1 (msym)
75 /* Per-objfile data used for mapping symbols. */
/* Registry key under which the per-objfile mapping-symbol data
   (struct arm_per_objfile below) is stored; the lookup side is
   arm_find_mapping_symbol.  */
76 static const struct objfile_data *arm_objfile_data_key;
/* One ARM ELF "mapping symbol" ($a/$t/$d): a section-relative value
   plus a type character.  NOTE(review): the struct's field list
   appears to have been dropped by extraction here; from the uses in
   arm_find_mapping_symbol it must contain at least `value' and
   `type'.  */
78 struct arm_mapping_symbol
83 typedef struct arm_mapping_symbol arm_mapping_symbol_s;
84 DEF_VEC_O(arm_mapping_symbol_s);
/* Per-objfile container: one VEC of mapping symbols per BFD section,
   indexed by section index (see section_maps use in
   arm_find_mapping_symbol).  */
86 struct arm_per_objfile
88 VEC(arm_mapping_symbol_s) **section_maps;
91 /* The list of available "set arm ..." and "show arm ..." commands. */
92 static struct cmd_list_element *setarmcmdlist = NULL;
93 static struct cmd_list_element *showarmcmdlist = NULL;
95 /* The type of floating-point to use. Keep this in sync with enum
96 arm_float_model, and the help string in _initialize_arm_tdep. */
/* NOTE(review): the string-array initializer was dropped by
   extraction here.  */
97 static const char *fp_model_strings[] =
107 /* A variable that can be configured by the user. */
108 static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
/* Textual mirror of arm_fp_model, as selected with "set arm fpu".  */
109 static const char *current_fp_model = "auto";
111 /* The ABI to use. Keep this in sync with arm_abi_kind. */
112 static const char *arm_abi_strings[] =
120 /* A variable that can be configured by the user. */
121 static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
/* Textual mirror of arm_abi_global, as selected with "set arm abi".  */
122 static const char *arm_abi_string = "auto";
124 /* The execution mode to assume. */
125 static const char *arm_mode_strings[] =
/* Mode ("arm"/"thumb"/"auto") to assume when no other evidence is
   available, and mode to force regardless of other evidence; both are
   consulted by arm_pc_is_thumb.  */
133 static const char *arm_fallback_mode_string = "auto";
134 static const char *arm_force_mode_string = "auto";
139 /* The standard register names, and all the valid aliases for them. Note
140 that `fp', `sp' and `pc' are not added in this alias list, because they
141 have been added as builtin user registers in
142 std-regs.c:_initialize_frame_reg. */
147 } arm_register_aliases[] = {
148 /* Basic register numbers. */
165 /* Synonyms (argument and variable registers). */
178 /* Other platform-specific names for r9. */
184 /* Names used by GCC (not listed in the ARM EABI). */
186 /* A special name from the older ATPCS. */
/* The standard names for the 26 numbered registers GDB exposes for
   ARM: core registers r0-r12, sp, lr, pc, FPA registers f0-f7, the
   FPA status register and the CPSR.  Indexed by GDB register
   number.  */
static const char *const arm_register_names[] =
{"r0",  "r1",  "r2",  "r3",	/*  0  1  2  3 */
 "r4",  "r5",  "r6",  "r7",	/*  4  5  6  7 */
 "r8",  "r9",  "r10", "r11",	/*  8  9 10 11 */
 "r12", "sp",  "lr",  "pc",	/* 12 13 14 15 */
 "f0",  "f1",  "f2",  "f3",	/* 16 17 18 19 */
 "f4",  "f5",  "f6",  "f7",	/* 20 21 22 23 */
 "fps", "cpsr" };		/* 24 25       */
199 /* Valid register name styles. */
200 static const char **valid_disassembly_styles;
202 /* Disassembly style to use. Default to "std" register names. */
203 static const char *disassembly_style;
205 /* This is used to keep the bfd arch_info in sync with the disassembly
/* NOTE(review): the tail of the comment above was dropped by
   extraction ("... style" in the original).  */
207 static void set_disassembly_style_sfunc(char *, int,
208 struct cmd_list_element *);
209 static void set_disassembly_style (void);
/* Conversions between the FPA extended float format and the host's
   double representation; parameter lists truncated by extraction.  */
211 static void convert_from_extended (const struct floatformat *, const void *,
213 static void convert_to_extended (const struct floatformat *, void *,
/* Pseudo-register read/write for NEON quad (Q) registers, which are
   composed of two underlying D registers.  */
216 static void arm_neon_quad_read (struct gdbarch *gdbarch,
217 struct regcache *regcache,
218 int regnum, gdb_byte *buf);
219 static void arm_neon_quad_write (struct gdbarch *gdbarch,
220 struct regcache *regcache,
221 int regnum, const gdb_byte *buf);
/* Prologue-analysis cache attached to a frame: records where the
   previous SP was, which register serves as the frame pointer, the
   frame size, and where callee-saved registers were stored.
   NOTE(review): the field declarations themselves were dropped by
   extraction; only saved_regs remains visible below.  */
223 struct arm_prologue_cache
225 /* The stack pointer at the time this frame was created; i.e. the
226 caller's stack pointer when this function was called. It is used
227 to identify this frame. */
230 /* The frame base for this frame is just prev_sp - frame size.
231 FRAMESIZE is the distance from the frame pointer to the
232 initial stack pointer. */
236 /* The register used to hold the frame pointer for this frame. */
239 /* Saved register offsets. */
240 struct trad_frame_saved_reg *saved_regs;
/* Scan an ARM-mode prologue between PROLOGUE_START and PROLOGUE_END,
   filling CACHE if non-NULL (counterpart of thumb_analyze_prologue).  */
243 static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
244 CORE_ADDR prologue_start,
245 CORE_ADDR prologue_end,
246 struct arm_prologue_cache *cache);
248 /* Architecture version for displaced stepping. This affects the behaviour of
249 certain instructions, and really should not be hard-wired. */
251 #define DISPLACED_STEPPING_ARCH_VERSION 5
/* Addresses for calling Thumb functions have the bit 0 set.
   Here are some macros to test, set, or clear bit 0 of addresses.  */
#define IS_THUMB_ADDR(addr)	((addr) & 1)
#define MAKE_THUMB_ADDR(addr)	((addr) | 1)
#define UNMAKE_THUMB_ADDR(addr) ((addr) & ~1)
259 /* Set to true if the 32-bit mode is in use. */
/* NOTE(review): the variable declared under the comment above was
   dropped by extraction.  */
263 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
266 arm_psr_thumb_bit (struct gdbarch *gdbarch)
/* M-profile targets presumably use a different mask than A/R-profile
   CPSR's T bit; the two return statements were dropped by extraction —
   TODO confirm against the full source.  */
268 if (gdbarch_tdep (gdbarch)->is_m)
274 /* Determine if FRAME is executing in Thumb mode. */
277 arm_frame_is_thumb (struct frame_info *frame)
280 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
282 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
283 directly (from a signal frame or dummy frame) or by interpreting
284 the saved LR (from a prologue or DWARF frame). So consult it and
285 trust the unwinders. */
286 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
288 return (cpsr & t_bit) != 0;
291 /* Callback for VEC_lower_bound. */
/* Order mapping symbols by their section-relative value, so the
   per-section VECs can be binary-searched in
   arm_find_mapping_symbol.  */
294 arm_compare_mapping_symbols (const struct arm_mapping_symbol *lhs,
295 const struct arm_mapping_symbol *rhs)
297 return lhs->value < rhs->value;
300 /* Search for the mapping symbol covering MEMADDR. If one is found,
301 return its type. Otherwise, return 0. If START is non-NULL,
302 set *START to the location of the mapping symbol. */
/* NOTE(review): several lines of this function (braces, early-exit
   checks, the trailing `return 0') were dropped by extraction.  */
305 arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
307 struct obj_section *sec;
309 /* If there are mapping symbols, consult them. */
310 sec = find_pc_section (memaddr);
313 struct arm_per_objfile *data;
314 VEC(arm_mapping_symbol_s) *map;
/* Key is section-relative, since the per-section VECs store
   section-relative values.  */
315 struct arm_mapping_symbol map_key = { memaddr - obj_section_addr (sec),
319 data = objfile_data (sec->objfile, arm_objfile_data_key);
322 map = data->section_maps[sec->the_bfd_section->index];
323 if (!VEC_empty (arm_mapping_symbol_s, map))
325 struct arm_mapping_symbol *map_sym;
327 idx = VEC_lower_bound (arm_mapping_symbol_s, map, &map_key,
328 arm_compare_mapping_symbols);
330 /* VEC_lower_bound finds the earliest ordered insertion
331 point. If the following symbol starts at this exact
332 address, we use that; otherwise, the preceding
333 mapping symbol covers this address. */
334 if (idx < VEC_length (arm_mapping_symbol_s, map))
336 map_sym = VEC_index (arm_mapping_symbol_s, map, idx)
337 if (map_sym->value == map_key.value)
340 *start = map_sym->value + obj_section_addr (sec);
341 return map_sym->type;
347 map_sym = VEC_index (arm_mapping_symbol_s, map, idx - 1);
349 *start = map_sym->value + obj_section_addr (sec);
350 return map_sym->type;
/* Software single-step helper, defined later in the file; declared
   here because arm_pc_is_thumb consults it.  */
359 static CORE_ADDR arm_get_next_pc_raw (struct frame_info *frame,
360 CORE_ADDR pc, int insert_bkpt);
362 /* Determine if the program counter specified in MEMADDR is in a Thumb
363 function. This function should be called for addresses unrelated to
364 any executing frame; otherwise, prefer arm_frame_is_thumb. */
/* The heuristics below are tried in decreasing order of reliability:
   address bit 0, user override, M-profile, mapping symbols, minimal
   symbols, user fallback, and finally the live $cpsr.  NOTE(review):
   the `return' lines following most of these checks were dropped by
   extraction.  */
367 arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
369 struct obj_section *sec;
370 struct minimal_symbol *sym;
373 /* If bit 0 of the address is set, assume this is a Thumb address. */
374 if (IS_THUMB_ADDR (memaddr))
377 /* If the user wants to override the symbol table, let him. */
378 if (strcmp (arm_force_mode_string, "arm") == 0)
380 if (strcmp (arm_force_mode_string, "thumb") == 0)
383 /* ARM v6-M and v7-M are always in Thumb mode. */
384 if (gdbarch_tdep (gdbarch)->is_m)
387 /* If there are mapping symbols, consult them. */
388 type = arm_find_mapping_symbol (memaddr, NULL);
392 /* Thumb functions have a "special" bit set in minimal symbols. */
393 sym = lookup_minimal_symbol_by_pc (memaddr);
395 return (MSYMBOL_IS_SPECIAL (sym));
397 /* If the user wants to override the fallback mode, let them. */
398 if (strcmp (arm_fallback_mode_string, "arm") == 0)
400 if (strcmp (arm_fallback_mode_string, "thumb") == 0)
403 /* If we couldn't find any symbol, but we're talking to a running
404 target, then trust the current value of $cpsr. This lets
405 "display/i $pc" always show the correct mode (though if there is
406 a symbol table we will not reach here, so it still may not be
407 displayed in the mode it will be executed).
409 As a further heuristic if we detect that we are doing a single-step we
410 see what state executing the current instruction ends up with us being
412 if (target_has_registers)
414 struct frame_info *current_frame = get_current_frame ();
415 CORE_ADDR current_pc = get_frame_pc (current_frame);
416 int is_thumb = arm_frame_is_thumb (current_frame);
418 if (memaddr == current_pc)
422 struct gdbarch *gdbarch = get_frame_arch (current_frame);
/* Predict the PC after the current instruction; if MEMADDR is that
   address, the predicted mode (bit 0) is authoritative.  */
423 next_pc = arm_get_next_pc_raw (current_frame, current_pc, FALSE);
424 if (memaddr == gdbarch_addr_bits_remove (gdbarch, next_pc))
425 return IS_THUMB_ADDR (next_pc);
431 /* Otherwise we're out of luck; we assume ARM. */
435 /* Remove useless bits from addresses in a running program. */
437 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
/* Thumb addresses drop bit 0; ARM addresses are masked to the
   word-aligned 26-bit range.  NOTE(review): the condition selecting
   between the two returns was dropped by extraction — presumably a
   call to arm_pc_is_thumb; confirm against the full source.  */
440 return UNMAKE_THUMB_ADDR (val);
442 return (val & 0x03fffffc);
445 /* When reading symbols, we need to zap the low bit of the address,
446 which may be set to 1 for Thumb functions. */
/* NOTE(review): body dropped by extraction.  */
448 arm_smash_text_address (struct gdbarch *gdbarch, CORE_ADDR val)
453 /* Return 1 if PC is the start of a compiler helper function which
454 can be safely ignored during prologue skipping. IS_THUMB is true
455 if the function is known to be a Thumb function due to the way it
/* NOTE(review): the tail of the comment above and the `return'
   lines inside the body were dropped by extraction.  */
458 skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
460 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
461 struct minimal_symbol *msym;
/* First pass: recognize well-known helpers by symbol name, but only
   when PC is exactly the function's entry point.  */
463 msym = lookup_minimal_symbol_by_pc (pc);
465 && SYMBOL_VALUE_ADDRESS (msym) == pc
466 && SYMBOL_LINKAGE_NAME (msym) != NULL)
468 const char *name = SYMBOL_LINKAGE_NAME (msym);
470 /* The GNU linker's Thumb call stub to foo is named
472 if (strstr (name, "_from_thumb") != NULL)
475 /* On soft-float targets, __truncdfsf2 is called to convert promoted
476 arguments to their argument types in non-prototyped
478 if (strncmp (name, "__truncdfsf2", strlen ("__truncdfsf2")) == 0)
480 if (strncmp (name, "__aeabi_d2f", strlen ("__aeabi_d2f")) == 0)
483 /* Internal functions related to thread-local storage. */
484 if (strncmp (name, "__tls_get_addr", strlen ("__tls_get_addr")) == 0)
486 if (strncmp (name, "__aeabi_read_tp", strlen ("__aeabi_read_tp")) == 0)
491 /* If we run against a stripped glibc, we may be unable to identify
492 special functions by name. Check for one important case,
493 __aeabi_read_tp, by comparing the *code* against the default
494 implementation (this is hand-written ARM assembler in glibc). */
497 && read_memory_unsigned_integer (pc, 4, byte_order_for_code)
498 == 0xe3e00a0f /* mov r0, #0xffff0fff */
499 && read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code)
500 == 0xe240f01f) /* sub pc, r0, #31 */
/* Support routines for instruction parsing.  */

/* A mask of the X+1 least significant bits (bits 0 .. X inclusive).
   NOTE: shifts a long, so X must be < number of bits in long - 1.  */
#define submask(x) ((1L << ((x) + 1)) - 1)

/* Bit number ST of OBJ, as 0 or 1.  */
#define bit(obj,st) (((obj) >> (st)) & 1)

/* The zero-extended bit field of OBJ from bit ST to bit FN,
   inclusive.  */
#define bits(obj,st,fn) (((obj) >> (st)) & submask ((fn) - (st)))

/* The same bit field, sign-extended using bit FN as the sign bit.  */
#define sbits(obj,st,fn) \
  ((long) (bits(obj,st,fn) | ((long) bit(obj,fn) * ~ submask (fn - st))))

/* The target of an ARM-mode B/BL instruction INSTR executed at ADDR:
   the signed 24-bit word offset scaled by four, plus the 8-byte
   pipeline offset.  */
#define BranchDest(addr,instr) \
  ((CORE_ADDR) (((long) (addr)) + 8 + (sbits (instr, 0, 23) << 2)))

/* Extract the immediate from instruction movw/movt of encoding T.  INSN1 is
   the first 16-bit of instruction, and INSN2 is the second 16-bit of
   instruction.  The result is imm4:i:imm3:imm8.  */
#define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
  ((bits ((insn1), 0, 3) << 12)               \
   | (bits ((insn1), 10, 10) << 11)           \
   | (bits ((insn2), 12, 14) << 8)            \
   | bits ((insn2), 0, 7))

/* Extract the immediate from instruction movw/movt of encoding A.  INSN is
   the 32-bit instruction.  The result is imm4:imm12.  */
#define EXTRACT_MOVW_MOVT_IMM_A(insn) \
  ((bits ((insn), 16, 19) << 12) \
   | bits ((insn), 0, 11))
531 /* Decode immediate value; implements ThumbExpandImmediate pseudo-op. */
/* NOTE(review): the dispatch structure (the ARM ARM's switch on
   imm<11:8> for count < 8, and the first replication case) was
   dropped by extraction; only the replication and rotation returns
   remain below.  */
534 thumb_expand_immediate (unsigned int imm)
536 unsigned int count = imm >> 7;
/* 0x00XY00XY replication.  */
544 return (imm & 0xff) | ((imm & 0xff) << 16);
/* 0xXY00XY00 replication.  */
546 return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
/* 0xXYXYXYXY replication.  */
548 return (imm & 0xff) | ((imm & 0xff) << 8)
549 | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
/* Rotated 8-bit constant: 1 followed by imm<6:0>, rotated right
   by COUNT.  */
552 return (0x80 | (imm & 0x7f)) << (32 - count);
555 /* Return 1 if the 16-bit Thumb instruction INST might change
556 control flow, 0 otherwise. */
/* NOTE(review): the `return 1' after each match and the final
   `return 0' were dropped by extraction.  */
559 thumb_instruction_changes_pc (unsigned short inst)
561 if ((inst & 0xff00) == 0xbd00) /* pop {rlist, pc} */
564 if ((inst & 0xf000) == 0xd000) /* conditional branch */
567 if ((inst & 0xf800) == 0xe000) /* unconditional branch */
570 if ((inst & 0xff00) == 0x4700) /* bx REG, blx REG */
573 if ((inst & 0xff87) == 0x4687) /* mov pc, REG */
576 if ((inst & 0xf500) == 0xb100) /* CBNZ or CBZ. */
582 /* Return 1 if the 32-bit Thumb instruction in INST1 and INST2
583 might change control flow, 0 otherwise. */
/* NOTE(review): the `return' statements and several writeback/PC
   checks inside the load-multiple cases were dropped by
   extraction.  */
586 thumb2_instruction_changes_pc (unsigned short inst1, unsigned short inst2)
588 if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
590 /* Branches and miscellaneous control instructions. */
592 if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
597 else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)
599 /* SUBS PC, LR, #imm8. */
602 else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
604 /* Conditional branch. */
611 if ((inst1 & 0xfe50) == 0xe810)
613 /* Load multiple or RFE. */
/* The four cases below distinguish LDMIA/LDMDB/RFEIA/RFEDB by the
   W (bit 7 of INST1) and L-form (bit 8) combinations.  */
615 if (bit (inst1, 7) && !bit (inst1, 8))
621 else if (!bit (inst1, 7) && bit (inst1, 8))
627 else if (bit (inst1, 7) && bit (inst1, 8))
632 else if (!bit (inst1, 7) && !bit (inst1, 8))
641 if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
643 /* MOV PC or MOVS PC. */
647 if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
/* LDR with PC as the destination; literal (Rn == 15) and
   register/immediate forms.  */
650 if (bits (inst1, 0, 3) == 15)
656 if ((inst2 & 0x0fc0) == 0x0000)
662 if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)
668 if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)
677 /* Analyze a Thumb prologue, looking for a recognizable stack frame
678 and frame pointer. Scan until we encounter a store that could
679 clobber the stack frame unexpectedly, or an unknown instruction.
680 Return the last address which is definitely safe to skip for an
681 initial breakpoint. */
/* Symbolically executes the prologue with the prologue-value (pv)
   machinery: `regs' tracks each core register as a pv expression and
   `stack' records which pv values were stored where, so the frame
   register, frame size and saved-register offsets can be recovered
   afterwards.  NOTE(review): scattered lines (braces, some locals
   such as `regs', `i', `insn', `offset', and a few statements) were
   dropped by extraction throughout this function.  */
684 thumb_analyze_prologue (struct gdbarch *gdbarch,
685 CORE_ADDR start, CORE_ADDR limit,
686 struct arm_prologue_cache *cache)
688 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
689 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
692 struct pv_area *stack;
693 struct cleanup *back_to;
695 CORE_ADDR unrecognized_pc = 0;
697 for (i = 0; i < 16; i++)
698 regs[i] = pv_register (i, 0);
699 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
700 back_to = make_cleanup_free_pv_area (stack);
/* Decode one 16-bit Thumb instruction per iteration; 32-bit Thumb-2
   encodings are handled in the (insn & 0xe000) == 0xe000 branch
   below.  */
702 while (start < limit)
706 insn = read_memory_unsigned_integer (start, 2, byte_order_for_code);
708 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
713 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
716 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
717 whether to save LR (R14). */
718 mask = (insn & 0xff) | ((insn & 0x100) << 6);
720 /* Calculate offsets of saved R0-R7 and LR. */
721 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
722 if (mask & (1 << regno))
724 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
726 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
729 else if ((insn & 0xff00) == 0xb000) /* add sp, #simm OR
732 offset = (insn & 0x7f) << 2; /* get scaled offset */
733 if (insn & 0x80) /* Check for SUB. */
734 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
737 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
740 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
741 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
743 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
744 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
745 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
747 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
748 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
749 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
751 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
752 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
753 && pv_is_constant (regs[bits (insn, 3, 5)]))
754 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
755 regs[bits (insn, 6, 8)]);
756 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
757 && pv_is_constant (regs[bits (insn, 3, 6)]))
759 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
760 int rm = bits (insn, 3, 6);
761 regs[rd] = pv_add (regs[rd], regs[rm]);
763 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
765 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
766 int src_reg = (insn & 0x78) >> 3;
767 regs[dst_reg] = regs[src_reg];
769 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
771 /* Handle stores to the stack. Normally pushes are used,
772 but with GCC -mtpcs-frame, there may be other stores
773 in the prologue to create the frame. */
774 int regno = (insn >> 8) & 0x7;
777 offset = (insn & 0xff) << 2;
778 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
780 if (pv_area_store_would_trash (stack, addr))
783 pv_area_store (stack, addr, 4, regs[regno]);
785 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
787 int rd = bits (insn, 0, 2);
788 int rn = bits (insn, 3, 5);
791 offset = bits (insn, 6, 10) << 2;
792 addr = pv_add_constant (regs[rn], offset);
794 if (pv_area_store_would_trash (stack, addr))
797 pv_area_store (stack, addr, 4, regs[rd]);
799 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
800 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
801 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
802 /* Ignore stores of argument registers to the stack. */
804 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
805 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
806 /* Ignore block loads from the stack, potentially copying
807 parameters from memory. */
809 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
810 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
811 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
812 /* Similarly ignore single loads from the stack. */
814 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
815 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
816 /* Skip register copies, i.e. saves to another register
817 instead of the stack. */
819 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
820 /* Recognize constant loads; even with small stacks these are necessary
822 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
823 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
825 /* Constant pool loads, for the same reason. */
826 unsigned int constant;
829 loc = start + 4 + bits (insn, 0, 7) * 4;
830 constant = read_memory_unsigned_integer (loc, 4, byte_order);
831 regs[bits (insn, 8, 10)] = pv_constant (constant);
/* 32-bit Thumb-2 instructions: the second halfword is fetched and
   the pair decoded together.  */
833 else if ((insn & 0xe000) == 0xe000)
835 unsigned short inst2;
837 inst2 = read_memory_unsigned_integer (start + 2, 2,
838 byte_order_for_code);
840 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
842 /* BL, BLX. Allow some special function calls when
843 skipping the prologue; GCC generates these before
844 storing arguments to the stack. */
846 int j1, j2, imm1, imm2;
848 imm1 = sbits (insn, 0, 10);
849 imm2 = bits (inst2, 0, 10);
850 j1 = bit (inst2, 13);
851 j2 = bit (inst2, 11);
853 offset = ((imm1 << 12) + (imm2 << 1));
854 offset ^= ((!j2) << 22) | ((!j1) << 23);
856 nextpc = start + 4 + offset;
857 /* For BLX make sure to clear the low bits. */
858 if (bit (inst2, 12) == 0)
859 nextpc = nextpc & 0xfffffffc;
861 if (!skip_prologue_function (gdbarch, nextpc,
862 bit (inst2, 12) != 0))
866 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
868 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
870 pv_t addr = regs[bits (insn, 0, 3)];
873 if (pv_area_store_would_trash (stack, addr))
876 /* Calculate offsets of saved registers. */
877 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
878 if (inst2 & (1 << regno))
880 addr = pv_add_constant (addr, -4);
881 pv_area_store (stack, addr, 4, regs[regno]);
885 regs[bits (insn, 0, 3)] = addr;
888 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
890 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
892 int regno1 = bits (inst2, 12, 15);
893 int regno2 = bits (inst2, 8, 11);
894 pv_t addr = regs[bits (insn, 0, 3)];
896 offset = inst2 & 0xff;
898 addr = pv_add_constant (addr, offset);
900 addr = pv_add_constant (addr, -offset);
902 if (pv_area_store_would_trash (stack, addr))
905 pv_area_store (stack, addr, 4, regs[regno1]);
906 pv_area_store (stack, pv_add_constant (addr, 4),
910 regs[bits (insn, 0, 3)] = addr;
913 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
914 && (inst2 & 0x0c00) == 0x0c00
915 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
917 int regno = bits (inst2, 12, 15);
918 pv_t addr = regs[bits (insn, 0, 3)];
920 offset = inst2 & 0xff;
922 addr = pv_add_constant (addr, offset);
924 addr = pv_add_constant (addr, -offset);
926 if (pv_area_store_would_trash (stack, addr))
929 pv_area_store (stack, addr, 4, regs[regno]);
932 regs[bits (insn, 0, 3)] = addr;
935 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
936 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
938 int regno = bits (inst2, 12, 15);
941 offset = inst2 & 0xfff;
942 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
944 if (pv_area_store_would_trash (stack, addr))
947 pv_area_store (stack, addr, 4, regs[regno]);
950 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
951 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
952 /* Ignore stores of argument registers to the stack. */
955 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
956 && (inst2 & 0x0d00) == 0x0c00
957 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
958 /* Ignore stores of argument registers to the stack. */
961 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
963 && (inst2 & 0x8000) == 0x0000
964 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
965 /* Ignore block loads from the stack, potentially copying
966 parameters from memory. */
969 else if ((insn & 0xffb0) == 0xe950 /* ldrd Rt, Rt2,
971 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
972 /* Similarly ignore dual loads from the stack. */
975 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
976 && (inst2 & 0x0d00) == 0x0c00
977 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
978 /* Similarly ignore single loads from the stack. */
981 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
982 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
983 /* Similarly ignore single loads from the stack. */
986 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
987 && (inst2 & 0x8000) == 0x0000)
989 unsigned int imm = ((bits (insn, 10, 10) << 11)
990 | (bits (inst2, 12, 14) << 8)
991 | bits (inst2, 0, 7));
993 regs[bits (inst2, 8, 11)]
994 = pv_add_constant (regs[bits (insn, 0, 3)],
995 thumb_expand_immediate (imm));
998 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
999 && (inst2 & 0x8000) == 0x0000)
1001 unsigned int imm = ((bits (insn, 10, 10) << 11)
1002 | (bits (inst2, 12, 14) << 8)
1003 | bits (inst2, 0, 7));
1005 regs[bits (inst2, 8, 11)]
1006 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
1009 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
1010 && (inst2 & 0x8000) == 0x0000)
1012 unsigned int imm = ((bits (insn, 10, 10) << 11)
1013 | (bits (inst2, 12, 14) << 8)
1014 | bits (inst2, 0, 7));
1016 regs[bits (inst2, 8, 11)]
1017 = pv_add_constant (regs[bits (insn, 0, 3)],
1018 - (CORE_ADDR) thumb_expand_immediate (imm));
1021 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
1022 && (inst2 & 0x8000) == 0x0000)
1024 unsigned int imm = ((bits (insn, 10, 10) << 11)
1025 | (bits (inst2, 12, 14) << 8)
1026 | bits (inst2, 0, 7));
1028 regs[bits (inst2, 8, 11)]
1029 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
1032 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
1034 unsigned int imm = ((bits (insn, 10, 10) << 11)
1035 | (bits (inst2, 12, 14) << 8)
1036 | bits (inst2, 0, 7));
1038 regs[bits (inst2, 8, 11)]
1039 = pv_constant (thumb_expand_immediate (imm));
1042 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
1045 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
1047 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1050 else if (insn == 0xea5f /* mov.w Rd,Rm */
1051 && (inst2 & 0xf0f0) == 0)
1053 int dst_reg = (inst2 & 0x0f00) >> 8;
1054 int src_reg = inst2 & 0xf;
1055 regs[dst_reg] = regs[src_reg];
1058 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1060 /* Constant pool loads. */
1061 unsigned int constant;
1064 offset = bits (insn, 0, 11);
1066 loc = start + 4 + offset;
1068 loc = start + 4 - offset;
1070 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1071 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1074 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1076 /* Constant pool loads. */
1077 unsigned int constant;
1080 offset = bits (insn, 0, 7) << 2;
1082 loc = start + 4 + offset;
1084 loc = start + 4 - offset;
1086 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1087 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1089 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1090 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1093 else if (thumb2_instruction_changes_pc (insn, inst2))
1095 /* Don't scan past anything that might change control flow. */
1100 /* The optimizer might shove anything into the prologue,
1101 so we just skip what we don't recognize. */
1102 unrecognized_pc = start;
1107 else if (thumb_instruction_changes_pc (insn))
1109 /* Don't scan past anything that might change control flow. */
1114 /* The optimizer might shove anything into the prologue,
1115 so we just skip what we don't recognize. */
1116 unrecognized_pc = start;
1123 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1124 paddress (gdbarch, start));
1126 if (unrecognized_pc == 0)
1127 unrecognized_pc = start;
/* When no CACHE was supplied the caller only wants the scan result,
   so stop before deriving frame information.  */
1131 do_cleanups (back_to);
1132 return unrecognized_pc;
/* Decide which register holds the frame base, preferring fp (r11),
   then r7 (the Thumb frame pointer), then sp.  */
1135 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1137 /* Frame pointer is fp. Frame size is constant. */
1138 cache->framereg = ARM_FP_REGNUM;
1139 cache->framesize = -regs[ARM_FP_REGNUM].k;
1141 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1143 /* Frame pointer is r7. Frame size is constant. */
1144 cache->framereg = THUMB_FP_REGNUM;
1145 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1147 else if (pv_is_register (regs[ARM_SP_REGNUM], ARM_SP_REGNUM))
1149 /* Try the stack pointer... this is a bit desperate. */
1150 cache->framereg = ARM_SP_REGNUM;
1151 cache->framesize = -regs[ARM_SP_REGNUM].k;
1155 /* We're just out of luck. We don't know where the frame is. */
1156 cache->framereg = -1;
1157 cache->framesize = 0;
/* Record where each core register was saved, as found in the
   symbolic stack area.  */
1160 for (i = 0; i < 16; i++)
1161 if (pv_area_find_reg (stack, gdbarch, i, &offset))
1162 cache->saved_regs[i].addr = offset;
1164 do_cleanups (back_to);
1165 return unrecognized_pc;
1169 /* Try to analyze the instructions starting from PC, which load symbol
1170 __stack_chk_guard. Return the address of instruction after loading this
1171 symbol, set the dest register number to *BASEREG, and set the size of
1172 instructions for loading symbol in OFFSET. Return 0 if instructions are
/* NOTE(review): the tail of the comment above, the offset
   assignments, and the final return were dropped by extraction.
   Note the comment says *BASEREG while the parameter is named
   DESTREG — presumably the same thing.  */
1176 arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
1177 unsigned int *destreg, int *offset)
1179 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1180 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1181 unsigned int low, high, address;
/* Thumb encoding: either a PC-relative literal load or a
   movw/movt pair.  */
1186 unsigned short insn1
1187 = read_memory_unsigned_integer (pc, 2, byte_order_for_code);
1189 if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
1191 *destreg = bits (insn1, 8, 10);
1193 address = bits (insn1, 0, 7);
1195 else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
1197 unsigned short insn2
1198 = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);
1200 low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
/* Fetch the following instruction pair, expected to be the
   matching movt.  */
1203 = read_memory_unsigned_integer (pc + 4, 2, byte_order_for_code);
1205 = read_memory_unsigned_integer (pc + 6, 2, byte_order_for_code);
1207 /* movt Rd, #const */
1208 if ((insn1 & 0xfbc0) == 0xf2c0)
1210 high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1211 *destreg = bits (insn2, 8, 11);
1213 address = (high << 16 | low);
/* ARM encoding: the analogous literal load or movw/movt pair.  */
1220 = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
1222 if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, #immed */
1224 address = bits (insn, 0, 11);
1225 *destreg = bits (insn, 12, 15);
1228 else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1230 low = EXTRACT_MOVW_MOVT_IMM_A (insn);
1233 = read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code);
1235 if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1237 high = EXTRACT_MOVW_MOVT_IMM_A (insn);
1238 *destreg = bits (insn, 12, 15);
1240 address = (high << 16 | low);
1248 /* Try to skip a sequence of instructions used for stack protector. If PC
1249 points to the first instruction of this sequence, return the address of
1250 first instruction after this sequence, otherwise, return original PC.
1252 On arm, this sequence of instructions is composed of mainly three steps,
1253 Step 1: load symbol __stack_chk_guard,
1254 Step 2: load from address of __stack_chk_guard,
1255 Step 3: store it to somewhere else.
1257 Usually, instructions on step 2 and step 3 are the same on various ARM
1258 architectures. On step 2, it is one instruction 'ldr Rx, [Rn, #0]', and
1259 on step 3, it is also one instruction 'str Rx, [r7, #immd]'. However,
1260 instructions in step 1 vary from different ARM architectures. On ARMv7,
1263 movw Rn, #:lower16:__stack_chk_guard
1264 movt Rn, #:upper16:__stack_chk_guard
1271 .word __stack_chk_guard
1273 Since ldr/str is a very popular instruction, we can't use them as
1274 'fingerprint' or 'signature' of stack protector sequence. Here we choose
1275 sequence {movw/movt, ldr}/ldr/str plus symbol __stack_chk_guard, if not
1276 stripped, as the 'fingerprint' of a stack protector code sequence. */
/* Return the address past the stack-protector instruction sequence
   starting at PC, or the original PC when the sequence is not
   recognized (NOTE(review): early-exit paths are elided in this
   listing — confirm against the full source).  */
1279 arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
1281 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1282 unsigned int address, basereg;
1283 struct minimal_symbol *stack_chk_guard;
1285 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1288 /* Try to parse the instructions in Step 1. */
1289 addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
/* Look up the symbol at the address loaded by Step 1 to see whether
   it is __stack_chk_guard.  */
1294 stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
1295 /* If name of symbol doesn't start with '__stack_chk_guard', this
1296 instruction sequence is not for stack protector. If symbol is
1297 removed, we conservatively think this sequence is for stack protector. */
1299 && strcmp (SYMBOL_LINKAGE_NAME(stack_chk_guard), "__stack_chk_guard"))
/* Thumb mode: Steps 2 and 3 are 16-bit (encoding T1) instructions.  */
1304 unsigned int destreg;
1306 = read_memory_unsigned_integer (pc + offset, 2, byte_order_for_code);
1308 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1309 if ((insn & 0xf800) != 0x6800)
1311 if (bits (insn, 3, 5) != basereg)
1313 destreg = bits (insn, 0, 2);
1315 insn = read_memory_unsigned_integer (pc + offset + 2, 2,
1316 byte_order_for_code);
1317 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1318 if ((insn & 0xf800) != 0x6000)
1320 if (destreg != bits (insn, 0, 2))
/* ARM mode: Steps 2 and 3 are 32-bit (encoding A1) instructions.  */
1325 unsigned int destreg;
1327 = read_memory_unsigned_integer (pc + offset, 4, byte_order_for_code);
1329 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1330 if ((insn & 0x0e500000) != 0x04100000)
1332 if (bits (insn, 16, 19) != basereg)
1334 destreg = bits (insn, 12, 15);
1335 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1336 insn = read_memory_unsigned_integer (pc + offset + 4,
1337 4, byte_order_for_code);
1338 if ((insn & 0x0e500000) != 0x04000000)
1340 if (bits (insn, 12, 15) != destreg)
1343 /* The size of total two instructions ldr/str is 4 on Thumb-2, while 8
/* Skip past the two verified instructions (4 bytes on Thumb, 8 on ARM).  */
1346 return pc + offset + 4;
1348 return pc + offset + 8;
1351 /* Advance the PC across any function entry prologue instructions to
1352 reach some "real" code.
1354 The APCS (ARM Procedure Call Standard) defines the following
1358 [stmfd sp!, {a1,a2,a3,a4}]
1359 stmfd sp!, {...,fp,ip,lr,pc}
1360 [stfe f7, [sp, #-12]!]
1361 [stfe f6, [sp, #-12]!]
1362 [stfe f5, [sp, #-12]!]
1363 [stfe f4, [sp, #-12]!]
1364 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
/* gdbarch skip_prologue method: advance PC past the function entry
   prologue, preferring symbol-table (line-table) information and
   falling back to instruction scanning.  */
1367 arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1369 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1372 CORE_ADDR func_addr, limit_pc;
1373 struct symtab_and_line sal;
1375 /* See if we can determine the end of the prologue via the symbol table.
1376 If so, then return either PC, or the PC after the prologue, whichever
1378 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
1380 CORE_ADDR post_prologue_pc
1381 = skip_prologue_using_sal (gdbarch, func_addr);
1382 struct symtab *s = find_pc_symtab (func_addr);
/* Any stack-protector sequence sits right after the SAL-determined
   prologue end; step over it too.  */
1384 if (post_prologue_pc)
1386 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1389 /* GCC always emits a line note before the prologue and another
1390 one after, even if the two are at the same address or on the
1391 same line. Take advantage of this so that we do not need to
1392 know every instruction that might appear in the prologue. We
1393 will have producer information for most binaries; if it is
1394 missing (e.g. for -gstabs), assuming the GNU tools. */
1395 if (post_prologue_pc
1397 || s->producer == NULL
1398 || strncmp (s->producer, "GNU ", sizeof ("GNU ") - 1) == 0))
1399 return post_prologue_pc;
1401 if (post_prologue_pc != 0)
1403 CORE_ADDR analyzed_limit;
1405 /* For non-GCC compilers, make sure the entire line is an
1406 acceptable prologue; GDB will round this function's
1407 return value up to the end of the following line so we
1408 can not skip just part of a line (and we do not want to).
1410 RealView does not treat the prologue specially, but does
1411 associate prologue code with the opening brace; so this
1412 lets us skip the first line if we think it is the opening
1414 if (arm_pc_is_thumb (gdbarch, func_addr))
1415 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1416 post_prologue_pc, NULL);
1418 analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
1419 post_prologue_pc, NULL);
/* Only trust the SAL result if the analyzer agrees every
   instruction up to it is prologue.  */
1421 if (analyzed_limit != post_prologue_pc)
1424 return post_prologue_pc;
1428 /* Can't determine prologue from the symbol table, need to examine
1431 /* Find an upper limit on the function prologue using the debug
1432 information. If the debug information could not be used to provide
1433 that bound, then use an arbitrary large number as the upper bound. */
1434 /* Like arm_scan_prologue, stop no later than pc + 64. */
1435 limit_pc = skip_prologue_using_sal (gdbarch, pc);
1437 limit_pc = pc + 64; /* Magic. */
1440 /* Check if this is Thumb code. */
1441 if (arm_pc_is_thumb (gdbarch, pc))
1442 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
/* ARM mode: pattern-match known prologue instructions, 4 bytes at a
   time, stopping at the first unrecognized one.  */
1444 for (skip_pc = pc; skip_pc < limit_pc; skip_pc += 4)
1446 inst = read_memory_unsigned_integer (skip_pc, 4, byte_order_for_code);
1448 /* "mov ip, sp" is no longer a required part of the prologue. */
1449 if (inst == 0xe1a0c00d) /* mov ip, sp */
1452 if ((inst & 0xfffff000) == 0xe28dc000) /* add ip, sp #n */
1455 if ((inst & 0xfffff000) == 0xe24dc000) /* sub ip, sp #n */
1458 /* Some prologues begin with "str lr, [sp, #-4]!". */
1459 if (inst == 0xe52de004) /* str lr, [sp, #-4]! */
1462 if ((inst & 0xfffffff0) == 0xe92d0000) /* stmfd sp!,{a1,a2,a3,a4} */
1465 if ((inst & 0xfffff800) == 0xe92dd800) /* stmfd sp!,{fp,ip,lr,pc} */
1468 /* Any insns after this point may float into the code, if it makes
1469 for better instruction scheduling, so we skip them only if we
1470 find them, but still consider the function to be frame-ful. */
1472 /* We may have either one sfmfd instruction here, or several stfe
1473 insns, depending on the version of floating point code we
1475 if ((inst & 0xffbf0fff) == 0xec2d0200) /* sfmfd fn, <cnt>, [sp]! */
1478 if ((inst & 0xffff8fff) == 0xed6d0103) /* stfe fn, [sp, #-12]! */
1481 if ((inst & 0xfffff000) == 0xe24cb000) /* sub fp, ip, #nn */
1484 if ((inst & 0xfffff000) == 0xe24dd000) /* sub sp, sp, #nn */
1487 if ((inst & 0xffffc000) == 0xe54b0000 /* strb r(0123),[r11,#-nn] */
1488 || (inst & 0xffffc0f0) == 0xe14b00b0 /* strh r(0123),[r11,#-nn] */
1489 || (inst & 0xffffc000) == 0xe50b0000) /* str r(0123),[r11,#-nn] */
1492 if ((inst & 0xffffc000) == 0xe5cd0000 /* strb r(0123),[sp,#nn] */
1493 || (inst & 0xffffc0f0) == 0xe1cd00b0 /* strh r(0123),[sp,#nn] */
1494 || (inst & 0xffffc000) == 0xe58d0000) /* str r(0123),[sp,#nn] */
1497 /* Un-recognized instruction; stop scanning. */
1501 return skip_pc; /* End of prologue. */
1505 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1506 This function decodes a Thumb function prologue to determine:
1507 1) the size of the stack frame
1508 2) which registers are saved on it
1509 3) the offsets of saved regs
1510 4) the offset from the stack pointer to the frame pointer
1512 A typical Thumb function prologue would create this stack frame
1513 (offsets relative to FP)
1514 old SP -> 24 stack parameters
1517 R7 -> 0 local variables (16 bytes)
1518 SP -> -12 additional stack space (12 bytes)
1519 The frame size would thus be 36 bytes, and the frame offset would be
1520 12 bytes. The frame register is R7.
1522 The comments for thumb_skip_prolog() describe the algorithm we use
1523 to detect the end of the prolog. */
/* Scan a Thumb prologue for frame CACHE, bounding the scan by symbol
   information when available and by PREV_PC otherwise; the real
   analysis is delegated to thumb_analyze_prologue.  */
1527 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1528 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1530 CORE_ADDR prologue_start;
1531 CORE_ADDR prologue_end;
1532 CORE_ADDR current_pc;
1534 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1537 /* See comment in arm_scan_prologue for an explanation of
1539 if (prologue_end > prologue_start + 64)
1541 prologue_end = prologue_start + 64;
1545 /* We're in the boondocks: we have no idea where the start of the
/* Never scan past the point of execution in this frame.  */
1549 prologue_end = min (prologue_end, prev_pc);
1551 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1554 /* Return 1 if THIS_INSTR might change control flow, 0 otherwise. */
/* Return 1 if THIS_INSTR (a 32-bit ARM-mode instruction) might write
   the PC and thus change control flow, 0 otherwise.  Used by the
   prologue analyzer to stop at anything resembling a branch.  */
1557 arm_instruction_changes_pc (uint32_t this_instr)
1559 if (bits (this_instr, 28, 31) == INST_NV)
1560 /* Unconditional instructions. */
1561 switch (bits (this_instr, 24, 27))
1565 /* Branch with Link and change to Thumb. */
1570 /* Coprocessor register transfer. */
1571 if (bits (this_instr, 12, 15) == 15)
1572 error (_("Invalid update to pc in instruction"));
/* Conditional instructions: dispatch on the major opcode field.  */
1578 switch (bits (this_instr, 25, 27))
1581 if (bits (this_instr, 23, 24) == 2 && bit (this_instr, 20) == 0)
1583 /* Multiplies and extra load/stores. */
1584 if (bit (this_instr, 4) == 1 && bit (this_instr, 7) == 1)
1585 /* Neither multiplies nor extension load/stores are allowed
1589 /* Otherwise, miscellaneous instructions. */
1591 /* BX <reg>, BXJ <reg>, BLX <reg> */
1592 if (bits (this_instr, 4, 27) == 0x12fff1
1593 || bits (this_instr, 4, 27) == 0x12fff2
1594 || bits (this_instr, 4, 27) == 0x12fff3)
1597 /* Other miscellaneous instructions are unpredictable if they
1601 /* Data processing instruction. Fall through. */
/* Data processing writes the PC when Rd (bits 12-15) is 15.  */
1604 if (bits (this_instr, 12, 15) == 15)
1611 /* Media instructions and architecturally undefined instructions. */
1612 if (bits (this_instr, 25, 27) == 3 && bit (this_instr, 4) == 1)
1616 if (bit (this_instr, 20) == 0)
1620 if (bits (this_instr, 12, 15) == ARM_PC_REGNUM)
1626 /* Load/store multiple. */
1627 if (bit (this_instr, 20) == 1 && bit (this_instr, 15) == 1)
1633 /* Branch and branch with link. */
1638 /* Coprocessor transfers or SWIs can not affect PC. */
/* All 3-bit opcode values are handled above; reaching here is a bug.  */
1642 internal_error (__FILE__, __LINE__, _("bad value in switch"));
1646 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1647 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1648 fill it in. Return the first address not recognized as a prologue
1651 We recognize all the instructions typically found in ARM prologues,
1652 plus harmless instructions which can be skipped (either for analysis
1653 purposes, or a more restrictive set that can be skipped when finding
1654 the end of the prologue). */
/* Symbolically execute an ARM-mode prologue between PROLOGUE_START
   and PROLOGUE_END using the prologue-value (pv) machinery; when
   CACHE is non-NULL, record the frame register, frame size and saved
   register locations there.  Returns the first unrecognized PC.  */
1657 arm_analyze_prologue (struct gdbarch *gdbarch,
1658 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1659 struct arm_prologue_cache *cache)
1661 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1662 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1664 CORE_ADDR offset, current_pc;
1665 pv_t regs[ARM_FPS_REGNUM];
1666 struct pv_area *stack;
1667 struct cleanup *back_to;
1668 int framereg, framesize;
1669 CORE_ADDR unrecognized_pc = 0;
1671 /* Search the prologue looking for instructions that set up the
1672 frame pointer, adjust the stack pointer, and save registers.
1674 Be careful, however, and if it doesn't look like a prologue,
1675 don't try to scan it. If, for instance, a frameless function
1676 begins with stmfd sp!, then we will tell ourselves there is
1677 a frame, which will confuse stack traceback, as well as "finish"
1678 and other operations that rely on a knowledge of the stack
/* Start every register as "its own entry value".  */
1681 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1682 regs[regno] = pv_register (regno, 0);
1683 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1684 back_to = make_cleanup_free_pv_area (stack);
1686 for (current_pc = prologue_start;
1687 current_pc < prologue_end;
1691 = read_memory_unsigned_integer (current_pc, 4, byte_order_for_code);
1693 if (insn == 0xe1a0c00d) /* mov ip, sp */
1695 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1698 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1699 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1701 unsigned imm = insn & 0xff; /* immediate value */
1702 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1703 int rd = bits (insn, 12, 15);
1704 imm = (imm >> rot) | (imm << (32 - rot));
1705 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1708 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1709 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1711 unsigned imm = insn & 0xff; /* immediate value */
1712 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1713 int rd = bits (insn, 12, 15);
1714 imm = (imm >> rot) | (imm << (32 - rot));
1715 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1718 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
/* Push of a single register: pre-decrement SP, record the store.  */
1721 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1723 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1724 pv_area_store (stack, regs[ARM_SP_REGNUM], 4,
1725 regs[bits (insn, 12, 15)]);
1728 else if ((insn & 0xffff0000) == 0xe92d0000)
1729 /* stmfd sp!, {..., fp, ip, lr, pc}
1731 stmfd sp!, {a1, a2, a3, a4} */
1733 int mask = insn & 0xffff;
1735 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1738 /* Calculate offsets of saved registers. */
1739 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1740 if (mask & (1 << regno))
1743 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1744 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
1747 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1748 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1749 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1751 /* No need to add this to saved_regs -- it's just an arg reg. */
1754 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1755 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1756 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1758 /* No need to add this to saved_regs -- it's just an arg reg. */
1761 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1763 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1765 /* No need to add this to saved_regs -- it's just arg regs. */
1768 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1770 unsigned imm = insn & 0xff; /* immediate value */
1771 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1772 imm = (imm >> rot) | (imm << (32 - rot));
1773 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1775 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1777 unsigned imm = insn & 0xff; /* immediate value */
1778 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1779 imm = (imm >> rot) | (imm << (32 - rot));
1780 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
1782 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1784 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1786 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
/* FPA register store: 12 bytes per register.  */
1789 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1790 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
1791 pv_area_store (stack, regs[ARM_SP_REGNUM], 12, regs[regno]);
1793 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1795 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1797 int n_saved_fp_regs;
1798 unsigned int fp_start_reg, fp_bound_reg;
1800 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
/* Decode the FPA register count from the N0/N1 bits.  */
1803 if ((insn & 0x800) == 0x800) /* N0 is set */
1805 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1806 n_saved_fp_regs = 3;
1808 n_saved_fp_regs = 1;
1812 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1813 n_saved_fp_regs = 2;
1815 n_saved_fp_regs = 4;
1818 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
1819 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1820 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
1822 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1823 pv_area_store (stack, regs[ARM_SP_REGNUM], 12,
1824 regs[fp_start_reg++]);
1827 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1829 /* Allow some special function calls when skipping the
1830 prologue; GCC generates these before storing arguments to
1832 CORE_ADDR dest = BranchDest (current_pc, insn);
1834 if (skip_prologue_function (gdbarch, dest, 0))
1839 else if ((insn & 0xf0000000) != 0xe0000000)
1840 break; /* Condition not true, exit early. */
1841 else if (arm_instruction_changes_pc (insn))
1842 /* Don't scan past anything that might change control flow. */
1844 else if ((insn & 0xfe500000) == 0xe8100000) /* ldm */
1846 /* Ignore block loads from the stack, potentially copying
1847 parameters from memory. */
1848 if (pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1853 else if ((insn & 0xfc500000) == 0xe4100000)
1855 /* Similarly ignore single loads from the stack. */
1856 if (pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1861 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1862 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1863 register instead of the stack. */
1867 /* The optimizer might shove anything into the prologue,
1868 so we just skip what we don't recognize. */
1869 unrecognized_pc = current_pc;
1874 if (unrecognized_pc == 0)
1875 unrecognized_pc = current_pc;
1877 /* The frame size is just the distance from the frame register
1878 to the original stack pointer. */
1879 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1881 /* Frame pointer is fp. */
1882 framereg = ARM_FP_REGNUM;
1883 framesize = -regs[ARM_FP_REGNUM].k;
1885 else if (pv_is_register (regs[ARM_SP_REGNUM], ARM_SP_REGNUM))
1887 /* Try the stack pointer... this is a bit desperate. */
1888 framereg = ARM_SP_REGNUM;
1889 framesize = -regs[ARM_SP_REGNUM].k;
1893 /* We're just out of luck. We don't know where the frame is. */
/* Record the results in the caller-supplied cache.  */
1900 cache->framereg = framereg;
1901 cache->framesize = framesize;
1903 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1904 if (pv_area_find_reg (stack, gdbarch, regno, &offset))
1905 cache->saved_regs[regno].addr = offset;
1909 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1910 paddress (gdbarch, unrecognized_pc));
1912 do_cleanups (back_to);
1913 return unrecognized_pc;
/* Fill CACHE by scanning the prologue of THIS_FRAME's function,
   dispatching to the Thumb or ARM analyzer as appropriate.  When no
   symbol information exists, guess the function start from the saved
   value at the frame pointer.  */
1917 arm_scan_prologue (struct frame_info *this_frame,
1918 struct arm_prologue_cache *cache)
1920 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1921 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1923 CORE_ADDR prologue_start, prologue_end, current_pc;
1924 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1925 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
1926 pv_t regs[ARM_FPS_REGNUM];
1927 struct pv_area *stack;
1928 struct cleanup *back_to;
1931 /* Assume there is no frame until proven otherwise. */
1932 cache->framereg = ARM_SP_REGNUM;
1933 cache->framesize = 0;
1935 /* Check for Thumb prologue. */
1936 if (arm_frame_is_thumb (this_frame))
1938 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1942 /* Find the function prologue. If we can't find the function in
1943 the symbol table, peek in the stack frame to find the PC. */
1944 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1947 /* One way to find the end of the prologue (which works well
1948 for unoptimized code) is to do the following:
1950 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1953 prologue_end = prev_pc;
1954 else if (sal.end < prologue_end)
1955 prologue_end = sal.end;
1957 This mechanism is very accurate so long as the optimizer
1958 doesn't move any instructions from the function body into the
1959 prologue. If this happens, sal.end will be the last
1960 instruction in the first hunk of prologue code just before
1961 the first instruction that the scheduler has moved from
1962 the body to the prologue.
1964 In order to make sure that we scan all of the prologue
1965 instructions, we use a slightly less accurate mechanism which
1966 may scan more than necessary. To help compensate for this
1967 lack of accuracy, the prologue scanning loop below contains
1968 several clauses which'll cause the loop to terminate early if
1969 an implausible prologue instruction is encountered.
1975 is a suitable endpoint since it accounts for the largest
1976 possible prologue plus up to five instructions inserted by
1979 if (prologue_end > prologue_start + 64)
1981 prologue_end = prologue_start + 64; /* See above. */
1986 /* We have no symbol information. Our only option is to assume this
1987 function has a standard stack frame and the normal frame register.
1988 Then, we can find the value of our frame pointer on entrance to
1989 the callee (or at the present moment if this is the innermost frame).
1990 The value stored there should be the address of the stmfd + 8. */
1991 CORE_ADDR frame_loc;
1992 LONGEST return_value;
1994 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
1995 if (!safe_read_memory_integer (frame_loc, 4, byte_order, &return_value))
/* The saved value points 8 bytes past the stmfd; back up to it.  */
1999 prologue_start = gdbarch_addr_bits_remove
2000 (gdbarch, return_value) - 8;
2001 prologue_end = prologue_start + 64; /* See above. */
/* Never scan beyond the current point of execution.  */
2005 if (prev_pc < prologue_end)
2006 prologue_end = prev_pc;
2008 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
/* Allocate and populate an arm_prologue_cache for THIS_FRAME: scan
   the prologue, reconstruct the previous SP, and turn the saved-reg
   offsets into absolute addresses.  */
static struct arm_prologue_cache *
2012 arm_make_prologue_cache (struct frame_info *this_frame)
2015 struct arm_prologue_cache *cache;
2016 CORE_ADDR unwound_fp;
2018 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2019 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2021 arm_scan_prologue (this_frame, cache);
2023 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
2024 if (unwound_fp == 0)
/* Caller's SP = frame register value + frame size.  */
2027 cache->prev_sp = unwound_fp + cache->framesize;
2029 /* Calculate actual addresses of saved registers using offsets
2030 determined by arm_scan_prologue. */
2031 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2032 if (trad_frame_addr_p (cache->saved_regs, reg))
2033 cache->saved_regs[reg].addr += cache->prev_sp;
2038 /* Our frame ID for a normal frame is the current function's starting PC
2039 and the caller's SP when we were called. */
/* frame_unwind this_id method: build THIS_ID from the caller's SP and
   the function start address, with outermost-frame checks.  */
2042 arm_prologue_this_id (struct frame_info *this_frame,
2044 struct frame_id *this_id)
2046 struct arm_prologue_cache *cache;
/* Create or reuse the per-frame prologue cache.  */
2050 if (*this_cache == NULL)
2051 *this_cache = arm_make_prologue_cache (this_frame);
2052 cache = *this_cache;
2054 /* This is meant to halt the backtrace at "_start". */
2055 pc = get_frame_pc (this_frame);
2056 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
2059 /* If we've hit a wall, stop. */
2060 if (cache->prev_sp == 0)
2063 /* Use function start address as part of the frame ID. If we cannot
2064 identify the start address (due to missing symbol information),
2065 fall back to just using the current PC. */
2066 func = get_frame_func (this_frame);
2070 id = frame_id_build (cache->prev_sp, func);
/* frame_unwind prev_register method: return the value PREV_REGNUM had
   in the previous frame, with special handling for PC (use LR), SP
   (use PREV_SP) and CPSR (reconstruct the T bit from LR).  */
static struct value *
2075 arm_prologue_prev_register (struct frame_info *this_frame,
2079 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2080 struct arm_prologue_cache *cache;
2082 if (*this_cache == NULL)
2083 *this_cache = arm_make_prologue_cache (this_frame);
2084 cache = *this_cache;
2086 /* If we are asked to unwind the PC, then we need to return the LR
2087 instead. The prologue may save PC, but it will point into this
2088 frame's prologue, not the next frame's resume location. Also
2089 strip the saved T bit. A valid LR may have the low bit set, but
2090 a valid PC never does. */
2091 if (prev_regnum == ARM_PC_REGNUM)
2095 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2096 return frame_unwind_got_constant (this_frame, prev_regnum,
2097 arm_addr_bits_remove (gdbarch, lr));
2100 /* SP is generally not saved to the stack, but this frame is
2101 identified by the next frame's stack pointer at the time of the call.
2102 The value was already reconstructed into PREV_SP. */
2103 if (prev_regnum == ARM_SP_REGNUM)
2104 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
2106 /* The CPSR may have been changed by the call instruction and by the
2107 called function. The only bit we can reconstruct is the T bit,
2108 by checking the low bit of LR as of the call. This is a reliable
2109 indicator of Thumb-ness except for some ARM v4T pre-interworking
2110 Thumb code, which could get away with a clear low bit as long as
2111 the called function did not use bx. Guess that all other
2112 bits are unchanged; the condition flags are presumably lost,
2113 but the processor status is likely valid. */
2114 if (prev_regnum == ARM_PS_REGNUM)
2117 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
2119 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
2120 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2121 if (IS_THUMB_ADDR (lr))
2125 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
/* Everything else: use the saved-register table from the scan.  */
2128 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
/* The prologue-analysis frame unwinder, registered as the default
   sniffer for normal frames.  */
2132 struct frame_unwind arm_prologue_unwind = {
2134 arm_prologue_this_id,
2135 arm_prologue_prev_register,
2137 default_frame_sniffer
2140 /* Maintain a list of ARM exception table entries per objfile, similar to the
2141 list of mapping symbols. We only cache entries for standard ARM-defined
2142 personality routines; the cache will contain only the frame unwinding
2143 instructions associated with the entry (not the descriptors). */
/* Per-objfile key under which the parsed exception-index cache is
   stored.  */
2145 static const struct objfile_data *arm_exidx_data_key;
/* One cached .ARM.exidx entry; fields include at least a section
   offset (addr) and the normalized unwind instructions (entry) —
   see arm_find_exidx_entry for how they are used.  */
2147 struct arm_exidx_entry
2152 typedef struct arm_exidx_entry arm_exidx_entry_s;
2153 DEF_VEC_O(arm_exidx_entry_s);
/* All exidx entries for an objfile, one vector per BFD section.  */
2155 struct arm_exidx_data
2157 VEC(arm_exidx_entry_s) **section_maps;
/* objfile_data cleanup: free the per-section exidx entry vectors.  */
2161 arm_exidx_data_free (struct objfile *objfile, void *arg)
2163 struct arm_exidx_data *data = arg;
2166 for (i = 0; i < objfile->obfd->section_count; i++)
2167 VEC_free (arm_exidx_entry_s, data->section_maps[i]);
/* Ordering predicate for VEC_lower_bound: entries sort by section
   offset.  */
2171 arm_compare_exidx_entries (const struct arm_exidx_entry *lhs,
2172 const struct arm_exidx_entry *rhs)
2174 return lhs->addr < rhs->addr;
/* Return the allocated (SEC_ALLOC) section of OBJFILE whose VMA range
   contains VMA, or fall through when none does.  */
static struct obj_section *
2178 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2180 struct obj_section *osect;
2182 ALL_OBJFILE_OSECTIONS (objfile, osect)
2183 if (bfd_get_section_flags (objfile->obfd,
2184 osect->the_bfd_section) & SEC_ALLOC)
2186 bfd_vma start, size;
2187 start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
2188 size = bfd_get_section_size (osect->the_bfd_section);
2190 if (start <= vma && vma < start + size)
2197 /* Parse contents of exception table and exception index sections
2198 of OBJFILE, and fill in the exception table entry cache.
2200 For each entry that refers to a standard ARM-defined personality
2201 routine, extract the frame unwinding instructions (from either
2202 the index or the table section). The unwinding instructions
2204 - extracting them from the rest of the table data
2205 - converting to host endianness
2206 - appending the implicit 0xb0 ("Finish") code
2208 The extracted and normalized instructions are stored for later
2209 retrieval by the arm_find_exidx_entry routine. */
/* new_objfile observer: parse OBJFILE's .ARM.exidx/.ARM.extab
   sections and cache the normalized unwind instructions per entry.  */
2212 arm_exidx_new_objfile (struct objfile *objfile)
2214 struct cleanup *cleanups = make_cleanup (null_cleanup, NULL);
2215 struct arm_exidx_data *data;
2216 asection *exidx, *extab;
2217 bfd_vma exidx_vma = 0, extab_vma = 0;
2218 bfd_size_type exidx_size = 0, extab_size = 0;
2219 gdb_byte *exidx_data = NULL, *extab_data = NULL;
2222 /* If we've already touched this file, do nothing. */
2223 if (!objfile || objfile_data (objfile, arm_exidx_data_key) != NULL)
2226 /* Read contents of exception table and index. */
2227 exidx = bfd_get_section_by_name (objfile->obfd, ".ARM.exidx");
2230 exidx_vma = bfd_section_vma (objfile->obfd, exidx);
2231 exidx_size = bfd_get_section_size (exidx);
2232 exidx_data = xmalloc (exidx_size);
2233 make_cleanup (xfree, exidx_data);
2235 if (!bfd_get_section_contents (objfile->obfd, exidx,
2236 exidx_data, 0, exidx_size))
2238 do_cleanups (cleanups);
2243 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2246 extab_vma = bfd_section_vma (objfile->obfd, extab);
2247 extab_size = bfd_get_section_size (extab);
2248 extab_data = xmalloc (extab_size);
2249 make_cleanup (xfree, extab_data);
2251 if (!bfd_get_section_contents (objfile->obfd, extab,
2252 extab_data, 0, extab_size))
2254 do_cleanups (cleanups);
2259 /* Allocate exception table data structure. */
2260 data = OBSTACK_ZALLOC (&objfile->objfile_obstack, struct arm_exidx_data);
2261 set_objfile_data (objfile, arm_exidx_data_key, data);
2262 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
2263 objfile->obfd->section_count,
2264 VEC(arm_exidx_entry_s) *);
2266 /* Fill in exception table. */
2267 for (i = 0; i < exidx_size / 8; i++)
2269 struct arm_exidx_entry new_exidx_entry;
2270 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8);
2271 bfd_vma val = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8 + 4);
2272 bfd_vma addr = 0, word = 0;
2273 int n_bytes = 0, n_words = 0;
2274 struct obj_section *sec;
2275 gdb_byte *entry = NULL;
2277 /* Extract address of start of function. */
/* IDX is a 31-bit place-relative offset; sign-extend it.  */
2278 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2279 idx += exidx_vma + i * 8;
2281 /* Find section containing function and compute section offset. */
2282 sec = arm_obj_section_from_vma (objfile, idx);
2285 idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);
2287 /* Determine address of exception table entry. */
2290 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2292 else if ((val & 0xff000000) == 0x80000000)
2294 /* Exception table entry embedded in .ARM.exidx
2295 -- must be short form. */
2299 else if (!(val & 0x80000000))
2301 /* Exception table entry in .ARM.extab. */
2302 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2303 addr += exidx_vma + i * 8 + 4;
2305 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_size)
2307 word = bfd_h_get_32 (objfile->obfd,
2308 extab_data + addr - extab_vma);
/* Dispatch on the personality-routine encoding of the first
   extab word.  */
2311 if ((word & 0xff000000) == 0x80000000)
2316 else if ((word & 0xff000000) == 0x81000000
2317 || (word & 0xff000000) == 0x82000000)
/* Long-form standard personality: word count in bits 16-23.  */
2321 n_words = ((word >> 16) & 0xff);
2323 else if (!(word & 0x80000000))
2326 struct obj_section *pers_sec;
2327 int gnu_personality = 0;
2329 /* Custom personality routine. */
2330 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2331 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2333 /* Check whether we've got one of the variants of the
2334 GNU personality routines. */
2335 pers_sec = arm_obj_section_from_vma (objfile, pers);
2338 static const char *personality[] =
2340 "__gcc_personality_v0",
2341 "__gxx_personality_v0",
2342 "__gcj_personality_v0",
2343 "__gnu_objc_personality_v0",
2347 CORE_ADDR pc = pers + obj_section_offset (pers_sec);
2350 for (k = 0; personality[k]; k++)
2351 if (lookup_minimal_symbol_by_pc_name
2352 (pc, personality[k], objfile))
2354 gnu_personality = 1;
2359 /* If so, the next word contains a word count in the high
2360 byte, followed by the same unwind instructions as the
2361 pre-defined forms. */
2363 && addr + 4 <= extab_vma + extab_size)
2365 word = bfd_h_get_32 (objfile->obfd,
2366 extab_data + addr - extab_vma);
2369 n_words = ((word >> 24) & 0xff);
2375 /* Sanity check address. */
2377 if (addr < extab_vma || addr + 4 * n_words > extab_vma + extab_size)
2378 n_words = n_bytes = 0;
2380 /* The unwind instructions reside in WORD (only the N_BYTES least
2381 significant bytes are valid), followed by N_WORDS words in the
2382 extab section starting at ADDR. */
2383 if (n_bytes || n_words)
2385 gdb_byte *p = entry = obstack_alloc (&objfile->objfile_obstack,
2386 n_bytes + n_words * 4 + 1);
/* Copy the in-word bytes, then the follow-on words, converting
   to host byte order as we go.  */
2389 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2393 word = bfd_h_get_32 (objfile->obfd,
2394 extab_data + addr - extab_vma);
2397 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2398 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2399 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2400 *p++ = (gdb_byte) (word & 0xff);
2403 /* Implied "Finish" to terminate the list. */
2407 /* Push entry onto vector. They are guaranteed to always
2408 appear in order of increasing addresses. */
2409 new_exidx_entry.addr = idx;
2410 new_exidx_entry.entry = entry;
2411 VEC_safe_push (arm_exidx_entry_s,
2412 data->section_maps[sec->the_bfd_section->index],
2416 do_cleanups (cleanups);
2419 /* Search for the exception table entry covering MEMADDR. If one is found,
2420 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2421 set *START to the start of the region covered by this entry. */
2424 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2426 struct obj_section *sec;
2428 sec = find_pc_section (memaddr);
2431 struct arm_exidx_data *data;
2432 VEC(arm_exidx_entry_s) *map;
2433 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2436 data = objfile_data (sec->objfile, arm_exidx_data_key);
2439 map = data->section_maps[sec->the_bfd_section->index];
2440 if (!VEC_empty (arm_exidx_entry_s, map))
2442 struct arm_exidx_entry *map_sym;
2444 idx = VEC_lower_bound (arm_exidx_entry_s, map, &map_key,
2445 arm_compare_exidx_entries);
2447 /* VEC_lower_bound finds the earliest ordered insertion
2448 point. If the following symbol starts at this exact
2449 address, we use that; otherwise, the preceding
2450 exception table entry covers this address. */
2451 if (idx < VEC_length (arm_exidx_entry_s, map))
2453 map_sym = VEC_index (arm_exidx_entry_s, map, idx);
2454 if (map_sym->addr == map_key.addr)
2457 *start = map_sym->addr + obj_section_addr (sec);
2458 return map_sym->entry;
2464 map_sym = VEC_index (arm_exidx_entry_s, map, idx - 1);
2466 *start = map_sym->addr + obj_section_addr (sec);
2467 return map_sym->entry;
/* NOTE(review): this extract appears to be missing interior lines (the
   original line numbers fused into each line are not contiguous).  The
   unwind-opcode decoder below follows ARM EHABI section 9.3; verify any
   edit against upstream gdb/arm-tdep.c.  */
2476 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2477 instruction list from the ARM exception table entry ENTRY, allocate and
2478 return a prologue cache structure describing how to unwind this frame.
2480 Return NULL if the unwinding instruction list contains a "spare",
2481 "reserved" or "refuse to unwind" instruction as defined in section
2482 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2483 for the ARM Architecture" document. */
2485 static struct arm_prologue_cache *
2486 arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2491 struct arm_prologue_cache *cache;
2492 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2493 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2499 /* Whenever we reload SP, we actually have to retrieve its
2500 actual value in the current frame. */
2503 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2505 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2506 vsp = get_frame_register_unsigned (this_frame, reg);
2510 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
2511 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2517 /* Decode next unwind instruction. */
2520 if ((insn & 0xc0) == 0)
2522 int offset = insn & 0x3f;
2523 vsp += (offset << 2) + 4;
2525 else if ((insn & 0xc0) == 0x40)
2527 int offset = insn & 0x3f;
2528 vsp -= (offset << 2) + 4;
2530 else if ((insn & 0xf0) == 0x80)
2532 int mask = ((insn & 0xf) << 8) | *entry++;
2535 /* The special case of an all-zero mask identifies
2536 "Refuse to unwind". We return NULL to fall back
2537 to the prologue analyzer. */
2541 /* Pop registers r4..r15 under mask. */
2542 for (i = 0; i < 12; i++)
2543 if (mask & (1 << i))
2545 cache->saved_regs[4 + i].addr = vsp;
2549 /* Special-case popping SP -- we need to reload vsp. */
2550 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2553 else if ((insn & 0xf0) == 0x90)
2555 int reg = insn & 0xf;
2557 /* Reserved cases. */
2558 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2561 /* Set SP from another register and mark VSP for reload. */
2562 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2565 else if ((insn & 0xf0) == 0xa0)
2567 int count = insn & 0x7;
2568 int pop_lr = (insn & 0x8) != 0;
2571 /* Pop r4..r[4+count]. */
2572 for (i = 0; i <= count; i++)
2574 cache->saved_regs[4 + i].addr = vsp;
2578 /* If indicated by flag, pop LR as well. */
2581 cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
2585 else if (insn == 0xb0)
2587 /* We could only have updated PC by popping into it; if so, it
2588 will show up as address. Otherwise, copy LR into PC. */
2589 if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
2590 cache->saved_regs[ARM_PC_REGNUM]
2591 = cache->saved_regs[ARM_LR_REGNUM];
2596 else if (insn == 0xb1)
2598 int mask = *entry++;
2601 /* All-zero mask and mask >= 16 is "spare". */
2602 if (mask == 0 || mask >= 16)
2605 /* Pop r0..r3 under mask. */
2606 for (i = 0; i < 4; i++)
2607 if (mask & (1 << i))
2609 cache->saved_regs[i].addr = vsp;
2613 else if (insn == 0xb2)
2615 ULONGEST offset = 0;
2620 offset |= (*entry & 0x7f) << shift;
2623 while (*entry++ & 0x80);
2625 vsp += 0x204 + (offset << 2);
2627 else if (insn == 0xb3)
2629 int start = *entry >> 4;
2630 int count = (*entry++) & 0xf;
2633 /* Only registers D0..D15 are valid here. */
2634 if (start + count >= 16)
2637 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2638 for (i = 0; i <= count; i++)
2640 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2644 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2647 else if ((insn & 0xf8) == 0xb8)
2649 int count = insn & 0x7;
2652 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2653 for (i = 0; i <= count; i++)
2655 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2659 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2662 else if (insn == 0xc6)
2664 int start = *entry >> 4;
2665 int count = (*entry++) & 0xf;
2668 /* Only registers WR0..WR15 are valid. */
2669 if (start + count >= 16)
2672 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2673 for (i = 0; i <= count; i++)
2675 cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
2679 else if (insn == 0xc7)
2681 int mask = *entry++;
2684 /* All-zero mask and mask >= 16 is "spare". */
2685 if (mask == 0 || mask >= 16)
2688 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2689 for (i = 0; i < 4; i++)
2690 if (mask & (1 << i))
2692 cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
2696 else if ((insn & 0xf8) == 0xc0)
2698 int count = insn & 0x7;
2701 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2702 for (i = 0; i <= count; i++)
2704 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
2708 else if (insn == 0xc8)
2710 int start = *entry >> 4;
2711 int count = (*entry++) & 0xf;
2714 /* Only registers D0..D31 are valid. */
2715 if (start + count >= 16)
2718 /* Pop VFP double-precision registers
2719 D[16+start]..D[16+start+count]. */
2720 for (i = 0; i <= count; i++)
2722 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
2726 else if (insn == 0xc9)
2728 int start = *entry >> 4;
2729 int count = (*entry++) & 0xf;
2732 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2733 for (i = 0; i <= count; i++)
2735 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2739 else if ((insn & 0xf8) == 0xd0)
2741 int count = insn & 0x7;
2744 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2745 for (i = 0; i <= count; i++)
2747 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2753 /* Everything else is "spare". */
2758 /* If we restore SP from a register, assume this was the frame register.
2759 Otherwise just fall back to SP as frame register. */
2760 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2761 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2763 cache->framereg = ARM_SP_REGNUM;
2765 /* Determine offset to previous frame. */
2767 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2769 /* We already got the previous SP. */
2770 cache->prev_sp = vsp;
/* NOTE(review): interior lines of this sniffer are missing from this
   extract (fused line numbers are non-contiguous); verify against
   upstream gdb/arm-tdep.c before further edits.  */
2775 /* Unwinding via ARM exception table entries. Note that the sniffer
2776 already computes a filled-in prologue cache, which is then used
2777 with the same arm_prologue_this_id and arm_prologue_prev_register
2778 routines also used for prologue-parsing based unwinding. */
2781 arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2782 struct frame_info *this_frame,
2783 void **this_prologue_cache)
2785 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2786 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2787 CORE_ADDR addr_in_block, exidx_region, func_start;
2788 struct arm_prologue_cache *cache;
2791 /* See if we have an ARM exception table entry covering this address. */
2792 addr_in_block = get_frame_address_in_block (this_frame);
2793 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2797 /* The ARM exception table does not describe unwind information
2798 for arbitrary PC values, but is guaranteed to be correct only
2799 at call sites. We have to decide here whether we want to use
2800 ARM exception table information for this frame, or fall back
2801 to using prologue parsing. (Note that if we have DWARF CFI,
2802 this sniffer isn't even called -- CFI is always preferred.)
2804 Before we make this decision, however, we check whether we
2805 actually have *symbol* information for the current frame.
2806 If not, prologue parsing would not work anyway, so we might
2807 as well use the exception table and hope for the best. */
2808 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2812 /* If the next frame is "normal", we are at a call site in this
2813 frame, so exception information is guaranteed to be valid. */
2814 if (get_next_frame (this_frame)
2815 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2818 /* We also assume exception information is valid if we're currently
2819 blocked in a system call. The system library is supposed to
2820 ensure this, so that e.g. pthread cancellation works. */
2821 if (arm_frame_is_thumb (this_frame))
2825 if (safe_read_memory_integer (get_frame_pc (this_frame) - 2, 2,
2826 byte_order_for_code, &insn)
2827 && (insn & 0xff00) == 0xdf00 /* svc */)
2834 if (safe_read_memory_integer (get_frame_pc (this_frame) - 4, 4,
2835 byte_order_for_code, &insn)
2836 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2840 /* Bail out if we don't know that exception information is valid. */
2844 /* The ARM exception index does not mark the *end* of the region
2845 covered by the entry, and some functions will not have any entry.
2846 To correctly recognize the end of the covered region, the linker
2847 should have inserted dummy records with a CANTUNWIND marker.
2849 Unfortunately, current versions of GNU ld do not reliably do
2850 this, and thus we may have found an incorrect entry above.
2851 As a (temporary) sanity check, we only use the entry if it
2852 lies *within* the bounds of the function. Note that this check
2853 might reject perfectly valid entries that just happen to cover
2854 multiple functions; therefore this check ought to be removed
2855 once the linker is fixed. */
2856 if (func_start > exidx_region)
2860 /* Decode the list of unwinding instructions into a prologue cache.
2861 Note that this may fail due to e.g. a "refuse to unwind" code. */
2862 cache = arm_exidx_fill_cache (this_frame, entry)
2866 *this_prologue_cache = cache;
2870 struct frame_unwind arm_exidx_unwind = {
2872 arm_prologue_this_id,
2873 arm_prologue_prev_register,
2875 arm_exidx_unwind_sniffer
2878 static struct arm_prologue_cache *
2879 arm_make_stub_cache (struct frame_info *this_frame)
2881 struct arm_prologue_cache *cache;
2883 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2884 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2886 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2891 /* Our frame ID for a stub frame is the current SP and LR. */
2894 arm_stub_this_id (struct frame_info *this_frame,
2896 struct frame_id *this_id)
2898 struct arm_prologue_cache *cache;
2900 if (*this_cache == NULL)
2901 *this_cache = arm_make_stub_cache (this_frame);
2902 cache = *this_cache;
2904 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
2908 arm_stub_unwind_sniffer (const struct frame_unwind *self,
2909 struct frame_info *this_frame,
2910 void **this_prologue_cache)
2912 CORE_ADDR addr_in_block;
2915 addr_in_block = get_frame_address_in_block (this_frame);
2916 if (in_plt_section (addr_in_block, NULL)
2917 /* We also use the stub winder if the target memory is unreadable
2918 to avoid having the prologue unwinder trying to read it. */
2919 || target_read_memory (get_frame_pc (this_frame), dummy, 4) != 0)
2925 struct frame_unwind arm_stub_unwind = {
2928 arm_prologue_prev_register,
2930 arm_stub_unwind_sniffer
2934 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
2936 struct arm_prologue_cache *cache;
2938 if (*this_cache == NULL)
2939 *this_cache = arm_make_prologue_cache (this_frame);
2940 cache = *this_cache;
2942 return cache->prev_sp - cache->framesize;
2945 struct frame_base arm_normal_base = {
2946 &arm_prologue_unwind,
2947 arm_normal_frame_base,
2948 arm_normal_frame_base,
2949 arm_normal_frame_base
2952 /* Assuming THIS_FRAME is a dummy, return the frame ID of that
2953 dummy frame. The frame ID's base needs to match the TOS value
2954 saved by save_dummy_frame_tos() and returned from
2955 arm_push_dummy_call, and the PC needs to match the dummy frame's
2958 static struct frame_id
2959 arm_dummy_id (struct gdbarch *gdbarch, struct frame_info *this_frame)
2961 return frame_id_build (get_frame_register_unsigned (this_frame,
2963 get_frame_pc (this_frame));
2966 /* Given THIS_FRAME, find the previous frame's resume PC (which will
2967 be used to construct the previous frame's ID, after looking up the
2968 containing function). */
2971 arm_unwind_pc (struct gdbarch *gdbarch, struct frame_info *this_frame)
2974 pc = frame_unwind_register_unsigned (this_frame, ARM_PC_REGNUM);
2975 return arm_addr_bits_remove (gdbarch, pc);
2979 arm_unwind_sp (struct gdbarch *gdbarch, struct frame_info *this_frame)
2981 return frame_unwind_register_unsigned (this_frame, ARM_SP_REGNUM);
2984 static struct value *
2985 arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
2988 struct gdbarch * gdbarch = get_frame_arch (this_frame);
2990 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
2995 /* The PC is normally copied from the return column, which
2996 describes saves of LR. However, that version may have an
2997 extra bit set to indicate Thumb state. The bit is not
2999 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3000 return frame_unwind_got_constant (this_frame, regnum,
3001 arm_addr_bits_remove (gdbarch, lr));
3004 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3005 cpsr = get_frame_register_unsigned (this_frame, regnum);
3006 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3007 if (IS_THUMB_ADDR (lr))
3011 return frame_unwind_got_constant (this_frame, regnum, cpsr);
3014 internal_error (__FILE__, __LINE__,
3015 _("Unexpected register %d"), regnum);
3020 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3021 struct dwarf2_frame_state_reg *reg,
3022 struct frame_info *this_frame)
3028 reg->how = DWARF2_FRAME_REG_FN;
3029 reg->loc.fn = arm_dwarf2_prev_register;
3032 reg->how = DWARF2_FRAME_REG_CFA;
/* NOTE(review): interior lines (braces, scan_pc/buf declarations, the
   found_return assignments) are missing from this extract; verify against
   upstream gdb/arm-tdep.c before further edits.  */
3037 /* Return true if we are in the function's epilogue, i.e. after the
3038 instruction that destroyed the function's stack frame. */
3041 thumb_in_function_epilogue_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3043 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3044 unsigned int insn, insn2;
3045 int found_return = 0, found_stack_adjust = 0;
3046 CORE_ADDR func_start, func_end;
3050 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3053 /* The epilogue is a sequence of instructions along the following lines:
3055 - add stack frame size to SP or FP
3056 - [if frame pointer used] restore SP from FP
3057 - restore registers from SP [may include PC]
3058 - a return-type instruction [if PC wasn't already restored]
3060 In a first pass, we scan forward from the current PC and verify the
3061 instructions we find as compatible with this sequence, ending in a
3064 However, this is not sufficient to distinguish indirect function calls
3065 within a function from indirect tail calls in the epilogue in some cases.
3066 Therefore, if we didn't already find any SP-changing instruction during
3067 forward scan, we add a backward scanning heuristic to ensure we actually
3068 are in the epilogue. */
3071 while (scan_pc < func_end && !found_return)
3073 if (target_read_memory (scan_pc, buf, 2))
3077 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3079 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3081 else if (insn == 0x46f7) /* mov pc, lr */
3083 else if (insn == 0x46bd) /* mov sp, r7 */
3084 found_stack_adjust = 1;
3085 else if ((insn & 0xff00) == 0xb000) /* add sp, imm or sub sp, imm */
3086 found_stack_adjust = 1;
3087 else if ((insn & 0xfe00) == 0xbc00) /* pop <registers> */
3089 found_stack_adjust = 1;
3090 if (insn & 0x0100) /* <registers> include PC. */
3093 else if ((insn & 0xe000) == 0xe000) /* 32-bit Thumb-2 instruction */
3095 if (target_read_memory (scan_pc, buf, 2))
3099 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3101 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3103 found_stack_adjust = 1;
3104 if (insn2 & 0x8000) /* <registers> include PC. */
3107 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3108 && (insn2 & 0x0fff) == 0x0b04)
3110 found_stack_adjust = 1;
3111 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3114 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3115 && (insn2 & 0x0e00) == 0x0a00)
3116 found_stack_adjust = 1;
3127 /* Since any instruction in the epilogue sequence, with the possible
3128 exception of return itself, updates the stack pointer, we need to
3129 scan backwards for at most one instruction. Try either a 16-bit or
3130 a 32-bit instruction. This is just a heuristic, so we do not worry
3131 too much about false positives. */
3133 if (!found_stack_adjust)
3135 if (pc - 4 < func_start)
3137 if (target_read_memory (pc - 4, buf, 4))
3140 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3141 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3143 if (insn2 == 0x46bd) /* mov sp, r7 */
3144 found_stack_adjust = 1;
3145 else if ((insn2 & 0xff00) == 0xb000) /* add sp, imm or sub sp, imm */
3146 found_stack_adjust = 1;
3147 else if ((insn2 & 0xff00) == 0xbc00) /* pop <registers> without PC */
3148 found_stack_adjust = 1;
3149 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3150 found_stack_adjust = 1;
3151 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3152 && (insn2 & 0x0fff) == 0x0b04)
3153 found_stack_adjust = 1;
3154 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3155 && (insn2 & 0x0e00) == 0x0a00)
3156 found_stack_adjust = 1;
3159 return found_stack_adjust;
/* NOTE(review): interior lines (braces, the found_return bookkeeping and
   early returns) are missing from this extract; verify against upstream
   gdb/arm-tdep.c before further edits.  */
3162 /* Return true if we are in the function's epilogue, i.e. after the
3163 instruction that destroyed the function's stack frame. */
3166 arm_in_function_epilogue_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3168 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3170 int found_return, found_stack_adjust;
3171 CORE_ADDR func_start, func_end;
3173 if (arm_pc_is_thumb (gdbarch, pc))
3174 return thumb_in_function_epilogue_p (gdbarch, pc);
3176 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3179 /* We are in the epilogue if the previous instruction was a stack
3180 adjustment and the next instruction is a possible return (bx, mov
3181 pc, or pop). We could have to scan backwards to find the stack
3182 adjustment, or forwards to find the return, but this is a decent
3183 approximation. First scan forwards. */
3186 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3187 if (bits (insn, 28, 31) != INST_NV)
3189 if ((insn & 0x0ffffff0) == 0x012fff10)
3192 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3195 else if ((insn & 0x0fff0000) == 0x08bd0000
3196 && (insn & 0x0000c000) != 0)
3197 /* POP (LDMIA), including PC or LR. */
3204 /* Scan backwards. This is just a heuristic, so do not worry about
3205 false positives from mode changes. */
3207 if (pc < func_start + 4)
3210 found_stack_adjust = 0;
3211 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
3212 if (bits (insn, 28, 31) != INST_NV)
3214 if ((insn & 0x0df0f000) == 0x0080d000)
3215 /* ADD SP (register or immediate). */
3216 found_stack_adjust = 1;
3217 else if ((insn & 0x0df0f000) == 0x0040d000)
3218 /* SUB SP (register or immediate). */
3219 found_stack_adjust = 1;
3220 else if ((insn & 0x0ffffff0) == 0x01a0d000)
3222 found_stack_adjust = 1;
3223 else if ((insn & 0x0fff0000) == 0x08bd0000)
3225 found_stack_adjust = 1;
3228 if (found_stack_adjust)
/* When arguments must be pushed onto the stack, they go on in reverse
   order.  The code below implements a FILO (stack) to do this.
   Each node owns a heap copy of LEN bytes of argument data.  */

struct stack_item
{
  int len;
  struct stack_item *prev;
  void *data;
};
3245 static struct stack_item *
3246 push_stack_item (struct stack_item *prev, const void *contents, int len)
3248 struct stack_item *si;
3249 si = xmalloc (sizeof (struct stack_item));
3250 si->data = xmalloc (len);
3253 memcpy (si->data, contents, len);
3257 static struct stack_item *
3258 pop_stack_item (struct stack_item *si)
3260 struct stack_item *dead = si;
3268 /* Return the alignment (in bytes) of the given type. */
3271 arm_type_align (struct type *t)
3277 t = check_typedef (t);
3278 switch (TYPE_CODE (t))
3281 /* Should never happen. */
3282 internal_error (__FILE__, __LINE__, _("unknown type alignment"));
3286 case TYPE_CODE_ENUM:
3290 case TYPE_CODE_RANGE:
3291 case TYPE_CODE_BITSTRING:
3293 case TYPE_CODE_CHAR:
3294 case TYPE_CODE_BOOL:
3295 return TYPE_LENGTH (t);
3297 case TYPE_CODE_ARRAY:
3298 case TYPE_CODE_COMPLEX:
3299 /* TODO: What about vector types? */
3300 return arm_type_align (TYPE_TARGET_TYPE (t));
3302 case TYPE_CODE_STRUCT:
3303 case TYPE_CODE_UNION:
3305 for (n = 0; n < TYPE_NFIELDS (t); n++)
3307 falign = arm_type_align (TYPE_FIELD_TYPE (t, n));
/* Possible base types for a candidate for passing and returning in
   VFP registers.  */

enum arm_vfp_cprc_base_type
  {
    VFP_CPRC_UNKNOWN,
    VFP_CPRC_SINGLE,
    VFP_CPRC_DOUBLE,
    VFP_CPRC_VEC64,
    VFP_CPRC_VEC128
  };
3327 /* The length of one element of base type B. */
3330 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3334 case VFP_CPRC_SINGLE:
3336 case VFP_CPRC_DOUBLE:
3338 case VFP_CPRC_VEC64:
3340 case VFP_CPRC_VEC128:
3343 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3348 /* The character ('s', 'd' or 'q') for the type of VFP register used
3349 for passing base type B. */
3352 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3356 case VFP_CPRC_SINGLE:
3358 case VFP_CPRC_DOUBLE:
3360 case VFP_CPRC_VEC64:
3362 case VFP_CPRC_VEC128:
3365 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
/* NOTE(review): interior lines (braces, local declarations, several
   return statements) are missing from this extract; verify against
   upstream gdb/arm-tdep.c before further edits.  */
3370 /* Determine whether T may be part of a candidate for passing and
3371 returning in VFP registers, ignoring the limit on the total number
3372 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3373 classification of the first valid component found; if it is not
3374 VFP_CPRC_UNKNOWN, all components must have the same classification
3375 as *BASE_TYPE. If it is found that T contains a type not permitted
3376 for passing and returning in VFP registers, a type differently
3377 classified from *BASE_TYPE, or two types differently classified
3378 from each other, return -1, otherwise return the total number of
3379 base-type elements found (possibly 0 in an empty structure or
3380 array). Vectors and complex types are not currently supported,
3381 matching the generic AAPCS support. */
3384 arm_vfp_cprc_sub_candidate (struct type *t,
3385 enum arm_vfp_cprc_base_type *base_type)
3387 t = check_typedef (t);
3388 switch (TYPE_CODE (t))
3391 switch (TYPE_LENGTH (t))
3394 if (*base_type == VFP_CPRC_UNKNOWN)
3395 *base_type = VFP_CPRC_SINGLE;
3396 else if (*base_type != VFP_CPRC_SINGLE)
3401 if (*base_type == VFP_CPRC_UNKNOWN)
3402 *base_type = VFP_CPRC_DOUBLE;
3403 else if (*base_type != VFP_CPRC_DOUBLE)
3412 case TYPE_CODE_ARRAY:
3416 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t), base_type);
3419 if (TYPE_LENGTH (t) == 0)
3421 gdb_assert (count == 0);
3424 else if (count == 0)
3426 unitlen = arm_vfp_cprc_unit_length (*base_type);
3427 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3428 return TYPE_LENGTH (t) / unitlen;
3432 case TYPE_CODE_STRUCT:
3437 for (i = 0; i < TYPE_NFIELDS (t); i++)
3439 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3441 if (sub_count == -1)
3445 if (TYPE_LENGTH (t) == 0)
3447 gdb_assert (count == 0);
3450 else if (count == 0)
3452 unitlen = arm_vfp_cprc_unit_length (*base_type);
3453 if (TYPE_LENGTH (t) != unitlen * count)
3458 case TYPE_CODE_UNION:
3463 for (i = 0; i < TYPE_NFIELDS (t); i++)
3465 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3467 if (sub_count == -1)
3469 count = (count > sub_count ? count : sub_count);
3471 if (TYPE_LENGTH (t) == 0)
3473 gdb_assert (count == 0);
3476 else if (count == 0)
3478 unitlen = arm_vfp_cprc_unit_length (*base_type);
3479 if (TYPE_LENGTH (t) != unitlen * count)
3491 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3492 if passed to or returned from a non-variadic function with the VFP
3493 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3494 *BASE_TYPE to the base type for T and *COUNT to the number of
3495 elements of that base type before returning. */
3498 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3501 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3502 int c = arm_vfp_cprc_sub_candidate (t, &b);
3503 if (c <= 0 || c > 4)
3510 /* Return 1 if the VFP ABI should be used for passing arguments to and
3511 returning values from a function of type FUNC_TYPE, 0
3515 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3517 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3518 /* Variadic functions always use the base ABI. Assume that functions
3519 without debug info are not variadic. */
3520 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3522 /* The VFP ABI is only supported as a variant of AAPCS. */
3523 if (tdep->arm_abi != ARM_ABI_AAPCS)
3525 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3528 /* We currently only support passing parameters in integer registers, which
3529 conforms with GCC's default model, and VFP argument passing following
3530 the VFP variant of AAPCS. Several other variants exist and
3531 we should probably support some of them based on the selected ABI. */
3534 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
3535 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3536 struct value **args, CORE_ADDR sp, int struct_return,
3537 CORE_ADDR struct_addr)
3539 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3543 struct stack_item *si = NULL;
3546 unsigned vfp_regs_free = (1 << 16) - 1;
3548 /* Determine the type of this function and whether the VFP ABI
3550 ftype = check_typedef (value_type (function));
3551 if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3552 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3553 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
3555 /* Set the return address. For the ARM, the return breakpoint is
3556 always at BP_ADDR. */
3557 if (arm_pc_is_thumb (gdbarch, bp_addr))
3559 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
3561 /* Walk through the list of args and determine how large a temporary
3562 stack is required. Need to take care here as structs may be
3563 passed on the stack, and we have to to push them. */
3566 argreg = ARM_A1_REGNUM;
3569 /* The struct_return pointer occupies the first parameter
3570 passing register. */
3574 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
3575 gdbarch_register_name (gdbarch, argreg),
3576 paddress (gdbarch, struct_addr));
3577 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3581 for (argnum = 0; argnum < nargs; argnum++)
3584 struct type *arg_type;
3585 struct type *target_type;
3586 enum type_code typecode;
3587 const bfd_byte *val;
3589 enum arm_vfp_cprc_base_type vfp_base_type;
3591 int may_use_core_reg = 1;
3593 arg_type = check_typedef (value_type (args[argnum]));
3594 len = TYPE_LENGTH (arg_type);
3595 target_type = TYPE_TARGET_TYPE (arg_type);
3596 typecode = TYPE_CODE (arg_type);
3597 val = value_contents (args[argnum]);
3599 align = arm_type_align (arg_type);
3600 /* Round alignment up to a whole number of words. */
3601 align = (align + INT_REGISTER_SIZE - 1) & ~(INT_REGISTER_SIZE - 1);
3602 /* Different ABIs have different maximum alignments. */
3603 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3605 /* The APCS ABI only requires word alignment. */
3606 align = INT_REGISTER_SIZE;
3610 /* The AAPCS requires at most doubleword alignment. */
3611 if (align > INT_REGISTER_SIZE * 2)
3612 align = INT_REGISTER_SIZE * 2;
3616 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3624 /* Because this is a CPRC it cannot go in a core register or
3625 cause a core register to be skipped for alignment.
3626 Either it goes in VFP registers and the rest of this loop
3627 iteration is skipped for this argument, or it goes on the
3628 stack (and the stack alignment code is correct for this
3630 may_use_core_reg = 0;
3632 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3633 shift = unit_length / 4;
3634 mask = (1 << (shift * vfp_base_count)) - 1;
3635 for (regno = 0; regno < 16; regno += shift)
3636 if (((vfp_regs_free >> regno) & mask) == mask)
3645 vfp_regs_free &= ~(mask << regno);
3646 reg_scaled = regno / shift;
3647 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3648 for (i = 0; i < vfp_base_count; i++)
3652 if (reg_char == 'q')
3653 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
3654 val + i * unit_length);
3657 sprintf (name_buf, "%c%d", reg_char, reg_scaled + i);
3658 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3660 regcache_cooked_write (regcache, regnum,
3661 val + i * unit_length);
3668 /* This CPRC could not go in VFP registers, so all VFP
3669 registers are now marked as used. */
3674 /* Push stack padding for dowubleword alignment. */
3675 if (nstack & (align - 1))
3677 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3678 nstack += INT_REGISTER_SIZE;
3681 /* Doubleword aligned quantities must go in even register pairs. */
3682 if (may_use_core_reg
3683 && argreg <= ARM_LAST_ARG_REGNUM
3684 && align > INT_REGISTER_SIZE
3688 /* If the argument is a pointer to a function, and it is a
3689 Thumb function, create a LOCAL copy of the value and set
3690 the THUMB bit in it. */
3691 if (TYPE_CODE_PTR == typecode
3692 && target_type != NULL
3693 && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
3695 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
3696 if (arm_pc_is_thumb (gdbarch, regval))
3698 bfd_byte *copy = alloca (len);
3699 store_unsigned_integer (copy, len, byte_order,
3700 MAKE_THUMB_ADDR (regval));
3705 /* Copy the argument to general registers or the stack in
3706 register-sized pieces. Large arguments are split between
3707 registers and stack. */
3710 int partial_len = len < INT_REGISTER_SIZE ? len : INT_REGISTER_SIZE;
3712 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
3714 /* The argument is being passed in a general purpose
3717 = extract_unsigned_integer (val, partial_len, byte_order);
3718 if (byte_order == BFD_ENDIAN_BIG)
3719 regval <<= (INT_REGISTER_SIZE - partial_len) * 8;
3721 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
3723 gdbarch_register_name
3725 phex (regval, INT_REGISTER_SIZE));
3726 regcache_cooked_write_unsigned (regcache, argreg, regval);
3731 /* Push the arguments onto the stack. */
3733 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3735 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3736 nstack += INT_REGISTER_SIZE;
3743 /* If we have an odd number of words to push, then decrement the stack
3744 by one word now, so first stack argument will be dword aligned. */
3751 write_memory (sp, si->data, si->len);
3752 si = pop_stack_item (si);
/* Finally, update the SP register. */
3756 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3762 /* Always align the frame to an 8-byte boundary. This is required on
3763 some platforms and harmless on the rest. */
3766 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3768 /* Align the stack to eight bytes. */
3769 return sp & ~ (CORE_ADDR) 7;
/* Print the FPA exception flags in FLAGS as their mnemonic names.
   Bits 0..4 correspond to the five IEEE exception conditions.  */
print_fpu_flags (int flags)
  if (flags & (1 << 0))
    fputs ("IVO ", stdout);	/* invalid operation */
  if (flags & (1 << 1))
    fputs ("DVZ ", stdout);	/* divide by zero */
  if (flags & (1 << 2))
    fputs ("OFL ", stdout);	/* overflow */
  if (flags & (1 << 3))
    fputs ("UFL ", stdout);	/* underflow */
  if (flags & (1 << 4))
    fputs ("INX ", stdout);	/* inexact */
3788 /* Print interesting information about the floating point processor
3789 (if present) or emulator. */
3791 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
3792 struct frame_info *frame, const char *args)
3794 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
3797 type = (status >> 24) & 127;
3798 if (status & (1 << 31))
3799 printf (_("Hardware FPU type %d\n"), type);
3801 printf (_("Software FPU type %d\n"), type);
3802 /* i18n: [floating point unit] mask */
3803 fputs (_("mask: "), stdout);
3804 print_fpu_flags (status >> 16);
3805 /* i18n: [floating point unit] flags */
3806 fputs (_("flags: "), stdout);
3807 print_fpu_flags (status);
/* Construct the ARM extended floating point type (the 80/96-bit FPA
   register format).  The type is created lazily on first use and
   cached in the per-gdbarch tdep.  */
static struct type *
arm_ext_type (struct gdbarch *gdbarch)
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);

  if (!tdep->arm_ext_type)
    /* Build and cache the float type from the arm_ext format.  */
      = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
			 floatformats_arm_ext);

  return tdep->arm_ext_type;
/* Return (building and caching it on first use) the GDB type used to
   display a 64-bit NEON D register: a union of all the vector and
   scalar views of the register.  */
static struct type *
arm_neon_double_type (struct gdbarch *gdbarch)
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);

  if (tdep->neon_double_type == NULL)
      struct type *t, *elem;

      t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
      /* A D register holds 8 bytes, viewable as vectors of 8-, 16-
	 and 32-bit integers, one 64-bit integer, two single-precision
	 floats, or one double.  */
      elem = builtin_type (gdbarch)->builtin_uint8;
      append_composite_type_field (t, "u8", init_vector_type (elem, 8));
      elem = builtin_type (gdbarch)->builtin_uint16;
      append_composite_type_field (t, "u16", init_vector_type (elem, 4));
      elem = builtin_type (gdbarch)->builtin_uint32;
      append_composite_type_field (t, "u32", init_vector_type (elem, 2));
      elem = builtin_type (gdbarch)->builtin_uint64;
      append_composite_type_field (t, "u64", elem);
      elem = builtin_type (gdbarch)->builtin_float;
      append_composite_type_field (t, "f32", init_vector_type (elem, 2));
      elem = builtin_type (gdbarch)->builtin_double;
      append_composite_type_field (t, "f64", elem);

      /* Mark the union as a vector so it prints compactly.  */
      TYPE_VECTOR (t) = 1;
      TYPE_NAME (t) = "neon_d";
      tdep->neon_double_type = t;

  return tdep->neon_double_type;
/* FIXME: The vector types are not correctly ordered on big-endian
   targets.  Just as s0 is the low bits of d0, d0[0] is also the low
   bits of d0 - regardless of what unit size is being held in d0.  So
   the offset of the first uint8 in d0 is 7, but the offset of the
   first float is 4.  This code works as-is for little-endian
   targets.  */

/* Return (building and caching it on first use) the GDB type used to
   display a 128-bit NEON Q register, analogous to
   arm_neon_double_type above but with twice the element counts.  */
static struct type *
arm_neon_quad_type (struct gdbarch *gdbarch)
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);

  if (tdep->neon_quad_type == NULL)
      struct type *t, *elem;

      t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
      elem = builtin_type (gdbarch)->builtin_uint8;
      append_composite_type_field (t, "u8", init_vector_type (elem, 16));
      elem = builtin_type (gdbarch)->builtin_uint16;
      append_composite_type_field (t, "u16", init_vector_type (elem, 8));
      elem = builtin_type (gdbarch)->builtin_uint32;
      append_composite_type_field (t, "u32", init_vector_type (elem, 4));
      elem = builtin_type (gdbarch)->builtin_uint64;
      append_composite_type_field (t, "u64", init_vector_type (elem, 2));
      elem = builtin_type (gdbarch)->builtin_float;
      append_composite_type_field (t, "f32", init_vector_type (elem, 4));
      elem = builtin_type (gdbarch)->builtin_double;
      append_composite_type_field (t, "f64", init_vector_type (elem, 2));

      TYPE_VECTOR (t) = 1;
      TYPE_NAME (t) = "neon_q";
      tdep->neon_quad_type = t;

  return tdep->neon_quad_type;
/* Return the GDB type object for the "standard" data type of data in
   register REGNUM.  */

static struct type *
arm_register_type (struct gdbarch *gdbarch, int regnum)
  int num_regs = gdbarch_num_regs (gdbarch);

  /* VFP single-precision pseudo registers come right after the raw
     registers.  */
  if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
      && regnum >= num_regs && regnum < num_regs + 32)
    return builtin_type (gdbarch)->builtin_float;

  /* NEON quad pseudo registers follow the 32 VFP pseudos.  */
  if (gdbarch_tdep (gdbarch)->have_neon_pseudos
      && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
    return arm_neon_quad_type (gdbarch);

  /* If the target description has register information, we are only
     in this function so that we can override the types of
     double-precision registers for NEON.  */
  if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
      struct type *t = tdesc_register_type (gdbarch, regnum);

      if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
	  && TYPE_CODE (t) == TYPE_CODE_FLT
	  && gdbarch_tdep (gdbarch)->have_neon)
	return arm_neon_double_type (gdbarch);

  if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
      /* FPA registers: void when absent, extended float otherwise.  */
      if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
	return builtin_type (gdbarch)->builtin_void;

      return arm_ext_type (gdbarch);
  else if (regnum == ARM_SP_REGNUM)
    return builtin_type (gdbarch)->builtin_data_ptr;
  else if (regnum == ARM_PC_REGNUM)
    return builtin_type (gdbarch)->builtin_func_ptr;
  else if (regnum >= ARRAY_SIZE (arm_register_names))
    /* These registers are only supported on targets which supply
       an XML description.  */
    return builtin_type (gdbarch)->builtin_int0;

  /* Everything else (r0-r12, lr, cpsr, ...) is a plain 32-bit word.  */
  return builtin_type (gdbarch)->builtin_uint32;
/* Map a DWARF register REGNUM onto the appropriate GDB register
   number.  */

arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
  /* Core integer regs.  */
  if (reg >= 0 && reg <= 15)

  /* Legacy FPA encoding.  These were once used in a way which
     overlapped with VFP register numbering, so their use is
     discouraged, but GDB doesn't support the ARM toolchain
     which used them for VFP.  */
  if (reg >= 16 && reg <= 23)
    return ARM_F0_REGNUM + reg - 16;

  /* New assignments for the FPA registers.  */
  if (reg >= 96 && reg <= 103)
    return ARM_F0_REGNUM + reg - 96;

  /* WMMX register assignments.  */
  if (reg >= 104 && reg <= 111)
    return ARM_WCGR0_REGNUM + reg - 104;

  if (reg >= 112 && reg <= 127)
    return ARM_WR0_REGNUM + reg - 112;

  if (reg >= 192 && reg <= 199)
    return ARM_WC0_REGNUM + reg - 192;

  /* VFP v2 registers.  A double precision value is actually
     in d1 rather than s2, but the ABI only defines numbering
     for the single precision registers.  This will "just work"
     in GDB for little endian targets (we'll read eight bytes,
     starting in s0 and then progressing to s1), but will be
     reversed on big endian targets with VFP.  This won't
     be a problem for the new Neon quad registers; you're supposed
     to use DW_OP_piece for those.  */
  if (reg >= 64 && reg <= 95)
    /* NOTE(review): name_buf is declared above (elided here); sprintf
       of "s%d" with reg - 64 <= 31 needs at least 4 bytes — confirm
       the buffer size.  */
    sprintf (name_buf, "s%d", reg - 64);
    return user_reg_map_name_to_regnum (gdbarch, name_buf,

  /* VFP v3 / Neon registers.  This range is also used for VFP v2
     registers, except that it now describes d0 instead of s0.  */
  if (reg >= 256 && reg <= 287)
    sprintf (name_buf, "d%d", reg - 256);
    return user_reg_map_name_to_regnum (gdbarch, name_buf,
/* Map GDB internal REGNUM onto the Arm simulator register numbers.  */
arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
  gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));

  /* iWMMXt data registers.  */
  if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
    return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;

  /* iWMMXt control registers.  */
  if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
    return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;

  /* iWMMXt scalar general-purpose registers.  */
  if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
    return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;

  /* Core registers, then FPA registers, then status registers, in
     simulator numbering order.  */
  if (reg < NUM_GREGS)
    return SIM_ARM_R0_REGNUM + reg;

  if (reg < NUM_FREGS)
    return SIM_ARM_FP0_REGNUM + reg;

  if (reg < NUM_SREGS)
    return SIM_ARM_FPS_REGNUM + reg;

  internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
/* NOTE: cagney/2001-08-20: Both convert_from_extended() and
   convert_to_extended() use floatformat_arm_ext_littlebyte_bigword.
   It is thought that this is the floating-point register format on
   little-endian systems.  */

/* Convert an FPA extended-precision value at PTR into format FMT at
   DBL, routing through a host DOUBLEST.  ENDIANESS selects which
   extended layout PTR uses.  */
convert_from_extended (const struct floatformat *fmt, const void *ptr,
		       void *dbl, int endianess)
  if (endianess == BFD_ENDIAN_BIG)
    floatformat_to_doublest (&floatformat_arm_ext_big, ptr, &d);
    /* Little-endian: bytes little-endian within big-endian words.  */
    floatformat_to_doublest (&floatformat_arm_ext_littlebyte_bigword,
  floatformat_from_doublest (fmt, &d, dbl);
/* Inverse of convert_from_extended: convert a value in format FMT at
   PTR into the FPA extended format at DBL, selected by ENDIANESS.  */
convert_to_extended (const struct floatformat *fmt, void *dbl, const void *ptr,
  floatformat_to_doublest (fmt, ptr, &d);
  if (endianess == BFD_ENDIAN_BIG)
    floatformat_from_doublest (&floatformat_arm_ext_big, &d, dbl);
    floatformat_from_doublest (&floatformat_arm_ext_littlebyte_bigword,
/* Return non-zero if the ARM condition code COND holds given the
   flags in STATUS_REG (CPSR).  The returns below correspond, in
   order, to the standard ARM condition encodings.  */
condition_true (unsigned long cond, unsigned long status_reg)
  /* AL ("always") and the NV slot both execute unconditionally
     here.  */
  if (cond == INST_AL || cond == INST_NV)
    return ((status_reg & FLAG_Z) != 0);			/* EQ */
    return ((status_reg & FLAG_Z) == 0);			/* NE */
    return ((status_reg & FLAG_C) != 0);			/* CS */
    return ((status_reg & FLAG_C) == 0);			/* CC */
    return ((status_reg & FLAG_N) != 0);			/* MI */
    return ((status_reg & FLAG_N) == 0);			/* PL */
    return ((status_reg & FLAG_V) != 0);			/* VS */
    return ((status_reg & FLAG_V) == 0);			/* VC */
    return ((status_reg & (FLAG_C | FLAG_Z)) == FLAG_C);	/* HI */
    return ((status_reg & (FLAG_C | FLAG_Z)) != FLAG_C);	/* LS */
    return (((status_reg & FLAG_N) == 0) == ((status_reg & FLAG_V) == 0)); /* GE */
    return (((status_reg & FLAG_N) == 0) != ((status_reg & FLAG_V) == 0)); /* LT */
    return (((status_reg & FLAG_Z) == 0)			/* GT */
	    && (((status_reg & FLAG_N) == 0)
		== ((status_reg & FLAG_V) == 0)));
    return (((status_reg & FLAG_Z) != 0)			/* LE */
	    || (((status_reg & FLAG_N) == 0)
		!= ((status_reg & FLAG_V) == 0)));
/* Evaluate an ARM "shifted register" operand from instruction INST in
   FRAME.  PC_VAL is the prefetched PC value and CARRY the current
   carry flag, both needed for RRX and PC-relative operands.  Returns
   the 32-bit operand value.  */
static unsigned long
shifted_reg_val (struct frame_info *frame, unsigned long inst, int carry,
		 unsigned long pc_val, unsigned long status_reg)
  unsigned long res, shift;
  int rm = bits (inst, 0, 3);
  unsigned long shifttype = bits (inst, 5, 6);

  /* Register-specified shift amount: low byte of Rs (PC reads as
     pc + 8).  */
    int rs = bits (inst, 8, 11);
    shift = (rs == 15 ? pc_val + 8
	     : get_frame_register_unsigned (frame, rs)) & 0xFF;
  /* Immediate shift amount.  */
    shift = bits (inst, 7, 11);

    /* Rm == PC reads as pc + 12 with a register shift, pc + 8
       otherwise.  */
    ? (pc_val + (bit (inst, 4) ? 12 : 8))
    : get_frame_register_unsigned (frame, rm));

  res = shift >= 32 ? 0 : res << shift;			/* LSL */

  res = shift >= 32 ? 0 : res >> shift;			/* LSR */

  /* ASR: replicate the sign bit into the vacated positions.  */
  res = ((res & 0x80000000L)
	 ? ~((~res) >> shift) : res >> shift);

  case 3:			/* ROR/RRX */
    /* RRX: rotate right one bit through the carry flag.  */
    res = (res >> 1) | (carry ? 0x80000000L : 0);
    res = (res >> shift) | (res << (32 - shift));

  return res & 0xffffffff;
/* Return number of 1-bits in VAL.  */

static int
bitcount (unsigned long val)
{
  int count = 0;

  /* Kernighan's method: each iteration clears the rightmost set
     bit, so the loop runs once per 1-bit.  */
  while (val != 0)
    {
      val &= val - 1;
      count++;
    }
  return count;
}
/* Return the size in bytes of the complete Thumb instruction whose
   first halfword is INST1.  */

static int
thumb_insn_size (unsigned short inst1)
{
  /* Everything except the 0b111xx (xx != 00) prefixes is a 16-bit
     instruction; those prefixes introduce 32-bit Thumb-2 encodings.  */
  if ((inst1 & 0xe000) != 0xe000 || (inst1 & 0x1800) == 0)
    return 2;

  return 4;
}
/* Advance the reconstructed IT state ITSTATE past one instruction,
   returning the new state (zero once the IT block is finished).  */

static unsigned int
thumb_advance_itstate (unsigned int itstate)
{
  /* Preserve IT[7:5], the first three bits of the condition.  Shift
     the upcoming condition flags left by one bit.  */
  unsigned int next = (itstate & 0xe0) | ((itstate << 1) & 0x1f);

  /* If we have finished the IT block, clear the state.  */
  if ((next & 0x0f) == 0)
    return 0;

  return next;
}
/* Find the next PC after the current instruction executes.  In some
   cases we can not statically determine the answer (see the IT state
   handling in this function); in that case, a breakpoint may be
   inserted in addition to the returned PC, which will be used to set
   another breakpoint by our caller.  */
thumb_get_next_pc_raw (struct frame_info *frame, CORE_ADDR pc, int insert_bkpt)
  struct gdbarch *gdbarch = get_frame_arch (frame);
  struct address_space *aspace = get_frame_address_space (frame);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned long pc_val = ((unsigned long) pc) + 4;	/* PC after prefetch */
  unsigned short inst1;
  CORE_ADDR nextpc = pc + 2;	/* Default is next instruction.  */
  unsigned long offset;
  ULONGEST status, itstate;

  /* Returned addresses carry the Thumb execution-state bit.  */
  nextpc = MAKE_THUMB_ADDR (nextpc);
  pc_val = MAKE_THUMB_ADDR (pc_val);

  inst1 = read_memory_unsigned_integer (pc, 2, byte_order_for_code);

  /* Thumb-2 conditional execution support.  There are eight bits in
     the CPSR which describe conditional execution state.  Once
     reconstructed (they're in a funny order), the low five bits
     describe the low bit of the condition for each instruction and
     how many instructions remain.  The high three bits describe the
     base condition.  One of the low four bits will be set if an IT
     block is active.  These bits read as zero on earlier
     processors.  */
  status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
  itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);

  /* If-Then handling.  On GNU/Linux, where this routine is used, we
     use an undefined instruction as a breakpoint.  Unlike BKPT, IT
     can disable execution of the undefined instruction.  So we might
     miss the breakpoint if we set it on a skipped conditional
     instruction.  Because conditional instructions can change the
     flags, affecting the execution of further instructions, we may
     need to set two breakpoints.  */

  if (gdbarch_tdep (gdbarch)->thumb2_breakpoint != NULL)
      if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
	  /* An IT instruction.  Because this instruction does not
	     modify the flags, we can accurately predict the next
	     executed instruction.  */
	  itstate = inst1 & 0x00ff;
	  pc += thumb_insn_size (inst1);

	  while (itstate != 0 && ! condition_true (itstate >> 4, status))
	      inst1 = read_memory_unsigned_integer (pc, 2,
						    byte_order_for_code);
	      pc += thumb_insn_size (inst1);
	      itstate = thumb_advance_itstate (itstate);

	  return MAKE_THUMB_ADDR (pc);
      else if (itstate != 0)
	  /* We are in a conditional block.  Check the condition.  */
	  if (! condition_true (itstate >> 4, status))
	      /* Advance to the next executed instruction.  */
	      pc += thumb_insn_size (inst1);
	      itstate = thumb_advance_itstate (itstate);

	      while (itstate != 0 && ! condition_true (itstate >> 4, status))
		  inst1 = read_memory_unsigned_integer (pc, 2,
							byte_order_for_code);
		  pc += thumb_insn_size (inst1);
		  itstate = thumb_advance_itstate (itstate);

	      return MAKE_THUMB_ADDR (pc);
	  else if ((itstate & 0x0f) == 0x08)
	      /* This is the last instruction of the conditional
		 block, and it is executed.  We can handle it normally
		 because the following instruction is not conditional,
		 and we must handle it normally because it is
		 permitted to branch.  Fall through.  */
	      /* There are conditional instructions after this one.
		 If this instruction modifies the flags, then we can
		 not predict what the next executed instruction will
		 be.  Fortunately, this instruction is architecturally
		 forbidden to branch; we know it will fall through.
		 Start by skipping past it.  */
	      pc += thumb_insn_size (inst1);
	      itstate = thumb_advance_itstate (itstate);

	      /* Set a breakpoint on the following instruction.  */
	      gdb_assert ((itstate & 0x0f) != 0);
	      insert_single_step_breakpoint (gdbarch, aspace, pc);
	      cond_negated = (itstate >> 4) & 1;

	      /* Skip all following instructions with the same
		 condition.  If there is a later instruction in the IT
		 block with the opposite condition, set the other
		 breakpoint there.  If not, then set a breakpoint on
		 the instruction after the IT block.  */
		  inst1 = read_memory_unsigned_integer (pc, 2,
							byte_order_for_code);
		  pc += thumb_insn_size (inst1);
		  itstate = thumb_advance_itstate (itstate);
	      while (itstate != 0 && ((itstate >> 4) & 1) == cond_negated);

	      return MAKE_THUMB_ADDR (pc);
  else if (itstate & 0x0f)
      /* We are in a conditional block.  Check the condition.  */
      int cond = itstate >> 4;

      if (! condition_true (cond, status))
	  /* Advance to the next instruction.  All the 32-bit
	     instructions share a common prefix.  */
	  if ((inst1 & 0xe000) == 0xe000 && (inst1 & 0x1800) != 0)
	    return MAKE_THUMB_ADDR (pc + 4);
	  return MAKE_THUMB_ADDR (pc + 2);

      /* Otherwise, handle the instruction normally.  */

  if ((inst1 & 0xff00) == 0xbd00)	/* pop {rlist, pc} */
      /* Fetch the saved PC from the stack.  It's stored above
	 all of the other registers.  */
      offset = bitcount (bits (inst1, 0, 7)) * INT_REGISTER_SIZE;
      sp = get_frame_register_unsigned (frame, ARM_SP_REGNUM);
      nextpc = read_memory_unsigned_integer (sp + offset, 4, byte_order);
  else if ((inst1 & 0xf000) == 0xd000)	/* conditional branch */
      unsigned long cond = bits (inst1, 8, 11);
      if (cond == 0x0f)  /* 0x0f = SWI */
	  struct gdbarch_tdep *tdep;
	  tdep = gdbarch_tdep (gdbarch);

	  /* Let the OS-specific code predict the PC after a syscall.  */
	  if (tdep->syscall_next_pc != NULL)
	    nextpc = tdep->syscall_next_pc (frame);
      else if (cond != 0x0f && condition_true (cond, status))
	nextpc = pc_val + (sbits (inst1, 0, 7) << 1);
  else if ((inst1 & 0xf800) == 0xe000)	/* unconditional branch */
      nextpc = pc_val + (sbits (inst1, 0, 10) << 1);
  else if ((inst1 & 0xe000) == 0xe000)	/* 32-bit instruction */
      unsigned short inst2;
      inst2 = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);

      /* Default to the next instruction.  */
      nextpc = MAKE_THUMB_ADDR (nextpc);

      if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
	  /* Branches and miscellaneous control instructions.  */

	  if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
	      /* B, BL, BLX: reassemble the branch offset from the
		 scattered immediate fields.  */
	      int j1, j2, imm1, imm2;

	      imm1 = sbits (inst1, 0, 10);
	      imm2 = bits (inst2, 0, 10);
	      j1 = bit (inst2, 13);
	      j2 = bit (inst2, 11);

	      offset = ((imm1 << 12) + (imm2 << 1));
	      offset ^= ((!j2) << 22) | ((!j1) << 23);

	      nextpc = pc_val + offset;
	      /* For BLX make sure to clear the low bits.  */
	      if (bit (inst2, 12) == 0)
		nextpc = nextpc & 0xfffffffc;
	  else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)
	      /* SUBS PC, LR, #imm8.  */
	      nextpc = get_frame_register_unsigned (frame, ARM_LR_REGNUM);
	      nextpc -= inst2 & 0x00ff;
	  else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
	      /* Conditional branch.  */
	      if (condition_true (bits (inst1, 6, 9), status))
		  int sign, j1, j2, imm1, imm2;

		  sign = sbits (inst1, 10, 10);
		  imm1 = bits (inst1, 0, 5);
		  imm2 = bits (inst2, 0, 10);
		  j1 = bit (inst2, 13);
		  j2 = bit (inst2, 11);

		  offset = (sign << 20) + (j2 << 19) + (j1 << 18);
		  offset += (imm1 << 12) + (imm2 << 1);

		  nextpc = pc_val + offset;
      else if ((inst1 & 0xfe50) == 0xe810)
	  /* Load multiple or RFE.  */
	  int rn, offset, load_pc = 1;

	  rn = bits (inst1, 0, 3);
	  if (bit (inst1, 7) && !bit (inst1, 8))
	      /* LDMIA or POP */
	      if (!bit (inst2, 15))
	      offset = bitcount (inst2) * 4 - 4;
	  else if (!bit (inst1, 7) && bit (inst1, 8))
	      /* LDMDB */
	      if (!bit (inst2, 15))
	  else if (bit (inst1, 7) && bit (inst1, 8))
	  else if (!bit (inst1, 7) && !bit (inst1, 8))

	      CORE_ADDR addr = get_frame_register_unsigned (frame, rn);
	      nextpc = get_frame_memory_unsigned (frame, addr + offset, 4);
      else if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
	  /* MOV PC or MOVS PC.  */
	  nextpc = get_frame_register_unsigned (frame, bits (inst2, 0, 3));
	  nextpc = MAKE_THUMB_ADDR (nextpc);
      else if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
	  /* LDR with PC as the destination: compute the effective
	     address from the addressing mode bits.  */
	  int rn, load_pc = 1;

	  rn = bits (inst1, 0, 3);
	  base = get_frame_register_unsigned (frame, rn);
	      /* PC-relative: align the base down to a word.  */
	      base = (base + 4) & ~(CORE_ADDR) 0x3;
		base += bits (inst2, 0, 11);
		base -= bits (inst2, 0, 11);
	  else if (bit (inst1, 7))
	    base += bits (inst2, 0, 11);
	  else if (bit (inst2, 11))
	      if (bit (inst2, 10))
		    base += bits (inst2, 0, 7);
		    base -= bits (inst2, 0, 7);
	  else if ((inst2 & 0x0fc0) == 0x0000)
	      int shift = bits (inst2, 4, 5), rm = bits (inst2, 0, 3);
	      base += get_frame_register_unsigned (frame, rm) << shift;

	    nextpc = get_frame_memory_unsigned (frame, base, 4);
      else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)
	  /* TBB: table branch, byte offsets.  */
	  CORE_ADDR tbl_reg, table, offset, length;

	  tbl_reg = bits (inst1, 0, 3);
	  if (tbl_reg == 0x0f)
	    table = pc + 4;  /* Regcache copy of PC isn't right yet.  */
	    table = get_frame_register_unsigned (frame, tbl_reg);

	  offset = get_frame_register_unsigned (frame, bits (inst2, 0, 3));
	  length = 2 * get_frame_memory_unsigned (frame, table + offset, 1);
	  nextpc = pc_val + length;
      else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)
	  /* TBH: table branch, halfword offsets.  */
	  CORE_ADDR tbl_reg, table, offset, length;

	  tbl_reg = bits (inst1, 0, 3);
	  if (tbl_reg == 0x0f)
	    table = pc + 4;  /* Regcache copy of PC isn't right yet.  */
	    table = get_frame_register_unsigned (frame, tbl_reg);

	  offset = 2 * get_frame_register_unsigned (frame, bits (inst2, 0, 3));
	  length = 2 * get_frame_memory_unsigned (frame, table + offset, 2);
	  nextpc = pc_val + length;
  else if ((inst1 & 0xff00) == 0x4700)	/* bx REG, blx REG */
      if (bits (inst1, 3, 6) == 0x0f)
	nextpc = get_frame_register_unsigned (frame, bits (inst1, 3, 6));
  else if ((inst1 & 0xff87) == 0x4687)	/* mov pc, REG */
      if (bits (inst1, 3, 6) == 0x0f)
	nextpc = get_frame_register_unsigned (frame, bits (inst1, 3, 6));
      nextpc = MAKE_THUMB_ADDR (nextpc);
  else if ((inst1 & 0xf500) == 0xb100)
      /* CBNZ or CBZ.  */
      int imm = (bit (inst1, 9) << 6) + (bits (inst1, 3, 7) << 1);
      ULONGEST reg = get_frame_register_unsigned (frame, bits (inst1, 0, 2));

      if (bit (inst1, 11) && reg != 0)
	nextpc = pc_val + imm;
      else if (!bit (inst1, 11) && reg == 0)
	nextpc = pc_val + imm;
/* Get the raw next address.  PC is the current program counter, in
   FRAME.  INSERT_BKPT should be TRUE if we want a breakpoint set on
   the alternative next instruction if there are two options.

   The value returned has the execution state of the next instruction
   encoded in it.  Use IS_THUMB_ADDR () to see whether the instruction is
   in Thumb-State, and gdbarch_addr_bits_remove () to get the plain memory
   address.  */
arm_get_next_pc_raw (struct frame_info *frame, CORE_ADDR pc, int insert_bkpt)
  struct gdbarch *gdbarch = get_frame_arch (frame);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned long pc_val;
  unsigned long this_instr;
  unsigned long status;

  /* Thumb code is decoded by a separate routine.  */
  if (arm_frame_is_thumb (frame))
    return thumb_get_next_pc_raw (frame, pc, insert_bkpt);

  pc_val = (unsigned long) pc;
  this_instr = read_memory_unsigned_integer (pc, 4, byte_order_for_code);

  status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
  nextpc = (CORE_ADDR) (pc_val + 4);	/* Default case */

  /* The NV condition space holds unconditional instructions.  */
  if (bits (this_instr, 28, 31) == INST_NV)
    switch (bits (this_instr, 24, 27))
	/* Branch with Link and change to Thumb.  */
	nextpc = BranchDest (pc, this_instr);
	nextpc |= bit (this_instr, 24) << 1;
	nextpc = MAKE_THUMB_ADDR (nextpc);
	/* Coprocessor register transfer.  */
	if (bits (this_instr, 12, 15) == 15)
	  error (_("Invalid update to pc in instruction"));
  else if (condition_true (bits (this_instr, 28, 31), status))
      switch (bits (this_instr, 24, 27))
	case 0x1:		/* data processing */
	    unsigned long operand1, operand2, result = 0;

	    /* Only instructions writing the PC are interesting.  */
	    if (bits (this_instr, 12, 15) != 15)

	    if (bits (this_instr, 22, 25) == 0
		&& bits (this_instr, 4, 7) == 9)	/* multiply */
	      error (_("Invalid update to pc in instruction"));

	    /* BX <reg>, BLX <reg> */
	    if (bits (this_instr, 4, 27) == 0x12fff1
		|| bits (this_instr, 4, 27) == 0x12fff3)
		rn = bits (this_instr, 0, 3);
		nextpc = (rn == 15) ? pc_val + 8
		  : get_frame_register_unsigned (frame, rn);

	    /* Multiply into PC.  */
	    c = (status & FLAG_C) ? 1 : 0;
	    rn = bits (this_instr, 16, 19);
	    operand1 = (rn == 15) ? pc_val + 8
	      : get_frame_register_unsigned (frame, rn);

	    if (bit (this_instr, 25))
		/* Immediate operand: 8-bit value rotated right.  */
		unsigned long immval = bits (this_instr, 0, 7);
		unsigned long rotate = 2 * bits (this_instr, 8, 11);
		operand2 = ((immval >> rotate) | (immval << (32 - rotate)))
	    else		/* operand 2 is a shifted register.  */
	      operand2 = shifted_reg_val (frame, this_instr, c,

	    /* Emulate the data-processing opcode to find the new PC.  */
	    switch (bits (this_instr, 21, 24))
		result = operand1 & operand2;		/* and */
		result = operand1 ^ operand2;		/* eor */
		result = operand1 - operand2;		/* sub */
		result = operand2 - operand1;		/* rsb */
		result = operand1 + operand2;		/* add */
		result = operand1 + operand2 + c;	/* adc */
		result = operand1 - operand2 + c;	/* sbc */
		result = operand2 - operand1 + c;	/* rsc */
	      case 0xb:		/* tst, teq, cmp, cmn */
		result = (unsigned long) nextpc;
		result = operand1 | operand2;		/* orr */
		/* Always step into a function.  */
		result = operand1 & ~operand2;		/* bic */

	    /* In 26-bit APCS the bottom two bits of the result are
	       ignored, and we always end up in ARM state.  */
	    nextpc = arm_addr_bits_remove (gdbarch, result);

	case 0x5:		/* data transfer */
	    if (bit (this_instr, 20))
		/* load */
		if (bits (this_instr, 12, 15) == 15)
		    /* rd == pc */
		    if (bit (this_instr, 22))
		      error (_("Invalid update to pc in instruction"));

		    /* byte write to PC */
		    rn = bits (this_instr, 16, 19);
		    base = (rn == 15) ? pc_val + 8
		      : get_frame_register_unsigned (frame, rn);
		    if (bit (this_instr, 24))
			/* pre-indexed */
			int c = (status & FLAG_C) ? 1 : 0;
			unsigned long offset =
			  (bit (this_instr, 25)
			   ? shifted_reg_val (frame, this_instr, c, pc_val, status)
			   : bits (this_instr, 0, 11));

			if (bit (this_instr, 23))
		    nextpc = (CORE_ADDR) read_memory_integer ((CORE_ADDR) base,

	case 0x9:		/* block transfer */
	  if (bit (this_instr, 20))
	      /* LDM */
	      if (bit (this_instr, 15))
		  /* The loaded register list includes the PC.  */
		  if (bit (this_instr, 23))
		      /* Increment.  */
		      unsigned long reglist = bits (this_instr, 0, 14);
		      offset = bitcount (reglist) * 4;
		      if (bit (this_instr, 24))		/* pre */
		  else if (bit (this_instr, 24))
		    unsigned long rn_val =
		      get_frame_register_unsigned (frame,
						   bits (this_instr, 16, 19));
		    (CORE_ADDR) read_memory_integer ((CORE_ADDR) (rn_val

	case 0xb:		/* branch & link */
	case 0xa:		/* branch */
	    nextpc = BranchDest (pc, this_instr);

	case 0xe:		/* coproc ops */
	    /* SWI: defer to the OS-specific syscall predictor.  */
	    struct gdbarch_tdep *tdep;
	    tdep = gdbarch_tdep (gdbarch);

	    if (tdep->syscall_next_pc != NULL)
	      nextpc = tdep->syscall_next_pc (frame);

	  fprintf_filtered (gdb_stderr, _("Bad bit-field extraction\n"));
/* Return the address of the instruction that will execute after the
   one at PC in FRAME, with the Thumb bit stripped.  Errors out if the
   next PC equals the current one (an instruction branching to
   itself).  */
arm_get_next_pc (struct frame_info *frame, CORE_ADDR pc)
  struct gdbarch *gdbarch = get_frame_arch (frame);
  /* Strip the execution-state (Thumb) bit from the raw result.  */
  gdbarch_addr_bits_remove (gdbarch,
			    arm_get_next_pc_raw (frame, pc, TRUE));
  error (_("Infinite loop detected"));
/* single_step() is called just before we want to resume the inferior,
   if we want to single-step it but there is no hardware or kernel
   single-step support.  We find the target of the coming instruction
   and breakpoint it.  */

arm_software_single_step (struct frame_info *frame)
  struct gdbarch *gdbarch = get_frame_arch (frame);
  struct address_space *aspace = get_frame_address_space (frame);

  /* NOTE: This may insert the wrong breakpoint instruction when
     single-stepping over a mode-changing instruction, if the
     CPSR heuristics are used.  */

  /* Predict the next PC and plant a temporary breakpoint there.  */
  CORE_ADDR next_pc = arm_get_next_pc (frame, get_frame_pc (frame));
  insert_single_step_breakpoint (gdbarch, aspace, next_pc);
/* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
   the buffer to be NEW_LEN bytes ending at ENDADDR.  Return
   NULL if an error occurs.  BUF is freed.  */

extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
		       int old_len, int new_len)
  /* NOTE(review): MIDDLE appears unused in the visible code —
     confirm whether it is dead and can be removed.  */
  gdb_byte *new_buf, *middle;
  int bytes_to_read = new_len - old_len;

  /* Copy the old contents to the tail of the new buffer, then fill
     the head by reading the preceding target memory.  */
  new_buf = xmalloc (new_len);
  memcpy (new_buf + bytes_to_read, buf, old_len);
  if (target_read_memory (endaddr - new_len, new_buf, bytes_to_read) != 0)
4904 /* An IT block is at most the 2-byte IT instruction followed by
4905 four 4-byte instructions. The furthest back we must search to
4906 find an IT block that affects the current instruction is thus
4907 2 + 3 * 4 == 14 bytes. */
4908 #define MAX_IT_BLOCK_PREFIX 14
/* Use a quick scan if there are more than this many bytes of
   code to examine.  */
#define IT_SCAN_THRESHOLD 32
/* Adjust a breakpoint's address to move breakpoints out of IT blocks.
   A breakpoint in an IT block may not be hit, depending on the
   condition; move the breakpoint to the IT instruction instead.  */
arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
  CORE_ADDR boundary, func_start;
  int buf_len, buf2_len;
  enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
  int i, any, last_it, last_it_count;

  /* If we are using BKPT breakpoints, none of this is necessary.  */
  if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)

  /* ARM mode does not have this problem.  */
  if (!arm_pc_is_thumb (gdbarch, bpaddr))

  /* We are setting a breakpoint in Thumb code that could potentially
     contain an IT block.  The first step is to find how much Thumb
     code there is; we do not need to read outside of known Thumb
     code.  */
  map_type = arm_find_mapping_symbol (bpaddr, &boundary);

  /* Thumb-2 code must have mapping symbols to have a chance.  */

  bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);

  /* Never search back past the start of the containing function.  */
  if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
      && func_start > boundary)
    boundary = func_start;

  /* Search for a candidate IT instruction.  We have to do some fancy
     footwork to distinguish a real IT instruction from the second
     half of a 32-bit instruction, but there is no need for that if
     there's no candidate.  */
  buf_len = min (bpaddr - boundary, MAX_IT_BLOCK_PREFIX);

  /* No room for an IT instruction.  */

  buf = xmalloc (buf_len);
  if (target_read_memory (bpaddr - buf_len, buf, buf_len) != 0)
  for (i = 0; i < buf_len; i += 2)
      unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
      /* 0xbfXY with a nonzero low nibble is the IT encoding.  */
      if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)

  /* OK, the code bytes before this instruction contain at least one
     halfword which resembles an IT instruction.  We know that it's
     Thumb code, but there are still two possibilities.  Either the
     halfword really is an IT instruction, or it is the second half of
     a 32-bit Thumb instruction.  The only way we can tell is to
     scan forwards from a known instruction boundary.  */
  if (bpaddr - boundary > IT_SCAN_THRESHOLD)
      /* There's a lot of code before this instruction.  Start with an
	 optimistic search; it's easy to recognize halfwords that can
	 not be the start of a 32-bit instruction, and use that to
	 lock on to the instruction boundaries.  */
      buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
      buf_len = IT_SCAN_THRESHOLD;

      /* NOTE(review): "sizeof (buf)" here is the size of a pointer,
	 not of the buffer — verify this bound is intended.  */
      for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
	  unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
	  if (thumb_insn_size (inst1) == 2)

      /* At this point, if DEFINITE, BUF[I] is the first place we
	 are sure that we know the instruction boundaries, and it is far
	 enough from BPADDR that we could not miss an IT instruction
	 affecting BPADDR.  If ! DEFINITE, give up - start from a
	 known boundary.  */
	  buf = extend_buffer_earlier (buf, bpaddr, buf_len,
	  buf_len = bpaddr - boundary;
      buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
      buf_len = bpaddr - boundary;

  /* Scan forwards.  Find the last IT instruction before BPADDR.  */
      unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);

      if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
      /* Count off the remaining length of the IT block from the
	 low bits of the IT mask.  */
      else if (inst1 & 0x0002)
      else if (inst1 & 0x0004)

      i += thumb_insn_size (inst1);

  /* There wasn't really an IT instruction after all.  */

  if (last_it_count < 1)
    /* It was too far away.  */

  /* This really is a trouble spot.  Move the breakpoint to the IT
     instruction.  */
  return bpaddr - buf_len + last_it;
5069 /* ARM displaced stepping support.
5071 Generally ARM displaced stepping works as follows:
5073 1. When an instruction is to be single-stepped, it is first decoded by
5074 arm_process_displaced_insn (called from arm_displaced_step_copy_insn).
5075 Depending on the type of instruction, it is then copied to a scratch
5076 location, possibly in a modified form. The copy_* set of functions
5077 performs such modification, as necessary. A breakpoint is placed after
5078 the modified instruction in the scratch space to return control to GDB.
5079 Note in particular that instructions which modify the PC will no longer
5080 do so after modification.
5082 2. The instruction is single-stepped, by setting the PC to the scratch
5083 location address, and resuming. Control returns to GDB when the
5086 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
5087 function used for the current instruction. This function's job is to
5088 put the CPU/memory state back to what it would have been if the
5089 instruction had been executed unmodified in its original location. */
5091 /* NOP instruction (mov r0, r0). */
5092 #define ARM_NOP 0xe1a00000
5094 /* Helper for register reads for displaced stepping.  In particular, this
5095    returns the PC as it would be seen by the instruction at its original
/* NOTE(review): this listing is elided; intermediate lines of the function
   bodies below are missing.  Reads of r15 return FROM + 8 (the ARM pipeline
   offset), so the relocated instruction observes the PC it would have seen
   at its original location; any other register is read from the regcache.  */
5099 displaced_read_reg (struct regcache *regs, CORE_ADDR from, int regno)
5105       if (debug_displaced)
5106 	fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
5107 			    (unsigned long) from + 8);
5108       return (ULONGEST) from + 8;  /* Pipeline offset.  */
5112       regcache_cooked_read_unsigned (regs, regno, &ret);
5113       if (debug_displaced)
5114 	fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
5115 			    regno, (unsigned long) ret);
/* Return non-zero if the inferior is currently executing in ARM (not Thumb)
   state: the PSR Thumb bit is clear.  */
5121 displaced_in_arm_mode (struct regcache *regs)
5124   ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
5126   regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5128   return (ps & t_bit) == 0;
5131 /* Write to the PC as from a branch instruction.  */
/* NOTE(review): in ARM state the low two bits of the destination are
   cleared (word alignment); in Thumb state only bit 0 is cleared.  */
5134 branch_write_pc (struct regcache *regs, ULONGEST val)
5136   if (displaced_in_arm_mode (regs))
5137     /* Note: If bits 0/1 are set, this branch would be unpredictable for
5138        architecture versions < 6.  */
5139     regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5140 				    val & ~(ULONGEST) 0x3);
5142     regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5143 				    val & ~(ULONGEST) 0x1);
5146 /* Write to the PC as from a branch-exchange instruction.  */
/* NOTE(review): bit 0 of VAL selects Thumb state (set T bit, clear bit 0 of
   the address); bit 1 clear selects ARM state.  A value with bit 0 clear and
   bit 1 set is architecturally unpredictable, so warn and force ARM state
   with a word-aligned destination.  */
5149 bx_write_pc (struct regcache *regs, ULONGEST val)
5152   ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
5154   regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5158       regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
5159       regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
5161   else if ((val & 2) == 0)
5163       regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5164       regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
5168       /* Unpredictable behaviour.  Try to do something sensible (switch to ARM
5169 	 mode, align dest to 4 bytes).  */
5170       warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
5171       regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5172       regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
5176 /* Write to the PC as if from a load instruction.  */
/* Loads to PC interwork (behave like BX) from architecture v5 on.  */
5179 load_write_pc (struct regcache *regs, ULONGEST val)
5181   if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
5182     bx_write_pc (regs, val);
5184     branch_write_pc (regs, val);
5187 /* Write to the PC as if from an ALU instruction.  */
/* ALU writes to PC interwork only from v7, and only in ARM state.  */
5190 alu_write_pc (struct regcache *regs, ULONGEST val)
5192   if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && displaced_in_arm_mode (regs))
5193     bx_write_pc (regs, val);
5195     branch_write_pc (regs, val);
5198 /* Helper for writing to registers for displaced stepping.  Writing to the PC
5199    has a varying effects depending on the instruction which does the write:
5200    this is controlled by the WRITE_PC argument.  */
/* NOTE(review): writes to r15 dispatch on WRITE_PC (branch / bx / load / alu
   / cannot) and record the fact in dsc->wrote_to_pc; CANNOT_WRITE_PC only
   warns, since such a write indicates a decoding bug.  Writes to any other
   register go straight to the regcache.  */
5203 displaced_write_reg (struct regcache *regs, struct displaced_step_closure *dsc,
5204 		     int regno, ULONGEST val, enum pc_write_style write_pc)
5208       if (debug_displaced)
5209 	fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
5210 			    (unsigned long) val);
5213 	case BRANCH_WRITE_PC:
5214 	  branch_write_pc (regs, val);
5218 	  bx_write_pc (regs, val);
5222 	  load_write_pc (regs, val);
5226 	  alu_write_pc (regs, val);
5229 	case CANNOT_WRITE_PC:
5230 	  warning (_("Instruction wrote to PC in an unexpected way when "
5231 		     "single-stepping"));
5235 	  internal_error (__FILE__, __LINE__,
5236 			  _("Invalid argument to displaced_write_reg"));
5239       dsc->wrote_to_pc = 1;
5243       if (debug_displaced)
5244 	fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
5245 			    regno, (unsigned long) val);
5246       regcache_cooked_write_unsigned (regs, regno, val);
/* Return non-zero if any of the register fields of INSN selected by
   BITMASK refer to the PC.  Each field of interest is flagged by setting
   the corresponding four bits of BITMASK to 0b1111; the function reports
   whether any such field of INSN also holds 0b1111 (i.e. r15).  */

static int
insn_references_pc (uint32_t insn, uint32_t bitmask)
{
  while (bitmask != 0)
    {
      uint32_t shift, field;

      /* Find the least significant set bit of BITMASK: the bottom of the
	 next register field to examine.  BITMASK is non-zero here, so the
	 scan always terminates.  */
      for (shift = 0; ((bitmask >> shift) & 1) == 0; shift++)
	;

      field = (uint32_t) 0xf << shift;

      /* A field value of 0b1111 denotes r15 (the PC).  */
      if ((insn & field) == field)
	return 1;

      bitmask &= ~field;
    }

  return 0;
}
5282 /* The simplest copy function.  Many instructions have the same effect no
5283    matter what address they are executed at: in those cases, use this.  */
/* NOTE(review): INSN is placed in the scratch area verbatim
   (dsc->modinsn[0]); INAME is used only for the debug trace.  No cleanup
   hook is registered in the visible code.  */
5286 copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
5287 		 const char *iname, struct displaced_step_closure *dsc)
5289   if (debug_displaced)
5290     fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
5291 			"opcode/class '%s' unmodified\n", (unsigned long) insn,
5294   dsc->modinsn[0] = insn;
5299 /* Preload instructions with immediate offset.  */
/* Cleanup: restore the scratch registers (r0, and r1 for the
   register-offset form) that the copy routine borrowed.  */
5302 cleanup_preload (struct gdbarch *gdbarch,
5303 		 struct regcache *regs, struct displaced_step_closure *dsc)
5305   displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5306   if (!dsc->u.preload.immed)
5307     displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
/* Copy a PLD/PLI with immediate offset.  If the base register is not the
   PC the instruction is position-independent and is copied unmodified;
   otherwise the PC-relative base is materialized in r0 and the Rn field of
   the copied instruction is rewritten to r0.  */
5311 copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5312 	      struct displaced_step_closure *dsc)
5314   unsigned int rn = bits (insn, 16, 19);
5316   CORE_ADDR from = dsc->insn_addr;
5318   if (!insn_references_pc (insn, 0x000f0000ul))
5319     return copy_unmodified (gdbarch, insn, "preload", dsc);
5321   if (debug_displaced)
5322     fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
5323 			(unsigned long) insn);
5325   /* Preload instructions:
5327      {pli/pld} [rn, #+/-imm]
5329      {pli/pld} [r0, #+/-imm].  */
5331   dsc->tmp[0] = displaced_read_reg (regs, from, 0);
5332   rn_val = displaced_read_reg (regs, from, rn);
5333   displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5335   dsc->u.preload.immed = 1;
/* Clear the Rn field (bits 16-19): the rewritten insn uses r0 as base.  */
5337   dsc->modinsn[0] = insn & 0xfff0ffff;
5339   dsc->cleanup = &cleanup_preload;
5344 /* Preload instructions with register offset.  */
/* Copy a PLD/PLI with register offset.  If neither Rn nor Rm is the PC the
   instruction is copied unmodified; otherwise the base and offset values
   are materialized in r0/r1 and the register fields of the copied
   instruction are rewritten accordingly.  */
5347 copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
5348 		  struct regcache *regs,
5349 		  struct displaced_step_closure *dsc)
5351   unsigned int rn = bits (insn, 16, 19);
5352   unsigned int rm = bits (insn, 0, 3);
5353   ULONGEST rn_val, rm_val;
5354   CORE_ADDR from = dsc->insn_addr;
5356   if (!insn_references_pc (insn, 0x000f000ful))
5357     return copy_unmodified (gdbarch, insn, "preload reg", dsc);
5359   if (debug_displaced)
5360     fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
5361 			(unsigned long) insn);
5363   /* Preload register-offset instructions:
5365      {pli/pld} [rn, rm {, shift}]
5367      {pli/pld} [r0, r1 {, shift}].  */
5369   dsc->tmp[0] = displaced_read_reg (regs, from, 0);
5370   dsc->tmp[1] = displaced_read_reg (regs, from, 1);
5371   rn_val = displaced_read_reg (regs, from, rn);
5372   rm_val = displaced_read_reg (regs, from, rm);
5373   displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5374   displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
5376   dsc->u.preload.immed = 0;
/* Rn field cleared to r0; Rm field set to r1 (the | 0x1).  */
5378   dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
5380   dsc->cleanup = &cleanup_preload;
5385 /* Copy/cleanup coprocessor load and store instructions.  */
/* Cleanup: restore r0, and perform the base-register writeback (with the
   value left in r0 by the copied instruction) if the insn requested it.  */
5388 cleanup_copro_load_store (struct gdbarch *gdbarch,
5389 			  struct regcache *regs,
5390 			  struct displaced_step_closure *dsc)
5392   ULONGEST rn_val = displaced_read_reg (regs, dsc->insn_addr, 0);
5394   displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5396   if (dsc->u.ldst.writeback)
5397     displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
/* Copy an LDC/STC.  PC-relative bases are materialized in r0 and the Rn
   field of the copied instruction rewritten to r0.  */
5401 copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
5402 		       struct regcache *regs,
5403 		       struct displaced_step_closure *dsc)
5405   unsigned int rn = bits (insn, 16, 19);
5407   CORE_ADDR from = dsc->insn_addr;
5409   if (!insn_references_pc (insn, 0x000f0000ul))
5410     return copy_unmodified (gdbarch, insn, "copro load/store", dsc);
5412   if (debug_displaced)
5413     fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
5414 			"load/store insn %.8lx\n", (unsigned long) insn);
5416   /* Coprocessor load/store instructions:
5418      {stc/stc2} [<Rn>, #+/-imm]  (and other immediate addressing modes)
5420      {stc/stc2} [r0, #+/-imm].
5422      ldc/ldc2 are handled identically.  */
5424   dsc->tmp[0] = displaced_read_reg (regs, from, 0);
5425   rn_val = displaced_read_reg (regs, from, rn);
5426   displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
/* NOTE(review): bit 25 is taken here as the writeback flag for the
   coprocessor form — TODO confirm against the ARM ARM LDC/STC encoding.  */
5428   dsc->u.ldst.writeback = bit (insn, 25);
5429   dsc->u.ldst.rn = rn;
5431   dsc->modinsn[0] = insn & 0xfff0ffff;
5433   dsc->cleanup = &cleanup_copro_load_store;
5438 /* Clean up branch instructions (actually perform the branch, by setting
/* NOTE(review): the copied insn was a NOP; the branch itself is emulated
   here.  If the condition held, r14 is set for link variants (to the
   address of the instruction after the original — pc - 4, since
   displaced_read_reg yields original pc + 8) and the destination is
   written via BX_WRITE_PC for exchange variants, BRANCH_WRITE_PC
   otherwise.  */
5442 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
5443 		struct displaced_step_closure *dsc)
5445   ULONGEST from = dsc->insn_addr;
5446   uint32_t status = displaced_read_reg (regs, from, ARM_PS_REGNUM);
5447   int branch_taken = condition_true (dsc->u.branch.cond, status);
5448   enum pc_write_style write_pc = dsc->u.branch.exchange
5449 				 ? BX_WRITE_PC : BRANCH_WRITE_PC;
5454   if (dsc->u.branch.link)
5456       ULONGEST pc = displaced_read_reg (regs, from, 15);
5457       displaced_write_reg (regs, dsc, 14, pc - 4, CANNOT_WRITE_PC);
5460   displaced_write_reg (regs, dsc, 15, dsc->u.branch.dest, write_pc);
5463 /* Copy B/BL/BLX instructions with immediate destinations.  */
/* The unconditional (cond == 0xf) encoding is BLX, which always links and
   exchanges to Thumb.  */
5466 copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
5467 	       struct regcache *regs, struct displaced_step_closure *dsc)
5469   unsigned int cond = bits (insn, 28, 31);
5470   int exchange = (cond == 0xf);
5471   int link = exchange || bit (insn, 24);
5472   CORE_ADDR from = dsc->insn_addr;
5475   if (debug_displaced)
5476     fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
5477 			"%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
5478 			(unsigned long) insn);
5480   /* Implement "BL<cond> <label>" as:
5482      Preparation: cond <- instruction condition
5483      Insn: mov r0, r0  (nop)
5484      Cleanup: if (condition true) { r14 <- pc; pc <- label }.
5486      B<cond> similar, but don't set r14 in cleanup.  */
5489     /* For BLX, set bit 0 of the destination.  The cleanup_branch function will
5490        then arrange the switch into Thumb mode.  */
5491     offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
5493     offset = bits (insn, 0, 23) << 2;
/* Sign-extend the 26-bit branch offset (bit 25 is the sign bit).  */
5495   if (bit (offset, 25))
5496     offset = offset | ~0x3ffffff;
5498   dsc->u.branch.cond = cond;
5499   dsc->u.branch.link = link;
5500   dsc->u.branch.exchange = exchange;
/* Branch target is relative to original pc + 8 (pipeline offset).  */
5501   dsc->u.branch.dest = from + 8 + offset;
5503   dsc->modinsn[0] = ARM_NOP;
5505   dsc->cleanup = &cleanup_branch;
5510 /* Copy BX/BLX with register-specified destinations.  */
/* The destination (including its interworking bit 0) is read from Rm at
   copy time; the branch itself is performed in cleanup_branch.  */
5513 copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
5514 		 struct regcache *regs, struct displaced_step_closure *dsc)
5516   unsigned int cond = bits (insn, 28, 31);
5519   int link = bit (insn, 5);
5520   unsigned int rm = bits (insn, 0, 3);
5521   CORE_ADDR from = dsc->insn_addr;
5523   if (debug_displaced)
5524     fprintf_unfiltered (gdb_stdlog, "displaced: copying %s register insn "
5525 			"%.8lx\n", (link) ? "blx" : "bx",
5526 			(unsigned long) insn);
5528   /* Implement {BX,BLX}<cond> <reg>" as:
5530      Preparation: cond <- instruction condition
5531      Insn: mov r0, r0  (nop)
5532      Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5534      Don't set r14 in cleanup for BX.  */
5536   dsc->u.branch.dest = displaced_read_reg (regs, from, rm);
5538   dsc->u.branch.cond = cond;
5539   dsc->u.branch.link = link;
5540   dsc->u.branch.exchange = 1;
5542   dsc->modinsn[0] = ARM_NOP;
5544   dsc->cleanup = &cleanup_branch;
5549 /* Copy/cleanup arithmetic/logic instruction with immediate RHS.  */
/* Cleanup: the copied insn computed into r0; forward that to the real Rd
   (with ALU PC-write semantics if Rd is the PC) and restore r0/r1.  */
5552 cleanup_alu_imm (struct gdbarch *gdbarch,
5553 		 struct regcache *regs, struct displaced_step_closure *dsc)
5555   ULONGEST rd_val = displaced_read_reg (regs, dsc->insn_addr, 0);
5556   displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5557   displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5558   displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
/* Copy a data-processing instruction with immediate operand.  Rd/Rn are
   remapped onto r0/r1 so a PC operand reads the correct (original) value.  */
5562 copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5563 	      struct displaced_step_closure *dsc)
5565   unsigned int rn = bits (insn, 16, 19);
5566   unsigned int rd = bits (insn, 12, 15);
5567   unsigned int op = bits (insn, 21, 24);
5568   int is_mov = (op == 0xd);
5569   ULONGEST rd_val, rn_val;
5570   CORE_ADDR from = dsc->insn_addr;
5572   if (!insn_references_pc (insn, 0x000ff000ul))
5573     return copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
5575   if (debug_displaced)
5576     fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
5577 			"%.8lx\n", is_mov ? "move" : "ALU",
5578 			(unsigned long) insn);
5580   /* Instruction is of form:
5582      <op><cond> rd, [rn,] #imm
5586      Preparation: tmp1, tmp2 <- r0, r1;
5588      Insn: <op><cond> r0, r1, #imm
5589      Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5592   dsc->tmp[0] = displaced_read_reg (regs, from, 0);
5593   dsc->tmp[1] = displaced_read_reg (regs, from, 1);
5594   rn_val = displaced_read_reg (regs, from, rn);
5595   rd_val = displaced_read_reg (regs, from, rd);
5596   displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5597   displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
/* MOV has no Rn operand, so only the Rd field is remapped; otherwise Rn is
   rewritten to r1 (the | 0x10000).  */
5601     dsc->modinsn[0] = insn & 0xfff00fff;
5603     dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
5605   dsc->cleanup = &cleanup_alu_imm;
5610 /* Copy/cleanup arithmetic/logic insns with register RHS.  */
/* Cleanup: restore r0-r2 and forward the result (left in r0) to the real
   Rd, honouring ALU PC-write semantics if Rd is the PC.  */
5613 cleanup_alu_reg (struct gdbarch *gdbarch,
5614 		 struct regcache *regs, struct displaced_step_closure *dsc)
5619   rd_val = displaced_read_reg (regs, dsc->insn_addr, 0);
5621   for (i = 0; i < 3; i++)
5622     displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5624   displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
/* Copy a data-processing instruction with register operand.  Rd/Rn/Rm are
   remapped onto r0/r1/r2 so PC operands read their original values.  */
5628 copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5629 	      struct displaced_step_closure *dsc)
5631   unsigned int rn = bits (insn, 16, 19);
5632   unsigned int rm = bits (insn, 0, 3);
5633   unsigned int rd = bits (insn, 12, 15);
5634   unsigned int op = bits (insn, 21, 24);
5635   int is_mov = (op == 0xd);
5636   ULONGEST rd_val, rn_val, rm_val;
5637   CORE_ADDR from = dsc->insn_addr;
5639   if (!insn_references_pc (insn, 0x000ff00ful))
5640     return copy_unmodified (gdbarch, insn, "ALU reg", dsc);
5642   if (debug_displaced)
5643     fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
5644 			is_mov ? "move" : "ALU", (unsigned long) insn);
5646   /* Instruction is of form:
5648      <op><cond> rd, [rn,] rm [, <shift>]
5652      Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
5653 		  r0, r1, r2 <- rd, rn, rm
5654      Insn: <op><cond> r0, r1, r2 [, <shift>]
5655      Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
5658   dsc->tmp[0] = displaced_read_reg (regs, from, 0);
5659   dsc->tmp[1] = displaced_read_reg (regs, from, 1);
5660   dsc->tmp[2] = displaced_read_reg (regs, from, 2);
5661   rd_val = displaced_read_reg (regs, from, rd);
5662   rn_val = displaced_read_reg (regs, from, rn);
5663   rm_val = displaced_read_reg (regs, from, rm);
5664   displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5665   displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5666   displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
/* Rm rewritten to r2 (| 0x2); non-MOV forms also remap Rn to r1.  */
5670     dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
5672     dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
5674   dsc->cleanup = &cleanup_alu_reg;
5679 /* Cleanup/copy arithmetic/logic insns with shifted register RHS.  */
/* Cleanup: restore r0-r3 and forward the result (left in r0) to the real
   Rd, honouring ALU PC-write semantics if Rd is the PC.  */
5682 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
5683 			 struct regcache *regs,
5684 			 struct displaced_step_closure *dsc)
5686   ULONGEST rd_val = displaced_read_reg (regs, dsc->insn_addr, 0);
5689   for (i = 0; i < 4; i++)
5690     displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5692   displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
/* Copy a data-processing instruction with register-shifted register
   operand.  Rd/Rn/Rm/Rs are remapped onto r0-r3 so PC operands read their
   original values.  */
5696 copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
5697 		      struct regcache *regs,
5698 		      struct displaced_step_closure *dsc)
5700   unsigned int rn = bits (insn, 16, 19);
5701   unsigned int rm = bits (insn, 0, 3);
5702   unsigned int rd = bits (insn, 12, 15);
5703   unsigned int rs = bits (insn, 8, 11);
5704   unsigned int op = bits (insn, 21, 24);
5705   int is_mov = (op == 0xd), i;
5706   ULONGEST rd_val, rn_val, rm_val, rs_val;
5707   CORE_ADDR from = dsc->insn_addr;
5709   if (!insn_references_pc (insn, 0x000fff0ful))
5710     return copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
5712   if (debug_displaced)
5713     fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
5714 			"%.8lx\n", is_mov ? "move" : "ALU",
5715 			(unsigned long) insn);
5717   /* Instruction is of form:
5719      <op><cond> rd, [rn,] rm, <shift> rs
5723      Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
5724 		  r0, r1, r2, r3 <- rd, rn, rm, rs
5725      Insn: <op><cond> r0, r1, r2, <shift> r3
5727 	      r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
5731   for (i = 0; i < 4; i++)
5732     dsc->tmp[i] = displaced_read_reg (regs, from, i);
5734   rd_val = displaced_read_reg (regs, from, rd);
5735   rn_val = displaced_read_reg (regs, from, rn);
5736   rm_val = displaced_read_reg (regs, from, rm);
5737   rs_val = displaced_read_reg (regs, from, rs);
5738   displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5739   displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5740   displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5741   displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
/* Rm -> r2 and Rs -> r3 (| 0x302); non-MOV forms also remap Rn to r1.  */
5745     dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
5747     dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
5749   dsc->cleanup = &cleanup_alu_shifted_reg;
5754 /* Clean up load instructions.  */
/* Cleanup after a remapped load: fetch the loaded value(s) from r0 (and r1
   for doubleword transfers) and the written-back base from r2, restore the
   borrowed scratch registers, then commit writeback and place the result
   in the real destination register(s).  A load into the PC goes through
   LOAD_WRITE_PC so interworking is honoured.  */
5757 cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
5758 	      struct displaced_step_closure *dsc)
5760   ULONGEST rt_val, rt_val2 = 0, rn_val;
5761   CORE_ADDR from = dsc->insn_addr;
5763   rt_val = displaced_read_reg (regs, from, 0);
5764   if (dsc->u.ldst.xfersize == 8)
5765     rt_val2 = displaced_read_reg (regs, from, 1);
5766   rn_val = displaced_read_reg (regs, from, 2);
5768   displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5769   if (dsc->u.ldst.xfersize > 4)
5770     displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5771   displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5772   if (!dsc->u.ldst.immed)
5773     displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5775   /* Handle register writeback.  */
5776   if (dsc->u.ldst.writeback)
5777     displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5778   /* Put result in right place.  */
5779   displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
5780   if (dsc->u.ldst.xfersize == 8)
5781     displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
5784 /* Clean up store instructions.  */
/* Cleanup after a remapped store: restore the borrowed scratch registers
   (r0-r3, and r4 when it was used for PC-store emulation) and commit base
   writeback from r2.  */
5787 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
5788 	       struct displaced_step_closure *dsc)
5790   CORE_ADDR from = dsc->insn_addr;
5791   ULONGEST rn_val = displaced_read_reg (regs, from, 2);
5793   displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5794   if (dsc->u.ldst.xfersize > 4)
5795     displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5796   displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5797   if (!dsc->u.ldst.immed)
5798     displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5799   if (!dsc->u.ldst.restore_r4)
5800     displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
5803   if (dsc->u.ldst.writeback)
5804     displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5807 /* Copy "extra" load/store instructions.  These are halfword/doubleword
5808    transfers, which have a different encoding to byte/word transfers.  */
/* NOTE(review): LOAD[] and BYTESIZE[] are indexed by an opcode derived from
   op1/op2 below; entries give whether each variant loads and its transfer
   size in bytes (8 == doubleword pair).  Rt/Rn/Rm are remapped onto
   r0/r2/r3 (r1 holds Rt+1 for doubleword forms).  */
5811 copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unpriveleged,
5812 		  struct regcache *regs, struct displaced_step_closure *dsc)
5814   unsigned int op1 = bits (insn, 20, 24);
5815   unsigned int op2 = bits (insn, 5, 6);
5816   unsigned int rt = bits (insn, 12, 15);
5817   unsigned int rn = bits (insn, 16, 19);
5818   unsigned int rm = bits (insn, 0, 3);
5819   char load[12]     = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
5820   char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
5821   int immed = (op1 & 0x4) != 0;
5823   ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
5824   CORE_ADDR from = dsc->insn_addr;
5826   if (!insn_references_pc (insn, 0x000ff00ful))
5827     return copy_unmodified (gdbarch, insn, "extra load/store", dsc);
5829   if (debug_displaced)
5830     fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
5831 			"insn %.8lx\n", unpriveleged ? "unpriveleged " : "",
5832 			(unsigned long) insn);
/* Fold op1/op2 into a table index; out-of-range values are a decode bug.  */
5834   opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
5837     internal_error (__FILE__, __LINE__,
5838 		    _("copy_extra_ld_st: instruction decode error"));
5840   dsc->tmp[0] = displaced_read_reg (regs, from, 0);
5841   dsc->tmp[1] = displaced_read_reg (regs, from, 1);
5842   dsc->tmp[2] = displaced_read_reg (regs, from, 2);
5844     dsc->tmp[3] = displaced_read_reg (regs, from, 3);
5846   rt_val = displaced_read_reg (regs, from, rt);
5847   if (bytesize[opcode] == 8)
5848     rt_val2 = displaced_read_reg (regs, from, rt + 1);
5849   rn_val = displaced_read_reg (regs, from, rn);
5851     rm_val = displaced_read_reg (regs, from, rm);
5853   displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5854   if (bytesize[opcode] == 8)
5855     displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
5856   displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5858     displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5861   dsc->u.ldst.xfersize = bytesize[opcode];
5862   dsc->u.ldst.rn = rn;
5863   dsc->u.ldst.immed = immed;
/* Writeback occurs for post-indexed (P == 0) or W == 1 addressing.  */
5864   dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
5865   dsc->u.ldst.restore_r4 = 0;
5868     /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
5870        {ldr,str}<width><cond> r0, [r1,] [r2, #imm].  */
5871     dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5873     /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
5875        {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3].  */
5876     dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5878   dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
5883 /* Copy byte/word loads and stores.  */
/* Copy an LDR/LDRB/STR/STRB (LOAD/BYTE select the variant; USERMODE marks
   the T forms).  Rt/Rn/Rm are remapped onto r0/r2/r3.  Storing the PC is
   special: the value stored is offset by an architecture-dependent amount,
   so a 7-word emulation sequence using r4 computes it at runtime (see the
   scratch listing below).  */
5886 copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
5887 			struct regcache *regs,
5888 			struct displaced_step_closure *dsc, int load, int byte,
5891   int immed = !bit (insn, 25);
5892   unsigned int rt = bits (insn, 12, 15);
5893   unsigned int rn = bits (insn, 16, 19);
5894   unsigned int rm = bits (insn, 0, 3);  /* Only valid if !immed.  */
5895   ULONGEST rt_val, rn_val, rm_val = 0;
5896   CORE_ADDR from = dsc->insn_addr;
5898   if (!insn_references_pc (insn, 0x000ff00ful))
5899     return copy_unmodified (gdbarch, insn, "load/store", dsc);
5901   if (debug_displaced)
5902     fprintf_unfiltered (gdb_stdlog, "displaced: copying %s%s insn %.8lx\n",
5903 			load ? (byte ? "ldrb" : "ldr")
5904 			     : (byte ? "strb" : "str"), usermode ? "t" : "",
5905 			(unsigned long) insn);
5907   dsc->tmp[0] = displaced_read_reg (regs, from, 0);
5908   dsc->tmp[2] = displaced_read_reg (regs, from, 2);
5910     dsc->tmp[3] = displaced_read_reg (regs, from, 3);
5912     dsc->tmp[4] = displaced_read_reg (regs, from, 4);
5914   rt_val = displaced_read_reg (regs, from, rt);
5915   rn_val = displaced_read_reg (regs, from, rn);
5917     rm_val = displaced_read_reg (regs, from, rm);
5919   displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5920   displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5922     displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5925   dsc->u.ldst.xfersize = byte ? 1 : 4;
5926   dsc->u.ldst.rn = rn;
5927   dsc->u.ldst.immed = immed;
/* Writeback occurs for post-indexed (P == 0) or W == 1 addressing.  */
5928   dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
5930   /* To write PC we can do:
5932      scratch+0:	str pc, temp  (*temp = scratch + 8 + offset)
5933      scratch+4:	ldr r4, temp
5934      scratch+8:	sub r4, r4, pc  (r4 = scratch + 8 + offset - scratch - 8 - 8)
5935      scratch+12:	add r4, r4, #8  (r4 = offset)
5936      scratch+16:	add r0, r0, r4
5937      scratch+20:	str r0, [r2, #imm] (or str r0, [r2, r3])
5940      Otherwise we don't know what value to write for PC, since the offset is
5941      architecture-dependent (sometimes PC+8, sometimes PC+12).  */
5943   if (load || rt != 15)
5945       dsc->u.ldst.restore_r4 = 0;
5948 	/* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
5950 	   {ldr,str}[b]<cond> r0, [r2, #imm].  */
5951 	dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5953 	/* {ldr,str}[b]<cond> rt, [rn, rm], etc.
5955 	   {ldr,str}[b]<cond> r0, [r2, r3].  */
5956 	dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5960       /* We need to use r4 as scratch.  Make sure it's restored afterwards.  */
5961       dsc->u.ldst.restore_r4 = 1;
5963       dsc->modinsn[0] = 0xe58ff014;  /* str pc, [pc, #20].  */
5964       dsc->modinsn[1] = 0xe59f4010;  /* ldr r4, [pc, #16].  */
5965       dsc->modinsn[2] = 0xe044400f;  /* sub r4, r4, pc.  */
5966       dsc->modinsn[3] = 0xe2844008;  /* add r4, r4, #8.  */
5967       dsc->modinsn[4] = 0xe0800004;  /* add r0, r0, r4.  */
5971 	dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
5973 	dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
5975       dsc->modinsn[6] = 0x0;  /* breakpoint location.  */
5976       dsc->modinsn[7] = 0x0;  /* scratch space.  */
5981   dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5986 /* Cleanup LDM instructions with fully-populated register list.  This is an
5987    unfortunate corner case: it's impossible to implement correctly by modifying
5988    the instruction.  The issue is as follows: we have an instruction,
5992    which we must rewrite to avoid loading PC.  A possible solution would be to
5993    do the load in two halves, something like (with suitable cleanup
5997    ldm[id][ab] r8!, {r0-r7}
5999    ldm[id][ab] r8, {r7-r14}
6002    but at present there's no suitable place for <temp>, since the scratch space
6003    is overwritten before the cleanup routine is called.  For now, we simply
6004    emulate the instruction.  */
/* NOTE(review): the copied insn did nothing; the whole LDM is emulated
   here in cleanup.  Registers are transferred in ascending memory order,
   which means ascending register order for increment forms and descending
   for decrement forms (REGNO starts at 0 or 15 accordingly).  */
6007 cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
6008 			struct displaced_step_closure *dsc)
6010   ULONGEST from = dsc->insn_addr;
6011   int inc = dsc->u.block.increment;
6012   int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
6013   int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
6014   uint32_t regmask = dsc->u.block.regmask;
6015   int regno = inc ? 0 : 15;
6016   CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
6017   int exception_return = dsc->u.block.load && dsc->u.block.user
6018 			 && (regmask & 0x8000) != 0;
6019   uint32_t status = displaced_read_reg (regs, from, ARM_PS_REGNUM);
6020   int do_transfer = condition_true (dsc->u.block.cond, status);
6021   enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
6026   /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
6027      sensible we can do here.  Complain loudly.  */
6028   if (exception_return)
6029     error (_("Cannot single-step exception return"));
6031   /* We don't handle any stores here for now.  */
6032   gdb_assert (dsc->u.block.load != 0);
6034   if (debug_displaced)
6035     fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
6036 			"%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
6037 			dsc->u.block.increment ? "inc" : "dec",
6038 			dsc->u.block.before ? "before" : "after");
/* Advance REGNO to the next register named in the (remaining) mask.  */
6045       while (regno <= 15 && (regmask & (1 << regno)) == 0)
6048       while (regno >= 0 && (regmask & (1 << regno)) == 0)
6051       xfer_addr += bump_before;
6053       memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
6054       displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
6056       xfer_addr += bump_after;
6058       regmask &= ~(1 << regno);
6061   if (dsc->u.block.writeback)
6062     displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
6066 /* Clean up an STM which included the PC in the register list.  */
/* The copied STM ran in the scratch area, so the PC value it stored is
   relative to the scratch base rather than the original insn address.
   Locate the stored-PC slot from the addressing mode, recover the
   architecture's store-PC offset, and patch the memory word to the value
   the original instruction would have stored.  */
6069 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
6070 			struct displaced_step_closure *dsc)
6072   ULONGEST from = dsc->insn_addr;
6073   uint32_t status = displaced_read_reg (regs, from, ARM_PS_REGNUM);
6074   int store_executed = condition_true (dsc->u.block.cond, status);
6075   CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
6076   CORE_ADDR stm_insn_addr;
6079   enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
6081   /* If condition code fails, there's nothing else to do.  */
6082   if (!store_executed)
6085   if (dsc->u.block.increment)
/* PC (r15) is always the highest-numbered register, hence the last word
   stored for incrementing forms and the first for decrementing forms.  */
6087       pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
6089       if (dsc->u.block.before)
6094       pc_stored_at = dsc->u.block.xfer_addr;
6096       if (dsc->u.block.before)
6100   pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
6101   stm_insn_addr = dsc->scratch_base;
6102   offset = pc_val - stm_insn_addr;
6104   if (debug_displaced)
6105     fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
6106 			"STM instruction\n", offset);
6108   /* Rewrite the stored PC to the proper value for the non-displaced original
6110   write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
6111 				 dsc->insn_addr + offset);
6114 /* Clean up an LDM which includes the PC in the register list.  We clumped all
6115    the registers in the transferred list into a contiguous range r0...rX (to
6116    avoid loading PC directly and losing control of the debugged program), so we
6117    must undo that here.  */
/* NOTE(review): the scatter walks WRITE_REG downward from r15; each set bit
   in the original mask receives the value the copied insn left in the next
   highest clumped register (READ_REG counts down from regs_loaded-1).
   CLOBBERED tracks which of r0..r(N-1) still hold transfer data and must be
   restored from dsc->tmp[] afterwards.  */
6120 cleanup_block_load_pc (struct gdbarch *gdbarch,
6121 		       struct regcache *regs,
6122 		       struct displaced_step_closure *dsc)
6124   ULONGEST from = dsc->insn_addr;
6125   uint32_t status = displaced_read_reg (regs, from, ARM_PS_REGNUM);
6126   int load_executed = condition_true (dsc->u.block.cond, status), i;
6127   unsigned int mask = dsc->u.block.regmask, write_reg = 15;
6128   unsigned int regs_loaded = bitcount (mask);
6129   unsigned int num_to_shuffle = regs_loaded, clobbered;
6131   /* The method employed here will fail if the register list is fully populated
6132      (we need to avoid loading PC directly).  */
6133   gdb_assert (num_to_shuffle < 16);
6138       clobbered = (1 << num_to_shuffle) - 1;
6140       while (num_to_shuffle > 0)
6142 	  if ((mask & (1 << write_reg)) != 0)
6144 	      unsigned int read_reg = num_to_shuffle - 1;
6146 	      if (read_reg != write_reg)
6148 		  ULONGEST rval = displaced_read_reg (regs, from, read_reg);
6149 		  displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
6150 		  if (debug_displaced)
6151 		    fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
6152 					"loaded register r%d to r%d\n"), read_reg,
6155 	      else if (debug_displaced)
6156 		fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
6157 				    "r%d already in the right place\n"),
6160 	      clobbered &= ~(1 << write_reg);
6168       /* Restore any registers we scribbled over.  */
6169       for (write_reg = 0; clobbered != 0; write_reg++)
6171 	  if ((clobbered & (1 << write_reg)) != 0)
6173 	      displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
6175 	      if (debug_displaced)
6176 		fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
6177 				    "clobbered register r%d\n"), write_reg);
6178 	      clobbered &= ~(1 << write_reg);
6182   /* Perform register writeback manually.  */
6183   if (dsc->u.block.writeback)
6185       ULONGEST new_rn_val = dsc->u.block.xfer_addr;
6187       if (dsc->u.block.increment)
6188 	new_rn_val += regs_loaded * 4;
6190 	new_rn_val -= regs_loaded * 4;
6192       displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
6197 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
6198 in user-level code (in particular exception return, ldm rn, {...pc}^). */
6201 copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
6202 struct displaced_step_closure *dsc)
/* Decode the LDM/STM fields from the instruction word.  */
6204 int load = bit (insn, 20);
6205 int user = bit (insn, 22);
6206 int increment = bit (insn, 23);
6207 int before = bit (insn, 24);
6208 int writeback = bit (insn, 21);
6209 int rn = bits (insn, 16, 19);
6210 CORE_ADDR from = dsc->insn_addr;
6212 /* Block transfers which don't mention PC can be run directly
6214 if (rn != 15 && (insn & 0x8000) == 0)
6215 return copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
6219 warning (_("displaced: Unpredictable LDM or STM with "
6220 "base register r15"));
6221 return copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
6224 if (debug_displaced)
6225 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6226 "%.8lx\n", (unsigned long) insn);
/* Record the decoded fields for the cleanup routine.  */
6228 dsc->u.block.xfer_addr = displaced_read_reg (regs, from, rn);
6229 dsc->u.block.rn = rn;
6231 dsc->u.block.load = load;
6232 dsc->u.block.user = user;
6233 dsc->u.block.increment = increment;
6234 dsc->u.block.before = before;
6235 dsc->u.block.writeback = writeback;
6236 dsc->u.block.cond = bits (insn, 28, 31);
6238 dsc->u.block.regmask = insn & 0xffff;
6242 if ((insn & 0xffff) == 0xffff)
6244 /* LDM with a fully-populated register list. This case is
6245 particularly tricky. Implement for now by fully emulating the
6246 instruction (which might not behave perfectly in all cases, but
6247 these instructions should be rare enough for that not to matter
6249 dsc->modinsn[0] = ARM_NOP;
6251 dsc->cleanup = &cleanup_block_load_all;
6255 /* LDM of a list of registers which includes PC. Implement by
6256 rewriting the list of registers to be transferred into a
6257 contiguous chunk r0...rX before doing the transfer, then shuffling
6258 registers into the correct places in the cleanup routine. */
6259 unsigned int regmask = insn & 0xffff;
6260 unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
6261 unsigned int to = 0, from = 0, i, new_rn;
/* Save the registers we are about to clobber so cleanup can restore
   them (see cleanup_block_load_pc).  */
6263 for (i = 0; i < num_in_list; i++)
6264 dsc->tmp[i] = displaced_read_reg (regs, from, i);
6266 /* Writeback makes things complicated. We need to avoid clobbering
6267 the base register with one of the registers in our modified
6268 register list, but just using a different register can't work in
6271 ldm r14!, {r0-r13,pc}
6273 which would need to be rewritten as:
6277 but that can't work, because there's no free register for N.
6279 Solve this by turning off the writeback bit, and emulating
6280 writeback manually in the cleanup routine. */
6285 new_regmask = (1 << num_in_list) - 1;
6287 if (debug_displaced)
6288 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6289 "{..., pc}: original reg list %.4x, modified "
6290 "list %.4x\n"), rn, writeback ? "!" : "",
6291 (int) insn & 0xffff, new_regmask);
6293 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
6295 dsc->cleanup = &cleanup_block_load_pc;
6300 /* STM of a list of registers which includes PC. Run the instruction
6301 as-is, but out of line: this will store the wrong value for the PC,
6302 so we must manually fix up the memory in the cleanup routine.
6303 Doing things this way has the advantage that we can auto-detect
6304 the offset of the PC write (which is architecture-dependent) in
6305 the cleanup routine. */
6306 dsc->modinsn[0] = insn;
6308 dsc->cleanup = &cleanup_block_store_pc;
6314 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6315 for Linux, where some SVC instructions must be treated specially. */
/* After the displaced SVC has executed, resume at the instruction
   following the original SVC (insn_addr + 4).  */
6318 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
6319 struct displaced_step_closure *dsc)
6321 CORE_ADDR from = dsc->insn_addr;
6322 CORE_ADDR resume_addr = from + 4;
6324 if (debug_displaced)
6325 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
6326 "%.8lx\n", (unsigned long) resume_addr);
6328 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
/* Copy an SVC instruction for displaced stepping.  OS-specific code may
   install a copy_svc_os hook (e.g. for Linux), which takes precedence;
   otherwise the SVC is executed unmodified and cleanup_svc fixes the PC.  */
6332 copy_svc (struct gdbarch *gdbarch, uint32_t insn, CORE_ADDR to,
6333 struct regcache *regs, struct displaced_step_closure *dsc)
6335 CORE_ADDR from = dsc->insn_addr;
6337 /* Allow OS-specific code to override SVC handling. */
6338 if (dsc->u.svc.copy_svc_os)
6339 return dsc->u.svc.copy_svc_os (gdbarch, insn, to, regs, dsc);
6341 if (debug_displaced)
6342 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
6343 (unsigned long) insn);
6345 /* Preparation: none.
6346 Insn: unmodified svc.
6347 Cleanup: pc <- insn_addr + 4. */
6349 dsc->modinsn[0] = insn;
6351 dsc->cleanup = &cleanup_svc;
6352 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
6354 dsc->wrote_to_pc = 1;
6359 /* Copy undefined instructions. */
/* Execute the undefined instruction out of line unmodified, so that it
   raises its trap at the scratch location just as it would in place.  */
6362 copy_undef (struct gdbarch *gdbarch, uint32_t insn,
6363 struct displaced_step_closure *dsc)
6365 if (debug_displaced)
6366 fprintf_unfiltered (gdb_stdlog,
6367 "displaced: copying undefined insn %.8lx\n",
6368 (unsigned long) insn);
6370 dsc->modinsn[0] = insn;
6375 /* Copy unpredictable instructions. */
/* Architecturally UNPREDICTABLE encodings are copied unmodified; their
   behaviour is undefined either way, so no fixup is attempted.  */
6378 copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
6379 struct displaced_step_closure *dsc)
6381 if (debug_displaced)
6382 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
6383 "%.8lx\n", (unsigned long) insn);
6385 dsc->modinsn[0] = insn;
6390 /* The decode_* functions are instruction decoding helpers. They mostly follow
6391 the presentation in the ARM ARM. */
/* Decode the miscellaneous / memory-hint / Advanced SIMD space of the
   unconditional instruction encodings (CPS, SETEND, PLD/PLI, barriers,
   NEON load/store, etc.) and dispatch to the appropriate copy routine.  */
6394 decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
6395 struct regcache *regs,
6396 struct displaced_step_closure *dsc)
6398 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
6399 unsigned int rn = bits (insn, 16, 19);
6401 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0xe) == 0x0)
6402 return copy_unmodified (gdbarch, insn, "cps", dsc);
6403 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0xe) == 0x1)
6404 return copy_unmodified (gdbarch, insn, "setend", dsc);
6405 else if ((op1 & 0x60) == 0x20)
6406 return copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
6407 else if ((op1 & 0x71) == 0x40)
6408 return copy_unmodified (gdbarch, insn, "neon elt/struct load/store", dsc);
6409 else if ((op1 & 0x77) == 0x41)
6410 return copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6411 else if ((op1 & 0x77) == 0x45)
6412 return copy_preload (gdbarch, insn, regs, dsc); /* pli. */
6413 else if ((op1 & 0x77) == 0x51)
6416 return copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6418 return copy_unpred (gdbarch, insn, dsc);
6420 else if ((op1 & 0x77) == 0x55)
6421 return copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6422 else if (op1 == 0x57)
/* Barrier group, selected by op2 (switch body partly elided here).  */
6425 case 0x1: return copy_unmodified (gdbarch, insn, "clrex", dsc);
6426 case 0x4: return copy_unmodified (gdbarch, insn, "dsb", dsc);
6427 case 0x5: return copy_unmodified (gdbarch, insn, "dmb", dsc);
6428 case 0x6: return copy_unmodified (gdbarch, insn, "isb", dsc);
6429 default: return copy_unpred (gdbarch, insn, dsc);
6431 else if ((op1 & 0x63) == 0x43)
6432 return copy_unpred (gdbarch, insn, dsc);
6433 else if ((op2 & 0x1) == 0x0)
6434 switch (op1 & ~0x80)
6437 return copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6439 return copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
6440 case 0x71: case 0x75:
6442 return copy_preload_reg (gdbarch, insn, regs, dsc);
6443 case 0x63: case 0x67: case 0x73: case 0x77:
6444 return copy_unpred (gdbarch, insn, dsc);
6446 return copy_undef (gdbarch, insn, dsc);
6449 return copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
/* Decode the unconditional (cond == 0xf) ARM instruction space: SRS/RFE,
   BLX immediate, coprocessor load/store, MCRR/MRRC, MCR/MRC and CDP
   variants, dispatching each to the matching copy routine.  */
6453 decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
6454 struct regcache *regs,
6455 struct displaced_step_closure *dsc)
6457 if (bit (insn, 27) == 0)
6458 return decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
6459 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
6460 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
6463 return copy_unmodified (gdbarch, insn, "srs", dsc);
6466 return copy_unmodified (gdbarch, insn, "rfe", dsc);
6468 case 0x4: case 0x5: case 0x6: case 0x7:
/* b/bl/blx immediate forms need the target recomputed.  */
6469 return copy_b_bl_blx (gdbarch, insn, regs, dsc);
6472 switch ((insn & 0xe00000) >> 21)
6474 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
6476 return copy_copro_load_store (gdbarch, insn, regs, dsc);
6479 return copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6482 return copy_undef (gdbarch, insn, dsc);
6487 int rn_f = (bits (insn, 16, 19) == 0xf);
6488 switch ((insn & 0xe00000) >> 21)
6491 /* ldc/ldc2 imm (undefined for rn == pc). */
6492 return rn_f ? copy_undef (gdbarch, insn, dsc)
6493 : copy_copro_load_store (gdbarch, insn, regs, dsc);
6496 return copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6498 case 0x4: case 0x5: case 0x6: case 0x7:
6499 /* ldc/ldc2 lit (undefined for rn != pc). */
6500 return rn_f ? copy_copro_load_store (gdbarch, insn, regs, dsc)
6501 : copy_undef (gdbarch, insn, dsc);
6504 return copy_undef (gdbarch, insn, dsc);
6509 return copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
6512 if (bits (insn, 16, 19) == 0xf)
6514 return copy_copro_load_store (gdbarch, insn, regs, dsc);
6516 return copy_undef (gdbarch, insn, dsc);
6520 return copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6522 return copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6526 return copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6528 return copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6531 return copy_undef (gdbarch, insn, dsc);
6535 /* Decode miscellaneous instructions in dp/misc encoding space. */
/* Covers MRS/MSR, BX/BLX register, CLZ, BXJ, saturating add/sub, BKPT
   and SMC; only the branch-register forms need PC fixups, the rest are
   copied unmodified.  */
6538 decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
6539 struct regcache *regs,
6540 struct displaced_step_closure *dsc)
6542 unsigned int op2 = bits (insn, 4, 6);
6543 unsigned int op = bits (insn, 21, 22);
6544 unsigned int op1 = bits (insn, 16, 19);
6549 return copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
6552 if (op == 0x1) /* bx. */
6553 return copy_bx_blx_reg (gdbarch, insn, regs, dsc);
6555 return copy_unmodified (gdbarch, insn, "clz", dsc);
6557 return copy_undef (gdbarch, insn, dsc);
6561 /* Not really supported. */
6562 return copy_unmodified (gdbarch, insn, "bxj", dsc);
6564 return copy_undef (gdbarch, insn, dsc);
6568 return copy_bx_blx_reg (gdbarch, insn,
6569 regs, dsc); /* blx register. */
6571 return copy_undef (gdbarch, insn, dsc);
6574 return copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
6578 return copy_unmodified (gdbarch, insn, "bkpt", dsc);
6580 /* Not really supported. */
6581 return copy_unmodified (gdbarch, insn, "smc", dsc);
6584 return copy_undef (gdbarch, insn, dsc);
/* Decode the data-processing / miscellaneous instruction space:
   immediate ALU forms, MOVW/MOVT, MSR-immediate, register and
   register-shifted-register ALU forms, multiplies, synchronization
   primitives, and the extra load/store encodings.  */
6589 decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
6590 struct displaced_step_closure *dsc)
6593 switch (bits (insn, 20, 24))
6596 return copy_unmodified (gdbarch, insn, "movw", dsc);
6599 return copy_unmodified (gdbarch, insn, "movt", dsc);
6601 case 0x12: case 0x16:
6602 return copy_unmodified (gdbarch, insn, "msr imm", dsc);
6605 return copy_alu_imm (gdbarch, insn, regs, dsc);
6609 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
6611 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
6612 return copy_alu_reg (gdbarch, insn, regs, dsc);
6613 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
6614 return copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
6615 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
6616 return decode_miscellaneous (gdbarch, insn, regs, dsc);
6617 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
6618 return copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
6619 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
6620 return copy_unmodified (gdbarch, insn, "mul/mla", dsc);
6621 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
6622 return copy_unmodified (gdbarch, insn, "synch", dsc);
6623 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
6624 /* 2nd arg means "unpriveleged". */
6625 return copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
6629 /* Should be unreachable. */
/* Decode word/unsigned-byte load/store encodings (LDR/STR/LDRB/STRB and
   their unprivileged T variants), dispatching to copy_ldr_str_ldrb_strb
   with flags (load, byte, user/unprivileged) derived from op1.  A is the
   register-offset bit (bit 25); B is bit 4 (must be clear for the
   register-offset forms to be valid).  */
6634 decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
6635 struct regcache *regs,
6636 struct displaced_step_closure *dsc)
6638 int a = bit (insn, 25), b = bit (insn, 4);
6639 uint32_t op1 = bits (insn, 20, 24);
6640 int rn_f = bits (insn, 16, 19) == 0xf;
6642 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
6643 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
6644 return copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 0, 0);
6645 else if ((!a && (op1 & 0x17) == 0x02)
6646 || (a && (op1 & 0x17) == 0x02 && !b))
6647 return copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 0, 1);
6648 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
6649 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
6650 return copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 0, 0);
6651 else if ((!a && (op1 & 0x17) == 0x03)
6652 || (a && (op1 & 0x17) == 0x03 && !b))
6653 return copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 0, 1);
6654 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
6655 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
6656 return copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
6657 else if ((!a && (op1 & 0x17) == 0x06)
6658 || (a && (op1 & 0x17) == 0x06 && !b))
6659 return copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
6660 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
6661 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
6662 return copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
6663 else if ((!a && (op1 & 0x17) == 0x07)
6664 || (a && (op1 & 0x17) == 0x07 && !b))
6665 return copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
6667 /* Should be unreachable. */
/* Decode the media instruction space: parallel add/sub, pack/unpack/
   saturate/reverse, USAD8/USADA8, and the bit-field instructions
   SBFX/BFC/BFI/UBFX.  None of these reference the PC, so all are copied
   unmodified or flagged undefined.  */
6672 decode_media (struct gdbarch *gdbarch, uint32_t insn,
6673 struct displaced_step_closure *dsc)
6675 switch (bits (insn, 20, 24))
6677 case 0x00: case 0x01: case 0x02: case 0x03:
6678 return copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
6680 case 0x04: case 0x05: case 0x06: case 0x07:
6681 return copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
6683 case 0x08: case 0x09: case 0x0a: case 0x0b:
6684 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
6685 return copy_unmodified (gdbarch, insn,
6686 "decode/pack/unpack/saturate/reverse", dsc);
6689 if (bits (insn, 5, 7) == 0) /* op2. */
6691 if (bits (insn, 12, 15) == 0xf)
6692 return copy_unmodified (gdbarch, insn, "usad8", dsc);
6694 return copy_unmodified (gdbarch, insn, "usada8", dsc);
6697 return copy_undef (gdbarch, insn, dsc);
6699 case 0x1a: case 0x1b:
6700 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6701 return copy_unmodified (gdbarch, insn, "sbfx", dsc);
6703 return copy_undef (gdbarch, insn, dsc);
6705 case 0x1c: case 0x1d:
6706 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
6708 if (bits (insn, 0, 3) == 0xf)
6709 return copy_unmodified (gdbarch, insn, "bfc", dsc);
6711 return copy_unmodified (gdbarch, insn, "bfi", dsc);
6714 return copy_undef (gdbarch, insn, dsc);
6716 case 0x1e: case 0x1f:
6717 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6718 return copy_unmodified (gdbarch, insn, "ubfx", dsc);
6720 return copy_undef (gdbarch, insn, dsc);
6723 /* Should be unreachable. */
/* Dispatch between branch (b/bl/blx) and block-transfer (ldm/stm)
   handling for this encoding region; the selecting condition is elided
   in this excerpt -- presumably bit 25 of INSN, TODO confirm.  */
6728 decode_b_bl_ldmstm (struct gdbarch *gdbarch, int32_t insn,
6729 struct regcache *regs, struct displaced_step_closure *dsc)
6732 return copy_b_bl_blx (gdbarch, insn, regs, dsc);
6734 return copy_block_xfer (gdbarch, insn, regs, dsc);
/* Decode extension-register (VFP/NEON) load/store encodings: mrrc/mcrr
   transfers, vstm/vpush, vldm/vpop, and vstr/vldr.  */
6738 decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
6739 struct regcache *regs,
6740 struct displaced_step_closure *dsc)
6742 unsigned int opcode = bits (insn, 20, 24);
6746 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
6747 return copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
6749 case 0x08: case 0x0a: case 0x0c: case 0x0e:
6750 case 0x12: case 0x16:
6751 return copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
6753 case 0x09: case 0x0b: case 0x0d: case 0x0f:
6754 case 0x13: case 0x17:
6755 return copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
6757 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6758 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6759 /* Note: no writeback for these instructions. Bit 25 will always be
6760 zero though (via caller), so the following works OK. */
6761 return copy_copro_load_store (gdbarch, insn, regs, dsc);
6764 /* Should be unreachable. */
/* Decode the supervisor-call / coprocessor instruction space.
   Coprocessors 0xa/0xb (VFP/NEON) get the extension-register path;
   other coprocessors get generic ldc/stc, mcrr/mrrc, cdp, mcr/mrc
   handling; op1 top bits 0x30 selects SVC.  */
6769 decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn, CORE_ADDR to,
6770 struct regcache *regs, struct displaced_step_closure *dsc)
6772 unsigned int op1 = bits (insn, 20, 25);
6773 int op = bit (insn, 4);
6774 unsigned int coproc = bits (insn, 8, 11);
6775 unsigned int rn = bits (insn, 16, 19);
6777 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
6778 return decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
6779 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
6780 && (coproc & 0xe) != 0xa)
6782 return copy_copro_load_store (gdbarch, insn, regs, dsc);
6783 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
6784 && (coproc & 0xe) != 0xa)
6785 /* ldc/ldc2 imm/lit. */
6786 return copy_copro_load_store (gdbarch, insn, regs, dsc);
6787 else if ((op1 & 0x3e) == 0x00)
6788 return copy_undef (gdbarch, insn, dsc);
6789 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
6790 return copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
6791 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
6792 return copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6793 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
6794 return copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6795 else if ((op1 & 0x30) == 0x20 && !op)
6797 if ((coproc & 0xe) == 0xa)
6798 return copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
6800 return copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6802 else if ((op1 & 0x30) == 0x20 && op)
6803 return copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
6804 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
6805 return copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6806 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
6807 return copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6808 else if ((op1 & 0x30) == 0x30)
6809 return copy_svc (gdbarch, insn, to, regs, dsc);
6811 return copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
/* Top-level decoder for displaced stepping: classify INSN (read from
   FROM) by its major encoding bits and fill DSC with the modified
   instruction(s) and cleanup hook.  Only ARM mode is supported; Thumb
   raises an error.  Errors from the decoders trigger an internal
   error.  */
6815 arm_process_displaced_insn (struct gdbarch *gdbarch, uint32_t insn,
6816 CORE_ADDR from, CORE_ADDR to,
6817 struct regcache *regs,
6818 struct displaced_step_closure *dsc)
6822 if (!displaced_in_arm_mode (regs))
6823 error (_("Displaced stepping is only supported in ARM mode"));
6825 /* Most displaced instructions use a 1-instruction scratch space, so set this
6826 here and override below if/when necessary. */
6828 dsc->insn_addr = from;
6829 dsc->scratch_base = to;
6830 dsc->cleanup = NULL;
6831 dsc->wrote_to_pc = 0;
6833 if ((insn & 0xf0000000) == 0xf0000000)
6834 err = decode_unconditional (gdbarch, insn, regs, dsc);
6835 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
6837 case 0x0: case 0x1: case 0x2: case 0x3:
6838 err = decode_dp_misc (gdbarch, insn, regs, dsc);
6841 case 0x4: case 0x5: case 0x6:
6842 err = decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
6846 err = decode_media (gdbarch, insn, dsc);
6849 case 0x8: case 0x9: case 0xa: case 0xb:
6850 err = decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
6853 case 0xc: case 0xd: case 0xe: case 0xf:
6854 err = decode_svc_copro (gdbarch, insn, to, regs, dsc);
6859 internal_error (__FILE__, __LINE__,
6860 _("arm_process_displaced_insn: Instruction decode error"));
6863 /* Actually set up the scratch space for a displaced instruction. */
/* Write DSC's modified instruction(s) to the scratch area at TO,
   followed by an ARM breakpoint so control returns to GDB afterwards.  */
6866 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
6867 CORE_ADDR to, struct displaced_step_closure *dsc)
6869 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
6871 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
6873 /* Poke modified instruction(s). */
6874 for (i = 0; i < dsc->numinsns; i++)
6876 if (debug_displaced)
6877 fprintf_unfiltered (gdb_stdlog, "displaced: writing insn %.8lx at "
6878 "%.8lx\n", (unsigned long) dsc->modinsn[i],
6879 (unsigned long) to + i * 4);
6880 write_memory_unsigned_integer (to + i * 4, 4, byte_order_for_code,
6884 /* Put breakpoint afterwards. */
6885 write_memory (to + dsc->numinsns * 4, tdep->arm_breakpoint,
6886 tdep->arm_breakpoint_size);
6888 if (debug_displaced)
6889 fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
6890 paddress (gdbarch, from), paddress (gdbarch, to));
6893 /* Entry point for copying an instruction into scratch space for displaced
/* Allocates the step closure (caller owns it; freed by generic displaced
   stepping code), decodes the instruction at FROM, and populates the
   scratch area at TO.  */
6896 struct displaced_step_closure *
6897 arm_displaced_step_copy_insn (struct gdbarch *gdbarch,
6898 CORE_ADDR from, CORE_ADDR to,
6899 struct regcache *regs)
6901 struct displaced_step_closure *dsc
6902 = xmalloc (sizeof (struct displaced_step_closure));
6903 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
6904 uint32_t insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
6906 if (debug_displaced)
6907 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
6908 "at %.8lx\n", (unsigned long) insn,
6909 (unsigned long) from);
6911 arm_process_displaced_insn (gdbarch, insn, from, to, regs, dsc);
6912 arm_displaced_init_closure (gdbarch, from, to, dsc);
6917 /* Entry point for cleaning things up after a displaced instruction has been
/* Runs the per-instruction cleanup hook (if any), then advances the PC
   past the original instruction unless the cleanup already wrote it.  */
6921 arm_displaced_step_fixup (struct gdbarch *gdbarch,
6922 struct displaced_step_closure *dsc,
6923 CORE_ADDR from, CORE_ADDR to,
6924 struct regcache *regs)
6927 dsc->cleanup (gdbarch, regs, dsc);
6929 if (!dsc->wrote_to_pc)
6930 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, dsc->insn_addr + 4);
6933 #include "bfd-in2.h"
6934 #include "libcoff.h"
/* Disassembly wrapper: when MEMADDR is a Thumb address, install a fake
   COFF Thumb symbol so opcodes' print_insn_{big,little}_arm switches to
   Thumb decoding; then dispatch on target endianness.  */
6937 gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
6939 struct gdbarch *gdbarch = info->application_data;
6941 if (arm_pc_is_thumb (gdbarch, memaddr))
6943 static asymbol *asym;
6944 static combined_entry_type ce;
6945 static struct coff_symbol_struct csym;
6946 static struct bfd fake_bfd;
6947 static bfd_target fake_target;
/* Build the fake symbol once; reused across calls (static storage).  */
6949 if (csym.native == NULL)
6951 /* Create a fake symbol vector containing a Thumb symbol.
6952 This is solely so that the code in print_insn_little_arm()
6953 and print_insn_big_arm() in opcodes/arm-dis.c will detect
6954 the presence of a Thumb symbol and switch to decoding
6955 Thumb instructions. */
6957 fake_target.flavour = bfd_target_coff_flavour;
6958 fake_bfd.xvec = &fake_target;
6959 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
6961 csym.symbol.the_bfd = &fake_bfd;
6962 csym.symbol.name = "fake";
6963 asym = (asymbol *) & csym;
6966 memaddr = UNMAKE_THUMB_ADDR (memaddr);
6967 info->symbols = &asym;
6970 info->symbols = NULL;
6972 if (info->endian == BFD_ENDIAN_BIG)
6973 return print_insn_big_arm (memaddr, info);
6975 return print_insn_little_arm (memaddr, info);
6978 /* The following define instruction sequences that will cause ARM
6979 cpu's to take an undefined instruction trap. These are used to
6980 signal a breakpoint to GDB.
6982 The newer ARMv4T cpu's are capable of operating in ARM or Thumb
6983 modes. A different instruction is required for each mode. The ARM
6984 cpu's can also be big or little endian. Thus four different
6985 instructions are needed to support all cases.
6987 Note: ARMv4 defines several new instructions that will take the
6988 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
6989 not in fact add the new instructions. The new undefined
6990 instructions in ARMv4 are all instructions that had no defined
6991 behaviour in earlier chips. There is no guarantee that they will
6992 raise an exception, but may be treated as NOP's. In practice, it
6993 may only safe to rely on instructions matching:
6995 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
6996 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
6997 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
6999 Even this may only true if the condition predicate is true. The
7000 following use a condition predicate of ALWAYS so it is always TRUE.
7002 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
7003 and NetBSD all use a software interrupt rather than an undefined
7004 instruction to force a trap. This can be handled by by the
7005 abi-specific code during establishment of the gdbarch vector. */
7007 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
7008 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
7009 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
7010 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
7012 static const char arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
7013 static const char arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
7014 static const char arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
7015 static const char arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
7017 /* Determine the type and size of breakpoint to insert at PCPTR. Uses
7018 the program counter value to determine whether a 16-bit or 32-bit
7019 breakpoint should be used. It returns a pointer to a string of
7020 bytes that encode a breakpoint instruction, stores the length of
7021 the string to *lenptr, and adjusts the program counter (if
7022 necessary) to point to the actual memory location where the
7023 breakpoint should be inserted. */
7025 static const unsigned char *
7026 arm_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr, int *lenptr)
7028 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7029 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7031 if (arm_pc_is_thumb (gdbarch, *pcptr))
/* Strip the Thumb bit so the breakpoint lands on the real address.  */
7033 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
7035 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
7036 check whether we are replacing a 32-bit instruction. */
7037 if (tdep->thumb2_breakpoint != NULL)
7040 if (target_read_memory (*pcptr, buf, 2) == 0)
7042 unsigned short inst1;
7043 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
/* First halfword of a 32-bit Thumb-2 insn matches 0b111xx (not 0b11100).  */
7044 if ((inst1 & 0xe000) == 0xe000 && (inst1 & 0x1800) != 0)
7046 *lenptr = tdep->thumb2_breakpoint_size;
7047 return tdep->thumb2_breakpoint;
7052 *lenptr = tdep->thumb_breakpoint_size;
7053 return tdep->thumb_breakpoint;
7057 *lenptr = tdep->arm_breakpoint_size;
7058 return tdep->arm_breakpoint;
/* Remote-protocol variant: report the breakpoint "kind", mapping the
   4-byte Thumb-2 breakpoint to the documented magic value so it is not
   confused with a 4-byte ARM breakpoint.  */
7063 arm_remote_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr,
7066 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7068 arm_breakpoint_from_pc (gdbarch, pcptr, kindptr);
7070 if (arm_pc_is_thumb (gdbarch, *pcptr) && *kindptr == 4)
7071 /* The documented magic value for a 32-bit Thumb-2 breakpoint, so
7072 that this is not confused with a 32-bit ARM breakpoint. */
7076 /* Extract from an array REGBUF containing the (raw) register state a
7077 function return value of type TYPE, and copy that, in virtual
7078 format, into VALBUF. */
7081 arm_extract_return_value (struct type *type, struct regcache *regs,
7084 struct gdbarch *gdbarch = get_regcache_arch (regs);
7085 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7087 if (TYPE_CODE_FLT == TYPE_CODE (type))
/* Floating-point return: location depends on the FP model in use.  */
7089 switch (gdbarch_tdep (gdbarch)->fp_model)
7093 /* The value is in register F0 in internal format. We need to
7094 extract the raw value and then convert it to the desired
7096 bfd_byte tmpbuf[FP_REGISTER_SIZE];
7098 regcache_cooked_read (regs, ARM_F0_REGNUM, tmpbuf);
7099 convert_from_extended (floatformat_from_type (type), tmpbuf,
7100 valbuf, gdbarch_byte_order (gdbarch));
7104 case ARM_FLOAT_SOFT_FPA:
7105 case ARM_FLOAT_SOFT_VFP:
7106 /* ARM_FLOAT_VFP can arise if this is a variadic function so
7107 not using the VFP ABI code. */
7109 regcache_cooked_read (regs, ARM_A1_REGNUM, valbuf);
7110 if (TYPE_LENGTH (type) > 4)
7111 regcache_cooked_read (regs, ARM_A1_REGNUM + 1,
7112 valbuf + INT_REGISTER_SIZE);
7116 internal_error (__FILE__, __LINE__,
7117 _("arm_extract_return_value: "
7118 "Floating point model not supported"));
7122 else if (TYPE_CODE (type) == TYPE_CODE_INT
7123 || TYPE_CODE (type) == TYPE_CODE_CHAR
7124 || TYPE_CODE (type) == TYPE_CODE_BOOL
7125 || TYPE_CODE (type) == TYPE_CODE_PTR
7126 || TYPE_CODE (type) == TYPE_CODE_REF
7127 || TYPE_CODE (type) == TYPE_CODE_ENUM)
7129 /* If the the type is a plain integer, then the access is
7130 straight-forward. Otherwise we have to play around a bit more. */
7131 int len = TYPE_LENGTH (type);
7132 int regno = ARM_A1_REGNUM;
7137 /* By using store_unsigned_integer we avoid having to do
7138 anything special for small big-endian values. */
7139 regcache_cooked_read_unsigned (regs, regno++, &tmp);
7140 store_unsigned_integer (valbuf,
7141 (len > INT_REGISTER_SIZE
7142 ? INT_REGISTER_SIZE : len),
7144 len -= INT_REGISTER_SIZE;
7145 valbuf += INT_REGISTER_SIZE;
7150 /* For a structure or union the behaviour is as if the value had
7151 been stored to word-aligned memory and then loaded into
7152 registers with 32-bit load instruction(s). */
7153 int len = TYPE_LENGTH (type);
7154 int regno = ARM_A1_REGNUM;
7155 bfd_byte tmpbuf[INT_REGISTER_SIZE];
7159 regcache_cooked_read (regs, regno++, tmpbuf);
7160 memcpy (valbuf, tmpbuf,
7161 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
7162 len -= INT_REGISTER_SIZE;
7163 valbuf += INT_REGISTER_SIZE;
7169 /* Will a function return an aggregate type in memory or in a
7170 register? Return 0 if an aggregate type can be returned in a
7171 register, 1 if it must be returned in memory. */
7174 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
7177 enum type_code code;
7179 CHECK_TYPEDEF (type);
7181 /* In the ARM ABI, "integer" like aggregate types are returned in
7182 registers. For an aggregate type to be integer like, its size
7183 must be less than or equal to INT_REGISTER_SIZE and the
7184 offset of each addressable subfield must be zero. Note that bit
7185 fields are not addressable, and all addressable subfields of
7186 unions always start at offset zero.
7188 This function is based on the behaviour of GCC 2.95.1.
7189 See: gcc/arm.c: arm_return_in_memory() for details.
7191 Note: All versions of GCC before GCC 2.95.2 do not set up the
7192 parameters correctly for a function returning the following
7193 structure: struct { float f;}; This should be returned in memory,
7194 not a register. Richard Earnshaw sent me a patch, but I do not
7195 know of any way to detect if a function like the above has been
7196 compiled with the correct calling convention. */
7198 /* All aggregate types that won't fit in a register must be returned
7200 if (TYPE_LENGTH (type) > INT_REGISTER_SIZE)
7205 /* The AAPCS says all aggregates not larger than a word are returned
7207 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
7210 /* The only aggregate types that can be returned in a register are
7211 structs and unions. Arrays must be returned in memory. */
7212 code = TYPE_CODE (type);
7213 if ((TYPE_CODE_STRUCT != code) && (TYPE_CODE_UNION != code))
7218 /* Assume all other aggregate types can be returned in a register.
7219 Run a check for structures, unions and arrays. */
7222 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
7225 /* Need to check if this struct/union is "integer" like. For
7226 this to be true, its size must be less than or equal to
7227 INT_REGISTER_SIZE and the offset of each addressable
7228 subfield must be zero. Note that bit fields are not
7229 addressable, and unions always start at offset zero. If any
7230 of the subfields is a floating point type, the struct/union
7231 cannot be an integer type. */
7233 /* For each field in the object, check:
7234 1) Is it FP? --> yes, nRc = 1;
7235 2) Is it addressable (bitpos != 0) and
7236 not packed (bitsize == 0)?
7240 for (i = 0; i < TYPE_NFIELDS (type); i++)
7242 enum type_code field_type_code;
7243 field_type_code = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
7246 /* Is it a floating point type field? */
7247 if (field_type_code == TYPE_CODE_FLT)
7253 /* If bitpos != 0, then we have to care about it. */
7254 if (TYPE_FIELD_BITPOS (type, i) != 0)
7256 /* Bitfields are not addressable. If the field bitsize is
7257 zero, then the field is not packed. Hence it cannot be
7258 a bitfield or any other packed type. */
7259 if (TYPE_FIELD_BITSIZE (type, i) == 0)
7271 /* Write into appropriate registers a function return value of type
7272 TYPE, given in virtual format. */
7275 arm_store_return_value (struct type *type, struct regcache *regs,
7276 const gdb_byte *valbuf)
7278 struct gdbarch *gdbarch = get_regcache_arch (regs);
7279 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7281 if (TYPE_CODE (type) == TYPE_CODE_FLT)
7283 char buf[MAX_REGISTER_SIZE];
7285 switch (gdbarch_tdep (gdbarch)->fp_model)
7289 convert_to_extended (floatformat_from_type (type), buf, valbuf,
7290 gdbarch_byte_order (gdbarch));
7291 regcache_cooked_write (regs, ARM_F0_REGNUM, buf);
7294 case ARM_FLOAT_SOFT_FPA:
7295 case ARM_FLOAT_SOFT_VFP:
7296 /* ARM_FLOAT_VFP can arise if this is a variadic function so
7297 not using the VFP ABI code. */
7299 regcache_cooked_write (regs, ARM_A1_REGNUM, valbuf);
7300 if (TYPE_LENGTH (type) > 4)
7301 regcache_cooked_write (regs, ARM_A1_REGNUM + 1,
7302 valbuf + INT_REGISTER_SIZE);
7306 internal_error (__FILE__, __LINE__,
7307 _("arm_store_return_value: Floating "
7308 "point model not supported"));
7312 else if (TYPE_CODE (type) == TYPE_CODE_INT
7313 || TYPE_CODE (type) == TYPE_CODE_CHAR
7314 || TYPE_CODE (type) == TYPE_CODE_BOOL
7315 || TYPE_CODE (type) == TYPE_CODE_PTR
7316 || TYPE_CODE (type) == TYPE_CODE_REF
7317 || TYPE_CODE (type) == TYPE_CODE_ENUM)
7319 if (TYPE_LENGTH (type) <= 4)
7321 /* Values of one word or less are zero/sign-extended and
7323 bfd_byte tmpbuf[INT_REGISTER_SIZE];
7324 LONGEST val = unpack_long (type, valbuf);
7326 store_signed_integer (tmpbuf, INT_REGISTER_SIZE, byte_order, val);
7327 regcache_cooked_write (regs, ARM_A1_REGNUM, tmpbuf);
7331 /* Integral values greater than one word are stored in consecutive
7332 registers starting with r0. This will always be a multiple of
7333 the regiser size. */
7334 int len = TYPE_LENGTH (type);
7335 int regno = ARM_A1_REGNUM;
7339 regcache_cooked_write (regs, regno++, valbuf);
7340 len -= INT_REGISTER_SIZE;
7341 valbuf += INT_REGISTER_SIZE;
7347 /* For a structure or union the behaviour is as if the value had
7348 been stored to word-aligned memory and then loaded into
7349 registers with 32-bit load instruction(s). */
7350 int len = TYPE_LENGTH (type);
7351 int regno = ARM_A1_REGNUM;
7352 bfd_byte tmpbuf[INT_REGISTER_SIZE];
7356 memcpy (tmpbuf, valbuf,
7357 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
7358 regcache_cooked_write (regs, regno++, tmpbuf);
7359 len -= INT_REGISTER_SIZE;
7360 valbuf += INT_REGISTER_SIZE;
7366 /* Handle function return values. */
7368 static enum return_value_convention
7369 arm_return_value (struct gdbarch *gdbarch, struct type *func_type,
7370 struct type *valtype, struct regcache *regcache,
7371 gdb_byte *readbuf, const gdb_byte *writebuf)
7373 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7374 enum arm_vfp_cprc_base_type vfp_base_type;
7377 if (arm_vfp_abi_for_function (gdbarch, func_type)
7378 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
7380 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
7381 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
7383 for (i = 0; i < vfp_base_count; i++)
7385 if (reg_char == 'q')
7388 arm_neon_quad_write (gdbarch, regcache, i,
7389 writebuf + i * unit_length);
7392 arm_neon_quad_read (gdbarch, regcache, i,
7393 readbuf + i * unit_length);
7400 sprintf (name_buf, "%c%d", reg_char, i);
7401 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
7404 regcache_cooked_write (regcache, regnum,
7405 writebuf + i * unit_length);
7407 regcache_cooked_read (regcache, regnum,
7408 readbuf + i * unit_length);
7411 return RETURN_VALUE_REGISTER_CONVENTION;
7414 if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
7415 || TYPE_CODE (valtype) == TYPE_CODE_UNION
7416 || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
7418 if (tdep->struct_return == pcc_struct_return
7419 || arm_return_in_memory (gdbarch, valtype))
7420 return RETURN_VALUE_STRUCT_CONVENTION;
7424 arm_store_return_value (valtype, regcache, writebuf);
7427 arm_extract_return_value (valtype, regcache, readbuf);
7429 return RETURN_VALUE_REGISTER_CONVENTION;
7434 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
7436 struct gdbarch *gdbarch = get_frame_arch (frame);
7437 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7438 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7440 char buf[INT_REGISTER_SIZE];
7442 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
7444 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
7448 *pc = extract_unsigned_integer (buf, INT_REGISTER_SIZE, byte_order);
7452 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
7453 return the target PC. Otherwise return 0. */
7456 arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
7460 CORE_ADDR start_addr;
7462 /* Find the starting address and name of the function containing the PC. */
7463 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
7466 /* If PC is in a Thumb call or return stub, return the address of the
7467 target PC, which is in a register. The thunk functions are called
7468 _call_via_xx, where x is the register name. The possible names
7469 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
7470 functions, named __ARM_call_via_r[0-7]. */
7471 if (strncmp (name, "_call_via_", 10) == 0
7472 || strncmp (name, "__ARM_call_via_", strlen ("__ARM_call_via_")) == 0)
7474 /* Use the name suffix to determine which register contains the
7476 static char *table[15] =
7477 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
7478 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
7481 int offset = strlen (name) - 2;
7483 for (regno = 0; regno <= 14; regno++)
7484 if (strcmp (&name[offset], table[regno]) == 0)
7485 return get_frame_register_unsigned (frame, regno);
7488 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
7489 non-interworking calls to foo. We could decode the stubs
7490 to find the target but it's easier to use the symbol table. */
7491 namelen = strlen (name);
7492 if (name[0] == '_' && name[1] == '_'
7493 && ((namelen > 2 + strlen ("_from_thumb")
7494 && strncmp (name + namelen - strlen ("_from_thumb"), "_from_thumb",
7495 strlen ("_from_thumb")) == 0)
7496 || (namelen > 2 + strlen ("_from_arm")
7497 && strncmp (name + namelen - strlen ("_from_arm"), "_from_arm",
7498 strlen ("_from_arm")) == 0)))
7501 int target_len = namelen - 2;
7502 struct minimal_symbol *minsym;
7503 struct objfile *objfile;
7504 struct obj_section *sec;
7506 if (name[namelen - 1] == 'b')
7507 target_len -= strlen ("_from_thumb");
7509 target_len -= strlen ("_from_arm");
7511 target_name = alloca (target_len + 1);
7512 memcpy (target_name, name + 2, target_len);
7513 target_name[target_len] = '\0';
7515 sec = find_pc_section (pc);
7516 objfile = (sec == NULL) ? NULL : sec->objfile;
7517 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
7519 return SYMBOL_VALUE_ADDRESS (minsym);
7524 return 0; /* not a stub */
7528 set_arm_command (char *args, int from_tty)
7530 printf_unfiltered (_("\
7531 \"set arm\" must be followed by an apporpriate subcommand.\n"));
7532 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
7536 show_arm_command (char *args, int from_tty)
7538 cmd_show_list (showarmcmdlist, from_tty, "");
7542 arm_update_current_architecture (void)
7544 struct gdbarch_info info;
7546 /* If the current architecture is not ARM, we have nothing to do. */
7547 if (gdbarch_bfd_arch_info (target_gdbarch)->arch != bfd_arch_arm)
7550 /* Update the architecture. */
7551 gdbarch_info_init (&info);
7553 if (!gdbarch_update_p (info))
7554 internal_error (__FILE__, __LINE__, _("could not update architecture"));
7558 set_fp_model_sfunc (char *args, int from_tty,
7559 struct cmd_list_element *c)
7561 enum arm_float_model fp_model;
7563 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
7564 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
7566 arm_fp_model = fp_model;
7570 if (fp_model == ARM_FLOAT_LAST)
7571 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
7574 arm_update_current_architecture ();
7578 show_fp_model (struct ui_file *file, int from_tty,
7579 struct cmd_list_element *c, const char *value)
7581 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch);
7583 if (arm_fp_model == ARM_FLOAT_AUTO
7584 && gdbarch_bfd_arch_info (target_gdbarch)->arch == bfd_arch_arm)
7585 fprintf_filtered (file, _("\
7586 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
7587 fp_model_strings[tdep->fp_model]);
7589 fprintf_filtered (file, _("\
7590 The current ARM floating point model is \"%s\".\n"),
7591 fp_model_strings[arm_fp_model]);
7595 arm_set_abi (char *args, int from_tty,
7596 struct cmd_list_element *c)
7598 enum arm_abi_kind arm_abi;
7600 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
7601 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
7603 arm_abi_global = arm_abi;
7607 if (arm_abi == ARM_ABI_LAST)
7608 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
7611 arm_update_current_architecture ();
7615 arm_show_abi (struct ui_file *file, int from_tty,
7616 struct cmd_list_element *c, const char *value)
7618 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch);
7620 if (arm_abi_global == ARM_ABI_AUTO
7621 && gdbarch_bfd_arch_info (target_gdbarch)->arch == bfd_arch_arm)
7622 fprintf_filtered (file, _("\
7623 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
7624 arm_abi_strings[tdep->arm_abi]);
7626 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
7631 arm_show_fallback_mode (struct ui_file *file, int from_tty,
7632 struct cmd_list_element *c, const char *value)
7634 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch);
7636 fprintf_filtered (file,
7637 _("The current execution mode assumed "
7638 "(when symbols are unavailable) is \"%s\".\n"),
7639 arm_fallback_mode_string);
7643 arm_show_force_mode (struct ui_file *file, int from_tty,
7644 struct cmd_list_element *c, const char *value)
7646 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch);
7648 fprintf_filtered (file,
7649 _("The current execution mode assumed "
7650 "(even when symbols are available) is \"%s\".\n"),
7651 arm_force_mode_string);
/* If the user changes the register disassembly style used for info
   register and other commands, we have to also switch the style used
   in opcodes for disassembly output.  This function is run in the "set
   arm disassembly" command, and does that.  */

static void
set_disassembly_style_sfunc (char *args, int from_tty,
			     struct cmd_list_element *c)
{
  set_disassembly_style ();
}
7666 /* Return the ARM register name corresponding to register I. */
7668 arm_register_name (struct gdbarch *gdbarch, int i)
7670 const int num_regs = gdbarch_num_regs (gdbarch);
7672 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
7673 && i >= num_regs && i < num_regs + 32)
7675 static const char *const vfp_pseudo_names[] = {
7676 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
7677 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
7678 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
7679 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
7682 return vfp_pseudo_names[i - num_regs];
7685 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
7686 && i >= num_regs + 32 && i < num_regs + 32 + 16)
7688 static const char *const neon_pseudo_names[] = {
7689 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
7690 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
7693 return neon_pseudo_names[i - num_regs - 32];
7696 if (i >= ARRAY_SIZE (arm_register_names))
7697 /* These registers are only supported on targets which supply
7698 an XML description. */
7701 return arm_register_names[i];
7705 set_disassembly_style (void)
7709 /* Find the style that the user wants. */
7710 for (current = 0; current < num_disassembly_options; current++)
7711 if (disassembly_style == valid_disassembly_styles[current])
7713 gdb_assert (current < num_disassembly_options);
7715 /* Synchronize the disassembler. */
7716 set_arm_regname_option (current);
7719 /* Test whether the coff symbol specific value corresponds to a Thumb
7723 coff_sym_is_thumb (int val)
7725 return (val == C_THUMBEXT
7726 || val == C_THUMBSTAT
7727 || val == C_THUMBEXTFUNC
7728 || val == C_THUMBSTATFUNC
7729 || val == C_THUMBLABEL);
7732 /* arm_coff_make_msymbol_special()
7733 arm_elf_make_msymbol_special()
7735 These functions test whether the COFF or ELF symbol corresponds to
7736 an address in thumb code, and set a "special" bit in a minimal
7737 symbol to indicate that it does. */
7740 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
7742 /* Thumb symbols are of type STT_LOPROC, (synonymous with
7744 if (ELF_ST_TYPE (((elf_symbol_type *)sym)->internal_elf_sym.st_info)
7746 MSYMBOL_SET_SPECIAL (msym);
/* Mark MSYM special if the COFF symbol value VAL denotes Thumb code.  */

static void
arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
{
  if (coff_sym_is_thumb (val))
    MSYMBOL_SET_SPECIAL (msym);
}
7757 arm_objfile_data_free (struct objfile *objfile, void *arg)
7759 struct arm_per_objfile *data = arg;
7762 for (i = 0; i < objfile->obfd->section_count; i++)
7763 VEC_free (arm_mapping_symbol_s, data->section_maps[i]);
7767 arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
7770 const char *name = bfd_asymbol_name (sym);
7771 struct arm_per_objfile *data;
7772 VEC(arm_mapping_symbol_s) **map_p;
7773 struct arm_mapping_symbol new_map_sym;
7775 gdb_assert (name[0] == '$');
7776 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
7779 data = objfile_data (objfile, arm_objfile_data_key);
7782 data = OBSTACK_ZALLOC (&objfile->objfile_obstack,
7783 struct arm_per_objfile);
7784 set_objfile_data (objfile, arm_objfile_data_key, data);
7785 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
7786 objfile->obfd->section_count,
7787 VEC(arm_mapping_symbol_s) *);
7789 map_p = &data->section_maps[bfd_get_section (sym)->index];
7791 new_map_sym.value = sym->value;
7792 new_map_sym.type = name[1];
7794 /* Assume that most mapping symbols appear in order of increasing
7795 value. If they were randomly distributed, it would be faster to
7796 always push here and then sort at first use. */
7797 if (!VEC_empty (arm_mapping_symbol_s, *map_p))
7799 struct arm_mapping_symbol *prev_map_sym;
7801 prev_map_sym = VEC_last (arm_mapping_symbol_s, *map_p);
7802 if (prev_map_sym->value >= sym->value)
7805 idx = VEC_lower_bound (arm_mapping_symbol_s, *map_p, &new_map_sym,
7806 arm_compare_mapping_symbols);
7807 VEC_safe_insert (arm_mapping_symbol_s, *map_p, idx, &new_map_sym);
7812 VEC_safe_push (arm_mapping_symbol_s, *map_p, &new_map_sym);
7816 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
7818 struct gdbarch *gdbarch = get_regcache_arch (regcache);
7819 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
7821 /* If necessary, set the T bit. */
7824 ULONGEST val, t_bit;
7825 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
7826 t_bit = arm_psr_thumb_bit (gdbarch);
7827 if (arm_pc_is_thumb (gdbarch, pc))
7828 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
7831 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
7836 /* Read the contents of a NEON quad register, by reading from two
7837 double registers. This is used to implement the quad pseudo
7838 registers, and for argument passing in case the quad registers are
7839 missing; vectors are passed in quad registers when using the VFP
7840 ABI, even if a NEON unit is not present. REGNUM is the index of
7841 the quad register, in [0, 15]. */
7844 arm_neon_quad_read (struct gdbarch *gdbarch, struct regcache *regcache,
7845 int regnum, gdb_byte *buf)
7848 gdb_byte reg_buf[8];
7849 int offset, double_regnum;
7851 sprintf (name_buf, "d%d", regnum << 1);
7852 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
7855 /* d0 is always the least significant half of q0. */
7856 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
7861 regcache_raw_read (regcache, double_regnum, reg_buf);
7862 memcpy (buf + offset, reg_buf, 8);
7864 offset = 8 - offset;
7865 regcache_raw_read (regcache, double_regnum + 1, reg_buf);
7866 memcpy (buf + offset, reg_buf, 8);
7870 arm_pseudo_read (struct gdbarch *gdbarch, struct regcache *regcache,
7871 int regnum, gdb_byte *buf)
7873 const int num_regs = gdbarch_num_regs (gdbarch);
7875 gdb_byte reg_buf[8];
7876 int offset, double_regnum;
7878 gdb_assert (regnum >= num_regs);
7881 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
7882 /* Quad-precision register. */
7883 arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
7886 /* Single-precision register. */
7887 gdb_assert (regnum < 32);
7889 /* s0 is always the least significant half of d0. */
7890 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
7891 offset = (regnum & 1) ? 0 : 4;
7893 offset = (regnum & 1) ? 4 : 0;
7895 sprintf (name_buf, "d%d", regnum >> 1);
7896 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
7899 regcache_raw_read (regcache, double_regnum, reg_buf);
7900 memcpy (buf, reg_buf + offset, 4);
7904 /* Store the contents of BUF to a NEON quad register, by writing to
7905 two double registers. This is used to implement the quad pseudo
7906 registers, and for argument passing in case the quad registers are
7907 missing; vectors are passed in quad registers when using the VFP
7908 ABI, even if a NEON unit is not present. REGNUM is the index
7909 of the quad register, in [0, 15]. */
7912 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
7913 int regnum, const gdb_byte *buf)
7916 gdb_byte reg_buf[8];
7917 int offset, double_regnum;
7919 sprintf (name_buf, "d%d", regnum << 1);
7920 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
7923 /* d0 is always the least significant half of q0. */
7924 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
7929 regcache_raw_write (regcache, double_regnum, buf + offset);
7930 offset = 8 - offset;
7931 regcache_raw_write (regcache, double_regnum + 1, buf + offset);
7935 arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
7936 int regnum, const gdb_byte *buf)
7938 const int num_regs = gdbarch_num_regs (gdbarch);
7940 gdb_byte reg_buf[8];
7941 int offset, double_regnum;
7943 gdb_assert (regnum >= num_regs);
7946 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
7947 /* Quad-precision register. */
7948 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
7951 /* Single-precision register. */
7952 gdb_assert (regnum < 32);
7954 /* s0 is always the least significant half of d0. */
7955 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
7956 offset = (regnum & 1) ? 0 : 4;
7958 offset = (regnum & 1) ? 4 : 0;
7960 sprintf (name_buf, "d%d", regnum >> 1);
7961 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
7964 regcache_raw_read (regcache, double_regnum, reg_buf);
7965 memcpy (reg_buf + offset, buf, 4);
7966 regcache_raw_write (regcache, double_regnum, reg_buf);
/* User-register callback: BATON points at the raw register number to
   read from FRAME.  */

static struct value *
value_of_arm_user_reg (struct frame_info *frame, const void *baton)
{
  const int *reg_p = baton;

  return value_of_register (*reg_p, frame);
}
7977 static enum gdb_osabi
7978 arm_elf_osabi_sniffer (bfd *abfd)
7980 unsigned int elfosabi;
7981 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
7983 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
7985 if (elfosabi == ELFOSABI_ARM)
7986 /* GNU tools use this value. Check note sections in this case,
7988 bfd_map_over_sections (abfd,
7989 generic_elf_osabi_sniff_abi_tag_sections,
7992 /* Anything else will be handled by the generic ELF sniffer. */
7997 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
7998 struct reggroup *group)
8000 /* FPS register's type is INT, but belongs to float_reggroup. Beside
8001 this, FPS register belongs to save_regroup, restore_reggroup, and
8002 all_reggroup, of course. */
8003 if (regnum == ARM_FPS_REGNUM)
8004 return (group == float_reggroup
8005 || group == save_reggroup
8006 || group == restore_reggroup
8007 || group == all_reggroup);
8009 return default_register_reggroup_p (gdbarch, regnum, group);
8013 /* Initialize the current architecture based on INFO. If possible,
8014 re-use an architecture from ARCHES, which is a list of
8015 architectures already created during this debugging session.
8017 Called e.g. at program startup, when reading a core file, and when
8018 reading a binary file. */
8020 static struct gdbarch *
8021 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
8023 struct gdbarch_tdep *tdep;
8024 struct gdbarch *gdbarch;
8025 struct gdbarch_list *best_arch;
8026 enum arm_abi_kind arm_abi = arm_abi_global;
8027 enum arm_float_model fp_model = arm_fp_model;
8028 struct tdesc_arch_data *tdesc_data = NULL;
8030 int have_vfp_registers = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
8032 int have_fpa_registers = 1;
8033 const struct target_desc *tdesc = info.target_desc;
8035 /* If we have an object to base this architecture on, try to determine
8038 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
8040 int ei_osabi, e_flags;
8042 switch (bfd_get_flavour (info.abfd))
8044 case bfd_target_aout_flavour:
8045 /* Assume it's an old APCS-style ABI. */
8046 arm_abi = ARM_ABI_APCS;
8049 case bfd_target_coff_flavour:
8050 /* Assume it's an old APCS-style ABI. */
8052 arm_abi = ARM_ABI_APCS;
8055 case bfd_target_elf_flavour:
8056 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
8057 e_flags = elf_elfheader (info.abfd)->e_flags;
8059 if (ei_osabi == ELFOSABI_ARM)
8061 /* GNU tools used to use this value, but do not for EABI
8062 objects. There's nowhere to tag an EABI version
8063 anyway, so assume APCS. */
8064 arm_abi = ARM_ABI_APCS;
8066 else if (ei_osabi == ELFOSABI_NONE)
8068 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
8069 int attr_arch, attr_profile;
8073 case EF_ARM_EABI_UNKNOWN:
8074 /* Assume GNU tools. */
8075 arm_abi = ARM_ABI_APCS;
8078 case EF_ARM_EABI_VER4:
8079 case EF_ARM_EABI_VER5:
8080 arm_abi = ARM_ABI_AAPCS;
8081 /* EABI binaries default to VFP float ordering.
8082 They may also contain build attributes that can
8083 be used to identify if the VFP argument-passing
8085 if (fp_model == ARM_FLOAT_AUTO)
8088 switch (bfd_elf_get_obj_attr_int (info.abfd,
8093 /* "The user intended FP parameter/result
8094 passing to conform to AAPCS, base
8096 fp_model = ARM_FLOAT_SOFT_VFP;
8099 /* "The user intended FP parameter/result
8100 passing to conform to AAPCS, VFP
8102 fp_model = ARM_FLOAT_VFP;
8105 /* "The user intended FP parameter/result
8106 passing to conform to tool chain-specific
8107 conventions" - we don't know any such
8108 conventions, so leave it as "auto". */
8111 /* Attribute value not mentioned in the
8112 October 2008 ABI, so leave it as
8117 fp_model = ARM_FLOAT_SOFT_VFP;
8123 /* Leave it as "auto". */
8124 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
8129 /* Detect M-profile programs. This only works if the
8130 executable file includes build attributes; GCC does
8131 copy them to the executable, but e.g. RealView does
8133 attr_arch = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
8135 attr_profile = bfd_elf_get_obj_attr_int (info.abfd,
8137 Tag_CPU_arch_profile);
8138 /* GCC specifies the profile for v6-M; RealView only
8139 specifies the profile for architectures starting with
8140 V7 (as opposed to architectures with a tag
8141 numerically greater than TAG_CPU_ARCH_V7). */
8142 if (!tdesc_has_registers (tdesc)
8143 && (attr_arch == TAG_CPU_ARCH_V6_M
8144 || attr_arch == TAG_CPU_ARCH_V6S_M
8145 || attr_profile == 'M'))
8146 tdesc = tdesc_arm_with_m;
8150 if (fp_model == ARM_FLOAT_AUTO)
8152 int e_flags = elf_elfheader (info.abfd)->e_flags;
8154 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
8157 /* Leave it as "auto". Strictly speaking this case
8158 means FPA, but almost nobody uses that now, and
8159 many toolchains fail to set the appropriate bits
8160 for the floating-point model they use. */
8162 case EF_ARM_SOFT_FLOAT:
8163 fp_model = ARM_FLOAT_SOFT_FPA;
8165 case EF_ARM_VFP_FLOAT:
8166 fp_model = ARM_FLOAT_VFP;
8168 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
8169 fp_model = ARM_FLOAT_SOFT_VFP;
8174 if (e_flags & EF_ARM_BE8)
8175 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
8180 /* Leave it as "auto". */
8185 /* Check any target description for validity. */
8186 if (tdesc_has_registers (tdesc))
8188 /* For most registers we require GDB's default names; but also allow
8189 the numeric names for sp / lr / pc, as a convenience. */
8190 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
8191 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
8192 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
8194 const struct tdesc_feature *feature;
8197 feature = tdesc_find_feature (tdesc,
8198 "org.gnu.gdb.arm.core");
8199 if (feature == NULL)
8201 feature = tdesc_find_feature (tdesc,
8202 "org.gnu.gdb.arm.m-profile");
8203 if (feature == NULL)
8209 tdesc_data = tdesc_data_alloc ();
8212 for (i = 0; i < ARM_SP_REGNUM; i++)
8213 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
8214 arm_register_names[i]);
8215 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
8218 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
8221 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
8225 valid_p &= tdesc_numbered_register (feature, tdesc_data,
8226 ARM_PS_REGNUM, "xpsr");
8228 valid_p &= tdesc_numbered_register (feature, tdesc_data,
8229 ARM_PS_REGNUM, "cpsr");
8233 tdesc_data_cleanup (tdesc_data);
8237 feature = tdesc_find_feature (tdesc,
8238 "org.gnu.gdb.arm.fpa");
8239 if (feature != NULL)
8242 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
8243 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
8244 arm_register_names[i]);
8247 tdesc_data_cleanup (tdesc_data);
8252 have_fpa_registers = 0;
8254 feature = tdesc_find_feature (tdesc,
8255 "org.gnu.gdb.xscale.iwmmxt");
8256 if (feature != NULL)
8258 static const char *const iwmmxt_names[] = {
8259 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
8260 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
8261 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
8262 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
8266 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
8268 &= tdesc_numbered_register (feature, tdesc_data, i,
8269 iwmmxt_names[i - ARM_WR0_REGNUM]);
8271 /* Check for the control registers, but do not fail if they
8273 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
8274 tdesc_numbered_register (feature, tdesc_data, i,
8275 iwmmxt_names[i - ARM_WR0_REGNUM]);
8277 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
8279 &= tdesc_numbered_register (feature, tdesc_data, i,
8280 iwmmxt_names[i - ARM_WR0_REGNUM]);
8284 tdesc_data_cleanup (tdesc_data);
8289 /* If we have a VFP unit, check whether the single precision registers
8290 are present. If not, then we will synthesize them as pseudo
8292 feature = tdesc_find_feature (tdesc,
8293 "org.gnu.gdb.arm.vfp");
8294 if (feature != NULL)
8296 static const char *const vfp_double_names[] = {
8297 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
8298 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
8299 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
8300 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
8303 /* Require the double precision registers. There must be either
8306 for (i = 0; i < 32; i++)
8308 valid_p &= tdesc_numbered_register (feature, tdesc_data,
8310 vfp_double_names[i]);
8315 if (!valid_p && i != 16)
8317 tdesc_data_cleanup (tdesc_data);
8321 if (tdesc_unnumbered_register (feature, "s0") == 0)
8322 have_vfp_pseudos = 1;
8324 have_vfp_registers = 1;
8326 /* If we have VFP, also check for NEON. The architecture allows
8327 NEON without VFP (integer vector operations only), but GDB
8328 does not support that. */
8329 feature = tdesc_find_feature (tdesc,
8330 "org.gnu.gdb.arm.neon");
8331 if (feature != NULL)
8333 /* NEON requires 32 double-precision registers. */
8336 tdesc_data_cleanup (tdesc_data);
8340 /* If there are quad registers defined by the stub, use
8341 their type; otherwise (normally) provide them with
8342 the default type. */
8343 if (tdesc_unnumbered_register (feature, "q0") == 0)
8344 have_neon_pseudos = 1;
8351 /* If there is already a candidate, use it. */
8352 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
8354 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
8356 if (arm_abi != ARM_ABI_AUTO
8357 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
8360 if (fp_model != ARM_FLOAT_AUTO
8361 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
8364 /* There are various other properties in tdep that we do not
8365 need to check here: those derived from a target description,
8366 since gdbarches with a different target description are
8367 automatically disqualified. */
8369 /* Do check is_m, though, since it might come from the binary. */
8370 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
8373 /* Found a match. */
8377 if (best_arch != NULL)
8379 if (tdesc_data != NULL)
8380 tdesc_data_cleanup (tdesc_data);
8381 return best_arch->gdbarch;
8384 tdep = xcalloc (1, sizeof (struct gdbarch_tdep));
8385 gdbarch = gdbarch_alloc (&info, tdep);
8387 /* Record additional information about the architecture we are defining.
8388 These are gdbarch discriminators, like the OSABI. */
8389 tdep->arm_abi = arm_abi;
8390 tdep->fp_model = fp_model;
8392 tdep->have_fpa_registers = have_fpa_registers;
8393 tdep->have_vfp_registers = have_vfp_registers;
8394 tdep->have_vfp_pseudos = have_vfp_pseudos;
8395 tdep->have_neon_pseudos = have_neon_pseudos;
8396 tdep->have_neon = have_neon;
8399 switch (info.byte_order_for_code)
8401 case BFD_ENDIAN_BIG:
8402 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
8403 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
8404 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
8405 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
8409 case BFD_ENDIAN_LITTLE:
8410 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
8411 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
8412 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
8413 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
8418 internal_error (__FILE__, __LINE__,
8419 _("arm_gdbarch_init: bad byte order for float format"));
8422 /* On ARM targets char defaults to unsigned. */
8423 set_gdbarch_char_signed (gdbarch, 0);
8425 /* Note: for displaced stepping, this includes the breakpoint, and one word
8426 of additional scratch space. This setting isn't used for anything beside
8427 displaced stepping at present. */
8428 set_gdbarch_max_insn_length (gdbarch, 4 * DISPLACED_MODIFIED_INSNS);
8430 /* This should be low enough for everything. */
8431 tdep->lowest_pc = 0x20;
8432 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
8434 /* The default, for both APCS and AAPCS, is to return small
8435 structures in registers. */
8436 tdep->struct_return = reg_struct_return;
8438 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
8439 set_gdbarch_frame_align (gdbarch, arm_frame_align);
8441 set_gdbarch_write_pc (gdbarch, arm_write_pc);
8443 /* Frame handling. */
8444 set_gdbarch_dummy_id (gdbarch, arm_dummy_id);
8445 set_gdbarch_unwind_pc (gdbarch, arm_unwind_pc);
8446 set_gdbarch_unwind_sp (gdbarch, arm_unwind_sp);
8448 frame_base_set_default (gdbarch, &arm_normal_base);
8450 /* Address manipulation. */
8451 set_gdbarch_smash_text_address (gdbarch, arm_smash_text_address);
8452 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
8454 /* Advance PC across function entry code. */
8455 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
8457 /* Detect whether PC is in function epilogue. */
8458 set_gdbarch_in_function_epilogue_p (gdbarch, arm_in_function_epilogue_p);
8460 /* Skip trampolines. */
8461 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
8463 /* The stack grows downward. */
8464 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
8466 /* Breakpoint manipulation. */
8467 set_gdbarch_breakpoint_from_pc (gdbarch, arm_breakpoint_from_pc);
8468 set_gdbarch_remote_breakpoint_from_pc (gdbarch,
8469 arm_remote_breakpoint_from_pc);
8471 /* Information about registers, etc. */
8472 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
8473 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
8474 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
8475 set_gdbarch_register_type (gdbarch, arm_register_type);
8476 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
8478 /* This "info float" is FPA-specific. Use the generic version if we
8480 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
8481 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
8483 /* Internal <-> external register number maps. */
8484 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
8485 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
8487 set_gdbarch_register_name (gdbarch, arm_register_name);
8489 /* Returning results. */
8490 set_gdbarch_return_value (gdbarch, arm_return_value);
8493 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
8495 /* Minsymbol frobbing. */
8496 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
8497 set_gdbarch_coff_make_msymbol_special (gdbarch,
8498 arm_coff_make_msymbol_special);
8499 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
8501 /* Thumb-2 IT block support. */
8502 set_gdbarch_adjust_breakpoint_address (gdbarch,
8503 arm_adjust_breakpoint_address);
8505 /* Virtual tables. */
8506 set_gdbarch_vbit_in_delta (gdbarch, 1);
8508 /* Hook in the ABI-specific overrides, if they have been registered. */
8509 gdbarch_init_osabi (info, gdbarch);
8511 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
8513 /* Add some default predicates. */
8514 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
8515 dwarf2_append_unwinders (gdbarch);
8516 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
8517 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
8519 /* Now we have tuned the configuration, set a few final things,
8520 based on what the OS ABI has told us. */
8522 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
8523 binaries are always marked. */
8524 if (tdep->arm_abi == ARM_ABI_AUTO)
8525 tdep->arm_abi = ARM_ABI_APCS;
8527 /* We used to default to FPA for generic ARM, but almost nobody
8528 uses that now, and we now provide a way for the user to force
8529 the model. So default to the most useful variant. */
8530 if (tdep->fp_model == ARM_FLOAT_AUTO)
8531 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
8533 if (tdep->jb_pc >= 0)
8534 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
8536 /* Floating point sizes and format. */
8537 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
8538 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
8540 set_gdbarch_double_format
8541 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
8542 set_gdbarch_long_double_format
8543 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
8547 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
8548 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
8551 if (have_vfp_pseudos)
8553 /* NOTE: These are the only pseudo registers used by
8554 the ARM target at the moment. If more are added, a
8555 little more care in numbering will be needed. */
8557 int num_pseudos = 32;
8558 if (have_neon_pseudos)
8560 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
8561 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
8562 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
8567 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
8569 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
8571 /* Override tdesc_register_type to adjust the types of VFP
8572 registers for NEON. */
8573 set_gdbarch_register_type (gdbarch, arm_register_type);
8576 /* Add standard register aliases. We add aliases even for those
8577 names which are used by the current architecture - it's simpler,
8578 and does no harm, since nothing ever lists user registers. */
8579 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
8580 user_reg_add (gdbarch, arm_register_aliases[i].name,
8581 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
8587 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
8589 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8594 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
8595 (unsigned long) tdep->lowest_pc);
8598 extern initialize_file_ftype _initialize_arm_tdep; /* -Wmissing-prototypes */
8601 _initialize_arm_tdep (void)
8603 struct ui_file *stb;
8605 struct cmd_list_element *new_set, *new_show;
8606 const char *setname;
8607 const char *setdesc;
8608 const char *const *regnames;
8610 static char *helptext;
8611 char regdesc[1024], *rdptr = regdesc;
8612 size_t rest = sizeof (regdesc);
8614 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
8616 arm_objfile_data_key
8617 = register_objfile_data_with_cleanup (NULL, arm_objfile_data_free);
8619 /* Add ourselves to objfile event chain. */
8620 observer_attach_new_objfile (arm_exidx_new_objfile);
8622 = register_objfile_data_with_cleanup (NULL, arm_exidx_data_free);
8624 /* Register an ELF OS ABI sniffer for ARM binaries. */
8625 gdbarch_register_osabi_sniffer (bfd_arch_arm,
8626 bfd_target_elf_flavour,
8627 arm_elf_osabi_sniffer);
8629 /* Initialize the standard target descriptions. */
8630 initialize_tdesc_arm_with_m ();
8632 /* Get the number of possible sets of register names defined in opcodes. */
8633 num_disassembly_options = get_arm_regname_num_options ();
8635 /* Add root prefix command for all "set arm"/"show arm" commands. */
8636 add_prefix_cmd ("arm", no_class, set_arm_command,
8637 _("Various ARM-specific commands."),
8638 &setarmcmdlist, "set arm ", 0, &setlist);
8640 add_prefix_cmd ("arm", no_class, show_arm_command,
8641 _("Various ARM-specific commands."),
8642 &showarmcmdlist, "show arm ", 0, &showlist);
8644 /* Sync the opcode insn printer with our register viewer. */
8645 parse_arm_disassembler_option ("reg-names-std");
8647 /* Initialize the array that will be passed to
8648 add_setshow_enum_cmd(). */
8649 valid_disassembly_styles
8650 = xmalloc ((num_disassembly_options + 1) * sizeof (char *));
8651 for (i = 0; i < num_disassembly_options; i++)
8653 numregs = get_arm_regnames (i, &setname, &setdesc, &regnames);
8654 valid_disassembly_styles[i] = setname;
8655 length = snprintf (rdptr, rest, "%s - %s\n", setname, setdesc);
8658 /* When we find the default names, tell the disassembler to use
8660 if (!strcmp (setname, "std"))
8662 disassembly_style = setname;
8663 set_arm_regname_option (i);
8666 /* Mark the end of valid options. */
8667 valid_disassembly_styles[num_disassembly_options] = NULL;
8669 /* Create the help text. */
8670 stb = mem_fileopen ();
8671 fprintf_unfiltered (stb, "%s%s%s",
8672 _("The valid values are:\n"),
8674 _("The default is \"std\"."));
8675 helptext = ui_file_xstrdup (stb, NULL);
8676 ui_file_delete (stb);
8678 add_setshow_enum_cmd("disassembler", no_class,
8679 valid_disassembly_styles, &disassembly_style,
8680 _("Set the disassembly style."),
8681 _("Show the disassembly style."),
8683 set_disassembly_style_sfunc,
8684 NULL, /* FIXME: i18n: The disassembly style is
8686 &setarmcmdlist, &showarmcmdlist);
8688 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
8689 _("Set usage of ARM 32-bit mode."),
8690 _("Show usage of ARM 32-bit mode."),
8691 _("When off, a 26-bit PC will be used."),
8693 NULL, /* FIXME: i18n: Usage of ARM 32-bit
8695 &setarmcmdlist, &showarmcmdlist);
8697 /* Add a command to allow the user to force the FPU model. */
8698 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
8699 _("Set the floating point type."),
8700 _("Show the floating point type."),
8701 _("auto - Determine the FP typefrom the OS-ABI.\n\
8702 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
8703 fpa - FPA co-processor (GCC compiled).\n\
8704 softvfp - Software FP with pure-endian doubles.\n\
8705 vfp - VFP co-processor."),
8706 set_fp_model_sfunc, show_fp_model,
8707 &setarmcmdlist, &showarmcmdlist);
8709 /* Add a command to allow the user to force the ABI. */
8710 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
8713 NULL, arm_set_abi, arm_show_abi,
8714 &setarmcmdlist, &showarmcmdlist);
8716 /* Add two commands to allow the user to force the assumed
8718 add_setshow_enum_cmd ("fallback-mode", class_support,
8719 arm_mode_strings, &arm_fallback_mode_string,
8720 _("Set the mode assumed when symbols are unavailable."),
8721 _("Show the mode assumed when symbols are unavailable."),
8722 NULL, NULL, arm_show_fallback_mode,
8723 &setarmcmdlist, &showarmcmdlist);
8724 add_setshow_enum_cmd ("force-mode", class_support,
8725 arm_mode_strings, &arm_force_mode_string,
8726 _("Set the mode assumed even when symbols are available."),
8727 _("Show the mode assumed even when symbols are available."),
8728 NULL, NULL, arm_show_force_mode,
8729 &setarmcmdlist, &showarmcmdlist);
8731 /* Debugging flag. */
8732 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
8733 _("Set ARM debugging."),
8734 _("Show ARM debugging."),
8735 _("When on, arm-specific debugging is enabled."),
8737 NULL, /* FIXME: i18n: "ARM debugging is %s. */
8738 &setdebuglist, &showdebuglist);