/* Common target dependent code for GDB on ARM systems.

   Copyright (C) 1988, 1989, 1991, 1992, 1993, 1995, 1996, 1998, 1999, 2000,
   2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

   This file is part of GDB.

   This program is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3 of the License, or
   (at your option) any later version.

   This program is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program.  If not, see <http://www.gnu.org/licenses/>.  */

#include <ctype.h>		/* XXX for isupper ().  */

#include "gdb_string.h"
#include "dis-asm.h"		/* For register styles.  */
#include "reggroups.h"
#include "arch-utils.h"
#include "frame-unwind.h"
#include "frame-base.h"
#include "trad-frame.h"
#include "dwarf2-frame.h"
#include "prologue-value.h"
#include "target-descriptions.h"
#include "user-regs.h"
#include "gdb/sim-arm.h"
#include "coff/internal.h"
#include "gdb_assert.h"

#include "features/arm-with-m.c"
#include "features/arm-with-iwmmxt.c"
#include "features/arm-with-vfpv2.c"
#include "features/arm-with-vfpv3.c"
#include "features/arm-with-neon.c"

/* Macros for setting and testing a bit in a minimal symbol that marks
   it as a Thumb function.  The MSB of the minimal symbol's "info" field
   is used for this purpose.

   MSYMBOL_SET_SPECIAL	Actually sets the "special" bit.
   MSYMBOL_IS_SPECIAL	Tests the "special" bit in a minimal symbol.  */

#define MSYMBOL_SET_SPECIAL(msym)	\
  MSYMBOL_TARGET_FLAG_1 (msym) = 1

#define MSYMBOL_IS_SPECIAL(msym)	\
  MSYMBOL_TARGET_FLAG_1 (msym)

/* Per-objfile data used for mapping symbols.  */
static const struct objfile_data *arm_objfile_data_key;

struct arm_mapping_symbol
{
  bfd_vma value;
  char type;
};
typedef struct arm_mapping_symbol arm_mapping_symbol_s;
DEF_VEC_O(arm_mapping_symbol_s);
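
/* Background note: "ELF for the ARM Architecture" defines mapping symbols
   ($a, $t and $d) that mark where a section switches between ARM code,
   Thumb code and literal data.  Each arm_mapping_symbol above records one
   such symbol as a section-relative address plus its type character, and
   the per-section vectors are kept sorted by address so that
   arm_find_mapping_symbol below can binary-search them.  */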

struct arm_per_objfile
{
  VEC(arm_mapping_symbol_s) **section_maps;
};

/* The list of available "set arm ..." and "show arm ..." commands.  */
static struct cmd_list_element *setarmcmdlist = NULL;
static struct cmd_list_element *showarmcmdlist = NULL;

/* The type of floating-point to use.  Keep this in sync with enum
   arm_float_model, and the help string in _initialize_arm_tdep.  */
static const char *fp_model_strings[] =

/* A variable that can be configured by the user.  */
static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
static const char *current_fp_model = "auto";

/* The ABI to use.  Keep this in sync with arm_abi_kind.  */
static const char *arm_abi_strings[] =

/* A variable that can be configured by the user.  */
static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
static const char *arm_abi_string = "auto";

/* The execution mode to assume.  */
static const char *arm_mode_strings[] =

static const char *arm_fallback_mode_string = "auto";
static const char *arm_force_mode_string = "auto";

/* Internal override of the execution mode.  -1 means no override,
   0 means override to ARM mode, 1 means override to Thumb mode.
   The effect is the same as if arm_force_mode has been set by the
   user (except the internal override has precedence over a user's
   arm_force_mode override).  */
static int arm_override_mode = -1;

/* Number of different reg name sets (options).  */
static int num_disassembly_options;

/* The standard register names, and all the valid aliases for them.  Note
   that `fp', `sp' and `pc' are not added in this alias list, because they
   have been added as builtin user registers in
   std-regs.c:_initialize_frame_reg.  */
} arm_register_aliases[] = {
  /* Basic register numbers.  */
  /* Synonyms (argument and variable registers).  */
  /* Other platform-specific names for r9.  */
  /* Names used by GCC (not listed in the ARM EABI).  */
  /* A special name from the older ATPCS.  */

static const char *const arm_register_names[] =
{"r0",  "r1",  "r2",  "r3",	/*  0  1  2  3 */
 "r4",  "r5",  "r6",  "r7",	/*  4  5  6  7 */
 "r8",  "r9",  "r10", "r11",	/*  8  9 10 11 */
 "r12", "sp",  "lr",  "pc",	/* 12 13 14 15 */
 "f0",  "f1",  "f2",  "f3",	/* 16 17 18 19 */
 "f4",  "f5",  "f6",  "f7",	/* 20 21 22 23 */
 "fps", "cpsr" };		/* 24 25       */

/* Valid register name styles.  */
static const char **valid_disassembly_styles;

/* Disassembly style to use.  Default to "std" register names.  */
static const char *disassembly_style;

/* This is used to keep the bfd arch_info in sync with the disassembly
   style.  */
static void set_disassembly_style_sfunc (char *, int,
					 struct cmd_list_element *);
static void set_disassembly_style (void);

static void convert_from_extended (const struct floatformat *, const void *,
static void convert_to_extended (const struct floatformat *, void *,

static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
						struct regcache *regcache,
						int regnum, gdb_byte *buf);
static void arm_neon_quad_write (struct gdbarch *gdbarch,
				 struct regcache *regcache,
				 int regnum, const gdb_byte *buf);

static int thumb_insn_size (unsigned short inst1);

struct arm_prologue_cache
{
  /* The stack pointer at the time this frame was created; i.e. the
     caller's stack pointer when this function was called.  It is used
     to identify this frame.  */
  CORE_ADDR prev_sp;

  /* The frame base for this frame is just prev_sp - frame size.
     FRAMESIZE is the distance from the frame pointer to the
     initial stack pointer.  */
  int framesize;

  /* The register used to hold the frame pointer for this frame.  */
  int framereg;

  /* Saved register offsets.  */
  struct trad_frame_saved_reg *saved_regs;
};

static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
				       CORE_ADDR prologue_start,
				       CORE_ADDR prologue_end,
				       struct arm_prologue_cache *cache);

/* Architecture version for displaced stepping.  This affects the behaviour
   of certain instructions, and really should not be hard-wired.  */
#define DISPLACED_STEPPING_ARCH_VERSION		5

/* Addresses for calling Thumb functions have bit 0 set.
   Here are some macros to test, set, or clear bit 0 of addresses.  */
#define IS_THUMB_ADDR(addr)	((addr) & 1)
#define MAKE_THUMB_ADDR(addr)	((addr) | 1)
#define UNMAKE_THUMB_ADDR(addr) ((addr) & ~1)
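
/* For example, a Thumb function whose first instruction is at 0x8000 is
   called through the address 0x8001: IS_THUMB_ADDR (0x8001) is non-zero,
   and UNMAKE_THUMB_ADDR (0x8001) recovers 0x8000.  */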

/* Set to true if the 32-bit mode is in use.  */

/* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode.  */
arm_psr_thumb_bit (struct gdbarch *gdbarch)
  if (gdbarch_tdep (gdbarch)->is_m)

/* Determine if FRAME is executing in Thumb mode.  */
arm_frame_is_thumb (struct frame_info *frame)
  ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));

  /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
     directly (from a signal frame or dummy frame) or by interpreting
     the saved LR (from a prologue or DWARF frame).  So consult it and
     trust the unwinders.  */
  cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);

  return (cpsr & t_bit) != 0;

/* Callback for VEC_lower_bound.  */
arm_compare_mapping_symbols (const struct arm_mapping_symbol *lhs,
			     const struct arm_mapping_symbol *rhs)
  return lhs->value < rhs->value;

/* Search for the mapping symbol covering MEMADDR.  If one is found,
   return its type.  Otherwise, return 0.  If START is non-NULL,
   set *START to the location of the mapping symbol.  */
arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
  struct obj_section *sec;

  /* If there are mapping symbols, consult them.  */
  sec = find_pc_section (memaddr);
      struct arm_per_objfile *data;
      VEC(arm_mapping_symbol_s) *map;
      struct arm_mapping_symbol map_key = { memaddr - obj_section_addr (sec),

      data = objfile_data (sec->objfile, arm_objfile_data_key);

      map = data->section_maps[sec->the_bfd_section->index];
      if (!VEC_empty (arm_mapping_symbol_s, map))
	  struct arm_mapping_symbol *map_sym;

	  idx = VEC_lower_bound (arm_mapping_symbol_s, map, &map_key,
				 arm_compare_mapping_symbols);

	  /* VEC_lower_bound finds the earliest ordered insertion
	     point.  If the following symbol starts at this exact
	     address, we use that; otherwise, the preceding
	     mapping symbol covers this address.  */
	  if (idx < VEC_length (arm_mapping_symbol_s, map))
	      map_sym = VEC_index (arm_mapping_symbol_s, map, idx);
	      if (map_sym->value == map_key.value)
		  *start = map_sym->value + obj_section_addr (sec);
		  return map_sym->type;

	  map_sym = VEC_index (arm_mapping_symbol_s, map, idx - 1);
	  *start = map_sym->value + obj_section_addr (sec);
	  return map_sym->type;

/* Determine if the program counter specified in MEMADDR is in a Thumb
   function.  This function should be called for addresses unrelated to
   any executing frame; otherwise, prefer arm_frame_is_thumb.  */
arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
  struct obj_section *sec;
  struct minimal_symbol *sym;

  struct displaced_step_closure *dsc
    = get_displaced_step_closure_by_addr (memaddr);

  /* If we are checking the mode of a displaced instruction in the copy
     area, the mode should be determined by the instruction at the
     original address.  */
      fprintf_unfiltered (gdb_stdlog,
			  "displaced: check mode of %.8lx instead of %.8lx\n",
			  (unsigned long) dsc->insn_addr,
			  (unsigned long) memaddr);
      memaddr = dsc->insn_addr;

  /* If bit 0 of the address is set, assume this is a Thumb address.  */
  if (IS_THUMB_ADDR (memaddr))

  /* Respect internal mode override if active.  */
  if (arm_override_mode != -1)
    return arm_override_mode;

  /* If the user wants to override the symbol table, let them.  */
  if (strcmp (arm_force_mode_string, "arm") == 0)
  if (strcmp (arm_force_mode_string, "thumb") == 0)

  /* ARM v6-M and v7-M are always in Thumb mode.  */
  if (gdbarch_tdep (gdbarch)->is_m)

  /* If there are mapping symbols, consult them.  */
  type = arm_find_mapping_symbol (memaddr, NULL);

  /* Thumb functions have a "special" bit set in minimal symbols.  */
  sym = lookup_minimal_symbol_by_pc (memaddr);
    return (MSYMBOL_IS_SPECIAL (sym));

  /* If the user wants to override the fallback mode, let them.  */
  if (strcmp (arm_fallback_mode_string, "arm") == 0)
  if (strcmp (arm_fallback_mode_string, "thumb") == 0)

  /* If we couldn't find any symbol, but we're talking to a running
     target, then trust the current value of $cpsr.  This lets
     "display/i $pc" always show the correct mode (though if there is
     a symbol table we will not reach here, so it still may not be
     displayed in the mode it will be executed).  */
  if (target_has_registers)
    return arm_frame_is_thumb (get_current_frame ());

  /* Otherwise we're out of luck; we assume ARM.  */

/* Remove useless bits from addresses in a running program.  */
arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
    return UNMAKE_THUMB_ADDR (val);
    return (val & 0x03fffffc);

/* When reading symbols, we need to zap the low bit of the address,
   which may be set to 1 for Thumb functions.  */
arm_smash_text_address (struct gdbarch *gdbarch, CORE_ADDR val)

/* Return 1 if PC is the start of a compiler helper function which
   can be safely ignored during prologue skipping.  IS_THUMB is true
   if the function is known to be a Thumb function due to the way it
   was called.  */
skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  struct minimal_symbol *msym;

  msym = lookup_minimal_symbol_by_pc (pc);
      && SYMBOL_VALUE_ADDRESS (msym) == pc
      && SYMBOL_LINKAGE_NAME (msym) != NULL)
      const char *name = SYMBOL_LINKAGE_NAME (msym);

      /* The GNU linker's Thumb call stub to foo is named
	 __foo_from_thumb.  */
      if (strstr (name, "_from_thumb") != NULL)

      /* On soft-float targets, __truncdfsf2 is called to convert promoted
	 arguments to their argument types in non-prototyped
	 functions.  */
      if (strncmp (name, "__truncdfsf2", strlen ("__truncdfsf2")) == 0)
      if (strncmp (name, "__aeabi_d2f", strlen ("__aeabi_d2f")) == 0)

      /* Internal functions related to thread-local storage.  */
      if (strncmp (name, "__tls_get_addr", strlen ("__tls_get_addr")) == 0)
      if (strncmp (name, "__aeabi_read_tp", strlen ("__aeabi_read_tp")) == 0)

      /* If we run against a stripped glibc, we may be unable to identify
	 special functions by name.  Check for one important case,
	 __aeabi_read_tp, by comparing the *code* against the default
	 implementation (this is hand-written ARM assembler in glibc).  */
	  && read_memory_unsigned_integer (pc, 4, byte_order_for_code)
	     == 0xe3e00a0f /* mov r0, #0xffff0fff */
	  && read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code)
	     == 0xe240f01f) /* sub pc, r0, #31 */

/* Support routines for instruction parsing.  */
#define submask(x) ((1L << ((x) + 1)) - 1)
#define bit(obj,st) (((obj) >> (st)) & 1)
#define bits(obj,st,fn) (((obj) >> (st)) & submask ((fn) - (st)))
#define sbits(obj,st,fn) \
  ((long) (bits(obj,st,fn) | ((long) bit(obj,fn) * ~ submask (fn - st))))
#define BranchDest(addr,instr) \
  ((CORE_ADDR) (((long) (addr)) + 8 + (sbits (instr, 0, 23) << 2)))
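
/* Worked example (illustrative): bits (0xe59f1004, 12, 15) extracts the Rd
   field (1) of "ldr r1, [pc, #4]", and sbits sign-extends a field, so the
   self-branch 0xeafffffe has sbits (insn, 0, 23) == -2 and
   BranchDest (0x8000, 0xeafffffe) == 0x8000 + 8 + (-2 << 2) == 0x8000.  */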

/* Extract the immediate from a movw/movt instruction with encoding T.
   INSN1 is the first 16 bits of the instruction and INSN2 is the second
   16 bits of the instruction.  */
#define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
  ((bits ((insn1), 0, 3) << 12)               \
   | (bits ((insn1), 10, 10) << 11)           \
   | (bits ((insn2), 12, 14) << 8)            \
   | bits ((insn2), 0, 7))
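
/* For example (illustrative), the Thumb-2 encoding of "movw r3, #0x1234"
   is the halfword pair 0xf241, 0x2334, and
   EXTRACT_MOVW_MOVT_IMM_T (0xf241, 0x2334) reassembles the scattered
   imm4:i:imm3:imm8 fields into 0x1234.  */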

/* Extract the immediate from a movw/movt instruction with encoding A.
   INSN is the 32-bit instruction.  */
#define EXTRACT_MOVW_MOVT_IMM_A(insn) \
  ((bits ((insn), 16, 19) << 12) \
   | bits ((insn), 0, 11))
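
/* For example (illustrative), the ARM encoding of "movw r3, #0x1234" is
   0xe3013234, and EXTRACT_MOVW_MOVT_IMM_A (0xe3013234) reassembles the
   imm4:imm12 fields into 0x1234.  */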

/* Decode immediate value; implements ThumbExpandImmediate pseudo-op.  */
thumb_expand_immediate (unsigned int imm)
  unsigned int count = imm >> 7;

	return (imm & 0xff) | ((imm & 0xff) << 16);

	return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);

	return (imm & 0xff) | ((imm & 0xff) << 8)
		| ((imm & 0xff) << 16) | ((imm & 0xff) << 24);

  return (0x80 | (imm & 0x7f)) << (32 - count);
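
/* Worked example (illustrative): for the 12-bit encoded immediate 0x4ff,
   count == 9, so the rotated form above yields
   (0x80 | 0x7f) << (32 - 9) == 0x7f800000, matching the architectural
   ThumbExpandImm of i:imm3:imm8 = 0x4ff.  */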

/* Return 1 if the 16-bit Thumb instruction INST might change
   control flow, 0 otherwise.  */
thumb_instruction_changes_pc (unsigned short inst)
  if ((inst & 0xff00) == 0xbd00)	/* pop {rlist, pc} */

  if ((inst & 0xf000) == 0xd000)	/* conditional branch */

  if ((inst & 0xf800) == 0xe000)	/* unconditional branch */

  if ((inst & 0xff00) == 0x4700)	/* bx REG, blx REG */

  if ((inst & 0xff87) == 0x4687)	/* mov pc, REG */

  if ((inst & 0xf500) == 0xb100)	/* CBNZ or CBZ.  */

/* Return 1 if the 32-bit Thumb instruction in INST1 and INST2
   might change control flow, 0 otherwise.  */
thumb2_instruction_changes_pc (unsigned short inst1, unsigned short inst2)
  if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
      /* Branches and miscellaneous control instructions.  */

      if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
      else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)
	  /* SUBS PC, LR, #imm8.  */
      else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
	  /* Conditional branch.  */

  if ((inst1 & 0xfe50) == 0xe810)
      /* Load multiple or RFE.  */

      if (bit (inst1, 7) && !bit (inst1, 8))
      else if (!bit (inst1, 7) && bit (inst1, 8))
      else if (bit (inst1, 7) && bit (inst1, 8))
      else if (!bit (inst1, 7) && !bit (inst1, 8))

  if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
    /* MOV PC or MOVS PC.  */

  if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
      if (bits (inst1, 0, 3) == 15)
      if ((inst2 & 0x0fc0) == 0x0000)

  if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)
  if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)

/* Analyze a Thumb prologue, looking for a recognizable stack frame
   and frame pointer.  Scan until we encounter a store that could
   clobber the stack frame unexpectedly, or an unknown instruction.
   Return the last address which is definitely safe to skip for an
   initial breakpoint.  */
thumb_analyze_prologue (struct gdbarch *gdbarch,
			CORE_ADDR start, CORE_ADDR limit,
			struct arm_prologue_cache *cache)
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  struct pv_area *stack;
  struct cleanup *back_to;
  CORE_ADDR unrecognized_pc = 0;

  for (i = 0; i < 16; i++)
    regs[i] = pv_register (i, 0);
  stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
  back_to = make_cleanup_free_pv_area (stack);
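
  /* The scan below uses the prologue-value machinery (prologue-value.h):
     each register is tracked symbolically as "its value on entry to the
     function plus a constant", stores are recorded against an abstract
     view of the stack so that saved-register offsets can be recovered
     afterwards with pv_area_find_reg, and a store whose address cannot be
     represented that way (pv_area_store_would_trash) stops the scan.  */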

  while (start < limit)
      insn = read_memory_unsigned_integer (start, 2, byte_order_for_code);

      if ((insn & 0xfe00) == 0xb400)		/* push { rlist } */
	  if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))

	  /* Bits 0-7 contain a mask for registers R0-R7.  Bit 8 says
	     whether to save LR (R14).  */
	  mask = (insn & 0xff) | ((insn & 0x100) << 6);
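	  /* For example (illustrative), "push {r4-r7, lr}" is 0xb5f0,
	     giving mask == 0x40f0: bits 4-7 for r4-r7 plus bit 14 for
	     LR.  */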

	  /* Calculate offsets of saved R0-R7 and LR.  */
	  for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
	    if (mask & (1 << regno))
		regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
		pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
      else if ((insn & 0xff00) == 0xb000)	/* add sp, #simm  OR
						   sub sp, #simm  */
	  offset = (insn & 0x7f) << 2;		/* get scaled offset */
	  if (insn & 0x80)			/* Check for SUB.  */
	    regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
	    regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
      else if ((insn & 0xf800) == 0xa800)	/* add Rd, sp, #imm */
	regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
      else if ((insn & 0xfe00) == 0x1c00	/* add Rd, Rn, #imm */
	       && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
	regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
      else if ((insn & 0xf800) == 0x3000	/* add Rd, #imm */
	       && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
	regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
      else if ((insn & 0xfe00) == 0x1800	/* add Rd, Rn, Rm */
	       && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
	       && pv_is_constant (regs[bits (insn, 3, 5)]))
	regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
					  regs[bits (insn, 6, 8)]);
      else if ((insn & 0xff00) == 0x4400	/* add Rd, Rm */
	       && pv_is_constant (regs[bits (insn, 3, 6)]))
	  int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
	  int rm = bits (insn, 3, 6);
	  regs[rd] = pv_add (regs[rd], regs[rm]);
      else if ((insn & 0xff00) == 0x4600)	/* mov hi, lo or mov lo, hi */
	  int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
	  int src_reg = (insn & 0x78) >> 3;
	  regs[dst_reg] = regs[src_reg];
      else if ((insn & 0xf800) == 0x9000)	/* str rd, [sp, #off] */
	  /* Handle stores to the stack.  Normally pushes are used,
	     but with GCC -mtpcs-frame, there may be other stores
	     in the prologue to create the frame.  */
	  int regno = (insn >> 8) & 0x7;

	  offset = (insn & 0xff) << 2;
	  addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);

	  if (pv_area_store_would_trash (stack, addr))

	  pv_area_store (stack, addr, 4, regs[regno]);
      else if ((insn & 0xf800) == 0x6000)	/* str rd, [rn, #off] */
	  int rd = bits (insn, 0, 2);
	  int rn = bits (insn, 3, 5);

	  offset = bits (insn, 6, 10) << 2;
	  addr = pv_add_constant (regs[rn], offset);

	  if (pv_area_store_would_trash (stack, addr))

	  pv_area_store (stack, addr, 4, regs[rd]);
      else if (((insn & 0xf800) == 0x7000	/* strb Rd, [Rn, #off] */
		|| (insn & 0xf800) == 0x8000)	/* strh Rd, [Rn, #off] */
	       && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
	/* Ignore stores of argument registers to the stack.  */
      else if ((insn & 0xf800) == 0xc800	/* ldmia Rn!, { registers } */
	       && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
	/* Ignore block loads from the stack, potentially copying
	   parameters from memory.  */
      else if ((insn & 0xf800) == 0x9800	/* ldr Rd, [Rn, #immed] */
	       || ((insn & 0xf800) == 0x6800	/* ldr Rd, [sp, #immed] */
		   && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
	/* Similarly ignore single loads from the stack.  */
      else if ((insn & 0xffc0) == 0x0000	/* lsls Rd, Rm, #0 */
	       || (insn & 0xffc0) == 0x1c00)	/* add Rd, Rn, #0 */
	/* Skip register copies, i.e. saves to another register
	   instead of the stack.  */
      else if ((insn & 0xf800) == 0x2000)	/* movs Rd, #imm */
	/* Recognize constant loads; even with small stacks these are
	   necessary on Thumb.  */
	regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
      else if ((insn & 0xf800) == 0x4800)	/* ldr Rd, [pc, #imm] */
	  /* Constant pool loads, for the same reason.  */
	  unsigned int constant;

	  loc = start + 4 + bits (insn, 0, 7) * 4;
	  constant = read_memory_unsigned_integer (loc, 4, byte_order);
	  regs[bits (insn, 8, 10)] = pv_constant (constant);
      else if (thumb_insn_size (insn) == 4)	/* 32-bit Thumb-2 instructions.  */
	  unsigned short inst2;

	  inst2 = read_memory_unsigned_integer (start + 2, 2,
						byte_order_for_code);

	  if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
	      /* BL, BLX.  Allow some special function calls when
		 skipping the prologue; GCC generates these before
		 storing arguments to the stack.  */
	      int j1, j2, imm1, imm2;

	      imm1 = sbits (insn, 0, 10);
	      imm2 = bits (inst2, 0, 10);
	      j1 = bit (inst2, 13);
	      j2 = bit (inst2, 11);

	      offset = ((imm1 << 12) + (imm2 << 1));
	      offset ^= ((!j2) << 22) | ((!j1) << 23);
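	      /* The architectural offset is S:I1:I2:imm10:imm11:'0' with
		 I1 = NOT(J1 XOR S) and I2 = NOT(J2 XOR S).  sbits has
		 already sign-extended S into bits 22 and 23 of OFFSET, so
		 XORing with !J2 at bit 22 and !J1 at bit 23 turns those
		 copies of S into I2 and I1.  */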

	      nextpc = start + 4 + offset;
	      /* For BLX make sure to clear the low bits.  */
	      if (bit (inst2, 12) == 0)
		nextpc = nextpc & 0xfffffffc;

	      if (!skip_prologue_function (gdbarch, nextpc,
					   bit (inst2, 12) != 0))
	  else if ((insn & 0xffd0) == 0xe900	/* stmdb Rn{!},
						   { registers } */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	      pv_t addr = regs[bits (insn, 0, 3)];

	      if (pv_area_store_would_trash (stack, addr))

	      /* Calculate offsets of saved registers.  */
	      for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
		if (inst2 & (1 << regno))
		    addr = pv_add_constant (addr, -4);
		    pv_area_store (stack, addr, 4, regs[regno]);

		regs[bits (insn, 0, 3)] = addr;
	  else if ((insn & 0xff50) == 0xe940	/* strd Rt, Rt2,
						   [Rn, #+/-imm]{!} */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	      int regno1 = bits (inst2, 12, 15);
	      int regno2 = bits (inst2, 8, 11);
	      pv_t addr = regs[bits (insn, 0, 3)];

	      offset = inst2 & 0xff;
		addr = pv_add_constant (addr, offset);
		addr = pv_add_constant (addr, -offset);

	      if (pv_area_store_would_trash (stack, addr))

	      pv_area_store (stack, addr, 4, regs[regno1]);
	      pv_area_store (stack, pv_add_constant (addr, 4),

		regs[bits (insn, 0, 3)] = addr;
	  else if ((insn & 0xfff0) == 0xf8c0	/* str Rt,[Rn,+/-#imm]{!} */
		   && (inst2 & 0x0c00) == 0x0c00
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	      int regno = bits (inst2, 12, 15);
	      pv_t addr = regs[bits (insn, 0, 3)];

	      offset = inst2 & 0xff;
		addr = pv_add_constant (addr, offset);
		addr = pv_add_constant (addr, -offset);

	      if (pv_area_store_would_trash (stack, addr))

	      pv_area_store (stack, addr, 4, regs[regno]);

		regs[bits (insn, 0, 3)] = addr;
	  else if ((insn & 0xfff0) == 0xf8c0	/* str.w Rt,[Rn,#imm] */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	      int regno = bits (inst2, 12, 15);

	      offset = inst2 & 0xfff;
	      addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);

	      if (pv_area_store_would_trash (stack, addr))

	      pv_area_store (stack, addr, 4, regs[regno]);
	  else if ((insn & 0xffd0) == 0xf880	/* str{bh}.w Rt,[Rn,#imm] */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Ignore stores of argument registers to the stack.  */
	  else if ((insn & 0xffd0) == 0xf800	/* str{bh} Rt,[Rn,#+/-imm] */
		   && (inst2 & 0x0d00) == 0x0c00
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Ignore stores of argument registers to the stack.  */
	  else if ((insn & 0xffd0) == 0xe890	/* ldmia Rn[!],
						   { registers } */
		   && (inst2 & 0x8000) == 0x0000
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Ignore block loads from the stack, potentially copying
	       parameters from memory.  */
	  else if ((insn & 0xffb0) == 0xe950	/* ldrd Rt, Rt2,
						   [Rn, #+/-imm] */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Similarly ignore dual loads from the stack.  */
	  else if ((insn & 0xfff0) == 0xf850	/* ldr Rt,[Rn,#+/-imm] */
		   && (inst2 & 0x0d00) == 0x0c00
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Similarly ignore single loads from the stack.  */
	  else if ((insn & 0xfff0) == 0xf8d0	/* ldr.w Rt,[Rn,#imm] */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Similarly ignore single loads from the stack.  */
	  else if ((insn & 0xfbf0) == 0xf100	/* add.w Rd, Rn, #imm */
		   && (inst2 & 0x8000) == 0x0000)
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_add_constant (regs[bits (insn, 0, 3)],
				   thumb_expand_immediate (imm));
	  else if ((insn & 0xfbf0) == 0xf200	/* addw Rd, Rn, #imm */
		   && (inst2 & 0x8000) == 0x0000)
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_add_constant (regs[bits (insn, 0, 3)], imm);
	  else if ((insn & 0xfbf0) == 0xf1a0	/* sub.w Rd, Rn, #imm */
		   && (inst2 & 0x8000) == 0x0000)
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_add_constant (regs[bits (insn, 0, 3)],
				   - (CORE_ADDR) thumb_expand_immediate (imm));
	  else if ((insn & 0xfbf0) == 0xf2a0	/* subw Rd, Rn, #imm */
		   && (inst2 & 0x8000) == 0x0000)
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
	  else if ((insn & 0xfbff) == 0xf04f)	/* mov.w Rd, #const */
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_constant (thumb_expand_immediate (imm));
	  else if ((insn & 0xfbf0) == 0xf240)	/* movw Rd, #const */
		= EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);

	      regs[bits (inst2, 8, 11)] = pv_constant (imm);
	  else if (insn == 0xea5f		/* mov.w Rd,Rm */
		   && (inst2 & 0xf0f0) == 0)
	      int dst_reg = (inst2 & 0x0f00) >> 8;
	      int src_reg = inst2 & 0xf;
	      regs[dst_reg] = regs[src_reg];
	  else if ((insn & 0xff7f) == 0xf85f)	/* ldr.w Rt,<label> */
	      /* Constant pool loads.  */
	      unsigned int constant;

	      offset = bits (insn, 0, 11);
		loc = start + 4 + offset;
		loc = start + 4 - offset;

	      constant = read_memory_unsigned_integer (loc, 4, byte_order);
	      regs[bits (inst2, 12, 15)] = pv_constant (constant);
	  else if ((insn & 0xff7f) == 0xe95f)	/* ldrd Rt,Rt2,<label> */
	      /* Constant pool loads.  */
	      unsigned int constant;

	      offset = bits (insn, 0, 7) << 2;
		loc = start + 4 + offset;
		loc = start + 4 - offset;

	      constant = read_memory_unsigned_integer (loc, 4, byte_order);
	      regs[bits (inst2, 12, 15)] = pv_constant (constant);

	      constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
	      regs[bits (inst2, 8, 11)] = pv_constant (constant);
	  else if (thumb2_instruction_changes_pc (insn, inst2))
	      /* Don't scan past anything that might change control flow.  */
	      /* The optimizer might shove anything into the prologue,
		 so we just skip what we don't recognize.  */
	      unrecognized_pc = start;
      else if (thumb_instruction_changes_pc (insn))
	  /* Don't scan past anything that might change control flow.  */
	  /* The optimizer might shove anything into the prologue,
	     so we just skip what we don't recognize.  */
	  unrecognized_pc = start;

    fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
			paddress (gdbarch, start));

  if (unrecognized_pc == 0)
    unrecognized_pc = start;

      do_cleanups (back_to);
      return unrecognized_pc;

  if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
      /* Frame pointer is fp.  Frame size is constant.  */
      cache->framereg = ARM_FP_REGNUM;
      cache->framesize = -regs[ARM_FP_REGNUM].k;
  else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
      /* Frame pointer is r7.  Frame size is constant.  */
      cache->framereg = THUMB_FP_REGNUM;
      cache->framesize = -regs[THUMB_FP_REGNUM].k;
      /* Try the stack pointer... this is a bit desperate.  */
      cache->framereg = ARM_SP_REGNUM;
      cache->framesize = -regs[ARM_SP_REGNUM].k;

  for (i = 0; i < 16; i++)
    if (pv_area_find_reg (stack, gdbarch, i, &offset))
      cache->saved_regs[i].addr = offset;

  do_cleanups (back_to);
  return unrecognized_pc;

/* Try to analyze the instructions starting from PC, which load the symbol
   __stack_chk_guard.  Return the address of the instruction after loading
   this symbol, set the destination register number in *DESTREG, and set
   the size of the instructions used to load the symbol in *OFFSET.
   Return 0 if the instructions are not recognized.  */
arm_analyze_load_stack_chk_guard (CORE_ADDR pc, struct gdbarch *gdbarch,
				  unsigned int *destreg, int *offset)
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  int is_thumb = arm_pc_is_thumb (gdbarch, pc);
  unsigned int low, high, address;

      unsigned short insn1
	= read_memory_unsigned_integer (pc, 2, byte_order_for_code);

      if ((insn1 & 0xf800) == 0x4800)	/* ldr Rd, #immed */
	  *destreg = bits (insn1, 8, 10);
	  address = bits (insn1, 0, 7);
      else if ((insn1 & 0xfbf0) == 0xf240)	/* movw Rd, #const */
	  unsigned short insn2
	    = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);

	  low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);

	    = read_memory_unsigned_integer (pc + 4, 2, byte_order_for_code);
	    = read_memory_unsigned_integer (pc + 6, 2, byte_order_for_code);

	  /* movt Rd, #const */
	  if ((insn1 & 0xfbc0) == 0xf2c0)
	      high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
	      *destreg = bits (insn2, 8, 11);
	      address = (high << 16 | low);

	= read_memory_unsigned_integer (pc, 4, byte_order_for_code);

      if ((insn & 0x0e5f0000) == 0x041f0000)	/* ldr Rd, #immed */
	  address = bits (insn, 0, 11);
	  *destreg = bits (insn, 12, 15);
      else if ((insn & 0x0ff00000) == 0x03000000)	/* movw Rd, #const */
	  low = EXTRACT_MOVW_MOVT_IMM_A (insn);

	    = read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code);

	  if ((insn & 0x0ff00000) == 0x03400000)	/* movt Rd, #const */
	      high = EXTRACT_MOVW_MOVT_IMM_A (insn);
	      *destreg = bits (insn, 12, 15);
	      address = (high << 16 | low);

/* Try to skip a sequence of instructions used for stack protector.  If PC
   points to the first instruction of this sequence, return the address of
   the first instruction after this sequence, otherwise, return the
   original PC.

   On arm, this sequence of instructions is composed of mainly three steps,
     Step 1: load symbol __stack_chk_guard,
     Step 2: load from address of __stack_chk_guard,
     Step 3: store it to somewhere else.

   Usually, instructions on step 2 and step 3 are the same on various ARM
   architectures.  On step 2, it is one instruction 'ldr Rx, [Rn, #0]', and
   on step 3, it is also one instruction 'str Rx, [r7, #immd]'.  However,
   the instructions in step 1 vary between ARM architectures.  On ARMv7,
   they are:

	movw	Rn, #:lower16:__stack_chk_guard
	movt	Rn, #:upper16:__stack_chk_guard

   On earlier architectures, the guard address is instead loaded from a
   literal pool ending in:

	.word	__stack_chk_guard

   Since ldr/str is a very popular instruction, we can't use them as the
   'fingerprint' or 'signature' of the stack protector sequence.  Here we
   choose the sequence {movw/movt, ldr}/ldr/str plus the symbol
   __stack_chk_guard, if not stripped, as the 'fingerprint' of a stack
   protector code sequence.  */
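
/* As an illustration (not verbatim compiler output), a Thumb-2 function
   built with -fstack-protector might begin:

	movw	r3, #:lower16:__stack_chk_guard
	movt	r3, #:upper16:__stack_chk_guard
	ldr	r3, [r3]
	str	r3, [r7, #12]

   and arm_skip_stack_protector below would return the address just past
   the final str.  */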

arm_skip_stack_protector (CORE_ADDR pc, struct gdbarch *gdbarch)
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned int address, basereg;
  struct minimal_symbol *stack_chk_guard;
  int is_thumb = arm_pc_is_thumb (gdbarch, pc);

  /* Try to parse the instructions in Step 1.  */
  addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,

  stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
  /* If the name of the symbol doesn't start with '__stack_chk_guard', this
     instruction sequence is not for the stack protector.  If the symbol
     has been removed, we conservatively assume the sequence is for the
     stack protector.  */
      && strncmp (SYMBOL_LINKAGE_NAME (stack_chk_guard), "__stack_chk_guard",
		  strlen ("__stack_chk_guard")) != 0)

      unsigned int destreg;
	= read_memory_unsigned_integer (pc + offset, 2, byte_order_for_code);

      /* Step 2: ldr Rd, [Rn, #immed], encoding T1.  */
      if ((insn & 0xf800) != 0x6800)
      if (bits (insn, 3, 5) != basereg)
      destreg = bits (insn, 0, 2);

      insn = read_memory_unsigned_integer (pc + offset + 2, 2,
					   byte_order_for_code);
      /* Step 3: str Rd, [Rn, #immed], encoding T1.  */
      if ((insn & 0xf800) != 0x6000)
      if (destreg != bits (insn, 0, 2))

      unsigned int destreg;
	= read_memory_unsigned_integer (pc + offset, 4, byte_order_for_code);

      /* Step 2: ldr Rd, [Rn, #immed], encoding A1.  */
      if ((insn & 0x0e500000) != 0x04100000)
      if (bits (insn, 16, 19) != basereg)
      destreg = bits (insn, 12, 15);

      /* Step 3: str Rd, [Rn, #immed], encoding A1.  */
      insn = read_memory_unsigned_integer (pc + offset + 4,
					   4, byte_order_for_code);
      if ((insn & 0x0e500000) != 0x04000000)
      if (bits (insn, 12, 15) != destreg)

  /* The total size of the two ldr/str instructions is 4 bytes in Thumb-2
     mode and 8 bytes in ARM mode.  */
    return pc + offset + 4;
    return pc + offset + 8;

/* Advance the PC across any function entry prologue instructions to
   reach some "real" code.

   The APCS (ARM Procedure Call Standard) defines the following
   prologue:

	mov	ip, sp
	[stmfd	sp!, {a1,a2,a3,a4}]
	stmfd	sp!, {...,fp,ip,lr,pc}
	[stfe	f7, [sp, #-12]!]
	[stfe	f6, [sp, #-12]!]
	[stfe	f5, [sp, #-12]!]
	[stfe	f4, [sp, #-12]!]
	sub	fp, ip, #nn	@@ nn == 20 or 4 depending on second insn.  */

arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  CORE_ADDR func_addr, limit_pc;
  struct symtab_and_line sal;

  /* See if we can determine the end of the prologue via the symbol table.
     If so, then return either PC, or the PC after the prologue, whichever
     is greater.  */
  if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
      CORE_ADDR post_prologue_pc
	= skip_prologue_using_sal (gdbarch, func_addr);
      struct symtab *s = find_pc_symtab (func_addr);

      if (post_prologue_pc)
	  = arm_skip_stack_protector (post_prologue_pc, gdbarch);

      /* GCC always emits a line note before the prologue and another
	 one after, even if the two are at the same address or on the
	 same line.  Take advantage of this so that we do not need to
	 know every instruction that might appear in the prologue.  We
	 will have producer information for most binaries; if it is
	 missing (e.g. for -gstabs), assume the GNU tools.  */
      if (post_prologue_pc
	      || s->producer == NULL
	      || strncmp (s->producer, "GNU ", sizeof ("GNU ") - 1) == 0))
	return post_prologue_pc;

      if (post_prologue_pc != 0)
	  CORE_ADDR analyzed_limit;

	  /* For non-GCC compilers, make sure the entire line is an
	     acceptable prologue; GDB will round this function's
	     return value up to the end of the following line so we
	     can not skip just part of a line (and we do not want to).

	     RealView does not treat the prologue specially, but does
	     associate prologue code with the opening brace; so this
	     lets us skip the first line if we think it is the opening
	     brace.  */
	  if (arm_pc_is_thumb (gdbarch, func_addr))
	    analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
						     post_prologue_pc, NULL);
	    analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
						   post_prologue_pc, NULL);

	  if (analyzed_limit != post_prologue_pc)

	  return post_prologue_pc;

  /* Can't determine prologue from the symbol table, need to examine
     instructions.  */

  /* Find an upper limit on the function prologue using the debug
     information.  If the debug information could not be used to provide
     that bound, then use an arbitrary large number as the upper bound.  */
  /* Like arm_scan_prologue, stop no later than pc + 64.  */
  limit_pc = skip_prologue_using_sal (gdbarch, pc);
    limit_pc = pc + 64;		/* Magic.  */

  /* Check if this is Thumb code.  */
  if (arm_pc_is_thumb (gdbarch, pc))
    return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);

  for (skip_pc = pc; skip_pc < limit_pc; skip_pc += 4)
      inst = read_memory_unsigned_integer (skip_pc, 4, byte_order_for_code);

      /* "mov ip, sp" is no longer a required part of the prologue.  */
      if (inst == 0xe1a0c00d)			/* mov ip, sp */

      if ((inst & 0xfffff000) == 0xe28dc000)	/* add ip, sp #n */

      if ((inst & 0xfffff000) == 0xe24dc000)	/* sub ip, sp #n */

      /* Some prologues begin with "str lr, [sp, #-4]!".  */
      if (inst == 0xe52de004)			/* str lr, [sp, #-4]! */

      if ((inst & 0xfffffff0) == 0xe92d0000)	/* stmfd sp!,{a1,a2,a3,a4} */

      if ((inst & 0xfffff800) == 0xe92dd800)	/* stmfd sp!,{fp,ip,lr,pc} */

      /* Any insns after this point may float into the code, if it makes
	 for better instruction scheduling, so we skip them only if we
	 find them, but still consider the function to be frame-ful.  */

      /* We may have either one sfmfd instruction here, or several stfe
	 insns, depending on the version of floating point code we
	 support.  */
      if ((inst & 0xffbf0fff) == 0xec2d0200)	/* sfmfd fn, <cnt>, [sp]! */

      if ((inst & 0xffff8fff) == 0xed6d0103)	/* stfe fn, [sp, #-12]! */

      if ((inst & 0xfffff000) == 0xe24cb000)	/* sub fp, ip, #nn */

      if ((inst & 0xfffff000) == 0xe24dd000)	/* sub sp, sp, #nn */

      if ((inst & 0xffffc000) == 0xe54b0000	/* strb r(0123),[r11,#-nn] */
	  || (inst & 0xffffc0f0) == 0xe14b00b0	/* strh r(0123),[r11,#-nn] */
	  || (inst & 0xffffc000) == 0xe50b0000)	/* str r(0123),[r11,#-nn] */

      if ((inst & 0xffffc000) == 0xe5cd0000	/* strb r(0123),[sp,#nn] */
	  || (inst & 0xffffc0f0) == 0xe1cd00b0	/* strh r(0123),[sp,#nn] */
	  || (inst & 0xffffc000) == 0xe58d0000)	/* str r(0123),[sp,#nn] */

      /* Unrecognized instruction; stop scanning.  */

  return skip_pc;		/* End of prologue.  */

/* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
   This function decodes a Thumb function prologue to determine:
     1) the size of the stack frame
     2) which registers are saved on it
     3) the offsets of saved regs
     4) the offset from the stack pointer to the frame pointer

   A typical Thumb function prologue would create this stack frame
   (offsets relative to FP)
     old SP ->	24  stack parameters
		20  LR
		16  R7
     R7 ->	 0  local variables (16 bytes)
     SP ->     -12  additional stack space (12 bytes)
   The frame size would thus be 36 bytes, and the frame offset would be
   12 bytes.  The frame register is R7.

   The comments for thumb_skip_prolog() describe the algorithm we use
   to detect the end of the prologue.  */

thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
		     CORE_ADDR block_addr, struct arm_prologue_cache *cache)
  CORE_ADDR prologue_start;
  CORE_ADDR prologue_end;
  CORE_ADDR current_pc;

  if (find_pc_partial_function (block_addr, NULL, &prologue_start,
      /* See comment in arm_scan_prologue for an explanation of
	 this heuristic.  */
      if (prologue_end > prologue_start + 64)
	  prologue_end = prologue_start + 64;
    /* We're in the boondocks: we have no idea where the start of the
       function is.  */

  prologue_end = min (prologue_end, prev_pc);

  thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);

/* Return 1 if THIS_INSTR might change control flow, 0 otherwise.  */
arm_instruction_changes_pc (uint32_t this_instr)
  if (bits (this_instr, 28, 31) == INST_NV)
    /* Unconditional instructions.  */
    switch (bits (this_instr, 24, 27))
	/* Branch with Link and change to Thumb.  */
	/* Coprocessor register transfer.  */
	if (bits (this_instr, 12, 15) == 15)
	  error (_("Invalid update to pc in instruction"));
    switch (bits (this_instr, 25, 27))
	if (bits (this_instr, 23, 24) == 2 && bit (this_instr, 20) == 0)
	    /* Multiplies and extra load/stores.  */
	    if (bit (this_instr, 4) == 1 && bit (this_instr, 7) == 1)
	      /* Neither multiplies nor extension load/stores are allowed
		 to modify PC.  */

	    /* Otherwise, miscellaneous instructions.  */

	    /* BX <reg>, BXJ <reg>, BLX <reg> */
	    if (bits (this_instr, 4, 27) == 0x12fff1
		|| bits (this_instr, 4, 27) == 0x12fff2
		|| bits (this_instr, 4, 27) == 0x12fff3)

	    /* Other miscellaneous instructions are unpredictable if they
	       modify PC.  */
	/* Data processing instruction.  Fall through.  */
	if (bits (this_instr, 12, 15) == 15)

	/* Media instructions and architecturally undefined instructions.  */
	if (bits (this_instr, 25, 27) == 3 && bit (this_instr, 4) == 1)

	if (bit (this_instr, 20) == 0)
	if (bits (this_instr, 12, 15) == ARM_PC_REGNUM)

	/* Load/store multiple.  */
	if (bit (this_instr, 20) == 1 && bit (this_instr, 15) == 1)

	/* Branch and branch with link.  */

	/* Coprocessor transfers or SWIs cannot affect PC.  */

  internal_error (__FILE__, __LINE__, _("bad value in switch"));

/* Analyze an ARM mode prologue starting at PROLOGUE_START and
   continuing no further than PROLOGUE_END.  If CACHE is non-NULL,
   fill it in.  Return the first address not recognized as a prologue
   instruction.

   We recognize all the instructions typically found in ARM prologues,
   plus harmless instructions which can be skipped (either for analysis
   purposes, or a more restrictive set that can be skipped when finding
   the end of the prologue).  */

arm_analyze_prologue (struct gdbarch *gdbarch,
		      CORE_ADDR prologue_start, CORE_ADDR prologue_end,
		      struct arm_prologue_cache *cache)
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  CORE_ADDR offset, current_pc;
  pv_t regs[ARM_FPS_REGNUM];
  struct pv_area *stack;
  struct cleanup *back_to;
  int framereg, framesize;
  CORE_ADDR unrecognized_pc = 0;

  /* Search the prologue looking for instructions that set up the
     frame pointer, adjust the stack pointer, and save registers.

     Be careful, however, and if it doesn't look like a prologue,
     don't try to scan it.  If, for instance, a frameless function
     begins with stmfd sp!, then we will tell ourselves there is
     a frame, which will confuse stack traceback, as well as "finish"
     and other operations that rely on a knowledge of the stack
     traceback.  */

  for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
    regs[regno] = pv_register (regno, 0);
  stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
  back_to = make_cleanup_free_pv_area (stack);

  for (current_pc = prologue_start;
       current_pc < prologue_end;
	= read_memory_unsigned_integer (current_pc, 4, byte_order_for_code);

      if (insn == 0xe1a0c00d)			/* mov ip, sp */
	  regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
      else if ((insn & 0xfff00000) == 0xe2800000	/* add Rd, Rn, #n */
	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
	  unsigned imm = insn & 0xff;			/* immediate value */
	  unsigned rot = (insn & 0xf00) >> 7;		/* rotate amount */
	  int rd = bits (insn, 12, 15);
	  imm = (imm >> rot) | (imm << (32 - rot));
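	  /* For example (illustrative), #0x400 is encoded as imm8 == 1
	     with rotate field 11, so rot == 22 and the line above
	     computes (1 >> 22) | (1 << 10) == 0x400.  */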
	  regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
      else if ((insn & 0xfff00000) == 0xe2400000	/* sub Rd, Rn, #n */
	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
	  unsigned imm = insn & 0xff;			/* immediate value */
	  unsigned rot = (insn & 0xf00) >> 7;		/* rotate amount */
	  int rd = bits (insn, 12, 15);
	  imm = (imm >> rot) | (imm << (32 - rot));
	  regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
      else if ((insn & 0xffff0fff) == 0xe52d0004)	/* str Rd,
							   [sp, #-4]! */
	  if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
	  regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
	  pv_area_store (stack, regs[ARM_SP_REGNUM], 4,
			 regs[bits (insn, 12, 15)]);
      else if ((insn & 0xffff0000) == 0xe92d0000)
	/* stmfd sp!, {..., fp, ip, lr, pc}
	   or
	   stmfd sp!, {a1, a2, a3, a4}  */
	  int mask = insn & 0xffff;

	  if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))

	  /* Calculate offsets of saved registers.  */
	  for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
	    if (mask & (1 << regno))
		  = pv_add_constant (regs[ARM_SP_REGNUM], -4);
		pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
      else if ((insn & 0xffff0000) == 0xe54b0000	/* strb rx,[r11,#-n] */
	       || (insn & 0xffff00f0) == 0xe14b00b0	/* strh rx,[r11,#-n] */
	       || (insn & 0xffffc000) == 0xe50b0000)	/* str rx,[r11,#-n] */
	  /* No need to add this to saved_regs -- it's just an arg reg.  */
      else if ((insn & 0xffff0000) == 0xe5cd0000	/* strb rx,[sp,#n] */
	       || (insn & 0xffff00f0) == 0xe1cd00b0	/* strh rx,[sp,#n] */
	       || (insn & 0xffffc000) == 0xe58d0000)	/* str rx,[sp,#n] */
	  /* No need to add this to saved_regs -- it's just an arg reg.  */
      else if ((insn & 0xfff00000) == 0xe8800000	/* stm Rn,
							   { registers } */
	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
	  /* No need to add this to saved_regs -- it's just arg regs.  */
      else if ((insn & 0xfffff000) == 0xe24cb000)	/* sub fp, ip #n */
	  unsigned imm = insn & 0xff;			/* immediate value */
	  unsigned rot = (insn & 0xf00) >> 7;		/* rotate amount */
	  imm = (imm >> rot) | (imm << (32 - rot));
	  regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
      else if ((insn & 0xfffff000) == 0xe24dd000)	/* sub sp, sp #n */
	  unsigned imm = insn & 0xff;			/* immediate value */
	  unsigned rot = (insn & 0xf00) >> 7;		/* rotate amount */
	  imm = (imm >> rot) | (imm << (32 - rot));
	  regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
      else if ((insn & 0xffff7fff) == 0xed6d0103	/* stfe f?,
							   [sp, #-12]! */
	       && gdbarch_tdep (gdbarch)->have_fpa_registers)
	  if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))

	  regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
	  regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
	  pv_area_store (stack, regs[ARM_SP_REGNUM], 12, regs[regno]);
      else if ((insn & 0xffbf0fff) == 0xec2d0200	/* sfmfd f0, 4,
							   [sp]! */
	       && gdbarch_tdep (gdbarch)->have_fpa_registers)
	  int n_saved_fp_regs;
	  unsigned int fp_start_reg, fp_bound_reg;

	  if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))

	  if ((insn & 0x800) == 0x800)		/* N0 is set */
	      if ((insn & 0x40000) == 0x40000)	/* N1 is set */
		n_saved_fp_regs = 3;
		n_saved_fp_regs = 1;
	      if ((insn & 0x40000) == 0x40000)	/* N1 is set */
		n_saved_fp_regs = 2;
		n_saved_fp_regs = 4;

	  fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
	  fp_bound_reg = fp_start_reg + n_saved_fp_regs;
	  for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
	      regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
	      pv_area_store (stack, regs[ARM_SP_REGNUM], 12,
			     regs[fp_start_reg++]);
      else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL)  /* bl */
	  /* Allow some special function calls when skipping the
	     prologue; GCC generates these before storing arguments to
	     the stack.  */
	  CORE_ADDR dest = BranchDest (current_pc, insn);

	  if (skip_prologue_function (gdbarch, dest, 0))
      else if ((insn & 0xf0000000) != 0xe0000000)
	break;			/* Condition not true, exit early.  */
      else if (arm_instruction_changes_pc (insn))
	/* Don't scan past anything that might change control flow.  */
      else if ((insn & 0xfe500000) == 0xe8100000	/* ldm */
	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
	/* Ignore block loads from the stack, potentially copying
	   parameters from memory.  */
      else if ((insn & 0xfc500000) == 0xe4100000
	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
	/* Similarly ignore single loads from the stack.  */
      else if ((insn & 0xffff0ff0) == 0xe1a00000)
	/* MOV Rd, Rm.  Skip register copies, i.e. saves to another
	   register instead of the stack.  */
	  /* The optimizer might shove anything into the prologue,
	     so we just skip what we don't recognize.  */
	  unrecognized_pc = current_pc;

  if (unrecognized_pc == 0)
    unrecognized_pc = current_pc;

  /* The frame size is just the distance from the frame register
     to the original stack pointer.  */
  if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
      /* Frame pointer is fp.  */
      framereg = ARM_FP_REGNUM;
      framesize = -regs[ARM_FP_REGNUM].k;
      /* Try the stack pointer... this is a bit desperate.  */
      framereg = ARM_SP_REGNUM;
      framesize = -regs[ARM_SP_REGNUM].k;

      cache->framereg = framereg;
      cache->framesize = framesize;

      for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
	if (pv_area_find_reg (stack, gdbarch, regno, &offset))
	  cache->saved_regs[regno].addr = offset;

    fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
			paddress (gdbarch, unrecognized_pc));

  do_cleanups (back_to);
  return unrecognized_pc;

arm_scan_prologue (struct frame_info *this_frame,
		   struct arm_prologue_cache *cache)
  struct gdbarch *gdbarch = get_frame_arch (this_frame);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  CORE_ADDR prologue_start, prologue_end, current_pc;
  CORE_ADDR prev_pc = get_frame_pc (this_frame);
  CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
  pv_t regs[ARM_FPS_REGNUM];
  struct pv_area *stack;
  struct cleanup *back_to;

  /* Assume there is no frame until proven otherwise.  */
  cache->framereg = ARM_SP_REGNUM;
  cache->framesize = 0;

  /* Check for Thumb prologue.  */
  if (arm_frame_is_thumb (this_frame))
      thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);

  /* Find the function prologue.  If we can't find the function in
     the symbol table, peek in the stack frame to find the PC.  */
  if (find_pc_partial_function (block_addr, NULL, &prologue_start,
      /* One way to find the end of the prologue (which works well
	 for unoptimized code) is to do the following:

	    struct symtab_and_line sal = find_pc_line (prologue_start, 0);

	    if (sal.line == 0)
	      prologue_end = prev_pc;
	    else if (sal.end < prologue_end)
	      prologue_end = sal.end;

	 This mechanism is very accurate so long as the optimizer
	 doesn't move any instructions from the function body into the
	 prologue.  If this happens, sal.end will be the last
	 instruction in the first hunk of prologue code just before
	 the first instruction that the scheduler has moved from
	 the body to the prologue.

	 In order to make sure that we scan all of the prologue
	 instructions, we use a slightly less accurate mechanism which
	 may scan more than necessary.  To help compensate for this
	 lack of accuracy, the prologue scanning loop below contains
	 several clauses which'll cause the loop to terminate early if
	 an implausible prologue instruction is encountered.

	 The expression

	      prologue_start + 64

	 is a suitable endpoint since it accounts for the largest
	 possible prologue plus up to five instructions inserted by
	 the scheduler.  */

      if (prologue_end > prologue_start + 64)
	  prologue_end = prologue_start + 64;	/* See above.  */
      /* We have no symbol information.  Our only option is to assume this
	 function has a standard stack frame and the normal frame register.
	 Then, we can find the value of our frame pointer on entrance to
	 the callee (or at the present moment if this is the innermost frame).
	 The value stored there should be the address of the stmfd + 8.  */
      CORE_ADDR frame_loc;
      LONGEST return_value;

      frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
      if (!safe_read_memory_integer (frame_loc, 4, byte_order, &return_value))
	  prologue_start = gdbarch_addr_bits_remove
			     (gdbarch, return_value) - 8;
	  prologue_end = prologue_start + 64;	/* See above.  */

  if (prev_pc < prologue_end)
    prologue_end = prev_pc;

  arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);

static struct arm_prologue_cache *
arm_make_prologue_cache (struct frame_info *this_frame)
  struct arm_prologue_cache *cache;
  CORE_ADDR unwound_fp;

  cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
  cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);

  arm_scan_prologue (this_frame, cache);

  unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
  if (unwound_fp == 0)

  cache->prev_sp = unwound_fp + cache->framesize;

  /* Calculate actual addresses of saved registers using offsets
     determined by arm_scan_prologue.  */
  for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
    if (trad_frame_addr_p (cache->saved_regs, reg))
      cache->saved_regs[reg].addr += cache->prev_sp;

/* Our frame ID for a normal frame is the current function's starting PC
   and the caller's SP when we were called.  */

arm_prologue_this_id (struct frame_info *this_frame,
		      struct frame_id *this_id)
  struct arm_prologue_cache *cache;

  if (*this_cache == NULL)
    *this_cache = arm_make_prologue_cache (this_frame);
  cache = *this_cache;

  /* This is meant to halt the backtrace at "_start".  */
  pc = get_frame_pc (this_frame);
  if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)

  /* If we've hit a wall, stop.  */
  if (cache->prev_sp == 0)

  /* Use function start address as part of the frame ID.  If we cannot
     identify the start address (due to missing symbol information),
     fall back to just using the current PC.  */
  func = get_frame_func (this_frame);

  id = frame_id_build (cache->prev_sp, func);
2063 static struct value *
2064 arm_prologue_prev_register (struct frame_info *this_frame,
2068 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2069 struct arm_prologue_cache *cache;
2071 if (*this_cache == NULL)
2072 *this_cache = arm_make_prologue_cache (this_frame);
2073 cache = *this_cache;
2075 /* If we are asked to unwind the PC, then we need to return the LR
2076 instead. The prologue may save PC, but it will point into this
2077 frame's prologue, not the next frame's resume location. Also
2078 strip the saved T bit. A valid LR may have the low bit set, but
2079 a valid PC never does. */
2080 if (prev_regnum == ARM_PC_REGNUM)
2084 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2085 return frame_unwind_got_constant (this_frame, prev_regnum,
2086 arm_addr_bits_remove (gdbarch, lr));
2089 /* SP is generally not saved to the stack, but this frame is
2090 identified by the next frame's stack pointer at the time of the call.
2091 The value was already reconstructed into PREV_SP. */
2092 if (prev_regnum == ARM_SP_REGNUM)
2093 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
2095 /* The CPSR may have been changed by the call instruction and by the
2096 called function. The only bit we can reconstruct is the T bit,
2097 by checking the low bit of LR as of the call. This is a reliable
2098 indicator of Thumb-ness except for some ARM v4T pre-interworking
2099 Thumb code, which could get away with a clear low bit as long as
2100 the called function did not use bx. Guess that all other
2101 bits are unchanged; the condition flags are presumably lost,
2102 but the processor status is likely valid. */
2103 if (prev_regnum == ARM_PS_REGNUM)
2106 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
2108 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
2109 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2110 if (IS_THUMB_ADDR (lr))
2114 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
2117 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
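/* Illustrative sketch, deliberately fenced off from compilation: the
   T-bit handling described above, reduced to plain 32-bit arithmetic.
   Bit 0 of a saved LR carries the Thumb state of the return address;
   bit 5 is the T bit of an A/R-profile CPSR (arm_psr_thumb_bit
   abstracts the M-profile difference).  The helper names are invented
   for the example and are not part of GDB.  */
#if 0
static unsigned int
example_pc_from_saved_lr (unsigned int lr)
{
  /* A valid LR may have the low bit set; a valid PC never does.  */
  return lr & ~1u;
}

static unsigned int
example_cpsr_from_saved_lr (unsigned int cpsr, unsigned int lr)
{
  unsigned int t_bit = 1u << 5;

  /* Copy the Thumb state of the return address into CPSR.T and
     guess that all other bits are unchanged.  */
  return (lr & 1u) ? (cpsr | t_bit) : (cpsr & ~t_bit);
}
#endif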
2121 struct frame_unwind arm_prologue_unwind = {
2123 default_frame_unwind_stop_reason,
2124 arm_prologue_this_id,
2125 arm_prologue_prev_register,
2127 default_frame_sniffer
2130 /* Maintain a list of ARM exception table entries per objfile, similar to the
2131 list of mapping symbols. We only cache entries for standard ARM-defined
2132 personality routines; the cache will contain only the frame unwinding
2133 instructions associated with the entry (not the descriptors). */
2135 static const struct objfile_data *arm_exidx_data_key;
2137 struct arm_exidx_entry
2142 typedef struct arm_exidx_entry arm_exidx_entry_s;
2143 DEF_VEC_O(arm_exidx_entry_s);
2145 struct arm_exidx_data
2147 VEC(arm_exidx_entry_s) **section_maps;
2151 arm_exidx_data_free (struct objfile *objfile, void *arg)
2153 struct arm_exidx_data *data = arg;
2156 for (i = 0; i < objfile->obfd->section_count; i++)
2157 VEC_free (arm_exidx_entry_s, data->section_maps[i]);
2161 arm_compare_exidx_entries (const struct arm_exidx_entry *lhs,
2162 const struct arm_exidx_entry *rhs)
2164 return lhs->addr < rhs->addr;
2167 static struct obj_section *
2168 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2170 struct obj_section *osect;
2172 ALL_OBJFILE_OSECTIONS (objfile, osect)
2173 if (bfd_get_section_flags (objfile->obfd,
2174 osect->the_bfd_section) & SEC_ALLOC)
2176 bfd_vma start, size;
2177 start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
2178 size = bfd_get_section_size (osect->the_bfd_section);
2180 if (start <= vma && vma < start + size)
2187 /* Parse contents of exception table and exception index sections
2188 of OBJFILE, and fill in the exception table entry cache.
2190 For each entry that refers to a standard ARM-defined personality
2191 routine, extract the frame unwinding instructions (from either
2192 the index or the table section). The unwinding instructions are normalized by:
2194 - extracting them from the rest of the table data
2195 - converting to host endianness
2196 - appending the implicit 0xb0 ("Finish") code
2198 The extracted and normalized instructions are stored for later
2199 retrieval by the arm_find_exidx_entry routine. */
2202 arm_exidx_new_objfile (struct objfile *objfile)
2204 struct cleanup *cleanups;
2205 struct arm_exidx_data *data;
2206 asection *exidx, *extab;
2207 bfd_vma exidx_vma = 0, extab_vma = 0;
2208 bfd_size_type exidx_size = 0, extab_size = 0;
2209 gdb_byte *exidx_data = NULL, *extab_data = NULL;
2212 /* If we've already touched this file, do nothing. */
2213 if (!objfile || objfile_data (objfile, arm_exidx_data_key) != NULL)
2215 cleanups = make_cleanup (null_cleanup, NULL);
2217 /* Read contents of exception table and index. */
2218 exidx = bfd_get_section_by_name (objfile->obfd, ".ARM.exidx");
2221 exidx_vma = bfd_section_vma (objfile->obfd, exidx);
2222 exidx_size = bfd_get_section_size (exidx);
2223 exidx_data = xmalloc (exidx_size);
2224 make_cleanup (xfree, exidx_data);
2226 if (!bfd_get_section_contents (objfile->obfd, exidx,
2227 exidx_data, 0, exidx_size))
2229 do_cleanups (cleanups);
2234 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2237 extab_vma = bfd_section_vma (objfile->obfd, extab);
2238 extab_size = bfd_get_section_size (extab);
2239 extab_data = xmalloc (extab_size);
2240 make_cleanup (xfree, extab_data);
2242 if (!bfd_get_section_contents (objfile->obfd, extab,
2243 extab_data, 0, extab_size))
2245 do_cleanups (cleanups);
2250 /* Allocate exception table data structure. */
2251 data = OBSTACK_ZALLOC (&objfile->objfile_obstack, struct arm_exidx_data);
2252 set_objfile_data (objfile, arm_exidx_data_key, data);
2253 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
2254 objfile->obfd->section_count,
2255 VEC(arm_exidx_entry_s) *);
2257 /* Fill in exception table. */
2258 for (i = 0; i < exidx_size / 8; i++)
2260 struct arm_exidx_entry new_exidx_entry;
2261 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8);
2262 bfd_vma val = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8 + 4);
2263 bfd_vma addr = 0, word = 0;
2264 int n_bytes = 0, n_words = 0;
2265 struct obj_section *sec;
2266 gdb_byte *entry = NULL;
2268 /* Extract address of start of function. */
2269 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2270 idx += exidx_vma + i * 8;
2272 /* Find section containing function and compute section offset. */
2273 sec = arm_obj_section_from_vma (objfile, idx);
2276 idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);
2278 /* Determine address of exception table entry. */
2281 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2283 else if ((val & 0xff000000) == 0x80000000)
2285 /* Exception table entry embedded in .ARM.exidx
2286 -- must be short form. */
2290 else if (!(val & 0x80000000))
2292 /* Exception table entry in .ARM.extab. */
2293 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2294 addr += exidx_vma + i * 8 + 4;
2296 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_size)
2298 word = bfd_h_get_32 (objfile->obfd,
2299 extab_data + addr - extab_vma);
2302 if ((word & 0xff000000) == 0x80000000)
2307 else if ((word & 0xff000000) == 0x81000000
2308 || (word & 0xff000000) == 0x82000000)
2312 n_words = ((word >> 16) & 0xff);
2314 else if (!(word & 0x80000000))
2317 struct obj_section *pers_sec;
2318 int gnu_personality = 0;
2320 /* Custom personality routine. */
2321 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2322 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2324 /* Check whether we've got one of the variants of the
2325 GNU personality routines. */
2326 pers_sec = arm_obj_section_from_vma (objfile, pers);
2329 static const char *personality[] =
2331 "__gcc_personality_v0",
2332 "__gxx_personality_v0",
2333 "__gcj_personality_v0",
2334 "__gnu_objc_personality_v0",
2338 CORE_ADDR pc = pers + obj_section_offset (pers_sec);
2341 for (k = 0; personality[k]; k++)
2342 if (lookup_minimal_symbol_by_pc_name
2343 (pc, personality[k], objfile))
2345 gnu_personality = 1;
2350 /* If so, the next word contains a word count in the high
2351 byte, followed by the same unwind instructions as the
2352 pre-defined forms. */
2354 && addr + 4 <= extab_vma + extab_size)
2356 word = bfd_h_get_32 (objfile->obfd,
2357 extab_data + addr - extab_vma);
2360 n_words = ((word >> 24) & 0xff);
2366 /* Sanity check address. */
2368 if (addr < extab_vma || addr + 4 * n_words > extab_vma + extab_size)
2369 n_words = n_bytes = 0;
2371 /* The unwind instructions reside in WORD (only the N_BYTES least
2372 significant bytes are valid), followed by N_WORDS words in the
2373 extab section starting at ADDR. */
2374 if (n_bytes || n_words)
2376 gdb_byte *p = entry = obstack_alloc (&objfile->objfile_obstack,
2377 n_bytes + n_words * 4 + 1);
2380 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2384 word = bfd_h_get_32 (objfile->obfd,
2385 extab_data + addr - extab_vma);
2388 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2389 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2390 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2391 *p++ = (gdb_byte) (word & 0xff);
2394 /* Implied "Finish" to terminate the list. */
2398 /* Push entry onto vector. They are guaranteed to always
2399 appear in order of increasing addresses. */
2400 new_exidx_entry.addr = idx;
2401 new_exidx_entry.entry = entry;
2402 VEC_safe_push (arm_exidx_entry_s,
2403 data->section_maps[sec->the_bfd_section->index],
2407 do_cleanups (cleanups);
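/* Illustrative sketch, not compiled: the prel31 decoding used several
   times above, isolated as a standalone helper.  An .ARM.exidx word
   holds a 31-bit place-relative signed offset; bit 31 is an encoding
   flag and bit 30 is the sign bit.  The helper name is invented for
   the example.  */
#if 0
static unsigned int
example_decode_prel31 (unsigned int word, unsigned int place)
{
  /* Keep the low 31 bits, sign-extend from bit 30, then relocate
     relative to the address of the word itself.  Unsigned arithmetic
     wraps modulo 2^32, which is what we want for 32-bit addresses.  */
  unsigned int offset = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;

  return place + offset;
}
#endif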
2410 /* Search for the exception table entry covering MEMADDR. If one is found,
2411 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2412 set *START to the start of the region covered by this entry. */
2415 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2417 struct obj_section *sec;
2419 sec = find_pc_section (memaddr);
2422 struct arm_exidx_data *data;
2423 VEC(arm_exidx_entry_s) *map;
2424 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2427 data = objfile_data (sec->objfile, arm_exidx_data_key);
2430 map = data->section_maps[sec->the_bfd_section->index];
2431 if (!VEC_empty (arm_exidx_entry_s, map))
2433 struct arm_exidx_entry *map_sym;
2435 idx = VEC_lower_bound (arm_exidx_entry_s, map, &map_key,
2436 arm_compare_exidx_entries);
2438 /* VEC_lower_bound finds the earliest ordered insertion
2439 point. If the following symbol starts at this exact
2440 address, we use that; otherwise, the preceding
2441 exception table entry covers this address. */
2442 if (idx < VEC_length (arm_exidx_entry_s, map))
2444 map_sym = VEC_index (arm_exidx_entry_s, map, idx);
2445 if (map_sym->addr == map_key.addr)
2448 *start = map_sym->addr + obj_section_addr (sec);
2449 return map_sym->entry;
2455 map_sym = VEC_index (arm_exidx_entry_s, map, idx - 1);
2457 *start = map_sym->addr + obj_section_addr (sec);
2458 return map_sym->entry;
2467 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2468 instruction list from the ARM exception table entry ENTRY, allocate and
2469 return a prologue cache structure describing how to unwind this frame.
2471 Return NULL if the unwinding instruction list contains a "spare",
2472 "reserved" or "refuse to unwind" instruction as defined in section
2473 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2474 for the ARM Architecture" document. */
2476 static struct arm_prologue_cache *
2477 arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2482 struct arm_prologue_cache *cache;
2483 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2484 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2490 /* Whenever we reload SP, we have to retrieve its
2491 actual value in the current frame. */
2494 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2496 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2497 vsp = get_frame_register_unsigned (this_frame, reg);
2501 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
2502 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2508 /* Decode next unwind instruction. */
2511 if ((insn & 0xc0) == 0)
2513 int offset = insn & 0x3f;
2514 vsp += (offset << 2) + 4;
2516 else if ((insn & 0xc0) == 0x40)
2518 int offset = insn & 0x3f;
2519 vsp -= (offset << 2) + 4;
2521 else if ((insn & 0xf0) == 0x80)
2523 int mask = ((insn & 0xf) << 8) | *entry++;
2526 /* The special case of an all-zero mask identifies
2527 "Refuse to unwind". We return NULL to fall back
2528 to the prologue analyzer. */
2532 /* Pop registers r4..r15 under mask. */
2533 for (i = 0; i < 12; i++)
2534 if (mask & (1 << i))
2536 cache->saved_regs[4 + i].addr = vsp;
2540 /* Special-case popping SP -- we need to reload vsp. */
2541 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2544 else if ((insn & 0xf0) == 0x90)
2546 int reg = insn & 0xf;
2548 /* Reserved cases. */
2549 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2552 /* Set SP from another register and mark VSP for reload. */
2553 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2556 else if ((insn & 0xf0) == 0xa0)
2558 int count = insn & 0x7;
2559 int pop_lr = (insn & 0x8) != 0;
2562 /* Pop r4..r[4+count]. */
2563 for (i = 0; i <= count; i++)
2565 cache->saved_regs[4 + i].addr = vsp;
2569 /* If indicated by flag, pop LR as well. */
2572 cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
2576 else if (insn == 0xb0)
2578 /* We could only have updated PC by popping into it; if so, it
2579 will show up as an address. Otherwise, copy LR into PC. */
2580 if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
2581 cache->saved_regs[ARM_PC_REGNUM]
2582 = cache->saved_regs[ARM_LR_REGNUM];
2587 else if (insn == 0xb1)
2589 int mask = *entry++;
2592 /* All-zero mask and mask >= 16 is "spare". */
2593 if (mask == 0 || mask >= 16)
2596 /* Pop r0..r3 under mask. */
2597 for (i = 0; i < 4; i++)
2598 if (mask & (1 << i))
2600 cache->saved_regs[i].addr = vsp;
2604 else if (insn == 0xb2)
2606 ULONGEST offset = 0;
2611 offset |= (*entry & 0x7f) << shift;
2614 while (*entry++ & 0x80);
2616 vsp += 0x204 + (offset << 2);
2618 else if (insn == 0xb3)
2620 int start = *entry >> 4;
2621 int count = (*entry++) & 0xf;
2624 /* Only registers D0..D15 are valid here. */
2625 if (start + count >= 16)
2628 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2629 for (i = 0; i <= count; i++)
2631 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2635 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2638 else if ((insn & 0xf8) == 0xb8)
2640 int count = insn & 0x7;
2643 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2644 for (i = 0; i <= count; i++)
2646 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2650 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2653 else if (insn == 0xc6)
2655 int start = *entry >> 4;
2656 int count = (*entry++) & 0xf;
2659 /* Only registers WR0..WR15 are valid. */
2660 if (start + count >= 16)
2663 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2664 for (i = 0; i <= count; i++)
2666 cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
2670 else if (insn == 0xc7)
2672 int mask = *entry++;
2675 /* All-zero mask and mask >= 16 is "spare". */
2676 if (mask == 0 || mask >= 16)
2679 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2680 for (i = 0; i < 4; i++)
2681 if (mask & (1 << i))
2683 cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
2687 else if ((insn & 0xf8) == 0xc0)
2689 int count = insn & 0x7;
2692 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2693 for (i = 0; i <= count; i++)
2695 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
2699 else if (insn == 0xc8)
2701 int start = *entry >> 4;
2702 int count = (*entry++) & 0xf;
2705 /* Only registers D0..D31 are valid. */
2706 if (start + count >= 16)
2709 /* Pop VFP double-precision registers
2710 D[16+start]..D[16+start+count]. */
2711 for (i = 0; i <= count; i++)
2713 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
2717 else if (insn == 0xc9)
2719 int start = *entry >> 4;
2720 int count = (*entry++) & 0xf;
2723 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2724 for (i = 0; i <= count; i++)
2726 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2730 else if ((insn & 0xf8) == 0xd0)
2732 int count = insn & 0x7;
2735 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2736 for (i = 0; i <= count; i++)
2738 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2744 /* Everything else is "spare". */
2749 /* If we restore SP from a register, assume this was the frame register.
2750 Otherwise just fall back to SP as frame register. */
2751 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2752 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2754 cache->framereg = ARM_SP_REGNUM;
2756 /* Determine offset to previous frame. */
2758 cache->framesize = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2760 /* We already got the previous SP. */
2761 cache->prev_sp = vsp;
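/* Illustrative sketch, not compiled: a stripped-down trace of the
   decoder above for the common two-byte sequence { 0xa8, 0xb0 },
   i.e. "pop {r4, lr}; finish", which undoes a "push {r4, lr}"
   prologue.  Indices 4, 14 and 15 are the usual ARM core register
   numbers for r4, LR and PC; the array and function names are made
   up for the example.  */
#if 0
static void
example_unwind_push_r4_lr (unsigned int entry_sp,
			   unsigned int saved_addr[16],
			   unsigned int *prev_sp)
{
  unsigned int vsp = entry_sp;

  /* 0xa8: pop r4, then LR, advancing vsp by one word each.  */
  saved_addr[4] = vsp;
  vsp += 4;
  saved_addr[14] = vsp;
  vsp += 4;

  /* 0xb0: finish.  PC was not popped explicitly, so the caller
     resumes at the value restored into LR.  */
  saved_addr[15] = saved_addr[14];

  *prev_sp = vsp;
}
#endif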
2766 /* Unwinding via ARM exception table entries. Note that the sniffer
2767 already computes a filled-in prologue cache, which is then used
2768 with the same arm_prologue_this_id and arm_prologue_prev_register
2769 routines also used for prologue-parsing based unwinding. */
2772 arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2773 struct frame_info *this_frame,
2774 void **this_prologue_cache)
2776 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2777 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2778 CORE_ADDR addr_in_block, exidx_region, func_start;
2779 struct arm_prologue_cache *cache;
2782 /* See if we have an ARM exception table entry covering this address. */
2783 addr_in_block = get_frame_address_in_block (this_frame);
2784 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2788 /* The ARM exception table does not describe unwind information
2789 for arbitrary PC values, but is guaranteed to be correct only
2790 at call sites. We have to decide here whether we want to use
2791 ARM exception table information for this frame, or fall back
2792 to using prologue parsing. (Note that if we have DWARF CFI,
2793 this sniffer isn't even called -- CFI is always preferred.)
2795 Before we make this decision, however, we check whether we
2796 actually have *symbol* information for the current frame.
2797 If not, prologue parsing would not work anyway, so we might
2798 as well use the exception table and hope for the best. */
2799 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2803 /* If the next frame is "normal", we are at a call site in this
2804 frame, so exception information is guaranteed to be valid. */
2805 if (get_next_frame (this_frame)
2806 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2809 /* We also assume exception information is valid if we're currently
2810 blocked in a system call. The system library is supposed to
2811 ensure this, so that e.g. pthread cancellation works. */
2812 if (arm_frame_is_thumb (this_frame))
2816 if (safe_read_memory_integer (get_frame_pc (this_frame) - 2, 2,
2817 byte_order_for_code, &insn)
2818 && (insn & 0xff00) == 0xdf00 /* svc */)
2825 if (safe_read_memory_integer (get_frame_pc (this_frame) - 4, 4,
2826 byte_order_for_code, &insn)
2827 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2831 /* Bail out if we don't know that exception information is valid. */
2835 /* The ARM exception index does not mark the *end* of the region
2836 covered by the entry, and some functions will not have any entry.
2837 To correctly recognize the end of the covered region, the linker
2838 should have inserted dummy records with a CANTUNWIND marker.
2840 Unfortunately, current versions of GNU ld do not reliably do
2841 this, and thus we may have found an incorrect entry above.
2842 As a (temporary) sanity check, we only use the entry if it
2843 lies *within* the bounds of the function. Note that this check
2844 might reject perfectly valid entries that just happen to cover
2845 multiple functions; therefore this check ought to be removed
2846 once the linker is fixed. */
2847 if (func_start > exidx_region)
2851 /* Decode the list of unwinding instructions into a prologue cache.
2852 Note that this may fail due to e.g. a "refuse to unwind" code. */
2853 cache = arm_exidx_fill_cache (this_frame, entry);
2857 *this_prologue_cache = cache;
2861 struct frame_unwind arm_exidx_unwind = {
2863 default_frame_unwind_stop_reason,
2864 arm_prologue_this_id,
2865 arm_prologue_prev_register,
2867 arm_exidx_unwind_sniffer
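/* Illustrative sketch, not compiled: the two "stopped right after a
   system call" tests used by the sniffer above, written as standalone
   predicates on raw instruction words.  Thumb "svc #imm8" encodes as
   0xdfXX; ARM "svc #imm24" has 0xf in bits 24-27, with the condition
   field in bits 28-31.  The function names are invented.  */
#if 0
static int
example_is_thumb_svc (unsigned short insn)
{
  return (insn & 0xff00) == 0xdf00;
}

static int
example_is_arm_svc (unsigned int insn)
{
  return (insn & 0x0f000000) == 0x0f000000;
}
#endif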
2870 static struct arm_prologue_cache *
2871 arm_make_stub_cache (struct frame_info *this_frame)
2873 struct arm_prologue_cache *cache;
2875 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2876 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2878 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2883 /* Our frame ID for a stub frame is the current SP and LR. */
2886 arm_stub_this_id (struct frame_info *this_frame,
2888 struct frame_id *this_id)
2890 struct arm_prologue_cache *cache;
2892 if (*this_cache == NULL)
2893 *this_cache = arm_make_stub_cache (this_frame);
2894 cache = *this_cache;
2896 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
2900 arm_stub_unwind_sniffer (const struct frame_unwind *self,
2901 struct frame_info *this_frame,
2902 void **this_prologue_cache)
2904 CORE_ADDR addr_in_block;
2907 addr_in_block = get_frame_address_in_block (this_frame);
2908 if (in_plt_section (addr_in_block, NULL)
2909 /* We also use the stub unwinder if the target memory is unreadable,
2910 to avoid having the prologue unwinder try to read it. */
2911 || target_read_memory (get_frame_pc (this_frame), dummy, 4) != 0)
2917 struct frame_unwind arm_stub_unwind = {
2919 default_frame_unwind_stop_reason,
2921 arm_prologue_prev_register,
2923 arm_stub_unwind_sniffer
2927 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
2929 struct arm_prologue_cache *cache;
2931 if (*this_cache == NULL)
2932 *this_cache = arm_make_prologue_cache (this_frame);
2933 cache = *this_cache;
2935 return cache->prev_sp - cache->framesize;
2938 struct frame_base arm_normal_base = {
2939 &arm_prologue_unwind,
2940 arm_normal_frame_base,
2941 arm_normal_frame_base,
2942 arm_normal_frame_base
2945 /* Assuming THIS_FRAME is a dummy, return the frame ID of that
2946 dummy frame. The frame ID's base needs to match the TOS value
2947 saved by save_dummy_frame_tos() and returned from
2948 arm_push_dummy_call, and the PC needs to match the dummy frame's breakpoint. */
2951 static struct frame_id
2952 arm_dummy_id (struct gdbarch *gdbarch, struct frame_info *this_frame)
2954 return frame_id_build (get_frame_register_unsigned (this_frame,
2956 get_frame_pc (this_frame));
2959 /* Given THIS_FRAME, find the previous frame's resume PC (which will
2960 be used to construct the previous frame's ID, after looking up the
2961 containing function). */
2964 arm_unwind_pc (struct gdbarch *gdbarch, struct frame_info *this_frame)
2967 pc = frame_unwind_register_unsigned (this_frame, ARM_PC_REGNUM);
2968 return arm_addr_bits_remove (gdbarch, pc);
2972 arm_unwind_sp (struct gdbarch *gdbarch, struct frame_info *this_frame)
2974 return frame_unwind_register_unsigned (this_frame, ARM_SP_REGNUM);
2977 static struct value *
2978 arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
2981 struct gdbarch * gdbarch = get_frame_arch (this_frame);
2983 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
2988 /* The PC is normally copied from the return column, which
2989 describes saves of LR. However, that version may have an
2990 extra bit set to indicate Thumb state. The bit is not part of the PC. */
2992 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2993 return frame_unwind_got_constant (this_frame, regnum,
2994 arm_addr_bits_remove (gdbarch, lr));
2997 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
2998 cpsr = get_frame_register_unsigned (this_frame, regnum);
2999 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3000 if (IS_THUMB_ADDR (lr))
3004 return frame_unwind_got_constant (this_frame, regnum, cpsr);
3007 internal_error (__FILE__, __LINE__,
3008 _("Unexpected register %d"), regnum);
3013 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3014 struct dwarf2_frame_state_reg *reg,
3015 struct frame_info *this_frame)
3021 reg->how = DWARF2_FRAME_REG_FN;
3022 reg->loc.fn = arm_dwarf2_prev_register;
3025 reg->how = DWARF2_FRAME_REG_CFA;
3030 /* Return true if we are in the function's epilogue, i.e. after the
3031 instruction that destroyed the function's stack frame. */
3034 thumb_in_function_epilogue_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3036 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3037 unsigned int insn, insn2;
3038 int found_return = 0, found_stack_adjust = 0;
3039 CORE_ADDR func_start, func_end;
3043 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3046 /* The epilogue is a sequence of instructions along the following lines:
3048 - add stack frame size to SP or FP
3049 - [if frame pointer used] restore SP from FP
3050 - restore registers from SP [may include PC]
3051 - a return-type instruction [if PC wasn't already restored]
3053 In a first pass, we scan forward from the current PC and verify the
3054 instructions we find as compatible with this sequence, ending in a return instruction.
3057 However, this is not sufficient to distinguish indirect function calls
3058 within a function from indirect tail calls in the epilogue in some cases.
3059 Therefore, if we didn't already find any SP-changing instruction during
3060 forward scan, we add a backward scanning heuristic to ensure we actually
3061 are in the epilogue. */
3064 while (scan_pc < func_end && !found_return)
3066 if (target_read_memory (scan_pc, buf, 2))
3070 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3072 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3074 else if (insn == 0x46f7) /* mov pc, lr */
3076 else if (insn == 0x46bd) /* mov sp, r7 */
3077 found_stack_adjust = 1;
3078 else if ((insn & 0xff00) == 0xb000) /* add sp, imm or sub sp, imm */
3079 found_stack_adjust = 1;
3080 else if ((insn & 0xfe00) == 0xbc00) /* pop <registers> */
3082 found_stack_adjust = 1;
3083 if (insn & 0x0100) /* <registers> include PC. */
3086 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
3088 if (target_read_memory (scan_pc, buf, 2))
3092 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3094 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3096 found_stack_adjust = 1;
3097 if (insn2 & 0x8000) /* <registers> include PC. */
3100 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3101 && (insn2 & 0x0fff) == 0x0b04)
3103 found_stack_adjust = 1;
3104 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3107 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3108 && (insn2 & 0x0e00) == 0x0a00)
3109 found_stack_adjust = 1;
3120 /* Since any instruction in the epilogue sequence, with the possible
3121 exception of return itself, updates the stack pointer, we need to
3122 scan backwards for at most one instruction. Try either a 16-bit or
3123 a 32-bit instruction. This is just a heuristic, so we do not worry
3124 too much about false positives. */
3126 if (!found_stack_adjust)
3128 if (pc - 4 < func_start)
3130 if (target_read_memory (pc - 4, buf, 4))
3133 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3134 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3136 if (insn2 == 0x46bd) /* mov sp, r7 */
3137 found_stack_adjust = 1;
3138 else if ((insn2 & 0xff00) == 0xb000) /* add sp, imm or sub sp, imm */
3139 found_stack_adjust = 1;
3140 else if ((insn2 & 0xff00) == 0xbc00) /* pop <registers> without PC */
3141 found_stack_adjust = 1;
3142 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3143 found_stack_adjust = 1;
3144 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3145 && (insn2 & 0x0fff) == 0x0b04)
3146 found_stack_adjust = 1;
3147 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3148 && (insn2 & 0x0e00) == 0x0a00)
3149 found_stack_adjust = 1;
3152 return found_stack_adjust;
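/* Illustrative sketch, not compiled: the 16-bit Thumb return and
   stack-adjust tests used in the forward scan above, as standalone
   predicates.  The encodings match the comments in the scan; the
   function names are invented for the example.  */
#if 0
static int
example_is_thumb_return (unsigned short insn)
{
  return ((insn & 0xff80) == 0x4700		/* bx <Rm> */
	  || insn == 0x46f7			/* mov pc, lr */
	  || ((insn & 0xfe00) == 0xbc00		/* pop <registers> */
	      && (insn & 0x0100) != 0));	/* ... including PC */
}

static int
example_is_thumb_sp_adjust (unsigned short insn)
{
  return (insn == 0x46bd			/* mov sp, r7 */
	  || (insn & 0xff00) == 0xb000		/* add/sub sp, #imm */
	  || (insn & 0xfe00) == 0xbc00);	/* pop <registers> */
}
#endif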
3155 /* Return true if we are in the function's epilogue, i.e. after the
3156 instruction that destroyed the function's stack frame. */
3159 arm_in_function_epilogue_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3161 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3163 int found_return, found_stack_adjust;
3164 CORE_ADDR func_start, func_end;
3166 if (arm_pc_is_thumb (gdbarch, pc))
3167 return thumb_in_function_epilogue_p (gdbarch, pc);
3169 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3172 /* We are in the epilogue if the previous instruction was a stack
3173 adjustment and the next instruction is a possible return (bx, mov
3174 pc, or pop). We could have to scan backwards to find the stack
3175 adjustment, or forwards to find the return, but this is a decent
3176 approximation. First scan forwards. */
3179 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3180 if (bits (insn, 28, 31) != INST_NV)
3182 if ((insn & 0x0ffffff0) == 0x012fff10)
3185 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3188 else if ((insn & 0x0fff0000) == 0x08bd0000
3189 && (insn & 0x0000c000) != 0)
3190 /* POP (LDMIA), including PC or LR. */
3197 /* Scan backwards. This is just a heuristic, so do not worry about
3198 false positives from mode changes. */
3200 if (pc < func_start + 4)
3203 found_stack_adjust = 0;
3204 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
3205 if (bits (insn, 28, 31) != INST_NV)
3207 if ((insn & 0x0df0f000) == 0x0080d000)
3208 /* ADD SP (register or immediate). */
3209 found_stack_adjust = 1;
3210 else if ((insn & 0x0df0f000) == 0x0040d000)
3211 /* SUB SP (register or immediate). */
3212 found_stack_adjust = 1;
3213 else if ((insn & 0x0ffffff0) == 0x01a0d000)
3215 found_stack_adjust = 1;
3216 else if ((insn & 0x0fff0000) == 0x08bd0000)
3218 found_stack_adjust = 1;
3221 if (found_stack_adjust)
3228 /* When arguments must be pushed onto the stack, they go on in reverse
3229 order. The code below implements a FILO (stack) to do this. */
3234 struct stack_item *prev;
3238 static struct stack_item *
3239 push_stack_item (struct stack_item *prev, const void *contents, int len)
3241 struct stack_item *si;
3242 si = xmalloc (sizeof (struct stack_item));
3243 si->data = xmalloc (len);
3246 memcpy (si->data, contents, len);
3250 static struct stack_item *
3251 pop_stack_item (struct stack_item *si)
3253 struct stack_item *dead = si;
3261 /* Return the alignment (in bytes) of the given type. */
3264 arm_type_align (struct type *t)
3270 t = check_typedef (t);
3271 switch (TYPE_CODE (t))
3274 /* Should never happen. */
3275 internal_error (__FILE__, __LINE__, _("unknown type alignment"));
3279 case TYPE_CODE_ENUM:
3283 case TYPE_CODE_RANGE:
3284 case TYPE_CODE_BITSTRING:
3286 case TYPE_CODE_CHAR:
3287 case TYPE_CODE_BOOL:
3288 return TYPE_LENGTH (t);
3290 case TYPE_CODE_ARRAY:
3291 case TYPE_CODE_COMPLEX:
3292 /* TODO: What about vector types? */
3293 return arm_type_align (TYPE_TARGET_TYPE (t));
3295 case TYPE_CODE_STRUCT:
3296 case TYPE_CODE_UNION:
3298 for (n = 0; n < TYPE_NFIELDS (t); n++)
3300 falign = arm_type_align (TYPE_FIELD_TYPE (t, n));
3308 /* Possible base types for a candidate for passing and returning in VFP registers. */
3311 enum arm_vfp_cprc_base_type
3320 /* The length of one element of base type B. */
3323 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3327 case VFP_CPRC_SINGLE:
3329 case VFP_CPRC_DOUBLE:
3331 case VFP_CPRC_VEC64:
3333 case VFP_CPRC_VEC128:
3336 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3341 /* The character ('s', 'd' or 'q') for the type of VFP register used
3342 for passing base type B. */
3345 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3349 case VFP_CPRC_SINGLE:
3351 case VFP_CPRC_DOUBLE:
3353 case VFP_CPRC_VEC64:
3355 case VFP_CPRC_VEC128:
3358 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3363 /* Determine whether T may be part of a candidate for passing and
3364 returning in VFP registers, ignoring the limit on the total number
3365 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3366 classification of the first valid component found; if it is not
3367 VFP_CPRC_UNKNOWN, all components must have the same classification
3368 as *BASE_TYPE. If it is found that T contains a type not permitted
3369 for passing and returning in VFP registers, a type differently
3370 classified from *BASE_TYPE, or two types differently classified
3371 from each other, return -1, otherwise return the total number of
3372 base-type elements found (possibly 0 in an empty structure or
3373 array). Vectors and complex types are not currently supported,
3374 matching the generic AAPCS support. */
3377 arm_vfp_cprc_sub_candidate (struct type *t,
3378 enum arm_vfp_cprc_base_type *base_type)
3380 t = check_typedef (t);
3381 switch (TYPE_CODE (t))
3384 switch (TYPE_LENGTH (t))
3387 if (*base_type == VFP_CPRC_UNKNOWN)
3388 *base_type = VFP_CPRC_SINGLE;
3389 else if (*base_type != VFP_CPRC_SINGLE)
3394 if (*base_type == VFP_CPRC_UNKNOWN)
3395 *base_type = VFP_CPRC_DOUBLE;
3396 else if (*base_type != VFP_CPRC_DOUBLE)
3405 case TYPE_CODE_ARRAY:
3409 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t), base_type);
3412 if (TYPE_LENGTH (t) == 0)
3414 gdb_assert (count == 0);
3417 else if (count == 0)
3419 unitlen = arm_vfp_cprc_unit_length (*base_type);
3420 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3421 return TYPE_LENGTH (t) / unitlen;
3425 case TYPE_CODE_STRUCT:
3430 for (i = 0; i < TYPE_NFIELDS (t); i++)
3432 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3434 if (sub_count == -1)
3438 if (TYPE_LENGTH (t) == 0)
3440 gdb_assert (count == 0);
3443 else if (count == 0)
3445 unitlen = arm_vfp_cprc_unit_length (*base_type);
3446 if (TYPE_LENGTH (t) != unitlen * count)
3451 case TYPE_CODE_UNION:
3456 for (i = 0; i < TYPE_NFIELDS (t); i++)
3458 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3460 if (sub_count == -1)
3462 count = (count > sub_count ? count : sub_count);
3464 if (TYPE_LENGTH (t) == 0)
3466 gdb_assert (count == 0);
3469 else if (count == 0)
3471 unitlen = arm_vfp_cprc_unit_length (*base_type);
3472 if (TYPE_LENGTH (t) != unitlen * count)
3484 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3485 if passed to or returned from a non-variadic function with the VFP
3486 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3487 *BASE_TYPE to the base type for T and *COUNT to the number of
3488 elements of that base type before returning. */
3491 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3494 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3495 int c = arm_vfp_cprc_sub_candidate (t, &b);
3496 if (c <= 0 || c > 4)
3503 /* Return 1 if the VFP ABI should be used for passing arguments to and
3504 returning values from a function of type FUNC_TYPE, 0 otherwise. */
3508 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3510 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3511 /* Variadic functions always use the base ABI. Assume that functions
3512 without debug info are not variadic. */
3513 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3515 /* The VFP ABI is only supported as a variant of AAPCS. */
3516 if (tdep->arm_abi != ARM_ABI_AAPCS)
3518 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
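/* Illustrative examples, not compiled: two parameter types run
   through the classification above.  The first is a homogeneous
   aggregate of two doubles, so it is a CPRC with base type
   VFP_CPRC_DOUBLE and count 2 and is passed in d0/d1 under the VFP
   variant of the AAPCS; the second mixes float and int members, so
   arm_vfp_cprc_sub_candidate rejects it and it falls back to core
   registers and the stack.  The type names are invented.  */
#if 0
struct example_cprc     { double re; double im; };
struct example_not_cprc { float f; int i; };
#endif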
3521 /* We currently only support passing parameters in integer registers, which
3522 conforms with GCC's default model, and VFP argument passing following
3523 the VFP variant of AAPCS. Several other variants exist and
3524 we should probably support some of them based on the selected ABI. */
3527 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
3528 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3529 struct value **args, CORE_ADDR sp, int struct_return,
3530 CORE_ADDR struct_addr)
3532 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3536 struct stack_item *si = NULL;
3539 unsigned vfp_regs_free = (1 << 16) - 1;
3541 /* Determine the type of this function and whether the VFP ABI applies. */
3543 ftype = check_typedef (value_type (function));
3544 if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3545 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3546 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
3548 /* Set the return address. For the ARM, the return breakpoint is
3549 always at BP_ADDR. */
3550 if (arm_pc_is_thumb (gdbarch, bp_addr))
3552 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
3554 /* Walk through the list of args and determine how large a temporary
3555 stack is required. Need to take care here as structs may be
3556 passed on the stack, and we have to push them. */
3559 argreg = ARM_A1_REGNUM;
3562 /* The struct_return pointer occupies the first parameter
3563 passing register. */
3567 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
3568 gdbarch_register_name (gdbarch, argreg),
3569 paddress (gdbarch, struct_addr));
3570 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3574 for (argnum = 0; argnum < nargs; argnum++)
3577 struct type *arg_type;
3578 struct type *target_type;
3579 enum type_code typecode;
3580 const bfd_byte *val;
3582 enum arm_vfp_cprc_base_type vfp_base_type;
3584 int may_use_core_reg = 1;
3586 arg_type = check_typedef (value_type (args[argnum]));
3587 len = TYPE_LENGTH (arg_type);
3588 target_type = TYPE_TARGET_TYPE (arg_type);
3589 typecode = TYPE_CODE (arg_type);
3590 val = value_contents (args[argnum]);
3592 align = arm_type_align (arg_type);
3593 /* Round alignment up to a whole number of words. */
3594 align = (align + INT_REGISTER_SIZE - 1) & ~(INT_REGISTER_SIZE - 1);
3595 /* Different ABIs have different maximum alignments. */
3596 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3598 /* The APCS ABI only requires word alignment. */
3599 align = INT_REGISTER_SIZE;
3603 /* The AAPCS requires at most doubleword alignment. */
3604 if (align > INT_REGISTER_SIZE * 2)
3605 align = INT_REGISTER_SIZE * 2;
3609 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3617 /* Because this is a CPRC it cannot go in a core register or
3618 cause a core register to be skipped for alignment.
3619 Either it goes in VFP registers and the rest of this loop
3620 iteration is skipped for this argument, or it goes on the
3621 stack (and the stack alignment code is correct for this case). */
3623 may_use_core_reg = 0;
3625 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3626 shift = unit_length / 4;
3627 mask = (1 << (shift * vfp_base_count)) - 1;
3628 for (regno = 0; regno < 16; regno += shift)
3629 if (((vfp_regs_free >> regno) & mask) == mask)
3638 vfp_regs_free &= ~(mask << regno);
3639 reg_scaled = regno / shift;
3640 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3641 for (i = 0; i < vfp_base_count; i++)
3645 if (reg_char == 'q')
3646 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
3647 val + i * unit_length);
3650 sprintf (name_buf, "%c%d", reg_char, reg_scaled + i);
3651 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3653 regcache_cooked_write (regcache, regnum,
3654 val + i * unit_length);
3661 /* This CPRC could not go in VFP registers, so all VFP
3662 registers are now marked as used. */
3667 /* Push stack padding for doubleword alignment. */
3668 if (nstack & (align - 1))
3670 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3671 nstack += INT_REGISTER_SIZE;
3674 /* Doubleword aligned quantities must go in even register pairs. */
3675 if (may_use_core_reg
3676 && argreg <= ARM_LAST_ARG_REGNUM
3677 && align > INT_REGISTER_SIZE
3681 /* If the argument is a pointer to a function, and it is a
3682 Thumb function, create a LOCAL copy of the value and set
3683 the THUMB bit in it. */
3684 if (TYPE_CODE_PTR == typecode
3685 && target_type != NULL
3686 && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
3688 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
3689 if (arm_pc_is_thumb (gdbarch, regval))
3691 bfd_byte *copy = alloca (len);
3692 store_unsigned_integer (copy, len, byte_order,
3693 MAKE_THUMB_ADDR (regval));
3698 /* Copy the argument to general registers or the stack in
3699 register-sized pieces. Large arguments are split between
3700 registers and stack. */
3703 int partial_len = len < INT_REGISTER_SIZE ? len : INT_REGISTER_SIZE;
3705 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
3707 /* The argument is being passed in a general purpose register. */
3710 CORE_ADDR regval = extract_unsigned_integer (val, partial_len, byte_order);
3711 if (byte_order == BFD_ENDIAN_BIG)
3712 regval <<= (INT_REGISTER_SIZE - partial_len) * 8;
3714 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
3716 gdbarch_register_name
3718 phex (regval, INT_REGISTER_SIZE));
3719 regcache_cooked_write_unsigned (regcache, argreg, regval);
3724 /* Push the arguments onto the stack. */
3726 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3728 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3729 nstack += INT_REGISTER_SIZE;
3736 /* If we have an odd number of words to push, then decrement the stack
3737 by one word now, so the first stack argument will be doubleword aligned. */
3744 write_memory (sp, si->data, si->len);
3745 si = pop_stack_item (si);
3748 /* Finally, update the SP register. */
3749 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
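/* Illustrative example, not compiled: how the rules above lay out a
   simple call under the base AAPCS without VFP argument passing.
   The callee and caller names are invented for the example.  */
#if 0
extern void example_callee (int a, double d, int b);

static void
example_caller (void)
{
  /* A goes in r0.  D requires doubleword alignment, so it takes the
     even register pair r2/r3 and r1 is skipped.  B no longer fits in
     r0-r3 and is pushed on the stack, which itself stays 8-byte
     aligned (see arm_frame_align below).  */
  example_callee (1, 2.0, 3);
}
#endif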
3755 /* Always align the frame to an 8-byte boundary. This is required on
3756 some platforms and harmless on the rest. */
3759 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3761 /* Align the stack to eight bytes. */
3762 return sp & ~ (CORE_ADDR) 7;
3766 print_fpu_flags (int flags)
3768 if (flags & (1 << 0))
3769 fputs ("IVO ", stdout);
3770 if (flags & (1 << 1))
3771 fputs ("DVZ ", stdout);
3772 if (flags & (1 << 2))
3773 fputs ("OFL ", stdout);
3774 if (flags & (1 << 3))
3775 fputs ("UFL ", stdout);
3776 if (flags & (1 << 4))
3777 fputs ("INX ", stdout);
3781 /* Print interesting information about the floating point processor
3782 (if present) or emulator. */
3784 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
3785 struct frame_info *frame, const char *args)
3787 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
3790 type = (status >> 24) & 127;
3791 if (status & (1 << 31))
3792 printf (_("Hardware FPU type %d\n"), type);
3794 printf (_("Software FPU type %d\n"), type);
3795 /* i18n: [floating point unit] mask */
3796 fputs (_("mask: "), stdout);
3797 print_fpu_flags (status >> 16);
3798 /* i18n: [floating point unit] flags */
3799 fputs (_("flags: "), stdout);
3800 print_fpu_flags (status);
3803 /* Construct the ARM extended floating point type. */
3804 static struct type *
3805 arm_ext_type (struct gdbarch *gdbarch)
3807 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3809 if (!tdep->arm_ext_type)
3811 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
3812 floatformats_arm_ext);
3814 return tdep->arm_ext_type;
3817 static struct type *
3818 arm_neon_double_type (struct gdbarch *gdbarch)
3820 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3822 if (tdep->neon_double_type == NULL)
3824 struct type *t, *elem;
3826 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
3828 elem = builtin_type (gdbarch)->builtin_uint8;
3829 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
3830 elem = builtin_type (gdbarch)->builtin_uint16;
3831 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
3832 elem = builtin_type (gdbarch)->builtin_uint32;
3833 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
3834 elem = builtin_type (gdbarch)->builtin_uint64;
3835 append_composite_type_field (t, "u64", elem);
3836 elem = builtin_type (gdbarch)->builtin_float;
3837 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
3838 elem = builtin_type (gdbarch)->builtin_double;
3839 append_composite_type_field (t, "f64", elem);
3841 TYPE_VECTOR (t) = 1;
3842 TYPE_NAME (t) = "neon_d";
3843 tdep->neon_double_type = t;
3846 return tdep->neon_double_type;
3849 /* FIXME: The vector types are not correctly ordered on big-endian
3850 targets. Just as s0 is the low bits of d0, d0[0] is also the low
3851 bits of d0 - regardless of what unit size is being held in d0. So
3852 the offset of the first uint8 in d0 is 7, but the offset of the
3853 first float is 4. This code works as-is for little-endian targets. */
3856 static struct type *
3857 arm_neon_quad_type (struct gdbarch *gdbarch)
3859 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3861 if (tdep->neon_quad_type == NULL)
3863 struct type *t, *elem;
3865 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
3867 elem = builtin_type (gdbarch)->builtin_uint8;
3868 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
3869 elem = builtin_type (gdbarch)->builtin_uint16;
3870 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
3871 elem = builtin_type (gdbarch)->builtin_uint32;
3872 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
3873 elem = builtin_type (gdbarch)->builtin_uint64;
3874 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
3875 elem = builtin_type (gdbarch)->builtin_float;
3876 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
3877 elem = builtin_type (gdbarch)->builtin_double;
3878 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
3880 TYPE_VECTOR (t) = 1;
3881 TYPE_NAME (t) = "neon_q";
3882 tdep->neon_quad_type = t;
3885 return tdep->neon_quad_type;
3888 /* Return the GDB type object for the "standard" data type of data in register N. */
3891 static struct type *
3892 arm_register_type (struct gdbarch *gdbarch, int regnum)
3894 int num_regs = gdbarch_num_regs (gdbarch);
3896 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
3897 && regnum >= num_regs && regnum < num_regs + 32)
3898 return builtin_type (gdbarch)->builtin_float;
3900 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
3901 && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
3902 return arm_neon_quad_type (gdbarch);
3904 /* If the target description has register information, we are only
3905 in this function so that we can override the types of
3906 double-precision registers for NEON. */
3907 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
3909 struct type *t = tdesc_register_type (gdbarch, regnum);
3911 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
3912 && TYPE_CODE (t) == TYPE_CODE_FLT
3913 && gdbarch_tdep (gdbarch)->have_neon)
3914 return arm_neon_double_type (gdbarch);
3919 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
3921 if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
3922 return builtin_type (gdbarch)->builtin_void;
3924 return arm_ext_type (gdbarch);
3926 else if (regnum == ARM_SP_REGNUM)
3927 return builtin_type (gdbarch)->builtin_data_ptr;
3928 else if (regnum == ARM_PC_REGNUM)
3929 return builtin_type (gdbarch)->builtin_func_ptr;
3930 else if (regnum >= ARRAY_SIZE (arm_register_names))
3931 /* These registers are only supported on targets which supply
3932 an XML description. */
3933 return builtin_type (gdbarch)->builtin_int0;
3935 return builtin_type (gdbarch)->builtin_uint32;
3938 /* Map a DWARF register REGNUM onto the appropriate GDB register number. */
3942 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
3944 /* Core integer regs. */
3945 if (reg >= 0 && reg <= 15)
3948 /* Legacy FPA encoding. These were once used in a way which
3949 overlapped with VFP register numbering, so their use is
3950 discouraged, but GDB doesn't support the ARM toolchain
3951 which used them for VFP. */
3952 if (reg >= 16 && reg <= 23)
3953 return ARM_F0_REGNUM + reg - 16;
3955 /* New assignments for the FPA registers. */
3956 if (reg >= 96 && reg <= 103)
3957 return ARM_F0_REGNUM + reg - 96;
3959 /* WMMX register assignments. */
3960 if (reg >= 104 && reg <= 111)
3961 return ARM_WCGR0_REGNUM + reg - 104;
3963 if (reg >= 112 && reg <= 127)
3964 return ARM_WR0_REGNUM + reg - 112;
3966 if (reg >= 192 && reg <= 199)
3967 return ARM_WC0_REGNUM + reg - 192;
3969 /* VFP v2 registers. A double precision value is actually
3970 in d1 rather than s2, but the ABI only defines numbering
3971 for the single precision registers. This will "just work"
3972 in GDB for little endian targets (we'll read eight bytes,
3973 starting in s0 and then progressing to s1), but will be
3974 reversed on big endian targets with VFP. This won't
3975 be a problem for the new Neon quad registers; you're supposed
3976 to use DW_OP_piece for those. */
3977 if (reg >= 64 && reg <= 95)
3981 sprintf (name_buf, "s%d", reg - 64);
3982 return user_reg_map_name_to_regnum (gdbarch, name_buf,
3986 /* VFP v3 / Neon registers. This range is also used for VFP v2
3987 registers, except that it now describes d0 instead of s0. */
3988 if (reg >= 256 && reg <= 287)
3992 sprintf (name_buf, "d%d", reg - 256);
3993 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4000 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4002 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4005 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
4007 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4008 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4010 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4011 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4013 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4014 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4016 if (reg < NUM_GREGS)
4017 return SIM_ARM_R0_REGNUM + reg;
4020 if (reg < NUM_FREGS)
4021 return SIM_ARM_FP0_REGNUM + reg;
4024 if (reg < NUM_SREGS)
4025 return SIM_ARM_FPS_REGNUM + reg;
4028 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4031 /* NOTE: cagney/2001-08-20: Both convert_from_extended() and
4032 convert_to_extended() use floatformat_arm_ext_littlebyte_bigword.
4033 It is thought that this is the floating-point register format on
4034 little-endian systems. */
4037 convert_from_extended (const struct floatformat *fmt, const void *ptr,
4038 void *dbl, int endianess)
4042 if (endianess == BFD_ENDIAN_BIG)
4043 floatformat_to_doublest (&floatformat_arm_ext_big, ptr, &d);
4045 floatformat_to_doublest (&floatformat_arm_ext_littlebyte_bigword,
4047 floatformat_from_doublest (fmt, &d, dbl);
4051 convert_to_extended (const struct floatformat *fmt, void *dbl, const void *ptr,
4056 floatformat_to_doublest (fmt, ptr, &d);
4057 if (endianess == BFD_ENDIAN_BIG)
4058 floatformat_from_doublest (&floatformat_arm_ext_big, &d, dbl);
4060 floatformat_from_doublest (&floatformat_arm_ext_littlebyte_bigword,
4065 condition_true (unsigned long cond, unsigned long status_reg)
4067 if (cond == INST_AL || cond == INST_NV)
4073 return ((status_reg & FLAG_Z) != 0);
4075 return ((status_reg & FLAG_Z) == 0);
4077 return ((status_reg & FLAG_C) != 0);
4079 return ((status_reg & FLAG_C) == 0);
4081 return ((status_reg & FLAG_N) != 0);
4083 return ((status_reg & FLAG_N) == 0);
4085 return ((status_reg & FLAG_V) != 0);
4087 return ((status_reg & FLAG_V) == 0);
4089 return ((status_reg & (FLAG_C | FLAG_Z)) == FLAG_C);
4091 return ((status_reg & (FLAG_C | FLAG_Z)) != FLAG_C);
4093 return (((status_reg & FLAG_N) == 0) == ((status_reg & FLAG_V) == 0));
4095 return (((status_reg & FLAG_N) == 0) != ((status_reg & FLAG_V) == 0));
4097 return (((status_reg & FLAG_Z) == 0)
4098 && (((status_reg & FLAG_N) == 0)
4099 == ((status_reg & FLAG_V) == 0)));
4101 return (((status_reg & FLAG_Z) != 0)
4102 || (((status_reg & FLAG_N) == 0)
4103 != ((status_reg & FLAG_V) == 0)));
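/* Illustrative sketch, not compiled: the signed comparisons above
   reduce to flag identities, e.g. GE is true exactly when N == V.
   After "cmp r0, r1" with r0 = -1 and r1 = 1 the subtraction yields
   -2 with N = 1 and V = 0, so GE is false and LT is true, matching
   -1 < 1.  The helper name is invented.  */
#if 0
static int
example_ge_from_flags (unsigned long status_reg)
{
  return ((status_reg & FLAG_N) == 0) == ((status_reg & FLAG_V) == 0);
}
#endif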
4108 static unsigned long
4109 shifted_reg_val (struct frame_info *frame, unsigned long inst, int carry,
4110 unsigned long pc_val, unsigned long status_reg)
4112 unsigned long res, shift;
4113 int rm = bits (inst, 0, 3);
4114 unsigned long shifttype = bits (inst, 5, 6);
4118 int rs = bits (inst, 8, 11);
4119 shift = (rs == 15 ? pc_val + 8
4120 : get_frame_register_unsigned (frame, rs)) & 0xFF;
4123 shift = bits (inst, 7, 11);
4125 res = (rm == ARM_PC_REGNUM
4126 ? (pc_val + (bit (inst, 4) ? 12 : 8))
4127 : get_frame_register_unsigned (frame, rm));
4132 res = shift >= 32 ? 0 : res << shift;
4136 res = shift >= 32 ? 0 : res >> shift;
4142 res = ((res & 0x80000000L)
4143 ? ~((~res) >> shift) : res >> shift);
4146 case 3: /* ROR/RRX */
4149 res = (res >> 1) | (carry ? 0x80000000L : 0);
4151 res = (res >> shift) | (res << (32 - shift));
4155 return res & 0xffffffff;
4158 /* Return number of 1-bits in VAL. */
4161 bitcount (unsigned long val)
4164 for (nbits = 0; val != 0; nbits++)
4165 val &= val - 1; /* Delete rightmost 1-bit in val. */
4169 /* Return the size in bytes of the complete Thumb instruction whose
4170 first halfword is INST1. */
4173 thumb_insn_size (unsigned short inst1)
4175 if ((inst1 & 0xe000) == 0xe000 && (inst1 & 0x1800) != 0)
4182 thumb_advance_itstate (unsigned int itstate)
4184 /* Preserve IT[7:5], the first three bits of the condition. Shift
4185 the upcoming condition flags left by one bit. */
4186 itstate = (itstate & 0xe0) | ((itstate << 1) & 0x1f);
4188 /* If we have finished the IT block, clear the state. */
4189 if ((itstate & 0x0f) == 0)
    itstate = 0;

  return itstate;
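/* Illustrative trace, not compiled: the helper above applied to
   "ITTE EQ", whose IT instruction leaves ITSTATE = 0x06 (firstcond
   EQ in bits 7:4, mask 0110 in bits 3:0).  The function name is
   invented for the example.  */
#if 0
static void
example_itstate_trace (void)
{
  unsigned int it = 0x06;	/* 1st instruction: condition EQ.  */

  it = thumb_advance_itstate (it);	/* 0x0c: 2nd ("T") is EQ.  */
  it = thumb_advance_itstate (it);	/* 0x18: 3rd ("E") is NE.  */
  it = thumb_advance_itstate (it);	/* 0x00: IT block finished.  */
}
#endif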
4195 /* Find the next PC after the current instruction executes. In some
4196 cases we cannot statically determine the answer (see the IT state
4197 handling in this function); in that case, a breakpoint may be
4198 inserted in addition to the returned PC, which will be used to set
4199 another breakpoint by our caller. */
4202 thumb_get_next_pc_raw (struct frame_info *frame, CORE_ADDR pc)
4204 struct gdbarch *gdbarch = get_frame_arch (frame);
4205 struct address_space *aspace = get_frame_address_space (frame);
4206 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
4207 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4208 unsigned long pc_val = ((unsigned long) pc) + 4; /* PC after prefetch */
4209 unsigned short inst1;
4210 CORE_ADDR nextpc = pc + 2; /* Default is next instruction. */
4211 unsigned long offset;
4212 ULONGEST status, itstate;
4214 nextpc = MAKE_THUMB_ADDR (nextpc);
4215 pc_val = MAKE_THUMB_ADDR (pc_val);
4217 inst1 = read_memory_unsigned_integer (pc, 2, byte_order_for_code);
4219 /* Thumb-2 conditional execution support. There are eight bits in
4220 the CPSR which describe conditional execution state. Once
4221 reconstructed (they're in a funny order), the low five bits
4222 describe the low bit of the condition for each instruction and
4223 how many instructions remain. The high three bits describe the
4224 base condition. One of the low four bits will be set if an IT
4225 block is active. These bits read as zero on earlier processors. */
4227 status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
4228 itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);
4230 /* If-Then handling. On GNU/Linux, where this routine is used, we
4231 use an undefined instruction as a breakpoint. Unlike BKPT, IT
4232 can disable execution of the undefined instruction. So we might
4233 miss the breakpoint if we set it on a skipped conditional
4234 instruction. Because conditional instructions can change the
4235 flags, affecting the execution of further instructions, we may
4236 need to set two breakpoints. */
4238 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint != NULL)
4240 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4242 /* An IT instruction. Because this instruction does not
4243 modify the flags, we can accurately predict the next
4244 executed instruction. */
4245 itstate = inst1 & 0x00ff;
4246 pc += thumb_insn_size (inst1);
4248 while (itstate != 0 && ! condition_true (itstate >> 4, status))
4250 inst1 = read_memory_unsigned_integer (pc, 2,
4251 byte_order_for_code);
4252 pc += thumb_insn_size (inst1);
4253 itstate = thumb_advance_itstate (itstate);
4256 return MAKE_THUMB_ADDR (pc);
4258 else if (itstate != 0)
4260 /* We are in a conditional block. Check the condition. */
4261 if (! condition_true (itstate >> 4, status))
4263 /* Advance to the next executed instruction. */
4264 pc += thumb_insn_size (inst1);
4265 itstate = thumb_advance_itstate (itstate);
4267 while (itstate != 0 && ! condition_true (itstate >> 4, status))
4269 inst1 = read_memory_unsigned_integer (pc, 2,
4270 byte_order_for_code);
4271 pc += thumb_insn_size (inst1);
4272 itstate = thumb_advance_itstate (itstate);
4275 return MAKE_THUMB_ADDR (pc);
4277 else if ((itstate & 0x0f) == 0x08)
4279 /* This is the last instruction of the conditional
4280 block, and it is executed. We can handle it normally
4281 because the following instruction is not conditional,
4282 and we must handle it normally because it is
4283 permitted to branch. Fall through. */
4289 /* There are conditional instructions after this one.
4290 If this instruction modifies the flags, then we can
4291 not predict what the next executed instruction will
4292 be. Fortunately, this instruction is architecturally
4293 forbidden to branch; we know it will fall through.
4294 Start by skipping past it. */
4295 pc += thumb_insn_size (inst1);
4296 itstate = thumb_advance_itstate (itstate);
4298 /* Set a breakpoint on the following instruction. */
4299 gdb_assert ((itstate & 0x0f) != 0);
4300 arm_insert_single_step_breakpoint (gdbarch, aspace,
4301 MAKE_THUMB_ADDR (pc));
4302 cond_negated = (itstate >> 4) & 1;
4304 /* Skip all following instructions with the same
4305 condition. If there is a later instruction in the IT
4306 block with the opposite condition, set the other
4307 breakpoint there. If not, then set a breakpoint on
4308 the instruction after the IT block. */
4311 inst1 = read_memory_unsigned_integer (pc, 2,
4312 byte_order_for_code);
4313 pc += thumb_insn_size (inst1);
4314 itstate = thumb_advance_itstate (itstate);
4316 while (itstate != 0 && ((itstate >> 4) & 1) == cond_negated);
4318 return MAKE_THUMB_ADDR (pc);
4322 else if (itstate & 0x0f)
4324 /* We are in a conditional block. Check the condition. */
4325 int cond = itstate >> 4;
4327 if (! condition_true (cond, status))
4328 /* Advance to the next instruction. All the 32-bit
4329 instructions share a common prefix. */
4330 return MAKE_THUMB_ADDR (pc + thumb_insn_size (inst1));
4332 /* Otherwise, handle the instruction normally. */
4335 if ((inst1 & 0xff00) == 0xbd00) /* pop {rlist, pc} */
4339 /* Fetch the saved PC from the stack. It's stored above
4340 all of the other registers. */
4341 offset = bitcount (bits (inst1, 0, 7)) * INT_REGISTER_SIZE;
4342 sp = get_frame_register_unsigned (frame, ARM_SP_REGNUM);
4343 nextpc = read_memory_unsigned_integer (sp + offset, 4, byte_order);
4345 else if ((inst1 & 0xf000) == 0xd000) /* conditional branch */
4347 unsigned long cond = bits (inst1, 8, 11);
4348 if (cond == 0x0f) /* 0x0f = SWI */
4350 struct gdbarch_tdep *tdep;
4351 tdep = gdbarch_tdep (gdbarch);
4353 if (tdep->syscall_next_pc != NULL)
4354 nextpc = tdep->syscall_next_pc (frame);
4357 else if (cond != 0x0f && condition_true (cond, status))
4358 nextpc = pc_val + (sbits (inst1, 0, 7) << 1);
4360 else if ((inst1 & 0xf800) == 0xe000) /* unconditional branch */
4362 nextpc = pc_val + (sbits (inst1, 0, 10) << 1);
4364 else if (thumb_insn_size (inst1) == 4) /* 32-bit instruction */
4366 unsigned short inst2;
4367 inst2 = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);
4369 /* Default to the next instruction. */
4371 nextpc = MAKE_THUMB_ADDR (nextpc);
4373 if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
4375 /* Branches and miscellaneous control instructions. */
4377 if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
4380 int j1, j2, imm1, imm2;
4382 imm1 = sbits (inst1, 0, 10);
4383 imm2 = bits (inst2, 0, 10);
4384 j1 = bit (inst2, 13);
4385 j2 = bit (inst2, 11);
4387 offset = ((imm1 << 12) + (imm2 << 1));
4388 offset ^= ((!j2) << 22) | ((!j1) << 23);
4390 nextpc = pc_val + offset;
4391 /* For BLX make sure to clear the low bits. */
4392 if (bit (inst2, 12) == 0)
4393 nextpc = nextpc & 0xfffffffc;
4395 else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)
4397 /* SUBS PC, LR, #imm8. */
4398 nextpc = get_frame_register_unsigned (frame, ARM_LR_REGNUM);
4399 nextpc -= inst2 & 0x00ff;
4401 else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
4403 /* Conditional branch. */
4404 if (condition_true (bits (inst1, 6, 9), status))
4406 int sign, j1, j2, imm1, imm2;
4408 sign = sbits (inst1, 10, 10);
4409 imm1 = bits (inst1, 0, 5);
4410 imm2 = bits (inst2, 0, 10);
4411 j1 = bit (inst2, 13);
4412 j2 = bit (inst2, 11);
4414 offset = (sign << 20) + (j2 << 19) + (j1 << 18);
4415 offset += (imm1 << 12) + (imm2 << 1);
4417 nextpc = pc_val + offset;
4421 else if ((inst1 & 0xfe50) == 0xe810)
4423 /* Load multiple or RFE. */
4424 int rn, offset, load_pc = 1;
4426 rn = bits (inst1, 0, 3);
4427 if (bit (inst1, 7) && !bit (inst1, 8))
4430 if (!bit (inst2, 15))
4432 offset = bitcount (inst2) * 4 - 4;
4434 else if (!bit (inst1, 7) && bit (inst1, 8))
4437 if (!bit (inst2, 15))
4441 else if (bit (inst1, 7) && bit (inst1, 8))
4446 else if (!bit (inst1, 7) && !bit (inst1, 8))
4456 CORE_ADDR addr = get_frame_register_unsigned (frame, rn);
4457 nextpc = get_frame_memory_unsigned (frame, addr + offset, 4);
4460 else if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
4462 /* MOV PC or MOVS PC. */
4463 nextpc = get_frame_register_unsigned (frame, bits (inst2, 0, 3));
4464 nextpc = MAKE_THUMB_ADDR (nextpc);
4466 else if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
4470 int rn, load_pc = 1;
4472 rn = bits (inst1, 0, 3);
4473 base = get_frame_register_unsigned (frame, rn);
4474 if (rn == ARM_PC_REGNUM)
4476 base = (base + 4) & ~(CORE_ADDR) 0x3;
4478 base += bits (inst2, 0, 11);
4480 base -= bits (inst2, 0, 11);
4482 else if (bit (inst1, 7))
4483 base += bits (inst2, 0, 11);
4484 else if (bit (inst2, 11))
4486 if (bit (inst2, 10))
4489 base += bits (inst2, 0, 7);
4491 base -= bits (inst2, 0, 7);
4494 else if ((inst2 & 0x0fc0) == 0x0000)
4496 int shift = bits (inst2, 4, 5), rm = bits (inst2, 0, 3);
4497 base += get_frame_register_unsigned (frame, rm) << shift;
4504 nextpc = get_frame_memory_unsigned (frame, base, 4);
4506 else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)
4509 CORE_ADDR tbl_reg, table, offset, length;
4511 tbl_reg = bits (inst1, 0, 3);
4512 if (tbl_reg == 0x0f)
4513 table = pc + 4; /* Regcache copy of PC isn't right yet. */
4515 table = get_frame_register_unsigned (frame, tbl_reg);
4517 offset = get_frame_register_unsigned (frame, bits (inst2, 0, 3));
4518 length = 2 * get_frame_memory_unsigned (frame, table + offset, 1);
4519 nextpc = pc_val + length;
4521 else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)
4524 CORE_ADDR tbl_reg, table, offset, length;
4526 tbl_reg = bits (inst1, 0, 3);
4527 if (tbl_reg == 0x0f)
4528 table = pc + 4; /* Regcache copy of PC isn't right yet. */
4530 table = get_frame_register_unsigned (frame, tbl_reg);
4532 offset = 2 * get_frame_register_unsigned (frame, bits (inst2, 0, 3));
4533 length = 2 * get_frame_memory_unsigned (frame, table + offset, 2);
4534 nextpc = pc_val + length;
4537 else if ((inst1 & 0xff00) == 0x4700) /* bx REG, blx REG */
4539 if (bits (inst1, 3, 6) == 0x0f)
4542 nextpc = get_frame_register_unsigned (frame, bits (inst1, 3, 6));
4544 else if ((inst1 & 0xff87) == 0x4687) /* mov pc, REG */
4546 if (bits (inst1, 3, 6) == 0x0f)
4549 nextpc = get_frame_register_unsigned (frame, bits (inst1, 3, 6));
4551 nextpc = MAKE_THUMB_ADDR (nextpc);
4553 else if ((inst1 & 0xf500) == 0xb100)
4556 int imm = (bit (inst1, 9) << 6) + (bits (inst1, 3, 7) << 1);
4557 ULONGEST reg = get_frame_register_unsigned (frame, bits (inst1, 0, 2));
4559 if (bit (inst1, 11) && reg != 0)
4560 nextpc = pc_val + imm;
4561 else if (!bit (inst1, 11) && reg == 0)
4562 nextpc = pc_val + imm;
4567 /* Get the raw next address. PC is the current program counter, in
4568 FRAME, which is assumed to be executing in ARM mode.
4570 The value returned has the execution state of the next instruction
4571 encoded in it. Use IS_THUMB_ADDR () to see whether the instruction is
4572 in Thumb state, and gdbarch_addr_bits_remove () to get the plain memory address.  */
4576 arm_get_next_pc_raw (struct frame_info *frame, CORE_ADDR pc)
4578 struct gdbarch *gdbarch = get_frame_arch (frame);
4579 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
4580 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4581 unsigned long pc_val;
4582 unsigned long this_instr;
4583 unsigned long status;
4586 pc_val = (unsigned long) pc;
4587 this_instr = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
4589 status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
4590 nextpc = (CORE_ADDR) (pc_val + 4); /* Default case */
4592 if (bits (this_instr, 28, 31) == INST_NV)
4593 switch (bits (this_instr, 24, 27))
4598 /* Branch with Link and change to Thumb. */
4599 nextpc = BranchDest (pc, this_instr);
4600 nextpc |= bit (this_instr, 24) << 1;
4601 nextpc = MAKE_THUMB_ADDR (nextpc);
4607 /* Coprocessor register transfer. */
4608 if (bits (this_instr, 12, 15) == 15)
4609 error (_("Invalid update to pc in instruction"));
4612 else if (condition_true (bits (this_instr, 28, 31), status))
4614 switch (bits (this_instr, 24, 27))
4617 case 0x1: /* data processing */
4621 unsigned long operand1, operand2, result = 0;
4625 if (bits (this_instr, 12, 15) != 15)
4628 if (bits (this_instr, 22, 25) == 0
4629 && bits (this_instr, 4, 7) == 9) /* multiply */
4630 error (_("Invalid update to pc in instruction"));
4632 /* BX <reg>, BLX <reg> */
4633 if (bits (this_instr, 4, 27) == 0x12fff1
4634 || bits (this_instr, 4, 27) == 0x12fff3)
4636 rn = bits (this_instr, 0, 3);
4637 nextpc = ((rn == ARM_PC_REGNUM)
4639 : get_frame_register_unsigned (frame, rn));
4644 /* Multiply into PC. */
4645 c = (status & FLAG_C) ? 1 : 0;
4646 rn = bits (this_instr, 16, 19);
4647 operand1 = ((rn == ARM_PC_REGNUM)
4649 : get_frame_register_unsigned (frame, rn));
4651 if (bit (this_instr, 25))
4653 unsigned long immval = bits (this_instr, 0, 7);
4654 unsigned long rotate = 2 * bits (this_instr, 8, 11);
4655 operand2 = ((immval >> rotate) | (immval << (32 - rotate)))
4658 else /* operand 2 is a shifted register. */
4659 operand2 = shifted_reg_val (frame, this_instr, c,
4662 switch (bits (this_instr, 21, 24))
4665 result = operand1 & operand2;
4669 result = operand1 ^ operand2;
4673 result = operand1 - operand2;
4677 result = operand2 - operand1;
4681 result = operand1 + operand2;
4685 result = operand1 + operand2 + c;
4689 result = operand1 - operand2 + c;
4693 result = operand2 - operand1 + c;
4699 case 0xb: /* tst, teq, cmp, cmn */
4700 result = (unsigned long) nextpc;
4704 result = operand1 | operand2;
4708 /* Always step into a function. */
4713 result = operand1 & ~operand2;
4721 /* In 26-bit APCS the bottom two bits of the result are
4722 ignored, and we always end up in ARM state. */
4724 nextpc = arm_addr_bits_remove (gdbarch, result);
4732 case 0x5: /* data transfer */
4735 if (bit (this_instr, 20))
4738 if (bits (this_instr, 12, 15) == 15)
4744 if (bit (this_instr, 22))
4745 error (_("Invalid update to pc in instruction"));
4747 /* byte write to PC */
4748 rn = bits (this_instr, 16, 19);
4749 base = ((rn == ARM_PC_REGNUM)
4751 : get_frame_register_unsigned (frame, rn));
4753 if (bit (this_instr, 24))
4756 int c = (status & FLAG_C) ? 1 : 0;
4757 unsigned long offset =
4758 (bit (this_instr, 25)
4759 ? shifted_reg_val (frame, this_instr, c, pc_val, status)
4760 : bits (this_instr, 0, 11));
4762 if (bit (this_instr, 23))
4768 (CORE_ADDR) read_memory_unsigned_integer ((CORE_ADDR) base,
4775 case 0x9: /* block transfer */
4776 if (bit (this_instr, 20))
4779 if (bit (this_instr, 15))
4783 unsigned long rn_val
4784 = get_frame_register_unsigned (frame,
4785 bits (this_instr, 16, 19));
4787 if (bit (this_instr, 23))
4790 unsigned long reglist = bits (this_instr, 0, 14);
4791 offset = bitcount (reglist) * 4;
4792 if (bit (this_instr, 24)) /* pre */
4795 else if (bit (this_instr, 24))
4799 (CORE_ADDR) read_memory_unsigned_integer ((CORE_ADDR)
4806 case 0xb: /* branch & link */
4807 case 0xa: /* branch */
4809 nextpc = BranchDest (pc, this_instr);
4815 case 0xe: /* coproc ops */
4819 struct gdbarch_tdep *tdep;
4820 tdep = gdbarch_tdep (gdbarch);
4822 if (tdep->syscall_next_pc != NULL)
4823 nextpc = tdep->syscall_next_pc (frame);
4829 fprintf_filtered (gdb_stderr, _("Bad bit-field extraction\n"));
4837 /* Determine next PC after current instruction executes. Will call either
4838 arm_get_next_pc_raw or thumb_get_next_pc_raw. Error out if infinite
4839 loop is detected. */
4842 arm_get_next_pc (struct frame_info *frame, CORE_ADDR pc)
4846 if (arm_frame_is_thumb (frame))
4848 nextpc = thumb_get_next_pc_raw (frame, pc);
4849 if (nextpc == MAKE_THUMB_ADDR (pc))
4850 error (_("Infinite loop detected"));
4854 nextpc = arm_get_next_pc_raw (frame, pc);
4856 error (_("Infinite loop detected"));
4862 /* Like insert_single_step_breakpoint, but make sure we use a breakpoint
4863 of the appropriate mode (as encoded in the PC value), even if this
4864 differs from what would be expected according to the symbol tables. */
4867 arm_insert_single_step_breakpoint (struct gdbarch *gdbarch,
4868 struct address_space *aspace,
4871 struct cleanup *old_chain
4872 = make_cleanup_restore_integer (&arm_override_mode);
4874 arm_override_mode = IS_THUMB_ADDR (pc);
4875 pc = gdbarch_addr_bits_remove (gdbarch, pc);
4877 insert_single_step_breakpoint (gdbarch, aspace, pc);
4879 do_cleanups (old_chain);
4882 /* single_step() is called just before we want to resume the inferior,
4883 if we want to single-step it but there is no hardware or kernel
4884 single-step support. We find the target of the coming instruction
4885 and breakpoint it. */
4888 arm_software_single_step (struct frame_info *frame)
4890 struct gdbarch *gdbarch = get_frame_arch (frame);
4891 struct address_space *aspace = get_frame_address_space (frame);
4892 CORE_ADDR next_pc = arm_get_next_pc (frame, get_frame_pc (frame));
4894 arm_insert_single_step_breakpoint (gdbarch, aspace, next_pc);
4899 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
4900 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
4901 NULL if an error occurs. BUF is freed. */
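/* For instance (illustration only): called with OLD_LEN 14 and NEW_LEN
   32, this reads the 18 bytes that precede the old buffer from the
   inferior and returns a 32-byte buffer still ending at ENDADDR; this
   is how arm_adjust_breakpoint_address below widens its scan window to
   IT_SCAN_THRESHOLD bytes.  */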
4904 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
4905 int old_len, int new_len)
4907 gdb_byte *new_buf, *middle;
4908 int bytes_to_read = new_len - old_len;
4910 new_buf = xmalloc (new_len);
4911 memcpy (new_buf + bytes_to_read, buf, old_len);
4913 if (target_read_memory (endaddr - new_len, new_buf, bytes_to_read) != 0)
4921 /* An IT block is at most the 2-byte IT instruction followed by
4922 four 4-byte instructions. The furthest back we must search to
4923 find an IT block that affects the current instruction is thus
4924 2 + 3 * 4 == 14 bytes. */
4925 #define MAX_IT_BLOCK_PREFIX 14
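/* Worked layout (added for illustration): in the worst case the current
   instruction is the fourth and last one in the block and the three
   instructions before it are all 32 bits wide:

       offset  0:  IT          (2 bytes)
       offset  2:  insn #1     (4 bytes)
       offset  6:  insn #2     (4 bytes)
       offset 10:  insn #3     (4 bytes)
       offset 14:  insn #4     <- current instruction

   so scanning back MAX_IT_BLOCK_PREFIX == 14 bytes is always enough to
   find a relevant IT instruction.  */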
4927 /* Use a quick scan if there are more than this many bytes of instructions.  */
4929 #define IT_SCAN_THRESHOLD 32
4931 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
4932 A breakpoint in an IT block may not be hit, depending on the condition at run time, so we may move such a breakpoint back onto the IT instruction itself.  */
4935 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
4939 CORE_ADDR boundary, func_start;
4940 int buf_len, buf2_len;
4941 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
4942 int i, any, last_it, last_it_count;
4944 /* If we are using BKPT breakpoints, none of this is necessary. */
4945 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
4948 /* ARM mode does not have this problem. */
4949 if (!arm_pc_is_thumb (gdbarch, bpaddr))
4952 /* We are setting a breakpoint in Thumb code that could potentially
4953 contain an IT block. The first step is to find how much Thumb
4954 code there is; we do not need to read outside of known Thumb code.  */
4956 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
4958 /* Thumb-2 code must have mapping symbols to have a chance. */
4961 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
4963 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
4964 && func_start > boundary)
4965 boundary = func_start;
4967 /* Search for a candidate IT instruction. We have to do some fancy
4968 footwork to distinguish a real IT instruction from the second
4969 half of a 32-bit instruction, but there is no need for that if
4970 there's no candidate. */
4971 buf_len = min (bpaddr - boundary, MAX_IT_BLOCK_PREFIX);
4973 /* No room for an IT instruction. */
4976 buf = xmalloc (buf_len);
4977 if (target_read_memory (bpaddr - buf_len, buf, buf_len) != 0)
4980 for (i = 0; i < buf_len; i += 2)
4982 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4983 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4995 /* OK, the code bytes before this instruction contain at least one
4996 halfword which resembles an IT instruction. We know that it's
4997 Thumb code, but there are still two possibilities. Either the
4998 halfword really is an IT instruction, or it is the second half of
4999 a 32-bit Thumb instruction. The only way we can tell is to
5000 scan forwards from a known instruction boundary. */
5001 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
5005 /* There's a lot of code before this instruction. Start with an
5006 optimistic search; it's easy to recognize halfwords that can
5007 not be the start of a 32-bit instruction, and use that to
5008 lock on to the instruction boundaries. */
5009 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
5012 buf_len = IT_SCAN_THRESHOLD;
5015 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
5017 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5018 if (thumb_insn_size (inst1) == 2)
5025 /* At this point, if DEFINITE, BUF[I] is the first place we
5026 are sure that we know the instruction boundaries, and it is far
5027 enough from BPADDR that we could not miss an IT instruction
5028 affecting BPADDR.  If ! DEFINITE, give up - start from a known boundary.  */
5032 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
5036 buf_len = bpaddr - boundary;
5042 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
5045 buf_len = bpaddr - boundary;
5049 /* Scan forwards. Find the last IT instruction before BPADDR. */
5054 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5056 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
5061 else if (inst1 & 0x0002)
5063 else if (inst1 & 0x0004)
5068 i += thumb_insn_size (inst1);
5074 /* There wasn't really an IT instruction after all. */
5077 if (last_it_count < 1)
5078 /* It was too far away. */
5081 /* This really is a trouble spot.  Move the breakpoint to the IT instruction.  */
5083 return bpaddr - buf_len + last_it;
5086 /* ARM displaced stepping support.
5088 Generally ARM displaced stepping works as follows:
5090 1. When an instruction is to be single-stepped, it is first decoded by
5091 arm_process_displaced_insn (called from arm_displaced_step_copy_insn).
5092 Depending on the type of instruction, it is then copied to a scratch
5093 location, possibly in a modified form. The copy_* set of functions
5094 performs such modification, as necessary. A breakpoint is placed after
5095 the modified instruction in the scratch space to return control to GDB.
5096 Note in particular that instructions which modify the PC will no longer
5097 do so after modification.
5099 2. The instruction is single-stepped, by setting the PC to the scratch
5100 location address, and resuming.  Control returns to GDB when the breakpoint is hit.
5103 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
5104 function used for the current instruction. This function's job is to
5105 put the CPU/memory state back to what it would have been if the
5106 instruction had been executed unmodified in its original location. */
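/* Illustrative walk-through (added; not part of the original code),
   using the branch support implemented below.  To step "bl somewhere"
   at address FROM:

   1. arm_copy_b_bl_blx decodes the branch offset, records the
      condition, the link/exchange flags and the computed destination
      in the closure, and copies a plain NOP to the scratch area.

   2. The inferior single-steps the NOP and stops at the breakpoint
      placed after it.

   3. cleanup_branch performs the real effect: if the condition holds,
      it writes FROM + 4 into LR and the recorded destination into the
      PC, just as the unmodified BL would have done at FROM.  */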
5108 /* NOP instruction (mov r0, r0). */
5109 #define ARM_NOP 0xe1a00000
5110 #define THUMB_NOP 0x4600
5112 /* Helper for register reads for displaced stepping. In particular, this
5113 returns the PC as it would be seen by the instruction at its original location.  */
5117 displaced_read_reg (struct regcache *regs, struct displaced_step_closure *dsc,
5121 CORE_ADDR from = dsc->insn_addr;
5123 if (regno == ARM_PC_REGNUM)
5125 /* Compute pipeline offset:
5126 - When executing an ARM instruction, PC reads as the address of the
5127 current instruction plus 8.
5128 - When executing a Thumb instruction, PC reads as the address of the
5129 current instruction plus 4. */
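/* For instance (illustration only): an ARM instruction at 0x8000 that
   reads the PC observes 0x8008, while a Thumb instruction at the same
   address observes 0x8004.  The value returned here is adjusted
   accordingly, so the copied instruction sees the same PC value it
   would have seen at its original location.  */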
5136 if (debug_displaced)
5137 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
5138 (unsigned long) from);
5139 return (ULONGEST) from;
5143 regcache_cooked_read_unsigned (regs, regno, &ret);
5144 if (debug_displaced)
5145 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
5146 regno, (unsigned long) ret);
5152 displaced_in_arm_mode (struct regcache *regs)
5155 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
5157 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5159 return (ps & t_bit) == 0;
5162 /* Write to the PC as from a branch instruction. */
5165 branch_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5169 /* Note: If bits 0/1 are set, this branch would be unpredictable for
5170 architecture versions < 6. */
5171 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5172 val & ~(ULONGEST) 0x3);
5174 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5175 val & ~(ULONGEST) 0x1);
5178 /* Write to the PC as from a branch-exchange instruction. */
5181 bx_write_pc (struct regcache *regs, ULONGEST val)
5184 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
5186 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5190 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
5191 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
5193 else if ((val & 2) == 0)
5195 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5196 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
5200 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
5201 mode, align dest to 4 bytes). */
5202 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
5203 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5204 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
5208 /* Write to the PC as if from a load instruction. */
5211 load_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5214 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
5215 bx_write_pc (regs, val);
5217 branch_write_pc (regs, dsc, val);
5220 /* Write to the PC as if from an ALU instruction. */
5223 alu_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5226 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
5227 bx_write_pc (regs, val);
5229 branch_write_pc (regs, dsc, val);
5232 /* Helper for writing to registers for displaced stepping. Writing to the PC
5233 has varying effects depending on the instruction which does the write:
5234 this is controlled by the WRITE_PC argument. */
5237 displaced_write_reg (struct regcache *regs, struct displaced_step_closure *dsc,
5238 int regno, ULONGEST val, enum pc_write_style write_pc)
5240 if (regno == ARM_PC_REGNUM)
5242 if (debug_displaced)
5243 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
5244 (unsigned long) val);
5247 case BRANCH_WRITE_PC:
5248 branch_write_pc (regs, dsc, val);
5252 bx_write_pc (regs, val);
5256 load_write_pc (regs, dsc, val);
5260 alu_write_pc (regs, dsc, val);
5263 case CANNOT_WRITE_PC:
5264 warning (_("Instruction wrote to PC in an unexpected way when "
5265 "single-stepping"));
5269 internal_error (__FILE__, __LINE__,
5270 _("Invalid argument to displaced_write_reg"));
5273 dsc->wrote_to_pc = 1;
5277 if (debug_displaced)
5278 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
5279 regno, (unsigned long) val);
5280 regcache_cooked_write_unsigned (regs, regno, val);
5284 /* This function is used to concisely determine if an instruction INSN
5285 references PC. Register fields of interest in INSN should have the
5286 corresponding fields of BITMASK set to 0b1111. The function
5287 returns 1 if any of these fields in INSN reference the PC
5288 (also 0b1111, r15), else it returns 0. */
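/* Illustrative usage (added; not part of the original code): an ARM
   data-processing instruction keeps Rn in bits 16-19 and Rd in bits
   12-15, so a caller interested in either field naming the PC can
   test both at once, as arm_copy_alu_imm below does:

     if (!insn_references_pc (insn, 0x000ff000ul))
       return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);  */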
5291 insn_references_pc (uint32_t insn, uint32_t bitmask)
5293 uint32_t lowbit = 1;
5295 while (bitmask != 0)
5299 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
5305 mask = lowbit * 0xf;
5307 if ((insn & mask) == mask)
5316 /* The simplest copy function. Many instructions have the same effect no
5317 matter what address they are executed at: in those cases, use this. */
5320 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
5321 const char *iname, struct displaced_step_closure *dsc)
5323 if (debug_displaced)
5324 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
5325 "opcode/class '%s' unmodified\n", (unsigned long) insn,
5328 dsc->modinsn[0] = insn;
5334 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
5335 uint16_t insn2, const char *iname,
5336 struct displaced_step_closure *dsc)
5338 if (debug_displaced)
5339 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
5340 "opcode/class '%s' unmodified\n", insn1, insn2,
5343 dsc->modinsn[0] = insn1;
5344 dsc->modinsn[1] = insn2;
5350 /* Copy a 16-bit Thumb instruction (Thumb or 16-bit Thumb-2) without any modification.  */
5353 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, unsigned int insn,
5355 struct displaced_step_closure *dsc)
5357 if (debug_displaced)
5358 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
5359 "opcode/class '%s' unmodified\n", insn,
5362 dsc->modinsn[0] = insn;
5367 /* Preload instructions with immediate offset. */
5370 cleanup_preload (struct gdbarch *gdbarch,
5371 struct regcache *regs, struct displaced_step_closure *dsc)
5373 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5374 if (!dsc->u.preload.immed)
5375 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5379 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
5380 struct displaced_step_closure *dsc, unsigned int rn)
5383 /* Preload instructions:
5385 {pli/pld} [rn, #+/-imm]
5387 {pli/pld} [r0, #+/-imm]. */
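/* Example (added for illustration): "pld [pc, #32]" at address FROM is
   copied by arm_copy_preload below as "pld [r0, #32]" (the Rn field is
   rewritten to r0), with r0 temporarily holding FROM + 8, the value
   the original PC-relative access would have used; cleanup_preload
   then restores r0 from tmp[0].  */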
5389 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5390 rn_val = displaced_read_reg (regs, dsc, rn);
5391 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5392 dsc->u.preload.immed = 1;
5394 dsc->cleanup = &cleanup_preload;
5398 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5399 struct displaced_step_closure *dsc)
5401 unsigned int rn = bits (insn, 16, 19);
5403 if (!insn_references_pc (insn, 0x000f0000ul))
5404 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
5406 if (debug_displaced)
5407 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
5408 (unsigned long) insn);
5410 dsc->modinsn[0] = insn & 0xfff0ffff;
5412 install_preload (gdbarch, regs, dsc, rn);
5418 thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
5419 struct regcache *regs, struct displaced_step_closure *dsc)
5421 unsigned int rn = bits (insn1, 0, 3);
5422 unsigned int u_bit = bit (insn1, 7);
5423 int imm12 = bits (insn2, 0, 11);
5426 if (rn != ARM_PC_REGNUM)
5427 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
5429 /* PC is only allowed to be used in PLI (immediate, literal) Encoding T3, and
5430 PLD (literal) Encoding T1. */
5431 if (debug_displaced)
5432 fprintf_unfiltered (gdb_stdlog,
5433 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
5434 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
5440 /* Rewrite instruction {pli/pld} PC imm12 into:
5441 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
5445 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
5447 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5448 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5450 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5452 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
5453 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
5454 dsc->u.preload.immed = 0;
5456 /* {pli/pld} [r0, r1] */
5457 dsc->modinsn[0] = insn1 & 0xfff0;
5458 dsc->modinsn[1] = 0xf001;
5461 dsc->cleanup = &cleanup_preload;
5465 /* Preload instructions with register offset. */
5468 install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
5469 struct displaced_step_closure *dsc, unsigned int rn,
5472 ULONGEST rn_val, rm_val;
5474 /* Preload register-offset instructions:
5476 {pli/pld} [rn, rm {, shift}]
5478 {pli/pld} [r0, r1 {, shift}]. */
5480 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5481 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5482 rn_val = displaced_read_reg (regs, dsc, rn);
5483 rm_val = displaced_read_reg (regs, dsc, rm);
5484 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5485 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
5486 dsc->u.preload.immed = 0;
5488 dsc->cleanup = &cleanup_preload;
5492 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
5493 struct regcache *regs,
5494 struct displaced_step_closure *dsc)
5496 unsigned int rn = bits (insn, 16, 19);
5497 unsigned int rm = bits (insn, 0, 3);
5500 if (!insn_references_pc (insn, 0x000f000ful))
5501 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
5503 if (debug_displaced)
5504 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
5505 (unsigned long) insn);
5507 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
5509 install_preload_reg (gdbarch, regs, dsc, rn, rm);
5513 /* Copy/cleanup coprocessor load and store instructions. */
5516 cleanup_copro_load_store (struct gdbarch *gdbarch,
5517 struct regcache *regs,
5518 struct displaced_step_closure *dsc)
5520 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
5522 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5524 if (dsc->u.ldst.writeback)
5525 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
5529 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5530 struct displaced_step_closure *dsc,
5531 int writeback, unsigned int rn)
5535 /* Coprocessor load/store instructions:
5537 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
5539 {stc/stc2} [r0, #+/-imm].
5541 ldc/ldc2 are handled identically. */
5543 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5544 rn_val = displaced_read_reg (regs, dsc, rn);
5545 /* PC should be 4-byte aligned. */
5546 rn_val = rn_val & 0xfffffffc;
5547 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5549 dsc->u.ldst.writeback = writeback;
5550 dsc->u.ldst.rn = rn;
5552 dsc->cleanup = &cleanup_copro_load_store;
5556 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
5557 struct regcache *regs,
5558 struct displaced_step_closure *dsc)
5560 unsigned int rn = bits (insn, 16, 19);
5562 if (!insn_references_pc (insn, 0x000f0000ul))
5563 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
5565 if (debug_displaced)
5566 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
5567 "load/store insn %.8lx\n", (unsigned long) insn);
5569 dsc->modinsn[0] = insn & 0xfff0ffff;
5571 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
5577 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
5578 uint16_t insn2, struct regcache *regs,
5579 struct displaced_step_closure *dsc)
5581 unsigned int rn = bits (insn1, 0, 3);
5583 if (rn != ARM_PC_REGNUM)
5584 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
5585 "copro load/store", dsc);
5587 if (debug_displaced)
5588 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
5589 "load/store insn %.4x%.4x\n", insn1, insn2);
5591 dsc->modinsn[0] = insn1 & 0xfff0;
5592 dsc->modinsn[1] = insn2;
5595 /* This function is called for copying the LDC/LDC2/VLDR instructions, which
5596 do not support writeback, so pass 0. */
5597 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
5602 /* Clean up branch instructions (actually perform the branch, by setting the PC).  */
5606 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
5607 struct displaced_step_closure *dsc)
5609 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5610 int branch_taken = condition_true (dsc->u.branch.cond, status);
5611 enum pc_write_style write_pc = dsc->u.branch.exchange
5612 ? BX_WRITE_PC : BRANCH_WRITE_PC;
5617 if (dsc->u.branch.link)
5619 /* LR should hold the address of the instruction following the current
5620 one.  To avoid confusing the logic that later handles a `bx lr' insn,
5621 bit 0 of the LR value is set to 1 if the current instruction is in Thumb mode.  */
5622 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
5625 next_insn_addr |= 0x1;
5627 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
5631 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
5634 /* Copy B/BL/BLX instructions with immediate destinations. */
5637 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
5638 struct displaced_step_closure *dsc,
5639 unsigned int cond, int exchange, int link, long offset)
5641 /* Implement "BL<cond> <label>" as:
5643 Preparation: cond <- instruction condition
5644 Insn: mov r0, r0 (nop)
5645 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
5647 B<cond> similar, but don't set r14 in cleanup. */
5649 dsc->u.branch.cond = cond;
5650 dsc->u.branch.link = link;
5651 dsc->u.branch.exchange = exchange;
5653 dsc->u.branch.dest = dsc->insn_addr;
5654 if (link && exchange)
5655 /* For BLX, the offset is computed from Align (PC, 4). */
5656 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
5659 dsc->u.branch.dest += 4 + offset;
5661 dsc->u.branch.dest += 8 + offset;
5663 dsc->cleanup = &cleanup_branch;
5666 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
5667 struct regcache *regs, struct displaced_step_closure *dsc)
5669 unsigned int cond = bits (insn, 28, 31);
5670 int exchange = (cond == 0xf);
5671 int link = exchange || bit (insn, 24);
5674 if (debug_displaced)
5675 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
5676 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
5677 (unsigned long) insn);
5679 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
5680 then arrange the switch into Thumb mode. */
5681 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
5683 offset = bits (insn, 0, 23) << 2;
5685 if (bit (offset, 25))
5686 offset = offset | ~0x3ffffff;
5688 dsc->modinsn[0] = ARM_NOP;
5690 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
5695 thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
5696 uint16_t insn2, struct regcache *regs,
5697 struct displaced_step_closure *dsc)
5699 int link = bit (insn2, 14);
5700 int exchange = link && !bit (insn2, 12);
5703 int j1 = bit (insn2, 13);
5704 int j2 = bit (insn2, 11);
5705 int s = sbits (insn1, 10, 10);
5706 int i1 = !(j1 ^ bit (insn1, 10));
5707 int i2 = !(j2 ^ bit (insn1, 10));
5709 if (!link && !exchange) /* B */
5711 offset = (bits (insn2, 0, 10) << 1);
5712 if (bit (insn2, 12)) /* Encoding T4 */
5714 offset |= (bits (insn1, 0, 9) << 12)
5720 else /* Encoding T3 */
5722 offset |= (bits (insn1, 0, 5) << 12)
5726 cond = bits (insn1, 6, 9);
5731 offset = (bits (insn1, 0, 9) << 12);
5732 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
5733 offset |= exchange ?
5734 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
5737 if (debug_displaced)
5738 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
5739 "%.4x %.4x with offset %.8lx\n",
5740 link ? (exchange) ? "blx" : "bl" : "b",
5741 insn1, insn2, offset);
5743 dsc->modinsn[0] = THUMB_NOP;
5745 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
5749 /* Copy B Thumb instructions. */
5751 thumb_copy_b (struct gdbarch *gdbarch, unsigned short insn,
5752 struct displaced_step_closure *dsc)
5754 unsigned int cond = 0;
5756 unsigned short bit_12_15 = bits (insn, 12, 15);
5757 CORE_ADDR from = dsc->insn_addr;
5759 if (bit_12_15 == 0xd)
5761 /* offset = SignExtend (imm8:0, 32) */
5762 offset = sbits ((insn << 1), 0, 8);
5763 cond = bits (insn, 8, 11);
5765 else if (bit_12_15 == 0xe) /* Encoding T2 */
5767 offset = sbits ((insn << 1), 0, 11);
5771 if (debug_displaced)
5772 fprintf_unfiltered (gdb_stdlog,
5773 "displaced: copying b immediate insn %.4x "
5774 "with offset %d\n", insn, offset);
5776 dsc->u.branch.cond = cond;
5777 dsc->u.branch.link = 0;
5778 dsc->u.branch.exchange = 0;
5779 dsc->u.branch.dest = from + 4 + offset;
5781 dsc->modinsn[0] = THUMB_NOP;
5783 dsc->cleanup = &cleanup_branch;
5788 /* Copy BX/BLX with register-specified destinations. */
5791 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
5792 struct displaced_step_closure *dsc, int link,
5793 unsigned int cond, unsigned int rm)
5795 /* Implement "{BX,BLX}<cond> <reg>" as:
5797 Preparation: cond <- instruction condition
5798 Insn: mov r0, r0 (nop)
5799 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5801 Don't set r14 in cleanup for BX. */
5803 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
5805 dsc->u.branch.cond = cond;
5806 dsc->u.branch.link = link;
5808 dsc->u.branch.exchange = 1;
5810 dsc->cleanup = &cleanup_branch;
5814 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
5815 struct regcache *regs, struct displaced_step_closure *dsc)
5817 unsigned int cond = bits (insn, 28, 31);
5820 int link = bit (insn, 5);
5821 unsigned int rm = bits (insn, 0, 3);
5823 if (debug_displaced)
5824 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
5825 (unsigned long) insn);
5827 dsc->modinsn[0] = ARM_NOP;
5829 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
5834 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
5835 struct regcache *regs,
5836 struct displaced_step_closure *dsc)
5838 int link = bit (insn, 7);
5839 unsigned int rm = bits (insn, 3, 6);
5841 if (debug_displaced)
5842 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
5843 (unsigned short) insn);
5845 dsc->modinsn[0] = THUMB_NOP;
5847 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
5853 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
5856 cleanup_alu_imm (struct gdbarch *gdbarch,
5857 struct regcache *regs, struct displaced_step_closure *dsc)
5859 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5860 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5861 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5862 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5866 arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5867 struct displaced_step_closure *dsc)
5869 unsigned int rn = bits (insn, 16, 19);
5870 unsigned int rd = bits (insn, 12, 15);
5871 unsigned int op = bits (insn, 21, 24);
5872 int is_mov = (op == 0xd);
5873 ULONGEST rd_val, rn_val;
5875 if (!insn_references_pc (insn, 0x000ff000ul))
5876 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
5878 if (debug_displaced)
5879 fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
5880 "%.8lx\n", is_mov ? "move" : "ALU",
5881 (unsigned long) insn);
5883 /* Instruction is of form:
5885 <op><cond> rd, [rn,] #imm
5889 Preparation: tmp1, tmp2 <- r0, r1;
5891 Insn: <op><cond> r0, r1, #imm
5892 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2.  */
5895 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5896 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5897 rn_val = displaced_read_reg (regs, dsc, rn);
5898 rd_val = displaced_read_reg (regs, dsc, rd);
5899 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5900 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5904 dsc->modinsn[0] = insn & 0xfff00fff;
5906 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
5908 dsc->cleanup = &cleanup_alu_imm;
5914 thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
5915 uint16_t insn2, struct regcache *regs,
5916 struct displaced_step_closure *dsc)
5918 unsigned int op = bits (insn1, 5, 8);
5919 unsigned int rn, rm, rd;
5920 ULONGEST rd_val, rn_val;
5922 rn = bits (insn1, 0, 3); /* Rn */
5923 rm = bits (insn2, 0, 3); /* Rm */
5924 rd = bits (insn2, 8, 11); /* Rd */
5926 /* This routine is only called for the MOV instruction. */
5927 gdb_assert (op == 0x2 && rn == 0xf);
5929 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
5930 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
5932 if (debug_displaced)
5933 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
5934 "ALU", insn1, insn2);
5936 /* Instruction is of form:
5938 <op><cond> rd, [rn,] #imm
5942 Preparation: tmp1, tmp2 <- r0, r1;
5944 Insn: <op><cond> r0, r1, #imm
5945 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2.  */
5948 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5949 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5950 rn_val = displaced_read_reg (regs, dsc, rn);
5951 rd_val = displaced_read_reg (regs, dsc, rd);
5952 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5953 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5956 dsc->modinsn[0] = insn1;
5957 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
5960 dsc->cleanup = &cleanup_alu_imm;
5965 /* Copy/cleanup arithmetic/logic insns with register RHS. */
5968 cleanup_alu_reg (struct gdbarch *gdbarch,
5969 struct regcache *regs, struct displaced_step_closure *dsc)
5974 rd_val = displaced_read_reg (regs, dsc, 0);
5976 for (i = 0; i < 3; i++)
5977 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5979 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5983 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
5984 struct displaced_step_closure *dsc,
5985 unsigned int rd, unsigned int rn, unsigned int rm)
5987 ULONGEST rd_val, rn_val, rm_val;
5989 /* Instruction is of form:
5991 <op><cond> rd, [rn,] rm [, <shift>]
5995 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
5996 r0, r1, r2 <- rd, rn, rm
5997 Insn: <op><cond> r0, r1, r2 [, <shift>]
5998 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3.  */
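/* Example (added for illustration): "add pc, pc, r1" at address FROM is
   copied by arm_copy_alu_reg below as "add r0, r1, r2", with r1 seeded
   to FROM + 8 (the value the PC read would have produced) and r2
   seeded to the original r1; cleanup_alu_reg then restores r0-r2 and
   writes the result left in r0 to the PC.  */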
6001 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6002 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6003 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6004 rd_val = displaced_read_reg (regs, dsc, rd);
6005 rn_val = displaced_read_reg (regs, dsc, rn);
6006 rm_val = displaced_read_reg (regs, dsc, rm);
6007 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6008 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6009 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6012 dsc->cleanup = &cleanup_alu_reg;
6016 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
6017 struct displaced_step_closure *dsc)
6019 unsigned int op = bits (insn, 21, 24);
6020 int is_mov = (op == 0xd);
6022 if (!insn_references_pc (insn, 0x000ff00ful))
6023 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
6025 if (debug_displaced)
6026 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
6027 is_mov ? "move" : "ALU", (unsigned long) insn);
6030 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
6032 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
6034 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
6040 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
6041 struct regcache *regs,
6042 struct displaced_step_closure *dsc)
6044 unsigned rn, rm, rd;
6046 rd = bits (insn, 3, 6);
6047 rn = (bit (insn, 7) << 3) | bits (insn, 0, 2);
6050 if (rd != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
6051 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
6053 if (debug_displaced)
6054 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x\n",
6055 "ALU", (unsigned short) insn);
6057 dsc->modinsn[0] = ((insn & 0xff00) | 0x08);
6059 install_alu_reg (gdbarch, regs, dsc, rd, rn, rm);
6064 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
6067 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
6068 struct regcache *regs,
6069 struct displaced_step_closure *dsc)
6071 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
6074 for (i = 0; i < 4; i++)
6075 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6077 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6081 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
6082 struct displaced_step_closure *dsc,
6083 unsigned int rd, unsigned int rn, unsigned int rm,
6087 ULONGEST rd_val, rn_val, rm_val, rs_val;
6089 /* Instruction is of form:
6091 <op><cond> rd, [rn,] rm, <shift> rs
6095 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
6096 r0, r1, r2, r3 <- rd, rn, rm, rs
6097 Insn: <op><cond> r0, r1, r2, <shift> r3
6099 Cleanup: rd <- r0; r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4.  */
6103 for (i = 0; i < 4; i++)
6104 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6106 rd_val = displaced_read_reg (regs, dsc, rd);
6107 rn_val = displaced_read_reg (regs, dsc, rn);
6108 rm_val = displaced_read_reg (regs, dsc, rm);
6109 rs_val = displaced_read_reg (regs, dsc, rs);
6110 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6111 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6112 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6113 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
6115 dsc->cleanup = &cleanup_alu_shifted_reg;
6119 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
6120 struct regcache *regs,
6121 struct displaced_step_closure *dsc)
6123 unsigned int op = bits (insn, 21, 24);
6124 int is_mov = (op == 0xd);
6125 unsigned int rd, rn, rm, rs;
6127 if (!insn_references_pc (insn, 0x000fff0ful))
6128 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
6130 if (debug_displaced)
6131 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
6132 "%.8lx\n", is_mov ? "move" : "ALU",
6133 (unsigned long) insn);
6135 rn = bits (insn, 16, 19);
6136 rm = bits (insn, 0, 3);
6137 rs = bits (insn, 8, 11);
6138 rd = bits (insn, 12, 15);
6141 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
6143 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
6145 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
6150 /* Clean up load instructions. */
6153 cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
6154 struct displaced_step_closure *dsc)
6156 ULONGEST rt_val, rt_val2 = 0, rn_val;
6158 rt_val = displaced_read_reg (regs, dsc, 0);
6159 if (dsc->u.ldst.xfersize == 8)
6160 rt_val2 = displaced_read_reg (regs, dsc, 1);
6161 rn_val = displaced_read_reg (regs, dsc, 2);
6163 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6164 if (dsc->u.ldst.xfersize > 4)
6165 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6166 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
6167 if (!dsc->u.ldst.immed)
6168 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
6170 /* Handle register writeback. */
6171 if (dsc->u.ldst.writeback)
6172 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
6173 /* Put result in right place. */
6174 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
6175 if (dsc->u.ldst.xfersize == 8)
6176 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
6179 /* Clean up store instructions. */
6182 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
6183 struct displaced_step_closure *dsc)
6185 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
6187 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6188 if (dsc->u.ldst.xfersize > 4)
6189 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6190 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
6191 if (!dsc->u.ldst.immed)
6192 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
6193 if (!dsc->u.ldst.restore_r4)
6194 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
6197 if (dsc->u.ldst.writeback)
6198 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
6201 /* Copy "extra" load/store instructions. These are halfword/doubleword
6202 transfers, which have a different encoding to byte/word transfers. */
6205 arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
6206 struct regcache *regs, struct displaced_step_closure *dsc)
6208 unsigned int op1 = bits (insn, 20, 24);
6209 unsigned int op2 = bits (insn, 5, 6);
6210 unsigned int rt = bits (insn, 12, 15);
6211 unsigned int rn = bits (insn, 16, 19);
6212 unsigned int rm = bits (insn, 0, 3);
6213 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
6214 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
6215 int immed = (op1 & 0x4) != 0;
6217 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
6219 if (!insn_references_pc (insn, 0x000ff00ful))
6220 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
6222 if (debug_displaced)
6223 fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
6224 "insn %.8lx\n", unpriveleged ? "unpriveleged " : "",
6225 (unsigned long) insn);
6227 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
6230 internal_error (__FILE__, __LINE__,
6231 _("copy_extra_ld_st: instruction decode error"));
6233 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6234 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6235 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6237 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6239 rt_val = displaced_read_reg (regs, dsc, rt);
6240 if (bytesize[opcode] == 8)
6241 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
6242 rn_val = displaced_read_reg (regs, dsc, rn);
6244 rm_val = displaced_read_reg (regs, dsc, rm);
6246 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
6247 if (bytesize[opcode] == 8)
6248 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
6249 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
6251 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
6254 dsc->u.ldst.xfersize = bytesize[opcode];
6255 dsc->u.ldst.rn = rn;
6256 dsc->u.ldst.immed = immed;
6257 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
6258 dsc->u.ldst.restore_r4 = 0;
6261 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
6263 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
6264 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
6266 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
6268 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
6269 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
6271 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
6276 /* Copy byte/half word/word loads and stores. */
6279 install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
6280 struct displaced_step_closure *dsc, int load,
6281 int immed, int writeback, int size, int usermode,
6282 int rt, int rm, int rn)
6284 ULONGEST rt_val, rn_val, rm_val = 0;
6286 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6287 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6289 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6291 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
6293 rt_val = displaced_read_reg (regs, dsc, rt);
6294 rn_val = displaced_read_reg (regs, dsc, rn);
6296 rm_val = displaced_read_reg (regs, dsc, rm);
6298 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
6299 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
6301 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
6303 dsc->u.ldst.xfersize = size;
6304 dsc->u.ldst.rn = rn;
6305 dsc->u.ldst.immed = immed;
6306 dsc->u.ldst.writeback = writeback;
6308 /* To write PC we can do:
6310 Before this sequence of instructions:
6311 r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
6312 r2 is the Rn value got from displaced_read_reg.
6314 Insn1: push {pc} Write address of STR instruction + offset on stack
6315 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
6316 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
6317 = addr(Insn1) + offset - addr(Insn3) - 8
6319 Insn4: add r4, r4, #8 r4 = offset - 8
6320 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
6322 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
6324 Otherwise we don't know what value to write for PC, since the offset is
6325 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
6326 of this can be found in Section "Saving from r15" in
6327 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
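/* Worked example (added for illustration): assume this particular core
   stores PC + 12 for STR and the original instruction sits at
   FROM = 0x8000.  Insn1, at scratch address S, pushes S + 12; Insn2
   pops that into r4; Insn3 reads the PC as S + 16, leaving
   r4 = (S + 12) - (S + 16) = -4; Insn4 makes r4 = 4; Insn5 gives
   r0 = (0x8000 + 8) + 4 = 0x800c, i.e. 0x8000 + 12, exactly what the
   unmodified instruction would have stored.  */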
6329 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
6334 thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
6335 uint16_t insn2, struct regcache *regs,
6336 struct displaced_step_closure *dsc, int size)
6338 unsigned int u_bit = bit (insn1, 7);
6339 unsigned int rt = bits (insn2, 12, 15);
6340 int imm12 = bits (insn2, 0, 11);
6343 if (debug_displaced)
6344 fprintf_unfiltered (gdb_stdlog,
6345 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
6346 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
6352 /* Rewrite instruction LDR Rt imm12 into:
6354 Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12
6358 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2]. */
6361 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6362 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6363 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6365 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6367 pc_val = pc_val & 0xfffffffc;
6369 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
6370 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
6374 dsc->u.ldst.xfersize = size;
6375 dsc->u.ldst.immed = 0;
6376 dsc->u.ldst.writeback = 0;
6377 dsc->u.ldst.restore_r4 = 0;
6379 /* LDR R0, R2, R3 */
6380 dsc->modinsn[0] = 0xf852;
6381 dsc->modinsn[1] = 0x3;
6384 dsc->cleanup = &cleanup_load;
6390 thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
6391 uint16_t insn2, struct regcache *regs,
6392 struct displaced_step_closure *dsc,
6393 int writeback, int immed)
6395 unsigned int rt = bits (insn2, 12, 15);
6396 unsigned int rn = bits (insn1, 0, 3);
6397 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
6398 /* In LDR (register), there is also a register Rm, which is not allowed to
6399 be PC, so we don't have to check it. */
6401 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
6402 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
6405 if (debug_displaced)
6406 fprintf_unfiltered (gdb_stdlog,
6407 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
6408 rt, rn, insn1, insn2);
6410 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
6413 dsc->u.ldst.restore_r4 = 0;
6416 /* ldr[b]<cond> rt, [rn, #imm], etc.
6418 ldr[b]<cond> r0, [r2, #imm]. */
6420 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
6421 dsc->modinsn[1] = insn2 & 0x0fff;
6424 /* ldr[b]<cond> rt, [rn, rm], etc.
6426 ldr[b]<cond> r0, [r2, r3]. */
6428 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
6429 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
6439 arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
6440 struct regcache *regs,
6441 struct displaced_step_closure *dsc,
6442 int load, int size, int usermode)
6444 int immed = !bit (insn, 25);
6445 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
6446 unsigned int rt = bits (insn, 12, 15);
6447 unsigned int rn = bits (insn, 16, 19);
6448 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
6450 if (!insn_references_pc (insn, 0x000ff00ful))
6451 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
6453 if (debug_displaced)
6454 fprintf_unfiltered (gdb_stdlog,
6455 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
6456 load ? (size == 1 ? "ldrb" : "ldr")
6457 : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
6459 (unsigned long) insn);
6461 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
6462 usermode, rt, rm, rn);
6464 if (load || rt != ARM_PC_REGNUM)
6466 dsc->u.ldst.restore_r4 = 0;
6469 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
6471 {ldr,str}[b]<cond> r0, [r2, #imm]. */
6472 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
6474 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
6476 {ldr,str}[b]<cond> r0, [r2, r3]. */
6477 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
6481 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
6482 dsc->u.ldst.restore_r4 = 1;
6483 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
6484 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
6485 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
6486 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
6487 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
6491 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
6493 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
6498 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
6503 /* Cleanup LDM instructions with fully-populated register list. This is an
6504 unfortunate corner case: it's impossible to implement correctly by modifying
6505 the instruction. The issue is as follows: we have an instruction,
6509 which we must rewrite to avoid loading PC. A possible solution would be to
6510 do the load in two halves, something like (with suitable cleanup afterwards):
6514 ldm[id][ab] r8!, {r0-r7}
6516 ldm[id][ab] r8, {r7-r14}
6519 but at present there's no suitable place for <temp>, since the scratch space
6520 is overwritten before the cleanup routine is called. For now, we simply
6521 emulate the instruction. */
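/* Example of the corner case (added for illustration): for
   "ldmia sp, {r0-r15}" every register, including the PC, is in the
   list, so there is no spare low register the loads could be diverted
   into (cleanup_block_load_pc below relies on having one).  The
   cleanup here therefore walks the register mask itself, reading each
   word from memory and writing it into the corresponding register,
   using LOAD_WRITE_PC so that the word destined for the PC updates the
   execution state properly.  */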
6524 cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
6525 struct displaced_step_closure *dsc)
6527 int inc = dsc->u.block.increment;
6528 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
6529 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
6530 uint32_t regmask = dsc->u.block.regmask;
6531 int regno = inc ? 0 : 15;
6532 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
6533 int exception_return = dsc->u.block.load && dsc->u.block.user
6534 && (regmask & 0x8000) != 0;
6535 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6536 int do_transfer = condition_true (dsc->u.block.cond, status);
6537 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
6542 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
6543 sensible we can do here. Complain loudly. */
6544 if (exception_return)
6545 error (_("Cannot single-step exception return"));
6547 /* We don't handle any stores here for now. */
6548 gdb_assert (dsc->u.block.load != 0);
6550 if (debug_displaced)
6551 fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
6552 "%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
6553 dsc->u.block.increment ? "inc" : "dec",
6554 dsc->u.block.before ? "before" : "after");
6561 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
6564 while (regno >= 0 && (regmask & (1 << regno)) == 0)
6567 xfer_addr += bump_before;
6569 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
6570 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
6572 xfer_addr += bump_after;
6574 regmask &= ~(1 << regno);
6577 if (dsc->u.block.writeback)
6578 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
6582 /* Clean up an STM which included the PC in the register list. */
6585 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
6586 struct displaced_step_closure *dsc)
6588 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6589 int store_executed = condition_true (dsc->u.block.cond, status);
6590 CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
6591 CORE_ADDR stm_insn_addr;
6594 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
6596 /* If condition code fails, there's nothing else to do. */
6597 if (!store_executed)
6600 if (dsc->u.block.increment)
6602 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
6604 if (dsc->u.block.before)
6609 pc_stored_at = dsc->u.block.xfer_addr;
6611 if (dsc->u.block.before)
6615 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
6616 stm_insn_addr = dsc->scratch_base;
6617 offset = pc_val - stm_insn_addr;
6619 if (debug_displaced)
6620 fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
6621 "STM instruction\n", offset);
6623 /* Rewrite the stored PC to the proper value for the non-displaced original
6625 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
6626 dsc->insn_addr + offset);
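/* An illustrative example: suppose the original "stm r0, {r1, pc}" sat at
   0x8000 and its copy executed from scratch space at 0x1000, on a core
   that stores PC + 8 for STM (the exact offset is implementation-defined,
   which is why it is detected rather than assumed).  The word stored for
   PC is then 0x1008, the detected offset is 0x1008 - 0x1000 = 8, and the
   store above rewrites that word to 0x8000 + 8 = 0x8008 -- what the
   non-displaced instruction would have stored.  */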
6629 /* Clean up an LDM which includes the PC in the register list. We clumped all
6630 the registers in the transferred list into a contiguous range r0...rX (to
6631 avoid loading PC directly and losing control of the debugged program), so we
6632 must undo that here. */
6635 cleanup_block_load_pc (struct gdbarch *gdbarch,
6636 struct regcache *regs,
6637 struct displaced_step_closure *dsc)
6639 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6640 int load_executed = condition_true (dsc->u.block.cond, status), i;
6641 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
6642 unsigned int regs_loaded = bitcount (mask);
6643 unsigned int num_to_shuffle = regs_loaded, clobbered;
6645 /* The method employed here will fail if the register list is fully populated
6646 (we need to avoid loading PC directly). */
6647 gdb_assert (num_to_shuffle < 16);
6652 clobbered = (1 << num_to_shuffle) - 1;
6654 while (num_to_shuffle > 0)
6656 if ((mask & (1 << write_reg)) != 0)
6658 unsigned int read_reg = num_to_shuffle - 1;
6660 if (read_reg != write_reg)
6662 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
6663 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
6664 if (debug_displaced)
6665 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
6666 "loaded register r%d to r%d\n"), read_reg,
6669 else if (debug_displaced)
6670 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
6671 "r%d already in the right place\n"),
6674 clobbered &= ~(1 << write_reg);
6682 /* Restore any registers we scribbled over. */
6683 for (write_reg = 0; clobbered != 0; write_reg++)
6685 if ((clobbered & (1 << write_reg)) != 0)
6687 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
6689 if (debug_displaced)
6690 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
6691 "clobbered register r%d\n"), write_reg);
6692 clobbered &= ~(1 << write_reg);
6696 /* Perform register writeback manually. */
6697 if (dsc->u.block.writeback)
6699 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
6701 if (dsc->u.block.increment)
6702 new_rn_val += regs_loaded * 4;
6704 new_rn_val -= regs_loaded * 4;
6706 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
6711 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
6712 in user-level code (in particular exception return, ldm rn, {...pc}^). */
6715 arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
6716 struct regcache *regs,
6717 struct displaced_step_closure *dsc)
6719 int load = bit (insn, 20);
6720 int user = bit (insn, 22);
6721 int increment = bit (insn, 23);
6722 int before = bit (insn, 24);
6723 int writeback = bit (insn, 21);
6724 int rn = bits (insn, 16, 19);
6726 /* Block transfers which don't mention PC can be run directly
6728 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
6729 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
6731 if (rn == ARM_PC_REGNUM)
6733 warning (_("displaced: Unpredictable LDM or STM with "
6734 "base register r15"));
6735 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
6738 if (debug_displaced)
6739 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6740 "%.8lx\n", (unsigned long) insn);
6742 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
6743 dsc->u.block.rn = rn;
6745 dsc->u.block.load = load;
6746 dsc->u.block.user = user;
6747 dsc->u.block.increment = increment;
6748 dsc->u.block.before = before;
6749 dsc->u.block.writeback = writeback;
6750 dsc->u.block.cond = bits (insn, 28, 31);
6752 dsc->u.block.regmask = insn & 0xffff;
6756 if ((insn & 0xffff) == 0xffff)
6758 /* LDM with a fully-populated register list. This case is
6759 particularly tricky. Implement for now by fully emulating the
6760 instruction (which might not behave perfectly in all cases, but
6761 these instructions should be rare enough for that not to matter
6763 dsc->modinsn[0] = ARM_NOP;
6765 dsc->cleanup = &cleanup_block_load_all;
6769 /* LDM of a list of registers which includes PC. Implement by
6770 rewriting the list of registers to be transferred into a
6771 contiguous chunk r0...rX before doing the transfer, then shuffling
6772 registers into the correct places in the cleanup routine. */
6773 unsigned int regmask = insn & 0xffff;
6774 unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
6775 unsigned int to = 0, from = 0, i, new_rn;
6777 for (i = 0; i < num_in_list; i++)
6778 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6780 /* Writeback makes things complicated. We need to avoid clobbering
6781 the base register with one of the registers in our modified
6782 register list, but just using a different register can't work in
6785 ldm r14!, {r0-r13,pc}
6787 which would need to be rewritten as:
6789 ldm rN!, {r0-r14}
6791 but that can't work, because there's no free register for N.
6793 Solve this by turning off the writeback bit, and emulating
6794 writeback manually in the cleanup routine. */
6799 new_regmask = (1 << num_in_list) - 1;
6801 if (debug_displaced)
6802 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6803 "{..., pc}: original reg list %.4x, modified "
6804 "list %.4x\n"), rn, writeback ? "!" : "",
6805 (int) insn & 0xffff, new_regmask);
6807 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
6809 dsc->cleanup = &cleanup_block_load_pc;
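/* For illustration (an invented example): the instruction

       e8b5800a   ldm r5!, {r1, r3, pc}

   has three registers in its list, so new_regmask is 0x0007 and, with the
   writeback bit already cleared as described above, the scratch copy
   becomes

       e8950007   ldm r5, {r0, r1, r2}

   cleanup_block_load_pc then moves r2 into PC, r1 into r3 and r0 into r1,
   restores the other scribbled-on low registers from dsc->tmp[], and
   writes r5 + 12 back to r5 to emulate the suppressed writeback.  */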
6814 /* STM of a list of registers which includes PC. Run the instruction
6815 as-is, but out of line: this will store the wrong value for the PC,
6816 so we must manually fix up the memory in the cleanup routine.
6817 Doing things this way has the advantage that we can auto-detect
6818 the offset of the PC write (which is architecture-dependent) in
6819 the cleanup routine. */
6820 dsc->modinsn[0] = insn;
6822 dsc->cleanup = &cleanup_block_store_pc;
6829 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6830 struct regcache *regs,
6831 struct displaced_step_closure *dsc)
6833 int rn = bits (insn1, 0, 3);
6834 int load = bit (insn1, 4);
6835 int writeback = bit (insn1, 5);
6837 /* Block transfers which don't mention PC can be run directly
6839 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
6840 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
6842 if (rn == ARM_PC_REGNUM)
6844 warning (_("displaced: Unpredictable LDM or STM with "
6845 "base register r15"));
6846 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6847 "unpredictable ldm/stm", dsc);
6850 if (debug_displaced)
6851 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6852 "%.4x%.4x\n", insn1, insn2);
6854 /* Clear bit 13, since it should always be zero. */
6855 dsc->u.block.regmask = (insn2 & 0xdfff);
6856 dsc->u.block.rn = rn;
6858 dsc->u.block.load = load;
6859 dsc->u.block.user = 0;
6860 dsc->u.block.increment = bit (insn1, 7);
6861 dsc->u.block.before = bit (insn1, 8);
6862 dsc->u.block.writeback = writeback;
6863 dsc->u.block.cond = INST_AL;
6864 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
6868 if (dsc->u.block.regmask == 0xffff)
6870 /* This case should be impossible: bit 13 was cleared above, so the register mask can never be 0xffff. */
6875 unsigned int regmask = dsc->u.block.regmask;
6876 unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
6877 unsigned int to = 0, from = 0, i, new_rn;
6879 for (i = 0; i < num_in_list; i++)
6880 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6885 new_regmask = (1 << num_in_list) - 1;
6887 if (debug_displaced)
6888 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6889 "{..., pc}: original reg list %.4x, modified "
6890 "list %.4x\n"), rn, writeback ? "!" : "",
6891 (int) dsc->u.block.regmask, new_regmask);
6893 dsc->modinsn[0] = insn1;
6894 dsc->modinsn[1] = (new_regmask & 0xffff);
6897 dsc->cleanup = &cleanup_block_load_pc;
6902 dsc->modinsn[0] = insn1;
6903 dsc->modinsn[1] = insn2;
6905 dsc->cleanup = &cleanup_block_store_pc;
6910 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6911 for Linux, where some SVC instructions must be treated specially. */
6914 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
6915 struct displaced_step_closure *dsc)
6917 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
6919 if (debug_displaced)
6920 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
6921 "%.8lx\n", (unsigned long) resume_addr);
6923 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
6927 /* Common copy routine for SVC instructions. */
6930 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
6931 struct displaced_step_closure *dsc)
6933 /* Preparation: none.
6934 Insn: unmodified svc.
6935 Cleanup: pc <- insn_addr + insn_size. */
6937 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
6939 dsc->wrote_to_pc = 1;
6941 /* Allow OS-specific code to override SVC handling. */
6942 if (dsc->u.svc.copy_svc_os)
6943 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
6946 dsc->cleanup = &cleanup_svc;
6952 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
6953 struct regcache *regs, struct displaced_step_closure *dsc)
6956 if (debug_displaced)
6957 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
6958 (unsigned long) insn);
6960 dsc->modinsn[0] = insn;
6962 return install_svc (gdbarch, regs, dsc);
6966 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
6967 struct regcache *regs, struct displaced_step_closure *dsc)
6970 if (debug_displaced)
6971 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
6974 dsc->modinsn[0] = insn;
6976 return install_svc (gdbarch, regs, dsc);
6979 /* Copy undefined instructions. */
6982 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
6983 struct displaced_step_closure *dsc)
6985 if (debug_displaced)
6986 fprintf_unfiltered (gdb_stdlog,
6987 "displaced: copying undefined insn %.8lx\n",
6988 (unsigned long) insn);
6990 dsc->modinsn[0] = insn;
6996 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6997 struct displaced_step_closure *dsc)
7000 if (debug_displaced)
7001 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
7002 "%.4x %.4x\n", (unsigned short) insn1,
7003 (unsigned short) insn2);
7005 dsc->modinsn[0] = insn1;
7006 dsc->modinsn[1] = insn2;
7012 /* Copy unpredictable instructions. */
7015 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
7016 struct displaced_step_closure *dsc)
7018 if (debug_displaced)
7019 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
7020 "%.8lx\n", (unsigned long) insn);
7022 dsc->modinsn[0] = insn;
7027 /* The decode_* functions are instruction decoding helpers. They mostly follow
7028 the presentation in the ARM ARM. */
7031 arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
7032 struct regcache *regs,
7033 struct displaced_step_closure *dsc)
7035 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
7036 unsigned int rn = bits (insn, 16, 19);
7038 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0xe) == 0x0)
7039 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
7040 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0xe) == 0x1)
7041 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
7042 else if ((op1 & 0x60) == 0x20)
7043 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
7044 else if ((op1 & 0x71) == 0x40)
7045 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
7047 else if ((op1 & 0x77) == 0x41)
7048 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
7049 else if ((op1 & 0x77) == 0x45)
7050 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
7051 else if ((op1 & 0x77) == 0x51)
7054 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
7056 return arm_copy_unpred (gdbarch, insn, dsc);
7058 else if ((op1 & 0x77) == 0x55)
7059 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
7060 else if (op1 == 0x57)
7063 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
7064 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
7065 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
7066 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
7067 default: return arm_copy_unpred (gdbarch, insn, dsc);
7069 else if ((op1 & 0x63) == 0x43)
7070 return arm_copy_unpred (gdbarch, insn, dsc);
7071 else if ((op2 & 0x1) == 0x0)
7072 switch (op1 & ~0x80)
7075 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
7077 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
7078 case 0x71: case 0x75:
7080 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
7081 case 0x63: case 0x67: case 0x73: case 0x77:
7082 return arm_copy_unpred (gdbarch, insn, dsc);
7084 return arm_copy_undef (gdbarch, insn, dsc);
7087 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
7091 arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
7092 struct regcache *regs,
7093 struct displaced_step_closure *dsc)
7095 if (bit (insn, 27) == 0)
7096 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
7097 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
7098 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
7101 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
7104 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
7106 case 0x4: case 0x5: case 0x6: case 0x7:
7107 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
7110 switch ((insn & 0xe00000) >> 21)
7112 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
7114 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7117 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
7120 return arm_copy_undef (gdbarch, insn, dsc);
7125 int rn_f = (bits (insn, 16, 19) == 0xf);
7126 switch ((insn & 0xe00000) >> 21)
7129 /* ldc/ldc2 imm (undefined for rn == pc). */
7130 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
7131 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7134 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
7136 case 0x4: case 0x5: case 0x6: case 0x7:
7137 /* ldc/ldc2 lit (undefined for rn != pc). */
7138 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
7139 : arm_copy_undef (gdbarch, insn, dsc);
7142 return arm_copy_undef (gdbarch, insn, dsc);
7147 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
7150 if (bits (insn, 16, 19) == 0xf)
7152 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7154 return arm_copy_undef (gdbarch, insn, dsc);
7158 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
7160 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7164 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
7166 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7169 return arm_copy_undef (gdbarch, insn, dsc);
7173 /* Decode miscellaneous instructions in dp/misc encoding space. */
7176 arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
7177 struct regcache *regs,
7178 struct displaced_step_closure *dsc)
7180 unsigned int op2 = bits (insn, 4, 6);
7181 unsigned int op = bits (insn, 21, 22);
7182 unsigned int op1 = bits (insn, 16, 19);
7187 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
7190 if (op == 0x1) /* bx. */
7191 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
7193 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
7195 return arm_copy_undef (gdbarch, insn, dsc);
7199 /* Not really supported. */
7200 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
7202 return arm_copy_undef (gdbarch, insn, dsc);
7206 return arm_copy_bx_blx_reg (gdbarch, insn,
7207 regs, dsc); /* blx register. */
7209 return arm_copy_undef (gdbarch, insn, dsc);
7212 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
7216 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
7218 /* Not really supported. */
7219 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
7222 return arm_copy_undef (gdbarch, insn, dsc);
7227 arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
7228 struct regcache *regs,
7229 struct displaced_step_closure *dsc)
7232 switch (bits (insn, 20, 24))
7235 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
7238 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
7240 case 0x12: case 0x16:
7241 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
7244 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
7248 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
7250 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
7251 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
7252 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
7253 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
7254 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
7255 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
7256 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
7257 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
7258 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
7259 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
7260 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
7261 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
7262 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
7263 /* The second argument means "unprivileged". */
7264 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
7268 /* Should be unreachable. */
7273 arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
7274 struct regcache *regs,
7275 struct displaced_step_closure *dsc)
7277 int a = bit (insn, 25), b = bit (insn, 4);
7278 uint32_t op1 = bits (insn, 20, 24);
7279 int rn_f = bits (insn, 16, 19) == 0xf;
7281 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
7282 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
7283 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
7284 else if ((!a && (op1 & 0x17) == 0x02)
7285 || (a && (op1 & 0x17) == 0x02 && !b))
7286 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
7287 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
7288 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
7289 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
7290 else if ((!a && (op1 & 0x17) == 0x03)
7291 || (a && (op1 & 0x17) == 0x03 && !b))
7292 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
7293 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
7294 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
7295 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
7296 else if ((!a && (op1 & 0x17) == 0x06)
7297 || (a && (op1 & 0x17) == 0x06 && !b))
7298 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
7299 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
7300 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
7301 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
7302 else if ((!a && (op1 & 0x17) == 0x07)
7303 || (a && (op1 & 0x17) == 0x07 && !b))
7304 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
7306 /* Should be unreachable. */
7311 arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
7312 struct displaced_step_closure *dsc)
7314 switch (bits (insn, 20, 24))
7316 case 0x00: case 0x01: case 0x02: case 0x03:
7317 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
7319 case 0x04: case 0x05: case 0x06: case 0x07:
7320 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
7322 case 0x08: case 0x09: case 0x0a: case 0x0b:
7323 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
7324 return arm_copy_unmodified (gdbarch, insn,
7325 "decode/pack/unpack/saturate/reverse", dsc);
7328 if (bits (insn, 5, 7) == 0) /* op2. */
7330 if (bits (insn, 12, 15) == 0xf)
7331 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
7333 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
7336 return arm_copy_undef (gdbarch, insn, dsc);
7338 case 0x1a: case 0x1b:
7339 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7340 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
7342 return arm_copy_undef (gdbarch, insn, dsc);
7344 case 0x1c: case 0x1d:
7345 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
7347 if (bits (insn, 0, 3) == 0xf)
7348 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
7350 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
7353 return arm_copy_undef (gdbarch, insn, dsc);
7355 case 0x1e: case 0x1f:
7356 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7357 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
7359 return arm_copy_undef (gdbarch, insn, dsc);
7362 /* Should be unreachable. */
7367 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, int32_t insn,
7368 struct regcache *regs,
7369 struct displaced_step_closure *dsc)
7372 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
7374 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
7378 arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
7379 struct regcache *regs,
7380 struct displaced_step_closure *dsc)
7382 unsigned int opcode = bits (insn, 20, 24);
7386 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
7387 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
7389 case 0x08: case 0x0a: case 0x0c: case 0x0e:
7390 case 0x12: case 0x16:
7391 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
7393 case 0x09: case 0x0b: case 0x0d: case 0x0f:
7394 case 0x13: case 0x17:
7395 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
7397 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
7398 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
7399 /* Note: no writeback for these instructions. Bit 25 will always be
7400 zero though (via caller), so the following works OK. */
7401 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7404 /* Should be unreachable. */
7408 /* Decode shifted register instructions. */
7411 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
7412 uint16_t insn2, struct regcache *regs,
7413 struct displaced_step_closure *dsc)
7415 /* PC is only allowed to be used in the MOV instruction. */
7417 unsigned int op = bits (insn1, 5, 8);
7418 unsigned int rn = bits (insn1, 0, 3);
7420 if (op == 0x2 && rn == 0xf) /* MOV */
7421 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
7423 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7424 "dp (shift reg)", dsc);
7428 /* Decode extension register load/store. Exactly the same as
7429 arm_decode_ext_reg_ld_st. */
7432 thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
7433 uint16_t insn2, struct regcache *regs,
7434 struct displaced_step_closure *dsc)
7436 unsigned int opcode = bits (insn1, 4, 8);
7440 case 0x04: case 0x05:
7441 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7442 "vfp/neon vmov", dsc);
7444 case 0x08: case 0x0c: /* 01x00 */
7445 case 0x0a: case 0x0e: /* 01x10 */
7446 case 0x12: case 0x16: /* 10x10 */
7447 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7448 "vfp/neon vstm/vpush", dsc);
7450 case 0x09: case 0x0d: /* 01x01 */
7451 case 0x0b: case 0x0f: /* 01x11 */
7452 case 0x13: case 0x17: /* 10x11 */
7453 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7454 "vfp/neon vldm/vpop", dsc);
7456 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
7457 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7459 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
7460 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
7463 /* Should be unreachable. */
7468 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn, CORE_ADDR to,
7469 struct regcache *regs, struct displaced_step_closure *dsc)
7471 unsigned int op1 = bits (insn, 20, 25);
7472 int op = bit (insn, 4);
7473 unsigned int coproc = bits (insn, 8, 11);
7474 unsigned int rn = bits (insn, 16, 19);
7476 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
7477 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
7478 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
7479 && (coproc & 0xe) != 0xa)
7481 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7482 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
7483 && (coproc & 0xe) != 0xa)
7484 /* ldc/ldc2 imm/lit. */
7485 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7486 else if ((op1 & 0x3e) == 0x00)
7487 return arm_copy_undef (gdbarch, insn, dsc);
7488 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
7489 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
7490 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
7491 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
7492 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
7493 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
7494 else if ((op1 & 0x30) == 0x20 && !op)
7496 if ((coproc & 0xe) == 0xa)
7497 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
7499 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7501 else if ((op1 & 0x30) == 0x20 && op)
7502 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
7503 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
7504 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
7505 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
7506 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
7507 else if ((op1 & 0x30) == 0x30)
7508 return arm_copy_svc (gdbarch, insn, regs, dsc);
7510 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
7514 thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
7515 uint16_t insn2, struct regcache *regs,
7516 struct displaced_step_closure *dsc)
7518 unsigned int coproc = bits (insn2, 8, 11);
7519 unsigned int op1 = bits (insn1, 4, 9);
7520 unsigned int bit_5_8 = bits (insn1, 5, 8);
7521 unsigned int bit_9 = bit (insn1, 9);
7522 unsigned int bit_4 = bit (insn1, 4);
7523 unsigned int rn = bits (insn1, 0, 3);
7528 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7529 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
7531 else if (bit_5_8 == 0) /* UNDEFINED. */
7532 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7535 /* coproc is 101x: SIMD/VFP, extension registers load/store. */
7536 if ((coproc & 0xe) == 0xa)
7537 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
7539 else /* coproc is not 101x. */
7541 if (bit_4 == 0) /* STC/STC2. */
7542 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7544 else /* LDC/LDC2 {literal, immediate}. */
7545 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
7551 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
7557 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
7558 struct displaced_step_closure *dsc, int rd)
7564 Preparation: Rd <- PC
7570 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7571 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
7575 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
7576 struct displaced_step_closure *dsc,
7577 int rd, unsigned int imm)
7580 /* Encoding T2: ADDS Rd, #imm */
7581 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
7583 install_pc_relative (gdbarch, regs, dsc, rd);
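/* To illustrate the encoding arithmetic only (an invented example): for
   "adr r5, <label>" (0xa50c), the caller below extracts rd = 5 and
   imm8 = 12, and the scratch instruction built here is

       0x3000 | (5 << 8) | 12  ==  0x350c   adds r5, #12

   install_pc_relative preloads r5 with the value read for PC.  */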
7589 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
7590 struct regcache *regs,
7591 struct displaced_step_closure *dsc)
7593 unsigned int rd = bits (insn, 8, 10);
7594 unsigned int imm8 = bits (insn, 0, 7);
7596 if (debug_displaced)
7597 fprintf_unfiltered (gdb_stdlog,
7598 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
7601 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
7605 thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
7606 uint16_t insn2, struct regcache *regs,
7607 struct displaced_step_closure *dsc)
7609 unsigned int rd = bits (insn2, 8, 11);
7610 /* The immediate field has the same encoding in ADR, ADD and SUB, so we simply
7611 extract the raw immediate encoding rather than computing the immediate value.
7612 When generating the ADD or SUB instruction, we can then OR the raw encoding
7613 directly into the new instruction. */
7614 unsigned int imm_3_8 = insn2 & 0x70ff;
7615 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
7617 if (debug_displaced)
7618 fprintf_unfiltered (gdb_stdlog,
7619 "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
7620 rd, imm_i, imm_3_8, insn1, insn2);
7622 if (bit (insn1, 7)) /* ADR encoding T2 (PC-relative subtract). */
7624 /* Rewrite as SUB (immediate) encoding T3: SUB Rd, Rd, #imm. */
7625 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
7626 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7628 else /* ADR encoding T3 (PC-relative add). */
7630 /* Rewrite as ADD (immediate) encoding T3: ADD Rd, Rd, #imm. */
7631 dsc->modinsn[0] = (0xf100 | rd | imm_i);
7632 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7636 install_pc_relative (gdbarch, regs, dsc, rd);
7642 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, unsigned short insn1,
7643 struct regcache *regs,
7644 struct displaced_step_closure *dsc)
7646 unsigned int rt = bits (insn1, 8, 10);
7648 int imm8 = (bits (insn1, 0, 7) << 2);
7649 CORE_ADDR from = dsc->insn_addr;
7655 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
7657 Insn: LDR R0, [R2, R3];
7658 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
7660 if (debug_displaced)
7661 fprintf_unfiltered (gdb_stdlog,
7662 "displaced: copying thumb ldr r%d [pc #%d]\n"
7665 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
7666 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
7667 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
7668 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7669 /* The assembler calculates the required value of the offset from the
7670 Align(PC,4) value of this instruction to the label. */
7671 pc = pc & 0xfffffffc;
7673 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
7674 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
7677 dsc->u.ldst.xfersize = 4;
7679 dsc->u.ldst.immed = 0;
7680 dsc->u.ldst.writeback = 0;
7681 dsc->u.ldst.restore_r4 = 0;
7683 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/
7685 dsc->cleanup = &cleanup_load;
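/* A worked example (illustrative): for "ldr r7, [pc, #40]" (0x4f0a) at
   address 0x8100, rt is 7 and imm8 is 10 << 2 = 40.  The preparation
   reads PC as 0x8104, word-aligns it, and sets r2 = 0x8104 and r3 = 40;
   the scratch "ldr r0, [r2, r3]" then loads from 0x812c, and cleanup_load
   copies r0 into r7 and restores r0, r2 and r3 from dsc->tmp[].  */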
7690 /* Copy Thumb CBNZ/CBZ instructions. */
7693 thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
7694 struct regcache *regs,
7695 struct displaced_step_closure *dsc)
7697 int non_zero = bit (insn1, 11);
7698 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
7699 CORE_ADDR from = dsc->insn_addr;
7700 int rn = bits (insn1, 0, 2);
7701 int rn_val = displaced_read_reg (regs, dsc, rn);
7703 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
7704 /* CBNZ and CBZ do not affect the condition flags. If the branch is taken,
7705 set the condition to INST_AL so that cleanup_branch knows the branch was
7706 taken; otherwise leave it false and cleanup_branch will do nothing. */
7707 if (dsc->u.branch.cond)
7709 dsc->u.branch.cond = INST_AL;
7710 dsc->u.branch.dest = from + 4 + imm5;
7713 dsc->u.branch.dest = from + 2;
7715 dsc->u.branch.link = 0;
7716 dsc->u.branch.exchange = 0;
7718 if (debug_displaced)
7719 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
7720 " insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
7721 rn, rn_val, insn1, dsc->u.branch.dest);
7723 dsc->modinsn[0] = THUMB_NOP;
7725 dsc->cleanup = &cleanup_branch;
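/* For example (illustrative): "cbz r2, <from + 16>" encodes as 0xb132,
   whose imm5 field decodes to 12, giving a branch target of
   from + 4 + 12.  If r2 currently reads as zero, the condition is forced
   to INST_AL and u.branch.dest is that target; otherwise dest is simply
   from + 2, the next instruction.  Either way the scratch slot holds a
   Thumb NOP and cleanup_branch performs the actual PC update.  */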
7729 /* Copy Table Branch Byte/Halfword (TBB/TBH) instructions. */
7731 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
7732 uint16_t insn2, struct regcache *regs,
7733 struct displaced_step_closure *dsc)
7735 ULONGEST rn_val, rm_val;
7736 int is_tbh = bit (insn2, 4);
7737 CORE_ADDR halfwords = 0;
7738 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7740 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
7741 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
7747 target_read_memory (rn_val + 2 * rm_val, buf, 2);
7748 halfwords = extract_unsigned_integer (buf, 2, byte_order);
7754 target_read_memory (rn_val + rm_val, buf, 1);
7755 halfwords = extract_unsigned_integer (buf, 1, byte_order);
7758 if (debug_displaced)
7759 fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x index 0x%x"
7760 " table entry 0x%x\n", is_tbh ? "tbh" : "tbb",
7761 (unsigned int) rn_val, (unsigned int) rm_val,
7762 (unsigned int) halfwords);
7764 dsc->u.branch.cond = INST_AL;
7765 dsc->u.branch.link = 0;
7766 dsc->u.branch.exchange = 0;
7767 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
7769 dsc->cleanup = &cleanup_branch;
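/* An invented example: for "tbh [r0, r1, lsl #1]" (0xe8d0 0xf011) with
   r1 = 3, the code above fetches the halfword at r0 + 6; if that table
   entry is 0x14, the destination recorded for cleanup_branch is
   insn_addr + 4 + 2 * 0x14.  */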
7775 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
7776 struct displaced_step_closure *dsc)
7779 int val = displaced_read_reg (regs, dsc, 7);
7780 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
7783 val = displaced_read_reg (regs, dsc, 8);
7784 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
7787 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
7792 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, unsigned short insn1,
7793 struct regcache *regs,
7794 struct displaced_step_closure *dsc)
7796 dsc->u.block.regmask = insn1 & 0x00ff;
7798 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
7801 (1) register list is full, that is, r0-r7 are used.
7802 Prepare: tmp[0] <- r8
7804 POP {r0, r1, ...., r6, r7}; remove PC from reglist
7805 MOV r8, r7; Move value of r7 to r8;
7806 POP {r7}; Store PC value into r7.
7808 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
7810 (2) register list is not full, supposing there are N registers in
7811 register list (except PC, 0 <= N <= 7).
7812 Prepare: for each i, 0 - N, tmp[i] <- ri.
7814 POP {r0, r1, ...., rN};
7816 Cleanup: Move the loaded values from r0 - rN into their places in the
7817 original register list (including PC), then restore the rest from tmp[]. */
7819 if (debug_displaced)
7820 fprintf_unfiltered (gdb_stdlog,
7821 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
7822 dsc->u.block.regmask, insn1);
7824 if (dsc->u.block.regmask == 0xff)
7826 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
7828 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
7829 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
7830 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
7833 dsc->cleanup = &cleanup_pop_pc_16bit_all;
7837 unsigned int num_in_list = bitcount (dsc->u.block.regmask);
7838 unsigned int new_regmask, bit = 1;
7839 unsigned int to = 0, from = 0, i, new_rn;
7841 for (i = 0; i < num_in_list + 1; i++)
7842 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7844 new_regmask = (1 << (num_in_list + 1)) - 1;
7846 if (debug_displaced)
7847 fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
7848 "{..., pc}: original reg list %.4x,"
7849 " modified list %.4x\n"),
7850 (int) dsc->u.block.regmask, new_regmask);
7852 dsc->u.block.regmask |= 0x8000;
7853 dsc->u.block.writeback = 0;
7854 dsc->u.block.cond = INST_AL;
7856 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
7858 dsc->cleanup = &cleanup_block_load_pc;
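/* For illustration (an invented example): "pop {r2, r4, pc}" is 0xbd14,
   so num_in_list is 2, new_regmask is 0x07, and the scratch instruction
   becomes 0xbc07, i.e. "pop {r0, r1, r2}".  The rewritten list still pops
   three words, so SP is updated correctly by the instruction itself
   (which is why writeback is forced to 0 above); cleanup_block_load_pc
   then moves r2 into PC, r1 into r4 and r0 into r2, and restores the
   remaining scratch registers from dsc->tmp[].  */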
7865 thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7866 struct regcache *regs,
7867 struct displaced_step_closure *dsc)
7869 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
7870 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
7873 /* 16-bit thumb instructions. */
7874 switch (op_bit_12_15)
7876 /* Shift (immediate), add, subtract, move and compare. */
7877 case 0: case 1: case 2: case 3:
7878 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7879 "shift/add/sub/mov/cmp",
7883 switch (op_bit_10_11)
7885 case 0: /* Data-processing */
7886 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7890 case 1: /* Special data instructions and branch and exchange. */
7892 unsigned short op = bits (insn1, 7, 9);
7893 if (op == 6 || op == 7) /* BX or BLX */
7894 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
7895 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
7896 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
7898 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
7902 default: /* LDR (literal) */
7903 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
7906 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
7907 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
7910 if (op_bit_10_11 < 2) /* Generate PC-relative address */
7911 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
7912 else /* Generate SP-relative address */
7913 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
7915 case 11: /* Misc 16-bit instructions */
7917 switch (bits (insn1, 8, 11))
7919 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
7920 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
7922 case 12: case 13: /* POP */
7923 if (bit (insn1, 8)) /* PC is in register list. */
7924 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
7926 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
7928 case 15: /* If-Then, and hints */
7929 if (bits (insn1, 0, 3))
7930 /* If-Then makes up to four following instructions conditional.
7931 IT instruction itself is not conditional, so handle it as a
7932 common unmodified instruction. */
7933 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
7936 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
7939 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
7944 if (op_bit_10_11 < 2) /* Store multiple registers */
7945 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
7946 else /* Load multiple registers */
7947 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
7949 case 13: /* Conditional branch and supervisor call */
7950 if (bits (insn1, 9, 11) != 7) /* conditional branch */
7951 err = thumb_copy_b (gdbarch, insn1, dsc);
7953 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
7955 case 14: /* Unconditional branch */
7956 err = thumb_copy_b (gdbarch, insn1, dsc);
7963 internal_error (__FILE__, __LINE__,
7964 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
7968 decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
7969 uint16_t insn1, uint16_t insn2,
7970 struct regcache *regs,
7971 struct displaced_step_closure *dsc)
7973 int rt = bits (insn2, 12, 15);
7974 int rn = bits (insn1, 0, 3);
7975 int op1 = bits (insn1, 7, 8);
7978 switch (bits (insn1, 5, 6))
7980 case 0: /* Load byte and memory hints */
7981 if (rt == 0xf) /* PLD/PLI */
7984 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
7985 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
7987 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7992 if (rn == 0xf) /* LDRB/LDRSB (literal) */
7993 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7996 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7997 "ldrb{reg, immediate}/ldrbt",
8002 case 1: /* Load halfword and memory hints. */
8003 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
8004 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8005 "pld/unalloc memhint", dsc);
8009 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
8012 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8016 case 2: /* Load word */
8018 int insn2_bit_8_11 = bits (insn2, 8, 11);
8021 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
8022 else if (op1 == 0x1) /* Encoding T3 */
8023 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
8025 else /* op1 == 0x0 */
8027 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
8028 /* LDR (immediate) */
8029 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
8030 dsc, bit (insn2, 8), 1);
8031 else if (insn2_bit_8_11 == 0xe) /* LDRT */
8032 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8035 /* LDR (register) */
8036 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
8042 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
8049 thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
8050 uint16_t insn2, struct regcache *regs,
8051 struct displaced_step_closure *dsc)
8054 unsigned short op = bit (insn2, 15);
8055 unsigned int op1 = bits (insn1, 11, 12);
8061 switch (bits (insn1, 9, 10))
8066 /* Load/store {dual, exclusive}, table branch. */
8067 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
8068 && bits (insn2, 5, 7) == 0)
8069 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
8072 /* PC is not allowed to be used in load/store {dual, exclusive} instructions. */
8074 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8075 "load/store dual/ex", dsc);
8077 else /* load/store multiple */
8079 switch (bits (insn1, 7, 8))
8081 case 0: case 3: /* SRS, RFE */
8082 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8085 case 1: case 2: /* LDM/STM/PUSH/POP */
8086 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
8093 /* Data-processing (shift register). */
8094 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
8097 default: /* Coprocessor instructions. */
8098 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
8103 case 2: /* op1 = 2 */
8104 if (op) /* Branch and misc control. */
8106 if (bit (insn2, 14) /* BLX/BL */
8107 || bit (insn2, 12) /* Unconditional branch */
8108 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
8109 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
8111 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8116 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
8118 int op = bits (insn1, 4, 8);
8119 int rn = bits (insn1, 0, 3);
8120 if ((op == 0 || op == 0xa) && rn == 0xf)
8121 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
8124 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8127 else /* Data processing (modified immediate). */
8128 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8132 case 3: /* op1 = 3 */
8133 switch (bits (insn1, 9, 10))
8137 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
8139 else /* NEON Load/Store and Store single data item */
8140 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8141 "neon elt/struct load/store",
8144 case 1: /* op1 = 3, bits (9, 10) == 1 */
8145 switch (bits (insn1, 7, 8))
8147 case 0: case 1: /* Data processing (register) */
8148 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8151 case 2: /* Multiply and absolute difference */
8152 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8153 "mul/mua/diff", dsc);
8155 case 3: /* Long multiply and divide */
8156 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8161 default: /* Coprocessor instructions */
8162 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
8171 internal_error (__FILE__, __LINE__,
8172 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
8177 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8178 CORE_ADDR to, struct regcache *regs,
8179 struct displaced_step_closure *dsc)
8181 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8183 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
8185 if (debug_displaced)
8186 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
8187 "at %.8lx\n", insn1, (unsigned long) from);
8190 dsc->insn_size = thumb_insn_size (insn1);
8191 if (thumb_insn_size (insn1) == 4)
8194 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
8195 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
8198 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
8202 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8203 CORE_ADDR to, struct regcache *regs,
8204 struct displaced_step_closure *dsc)
8207 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8210 /* Most displaced instructions use a 1-instruction scratch space, so set this
8211 here and override below if/when necessary. */
8213 dsc->insn_addr = from;
8214 dsc->scratch_base = to;
8215 dsc->cleanup = NULL;
8216 dsc->wrote_to_pc = 0;
8218 if (!displaced_in_arm_mode (regs))
8219 return thumb_process_displaced_insn (gdbarch, from, to, regs, dsc);
8223 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
8224 if (debug_displaced)
8225 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
8226 "at %.8lx\n", (unsigned long) insn,
8227 (unsigned long) from);
8229 if ((insn & 0xf0000000) == 0xf0000000)
8230 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
8231 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
8233 case 0x0: case 0x1: case 0x2: case 0x3:
8234 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
8237 case 0x4: case 0x5: case 0x6:
8238 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
8242 err = arm_decode_media (gdbarch, insn, dsc);
8245 case 0x8: case 0x9: case 0xa: case 0xb:
8246 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
8249 case 0xc: case 0xd: case 0xe: case 0xf:
8250 err = arm_decode_svc_copro (gdbarch, insn, to, regs, dsc);
8255 internal_error (__FILE__, __LINE__,
8256 _("arm_process_displaced_insn: Instruction decode error"));
8259 /* Actually set up the scratch space for a displaced instruction. */
8262 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
8263 CORE_ADDR to, struct displaced_step_closure *dsc)
8265 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8266 unsigned int i, len, offset;
8267 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8268 int size = dsc->is_thumb? 2 : 4;
8269 const unsigned char *bkp_insn;
8272 /* Poke modified instruction(s). */
8273 for (i = 0; i < dsc->numinsns; i++)
8275 if (debug_displaced)
8277 fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
8279 fprintf_unfiltered (gdb_stdlog, "%.8lx",
8282 fprintf_unfiltered (gdb_stdlog, "%.4x",
8283 (unsigned short)dsc->modinsn[i]);
8285 fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
8286 (unsigned long) to + offset);
8289 write_memory_unsigned_integer (to + offset, size,
8290 byte_order_for_code,
8295 /* Choose the correct breakpoint instruction. */
8298 bkp_insn = tdep->thumb_breakpoint;
8299 len = tdep->thumb_breakpoint_size;
8303 bkp_insn = tdep->arm_breakpoint;
8304 len = tdep->arm_breakpoint_size;
8307 /* Put breakpoint afterwards. */
8308 write_memory (to + offset, bkp_insn, len);
8310 if (debug_displaced)
8311 fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
8312 paddress (gdbarch, from), paddress (gdbarch, to));
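/* The resulting scratch-space layout is simply the modified instruction
   words followed by a breakpoint: for a single rewritten ARM instruction,
   [to + 0: modinsn[0]] [to + 4: ARM breakpoint]; for a rewritten 32-bit
   Thumb instruction (two 16-bit halves), [to + 0: modinsn[0]]
   [to + 2: modinsn[1]] [to + 4: Thumb breakpoint].  */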
8315 /* Entry point for copying an instruction into scratch space for displaced
8318 struct displaced_step_closure *
8319 arm_displaced_step_copy_insn (struct gdbarch *gdbarch,
8320 CORE_ADDR from, CORE_ADDR to,
8321 struct regcache *regs)
8323 struct displaced_step_closure *dsc
8324 = xmalloc (sizeof (struct displaced_step_closure));
8325 arm_process_displaced_insn (gdbarch, from, to, regs, dsc);
8326 arm_displaced_init_closure (gdbarch, from, to, dsc);
8331 /* Entry point for cleaning things up after a displaced instruction has been
8335 arm_displaced_step_fixup (struct gdbarch *gdbarch,
8336 struct displaced_step_closure *dsc,
8337 CORE_ADDR from, CORE_ADDR to,
8338 struct regcache *regs)
8341 dsc->cleanup (gdbarch, regs, dsc);
8343 if (!dsc->wrote_to_pc)
8344 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
8345 dsc->insn_addr + dsc->insn_size);
8349 #include "bfd-in2.h"
8350 #include "libcoff.h"
8353 gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
8355 struct gdbarch *gdbarch = info->application_data;
8357 if (arm_pc_is_thumb (gdbarch, memaddr))
8359 static asymbol *asym;
8360 static combined_entry_type ce;
8361 static struct coff_symbol_struct csym;
8362 static struct bfd fake_bfd;
8363 static bfd_target fake_target;
8365 if (csym.native == NULL)
8367 /* Create a fake symbol vector containing a Thumb symbol.
8368 This is solely so that the code in print_insn_little_arm()
8369 and print_insn_big_arm() in opcodes/arm-dis.c will detect
8370 the presence of a Thumb symbol and switch to decoding
8371 Thumb instructions. */
8373 fake_target.flavour = bfd_target_coff_flavour;
8374 fake_bfd.xvec = &fake_target;
8375 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
8377 csym.symbol.the_bfd = &fake_bfd;
8378 csym.symbol.name = "fake";
8379 asym = (asymbol *) & csym;
8382 memaddr = UNMAKE_THUMB_ADDR (memaddr);
8383 info->symbols = &asym;
8386 info->symbols = NULL;
8388 if (info->endian == BFD_ENDIAN_BIG)
8389 return print_insn_big_arm (memaddr, info);
8391 return print_insn_little_arm (memaddr, info);
8394 /* The following define instruction sequences that will cause ARM
8395 cpu's to take an undefined instruction trap. These are used to
8396 signal a breakpoint to GDB.
8398 The newer ARMv4T cpu's are capable of operating in ARM or Thumb
8399 modes. A different instruction is required for each mode. The ARM
8400 cpu's can also be big or little endian. Thus four different
8401 instructions are needed to support all cases.
8403 Note: ARMv4 defines several new instructions that will take the
8404 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
8405 not in fact add the new instructions. The new undefined
8406 instructions in ARMv4 are all instructions that had no defined
8407 behaviour in earlier chips. There is no guarantee that they will
8408 raise an exception; they may be treated as NOPs. In practice, it
8409 may only be safe to rely on instructions matching:
8411 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
8412 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
8413 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
8415 Even this may only be true if the condition predicate is true. The
8416 following use a condition predicate of ALWAYS so it is always TRUE.
8418 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
8419 and NetBSD all use a software interrupt rather than an undefined
8420 instruction to force a trap. This can be handled by the
8421 abi-specific code during establishment of the gdbarch vector. */
8423 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
8424 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
8425 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
8426 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
8428 static const char arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
8429 static const char arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
8430 static const char arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
8431 static const char arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
8433 /* Determine the type and size of breakpoint to insert at PCPTR. Uses
8434 the program counter value to determine whether a 16-bit or 32-bit
8435 breakpoint should be used. It returns a pointer to a string of
8436 bytes that encode a breakpoint instruction, stores the length of
8437 the string to *lenptr, and adjusts the program counter (if
8438 necessary) to point to the actual memory location where the
8439 breakpoint should be inserted. */
8441 static const unsigned char *
8442 arm_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr, int *lenptr)
8444 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8445 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8447 if (arm_pc_is_thumb (gdbarch, *pcptr))
8449 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
8451 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
8452 check whether we are replacing a 32-bit instruction. */
8453 if (tdep->thumb2_breakpoint != NULL)
8456 if (target_read_memory (*pcptr, buf, 2) == 0)
8458 unsigned short inst1;
8459 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
8460 if (thumb_insn_size (inst1) == 4)
8462 *lenptr = tdep->thumb2_breakpoint_size;
8463 return tdep->thumb2_breakpoint;
8468 *lenptr = tdep->thumb_breakpoint_size;
8469 return tdep->thumb_breakpoint;
8473 *lenptr = tdep->arm_breakpoint_size;
8474 return tdep->arm_breakpoint;
8479 arm_remote_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr,
8482 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8484 arm_breakpoint_from_pc (gdbarch, pcptr, kindptr);
8486 if (arm_pc_is_thumb (gdbarch, *pcptr) && *kindptr == 4)
8487 /* The documented magic value for a 32-bit Thumb-2 breakpoint, so
8488 that this is not confused with a 32-bit ARM breakpoint. */
8492 /* Extract from an array REGBUF containing the (raw) register state a
8493 function return value of type TYPE, and copy that, in virtual
8494 format, into VALBUF. */
8497 arm_extract_return_value (struct type *type, struct regcache *regs,
8500 struct gdbarch *gdbarch = get_regcache_arch (regs);
8501 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8503 if (TYPE_CODE_FLT == TYPE_CODE (type))
8505 switch (gdbarch_tdep (gdbarch)->fp_model)
8509 /* The value is in register F0 in internal format. We need to
8510 extract the raw value and then convert it to the desired
8512 bfd_byte tmpbuf[FP_REGISTER_SIZE];
8514 regcache_cooked_read (regs, ARM_F0_REGNUM, tmpbuf);
8515 convert_from_extended (floatformat_from_type (type), tmpbuf,
8516 valbuf, gdbarch_byte_order (gdbarch));
8520 case ARM_FLOAT_SOFT_FPA:
8521 case ARM_FLOAT_SOFT_VFP:
8522 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8523 not using the VFP ABI code. */
8525 regcache_cooked_read (regs, ARM_A1_REGNUM, valbuf);
8526 if (TYPE_LENGTH (type) > 4)
8527 regcache_cooked_read (regs, ARM_A1_REGNUM + 1,
8528 valbuf + INT_REGISTER_SIZE);
8532 internal_error (__FILE__, __LINE__,
8533 _("arm_extract_return_value: "
8534 "Floating point model not supported"));
8538 else if (TYPE_CODE (type) == TYPE_CODE_INT
8539 || TYPE_CODE (type) == TYPE_CODE_CHAR
8540 || TYPE_CODE (type) == TYPE_CODE_BOOL
8541 || TYPE_CODE (type) == TYPE_CODE_PTR
8542 || TYPE_CODE (type) == TYPE_CODE_REF
8543 || TYPE_CODE (type) == TYPE_CODE_ENUM)
8545 /* If the type is a plain integer, then the access is
8546 straight-forward. Otherwise we have to play around a bit
8548 int len = TYPE_LENGTH (type);
8549 int regno = ARM_A1_REGNUM;
8554 /* By using store_unsigned_integer we avoid having to do
8555 anything special for small big-endian values. */
8556 regcache_cooked_read_unsigned (regs, regno++, &tmp);
8557 store_unsigned_integer (valbuf,
8558 (len > INT_REGISTER_SIZE
8559 ? INT_REGISTER_SIZE : len),
8561 len -= INT_REGISTER_SIZE;
8562 valbuf += INT_REGISTER_SIZE;
8567 /* For a structure or union the behaviour is as if the value had
8568 been stored to word-aligned memory and then loaded into
8569 registers with 32-bit load instruction(s). */
8570 int len = TYPE_LENGTH (type);
8571 int regno = ARM_A1_REGNUM;
8572 bfd_byte tmpbuf[INT_REGISTER_SIZE];
8576 regcache_cooked_read (regs, regno++, tmpbuf);
8577 memcpy (valbuf, tmpbuf,
8578 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
8579 len -= INT_REGISTER_SIZE;
8580 valbuf += INT_REGISTER_SIZE;
8586 /* Will a function return an aggregate type in memory or in a
8587 register? Return 0 if an aggregate type can be returned in a
8588 register, 1 if it must be returned in memory. */
8591 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
8594 enum type_code code;
8596 CHECK_TYPEDEF (type);
8598 /* In the ARM ABI, "integer" like aggregate types are returned in
8599 registers. For an aggregate type to be integer like, its size
8600 must be less than or equal to INT_REGISTER_SIZE and the
8601 offset of each addressable subfield must be zero. Note that bit
8602 fields are not addressable, and all addressable subfields of
8603 unions always start at offset zero.
8605 This function is based on the behaviour of GCC 2.95.1.
8606 See: gcc/arm.c: arm_return_in_memory() for details.
8608 Note: Versions of GCC before 2.95.2 do not set up the
8609 parameters correctly for a function returning the following
8610 structure: struct { float f;}; This should be returned in memory,
8611 not a register. Richard Earnshaw sent me a patch, but I do not
8612 know of any way to detect if a function like the above has been
8613 compiled with the correct calling convention. */
8615 /* All aggregate types that won't fit in a register must be returned in memory. */
8617 if (TYPE_LENGTH (type) > INT_REGISTER_SIZE)
8622 /* The AAPCS says all aggregates not larger than a word are returned in a register. */
8624 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
8627 /* The only aggregate types that can be returned in a register are
8628 structs and unions. Arrays must be returned in memory. */
8629 code = TYPE_CODE (type);
8630 if ((TYPE_CODE_STRUCT != code) && (TYPE_CODE_UNION != code))
8635 /* Assume all other aggregate types can be returned in a register.
8636 Run a check for structures, unions and arrays. */
8639 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
8642 /* Need to check if this struct/union is "integer" like. For
8643 this to be true, its size must be less than or equal to
8644 INT_REGISTER_SIZE and the offset of each addressable
8645 subfield must be zero. Note that bit fields are not
8646 addressable, and unions always start at offset zero. If any
8647 of the subfields is a floating point type, the struct/union
8648 cannot be an integer type. */
8650 /* For each field in the object, check:
8651 1) Is it FP? --> yes, nRc = 1;
8652 2) Is it addressable (bitpos != 0) and
8653 not packed (bitsize == 0)?
8657 for (i = 0; i < TYPE_NFIELDS (type); i++)
8659 enum type_code field_type_code;
8660 field_type_code = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
8663 /* Is it a floating point type field? */
8664 if (field_type_code == TYPE_CODE_FLT)
8670 /* If bitpos != 0, then we have to care about it. */
8671 if (TYPE_FIELD_BITPOS (type, i) != 0)
8673 /* Bitfields are not addressable. If the field bitsize is
8674 zero, then the field is not packed. Hence it cannot be
8675 a bitfield or any other packed type. */
8676 if (TYPE_FIELD_BITSIZE (type, i) == 0)
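/* Illustrative examples, not part of GDB, of how the APCS rule implemented
   above classifies some concrete C types:

     struct { char c; }            -- one word or less, no FP field,
                                      field at offset 0: in a register.
     struct { float f; }           -- floating point field: in memory.
     struct { short a; short b; }  -- second field is addressable at a
                                      nonzero offset: in memory.
     struct { int a; int b; }      -- larger than a word: in memory.  */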
8688 /* Write into appropriate registers a function return value of type
8689 TYPE, given in virtual format. */
8692 arm_store_return_value (struct type *type, struct regcache *regs,
8693 const gdb_byte *valbuf)
8695 struct gdbarch *gdbarch = get_regcache_arch (regs);
8696 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8698 if (TYPE_CODE (type) == TYPE_CODE_FLT)
8700 char buf[MAX_REGISTER_SIZE];
8702 switch (gdbarch_tdep (gdbarch)->fp_model)
8706 convert_to_extended (floatformat_from_type (type), buf, valbuf,
8707 gdbarch_byte_order (gdbarch));
8708 regcache_cooked_write (regs, ARM_F0_REGNUM, buf);
8711 case ARM_FLOAT_SOFT_FPA:
8712 case ARM_FLOAT_SOFT_VFP:
8713 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8714 not using the VFP ABI code. */
8716 regcache_cooked_write (regs, ARM_A1_REGNUM, valbuf);
8717 if (TYPE_LENGTH (type) > 4)
8718 regcache_cooked_write (regs, ARM_A1_REGNUM + 1,
8719 valbuf + INT_REGISTER_SIZE);
8723 internal_error (__FILE__, __LINE__,
8724 _("arm_store_return_value: Floating "
8725 "point model not supported"));
8729 else if (TYPE_CODE (type) == TYPE_CODE_INT
8730 || TYPE_CODE (type) == TYPE_CODE_CHAR
8731 || TYPE_CODE (type) == TYPE_CODE_BOOL
8732 || TYPE_CODE (type) == TYPE_CODE_PTR
8733 || TYPE_CODE (type) == TYPE_CODE_REF
8734 || TYPE_CODE (type) == TYPE_CODE_ENUM)
8736 if (TYPE_LENGTH (type) <= 4)
8738 /* Values of one word or less are zero/sign-extended and returned in r0. */
8740 bfd_byte tmpbuf[INT_REGISTER_SIZE];
8741 LONGEST val = unpack_long (type, valbuf);
8743 store_signed_integer (tmpbuf, INT_REGISTER_SIZE, byte_order, val);
8744 regcache_cooked_write (regs, ARM_A1_REGNUM, tmpbuf);
8748 /* Integral values greater than one word are stored in consecutive
8749 registers starting with r0. This will always be a multiple of
8750 the register size. */
8751 int len = TYPE_LENGTH (type);
8752 int regno = ARM_A1_REGNUM;
8756 regcache_cooked_write (regs, regno++, valbuf);
8757 len -= INT_REGISTER_SIZE;
8758 valbuf += INT_REGISTER_SIZE;
8764 /* For a structure or union the behaviour is as if the value had
8765 been stored to word-aligned memory and then loaded into
8766 registers with 32-bit load instruction(s). */
8767 int len = TYPE_LENGTH (type);
8768 int regno = ARM_A1_REGNUM;
8769 bfd_byte tmpbuf[INT_REGISTER_SIZE];
8773 memcpy (tmpbuf, valbuf,
8774 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
8775 regcache_cooked_write (regs, regno++, tmpbuf);
8776 len -= INT_REGISTER_SIZE;
8777 valbuf += INT_REGISTER_SIZE;
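/* Illustrative sketch, not part of GDB: a return value narrower than a
   word is widened to a full register image before being written to r0,
   which is what the unpack_long/store_signed_integer pair above does.  */

static unsigned int
example_widen_to_reg (signed char val)
{
  /* Sign extension happens through the wider signed type; a return value
     of -1, for example, produces the register image 0xffffffff.  */
  return (unsigned int) (int) val;
}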
8783 /* Handle function return values. */
8785 static enum return_value_convention
8786 arm_return_value (struct gdbarch *gdbarch, struct type *func_type,
8787 struct type *valtype, struct regcache *regcache,
8788 gdb_byte *readbuf, const gdb_byte *writebuf)
8790 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8791 enum arm_vfp_cprc_base_type vfp_base_type;
8794 if (arm_vfp_abi_for_function (gdbarch, func_type)
8795 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
8797 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
8798 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
8800 for (i = 0; i < vfp_base_count; i++)
8802 if (reg_char == 'q')
8805 arm_neon_quad_write (gdbarch, regcache, i,
8806 writebuf + i * unit_length);
8809 arm_neon_quad_read (gdbarch, regcache, i,
8810 readbuf + i * unit_length);
8817 sprintf (name_buf, "%c%d", reg_char, i);
8818 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8821 regcache_cooked_write (regcache, regnum,
8822 writebuf + i * unit_length);
8824 regcache_cooked_read (regcache, regnum,
8825 readbuf + i * unit_length);
8828 return RETURN_VALUE_REGISTER_CONVENTION;
8831 if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
8832 || TYPE_CODE (valtype) == TYPE_CODE_UNION
8833 || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
8835 if (tdep->struct_return == pcc_struct_return
8836 || arm_return_in_memory (gdbarch, valtype))
8837 return RETURN_VALUE_STRUCT_CONVENTION;
8841 arm_store_return_value (valtype, regcache, writebuf);
8844 arm_extract_return_value (valtype, regcache, readbuf);
8846 return RETURN_VALUE_REGISTER_CONVENTION;
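/* Illustrative sketch, not part of GDB: for a return value that qualifies
   for the VFP ABI, arm_return_value above builds one register name per
   ABI unit -- "s0", "s1", ... for single precision, "d0", "d1", ... for
   double precision, "q0", "q1", ... for vectors -- and transfers one unit
   per register.  Only the name construction is shown; the count is a
   hypothetical parameter.  */

#include <stdio.h>

static void
example_vfp_return_reg_names (char reg_char, int vfp_base_count)
{
  char name_buf[8];
  int i;

  for (i = 0; i < vfp_base_count; i++)
    {
      sprintf (name_buf, "%c%d", reg_char, i);
      /* E.g. a struct of three floats uses "s0", "s1" and "s2".  */
    }
}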
8851 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
8853 struct gdbarch *gdbarch = get_frame_arch (frame);
8854 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8855 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8857 char buf[INT_REGISTER_SIZE];
8859 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
8861 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
8865 *pc = extract_unsigned_integer (buf, INT_REGISTER_SIZE, byte_order);
8869 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
8870 return the target PC. Otherwise return 0. */
8873 arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
8877 CORE_ADDR start_addr;
8879 /* Find the starting address and name of the function containing the PC. */
8880 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
8883 /* If PC is in a Thumb call or return stub, return the address of the
8884 target PC, which is in a register. The thunk functions are called
8885 _call_via_xx, where x is the register name. The possible names
8886 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
8887 functions, named __ARM_call_via_r[0-7]. */
8888 if (strncmp (name, "_call_via_", 10) == 0
8889 || strncmp (name, "__ARM_call_via_", strlen ("__ARM_call_via_")) == 0)
8891 /* Use the name suffix to determine which register contains the target PC. */
8893 static char *table[15] =
8894 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
8895 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
8898 int offset = strlen (name) - 2;
8900 for (regno = 0; regno <= 14; regno++)
8901 if (strcmp (&name[offset], table[regno]) == 0)
8902 return get_frame_register_unsigned (frame, regno);
8905 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
8906 non-interworking calls to foo. We could decode the stubs
8907 to find the target but it's easier to use the symbol table. */
8908 namelen = strlen (name);
8909 if (name[0] == '_' && name[1] == '_'
8910 && ((namelen > 2 + strlen ("_from_thumb")
8911 && strncmp (name + namelen - strlen ("_from_thumb"), "_from_thumb",
8912 strlen ("_from_thumb")) == 0)
8913 || (namelen > 2 + strlen ("_from_arm")
8914 && strncmp (name + namelen - strlen ("_from_arm"), "_from_arm",
8915 strlen ("_from_arm")) == 0)))
8918 int target_len = namelen - 2;
8919 struct minimal_symbol *minsym;
8920 struct objfile *objfile;
8921 struct obj_section *sec;
8923 if (name[namelen - 1] == 'b')
8924 target_len -= strlen ("_from_thumb");
8926 target_len -= strlen ("_from_arm");
8928 target_name = alloca (target_len + 1);
8929 memcpy (target_name, name + 2, target_len);
8930 target_name[target_len] = '\0';
8932 sec = find_pc_section (pc);
8933 objfile = (sec == NULL) ? NULL : sec->objfile;
8934 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
8936 return SYMBOL_VALUE_ADDRESS (minsym);
8941 return 0; /* not a stub */
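/* Illustrative sketch, not part of GDB: pulling the register number out of
   a "_call_via_<reg>" thunk name the same way the lookup above does, by
   comparing the final two characters of the name against the table.  */

#include <string.h>

static int
example_call_via_regno (const char *name)
{
  static const char *const table[15] =
    { "r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
      "r8", "r9", "sl", "fp", "ip", "sp", "lr" };
  const char *suffix = name + strlen (name) - 2;
  int regno;

  for (regno = 0; regno < 15; regno++)
    if (strcmp (suffix, table[regno]) == 0)
      return regno;

  return -1;	/* Not a recognized register suffix.  */
}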
8945 set_arm_command (char *args, int from_tty)
8947 printf_unfiltered (_("\
8948 \"set arm\" must be followed by an appropriate subcommand.\n"));
8949 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
8953 show_arm_command (char *args, int from_tty)
8955 cmd_show_list (showarmcmdlist, from_tty, "");
8959 arm_update_current_architecture (void)
8961 struct gdbarch_info info;
8963 /* If the current architecture is not ARM, we have nothing to do. */
8964 if (gdbarch_bfd_arch_info (target_gdbarch)->arch != bfd_arch_arm)
8967 /* Update the architecture. */
8968 gdbarch_info_init (&info);
8970 if (!gdbarch_update_p (info))
8971 internal_error (__FILE__, __LINE__, _("could not update architecture"));
8975 set_fp_model_sfunc (char *args, int from_tty,
8976 struct cmd_list_element *c)
8978 enum arm_float_model fp_model;
8980 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
8981 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
8983 arm_fp_model = fp_model;
8987 if (fp_model == ARM_FLOAT_LAST)
8988 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
8991 arm_update_current_architecture ();
8995 show_fp_model (struct ui_file *file, int from_tty,
8996 struct cmd_list_element *c, const char *value)
8998 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch);
9000 if (arm_fp_model == ARM_FLOAT_AUTO
9001 && gdbarch_bfd_arch_info (target_gdbarch)->arch == bfd_arch_arm)
9002 fprintf_filtered (file, _("\
9003 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
9004 fp_model_strings[tdep->fp_model]);
9006 fprintf_filtered (file, _("\
9007 The current ARM floating point model is \"%s\".\n"),
9008 fp_model_strings[arm_fp_model]);
9012 arm_set_abi (char *args, int from_tty,
9013 struct cmd_list_element *c)
9015 enum arm_abi_kind arm_abi;
9017 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
9018 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
9020 arm_abi_global = arm_abi;
9024 if (arm_abi == ARM_ABI_LAST)
9025 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
9028 arm_update_current_architecture ();
9032 arm_show_abi (struct ui_file *file, int from_tty,
9033 struct cmd_list_element *c, const char *value)
9035 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch);
9037 if (arm_abi_global == ARM_ABI_AUTO
9038 && gdbarch_bfd_arch_info (target_gdbarch)->arch == bfd_arch_arm)
9039 fprintf_filtered (file, _("\
9040 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
9041 arm_abi_strings[tdep->arm_abi]);
9043 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
9048 arm_show_fallback_mode (struct ui_file *file, int from_tty,
9049 struct cmd_list_element *c, const char *value)
9051 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch);
9053 fprintf_filtered (file,
9054 _("The current execution mode assumed "
9055 "(when symbols are unavailable) is \"%s\".\n"),
9056 arm_fallback_mode_string);
9060 arm_show_force_mode (struct ui_file *file, int from_tty,
9061 struct cmd_list_element *c, const char *value)
9063 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch);
9065 fprintf_filtered (file,
9066 _("The current execution mode assumed "
9067 "(even when symbols are available) is \"%s\".\n"),
9068 arm_force_mode_string);
9071 /* If the user changes the register disassembly style used for info
9072 register and other commands, we have to also switch the style used
9073 in opcodes for disassembly output. This function is run in the "set
9074 arm disassembler" command, and does that. */
9077 set_disassembly_style_sfunc (char *args, int from_tty,
9078 struct cmd_list_element *c)
9080 set_disassembly_style ();
9083 /* Return the ARM register name corresponding to register I. */
9085 arm_register_name (struct gdbarch *gdbarch, int i)
9087 const int num_regs = gdbarch_num_regs (gdbarch);
9089 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
9090 && i >= num_regs && i < num_regs + 32)
9092 static const char *const vfp_pseudo_names[] = {
9093 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
9094 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
9095 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
9096 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
9099 return vfp_pseudo_names[i - num_regs];
9102 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
9103 && i >= num_regs + 32 && i < num_regs + 32 + 16)
9105 static const char *const neon_pseudo_names[] = {
9106 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
9107 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
9110 return neon_pseudo_names[i - num_regs - 32];
9113 if (i >= ARRAY_SIZE (arm_register_names))
9114 /* These registers are only supported on targets which supply
9115 an XML description. */
9118 return arm_register_names[i];
9122 set_disassembly_style (void)
9126 /* Find the style that the user wants. */
9127 for (current = 0; current < num_disassembly_options; current++)
9128 if (disassembly_style == valid_disassembly_styles[current])
9130 gdb_assert (current < num_disassembly_options);
9132 /* Synchronize the disassembler. */
9133 set_arm_regname_option (current);
9136 /* Test whether the coff symbol specific value corresponds to a Thumb
9140 coff_sym_is_thumb (int val)
9142 return (val == C_THUMBEXT
9143 || val == C_THUMBSTAT
9144 || val == C_THUMBEXTFUNC
9145 || val == C_THUMBSTATFUNC
9146 || val == C_THUMBLABEL);
9149 /* arm_coff_make_msymbol_special()
9150 arm_elf_make_msymbol_special()
9152 These functions test whether the COFF or ELF symbol corresponds to
9153 an address in thumb code, and set a "special" bit in a minimal
9154 symbol to indicate that it does. */
9157 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
9159 if (ARM_SYM_BRANCH_TYPE (&((elf_symbol_type *)sym)->internal_elf_sym)
9160 == ST_BRANCH_TO_THUMB)
9161 MSYMBOL_SET_SPECIAL (msym);
9165 arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
9167 if (coff_sym_is_thumb (val))
9168 MSYMBOL_SET_SPECIAL (msym);
9172 arm_objfile_data_free (struct objfile *objfile, void *arg)
9174 struct arm_per_objfile *data = arg;
9177 for (i = 0; i < objfile->obfd->section_count; i++)
9178 VEC_free (arm_mapping_symbol_s, data->section_maps[i]);
9182 arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
9185 const char *name = bfd_asymbol_name (sym);
9186 struct arm_per_objfile *data;
9187 VEC(arm_mapping_symbol_s) **map_p;
9188 struct arm_mapping_symbol new_map_sym;
9190 gdb_assert (name[0] == '$');
9191 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
9194 data = objfile_data (objfile, arm_objfile_data_key);
9197 data = OBSTACK_ZALLOC (&objfile->objfile_obstack,
9198 struct arm_per_objfile);
9199 set_objfile_data (objfile, arm_objfile_data_key, data);
9200 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
9201 objfile->obfd->section_count,
9202 VEC(arm_mapping_symbol_s) *);
9204 map_p = &data->section_maps[bfd_get_section (sym)->index];
9206 new_map_sym.value = sym->value;
9207 new_map_sym.type = name[1];
9209 /* Assume that most mapping symbols appear in order of increasing
9210 value. If they were randomly distributed, it would be faster to
9211 always push here and then sort at first use. */
9212 if (!VEC_empty (arm_mapping_symbol_s, *map_p))
9214 struct arm_mapping_symbol *prev_map_sym;
9216 prev_map_sym = VEC_last (arm_mapping_symbol_s, *map_p);
9217 if (prev_map_sym->value >= sym->value)
9220 idx = VEC_lower_bound (arm_mapping_symbol_s, *map_p, &new_map_sym,
9221 arm_compare_mapping_symbols);
9222 VEC_safe_insert (arm_mapping_symbol_s, *map_p, idx, &new_map_sym);
9227 VEC_safe_push (arm_mapping_symbol_s, *map_p, &new_map_sym);
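/* Illustrative note, not part of GDB: the ELF mapping symbols recorded
   above mark where ARM code ($a), Thumb code ($t) and literal data ($d)
   begin within a section; keeping the vector sorted by value lets the
   mode at a given address be found with a binary search.  A hypothetical
   helper describing the one-character type codes:  */

static const char *
example_mapping_symbol_kind (char type)
{
  switch (type)
    {
    case 'a':
      return "ARM code";
    case 't':
      return "Thumb code";
    case 'd':
      return "data";
    default:
      return "unknown";
    }
}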
9231 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
9233 struct gdbarch *gdbarch = get_regcache_arch (regcache);
9234 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
9236 /* If necessary, set the T bit. */
9239 ULONGEST val, t_bit;
9240 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
9241 t_bit = arm_psr_thumb_bit (gdbarch);
9242 if (arm_pc_is_thumb (gdbarch, pc))
9243 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9246 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
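/* Illustrative sketch, not part of GDB: the read-modify-write of the
   status register performed above, with T_BIT standing for whatever
   arm_psr_thumb_bit returns (the CPSR T bit on A/R profiles, the XPSR T
   bit on M profiles).  */

static unsigned int
example_update_t_bit (unsigned int psr, unsigned int t_bit, int pc_is_thumb)
{
  return pc_is_thumb ? (psr | t_bit) : (psr & ~t_bit);
}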
9251 /* Read the contents of a NEON quad register, by reading from two
9252 double registers. This is used to implement the quad pseudo
9253 registers, and for argument passing in case the quad registers are
9254 missing; vectors are passed in quad registers when using the VFP
9255 ABI, even if a NEON unit is not present. REGNUM is the index of
9256 the quad register, in [0, 15]. */
9258 static enum register_status
9259 arm_neon_quad_read (struct gdbarch *gdbarch, struct regcache *regcache,
9260 int regnum, gdb_byte *buf)
9263 gdb_byte reg_buf[8];
9264 int offset, double_regnum;
9265 enum register_status status;
9267 sprintf (name_buf, "d%d", regnum << 1);
9268 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9271 /* d0 is always the least significant half of q0. */
9272 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9277 status = regcache_raw_read (regcache, double_regnum, reg_buf);
9278 if (status != REG_VALID)
9280 memcpy (buf + offset, reg_buf, 8);
9282 offset = 8 - offset;
9283 status = regcache_raw_read (regcache, double_regnum + 1, reg_buf);
9284 if (status != REG_VALID)
9286 memcpy (buf + offset, reg_buf, 8);
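/* Illustrative sketch, not part of GDB: a quad register qN is read as the
   two consecutive double registers d(2N) and d(2N+1).  d(2N) is always
   the least significant half, so on a big-endian target it lands in the
   upper eight bytes of the buffer, which is what the offset arithmetic
   above arranges.  */

#include <string.h>

static void
example_compose_quad (const unsigned char d_even[8],
		      const unsigned char d_odd[8],
		      int big_endian, unsigned char quad[16])
{
  memcpy (quad + (big_endian ? 8 : 0), d_even, 8);
  memcpy (quad + (big_endian ? 0 : 8), d_odd, 8);
}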
9291 static enum register_status
9292 arm_pseudo_read (struct gdbarch *gdbarch, struct regcache *regcache,
9293 int regnum, gdb_byte *buf)
9295 const int num_regs = gdbarch_num_regs (gdbarch);
9297 gdb_byte reg_buf[8];
9298 int offset, double_regnum;
9300 gdb_assert (regnum >= num_regs);
9303 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
9304 /* Quad-precision register. */
9305 return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
9308 enum register_status status;
9310 /* Single-precision register. */
9311 gdb_assert (regnum < 32);
9313 /* s0 is always the least significant half of d0. */
9314 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9315 offset = (regnum & 1) ? 0 : 4;
9317 offset = (regnum & 1) ? 4 : 0;
9319 sprintf (name_buf, "d%d", regnum >> 1);
9320 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9323 status = regcache_raw_read (regcache, double_regnum, reg_buf);
9324 if (status == REG_VALID)
9325 memcpy (buf, reg_buf + offset, 4);
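/* Illustrative sketch, not part of GDB: each single-precision pseudo sN
   occupies one half of the double register d(N/2); which half depends on
   the parity of N and on the target byte order, matching the offset
   computation above.  */

static int
example_single_offset_in_double (int regnum, int big_endian)
{
  if (big_endian)
    return (regnum & 1) ? 0 : 4;
  return (regnum & 1) ? 4 : 0;
}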
9330 /* Store the contents of BUF to a NEON quad register, by writing to
9331 two double registers. This is used to implement the quad pseudo
9332 registers, and for argument passing in case the quad registers are
9333 missing; vectors are passed in quad registers when using the VFP
9334 ABI, even if a NEON unit is not present. REGNUM is the index
9335 of the quad register, in [0, 15]. */
9338 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
9339 int regnum, const gdb_byte *buf)
9342 gdb_byte reg_buf[8];
9343 int offset, double_regnum;
9345 sprintf (name_buf, "d%d", regnum << 1);
9346 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9349 /* d0 is always the least significant half of q0. */
9350 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9355 regcache_raw_write (regcache, double_regnum, buf + offset);
9356 offset = 8 - offset;
9357 regcache_raw_write (regcache, double_regnum + 1, buf + offset);
9361 arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
9362 int regnum, const gdb_byte *buf)
9364 const int num_regs = gdbarch_num_regs (gdbarch);
9366 gdb_byte reg_buf[8];
9367 int offset, double_regnum;
9369 gdb_assert (regnum >= num_regs);
9372 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
9373 /* Quad-precision register. */
9374 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
9377 /* Single-precision register. */
9378 gdb_assert (regnum < 32);
9380 /* s0 is always the least significant half of d0. */
9381 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9382 offset = (regnum & 1) ? 0 : 4;
9384 offset = (regnum & 1) ? 4 : 0;
9386 sprintf (name_buf, "d%d", regnum >> 1);
9387 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9390 regcache_raw_read (regcache, double_regnum, reg_buf);
9391 memcpy (reg_buf + offset, buf, 4);
9392 regcache_raw_write (regcache, double_regnum, reg_buf);
9396 static struct value *
9397 value_of_arm_user_reg (struct frame_info *frame, const void *baton)
9399 const int *reg_p = baton;
9400 return value_of_register (*reg_p, frame);
9403 static enum gdb_osabi
9404 arm_elf_osabi_sniffer (bfd *abfd)
9406 unsigned int elfosabi;
9407 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
9409 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
9411 if (elfosabi == ELFOSABI_ARM)
9412 /* GNU tools use this value. Check note sections in this case, also. */
9414 bfd_map_over_sections (abfd,
9415 generic_elf_osabi_sniff_abi_tag_sections,
9418 /* Anything else will be handled by the generic ELF sniffer. */
9423 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
9424 struct reggroup *group)
9426 /* FPS register's type is INT, but belongs to float_reggroup. Besides
9427 this, the FPS register belongs to save_reggroup, restore_reggroup, and
9428 all_reggroup, of course. */
9429 if (regnum == ARM_FPS_REGNUM)
9430 return (group == float_reggroup
9431 || group == save_reggroup
9432 || group == restore_reggroup
9433 || group == all_reggroup);
9435 return default_register_reggroup_p (gdbarch, regnum, group);
9439 /* Initialize the current architecture based on INFO. If possible,
9440 re-use an architecture from ARCHES, which is a list of
9441 architectures already created during this debugging session.
9443 Called e.g. at program startup, when reading a core file, and when
9444 reading a binary file. */
9446 static struct gdbarch *
9447 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
9449 struct gdbarch_tdep *tdep;
9450 struct gdbarch *gdbarch;
9451 struct gdbarch_list *best_arch;
9452 enum arm_abi_kind arm_abi = arm_abi_global;
9453 enum arm_float_model fp_model = arm_fp_model;
9454 struct tdesc_arch_data *tdesc_data = NULL;
9456 int have_vfp_registers = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
9458 int have_fpa_registers = 1;
9459 const struct target_desc *tdesc = info.target_desc;
9461 /* If we have an object to base this architecture on, try to determine
9464 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
9466 int ei_osabi, e_flags;
9468 switch (bfd_get_flavour (info.abfd))
9470 case bfd_target_aout_flavour:
9471 /* Assume it's an old APCS-style ABI. */
9472 arm_abi = ARM_ABI_APCS;
9475 case bfd_target_coff_flavour:
9476 /* Assume it's an old APCS-style ABI. */
9478 arm_abi = ARM_ABI_APCS;
9481 case bfd_target_elf_flavour:
9482 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
9483 e_flags = elf_elfheader (info.abfd)->e_flags;
9485 if (ei_osabi == ELFOSABI_ARM)
9487 /* GNU tools used to use this value, but do not for EABI
9488 objects. There's nowhere to tag an EABI version
9489 anyway, so assume APCS. */
9490 arm_abi = ARM_ABI_APCS;
9492 else if (ei_osabi == ELFOSABI_NONE)
9494 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
9495 int attr_arch, attr_profile;
9499 case EF_ARM_EABI_UNKNOWN:
9500 /* Assume GNU tools. */
9501 arm_abi = ARM_ABI_APCS;
9504 case EF_ARM_EABI_VER4:
9505 case EF_ARM_EABI_VER5:
9506 arm_abi = ARM_ABI_AAPCS;
9507 /* EABI binaries default to VFP float ordering.
9508 They may also contain build attributes that can
9509 be used to identify if the VFP argument-passing
9511 if (fp_model == ARM_FLOAT_AUTO)
9514 switch (bfd_elf_get_obj_attr_int (info.abfd,
9519 /* "The user intended FP parameter/result
9520 passing to conform to AAPCS, base
9522 fp_model = ARM_FLOAT_SOFT_VFP;
9525 /* "The user intended FP parameter/result
9526 passing to conform to AAPCS, VFP
9528 fp_model = ARM_FLOAT_VFP;
9531 /* "The user intended FP parameter/result
9532 passing to conform to tool chain-specific
9533 conventions" - we don't know any such
9534 conventions, so leave it as "auto". */
9537 /* Attribute value not mentioned in the
9538 October 2008 ABI, so leave it as
9543 fp_model = ARM_FLOAT_SOFT_VFP;
9549 /* Leave it as "auto". */
9550 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
9555 /* Detect M-profile programs. This only works if the
9556 executable file includes build attributes; GCC does
9557 copy them to the executable, but e.g. RealView does not. */
9559 attr_arch = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9561 attr_profile = bfd_elf_get_obj_attr_int (info.abfd,
9563 Tag_CPU_arch_profile);
9564 /* GCC specifies the profile for v6-M; RealView only
9565 specifies the profile for architectures starting with
9566 V7 (as opposed to architectures with a tag
9567 numerically greater than TAG_CPU_ARCH_V7). */
9568 if (!tdesc_has_registers (tdesc)
9569 && (attr_arch == TAG_CPU_ARCH_V6_M
9570 || attr_arch == TAG_CPU_ARCH_V6S_M
9571 || attr_profile == 'M'))
9572 tdesc = tdesc_arm_with_m;
9576 if (fp_model == ARM_FLOAT_AUTO)
9578 int e_flags = elf_elfheader (info.abfd)->e_flags;
9580 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
9583 /* Leave it as "auto". Strictly speaking this case
9584 means FPA, but almost nobody uses that now, and
9585 many toolchains fail to set the appropriate bits
9586 for the floating-point model they use. */
9588 case EF_ARM_SOFT_FLOAT:
9589 fp_model = ARM_FLOAT_SOFT_FPA;
9591 case EF_ARM_VFP_FLOAT:
9592 fp_model = ARM_FLOAT_VFP;
9594 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
9595 fp_model = ARM_FLOAT_SOFT_VFP;
9600 if (e_flags & EF_ARM_BE8)
9601 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
9606 /* Leave it as "auto". */
9611 /* Check any target description for validity. */
9612 if (tdesc_has_registers (tdesc))
9614 /* For most registers we require GDB's default names; but also allow
9615 the numeric names for sp / lr / pc, as a convenience. */
9616 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
9617 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
9618 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
9620 const struct tdesc_feature *feature;
9623 feature = tdesc_find_feature (tdesc,
9624 "org.gnu.gdb.arm.core");
9625 if (feature == NULL)
9627 feature = tdesc_find_feature (tdesc,
9628 "org.gnu.gdb.arm.m-profile");
9629 if (feature == NULL)
9635 tdesc_data = tdesc_data_alloc ();
9638 for (i = 0; i < ARM_SP_REGNUM; i++)
9639 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9640 arm_register_names[i]);
9641 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9644 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9647 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9651 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9652 ARM_PS_REGNUM, "xpsr");
9654 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9655 ARM_PS_REGNUM, "cpsr");
9659 tdesc_data_cleanup (tdesc_data);
9663 feature = tdesc_find_feature (tdesc,
9664 "org.gnu.gdb.arm.fpa");
9665 if (feature != NULL)
9668 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
9669 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9670 arm_register_names[i]);
9673 tdesc_data_cleanup (tdesc_data);
9678 have_fpa_registers = 0;
9680 feature = tdesc_find_feature (tdesc,
9681 "org.gnu.gdb.xscale.iwmmxt");
9682 if (feature != NULL)
9684 static const char *const iwmmxt_names[] = {
9685 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9686 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9687 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9688 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9692 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
9694 &= tdesc_numbered_register (feature, tdesc_data, i,
9695 iwmmxt_names[i - ARM_WR0_REGNUM]);
9697 /* Check for the control registers, but do not fail if they
9699 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
9700 tdesc_numbered_register (feature, tdesc_data, i,
9701 iwmmxt_names[i - ARM_WR0_REGNUM]);
9703 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
9705 &= tdesc_numbered_register (feature, tdesc_data, i,
9706 iwmmxt_names[i - ARM_WR0_REGNUM]);
9710 tdesc_data_cleanup (tdesc_data);
9715 /* If we have a VFP unit, check whether the single precision registers
9716 are present. If not, then we will synthesize them as pseudo registers. */
9718 feature = tdesc_find_feature (tdesc,
9719 "org.gnu.gdb.arm.vfp");
9720 if (feature != NULL)
9722 static const char *const vfp_double_names[] = {
9723 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9724 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9725 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9726 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9729 /* Require the double precision registers. There must be either 16 or 32. */
9732 for (i = 0; i < 32; i++)
9734 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9736 vfp_double_names[i]);
9740 if (!valid_p && i == 16)
9743 /* Also require FPSCR. */
9744 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9745 ARM_FPSCR_REGNUM, "fpscr");
9748 tdesc_data_cleanup (tdesc_data);
9752 if (tdesc_unnumbered_register (feature, "s0") == 0)
9753 have_vfp_pseudos = 1;
9755 have_vfp_registers = 1;
9757 /* If we have VFP, also check for NEON. The architecture allows
9758 NEON without VFP (integer vector operations only), but GDB
9759 does not support that. */
9760 feature = tdesc_find_feature (tdesc,
9761 "org.gnu.gdb.arm.neon");
9762 if (feature != NULL)
9764 /* NEON requires 32 double-precision registers. */
9767 tdesc_data_cleanup (tdesc_data);
9771 /* If there are quad registers defined by the stub, use
9772 their type; otherwise (normally) provide them with
9773 the default type. */
9774 if (tdesc_unnumbered_register (feature, "q0") == 0)
9775 have_neon_pseudos = 1;
9782 /* If there is already a candidate, use it. */
9783 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
9785 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
9787 if (arm_abi != ARM_ABI_AUTO
9788 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
9791 if (fp_model != ARM_FLOAT_AUTO
9792 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
9795 /* There are various other properties in tdep that we do not
9796 need to check here: those derived from a target description,
9797 since gdbarches with a different target description are
9798 automatically disqualified. */
9800 /* Do check is_m, though, since it might come from the binary. */
9801 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
9804 /* Found a match. */
9808 if (best_arch != NULL)
9810 if (tdesc_data != NULL)
9811 tdesc_data_cleanup (tdesc_data);
9812 return best_arch->gdbarch;
9815 tdep = xcalloc (1, sizeof (struct gdbarch_tdep));
9816 gdbarch = gdbarch_alloc (&info, tdep);
9818 /* Record additional information about the architecture we are defining.
9819 These are gdbarch discriminators, like the OSABI. */
9820 tdep->arm_abi = arm_abi;
9821 tdep->fp_model = fp_model;
9823 tdep->have_fpa_registers = have_fpa_registers;
9824 tdep->have_vfp_registers = have_vfp_registers;
9825 tdep->have_vfp_pseudos = have_vfp_pseudos;
9826 tdep->have_neon_pseudos = have_neon_pseudos;
9827 tdep->have_neon = have_neon;
9830 switch (info.byte_order_for_code)
9832 case BFD_ENDIAN_BIG:
9833 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
9834 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
9835 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
9836 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
9840 case BFD_ENDIAN_LITTLE:
9841 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
9842 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
9843 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
9844 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
9849 internal_error (__FILE__, __LINE__,
9850 _("arm_gdbarch_init: bad byte order for float format"));
9853 /* On ARM targets char defaults to unsigned. */
9854 set_gdbarch_char_signed (gdbarch, 0);
9856 /* Note: for displaced stepping, this includes the breakpoint, and one word
9857 of additional scratch space. This setting isn't used for anything besides
9858 displaced stepping at present. */
9859 set_gdbarch_max_insn_length (gdbarch, 4 * DISPLACED_MODIFIED_INSNS);
9861 /* This should be low enough for everything. */
9862 tdep->lowest_pc = 0x20;
9863 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
9865 /* The default, for both APCS and AAPCS, is to return small
9866 structures in registers. */
9867 tdep->struct_return = reg_struct_return;
9869 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
9870 set_gdbarch_frame_align (gdbarch, arm_frame_align);
9872 set_gdbarch_write_pc (gdbarch, arm_write_pc);
9874 /* Frame handling. */
9875 set_gdbarch_dummy_id (gdbarch, arm_dummy_id);
9876 set_gdbarch_unwind_pc (gdbarch, arm_unwind_pc);
9877 set_gdbarch_unwind_sp (gdbarch, arm_unwind_sp);
9879 frame_base_set_default (gdbarch, &arm_normal_base);
9881 /* Address manipulation. */
9882 set_gdbarch_smash_text_address (gdbarch, arm_smash_text_address);
9883 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
9885 /* Advance PC across function entry code. */
9886 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
9888 /* Detect whether PC is in function epilogue. */
9889 set_gdbarch_in_function_epilogue_p (gdbarch, arm_in_function_epilogue_p);
9891 /* Skip trampolines. */
9892 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
9894 /* The stack grows downward. */
9895 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
9897 /* Breakpoint manipulation. */
9898 set_gdbarch_breakpoint_from_pc (gdbarch, arm_breakpoint_from_pc);
9899 set_gdbarch_remote_breakpoint_from_pc (gdbarch,
9900 arm_remote_breakpoint_from_pc);
9902 /* Information about registers, etc. */
9903 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
9904 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
9905 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
9906 set_gdbarch_register_type (gdbarch, arm_register_type);
9907 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
9909 /* This "info float" is FPA-specific. Use the generic version if we do not have FPA registers. */
9911 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
9912 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
9914 /* Internal <-> external register number maps. */
9915 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
9916 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
9918 set_gdbarch_register_name (gdbarch, arm_register_name);
9920 /* Returning results. */
9921 set_gdbarch_return_value (gdbarch, arm_return_value);
9924 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
9926 /* Minsymbol frobbing. */
9927 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
9928 set_gdbarch_coff_make_msymbol_special (gdbarch,
9929 arm_coff_make_msymbol_special);
9930 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
9932 /* Thumb-2 IT block support. */
9933 set_gdbarch_adjust_breakpoint_address (gdbarch,
9934 arm_adjust_breakpoint_address);
9936 /* Virtual tables. */
9937 set_gdbarch_vbit_in_delta (gdbarch, 1);
9939 /* Hook in the ABI-specific overrides, if they have been registered. */
9940 gdbarch_init_osabi (info, gdbarch);
9942 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
9944 /* Add some default predicates. */
9945 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
9946 dwarf2_append_unwinders (gdbarch);
9947 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
9948 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
9950 /* Now we have tuned the configuration, set a few final things,
9951 based on what the OS ABI has told us. */
9953 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
9954 binaries are always marked. */
9955 if (tdep->arm_abi == ARM_ABI_AUTO)
9956 tdep->arm_abi = ARM_ABI_APCS;
9958 /* Watchpoints are not steppable. */
9959 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
9961 /* We used to default to FPA for generic ARM, but almost nobody
9962 uses that now, and we now provide a way for the user to force
9963 the model. So default to the most useful variant. */
9964 if (tdep->fp_model == ARM_FLOAT_AUTO)
9965 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
9967 if (tdep->jb_pc >= 0)
9968 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
9970 /* Floating point sizes and format. */
9971 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
9972 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
9974 set_gdbarch_double_format
9975 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9976 set_gdbarch_long_double_format
9977 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9981 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
9982 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
9985 if (have_vfp_pseudos)
9987 /* NOTE: These are the only pseudo registers used by
9988 the ARM target at the moment. If more are added, a
9989 little more care in numbering will be needed. */
9991 int num_pseudos = 32;
9992 if (have_neon_pseudos)
9994 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
9995 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
9996 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
10001 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
10003 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
10005 /* Override tdesc_register_type to adjust the types of VFP
10006 registers for NEON. */
10007 set_gdbarch_register_type (gdbarch, arm_register_type);
10010 /* Add standard register aliases. We add aliases even for those
10011 names which are used by the current architecture - it's simpler,
10012 and does no harm, since nothing ever lists user registers. */
10013 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
10014 user_reg_add (gdbarch, arm_register_aliases[i].name,
10015 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
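/* Illustrative sketch, not part of GDB: how the legacy ELF header flags
   examined in arm_gdbarch_init above select a float model when no build
   attributes are available.  The flag values are assumptions taken from
   the usual elf/arm.h definitions.  */

#define EXAMPLE_EF_ARM_SOFT_FLOAT 0x200	/* assumed value */
#define EXAMPLE_EF_ARM_VFP_FLOAT  0x400	/* assumed value */

static const char *
example_float_model_from_e_flags (unsigned int e_flags)
{
  switch (e_flags & (EXAMPLE_EF_ARM_SOFT_FLOAT | EXAMPLE_EF_ARM_VFP_FLOAT))
    {
    case EXAMPLE_EF_ARM_SOFT_FLOAT:
      return "softfpa";
    case EXAMPLE_EF_ARM_VFP_FLOAT:
      return "vfp";
    case EXAMPLE_EF_ARM_SOFT_FLOAT | EXAMPLE_EF_ARM_VFP_FLOAT:
      return "softvfp";
    default:
      return "auto";	/* Historically FPA; left as "auto" above.  */
    }
}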
10021 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
10023 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
10028 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
10029 (unsigned long) tdep->lowest_pc);
10032 extern initialize_file_ftype _initialize_arm_tdep; /* -Wmissing-prototypes */
10035 _initialize_arm_tdep (void)
10037 struct ui_file *stb;
10039 struct cmd_list_element *new_set, *new_show;
10040 const char *setname;
10041 const char *setdesc;
10042 const char *const *regnames;
10044 static char *helptext;
10045 char regdesc[1024], *rdptr = regdesc;
10046 size_t rest = sizeof (regdesc);
10048 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
10050 arm_objfile_data_key
10051 = register_objfile_data_with_cleanup (NULL, arm_objfile_data_free);
10053 /* Add ourselves to objfile event chain. */
10054 observer_attach_new_objfile (arm_exidx_new_objfile);
10056 = register_objfile_data_with_cleanup (NULL, arm_exidx_data_free);
10058 /* Register an ELF OS ABI sniffer for ARM binaries. */
10059 gdbarch_register_osabi_sniffer (bfd_arch_arm,
10060 bfd_target_elf_flavour,
10061 arm_elf_osabi_sniffer);
10063 /* Initialize the standard target descriptions. */
10064 initialize_tdesc_arm_with_m ();
10065 initialize_tdesc_arm_with_iwmmxt ();
10066 initialize_tdesc_arm_with_vfpv2 ();
10067 initialize_tdesc_arm_with_vfpv3 ();
10068 initialize_tdesc_arm_with_neon ();
10070 /* Get the number of possible sets of register names defined in opcodes. */
10071 num_disassembly_options = get_arm_regname_num_options ();
10073 /* Add root prefix command for all "set arm"/"show arm" commands. */
10074 add_prefix_cmd ("arm", no_class, set_arm_command,
10075 _("Various ARM-specific commands."),
10076 &setarmcmdlist, "set arm ", 0, &setlist);
10078 add_prefix_cmd ("arm", no_class, show_arm_command,
10079 _("Various ARM-specific commands."),
10080 &showarmcmdlist, "show arm ", 0, &showlist);
10082 /* Sync the opcode insn printer with our register viewer. */
10083 parse_arm_disassembler_option ("reg-names-std");
10085 /* Initialize the array that will be passed to
10086 add_setshow_enum_cmd(). */
10087 valid_disassembly_styles
10088 = xmalloc ((num_disassembly_options + 1) * sizeof (char *));
10089 for (i = 0; i < num_disassembly_options; i++)
10091 numregs = get_arm_regnames (i, &setname, &setdesc, &regnames);
10092 valid_disassembly_styles[i] = setname;
10093 length = snprintf (rdptr, rest, "%s - %s\n", setname, setdesc);
10096 /* When we find the default names, tell the disassembler to use them. */
10098 if (!strcmp (setname, "std"))
10100 disassembly_style = setname;
10101 set_arm_regname_option (i);
10104 /* Mark the end of valid options. */
10105 valid_disassembly_styles[num_disassembly_options] = NULL;
10107 /* Create the help text. */
10108 stb = mem_fileopen ();
10109 fprintf_unfiltered (stb, "%s%s%s",
10110 _("The valid values are:\n"),
10112 _("The default is \"std\"."));
10113 helptext = ui_file_xstrdup (stb, NULL);
10114 ui_file_delete (stb);
10116 add_setshow_enum_cmd("disassembler", no_class,
10117 valid_disassembly_styles, &disassembly_style,
10118 _("Set the disassembly style."),
10119 _("Show the disassembly style."),
10121 set_disassembly_style_sfunc,
10122 NULL, /* FIXME: i18n: The disassembly style is
10124 &setarmcmdlist, &showarmcmdlist);
10126 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
10127 _("Set usage of ARM 32-bit mode."),
10128 _("Show usage of ARM 32-bit mode."),
10129 _("When off, a 26-bit PC will be used."),
10131 NULL, /* FIXME: i18n: Usage of ARM 32-bit
10133 &setarmcmdlist, &showarmcmdlist);
10135 /* Add a command to allow the user to force the FPU model. */
10136 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
10137 _("Set the floating point type."),
10138 _("Show the floating point type."),
10139 _("auto - Determine the FP type from the OS-ABI.\n\
10140 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
10141 fpa - FPA co-processor (GCC compiled).\n\
10142 softvfp - Software FP with pure-endian doubles.\n\
10143 vfp - VFP co-processor."),
10144 set_fp_model_sfunc, show_fp_model,
10145 &setarmcmdlist, &showarmcmdlist);
10147 /* Add a command to allow the user to force the ABI. */
10148 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
10150 _("Show the ABI."),
10151 NULL, arm_set_abi, arm_show_abi,
10152 &setarmcmdlist, &showarmcmdlist);
10154 /* Add two commands to allow the user to force the assumed
10156 add_setshow_enum_cmd ("fallback-mode", class_support,
10157 arm_mode_strings, &arm_fallback_mode_string,
10158 _("Set the mode assumed when symbols are unavailable."),
10159 _("Show the mode assumed when symbols are unavailable."),
10160 NULL, NULL, arm_show_fallback_mode,
10161 &setarmcmdlist, &showarmcmdlist);
10162 add_setshow_enum_cmd ("force-mode", class_support,
10163 arm_mode_strings, &arm_force_mode_string,
10164 _("Set the mode assumed even when symbols are available."),
10165 _("Show the mode assumed even when symbols are available."),
10166 NULL, NULL, arm_show_force_mode,
10167 &setarmcmdlist, &showarmcmdlist);
10169 /* Debugging flag. */
10170 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
10171 _("Set ARM debugging."),
10172 _("Show ARM debugging."),
10173 _("When on, arm-specific debugging is enabled."),
10175 NULL, /* FIXME: i18n: "ARM debugging is %s. */
10176 &setdebuglist, &showdebuglist);