1 /* Common target dependent code for GDB on ARM systems.
3 Copyright (C) 1988-1989, 1991-1993, 1995-1996, 1998-2012 Free
4 Software Foundation, Inc.
6 This file is part of GDB.
8 This program is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3 of the License, or
11 (at your option) any later version.
13 This program is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with this program. If not, see <http://www.gnu.org/licenses/>. */
21 #include <ctype.h> /* XXX for isupper (). */
28 #include "gdb_string.h"
29 #include "dis-asm.h" /* For register styles. */
31 #include "reggroups.h"
34 #include "arch-utils.h"
36 #include "frame-unwind.h"
37 #include "frame-base.h"
38 #include "trad-frame.h"
40 #include "dwarf2-frame.h"
42 #include "prologue-value.h"
44 #include "target-descriptions.h"
45 #include "user-regs.h"
49 #include "gdb/sim-arm.h"
52 #include "coff/internal.h"
55 #include "gdb_assert.h"
60 #include "features/arm-with-m.c"
61 #include "features/arm-with-m-fpa-layout.c"
62 #include "features/arm-with-m-vfp-d16.c"
63 #include "features/arm-with-iwmmxt.c"
64 #include "features/arm-with-vfpv2.c"
65 #include "features/arm-with-vfpv3.c"
66 #include "features/arm-with-neon.c"
/* Macros for setting and testing a bit in a minimal symbol that marks
   it as Thumb function.  The MSB of the minimal symbol's "info" field
   is used for this purpose.

   MSYMBOL_SET_SPECIAL	Actually sets the "special" bit.
   MSYMBOL_IS_SPECIAL	Tests the "special" bit in a minimal symbol.  */

/* Both macros expand to the same target-specific flag, so a symbol
   marked by the first is later recognized by the second.  */
#define MSYMBOL_SET_SPECIAL(msym)				\
	MSYMBOL_TARGET_FLAG_1 (msym) = 1

#define MSYMBOL_IS_SPECIAL(msym)				\
	MSYMBOL_TARGET_FLAG_1 (msym)
/* Per-objfile data used for mapping symbols.  */
static const struct objfile_data *arm_objfile_data_key;

/* A single ARM ELF mapping symbol ($a/$t/$d).
   NOTE(review): the member declarations and closing brace of this
   struct are not visible in this excerpt — confirm against the full
   source (expected: a section-relative value and a type character).  */
struct arm_mapping_symbol
typedef struct arm_mapping_symbol arm_mapping_symbol_s;
DEF_VEC_O(arm_mapping_symbol_s);

/* Per-objfile container: one sorted vector of mapping symbols per BFD
   section, indexed by section index (see arm_find_mapping_symbol).  */
struct arm_per_objfile
VEC(arm_mapping_symbol_s) **section_maps;
/* The list of available "set arm ..." and "show arm ..." commands.  */
static struct cmd_list_element *setarmcmdlist = NULL;
static struct cmd_list_element *showarmcmdlist = NULL;

/* The type of floating-point to use.  Keep this in sync with enum
   arm_float_model, and the help string in _initialize_arm_tdep.
   NOTE(review): the string entries of this array are not visible in
   this excerpt.  */
static const char *const fp_model_strings[] =

/* A variable that can be configured by the user.  */
static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
static const char *current_fp_model = "auto";

/* The ABI to use.  Keep this in sync with arm_abi_kind.
   NOTE(review): entries not visible in this excerpt.  */
static const char *const arm_abi_strings[] =

/* A variable that can be configured by the user.  */
static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
static const char *arm_abi_string = "auto";

/* The execution mode to assume.
   NOTE(review): entries not visible in this excerpt; presumably
   "auto"/"arm"/"thumb", matching the strcmp checks in
   arm_pc_is_thumb below.  */
static const char *const arm_mode_strings[] =

/* User-configurable fallback and forced execution modes, consulted by
   arm_pc_is_thumb when the symbol table gives no answer (fallback) or
   to override it entirely (force).  */
static const char *arm_fallback_mode_string = "auto";
static const char *arm_force_mode_string = "auto";

/* Internal override of the execution mode.  -1 means no override,
   0 means override to ARM mode, 1 means override to Thumb mode.
   The effect is the same as if arm_force_mode has been set by the
   user (except the internal override has precedence over a user's
   arm_force_mode override).  */
static int arm_override_mode = -1;

/* Number of different reg name sets (options).  */
static int num_disassembly_options;
/* The standard register names, and all the valid aliases for them.  Note
   that `fp', `sp' and `pc' are not added in this alias list, because they
   have been added as builtin user registers in
   std-regs.c:_initialize_frame_reg.

   NOTE(review): the struct declaration this initializer belongs to and
   the individual alias entries are not visible in this excerpt —
   only the category comments remain.  */
} arm_register_aliases[] = {
  /* Basic register numbers.  */
  /* Synonyms (argument and variable registers).  */
  /* Other platform-specific names for r9.  */
  /* Names used by GCC (not listed in the ARM EABI).  */
  /* A special name from the older ATPCS.  */
/* The standard GDB register names in GDB's numbering: sixteen core
   registers (r0-r12, sp, lr, pc) at indices 0-15, the eight FPA
   floating-point registers f0-f7 at 16-23, then the FPA status
   register and the program status register at 24-25.  */
static const char *const arm_register_names[] =
{
  /* Core registers, indices 0-15.  */
  "r0", "r1", "r2",  "r3",  "r4",  "r5", "r6", "r7",
  "r8", "r9", "r10", "r11", "r12", "sp", "lr", "pc",
  /* FPA registers, indices 16-23.  */
  "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
  /* FPA status (24) and current program status register (25).  */
  "fps", "cpsr"
};
/* Valid register name styles.  */
static const char **valid_disassembly_styles;

/* Disassembly style to use.  Default to "std" register names.  */
static const char *disassembly_style;

/* This is used to keep the bfd arch_info in sync with the disassembly
   style.  */
static void set_disassembly_style_sfunc(char *, int,
					struct cmd_list_element *);
static void set_disassembly_style (void);

/* Conversions between the FPA extended float format and host values.
   NOTE(review): the trailing parameter lists of these two prototypes
   are not visible in this excerpt.  */
static void convert_from_extended (const struct floatformat *, const void *,
static void convert_to_extended (const struct floatformat *, void *,

/* Accessors for NEON quad ("q") registers.  */
static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
						struct regcache *regcache,
						int regnum, gdb_byte *buf);
static void arm_neon_quad_write (struct gdbarch *gdbarch,
				 struct regcache *regcache,
				 int regnum, const gdb_byte *buf);

/* Size in bytes of the Thumb instruction whose first halfword is INST1
   (compared against 4 in thumb_analyze_prologue to detect 32-bit
   Thumb-2 encodings).  */
static int thumb_insn_size (unsigned short inst1);
/* Cache of per-frame information derived from prologue analysis.
   NOTE(review): the opening brace and the member declarations that the
   comments below describe (prev_sp, framesize, framereg) are not
   visible in this excerpt — confirm against the full source.  */
struct arm_prologue_cache

  /* The stack pointer at the time this frame was created; i.e. the
     caller's stack pointer when this function was called.  It is used
     to identify this frame.  */

  /* The frame base for this frame is just prev_sp - frame size.
     FRAMESIZE is the distance from the frame pointer to the
     initial stack pointer.  */

  /* The register used to hold the frame pointer for this frame.  */

  /* Saved register offsets.  */
  struct trad_frame_saved_reg *saved_regs;

/* Scan an ARM-mode prologue between PROLOGUE_START and PROLOGUE_END,
   filling CACHE when non-NULL (body defined elsewhere in this file).  */
static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
				       CORE_ADDR prologue_start,
				       CORE_ADDR prologue_end,
				       struct arm_prologue_cache *cache);
/* Architecture version for displaced stepping.  This affects the behaviour of
   certain instructions, and really should not be hard-wired.  */
#define DISPLACED_STEPPING_ARCH_VERSION		5

/* Addresses for calling Thumb functions have the bit 0 set.
   Here are some macros to test, set, or clear bit 0 of addresses.  */
/* Non-zero iff ADDR has its Thumb bit set.  */
#define IS_THUMB_ADDR(addr)	((addr) & 1)
/* ADDR with the Thumb bit set.  */
#define MAKE_THUMB_ADDR(addr)	((addr) | 1)
/* ADDR with the Thumb bit cleared.  */
#define UNMAKE_THUMB_ADDR(addr) ((addr) & ~1)
/* Set to true if the 32-bit mode is in use.  */

/* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode.
   NOTE(review): this excerpt is missing the declaration the comment
   above refers to, this function's return type, braces and return
   statements; presumably a different mask is returned for M-profile
   cores — confirm against the full source.  */
arm_psr_thumb_bit (struct gdbarch *gdbarch)
  if (gdbarch_tdep (gdbarch)->is_m)
/* Determine if FRAME is executing in Thumb mode.  Non-zero iff the
   unwound status register has the Thumb bit set.
   NOTE(review): the return type, braces and the declaration of `cpsr'
   are not visible in this excerpt.  */
arm_frame_is_thumb (struct frame_info *frame)
  ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));

  /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
     directly (from a signal frame or dummy frame) or by interpreting
     the saved LR (from a prologue or DWARF frame).  So consult it and
     trust the unwinders.  */
  cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);

  return (cpsr & t_bit) != 0;
/* Callback for VEC_lower_bound.  Orders mapping symbols by ascending
   section-relative value.
   NOTE(review): the return type and braces are not visible in this
   excerpt.  */
arm_compare_mapping_symbols (const struct arm_mapping_symbol *lhs,
			     const struct arm_mapping_symbol *rhs)
  return lhs->value < rhs->value;
/* Search for the mapping symbol covering MEMADDR.  If one is found,
   return its type.  Otherwise, return 0.  If START is non-NULL,
   set *START to the location of the mapping symbol.
   NOTE(review): several structural lines (return type, braces, the
   guards around the *START assignments and the early returns) are not
   visible in this excerpt.  */
arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
  struct obj_section *sec;

  /* If there are mapping symbols, consult them.  */
  sec = find_pc_section (memaddr);
      struct arm_per_objfile *data;
      VEC(arm_mapping_symbol_s) *map;
      /* Search key: MEMADDR translated to a section-relative offset.  */
      struct arm_mapping_symbol map_key = { memaddr - obj_section_addr (sec),

      data = objfile_data (sec->objfile, arm_objfile_data_key);

	  map = data->section_maps[sec->the_bfd_section->index];
	  if (!VEC_empty (arm_mapping_symbol_s, map))
	      struct arm_mapping_symbol *map_sym;

	      idx = VEC_lower_bound (arm_mapping_symbol_s, map, &map_key,
				     arm_compare_mapping_symbols);

	      /* VEC_lower_bound finds the earliest ordered insertion
		 point.  If the following symbol starts at this exact
		 address, we use that; otherwise, the preceding
		 mapping symbol covers this address.  */
	      if (idx < VEC_length (arm_mapping_symbol_s, map))
		  map_sym = VEC_index (arm_mapping_symbol_s, map, idx);
		  if (map_sym->value == map_key.value)
			*start = map_sym->value + obj_section_addr (sec);
		      return map_sym->type;

	      /* Otherwise fall back to the symbol before the insertion
		 point.  */
	      map_sym = VEC_index (arm_mapping_symbol_s, map, idx - 1);
		*start = map_sym->value + obj_section_addr (sec);
	      return map_sym->type;
/* Determine if the program counter specified in MEMADDR is in a Thumb
   function.  This function should be called for addresses unrelated to
   any executing frame; otherwise, prefer arm_frame_is_thumb.
   NOTE(review): the return type, braces and several return statements
   between the checks below are not visible in this excerpt.  The
   checks are ordered from most to least authoritative.  */
arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
  struct minimal_symbol *sym;

  struct displaced_step_closure* dsc
    = get_displaced_step_closure_by_addr(memaddr);

  /* If checking the mode of displaced instruction in copy area, the mode
     should be determined by instruction on the original address.  */
      fprintf_unfiltered (gdb_stdlog,
			  "displaced: check mode of %.8lx instead of %.8lx\n",
			  (unsigned long) dsc->insn_addr,
			  (unsigned long) memaddr);
      memaddr = dsc->insn_addr;

  /* If bit 0 of the address is set, assume this is a Thumb address.  */
  if (IS_THUMB_ADDR (memaddr))

  /* Respect internal mode override if active.  */
  if (arm_override_mode != -1)
    return arm_override_mode;

  /* If the user wants to override the symbol table, let him.  */
  if (strcmp (arm_force_mode_string, "arm") == 0)
  if (strcmp (arm_force_mode_string, "thumb") == 0)

  /* ARM v6-M and v7-M are always in Thumb mode.  */
  if (gdbarch_tdep (gdbarch)->is_m)

  /* If there are mapping symbols, consult them.  */
  type = arm_find_mapping_symbol (memaddr, NULL);

  /* Thumb functions have a "special" bit set in minimal symbols.  */
  sym = lookup_minimal_symbol_by_pc (memaddr);
    return (MSYMBOL_IS_SPECIAL (sym));

  /* If the user wants to override the fallback mode, let them.  */
  if (strcmp (arm_fallback_mode_string, "arm") == 0)
  if (strcmp (arm_fallback_mode_string, "thumb") == 0)

  /* If we couldn't find any symbol, but we're talking to a running
     target, then trust the current value of $cpsr.  This lets
     "display/i $pc" always show the correct mode (though if there is
     a symbol table we will not reach here, so it still may not be
     displayed in the mode it will be executed).  */
  if (target_has_registers)
    return arm_frame_is_thumb (get_current_frame ());

  /* Otherwise we're out of luck; we assume ARM.  */
/* Remove useless bits from addresses in a running program: the Thumb
   bit for Thumb addresses, otherwise mask to a word-aligned 26-bit
   address (0x03fffffc).
   NOTE(review): the return type, braces and the condition selecting
   between the two returns are not visible in this excerpt.  */
arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
    return UNMAKE_THUMB_ADDR (val);
    return (val & 0x03fffffc);
/* When reading symbols, we need to zap the low bit of the address,
   which may be set to 1 for Thumb functions.
   NOTE(review): the return type and body of this function are not
   visible in this excerpt.  */
arm_smash_text_address (struct gdbarch *gdbarch, CORE_ADDR val)
/* Return 1 if PC is the start of a compiler helper function which
   can be safely ignored during prologue skipping.  IS_THUMB is true
   if the function is known to be a Thumb function due to the way it
   is being called.
   NOTE(review): the return type, braces and the return statements
   following each name check are not visible in this excerpt.  */
skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  struct minimal_symbol *msym;

  msym = lookup_minimal_symbol_by_pc (pc);
      && SYMBOL_VALUE_ADDRESS (msym) == pc
      && SYMBOL_LINKAGE_NAME (msym) != NULL)
      const char *name = SYMBOL_LINKAGE_NAME (msym);

      /* The GNU linker's Thumb call stub to foo is named
	 __foo_from_thumb (hence the substring match below).  */
      if (strstr (name, "_from_thumb") != NULL)

      /* On soft-float targets, __truncdfsf2 is called to convert promoted
	 arguments to their argument types in non-prototyped
	 functions.  */
      if (strncmp (name, "__truncdfsf2", strlen ("__truncdfsf2")) == 0)
      if (strncmp (name, "__aeabi_d2f", strlen ("__aeabi_d2f")) == 0)

      /* Internal functions related to thread-local storage.  */
      if (strncmp (name, "__tls_get_addr", strlen ("__tls_get_addr")) == 0)
      if (strncmp (name, "__aeabi_read_tp", strlen ("__aeabi_read_tp")) == 0)

  /* If we run against a stripped glibc, we may be unable to identify
     special functions by name.  Check for one important case,
     __aeabi_read_tp, by comparing the *code* against the default
     implementation (this is hand-written ARM assembler in glibc).  */
      && read_memory_unsigned_integer (pc, 4, byte_order_for_code)
	 == 0xe3e00a0f /* mov r0, #0xffff0fff */
      && read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code)
	 == 0xe240f01f) /* sub pc, r0, #31 */
/* Support routines for instruction parsing.  */

/* A mask of X+1 low-order one bits, i.e. covering bits 0..X inclusive.
   Note: X must be strictly less than the number of value bits in a
   long, otherwise the shift is undefined behavior.  */
#define submask(x) ((1L << ((x) + 1)) - 1)
/* Bit ST of OBJ, as 0 or 1.  */
#define bit(obj,st) (((obj) >> (st)) & 1)
/* Bits ST through FN (inclusive) of OBJ, right-justified.  */
#define bits(obj,st,fn) (((obj) >> (st)) & submask ((fn) - (st)))
/* Like bits, but sign-extend the extracted field, treating bit FN as
   the sign bit.  The macro arguments are fully parenthesized here so
   that invocations such as sbits (insn, 4, 1 << 3) expand correctly;
   previously `fn - st' was substituted unparenthesized.  */
#define sbits(obj,st,fn) \
  ((long) (bits(obj,st,fn) | ((long) bit(obj,fn) * ~ submask ((fn) - (st)))))
/* Destination of an ARM B/BL instruction INSTR executed at ADDR: the
   signed 24-bit word offset is shifted left by two and applied
   relative to ADDR + 8 (the value of the ARM-state PC).  */
#define BranchDest(addr,instr) \
  ((CORE_ADDR) (((long) (addr)) + 8 + (sbits (instr, 0, 23) << 2)))
/* Extract the immediate from instruction movw/movt of encoding T.  INSN1 is
   the first 16-bit of instruction, and INSN2 is the second 16-bit of
   instruction.  The 16-bit immediate is assembled from the fields
   imm4:i:imm3:imm8 spread across the two halfwords.  */
#define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2)			\
  ((((insn1) & 0x000f) << 12)		/* imm4 -> [15:12] */	\
   | (((insn1) & 0x0400) << 1)		/* i    -> [11]    */	\
   | (((insn2) & 0x7000) >> 4)		/* imm3 -> [10:8]  */	\
   | ((insn2) & 0x00ff))		/* imm8 -> [7:0]   */
/* Extract the immediate from instruction movw/movt of encoding A.  INSN is
   the 32-bit instruction.  The 16-bit immediate is assembled from the
   fields imm4 (bits 19:16) and imm12 (bits 11:0).  */
#define EXTRACT_MOVW_MOVT_IMM_A(insn)			\
  ((((insn) >> 4) & 0xf000)	/* imm4 -> [15:12] */	\
   | ((insn) & 0x0fff))		/* imm12 -> [11:0] */
/* Decode immediate value; implements ThumbExpandImmediate pseudo-op.
   NOTE(review): the return type, braces and the switch/case scaffolding
   selecting among the return statements below are not visible in this
   excerpt.  */
thumb_expand_immediate (unsigned int imm)
  unsigned int count = imm >> 7;

      /* Byte replicated into halves: 0x00XY00XY.  */
      return (imm & 0xff) | ((imm & 0xff) << 16);

      /* Byte replicated into odd bytes: 0xXY00XY00.  */
      return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);

      /* Byte replicated into all four bytes: 0xXYXYXYXY.  */
      return (imm & 0xff) | ((imm & 0xff) << 8)
	| ((imm & 0xff) << 16) | ((imm & 0xff) << 24);

  /* Rotated constant: 0b1bcdefgh rotated right by COUNT.  */
  return (0x80 | (imm & 0x7f)) << (32 - count);
/* Return 1 if the 16-bit Thumb instruction INST might change
   control flow, 0 otherwise.
   NOTE(review): the return type, braces and the return statements
   between the encoding tests below are not visible in this excerpt.  */
thumb_instruction_changes_pc (unsigned short inst)
  if ((inst & 0xff00) == 0xbd00)	/* pop {rlist, pc} */

  if ((inst & 0xf000) == 0xd000)	/* conditional branch */

  if ((inst & 0xf800) == 0xe000)	/* unconditional branch */

  if ((inst & 0xff00) == 0x4700)	/* bx REG, blx REG */

  if ((inst & 0xff87) == 0x4687)	/* mov pc, REG */

  if ((inst & 0xf500) == 0xb100)	/* CBNZ or CBZ.  */
/* Return 1 if the 32-bit Thumb instruction in INST1 and INST2
   might change control flow, 0 otherwise.
   NOTE(review): braces and the return statements between the encoding
   tests below are not visible in this excerpt.  */
thumb2_instruction_changes_pc (unsigned short inst1, unsigned short inst2)
  if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
      /* Branches and miscellaneous control instructions.  */

      if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
      else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)
	  /* SUBS PC, LR, #imm8.  */
      else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
	  /* Conditional branch.  */

  if ((inst1 & 0xfe50) == 0xe810)
      /* Load multiple or RFE.  Which of the four variants is selected
	 by bits 7 and 8 of INST1.  */

      if (bit (inst1, 7) && !bit (inst1, 8))
      else if (!bit (inst1, 7) && bit (inst1, 8))
      else if (bit (inst1, 7) && bit (inst1, 8))
      else if (!bit (inst1, 7) && !bit (inst1, 8))

  if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
      /* MOV PC or MOVS PC.  */

  if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
      /* Loads with the PC as destination.  */
      if (bits (inst1, 0, 3) == 15)

      if ((inst2 & 0x0fc0) == 0x0000)

  if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)
      /* TBB (table branch, byte offsets).  */

  if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)
      /* TBH (table branch, halfword offsets).  */
/* Analyze a Thumb prologue, looking for a recognizable stack frame
   and frame pointer.  Scan until we encounter a store that could
   clobber the stack frame unexpectedly, or an unknown instruction.
   Return the last address which is definitely safe to skip for an
   initial breakpoint.

   NOTE(review): this excerpt is missing many structural lines of this
   function (its return type, braces, the declarations of `i', `regs',
   `insn', `offset', `addr', `nextpc', `mask', `regno', `loc', several
   `break' statements and the CACHE == NULL early-exit guard).  Do not
   treat the body below as compilable as-is.  */
thumb_analyze_prologue (struct gdbarch *gdbarch,
			CORE_ADDR start, CORE_ADDR limit,
			struct arm_prologue_cache *cache)
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);

  struct pv_area *stack;
  struct cleanup *back_to;

  CORE_ADDR unrecognized_pc = 0;

  /* Track each register symbolically, starting from its entry value.  */
  for (i = 0; i < 16; i++)
    regs[i] = pv_register (i, 0);
  stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
  back_to = make_cleanup_free_pv_area (stack);

  while (start < limit)
      insn = read_memory_unsigned_integer (start, 2, byte_order_for_code);

      if ((insn & 0xfe00) == 0xb400)		/* push { rlist } */
	  if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))

	  /* Bits 0-7 contain a mask for registers R0-R7.  Bit 8 says
	     whether to save LR (R14).  */
	  mask = (insn & 0xff) | ((insn & 0x100) << 6);

	  /* Calculate offsets of saved R0-R7 and LR.  */
	  for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
	    if (mask & (1 << regno))
		regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
		pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
      else if ((insn & 0xff00) == 0xb000)	/* add sp, #simm  OR
						   sub sp, #simm */
	  offset = (insn & 0x7f) << 2;		/* get scaled offset */
	  if (insn & 0x80)			/* Check for SUB.  */
	    regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
	    regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
      else if ((insn & 0xf800) == 0xa800)	/* add Rd, sp, #imm */
	regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
      else if ((insn & 0xfe00) == 0x1c00	/* add Rd, Rn, #imm */
	       && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
	regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
      else if ((insn & 0xf800) == 0x3000	/* add Rd, #imm */
	       && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
	regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
      else if ((insn & 0xfe00) == 0x1800	/* add Rd, Rn, Rm */
	       && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
	       && pv_is_constant (regs[bits (insn, 3, 5)]))
	regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
					  regs[bits (insn, 6, 8)]);
      else if ((insn & 0xff00) == 0x4400	/* add Rd, Rm */
	       && pv_is_constant (regs[bits (insn, 3, 6)]))
	  int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
	  int rm = bits (insn, 3, 6);
	  regs[rd] = pv_add (regs[rd], regs[rm]);
      else if ((insn & 0xff00) == 0x4600)	/* mov hi, lo or mov lo, hi */
	  int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
	  int src_reg = (insn & 0x78) >> 3;
	  regs[dst_reg] = regs[src_reg];
      else if ((insn & 0xf800) == 0x9000)	/* str rd, [sp, #off] */
	  /* Handle stores to the stack.  Normally pushes are used,
	     but with GCC -mtpcs-frame, there may be other stores
	     in the prologue to create the frame.  */
	  int regno = (insn >> 8) & 0x7;

	  offset = (insn & 0xff) << 2;
	  addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);

	  if (pv_area_store_would_trash (stack, addr))

	  pv_area_store (stack, addr, 4, regs[regno]);
      else if ((insn & 0xf800) == 0x6000)	/* str rd, [rn, #off] */
	  int rd = bits (insn, 0, 2);
	  int rn = bits (insn, 3, 5);

	  offset = bits (insn, 6, 10) << 2;
	  addr = pv_add_constant (regs[rn], offset);

	  if (pv_area_store_would_trash (stack, addr))

	  pv_area_store (stack, addr, 4, regs[rd]);
      else if (((insn & 0xf800) == 0x7000	/* strb Rd, [Rn, #off] */
		|| (insn & 0xf800) == 0x8000)	/* strh Rd, [Rn, #off] */
	       && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
	/* Ignore stores of argument registers to the stack.  */
      else if ((insn & 0xf800) == 0xc800	/* ldmia Rn!, { registers } */
	       && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
	/* Ignore block loads from the stack, potentially copying
	   parameters from memory.  */
      else if ((insn & 0xf800) == 0x9800	/* ldr Rd, [sp, #immed] */
	       || ((insn & 0xf800) == 0x6800	/* ldr Rd, [Rn, #immed] */
		   && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
	/* Similarly ignore single loads from the stack.  */
      else if ((insn & 0xffc0) == 0x0000	/* lsls Rd, Rm, #0 */
	       || (insn & 0xffc0) == 0x1c00)	/* add Rd, Rn, #0 */
	/* Skip register copies, i.e. saves to another register
	   instead of the stack.  */
      else if ((insn & 0xf800) == 0x2000)	/* movs Rd, #imm */
	/* Recognize constant loads; even with small stacks these are necessary
	   for frame-size computation.  */
	regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
      else if ((insn & 0xf800) == 0x4800)	/* ldr Rd, [pc, #imm] */
	  /* Constant pool loads, for the same reason.  */
	  unsigned int constant;

	  loc = start + 4 + bits (insn, 0, 7) * 4;
	  constant = read_memory_unsigned_integer (loc, 4, byte_order);
	  regs[bits (insn, 8, 10)] = pv_constant (constant);
      else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions.  */
	  unsigned short inst2;

	  inst2 = read_memory_unsigned_integer (start + 2, 2,
						byte_order_for_code);

	  if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
	      /* BL, BLX.  Allow some special function calls when
		 skipping the prologue; GCC generates these before
		 storing arguments to the stack.  */
	      int j1, j2, imm1, imm2;

	      imm1 = sbits (insn, 0, 10);
	      imm2 = bits (inst2, 0, 10);
	      j1 = bit (inst2, 13);
	      j2 = bit (inst2, 11);

	      offset = ((imm1 << 12) + (imm2 << 1));
	      offset ^= ((!j2) << 22) | ((!j1) << 23);

	      nextpc = start + 4 + offset;
	      /* For BLX make sure to clear the low bits.  */
	      if (bit (inst2, 12) == 0)
		nextpc = nextpc & 0xfffffffc;

	      if (!skip_prologue_function (gdbarch, nextpc,
					   bit (inst2, 12) != 0))
	  else if ((insn & 0xffd0) == 0xe900	/* stmdb Rn{!},
						   { registers } */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	      pv_t addr = regs[bits (insn, 0, 3)];

	      if (pv_area_store_would_trash (stack, addr))

	      /* Calculate offsets of saved registers.  */
	      for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
		if (inst2 & (1 << regno))
		    addr = pv_add_constant (addr, -4);
		    pv_area_store (stack, addr, 4, regs[regno]);

		regs[bits (insn, 0, 3)] = addr;
	  else if ((insn & 0xff50) == 0xe940	/* strd Rt, Rt2,
						   [Rn, #+/-imm]{!} */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	      int regno1 = bits (inst2, 12, 15);
	      int regno2 = bits (inst2, 8, 11);
	      pv_t addr = regs[bits (insn, 0, 3)];

	      offset = inst2 & 0xff;
		addr = pv_add_constant (addr, offset);
		addr = pv_add_constant (addr, -offset);

	      if (pv_area_store_would_trash (stack, addr))

	      pv_area_store (stack, addr, 4, regs[regno1]);
	      pv_area_store (stack, pv_add_constant (addr, 4),

		regs[bits (insn, 0, 3)] = addr;
	  else if ((insn & 0xfff0) == 0xf8c0	/* str Rt,[Rn,+/-#imm]{!} */
		   && (inst2 & 0x0c00) == 0x0c00
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	      int regno = bits (inst2, 12, 15);
	      pv_t addr = regs[bits (insn, 0, 3)];

	      offset = inst2 & 0xff;
		addr = pv_add_constant (addr, offset);
		addr = pv_add_constant (addr, -offset);

	      if (pv_area_store_would_trash (stack, addr))

	      pv_area_store (stack, addr, 4, regs[regno]);

		regs[bits (insn, 0, 3)] = addr;
	  else if ((insn & 0xfff0) == 0xf8c0	/* str.w Rt,[Rn,#imm] */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	      int regno = bits (inst2, 12, 15);

	      offset = inst2 & 0xfff;
	      addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);

	      if (pv_area_store_would_trash (stack, addr))

	      pv_area_store (stack, addr, 4, regs[regno]);
	  else if ((insn & 0xffd0) == 0xf880	/* str{bh}.w Rt,[Rn,#imm] */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Ignore stores of argument registers to the stack.  */
	  else if ((insn & 0xffd0) == 0xf800	/* str{bh} Rt,[Rn,#+/-imm] */
		   && (inst2 & 0x0d00) == 0x0c00
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Ignore stores of argument registers to the stack.  */
	  else if ((insn & 0xffd0) == 0xe890	/* ldmia Rn[!],
						   { registers } */
		   && (inst2 & 0x8000) == 0x0000
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Ignore block loads from the stack, potentially copying
	       parameters from memory.  */
	  else if ((insn & 0xffb0) == 0xe950	/* ldrd Rt, Rt2,
						   [Rn, #+/-imm] */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Similarly ignore dual loads from the stack.  */
	  else if ((insn & 0xfff0) == 0xf850	/* ldr Rt,[Rn,#+/-imm] */
		   && (inst2 & 0x0d00) == 0x0c00
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Similarly ignore single loads from the stack.  */
	  else if ((insn & 0xfff0) == 0xf8d0	/* ldr.w Rt,[Rn,#imm] */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Similarly ignore single loads from the stack.  */
	  else if ((insn & 0xfbf0) == 0xf100	/* add.w Rd, Rn, #imm */
		   && (inst2 & 0x8000) == 0x0000)
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_add_constant (regs[bits (insn, 0, 3)],
				   thumb_expand_immediate (imm));
	  else if ((insn & 0xfbf0) == 0xf200	/* addw Rd, Rn, #imm */
		   && (inst2 & 0x8000) == 0x0000)
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_add_constant (regs[bits (insn, 0, 3)], imm);
	  else if ((insn & 0xfbf0) == 0xf1a0	/* sub.w Rd, Rn, #imm */
		   && (inst2 & 0x8000) == 0x0000)
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_add_constant (regs[bits (insn, 0, 3)],
				   - (CORE_ADDR) thumb_expand_immediate (imm));
	  else if ((insn & 0xfbf0) == 0xf2a0	/* subw Rd, Rn, #imm */
		   && (inst2 & 0x8000) == 0x0000)
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
	  else if ((insn & 0xfbff) == 0xf04f)	/* mov.w Rd, #const */
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_constant (thumb_expand_immediate (imm));
	  else if ((insn & 0xfbf0) == 0xf240)	/* movw Rd, #const */
		= EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);

	      regs[bits (inst2, 8, 11)] = pv_constant (imm);
	  else if (insn == 0xea5f		/* mov.w Rd,Rm */
		   && (inst2 & 0xf0f0) == 0)
	      int dst_reg = (inst2 & 0x0f00) >> 8;
	      int src_reg = inst2 & 0xf;
	      regs[dst_reg] = regs[src_reg];
	  else if ((insn & 0xff7f) == 0xf85f)	/* ldr.w Rt,<label> */
	      /* Constant pool loads.  */
	      unsigned int constant;

	      offset = bits (insn, 0, 11);
		loc = start + 4 + offset;
		loc = start + 4 - offset;

	      constant = read_memory_unsigned_integer (loc, 4, byte_order);
	      regs[bits (inst2, 12, 15)] = pv_constant (constant);
	  else if ((insn & 0xff7f) == 0xe95f)	/* ldrd Rt,Rt2,<label> */
	      /* Constant pool loads.  */
	      unsigned int constant;

	      offset = bits (insn, 0, 7) << 2;
		loc = start + 4 + offset;
		loc = start + 4 - offset;

	      constant = read_memory_unsigned_integer (loc, 4, byte_order);
	      regs[bits (inst2, 12, 15)] = pv_constant (constant);

	      constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
	      regs[bits (inst2, 8, 11)] = pv_constant (constant);
	  else if (thumb2_instruction_changes_pc (insn, inst2))
	      /* Don't scan past anything that might change control flow.  */
	      /* The optimizer might shove anything into the prologue,
		 so we just skip what we don't recognize.  */
	      unrecognized_pc = start;
      else if (thumb_instruction_changes_pc (insn))
	  /* Don't scan past anything that might change control flow.  */
	  /* The optimizer might shove anything into the prologue,
	     so we just skip what we don't recognize.  */
	  unrecognized_pc = start;

    fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
			paddress (gdbarch, start));

  if (unrecognized_pc == 0)
    unrecognized_pc = start;

      /* NOTE(review): the guard around this early cleanup/return pair
	 (presumably `if (cache == NULL)') is not visible in this
	 excerpt — confirm against the full source.  */
      do_cleanups (back_to);
      return unrecognized_pc;

  if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
      /* Frame pointer is fp.  Frame size is constant.  */
      cache->framereg = ARM_FP_REGNUM;
      cache->framesize = -regs[ARM_FP_REGNUM].k;
  else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
      /* Frame pointer is r7.  Frame size is constant.  */
      cache->framereg = THUMB_FP_REGNUM;
      cache->framesize = -regs[THUMB_FP_REGNUM].k;
      /* Try the stack pointer... this is a bit desperate.  */
      cache->framereg = ARM_SP_REGNUM;
      cache->framesize = -regs[ARM_SP_REGNUM].k;

  /* Record the stack address of each register saved in the prologue.  */
  for (i = 0; i < 16; i++)
    if (pv_area_find_reg (stack, gdbarch, i, &offset))
      cache->saved_regs[i].addr = offset;

  do_cleanups (back_to);
  return unrecognized_pc;
/* Try to analyze the instructions starting from PC, which load symbol
   __stack_chk_guard.  Return the address of instruction after loading this
   symbol, set the dest register number to *DESTREG, and set the size of
   instructions for loading symbol in OFFSET.  Return 0 if the
   instructions are not recognized.
   NOTE(review): the return type, braces, several return statements and
   a few declarations (including the ARM-mode `insn') are not visible
   in this excerpt.  */
arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
				 unsigned int *destreg, int *offset)
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  int is_thumb = arm_pc_is_thumb (gdbarch, pc);
  unsigned int low, high, address;

      /* Thumb mode.  */
      unsigned short insn1
	= read_memory_unsigned_integer (pc, 2, byte_order_for_code);

      if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
	  *destreg = bits (insn1, 8, 10);
	  address = bits (insn1, 0, 7);
      else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
	  unsigned short insn2
	    = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);

	  low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);

	    = read_memory_unsigned_integer (pc + 4, 2, byte_order_for_code);
	    = read_memory_unsigned_integer (pc + 6, 2, byte_order_for_code);

	  /* movt Rd, #const */
	  if ((insn1 & 0xfbc0) == 0xf2c0)
	      high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
	      *destreg = bits (insn2, 8, 11);

	  address = (high << 16 | low);

      /* ARM mode.  */
	= read_memory_unsigned_integer (pc, 4, byte_order_for_code);

      if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, #immed */
	  address = bits (insn, 0, 11);
	  *destreg = bits (insn, 12, 15);
      else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
	  low = EXTRACT_MOVW_MOVT_IMM_A (insn);

	    = read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code);

	  if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
	      high = EXTRACT_MOVW_MOVT_IMM_A (insn);
	      *destreg = bits (insn, 12, 15);

	  address = (high << 16 | low);
1253 /* Try to skip a sequence of instructions used for stack protector. If PC
1254 points to the first instruction of this sequence, return the address of
1255 first instruction after this sequence, otherwise, return original PC.
1257 On arm, this sequence of instructions is composed of mainly three steps,
1258 Step 1: load symbol __stack_chk_guard,
1259 Step 2: load from address of __stack_chk_guard,
1260 Step 3: store it to somewhere else.
1262 Usually, instructions on step 2 and step 3 are the same on various ARM
1263 architectures. On step 2, it is one instruction 'ldr Rx, [Rn, #0]', and
1264 on step 3, it is also one instruction 'str Rx, [r7, #immd]'. However,
1265 instructions in step 1 vary from different ARM architectures. On ARMv7,
1268 movw Rn, #:lower16:__stack_chk_guard
1269 movt Rn, #:upper16:__stack_chk_guard
1276 .word __stack_chk_guard
1278 Since ldr/str is a very popular instruction, we can't use them as
1279 'fingerprint' or 'signature' of stack protector sequence. Here we choose
1280 sequence {movw/movt, ldr}/ldr/str plus symbol __stack_chk_guard, if not
1281 stripped, as the 'fingerprint' of a stack protector code sequence. */
/* Skip the stack-protector sequence at PC, if present.  Step 1
   (loading the address of __stack_chk_guard) is matched by
   arm_analyze_load_stack_chk_guard; we then verify the referenced
   symbol's name, and match Step 2 (ldr) and Step 3 (str) in either
   Thumb or ARM encoding.  Returns the address just past the sequence
   on a full match; non-matching early-return paths are elided.  */
1284 arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
1286 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1287 unsigned int basereg;
1288 struct minimal_symbol *stack_chk_guard;
1290 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1293 /* Try to parse the instructions in Step 1. */
1294 addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1299 stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
1300 /* If name of symbol doesn't start with '__stack_chk_guard', this
1301 instruction sequence is not for stack protector. If symbol is
1302 removed, we conservatively think this sequence is for stack protector. */
1304 && strncmp (SYMBOL_LINKAGE_NAME (stack_chk_guard), "__stack_chk_guard",
1305 strlen ("__stack_chk_guard")) != 0)
/* Thumb path: steps 2 and 3 are 16-bit encodings.  */
1310 unsigned int destreg;
1312 = read_memory_unsigned_integer (pc + offset, 2, byte_order_for_code);
1314 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1315 if ((insn & 0xf800) != 0x6800)
1317 if (bits (insn, 3, 5) != basereg)
1319 destreg = bits (insn, 0, 2);
1321 insn = read_memory_unsigned_integer (pc + offset + 2, 2,
1322 byte_order_for_code);
1323 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1324 if ((insn & 0xf800) != 0x6000)
1326 if (destreg != bits (insn, 0, 2))
/* ARM path: steps 2 and 3 are 32-bit encodings.  */
1331 unsigned int destreg;
1333 = read_memory_unsigned_integer (pc + offset, 4, byte_order_for_code);
1335 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1336 if ((insn & 0x0e500000) != 0x04100000)
1338 if (bits (insn, 16, 19) != basereg)
1340 destreg = bits (insn, 12, 15);
1341 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1342 insn = read_memory_unsigned_integer (pc + offset + 4,
1343 4, byte_order_for_code);
1344 if ((insn & 0x0e500000) != 0x04000000)
1346 if (bits (insn, 12, 15) != destreg)
1349 /* The size of total two instructions ldr/str is 4 on Thumb-2, while 8
1352 return pc + offset + 4;
1354 return pc + offset + 8;
1357 /* Advance the PC across any function entry prologue instructions to
1358 reach some "real" code.
1360 The APCS (ARM Procedure Call Standard) defines the following
1364 [stmfd sp!, {a1,a2,a3,a4}]
1365 stmfd sp!, {...,fp,ip,lr,pc}
1366 [stfe f7, [sp, #-12]!]
1367 [stfe f6, [sp, #-12]!]
1368 [stfe f5, [sp, #-12]!]
1369 [stfe f4, [sp, #-12]!]
1370 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
/* gdbarch skip_prologue method: advance PC past the function entry
   prologue.  Preference order: (1) debug-info line table via
   skip_prologue_using_sal, validated against the prologue analyzers
   for non-GCC producers; (2) hand-scan of known APCS prologue
   instruction patterns, bounded at PC + 64.  */
1373 arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1375 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1378 CORE_ADDR func_addr, limit_pc;
1380 /* See if we can determine the end of the prologue via the symbol table.
1381 If so, then return either PC, or the PC after the prologue, whichever
1383 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
1385 CORE_ADDR post_prologue_pc
1386 = skip_prologue_using_sal (gdbarch, func_addr);
1387 struct symtab *s = find_pc_symtab (func_addr);
/* Also skip a stack-protector canary sequence, if one follows the
   line-table prologue end.  */
1389 if (post_prologue_pc)
1391 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1394 /* GCC always emits a line note before the prologue and another
1395 one after, even if the two are at the same address or on the
1396 same line. Take advantage of this so that we do not need to
1397 know every instruction that might appear in the prologue. We
1398 will have producer information for most binaries; if it is
1399 missing (e.g. for -gstabs), assume the GNU tools. */
1400 if (post_prologue_pc
1402 || s->producer == NULL
1403 || strncmp (s->producer, "GNU ", sizeof ("GNU ") - 1) == 0))
1404 return post_prologue_pc;
1406 if (post_prologue_pc != 0)
1408 CORE_ADDR analyzed_limit;
1410 /* For non-GCC compilers, make sure the entire line is an
1411 acceptable prologue; GDB will round this function's
1412 return value up to the end of the following line so we
1413 can not skip just part of a line (and we do not want to).
1415 RealView does not treat the prologue specially, but does
1416 associate prologue code with the opening brace; so this
1417 lets us skip the first line if we think it is the opening
1419 if (arm_pc_is_thumb (gdbarch, func_addr))
1420 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1421 post_prologue_pc, NULL);
1423 analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
1424 post_prologue_pc, NULL);
/* Only trust the line-table endpoint if the analyzer agrees the
   whole span is prologue.  */
1426 if (analyzed_limit != post_prologue_pc)
1429 return post_prologue_pc;
1433 /* Can't determine prologue from the symbol table, need to examine
1436 /* Find an upper limit on the function prologue using the debug
1437 information. If the debug information could not be used to provide
1438 that bound, then use an arbitrary large number as the upper bound. */
1439 /* Like arm_scan_prologue, stop no later than pc + 64. */
1440 limit_pc = skip_prologue_using_sal (gdbarch, pc);
1442 limit_pc = pc + 64; /* Magic. */
1445 /* Check if this is Thumb code. */
1446 if (arm_pc_is_thumb (gdbarch, pc))
1447 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
/* ARM mode: scan 32-bit instructions for the APCS prologue shapes
   listed in the comment above this function.  */
1449 for (skip_pc = pc; skip_pc < limit_pc; skip_pc += 4)
1451 inst = read_memory_unsigned_integer (skip_pc, 4, byte_order_for_code);
1453 /* "mov ip, sp" is no longer a required part of the prologue. */
1454 if (inst == 0xe1a0c00d) /* mov ip, sp */
1457 if ((inst & 0xfffff000) == 0xe28dc000) /* add ip, sp #n */
1460 if ((inst & 0xfffff000) == 0xe24dc000) /* sub ip, sp #n */
1463 /* Some prologues begin with "str lr, [sp, #-4]!". */
1464 if (inst == 0xe52de004) /* str lr, [sp, #-4]! */
1467 if ((inst & 0xfffffff0) == 0xe92d0000) /* stmfd sp!,{a1,a2,a3,a4} */
1470 if ((inst & 0xfffff800) == 0xe92dd800) /* stmfd sp!,{fp,ip,lr,pc} */
1473 /* Any insns after this point may float into the code, if it makes
1474 for better instruction scheduling, so we skip them only if we
1475 find them, but still consider the function to be frame-ful. */
1477 /* We may have either one sfmfd instruction here, or several stfe
1478 insns, depending on the version of floating point code we
1480 if ((inst & 0xffbf0fff) == 0xec2d0200) /* sfmfd fn, <cnt>, [sp]! */
1483 if ((inst & 0xffff8fff) == 0xed6d0103) /* stfe fn, [sp, #-12]! */
1486 if ((inst & 0xfffff000) == 0xe24cb000) /* sub fp, ip, #nn */
1489 if ((inst & 0xfffff000) == 0xe24dd000) /* sub sp, sp, #nn */
1492 if ((inst & 0xffffc000) == 0xe54b0000 /* strb r(0123),[r11,#-nn] */
1493 || (inst & 0xffffc0f0) == 0xe14b00b0 /* strh r(0123),[r11,#-nn] */
1494 || (inst & 0xffffc000) == 0xe50b0000) /* str r(0123),[r11,#-nn] */
1497 if ((inst & 0xffffc000) == 0xe5cd0000 /* strb r(0123),[sp,#nn] */
1498 || (inst & 0xffffc0f0) == 0xe1cd00b0 /* strh r(0123),[sp,#nn] */
1499 || (inst & 0xffffc000) == 0xe58d0000) /* str r(0123),[sp,#nn] */
1502 /* Un-recognized instruction; stop scanning. */
1506 return skip_pc; /* End of prologue. */
1510 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1511 This function decodes a Thumb function prologue to determine:
1512 1) the size of the stack frame
1513 2) which registers are saved on it
1514 3) the offsets of saved regs
1515 4) the offset from the stack pointer to the frame pointer
1517 A typical Thumb function prologue would create this stack frame
1518 (offsets relative to FP)
1519 old SP -> 24 stack parameters
1522 R7 -> 0 local variables (16 bytes)
1523 SP -> -12 additional stack space (12 bytes)
1524 The frame size would thus be 36 bytes, and the frame offset would be
1525 12 bytes. The frame register is R7.
1527 The comments for thumb_skip_prolog() describe the algorithm we use
1528 to detect the end of the prologue. */
/* Scan a Thumb prologue for the frame at BLOCK_ADDR and fill CACHE
   (frame size, frame register, saved-register offsets) via
   thumb_analyze_prologue.  PREV_PC bounds the scan so that we never
   analyze past the frame's current PC.  */
1532 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1533 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1535 CORE_ADDR prologue_start;
1536 CORE_ADDR prologue_end;
1538 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1541 /* See comment in arm_scan_prologue for an explanation of
1543 if (prologue_end > prologue_start + 64)
1545 prologue_end = prologue_start + 64;
1549 /* We're in the boondocks: we have no idea where the start of the
/* Never scan beyond the point the frame has actually executed to.  */
1553 prologue_end = min (prologue_end, prev_pc);
1555 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1558 /* Return 1 if THIS_INSTR might change control flow, 0 otherwise. */
/* Return 1 if the 32-bit ARM instruction THIS_INSTR might change
   control flow (writes the PC, branches, or raises), 0 otherwise.
   Used by the prologue analyzer to stop scanning at anything that
   could leave the function.  Decodes by major opcode fields per the
   ARM ARM instruction-set encoding tables.  */
1561 arm_instruction_changes_pc (uint32_t this_instr)
/* Condition field 0xF selects the unconditional instruction space.  */
1563 if (bits (this_instr, 28, 31) == INST_NV)
1564 /* Unconditional instructions. */
1565 switch (bits (this_instr, 24, 27))
1569 /* Branch with Link and change to Thumb. */
1574 /* Coprocessor register transfer. */
1575 if (bits (this_instr, 12, 15) == 15)
1576 error (_("Invalid update to pc in instruction"));
/* Conditional instruction space, keyed on bits 25-27.  */
1582 switch (bits (this_instr, 25, 27))
1585 if (bits (this_instr, 23, 24) == 2 && bit (this_instr, 20) == 0)
1587 /* Multiplies and extra load/stores. */
1588 if (bit (this_instr, 4) == 1 && bit (this_instr, 7) == 1)
1589 /* Neither multiplies nor extension load/stores are allowed
1593 /* Otherwise, miscellaneous instructions. */
1595 /* BX <reg>, BXJ <reg>, BLX <reg> */
1596 if (bits (this_instr, 4, 27) == 0x12fff1
1597 || bits (this_instr, 4, 27) == 0x12fff2
1598 || bits (this_instr, 4, 27) == 0x12fff3)
1601 /* Other miscellaneous instructions are unpredictable if they
1605 /* Data processing instruction. Fall through. */
/* Data-processing writes the PC when Rd (bits 12-15) is 15.  */
1608 if (bits (this_instr, 12, 15) == 15)
1615 /* Media instructions and architecturally undefined instructions. */
1616 if (bits (this_instr, 25, 27) == 3 && bit (this_instr, 4) == 1)
1620 if (bit (this_instr, 20) == 0)
/* Loads change the PC when the destination register is PC.  */
1624 if (bits (this_instr, 12, 15) == ARM_PC_REGNUM)
1630 /* Load/store multiple. */
1631 if (bit (this_instr, 20) == 1 && bit (this_instr, 15) == 1)
1637 /* Branch and branch with link. */
1642 /* Coprocessor transfers or SWIs can not affect PC. */
/* Unreachable: all 3-bit opcode values are handled above.  */
1646 internal_error (__FILE__, __LINE__, _("bad value in switch"));
1650 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1651 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1652 fill it in. Return the first address not recognized as a prologue
1655 We recognize all the instructions typically found in ARM prologues,
1656 plus harmless instructions which can be skipped (either for analysis
1657 purposes, or a more restrictive set that can be skipped when finding
1658 the end of the prologue). */
/* Symbolically execute an ARM-mode prologue from PROLOGUE_START up to
   PROLOGUE_END using the prologue-value (pv) machinery.  If CACHE is
   non-NULL, fill in the frame register, frame size, and saved-register
   offsets.  Returns the first address not recognized as prologue.  */
1661 arm_analyze_prologue (struct gdbarch *gdbarch,
1662 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1663 struct arm_prologue_cache *cache)
1665 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1666 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1668 CORE_ADDR offset, current_pc;
/* One pv value per core register, tracking each register's symbolic
   contents relative to its value at function entry.  */
1669 pv_t regs[ARM_FPS_REGNUM];
1670 struct pv_area *stack;
1671 struct cleanup *back_to;
1672 int framereg, framesize;
1673 CORE_ADDR unrecognized_pc = 0;
1675 /* Search the prologue looking for instructions that set up the
1676 frame pointer, adjust the stack pointer, and save registers.
1678 Be careful, however, and if it doesn't look like a prologue,
1679 don't try to scan it. If, for instance, a frameless function
1680 begins with stmfd sp!, then we will tell ourselves there is
1681 a frame, which will confuse stack traceback, as well as "finish"
1682 and other operations that rely on a knowledge of the stack
1685 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1686 regs[regno] = pv_register (regno, 0);
1687 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1688 back_to = make_cleanup_free_pv_area (stack);
1690 for (current_pc = prologue_start;
1691 current_pc < prologue_end;
1695 = read_memory_unsigned_integer (current_pc, 4, byte_order_for_code);
1697 if (insn == 0xe1a0c00d) /* mov ip, sp */
1699 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1702 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1703 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
/* Decode the ARM modified-immediate operand: 8-bit value rotated
   right by twice the 4-bit rotate field.  */
1705 unsigned imm = insn & 0xff; /* immediate value */
1706 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1707 int rd = bits (insn, 12, 15);
1708 imm = (imm >> rot) | (imm << (32 - rot));
1709 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1712 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1713 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1715 unsigned imm = insn & 0xff; /* immediate value */
1716 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1717 int rd = bits (insn, 12, 15);
1718 imm = (imm >> rot) | (imm << (32 - rot));
1719 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1722 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
/* Push of a single register with writeback.  */
1725 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1727 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1728 pv_area_store (stack, regs[ARM_SP_REGNUM], 4,
1729 regs[bits (insn, 12, 15)]);
1732 else if ((insn & 0xffff0000) == 0xe92d0000)
1733 /* stmfd sp!, {..., fp, ip, lr, pc}
1735 stmfd sp!, {a1, a2, a3, a4} */
1737 int mask = insn & 0xffff;
1739 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1742 /* Calculate offsets of saved registers. */
/* STM stores the highest-numbered register at the highest address,
   so walk the mask from PC downward while decrementing SP.  */
1743 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1744 if (mask & (1 << regno))
1747 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1748 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
1751 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1752 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1753 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1755 /* No need to add this to saved_regs -- it's just an arg reg. */
1758 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1759 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1760 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1762 /* No need to add this to saved_regs -- it's just an arg reg. */
1765 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1767 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1769 /* No need to add this to saved_regs -- it's just arg regs. */
1772 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1774 unsigned imm = insn & 0xff; /* immediate value */
1775 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1776 imm = (imm >> rot) | (imm << (32 - rot));
1777 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1779 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1781 unsigned imm = insn & 0xff; /* immediate value */
1782 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1783 imm = (imm >> rot) | (imm << (32 - rot));
1784 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
1786 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1788 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1790 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
/* FPA registers are 12 bytes each.  */
1793 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1794 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
1795 pv_area_store (stack, regs[ARM_SP_REGNUM], 12, regs[regno]);
1797 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1799 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1801 int n_saved_fp_regs;
1802 unsigned int fp_start_reg, fp_bound_reg;
1804 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
/* The sfmfd register count is encoded in the N0/N1 bits.  */
1807 if ((insn & 0x800) == 0x800) /* N0 is set */
1809 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1810 n_saved_fp_regs = 3;
1812 n_saved_fp_regs = 1;
1816 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1817 n_saved_fp_regs = 2;
1819 n_saved_fp_regs = 4;
1822 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
1823 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
/* NOTE(review): fp_start_reg is incremented both in the loop header
   and via the post-increment in the store below, which appears to
   advance two registers per 12-byte slot -- confirm against the
   full source whether this is intentional.  */
1824 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
1826 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1827 pv_area_store (stack, regs[ARM_SP_REGNUM], 12,
1828 regs[fp_start_reg++]);
1831 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1833 /* Allow some special function calls when skipping the
1834 prologue; GCC generates these before storing arguments to
1836 CORE_ADDR dest = BranchDest (current_pc, insn);
1838 if (skip_prologue_function (gdbarch, dest, 0))
1843 else if ((insn & 0xf0000000) != 0xe0000000)
1844 break; /* Condition not true, exit early. */
1845 else if (arm_instruction_changes_pc (insn))
1846 /* Don't scan past anything that might change control flow. */
1848 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1849 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1850 /* Ignore block loads from the stack, potentially copying
1851 parameters from memory. */
1853 else if ((insn & 0xfc500000) == 0xe4100000
1854 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1855 /* Similarly ignore single loads from the stack. */
1857 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1858 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1859 register instead of the stack. */
1863 /* The optimizer might shove anything into the prologue,
1864 so we just skip what we don't recognize. */
1865 unrecognized_pc = current_pc;
1870 if (unrecognized_pc == 0)
1871 unrecognized_pc = current_pc;
1873 /* The frame size is just the distance from the frame register
1874 to the original stack pointer. */
1875 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1877 /* Frame pointer is fp. */
1878 framereg = ARM_FP_REGNUM;
1879 framesize = -regs[ARM_FP_REGNUM].k;
1883 /* Try the stack pointer... this is a bit desperate. */
1884 framereg = ARM_SP_REGNUM;
1885 framesize = -regs[ARM_SP_REGNUM].k;
/* Record results in the caller's cache, if requested.  */
1890 cache->framereg = framereg;
1891 cache->framesize = framesize;
1893 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1894 if (pv_area_find_reg (stack, gdbarch, regno, &offset))
1895 cache->saved_regs[regno].addr = offset;
1899 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1900 paddress (gdbarch, unrecognized_pc));
1902 do_cleanups (back_to);
1903 return unrecognized_pc;
/* Scan the prologue of the function containing THIS_FRAME's PC and
   fill CACHE.  Dispatches to thumb_scan_prologue for Thumb frames;
   otherwise bounds the ARM prologue via symbols (or, lacking those,
   by reading the saved return address through FP) and hands off to
   arm_analyze_prologue.  */
1907 arm_scan_prologue (struct frame_info *this_frame,
1908 struct arm_prologue_cache *cache)
1910 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1911 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1913 CORE_ADDR prologue_start, prologue_end, current_pc;
1914 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1915 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
1916 pv_t regs[ARM_FPS_REGNUM];
1917 struct pv_area *stack;
1918 struct cleanup *back_to;
1921 /* Assume there is no frame until proven otherwise. */
1922 cache->framereg = ARM_SP_REGNUM;
1923 cache->framesize = 0;
1925 /* Check for Thumb prologue. */
1926 if (arm_frame_is_thumb (this_frame))
1928 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1932 /* Find the function prologue. If we can't find the function in
1933 the symbol table, peek in the stack frame to find the PC. */
1934 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1937 /* One way to find the end of the prologue (which works well
1938 for unoptimized code) is to do the following:
1940 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1943 prologue_end = prev_pc;
1944 else if (sal.end < prologue_end)
1945 prologue_end = sal.end;
1947 This mechanism is very accurate so long as the optimizer
1948 doesn't move any instructions from the function body into the
1949 prologue. If this happens, sal.end will be the last
1950 instruction in the first hunk of prologue code just before
1951 the first instruction that the scheduler has moved from
1952 the body to the prologue.
1954 In order to make sure that we scan all of the prologue
1955 instructions, we use a slightly less accurate mechanism which
1956 may scan more than necessary. To help compensate for this
1957 lack of accuracy, the prologue scanning loop below contains
1958 several clauses which'll cause the loop to terminate early if
1959 an implausible prologue instruction is encountered.
1965 is a suitable endpoint since it accounts for the largest
1966 possible prologue plus up to five instructions inserted by
1969 if (prologue_end > prologue_start + 64)
1971 prologue_end = prologue_start + 64; /* See above. */
1976 /* We have no symbol information. Our only option is to assume this
1977 function has a standard stack frame and the normal frame register.
1978 Then, we can find the value of our frame pointer on entrance to
1979 the callee (or at the present moment if this is the innermost frame).
1980 The value stored there should be the address of the stmfd + 8. */
1981 CORE_ADDR frame_loc;
1982 LONGEST return_value;
1984 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
1985 if (!safe_read_memory_integer (frame_loc, 4, byte_order, &return_value))
/* The stored value is the stmfd address + 8; strip any mode bits
   and back up to the presumed function start.  */
1989 prologue_start = gdbarch_addr_bits_remove
1990 (gdbarch, return_value) - 8;
1991 prologue_end = prologue_start + 64; /* See above. */
/* Never scan beyond the frame's current PC.  */
1995 if (prev_pc < prologue_end)
1996 prologue_end = prev_pc;
1998 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
/* Build and return a prologue cache for THIS_FRAME: run the prologue
   scanner, reconstruct the caller's SP (prev_sp) from the frame
   register plus frame size, and convert the saved-register offsets
   found by the scanner into absolute addresses.  */
2001 static struct arm_prologue_cache *
2002 arm_make_prologue_cache (struct frame_info *this_frame)
2005 struct arm_prologue_cache *cache;
2006 CORE_ADDR unwound_fp;
2008 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2009 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2011 arm_scan_prologue (this_frame, cache);
2013 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
/* A zero frame register reads as "no frame"; bail out early.  */
2014 if (unwound_fp == 0)
2017 cache->prev_sp = unwound_fp + cache->framesize;
2019 /* Calculate actual addresses of saved registers using offsets
2020 determined by arm_scan_prologue. */
2021 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2022 if (trad_frame_addr_p (cache->saved_regs, reg))
2023 cache->saved_regs[reg].addr += cache->prev_sp;
2028 /* Our frame ID for a normal frame is the current function's starting PC
2029 and the caller's SP when we were called. */
/* frame_unwind this_id method: build THIS_ID from the caller's SP
   (cache->prev_sp) and the function start address.  Leaves the ID
   unset (halting the backtrace) at _start or when unwinding hit a
   wall.  */
2032 arm_prologue_this_id (struct frame_info *this_frame,
2034 struct frame_id *this_id)
2036 struct arm_prologue_cache *cache;
2040 if (*this_cache == NULL)
2041 *this_cache = arm_make_prologue_cache (this_frame);
2042 cache = *this_cache;
2044 /* This is meant to halt the backtrace at "_start". */
2045 pc = get_frame_pc (this_frame);
2046 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
2049 /* If we've hit a wall, stop. */
2050 if (cache->prev_sp == 0)
2053 /* Use function start address as part of the frame ID. If we cannot
2054 identify the start address (due to missing symbol information),
2055 fall back to just using the current PC. */
2056 func = get_frame_func (this_frame);
2060 id = frame_id_build (cache->prev_sp, func);
/* frame_unwind prev_register method: return the caller's value of
   PREV_REGNUM.  PC, SP, and CPSR need special reconstruction (see the
   inline comments); everything else comes from the saved-register
   addresses recorded in the prologue cache.  */
2064 static struct value *
2065 arm_prologue_prev_register (struct frame_info *this_frame,
2069 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2070 struct arm_prologue_cache *cache;
2072 if (*this_cache == NULL)
2073 *this_cache = arm_make_prologue_cache (this_frame);
2074 cache = *this_cache;
2076 /* If we are asked to unwind the PC, then we need to return the LR
2077 instead. The prologue may save PC, but it will point into this
2078 frame's prologue, not the next frame's resume location. Also
2079 strip the saved T bit. A valid LR may have the low bit set, but
2080 a valid PC never does. */
2081 if (prev_regnum == ARM_PC_REGNUM)
2085 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2086 return frame_unwind_got_constant (this_frame, prev_regnum,
2087 arm_addr_bits_remove (gdbarch, lr));
2090 /* SP is generally not saved to the stack, but this frame is
2091 identified by the next frame's stack pointer at the time of the call.
2092 The value was already reconstructed into PREV_SP. */
2093 if (prev_regnum == ARM_SP_REGNUM)
2094 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
2096 /* The CPSR may have been changed by the call instruction and by the
2097 called function. The only bit we can reconstruct is the T bit,
2098 by checking the low bit of LR as of the call. This is a reliable
2099 indicator of Thumb-ness except for some ARM v4T pre-interworking
2100 Thumb code, which could get away with a clear low bit as long as
2101 the called function did not use bx. Guess that all other
2102 bits are unchanged; the condition flags are presumably lost,
2103 but the processor status is likely valid. */
2104 if (prev_regnum == ARM_PS_REGNUM)
2107 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
2109 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
2110 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2111 if (IS_THUMB_ADDR (lr))
2115 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
/* Default: read the register from its saved stack slot (or pass the
   request through) via the trad-frame machinery.  */
2118 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
/* The prologue-analysis frame unwinder, registered as a fallback
   sniffer (default_frame_sniffer accepts any frame).  */
2122 struct frame_unwind arm_prologue_unwind = {
2124 default_frame_unwind_stop_reason,
2125 arm_prologue_this_id,
2126 arm_prologue_prev_register,
2128 default_frame_sniffer
2131 /* Maintain a list of ARM exception table entries per objfile, similar to the
2132 list of mapping symbols. We only cache entries for standard ARM-defined
2133 personality routines; the cache will contain only the frame unwinding
2134 instructions associated with the entry (not the descriptors). */
/* Per-objfile key under which the parsed exception-index data is
   stored.  */
2136 static const struct objfile_data *arm_exidx_data_key;
/* One .ARM.exidx entry: a function start address (section-relative)
   plus its normalized unwind instructions.  */
2138 struct arm_exidx_entry
2143 typedef struct arm_exidx_entry arm_exidx_entry_s;
2144 DEF_VEC_O(arm_exidx_entry_s);
/* Per-objfile cache: one vector of entries per BFD section, indexed
   by section index.  */
2146 struct arm_exidx_data
2148 VEC(arm_exidx_entry_s) **section_maps;
/* objfile_data cleanup: free the per-section entry vectors attached
   to OBJFILE (ARG is the struct arm_exidx_data).  */
2152 arm_exidx_data_free (struct objfile *objfile, void *arg)
2154 struct arm_exidx_data *data = arg;
2157 for (i = 0; i < objfile->obfd->section_count; i++)
2158 VEC_free (arm_exidx_entry_s, data->section_maps[i]);
/* Ordering predicate for VEC_lower_bound: entries sort by start
   address.  */
2162 arm_compare_exidx_entries (const struct arm_exidx_entry *lhs,
2163 const struct arm_exidx_entry *rhs)
2165 return lhs->addr < rhs->addr;
/* Return the allocated (SEC_ALLOC) section of OBJFILE whose VMA range
   contains VMA, or NULL (in elided code) if none does.  */
2168 static struct obj_section *
2169 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2171 struct obj_section *osect;
2173 ALL_OBJFILE_OSECTIONS (objfile, osect)
2174 if (bfd_get_section_flags (objfile->obfd,
2175 osect->the_bfd_section) & SEC_ALLOC)
2177 bfd_vma start, size;
2178 start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
2179 size = bfd_get_section_size (osect->the_bfd_section);
2181 if (start <= vma && vma < start + size)
2188 /* Parse contents of exception table and exception index sections
2189 of OBJFILE, and fill in the exception table entry cache.
2191 For each entry that refers to a standard ARM-defined personality
2192 routine, extract the frame unwinding instructions (from either
2193 the index or the table section). The unwinding instructions
2195 - extracting them from the rest of the table data
2196 - converting to host endianness
2197 - appending the implicit 0xb0 ("Finish") code
2199 The extracted and normalized instructions are stored for later
2200 retrieval by the arm_find_exidx_entry routine. */
/* New-objfile observer: parse OBJFILE's .ARM.exidx/.ARM.extab
   sections and populate the arm_exidx_data cache, per the ARM EHABI.
   Each index entry yields the section-relative function address and
   a normalized byte string of unwind opcodes (host-endian, with an
   implicit 0xb0 "Finish" appended).  */
2203 arm_exidx_new_objfile (struct objfile *objfile)
2205 struct cleanup *cleanups;
2206 struct arm_exidx_data *data;
2207 asection *exidx, *extab;
2208 bfd_vma exidx_vma = 0, extab_vma = 0;
2209 bfd_size_type exidx_size = 0, extab_size = 0;
2210 gdb_byte *exidx_data = NULL, *extab_data = NULL;
2213 /* If we've already touched this file, do nothing. */
2214 if (!objfile || objfile_data (objfile, arm_exidx_data_key) != NULL)
2216 cleanups = make_cleanup (null_cleanup, NULL);
2218 /* Read contents of exception table and index. */
2219 exidx = bfd_get_section_by_name (objfile->obfd, ".ARM.exidx");
2222 exidx_vma = bfd_section_vma (objfile->obfd, exidx);
2223 exidx_size = bfd_get_section_size (exidx);
2224 exidx_data = xmalloc (exidx_size);
2225 make_cleanup (xfree, exidx_data);
2227 if (!bfd_get_section_contents (objfile->obfd, exidx,
2228 exidx_data, 0, exidx_size))
2230 do_cleanups (cleanups);
2235 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2238 extab_vma = bfd_section_vma (objfile->obfd, extab);
2239 extab_size = bfd_get_section_size (extab);
2240 extab_data = xmalloc (extab_size);
2241 make_cleanup (xfree, extab_data);
2243 if (!bfd_get_section_contents (objfile->obfd, extab,
2244 extab_data, 0, extab_size))
2246 do_cleanups (cleanups);
2251 /* Allocate exception table data structure. */
2252 data = OBSTACK_ZALLOC (&objfile->objfile_obstack, struct arm_exidx_data);
2253 set_objfile_data (objfile, arm_exidx_data_key, data);
2254 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
2255 objfile->obfd->section_count,
2256 VEC(arm_exidx_entry_s) *);
2258 /* Fill in exception table. */
/* Each index entry is two 32-bit words: a prel31 function address
   and either unwind data or a prel31 pointer into .ARM.extab.  */
2259 for (i = 0; i < exidx_size / 8; i++)
2261 struct arm_exidx_entry new_exidx_entry;
2262 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8);
2263 bfd_vma val = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8 + 4);
2264 bfd_vma addr = 0, word = 0;
2265 int n_bytes = 0, n_words = 0;
2266 struct obj_section *sec;
2267 gdb_byte *entry = NULL;
2269 /* Extract address of start of function. */
/* Sign-extend the prel31 field and relocate it relative to the
   entry's own position.  */
2270 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2271 idx += exidx_vma + i * 8;
2273 /* Find section containing function and compute section offset. */
2274 sec = arm_obj_section_from_vma (objfile, idx);
2277 idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);
2279 /* Determine address of exception table entry. */
2282 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2284 else if ((val & 0xff000000) == 0x80000000)
2286 /* Exception table entry embedded in .ARM.exidx
2287 -- must be short form. */
2291 else if (!(val & 0x80000000))
2293 /* Exception table entry in .ARM.extab. */
2294 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2295 addr += exidx_vma + i * 8 + 4;
2297 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_size)
2299 word = bfd_h_get_32 (objfile->obfd,
2300 extab_data + addr - extab_vma);
/* 0x80: short form (Su16); 0x81/0x82: long form (Lu16/Lu32) with
   an additional word count in bits 16-23.  */
2303 if ((word & 0xff000000) == 0x80000000)
2308 else if ((word & 0xff000000) == 0x81000000
2309 || (word & 0xff000000) == 0x82000000)
2313 n_words = ((word >> 16) & 0xff);
2315 else if (!(word & 0x80000000))
2318 struct obj_section *pers_sec;
2319 int gnu_personality = 0;
2321 /* Custom personality routine. */
2322 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2323 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2325 /* Check whether we've got one of the variants of the
2326 GNU personality routines. */
2327 pers_sec = arm_obj_section_from_vma (objfile, pers);
2330 static const char *personality[] =
2332 "__gcc_personality_v0",
2333 "__gxx_personality_v0",
2334 "__gcj_personality_v0",
2335 "__gnu_objc_personality_v0",
2339 CORE_ADDR pc = pers + obj_section_offset (pers_sec);
2342 for (k = 0; personality[k]; k++)
2343 if (lookup_minimal_symbol_by_pc_name
2344 (pc, personality[k], objfile))
2346 gnu_personality = 1;
2351 /* If so, the next word contains a word count in the high
2352 byte, followed by the same unwind instructions as the
2353 pre-defined forms. */
2355 && addr + 4 <= extab_vma + extab_size)
2357 word = bfd_h_get_32 (objfile->obfd,
2358 extab_data + addr - extab_vma);
2361 n_words = ((word >> 24) & 0xff);
2367 /* Sanity check address. */
2369 if (addr < extab_vma || addr + 4 * n_words > extab_vma + extab_size)
2370 n_words = n_bytes = 0;
2372 /* The unwind instructions reside in WORD (only the N_BYTES least
2373 significant bytes are valid), followed by N_WORDS words in the
2374 extab section starting at ADDR. */
2375 if (n_bytes || n_words)
2377 gdb_byte *p = entry = obstack_alloc (&objfile->objfile_obstack,
2378 n_bytes + n_words * 4 + 1);
/* Copy the leading opcode bytes from WORD, most significant valid
   byte first.  */
2381 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2385 word = bfd_h_get_32 (objfile->obfd,
2386 extab_data + addr - extab_vma);
2389 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2390 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2391 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2392 *p++ = (gdb_byte) (word & 0xff);
2395 /* Implied "Finish" to terminate the list. */
2399 /* Push entry onto vector. They are guaranteed to always
2400 appear in order of increasing addresses. */
2401 new_exidx_entry.addr = idx;
2402 new_exidx_entry.entry = entry;
2403 VEC_safe_push (arm_exidx_entry_s,
2404 data->section_maps[sec->the_bfd_section->index],
2408 do_cleanups (cleanups);
2411 /* Search for the exception table entry covering MEMADDR. If one is found,
2412 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2413 set *START to the start of the region covered by this entry. */
2416 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2418 struct obj_section *sec;
2420 sec = find_pc_section (memaddr);
2423 struct arm_exidx_data *data;
2424 VEC(arm_exidx_entry_s) *map;
/* The per-section maps are keyed on section-relative addresses, so
   subtract the section base before searching. */
2425 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2428 data = objfile_data (sec->objfile, arm_exidx_data_key);
2431 map = data->section_maps[sec->the_bfd_section->index];
2432 if (!VEC_empty (arm_exidx_entry_s, map))
2434 struct arm_exidx_entry *map_sym;
/* Binary-search for the insertion point of MAP_KEY in the sorted map. */
2436 idx = VEC_lower_bound (arm_exidx_entry_s, map, &map_key,
2437 arm_compare_exidx_entries);
2439 /* VEC_lower_bound finds the earliest ordered insertion
2440 point. If the following symbol starts at this exact
2441 address, we use that; otherwise, the preceding
2442 exception table entry covers this address. */
2443 if (idx < VEC_length (arm_exidx_entry_s, map))
2445 map_sym = VEC_index (arm_exidx_entry_s, map, idx);
2446 if (map_sym->addr == map_key.addr)
2449 *start = map_sym->addr + obj_section_addr (sec);
2450 return map_sym->entry;
/* Fall back to the entry just before the insertion point, which
   covers the region containing MEMADDR. */
2456 map_sym = VEC_index (arm_exidx_entry_s, map, idx - 1);
2458 *start = map_sym->addr + obj_section_addr (sec);
2459 return map_sym->entry;
2468 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2469 instruction list from the ARM exception table entry ENTRY, allocate and
2470 return a prologue cache structure describing how to unwind this frame.
2472 Return NULL if the unwinding instruction list contains a "spare",
2473 "reserved" or "refuse to unwind" instruction as defined in section
2474 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2475 for the ARM Architecture" document. */
2477 static struct arm_prologue_cache *
2478 arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2483 struct arm_prologue_cache *cache;
2484 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2485 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
/* VSP is the EHABI "virtual stack pointer": it starts at this frame's
   SP and is advanced as pop instructions are interpreted.  */
2491 /* Whenever we reload SP, we actually have to retrieve its
2492 actual value in the current frame. */
2495 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2497 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2498 vsp = get_frame_register_unsigned (this_frame, reg);
2502 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
2503 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
/* Main decode loop: each byte (or multi-byte sequence) at *ENTRY is
   one EHABI frame-unwinding instruction.  */
2509 /* Decode next unwind instruction. */
/* 00xxxxxx: vsp += (xxxxxx << 2) + 4.  */
2512 if ((insn & 0xc0) == 0)
2514 int offset = insn & 0x3f;
2515 vsp += (offset << 2) + 4;
/* 01xxxxxx: vsp -= (xxxxxx << 2) + 4.  */
2517 else if ((insn & 0xc0) == 0x40)
2519 int offset = insn & 0x3f;
2520 vsp -= (offset << 2) + 4;
/* 1000iiii iiiiiiii: pop r4-r15 under a 12-bit mask.  */
2522 else if ((insn & 0xf0) == 0x80)
2524 int mask = ((insn & 0xf) << 8) | *entry++;
2527 /* The special case of an all-zero mask identifies
2528 "Refuse to unwind". We return NULL to fall back
2529 to the prologue analyzer. */
2533 /* Pop registers r4..r15 under mask. */
2534 for (i = 0; i < 12; i++)
2535 if (mask & (1 << i))
2537 cache->saved_regs[4 + i].addr = vsp;
2541 /* Special-case popping SP -- we need to reload vsp. */
2542 if (mask & (1 << (ARM_SP_REGNUM - 4)))
/* 1001nnnn: set vsp = r[nnnn] (nnnn != 13, 15).  */
2545 else if ((insn & 0xf0) == 0x90)
2547 int reg = insn & 0xf;
2549 /* Reserved cases. */
2550 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2553 /* Set SP from another register and mark VSP for reload. */
2554 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
/* 1010xnnn: pop r4-r[4+nnn], plus r14 if x is set.  */
2557 else if ((insn & 0xf0) == 0xa0)
2559 int count = insn & 0x7;
2560 int pop_lr = (insn & 0x8) != 0;
2563 /* Pop r4..r[4+count]. */
2564 for (i = 0; i <= count; i++)
2566 cache->saved_regs[4 + i].addr = vsp;
2570 /* If indicated by flag, pop LR as well. */
2573 cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
/* 10110000: Finish -- restore PC from LR if not already restored.  */
2577 else if (insn == 0xb0)
2579 /* We could only have updated PC by popping into it; if so, it
2580 will show up as address. Otherwise, copy LR into PC. */
2581 if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
2582 cache->saved_regs[ARM_PC_REGNUM]
2583 = cache->saved_regs[ARM_LR_REGNUM];
/* 10110001 0000iiii: pop r0-r3 under mask.  */
2588 else if (insn == 0xb1)
2590 int mask = *entry++;
2593 /* All-zero mask and mask >= 16 is "spare". */
2594 if (mask == 0 || mask >= 16)
2597 /* Pop r0..r3 under mask. */
2598 for (i = 0; i < 4; i++)
2599 if (mask & (1 << i))
2601 cache->saved_regs[i].addr = vsp;
/* 10110010 uleb128: vsp += 0x204 + (uleb128 << 2).  */
2605 else if (insn == 0xb2)
2607 ULONGEST offset = 0;
/* Decode the ULEB128-encoded offset, 7 bits per byte, with the
   high bit of each byte acting as a continuation flag. */
2612 offset |= (*entry & 0x7f) << shift;
2615 while (*entry++ & 0x80);
2617 vsp += 0x204 + (offset << 2);
/* 10110011 sssscccc: pop VFP D[ssss]-D[ssss+cccc] (FSTMFDX form).  */
2619 else if (insn == 0xb3)
2621 int start = *entry >> 4;
2622 int count = (*entry++) & 0xf;
2625 /* Only registers D0..D15 are valid here. */
2626 if (start + count >= 16)
2629 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2630 for (i = 0; i <= count; i++)
2632 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2636 /* Add an extra 4 bytes for FSTMFDX-style stack. */
/* 10111nnn: pop VFP D[8]-D[8+nnn] (FSTMFDX form).  */
2639 else if ((insn & 0xf8) == 0xb8)
2641 int count = insn & 0x7;
2644 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2645 for (i = 0; i <= count; i++)
2647 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2651 /* Add an extra 4 bytes for FSTMFDX-style stack. */
/* 11000110 sssscccc: pop iWMMXt WR[ssss]-WR[ssss+cccc].  */
2654 else if (insn == 0xc6)
2656 int start = *entry >> 4;
2657 int count = (*entry++) & 0xf;
2660 /* Only registers WR0..WR15 are valid. */
2661 if (start + count >= 16)
2664 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2665 for (i = 0; i <= count; i++)
2667 cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
/* 11000111 0000iiii: pop iWMMXt WCGR0-WCGR3 under mask.  */
2671 else if (insn == 0xc7)
2673 int mask = *entry++;
2676 /* All-zero mask and mask >= 16 is "spare". */
2677 if (mask == 0 || mask >= 16)
2680 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2681 for (i = 0; i < 4; i++)
2682 if (mask & (1 << i))
2684 cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
/* 11000nnn: pop iWMMXt WR[10]-WR[10+nnn].  */
2688 else if ((insn & 0xf8) == 0xc0)
2690 int count = insn & 0x7;
2693 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2694 for (i = 0; i <= count; i++)
2696 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
/* 11001000 sssscccc: pop VFP D[16+ssss]-D[16+ssss+cccc].  */
2700 else if (insn == 0xc8)
2702 int start = *entry >> 4;
2703 int count = (*entry++) & 0xf;
2706 /* Only registers D0..D31 are valid. */
2707 if (start + count >= 16)
2710 /* Pop VFP double-precision registers
2711 D[16+start]..D[16+start+count]. */
2712 for (i = 0; i <= count; i++)
2714 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
/* 11001001 sssscccc: pop VFP D[ssss]-D[ssss+cccc] (FSTMFDD form).  */
2718 else if (insn == 0xc9)
2720 int start = *entry >> 4;
2721 int count = (*entry++) & 0xf;
2724 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2725 for (i = 0; i <= count; i++)
2727 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
/* 11010nnn: pop VFP D[8]-D[8+nnn] (FSTMFDD form, no extra word).  */
2731 else if ((insn & 0xf8) == 0xd0)
2733 int count = insn & 0x7;
2736 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2737 for (i = 0; i <= count; i++)
2739 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2745 /* Everything else is "spare". */
2750 /* If we restore SP from a register, assume this was the frame register.
2751 Otherwise just fall back to SP as frame register. */
2752 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2753 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2755 cache->framereg = ARM_SP_REGNUM;
2757 /* Determine offset to previous frame. */
2759 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2761 /* We already got the previous SP. */
2762 cache->prev_sp = vsp;
2767 /* Unwinding via ARM exception table entries. Note that the sniffer
2768 already computes a filled-in prologue cache, which is then used
2769 with the same arm_prologue_this_id and arm_prologue_prev_register
2770 routines also used for prologue-parsing based unwinding. */
2773 arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2774 struct frame_info *this_frame,
2775 void **this_prologue_cache)
2777 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2778 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2779 CORE_ADDR addr_in_block, exidx_region, func_start;
2780 struct arm_prologue_cache *cache;
2783 /* See if we have an ARM exception table entry covering this address. */
2784 addr_in_block = get_frame_address_in_block (this_frame);
2785 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2789 /* The ARM exception table does not describe unwind information
2790 for arbitrary PC values, but is guaranteed to be correct only
2791 at call sites. We have to decide here whether we want to use
2792 ARM exception table information for this frame, or fall back
2793 to using prologue parsing. (Note that if we have DWARF CFI,
2794 this sniffer isn't even called -- CFI is always preferred.)
2796 Before we make this decision, however, we check whether we
2797 actually have *symbol* information for the current frame.
2798 If not, prologue parsing would not work anyway, so we might
2799 as well use the exception table and hope for the best. */
2800 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2804 /* If the next frame is "normal", we are at a call site in this
2805 frame, so exception information is guaranteed to be valid. */
2806 if (get_next_frame (this_frame)
2807 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2810 /* We also assume exception information is valid if we're currently
2811 blocked in a system call. The system library is supposed to
2812 ensure this, so that e.g. pthread cancellation works. */
2813 if (arm_frame_is_thumb (this_frame))
/* Thumb mode: check for a 16-bit SVC instruction just before PC. */
2817 if (safe_read_memory_integer (get_frame_pc (this_frame) - 2, 2,
2818 byte_order_for_code, &insn)
2819 && (insn & 0xff00) == 0xdf00 /* svc */)
/* ARM mode: check for a 32-bit SVC instruction just before PC. */
2826 if (safe_read_memory_integer (get_frame_pc (this_frame) - 4, 4,
2827 byte_order_for_code, &insn)
2828 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2832 /* Bail out if we don't know that exception information is valid. */
2836 /* The ARM exception index does not mark the *end* of the region
2837 covered by the entry, and some functions will not have any entry.
2838 To correctly recognize the end of the covered region, the linker
2839 should have inserted dummy records with a CANTUNWIND marker.
2841 Unfortunately, current versions of GNU ld do not reliably do
2842 this, and thus we may have found an incorrect entry above.
2843 As a (temporary) sanity check, we only use the entry if it
2844 lies *within* the bounds of the function. Note that this check
2845 might reject perfectly valid entries that just happen to cover
2846 multiple functions; therefore this check ought to be removed
2847 once the linker is fixed. */
2848 if (func_start > exidx_region)
2852 /* Decode the list of unwinding instructions into a prologue cache.
2853 Note that this may fail due to e.g. a "refuse to unwind" code. */
2854 cache = arm_exidx_fill_cache (this_frame, entry)
2858 *this_prologue_cache = cache;
/* Frame unwinder driven by ARM exception table (.ARM.exidx) entries.
   Shares this_id/prev_register implementations with the prologue
   analyzer; only the sniffer differs. */
2862 struct frame_unwind arm_exidx_unwind = {
2864 default_frame_unwind_stop_reason,
2865 arm_prologue_this_id,
2866 arm_prologue_prev_register,
2868 arm_exidx_unwind_sniffer
/* Build a minimal prologue cache for a stub frame (e.g. a PLT entry):
   no registers are saved, and the previous SP equals the current SP. */
2871 static struct arm_prologue_cache *
2872 arm_make_stub_cache (struct frame_info *this_frame)
2874 struct arm_prologue_cache *cache;
2876 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2877 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2879 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2884 /* Our frame ID for a stub frame is the current SP and LR. */
2887 arm_stub_this_id (struct frame_info *this_frame,
2889 struct frame_id *this_id)
2891 struct arm_prologue_cache *cache;
/* Lazily build the stub cache on first use.  */
2893 if (*this_cache == NULL)
2894 *this_cache = arm_make_stub_cache (this_frame);
2895 cache = *this_cache;
2897 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
/* Sniffer for the stub unwinder: accept the frame if PC lies in the
   PLT section, or if the code at PC cannot be read at all (in which
   case prologue analysis would fail anyway). */
2901 arm_stub_unwind_sniffer (const struct frame_unwind *self,
2902 struct frame_info *this_frame,
2903 void **this_prologue_cache)
2905 CORE_ADDR addr_in_block;
2908 addr_in_block = get_frame_address_in_block (this_frame);
2909 if (in_plt_section (addr_in_block, NULL)
2910 /* We also use the stub winder if the target memory is unreadable
2911 to avoid having the prologue unwinder trying to read it. */
2912 || target_read_memory (get_frame_pc (this_frame), dummy, 4) != 0)
/* Frame unwinder for stub frames (PLT entries and unreadable code). */
2918 struct frame_unwind arm_stub_unwind = {
2920 default_frame_unwind_stop_reason,
2922 arm_prologue_prev_register,
2924 arm_stub_unwind_sniffer
/* Return the frame base address for THIS_FRAME: the previous frame's
   SP minus this frame's size, computed from the prologue cache. */
2928 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
2930 struct arm_prologue_cache *cache;
/* Lazily build the prologue cache on first use.  */
2932 if (*this_cache == NULL)
2933 *this_cache = arm_make_prologue_cache (this_frame);
2934 cache = *this_cache;
2936 return cache->prev_sp - cache->framesize;
/* Frame base handler for prologue-analyzed frames; the same address
   serves as frame base, locals base and args base. */
2939 struct frame_base arm_normal_base = {
2940 &arm_prologue_unwind,
2941 arm_normal_frame_base,
2942 arm_normal_frame_base,
2943 arm_normal_frame_base
2946 /* Assuming THIS_FRAME is a dummy, return the frame ID of that
2947 dummy frame. The frame ID's base needs to match the TOS value
2948 saved by save_dummy_frame_tos() and returned from
2949 arm_push_dummy_call, and the PC needs to match the dummy frame's
2952 static struct frame_id
2953 arm_dummy_id (struct gdbarch *gdbarch, struct frame_info *this_frame)
/* Build the ID from the current SP (the TOS saved at call setup)
   and this frame's PC. */
2955 return frame_id_build (get_frame_register_unsigned (this_frame,
2957 get_frame_pc (this_frame));
2960 /* Given THIS_FRAME, find the previous frame's resume PC (which will
2961 be used to construct the previous frame's ID, after looking up the
2962 containing function). */
2965 arm_unwind_pc (struct gdbarch *gdbarch, struct frame_info *this_frame)
/* Strip the Thumb bit (and any other non-address bits) from the
   unwound PC value. */
2968 pc = frame_unwind_register_unsigned (this_frame, ARM_PC_REGNUM);
2969 return arm_addr_bits_remove (gdbarch, pc);
/* Return the previous frame's stack pointer, unwound from THIS_FRAME. */
2973 arm_unwind_sp (struct gdbarch *gdbarch, struct frame_info *this_frame)
2975 return frame_unwind_register_unsigned (this_frame, ARM_SP_REGNUM);
/* DWARF CFI hook: supply values for registers the CFI does not
   describe directly.  PC is derived from the return address (LR
   column) with the Thumb bit stripped; the CPSR T bit is
   reconstructed from the Thumb bit of the unwound LR. */
2978 static struct value *
2979 arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
2982 struct gdbarch * gdbarch = get_frame_arch (this_frame);
2984 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
2989 /* The PC is normally copied from the return column, which
2990 describes saves of LR. However, that version may have an
2991 extra bit set to indicate Thumb state. The bit is not
2993 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2994 return frame_unwind_got_constant (this_frame, regnum,
2995 arm_addr_bits_remove (gdbarch, lr));
2998 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
2999 cpsr = get_frame_register_unsigned (this_frame, regnum);
3000 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3001 if (IS_THUMB_ADDR (lr))
3005 return frame_unwind_got_constant (this_frame, regnum, cpsr);
/* Any other register reaching here is a bug in the init_reg hook.  */
3008 internal_error (__FILE__, __LINE__,
3009 _("Unexpected register %d"), regnum);
/* Initialize the DWARF CFI register rules for REGNUM: route PC and
   CPSR through arm_dwarf2_prev_register, and define SP as the CFA. */
3014 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3015 struct dwarf2_frame_state_reg *reg,
3016 struct frame_info *this_frame)
3022 reg->how = DWARF2_FRAME_REG_FN;
3023 reg->loc.fn = arm_dwarf2_prev_register;
3026 reg->how = DWARF2_FRAME_REG_CFA;
3031 /* Return true if we are in the function's epilogue, i.e. after the
3032 instruction that destroyed the function's stack frame. */
3035 thumb_in_function_epilogue_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3037 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3038 unsigned int insn, insn2;
3039 int found_return = 0, found_stack_adjust = 0;
3040 CORE_ADDR func_start, func_end;
/* Without symbol information we cannot delimit the function, so we
   cannot claim to be in its epilogue. */
3044 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3047 /* The epilogue is a sequence of instructions along the following lines:
3049 - add stack frame size to SP or FP
3050 - [if frame pointer used] restore SP from FP
3051 - restore registers from SP [may include PC]
3052 - a return-type instruction [if PC wasn't already restored]
3054 In a first pass, we scan forward from the current PC and verify the
3055 instructions we find as compatible with this sequence, ending in a
3058 However, this is not sufficient to distinguish indirect function calls
3059 within a function from indirect tail calls in the epilogue in some cases.
3060 Therefore, if we didn't already find any SP-changing instruction during
3061 forward scan, we add a backward scanning heuristic to ensure we actually
3062 are in the epilogue. */
3065 while (scan_pc < func_end && !found_return)
3067 if (target_read_memory (scan_pc, buf, 2))
3071 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
/* 16-bit Thumb return and stack-adjust forms.  */
3073 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3075 else if (insn == 0x46f7) /* mov pc, lr */
3077 else if (insn == 0x46bd) /* mov sp, r7 */
3078 found_stack_adjust = 1;
3079 else if ((insn & 0xff00) == 0xb000) /* add sp, imm or sub sp, imm */
3080 found_stack_adjust = 1;
3081 else if ((insn & 0xfe00) == 0xbc00) /* pop <registers> */
3083 found_stack_adjust = 1;
3084 if (insn & 0x0100) /* <registers> include PC. */
3087 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
/* Fetch the second halfword of the 32-bit encoding.  */
3089 if (target_read_memory (scan_pc, buf, 2))
3093 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3095 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3097 found_stack_adjust = 1;
3098 if (insn2 & 0x8000) /* <registers> include PC. */
3101 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3102 && (insn2 & 0x0fff) == 0x0b04)
3104 found_stack_adjust = 1;
3105 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3108 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3109 && (insn2 & 0x0e00) == 0x0a00)
3110 found_stack_adjust = 1;
3121 /* Since any instruction in the epilogue sequence, with the possible
3122 exception of return itself, updates the stack pointer, we need to
3123 scan backwards for at most one instruction. Try either a 16-bit or
3124 a 32-bit instruction. This is just a heuristic, so we do not worry
3125 too much about false positives. */
3127 if (!found_stack_adjust)
3129 if (pc - 4 < func_start)
3131 if (target_read_memory (pc - 4, buf, 4))
/* INSN/INSN2 hold the two halfwords preceding PC; INSN2 alone is
   the candidate 16-bit instruction, INSN:INSN2 the 32-bit one. */
3134 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3135 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3137 if (insn2 == 0x46bd) /* mov sp, r7 */
3138 found_stack_adjust = 1;
3139 else if ((insn2 & 0xff00) == 0xb000) /* add sp, imm or sub sp, imm */
3140 found_stack_adjust = 1;
3141 else if ((insn2 & 0xff00) == 0xbc00) /* pop <registers> without PC */
3142 found_stack_adjust = 1;
3143 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3144 found_stack_adjust = 1;
3145 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3146 && (insn2 & 0x0fff) == 0x0b04)
3147 found_stack_adjust = 1;
3148 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3149 && (insn2 & 0x0e00) == 0x0a00)
3150 found_stack_adjust = 1;
3153 return found_stack_adjust;
3156 /* Return true if we are in the function's epilogue, i.e. after the
3157 instruction that destroyed the function's stack frame. */
3160 arm_in_function_epilogue_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3162 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3164 int found_return, found_stack_adjust;
3165 CORE_ADDR func_start, func_end;
/* Delegate to the Thumb-specific scanner when PC is Thumb code.  */
3167 if (arm_pc_is_thumb (gdbarch, pc))
3168 return thumb_in_function_epilogue_p (gdbarch, pc)
3170 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3173 /* We are in the epilogue if the previous instruction was a stack
3174 adjustment and the next instruction is a possible return (bx, mov
3175 pc, or pop). We could have to scan backwards to find the stack
3176 adjustment, or forwards to find the return, but this is a decent
3177 approximation. First scan forwards. */
3180 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
/* Skip unconditional (NV-space) encodings; they are not returns.  */
3181 if (bits (insn, 28, 31) != INST_NV)
3183 if ((insn & 0x0ffffff0) == 0x012fff10)
/* BX <reg>.  */
3186 else if ((insn & 0x0ffffff0) == 0x01a0f000)
/* MOV PC, <reg>.  */
3189 else if ((insn & 0x0fff0000) == 0x08bd0000
3190 && (insn & 0x0000c000) != 0)
3191 /* POP (LDMIA), including PC or LR. */
3198 /* Scan backwards. This is just a heuristic, so do not worry about
3199 false positives from mode changes. */
3201 if (pc < func_start + 4)
3204 found_stack_adjust = 0;
3205 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
3206 if (bits (insn, 28, 31) != INST_NV)
3208 if ((insn & 0x0df0f000) == 0x0080d000)
3209 /* ADD SP (register or immediate). */
3210 found_stack_adjust = 1;
3211 else if ((insn & 0x0df0f000) == 0x0040d000)
3212 /* SUB SP (register or immediate). */
3213 found_stack_adjust = 1;
3214 else if ((insn & 0x0ffffff0) == 0x01a0d000)
/* MOV SP, <reg>.  */
3216 found_stack_adjust = 1;
3217 else if ((insn & 0x0fff0000) == 0x08bd0000)
/* POP (LDMIA SP!).  */
3219 found_stack_adjust = 1;
3222 if (found_stack_adjust)
3229 /* When arguments must be pushed onto the stack, they go on in reverse
3230 order. The code below implements a FILO (stack) to do this. */
/* Link to the previously pushed item -- presumably NULL at the bottom
   of the stack; confirm against push_stack_item. */
3235 struct stack_item *prev;
/* Push a copy of the LEN bytes at CONTENTS onto the stack whose top is
   PREV; the item and its data buffer are heap-allocated (xmalloc), so
   pop_stack_item owns freeing them. */
3239 static struct stack_item *
3240 push_stack_item (struct stack_item *prev, const void *contents, int len)
3242 struct stack_item *si;
3243 si = xmalloc (sizeof (struct stack_item));
3244 si->data = xmalloc (len);
3247 memcpy (si->data, contents, len);
/* Pop the top item SI off the stack, freeing it, and return the new
   top of stack. */
3251 static struct stack_item *
3252 pop_stack_item (struct stack_item *si)
3254 struct stack_item *dead = si;
3262 /* Return the alignment (in bytes) of the given type. */
3265 arm_type_align (struct type *t)
3271 t = check_typedef (t);
3272 switch (TYPE_CODE (t))
3275 /* Should never happen. */
3276 internal_error (__FILE__, __LINE__, _("unknown type alignment"));
/* Scalar types are aligned to their own size.  */
3280 case TYPE_CODE_ENUM:
3284 case TYPE_CODE_RANGE:
3285 case TYPE_CODE_BITSTRING:
3287 case TYPE_CODE_CHAR:
3288 case TYPE_CODE_BOOL:
3289 return TYPE_LENGTH (t);
3291 case TYPE_CODE_ARRAY:
3292 case TYPE_CODE_COMPLEX:
3293 /* TODO: What about vector types? */
3294 return arm_type_align (TYPE_TARGET_TYPE (t));
/* Aggregates are aligned to their most-aligned member.  */
3296 case TYPE_CODE_STRUCT:
3297 case TYPE_CODE_UNION:
3299 for (n = 0; n < TYPE_NFIELDS (t); n++)
3301 falign = arm_type_align (TYPE_FIELD_TYPE (t, n));
3309 /* Possible base types for a candidate for passing and returning in
/* Enumerators include VFP_CPRC_UNKNOWN, VFP_CPRC_SINGLE,
   VFP_CPRC_DOUBLE, VFP_CPRC_VEC64 and VFP_CPRC_VEC128, as used by
   arm_vfp_cprc_unit_length and arm_vfp_cprc_sub_candidate below. */
3312 enum arm_vfp_cprc_base_type
3321 /* The length of one element of base type B. */
3324 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3328 case VFP_CPRC_SINGLE:
3330 case VFP_CPRC_DOUBLE:
3332 case VFP_CPRC_VEC64:
3334 case VFP_CPRC_VEC128:
/* Any other value is a caller bug.  */
3337 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3342 /* The character ('s', 'd' or 'q') for the type of VFP register used
3343 for passing base type B. */
3346 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3350 case VFP_CPRC_SINGLE:
3352 case VFP_CPRC_DOUBLE:
3354 case VFP_CPRC_VEC64:
3356 case VFP_CPRC_VEC128:
/* Any other value is a caller bug.  */
3359 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3364 /* Determine whether T may be part of a candidate for passing and
3365 returning in VFP registers, ignoring the limit on the total number
3366 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3367 classification of the first valid component found; if it is not
3368 VFP_CPRC_UNKNOWN, all components must have the same classification
3369 as *BASE_TYPE. If it is found that T contains a type not permitted
3370 for passing and returning in VFP registers, a type differently
3371 classified from *BASE_TYPE, or two types differently classified
3372 from each other, return -1, otherwise return the total number of
3373 base-type elements found (possibly 0 in an empty structure or
3374 array). Vectors and complex types are not currently supported,
3375 matching the generic AAPCS support. */
3378 arm_vfp_cprc_sub_candidate (struct type *t,
3379 enum arm_vfp_cprc_base_type *base_type)
3381 t = check_typedef (t);
3382 switch (TYPE_CODE (t))
/* Floats classify by size: 4 bytes -> single, 8 bytes -> double.  */
3385 switch (TYPE_LENGTH (t))
3388 if (*base_type == VFP_CPRC_UNKNOWN)
3389 *base_type = VFP_CPRC_SINGLE;
3390 else if (*base_type != VFP_CPRC_SINGLE)
3395 if (*base_type == VFP_CPRC_UNKNOWN)
3396 *base_type = VFP_CPRC_DOUBLE;
3397 else if (*base_type != VFP_CPRC_DOUBLE)
3406 case TYPE_CODE_ARRAY:
/* An array contributes its element count times the element's
   contribution; classify via the element type first.  */
3410 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t), base_type);
3413 if (TYPE_LENGTH (t) == 0)
3415 gdb_assert (count == 0);
3418 else if (count == 0)
3420 unitlen = arm_vfp_cprc_unit_length (*base_type);
3421 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3422 return TYPE_LENGTH (t) / unitlen;
3426 case TYPE_CODE_STRUCT:
/* A struct's element count is the sum over its fields; the struct
   must be exactly the size of those elements (no padding).  */
3431 for (i = 0; i < TYPE_NFIELDS (t); i++)
3433 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3435 if (sub_count == -1)
3439 if (TYPE_LENGTH (t) == 0)
3441 gdb_assert (count == 0);
3444 else if (count == 0)
3446 unitlen = arm_vfp_cprc_unit_length (*base_type);
3447 if (TYPE_LENGTH (t) != unitlen * count)
3452 case TYPE_CODE_UNION:
/* A union's element count is the maximum over its members.  */
3457 for (i = 0; i < TYPE_NFIELDS (t); i++)
3459 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3461 if (sub_count == -1)
3463 count = (count > sub_count ? count : sub_count);
3465 if (TYPE_LENGTH (t) == 0)
3467 gdb_assert (count == 0);
3470 else if (count == 0)
3472 unitlen = arm_vfp_cprc_unit_length (*base_type);
3473 if (TYPE_LENGTH (t) != unitlen * count)
3485 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3486 if passed to or returned from a non-variadic function with the VFP
3487 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3488 *BASE_TYPE to the base type for T and *COUNT to the number of
3489 elements of that base type before returning. */
3492 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3495 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3496 int c = arm_vfp_cprc_sub_candidate (t, &b);
/* The AAPCS limits a CPRC to between 1 and 4 base-type elements.  */
3497 if (c <= 0 || c > 4)
3504 /* Return 1 if the VFP ABI should be used for passing arguments to and
3505 returning values from a function of type FUNC_TYPE, 0
3509 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3511 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3512 /* Variadic functions always use the base ABI. Assume that functions
3513 without debug info are not variadic. */
3514 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3516 /* The VFP ABI is only supported as a variant of AAPCS. */
3517 if (tdep->arm_abi != ARM_ABI_AAPCS)
/* Finally, the float model must actually be VFP.  */
3519 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3522 /* We currently only support passing parameters in integer registers, which
3523 conforms with GCC's default model, and VFP argument passing following
3524 the VFP variant of AAPCS. Several other variants exist and
3525 we should probably support some of them based on the selected ABI. */
3528 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
3529 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3530 struct value **args, CORE_ADDR sp, int struct_return,
3531 CORE_ADDR struct_addr)
3533 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3537 struct stack_item *si = NULL;
3540 unsigned vfp_regs_free = (1 << 16) - 1;
3542 /* Determine the type of this function and whether the VFP ABI
3544 ftype = check_typedef (value_type (function));
3545 if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3546 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3547 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
3549 /* Set the return address. For the ARM, the return breakpoint is
3550 always at BP_ADDR. */
3551 if (arm_pc_is_thumb (gdbarch, bp_addr))
3553 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
3555 /* Walk through the list of args and determine how large a temporary
3556 stack is required. Need to take care here as structs may be
3557 passed on the stack, and we have to push them. */
3560 argreg = ARM_A1_REGNUM;
3563 /* The struct_return pointer occupies the first parameter
3564 passing register. */
3568 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
3569 gdbarch_register_name (gdbarch, argreg),
3570 paddress (gdbarch, struct_addr));
3571 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3575 for (argnum = 0; argnum < nargs; argnum++)
3578 struct type *arg_type;
3579 struct type *target_type;
3580 enum type_code typecode;
3581 const bfd_byte *val;
3583 enum arm_vfp_cprc_base_type vfp_base_type;
3585 int may_use_core_reg = 1;
3587 arg_type = check_typedef (value_type (args[argnum]));
3588 len = TYPE_LENGTH (arg_type);
3589 target_type = TYPE_TARGET_TYPE (arg_type);
3590 typecode = TYPE_CODE (arg_type);
3591 val = value_contents (args[argnum]);
3593 align = arm_type_align (arg_type);
3594 /* Round alignment up to a whole number of words. */
3595 align = (align + INT_REGISTER_SIZE - 1) & ~(INT_REGISTER_SIZE - 1);
3596 /* Different ABIs have different maximum alignments. */
3597 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3599 /* The APCS ABI only requires word alignment. */
3600 align = INT_REGISTER_SIZE;
3604 /* The AAPCS requires at most doubleword alignment. */
3605 if (align > INT_REGISTER_SIZE * 2)
3606 align = INT_REGISTER_SIZE * 2;
3610 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3618 /* Because this is a CPRC it cannot go in a core register or
3619 cause a core register to be skipped for alignment.
3620 Either it goes in VFP registers and the rest of this loop
3621 iteration is skipped for this argument, or it goes on the
3622 stack (and the stack alignment code is correct for this
3624 may_use_core_reg = 0;
3626 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3627 shift = unit_length / 4;
3628 mask = (1 << (shift * vfp_base_count)) - 1;
3629 for (regno = 0; regno < 16; regno += shift)
3630 if (((vfp_regs_free >> regno) & mask) == mask)
3639 vfp_regs_free &= ~(mask << regno);
3640 reg_scaled = regno / shift;
3641 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3642 for (i = 0; i < vfp_base_count; i++)
3646 if (reg_char == 'q')
3647 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
3648 val + i * unit_length);
3651 sprintf (name_buf, "%c%d", reg_char, reg_scaled + i);
3652 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3654 regcache_cooked_write (regcache, regnum,
3655 val + i * unit_length);
3662 /* This CPRC could not go in VFP registers, so all VFP
3663 registers are now marked as used. */
3668 /* Push stack padding for dowubleword alignment. */
3669 if (nstack & (align - 1))
3671 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3672 nstack += INT_REGISTER_SIZE;
3675 /* Doubleword aligned quantities must go in even register pairs. */
3676 if (may_use_core_reg
3677 && argreg <= ARM_LAST_ARG_REGNUM
3678 && align > INT_REGISTER_SIZE
3682 /* If the argument is a pointer to a function, and it is a
3683 Thumb function, create a LOCAL copy of the value and set
3684 the THUMB bit in it. */
3685 if (TYPE_CODE_PTR == typecode
3686 && target_type != NULL
3687 && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
3689 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
3690 if (arm_pc_is_thumb (gdbarch, regval))
3692 bfd_byte *copy = alloca (len);
3693 store_unsigned_integer (copy, len, byte_order,
3694 MAKE_THUMB_ADDR (regval));
3699 /* Copy the argument to general registers or the stack in
3700 register-sized pieces. Large arguments are split between
3701 registers and stack. */
3704 int partial_len = len < INT_REGISTER_SIZE ? len : INT_REGISTER_SIZE;
3706 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
3708 /* The argument is being passed in a general purpose
3711 = extract_unsigned_integer (val, partial_len, byte_order);
3712 if (byte_order == BFD_ENDIAN_BIG)
3713 regval <<= (INT_REGISTER_SIZE - partial_len) * 8;
3715 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
3717 gdbarch_register_name
3719 phex (regval, INT_REGISTER_SIZE));
3720 regcache_cooked_write_unsigned (regcache, argreg, regval);
3725 /* Push the arguments onto the stack. */
3727 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3729 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3730 nstack += INT_REGISTER_SIZE;
3737 /* If we have an odd number of words to push, then decrement the stack
3738 by one word now, so first stack argument will be dword aligned. */
3745 write_memory (sp, si->data, si->len);
3746 si = pop_stack_item (si);
3749 /* Finally, update the SP register. */
3750 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3756 /* Always align the frame to an 8-byte boundary. This is required on
3757 some platforms and harmless on the rest. */
3760 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3762 /* Align the stack to eight bytes. */
3763 return sp & ~ (CORE_ADDR) 7;
/* Print the mnemonic names of the FPU exception bits set in FLAGS.
   Bits 0-4 are, in order: invalid operation, divide by zero,
   overflow, underflow, inexact.  */

static void
print_fpu_flags (int flags)
{
  static const char *const flag_names[] =
    { "IVO ", "DVZ ", "OFL ", "UFL ", "INX " };
  int i;

  for (i = 0; i < 5; i++)
    if (flags & (1 << i))
      fputs (flag_names[i], stdout);
  putc ('\n', stdout);
}
3782 /* Print interesting information about the floating point processor
3783 (if present) or emulator. */
3785 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
3786 struct frame_info *frame, const char *args)
3788 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
3791 type = (status >> 24) & 127;
3792 if (status & (1 << 31))
3793 printf (_("Hardware FPU type %d\n"), type);
3795 printf (_("Software FPU type %d\n"), type);
3796 /* i18n: [floating point unit] mask */
3797 fputs (_("mask: "), stdout);
3798 print_fpu_flags (status >> 16);
3799 /* i18n: [floating point unit] flags */
3800 fputs (_("flags: "), stdout);
3801 print_fpu_flags (status);
3804 /* Construct the ARM extended floating point type. */
3805 static struct type *
3806 arm_ext_type (struct gdbarch *gdbarch)
3808 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3810 if (!tdep->arm_ext_type)
3812 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
3813 floatformats_arm_ext);
3815 return tdep->arm_ext_type;
3818 static struct type *
3819 arm_neon_double_type (struct gdbarch *gdbarch)
3821 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3823 if (tdep->neon_double_type == NULL)
3825 struct type *t, *elem;
3827 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
3829 elem = builtin_type (gdbarch)->builtin_uint8;
3830 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
3831 elem = builtin_type (gdbarch)->builtin_uint16;
3832 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
3833 elem = builtin_type (gdbarch)->builtin_uint32;
3834 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
3835 elem = builtin_type (gdbarch)->builtin_uint64;
3836 append_composite_type_field (t, "u64", elem);
3837 elem = builtin_type (gdbarch)->builtin_float;
3838 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
3839 elem = builtin_type (gdbarch)->builtin_double;
3840 append_composite_type_field (t, "f64", elem);
3842 TYPE_VECTOR (t) = 1;
3843 TYPE_NAME (t) = "neon_d";
3844 tdep->neon_double_type = t;
3847 return tdep->neon_double_type;
3850 /* FIXME: The vector types are not correctly ordered on big-endian
3851 targets. Just as s0 is the low bits of d0, d0[0] is also the low
3852 bits of d0 - regardless of what unit size is being held in d0. So
3853 the offset of the first uint8 in d0 is 7, but the offset of the
3854 first float is 4. This code works as-is for little-endian
3857 static struct type *
3858 arm_neon_quad_type (struct gdbarch *gdbarch)
3860 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3862 if (tdep->neon_quad_type == NULL)
3864 struct type *t, *elem;
3866 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
3868 elem = builtin_type (gdbarch)->builtin_uint8;
3869 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
3870 elem = builtin_type (gdbarch)->builtin_uint16;
3871 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
3872 elem = builtin_type (gdbarch)->builtin_uint32;
3873 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
3874 elem = builtin_type (gdbarch)->builtin_uint64;
3875 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
3876 elem = builtin_type (gdbarch)->builtin_float;
3877 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
3878 elem = builtin_type (gdbarch)->builtin_double;
3879 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
3881 TYPE_VECTOR (t) = 1;
3882 TYPE_NAME (t) = "neon_q";
3883 tdep->neon_quad_type = t;
3886 return tdep->neon_quad_type;
3889 /* Return the GDB type object for the "standard" data type of data in
3892 static struct type *
3893 arm_register_type (struct gdbarch *gdbarch, int regnum)
3895 int num_regs = gdbarch_num_regs (gdbarch);
3897 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
3898 && regnum >= num_regs && regnum < num_regs + 32)
3899 return builtin_type (gdbarch)->builtin_float;
3901 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
3902 && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
3903 return arm_neon_quad_type (gdbarch);
3905 /* If the target description has register information, we are only
3906 in this function so that we can override the types of
3907 double-precision registers for NEON. */
3908 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
3910 struct type *t = tdesc_register_type (gdbarch, regnum);
3912 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
3913 && TYPE_CODE (t) == TYPE_CODE_FLT
3914 && gdbarch_tdep (gdbarch)->have_neon)
3915 return arm_neon_double_type (gdbarch);
3920 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
3922 if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
3923 return builtin_type (gdbarch)->builtin_void;
3925 return arm_ext_type (gdbarch);
3927 else if (regnum == ARM_SP_REGNUM)
3928 return builtin_type (gdbarch)->builtin_data_ptr;
3929 else if (regnum == ARM_PC_REGNUM)
3930 return builtin_type (gdbarch)->builtin_func_ptr;
3931 else if (regnum >= ARRAY_SIZE (arm_register_names))
3932 /* These registers are only supported on targets which supply
3933 an XML description. */
3934 return builtin_type (gdbarch)->builtin_int0;
3936 return builtin_type (gdbarch)->builtin_uint32;
3939 /* Map a DWARF register REGNUM onto the appropriate GDB register
3943 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
3945 /* Core integer regs. */
3946 if (reg >= 0 && reg <= 15)
3949 /* Legacy FPA encoding. These were once used in a way which
3950 overlapped with VFP register numbering, so their use is
3951 discouraged, but GDB doesn't support the ARM toolchain
3952 which used them for VFP. */
3953 if (reg >= 16 && reg <= 23)
3954 return ARM_F0_REGNUM + reg - 16;
3956 /* New assignments for the FPA registers. */
3957 if (reg >= 96 && reg <= 103)
3958 return ARM_F0_REGNUM + reg - 96;
3960 /* WMMX register assignments. */
3961 if (reg >= 104 && reg <= 111)
3962 return ARM_WCGR0_REGNUM + reg - 104;
3964 if (reg >= 112 && reg <= 127)
3965 return ARM_WR0_REGNUM + reg - 112;
3967 if (reg >= 192 && reg <= 199)
3968 return ARM_WC0_REGNUM + reg - 192;
3970 /* VFP v2 registers. A double precision value is actually
3971 in d1 rather than s2, but the ABI only defines numbering
3972 for the single precision registers. This will "just work"
3973 in GDB for little endian targets (we'll read eight bytes,
3974 starting in s0 and then progressing to s1), but will be
3975 reversed on big endian targets with VFP. This won't
3976 be a problem for the new Neon quad registers; you're supposed
3977 to use DW_OP_piece for those. */
3978 if (reg >= 64 && reg <= 95)
3982 sprintf (name_buf, "s%d", reg - 64);
3983 return user_reg_map_name_to_regnum (gdbarch, name_buf,
3987 /* VFP v3 / Neon registers. This range is also used for VFP v2
3988 registers, except that it now describes d0 instead of s0. */
3989 if (reg >= 256 && reg <= 287)
3993 sprintf (name_buf, "d%d", reg - 256);
3994 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4001 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4003 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4006 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
4008 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4009 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4011 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4012 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4014 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4015 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4017 if (reg < NUM_GREGS)
4018 return SIM_ARM_R0_REGNUM + reg;
4021 if (reg < NUM_FREGS)
4022 return SIM_ARM_FP0_REGNUM + reg;
4025 if (reg < NUM_SREGS)
4026 return SIM_ARM_FPS_REGNUM + reg;
4029 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4032 /* NOTE: cagney/2001-08-20: Both convert_from_extended() and
4033 convert_to_extended() use floatformat_arm_ext_littlebyte_bigword.
4034 It is thought that this is is the floating-point register format on
4035 little-endian systems. */
4038 convert_from_extended (const struct floatformat *fmt, const void *ptr,
4039 void *dbl, int endianess)
4043 if (endianess == BFD_ENDIAN_BIG)
4044 floatformat_to_doublest (&floatformat_arm_ext_big, ptr, &d);
4046 floatformat_to_doublest (&floatformat_arm_ext_littlebyte_bigword,
4048 floatformat_from_doublest (fmt, &d, dbl);
4052 convert_to_extended (const struct floatformat *fmt, void *dbl, const void *ptr,
4057 floatformat_to_doublest (fmt, ptr, &d);
4058 if (endianess == BFD_ENDIAN_BIG)
4059 floatformat_from_doublest (&floatformat_arm_ext_big, &d, dbl);
4061 floatformat_from_doublest (&floatformat_arm_ext_littlebyte_bigword,
4066 condition_true (unsigned long cond, unsigned long status_reg)
4068 if (cond == INST_AL || cond == INST_NV)
4074 return ((status_reg & FLAG_Z) != 0);
4076 return ((status_reg & FLAG_Z) == 0);
4078 return ((status_reg & FLAG_C) != 0);
4080 return ((status_reg & FLAG_C) == 0);
4082 return ((status_reg & FLAG_N) != 0);
4084 return ((status_reg & FLAG_N) == 0);
4086 return ((status_reg & FLAG_V) != 0);
4088 return ((status_reg & FLAG_V) == 0);
4090 return ((status_reg & (FLAG_C | FLAG_Z)) == FLAG_C);
4092 return ((status_reg & (FLAG_C | FLAG_Z)) != FLAG_C);
4094 return (((status_reg & FLAG_N) == 0) == ((status_reg & FLAG_V) == 0));
4096 return (((status_reg & FLAG_N) == 0) != ((status_reg & FLAG_V) == 0));
4098 return (((status_reg & FLAG_Z) == 0)
4099 && (((status_reg & FLAG_N) == 0)
4100 == ((status_reg & FLAG_V) == 0)));
4102 return (((status_reg & FLAG_Z) != 0)
4103 || (((status_reg & FLAG_N) == 0)
4104 != ((status_reg & FLAG_V) == 0)));
/* Compute the value of the shifter operand (register RM, optionally
   shifted) of the ARM data-processing instruction INST.  CARRY is the
   current carry flag (consumed by RRX); PC_VAL is the prefetch-adjusted
   PC used when the operand register is the PC.  */
4109 static unsigned long
4110 shifted_reg_val (struct frame_info *frame, unsigned long inst, int carry,
4111 unsigned long pc_val, unsigned long status_reg)
4113 unsigned long res, shift;
4114 int rm = bits (inst, 0, 3);
4115 unsigned long shifttype = bits (inst, 5, 6);
/* Bit 4 set: the shift amount comes from register RS (PC reads as
   pc_val + 8); clear: it is a 5-bit immediate in bits 7-11.  */
4119 int rs = bits (inst, 8, 11);
4120 shift = (rs == 15 ? pc_val + 8
4121 : get_frame_register_unsigned (frame, rs)) & 0xFF;
4124 shift = bits (inst, 7, 11);
/* Reading the PC as the operand yields the prefetched value: +12
   for register-specified shifts, +8 otherwise.  */
4126 res = (rm == ARM_PC_REGNUM
4127 ? (pc_val + (bit (inst, 4) ? 12 : 8))
4128 : get_frame_register_unsigned (frame, rm));
/* Apply the shift; shifttype selects LSL, LSR, ASR or ROR/RRX.  */
4133 res = shift >= 32 ? 0 : res << shift;
4137 res = shift >= 32 ? 0 : res >> shift;
/* ASR: propagate the sign bit by complement-shift-complement.  */
4143 res = ((res & 0x80000000L)
4144 ? ~((~res) >> shift) : res >> shift);
4147 case 3: /* ROR/RRX */
4150 res = (res >> 1) | (carry ? 0x80000000L : 0);
4152 res = (res >> shift) | (res << (32 - shift));
/* Mask to 32 bits in case unsigned long is wider.  */
4156 return res & 0xffffffff;
/* Return the number of 1-bits set in VAL.  */

static int
bitcount (unsigned long val)
{
  int count = 0;

  /* Kernighan's trick: each step clears the rightmost set bit, so the
     loop runs once per set bit.  */
  while (val != 0)
    {
      val &= val - 1;
      count++;
    }

  return count;
}
/* Return the size in bytes of the complete Thumb instruction whose
   first halfword is INST1.  */

static int
thumb_insn_size (unsigned short inst1)
{
  /* A 32-bit Thumb-2 instruction has a first halfword whose top five
     bits are 0b11101, 0b11110 or 0b11111; every other encoding is a
     16-bit instruction.  */
  int is_32bit = ((inst1 & 0xe000) == 0xe000 && (inst1 & 0x1800) != 0);

  return is_32bit ? 4 : 2;
}
/* Advance the IT-block state ITSTATE past one instruction: shift the
   upcoming condition bits left by one, clearing the whole state once
   the block is exhausted.  Returns the new state.  */

static unsigned int
thumb_advance_itstate (unsigned int itstate)
{
  /* Keep IT[7:5] (the base condition); shift the mask in IT[4:0].  */
  unsigned int advanced = (itstate & 0xe0) | ((itstate << 1) & 0x1f);

  /* A zero low nibble means no instructions remain in the block.  */
  return (advanced & 0x0f) == 0 ? 0 : advanced;
}
4196 /* Find the next PC after the current instruction executes. In some
4197 cases we can not statically determine the answer (see the IT state
4198 handling in this function); in that case, a breakpoint may be
4199 inserted in addition to the returned PC, which will be used to set
4200 another breakpoint by our caller. */
4203 thumb_get_next_pc_raw (struct frame_info *frame, CORE_ADDR pc)
4205 struct gdbarch *gdbarch = get_frame_arch (frame);
4206 struct address_space *aspace = get_frame_address_space (frame);
4207 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
4208 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4209 unsigned long pc_val = ((unsigned long) pc) + 4; /* PC after prefetch */
4210 unsigned short inst1;
4211 CORE_ADDR nextpc = pc + 2; /* Default is next instruction. */
4212 unsigned long offset;
4213 ULONGEST status, itstate;
4215 nextpc = MAKE_THUMB_ADDR (nextpc);
4216 pc_val = MAKE_THUMB_ADDR (pc_val);
4218 inst1 = read_memory_unsigned_integer (pc, 2, byte_order_for_code);
4220 /* Thumb-2 conditional execution support. There are eight bits in
4221 the CPSR which describe conditional execution state. Once
4222 reconstructed (they're in a funny order), the low five bits
4223 describe the low bit of the condition for each instruction and
4224 how many instructions remain. The high three bits describe the
4225 base condition. One of the low four bits will be set if an IT
4226 block is active. These bits read as zero on earlier
   processors.  */
4228 status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
4229 itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);
4231 /* If-Then handling. On GNU/Linux, where this routine is used, we
4232 use an undefined instruction as a breakpoint. Unlike BKPT, IT
4233 can disable execution of the undefined instruction. So we might
4234 miss the breakpoint if we set it on a skipped conditional
4235 instruction. Because conditional instructions can change the
4236 flags, affecting the execution of further instructions, we may
4237 need to set two breakpoints. */
4239 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint != NULL)
4241 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4243 /* An IT instruction. Because this instruction does not
4244 modify the flags, we can accurately predict the next
4245 executed instruction. */
4246 itstate = inst1 & 0x00ff;
4247 pc += thumb_insn_size (inst1);
4249 while (itstate != 0 && ! condition_true (itstate >> 4, status))
4251 inst1 = read_memory_unsigned_integer (pc, 2,
4252 byte_order_for_code);
4253 pc += thumb_insn_size (inst1);
4254 itstate = thumb_advance_itstate (itstate);
4257 return MAKE_THUMB_ADDR (pc);
4259 else if (itstate != 0)
4261 /* We are in a conditional block. Check the condition. */
4262 if (! condition_true (itstate >> 4, status))
4264 /* Advance to the next executed instruction. */
4265 pc += thumb_insn_size (inst1);
4266 itstate = thumb_advance_itstate (itstate);
4268 while (itstate != 0 && ! condition_true (itstate >> 4, status))
4270 inst1 = read_memory_unsigned_integer (pc, 2,
4271 byte_order_for_code);
4272 pc += thumb_insn_size (inst1);
4273 itstate = thumb_advance_itstate (itstate);
4276 return MAKE_THUMB_ADDR (pc);
4278 else if ((itstate & 0x0f) == 0x08)
4280 /* This is the last instruction of the conditional
4281 block, and it is executed. We can handle it normally
4282 because the following instruction is not conditional,
4283 and we must handle it normally because it is
4284 permitted to branch. Fall through. */
4290 /* There are conditional instructions after this one.
4291 If this instruction modifies the flags, then we can
4292 not predict what the next executed instruction will
4293 be. Fortunately, this instruction is architecturally
4294 forbidden to branch; we know it will fall through.
4295 Start by skipping past it. */
4296 pc += thumb_insn_size (inst1);
4297 itstate = thumb_advance_itstate (itstate);
4299 /* Set a breakpoint on the following instruction. */
4300 gdb_assert ((itstate & 0x0f) != 0);
4301 arm_insert_single_step_breakpoint (gdbarch, aspace,
4302 MAKE_THUMB_ADDR (pc));
4303 cond_negated = (itstate >> 4) & 1;
4305 /* Skip all following instructions with the same
4306 condition. If there is a later instruction in the IT
4307 block with the opposite condition, set the other
4308 breakpoint there. If not, then set a breakpoint on
4309 the instruction after the IT block. */
4312 inst1 = read_memory_unsigned_integer (pc, 2,
4313 byte_order_for_code);
4314 pc += thumb_insn_size (inst1);
4315 itstate = thumb_advance_itstate (itstate);
4317 while (itstate != 0 && ((itstate >> 4) & 1) == cond_negated);
4319 return MAKE_THUMB_ADDR (pc);
4323 else if (itstate & 0x0f)
4325 /* We are in a conditional block. Check the condition. */
4326 int cond = itstate >> 4;
4328 if (! condition_true (cond, status))
4329 /* Advance to the next instruction. All the 32-bit
4330 instructions share a common prefix. */
4331 return MAKE_THUMB_ADDR (pc + thumb_insn_size (inst1));
4333 /* Otherwise, handle the instruction normally. */
/* Past the IT handling: decode the instruction at PC and compute the
   address where execution will resume.  */
4336 if ((inst1 & 0xff00) == 0xbd00) /* pop {rlist, pc} */
4340 /* Fetch the saved PC from the stack. It's stored above
4341 all of the other registers. */
4342 offset = bitcount (bits (inst1, 0, 7)) * INT_REGISTER_SIZE;
4343 sp = get_frame_register_unsigned (frame, ARM_SP_REGNUM);
4344 nextpc = read_memory_unsigned_integer (sp + offset, 4, byte_order);
4346 else if ((inst1 & 0xf000) == 0xd000) /* conditional branch */
4348 unsigned long cond = bits (inst1, 8, 11);
4349 if (cond == 0x0f) /* 0x0f = SWI */
4351 struct gdbarch_tdep *tdep;
4352 tdep = gdbarch_tdep (gdbarch);
4354 if (tdep->syscall_next_pc != NULL)
4355 nextpc = tdep->syscall_next_pc (frame);
4358 else if (cond != 0x0f && condition_true (cond, status))
4359 nextpc = pc_val + (sbits (inst1, 0, 7) << 1);
4361 else if ((inst1 & 0xf800) == 0xe000) /* unconditional branch */
4363 nextpc = pc_val + (sbits (inst1, 0, 10) << 1);
4365 else if (thumb_insn_size (inst1) == 4) /* 32-bit instruction */
4367 unsigned short inst2;
4368 inst2 = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);
4370 /* Default to the next instruction. */
4372 nextpc = MAKE_THUMB_ADDR (nextpc);
4374 if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
4376 /* Branches and miscellaneous control instructions. */
4378 if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
/* B/BL/BLX with 22-bit offset: the J1/J2 bits are inverted and
   folded into the high bits of the offset.  */
4381 int j1, j2, imm1, imm2;
4383 imm1 = sbits (inst1, 0, 10);
4384 imm2 = bits (inst2, 0, 10);
4385 j1 = bit (inst2, 13);
4386 j2 = bit (inst2, 11);
4388 offset = ((imm1 << 12) + (imm2 << 1));
4389 offset ^= ((!j2) << 22) | ((!j1) << 23);
4391 nextpc = pc_val + offset;
4392 /* For BLX make sure to clear the low bits. */
4393 if (bit (inst2, 12) == 0)
4394 nextpc = nextpc & 0xfffffffc;
4396 else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)
4398 /* SUBS PC, LR, #imm8. */
4399 nextpc = get_frame_register_unsigned (frame, ARM_LR_REGNUM);
4400 nextpc -= inst2 & 0x00ff;
4402 else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
4404 /* Conditional branch. */
4405 if (condition_true (bits (inst1, 6, 9), status))
4407 int sign, j1, j2, imm1, imm2;
4409 sign = sbits (inst1, 10, 10);
4410 imm1 = bits (inst1, 0, 5);
4411 imm2 = bits (inst2, 0, 10);
4412 j1 = bit (inst2, 13);
4413 j2 = bit (inst2, 11);
4415 offset = (sign << 20) + (j2 << 19) + (j1 << 18);
4416 offset += (imm1 << 12) + (imm2 << 1);
4418 nextpc = pc_val + offset;
4422 else if ((inst1 & 0xfe50) == 0xe810)
4424 /* Load multiple or RFE. */
4425 int rn, offset, load_pc = 1;
4427 rn = bits (inst1, 0, 3);
4428 if (bit (inst1, 7) && !bit (inst1, 8))
4431 if (!bit (inst2, 15))
4433 offset = bitcount (inst2) * 4 - 4;
4435 else if (!bit (inst1, 7) && bit (inst1, 8))
4438 if (!bit (inst2, 15))
4442 else if (bit (inst1, 7) && bit (inst1, 8))
4447 else if (!bit (inst1, 7) && !bit (inst1, 8))
4457 CORE_ADDR addr = get_frame_register_unsigned (frame, rn);
4458 nextpc = get_frame_memory_unsigned (frame, addr + offset, 4);
4461 else if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
4463 /* MOV PC or MOVS PC. */
4464 nextpc = get_frame_register_unsigned (frame, bits (inst2, 0, 3));
4465 nextpc = MAKE_THUMB_ADDR (nextpc);
4467 else if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
/* Load into the PC: compute the effective address from RN plus
   the immediate or shifted-register offset forms below.  */
4471 int rn, load_pc = 1;
4473 rn = bits (inst1, 0, 3);
4474 base = get_frame_register_unsigned (frame, rn);
4475 if (rn == ARM_PC_REGNUM)
4477 base = (base + 4) & ~(CORE_ADDR) 0x3;
4479 base += bits (inst2, 0, 11);
4481 base -= bits (inst2, 0, 11);
4483 else if (bit (inst1, 7))
4484 base += bits (inst2, 0, 11);
4485 else if (bit (inst2, 11))
4487 if (bit (inst2, 10))
4490 base += bits (inst2, 0, 7);
4492 base -= bits (inst2, 0, 7);
4495 else if ((inst2 & 0x0fc0) == 0x0000)
4497 int shift = bits (inst2, 4, 5), rm = bits (inst2, 0, 3);
4498 base += get_frame_register_unsigned (frame, rm) << shift;
4505 nextpc = get_frame_memory_unsigned (frame, base, 4);
4507 else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)
/* Table branch through a table of byte offsets (each doubled).  */
4510 CORE_ADDR tbl_reg, table, offset, length;
4512 tbl_reg = bits (inst1, 0, 3);
4513 if (tbl_reg == 0x0f)
4514 table = pc + 4; /* Regcache copy of PC isn't right yet. */
4516 table = get_frame_register_unsigned (frame, tbl_reg);
4518 offset = get_frame_register_unsigned (frame, bits (inst2, 0, 3));
4519 length = 2 * get_frame_memory_unsigned (frame, table + offset, 1);
4520 nextpc = pc_val + length;
4522 else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)
/* Table branch through a table of halfword offsets.  */
4525 CORE_ADDR tbl_reg, table, offset, length;
4527 tbl_reg = bits (inst1, 0, 3);
4528 if (tbl_reg == 0x0f)
4529 table = pc + 4; /* Regcache copy of PC isn't right yet. */
4531 table = get_frame_register_unsigned (frame, tbl_reg);
4533 offset = 2 * get_frame_register_unsigned (frame, bits (inst2, 0, 3));
4534 length = 2 * get_frame_memory_unsigned (frame, table + offset, 2);
4535 nextpc = pc_val + length;
4538 else if ((inst1 & 0xff00) == 0x4700) /* bx REG, blx REG */
4540 if (bits (inst1, 3, 6) == 0x0f)
4543 nextpc = get_frame_register_unsigned (frame, bits (inst1, 3, 6));
4545 else if ((inst1 & 0xff87) == 0x4687) /* mov pc, REG */
4547 if (bits (inst1, 3, 6) == 0x0f)
4550 nextpc = get_frame_register_unsigned (frame, bits (inst1, 3, 6));
4552 nextpc = MAKE_THUMB_ADDR (nextpc);
4554 else if ((inst1 & 0xf500) == 0xb100)
/* Compare-and-branch on (non-)zero: bit 11 set means branch when
   the register is non-zero, clear means branch when it is zero.  */
4557 int imm = (bit (inst1, 9) << 6) + (bits (inst1, 3, 7) << 1);
4558 ULONGEST reg = get_frame_register_unsigned (frame, bits (inst1, 0, 2));
4560 if (bit (inst1, 11) && reg != 0)
4561 nextpc = pc_val + imm;
4562 else if (!bit (inst1, 11) && reg == 0)
4563 nextpc = pc_val + imm;
4568 /* Get the raw next address. PC is the current program counter, in
4569 FRAME, which is assumed to be executing in ARM mode.
4571 The value returned has the execution state of the next instruction
4572 encoded in it. Use IS_THUMB_ADDR () to see whether the instruction is
4573 in Thumb-State, and gdbarch_addr_bits_remove () to get the plain memory
   address.  */
4577 arm_get_next_pc_raw (struct frame_info *frame, CORE_ADDR pc)
4579 struct gdbarch *gdbarch = get_frame_arch (frame);
4580 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
4581 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4582 unsigned long pc_val;
4583 unsigned long this_instr;
4584 unsigned long status;
4587 pc_val = (unsigned long) pc;
4588 this_instr = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
4590 status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
4591 nextpc = (CORE_ADDR) (pc_val + 4); /* Default case */
/* Unconditional (NV-space) encodings are dispatched on bits 24-27.  */
4593 if (bits (this_instr, 28, 31) == INST_NV)
4594 switch (bits (this_instr, 24, 27))
4599 /* Branch with Link and change to Thumb. */
4600 nextpc = BranchDest (pc, this_instr);
4601 nextpc |= bit (this_instr, 24) << 1;
4602 nextpc = MAKE_THUMB_ADDR (nextpc);
4608 /* Coprocessor register transfer. */
4609 if (bits (this_instr, 12, 15) == 15)
4610 error (_("Invalid update to pc in instruction"));
/* Conditional instructions only change the PC if their condition
   passes against the current CPSR flags.  */
4613 else if (condition_true (bits (this_instr, 28, 31), status))
4615 switch (bits (this_instr, 24, 27))
4618 case 0x1: /* data processing */
4622 unsigned long operand1, operand2, result = 0;
4626 if (bits (this_instr, 12, 15) != 15)
4629 if (bits (this_instr, 22, 25) == 0
4630 && bits (this_instr, 4, 7) == 9) /* multiply */
4631 error (_("Invalid update to pc in instruction"));
4633 /* BX <reg>, BLX <reg> */
4634 if (bits (this_instr, 4, 27) == 0x12fff1
4635 || bits (this_instr, 4, 27) == 0x12fff3)
4637 rn = bits (this_instr, 0, 3);
4638 nextpc = ((rn == ARM_PC_REGNUM)
4640 : get_frame_register_unsigned (frame, rn));
4645 /* Multiply into PC. */
4646 c = (status & FLAG_C) ? 1 : 0;
4647 rn = bits (this_instr, 16, 19);
4648 operand1 = ((rn == ARM_PC_REGNUM)
4650 : get_frame_register_unsigned (frame, rn));
4652 if (bit (this_instr, 25))
4654 unsigned long immval = bits (this_instr, 0, 7);
4655 unsigned long rotate = 2 * bits (this_instr, 8, 11);
4656 operand2 = ((immval >> rotate) | (immval << (32 - rotate)))
4659 else /* operand 2 is a shifted register. */
4660 operand2 = shifted_reg_val (frame, this_instr, c,
/* Emulate the ALU operation selected by bits 21-24 to find the
   value written to the PC.  */
4663 switch (bits (this_instr, 21, 24))
4666 result = operand1 & operand2;
4670 result = operand1 ^ operand2;
4674 result = operand1 - operand2;
4678 result = operand2 - operand1;
4682 result = operand1 + operand2;
4686 result = operand1 + operand2 + c;
4690 result = operand1 - operand2 + c;
4694 result = operand2 - operand1 + c;
4700 case 0xb: /* tst, teq, cmp, cmn */
4701 result = (unsigned long) nextpc;
4705 result = operand1 | operand2;
4709 /* Always step into a function. */
4714 result = operand1 & ~operand2;
4722 /* In 26-bit APCS the bottom two bits of the result are
4723 ignored, and we always end up in ARM state. */
4725 nextpc = arm_addr_bits_remove (gdbarch, result);
4733 case 0x5: /* data transfer */
4736 if (bit (this_instr, 20))
4739 if (bits (this_instr, 12, 15) == 15)
4745 if (bit (this_instr, 22))
4746 error (_("Invalid update to pc in instruction"));
4748 /* byte write to PC */
4749 rn = bits (this_instr, 16, 19);
4750 base = ((rn == ARM_PC_REGNUM)
4752 : get_frame_register_unsigned (frame, rn));
4754 if (bit (this_instr, 24))
4757 int c = (status & FLAG_C) ? 1 : 0;
4758 unsigned long offset =
4759 (bit (this_instr, 25)
4760 ? shifted_reg_val (frame, this_instr, c, pc_val, status)
4761 : bits (this_instr, 0, 11));
4763 if (bit (this_instr, 23))
4769 (CORE_ADDR) read_memory_unsigned_integer ((CORE_ADDR) base,
4776 case 0x9: /* block transfer */
4777 if (bit (this_instr, 20))
4780 if (bit (this_instr, 15))
4784 unsigned long rn_val
4785 = get_frame_register_unsigned (frame,
4786 bits (this_instr, 16, 19));
4788 if (bit (this_instr, 23))
4791 unsigned long reglist = bits (this_instr, 0, 14);
4792 offset = bitcount (reglist) * 4;
4793 if (bit (this_instr, 24)) /* pre */
4796 else if (bit (this_instr, 24))
4800 (CORE_ADDR) read_memory_unsigned_integer ((CORE_ADDR)
4807 case 0xb: /* branch & link */
4808 case 0xa: /* branch */
4810 nextpc = BranchDest (pc, this_instr);
4816 case 0xe: /* coproc ops */
4820 struct gdbarch_tdep *tdep;
4821 tdep = gdbarch_tdep (gdbarch);
4823 if (tdep->syscall_next_pc != NULL)
4824 nextpc = tdep->syscall_next_pc (frame);
/* NOTE(review): diagnostic default path — presumably unreachable
   for well-formed instructions; confirm against the full switch.  */
4830 fprintf_filtered (gdb_stderr, _("Bad bit-field extraction\n"));
4838 /* Determine next PC after current instruction executes. Will call either
4839 arm_get_next_pc_raw or thumb_get_next_pc_raw. Error out if infinite
4840 loop is detected. */
4843 arm_get_next_pc (struct frame_info *frame, CORE_ADDR pc)
4847 if (arm_frame_is_thumb (frame))
4849 nextpc = thumb_get_next_pc_raw (frame, pc);
4850 if (nextpc == MAKE_THUMB_ADDR (pc))
4851 error (_("Infinite loop detected"));
4855 nextpc = arm_get_next_pc_raw (frame, pc);
4857 error (_("Infinite loop detected"));
4863 /* Like insert_single_step_breakpoint, but make sure we use a breakpoint
4864 of the appropriate mode (as encoded in the PC value), even if this
4865 differs from what would be expected according to the symbol tables. */
4868 arm_insert_single_step_breakpoint (struct gdbarch *gdbarch,
4869 struct address_space *aspace,
4872 struct cleanup *old_chain
4873 = make_cleanup_restore_integer (&arm_override_mode);
4875 arm_override_mode = IS_THUMB_ADDR (pc);
4876 pc = gdbarch_addr_bits_remove (gdbarch, pc);
4878 insert_single_step_breakpoint (gdbarch, aspace, pc);
4880 do_cleanups (old_chain);
4883 /* Checks for an atomic sequence of instructions beginning with a LDREX{,B,H,D}
4884 instruction and ending with a STREX{,B,H,D} instruction. If such a sequence
4885 is found, attempt to step through it. A breakpoint is placed at the end of
   the sequence.  Returns non-zero if breakpoints were placed.  */
4889 thumb_deal_with_atomic_sequence_raw (struct frame_info *frame)
4891 struct gdbarch *gdbarch = get_frame_arch (frame);
4892 struct address_space *aspace = get_frame_address_space (frame);
4893 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4894 CORE_ADDR pc = get_frame_pc (frame);
4895 CORE_ADDR breaks[2] = {-1, -1};
4897 unsigned short insn1, insn2;
4900 int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed). */
4901 const int atomic_sequence_length = 16; /* Instruction sequence length. */
4902 ULONGEST status, itstate;
4904 /* We currently do not support atomic sequences within an IT block. */
4905 status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
4906 itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);
4910 /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction. */
4911 insn1 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
4913 if (thumb_insn_size (insn1) != 4)
4916 insn2 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
4918 if (!((insn1 & 0xfff0) == 0xe850
4919 || ((insn1 & 0xfff0) == 0xe8d0 && (insn2 & 0x00c0) == 0x0040)))
4922 /* Assume that no atomic sequence is longer than "atomic_sequence_length"
4924 for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
4926 insn1 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
4929 if (thumb_insn_size (insn1) != 4)
4931 /* Assume that there is at most one conditional branch in the
4932 atomic sequence. If a conditional branch is found, put a
4933 breakpoint in its destination address. */
4934 if ((insn1 & 0xf000) == 0xd000 && bits (insn1, 8, 11) != 0x0f)
4936 if (last_breakpoint > 0)
4937 return 0; /* More than one conditional branch found,
4938 fallback to the standard code. */
4940 breaks[1] = loc + 2 + (sbits (insn1, 0, 7) << 1);
4944 /* We do not support atomic sequences that use any *other*
4945 instructions but conditional branches to change the PC.
4946 Fall back to standard code to avoid losing control of
4948 else if (thumb_instruction_changes_pc (insn1))
4953 insn2 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
4956 /* Assume that there is at most one conditional branch in the
4957 atomic sequence. If a conditional branch is found, put a
4958 breakpoint in its destination address. */
4959 if ((insn1 & 0xf800) == 0xf000
4960 && (insn2 & 0xd000) == 0x8000
4961 && (insn1 & 0x0380) != 0x0380)
/* Decode the Thumb-2 conditional branch offset (sign, J1, J2 and
   the two immediate fields) to find the branch destination.  */
4963 int sign, j1, j2, imm1, imm2;
4964 unsigned int offset;
4966 sign = sbits (insn1, 10, 10);
4967 imm1 = bits (insn1, 0, 5);
4968 imm2 = bits (insn2, 0, 10);
4969 j1 = bit (insn2, 13);
4970 j2 = bit (insn2, 11);
4972 offset = (sign << 20) + (j2 << 19) + (j1 << 18);
4973 offset += (imm1 << 12) + (imm2 << 1);
4975 if (last_breakpoint > 0)
4976 return 0; /* More than one conditional branch found,
4977 fallback to the standard code. */
4979 breaks[1] = loc + offset;
4983 /* We do not support atomic sequences that use any *other*
4984 instructions but conditional branches to change the PC.
4985 Fall back to standard code to avoid losing control of
4987 else if (thumb2_instruction_changes_pc (insn1, insn2))
4990 /* If we find a strex{,b,h,d}, we're done. */
4991 if ((insn1 & 0xfff0) == 0xe840
4992 || ((insn1 & 0xfff0) == 0xe8c0 && (insn2 & 0x00c0) == 0x0040))
4997 /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence. */
4998 if (insn_count == atomic_sequence_length)
5001 /* Insert a breakpoint right after the end of the atomic sequence. */
5004 /* Check for duplicated breakpoints. Check also for a breakpoint
5005 placed (branch instruction's destination) anywhere in sequence. */
5007 && (breaks[1] == breaks[0]
5008 || (breaks[1] >= pc && breaks[1] < loc)))
5009 last_breakpoint = 0;
5011 /* Effectively inserts the breakpoints. */
5012 for (index = 0; index <= last_breakpoint; index++)
5013 arm_insert_single_step_breakpoint (gdbarch, aspace,
5014 MAKE_THUMB_ADDR (breaks[index]));
/* Scan for an ARM-mode atomic sequence (LDREX{,b,h,d} ... STREX{,b,h,d})
   starting at FRAME's PC, and if one is found, place single-step
   breakpoints after the sequence (and at any conditional-branch target
   inside it) rather than inside it.
   NOTE(review): this excerpt is elided — the return-type line, braces and
   some local declarations (insn, loc, index, insn_count) are not visible;
   confirm against the full source before modifying.  */
5020 arm_deal_with_atomic_sequence_raw (struct frame_info *frame)
5022 struct gdbarch *gdbarch = get_frame_arch (frame);
5023 struct address_space *aspace = get_frame_address_space (frame);
5024 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
5025 CORE_ADDR pc = get_frame_pc (frame);
5026 CORE_ADDR breaks[2] = {-1, -1};
5031 int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed).  */
5032 const int atomic_sequence_length = 16; /* Instruction sequence length.  */
5034 /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction.
5035 Note that we do not currently support conditionally executed atomic
5037 insn = read_memory_unsigned_integer (loc, 4, byte_order_for_code);
/* Mask/value below select the LDREX family encodings; bail out if the
   first instruction is not one of them.  */
5039 if ((insn & 0xff9000f0) != 0xe1900090)
5042 /* Assume that no atomic sequence is longer than "atomic_sequence_length"
5044 for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
5046 insn = read_memory_unsigned_integer (loc, 4, byte_order_for_code);
5049 /* Assume that there is at most one conditional branch in the atomic
5050 sequence.  If a conditional branch is found, put a breakpoint in
5051 its destination address.  */
5052 if (bits (insn, 24, 27) == 0xa)
5054 if (last_breakpoint > 0)
5055 return 0; /* More than one conditional branch found, fallback
5056 to the standard single-step code.  */
5058 breaks[1] = BranchDest (loc - 4, insn);
5062 /* We do not support atomic sequences that use any *other* instructions
5063 but conditional branches to change the PC.  Fall back to standard
5064 code to avoid losing control of execution.  */
5065 else if (arm_instruction_changes_pc (insn))
5068 /* If we find a strex{,b,h,d}, we're done.  */
5069 if ((insn & 0xff9000f0) == 0xe1800090)
5073 /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence.  */
5074 if (insn_count == atomic_sequence_length)
5077 /* Insert a breakpoint right after the end of the atomic sequence.  */
5080 /* Check for duplicated breakpoints.  Check also for a breakpoint
5081 placed (branch instruction's destination) anywhere in sequence.  */
5083 && (breaks[1] == breaks[0]
5084 || (breaks[1] >= pc && breaks[1] < loc)))
5085 last_breakpoint = 0;
5087 /* Effectively inserts the breakpoints.  */
5088 for (index = 0; index <= last_breakpoint; index++)
5089 arm_insert_single_step_breakpoint (gdbarch, aspace, breaks[index]);
/* Dispatch atomic-sequence handling to the Thumb or ARM variant
   depending on the execution mode of FRAME.  */
5095 arm_deal_with_atomic_sequence (struct frame_info *frame)
5097 if (arm_frame_is_thumb (frame))
5098 return thumb_deal_with_atomic_sequence_raw (frame);
5100 return arm_deal_with_atomic_sequence_raw (frame);
5103 /* single_step() is called just before we want to resume the inferior,
5104 if we want to single-step it but there is no hardware or kernel
5105 single-step support.  We find the target of the coming instruction
5106 and breakpoint it.  */
5109 arm_software_single_step (struct frame_info *frame)
5111 struct gdbarch *gdbarch = get_frame_arch (frame);
5112 struct address_space *aspace = get_frame_address_space (frame);
/* Atomic (ldrex/strex) sequences must be stepped over as a unit; if one
   is detected the breakpoints have already been placed and we are done.  */
5115 if (arm_deal_with_atomic_sequence (frame))
5118 next_pc = arm_get_next_pc (frame, get_frame_pc (frame));
5119 arm_insert_single_step_breakpoint (gdbarch, aspace, next_pc);
5124 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
5125 the buffer to be NEW_LEN bytes ending at ENDADDR.  Return
5126 NULL if an error occurs.  BUF is freed.  */
5129 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
5130 int old_len, int new_len)
5133 int bytes_to_read = new_len - old_len;
/* Allocate the larger buffer and copy the old contents into its tail;
   the newly-read bytes fill the head (earlier addresses).  */
5135 new_buf = xmalloc (new_len);
5136 memcpy (new_buf + bytes_to_read, buf, old_len);
5138 if (target_read_memory (endaddr - new_len, new_buf, bytes_to_read) != 0)
5146 /* An IT block is at most the 2-byte IT instruction followed by
5147 four 4-byte instructions.  The furthest back we must search to
5148 find an IT block that affects the current instruction is thus
5149 2 + 3 * 4 == 14 bytes.  */
5150 #define MAX_IT_BLOCK_PREFIX 14
5152 /* Use a quick scan if there are more than this many bytes of
5154 #define IT_SCAN_THRESHOLD 32
5156 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
5157 A breakpoint in an IT block may not be hit, depending on the
5160 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
5164 CORE_ADDR boundary, func_start;
5166 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
5167 int i, any, last_it, last_it_count;
5169 /* If we are using BKPT breakpoints, none of this is necessary.  */
5170 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
5173 /* ARM mode does not have this problem.  */
5174 if (!arm_pc_is_thumb (gdbarch, bpaddr))
5177 /* We are setting a breakpoint in Thumb code that could potentially
5178 contain an IT block.  The first step is to find how much Thumb
5179 code there is; we do not need to read outside of known Thumb
5181 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
5183 /* Thumb-2 code must have mapping symbols to have a chance.  */
5186 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
/* Never search back past the start of the containing function; an IT
   block cannot span a function boundary.  */
5188 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
5189 && func_start > boundary)
5190 boundary = func_start;
5192 /* Search for a candidate IT instruction.  We have to do some fancy
5193 footwork to distinguish a real IT instruction from the second
5194 half of a 32-bit instruction, but there is no need for that if
5195 there's no candidate.  */
5196 buf_len = min (bpaddr - boundary, MAX_IT_BLOCK_PREFIX);
5198 /* No room for an IT instruction.  */
5201 buf = xmalloc (buf_len);
5202 if (target_read_memory (bpaddr - buf_len, buf, buf_len) != 0)
/* Quick scan: does any halfword before BPADDR even look like an IT
   instruction (0xbfXY with a nonzero mask)?  */
5205 for (i = 0; i < buf_len; i += 2)
5207 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5208 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
5220 /* OK, the code bytes before this instruction contain at least one
5221 halfword which resembles an IT instruction.  We know that it's
5222 Thumb code, but there are still two possibilities.  Either the
5223 halfword really is an IT instruction, or it is the second half of
5224 a 32-bit Thumb instruction.  The only way we can tell is to
5225 scan forwards from a known instruction boundary.  */
5226 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
5230 /* There's a lot of code before this instruction.  Start with an
5231 optimistic search; it's easy to recognize halfwords that can
5232 not be the start of a 32-bit instruction, and use that to
5233 lock on to the instruction boundaries.  */
5234 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
5237 buf_len = IT_SCAN_THRESHOLD;
5240 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
5242 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5243 if (thumb_insn_size (inst1) == 2)
5250 /* At this point, if DEFINITE, BUF[I] is the first place we
5251 are sure that we know the instruction boundaries, and it is far
5252 enough from BPADDR that we could not miss an IT instruction
5253 affecting BPADDR.  If ! DEFINITE, give up - start from a
5257 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
5261 buf_len = bpaddr - boundary;
5267 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
5270 buf_len = bpaddr - boundary;
5274 /* Scan forwards.  Find the last IT instruction before BPADDR.  */
5279 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5281 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
5286 else if (inst1 & 0x0002)
5288 else if (inst1 & 0x0004)
5293 i += thumb_insn_size (inst1);
5299 /* There wasn't really an IT instruction after all.  */
5302 if (last_it_count < 1)
5303 /* It was too far away.  */
5306 /* This really is a trouble spot.  Move the breakpoint to the IT
5308 return bpaddr - buf_len + last_it;
5311 /* ARM displaced stepping support.
5313 Generally ARM displaced stepping works as follows:
5315 1. When an instruction is to be single-stepped, it is first decoded by
5316 arm_process_displaced_insn (called from arm_displaced_step_copy_insn).
5317 Depending on the type of instruction, it is then copied to a scratch
5318 location, possibly in a modified form. The copy_* set of functions
5319 performs such modification, as necessary. A breakpoint is placed after
5320 the modified instruction in the scratch space to return control to GDB.
5321 Note in particular that instructions which modify the PC will no longer
5322 do so after modification.
5324 2. The instruction is single-stepped, by setting the PC to the scratch
5325 location address, and resuming. Control returns to GDB when the
5328 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
5329 function used for the current instruction. This function's job is to
5330 put the CPU/memory state back to what it would have been if the
5331 instruction had been executed unmodified in its original location. */
5333 /* NOP instruction (mov r0, r0).  */
5334 #define ARM_NOP 0xe1a00000
/* Thumb equivalent of the NOP above; used as the scratch-pad filler
   when displaced-stepping Thumb instructions.  */
5335 #define THUMB_NOP 0x4600
5337 /* Helper for register reads for displaced stepping.  In particular, this
5338 returns the PC as it would be seen by the instruction at its original
/* For any other register, the value is read straight from the regcache.  */
5342 displaced_read_reg (struct regcache *regs, struct displaced_step_closure *dsc,
5346 CORE_ADDR from = dsc->insn_addr;
5348 if (regno == ARM_PC_REGNUM)
5350 /* Compute pipeline offset:
5351 - When executing an ARM instruction, PC reads as the address of the
5352 current instruction plus 8.
5353 - When executing a Thumb instruction, PC reads as the address of the
5354 current instruction plus 4.  */
5361 if (debug_displaced)
5362 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
5363 (unsigned long) from)
5364 return (ULONGEST) from;
5368 regcache_cooked_read_unsigned (regs, regno, &ret);
5369 if (debug_displaced)
5370 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
5371 regno, (unsigned long) ret);
/* Return nonzero if the inferior (as seen through REGS) is currently
   executing in ARM mode, i.e. the Thumb bit of the PSR is clear.  */
5377 displaced_in_arm_mode (struct regcache *regs)
5380 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
5382 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5384 return (ps & t_bit) == 0;
5387 /* Write to the PC as from a branch instruction.  */
/* A plain branch does not change instruction set; only alignment of the
   target address is enforced (4-byte in ARM state, 2-byte in Thumb).  */
5390 branch_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5394 /* Note: If bits 0/1 are set, this branch would be unpredictable for
5395 architecture versions < 6.  */
5396 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5397 val & ~(ULONGEST) 0x3);
5399 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5400 val & ~(ULONGEST) 0x1);
5403 /* Write to the PC as from a branch-exchange instruction.  */
/* Bit 0 of VAL selects Thumb state; bit 1 set without bit 0 is
   unpredictable and handled with a warning below.  */
5406 bx_write_pc (struct regcache *regs, ULONGEST val)
5409 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
5411 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
/* Bit 0 set: switch to Thumb state and clear the low bit of the PC.  */
5415 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
5416 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
5418 else if ((val & 2) == 0)
/* Word-aligned target: switch to ARM state.  */
5420 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5421 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
5425 /* Unpredictable behaviour.  Try to do something sensible (switch to ARM
5426 mode, align dest to 4 bytes).  */
5427 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
5428 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5429 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
5433 /* Write to the PC as if from a load instruction.  */
/* From ARMv5 on, a load into the PC behaves like BX (may interwork);
   on earlier architectures it behaves like a plain branch.  */
5436 load_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5439 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
5440 bx_write_pc (regs, val);
5442 branch_write_pc (regs, dsc, val);
5445 /* Write to the PC as if from an ALU instruction.  */
/* From ARMv7 on, ARM-mode ALU writes to the PC interwork like BX;
   otherwise (or in Thumb mode) they behave like a plain branch.  */
5448 alu_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5451 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
5452 bx_write_pc (regs, val);
5454 branch_write_pc (regs, dsc, val);
5457 /* Helper for writing to registers for displaced stepping.  Writing to the PC
5458 has a varying effects depending on the instruction which does the write:
5459 this is controlled by the WRITE_PC argument.  */
5462 displaced_write_reg (struct regcache *regs, struct displaced_step_closure *dsc,
5463 int regno, ULONGEST val, enum pc_write_style write_pc)
5465 if (regno == ARM_PC_REGNUM)
5467 if (debug_displaced)
5468 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
5469 (unsigned long) val);
/* Dispatch on the style of the original writing instruction; each
   helper applies the corresponding alignment/interworking rules.  */
5472 case BRANCH_WRITE_PC:
5473 branch_write_pc (regs, dsc, val);
5477 bx_write_pc (regs, val);
5481 load_write_pc (regs, dsc, val);
5485 alu_write_pc (regs, dsc, val);
5488 case CANNOT_WRITE_PC:
5489 warning (_("Instruction wrote to PC in an unexpected way when "
5490 "single-stepping"));
5494 internal_error (__FILE__, __LINE__,
5495 _("Invalid argument to displaced_write_reg"));
/* Record that the PC was modified so the fixup phase does not also
   advance it past the scratch location.  */
5498 dsc->wrote_to_pc = 1;
5502 if (debug_displaced)
5503 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
5504 regno, (unsigned long) val);
5505 regcache_cooked_write_unsigned (regs, regno, val);
5509 /* This function is used to concisely determine if an instruction INSN
5510 references PC.  Register fields of interest in INSN should have the
5511 corresponding fields of BITMASK set to 0b1111.  The function
5512 returns return 1 if any of these fields in INSN reference the PC
5513 (also 0b1111, r15), else it returns 0.  */
5516 insn_references_pc (uint32_t insn, uint32_t bitmask)
5518 uint32_t lowbit = 1;
5520 while (bitmask != 0)
/* Find the lowest set bit of the remaining mask; it marks the start of
   the next 4-bit register field.  */
5524 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
/* A register field equal to its 0b1111 mask means r15 (the PC).  */
5530 mask = lowbit * 0xf;
5532 if ((insn & mask) == mask)
5541 /* The simplest copy function.  Many instructions have the same effect no
5542 matter what address they are executed at: in those cases, use this.  */
5545 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
5546 const char *iname, struct displaced_step_closure *dsc)
5548 if (debug_displaced)
5549 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
5550 "opcode/class '%s' unmodified\n", (unsigned long) insn,
/* Copy the instruction verbatim into the scratch buffer.  */
5553 dsc->modinsn[0] = insn;
/* As arm_copy_unmodified, but for a 32-bit Thumb-2 instruction given as
   two 16-bit halfwords INSN1/INSN2.  */
5559 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
5560 uint16_t insn2, const char *iname,
5561 struct displaced_step_closure *dsc)
5563 if (debug_displaced)
5564 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
5565 "opcode/class '%s' unmodified\n", insn1, insn2,
5568 dsc->modinsn[0] = insn1;
5569 dsc->modinsn[1] = insn2;
5575 /* Copy 16-bit Thumb(Thumb and 16-bit Thumb-2) instruction without any
5578 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, unsigned int insn,
5580 struct displaced_step_closure *dsc)
5582 if (debug_displaced)
5583 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
5584 "opcode/class '%s' unmodified\n", insn,
/* The instruction is position-independent: execute it as-is.  */
5587 dsc->modinsn[0] = insn;
5592 /* Preload instructions with immediate offset.  */
/* Cleanup for pli/pld copies: restore the scratch registers (r0, and r1
   if a register-offset form saved it via u.preload.immed == 0).  */
5595 cleanup_preload (struct gdbarch *gdbarch,
5596 struct regcache *regs, struct displaced_step_closure *dsc)
5598 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5599 if (!dsc->u.preload.immed)
5600 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
/* Set up a displaced copy of an immediate-offset preload: save r0, load
   RN's value into r0, and register the cleanup that restores r0.  */
5604 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
5605 struct displaced_step_closure *dsc, unsigned int rn)
5608 /* Preload instructions:
5610 {pli/pld} [rn, #+/-imm]
5612 {pli/pld} [r0, #+/-imm].  */
5614 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5615 rn_val = displaced_read_reg (regs, dsc, rn);
5616 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5617 dsc->u.preload.immed = 1;
5619 dsc->cleanup = &cleanup_preload;
/* Copy an ARM pli/pld (immediate) instruction for displaced stepping.
   Only instructions whose Rn field is the PC need rewriting.  */
5623 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5624 struct displaced_step_closure *dsc)
5626 unsigned int rn = bits (insn, 16, 19);
5628 if (!insn_references_pc (insn, 0x000f0000ul))
5629 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
5631 if (debug_displaced)
5632 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
5633 (unsigned long) insn);
/* Rewrite the Rn field (bits 16-19) to r0, which install_preload has
   primed with the original Rn value.  */
5635 dsc->modinsn[0] = insn & 0xfff0ffff;
5637 install_preload (gdbarch, regs, dsc, rn);
/* Copy a Thumb-2 pli/pld instruction for displaced stepping.  Only the
   PC-relative (literal) forms need rewriting.  */
5643 thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
5644 struct regcache *regs, struct displaced_step_closure *dsc)
5646 unsigned int rn = bits (insn1, 0, 3);
5647 unsigned int u_bit = bit (insn1, 7);
5648 int imm12 = bits (insn2, 0, 11);
5651 if (rn != ARM_PC_REGNUM)
5652 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
5654 /* PC is only allowed to use in PLI (immediate,literal) Encoding T3, and
5655 PLD (literal) Encoding T1.  */
5656 if (debug_displaced)
5657 fprintf_unfiltered (gdb_stdlog,
5658 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
5659 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
5665 /* Rewrite instruction {pli/pld} PC imm12 into:
5666 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
5670 Cleanup: r0 <- tmp[0], r1 <- tmp[1].  */
5672 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5673 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5675 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5677 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
5678 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
/* immed == 0 tells cleanup_preload to restore r1 as well as r0.  */
5679 dsc->u.preload.immed = 0;
5681 /* {pli/pld} [r0, r1] */
5682 dsc->modinsn[0] = insn1 & 0xfff0;
5683 dsc->modinsn[1] = 0xf001;
5686 dsc->cleanup = &cleanup_preload;
5690 /* Preload instructions with register offset.  */
/* Set up a displaced copy of a register-offset preload: save r0/r1,
   load RN into r0 and RM into r1, and register the r0/r1 cleanup.  */
5693 install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
5694 struct displaced_step_closure *dsc, unsigned int rn,
5697 ULONGEST rn_val, rm_val;
5699 /* Preload register-offset instructions:
5701 {pli/pld} [rn, rm {, shift}]
5703 {pli/pld} [r0, r1 {, shift}].  */
5705 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5706 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5707 rn_val = displaced_read_reg (regs, dsc, rn);
5708 rm_val = displaced_read_reg (regs, dsc, rm);
5709 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5710 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
5711 dsc->u.preload.immed = 0;
5713 dsc->cleanup = &cleanup_preload;
/* Copy an ARM pli/pld (register offset) instruction for displaced
   stepping.  Only forms whose Rn or Rm field is the PC need rewriting.  */
5717 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
5718 struct regcache *regs,
5719 struct displaced_step_closure *dsc)
5721 unsigned int rn = bits (insn, 16, 19);
5722 unsigned int rm = bits (insn, 0, 3);
5725 if (!insn_references_pc (insn, 0x000f000ful))
5726 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
5728 if (debug_displaced)
5729 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
5730 (unsigned long) insn);
/* Rewrite Rn (bits 16-19) to r0 and Rm (bits 0-3) to r1.  */
5732 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
5734 install_preload_reg (gdbarch, regs, dsc, rn, rm);
5738 /* Copy/cleanup coprocessor load and store instructions.  */
/* Cleanup: capture the (possibly written-back) base address from r0,
   restore r0, then apply the writeback to the original Rn if any.  */
5741 cleanup_copro_load_store (struct gdbarch *gdbarch,
5742 struct regcache *regs,
5743 struct displaced_step_closure *dsc)
5745 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
5747 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5749 if (dsc->u.ldst.writeback)
5750 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
/* Set up a displaced copy of a coprocessor load/store: save r0, prime it
   with the (word-aligned) base register value, and record WRITEBACK/RN
   so the cleanup can propagate any base-register update.  */
5754 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5755 struct displaced_step_closure *dsc,
5756 int writeback, unsigned int rn)
5760 /* Coprocessor load/store instructions:
5762 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
5764 {stc/stc2} [r0, #+/-imm].
5766 ldc/ldc2 are handled identically.  */
5768 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5769 rn_val = displaced_read_reg (regs, dsc, rn);
5770 /* PC should be 4-byte aligned.  */
5771 rn_val = rn_val & 0xfffffffc;
5772 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5774 dsc->u.ldst.writeback = writeback;
5775 dsc->u.ldst.rn = rn;
5777 dsc->cleanup = &cleanup_copro_load_store;
/* Copy an ARM ldc/ldc2/stc/stc2 instruction for displaced stepping.
   Only instructions whose Rn field is the PC need rewriting.  */
5781 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
5782 struct regcache *regs,
5783 struct displaced_step_closure *dsc)
5785 unsigned int rn = bits (insn, 16, 19);
5787 if (!insn_references_pc (insn, 0x000f0000ul))
5788 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
5790 if (debug_displaced)
5791 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
5792 "load/store insn %.8lx\n", (unsigned long) insn);
/* Rewrite the Rn field (bits 16-19) to r0; bit 25 is the writeback bit
   in this encoding.  */
5794 dsc->modinsn[0] = insn & 0xfff0ffff;
5796 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
/* Thumb-2 counterpart of arm_copy_copro_load_store.  Only PC-relative
   (literal) forms need rewriting; those never write back.  */
5802 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
5803 uint16_t insn2, struct regcache *regs,
5804 struct displaced_step_closure *dsc)
5806 unsigned int rn = bits (insn1, 0, 3);
5808 if (rn != ARM_PC_REGNUM)
5809 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
5810 "copro load/store", dsc);
5812 if (debug_displaced)
5813 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
5814 "load/store insn %.4x%.4x\n", insn1, insn2);
5816 dsc->modinsn[0] = insn1 & 0xfff0;
5817 dsc->modinsn[1] = insn2;
5820 /* This function is called for copying instruction LDC/LDC2/VLDR, which
5821 doesn't support writeback, so pass 0.  */
5822 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
5827 /* Clean up branch instructions (actually perform the branch, by setting
/* The copied instruction was a NOP; here we evaluate the condition
   against the flags and, if taken, set LR (for BL/BLX) and the PC.  */
5831 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
5832 struct displaced_step_closure *dsc)
5834 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5835 int branch_taken = condition_true (dsc->u.branch.cond, status);
5836 enum pc_write_style write_pc = dsc->u.branch.exchange
5837 ? BX_WRITE_PC : BRANCH_WRITE_PC;
5842 if (dsc->u.branch.link)
5844 /* The value of LR should be the next insn of current one.  In order
5845 not to confuse logic hanlding later insn `bx lr', if current insn mode
5846 is Thumb, the bit 0 of LR value should be set to 1.  */
5847 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
5850 next_insn_addr |= 0x1;
5852 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
5856 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
5859 /* Copy B/BL/BLX instructions with immediate destinations.  */
/* Record COND/EXCHANGE/LINK and the resolved destination in DSC; the
   actual branch is performed later by cleanup_branch.  */
5862 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
5863 struct displaced_step_closure *dsc,
5864 unsigned int cond, int exchange, int link, long offset)
5866 /* Implement "BL<cond> <label>" as:
5868 Preparation: cond <- instruction condition
5869 Insn: mov r0, r0 (nop)
5870 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
5872 B<cond> similar, but don't set r14 in cleanup.  */
5874 dsc->u.branch.cond = cond;
5875 dsc->u.branch.link = link;
5876 dsc->u.branch.exchange = exchange;
5878 dsc->u.branch.dest = dsc->insn_addr;
5879 if (link && exchange)
5880 /* For BLX, offset is computed from the Align (PC, 4).  */
5881 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
/* Pipeline adjustment: PC reads as insn address + 4 (Thumb) or + 8 (ARM);
   the mode test between these lines is elided in this excerpt.  */
5884 dsc->u.branch.dest += 4 + offset;
5886 dsc->u.branch.dest += 8 + offset;
5888 dsc->cleanup = &cleanup_branch;
/* Copy an ARM-mode B/BL/BLX (immediate) for displaced stepping: decode
   the condition, link and 26-bit signed offset, replace the copied insn
   with a NOP, and let cleanup_branch perform the branch.  */
5891 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
5892 struct regcache *regs, struct displaced_step_closure *dsc)
5894 unsigned int cond = bits (insn, 28, 31);
/* BLX (immediate) is encoded with the 0xf "condition"; it always links.  */
5895 int exchange = (cond == 0xf);
5896 int link = exchange || bit (insn, 24);
5899 if (debug_displaced)
5900 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
5901 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
5902 (unsigned long) insn);
5904 /* For BLX, set bit 0 of the destination.  The cleanup_branch function will
5905 then arrange the switch into Thumb mode.  */
5906 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
5908 offset = bits (insn, 0, 23) << 2;
/* Sign-extend the 26-bit offset.  */
5910 if (bit (offset, 25))
5911 offset = offset | ~0x3ffffff;
5913 dsc->modinsn[0] = ARM_NOP;
5915 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
/* Copy a 32-bit Thumb-2 B/BL/BLX for displaced stepping: decode the
   offset per encodings T3/T4 (B) or the BL/BLX encodings, replace the
   copied insn with a Thumb NOP, and branch in cleanup_branch.  */
5920 thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
5921 uint16_t insn2, struct regcache *regs,
5922 struct displaced_step_closure *dsc)
5924 int link = bit (insn2, 14);
5925 int exchange = link && !bit (insn2, 12);
/* J1/J2 combine with S into the top offset bits: I1 = NOT(J1 XOR S),
   I2 = NOT(J2 XOR S).  */
5928 int j1 = bit (insn2, 13);
5929 int j2 = bit (insn2, 11);
5930 int s = sbits (insn1, 10, 10);
5931 int i1 = !(j1 ^ bit (insn1, 10));
5932 int i2 = !(j2 ^ bit (insn1, 10));
5934 if (!link && !exchange) /* B */
5936 offset = (bits (insn2, 0, 10) << 1);
5937 if (bit (insn2, 12)) /* Encoding T4 */
5939 offset |= (bits (insn1, 0, 9) << 12)
5945 else /* Encoding T3 */
5947 offset |= (bits (insn1, 0, 5) << 12)
5951 cond = bits (insn1, 6, 9);
5956 offset = (bits (insn1, 0, 9) << 12);
5957 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
/* BLX targets ARM state and is word-granular (<<2); BL stays Thumb
   and is halfword-granular (<<1).  */
5958 offset |= exchange ?
5959 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
5962 if (debug_displaced)
5963 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
5964 "%.4x %.4x with offset %.8lx\n",
5965 link ? (exchange) ? "blx" : "bl" : "b",
5966 insn1, insn2, offset);
5968 dsc->modinsn[0] = THUMB_NOP;
5970 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
5974 /* Copy B Thumb instructions.  */
/* Handles the 16-bit conditional (encoding T1, opcode 0xd) and
   unconditional (encoding T2, opcode 0xe) branch forms.  */
5976 thumb_copy_b (struct gdbarch *gdbarch, unsigned short insn,
5977 struct displaced_step_closure *dsc)
5979 unsigned int cond = 0;
5981 unsigned short bit_12_15 = bits (insn, 12, 15);
5982 CORE_ADDR from = dsc->insn_addr;
5984 if (bit_12_15 == 0xd)
5986 /* offset = SignExtend (imm8:0, 32) */
5987 offset = sbits ((insn << 1), 0, 8);
5988 cond = bits (insn, 8, 11);
5990 else if (bit_12_15 == 0xe) /* Encoding T2 */
5992 offset = sbits ((insn << 1), 0, 11);
5996 if (debug_displaced)
5997 fprintf_unfiltered (gdb_stdlog,
5998 "displaced: copying b immediate insn %.4x "
5999 "with offset %d\n", insn, offset);
6001 dsc->u.branch.cond = cond;
6002 dsc->u.branch.link = 0;
6003 dsc->u.branch.exchange = 0;
/* Thumb pipeline: PC reads as insn address + 4.  */
6004 dsc->u.branch.dest = from + 4 + offset;
6006 dsc->modinsn[0] = THUMB_NOP;
6008 dsc->cleanup = &cleanup_branch;
6013 /* Copy BX/BLX with register-specified destinations.  */
/* Capture the destination from RM now, record COND/LINK, and mark the
   branch as an exchange so cleanup_branch uses BX_WRITE_PC.  */
6016 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
6017 struct displaced_step_closure *dsc, int link,
6018 unsigned int cond, unsigned int rm)
6020 /* Implement {BX,BLX}<cond> <reg>" as:
6022 Preparation: cond <- instruction condition
6023 Insn: mov r0, r0 (nop)
6024 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
6026 Don't set r14 in cleanup for BX.  */
6028 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
6030 dsc->u.branch.cond = cond;
6031 dsc->u.branch.link = link;
6033 dsc->u.branch.exchange = 1;
6035 dsc->cleanup = &cleanup_branch;
/* Copy an ARM-mode BX/BLX (register) for displaced stepping: replace
   it with a NOP and perform the branch in cleanup_branch.  */
6039 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
6040 struct regcache *regs, struct displaced_step_closure *dsc)
6042 unsigned int cond = bits (insn, 28, 31);
/* Bit 5 distinguishes BLX (link) from BX in this encoding.  */
6045 int link = bit (insn, 5);
6046 unsigned int rm = bits (insn, 0, 3);
6048 if (debug_displaced)
6049 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
6050 (unsigned long) insn);
6052 dsc->modinsn[0] = ARM_NOP;
6054 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
/* Copy a 16-bit Thumb BX/BLX (register) for displaced stepping.  Thumb
   BX/BLX is unconditional outside an IT block, hence INST_AL below.  */
6059 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
6060 struct regcache *regs,
6061 struct displaced_step_closure *dsc)
6063 int link = bit (insn, 7);
6064 unsigned int rm = bits (insn, 3, 6);
6066 if (debug_displaced)
6067 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
6068 (unsigned short) insn);
6070 dsc->modinsn[0] = THUMB_NOP;
6072 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
6078 /* Copy/cleanup arithmetic/logic instruction with immediate RHS.  */
/* Cleanup: move the result out of scratch r0 into the real destination
   register, then restore r0 and r1.  */
6081 cleanup_alu_imm (struct gdbarch *gdbarch,
6082 struct regcache *regs, struct displaced_step_closure *dsc)
6084 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
6085 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6086 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6087 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
/* Copy an ARM data-processing (immediate) instruction for displaced
   stepping, rewriting PC-referencing Rd/Rn fields onto scratch r0/r1.  */
6091 arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
6092 struct displaced_step_closure *dsc)
6094 unsigned int rn = bits (insn, 16, 19);
6095 unsigned int rd = bits (insn, 12, 15);
6096 unsigned int op = bits (insn, 21, 24);
/* MOV (opcode 0xd) has no Rn operand, so its Rn field must not be
   rewritten below.  */
6097 int is_mov = (op == 0xd);
6098 ULONGEST rd_val, rn_val;
6100 if (!insn_references_pc (insn, 0x000ff000ul))
6101 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
6103 if (debug_displaced)
6104 fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
6105 "%.8lx\n", is_mov ? "move" : "ALU",
6106 (unsigned long) insn);
6108 /* Instruction is of form:
6110 <op><cond> rd, [rn,] #imm
6114 Preparation: tmp1, tmp2 <- r0, r1;
6116 Insn: <op><cond> r0, r1, #imm
6117 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
6120 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6121 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6122 rn_val = displaced_read_reg (regs, dsc, rn);
6123 rd_val = displaced_read_reg (regs, dsc, rd);
6124 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6125 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
/* For MOV keep Rn clear; otherwise set Rn to r1 (0x10000).  Rd becomes
   r0 in both cases.  */
6129 dsc->modinsn[0] = insn & 0xfff00fff;
6131 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
6133 dsc->cleanup = &cleanup_alu_imm;
/* Thumb-2 counterpart of arm_copy_alu_imm.  Per the assertion below it
   is only reached for MOV (immediate) with Rn == PC encoding.  */
6139 thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
6140 uint16_t insn2, struct regcache *regs,
6141 struct displaced_step_closure *dsc)
6143 unsigned int op = bits (insn1, 5, 8);
6144 unsigned int rn, rm, rd;
6145 ULONGEST rd_val, rn_val;
6147 rn = bits (insn1, 0, 3); /* Rn */
6148 rm = bits (insn2, 0, 3); /* Rm */
6149 rd = bits (insn2, 8, 11); /* Rd */
6151 /* This routine is only called for instruction MOV.  */
6152 gdb_assert (op == 0x2 && rn == 0xf);
6154 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
6155 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
6157 if (debug_displaced)
6158 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
6159 "ALU", insn1, insn2);
6161 /* Instruction is of form:
6163 <op><cond> rd, [rn,] #imm
6167 Preparation: tmp1, tmp2 <- r0, r1;
6169 Insn: <op><cond> r0, r1, #imm
6170 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
6173 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6174 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6175 rn_val = displaced_read_reg (regs, dsc, rn);
6176 rd_val = displaced_read_reg (regs, dsc, rd);
6177 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6178 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
/* Rewrite Rd (and low Rm nibble) in the second halfword to scratch
   registers; the first halfword is unchanged.  */
6181 dsc->modinsn[0] = insn1;
6182 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
6185 dsc->cleanup = &cleanup_alu_imm;
6190 /* Copy/cleanup arithmetic/logic insns with register RHS.  */
/* Cleanup: read the result from scratch r0, restore r0-r2, then write
   the result into the real destination register.  */
6193 cleanup_alu_reg (struct gdbarch *gdbarch,
6194 struct regcache *regs, struct displaced_step_closure *dsc)
6199 rd_val = displaced_read_reg (regs, dsc, 0);
6201 for (i = 0; i < 3; i++)
6202 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6204 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
/* Common preparation for ALU instructions with a register RHS:
   save r0-r2 into dsc->tmp[], load them with the current values of
   RD/RN/RM so the (already rewritten) out-of-line copy uses r0-r2
   instead of any PC-referencing registers, and register
   cleanup_alu_reg to undo the shuffle and commit the result.  */
6208 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
6209 struct displaced_step_closure *dsc,
6210 unsigned int rd, unsigned int rn, unsigned int rm)
6212 ULONGEST rd_val, rn_val, rm_val;
6214 /* Instruction is of form:
6216 <op><cond> rd, [rn,] rm [, <shift>]
6220 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
6221 r0, r1, r2 <- rd, rn, rm
6222 Insn: <op><cond> r0, r1, r2 [, <shift>]
6223 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
6226 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6227 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6228 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6229 rd_val = displaced_read_reg (regs, dsc, rd);
6230 rn_val = displaced_read_reg (regs, dsc, rn);
6231 rm_val = displaced_read_reg (regs, dsc, rm);
6232 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6233 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6234 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6237 dsc->cleanup = &cleanup_alu_reg;
/* Copy an ARM-mode ALU instruction with register RHS for displaced
   stepping.  If none of Rd/Rn/Rm (mask 0x000ff00f) is the PC the
   instruction runs unmodified.  Otherwise the register fields are
   rewritten to r0/r1/r2 and install_alu_reg sets up the shuffle.  */
6241 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
6242 struct displaced_step_closure *dsc)
6244 unsigned int op = bits (insn, 21, 24);
6245 int is_mov = (op == 0xd);
6247 if (!insn_references_pc (insn, 0x000ff00ful))
6248 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
6250 if (debug_displaced)
6251 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
6252 is_mov ? "move" : "ALU", (unsigned long) insn);
/* NOTE(review): an if (is_mov) / else appears to have been elided from
   this view; 0x2 substitutes only Rm = r2 (MOV has no Rn), 0x10002
   substitutes Rn = r1 and Rm = r2 — TODO confirm against full file.  */
6255 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
6257 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
6259 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
/* Copy a 16-bit Thumb ALU instruction with a register operand.
   Instructions not touching the PC run unmodified; otherwise the
   operands are redirected through scratch registers via
   install_alu_reg.  NOTE(review): `rm' is declared but never assigned
   in the lines visible here before being passed to install_alu_reg —
   presumably it is set on a line elided from this view (original line
   ~6273); verify against the full file, otherwise this reads an
   uninitialized variable.  */
6265 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
6266 struct regcache *regs,
6267 struct displaced_step_closure *dsc)
6269 unsigned rn, rm, rd;
6271 rd = bits (insn, 3, 6);
6272 rn = (bit (insn, 7) << 3) | bits (insn, 0, 2);
6275 if (rd != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
6276 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
6278 if (debug_displaced)
6279 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x\n",
6280 "ALU", (unsigned short) insn);
6282 dsc->modinsn[0] = ((insn & 0xff00) | 0x08);
6284 install_alu_reg (gdbarch, regs, dsc, rd, rn, rm);
6289 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
/* Cleanup for ALU-with-shifted-register copies: read the result from
   scratch register r0, restore r0-r3 from dsc->tmp[], then commit the
   result to the real destination (ALU_WRITE_PC handles Rd == PC).  */
6292 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
6293 struct regcache *regs,
6294 struct displaced_step_closure *dsc)
6296 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
6299 for (i = 0; i < 4; i++)
6300 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6302 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
/* Common preparation for ALU instructions with a register-shifted
   register RHS (four register operands RD/RN/RM/RS): save r0-r3 into
   dsc->tmp[], load the four operand values into r0-r3 so the rewritten
   copy uses only scratch registers, and register
   cleanup_alu_shifted_reg to undo the shuffle afterwards.  */
6306 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
6307 struct displaced_step_closure *dsc,
6308 unsigned int rd, unsigned int rn, unsigned int rm,
6312 ULONGEST rd_val, rn_val, rm_val, rs_val;
6314 /* Instruction is of form:
6316 <op><cond> rd, [rn,] rm, <shift> rs
6320 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
6321 r0, r1, r2, r3 <- rd, rn, rm, rs
6322 Insn: <op><cond> r0, r1, r2, <shift> r3
6324 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
6328 for (i = 0; i < 4; i++)
6329 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6331 rd_val = displaced_read_reg (regs, dsc, rd);
6332 rn_val = displaced_read_reg (regs, dsc, rn);
6333 rm_val = displaced_read_reg (regs, dsc, rm);
6334 rs_val = displaced_read_reg (regs, dsc, rs);
6335 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6336 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6337 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6338 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
6340 dsc->cleanup = &cleanup_alu_shifted_reg;
/* Copy an ARM-mode ALU instruction with register-shifted-register RHS.
   PC-free instructions (mask 0x000fff0f covers Rd/Rn/Rm/Rs) run
   unmodified; otherwise the four register fields are rewritten to
   r0-r3 and install_alu_shifted_reg sets up the register shuffle.  */
6344 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
6345 struct regcache *regs,
6346 struct displaced_step_closure *dsc)
6348 unsigned int op = bits (insn, 21, 24);
6349 int is_mov = (op == 0xd);
6350 unsigned int rd, rn, rm, rs;
6352 if (!insn_references_pc (insn, 0x000fff0ful))
6353 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
6355 if (debug_displaced)
6356 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
6357 "%.8lx\n", is_mov ? "move" : "ALU",
6358 (unsigned long) insn);
6360 rn = bits (insn, 16, 19);
6361 rm = bits (insn, 0, 3);
6362 rs = bits (insn, 8, 11);
6363 rd = bits (insn, 12, 15);
/* NOTE(review): an if (is_mov) / else has apparently been elided here;
   0x302 substitutes Rm = r2, Rs = r3 (MOV has no Rn), 0x10302 also
   substitutes Rn = r1 — TODO confirm against full file.  */
6366 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
6368 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
6370 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
6375 /* Clean up load instructions. */
/* Cleanup for load copies: collect the loaded value(s) from scratch
   registers r0 (and r1 for 8-byte transfers), restore all scratch
   registers from dsc->tmp[], apply base-register writeback if the
   original instruction had it, and finally place the loaded value(s)
   into the real destination register(s) — LOAD_WRITE_PC handles the
   case where Rt is the PC.  */
6378 cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
6379 struct displaced_step_closure *dsc)
6381 ULONGEST rt_val, rt_val2 = 0, rn_val;
6383 rt_val = displaced_read_reg (regs, dsc, 0);
6384 if (dsc->u.ldst.xfersize == 8)
6385 rt_val2 = displaced_read_reg (regs, dsc, 1);
/* r2 held the (possibly written-back) base register value.  */
6386 rn_val = displaced_read_reg (regs, dsc, 2);
6388 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6389 if (dsc->u.ldst.xfersize > 4)
6390 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6391 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
/* r3 was only used for the index register in the non-immediate form.  */
6392 if (!dsc->u.ldst.immed)
6393 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
6395 /* Handle register writeback. */
6396 if (dsc->u.ldst.writeback)
6397 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
6398 /* Put result in right place. */
6399 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
6400 if (dsc->u.ldst.xfersize == 8)
6401 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
6404 /* Clean up store instructions. */
/* Cleanup for store copies: restore the scratch registers (r0-r3, and
   r4 when it was used for the PC-offset computation — see
   install_load_store), then apply base-register writeback using the
   value left in r2 by the out-of-line instruction.  */
6407 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
6408 struct displaced_step_closure *dsc)
6410 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
6412 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6413 if (dsc->u.ldst.xfersize > 4)
6414 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6415 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
6416 if (!dsc->u.ldst.immed)
6417 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
/* NOTE(review): the sense of this test looks inverted relative to its
   name — r4 is restored when restore_r4 is *false* in the visible
   lines; an elided line may change the condition, TODO confirm.  */
6418 if (!dsc->u.ldst.restore_r4)
6419 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
6422 if (dsc->u.ldst.writeback)
6423 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
6426 /* Copy "extra" load/store instructions. These are halfword/doubleword
6427 transfers, which have a different encoding to byte/word transfers. */
/* Copy ARM "extra" load/store instructions (halfword, signed byte/half,
   doubleword: LDRH/STRH/LDRD/STRD/LDRSB/LDRSH and friends) for
   displaced stepping.  The opcode index derived from op1/op2 selects
   entries in the load[]/bytesize[] tables below.  PC-free instructions
   run unmodified; otherwise Rt(/Rt2)/Rn(/Rm) are redirected through
   r0(/r1)/r2(/r3).  NOTE(review): "unpriveleged" (parameter name and
   debug string) is a misspelling of "unprivileged" — fixing it means a
   code change (parameter rename + string), so it is only flagged here.  */
6430 arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unpriveleged,
6431 struct regcache *regs, struct displaced_step_closure *dsc)
6433 unsigned int op1 = bits (insn, 20, 24);
6434 unsigned int op2 = bits (insn, 5, 6);
6435 unsigned int rt = bits (insn, 12, 15);
6436 unsigned int rn = bits (insn, 16, 19);
6437 unsigned int rm = bits (insn, 0, 3);
/* Indexed by the decoded opcode: is this a load, and how many bytes
   does it transfer (8 == doubleword, needs Rt and Rt+1).  */
6438 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
6439 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
6440 int immed = (op1 & 0x4) != 0;
6442 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
6444 if (!insn_references_pc (insn, 0x000ff00ful))
6445 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
6447 if (debug_displaced)
6448 fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
6449 "insn %.8lx\n", unpriveleged ? "unpriveleged " : "",
6450 (unsigned long) insn);
/* An elided `if (opcode < 0)' (or similar guard) presumably precedes
   the internal_error below — TODO confirm against full file.  */
6452 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
6455 internal_error (__FILE__, __LINE__,
6456 _("copy_extra_ld_st: instruction decode error"));
6458 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6459 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6460 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
/* r3 is only needed for the register-offset (non-immediate) form.  */
6462 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6464 rt_val = displaced_read_reg (regs, dsc, rt);
6465 if (bytesize[opcode] == 8)
6466 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
6467 rn_val = displaced_read_reg (regs, dsc, rn);
6469 rm_val = displaced_read_reg (regs, dsc, rm);
6471 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
6472 if (bytesize[opcode] == 8)
6473 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
6474 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
6476 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
6479 dsc->u.ldst.xfersize = bytesize[opcode];
6480 dsc->u.ldst.rn = rn;
6481 dsc->u.ldst.immed = immed;
/* Writeback for pre-indexed (P=1,W=1) or post-indexed (P=0) forms.  */
6482 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
6483 dsc->u.ldst.restore_r4 = 0;
6486 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
6488 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
6489 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
6491 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
6493 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
6494 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
6496 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
6501 /* Copy byte/half word/word loads and stores. */
/* Common preparation for byte/halfword/word load and store copies:
   save the scratch registers that will be clobbered (r0, r2, r3 for
   register-offset forms, r4 when the STR-of-PC sequence is used), load
   RT/RN(/RM) into r0/r2(/r3), and record the transfer parameters in
   dsc->u.ldst for the cleanup routine.  The caller fills dsc->modinsn
   and restore_r4; cleanup_load/cleanup_store undo the shuffle.  */
6504 install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
6505 struct displaced_step_closure *dsc, int load,
6506 int immed, int writeback, int size, int usermode,
6507 int rt, int rm, int rn)
6509 ULONGEST rt_val, rn_val, rm_val = 0;
6511 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6512 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6514 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6516 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
6518 rt_val = displaced_read_reg (regs, dsc, rt);
6519 rn_val = displaced_read_reg (regs, dsc, rn);
6521 rm_val = displaced_read_reg (regs, dsc, rm);
6523 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
6524 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
6526 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
6528 dsc->u.ldst.xfersize = size;
6529 dsc->u.ldst.rn = rn;
6530 dsc->u.ldst.immed = immed;
6531 dsc->u.ldst.writeback = writeback;
6533 /* To write PC we can do:
6535 Before this sequence of instructions:
6536 r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
6537 r2 is the Rn value got from displaced_read_reg.
6539 Insn1: push {pc} Write address of STR instruction + offset on stack
6540 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
6541 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
6542 = addr(Insn1) + offset - addr(Insn3) - 8
6544 Insn4: add r4, r4, #8 r4 = offset - 8
6545 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
6547 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
6549 Otherwise we don't know what value to write for PC, since the offset is
6550 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
6551 of this can be found in Section "Saving from r15" in
6552 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
6554 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
/* Copy a Thumb-2 PC-relative (literal) load for displaced stepping.
   The PC-relative LDR is rewritten into a register-offset
   LDR r0, [r2, r3] with r2 holding the word-aligned original PC and
   r3 the imm12 offset; cleanup_load moves the result into the real Rt.
   NOTE(review): the U bit (add vs. subtract offset) is extracted but
   no negation of imm12 is visible here — presumably handled on an
   elided line; TODO confirm against the full file.  */
6559 thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
6560 uint16_t insn2, struct regcache *regs,
6561 struct displaced_step_closure *dsc, int size)
6563 unsigned int u_bit = bit (insn1, 7);
6564 unsigned int rt = bits (insn2, 12, 15);
6565 int imm12 = bits (insn2, 0, 11);
6568 if (debug_displaced)
6569 fprintf_unfiltered (gdb_stdlog,
6570 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
6571 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
6577 /* Rewrite instruction LDR Rt imm12 into:
6579 Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12
6583 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2]. */
6586 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6587 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6588 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6590 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
/* Literal loads use the Align(PC, 4) base.  */
6592 pc_val = pc_val & 0xfffffffc;
6594 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
6595 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
6599 dsc->u.ldst.xfersize = size;
6600 dsc->u.ldst.immed = 0;
6601 dsc->u.ldst.writeback = 0;
6602 dsc->u.ldst.restore_r4 = 0;
6604 /* LDR R0, R2, R3 */
6605 dsc->modinsn[0] = 0xf852;
6606 dsc->modinsn[1] = 0x3;
6609 dsc->cleanup = &cleanup_load;
/* Copy a Thumb-2 LDR (immediate or register offset) for displaced
   stepping.  Rm cannot be the PC in these encodings, so only Rt and
   Rn need checking; PC-free loads run unmodified.  Otherwise Rt/Rn
   (and Rm) are redirected through r0/r2(/r3) via install_load_store.  */
6615 thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
6616 uint16_t insn2, struct regcache *regs,
6617 struct displaced_step_closure *dsc,
6618 int writeback, int immed)
6620 unsigned int rt = bits (insn2, 12, 15);
6621 unsigned int rn = bits (insn1, 0, 3);
6622 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
6623 /* In LDR (register), there is also a register Rm, which is not allowed to
6624 be PC, so we don't have to check it. */
6626 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
6627 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
6630 if (debug_displaced)
6631 fprintf_unfiltered (gdb_stdlog,
6632 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
6633 rt, rn, insn1, insn2);
6635 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
6638 dsc->u.ldst.restore_r4 = 0;
6641 /* ldr[b]<cond> rt, [rn, #imm], etc.
6643 ldr[b]<cond> r0, [r2, #imm]. */
6645 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
6646 dsc->modinsn[1] = insn2 & 0x0fff;
6649 /* ldr[b]<cond> rt, [rn, rm], etc.
6651 ldr[b]<cond> r0, [r2, r3]. */
6653 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
6654 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
/* Copy ARM-mode LDR/STR/LDRB/STRB (and their T, user-mode variants)
   for displaced stepping.  PC-free instructions run unmodified.  For
   loads, or stores whose Rt is not the PC, the register fields are
   simply rewritten to scratch registers.  Storing the PC needs the
   six-instruction push/pop/sub/add sequence below (see the comment in
   install_load_store) to reconstruct the architecture-dependent
   stored-PC offset; r4 is used as scratch and restored afterwards.  */
6664 arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
6665 struct regcache *regs,
6666 struct displaced_step_closure *dsc,
6667 int load, int size, int usermode)
6669 int immed = !bit (insn, 25);
6670 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
6671 unsigned int rt = bits (insn, 12, 15);
6672 unsigned int rn = bits (insn, 16, 19);
6673 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
6675 if (!insn_references_pc (insn, 0x000ff00ful))
6676 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
6678 if (debug_displaced)
6679 fprintf_unfiltered (gdb_stdlog,
6680 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
6681 load ? (size == 1 ? "ldrb" : "ldr")
6682 : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
6684 (unsigned long) insn);
6686 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
6687 usermode, rt, rm, rn);
/* Easy case: anything except STR-of-PC just gets its registers
   substituted (Rt -> r0, Rn -> r2, Rm -> r3).  */
6689 if (load || rt != ARM_PC_REGNUM)
6691 dsc->u.ldst.restore_r4 = 0;
6694 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
6696 {ldr,str}[b]<cond> r0, [r2, #imm]. */
6697 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
6699 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
6701 {ldr,str}[b]<cond> r0, [r2, r3]. */
6702 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
6706 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
6707 dsc->u.ldst.restore_r4 = 1;
6708 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
6709 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
6710 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
6711 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
6712 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
6716 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
6718 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
6723 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
6728 /* Cleanup LDM instructions with fully-populated register list. This is an
6729 unfortunate corner case: it's impossible to implement correctly by modifying
6730 the instruction. The issue is as follows: we have an instruction,
6734 which we must rewrite to avoid loading PC. A possible solution would be to
6735 do the load in two halves, something like (with suitable cleanup
6739 ldm[id][ab] r8!, {r0-r7}
6741 ldm[id][ab] r8, {r7-r14}
6744 but at present there's no suitable place for <temp>, since the scratch space
6745 is overwritten before the cleanup routine is called. For now, we simply
6746 emulate the instruction. */
/* Emulate an LDM with a fully-populated register list (the copied
   instruction was replaced with a NOP — see arm_copy_block_xfer).
   Walks the register mask in transfer order (ascending for increment,
   descending for decrement), reading each word from memory and writing
   it to the corresponding register, then applies writeback manually.
   Refuses exception-return forms (ldm ...{..pc}^) outright.  */
6749 cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
6750 struct displaced_step_closure *dsc)
6752 int inc = dsc->u.block.increment;
/* Pre-indexed forms bump the address before the access, post-indexed
   after; direction follows the increment flag.  */
6753 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
6754 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
6755 uint32_t regmask = dsc->u.block.regmask;
6756 int regno = inc ? 0 : 15;
6757 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
6758 int exception_return = dsc->u.block.load && dsc->u.block.user
6759 && (regmask & 0x8000) != 0;
6760 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
/* The original instruction was conditional; only transfer if its
   condition would have passed.  */
6761 int do_transfer = condition_true (dsc->u.block.cond, status);
6762 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
6767 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
6768 sensible we can do here. Complain loudly. */
6769 if (exception_return)
6770 error (_("Cannot single-step exception return"));
6772 /* We don't handle any stores here for now. */
6773 gdb_assert (dsc->u.block.load != 0);
6775 if (debug_displaced)
6776 fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
6777 "%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
6778 dsc->u.block.increment ? "inc" : "dec",
6779 dsc->u.block.before ? "before" : "after");
/* Skip to the next set bit in the mask, in transfer order.  */
6786 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
6789 while (regno >= 0 && (regmask & (1 << regno)) == 0)
6792 xfer_addr += bump_before;
6794 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
6795 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
6797 xfer_addr += bump_after;
6799 regmask &= ~(1 << regno);
6802 if (dsc->u.block.writeback)
6803 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
6807 /* Clean up an STM which included the PC in the register list. */
/* Cleanup for an STM that included the PC: the out-of-line copy stored
   the PC of the scratch area, so find where in the transferred block
   the PC landed, measure the (architecture-dependent) store offset
   from the scratch copy, and overwrite that memory word with the
   correctly-offset original PC.  */
6810 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
6811 struct displaced_step_closure *dsc)
6813 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6814 int store_executed = condition_true (dsc->u.block.cond, status);
6815 CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
6816 CORE_ADDR stm_insn_addr;
6819 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
6821 /* If condition code fails, there's nothing else to do. */
6822 if (!store_executed)
/* PC is the highest-numbered register, so it is always stored at the
   highest address of the transferred block.  */
6825 if (dsc->u.block.increment)
6827 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
6829 if (dsc->u.block.before)
6834 pc_stored_at = dsc->u.block.xfer_addr;
6836 if (dsc->u.block.before)
6840 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
/* The stored value is scratch-area PC + offset; subtracting the
   scratch base auto-detects the offset (+8 or +12).  */
6841 stm_insn_addr = dsc->scratch_base;
6842 offset = pc_val - stm_insn_addr;
6844 if (debug_displaced)
6845 fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
6846 "STM instruction\n", offset);
6848 /* Rewrite the stored PC to the proper value for the non-displaced original
6850 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
6851 dsc->insn_addr + offset);
6854 /* Clean up an LDM which includes the PC in the register list. We clumped all
6855 the registers in the transferred list into a contiguous range r0...rX (to
6856 avoid loading PC directly and losing control of the debugged program), so we
6857 must undo that here. */
/* Cleanup for an LDM whose register list included the PC.  The copied
   instruction loaded into a contiguous r0..rX range instead (see
   arm_copy_block_xfer); here the loaded values are shuffled from that
   contiguous range into the registers actually named in the original
   mask, working downward from the PC so a value is never overwritten
   before it is moved.  Clobbered low registers that were not targets
   are restored from dsc->tmp[], and writeback is emulated manually.  */
6860 cleanup_block_load_pc (struct gdbarch *gdbarch,
6861 struct regcache *regs,
6862 struct displaced_step_closure *dsc)
6864 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6865 int load_executed = condition_true (dsc->u.block.cond, status);
6866 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
6867 unsigned int regs_loaded = bitcount (mask);
6868 unsigned int num_to_shuffle = regs_loaded, clobbered;
6870 /* The method employed here will fail if the register list is fully populated
6871 (we need to avoid loading PC directly). */
6872 gdb_assert (num_to_shuffle < 16);
/* All of r0..r(N-1) were clobbered by the contiguous load.  */
6877 clobbered = (1 << num_to_shuffle) - 1;
6879 while (num_to_shuffle > 0)
6881 if ((mask & (1 << write_reg)) != 0)
6883 unsigned int read_reg = num_to_shuffle - 1;
6885 if (read_reg != write_reg)
6887 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
6888 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
6889 if (debug_displaced)
6890 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
6891 "loaded register r%d to r%d\n"), read_reg,
6894 else if (debug_displaced)
6895 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
6896 "r%d already in the right place\n"),
/* This destination now holds its final value; do not restore it.  */
6899 clobbered &= ~(1 << write_reg);
6907 /* Restore any registers we scribbled over. */
6908 for (write_reg = 0; clobbered != 0; write_reg++)
6910 if ((clobbered & (1 << write_reg)) != 0)
6912 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
6914 if (debug_displaced)
6915 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
6916 "clobbered register r%d\n"), write_reg);
6917 clobbered &= ~(1 << write_reg);
6921 /* Perform register writeback manually. */
6922 if (dsc->u.block.writeback)
6924 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
6926 if (dsc->u.block.increment)
6927 new_rn_val += regs_loaded * 4;
6929 new_rn_val -= regs_loaded * 4;
6931 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
6936 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
6937 in user-level code (in particular exception return, ldm rn, {...pc}^). */
/* Copy an ARM LDM/STM for displaced stepping.  Strategy by case:
   - No PC involvement: run unmodified.
   - Rn == PC: unpredictable; warn and run unmodified anyway.
   - LDM with full register list: emulate entirely in
     cleanup_block_load_all (copied insn becomes a NOP).
   - LDM including PC: rewrite the mask to a contiguous r0..rX range
     and let cleanup_block_load_pc shuffle values into place; writeback
     is suppressed and emulated in the cleanup.
   - STM including PC: run as-is out of line, then fix up the stored
     PC value in cleanup_block_store_pc.  */
6940 arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
6941 struct regcache *regs,
6942 struct displaced_step_closure *dsc)
6944 int load = bit (insn, 20);
6945 int user = bit (insn, 22);
6946 int increment = bit (insn, 23);
6947 int before = bit (insn, 24);
6948 int writeback = bit (insn, 21);
6949 int rn = bits (insn, 16, 19);
6951 /* Block transfers which don't mention PC can be run directly
6953 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
6954 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
6956 if (rn == ARM_PC_REGNUM)
6958 warning (_("displaced: Unpredictable LDM or STM with "
6959 "base register r15"));
6960 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
6963 if (debug_displaced)
6964 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6965 "%.8lx\n", (unsigned long) insn);
6967 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
6968 dsc->u.block.rn = rn;
6970 dsc->u.block.load = load;
6971 dsc->u.block.user = user;
6972 dsc->u.block.increment = increment;
6973 dsc->u.block.before = before;
6974 dsc->u.block.writeback = writeback;
6975 dsc->u.block.cond = bits (insn, 28, 31);
6977 dsc->u.block.regmask = insn & 0xffff;
6981 if ((insn & 0xffff) == 0xffff)
6983 /* LDM with a fully-populated register list. This case is
6984 particularly tricky. Implement for now by fully emulating the
6985 instruction (which might not behave perfectly in all cases, but
6986 these instructions should be rare enough for that not to matter
6988 dsc->modinsn[0] = ARM_NOP;
6990 dsc->cleanup = &cleanup_block_load_all;
6994 /* LDM of a list of registers which includes PC. Implement by
6995 rewriting the list of registers to be transferred into a
6996 contiguous chunk r0...rX before doing the transfer, then shuffling
6997 registers into the correct places in the cleanup routine. */
6998 unsigned int regmask = insn & 0xffff;
6999 unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
7000 unsigned int to = 0, from = 0, i, new_rn;
/* Save the low registers the contiguous load will clobber.  */
7002 for (i = 0; i < num_in_list; i++)
7003 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7005 /* Writeback makes things complicated. We need to avoid clobbering
7006 the base register with one of the registers in our modified
7007 register list, but just using a different register can't work in
7010 ldm r14!, {r0-r13,pc}
7012 which would need to be rewritten as:
7016 but that can't work, because there's no free register for N.
7018 Solve this by turning off the writeback bit, and emulating
7019 writeback manually in the cleanup routine. */
/* Contiguous mask r0..r(num_in_list-1) replaces the original list.  */
7024 new_regmask = (1 << num_in_list) - 1;
7026 if (debug_displaced)
7027 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
7028 "{..., pc}: original reg list %.4x, modified "
7029 "list %.4x\n"), rn, writeback ? "!" : "",
7030 (int) insn & 0xffff, new_regmask);
7032 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
7034 dsc->cleanup = &cleanup_block_load_pc;
7039 /* STM of a list of registers which includes PC. Run the instruction
7040 as-is, but out of line: this will store the wrong value for the PC,
7041 so we must manually fix up the memory in the cleanup routine.
7042 Doing things this way has the advantage that we can auto-detect
7043 the offset of the PC write (which is architecture-dependent) in
7044 the cleanup routine. */
7045 dsc->modinsn[0] = insn;
7047 dsc->cleanup = &cleanup_block_store_pc;
/* Thumb-2 counterpart of arm_copy_block_xfer: copy a 32-bit LDM/STM.
   Same strategy — run PC-free transfers unmodified, warn on Rn == PC,
   rewrite a PC-including LDM to a contiguous r0..rX list (shuffled
   back in cleanup_block_load_pc), and fix up a PC-including STM's
   stored value in cleanup_block_store_pc.  Thumb-2 LDM cannot name
   all 16 registers, so the full-list case is unreachable here.  */
7054 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
7055 struct regcache *regs,
7056 struct displaced_step_closure *dsc)
7058 int rn = bits (insn1, 0, 3);
7059 int load = bit (insn1, 4);
7060 int writeback = bit (insn1, 5);
7062 /* Block transfers which don't mention PC can be run directly
7064 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
7065 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
7067 if (rn == ARM_PC_REGNUM)
7069 warning (_("displaced: Unpredictable LDM or STM with "
7070 "base register r15"));
7071 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7072 "unpredictable ldm/stm", dsc);
7075 if (debug_displaced)
7076 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
7077 "%.4x%.4x\n", insn1, insn2);
7079 /* Clear bit 13, since it should be always zero. */
7080 dsc->u.block.regmask = (insn2 & 0xdfff);
7081 dsc->u.block.rn = rn;
7083 dsc->u.block.load = load;
7084 dsc->u.block.user = 0;
7085 dsc->u.block.increment = bit (insn1, 7);
7086 dsc->u.block.before = bit (insn1, 8);
7087 dsc->u.block.writeback = writeback;
/* Thumb-2 LDM/STM are unconditional (outside IT); record AL.  */
7088 dsc->u.block.cond = INST_AL;
7089 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
7093 if (dsc->u.block.regmask == 0xffff)
7095 /* This branch is impossible to happen. */
7100 unsigned int regmask = dsc->u.block.regmask;
7101 unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
7102 unsigned int to = 0, from = 0, i, new_rn;
7104 for (i = 0; i < num_in_list; i++)
7105 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
/* Contiguous replacement list r0..r(num_in_list-1); writeback is
   emulated in the cleanup (see arm_copy_block_xfer's comment).  */
7110 new_regmask = (1 << num_in_list) - 1;
7112 if (debug_displaced)
7113 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
7114 "{..., pc}: original reg list %.4x, modified "
7115 "list %.4x\n"), rn, writeback ? "!" : "",
7116 (int) dsc->u.block.regmask, new_regmask);
7118 dsc->modinsn[0] = insn1;
7119 dsc->modinsn[1] = (new_regmask & 0xffff);
7122 dsc->cleanup = &cleanup_block_load_pc;
7127 dsc->modinsn[0] = insn1;
7128 dsc->modinsn[1] = insn2;
7130 dsc->cleanup = &cleanup_block_store_pc;
7135 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
7136 for Linux, where some SVC instructions must be treated specially. */
/* Cleanup for SVC (SWI) copies: resume execution at the instruction
   following the original SVC (insn_addr + insn_size), not after the
   scratch-area copy.  */
7139 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
7140 struct displaced_step_closure *dsc)
7142 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
7144 if (debug_displaced)
7145 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
7146 "%.8lx\n", (unsigned long) resume_addr);
7148 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
7152 /* Common copy routine for svc instruction. */
/* Common setup for copying an SVC instruction: the SVC itself runs
   unmodified; cleanup_svc redirects the PC to the instruction after
   the original.  OS-specific handling (e.g. Linux syscall
   interception) may take over via dsc->u.svc.copy_svc_os.  */
7155 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
7156 struct displaced_step_closure *dsc)
7158 /* Preparation: none.
7159 Insn: unmodified svc.
7160 Cleanup: pc <- insn_addr + insn_size. */
7162 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
7164 dsc->wrote_to_pc = 1;
7166 /* Allow OS-specific code to override SVC handling. */
7167 if (dsc->u.svc.copy_svc_os)
7168 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
7171 dsc->cleanup = &cleanup_svc;
/* Copy an ARM-mode SVC: the instruction is executed unmodified;
   install_svc wires up the cleanup / OS override.  */
7177 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
7178 struct regcache *regs, struct displaced_step_closure *dsc)
7181 if (debug_displaced)
7182 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
7183 (unsigned long) insn);
7185 dsc->modinsn[0] = insn;
7187 return install_svc (gdbarch, regs, dsc);
/* Copy a 16-bit Thumb SVC: executed unmodified; install_svc wires up
   the cleanup / OS override, mirroring arm_copy_svc.  */
7191 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
7192 struct regcache *regs, struct displaced_step_closure *dsc)
7195 if (debug_displaced)
7196 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
7199 dsc->modinsn[0] = insn;
7201 return install_svc (gdbarch, regs, dsc);
7204 /* Copy undefined instructions. */
/* Copy an (architecturally) undefined ARM instruction unmodified:
   executing the copy out of line will raise the same undefined-
   instruction exception the original would have.  */
7207 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
7208 struct displaced_step_closure *dsc)
7210 if (debug_displaced)
7211 fprintf_unfiltered (gdb_stdlog,
7212 "displaced: copying undefined insn %.8lx\n",
7213 (unsigned long) insn);
7215 dsc->modinsn[0] = insn;
/* Copy a 32-bit Thumb undefined instruction unmodified (both
   halfwords), mirroring arm_copy_undef.  */
7221 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
7222 struct displaced_step_closure *dsc)
7225 if (debug_displaced)
7226 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
7227 "%.4x %.4x\n", (unsigned short) insn1,
7228 (unsigned short) insn2);
7230 dsc->modinsn[0] = insn1;
7231 dsc->modinsn[1] = insn2;
7237 /* Copy unpredictable instructions. */
/* Copy an UNPREDICTABLE instruction unmodified: its behavior is not
   architecturally defined, so executing it out of line verbatim is as
   good as anything else we could do.  */
7240 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
7241 struct displaced_step_closure *dsc)
7243 if (debug_displaced)
7244 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
7245 "%.8lx\n", (unsigned long) insn);
7247 dsc->modinsn[0] = insn;
7252 /* The decode_* functions are instruction decoding helpers. They mostly follow
7253 the presentation in the ARM ARM. */
/* Decode the miscellaneous / memory-hint / Advanced-SIMD space of the
   ARM unconditional-instruction encodings (CPS, SETEND, NEON data
   processing and element/structure load/store, PLD/PLI/PLDW, CLREX,
   barriers) and dispatch each to the appropriate copy routine.  The
   op1/op2/rn field tests follow the decode tables in the ARM ARM.  */
7256 arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
7257 struct regcache *regs,
7258 struct displaced_step_closure *dsc)
7260 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
7261 unsigned int rn = bits (insn, 16, 19);
7263 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0xe) == 0x0)
7264 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
7265 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0xe) == 0x1)
7266 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
7267 else if ((op1 & 0x60) == 0x20)
7268 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
7269 else if ((op1 & 0x71) == 0x40)
7270 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
7272 else if ((op1 & 0x77) == 0x41)
7273 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
7274 else if ((op1 & 0x77) == 0x45)
7275 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
7276 else if ((op1 & 0x77) == 0x51)
/* An elided rn != 0xf test presumably separates the preload from the
   unpredictable case here — TODO confirm against full file.  */
7279 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
7281 return arm_copy_unpred (gdbarch, insn, dsc);
7283 else if ((op1 & 0x77) == 0x55)
7284 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
7285 else if (op1 == 0x57)
/* Hint/barrier group, selected by op2.  */
7288 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
7289 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
7290 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
7291 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
7292 default: return arm_copy_unpred (gdbarch, insn, dsc);
7294 else if ((op1 & 0x63) == 0x43)
7295 return arm_copy_unpred (gdbarch, insn, dsc);
7296 else if ((op2 & 0x1) == 0x0)
7297 switch (op1 & ~0x80)
7300 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
7302 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
7303 case 0x71: case 0x75:
/* pld/pldw (register) forms.  */
7305 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
7306 case 0x63: case 0x67: case 0x73: case 0x77:
7307 return arm_copy_unpred (gdbarch, insn, dsc);
7309 return arm_copy_undef (gdbarch, insn, dsc);
7312 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
/* Decode ARM unconditional (cond == 0b1111) instructions and dispatch
   to the appropriate copy routine: the misc/memhint/NEON space when
   bit 27 is clear, otherwise SRS/RFE, BL/BLX immediate, and the
   coprocessor load/store, MCRR/MRRC, LDC/STC and CDP/MCR/MRC groups,
   following the ARM ARM unconditional-instruction decode table.  */
7316 arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
7317 struct regcache *regs,
7318 struct displaced_step_closure *dsc)
7320 if (bit (insn, 27) == 0)
7321 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
7322 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
7323 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
7326 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
7329 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
7331 case 0x4: case 0x5: case 0x6: case 0x7:
/* BLX (immediate) / BL in the unconditional space changes the PC, so
   it gets the branch copy handler.  */
7332 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
7335 switch ((insn & 0xe00000) >> 21)
7337 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
/* stc/stc2.  */
7339 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7342 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
7345 return arm_copy_undef (gdbarch, insn, dsc);
7350 int rn_f = (bits (insn, 16, 19) == 0xf);
7351 switch ((insn & 0xe00000) >> 21)
7354 /* ldc/ldc2 imm (undefined for rn == pc). */
7355 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
7356 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7359 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
7361 case 0x4: case 0x5: case 0x6: case 0x7:
7362 /* ldc/ldc2 lit (undefined for rn != pc). */
7363 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
7364 : arm_copy_undef (gdbarch, insn, dsc);
7367 return arm_copy_undef (gdbarch, insn, dsc);
7372 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
7375 if (bits (insn, 16, 19) == 0xf)
/* ldc/ldc2 lit.  */
7377 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7379 return arm_copy_undef (gdbarch, insn, dsc);
7383 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
7385 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7389 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
7391 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7394 return arm_copy_undef (gdbarch, insn, dsc);
7398 /* Decode miscellaneous instructions in dp/misc encoding space. */
/* Handles mrs/msr, bx, clz, bxj, blx (register), saturating add/sub,
   bkpt and smc.  Most are copied unmodified; branch-and-exchange forms
   need the dedicated bx/blx copy routine since they write the PC.  */
7401 arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
7402 struct regcache *regs,
7403 struct displaced_step_closure *dsc)
7405 unsigned int op2 = bits (insn, 4, 6);
7406 unsigned int op = bits (insn, 21, 22);
7407 unsigned int op1 = bits (insn, 16, 19);
7412 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
7415 if (op == 0x1) /* bx. */
7416 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
7418 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
7420 return arm_copy_undef (gdbarch, insn, dsc);
7424 /* Not really supported. */
7425 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
7427 return arm_copy_undef (gdbarch, insn, dsc);
7431 return arm_copy_bx_blx_reg (gdbarch, insn,
7432 regs, dsc); /* blx register. */
7434 return arm_copy_undef (gdbarch, insn, dsc);
7437 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
7441 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
7443 /* Not really supported. */
7444 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
7447 return arm_copy_undef (gdbarch, insn, dsc);
/* Decode the ARM data-processing / miscellaneous encoding space.
   Immediate forms (movw/movt/msr-imm/ALU-imm) are handled in the first
   switch; the register-operand forms further dispatch on op1/op2 bits
   to ALU-register, shifted-register, miscellaneous, multiply, synch
   and extra load/store copy routines.  */
7452 arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
7453 struct regcache *regs,
7454 struct displaced_step_closure *dsc)
7457 switch (bits (insn, 20, 24))
7460 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
7463 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
7465 case 0x12: case 0x16:
7466 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
7469 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
7473 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
7475 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
7476 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
7477 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
7478 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
7479 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
7480 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
7481 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
7482 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
7483 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
7484 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
7485 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
7486 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
7487 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
7488 /* 2nd arg means "unprivileged". */
7489 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
7493 /* Should be unreachable. */
/* Decode ARM load/store word and unsigned byte instructions.
   A is bit 25 (register vs. immediate offset form), B is bit 4; OP1 bit
   patterns select str/ldr/strb/ldrb and their user-mode (t-suffixed)
   variants.  The trailing three arguments of arm_copy_ldr_str_ldrb_strb
   appear to be (load, size, user-mode) -- TODO confirm against that
   routine's definition, which is outside this extract.  */
7498 arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
7499 struct regcache *regs,
7500 struct displaced_step_closure *dsc)
7502 int a = bit (insn, 25), b = bit (insn, 4);
7503 uint32_t op1 = bits (insn, 20, 24);
7504 int rn_f = bits (insn, 16, 19) == 0xf;
7506 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
7507 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
7508 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
7509 else if ((!a && (op1 & 0x17) == 0x02)
7510 || (a && (op1 & 0x17) == 0x02 && !b))
7511 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
7512 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
7513 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
7514 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
7515 else if ((!a && (op1 & 0x17) == 0x03)
7516 || (a && (op1 & 0x17) == 0x03 && !b))
7517 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
7518 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
7519 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
7520 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
7521 else if ((!a && (op1 & 0x17) == 0x06)
7522 || (a && (op1 & 0x17) == 0x06 && !b))
7523 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
7524 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
7525 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
7526 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
7527 else if ((!a && (op1 & 0x17) == 0x07)
7528 || (a && (op1 & 0x17) == 0x07 && !b))
7529 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
7531 /* Should be unreachable. */
/* Decode ARM media instructions (parallel add/sub, pack/unpack,
   saturate/reverse, usad8/usada8, bit-field insert/extract).  None of
   these reference the PC in a way that needs modification, so all are
   either copied unmodified or treated as undefined.  */
7536 arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
7537 struct displaced_step_closure *dsc)
7539 switch (bits (insn, 20, 24))
7541 case 0x00: case 0x01: case 0x02: case 0x03:
7542 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
7544 case 0x04: case 0x05: case 0x06: case 0x07:
7545 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
7547 case 0x08: case 0x09: case 0x0a: case 0x0b:
7548 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
7549 return arm_copy_unmodified (gdbarch, insn,
7550 "decode/pack/unpack/saturate/reverse", dsc);
7553 if (bits (insn, 5, 7) == 0) /* op2. */
/* Rd == 0xf distinguishes usad8 from usada8.  */
7555 if (bits (insn, 12, 15) == 0xf)
7556 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
7558 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
7561 return arm_copy_undef (gdbarch, insn, dsc);
7563 case 0x1a: case 0x1b:
7564 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7565 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
7567 return arm_copy_undef (gdbarch, insn, dsc);
7569 case 0x1c: case 0x1d:
7570 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
/* Rn == 0xf distinguishes bfc from bfi.  */
7572 if (bits (insn, 0, 3) == 0xf)
7573 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
7575 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
7578 return arm_copy_undef (gdbarch, insn, dsc);
7580 case 0x1e: case 0x1f:
7581 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7582 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
7584 return arm_copy_undef (gdbarch, insn, dsc);
7587 /* Should be unreachable. */
/* Decode ARM branch (B/BL/BLX immediate) versus block transfer
   (LDM/STM) instructions and dispatch to the matching copy routine.
   The selector between the two paths is missing from this extract.  */
7592 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, int32_t insn,
7593 struct regcache *regs,
7594 struct displaced_step_closure *dsc)
7597 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
7599 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
/* Decode ARM VFP/Neon extension-register load/store instructions
   (mrrc/mcrr, vstm/vpush, vldm/vpop, vstr/vldr) by the opcode field in
   bits 20-24.  */
7603 arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
7604 struct regcache *regs,
7605 struct displaced_step_closure *dsc)
7607 unsigned int opcode = bits (insn, 20, 24);
7611 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
7612 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
7614 case 0x08: case 0x0a: case 0x0c: case 0x0e:
7615 case 0x12: case 0x16:
7616 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
7618 case 0x09: case 0x0b: case 0x0d: case 0x0f:
7619 case 0x13: case 0x17:
7620 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
7622 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
7623 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
7624 /* Note: no writeback for these instructions. Bit 25 will always be
7625 zero though (via caller), so the following works OK. */
7626 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7629 /* Should be unreachable. */
7633 /* Decode shifted register instructions. */
/* Thumb-2 data-processing (shifted register).  Only the MOV form may
   use the PC, so it alone needs the ALU-immediate copy treatment; all
   other encodings are copied unmodified.  */
7636 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
7637 uint16_t insn2, struct regcache *regs,
7638 struct displaced_step_closure *dsc)
7640 /* PC is only allowed to be used in instruction MOV. */
7642 unsigned int op = bits (insn1, 5, 8);
7643 unsigned int rn = bits (insn1, 0, 3);
7645 if (op == 0x2 && rn == 0xf) /* MOV */
7646 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
7648 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7649 "dp (shift reg)", dsc);
7653 /* Decode extension register load/store. Exactly the same as
7654 arm_decode_ext_reg_ld_st. */
/* Thumb-2 counterpart of arm_decode_ext_reg_ld_st: dispatch VFP/Neon
   vmov, vstm/vpush, vldm/vpop, vstr and vldr by the opcode in bits 4-8
   of the first halfword.  */
7657 thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
7658 uint16_t insn2, struct regcache *regs,
7659 struct displaced_step_closure *dsc)
7661 unsigned int opcode = bits (insn1, 4, 8);
7665 case 0x04: case 0x05:
7666 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7667 "vfp/neon vmov", dsc);
7669 case 0x08: case 0x0c: /* 01x00 */
7670 case 0x0a: case 0x0e: /* 01x10 */
7671 case 0x12: case 0x16: /* 10x10 */
7672 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7673 "vfp/neon vstm/vpush", dsc);
7675 case 0x09: case 0x0d: /* 01x01 */
7676 case 0x0b: case 0x0f: /* 01x11 */
7677 case 0x13: case 0x17: /* 10x11 */
7678 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7679 "vfp/neon vldm/vpop", dsc);
7681 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
7682 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7684 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
7685 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
7688 /* Should be unreachable. */
/* Decode ARM supervisor-call and coprocessor instructions.  Coprocessor
   numbers 0b101x select VFP/Neon handling; (op1 & 0x30) == 0x30 is the
   SVC space, which needs the dedicated SVC copy routine.  */
7693 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn, CORE_ADDR to,
7694 struct regcache *regs, struct displaced_step_closure *dsc)
7696 unsigned int op1 = bits (insn, 20, 25);
7697 int op = bit (insn, 4);
7698 unsigned int coproc = bits (insn, 8, 11);
7699 unsigned int rn = bits (insn, 16, 19);
7701 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
7702 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
7703 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
7704 && (coproc & 0xe) != 0xa)
7706 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7707 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
7708 && (coproc & 0xe) != 0xa)
7709 /* ldc/ldc2 imm/lit. */
7710 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7711 else if ((op1 & 0x3e) == 0x00)
7712 return arm_copy_undef (gdbarch, insn, dsc);
7713 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
7714 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
7715 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
7716 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
7717 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
7718 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
7719 else if ((op1 & 0x30) == 0x20 && !op)
7721 if ((coproc & 0xe) == 0xa)
7722 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
7724 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7726 else if ((op1 & 0x30) == 0x20 && op)
7727 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
7728 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
7729 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
7730 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
7731 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
7732 else if ((op1 & 0x30) == 0x30)
7733 return arm_copy_svc (gdbarch, insn, regs, dsc);
7735 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
/* Thumb-2 counterpart of arm_decode_svc_copro: decode coprocessor,
   SIMD/VFP and system instructions in the 32-bit Thumb coprocessor
   encoding space.  */
7739 thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
7740 uint16_t insn2, struct regcache *regs,
7741 struct displaced_step_closure *dsc)
7743 unsigned int coproc = bits (insn2, 8, 11);
7744 unsigned int op1 = bits (insn1, 4, 9);
7745 unsigned int bit_5_8 = bits (insn1, 5, 8);
7746 unsigned int bit_9 = bit (insn1, 9);
7747 unsigned int bit_4 = bit (insn1, 4);
7748 unsigned int rn = bits (insn1, 0, 3);
7753 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7754 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
7756 else if (bit_5_8 == 0) /* UNDEFINED. */
7757 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7760 /*coproc is 101x. SIMD/VFP, ext registers load/store. */
7761 if ((coproc & 0xe) == 0xa)
7762 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
7764 else /* coproc is not 101x. */
7766 if (bit_4 == 0) /* STC/STC2. */
7767 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7769 else /* LDC/LDC2 {literal, immediate}. */
7770 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
7776 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
/* Common setup for PC-relative address-generating instructions (ADR and
   friends): copy the (displaced) PC value into register RD before the
   modified instruction executes.  */
7782 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
7783 struct displaced_step_closure *dsc, int rd)
7789 Preparation: Rd <- PC
7795 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7796 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
/* Copy a 16-bit Thumb PC-relative ADR: rewrite it as an ADDS Rd, #imm
   (encoding T2) applied to RD, after install_pc_relative has seeded RD
   with the original PC value.  */
7800 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
7801 struct displaced_step_closure *dsc,
7802 int rd, unsigned int imm)
7805 /* Encoding T2: ADDS Rd, #imm */
7806 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
7808 install_pc_relative (gdbarch, regs, dsc, rd);
/* Decode a 16-bit Thumb ADR (PC-relative address) instruction: extract
   destination register and 8-bit immediate, then delegate to
   thumb_copy_pc_relative_16bit.  */
7814 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
7815 struct regcache *regs,
7816 struct displaced_step_closure *dsc)
7818 unsigned int rd = bits (insn, 8, 10);
7819 unsigned int imm8 = bits (insn, 0, 7);
7821 if (debug_displaced)
7822 fprintf_unfiltered (gdb_stdlog,
7823 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
7826 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
/* Copy a 32-bit Thumb ADR (PC-relative address): rewrite it as an
   ADD/SUB Rd, Rd, #imm pair of halfwords, with RD pre-loaded with the
   original PC by install_pc_relative.  Bit 7 of the first halfword
   selects the subtracting (ADR with minus offset) form.  */
7830 thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
7831 uint16_t insn2, struct regcache *regs,
7832 struct displaced_step_closure *dsc)
7834 unsigned int rd = bits (insn2, 8, 11);
7835 /* Since the immediate has the same encoding in ADR, ADD and SUB, we
7836 extract the raw immediate encoding rather than computing the immediate.
7837 When generating the ADD or SUB instruction we can simply OR the
7838 immediate into the encoding. */
7839 unsigned int imm_3_8 = insn2 & 0x70ff;
7840 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
7842 if (debug_displaced)
7843 fprintf_unfiltered (gdb_stdlog,
7844 "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
7845 rd, imm_i, imm_3_8, insn1, insn2);
7847 if (bit (insn1, 7)) /* Encoding T2 */
7849 /* Encoding T3: SUB Rd, Rd, #imm */
7850 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
7851 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7853 else /* Encoding T3 */
7855 /* Encoding T3: ADD Rd, Rd, #imm */
7856 dsc->modinsn[0] = (0xf100 | rd | imm_i);
7857 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7861 install_pc_relative (gdbarch, regs, dsc, rd);
/* Copy a 16-bit Thumb LDR (literal): the original loads RT from an
   Align(PC,4)-relative address, which cannot execute correctly from the
   scratch pad.  Rewrite as LDR R0, [R2, R3] with R2 holding the aligned
   original PC and R3 the immediate offset; cleanup_load restores the
   scratch registers and moves the loaded value into RT.  */
7867 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, unsigned short insn1,
7868 struct regcache *regs,
7869 struct displaced_step_closure *dsc)
7871 unsigned int rt = bits (insn1, 8, 10);
7873 int imm8 = (bits (insn1, 0, 7) << 2);
7874 CORE_ADDR from = dsc->insn_addr;
7880 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
7882 Insn: LDR R0, [R2, R3];
7883 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
7885 if (debug_displaced)
7886 fprintf_unfiltered (gdb_stdlog,
7887 "displaced: copying thumb ldr r%d [pc #%d]\n"
7890 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
7891 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
7892 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
7893 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7894 /* The assembler calculates the required value of the offset from the
7895 Align(PC,4) value of this instruction to the label. */
7896 pc = pc & 0xfffffffc;
7898 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
7899 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
7902 dsc->u.ldst.xfersize = 4;
7904 dsc->u.ldst.immed = 0;
7905 dsc->u.ldst.writeback = 0;
7906 dsc->u.ldst.restore_r4 = 0;
7908 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/
7910 dsc->cleanup = &cleanup_load;
7915 /* Copy Thumb cbnz/cbz instruction. */
/* CBNZ/CBZ branch on a register compare with zero; since the branch
   target is PC-relative, the instruction is replaced with a NOP and the
   branch (if taken) is performed by cleanup_branch using the condition
   evaluated here against the current register value.  */
7918 thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
7919 struct regcache *regs,
7920 struct displaced_step_closure *dsc)
7922 int non_zero = bit (insn1, 11);
7923 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
7924 CORE_ADDR from = dsc->insn_addr;
7925 int rn = bits (insn1, 0, 2);
7926 int rn_val = displaced_read_reg (regs, dsc, rn);
7928 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
7929 /* CBNZ and CBZ do not affect the condition flags. If condition is true,
7930 set it INST_AL, so cleanup_branch will know branch is taken, otherwise,
7931 condition is false, let it be, cleanup_branch will do nothing. */
7932 if (dsc->u.branch.cond)
7934 dsc->u.branch.cond = INST_AL;
7935 dsc->u.branch.dest = from + 4 + imm5;
7938 dsc->u.branch.dest = from + 2;
7940 dsc->u.branch.link = 0;
7941 dsc->u.branch.exchange = 0;
7943 if (debug_displaced)
7944 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
7945 " insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
7946 rn, rn_val, insn1, dsc->u.branch.dest);
7948 dsc->modinsn[0] = THUMB_NOP;
7950 dsc->cleanup = &cleanup_branch;
7954 /* Copy Table Branch Byte/Halfword */
/* TBB/TBH load a branch offset from a table indexed by RM (scaled by 2
   for TBH) based at RN.  The table entry is read here directly from
   target memory and the resulting branch is performed by cleanup_branch;
   the instruction itself is not re-executed.  */
7956 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
7957 uint16_t insn2, struct regcache *regs,
7958 struct displaced_step_closure *dsc)
7960 ULONGEST rn_val, rm_val;
7961 int is_tbh = bit (insn2, 4);
7962 CORE_ADDR halfwords = 0;
7963 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7965 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
7966 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
7972 target_read_memory (rn_val + 2 * rm_val, buf, 2);
7973 halfwords = extract_unsigned_integer (buf, 2, byte_order);
7979 target_read_memory (rn_val + rm_val, buf, 1);
7980 halfwords = extract_unsigned_integer (buf, 1, byte_order);
7983 if (debug_displaced)
7984 fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x offset 0x%x"
7985 " offset 0x%x\n", is_tbh ? "tbh" : "tbb",
7986 (unsigned int) rn_val, (unsigned int) rm_val,
7987 (unsigned int) halfwords);
7989 dsc->u.branch.cond = INST_AL;
7990 dsc->u.branch.link = 0;
7991 dsc->u.branch.exchange = 0;
7992 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
7994 dsc->cleanup = &cleanup_branch;
/* Cleanup for the full-register-list POP {r0-r7, pc} rewrite (see
   thumb_copy_pop_pc_16bit): write the popped PC value (left in r7)
   to the PC, restore r7 from r8, and restore r8 from tmp[0].  */
8000 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
8001 struct displaced_step_closure *dsc)
8004 int val = displaced_read_reg (regs, dsc, 7);
8005 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
8008 val = displaced_read_reg (regs, dsc, 8);
8009 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
8012 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
/* Copy a 16-bit Thumb POP that includes the PC in its register list.
   The large comment below describes the two rewrite strategies: a full
   r0-r7 list needs a three-instruction sequence using r8 as scratch;
   otherwise the PC is popped into the first free low register and
   cleanup_block_load_pc redistributes the values.  */
8017 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, unsigned short insn1,
8018 struct regcache *regs,
8019 struct displaced_step_closure *dsc)
8021 dsc->u.block.regmask = insn1 & 0x00ff;
8023 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
8026 (1) register list is full, that is, r0-r7 are used.
8027 Prepare: tmp[0] <- r8
8029 POP {r0, r1, ...., r6, r7}; remove PC from reglist
8030 MOV r8, r7; Move value of r7 to r8;
8031 POP {r7}; Store PC value into r7.
8033 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
8035 (2) register list is not full, supposing there are N registers in
8036 register list (except PC, 0 <= N <= 7).
8037 Prepare: for each i, 0 - N, tmp[i] <- ri.
8039 POP {r0, r1, ...., rN};
8041 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
8042 from tmp[] properly.
8044 if (debug_displaced)
8045 fprintf_unfiltered (gdb_stdlog,
8046 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
8047 dsc->u.block.regmask, insn1)
8049 if (dsc->u.block.regmask == 0xff)
8051 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
8053 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
8054 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
8055 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
8058 dsc->cleanup = &cleanup_pop_pc_16bit_all;
8062 unsigned int num_in_list = bitcount (dsc->u.block.regmask);
8063 unsigned int new_regmask, bit = 1;
8064 unsigned int to = 0, from = 0, i, new_rn;
8066 for (i = 0; i < num_in_list + 1; i++)
8067 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
8069 new_regmask = (1 << (num_in_list + 1)) - 1;
8071 if (debug_displaced)
8072 fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
8073 "{..., pc}: original reg list %.4x,"
8074 " modified list %.4x\n"),
8075 (int) dsc->u.block.regmask, new_regmask);
8077 dsc->u.block.regmask |= 0x8000;
8078 dsc->u.block.writeback = 0;
8079 dsc->u.block.cond = INST_AL;
8081 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
8083 dsc->cleanup = &cleanup_block_load_pc;
/* Top-level dispatcher for 16-bit Thumb instructions in displaced
   stepping: classify INSN1 by its top four bits (and bits 10-11) and
   delegate to the appropriate copy routine.  Instructions that cannot
   reference the PC are copied unmodified.  Raises an internal error on
   decode failure.  */
8090 thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
8091 struct regcache *regs,
8092 struct displaced_step_closure *dsc)
8094 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
8095 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
8098 /* 16-bit thumb instructions. */
8099 switch (op_bit_12_15)
8101 /* Shift (immediate), add, subtract, move and compare. */
8102 case 0: case 1: case 2: case 3:
8103 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
8104 "shift/add/sub/mov/cmp",
8108 switch (op_bit_10_11)
8110 case 0: /* Data-processing */
8111 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
8115 case 1: /* Special data instructions and branch and exchange. */
8117 unsigned short op = bits (insn1, 7, 9);
8118 if (op == 6 || op == 7) /* BX or BLX */
8119 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
8120 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
8121 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
8123 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
8127 default: /* LDR (literal) */
8128 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
8131 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
8132 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
8135 if (op_bit_10_11 < 2) /* Generate PC-relative address */
8136 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
8137 else /* Generate SP-relative address */
8138 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
8140 case 11: /* Misc 16-bit instructions */
8142 switch (bits (insn1, 8, 11))
8144 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
8145 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
8147 case 12: case 13: /* POP */
8148 if (bit (insn1, 8)) /* PC is in register list. */
8149 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
8151 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
8153 case 15: /* If-Then, and hints */
8154 if (bits (insn1, 0, 3))
8155 /* If-Then makes up to four following instructions conditional.
8156 IT instruction itself is not conditional, so handle it as a
8157 common unmodified instruction. */
8158 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
8161 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
8164 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
8169 if (op_bit_10_11 < 2) /* Store multiple registers */
8170 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
8171 else /* Load multiple registers */
8172 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
8174 case 13: /* Conditional branch and supervisor call */
8175 if (bits (insn1, 9, 11) != 7) /* conditional branch */
8176 err = thumb_copy_b (gdbarch, insn1, dsc);
8178 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
8180 case 14: /* Unconditional branch */
8181 err = thumb_copy_b (gdbarch, insn1, dsc);
8188 internal_error (__FILE__, __LINE__,
8189 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
/* Decode 32-bit Thumb load and memory-hint instructions (PLD/PLI,
   LDRB/LDRSB, LDRH/LDRSH, LDR) and dispatch by the size field in
   bits 5-6 of the first halfword.  Literal (PC-relative) forms need
   the load-literal copy routine; RT == PC for a load word means a
   load into the PC (fourth argument 1 to thumb2_copy_load_reg_imm,
   presumably -- TODO confirm against that routine's signature).  */
8193 decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
8194 uint16_t insn1, uint16_t insn2,
8195 struct regcache *regs,
8196 struct displaced_step_closure *dsc)
8198 int rt = bits (insn2, 12, 15);
8199 int rn = bits (insn1, 0, 3);
8200 int op1 = bits (insn1, 7, 8);
8203 switch (bits (insn1, 5, 6))
8205 case 0: /* Load byte and memory hints */
8206 if (rt == 0xf) /* PLD/PLI */
8209 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
8210 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
8212 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8217 if (rn == 0xf) /* LDRB/LDRSB (literal) */
8218 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
8221 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8222 "ldrb{reg, immediate}/ldrbt",
8227 case 1: /* Load halfword and memory hints. */
8228 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
8229 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8230 "pld/unalloc memhint", dsc);
8234 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
8237 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8241 case 2: /* Load word */
8243 int insn2_bit_8_11 = bits (insn2, 8, 11);
8246 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
8247 else if (op1 == 0x1) /* Encoding T3 */
8248 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
8250 else /* op1 == 0x0 */
8252 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
8253 /* LDR (immediate) */
8254 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
8255 dsc, bit (insn2, 8), 1);
8256 else if (insn2_bit_8_11 == 0xe) /* LDRT */
8257 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8260 /* LDR (register) */
8261 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
8267 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
/* Top-level dispatcher for 32-bit Thumb instructions in displaced
   stepping: classify by op1 (bits 11-12 of the first halfword) and
   further sub-fields, delegating to table-branch, block-transfer,
   shifted-register, branch, PC-relative, load/memory-hint and
   coprocessor handlers.  Raises an internal error on decode failure.  */
8274 thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
8275 uint16_t insn2, struct regcache *regs,
8276 struct displaced_step_closure *dsc)
8279 unsigned short op = bit (insn2, 15);
8280 unsigned int op1 = bits (insn1, 11, 12);
8286 switch (bits (insn1, 9, 10))
8291 /* Load/store {dual, exclusive}, table branch. */
8292 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
8293 && bits (insn2, 5, 7) == 0)
8294 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
8297 /* PC is not allowed to use in load/store {dual, exclusive}
8299 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8300 "load/store dual/ex", dsc);
8302 else /* load/store multiple */
8304 switch (bits (insn1, 7, 8))
8306 case 0: case 3: /* SRS, RFE */
8307 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8310 case 1: case 2: /* LDM/STM/PUSH/POP */
8311 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
8318 /* Data-processing (shift register). */
8319 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
8322 default: /* Coprocessor instructions. */
8323 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
8328 case 2: /* op1 = 2 */
8329 if (op) /* Branch and misc control. */
8331 if (bit (insn2, 14) /* BLX/BL */
8332 || bit (insn2, 12) /* Unconditional branch */
8333 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
8334 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
8336 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8341 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
8343 int op = bits (insn1, 4, 8);
8344 int rn = bits (insn1, 0, 3);
/* ADR (op 0 add / 0xa sub forms with Rn == PC) needs PC-relative copy.  */
8345 if ((op == 0 || op == 0xa) && rn == 0xf)
8346 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
8349 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8352 else /* Data processing (modified immediate) */
8353 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8357 case 3: /* op1 = 3 */
8358 switch (bits (insn1, 9, 10))
8362 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
8364 else /* NEON Load/Store and Store single data item */
8365 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8366 "neon elt/struct load/store",
8369 case 1: /* op1 = 3, bits (9, 10) == 1 */
8370 switch (bits (insn1, 7, 8))
8372 case 0: case 1: /* Data processing (register) */
8373 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8376 case 2: /* Multiply and absolute difference */
8377 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8378 "mul/mua/diff", dsc);
8380 case 3: /* Long multiply and divide */
8381 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8386 default: /* Coprocessor instructions */
8387 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
8396 internal_error (__FILE__, __LINE__,
8397 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
/* Read the Thumb instruction at FROM, determine whether it is a 16- or
   32-bit encoding via thumb_insn_size, record the size in DSC, and
   dispatch to the matching 16/32-bit displaced-step handler.  */
8402 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8403 CORE_ADDR to, struct regcache *regs,
8404 struct displaced_step_closure *dsc)
8406 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8408 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
8410 if (debug_displaced)
8411 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
8412 "at %.8lx\n", insn1, (unsigned long) from);
8415 dsc->insn_size = thumb_insn_size (insn1);
8416 if (thumb_insn_size (insn1) == 4)
/* 32-bit encoding: fetch the second halfword before dispatch.  */
8419 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
8420 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
8423 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
/* Entry point for analysing the instruction at FROM for displaced
   stepping: initialise the common closure fields, hand Thumb-mode
   execution to thumb_process_displaced_insn, otherwise read the 4-byte
   ARM instruction and dispatch on its major opcode fields.  Raises an
   internal error on decode failure.  */
8427 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8428 CORE_ADDR to, struct regcache *regs,
8429 struct displaced_step_closure *dsc)
8432 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8435 /* Most displaced instructions use a 1-instruction scratch space, so set this
8436 here and override below if/when necessary. */
8438 dsc->insn_addr = from;
8439 dsc->scratch_base = to;
8440 dsc->cleanup = NULL;
8441 dsc->wrote_to_pc = 0;
8443 if (!displaced_in_arm_mode (regs))
8444 return thumb_process_displaced_insn (gdbarch, from, to, regs, dsc);
8448 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
8449 if (debug_displaced)
8450 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
8451 "at %.8lx\n", (unsigned long) insn,
8452 (unsigned long) from);
8454 if ((insn & 0xf0000000) == 0xf0000000)
8455 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
8456 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
8458 case 0x0: case 0x1: case 0x2: case 0x3:
8459 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
8462 case 0x4: case 0x5: case 0x6:
8463 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
8467 err = arm_decode_media (gdbarch, insn, dsc);
8470 case 0x8: case 0x9: case 0xa: case 0xb:
8471 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
8474 case 0xc: case 0xd: case 0xe: case 0xf:
8475 err = arm_decode_svc_copro (gdbarch, insn, to, regs, dsc);
8480 internal_error (__FILE__, __LINE__,
8481 _("arm_process_displaced_insn: Instruction decode error"));
8484 /* Actually set up the scratch space for a displaced instruction. */
8487 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
8488 CORE_ADDR to, struct displaced_step_closure *dsc)
8490 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8491 unsigned int i, len, offset;
8492 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
/* Thumb instructions are written as 2-byte units, ARM as 4-byte.  */
8493 int size = dsc->is_thumb? 2 : 4;
8494 const unsigned char *bkp_insn;
8497 /* Poke modified instruction(s). */
8498 for (i = 0; i < dsc->numinsns; i++)
8500 if (debug_displaced)
8502 fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
8504 fprintf_unfiltered (gdb_stdlog, "%.8lx",
8507 fprintf_unfiltered (gdb_stdlog, "%.4x",
8508 (unsigned short)dsc->modinsn[i]);
8510 fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
8511 (unsigned long) to + offset);
8514 write_memory_unsigned_integer (to + offset, size,
8515 byte_order_for_code,
8520 /* Choose the correct breakpoint instruction. */
8523 bkp_insn = tdep->thumb_breakpoint;
8524 len = tdep->thumb_breakpoint_size;
8528 bkp_insn = tdep->arm_breakpoint;
8529 len = tdep->arm_breakpoint_size;
8532 /* Put breakpoint afterwards. */
8533 write_memory (to + offset, bkp_insn, len);
8535 if (debug_displaced)
8536 fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
8537 paddress (gdbarch, from), paddress (gdbarch, to));
8540 /* Entry point for copying an instruction into scratch space for displaced
   stepping.  Allocates the step closure, decodes the instruction and
   initializes the scratch area.  Ownership of the returned closure
   passes to the caller (the displaced-stepping core frees it).  */
8543 struct displaced_step_closure *
8544 arm_displaced_step_copy_insn (struct gdbarch *gdbarch,
8545 CORE_ADDR from, CORE_ADDR to,
8546 struct regcache *regs)
8548 struct displaced_step_closure *dsc
8549 = xmalloc (sizeof (struct displaced_step_closure))
8550 arm_process_displaced_insn (gdbarch, from, to, regs, dsc);
8551 arm_displaced_init_closure (gdbarch, from, to, dsc);
8556 /* Entry point for cleaning things up after a displaced instruction has been
   single-stepped.  Runs the per-instruction cleanup routine if one was
   registered, then advances the PC past the original instruction unless
   the cleanup already wrote to the PC.  */
8560 arm_displaced_step_fixup (struct gdbarch *gdbarch,
8561 struct displaced_step_closure *dsc,
8562 CORE_ADDR from, CORE_ADDR to,
8563 struct regcache *regs)
8566 dsc->cleanup (gdbarch, regs, dsc);
8568 if (!dsc->wrote_to_pc)
8569 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
8570 dsc->insn_addr + dsc->insn_size);
8574 #include "bfd-in2.h"
8575 #include "libcoff.h"
/* Disassembler entry point.  If MEMADDR is a Thumb address, install a
   fake COFF Thumb symbol in INFO so the opcodes disassembler switches
   to Thumb decoding; then dispatch on target endianness.  */
8578 gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
8580 struct gdbarch *gdbarch = info->application_data;
8582 if (arm_pc_is_thumb (gdbarch, memaddr))
/* Static: the fake symbol is built once and reused for every call.  */
8584 static asymbol *asym;
8585 static combined_entry_type ce;
8586 static struct coff_symbol_struct csym;
8587 static struct bfd fake_bfd;
8588 static bfd_target fake_target;
8590 if (csym.native == NULL)
8592 /* Create a fake symbol vector containing a Thumb symbol.
8593 This is solely so that the code in print_insn_little_arm()
8594 and print_insn_big_arm() in opcodes/arm-dis.c will detect
8595 the presence of a Thumb symbol and switch to decoding
8596 Thumb instructions. */
8598 fake_target.flavour = bfd_target_coff_flavour;
8599 fake_bfd.xvec = &fake_target;
8600 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
8602 csym.symbol.the_bfd = &fake_bfd;
8603 csym.symbol.name = "fake";
8604 asym = (asymbol *) & csym;
/* Strip the Thumb bit before handing the address to the printer.  */
8607 memaddr = UNMAKE_THUMB_ADDR (memaddr);
8608 info->symbols = &asym;
8611 info->symbols = NULL;
8613 if (info->endian == BFD_ENDIAN_BIG)
8614 return print_insn_big_arm (memaddr, info);
8616 return print_insn_little_arm (memaddr, info);
8619 /* The following define instruction sequences that will cause ARM
8620 cpu's to take an undefined instruction trap. These are used to
8621 signal a breakpoint to GDB.
8623 The newer ARMv4T cpu's are capable of operating in ARM or Thumb
8624 modes. A different instruction is required for each mode. The ARM
8625 cpu's can also be big or little endian. Thus four different
8626 instructions are needed to support all cases.
8628 Note: ARMv4 defines several new instructions that will take the
8629 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
8630 not in fact add the new instructions. The new undefined
8631 instructions in ARMv4 are all instructions that had no defined
8632 behaviour in earlier chips. There is no guarantee that they will
8633 raise an exception, but may be treated as NOP's. In practice, it
8634 may only be safe to rely on instructions matching:
8636 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
8637 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
8638 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
8640 Even this may only be true if the condition predicate is true. The
8641 following use a condition predicate of ALWAYS so it is always TRUE.
8643 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
8644 and NetBSD all use a software interrupt rather than an undefined
8645 instruction to force a trap. This can be handled by the
8646 abi-specific code during establishment of the gdbarch vector. */
/* Byte sequences of the default undefined-instruction breakpoints, in
   little- and big-endian order, for ARM and Thumb mode.  Note the two
   Thumb sequences are intentionally identical: 0xbebe is symmetric
   under byte swapping.  */
8648 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
8649 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
8650 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
8651 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
8653 static const char arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
8654 static const char arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
8655 static const char arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
8656 static const char arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
8658 /* Determine the type and size of breakpoint to insert at PCPTR. Uses
8659 the program counter value to determine whether a 16-bit or 32-bit
8660 breakpoint should be used. It returns a pointer to a string of
8661 bytes that encode a breakpoint instruction, stores the length of
8662 the string to *lenptr, and adjusts the program counter (if
8663 necessary) to point to the actual memory location where the
8664 breakpoint should be inserted. */
8666 static const unsigned char *
8667 arm_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr, int *lenptr)
8669 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8670 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8672 if (arm_pc_is_thumb (gdbarch, *pcptr))
/* Clear the Thumb bit so the breakpoint lands on the real address.  */
8674 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
8676 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
8677 check whether we are replacing a 32-bit instruction. */
8678 if (tdep->thumb2_breakpoint != NULL)
8681 if (target_read_memory (*pcptr, buf, 2) == 0)
8683 unsigned short inst1;
8684 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
8685 if (thumb_insn_size (inst1) == 4)
8687 *lenptr = tdep->thumb2_breakpoint_size;
8688 return tdep->thumb2_breakpoint;
/* Default: 16-bit Thumb breakpoint.  */
8693 *lenptr = tdep->thumb_breakpoint_size;
8694 return tdep->thumb_breakpoint;
/* ARM-mode breakpoint.  */
8698 *lenptr = tdep->arm_breakpoint_size;
8699 return tdep->arm_breakpoint;
/* Like arm_breakpoint_from_pc, but returns a remote-protocol breakpoint
   "kind" via KINDPTR, remapping a 4-byte Thumb-2 breakpoint to the
   documented magic value so it is not mistaken for an ARM one.  */
8704 arm_remote_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr,
8707 arm_breakpoint_from_pc (gdbarch, pcptr, kindptr);
8709 if (arm_pc_is_thumb (gdbarch, *pcptr) && *kindptr == 4)
8710 /* The documented magic value for a 32-bit Thumb-2 breakpoint, so
8711 that this is not confused with a 32-bit ARM breakpoint. */
8715 /* Extract from an array REGBUF containing the (raw) register state a
8716 function return value of type TYPE, and copy that, in virtual
8717 format, into VALBUF. */
8720 arm_extract_return_value (struct type *type, struct regcache *regs,
8723 struct gdbarch *gdbarch = get_regcache_arch (regs);
8724 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8726 if (TYPE_CODE_FLT == TYPE_CODE (type))
8728 switch (gdbarch_tdep (gdbarch)->fp_model)
8732 /* The value is in register F0 in internal format. We need to
8733 extract the raw value and then convert it to the desired
8735 bfd_byte tmpbuf[FP_REGISTER_SIZE];
8737 regcache_cooked_read (regs, ARM_F0_REGNUM, tmpbuf);
8738 convert_from_extended (floatformat_from_type (type), tmpbuf,
8739 valbuf, gdbarch_byte_order (gdbarch));
8743 case ARM_FLOAT_SOFT_FPA:
8744 case ARM_FLOAT_SOFT_VFP:
8745 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8746 not using the VFP ABI code. */
8748 regcache_cooked_read (regs, ARM_A1_REGNUM, valbuf);
8749 if (TYPE_LENGTH (type) > 4)
8750 regcache_cooked_read (regs, ARM_A1_REGNUM + 1,
8751 valbuf + INT_REGISTER_SIZE);
8755 internal_error (__FILE__, __LINE__,
8756 _("arm_extract_return_value: "
8757 "Floating point model not supported"));
8761 else if (TYPE_CODE (type) == TYPE_CODE_INT
8762 || TYPE_CODE (type) == TYPE_CODE_CHAR
8763 || TYPE_CODE (type) == TYPE_CODE_BOOL
8764 || TYPE_CODE (type) == TYPE_CODE_PTR
8765 || TYPE_CODE (type) == TYPE_CODE_REF
8766 || TYPE_CODE (type) == TYPE_CODE_ENUM)
8768 /* If the type is a plain integer, then the access is
8769 straight-forward. Otherwise we have to play around a bit
8771 int len = TYPE_LENGTH (type);
8772 int regno = ARM_A1_REGNUM;
8777 /* By using store_unsigned_integer we avoid having to do
8778 anything special for small big-endian values. */
8779 regcache_cooked_read_unsigned (regs, regno++, &tmp);
8780 store_unsigned_integer (valbuf,
8781 (len > INT_REGISTER_SIZE
8782 ? INT_REGISTER_SIZE : len),
8784 len -= INT_REGISTER_SIZE;
8785 valbuf += INT_REGISTER_SIZE;
8790 /* For a structure or union the behaviour is as if the value had
8791 been stored to word-aligned memory and then loaded into
8792 registers with 32-bit load instruction(s). */
8793 int len = TYPE_LENGTH (type);
8794 int regno = ARM_A1_REGNUM;
8795 bfd_byte tmpbuf[INT_REGISTER_SIZE];
8799 regcache_cooked_read (regs, regno++, tmpbuf);
8800 memcpy (valbuf, tmpbuf,
8801 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
8802 len -= INT_REGISTER_SIZE;
8803 valbuf += INT_REGISTER_SIZE;
8809 /* Will a function return an aggregate type in memory or in a
8810 register? Return 0 if an aggregate type can be returned in a
8811 register, 1 if it must be returned in memory. */
8814 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
8817 enum type_code code;
8819 CHECK_TYPEDEF (type);
8821 /* In the ARM ABI, "integer" like aggregate types are returned in
8822 registers. For an aggregate type to be integer like, its size
8823 must be less than or equal to INT_REGISTER_SIZE and the
8824 offset of each addressable subfield must be zero. Note that bit
8825 fields are not addressable, and all addressable subfields of
8826 unions always start at offset zero.
8828 This function is based on the behaviour of GCC 2.95.1.
8829 See: gcc/arm.c: arm_return_in_memory() for details.
8831 Note: All versions of GCC before GCC 2.95.2 do not set up the
8832 parameters correctly for a function returning the following
8833 structure: struct { float f;}; This should be returned in memory,
8834 not a register. Richard Earnshaw sent me a patch, but I do not
8835 know of any way to detect if a function like the above has been
8836 compiled with the correct calling convention. */
8838 /* All aggregate types that won't fit in a register must be returned
8840 if (TYPE_LENGTH (type) > INT_REGISTER_SIZE)
8845 /* The AAPCS says all aggregates not larger than a word are returned
8847 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
8850 /* The only aggregate types that can be returned in a register are
8851 structs and unions. Arrays must be returned in memory. */
8852 code = TYPE_CODE (type);
8853 if ((TYPE_CODE_STRUCT != code) && (TYPE_CODE_UNION != code))
8858 /* Assume all other aggregate types can be returned in a register.
8859 Run a check for structures, unions and arrays. */
8862 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
8865 /* Need to check if this struct/union is "integer" like. For
8866 this to be true, its size must be less than or equal to
8867 INT_REGISTER_SIZE and the offset of each addressable
8868 subfield must be zero. Note that bit fields are not
8869 addressable, and unions always start at offset zero. If any
8870 of the subfields is a floating point type, the struct/union
8871 cannot be an integer type. */
8873 /* For each field in the object, check:
8874 1) Is it FP? --> yes, nRc = 1;
8875 2) Is it addressable (bitpos != 0) and
8876 not packed (bitsize == 0)?
8880 for (i = 0; i < TYPE_NFIELDS (type); i++)
8882 enum type_code field_type_code;
8883 field_type_code = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
8886 /* Is it a floating point type field? */
8887 if (field_type_code == TYPE_CODE_FLT)
8893 /* If bitpos != 0, then we have to care about it. */
8894 if (TYPE_FIELD_BITPOS (type, i) != 0)
8896 /* Bitfields are not addressable. If the field bitsize is
8897 zero, then the field is not packed. Hence it cannot be
8898 a bitfield or any other packed type. */
8899 if (TYPE_FIELD_BITSIZE (type, i) == 0)
8911 /* Write into appropriate registers a function return value of type
8912 TYPE, given in virtual format. */
8915 arm_store_return_value (struct type *type, struct regcache *regs,
8916 const gdb_byte *valbuf)
8918 struct gdbarch *gdbarch = get_regcache_arch (regs);
8919 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8921 if (TYPE_CODE (type) == TYPE_CODE_FLT)
8923 char buf[MAX_REGISTER_SIZE];
8925 switch (gdbarch_tdep (gdbarch)->fp_model)
8929 convert_to_extended (floatformat_from_type (type), buf, valbuf,
8930 gdbarch_byte_order (gdbarch));
8931 regcache_cooked_write (regs, ARM_F0_REGNUM, buf);
8934 case ARM_FLOAT_SOFT_FPA:
8935 case ARM_FLOAT_SOFT_VFP:
8936 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8937 not using the VFP ABI code. */
8939 regcache_cooked_write (regs, ARM_A1_REGNUM, valbuf);
8940 if (TYPE_LENGTH (type) > 4)
8941 regcache_cooked_write (regs, ARM_A1_REGNUM + 1,
8942 valbuf + INT_REGISTER_SIZE);
8946 internal_error (__FILE__, __LINE__,
8947 _("arm_store_return_value: Floating "
8948 "point model not supported"));
8952 else if (TYPE_CODE (type) == TYPE_CODE_INT
8953 || TYPE_CODE (type) == TYPE_CODE_CHAR
8954 || TYPE_CODE (type) == TYPE_CODE_BOOL
8955 || TYPE_CODE (type) == TYPE_CODE_PTR
8956 || TYPE_CODE (type) == TYPE_CODE_REF
8957 || TYPE_CODE (type) == TYPE_CODE_ENUM)
8959 if (TYPE_LENGTH (type) <= 4)
8961 /* Values of one word or less are zero/sign-extended and
8963 bfd_byte tmpbuf[INT_REGISTER_SIZE];
8964 LONGEST val = unpack_long (type, valbuf);
8966 store_signed_integer (tmpbuf, INT_REGISTER_SIZE, byte_order, val);
8967 regcache_cooked_write (regs, ARM_A1_REGNUM, tmpbuf);
8971 /* Integral values greater than one word are stored in consecutive
8972 registers starting with r0. This will always be a multiple of
8973 the register size. */
8974 int len = TYPE_LENGTH (type);
8975 int regno = ARM_A1_REGNUM;
8979 regcache_cooked_write (regs, regno++, valbuf);
8980 len -= INT_REGISTER_SIZE;
8981 valbuf += INT_REGISTER_SIZE;
8987 /* For a structure or union the behaviour is as if the value had
8988 been stored to word-aligned memory and then loaded into
8989 registers with 32-bit load instruction(s). */
8990 int len = TYPE_LENGTH (type);
8991 int regno = ARM_A1_REGNUM;
8992 bfd_byte tmpbuf[INT_REGISTER_SIZE];
8996 memcpy (tmpbuf, valbuf,
8997 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
8998 regcache_cooked_write (regs, regno++, tmpbuf);
8999 len -= INT_REGISTER_SIZE;
9000 valbuf += INT_REGISTER_SIZE;
9006 /* Handle function return values. */
/* gdbarch return_value hook: decide the convention for VALTYPE and,
   when READBUF/WRITEBUF are non-NULL, transfer the value.  VFP
   candidates go in s/d/q registers; large aggregates and (on AAPCS)
   large complex types use the struct convention.  */
9008 static enum return_value_convention
9009 arm_return_value (struct gdbarch *gdbarch, struct value *function,
9010 struct type *valtype, struct regcache *regcache,
9011 gdb_byte *readbuf, const gdb_byte *writebuf)
9013 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9014 struct type *func_type = function ? value_type (function) : NULL;
9015 enum arm_vfp_cprc_base_type vfp_base_type;
9018 if (arm_vfp_abi_for_function (gdbarch, func_type)
9019 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
9021 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
9022 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
9024 for (i = 0; i < vfp_base_count; i++)
/* Quad registers have no raw register; use the pseudo helpers.  */
9026 if (reg_char == 'q')
9029 arm_neon_quad_write (gdbarch, regcache, i,
9030 writebuf + i * unit_length);
9033 arm_neon_quad_read (gdbarch, regcache, i,
9034 readbuf + i * unit_length);
/* s/d registers are looked up by name.  */
9041 sprintf (name_buf, "%c%d", reg_char, i);
9042 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9045 regcache_cooked_write (regcache, regnum,
9046 writebuf + i * unit_length);
9048 regcache_cooked_read (regcache, regnum,
9049 readbuf + i * unit_length);
9052 return RETURN_VALUE_REGISTER_CONVENTION;
9055 if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
9056 || TYPE_CODE (valtype) == TYPE_CODE_UNION
9057 || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
9059 if (tdep->struct_return == pcc_struct_return
9060 || arm_return_in_memory (gdbarch, valtype))
9061 return RETURN_VALUE_STRUCT_CONVENTION;
9064 /* AAPCS returns complex types longer than a register in memory. */
9065 if (tdep->arm_abi != ARM_ABI_APCS
9066 && TYPE_CODE (valtype) == TYPE_CODE_COMPLEX
9067 && TYPE_LENGTH (valtype) > INT_REGISTER_SIZE)
9068 return RETURN_VALUE_STRUCT_CONVENTION;
9071 arm_store_return_value (valtype, regcache, writebuf);
9074 arm_extract_return_value (valtype, regcache, readbuf);
9076 return RETURN_VALUE_REGISTER_CONVENTION;
/* Read the resume PC out of a jmp_buf whose address is in r0, using
   the per-OS jb_pc/jb_elt_size layout from the tdep.  Returns nonzero
   on success and stores the PC through *PC (return path elided in
   this listing).  */
9081 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
9083 struct gdbarch *gdbarch = get_frame_arch (frame);
9084 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9085 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
9087 char buf[INT_REGISTER_SIZE];
/* The jmp_buf pointer is the first argument register.  */
9089 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
9091 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
9095 *pc = extract_unsigned_integer (buf, INT_REGISTER_SIZE, byte_order);
9099 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
9100 return the target PC. Otherwise return 0. */
9103 arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
9107 CORE_ADDR start_addr;
9109 /* Find the starting address and name of the function containing the PC. */
9110 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
9113 /* If PC is in a Thumb call or return stub, return the address of the
9114 target PC, which is in a register. The thunk functions are called
9115 _call_via_xx, where x is the register name. The possible names
9116 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
9117 functions, named __ARM_call_via_r[0-7]. */
9118 if (strncmp (name, "_call_via_", 10) == 0
9119 || strncmp (name, "__ARM_call_via_", strlen ("__ARM_call_via_")) == 0)
9121 /* Use the name suffix to determine which register contains the
9123 static char *table[15] =
9124 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
9125 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
/* The last two characters of the stub name are the register name.  */
9128 int offset = strlen (name) - 2;
9130 for (regno = 0; regno <= 14; regno++)
9131 if (strcmp (&name[offset], table[regno]) == 0)
9132 return get_frame_register_unsigned (frame, regno);
9135 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
9136 non-interworking calls to foo. We could decode the stubs
9137 to find the target but it's easier to use the symbol table. */
9138 namelen = strlen (name);
9139 if (name[0] == '_' && name[1] == '_'
9140 && ((namelen > 2 + strlen ("_from_thumb")
9141 && strncmp (name + namelen - strlen ("_from_thumb"), "_from_thumb",
9142 strlen ("_from_thumb")) == 0)
9143 || (namelen > 2 + strlen ("_from_arm")
9144 && strncmp (name + namelen - strlen ("_from_arm"), "_from_arm",
9145 strlen ("_from_arm")) == 0)))
9148 int target_len = namelen - 2;
9149 struct minimal_symbol *minsym;
9150 struct objfile *objfile;
9151 struct obj_section *sec;
/* Distinguish the two suffixes by their final character.  */
9153 if (name[namelen - 1] == 'b')
9154 target_len -= strlen ("_from_thumb");
9156 target_len -= strlen ("_from_arm");
/* Extract "foo" from "__foo_from_...".  */
9158 target_name = alloca (target_len + 1);
9159 memcpy (target_name, name + 2, target_len);
9160 target_name[target_len] = '\0';
/* Prefer a symbol from the same objfile as the stub.  */
9162 sec = find_pc_section (pc);
9163 objfile = (sec == NULL) ? NULL : sec->objfile;
9164 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
9166 return SYMBOL_VALUE_ADDRESS (minsym);
9171 return 0; /* not a stub */
/* Handler for the bare "set arm" command: print usage and the list of
   subcommands.  NOTE(review): "apporpriate" is misspelled in the
   user-visible message; fixing it changes runtime output, so it is
   only flagged here.  */
9175 set_arm_command (char *args, int from_tty)
9177 printf_unfiltered (_("\
9178 \"set arm\" must be followed by an apporpriate subcommand.\n"));
9179 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
/* Handler for the bare "show arm" command: show all ARM settings.  */
9183 show_arm_command (char *args, int from_tty)
9185 cmd_show_list (showarmcmdlist, from_tty, "");
/* Re-select the gdbarch after a "set arm ..." setting changed, so the
   new ABI/FP-model/disassembly preferences take effect.  */
9189 arm_update_current_architecture (void)
9191 struct gdbarch_info info;
9193 /* If the current architecture is not ARM, we have nothing to do. */
9194 if (gdbarch_bfd_arch_info (target_gdbarch)->arch != bfd_arch_arm)
9197 /* Update the architecture. */
9198 gdbarch_info_init (&info);
9200 if (!gdbarch_update_p (info))
9201 internal_error (__FILE__, __LINE__, _("could not update architecture"));
/* "set arm fpu" handler: map the chosen string onto an
   arm_float_model value and refresh the architecture.  */
9205 set_fp_model_sfunc (char *args, int from_tty,
9206 struct cmd_list_element *c)
9208 enum arm_float_model fp_model;
9210 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
9211 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
9213 arm_fp_model = fp_model;
/* Reaching the sentinel means the command machinery accepted a
   string we do not know — an internal inconsistency.  */
9217 if (fp_model == ARM_FLOAT_LAST)
9218 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
9221 arm_update_current_architecture ();
/* "show arm fpu" handler: report either the auto-detected model of the
   current ARM gdbarch or the explicitly selected one.  */
9225 show_fp_model (struct ui_file *file, int from_tty,
9226 struct cmd_list_element *c, const char *value)
9228 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch);
9230 if (arm_fp_model == ARM_FLOAT_AUTO
9231 && gdbarch_bfd_arch_info (target_gdbarch)->arch == bfd_arch_arm)
9232 fprintf_filtered (file, _("\
9233 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
9234 fp_model_strings[tdep->fp_model]);
9236 fprintf_filtered (file, _("\
9237 The current ARM floating point model is \"%s\".\n"),
9238 fp_model_strings[arm_fp_model]);
/* "set arm abi" handler: map the chosen string onto an arm_abi_kind
   value and refresh the architecture.  */
9242 arm_set_abi (char *args, int from_tty,
9243 struct cmd_list_element *c)
9245 enum arm_abi_kind arm_abi;
9247 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
9248 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
9250 arm_abi_global = arm_abi;
/* An unrecognized string here indicates an internal inconsistency.  */
9254 if (arm_abi == ARM_ABI_LAST)
9255 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
9258 arm_update_current_architecture ();
/* "show arm abi" handler: report either the auto-detected ABI of the
   current ARM gdbarch or the explicitly selected one.  */
9262 arm_show_abi (struct ui_file *file, int from_tty,
9263 struct cmd_list_element *c, const char *value)
9265 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch);
9267 if (arm_abi_global == ARM_ABI_AUTO
9268 && gdbarch_bfd_arch_info (target_gdbarch)->arch == bfd_arch_arm)
9269 fprintf_filtered (file, _("\
9270 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
9271 arm_abi_strings[tdep->arm_abi]);
9273 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
/* "show arm fallback-mode" handler.  */
9278 arm_show_fallback_mode (struct ui_file *file, int from_tty,
9279 struct cmd_list_element *c, const char *value)
9281 fprintf_filtered (file,
9282 _("The current execution mode assumed "
9283 "(when symbols are unavailable) is \"%s\".\n"),
9284 arm_fallback_mode_string);
/* "show arm force-mode" handler.
   NOTE(review): TDEP appears unused in the visible lines — confirm
   against the full file before removing it.  */
9288 arm_show_force_mode (struct ui_file *file, int from_tty,
9289 struct cmd_list_element *c, const char *value)
9291 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch);
9293 fprintf_filtered (file,
9294 _("The current execution mode assumed "
9295 "(even when symbols are available) is \"%s\".\n"),
9296 arm_force_mode_string);
9299 /* If the user changes the register disassembly style used for info
9300 register and other commands, we have to also switch the style used
9301 in opcodes for disassembly output. This function is run in the "set
9302 arm disassembly" command, and does that. */
9305 set_disassembly_style_sfunc (char *args, int from_tty,
9306 struct cmd_list_element *c)
9308 set_disassembly_style ();
9311 /* Return the ARM register name corresponding to register I. */
9313 arm_register_name (struct gdbarch *gdbarch, int i)
9315 const int num_regs = gdbarch_num_regs (gdbarch);
/* VFP single-precision pseudos occupy [num_regs, num_regs + 32).  */
9317 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
9318 && i >= num_regs && i < num_regs + 32)
9320 static const char *const vfp_pseudo_names[] = {
9321 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
9322 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
9323 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
9324 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
9327 return vfp_pseudo_names[i - num_regs];
/* NEON quad pseudos follow, in [num_regs + 32, num_regs + 48).  */
9330 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
9331 && i >= num_regs + 32 && i < num_regs + 32 + 16)
9333 static const char *const neon_pseudo_names[] = {
9334 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
9335 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
9338 return neon_pseudo_names[i - num_regs - 32];
9341 if (i >= ARRAY_SIZE (arm_register_names))
9342 /* These registers are only supported on targets which supply
9343 an XML description. */
9346 return arm_register_names[i];
/* Synchronize the opcodes register-name option with the user's chosen
   disassembly style.  */
9350 set_disassembly_style (void)
9354 /* Find the style that the user wants. */
9355 for (current = 0; current < num_disassembly_options; current++)
9356 if (disassembly_style == valid_disassembly_styles[current])
/* The command machinery only accepts listed styles.  */
9358 gdb_assert (current < num_disassembly_options);
9360 /* Synchronize the disassembler. */
9361 set_arm_regname_option (current);
9364 /* Test whether the coff symbol specific value corresponds to a Thumb
   symbol (function, static, external, or label).  */
9368 coff_sym_is_thumb (int val)
9370 return (val == C_THUMBEXT
9371 || val == C_THUMBSTAT
9372 || val == C_THUMBEXTFUNC
9373 || val == C_THUMBSTATFUNC
9374 || val == C_THUMBLABEL);
9377 /* arm_coff_make_msymbol_special()
9378 arm_elf_make_msymbol_special()
9380 These functions test whether the COFF or ELF symbol corresponds to
9381 an address in thumb code, and set a "special" bit in a minimal
9382 symbol to indicate that it does. */
/* Mark MSYM as special (Thumb) when the ELF symbol's branch type says
   it targets Thumb code.  */
9385 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
9387 if (ARM_SYM_BRANCH_TYPE (&((elf_symbol_type *)sym)->internal_elf_sym)
9388 == ST_BRANCH_TO_THUMB)
9389 MSYMBOL_SET_SPECIAL (msym);
/* Mark MSYM as special (Thumb) when the COFF storage class VAL denotes
   a Thumb symbol.  */
9393 arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
9395 if (coff_sym_is_thumb (val))
9396 MSYMBOL_SET_SPECIAL (msym);
/* Per-objfile data destructor: free the mapping-symbol vector of every
   section.  (The struct itself is obstack-allocated.)  */
9400 arm_objfile_data_free (struct objfile *objfile, void *arg)
9402 struct arm_per_objfile *data = arg;
9405 for (i = 0; i < objfile->obfd->section_count; i++)
9406 VEC_free (arm_mapping_symbol_s, data->section_maps[i]);
/* Record an ARM ELF mapping symbol ($a, $t or $d) for SYM's section,
   keeping each section's vector sorted by value so later lookups can
   binary-search it.  */
9410 arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
9413 const char *name = bfd_asymbol_name (sym);
9414 struct arm_per_objfile *data;
9415 VEC(arm_mapping_symbol_s) **map_p;
9416 struct arm_mapping_symbol new_map_sym;
9418 gdb_assert (name[0] == '$');
/* Only $a (ARM), $t (Thumb) and $d (data) symbols are of interest.  */
9419 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
/* Lazily allocate the per-objfile table on first use.  */
9422 data = objfile_data (objfile, arm_objfile_data_key);
9425 data = OBSTACK_ZALLOC (&objfile->objfile_obstack,
9426 struct arm_per_objfile);
9427 set_objfile_data (objfile, arm_objfile_data_key, data);
9428 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
9429 objfile->obfd->section_count,
9430 VEC(arm_mapping_symbol_s) *);
9432 map_p = &data->section_maps[bfd_get_section (sym)->index];
9434 new_map_sym.value = sym->value;
9435 new_map_sym.type = name[1];
9437 /* Assume that most mapping symbols appear in order of increasing
9438 value. If they were randomly distributed, it would be faster to
9439 always push here and then sort at first use. */
9440 if (!VEC_empty (arm_mapping_symbol_s, *map_p))
9442 struct arm_mapping_symbol *prev_map_sym;
9444 prev_map_sym = VEC_last (arm_mapping_symbol_s, *map_p);
/* Out-of-order symbol: insert at its sorted position.  */
9445 if (prev_map_sym->value >= sym->value)
9448 idx = VEC_lower_bound (arm_mapping_symbol_s, *map_p, &new_map_sym,
9449 arm_compare_mapping_symbols);
9450 VEC_safe_insert (arm_mapping_symbol_s, *map_p, idx, &new_map_sym);
/* Common case: append at the end.  */
9455 VEC_safe_push (arm_mapping_symbol_s, *map_p, &new_map_sym);
/* gdbarch write_pc hook: set the PC and update the Thumb (T) bit of
   the PSR to match the mode implied by PC.  */
9459 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
9461 struct gdbarch *gdbarch = get_regcache_arch (regcache);
9462 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
9464 /* If necessary, set the T bit. */
9467 ULONGEST val, t_bit;
9468 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
9469 t_bit = arm_psr_thumb_bit (gdbarch);
9470 if (arm_pc_is_thumb (gdbarch, pc))
9471 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
/* Otherwise clear the T bit (write with it masked off).  */
9474 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9479 /* Read the contents of a NEON quad register, by reading from two
9480 double registers. This is used to implement the quad pseudo
9481 registers, and for argument passing in case the quad registers are
9482 missing; vectors are passed in quad registers when using the VFP
9483 ABI, even if a NEON unit is not present. REGNUM is the index of
9484 the quad register, in [0, 15]. */
9486 static enum register_status
9487 arm_neon_quad_read (struct gdbarch *gdbarch, struct regcache *regcache,
9488 int regnum, gdb_byte *buf)
9491 gdb_byte reg_buf[8];
9492 int offset, double_regnum;
9493 enum register_status status;
/* Quad register q<N> maps onto double registers d<2N> and d<2N+1>.  */
9495 sprintf (name_buf, "d%d", regnum << 1);
9496 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9499 /* d0 is always the least significant half of q0. */
9500 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9505 status = regcache_raw_read (regcache, double_regnum, reg_buf);
9506 if (status != REG_VALID)
9508 memcpy (buf + offset, reg_buf, 8);
/* The second double register fills the other half of the buffer.  */
9510 offset = 8 - offset;
9511 status = regcache_raw_read (regcache, double_regnum + 1, reg_buf);
9512 if (status != REG_VALID)
9514 memcpy (buf + offset, reg_buf, 8);
/* gdbarch pseudo_register_read hook: read VFP single-precision (s0-s31)
   or NEON quad (q0-q15) pseudo registers out of the underlying double
   registers.  */
9519 static enum register_status
9520 arm_pseudo_read (struct gdbarch *gdbarch, struct regcache *regcache,
9521 int regnum, gdb_byte *buf)
9523 const int num_regs = gdbarch_num_regs (gdbarch);
9525 gdb_byte reg_buf[8];
9526 int offset, double_regnum;
9528 gdb_assert (regnum >= num_regs);
/* REGNUM is rebased to a pseudo index here (rebasing line elided).  */
9531 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
9532 /* Quad-precision register. */
9533 return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
9536 enum register_status status;
9538 /* Single-precision register. */
9539 gdb_assert (regnum < 32);
9541 /* s0 is always the least significant half of d0. */
9542 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9543 offset = (regnum & 1) ? 0 : 4;
9545 offset = (regnum & 1) ? 4 : 0;
/* s<N> lives in half of double register d<N/2>.  */
9547 sprintf (name_buf, "d%d", regnum >> 1);
9548 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9551 status = regcache_raw_read (regcache, double_regnum, reg_buf);
9552 if (status == REG_VALID)
9553 memcpy (buf, reg_buf + offset, 4);
9558 /* Store the contents of BUF to a NEON quad register, by writing to
9559 two double registers. This is used to implement the quad pseudo
9560 registers, and for argument passing in case the quad registers are
9561 missing; vectors are passed in quad registers when using the VFP
9562 ABI, even if a NEON unit is not present. REGNUM is the index
9563 of the quad register, in [0, 15]. */
9566 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
9567 int regnum, const gdb_byte *buf)
9570 int offset, double_regnum;
/* Quad register q<N> maps onto double registers d<2N> and d<2N+1>.  */
9572 sprintf (name_buf, "d%d", regnum << 1);
9573 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9576 /* d0 is always the least significant half of q0. */
9577 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9582 regcache_raw_write (regcache, double_regnum, buf + offset);
/* The second double register takes the other half of BUF.  */
9583 offset = 8 - offset;
9584 regcache_raw_write (regcache, double_regnum + 1, buf + offset);
/* gdbarch pseudo_register_write hook: store to a VFP single-precision
   (s0-s31) or NEON quad (q0-q15) pseudo register via the underlying
   double registers.  */
9588 arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
9589 int regnum, const gdb_byte *buf)
9591 const int num_regs = gdbarch_num_regs (gdbarch);
9593 gdb_byte reg_buf[8];
9594 int offset, double_regnum;
9596 gdb_assert (regnum >= num_regs);
/* REGNUM is rebased to a pseudo index here (rebasing line elided).  */
9599 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
9600 /* Quad-precision register. */
9601 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
9604 /* Single-precision register. */
9605 gdb_assert (regnum < 32);
9607 /* s0 is always the least significant half of d0. */
9608 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9609 offset = (regnum & 1) ? 0 : 4;
9611 offset = (regnum & 1) ? 4 : 0;
9613 sprintf (name_buf, "d%d", regnum >> 1);
9614 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
/* Read-modify-write: only 4 bytes of the double register change.  */
9617 regcache_raw_read (regcache, double_regnum, reg_buf);
9618 memcpy (reg_buf + offset, buf, 4);
9619 regcache_raw_write (regcache, double_regnum, reg_buf);
/* user_reg_add callback: BATON points at the raw register number an
   alias refers to; return that register's value in FRAME.  */
9623 static struct value *
9624 value_of_arm_user_reg (struct frame_info *frame, const void *baton)
9626 const int *reg_p = baton;
9627 return value_of_register (*reg_p, frame);
/* OS ABI sniffer for ARM ELF binaries, registered in
   _initialize_arm_tdep.  Returns the detected OS ABI, or
   GDB_OSABI_UNKNOWN to defer to the generic ELF sniffer.  */
9630 static enum gdb_osabi
9631 arm_elf_osabi_sniffer (bfd *abfd)
9633 unsigned int elfosabi;
9634 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
9636 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
9638 if (elfosabi == ELFOSABI_ARM)
9639 /* GNU tools use this value. Check note sections in this case,
/* Scan the note sections for a GNU ABI tag to refine the guess.  */
9641 bfd_map_over_sections (abfd,
9642 generic_elf_osabi_sniff_abi_tag_sections,
9645 /* Anything else will be handled by the generic ELF sniffer. */
/* gdbarch register_reggroup_p hook: decide whether REGNUM belongs to
   GROUP, special-casing the FPA status register.  */
9650 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
9651 struct reggroup *group)
9653 /* FPS register's type is INT, but belongs to float_reggroup. Beside
9654 this, FPS register belongs to save_reggroup, restore_reggroup, and
9655 all_reggroup, of course. */
9656 if (regnum == ARM_FPS_REGNUM)
9657 return (group == float_reggroup
9658 || group == save_reggroup
9659 || group == restore_reggroup
9660 || group == all_reggroup);
/* Everything else follows the default type-based grouping.  */
9662 return default_register_reggroup_p (gdbarch, regnum, group);
9666 /* For backward-compatibility we allow two 'g' packet lengths with
9667 the remote protocol depending on whether FPA registers are
9668 supplied. M-profile targets do not have FPA registers, but some
9669 stubs already exist in the wild which use a 'g' packet which
9670 supplies them albeit with dummy values. The packet format which
9671 includes FPA registers should be considered deprecated for
9672 M-profile targets. */
/* Register expected remote 'g' packet sizes (in bytes) together with
   the target description each size implies, so the remote layer can
   guess the register layout for stubs that send no description.  */
9675 arm_register_g_packet_guesses (struct gdbarch *gdbarch)
9677 if (gdbarch_tdep (gdbarch)->is_m)
9679 /* If we know from the executable this is an M-profile target,
9680 cater for remote targets whose register set layout is the
9681 same as the FPA layout. */
9682 register_remote_g_packet_guess (gdbarch,
9683 /* r0-r12,sp,lr,pc; f0-f7; fps,xpsr */
9684 (16 * INT_REGISTER_SIZE)
9685 + (8 * FP_REGISTER_SIZE)
9686 + (2 * INT_REGISTER_SIZE),
9687 tdesc_arm_with_m_fpa_layout);
9689 /* The regular M-profile layout. */
9690 register_remote_g_packet_guess (gdbarch,
9691 /* r0-r12,sp,lr,pc; xpsr */
9692 (16 * INT_REGISTER_SIZE)
9693 + INT_REGISTER_SIZE,
9696 /* M-profile plus M4F VFP. */
9697 register_remote_g_packet_guess (gdbarch,
9698 /* r0-r12,sp,lr,pc; d0-d15; fpscr,xpsr */
9699 (16 * INT_REGISTER_SIZE)
9700 + (16 * VFP_REGISTER_SIZE)
9701 + (2 * INT_REGISTER_SIZE),
9702 tdesc_arm_with_m_vfp_d16);
9705 /* Otherwise we don't have a useful guess. */
9709 /* Initialize the current architecture based on INFO. If possible,
9710 re-use an architecture from ARCHES, which is a list of
9711 architectures already created during this debugging session.
9713 Called e.g. at program startup, when reading a core file, and when
9714 reading a binary file. */
9716 static struct gdbarch *
9717 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
9719 struct gdbarch_tdep *tdep;
9720 struct gdbarch *gdbarch;
9721 struct gdbarch_list *best_arch;
9722 enum arm_abi_kind arm_abi = arm_abi_global;
9723 enum arm_float_model fp_model = arm_fp_model;
9724 struct tdesc_arch_data *tdesc_data = NULL;
9726 int have_vfp_registers = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
9728 int have_fpa_registers = 1;
9729 const struct target_desc *tdesc = info.target_desc;
9731 /* If we have an object to base this architecture on, try to determine
9734 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
9736 int ei_osabi, e_flags;
9738 switch (bfd_get_flavour (info.abfd))
9740 case bfd_target_aout_flavour:
9741 /* Assume it's an old APCS-style ABI. */
9742 arm_abi = ARM_ABI_APCS;
9745 case bfd_target_coff_flavour:
9746 /* Assume it's an old APCS-style ABI. */
9748 arm_abi = ARM_ABI_APCS;
9751 case bfd_target_elf_flavour:
9752 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
9753 e_flags = elf_elfheader (info.abfd)->e_flags;
9755 if (ei_osabi == ELFOSABI_ARM)
9757 /* GNU tools used to use this value, but do not for EABI
9758 objects. There's nowhere to tag an EABI version
9759 anyway, so assume APCS. */
9760 arm_abi = ARM_ABI_APCS;
9762 else if (ei_osabi == ELFOSABI_NONE)
9764 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
9765 int attr_arch, attr_profile;
9769 case EF_ARM_EABI_UNKNOWN:
9770 /* Assume GNU tools. */
9771 arm_abi = ARM_ABI_APCS;
9774 case EF_ARM_EABI_VER4:
9775 case EF_ARM_EABI_VER5:
9776 arm_abi = ARM_ABI_AAPCS;
9777 /* EABI binaries default to VFP float ordering.
9778 They may also contain build attributes that can
9779 be used to identify if the VFP argument-passing
9781 if (fp_model == ARM_FLOAT_AUTO)
/* Consult the Tag_ABI_VFP_args build attribute to pick the
   argument-passing variant of the AAPCS.  */
9784 switch (bfd_elf_get_obj_attr_int (info.abfd,
9789 /* "The user intended FP parameter/result
9790 passing to conform to AAPCS, base
9792 fp_model = ARM_FLOAT_SOFT_VFP;
9795 /* "The user intended FP parameter/result
9796 passing to conform to AAPCS, VFP
9798 fp_model = ARM_FLOAT_VFP;
9801 /* "The user intended FP parameter/result
9802 passing to conform to tool chain-specific
9803 conventions" - we don't know any such
9804 conventions, so leave it as "auto". */
9807 /* Attribute value not mentioned in the
9808 October 2008 ABI, so leave it as
9813 fp_model = ARM_FLOAT_SOFT_VFP;
9819 /* Leave it as "auto". */
9820 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
9825 /* Detect M-profile programs. This only works if the
9826 executable file includes build attributes; GCC does
9827 copy them to the executable, but e.g. RealView does
9829 attr_arch = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9831 attr_profile = bfd_elf_get_obj_attr_int (info.abfd,
9833 Tag_CPU_arch_profile);
9834 /* GCC specifies the profile for v6-M; RealView only
9835 specifies the profile for architectures starting with
9836 V7 (as opposed to architectures with a tag
9837 numerically greater than TAG_CPU_ARCH_V7). */
9838 if (!tdesc_has_registers (tdesc)
9839 && (attr_arch == TAG_CPU_ARCH_V6_M
9840 || attr_arch == TAG_CPU_ARCH_V6S_M
9841 || attr_profile == 'M'))
9846 if (fp_model == ARM_FLOAT_AUTO)
9848 int e_flags = elf_elfheader (info.abfd)->e_flags;
9850 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
9853 /* Leave it as "auto". Strictly speaking this case
9854 means FPA, but almost nobody uses that now, and
9855 many toolchains fail to set the appropriate bits
9856 for the floating-point model they use. */
9858 case EF_ARM_SOFT_FLOAT:
9859 fp_model = ARM_FLOAT_SOFT_FPA;
9861 case EF_ARM_VFP_FLOAT:
9862 fp_model = ARM_FLOAT_VFP;
9864 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
9865 fp_model = ARM_FLOAT_SOFT_VFP;
/* BE8 binaries: data is big-endian but instructions are
   little-endian, so override the code byte order.  */
9870 if (e_flags & EF_ARM_BE8)
9871 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
9876 /* Leave it as "auto". */
9881 /* Check any target description for validity. */
9882 if (tdesc_has_registers (tdesc))
9884 /* For most registers we require GDB's default names; but also allow
9885 the numeric names for sp / lr / pc, as a convenience. */
9886 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
9887 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
9888 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
9890 const struct tdesc_feature *feature;
9893 feature = tdesc_find_feature (tdesc,
9894 "org.gnu.gdb.arm.core")
9895 if (feature == NULL)
9897 feature = tdesc_find_feature (tdesc,
9898 "org.gnu.gdb.arm.m-profile");
9899 if (feature == NULL)
9905 tdesc_data = tdesc_data_alloc ();
9908 for (i = 0; i < ARM_SP_REGNUM; i++)
9909 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9910 arm_register_names[i]);
9911 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9914 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9917 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
/* M-profile targets name the status register "xpsr"; A/R-profile
   use "cpsr".  */
9921 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9922 ARM_PS_REGNUM, "xpsr");
9924 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9925 ARM_PS_REGNUM, "cpsr");
9929 tdesc_data_cleanup (tdesc_data);
9933 feature = tdesc_find_feature (tdesc,
9934 "org.gnu.gdb.arm.fpa");
9935 if (feature != NULL)
9938 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
9939 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9940 arm_register_names[i]);
9943 tdesc_data_cleanup (tdesc_data);
9948 have_fpa_registers = 0;
9950 feature = tdesc_find_feature (tdesc,
9951 "org.gnu.gdb.xscale.iwmmxt");
9952 if (feature != NULL)
9954 static const char *const iwmmxt_names[] = {
9955 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9956 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9957 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9958 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9962 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
9964 &= tdesc_numbered_register (feature, tdesc_data, i,
9965 iwmmxt_names[i - ARM_WR0_REGNUM]);
9967 /* Check for the control registers, but do not fail if they
9969 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
9970 tdesc_numbered_register (feature, tdesc_data, i,
9971 iwmmxt_names[i - ARM_WR0_REGNUM]);
9973 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
9975 &= tdesc_numbered_register (feature, tdesc_data, i,
9976 iwmmxt_names[i - ARM_WR0_REGNUM]);
9980 tdesc_data_cleanup (tdesc_data);
9985 /* If we have a VFP unit, check whether the single precision registers
9986 are present. If not, then we will synthesize them as pseudo
9988 feature = tdesc_find_feature (tdesc,
9989 "org.gnu.gdb.arm.vfp");
9990 if (feature != NULL)
9992 static const char *const vfp_double_names[] = {
9993 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9994 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9995 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9996 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9999 /* Require the double precision registers. There must be either
10002 for (i = 0; i < 32; i++)
10004 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10006 vfp_double_names[i]);
/* A description with exactly d0-d15 (i == 16) is also acceptable.  */
10010 if (!valid_p && i == 16)
10013 /* Also require FPSCR. */
10014 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10015 ARM_FPSCR_REGNUM, "fpscr");
10018 tdesc_data_cleanup (tdesc_data);
10022 if (tdesc_unnumbered_register (feature, "s0") == 0)
10023 have_vfp_pseudos = 1;
10025 have_vfp_registers = 1;
10027 /* If we have VFP, also check for NEON. The architecture allows
10028 NEON without VFP (integer vector operations only), but GDB
10029 does not support that. */
10030 feature = tdesc_find_feature (tdesc,
10031 "org.gnu.gdb.arm.neon");
10032 if (feature != NULL)
10034 /* NEON requires 32 double-precision registers. */
10037 tdesc_data_cleanup (tdesc_data);
10041 /* If there are quad registers defined by the stub, use
10042 their type; otherwise (normally) provide them with
10043 the default type. */
10044 if (tdesc_unnumbered_register (feature, "q0") == 0)
10045 have_neon_pseudos = 1;
10052 /* If there is already a candidate, use it. */
10053 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
10055 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
10057 if (arm_abi != ARM_ABI_AUTO
10058 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
10061 if (fp_model != ARM_FLOAT_AUTO
10062 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
10065 /* There are various other properties in tdep that we do not
10066 need to check here: those derived from a target description,
10067 since gdbarches with a different target description are
10068 automatically disqualified. */
10070 /* Do check is_m, though, since it might come from the binary. */
10071 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
10074 /* Found a match. */
10078 if (best_arch != NULL)
10080 if (tdesc_data != NULL)
10081 tdesc_data_cleanup (tdesc_data);
10082 return best_arch->gdbarch;
/* No matching architecture found: allocate a fresh gdbarch and tdep.  */
10085 tdep = xcalloc (1, sizeof (struct gdbarch_tdep));
10086 gdbarch = gdbarch_alloc (&info, tdep);
10088 /* Record additional information about the architecture we are defining.
10089 These are gdbarch discriminators, like the OSABI. */
10090 tdep->arm_abi = arm_abi;
10091 tdep->fp_model = fp_model;
10093 tdep->have_fpa_registers = have_fpa_registers;
10094 tdep->have_vfp_registers = have_vfp_registers;
10095 tdep->have_vfp_pseudos = have_vfp_pseudos;
10096 tdep->have_neon_pseudos = have_neon_pseudos;
10097 tdep->have_neon = have_neon;
10099 arm_register_g_packet_guesses (gdbarch);
/* Select breakpoint instruction encodings by the code byte order
   (which may differ from the data byte order, e.g. for BE8).  */
10102 switch (info.byte_order_for_code)
10104 case BFD_ENDIAN_BIG:
10105 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
10106 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
10107 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
10108 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
10112 case BFD_ENDIAN_LITTLE:
10113 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
10114 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
10115 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
10116 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
10121 internal_error (__FILE__, __LINE__,
10122 _("arm_gdbarch_init: bad byte order for float format"));
10125 /* On ARM targets char defaults to unsigned. */
10126 set_gdbarch_char_signed (gdbarch, 0);
10128 /* Note: for displaced stepping, this includes the breakpoint, and one word
10129 of additional scratch space. This setting isn't used for anything beside
10130 displaced stepping at present. */
10131 set_gdbarch_max_insn_length (gdbarch, 4 * DISPLACED_MODIFIED_INSNS);
10133 /* This should be low enough for everything. */
10134 tdep->lowest_pc = 0x20;
10135 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
10137 /* The default, for both APCS and AAPCS, is to return small
10138 structures in registers. */
10139 tdep->struct_return = reg_struct_return;
10141 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
10142 set_gdbarch_frame_align (gdbarch, arm_frame_align);
10144 set_gdbarch_write_pc (gdbarch, arm_write_pc);
10146 /* Frame handling. */
10147 set_gdbarch_dummy_id (gdbarch, arm_dummy_id);
10148 set_gdbarch_unwind_pc (gdbarch, arm_unwind_pc);
10149 set_gdbarch_unwind_sp (gdbarch, arm_unwind_sp);
10151 frame_base_set_default (gdbarch, &arm_normal_base);
10153 /* Address manipulation. */
10154 set_gdbarch_smash_text_address (gdbarch, arm_smash_text_address);
10155 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
10157 /* Advance PC across function entry code. */
10158 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
10160 /* Detect whether PC is in function epilogue. */
10161 set_gdbarch_in_function_epilogue_p (gdbarch, arm_in_function_epilogue_p);
10163 /* Skip trampolines. */
10164 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
10166 /* The stack grows downward. */
10167 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
10169 /* Breakpoint manipulation. */
10170 set_gdbarch_breakpoint_from_pc (gdbarch, arm_breakpoint_from_pc);
10171 set_gdbarch_remote_breakpoint_from_pc (gdbarch,
10172 arm_remote_breakpoint_from_pc);
10174 /* Information about registers, etc. */
10175 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
10176 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
10177 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
10178 set_gdbarch_register_type (gdbarch, arm_register_type);
10179 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
10181 /* This "info float" is FPA-specific. Use the generic version if we
10182 do not have FPA. */
10183 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
10184 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
10186 /* Internal <-> external register number maps. */
10187 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
10188 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
10190 set_gdbarch_register_name (gdbarch, arm_register_name);
10192 /* Returning results. */
10193 set_gdbarch_return_value (gdbarch, arm_return_value);
10196 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
10198 /* Minsymbol frobbing. */
10199 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
10200 set_gdbarch_coff_make_msymbol_special (gdbarch,
10201 arm_coff_make_msymbol_special);
10202 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
10204 /* Thumb-2 IT block support. */
10205 set_gdbarch_adjust_breakpoint_address (gdbarch,
10206 arm_adjust_breakpoint_address);
10208 /* Virtual tables. */
10209 set_gdbarch_vbit_in_delta (gdbarch, 1);
10211 /* Hook in the ABI-specific overrides, if they have been registered. */
10212 gdbarch_init_osabi (info, gdbarch);
10214 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
10216 /* Add some default predicates. */
10217 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
10218 dwarf2_append_unwinders (gdbarch);
10219 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
10220 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
10222 /* Now we have tuned the configuration, set a few final things,
10223 based on what the OS ABI has told us. */
10225 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
10226 binaries are always marked. */
10227 if (tdep->arm_abi == ARM_ABI_AUTO)
10228 tdep->arm_abi = ARM_ABI_APCS;
10230 /* Watchpoints are not steppable. */
10231 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
10233 /* We used to default to FPA for generic ARM, but almost nobody
10234 uses that now, and we now provide a way for the user to force
10235 the model. So default to the most useful variant. */
10236 if (tdep->fp_model == ARM_FLOAT_AUTO)
10237 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
10239 if (tdep->jb_pc >= 0)
10240 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
10242 /* Floating point sizes and format. */
10243 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
10244 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
10246 set_gdbarch_double_format
10247 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
10248 set_gdbarch_long_double_format
10249 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
10253 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
10254 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
10257 if (have_vfp_pseudos)
10259 /* NOTE: These are the only pseudo registers used by
10260 the ARM target at the moment. If more are added, a
10261 little more care in numbering will be needed. */
10263 int num_pseudos = 32;
10264 if (have_neon_pseudos)
10266 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
10267 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
10268 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
10273 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
10275 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
10277 /* Override tdesc_register_type to adjust the types of VFP
10278 registers for NEON. */
10279 set_gdbarch_register_type (gdbarch, arm_register_type);
10282 /* Add standard register aliases. We add aliases even for those
10283 names which are used by the current architecture - it's simpler,
10284 and does no harm, since nothing ever lists user registers. */
10285 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
10286 user_reg_add (gdbarch, arm_register_aliases[i].name,
10287 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
/* gdbarch dump callback (registered with gdbarch_register in
   _initialize_arm_tdep): print ARM tdep state to FILE for
   "maintenance print architecture".  */
10293 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
10295 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
10300 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
10301 (unsigned long) tdep->lowest_pc);
10304 extern initialize_file_ftype _initialize_arm_tdep; /* -Wmissing-prototypes */
/* Module initializer: registers the ARM gdbarch, the ELF OS ABI
   sniffer, the built-in target descriptions, and all of the
   "set/show arm ..." commands.  */
10307 _initialize_arm_tdep (void)
10309 struct ui_file *stb;
10311 struct cmd_list_element *new_set, *new_show;
10312 const char *setname;
10313 const char *setdesc;
10314 const char *const *regnames;
10316 static char *helptext;
10317 char regdesc[1024], *rdptr = regdesc;
10318 size_t rest = sizeof (regdesc);
10320 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
10322 arm_objfile_data_key
10323 = register_objfile_data_with_cleanup (NULL, arm_objfile_data_free);
10325 /* Add ourselves to objfile event chain. */
10326 observer_attach_new_objfile (arm_exidx_new_objfile);
10328 = register_objfile_data_with_cleanup (NULL, arm_exidx_data_free);
10330 /* Register an ELF OS ABI sniffer for ARM binaries. */
10331 gdbarch_register_osabi_sniffer (bfd_arch_arm,
10332 bfd_target_elf_flavour,
10333 arm_elf_osabi_sniffer);
10335 /* Initialize the standard target descriptions. */
10336 initialize_tdesc_arm_with_m ();
10337 initialize_tdesc_arm_with_m_fpa_layout ();
10338 initialize_tdesc_arm_with_m_vfp_d16 ();
10339 initialize_tdesc_arm_with_iwmmxt ();
10340 initialize_tdesc_arm_with_vfpv2 ();
10341 initialize_tdesc_arm_with_vfpv3 ();
10342 initialize_tdesc_arm_with_neon ();
10344 /* Get the number of possible sets of register names defined in opcodes. */
10345 num_disassembly_options = get_arm_regname_num_options ();
10347 /* Add root prefix command for all "set arm"/"show arm" commands. */
10348 add_prefix_cmd ("arm", no_class, set_arm_command,
10349 _("Various ARM-specific commands."),
10350 &setarmcmdlist, "set arm ", 0, &setlist);
10352 add_prefix_cmd ("arm", no_class, show_arm_command,
10353 _("Various ARM-specific commands."),
10354 &showarmcmdlist, "show arm ", 0, &showlist);
10356 /* Sync the opcode insn printer with our register viewer. */
10357 parse_arm_disassembler_option ("reg-names-std");
10359 /* Initialize the array that will be passed to
10360 add_setshow_enum_cmd(). */
10361 valid_disassembly_styles
10362 = xmalloc ((num_disassembly_options + 1) * sizeof (char *));
10363 for (i = 0; i < num_disassembly_options; i++)
/* NOTE(review): "®names" below appears to be a mis-encoded
   "&regnames" (HTML entity collapse); fix the source encoding.  */
10365 numregs = get_arm_regnames (i, &setname, &setdesc, ®names);
10366 valid_disassembly_styles[i] = setname;
10367 length = snprintf (rdptr, rest, "%s - %s\n", setname, setdesc);
10370 /* When we find the default names, tell the disassembler to use
10372 if (!strcmp (setname, "std"))
10374 disassembly_style = setname;
10375 set_arm_regname_option (i);
10378 /* Mark the end of valid options. */
10379 valid_disassembly_styles[num_disassembly_options] = NULL;
10381 /* Create the help text. */
10382 stb = mem_fileopen ();
10383 fprintf_unfiltered (stb, "%s%s%s",
10384 _("The valid values are:\n"),
10386 _("The default is \"std\"."));
10387 helptext = ui_file_xstrdup (stb, NULL);
10388 ui_file_delete (stb);
10390 add_setshow_enum_cmd("disassembler", no_class,
10391 valid_disassembly_styles, &disassembly_style,
10392 _("Set the disassembly style."),
10393 _("Show the disassembly style."),
10395 set_disassembly_style_sfunc,
10396 NULL, /* FIXME: i18n: The disassembly style is
10398 &setarmcmdlist, &showarmcmdlist);
10400 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
10401 _("Set usage of ARM 32-bit mode."),
10402 _("Show usage of ARM 32-bit mode."),
10403 _("When off, a 26-bit PC will be used."),
10405 NULL, /* FIXME: i18n: Usage of ARM 32-bit
10407 &setarmcmdlist, &showarmcmdlist);
10409 /* Add a command to allow the user to force the FPU model. */
/* NOTE(review): "¤t_fp_model" below appears to be a mis-encoded
   "&current_fp_model" (HTML entity collapse); also "typefrom" in the
   user-visible help string is missing a space ("type from") — a
   string fix is a behavior change and is left to a code patch.  */
10410 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, ¤t_fp_model,
10411 _("Set the floating point type."),
10412 _("Show the floating point type."),
10413 _("auto - Determine the FP typefrom the OS-ABI.\n\
10414 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
10415 fpa - FPA co-processor (GCC compiled).\n\
10416 softvfp - Software FP with pure-endian doubles.\n\
10417 vfp - VFP co-processor."),
10418 set_fp_model_sfunc, show_fp_model,
10419 &setarmcmdlist, &showarmcmdlist);
10421 /* Add a command to allow the user to force the ABI. */
10422 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
10424 _("Show the ABI."),
10425 NULL, arm_set_abi, arm_show_abi,
10426 &setarmcmdlist, &showarmcmdlist);
10428 /* Add two commands to allow the user to force the assumed
10430 add_setshow_enum_cmd ("fallback-mode", class_support,
10431 arm_mode_strings, &arm_fallback_mode_string,
10432 _("Set the mode assumed when symbols are unavailable."),
10433 _("Show the mode assumed when symbols are unavailable."),
10434 NULL, NULL, arm_show_fallback_mode,
10435 &setarmcmdlist, &showarmcmdlist);
10436 add_setshow_enum_cmd ("force-mode", class_support,
10437 arm_mode_strings, &arm_force_mode_string,
10438 _("Set the mode assumed even when symbols are available."),
10439 _("Show the mode assumed even when symbols are available."),
10440 NULL, NULL, arm_show_force_mode,
10441 &setarmcmdlist, &showarmcmdlist);
10443 /* Debugging flag. */
10444 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
10445 _("Set ARM debugging."),
10446 _("Show ARM debugging."),
10447 _("When on, arm-specific debugging is enabled."),
10449 NULL, /* FIXME: i18n: "ARM debugging is %s. */
10450 &setdebuglist, &showdebuglist);
10453 /* ARM-reversible process record data structures. */
/* Instruction sizes used by the process-record decoder.  */
10455 #define ARM_INSN_SIZE_BYTES 4
10456 #define THUMB_INSN_SIZE_BYTES 2
10457 #define THUMB2_INSN_SIZE_BYTES 4
/* Bit position of the S/L bit in load/store encodings.  */
10460 #define INSN_S_L_BIT_NUM 20
/* Allocate REGS and copy LENGTH uint32_t entries from RECORD_BUF.
   NOTE(review): "®S[0]" below appears to be a mis-encoded
   "&REGS[0]" (HTML entity collapse); fix the source encoding.  */
10462 #define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
10465 unsigned int reg_len = LENGTH; \
10468 REGS = XNEWVEC (uint32_t, reg_len); \
10469 memcpy(®S[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
/* Allocate MEMS and copy LENGTH arm_mem_r entries from RECORD_BUF.  */
10474 #define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
10477 unsigned int mem_len = LENGTH; \
10480 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
10481 memcpy(&MEMS->len, &RECORD_BUF[0], \
10482 sizeof(struct arm_mem_r) * LENGTH); \
10487 /* Checks whether insn is already recorded or yet to be decoded. (boolean expression). */
10488 #define INSN_RECORDED(ARM_RECORD) \
10489 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
10491 /* ARM memory record structure. */
10494 uint32_t len; /* Record length. */
10495 CORE_ADDR addr; /* Memory address. */
10498 /* ARM instruction record contains opcode of current insn
10499 and execution state (before entry to decode_insn()),
10500 contains list of to-be-modified registers and
10501 memory blocks (on return from decode_insn()). */
10503 typedef struct insn_decode_record_t
10505 struct gdbarch *gdbarch;
10506 struct regcache *regcache;
10507 CORE_ADDR this_addr; /* Address of the insn being decoded. */
10508 uint32_t arm_insn; /* Should accommodate thumb. */
10509 uint32_t cond; /* Condition code. */
10510 uint32_t opcode; /* Insn opcode. */
10511 uint32_t decode; /* Insn decode bits. */
10512 uint32_t mem_rec_count; /* No of mem records. */
10513 uint32_t reg_rec_count; /* No of reg records. */
10514 uint32_t *arm_regs; /* Registers to be saved for this record. */
10515 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
10516 } insn_decode_record;
10519 /* Checks ARM SBZ and SBO mandatory fields. */
/* Extract LEN bits of INSN starting at BIT_NUM (1-based) and compare
   them against the should-be-one/should-be-zero expectation SBO.
   NOTE(review): body appears truncated in this copy — verify against
   the upstream definition before modifying.  */
10522 sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
10524 uint32_t ones = bits (insn, bit_num - 1, (bit_num -1) + (len - 1));
10547 } arm_record_strx_t;
/* Process-record handler for ARM miscellaneous store insns (STRH /
   STRD, selected by STR_TYPE).  Fills RECORD_BUF with register
   numbers and RECORD_BUF_MEM with {length, address} pairs describing
   what the insn will modify; updates reg_rec_count / mem_rec_count
   in ARM_INSN_R accordingly.  */
10558 arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
10559 uint32_t *record_buf_mem, arm_record_strx_t str_type)
10562 struct regcache *reg_cache = arm_insn_r->regcache;
10563 ULONGEST u_regval[2]= {0};
10565 uint32_t reg_src1 = 0, reg_src2 = 0;
10566 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
10567 uint32_t opcode1 = 0;
10569 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10570 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10571 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10574 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10576 /* 1) Handle misc store, immediate offset. */
10577 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
10578 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
10579 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10580 regcache_raw_read_unsigned (reg_cache, reg_src1,
10582 if (ARM_PC_REGNUM == reg_src1)
10584 /* If R15 was used as Rn, hence current PC+8. */
10585 u_regval[0] = u_regval[0] + 8;
10587 offset_8 = (immed_high << 4) | immed_low;
10588 /* Calculate target store address. */
10589 if (14 == arm_insn_r->opcode)
10591 tgt_mem_addr = u_regval[0] + offset_8;
10595 tgt_mem_addr = u_regval[0] - offset_8;
10597 if (ARM_RECORD_STRH == str_type)
10599 record_buf_mem[0] = 2;
10600 record_buf_mem[1] = tgt_mem_addr;
10601 arm_insn_r->mem_rec_count = 1;
10603 else if (ARM_RECORD_STRD == str_type)
10605 record_buf_mem[0] = 4;
10606 record_buf_mem[1] = tgt_mem_addr;
10607 record_buf_mem[2] = 4;
10608 record_buf_mem[3] = tgt_mem_addr + 4;
10609 arm_insn_r->mem_rec_count = 2;
10612 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
10614 /* 2) Store, register offset. */
/* reg_src1 is Rm (offset), reg_src2 is Rn (base).  */
10616 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10618 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10619 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10620 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10621 if (15 == reg_src2)
10623 /* If R15 was used as Rn, hence current PC+8. */
/* NOTE(review): the PC adjustment is applied to u_regval[0] (Rm)
   although the test is on reg_src2 (Rn, held in u_regval[1]);
   this looks like an operand mix-up — confirm against upstream.  */
10624 u_regval[0] = u_regval[0] + 8;
10626 /* Calculate target store address, Rn +/- Rm, register offset. */
10627 if (12 == arm_insn_r->opcode)
10629 tgt_mem_addr = u_regval[0] + u_regval[1];
/* NOTE(review): the subtract case computes Rm - Rn rather than
   Rn - Rm; likely the operands are swapped — confirm upstream.  */
10633 tgt_mem_addr = u_regval[1] - u_regval[0];
10635 if (ARM_RECORD_STRH == str_type)
10637 record_buf_mem[0] = 2;
10638 record_buf_mem[1] = tgt_mem_addr;
10639 arm_insn_r->mem_rec_count = 1;
10641 else if (ARM_RECORD_STRD == str_type)
10643 record_buf_mem[0] = 4;
10644 record_buf_mem[1] = tgt_mem_addr;
10645 record_buf_mem[2] = 4;
10646 record_buf_mem[3] = tgt_mem_addr + 4;
10647 arm_insn_r->mem_rec_count = 2;
10650 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10651 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
10653 /* 3) Store, immediate pre-indexed. */
10654 /* 5) Store, immediate post-indexed. */
10655 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
10656 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
10657 offset_8 = (immed_high << 4) | immed_low;
10658 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10659 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10660 /* Calculate target store address, Rn +/- Rm, register offset. */
10661 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
10663 tgt_mem_addr = u_regval[0] + offset_8;
10667 tgt_mem_addr = u_regval[0] - offset_8;
10669 if (ARM_RECORD_STRH == str_type)
10671 record_buf_mem[0] = 2;
10672 record_buf_mem[1] = tgt_mem_addr;
10673 arm_insn_r->mem_rec_count = 1;
10675 else if (ARM_RECORD_STRD == str_type)
10677 record_buf_mem[0] = 4;
10678 record_buf_mem[1] = tgt_mem_addr;
10679 record_buf_mem[2] = 4;
10680 record_buf_mem[3] = tgt_mem_addr + 4;
10681 arm_insn_r->mem_rec_count = 2;
10683 /* Record Rn also as it changes. */
10684 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
10685 arm_insn_r->reg_rec_count = 1;
10687 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
10688 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
10690 /* 4) Store, register pre-indexed. */
10691 /* 6) Store, register post -indexed. */
10692 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10693 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10694 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10695 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10696 /* Calculate target store address, Rn +/- Rm, register offset. */
10697 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
10699 tgt_mem_addr = u_regval[0] + u_regval[1];
/* NOTE(review): same Rm/Rn ordering concern as the register-offset
   case above — confirm against upstream.  */
10703 tgt_mem_addr = u_regval[1] - u_regval[0];
10705 if (ARM_RECORD_STRH == str_type)
10707 record_buf_mem[0] = 2;
10708 record_buf_mem[1] = tgt_mem_addr;
10709 arm_insn_r->mem_rec_count = 1;
10711 else if (ARM_RECORD_STRD == str_type)
10713 record_buf_mem[0] = 4;
10714 record_buf_mem[1] = tgt_mem_addr;
10715 record_buf_mem[2] = 4;
10716 record_buf_mem[3] = tgt_mem_addr + 4;
10717 arm_insn_r->mem_rec_count = 2;
10719 /* Record Rn also as it changes. */
10720 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
10721 arm_insn_r->reg_rec_count = 1;
10726 /* Handling ARM extension space insns. */
/* Record ARM "extension space" instructions: unconditional insns,
   multiply extensions (MLA/MUL/SMLAL/SMULL/UMLAL/UMULL), control-space
   insns (MRS/MSR, BX/BLX, BKPT, CLZ, QADD-family, signed multiplies),
   and the SWP/SWPB plus extra load/store encodings.  Registers about to
   be clobbered are collected into record_buf and memory locations into
   record_buf_mem, then handed to REG_ALLOC/MEM_ALLOC.  Returns 0 on
   success, -1 when the insn cannot be recorded (e.g. SPSR access or
   coprocessor space).
   NOTE(review): this extract elides some original lines (braces,
   else keywords, comment continuations) between the numbered lines.  */
10729 arm_record_extension_space (insn_decode_record *arm_insn_r)
10731 uint32_t ret = 0; /* Return value: -1:record failure ; 0:success */
10732 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
10733 uint32_t record_buf[8], record_buf_mem[8];
10734 uint32_t reg_src1 = 0;
10735 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
10736 struct regcache *reg_cache = arm_insn_r->regcache;
10737 ULONGEST u_regval = 0;
10739 gdb_assert (!INSN_RECORDED(arm_insn_r));
10740 /* Handle unconditional insn extension space. */
10742 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
10743 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10744 if (arm_insn_r->cond)
10746 /* PLD has no affect on architectural state, it just affects
10748 if (5 == ((opcode1 & 0xE0) >> 5))
/* Presumably BLX(1): CPSR (T bit) and LR are clobbered.  */
10751 record_buf[0] = ARM_PS_REGNUM;
10752 record_buf[1] = ARM_LR_REGNUM;
10753 arm_insn_r->reg_rec_count = 2;
10755 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
10759 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10760 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
10763 /* Undefined instruction on ARM V5; need to handle if later
10764 versions define it. */
10767 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
10768 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10769 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
10771 /* Handle arithmetic insn extension space. */
10772 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
10773 && !INSN_RECORDED(arm_insn_r))
10775 /* Handle MLA(S) and MUL(S). */
10776 if (0 <= insn_op1 && 3 >= insn_op1)
/* NOTE(review): insn_op1 is unsigned, so "0 <= insn_op1" is
   always true; only the upper bound is meaningful here.  */
10778 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10779 record_buf[1] = ARM_PS_REGNUM;
10780 arm_insn_r->reg_rec_count = 2;
10782 else if (4 <= insn_op1 && 15 >= insn_op1)
10784 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
10785 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10786 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10787 record_buf[2] = ARM_PS_REGNUM;
10788 arm_insn_r->reg_rec_count = 3;
10792 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
10793 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
10794 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
10796 /* Handle control insn extension space. */
10798 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
10799 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
10801 if (!bit (arm_insn_r->arm_insn,25))
10803 if (!bits (arm_insn_r->arm_insn, 4, 7))
10805 if ((0 == insn_op1) || (2 == insn_op1))
/* Only the destination register Rd (bits 12-15) is modified.  */
10808 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10809 arm_insn_r->reg_rec_count = 1;
10811 else if (1 == insn_op1)
10813 /* CPSR is going to be changed. */
10814 record_buf[0] = ARM_PS_REGNUM;
10815 arm_insn_r->reg_rec_count = 1;
10817 else if (3 == insn_op1)
10819 /* SPSR is going to be changed. */
10820 /* We need to get SPSR value, which is yet to be done. */
10821 printf_unfiltered (_("Process record does not support "
10822 "instruction 0x%0x at address %s.\n"),
10823 arm_insn_r->arm_insn,
10824 paddress (arm_insn_r->gdbarch,
10825 arm_insn_r->this_addr));
10829 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
/* Only CPSR is recorded here (presumably BX-style exchange).  */
10834 record_buf[0] = ARM_PS_REGNUM;
10835 arm_insn_r->reg_rec_count = 1;
10837 else if (3 == insn_op1)
/* Rd (bits 12-15) is modified (presumably CLZ).  */
10840 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10841 arm_insn_r->reg_rec_count = 1;
10844 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
10847 record_buf[0] = ARM_PS_REGNUM;
10848 record_buf[1] = ARM_LR_REGNUM;
10849 arm_insn_r->reg_rec_count = 2;
10851 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
10853 /* QADD, QSUB, QDADD, QDSUB */
10854 record_buf[0] = ARM_PS_REGNUM;
10855 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10856 arm_insn_r->reg_rec_count = 2;
10858 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
10861 record_buf[0] = ARM_PS_REGNUM;
10862 record_buf[1] = ARM_LR_REGNUM;
10863 arm_insn_r->reg_rec_count = 2;
10865 /* Save SPSR also;how? */
10866 printf_unfiltered (_("Process record does not support "
10867 "instruction 0x%0x at address %s.\n"),
10868 arm_insn_r->arm_insn,
10869 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
10872 else if(8 == bits (arm_insn_r->arm_insn, 4, 7)
10873 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
10874 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
10875 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
10878 if (0 == insn_op1 || 1 == insn_op1)
10880 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
10881 /* We dont do optimization for SMULW<y> where we
10883 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10884 record_buf[1] = ARM_PS_REGNUM;
10885 arm_insn_r->reg_rec_count = 2;
10887 else if (2 == insn_op1)
/* Long-multiply form: both RdLo (12-15) and RdHi (16-19) change.  */
10890 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10891 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
10892 arm_insn_r->reg_rec_count = 2;
10894 else if (3 == insn_op1)
10897 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10898 arm_insn_r->reg_rec_count = 1;
10904 /* MSR : immediate form. */
10907 /* CPSR is going to be changed. */
10908 record_buf[0] = ARM_PS_REGNUM;
10909 arm_insn_r->reg_rec_count = 1;
10911 else if (3 == insn_op1)
10913 /* SPSR is going to be changed. */
10914 /* we need to get SPSR value, which is yet to be done */
10915 printf_unfiltered (_("Process record does not support "
10916 "instruction 0x%0x at address %s.\n"),
10917 arm_insn_r->arm_insn,
10918 paddress (arm_insn_r->gdbarch,
10919 arm_insn_r->this_addr));
10925 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10926 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
10927 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
10929 /* Handle load/store insn extension space. */
10931 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
10932 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
10933 && !INSN_RECORDED(arm_insn_r))
10938 /* These insn, changes register and memory as well. */
10939 /* SWP or SWPB insn. */
10940 /* Get memory address given by Rn. */
10941 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10942 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
10943 /* SWP insn ?, swaps word. */
10944 if (8 == arm_insn_r->opcode)
10946 record_buf_mem[0] = 4;
10950 /* SWPB insn, swaps only byte. */
10951 record_buf_mem[0] = 1;
10953 record_buf_mem[1] = u_regval;
10954 arm_insn_r->mem_rec_count = 1;
10955 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10956 arm_insn_r->reg_rec_count = 1;
10958 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
/* Halfword store: delegate to the common str-x recorder.  */
10961 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10964 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
/* Doubleword transfer: the destination pair Rd, Rd+1 changes.  */
10967 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10968 record_buf[1] = record_buf[0] + 1;
10969 arm_insn_r->reg_rec_count = 2;
10971 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10974 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10977 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
10979 /* LDRH, LDRSB, LDRSH. */
10980 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10981 arm_insn_r->reg_rec_count = 1;
10986 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
10987 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
10988 && !INSN_RECORDED(arm_insn_r))
10991 /* Handle coprocessor insn extension space. */
10994 /* To be done for ARMv5 and later; as of now we return -1. */
10996 printf_unfiltered (_("Process record does not support instruction x%0x "
10997 "at address %s.\n"),arm_insn_r->arm_insn,
10998 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11001 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11002 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11007 /* Handling opcode 000 insns. */
/* Record handler for ARM opcode-000 instructions: multiplies
   (MUL/MLA/SMULL/SMLAL/UMULL/UMLAL), misc loads, MSR/MRS, SWP/SWPB,
   BLX/BX, BKPT, CLZ, the extra load/store ("strx") encodings, and
   ordinary data-processing insns.  Registers the insn will clobber go
   into record_buf, memory locations into record_buf_mem; both are
   handed to REG_ALLOC/MEM_ALLOC at the end.  */
11010 arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
11012 struct regcache *reg_cache = arm_insn_r->regcache;
11013 uint32_t record_buf[8], record_buf_mem[8];
11014 ULONGEST u_regval[2] = {0};
11016 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
11017 uint32_t immed_high = 0, immed_low = 0, offset_8 = 0, tgt_mem_addr = 0;
11018 uint32_t opcode1 = 0;
11020 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11021 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11022 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
11024 /* Data processing insn /multiply insn. */
11025 if (9 == arm_insn_r->decode
11026 && ((4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
11027 || (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)))
11029 /* Handle multiply instructions. */
11030 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
11031 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
11033 /* Handle MLA and MUL. */
11034 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11035 record_buf[1] = ARM_PS_REGNUM;
11036 arm_insn_r->reg_rec_count = 2;
11038 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
11040 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
11041 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11042 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11043 record_buf[2] = ARM_PS_REGNUM;
11044 arm_insn_r->reg_rec_count = 3;
11047 else if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
11048 && (11 == arm_insn_r->decode || 13 == arm_insn_r->decode))
11050 /* Handle misc load insns, as 20th bit (L = 1). */
11051 /* LDR insn has a capability to do branching, if
11052 MOV LR, PC is preceded by LDR insn having Rn as R15
11053 in that case, it emulates branch and link insn, and hence we
11054 need to save CPSR and PC as well. I am not sure this is right
11055 place; as opcode = 010 LDR insn make this happen, if R15 was
11057 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11058 if (15 != reg_dest)
11060 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11061 arm_insn_r->reg_rec_count = 1;
/* Load into PC: record CPSR too, since a mode/flag change may occur.  */
11065 record_buf[0] = reg_dest;
11066 record_buf[1] = ARM_PS_REGNUM;
11067 arm_insn_r->reg_rec_count = 2;
11070 else if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
11071 && sbo_sbz (arm_insn_r->arm_insn, 5, 12, 0)
11072 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
11073 && 2 == bits (arm_insn_r->arm_insn, 20, 21))
11075 /* Handle MSR insn. */
11076 if (9 == arm_insn_r->opcode)
11078 /* CPSR is going to be changed. */
11079 record_buf[0] = ARM_PS_REGNUM;
11080 arm_insn_r->reg_rec_count = 1;
11084 /* SPSR is going to be changed. */
11085 /* How to read SPSR value? */
11086 printf_unfiltered (_("Process record does not support instruction "
11087 "0x%0x at address %s.\n"),
11088 arm_insn_r->arm_insn,
11089 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11093 else if (9 == arm_insn_r->decode
11094 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
11095 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11097 /* Handling SWP, SWPB. */
11098 /* These insn, changes register and memory as well. */
11099 /* SWP or SWPB insn. */
11101 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11102 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11103 /* SWP insn ?, swaps word. */
11104 if (8 == arm_insn_r->opcode)
11106 record_buf_mem[0] = 4;
11110 /* SWPB insn, swaps only byte. */
11111 record_buf_mem[0] = 1;
11113 record_buf_mem[1] = u_regval[0];
11114 arm_insn_r->mem_rec_count = 1;
11115 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11116 arm_insn_r->reg_rec_count = 1;
11118 else if (3 == arm_insn_r->decode && 0x12 == opcode1
11119 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
11121 /* Handle BLX, branch and link/exchange. */
11122 if (9 == arm_insn_r->opcode)
11124 /* Branch is chosen by setting T bit of CPSR, bitp[0] of Rm,
11125 and R14 stores the return address. */
11126 record_buf[0] = ARM_PS_REGNUM;
11127 record_buf[1] = ARM_LR_REGNUM;
11128 arm_insn_r->reg_rec_count = 2;
11131 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
11133 /* Handle enhanced software breakpoint insn, BKPT. */
11134 /* CPSR is changed to be executed in ARM state, disabling normal
11135 interrupts, entering abort mode. */
11136 /* According to high vector configuration PC is set. */
11137 /* user hit breakpoint and type reverse, in
11138 that case, we need to go back with previous CPSR and
11139 Program Counter. */
11140 record_buf[0] = ARM_PS_REGNUM;
11141 record_buf[1] = ARM_LR_REGNUM;
11142 arm_insn_r->reg_rec_count = 2;
11144 /* Save SPSR also; how? */
11145 printf_unfiltered (_("Process record does not support instruction "
11146 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11147 paddress (arm_insn_r->gdbarch,
11148 arm_insn_r->this_addr));
11151 else if (11 == arm_insn_r->decode
11152 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11154 /* Handle enhanced store insns and DSP insns (e.g. LDRD). */
11156 /* Handle str(x) insn */
11157 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11160 else if (1 == arm_insn_r->decode && 0x12 == opcode1
11161 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
11163 /* Handle BX, branch and link/exchange. */
11164 /* Branch is chosen by setting T bit of CPSR, bitp[0] of Rm. */
11165 record_buf[0] = ARM_PS_REGNUM;
11166 arm_insn_r->reg_rec_count = 1;
11168 else if (1 == arm_insn_r->decode && 0x16 == opcode1
11169 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
11170 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
11172 /* Count leading zeros: CLZ. */
11173 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11174 arm_insn_r->reg_rec_count = 1;
11176 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
11177 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
11178 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
11179 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0)
11182 /* Handle MRS insn. */
11183 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11184 arm_insn_r->reg_rec_count = 1;
11186 else if (arm_insn_r->opcode <= 15)
11188 /* Normal data processing insns. */
11189 /* Out of 11 shifter operands mode, all the insn modifies destination
11190 register, which is specified by 13-16 decode. */
11191 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11192 record_buf[1] = ARM_PS_REGNUM;
11193 arm_insn_r->reg_rec_count = 2;
11200 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11201 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11205 /* Handling opcode 001 insns. */
/* Record handler for ARM opcode-001 (data-processing with immediate
   operand) instructions, including the immediate form of MSR.  For
   ordinary data-processing insns the destination register (bits 12-15)
   and CPSR are recorded; MSR to SPSR is unsupported.  */
11208 arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
11210 uint32_t record_buf[8], record_buf_mem[8];
11212 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11213 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11215 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
11216 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
11217 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
11220 /* Handle MSR insn. */
11221 if (9 == arm_insn_r->opcode)
11223 /* CPSR is going to be changed. */
11224 record_buf[0] = ARM_PS_REGNUM;
11225 arm_insn_r->reg_rec_count = 1;
11229 /* SPSR is going to be changed. */
11232 else if (arm_insn_r->opcode <= 15)
11234 /* Normal data processing insns. */
11235 /* Out of 11 shifter operands mode, all the insn modifies destination
11236 register, which is specified by 13-16 decode. */
11237 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11238 record_buf[1] = ARM_PS_REGNUM;
11239 arm_insn_r->reg_rec_count = 2;
11246 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11247 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11251 /* Handling opcode 010 insns. */
/* Record handler for ARM opcode-010 instructions: load/store with
   immediate offset (LDR/LDRB/STR/STRB and their translated and
   pre/post-indexed variants).  Loads record the destination register
   (plus CPSR when Rd is PC); stores record the target address
   Rn +/- offset_12 and, for writeback forms, the base register Rn.  */
11254 arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
11256 struct regcache *reg_cache = arm_insn_r->regcache;
11258 uint32_t reg_src1 = 0 , reg_dest = 0;
11259 uint32_t offset_12 = 0, tgt_mem_addr = 0;
11260 uint32_t record_buf[8], record_buf_mem[8];
11262 ULONGEST u_regval = 0;
11264 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11265 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11267 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11269 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11270 /* LDR insn has a capability to do branching, if
11271 MOV LR, PC is preceded by LDR insn having Rn as R15
11272 in that case, it emulates branch and link insn, and hence we
11273 need to save CPSR and PC as well. */
11274 if (ARM_PC_REGNUM != reg_dest)
11276 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11277 arm_insn_r->reg_rec_count = 1;
11281 record_buf[0] = reg_dest;
11282 record_buf[1] = ARM_PS_REGNUM;
11283 arm_insn_r->reg_rec_count = 2;
11288 /* Store, immediate offset, immediate pre-indexed,
11289 immediate post-indexed. */
11290 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11291 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
11292 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
/* Bit 23 is the U (up/down) bit: add or subtract the offset.  */
11294 if (bit (arm_insn_r->arm_insn, 23))
11296 tgt_mem_addr = u_regval + offset_12;
11300 tgt_mem_addr = u_regval - offset_12;
11303 switch (arm_insn_r->opcode)
/* STR/STRT variants: a word (4 bytes) is overwritten.  */
11317 record_buf_mem[0] = 4;
/* STRB/STRBT variants: a single byte is overwritten.  */
11332 record_buf_mem[0] = 1;
11336 gdb_assert_not_reached ("no decoding pattern found");
11339 record_buf_mem[1] = tgt_mem_addr;
11340 arm_insn_r->mem_rec_count = 1;
11342 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11343 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11344 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11345 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11346 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11347 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11350 /* We are handling pre-indexed mode; post-indexed mode;
11351 where Rn is going to be changed. */
11352 record_buf[0] = reg_src1;
11353 arm_insn_r->reg_rec_count = 1;
11357 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11358 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11362 /* Handling opcode 011 insns. */
/* Record handler for ARM opcode-011 instructions: load/store with
   (possibly scaled) register offset — LSL/LSR/ASR/ROR/RRX shifts —
   including pre/post-indexed base writeback.  Loads record Rd (plus
   CPSR when Rd is PC); stores record the computed target address and,
   for writeback forms, the base register Rn.  */
11365 arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
11367 struct regcache *reg_cache = arm_insn_r->regcache;
11369 uint32_t shift_imm = 0;
11370 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
11371 uint32_t offset_12 = 0, tgt_mem_addr = 0;
11372 uint32_t record_buf[8], record_buf_mem[8];
11375 ULONGEST u_regval[2];
11377 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11378 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11380 /* Handle enhanced store insns and LDRD DSP insn,
11381 order begins according to addressing modes for store insns
11385 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11387 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11388 /* LDR insn has a capability to do branching, if
11389 MOV LR, PC is preceded by LDR insn having Rn as R15
11390 in that case, it emulates branch and link insn, and hence we
11391 need to save CPSR and PC as well. */
11392 if (15 != reg_dest)
11394 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11395 arm_insn_r->reg_rec_count = 1;
11399 record_buf[0] = reg_dest;
11400 record_buf[1] = ARM_PS_REGNUM;
11401 arm_insn_r->reg_rec_count = 2;
/* Bits 4-11 zero means a plain (unscaled) register offset.  */
11406 if (! bits (arm_insn_r->arm_insn, 4, 11))
11408 /* Store insn, register offset and register pre-indexed,
11409 register post-indexed. */
11411 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11413 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11414 regcache_raw_read_unsigned (reg_cache, reg_src1
11416 regcache_raw_read_unsigned (reg_cache, reg_src2
11418 if (15 == reg_src2)
11420 /* If R15 was used as Rn, hence current PC+8. */
11421 /* Pre-indexed mode doesn't reach here ; illegal insn. */
11422 u_regval[0] = u_regval[0] + 8;
11424 /* Calculate target store address, Rn +/- Rm, register offset. */
11426 if (bit (arm_insn_r->arm_insn, 23))
11428 tgt_mem_addr = u_regval[0] + u_regval[1];
11432 tgt_mem_addr = u_regval[1] - u_regval[0];
11435 switch (arm_insn_r->opcode)
/* Word stores overwrite 4 bytes.  */
11449 record_buf_mem[0] = 4;
/* Byte stores overwrite 1 byte.  */
11464 record_buf_mem[0] = 1;
11468 gdb_assert_not_reached ("no decoding pattern found");
11471 record_buf_mem[1] = tgt_mem_addr;
11472 arm_insn_r->mem_rec_count = 1;
11474 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11475 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11476 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11477 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11478 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11479 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11482 /* Rn is going to be changed in pre-indexed mode and
11483 post-indexed mode as well. */
11484 record_buf[0] = reg_src2
11485 arm_insn_r->reg_rec_count = 1;
11490 /* Store insn, scaled register offset; scaled pre-indexed. */
11491 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
11493 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11495 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11496 /* Get shift_imm. */
11497 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
11498 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11499 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
11500 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11501 /* Offset_12 used as shift. */
11505 /* Offset_12 used as index. */
11506 offset_12 = u_regval[0] << shift_imm;
11510 offset_12 = (!shift_imm)?0:u_regval[0] >> shift_imm;
/* Arithmetic shift of a negative value: result saturates to all-ones.  */
11516 if (bit (u_regval[0], 31))
11518 offset_12 = 0xFFFFFFFF;
11527 /* This is arithmetic shift. */
11528 offset_12 = s_word >> shift_imm;
/* RRX: rotate right with extend through the carry flag.  */
11535 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
11537 /* Get C flag value and shift it by 31. */
11538 offset_12 = (((bit (u_regval[1], 29)) << 31) \
11539 | (u_regval[0]) >> 1);
11543 offset_12 = (u_regval[0] >> shift_imm) \
11545 (sizeof(uint32_t) - shift_imm));
11550 gdb_assert_not_reached ("no decoding pattern found");
11554 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11556 if (bit (arm_insn_r->arm_insn, 23))
11558 tgt_mem_addr = u_regval[1] + offset_12;
11562 tgt_mem_addr = u_regval[1] - offset_12;
11565 switch (arm_insn_r->opcode)
/* Word stores overwrite 4 bytes.  */
11579 record_buf_mem[0] = 4;
/* Byte stores overwrite 1 byte.  */
11594 record_buf_mem[0] = 1;
11598 gdb_assert_not_reached ("no decoding pattern found");
11601 record_buf_mem[1] = tgt_mem_addr;
11602 arm_insn_r->mem_rec_count = 1;
11604 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11605 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11606 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11607 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11608 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11609 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11612 /* Rn is going to be changed in register scaled pre-indexed
11613 mode,and scaled post indexed mode. */
11614 record_buf[0] = reg_src2;
11615 arm_insn_r->reg_rec_count = 1;
11620 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11621 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11625 /* Handling opcode 100 insns. */
/* Record handler for ARM opcode-100 instructions: load/store multiple
   (LDM/STM) in the four addressing modes (DA/IA/DB/IB, selected by
   bits 23-24).  The base register is always recorded, without
   optimizing on the W (writeback) bit.  */
11628 arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
11630 struct regcache *reg_cache = arm_insn_r->regcache;
11632 uint32_t register_list[16] = {0}, register_count = 0, register_bits = 0;
11633 uint32_t reg_src1 = 0, addr_mode = 0, no_of_regs = 0;
11634 uint32_t start_address = 0, index = 0;
11635 uint32_t record_buf[24], record_buf_mem[48];
11637 ULONGEST u_regval[2] = {0};
11639 /* This mode is exclusively for load and store multiple. */
11640 /* Handle increment after/before and decrement after/before mode;
11641 Rn is changing depending on W bit, but as of now we store Rn too
11642 without optimization. */
11644 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11646 /* LDM (1,2,3) where LDM (3) changes CPSR too. */
11648 if (bit (arm_insn_r->arm_insn, 20) && !bit (arm_insn_r->arm_insn, 22))
11650 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
11655 register_bits = bits (arm_insn_r->arm_insn, 0, 14);
11659 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11660 while (register_bits)
11662 if (register_bits & 0x00000001)
11663 register_list[register_count++] = 1;
/* NOTE(review): the line above packs the 1-flags into the LOW
   indices of register_list instead of indexing by register number,
   so the for-loop below records registers 0..N-1 rather than the
   registers actually named in the LDM register list — looks like a
   bug; confirm against upstream GDB, which later rewrote this.  */
11664 register_bits = register_bits >> 1;
11667 /* Extra space for Base Register and CPSR; without optimization. */
11668 record_buf[register_count] = reg_src1;
11669 record_buf[register_count + 1] = ARM_PS_REGNUM;
11670 arm_insn_r->reg_rec_count = register_count + 2;
11672 for (register_count = 0; register_count < no_of_regs; register_count++)
11674 if (register_list[register_count])
11676 /* Register_count gives total no of registers
11677 and dually working as reg number. */
11678 record_buf[index] = register_count;
11686 /* It handles both STM(1) and STM(2). */
11687 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
11689 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
11691 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11692 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
/* Count the set bits: one word of memory is clobbered per register.  */
11693 while (register_bits)
11695 if (register_bits & 0x00000001)
11697 register_bits = register_bits >> 1;
11702 /* Decrement after. */
11704 start_address = (u_regval[0]) - (register_count * 4) + 4;
11705 arm_insn_r->mem_rec_count = register_count;
11706 while (register_count)
11708 record_buf_mem[(register_count * 2) - 1] = start_address;
11709 record_buf_mem[(register_count * 2) - 2] = 4;
11710 start_address = start_address + 4;
11715 /* Increment after. */
11717 start_address = u_regval[0];
11718 arm_insn_r->mem_rec_count = register_count;
11719 while (register_count)
11721 record_buf_mem[(register_count * 2) - 1] = start_address;
11722 record_buf_mem[(register_count * 2) - 2] = 4;
11723 start_address = start_address + 4;
11728 /* Decrement before. */
11731 start_address = (u_regval[0]) - (register_count * 4);
11732 arm_insn_r->mem_rec_count = register_count;
11733 while (register_count)
11735 record_buf_mem[(register_count * 2) - 1] = start_address;
11736 record_buf_mem[(register_count * 2) - 2] = 4;
11737 start_address = start_address + 4;
11742 /* Increment before. */
11744 start_address = u_regval[0] + 4;
11745 arm_insn_r->mem_rec_count = register_count;
11746 while (register_count)
11748 record_buf_mem[(register_count * 2) - 1] = start_address;
11749 record_buf_mem[(register_count * 2) - 2] = 4;
11750 start_address = start_address + 4;
11756 gdb_assert_not_reached ("no decoding pattern found");
11760 /* Base register also changes; based on condition and W bit. */
11761 /* We save it anyway without optimization. */
11762 record_buf[0] = reg_src1;
11763 arm_insn_r->reg_rec_count = 1;
11766 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11767 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11771 /* Handling opcode 101 insns. */
/* Record handler for ARM opcode-101 insns: B and BL.  Only BL
   (bit 24 set) clobbers a register we must save — LR; a plain
   branch changes nothing process record tracks.  */
11774 arm_record_b_bl (insn_decode_record *arm_insn_r)
11776 uint32_t record_buf[8];
11778 /* Handle B, BL, BLX(1) insns. */
11779 /* B simply branches so we do nothing here. */
11780 /* Note: BLX(1) doesn't fall here but instead it falls into
11781 extension space. */
11782 if (bit (arm_insn_r->arm_insn, 24))
11784 record_buf[0] = ARM_LR_REGNUM;
11785 arm_insn_r->reg_rec_count = 1;
11788 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11793 /* Handling opcode 110 insns. */
/* Record handler for ARM opcode-110 (coprocessor load/store)
   instructions: not supported — prints a diagnostic and lets the
   caller treat the insn as unrecordable.  */
11796 arm_record_coproc (insn_decode_record *arm_insn_r)
11798 printf_unfiltered (_("Process record does not support instruction "
11799 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11800 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11805 /* Handling opcode 111 insns. */
/* Record handler for ARM opcode-111 insns.  SWI/SVC (opcode field
   == 15) is routed to the OS-specific syscall recorder registered in
   tdep->arm_swi_record when one exists; everything else (coprocessor
   data processing / register transfers) is unsupported.  */
11808 arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
11810 struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
11811 struct regcache *reg_cache = arm_insn_r->regcache;
11812 uint32_t ret = 0; /* function return value: -1:record failure ; 0:success */
11814 /* Handle SWI insn; system call would be handled over here. */
11816 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
11817 if (15 == arm_insn_r->opcode)
11819 /* Handle arm syscall insn. */
11820 if (tdep->arm_swi_record != NULL)
11822 ret = tdep->arm_swi_record(reg_cache);
11826 printf_unfiltered (_("no syscall record support\n"));
11831 printf_unfiltered (_("Process record does not support instruction "
11832 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11833 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11837 /* Handling opcode 000 insns. */
/* Record handler for Thumb opcode-000 (shift by immediate and
   add/subtract) insns: the flags (CPSR) and the destination register
   Rd (bits 0-2) are clobbered.  */
11840 thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
11842 uint32_t record_buf[8];
11843 uint32_t reg_src1 = 0;
11845 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11847 record_buf[0] = ARM_PS_REGNUM;
11848 record_buf[1] = reg_src1;
11849 thumb_insn_r->reg_rec_count = 2;
11851 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11857 /* Handling opcode 001 insns. */
/* Record handler for Thumb opcode-001 (MOV/CMP/ADD/SUB with 8-bit
   immediate) insns: the flags (CPSR) and the register named in
   bits 8-10 are clobbered.  */
11860 thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
11862 uint32_t record_buf[8];
11863 uint32_t reg_src1 = 0;
11865 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11867 record_buf[0] = ARM_PS_REGNUM;
11868 record_buf[1] = reg_src1;
11869 thumb_insn_r->reg_rec_count = 2;
11871 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11876 /* Handling opcode 010 insns. */
/* Record handler for Thumb opcode-010 insns: load/store with register
   offset, PC-relative literal loads, hi-register data processing
   (format 5) and special data processing / BX (format 8).  Loads
   record the destination register; stores record the target address
   Rn + Rm and the access size.  */
11879 thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
11881 struct regcache *reg_cache = thumb_insn_r->regcache;
11882 uint32_t record_buf[8], record_buf_mem[8];
11884 uint32_t reg_src1 = 0, reg_src2 = 0;
11885 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
11887 ULONGEST u_regval[2] = {0};
11889 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
11891 if (bit (thumb_insn_r->arm_insn, 12))
11893 /* Handle load/store register offset. */
11894 opcode2 = bits (thumb_insn_r->arm_insn, 9, 10);
11895 if (opcode2 >= 12 && opcode2 <= 15)
/* NOTE(review): bits 9-10 is a 2-bit field (values 0..3), so this
   comparison can never be true and the load branch below is dead;
   upstream GDB later reads a wider field here — confirm.  */
11897 /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH. */
11898 reg_src1 = bits (thumb_insn_r->arm_insn,0, 2);
11899 record_buf[0] = reg_src1;
11900 thumb_insn_r->reg_rec_count = 1;
11902 else if (opcode2 >= 8 && opcode2 <= 10)
11904 /* STR(2), STRB(2), STRH(2) . */
11905 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11906 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
11907 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11908 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11910 record_buf_mem[0] = 4; /* STR (2). */
11911 else if (10 == opcode2)
11912 record_buf_mem[0] = 1; /* STRB (2). */
11913 else if (9 == opcode2)
11914 record_buf_mem[0] = 2; /* STRH (2). */
11915 record_buf_mem[1] = u_regval[0] + u_regval[1];
11916 thumb_insn_r->mem_rec_count = 1;
11919 else if (bit (thumb_insn_r->arm_insn, 11))
11921 /* Handle load from literal pool. */
/* LDR(3): PC-relative load; only Rd (bits 8-10) changes.  */
11923 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11924 record_buf[0] = reg_src1;
11925 thumb_insn_r->reg_rec_count = 1;
11929 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
11930 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
11931 if ((3 == opcode2) && (!opcode3))
11933 /* Branch with exchange. */
11934 record_buf[0] = ARM_PS_REGNUM;
11935 thumb_insn_r->reg_rec_count = 1;
11939 /* Format 8; special data processing insns. */
11940 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11941 record_buf[0] = ARM_PS_REGNUM;
11942 record_buf[1] = reg_src1;
11943 thumb_insn_r->reg_rec_count = 2;
11948 /* Format 5; data processing insns. */
11949 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11950 if (bit (thumb_insn_r->arm_insn, 7))
/* H1 bit set: destination is a high register (r8-r15).  */
11952 reg_src1 = reg_src1 + 8;
11954 record_buf[0] = ARM_PS_REGNUM;
11955 record_buf[1] = reg_src1;
11956 thumb_insn_r->reg_rec_count = 2;
11959 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11960 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11966 /* Handling opcode 001 insns. */
/* Record handler for Thumb load/store word with immediate offset
   (LDR(1)/STR(1)).  A load records the destination register Rd
   (bits 0-2); a store records the 4-byte target address
   Rn + (immed_5 * 4).  */
11969 thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
11971 struct regcache *reg_cache = thumb_insn_r->regcache;
11972 uint32_t record_buf[8], record_buf_mem[8];
11974 uint32_t reg_src1 = 0;
11975 uint32_t opcode = 0, immed_5 = 0;
11977 ULONGEST u_regval = 0;
11979 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11984 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11985 record_buf[0] = reg_src1;
11986 thumb_insn_r->reg_rec_count = 1;
11991 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11992 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11993 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11994 record_buf_mem[0] = 4;
11995 record_buf_mem[1] = u_regval + (immed_5 * 4);
11996 thumb_insn_r->mem_rec_count = 1;
11999 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12000 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12006 /* Handling opcode 100 insns. */
/* Record handler for the Thumb "opcode 100" group: loads and
   halfword/word stores.  Records the destination register of a load,
   or the address and length of the memory a store overwrites.
   NOTE(review): braces and a few lines are elided in this extract;
   comments describe only the visible statements.  */
12009 thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
12011 struct regcache *reg_cache = thumb_insn_r->regcache;
12012 uint32_t record_buf[8], record_buf_mem[8];
12014 uint32_t reg_src1 = 0;
12015 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
12017 ULONGEST u_regval = 0;
/* Bits 11-12 select among the four forms below.  */
12019 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
/* opcode 3: load with destination register in bits 8-10.  */
12024 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12025 record_buf[0] = reg_src1;
12026 thumb_insn_r->reg_rec_count = 1;
/* opcode 1: load with destination register in bits 0-2.  */
12028 else if (1 == opcode)
12031 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12032 record_buf[0] = reg_src1;
12033 thumb_insn_r->reg_rec_count = 1;
/* opcode 2: SP-relative word store; 4 bytes at SP + (immed_8 * 4)
   are overwritten.  */
12035 else if (2 == opcode)
12038 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
12039 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12040 record_buf_mem[0] = 4;
12041 record_buf_mem[1] = u_regval + (immed_8 * 4);
12042 thumb_insn_r->mem_rec_count = 1;
/* opcode 0: halfword store; 2 bytes at Rn + (immed_5 * 2) are
   overwritten.  */
12044 else if (0 == opcode)
12047 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
12048 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12049 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12050 record_buf_mem[0] = 2;
12051 record_buf_mem[1] = u_regval + (immed_5 * 2);
12052 thumb_insn_r->mem_rec_count = 1;
/* Hand the collected records back through THUMB_INSN_R.  */
12055 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12056 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12062 /* Handling opcode 101 insns. */
12065 thumb_record_misc (insn_decode_record *thumb_insn_r)
12067 struct regcache *reg_cache = thumb_insn_r->regcache;
12069 uint32_t opcode = 0, opcode1 = 0, opcode2 = 0;
12070 uint32_t register_bits = 0, register_count = 0;
12071 uint32_t register_list[8] = {0}, index = 0, start_address = 0;
12072 uint32_t record_buf[24], record_buf_mem[48];
12075 ULONGEST u_regval = 0;
12077 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12078 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12079 opcode2 = bits (thumb_insn_r->arm_insn, 9, 12);
12084 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12085 while (register_bits)
12087 if (register_bits & 0x00000001)
12088 register_list[register_count++] = 1;
12089 register_bits = register_bits >> 1;
12091 record_buf[register_count] = ARM_PS_REGNUM;
12092 record_buf[register_count + 1] = ARM_SP_REGNUM;
12093 thumb_insn_r->reg_rec_count = register_count + 2;
12094 for (register_count = 0; register_count < 8; register_count++)
12096 if (register_list[register_count])
12098 record_buf[index] = register_count;
12103 else if (10 == opcode2)
12106 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12107 regcache_raw_read_unsigned (reg_cache, ARM_PC_REGNUM, &u_regval);
12108 while (register_bits)
12110 if (register_bits & 0x00000001)
12112 register_bits = register_bits >> 1;
12114 start_address = u_regval - \
12115 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
12116 thumb_insn_r->mem_rec_count = register_count;
12117 while (register_count)
12119 record_buf_mem[(register_count * 2) - 1] = start_address;
12120 record_buf_mem[(register_count * 2) - 2] = 4;
12121 start_address = start_address + 4;
12124 record_buf[0] = ARM_SP_REGNUM;
12125 thumb_insn_r->reg_rec_count = 1;
12127 else if (0x1E == opcode1)
12130 /* Handle enhanced software breakpoint insn, BKPT. */
12131 /* CPSR is changed to be executed in ARM state, disabling normal
12132 interrupts, entering abort mode. */
12133 /* According to high vector configuration PC is set. */
12134 /* User hits breakpoint and type reverse, in that case, we need to go back with
12135 previous CPSR and Program Counter. */
12136 record_buf[0] = ARM_PS_REGNUM;
12137 record_buf[1] = ARM_LR_REGNUM;
12138 thumb_insn_r->reg_rec_count = 2;
12139 /* We need to save SPSR value, which is not yet done. */
12140 printf_unfiltered (_("Process record does not support instruction "
12141 "0x%0x at address %s.\n"),
12142 thumb_insn_r->arm_insn,
12143 paddress (thumb_insn_r->gdbarch,
12144 thumb_insn_r->this_addr));
12147 else if ((0 == opcode) || (1 == opcode))
12149 /* ADD(5), ADD(6). */
12150 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12151 record_buf[0] = reg_src1;
12152 thumb_insn_r->reg_rec_count = 1;
12154 else if (2 == opcode)
12156 /* ADD(7), SUB(4). */
12157 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12158 record_buf[0] = ARM_SP_REGNUM;
12159 thumb_insn_r->reg_rec_count = 1;
12162 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12163 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12169 /* Handling opcode 110 insns. */
/* Record handler for the Thumb "opcode 110" group: LDMIA/STMIA
   multiple load/store and the SWI (syscall) encoding.  The
   conditional branch B(1) needs no record here (see the note near
   the bottom).
   NOTE(review): braces and several lines are elided in this extract;
   comments describe only the visible statements.  */
12172 thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
12174 struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
12175 struct regcache *reg_cache = thumb_insn_r->regcache;
12177 uint32_t ret = 0; /* function return value: -1:record failure ; 0:success */
12178 uint32_t reg_src1 = 0;
12179 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
12180 uint32_t register_list[8] = {0}, index = 0, start_address = 0;
12181 uint32_t record_buf[24], record_buf_mem[48];
12183 ULONGEST u_regval = 0;
12185 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12186 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
/* LDMIA case (guard elided): every register in the list plus the
   base register Rn (bits 8-10) may be written; record them all.  */
12192 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12194 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12195 while (register_bits)
12197 if (register_bits & 0x00000001)
12198 register_list[register_count++] = 1;
12199 register_bits = register_bits >> 1;
12201 record_buf[register_count] = reg_src1;
12202 thumb_insn_r->reg_rec_count = register_count + 1;
12203 for (register_count = 0; register_count < 8; register_count++)
12205 if (register_list[register_count])
12207 record_buf[index] = register_count;
12212 else if (0 == opcode2)
12214 /* STMIA: one 4-byte slot per listed register is overwritten,
   starting at the base register's current value. */
12215 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12217 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12218 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12219 while (register_bits)
12221 if (register_bits & 0x00000001)
12223 register_bits = register_bits >> 1;
12225 start_address = u_regval;
12226 thumb_insn_r->mem_rec_count = register_count;
12227 while (register_count)
12229 record_buf_mem[(register_count * 2) - 1] = start_address;
12230 record_buf_mem[(register_count * 2) - 2] = 4;
12231 start_address = start_address + 4;
/* SWI: delegate to the OS-specific syscall recorder if one is
   registered; otherwise report lack of support.  */
12235 else if (0x1F == opcode1)
12237 /* Handle arm syscall insn. */
12238 if (tdep->arm_swi_record != NULL)
12240 ret = tdep->arm_swi_record(reg_cache);
12244 printf_unfiltered (_("no syscall record support\n"));
12249 /* B (1), conditional branch is automatically taken care in process_record,
12250 as PC is saved there. */
12252 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12253 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12259 /* Handling opcode 111 insns. */
/* Record handler for the Thumb "opcode 111" group (branch-and-link
   family).  Only the registers clobbered by the link step need
   recording here; the PC itself is saved by the caller (see the
   B(2) note near the bottom).  */
12262 thumb_record_branch (insn_decode_record *thumb_insn_r)
12264 uint32_t record_buf[8];
12265 uint32_t bits_h = 0;
/* Bits 11-12 (the "H" field) distinguish the branch forms.  */
12267 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
/* H = 2 or 3: the link register is written.  */
12269 if (2 == bits_h || 3 == bits_h)
12272 record_buf[0] = ARM_LR_REGNUM;
12273 thumb_insn_r->reg_rec_count = 1;
/* H = 1: both the status register and LR may change.  */
12275 else if (1 == bits_h)
12278 record_buf[0] = ARM_PS_REGNUM;
12279 record_buf[1] = ARM_LR_REGNUM;
12280 thumb_insn_r->reg_rec_count = 2;
12283 /* B(2) is automatically taken care in process_record, as PC is
12286 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12292 /* Extracts arm/thumb/thumb2 insn depending on the size, and returns 0 on success
12293 and positive val on failure. */
/* Reads INSN_SIZE bytes of the inferior's code at
   insn_record->this_addr and stores the byte-order-corrected value in
   insn_record->arm_insn.
   NOTE(review): the early-return on a failed read is elided in this
   extract; comments describe only the visible statements.  */
12296 extract_arm_insn (insn_decode_record *insn_record, uint32_t insn_size)
12298 gdb_byte buf[insn_size];
12300 memset (&buf[0], 0, insn_size);
/* target_read_memory returns non-zero on error.  */
12302 if (target_read_memory (insn_record->this_addr, &buf[0], insn_size))
/* Decode the raw bytes using the target's byte order.  */
12304 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
12306 gdbarch_byte_order (insn_record->gdbarch));
/* Signature shared by every per-group instruction record handler.  */
12310 typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
12312 /* Decode arm/thumb insn depending on condition codes and opcodes; and
/* Dispatch the current instruction to the proper per-group record
   handler, chosen by RECORD_TYPE (ARM, Thumb or Thumb-2) and the
   instruction's major opcode bits; Thumb-2 recording is not
   implemented and is reported as unsupported.
   NOTE(review): braces and several lines (returns, closing braces)
   are elided in this extract; comments describe only the visible
   statements.  */
12316 decode_insn (insn_decode_record *arm_record, record_type_t record_type,
12317 uint32_t insn_size)
12320 /* (Starting from numerical 0); bits 25, 26, 27 decodes type of arm instruction. */
12321 static const sti_arm_hdl_fp_t const arm_handle_insn[8] =
12323 arm_record_data_proc_misc_ld_str, /* 000. */
12324 arm_record_data_proc_imm, /* 001. */
12325 arm_record_ld_st_imm_offset, /* 010. */
12326 arm_record_ld_st_reg_offset, /* 011. */
12327 arm_record_ld_st_multiple, /* 100. */
12328 arm_record_b_bl, /* 101. */
12329 arm_record_coproc, /* 110. */
12330 arm_record_coproc_data_proc /* 111. */
12333 /* (Starting from numerical 0); bits 13,14,15 decodes type of thumb instruction. */
12334 static const sti_arm_hdl_fp_t const thumb_handle_insn[8] =
12336 thumb_record_shift_add_sub, /* 000. */
12337 thumb_record_add_sub_cmp_mov, /* 001. */
12338 thumb_record_ld_st_reg_offset, /* 010. */
12339 thumb_record_ld_st_imm_offset, /* 011. */
12340 thumb_record_ld_st_stack, /* 100. */
12341 thumb_record_misc, /* 101. */
12342 thumb_record_ldm_stm_swi, /* 110. */
12343 thumb_record_branch /* 111. */
12346 uint32_t ret = 0; /* return value: negative:failure 0:success. */
12347 uint32_t insn_id = 0;
/* First fetch the raw instruction bytes; bail out on a read error.  */
12349 if (extract_arm_insn (arm_record, insn_size))
12353 printf_unfiltered (_("Process record: error reading memory at "
12354 "addr %s len = %d.\n"),
12355 paddress (arm_record->gdbarch, arm_record->this_addr), insn_size);
/* ARM mode: condition in bits 28-31, major opcode in bits 25-27.  */
12359 else if (ARM_RECORD == record_type)
12361 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
12362 insn_id = bits (arm_record->arm_insn, 25, 27);
12363 ret = arm_record_extension_space (arm_record);
12364 /* If this insn has fallen into extension space
12365 then we need not decode it anymore. */
12366 if (ret != -1 && !INSN_RECORDED(arm_record))
12368 ret = arm_handle_insn[insn_id] (arm_record);
/* Thumb mode: major opcode in bits 13-15.  */
12371 else if (THUMB_RECORD == record_type)
12373 /* As thumb does not have condition codes, we set negative. */
12374 arm_record->cond = -1;
12375 insn_id = bits (arm_record->arm_insn, 13, 15);
12376 ret = thumb_handle_insn[insn_id] (arm_record);
/* Thumb-2 (32-bit) recording is not implemented yet.  */
12378 else if (THUMB2_RECORD == record_type)
12380 printf_unfiltered (_("Process record doesnt support thumb32 instruction "
12381 "0x%0x at address %s.\n"),arm_record->arm_insn,
12382 paddress (arm_record->gdbarch,
12383 arm_record->this_addr));
/* Any other record_type is a caller bug.  */
12388 /* Throw assertion. */
12389 gdb_assert_not_reached ("not a valid instruction, could not decode");
12396 /* Cleans up local record registers and memory allocations. */
/* Frees the register and memory record arrays that REG_ALLOC /
   MEM_ALLOC attached to RECORD while decoding one instruction.  */
12399 deallocate_reg_mem (insn_decode_record *record)
12401 xfree (record->arm_regs);
12402 xfree (record->arm_mems);
12406 /* Parse the current instruction and record the values of the registers and
12407 memory that will be changed in current instruction to record_arch_list".
12408 Return -1 if something is wrong. */
12411 arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
12412 CORE_ADDR insn_addr)
12415 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
12416 uint32_t no_of_rec = 0;
12417 uint32_t ret = 0; /* return value: -1:record failure ; 0:success */
12418 ULONGEST t_bit = 0, insn_id = 0;
12420 ULONGEST u_regval = 0;
12422 insn_decode_record arm_record;
12424 memset (&arm_record, 0, sizeof (insn_decode_record));
12425 arm_record.regcache = regcache;
12426 arm_record.this_addr = insn_addr;
12427 arm_record.gdbarch = gdbarch;
12430 if (record_debug > 1)
12432 fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
12434 paddress (gdbarch, arm_record.this_addr));
12437 if (extract_arm_insn (&arm_record, 2))
12441 printf_unfiltered (_("Process record: error reading memory at "
12442 "addr %s len = %d.\n"),
12443 paddress (arm_record.gdbarch,
12444 arm_record.this_addr), 2);
12449 /* Check the insn, whether it is thumb or arm one. */
12451 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
12452 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
12455 if (!(u_regval & t_bit))
12457 /* We are decoding arm insn. */
12458 ret = decode_insn (&arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
12462 insn_id = bits (arm_record.arm_insn, 11, 15);
12463 /* is it thumb2 insn? */
12464 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
12466 ret = decode_insn (&arm_record, THUMB2_RECORD,
12467 THUMB2_INSN_SIZE_BYTES);
12471 /* We are decoding thumb insn. */
12472 ret = decode_insn (&arm_record, THUMB_RECORD, THUMB_INSN_SIZE_BYTES);
12478 /* Record registers. */
12479 record_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
12480 if (arm_record.arm_regs)
12482 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
12484 if (record_arch_list_add_reg (arm_record.regcache ,
12485 arm_record.arm_regs[no_of_rec]))
12489 /* Record memories. */
12490 if (arm_record.arm_mems)
12492 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
12494 if (record_arch_list_add_mem
12495 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
12496 arm_record.arm_mems[no_of_rec].len))
12501 if (record_arch_list_add_end ())
12506 deallocate_reg_mem (&arm_record);