1 /* Common target dependent code for GDB on ARM systems.
3 Copyright (C) 1988-1989, 1991-1993, 1995-1996, 1998-2012 Free
4 Software Foundation, Inc.
6 This file is part of GDB.
8 This program is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3 of the License, or
11 (at your option) any later version.
13 This program is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with this program. If not, see <http://www.gnu.org/licenses/>. */
21 #include <ctype.h> /* XXX for isupper (). */
28 #include "gdb_string.h"
29 #include "dis-asm.h" /* For register styles. */
31 #include "reggroups.h"
34 #include "arch-utils.h"
36 #include "frame-unwind.h"
37 #include "frame-base.h"
38 #include "trad-frame.h"
40 #include "dwarf2-frame.h"
42 #include "prologue-value.h"
44 #include "target-descriptions.h"
45 #include "user-regs.h"
49 #include "gdb/sim-arm.h"
52 #include "coff/internal.h"
55 #include "gdb_assert.h"
60 #include "features/arm-with-m.c"
61 #include "features/arm-with-m-fpa-layout.c"
62 #include "features/arm-with-m-vfp-d16.c"
63 #include "features/arm-with-iwmmxt.c"
64 #include "features/arm-with-vfpv2.c"
65 #include "features/arm-with-vfpv3.c"
66 #include "features/arm-with-neon.c"
70 /* Macros for setting and testing a bit in a minimal symbol that marks
71 it as a Thumb function. The MSB of the minimal symbol's "info" field
72 is used for this purpose.
74 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
75 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
77 #define MSYMBOL_SET_SPECIAL(msym) \
78 MSYMBOL_TARGET_FLAG_1 (msym) = 1
80 #define MSYMBOL_IS_SPECIAL(msym) \
81 MSYMBOL_TARGET_FLAG_1 (msym)
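/* Illustrative sketch (not code from this file): a symbol-reading hook that
   recognizes a Thumb function, e.g. from an ELF mapping symbol or from the
   low bit of its address, would mark the minimal symbol with

     MSYMBOL_SET_SPECIAL (msym);

   and arm_pc_is_thumb below then honours that mark via
   MSYMBOL_IS_SPECIAL (msym).  */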
83 /* Per-objfile data used for mapping symbols. */
84 static const struct objfile_data *arm_objfile_data_key;
86 struct arm_mapping_symbol
91 typedef struct arm_mapping_symbol arm_mapping_symbol_s;
92 DEF_VEC_O(arm_mapping_symbol_s);
94 struct arm_per_objfile
96 VEC(arm_mapping_symbol_s) **section_maps;
99 /* The list of available "set arm ..." and "show arm ..." commands. */
100 static struct cmd_list_element *setarmcmdlist = NULL;
101 static struct cmd_list_element *showarmcmdlist = NULL;
103 /* The type of floating-point to use. Keep this in sync with enum
104 arm_float_model, and the help string in _initialize_arm_tdep. */
105 static const char *const fp_model_strings[] =
115 /* A variable that can be configured by the user. */
116 static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
117 static const char *current_fp_model = "auto";
119 /* The ABI to use. Keep this in sync with arm_abi_kind. */
120 static const char *const arm_abi_strings[] =
128 /* A variable that can be configured by the user. */
129 static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
130 static const char *arm_abi_string = "auto";
132 /* The execution mode to assume. */
133 static const char *const arm_mode_strings[] =
141 static const char *arm_fallback_mode_string = "auto";
142 static const char *arm_force_mode_string = "auto";
144 /* Internal override of the execution mode. -1 means no override,
145 0 means override to ARM mode, 1 means override to Thumb mode.
146 The effect is the same as if arm_force_mode had been set by the
147 user (except the internal override has precedence over a user's
148 arm_force_mode override). */
149 static int arm_override_mode = -1;
151 /* Number of different reg name sets (options). */
152 static int num_disassembly_options;
154 /* The standard register names, and all the valid aliases for them. Note
155 that `fp', `sp' and `pc' are not added in this alias list, because they
156 have been added as builtin user registers in
157 std-regs.c:_initialize_frame_reg. */
162 } arm_register_aliases[] = {
163 /* Basic register numbers. */
180 /* Synonyms (argument and variable registers). */
193 /* Other platform-specific names for r9. */
199 /* Names used by GCC (not listed in the ARM EABI). */
201 /* A special name from the older ATPCS. */
205 static const char *const arm_register_names[] =
206 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
207 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
208 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
209 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
210 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
211 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
212 "fps", "cpsr" }; /* 24 25 */
214 /* Valid register name styles. */
215 static const char **valid_disassembly_styles;
217 /* Disassembly style to use. Default to "std" register names. */
218 static const char *disassembly_style;
220 /* This is used to keep the bfd arch_info in sync with the disassembly style. */
222 static void set_disassembly_style_sfunc(char *, int,
223 struct cmd_list_element *);
224 static void set_disassembly_style (void);
226 static void convert_from_extended (const struct floatformat *, const void *,
228 static void convert_to_extended (const struct floatformat *, void *,
231 static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
232 struct regcache *regcache,
233 int regnum, gdb_byte *buf);
234 static void arm_neon_quad_write (struct gdbarch *gdbarch,
235 struct regcache *regcache,
236 int regnum, const gdb_byte *buf);
238 static int thumb_insn_size (unsigned short inst1);
240 struct arm_prologue_cache
242 /* The stack pointer at the time this frame was created; i.e. the
243 caller's stack pointer when this function was called. It is used
244 to identify this frame. */
247 /* The frame base for this frame is just prev_sp - frame size.
248 FRAMESIZE is the distance from the frame pointer to the
249 initial stack pointer. */
253 /* The register used to hold the frame pointer for this frame. */
256 /* Saved register offsets. */
257 struct trad_frame_saved_reg *saved_regs;
260 static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
261 CORE_ADDR prologue_start,
262 CORE_ADDR prologue_end,
263 struct arm_prologue_cache *cache);
265 /* Architecture version for displaced stepping. This affects the behaviour of
266 certain instructions, and really should not be hard-wired. */
268 #define DISPLACED_STEPPING_ARCH_VERSION 5
270 /* Addresses for calling Thumb functions have bit 0 set.
271 Here are some macros to test, set, or clear bit 0 of addresses. */
272 #define IS_THUMB_ADDR(addr) ((addr) & 1)
273 #define MAKE_THUMB_ADDR(addr) ((addr) | 1)
274 #define UNMAKE_THUMB_ADDR(addr) ((addr) & ~1)
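/* A worked example of the macros above (addresses chosen arbitrarily): for
   code at 0x8000, MAKE_THUMB_ADDR (0x8000) == 0x8001,
   IS_THUMB_ADDR (0x8001) == 1 and UNMAKE_THUMB_ADDR (0x8001) == 0x8000.
   Both ARM and Thumb instructions are at least 2-byte aligned, so bit 0 of
   a code address is otherwise always zero and is free to carry the Thumb
   flag.  */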
276 /* Set to true if the 32-bit mode is in use. */
280 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
283 arm_psr_thumb_bit (struct gdbarch *gdbarch)
285 if (gdbarch_tdep (gdbarch)->is_m)
291 /* Determine if FRAME is executing in Thumb mode. */
294 arm_frame_is_thumb (struct frame_info *frame)
297 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
299 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
300 directly (from a signal frame or dummy frame) or by interpreting
301 the saved LR (from a prologue or DWARF frame). So consult it and
302 trust the unwinders. */
303 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
305 return (cpsr & t_bit) != 0;
308 /* Callback for VEC_lower_bound. */
311 arm_compare_mapping_symbols (const struct arm_mapping_symbol *lhs,
312 const struct arm_mapping_symbol *rhs)
314 return lhs->value < rhs->value;
317 /* Search for the mapping symbol covering MEMADDR. If one is found,
318 return its type. Otherwise, return 0. If START is non-NULL,
319 set *START to the location of the mapping symbol. */
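/* For reference: ELF for the ARM Architecture defines the mapping symbols
   $a, $t and $d, marking the start of ARM code, Thumb code and data
   respectively.  The type stored in an arm_mapping_symbol and returned
   here is that single letter, so e.g. a result of 't' means MEMADDR lies
   within Thumb code.  */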
322 arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
324 struct obj_section *sec;
326 /* If there are mapping symbols, consult them. */
327 sec = find_pc_section (memaddr);
330 struct arm_per_objfile *data;
331 VEC(arm_mapping_symbol_s) *map;
332 struct arm_mapping_symbol map_key = { memaddr - obj_section_addr (sec),
336 data = objfile_data (sec->objfile, arm_objfile_data_key);
339 map = data->section_maps[sec->the_bfd_section->index];
340 if (!VEC_empty (arm_mapping_symbol_s, map))
342 struct arm_mapping_symbol *map_sym;
344 idx = VEC_lower_bound (arm_mapping_symbol_s, map, &map_key,
345 arm_compare_mapping_symbols);
347 /* VEC_lower_bound finds the earliest ordered insertion
348 point. If the following symbol starts at this exact
349 address, we use that; otherwise, the preceding
350 mapping symbol covers this address. */
351 if (idx < VEC_length (arm_mapping_symbol_s, map))
353 map_sym = VEC_index (arm_mapping_symbol_s, map, idx);
354 if (map_sym->value == map_key.value)
357 *start = map_sym->value + obj_section_addr (sec);
358 return map_sym->type;
364 map_sym = VEC_index (arm_mapping_symbol_s, map, idx - 1);
366 *start = map_sym->value + obj_section_addr (sec);
367 return map_sym->type;
376 /* Determine if the program counter specified in MEMADDR is in a Thumb
377 function. This function should be called for addresses unrelated to
378 any executing frame; otherwise, prefer arm_frame_is_thumb. */
381 arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
383 struct minimal_symbol *sym;
385 struct displaced_step_closure* dsc
386 = get_displaced_step_closure_by_addr(memaddr);
388 /* If we are checking the mode of a displaced instruction in the copy
389 area, the mode should be determined by the instruction at the original address. */
393 fprintf_unfiltered (gdb_stdlog,
394 "displaced: check mode of %.8lx instead of %.8lx\n",
395 (unsigned long) dsc->insn_addr,
396 (unsigned long) memaddr);
397 memaddr = dsc->insn_addr;
400 /* If bit 0 of the address is set, assume this is a Thumb address. */
401 if (IS_THUMB_ADDR (memaddr))
404 /* Respect internal mode override if active. */
405 if (arm_override_mode != -1)
406 return arm_override_mode;
408 /* If the user wants to override the symbol table, let them. */
409 if (strcmp (arm_force_mode_string, "arm") == 0)
411 if (strcmp (arm_force_mode_string, "thumb") == 0)
414 /* ARM v6-M and v7-M are always in Thumb mode. */
415 if (gdbarch_tdep (gdbarch)->is_m)
418 /* If there are mapping symbols, consult them. */
419 type = arm_find_mapping_symbol (memaddr, NULL);
423 /* Thumb functions have a "special" bit set in minimal symbols. */
424 sym = lookup_minimal_symbol_by_pc (memaddr);
426 return (MSYMBOL_IS_SPECIAL (sym));
428 /* If the user wants to override the fallback mode, let them. */
429 if (strcmp (arm_fallback_mode_string, "arm") == 0)
431 if (strcmp (arm_fallback_mode_string, "thumb") == 0)
434 /* If we couldn't find any symbol, but we're talking to a running
435 target, then trust the current value of $cpsr. This lets
436 "display/i $pc" always show the correct mode (though if there is
437 a symbol table we will not reach here, so it still may not be
438 displayed in the mode in which it will be executed). */
439 if (target_has_registers)
440 return arm_frame_is_thumb (get_current_frame ());
442 /* Otherwise we're out of luck; we assume ARM. */
446 /* Remove useless bits from addresses in a running program. */
448 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
451 return UNMAKE_THUMB_ADDR (val);
453 return (val & 0x03fffffc);
456 /* When reading symbols, we need to zap the low bit of the address,
457 which may be set to 1 for Thumb functions. */
459 arm_smash_text_address (struct gdbarch *gdbarch, CORE_ADDR val)
464 /* Return 1 if PC is the start of a compiler helper function which
465 can be safely ignored during prologue skipping. IS_THUMB is true
466 if the function is known to be a Thumb function due to the way it is being called. */
469 skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
471 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
472 struct minimal_symbol *msym;
474 msym = lookup_minimal_symbol_by_pc (pc);
476 && SYMBOL_VALUE_ADDRESS (msym) == pc
477 && SYMBOL_LINKAGE_NAME (msym) != NULL)
479 const char *name = SYMBOL_LINKAGE_NAME (msym);
481 /* The GNU linker's Thumb call stub to foo is named __foo_from_thumb. */
483 if (strstr (name, "_from_thumb") != NULL)
486 /* On soft-float targets, __truncdfsf2 is called to convert promoted
487 arguments to their argument types in non-prototyped functions. */
489 if (strncmp (name, "__truncdfsf2", strlen ("__truncdfsf2")) == 0)
491 if (strncmp (name, "__aeabi_d2f", strlen ("__aeabi_d2f")) == 0)
494 /* Internal functions related to thread-local storage. */
495 if (strncmp (name, "__tls_get_addr", strlen ("__tls_get_addr")) == 0)
497 if (strncmp (name, "__aeabi_read_tp", strlen ("__aeabi_read_tp")) == 0)
502 /* If we run against a stripped glibc, we may be unable to identify
503 special functions by name. Check for one important case,
504 __aeabi_read_tp, by comparing the *code* against the default
505 implementation (this is hand-written ARM assembler in glibc). */
508 && read_memory_unsigned_integer (pc, 4, byte_order_for_code)
509 == 0xe3e00a0f /* mov r0, #0xffff0fff */
510 && read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code)
511 == 0xe240f01f) /* sub pc, r0, #31 */
518 /* Support routines for instruction parsing. */
519 #define submask(x) ((1L << ((x) + 1)) - 1)
520 #define bit(obj,st) (((obj) >> (st)) & 1)
521 #define bits(obj,st,fn) (((obj) >> (st)) & submask ((fn) - (st)))
522 #define sbits(obj,st,fn) \
523 ((long) (bits(obj,st,fn) | ((long) bit(obj,fn) * ~ submask (fn - st))))
524 #define BranchDest(addr,instr) \
525 ((CORE_ADDR) (((long) (addr)) + 8 + (sbits (instr, 0, 23) << 2)))
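/* Worked examples for the helpers above (arbitrary values):
     bits (0x1234, 4, 7) == 0x3    (extract bits 4..7)
     sbits (0xf0, 0, 7)  == -16    (sign-extend an 8-bit field)
   and for a branch instruction at 0x8000 whose 24-bit offset field holds
   0xfffffe (-2), BranchDest (0x8000, insn) == 0x8000 + 8 + (-2 << 2)
   == 0x8000, i.e. a branch to itself.  */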
527 /* Extract the immediate from a movw/movt instruction of encoding T. INSN1 is
528 the first 16 bits of the instruction and INSN2 is the second 16 bits. */
530 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
531 ((bits ((insn1), 0, 3) << 12) \
532 | (bits ((insn1), 10, 10) << 11) \
533 | (bits ((insn2), 12, 14) << 8) \
534 | bits ((insn2), 0, 7))
536 /* Extract the immediate from a movw/movt instruction of encoding A. INSN is
537 the 32-bit instruction. */
538 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \
539 ((bits ((insn), 16, 19) << 12) \
540 | bits ((insn), 0, 11))
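/* A worked example, using an arbitrary immediate: "movw r3, #0x1245" is
   encoded as insn1 = 0xf241, insn2 = 0x2345 in encoding T, and as
   insn = 0xe3013245 in encoding A.  For both,
   EXTRACT_MOVW_MOVT_IMM_T (0xf241, 0x2345) == 0x1245 and
   EXTRACT_MOVW_MOVT_IMM_A (0xe3013245) == 0x1245; the destination register
   is bits (insn2, 8, 11) for encoding T and bits (insn, 12, 15) for
   encoding A (r3 in both cases here).  */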
542 /* Decode immediate value; implements ThumbExpandImmediate pseudo-op. */
545 thumb_expand_immediate (unsigned int imm)
547 unsigned int count = imm >> 7;
555 return (imm & 0xff) | ((imm & 0xff) << 16);
557 return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
559 return (imm & 0xff) | ((imm & 0xff) << 8)
560 | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
563 return (0x80 | (imm & 0x7f)) << (32 - count);
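/* Worked examples of the expansion above (arbitrary inputs):
   thumb_expand_immediate (0x1ab) == 0x00ab00ab (the low byte replicated
   into bytes 0 and 2), and
   thumb_expand_immediate (0x4ff) == 0x7f800000 (count == 9, i.e. 0xff
   rotated right by 9 bits).  */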
566 /* Return 1 if the 16-bit Thumb instruction INST might change
567 control flow, 0 otherwise. */
570 thumb_instruction_changes_pc (unsigned short inst)
572 if ((inst & 0xff00) == 0xbd00) /* pop {rlist, pc} */
575 if ((inst & 0xf000) == 0xd000) /* conditional branch */
578 if ((inst & 0xf800) == 0xe000) /* unconditional branch */
581 if ((inst & 0xff00) == 0x4700) /* bx REG, blx REG */
584 if ((inst & 0xff87) == 0x4687) /* mov pc, REG */
587 if ((inst & 0xf500) == 0xb100) /* CBNZ or CBZ. */
593 /* Return 1 if the 32-bit Thumb instruction in INST1 and INST2
594 might change control flow, 0 otherwise. */
597 thumb2_instruction_changes_pc (unsigned short inst1, unsigned short inst2)
599 if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
601 /* Branches and miscellaneous control instructions. */
603 if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
608 else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)
610 /* SUBS PC, LR, #imm8. */
613 else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
615 /* Conditional branch. */
622 if ((inst1 & 0xfe50) == 0xe810)
624 /* Load multiple or RFE. */
626 if (bit (inst1, 7) && !bit (inst1, 8))
632 else if (!bit (inst1, 7) && bit (inst1, 8))
638 else if (bit (inst1, 7) && bit (inst1, 8))
643 else if (!bit (inst1, 7) && !bit (inst1, 8))
652 if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
654 /* MOV PC or MOVS PC. */
658 if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
661 if (bits (inst1, 0, 3) == 15)
667 if ((inst2 & 0x0fc0) == 0x0000)
673 if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)
679 if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)
688 /* Analyze a Thumb prologue, looking for a recognizable stack frame
689 and frame pointer. Scan until we encounter a store that could
690 clobber the stack frame unexpectedly, or an unknown instruction.
691 Return the last address which is definitely safe to skip for an
692 initial breakpoint. */
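/* For orientation, a typical Thumb prologue that this analyzer fully
   recognizes looks like the following (illustrative, not taken from a
   real binary):

	push	{r4, r5, r7, lr}
	add	r7, sp, #8
	sub	sp, sp, #16

   i.e. save the callee-saved registers and LR, establish r7 as the frame
   pointer, then reserve local stack space.  */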
695 thumb_analyze_prologue (struct gdbarch *gdbarch,
696 CORE_ADDR start, CORE_ADDR limit,
697 struct arm_prologue_cache *cache)
699 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
700 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
703 struct pv_area *stack;
704 struct cleanup *back_to;
706 CORE_ADDR unrecognized_pc = 0;
708 for (i = 0; i < 16; i++)
709 regs[i] = pv_register (i, 0);
710 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
711 back_to = make_cleanup_free_pv_area (stack);
713 while (start < limit)
717 insn = read_memory_unsigned_integer (start, 2, byte_order_for_code);
719 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
724 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
727 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
728 whether to save LR (R14). */
729 mask = (insn & 0xff) | ((insn & 0x100) << 6);
731 /* Calculate offsets of saved R0-R7 and LR. */
732 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
733 if (mask & (1 << regno))
735 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
737 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
740 else if ((insn & 0xff00) == 0xb000) /* add sp, #simm OR
743 offset = (insn & 0x7f) << 2; /* get scaled offset */
744 if (insn & 0x80) /* Check for SUB. */
745 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
748 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
751 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
752 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
754 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
755 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
756 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
758 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
759 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
760 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
762 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
763 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
764 && pv_is_constant (regs[bits (insn, 3, 5)]))
765 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
766 regs[bits (insn, 6, 8)]);
767 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
768 && pv_is_constant (regs[bits (insn, 3, 6)]))
770 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
771 int rm = bits (insn, 3, 6);
772 regs[rd] = pv_add (regs[rd], regs[rm]);
774 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
776 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
777 int src_reg = (insn & 0x78) >> 3;
778 regs[dst_reg] = regs[src_reg];
780 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
782 /* Handle stores to the stack. Normally pushes are used,
783 but with GCC -mtpcs-frame, there may be other stores
784 in the prologue to create the frame. */
785 int regno = (insn >> 8) & 0x7;
788 offset = (insn & 0xff) << 2;
789 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
791 if (pv_area_store_would_trash (stack, addr))
794 pv_area_store (stack, addr, 4, regs[regno]);
796 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
798 int rd = bits (insn, 0, 2);
799 int rn = bits (insn, 3, 5);
802 offset = bits (insn, 6, 10) << 2;
803 addr = pv_add_constant (regs[rn], offset);
805 if (pv_area_store_would_trash (stack, addr))
808 pv_area_store (stack, addr, 4, regs[rd]);
810 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
811 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
812 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
813 /* Ignore stores of argument registers to the stack. */
815 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
816 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
817 /* Ignore block loads from the stack, potentially copying
818 parameters from memory. */
820 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
821 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
822 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
823 /* Similarly ignore single loads from the stack. */
825 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
826 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
827 /* Skip register copies, i.e. saves to another register
828 instead of the stack. */
830 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
831 /* Recognize constant loads; even with small stacks these are necessary
833 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
834 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
836 /* Constant pool loads, for the same reason. */
837 unsigned int constant;
840 loc = start + 4 + bits (insn, 0, 7) * 4;
841 constant = read_memory_unsigned_integer (loc, 4, byte_order);
842 regs[bits (insn, 8, 10)] = pv_constant (constant);
844 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
846 unsigned short inst2;
848 inst2 = read_memory_unsigned_integer (start + 2, 2,
849 byte_order_for_code);
851 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
853 /* BL, BLX. Allow some special function calls when
854 skipping the prologue; GCC generates these before
855 storing arguments to the stack. */
857 int j1, j2, imm1, imm2;
859 imm1 = sbits (insn, 0, 10);
860 imm2 = bits (inst2, 0, 10);
861 j1 = bit (inst2, 13);
862 j2 = bit (inst2, 11);
864 offset = ((imm1 << 12) + (imm2 << 1));
865 offset ^= ((!j2) << 22) | ((!j1) << 23);
867 nextpc = start + 4 + offset;
868 /* For BLX make sure to clear the low bits. */
869 if (bit (inst2, 12) == 0)
870 nextpc = nextpc & 0xfffffffc;
872 if (!skip_prologue_function (gdbarch, nextpc,
873 bit (inst2, 12) != 0))
877 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
879 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
881 pv_t addr = regs[bits (insn, 0, 3)];
884 if (pv_area_store_would_trash (stack, addr))
887 /* Calculate offsets of saved registers. */
888 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
889 if (inst2 & (1 << regno))
891 addr = pv_add_constant (addr, -4);
892 pv_area_store (stack, addr, 4, regs[regno]);
896 regs[bits (insn, 0, 3)] = addr;
899 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
901 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
903 int regno1 = bits (inst2, 12, 15);
904 int regno2 = bits (inst2, 8, 11);
905 pv_t addr = regs[bits (insn, 0, 3)];
907 offset = inst2 & 0xff;
909 addr = pv_add_constant (addr, offset);
911 addr = pv_add_constant (addr, -offset);
913 if (pv_area_store_would_trash (stack, addr))
916 pv_area_store (stack, addr, 4, regs[regno1]);
917 pv_area_store (stack, pv_add_constant (addr, 4),
921 regs[bits (insn, 0, 3)] = addr;
924 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
925 && (inst2 & 0x0c00) == 0x0c00
926 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
928 int regno = bits (inst2, 12, 15);
929 pv_t addr = regs[bits (insn, 0, 3)];
931 offset = inst2 & 0xff;
933 addr = pv_add_constant (addr, offset);
935 addr = pv_add_constant (addr, -offset);
937 if (pv_area_store_would_trash (stack, addr))
940 pv_area_store (stack, addr, 4, regs[regno]);
943 regs[bits (insn, 0, 3)] = addr;
946 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
947 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
949 int regno = bits (inst2, 12, 15);
952 offset = inst2 & 0xfff;
953 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
955 if (pv_area_store_would_trash (stack, addr))
958 pv_area_store (stack, addr, 4, regs[regno]);
961 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
962 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
963 /* Ignore stores of argument registers to the stack. */
966 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
967 && (inst2 & 0x0d00) == 0x0c00
968 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
969 /* Ignore stores of argument registers to the stack. */
972 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
974 && (inst2 & 0x8000) == 0x0000
975 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
976 /* Ignore block loads from the stack, potentially copying
977 parameters from memory. */
980 else if ((insn & 0xffb0) == 0xe950 /* ldrd Rt, Rt2,
982 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
983 /* Similarly ignore dual loads from the stack. */
986 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
987 && (inst2 & 0x0d00) == 0x0c00
988 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
989 /* Similarly ignore single loads from the stack. */
992 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
993 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
994 /* Similarly ignore single loads from the stack. */
997 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
998 && (inst2 & 0x8000) == 0x0000)
1000 unsigned int imm = ((bits (insn, 10, 10) << 11)
1001 | (bits (inst2, 12, 14) << 8)
1002 | bits (inst2, 0, 7));
1004 regs[bits (inst2, 8, 11)]
1005 = pv_add_constant (regs[bits (insn, 0, 3)],
1006 thumb_expand_immediate (imm));
1009 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
1010 && (inst2 & 0x8000) == 0x0000)
1012 unsigned int imm = ((bits (insn, 10, 10) << 11)
1013 | (bits (inst2, 12, 14) << 8)
1014 | bits (inst2, 0, 7));
1016 regs[bits (inst2, 8, 11)]
1017 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
1020 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
1021 && (inst2 & 0x8000) == 0x0000)
1023 unsigned int imm = ((bits (insn, 10, 10) << 11)
1024 | (bits (inst2, 12, 14) << 8)
1025 | bits (inst2, 0, 7));
1027 regs[bits (inst2, 8, 11)]
1028 = pv_add_constant (regs[bits (insn, 0, 3)],
1029 - (CORE_ADDR) thumb_expand_immediate (imm));
1032 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
1033 && (inst2 & 0x8000) == 0x0000)
1035 unsigned int imm = ((bits (insn, 10, 10) << 11)
1036 | (bits (inst2, 12, 14) << 8)
1037 | bits (inst2, 0, 7));
1039 regs[bits (inst2, 8, 11)]
1040 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
1043 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
1045 unsigned int imm = ((bits (insn, 10, 10) << 11)
1046 | (bits (inst2, 12, 14) << 8)
1047 | bits (inst2, 0, 7));
1049 regs[bits (inst2, 8, 11)]
1050 = pv_constant (thumb_expand_immediate (imm));
1053 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
1056 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
1058 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1061 else if (insn == 0xea5f /* mov.w Rd,Rm */
1062 && (inst2 & 0xf0f0) == 0)
1064 int dst_reg = (inst2 & 0x0f00) >> 8;
1065 int src_reg = inst2 & 0xf;
1066 regs[dst_reg] = regs[src_reg];
1069 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1071 /* Constant pool loads. */
1072 unsigned int constant;
1075 offset = bits (insn, 0, 11);
1077 loc = start + 4 + offset;
1079 loc = start + 4 - offset;
1081 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1082 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1085 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1087 /* Constant pool loads. */
1088 unsigned int constant;
1091 offset = bits (insn, 0, 7) << 2;
1093 loc = start + 4 + offset;
1095 loc = start + 4 - offset;
1097 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1098 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1100 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1101 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1104 else if (thumb2_instruction_changes_pc (insn, inst2))
1106 /* Don't scan past anything that might change control flow. */
1111 /* The optimizer might shove anything into the prologue,
1112 so we just skip what we don't recognize. */
1113 unrecognized_pc = start;
1118 else if (thumb_instruction_changes_pc (insn))
1120 /* Don't scan past anything that might change control flow. */
1125 /* The optimizer might shove anything into the prologue,
1126 so we just skip what we don't recognize. */
1127 unrecognized_pc = start;
1134 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1135 paddress (gdbarch, start));
1137 if (unrecognized_pc == 0)
1138 unrecognized_pc = start;
1142 do_cleanups (back_to);
1143 return unrecognized_pc;
1146 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1148 /* Frame pointer is fp. Frame size is constant. */
1149 cache->framereg = ARM_FP_REGNUM;
1150 cache->framesize = -regs[ARM_FP_REGNUM].k;
1152 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1154 /* Frame pointer is r7. Frame size is constant. */
1155 cache->framereg = THUMB_FP_REGNUM;
1156 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1160 /* Try the stack pointer... this is a bit desperate. */
1161 cache->framereg = ARM_SP_REGNUM;
1162 cache->framesize = -regs[ARM_SP_REGNUM].k;
1165 for (i = 0; i < 16; i++)
1166 if (pv_area_find_reg (stack, gdbarch, i, &offset))
1167 cache->saved_regs[i].addr = offset;
1169 do_cleanups (back_to);
1170 return unrecognized_pc;
1174 /* Try to analyze the instructions starting from PC, which load the symbol
1175 __stack_chk_guard. Return the address of the instruction following the load,
1176 set the destination register number in *DESTREG, and set the size of the
1177 loading instructions in *OFFSET. Return 0 if the instructions are not recognized. */
1181 arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
1182 unsigned int *destreg, int *offset)
1184 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1185 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1186 unsigned int low, high, address;
1191 unsigned short insn1
1192 = read_memory_unsigned_integer (pc, 2, byte_order_for_code);
1194 if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
1196 *destreg = bits (insn1, 8, 10);
1198 address = bits (insn1, 0, 7);
1200 else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
1202 unsigned short insn2
1203 = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);
1205 low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1208 = read_memory_unsigned_integer (pc + 4, 2, byte_order_for_code);
1210 = read_memory_unsigned_integer (pc + 6, 2, byte_order_for_code);
1212 /* movt Rd, #const */
1213 if ((insn1 & 0xfbc0) == 0xf2c0)
1215 high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1216 *destreg = bits (insn2, 8, 11);
1218 address = (high << 16 | low);
1225 = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
1227 if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, #immed */
1229 address = bits (insn, 0, 11);
1230 *destreg = bits (insn, 12, 15);
1233 else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1235 low = EXTRACT_MOVW_MOVT_IMM_A (insn);
1238 = read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code);
1240 if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1242 high = EXTRACT_MOVW_MOVT_IMM_A (insn);
1243 *destreg = bits (insn, 12, 15);
1245 address = (high << 16 | low);
1253 /* Try to skip a sequence of instructions used for the stack protector. If PC
1254 points to the first instruction of this sequence, return the address of the
1255 first instruction after this sequence; otherwise, return the original PC.
1257 On ARM, this sequence of instructions consists of three main steps:
1258 Step 1: load symbol __stack_chk_guard,
1259 Step 2: load from address of __stack_chk_guard,
1260 Step 3: store it to somewhere else.
1262 The instructions in steps 2 and 3 are usually the same across ARM
1263 architectures: step 2 is a single instruction 'ldr Rx, [Rn, #0]', and
1264 step 3 is a single instruction 'str Rx, [r7, #immd]'. However, the
1265 instructions in step 1 vary between ARM architectures. On ARMv7, they are:
1268 movw Rn, #:lower16:__stack_chk_guard
1269 movt Rn, #:upper16:__stack_chk_guard
1276 .word __stack_chk_guard
1278 Since ldr/str are very common instructions, we can't use them alone as the
1279 'fingerprint' or 'signature' of the stack protector sequence. Instead we use
1280 the sequence {movw/movt, ldr}/ldr/str plus the symbol __stack_chk_guard, if it
1281 is not stripped, as the 'fingerprint' of a stack protector code sequence. */
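/* As a concrete (illustrative) example, the ARMv7 Thumb-2 form of the
   sequence that arm_skip_stack_protector recognizes and skips is:

	movw	r3, #:lower16:__stack_chk_guard
	movt	r3, #:upper16:__stack_chk_guard
	ldr	r3, [r3, #0]
	str	r3, [r7, #imm]

   with the register and the store offset depending on the compiler's
   choices.  */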
1284 arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
1286 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1287 unsigned int basereg;
1288 struct minimal_symbol *stack_chk_guard;
1290 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1293 /* Try to parse the instructions in Step 1. */
1294 addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1299 stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
1300 /* If the symbol's name doesn't start with '__stack_chk_guard', this
1301 instruction sequence is not for the stack protector. If the symbol has been
1302 stripped, we conservatively assume the sequence is for the stack protector. */
1304 && strncmp (SYMBOL_LINKAGE_NAME (stack_chk_guard), "__stack_chk_guard",
1305 strlen ("__stack_chk_guard")) != 0)
1310 unsigned int destreg;
1312 = read_memory_unsigned_integer (pc + offset, 2, byte_order_for_code);
1314 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1315 if ((insn & 0xf800) != 0x6800)
1317 if (bits (insn, 3, 5) != basereg)
1319 destreg = bits (insn, 0, 2);
1321 insn = read_memory_unsigned_integer (pc + offset + 2, 2,
1322 byte_order_for_code);
1323 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1324 if ((insn & 0xf800) != 0x6000)
1326 if (destreg != bits (insn, 0, 2))
1331 unsigned int destreg;
1333 = read_memory_unsigned_integer (pc + offset, 4, byte_order_for_code);
1335 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1336 if ((insn & 0x0e500000) != 0x04100000)
1338 if (bits (insn, 16, 19) != basereg)
1340 destreg = bits (insn, 12, 15);
1341 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1342 insn = read_memory_unsigned_integer (pc + offset + 4,
1343 4, byte_order_for_code);
1344 if ((insn & 0x0e500000) != 0x04000000)
1346 if (bits (insn, 12, 15) != destreg)
1349 /* The total size of the two ldr/str instructions is 4 bytes on Thumb-2 and 8 bytes on ARM. */
1352 return pc + offset + 4;
1354 return pc + offset + 8;
1357 /* Advance the PC across any function entry prologue instructions to
1358 reach some "real" code.
1360 The APCS (ARM Procedure Call Standard) defines the following
1364 [stmfd sp!, {a1,a2,a3,a4}]
1365 stmfd sp!, {...,fp,ip,lr,pc}
1366 [stfe f7, [sp, #-12]!]
1367 [stfe f6, [sp, #-12]!]
1368 [stfe f5, [sp, #-12]!]
1369 [stfe f4, [sp, #-12]!]
1370 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1373 arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1375 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1378 CORE_ADDR func_addr, limit_pc;
1380 /* See if we can determine the end of the prologue via the symbol table.
1381 If so, then return either PC, or the PC after the prologue, whichever is greater. */
1383 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
1385 CORE_ADDR post_prologue_pc
1386 = skip_prologue_using_sal (gdbarch, func_addr);
1387 struct symtab *s = find_pc_symtab (func_addr);
1389 if (post_prologue_pc)
1391 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1394 /* GCC always emits a line note before the prologue and another
1395 one after, even if the two are at the same address or on the
1396 same line. Take advantage of this so that we do not need to
1397 know every instruction that might appear in the prologue. We
1398 will have producer information for most binaries; if it is
1399 missing (e.g. for -gstabs), assume the GNU tools. */
1400 if (post_prologue_pc
1402 || s->producer == NULL
1403 || strncmp (s->producer, "GNU ", sizeof ("GNU ") - 1) == 0))
1404 return post_prologue_pc;
1406 if (post_prologue_pc != 0)
1408 CORE_ADDR analyzed_limit;
1410 /* For non-GCC compilers, make sure the entire line is an
1411 acceptable prologue; GDB will round this function's
1412 return value up to the end of the following line so we
1413 can not skip just part of a line (and we do not want to).
1415 RealView does not treat the prologue specially, but does
1416 associate prologue code with the opening brace; so this
1417 lets us skip the first line if we think it is the opening brace. */
1419 if (arm_pc_is_thumb (gdbarch, func_addr))
1420 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1421 post_prologue_pc, NULL);
1423 analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
1424 post_prologue_pc, NULL);
1426 if (analyzed_limit != post_prologue_pc)
1429 return post_prologue_pc;
1433 /* Can't determine prologue from the symbol table; need to examine instructions. */
1436 /* Find an upper limit on the function prologue using the debug
1437 information. If the debug information could not be used to provide
1438 that bound, then use an arbitrarily large number as the upper bound. */
1439 /* Like arm_scan_prologue, stop no later than pc + 64. */
1440 limit_pc = skip_prologue_using_sal (gdbarch, pc);
1442 limit_pc = pc + 64; /* Magic. */
1445 /* Check if this is Thumb code. */
1446 if (arm_pc_is_thumb (gdbarch, pc))
1447 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1449 for (skip_pc = pc; skip_pc < limit_pc; skip_pc += 4)
1451 inst = read_memory_unsigned_integer (skip_pc, 4, byte_order_for_code);
1453 /* "mov ip, sp" is no longer a required part of the prologue. */
1454 if (inst == 0xe1a0c00d) /* mov ip, sp */
1457 if ((inst & 0xfffff000) == 0xe28dc000) /* add ip, sp #n */
1460 if ((inst & 0xfffff000) == 0xe24dc000) /* sub ip, sp #n */
1463 /* Some prologues begin with "str lr, [sp, #-4]!". */
1464 if (inst == 0xe52de004) /* str lr, [sp, #-4]! */
1467 if ((inst & 0xfffffff0) == 0xe92d0000) /* stmfd sp!,{a1,a2,a3,a4} */
1470 if ((inst & 0xfffff800) == 0xe92dd800) /* stmfd sp!,{fp,ip,lr,pc} */
1473 /* Any insns after this point may float into the code, if it makes
1474 for better instruction scheduling, so we skip them only if we
1475 find them, but still consider the function to be frame-ful. */
1477 /* We may have either one sfmfd instruction here, or several stfe
1478 insns, depending on the version of floating point code we support. */
1480 if ((inst & 0xffbf0fff) == 0xec2d0200) /* sfmfd fn, <cnt>, [sp]! */
1483 if ((inst & 0xffff8fff) == 0xed6d0103) /* stfe fn, [sp, #-12]! */
1486 if ((inst & 0xfffff000) == 0xe24cb000) /* sub fp, ip, #nn */
1489 if ((inst & 0xfffff000) == 0xe24dd000) /* sub sp, sp, #nn */
1492 if ((inst & 0xffffc000) == 0xe54b0000 /* strb r(0123),[r11,#-nn] */
1493 || (inst & 0xffffc0f0) == 0xe14b00b0 /* strh r(0123),[r11,#-nn] */
1494 || (inst & 0xffffc000) == 0xe50b0000) /* str r(0123),[r11,#-nn] */
1497 if ((inst & 0xffffc000) == 0xe5cd0000 /* strb r(0123),[sp,#nn] */
1498 || (inst & 0xffffc0f0) == 0xe1cd00b0 /* strh r(0123),[sp,#nn] */
1499 || (inst & 0xffffc000) == 0xe58d0000) /* str r(0123),[sp,#nn] */
1502 /* Un-recognized instruction; stop scanning. */
1506 return skip_pc; /* End of prologue. */
1510 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1511 This function decodes a Thumb function prologue to determine:
1512 1) the size of the stack frame
1513 2) which registers are saved on it
1514 3) the offsets of saved regs
1515 4) the offset from the stack pointer to the frame pointer
1517 A typical Thumb function prologue would create this stack frame
1518 (offsets relative to FP)
1519 old SP -> 24 stack parameters
1522 R7 -> 0 local variables (16 bytes)
1523 SP -> -12 additional stack space (12 bytes)
1524 The frame size would thus be 36 bytes, and the frame offset would be
1525 12 bytes. The frame register is R7.
1527 The comments for thumb_skip_prolog() describe the algorithm we use
1528 to detect the end of the prologue. */
1532 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1533 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1535 CORE_ADDR prologue_start;
1536 CORE_ADDR prologue_end;
1538 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1541 /* See comment in arm_scan_prologue for an explanation of this heuristic. */
1543 if (prologue_end > prologue_start + 64)
1545 prologue_end = prologue_start + 64;
1549 /* We're in the boondocks: we have no idea where the start of the function is. */
1553 prologue_end = min (prologue_end, prev_pc);
1555 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1558 /* Return 1 if THIS_INSTR might change control flow, 0 otherwise. */
1561 arm_instruction_changes_pc (uint32_t this_instr)
1563 if (bits (this_instr, 28, 31) == INST_NV)
1564 /* Unconditional instructions. */
1565 switch (bits (this_instr, 24, 27))
1569 /* Branch with Link and change to Thumb. */
1574 /* Coprocessor register transfer. */
1575 if (bits (this_instr, 12, 15) == 15)
1576 error (_("Invalid update to pc in instruction"));
1582 switch (bits (this_instr, 25, 27))
1585 if (bits (this_instr, 23, 24) == 2 && bit (this_instr, 20) == 0)
1587 /* Multiplies and extra load/stores. */
1588 if (bit (this_instr, 4) == 1 && bit (this_instr, 7) == 1)
1589 /* Neither multiplies nor extension load/stores are allowed to modify PC. */
1593 /* Otherwise, miscellaneous instructions. */
1595 /* BX <reg>, BXJ <reg>, BLX <reg> */
1596 if (bits (this_instr, 4, 27) == 0x12fff1
1597 || bits (this_instr, 4, 27) == 0x12fff2
1598 || bits (this_instr, 4, 27) == 0x12fff3)
1601 /* Other miscellaneous instructions are unpredictable if they modify PC. */
1605 /* Data processing instruction. Fall through. */
1608 if (bits (this_instr, 12, 15) == 15)
1615 /* Media instructions and architecturally undefined instructions. */
1616 if (bits (this_instr, 25, 27) == 3 && bit (this_instr, 4) == 1)
1620 if (bit (this_instr, 20) == 0)
1624 if (bits (this_instr, 12, 15) == ARM_PC_REGNUM)
1630 /* Load/store multiple. */
1631 if (bit (this_instr, 20) == 1 && bit (this_instr, 15) == 1)
1637 /* Branch and branch with link. */
1642 /* Coprocessor transfers or SWIs can not affect PC. */
1646 internal_error (__FILE__, __LINE__, _("bad value in switch"));
1650 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1651 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1652 fill it in. Return the first address not recognized as a prologue instruction.
1655 We recognize all the instructions typically found in ARM prologues,
1656 plus harmless instructions which can be skipped (either for analysis
1657 purposes, or a more restrictive set that can be skipped when finding
1658 the end of the prologue). */
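/* For orientation, a classic APCS frame prologue that this analyzer fully
   recognizes looks like the following (illustrative):

	mov	ip, sp
	stmfd	sp!, {fp, ip, lr, pc}
	sub	fp, ip, #4
	sub	sp, sp, #16

   i.e. snapshot SP in IP, push the frame-chain registers, establish FP,
   then reserve local stack space.  */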
1661 arm_analyze_prologue (struct gdbarch *gdbarch,
1662 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1663 struct arm_prologue_cache *cache)
1665 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1666 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1668 CORE_ADDR offset, current_pc;
1669 pv_t regs[ARM_FPS_REGNUM];
1670 struct pv_area *stack;
1671 struct cleanup *back_to;
1672 int framereg, framesize;
1673 CORE_ADDR unrecognized_pc = 0;
1675 /* Search the prologue looking for instructions that set up the
1676 frame pointer, adjust the stack pointer, and save registers.
1678 Be careful, however, and if it doesn't look like a prologue,
1679 don't try to scan it. If, for instance, a frameless function
1680 begins with stmfd sp!, then we will tell ourselves there is
1681 a frame, which will confuse stack traceback, as well as "finish"
1682 and other operations that rely on a knowledge of the stack frame. */
1685 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1686 regs[regno] = pv_register (regno, 0);
1687 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1688 back_to = make_cleanup_free_pv_area (stack);
1690 for (current_pc = prologue_start;
1691 current_pc < prologue_end;
1695 = read_memory_unsigned_integer (current_pc, 4, byte_order_for_code);
1697 if (insn == 0xe1a0c00d) /* mov ip, sp */
1699 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1702 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1703 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1705 unsigned imm = insn & 0xff; /* immediate value */
1706 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1707 int rd = bits (insn, 12, 15);
1708 imm = (imm >> rot) | (imm << (32 - rot));
1709 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1712 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1713 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1715 unsigned imm = insn & 0xff; /* immediate value */
1716 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1717 int rd = bits (insn, 12, 15);
1718 imm = (imm >> rot) | (imm << (32 - rot));
1719 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1722 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1725 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1727 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1728 pv_area_store (stack, regs[ARM_SP_REGNUM], 4,
1729 regs[bits (insn, 12, 15)]);
1732 else if ((insn & 0xffff0000) == 0xe92d0000)
1733 /* stmfd sp!, {..., fp, ip, lr, pc}
1735 stmfd sp!, {a1, a2, a3, a4} */
1737 int mask = insn & 0xffff;
1739 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1742 /* Calculate offsets of saved registers. */
1743 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1744 if (mask & (1 << regno))
1747 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1748 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
1751 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1752 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1753 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1755 /* No need to add this to saved_regs -- it's just an arg reg. */
1758 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1759 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1760 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1762 /* No need to add this to saved_regs -- it's just an arg reg. */
1765 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1767 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1769 /* No need to add this to saved_regs -- it's just arg regs. */
1772 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1774 unsigned imm = insn & 0xff; /* immediate value */
1775 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1776 imm = (imm >> rot) | (imm << (32 - rot));
1777 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1779 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1781 unsigned imm = insn & 0xff; /* immediate value */
1782 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1783 imm = (imm >> rot) | (imm << (32 - rot));
1784 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
1786 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1788 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1790 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1793 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1794 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
1795 pv_area_store (stack, regs[ARM_SP_REGNUM], 12, regs[regno]);
1797 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1799 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1801 int n_saved_fp_regs;
1802 unsigned int fp_start_reg, fp_bound_reg;
1804 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1807 if ((insn & 0x800) == 0x800) /* N0 is set */
1809 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1810 n_saved_fp_regs = 3;
1812 n_saved_fp_regs = 1;
1816 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1817 n_saved_fp_regs = 2;
1819 n_saved_fp_regs = 4;
1822 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
1823 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1824 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
1826 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1827 pv_area_store (stack, regs[ARM_SP_REGNUM], 12,
1828 regs[fp_start_reg++]);
1831 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1833 /* Allow some special function calls when skipping the
1834 prologue; GCC generates these before storing arguments to
1836 CORE_ADDR dest = BranchDest (current_pc, insn);
1838 if (skip_prologue_function (gdbarch, dest, 0))
1843 else if ((insn & 0xf0000000) != 0xe0000000)
1844 break; /* Condition not true, exit early. */
1845 else if (arm_instruction_changes_pc (insn))
1846 /* Don't scan past anything that might change control flow. */
1848 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1849 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1850 /* Ignore block loads from the stack, potentially copying
1851 parameters from memory. */
1853 else if ((insn & 0xfc500000) == 0xe4100000
1854 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1855 /* Similarly ignore single loads from the stack. */
1857 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1858 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1859 register instead of the stack. */
1863 /* The optimizer might shove anything into the prologue,
1864 so we just skip what we don't recognize. */
1865 unrecognized_pc = current_pc;
1870 if (unrecognized_pc == 0)
1871 unrecognized_pc = current_pc;
1873 /* The frame size is just the distance from the frame register
1874 to the original stack pointer. */
1875 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1877 /* Frame pointer is fp. */
1878 framereg = ARM_FP_REGNUM;
1879 framesize = -regs[ARM_FP_REGNUM].k;
1883 /* Try the stack pointer... this is a bit desperate. */
1884 framereg = ARM_SP_REGNUM;
1885 framesize = -regs[ARM_SP_REGNUM].k;
1890 cache->framereg = framereg;
1891 cache->framesize = framesize;
1893 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1894 if (pv_area_find_reg (stack, gdbarch, regno, &offset))
1895 cache->saved_regs[regno].addr = offset;
1899 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1900 paddress (gdbarch, unrecognized_pc));
1902 do_cleanups (back_to);
1903 return unrecognized_pc;
1907 arm_scan_prologue (struct frame_info *this_frame,
1908 struct arm_prologue_cache *cache)
1910 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1911 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1913 CORE_ADDR prologue_start, prologue_end, current_pc;
1914 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1915 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
1916 pv_t regs[ARM_FPS_REGNUM];
1917 struct pv_area *stack;
1918 struct cleanup *back_to;
1921 /* Assume there is no frame until proven otherwise. */
1922 cache->framereg = ARM_SP_REGNUM;
1923 cache->framesize = 0;
1925 /* Check for Thumb prologue. */
1926 if (arm_frame_is_thumb (this_frame))
1928 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1932 /* Find the function prologue. If we can't find the function in
1933 the symbol table, peek in the stack frame to find the PC. */
1934 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1937 /* One way to find the end of the prologue (which works well
1938 for unoptimized code) is to do the following:
1940 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1943 prologue_end = prev_pc;
1944 else if (sal.end < prologue_end)
1945 prologue_end = sal.end;
1947 This mechanism is very accurate so long as the optimizer
1948 doesn't move any instructions from the function body into the
1949 prologue. If this happens, sal.end will be the last
1950 instruction in the first hunk of prologue code just before
1951 the first instruction that the scheduler has moved from
1952 the body to the prologue.
1954 In order to make sure that we scan all of the prologue
1955 instructions, we use a slightly less accurate mechanism which
1956 may scan more than necessary. To help compensate for this
1957 lack of accuracy, the prologue scanning loop below contains
1958 several clauses which'll cause the loop to terminate early if
1959 an implausible prologue instruction is encountered.
1965 is a suitable endpoint since it accounts for the largest
1966 possible prologue plus up to five instructions inserted by the scheduler. */
1969 if (prologue_end > prologue_start + 64)
1971 prologue_end = prologue_start + 64; /* See above. */
1976 /* We have no symbol information. Our only option is to assume this
1977 function has a standard stack frame and the normal frame register.
1978 Then, we can find the value of our frame pointer on entrance to
1979 the callee (or at the present moment if this is the innermost frame).
1980 The value stored there should be the address of the stmfd + 8. */
1981 CORE_ADDR frame_loc;
1982 LONGEST return_value;
1984 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
1985 if (!safe_read_memory_integer (frame_loc, 4, byte_order, &return_value))
1989 prologue_start = gdbarch_addr_bits_remove
1990 (gdbarch, return_value) - 8;
1991 prologue_end = prologue_start + 64; /* See above. */
1995 if (prev_pc < prologue_end)
1996 prologue_end = prev_pc;
1998 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
2001 static struct arm_prologue_cache *
2002 arm_make_prologue_cache (struct frame_info *this_frame)
2005 struct arm_prologue_cache *cache;
2006 CORE_ADDR unwound_fp;
2008 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2009 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2011 arm_scan_prologue (this_frame, cache);
2013 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
2014 if (unwound_fp == 0)
2017 cache->prev_sp = unwound_fp + cache->framesize;
2019 /* Calculate actual addresses of saved registers using offsets
2020 determined by arm_scan_prologue. */
2021 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2022 if (trad_frame_addr_p (cache->saved_regs, reg))
2023 cache->saved_regs[reg].addr += cache->prev_sp;
2028 /* Our frame ID for a normal frame is the current function's starting PC
2029 and the caller's SP when we were called. */
2032 arm_prologue_this_id (struct frame_info *this_frame,
2034 struct frame_id *this_id)
2036 struct arm_prologue_cache *cache;
2040 if (*this_cache == NULL)
2041 *this_cache = arm_make_prologue_cache (this_frame);
2042 cache = *this_cache;
2044 /* This is meant to halt the backtrace at "_start". */
2045 pc = get_frame_pc (this_frame);
2046 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
2049 /* If we've hit a wall, stop. */
2050 if (cache->prev_sp == 0)
2053 /* Use function start address as part of the frame ID. If we cannot
2054 identify the start address (due to missing symbol information),
2055 fall back to just using the current PC. */
2056 func = get_frame_func (this_frame);
2060 id = frame_id_build (cache->prev_sp, func);
2064 static struct value *
2065 arm_prologue_prev_register (struct frame_info *this_frame,
2069 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2070 struct arm_prologue_cache *cache;
2072 if (*this_cache == NULL)
2073 *this_cache = arm_make_prologue_cache (this_frame);
2074 cache = *this_cache;
2076 /* If we are asked to unwind the PC, then we need to return the LR
2077 instead. The prologue may save PC, but it will point into this
2078 frame's prologue, not the next frame's resume location. Also
2079 strip the saved T bit. A valid LR may have the low bit set, but
2080 a valid PC never does. */
2081 if (prev_regnum == ARM_PC_REGNUM)
2085 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2086 return frame_unwind_got_constant (this_frame, prev_regnum,
2087 arm_addr_bits_remove (gdbarch, lr));
2090 /* SP is generally not saved to the stack, but this frame is
2091 identified by the next frame's stack pointer at the time of the call.
2092 The value was already reconstructed into PREV_SP. */
2093 if (prev_regnum == ARM_SP_REGNUM)
2094 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
2096 /* The CPSR may have been changed by the call instruction and by the
2097 called function. The only bit we can reconstruct is the T bit,
2098 by checking the low bit of LR as of the call. This is a reliable
2099 indicator of Thumb-ness except for some ARM v4T pre-interworking
2100 Thumb code, which could get away with a clear low bit as long as
2101 the called function did not use bx. Guess that all other
2102 bits are unchanged; the condition flags are presumably lost,
2103 but the processor status is likely valid. */
2104 if (prev_regnum == ARM_PS_REGNUM)
2107 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
2109 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
2110 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2111 if (IS_THUMB_ADDR (lr))
2115 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
2118 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
2122 struct frame_unwind arm_prologue_unwind = {
2124 default_frame_unwind_stop_reason,
2125 arm_prologue_this_id,
2126 arm_prologue_prev_register,
2128 default_frame_sniffer
2131 /* Maintain a list of ARM exception table entries per objfile, similar to the
2132 list of mapping symbols. We only cache entries for standard ARM-defined
2133 personality routines; the cache will contain only the frame unwinding
2134 instructions associated with the entry (not the descriptors). */
2136 static const struct objfile_data *arm_exidx_data_key;
2138 struct arm_exidx_entry
2143 typedef struct arm_exidx_entry arm_exidx_entry_s;
2144 DEF_VEC_O(arm_exidx_entry_s);
2146 struct arm_exidx_data
2148 VEC(arm_exidx_entry_s) **section_maps;
2152 arm_exidx_data_free (struct objfile *objfile, void *arg)
2154 struct arm_exidx_data *data = arg;
2157 for (i = 0; i < objfile->obfd->section_count; i++)
2158 VEC_free (arm_exidx_entry_s, data->section_maps[i]);
2162 arm_compare_exidx_entries (const struct arm_exidx_entry *lhs,
2163 const struct arm_exidx_entry *rhs)
2165 return lhs->addr < rhs->addr;
2168 static struct obj_section *
2169 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2171 struct obj_section *osect;
2173 ALL_OBJFILE_OSECTIONS (objfile, osect)
2174 if (bfd_get_section_flags (objfile->obfd,
2175 osect->the_bfd_section) & SEC_ALLOC)
2177 bfd_vma start, size;
2178 start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
2179 size = bfd_get_section_size (osect->the_bfd_section);
2181 if (start <= vma && vma < start + size)
2188 /* Parse contents of exception table and exception index sections
2189 of OBJFILE, and fill in the exception table entry cache.
2191 For each entry that refers to a standard ARM-defined personality
2192 routine, extract the frame unwinding instructions (from either
2193 the index or the table section). The unwinding instructions are normalized by:
2195 - extracting them from the rest of the table data
2196 - converting to host endianness
2197 - appending the implicit 0xb0 ("Finish") code
2199 The extracted and normalized instructions are stored for later
2200 retrieval by the arm_find_exidx_entry routine. */
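/* For reference, each .ARM.exidx entry handled below is a pair of 32-bit
   words: the first is a prel31 (31-bit place-relative) offset to the start
   of the function it covers, and the second is either the value 1
   (EXIDX_CANTUNWIND), an inline "short form" entry with bit 31 set (top
   byte 0x80, followed by up to three unwind opcode bytes), or a prel31
   offset to a longer entry in .ARM.extab.  This matches the decoding
   performed in the loop below.  */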
2203 arm_exidx_new_objfile (struct objfile *objfile)
2205 struct cleanup *cleanups;
2206 struct arm_exidx_data *data;
2207 asection *exidx, *extab;
2208 bfd_vma exidx_vma = 0, extab_vma = 0;
2209 bfd_size_type exidx_size = 0, extab_size = 0;
2210 gdb_byte *exidx_data = NULL, *extab_data = NULL;
2213 /* If we've already touched this file, do nothing. */
2214 if (!objfile || objfile_data (objfile, arm_exidx_data_key) != NULL)
2216 cleanups = make_cleanup (null_cleanup, NULL);
2218 /* Read contents of exception table and index. */
2219 exidx = bfd_get_section_by_name (objfile->obfd, ".ARM.exidx");
2222 exidx_vma = bfd_section_vma (objfile->obfd, exidx);
2223 exidx_size = bfd_get_section_size (exidx);
2224 exidx_data = xmalloc (exidx_size);
2225 make_cleanup (xfree, exidx_data);
2227 if (!bfd_get_section_contents (objfile->obfd, exidx,
2228 exidx_data, 0, exidx_size))
2230 do_cleanups (cleanups);
2235 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2238 extab_vma = bfd_section_vma (objfile->obfd, extab);
2239 extab_size = bfd_get_section_size (extab);
2240 extab_data = xmalloc (extab_size);
2241 make_cleanup (xfree, extab_data);
2243 if (!bfd_get_section_contents (objfile->obfd, extab,
2244 extab_data, 0, extab_size))
2246 do_cleanups (cleanups);
2251 /* Allocate exception table data structure. */
2252 data = OBSTACK_ZALLOC (&objfile->objfile_obstack, struct arm_exidx_data);
2253 set_objfile_data (objfile, arm_exidx_data_key, data);
2254 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
2255 objfile->obfd->section_count,
2256 VEC(arm_exidx_entry_s) *);
2258 /* Fill in exception table. */
2259 for (i = 0; i < exidx_size / 8; i++)
2261 struct arm_exidx_entry new_exidx_entry;
2262 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8);
2263 bfd_vma val = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8 + 4);
2264 bfd_vma addr = 0, word = 0;
2265 int n_bytes = 0, n_words = 0;
2266 struct obj_section *sec;
2267 gdb_byte *entry = NULL;
2269 /* Extract address of start of function. */
2270 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2271 idx += exidx_vma + i * 8;
2273 /* Find section containing function and compute section offset. */
2274 sec = arm_obj_section_from_vma (objfile, idx);
2277 idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);
2279 /* Determine address of exception table entry. */
2282 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2284 else if ((val & 0xff000000) == 0x80000000)
2286 /* Exception table entry embedded in .ARM.exidx
2287 -- must be short form. */
2291 else if (!(val & 0x80000000))
2293 /* Exception table entry in .ARM.extab. */
2294 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2295 addr += exidx_vma + i * 8 + 4;
2297 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_size)
2299 word = bfd_h_get_32 (objfile->obfd,
2300 extab_data + addr - extab_vma);
2303 if ((word & 0xff000000) == 0x80000000)
2308 else if ((word & 0xff000000) == 0x81000000
2309 || (word & 0xff000000) == 0x82000000)
2313 n_words = ((word >> 16) & 0xff);
2315 else if (!(word & 0x80000000))
2318 struct obj_section *pers_sec;
2319 int gnu_personality = 0;
2321 /* Custom personality routine. */
2322 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2323 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2325 /* Check whether we've got one of the variants of the
2326 GNU personality routines. */
2327 pers_sec = arm_obj_section_from_vma (objfile, pers);
2330 static const char *personality[] =
2332 "__gcc_personality_v0",
2333 "__gxx_personality_v0",
2334 "__gcj_personality_v0",
2335 "__gnu_objc_personality_v0",
2339 CORE_ADDR pc = pers + obj_section_offset (pers_sec);
2342 for (k = 0; personality[k]; k++)
2343 if (lookup_minimal_symbol_by_pc_name
2344 (pc, personality[k], objfile))
2346 gnu_personality = 1;
2351 /* If so, the next word contains a word count in the high
2352 byte, followed by the same unwind instructions as the
2353 pre-defined forms. */
2355 && addr + 4 <= extab_vma + extab_size)
2357 word = bfd_h_get_32 (objfile->obfd,
2358 extab_data + addr - extab_vma);
2361 n_words = ((word >> 24) & 0xff);
2367 /* Sanity check address. */
2369 if (addr < extab_vma || addr + 4 * n_words > extab_vma + extab_size)
2370 n_words = n_bytes = 0;
2372 /* The unwind instructions reside in WORD (only the N_BYTES least
2373 significant bytes are valid), followed by N_WORDS words in the
2374 extab section starting at ADDR. */
2375 if (n_bytes || n_words)
2377 gdb_byte *p = entry = obstack_alloc (&objfile->objfile_obstack,
2378 n_bytes + n_words * 4 + 1);
2381 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2385 word = bfd_h_get_32 (objfile->obfd,
2386 extab_data + addr - extab_vma);
2389 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2390 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2391 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2392 *p++ = (gdb_byte) (word & 0xff);
2395 /* Implied "Finish" to terminate the list. */
2399 /* Push entry onto vector. They are guaranteed to always
2400 appear in order of increasing addresses. */
2401 new_exidx_entry.addr = idx;
2402 new_exidx_entry.entry = entry;
2403 VEC_safe_push (arm_exidx_entry_s,
2404 data->section_maps[sec->the_bfd_section->index],
2408 do_cleanups (cleanups);
2411 /* Search for the exception table entry covering MEMADDR. If one is found,
2412 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2413 set *START to the start of the region covered by this entry. */
2416 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2418 struct obj_section *sec;
2420 sec = find_pc_section (memaddr);
2423 struct arm_exidx_data *data;
2424 VEC(arm_exidx_entry_s) *map;
2425 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2428 data = objfile_data (sec->objfile, arm_exidx_data_key);
2431 map = data->section_maps[sec->the_bfd_section->index];
2432 if (!VEC_empty (arm_exidx_entry_s, map))
2434 struct arm_exidx_entry *map_sym;
2436 idx = VEC_lower_bound (arm_exidx_entry_s, map, &map_key,
2437 arm_compare_exidx_entries);
2439 /* VEC_lower_bound finds the earliest ordered insertion
2440 point. If the following symbol starts at this exact
2441 address, we use that; otherwise, the preceding
2442 exception table entry covers this address. */
2443 if (idx < VEC_length (arm_exidx_entry_s, map))
2445 map_sym = VEC_index (arm_exidx_entry_s, map, idx);
2446 if (map_sym->addr == map_key.addr)
2449 *start = map_sym->addr + obj_section_addr (sec);
2450 return map_sym->entry;
2456 map_sym = VEC_index (arm_exidx_entry_s, map, idx - 1);
2458 *start = map_sym->addr + obj_section_addr (sec);
2459 return map_sym->entry;
2468 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2469 instruction list from the ARM exception table entry ENTRY, allocate and
2470 return a prologue cache structure describing how to unwind this frame.
2472 Return NULL if the unwinding instruction list contains a "spare",
2473 "reserved" or "refuse to unwind" instruction as defined in section
2474 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2475 for the ARM Architecture" document. */
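/* As a rough illustration, a typical GCC-generated inline index entry such
   as 0x80978408 yields the opcode sequence 0x97 0x84 0x08 (plus the
   implicit 0xb0 appended when the entry was cached), which decodes as:
     0x97       vsp = r7        (frame pointer becomes the virtual SP)
     0x84 0x08  pop {r7, lr}    (mask 0x408: bits 3 and 10)
     0xb0       finish          (PC was not popped, so PC = LR)
   i.e. it undoes a "push {r7, lr}; add r7, sp, #0" prologue.  */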
2477 static struct arm_prologue_cache *
2478 arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2483 struct arm_prologue_cache *cache;
2484 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2485 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2491 /* Whenever we reload SP, we have to retrieve its actual
2492 value in the current frame. */
2495 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2497 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2498 vsp = get_frame_register_unsigned (this_frame, reg);
2502 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
2503 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2509 /* Decode next unwind instruction. */
2512 if ((insn & 0xc0) == 0)
2514 int offset = insn & 0x3f;
2515 vsp += (offset << 2) + 4;
2517 else if ((insn & 0xc0) == 0x40)
2519 int offset = insn & 0x3f;
2520 vsp -= (offset << 2) + 4;
2522 else if ((insn & 0xf0) == 0x80)
2524 int mask = ((insn & 0xf) << 8) | *entry++;
2527 /* The special case of an all-zero mask identifies
2528 "Refuse to unwind". We return NULL to fall back
2529 to the prologue analyzer. */
2533 /* Pop registers r4..r15 under mask. */
2534 for (i = 0; i < 12; i++)
2535 if (mask & (1 << i))
2537 cache->saved_regs[4 + i].addr = vsp;
2541 /* Special-case popping SP -- we need to reload vsp. */
2542 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2545 else if ((insn & 0xf0) == 0x90)
2547 int reg = insn & 0xf;
2549 /* Reserved cases. */
2550 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2553 /* Set SP from another register and mark VSP for reload. */
2554 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2557 else if ((insn & 0xf0) == 0xa0)
2559 int count = insn & 0x7;
2560 int pop_lr = (insn & 0x8) != 0;
2563 /* Pop r4..r[4+count]. */
2564 for (i = 0; i <= count; i++)
2566 cache->saved_regs[4 + i].addr = vsp;
2570 /* If indicated by flag, pop LR as well. */
2573 cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
2577 else if (insn == 0xb0)
2579 /* We could only have updated PC by popping into it; if so, it
2580 will show up as an address. Otherwise, copy LR into PC. */
2581 if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
2582 cache->saved_regs[ARM_PC_REGNUM]
2583 = cache->saved_regs[ARM_LR_REGNUM];
2588 else if (insn == 0xb1)
2590 int mask = *entry++;
2593 /* All-zero mask and mask >= 16 is "spare". */
2594 if (mask == 0 || mask >= 16)
2597 /* Pop r0..r3 under mask. */
2598 for (i = 0; i < 4; i++)
2599 if (mask & (1 << i))
2601 cache->saved_regs[i].addr = vsp;
2605 else if (insn == 0xb2)
2607 ULONGEST offset = 0;
2612 offset |= (*entry & 0x7f) << shift;
2615 while (*entry++ & 0x80);
2617 vsp += 0x204 + (offset << 2);
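          /* For example, the byte sequence 0xb2 0x0a gives offset = 0x0a,
             so vsp is advanced by 0x204 + (0x0a << 2) = 0x22c bytes.  */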
2619 else if (insn == 0xb3)
2621 int start = *entry >> 4;
2622 int count = (*entry++) & 0xf;
2625 /* Only registers D0..D15 are valid here. */
2626 if (start + count >= 16)
2629 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2630 for (i = 0; i <= count; i++)
2632 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2636 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2639 else if ((insn & 0xf8) == 0xb8)
2641 int count = insn & 0x7;
2644 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2645 for (i = 0; i <= count; i++)
2647 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2651 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2654 else if (insn == 0xc6)
2656 int start = *entry >> 4;
2657 int count = (*entry++) & 0xf;
2660 /* Only registers WR0..WR15 are valid. */
2661 if (start + count >= 16)
2664 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2665 for (i = 0; i <= count; i++)
2667 cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
2671 else if (insn == 0xc7)
2673 int mask = *entry++;
2676 /* All-zero mask and mask >= 16 is "spare". */
2677 if (mask == 0 || mask >= 16)
2680 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2681 for (i = 0; i < 4; i++)
2682 if (mask & (1 << i))
2684 cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
2688 else if ((insn & 0xf8) == 0xc0)
2690 int count = insn & 0x7;
2693 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2694 for (i = 0; i <= count; i++)
2696 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
2700 else if (insn == 0xc8)
2702 int start = *entry >> 4;
2703 int count = (*entry++) & 0xf;
2706 /* Only registers D0..D31 are valid. */
2707 if (start + count >= 16)
2710 /* Pop VFP double-precision registers
2711 D[16+start]..D[16+start+count]. */
2712 for (i = 0; i <= count; i++)
2714 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
2718 else if (insn == 0xc9)
2720 int start = *entry >> 4;
2721 int count = (*entry++) & 0xf;
2724 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2725 for (i = 0; i <= count; i++)
2727 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2731 else if ((insn & 0xf8) == 0xd0)
2733 int count = insn & 0x7;
2736 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2737 for (i = 0; i <= count; i++)
2739 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2745 /* Everything else is "spare". */
2750 /* If we restore SP from a register, assume this was the frame register.
2751 Otherwise just fall back to SP as frame register. */
2752 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2753 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2755 cache->framereg = ARM_SP_REGNUM;
2757 /* Determine offset to previous frame. */
2759 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2761 /* We already got the previous SP. */
2762 cache->prev_sp = vsp;
2767 /* Unwinding via ARM exception table entries. Note that the sniffer
2768 already computes a filled-in prologue cache, which is then used
2769 with the same arm_prologue_this_id and arm_prologue_prev_register
2770 routines also used for prologue-parsing based unwinding. */
2773 arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2774 struct frame_info *this_frame,
2775 void **this_prologue_cache)
2777 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2778 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2779 CORE_ADDR addr_in_block, exidx_region, func_start;
2780 struct arm_prologue_cache *cache;
2783 /* See if we have an ARM exception table entry covering this address. */
2784 addr_in_block = get_frame_address_in_block (this_frame);
2785 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2789 /* The ARM exception table does not describe unwind information
2790 for arbitrary PC values, but is guaranteed to be correct only
2791 at call sites. We have to decide here whether we want to use
2792 ARM exception table information for this frame, or fall back
2793 to using prologue parsing. (Note that if we have DWARF CFI,
2794 this sniffer isn't even called -- CFI is always preferred.)
2796 Before we make this decision, however, we check whether we
2797 actually have *symbol* information for the current frame.
2798 If not, prologue parsing would not work anyway, so we might
2799 as well use the exception table and hope for the best. */
2800 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2804 /* If the next frame is "normal", we are at a call site in this
2805 frame, so exception information is guaranteed to be valid. */
2806 if (get_next_frame (this_frame)
2807 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2810 /* We also assume exception information is valid if we're currently
2811 blocked in a system call. The system library is supposed to
2812 ensure this, so that e.g. pthread cancellation works. */
2813 if (arm_frame_is_thumb (this_frame))
2817 if (safe_read_memory_integer (get_frame_pc (this_frame) - 2, 2,
2818 byte_order_for_code, &insn)
2819 && (insn & 0xff00) == 0xdf00 /* svc */)
2826 if (safe_read_memory_integer (get_frame_pc (this_frame) - 4, 4,
2827 byte_order_for_code, &insn)
2828 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2832 /* Bail out if we don't know that exception information is valid. */
2836 /* The ARM exception index does not mark the *end* of the region
2837 covered by the entry, and some functions will not have any entry.
2838 To correctly recognize the end of the covered region, the linker
2839 should have inserted dummy records with a CANTUNWIND marker.
2841 Unfortunately, current versions of GNU ld do not reliably do
2842 this, and thus we may have found an incorrect entry above.
2843 As a (temporary) sanity check, we only use the entry if it
2844 lies *within* the bounds of the function. Note that this check
2845 might reject perfectly valid entries that just happen to cover
2846 multiple functions; therefore this check ought to be removed
2847 once the linker is fixed. */
2848 if (func_start > exidx_region)
2852 /* Decode the list of unwinding instructions into a prologue cache.
2853 Note that this may fail due to e.g. a "refuse to unwind" code. */
2854 cache = arm_exidx_fill_cache (this_frame, entry);
2858 *this_prologue_cache = cache;
2862 struct frame_unwind arm_exidx_unwind = {
2864 default_frame_unwind_stop_reason,
2865 arm_prologue_this_id,
2866 arm_prologue_prev_register,
2868 arm_exidx_unwind_sniffer
2871 static struct arm_prologue_cache *
2872 arm_make_stub_cache (struct frame_info *this_frame)
2874 struct arm_prologue_cache *cache;
2876 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2877 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2879 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2884 /* Our frame ID for a stub frame is the current SP and LR. */
2887 arm_stub_this_id (struct frame_info *this_frame,
2889 struct frame_id *this_id)
2891 struct arm_prologue_cache *cache;
2893 if (*this_cache == NULL)
2894 *this_cache = arm_make_stub_cache (this_frame);
2895 cache = *this_cache;
2897 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
2901 arm_stub_unwind_sniffer (const struct frame_unwind *self,
2902 struct frame_info *this_frame,
2903 void **this_prologue_cache)
2905 CORE_ADDR addr_in_block;
2908 addr_in_block = get_frame_address_in_block (this_frame);
2909 if (in_plt_section (addr_in_block, NULL)
2910 /* We also use the stub unwinder if the target memory is unreadable,
2911 to avoid having the prologue unwinder try to read it. */
2912 || target_read_memory (get_frame_pc (this_frame), dummy, 4) != 0)
2918 struct frame_unwind arm_stub_unwind = {
2920 default_frame_unwind_stop_reason,
2922 arm_prologue_prev_register,
2924 arm_stub_unwind_sniffer
2928 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
2930 struct arm_prologue_cache *cache;
2932 if (*this_cache == NULL)
2933 *this_cache = arm_make_prologue_cache (this_frame);
2934 cache = *this_cache;
2936 return cache->prev_sp - cache->framesize;
2939 struct frame_base arm_normal_base = {
2940 &arm_prologue_unwind,
2941 arm_normal_frame_base,
2942 arm_normal_frame_base,
2943 arm_normal_frame_base
2946 /* Assuming THIS_FRAME is a dummy, return the frame ID of that
2947 dummy frame. The frame ID's base needs to match the TOS value
2948 saved by save_dummy_frame_tos() and returned from
2949 arm_push_dummy_call, and the PC needs to match the dummy frame's
2952 static struct frame_id
2953 arm_dummy_id (struct gdbarch *gdbarch, struct frame_info *this_frame)
2955 return frame_id_build (get_frame_register_unsigned (this_frame,
2957 get_frame_pc (this_frame));
2960 /* Given THIS_FRAME, find the previous frame's resume PC (which will
2961 be used to construct the previous frame's ID, after looking up the
2962 containing function). */
2965 arm_unwind_pc (struct gdbarch *gdbarch, struct frame_info *this_frame)
2968 pc = frame_unwind_register_unsigned (this_frame, ARM_PC_REGNUM);
2969 return arm_addr_bits_remove (gdbarch, pc);
2973 arm_unwind_sp (struct gdbarch *gdbarch, struct frame_info *this_frame)
2975 return frame_unwind_register_unsigned (this_frame, ARM_SP_REGNUM);
2978 static struct value *
2979 arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
2982 struct gdbarch * gdbarch = get_frame_arch (this_frame);
2984 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
2989 /* The PC is normally copied from the return column, which
2990 describes saves of LR. However, that version may have an
2991 extra bit set to indicate Thumb state. The bit is not
2993 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2994 return frame_unwind_got_constant (this_frame, regnum,
2995 arm_addr_bits_remove (gdbarch, lr));
2998 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
2999 cpsr = get_frame_register_unsigned (this_frame, regnum);
3000 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3001 if (IS_THUMB_ADDR (lr))
3005 return frame_unwind_got_constant (this_frame, regnum, cpsr);
3008 internal_error (__FILE__, __LINE__,
3009 _("Unexpected register %d"), regnum);
3014 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3015 struct dwarf2_frame_state_reg *reg,
3016 struct frame_info *this_frame)
3022 reg->how = DWARF2_FRAME_REG_FN;
3023 reg->loc.fn = arm_dwarf2_prev_register;
3026 reg->how = DWARF2_FRAME_REG_CFA;
3031 /* Return true if we are in the function's epilogue, i.e. after the
3032 instruction that destroyed the function's stack frame. */
3035 thumb_in_function_epilogue_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3037 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3038 unsigned int insn, insn2;
3039 int found_return = 0, found_stack_adjust = 0;
3040 CORE_ADDR func_start, func_end;
3044 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3047 /* The epilogue is a sequence of instructions along the following lines:
3049 - add stack frame size to SP or FP
3050 - [if frame pointer used] restore SP from FP
3051 - restore registers from SP [may include PC]
3052 - a return-type instruction [if PC wasn't already restored]
3054 In a first pass, we scan forward from the current PC and verify the
3055 instructions we find as compatible with this sequence, ending in a
3058 However, this is not sufficient to distinguish indirect function calls
3059 within a function from indirect tail calls in the epilogue in some cases.
3060 Therefore, if we didn't already find any SP-changing instruction during
3061 forward scan, we add a backward scanning heuristic to ensure we actually
3062 are in the epilogue. */
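/* For instance, a typical Thumb epilogue such as

       add sp, #8        (0xb002)
       pop {r4, r7, pc}  (0xbd90)

   satisfies the forward scan: the ADD matches the SP-adjustment check,
   and the POP both adjusts the stack and restores PC (a return).  */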
3065 while (scan_pc < func_end && !found_return)
3067 if (target_read_memory (scan_pc, buf, 2))
3071 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3073 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3075 else if (insn == 0x46f7) /* mov pc, lr */
3077 else if (insn == 0x46bd) /* mov sp, r7 */
3078 found_stack_adjust = 1;
3079 else if ((insn & 0xff00) == 0xb000) /* add sp, imm or sub sp, imm */
3080 found_stack_adjust = 1;
3081 else if ((insn & 0xfe00) == 0xbc00) /* pop <registers> */
3083 found_stack_adjust = 1;
3084 if (insn & 0x0100) /* <registers> include PC. */
3087 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
3089 if (target_read_memory (scan_pc, buf, 2))
3093 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3095 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3097 found_stack_adjust = 1;
3098 if (insn2 & 0x8000) /* <registers> include PC. */
3101 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3102 && (insn2 & 0x0fff) == 0x0b04)
3104 found_stack_adjust = 1;
3105 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3108 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3109 && (insn2 & 0x0e00) == 0x0a00)
3110 found_stack_adjust = 1;
3121 /* Since any instruction in the epilogue sequence, with the possible
3122 exception of return itself, updates the stack pointer, we need to
3123 scan backwards for at most one instruction. Try either a 16-bit or
3124 a 32-bit instruction. This is just a heuristic, so we do not worry
3125 too much about false positives. */
3127 if (!found_stack_adjust)
3129 if (pc - 4 < func_start)
3131 if (target_read_memory (pc - 4, buf, 4))
3134 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3135 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3137 if (insn2 == 0x46bd) /* mov sp, r7 */
3138 found_stack_adjust = 1;
3139 else if ((insn2 & 0xff00) == 0xb000) /* add sp, imm or sub sp, imm */
3140 found_stack_adjust = 1;
3141 else if ((insn2 & 0xff00) == 0xbc00) /* pop <registers> without PC */
3142 found_stack_adjust = 1;
3143 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3144 found_stack_adjust = 1;
3145 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3146 && (insn2 & 0x0fff) == 0x0b04)
3147 found_stack_adjust = 1;
3148 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3149 && (insn2 & 0x0e00) == 0x0a00)
3150 found_stack_adjust = 1;
3153 return found_stack_adjust;
3156 /* Return true if we are in the function's epilogue, i.e. after the
3157 instruction that destroyed the function's stack frame. */
3160 arm_in_function_epilogue_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3162 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3164 int found_return, found_stack_adjust;
3165 CORE_ADDR func_start, func_end;
3167 if (arm_pc_is_thumb (gdbarch, pc))
3168 return thumb_in_function_epilogue_p (gdbarch, pc);
3170 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3173 /* We are in the epilogue if the previous instruction was a stack
3174 adjustment and the next instruction is a possible return (bx, mov
3175 pc, or pop). We could have to scan backwards to find the stack
3176 adjustment, or forwards to find the return, but this is a decent
3177 approximation. First scan forwards. */
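/* For instance, an ARM epilogue such as

       add sp, sp, #16          (0xe28dd010)
       ldmfd sp!, {r4-r6, pc}   (0xe8bd8070)

   is recognized: the LDM at PC is a pop including PC (a return), and the
   preceding ADD SP is found by the backward scan further down.  */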
3180 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3181 if (bits (insn, 28, 31) != INST_NV)
3183 if ((insn & 0x0ffffff0) == 0x012fff10)
3186 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3189 else if ((insn & 0x0fff0000) == 0x08bd0000
3190 && (insn & 0x0000c000) != 0)
3191 /* POP (LDMIA), including PC or LR. */
3198 /* Scan backwards. This is just a heuristic, so do not worry about
3199 false positives from mode changes. */
3201 if (pc < func_start + 4)
3204 found_stack_adjust = 0;
3205 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
3206 if (bits (insn, 28, 31) != INST_NV)
3208 if ((insn & 0x0df0f000) == 0x0080d000)
3209 /* ADD SP (register or immediate). */
3210 found_stack_adjust = 1;
3211 else if ((insn & 0x0df0f000) == 0x0040d000)
3212 /* SUB SP (register or immediate). */
3213 found_stack_adjust = 1;
3214 else if ((insn & 0x0ffffff0) == 0x01a0d000)
3216 found_stack_adjust = 1;
3217 else if ((insn & 0x0fff0000) == 0x08bd0000)
3219 found_stack_adjust = 1;
3220 else if ((insn & 0x0fff0000) == 0x049d0000)
3221 /* POP of a single register. */
3222 found_stack_adjust = 1;
3225 if (found_stack_adjust)
3232 /* When arguments must be pushed onto the stack, they go on in reverse
3233 order. The code below implements a FILO (stack) to do this. */
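/* Roughly: arm_push_dummy_call below appends one stack_item per outgoing
   stack argument as it walks the argument list left to right; the items
   are then popped and written back out while SP is moved down, so that
   the first stack argument ends up closest to the final SP.  */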
3238 struct stack_item *prev;
3242 static struct stack_item *
3243 push_stack_item (struct stack_item *prev, const void *contents, int len)
3245 struct stack_item *si;
3246 si = xmalloc (sizeof (struct stack_item));
3247 si->data = xmalloc (len);
3250 memcpy (si->data, contents, len);
3254 static struct stack_item *
3255 pop_stack_item (struct stack_item *si)
3257 struct stack_item *dead = si;
3265 /* Return the alignment (in bytes) of the given type. */
3268 arm_type_align (struct type *t)
3274 t = check_typedef (t);
3275 switch (TYPE_CODE (t))
3278 /* Should never happen. */
3279 internal_error (__FILE__, __LINE__, _("unknown type alignment"));
3283 case TYPE_CODE_ENUM:
3287 case TYPE_CODE_RANGE:
3288 case TYPE_CODE_BITSTRING:
3290 case TYPE_CODE_CHAR:
3291 case TYPE_CODE_BOOL:
3292 return TYPE_LENGTH (t);
3294 case TYPE_CODE_ARRAY:
3295 case TYPE_CODE_COMPLEX:
3296 /* TODO: What about vector types? */
3297 return arm_type_align (TYPE_TARGET_TYPE (t));
3299 case TYPE_CODE_STRUCT:
3300 case TYPE_CODE_UNION:
3302 for (n = 0; n < TYPE_NFIELDS (t); n++)
3304 falign = arm_type_align (TYPE_FIELD_TYPE (t, n));
3312 /* Possible base types for a candidate for passing and returning in
3315 enum arm_vfp_cprc_base_type
3324 /* The length of one element of base type B. */
3327 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3331 case VFP_CPRC_SINGLE:
3333 case VFP_CPRC_DOUBLE:
3335 case VFP_CPRC_VEC64:
3337 case VFP_CPRC_VEC128:
3340 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3345 /* The character ('s', 'd' or 'q') for the type of VFP register used
3346 for passing base type B. */
3349 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3353 case VFP_CPRC_SINGLE:
3355 case VFP_CPRC_DOUBLE:
3357 case VFP_CPRC_VEC64:
3359 case VFP_CPRC_VEC128:
3362 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3367 /* Determine whether T may be part of a candidate for passing and
3368 returning in VFP registers, ignoring the limit on the total number
3369 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3370 classification of the first valid component found; if it is not
3371 VFP_CPRC_UNKNOWN, all components must have the same classification
3372 as *BASE_TYPE. If it is found that T contains a type not permitted
3373 for passing and returning in VFP registers, a type differently
3374 classified from *BASE_TYPE, or two types differently classified
3375 from each other, return -1, otherwise return the total number of
3376 base-type elements found (possibly 0 in an empty structure or
3377 array). Vectors and complex types are not currently supported,
3378 matching the generic AAPCS support. */
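/* For example (under the VFP variant of the AAPCS):
     struct { double x, y; }        -> VFP_CPRC_DOUBLE, count 2
     float[4]                       -> VFP_CPRC_SINGLE, count 4
     struct { float f; double d; }  -> mixed classification, returns -1
   A homogeneous aggregate with more than four elements is rejected later,
   by arm_vfp_call_candidate.  */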
3381 arm_vfp_cprc_sub_candidate (struct type *t,
3382 enum arm_vfp_cprc_base_type *base_type)
3384 t = check_typedef (t);
3385 switch (TYPE_CODE (t))
3388 switch (TYPE_LENGTH (t))
3391 if (*base_type == VFP_CPRC_UNKNOWN)
3392 *base_type = VFP_CPRC_SINGLE;
3393 else if (*base_type != VFP_CPRC_SINGLE)
3398 if (*base_type == VFP_CPRC_UNKNOWN)
3399 *base_type = VFP_CPRC_DOUBLE;
3400 else if (*base_type != VFP_CPRC_DOUBLE)
3409 case TYPE_CODE_ARRAY:
3413 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t), base_type);
3416 if (TYPE_LENGTH (t) == 0)
3418 gdb_assert (count == 0);
3421 else if (count == 0)
3423 unitlen = arm_vfp_cprc_unit_length (*base_type);
3424 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3425 return TYPE_LENGTH (t) / unitlen;
3429 case TYPE_CODE_STRUCT:
3434 for (i = 0; i < TYPE_NFIELDS (t); i++)
3436 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3438 if (sub_count == -1)
3442 if (TYPE_LENGTH (t) == 0)
3444 gdb_assert (count == 0);
3447 else if (count == 0)
3449 unitlen = arm_vfp_cprc_unit_length (*base_type);
3450 if (TYPE_LENGTH (t) != unitlen * count)
3455 case TYPE_CODE_UNION:
3460 for (i = 0; i < TYPE_NFIELDS (t); i++)
3462 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3464 if (sub_count == -1)
3466 count = (count > sub_count ? count : sub_count);
3468 if (TYPE_LENGTH (t) == 0)
3470 gdb_assert (count == 0);
3473 else if (count == 0)
3475 unitlen = arm_vfp_cprc_unit_length (*base_type);
3476 if (TYPE_LENGTH (t) != unitlen * count)
3488 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3489 if passed to or returned from a non-variadic function with the VFP
3490 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3491 *BASE_TYPE to the base type for T and *COUNT to the number of
3492 elements of that base type before returning. */
3495 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3498 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3499 int c = arm_vfp_cprc_sub_candidate (t, &b);
3500 if (c <= 0 || c > 4)
3507 /* Return 1 if the VFP ABI should be used for passing arguments to and
3508 returning values from a function of type FUNC_TYPE, 0
3512 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3514 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3515 /* Variadic functions always use the base ABI. Assume that functions
3516 without debug info are not variadic. */
3517 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3519 /* The VFP ABI is only supported as a variant of AAPCS. */
3520 if (tdep->arm_abi != ARM_ABI_AAPCS)
3522 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3525 /* We currently only support passing parameters in integer registers, which
3526 conforms with GCC's default model, and VFP argument passing following
3527 the VFP variant of AAPCS. Several other variants exist and
3528 we should probably support some of them based on the selected ABI. */
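/* For illustration, a call such as f (int a, double b, int c) is laid out
   by the code below as: a in r0, b in the even pair r2/r3 (r1 is skipped
   for doubleword alignment), and c on the stack; with the VFP ABI in
   effect, b instead goes in d0 and c can use r1.  */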
3531 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
3532 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3533 struct value **args, CORE_ADDR sp, int struct_return,
3534 CORE_ADDR struct_addr)
3536 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3540 struct stack_item *si = NULL;
3543 unsigned vfp_regs_free = (1 << 16) - 1;
3545 /* Determine the type of this function and whether the VFP ABI
3547 ftype = check_typedef (value_type (function));
3548 if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3549 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3550 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
3552 /* Set the return address. For the ARM, the return breakpoint is
3553 always at BP_ADDR. */
3554 if (arm_pc_is_thumb (gdbarch, bp_addr))
3556 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
3558 /* Walk through the list of args and determine how large a temporary
3559 stack is required. Need to take care here as structs may be
3560 passed on the stack, and we have to push them. */
3563 argreg = ARM_A1_REGNUM;
3566 /* The struct_return pointer occupies the first parameter
3567 passing register. */
3571 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
3572 gdbarch_register_name (gdbarch, argreg),
3573 paddress (gdbarch, struct_addr));
3574 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3578 for (argnum = 0; argnum < nargs; argnum++)
3581 struct type *arg_type;
3582 struct type *target_type;
3583 enum type_code typecode;
3584 const bfd_byte *val;
3586 enum arm_vfp_cprc_base_type vfp_base_type;
3588 int may_use_core_reg = 1;
3590 arg_type = check_typedef (value_type (args[argnum]));
3591 len = TYPE_LENGTH (arg_type);
3592 target_type = TYPE_TARGET_TYPE (arg_type);
3593 typecode = TYPE_CODE (arg_type);
3594 val = value_contents (args[argnum]);
3596 align = arm_type_align (arg_type);
3597 /* Round alignment up to a whole number of words. */
3598 align = (align + INT_REGISTER_SIZE - 1) & ~(INT_REGISTER_SIZE - 1);
3599 /* Different ABIs have different maximum alignments. */
3600 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3602 /* The APCS ABI only requires word alignment. */
3603 align = INT_REGISTER_SIZE;
3607 /* The AAPCS requires at most doubleword alignment. */
3608 if (align > INT_REGISTER_SIZE * 2)
3609 align = INT_REGISTER_SIZE * 2;
3613 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3621 /* Because this is a CPRC it cannot go in a core register or
3622 cause a core register to be skipped for alignment.
3623 Either it goes in VFP registers and the rest of this loop
3624 iteration is skipped for this argument, or it goes on the
3625 stack (and the stack alignment code is correct for this
3627 may_use_core_reg = 0;
3629 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3630 shift = unit_length / 4;
3631 mask = (1 << (shift * vfp_base_count)) - 1;
3632 for (regno = 0; regno < 16; regno += shift)
3633 if (((vfp_regs_free >> regno) & mask) == mask)
3642 vfp_regs_free &= ~(mask << regno);
3643 reg_scaled = regno / shift;
3644 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3645 for (i = 0; i < vfp_base_count; i++)
3649 if (reg_char == 'q')
3650 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
3651 val + i * unit_length);
3654 sprintf (name_buf, "%c%d", reg_char, reg_scaled + i);
3655 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3657 regcache_cooked_write (regcache, regnum,
3658 val + i * unit_length);
3665 /* This CPRC could not go in VFP registers, so all VFP
3666 registers are now marked as used. */
3671 /* Push stack padding for doubleword alignment. */
3672 if (nstack & (align - 1))
3674 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3675 nstack += INT_REGISTER_SIZE;
3678 /* Doubleword aligned quantities must go in even register pairs. */
3679 if (may_use_core_reg
3680 && argreg <= ARM_LAST_ARG_REGNUM
3681 && align > INT_REGISTER_SIZE
3685 /* If the argument is a pointer to a function, and it is a
3686 Thumb function, create a LOCAL copy of the value and set
3687 the THUMB bit in it. */
3688 if (TYPE_CODE_PTR == typecode
3689 && target_type != NULL
3690 && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
3692 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
3693 if (arm_pc_is_thumb (gdbarch, regval))
3695 bfd_byte *copy = alloca (len);
3696 store_unsigned_integer (copy, len, byte_order,
3697 MAKE_THUMB_ADDR (regval));
3702 /* Copy the argument to general registers or the stack in
3703 register-sized pieces. Large arguments are split between
3704 registers and stack. */
3707 int partial_len = len < INT_REGISTER_SIZE ? len : INT_REGISTER_SIZE;
3709 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
3711 /* The argument is being passed in a general purpose
3714 = extract_unsigned_integer (val, partial_len, byte_order);
3715 if (byte_order == BFD_ENDIAN_BIG)
3716 regval <<= (INT_REGISTER_SIZE - partial_len) * 8;
3718 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
3720 gdbarch_register_name
3722 phex (regval, INT_REGISTER_SIZE));
3723 regcache_cooked_write_unsigned (regcache, argreg, regval);
3728 /* Push the arguments onto the stack. */
3730 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3732 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3733 nstack += INT_REGISTER_SIZE;
3740 /* If we have an odd number of words to push, then decrement the stack
3741 by one word now, so first stack argument will be dword aligned. */
3748 write_memory (sp, si->data, si->len);
3749 si = pop_stack_item (si);
3752 /* Finally, update the SP register. */
3753 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3759 /* Always align the frame to an 8-byte boundary. This is required on
3760 some platforms and harmless on the rest. */
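/* E.g. an incoming SP of 0xbefff7f8 is left unchanged, while 0xbefff7fc
   is rounded down to 0xbefff7f8.  */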
3763 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3765 /* Align the stack to eight bytes. */
3766 return sp & ~ (CORE_ADDR) 7;
3770 print_fpu_flags (int flags)
3772 if (flags & (1 << 0))
3773 fputs ("IVO ", stdout);
3774 if (flags & (1 << 1))
3775 fputs ("DVZ ", stdout);
3776 if (flags & (1 << 2))
3777 fputs ("OFL ", stdout);
3778 if (flags & (1 << 3))
3779 fputs ("UFL ", stdout);
3780 if (flags & (1 << 4))
3781 fputs ("INX ", stdout);
3785 /* Print interesting information about the floating point processor
3786 (if present) or emulator. */
3788 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
3789 struct frame_info *frame, const char *args)
3791 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
3794 type = (status >> 24) & 127;
3795 if (status & (1 << 31))
3796 printf (_("Hardware FPU type %d\n"), type);
3798 printf (_("Software FPU type %d\n"), type);
3799 /* i18n: [floating point unit] mask */
3800 fputs (_("mask: "), stdout);
3801 print_fpu_flags (status >> 16);
3802 /* i18n: [floating point unit] flags */
3803 fputs (_("flags: "), stdout);
3804 print_fpu_flags (status);
3807 /* Construct the ARM extended floating point type. */
3808 static struct type *
3809 arm_ext_type (struct gdbarch *gdbarch)
3811 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3813 if (!tdep->arm_ext_type)
3815 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
3816 floatformats_arm_ext);
3818 return tdep->arm_ext_type;
3821 static struct type *
3822 arm_neon_double_type (struct gdbarch *gdbarch)
3824 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3826 if (tdep->neon_double_type == NULL)
3828 struct type *t, *elem;
3830 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
3832 elem = builtin_type (gdbarch)->builtin_uint8;
3833 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
3834 elem = builtin_type (gdbarch)->builtin_uint16;
3835 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
3836 elem = builtin_type (gdbarch)->builtin_uint32;
3837 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
3838 elem = builtin_type (gdbarch)->builtin_uint64;
3839 append_composite_type_field (t, "u64", elem);
3840 elem = builtin_type (gdbarch)->builtin_float;
3841 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
3842 elem = builtin_type (gdbarch)->builtin_double;
3843 append_composite_type_field (t, "f64", elem);
3845 TYPE_VECTOR (t) = 1;
3846 TYPE_NAME (t) = "neon_d";
3847 tdep->neon_double_type = t;
3850 return tdep->neon_double_type;
3853 /* FIXME: The vector types are not correctly ordered on big-endian
3854 targets. Just as s0 is the low bits of d0, d0[0] is also the low
3855 bits of d0 - regardless of what unit size is being held in d0. So
3856 the offset of the first uint8 in d0 is 7, but the offset of the
3857 first float is 4. This code works as-is for little-endian
3860 static struct type *
3861 arm_neon_quad_type (struct gdbarch *gdbarch)
3863 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3865 if (tdep->neon_quad_type == NULL)
3867 struct type *t, *elem;
3869 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
3871 elem = builtin_type (gdbarch)->builtin_uint8;
3872 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
3873 elem = builtin_type (gdbarch)->builtin_uint16;
3874 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
3875 elem = builtin_type (gdbarch)->builtin_uint32;
3876 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
3877 elem = builtin_type (gdbarch)->builtin_uint64;
3878 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
3879 elem = builtin_type (gdbarch)->builtin_float;
3880 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
3881 elem = builtin_type (gdbarch)->builtin_double;
3882 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
3884 TYPE_VECTOR (t) = 1;
3885 TYPE_NAME (t) = "neon_q";
3886 tdep->neon_quad_type = t;
3889 return tdep->neon_quad_type;
3892 /* Return the GDB type object for the "standard" data type of data in
3895 static struct type *
3896 arm_register_type (struct gdbarch *gdbarch, int regnum)
3898 int num_regs = gdbarch_num_regs (gdbarch);
3900 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
3901 && regnum >= num_regs && regnum < num_regs + 32)
3902 return builtin_type (gdbarch)->builtin_float;
3904 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
3905 && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
3906 return arm_neon_quad_type (gdbarch);
3908 /* If the target description has register information, we are only
3909 in this function so that we can override the types of
3910 double-precision registers for NEON. */
3911 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
3913 struct type *t = tdesc_register_type (gdbarch, regnum);
3915 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
3916 && TYPE_CODE (t) == TYPE_CODE_FLT
3917 && gdbarch_tdep (gdbarch)->have_neon)
3918 return arm_neon_double_type (gdbarch);
3923 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
3925 if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
3926 return builtin_type (gdbarch)->builtin_void;
3928 return arm_ext_type (gdbarch);
3930 else if (regnum == ARM_SP_REGNUM)
3931 return builtin_type (gdbarch)->builtin_data_ptr;
3932 else if (regnum == ARM_PC_REGNUM)
3933 return builtin_type (gdbarch)->builtin_func_ptr;
3934 else if (regnum >= ARRAY_SIZE (arm_register_names))
3935 /* These registers are only supported on targets which supply
3936 an XML description. */
3937 return builtin_type (gdbarch)->builtin_int0;
3939 return builtin_type (gdbarch)->builtin_uint32;
3942 /* Map a DWARF register REGNUM onto the appropriate GDB register
3946 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
3948 /* Core integer regs. */
3949 if (reg >= 0 && reg <= 15)
3952 /* Legacy FPA encoding. These were once used in a way which
3953 overlapped with VFP register numbering, so their use is
3954 discouraged, but GDB doesn't support the ARM toolchain
3955 which used them for VFP. */
3956 if (reg >= 16 && reg <= 23)
3957 return ARM_F0_REGNUM + reg - 16;
3959 /* New assignments for the FPA registers. */
3960 if (reg >= 96 && reg <= 103)
3961 return ARM_F0_REGNUM + reg - 96;
3963 /* WMMX register assignments. */
3964 if (reg >= 104 && reg <= 111)
3965 return ARM_WCGR0_REGNUM + reg - 104;
3967 if (reg >= 112 && reg <= 127)
3968 return ARM_WR0_REGNUM + reg - 112;
3970 if (reg >= 192 && reg <= 199)
3971 return ARM_WC0_REGNUM + reg - 192;
3973 /* VFP v2 registers. A double precision value is actually
3974 in d1 rather than s2, but the ABI only defines numbering
3975 for the single precision registers. This will "just work"
3976 in GDB for little endian targets (we'll read eight bytes,
3977 starting in s0 and then progressing to s1), but will be
3978 reversed on big endian targets with VFP. This won't
3979 be a problem for the new Neon quad registers; you're supposed
3980 to use DW_OP_piece for those. */
3981 if (reg >= 64 && reg <= 95)
3985 sprintf (name_buf, "s%d", reg - 64);
3986 return user_reg_map_name_to_regnum (gdbarch, name_buf,
3990 /* VFP v3 / Neon registers. This range is also used for VFP v2
3991 registers, except that it now describes d0 instead of s0. */
3992 if (reg >= 256 && reg <= 287)
3996 sprintf (name_buf, "d%d", reg - 256);
3997 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4004 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4006 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4009 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
4011 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4012 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4014 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4015 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4017 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4018 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4020 if (reg < NUM_GREGS)
4021 return SIM_ARM_R0_REGNUM + reg;
4024 if (reg < NUM_FREGS)
4025 return SIM_ARM_FP0_REGNUM + reg;
4028 if (reg < NUM_SREGS)
4029 return SIM_ARM_FPS_REGNUM + reg;
4032 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4035 /* NOTE: cagney/2001-08-20: Both convert_from_extended() and
4036 convert_to_extended() use floatformat_arm_ext_littlebyte_bigword.
4037 It is thought that this is the floating-point register format on
4038 little-endian systems. */
4041 convert_from_extended (const struct floatformat *fmt, const void *ptr,
4042 void *dbl, int endianess)
4046 if (endianess == BFD_ENDIAN_BIG)
4047 floatformat_to_doublest (&floatformat_arm_ext_big, ptr, &d);
4049 floatformat_to_doublest (&floatformat_arm_ext_littlebyte_bigword,
4051 floatformat_from_doublest (fmt, &d, dbl);
4055 convert_to_extended (const struct floatformat *fmt, void *dbl, const void *ptr,
4060 floatformat_to_doublest (fmt, ptr, &d);
4061 if (endianess == BFD_ENDIAN_BIG)
4062 floatformat_from_doublest (&floatformat_arm_ext_big, &d, dbl);
4064 floatformat_from_doublest (&floatformat_arm_ext_littlebyte_bigword,
4069 condition_true (unsigned long cond, unsigned long status_reg)
4071 if (cond == INST_AL || cond == INST_NV)
4077 return ((status_reg & FLAG_Z) != 0);
4079 return ((status_reg & FLAG_Z) == 0);
4081 return ((status_reg & FLAG_C) != 0);
4083 return ((status_reg & FLAG_C) == 0);
4085 return ((status_reg & FLAG_N) != 0);
4087 return ((status_reg & FLAG_N) == 0);
4089 return ((status_reg & FLAG_V) != 0);
4091 return ((status_reg & FLAG_V) == 0);
4093 return ((status_reg & (FLAG_C | FLAG_Z)) == FLAG_C);
4095 return ((status_reg & (FLAG_C | FLAG_Z)) != FLAG_C);
4097 return (((status_reg & FLAG_N) == 0) == ((status_reg & FLAG_V) == 0));
4099 return (((status_reg & FLAG_N) == 0) != ((status_reg & FLAG_V) == 0));
4101 return (((status_reg & FLAG_Z) == 0)
4102 && (((status_reg & FLAG_N) == 0)
4103 == ((status_reg & FLAG_V) == 0)));
4105 return (((status_reg & FLAG_Z) != 0)
4106 || (((status_reg & FLAG_N) == 0)
4107 != ((status_reg & FLAG_V) == 0)));
4112 static unsigned long
4113 shifted_reg_val (struct frame_info *frame, unsigned long inst, int carry,
4114 unsigned long pc_val, unsigned long status_reg)
4116 unsigned long res, shift;
4117 int rm = bits (inst, 0, 3);
4118 unsigned long shifttype = bits (inst, 5, 6);
4122 int rs = bits (inst, 8, 11);
4123 shift = (rs == 15 ? pc_val + 8
4124 : get_frame_register_unsigned (frame, rs)) & 0xFF;
4127 shift = bits (inst, 7, 11);
4129 res = (rm == ARM_PC_REGNUM
4130 ? (pc_val + (bit (inst, 4) ? 12 : 8))
4131 : get_frame_register_unsigned (frame, rm));
4136 res = shift >= 32 ? 0 : res << shift;
4140 res = shift >= 32 ? 0 : res >> shift;
4146 res = ((res & 0x80000000L)
4147 ? ~((~res) >> shift) : res >> shift);
4150 case 3: /* ROR/RRX */
4153 res = (res >> 1) | (carry ? 0x80000000L : 0);
4155 res = (res >> shift) | (res << (32 - shift));
4159 return res & 0xffffffff;
4162 /* Return number of 1-bits in VAL. */
4165 bitcount (unsigned long val)
4168 for (nbits = 0; val != 0; nbits++)
4169 val &= val - 1; /* Delete rightmost 1-bit in val. */
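  /* E.g. for val = 0xf00f the loop runs eight times, clearing one bit per
     iteration: 0xf00f, 0xf00e, 0xf00c, 0xf008, 0xf000, 0xe000, 0xc000,
     0x8000, 0 -- so bitcount returns 8.  */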
4173 /* Return the size in bytes of the complete Thumb instruction whose
4174 first halfword is INST1. */
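/* For example, 0xb580 (push {r7, lr}) is a 2-byte instruction, while
   0xe92d or 0xf7ff (the first halfword of push.w / bl) indicates a 4-byte
   Thumb-2 encoding; 16-bit unconditional branches (0xe000 .. 0xe7ff)
   remain 2 bytes because bits 12-11 are clear.  */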
4177 thumb_insn_size (unsigned short inst1)
4179 if ((inst1 & 0xe000) == 0xe000 && (inst1 & 0x1800) != 0)
4186 thumb_advance_itstate (unsigned int itstate)
4188 /* Preserve IT[7:5], the first three bits of the condition. Shift
4189 the upcoming condition flags left by one bit. */
4190 itstate = (itstate & 0xe0) | ((itstate << 1) & 0x1f);
4192 /* If we have finished the IT block, clear the state. */
4193 if ((itstate & 0x0f) == 0)
4199 /* Find the next PC after the current instruction executes. In some
4200 cases we can not statically determine the answer (see the IT state
4201 handling in this function); in that case, a breakpoint may be
4202 inserted in addition to the returned PC, which will be used to set
4203 another breakpoint by our caller. */
4206 thumb_get_next_pc_raw (struct frame_info *frame, CORE_ADDR pc)
4208 struct gdbarch *gdbarch = get_frame_arch (frame);
4209 struct address_space *aspace = get_frame_address_space (frame);
4210 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
4211 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4212 unsigned long pc_val = ((unsigned long) pc) + 4; /* PC after prefetch */
4213 unsigned short inst1;
4214 CORE_ADDR nextpc = pc + 2; /* Default is next instruction. */
4215 unsigned long offset;
4216 ULONGEST status, itstate;
4218 nextpc = MAKE_THUMB_ADDR (nextpc);
4219 pc_val = MAKE_THUMB_ADDR (pc_val);
4221 inst1 = read_memory_unsigned_integer (pc, 2, byte_order_for_code);
4223 /* Thumb-2 conditional execution support. There are eight bits in
4224 the CPSR which describe conditional execution state. Once
4225 reconstructed (they're in a funny order), the low five bits
4226 describe the low bit of the condition for each instruction and
4227 how many instructions remain. The high three bits describe the
4228 base condition. One of the low four bits will be set if an IT
4229 block is active. These bits read as zero on earlier
4231 status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
4232 itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);
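  /* The two halves of the IT field live in CPSR bits [26:25] (IT[1:0]) and
     [15:10] (IT[7:2]); the expression above stitches them back together.
     For example, after executing "IT EQ" (0xbf08) the reconstructed value
     is 0x08: bits [7:4] give the condition (EQ) tested below, and the low
     nibble 0x8 marks the last (here, only) instruction of the block.  */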
4234 /* If-Then handling. On GNU/Linux, where this routine is used, we
4235 use an undefined instruction as a breakpoint. Unlike BKPT, IT
4236 can disable execution of the undefined instruction. So we might
4237 miss the breakpoint if we set it on a skipped conditional
4238 instruction. Because conditional instructions can change the
4239 flags, affecting the execution of further instructions, we may
4240 need to set two breakpoints. */
4242 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint != NULL)
4244 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4246 /* An IT instruction. Because this instruction does not
4247 modify the flags, we can accurately predict the next
4248 executed instruction. */
4249 itstate = inst1 & 0x00ff;
4250 pc += thumb_insn_size (inst1);
4252 while (itstate != 0 && ! condition_true (itstate >> 4, status))
4254 inst1 = read_memory_unsigned_integer (pc, 2,
4255 byte_order_for_code);
4256 pc += thumb_insn_size (inst1);
4257 itstate = thumb_advance_itstate (itstate);
4260 return MAKE_THUMB_ADDR (pc);
4262 else if (itstate != 0)
4264 /* We are in a conditional block. Check the condition. */
4265 if (! condition_true (itstate >> 4, status))
4267 /* Advance to the next executed instruction. */
4268 pc += thumb_insn_size (inst1);
4269 itstate = thumb_advance_itstate (itstate);
4271 while (itstate != 0 && ! condition_true (itstate >> 4, status))
4273 inst1 = read_memory_unsigned_integer (pc, 2,
4274 byte_order_for_code);
4275 pc += thumb_insn_size (inst1);
4276 itstate = thumb_advance_itstate (itstate);
4279 return MAKE_THUMB_ADDR (pc);
4281 else if ((itstate & 0x0f) == 0x08)
4283 /* This is the last instruction of the conditional
4284 block, and it is executed. We can handle it normally
4285 because the following instruction is not conditional,
4286 and we must handle it normally because it is
4287 permitted to branch. Fall through. */
4293 /* There are conditional instructions after this one.
4294 If this instruction modifies the flags, then we can
4295 not predict what the next executed instruction will
4296 be. Fortunately, this instruction is architecturally
4297 forbidden to branch; we know it will fall through.
4298 Start by skipping past it. */
4299 pc += thumb_insn_size (inst1);
4300 itstate = thumb_advance_itstate (itstate);
4302 /* Set a breakpoint on the following instruction. */
4303 gdb_assert ((itstate & 0x0f) != 0);
4304 arm_insert_single_step_breakpoint (gdbarch, aspace,
4305 MAKE_THUMB_ADDR (pc));
4306 cond_negated = (itstate >> 4) & 1;
4308 /* Skip all following instructions with the same
4309 condition. If there is a later instruction in the IT
4310 block with the opposite condition, set the other
4311 breakpoint there. If not, then set a breakpoint on
4312 the instruction after the IT block. */
4315 inst1 = read_memory_unsigned_integer (pc, 2,
4316 byte_order_for_code);
4317 pc += thumb_insn_size (inst1);
4318 itstate = thumb_advance_itstate (itstate);
4320 while (itstate != 0 && ((itstate >> 4) & 1) == cond_negated);
4322 return MAKE_THUMB_ADDR (pc);
4326 else if (itstate & 0x0f)
4328 /* We are in a conditional block. Check the condition. */
4329 int cond = itstate >> 4;
4331 if (! condition_true (cond, status))
4332 /* Advance to the next instruction. All the 32-bit
4333 instructions share a common prefix. */
4334 return MAKE_THUMB_ADDR (pc + thumb_insn_size (inst1));
4336 /* Otherwise, handle the instruction normally. */
4339 if ((inst1 & 0xff00) == 0xbd00) /* pop {rlist, pc} */
4343 /* Fetch the saved PC from the stack. It's stored above
4344 all of the other registers. */
4345 offset = bitcount (bits (inst1, 0, 7)) * INT_REGISTER_SIZE;
4346 sp = get_frame_register_unsigned (frame, ARM_SP_REGNUM);
4347 nextpc = read_memory_unsigned_integer (sp + offset, 4, byte_order);
4349 else if ((inst1 & 0xf000) == 0xd000) /* conditional branch */
4351 unsigned long cond = bits (inst1, 8, 11);
4352 if (cond == 0x0f) /* 0x0f = SWI */
4354 struct gdbarch_tdep *tdep;
4355 tdep = gdbarch_tdep (gdbarch);
4357 if (tdep->syscall_next_pc != NULL)
4358 nextpc = tdep->syscall_next_pc (frame);
4361 else if (cond != 0x0f && condition_true (cond, status))
4362 nextpc = pc_val + (sbits (inst1, 0, 7) << 1);
4364 else if ((inst1 & 0xf800) == 0xe000) /* unconditional branch */
4366 nextpc = pc_val + (sbits (inst1, 0, 10) << 1);
4368 else if (thumb_insn_size (inst1) == 4) /* 32-bit instruction */
4370 unsigned short inst2;
4371 inst2 = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);
4373 /* Default to the next instruction. */
4375 nextpc = MAKE_THUMB_ADDR (nextpc);
4377 if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
4379 /* Branches and miscellaneous control instructions. */
4381 if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
4384 int j1, j2, imm1, imm2;
4386 imm1 = sbits (inst1, 0, 10);
4387 imm2 = bits (inst2, 0, 10);
4388 j1 = bit (inst2, 13);
4389 j2 = bit (inst2, 11);
4391 offset = ((imm1 << 12) + (imm2 << 1));
4392 offset ^= ((!j2) << 22) | ((!j1) << 23);
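/* BL/BLX (encodings T1/T2) form their offset as
   SignExtend (S:I1:I2:imm10:imm11:'0'), where I1 = NOT (J1 EOR S)
   and I2 = NOT (J2 EOR S).  Because IMM1 was sign-extended, bits 22
   and 23 of OFFSET both hold S at this point; XORing them with !J2
   and !J1 turns them into I2 and I1 respectively.  */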
4394 nextpc = pc_val + offset;
4395 /* For BLX make sure to clear the low bits. */
4396 if (bit (inst2, 12) == 0)
4397 nextpc = nextpc & 0xfffffffc;
4399 else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)
4401 /* SUBS PC, LR, #imm8. */
4402 nextpc = get_frame_register_unsigned (frame, ARM_LR_REGNUM);
4403 nextpc -= inst2 & 0x00ff;
4405 else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
4407 /* Conditional branch. */
4408 if (condition_true (bits (inst1, 6, 9), status))
4410 int sign, j1, j2, imm1, imm2;
4412 sign = sbits (inst1, 10, 10);
4413 imm1 = bits (inst1, 0, 5);
4414 imm2 = bits (inst2, 0, 10);
4415 j1 = bit (inst2, 13);
4416 j2 = bit (inst2, 11);
4418 offset = (sign << 20) + (j2 << 19) + (j1 << 18);
4419 offset += (imm1 << 12) + (imm2 << 1);
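/* Conditional branch, encoding T3: the offset is
   SignExtend (S:J2:J1:imm6:imm11:'0').  Unlike BL/BLX, J1 and J2 are
   used directly, without the I1/I2 inversion.  */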
4421 nextpc = pc_val + offset;
4425 else if ((inst1 & 0xfe50) == 0xe810)
4427 /* Load multiple or RFE. */
4428 int rn, offset, load_pc = 1;
4430 rn = bits (inst1, 0, 3);
4431 if (bit (inst1, 7) && !bit (inst1, 8))
4434 if (!bit (inst2, 15))
4436 offset = bitcount (inst2) * 4 - 4;
4438 else if (!bit (inst1, 7) && bit (inst1, 8))
4441 if (!bit (inst2, 15))
4445 else if (bit (inst1, 7) && bit (inst1, 8))
4450 else if (!bit (inst1, 7) && !bit (inst1, 8))
4460 CORE_ADDR addr = get_frame_register_unsigned (frame, rn);
4461 nextpc = get_frame_memory_unsigned (frame, addr + offset, 4);
4464 else if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
4466 /* MOV PC or MOVS PC. */
4467 nextpc = get_frame_register_unsigned (frame, bits (inst2, 0, 3));
4468 nextpc = MAKE_THUMB_ADDR (nextpc);
4470 else if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
4474 int rn, load_pc = 1;
4476 rn = bits (inst1, 0, 3);
4477 base = get_frame_register_unsigned (frame, rn);
4478 if (rn == ARM_PC_REGNUM)
4480 base = (base + 4) & ~(CORE_ADDR) 0x3;
4482 base += bits (inst2, 0, 11);
4484 base -= bits (inst2, 0, 11);
4486 else if (bit (inst1, 7))
4487 base += bits (inst2, 0, 11);
4488 else if (bit (inst2, 11))
4490 if (bit (inst2, 10))
4493 base += bits (inst2, 0, 7);
4495 base -= bits (inst2, 0, 7);
4498 else if ((inst2 & 0x0fc0) == 0x0000)
4500 int shift = bits (inst2, 4, 5), rm = bits (inst2, 0, 3);
4501 base += get_frame_register_unsigned (frame, rm) << shift;
4508 nextpc = get_frame_memory_unsigned (frame, base, 4);
4510 else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)
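/* TBB: table branch with byte offsets; the branch length is twice
   the byte fetched from the table.  */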
4513 CORE_ADDR tbl_reg, table, offset, length;
4515 tbl_reg = bits (inst1, 0, 3);
4516 if (tbl_reg == 0x0f)
4517 table = pc + 4; /* Regcache copy of PC isn't right yet. */
4519 table = get_frame_register_unsigned (frame, tbl_reg);
4521 offset = get_frame_register_unsigned (frame, bits (inst2, 0, 3));
4522 length = 2 * get_frame_memory_unsigned (frame, table + offset, 1);
4523 nextpc = pc_val + length;
4525 else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)
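/* TBH: table branch with halfword offsets; the index is scaled by
   two and the branch length is twice the halfword fetched from the
   table.  */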
4528 CORE_ADDR tbl_reg, table, offset, length;
4530 tbl_reg = bits (inst1, 0, 3);
4531 if (tbl_reg == 0x0f)
4532 table = pc + 4; /* Regcache copy of PC isn't right yet. */
4534 table = get_frame_register_unsigned (frame, tbl_reg);
4536 offset = 2 * get_frame_register_unsigned (frame, bits (inst2, 0, 3));
4537 length = 2 * get_frame_memory_unsigned (frame, table + offset, 2);
4538 nextpc = pc_val + length;
4541 else if ((inst1 & 0xff00) == 0x4700) /* bx REG, blx REG */
4543 if (bits (inst1, 3, 6) == 0x0f)
4546 nextpc = get_frame_register_unsigned (frame, bits (inst1, 3, 6));
4548 else if ((inst1 & 0xff87) == 0x4687) /* mov pc, REG */
4550 if (bits (inst1, 3, 6) == 0x0f)
4553 nextpc = get_frame_register_unsigned (frame, bits (inst1, 3, 6));
4555 nextpc = MAKE_THUMB_ADDR (nextpc);
4557 else if ((inst1 & 0xf500) == 0xb100)
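/* CBZ/CBNZ: compare and branch on (non-)zero.  Bit 11 distinguishes
   CBNZ from CBZ, and the branch offset is i:imm5:'0'.  */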
4560 int imm = (bit (inst1, 9) << 6) + (bits (inst1, 3, 7) << 1);
4561 ULONGEST reg = get_frame_register_unsigned (frame, bits (inst1, 0, 2));
4563 if (bit (inst1, 11) && reg != 0)
4564 nextpc = pc_val + imm;
4565 else if (!bit (inst1, 11) && reg == 0)
4566 nextpc = pc_val + imm;
4571 /* Get the raw next address. PC is the current program counter, in
4572 FRAME, which is assumed to be executing in ARM mode.
4574 The value returned has the execution state of the next instruction
4575 encoded in it. Use IS_THUMB_ADDR () to see whether the instruction is
4576 in Thumb-State, and gdbarch_addr_bits_remove () to get the plain memory address in this mode.  */
4580 arm_get_next_pc_raw (struct frame_info *frame, CORE_ADDR pc)
4582 struct gdbarch *gdbarch = get_frame_arch (frame);
4583 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
4584 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4585 unsigned long pc_val;
4586 unsigned long this_instr;
4587 unsigned long status;
4590 pc_val = (unsigned long) pc;
4591 this_instr = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
4593 status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
4594 nextpc = (CORE_ADDR) (pc_val + 4); /* Default case */
4596 if (bits (this_instr, 28, 31) == INST_NV)
4597 switch (bits (this_instr, 24, 27))
4602 /* Branch with Link and change to Thumb. */
4603 nextpc = BranchDest (pc, this_instr);
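/* The H bit (bit 24) supplies bit 1 of the Thumb destination, giving
   BLX (immediate) halfword resolution.  */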
4604 nextpc |= bit (this_instr, 24) << 1;
4605 nextpc = MAKE_THUMB_ADDR (nextpc);
4611 /* Coprocessor register transfer. */
4612 if (bits (this_instr, 12, 15) == 15)
4613 error (_("Invalid update to pc in instruction"));
4616 else if (condition_true (bits (this_instr, 28, 31), status))
4618 switch (bits (this_instr, 24, 27))
4621 case 0x1: /* data processing */
4625 unsigned long operand1, operand2, result = 0;
4629 if (bits (this_instr, 12, 15) != 15)
4632 if (bits (this_instr, 22, 25) == 0
4633 && bits (this_instr, 4, 7) == 9) /* multiply */
4634 error (_("Invalid update to pc in instruction"));
4636 /* BX <reg>, BLX <reg> */
4637 if (bits (this_instr, 4, 27) == 0x12fff1
4638 || bits (this_instr, 4, 27) == 0x12fff3)
4640 rn = bits (this_instr, 0, 3);
4641 nextpc = ((rn == ARM_PC_REGNUM)
4643 : get_frame_register_unsigned (frame, rn));
4648 /* Multiply into PC. */
4649 c = (status & FLAG_C) ? 1 : 0;
4650 rn = bits (this_instr, 16, 19);
4651 operand1 = ((rn == ARM_PC_REGNUM)
4653 : get_frame_register_unsigned (frame, rn));
4655 if (bit (this_instr, 25))
4657 unsigned long immval = bits (this_instr, 0, 7);
4658 unsigned long rotate = 2 * bits (this_instr, 8, 11);
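/* The immediate operand is an 8-bit value rotated right by twice the
   4-bit rotate field; for example, immval 0xff with a rotate field of
   12 (rotate right by 24) decodes to 0x0000ff00.  */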
4659 operand2 = ((immval >> rotate) | (immval << (32 - rotate)))
4662 else /* operand 2 is a shifted register. */
4663 operand2 = shifted_reg_val (frame, this_instr, c,
4666 switch (bits (this_instr, 21, 24))
4669 result = operand1 & operand2;
4673 result = operand1 ^ operand2;
4677 result = operand1 - operand2;
4681 result = operand2 - operand1;
4685 result = operand1 + operand2;
4689 result = operand1 + operand2 + c;
4693 result = operand1 - operand2 + c;
4697 result = operand2 - operand1 + c;
4703 case 0xb: /* tst, teq, cmp, cmn */
4704 result = (unsigned long) nextpc;
4708 result = operand1 | operand2;
4712 /* Always step into a function. */
4717 result = operand1 & ~operand2;
4725 /* In 26-bit APCS the bottom two bits of the result are
4726 ignored, and we always end up in ARM state. */
4728 nextpc = arm_addr_bits_remove (gdbarch, result);
4736 case 0x5: /* data transfer */
4739 if (bit (this_instr, 20))
4742 if (bits (this_instr, 12, 15) == 15)
4748 if (bit (this_instr, 22))
4749 error (_("Invalid update to pc in instruction"));
4751 /* byte write to PC */
4752 rn = bits (this_instr, 16, 19);
4753 base = ((rn == ARM_PC_REGNUM)
4755 : get_frame_register_unsigned (frame, rn));
4757 if (bit (this_instr, 24))
4760 int c = (status & FLAG_C) ? 1 : 0;
4761 unsigned long offset =
4762 (bit (this_instr, 25)
4763 ? shifted_reg_val (frame, this_instr, c, pc_val, status)
4764 : bits (this_instr, 0, 11));
4766 if (bit (this_instr, 23))
4772 (CORE_ADDR) read_memory_unsigned_integer ((CORE_ADDR) base,
4779 case 0x9: /* block transfer */
4780 if (bit (this_instr, 20))
4783 if (bit (this_instr, 15))
4787 unsigned long rn_val
4788 = get_frame_register_unsigned (frame,
4789 bits (this_instr, 16, 19));
4791 if (bit (this_instr, 23))
4794 unsigned long reglist = bits (this_instr, 0, 14);
4795 offset = bitcount (reglist) * 4;
4796 if (bit (this_instr, 24)) /* pre */
4799 else if (bit (this_instr, 24))
4803 (CORE_ADDR) read_memory_unsigned_integer ((CORE_ADDR)
4810 case 0xb: /* branch & link */
4811 case 0xa: /* branch */
4813 nextpc = BranchDest (pc, this_instr);
4819 case 0xe: /* coproc ops */
4823 struct gdbarch_tdep *tdep;
4824 tdep = gdbarch_tdep (gdbarch);
4826 if (tdep->syscall_next_pc != NULL)
4827 nextpc = tdep->syscall_next_pc (frame);
4833 fprintf_filtered (gdb_stderr, _("Bad bit-field extraction\n"));
4841 /* Determine next PC after current instruction executes. Will call either
4842 arm_get_next_pc_raw or thumb_get_next_pc_raw.  Error out if an infinite
4843 loop is detected. */
4846 arm_get_next_pc (struct frame_info *frame, CORE_ADDR pc)
4850 if (arm_frame_is_thumb (frame))
4852 nextpc = thumb_get_next_pc_raw (frame, pc);
4853 if (nextpc == MAKE_THUMB_ADDR (pc))
4854 error (_("Infinite loop detected"));
4858 nextpc = arm_get_next_pc_raw (frame, pc);
4860 error (_("Infinite loop detected"));
4866 /* Like insert_single_step_breakpoint, but make sure we use a breakpoint
4867 of the appropriate mode (as encoded in the PC value), even if this
4868 differs from what would be expected according to the symbol tables. */
4871 arm_insert_single_step_breakpoint (struct gdbarch *gdbarch,
4872 struct address_space *aspace,
4875 struct cleanup *old_chain
4876 = make_cleanup_restore_integer (&arm_override_mode);
4878 arm_override_mode = IS_THUMB_ADDR (pc);
4879 pc = gdbarch_addr_bits_remove (gdbarch, pc);
4881 insert_single_step_breakpoint (gdbarch, aspace, pc);
4883 do_cleanups (old_chain);
4886 /* Checks for an atomic sequence of instructions beginning with a LDREX{,B,H,D}
4887 instruction and ending with a STREX{,B,H,D} instruction. If such a sequence
4888 is found, attempt to step through it.  A breakpoint is placed at the end of the sequence.  */
4892 thumb_deal_with_atomic_sequence_raw (struct frame_info *frame)
4894 struct gdbarch *gdbarch = get_frame_arch (frame);
4895 struct address_space *aspace = get_frame_address_space (frame);
4896 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4897 CORE_ADDR pc = get_frame_pc (frame);
4898 CORE_ADDR breaks[2] = {-1, -1};
4900 unsigned short insn1, insn2;
4903 int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed). */
4904 const int atomic_sequence_length = 16; /* Instruction sequence length. */
4905 ULONGEST status, itstate;
4907 /* We currently do not support atomic sequences within an IT block. */
4908 status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
4909 itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);
4913 /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction. */
4914 insn1 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
4916 if (thumb_insn_size (insn1) != 4)
4919 insn2 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
4921 if (!((insn1 & 0xfff0) == 0xe850
4922 || ((insn1 & 0xfff0) == 0xe8d0 && (insn2 & 0x00c0) == 0x0040)))
4925 /* Assume that no atomic sequence is longer than "atomic_sequence_length" instructions.  */
4927 for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
4929 insn1 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
4932 if (thumb_insn_size (insn1) != 4)
4934 /* Assume that there is at most one conditional branch in the
4935 atomic sequence. If a conditional branch is found, put a
4936 breakpoint in its destination address. */
4937 if ((insn1 & 0xf000) == 0xd000 && bits (insn1, 8, 11) != 0x0f)
4939 if (last_breakpoint > 0)
4940 return 0; /* More than one conditional branch found,
4941 fall back to the standard code.  */
4943 breaks[1] = loc + 2 + (sbits (insn1, 0, 7) << 1);
4947 /* We do not support atomic sequences that use any *other*
4948 instructions but conditional branches to change the PC.
4949 Fall back to standard code to avoid losing control of execution.  */
4951 else if (thumb_instruction_changes_pc (insn1))
4956 insn2 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
4959 /* Assume that there is at most one conditional branch in the
4960 atomic sequence. If a conditional branch is found, put a
4961 breakpoint in its destination address. */
4962 if ((insn1 & 0xf800) == 0xf000
4963 && (insn2 & 0xd000) == 0x8000
4964 && (insn1 & 0x0380) != 0x0380)
4966 int sign, j1, j2, imm1, imm2;
4967 unsigned int offset;
4969 sign = sbits (insn1, 10, 10);
4970 imm1 = bits (insn1, 0, 5);
4971 imm2 = bits (insn2, 0, 10);
4972 j1 = bit (insn2, 13);
4973 j2 = bit (insn2, 11);
4975 offset = (sign << 20) + (j2 << 19) + (j1 << 18);
4976 offset += (imm1 << 12) + (imm2 << 1);
4978 if (last_breakpoint > 0)
4979 return 0; /* More than one conditional branch found,
4980 fall back to the standard code.  */
4982 breaks[1] = loc + offset;
4986 /* We do not support atomic sequences that use any *other*
4987 instructions but conditional branches to change the PC.
4988 Fall back to standard code to avoid losing control of execution.  */
4990 else if (thumb2_instruction_changes_pc (insn1, insn2))
4993 /* If we find a strex{,b,h,d}, we're done. */
4994 if ((insn1 & 0xfff0) == 0xe840
4995 || ((insn1 & 0xfff0) == 0xe8c0 && (insn2 & 0x00c0) == 0x0040))
5000 /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence. */
5001 if (insn_count == atomic_sequence_length)
5004 /* Insert a breakpoint right after the end of the atomic sequence. */
5007 /* Check for duplicated breakpoints. Check also for a breakpoint
5008 placed (branch instruction's destination) anywhere in sequence. */
5010 && (breaks[1] == breaks[0]
5011 || (breaks[1] >= pc && breaks[1] < loc)))
5012 last_breakpoint = 0;
5014 /* Effectively inserts the breakpoints. */
5015 for (index = 0; index <= last_breakpoint; index++)
5016 arm_insert_single_step_breakpoint (gdbarch, aspace,
5017 MAKE_THUMB_ADDR (breaks[index]));
5023 arm_deal_with_atomic_sequence_raw (struct frame_info *frame)
5025 struct gdbarch *gdbarch = get_frame_arch (frame);
5026 struct address_space *aspace = get_frame_address_space (frame);
5027 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
5028 CORE_ADDR pc = get_frame_pc (frame);
5029 CORE_ADDR breaks[2] = {-1, -1};
5034 int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed). */
5035 const int atomic_sequence_length = 16; /* Instruction sequence length. */
5037 /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction.
5038 Note that we do not currently support conditionally executed atomic instructions.  */
5040 insn = read_memory_unsigned_integer (loc, 4, byte_order_for_code);
5042 if ((insn & 0xff9000f0) != 0xe1900090)
5045 /* Assume that no atomic sequence is longer than "atomic_sequence_length" instructions.  */
5047 for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
5049 insn = read_memory_unsigned_integer (loc, 4, byte_order_for_code);
5052 /* Assume that there is at most one conditional branch in the atomic
5053 sequence. If a conditional branch is found, put a breakpoint in
5054 its destination address. */
5055 if (bits (insn, 24, 27) == 0xa)
5057 if (last_breakpoint > 0)
5058 return 0; /* More than one conditional branch found, fall back
5059 to the standard single-step code. */
5061 breaks[1] = BranchDest (loc - 4, insn);
5065 /* We do not support atomic sequences that use any *other* instructions
5066 but conditional branches to change the PC. Fall back to standard
5067 code to avoid losing control of execution. */
5068 else if (arm_instruction_changes_pc (insn))
5071 /* If we find a strex{,b,h,d}, we're done. */
5072 if ((insn & 0xff9000f0) == 0xe1800090)
5076 /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence. */
5077 if (insn_count == atomic_sequence_length)
5080 /* Insert a breakpoint right after the end of the atomic sequence. */
5083 /* Check for duplicated breakpoints. Check also for a breakpoint
5084 placed (branch instruction's destination) anywhere in sequence. */
5086 && (breaks[1] == breaks[0]
5087 || (breaks[1] >= pc && breaks[1] < loc)))
5088 last_breakpoint = 0;
5090 /* Effectively inserts the breakpoints. */
5091 for (index = 0; index <= last_breakpoint; index++)
5092 arm_insert_single_step_breakpoint (gdbarch, aspace, breaks[index]);
5098 arm_deal_with_atomic_sequence (struct frame_info *frame)
5100 if (arm_frame_is_thumb (frame))
5101 return thumb_deal_with_atomic_sequence_raw (frame);
5103 return arm_deal_with_atomic_sequence_raw (frame);
5106 /* single_step() is called just before we want to resume the inferior,
5107 if we want to single-step it but there is no hardware or kernel
5108 single-step support. We find the target of the coming instruction
5109 and breakpoint it. */
5112 arm_software_single_step (struct frame_info *frame)
5114 struct gdbarch *gdbarch = get_frame_arch (frame);
5115 struct address_space *aspace = get_frame_address_space (frame);
5118 if (arm_deal_with_atomic_sequence (frame))
5121 next_pc = arm_get_next_pc (frame, get_frame_pc (frame));
5122 arm_insert_single_step_breakpoint (gdbarch, aspace, next_pc);
5127 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
5128 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
5129 NULL if an error occurs. BUF is freed. */
5132 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
5133 int old_len, int new_len)
5136 int bytes_to_read = new_len - old_len;
5138 new_buf = xmalloc (new_len);
5139 memcpy (new_buf + bytes_to_read, buf, old_len);
5141 if (target_read_memory (endaddr - new_len, new_buf, bytes_to_read) != 0)
5149 /* An IT block is at most the 2-byte IT instruction followed by
5150 four 4-byte instructions. The furthest back we must search to
5151 find an IT block that affects the current instruction is thus
5152 2 + 3 * 4 == 14 bytes. */
5153 #define MAX_IT_BLOCK_PREFIX 14
5155 /* Use a quick scan if there are more than this many bytes of instructions.  */
5157 #define IT_SCAN_THRESHOLD 32
5159 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
5160 A breakpoint in an IT block may not be hit, depending on the condition flags.  */
5163 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
5167 CORE_ADDR boundary, func_start;
5169 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
5170 int i, any, last_it, last_it_count;
5172 /* If we are using BKPT breakpoints, none of this is necessary. */
5173 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
5176 /* ARM mode does not have this problem. */
5177 if (!arm_pc_is_thumb (gdbarch, bpaddr))
5180 /* We are setting a breakpoint in Thumb code that could potentially
5181 contain an IT block. The first step is to find how much Thumb
5182 code there is; we do not need to read outside of known Thumb code.  */
5184 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
5186 /* Thumb-2 code must have mapping symbols to have a chance. */
5189 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
5191 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
5192 && func_start > boundary)
5193 boundary = func_start;
5195 /* Search for a candidate IT instruction. We have to do some fancy
5196 footwork to distinguish a real IT instruction from the second
5197 half of a 32-bit instruction, but there is no need for that if
5198 there's no candidate. */
5199 buf_len = min (bpaddr - boundary, MAX_IT_BLOCK_PREFIX);
5201 /* No room for an IT instruction. */
5204 buf = xmalloc (buf_len);
5205 if (target_read_memory (bpaddr - buf_len, buf, buf_len) != 0)
5208 for (i = 0; i < buf_len; i += 2)
5210 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5211 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
5223 /* OK, the code bytes before this instruction contain at least one
5224 halfword which resembles an IT instruction. We know that it's
5225 Thumb code, but there are still two possibilities. Either the
5226 halfword really is an IT instruction, or it is the second half of
5227 a 32-bit Thumb instruction. The only way we can tell is to
5228 scan forwards from a known instruction boundary. */
5229 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
5233 /* There's a lot of code before this instruction. Start with an
5234 optimistic search; it's easy to recognize halfwords that cannot
5235 be the start of a 32-bit instruction, and use that to
5236 lock on to the instruction boundaries.  */
5237 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
5240 buf_len = IT_SCAN_THRESHOLD;
5243 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
5245 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5246 if (thumb_insn_size (inst1) == 2)
5253 /* At this point, if DEFINITE, BUF[I] is the first place we
5254 are sure that we know the instruction boundaries, and it is far
5255 enough from BPADDR that we could not miss an IT instruction
5256 affecting BPADDR.  If ! DEFINITE, give up - start from a known boundary.  */
5260 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
5264 buf_len = bpaddr - boundary;
5270 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
5273 buf_len = bpaddr - boundary;
5277 /* Scan forwards. Find the last IT instruction before BPADDR. */
5282 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5284 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
5289 else if (inst1 & 0x0002)
5291 else if (inst1 & 0x0004)
5296 i += thumb_insn_size (inst1);
5302 /* There wasn't really an IT instruction after all. */
5305 if (last_it_count < 1)
5306 /* It was too far away. */
5309 /* This really is a trouble spot.  Move the breakpoint to the IT instruction.  */
5311 return bpaddr - buf_len + last_it;
5314 /* ARM displaced stepping support.
5316 Generally ARM displaced stepping works as follows:
5318 1. When an instruction is to be single-stepped, it is first decoded by
5319 arm_process_displaced_insn (called from arm_displaced_step_copy_insn).
5320 Depending on the type of instruction, it is then copied to a scratch
5321 location, possibly in a modified form. The copy_* set of functions
5322 performs such modification, as necessary. A breakpoint is placed after
5323 the modified instruction in the scratch space to return control to GDB.
5324 Note in particular that instructions which modify the PC will no longer
5325 do so after modification.
5327 2. The instruction is single-stepped, by setting the PC to the scratch
5328 location address, and resuming.  Control returns to GDB when the breakpoint is hit.
5331 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
5332 function used for the current instruction. This function's job is to
5333 put the CPU/memory state back to what it would have been if the
5334 instruction had been executed unmodified in its original location. */
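/* As a concrete example, a conditional "BL<cond> <label>" is copied into
   the scratch area as a plain NOP (see install_b_bl_blx below), while the
   original condition, link flag and destination are recorded in the
   closure.  After the NOP has been single-stepped, cleanup_branch checks
   the recorded condition and, if it holds, writes LR (for BL) and then
   the PC with the recorded destination, just as the unmodified
   instruction would have done at its original address.  */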
5336 /* NOP instruction (mov r0, r0). */
5337 #define ARM_NOP 0xe1a00000
5338 #define THUMB_NOP 0x4600
5340 /* Helper for register reads for displaced stepping. In particular, this
5341 returns the PC as it would be seen by the instruction at its original location.  */
5345 displaced_read_reg (struct regcache *regs, struct displaced_step_closure *dsc,
5349 CORE_ADDR from = dsc->insn_addr;
5351 if (regno == ARM_PC_REGNUM)
5353 /* Compute pipeline offset:
5354 - When executing an ARM instruction, PC reads as the address of the
5355 current instruction plus 8.
5356 - When executing a Thumb instruction, PC reads as the address of the
5357 current instruction plus 4. */
5364 if (debug_displaced)
5365 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
5366 (unsigned long) from);
5367 return (ULONGEST) from;
5371 regcache_cooked_read_unsigned (regs, regno, &ret);
5372 if (debug_displaced)
5373 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
5374 regno, (unsigned long) ret);
5380 displaced_in_arm_mode (struct regcache *regs)
5383 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
5385 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5387 return (ps & t_bit) == 0;
5390 /* Write to the PC as from a branch instruction. */
5393 branch_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5397 /* Note: If bits 0/1 are set, this branch would be unpredictable for
5398 architecture versions < 6. */
5399 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5400 val & ~(ULONGEST) 0x3);
5402 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5403 val & ~(ULONGEST) 0x1);
5406 /* Write to the PC as from a branch-exchange instruction. */
5409 bx_write_pc (struct regcache *regs, ULONGEST val)
5412 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
5414 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5418 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
5419 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
5421 else if ((val & 2) == 0)
5423 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5424 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
5428 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
5429 mode, align dest to 4 bytes). */
5430 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
5431 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5432 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
5436 /* Write to the PC as if from a load instruction. */
5439 load_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5442 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
5443 bx_write_pc (regs, val);
5445 branch_write_pc (regs, dsc, val);
5448 /* Write to the PC as if from an ALU instruction. */
5451 alu_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5454 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
5455 bx_write_pc (regs, val);
5457 branch_write_pc (regs, dsc, val);
5460 /* Helper for writing to registers for displaced stepping. Writing to the PC
5461 has varying effects depending on the instruction which does the write:
5462 this is controlled by the WRITE_PC argument. */
5465 displaced_write_reg (struct regcache *regs, struct displaced_step_closure *dsc,
5466 int regno, ULONGEST val, enum pc_write_style write_pc)
5468 if (regno == ARM_PC_REGNUM)
5470 if (debug_displaced)
5471 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
5472 (unsigned long) val);
5475 case BRANCH_WRITE_PC:
5476 branch_write_pc (regs, dsc, val);
5480 bx_write_pc (regs, val);
5484 load_write_pc (regs, dsc, val);
5488 alu_write_pc (regs, dsc, val);
5491 case CANNOT_WRITE_PC:
5492 warning (_("Instruction wrote to PC in an unexpected way when "
5493 "single-stepping"));
5497 internal_error (__FILE__, __LINE__,
5498 _("Invalid argument to displaced_write_reg"));
5501 dsc->wrote_to_pc = 1;
5505 if (debug_displaced)
5506 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
5507 regno, (unsigned long) val);
5508 regcache_cooked_write_unsigned (regs, regno, val);
5512 /* This function is used to concisely determine if an instruction INSN
5513 references PC. Register fields of interest in INSN should have the
5514 corresponding fields of BITMASK set to 0b1111. The function
5515 returns 1 if any of these fields in INSN reference the PC
5516 (also 0b1111, r15), else it returns 0. */
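/* For example, for an ARM data-processing instruction with Rn in bits
   16-19 and Rd in bits 12-15, insn_references_pc (insn, 0x000ff000ul)
   returns 1 exactly when either of those fields is 0xf, i.e. the PC.  */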
5519 insn_references_pc (uint32_t insn, uint32_t bitmask)
5521 uint32_t lowbit = 1;
5523 while (bitmask != 0)
5527 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
5533 mask = lowbit * 0xf;
5535 if ((insn & mask) == mask)
5544 /* The simplest copy function. Many instructions have the same effect no
5545 matter what address they are executed at: in those cases, use this. */
5548 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
5549 const char *iname, struct displaced_step_closure *dsc)
5551 if (debug_displaced)
5552 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
5553 "opcode/class '%s' unmodified\n", (unsigned long) insn,
5556 dsc->modinsn[0] = insn;
5562 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
5563 uint16_t insn2, const char *iname,
5564 struct displaced_step_closure *dsc)
5566 if (debug_displaced)
5567 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
5568 "opcode/class '%s' unmodified\n", insn1, insn2,
5571 dsc->modinsn[0] = insn1;
5572 dsc->modinsn[1] = insn2;
5578 /* Copy a 16-bit Thumb (Thumb and 16-bit Thumb-2) instruction without any modification.  */
5581 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, unsigned int insn,
5583 struct displaced_step_closure *dsc)
5585 if (debug_displaced)
5586 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
5587 "opcode/class '%s' unmodified\n", insn,
5590 dsc->modinsn[0] = insn;
5595 /* Preload instructions with immediate offset. */
5598 cleanup_preload (struct gdbarch *gdbarch,
5599 struct regcache *regs, struct displaced_step_closure *dsc)
5601 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5602 if (!dsc->u.preload.immed)
5603 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5607 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
5608 struct displaced_step_closure *dsc, unsigned int rn)
5611 /* Preload instructions:
5613 {pli/pld} [rn, #+/-imm]
5615 -> {pli/pld} [r0, #+/-imm].  */
5617 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5618 rn_val = displaced_read_reg (regs, dsc, rn);
5619 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5620 dsc->u.preload.immed = 1;
5622 dsc->cleanup = &cleanup_preload;
5626 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5627 struct displaced_step_closure *dsc)
5629 unsigned int rn = bits (insn, 16, 19);
5631 if (!insn_references_pc (insn, 0x000f0000ul))
5632 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
5634 if (debug_displaced)
5635 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
5636 (unsigned long) insn);
5638 dsc->modinsn[0] = insn & 0xfff0ffff;
5640 install_preload (gdbarch, regs, dsc, rn);
5646 thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
5647 struct regcache *regs, struct displaced_step_closure *dsc)
5649 unsigned int rn = bits (insn1, 0, 3);
5650 unsigned int u_bit = bit (insn1, 7);
5651 int imm12 = bits (insn2, 0, 11);
5654 if (rn != ARM_PC_REGNUM)
5655 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
5657 /* PC is only allowed to be used in PLI (immediate, literal) Encoding T3, and
5658 PLD (literal) Encoding T1. */
5659 if (debug_displaced)
5660 fprintf_unfiltered (gdb_stdlog,
5661 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
5662 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
5668 /* Rewrite instruction {pli/pld} PC imm12 into:
5669 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
5673 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
5675 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5676 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5678 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5680 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
5681 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
5682 dsc->u.preload.immed = 0;
5684 /* {pli/pld} [r0, r1] */
5685 dsc->modinsn[0] = insn1 & 0xfff0;
5686 dsc->modinsn[1] = 0xf001;
5689 dsc->cleanup = &cleanup_preload;
5693 /* Preload instructions with register offset. */
5696 install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
5697 struct displaced_step_closure *dsc, unsigned int rn,
5700 ULONGEST rn_val, rm_val;
5702 /* Preload register-offset instructions:
5704 {pli/pld} [rn, rm {, shift}]
5706 -> {pli/pld} [r0, r1 {, shift}].  */
5708 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5709 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5710 rn_val = displaced_read_reg (regs, dsc, rn);
5711 rm_val = displaced_read_reg (regs, dsc, rm);
5712 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5713 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
5714 dsc->u.preload.immed = 0;
5716 dsc->cleanup = &cleanup_preload;
5720 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
5721 struct regcache *regs,
5722 struct displaced_step_closure *dsc)
5724 unsigned int rn = bits (insn, 16, 19);
5725 unsigned int rm = bits (insn, 0, 3);
5728 if (!insn_references_pc (insn, 0x000f000ful))
5729 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
5731 if (debug_displaced)
5732 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
5733 (unsigned long) insn);
5735 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
5737 install_preload_reg (gdbarch, regs, dsc, rn, rm);
5741 /* Copy/cleanup coprocessor load and store instructions. */
5744 cleanup_copro_load_store (struct gdbarch *gdbarch,
5745 struct regcache *regs,
5746 struct displaced_step_closure *dsc)
5748 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
5750 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5752 if (dsc->u.ldst.writeback)
5753 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
5757 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5758 struct displaced_step_closure *dsc,
5759 int writeback, unsigned int rn)
5763 /* Coprocessor load/store instructions:
5765 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
5767 -> {stc/stc2} [r0, #+/-imm].
5769 ldc/ldc2 are handled identically. */
5771 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5772 rn_val = displaced_read_reg (regs, dsc, rn);
5773 /* PC should be 4-byte aligned. */
5774 rn_val = rn_val & 0xfffffffc;
5775 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5777 dsc->u.ldst.writeback = writeback;
5778 dsc->u.ldst.rn = rn;
5780 dsc->cleanup = &cleanup_copro_load_store;
5784 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
5785 struct regcache *regs,
5786 struct displaced_step_closure *dsc)
5788 unsigned int rn = bits (insn, 16, 19);
5790 if (!insn_references_pc (insn, 0x000f0000ul))
5791 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
5793 if (debug_displaced)
5794 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
5795 "load/store insn %.8lx\n", (unsigned long) insn);
5797 dsc->modinsn[0] = insn & 0xfff0ffff;
5799 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
5805 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
5806 uint16_t insn2, struct regcache *regs,
5807 struct displaced_step_closure *dsc)
5809 unsigned int rn = bits (insn1, 0, 3);
5811 if (rn != ARM_PC_REGNUM)
5812 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
5813 "copro load/store", dsc);
5815 if (debug_displaced)
5816 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
5817 "load/store insn %.4x%.4x\n", insn1, insn2);
5819 dsc->modinsn[0] = insn1 & 0xfff0;
5820 dsc->modinsn[1] = insn2;
5823 /* This function is called for copying an LDC/LDC2/VLDR instruction, which
5824 doesn't support writeback, so pass 0.  */
5825 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
5830 /* Clean up branch instructions (actually perform the branch, by setting the PC).  */
5834 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
5835 struct displaced_step_closure *dsc)
5837 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5838 int branch_taken = condition_true (dsc->u.branch.cond, status);
5839 enum pc_write_style write_pc = dsc->u.branch.exchange
5840 ? BX_WRITE_PC : BRANCH_WRITE_PC;
5845 if (dsc->u.branch.link)
5847 /* The value of LR should be the next insn after the current one.  In
5848 order not to confuse the logic handling a later `bx lr' insn, if the
5849 current insn mode is Thumb, bit 0 of the LR value should be set to 1.  */
5850 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
5853 next_insn_addr |= 0x1;
5855 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
5859 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
5862 /* Copy B/BL/BLX instructions with immediate destinations. */
5865 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
5866 struct displaced_step_closure *dsc,
5867 unsigned int cond, int exchange, int link, long offset)
5869 /* Implement "BL<cond> <label>" as:
5871 Preparation: cond <- instruction condition
5872 Insn: mov r0, r0 (nop)
5873 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
5875 B<cond> similar, but don't set r14 in cleanup. */
5877 dsc->u.branch.cond = cond;
5878 dsc->u.branch.link = link;
5879 dsc->u.branch.exchange = exchange;
5881 dsc->u.branch.dest = dsc->insn_addr;
5882 if (link && exchange)
5883 /* For BLX, the offset is computed from Align (PC, 4).  */
5884 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
5887 dsc->u.branch.dest += 4 + offset;
5889 dsc->u.branch.dest += 8 + offset;
5891 dsc->cleanup = &cleanup_branch;
5894 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
5895 struct regcache *regs, struct displaced_step_closure *dsc)
5897 unsigned int cond = bits (insn, 28, 31);
5898 int exchange = (cond == 0xf);
5899 int link = exchange || bit (insn, 24);
5902 if (debug_displaced)
5903 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
5904 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
5905 (unsigned long) insn);
5907 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
5908 then arrange the switch into Thumb mode. */
5909 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
5911 offset = bits (insn, 0, 23) << 2;
5913 if (bit (offset, 25))
5914 offset = offset | ~0x3ffffff;
5916 dsc->modinsn[0] = ARM_NOP;
5918 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
5923 thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
5924 uint16_t insn2, struct regcache *regs,
5925 struct displaced_step_closure *dsc)
5927 int link = bit (insn2, 14);
5928 int exchange = link && !bit (insn2, 12);
5931 int j1 = bit (insn2, 13);
5932 int j2 = bit (insn2, 11);
5933 int s = sbits (insn1, 10, 10);
5934 int i1 = !(j1 ^ bit (insn1, 10));
5935 int i2 = !(j2 ^ bit (insn1, 10));
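/* bit (insn1, 10) is S; as in the BL/BLX encodings T1/T2,
   I1 = NOT (J1 EOR S) and I2 = NOT (J2 EOR S).  */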
5937 if (!link && !exchange) /* B */
5939 offset = (bits (insn2, 0, 10) << 1);
5940 if (bit (insn2, 12)) /* Encoding T4 */
5942 offset |= (bits (insn1, 0, 9) << 12)
5948 else /* Encoding T3 */
5950 offset |= (bits (insn1, 0, 5) << 12)
5954 cond = bits (insn1, 6, 9);
5959 offset = (bits (insn1, 0, 9) << 12);
5960 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
5961 offset |= exchange ?
5962 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
5965 if (debug_displaced)
5966 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
5967 "%.4x %.4x with offset %.8lx\n",
5968 link ? (exchange) ? "blx" : "bl" : "b",
5969 insn1, insn2, offset);
5971 dsc->modinsn[0] = THUMB_NOP;
5973 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
5977 /* Copy B Thumb instructions. */
5979 thumb_copy_b (struct gdbarch *gdbarch, unsigned short insn,
5980 struct displaced_step_closure *dsc)
5982 unsigned int cond = 0;
5984 unsigned short bit_12_15 = bits (insn, 12, 15);
5985 CORE_ADDR from = dsc->insn_addr;
5987 if (bit_12_15 == 0xd)
5989 /* offset = SignExtend (imm8:0, 32) */
5990 offset = sbits ((insn << 1), 0, 8);
5991 cond = bits (insn, 8, 11);
5993 else if (bit_12_15 == 0xe) /* Encoding T2 */
5995 offset = sbits ((insn << 1), 0, 11);
5999 if (debug_displaced)
6000 fprintf_unfiltered (gdb_stdlog,
6001 "displaced: copying b immediate insn %.4x "
6002 "with offset %d\n", insn, offset);
6004 dsc->u.branch.cond = cond;
6005 dsc->u.branch.link = 0;
6006 dsc->u.branch.exchange = 0;
6007 dsc->u.branch.dest = from + 4 + offset;
6009 dsc->modinsn[0] = THUMB_NOP;
6011 dsc->cleanup = &cleanup_branch;
6016 /* Copy BX/BLX with register-specified destinations. */
6019 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
6020 struct displaced_step_closure *dsc, int link,
6021 unsigned int cond, unsigned int rm)
6023 /* Implement "{BX,BLX}<cond> <reg>" as:
6025 Preparation: cond <- instruction condition
6026 Insn: mov r0, r0 (nop)
6027 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
6029 Don't set r14 in cleanup for BX. */
6031 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
6033 dsc->u.branch.cond = cond;
6034 dsc->u.branch.link = link;
6036 dsc->u.branch.exchange = 1;
6038 dsc->cleanup = &cleanup_branch;
6042 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
6043 struct regcache *regs, struct displaced_step_closure *dsc)
6045 unsigned int cond = bits (insn, 28, 31);
6048 int link = bit (insn, 5);
6049 unsigned int rm = bits (insn, 0, 3);
6051 if (debug_displaced)
6052 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
6053 (unsigned long) insn);
6055 dsc->modinsn[0] = ARM_NOP;
6057 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
6062 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
6063 struct regcache *regs,
6064 struct displaced_step_closure *dsc)
6066 int link = bit (insn, 7);
6067 unsigned int rm = bits (insn, 3, 6);
6069 if (debug_displaced)
6070 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
6071 (unsigned short) insn);
6073 dsc->modinsn[0] = THUMB_NOP;
6075 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
6081 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
6084 cleanup_alu_imm (struct gdbarch *gdbarch,
6085 struct regcache *regs, struct displaced_step_closure *dsc)
6087 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
6088 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6089 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6090 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6094 arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
6095 struct displaced_step_closure *dsc)
6097 unsigned int rn = bits (insn, 16, 19);
6098 unsigned int rd = bits (insn, 12, 15);
6099 unsigned int op = bits (insn, 21, 24);
6100 int is_mov = (op == 0xd);
6101 ULONGEST rd_val, rn_val;
6103 if (!insn_references_pc (insn, 0x000ff000ul))
6104 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
6106 if (debug_displaced)
6107 fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
6108 "%.8lx\n", is_mov ? "move" : "ALU",
6109 (unsigned long) insn);
6111 /* Instruction is of form:
6113 <op><cond> rd, [rn,] #imm
6117 Preparation: tmp1, tmp2 <- r0, r1;
6119 Insn: <op><cond> r0, r1, #imm
6120 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
6123 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6124 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6125 rn_val = displaced_read_reg (regs, dsc, rn);
6126 rd_val = displaced_read_reg (regs, dsc, rd);
6127 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6128 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6132 dsc->modinsn[0] = insn & 0xfff00fff;
6134 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
6136 dsc->cleanup = &cleanup_alu_imm;
6142 thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
6143 uint16_t insn2, struct regcache *regs,
6144 struct displaced_step_closure *dsc)
6146 unsigned int op = bits (insn1, 5, 8);
6147 unsigned int rn, rm, rd;
6148 ULONGEST rd_val, rn_val;
6150 rn = bits (insn1, 0, 3); /* Rn */
6151 rm = bits (insn2, 0, 3); /* Rm */
6152 rd = bits (insn2, 8, 11); /* Rd */
6154 /* This routine is only called for the MOV instruction.  */
6155 gdb_assert (op == 0x2 && rn == 0xf);
6157 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
6158 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
6160 if (debug_displaced)
6161 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
6162 "ALU", insn1, insn2);
6164 /* Instruction is of form:
6166 <op><cond> rd, [rn,] #imm
6170 Preparation: tmp1, tmp2 <- r0, r1;
6172 Insn: <op><cond> r0, r1, #imm
6173 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
6176 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6177 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6178 rn_val = displaced_read_reg (regs, dsc, rn);
6179 rd_val = displaced_read_reg (regs, dsc, rd);
6180 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6181 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6184 dsc->modinsn[0] = insn1;
6185 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
6188 dsc->cleanup = &cleanup_alu_imm;
6193 /* Copy/cleanup arithmetic/logic insns with register RHS. */
6196 cleanup_alu_reg (struct gdbarch *gdbarch,
6197 struct regcache *regs, struct displaced_step_closure *dsc)
6202 rd_val = displaced_read_reg (regs, dsc, 0);
6204 for (i = 0; i < 3; i++)
6205 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6207 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6211 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
6212 struct displaced_step_closure *dsc,
6213 unsigned int rd, unsigned int rn, unsigned int rm)
6215 ULONGEST rd_val, rn_val, rm_val;
6217 /* Instruction is of form:
6219 <op><cond> rd, [rn,] rm [, <shift>]
6223 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
6224 r0, r1, r2 <- rd, rn, rm
6225 Insn: <op><cond> r0, r1, r2 [, <shift>]
6226 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
6229 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6230 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6231 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6232 rd_val = displaced_read_reg (regs, dsc, rd);
6233 rn_val = displaced_read_reg (regs, dsc, rn);
6234 rm_val = displaced_read_reg (regs, dsc, rm);
6235 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6236 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6237 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6240 dsc->cleanup = &cleanup_alu_reg;
6244 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
6245 struct displaced_step_closure *dsc)
6247 unsigned int op = bits (insn, 21, 24);
6248 int is_mov = (op == 0xd);
6250 if (!insn_references_pc (insn, 0x000ff00ful))
6251 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
6253 if (debug_displaced)
6254 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
6255 is_mov ? "move" : "ALU", (unsigned long) insn);
6258 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
6260 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
6262 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
6268 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
6269 struct regcache *regs,
6270 struct displaced_step_closure *dsc)
6272 unsigned rn, rm, rd;
6274 rd = bits (insn, 3, 6);
6275 rn = (bit (insn, 7) << 3) | bits (insn, 0, 2);
6278 if (rd != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
6279 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
6281 if (debug_displaced)
6282 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x\n",
6283 "ALU", (unsigned short) insn);
6285 dsc->modinsn[0] = ((insn & 0xff00) | 0x08);
6287 install_alu_reg (gdbarch, regs, dsc, rd, rn, rm);
6292 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
6295 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
6296 struct regcache *regs,
6297 struct displaced_step_closure *dsc)
6299 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
6302 for (i = 0; i < 4; i++)
6303 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6305 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6309 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
6310 struct displaced_step_closure *dsc,
6311 unsigned int rd, unsigned int rn, unsigned int rm,
6315 ULONGEST rd_val, rn_val, rm_val, rs_val;
6317 /* Instruction is of form:
6319 <op><cond> rd, [rn,] rm, <shift> rs
6323 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
6324 r0, r1, r2, r3 <- rd, rn, rm, rs
6325 Insn: <op><cond> r0, r1, r2, <shift> r3
6327 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
6331 for (i = 0; i < 4; i++)
6332 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6334 rd_val = displaced_read_reg (regs, dsc, rd);
6335 rn_val = displaced_read_reg (regs, dsc, rn);
6336 rm_val = displaced_read_reg (regs, dsc, rm);
6337 rs_val = displaced_read_reg (regs, dsc, rs);
6338 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6339 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6340 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6341 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
6343 dsc->cleanup = &cleanup_alu_shifted_reg;
6347 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
6348 struct regcache *regs,
6349 struct displaced_step_closure *dsc)
6351 unsigned int op = bits (insn, 21, 24);
6352 int is_mov = (op == 0xd);
6353 unsigned int rd, rn, rm, rs;
6355 if (!insn_references_pc (insn, 0x000fff0ful))
6356 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
6358 if (debug_displaced)
6359 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
6360 "%.8lx\n", is_mov ? "move" : "ALU",
6361 (unsigned long) insn);
6363 rn = bits (insn, 16, 19);
6364 rm = bits (insn, 0, 3);
6365 rs = bits (insn, 8, 11);
6366 rd = bits (insn, 12, 15);
6369 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
6371 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
6373 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
6378 /* Clean up load instructions. */
6381 cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
6382 struct displaced_step_closure *dsc)
6384 ULONGEST rt_val, rt_val2 = 0, rn_val;
6386 rt_val = displaced_read_reg (regs, dsc, 0);
6387 if (dsc->u.ldst.xfersize == 8)
6388 rt_val2 = displaced_read_reg (regs, dsc, 1);
6389 rn_val = displaced_read_reg (regs, dsc, 2);
6391 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6392 if (dsc->u.ldst.xfersize > 4)
6393 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6394 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
6395 if (!dsc->u.ldst.immed)
6396 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
6398 /* Handle register writeback. */
6399 if (dsc->u.ldst.writeback)
6400 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
6401 /* Put result in right place. */
6402 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
6403 if (dsc->u.ldst.xfersize == 8)
6404 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
6407 /* Clean up store instructions. */
6410 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
6411 struct displaced_step_closure *dsc)
6413 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
6415 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6416 if (dsc->u.ldst.xfersize > 4)
6417 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6418 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
6419 if (!dsc->u.ldst.immed)
6420 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
6421 if (!dsc->u.ldst.restore_r4)
6422 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
6425 if (dsc->u.ldst.writeback)
6426 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
6429 /* Copy "extra" load/store instructions. These are halfword/doubleword
6430 transfers, which have a different encoding from byte/word transfers.  */
6433 arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
6434 struct regcache *regs, struct displaced_step_closure *dsc)
6436 unsigned int op1 = bits (insn, 20, 24);
6437 unsigned int op2 = bits (insn, 5, 6);
6438 unsigned int rt = bits (insn, 12, 15);
6439 unsigned int rn = bits (insn, 16, 19);
6440 unsigned int rm = bits (insn, 0, 3);
6441 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
6442 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
6443 int immed = (op1 & 0x4) != 0;
6445 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
6447 if (!insn_references_pc (insn, 0x000ff00ful))
6448 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
6450 if (debug_displaced)
6451 fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
6452 "insn %.8lx\n", unpriveleged ? "unpriveleged " : "",
6453 (unsigned long) insn);
6455 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
6458 internal_error (__FILE__, __LINE__,
6459 _("copy_extra_ld_st: instruction decode error"));
6461 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6462 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6463 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6465 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6467 rt_val = displaced_read_reg (regs, dsc, rt);
6468 if (bytesize[opcode] == 8)
6469 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
6470 rn_val = displaced_read_reg (regs, dsc, rn);
6472 rm_val = displaced_read_reg (regs, dsc, rm);
6474 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
6475 if (bytesize[opcode] == 8)
6476 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
6477 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
6479 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
6482 dsc->u.ldst.xfersize = bytesize[opcode];
6483 dsc->u.ldst.rn = rn;
6484 dsc->u.ldst.immed = immed;
6485 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
6486 dsc->u.ldst.restore_r4 = 0;
6489 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
6491 -> {ldr,str}<width><cond> r0, [r1,] [r2, #imm].  */
6492 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
6494 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
6496 -> {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3].  */
6497 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
6499 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
6504 /* Copy byte/halfword/word loads and stores.  */
6507 install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
6508 struct displaced_step_closure *dsc, int load,
6509 int immed, int writeback, int size, int usermode,
6510 int rt, int rm, int rn)
6512 ULONGEST rt_val, rn_val, rm_val = 0;
6514 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6515 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6517 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6519 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
6521 rt_val = displaced_read_reg (regs, dsc, rt);
6522 rn_val = displaced_read_reg (regs, dsc, rn);
6524 rm_val = displaced_read_reg (regs, dsc, rm);
6526 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
6527 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
6529 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
6531 dsc->u.ldst.xfersize = size;
6532 dsc->u.ldst.rn = rn;
6533 dsc->u.ldst.immed = immed;
6534 dsc->u.ldst.writeback = writeback;
6536 /* To write PC we can do:
6538 Before this sequence of instructions:
6539 r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
6540 r2 is the Rn value got from displaced_read_reg.
6542 Insn1: push {pc} Write address of STR instruction + offset on stack
6543 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
6544 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
6545 = addr(Insn1) + offset - addr(Insn3) - 8 = offset - 16
6547 Insn4: add r4, r4, #8 r4 = offset - 8
6548 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8 = from + offset
6550 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
6552 Otherwise we don't know what value to write for PC, since the offset is
6553 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
6554 of this can be found in Section "Saving from r15" in
6555 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
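/* As a worked example (addresses and the PC-store offset are chosen purely
   for illustration): if the original STR is at from = 0x8000 and this core
   stores PC+12, the original instruction would have stored 0x800c.  In the
   sequence above r0 starts as from + 8 = 0x8008, Insn1-Insn4 leave
   r4 = 12 - 8 = 4, and Insn5 yields r0 = 0x800c, so Insn6 stores exactly the
   value the original instruction would have stored.  */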
6557 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
6562 thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
6563 uint16_t insn2, struct regcache *regs,
6564 struct displaced_step_closure *dsc, int size)
6566 unsigned int u_bit = bit (insn1, 7);
6567 unsigned int rt = bits (insn2, 12, 15);
6568 int imm12 = bits (insn2, 0, 11);
6571 if (debug_displaced)
6572 fprintf_unfiltered (gdb_stdlog,
6573 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
6574 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
6580 /* Rewrite instruction LDR Rt imm12 into:
6582 Prepare: tmp[0] <- r0, tmp[2] <- r2, tmp[3] <- r3, r2 <- pc, r3 <- imm12
6584 Insn: LDR R0, [R2, R3]
6586 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[2], r3 <- tmp[3]. */
6589 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6590 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6591 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6593 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6595 pc_val = pc_val & 0xfffffffc;
6597 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
6598 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
6602 dsc->u.ldst.xfersize = size;
6603 dsc->u.ldst.immed = 0;
6604 dsc->u.ldst.writeback = 0;
6605 dsc->u.ldst.restore_r4 = 0;
6607 /* LDR R0, [R2, R3] */
6608 dsc->modinsn[0] = 0xf852;
6609 dsc->modinsn[1] = 0x3;
6612 dsc->cleanup = &cleanup_load;
6618 thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
6619 uint16_t insn2, struct regcache *regs,
6620 struct displaced_step_closure *dsc,
6621 int writeback, int immed)
6623 unsigned int rt = bits (insn2, 12, 15);
6624 unsigned int rn = bits (insn1, 0, 3);
6625 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
6626 /* In LDR (register), there is also a register Rm, which is not allowed to
6627 be PC, so we don't have to check it. */
6629 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
6630 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
6633 if (debug_displaced)
6634 fprintf_unfiltered (gdb_stdlog,
6635 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
6636 rt, rn, insn1, insn2);
6638 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
6641 dsc->u.ldst.restore_r4 = 0;
6644 /* ldr[b]<cond> rt, [rn, #imm], etc.
6645 ->
6646 ldr[b]<cond> r0, [r2, #imm]. */
6648 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
6649 dsc->modinsn[1] = insn2 & 0x0fff;
6652 /* ldr[b]<cond> rt, [rn, rm], etc.
6653 ->
6654 ldr[b]<cond> r0, [r2, r3]. */
6656 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
6657 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
6667 arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
6668 struct regcache *regs,
6669 struct displaced_step_closure *dsc,
6670 int load, int size, int usermode)
6672 int immed = !bit (insn, 25);
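/* Writeback is requested either by a post-indexed addressing mode (P bit,
   bit 24, clear) or by an explicit writeback request (W bit, bit 21, set)
   in a pre-indexed form.  */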
6673 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
6674 unsigned int rt = bits (insn, 12, 15);
6675 unsigned int rn = bits (insn, 16, 19);
6676 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
6678 if (!insn_references_pc (insn, 0x000ff00ful))
6679 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
6681 if (debug_displaced)
6682 fprintf_unfiltered (gdb_stdlog,
6683 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
6684 load ? (size == 1 ? "ldrb" : "ldr")
6685 : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
6687 (unsigned long) insn);
6689 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
6690 usermode, rt, rm, rn);
6692 if (load || rt != ARM_PC_REGNUM)
6694 dsc->u.ldst.restore_r4 = 0;
6697 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
6698 ->
6699 {ldr,str}[b]<cond> r0, [r2, #imm]. */
6700 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
6702 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
6703 ->
6704 {ldr,str}[b]<cond> r0, [r2, r3]. */
6705 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
6709 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
6710 dsc->u.ldst.restore_r4 = 1;
6711 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
6712 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
6713 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
6714 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
6715 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
6719 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
6721 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
6726 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
6731 /* Cleanup LDM instructions with fully-populated register list. This is an
6732 unfortunate corner case: it's impossible to implement correctly by modifying
6733 the instruction. The issue is as follows: we have an instruction,
6735 ldm rN, {r0-r15}
6737 which we must rewrite to avoid loading PC. A possible solution would be to
6738 do the load in two halves, something like (with suitable cleanup
6739 afterwards):
6741 mov r8, rN
6742 ldm[id][ab] r8!, {r0-r7}
6743 str r7, <temp>
6744 ldm[id][ab] r8, {r7-r14}
6747 but at present there's no suitable place for <temp>, since the scratch space
6748 is overwritten before the cleanup routine is called. For now, we simply
6749 emulate the instruction. */
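/* For instance (illustrative), emulating "ldmia rN, {r0-r15}" means loading
   r0 from [rN], r1 from [rN + 4], and so on, finishing with the PC from
   [rN + 60]; the increment-before and decrement forms simply adjust the
   transfer address before or after each load instead.  */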
6752 cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
6753 struct displaced_step_closure *dsc)
6755 int inc = dsc->u.block.increment;
6756 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
6757 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
6758 uint32_t regmask = dsc->u.block.regmask;
6759 int regno = inc ? 0 : 15;
6760 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
6761 int exception_return = dsc->u.block.load && dsc->u.block.user
6762 && (regmask & 0x8000) != 0;
6763 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6764 int do_transfer = condition_true (dsc->u.block.cond, status);
6765 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
6770 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
6771 sensible we can do here. Complain loudly. */
6772 if (exception_return)
6773 error (_("Cannot single-step exception return"));
6775 /* We don't handle any stores here for now. */
6776 gdb_assert (dsc->u.block.load != 0);
6778 if (debug_displaced)
6779 fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
6780 "%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
6781 dsc->u.block.increment ? "inc" : "dec",
6782 dsc->u.block.before ? "before" : "after");
6789 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
6792 while (regno >= 0 && (regmask & (1 << regno)) == 0)
6795 xfer_addr += bump_before;
6797 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
6798 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
6800 xfer_addr += bump_after;
6802 regmask &= ~(1 << regno);
6805 if (dsc->u.block.writeback)
6806 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
6810 /* Clean up an STM which included the PC in the register list. */
6813 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
6814 struct displaced_step_closure *dsc)
6816 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6817 int store_executed = condition_true (dsc->u.block.cond, status);
6818 CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
6819 CORE_ADDR stm_insn_addr;
6822 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
6824 /* If condition code fails, there's nothing else to do. */
6825 if (!store_executed)
6828 if (dsc->u.block.increment)
6830 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
6832 if (dsc->u.block.before)
6837 pc_stored_at = dsc->u.block.xfer_addr;
6839 if (dsc->u.block.before)
6843 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
6844 stm_insn_addr = dsc->scratch_base;
6845 offset = pc_val - stm_insn_addr;
6847 if (debug_displaced)
6848 fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
6849 "STM instruction\n", offset);
6851 /* Rewrite the stored PC to the proper value for the non-displaced original
6852 instruction. */
6853 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
6854 dsc->insn_addr + offset);
6857 /* Clean up an LDM which includes the PC in the register list. We clumped all
6858 the registers in the transferred list into a contiguous range r0...rX (to
6859 avoid loading PC directly and losing control of the debugged program), so we
6860 must undo that here. */
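/* For example (an illustrative case): "ldm r0, {r1, r3, pc}" is rewritten to
   load r0-r2 out of line; this cleanup then moves r2 -> PC, r1 -> r3 and
   r0 -> r1, and finally restores the remaining clobbered registers (here r0
   and r2) from the temporaries saved before the transfer.  */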
6863 cleanup_block_load_pc (struct gdbarch *gdbarch,
6864 struct regcache *regs,
6865 struct displaced_step_closure *dsc)
6867 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6868 int load_executed = condition_true (dsc->u.block.cond, status);
6869 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
6870 unsigned int regs_loaded = bitcount (mask);
6871 unsigned int num_to_shuffle = regs_loaded, clobbered;
6873 /* The method employed here will fail if the register list is fully populated
6874 (we need to avoid loading PC directly). */
6875 gdb_assert (num_to_shuffle < 16);
6880 clobbered = (1 << num_to_shuffle) - 1;
6882 while (num_to_shuffle > 0)
6884 if ((mask & (1 << write_reg)) != 0)
6886 unsigned int read_reg = num_to_shuffle - 1;
6888 if (read_reg != write_reg)
6890 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
6891 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
6892 if (debug_displaced)
6893 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
6894 "loaded register r%d to r%d\n"), read_reg,
6897 else if (debug_displaced)
6898 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
6899 "r%d already in the right place\n"),
6902 clobbered &= ~(1 << write_reg);
6910 /* Restore any registers we scribbled over. */
6911 for (write_reg = 0; clobbered != 0; write_reg++)
6913 if ((clobbered & (1 << write_reg)) != 0)
6915 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
6917 if (debug_displaced)
6918 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
6919 "clobbered register r%d\n"), write_reg);
6920 clobbered &= ~(1 << write_reg);
6924 /* Perform register writeback manually. */
6925 if (dsc->u.block.writeback)
6927 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
6929 if (dsc->u.block.increment)
6930 new_rn_val += regs_loaded * 4;
6932 new_rn_val -= regs_loaded * 4;
6934 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
6939 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
6940 in user-level code (in particular exception return, ldm rn, {...pc}^). */
6943 arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
6944 struct regcache *regs,
6945 struct displaced_step_closure *dsc)
6947 int load = bit (insn, 20);
6948 int user = bit (insn, 22);
6949 int increment = bit (insn, 23);
6950 int before = bit (insn, 24);
6951 int writeback = bit (insn, 21);
6952 int rn = bits (insn, 16, 19);
6954 /* Block transfers which don't mention PC can be run directly
6955 out-of-line. */
6956 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
6957 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
6959 if (rn == ARM_PC_REGNUM)
6961 warning (_("displaced: Unpredictable LDM or STM with "
6962 "base register r15"));
6963 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
6966 if (debug_displaced)
6967 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6968 "%.8lx\n", (unsigned long) insn);
6970 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
6971 dsc->u.block.rn = rn;
6973 dsc->u.block.load = load;
6974 dsc->u.block.user = user;
6975 dsc->u.block.increment = increment;
6976 dsc->u.block.before = before;
6977 dsc->u.block.writeback = writeback;
6978 dsc->u.block.cond = bits (insn, 28, 31);
6980 dsc->u.block.regmask = insn & 0xffff;
6984 if ((insn & 0xffff) == 0xffff)
6986 /* LDM with a fully-populated register list. This case is
6987 particularly tricky. Implement for now by fully emulating the
6988 instruction (which might not behave perfectly in all cases, but
6989 these instructions should be rare enough for that not to matter
6990 much). */
6991 dsc->modinsn[0] = ARM_NOP;
6993 dsc->cleanup = &cleanup_block_load_all;
6997 /* LDM of a list of registers which includes PC. Implement by
6998 rewriting the list of registers to be transferred into a
6999 contiguous chunk r0...rX before doing the transfer, then shuffling
7000 registers into the correct places in the cleanup routine. */
7001 unsigned int regmask = insn & 0xffff;
7002 unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
7003 unsigned int to = 0, from = 0, i, new_rn;
7005 for (i = 0; i < num_in_list; i++)
7006 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7008 /* Writeback makes things complicated. We need to avoid clobbering
7009 the base register with one of the registers in our modified
7010 register list, but just using a different register can't work in
7011 all cases, e.g.:
7013 ldm r14!, {r0-r13,pc}
7015 which would need to be rewritten as:
7017 ldm rN!, {r0-r14}
7019 but that can't work, because there's no free register for N.
7021 Solve this by turning off the writeback bit, and emulating
7022 writeback manually in the cleanup routine. */
7027 new_regmask = (1 << num_in_list) - 1;
7029 if (debug_displaced)
7030 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
7031 "{..., pc}: original reg list %.4x, modified "
7032 "list %.4x\n"), rn, writeback ? "!" : "",
7033 (int) insn & 0xffff, new_regmask);
7035 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
7037 dsc->cleanup = &cleanup_block_load_pc;
7042 /* STM of a list of registers which includes PC. Run the instruction
7043 as-is, but out of line: this will store the wrong value for the PC,
7044 so we must manually fix up the memory in the cleanup routine.
7045 Doing things this way has the advantage that we can auto-detect
7046 the offset of the PC write (which is architecture-dependent) in
7047 the cleanup routine. */
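/* For instance (values chosen for illustration only): if the out-of-line STM
   runs at scratch address 0x20000 and stores 0x20008 in the PC slot,
   cleanup_block_store_pc detects an offset of 8 and rewrites that slot to
   insn_addr + 8, the value the original STM would have stored.  */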
7048 dsc->modinsn[0] = insn;
7050 dsc->cleanup = &cleanup_block_store_pc;
7057 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
7058 struct regcache *regs,
7059 struct displaced_step_closure *dsc)
7061 int rn = bits (insn1, 0, 3);
7062 int load = bit (insn1, 4);
7063 int writeback = bit (insn1, 5);
7065 /* Block transfers which don't mention PC can be run directly
7067 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
7068 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
7070 if (rn == ARM_PC_REGNUM)
7072 warning (_("displaced: Unpredictable LDM or STM with "
7073 "base register r15"));
7074 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7075 "unpredictable ldm/stm", dsc);
7078 if (debug_displaced)
7079 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
7080 "%.4x%.4x\n", insn1, insn2);
7082 /* Clear bit 13, since it should always be zero. */
7083 dsc->u.block.regmask = (insn2 & 0xdfff);
7084 dsc->u.block.rn = rn;
7086 dsc->u.block.load = load;
7087 dsc->u.block.user = 0;
7088 dsc->u.block.increment = bit (insn1, 7);
7089 dsc->u.block.before = bit (insn1, 8);
7090 dsc->u.block.writeback = writeback;
7091 dsc->u.block.cond = INST_AL;
7092 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
7096 if (dsc->u.block.regmask == 0xffff)
7098 /* This case can never be reached, since bit 13 was cleared above. */
7103 unsigned int regmask = dsc->u.block.regmask;
7104 unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
7105 unsigned int to = 0, from = 0, i, new_rn;
7107 for (i = 0; i < num_in_list; i++)
7108 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7113 new_regmask = (1 << num_in_list) - 1;
7115 if (debug_displaced)
7116 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
7117 "{..., pc}: original reg list %.4x, modified "
7118 "list %.4x\n"), rn, writeback ? "!" : "",
7119 (int) dsc->u.block.regmask, new_regmask);
7121 dsc->modinsn[0] = insn1;
7122 dsc->modinsn[1] = (new_regmask & 0xffff);
7125 dsc->cleanup = &cleanup_block_load_pc;
7130 dsc->modinsn[0] = insn1;
7131 dsc->modinsn[1] = insn2;
7133 dsc->cleanup = &cleanup_block_store_pc;
7138 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
7139 for Linux, where some SVC instructions must be treated specially. */
7142 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
7143 struct displaced_step_closure *dsc)
7145 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
7147 if (debug_displaced)
7148 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
7149 "%.8lx\n", (unsigned long) resume_addr);
7151 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
7155 /* Common copy routine for svc instruction. */
7158 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
7159 struct displaced_step_closure *dsc)
7161 /* Preparation: none.
7162 Insn: unmodified svc.
7163 Cleanup: pc <- insn_addr + insn_size. */
7165 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
7166 instruction. */
7167 dsc->wrote_to_pc = 1;
7169 /* Allow OS-specific code to override SVC handling. */
7170 if (dsc->u.svc.copy_svc_os)
7171 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
7174 dsc->cleanup = &cleanup_svc;
7180 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
7181 struct regcache *regs, struct displaced_step_closure *dsc)
7184 if (debug_displaced)
7185 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
7186 (unsigned long) insn);
7188 dsc->modinsn[0] = insn;
7190 return install_svc (gdbarch, regs, dsc);
7194 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
7195 struct regcache *regs, struct displaced_step_closure *dsc)
7198 if (debug_displaced)
7199 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
7202 dsc->modinsn[0] = insn;
7204 return install_svc (gdbarch, regs, dsc);
7207 /* Copy undefined instructions. */
7210 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
7211 struct displaced_step_closure *dsc)
7213 if (debug_displaced)
7214 fprintf_unfiltered (gdb_stdlog,
7215 "displaced: copying undefined insn %.8lx\n",
7216 (unsigned long) insn);
7218 dsc->modinsn[0] = insn;
7224 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
7225 struct displaced_step_closure *dsc)
7228 if (debug_displaced)
7229 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
7230 "%.4x %.4x\n", (unsigned short) insn1,
7231 (unsigned short) insn2);
7233 dsc->modinsn[0] = insn1;
7234 dsc->modinsn[1] = insn2;
7240 /* Copy unpredictable instructions. */
7243 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
7244 struct displaced_step_closure *dsc)
7246 if (debug_displaced)
7247 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
7248 "%.8lx\n", (unsigned long) insn);
7250 dsc->modinsn[0] = insn;
7255 /* The decode_* functions are instruction decoding helpers. They mostly follow
7256 the presentation in the ARM ARM. */
7259 arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
7260 struct regcache *regs,
7261 struct displaced_step_closure *dsc)
7263 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
7264 unsigned int rn = bits (insn, 16, 19);
7266 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0xe) == 0x0)
7267 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
7268 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0xe) == 0x1)
7269 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
7270 else if ((op1 & 0x60) == 0x20)
7271 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
7272 else if ((op1 & 0x71) == 0x40)
7273 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
7275 else if ((op1 & 0x77) == 0x41)
7276 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
7277 else if ((op1 & 0x77) == 0x45)
7278 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
7279 else if ((op1 & 0x77) == 0x51)
7282 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
7284 return arm_copy_unpred (gdbarch, insn, dsc);
7286 else if ((op1 & 0x77) == 0x55)
7287 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
7288 else if (op1 == 0x57)
7291 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
7292 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
7293 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
7294 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
7295 default: return arm_copy_unpred (gdbarch, insn, dsc);
7297 else if ((op1 & 0x63) == 0x43)
7298 return arm_copy_unpred (gdbarch, insn, dsc);
7299 else if ((op2 & 0x1) == 0x0)
7300 switch (op1 & ~0x80)
7303 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
7305 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
7306 case 0x71: case 0x75:
7308 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
7309 case 0x63: case 0x67: case 0x73: case 0x77:
7310 return arm_copy_unpred (gdbarch, insn, dsc);
7312 return arm_copy_undef (gdbarch, insn, dsc);
7315 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
7319 arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
7320 struct regcache *regs,
7321 struct displaced_step_closure *dsc)
7323 if (bit (insn, 27) == 0)
7324 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
7325 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
7326 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
7329 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
7332 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
7334 case 0x4: case 0x5: case 0x6: case 0x7:
7335 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
7338 switch ((insn & 0xe00000) >> 21)
7340 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
7342 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7345 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
7348 return arm_copy_undef (gdbarch, insn, dsc);
7353 int rn_f = (bits (insn, 16, 19) == 0xf);
7354 switch ((insn & 0xe00000) >> 21)
7357 /* ldc/ldc2 imm (undefined for rn == pc). */
7358 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
7359 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7362 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
7364 case 0x4: case 0x5: case 0x6: case 0x7:
7365 /* ldc/ldc2 lit (undefined for rn != pc). */
7366 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
7367 : arm_copy_undef (gdbarch, insn, dsc);
7370 return arm_copy_undef (gdbarch, insn, dsc);
7375 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
7378 if (bits (insn, 16, 19) == 0xf)
7380 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7382 return arm_copy_undef (gdbarch, insn, dsc);
7386 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
7388 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7392 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
7394 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7397 return arm_copy_undef (gdbarch, insn, dsc);
7401 /* Decode miscellaneous instructions in dp/misc encoding space. */
7404 arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
7405 struct regcache *regs,
7406 struct displaced_step_closure *dsc)
7408 unsigned int op2 = bits (insn, 4, 6);
7409 unsigned int op = bits (insn, 21, 22);
7410 unsigned int op1 = bits (insn, 16, 19);
7415 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
7418 if (op == 0x1) /* bx. */
7419 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
7421 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
7423 return arm_copy_undef (gdbarch, insn, dsc);
7427 /* Not really supported. */
7428 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
7430 return arm_copy_undef (gdbarch, insn, dsc);
7434 return arm_copy_bx_blx_reg (gdbarch, insn,
7435 regs, dsc); /* blx register. */
7437 return arm_copy_undef (gdbarch, insn, dsc);
7440 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
7444 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
7446 /* Not really supported. */
7447 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
7450 return arm_copy_undef (gdbarch, insn, dsc);
7455 arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
7456 struct regcache *regs,
7457 struct displaced_step_closure *dsc)
7460 switch (bits (insn, 20, 24))
7463 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
7466 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
7468 case 0x12: case 0x16:
7469 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
7472 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
7476 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
7478 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
7479 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
7480 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
7481 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
7482 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
7483 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
7484 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
7485 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
7486 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
7487 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
7488 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
7489 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
7490 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
7491 /* 2nd arg means "unprivileged". */
7492 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
7496 /* Should be unreachable. */
7501 arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
7502 struct regcache *regs,
7503 struct displaced_step_closure *dsc)
7505 int a = bit (insn, 25), b = bit (insn, 4);
7506 uint32_t op1 = bits (insn, 20, 24);
7507 int rn_f = bits (insn, 16, 19) == 0xf;
7509 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
7510 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
7511 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
7512 else if ((!a && (op1 & 0x17) == 0x02)
7513 || (a && (op1 & 0x17) == 0x02 && !b))
7514 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
7515 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
7516 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
7517 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
7518 else if ((!a && (op1 & 0x17) == 0x03)
7519 || (a && (op1 & 0x17) == 0x03 && !b))
7520 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
7521 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
7522 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
7523 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
7524 else if ((!a && (op1 & 0x17) == 0x06)
7525 || (a && (op1 & 0x17) == 0x06 && !b))
7526 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
7527 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
7528 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
7529 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
7530 else if ((!a && (op1 & 0x17) == 0x07)
7531 || (a && (op1 & 0x17) == 0x07 && !b))
7532 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
7534 /* Should be unreachable. */
7539 arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
7540 struct displaced_step_closure *dsc)
7542 switch (bits (insn, 20, 24))
7544 case 0x00: case 0x01: case 0x02: case 0x03:
7545 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
7547 case 0x04: case 0x05: case 0x06: case 0x07:
7548 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
7550 case 0x08: case 0x09: case 0x0a: case 0x0b:
7551 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
7552 return arm_copy_unmodified (gdbarch, insn,
7553 "decode/pack/unpack/saturate/reverse", dsc);
7556 if (bits (insn, 5, 7) == 0) /* op2. */
7558 if (bits (insn, 12, 15) == 0xf)
7559 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
7561 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
7564 return arm_copy_undef (gdbarch, insn, dsc);
7566 case 0x1a: case 0x1b:
7567 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7568 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
7570 return arm_copy_undef (gdbarch, insn, dsc);
7572 case 0x1c: case 0x1d:
7573 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
7575 if (bits (insn, 0, 3) == 0xf)
7576 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
7578 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
7581 return arm_copy_undef (gdbarch, insn, dsc);
7583 case 0x1e: case 0x1f:
7584 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7585 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
7587 return arm_copy_undef (gdbarch, insn, dsc);
7590 /* Should be unreachable. */
7595 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, int32_t insn,
7596 struct regcache *regs,
7597 struct displaced_step_closure *dsc)
7600 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
7602 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
7606 arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
7607 struct regcache *regs,
7608 struct displaced_step_closure *dsc)
7610 unsigned int opcode = bits (insn, 20, 24);
7614 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
7615 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
7617 case 0x08: case 0x0a: case 0x0c: case 0x0e:
7618 case 0x12: case 0x16:
7619 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
7621 case 0x09: case 0x0b: case 0x0d: case 0x0f:
7622 case 0x13: case 0x17:
7623 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
7625 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
7626 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
7627 /* Note: no writeback for these instructions. Bit 25 will always be
7628 zero though (via caller), so the following works OK. */
7629 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7632 /* Should be unreachable. */
7636 /* Decode shifted register instructions. */
7639 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
7640 uint16_t insn2, struct regcache *regs,
7641 struct displaced_step_closure *dsc)
7643 /* PC is only allowed to be used in the MOV instruction. */
7645 unsigned int op = bits (insn1, 5, 8);
7646 unsigned int rn = bits (insn1, 0, 3);
7648 if (op == 0x2 && rn == 0xf) /* MOV */
7649 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
7651 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7652 "dp (shift reg)", dsc);
7656 /* Decode extension register load/store. Exactly the same as
7657 arm_decode_ext_reg_ld_st. */
7660 thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
7661 uint16_t insn2, struct regcache *regs,
7662 struct displaced_step_closure *dsc)
7664 unsigned int opcode = bits (insn1, 4, 8);
7668 case 0x04: case 0x05:
7669 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7670 "vfp/neon vmov", dsc);
7672 case 0x08: case 0x0c: /* 01x00 */
7673 case 0x0a: case 0x0e: /* 01x10 */
7674 case 0x12: case 0x16: /* 10x10 */
7675 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7676 "vfp/neon vstm/vpush", dsc);
7678 case 0x09: case 0x0d: /* 01x01 */
7679 case 0x0b: case 0x0f: /* 01x11 */
7680 case 0x13: case 0x17: /* 10x11 */
7681 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7682 "vfp/neon vldm/vpop", dsc);
7684 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
7685 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7687 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
7688 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
7691 /* Should be unreachable. */
7696 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn, CORE_ADDR to,
7697 struct regcache *regs, struct displaced_step_closure *dsc)
7699 unsigned int op1 = bits (insn, 20, 25);
7700 int op = bit (insn, 4);
7701 unsigned int coproc = bits (insn, 8, 11);
7702 unsigned int rn = bits (insn, 16, 19);
7704 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
7705 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
7706 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
7707 && (coproc & 0xe) != 0xa)
7709 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7710 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
7711 && (coproc & 0xe) != 0xa)
7712 /* ldc/ldc2 imm/lit. */
7713 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7714 else if ((op1 & 0x3e) == 0x00)
7715 return arm_copy_undef (gdbarch, insn, dsc);
7716 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
7717 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
7718 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
7719 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
7720 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
7721 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
7722 else if ((op1 & 0x30) == 0x20 && !op)
7724 if ((coproc & 0xe) == 0xa)
7725 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
7727 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7729 else if ((op1 & 0x30) == 0x20 && op)
7730 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
7731 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
7732 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
7733 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
7734 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
7735 else if ((op1 & 0x30) == 0x30)
7736 return arm_copy_svc (gdbarch, insn, regs, dsc);
7738 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
7742 thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
7743 uint16_t insn2, struct regcache *regs,
7744 struct displaced_step_closure *dsc)
7746 unsigned int coproc = bits (insn2, 8, 11);
7747 unsigned int op1 = bits (insn1, 4, 9);
7748 unsigned int bit_5_8 = bits (insn1, 5, 8);
7749 unsigned int bit_9 = bit (insn1, 9);
7750 unsigned int bit_4 = bit (insn1, 4);
7751 unsigned int rn = bits (insn1, 0, 3);
7756 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7757 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
7759 else if (bit_5_8 == 0) /* UNDEFINED. */
7760 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7763 /* coproc is 101x: SIMD/VFP, ext registers load/store. */
7764 if ((coproc & 0xe) == 0xa)
7765 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
7767 else /* coproc is not 101x. */
7769 if (bit_4 == 0) /* STC/STC2. */
7770 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7772 else /* LDC/LDC2 {literal, immediate}. */
7773 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
7779 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
7785 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
7786 struct displaced_step_closure *dsc, int rd)
7792 /* Preparation: Rd <- PC; the copied ADD/SUB instruction set up by the caller then adds the immediate to Rd. */
7798 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7799 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
7803 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
7804 struct displaced_step_closure *dsc,
7805 int rd, unsigned int imm)
7808 /* Encoding T2: ADDS Rd, #imm */
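/* For example (purely illustrative), rd = 3 and imm = 0x14 would produce
   0x3314, i.e. "adds r3, #20".  */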
7809 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
7811 install_pc_relative (gdbarch, regs, dsc, rd);
7817 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
7818 struct regcache *regs,
7819 struct displaced_step_closure *dsc)
7821 unsigned int rd = bits (insn, 8, 10);
7822 unsigned int imm8 = bits (insn, 0, 7);
7824 if (debug_displaced)
7825 fprintf_unfiltered (gdb_stdlog,
7826 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
7829 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
7833 thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
7834 uint16_t insn2, struct regcache *regs,
7835 struct displaced_step_closure *dsc)
7837 unsigned int rd = bits (insn2, 8, 11);
7838 /* The immediate is encoded the same way in ADR, ADD and SUB, so we simply
7839 extract the raw immediate fields rather than computing the immediate
7840 value. When generating the ADD or SUB instruction, we can then OR those
7841 fields straight into the new encoding. */
7842 unsigned int imm_3_8 = insn2 & 0x70ff;
7843 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
7845 if (debug_displaced)
7846 fprintf_unfiltered (gdb_stdlog,
7847 "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
7848 rd, imm_i, imm_3_8, insn1, insn2);
7850 if (bit (insn1, 7)) /* ADR encoding T2 (SUB form). */
7852 /* Generate SUB Rd, Rd, #imm (SUB immediate, encoding T3). */
7853 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
7854 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7856 else /* ADR encoding T3 (ADD form). */
7858 /* Generate ADD Rd, Rd, #imm (ADD immediate, encoding T3). */
7859 dsc->modinsn[0] = (0xf100 | rd | imm_i);
7860 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7864 install_pc_relative (gdbarch, regs, dsc, rd);
7870 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, unsigned short insn1,
7871 struct regcache *regs,
7872 struct displaced_step_closure *dsc)
7874 unsigned int rt = bits (insn1, 8, 10);
7876 int imm8 = (bits (insn1, 0, 7) << 2);
7877 CORE_ADDR from = dsc->insn_addr;
7880 /* Rewrite instruction LDR Rt, [PC, #imm8] into:
7883 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
7885 Insn: LDR R0, [R2, R3];
7886 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
7888 if (debug_displaced)
7889 fprintf_unfiltered (gdb_stdlog,
7890 "displaced: copying thumb ldr r%d [pc #%d]\n"
7893 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
7894 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
7895 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
7896 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7897 /* The assembler calculates the required value of the offset from the
7898 Align(PC,4) value of this instruction to the label. */
7899 pc = pc & 0xfffffffc;
7901 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
7902 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
7905 dsc->u.ldst.xfersize = 4;
7907 dsc->u.ldst.immed = 0;
7908 dsc->u.ldst.writeback = 0;
7909 dsc->u.ldst.restore_r4 = 0;
7911 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3] */
7913 dsc->cleanup = &cleanup_load;
7918 /* Copy Thumb cbnz/cbz instruction. */
7921 thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
7922 struct regcache *regs,
7923 struct displaced_step_closure *dsc)
7925 int non_zero = bit (insn1, 11);
7926 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
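  /* The branch offset is the i:imm5:'0' field, i.e. bit 9 and bits 3-7 of
     the instruction followed by a zero bit, giving an even offset in the
     range 0-126 bytes.  */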
7927 CORE_ADDR from = dsc->insn_addr;
7928 int rn = bits (insn1, 0, 2);
7929 int rn_val = displaced_read_reg (regs, dsc, rn);
7931 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
7932 /* CBNZ and CBZ do not affect the condition flags. If the branch is taken,
7933 set the condition to INST_AL so that cleanup_branch knows it is taken;
7934 otherwise leave the condition false and cleanup_branch will do nothing. */
7935 if (dsc->u.branch.cond)
7937 dsc->u.branch.cond = INST_AL;
7938 dsc->u.branch.dest = from + 4 + imm5;
7941 dsc->u.branch.dest = from + 2;
7943 dsc->u.branch.link = 0;
7944 dsc->u.branch.exchange = 0;
7946 if (debug_displaced)
7947 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
7948 " insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
7949 rn, rn_val, insn1, dsc->u.branch.dest);
7951 dsc->modinsn[0] = THUMB_NOP;
7953 dsc->cleanup = &cleanup_branch;
7957 /* Copy Table Branch Byte/Halfword (TBB/TBH). */
7959 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
7960 uint16_t insn2, struct regcache *regs,
7961 struct displaced_step_closure *dsc)
7963 ULONGEST rn_val, rm_val;
7964 int is_tbh = bit (insn2, 4);
7965 CORE_ADDR halfwords = 0;
7966 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7968 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
7969 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
7975 target_read_memory (rn_val + 2 * rm_val, buf, 2);
7976 halfwords = extract_unsigned_integer (buf, 2, byte_order);
7982 target_read_memory (rn_val + rm_val, buf, 1);
7983 halfwords = extract_unsigned_integer (buf, 1, byte_order);
7986 if (debug_displaced)
7987 fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x offset 0x%x"
7988 " table entry 0x%x\n", is_tbh ? "tbh" : "tbb",
7989 (unsigned int) rn_val, (unsigned int) rm_val,
7990 (unsigned int) halfwords);
7992 dsc->u.branch.cond = INST_AL;
7993 dsc->u.branch.link = 0;
7994 dsc->u.branch.exchange = 0;
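  /* The branch target is the address of the TBB/TBH instruction plus 4 (the
     Thumb PC offset) plus twice the table entry just read; for instance (as
     an illustration) an entry of 0x10 branches 0x24 bytes past the insn.  */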
7995 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
7997 dsc->cleanup = &cleanup_branch;
8003 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
8004 struct displaced_step_closure *dsc)
8007 int val = displaced_read_reg (regs, dsc, 7);
8008 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
8011 val = displaced_read_reg (regs, dsc, 8);
8012 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
8015 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
8020 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, unsigned short insn1,
8021 struct regcache *regs,
8022 struct displaced_step_closure *dsc)
8024 dsc->u.block.regmask = insn1 & 0x00ff;
8026 /* Rewrite instruction POP {rX, rY, ..., rZ, PC} as follows:
8029 (1) The register list is full, that is, r0-r7 are all used.
8030 Prepare: tmp[0] <- r8
8032 POP {r0, r1, ..., r6, r7}; remove PC from the register list
8033 MOV r8, r7; move the value of r7 to r8
8034 POP {r7}; load the value destined for PC into r7
8036 Cleanup: PC <- r7, r7 <- r8, r8 <- tmp[0]
8038 (2) The register list is not full; suppose it contains N registers
8039 (not counting PC, 0 <= N <= 7).
8040 Prepare: for each i in 0 .. N, tmp[i] <- ri.
8042 POP {r0, r1, ..., rN};
8044 Cleanup: Set the registers in the original register list from r0 - rN,
8045 restoring r0 - rN from tmp[] as appropriate. */
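/* As a worked example of case (2) (illustrative): for POP {r0, r2, pc} the
   low-register mask is 0x05, so the instruction is rewritten as
   POP {r0, r1, r2}; cleanup_block_load_pc then moves r2 -> PC and r1 -> r2,
   leaves r0 where it is, and restores r1 from tmp[1].  */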
8047 if (debug_displaced)
8048 fprintf_unfiltered (gdb_stdlog,
8049 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
8050 dsc->u.block.regmask, insn1);
8052 if (dsc->u.block.regmask == 0xff)
8054 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
8056 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
8057 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
8058 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
8061 dsc->cleanup = &cleanup_pop_pc_16bit_all;
8065 unsigned int num_in_list = bitcount (dsc->u.block.regmask);
8066 unsigned int new_regmask, bit = 1;
8067 unsigned int to = 0, from = 0, i, new_rn;
8069 for (i = 0; i < num_in_list + 1; i++)
8070 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
8072 new_regmask = (1 << (num_in_list + 1)) - 1;
8074 if (debug_displaced)
8075 fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
8076 "{..., pc}: original reg list %.4x,"
8077 " modified list %.4x\n"),
8078 (int) dsc->u.block.regmask, new_regmask);
8080 dsc->u.block.regmask |= 0x8000;
8081 dsc->u.block.writeback = 0;
8082 dsc->u.block.cond = INST_AL;
8084 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
8086 dsc->cleanup = &cleanup_block_load_pc;
8093 thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
8094 struct regcache *regs,
8095 struct displaced_step_closure *dsc)
8097 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
8098 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
8101 /* 16-bit thumb instructions. */
8102 switch (op_bit_12_15)
8105 /* Shift (immediate), add, subtract, move and compare. */
8105 case 0: case 1: case 2: case 3:
8106 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
8107 "shift/add/sub/mov/cmp",
8111 switch (op_bit_10_11)
8113 case 0: /* Data-processing */
8114 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
8118 case 1: /* Special data instructions and branch and exchange. */
8120 unsigned short op = bits (insn1, 7, 9);
8121 if (op == 6 || op == 7) /* BX or BLX */
8122 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
8123 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
8124 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
8126 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
8130 default: /* LDR (literal) */
8131 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
8134 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
8135 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
8138 if (op_bit_10_11 < 2) /* Generate PC-relative address */
8139 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
8140 else /* Generate SP-relative address */
8141 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
8143 case 11: /* Misc 16-bit instructions */
8145 switch (bits (insn1, 8, 11))
8147 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
8148 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
8150 case 12: case 13: /* POP */
8151 if (bit (insn1, 8)) /* PC is in register list. */
8152 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
8154 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
8156 case 15: /* If-Then, and hints */
8157 if (bits (insn1, 0, 3))
8158 /* If-Then makes up to four following instructions conditional.
8159 The IT instruction itself is not conditional, so handle it as an
8160 ordinary unmodified instruction. */
8161 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
8164 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
8167 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
8172 if (op_bit_10_11 < 2) /* Store multiple registers */
8173 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
8174 else /* Load multiple registers */
8175 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
8177 case 13: /* Conditional branch and supervisor call */
8178 if (bits (insn1, 9, 11) != 7) /* conditional branch */
8179 err = thumb_copy_b (gdbarch, insn1, dsc);
8181 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
8183 case 14: /* Unconditional branch */
8184 err = thumb_copy_b (gdbarch, insn1, dsc);
8191 internal_error (__FILE__, __LINE__,
8192 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
8196 decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
8197 uint16_t insn1, uint16_t insn2,
8198 struct regcache *regs,
8199 struct displaced_step_closure *dsc)
8201 int rt = bits (insn2, 12, 15);
8202 int rn = bits (insn1, 0, 3);
8203 int op1 = bits (insn1, 7, 8);
8206 switch (bits (insn1, 5, 6))
8208 case 0: /* Load byte and memory hints */
8209 if (rt == 0xf) /* PLD/PLI */
8212 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
8213 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
8215 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8220 if (rn == 0xf) /* LDRB/LDRSB (literal) */
8221 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
8224 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8225 "ldrb{reg, immediate}/ldrbt",
8230 case 1: /* Load halfword and memory hints. */
8231 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
8232 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8233 "pld/unalloc memhint", dsc);
8237 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
8240 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8244 case 2: /* Load word */
8246 int insn2_bit_8_11 = bits (insn2, 8, 11);
8249 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
8250 else if (op1 == 0x1) /* Encoding T3 */
8251 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
8253 else /* op1 == 0x0 */
8255 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
8256 /* LDR (immediate) */
8257 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
8258 dsc, bit (insn2, 8), 1);
8259 else if (insn2_bit_8_11 == 0xe) /* LDRT */
8260 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8263 /* LDR (register) */
8264 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
8270 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
8277 thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
8278 uint16_t insn2, struct regcache *regs,
8279 struct displaced_step_closure *dsc)
8282 unsigned short op = bit (insn2, 15);
8283 unsigned int op1 = bits (insn1, 11, 12);
8289 switch (bits (insn1, 9, 10))
8294 /* Load/store {dual, exclusive}, table branch. */
8295 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
8296 && bits (insn2, 5, 7) == 0)
8297 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
8300 /* PC is not allowed to be used in load/store {dual, exclusive}
8301 instructions. */
8302 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8303 "load/store dual/ex", dsc);
8305 else /* load/store multiple */
8307 switch (bits (insn1, 7, 8))
8309 case 0: case 3: /* SRS, RFE */
8310 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8313 case 1: case 2: /* LDM/STM/PUSH/POP */
8314 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
8321 /* Data-processing (shift register). */
8322 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
8325 default: /* Coprocessor instructions. */
8326 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
8331 case 2: /* op1 = 2 */
8332 if (op) /* Branch and misc control. */
8334 if (bit (insn2, 14) /* BLX/BL */
8335 || bit (insn2, 12) /* Unconditional branch */
8336 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
8337 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
8339 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8344 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
8346 int op = bits (insn1, 4, 8);
8347 int rn = bits (insn1, 0, 3);
8348 if ((op == 0 || op == 0xa) && rn == 0xf)
8349 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
8352 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8355 else /* Data processing (modified immediate) */
8356 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8360 case 3: /* op1 = 3 */
8361 switch (bits (insn1, 9, 10))
8365 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
8367 else /* NEON Load/Store and Store single data item */
8368 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8369 "neon elt/struct load/store",
8372 case 1: /* op1 = 3, bits (9, 10) == 1 */
8373 switch (bits (insn1, 7, 8))
8375 case 0: case 1: /* Data processing (register) */
8376 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8379 case 2: /* Multiply and absolute difference */
8380 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8381 "mul/mla/diff", dsc);
8383 case 3: /* Long multiply and divide */
8384 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8389 default: /* Coprocessor instructions */
8390 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
8399 internal_error (__FILE__, __LINE__,
8400 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
8405 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8406 CORE_ADDR to, struct regcache *regs,
8407 struct displaced_step_closure *dsc)
8409 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8411 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
8413 if (debug_displaced)
8414 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
8415 "at %.8lx\n", insn1, (unsigned long) from);
8418 dsc->insn_size = thumb_insn_size (insn1);
8419 if (thumb_insn_size (insn1) == 4)
8422 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
8423 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
8426 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
8430 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8431 CORE_ADDR to, struct regcache *regs,
8432 struct displaced_step_closure *dsc)
8435 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8438 /* Most displaced instructions use a 1-instruction scratch space, so set this
8439 here and override below if/when necessary. */
8441 dsc->insn_addr = from;
8442 dsc->scratch_base = to;
8443 dsc->cleanup = NULL;
8444 dsc->wrote_to_pc = 0;
8446 if (!displaced_in_arm_mode (regs))
8447 return thumb_process_displaced_insn (gdbarch, from, to, regs, dsc);
8451 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
8452 if (debug_displaced)
8453 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
8454 "at %.8lx\n", (unsigned long) insn,
8455 (unsigned long) from);
8457 if ((insn & 0xf0000000) == 0xf0000000)
8458 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
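  /* The switch key below is bits 27-25 of the instruction followed by bit 4,
     giving a 4-bit value that distinguishes the major ARM encoding groups.  */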
8459 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
8461 case 0x0: case 0x1: case 0x2: case 0x3:
8462 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
8465 case 0x4: case 0x5: case 0x6:
8466 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
8470 err = arm_decode_media (gdbarch, insn, dsc);
8473 case 0x8: case 0x9: case 0xa: case 0xb:
8474 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
8477 case 0xc: case 0xd: case 0xe: case 0xf:
8478 err = arm_decode_svc_copro (gdbarch, insn, to, regs, dsc);
8483 internal_error (__FILE__, __LINE__,
8484 _("arm_process_displaced_insn: Instruction decode error"));
8487 /* Actually set up the scratch space for a displaced instruction. */
8490 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
8491 CORE_ADDR to, struct displaced_step_closure *dsc)
8493 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8494 unsigned int i, len, offset;
8495 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8496 int size = dsc->is_thumb ? 2 : 4;
8497 const unsigned char *bkp_insn;
8500 /* Poke modified instruction(s). */
8501 for (i = 0; i < dsc->numinsns; i++)
8503 if (debug_displaced)
8505 fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
8507 fprintf_unfiltered (gdb_stdlog, "%.8lx",
8510 fprintf_unfiltered (gdb_stdlog, "%.4x",
8511 (unsigned short)dsc->modinsn[i]);
8513 fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
8514 (unsigned long) to + offset);
8517 write_memory_unsigned_integer (to + offset, size,
8518 byte_order_for_code,
8523 /* Choose the correct breakpoint instruction. */
8526 bkp_insn = tdep->thumb_breakpoint;
8527 len = tdep->thumb_breakpoint_size;
8531 bkp_insn = tdep->arm_breakpoint;
8532 len = tdep->arm_breakpoint_size;
8535 /* Put breakpoint afterwards. */
8536 write_memory (to + offset, bkp_insn, len);
8538 if (debug_displaced)
8539 fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
8540 paddress (gdbarch, from), paddress (gdbarch, to));
8543 /* Entry point for copying an instruction into scratch space for displaced
8544 stepping. */
8546 struct displaced_step_closure *
8547 arm_displaced_step_copy_insn (struct gdbarch *gdbarch,
8548 CORE_ADDR from, CORE_ADDR to,
8549 struct regcache *regs)
8551 struct displaced_step_closure *dsc
8552 = xmalloc (sizeof (struct displaced_step_closure));
8553 arm_process_displaced_insn (gdbarch, from, to, regs, dsc);
8554 arm_displaced_init_closure (gdbarch, from, to, dsc);
8559 /* Entry point for cleaning things up after a displaced instruction has been
8560 single-stepped. */
8563 arm_displaced_step_fixup (struct gdbarch *gdbarch,
8564 struct displaced_step_closure *dsc,
8565 CORE_ADDR from, CORE_ADDR to,
8566 struct regcache *regs)
8569 dsc->cleanup (gdbarch, regs, dsc);
8571 if (!dsc->wrote_to_pc)
8572 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
8573 dsc->insn_addr + dsc->insn_size);
8577 #include "bfd-in2.h"
8578 #include "libcoff.h"
8581 gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
8583 struct gdbarch *gdbarch = info->application_data;
8585 if (arm_pc_is_thumb (gdbarch, memaddr))
8587 static asymbol *asym;
8588 static combined_entry_type ce;
8589 static struct coff_symbol_struct csym;
8590 static struct bfd fake_bfd;
8591 static bfd_target fake_target;
8593 if (csym.native == NULL)
8595 /* Create a fake symbol vector containing a Thumb symbol.
8596 This is solely so that the code in print_insn_little_arm()
8597 and print_insn_big_arm() in opcodes/arm-dis.c will detect
8598 the presence of a Thumb symbol and switch to decoding
8599 Thumb instructions. */
8601 fake_target.flavour = bfd_target_coff_flavour;
8602 fake_bfd.xvec = &fake_target;
8603 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
8605 csym.symbol.the_bfd = &fake_bfd;
8606 csym.symbol.name = "fake";
8607 asym = (asymbol *) & csym;
8610 memaddr = UNMAKE_THUMB_ADDR (memaddr);
8611 info->symbols = &asym;
8614 info->symbols = NULL;
8616 if (info->endian == BFD_ENDIAN_BIG)
8617 return print_insn_big_arm (memaddr, info);
8619 return print_insn_little_arm (memaddr, info);
8622 /* The following define instruction sequences that will cause ARM
8623 CPUs to take an undefined instruction trap. These are used to
8624 signal a breakpoint to GDB.
8626 The newer ARMv4T CPUs are capable of operating in ARM or Thumb
8627 modes. A different instruction is required for each mode. The ARM
8628 CPUs can also be big or little endian. Thus four different
8629 instructions are needed to support all cases.
8631 Note: ARMv4 defines several new instructions that will take the
8632 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
8633 not in fact add the new instructions. The new undefined
8634 instructions in ARMv4 are all instructions that had no defined
8635 behaviour in earlier chips. There is no guarantee that they will
8636 raise an exception; they may instead be treated as NOPs. In practice, it
8637 may only be safe to rely on instructions matching:
8639 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
8640 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
8641 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
8643 Even this may only be true if the condition predicate is true. The
8644 following use a condition predicate of ALWAYS so it is always TRUE.
8646 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
8647 and NetBSD all use a software interrupt rather than an undefined
8648 instruction to force a trap. This can be handled by the
8649 abi-specific code during establishment of the gdbarch vector. */
8651 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
8652 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
8653 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
8654 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
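/* For reference: the two ARM byte sequences above are the little- and
   big-endian byte orders of the word 0xE7FFDEFE, which matches the pattern
   documented above (condition field 0xE, i.e. ALWAYS); the Thumb sequences
   both encode the halfword 0xBEBE.  */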
8656 static const char arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
8657 static const char arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
8658 static const char arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
8659 static const char arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
8661 /* Determine the type and size of breakpoint to insert at PCPTR. Uses
8662 the program counter value to determine whether a 16-bit or 32-bit
8663 breakpoint should be used. It returns a pointer to a string of
8664 bytes that encode a breakpoint instruction, stores the length of
8665 the string to *lenptr, and adjusts the program counter (if
8666 necessary) to point to the actual memory location where the
8667 breakpoint should be inserted. */
8669 static const unsigned char *
8670 arm_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr, int *lenptr)
8672 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8673 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8675 if (arm_pc_is_thumb (gdbarch, *pcptr))
8677 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
8679 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
8680 check whether we are replacing a 32-bit instruction. */
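/* (A 32-bit Thumb-2 encoding is recognizable from its first halfword:
   its top five bits are 0b11101, 0b11110 or 0b11111; thumb_insn_size
   relies on this.)  */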
8681 if (tdep->thumb2_breakpoint != NULL)
8684 if (target_read_memory (*pcptr, buf, 2) == 0)
8686 unsigned short inst1;
8687 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
8688 if (thumb_insn_size (inst1) == 4)
8690 *lenptr = tdep->thumb2_breakpoint_size;
8691 return tdep->thumb2_breakpoint;
8696 *lenptr = tdep->thumb_breakpoint_size;
8697 return tdep->thumb_breakpoint;
8701 *lenptr = tdep->arm_breakpoint_size;
8702 return tdep->arm_breakpoint;
8707 arm_remote_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr,
8710 arm_breakpoint_from_pc (gdbarch, pcptr, kindptr);
8712 if (arm_pc_is_thumb (gdbarch, *pcptr) && *kindptr == 4)
8713 /* The documented magic value for a 32-bit Thumb-2 breakpoint, so
8714 that this is not confused with a 32-bit ARM breakpoint. */
8718 /* Extract from an array REGBUF containing the (raw) register state a
8719 function return value of type TYPE, and copy that, in virtual
8720 format, into VALBUF. */
8723 arm_extract_return_value (struct type *type, struct regcache *regs,
8726 struct gdbarch *gdbarch = get_regcache_arch (regs);
8727 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8729 if (TYPE_CODE_FLT == TYPE_CODE (type))
8731 switch (gdbarch_tdep (gdbarch)->fp_model)
8735 /* The value is in register F0 in internal format. We need to
8736 extract the raw value and then convert it to the desired
8738 bfd_byte tmpbuf[FP_REGISTER_SIZE];
8740 regcache_cooked_read (regs, ARM_F0_REGNUM, tmpbuf);
8741 convert_from_extended (floatformat_from_type (type), tmpbuf,
8742 valbuf, gdbarch_byte_order (gdbarch));
8746 case ARM_FLOAT_SOFT_FPA:
8747 case ARM_FLOAT_SOFT_VFP:
8748 /* ARM_FLOAT_VFP can arise if this is a variadic function, in which
8749 case the VFP ABI code is not used. */
8751 regcache_cooked_read (regs, ARM_A1_REGNUM, valbuf);
8752 if (TYPE_LENGTH (type) > 4)
8753 regcache_cooked_read (regs, ARM_A1_REGNUM + 1,
8754 valbuf + INT_REGISTER_SIZE);
8758 internal_error (__FILE__, __LINE__,
8759 _("arm_extract_return_value: "
8760 "Floating point model not supported"));
8764 else if (TYPE_CODE (type) == TYPE_CODE_INT
8765 || TYPE_CODE (type) == TYPE_CODE_CHAR
8766 || TYPE_CODE (type) == TYPE_CODE_BOOL
8767 || TYPE_CODE (type) == TYPE_CODE_PTR
8768 || TYPE_CODE (type) == TYPE_CODE_REF
8769 || TYPE_CODE (type) == TYPE_CODE_ENUM)
8771 /* If the type is a plain integer, then the access is
8772 straightforward. Otherwise we have to play around a bit
8774 int len = TYPE_LENGTH (type);
8775 int regno = ARM_A1_REGNUM;
8780 /* By using store_unsigned_integer we avoid having to do
8781 anything special for small big-endian values. */
8782 regcache_cooked_read_unsigned (regs, regno++, &tmp);
8783 store_unsigned_integer (valbuf,
8784 (len > INT_REGISTER_SIZE
8785 ? INT_REGISTER_SIZE : len),
8787 len -= INT_REGISTER_SIZE;
8788 valbuf += INT_REGISTER_SIZE;
8793 /* For a structure or union the behaviour is as if the value had
8794 been stored to word-aligned memory and then loaded into
8795 registers with 32-bit load instruction(s). */
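/* For example, a 6-byte structure comes back with bytes 0-3 taken from r0
   and bytes 4-5 taken from the first two bytes of r1's memory image.  */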
8796 int len = TYPE_LENGTH (type);
8797 int regno = ARM_A1_REGNUM;
8798 bfd_byte tmpbuf[INT_REGISTER_SIZE];
8802 regcache_cooked_read (regs, regno++, tmpbuf);
8803 memcpy (valbuf, tmpbuf,
8804 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
8805 len -= INT_REGISTER_SIZE;
8806 valbuf += INT_REGISTER_SIZE;
8812 /* Will a function return an aggregate type in memory or in a
8813 register? Return 0 if an aggregate type can be returned in a
8814 register, 1 if it must be returned in memory. */
8817 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
8820 enum type_code code;
8822 CHECK_TYPEDEF (type);
8824 /* In the ARM ABI, "integer" like aggregate types are returned in
8825 registers. For an aggregate type to be integer like, its size
8826 must be less than or equal to INT_REGISTER_SIZE and the
8827 offset of each addressable subfield must be zero. Note that bit
8828 fields are not addressable, and all addressable subfields of
8829 unions always start at offset zero.
8831 This function is based on the behaviour of GCC 2.95.1.
8832 See: gcc/arm.c: arm_return_in_memory() for details.
8834 Note: Versions of GCC before GCC 2.95.2 do not set up the
8835 parameters correctly for a function returning the following
8836 structure: struct { float f;}; This should be returned in memory,
8837 not a register. Richard Earnshaw sent me a patch, but I do not
8838 know of any way to detect if a function like the above has been
8839 compiled with the correct calling convention. */
8841 /* All aggregate types that won't fit in a register must be returned
8843 if (TYPE_LENGTH (type) > INT_REGISTER_SIZE)
8848 /* The AAPCS says all aggregates not larger than a word are returned
8850 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
8853 /* The only aggregate types that can be returned in a register are
8854 structs and unions. Arrays must be returned in memory. */
8855 code = TYPE_CODE (type);
8856 if ((TYPE_CODE_STRUCT != code) && (TYPE_CODE_UNION != code))
8861 /* Assume all other aggregate types can be returned in a register.
8862 Run a check for structures, unions and arrays. */
8865 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
8868 /* Need to check if this struct/union is "integer" like. For
8869 this to be true, its size must be less than or equal to
8870 INT_REGISTER_SIZE and the offset of each addressable
8871 subfield must be zero. Note that bit fields are not
8872 addressable, and unions always start at offset zero. If any
8873 of the subfields is a floating point type, the struct/union
8874 cannot be an integer type. */
8876 /* For each field in the object, check:
8877 1) Is it FP? --> yes, nRc = 1;
8878 2) Is it addressable (bitpos != 0) and
8879 not packed (bitsize == 0)?
8883 for (i = 0; i < TYPE_NFIELDS (type); i++)
8885 enum type_code field_type_code;
8886 field_type_code = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
8889 /* Is it a floating point type field? */
8890 if (field_type_code == TYPE_CODE_FLT)
8896 /* If bitpos != 0, then we have to care about it. */
8897 if (TYPE_FIELD_BITPOS (type, i) != 0)
8899 /* Bitfields are not addressable. If the field bitsize is
8900 zero, then the field is not packed. Hence it cannot be
8901 a bitfield or any other packed type. */
8902 if (TYPE_FIELD_BITSIZE (type, i) == 0)
8914 /* Write into appropriate registers a function return value of type
8915 TYPE, given in virtual format. */
8918 arm_store_return_value (struct type *type, struct regcache *regs,
8919 const gdb_byte *valbuf)
8921 struct gdbarch *gdbarch = get_regcache_arch (regs);
8922 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8924 if (TYPE_CODE (type) == TYPE_CODE_FLT)
8926 char buf[MAX_REGISTER_SIZE];
8928 switch (gdbarch_tdep (gdbarch)->fp_model)
8932 convert_to_extended (floatformat_from_type (type), buf, valbuf,
8933 gdbarch_byte_order (gdbarch));
8934 regcache_cooked_write (regs, ARM_F0_REGNUM, buf);
8937 case ARM_FLOAT_SOFT_FPA:
8938 case ARM_FLOAT_SOFT_VFP:
8939 /* ARM_FLOAT_VFP can arise if this is a variadic function, in which
8940 case the VFP ABI code is not used. */
8942 regcache_cooked_write (regs, ARM_A1_REGNUM, valbuf);
8943 if (TYPE_LENGTH (type) > 4)
8944 regcache_cooked_write (regs, ARM_A1_REGNUM + 1,
8945 valbuf + INT_REGISTER_SIZE);
8949 internal_error (__FILE__, __LINE__,
8950 _("arm_store_return_value: Floating "
8951 "point model not supported"));
8955 else if (TYPE_CODE (type) == TYPE_CODE_INT
8956 || TYPE_CODE (type) == TYPE_CODE_CHAR
8957 || TYPE_CODE (type) == TYPE_CODE_BOOL
8958 || TYPE_CODE (type) == TYPE_CODE_PTR
8959 || TYPE_CODE (type) == TYPE_CODE_REF
8960 || TYPE_CODE (type) == TYPE_CODE_ENUM)
8962 if (TYPE_LENGTH (type) <= 4)
8964 /* Values of one word or less are zero/sign-extended and
8966 bfd_byte tmpbuf[INT_REGISTER_SIZE];
8967 LONGEST val = unpack_long (type, valbuf);
8969 store_signed_integer (tmpbuf, INT_REGISTER_SIZE, byte_order, val);
8970 regcache_cooked_write (regs, ARM_A1_REGNUM, tmpbuf);
8974 /* Integral values greater than one word are stored in consecutive
8975 registers starting with r0. This will always be a multiple of
8976 the register size. */
8977 int len = TYPE_LENGTH (type);
8978 int regno = ARM_A1_REGNUM;
8982 regcache_cooked_write (regs, regno++, valbuf);
8983 len -= INT_REGISTER_SIZE;
8984 valbuf += INT_REGISTER_SIZE;
8990 /* For a structure or union the behaviour is as if the value had
8991 been stored to word-aligned memory and then loaded into
8992 registers with 32-bit load instruction(s). */
8993 int len = TYPE_LENGTH (type);
8994 int regno = ARM_A1_REGNUM;
8995 bfd_byte tmpbuf[INT_REGISTER_SIZE];
8999 memcpy (tmpbuf, valbuf,
9000 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
9001 regcache_cooked_write (regs, regno++, tmpbuf);
9002 len -= INT_REGISTER_SIZE;
9003 valbuf += INT_REGISTER_SIZE;
9009 /* Handle function return values. */
9011 static enum return_value_convention
9012 arm_return_value (struct gdbarch *gdbarch, struct value *function,
9013 struct type *valtype, struct regcache *regcache,
9014 gdb_byte *readbuf, const gdb_byte *writebuf)
9016 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9017 struct type *func_type = function ? value_type (function) : NULL;
9018 enum arm_vfp_cprc_base_type vfp_base_type;
9021 if (arm_vfp_abi_for_function (gdbarch, func_type)
9022 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
9024 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
9025 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
9027 for (i = 0; i < vfp_base_count; i++)
9029 if (reg_char == 'q')
9032 arm_neon_quad_write (gdbarch, regcache, i,
9033 writebuf + i * unit_length);
9036 arm_neon_quad_read (gdbarch, regcache, i,
9037 readbuf + i * unit_length);
9044 sprintf (name_buf, "%c%d", reg_char, i);
9045 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9048 regcache_cooked_write (regcache, regnum,
9049 writebuf + i * unit_length);
9051 regcache_cooked_read (regcache, regnum,
9052 readbuf + i * unit_length);
9055 return RETURN_VALUE_REGISTER_CONVENTION;
9058 if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
9059 || TYPE_CODE (valtype) == TYPE_CODE_UNION
9060 || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
9062 if (tdep->struct_return == pcc_struct_return
9063 || arm_return_in_memory (gdbarch, valtype))
9064 return RETURN_VALUE_STRUCT_CONVENTION;
9067 /* AAPCS returns complex types longer than a register in memory. */
9068 if (tdep->arm_abi != ARM_ABI_APCS
9069 && TYPE_CODE (valtype) == TYPE_CODE_COMPLEX
9070 && TYPE_LENGTH (valtype) > INT_REGISTER_SIZE)
9071 return RETURN_VALUE_STRUCT_CONVENTION;
9074 arm_store_return_value (valtype, regcache, writebuf);
9077 arm_extract_return_value (valtype, regcache, readbuf);
9079 return RETURN_VALUE_REGISTER_CONVENTION;
9084 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
9086 struct gdbarch *gdbarch = get_frame_arch (frame);
9087 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9088 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
9090 char buf[INT_REGISTER_SIZE];
9092 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
9094 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
9098 *pc = extract_unsigned_integer (buf, INT_REGISTER_SIZE, byte_order);
9102 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
9103 return the target PC. Otherwise return 0. */
9106 arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
9110 CORE_ADDR start_addr;
9112 /* Find the starting address and name of the function containing the PC. */
9113 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
9116 /* If PC is in a Thumb call or return stub, return the address of the
9117 target PC, which is in a register. The thunk functions are called
9118 _call_via_xx, where x is the register name. The possible names
9119 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
9120 functions, named __ARM_call_via_r[0-7]. */
9121 if (strncmp (name, "_call_via_", 10) == 0
9122 || strncmp (name, "__ARM_call_via_", strlen ("__ARM_call_via_")) == 0)
9124 /* Use the name suffix to determine which register contains the
9126 static char *table[15] =
9127 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
9128 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
9131 int offset = strlen (name) - 2;
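/* The register is named by the last two characters of the stub name,
   e.g. "_call_via_ip" -> "ip".  */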
9133 for (regno = 0; regno <= 14; regno++)
9134 if (strcmp (&name[offset], table[regno]) == 0)
9135 return get_frame_register_unsigned (frame, regno);
9138 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
9139 non-interworking calls to foo. We could decode the stubs
9140 to find the target but it's easier to use the symbol table. */
9141 namelen = strlen (name);
9142 if (name[0] == '_' && name[1] == '_'
9143 && ((namelen > 2 + strlen ("_from_thumb")
9144 && strncmp (name + namelen - strlen ("_from_thumb"), "_from_thumb",
9145 strlen ("_from_thumb")) == 0)
9146 || (namelen > 2 + strlen ("_from_arm")
9147 && strncmp (name + namelen - strlen ("_from_arm"), "_from_arm",
9148 strlen ("_from_arm")) == 0)))
9151 int target_len = namelen - 2;
9152 struct minimal_symbol *minsym;
9153 struct objfile *objfile;
9154 struct obj_section *sec;
9156 if (name[namelen - 1] == 'b')
9157 target_len -= strlen ("_from_thumb");
9159 target_len -= strlen ("_from_arm");
9161 target_name = alloca (target_len + 1);
9162 memcpy (target_name, name + 2, target_len);
9163 target_name[target_len] = '\0';
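/* E.g. the stub name "__foo_from_thumb" yields the target name "foo".  */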
9165 sec = find_pc_section (pc);
9166 objfile = (sec == NULL) ? NULL : sec->objfile;
9167 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
9169 return SYMBOL_VALUE_ADDRESS (minsym);
9174 return 0; /* not a stub */
9178 set_arm_command (char *args, int from_tty)
9180 printf_unfiltered (_("\
9181 \"set arm\" must be followed by an appropriate subcommand.\n"));
9182 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
9186 show_arm_command (char *args, int from_tty)
9188 cmd_show_list (showarmcmdlist, from_tty, "");
9192 arm_update_current_architecture (void)
9194 struct gdbarch_info info;
9196 /* If the current architecture is not ARM, we have nothing to do. */
9197 if (gdbarch_bfd_arch_info (target_gdbarch)->arch != bfd_arch_arm)
9200 /* Update the architecture. */
9201 gdbarch_info_init (&info);
9203 if (!gdbarch_update_p (info))
9204 internal_error (__FILE__, __LINE__, _("could not update architecture"));
9208 set_fp_model_sfunc (char *args, int from_tty,
9209 struct cmd_list_element *c)
9211 enum arm_float_model fp_model;
9213 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
9214 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
9216 arm_fp_model = fp_model;
9220 if (fp_model == ARM_FLOAT_LAST)
9221 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
9224 arm_update_current_architecture ();
9228 show_fp_model (struct ui_file *file, int from_tty,
9229 struct cmd_list_element *c, const char *value)
9231 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch);
9233 if (arm_fp_model == ARM_FLOAT_AUTO
9234 && gdbarch_bfd_arch_info (target_gdbarch)->arch == bfd_arch_arm)
9235 fprintf_filtered (file, _("\
9236 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
9237 fp_model_strings[tdep->fp_model]);
9239 fprintf_filtered (file, _("\
9240 The current ARM floating point model is \"%s\".\n"),
9241 fp_model_strings[arm_fp_model]);
9245 arm_set_abi (char *args, int from_tty,
9246 struct cmd_list_element *c)
9248 enum arm_abi_kind arm_abi;
9250 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
9251 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
9253 arm_abi_global = arm_abi;
9257 if (arm_abi == ARM_ABI_LAST)
9258 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
9261 arm_update_current_architecture ();
9265 arm_show_abi (struct ui_file *file, int from_tty,
9266 struct cmd_list_element *c, const char *value)
9268 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch);
9270 if (arm_abi_global == ARM_ABI_AUTO
9271 && gdbarch_bfd_arch_info (target_gdbarch)->arch == bfd_arch_arm)
9272 fprintf_filtered (file, _("\
9273 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
9274 arm_abi_strings[tdep->arm_abi]);
9276 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
9281 arm_show_fallback_mode (struct ui_file *file, int from_tty,
9282 struct cmd_list_element *c, const char *value)
9284 fprintf_filtered (file,
9285 _("The current execution mode assumed "
9286 "(when symbols are unavailable) is \"%s\".\n"),
9287 arm_fallback_mode_string);
9291 arm_show_force_mode (struct ui_file *file, int from_tty,
9292 struct cmd_list_element *c, const char *value)
9294 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch);
9296 fprintf_filtered (file,
9297 _("The current execution mode assumed "
9298 "(even when symbols are available) is \"%s\".\n"),
9299 arm_force_mode_string);
9302 /* If the user changes the register disassembly style used for info
9303 register and other commands, we have to also switch the style used
9304 in opcodes for disassembly output. This function is run by the "set
9305 arm disassembly" command, and does that. */
9308 set_disassembly_style_sfunc (char *args, int from_tty,
9309 struct cmd_list_element *c)
9311 set_disassembly_style ();
9314 /* Return the ARM register name corresponding to register I. */
9316 arm_register_name (struct gdbarch *gdbarch, int i)
9318 const int num_regs = gdbarch_num_regs (gdbarch);
9320 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
9321 && i >= num_regs && i < num_regs + 32)
9323 static const char *const vfp_pseudo_names[] = {
9324 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
9325 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
9326 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
9327 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
9330 return vfp_pseudo_names[i - num_regs];
9333 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
9334 && i >= num_regs + 32 && i < num_regs + 32 + 16)
9336 static const char *const neon_pseudo_names[] = {
9337 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
9338 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
9341 return neon_pseudo_names[i - num_regs - 32];
9344 if (i >= ARRAY_SIZE (arm_register_names))
9345 /* These registers are only supported on targets which supply
9346 an XML description. */
9349 return arm_register_names[i];
9353 set_disassembly_style (void)
9357 /* Find the style that the user wants. */
9358 for (current = 0; current < num_disassembly_options; current++)
9359 if (disassembly_style == valid_disassembly_styles[current])
9361 gdb_assert (current < num_disassembly_options);
9363 /* Synchronize the disassembler. */
9364 set_arm_regname_option (current);
9367 /* Test whether the coff symbol specific value corresponds to a Thumb
9371 coff_sym_is_thumb (int val)
9373 return (val == C_THUMBEXT
9374 || val == C_THUMBSTAT
9375 || val == C_THUMBEXTFUNC
9376 || val == C_THUMBSTATFUNC
9377 || val == C_THUMBLABEL);
9380 /* arm_coff_make_msymbol_special()
9381 arm_elf_make_msymbol_special()
9383 These functions test whether the COFF or ELF symbol corresponds to
9384 an address in thumb code, and set a "special" bit in a minimal
9385 symbol to indicate that it does. */
9388 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
9390 if (ARM_SYM_BRANCH_TYPE (&((elf_symbol_type *)sym)->internal_elf_sym)
9391 == ST_BRANCH_TO_THUMB)
9392 MSYMBOL_SET_SPECIAL (msym);
9396 arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
9398 if (coff_sym_is_thumb (val))
9399 MSYMBOL_SET_SPECIAL (msym);
9403 arm_objfile_data_free (struct objfile *objfile, void *arg)
9405 struct arm_per_objfile *data = arg;
9408 for (i = 0; i < objfile->obfd->section_count; i++)
9409 VEC_free (arm_mapping_symbol_s, data->section_maps[i]);
9413 arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
9416 const char *name = bfd_asymbol_name (sym);
9417 struct arm_per_objfile *data;
9418 VEC(arm_mapping_symbol_s) **map_p;
9419 struct arm_mapping_symbol new_map_sym;
9421 gdb_assert (name[0] == '$');
9422 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
9425 data = objfile_data (objfile, arm_objfile_data_key);
9428 data = OBSTACK_ZALLOC (&objfile->objfile_obstack,
9429 struct arm_per_objfile);
9430 set_objfile_data (objfile, arm_objfile_data_key, data);
9431 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
9432 objfile->obfd->section_count,
9433 VEC(arm_mapping_symbol_s) *);
9435 map_p = &data->section_maps[bfd_get_section (sym)->index];
9437 new_map_sym.value = sym->value;
9438 new_map_sym.type = name[1];
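/* Per the ARM ELF mapping symbol convention, '$a' marks the start of ARM
   code, '$t' the start of Thumb code and '$d' the start of data.  */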
9440 /* Assume that most mapping symbols appear in order of increasing
9441 value. If they were randomly distributed, it would be faster to
9442 always push here and then sort at first use. */
9443 if (!VEC_empty (arm_mapping_symbol_s, *map_p))
9445 struct arm_mapping_symbol *prev_map_sym;
9447 prev_map_sym = VEC_last (arm_mapping_symbol_s, *map_p);
9448 if (prev_map_sym->value >= sym->value)
9451 idx = VEC_lower_bound (arm_mapping_symbol_s, *map_p, &new_map_sym,
9452 arm_compare_mapping_symbols);
9453 VEC_safe_insert (arm_mapping_symbol_s, *map_p, idx, &new_map_sym);
9458 VEC_safe_push (arm_mapping_symbol_s, *map_p, &new_map_sym);
9462 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
9464 struct gdbarch *gdbarch = get_regcache_arch (regcache);
9465 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
9467 /* If necessary, set the T bit. */
9470 ULONGEST val, t_bit;
9471 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
9472 t_bit = arm_psr_thumb_bit (gdbarch);
9473 if (arm_pc_is_thumb (gdbarch, pc))
9474 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9477 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9482 /* Read the contents of a NEON quad register, by reading from two
9483 double registers. This is used to implement the quad pseudo
9484 registers, and for argument passing in case the quad registers are
9485 missing; vectors are passed in quad registers when using the VFP
9486 ABI, even if a NEON unit is not present. REGNUM is the index of
9487 the quad register, in [0, 15]. */
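/* For example, q1 is composed of d2 and d3, with d2 supplying the least
   significant half.  */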
9489 static enum register_status
9490 arm_neon_quad_read (struct gdbarch *gdbarch, struct regcache *regcache,
9491 int regnum, gdb_byte *buf)
9494 gdb_byte reg_buf[8];
9495 int offset, double_regnum;
9496 enum register_status status;
9498 sprintf (name_buf, "d%d", regnum << 1);
9499 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9502 /* d0 is always the least significant half of q0. */
9503 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9508 status = regcache_raw_read (regcache, double_regnum, reg_buf);
9509 if (status != REG_VALID)
9511 memcpy (buf + offset, reg_buf, 8);
9513 offset = 8 - offset;
9514 status = regcache_raw_read (regcache, double_regnum + 1, reg_buf);
9515 if (status != REG_VALID)
9517 memcpy (buf + offset, reg_buf, 8);
9522 static enum register_status
9523 arm_pseudo_read (struct gdbarch *gdbarch, struct regcache *regcache,
9524 int regnum, gdb_byte *buf)
9526 const int num_regs = gdbarch_num_regs (gdbarch);
9528 gdb_byte reg_buf[8];
9529 int offset, double_regnum;
9531 gdb_assert (regnum >= num_regs);
9534 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
9535 /* Quad-precision register. */
9536 return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
9539 enum register_status status;
9541 /* Single-precision register. */
9542 gdb_assert (regnum < 32);
9544 /* s0 is always the least significant half of d0. */
9545 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9546 offset = (regnum & 1) ? 0 : 4;
9548 offset = (regnum & 1) ? 4 : 0;
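/* For example, on a little-endian target s5 occupies bytes 4..7 of d2
   (5 >> 1 == 2, and (5 & 1) selects the upper half).  */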
9550 sprintf (name_buf, "d%d", regnum >> 1);
9551 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9554 status = regcache_raw_read (regcache, double_regnum, reg_buf);
9555 if (status == REG_VALID)
9556 memcpy (buf, reg_buf + offset, 4);
9561 /* Store the contents of BUF to a NEON quad register, by writing to
9562 two double registers. This is used to implement the quad pseudo
9563 registers, and for argument passing in case the quad registers are
9564 missing; vectors are passed in quad registers when using the VFP
9565 ABI, even if a NEON unit is not present. REGNUM is the index
9566 of the quad register, in [0, 15]. */
9569 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
9570 int regnum, const gdb_byte *buf)
9573 int offset, double_regnum;
9575 sprintf (name_buf, "d%d", regnum << 1);
9576 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9579 /* d0 is always the least significant half of q0. */
9580 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9585 regcache_raw_write (regcache, double_regnum, buf + offset);
9586 offset = 8 - offset;
9587 regcache_raw_write (regcache, double_regnum + 1, buf + offset);
9591 arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
9592 int regnum, const gdb_byte *buf)
9594 const int num_regs = gdbarch_num_regs (gdbarch);
9596 gdb_byte reg_buf[8];
9597 int offset, double_regnum;
9599 gdb_assert (regnum >= num_regs);
9602 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
9603 /* Quad-precision register. */
9604 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
9607 /* Single-precision register. */
9608 gdb_assert (regnum < 32);
9610 /* s0 is always the least significant half of d0. */
9611 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9612 offset = (regnum & 1) ? 0 : 4;
9614 offset = (regnum & 1) ? 4 : 0;
9616 sprintf (name_buf, "d%d", regnum >> 1);
9617 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9620 regcache_raw_read (regcache, double_regnum, reg_buf);
9621 memcpy (reg_buf + offset, buf, 4);
9622 regcache_raw_write (regcache, double_regnum, reg_buf);
9626 static struct value *
9627 value_of_arm_user_reg (struct frame_info *frame, const void *baton)
9629 const int *reg_p = baton;
9630 return value_of_register (*reg_p, frame);
9633 static enum gdb_osabi
9634 arm_elf_osabi_sniffer (bfd *abfd)
9636 unsigned int elfosabi;
9637 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
9639 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
9641 if (elfosabi == ELFOSABI_ARM)
9642 /* GNU tools use this value. Check note sections in this case,
9644 bfd_map_over_sections (abfd,
9645 generic_elf_osabi_sniff_abi_tag_sections,
9648 /* Anything else will be handled by the generic ELF sniffer. */
9653 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
9654 struct reggroup *group)
9656 /* The FPS register's type is INT, but it belongs to float_reggroup. Besides
9657 this, the FPS register belongs to save_reggroup, restore_reggroup, and
9658 all_reggroup, of course. */
9659 if (regnum == ARM_FPS_REGNUM)
9660 return (group == float_reggroup
9661 || group == save_reggroup
9662 || group == restore_reggroup
9663 || group == all_reggroup);
9665 return default_register_reggroup_p (gdbarch, regnum, group);
9669 /* For backward-compatibility we allow two 'g' packet lengths with
9670 the remote protocol depending on whether FPA registers are
9671 supplied. M-profile targets do not have FPA registers, but some
9672 stubs already exist in the wild which use a 'g' packet which
9673 supplies them albeit with dummy values. The packet format which
9674 includes FPA registers should be considered deprecated for
9675 M-profile targets. */
9678 arm_register_g_packet_guesses (struct gdbarch *gdbarch)
9680 if (gdbarch_tdep (gdbarch)->is_m)
9682 /* If we know from the executable this is an M-profile target,
9683 cater for remote targets whose register set layout is the
9684 same as the FPA layout. */
9685 register_remote_g_packet_guess (gdbarch,
9686 /* r0-r12,sp,lr,pc; f0-f7; fps,xpsr */
9687 (16 * INT_REGISTER_SIZE)
9688 + (8 * FP_REGISTER_SIZE)
9689 + (2 * INT_REGISTER_SIZE),
9690 tdesc_arm_with_m_fpa_layout);
9692 /* The regular M-profile layout. */
9693 register_remote_g_packet_guess (gdbarch,
9694 /* r0-r12,sp,lr,pc; xpsr */
9695 (16 * INT_REGISTER_SIZE)
9696 + INT_REGISTER_SIZE,
9699 /* M-profile plus M4F VFP. */
9700 register_remote_g_packet_guess (gdbarch,
9701 /* r0-r12,sp,lr,pc; d0-d15; fpscr,xpsr */
9702 (16 * INT_REGISTER_SIZE)
9703 + (16 * VFP_REGISTER_SIZE)
9704 + (2 * INT_REGISTER_SIZE),
9705 tdesc_arm_with_m_vfp_d16);
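/* With the usual register sizes (4-byte integer registers, 12-byte FPA
   registers, 8-byte VFP double registers) the three guesses above
   correspond to 'g' packets describing 168, 68 and 200 bytes of register
   data respectively.  */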
9708 /* Otherwise we don't have a useful guess. */
9712 /* Initialize the current architecture based on INFO. If possible,
9713 re-use an architecture from ARCHES, which is a list of
9714 architectures already created during this debugging session.
9716 Called e.g. at program startup, when reading a core file, and when
9717 reading a binary file. */
9719 static struct gdbarch *
9720 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
9722 struct gdbarch_tdep *tdep;
9723 struct gdbarch *gdbarch;
9724 struct gdbarch_list *best_arch;
9725 enum arm_abi_kind arm_abi = arm_abi_global;
9726 enum arm_float_model fp_model = arm_fp_model;
9727 struct tdesc_arch_data *tdesc_data = NULL;
9729 int have_vfp_registers = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
9731 int have_fpa_registers = 1;
9732 const struct target_desc *tdesc = info.target_desc;
9734 /* If we have an object to base this architecture on, try to determine
9737 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
9739 int ei_osabi, e_flags;
9741 switch (bfd_get_flavour (info.abfd))
9743 case bfd_target_aout_flavour:
9744 /* Assume it's an old APCS-style ABI. */
9745 arm_abi = ARM_ABI_APCS;
9748 case bfd_target_coff_flavour:
9749 /* Assume it's an old APCS-style ABI. */
9751 arm_abi = ARM_ABI_APCS;
9754 case bfd_target_elf_flavour:
9755 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
9756 e_flags = elf_elfheader (info.abfd)->e_flags;
9758 if (ei_osabi == ELFOSABI_ARM)
9760 /* GNU tools used to use this value, but do not for EABI
9761 objects. There's nowhere to tag an EABI version
9762 anyway, so assume APCS. */
9763 arm_abi = ARM_ABI_APCS;
9765 else if (ei_osabi == ELFOSABI_NONE)
9767 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
9768 int attr_arch, attr_profile;
9772 case EF_ARM_EABI_UNKNOWN:
9773 /* Assume GNU tools. */
9774 arm_abi = ARM_ABI_APCS;
9777 case EF_ARM_EABI_VER4:
9778 case EF_ARM_EABI_VER5:
9779 arm_abi = ARM_ABI_AAPCS;
9780 /* EABI binaries default to VFP float ordering.
9781 They may also contain build attributes that can
9782 be used to identify if the VFP argument-passing
9784 if (fp_model == ARM_FLOAT_AUTO)
9787 switch (bfd_elf_get_obj_attr_int (info.abfd,
9792 /* "The user intended FP parameter/result
9793 passing to conform to AAPCS, base
9795 fp_model = ARM_FLOAT_SOFT_VFP;
9798 /* "The user intended FP parameter/result
9799 passing to conform to AAPCS, VFP
9801 fp_model = ARM_FLOAT_VFP;
9804 /* "The user intended FP parameter/result
9805 passing to conform to tool chain-specific
9806 conventions" - we don't know any such
9807 conventions, so leave it as "auto". */
9810 /* Attribute value not mentioned in the
9811 October 2008 ABI, so leave it as
9816 fp_model = ARM_FLOAT_SOFT_VFP;
9822 /* Leave it as "auto". */
9823 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
9828 /* Detect M-profile programs. This only works if the
9829 executable file includes build attributes; GCC does
9830 copy them to the executable, but e.g. RealView does
9832 attr_arch = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9834 attr_profile = bfd_elf_get_obj_attr_int (info.abfd,
9836 Tag_CPU_arch_profile);
9837 /* GCC specifies the profile for v6-M; RealView only
9838 specifies the profile for architectures starting with
9839 V7 (as opposed to architectures with a tag
9840 numerically greater than TAG_CPU_ARCH_V7). */
9841 if (!tdesc_has_registers (tdesc)
9842 && (attr_arch == TAG_CPU_ARCH_V6_M
9843 || attr_arch == TAG_CPU_ARCH_V6S_M
9844 || attr_profile == 'M'))
9849 if (fp_model == ARM_FLOAT_AUTO)
9851 int e_flags = elf_elfheader (info.abfd)->e_flags;
9853 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
9856 /* Leave it as "auto". Strictly speaking this case
9857 means FPA, but almost nobody uses that now, and
9858 many toolchains fail to set the appropriate bits
9859 for the floating-point model they use. */
9861 case EF_ARM_SOFT_FLOAT:
9862 fp_model = ARM_FLOAT_SOFT_FPA;
9864 case EF_ARM_VFP_FLOAT:
9865 fp_model = ARM_FLOAT_VFP;
9867 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
9868 fp_model = ARM_FLOAT_SOFT_VFP;
9873 if (e_flags & EF_ARM_BE8)
9874 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
9879 /* Leave it as "auto". */
9884 /* Check any target description for validity. */
9885 if (tdesc_has_registers (tdesc))
9887 /* For most registers we require GDB's default names; but also allow
9888 the numeric names for sp / lr / pc, as a convenience. */
9889 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
9890 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
9891 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
9893 const struct tdesc_feature *feature;
9896 feature = tdesc_find_feature (tdesc,
9897 "org.gnu.gdb.arm.core");
9898 if (feature == NULL)
9900 feature = tdesc_find_feature (tdesc,
9901 "org.gnu.gdb.arm.m-profile");
9902 if (feature == NULL)
9908 tdesc_data = tdesc_data_alloc ();
9911 for (i = 0; i < ARM_SP_REGNUM; i++)
9912 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9913 arm_register_names[i]);
9914 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9917 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9920 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9924 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9925 ARM_PS_REGNUM, "xpsr");
9927 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9928 ARM_PS_REGNUM, "cpsr");
9932 tdesc_data_cleanup (tdesc_data);
9936 feature = tdesc_find_feature (tdesc,
9937 "org.gnu.gdb.arm.fpa");
9938 if (feature != NULL)
9941 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
9942 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9943 arm_register_names[i]);
9946 tdesc_data_cleanup (tdesc_data);
9951 have_fpa_registers = 0;
9953 feature = tdesc_find_feature (tdesc,
9954 "org.gnu.gdb.xscale.iwmmxt");
9955 if (feature != NULL)
9957 static const char *const iwmmxt_names[] = {
9958 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9959 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9960 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9961 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9965 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
9967 &= tdesc_numbered_register (feature, tdesc_data, i,
9968 iwmmxt_names[i - ARM_WR0_REGNUM]);
9970 /* Check for the control registers, but do not fail if they
9972 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
9973 tdesc_numbered_register (feature, tdesc_data, i,
9974 iwmmxt_names[i - ARM_WR0_REGNUM]);
9976 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
9978 &= tdesc_numbered_register (feature, tdesc_data, i,
9979 iwmmxt_names[i - ARM_WR0_REGNUM]);
9983 tdesc_data_cleanup (tdesc_data);
9988 /* If we have a VFP unit, check whether the single precision registers
9989 are present. If not, then we will synthesize them as pseudo
9991 feature = tdesc_find_feature (tdesc,
9992 "org.gnu.gdb.arm.vfp");
9993 if (feature != NULL)
9995 static const char *const vfp_double_names[] = {
9996 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9997 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9998 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9999 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
10002 /* Require the double precision registers. There must be either
10005 for (i = 0; i < 32; i++)
10007 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10009 vfp_double_names[i]);
10013 if (!valid_p && i == 16)
10016 /* Also require FPSCR. */
10017 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10018 ARM_FPSCR_REGNUM, "fpscr");
10021 tdesc_data_cleanup (tdesc_data);
10025 if (tdesc_unnumbered_register (feature, "s0") == 0)
10026 have_vfp_pseudos = 1;
10028 have_vfp_registers = 1;
10030 /* If we have VFP, also check for NEON. The architecture allows
10031 NEON without VFP (integer vector operations only), but GDB
10032 does not support that. */
10033 feature = tdesc_find_feature (tdesc,
10034 "org.gnu.gdb.arm.neon");
10035 if (feature != NULL)
10037 /* NEON requires 32 double-precision registers. */
10040 tdesc_data_cleanup (tdesc_data);
10044 /* If there are quad registers defined by the stub, use
10045 their type; otherwise (normally) provide them with
10046 the default type. */
10047 if (tdesc_unnumbered_register (feature, "q0") == 0)
10048 have_neon_pseudos = 1;
10055 /* If there is already a candidate, use it. */
10056 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
10058 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
10060 if (arm_abi != ARM_ABI_AUTO
10061 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
10064 if (fp_model != ARM_FLOAT_AUTO
10065 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
10068 /* There are various other properties in tdep that we do not
10069 need to check here: those derived from a target description,
10070 since gdbarches with a different target description are
10071 automatically disqualified. */
10073 /* Do check is_m, though, since it might come from the binary. */
10074 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
10077 /* Found a match. */
10081 if (best_arch != NULL)
10083 if (tdesc_data != NULL)
10084 tdesc_data_cleanup (tdesc_data);
10085 return best_arch->gdbarch;
10088 tdep = xcalloc (1, sizeof (struct gdbarch_tdep));
10089 gdbarch = gdbarch_alloc (&info, tdep);
10091 /* Record additional information about the architecture we are defining.
10092 These are gdbarch discriminators, like the OSABI. */
10093 tdep->arm_abi = arm_abi;
10094 tdep->fp_model = fp_model;
10096 tdep->have_fpa_registers = have_fpa_registers;
10097 tdep->have_vfp_registers = have_vfp_registers;
10098 tdep->have_vfp_pseudos = have_vfp_pseudos;
10099 tdep->have_neon_pseudos = have_neon_pseudos;
10100 tdep->have_neon = have_neon;
10102 arm_register_g_packet_guesses (gdbarch);
10105 switch (info.byte_order_for_code)
10107 case BFD_ENDIAN_BIG:
10108 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
10109 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
10110 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
10111 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
10115 case BFD_ENDIAN_LITTLE:
10116 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
10117 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
10118 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
10119 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
10124 internal_error (__FILE__, __LINE__,
10125 _("arm_gdbarch_init: bad byte order for float format"));
10128 /* On ARM targets char defaults to unsigned. */
10129 set_gdbarch_char_signed (gdbarch, 0);
10131 /* Note: for displaced stepping, this includes the breakpoint, and one word
10132 of additional scratch space. This setting isn't used for anything besides
10133 displaced stepping at present. */
10134 set_gdbarch_max_insn_length (gdbarch, 4 * DISPLACED_MODIFIED_INSNS);
10136 /* This should be low enough for everything. */
10137 tdep->lowest_pc = 0x20;
10138 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
10140 /* The default, for both APCS and AAPCS, is to return small
10141 structures in registers. */
10142 tdep->struct_return = reg_struct_return;
10144 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
10145 set_gdbarch_frame_align (gdbarch, arm_frame_align);
10147 set_gdbarch_write_pc (gdbarch, arm_write_pc);
10149 /* Frame handling. */
10150 set_gdbarch_dummy_id (gdbarch, arm_dummy_id);
10151 set_gdbarch_unwind_pc (gdbarch, arm_unwind_pc);
10152 set_gdbarch_unwind_sp (gdbarch, arm_unwind_sp);
10154 frame_base_set_default (gdbarch, &arm_normal_base);
10156 /* Address manipulation. */
10157 set_gdbarch_smash_text_address (gdbarch, arm_smash_text_address);
10158 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
10160 /* Advance PC across function entry code. */
10161 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
10163 /* Detect whether PC is in function epilogue. */
10164 set_gdbarch_in_function_epilogue_p (gdbarch, arm_in_function_epilogue_p);
10166 /* Skip trampolines. */
10167 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
10169 /* The stack grows downward. */
10170 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
10172 /* Breakpoint manipulation. */
10173 set_gdbarch_breakpoint_from_pc (gdbarch, arm_breakpoint_from_pc);
10174 set_gdbarch_remote_breakpoint_from_pc (gdbarch,
10175 arm_remote_breakpoint_from_pc);
10177 /* Information about registers, etc. */
10178 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
10179 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
10180 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
10181 set_gdbarch_register_type (gdbarch, arm_register_type);
10182 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
10184 /* This "info float" is FPA-specific. Use the generic version if we
10185 do not have FPA. */
10186 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
10187 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
10189 /* Internal <-> external register number maps. */
10190 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
10191 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
10193 set_gdbarch_register_name (gdbarch, arm_register_name);
10195 /* Returning results. */
10196 set_gdbarch_return_value (gdbarch, arm_return_value);
10199 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
10201 /* Minsymbol frobbing. */
10202 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
10203 set_gdbarch_coff_make_msymbol_special (gdbarch,
10204 arm_coff_make_msymbol_special);
10205 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
10207 /* Thumb-2 IT block support. */
10208 set_gdbarch_adjust_breakpoint_address (gdbarch,
10209 arm_adjust_breakpoint_address);
10211 /* Virtual tables. */
10212 set_gdbarch_vbit_in_delta (gdbarch, 1);
10214 /* Hook in the ABI-specific overrides, if they have been registered. */
10215 gdbarch_init_osabi (info, gdbarch);
10217 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
10219 /* Add some default predicates. */
10220 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
10221 dwarf2_append_unwinders (gdbarch);
10222 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
10223 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
10225 /* Now we have tuned the configuration, set a few final things,
10226 based on what the OS ABI has told us. */
10228 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
10229 binaries are always marked. */
10230 if (tdep->arm_abi == ARM_ABI_AUTO)
10231 tdep->arm_abi = ARM_ABI_APCS;
10233 /* Watchpoints are not steppable. */
10234 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
10236 /* We used to default to FPA for generic ARM, but almost nobody
10237 uses that now, and we now provide a way for the user to force
10238 the model. So default to the most useful variant. */
10239 if (tdep->fp_model == ARM_FLOAT_AUTO)
10240 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
10242 if (tdep->jb_pc >= 0)
10243 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
10245 /* Floating point sizes and format. */
10246 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
10247 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
10249 set_gdbarch_double_format
10250 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
10251 set_gdbarch_long_double_format
10252 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
10256 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
10257 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
10260 if (have_vfp_pseudos)
10262 /* NOTE: These are the only pseudo registers used by
10263 the ARM target at the moment. If more are added, a
10264 little more care in numbering will be needed. */
10266 int num_pseudos = 32;
10267 if (have_neon_pseudos)
10269 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
10270 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
10271 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
10276 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
10278 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
10280 /* Override tdesc_register_type to adjust the types of VFP
10281 registers for NEON. */
10282 set_gdbarch_register_type (gdbarch, arm_register_type);
10285 /* Add standard register aliases. We add aliases even for those
10286 names which are used by the current architecture - it's simpler,
10287 and does no harm, since nothing ever lists user registers. */
10288 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
10289 user_reg_add (gdbarch, arm_register_aliases[i].name,
10290 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
10296 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
10298 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
10303 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
10304 (unsigned long) tdep->lowest_pc);
10307 extern initialize_file_ftype _initialize_arm_tdep; /* -Wmissing-prototypes */
10310 _initialize_arm_tdep (void)
10312 struct ui_file *stb;
10314 struct cmd_list_element *new_set, *new_show;
10315 const char *setname;
10316 const char *setdesc;
10317 const char *const *regnames;
10319 static char *helptext;
10320 char regdesc[1024], *rdptr = regdesc;
10321 size_t rest = sizeof (regdesc);
10323 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
10325 arm_objfile_data_key
10326 = register_objfile_data_with_cleanup (NULL, arm_objfile_data_free);
10328 /* Add ourselves to objfile event chain. */
10329 observer_attach_new_objfile (arm_exidx_new_objfile);
10331 = register_objfile_data_with_cleanup (NULL, arm_exidx_data_free);
10333 /* Register an ELF OS ABI sniffer for ARM binaries. */
10334 gdbarch_register_osabi_sniffer (bfd_arch_arm,
10335 bfd_target_elf_flavour,
10336 arm_elf_osabi_sniffer);
10338 /* Initialize the standard target descriptions. */
10339 initialize_tdesc_arm_with_m ();
10340 initialize_tdesc_arm_with_m_fpa_layout ();
10341 initialize_tdesc_arm_with_m_vfp_d16 ();
10342 initialize_tdesc_arm_with_iwmmxt ();
10343 initialize_tdesc_arm_with_vfpv2 ();
10344 initialize_tdesc_arm_with_vfpv3 ();
10345 initialize_tdesc_arm_with_neon ();
10347 /* Get the number of possible sets of register names defined in opcodes. */
10348 num_disassembly_options = get_arm_regname_num_options ();
10350 /* Add root prefix command for all "set arm"/"show arm" commands. */
10351 add_prefix_cmd ("arm", no_class, set_arm_command,
10352 _("Various ARM-specific commands."),
10353 &setarmcmdlist, "set arm ", 0, &setlist);
10355 add_prefix_cmd ("arm", no_class, show_arm_command,
10356 _("Various ARM-specific commands."),
10357 &showarmcmdlist, "show arm ", 0, &showlist);
10359 /* Sync the opcode insn printer with our register viewer. */
10360 parse_arm_disassembler_option ("reg-names-std");
10362 /* Initialize the array that will be passed to
10363 add_setshow_enum_cmd(). */
10364 valid_disassembly_styles
10365 = xmalloc ((num_disassembly_options + 1) * sizeof (char *));
10366 for (i = 0; i < num_disassembly_options; i++)
10368 numregs = get_arm_regnames (i, &setname, &setdesc, &regnames);
10369 valid_disassembly_styles[i] = setname;
10370 length = snprintf (rdptr, rest, "%s - %s\n", setname, setdesc);
10373 /* When we find the default names, tell the disassembler to use
10375 if (!strcmp (setname, "std"))
10377 disassembly_style = setname;
10378 set_arm_regname_option (i);
10381 /* Mark the end of valid options. */
10382 valid_disassembly_styles[num_disassembly_options] = NULL;
10384 /* Create the help text. */
10385 stb = mem_fileopen ();
10386 fprintf_unfiltered (stb, "%s%s%s",
10387 _("The valid values are:\n"),
10389 _("The default is \"std\"."));
10390 helptext = ui_file_xstrdup (stb, NULL);
10391 ui_file_delete (stb);
10393 add_setshow_enum_cmd("disassembler", no_class,
10394 valid_disassembly_styles, &disassembly_style,
10395 _("Set the disassembly style."),
10396 _("Show the disassembly style."),
10398 set_disassembly_style_sfunc,
10399 NULL, /* FIXME: i18n: The disassembly style is
10401 &setarmcmdlist, &showarmcmdlist);
10403 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
10404 _("Set usage of ARM 32-bit mode."),
10405 _("Show usage of ARM 32-bit mode."),
10406 _("When off, a 26-bit PC will be used."),
10408 NULL, /* FIXME: i18n: Usage of ARM 32-bit
10410 &setarmcmdlist, &showarmcmdlist);
10412 /* Add a command to allow the user to force the FPU model. */
10413 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
10414 _("Set the floating point type."),
10415 _("Show the floating point type."),
10416 _("auto - Determine the FP type from the OS-ABI.\n\
10417 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
10418 fpa - FPA co-processor (GCC compiled).\n\
10419 softvfp - Software FP with pure-endian doubles.\n\
10420 vfp - VFP co-processor."),
10421 set_fp_model_sfunc, show_fp_model,
10422 &setarmcmdlist, &showarmcmdlist);
10424 /* Add a command to allow the user to force the ABI. */
10425 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
10427 _("Show the ABI."),
10428 NULL, arm_set_abi, arm_show_abi,
10429 &setarmcmdlist, &showarmcmdlist);
10431 /* Add two commands to allow the user to force the assumed
10433 add_setshow_enum_cmd ("fallback-mode", class_support,
10434 arm_mode_strings, &arm_fallback_mode_string,
10435 _("Set the mode assumed when symbols are unavailable."),
10436 _("Show the mode assumed when symbols are unavailable."),
10437 NULL, NULL, arm_show_fallback_mode,
10438 &setarmcmdlist, &showarmcmdlist);
10439 add_setshow_enum_cmd ("force-mode", class_support,
10440 arm_mode_strings, &arm_force_mode_string,
10441 _("Set the mode assumed even when symbols are available."),
10442 _("Show the mode assumed even when symbols are available."),
10443 NULL, NULL, arm_show_force_mode,
10444 &setarmcmdlist, &showarmcmdlist);
10446 /* Debugging flag. */
10447 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
10448 _("Set ARM debugging."),
10449 _("Show ARM debugging."),
10450 _("When on, arm-specific debugging is enabled."),
10452 NULL, /* FIXME: i18n: "ARM debugging is %s. */
10453 &setdebuglist, &showdebuglist);
10456 /* ARM-reversible process record data structures. */
10458 #define ARM_INSN_SIZE_BYTES 4
10459 #define THUMB_INSN_SIZE_BYTES 2
10460 #define THUMB2_INSN_SIZE_BYTES 4
10463 #define INSN_S_L_BIT_NUM 20
10465 #define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
10468 unsigned int reg_len = LENGTH; \
10471 REGS = XNEWVEC (uint32_t, reg_len); \
10472 memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
10477 #define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
10480 unsigned int mem_len = LENGTH; \
10483 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
10484 memcpy(&MEMS->len, &RECORD_BUF[0], \
10485 sizeof(struct arm_mem_r) * LENGTH); \
10490 /* Boolean expression: checks whether the insn has already been recorded (as opposed to being yet to be decoded). */
10491 #define INSN_RECORDED(ARM_RECORD) \
10492 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
10494 /* ARM memory record structure. */
10497 uint32_t len; /* Record length. */
10498 CORE_ADDR addr; /* Memory address. */
10501 /* An ARM instruction record contains the opcode and execution state
10502 of the current insn (before entry to decode_insn()), and the
10503 list of to-be-modified registers and
10504 memory blocks (on return from decode_insn()).
10506 typedef struct insn_decode_record_t
10508 struct gdbarch *gdbarch;
10509 struct regcache *regcache;
10510 CORE_ADDR this_addr; /* Address of the insn being decoded. */
10511 uint32_t arm_insn; /* Should accommodate thumb. */
10512 uint32_t cond; /* Condition code. */
10513 uint32_t opcode; /* Insn opcode. */
10514 uint32_t decode; /* Insn decode bits. */
10515 uint32_t mem_rec_count; /* No of mem records. */
10516 uint32_t reg_rec_count; /* No of reg records. */
10517 uint32_t *arm_regs; /* Registers to be saved for this record. */
10518 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
10519 } insn_decode_record;
10522 /* Checks ARM SBZ and SBO mandatory fields. */
10525 sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
10527 uint32_t ones = bits (insn, bit_num - 1, (bit_num - 1) + (len - 1));
10550 } arm_record_strx_t;
10561 arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
10562 uint32_t *record_buf_mem, arm_record_strx_t str_type)
10565 struct regcache *reg_cache = arm_insn_r->regcache;
10566 ULONGEST u_regval[2] = {0};
10568 uint32_t reg_src1 = 0, reg_src2 = 0;
10569 uint32_t immed_high = 0, immed_low = 0, offset_8 = 0, tgt_mem_addr = 0;
10570 uint32_t opcode1 = 0;
10572 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10573 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10574 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10577 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10579 /* 1) Handle misc store, immediate offset. */
10580 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
10581 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
10582 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10583 regcache_raw_read_unsigned (reg_cache, reg_src1,
10585 if (ARM_PC_REGNUM == reg_src1)
10587 /* If R15 was used as Rn, the value is the current PC + 8. */
10588 u_regval[0] = u_regval[0] + 8;
10590 offset_8 = (immed_high << 4) | immed_low;
10591 /* Calculate target store address. */
10592 if (14 == arm_insn_r->opcode)
10594 tgt_mem_addr = u_regval[0] + offset_8;
10598 tgt_mem_addr = u_regval[0] - offset_8;
10600 if (ARM_RECORD_STRH == str_type)
10602 record_buf_mem[0] = 2;
10603 record_buf_mem[1] = tgt_mem_addr;
10604 arm_insn_r->mem_rec_count = 1;
10606 else if (ARM_RECORD_STRD == str_type)
10608 record_buf_mem[0] = 4;
10609 record_buf_mem[1] = tgt_mem_addr;
10610 record_buf_mem[2] = 4;
10611 record_buf_mem[3] = tgt_mem_addr + 4;
10612 arm_insn_r->mem_rec_count = 2;
10615 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
10617 /* 2) Store, register offset. */
10619 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10621 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10622 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10623 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10624 if (15 == reg_src2)
10626 /* If R15 was used as Rn, the value is the current PC + 8. */
10627 u_regval[1] = u_regval[1] + 8;
10629 /* Calculate target store address, Rn +/- Rm, register offset. */
10630 if (12 == arm_insn_r->opcode)
10632 tgt_mem_addr = u_regval[0] + u_regval[1];
10636 tgt_mem_addr = u_regval[1] - u_regval[0];
10638 if (ARM_RECORD_STRH == str_type)
10640 record_buf_mem[0] = 2;
10641 record_buf_mem[1] = tgt_mem_addr;
10642 arm_insn_r->mem_rec_count = 1;
10644 else if (ARM_RECORD_STRD == str_type)
10646 record_buf_mem[0] = 4;
10647 record_buf_mem[1] = tgt_mem_addr;
10648 record_buf_mem[2] = 4;
10649 record_buf_mem[3] = tgt_mem_addr + 4;
10650 arm_insn_r->mem_rec_count = 2;
10653 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10654 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
10656 /* 3) Store, immediate pre-indexed. */
10657 /* 5) Store, immediate post-indexed. */
10658 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
10659 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
10660 offset_8 = (immed_high << 4) | immed_low;
10661 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10662 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10663 /* Calculate target store address, Rn +/- offset_8, immediate offset. */
10664 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
10666 tgt_mem_addr = u_regval[0] + offset_8;
10670 tgt_mem_addr = u_regval[0] - offset_8;
10672 if (ARM_RECORD_STRH == str_type)
10674 record_buf_mem[0] = 2;
10675 record_buf_mem[1] = tgt_mem_addr;
10676 arm_insn_r->mem_rec_count = 1;
10678 else if (ARM_RECORD_STRD == str_type)
10680 record_buf_mem[0] = 4;
10681 record_buf_mem[1] = tgt_mem_addr;
10682 record_buf_mem[2] = 4;
10683 record_buf_mem[3] = tgt_mem_addr + 4;
10684 arm_insn_r->mem_rec_count = 2;
10686 /* Record Rn also as it changes. */
10687 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
10688 arm_insn_r->reg_rec_count = 1;
10690 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
10691 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
10693 /* 4) Store, register pre-indexed. */
10694 /* 6) Store, register post-indexed. */
10695 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10696 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10697 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10698 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10699 /* Calculate target store address, Rn +/- Rm, register offset. */
10700 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
10702 tgt_mem_addr = u_regval[0] + u_regval[1];
10706 tgt_mem_addr = u_regval[1] - u_regval[0];
10708 if (ARM_RECORD_STRH == str_type)
10710 record_buf_mem[0] = 2;
10711 record_buf_mem[1] = tgt_mem_addr;
10712 arm_insn_r->mem_rec_count = 1;
10714 else if (ARM_RECORD_STRD == str_type)
10716 record_buf_mem[0] = 4;
10717 record_buf_mem[1] = tgt_mem_addr;
10718 record_buf_mem[2] = 4;
10719 record_buf_mem[3] = tgt_mem_addr + 4;
10720 arm_insn_r->mem_rec_count = 2;
10722 /* Record Rn also as it changes. */
10723 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
10724 arm_insn_r->reg_rec_count = 1;
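/* Illustrative sketch (kept out of the build, not part of GDB): how the
   misc-store forms handled above compute their target address and what
   they record.  The 8-bit immediate is split across two nibbles of the
   insn word; STRH records one 2-byte write, while STRD records two
   consecutive 4-byte writes.  The nibble and register values below are
   made up for illustration only.  */
#if 0
static void
example_strx_offset (void)
{
  uint32_t immed_high = 0x3, immed_low = 0x4;
  uint32_t offset_8 = (immed_high << 4) | immed_low;   /* 0x34.  */
  uint32_t rn_val = 0x2000;
  uint32_t tgt_mem_addr = rn_val + offset_8;           /* U bit set: add.  */
  uint32_t record_buf_mem[4];

  /* STRH: a single (length, address) pair of 2 bytes.  */
  record_buf_mem[0] = 2;
  record_buf_mem[1] = tgt_mem_addr;
  /* STRD would instead record two 4-byte pairs, at tgt_mem_addr and
     tgt_mem_addr + 4.  */
}
#endif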
10729 /* Handling ARM extension space insns. */
10732 arm_record_extension_space (insn_decode_record *arm_insn_r)
10734 uint32_t ret = 0; /* Return value: -1: record failure; 0: success. */
10735 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
10736 uint32_t record_buf[8], record_buf_mem[8];
10737 uint32_t reg_src1 = 0;
10738 uint32_t immed_high = 0, immed_low = 0, offset_8 = 0, tgt_mem_addr = 0;
10739 struct regcache *reg_cache = arm_insn_r->regcache;
10740 ULONGEST u_regval = 0;
10742 gdb_assert (!INSN_RECORDED(arm_insn_r));
10743 /* Handle unconditional insn extension space. */
10745 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
10746 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10747 if (arm_insn_r->cond)
10749 /* PLD has no effect on the architectural state, it just affects
10751 if (5 == ((opcode1 & 0xE0) >> 5))
10754 record_buf[0] = ARM_PS_REGNUM;
10755 record_buf[1] = ARM_LR_REGNUM;
10756 arm_insn_r->reg_rec_count = 2;
10758 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
10762 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10763 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
10766 /* Undefined instruction on ARM V5; need to handle if later
10767 versions define it. */
10770 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
10771 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10772 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
10774 /* Handle arithmetic insn extension space. */
10775 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
10776 && !INSN_RECORDED(arm_insn_r))
10778 /* Handle MLA(S) and MUL(S). */
10779 if (insn_op1 <= 3)
10781 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10782 record_buf[1] = ARM_PS_REGNUM;
10783 arm_insn_r->reg_rec_count = 2;
10785 else if (4 <= insn_op1 && 15 >= insn_op1)
10787 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
10788 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10789 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10790 record_buf[2] = ARM_PS_REGNUM;
10791 arm_insn_r->reg_rec_count = 3;
10795 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
10796 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
10797 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
10799 /* Handle control insn extension space. */
10801 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
10802 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
10804 if (!bit (arm_insn_r->arm_insn,25))
10806 if (!bits (arm_insn_r->arm_insn, 4, 7))
10808 if ((0 == insn_op1) || (2 == insn_op1))
10811 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10812 arm_insn_r->reg_rec_count = 1;
10814 else if (1 == insn_op1)
10816 /* CPSR is going to be changed. */
10817 record_buf[0] = ARM_PS_REGNUM;
10818 arm_insn_r->reg_rec_count = 1;
10820 else if (3 == insn_op1)
10822 /* SPSR is going to be changed. */
10823 /* We need to get SPSR value, which is yet to be done. */
10824 printf_unfiltered (_("Process record does not support "
10825 "instruction 0x%0x at address %s.\n"),
10826 arm_insn_r->arm_insn,
10827 paddress (arm_insn_r->gdbarch,
10828 arm_insn_r->this_addr));
10832 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
10837 record_buf[0] = ARM_PS_REGNUM;
10838 arm_insn_r->reg_rec_count = 1;
10840 else if (3 == insn_op1)
10843 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10844 arm_insn_r->reg_rec_count = 1;
10847 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
10850 record_buf[0] = ARM_PS_REGNUM;
10851 record_buf[1] = ARM_LR_REGNUM;
10852 arm_insn_r->reg_rec_count = 2;
10854 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
10856 /* QADD, QSUB, QDADD, QDSUB */
10857 record_buf[0] = ARM_PS_REGNUM;
10858 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10859 arm_insn_r->reg_rec_count = 2;
10861 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
10864 record_buf[0] = ARM_PS_REGNUM;
10865 record_buf[1] = ARM_LR_REGNUM;
10866 arm_insn_r->reg_rec_count = 2;
10868 /* Save SPSR also; how? */
10869 printf_unfiltered (_("Process record does not support "
10870 "instruction 0x%0x at address %s.\n"),
10871 arm_insn_r->arm_insn,
10872 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
10875 else if (8 == bits (arm_insn_r->arm_insn, 4, 7)
10876 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
10877 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
10878 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
10881 if (0 == insn_op1 || 1 == insn_op1)
10883 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
10884 /* We don't do optimization for SMULW<y> where we
10886 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10887 record_buf[1] = ARM_PS_REGNUM;
10888 arm_insn_r->reg_rec_count = 2;
10890 else if (2 == insn_op1)
10893 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10894 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
10895 arm_insn_r->reg_rec_count = 2;
10897 else if (3 == insn_op1)
10900 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10901 arm_insn_r->reg_rec_count = 1;
10907 /* MSR : immediate form. */
10910 /* CPSR is going to be changed. */
10911 record_buf[0] = ARM_PS_REGNUM;
10912 arm_insn_r->reg_rec_count = 1;
10914 else if (3 == insn_op1)
10916 /* SPSR is going to be changed. */
10917 /* We need to get the SPSR value, which is yet to be done. */
10918 printf_unfiltered (_("Process record does not support "
10919 "instruction 0x%0x at address %s.\n"),
10920 arm_insn_r->arm_insn,
10921 paddress (arm_insn_r->gdbarch,
10922 arm_insn_r->this_addr));
10928 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10929 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
10930 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
10932 /* Handle load/store insn extension space. */
10934 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
10935 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
10936 && !INSN_RECORDED(arm_insn_r))
10941 /* These insns change registers and memory as well. */
10942 /* SWP or SWPB insn. */
10943 /* Get memory address given by Rn. */
10944 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10945 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
10946 /* SWP insn?  Swaps a word. */
10947 if (8 == arm_insn_r->opcode)
10949 record_buf_mem[0] = 4;
10953 /* SWPB insn, swaps only a byte. */
10954 record_buf_mem[0] = 1;
10956 record_buf_mem[1] = u_regval;
10957 arm_insn_r->mem_rec_count = 1;
10958 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10959 arm_insn_r->reg_rec_count = 1;
10961 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10964 arm_record_strx (arm_insn_r, &record_buf[0], &record_buf_mem[0],
10967 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10970 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10971 record_buf[1] = record_buf[0] + 1;
10972 arm_insn_r->reg_rec_count = 2;
10974 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10977 arm_record_strx (arm_insn_r, &record_buf[0], &record_buf_mem[0],
10980 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
10982 /* LDRH, LDRSB, LDRSH. */
10983 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10984 arm_insn_r->reg_rec_count = 1;
10989 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
10990 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
10991 && !INSN_RECORDED(arm_insn_r))
10994 /* Handle coprocessor insn extension space. */
10997 /* To be done for ARMv5 and later; as of now we return -1. */
10999 printf_unfiltered (_("Process record does not support instruction 0x%0x "
11000 "at address %s.\n"), arm_insn_r->arm_insn,
11001 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11004 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11005 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11010 /* Handling opcode 000 insns. */
11013 arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
11015 struct regcache *reg_cache = arm_insn_r->regcache;
11016 uint32_t record_buf[8], record_buf_mem[8];
11017 ULONGEST u_regval[2] = {0};
11019 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
11020 uint32_t immed_high = 0, immed_low = 0, offset_8 = 0, tgt_mem_addr = 0;
11021 uint32_t opcode1 = 0;
11023 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11024 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11025 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
11027 /* Data processing insn / multiply insn. */
11028 if (9 == arm_insn_r->decode
11029 && ((4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
11030 || (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)))
11032 /* Handle multiply instructions. */
11033 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
11034 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
11036 /* Handle MLA and MUL. */
11037 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11038 record_buf[1] = ARM_PS_REGNUM;
11039 arm_insn_r->reg_rec_count = 2;
11041 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
11043 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
11044 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11045 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11046 record_buf[2] = ARM_PS_REGNUM;
11047 arm_insn_r->reg_rec_count = 3;
11050 else if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
11051 && (11 == arm_insn_r->decode || 13 == arm_insn_r->decode))
11053 /* Handle misc load insns, as 20th bit (L = 1). */
11054 /* LDR insn has a capability to do branching, if
11055 MOV LR, PC is preceded by LDR insn having Rn as R15;
11056 in that case, it emulates branch and link insn, and hence we
11057 need to save CPSR and PC as well.  I am not sure this is the right
11058 place; as opcode = 010 LDR insn makes this happen, if R15 was
11060 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11061 if (15 != reg_dest)
11063 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11064 arm_insn_r->reg_rec_count = 1;
11068 record_buf[0] = reg_dest;
11069 record_buf[1] = ARM_PS_REGNUM;
11070 arm_insn_r->reg_rec_count = 2;
11073 else if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
11074 && sbo_sbz (arm_insn_r->arm_insn, 5, 12, 0)
11075 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
11076 && 2 == bits (arm_insn_r->arm_insn, 20, 21))
11078 /* Handle MSR insn. */
11079 if (9 == arm_insn_r->opcode)
11081 /* CPSR is going to be changed. */
11082 record_buf[0] = ARM_PS_REGNUM;
11083 arm_insn_r->reg_rec_count = 1;
11087 /* SPSR is going to be changed. */
11088 /* How to read SPSR value? */
11089 printf_unfiltered (_("Process record does not support instruction "
11090 "0x%0x at address %s.\n"),
11091 arm_insn_r->arm_insn,
11092 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11096 else if (9 == arm_insn_r->decode
11097 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
11098 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11100 /* Handling SWP, SWPB. */
11101 /* These insns change registers and memory as well. */
11102 /* SWP or SWPB insn. */
11104 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11105 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11106 /* SWP insn?  Swaps a word. */
11107 if (8 == arm_insn_r->opcode)
11109 record_buf_mem[0] = 4;
11113 /* SWPB insn, swaps only a byte. */
11114 record_buf_mem[0] = 1;
11116 record_buf_mem[1] = u_regval[0];
11117 arm_insn_r->mem_rec_count = 1;
11118 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11119 arm_insn_r->reg_rec_count = 1;
11121 else if (3 == arm_insn_r->decode && 0x12 == opcode1
11122 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
11124 /* Handle BLX, branch and link/exchange. */
11125 if (9 == arm_insn_r->opcode)
11127 /* Branch is chosen by setting the T bit of CPSR to bit[0] of Rm,
11128 and R14 stores the return address. */
11129 record_buf[0] = ARM_PS_REGNUM;
11130 record_buf[1] = ARM_LR_REGNUM;
11131 arm_insn_r->reg_rec_count = 2;
11134 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
11136 /* Handle enhanced software breakpoint insn, BKPT. */
11137 /* CPSR is changed so that execution continues in ARM state, with normal
11138 interrupts disabled, in abort mode. */
11139 /* The PC is set according to the high vector configuration. */
11140 /* The user hit a breakpoint and typed reverse; in
11141 that case, we need to go back with the previous CPSR and
11142 Program Counter. */
11143 record_buf[0] = ARM_PS_REGNUM;
11144 record_buf[1] = ARM_LR_REGNUM;
11145 arm_insn_r->reg_rec_count = 2;
11147 /* Save SPSR also; how? */
11148 printf_unfiltered (_("Process record does not support instruction "
11149 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11150 paddress (arm_insn_r->gdbarch,
11151 arm_insn_r->this_addr));
11154 else if (11 == arm_insn_r->decode
11155 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11157 /* Handle enhanced store insns and DSP insns (e.g. LDRD). */
11159 /* Handle str(x) insn */
11160 arm_record_strx (arm_insn_r, &record_buf[0], &record_buf_mem[0],
11163 else if (1 == arm_insn_r->decode && 0x12 == opcode1
11164 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
11166 /* Handle BX, branch and link/exchange. */
11167 /* Branch is chosen by setting the T bit of CPSR to bit[0] of Rm. */
11168 record_buf[0] = ARM_PS_REGNUM;
11169 arm_insn_r->reg_rec_count = 1;
11171 else if (1 == arm_insn_r->decode && 0x16 == opcode1
11172 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
11173 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
11175 /* Count leading zeros: CLZ. */
11176 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11177 arm_insn_r->reg_rec_count = 1;
11179 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
11180 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
11181 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
11182 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0)
11185 /* Handle MRS insn. */
11186 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11187 arm_insn_r->reg_rec_count = 1;
11189 else if (arm_insn_r->opcode <= 15)
11191 /* Normal data processing insns. */
11192 /* In all of the 11 shifter-operand modes, the insn modifies the destination
11193 register, which is specified by bits 12-15. */
11194 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11195 record_buf[1] = ARM_PS_REGNUM;
11196 arm_insn_r->reg_rec_count = 2;
11203 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11204 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
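/* Illustrative sketch (kept out of the build, not part of GDB): the field
   split that the opcode-000 handler above relies on, written with the same
   bits () helper used throughout this file.  The insn value passed in is
   arbitrary; only the field positions matter.  */
#if 0
static void
example_field_split (uint32_t insn)
{
  uint32_t cond   = bits (insn, 28, 31);  /* Condition code.  */
  uint32_t op     = bits (insn, 25, 27);  /* Major opcode group (000 here).  */
  uint32_t opcode = bits (insn, 21, 24);  /* Data-processing opcode.  */
  uint32_t decode = bits (insn, 4, 7);    /* Secondary decode bits.  */
  uint32_t rd     = bits (insn, 12, 15);  /* Destination register.  */
}
#endif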
11208 /* Handling opcode 001 insns. */
11211 arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
11213 uint32_t record_buf[8], record_buf_mem[8];
11215 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11216 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11218 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
11219 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
11220 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
11223 /* Handle MSR insn. */
11224 if (9 == arm_insn_r->opcode)
11226 /* CPSR is going to be changed. */
11227 record_buf[0] = ARM_PS_REGNUM;
11228 arm_insn_r->reg_rec_count = 1;
11232 /* SPSR is going to be changed. */
11235 else if (arm_insn_r->opcode <= 15)
11237 /* Normal data processing insns. */
11238 /* In all of the 11 shifter-operand modes, the insn modifies the destination
11239 register, which is specified by bits 12-15. */
11240 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11241 record_buf[1] = ARM_PS_REGNUM;
11242 arm_insn_r->reg_rec_count = 2;
11249 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11250 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11254 /* Handling opcode 010 insns. */
11257 arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
11259 struct regcache *reg_cache = arm_insn_r->regcache;
11261 uint32_t reg_src1 = 0, reg_dest = 0;
11262 uint32_t offset_12 = 0, tgt_mem_addr = 0;
11263 uint32_t record_buf[8], record_buf_mem[8];
11265 ULONGEST u_regval = 0;
11267 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11268 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11270 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11272 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11273 /* LDR insn has a capability to do branching, if
11274 MOV LR, PC is preceded by LDR insn having Rn as R15;
11275 in that case, it emulates branch and link insn, and hence we
11276 need to save CPSR and PC as well. */
11277 if (ARM_PC_REGNUM != reg_dest)
11279 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11280 arm_insn_r->reg_rec_count = 1;
11284 record_buf[0] = reg_dest;
11285 record_buf[1] = ARM_PS_REGNUM;
11286 arm_insn_r->reg_rec_count = 2;
11291 /* Store, immediate offset, immediate pre-indexed,
11292 immediate post-indexed. */
11293 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11294 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
11295 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11297 if (bit (arm_insn_r->arm_insn, 23))
11299 tgt_mem_addr = u_regval + offset_12;
11303 tgt_mem_addr = u_regval - offset_12;
11306 switch (arm_insn_r->opcode)
11320 record_buf_mem[0] = 4;
11335 record_buf_mem[0] = 1;
11339 gdb_assert_not_reached ("no decoding pattern found");
11342 record_buf_mem[1] = tgt_mem_addr;
11343 arm_insn_r->mem_rec_count = 1;
11345 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11346 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11347 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11348 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11349 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11350 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11353 /* We are handling pre-indexed mode; post-indexed mode;
11354 where Rn is going to be changed. */
11355 record_buf[0] = reg_src1;
11356 arm_insn_r->reg_rec_count = 1;
11360 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11361 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11365 /* Handling opcode 011 insns. */
11368 arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
11370 struct regcache *reg_cache = arm_insn_r->regcache;
11372 uint32_t shift_imm = 0;
11373 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
11374 uint32_t offset_12 = 0, tgt_mem_addr = 0;
11375 uint32_t record_buf[8], record_buf_mem[8];
11378 ULONGEST u_regval[2];
11380 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11381 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11383 /* Handle enhanced store insns and LDRD DSP insn,
11384 order begins according to addressing modes for store insns
11388 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11390 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11391 /* LDR insn has a capability to do branching, if
11392 MOV LR, PC is preceded by LDR insn having Rn as R15;
11393 in that case, it emulates branch and link insn, and hence we
11394 need to save CPSR and PC as well. */
11395 if (15 != reg_dest)
11397 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11398 arm_insn_r->reg_rec_count = 1;
11402 record_buf[0] = reg_dest;
11403 record_buf[1] = ARM_PS_REGNUM;
11404 arm_insn_r->reg_rec_count = 2;
11409 if (! bits (arm_insn_r->arm_insn, 4, 11))
11411 /* Store insn, register offset and register pre-indexed,
11412 register post-indexed. */
11414 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11416 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11417 regcache_raw_read_unsigned (reg_cache, reg_src1
11419 regcache_raw_read_unsigned (reg_cache, reg_src2
11421 if (15 == reg_src2)
11423 /* If R15 was used as Rn, the value is the current PC + 8. */
11424 /* Pre-indexed mode doesn't reach here; that would be an illegal insn. */
11425 u_regval[1] = u_regval[1] + 8;
11427 /* Calculate target store address, Rn +/- Rm, register offset. */
11429 if (bit (arm_insn_r->arm_insn, 23))
11431 tgt_mem_addr = u_regval[0] + u_regval[1];
11435 tgt_mem_addr = u_regval[1] - u_regval[0];
11438 switch (arm_insn_r->opcode)
11452 record_buf_mem[0] = 4;
11467 record_buf_mem[0] = 1;
11471 gdb_assert_not_reached ("no decoding pattern found");
11474 record_buf_mem[1] = tgt_mem_addr;
11475 arm_insn_r->mem_rec_count = 1;
11477 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11478 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11479 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11480 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11481 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11482 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11485 /* Rn is going to be changed in pre-indexed mode and
11486 post-indexed mode as well. */
11487 record_buf[0] = reg_src2;
11488 arm_insn_r->reg_rec_count = 1;
11493 /* Store insn, scaled register offset; scaled pre-indexed. */
11494 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
11496 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11498 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11499 /* Get shift_imm. */
11500 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
11501 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11502 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
11503 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11504 /* Offset_12 used as shift. */
11508 /* Offset_12 used as index. */
11509 offset_12 = u_regval[0] << shift_imm;
11513 offset_12 = (!shift_imm) ? 0 : u_regval[0] >> shift_imm;
11519 if (bit (u_regval[0], 31))
11521 offset_12 = 0xFFFFFFFF;
11530 /* This is arithmetic shift. */
11531 offset_12 = s_word >> shift_imm;
11538 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
11540 /* Get C flag value and shift it by 31. */
11541 offset_12 = (((bit (u_regval[1], 29)) << 31) \
11542 | (u_regval[0]) >> 1);
11546 offset_12 = (u_regval[0] >> shift_imm) \
11548 (sizeof(uint32_t) - shift_imm));
11553 gdb_assert_not_reached ("no decoding pattern found");
11557 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11559 if (bit (arm_insn_r->arm_insn, 23))
11561 tgt_mem_addr = u_regval[1] + offset_12;
11565 tgt_mem_addr = u_regval[1] - offset_12;
11568 switch (arm_insn_r->opcode)
11582 record_buf_mem[0] = 4;
11597 record_buf_mem[0] = 1;
11601 gdb_assert_not_reached ("no decoding pattern found");
11604 record_buf_mem[1] = tgt_mem_addr;
11605 arm_insn_r->mem_rec_count = 1;
11607 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11608 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11609 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11610 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11611 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11612 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11615 /* Rn is going to be changed in register scaled pre-indexed
11616 mode,and scaled post indexed mode. */
11617 record_buf[0] = reg_src2;
11618 arm_insn_r->reg_rec_count = 1;
11623 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11624 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
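/* Illustrative sketch (kept out of the build, not part of GDB): the
   scaled-register-offset address computation handled above, shown only for
   the LSL case (shift type bits 5-6 equal to 0) to keep it short; the other
   shift types follow the same pattern with LSR/ASR/ROR.  The register
   values below are made up for illustration.  */
#if 0
static void
example_scaled_offset (void)
{
  uint32_t rm_val = 0x10, rn_val = 0x3000;
  uint32_t shift_imm = 2;                    /* Bits 7-11 of the insn.  */
  uint32_t offset_12 = rm_val << shift_imm;  /* LSL: 0x40.  */
  uint32_t tgt_mem_addr;

  /* Bit 23 (the U bit) selects add or subtract.  */
  tgt_mem_addr = rn_val + offset_12;         /* U = 1.  */
  /* tgt_mem_addr = rn_val - offset_12;         U = 0.  */
}
#endif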
11628 /* Handling opcode 100 insns. */
11631 arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
11633 struct regcache *reg_cache = arm_insn_r->regcache;
11635 uint32_t register_list[16] = {0}, register_count = 0, register_bits = 0;
11636 uint32_t reg_src1 = 0, addr_mode = 0, no_of_regs = 0;
11637 uint32_t start_address = 0, index = 0;
11638 uint32_t record_buf[24], record_buf_mem[48];
11640 ULONGEST u_regval[2] = {0};
11642 /* This mode is exclusively for load and store multiple. */
11643 /* Handle increment after/before and decrement after/before modes;
11644 Rn changes depending on the W bit, but as of now we record Rn too,
11645 without optimization. */
11647 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11649 /* LDM (1,2,3) where LDM (3) changes CPSR too. */
11651 if (bit (arm_insn_r->arm_insn, 20) && !bit (arm_insn_r->arm_insn, 22))
11653 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
11658 register_bits = bits (arm_insn_r->arm_insn, 0, 14);
11662 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11663 while (register_bits)
11665 if (register_bits & 0x00000001)
11666 register_list[register_count++] = 1;
11667 register_bits = register_bits >> 1;
11670 /* Extra space for the base register and CPSR; without optimization. */
11671 record_buf[register_count] = reg_src1;
11672 record_buf[register_count + 1] = ARM_PS_REGNUM;
11673 arm_insn_r->reg_rec_count = register_count + 2;
11675 for (register_count = 0; register_count < no_of_regs; register_count++)
11677 if (register_list[register_count])
11679 /* register_count gives the total number of registers
11680 and also serves as the register number. */
11681 record_buf[index] = register_count;
11689 /* It handles both STM(1) and STM(2). */
11690 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
11692 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
11694 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11695 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11696 while (register_bits)
11698 if (register_bits & 0x00000001)
11700 register_bits = register_bits >> 1;
11705 /* Decrement after. */
11707 start_address = (u_regval[0]) - (register_count * 4) + 4;
11708 arm_insn_r->mem_rec_count = register_count;
11709 while (register_count)
11711 record_buf_mem[(register_count * 2) - 1] = start_address;
11712 record_buf_mem[(register_count * 2) - 2] = 4;
11713 start_address = start_address + 4;
11718 /* Increment after. */
11720 start_address = u_regval[0];
11721 arm_insn_r->mem_rec_count = register_count;
11722 while (register_count)
11724 record_buf_mem[(register_count * 2) - 1] = start_address;
11725 record_buf_mem[(register_count * 2) - 2] = 4;
11726 start_address = start_address + 4;
11731 /* Decrement before. */
11734 start_address = (u_regval[0]) - (register_count * 4);
11735 arm_insn_r->mem_rec_count = register_count;
11736 while (register_count)
11738 record_buf_mem[(register_count * 2) - 1] = start_address;
11739 record_buf_mem[(register_count * 2) - 2] = 4;
11740 start_address = start_address + 4;
11745 /* Increment before. */
11747 start_address = u_regval[0] + 4;
11748 arm_insn_r->mem_rec_count = register_count;
11749 while (register_count)
11751 record_buf_mem[(register_count * 2) - 1] = start_address;
11752 record_buf_mem[(register_count * 2) - 2] = 4;
11753 start_address = start_address + 4;
11759 gdb_assert_not_reached ("no decoding pattern found");
11763 /* Base register also changes; based on condition and W bit. */
11764 /* We save it anyway without optimization. */
11765 record_buf[0] = reg_src1;
11766 arm_insn_r->reg_rec_count = 1;
11769 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11770 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
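/* Illustrative sketch (kept out of the build, not part of GDB): the start
   addresses the STM handler above records for the four addressing modes,
   given the base register value and the number of registers in the list.
   The values below are made up for illustration.  */
#if 0
static void
example_stm_start_address (void)
{
  uint32_t rn_val = 0x4000;
  uint32_t register_count = 3;
  uint32_t start_address;

  start_address = rn_val - (register_count * 4) + 4;  /* Decrement after.  */
  start_address = rn_val;                             /* Increment after.  */
  start_address = rn_val - (register_count * 4);      /* Decrement before.  */
  start_address = rn_val + 4;                         /* Increment before.  */

  /* In every mode, register_count consecutive 4-byte (length, address)
     pairs are then emitted starting at start_address.  */
}
#endif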
11774 /* Handling opcode 101 insns. */
11777 arm_record_b_bl (insn_decode_record *arm_insn_r)
11779 uint32_t record_buf[8];
11781 /* Handle B, BL, BLX(1) insns. */
11782 /* B simply branches so we do nothing here. */
11783 /* Note: BLX(1) doesn't fall here but instead it falls into
11784 extension space. */
11785 if (bit (arm_insn_r->arm_insn, 24))
11787 record_buf[0] = ARM_LR_REGNUM;
11788 arm_insn_r->reg_rec_count = 1;
11791 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11796 /* Handling opcode 110 insns. */
11799 arm_record_coproc (insn_decode_record *arm_insn_r)
11801 printf_unfiltered (_("Process record does not support instruction "
11802 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11803 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11808 /* Handling opcode 111 insns. */
11811 arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
11813 struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
11814 struct regcache *reg_cache = arm_insn_r->regcache;
11815 uint32_t ret = 0; /* Function return value: -1: record failure; 0: success. */
11817 /* Handle SWI insn; system call would be handled over here. */
11819 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
11820 if (15 == arm_insn_r->opcode)
11822 /* Handle arm syscall insn. */
11823 if (tdep->arm_swi_record != NULL)
11825 ret = tdep->arm_swi_record (reg_cache);
11829 printf_unfiltered (_("no syscall record support\n"));
11834 printf_unfiltered (_("Process record does not support instruction "
11835 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11836 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11840 /* Handling opcode 000 insns. */
11843 thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
11845 uint32_t record_buf[8];
11846 uint32_t reg_src1 = 0;
11848 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11850 record_buf[0] = ARM_PS_REGNUM;
11851 record_buf[1] = reg_src1;
11852 thumb_insn_r->reg_rec_count = 2;
11854 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11860 /* Handling opcode 001 insns. */
11863 thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
11865 uint32_t record_buf[8];
11866 uint32_t reg_src1 = 0;
11868 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11870 record_buf[0] = ARM_PS_REGNUM;
11871 record_buf[1] = reg_src1;
11872 thumb_insn_r->reg_rec_count = 2;
11874 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11879 /* Handling opcode 010 insns. */
11882 thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
11884 struct regcache *reg_cache = thumb_insn_r->regcache;
11885 uint32_t record_buf[8], record_buf_mem[8];
11887 uint32_t reg_src1 = 0, reg_src2 = 0;
11888 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
11890 ULONGEST u_regval[2] = {0};
11892 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
11894 if (bit (thumb_insn_r->arm_insn, 12))
11896 /* Handle load/store register offset. */
11897 opcode2 = bits (thumb_insn_r->arm_insn, 9, 10);
11898 if (opcode2 >= 12 && opcode2 <= 15)
11900 /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH. */
11901 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11902 record_buf[0] = reg_src1;
11903 thumb_insn_r->reg_rec_count = 1;
11905 else if (opcode2 >= 8 && opcode2 <= 10)
11907 /* STR(2), STRB(2), STRH(2). */
11908 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11909 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
11910 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11911 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11913 record_buf_mem[0] = 4; /* STR (2). */
11914 else if (10 == opcode2)
11915 record_buf_mem[0] = 1; /* STRB (2). */
11916 else if (9 == opcode2)
11917 record_buf_mem[0] = 2; /* STRH (2). */
11918 record_buf_mem[1] = u_regval[0] + u_regval[1];
11919 thumb_insn_r->mem_rec_count = 1;
11922 else if (bit (thumb_insn_r->arm_insn, 11))
11924 /* Handle load from literal pool. */
11926 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11927 record_buf[0] = reg_src1;
11928 thumb_insn_r->reg_rec_count = 1;
11932 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
11933 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
11934 if ((3 == opcode2) && (!opcode3))
11936 /* Branch with exchange. */
11937 record_buf[0] = ARM_PS_REGNUM;
11938 thumb_insn_r->reg_rec_count = 1;
11942 /* Format 8; special data processing insns. */
11943 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11944 record_buf[0] = ARM_PS_REGNUM;
11945 record_buf[1] = reg_src1;
11946 thumb_insn_r->reg_rec_count = 2;
11951 /* Format 5; data processing insns. */
11952 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11953 if (bit (thumb_insn_r->arm_insn, 7))
11955 reg_src1 = reg_src1 + 8;
11957 record_buf[0] = ARM_PS_REGNUM;
11958 record_buf[1] = reg_src1;
11959 thumb_insn_r->reg_rec_count = 2;
11962 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11963 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11969 /* Handling opcode 001 insns. */
11972 thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
11974 struct regcache *reg_cache = thumb_insn_r->regcache;
11975 uint32_t record_buf[8], record_buf_mem[8];
11977 uint32_t reg_src1 = 0;
11978 uint32_t opcode = 0, immed_5 = 0;
11980 ULONGEST u_regval = 0;
11982 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11987 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11988 record_buf[0] = reg_src1;
11989 thumb_insn_r->reg_rec_count = 1;
11994 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11995 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11996 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11997 record_buf_mem[0] = 4;
11998 record_buf_mem[1] = u_regval + (immed_5 * 4);
11999 thumb_insn_r->mem_rec_count = 1;
12002 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12003 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12009 /* Handling opcode 100 insns. */
12012 thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
12014 struct regcache *reg_cache = thumb_insn_r->regcache;
12015 uint32_t record_buf[8], record_buf_mem[8];
12017 uint32_t reg_src1 = 0;
12018 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
12020 ULONGEST u_regval = 0;
12022 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12027 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12028 record_buf[0] = reg_src1;
12029 thumb_insn_r->reg_rec_count = 1;
12031 else if (1 == opcode)
12034 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12035 record_buf[0] = reg_src1;
12036 thumb_insn_r->reg_rec_count = 1;
12038 else if (2 == opcode)
12041 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
12042 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12043 record_buf_mem[0] = 4;
12044 record_buf_mem[1] = u_regval + (immed_8 * 4);
12045 thumb_insn_r->mem_rec_count = 1;
12047 else if (0 == opcode)
12050 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
12051 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12052 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12053 record_buf_mem[0] = 2;
12054 record_buf_mem[1] = u_regval + (immed_5 * 2);
12055 thumb_insn_r->mem_rec_count = 1;
12058 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12059 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12065 /* Handling opcode 101 insns. */
12068 thumb_record_misc (insn_decode_record *thumb_insn_r)
12070 struct regcache *reg_cache = thumb_insn_r->regcache;
12072 uint32_t opcode = 0, opcode1 = 0, opcode2 = 0;
12073 uint32_t register_bits = 0, register_count = 0;
12074 uint32_t register_list[8] = {0}, index = 0, start_address = 0;
12075 uint32_t record_buf[24], record_buf_mem[48];
12078 ULONGEST u_regval = 0;
12080 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12081 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12082 opcode2 = bits (thumb_insn_r->arm_insn, 9, 12);
12087 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12088 while (register_bits)
12090 if (register_bits & 0x00000001)
12091 register_list[register_count++] = 1;
12092 register_bits = register_bits >> 1;
12094 record_buf[register_count] = ARM_PS_REGNUM;
12095 record_buf[register_count + 1] = ARM_SP_REGNUM;
12096 thumb_insn_r->reg_rec_count = register_count + 2;
12097 for (register_count = 0; register_count < 8; register_count++)
12099 if (register_list[register_count])
12101 record_buf[index] = register_count;
12106 else if (10 == opcode2)
12109 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12110 regcache_raw_read_unsigned (reg_cache, ARM_PC_REGNUM, &u_regval);
12111 while (register_bits)
12113 if (register_bits & 0x00000001)
12115 register_bits = register_bits >> 1;
12117 start_address = u_regval - \
12118 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
12119 thumb_insn_r->mem_rec_count = register_count;
12120 while (register_count)
12122 record_buf_mem[(register_count * 2) - 1] = start_address;
12123 record_buf_mem[(register_count * 2) - 2] = 4;
12124 start_address = start_address + 4;
12127 record_buf[0] = ARM_SP_REGNUM;
12128 thumb_insn_r->reg_rec_count = 1;
12130 else if (0x1E == opcode1)
12133 /* Handle enhanced software breakpoint insn, BKPT. */
12134 /* CPSR is changed to be executed in ARM state, disabling normal
12135 interrupts, entering abort mode. */
12136 /* According to high vector configuration PC is set. */
12137 /* The user hit a breakpoint and typed reverse; in that case, we need to go back with the
12138 previous CPSR and Program Counter. */
12139 record_buf[0] = ARM_PS_REGNUM;
12140 record_buf[1] = ARM_LR_REGNUM;
12141 thumb_insn_r->reg_rec_count = 2;
12142 /* We need to save SPSR value, which is not yet done. */
12143 printf_unfiltered (_("Process record does not support instruction "
12144 "0x%0x at address %s.\n"),
12145 thumb_insn_r->arm_insn,
12146 paddress (thumb_insn_r->gdbarch,
12147 thumb_insn_r->this_addr));
12150 else if ((0 == opcode) || (1 == opcode))
12152 /* ADD(5), ADD(6). */
12153 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12154 record_buf[0] = reg_src1;
12155 thumb_insn_r->reg_rec_count = 1;
12157 else if (2 == opcode)
12159 /* ADD(7), SUB(4). */
12160 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12161 record_buf[0] = ARM_SP_REGNUM;
12162 thumb_insn_r->reg_rec_count = 1;
12165 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12166 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12172 /* Handling opcode 110 insns. */
12175 thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
12177 struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
12178 struct regcache *reg_cache = thumb_insn_r->regcache;
12180 uint32_t ret = 0; /* Function return value: -1: record failure; 0: success. */
12181 uint32_t reg_src1 = 0;
12182 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
12183 uint32_t register_list[8] = {0}, index = 0, start_address = 0;
12184 uint32_t record_buf[24], record_buf_mem[48];
12186 ULONGEST u_regval = 0;
12188 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12189 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
12195 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12197 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12198 while (register_bits)
12200 if (register_bits & 0x00000001)
12201 register_list[register_count++] = 1;
12202 register_bits = register_bits >> 1;
12204 record_buf[register_count] = reg_src1;
12205 thumb_insn_r->reg_rec_count = register_count + 1;
12206 for (register_count = 0; register_count < 8; register_count++)
12208 if (register_list[register_count])
12210 record_buf[index] = register_count;
12215 else if (0 == opcode2)
12217 /* It handles STMIA. */
12218 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12220 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12221 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12222 while (register_bits)
12224 if (register_bits & 0x00000001)
12226 register_bits = register_bits >> 1;
12228 start_address = u_regval;
12229 thumb_insn_r->mem_rec_count = register_count;
12230 while (register_count)
12232 record_buf_mem[(register_count * 2) - 1] = start_address;
12233 record_buf_mem[(register_count * 2) - 2] = 4;
12234 start_address = start_address + 4;
12238 else if (0x1F == opcode1)
12240 /* Handle arm syscall insn. */
12241 if (tdep->arm_swi_record != NULL)
12243 ret = tdep->arm_swi_record (reg_cache);
12247 printf_unfiltered (_("no syscall record support\n"));
12252 /* B (1), conditional branch, is automatically taken care of in process_record,
12253 as the PC is saved there. */
12255 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12256 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12262 /* Handling opcode 111 insns. */
12265 thumb_record_branch (insn_decode_record *thumb_insn_r)
12267 uint32_t record_buf[8];
12268 uint32_t bits_h = 0;
12270 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
12272 if (2 == bits_h || 3 == bits_h)
12275 record_buf[0] = ARM_LR_REGNUM;
12276 thumb_insn_r->reg_rec_count = 1;
12278 else if (1 == bits_h)
12281 record_buf[0] = ARM_PS_REGNUM;
12282 record_buf[1] = ARM_LR_REGNUM;
12283 thumb_insn_r->reg_rec_count = 2;
12286 /* B(2) is automatically taken care of in process_record, as the PC is
12289 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12295 /* Extracts an arm/thumb/thumb2 insn depending on the size, and returns 0 on success
12296 and a positive value on failure. */
12299 extract_arm_insn (insn_decode_record *insn_record, uint32_t insn_size)
12301 gdb_byte buf[insn_size];
12303 memset (&buf[0], 0, insn_size);
12305 if (target_read_memory (insn_record->this_addr, &buf[0], insn_size))
12307 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
12309 gdbarch_byte_order (insn_record->gdbarch));
12313 typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
12315 /* Decode arm/thumb insn depending on condition codes and opcodes; and
12319 decode_insn (insn_decode_record *arm_record, record_type_t record_type,
12320 uint32_t insn_size)
12323 /* (Starting from numerical 0.)  Bits 25, 26, 27 decode the type of arm instruction. */
12324 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
12326 arm_record_data_proc_misc_ld_str, /* 000. */
12327 arm_record_data_proc_imm, /* 001. */
12328 arm_record_ld_st_imm_offset, /* 010. */
12329 arm_record_ld_st_reg_offset, /* 011. */
12330 arm_record_ld_st_multiple, /* 100. */
12331 arm_record_b_bl, /* 101. */
12332 arm_record_coproc, /* 110. */
12333 arm_record_coproc_data_proc /* 111. */
12336 /* (Starting from numerical 0.)  Bits 13, 14, 15 decode the type of thumb instruction. */
12337 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
12339 thumb_record_shift_add_sub, /* 000. */
12340 thumb_record_add_sub_cmp_mov, /* 001. */
12341 thumb_record_ld_st_reg_offset, /* 010. */
12342 thumb_record_ld_st_imm_offset, /* 011. */
12343 thumb_record_ld_st_stack, /* 100. */
12344 thumb_record_misc, /* 101. */
12345 thumb_record_ldm_stm_swi, /* 110. */
12346 thumb_record_branch /* 111. */
12349 uint32_t ret = 0; /* Return value: negative: failure; 0: success. */
12350 uint32_t insn_id = 0;
12352 if (extract_arm_insn (arm_record, insn_size))
12356 printf_unfiltered (_("Process record: error reading memory at "
12357 "addr %s len = %d.\n"),
12358 paddress (arm_record->gdbarch, arm_record->this_addr), insn_size);
12362 else if (ARM_RECORD == record_type)
12364 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
12365 insn_id = bits (arm_record->arm_insn, 25, 27);
12366 ret = arm_record_extension_space (arm_record);
12367 /* If this insn has fallen into extension space
12368 then we need not decode it anymore. */
12369 if (ret != -1 && !INSN_RECORDED(arm_record))
12371 ret = arm_handle_insn[insn_id] (arm_record);
12374 else if (THUMB_RECORD == record_type)
12376 /* As thumb does not have condition codes, we set cond to a negative value. */
12377 arm_record->cond = -1;
12378 insn_id = bits (arm_record->arm_insn, 13, 15);
12379 ret = thumb_handle_insn[insn_id] (arm_record);
12381 else if (THUMB2_RECORD == record_type)
12383 printf_unfiltered (_("Process record doesn't support thumb32 instruction "
12384 "0x%0x at address %s.\n"), arm_record->arm_insn,
12385 paddress (arm_record->gdbarch,
12386 arm_record->this_addr));
12391 /* Throw assertion. */
12392 gdb_assert_not_reached ("not a valid instruction, could not decode");
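/* Illustrative sketch (kept out of the build, not part of GDB): the
   dispatch performed by decode_insn above.  For ARM insns the handler index
   comes from bits 25-27, for 16-bit Thumb insns from bits 13-15; decode_insn
   then invokes the corresponding entry of its handler table on the record.
   The example_dispatch helper is made up for illustration.  */
#if 0
static int
example_dispatch (insn_decode_record *arm_record, int is_thumb)
{
  uint32_t insn_id;

  if (!is_thumb)
    insn_id = bits (arm_record->arm_insn, 25, 27);   /* ARM.  */
  else
    insn_id = bits (arm_record->arm_insn, 13, 15);   /* 16-bit Thumb.  */

  /* decode_insn then calls arm_handle_insn[insn_id] (arm_record) or
     thumb_handle_insn[insn_id] (arm_record) respectively.  */
  return insn_id;
}
#endif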
12399 /* Cleans up local record registers and memory allocations. */
12402 deallocate_reg_mem (insn_decode_record *record)
12404 xfree (record->arm_regs);
12405 xfree (record->arm_mems);
12409 /* Parse the current instruction and record the values of the registers and
12410 memory that will be changed by the current instruction to record_arch_list.
12411 Return -1 if something is wrong. */
12414 arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
12415 CORE_ADDR insn_addr)
12418 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
12419 uint32_t no_of_rec = 0;
12420 uint32_t ret = 0; /* Return value: -1: record failure; 0: success. */
12421 ULONGEST t_bit = 0, insn_id = 0;
12423 ULONGEST u_regval = 0;
12425 insn_decode_record arm_record;
12427 memset (&arm_record, 0, sizeof (insn_decode_record));
12428 arm_record.regcache = regcache;
12429 arm_record.this_addr = insn_addr;
12430 arm_record.gdbarch = gdbarch;
12433 if (record_debug > 1)
12435 fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
12437 paddress (gdbarch, arm_record.this_addr));
12440 if (extract_arm_insn (&arm_record, 2))
12444 printf_unfiltered (_("Process record: error reading memory at "
12445 "addr %s len = %d.\n"),
12446 paddress (arm_record.gdbarch,
12447 arm_record.this_addr), 2);
12452 /* Check whether the insn is a thumb or an arm one. */
12454 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
12455 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
12458 if (!(u_regval & t_bit))
12460 /* We are decoding arm insn. */
12461 ret = decode_insn (&arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
12465 insn_id = bits (arm_record.arm_insn, 11, 15);
12466 /* Is it a thumb2 insn? */
12467 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
12469 ret = decode_insn (&arm_record, THUMB2_RECORD,
12470 THUMB2_INSN_SIZE_BYTES);
12474 /* We are decoding thumb insn. */
12475 ret = decode_insn (&arm_record, THUMB_RECORD, THUMB_INSN_SIZE_BYTES);
12481 /* Record registers. */
12482 record_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
12483 if (arm_record.arm_regs)
12485 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
12487 if (record_arch_list_add_reg (arm_record.regcache,
12488 arm_record.arm_regs[no_of_rec]))
12492 /* Record memories. */
12493 if (arm_record.arm_mems)
12495 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
12497 if (record_arch_list_add_mem
12498 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
12499 arm_record.arm_mems[no_of_rec].len))
12504 if (record_arch_list_add_end ())
12509 deallocate_reg_mem (&arm_record);
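/* Illustrative sketch (kept out of the build, not part of GDB): how
   arm_process_record above chooses the decoding mode.  The T bit of CPSR
   selects ARM vs. Thumb; for Thumb, a first halfword whose bits 11-15 are
   0x1D, 0x1E or 0x1F marks a 32-bit Thumb-2 insn.  The helper name and its
   parameters are made up for illustration.  */
#if 0
static record_type_t
example_select_record_type (int t_bit_set, uint32_t first_halfword)
{
  uint32_t insn_id;

  if (!t_bit_set)
    return ARM_RECORD;                     /* CPSR T bit clear: ARM mode.  */

  insn_id = bits (first_halfword, 11, 15);
  if (0x1D == insn_id || 0x1E == insn_id || 0x1F == insn_id)
    return THUMB2_RECORD;                  /* 32-bit Thumb-2 encoding.  */

  return THUMB_RECORD;                     /* Plain 16-bit Thumb.  */
}
#endif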