1 /* Common target dependent code for GDB on ARM systems.
3 Copyright (C) 1988-2014 Free Software Foundation, Inc.
5 This file is part of GDB.
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
22 #include <ctype.h> /* XXX for isupper (). */
29 #include "dis-asm.h" /* For register styles. */
31 #include "reggroups.h"
34 #include "arch-utils.h"
36 #include "frame-unwind.h"
37 #include "frame-base.h"
38 #include "trad-frame.h"
40 #include "dwarf2-frame.h"
42 #include "prologue-value.h"
44 #include "target-descriptions.h"
45 #include "user-regs.h"
49 #include "gdb/sim-arm.h"
52 #include "coff/internal.h"
58 #include "record-full.h"
60 #include "features/arm-with-m.c"
61 #include "features/arm-with-m-fpa-layout.c"
62 #include "features/arm-with-m-vfp-d16.c"
63 #include "features/arm-with-iwmmxt.c"
64 #include "features/arm-with-vfpv2.c"
65 #include "features/arm-with-vfpv3.c"
66 #include "features/arm-with-neon.c"
70 /* Macros for setting and testing a bit in a minimal symbol that marks
71 it as Thumb function. The MSB of the minimal symbol's "info" field
72 is used for this purpose.
74 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
75 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
77 #define MSYMBOL_SET_SPECIAL(msym) \
78 MSYMBOL_TARGET_FLAG_1 (msym) = 1
80 #define MSYMBOL_IS_SPECIAL(msym) \
81 MSYMBOL_TARGET_FLAG_1 (msym)
83 /* Per-objfile data used for mapping symbols. */
84 static const struct objfile_data *arm_objfile_data_key;
86 struct arm_mapping_symbol
91 typedef struct arm_mapping_symbol arm_mapping_symbol_s;
92 DEF_VEC_O(arm_mapping_symbol_s);
94 struct arm_per_objfile
96 VEC(arm_mapping_symbol_s) **section_maps;
99 /* The list of available "set arm ..." and "show arm ..." commands. */
100 static struct cmd_list_element *setarmcmdlist = NULL;
101 static struct cmd_list_element *showarmcmdlist = NULL;
103 /* The type of floating-point to use. Keep this in sync with enum
104 arm_float_model, and the help string in _initialize_arm_tdep. */
105 static const char *const fp_model_strings[] =
115 /* A variable that can be configured by the user. */
116 static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
117 static const char *current_fp_model = "auto";
119 /* The ABI to use. Keep this in sync with arm_abi_kind. */
120 static const char *const arm_abi_strings[] =
128 /* A variable that can be configured by the user. */
129 static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
130 static const char *arm_abi_string = "auto";
132 /* The execution mode to assume. */
133 static const char *const arm_mode_strings[] =
141 static const char *arm_fallback_mode_string = "auto";
142 static const char *arm_force_mode_string = "auto";
144 /* Internal override of the execution mode. -1 means no override,
145 0 means override to ARM mode, 1 means override to Thumb mode.
146 The effect is the same as if arm_force_mode has been set by the
147 user (except the internal override has precedence over a user's
148 arm_force_mode override). */
149 static int arm_override_mode = -1;
151 /* Number of different reg name sets (options). */
152 static int num_disassembly_options;
154 /* The standard register names, and all the valid aliases for them. Note
155 that `fp', `sp' and `pc' are not added in this alias list, because they
156 have been added as builtin user registers in
157 std-regs.c:_initialize_frame_reg. */
162 } arm_register_aliases[] = {
163 /* Basic register numbers. */
180 /* Synonyms (argument and variable registers). */
193 /* Other platform-specific names for r9. */
199 /* Names used by GCC (not listed in the ARM EABI). */
201 /* A special name from the older ATPCS. */
205 static const char *const arm_register_names[] =
206 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
207 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
208 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
209 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
210 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
211 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
212 "fps", "cpsr" }; /* 24 25 */
214 /* Valid register name styles. */
215 static const char **valid_disassembly_styles;
217 /* Disassembly style to use. Default to "std" register names. */
218 static const char *disassembly_style;
220 /* This is used to keep the bfd arch_info in sync with the disassembly
222 static void set_disassembly_style_sfunc(char *, int,
223 struct cmd_list_element *);
224 static void set_disassembly_style (void);
226 static void convert_from_extended (const struct floatformat *, const void *,
228 static void convert_to_extended (const struct floatformat *, void *,
231 static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
232 struct regcache *regcache,
233 int regnum, gdb_byte *buf);
234 static void arm_neon_quad_write (struct gdbarch *gdbarch,
235 struct regcache *regcache,
236 int regnum, const gdb_byte *buf);
238 static int thumb_insn_size (unsigned short inst1);
240 struct arm_prologue_cache
242 /* The stack pointer at the time this frame was created; i.e. the
243 caller's stack pointer when this function was called. It is used
244 to identify this frame. */
247 /* The frame base for this frame is just prev_sp - frame size.
248 FRAMESIZE is the distance from the frame pointer to the
249 initial stack pointer. */
253 /* The register used to hold the frame pointer for this frame. */
256 /* Saved register offsets. */
257 struct trad_frame_saved_reg *saved_regs;
260 static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
261 CORE_ADDR prologue_start,
262 CORE_ADDR prologue_end,
263 struct arm_prologue_cache *cache);
265 /* Architecture version for displaced stepping. This effects the behaviour of
266 certain instructions, and really should not be hard-wired. */
268 #define DISPLACED_STEPPING_ARCH_VERSION 5
270 /* Addresses for calling Thumb functions have the bit 0 set.
271 Here are some macros to test, set, or clear bit 0 of addresses. */
272 #define IS_THUMB_ADDR(addr) ((addr) & 1)
273 #define MAKE_THUMB_ADDR(addr) ((addr) | 1)
274 #define UNMAKE_THUMB_ADDR(addr) ((addr) & ~1)
276 /* Set to true if the 32-bit mode is in use. */
280 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
283 arm_psr_thumb_bit (struct gdbarch *gdbarch)
285 if (gdbarch_tdep (gdbarch)->is_m)
291 /* Determine if FRAME is executing in Thumb mode. */
294 arm_frame_is_thumb (struct frame_info *frame)
297 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
299 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
300 directly (from a signal frame or dummy frame) or by interpreting
301 the saved LR (from a prologue or DWARF frame). So consult it and
302 trust the unwinders. */
303 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
305 return (cpsr & t_bit) != 0;
308 /* Callback for VEC_lower_bound. */
311 arm_compare_mapping_symbols (const struct arm_mapping_symbol *lhs,
312 const struct arm_mapping_symbol *rhs)
314 return lhs->value < rhs->value;
317 /* Search for the mapping symbol covering MEMADDR. If one is found,
318 return its type. Otherwise, return 0. If START is non-NULL,
319 set *START to the location of the mapping symbol. */
322 arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
324 struct obj_section *sec;
326 /* If there are mapping symbols, consult them. */
327 sec = find_pc_section (memaddr);
330 struct arm_per_objfile *data;
331 VEC(arm_mapping_symbol_s) *map;
332 struct arm_mapping_symbol map_key = { memaddr - obj_section_addr (sec),
336 data = objfile_data (sec->objfile, arm_objfile_data_key);
339 map = data->section_maps[sec->the_bfd_section->index];
340 if (!VEC_empty (arm_mapping_symbol_s, map))
342 struct arm_mapping_symbol *map_sym;
344 idx = VEC_lower_bound (arm_mapping_symbol_s, map, &map_key,
345 arm_compare_mapping_symbols);
347 /* VEC_lower_bound finds the earliest ordered insertion
348 point. If the following symbol starts at this exact
349 address, we use that; otherwise, the preceding
350 mapping symbol covers this address. */
351 if (idx < VEC_length (arm_mapping_symbol_s, map))
353 map_sym = VEC_index (arm_mapping_symbol_s, map, idx);
354 if (map_sym->value == map_key.value)
357 *start = map_sym->value + obj_section_addr (sec);
358 return map_sym->type;
364 map_sym = VEC_index (arm_mapping_symbol_s, map, idx - 1);
366 *start = map_sym->value + obj_section_addr (sec);
367 return map_sym->type;
376 /* Determine if the program counter specified in MEMADDR is in a Thumb
377 function. This function should be called for addresses unrelated to
378 any executing frame; otherwise, prefer arm_frame_is_thumb. */
381 arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
383 struct bound_minimal_symbol sym;
385 struct displaced_step_closure* dsc
386 = get_displaced_step_closure_by_addr(memaddr);
388 /* If checking the mode of displaced instruction in copy area, the mode
389 should be determined by instruction on the original address. */
393 fprintf_unfiltered (gdb_stdlog,
394 "displaced: check mode of %.8lx instead of %.8lx\n",
395 (unsigned long) dsc->insn_addr,
396 (unsigned long) memaddr);
397 memaddr = dsc->insn_addr;
400 /* If bit 0 of the address is set, assume this is a Thumb address. */
401 if (IS_THUMB_ADDR (memaddr))
404 /* Respect internal mode override if active. */
405 if (arm_override_mode != -1)
406 return arm_override_mode;
408 /* If the user wants to override the symbol table, let him. */
409 if (strcmp (arm_force_mode_string, "arm") == 0)
411 if (strcmp (arm_force_mode_string, "thumb") == 0)
414 /* ARM v6-M and v7-M are always in Thumb mode. */
415 if (gdbarch_tdep (gdbarch)->is_m)
418 /* If there are mapping symbols, consult them. */
419 type = arm_find_mapping_symbol (memaddr, NULL);
423 /* Thumb functions have a "special" bit set in minimal symbols. */
424 sym = lookup_minimal_symbol_by_pc (memaddr);
426 return (MSYMBOL_IS_SPECIAL (sym.minsym));
428 /* If the user wants to override the fallback mode, let them. */
429 if (strcmp (arm_fallback_mode_string, "arm") == 0)
431 if (strcmp (arm_fallback_mode_string, "thumb") == 0)
434 /* If we couldn't find any symbol, but we're talking to a running
435 target, then trust the current value of $cpsr. This lets
436 "display/i $pc" always show the correct mode (though if there is
437 a symbol table we will not reach here, so it still may not be
438 displayed in the mode it will be executed). */
439 if (target_has_registers)
440 return arm_frame_is_thumb (get_current_frame ());
442 /* Otherwise we're out of luck; we assume ARM. */
446 /* Remove useless bits from addresses in a running program. */
448 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
450 /* On M-profile devices, do not strip the low bit from EXC_RETURN
451 (the magic exception return address). */
452 if (gdbarch_tdep (gdbarch)->is_m
453 && (val & 0xfffffff0) == 0xfffffff0)
457 return UNMAKE_THUMB_ADDR (val);
459 return (val & 0x03fffffc);
462 /* Return 1 if PC is the start of a compiler helper function which
463 can be safely ignored during prologue skipping. IS_THUMB is true
464 if the function is known to be a Thumb function due to the way it
467 skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
469 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
470 struct bound_minimal_symbol msym;
472 msym = lookup_minimal_symbol_by_pc (pc);
473 if (msym.minsym != NULL
474 && BMSYMBOL_VALUE_ADDRESS (msym) == pc
475 && MSYMBOL_LINKAGE_NAME (msym.minsym) != NULL)
477 const char *name = MSYMBOL_LINKAGE_NAME (msym.minsym);
479 /* The GNU linker's Thumb call stub to foo is named
481 if (strstr (name, "_from_thumb") != NULL)
484 /* On soft-float targets, __truncdfsf2 is called to convert promoted
485 arguments to their argument types in non-prototyped
487 if (strncmp (name, "__truncdfsf2", strlen ("__truncdfsf2")) == 0)
489 if (strncmp (name, "__aeabi_d2f", strlen ("__aeabi_d2f")) == 0)
492 /* Internal functions related to thread-local storage. */
493 if (strncmp (name, "__tls_get_addr", strlen ("__tls_get_addr")) == 0)
495 if (strncmp (name, "__aeabi_read_tp", strlen ("__aeabi_read_tp")) == 0)
500 /* If we run against a stripped glibc, we may be unable to identify
501 special functions by name. Check for one important case,
502 __aeabi_read_tp, by comparing the *code* against the default
503 implementation (this is hand-written ARM assembler in glibc). */
506 && read_memory_unsigned_integer (pc, 4, byte_order_for_code)
507 == 0xe3e00a0f /* mov r0, #0xffff0fff */
508 && read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code)
509 == 0xe240f01f) /* sub pc, r0, #31 */
516 /* Support routines for instruction parsing. */
517 #define submask(x) ((1L << ((x) + 1)) - 1)
518 #define bit(obj,st) (((obj) >> (st)) & 1)
519 #define bits(obj,st,fn) (((obj) >> (st)) & submask ((fn) - (st)))
520 #define sbits(obj,st,fn) \
521 ((long) (bits(obj,st,fn) | ((long) bit(obj,fn) * ~ submask (fn - st))))
522 #define BranchDest(addr,instr) \
523 ((CORE_ADDR) (((unsigned long) (addr)) + 8 + (sbits (instr, 0, 23) << 2)))
525 /* Extract the immediate from instruction movw/movt of encoding T. INSN1 is
526 the first 16-bit of instruction, and INSN2 is the second 16-bit of
528 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
529 ((bits ((insn1), 0, 3) << 12) \
530 | (bits ((insn1), 10, 10) << 11) \
531 | (bits ((insn2), 12, 14) << 8) \
532 | bits ((insn2), 0, 7))
534 /* Extract the immediate from instruction movw/movt of encoding A. INSN is
535 the 32-bit instruction. */
536 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \
537 ((bits ((insn), 16, 19) << 12) \
538 | bits ((insn), 0, 11))
540 /* Decode immediate value; implements ThumbExpandImmediate pseudo-op. */
543 thumb_expand_immediate (unsigned int imm)
545 unsigned int count = imm >> 7;
553 return (imm & 0xff) | ((imm & 0xff) << 16);
555 return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
557 return (imm & 0xff) | ((imm & 0xff) << 8)
558 | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
561 return (0x80 | (imm & 0x7f)) << (32 - count);
564 /* Return 1 if the 16-bit Thumb instruction INST might change
565 control flow, 0 otherwise. */
568 thumb_instruction_changes_pc (unsigned short inst)
570 if ((inst & 0xff00) == 0xbd00) /* pop {rlist, pc} */
573 if ((inst & 0xf000) == 0xd000) /* conditional branch */
576 if ((inst & 0xf800) == 0xe000) /* unconditional branch */
579 if ((inst & 0xff00) == 0x4700) /* bx REG, blx REG */
582 if ((inst & 0xff87) == 0x4687) /* mov pc, REG */
585 if ((inst & 0xf500) == 0xb100) /* CBNZ or CBZ. */
591 /* Return 1 if the 32-bit Thumb instruction in INST1 and INST2
592 might change control flow, 0 otherwise. */
595 thumb2_instruction_changes_pc (unsigned short inst1, unsigned short inst2)
597 if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
599 /* Branches and miscellaneous control instructions. */
601 if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
606 else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)
608 /* SUBS PC, LR, #imm8. */
611 else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
613 /* Conditional branch. */
620 if ((inst1 & 0xfe50) == 0xe810)
622 /* Load multiple or RFE. */
624 if (bit (inst1, 7) && !bit (inst1, 8))
630 else if (!bit (inst1, 7) && bit (inst1, 8))
636 else if (bit (inst1, 7) && bit (inst1, 8))
641 else if (!bit (inst1, 7) && !bit (inst1, 8))
650 if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
652 /* MOV PC or MOVS PC. */
656 if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
659 if (bits (inst1, 0, 3) == 15)
665 if ((inst2 & 0x0fc0) == 0x0000)
671 if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)
677 if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)
686 /* Return 1 if the 16-bit Thumb instruction INSN restores SP in
687 epilogue, 0 otherwise. */
690 thumb_instruction_restores_sp (unsigned short insn)
692 return (insn == 0x46bd /* mov sp, r7 */
693 || (insn & 0xff80) == 0xb000 /* add sp, imm */
694 || (insn & 0xfe00) == 0xbc00); /* pop <registers> */
697 /* Analyze a Thumb prologue, looking for a recognizable stack frame
698 and frame pointer. Scan until we encounter a store that could
699 clobber the stack frame unexpectedly, or an unknown instruction.
700 Return the last address which is definitely safe to skip for an
701 initial breakpoint. */
704 thumb_analyze_prologue (struct gdbarch *gdbarch,
705 CORE_ADDR start, CORE_ADDR limit,
706 struct arm_prologue_cache *cache)
708 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
709 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
712 struct pv_area *stack;
713 struct cleanup *back_to;
715 CORE_ADDR unrecognized_pc = 0;
717 for (i = 0; i < 16; i++)
718 regs[i] = pv_register (i, 0);
719 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
720 back_to = make_cleanup_free_pv_area (stack);
722 while (start < limit)
726 insn = read_memory_unsigned_integer (start, 2, byte_order_for_code);
728 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
733 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
736 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
737 whether to save LR (R14). */
738 mask = (insn & 0xff) | ((insn & 0x100) << 6);
740 /* Calculate offsets of saved R0-R7 and LR. */
741 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
742 if (mask & (1 << regno))
744 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
746 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
749 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
751 offset = (insn & 0x7f) << 2; /* get scaled offset */
752 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
755 else if (thumb_instruction_restores_sp (insn))
757 /* Don't scan past the epilogue. */
760 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
761 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
763 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
764 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
765 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
767 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
768 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
769 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
771 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
772 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
773 && pv_is_constant (regs[bits (insn, 3, 5)]))
774 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
775 regs[bits (insn, 6, 8)]);
776 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
777 && pv_is_constant (regs[bits (insn, 3, 6)]))
779 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
780 int rm = bits (insn, 3, 6);
781 regs[rd] = pv_add (regs[rd], regs[rm]);
783 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
785 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
786 int src_reg = (insn & 0x78) >> 3;
787 regs[dst_reg] = regs[src_reg];
789 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
791 /* Handle stores to the stack. Normally pushes are used,
792 but with GCC -mtpcs-frame, there may be other stores
793 in the prologue to create the frame. */
794 int regno = (insn >> 8) & 0x7;
797 offset = (insn & 0xff) << 2;
798 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
800 if (pv_area_store_would_trash (stack, addr))
803 pv_area_store (stack, addr, 4, regs[regno]);
805 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
807 int rd = bits (insn, 0, 2);
808 int rn = bits (insn, 3, 5);
811 offset = bits (insn, 6, 10) << 2;
812 addr = pv_add_constant (regs[rn], offset);
814 if (pv_area_store_would_trash (stack, addr))
817 pv_area_store (stack, addr, 4, regs[rd]);
819 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
820 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
821 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
822 /* Ignore stores of argument registers to the stack. */
824 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
825 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
826 /* Ignore block loads from the stack, potentially copying
827 parameters from memory. */
829 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
830 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
831 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
832 /* Similarly ignore single loads from the stack. */
834 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
835 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
836 /* Skip register copies, i.e. saves to another register
837 instead of the stack. */
839 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
840 /* Recognize constant loads; even with small stacks these are necessary
842 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
843 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
845 /* Constant pool loads, for the same reason. */
846 unsigned int constant;
849 loc = start + 4 + bits (insn, 0, 7) * 4;
850 constant = read_memory_unsigned_integer (loc, 4, byte_order);
851 regs[bits (insn, 8, 10)] = pv_constant (constant);
853 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
855 unsigned short inst2;
857 inst2 = read_memory_unsigned_integer (start + 2, 2,
858 byte_order_for_code);
860 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
862 /* BL, BLX. Allow some special function calls when
863 skipping the prologue; GCC generates these before
864 storing arguments to the stack. */
866 int j1, j2, imm1, imm2;
868 imm1 = sbits (insn, 0, 10);
869 imm2 = bits (inst2, 0, 10);
870 j1 = bit (inst2, 13);
871 j2 = bit (inst2, 11);
873 offset = ((imm1 << 12) + (imm2 << 1));
874 offset ^= ((!j2) << 22) | ((!j1) << 23);
876 nextpc = start + 4 + offset;
877 /* For BLX make sure to clear the low bits. */
878 if (bit (inst2, 12) == 0)
879 nextpc = nextpc & 0xfffffffc;
881 if (!skip_prologue_function (gdbarch, nextpc,
882 bit (inst2, 12) != 0))
886 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
888 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
890 pv_t addr = regs[bits (insn, 0, 3)];
893 if (pv_area_store_would_trash (stack, addr))
896 /* Calculate offsets of saved registers. */
897 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
898 if (inst2 & (1 << regno))
900 addr = pv_add_constant (addr, -4);
901 pv_area_store (stack, addr, 4, regs[regno]);
905 regs[bits (insn, 0, 3)] = addr;
908 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
910 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
912 int regno1 = bits (inst2, 12, 15);
913 int regno2 = bits (inst2, 8, 11);
914 pv_t addr = regs[bits (insn, 0, 3)];
916 offset = inst2 & 0xff;
918 addr = pv_add_constant (addr, offset);
920 addr = pv_add_constant (addr, -offset);
922 if (pv_area_store_would_trash (stack, addr))
925 pv_area_store (stack, addr, 4, regs[regno1]);
926 pv_area_store (stack, pv_add_constant (addr, 4),
930 regs[bits (insn, 0, 3)] = addr;
933 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
934 && (inst2 & 0x0c00) == 0x0c00
935 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
937 int regno = bits (inst2, 12, 15);
938 pv_t addr = regs[bits (insn, 0, 3)];
940 offset = inst2 & 0xff;
942 addr = pv_add_constant (addr, offset);
944 addr = pv_add_constant (addr, -offset);
946 if (pv_area_store_would_trash (stack, addr))
949 pv_area_store (stack, addr, 4, regs[regno]);
952 regs[bits (insn, 0, 3)] = addr;
955 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
956 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
958 int regno = bits (inst2, 12, 15);
961 offset = inst2 & 0xfff;
962 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
964 if (pv_area_store_would_trash (stack, addr))
967 pv_area_store (stack, addr, 4, regs[regno]);
970 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
971 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
972 /* Ignore stores of argument registers to the stack. */
975 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
976 && (inst2 & 0x0d00) == 0x0c00
977 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
978 /* Ignore stores of argument registers to the stack. */
981 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
983 && (inst2 & 0x8000) == 0x0000
984 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
985 /* Ignore block loads from the stack, potentially copying
986 parameters from memory. */
989 else if ((insn & 0xffb0) == 0xe950 /* ldrd Rt, Rt2,
991 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
992 /* Similarly ignore dual loads from the stack. */
995 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
996 && (inst2 & 0x0d00) == 0x0c00
997 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
998 /* Similarly ignore single loads from the stack. */
1001 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
1002 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1003 /* Similarly ignore single loads from the stack. */
1006 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
1007 && (inst2 & 0x8000) == 0x0000)
1009 unsigned int imm = ((bits (insn, 10, 10) << 11)
1010 | (bits (inst2, 12, 14) << 8)
1011 | bits (inst2, 0, 7));
1013 regs[bits (inst2, 8, 11)]
1014 = pv_add_constant (regs[bits (insn, 0, 3)],
1015 thumb_expand_immediate (imm));
1018 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
1019 && (inst2 & 0x8000) == 0x0000)
1021 unsigned int imm = ((bits (insn, 10, 10) << 11)
1022 | (bits (inst2, 12, 14) << 8)
1023 | bits (inst2, 0, 7));
1025 regs[bits (inst2, 8, 11)]
1026 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
1029 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
1030 && (inst2 & 0x8000) == 0x0000)
1032 unsigned int imm = ((bits (insn, 10, 10) << 11)
1033 | (bits (inst2, 12, 14) << 8)
1034 | bits (inst2, 0, 7));
1036 regs[bits (inst2, 8, 11)]
1037 = pv_add_constant (regs[bits (insn, 0, 3)],
1038 - (CORE_ADDR) thumb_expand_immediate (imm));
1041 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
1042 && (inst2 & 0x8000) == 0x0000)
1044 unsigned int imm = ((bits (insn, 10, 10) << 11)
1045 | (bits (inst2, 12, 14) << 8)
1046 | bits (inst2, 0, 7));
1048 regs[bits (inst2, 8, 11)]
1049 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
1052 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
1054 unsigned int imm = ((bits (insn, 10, 10) << 11)
1055 | (bits (inst2, 12, 14) << 8)
1056 | bits (inst2, 0, 7));
1058 regs[bits (inst2, 8, 11)]
1059 = pv_constant (thumb_expand_immediate (imm));
1062 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
1065 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
1067 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1070 else if (insn == 0xea5f /* mov.w Rd,Rm */
1071 && (inst2 & 0xf0f0) == 0)
1073 int dst_reg = (inst2 & 0x0f00) >> 8;
1074 int src_reg = inst2 & 0xf;
1075 regs[dst_reg] = regs[src_reg];
1078 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1080 /* Constant pool loads. */
1081 unsigned int constant;
1084 offset = bits (inst2, 0, 11);
1086 loc = start + 4 + offset;
1088 loc = start + 4 - offset;
1090 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1091 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1094 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1096 /* Constant pool loads. */
1097 unsigned int constant;
1100 offset = bits (inst2, 0, 7) << 2;
1102 loc = start + 4 + offset;
1104 loc = start + 4 - offset;
1106 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1107 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1109 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1110 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1113 else if (thumb2_instruction_changes_pc (insn, inst2))
1115 /* Don't scan past anything that might change control flow. */
1120 /* The optimizer might shove anything into the prologue,
1121 so we just skip what we don't recognize. */
1122 unrecognized_pc = start;
1127 else if (thumb_instruction_changes_pc (insn))
1129 /* Don't scan past anything that might change control flow. */
1134 /* The optimizer might shove anything into the prologue,
1135 so we just skip what we don't recognize. */
1136 unrecognized_pc = start;
1143 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1144 paddress (gdbarch, start));
1146 if (unrecognized_pc == 0)
1147 unrecognized_pc = start;
1151 do_cleanups (back_to);
1152 return unrecognized_pc;
1155 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1157 /* Frame pointer is fp. Frame size is constant. */
1158 cache->framereg = ARM_FP_REGNUM;
1159 cache->framesize = -regs[ARM_FP_REGNUM].k;
1161 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1163 /* Frame pointer is r7. Frame size is constant. */
1164 cache->framereg = THUMB_FP_REGNUM;
1165 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1169 /* Try the stack pointer... this is a bit desperate. */
1170 cache->framereg = ARM_SP_REGNUM;
1171 cache->framesize = -regs[ARM_SP_REGNUM].k;
1174 for (i = 0; i < 16; i++)
1175 if (pv_area_find_reg (stack, gdbarch, i, &offset))
1176 cache->saved_regs[i].addr = offset;
1178 do_cleanups (back_to);
1179 return unrecognized_pc;
1183 /* Try to analyze the instructions starting from PC, which load symbol
1184 __stack_chk_guard. Return the address of instruction after loading this
1185 symbol, set the dest register number to *BASEREG, and set the size of
1186 instructions for loading symbol in OFFSET. Return 0 if instructions are
1190 arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
1191 unsigned int *destreg, int *offset)
1193 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1194 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1195 unsigned int low, high, address;
1200 unsigned short insn1
1201 = read_memory_unsigned_integer (pc, 2, byte_order_for_code);
1203 if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
1205 *destreg = bits (insn1, 8, 10);
1207 address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
1208 address = read_memory_unsigned_integer (address, 4,
1209 byte_order_for_code);
1211 else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
1213 unsigned short insn2
1214 = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);
1216 low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1219 = read_memory_unsigned_integer (pc + 4, 2, byte_order_for_code);
1221 = read_memory_unsigned_integer (pc + 6, 2, byte_order_for_code);
1223 /* movt Rd, #const */
1224 if ((insn1 & 0xfbc0) == 0xf2c0)
1226 high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1227 *destreg = bits (insn2, 8, 11);
1229 address = (high << 16 | low);
1236 = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
1238 if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
1240 address = bits (insn, 0, 11) + pc + 8;
1241 address = read_memory_unsigned_integer (address, 4,
1242 byte_order_for_code);
1244 *destreg = bits (insn, 12, 15);
1247 else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1249 low = EXTRACT_MOVW_MOVT_IMM_A (insn);
1252 = read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code);
1254 if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1256 high = EXTRACT_MOVW_MOVT_IMM_A (insn);
1257 *destreg = bits (insn, 12, 15);
1259 address = (high << 16 | low);
1267 /* Try to skip a sequence of instructions used for stack protector. If PC
1268 points to the first instruction of this sequence, return the address of
1269 first instruction after this sequence, otherwise, return original PC.
1271 On arm, this sequence of instructions is composed of mainly three steps,
1272 Step 1: load symbol __stack_chk_guard,
1273 Step 2: load from address of __stack_chk_guard,
1274 Step 3: store it to somewhere else.
1276 Usually, instructions on step 2 and step 3 are the same on various ARM
1277 architectures. On step 2, it is one instruction 'ldr Rx, [Rn, #0]', and
1278 on step 3, it is also one instruction 'str Rx, [r7, #immd]'. However,
1279 instructions in step 1 vary from different ARM architectures. On ARMv7,
1282 movw Rn, #:lower16:__stack_chk_guard
1283 movt Rn, #:upper16:__stack_chk_guard
1290 .word __stack_chk_guard
1292 Since ldr/str is a very popular instruction, we can't use them as
1293 'fingerprint' or 'signature' of stack protector sequence. Here we choose
1294 sequence {movw/movt, ldr}/ldr/str plus symbol __stack_chk_guard, if not
1295 stripped, as the 'fingerprint' of a stack protector cdoe sequence. */
1298 arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
1300 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1301 unsigned int basereg;
1302 struct bound_minimal_symbol stack_chk_guard;
1304 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1307 /* Try to parse the instructions in Step 1. */
1308 addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1313 stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
1314 /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
1315 Otherwise, this sequence cannot be for stack protector. */
1316 if (stack_chk_guard.minsym == NULL
1317 || strncmp (MSYMBOL_LINKAGE_NAME (stack_chk_guard.minsym),
1318 "__stack_chk_guard",
1319 strlen ("__stack_chk_guard")) != 0)
1324 unsigned int destreg;
1326 = read_memory_unsigned_integer (pc + offset, 2, byte_order_for_code);
1328 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1329 if ((insn & 0xf800) != 0x6800)
1331 if (bits (insn, 3, 5) != basereg)
1333 destreg = bits (insn, 0, 2);
1335 insn = read_memory_unsigned_integer (pc + offset + 2, 2,
1336 byte_order_for_code);
1337 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1338 if ((insn & 0xf800) != 0x6000)
1340 if (destreg != bits (insn, 0, 2))
1345 unsigned int destreg;
1347 = read_memory_unsigned_integer (pc + offset, 4, byte_order_for_code);
1349 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1350 if ((insn & 0x0e500000) != 0x04100000)
1352 if (bits (insn, 16, 19) != basereg)
1354 destreg = bits (insn, 12, 15);
1355 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1356 insn = read_memory_unsigned_integer (pc + offset + 4,
1357 4, byte_order_for_code);
1358 if ((insn & 0x0e500000) != 0x04000000)
1360 if (bits (insn, 12, 15) != destreg)
1363 /* The size of total two instructions ldr/str is 4 on Thumb-2, while 8
1366 return pc + offset + 4;
1368 return pc + offset + 8;
1371 /* Advance the PC across any function entry prologue instructions to
1372 reach some "real" code.
1374 The APCS (ARM Procedure Call Standard) defines the following
1378 [stmfd sp!, {a1,a2,a3,a4}]
1379 stmfd sp!, {...,fp,ip,lr,pc}
1380 [stfe f7, [sp, #-12]!]
1381 [stfe f6, [sp, #-12]!]
1382 [stfe f5, [sp, #-12]!]
1383 [stfe f4, [sp, #-12]!]
1384 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1387 arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1389 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1391 CORE_ADDR func_addr, limit_pc;
1393 /* See if we can determine the end of the prologue via the symbol table.
1394 If so, then return either PC, or the PC after the prologue, whichever
1396 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
1398 CORE_ADDR post_prologue_pc
1399 = skip_prologue_using_sal (gdbarch, func_addr);
1400 struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);
1402 if (post_prologue_pc)
1404 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1407 /* GCC always emits a line note before the prologue and another
1408 one after, even if the two are at the same address or on the
1409 same line. Take advantage of this so that we do not need to
1410 know every instruction that might appear in the prologue. We
1411 will have producer information for most binaries; if it is
1412 missing (e.g. for -gstabs), assuming the GNU tools. */
1413 if (post_prologue_pc
1415 || COMPUNIT_PRODUCER (cust) == NULL
1416 || strncmp (COMPUNIT_PRODUCER (cust), "GNU ",
1417 sizeof ("GNU ") - 1) == 0
1418 || strncmp (COMPUNIT_PRODUCER (cust), "clang ",
1419 sizeof ("clang ") - 1) == 0))
1420 return post_prologue_pc;
1422 if (post_prologue_pc != 0)
1424 CORE_ADDR analyzed_limit;
1426 /* For non-GCC compilers, make sure the entire line is an
1427 acceptable prologue; GDB will round this function's
1428 return value up to the end of the following line so we
1429 can not skip just part of a line (and we do not want to).
1431 RealView does not treat the prologue specially, but does
1432 associate prologue code with the opening brace; so this
1433 lets us skip the first line if we think it is the opening
1435 if (arm_pc_is_thumb (gdbarch, func_addr))
1436 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1437 post_prologue_pc, NULL);
1439 analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
1440 post_prologue_pc, NULL);
1442 if (analyzed_limit != post_prologue_pc)
1445 return post_prologue_pc;
1449 /* Can't determine prologue from the symbol table, need to examine
1452 /* Find an upper limit on the function prologue using the debug
1453 information. If the debug information could not be used to provide
1454 that bound, then use an arbitrary large number as the upper bound. */
1455 /* Like arm_scan_prologue, stop no later than pc + 64. */
1456 limit_pc = skip_prologue_using_sal (gdbarch, pc);
1458 limit_pc = pc + 64; /* Magic. */
1461 /* Check if this is Thumb code. */
1462 if (arm_pc_is_thumb (gdbarch, pc))
1463 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1465 return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1469 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1470 This function decodes a Thumb function prologue to determine:
1471 1) the size of the stack frame
1472 2) which registers are saved on it
1473 3) the offsets of saved regs
1474 4) the offset from the stack pointer to the frame pointer
1476 A typical Thumb function prologue would create this stack frame
1477 (offsets relative to FP)
1478 old SP -> 24 stack parameters
1481 R7 -> 0 local variables (16 bytes)
1482 SP -> -12 additional stack space (12 bytes)
1483 The frame size would thus be 36 bytes, and the frame offset would be
1484 12 bytes. The frame register is R7.
1486 The comments for thumb_skip_prolog() describe the algorithm we use
1487 to detect the end of the prolog. */
1491 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1492 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1494 CORE_ADDR prologue_start;
1495 CORE_ADDR prologue_end;
1497 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1500 /* See comment in arm_scan_prologue for an explanation of
1502 if (prologue_end > prologue_start + 64)
1504 prologue_end = prologue_start + 64;
1508 /* We're in the boondocks: we have no idea where the start of the
1512 prologue_end = min (prologue_end, prev_pc);
1514 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1517 /* Return 1 if THIS_INSTR might change control flow, 0 otherwise. */
1520 arm_instruction_changes_pc (uint32_t this_instr)
1522 if (bits (this_instr, 28, 31) == INST_NV)
1523 /* Unconditional instructions. */
1524 switch (bits (this_instr, 24, 27))
1528 /* Branch with Link and change to Thumb. */
1533 /* Coprocessor register transfer. */
1534 if (bits (this_instr, 12, 15) == 15)
1535 error (_("Invalid update to pc in instruction"));
1541 switch (bits (this_instr, 25, 27))
1544 if (bits (this_instr, 23, 24) == 2 && bit (this_instr, 20) == 0)
1546 /* Multiplies and extra load/stores. */
1547 if (bit (this_instr, 4) == 1 && bit (this_instr, 7) == 1)
1548 /* Neither multiplies nor extension load/stores are allowed
1552 /* Otherwise, miscellaneous instructions. */
1554 /* BX <reg>, BXJ <reg>, BLX <reg> */
1555 if (bits (this_instr, 4, 27) == 0x12fff1
1556 || bits (this_instr, 4, 27) == 0x12fff2
1557 || bits (this_instr, 4, 27) == 0x12fff3)
1560 /* Other miscellaneous instructions are unpredictable if they
1564 /* Data processing instruction. Fall through. */
1567 if (bits (this_instr, 12, 15) == 15)
1574 /* Media instructions and architecturally undefined instructions. */
1575 if (bits (this_instr, 25, 27) == 3 && bit (this_instr, 4) == 1)
1579 if (bit (this_instr, 20) == 0)
1583 if (bits (this_instr, 12, 15) == ARM_PC_REGNUM)
1589 /* Load/store multiple. */
1590 if (bit (this_instr, 20) == 1 && bit (this_instr, 15) == 1)
1596 /* Branch and branch with link. */
1601 /* Coprocessor transfers or SWIs can not affect PC. */
1605 internal_error (__FILE__, __LINE__, _("bad value in switch"));
1609 /* Return 1 if the ARM instruction INSN restores SP in epilogue, 0
1613 arm_instruction_restores_sp (unsigned int insn)
1615 if (bits (insn, 28, 31) != INST_NV)
1617 if ((insn & 0x0df0f000) == 0x0080d000
1618 /* ADD SP (register or immediate). */
1619 || (insn & 0x0df0f000) == 0x0040d000
1620 /* SUB SP (register or immediate). */
1621 || (insn & 0x0ffffff0) == 0x01a0d000
1623 || (insn & 0x0fff0000) == 0x08bd0000
1625 || (insn & 0x0fff0000) == 0x049d0000)
1626 /* POP of a single register. */
1633 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1634 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1635 fill it in. Return the first address not recognized as a prologue
1638 We recognize all the instructions typically found in ARM prologues,
1639 plus harmless instructions which can be skipped (either for analysis
1640 purposes, or a more restrictive set that can be skipped when finding
1641 the end of the prologue). */
1644 arm_analyze_prologue (struct gdbarch *gdbarch,
1645 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1646 struct arm_prologue_cache *cache)
1648 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1649 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1651 CORE_ADDR offset, current_pc;
1652 pv_t regs[ARM_FPS_REGNUM];
1653 struct pv_area *stack;
1654 struct cleanup *back_to;
1655 CORE_ADDR unrecognized_pc = 0;
1657 /* Search the prologue looking for instructions that set up the
1658 frame pointer, adjust the stack pointer, and save registers.
1660 Be careful, however, and if it doesn't look like a prologue,
1661 don't try to scan it. If, for instance, a frameless function
1662 begins with stmfd sp!, then we will tell ourselves there is
1663 a frame, which will confuse stack traceback, as well as "finish"
1664 and other operations that rely on a knowledge of the stack
1667 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1668 regs[regno] = pv_register (regno, 0);
1669 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1670 back_to = make_cleanup_free_pv_area (stack);
1672 for (current_pc = prologue_start;
1673 current_pc < prologue_end;
1677 = read_memory_unsigned_integer (current_pc, 4, byte_order_for_code);
1679 if (insn == 0xe1a0c00d) /* mov ip, sp */
1681 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1684 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1685 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1687 unsigned imm = insn & 0xff; /* immediate value */
1688 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1689 int rd = bits (insn, 12, 15);
1690 imm = (imm >> rot) | (imm << (32 - rot));
1691 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1694 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1695 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1697 unsigned imm = insn & 0xff; /* immediate value */
1698 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1699 int rd = bits (insn, 12, 15);
1700 imm = (imm >> rot) | (imm << (32 - rot));
1701 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1704 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1707 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1709 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1710 pv_area_store (stack, regs[ARM_SP_REGNUM], 4,
1711 regs[bits (insn, 12, 15)]);
1714 else if ((insn & 0xffff0000) == 0xe92d0000)
1715 /* stmfd sp!, {..., fp, ip, lr, pc}
1717 stmfd sp!, {a1, a2, a3, a4} */
1719 int mask = insn & 0xffff;
1721 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1724 /* Calculate offsets of saved registers. */
1725 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1726 if (mask & (1 << regno))
1729 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1730 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
1733 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1734 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1735 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1737 /* No need to add this to saved_regs -- it's just an arg reg. */
1740 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1741 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1742 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1744 /* No need to add this to saved_regs -- it's just an arg reg. */
1747 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1749 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1751 /* No need to add this to saved_regs -- it's just arg regs. */
1754 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1756 unsigned imm = insn & 0xff; /* immediate value */
1757 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1758 imm = (imm >> rot) | (imm << (32 - rot));
1759 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1761 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1763 unsigned imm = insn & 0xff; /* immediate value */
1764 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1765 imm = (imm >> rot) | (imm << (32 - rot));
1766 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
1768 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1770 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1772 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1775 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1776 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
1777 pv_area_store (stack, regs[ARM_SP_REGNUM], 12, regs[regno]);
1779 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1781 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1783 int n_saved_fp_regs;
1784 unsigned int fp_start_reg, fp_bound_reg;
1786 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1789 if ((insn & 0x800) == 0x800) /* N0 is set */
1791 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1792 n_saved_fp_regs = 3;
1794 n_saved_fp_regs = 1;
1798 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1799 n_saved_fp_regs = 2;
1801 n_saved_fp_regs = 4;
1804 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
1805 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1806 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
1808 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1809 pv_area_store (stack, regs[ARM_SP_REGNUM], 12,
1810 regs[fp_start_reg++]);
1813 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1815 /* Allow some special function calls when skipping the
1816 prologue; GCC generates these before storing arguments to
1818 CORE_ADDR dest = BranchDest (current_pc, insn);
1820 if (skip_prologue_function (gdbarch, dest, 0))
1825 else if ((insn & 0xf0000000) != 0xe0000000)
1826 break; /* Condition not true, exit early. */
1827 else if (arm_instruction_changes_pc (insn))
1828 /* Don't scan past anything that might change control flow. */
1830 else if (arm_instruction_restores_sp (insn))
1832 /* Don't scan past the epilogue. */
1835 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1836 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1837 /* Ignore block loads from the stack, potentially copying
1838 parameters from memory. */
1840 else if ((insn & 0xfc500000) == 0xe4100000
1841 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1842 /* Similarly ignore single loads from the stack. */
1844 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1845 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1846 register instead of the stack. */
1850 /* The optimizer might shove anything into the prologue, if
1851 we build up cache (cache != NULL) from scanning prologue,
1852 we just skip what we don't recognize and scan further to
1853 make cache as complete as possible. However, if we skip
1854 prologue, we'll stop immediately on unrecognized
1856 unrecognized_pc = current_pc;
1864 if (unrecognized_pc == 0)
1865 unrecognized_pc = current_pc;
1869 int framereg, framesize;
1871 /* The frame size is just the distance from the frame register
1872 to the original stack pointer. */
1873 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1875 /* Frame pointer is fp. */
1876 framereg = ARM_FP_REGNUM;
1877 framesize = -regs[ARM_FP_REGNUM].k;
1881 /* Try the stack pointer... this is a bit desperate. */
1882 framereg = ARM_SP_REGNUM;
1883 framesize = -regs[ARM_SP_REGNUM].k;
1886 cache->framereg = framereg;
1887 cache->framesize = framesize;
1889 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1890 if (pv_area_find_reg (stack, gdbarch, regno, &offset))
1891 cache->saved_regs[regno].addr = offset;
1895 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1896 paddress (gdbarch, unrecognized_pc));
1898 do_cleanups (back_to);
1899 return unrecognized_pc;
1903 arm_scan_prologue (struct frame_info *this_frame,
1904 struct arm_prologue_cache *cache)
1906 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1907 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1909 CORE_ADDR prologue_start, prologue_end, current_pc;
1910 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1911 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
1912 pv_t regs[ARM_FPS_REGNUM];
1913 struct pv_area *stack;
1914 struct cleanup *back_to;
1917 /* Assume there is no frame until proven otherwise. */
1918 cache->framereg = ARM_SP_REGNUM;
1919 cache->framesize = 0;
1921 /* Check for Thumb prologue. */
1922 if (arm_frame_is_thumb (this_frame))
1924 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1928 /* Find the function prologue. If we can't find the function in
1929 the symbol table, peek in the stack frame to find the PC. */
1930 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1933 /* One way to find the end of the prologue (which works well
1934 for unoptimized code) is to do the following:
1936 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1939 prologue_end = prev_pc;
1940 else if (sal.end < prologue_end)
1941 prologue_end = sal.end;
1943 This mechanism is very accurate so long as the optimizer
1944 doesn't move any instructions from the function body into the
1945 prologue. If this happens, sal.end will be the last
1946 instruction in the first hunk of prologue code just before
1947 the first instruction that the scheduler has moved from
1948 the body to the prologue.
1950 In order to make sure that we scan all of the prologue
1951 instructions, we use a slightly less accurate mechanism which
1952 may scan more than necessary. To help compensate for this
1953 lack of accuracy, the prologue scanning loop below contains
1954 several clauses which'll cause the loop to terminate early if
1955 an implausible prologue instruction is encountered.
1961 is a suitable endpoint since it accounts for the largest
1962 possible prologue plus up to five instructions inserted by
1965 if (prologue_end > prologue_start + 64)
1967 prologue_end = prologue_start + 64; /* See above. */
1972 /* We have no symbol information. Our only option is to assume this
1973 function has a standard stack frame and the normal frame register.
1974 Then, we can find the value of our frame pointer on entrance to
1975 the callee (or at the present moment if this is the innermost frame).
1976 The value stored there should be the address of the stmfd + 8. */
1977 CORE_ADDR frame_loc;
1978 LONGEST return_value;
1980 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
1981 if (!safe_read_memory_integer (frame_loc, 4, byte_order, &return_value))
1985 prologue_start = gdbarch_addr_bits_remove
1986 (gdbarch, return_value) - 8;
1987 prologue_end = prologue_start + 64; /* See above. */
1991 if (prev_pc < prologue_end)
1992 prologue_end = prev_pc;
1994 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1997 static struct arm_prologue_cache *
1998 arm_make_prologue_cache (struct frame_info *this_frame)
2001 struct arm_prologue_cache *cache;
2002 CORE_ADDR unwound_fp;
2004 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2005 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2007 arm_scan_prologue (this_frame, cache);
2009 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
2010 if (unwound_fp == 0)
2013 cache->prev_sp = unwound_fp + cache->framesize;
2015 /* Calculate actual addresses of saved registers using offsets
2016 determined by arm_scan_prologue. */
2017 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2018 if (trad_frame_addr_p (cache->saved_regs, reg))
2019 cache->saved_regs[reg].addr += cache->prev_sp;
2024 /* Our frame ID for a normal frame is the current function's starting PC
2025 and the caller's SP when we were called. */
2028 arm_prologue_this_id (struct frame_info *this_frame,
2030 struct frame_id *this_id)
2032 struct arm_prologue_cache *cache;
2036 if (*this_cache == NULL)
2037 *this_cache = arm_make_prologue_cache (this_frame);
2038 cache = *this_cache;
2040 /* This is meant to halt the backtrace at "_start". */
2041 pc = get_frame_pc (this_frame);
2042 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
2045 /* If we've hit a wall, stop. */
2046 if (cache->prev_sp == 0)
2049 /* Use function start address as part of the frame ID. If we cannot
2050 identify the start address (due to missing symbol information),
2051 fall back to just using the current PC. */
2052 func = get_frame_func (this_frame);
2056 id = frame_id_build (cache->prev_sp, func);
2060 static struct value *
2061 arm_prologue_prev_register (struct frame_info *this_frame,
2065 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2066 struct arm_prologue_cache *cache;
2068 if (*this_cache == NULL)
2069 *this_cache = arm_make_prologue_cache (this_frame);
2070 cache = *this_cache;
2072 /* If we are asked to unwind the PC, then we need to return the LR
2073 instead. The prologue may save PC, but it will point into this
2074 frame's prologue, not the next frame's resume location. Also
2075 strip the saved T bit. A valid LR may have the low bit set, but
2076 a valid PC never does. */
2077 if (prev_regnum == ARM_PC_REGNUM)
2081 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2082 return frame_unwind_got_constant (this_frame, prev_regnum,
2083 arm_addr_bits_remove (gdbarch, lr));
2086 /* SP is generally not saved to the stack, but this frame is
2087 identified by the next frame's stack pointer at the time of the call.
2088 The value was already reconstructed into PREV_SP. */
2089 if (prev_regnum == ARM_SP_REGNUM)
2090 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
2092 /* The CPSR may have been changed by the call instruction and by the
2093 called function. The only bit we can reconstruct is the T bit,
2094 by checking the low bit of LR as of the call. This is a reliable
2095 indicator of Thumb-ness except for some ARM v4T pre-interworking
2096 Thumb code, which could get away with a clear low bit as long as
2097 the called function did not use bx. Guess that all other
2098 bits are unchanged; the condition flags are presumably lost,
2099 but the processor status is likely valid. */
2100 if (prev_regnum == ARM_PS_REGNUM)
2103 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
2105 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
2106 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2107 if (IS_THUMB_ADDR (lr))
2111 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
2114 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
2118 struct frame_unwind arm_prologue_unwind = {
2120 default_frame_unwind_stop_reason,
2121 arm_prologue_this_id,
2122 arm_prologue_prev_register,
2124 default_frame_sniffer
2127 /* Maintain a list of ARM exception table entries per objfile, similar to the
2128 list of mapping symbols. We only cache entries for standard ARM-defined
2129 personality routines; the cache will contain only the frame unwinding
2130 instructions associated with the entry (not the descriptors). */
2132 static const struct objfile_data *arm_exidx_data_key;
2134 struct arm_exidx_entry
2139 typedef struct arm_exidx_entry arm_exidx_entry_s;
2140 DEF_VEC_O(arm_exidx_entry_s);
2142 struct arm_exidx_data
2144 VEC(arm_exidx_entry_s) **section_maps;
2148 arm_exidx_data_free (struct objfile *objfile, void *arg)
2150 struct arm_exidx_data *data = arg;
2153 for (i = 0; i < objfile->obfd->section_count; i++)
2154 VEC_free (arm_exidx_entry_s, data->section_maps[i]);
2158 arm_compare_exidx_entries (const struct arm_exidx_entry *lhs,
2159 const struct arm_exidx_entry *rhs)
2161 return lhs->addr < rhs->addr;
2164 static struct obj_section *
2165 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2167 struct obj_section *osect;
2169 ALL_OBJFILE_OSECTIONS (objfile, osect)
2170 if (bfd_get_section_flags (objfile->obfd,
2171 osect->the_bfd_section) & SEC_ALLOC)
2173 bfd_vma start, size;
2174 start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
2175 size = bfd_get_section_size (osect->the_bfd_section);
2177 if (start <= vma && vma < start + size)
2184 /* Parse contents of exception table and exception index sections
2185 of OBJFILE, and fill in the exception table entry cache.
2187 For each entry that refers to a standard ARM-defined personality
2188 routine, extract the frame unwinding instructions (from either
2189 the index or the table section). The unwinding instructions
2191 - extracting them from the rest of the table data
2192 - converting to host endianness
2193 - appending the implicit 0xb0 ("Finish") code
2195 The extracted and normalized instructions are stored for later
2196 retrieval by the arm_find_exidx_entry routine. */
2199 arm_exidx_new_objfile (struct objfile *objfile)
2201 struct cleanup *cleanups;
2202 struct arm_exidx_data *data;
2203 asection *exidx, *extab;
2204 bfd_vma exidx_vma = 0, extab_vma = 0;
2205 bfd_size_type exidx_size = 0, extab_size = 0;
2206 gdb_byte *exidx_data = NULL, *extab_data = NULL;
2209 /* If we've already touched this file, do nothing. */
2210 if (!objfile || objfile_data (objfile, arm_exidx_data_key) != NULL)
2212 cleanups = make_cleanup (null_cleanup, NULL);
2214 /* Read contents of exception table and index. */
2215 exidx = bfd_get_section_by_name (objfile->obfd, ".ARM.exidx");
2218 exidx_vma = bfd_section_vma (objfile->obfd, exidx);
2219 exidx_size = bfd_get_section_size (exidx);
2220 exidx_data = xmalloc (exidx_size);
2221 make_cleanup (xfree, exidx_data);
2223 if (!bfd_get_section_contents (objfile->obfd, exidx,
2224 exidx_data, 0, exidx_size))
2226 do_cleanups (cleanups);
2231 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2234 extab_vma = bfd_section_vma (objfile->obfd, extab);
2235 extab_size = bfd_get_section_size (extab);
2236 extab_data = xmalloc (extab_size);
2237 make_cleanup (xfree, extab_data);
2239 if (!bfd_get_section_contents (objfile->obfd, extab,
2240 extab_data, 0, extab_size))
2242 do_cleanups (cleanups);
2247 /* Allocate exception table data structure. */
2248 data = OBSTACK_ZALLOC (&objfile->objfile_obstack, struct arm_exidx_data);
2249 set_objfile_data (objfile, arm_exidx_data_key, data);
2250 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
2251 objfile->obfd->section_count,
2252 VEC(arm_exidx_entry_s) *);
2254 /* Fill in exception table. */
2255 for (i = 0; i < exidx_size / 8; i++)
2257 struct arm_exidx_entry new_exidx_entry;
2258 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8);
2259 bfd_vma val = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8 + 4);
2260 bfd_vma addr = 0, word = 0;
2261 int n_bytes = 0, n_words = 0;
2262 struct obj_section *sec;
2263 gdb_byte *entry = NULL;
2265 /* Extract address of start of function. */
2266 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2267 idx += exidx_vma + i * 8;
2269 /* Find section containing function and compute section offset. */
2270 sec = arm_obj_section_from_vma (objfile, idx);
2273 idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);
2275 /* Determine address of exception table entry. */
2278 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2280 else if ((val & 0xff000000) == 0x80000000)
2282 /* Exception table entry embedded in .ARM.exidx
2283 -- must be short form. */
2287 else if (!(val & 0x80000000))
2289 /* Exception table entry in .ARM.extab. */
2290 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2291 addr += exidx_vma + i * 8 + 4;
2293 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_size)
2295 word = bfd_h_get_32 (objfile->obfd,
2296 extab_data + addr - extab_vma);
2299 if ((word & 0xff000000) == 0x80000000)
2304 else if ((word & 0xff000000) == 0x81000000
2305 || (word & 0xff000000) == 0x82000000)
2309 n_words = ((word >> 16) & 0xff);
2311 else if (!(word & 0x80000000))
2314 struct obj_section *pers_sec;
2315 int gnu_personality = 0;
2317 /* Custom personality routine. */
2318 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2319 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2321 /* Check whether we've got one of the variants of the
2322 GNU personality routines. */
2323 pers_sec = arm_obj_section_from_vma (objfile, pers);
2326 static const char *personality[] =
2328 "__gcc_personality_v0",
2329 "__gxx_personality_v0",
2330 "__gcj_personality_v0",
2331 "__gnu_objc_personality_v0",
2335 CORE_ADDR pc = pers + obj_section_offset (pers_sec);
2338 for (k = 0; personality[k]; k++)
2339 if (lookup_minimal_symbol_by_pc_name
2340 (pc, personality[k], objfile))
2342 gnu_personality = 1;
2347 /* If so, the next word contains a word count in the high
2348 byte, followed by the same unwind instructions as the
2349 pre-defined forms. */
2351 && addr + 4 <= extab_vma + extab_size)
2353 word = bfd_h_get_32 (objfile->obfd,
2354 extab_data + addr - extab_vma);
2357 n_words = ((word >> 24) & 0xff);
2363 /* Sanity check address. */
2365 if (addr < extab_vma || addr + 4 * n_words > extab_vma + extab_size)
2366 n_words = n_bytes = 0;
2368 /* The unwind instructions reside in WORD (only the N_BYTES least
2369 significant bytes are valid), followed by N_WORDS words in the
2370 extab section starting at ADDR. */
2371 if (n_bytes || n_words)
2373 gdb_byte *p = entry = obstack_alloc (&objfile->objfile_obstack,
2374 n_bytes + n_words * 4 + 1);
2377 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2381 word = bfd_h_get_32 (objfile->obfd,
2382 extab_data + addr - extab_vma);
2385 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2386 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2387 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2388 *p++ = (gdb_byte) (word & 0xff);
2391 /* Implied "Finish" to terminate the list. */
2395 /* Push entry onto vector. They are guaranteed to always
2396 appear in order of increasing addresses. */
2397 new_exidx_entry.addr = idx;
2398 new_exidx_entry.entry = entry;
2399 VEC_safe_push (arm_exidx_entry_s,
2400 data->section_maps[sec->the_bfd_section->index],
2404 do_cleanups (cleanups);
2407 /* Search for the exception table entry covering MEMADDR. If one is found,
2408 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2409 set *START to the start of the region covered by this entry. */
2412 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2414 struct obj_section *sec;
2416 sec = find_pc_section (memaddr);
2419 struct arm_exidx_data *data;
2420 VEC(arm_exidx_entry_s) *map;
2421 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2424 data = objfile_data (sec->objfile, arm_exidx_data_key);
2427 map = data->section_maps[sec->the_bfd_section->index];
2428 if (!VEC_empty (arm_exidx_entry_s, map))
2430 struct arm_exidx_entry *map_sym;
2432 idx = VEC_lower_bound (arm_exidx_entry_s, map, &map_key,
2433 arm_compare_exidx_entries);
2435 /* VEC_lower_bound finds the earliest ordered insertion
2436 point. If the following symbol starts at this exact
2437 address, we use that; otherwise, the preceding
2438 exception table entry covers this address. */
2439 if (idx < VEC_length (arm_exidx_entry_s, map))
2441 map_sym = VEC_index (arm_exidx_entry_s, map, idx);
2442 if (map_sym->addr == map_key.addr)
2445 *start = map_sym->addr + obj_section_addr (sec);
2446 return map_sym->entry;
2452 map_sym = VEC_index (arm_exidx_entry_s, map, idx - 1);
2454 *start = map_sym->addr + obj_section_addr (sec);
2455 return map_sym->entry;
2464 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2465 instruction list from the ARM exception table entry ENTRY, allocate and
2466 return a prologue cache structure describing how to unwind this frame.
2468 Return NULL if the unwinding instruction list contains a "spare",
2469 "reserved" or "refuse to unwind" instruction as defined in section
2470 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2471 for the ARM Architecture" document. */
2473 static struct arm_prologue_cache *
2474 arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2479 struct arm_prologue_cache *cache;
2480 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2481 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2487 /* Whenever we reload SP, we actually have to retrieve its
2488 actual value in the current frame. */
2491 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2493 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2494 vsp = get_frame_register_unsigned (this_frame, reg);
2498 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
2499 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2505 /* Decode next unwind instruction. */
2508 if ((insn & 0xc0) == 0)
2510 int offset = insn & 0x3f;
2511 vsp += (offset << 2) + 4;
2513 else if ((insn & 0xc0) == 0x40)
2515 int offset = insn & 0x3f;
2516 vsp -= (offset << 2) + 4;
2518 else if ((insn & 0xf0) == 0x80)
2520 int mask = ((insn & 0xf) << 8) | *entry++;
2523 /* The special case of an all-zero mask identifies
2524 "Refuse to unwind". We return NULL to fall back
2525 to the prologue analyzer. */
2529 /* Pop registers r4..r15 under mask. */
2530 for (i = 0; i < 12; i++)
2531 if (mask & (1 << i))
2533 cache->saved_regs[4 + i].addr = vsp;
2537 /* Special-case popping SP -- we need to reload vsp. */
2538 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2541 else if ((insn & 0xf0) == 0x90)
2543 int reg = insn & 0xf;
2545 /* Reserved cases. */
2546 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2549 /* Set SP from another register and mark VSP for reload. */
2550 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2553 else if ((insn & 0xf0) == 0xa0)
2555 int count = insn & 0x7;
2556 int pop_lr = (insn & 0x8) != 0;
2559 /* Pop r4..r[4+count]. */
2560 for (i = 0; i <= count; i++)
2562 cache->saved_regs[4 + i].addr = vsp;
2566 /* If indicated by flag, pop LR as well. */
2569 cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
2573 else if (insn == 0xb0)
2575 /* We could only have updated PC by popping into it; if so, it
2576 will show up as address. Otherwise, copy LR into PC. */
2577 if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
2578 cache->saved_regs[ARM_PC_REGNUM]
2579 = cache->saved_regs[ARM_LR_REGNUM];
2584 else if (insn == 0xb1)
2586 int mask = *entry++;
2589 /* All-zero mask and mask >= 16 is "spare". */
2590 if (mask == 0 || mask >= 16)
2593 /* Pop r0..r3 under mask. */
2594 for (i = 0; i < 4; i++)
2595 if (mask & (1 << i))
2597 cache->saved_regs[i].addr = vsp;
2601 else if (insn == 0xb2)
2603 ULONGEST offset = 0;
2608 offset |= (*entry & 0x7f) << shift;
2611 while (*entry++ & 0x80);
2613 vsp += 0x204 + (offset << 2);
2615 else if (insn == 0xb3)
2617 int start = *entry >> 4;
2618 int count = (*entry++) & 0xf;
2621 /* Only registers D0..D15 are valid here. */
2622 if (start + count >= 16)
2625 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2626 for (i = 0; i <= count; i++)
2628 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2632 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2635 else if ((insn & 0xf8) == 0xb8)
2637 int count = insn & 0x7;
2640 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2641 for (i = 0; i <= count; i++)
2643 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2647 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2650 else if (insn == 0xc6)
2652 int start = *entry >> 4;
2653 int count = (*entry++) & 0xf;
2656 /* Only registers WR0..WR15 are valid. */
2657 if (start + count >= 16)
2660 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2661 for (i = 0; i <= count; i++)
2663 cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
2667 else if (insn == 0xc7)
2669 int mask = *entry++;
2672 /* All-zero mask and mask >= 16 is "spare". */
2673 if (mask == 0 || mask >= 16)
2676 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2677 for (i = 0; i < 4; i++)
2678 if (mask & (1 << i))
2680 cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
2684 else if ((insn & 0xf8) == 0xc0)
2686 int count = insn & 0x7;
2689 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2690 for (i = 0; i <= count; i++)
2692 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
2696 else if (insn == 0xc8)
2698 int start = *entry >> 4;
2699 int count = (*entry++) & 0xf;
2702 /* Only registers D0..D31 are valid. */
2703 if (start + count >= 16)
2706 /* Pop VFP double-precision registers
2707 D[16+start]..D[16+start+count]. */
2708 for (i = 0; i <= count; i++)
2710 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
2714 else if (insn == 0xc9)
2716 int start = *entry >> 4;
2717 int count = (*entry++) & 0xf;
2720 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2721 for (i = 0; i <= count; i++)
2723 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2727 else if ((insn & 0xf8) == 0xd0)
2729 int count = insn & 0x7;
2732 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2733 for (i = 0; i <= count; i++)
2735 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2741 /* Everything else is "spare". */
2746 /* If we restore SP from a register, assume this was the frame register.
2747 Otherwise just fall back to SP as frame register. */
2748 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2749 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2751 cache->framereg = ARM_SP_REGNUM;
2753 /* Determine offset to previous frame. */
2755 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2757 /* We already got the previous SP. */
2758 cache->prev_sp = vsp;
2763 /* Unwinding via ARM exception table entries. Note that the sniffer
2764 already computes a filled-in prologue cache, which is then used
2765 with the same arm_prologue_this_id and arm_prologue_prev_register
2766 routines also used for prologue-parsing based unwinding. */
2769 arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2770 struct frame_info *this_frame,
2771 void **this_prologue_cache)
2773 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2774 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2775 CORE_ADDR addr_in_block, exidx_region, func_start;
2776 struct arm_prologue_cache *cache;
2779 /* See if we have an ARM exception table entry covering this address. */
2780 addr_in_block = get_frame_address_in_block (this_frame);
2781 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2785 /* The ARM exception table does not describe unwind information
2786 for arbitrary PC values, but is guaranteed to be correct only
2787 at call sites. We have to decide here whether we want to use
2788 ARM exception table information for this frame, or fall back
2789 to using prologue parsing. (Note that if we have DWARF CFI,
2790 this sniffer isn't even called -- CFI is always preferred.)
2792 Before we make this decision, however, we check whether we
2793 actually have *symbol* information for the current frame.
2794 If not, prologue parsing would not work anyway, so we might
2795 as well use the exception table and hope for the best. */
2796 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2800 /* If the next frame is "normal", we are at a call site in this
2801 frame, so exception information is guaranteed to be valid. */
2802 if (get_next_frame (this_frame)
2803 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2806 /* We also assume exception information is valid if we're currently
2807 blocked in a system call. The system library is supposed to
2808 ensure this, so that e.g. pthread cancellation works. */
2809 if (arm_frame_is_thumb (this_frame))
2813 if (safe_read_memory_integer (get_frame_pc (this_frame) - 2, 2,
2814 byte_order_for_code, &insn)
2815 && (insn & 0xff00) == 0xdf00 /* svc */)
2822 if (safe_read_memory_integer (get_frame_pc (this_frame) - 4, 4,
2823 byte_order_for_code, &insn)
2824 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2828 /* Bail out if we don't know that exception information is valid. */
2832 /* The ARM exception index does not mark the *end* of the region
2833 covered by the entry, and some functions will not have any entry.
2834 To correctly recognize the end of the covered region, the linker
2835 should have inserted dummy records with a CANTUNWIND marker.
2837 Unfortunately, current versions of GNU ld do not reliably do
2838 this, and thus we may have found an incorrect entry above.
2839 As a (temporary) sanity check, we only use the entry if it
2840 lies *within* the bounds of the function. Note that this check
2841 might reject perfectly valid entries that just happen to cover
2842 multiple functions; therefore this check ought to be removed
2843 once the linker is fixed. */
2844 if (func_start > exidx_region)
2848 /* Decode the list of unwinding instructions into a prologue cache.
2849 Note that this may fail due to e.g. a "refuse to unwind" code. */
2850 cache = arm_exidx_fill_cache (this_frame, entry);
2854 *this_prologue_cache = cache;
2858 struct frame_unwind arm_exidx_unwind = {
2860 default_frame_unwind_stop_reason,
2861 arm_prologue_this_id,
2862 arm_prologue_prev_register,
2864 arm_exidx_unwind_sniffer
2867 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2868 trampoline, return the target PC. Otherwise return 0.
2870 void call0a (char c, short s, int i, long l) {}
2874 (*pointer_to_call0a) (c, s, i, l);
2877 Instead of calling a stub library function _call_via_xx (xx is
2878 the register name), GCC may inline the trampoline in the object
2879 file as below (register r2 has the address of call0a).
2882 .type main, %function
2891 The trampoline 'bx r2' doesn't belong to main. */
2894 arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2896 /* The heuristics of recognizing such trampoline is that FRAME is
2897 executing in Thumb mode and the instruction on PC is 'bx Rm'. */
2898 if (arm_frame_is_thumb (frame))
2902 if (target_read_memory (pc, buf, 2) == 0)
2904 struct gdbarch *gdbarch = get_frame_arch (frame);
2905 enum bfd_endian byte_order_for_code
2906 = gdbarch_byte_order_for_code (gdbarch);
2908 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2910 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2913 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2915 /* Clear the LSB so that gdb core sets step-resume
2916 breakpoint at the right address. */
2917 return UNMAKE_THUMB_ADDR (dest);
2925 static struct arm_prologue_cache *
2926 arm_make_stub_cache (struct frame_info *this_frame)
2928 struct arm_prologue_cache *cache;
2930 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2931 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2933 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2938 /* Our frame ID for a stub frame is the current SP and LR. */
2941 arm_stub_this_id (struct frame_info *this_frame,
2943 struct frame_id *this_id)
2945 struct arm_prologue_cache *cache;
2947 if (*this_cache == NULL)
2948 *this_cache = arm_make_stub_cache (this_frame);
2949 cache = *this_cache;
2951 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
2955 arm_stub_unwind_sniffer (const struct frame_unwind *self,
2956 struct frame_info *this_frame,
2957 void **this_prologue_cache)
2959 CORE_ADDR addr_in_block;
2961 CORE_ADDR pc, start_addr;
2964 addr_in_block = get_frame_address_in_block (this_frame);
2965 pc = get_frame_pc (this_frame);
2966 if (in_plt_section (addr_in_block)
2967 /* We also use the stub winder if the target memory is unreadable
2968 to avoid having the prologue unwinder trying to read it. */
2969 || target_read_memory (pc, dummy, 4) != 0)
2972 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2973 && arm_skip_bx_reg (this_frame, pc) != 0)
2979 struct frame_unwind arm_stub_unwind = {
2981 default_frame_unwind_stop_reason,
2983 arm_prologue_prev_register,
2985 arm_stub_unwind_sniffer
2988 /* Put here the code to store, into CACHE->saved_regs, the addresses
2989 of the saved registers of frame described by THIS_FRAME. CACHE is
2992 static struct arm_prologue_cache *
2993 arm_m_exception_cache (struct frame_info *this_frame)
2995 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2996 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2997 struct arm_prologue_cache *cache;
2998 CORE_ADDR unwound_sp;
3001 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
3002 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
3004 unwound_sp = get_frame_register_unsigned (this_frame,
3007 /* The hardware saves eight 32-bit words, comprising xPSR,
3008 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
3009 "B1.5.6 Exception entry behavior" in
3010 "ARMv7-M Architecture Reference Manual". */
3011 cache->saved_regs[0].addr = unwound_sp;
3012 cache->saved_regs[1].addr = unwound_sp + 4;
3013 cache->saved_regs[2].addr = unwound_sp + 8;
3014 cache->saved_regs[3].addr = unwound_sp + 12;
3015 cache->saved_regs[12].addr = unwound_sp + 16;
3016 cache->saved_regs[14].addr = unwound_sp + 20;
3017 cache->saved_regs[15].addr = unwound_sp + 24;
3018 cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;
3020 /* If bit 9 of the saved xPSR is set, then there is a four-byte
3021 aligner between the top of the 32-byte stack frame and the
3022 previous context's stack pointer. */
3023 cache->prev_sp = unwound_sp + 32;
3024 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
3025 && (xpsr & (1 << 9)) != 0)
3026 cache->prev_sp += 4;
3031 /* Implementation of function hook 'this_id' in
3032 'struct frame_uwnind'. */
3035 arm_m_exception_this_id (struct frame_info *this_frame,
3037 struct frame_id *this_id)
3039 struct arm_prologue_cache *cache;
3041 if (*this_cache == NULL)
3042 *this_cache = arm_m_exception_cache (this_frame);
3043 cache = *this_cache;
3045 /* Our frame ID for a stub frame is the current SP and LR. */
3046 *this_id = frame_id_build (cache->prev_sp,
3047 get_frame_pc (this_frame));
3050 /* Implementation of function hook 'prev_register' in
3051 'struct frame_uwnind'. */
3053 static struct value *
3054 arm_m_exception_prev_register (struct frame_info *this_frame,
3058 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3059 struct arm_prologue_cache *cache;
3061 if (*this_cache == NULL)
3062 *this_cache = arm_m_exception_cache (this_frame);
3063 cache = *this_cache;
3065 /* The value was already reconstructed into PREV_SP. */
3066 if (prev_regnum == ARM_SP_REGNUM)
3067 return frame_unwind_got_constant (this_frame, prev_regnum,
3070 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
3074 /* Implementation of function hook 'sniffer' in
3075 'struct frame_uwnind'. */
3078 arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3079 struct frame_info *this_frame,
3080 void **this_prologue_cache)
3082 CORE_ADDR this_pc = get_frame_pc (this_frame);
3084 /* No need to check is_m; this sniffer is only registered for
3085 M-profile architectures. */
3087 /* Exception frames return to one of these magic PCs. Other values
3088 are not defined as of v7-M. See details in "B1.5.8 Exception
3089 return behavior" in "ARMv7-M Architecture Reference Manual". */
3090 if (this_pc == 0xfffffff1 || this_pc == 0xfffffff9
3091 || this_pc == 0xfffffffd)
3097 /* Frame unwinder for M-profile exceptions. */
3099 struct frame_unwind arm_m_exception_unwind =
3102 default_frame_unwind_stop_reason,
3103 arm_m_exception_this_id,
3104 arm_m_exception_prev_register,
3106 arm_m_exception_unwind_sniffer
3110 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
3112 struct arm_prologue_cache *cache;
3114 if (*this_cache == NULL)
3115 *this_cache = arm_make_prologue_cache (this_frame);
3116 cache = *this_cache;
3118 return cache->prev_sp - cache->framesize;
3121 struct frame_base arm_normal_base = {
3122 &arm_prologue_unwind,
3123 arm_normal_frame_base,
3124 arm_normal_frame_base,
3125 arm_normal_frame_base
3128 /* Assuming THIS_FRAME is a dummy, return the frame ID of that
3129 dummy frame. The frame ID's base needs to match the TOS value
3130 saved by save_dummy_frame_tos() and returned from
3131 arm_push_dummy_call, and the PC needs to match the dummy frame's
3134 static struct frame_id
3135 arm_dummy_id (struct gdbarch *gdbarch, struct frame_info *this_frame)
3137 return frame_id_build (get_frame_register_unsigned (this_frame,
3139 get_frame_pc (this_frame));
3142 /* Given THIS_FRAME, find the previous frame's resume PC (which will
3143 be used to construct the previous frame's ID, after looking up the
3144 containing function). */
3147 arm_unwind_pc (struct gdbarch *gdbarch, struct frame_info *this_frame)
3150 pc = frame_unwind_register_unsigned (this_frame, ARM_PC_REGNUM);
3151 return arm_addr_bits_remove (gdbarch, pc);
3155 arm_unwind_sp (struct gdbarch *gdbarch, struct frame_info *this_frame)
3157 return frame_unwind_register_unsigned (this_frame, ARM_SP_REGNUM);
3160 static struct value *
3161 arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
3164 struct gdbarch * gdbarch = get_frame_arch (this_frame);
3166 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
3171 /* The PC is normally copied from the return column, which
3172 describes saves of LR. However, that version may have an
3173 extra bit set to indicate Thumb state. The bit is not
3175 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3176 return frame_unwind_got_constant (this_frame, regnum,
3177 arm_addr_bits_remove (gdbarch, lr));
3180 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3181 cpsr = get_frame_register_unsigned (this_frame, regnum);
3182 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3183 if (IS_THUMB_ADDR (lr))
3187 return frame_unwind_got_constant (this_frame, regnum, cpsr);
3190 internal_error (__FILE__, __LINE__,
3191 _("Unexpected register %d"), regnum);
3196 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3197 struct dwarf2_frame_state_reg *reg,
3198 struct frame_info *this_frame)
3204 reg->how = DWARF2_FRAME_REG_FN;
3205 reg->loc.fn = arm_dwarf2_prev_register;
3208 reg->how = DWARF2_FRAME_REG_CFA;
3213 /* Return true if we are in the function's epilogue, i.e. after the
3214 instruction that destroyed the function's stack frame. */
3217 thumb_in_function_epilogue_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3219 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3220 unsigned int insn, insn2;
3221 int found_return = 0, found_stack_adjust = 0;
3222 CORE_ADDR func_start, func_end;
3226 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3229 /* The epilogue is a sequence of instructions along the following lines:
3231 - add stack frame size to SP or FP
3232 - [if frame pointer used] restore SP from FP
3233 - restore registers from SP [may include PC]
3234 - a return-type instruction [if PC wasn't already restored]
3236 In a first pass, we scan forward from the current PC and verify the
3237 instructions we find as compatible with this sequence, ending in a
3240 However, this is not sufficient to distinguish indirect function calls
3241 within a function from indirect tail calls in the epilogue in some cases.
3242 Therefore, if we didn't already find any SP-changing instruction during
3243 forward scan, we add a backward scanning heuristic to ensure we actually
3244 are in the epilogue. */
3247 while (scan_pc < func_end && !found_return)
3249 if (target_read_memory (scan_pc, buf, 2))
3253 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3255 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3257 else if (insn == 0x46f7) /* mov pc, lr */
3259 else if (thumb_instruction_restores_sp (insn))
3261 if ((insn & 0xff00) == 0xbd00) /* pop <registers, PC> */
3264 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
3266 if (target_read_memory (scan_pc, buf, 2))
3270 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3272 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3274 if (insn2 & 0x8000) /* <registers> include PC. */
3277 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3278 && (insn2 & 0x0fff) == 0x0b04)
3280 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3283 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3284 && (insn2 & 0x0e00) == 0x0a00)
3296 /* Since any instruction in the epilogue sequence, with the possible
3297 exception of return itself, updates the stack pointer, we need to
3298 scan backwards for at most one instruction. Try either a 16-bit or
3299 a 32-bit instruction. This is just a heuristic, so we do not worry
3300 too much about false positives. */
3302 if (pc - 4 < func_start)
3304 if (target_read_memory (pc - 4, buf, 4))
3307 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3308 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3310 if (thumb_instruction_restores_sp (insn2))
3311 found_stack_adjust = 1;
3312 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3313 found_stack_adjust = 1;
3314 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3315 && (insn2 & 0x0fff) == 0x0b04)
3316 found_stack_adjust = 1;
3317 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3318 && (insn2 & 0x0e00) == 0x0a00)
3319 found_stack_adjust = 1;
3321 return found_stack_adjust;
3324 /* Return true if we are in the function's epilogue, i.e. after the
3325 instruction that destroyed the function's stack frame. */
3328 arm_in_function_epilogue_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3330 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3333 CORE_ADDR func_start, func_end;
3335 if (arm_pc_is_thumb (gdbarch, pc))
3336 return thumb_in_function_epilogue_p (gdbarch, pc);
3338 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3341 /* We are in the epilogue if the previous instruction was a stack
3342 adjustment and the next instruction is a possible return (bx, mov
3343 pc, or pop). We could have to scan backwards to find the stack
3344 adjustment, or forwards to find the return, but this is a decent
3345 approximation. First scan forwards. */
3348 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3349 if (bits (insn, 28, 31) != INST_NV)
3351 if ((insn & 0x0ffffff0) == 0x012fff10)
3354 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3357 else if ((insn & 0x0fff0000) == 0x08bd0000
3358 && (insn & 0x0000c000) != 0)
3359 /* POP (LDMIA), including PC or LR. */
3366 /* Scan backwards. This is just a heuristic, so do not worry about
3367 false positives from mode changes. */
3369 if (pc < func_start + 4)
3372 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
3373 if (arm_instruction_restores_sp (insn))
3380 /* When arguments must be pushed onto the stack, they go on in reverse
3381 order. The code below implements a FILO (stack) to do this. */
3386 struct stack_item *prev;
3390 static struct stack_item *
3391 push_stack_item (struct stack_item *prev, const void *contents, int len)
3393 struct stack_item *si;
3394 si = xmalloc (sizeof (struct stack_item));
3395 si->data = xmalloc (len);
3398 memcpy (si->data, contents, len);
3402 static struct stack_item *
3403 pop_stack_item (struct stack_item *si)
3405 struct stack_item *dead = si;
3413 /* Return the alignment (in bytes) of the given type. */
3416 arm_type_align (struct type *t)
3422 t = check_typedef (t);
3423 switch (TYPE_CODE (t))
3426 /* Should never happen. */
3427 internal_error (__FILE__, __LINE__, _("unknown type alignment"));
3431 case TYPE_CODE_ENUM:
3435 case TYPE_CODE_RANGE:
3437 case TYPE_CODE_CHAR:
3438 case TYPE_CODE_BOOL:
3439 return TYPE_LENGTH (t);
3441 case TYPE_CODE_ARRAY:
3442 case TYPE_CODE_COMPLEX:
3443 /* TODO: What about vector types? */
3444 return arm_type_align (TYPE_TARGET_TYPE (t));
3446 case TYPE_CODE_STRUCT:
3447 case TYPE_CODE_UNION:
3449 for (n = 0; n < TYPE_NFIELDS (t); n++)
3451 falign = arm_type_align (TYPE_FIELD_TYPE (t, n));
3459 /* Possible base types for a candidate for passing and returning in
3462 enum arm_vfp_cprc_base_type
3471 /* The length of one element of base type B. */
3474 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3478 case VFP_CPRC_SINGLE:
3480 case VFP_CPRC_DOUBLE:
3482 case VFP_CPRC_VEC64:
3484 case VFP_CPRC_VEC128:
3487 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3492 /* The character ('s', 'd' or 'q') for the type of VFP register used
3493 for passing base type B. */
3496 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3500 case VFP_CPRC_SINGLE:
3502 case VFP_CPRC_DOUBLE:
3504 case VFP_CPRC_VEC64:
3506 case VFP_CPRC_VEC128:
3509 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3514 /* Determine whether T may be part of a candidate for passing and
3515 returning in VFP registers, ignoring the limit on the total number
3516 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3517 classification of the first valid component found; if it is not
3518 VFP_CPRC_UNKNOWN, all components must have the same classification
3519 as *BASE_TYPE. If it is found that T contains a type not permitted
3520 for passing and returning in VFP registers, a type differently
3521 classified from *BASE_TYPE, or two types differently classified
3522 from each other, return -1, otherwise return the total number of
3523 base-type elements found (possibly 0 in an empty structure or
3524 array). Vector types are not currently supported, matching the
3525 generic AAPCS support. */
3528 arm_vfp_cprc_sub_candidate (struct type *t,
3529 enum arm_vfp_cprc_base_type *base_type)
3531 t = check_typedef (t);
3532 switch (TYPE_CODE (t))
3535 switch (TYPE_LENGTH (t))
3538 if (*base_type == VFP_CPRC_UNKNOWN)
3539 *base_type = VFP_CPRC_SINGLE;
3540 else if (*base_type != VFP_CPRC_SINGLE)
3545 if (*base_type == VFP_CPRC_UNKNOWN)
3546 *base_type = VFP_CPRC_DOUBLE;
3547 else if (*base_type != VFP_CPRC_DOUBLE)
3556 case TYPE_CODE_COMPLEX:
3557 /* Arguments of complex T where T is one of the types float or
3558 double get treated as if they are implemented as:
3567 switch (TYPE_LENGTH (t))
3570 if (*base_type == VFP_CPRC_UNKNOWN)
3571 *base_type = VFP_CPRC_SINGLE;
3572 else if (*base_type != VFP_CPRC_SINGLE)
3577 if (*base_type == VFP_CPRC_UNKNOWN)
3578 *base_type = VFP_CPRC_DOUBLE;
3579 else if (*base_type != VFP_CPRC_DOUBLE)
3588 case TYPE_CODE_ARRAY:
3592 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t), base_type);
3595 if (TYPE_LENGTH (t) == 0)
3597 gdb_assert (count == 0);
3600 else if (count == 0)
3602 unitlen = arm_vfp_cprc_unit_length (*base_type);
3603 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3604 return TYPE_LENGTH (t) / unitlen;
3608 case TYPE_CODE_STRUCT:
3613 for (i = 0; i < TYPE_NFIELDS (t); i++)
3615 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3617 if (sub_count == -1)
3621 if (TYPE_LENGTH (t) == 0)
3623 gdb_assert (count == 0);
3626 else if (count == 0)
3628 unitlen = arm_vfp_cprc_unit_length (*base_type);
3629 if (TYPE_LENGTH (t) != unitlen * count)
3634 case TYPE_CODE_UNION:
3639 for (i = 0; i < TYPE_NFIELDS (t); i++)
3641 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3643 if (sub_count == -1)
3645 count = (count > sub_count ? count : sub_count);
3647 if (TYPE_LENGTH (t) == 0)
3649 gdb_assert (count == 0);
3652 else if (count == 0)
3654 unitlen = arm_vfp_cprc_unit_length (*base_type);
3655 if (TYPE_LENGTH (t) != unitlen * count)
3667 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3668 if passed to or returned from a non-variadic function with the VFP
3669 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3670 *BASE_TYPE to the base type for T and *COUNT to the number of
3671 elements of that base type before returning. */
3674 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3677 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3678 int c = arm_vfp_cprc_sub_candidate (t, &b);
3679 if (c <= 0 || c > 4)
3686 /* Return 1 if the VFP ABI should be used for passing arguments to and
3687 returning values from a function of type FUNC_TYPE, 0
3691 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3693 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3694 /* Variadic functions always use the base ABI. Assume that functions
3695 without debug info are not variadic. */
3696 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3698 /* The VFP ABI is only supported as a variant of AAPCS. */
3699 if (tdep->arm_abi != ARM_ABI_AAPCS)
3701 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3704 /* We currently only support passing parameters in integer registers, which
3705 conforms with GCC's default model, and VFP argument passing following
3706 the VFP variant of AAPCS. Several other variants exist and
3707 we should probably support some of them based on the selected ABI. */
3710 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
3711 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3712 struct value **args, CORE_ADDR sp, int struct_return,
3713 CORE_ADDR struct_addr)
3715 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3719 struct stack_item *si = NULL;
3722 unsigned vfp_regs_free = (1 << 16) - 1;
3724 /* Determine the type of this function and whether the VFP ABI
3726 ftype = check_typedef (value_type (function));
3727 if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3728 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3729 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
3731 /* Set the return address. For the ARM, the return breakpoint is
3732 always at BP_ADDR. */
3733 if (arm_pc_is_thumb (gdbarch, bp_addr))
3735 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
3737 /* Walk through the list of args and determine how large a temporary
3738 stack is required. Need to take care here as structs may be
3739 passed on the stack, and we have to push them. */
3742 argreg = ARM_A1_REGNUM;
3745 /* The struct_return pointer occupies the first parameter
3746 passing register. */
3750 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
3751 gdbarch_register_name (gdbarch, argreg),
3752 paddress (gdbarch, struct_addr));
3753 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3757 for (argnum = 0; argnum < nargs; argnum++)
3760 struct type *arg_type;
3761 struct type *target_type;
3762 enum type_code typecode;
3763 const bfd_byte *val;
3765 enum arm_vfp_cprc_base_type vfp_base_type;
3767 int may_use_core_reg = 1;
3769 arg_type = check_typedef (value_type (args[argnum]));
3770 len = TYPE_LENGTH (arg_type);
3771 target_type = TYPE_TARGET_TYPE (arg_type);
3772 typecode = TYPE_CODE (arg_type);
3773 val = value_contents (args[argnum]);
3775 align = arm_type_align (arg_type);
3776 /* Round alignment up to a whole number of words. */
3777 align = (align + INT_REGISTER_SIZE - 1) & ~(INT_REGISTER_SIZE - 1);
3778 /* Different ABIs have different maximum alignments. */
3779 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3781 /* The APCS ABI only requires word alignment. */
3782 align = INT_REGISTER_SIZE;
3786 /* The AAPCS requires at most doubleword alignment. */
3787 if (align > INT_REGISTER_SIZE * 2)
3788 align = INT_REGISTER_SIZE * 2;
3792 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3800 /* Because this is a CPRC it cannot go in a core register or
3801 cause a core register to be skipped for alignment.
3802 Either it goes in VFP registers and the rest of this loop
3803 iteration is skipped for this argument, or it goes on the
3804 stack (and the stack alignment code is correct for this
3806 may_use_core_reg = 0;
3808 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3809 shift = unit_length / 4;
3810 mask = (1 << (shift * vfp_base_count)) - 1;
3811 for (regno = 0; regno < 16; regno += shift)
3812 if (((vfp_regs_free >> regno) & mask) == mask)
3821 vfp_regs_free &= ~(mask << regno);
3822 reg_scaled = regno / shift;
3823 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3824 for (i = 0; i < vfp_base_count; i++)
3828 if (reg_char == 'q')
3829 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
3830 val + i * unit_length);
3833 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3834 reg_char, reg_scaled + i);
3835 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3837 regcache_cooked_write (regcache, regnum,
3838 val + i * unit_length);
3845 /* This CPRC could not go in VFP registers, so all VFP
3846 registers are now marked as used. */
3851 /* Push stack padding for dowubleword alignment. */
3852 if (nstack & (align - 1))
3854 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3855 nstack += INT_REGISTER_SIZE;
3858 /* Doubleword aligned quantities must go in even register pairs. */
3859 if (may_use_core_reg
3860 && argreg <= ARM_LAST_ARG_REGNUM
3861 && align > INT_REGISTER_SIZE
3865 /* If the argument is a pointer to a function, and it is a
3866 Thumb function, create a LOCAL copy of the value and set
3867 the THUMB bit in it. */
3868 if (TYPE_CODE_PTR == typecode
3869 && target_type != NULL
3870 && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
3872 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
3873 if (arm_pc_is_thumb (gdbarch, regval))
3875 bfd_byte *copy = alloca (len);
3876 store_unsigned_integer (copy, len, byte_order,
3877 MAKE_THUMB_ADDR (regval));
3882 /* Copy the argument to general registers or the stack in
3883 register-sized pieces. Large arguments are split between
3884 registers and stack. */
3887 int partial_len = len < INT_REGISTER_SIZE ? len : INT_REGISTER_SIZE;
3889 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
3891 /* The argument is being passed in a general purpose
3894 = extract_unsigned_integer (val, partial_len, byte_order);
3895 if (byte_order == BFD_ENDIAN_BIG)
3896 regval <<= (INT_REGISTER_SIZE - partial_len) * 8;
3898 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
3900 gdbarch_register_name
3902 phex (regval, INT_REGISTER_SIZE));
3903 regcache_cooked_write_unsigned (regcache, argreg, regval);
3908 /* Push the arguments onto the stack. */
3910 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3912 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3913 nstack += INT_REGISTER_SIZE;
3920 /* If we have an odd number of words to push, then decrement the stack
3921 by one word now, so first stack argument will be dword aligned. */
3928 write_memory (sp, si->data, si->len);
3929 si = pop_stack_item (si);
3932 /* Finally, update teh SP register. */
3933 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3939 /* Always align the frame to an 8-byte boundary. This is required on
3940 some platforms and harmless on the rest. */
3943 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3945 /* Align the stack to eight bytes. */
3946 return sp & ~ (CORE_ADDR) 7;
3950 print_fpu_flags (struct ui_file *file, int flags)
3952 if (flags & (1 << 0))
3953 fputs_filtered ("IVO ", file);
3954 if (flags & (1 << 1))
3955 fputs_filtered ("DVZ ", file);
3956 if (flags & (1 << 2))
3957 fputs_filtered ("OFL ", file);
3958 if (flags & (1 << 3))
3959 fputs_filtered ("UFL ", file);
3960 if (flags & (1 << 4))
3961 fputs_filtered ("INX ", file);
3962 fputc_filtered ('\n', file);
3965 /* Print interesting information about the floating point processor
3966 (if present) or emulator. */
3968 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
3969 struct frame_info *frame, const char *args)
3971 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
3974 type = (status >> 24) & 127;
3975 if (status & (1 << 31))
3976 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
3978 fprintf_filtered (file, _("Software FPU type %d\n"), type);
3979 /* i18n: [floating point unit] mask */
3980 fputs_filtered (_("mask: "), file);
3981 print_fpu_flags (file, status >> 16);
3982 /* i18n: [floating point unit] flags */
3983 fputs_filtered (_("flags: "), file);
3984 print_fpu_flags (file, status);
3987 /* Construct the ARM extended floating point type. */
3988 static struct type *
3989 arm_ext_type (struct gdbarch *gdbarch)
3991 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3993 if (!tdep->arm_ext_type)
3995 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
3996 floatformats_arm_ext);
3998 return tdep->arm_ext_type;
4001 static struct type *
4002 arm_neon_double_type (struct gdbarch *gdbarch)
4004 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4006 if (tdep->neon_double_type == NULL)
4008 struct type *t, *elem;
4010 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
4012 elem = builtin_type (gdbarch)->builtin_uint8;
4013 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
4014 elem = builtin_type (gdbarch)->builtin_uint16;
4015 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
4016 elem = builtin_type (gdbarch)->builtin_uint32;
4017 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
4018 elem = builtin_type (gdbarch)->builtin_uint64;
4019 append_composite_type_field (t, "u64", elem);
4020 elem = builtin_type (gdbarch)->builtin_float;
4021 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
4022 elem = builtin_type (gdbarch)->builtin_double;
4023 append_composite_type_field (t, "f64", elem);
4025 TYPE_VECTOR (t) = 1;
4026 TYPE_NAME (t) = "neon_d";
4027 tdep->neon_double_type = t;
4030 return tdep->neon_double_type;
4033 /* FIXME: The vector types are not correctly ordered on big-endian
4034 targets. Just as s0 is the low bits of d0, d0[0] is also the low
4035 bits of d0 - regardless of what unit size is being held in d0. So
4036 the offset of the first uint8 in d0 is 7, but the offset of the
4037 first float is 4. This code works as-is for little-endian
4040 static struct type *
4041 arm_neon_quad_type (struct gdbarch *gdbarch)
4043 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4045 if (tdep->neon_quad_type == NULL)
4047 struct type *t, *elem;
4049 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
4051 elem = builtin_type (gdbarch)->builtin_uint8;
4052 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
4053 elem = builtin_type (gdbarch)->builtin_uint16;
4054 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
4055 elem = builtin_type (gdbarch)->builtin_uint32;
4056 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
4057 elem = builtin_type (gdbarch)->builtin_uint64;
4058 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
4059 elem = builtin_type (gdbarch)->builtin_float;
4060 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
4061 elem = builtin_type (gdbarch)->builtin_double;
4062 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
4064 TYPE_VECTOR (t) = 1;
4065 TYPE_NAME (t) = "neon_q";
4066 tdep->neon_quad_type = t;
4069 return tdep->neon_quad_type;
4072 /* Return the GDB type object for the "standard" data type of data in
4075 static struct type *
4076 arm_register_type (struct gdbarch *gdbarch, int regnum)
4078 int num_regs = gdbarch_num_regs (gdbarch);
4080 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
4081 && regnum >= num_regs && regnum < num_regs + 32)
4082 return builtin_type (gdbarch)->builtin_float;
4084 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
4085 && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
4086 return arm_neon_quad_type (gdbarch);
4088 /* If the target description has register information, we are only
4089 in this function so that we can override the types of
4090 double-precision registers for NEON. */
4091 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
4093 struct type *t = tdesc_register_type (gdbarch, regnum);
4095 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
4096 && TYPE_CODE (t) == TYPE_CODE_FLT
4097 && gdbarch_tdep (gdbarch)->have_neon)
4098 return arm_neon_double_type (gdbarch);
4103 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
4105 if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
4106 return builtin_type (gdbarch)->builtin_void;
4108 return arm_ext_type (gdbarch);
4110 else if (regnum == ARM_SP_REGNUM)
4111 return builtin_type (gdbarch)->builtin_data_ptr;
4112 else if (regnum == ARM_PC_REGNUM)
4113 return builtin_type (gdbarch)->builtin_func_ptr;
4114 else if (regnum >= ARRAY_SIZE (arm_register_names))
4115 /* These registers are only supported on targets which supply
4116 an XML description. */
4117 return builtin_type (gdbarch)->builtin_int0;
4119 return builtin_type (gdbarch)->builtin_uint32;
4122 /* Map a DWARF register REGNUM onto the appropriate GDB register
4126 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
4128 /* Core integer regs. */
4129 if (reg >= 0 && reg <= 15)
4132 /* Legacy FPA encoding. These were once used in a way which
4133 overlapped with VFP register numbering, so their use is
4134 discouraged, but GDB doesn't support the ARM toolchain
4135 which used them for VFP. */
4136 if (reg >= 16 && reg <= 23)
4137 return ARM_F0_REGNUM + reg - 16;
4139 /* New assignments for the FPA registers. */
4140 if (reg >= 96 && reg <= 103)
4141 return ARM_F0_REGNUM + reg - 96;
4143 /* WMMX register assignments. */
4144 if (reg >= 104 && reg <= 111)
4145 return ARM_WCGR0_REGNUM + reg - 104;
4147 if (reg >= 112 && reg <= 127)
4148 return ARM_WR0_REGNUM + reg - 112;
4150 if (reg >= 192 && reg <= 199)
4151 return ARM_WC0_REGNUM + reg - 192;
4153 /* VFP v2 registers. A double precision value is actually
4154 in d1 rather than s2, but the ABI only defines numbering
4155 for the single precision registers. This will "just work"
4156 in GDB for little endian targets (we'll read eight bytes,
4157 starting in s0 and then progressing to s1), but will be
4158 reversed on big endian targets with VFP. This won't
4159 be a problem for the new Neon quad registers; you're supposed
4160 to use DW_OP_piece for those. */
4161 if (reg >= 64 && reg <= 95)
4165 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
4166 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4170 /* VFP v3 / Neon registers. This range is also used for VFP v2
4171 registers, except that it now describes d0 instead of s0. */
4172 if (reg >= 256 && reg <= 287)
4176 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
4177 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4184 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4186 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4189 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
4191 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4192 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4194 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4195 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4197 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4198 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4200 if (reg < NUM_GREGS)
4201 return SIM_ARM_R0_REGNUM + reg;
4204 if (reg < NUM_FREGS)
4205 return SIM_ARM_FP0_REGNUM + reg;
4208 if (reg < NUM_SREGS)
4209 return SIM_ARM_FPS_REGNUM + reg;
4212 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4215 /* NOTE: cagney/2001-08-20: Both convert_from_extended() and
4216 convert_to_extended() use floatformat_arm_ext_littlebyte_bigword.
4217 It is thought that this is is the floating-point register format on
4218 little-endian systems. */
4221 convert_from_extended (const struct floatformat *fmt, const void *ptr,
4222 void *dbl, int endianess)
4226 if (endianess == BFD_ENDIAN_BIG)
4227 floatformat_to_doublest (&floatformat_arm_ext_big, ptr, &d);
4229 floatformat_to_doublest (&floatformat_arm_ext_littlebyte_bigword,
4231 floatformat_from_doublest (fmt, &d, dbl);
4235 convert_to_extended (const struct floatformat *fmt, void *dbl, const void *ptr,
4240 floatformat_to_doublest (fmt, ptr, &d);
4241 if (endianess == BFD_ENDIAN_BIG)
4242 floatformat_from_doublest (&floatformat_arm_ext_big, &d, dbl);
4244 floatformat_from_doublest (&floatformat_arm_ext_littlebyte_bigword,
4249 condition_true (unsigned long cond, unsigned long status_reg)
4251 if (cond == INST_AL || cond == INST_NV)
4257 return ((status_reg & FLAG_Z) != 0);
4259 return ((status_reg & FLAG_Z) == 0);
4261 return ((status_reg & FLAG_C) != 0);
4263 return ((status_reg & FLAG_C) == 0);
4265 return ((status_reg & FLAG_N) != 0);
4267 return ((status_reg & FLAG_N) == 0);
4269 return ((status_reg & FLAG_V) != 0);
4271 return ((status_reg & FLAG_V) == 0);
4273 return ((status_reg & (FLAG_C | FLAG_Z)) == FLAG_C);
4275 return ((status_reg & (FLAG_C | FLAG_Z)) != FLAG_C);
4277 return (((status_reg & FLAG_N) == 0) == ((status_reg & FLAG_V) == 0));
4279 return (((status_reg & FLAG_N) == 0) != ((status_reg & FLAG_V) == 0));
4281 return (((status_reg & FLAG_Z) == 0)
4282 && (((status_reg & FLAG_N) == 0)
4283 == ((status_reg & FLAG_V) == 0)));
4285 return (((status_reg & FLAG_Z) != 0)
4286 || (((status_reg & FLAG_N) == 0)
4287 != ((status_reg & FLAG_V) == 0)));
4292 static unsigned long
4293 shifted_reg_val (struct frame_info *frame, unsigned long inst, int carry,
4294 unsigned long pc_val, unsigned long status_reg)
4296 unsigned long res, shift;
4297 int rm = bits (inst, 0, 3);
4298 unsigned long shifttype = bits (inst, 5, 6);
4302 int rs = bits (inst, 8, 11);
4303 shift = (rs == 15 ? pc_val + 8
4304 : get_frame_register_unsigned (frame, rs)) & 0xFF;
4307 shift = bits (inst, 7, 11);
4309 res = (rm == ARM_PC_REGNUM
4310 ? (pc_val + (bit (inst, 4) ? 12 : 8))
4311 : get_frame_register_unsigned (frame, rm));
4316 res = shift >= 32 ? 0 : res << shift;
4320 res = shift >= 32 ? 0 : res >> shift;
4326 res = ((res & 0x80000000L)
4327 ? ~((~res) >> shift) : res >> shift);
4330 case 3: /* ROR/RRX */
4333 res = (res >> 1) | (carry ? 0x80000000L : 0);
4335 res = (res >> shift) | (res << (32 - shift));
4339 return res & 0xffffffff;
4342 /* Return number of 1-bits in VAL. */
4345 bitcount (unsigned long val)
4348 for (nbits = 0; val != 0; nbits++)
4349 val &= val - 1; /* Delete rightmost 1-bit in val. */
4353 /* Return the size in bytes of the complete Thumb instruction whose
4354 first halfword is INST1. */
4357 thumb_insn_size (unsigned short inst1)
4359 if ((inst1 & 0xe000) == 0xe000 && (inst1 & 0x1800) != 0)
4366 thumb_advance_itstate (unsigned int itstate)
4368 /* Preserve IT[7:5], the first three bits of the condition. Shift
4369 the upcoming condition flags left by one bit. */
4370 itstate = (itstate & 0xe0) | ((itstate << 1) & 0x1f);
4372 /* If we have finished the IT block, clear the state. */
4373 if ((itstate & 0x0f) == 0)
4379 /* Find the next PC after the current instruction executes. In some
4380 cases we can not statically determine the answer (see the IT state
4381 handling in this function); in that case, a breakpoint may be
4382 inserted in addition to the returned PC, which will be used to set
4383 another breakpoint by our caller. */
4386 thumb_get_next_pc_raw (struct frame_info *frame, CORE_ADDR pc)
4388 struct gdbarch *gdbarch = get_frame_arch (frame);
4389 struct address_space *aspace = get_frame_address_space (frame);
4390 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
4391 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4392 unsigned long pc_val = ((unsigned long) pc) + 4; /* PC after prefetch */
4393 unsigned short inst1;
4394 CORE_ADDR nextpc = pc + 2; /* Default is next instruction. */
4395 unsigned long offset;
4396 ULONGEST status, itstate;
4398 nextpc = MAKE_THUMB_ADDR (nextpc);
4399 pc_val = MAKE_THUMB_ADDR (pc_val);
4401 inst1 = read_memory_unsigned_integer (pc, 2, byte_order_for_code);
4403 /* Thumb-2 conditional execution support. There are eight bits in
4404 the CPSR which describe conditional execution state. Once
4405 reconstructed (they're in a funny order), the low five bits
4406 describe the low bit of the condition for each instruction and
4407 how many instructions remain. The high three bits describe the
4408 base condition. One of the low four bits will be set if an IT
4409 block is active. These bits read as zero on earlier
4411 status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
4412 itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);
4414 /* If-Then handling. On GNU/Linux, where this routine is used, we
4415 use an undefined instruction as a breakpoint. Unlike BKPT, IT
4416 can disable execution of the undefined instruction. So we might
4417 miss the breakpoint if we set it on a skipped conditional
4418 instruction. Because conditional instructions can change the
4419 flags, affecting the execution of further instructions, we may
4420 need to set two breakpoints. */
4422 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint != NULL)
4424 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4426 /* An IT instruction. Because this instruction does not
4427 modify the flags, we can accurately predict the next
4428 executed instruction. */
4429 itstate = inst1 & 0x00ff;
4430 pc += thumb_insn_size (inst1);
4432 while (itstate != 0 && ! condition_true (itstate >> 4, status))
4434 inst1 = read_memory_unsigned_integer (pc, 2,
4435 byte_order_for_code);
4436 pc += thumb_insn_size (inst1);
4437 itstate = thumb_advance_itstate (itstate);
4440 return MAKE_THUMB_ADDR (pc);
4442 else if (itstate != 0)
4444 /* We are in a conditional block. Check the condition. */
4445 if (! condition_true (itstate >> 4, status))
4447 /* Advance to the next executed instruction. */
4448 pc += thumb_insn_size (inst1);
4449 itstate = thumb_advance_itstate (itstate);
4451 while (itstate != 0 && ! condition_true (itstate >> 4, status))
4453 inst1 = read_memory_unsigned_integer (pc, 2,
4454 byte_order_for_code);
4455 pc += thumb_insn_size (inst1);
4456 itstate = thumb_advance_itstate (itstate);
4459 return MAKE_THUMB_ADDR (pc);
4461 else if ((itstate & 0x0f) == 0x08)
4463 /* This is the last instruction of the conditional
4464 block, and it is executed. We can handle it normally
4465 because the following instruction is not conditional,
4466 and we must handle it normally because it is
4467 permitted to branch. Fall through. */
4473 /* There are conditional instructions after this one.
4474 If this instruction modifies the flags, then we can
4475 not predict what the next executed instruction will
4476 be. Fortunately, this instruction is architecturally
4477 forbidden to branch; we know it will fall through.
4478 Start by skipping past it. */
4479 pc += thumb_insn_size (inst1);
4480 itstate = thumb_advance_itstate (itstate);
4482 /* Set a breakpoint on the following instruction. */
4483 gdb_assert ((itstate & 0x0f) != 0);
4484 arm_insert_single_step_breakpoint (gdbarch, aspace,
4485 MAKE_THUMB_ADDR (pc));
4486 cond_negated = (itstate >> 4) & 1;
4488 /* Skip all following instructions with the same
4489 condition. If there is a later instruction in the IT
4490 block with the opposite condition, set the other
4491 breakpoint there. If not, then set a breakpoint on
4492 the instruction after the IT block. */
4495 inst1 = read_memory_unsigned_integer (pc, 2,
4496 byte_order_for_code);
4497 pc += thumb_insn_size (inst1);
4498 itstate = thumb_advance_itstate (itstate);
4500 while (itstate != 0 && ((itstate >> 4) & 1) == cond_negated);
4502 return MAKE_THUMB_ADDR (pc);
4506 else if (itstate & 0x0f)
4508 /* We are in a conditional block. Check the condition. */
4509 int cond = itstate >> 4;
4511 if (! condition_true (cond, status))
4512 /* Advance to the next instruction. All the 32-bit
4513 instructions share a common prefix. */
4514 return MAKE_THUMB_ADDR (pc + thumb_insn_size (inst1));
4516 /* Otherwise, handle the instruction normally. */
4519 if ((inst1 & 0xff00) == 0xbd00) /* pop {rlist, pc} */
4523 /* Fetch the saved PC from the stack. It's stored above
4524 all of the other registers. */
4525 offset = bitcount (bits (inst1, 0, 7)) * INT_REGISTER_SIZE;
4526 sp = get_frame_register_unsigned (frame, ARM_SP_REGNUM);
4527 nextpc = read_memory_unsigned_integer (sp + offset, 4, byte_order);
4529 else if ((inst1 & 0xf000) == 0xd000) /* conditional branch */
4531 unsigned long cond = bits (inst1, 8, 11);
4532 if (cond == 0x0f) /* 0x0f = SWI */
4534 struct gdbarch_tdep *tdep;
4535 tdep = gdbarch_tdep (gdbarch);
4537 if (tdep->syscall_next_pc != NULL)
4538 nextpc = tdep->syscall_next_pc (frame);
4541 else if (cond != 0x0f && condition_true (cond, status))
4542 nextpc = pc_val + (sbits (inst1, 0, 7) << 1);
4544 else if ((inst1 & 0xf800) == 0xe000) /* unconditional branch */
4546 nextpc = pc_val + (sbits (inst1, 0, 10) << 1);
4548 else if (thumb_insn_size (inst1) == 4) /* 32-bit instruction */
4550 unsigned short inst2;
4551 inst2 = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);
4553 /* Default to the next instruction. */
4555 nextpc = MAKE_THUMB_ADDR (nextpc);
4557 if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
4559 /* Branches and miscellaneous control instructions. */
4561 if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
4564 int j1, j2, imm1, imm2;
4566 imm1 = sbits (inst1, 0, 10);
4567 imm2 = bits (inst2, 0, 10);
4568 j1 = bit (inst2, 13);
4569 j2 = bit (inst2, 11);
4571 offset = ((imm1 << 12) + (imm2 << 1));
4572 offset ^= ((!j2) << 22) | ((!j1) << 23);
4574 nextpc = pc_val + offset;
4575 /* For BLX make sure to clear the low bits. */
4576 if (bit (inst2, 12) == 0)
4577 nextpc = nextpc & 0xfffffffc;
4579 else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)
4581 /* SUBS PC, LR, #imm8. */
4582 nextpc = get_frame_register_unsigned (frame, ARM_LR_REGNUM);
4583 nextpc -= inst2 & 0x00ff;
4585 else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
4587 /* Conditional branch. */
4588 if (condition_true (bits (inst1, 6, 9), status))
4590 int sign, j1, j2, imm1, imm2;
4592 sign = sbits (inst1, 10, 10);
4593 imm1 = bits (inst1, 0, 5);
4594 imm2 = bits (inst2, 0, 10);
4595 j1 = bit (inst2, 13);
4596 j2 = bit (inst2, 11);
4598 offset = (sign << 20) + (j2 << 19) + (j1 << 18);
4599 offset += (imm1 << 12) + (imm2 << 1);
4601 nextpc = pc_val + offset;
4605 else if ((inst1 & 0xfe50) == 0xe810)
4607 /* Load multiple or RFE. */
4608 int rn, offset, load_pc = 1;
4610 rn = bits (inst1, 0, 3);
4611 if (bit (inst1, 7) && !bit (inst1, 8))
4614 if (!bit (inst2, 15))
4616 offset = bitcount (inst2) * 4 - 4;
4618 else if (!bit (inst1, 7) && bit (inst1, 8))
4621 if (!bit (inst2, 15))
4625 else if (bit (inst1, 7) && bit (inst1, 8))
4630 else if (!bit (inst1, 7) && !bit (inst1, 8))
4640 CORE_ADDR addr = get_frame_register_unsigned (frame, rn);
4641 nextpc = get_frame_memory_unsigned (frame, addr + offset, 4);
4644 else if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
4646 /* MOV PC or MOVS PC. */
4647 nextpc = get_frame_register_unsigned (frame, bits (inst2, 0, 3));
4648 nextpc = MAKE_THUMB_ADDR (nextpc);
4650 else if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
4654 int rn, load_pc = 1;
4656 rn = bits (inst1, 0, 3);
4657 base = get_frame_register_unsigned (frame, rn);
4658 if (rn == ARM_PC_REGNUM)
4660 base = (base + 4) & ~(CORE_ADDR) 0x3;
4662 base += bits (inst2, 0, 11);
4664 base -= bits (inst2, 0, 11);
4666 else if (bit (inst1, 7))
4667 base += bits (inst2, 0, 11);
4668 else if (bit (inst2, 11))
4670 if (bit (inst2, 10))
4673 base += bits (inst2, 0, 7);
4675 base -= bits (inst2, 0, 7);
4678 else if ((inst2 & 0x0fc0) == 0x0000)
4680 int shift = bits (inst2, 4, 5), rm = bits (inst2, 0, 3);
4681 base += get_frame_register_unsigned (frame, rm) << shift;
4688 nextpc = get_frame_memory_unsigned (frame, base, 4);
4690 else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)
4693 CORE_ADDR tbl_reg, table, offset, length;
4695 tbl_reg = bits (inst1, 0, 3);
4696 if (tbl_reg == 0x0f)
4697 table = pc + 4; /* Regcache copy of PC isn't right yet. */
4699 table = get_frame_register_unsigned (frame, tbl_reg);
4701 offset = get_frame_register_unsigned (frame, bits (inst2, 0, 3));
4702 length = 2 * get_frame_memory_unsigned (frame, table + offset, 1);
4703 nextpc = pc_val + length;
4705 else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)
4708 CORE_ADDR tbl_reg, table, offset, length;
4710 tbl_reg = bits (inst1, 0, 3);
4711 if (tbl_reg == 0x0f)
4712 table = pc + 4; /* Regcache copy of PC isn't right yet. */
4714 table = get_frame_register_unsigned (frame, tbl_reg);
4716 offset = 2 * get_frame_register_unsigned (frame, bits (inst2, 0, 3));
4717 length = 2 * get_frame_memory_unsigned (frame, table + offset, 2);
4718 nextpc = pc_val + length;
4721 else if ((inst1 & 0xff00) == 0x4700) /* bx REG, blx REG */
4723 if (bits (inst1, 3, 6) == 0x0f)
4724 nextpc = UNMAKE_THUMB_ADDR (pc_val);
4726 nextpc = get_frame_register_unsigned (frame, bits (inst1, 3, 6));
4728 else if ((inst1 & 0xff87) == 0x4687) /* mov pc, REG */
4730 if (bits (inst1, 3, 6) == 0x0f)
4733 nextpc = get_frame_register_unsigned (frame, bits (inst1, 3, 6));
4735 nextpc = MAKE_THUMB_ADDR (nextpc);
4737 else if ((inst1 & 0xf500) == 0xb100)
4740 int imm = (bit (inst1, 9) << 6) + (bits (inst1, 3, 7) << 1);
4741 ULONGEST reg = get_frame_register_unsigned (frame, bits (inst1, 0, 2));
4743 if (bit (inst1, 11) && reg != 0)
4744 nextpc = pc_val + imm;
4745 else if (!bit (inst1, 11) && reg == 0)
4746 nextpc = pc_val + imm;
4751 /* Get the raw next address. PC is the current program counter, in
4752 FRAME, which is assumed to be executing in ARM mode.
4754 The value returned has the execution state of the next instruction
4755 encoded in it. Use IS_THUMB_ADDR () to see whether the instruction is
4756 in Thumb-State, and gdbarch_addr_bits_remove () to get the plain memory
4760 arm_get_next_pc_raw (struct frame_info *frame, CORE_ADDR pc)
4762 struct gdbarch *gdbarch = get_frame_arch (frame);
4763 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
4764 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4765 unsigned long pc_val;
4766 unsigned long this_instr;
4767 unsigned long status;
4770 pc_val = (unsigned long) pc;
4771 this_instr = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
4773 status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
4774 nextpc = (CORE_ADDR) (pc_val + 4); /* Default case */
4776 if (bits (this_instr, 28, 31) == INST_NV)
4777 switch (bits (this_instr, 24, 27))
4782 /* Branch with Link and change to Thumb. */
4783 nextpc = BranchDest (pc, this_instr);
4784 nextpc |= bit (this_instr, 24) << 1;
4785 nextpc = MAKE_THUMB_ADDR (nextpc);
4791 /* Coprocessor register transfer. */
4792 if (bits (this_instr, 12, 15) == 15)
4793 error (_("Invalid update to pc in instruction"));
4796 else if (condition_true (bits (this_instr, 28, 31), status))
4798 switch (bits (this_instr, 24, 27))
4801 case 0x1: /* data processing */
4805 unsigned long operand1, operand2, result = 0;
4809 if (bits (this_instr, 12, 15) != 15)
4812 if (bits (this_instr, 22, 25) == 0
4813 && bits (this_instr, 4, 7) == 9) /* multiply */
4814 error (_("Invalid update to pc in instruction"));
4816 /* BX <reg>, BLX <reg> */
4817 if (bits (this_instr, 4, 27) == 0x12fff1
4818 || bits (this_instr, 4, 27) == 0x12fff3)
4820 rn = bits (this_instr, 0, 3);
4821 nextpc = ((rn == ARM_PC_REGNUM)
4823 : get_frame_register_unsigned (frame, rn));
4828 /* Multiply into PC. */
4829 c = (status & FLAG_C) ? 1 : 0;
4830 rn = bits (this_instr, 16, 19);
4831 operand1 = ((rn == ARM_PC_REGNUM)
4833 : get_frame_register_unsigned (frame, rn));
4835 if (bit (this_instr, 25))
4837 unsigned long immval = bits (this_instr, 0, 7);
4838 unsigned long rotate = 2 * bits (this_instr, 8, 11);
4839 operand2 = ((immval >> rotate) | (immval << (32 - rotate)))
4842 else /* operand 2 is a shifted register. */
4843 operand2 = shifted_reg_val (frame, this_instr, c,
4846 switch (bits (this_instr, 21, 24))
4849 result = operand1 & operand2;
4853 result = operand1 ^ operand2;
4857 result = operand1 - operand2;
4861 result = operand2 - operand1;
4865 result = operand1 + operand2;
4869 result = operand1 + operand2 + c;
4873 result = operand1 - operand2 + c;
4877 result = operand2 - operand1 + c;
4883 case 0xb: /* tst, teq, cmp, cmn */
4884 result = (unsigned long) nextpc;
4888 result = operand1 | operand2;
4892 /* Always step into a function. */
4897 result = operand1 & ~operand2;
4905 /* In 26-bit APCS the bottom two bits of the result are
4906 ignored, and we always end up in ARM state. */
4908 nextpc = arm_addr_bits_remove (gdbarch, result);
4916 case 0x5: /* data transfer */
4919 if (bit (this_instr, 20))
4922 if (bits (this_instr, 12, 15) == 15)
4928 if (bit (this_instr, 22))
4929 error (_("Invalid update to pc in instruction"));
4931 /* byte write to PC */
4932 rn = bits (this_instr, 16, 19);
4933 base = ((rn == ARM_PC_REGNUM)
4935 : get_frame_register_unsigned (frame, rn));
4937 if (bit (this_instr, 24))
4940 int c = (status & FLAG_C) ? 1 : 0;
4941 unsigned long offset =
4942 (bit (this_instr, 25)
4943 ? shifted_reg_val (frame, this_instr, c, pc_val, status)
4944 : bits (this_instr, 0, 11));
4946 if (bit (this_instr, 23))
4952 (CORE_ADDR) read_memory_unsigned_integer ((CORE_ADDR) base,
4959 case 0x9: /* block transfer */
4960 if (bit (this_instr, 20))
4963 if (bit (this_instr, 15))
4967 unsigned long rn_val
4968 = get_frame_register_unsigned (frame,
4969 bits (this_instr, 16, 19));
4971 if (bit (this_instr, 23))
4974 unsigned long reglist = bits (this_instr, 0, 14);
4975 offset = bitcount (reglist) * 4;
4976 if (bit (this_instr, 24)) /* pre */
4979 else if (bit (this_instr, 24))
4983 (CORE_ADDR) read_memory_unsigned_integer ((CORE_ADDR)
4990 case 0xb: /* branch & link */
4991 case 0xa: /* branch */
4993 nextpc = BranchDest (pc, this_instr);
4999 case 0xe: /* coproc ops */
5003 struct gdbarch_tdep *tdep;
5004 tdep = gdbarch_tdep (gdbarch);
5006 if (tdep->syscall_next_pc != NULL)
5007 nextpc = tdep->syscall_next_pc (frame);
5013 fprintf_filtered (gdb_stderr, _("Bad bit-field extraction\n"));
5021 /* Determine next PC after current instruction executes. Will call either
5022 arm_get_next_pc_raw or thumb_get_next_pc_raw. Error out if infinite
5023 loop is detected. */
5026 arm_get_next_pc (struct frame_info *frame, CORE_ADDR pc)
5030 if (arm_frame_is_thumb (frame))
5031 nextpc = thumb_get_next_pc_raw (frame, pc);
5033 nextpc = arm_get_next_pc_raw (frame, pc);
5038 /* Like insert_single_step_breakpoint, but make sure we use a breakpoint
5039 of the appropriate mode (as encoded in the PC value), even if this
5040 differs from what would be expected according to the symbol tables. */
5043 arm_insert_single_step_breakpoint (struct gdbarch *gdbarch,
5044 struct address_space *aspace,
5047 struct cleanup *old_chain
5048 = make_cleanup_restore_integer (&arm_override_mode);
5050 arm_override_mode = IS_THUMB_ADDR (pc);
5051 pc = gdbarch_addr_bits_remove (gdbarch, pc);
5053 insert_single_step_breakpoint (gdbarch, aspace, pc);
5055 do_cleanups (old_chain);
5058 /* Checks for an atomic sequence of instructions beginning with a LDREX{,B,H,D}
5059 instruction and ending with a STREX{,B,H,D} instruction. If such a sequence
5060 is found, attempt to step through it. A breakpoint is placed at the end of
5064 thumb_deal_with_atomic_sequence_raw (struct frame_info *frame)
5066 struct gdbarch *gdbarch = get_frame_arch (frame);
5067 struct address_space *aspace = get_frame_address_space (frame);
5068 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
5069 CORE_ADDR pc = get_frame_pc (frame);
5070 CORE_ADDR breaks[2] = {-1, -1};
5072 unsigned short insn1, insn2;
5075 int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed). */
5076 const int atomic_sequence_length = 16; /* Instruction sequence length. */
5077 ULONGEST status, itstate;
5079 /* We currently do not support atomic sequences within an IT block. */
5080 status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
5081 itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);
5085 /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction. */
5086 insn1 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
5088 if (thumb_insn_size (insn1) != 4)
5091 insn2 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
5093 if (!((insn1 & 0xfff0) == 0xe850
5094 || ((insn1 & 0xfff0) == 0xe8d0 && (insn2 & 0x00c0) == 0x0040)))
5097 /* Assume that no atomic sequence is longer than "atomic_sequence_length"
5099 for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
5101 insn1 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
5104 if (thumb_insn_size (insn1) != 4)
5106 /* Assume that there is at most one conditional branch in the
5107 atomic sequence. If a conditional branch is found, put a
5108 breakpoint in its destination address. */
5109 if ((insn1 & 0xf000) == 0xd000 && bits (insn1, 8, 11) != 0x0f)
5111 if (last_breakpoint > 0)
5112 return 0; /* More than one conditional branch found,
5113 fallback to the standard code. */
5115 breaks[1] = loc + 2 + (sbits (insn1, 0, 7) << 1);
5119 /* We do not support atomic sequences that use any *other*
5120 instructions but conditional branches to change the PC.
5121 Fall back to standard code to avoid losing control of
5123 else if (thumb_instruction_changes_pc (insn1))
5128 insn2 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
5131 /* Assume that there is at most one conditional branch in the
5132 atomic sequence. If a conditional branch is found, put a
5133 breakpoint in its destination address. */
5134 if ((insn1 & 0xf800) == 0xf000
5135 && (insn2 & 0xd000) == 0x8000
5136 && (insn1 & 0x0380) != 0x0380)
5138 int sign, j1, j2, imm1, imm2;
5139 unsigned int offset;
5141 sign = sbits (insn1, 10, 10);
5142 imm1 = bits (insn1, 0, 5);
5143 imm2 = bits (insn2, 0, 10);
5144 j1 = bit (insn2, 13);
5145 j2 = bit (insn2, 11);
5147 offset = (sign << 20) + (j2 << 19) + (j1 << 18);
5148 offset += (imm1 << 12) + (imm2 << 1);
5150 if (last_breakpoint > 0)
5151 return 0; /* More than one conditional branch found,
5152 fallback to the standard code. */
5154 breaks[1] = loc + offset;
5158 /* We do not support atomic sequences that use any *other*
5159 instructions but conditional branches to change the PC.
5160 Fall back to standard code to avoid losing control of
5162 else if (thumb2_instruction_changes_pc (insn1, insn2))
5165 /* If we find a strex{,b,h,d}, we're done. */
5166 if ((insn1 & 0xfff0) == 0xe840
5167 || ((insn1 & 0xfff0) == 0xe8c0 && (insn2 & 0x00c0) == 0x0040))
5172 /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence. */
5173 if (insn_count == atomic_sequence_length)
5176 /* Insert a breakpoint right after the end of the atomic sequence. */
5179 /* Check for duplicated breakpoints. Check also for a breakpoint
5180 placed (branch instruction's destination) anywhere in sequence. */
5182 && (breaks[1] == breaks[0]
5183 || (breaks[1] >= pc && breaks[1] < loc)))
5184 last_breakpoint = 0;
5186 /* Effectively inserts the breakpoints. */
5187 for (index = 0; index <= last_breakpoint; index++)
5188 arm_insert_single_step_breakpoint (gdbarch, aspace,
5189 MAKE_THUMB_ADDR (breaks[index]));
5195 arm_deal_with_atomic_sequence_raw (struct frame_info *frame)
5197 struct gdbarch *gdbarch = get_frame_arch (frame);
5198 struct address_space *aspace = get_frame_address_space (frame);
5199 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
5200 CORE_ADDR pc = get_frame_pc (frame);
5201 CORE_ADDR breaks[2] = {-1, -1};
5206 int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed). */
5207 const int atomic_sequence_length = 16; /* Instruction sequence length. */
5209 /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction.
5210 Note that we do not currently support conditionally executed atomic
5212 insn = read_memory_unsigned_integer (loc, 4, byte_order_for_code);
5214 if ((insn & 0xff9000f0) != 0xe1900090)
5217 /* Assume that no atomic sequence is longer than "atomic_sequence_length"
5219 for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
5221 insn = read_memory_unsigned_integer (loc, 4, byte_order_for_code);
5224 /* Assume that there is at most one conditional branch in the atomic
5225 sequence. If a conditional branch is found, put a breakpoint in
5226 its destination address. */
5227 if (bits (insn, 24, 27) == 0xa)
5229 if (last_breakpoint > 0)
5230 return 0; /* More than one conditional branch found, fallback
5231 to the standard single-step code. */
5233 breaks[1] = BranchDest (loc - 4, insn);
5237 /* We do not support atomic sequences that use any *other* instructions
5238 but conditional branches to change the PC. Fall back to standard
5239 code to avoid losing control of execution. */
5240 else if (arm_instruction_changes_pc (insn))
5243 /* If we find a strex{,b,h,d}, we're done. */
5244 if ((insn & 0xff9000f0) == 0xe1800090)
5248 /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence. */
5249 if (insn_count == atomic_sequence_length)
5252 /* Insert a breakpoint right after the end of the atomic sequence. */
5255 /* Check for duplicated breakpoints. Check also for a breakpoint
5256 placed (branch instruction's destination) anywhere in sequence. */
5258 && (breaks[1] == breaks[0]
5259 || (breaks[1] >= pc && breaks[1] < loc)))
5260 last_breakpoint = 0;
5262 /* Effectively inserts the breakpoints. */
5263 for (index = 0; index <= last_breakpoint; index++)
5264 arm_insert_single_step_breakpoint (gdbarch, aspace, breaks[index]);
5270 arm_deal_with_atomic_sequence (struct frame_info *frame)
5272 if (arm_frame_is_thumb (frame))
5273 return thumb_deal_with_atomic_sequence_raw (frame);
5275 return arm_deal_with_atomic_sequence_raw (frame);
5278 /* single_step() is called just before we want to resume the inferior,
5279 if we want to single-step it but there is no hardware or kernel
5280 single-step support. We find the target of the coming instruction
5281 and breakpoint it. */
5284 arm_software_single_step (struct frame_info *frame)
5286 struct gdbarch *gdbarch = get_frame_arch (frame);
5287 struct address_space *aspace = get_frame_address_space (frame);
5290 if (arm_deal_with_atomic_sequence (frame))
5293 next_pc = arm_get_next_pc (frame, get_frame_pc (frame));
5294 arm_insert_single_step_breakpoint (gdbarch, aspace, next_pc);
5299 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
5300 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
5301 NULL if an error occurs. BUF is freed. */
5304 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
5305 int old_len, int new_len)
5308 int bytes_to_read = new_len - old_len;
5310 new_buf = xmalloc (new_len);
5311 memcpy (new_buf + bytes_to_read, buf, old_len);
5313 if (target_read_memory (endaddr - new_len, new_buf, bytes_to_read) != 0)
5321 /* An IT block is at most the 2-byte IT instruction followed by
5322 four 4-byte instructions. The furthest back we must search to
5323 find an IT block that affects the current instruction is thus
5324 2 + 3 * 4 == 14 bytes. */
5325 #define MAX_IT_BLOCK_PREFIX 14
5327 /* Use a quick scan if there are more than this many bytes of
5329 #define IT_SCAN_THRESHOLD 32
5331 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
5332 A breakpoint in an IT block may not be hit, depending on the
5335 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
5339 CORE_ADDR boundary, func_start;
5341 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
5342 int i, any, last_it, last_it_count;
5344 /* If we are using BKPT breakpoints, none of this is necessary. */
5345 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
5348 /* ARM mode does not have this problem. */
5349 if (!arm_pc_is_thumb (gdbarch, bpaddr))
5352 /* We are setting a breakpoint in Thumb code that could potentially
5353 contain an IT block. The first step is to find how much Thumb
5354 code there is; we do not need to read outside of known Thumb
5356 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
5358 /* Thumb-2 code must have mapping symbols to have a chance. */
5361 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
5363 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
5364 && func_start > boundary)
5365 boundary = func_start;
5367 /* Search for a candidate IT instruction. We have to do some fancy
5368 footwork to distinguish a real IT instruction from the second
5369 half of a 32-bit instruction, but there is no need for that if
5370 there's no candidate. */
5371 buf_len = min (bpaddr - boundary, MAX_IT_BLOCK_PREFIX);
5373 /* No room for an IT instruction. */
5376 buf = xmalloc (buf_len);
5377 if (target_read_memory (bpaddr - buf_len, buf, buf_len) != 0)
5380 for (i = 0; i < buf_len; i += 2)
5382 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5383 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
5395 /* OK, the code bytes before this instruction contain at least one
5396 halfword which resembles an IT instruction. We know that it's
5397 Thumb code, but there are still two possibilities. Either the
5398 halfword really is an IT instruction, or it is the second half of
5399 a 32-bit Thumb instruction. The only way we can tell is to
5400 scan forwards from a known instruction boundary. */
5401 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
5405 /* There's a lot of code before this instruction. Start with an
5406 optimistic search; it's easy to recognize halfwords that can
5407 not be the start of a 32-bit instruction, and use that to
5408 lock on to the instruction boundaries. */
5409 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
5412 buf_len = IT_SCAN_THRESHOLD;
5415 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
5417 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5418 if (thumb_insn_size (inst1) == 2)
5425 /* At this point, if DEFINITE, BUF[I] is the first place we
5426 are sure that we know the instruction boundaries, and it is far
5427 enough from BPADDR that we could not miss an IT instruction
5428 affecting BPADDR. If ! DEFINITE, give up - start from a
5432 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
5436 buf_len = bpaddr - boundary;
5442 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
5445 buf_len = bpaddr - boundary;
5449 /* Scan forwards. Find the last IT instruction before BPADDR. */
5454 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5456 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
5461 else if (inst1 & 0x0002)
5463 else if (inst1 & 0x0004)
5468 i += thumb_insn_size (inst1);
5474 /* There wasn't really an IT instruction after all. */
5477 if (last_it_count < 1)
5478 /* It was too far away. */
5481 /* This really is a trouble spot. Move the breakpoint to the IT
5483 return bpaddr - buf_len + last_it;
5486 /* ARM displaced stepping support.
5488 Generally ARM displaced stepping works as follows:
5490 1. When an instruction is to be single-stepped, it is first decoded by
5491 arm_process_displaced_insn (called from arm_displaced_step_copy_insn).
5492 Depending on the type of instruction, it is then copied to a scratch
5493 location, possibly in a modified form. The copy_* set of functions
5494 performs such modification, as necessary. A breakpoint is placed after
5495 the modified instruction in the scratch space to return control to GDB.
5496 Note in particular that instructions which modify the PC will no longer
5497 do so after modification.
5499 2. The instruction is single-stepped, by setting the PC to the scratch
5500 location address, and resuming. Control returns to GDB when the
5503 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
5504 function used for the current instruction. This function's job is to
5505 put the CPU/memory state back to what it would have been if the
5506 instruction had been executed unmodified in its original location. */
5508 /* NOP instruction (mov r0, r0). */
5509 #define ARM_NOP 0xe1a00000
5510 #define THUMB_NOP 0x4600
5512 /* Helper for register reads for displaced stepping. In particular, this
5513 returns the PC as it would be seen by the instruction at its original
5517 displaced_read_reg (struct regcache *regs, struct displaced_step_closure *dsc,
5521 CORE_ADDR from = dsc->insn_addr;
5523 if (regno == ARM_PC_REGNUM)
5525 /* Compute pipeline offset:
5526 - When executing an ARM instruction, PC reads as the address of the
5527 current instruction plus 8.
5528 - When executing a Thumb instruction, PC reads as the address of the
5529 current instruction plus 4. */
5536 if (debug_displaced)
5537 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
5538 (unsigned long) from);
5539 return (ULONGEST) from;
5543 regcache_cooked_read_unsigned (regs, regno, &ret);
5544 if (debug_displaced)
5545 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
5546 regno, (unsigned long) ret);
5552 displaced_in_arm_mode (struct regcache *regs)
5555 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
5557 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5559 return (ps & t_bit) == 0;
5562 /* Write to the PC as from a branch instruction. */
5565 branch_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5569 /* Note: If bits 0/1 are set, this branch would be unpredictable for
5570 architecture versions < 6. */
5571 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5572 val & ~(ULONGEST) 0x3);
5574 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5575 val & ~(ULONGEST) 0x1);
5578 /* Write to the PC as from a branch-exchange instruction. */
5581 bx_write_pc (struct regcache *regs, ULONGEST val)
5584 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
5586 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5590 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
5591 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
5593 else if ((val & 2) == 0)
5595 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5596 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
5600 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
5601 mode, align dest to 4 bytes). */
5602 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
5603 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5604 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
5608 /* Write to the PC as if from a load instruction. */
5611 load_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5614 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
5615 bx_write_pc (regs, val);
5617 branch_write_pc (regs, dsc, val);
5620 /* Write to the PC as if from an ALU instruction. */
5623 alu_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5626 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
5627 bx_write_pc (regs, val);
5629 branch_write_pc (regs, dsc, val);
5632 /* Helper for writing to registers for displaced stepping. Writing to the PC
5633 has a varying effects depending on the instruction which does the write:
5634 this is controlled by the WRITE_PC argument. */
5637 displaced_write_reg (struct regcache *regs, struct displaced_step_closure *dsc,
5638 int regno, ULONGEST val, enum pc_write_style write_pc)
5640 if (regno == ARM_PC_REGNUM)
5642 if (debug_displaced)
5643 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
5644 (unsigned long) val);
5647 case BRANCH_WRITE_PC:
5648 branch_write_pc (regs, dsc, val);
5652 bx_write_pc (regs, val);
5656 load_write_pc (regs, dsc, val);
5660 alu_write_pc (regs, dsc, val);
5663 case CANNOT_WRITE_PC:
5664 warning (_("Instruction wrote to PC in an unexpected way when "
5665 "single-stepping"));
5669 internal_error (__FILE__, __LINE__,
5670 _("Invalid argument to displaced_write_reg"));
5673 dsc->wrote_to_pc = 1;
5677 if (debug_displaced)
5678 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
5679 regno, (unsigned long) val);
5680 regcache_cooked_write_unsigned (regs, regno, val);
5684 /* This function is used to concisely determine if an instruction INSN
5685 references PC. Register fields of interest in INSN should have the
5686 corresponding fields of BITMASK set to 0b1111. The function
5687 returns return 1 if any of these fields in INSN reference the PC
5688 (also 0b1111, r15), else it returns 0. */
5691 insn_references_pc (uint32_t insn, uint32_t bitmask)
5693 uint32_t lowbit = 1;
5695 while (bitmask != 0)
5699 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
5705 mask = lowbit * 0xf;
5707 if ((insn & mask) == mask)
5716 /* The simplest copy function. Many instructions have the same effect no
5717 matter what address they are executed at: in those cases, use this. */
5720 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
5721 const char *iname, struct displaced_step_closure *dsc)
5723 if (debug_displaced)
5724 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
5725 "opcode/class '%s' unmodified\n", (unsigned long) insn,
5728 dsc->modinsn[0] = insn;
5734 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
5735 uint16_t insn2, const char *iname,
5736 struct displaced_step_closure *dsc)
5738 if (debug_displaced)
5739 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
5740 "opcode/class '%s' unmodified\n", insn1, insn2,
5743 dsc->modinsn[0] = insn1;
5744 dsc->modinsn[1] = insn2;
5750 /* Copy 16-bit Thumb(Thumb and 16-bit Thumb-2) instruction without any
5753 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, unsigned int insn,
5755 struct displaced_step_closure *dsc)
5757 if (debug_displaced)
5758 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
5759 "opcode/class '%s' unmodified\n", insn,
5762 dsc->modinsn[0] = insn;
5767 /* Preload instructions with immediate offset. */
5770 cleanup_preload (struct gdbarch *gdbarch,
5771 struct regcache *regs, struct displaced_step_closure *dsc)
5773 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5774 if (!dsc->u.preload.immed)
5775 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5779 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
5780 struct displaced_step_closure *dsc, unsigned int rn)
5783 /* Preload instructions:
5785 {pli/pld} [rn, #+/-imm]
5787 {pli/pld} [r0, #+/-imm]. */
5789 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5790 rn_val = displaced_read_reg (regs, dsc, rn);
5791 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5792 dsc->u.preload.immed = 1;
5794 dsc->cleanup = &cleanup_preload;
5798 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5799 struct displaced_step_closure *dsc)
5801 unsigned int rn = bits (insn, 16, 19);
5803 if (!insn_references_pc (insn, 0x000f0000ul))
5804 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
5806 if (debug_displaced)
5807 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
5808 (unsigned long) insn);
5810 dsc->modinsn[0] = insn & 0xfff0ffff;
5812 install_preload (gdbarch, regs, dsc, rn);
5818 thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
5819 struct regcache *regs, struct displaced_step_closure *dsc)
5821 unsigned int rn = bits (insn1, 0, 3);
5822 unsigned int u_bit = bit (insn1, 7);
5823 int imm12 = bits (insn2, 0, 11);
5826 if (rn != ARM_PC_REGNUM)
5827 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
5829 /* PC is only allowed to use in PLI (immediate,literal) Encoding T3, and
5830 PLD (literal) Encoding T1. */
5831 if (debug_displaced)
5832 fprintf_unfiltered (gdb_stdlog,
5833 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
5834 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
5840 /* Rewrite instruction {pli/pld} PC imm12 into:
5841 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
5845 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
5847 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5848 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5850 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5852 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
5853 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
5854 dsc->u.preload.immed = 0;
5856 /* {pli/pld} [r0, r1] */
5857 dsc->modinsn[0] = insn1 & 0xfff0;
5858 dsc->modinsn[1] = 0xf001;
5861 dsc->cleanup = &cleanup_preload;
5865 /* Preload instructions with register offset. */
5868 install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
5869 struct displaced_step_closure *dsc, unsigned int rn,
5872 ULONGEST rn_val, rm_val;
5874 /* Preload register-offset instructions:
5876 {pli/pld} [rn, rm {, shift}]
5878 {pli/pld} [r0, r1 {, shift}]. */
5880 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5881 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5882 rn_val = displaced_read_reg (regs, dsc, rn);
5883 rm_val = displaced_read_reg (regs, dsc, rm);
5884 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5885 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
5886 dsc->u.preload.immed = 0;
5888 dsc->cleanup = &cleanup_preload;
5892 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
5893 struct regcache *regs,
5894 struct displaced_step_closure *dsc)
5896 unsigned int rn = bits (insn, 16, 19);
5897 unsigned int rm = bits (insn, 0, 3);
5900 if (!insn_references_pc (insn, 0x000f000ful))
5901 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
5903 if (debug_displaced)
5904 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
5905 (unsigned long) insn);
5907 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
5909 install_preload_reg (gdbarch, regs, dsc, rn, rm);
5913 /* Copy/cleanup coprocessor load and store instructions. */
5916 cleanup_copro_load_store (struct gdbarch *gdbarch,
5917 struct regcache *regs,
5918 struct displaced_step_closure *dsc)
5920 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
5922 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5924 if (dsc->u.ldst.writeback)
5925 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
5929 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5930 struct displaced_step_closure *dsc,
5931 int writeback, unsigned int rn)
5935 /* Coprocessor load/store instructions:
5937 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
5939 {stc/stc2} [r0, #+/-imm].
5941 ldc/ldc2 are handled identically. */
5943 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5944 rn_val = displaced_read_reg (regs, dsc, rn);
5945 /* PC should be 4-byte aligned. */
5946 rn_val = rn_val & 0xfffffffc;
5947 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5949 dsc->u.ldst.writeback = writeback;
5950 dsc->u.ldst.rn = rn;
5952 dsc->cleanup = &cleanup_copro_load_store;
5956 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
5957 struct regcache *regs,
5958 struct displaced_step_closure *dsc)
5960 unsigned int rn = bits (insn, 16, 19);
5962 if (!insn_references_pc (insn, 0x000f0000ul))
5963 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
5965 if (debug_displaced)
5966 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
5967 "load/store insn %.8lx\n", (unsigned long) insn);
5969 dsc->modinsn[0] = insn & 0xfff0ffff;
5971 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
5977 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
5978 uint16_t insn2, struct regcache *regs,
5979 struct displaced_step_closure *dsc)
5981 unsigned int rn = bits (insn1, 0, 3);
5983 if (rn != ARM_PC_REGNUM)
5984 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
5985 "copro load/store", dsc);
5987 if (debug_displaced)
5988 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
5989 "load/store insn %.4x%.4x\n", insn1, insn2);
5991 dsc->modinsn[0] = insn1 & 0xfff0;
5992 dsc->modinsn[1] = insn2;
5995 /* This function is called for copying instruction LDC/LDC2/VLDR, which
5996 doesn't support writeback, so pass 0. */
5997 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
6002 /* Clean up branch instructions (actually perform the branch, by setting
6006 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
6007 struct displaced_step_closure *dsc)
6009 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6010 int branch_taken = condition_true (dsc->u.branch.cond, status);
6011 enum pc_write_style write_pc = dsc->u.branch.exchange
6012 ? BX_WRITE_PC : BRANCH_WRITE_PC;
6017 if (dsc->u.branch.link)
6019 /* The value of LR should be the next insn of current one. In order
6020 not to confuse logic hanlding later insn `bx lr', if current insn mode
6021 is Thumb, the bit 0 of LR value should be set to 1. */
6022 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
6025 next_insn_addr |= 0x1;
6027 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
6031 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
6034 /* Copy B/BL/BLX instructions with immediate destinations. */
6037 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
6038 struct displaced_step_closure *dsc,
6039 unsigned int cond, int exchange, int link, long offset)
6041 /* Implement "BL<cond> <label>" as:
6043 Preparation: cond <- instruction condition
6044 Insn: mov r0, r0 (nop)
6045 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
6047 B<cond> similar, but don't set r14 in cleanup. */
6049 dsc->u.branch.cond = cond;
6050 dsc->u.branch.link = link;
6051 dsc->u.branch.exchange = exchange;
6053 dsc->u.branch.dest = dsc->insn_addr;
6054 if (link && exchange)
6055 /* For BLX, offset is computed from the Align (PC, 4). */
6056 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
6059 dsc->u.branch.dest += 4 + offset;
6061 dsc->u.branch.dest += 8 + offset;
6063 dsc->cleanup = &cleanup_branch;
6066 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
6067 struct regcache *regs, struct displaced_step_closure *dsc)
6069 unsigned int cond = bits (insn, 28, 31);
6070 int exchange = (cond == 0xf);
6071 int link = exchange || bit (insn, 24);
6074 if (debug_displaced)
6075 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
6076 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
6077 (unsigned long) insn);
6079 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
6080 then arrange the switch into Thumb mode. */
6081 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
6083 offset = bits (insn, 0, 23) << 2;
6085 if (bit (offset, 25))
6086 offset = offset | ~0x3ffffff;
6088 dsc->modinsn[0] = ARM_NOP;
6090 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
6095 thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
6096 uint16_t insn2, struct regcache *regs,
6097 struct displaced_step_closure *dsc)
6099 int link = bit (insn2, 14);
6100 int exchange = link && !bit (insn2, 12);
6103 int j1 = bit (insn2, 13);
6104 int j2 = bit (insn2, 11);
6105 int s = sbits (insn1, 10, 10);
6106 int i1 = !(j1 ^ bit (insn1, 10));
6107 int i2 = !(j2 ^ bit (insn1, 10));
6109 if (!link && !exchange) /* B */
6111 offset = (bits (insn2, 0, 10) << 1);
6112 if (bit (insn2, 12)) /* Encoding T4 */
6114 offset |= (bits (insn1, 0, 9) << 12)
6120 else /* Encoding T3 */
6122 offset |= (bits (insn1, 0, 5) << 12)
6126 cond = bits (insn1, 6, 9);
6131 offset = (bits (insn1, 0, 9) << 12);
6132 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
6133 offset |= exchange ?
6134 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
6137 if (debug_displaced)
6138 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
6139 "%.4x %.4x with offset %.8lx\n",
6140 link ? (exchange) ? "blx" : "bl" : "b",
6141 insn1, insn2, offset);
6143 dsc->modinsn[0] = THUMB_NOP;
6145 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
6149 /* Copy B Thumb instructions. */
6151 thumb_copy_b (struct gdbarch *gdbarch, unsigned short insn,
6152 struct displaced_step_closure *dsc)
6154 unsigned int cond = 0;
6156 unsigned short bit_12_15 = bits (insn, 12, 15);
6157 CORE_ADDR from = dsc->insn_addr;
6159 if (bit_12_15 == 0xd)
6161 /* offset = SignExtend (imm8:0, 32) */
6162 offset = sbits ((insn << 1), 0, 8);
6163 cond = bits (insn, 8, 11);
6165 else if (bit_12_15 == 0xe) /* Encoding T2 */
6167 offset = sbits ((insn << 1), 0, 11);
6171 if (debug_displaced)
6172 fprintf_unfiltered (gdb_stdlog,
6173 "displaced: copying b immediate insn %.4x "
6174 "with offset %d\n", insn, offset);
6176 dsc->u.branch.cond = cond;
6177 dsc->u.branch.link = 0;
6178 dsc->u.branch.exchange = 0;
6179 dsc->u.branch.dest = from + 4 + offset;
6181 dsc->modinsn[0] = THUMB_NOP;
6183 dsc->cleanup = &cleanup_branch;
6188 /* Copy BX/BLX with register-specified destinations. */
6191 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
6192 struct displaced_step_closure *dsc, int link,
6193 unsigned int cond, unsigned int rm)
6195 /* Implement {BX,BLX}<cond> <reg>" as:
6197 Preparation: cond <- instruction condition
6198 Insn: mov r0, r0 (nop)
6199 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
6201 Don't set r14 in cleanup for BX. */
6203 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
6205 dsc->u.branch.cond = cond;
6206 dsc->u.branch.link = link;
6208 dsc->u.branch.exchange = 1;
6210 dsc->cleanup = &cleanup_branch;
6214 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
6215 struct regcache *regs, struct displaced_step_closure *dsc)
6217 unsigned int cond = bits (insn, 28, 31);
6220 int link = bit (insn, 5);
6221 unsigned int rm = bits (insn, 0, 3);
6223 if (debug_displaced)
6224 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
6225 (unsigned long) insn);
6227 dsc->modinsn[0] = ARM_NOP;
6229 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
6234 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
6235 struct regcache *regs,
6236 struct displaced_step_closure *dsc)
6238 int link = bit (insn, 7);
6239 unsigned int rm = bits (insn, 3, 6);
6241 if (debug_displaced)
6242 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
6243 (unsigned short) insn);
6245 dsc->modinsn[0] = THUMB_NOP;
6247 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
6253 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
6256 cleanup_alu_imm (struct gdbarch *gdbarch,
6257 struct regcache *regs, struct displaced_step_closure *dsc)
6259 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
6260 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6261 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6262 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6266 arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
6267 struct displaced_step_closure *dsc)
6269 unsigned int rn = bits (insn, 16, 19);
6270 unsigned int rd = bits (insn, 12, 15);
6271 unsigned int op = bits (insn, 21, 24);
6272 int is_mov = (op == 0xd);
6273 ULONGEST rd_val, rn_val;
6275 if (!insn_references_pc (insn, 0x000ff000ul))
6276 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
6278 if (debug_displaced)
6279 fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
6280 "%.8lx\n", is_mov ? "move" : "ALU",
6281 (unsigned long) insn);
6283 /* Instruction is of form:
6285 <op><cond> rd, [rn,] #imm
6289 Preparation: tmp1, tmp2 <- r0, r1;
6291 Insn: <op><cond> r0, r1, #imm
6292 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
6295 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6296 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6297 rn_val = displaced_read_reg (regs, dsc, rn);
6298 rd_val = displaced_read_reg (regs, dsc, rd);
6299 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6300 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6304 dsc->modinsn[0] = insn & 0xfff00fff;
6306 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
6308 dsc->cleanup = &cleanup_alu_imm;
6314 thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
6315 uint16_t insn2, struct regcache *regs,
6316 struct displaced_step_closure *dsc)
6318 unsigned int op = bits (insn1, 5, 8);
6319 unsigned int rn, rm, rd;
6320 ULONGEST rd_val, rn_val;
6322 rn = bits (insn1, 0, 3); /* Rn */
6323 rm = bits (insn2, 0, 3); /* Rm */
6324 rd = bits (insn2, 8, 11); /* Rd */
6326 /* This routine is only called for instruction MOV. */
6327 gdb_assert (op == 0x2 && rn == 0xf);
6329 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
6330 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
6332 if (debug_displaced)
6333 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
6334 "ALU", insn1, insn2);
6336 /* Instruction is of form:
6338 <op><cond> rd, [rn,] #imm
6342 Preparation: tmp1, tmp2 <- r0, r1;
6344 Insn: <op><cond> r0, r1, #imm
6345 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
6348 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6349 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6350 rn_val = displaced_read_reg (regs, dsc, rn);
6351 rd_val = displaced_read_reg (regs, dsc, rd);
6352 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6353 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6356 dsc->modinsn[0] = insn1;
6357 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
6360 dsc->cleanup = &cleanup_alu_imm;
6365 /* Copy/cleanup arithmetic/logic insns with register RHS. */
6368 cleanup_alu_reg (struct gdbarch *gdbarch,
6369 struct regcache *regs, struct displaced_step_closure *dsc)
6374 rd_val = displaced_read_reg (regs, dsc, 0);
6376 for (i = 0; i < 3; i++)
6377 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6379 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6383 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
6384 struct displaced_step_closure *dsc,
6385 unsigned int rd, unsigned int rn, unsigned int rm)
6387 ULONGEST rd_val, rn_val, rm_val;
6389 /* Instruction is of form:
6391 <op><cond> rd, [rn,] rm [, <shift>]
6395 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
6396 r0, r1, r2 <- rd, rn, rm
6397 Insn: <op><cond> r0, r1, r2 [, <shift>]
6398 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
6401 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6402 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6403 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6404 rd_val = displaced_read_reg (regs, dsc, rd);
6405 rn_val = displaced_read_reg (regs, dsc, rn);
6406 rm_val = displaced_read_reg (regs, dsc, rm);
6407 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6408 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6409 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6412 dsc->cleanup = &cleanup_alu_reg;
6416 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
6417 struct displaced_step_closure *dsc)
6419 unsigned int op = bits (insn, 21, 24);
6420 int is_mov = (op == 0xd);
6422 if (!insn_references_pc (insn, 0x000ff00ful))
6423 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
6425 if (debug_displaced)
6426 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
6427 is_mov ? "move" : "ALU", (unsigned long) insn);
6430 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
6432 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
6434 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
6440 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
6441 struct regcache *regs,
6442 struct displaced_step_closure *dsc)
6444 unsigned rn, rm, rd;
6446 rd = bits (insn, 3, 6);
6447 rn = (bit (insn, 7) << 3) | bits (insn, 0, 2);
6450 if (rd != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
6451 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
6453 if (debug_displaced)
6454 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x\n",
6455 "ALU", (unsigned short) insn);
6457 dsc->modinsn[0] = ((insn & 0xff00) | 0x08);
6459 install_alu_reg (gdbarch, regs, dsc, rd, rn, rm);
6464 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
6467 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
6468 struct regcache *regs,
6469 struct displaced_step_closure *dsc)
6471 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
6474 for (i = 0; i < 4; i++)
6475 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6477 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6481 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
6482 struct displaced_step_closure *dsc,
6483 unsigned int rd, unsigned int rn, unsigned int rm,
6487 ULONGEST rd_val, rn_val, rm_val, rs_val;
6489 /* Instruction is of form:
6491 <op><cond> rd, [rn,] rm, <shift> rs
6495 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
6496 r0, r1, r2, r3 <- rd, rn, rm, rs
6497 Insn: <op><cond> r0, r1, r2, <shift> r3
6499 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
6503 for (i = 0; i < 4; i++)
6504 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6506 rd_val = displaced_read_reg (regs, dsc, rd);
6507 rn_val = displaced_read_reg (regs, dsc, rn);
6508 rm_val = displaced_read_reg (regs, dsc, rm);
6509 rs_val = displaced_read_reg (regs, dsc, rs);
6510 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6511 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6512 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6513 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
6515 dsc->cleanup = &cleanup_alu_shifted_reg;
6519 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
6520 struct regcache *regs,
6521 struct displaced_step_closure *dsc)
6523 unsigned int op = bits (insn, 21, 24);
6524 int is_mov = (op == 0xd);
6525 unsigned int rd, rn, rm, rs;
6527 if (!insn_references_pc (insn, 0x000fff0ful))
6528 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
6530 if (debug_displaced)
6531 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
6532 "%.8lx\n", is_mov ? "move" : "ALU",
6533 (unsigned long) insn);
6535 rn = bits (insn, 16, 19);
6536 rm = bits (insn, 0, 3);
6537 rs = bits (insn, 8, 11);
6538 rd = bits (insn, 12, 15);
6541 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
6543 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
6545 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
6550 /* Clean up load instructions. */
6553 cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
6554 struct displaced_step_closure *dsc)
6556 ULONGEST rt_val, rt_val2 = 0, rn_val;
6558 rt_val = displaced_read_reg (regs, dsc, 0);
6559 if (dsc->u.ldst.xfersize == 8)
6560 rt_val2 = displaced_read_reg (regs, dsc, 1);
6561 rn_val = displaced_read_reg (regs, dsc, 2);
6563 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6564 if (dsc->u.ldst.xfersize > 4)
6565 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6566 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
6567 if (!dsc->u.ldst.immed)
6568 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
6570 /* Handle register writeback. */
6571 if (dsc->u.ldst.writeback)
6572 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
6573 /* Put result in right place. */
6574 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
6575 if (dsc->u.ldst.xfersize == 8)
6576 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
6579 /* Clean up store instructions. */
6582 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
6583 struct displaced_step_closure *dsc)
6585 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
6587 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6588 if (dsc->u.ldst.xfersize > 4)
6589 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6590 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
6591 if (!dsc->u.ldst.immed)
6592 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
6593 if (!dsc->u.ldst.restore_r4)
6594 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
6597 if (dsc->u.ldst.writeback)
6598 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
6601 /* Copy "extra" load/store instructions. These are halfword/doubleword
6602 transfers, which have a different encoding to byte/word transfers. */
6605 arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unpriveleged,
6606 struct regcache *regs, struct displaced_step_closure *dsc)
6608 unsigned int op1 = bits (insn, 20, 24);
6609 unsigned int op2 = bits (insn, 5, 6);
6610 unsigned int rt = bits (insn, 12, 15);
6611 unsigned int rn = bits (insn, 16, 19);
6612 unsigned int rm = bits (insn, 0, 3);
6613 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
6614 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
6615 int immed = (op1 & 0x4) != 0;
6617 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
6619 if (!insn_references_pc (insn, 0x000ff00ful))
6620 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
6622 if (debug_displaced)
6623 fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
6624 "insn %.8lx\n", unpriveleged ? "unpriveleged " : "",
6625 (unsigned long) insn);
6627 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
6630 internal_error (__FILE__, __LINE__,
6631 _("copy_extra_ld_st: instruction decode error"));
6633 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6634 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6635 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6637 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6639 rt_val = displaced_read_reg (regs, dsc, rt);
6640 if (bytesize[opcode] == 8)
6641 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
6642 rn_val = displaced_read_reg (regs, dsc, rn);
6644 rm_val = displaced_read_reg (regs, dsc, rm);
6646 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
6647 if (bytesize[opcode] == 8)
6648 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
6649 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
6651 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
6654 dsc->u.ldst.xfersize = bytesize[opcode];
6655 dsc->u.ldst.rn = rn;
6656 dsc->u.ldst.immed = immed;
6657 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
6658 dsc->u.ldst.restore_r4 = 0;
6661 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
6663 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
6664 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
6666 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
6668 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
6669 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
6671 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
6676 /* Copy byte/half word/word loads and stores. */
6679 install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
6680 struct displaced_step_closure *dsc, int load,
6681 int immed, int writeback, int size, int usermode,
6682 int rt, int rm, int rn)
6684 ULONGEST rt_val, rn_val, rm_val = 0;
6686 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6687 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6689 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6691 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
6693 rt_val = displaced_read_reg (regs, dsc, rt);
6694 rn_val = displaced_read_reg (regs, dsc, rn);
6696 rm_val = displaced_read_reg (regs, dsc, rm);
6698 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
6699 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
6701 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
6703 dsc->u.ldst.xfersize = size;
6704 dsc->u.ldst.rn = rn;
6705 dsc->u.ldst.immed = immed;
6706 dsc->u.ldst.writeback = writeback;
6708 /* To write PC we can do:
6710 Before this sequence of instructions:
6711 r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
6712 r2 is the Rn value got from dispalced_read_reg.
6714 Insn1: push {pc} Write address of STR instruction + offset on stack
6715 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
6716 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
6717 = addr(Insn1) + offset - addr(Insn3) - 8
6719 Insn4: add r4, r4, #8 r4 = offset - 8
6720 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
6722 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
6724 Otherwise we don't know what value to write for PC, since the offset is
6725 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
6726 of this can be found in Section "Saving from r15" in
6727 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
6729 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
6734 thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
6735 uint16_t insn2, struct regcache *regs,
6736 struct displaced_step_closure *dsc, int size)
6738 unsigned int u_bit = bit (insn1, 7);
6739 unsigned int rt = bits (insn2, 12, 15);
6740 int imm12 = bits (insn2, 0, 11);
6743 if (debug_displaced)
6744 fprintf_unfiltered (gdb_stdlog,
6745 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
6746 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
6752 /* Rewrite instruction LDR Rt imm12 into:
6754 Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12
6758 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2]. */
6761 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6762 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6763 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6765 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6767 pc_val = pc_val & 0xfffffffc;
6769 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
6770 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
6774 dsc->u.ldst.xfersize = size;
6775 dsc->u.ldst.immed = 0;
6776 dsc->u.ldst.writeback = 0;
6777 dsc->u.ldst.restore_r4 = 0;
6779 /* LDR R0, R2, R3 */
6780 dsc->modinsn[0] = 0xf852;
6781 dsc->modinsn[1] = 0x3;
6784 dsc->cleanup = &cleanup_load;
6790 thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
6791 uint16_t insn2, struct regcache *regs,
6792 struct displaced_step_closure *dsc,
6793 int writeback, int immed)
6795 unsigned int rt = bits (insn2, 12, 15);
6796 unsigned int rn = bits (insn1, 0, 3);
6797 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
6798 /* In LDR (register), there is also a register Rm, which is not allowed to
6799 be PC, so we don't have to check it. */
6801 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
6802 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
6805 if (debug_displaced)
6806 fprintf_unfiltered (gdb_stdlog,
6807 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
6808 rt, rn, insn1, insn2);
6810 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
6813 dsc->u.ldst.restore_r4 = 0;
6816 /* ldr[b]<cond> rt, [rn, #imm], etc.
6818 ldr[b]<cond> r0, [r2, #imm]. */
6820 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
6821 dsc->modinsn[1] = insn2 & 0x0fff;
6824 /* ldr[b]<cond> rt, [rn, rm], etc.
6826 ldr[b]<cond> r0, [r2, r3]. */
6828 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
6829 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
6839 arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
6840 struct regcache *regs,
6841 struct displaced_step_closure *dsc,
6842 int load, int size, int usermode)
6844 int immed = !bit (insn, 25);
6845 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
6846 unsigned int rt = bits (insn, 12, 15);
6847 unsigned int rn = bits (insn, 16, 19);
6848 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
6850 if (!insn_references_pc (insn, 0x000ff00ful))
6851 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
6853 if (debug_displaced)
6854 fprintf_unfiltered (gdb_stdlog,
6855 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
6856 load ? (size == 1 ? "ldrb" : "ldr")
6857 : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
6859 (unsigned long) insn);
6861 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
6862 usermode, rt, rm, rn);
6864 if (load || rt != ARM_PC_REGNUM)
6866 dsc->u.ldst.restore_r4 = 0;
6869 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
6871 {ldr,str}[b]<cond> r0, [r2, #imm]. */
6872 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
6874 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
6876 {ldr,str}[b]<cond> r0, [r2, r3]. */
6877 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
6881 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
6882 dsc->u.ldst.restore_r4 = 1;
6883 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
6884 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
6885 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
6886 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
6887 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
6891 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
6893 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
6898 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
6903 /* Cleanup LDM instructions with fully-populated register list. This is an
6904 unfortunate corner case: it's impossible to implement correctly by modifying
6905 the instruction. The issue is as follows: we have an instruction,
6909 which we must rewrite to avoid loading PC. A possible solution would be to
6910 do the load in two halves, something like (with suitable cleanup
6914 ldm[id][ab] r8!, {r0-r7}
6916 ldm[id][ab] r8, {r7-r14}
6919 but at present there's no suitable place for <temp>, since the scratch space
6920 is overwritten before the cleanup routine is called. For now, we simply
6921 emulate the instruction. */
6924 cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
6925 struct displaced_step_closure *dsc)
6927 int inc = dsc->u.block.increment;
6928 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
6929 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
6930 uint32_t regmask = dsc->u.block.regmask;
6931 int regno = inc ? 0 : 15;
6932 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
6933 int exception_return = dsc->u.block.load && dsc->u.block.user
6934 && (regmask & 0x8000) != 0;
6935 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6936 int do_transfer = condition_true (dsc->u.block.cond, status);
6937 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
6942 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
6943 sensible we can do here. Complain loudly. */
6944 if (exception_return)
6945 error (_("Cannot single-step exception return"));
6947 /* We don't handle any stores here for now. */
6948 gdb_assert (dsc->u.block.load != 0);
6950 if (debug_displaced)
6951 fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
6952 "%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
6953 dsc->u.block.increment ? "inc" : "dec",
6954 dsc->u.block.before ? "before" : "after");
6961 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
6964 while (regno >= 0 && (regmask & (1 << regno)) == 0)
6967 xfer_addr += bump_before;
6969 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
6970 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
6972 xfer_addr += bump_after;
6974 regmask &= ~(1 << regno);
6977 if (dsc->u.block.writeback)
6978 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
6982 /* Clean up an STM which included the PC in the register list. */
6985 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
6986 struct displaced_step_closure *dsc)
6988 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6989 int store_executed = condition_true (dsc->u.block.cond, status);
6990 CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
6991 CORE_ADDR stm_insn_addr;
6994 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
6996 /* If condition code fails, there's nothing else to do. */
6997 if (!store_executed)
7000 if (dsc->u.block.increment)
7002 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
7004 if (dsc->u.block.before)
7009 pc_stored_at = dsc->u.block.xfer_addr;
7011 if (dsc->u.block.before)
7015 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
7016 stm_insn_addr = dsc->scratch_base;
7017 offset = pc_val - stm_insn_addr;
7019 if (debug_displaced)
7020 fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
7021 "STM instruction\n", offset);
7023 /* Rewrite the stored PC to the proper value for the non-displaced original
7025 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
7026 dsc->insn_addr + offset);
7029 /* Clean up an LDM which includes the PC in the register list. We clumped all
7030 the registers in the transferred list into a contiguous range r0...rX (to
7031 avoid loading PC directly and losing control of the debugged program), so we
7032 must undo that here. */
7035 cleanup_block_load_pc (struct gdbarch *gdbarch,
7036 struct regcache *regs,
7037 struct displaced_step_closure *dsc)
7039 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
7040 int load_executed = condition_true (dsc->u.block.cond, status);
7041 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
7042 unsigned int regs_loaded = bitcount (mask);
7043 unsigned int num_to_shuffle = regs_loaded, clobbered;
7045 /* The method employed here will fail if the register list is fully populated
7046 (we need to avoid loading PC directly). */
7047 gdb_assert (num_to_shuffle < 16);
7052 clobbered = (1 << num_to_shuffle) - 1;
7054 while (num_to_shuffle > 0)
7056 if ((mask & (1 << write_reg)) != 0)
7058 unsigned int read_reg = num_to_shuffle - 1;
7060 if (read_reg != write_reg)
7062 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
7063 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
7064 if (debug_displaced)
7065 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
7066 "loaded register r%d to r%d\n"), read_reg,
7069 else if (debug_displaced)
7070 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
7071 "r%d already in the right place\n"),
7074 clobbered &= ~(1 << write_reg);
7082 /* Restore any registers we scribbled over. */
7083 for (write_reg = 0; clobbered != 0; write_reg++)
7085 if ((clobbered & (1 << write_reg)) != 0)
7087 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
7089 if (debug_displaced)
7090 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
7091 "clobbered register r%d\n"), write_reg);
7092 clobbered &= ~(1 << write_reg);
7096 /* Perform register writeback manually. */
7097 if (dsc->u.block.writeback)
7099 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
7101 if (dsc->u.block.increment)
7102 new_rn_val += regs_loaded * 4;
7104 new_rn_val -= regs_loaded * 4;
7106 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
7111 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
7112 in user-level code (in particular exception return, ldm rn, {...pc}^). */
7115 arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
7116 struct regcache *regs,
7117 struct displaced_step_closure *dsc)
7119 int load = bit (insn, 20);
7120 int user = bit (insn, 22);
7121 int increment = bit (insn, 23);
7122 int before = bit (insn, 24);
7123 int writeback = bit (insn, 21);
7124 int rn = bits (insn, 16, 19);
7126 /* Block transfers which don't mention PC can be run directly
7128 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
7129 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
7131 if (rn == ARM_PC_REGNUM)
7133 warning (_("displaced: Unpredictable LDM or STM with "
7134 "base register r15"));
7135 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
7138 if (debug_displaced)
7139 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
7140 "%.8lx\n", (unsigned long) insn);
7142 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
7143 dsc->u.block.rn = rn;
7145 dsc->u.block.load = load;
7146 dsc->u.block.user = user;
7147 dsc->u.block.increment = increment;
7148 dsc->u.block.before = before;
7149 dsc->u.block.writeback = writeback;
7150 dsc->u.block.cond = bits (insn, 28, 31);
7152 dsc->u.block.regmask = insn & 0xffff;
7156 if ((insn & 0xffff) == 0xffff)
7158 /* LDM with a fully-populated register list. This case is
7159 particularly tricky. Implement for now by fully emulating the
7160 instruction (which might not behave perfectly in all cases, but
7161 these instructions should be rare enough for that not to matter
7163 dsc->modinsn[0] = ARM_NOP;
7165 dsc->cleanup = &cleanup_block_load_all;
7169 /* LDM of a list of registers which includes PC. Implement by
7170 rewriting the list of registers to be transferred into a
7171 contiguous chunk r0...rX before doing the transfer, then shuffling
7172 registers into the correct places in the cleanup routine. */
7173 unsigned int regmask = insn & 0xffff;
7174 unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
7175 unsigned int to = 0, from = 0, i, new_rn;
7177 for (i = 0; i < num_in_list; i++)
7178 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7180 /* Writeback makes things complicated. We need to avoid clobbering
7181 the base register with one of the registers in our modified
7182 register list, but just using a different register can't work in
7185 ldm r14!, {r0-r13,pc}
7187 which would need to be rewritten as:
7191 but that can't work, because there's no free register for N.
7193 Solve this by turning off the writeback bit, and emulating
7194 writeback manually in the cleanup routine. */
7199 new_regmask = (1 << num_in_list) - 1;
7201 if (debug_displaced)
7202 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
7203 "{..., pc}: original reg list %.4x, modified "
7204 "list %.4x\n"), rn, writeback ? "!" : "",
7205 (int) insn & 0xffff, new_regmask);
7207 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
7209 dsc->cleanup = &cleanup_block_load_pc;
7214 /* STM of a list of registers which includes PC. Run the instruction
7215 as-is, but out of line: this will store the wrong value for the PC,
7216 so we must manually fix up the memory in the cleanup routine.
7217 Doing things this way has the advantage that we can auto-detect
7218 the offset of the PC write (which is architecture-dependent) in
7219 the cleanup routine. */
7220 dsc->modinsn[0] = insn;
7222 dsc->cleanup = &cleanup_block_store_pc;
7229 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
7230 struct regcache *regs,
7231 struct displaced_step_closure *dsc)
7233 int rn = bits (insn1, 0, 3);
7234 int load = bit (insn1, 4);
7235 int writeback = bit (insn1, 5);
7237 /* Block transfers which don't mention PC can be run directly
7239 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
7240 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
7242 if (rn == ARM_PC_REGNUM)
7244 warning (_("displaced: Unpredictable LDM or STM with "
7245 "base register r15"));
7246 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7247 "unpredictable ldm/stm", dsc);
7250 if (debug_displaced)
7251 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
7252 "%.4x%.4x\n", insn1, insn2);
7254 /* Clear bit 13, since it should be always zero. */
7255 dsc->u.block.regmask = (insn2 & 0xdfff);
7256 dsc->u.block.rn = rn;
7258 dsc->u.block.load = load;
7259 dsc->u.block.user = 0;
7260 dsc->u.block.increment = bit (insn1, 7);
7261 dsc->u.block.before = bit (insn1, 8);
7262 dsc->u.block.writeback = writeback;
7263 dsc->u.block.cond = INST_AL;
7264 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
7268 if (dsc->u.block.regmask == 0xffff)
7270 /* This branch is impossible to happen. */
7275 unsigned int regmask = dsc->u.block.regmask;
7276 unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
7277 unsigned int to = 0, from = 0, i, new_rn;
7279 for (i = 0; i < num_in_list; i++)
7280 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7285 new_regmask = (1 << num_in_list) - 1;
7287 if (debug_displaced)
7288 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
7289 "{..., pc}: original reg list %.4x, modified "
7290 "list %.4x\n"), rn, writeback ? "!" : "",
7291 (int) dsc->u.block.regmask, new_regmask);
7293 dsc->modinsn[0] = insn1;
7294 dsc->modinsn[1] = (new_regmask & 0xffff);
7297 dsc->cleanup = &cleanup_block_load_pc;
7302 dsc->modinsn[0] = insn1;
7303 dsc->modinsn[1] = insn2;
7305 dsc->cleanup = &cleanup_block_store_pc;
7310 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
7311 for Linux, where some SVC instructions must be treated specially. */
7314 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
7315 struct displaced_step_closure *dsc)
7317 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
7319 if (debug_displaced)
7320 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
7321 "%.8lx\n", (unsigned long) resume_addr);
7323 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
7327 /* Common copy routine for svc instruciton. */
7330 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
7331 struct displaced_step_closure *dsc)
7333 /* Preparation: none.
7334 Insn: unmodified svc.
7335 Cleanup: pc <- insn_addr + insn_size. */
7337 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
7339 dsc->wrote_to_pc = 1;
7341 /* Allow OS-specific code to override SVC handling. */
7342 if (dsc->u.svc.copy_svc_os)
7343 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
7346 dsc->cleanup = &cleanup_svc;
7352 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
7353 struct regcache *regs, struct displaced_step_closure *dsc)
7356 if (debug_displaced)
7357 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
7358 (unsigned long) insn);
7360 dsc->modinsn[0] = insn;
7362 return install_svc (gdbarch, regs, dsc);
7366 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
7367 struct regcache *regs, struct displaced_step_closure *dsc)
7370 if (debug_displaced)
7371 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
7374 dsc->modinsn[0] = insn;
7376 return install_svc (gdbarch, regs, dsc);
7379 /* Copy undefined instructions. */
7382 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
7383 struct displaced_step_closure *dsc)
7385 if (debug_displaced)
7386 fprintf_unfiltered (gdb_stdlog,
7387 "displaced: copying undefined insn %.8lx\n",
7388 (unsigned long) insn);
7390 dsc->modinsn[0] = insn;
7396 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
7397 struct displaced_step_closure *dsc)
7400 if (debug_displaced)
7401 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
7402 "%.4x %.4x\n", (unsigned short) insn1,
7403 (unsigned short) insn2);
7405 dsc->modinsn[0] = insn1;
7406 dsc->modinsn[1] = insn2;
7412 /* Copy unpredictable instructions. */
7415 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
7416 struct displaced_step_closure *dsc)
7418 if (debug_displaced)
7419 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
7420 "%.8lx\n", (unsigned long) insn);
7422 dsc->modinsn[0] = insn;
7427 /* The decode_* functions are instruction decoding helpers. They mostly follow
7428 the presentation in the ARM ARM. */
7431 arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
7432 struct regcache *regs,
7433 struct displaced_step_closure *dsc)
7435 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
7436 unsigned int rn = bits (insn, 16, 19);
7438 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0xe) == 0x0)
7439 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
7440 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0xe) == 0x1)
7441 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
7442 else if ((op1 & 0x60) == 0x20)
7443 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
7444 else if ((op1 & 0x71) == 0x40)
7445 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
7447 else if ((op1 & 0x77) == 0x41)
7448 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
7449 else if ((op1 & 0x77) == 0x45)
7450 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
7451 else if ((op1 & 0x77) == 0x51)
7454 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
7456 return arm_copy_unpred (gdbarch, insn, dsc);
7458 else if ((op1 & 0x77) == 0x55)
7459 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
7460 else if (op1 == 0x57)
7463 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
7464 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
7465 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
7466 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
7467 default: return arm_copy_unpred (gdbarch, insn, dsc);
7469 else if ((op1 & 0x63) == 0x43)
7470 return arm_copy_unpred (gdbarch, insn, dsc);
7471 else if ((op2 & 0x1) == 0x0)
7472 switch (op1 & ~0x80)
7475 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
7477 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
7478 case 0x71: case 0x75:
7480 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
7481 case 0x63: case 0x67: case 0x73: case 0x77:
7482 return arm_copy_unpred (gdbarch, insn, dsc);
7484 return arm_copy_undef (gdbarch, insn, dsc);
7487 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
7491 arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
7492 struct regcache *regs,
7493 struct displaced_step_closure *dsc)
7495 if (bit (insn, 27) == 0)
7496 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
7497 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
7498 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
7501 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
7504 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
7506 case 0x4: case 0x5: case 0x6: case 0x7:
7507 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
7510 switch ((insn & 0xe00000) >> 21)
7512 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
7514 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7517 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
7520 return arm_copy_undef (gdbarch, insn, dsc);
7525 int rn_f = (bits (insn, 16, 19) == 0xf);
7526 switch ((insn & 0xe00000) >> 21)
7529 /* ldc/ldc2 imm (undefined for rn == pc). */
7530 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
7531 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7534 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
7536 case 0x4: case 0x5: case 0x6: case 0x7:
7537 /* ldc/ldc2 lit (undefined for rn != pc). */
7538 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
7539 : arm_copy_undef (gdbarch, insn, dsc);
7542 return arm_copy_undef (gdbarch, insn, dsc);
7547 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
7550 if (bits (insn, 16, 19) == 0xf)
7552 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7554 return arm_copy_undef (gdbarch, insn, dsc);
7558 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
7560 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7564 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
7566 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7569 return arm_copy_undef (gdbarch, insn, dsc);
7573 /* Decode miscellaneous instructions in dp/misc encoding space. */
7576 arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
7577 struct regcache *regs,
7578 struct displaced_step_closure *dsc)
7580 unsigned int op2 = bits (insn, 4, 6);
7581 unsigned int op = bits (insn, 21, 22);
7582 unsigned int op1 = bits (insn, 16, 19);
7587 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
7590 if (op == 0x1) /* bx. */
7591 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
7593 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
7595 return arm_copy_undef (gdbarch, insn, dsc);
7599 /* Not really supported. */
7600 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
7602 return arm_copy_undef (gdbarch, insn, dsc);
7606 return arm_copy_bx_blx_reg (gdbarch, insn,
7607 regs, dsc); /* blx register. */
7609 return arm_copy_undef (gdbarch, insn, dsc);
7612 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
7616 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
7618 /* Not really supported. */
7619 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
7622 return arm_copy_undef (gdbarch, insn, dsc);
7627 arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
7628 struct regcache *regs,
7629 struct displaced_step_closure *dsc)
7632 switch (bits (insn, 20, 24))
7635 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
7638 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
7640 case 0x12: case 0x16:
7641 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
7644 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
7648 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
7650 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
7651 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
7652 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
7653 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
7654 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
7655 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
7656 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
7657 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
7658 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
7659 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
7660 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
7661 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
7662 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
7663 /* 2nd arg means "unpriveleged". */
7664 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
7668 /* Should be unreachable. */
7673 arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
7674 struct regcache *regs,
7675 struct displaced_step_closure *dsc)
7677 int a = bit (insn, 25), b = bit (insn, 4);
7678 uint32_t op1 = bits (insn, 20, 24);
7679 int rn_f = bits (insn, 16, 19) == 0xf;
7681 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
7682 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
7683 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
7684 else if ((!a && (op1 & 0x17) == 0x02)
7685 || (a && (op1 & 0x17) == 0x02 && !b))
7686 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
7687 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
7688 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
7689 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
7690 else if ((!a && (op1 & 0x17) == 0x03)
7691 || (a && (op1 & 0x17) == 0x03 && !b))
7692 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
7693 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
7694 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
7695 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
7696 else if ((!a && (op1 & 0x17) == 0x06)
7697 || (a && (op1 & 0x17) == 0x06 && !b))
7698 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
7699 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
7700 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
7701 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
7702 else if ((!a && (op1 & 0x17) == 0x07)
7703 || (a && (op1 & 0x17) == 0x07 && !b))
7704 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
7706 /* Should be unreachable. */
7711 arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
7712 struct displaced_step_closure *dsc)
7714 switch (bits (insn, 20, 24))
7716 case 0x00: case 0x01: case 0x02: case 0x03:
7717 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
7719 case 0x04: case 0x05: case 0x06: case 0x07:
7720 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
7722 case 0x08: case 0x09: case 0x0a: case 0x0b:
7723 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
7724 return arm_copy_unmodified (gdbarch, insn,
7725 "decode/pack/unpack/saturate/reverse", dsc);
7728 if (bits (insn, 5, 7) == 0) /* op2. */
7730 if (bits (insn, 12, 15) == 0xf)
7731 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
7733 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
7736 return arm_copy_undef (gdbarch, insn, dsc);
7738 case 0x1a: case 0x1b:
7739 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7740 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
7742 return arm_copy_undef (gdbarch, insn, dsc);
7744 case 0x1c: case 0x1d:
7745 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
7747 if (bits (insn, 0, 3) == 0xf)
7748 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
7750 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
7753 return arm_copy_undef (gdbarch, insn, dsc);
7755 case 0x1e: case 0x1f:
7756 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7757 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
7759 return arm_copy_undef (gdbarch, insn, dsc);
7762 /* Should be unreachable. */
7767 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, int32_t insn,
7768 struct regcache *regs,
7769 struct displaced_step_closure *dsc)
7772 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
7774 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
7778 arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
7779 struct regcache *regs,
7780 struct displaced_step_closure *dsc)
7782 unsigned int opcode = bits (insn, 20, 24);
7786 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
7787 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
7789 case 0x08: case 0x0a: case 0x0c: case 0x0e:
7790 case 0x12: case 0x16:
7791 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
7793 case 0x09: case 0x0b: case 0x0d: case 0x0f:
7794 case 0x13: case 0x17:
7795 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
7797 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
7798 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
7799 /* Note: no writeback for these instructions. Bit 25 will always be
7800 zero though (via caller), so the following works OK. */
7801 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7804 /* Should be unreachable. */
7808 /* Decode shifted register instructions. */
7811 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
7812 uint16_t insn2, struct regcache *regs,
7813 struct displaced_step_closure *dsc)
7815 /* PC is only allowed to be used in instruction MOV. */
7817 unsigned int op = bits (insn1, 5, 8);
7818 unsigned int rn = bits (insn1, 0, 3);
7820 if (op == 0x2 && rn == 0xf) /* MOV */
7821 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
7823 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7824 "dp (shift reg)", dsc);
7828 /* Decode extension register load/store. Exactly the same as
7829 arm_decode_ext_reg_ld_st. */
7832 thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
7833 uint16_t insn2, struct regcache *regs,
7834 struct displaced_step_closure *dsc)
7836 unsigned int opcode = bits (insn1, 4, 8);
7840 case 0x04: case 0x05:
7841 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7842 "vfp/neon vmov", dsc);
7844 case 0x08: case 0x0c: /* 01x00 */
7845 case 0x0a: case 0x0e: /* 01x10 */
7846 case 0x12: case 0x16: /* 10x10 */
7847 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7848 "vfp/neon vstm/vpush", dsc);
7850 case 0x09: case 0x0d: /* 01x01 */
7851 case 0x0b: case 0x0f: /* 01x11 */
7852 case 0x13: case 0x17: /* 10x11 */
7853 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7854 "vfp/neon vldm/vpop", dsc);
7856 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
7857 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7859 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
7860 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
7863 /* Should be unreachable. */
7868 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn, CORE_ADDR to,
7869 struct regcache *regs, struct displaced_step_closure *dsc)
7871 unsigned int op1 = bits (insn, 20, 25);
7872 int op = bit (insn, 4);
7873 unsigned int coproc = bits (insn, 8, 11);
7874 unsigned int rn = bits (insn, 16, 19);
7876 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
7877 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
7878 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
7879 && (coproc & 0xe) != 0xa)
7881 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7882 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
7883 && (coproc & 0xe) != 0xa)
7884 /* ldc/ldc2 imm/lit. */
7885 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7886 else if ((op1 & 0x3e) == 0x00)
7887 return arm_copy_undef (gdbarch, insn, dsc);
7888 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
7889 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
7890 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
7891 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
7892 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
7893 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
7894 else if ((op1 & 0x30) == 0x20 && !op)
7896 if ((coproc & 0xe) == 0xa)
7897 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
7899 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7901 else if ((op1 & 0x30) == 0x20 && op)
7902 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
7903 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
7904 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
7905 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
7906 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
7907 else if ((op1 & 0x30) == 0x30)
7908 return arm_copy_svc (gdbarch, insn, regs, dsc);
7910 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
7914 thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
7915 uint16_t insn2, struct regcache *regs,
7916 struct displaced_step_closure *dsc)
7918 unsigned int coproc = bits (insn2, 8, 11);
7919 unsigned int op1 = bits (insn1, 4, 9);
7920 unsigned int bit_5_8 = bits (insn1, 5, 8);
7921 unsigned int bit_9 = bit (insn1, 9);
7922 unsigned int bit_4 = bit (insn1, 4);
7923 unsigned int rn = bits (insn1, 0, 3);
7928 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7929 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
7931 else if (bit_5_8 == 0) /* UNDEFINED. */
7932 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7935 /*coproc is 101x. SIMD/VFP, ext registers load/store. */
7936 if ((coproc & 0xe) == 0xa)
7937 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
7939 else /* coproc is not 101x. */
7941 if (bit_4 == 0) /* STC/STC2. */
7942 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7944 else /* LDC/LDC2 {literal, immeidate}. */
7945 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
7951 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
7957 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
7958 struct displaced_step_closure *dsc, int rd)
7964 Preparation: Rd <- PC
7970 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7971 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
7975 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
7976 struct displaced_step_closure *dsc,
7977 int rd, unsigned int imm)
7980 /* Encoding T2: ADDS Rd, #imm */
7981 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
7983 install_pc_relative (gdbarch, regs, dsc, rd);
7989 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
7990 struct regcache *regs,
7991 struct displaced_step_closure *dsc)
7993 unsigned int rd = bits (insn, 8, 10);
7994 unsigned int imm8 = bits (insn, 0, 7);
7996 if (debug_displaced)
7997 fprintf_unfiltered (gdb_stdlog,
7998 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
8001 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
8005 thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
8006 uint16_t insn2, struct regcache *regs,
8007 struct displaced_step_closure *dsc)
8009 unsigned int rd = bits (insn2, 8, 11);
8010 /* Since immediate has the same encoding in ADR ADD and SUB, so we simply
8011 extract raw immediate encoding rather than computing immediate. When
8012 generating ADD or SUB instruction, we can simply perform OR operation to
8013 set immediate into ADD. */
8014 unsigned int imm_3_8 = insn2 & 0x70ff;
8015 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
8017 if (debug_displaced)
8018 fprintf_unfiltered (gdb_stdlog,
8019 "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
8020 rd, imm_i, imm_3_8, insn1, insn2);
8022 if (bit (insn1, 7)) /* Encoding T2 */
8024 /* Encoding T3: SUB Rd, Rd, #imm */
8025 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
8026 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
8028 else /* Encoding T3 */
8030 /* Encoding T3: ADD Rd, Rd, #imm */
8031 dsc->modinsn[0] = (0xf100 | rd | imm_i);
8032 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
8036 install_pc_relative (gdbarch, regs, dsc, rd);
8042 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, unsigned short insn1,
8043 struct regcache *regs,
8044 struct displaced_step_closure *dsc)
8046 unsigned int rt = bits (insn1, 8, 10);
8048 int imm8 = (bits (insn1, 0, 7) << 2);
8049 CORE_ADDR from = dsc->insn_addr;
8055 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
8057 Insn: LDR R0, [R2, R3];
8058 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
8060 if (debug_displaced)
8061 fprintf_unfiltered (gdb_stdlog,
8062 "displaced: copying thumb ldr r%d [pc #%d]\n"
8065 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
8066 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
8067 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
8068 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
8069 /* The assembler calculates the required value of the offset from the
8070 Align(PC,4) value of this instruction to the label. */
8071 pc = pc & 0xfffffffc;
8073 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
8074 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
8077 dsc->u.ldst.xfersize = 4;
8079 dsc->u.ldst.immed = 0;
8080 dsc->u.ldst.writeback = 0;
8081 dsc->u.ldst.restore_r4 = 0;
8083 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/
8085 dsc->cleanup = &cleanup_load;
8090 /* Copy Thumb cbnz/cbz insruction. */
8093 thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
8094 struct regcache *regs,
8095 struct displaced_step_closure *dsc)
8097 int non_zero = bit (insn1, 11);
8098 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
8099 CORE_ADDR from = dsc->insn_addr;
8100 int rn = bits (insn1, 0, 2);
8101 int rn_val = displaced_read_reg (regs, dsc, rn);
8103 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
8104 /* CBNZ and CBZ do not affect the condition flags. If condition is true,
8105 set it INST_AL, so cleanup_branch will know branch is taken, otherwise,
8106 condition is false, let it be, cleanup_branch will do nothing. */
8107 if (dsc->u.branch.cond)
8109 dsc->u.branch.cond = INST_AL;
8110 dsc->u.branch.dest = from + 4 + imm5;
8113 dsc->u.branch.dest = from + 2;
8115 dsc->u.branch.link = 0;
8116 dsc->u.branch.exchange = 0;
8118 if (debug_displaced)
8119 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
8120 " insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
8121 rn, rn_val, insn1, dsc->u.branch.dest);
8123 dsc->modinsn[0] = THUMB_NOP;
8125 dsc->cleanup = &cleanup_branch;
8129 /* Copy Table Branch Byte/Halfword */
8131 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
8132 uint16_t insn2, struct regcache *regs,
8133 struct displaced_step_closure *dsc)
8135 ULONGEST rn_val, rm_val;
8136 int is_tbh = bit (insn2, 4);
8137 CORE_ADDR halfwords = 0;
8138 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8140 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
8141 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
8147 target_read_memory (rn_val + 2 * rm_val, buf, 2);
8148 halfwords = extract_unsigned_integer (buf, 2, byte_order);
8154 target_read_memory (rn_val + rm_val, buf, 1);
8155 halfwords = extract_unsigned_integer (buf, 1, byte_order);
8158 if (debug_displaced)
8159 fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x offset 0x%x"
8160 " offset 0x%x\n", is_tbh ? "tbh" : "tbb",
8161 (unsigned int) rn_val, (unsigned int) rm_val,
8162 (unsigned int) halfwords);
8164 dsc->u.branch.cond = INST_AL;
8165 dsc->u.branch.link = 0;
8166 dsc->u.branch.exchange = 0;
8167 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
8169 dsc->cleanup = &cleanup_branch;
8175 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
8176 struct displaced_step_closure *dsc)
8179 int val = displaced_read_reg (regs, dsc, 7);
8180 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
8183 val = displaced_read_reg (regs, dsc, 8);
8184 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
8187 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
8192 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, unsigned short insn1,
8193 struct regcache *regs,
8194 struct displaced_step_closure *dsc)
8196 dsc->u.block.regmask = insn1 & 0x00ff;
8198 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
8201 (1) register list is full, that is, r0-r7 are used.
8202 Prepare: tmp[0] <- r8
8204 POP {r0, r1, ...., r6, r7}; remove PC from reglist
8205 MOV r8, r7; Move value of r7 to r8;
8206 POP {r7}; Store PC value into r7.
8208 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
8210 (2) register list is not full, supposing there are N registers in
8211 register list (except PC, 0 <= N <= 7).
8212 Prepare: for each i, 0 - N, tmp[i] <- ri.
8214 POP {r0, r1, ...., rN};
8216 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
8217 from tmp[] properly.
8219 if (debug_displaced)
8220 fprintf_unfiltered (gdb_stdlog,
8221 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
8222 dsc->u.block.regmask, insn1);
8224 if (dsc->u.block.regmask == 0xff)
8226 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
8228 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
8229 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
8230 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
8233 dsc->cleanup = &cleanup_pop_pc_16bit_all;
8237 unsigned int num_in_list = bitcount (dsc->u.block.regmask);
8238 unsigned int new_regmask, bit = 1;
8239 unsigned int to = 0, from = 0, i, new_rn;
8241 for (i = 0; i < num_in_list + 1; i++)
8242 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
8244 new_regmask = (1 << (num_in_list + 1)) - 1;
8246 if (debug_displaced)
8247 fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
8248 "{..., pc}: original reg list %.4x,"
8249 " modified list %.4x\n"),
8250 (int) dsc->u.block.regmask, new_regmask);
8252 dsc->u.block.regmask |= 0x8000;
8253 dsc->u.block.writeback = 0;
8254 dsc->u.block.cond = INST_AL;
8256 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
8258 dsc->cleanup = &cleanup_block_load_pc;
8265 thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
8266 struct regcache *regs,
8267 struct displaced_step_closure *dsc)
8269 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
8270 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
8273 /* 16-bit thumb instructions. */
8274 switch (op_bit_12_15)
8276 /* Shift (imme), add, subtract, move and compare. */
8277 case 0: case 1: case 2: case 3:
8278 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
8279 "shift/add/sub/mov/cmp",
8283 switch (op_bit_10_11)
8285 case 0: /* Data-processing */
8286 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
8290 case 1: /* Special data instructions and branch and exchange. */
8292 unsigned short op = bits (insn1, 7, 9);
8293 if (op == 6 || op == 7) /* BX or BLX */
8294 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
8295 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
8296 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
8298 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
8302 default: /* LDR (literal) */
8303 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
8306 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
8307 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
8310 if (op_bit_10_11 < 2) /* Generate PC-relative address */
8311 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
8312 else /* Generate SP-relative address */
8313 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
8315 case 11: /* Misc 16-bit instructions */
8317 switch (bits (insn1, 8, 11))
8319 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
8320 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
8322 case 12: case 13: /* POP */
8323 if (bit (insn1, 8)) /* PC is in register list. */
8324 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
8326 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
8328 case 15: /* If-Then, and hints */
8329 if (bits (insn1, 0, 3))
8330 /* If-Then makes up to four following instructions conditional.
8331 IT instruction itself is not conditional, so handle it as a
8332 common unmodified instruction. */
8333 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
8336 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
8339 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
8344 if (op_bit_10_11 < 2) /* Store multiple registers */
8345 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
8346 else /* Load multiple registers */
8347 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
8349 case 13: /* Conditional branch and supervisor call */
8350 if (bits (insn1, 9, 11) != 7) /* conditional branch */
8351 err = thumb_copy_b (gdbarch, insn1, dsc);
8353 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
8355 case 14: /* Unconditional branch */
8356 err = thumb_copy_b (gdbarch, insn1, dsc);
8363 internal_error (__FILE__, __LINE__,
8364 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
8368 decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
8369 uint16_t insn1, uint16_t insn2,
8370 struct regcache *regs,
8371 struct displaced_step_closure *dsc)
8373 int rt = bits (insn2, 12, 15);
8374 int rn = bits (insn1, 0, 3);
8375 int op1 = bits (insn1, 7, 8);
8378 switch (bits (insn1, 5, 6))
8380 case 0: /* Load byte and memory hints */
8381 if (rt == 0xf) /* PLD/PLI */
8384 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
8385 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
8387 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8392 if (rn == 0xf) /* LDRB/LDRSB (literal) */
8393 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
8396 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8397 "ldrb{reg, immediate}/ldrbt",
8402 case 1: /* Load halfword and memory hints. */
8403 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
8404 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8405 "pld/unalloc memhint", dsc);
8409 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
8412 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8416 case 2: /* Load word */
8418 int insn2_bit_8_11 = bits (insn2, 8, 11);
8421 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
8422 else if (op1 == 0x1) /* Encoding T3 */
8423 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
8425 else /* op1 == 0x0 */
8427 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
8428 /* LDR (immediate) */
8429 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
8430 dsc, bit (insn2, 8), 1);
8431 else if (insn2_bit_8_11 == 0xe) /* LDRT */
8432 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8435 /* LDR (register) */
8436 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
8442 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
8449 thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
8450 uint16_t insn2, struct regcache *regs,
8451 struct displaced_step_closure *dsc)
8454 unsigned short op = bit (insn2, 15);
8455 unsigned int op1 = bits (insn1, 11, 12);
8461 switch (bits (insn1, 9, 10))
8466 /* Load/store {dual, execlusive}, table branch. */
8467 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
8468 && bits (insn2, 5, 7) == 0)
8469 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
8472 /* PC is not allowed to use in load/store {dual, exclusive}
8474 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8475 "load/store dual/ex", dsc);
8477 else /* load/store multiple */
8479 switch (bits (insn1, 7, 8))
8481 case 0: case 3: /* SRS, RFE */
8482 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8485 case 1: case 2: /* LDM/STM/PUSH/POP */
8486 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
8493 /* Data-processing (shift register). */
8494 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
8497 default: /* Coprocessor instructions. */
8498 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
8503 case 2: /* op1 = 2 */
8504 if (op) /* Branch and misc control. */
8506 if (bit (insn2, 14) /* BLX/BL */
8507 || bit (insn2, 12) /* Unconditional branch */
8508 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
8509 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
8511 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8516 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
8518 int op = bits (insn1, 4, 8);
8519 int rn = bits (insn1, 0, 3);
8520 if ((op == 0 || op == 0xa) && rn == 0xf)
8521 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
8524 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8527 else /* Data processing (modified immeidate) */
8528 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8532 case 3: /* op1 = 3 */
8533 switch (bits (insn1, 9, 10))
8537 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
8539 else /* NEON Load/Store and Store single data item */
8540 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8541 "neon elt/struct load/store",
8544 case 1: /* op1 = 3, bits (9, 10) == 1 */
8545 switch (bits (insn1, 7, 8))
8547 case 0: case 1: /* Data processing (register) */
8548 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8551 case 2: /* Multiply and absolute difference */
8552 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8553 "mul/mua/diff", dsc);
8555 case 3: /* Long multiply and divide */
8556 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8561 default: /* Coprocessor instructions */
8562 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
8571 internal_error (__FILE__, __LINE__,
8572 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
8577 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8578 CORE_ADDR to, struct regcache *regs,
8579 struct displaced_step_closure *dsc)
8581 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8583 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
8585 if (debug_displaced)
8586 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
8587 "at %.8lx\n", insn1, (unsigned long) from);
8590 dsc->insn_size = thumb_insn_size (insn1);
8591 if (thumb_insn_size (insn1) == 4)
8594 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
8595 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
8598 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
8602 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8603 CORE_ADDR to, struct regcache *regs,
8604 struct displaced_step_closure *dsc)
8607 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8610 /* Most displaced instructions use a 1-instruction scratch space, so set this
8611 here and override below if/when necessary. */
8613 dsc->insn_addr = from;
8614 dsc->scratch_base = to;
8615 dsc->cleanup = NULL;
8616 dsc->wrote_to_pc = 0;
8618 if (!displaced_in_arm_mode (regs))
8619 return thumb_process_displaced_insn (gdbarch, from, to, regs, dsc);
8623 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
8624 if (debug_displaced)
8625 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
8626 "at %.8lx\n", (unsigned long) insn,
8627 (unsigned long) from);
8629 if ((insn & 0xf0000000) == 0xf0000000)
8630 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
8631 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
8633 case 0x0: case 0x1: case 0x2: case 0x3:
8634 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
8637 case 0x4: case 0x5: case 0x6:
8638 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
8642 err = arm_decode_media (gdbarch, insn, dsc);
8645 case 0x8: case 0x9: case 0xa: case 0xb:
8646 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
8649 case 0xc: case 0xd: case 0xe: case 0xf:
8650 err = arm_decode_svc_copro (gdbarch, insn, to, regs, dsc);
8655 internal_error (__FILE__, __LINE__,
8656 _("arm_process_displaced_insn: Instruction decode error"));
8659 /* Actually set up the scratch space for a displaced instruction. */
8662 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
8663 CORE_ADDR to, struct displaced_step_closure *dsc)
8665 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8666 unsigned int i, len, offset;
8667 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8668 int size = dsc->is_thumb? 2 : 4;
8669 const gdb_byte *bkp_insn;
8672 /* Poke modified instruction(s). */
8673 for (i = 0; i < dsc->numinsns; i++)
8675 if (debug_displaced)
8677 fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
8679 fprintf_unfiltered (gdb_stdlog, "%.8lx",
8682 fprintf_unfiltered (gdb_stdlog, "%.4x",
8683 (unsigned short)dsc->modinsn[i]);
8685 fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
8686 (unsigned long) to + offset);
8689 write_memory_unsigned_integer (to + offset, size,
8690 byte_order_for_code,
8695 /* Choose the correct breakpoint instruction. */
8698 bkp_insn = tdep->thumb_breakpoint;
8699 len = tdep->thumb_breakpoint_size;
8703 bkp_insn = tdep->arm_breakpoint;
8704 len = tdep->arm_breakpoint_size;
8707 /* Put breakpoint afterwards. */
8708 write_memory (to + offset, bkp_insn, len);
8710 if (debug_displaced)
8711 fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
8712 paddress (gdbarch, from), paddress (gdbarch, to));
8715 /* Entry point for copying an instruction into scratch space for displaced
8718 struct displaced_step_closure *
8719 arm_displaced_step_copy_insn (struct gdbarch *gdbarch,
8720 CORE_ADDR from, CORE_ADDR to,
8721 struct regcache *regs)
8723 struct displaced_step_closure *dsc
8724 = xmalloc (sizeof (struct displaced_step_closure));
8725 arm_process_displaced_insn (gdbarch, from, to, regs, dsc);
8726 arm_displaced_init_closure (gdbarch, from, to, dsc);
8731 /* Entry point for cleaning things up after a displaced instruction has been
8735 arm_displaced_step_fixup (struct gdbarch *gdbarch,
8736 struct displaced_step_closure *dsc,
8737 CORE_ADDR from, CORE_ADDR to,
8738 struct regcache *regs)
8741 dsc->cleanup (gdbarch, regs, dsc);
8743 if (!dsc->wrote_to_pc)
8744 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
8745 dsc->insn_addr + dsc->insn_size);
8749 #include "bfd-in2.h"
8750 #include "libcoff.h"
8753 gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
8755 struct gdbarch *gdbarch = info->application_data;
8757 if (arm_pc_is_thumb (gdbarch, memaddr))
8759 static asymbol *asym;
8760 static combined_entry_type ce;
8761 static struct coff_symbol_struct csym;
8762 static struct bfd fake_bfd;
8763 static bfd_target fake_target;
8765 if (csym.native == NULL)
8767 /* Create a fake symbol vector containing a Thumb symbol.
8768 This is solely so that the code in print_insn_little_arm()
8769 and print_insn_big_arm() in opcodes/arm-dis.c will detect
8770 the presence of a Thumb symbol and switch to decoding
8771 Thumb instructions. */
8773 fake_target.flavour = bfd_target_coff_flavour;
8774 fake_bfd.xvec = &fake_target;
8775 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
8777 csym.symbol.the_bfd = &fake_bfd;
8778 csym.symbol.name = "fake";
8779 asym = (asymbol *) & csym;
8782 memaddr = UNMAKE_THUMB_ADDR (memaddr);
8783 info->symbols = &asym;
8786 info->symbols = NULL;
8788 if (info->endian == BFD_ENDIAN_BIG)
8789 return print_insn_big_arm (memaddr, info);
8791 return print_insn_little_arm (memaddr, info);
8794 /* The following define instruction sequences that will cause ARM
8795 cpu's to take an undefined instruction trap. These are used to
8796 signal a breakpoint to GDB.
8798 The newer ARMv4T cpu's are capable of operating in ARM or Thumb
8799 modes. A different instruction is required for each mode. The ARM
8800 cpu's can also be big or little endian. Thus four different
8801 instructions are needed to support all cases.
8803 Note: ARMv4 defines several new instructions that will take the
8804 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
8805 not in fact add the new instructions. The new undefined
8806 instructions in ARMv4 are all instructions that had no defined
8807 behaviour in earlier chips. There is no guarantee that they will
8808 raise an exception, but may be treated as NOP's. In practice, it
8809 may only safe to rely on instructions matching:
8811 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
8812 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
8813 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
8815 Even this may only true if the condition predicate is true. The
8816 following use a condition predicate of ALWAYS so it is always TRUE.
8818 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
8819 and NetBSD all use a software interrupt rather than an undefined
8820 instruction to force a trap. This can be handled by by the
8821 abi-specific code during establishment of the gdbarch vector. */
8823 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
8824 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
8825 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
8826 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
8828 static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
8829 static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
8830 static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
8831 static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
8833 /* Determine the type and size of breakpoint to insert at PCPTR. Uses
8834 the program counter value to determine whether a 16-bit or 32-bit
8835 breakpoint should be used. It returns a pointer to a string of
8836 bytes that encode a breakpoint instruction, stores the length of
8837 the string to *lenptr, and adjusts the program counter (if
8838 necessary) to point to the actual memory location where the
8839 breakpoint should be inserted. */
8841 static const unsigned char *
8842 arm_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr, int *lenptr)
8844 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8845 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8847 if (arm_pc_is_thumb (gdbarch, *pcptr))
8849 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
8851 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
8852 check whether we are replacing a 32-bit instruction. */
8853 if (tdep->thumb2_breakpoint != NULL)
8856 if (target_read_memory (*pcptr, buf, 2) == 0)
8858 unsigned short inst1;
8859 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
8860 if (thumb_insn_size (inst1) == 4)
8862 *lenptr = tdep->thumb2_breakpoint_size;
8863 return tdep->thumb2_breakpoint;
8868 *lenptr = tdep->thumb_breakpoint_size;
8869 return tdep->thumb_breakpoint;
8873 *lenptr = tdep->arm_breakpoint_size;
8874 return tdep->arm_breakpoint;
8879 arm_remote_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr,
8882 arm_breakpoint_from_pc (gdbarch, pcptr, kindptr);
8884 if (arm_pc_is_thumb (gdbarch, *pcptr) && *kindptr == 4)
8885 /* The documented magic value for a 32-bit Thumb-2 breakpoint, so
8886 that this is not confused with a 32-bit ARM breakpoint. */
8890 /* Extract from an array REGBUF containing the (raw) register state a
8891 function return value of type TYPE, and copy that, in virtual
8892 format, into VALBUF. */
8895 arm_extract_return_value (struct type *type, struct regcache *regs,
8898 struct gdbarch *gdbarch = get_regcache_arch (regs);
8899 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8901 if (TYPE_CODE_FLT == TYPE_CODE (type))
8903 switch (gdbarch_tdep (gdbarch)->fp_model)
8907 /* The value is in register F0 in internal format. We need to
8908 extract the raw value and then convert it to the desired
8910 bfd_byte tmpbuf[FP_REGISTER_SIZE];
8912 regcache_cooked_read (regs, ARM_F0_REGNUM, tmpbuf);
8913 convert_from_extended (floatformat_from_type (type), tmpbuf,
8914 valbuf, gdbarch_byte_order (gdbarch));
8918 case ARM_FLOAT_SOFT_FPA:
8919 case ARM_FLOAT_SOFT_VFP:
8920 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8921 not using the VFP ABI code. */
8923 regcache_cooked_read (regs, ARM_A1_REGNUM, valbuf);
8924 if (TYPE_LENGTH (type) > 4)
8925 regcache_cooked_read (regs, ARM_A1_REGNUM + 1,
8926 valbuf + INT_REGISTER_SIZE);
8930 internal_error (__FILE__, __LINE__,
8931 _("arm_extract_return_value: "
8932 "Floating point model not supported"));
8936 else if (TYPE_CODE (type) == TYPE_CODE_INT
8937 || TYPE_CODE (type) == TYPE_CODE_CHAR
8938 || TYPE_CODE (type) == TYPE_CODE_BOOL
8939 || TYPE_CODE (type) == TYPE_CODE_PTR
8940 || TYPE_CODE (type) == TYPE_CODE_REF
8941 || TYPE_CODE (type) == TYPE_CODE_ENUM)
8943 /* If the type is a plain integer, then the access is
8944 straight-forward. Otherwise we have to play around a bit
8946 int len = TYPE_LENGTH (type);
8947 int regno = ARM_A1_REGNUM;
8952 /* By using store_unsigned_integer we avoid having to do
8953 anything special for small big-endian values. */
8954 regcache_cooked_read_unsigned (regs, regno++, &tmp);
8955 store_unsigned_integer (valbuf,
8956 (len > INT_REGISTER_SIZE
8957 ? INT_REGISTER_SIZE : len),
8959 len -= INT_REGISTER_SIZE;
8960 valbuf += INT_REGISTER_SIZE;
8965 /* For a structure or union the behaviour is as if the value had
8966 been stored to word-aligned memory and then loaded into
8967 registers with 32-bit load instruction(s). */
8968 int len = TYPE_LENGTH (type);
8969 int regno = ARM_A1_REGNUM;
8970 bfd_byte tmpbuf[INT_REGISTER_SIZE];
8974 regcache_cooked_read (regs, regno++, tmpbuf);
8975 memcpy (valbuf, tmpbuf,
8976 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
8977 len -= INT_REGISTER_SIZE;
8978 valbuf += INT_REGISTER_SIZE;
8984 /* Will a function return an aggregate type in memory or in a
8985 register? Return 0 if an aggregate type can be returned in a
8986 register, 1 if it must be returned in memory. */
8989 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
8992 enum type_code code;
8994 CHECK_TYPEDEF (type);
8996 /* In the ARM ABI, "integer" like aggregate types are returned in
8997 registers. For an aggregate type to be integer like, its size
8998 must be less than or equal to INT_REGISTER_SIZE and the
8999 offset of each addressable subfield must be zero. Note that bit
9000 fields are not addressable, and all addressable subfields of
9001 unions always start at offset zero.
9003 This function is based on the behaviour of GCC 2.95.1.
9004 See: gcc/arm.c: arm_return_in_memory() for details.
9006 Note: All versions of GCC before GCC 2.95.2 do not set up the
9007 parameters correctly for a function returning the following
9008 structure: struct { float f;}; This should be returned in memory,
9009 not a register. Richard Earnshaw sent me a patch, but I do not
9010 know of any way to detect if a function like the above has been
9011 compiled with the correct calling convention. */
9013 /* All aggregate types that won't fit in a register must be returned
9015 if (TYPE_LENGTH (type) > INT_REGISTER_SIZE)
9020 /* The AAPCS says all aggregates not larger than a word are returned
9022 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
9025 /* The only aggregate types that can be returned in a register are
9026 structs and unions. Arrays must be returned in memory. */
9027 code = TYPE_CODE (type);
9028 if ((TYPE_CODE_STRUCT != code) && (TYPE_CODE_UNION != code))
9033 /* Assume all other aggregate types can be returned in a register.
9034 Run a check for structures, unions and arrays. */
9037 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
9040 /* Need to check if this struct/union is "integer" like. For
9041 this to be true, its size must be less than or equal to
9042 INT_REGISTER_SIZE and the offset of each addressable
9043 subfield must be zero. Note that bit fields are not
9044 addressable, and unions always start at offset zero. If any
9045 of the subfields is a floating point type, the struct/union
9046 cannot be an integer type. */
9048 /* For each field in the object, check:
9049 1) Is it FP? --> yes, nRc = 1;
9050 2) Is it addressable (bitpos != 0) and
9051 not packed (bitsize == 0)?
9055 for (i = 0; i < TYPE_NFIELDS (type); i++)
9057 enum type_code field_type_code;
9058 field_type_code = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
9061 /* Is it a floating point type field? */
9062 if (field_type_code == TYPE_CODE_FLT)
9068 /* If bitpos != 0, then we have to care about it. */
9069 if (TYPE_FIELD_BITPOS (type, i) != 0)
9071 /* Bitfields are not addressable. If the field bitsize is
9072 zero, then the field is not packed. Hence it cannot be
9073 a bitfield or any other packed type. */
9074 if (TYPE_FIELD_BITSIZE (type, i) == 0)
9086 /* Write into appropriate registers a function return value of type
9087 TYPE, given in virtual format. */
9090 arm_store_return_value (struct type *type, struct regcache *regs,
9091 const gdb_byte *valbuf)
9093 struct gdbarch *gdbarch = get_regcache_arch (regs);
9094 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
9096 if (TYPE_CODE (type) == TYPE_CODE_FLT)
9098 gdb_byte buf[MAX_REGISTER_SIZE];
9100 switch (gdbarch_tdep (gdbarch)->fp_model)
9104 convert_to_extended (floatformat_from_type (type), buf, valbuf,
9105 gdbarch_byte_order (gdbarch));
9106 regcache_cooked_write (regs, ARM_F0_REGNUM, buf);
9109 case ARM_FLOAT_SOFT_FPA:
9110 case ARM_FLOAT_SOFT_VFP:
9111 /* ARM_FLOAT_VFP can arise if this is a variadic function so
9112 not using the VFP ABI code. */
9114 regcache_cooked_write (regs, ARM_A1_REGNUM, valbuf);
9115 if (TYPE_LENGTH (type) > 4)
9116 regcache_cooked_write (regs, ARM_A1_REGNUM + 1,
9117 valbuf + INT_REGISTER_SIZE);
9121 internal_error (__FILE__, __LINE__,
9122 _("arm_store_return_value: Floating "
9123 "point model not supported"));
9127 else if (TYPE_CODE (type) == TYPE_CODE_INT
9128 || TYPE_CODE (type) == TYPE_CODE_CHAR
9129 || TYPE_CODE (type) == TYPE_CODE_BOOL
9130 || TYPE_CODE (type) == TYPE_CODE_PTR
9131 || TYPE_CODE (type) == TYPE_CODE_REF
9132 || TYPE_CODE (type) == TYPE_CODE_ENUM)
9134 if (TYPE_LENGTH (type) <= 4)
9136 /* Values of one word or less are zero/sign-extended and
9138 bfd_byte tmpbuf[INT_REGISTER_SIZE];
9139 LONGEST val = unpack_long (type, valbuf);
9141 store_signed_integer (tmpbuf, INT_REGISTER_SIZE, byte_order, val);
9142 regcache_cooked_write (regs, ARM_A1_REGNUM, tmpbuf);
9146 /* Integral values greater than one word are stored in consecutive
9147 registers starting with r0. This will always be a multiple of
9148 the regiser size. */
9149 int len = TYPE_LENGTH (type);
9150 int regno = ARM_A1_REGNUM;
9154 regcache_cooked_write (regs, regno++, valbuf);
9155 len -= INT_REGISTER_SIZE;
9156 valbuf += INT_REGISTER_SIZE;
9162 /* For a structure or union the behaviour is as if the value had
9163 been stored to word-aligned memory and then loaded into
9164 registers with 32-bit load instruction(s). */
9165 int len = TYPE_LENGTH (type);
9166 int regno = ARM_A1_REGNUM;
9167 bfd_byte tmpbuf[INT_REGISTER_SIZE];
9171 memcpy (tmpbuf, valbuf,
9172 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
9173 regcache_cooked_write (regs, regno++, tmpbuf);
9174 len -= INT_REGISTER_SIZE;
9175 valbuf += INT_REGISTER_SIZE;
9181 /* Handle function return values. */
9183 static enum return_value_convention
9184 arm_return_value (struct gdbarch *gdbarch, struct value *function,
9185 struct type *valtype, struct regcache *regcache,
9186 gdb_byte *readbuf, const gdb_byte *writebuf)
9188 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9189 struct type *func_type = function ? value_type (function) : NULL;
9190 enum arm_vfp_cprc_base_type vfp_base_type;
9193 if (arm_vfp_abi_for_function (gdbarch, func_type)
9194 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
9196 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
9197 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
9199 for (i = 0; i < vfp_base_count; i++)
9201 if (reg_char == 'q')
9204 arm_neon_quad_write (gdbarch, regcache, i,
9205 writebuf + i * unit_length);
9208 arm_neon_quad_read (gdbarch, regcache, i,
9209 readbuf + i * unit_length);
9216 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
9217 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9220 regcache_cooked_write (regcache, regnum,
9221 writebuf + i * unit_length);
9223 regcache_cooked_read (regcache, regnum,
9224 readbuf + i * unit_length);
9227 return RETURN_VALUE_REGISTER_CONVENTION;
9230 if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
9231 || TYPE_CODE (valtype) == TYPE_CODE_UNION
9232 || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
9234 if (tdep->struct_return == pcc_struct_return
9235 || arm_return_in_memory (gdbarch, valtype))
9236 return RETURN_VALUE_STRUCT_CONVENTION;
9239 /* AAPCS returns complex types longer than a register in memory. */
9240 if (tdep->arm_abi != ARM_ABI_APCS
9241 && TYPE_CODE (valtype) == TYPE_CODE_COMPLEX
9242 && TYPE_LENGTH (valtype) > INT_REGISTER_SIZE)
9243 return RETURN_VALUE_STRUCT_CONVENTION;
9246 arm_store_return_value (valtype, regcache, writebuf);
9249 arm_extract_return_value (valtype, regcache, readbuf);
9251 return RETURN_VALUE_REGISTER_CONVENTION;
9256 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
9258 struct gdbarch *gdbarch = get_frame_arch (frame);
9259 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9260 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
9262 gdb_byte buf[INT_REGISTER_SIZE];
9264 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
9266 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
9270 *pc = extract_unsigned_integer (buf, INT_REGISTER_SIZE, byte_order);
9274 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
9275 return the target PC. Otherwise return 0. */
9278 arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
9282 CORE_ADDR start_addr;
9284 /* Find the starting address and name of the function containing the PC. */
9285 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
9287 /* Trampoline 'bx reg' doesn't belong to any functions. Do the
9289 start_addr = arm_skip_bx_reg (frame, pc);
9290 if (start_addr != 0)
9296 /* If PC is in a Thumb call or return stub, return the address of the
9297 target PC, which is in a register. The thunk functions are called
9298 _call_via_xx, where x is the register name. The possible names
9299 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
9300 functions, named __ARM_call_via_r[0-7]. */
9301 if (strncmp (name, "_call_via_", 10) == 0
9302 || strncmp (name, "__ARM_call_via_", strlen ("__ARM_call_via_")) == 0)
9304 /* Use the name suffix to determine which register contains the
9306 static char *table[15] =
9307 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
9308 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
9311 int offset = strlen (name) - 2;
9313 for (regno = 0; regno <= 14; regno++)
9314 if (strcmp (&name[offset], table[regno]) == 0)
9315 return get_frame_register_unsigned (frame, regno);
9318 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
9319 non-interworking calls to foo. We could decode the stubs
9320 to find the target but it's easier to use the symbol table. */
9321 namelen = strlen (name);
9322 if (name[0] == '_' && name[1] == '_'
9323 && ((namelen > 2 + strlen ("_from_thumb")
9324 && strncmp (name + namelen - strlen ("_from_thumb"), "_from_thumb",
9325 strlen ("_from_thumb")) == 0)
9326 || (namelen > 2 + strlen ("_from_arm")
9327 && strncmp (name + namelen - strlen ("_from_arm"), "_from_arm",
9328 strlen ("_from_arm")) == 0)))
9331 int target_len = namelen - 2;
9332 struct bound_minimal_symbol minsym;
9333 struct objfile *objfile;
9334 struct obj_section *sec;
9336 if (name[namelen - 1] == 'b')
9337 target_len -= strlen ("_from_thumb");
9339 target_len -= strlen ("_from_arm");
9341 target_name = alloca (target_len + 1);
9342 memcpy (target_name, name + 2, target_len);
9343 target_name[target_len] = '\0';
9345 sec = find_pc_section (pc);
9346 objfile = (sec == NULL) ? NULL : sec->objfile;
9347 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
9348 if (minsym.minsym != NULL)
9349 return BMSYMBOL_VALUE_ADDRESS (minsym);
9354 return 0; /* not a stub */
9358 set_arm_command (char *args, int from_tty)
9360 printf_unfiltered (_("\
9361 \"set arm\" must be followed by an apporpriate subcommand.\n"));
9362 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
9366 show_arm_command (char *args, int from_tty)
9368 cmd_show_list (showarmcmdlist, from_tty, "");
9372 arm_update_current_architecture (void)
9374 struct gdbarch_info info;
9376 /* If the current architecture is not ARM, we have nothing to do. */
9377 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
9380 /* Update the architecture. */
9381 gdbarch_info_init (&info);
9383 if (!gdbarch_update_p (info))
9384 internal_error (__FILE__, __LINE__, _("could not update architecture"));
9388 set_fp_model_sfunc (char *args, int from_tty,
9389 struct cmd_list_element *c)
9391 enum arm_float_model fp_model;
9393 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
9394 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
9396 arm_fp_model = fp_model;
9400 if (fp_model == ARM_FLOAT_LAST)
9401 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
9404 arm_update_current_architecture ();
9408 show_fp_model (struct ui_file *file, int from_tty,
9409 struct cmd_list_element *c, const char *value)
9411 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
9413 if (arm_fp_model == ARM_FLOAT_AUTO
9414 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
9415 fprintf_filtered (file, _("\
9416 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
9417 fp_model_strings[tdep->fp_model]);
9419 fprintf_filtered (file, _("\
9420 The current ARM floating point model is \"%s\".\n"),
9421 fp_model_strings[arm_fp_model]);
9425 arm_set_abi (char *args, int from_tty,
9426 struct cmd_list_element *c)
9428 enum arm_abi_kind arm_abi;
9430 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
9431 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
9433 arm_abi_global = arm_abi;
9437 if (arm_abi == ARM_ABI_LAST)
9438 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
9441 arm_update_current_architecture ();
9445 arm_show_abi (struct ui_file *file, int from_tty,
9446 struct cmd_list_element *c, const char *value)
9448 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
9450 if (arm_abi_global == ARM_ABI_AUTO
9451 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
9452 fprintf_filtered (file, _("\
9453 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
9454 arm_abi_strings[tdep->arm_abi]);
9456 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
9461 arm_show_fallback_mode (struct ui_file *file, int from_tty,
9462 struct cmd_list_element *c, const char *value)
9464 fprintf_filtered (file,
9465 _("The current execution mode assumed "
9466 "(when symbols are unavailable) is \"%s\".\n"),
9467 arm_fallback_mode_string);
9471 arm_show_force_mode (struct ui_file *file, int from_tty,
9472 struct cmd_list_element *c, const char *value)
9474 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
9476 fprintf_filtered (file,
9477 _("The current execution mode assumed "
9478 "(even when symbols are available) is \"%s\".\n"),
9479 arm_force_mode_string);
9482 /* If the user changes the register disassembly style used for info
9483 register and other commands, we have to also switch the style used
9484 in opcodes for disassembly output. This function is run in the "set
9485 arm disassembly" command, and does that. */
9488 set_disassembly_style_sfunc (char *args, int from_tty,
9489 struct cmd_list_element *c)
9491 set_disassembly_style ();
9494 /* Return the ARM register name corresponding to register I. */
9496 arm_register_name (struct gdbarch *gdbarch, int i)
9498 const int num_regs = gdbarch_num_regs (gdbarch);
9500 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
9501 && i >= num_regs && i < num_regs + 32)
9503 static const char *const vfp_pseudo_names[] = {
9504 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
9505 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
9506 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
9507 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
9510 return vfp_pseudo_names[i - num_regs];
9513 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
9514 && i >= num_regs + 32 && i < num_regs + 32 + 16)
9516 static const char *const neon_pseudo_names[] = {
9517 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
9518 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
9521 return neon_pseudo_names[i - num_regs - 32];
9524 if (i >= ARRAY_SIZE (arm_register_names))
9525 /* These registers are only supported on targets which supply
9526 an XML description. */
9529 return arm_register_names[i];
9533 set_disassembly_style (void)
9537 /* Find the style that the user wants. */
9538 for (current = 0; current < num_disassembly_options; current++)
9539 if (disassembly_style == valid_disassembly_styles[current])
9541 gdb_assert (current < num_disassembly_options);
9543 /* Synchronize the disassembler. */
9544 set_arm_regname_option (current);
9547 /* Test whether the coff symbol specific value corresponds to a Thumb
9551 coff_sym_is_thumb (int val)
9553 return (val == C_THUMBEXT
9554 || val == C_THUMBSTAT
9555 || val == C_THUMBEXTFUNC
9556 || val == C_THUMBSTATFUNC
9557 || val == C_THUMBLABEL);
9560 /* arm_coff_make_msymbol_special()
9561 arm_elf_make_msymbol_special()
9563 These functions test whether the COFF or ELF symbol corresponds to
9564 an address in thumb code, and set a "special" bit in a minimal
9565 symbol to indicate that it does. */
9568 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
9570 if (ARM_SYM_BRANCH_TYPE (&((elf_symbol_type *)sym)->internal_elf_sym)
9571 == ST_BRANCH_TO_THUMB)
9572 MSYMBOL_SET_SPECIAL (msym);
9576 arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
9578 if (coff_sym_is_thumb (val))
9579 MSYMBOL_SET_SPECIAL (msym);
9583 arm_objfile_data_free (struct objfile *objfile, void *arg)
9585 struct arm_per_objfile *data = arg;
9588 for (i = 0; i < objfile->obfd->section_count; i++)
9589 VEC_free (arm_mapping_symbol_s, data->section_maps[i]);
9593 arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
9596 const char *name = bfd_asymbol_name (sym);
9597 struct arm_per_objfile *data;
9598 VEC(arm_mapping_symbol_s) **map_p;
9599 struct arm_mapping_symbol new_map_sym;
9601 gdb_assert (name[0] == '$');
9602 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
9605 data = objfile_data (objfile, arm_objfile_data_key);
9608 data = OBSTACK_ZALLOC (&objfile->objfile_obstack,
9609 struct arm_per_objfile);
9610 set_objfile_data (objfile, arm_objfile_data_key, data);
9611 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
9612 objfile->obfd->section_count,
9613 VEC(arm_mapping_symbol_s) *);
9615 map_p = &data->section_maps[bfd_get_section (sym)->index];
9617 new_map_sym.value = sym->value;
9618 new_map_sym.type = name[1];
9620 /* Assume that most mapping symbols appear in order of increasing
9621 value. If they were randomly distributed, it would be faster to
9622 always push here and then sort at first use. */
9623 if (!VEC_empty (arm_mapping_symbol_s, *map_p))
9625 struct arm_mapping_symbol *prev_map_sym;
9627 prev_map_sym = VEC_last (arm_mapping_symbol_s, *map_p);
9628 if (prev_map_sym->value >= sym->value)
9631 idx = VEC_lower_bound (arm_mapping_symbol_s, *map_p, &new_map_sym,
9632 arm_compare_mapping_symbols);
9633 VEC_safe_insert (arm_mapping_symbol_s, *map_p, idx, &new_map_sym);
9638 VEC_safe_push (arm_mapping_symbol_s, *map_p, &new_map_sym);
9642 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
9644 struct gdbarch *gdbarch = get_regcache_arch (regcache);
9645 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
9647 /* If necessary, set the T bit. */
9650 ULONGEST val, t_bit;
9651 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
9652 t_bit = arm_psr_thumb_bit (gdbarch);
9653 if (arm_pc_is_thumb (gdbarch, pc))
9654 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9657 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9662 /* Read the contents of a NEON quad register, by reading from two
9663 double registers. This is used to implement the quad pseudo
9664 registers, and for argument passing in case the quad registers are
9665 missing; vectors are passed in quad registers when using the VFP
9666 ABI, even if a NEON unit is not present. REGNUM is the index of
9667 the quad register, in [0, 15]. */
9669 static enum register_status
9670 arm_neon_quad_read (struct gdbarch *gdbarch, struct regcache *regcache,
9671 int regnum, gdb_byte *buf)
9674 gdb_byte reg_buf[8];
9675 int offset, double_regnum;
9676 enum register_status status;
9678 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
9679 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9682 /* d0 is always the least significant half of q0. */
9683 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9688 status = regcache_raw_read (regcache, double_regnum, reg_buf);
9689 if (status != REG_VALID)
9691 memcpy (buf + offset, reg_buf, 8);
9693 offset = 8 - offset;
9694 status = regcache_raw_read (regcache, double_regnum + 1, reg_buf);
9695 if (status != REG_VALID)
9697 memcpy (buf + offset, reg_buf, 8);
9702 static enum register_status
9703 arm_pseudo_read (struct gdbarch *gdbarch, struct regcache *regcache,
9704 int regnum, gdb_byte *buf)
9706 const int num_regs = gdbarch_num_regs (gdbarch);
9708 gdb_byte reg_buf[8];
9709 int offset, double_regnum;
9711 gdb_assert (regnum >= num_regs);
9714 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
9715 /* Quad-precision register. */
9716 return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
9719 enum register_status status;
9721 /* Single-precision register. */
9722 gdb_assert (regnum < 32);
9724 /* s0 is always the least significant half of d0. */
9725 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9726 offset = (regnum & 1) ? 0 : 4;
9728 offset = (regnum & 1) ? 4 : 0;
9730 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
9731 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9734 status = regcache_raw_read (regcache, double_regnum, reg_buf);
9735 if (status == REG_VALID)
9736 memcpy (buf, reg_buf + offset, 4);
9741 /* Store the contents of BUF to a NEON quad register, by writing to
9742 two double registers. This is used to implement the quad pseudo
9743 registers, and for argument passing in case the quad registers are
9744 missing; vectors are passed in quad registers when using the VFP
9745 ABI, even if a NEON unit is not present. REGNUM is the index
9746 of the quad register, in [0, 15]. */
9749 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
9750 int regnum, const gdb_byte *buf)
9753 int offset, double_regnum;
9755 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
9756 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9759 /* d0 is always the least significant half of q0. */
9760 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9765 regcache_raw_write (regcache, double_regnum, buf + offset);
9766 offset = 8 - offset;
9767 regcache_raw_write (regcache, double_regnum + 1, buf + offset);
9771 arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
9772 int regnum, const gdb_byte *buf)
9774 const int num_regs = gdbarch_num_regs (gdbarch);
9776 gdb_byte reg_buf[8];
9777 int offset, double_regnum;
9779 gdb_assert (regnum >= num_regs);
9782 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
9783 /* Quad-precision register. */
9784 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
9787 /* Single-precision register. */
9788 gdb_assert (regnum < 32);
9790 /* s0 is always the least significant half of d0. */
9791 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9792 offset = (regnum & 1) ? 0 : 4;
9794 offset = (regnum & 1) ? 4 : 0;
9796 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
9797 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9800 regcache_raw_read (regcache, double_regnum, reg_buf);
9801 memcpy (reg_buf + offset, buf, 4);
9802 regcache_raw_write (regcache, double_regnum, reg_buf);
9806 static struct value *
9807 value_of_arm_user_reg (struct frame_info *frame, const void *baton)
9809 const int *reg_p = baton;
9810 return value_of_register (*reg_p, frame);
9813 static enum gdb_osabi
9814 arm_elf_osabi_sniffer (bfd *abfd)
9816 unsigned int elfosabi;
9817 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
9819 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
9821 if (elfosabi == ELFOSABI_ARM)
9822 /* GNU tools use this value. Check note sections in this case,
9824 bfd_map_over_sections (abfd,
9825 generic_elf_osabi_sniff_abi_tag_sections,
9828 /* Anything else will be handled by the generic ELF sniffer. */
9833 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
9834 struct reggroup *group)
9836 /* FPS register's type is INT, but belongs to float_reggroup. Beside
9837 this, FPS register belongs to save_regroup, restore_reggroup, and
9838 all_reggroup, of course. */
9839 if (regnum == ARM_FPS_REGNUM)
9840 return (group == float_reggroup
9841 || group == save_reggroup
9842 || group == restore_reggroup
9843 || group == all_reggroup);
9845 return default_register_reggroup_p (gdbarch, regnum, group);
9849 /* For backward-compatibility we allow two 'g' packet lengths with
9850 the remote protocol depending on whether FPA registers are
9851 supplied. M-profile targets do not have FPA registers, but some
9852 stubs already exist in the wild which use a 'g' packet which
9853 supplies them albeit with dummy values. The packet format which
9854 includes FPA registers should be considered deprecated for
9855 M-profile targets. */
9858 arm_register_g_packet_guesses (struct gdbarch *gdbarch)
9860 if (gdbarch_tdep (gdbarch)->is_m)
9862 /* If we know from the executable this is an M-profile target,
9863 cater for remote targets whose register set layout is the
9864 same as the FPA layout. */
9865 register_remote_g_packet_guess (gdbarch,
9866 /* r0-r12,sp,lr,pc; f0-f7; fps,xpsr */
9867 (16 * INT_REGISTER_SIZE)
9868 + (8 * FP_REGISTER_SIZE)
9869 + (2 * INT_REGISTER_SIZE),
9870 tdesc_arm_with_m_fpa_layout);
9872 /* The regular M-profile layout. */
9873 register_remote_g_packet_guess (gdbarch,
9874 /* r0-r12,sp,lr,pc; xpsr */
9875 (16 * INT_REGISTER_SIZE)
9876 + INT_REGISTER_SIZE,
9879 /* M-profile plus M4F VFP. */
9880 register_remote_g_packet_guess (gdbarch,
9881 /* r0-r12,sp,lr,pc; d0-d15; fpscr,xpsr */
9882 (16 * INT_REGISTER_SIZE)
9883 + (16 * VFP_REGISTER_SIZE)
9884 + (2 * INT_REGISTER_SIZE),
9885 tdesc_arm_with_m_vfp_d16);
9888 /* Otherwise we don't have a useful guess. */
9892 /* Initialize the current architecture based on INFO. If possible,
9893 re-use an architecture from ARCHES, which is a list of
9894 architectures already created during this debugging session.
9896 Called e.g. at program startup, when reading a core file, and when
9897 reading a binary file. */
9899 static struct gdbarch *
9900 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
9902 struct gdbarch_tdep *tdep;
9903 struct gdbarch *gdbarch;
9904 struct gdbarch_list *best_arch;
9905 enum arm_abi_kind arm_abi = arm_abi_global;
9906 enum arm_float_model fp_model = arm_fp_model;
9907 struct tdesc_arch_data *tdesc_data = NULL;
9909 int have_vfp_registers = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
9911 int have_fpa_registers = 1;
9912 const struct target_desc *tdesc = info.target_desc;
9914 /* If we have an object to base this architecture on, try to determine
9917 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
9919 int ei_osabi, e_flags;
9921 switch (bfd_get_flavour (info.abfd))
9923 case bfd_target_aout_flavour:
9924 /* Assume it's an old APCS-style ABI. */
9925 arm_abi = ARM_ABI_APCS;
9928 case bfd_target_coff_flavour:
9929 /* Assume it's an old APCS-style ABI. */
9931 arm_abi = ARM_ABI_APCS;
9934 case bfd_target_elf_flavour:
9935 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
9936 e_flags = elf_elfheader (info.abfd)->e_flags;
9938 if (ei_osabi == ELFOSABI_ARM)
9940 /* GNU tools used to use this value, but do not for EABI
9941 objects. There's nowhere to tag an EABI version
9942 anyway, so assume APCS. */
9943 arm_abi = ARM_ABI_APCS;
9945 else if (ei_osabi == ELFOSABI_NONE)
9947 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
9948 int attr_arch, attr_profile;
9952 case EF_ARM_EABI_UNKNOWN:
9953 /* Assume GNU tools. */
9954 arm_abi = ARM_ABI_APCS;
9957 case EF_ARM_EABI_VER4:
9958 case EF_ARM_EABI_VER5:
9959 arm_abi = ARM_ABI_AAPCS;
9960 /* EABI binaries default to VFP float ordering.
9961 They may also contain build attributes that can
9962 be used to identify if the VFP argument-passing
9964 if (fp_model == ARM_FLOAT_AUTO)
9967 switch (bfd_elf_get_obj_attr_int (info.abfd,
9972 /* "The user intended FP parameter/result
9973 passing to conform to AAPCS, base
9975 fp_model = ARM_FLOAT_SOFT_VFP;
9978 /* "The user intended FP parameter/result
9979 passing to conform to AAPCS, VFP
9981 fp_model = ARM_FLOAT_VFP;
9984 /* "The user intended FP parameter/result
9985 passing to conform to tool chain-specific
9986 conventions" - we don't know any such
9987 conventions, so leave it as "auto". */
9990 /* Attribute value not mentioned in the
9991 October 2008 ABI, so leave it as
9996 fp_model = ARM_FLOAT_SOFT_VFP;
10002 /* Leave it as "auto". */
10003 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
10008 /* Detect M-profile programs. This only works if the
10009 executable file includes build attributes; GCC does
10010 copy them to the executable, but e.g. RealView does
10012 attr_arch = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
10014 attr_profile = bfd_elf_get_obj_attr_int (info.abfd,
10016 Tag_CPU_arch_profile);
10017 /* GCC specifies the profile for v6-M; RealView only
10018 specifies the profile for architectures starting with
10019 V7 (as opposed to architectures with a tag
10020 numerically greater than TAG_CPU_ARCH_V7). */
10021 if (!tdesc_has_registers (tdesc)
10022 && (attr_arch == TAG_CPU_ARCH_V6_M
10023 || attr_arch == TAG_CPU_ARCH_V6S_M
10024 || attr_profile == 'M'))
10029 if (fp_model == ARM_FLOAT_AUTO)
10031 int e_flags = elf_elfheader (info.abfd)->e_flags;
10033 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
10036 /* Leave it as "auto". Strictly speaking this case
10037 means FPA, but almost nobody uses that now, and
10038 many toolchains fail to set the appropriate bits
10039 for the floating-point model they use. */
10041 case EF_ARM_SOFT_FLOAT:
10042 fp_model = ARM_FLOAT_SOFT_FPA;
10044 case EF_ARM_VFP_FLOAT:
10045 fp_model = ARM_FLOAT_VFP;
10047 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
10048 fp_model = ARM_FLOAT_SOFT_VFP;
10053 if (e_flags & EF_ARM_BE8)
10054 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
10059 /* Leave it as "auto". */
10064 /* Check any target description for validity. */
10065 if (tdesc_has_registers (tdesc))
10067 /* For most registers we require GDB's default names; but also allow
10068 the numeric names for sp / lr / pc, as a convenience. */
10069 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
10070 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
10071 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
10073 const struct tdesc_feature *feature;
10076 feature = tdesc_find_feature (tdesc,
10077 "org.gnu.gdb.arm.core");
10078 if (feature == NULL)
10080 feature = tdesc_find_feature (tdesc,
10081 "org.gnu.gdb.arm.m-profile");
10082 if (feature == NULL)
10088 tdesc_data = tdesc_data_alloc ();
10091 for (i = 0; i < ARM_SP_REGNUM; i++)
10092 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
10093 arm_register_names[i]);
10094 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
10097 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
10100 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
10104 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10105 ARM_PS_REGNUM, "xpsr");
10107 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10108 ARM_PS_REGNUM, "cpsr");
10112 tdesc_data_cleanup (tdesc_data);
10116 feature = tdesc_find_feature (tdesc,
10117 "org.gnu.gdb.arm.fpa");
10118 if (feature != NULL)
10121 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
10122 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
10123 arm_register_names[i]);
10126 tdesc_data_cleanup (tdesc_data);
10131 have_fpa_registers = 0;
10133 feature = tdesc_find_feature (tdesc,
10134 "org.gnu.gdb.xscale.iwmmxt");
10135 if (feature != NULL)
10137 static const char *const iwmmxt_names[] = {
10138 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
10139 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
10140 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
10141 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
10145 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
10147 &= tdesc_numbered_register (feature, tdesc_data, i,
10148 iwmmxt_names[i - ARM_WR0_REGNUM]);
10150 /* Check for the control registers, but do not fail if they
10152 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
10153 tdesc_numbered_register (feature, tdesc_data, i,
10154 iwmmxt_names[i - ARM_WR0_REGNUM]);
10156 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
10158 &= tdesc_numbered_register (feature, tdesc_data, i,
10159 iwmmxt_names[i - ARM_WR0_REGNUM]);
10163 tdesc_data_cleanup (tdesc_data);
10168 /* If we have a VFP unit, check whether the single precision registers
10169 are present. If not, then we will synthesize them as pseudo
10171 feature = tdesc_find_feature (tdesc,
10172 "org.gnu.gdb.arm.vfp");
10173 if (feature != NULL)
10175 static const char *const vfp_double_names[] = {
10176 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
10177 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
10178 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
10179 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
10182 /* Require the double precision registers. There must be either
10185 for (i = 0; i < 32; i++)
10187 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10189 vfp_double_names[i]);
10193 if (!valid_p && i == 16)
10196 /* Also require FPSCR. */
10197 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10198 ARM_FPSCR_REGNUM, "fpscr");
10201 tdesc_data_cleanup (tdesc_data);
10205 if (tdesc_unnumbered_register (feature, "s0") == 0)
10206 have_vfp_pseudos = 1;
10208 have_vfp_registers = 1;
10210 /* If we have VFP, also check for NEON. The architecture allows
10211 NEON without VFP (integer vector operations only), but GDB
10212 does not support that. */
10213 feature = tdesc_find_feature (tdesc,
10214 "org.gnu.gdb.arm.neon");
10215 if (feature != NULL)
10217 /* NEON requires 32 double-precision registers. */
10220 tdesc_data_cleanup (tdesc_data);
10224 /* If there are quad registers defined by the stub, use
10225 their type; otherwise (normally) provide them with
10226 the default type. */
10227 if (tdesc_unnumbered_register (feature, "q0") == 0)
10228 have_neon_pseudos = 1;
10235 /* If there is already a candidate, use it. */
10236 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
10238 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
10240 if (arm_abi != ARM_ABI_AUTO
10241 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
10244 if (fp_model != ARM_FLOAT_AUTO
10245 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
10248 /* There are various other properties in tdep that we do not
10249 need to check here: those derived from a target description,
10250 since gdbarches with a different target description are
10251 automatically disqualified. */
10253 /* Do check is_m, though, since it might come from the binary. */
10254 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
10257 /* Found a match. */
10261 if (best_arch != NULL)
10263 if (tdesc_data != NULL)
10264 tdesc_data_cleanup (tdesc_data);
10265 return best_arch->gdbarch;
10268 tdep = xcalloc (1, sizeof (struct gdbarch_tdep));
10269 gdbarch = gdbarch_alloc (&info, tdep);
10271 /* Record additional information about the architecture we are defining.
10272 These are gdbarch discriminators, like the OSABI. */
10273 tdep->arm_abi = arm_abi;
10274 tdep->fp_model = fp_model;
10276 tdep->have_fpa_registers = have_fpa_registers;
10277 tdep->have_vfp_registers = have_vfp_registers;
10278 tdep->have_vfp_pseudos = have_vfp_pseudos;
10279 tdep->have_neon_pseudos = have_neon_pseudos;
10280 tdep->have_neon = have_neon;
10282 arm_register_g_packet_guesses (gdbarch);
10285 switch (info.byte_order_for_code)
10287 case BFD_ENDIAN_BIG:
10288 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
10289 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
10290 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
10291 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
10295 case BFD_ENDIAN_LITTLE:
10296 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
10297 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
10298 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
10299 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
10304 internal_error (__FILE__, __LINE__,
10305 _("arm_gdbarch_init: bad byte order for float format"));
10308 /* On ARM targets char defaults to unsigned. */
10309 set_gdbarch_char_signed (gdbarch, 0);
10311 /* Note: for displaced stepping, this includes the breakpoint, and one word
10312 of additional scratch space. This setting isn't used for anything beside
10313 displaced stepping at present. */
10314 set_gdbarch_max_insn_length (gdbarch, 4 * DISPLACED_MODIFIED_INSNS);
10316 /* This should be low enough for everything. */
10317 tdep->lowest_pc = 0x20;
10318 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
10320 /* The default, for both APCS and AAPCS, is to return small
10321 structures in registers. */
10322 tdep->struct_return = reg_struct_return;
10324 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
10325 set_gdbarch_frame_align (gdbarch, arm_frame_align);
10327 set_gdbarch_write_pc (gdbarch, arm_write_pc);
10329 /* Frame handling. */
10330 set_gdbarch_dummy_id (gdbarch, arm_dummy_id);
10331 set_gdbarch_unwind_pc (gdbarch, arm_unwind_pc);
10332 set_gdbarch_unwind_sp (gdbarch, arm_unwind_sp);
10334 frame_base_set_default (gdbarch, &arm_normal_base);
10336 /* Address manipulation. */
10337 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
10339 /* Advance PC across function entry code. */
10340 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
10342 /* Detect whether PC is in function epilogue. */
10343 set_gdbarch_in_function_epilogue_p (gdbarch, arm_in_function_epilogue_p);
10345 /* Skip trampolines. */
10346 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
10348 /* The stack grows downward. */
10349 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
10351 /* Breakpoint manipulation. */
10352 set_gdbarch_breakpoint_from_pc (gdbarch, arm_breakpoint_from_pc);
10353 set_gdbarch_remote_breakpoint_from_pc (gdbarch,
10354 arm_remote_breakpoint_from_pc);
10356 /* Information about registers, etc. */
10357 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
10358 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
10359 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
10360 set_gdbarch_register_type (gdbarch, arm_register_type);
10361 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
10363 /* This "info float" is FPA-specific. Use the generic version if we
10364 do not have FPA. */
10365 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
10366 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
10368 /* Internal <-> external register number maps. */
10369 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
10370 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
10372 set_gdbarch_register_name (gdbarch, arm_register_name);
10374 /* Returning results. */
10375 set_gdbarch_return_value (gdbarch, arm_return_value);
10378 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
10380 /* Minsymbol frobbing. */
10381 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
10382 set_gdbarch_coff_make_msymbol_special (gdbarch,
10383 arm_coff_make_msymbol_special);
10384 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
10386 /* Thumb-2 IT block support. */
10387 set_gdbarch_adjust_breakpoint_address (gdbarch,
10388 arm_adjust_breakpoint_address);
10390 /* Virtual tables. */
10391 set_gdbarch_vbit_in_delta (gdbarch, 1);
10393 /* Hook in the ABI-specific overrides, if they have been registered. */
10394 gdbarch_init_osabi (info, gdbarch);
10396 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
10398 /* Add some default predicates. */
10400 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
10401 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
10402 dwarf2_append_unwinders (gdbarch);
10403 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
10404 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
10406 /* Now we have tuned the configuration, set a few final things,
10407 based on what the OS ABI has told us. */
10409 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
10410 binaries are always marked. */
10411 if (tdep->arm_abi == ARM_ABI_AUTO)
10412 tdep->arm_abi = ARM_ABI_APCS;
10414 /* Watchpoints are not steppable. */
10415 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
10417 /* We used to default to FPA for generic ARM, but almost nobody
10418 uses that now, and we now provide a way for the user to force
10419 the model. So default to the most useful variant. */
10420 if (tdep->fp_model == ARM_FLOAT_AUTO)
10421 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
10423 if (tdep->jb_pc >= 0)
10424 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
10426 /* Floating point sizes and format. */
10427 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
10428 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
10430 set_gdbarch_double_format
10431 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
10432 set_gdbarch_long_double_format
10433 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
10437 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
10438 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
10441 if (have_vfp_pseudos)
10443 /* NOTE: These are the only pseudo registers used by
10444 the ARM target at the moment. If more are added, a
10445 little more care in numbering will be needed. */
10447 int num_pseudos = 32;
10448 if (have_neon_pseudos)
10450 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
10451 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
10452 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
10457 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
10459 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
10461 /* Override tdesc_register_type to adjust the types of VFP
10462 registers for NEON. */
10463 set_gdbarch_register_type (gdbarch, arm_register_type);
10466 /* Add standard register aliases. We add aliases even for those
10467 nanes which are used by the current architecture - it's simpler,
10468 and does no harm, since nothing ever lists user registers. */
10469 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
10470 user_reg_add (gdbarch, arm_register_aliases[i].name,
10471 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
10477 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
10479 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
10484 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
10485 (unsigned long) tdep->lowest_pc);
10488 extern initialize_file_ftype _initialize_arm_tdep; /* -Wmissing-prototypes */
10491 _initialize_arm_tdep (void)
10493 struct ui_file *stb;
10495 struct cmd_list_element *new_set, *new_show;
10496 const char *setname;
10497 const char *setdesc;
10498 const char *const *regnames;
10500 static char *helptext;
10501 char regdesc[1024], *rdptr = regdesc;
10502 size_t rest = sizeof (regdesc);
10504 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
10506 arm_objfile_data_key
10507 = register_objfile_data_with_cleanup (NULL, arm_objfile_data_free);
10509 /* Add ourselves to objfile event chain. */
10510 observer_attach_new_objfile (arm_exidx_new_objfile);
10512 = register_objfile_data_with_cleanup (NULL, arm_exidx_data_free);
10514 /* Register an ELF OS ABI sniffer for ARM binaries. */
10515 gdbarch_register_osabi_sniffer (bfd_arch_arm,
10516 bfd_target_elf_flavour,
10517 arm_elf_osabi_sniffer);
10519 /* Initialize the standard target descriptions. */
10520 initialize_tdesc_arm_with_m ();
10521 initialize_tdesc_arm_with_m_fpa_layout ();
10522 initialize_tdesc_arm_with_m_vfp_d16 ();
10523 initialize_tdesc_arm_with_iwmmxt ();
10524 initialize_tdesc_arm_with_vfpv2 ();
10525 initialize_tdesc_arm_with_vfpv3 ();
10526 initialize_tdesc_arm_with_neon ();
10528 /* Get the number of possible sets of register names defined in opcodes. */
10529 num_disassembly_options = get_arm_regname_num_options ();
10531 /* Add root prefix command for all "set arm"/"show arm" commands. */
10532 add_prefix_cmd ("arm", no_class, set_arm_command,
10533 _("Various ARM-specific commands."),
10534 &setarmcmdlist, "set arm ", 0, &setlist);
10536 add_prefix_cmd ("arm", no_class, show_arm_command,
10537 _("Various ARM-specific commands."),
10538 &showarmcmdlist, "show arm ", 0, &showlist);
10540 /* Sync the opcode insn printer with our register viewer. */
10541 parse_arm_disassembler_option ("reg-names-std");
10543 /* Initialize the array that will be passed to
10544 add_setshow_enum_cmd(). */
10545 valid_disassembly_styles
10546 = xmalloc ((num_disassembly_options + 1) * sizeof (char *));
10547 for (i = 0; i < num_disassembly_options; i++)
10549 numregs = get_arm_regnames (i, &setname, &setdesc, ®names);
10550 valid_disassembly_styles[i] = setname;
10551 length = snprintf (rdptr, rest, "%s - %s\n", setname, setdesc);
10554 /* When we find the default names, tell the disassembler to use
10556 if (!strcmp (setname, "std"))
10558 disassembly_style = setname;
10559 set_arm_regname_option (i);
10562 /* Mark the end of valid options. */
10563 valid_disassembly_styles[num_disassembly_options] = NULL;
10565 /* Create the help text. */
10566 stb = mem_fileopen ();
10567 fprintf_unfiltered (stb, "%s%s%s",
10568 _("The valid values are:\n"),
10570 _("The default is \"std\"."));
10571 helptext = ui_file_xstrdup (stb, NULL);
10572 ui_file_delete (stb);
10574 add_setshow_enum_cmd("disassembler", no_class,
10575 valid_disassembly_styles, &disassembly_style,
10576 _("Set the disassembly style."),
10577 _("Show the disassembly style."),
10579 set_disassembly_style_sfunc,
10580 NULL, /* FIXME: i18n: The disassembly style is
10582 &setarmcmdlist, &showarmcmdlist);
10584 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
10585 _("Set usage of ARM 32-bit mode."),
10586 _("Show usage of ARM 32-bit mode."),
10587 _("When off, a 26-bit PC will be used."),
10589 NULL, /* FIXME: i18n: Usage of ARM 32-bit
10591 &setarmcmdlist, &showarmcmdlist);
10593 /* Add a command to allow the user to force the FPU model. */
10594 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, ¤t_fp_model,
10595 _("Set the floating point type."),
10596 _("Show the floating point type."),
10597 _("auto - Determine the FP typefrom the OS-ABI.\n\
10598 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
10599 fpa - FPA co-processor (GCC compiled).\n\
10600 softvfp - Software FP with pure-endian doubles.\n\
10601 vfp - VFP co-processor."),
10602 set_fp_model_sfunc, show_fp_model,
10603 &setarmcmdlist, &showarmcmdlist);
10605 /* Add a command to allow the user to force the ABI. */
10606 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
10608 _("Show the ABI."),
10609 NULL, arm_set_abi, arm_show_abi,
10610 &setarmcmdlist, &showarmcmdlist);
10612 /* Add two commands to allow the user to force the assumed
10614 add_setshow_enum_cmd ("fallback-mode", class_support,
10615 arm_mode_strings, &arm_fallback_mode_string,
10616 _("Set the mode assumed when symbols are unavailable."),
10617 _("Show the mode assumed when symbols are unavailable."),
10618 NULL, NULL, arm_show_fallback_mode,
10619 &setarmcmdlist, &showarmcmdlist);
10620 add_setshow_enum_cmd ("force-mode", class_support,
10621 arm_mode_strings, &arm_force_mode_string,
10622 _("Set the mode assumed even when symbols are available."),
10623 _("Show the mode assumed even when symbols are available."),
10624 NULL, NULL, arm_show_force_mode,
10625 &setarmcmdlist, &showarmcmdlist);
10627 /* Debugging flag. */
10628 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
10629 _("Set ARM debugging."),
10630 _("Show ARM debugging."),
10631 _("When on, arm-specific debugging is enabled."),
10633 NULL, /* FIXME: i18n: "ARM debugging is %s. */
10634 &setdebuglist, &showdebuglist);
10637 /* ARM-reversible process record data structures. */
10639 #define ARM_INSN_SIZE_BYTES 4
10640 #define THUMB_INSN_SIZE_BYTES 2
10641 #define THUMB2_INSN_SIZE_BYTES 4
10644 /* Position of the bit within a 32-bit ARM instruction
10645 that defines whether the instruction is a load or store. */
10646 #define INSN_S_L_BIT_NUM 20
10648 #define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
10651 unsigned int reg_len = LENGTH; \
10654 REGS = XNEWVEC (uint32_t, reg_len); \
10655 memcpy(®S[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
10660 #define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
10663 unsigned int mem_len = LENGTH; \
10666 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
10667 memcpy(&MEMS->len, &RECORD_BUF[0], \
10668 sizeof(struct arm_mem_r) * LENGTH); \
10673 /* Checks whether insn is already recorded or yet to be decoded. (boolean expression). */
10674 #define INSN_RECORDED(ARM_RECORD) \
10675 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
10677 /* ARM memory record structure. */
10680 uint32_t len; /* Record length. */
10681 uint32_t addr; /* Memory address. */
10684 /* ARM instruction record contains opcode of current insn
10685 and execution state (before entry to decode_insn()),
10686 contains list of to-be-modified registers and
10687 memory blocks (on return from decode_insn()). */
10689 typedef struct insn_decode_record_t
10691 struct gdbarch *gdbarch;
10692 struct regcache *regcache;
10693 CORE_ADDR this_addr; /* Address of the insn being decoded. */
10694 uint32_t arm_insn; /* Should accommodate thumb. */
10695 uint32_t cond; /* Condition code. */
10696 uint32_t opcode; /* Insn opcode. */
10697 uint32_t decode; /* Insn decode bits. */
10698 uint32_t mem_rec_count; /* No of mem records. */
10699 uint32_t reg_rec_count; /* No of reg records. */
10700 uint32_t *arm_regs; /* Registers to be saved for this record. */
10701 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
10702 } insn_decode_record;
10705 /* Checks ARM SBZ and SBO mandatory fields. */
10708 sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
10710 uint32_t ones = bits (insn, bit_num - 1, (bit_num -1) + (len - 1));
10729 enum arm_record_result
10731 ARM_RECORD_SUCCESS = 0,
10732 ARM_RECORD_FAILURE = 1
10739 } arm_record_strx_t;
10750 arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
10751 uint32_t *record_buf_mem, arm_record_strx_t str_type)
10754 struct regcache *reg_cache = arm_insn_r->regcache;
10755 ULONGEST u_regval[2]= {0};
10757 uint32_t reg_src1 = 0, reg_src2 = 0;
10758 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
10759 uint32_t opcode1 = 0;
10761 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10762 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10763 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10766 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10768 /* 1) Handle misc store, immediate offset. */
10769 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
10770 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
10771 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10772 regcache_raw_read_unsigned (reg_cache, reg_src1,
10774 if (ARM_PC_REGNUM == reg_src1)
10776 /* If R15 was used as Rn, hence current PC+8. */
10777 u_regval[0] = u_regval[0] + 8;
10779 offset_8 = (immed_high << 4) | immed_low;
10780 /* Calculate target store address. */
10781 if (14 == arm_insn_r->opcode)
10783 tgt_mem_addr = u_regval[0] + offset_8;
10787 tgt_mem_addr = u_regval[0] - offset_8;
10789 if (ARM_RECORD_STRH == str_type)
10791 record_buf_mem[0] = 2;
10792 record_buf_mem[1] = tgt_mem_addr;
10793 arm_insn_r->mem_rec_count = 1;
10795 else if (ARM_RECORD_STRD == str_type)
10797 record_buf_mem[0] = 4;
10798 record_buf_mem[1] = tgt_mem_addr;
10799 record_buf_mem[2] = 4;
10800 record_buf_mem[3] = tgt_mem_addr + 4;
10801 arm_insn_r->mem_rec_count = 2;
10804 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
10806 /* 2) Store, register offset. */
10808 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10810 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10811 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10812 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10813 if (15 == reg_src2)
10815 /* If R15 was used as Rn, hence current PC+8. */
10816 u_regval[0] = u_regval[0] + 8;
10818 /* Calculate target store address, Rn +/- Rm, register offset. */
10819 if (12 == arm_insn_r->opcode)
10821 tgt_mem_addr = u_regval[0] + u_regval[1];
10825 tgt_mem_addr = u_regval[1] - u_regval[0];
10827 if (ARM_RECORD_STRH == str_type)
10829 record_buf_mem[0] = 2;
10830 record_buf_mem[1] = tgt_mem_addr;
10831 arm_insn_r->mem_rec_count = 1;
10833 else if (ARM_RECORD_STRD == str_type)
10835 record_buf_mem[0] = 4;
10836 record_buf_mem[1] = tgt_mem_addr;
10837 record_buf_mem[2] = 4;
10838 record_buf_mem[3] = tgt_mem_addr + 4;
10839 arm_insn_r->mem_rec_count = 2;
10842 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10843 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
10845 /* 3) Store, immediate pre-indexed. */
10846 /* 5) Store, immediate post-indexed. */
10847 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
10848 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
10849 offset_8 = (immed_high << 4) | immed_low;
10850 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10851 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10852 /* Calculate target store address, Rn +/- Rm, register offset. */
10853 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
10855 tgt_mem_addr = u_regval[0] + offset_8;
10859 tgt_mem_addr = u_regval[0] - offset_8;
10861 if (ARM_RECORD_STRH == str_type)
10863 record_buf_mem[0] = 2;
10864 record_buf_mem[1] = tgt_mem_addr;
10865 arm_insn_r->mem_rec_count = 1;
10867 else if (ARM_RECORD_STRD == str_type)
10869 record_buf_mem[0] = 4;
10870 record_buf_mem[1] = tgt_mem_addr;
10871 record_buf_mem[2] = 4;
10872 record_buf_mem[3] = tgt_mem_addr + 4;
10873 arm_insn_r->mem_rec_count = 2;
10875 /* Record Rn also as it changes. */
10876 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
10877 arm_insn_r->reg_rec_count = 1;
10879 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
10880 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
10882 /* 4) Store, register pre-indexed. */
10883 /* 6) Store, register post -indexed. */
10884 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10885 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10886 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10887 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10888 /* Calculate target store address, Rn +/- Rm, register offset. */
10889 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
10891 tgt_mem_addr = u_regval[0] + u_regval[1];
10895 tgt_mem_addr = u_regval[1] - u_regval[0];
10897 if (ARM_RECORD_STRH == str_type)
10899 record_buf_mem[0] = 2;
10900 record_buf_mem[1] = tgt_mem_addr;
10901 arm_insn_r->mem_rec_count = 1;
10903 else if (ARM_RECORD_STRD == str_type)
10905 record_buf_mem[0] = 4;
10906 record_buf_mem[1] = tgt_mem_addr;
10907 record_buf_mem[2] = 4;
10908 record_buf_mem[3] = tgt_mem_addr + 4;
10909 arm_insn_r->mem_rec_count = 2;
10911 /* Record Rn also as it changes. */
10912 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
10913 arm_insn_r->reg_rec_count = 1;
10918 /* Handling ARM extension space insns. */
10921 arm_record_extension_space (insn_decode_record *arm_insn_r)
10923 uint32_t ret = 0; /* Return value: -1:record failure ; 0:success */
10924 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
10925 uint32_t record_buf[8], record_buf_mem[8];
10926 uint32_t reg_src1 = 0;
10927 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
10928 struct regcache *reg_cache = arm_insn_r->regcache;
10929 ULONGEST u_regval = 0;
10931 gdb_assert (!INSN_RECORDED(arm_insn_r));
10932 /* Handle unconditional insn extension space. */
10934 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
10935 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10936 if (arm_insn_r->cond)
10938 /* PLD has no affect on architectural state, it just affects
10940 if (5 == ((opcode1 & 0xE0) >> 5))
10943 record_buf[0] = ARM_PS_REGNUM;
10944 record_buf[1] = ARM_LR_REGNUM;
10945 arm_insn_r->reg_rec_count = 2;
10947 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
10951 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10952 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
10955 /* Undefined instruction on ARM V5; need to handle if later
10956 versions define it. */
10959 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
10960 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10961 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
10963 /* Handle arithmetic insn extension space. */
10964 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
10965 && !INSN_RECORDED(arm_insn_r))
10967 /* Handle MLA(S) and MUL(S). */
10968 if (0 <= insn_op1 && 3 >= insn_op1)
10970 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10971 record_buf[1] = ARM_PS_REGNUM;
10972 arm_insn_r->reg_rec_count = 2;
10974 else if (4 <= insn_op1 && 15 >= insn_op1)
10976 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
10977 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10978 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10979 record_buf[2] = ARM_PS_REGNUM;
10980 arm_insn_r->reg_rec_count = 3;
10984 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
10985 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
10986 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
10988 /* Handle control insn extension space. */
10990 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
10991 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
10993 if (!bit (arm_insn_r->arm_insn,25))
10995 if (!bits (arm_insn_r->arm_insn, 4, 7))
10997 if ((0 == insn_op1) || (2 == insn_op1))
11000 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11001 arm_insn_r->reg_rec_count = 1;
11003 else if (1 == insn_op1)
11005 /* CSPR is going to be changed. */
11006 record_buf[0] = ARM_PS_REGNUM;
11007 arm_insn_r->reg_rec_count = 1;
11009 else if (3 == insn_op1)
11011 /* SPSR is going to be changed. */
11012 /* We need to get SPSR value, which is yet to be done. */
11013 printf_unfiltered (_("Process record does not support "
11014 "instruction 0x%0x at address %s.\n"),
11015 arm_insn_r->arm_insn,
11016 paddress (arm_insn_r->gdbarch,
11017 arm_insn_r->this_addr));
11021 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
11026 record_buf[0] = ARM_PS_REGNUM;
11027 arm_insn_r->reg_rec_count = 1;
11029 else if (3 == insn_op1)
11032 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11033 arm_insn_r->reg_rec_count = 1;
11036 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
11039 record_buf[0] = ARM_PS_REGNUM;
11040 record_buf[1] = ARM_LR_REGNUM;
11041 arm_insn_r->reg_rec_count = 2;
11043 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
11045 /* QADD, QSUB, QDADD, QDSUB */
11046 record_buf[0] = ARM_PS_REGNUM;
11047 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11048 arm_insn_r->reg_rec_count = 2;
11050 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
11053 record_buf[0] = ARM_PS_REGNUM;
11054 record_buf[1] = ARM_LR_REGNUM;
11055 arm_insn_r->reg_rec_count = 2;
11057 /* Save SPSR also;how? */
11058 printf_unfiltered (_("Process record does not support "
11059 "instruction 0x%0x at address %s.\n"),
11060 arm_insn_r->arm_insn,
11061 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11064 else if(8 == bits (arm_insn_r->arm_insn, 4, 7)
11065 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
11066 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
11067 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
11070 if (0 == insn_op1 || 1 == insn_op1)
11072 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
11073 /* We dont do optimization for SMULW<y> where we
11075 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11076 record_buf[1] = ARM_PS_REGNUM;
11077 arm_insn_r->reg_rec_count = 2;
11079 else if (2 == insn_op1)
11082 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11083 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11084 arm_insn_r->reg_rec_count = 2;
11086 else if (3 == insn_op1)
11089 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11090 arm_insn_r->reg_rec_count = 1;
11096 /* MSR : immediate form. */
11099 /* CSPR is going to be changed. */
11100 record_buf[0] = ARM_PS_REGNUM;
11101 arm_insn_r->reg_rec_count = 1;
11103 else if (3 == insn_op1)
11105 /* SPSR is going to be changed. */
11106 /* we need to get SPSR value, which is yet to be done */
11107 printf_unfiltered (_("Process record does not support "
11108 "instruction 0x%0x at address %s.\n"),
11109 arm_insn_r->arm_insn,
11110 paddress (arm_insn_r->gdbarch,
11111 arm_insn_r->this_addr));
11117 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
11118 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
11119 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
11121 /* Handle load/store insn extension space. */
11123 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
11124 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
11125 && !INSN_RECORDED(arm_insn_r))
11130 /* These insn, changes register and memory as well. */
11131 /* SWP or SWPB insn. */
11132 /* Get memory address given by Rn. */
11133 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11134 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11135 /* SWP insn ?, swaps word. */
11136 if (8 == arm_insn_r->opcode)
11138 record_buf_mem[0] = 4;
11142 /* SWPB insn, swaps only byte. */
11143 record_buf_mem[0] = 1;
11145 record_buf_mem[1] = u_regval;
11146 arm_insn_r->mem_rec_count = 1;
11147 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11148 arm_insn_r->reg_rec_count = 1;
11150 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11153 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11156 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11159 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11160 record_buf[1] = record_buf[0] + 1;
11161 arm_insn_r->reg_rec_count = 2;
11163 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11166 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11169 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
11171 /* LDRH, LDRSB, LDRSH. */
11172 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11173 arm_insn_r->reg_rec_count = 1;
11178 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
11179 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
11180 && !INSN_RECORDED(arm_insn_r))
11183 /* Handle coprocessor insn extension space. */
11186 /* To be done for ARMv5 and later; as of now we return -1. */
11188 printf_unfiltered (_("Process record does not support instruction x%0x "
11189 "at address %s.\n"),arm_insn_r->arm_insn,
11190 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11193 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11194 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11199 /* Handling opcode 000 insns. */
11202 arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
11204 struct regcache *reg_cache = arm_insn_r->regcache;
11205 uint32_t record_buf[8], record_buf_mem[8];
11206 ULONGEST u_regval[2] = {0};
11208 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
11209 uint32_t immed_high = 0, immed_low = 0, offset_8 = 0, tgt_mem_addr = 0;
11210 uint32_t opcode1 = 0;
11212 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11213 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11214 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
11216 /* Data processing insn /multiply insn. */
11217 if (9 == arm_insn_r->decode
11218 && ((4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
11219 || (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)))
11221 /* Handle multiply instructions. */
11222 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
11223 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
11225 /* Handle MLA and MUL. */
11226 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11227 record_buf[1] = ARM_PS_REGNUM;
11228 arm_insn_r->reg_rec_count = 2;
11230 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
11232 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
11233 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11234 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11235 record_buf[2] = ARM_PS_REGNUM;
11236 arm_insn_r->reg_rec_count = 3;
11239 else if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
11240 && (11 == arm_insn_r->decode || 13 == arm_insn_r->decode))
11242 /* Handle misc load insns, as 20th bit (L = 1). */
11243 /* LDR insn has a capability to do branching, if
11244 MOV LR, PC is precceded by LDR insn having Rn as R15
11245 in that case, it emulates branch and link insn, and hence we
11246 need to save CSPR and PC as well. I am not sure this is right
11247 place; as opcode = 010 LDR insn make this happen, if R15 was
11249 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11250 if (15 != reg_dest)
11252 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11253 arm_insn_r->reg_rec_count = 1;
11257 record_buf[0] = reg_dest;
11258 record_buf[1] = ARM_PS_REGNUM;
11259 arm_insn_r->reg_rec_count = 2;
11262 else if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
11263 && sbo_sbz (arm_insn_r->arm_insn, 5, 12, 0)
11264 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
11265 && 2 == bits (arm_insn_r->arm_insn, 20, 21))
11267 /* Handle MSR insn. */
11268 if (9 == arm_insn_r->opcode)
11270 /* CSPR is going to be changed. */
11271 record_buf[0] = ARM_PS_REGNUM;
11272 arm_insn_r->reg_rec_count = 1;
11276 /* SPSR is going to be changed. */
11277 /* How to read SPSR value? */
11278 printf_unfiltered (_("Process record does not support instruction "
11279 "0x%0x at address %s.\n"),
11280 arm_insn_r->arm_insn,
11281 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11285 else if (9 == arm_insn_r->decode
11286 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
11287 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11289 /* Handling SWP, SWPB. */
11290 /* These insn, changes register and memory as well. */
11291 /* SWP or SWPB insn. */
11293 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11294 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11295 /* SWP insn ?, swaps word. */
11296 if (8 == arm_insn_r->opcode)
11298 record_buf_mem[0] = 4;
11302 /* SWPB insn, swaps only byte. */
11303 record_buf_mem[0] = 1;
11305 record_buf_mem[1] = u_regval[0];
11306 arm_insn_r->mem_rec_count = 1;
11307 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11308 arm_insn_r->reg_rec_count = 1;
11310 else if (3 == arm_insn_r->decode && 0x12 == opcode1
11311 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
11313 /* Handle BLX, branch and link/exchange. */
11314 if (9 == arm_insn_r->opcode)
11316 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm,
11317 and R14 stores the return address. */
11318 record_buf[0] = ARM_PS_REGNUM;
11319 record_buf[1] = ARM_LR_REGNUM;
11320 arm_insn_r->reg_rec_count = 2;
11323 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
11325 /* Handle enhanced software breakpoint insn, BKPT. */
11326 /* CPSR is changed to be executed in ARM state, disabling normal
11327 interrupts, entering abort mode. */
11328 /* According to high vector configuration PC is set. */
11329 /* user hit breakpoint and type reverse, in
11330 that case, we need to go back with previous CPSR and
11331 Program Counter. */
11332 record_buf[0] = ARM_PS_REGNUM;
11333 record_buf[1] = ARM_LR_REGNUM;
11334 arm_insn_r->reg_rec_count = 2;
11336 /* Save SPSR also; how? */
11337 printf_unfiltered (_("Process record does not support instruction "
11338 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11339 paddress (arm_insn_r->gdbarch,
11340 arm_insn_r->this_addr));
11343 else if (11 == arm_insn_r->decode
11344 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11346 /* Handle enhanced store insns and DSP insns (e.g. LDRD). */
11348 /* Handle str(x) insn */
11349 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11352 else if (1 == arm_insn_r->decode && 0x12 == opcode1
11353 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
11355 /* Handle BX, branch and link/exchange. */
11356 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm. */
11357 record_buf[0] = ARM_PS_REGNUM;
11358 arm_insn_r->reg_rec_count = 1;
11360 else if (1 == arm_insn_r->decode && 0x16 == opcode1
11361 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
11362 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
11364 /* Count leading zeros: CLZ. */
11365 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11366 arm_insn_r->reg_rec_count = 1;
11368 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
11369 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
11370 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
11371 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0)
11374 /* Handle MRS insn. */
11375 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11376 arm_insn_r->reg_rec_count = 1;
11378 else if (arm_insn_r->opcode <= 15)
11380 /* Normal data processing insns. */
11381 /* Out of 11 shifter operands mode, all the insn modifies destination
11382 register, which is specified by 13-16 decode. */
11383 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11384 record_buf[1] = ARM_PS_REGNUM;
11385 arm_insn_r->reg_rec_count = 2;
11392 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11393 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11397 /* Handling opcode 001 insns. */
11400 arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
11402 uint32_t record_buf[8], record_buf_mem[8];
11404 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11405 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11407 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
11408 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
11409 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
11412 /* Handle MSR insn. */
11413 if (9 == arm_insn_r->opcode)
11415 /* CSPR is going to be changed. */
11416 record_buf[0] = ARM_PS_REGNUM;
11417 arm_insn_r->reg_rec_count = 1;
11421 /* SPSR is going to be changed. */
11424 else if (arm_insn_r->opcode <= 15)
11426 /* Normal data processing insns. */
11427 /* Out of 11 shifter operands mode, all the insn modifies destination
11428 register, which is specified by 13-16 decode. */
11429 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11430 record_buf[1] = ARM_PS_REGNUM;
11431 arm_insn_r->reg_rec_count = 2;
11438 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11439 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11443 /* Handle ARM mode instructions with opcode 010. */
11446 arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
11448 struct regcache *reg_cache = arm_insn_r->regcache;
11450 uint32_t reg_base , reg_dest;
11451 uint32_t offset_12, tgt_mem_addr;
11452 uint32_t record_buf[8], record_buf_mem[8];
11453 unsigned char wback;
11456 /* Calculate wback. */
11457 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
11458 || (bit (arm_insn_r->arm_insn, 21) == 1);
11460 arm_insn_r->reg_rec_count = 0;
11461 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
11463 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11465 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
11468 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11469 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
11471 /* The LDR instruction is capable of doing branching. If MOV LR, PC
11472 preceeds a LDR instruction having R15 as reg_base, it
11473 emulates a branch and link instruction, and hence we need to save
11474 CPSR and PC as well. */
11475 if (ARM_PC_REGNUM == reg_dest)
11476 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
11478 /* If wback is true, also save the base register, which is going to be
11481 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11485 /* STR (immediate), STRB (immediate), STRBT and STRT. */
11487 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
11488 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
11490 /* Handle bit U. */
11491 if (bit (arm_insn_r->arm_insn, 23))
11493 /* U == 1: Add the offset. */
11494 tgt_mem_addr = (uint32_t) u_regval + offset_12;
11498 /* U == 0: subtract the offset. */
11499 tgt_mem_addr = (uint32_t) u_regval - offset_12;
11502 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
11504 if (bit (arm_insn_r->arm_insn, 22))
11506 /* STRB and STRBT: 1 byte. */
11507 record_buf_mem[0] = 1;
11511 /* STR and STRT: 4 bytes. */
11512 record_buf_mem[0] = 4;
11515 /* Handle bit P. */
11516 if (bit (arm_insn_r->arm_insn, 24))
11517 record_buf_mem[1] = tgt_mem_addr;
11519 record_buf_mem[1] = (uint32_t) u_regval;
11521 arm_insn_r->mem_rec_count = 1;
11523 /* If wback is true, also save the base register, which is going to be
11526 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11529 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11530 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11534 /* Handling opcode 011 insns. */
11537 arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
11539 struct regcache *reg_cache = arm_insn_r->regcache;
11541 uint32_t shift_imm = 0;
11542 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
11543 uint32_t offset_12 = 0, tgt_mem_addr = 0;
11544 uint32_t record_buf[8], record_buf_mem[8];
11547 ULONGEST u_regval[2];
11549 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11550 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11552 /* Handle enhanced store insns and LDRD DSP insn,
11553 order begins according to addressing modes for store insns
11557 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11559 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11560 /* LDR insn has a capability to do branching, if
11561 MOV LR, PC is precedded by LDR insn having Rn as R15
11562 in that case, it emulates branch and link insn, and hence we
11563 need to save CSPR and PC as well. */
11564 if (15 != reg_dest)
11566 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11567 arm_insn_r->reg_rec_count = 1;
11571 record_buf[0] = reg_dest;
11572 record_buf[1] = ARM_PS_REGNUM;
11573 arm_insn_r->reg_rec_count = 2;
11578 if (! bits (arm_insn_r->arm_insn, 4, 11))
11580 /* Store insn, register offset and register pre-indexed,
11581 register post-indexed. */
11583 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11585 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11586 regcache_raw_read_unsigned (reg_cache, reg_src1
11588 regcache_raw_read_unsigned (reg_cache, reg_src2
11590 if (15 == reg_src2)
11592 /* If R15 was used as Rn, hence current PC+8. */
11593 /* Pre-indexed mode doesnt reach here ; illegal insn. */
11594 u_regval[0] = u_regval[0] + 8;
11596 /* Calculate target store address, Rn +/- Rm, register offset. */
11598 if (bit (arm_insn_r->arm_insn, 23))
11600 tgt_mem_addr = u_regval[0] + u_regval[1];
11604 tgt_mem_addr = u_regval[1] - u_regval[0];
11607 switch (arm_insn_r->opcode)
11621 record_buf_mem[0] = 4;
11636 record_buf_mem[0] = 1;
11640 gdb_assert_not_reached ("no decoding pattern found");
11643 record_buf_mem[1] = tgt_mem_addr;
11644 arm_insn_r->mem_rec_count = 1;
11646 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11647 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11648 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11649 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11650 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11651 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11654 /* Rn is going to be changed in pre-indexed mode and
11655 post-indexed mode as well. */
11656 record_buf[0] = reg_src2;
11657 arm_insn_r->reg_rec_count = 1;
11662 /* Store insn, scaled register offset; scaled pre-indexed. */
11663 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
11665 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11667 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11668 /* Get shift_imm. */
11669 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
11670 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11671 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
11672 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11673 /* Offset_12 used as shift. */
11677 /* Offset_12 used as index. */
11678 offset_12 = u_regval[0] << shift_imm;
11682 offset_12 = (!shift_imm)?0:u_regval[0] >> shift_imm;
11688 if (bit (u_regval[0], 31))
11690 offset_12 = 0xFFFFFFFF;
11699 /* This is arithmetic shift. */
11700 offset_12 = s_word >> shift_imm;
11707 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
11709 /* Get C flag value and shift it by 31. */
11710 offset_12 = (((bit (u_regval[1], 29)) << 31) \
11711 | (u_regval[0]) >> 1);
11715 offset_12 = (u_regval[0] >> shift_imm) \
11717 (sizeof(uint32_t) - shift_imm));
11722 gdb_assert_not_reached ("no decoding pattern found");
11726 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11728 if (bit (arm_insn_r->arm_insn, 23))
11730 tgt_mem_addr = u_regval[1] + offset_12;
11734 tgt_mem_addr = u_regval[1] - offset_12;
11737 switch (arm_insn_r->opcode)
11751 record_buf_mem[0] = 4;
11766 record_buf_mem[0] = 1;
11770 gdb_assert_not_reached ("no decoding pattern found");
11773 record_buf_mem[1] = tgt_mem_addr;
11774 arm_insn_r->mem_rec_count = 1;
11776 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11777 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11778 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11779 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11780 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11781 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11784 /* Rn is going to be changed in register scaled pre-indexed
11785 mode,and scaled post indexed mode. */
11786 record_buf[0] = reg_src2;
11787 arm_insn_r->reg_rec_count = 1;
11792 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11793 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11797 /* Handle ARM mode instructions with opcode 100. */
11800 arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
11802 struct regcache *reg_cache = arm_insn_r->regcache;
11803 uint32_t register_count = 0, register_bits;
11804 uint32_t reg_base, addr_mode;
11805 uint32_t record_buf[24], record_buf_mem[48];
11809 /* Fetch the list of registers. */
11810 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
11811 arm_insn_r->reg_rec_count = 0;
11813 /* Fetch the base register that contains the address we are loading data
11815 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
11817 /* Calculate wback. */
11818 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
11820 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11822 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
11824 /* Find out which registers are going to be loaded from memory. */
11825 while (register_bits)
11827 if (register_bits & 0x00000001)
11828 record_buf[arm_insn_r->reg_rec_count++] = register_count;
11829 register_bits = register_bits >> 1;
11834 /* If wback is true, also save the base register, which is going to be
11837 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11839 /* Save the CPSR register. */
11840 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
11844 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
11846 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
11848 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
11850 /* Find out how many registers are going to be stored to memory. */
11851 while (register_bits)
11853 if (register_bits & 0x00000001)
11855 register_bits = register_bits >> 1;
11860 /* STMDA (STMED): Decrement after. */
11862 record_buf_mem[1] = (uint32_t) u_regval
11863 - register_count * INT_REGISTER_SIZE + 4;
11865 /* STM (STMIA, STMEA): Increment after. */
11867 record_buf_mem[1] = (uint32_t) u_regval;
11869 /* STMDB (STMFD): Decrement before. */
11871 record_buf_mem[1] = (uint32_t) u_regval
11872 - register_count * INT_REGISTER_SIZE;
11874 /* STMIB (STMFA): Increment before. */
11876 record_buf_mem[1] = (uint32_t) u_regval + INT_REGISTER_SIZE;
11879 gdb_assert_not_reached ("no decoding pattern found");
11883 record_buf_mem[0] = register_count * INT_REGISTER_SIZE;
11884 arm_insn_r->mem_rec_count = 1;
11886 /* If wback is true, also save the base register, which is going to be
11889 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11892 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11893 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11897 /* Handling opcode 101 insns. */
11900 arm_record_b_bl (insn_decode_record *arm_insn_r)
11902 uint32_t record_buf[8];
11904 /* Handle B, BL, BLX(1) insns. */
11905 /* B simply branches so we do nothing here. */
11906 /* Note: BLX(1) doesnt fall here but instead it falls into
11907 extension space. */
11908 if (bit (arm_insn_r->arm_insn, 24))
11910 record_buf[0] = ARM_LR_REGNUM;
11911 arm_insn_r->reg_rec_count = 1;
11914 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11919 /* Handling opcode 110 insns. */
11922 arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
11924 printf_unfiltered (_("Process record does not support instruction "
11925 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11926 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11931 /* Record handler for vector data transfer instructions. */
11934 arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
11936 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
11937 uint32_t record_buf[4];
11939 const int num_regs = gdbarch_num_regs (arm_insn_r->gdbarch);
11940 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
11941 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
11942 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
11943 bit_l = bit (arm_insn_r->arm_insn, 20);
11944 bit_c = bit (arm_insn_r->arm_insn, 8);
11946 /* Handle VMOV instruction. */
11947 if (bit_l && bit_c)
11949 record_buf[0] = reg_t;
11950 arm_insn_r->reg_rec_count = 1;
11952 else if (bit_l && !bit_c)
11954 /* Handle VMOV instruction. */
11955 if (bits_a == 0x00)
11957 if (bit (arm_insn_r->arm_insn, 20))
11958 record_buf[0] = reg_t;
11960 record_buf[0] = num_regs + (bit (arm_insn_r->arm_insn, 7) |
11963 arm_insn_r->reg_rec_count = 1;
11965 /* Handle VMRS instruction. */
11966 else if (bits_a == 0x07)
11969 reg_t = ARM_PS_REGNUM;
11971 record_buf[0] = reg_t;
11972 arm_insn_r->reg_rec_count = 1;
11975 else if (!bit_l && !bit_c)
11977 /* Handle VMOV instruction. */
11978 if (bits_a == 0x00)
11980 if (bit (arm_insn_r->arm_insn, 20))
11981 record_buf[0] = reg_t;
11983 record_buf[0] = num_regs + (bit (arm_insn_r->arm_insn, 7) |
11986 arm_insn_r->reg_rec_count = 1;
11988 /* Handle VMSR instruction. */
11989 else if (bits_a == 0x07)
11991 record_buf[0] = ARM_FPSCR_REGNUM;
11992 arm_insn_r->reg_rec_count = 1;
11995 else if (!bit_l && bit_c)
11997 /* Handle VMOV instruction. */
11998 if (!(bits_a & 0x04))
12000 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
12002 arm_insn_r->reg_rec_count = 1;
12004 /* Handle VDUP instruction. */
12007 if (bit (arm_insn_r->arm_insn, 21))
12009 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
12010 record_buf[0] = reg_v + ARM_D0_REGNUM;
12011 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
12012 arm_insn_r->reg_rec_count = 2;
12016 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
12017 record_buf[0] = reg_v + ARM_D0_REGNUM;
12018 arm_insn_r->reg_rec_count = 1;
12023 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12027 /* Record handler for extension register load/store instructions. */
12030 arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
12032 uint32_t opcode, single_reg;
12033 uint8_t op_vldm_vstm;
12034 uint32_t record_buf[8], record_buf_mem[128];
12035 ULONGEST u_regval = 0;
12037 struct regcache *reg_cache = arm_insn_r->regcache;
12038 const int num_regs = gdbarch_num_regs (arm_insn_r->gdbarch);
12040 opcode = bits (arm_insn_r->arm_insn, 20, 24);
12041 single_reg = bit (arm_insn_r->arm_insn, 8);
12042 op_vldm_vstm = opcode & 0x1b;
12044 /* Handle VMOV instructions. */
12045 if ((opcode & 0x1e) == 0x04)
12047 if (bit (arm_insn_r->arm_insn, 4))
12049 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
12050 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
12051 arm_insn_r->reg_rec_count = 2;
12055 uint8_t reg_m = (bits (arm_insn_r->arm_insn, 0, 3) << 1)
12056 | bit (arm_insn_r->arm_insn, 5);
12060 record_buf[0] = num_regs + reg_m;
12061 record_buf[1] = num_regs + reg_m + 1;
12062 arm_insn_r->reg_rec_count = 2;
12066 record_buf[0] = reg_m + ARM_D0_REGNUM;
12067 arm_insn_r->reg_rec_count = 1;
12071 /* Handle VSTM and VPUSH instructions. */
12072 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
12073 || op_vldm_vstm == 0x12)
12075 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
12076 uint32_t memory_index = 0;
12078 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
12079 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12080 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
12081 imm_off32 = imm_off8 << 24;
12082 memory_count = imm_off8;
12084 if (bit (arm_insn_r->arm_insn, 23))
12085 start_address = u_regval;
12087 start_address = u_regval - imm_off32;
12089 if (bit (arm_insn_r->arm_insn, 21))
12091 record_buf[0] = reg_rn;
12092 arm_insn_r->reg_rec_count = 1;
12095 while (memory_count > 0)
12099 record_buf_mem[memory_index] = start_address;
12100 record_buf_mem[memory_index + 1] = 4;
12101 start_address = start_address + 4;
12102 memory_index = memory_index + 2;
12106 record_buf_mem[memory_index] = start_address;
12107 record_buf_mem[memory_index + 1] = 4;
12108 record_buf_mem[memory_index + 2] = start_address + 4;
12109 record_buf_mem[memory_index + 3] = 4;
12110 start_address = start_address + 8;
12111 memory_index = memory_index + 4;
12115 arm_insn_r->mem_rec_count = (memory_index >> 1);
12117 /* Handle VLDM instructions. */
12118 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
12119 || op_vldm_vstm == 0x13)
12121 uint32_t reg_count, reg_vd;
12122 uint32_t reg_index = 0;
12124 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
12125 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
12128 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
12130 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
12132 if (bit (arm_insn_r->arm_insn, 21))
12133 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
12135 while (reg_count > 0)
12138 record_buf[reg_index++] = num_regs + reg_vd + reg_count - 1;
12140 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
12144 arm_insn_r->reg_rec_count = reg_index;
12146 /* VSTR Vector store register. */
12147 else if ((opcode & 0x13) == 0x10)
12149 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
12150 uint32_t memory_index = 0;
12152 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
12153 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12154 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
12155 imm_off32 = imm_off8 << 24;
12156 memory_count = imm_off8;
12158 if (bit (arm_insn_r->arm_insn, 23))
12159 start_address = u_regval + imm_off32;
12161 start_address = u_regval - imm_off32;
12165 record_buf_mem[memory_index] = start_address;
12166 record_buf_mem[memory_index + 1] = 4;
12167 arm_insn_r->mem_rec_count = 1;
12171 record_buf_mem[memory_index] = start_address;
12172 record_buf_mem[memory_index + 1] = 4;
12173 record_buf_mem[memory_index + 2] = start_address + 4;
12174 record_buf_mem[memory_index + 3] = 4;
12175 arm_insn_r->mem_rec_count = 2;
12178 /* VLDR Vector load register. */
12179 else if ((opcode & 0x13) == 0x11)
12181 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
12185 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
12186 record_buf[0] = ARM_D0_REGNUM + reg_vd;
12190 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
12191 record_buf[0] = num_regs + reg_vd;
12193 arm_insn_r->reg_rec_count = 1;
12196 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12197 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
12201 /* Record handler for arm/thumb mode VFP data processing instructions. */
12204 arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
12206 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
12207 uint32_t record_buf[4];
12208 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
12209 enum insn_types curr_insn_type = INSN_INV;
12211 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
12212 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
12213 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
12214 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
12215 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
12216 bit_d = bit (arm_insn_r->arm_insn, 22);
12217 opc1 = opc1 & 0x04;
12219 /* Handle VMLA, VMLS. */
12222 if (bit (arm_insn_r->arm_insn, 10))
12224 if (bit (arm_insn_r->arm_insn, 6))
12225 curr_insn_type = INSN_T0;
12227 curr_insn_type = INSN_T1;
12232 curr_insn_type = INSN_T1;
12234 curr_insn_type = INSN_T2;
12237 /* Handle VNMLA, VNMLS, VNMUL. */
12238 else if (opc1 == 0x01)
12241 curr_insn_type = INSN_T1;
12243 curr_insn_type = INSN_T2;
12246 else if (opc1 == 0x02 && !(opc3 & 0x01))
12248 if (bit (arm_insn_r->arm_insn, 10))
12250 if (bit (arm_insn_r->arm_insn, 6))
12251 curr_insn_type = INSN_T0;
12253 curr_insn_type = INSN_T1;
12258 curr_insn_type = INSN_T1;
12260 curr_insn_type = INSN_T2;
12263 /* Handle VADD, VSUB. */
12264 else if (opc1 == 0x03)
12266 if (!bit (arm_insn_r->arm_insn, 9))
12268 if (bit (arm_insn_r->arm_insn, 6))
12269 curr_insn_type = INSN_T0;
12271 curr_insn_type = INSN_T1;
12276 curr_insn_type = INSN_T1;
12278 curr_insn_type = INSN_T2;
12282 else if (opc1 == 0x0b)
12285 curr_insn_type = INSN_T1;
12287 curr_insn_type = INSN_T2;
12289 /* Handle all other vfp data processing instructions. */
12290 else if (opc1 == 0x0b)
12293 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
12295 if (bit (arm_insn_r->arm_insn, 4))
12297 if (bit (arm_insn_r->arm_insn, 6))
12298 curr_insn_type = INSN_T0;
12300 curr_insn_type = INSN_T1;
12305 curr_insn_type = INSN_T1;
12307 curr_insn_type = INSN_T2;
12310 /* Handle VNEG and VABS. */
12311 else if ((opc2 == 0x01 && opc3 == 0x01)
12312 || (opc2 == 0x00 && opc3 == 0x03))
12314 if (!bit (arm_insn_r->arm_insn, 11))
12316 if (bit (arm_insn_r->arm_insn, 6))
12317 curr_insn_type = INSN_T0;
12319 curr_insn_type = INSN_T1;
12324 curr_insn_type = INSN_T1;
12326 curr_insn_type = INSN_T2;
12329 /* Handle VSQRT. */
12330 else if (opc2 == 0x01 && opc3 == 0x03)
12333 curr_insn_type = INSN_T1;
12335 curr_insn_type = INSN_T2;
12338 else if (opc2 == 0x07 && opc3 == 0x03)
12341 curr_insn_type = INSN_T1;
12343 curr_insn_type = INSN_T2;
12345 else if (opc3 & 0x01)
12348 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
12350 if (!bit (arm_insn_r->arm_insn, 18))
12351 curr_insn_type = INSN_T2;
12355 curr_insn_type = INSN_T1;
12357 curr_insn_type = INSN_T2;
12361 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
12364 curr_insn_type = INSN_T1;
12366 curr_insn_type = INSN_T2;
12368 /* Handle VCVTB, VCVTT. */
12369 else if ((opc2 & 0x0e) == 0x02)
12370 curr_insn_type = INSN_T2;
12371 /* Handle VCMP, VCMPE. */
12372 else if ((opc2 & 0x0e) == 0x04)
12373 curr_insn_type = INSN_T3;
12377 switch (curr_insn_type)
12380 reg_vd = reg_vd | (bit_d << 4);
12381 record_buf[0] = reg_vd + ARM_D0_REGNUM;
12382 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
12383 arm_insn_r->reg_rec_count = 2;
12387 reg_vd = reg_vd | (bit_d << 4);
12388 record_buf[0] = reg_vd + ARM_D0_REGNUM;
12389 arm_insn_r->reg_rec_count = 1;
12393 reg_vd = (reg_vd << 1) | bit_d;
12394 record_buf[0] = reg_vd + ARM_D0_REGNUM;
12395 arm_insn_r->reg_rec_count = 1;
12399 record_buf[0] = ARM_FPSCR_REGNUM;
12400 arm_insn_r->reg_rec_count = 1;
12404 gdb_assert_not_reached ("no decoding pattern found");
12408 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12412 /* Handling opcode 110 insns. */
12415 arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
12417 uint32_t op, op1, op1_sbit, op1_ebit, coproc;
12419 coproc = bits (arm_insn_r->arm_insn, 8, 11);
12420 op1 = bits (arm_insn_r->arm_insn, 20, 25);
12421 op1_ebit = bit (arm_insn_r->arm_insn, 20);
12423 if ((coproc & 0x0e) == 0x0a)
12425 /* Handle extension register ld/st instructions. */
12427 return arm_record_exreg_ld_st_insn (arm_insn_r);
12429 /* 64-bit transfers between arm core and extension registers. */
12430 if ((op1 & 0x3e) == 0x04)
12431 return arm_record_exreg_ld_st_insn (arm_insn_r);
12435 /* Handle coprocessor ld/st instructions. */
12440 return arm_record_unsupported_insn (arm_insn_r);
12443 return arm_record_unsupported_insn (arm_insn_r);
12446 /* Move to coprocessor from two arm core registers. */
12448 return arm_record_unsupported_insn (arm_insn_r);
12450 /* Move to two arm core registers from coprocessor. */
12455 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
12456 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
12457 arm_insn_r->reg_rec_count = 2;
12459 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
12463 return arm_record_unsupported_insn (arm_insn_r);
12466 /* Handling opcode 111 insns. */
12469 arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
12471 uint32_t op, op1_sbit, op1_ebit, coproc;
12472 struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
12473 struct regcache *reg_cache = arm_insn_r->regcache;
12474 ULONGEST u_regval = 0;
12476 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
12477 coproc = bits (arm_insn_r->arm_insn, 8, 11);
12478 op1_sbit = bit (arm_insn_r->arm_insn, 24);
12479 op1_ebit = bit (arm_insn_r->arm_insn, 20);
12480 op = bit (arm_insn_r->arm_insn, 4);
12482 /* Handle arm SWI/SVC system call instructions. */
12485 if (tdep->arm_syscall_record != NULL)
12487 ULONGEST svc_operand, svc_number;
12489 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
12491 if (svc_operand) /* OABI. */
12492 svc_number = svc_operand - 0x900000;
12494 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
12496 return tdep->arm_syscall_record (reg_cache, svc_number);
12500 printf_unfiltered (_("no syscall record support\n"));
12505 if ((coproc & 0x0e) == 0x0a)
12507 /* VFP data-processing instructions. */
12508 if (!op1_sbit && !op)
12509 return arm_record_vfp_data_proc_insn (arm_insn_r);
12511 /* Advanced SIMD, VFP instructions. */
12512 if (!op1_sbit && op)
12513 return arm_record_vdata_transfer_insn (arm_insn_r);
12517 /* Coprocessor data operations. */
12518 if (!op1_sbit && !op)
12519 return arm_record_unsupported_insn (arm_insn_r);
12521 /* Move to Coprocessor from ARM core register. */
12522 if (!op1_sbit && !op1_ebit && op)
12523 return arm_record_unsupported_insn (arm_insn_r);
12525 /* Move to arm core register from coprocessor. */
12526 if (!op1_sbit && op1_ebit && op)
12528 uint32_t record_buf[1];
12530 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
12531 if (record_buf[0] == 15)
12532 record_buf[0] = ARM_PS_REGNUM;
12534 arm_insn_r->reg_rec_count = 1;
12535 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
12541 return arm_record_unsupported_insn (arm_insn_r);
12544 /* Handling opcode 000 insns. */
12547 thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
12549 uint32_t record_buf[8];
12550 uint32_t reg_src1 = 0;
12552 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12554 record_buf[0] = ARM_PS_REGNUM;
12555 record_buf[1] = reg_src1;
12556 thumb_insn_r->reg_rec_count = 2;
12558 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12564 /* Handling opcode 001 insns. */
12567 thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
12569 uint32_t record_buf[8];
12570 uint32_t reg_src1 = 0;
12572 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12574 record_buf[0] = ARM_PS_REGNUM;
12575 record_buf[1] = reg_src1;
12576 thumb_insn_r->reg_rec_count = 2;
12578 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12583 /* Handling opcode 010 insns. */
12586 thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
12588 struct regcache *reg_cache = thumb_insn_r->regcache;
12589 uint32_t record_buf[8], record_buf_mem[8];
12591 uint32_t reg_src1 = 0, reg_src2 = 0;
12592 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
12594 ULONGEST u_regval[2] = {0};
12596 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
12598 if (bit (thumb_insn_r->arm_insn, 12))
12600 /* Handle load/store register offset. */
12601 opcode2 = bits (thumb_insn_r->arm_insn, 9, 10);
12602 if (opcode2 >= 12 && opcode2 <= 15)
12604 /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH. */
12605 reg_src1 = bits (thumb_insn_r->arm_insn,0, 2);
12606 record_buf[0] = reg_src1;
12607 thumb_insn_r->reg_rec_count = 1;
12609 else if (opcode2 >= 8 && opcode2 <= 10)
12611 /* STR(2), STRB(2), STRH(2) . */
12612 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12613 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
12614 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
12615 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
12617 record_buf_mem[0] = 4; /* STR (2). */
12618 else if (10 == opcode2)
12619 record_buf_mem[0] = 1; /* STRB (2). */
12620 else if (9 == opcode2)
12621 record_buf_mem[0] = 2; /* STRH (2). */
12622 record_buf_mem[1] = u_regval[0] + u_regval[1];
12623 thumb_insn_r->mem_rec_count = 1;
12626 else if (bit (thumb_insn_r->arm_insn, 11))
12628 /* Handle load from literal pool. */
12630 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12631 record_buf[0] = reg_src1;
12632 thumb_insn_r->reg_rec_count = 1;
12636 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
12637 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
12638 if ((3 == opcode2) && (!opcode3))
12640 /* Branch with exchange. */
12641 record_buf[0] = ARM_PS_REGNUM;
12642 thumb_insn_r->reg_rec_count = 1;
12646 /* Format 8; special data processing insns. */
12647 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12648 record_buf[0] = ARM_PS_REGNUM;
12649 record_buf[1] = reg_src1;
12650 thumb_insn_r->reg_rec_count = 2;
12655 /* Format 5; data processing insns. */
12656 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12657 if (bit (thumb_insn_r->arm_insn, 7))
12659 reg_src1 = reg_src1 + 8;
12661 record_buf[0] = ARM_PS_REGNUM;
12662 record_buf[1] = reg_src1;
12663 thumb_insn_r->reg_rec_count = 2;
12666 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12667 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12673 /* Handling opcode 001 insns. */
12676 thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
12678 struct regcache *reg_cache = thumb_insn_r->regcache;
12679 uint32_t record_buf[8], record_buf_mem[8];
12681 uint32_t reg_src1 = 0;
12682 uint32_t opcode = 0, immed_5 = 0;
12684 ULONGEST u_regval = 0;
12686 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12691 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12692 record_buf[0] = reg_src1;
12693 thumb_insn_r->reg_rec_count = 1;
12698 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12699 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
12700 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12701 record_buf_mem[0] = 4;
12702 record_buf_mem[1] = u_regval + (immed_5 * 4);
12703 thumb_insn_r->mem_rec_count = 1;
12706 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12707 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12713 /* Handling opcode 100 insns. */
12716 thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
12718 struct regcache *reg_cache = thumb_insn_r->regcache;
12719 uint32_t record_buf[8], record_buf_mem[8];
12721 uint32_t reg_src1 = 0;
12722 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
12724 ULONGEST u_regval = 0;
12726 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12731 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12732 record_buf[0] = reg_src1;
12733 thumb_insn_r->reg_rec_count = 1;
12735 else if (1 == opcode)
12738 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12739 record_buf[0] = reg_src1;
12740 thumb_insn_r->reg_rec_count = 1;
12742 else if (2 == opcode)
12745 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
12746 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12747 record_buf_mem[0] = 4;
12748 record_buf_mem[1] = u_regval + (immed_8 * 4);
12749 thumb_insn_r->mem_rec_count = 1;
12751 else if (0 == opcode)
12754 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
12755 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12756 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12757 record_buf_mem[0] = 2;
12758 record_buf_mem[1] = u_regval + (immed_5 * 2);
12759 thumb_insn_r->mem_rec_count = 1;
12762 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12763 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12769 /* Handling opcode 101 insns. */
12772 thumb_record_misc (insn_decode_record *thumb_insn_r)
12774 struct regcache *reg_cache = thumb_insn_r->regcache;
12776 uint32_t opcode = 0, opcode1 = 0, opcode2 = 0;
12777 uint32_t register_bits = 0, register_count = 0;
12778 uint32_t register_list[8] = {0}, index = 0, start_address = 0;
12779 uint32_t record_buf[24], record_buf_mem[48];
12782 ULONGEST u_regval = 0;
12784 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12785 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12786 opcode2 = bits (thumb_insn_r->arm_insn, 9, 12);
12791 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12792 while (register_bits)
12794 if (register_bits & 0x00000001)
12795 record_buf[index++] = register_count;
12796 register_bits = register_bits >> 1;
12799 record_buf[index++] = ARM_PS_REGNUM;
12800 record_buf[index++] = ARM_SP_REGNUM;
12801 thumb_insn_r->reg_rec_count = index;
12803 else if (10 == opcode2)
12806 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12807 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12808 while (register_bits)
12810 if (register_bits & 0x00000001)
12812 register_bits = register_bits >> 1;
12814 start_address = u_regval - \
12815 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
12816 thumb_insn_r->mem_rec_count = register_count;
12817 while (register_count)
12819 record_buf_mem[(register_count * 2) - 1] = start_address;
12820 record_buf_mem[(register_count * 2) - 2] = 4;
12821 start_address = start_address + 4;
12824 record_buf[0] = ARM_SP_REGNUM;
12825 thumb_insn_r->reg_rec_count = 1;
12827 else if (0x1E == opcode1)
12830 /* Handle enhanced software breakpoint insn, BKPT. */
12831 /* CPSR is changed to be executed in ARM state, disabling normal
12832 interrupts, entering abort mode. */
12833 /* According to high vector configuration PC is set. */
12834 /* User hits breakpoint and type reverse, in that case, we need to go back with
12835 previous CPSR and Program Counter. */
12836 record_buf[0] = ARM_PS_REGNUM;
12837 record_buf[1] = ARM_LR_REGNUM;
12838 thumb_insn_r->reg_rec_count = 2;
12839 /* We need to save SPSR value, which is not yet done. */
12840 printf_unfiltered (_("Process record does not support instruction "
12841 "0x%0x at address %s.\n"),
12842 thumb_insn_r->arm_insn,
12843 paddress (thumb_insn_r->gdbarch,
12844 thumb_insn_r->this_addr));
12847 else if ((0 == opcode) || (1 == opcode))
12849 /* ADD(5), ADD(6). */
12850 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12851 record_buf[0] = reg_src1;
12852 thumb_insn_r->reg_rec_count = 1;
12854 else if (2 == opcode)
12856 /* ADD(7), SUB(4). */
12857 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12858 record_buf[0] = ARM_SP_REGNUM;
12859 thumb_insn_r->reg_rec_count = 1;
12862 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12863 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12869 /* Handling opcode 110 insns. */
12872 thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
12874 struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
12875 struct regcache *reg_cache = thumb_insn_r->regcache;
12877 uint32_t ret = 0; /* function return value: -1:record failure ; 0:success */
12878 uint32_t reg_src1 = 0;
12879 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
12880 uint32_t register_list[8] = {0}, index = 0, start_address = 0;
12881 uint32_t record_buf[24], record_buf_mem[48];
12883 ULONGEST u_regval = 0;
12885 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12886 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
12892 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12894 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12895 while (register_bits)
12897 if (register_bits & 0x00000001)
12898 record_buf[index++] = register_count;
12899 register_bits = register_bits >> 1;
12902 record_buf[index++] = reg_src1;
12903 thumb_insn_r->reg_rec_count = index;
12905 else if (0 == opcode2)
12907 /* It handles both STMIA. */
12908 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12910 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12911 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12912 while (register_bits)
12914 if (register_bits & 0x00000001)
12916 register_bits = register_bits >> 1;
12918 start_address = u_regval;
12919 thumb_insn_r->mem_rec_count = register_count;
12920 while (register_count)
12922 record_buf_mem[(register_count * 2) - 1] = start_address;
12923 record_buf_mem[(register_count * 2) - 2] = 4;
12924 start_address = start_address + 4;
12928 else if (0x1F == opcode1)
12930 /* Handle arm syscall insn. */
12931 if (tdep->arm_syscall_record != NULL)
12933 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
12934 ret = tdep->arm_syscall_record (reg_cache, u_regval);
12938 printf_unfiltered (_("no syscall record support\n"));
12943 /* B (1), conditional branch is automatically taken care in process_record,
12944 as PC is saved there. */
12946 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12947 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12953 /* Handling opcode 111 insns. */
12956 thumb_record_branch (insn_decode_record *thumb_insn_r)
12958 uint32_t record_buf[8];
12959 uint32_t bits_h = 0;
12961 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
12963 if (2 == bits_h || 3 == bits_h)
12966 record_buf[0] = ARM_LR_REGNUM;
12967 thumb_insn_r->reg_rec_count = 1;
12969 else if (1 == bits_h)
12972 record_buf[0] = ARM_PS_REGNUM;
12973 record_buf[1] = ARM_LR_REGNUM;
12974 thumb_insn_r->reg_rec_count = 2;
12977 /* B(2) is automatically taken care in process_record, as PC is
12980 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12985 /* Handler for thumb2 load/store multiple instructions. */
12988 thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
12990 struct regcache *reg_cache = thumb2_insn_r->regcache;
12992 uint32_t reg_rn, op;
12993 uint32_t register_bits = 0, register_count = 0;
12994 uint32_t index = 0, start_address = 0;
12995 uint32_t record_buf[24], record_buf_mem[48];
12997 ULONGEST u_regval = 0;
12999 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13000 op = bits (thumb2_insn_r->arm_insn, 23, 24);
13002 if (0 == op || 3 == op)
13004 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
13006 /* Handle RFE instruction. */
13007 record_buf[0] = ARM_PS_REGNUM;
13008 thumb2_insn_r->reg_rec_count = 1;
13012 /* Handle SRS instruction after reading banked SP. */
13013 return arm_record_unsupported_insn (thumb2_insn_r);
13016 else if (1 == op || 2 == op)
13018 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
13020 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
13021 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
13022 while (register_bits)
13024 if (register_bits & 0x00000001)
13025 record_buf[index++] = register_count;
13028 register_bits = register_bits >> 1;
13030 record_buf[index++] = reg_rn;
13031 record_buf[index++] = ARM_PS_REGNUM;
13032 thumb2_insn_r->reg_rec_count = index;
13036 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
13037 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
13038 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
13039 while (register_bits)
13041 if (register_bits & 0x00000001)
13044 register_bits = register_bits >> 1;
13049 /* Start address calculation for LDMDB/LDMEA. */
13050 start_address = u_regval;
13054 /* Start address calculation for LDMDB/LDMEA. */
13055 start_address = u_regval - register_count * 4;
13058 thumb2_insn_r->mem_rec_count = register_count;
13059 while (register_count)
13061 record_buf_mem[register_count * 2 - 1] = start_address;
13062 record_buf_mem[register_count * 2 - 2] = 4;
13063 start_address = start_address + 4;
13066 record_buf[0] = reg_rn;
13067 record_buf[1] = ARM_PS_REGNUM;
13068 thumb2_insn_r->reg_rec_count = 2;
13072 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
13074 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13076 return ARM_RECORD_SUCCESS;
13079 /* Handler for thumb2 load/store (dual/exclusive) and table branch
13083 thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
13085 struct regcache *reg_cache = thumb2_insn_r->regcache;
13087 uint32_t reg_rd, reg_rn, offset_imm;
13088 uint32_t reg_dest1, reg_dest2;
13089 uint32_t address, offset_addr;
13090 uint32_t record_buf[8], record_buf_mem[8];
13091 uint32_t op1, op2, op3;
13094 ULONGEST u_regval[2];
13096 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
13097 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
13098 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
13100 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
13102 if(!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
13104 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
13105 record_buf[0] = reg_dest1;
13106 record_buf[1] = ARM_PS_REGNUM;
13107 thumb2_insn_r->reg_rec_count = 2;
13110 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
13112 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
13113 record_buf[2] = reg_dest2;
13114 thumb2_insn_r->reg_rec_count = 3;
13119 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13120 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
13122 if (0 == op1 && 0 == op2)
13124 /* Handle STREX. */
13125 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
13126 address = u_regval[0] + (offset_imm * 4);
13127 record_buf_mem[0] = 4;
13128 record_buf_mem[1] = address;
13129 thumb2_insn_r->mem_rec_count = 1;
13130 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
13131 record_buf[0] = reg_rd;
13132 thumb2_insn_r->reg_rec_count = 1;
13134 else if (1 == op1 && 0 == op2)
13136 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
13137 record_buf[0] = reg_rd;
13138 thumb2_insn_r->reg_rec_count = 1;
13139 address = u_regval[0];
13140 record_buf_mem[1] = address;
13144 /* Handle STREXB. */
13145 record_buf_mem[0] = 1;
13146 thumb2_insn_r->mem_rec_count = 1;
13150 /* Handle STREXH. */
13151 record_buf_mem[0] = 2 ;
13152 thumb2_insn_r->mem_rec_count = 1;
13156 /* Handle STREXD. */
13157 address = u_regval[0];
13158 record_buf_mem[0] = 4;
13159 record_buf_mem[2] = 4;
13160 record_buf_mem[3] = address + 4;
13161 thumb2_insn_r->mem_rec_count = 2;
13166 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
13168 if (bit (thumb2_insn_r->arm_insn, 24))
13170 if (bit (thumb2_insn_r->arm_insn, 23))
13171 offset_addr = u_regval[0] + (offset_imm * 4);
13173 offset_addr = u_regval[0] - (offset_imm * 4);
13175 address = offset_addr;
13178 address = u_regval[0];
13180 record_buf_mem[0] = 4;
13181 record_buf_mem[1] = address;
13182 record_buf_mem[2] = 4;
13183 record_buf_mem[3] = address + 4;
13184 thumb2_insn_r->mem_rec_count = 2;
13185 record_buf[0] = reg_rn;
13186 thumb2_insn_r->reg_rec_count = 1;
13190 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13192 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
13194 return ARM_RECORD_SUCCESS;
13197 /* Handler for thumb2 data processing (shift register and modified immediate)
13201 thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
13203 uint32_t reg_rd, op;
13204 uint32_t record_buf[8];
13206 op = bits (thumb2_insn_r->arm_insn, 21, 24);
13207 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
13209 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
13211 record_buf[0] = ARM_PS_REGNUM;
13212 thumb2_insn_r->reg_rec_count = 1;
13216 record_buf[0] = reg_rd;
13217 record_buf[1] = ARM_PS_REGNUM;
13218 thumb2_insn_r->reg_rec_count = 2;
13221 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13223 return ARM_RECORD_SUCCESS;
13226 /* Generic handler for thumb2 instructions which effect destination and PS
13230 thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
13233 uint32_t record_buf[8];
13235 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
13237 record_buf[0] = reg_rd;
13238 record_buf[1] = ARM_PS_REGNUM;
13239 thumb2_insn_r->reg_rec_count = 2;
13241 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13243 return ARM_RECORD_SUCCESS;
13246 /* Handler for thumb2 branch and miscellaneous control instructions. */
13249 thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
13251 uint32_t op, op1, op2;
13252 uint32_t record_buf[8];
13254 op = bits (thumb2_insn_r->arm_insn, 20, 26);
13255 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
13256 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
13258 /* Handle MSR insn. */
13259 if (!(op1 & 0x2) && 0x38 == op)
13263 /* CPSR is going to be changed. */
13264 record_buf[0] = ARM_PS_REGNUM;
13265 thumb2_insn_r->reg_rec_count = 1;
13269 arm_record_unsupported_insn(thumb2_insn_r);
13273 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
13276 record_buf[0] = ARM_PS_REGNUM;
13277 record_buf[1] = ARM_LR_REGNUM;
13278 thumb2_insn_r->reg_rec_count = 2;
13281 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13283 return ARM_RECORD_SUCCESS;
13286 /* Handler for thumb2 store single data item instructions. */
13289 thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
13291 struct regcache *reg_cache = thumb2_insn_r->regcache;
13293 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
13294 uint32_t address, offset_addr;
13295 uint32_t record_buf[8], record_buf_mem[8];
13298 ULONGEST u_regval[2];
13300 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
13301 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
13302 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13303 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
13305 if (bit (thumb2_insn_r->arm_insn, 23))
13308 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
13309 offset_addr = u_regval[0] + offset_imm;
13310 address = offset_addr;
13315 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
13317 /* Handle STRB (register). */
13318 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
13319 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
13320 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
13321 offset_addr = u_regval[1] << shift_imm;
13322 address = u_regval[0] + offset_addr;
13326 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
13327 if (bit (thumb2_insn_r->arm_insn, 10))
13329 if (bit (thumb2_insn_r->arm_insn, 9))
13330 offset_addr = u_regval[0] + offset_imm;
13332 offset_addr = u_regval[0] - offset_imm;
13334 address = offset_addr;
13337 address = u_regval[0];
13343 /* Store byte instructions. */
13346 record_buf_mem[0] = 1;
13348 /* Store half word instructions. */
13351 record_buf_mem[0] = 2;
13353 /* Store word instructions. */
13356 record_buf_mem[0] = 4;
13360 gdb_assert_not_reached ("no decoding pattern found");
13364 record_buf_mem[1] = address;
13365 thumb2_insn_r->mem_rec_count = 1;
13366 record_buf[0] = reg_rn;
13367 thumb2_insn_r->reg_rec_count = 1;
13369 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13371 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
13373 return ARM_RECORD_SUCCESS;
13376 /* Handler for thumb2 load memory hints instructions. */
13379 thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
13381 uint32_t record_buf[8];
13382 uint32_t reg_rt, reg_rn;
13384 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
13385 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13387 if (ARM_PC_REGNUM != reg_rt)
13389 record_buf[0] = reg_rt;
13390 record_buf[1] = reg_rn;
13391 record_buf[2] = ARM_PS_REGNUM;
13392 thumb2_insn_r->reg_rec_count = 3;
13394 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13396 return ARM_RECORD_SUCCESS;
13399 return ARM_RECORD_FAILURE;
13402 /* Handler for thumb2 load word instructions. */
13405 thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
13407 uint32_t opcode1 = 0, opcode2 = 0;
13408 uint32_t record_buf[8];
13410 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
13411 record_buf[1] = ARM_PS_REGNUM;
13412 thumb2_insn_r->reg_rec_count = 2;
13414 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13416 return ARM_RECORD_SUCCESS;
13419 /* Handler for thumb2 long multiply, long multiply accumulate, and
13420 divide instructions. */
13423 thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
13425 uint32_t opcode1 = 0, opcode2 = 0;
13426 uint32_t record_buf[8];
13427 uint32_t reg_src1 = 0;
13429 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
13430 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
13432 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
13434 /* Handle SMULL, UMULL, SMULAL. */
13435 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
13436 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
13437 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
13438 record_buf[2] = ARM_PS_REGNUM;
13439 thumb2_insn_r->reg_rec_count = 3;
13441 else if (1 == opcode1 || 3 == opcode2)
13443 /* Handle SDIV and UDIV. */
13444 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
13445 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
13446 record_buf[2] = ARM_PS_REGNUM;
13447 thumb2_insn_r->reg_rec_count = 3;
13450 return ARM_RECORD_FAILURE;
13452 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13454 return ARM_RECORD_SUCCESS;
13457 /* Record handler for thumb32 coprocessor instructions. */
13460 thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
13462 if (bit (thumb2_insn_r->arm_insn, 25))
13463 return arm_record_coproc_data_proc (thumb2_insn_r);
13465 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
13468 /* Record handler for advance SIMD structure load/store instructions. */
13471 thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
13473 struct regcache *reg_cache = thumb2_insn_r->regcache;
13474 uint32_t l_bit, a_bit, b_bits;
13475 uint32_t record_buf[128], record_buf_mem[128];
13476 uint32_t reg_rn, reg_vd, address, f_esize, f_elem;
13477 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
13480 l_bit = bit (thumb2_insn_r->arm_insn, 21);
13481 a_bit = bit (thumb2_insn_r->arm_insn, 23);
13482 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
13483 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13484 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
13485 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
13486 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
13487 f_esize = 8 * f_ebytes;
13488 f_elem = 8 / f_ebytes;
13492 ULONGEST u_regval = 0;
13493 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
13494 address = u_regval;
13499 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
13501 if (b_bits == 0x07)
13503 else if (b_bits == 0x0a)
13505 else if (b_bits == 0x06)
13507 else if (b_bits == 0x02)
13512 for (index_r = 0; index_r < bf_regs; index_r++)
13514 for (index_e = 0; index_e < f_elem; index_e++)
13516 record_buf_mem[index_m++] = f_ebytes;
13517 record_buf_mem[index_m++] = address;
13518 address = address + f_ebytes;
13519 thumb2_insn_r->mem_rec_count += 1;
13524 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
13526 if (b_bits == 0x09 || b_bits == 0x08)
13528 else if (b_bits == 0x03)
13533 for (index_r = 0; index_r < bf_regs; index_r++)
13534 for (index_e = 0; index_e < f_elem; index_e++)
13536 for (loop_t = 0; loop_t < 2; loop_t++)
13538 record_buf_mem[index_m++] = f_ebytes;
13539 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
13540 thumb2_insn_r->mem_rec_count += 1;
13542 address = address + (2 * f_ebytes);
13546 else if ((b_bits & 0x0e) == 0x04)
13548 for (index_e = 0; index_e < f_elem; index_e++)
13550 for (loop_t = 0; loop_t < 3; loop_t++)
13552 record_buf_mem[index_m++] = f_ebytes;
13553 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
13554 thumb2_insn_r->mem_rec_count += 1;
13556 address = address + (3 * f_ebytes);
13560 else if (!(b_bits & 0x0e))
13562 for (index_e = 0; index_e < f_elem; index_e++)
13564 for (loop_t = 0; loop_t < 4; loop_t++)
13566 record_buf_mem[index_m++] = f_ebytes;
13567 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
13568 thumb2_insn_r->mem_rec_count += 1;
13570 address = address + (4 * f_ebytes);
13576 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
13578 if (bft_size == 0x00)
13580 else if (bft_size == 0x01)
13582 else if (bft_size == 0x02)
13588 if (!(b_bits & 0x0b) || b_bits == 0x08)
13589 thumb2_insn_r->mem_rec_count = 1;
13591 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
13592 thumb2_insn_r->mem_rec_count = 2;
13594 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
13595 thumb2_insn_r->mem_rec_count = 3;
13597 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
13598 thumb2_insn_r->mem_rec_count = 4;
13600 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
13602 record_buf_mem[index_m] = f_ebytes;
13603 record_buf_mem[index_m] = address + (index_m * f_ebytes);
13612 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
13613 thumb2_insn_r->reg_rec_count = 1;
13615 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
13616 thumb2_insn_r->reg_rec_count = 2;
13618 else if ((b_bits & 0x0e) == 0x04)
13619 thumb2_insn_r->reg_rec_count = 3;
13621 else if (!(b_bits & 0x0e))
13622 thumb2_insn_r->reg_rec_count = 4;
13627 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
13628 thumb2_insn_r->reg_rec_count = 1;
13630 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
13631 thumb2_insn_r->reg_rec_count = 2;
13633 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
13634 thumb2_insn_r->reg_rec_count = 3;
13636 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
13637 thumb2_insn_r->reg_rec_count = 4;
13639 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
13640 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
13644 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
13646 record_buf[index_r] = reg_rn;
13647 thumb2_insn_r->reg_rec_count += 1;
13650 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13652 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
13657 /* Decodes thumb2 instruction type and invokes its record handler. */
13659 static unsigned int
13660 thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
13662 uint32_t op, op1, op2;
13664 op = bit (thumb2_insn_r->arm_insn, 15);
13665 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
13666 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
13670 if (!(op2 & 0x64 ))
13672 /* Load/store multiple instruction. */
13673 return thumb2_record_ld_st_multiple (thumb2_insn_r);
13675 else if (!((op2 & 0x64) ^ 0x04))
13677 /* Load/store (dual/exclusive) and table branch instruction. */
13678 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
13680 else if (!((op2 & 0x20) ^ 0x20))
13682 /* Data-processing (shifted register). */
13683 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
13685 else if (op2 & 0x40)
13687 /* Co-processor instructions. */
13688 return thumb2_record_coproc_insn (thumb2_insn_r);
13691 else if (op1 == 0x02)
13695 /* Branches and miscellaneous control instructions. */
13696 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
13698 else if (op2 & 0x20)
13700 /* Data-processing (plain binary immediate) instruction. */
13701 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13705 /* Data-processing (modified immediate). */
13706 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
13709 else if (op1 == 0x03)
13711 if (!(op2 & 0x71 ))
13713 /* Store single data item. */
13714 return thumb2_record_str_single_data (thumb2_insn_r);
13716 else if (!((op2 & 0x71) ^ 0x10))
13718 /* Advanced SIMD or structure load/store instructions. */
13719 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
13721 else if (!((op2 & 0x67) ^ 0x01))
13723 /* Load byte, memory hints instruction. */
13724 return thumb2_record_ld_mem_hints (thumb2_insn_r);
13726 else if (!((op2 & 0x67) ^ 0x03))
13728 /* Load halfword, memory hints instruction. */
13729 return thumb2_record_ld_mem_hints (thumb2_insn_r);
13731 else if (!((op2 & 0x67) ^ 0x05))
13733 /* Load word instruction. */
13734 return thumb2_record_ld_word (thumb2_insn_r);
13736 else if (!((op2 & 0x70) ^ 0x20))
13738 /* Data-processing (register) instruction. */
13739 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13741 else if (!((op2 & 0x78) ^ 0x30))
13743 /* Multiply, multiply accumulate, abs diff instruction. */
13744 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13746 else if (!((op2 & 0x78) ^ 0x38))
13748 /* Long multiply, long multiply accumulate, and divide. */
13749 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
13751 else if (op2 & 0x40)
13753 /* Co-processor instructions. */
13754 return thumb2_record_coproc_insn (thumb2_insn_r);
13761 /* Extracts arm/thumb/thumb2 insn depending on the size, and returns 0 on success
13762 and positive val on fauilure. */
13765 extract_arm_insn (insn_decode_record *insn_record, uint32_t insn_size)
13767 gdb_byte buf[insn_size];
13769 memset (&buf[0], 0, insn_size);
13771 if (target_read_memory (insn_record->this_addr, &buf[0], insn_size))
13773 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
13775 gdbarch_byte_order_for_code (insn_record->gdbarch));
13779 typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
13781 /* Decode arm/thumb insn depending on condition cods and opcodes; and
13785 decode_insn (insn_decode_record *arm_record, record_type_t record_type,
13786 uint32_t insn_size)
13789 /* (Starting from numerical 0); bits 25, 26, 27 decodes type of arm instruction. */
13790 static const sti_arm_hdl_fp_t const arm_handle_insn[8] =
13792 arm_record_data_proc_misc_ld_str, /* 000. */
13793 arm_record_data_proc_imm, /* 001. */
13794 arm_record_ld_st_imm_offset, /* 010. */
13795 arm_record_ld_st_reg_offset, /* 011. */
13796 arm_record_ld_st_multiple, /* 100. */
13797 arm_record_b_bl, /* 101. */
13798 arm_record_asimd_vfp_coproc, /* 110. */
13799 arm_record_coproc_data_proc /* 111. */
13802 /* (Starting from numerical 0); bits 13,14,15 decodes type of thumb instruction. */
13803 static const sti_arm_hdl_fp_t const thumb_handle_insn[8] =
13805 thumb_record_shift_add_sub, /* 000. */
13806 thumb_record_add_sub_cmp_mov, /* 001. */
13807 thumb_record_ld_st_reg_offset, /* 010. */
13808 thumb_record_ld_st_imm_offset, /* 011. */
13809 thumb_record_ld_st_stack, /* 100. */
13810 thumb_record_misc, /* 101. */
13811 thumb_record_ldm_stm_swi, /* 110. */
13812 thumb_record_branch /* 111. */
13815 uint32_t ret = 0; /* return value: negative:failure 0:success. */
13816 uint32_t insn_id = 0;
13818 if (extract_arm_insn (arm_record, insn_size))
13822 printf_unfiltered (_("Process record: error reading memory at "
13823 "addr %s len = %d.\n"),
13824 paddress (arm_record->gdbarch, arm_record->this_addr), insn_size);
13828 else if (ARM_RECORD == record_type)
13830 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
13831 insn_id = bits (arm_record->arm_insn, 25, 27);
13832 ret = arm_record_extension_space (arm_record);
13833 /* If this insn has fallen into extension space
13834 then we need not decode it anymore. */
13835 if (ret != -1 && !INSN_RECORDED(arm_record))
13837 ret = arm_handle_insn[insn_id] (arm_record);
13840 else if (THUMB_RECORD == record_type)
13842 /* As thumb does not have condition codes, we set negative. */
13843 arm_record->cond = -1;
13844 insn_id = bits (arm_record->arm_insn, 13, 15);
13845 ret = thumb_handle_insn[insn_id] (arm_record);
13847 else if (THUMB2_RECORD == record_type)
13849 /* As thumb does not have condition codes, we set negative. */
13850 arm_record->cond = -1;
13852 /* Swap first half of 32bit thumb instruction with second half. */
13853 arm_record->arm_insn
13854 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
13856 insn_id = thumb2_record_decode_insn_handler (arm_record);
13858 if (insn_id != ARM_RECORD_SUCCESS)
13860 arm_record_unsupported_insn (arm_record);
13866 /* Throw assertion. */
13867 gdb_assert_not_reached ("not a valid instruction, could not decode");
13874 /* Cleans up local record registers and memory allocations. */
13877 deallocate_reg_mem (insn_decode_record *record)
13879 xfree (record->arm_regs);
13880 xfree (record->arm_mems);
13884 /* Parse the current instruction and record the values of the registers and
13885 memory that will be changed in current instruction to record_arch_list".
13886 Return -1 if something is wrong. */
13889 arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
13890 CORE_ADDR insn_addr)
13893 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
13894 uint32_t no_of_rec = 0;
13895 uint32_t ret = 0; /* return value: -1:record failure ; 0:success */
13896 ULONGEST t_bit = 0, insn_id = 0;
13898 ULONGEST u_regval = 0;
13900 insn_decode_record arm_record;
13902 memset (&arm_record, 0, sizeof (insn_decode_record));
13903 arm_record.regcache = regcache;
13904 arm_record.this_addr = insn_addr;
13905 arm_record.gdbarch = gdbarch;
13908 if (record_debug > 1)
13910 fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
13912 paddress (gdbarch, arm_record.this_addr));
13915 if (extract_arm_insn (&arm_record, 2))
13919 printf_unfiltered (_("Process record: error reading memory at "
13920 "addr %s len = %d.\n"),
13921 paddress (arm_record.gdbarch,
13922 arm_record.this_addr), 2);
13927 /* Check the insn, whether it is thumb or arm one. */
13929 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
13930 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
13933 if (!(u_regval & t_bit))
13935 /* We are decoding arm insn. */
13936 ret = decode_insn (&arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
13940 insn_id = bits (arm_record.arm_insn, 11, 15);
13941 /* is it thumb2 insn? */
13942 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
13944 ret = decode_insn (&arm_record, THUMB2_RECORD,
13945 THUMB2_INSN_SIZE_BYTES);
13949 /* We are decoding thumb insn. */
13950 ret = decode_insn (&arm_record, THUMB_RECORD, THUMB_INSN_SIZE_BYTES);
13956 /* Record registers. */
13957 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
13958 if (arm_record.arm_regs)
13960 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
13962 if (record_full_arch_list_add_reg
13963 (arm_record.regcache , arm_record.arm_regs[no_of_rec]))
13967 /* Record memories. */
13968 if (arm_record.arm_mems)
13970 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
13972 if (record_full_arch_list_add_mem
13973 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
13974 arm_record.arm_mems[no_of_rec].len))
13979 if (record_full_arch_list_add_end ())
13984 deallocate_reg_mem (&arm_record);